@@ -40,7 +40,7 @@ pub fn vendor(ws: &Workspace<'_>, opts: &VendorOptions<'_>) -> CargoResult<()> {
         extra_workspaces.push(ws);
     }
     let workspaces = extra_workspaces.iter().chain(Some(ws)).collect::<Vec<_>>();
-    let _lock = gctx.acquire_package_cache_lock(CacheLockMode::MutateExclusive)?;
+    let _lock = gctx.acquire_package_cache_lock(CacheLockMode::DownloadExclusive)?;
     let vendor_config = sync(gctx, &workspaces, opts).context("failed to sync")?;
 
     if gctx.shell().verbosity() != Verbosity::Quiet {
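The lock downgrade above is what enables the rest of this commit: `MutateExclusive` guards operations that modify Cargo's global package cache, while `DownloadExclusive` only guards downloading, and once the cache-deleting workaround is removed (next hunks), downloading is the only cache access vendoring performs. A minimal sketch of the call-site pattern, assuming the import paths from cargo's own source layout (`lock_and_sync` is a hypothetical wrapper, not a cargo function):

use cargo::util::cache_lock::CacheLockMode;
use cargo::util::context::GlobalContext;
use cargo::CargoResult;

// Sketch only: hold the package-cache lock for the duration of the sync.
fn lock_and_sync(gctx: &GlobalContext) -> CargoResult<()> {
    // DownloadExclusive is exclusive among downloaders but, unlike
    // MutateExclusive, does not claim the right to mutate the cache.
    let _lock = gctx.acquire_package_cache_lock(CacheLockMode::DownloadExclusive)?;
    // ... resolve, download, and copy vendored crates while locked ...
    Ok(())
} // `_lock` drops here, releasing the lock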
@@ -145,17 +145,10 @@ fn sync(
     let mut source_replacement_cache =
         SourceReplacementCache::new(gctx, opts.respect_source_config)?;
 
-    // First up attempt to work around rust-lang/cargo#5956. Apparently build
-    // artifacts sprout up in Cargo's global cache for whatever reason, although
-    // it's unsure what tool is causing these issues at this time. For now we
-    // apply a heavy-hammer approach which is to delete Cargo's unpacked version
-    // of each crate to start off with. After we do this we'll re-resolve and
-    // redownload again, which should trigger Cargo to re-extract all the
-    // crates.
-    //
-    // Note that errors are largely ignored here as this is a best-effort
-    // attempt. If anything fails here we basically just move on to the next
-    // crate to work with.
+    let mut checksums = HashMap::new();
+    let mut ids = BTreeMap::new();
+
+    // Let's download all crates and start storing internal tables about them.
     for ws in workspaces {
         let (packages, resolve) = ops::resolve_ws(ws, dry_run)
             .with_context(|| format!("failed to load lockfile for {}", ws.root().display()))?;
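The deleted comment block described the old workaround for rust-lang/cargo#5956: delete Cargo's unpacked copy of every crate up front, then re-resolve and re-download so everything is re-extracted, with errors ignored as a best-effort pass. With that pass gone, the `checksums` and `ids` tables move up from the second loop (removed in the next hunk), so a single resolve/download pass over `workspaces` now builds them. For reference, a rough sketch of the shapes these tables appear to take; the key and value types are assumptions inferred from how the diff uses them, not cargo's exact definitions:

use std::collections::{BTreeMap, HashMap};

// Stand-in types; the real code uses cargo's PackageId and Package.
type PackageId = String;
type Package = String;

fn main() {
    // `checksums` collects per-package checksum data for the
    // .cargo-checksum.json file written into each vendored directory
    // (value type is an assumption).
    let mut checksums: HashMap<PackageId, Option<String>> = HashMap::new();
    // BTreeMap keeps `ids` sorted, so crates are presumably vendored in a
    // stable, deterministic order regardless of resolve iteration order.
    let mut ids: BTreeMap<PackageId, Package> = BTreeMap::new();

    ids.insert("anyhow 1.0.86".into(), "pkg".into());
    checksums.insert("anyhow 1.0.86".into(), None);
}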
@@ -165,54 +158,19 @@ fn sync(
             .with_context(|| format!("failed to download packages for {}", ws.root().display()))?;
 
         for pkg in resolve.iter() {
-            let sid = if opts.respect_source_config {
-                source_replacement_cache.get(pkg.source_id())?
-            } else {
-                pkg.source_id()
-            };
+            let sid = source_replacement_cache.get(pkg.source_id())?;
 
-            // Don't delete actual source code!
+            // Don't vendor path crates since they're already in the repository
             if sid.is_path() {
+                // And don't delete actual source code!
                 if let Ok(path) = sid.url().to_file_path() {
                     if let Ok(path) = try_canonicalize(path) {
                         to_remove.remove(&path);
                     }
                 }
                 continue;
             }
-            if sid.is_git() {
-                continue;
-            }
-
-            // Only delete sources that are safe to delete, i.e. they are caches.
-            if sid.is_registry() {
-                if let Ok(pkg) = packages.get_one(pkg) {
-                    drop(fs::remove_dir_all(pkg.root()));
-                }
-                continue;
-            }
-        }
-    }
 
-    let mut checksums = HashMap::new();
-    let mut ids = BTreeMap::new();
-
-    // Next up let's actually download all crates and start storing internal
-    // tables about them.
-    for ws in workspaces {
-        let (packages, resolve) = ops::resolve_ws(ws, dry_run)
-            .with_context(|| format!("failed to load lockfile for {}", ws.root().display()))?;
-
-        packages
-            .get_many(resolve.iter())
-            .with_context(|| format!("failed to download packages for {}", ws.root().display()))?;
-
-        for pkg in resolve.iter() {
-            // No need to vendor path crates since they're already in the
-            // repository
-            if pkg.source_id().is_path() {
-                continue;
-            }
             ids.insert(
                 pkg,
                 packages
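Net effect of the final hunk: the old deletion loop (git sources skipped, registry caches wiped via `fs::remove_dir_all`) and the separate download loop collapse into one, and every package now goes through `source_replacement_cache.get()`, which can absorb the deleted `if opts.respect_source_config` branch because the cache was constructed with that flag. A minimal self-contained sketch of that memoized-lookup pattern; the struct layout, field names, and fallthrough behavior here are assumptions inferred from the constructor and call site, not cargo's actual implementation:

use std::collections::HashMap;

/// Hypothetical stand-in for cargo's SourceReplacementCache: memoize a
/// possibly-expensive replacement lookup, and act as the identity map when
/// `[source]` replacement should be ignored.
struct ReplacementCache {
    respect_source_config: bool,
    memo: HashMap<String, String>,
}

impl ReplacementCache {
    fn new(respect_source_config: bool) -> Self {
        Self { respect_source_config, memo: HashMap::new() }
    }

    /// Return the (possibly replaced) source for `id`, caching the answer.
    fn get(&mut self, id: &str) -> String {
        if !self.respect_source_config {
            // Folds in the deleted `else { pkg.source_id() }` branch.
            return id.to_string();
        }
        self.memo
            .entry(id.to_string())
            .or_insert_with(|| lookup_replacement(id))
            .clone()
    }
}

/// Placeholder for the real source-replacement lookup (assumption).
fn lookup_replacement(id: &str) -> String {
    format!("replaced({id})")
}

fn main() {
    let mut cache = ReplacementCache::new(true);
    assert_eq!(cache.get("crates-io"), "replaced(crates-io)");
    assert_eq!(cache.get("crates-io"), "replaced(crates-io)"); // memoized
}

The git/registry special cases existed only to decide what was safe to delete; with no deletion happening, both source kinds fall through to the same download-and-record path.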