@@ -126,21 +126,28 @@ type GSUtilRemoteCache struct {
126126func (rs GSUtilRemoteCache ) ExistingPackages (pkgs []* Package ) (map [* Package ]struct {}, error ) {
127127 fmt .Printf ("☁️ checking remote cache for past build artifacts for %d packages\n " , len (pkgs ))
128128
129- packageToURLMap := make (map [* Package ]string )
129+ // Map to store both .tar.gz and .tar URLs for each package
130+ type urlPair struct {
131+ gzURL string
132+ tarURL string
133+ }
134+ packageToURLMap := make (map [* Package ]urlPair )
135+
136+ // Create a list of all possible URLs
137+ var urls []string
130138 for _ , p := range pkgs {
131139 version , err := p .Version ()
132140 if err != nil {
133141 log .WithField ("package" , p .FullName ()).Debug ("Failed to get version for package. Will not check remote cache for package." )
134142 continue
135143 }
136144
137- url := fmt .Sprintf ("gs://%s/%s" , rs .BucketName , fmt .Sprintf ("%s.tar.gz" , version ))
138- packageToURLMap [p ] = url
139- }
140-
141- urls := make ([]string , 0 , len (packageToURLMap ))
142- for _ , url := range packageToURLMap {
143- urls = append (urls , url )
145+ pair := urlPair {
146+ gzURL : fmt .Sprintf ("gs://%s/%s.tar.gz" , rs .BucketName , version ),
147+ tarURL : fmt .Sprintf ("gs://%s/%s.tar" , rs .BucketName , version ),
148+ }
149+ packageToURLMap [p ] = pair
150+ urls = append (urls , pair .gzURL , pair .tarURL )
144151 }
145152
146153 if len (urls ) == 0 {
@@ -163,16 +170,29 @@ func (rs GSUtilRemoteCache) ExistingPackages(pkgs []*Package) (map[*Package]stru
163170
164171 existingURLs := parseGSUtilStatOutput (bytes .NewReader (stdoutBuffer .Bytes ()))
165172 existingPackages := make (map [* Package ]struct {})
173+
166174 for _ , p := range pkgs {
167- url := packageToURLMap [p ]
168- if _ , exists := existingURLs [url ]; exists {
175+ urls := packageToURLMap [p ]
176+ if _ , exists := existingURLs [urls .gzURL ]; exists {
177+ existingPackages [p ] = struct {}{}
178+ continue
179+ }
180+ if _ , exists := existingURLs [urls .tarURL ]; exists {
169181 existingPackages [p ] = struct {}{}
170182 }
171183 }
172184
173185 return existingPackages , nil
174186}
175187
// getPackageArtifactURLs returns every remote-cache location at which the
// artifact for the given package version may live in the GCS bucket: the
// gzip-compressed artifact (.tar.gz) first, then the uncompressed one (.tar).
// The order matters to callers that treat the first hit as authoritative.
//
// NOTE(review): this helper appears unused in the visible diff — the GSUtil
// ExistingPackages builds the same URLs inline via urlPair. Either call this
// helper there or remove it; confirm against the full file.
func getPackageArtifactURLs(bucketName, version string) []string {
	return []string{
		fmt.Sprintf("gs://%s/%s.tar.gz", bucketName, version),
		fmt.Sprintf("gs://%s/%s.tar", bucketName, version),
	}
}
195+
176196// Download makes a best-effort attempt at downloading previously cached build artifacts
177197func (rs GSUtilRemoteCache ) Download (dst Cache , pkgs []* Package ) error {
178198 fmt .Printf ("☁️ downloading %d cached build artifacts\n " , len (pkgs ))
@@ -305,15 +325,23 @@ func NewS3RemoteCache(bucketName string, cfg *aws.Config) (*S3RemoteCache, error
305325
306326// ExistingPackages returns existing cached build artifacts in the remote cache
307327func (rs * S3RemoteCache ) ExistingPackages (pkgs []* Package ) (map [* Package ]struct {}, error ) {
308- packagesToKeys := make (map [* Package ]string )
328+ type keyPair struct {
329+ gzKey string
330+ tarKey string
331+ }
332+
333+ packagesToKeys := make (map [* Package ]keyPair )
309334 for _ , p := range pkgs {
310335 version , err := p .Version ()
311336 if err != nil {
312337 log .WithField ("package" , p .FullName ()).Debug ("Failed to get version for package. Will not check remote cache for package." )
313338 continue
314339 }
315340
316- packagesToKeys [p ] = filepath .Base (fmt .Sprintf ("%s.tar.gz" , version ))
341+ packagesToKeys [p ] = keyPair {
342+ gzKey : filepath .Base (fmt .Sprintf ("%s.tar.gz" , version )),
343+ tarKey : filepath .Base (fmt .Sprintf ("%s.tar" , version )),
344+ }
317345 }
318346
319347 if len (packagesToKeys ) == 0 {
@@ -322,36 +350,57 @@ func (rs *S3RemoteCache) ExistingPackages(pkgs []*Package) (map[*Package]struct{
322350 log .Debugf ("Checking if %d packages exist in the remote cache using s3" , len (packagesToKeys ))
323351
324352 ch := make (chan * Package , len (packagesToKeys ))
325-
326353 existingPackages := make (map [* Package ]struct {})
354+ var mu sync.Mutex // Mutex for safe concurrent map access
327355 wg := sync.WaitGroup {}
328356
329357 ctx := context .TODO ()
330- for pkg , key := range packagesToKeys {
358+ for pkg , keys := range packagesToKeys {
331359 wg .Add (1 )
332- go func (pkg * Package , key string ) {
360+ go func (pkg * Package , keys keyPair ) {
333361 defer wg .Done ()
334362
335- stat , err := rs .hasObject (ctx , key )
336- if err != nil {
337- log .WithField ("bucket" , rs .BucketName ).WithField ("key" , key ).Debugf ("Failed to check for remote cached object: %s" , err )
363+ // Check for .tar.gz first
364+ if stat , err := rs .hasObject (ctx , keys .gzKey ); err != nil {
365+ log .WithField ("bucket" , rs .BucketName ).WithField ("key" , keys .gzKey ).
366+ Debugf ("Failed to check for remote cached object: %s" , err )
367+ } else if stat {
368+ mu .Lock ()
369+ existingPackages [pkg ] = struct {}{}
370+ mu .Unlock ()
371+ return
338372 }
339- if stat {
340- ch <- pkg
373+
374+ // If .tar.gz doesn't exist, check for .tar
375+ if stat , err := rs .hasObject (ctx , keys .tarKey ); err != nil {
376+ log .WithField ("bucket" , rs .BucketName ).WithField ("key" , keys .tarKey ).
377+ Debugf ("Failed to check for remote cached object: %s" , err )
378+ } else if stat {
379+ mu .Lock ()
380+ existingPackages [pkg ] = struct {}{}
381+ mu .Unlock ()
341382 }
342- }(pkg , key )
383+ }(pkg , keys )
343384 }
344385 wg .Wait ()
345- close (ch )
346386
347- for p := range ch {
348- existingPackages [p ] = struct {}{}
349- }
350- log .WithField ("bucket" , rs .BucketName ).Debugf ("%d/%d packages found in remote cache" , len (existingPackages ), len (packagesToKeys ))
387+ log .WithField ("bucket" , rs .BucketName ).
388+ Debugf ("%d/%d packages found in remote cache" , len (existingPackages ), len (packagesToKeys ))
351389
352390 return existingPackages , nil
353391}
354392
393+ // Helper method to consolidate object existence check logic
394+ func (rs * S3RemoteCache ) checkObjectExists (ctx context.Context , key string ) bool {
395+ stat , err := rs .hasObject (ctx , key )
396+ if err != nil {
397+ log .WithField ("bucket" , rs .BucketName ).WithField ("key" , key ).
398+ Debugf ("Failed to check for remote cached object: %s" , err )
399+ return false
400+ }
401+ return stat
402+ }
403+
355404// Download makes a best-effort attempt at downloading previously cached build artifacts for the given packages
356405// in their current version. A cache miss (i.e. a build artifact not being available) does not constitute an
357406// error. Get should try and download as many artifacts as possible.
0 commit comments