diff --git a/cache/remotecache/azblob/exporter.go b/cache/remotecache/azblob/exporter.go
index e95c3d35ae0f..60450ce1a06d 100644
--- a/cache/remotecache/azblob/exporter.go
+++ b/cache/remotecache/azblob/exporter.go
@@ -18,6 +18,7 @@ import (
 	"github.com/containerd/containerd/v2/pkg/labels"
 	"github.com/moby/buildkit/cache/remotecache"
 	v1 "github.com/moby/buildkit/cache/remotecache/v1"
+	cacheimporttypes "github.com/moby/buildkit/cache/remotecache/v1/types"
 	"github.com/moby/buildkit/session"
 	"github.com/moby/buildkit/solver"
 	"github.com/moby/buildkit/util/bklog"
@@ -110,7 +111,7 @@ func (ce *exporter) Finalize(ctx context.Context) (map[string]string, error) {
 			layerDone(nil)
 		}
 
-		la := &v1.LayerAnnotations{
+		la := &cacheimporttypes.LayerAnnotations{
 			DiffID:    diffID,
 			Size:      dgstPair.Descriptor.Size,
 			MediaType: dgstPair.Descriptor.MediaType,
diff --git a/cache/remotecache/azblob/importer.go b/cache/remotecache/azblob/importer.go
index a98d4ca136aa..1566ffa965a1 100644
--- a/cache/remotecache/azblob/importer.go
+++ b/cache/remotecache/azblob/importer.go
@@ -14,6 +14,7 @@ import (
 	cerrdefs "github.com/containerd/errdefs"
 	"github.com/moby/buildkit/cache/remotecache"
 	v1 "github.com/moby/buildkit/cache/remotecache/v1"
+	cacheimporttypes "github.com/moby/buildkit/cache/remotecache/v1/types"
 	"github.com/moby/buildkit/session"
 	"github.com/moby/buildkit/solver"
 	"github.com/moby/buildkit/util/bklog"
@@ -118,7 +119,7 @@ func (ci *importer) loadManifest(ctx context.Context, name string) (*v1.CacheCha
 
 	bklog.G(ctx).Debugf("imported config: %s", string(bytes))
 
-	var config v1.CacheConfig
+	var config cacheimporttypes.CacheConfig
 	if err := json.Unmarshal(bytes, &config); err != nil {
 		return nil, errors.WithStack(err)
 	}
@@ -142,7 +143,7 @@ func (ci *importer) loadManifest(ctx context.Context, name string) (*v1.CacheCha
 	return cc, nil
 }
 
-func (ci *importer) makeDescriptorProviderPair(l v1.CacheLayer) (*v1.DescriptorProviderPair, error) {
+func (ci *importer) makeDescriptorProviderPair(l cacheimporttypes.CacheLayer) (*v1.DescriptorProviderPair, error) {
 	if l.Annotations == nil {
 		return nil, errors.Errorf("cache layer with missing annotations")
 	}
diff --git a/cache/remotecache/export.go b/cache/remotecache/export.go
index 403cf669ed3e..b570b6a4a2ca 100644
--- a/cache/remotecache/export.go
+++ b/cache/remotecache/export.go
@@ -9,6 +9,7 @@ import (
 	"github.com/containerd/containerd/v2/core/content"
 	"github.com/containerd/containerd/v2/core/images"
 	v1 "github.com/moby/buildkit/cache/remotecache/v1"
+	cacheimporttypes "github.com/moby/buildkit/cache/remotecache/v1/types"
 	"github.com/moby/buildkit/session"
 	"github.com/moby/buildkit/solver"
 	"github.com/moby/buildkit/util/bklog"
@@ -219,7 +220,7 @@ func (ce *contentCacheExporter) Finalize(ctx context.Context) (map[string]string
 	desc := ocispecs.Descriptor{
 		Digest:    dgst,
 		Size:      int64(len(dt)),
-		MediaType: v1.CacheConfigMediaTypeV0,
+		MediaType: cacheimporttypes.CacheConfigMediaTypeV0,
 	}
 	configDone := progress.OneOff(ctx, fmt.Sprintf("writing config %s", dgst))
 	if err := content.WriteBlob(ctx, ce.ingester, dgst.String(), bytes.NewReader(dt), desc); err != nil {
diff --git a/cache/remotecache/gha/gha.go b/cache/remotecache/gha/gha.go
index 5e2e0191443a..a8c781f73015 100644
--- a/cache/remotecache/gha/gha.go
+++ b/cache/remotecache/gha/gha.go
@@ -17,6 +17,7 @@ import (
 	cerrdefs "github.com/containerd/errdefs"
 	"github.com/moby/buildkit/cache/remotecache"
 	v1 "github.com/moby/buildkit/cache/remotecache/v1"
+	cacheimporttypes "github.com/moby/buildkit/cache/remotecache/v1/types"
 	"github.com/moby/buildkit/session"
 	"github.com/moby/buildkit/solver"
 	"github.com/moby/buildkit/util/bklog"
@@ -267,7 +268,7 @@ func (ce *exporter) Finalize(ctx context.Context) (map[string]string, error) {
 			}
 			layerDone(nil)
 		}
-		la := &v1.LayerAnnotations{
+		la := &cacheimporttypes.LayerAnnotations{
 			DiffID:    diffID,
 			Size:      dgstPair.Descriptor.Size,
 			MediaType: dgstPair.Descriptor.MediaType,
@@ -328,7 +329,7 @@ func NewImporter(c *Config) (remotecache.Importer, error) {
 	return &importer{cache: cache, config: c}, nil
 }
 
-func (ci *importer) makeDescriptorProviderPair(l v1.CacheLayer) (*v1.DescriptorProviderPair, error) {
+func (ci *importer) makeDescriptorProviderPair(l cacheimporttypes.CacheLayer) (*v1.DescriptorProviderPair, error) {
 	if l.Annotations == nil {
 		return nil, errors.Errorf("cache layer with missing annotations")
 	}
@@ -376,7 +377,7 @@ func (ci *importer) loadScope(ctx context.Context, scope string) (*v1.CacheChain
 		return nil, err
 	}
 
-	var config v1.CacheConfig
+	var config cacheimporttypes.CacheConfig
 	if err := json.Unmarshal(buf.Bytes(), &config); err != nil {
 		return nil, errors.WithStack(err)
 	}
diff --git a/cache/remotecache/import.go b/cache/remotecache/import.go
index ff2aa0204856..5f635a0d02b3 100644
--- a/cache/remotecache/import.go
+++ b/cache/remotecache/import.go
@@ -12,6 +12,7 @@ import (
 	"github.com/containerd/containerd/v2/core/images"
 	"github.com/containerd/containerd/v2/pkg/labels"
 	v1 "github.com/moby/buildkit/cache/remotecache/v1"
+	cacheimporttypes "github.com/moby/buildkit/cache/remotecache/v1/types"
 	"github.com/moby/buildkit/session"
 	"github.com/moby/buildkit/solver"
 	"github.com/moby/buildkit/util/bklog"
@@ -69,7 +70,7 @@ func (ci *contentCacheImporter) Resolve(ctx context.Context, desc ocispecs.Descr
 	}
 
 	for _, m := range mfst.Manifests {
-		if m.MediaType == v1.CacheConfigMediaTypeV0 {
+		if m.MediaType == cacheimporttypes.CacheConfigMediaTypeV0 {
 			configDesc = m
 			continue
 		}
@@ -84,7 +85,7 @@ func (ci *contentCacheImporter) Resolve(ctx context.Context, desc ocispecs.Descr
 		return nil, err
 	}
 
-	if mfst.Config.MediaType == v1.CacheConfigMediaTypeV0 {
+	if mfst.Config.MediaType == cacheimporttypes.CacheConfigMediaTypeV0 {
 		configDesc = mfst.Config
 	}
 	for _, m := range mfst.Layers {
@@ -201,7 +202,7 @@ func (ci *contentCacheImporter) importInlineCache(ctx context.Context, dt []byte
 		return nil
 	}
 
-	var config v1.CacheConfig
+	var config cacheimporttypes.CacheConfig
 	if err := json.Unmarshal(img.Cache, &config.Records); err != nil {
 		return errors.WithStack(err)
 	}
@@ -227,7 +228,7 @@ func (ci *contentCacheImporter) importInlineCache(ctx context.Context, dt []byte
 			Descriptor: m,
 			Provider:   ci.provider,
 		}
-		config.Layers = append(config.Layers, v1.CacheLayer{
+		config.Layers = append(config.Layers, cacheimporttypes.CacheLayer{
 			Blob:        m.Digest,
 			ParentIndex: i - 1,
 		})
diff --git a/cache/remotecache/inline/inline.go b/cache/remotecache/inline/inline.go
index cd2299bffd60..1cb6480ab3f9 100644
--- a/cache/remotecache/inline/inline.go
+++ b/cache/remotecache/inline/inline.go
@@ -8,6 +8,7 @@ import (
 	"github.com/containerd/containerd/v2/pkg/labels"
 	"github.com/moby/buildkit/cache/remotecache"
 	v1 "github.com/moby/buildkit/cache/remotecache/v1"
+	cacheimporttypes "github.com/moby/buildkit/cache/remotecache/v1/types"
 	"github.com/moby/buildkit/session"
 	"github.com/moby/buildkit/solver"
 	"github.com/moby/buildkit/util/bklog"
@@ -122,7 +123,7 @@ func (ce *exporter) ExportForLayers(ctx context.Context, layers []digest.Digest)
 		} else {
 			// The layers of the result are not in the same order as the image, so we
 			// have to use ChainedResult to specify each layer of the result individually.
-			chainedResult := v1.ChainedResult{}
+			chainedResult := cacheimporttypes.ChainedResult{}
 			for _, resultBlob := range resultBlobs {
 				idx, ok := blobIndexes[resultBlob]
 				if !ok {
@@ -130,13 +131,13 @@ func (ce *exporter) ExportForLayers(ctx context.Context, layers []digest.Digest)
 				}
 				chainedResult.LayerIndexes = append(chainedResult.LayerIndexes, idx)
 			}
-			r.Results[j] = v1.CacheResult{}
+			r.Results[j] = cacheimporttypes.CacheResult{}
 			r.ChainedResults = append(r.ChainedResults, chainedResult)
 		}
 		// remove any CacheResults that had to be converted to the ChainedResult format.
-		var filteredResults []v1.CacheResult
+		var filteredResults []cacheimporttypes.CacheResult
 		for _, rr := range r.Results {
-			if rr != (v1.CacheResult{}) {
+			if rr != (cacheimporttypes.CacheResult{}) {
 				filteredResults = append(filteredResults, rr)
 			}
 		}
@@ -154,7 +155,7 @@ func (ce *exporter) ExportForLayers(ctx context.Context, layers []digest.Digest)
 	return dt, nil
 }
 
-func layerToBlobs(idx int, layers []v1.CacheLayer) []digest.Digest {
+func layerToBlobs(idx int, layers []cacheimporttypes.CacheLayer) []digest.Digest {
 	var ds []digest.Digest
 	for idx != -1 {
 		layer := layers[idx]
diff --git a/cache/remotecache/s3/s3.go b/cache/remotecache/s3/s3.go
index d1026bce9fef..201c51cf7b34 100644
--- a/cache/remotecache/s3/s3.go
+++ b/cache/remotecache/s3/s3.go
@@ -21,6 +21,7 @@ import (
 	"github.com/containerd/containerd/v2/pkg/labels"
 	"github.com/moby/buildkit/cache/remotecache"
 	v1 "github.com/moby/buildkit/cache/remotecache/v1"
+	cacheimporttypes "github.com/moby/buildkit/cache/remotecache/v1/types"
 	"github.com/moby/buildkit/session"
 	"github.com/moby/buildkit/solver"
 	"github.com/moby/buildkit/util/compression"
@@ -261,7 +262,7 @@ func (e *exporter) Finalize(ctx context.Context) (map[string]string, error) {
 			layerDone(nil)
 		}
 
-		la := &v1.LayerAnnotations{
+		la := &cacheimporttypes.LayerAnnotations{
 			DiffID:    diffID,
 			Size:      dgstPair.Descriptor.Size,
 			MediaType: dgstPair.Descriptor.MediaType,
@@ -316,7 +317,7 @@ type importer struct {
 	config   Config
 }
 
-func (i *importer) makeDescriptorProviderPair(l v1.CacheLayer) (*v1.DescriptorProviderPair, error) {
+func (i *importer) makeDescriptorProviderPair(l cacheimporttypes.CacheLayer) (*v1.DescriptorProviderPair, error) {
 	if l.Annotations == nil {
 		return nil, errors.Errorf("cache layer with missing annotations")
 	}
@@ -344,7 +345,7 @@ func (i *importer) makeDescriptorProviderPair(l v1.CacheLayer) (*v1.DescriptorPr
 }
 
 func (i *importer) load(ctx context.Context) (*v1.CacheChains, error) {
-	var config v1.CacheConfig
+	var config cacheimporttypes.CacheConfig
 	found, err := i.s3Client.getManifest(ctx, i.s3Client.manifestKey(i.config.Names[0]), &config)
 	if err != nil {
 		return nil, err
@@ -427,7 +428,7 @@ func newS3Client(ctx context.Context, config Config) (*s3Client, error) {
 	}, nil
 }
 
-func (s3Client *s3Client) getManifest(ctx context.Context, key string, config *v1.CacheConfig) (bool, error) {
+func (s3Client *s3Client) getManifest(ctx context.Context, key string, config *cacheimporttypes.CacheConfig) (bool, error) {
 	input := &s3.GetObjectInput{
 		Bucket: &s3Client.bucket,
 		Key:    &key,
diff --git a/cache/remotecache/v1/chains.go b/cache/remotecache/v1/chains.go
index 1a89400b5cc3..8fd91964a062 100644
--- a/cache/remotecache/v1/chains.go
+++ b/cache/remotecache/v1/chains.go
@@ -11,6 +11,7 @@ import (
 	"github.com/cespare/xxhash/v2"
 	"github.com/containerd/containerd/v2/core/content"
 	cerrdefs "github.com/containerd/errdefs"
+	cacheimporttypes "github.com/moby/buildkit/cache/remotecache/v1/types"
 	"github.com/moby/buildkit/session"
 	"github.com/moby/buildkit/solver"
 	digest "github.com/opencontainers/go-digest"
@@ -208,7 +209,7 @@ func IntersectAll[T comparable](
 // Marshal aims to validate, normalize and sort the output to ensure a
 // consistent digest (since cache configs are typically uploaded and stored in
 // content-addressable OCI registries).
-func (c *CacheChains) Marshal(ctx context.Context) (*CacheConfig, DescriptorProvider, error) {
+func (c *CacheChains) Marshal(ctx context.Context) (*cacheimporttypes.CacheConfig, DescriptorProvider, error) {
 	st := &marshalState{
 		chainsByID:  map[string]int{},
 		descriptors: DescriptorProvider{},
@@ -221,7 +222,7 @@ func (c *CacheChains) Marshal(ctx context.Context) (*CacheConfig, DescriptorProv
 		}
 	}
 
-	cc := CacheConfig{
+	cc := cacheimporttypes.CacheConfig{
 		Layers:  st.layers,
 		Records: st.records,
 	}
diff --git a/cache/remotecache/v1/parse.go b/cache/remotecache/v1/parse.go
index ff30d9b624a6..dff6cdd71622 100644
--- a/cache/remotecache/v1/parse.go
+++ b/cache/remotecache/v1/parse.go
@@ -3,6 +3,7 @@ package cacheimport
 import (
 	"encoding/json"
 
+	cacheimporttypes "github.com/moby/buildkit/cache/remotecache/v1/types"
 	"github.com/moby/buildkit/solver"
 	"github.com/moby/buildkit/util/contentutil"
 	ocispecs "github.com/opencontainers/image-spec/specs-go/v1"
@@ -10,7 +11,7 @@ import (
 )
 
 func Parse(configJSON []byte, provider DescriptorProvider, t solver.CacheExporterTarget) error {
-	var config CacheConfig
+	var config cacheimporttypes.CacheConfig
 	if err := json.Unmarshal(configJSON, &config); err != nil {
 		return errors.WithStack(err)
 	}
@@ -18,7 +19,7 @@ func Parse(configJSON []byte, provider DescriptorProvider, t solver.CacheExporte
 	return ParseConfig(config, provider, t)
 }
 
-func ParseConfig(config CacheConfig, provider DescriptorProvider, t solver.CacheExporterTarget) error {
+func ParseConfig(config cacheimporttypes.CacheConfig, provider DescriptorProvider, t solver.CacheExporterTarget) error {
 	cache := map[int]solver.CacheExporterRecord{}
 
 	for i := range config.Records {
@@ -29,7 +30,7 @@ func ParseConfig(config CacheConfig, provider DescriptorProvider, t solver.Cache
 	return nil
 }
 
-func parseRecord(cc CacheConfig, idx int, provider DescriptorProvider, t solver.CacheExporterTarget, cache map[int]solver.CacheExporterRecord) (solver.CacheExporterRecord, error) {
+func parseRecord(cc cacheimporttypes.CacheConfig, idx int, provider DescriptorProvider, t solver.CacheExporterTarget, cache map[int]solver.CacheExporterRecord) (solver.CacheExporterRecord, error) {
 	if r, ok := cache[idx]; ok {
 		if r == nil {
 			return nil, errors.Errorf("invalid looping record")
@@ -112,7 +113,7 @@ func parseRecord(cc cacheimporttypes.CacheConfig, idx int, provider DescriptorProvider, t solver.
 	return r, nil
 }
 
-func getRemoteChain(layers []CacheLayer, idx int, provider DescriptorProvider, visited map[int]struct{}) (*solver.Remote, error) {
+func getRemoteChain(layers []cacheimporttypes.CacheLayer, idx int, provider DescriptorProvider, visited map[int]struct{}) (*solver.Remote, error) {
 	if _, ok := visited[idx]; ok {
 		return nil, errors.Errorf("invalid looping layer")
 	}
diff --git a/cache/remotecache/v1/doc.go b/cache/remotecache/v1/types/doc.go
similarity index 98%
rename from cache/remotecache/v1/doc.go
rename to cache/remotecache/v1/types/doc.go
index 1e3cf32e0086..1c92e04f0a98 100644
--- a/cache/remotecache/v1/doc.go
+++ b/cache/remotecache/v1/types/doc.go
@@ -1,4 +1,4 @@
-package cacheimport
+package cacheimporttypes
 
 // Distributable build cache
 //
diff --git a/cache/remotecache/v1/spec.go b/cache/remotecache/v1/types/spec.go
similarity index 98%
rename from cache/remotecache/v1/spec.go
rename to cache/remotecache/v1/types/spec.go
index e3e94d790f53..179d48a175d9 100644
--- a/cache/remotecache/v1/spec.go
+++ b/cache/remotecache/v1/types/spec.go
@@ -1,4 +1,4 @@
-package cacheimport
+package cacheimporttypes
 
 import (
 	"time"
diff --git a/cache/remotecache/v1/utils.go b/cache/remotecache/v1/utils.go
index 59eb9e556a41..0c481fbd6239 100644
--- a/cache/remotecache/v1/utils.go
+++ b/cache/remotecache/v1/utils.go
@@ -8,17 +8,18 @@ import (
 	"sort"
 
 	cerrdefs "github.com/containerd/errdefs"
+	cacheimporttypes "github.com/moby/buildkit/cache/remotecache/v1/types"
 	"github.com/moby/buildkit/solver"
 	digest "github.com/opencontainers/go-digest"
 	"github.com/pkg/errors"
 )
 
 // sortConfig sorts the config structure to make sure it is deterministic
-func sortConfig(cc *CacheConfig) {
+func sortConfig(cc *cacheimporttypes.CacheConfig) {
 	type indexedLayer struct {
 		oldIndex int
 		newIndex int
-		l        CacheLayer
+		l        cacheimporttypes.CacheLayer
 	}
 
 	unsortedLayers := make([]*indexedLayer, len(cc.Layers))
@@ -36,7 +37,7 @@ func sortConfig(cc *CacheConfig) {
 		l.newIndex = i
 	}
 
-	layers := make([]CacheLayer, len(sortedLayers))
+	layers := make([]cacheimporttypes.CacheLayer, len(sortedLayers))
 	for i, l := range sortedLayers {
 		if pID := l.l.ParentIndex; pID != -1 {
 			l.l.ParentIndex = unsortedLayers[pID].newIndex
@@ -47,7 +48,7 @@ func sortConfig(cc *CacheConfig) {
 	type indexedRecord struct {
 		oldIndex int
 		newIndex int
-		r        CacheRecord
+		r        cacheimporttypes.CacheRecord
 	}
 
 	unsortedRecords := make([]*indexedRecord, len(cc.Records))
@@ -88,7 +89,7 @@ func sortConfig(cc *CacheConfig) {
 		l.newIndex = i
 	}
 
-	records := make([]CacheRecord, len(sortedRecords))
+	records := make([]cacheimporttypes.CacheRecord, len(sortedRecords))
 	for i, r := range sortedRecords {
 		for j := range r.r.Results {
 			r.r.Results[j].LayerIndex = unsortedLayers[r.r.Results[j].LayerIndex].newIndex
@@ -97,7 +98,7 @@ func sortConfig(cc *CacheConfig) {
 		for k := range inputs {
 			r.r.Inputs[j][k].LinkIndex = unsortedRecords[r.r.Inputs[j][k].LinkIndex].newIndex
 		}
-		slices.SortFunc(inputs, func(a, b CacheInput) int {
+		slices.SortFunc(inputs, func(a, b cacheimporttypes.CacheInput) int {
 			return cmp.Compare(a.LinkIndex, b.LinkIndex)
 		})
 	}
@@ -119,11 +120,11 @@ type nlink struct {
 }
 
 type marshalState struct {
-	layers     []CacheLayer
+	layers     []cacheimporttypes.CacheLayer
 	chainsByID map[string]int
 	descriptors DescriptorProvider
 
-	records       []CacheRecord
+	records       []cacheimporttypes.CacheRecord
 	recordsByItem map[*item]int
 }
 
@@ -164,7 +165,7 @@ func marshalRemote(ctx context.Context, r *solver.Remote, state *marshalState) s
 	}
 
 	state.chainsByID[id] = len(state.layers)
-	l := CacheLayer{
+	l := cacheimporttypes.CacheLayer{
 		Blob:        desc.Digest,
 		ParentIndex: -1,
 	}
@@ -181,9 +182,9 @@ func marshalItem(ctx context.Context, it *item, state *marshalState) error {
 	}
 	state.recordsByItem[it] = -1
 
-	rec := CacheRecord{
+	rec := cacheimporttypes.CacheRecord{
 		Digest: it.dgst,
-		Inputs: make([][]CacheInput, len(it.parents)),
+		Inputs: make([][]cacheimporttypes.CacheInput, len(it.parents)),
 	}
 
 	for i, m := range it.parents {
@@ -198,7 +199,7 @@ func marshalItem(ctx context.Context, it *item, state *marshalState) error {
 		if idx == -1 {
 			continue
 		}
-		rec.Inputs[i] = append(rec.Inputs[i], CacheInput{
+		rec.Inputs[i] = append(rec.Inputs[i], cacheimporttypes.CacheInput{
 			Selector:  l.selector,
 			LinkIndex: idx,
 		})
@@ -212,7 +213,7 @@ func marshalItem(ctx context.Context, it *item, state *marshalState) error {
 		if !ok {
 			return errors.Errorf("parent chainid not found")
 		}
-		rec.Results = append(rec.Results, CacheResult{LayerIndex: idx, CreatedAt: res.CreatedAt})
+		rec.Results = append(rec.Results, cacheimporttypes.CacheResult{LayerIndex: idx, CreatedAt: res.CreatedAt})
 	}
 }
 
diff --git a/client/client_test.go b/client/client_test.go
index cf132ea0f678..51c1b7f06734 100644
--- a/client/client_test.go
+++ b/client/client_test.go
@@ -42,6 +42,7 @@ import (
 	"github.com/distribution/reference"
 	intoto "github.com/in-toto/in-toto-golang/in_toto"
 	controlapi "github.com/moby/buildkit/api/services/control"
+	cacheimporttypes "github.com/moby/buildkit/cache/remotecache/v1/types"
 	"github.com/moby/buildkit/client/llb"
 	"github.com/moby/buildkit/client/llb/sourceresolver"
 	"github.com/moby/buildkit/exporter/containerimage/exptypes"
@@ -192,6 +193,7 @@ var allTests = []func(t *testing.T, sb integration.Sandbox){
 	testBuildExportWithForeignLayer,
 	testZstdLocalCacheExport,
 	testCacheExportIgnoreError,
+	testCacheExportCacheDeletedContent,
 	testZstdRegistryCacheImportExport,
 	testZstdLocalCacheImportExport,
 	testUncompressedLocalCacheImportExport,
@@ -347,6 +349,148 @@ func testCacheExportCacheKeyLoop(t *testing.T, sb integration.Sandbox) {
 	}
 }
 
+func testCacheExportCacheDeletedContent(t *testing.T, sb integration.Sandbox) {
+	integration.SkipOnPlatform(t, "windows")
+	workers.CheckFeatureCompat(t, sb, workers.FeatureCacheExport, workers.FeatureCacheBackendLocal)
+	c, err := New(sb.Context(), sb.Address())
+	require.NoError(t, err)
+	defer c.Close()
+
+	tmpdir := integration.Tmpdir(t)
+
+	err = os.WriteFile(filepath.Join(tmpdir.Name, "foo"), []byte("foodata"), 0600)
+	require.NoError(t, err)
+
+	base := llb.Image("alpine:latest").Run(llb.Shlex(`sh -c "echo abc-def > /foo && echo abc > /detection-file"`)).Root()
+	st := llb.Scratch().File(llb.Copy(base, "/foo", "/out"))
+
+	def, err := st.Marshal(sb.Context())
+	require.NoError(t, err)
+
+	_, err = c.Solve(sb.Context(), def, SolveOpt{
+		CacheExports: []CacheOptionsEntry{
+			{
+				Type: "local",
+				Attrs: map[string]string{
+					"dest": filepath.Join(tmpdir.Name, "cache"),
+					"mode": "max",
+				},
+			},
+		},
+	}, nil)
+	require.NoError(t, err)
+
+	dt, err := os.ReadFile(filepath.Join(tmpdir.Name, "cache", "index.json"))
+	require.NoError(t, err)
+
+	var index ocispecs.Index
+	err = json.Unmarshal(dt, &index)
+	require.NoError(t, err)
+
+	require.Len(t, index.Manifests, 1)
+
+	dt, err = os.ReadFile(filepath.Join(tmpdir.Name, "cache/blobs/sha256", index.Manifests[0].Digest.Hex()))
+	require.NoError(t, err)
+
+	var mfst ocispecs.Manifest
+	err = json.Unmarshal(dt, &mfst)
+	require.NoError(t, err)
+
+	require.Len(t, mfst.Layers, 3)
+	require.Equal(t, "application/vnd.buildkit.cacheconfig.v0", mfst.Config.MediaType)
+
+	dt, err = os.ReadFile(filepath.Join(tmpdir.Name, "cache/blobs/sha256", mfst.Config.Digest.Hex()))
+	require.NoError(t, err)
+
+	var cc cacheimporttypes.CacheConfig
+	err = json.Unmarshal(dt, &cc)
+	require.NoError(t, err)
+
+	require.Equal(t, 3, len(cc.Layers))
+	require.Equal(t, 5, len(cc.Records))
+
+	var runLayer *int
+	for i, l := range cc.Layers {
+		if l.ParentIndex != -1 {
+			if runLayer != nil {
+				t.Fatal("multiple RUN layers")
+			}
+			runLayer = &i
+		}
+	}
+	require.NotNil(t, runLayer)
+
+	dt, err = os.ReadFile(filepath.Join(tmpdir.Name, "cache/blobs/sha256", cc.Layers[*runLayer].Blob.Hex()))
+	require.NoError(t, err)
+
+	m, err := testutil.ReadTarToMap(dt, true)
+	require.NoError(t, err)
+
+	require.Equal(t, "abc\n", string(m["detection-file"].Data))
+
+	// delete the blob for the run layer
+	err = os.Remove(filepath.Join(tmpdir.Name, "cache/blobs/sha256", cc.Layers[*runLayer].Blob.Hex()))
+	require.NoError(t, err)
+
+	// prune all buildkit state
+	ensurePruneAll(t, c, sb)
+
+	_, err = c.Solve(sb.Context(), def, SolveOpt{
+		CacheImports: []CacheOptionsEntry{
+			{
+				Type: "local",
+				Attrs: map[string]string{
+					"src": filepath.Join(tmpdir.Name, "cache"),
+				},
+			},
+		},
+		CacheExports: []CacheOptionsEntry{
+			{
+				Type: "local",
+				Attrs: map[string]string{
+					"dest": filepath.Join(tmpdir.Name, "cache2"),
+					"mode": "max",
+				},
+			},
+		},
+	}, nil)
+	require.NoError(t, err)
+
+	dt, err = os.ReadFile(filepath.Join(tmpdir.Name, "cache2", "index.json"))
+	require.NoError(t, err)
+
+	var index2 ocispecs.Index
+	err = json.Unmarshal(dt, &index2)
+	require.NoError(t, err)
+
+	require.Len(t, index2.Manifests, 1)
+
+	dt, err = os.ReadFile(filepath.Join(tmpdir.Name, "cache2/blobs/sha256", index2.Manifests[0].Digest.Hex()))
+	require.NoError(t, err)
+
+	var mfst2 ocispecs.Manifest
+	err = json.Unmarshal(dt, &mfst2)
+	require.NoError(t, err)
+
+	require.Len(t, mfst2.Layers, 1)
+	require.Equal(t, "application/vnd.buildkit.cacheconfig.v0", mfst2.Config.MediaType)
+
+	dt, err = os.ReadFile(filepath.Join(tmpdir.Name, "cache2/blobs/sha256", mfst2.Config.Digest.Hex()))
+	require.NoError(t, err)
+
+	var cc2 cacheimporttypes.CacheConfig
+	err = json.Unmarshal(dt, &cc2)
+	require.NoError(t, err)
+
+	require.Equal(t, 1, len(cc2.Layers))
+	require.Equal(t, 5, len(cc2.Records))
+
+	for i, r := range cc.Records {
+		require.Equal(t, cc2.Records[i].Digest, r.Digest)
+		require.Equal(t, cc2.Records[i].Inputs, r.Inputs)
+	}
+}
+
 func testBridgeNetworking(t *testing.T, sb integration.Sandbox) {
 	if os.Getenv("BUILDKIT_RUN_NETWORK_INTEGRATION_TESTS") == "" {
 		t.SkipNow()
diff --git a/frontend/dockerfile/dockerfile_test.go b/frontend/dockerfile/dockerfile_test.go
index fea4a9f1c52f..7a78394d7fa2 100644
--- a/frontend/dockerfile/dockerfile_test.go
+++ b/frontend/dockerfile/dockerfile_test.go
@@ -22,7 +22,7 @@ import (
 	"testing"
 	"time"
 
-	v1 "github.com/moby/buildkit/cache/remotecache/v1"
+	cacheimporttypes "github.com/moby/buildkit/cache/remotecache/v1/types"
 	"github.com/tonistiigi/fsutil"
 	"golang.org/x/sync/errgroup"
 	"google.golang.org/grpc/status"
@@ -6052,7 +6052,7 @@ COPY --from=base unique /
 	require.NoError(t, err)
 
 	require.Equal(t, ocispecs.MediaTypeImageManifest, img.Manifest.MediaType)
-	require.Equal(t, v1.CacheConfigMediaTypeV0, img.Manifest.Config.MediaType)
+	require.Equal(t, cacheimporttypes.CacheConfigMediaTypeV0, img.Manifest.Config.MediaType)
 
 	dt, err := os.ReadFile(filepath.Join(destDir, "const"))
 	require.NoError(t, err)
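
Not part of the patch: a minimal, hypothetical sketch of how the relocated types package could be consumed after this change, decoding the cache config blob written by the `local` cache exporter (the same blob the new `testCacheExportCacheDeletedContent` test inspects). The `./cache` path is a placeholder, and the sketch assumes the patch is applied so that `github.com/moby/buildkit/cache/remotecache/v1/types` (package `cacheimporttypes`) exists.

```go
package main

import (
	"encoding/json"
	"fmt"
	"os"
	"path/filepath"

	// Available once the patch above is applied.
	cacheimporttypes "github.com/moby/buildkit/cache/remotecache/v1/types"
	ocispecs "github.com/opencontainers/image-spec/specs-go/v1"
)

func main() {
	// cacheDir is a placeholder for a directory produced by the "local" cache exporter.
	cacheDir := "./cache"

	// Read the OCI index and take its single manifest.
	dt, err := os.ReadFile(filepath.Join(cacheDir, "index.json"))
	if err != nil {
		panic(err)
	}
	var index ocispecs.Index
	if err := json.Unmarshal(dt, &index); err != nil {
		panic(err)
	}

	dt, err = os.ReadFile(filepath.Join(cacheDir, "blobs/sha256", index.Manifests[0].Digest.Encoded()))
	if err != nil {
		panic(err)
	}
	var mfst ocispecs.Manifest
	if err := json.Unmarshal(dt, &mfst); err != nil {
		panic(err)
	}

	// The manifest config (media type cacheimporttypes.CacheConfigMediaTypeV0)
	// is the JSON cache config; decode it with the relocated types.
	dt, err = os.ReadFile(filepath.Join(cacheDir, "blobs/sha256", mfst.Config.Digest.Encoded()))
	if err != nil {
		panic(err)
	}
	var cc cacheimporttypes.CacheConfig
	if err := json.Unmarshal(dt, &cc); err != nil {
		panic(err)
	}

	fmt.Printf("%d layers, %d records\n", len(cc.Layers), len(cc.Records))
	for _, l := range cc.Layers {
		fmt.Printf("layer %s (parent index %d)\n", l.Blob, l.ParentIndex)
	}
}
```

This kind of consumer only needs the wire-format structs, which appears to be the point of the split: the types can be imported on their own without pulling in the chains/marshalling logic that remains in the `v1` package.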