package cache
import (
"archive/tar"
"context"
"encoding/json"
"fmt"
"io"
"path"
"sort"
cdcompression "github.com/containerd/containerd/archive/compression"
"github.com/moby/buildkit/session"
)
const keyFileList = "filelist"
// FileList returns an ordered list of files present in the cache record that were
// changed compared to the parent. The paths of the files are in same format as they
// are in the tar stream (AUFS whiteout format). If the reference does not have a
// blob associated with it, the list is empty.
func (sr *immutableRef) FileList(ctx context.Context, s session.Group) ([]string, error) {
	return gFileList.Do(ctx, fmt.Sprintf("filelist-%s", sr.ID()), func(ctx context.Context) ([]string, error) {
		// Fast path: a previously computed list may already be stored on the record.
		dt, err := sr.GetExternal(keyFileList)
		if err == nil && dt != nil {
			var files []string
			if err := json.Unmarshal(dt, &files); err != nil {
				return nil, err
			}
			return files, nil
		}

		// No blob means there is nothing to diff against; report an empty list.
		if sr.getBlob() == "" {
			return nil, nil
		}

		// lazy blobs need to be pulled first
		if err := sr.ensureLocalContentBlob(ctx, s); err != nil {
			return nil, err
		}

		desc, err := sr.ociDesc(ctx, sr.descHandlers, false)
		if err != nil {
			return nil, err
		}

		ra, err := sr.cm.ContentStore.ReaderAt(ctx, desc)
		if err != nil {
			return nil, err
		}
		// content.ReaderAt is an io.Closer; without this defer the reader leaked
		// on every call (including the early return when decompression fails).
		defer ra.Close()

		r, err := cdcompression.DecompressStream(io.NewSectionReader(ra, 0, ra.Size()))
		if err != nil {
			return nil, err
		}
		defer r.Close()

		// Walk the tar stream and collect entry names; AUFS whiteout entries
		// are kept in their on-the-wire form, as documented above.
		var files []string
		rdr := tar.NewReader(r)
		for {
			hdr, err := rdr.Next()
			if err == io.EOF {
				break
			}
			if err != nil {
				return nil, err
			}
			name := path.Clean(hdr.Name)
			files = append(files, name)
		}
		sort.Strings(files)

		// Persist the computed list so subsequent calls take the fast path.
		dt, err = json.Marshal(files)
		if err != nil {
			return nil, err
		}
		if err := sr.SetExternal(keyFileList, dt); err != nil {
			return nil, err
		}
		return files, nil
	})
}