add //go:build directives to prevent downgrading to go1.16 language
This is a follow-up to 0e73168b7e6d1d029d76d05b843b1aaec46739a8
This repository is not yet a module (i.e., does not have a `go.mod`). This
is not problematic when building the code in GOPATH or "vendor" mode, but
when using the code as a module dependency (in module mode), different
semantics apply since Go 1.21, which switches the Go _language version_ on a
per-module, per-package, or even per-file basis.
A condensed summary of that logic [is as follows][1]:
- For modules that have a go.mod containing a go version directive (see the
  example below), that version is considered the minimum _required_ version
  (starting with the go1.19.13 and go1.20.8 patch releases; before those, it
  was only a recommendation).
- For dependencies that don't have a go.mod (i.e., are not a module), go
  language version go1.16 is assumed.
- Likewise, for modules that have a go.mod, but the file does not have a
  go version directive, go language version go1.16 is assumed.
- If a go.work file is present, but does not have a go version directive,
  go language version go1.17 is assumed.
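For illustration, a minimal go.mod with such a go version directive could look
like this (the module path is hypothetical):

    module example.com/consumer

    go 1.21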
When falling back to an older language version, Go _downgrades_ the language
version used for compilation, which means that newer language features (such
as generics and `any`) are not available, and compilation fails. For example:
# github.com/docker/cli/cli/context/store
/go/pkg/mod/github.com/docker/cli@v25.0.0-beta.2+incompatible/cli/context/store/storeconfig.go:6:24: predeclared any requires go1.18 or later (-lang was set to go1.16; check go.mod)
/go/pkg/mod/github.com/docker/cli@v25.0.0-beta.2+incompatible/cli/context/store/store.go:74:12: predeclared any requires go1.18 or later (-lang was set to go1.16; check go.mod)
Note that these fallbacks are per-module, per-package, and can even be
per-file, so _(indirect) dependencies_ can still use modern language
features, as long as their respective go.mod has a version specified.
Unfortunately, these failures do not occur when building locally (using
vendor / GOPATH mode), but will affect consumers of the module.
Obviously, this situation is not ideal, and the ultimate solution is to
move to go modules (add a go.mod), but this comes with a non-trivial risk
in other areas (due to our complex dependency tree).
We could revert to using only go1.16 language features, but this would be
limiting, and may still be problematic when (for example) matching signatures
of dependencies.
There is an escape hatch: adding a `//go:build` directive to files that
make use of newer go language features. From the [go toolchain docs][2]:
> The go line for each module sets the language version the compiler enforces
> when compiling packages in that module. The language version can be changed
> on a per-file basis by using a build constraint.
>
> For example, a module containing code that uses the Go 1.21 language version
> should have a `go.mod` file with a go line such as `go 1.21` or `go 1.21.3`.
> If a specific source file should be compiled only when using a newer Go
> toolchain, adding `//go:build go1.22` to that source file both ensures that
> only Go 1.22 and newer toolchains will compile the file and also changes
> the language version in that file to Go 1.22.
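As a minimal sketch of what such a per-file override looks like (the file
below is purely illustrative, not part of this patch):

    //go:build go1.19

    package store

    // With the constraint above, this file is compiled with the go1.19
    // language version even when the surrounding code has no go.mod, so
    // `any` and generics are available here.
    type annotations map[string]any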
This patch adds `//go:build` directives to the files that use recent additions
to the language. It currently uses go1.19 as the version to match the version
in our "vendor.mod", but we could consider being more permissive (`any` requires
go1.18 or up), or more "optimistic" (force go1.21, which is the version we
currently use to build).
For completeness' sake, note that any file _without_ a `//go:build` directive
will continue to use the go1.16 language version when this code is consumed
as a module.
[1]: https://github.com/golang/go/blob/58c28ba286dd0e98fe4cca80f5d64bbcb824a685/src/cmd/go/internal/gover/version.go#L9-L56
[2]: https://go.dev/doc/toolchain#:~:text=The%20go%20line%20for,file%20to%20Go%201.22
Signed-off-by: Sebastiaan van Stijn <github@gone.nl>
// FIXME(thaJeztah): remove once we are a module; the go:build directive prevents go from downgrading language version to go1.16:
//go:build go1.19

package store

import (
	"archive/tar"
	"archive/zip"
	"bufio"
	"bytes"
	_ "crypto/sha256" // ensure ids can be computed
	"encoding/json"
	"io"
	"net/http"
	"path"
	"path/filepath"
	"regexp"
	"strings"

	"github.com/docker/docker/errdefs"
	"github.com/opencontainers/go-digest"
	"github.com/pkg/errors"
)

const restrictedNamePattern = "^[a-zA-Z0-9][a-zA-Z0-9_.+-]+$"

var restrictedNameRegEx = regexp.MustCompile(restrictedNamePattern)

// Store provides a context store for easily remembering endpoints configuration
type Store interface {
	Reader
	Lister
	Writer
	StorageInfoProvider
}

// Reader provides read-only (without list) access to context data
type Reader interface {
	GetMetadata(name string) (Metadata, error)
	ListTLSFiles(name string) (map[string]EndpointFiles, error)
	GetTLSData(contextName, endpointName, fileName string) ([]byte, error)
}

// Lister provides listing of contexts
type Lister interface {
	List() ([]Metadata, error)
}

// ReaderLister combines Reader and Lister interfaces
type ReaderLister interface {
	Reader
	Lister
}

// StorageInfoProvider provides more information about storage details of contexts
type StorageInfoProvider interface {
	GetStorageInfo(contextName string) StorageInfo
}

// Writer provides write access to context data
type Writer interface {
	CreateOrUpdate(meta Metadata) error
	Remove(name string) error
	ResetTLSMaterial(name string, data *ContextTLSData) error
	ResetEndpointTLSMaterial(contextName string, endpointName string, data *EndpointTLSData) error
}

// ReaderWriter combines Reader and Writer interfaces
type ReaderWriter interface {
	Reader
	Writer
}

// Metadata contains metadata about a context and its endpoints
type Metadata struct {
	Name      string         `json:",omitempty"`
	Metadata  any            `json:",omitempty"`
	Endpoints map[string]any `json:",omitempty"`
}

// StorageInfo contains data about where a given context is stored
type StorageInfo struct {
	MetadataPath string
	TLSPath      string
}

// EndpointTLSData represents tls data for a given endpoint
type EndpointTLSData struct {
	Files map[string][]byte
}

// ContextTLSData represents tls data for a whole context
type ContextTLSData struct {
	Endpoints map[string]EndpointTLSData
}

// New creates a store from a given directory.
// If the directory does not exist or is empty, initialize it
func New(dir string, cfg Config) *ContextStore {
	metaRoot := filepath.Join(dir, metadataDir)
	tlsRoot := filepath.Join(dir, tlsDir)

	return &ContextStore{
		meta: &metadataStore{
			root:   metaRoot,
			config: cfg,
		},
		tls: &tlsStore{
			root: tlsRoot,
		},
	}
}

// ContextStore implements Store.
type ContextStore struct {
	meta *metadataStore
	tls  *tlsStore
}

// List return all contexts.
func (s *ContextStore) List() ([]Metadata, error) {
	return s.meta.list()
}

// Names return Metadata names for a Lister
func Names(s Lister) ([]string, error) {
	list, err := s.List()
	if err != nil {
		return nil, err
	}
	names := make([]string, 0, len(list))
	for _, item := range list {
		names = append(names, item.Name)
	}
	return names, nil
}

// CreateOrUpdate creates or updates metadata for the context.
func (s *ContextStore) CreateOrUpdate(meta Metadata) error {
	return s.meta.createOrUpdate(meta)
}

// Remove deletes the context with the given name, if found.
func (s *ContextStore) Remove(name string) error {
	if err := s.meta.remove(name); err != nil {
		return errors.Wrapf(err, "failed to remove context %s", name)
	}
	if err := s.tls.remove(name); err != nil {
		return errors.Wrapf(err, "failed to remove context %s", name)
	}
	return nil
}

// GetMetadata returns the metadata for the context with the given name.
// It returns an errdefs.ErrNotFound if the context was not found.
func (s *ContextStore) GetMetadata(name string) (Metadata, error) {
	return s.meta.get(name)
}

// ResetTLSMaterial removes TLS data for all endpoints in the context and replaces
// it with the new data.
func (s *ContextStore) ResetTLSMaterial(name string, data *ContextTLSData) error {
	if err := s.tls.remove(name); err != nil {
		return err
	}
	if data == nil {
		return nil
	}
	for ep, files := range data.Endpoints {
		for fileName, data := range files.Files {
			if err := s.tls.createOrUpdate(name, ep, fileName, data); err != nil {
				return err
			}
		}
	}
	return nil
}

// ResetEndpointTLSMaterial removes TLS data for the given context and endpoint,
// and replaces it with the new data.
func (s *ContextStore) ResetEndpointTLSMaterial(contextName string, endpointName string, data *EndpointTLSData) error {
	if err := s.tls.removeEndpoint(contextName, endpointName); err != nil {
		return err
	}
	if data == nil {
		return nil
	}
	for fileName, data := range data.Files {
		if err := s.tls.createOrUpdate(contextName, endpointName, fileName, data); err != nil {
			return err
		}
	}
	return nil
}

// ListTLSFiles returns the list of TLS files present for each endpoint in the
// context.
func (s *ContextStore) ListTLSFiles(name string) (map[string]EndpointFiles, error) {
	return s.tls.listContextData(name)
}

// GetTLSData reads, and returns the content of the given fileName for an endpoint.
// It returns an errdefs.ErrNotFound if the file was not found.
func (s *ContextStore) GetTLSData(contextName, endpointName, fileName string) ([]byte, error) {
	return s.tls.getData(contextName, endpointName, fileName)
}

// GetStorageInfo returns the paths where the Metadata and TLS data are stored
// for the context.
func (s *ContextStore) GetStorageInfo(contextName string) StorageInfo {
	return StorageInfo{
		MetadataPath: s.meta.contextDir(contextdirOf(contextName)),
		TLSPath:      s.tls.contextDir(contextName),
	}
}

// ValidateContextName checks a context name is valid.
func ValidateContextName(name string) error {
	if name == "" {
		return errors.New("context name cannot be empty")
	}
	if name == "default" {
		return errors.New(`"default" is a reserved context name`)
	}
	if !restrictedNameRegEx.MatchString(name) {
		return errors.Errorf("context name %q is invalid, names are validated against regexp %q", name, restrictedNamePattern)
	}
	return nil
}

// Export exports an existing namespace into an opaque data stream
// This stream is actually a tarball containing context metadata and TLS materials, but it does
// not map 1:1 the layout of the context store (don't try to restore it manually without calling store.Import)
func Export(name string, s Reader) io.ReadCloser {
	reader, writer := io.Pipe()
	go func() {
		tw := tar.NewWriter(writer)
		defer tw.Close()
		defer writer.Close()
		meta, err := s.GetMetadata(name)
		if err != nil {
			writer.CloseWithError(err)
			return
		}
		metaBytes, err := json.Marshal(&meta)
		if err != nil {
			writer.CloseWithError(err)
			return
		}
		if err = tw.WriteHeader(&tar.Header{
			Name: metaFile,
			Mode: 0o644,
			Size: int64(len(metaBytes)),
		}); err != nil {
			writer.CloseWithError(err)
			return
		}
		if _, err = tw.Write(metaBytes); err != nil {
			writer.CloseWithError(err)
			return
		}
		tlsFiles, err := s.ListTLSFiles(name)
		if err != nil {
			writer.CloseWithError(err)
			return
		}
		if err = tw.WriteHeader(&tar.Header{
			Name:     "tls",
			Mode:     0o700,
			Size:     0,
			Typeflag: tar.TypeDir,
		}); err != nil {
			writer.CloseWithError(err)
			return
		}
		for endpointName, endpointFiles := range tlsFiles {
			if err = tw.WriteHeader(&tar.Header{
				Name:     path.Join("tls", endpointName),
				Mode:     0o700,
				Size:     0,
				Typeflag: tar.TypeDir,
			}); err != nil {
				writer.CloseWithError(err)
				return
			}
			for _, fileName := range endpointFiles {
				data, err := s.GetTLSData(name, endpointName, fileName)
				if err != nil {
					writer.CloseWithError(err)
					return
				}
				if err = tw.WriteHeader(&tar.Header{
					Name: path.Join("tls", endpointName, fileName),
					Mode: 0o600,
					Size: int64(len(data)),
				}); err != nil {
					writer.CloseWithError(err)
					return
				}
				if _, err = tw.Write(data); err != nil {
					writer.CloseWithError(err)
					return
				}
			}
		}
	}()
	return reader
}

const (
	maxAllowedFileSizeToImport int64  = 10 << 20
	zipType                    string = "application/zip"
)

func getImportContentType(r *bufio.Reader) (string, error) {
	head, err := r.Peek(512)
	if err != nil && err != io.EOF {
		return "", err
	}

	return http.DetectContentType(head), nil
}

// Import imports an exported context into a store
func Import(name string, s Writer, reader io.Reader) error {
	// Buffered reader will not advance the buffer, needed to determine content type
	r := bufio.NewReader(reader)

	importContentType, err := getImportContentType(r)
	if err != nil {
		return err
	}
	switch importContentType {
	case zipType:
		return importZip(name, s, r)
	default:
		// Assume it's a TAR (TAR does not have a "magic number")
		return importTar(name, s, r)
	}
}

func isValidFilePath(p string) error {
	if p != metaFile && !strings.HasPrefix(p, "tls/") {
		return errors.New("unexpected context file")
	}
	if path.Clean(p) != p {
		return errors.New("unexpected path format")
	}
	if strings.Contains(p, `\`) {
		return errors.New(`unexpected '\' in path`)
	}
	return nil
}

func importTar(name string, s Writer, reader io.Reader) error {
	tr := tar.NewReader(&LimitedReader{R: reader, N: maxAllowedFileSizeToImport})
	tlsData := ContextTLSData{
		Endpoints: map[string]EndpointTLSData{},
	}
	var importedMetaFile bool
	for {
		hdr, err := tr.Next()
		if err == io.EOF {
			break
		}
		if err != nil {
			return err
		}
		if hdr.Typeflag != tar.TypeReg {
			// skip this entry, only taking files into account
			continue
		}
		if err := isValidFilePath(hdr.Name); err != nil {
			return errors.Wrap(err, hdr.Name)
		}
		if hdr.Name == metaFile {
			data, err := io.ReadAll(tr)
			if err != nil {
				return err
			}
			meta, err := parseMetadata(data, name)
			if err != nil {
				return err
			}
			if err := s.CreateOrUpdate(meta); err != nil {
				return err
			}
			importedMetaFile = true
		} else if strings.HasPrefix(hdr.Name, "tls/") {
			data, err := io.ReadAll(tr)
			if err != nil {
				return err
			}
			if err := importEndpointTLS(&tlsData, hdr.Name, data); err != nil {
				return err
			}
		}
	}
	if !importedMetaFile {
		return errdefs.InvalidParameter(errors.New("invalid context: no metadata found"))
	}
	return s.ResetTLSMaterial(name, &tlsData)
}

func importZip(name string, s Writer, reader io.Reader) error {
	body, err := io.ReadAll(&LimitedReader{R: reader, N: maxAllowedFileSizeToImport})
	if err != nil {
		return err
	}
	zr, err := zip.NewReader(bytes.NewReader(body), int64(len(body)))
	if err != nil {
		return err
	}
	tlsData := ContextTLSData{
		Endpoints: map[string]EndpointTLSData{},
	}

	var importedMetaFile bool
	for _, zf := range zr.File {
		fi := zf.FileInfo()
		if !fi.Mode().IsRegular() {
			// skip this entry, only taking regular files into account
			continue
		}
		if err := isValidFilePath(zf.Name); err != nil {
			return errors.Wrap(err, zf.Name)
		}
		if zf.Name == metaFile {
			f, err := zf.Open()
			if err != nil {
				return err
			}

			data, err := io.ReadAll(&LimitedReader{R: f, N: maxAllowedFileSizeToImport})
			defer f.Close()
			if err != nil {
				return err
			}
			meta, err := parseMetadata(data, name)
			if err != nil {
				return err
			}
			if err := s.CreateOrUpdate(meta); err != nil {
				return err
			}
			importedMetaFile = true
		} else if strings.HasPrefix(zf.Name, "tls/") {
			f, err := zf.Open()
			if err != nil {
				return err
			}
			data, err := io.ReadAll(f)
			defer f.Close()
			if err != nil {
				return err
			}
			err = importEndpointTLS(&tlsData, zf.Name, data)
			if err != nil {
				return err
			}
		}
	}
	if !importedMetaFile {
		return errdefs.InvalidParameter(errors.New("invalid context: no metadata found"))
	}
	return s.ResetTLSMaterial(name, &tlsData)
}

func parseMetadata(data []byte, name string) (Metadata, error) {
	var meta Metadata
	if err := json.Unmarshal(data, &meta); err != nil {
		return meta, err
	}
	if err := ValidateContextName(name); err != nil {
		return Metadata{}, err
	}
	meta.Name = name
	return meta, nil
}

func importEndpointTLS(tlsData *ContextTLSData, tlsPath string, data []byte) error {
	parts := strings.SplitN(strings.TrimPrefix(tlsPath, "tls/"), "/", 2)
	if len(parts) != 2 {
		// TLS endpoints require archived file directory with 2 layers
		// i.e. tls/{endpointName}/{fileName}
		return errors.New("archive format is invalid")
	}

	epName := parts[0]
	fileName := parts[1]
	if _, ok := tlsData.Endpoints[epName]; !ok {
		tlsData.Endpoints[epName] = EndpointTLSData{
			Files: map[string][]byte{},
		}
	}
	tlsData.Endpoints[epName].Files[fileName] = data
	return nil
}

type contextdir string

func contextdirOf(name string) contextdir {
	return contextdir(digest.FromString(name).Encoded())
}
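For reference, a short usage sketch of the Export and Import helpers defined
above; the function below is purely illustrative and not part of the package:

    // copyContext copies a context from one store to another by exporting it
    // from the source and importing the resulting stream into the destination.
    func copyContext(src Reader, dst Writer, name string) error {
        rc := Export(name, src)
        defer rc.Close()
        return Import(name, dst, rc)
    }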