package store

import (
	"archive/tar"
	"archive/zip"
	"bufio"
	"bytes"
	_ "crypto/sha256" // ensure ids can be computed
	"encoding/json"
	"fmt"
	"io"
	"io/ioutil"
	"net/http"
	"path"
	"path/filepath"
	"regexp"
	"strings"

	"github.com/docker/docker/errdefs"
	digest "github.com/opencontainers/go-digest"
	"github.com/pkg/errors"
)

const restrictedNamePattern = "^[a-zA-Z0-9][a-zA-Z0-9_.+-]+$"

var restrictedNameRegEx = regexp.MustCompile(restrictedNamePattern)

// Store provides a context store for easily remembering endpoint configurations
type Store interface {
	Reader
	Lister
	Writer
	StorageInfoProvider
}

// Reader provides read-only (without list) access to context data
type Reader interface {
	GetMetadata(name string) (Metadata, error)
	ListTLSFiles(name string) (map[string]EndpointFiles, error)
	GetTLSData(contextName, endpointName, fileName string) ([]byte, error)
}

// Lister provides listing of contexts
type Lister interface {
	List() ([]Metadata, error)
}

// ReaderLister combines Reader and Lister interfaces
type ReaderLister interface {
	Reader
	Lister
}

// StorageInfoProvider provides information about where contexts are stored
type StorageInfoProvider interface {
	GetStorageInfo(contextName string) StorageInfo
}

// Writer provides write access to context data
type Writer interface {
	CreateOrUpdate(meta Metadata) error
	Remove(name string) error
	ResetTLSMaterial(name string, data *ContextTLSData) error
	ResetEndpointTLSMaterial(contextName string, endpointName string, data *EndpointTLSData) error
}

// ReaderWriter combines Reader and Writer interfaces
type ReaderWriter interface {
	Reader
	Writer
}

// Metadata contains metadata about a context and its endpoints
type Metadata struct {
	Name      string                 `json:",omitempty"`
	Metadata  interface{}            `json:",omitempty"`
	Endpoints map[string]interface{} `json:",omitempty"`
}
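
// A minimal Metadata sketch; the endpoint payload stored under "docker" is an
// illustrative placeholder, its concrete type is up to the caller:
//
//	meta := Metadata{
//		Name: "remote",
//		Endpoints: map[string]interface{}{
//			"docker": map[string]interface{}{
//				"Host": "ssh://user@remote-host",
//			},
//		},
//	}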

// StorageInfo contains data about where a given context is stored
type StorageInfo struct {
	MetadataPath string
	TLSPath      string
}

// EndpointTLSData represents TLS data for a given endpoint
type EndpointTLSData struct {
	Files map[string][]byte
}

// ContextTLSData represents TLS data for a whole context
type ContextTLSData struct {
	Endpoints map[string]EndpointTLSData
}

// New creates a store from a given directory.
// If the directory does not exist or is empty, it is initialized.
func New(dir string, cfg Config) Store {
	metaRoot := filepath.Join(dir, metadataDir)
	tlsRoot := filepath.Join(dir, tlsDir)

	return &store{
		meta: &metadataStore{
			root:   metaRoot,
			config: cfg,
		},
		tls: &tlsStore{
			root: tlsRoot,
		},
	}
}
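
// A minimal usage sketch for New (configDir is a placeholder for the CLI
// configuration directory and cfg a Config describing the known endpoint
// types, both supplied by the caller):
//
//	s := New(filepath.Join(configDir, "contexts"), cfg)
//	contexts, err := s.List()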

type store struct {
	meta *metadataStore
	tls  *tlsStore
}

func (s *store) List() ([]Metadata, error) {
	return s.meta.list()
}

func (s *store) CreateOrUpdate(meta Metadata) error {
	return s.meta.createOrUpdate(meta)
}

func (s *store) Remove(name string) error {
	id := contextdirOf(name)
	if err := s.meta.remove(id); err != nil {
		return patchErrContextName(err, name)
	}
	return patchErrContextName(s.tls.removeAllContextData(id), name)
}

func (s *store) GetMetadata(name string) (Metadata, error) {
	res, err := s.meta.get(contextdirOf(name))
	return res, patchErrContextName(err, name)
}

func (s *store) ResetTLSMaterial(name string, data *ContextTLSData) error {
	id := contextdirOf(name)
	if err := s.tls.removeAllContextData(id); err != nil {
		return patchErrContextName(err, name)
	}
	if data == nil {
		return nil
	}
	for ep, files := range data.Endpoints {
		for fileName, data := range files.Files {
			if err := s.tls.createOrUpdate(id, ep, fileName, data); err != nil {
				return patchErrContextName(err, name)
			}
		}
	}
	return nil
}

func (s *store) ResetEndpointTLSMaterial(contextName string, endpointName string, data *EndpointTLSData) error {
	id := contextdirOf(contextName)
	if err := s.tls.removeAllEndpointData(id, endpointName); err != nil {
		return patchErrContextName(err, contextName)
	}
	if data == nil {
		return nil
	}
	for fileName, data := range data.Files {
		if err := s.tls.createOrUpdate(id, endpointName, fileName, data); err != nil {
			return patchErrContextName(err, contextName)
		}
	}
	return nil
}

func (s *store) ListTLSFiles(name string) (map[string]EndpointFiles, error) {
	res, err := s.tls.listContextData(contextdirOf(name))
	return res, patchErrContextName(err, name)
}

func (s *store) GetTLSData(contextName, endpointName, fileName string) ([]byte, error) {
	res, err := s.tls.getData(contextdirOf(contextName), endpointName, fileName)
	return res, patchErrContextName(err, contextName)
}

func (s *store) GetStorageInfo(contextName string) StorageInfo {
	dir := contextdirOf(contextName)
	return StorageInfo{
		MetadataPath: s.meta.contextDir(dir),
		TLSPath:      s.tls.contextDir(dir),
	}
}

// ValidateContextName checks that a context name is valid.
func ValidateContextName(name string) error {
	if name == "" {
		return errors.New("context name cannot be empty")
	}
	if name == "default" {
		return errors.New(`"default" is a reserved context name`)
	}
	if !restrictedNameRegEx.MatchString(name) {
		return fmt.Errorf("context name %q is invalid, names are validated against regexp %q", name, restrictedNamePattern)
	}
	return nil
}
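
// Validation sketch against restrictedNamePattern:
//
//	ValidateContextName("prod-us-1")  // nil: starts alphanumeric, rest matches [a-zA-Z0-9_.+-]
//	ValidateContextName("default")    // error: reserved name
//	ValidateContextName("-prod")      // error: must start with [a-zA-Z0-9]
//	ValidateContextName("my context") // error: spaces are not allowed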

// Export exports an existing context into an opaque data stream.
// The stream is a tarball containing the context metadata and TLS material, but it does
// not map 1:1 to the layout of the context store (don't try to restore it manually without calling store.Import).
func Export(name string, s Reader) io.ReadCloser {
	reader, writer := io.Pipe()
	go func() {
		tw := tar.NewWriter(writer)
		defer tw.Close()
		defer writer.Close()
		meta, err := s.GetMetadata(name)
		if err != nil {
			writer.CloseWithError(err)
			return
		}
		metaBytes, err := json.Marshal(&meta)
		if err != nil {
			writer.CloseWithError(err)
			return
		}
		if err = tw.WriteHeader(&tar.Header{
			Name: metaFile,
			Mode: 0644,
			Size: int64(len(metaBytes)),
		}); err != nil {
			writer.CloseWithError(err)
			return
		}
		if _, err = tw.Write(metaBytes); err != nil {
			writer.CloseWithError(err)
			return
		}
		tlsFiles, err := s.ListTLSFiles(name)
		if err != nil {
			writer.CloseWithError(err)
			return
		}
		if err = tw.WriteHeader(&tar.Header{
			Name:     "tls",
			Mode:     0700,
			Size:     0,
			Typeflag: tar.TypeDir,
		}); err != nil {
			writer.CloseWithError(err)
			return
		}
		for endpointName, endpointFiles := range tlsFiles {
			if err = tw.WriteHeader(&tar.Header{
				Name:     path.Join("tls", endpointName),
				Mode:     0700,
				Size:     0,
				Typeflag: tar.TypeDir,
			}); err != nil {
				writer.CloseWithError(err)
				return
			}
			for _, fileName := range endpointFiles {
				data, err := s.GetTLSData(name, endpointName, fileName)
				if err != nil {
					writer.CloseWithError(err)
					return
				}
				if err = tw.WriteHeader(&tar.Header{
					Name: path.Join("tls", endpointName, fileName),
					Mode: 0600,
					Size: int64(len(data)),
				}); err != nil {
					writer.CloseWithError(err)
					return
				}
				if _, err = tw.Write(data); err != nil {
					writer.CloseWithError(err)
					return
				}
			}
		}
	}()
	return reader
}
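
// A minimal export sketch, streaming a context to a file (the file name is
// illustrative; s is any Reader):
//
//	rc := Export("remote", s)
//	defer rc.Close()
//	if f, err := os.Create("remote.dockercontext"); err == nil {
//		defer f.Close()
//		_, _ = io.Copy(f, rc)
//	}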

const (
	maxAllowedFileSizeToImport int64  = 10 << 20
	zipType                    string = "application/zip"
)

func getImportContentType(r *bufio.Reader) (string, error) {
	head, err := r.Peek(512)
	if err != nil && err != io.EOF {
		return "", err
	}

	return http.DetectContentType(head), nil
}
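
// Sniffing sketch: http.DetectContentType recognizes the ZIP local-file-header
// magic ("PK\x03\x04") as "application/zip", while a tar stream carries no
// magic it knows within the first 512 bytes, so it falls through to the
// default (tar) branch of Import below:
//
//	ct, _ := getImportContentType(bufio.NewReader(bytes.NewReader([]byte("PK\x03\x04...."))))
//	// ct == "application/zip"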

// Import imports an exported context into a store
func Import(name string, s Writer, reader io.Reader) error {
	// Peek through a buffered reader to determine the content type without
	// consuming the bytes needed by the actual importer below.
	r := bufio.NewReader(reader)

	importContentType, err := getImportContentType(r)
	if err != nil {
		return err
	}
	switch importContentType {
	case zipType:
		return importZip(name, s, r)
	default:
		// Assume it's a tar archive (tar has no "magic number" to sniff)
		return importTar(name, s, r)
	}
}
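
// A minimal import sketch, reading back an archive produced by Export (the
// file name is illustrative):
//
//	f, err := os.Open("remote.dockercontext")
//	if err == nil {
//		defer f.Close()
//		err = Import("remote-copy", s, f)
//	}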

func isValidFilePath(p string) error {
	if p != metaFile && !strings.HasPrefix(p, "tls/") {
		return errors.New("unexpected context file")
	}
	if path.Clean(p) != p {
		return errors.New("unexpected path format")
	}
	if strings.Contains(p, `\`) {
		return errors.New(`unexpected '\' in path`)
	}
	return nil
}
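
// Path validation sketch (assuming metaFile, defined elsewhere in this
// package, is "meta.json"):
//
//	isValidFilePath("meta.json")          // nil
//	isValidFilePath("tls/docker/ca.pem")  // nil
//	isValidFilePath("tls/../meta.json")   // error: path.Clean rewrites it
//	isValidFilePath("tls/docker\\ca.pem") // error: '\' is rejected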

func importTar(name string, s Writer, reader io.Reader) error {
	tr := tar.NewReader(&LimitedReader{R: reader, N: maxAllowedFileSizeToImport})
	tlsData := ContextTLSData{
		Endpoints: map[string]EndpointTLSData{},
	}
	var importedMetaFile bool
	for {
		hdr, err := tr.Next()
		if err == io.EOF {
			break
		}
		if err != nil {
			return err
		}
		if hdr.Typeflag != tar.TypeReg {
			// skip this entry, only taking regular files into account
			continue
		}
		if err := isValidFilePath(hdr.Name); err != nil {
			return errors.Wrap(err, hdr.Name)
		}
		if hdr.Name == metaFile {
			data, err := ioutil.ReadAll(tr)
			if err != nil {
				return err
			}
			meta, err := parseMetadata(data, name)
			if err != nil {
				return err
			}
			if err := s.CreateOrUpdate(meta); err != nil {
				return err
			}
			importedMetaFile = true
		} else if strings.HasPrefix(hdr.Name, "tls/") {
			data, err := ioutil.ReadAll(tr)
			if err != nil {
				return err
			}
			if err := importEndpointTLS(&tlsData, hdr.Name, data); err != nil {
				return err
			}
		}
	}
	if !importedMetaFile {
		return errdefs.InvalidParameter(errors.New("invalid context: no metadata found"))
	}
	return s.ResetTLSMaterial(name, &tlsData)
}

func importZip(name string, s Writer, reader io.Reader) error {
	body, err := ioutil.ReadAll(&LimitedReader{R: reader, N: maxAllowedFileSizeToImport})
	if err != nil {
		return err
	}
	zr, err := zip.NewReader(bytes.NewReader(body), int64(len(body)))
	if err != nil {
		return err
	}
	tlsData := ContextTLSData{
		Endpoints: map[string]EndpointTLSData{},
	}

	var importedMetaFile bool
	for _, zf := range zr.File {
		fi := zf.FileInfo()
		if !fi.Mode().IsRegular() {
			// skip this entry, only taking regular files into account
			continue
		}
		if err := isValidFilePath(zf.Name); err != nil {
			return errors.Wrap(err, zf.Name)
		}
		if zf.Name == metaFile {
			f, err := zf.Open()
			if err != nil {
				return err
			}

			data, err := ioutil.ReadAll(&LimitedReader{R: f, N: maxAllowedFileSizeToImport})
			defer f.Close()
			if err != nil {
				return err
			}
			meta, err := parseMetadata(data, name)
			if err != nil {
				return err
			}
			if err := s.CreateOrUpdate(meta); err != nil {
				return err
			}
			importedMetaFile = true
		} else if strings.HasPrefix(zf.Name, "tls/") {
			f, err := zf.Open()
			if err != nil {
				return err
			}
			data, err := ioutil.ReadAll(f)
			defer f.Close()
			if err != nil {
				return err
			}
			err = importEndpointTLS(&tlsData, zf.Name, data)
			if err != nil {
				return err
			}
		}
	}
	if !importedMetaFile {
		return errdefs.InvalidParameter(errors.New("invalid context: no metadata found"))
	}
	return s.ResetTLSMaterial(name, &tlsData)
}

func parseMetadata(data []byte, name string) (Metadata, error) {
	var meta Metadata
	if err := json.Unmarshal(data, &meta); err != nil {
		return meta, err
	}
	if err := ValidateContextName(name); err != nil {
		return Metadata{}, err
	}
	meta.Name = name
	return meta, nil
}

func importEndpointTLS(tlsData *ContextTLSData, path string, data []byte) error {
	parts := strings.SplitN(strings.TrimPrefix(path, "tls/"), "/", 2)
	if len(parts) != 2 {
		// TLS files in the archive must be nested exactly two levels deep,
		// i.e. tls/{endpointName}/{fileName}
		return errors.New("archive format is invalid")
	}

	epName := parts[0]
	fileName := parts[1]
	if _, ok := tlsData.Endpoints[epName]; !ok {
		tlsData.Endpoints[epName] = EndpointTLSData{
			Files: map[string][]byte{},
		}
	}
	tlsData.Endpoints[epName].Files[fileName] = data
	return nil
}
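
// Path-splitting sketch: an archive entry named "tls/docker/ca.pem" becomes
// file "ca.pem" of endpoint "docker" (tlsData and pemBytes stand in for the
// values built by importTar/importZip above):
//
//	_ = importEndpointTLS(&tlsData, "tls/docker/ca.pem", pemBytes)
//	// tlsData.Endpoints["docker"].Files["ca.pem"] now holds pemBytes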

type setContextName interface {
	setContext(name string)
}

type contextDoesNotExistError struct {
	name string
}

func (e *contextDoesNotExistError) Error() string {
	return fmt.Sprintf("context %q does not exist", e.name)
}

func (e *contextDoesNotExistError) setContext(name string) {
	e.name = name
}

// NotFound satisfies interface github.com/docker/docker/errdefs.ErrNotFound
func (e *contextDoesNotExistError) NotFound() {}

type tlsDataDoesNotExist interface {
	errdefs.ErrNotFound
	IsTLSDataDoesNotExist()
}

type tlsDataDoesNotExistError struct {
	context, endpoint, file string
}

func (e *tlsDataDoesNotExistError) Error() string {
	return fmt.Sprintf("tls data for %s/%s/%s does not exist", e.context, e.endpoint, e.file)
}

func (e *tlsDataDoesNotExistError) setContext(name string) {
	e.context = name
}

// NotFound satisfies interface github.com/docker/docker/errdefs.ErrNotFound
func (e *tlsDataDoesNotExistError) NotFound() {}

// IsTLSDataDoesNotExist satisfies tlsDataDoesNotExist
func (e *tlsDataDoesNotExistError) IsTLSDataDoesNotExist() {}

// IsErrContextDoesNotExist checks if the given error is a "context does not exist" condition
func IsErrContextDoesNotExist(err error) bool {
	_, ok := err.(*contextDoesNotExistError)
	return ok
}

// IsErrTLSDataDoesNotExist checks if the given error is a "TLS data does not exist" condition
func IsErrTLSDataDoesNotExist(err error) bool {
	_, ok := err.(tlsDataDoesNotExist)
	return ok
}

type contextdir string

func contextdirOf(name string) contextdir {
	return contextdir(digest.FromString(name).Encoded())
}
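
// Context directories are content-addressed: contextdirOf returns the hex
// SHA-256 digest of the context name (hence the blank crypto/sha256 import
// above), so the same name always maps to the same directory, e.g.:
//
//	dir := contextdirOf("prod") // a stable 64-character hex string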

func patchErrContextName(err error, name string) error {
	if typed, ok := err.(setContextName); ok {
		typed.setContext(name)
	}
	return err
}