diff --git a/command/stack/deploy.go b/command/stack/deploy.go index 32ebd62d3f..f4730db556 100644 --- a/command/stack/deploy.go +++ b/command/stack/deploy.go @@ -11,13 +11,13 @@ import ( "github.com/spf13/cobra" "golang.org/x/net/context" - "github.com/aanand/compose-file/loader" - composetypes "github.com/aanand/compose-file/types" "github.com/docker/docker/api/types" "github.com/docker/docker/api/types/swarm" "github.com/docker/docker/cli" "github.com/docker/docker/cli/command" "github.com/docker/docker/cli/compose/convert" + "github.com/docker/docker/cli/compose/loader" + composetypes "github.com/docker/docker/cli/compose/types" dockerclient "github.com/docker/docker/client" ) diff --git a/compose/convert/compose.go b/compose/convert/compose.go index e0684482b8..7c410844c7 100644 --- a/compose/convert/compose.go +++ b/compose/convert/compose.go @@ -1,9 +1,9 @@ package convert import ( - composetypes "github.com/aanand/compose-file/types" "github.com/docker/docker/api/types" networktypes "github.com/docker/docker/api/types/network" + composetypes "github.com/docker/docker/cli/compose/types" ) const ( diff --git a/compose/convert/compose_test.go b/compose/convert/compose_test.go index 8f8e8ea6d8..27a67047d8 100644 --- a/compose/convert/compose_test.go +++ b/compose/convert/compose_test.go @@ -3,9 +3,9 @@ package convert import ( "testing" - composetypes "github.com/aanand/compose-file/types" "github.com/docker/docker/api/types" "github.com/docker/docker/api/types/network" + composetypes "github.com/docker/docker/cli/compose/types" "github.com/docker/docker/pkg/testutil/assert" ) diff --git a/compose/convert/service.go b/compose/convert/service.go index 458b518a46..2a8ed8288d 100644 --- a/compose/convert/service.go +++ b/compose/convert/service.go @@ -4,9 +4,9 @@ import ( "fmt" "time" - composetypes "github.com/aanand/compose-file/types" "github.com/docker/docker/api/types/container" "github.com/docker/docker/api/types/swarm" + composetypes "github.com/docker/docker/cli/compose/types" "github.com/docker/docker/opts" runconfigopts "github.com/docker/docker/runconfig/opts" "github.com/docker/go-connections/nat" diff --git a/compose/convert/service_test.go b/compose/convert/service_test.go index a6884917de..45da764325 100644 --- a/compose/convert/service_test.go +++ b/compose/convert/service_test.go @@ -6,9 +6,9 @@ import ( "testing" "time" - composetypes "github.com/aanand/compose-file/types" "github.com/docker/docker/api/types/container" "github.com/docker/docker/api/types/swarm" + composetypes "github.com/docker/docker/cli/compose/types" "github.com/docker/docker/pkg/testutil/assert" ) diff --git a/compose/convert/volume.go b/compose/convert/volume.go index 3a7504106a..24442d4dc7 100644 --- a/compose/convert/volume.go +++ b/compose/convert/volume.go @@ -4,8 +4,8 @@ import ( "fmt" "strings" - composetypes "github.com/aanand/compose-file/types" "github.com/docker/docker/api/types/mount" + composetypes "github.com/docker/docker/cli/compose/types" ) type volumes map[string]composetypes.VolumeConfig diff --git a/compose/convert/volume_test.go b/compose/convert/volume_test.go index bcbfb08b95..1132136b22 100644 --- a/compose/convert/volume_test.go +++ b/compose/convert/volume_test.go @@ -3,8 +3,8 @@ package convert import ( "testing" - composetypes "github.com/aanand/compose-file/types" "github.com/docker/docker/api/types/mount" + composetypes "github.com/docker/docker/cli/compose/types" "github.com/docker/docker/pkg/testutil/assert" ) diff --git a/compose/interpolation/interpolation.go 
b/compose/interpolation/interpolation.go new file mode 100644 index 0000000000..734f28ec9d --- /dev/null +++ b/compose/interpolation/interpolation.go @@ -0,0 +1,90 @@ +package interpolation + +import ( + "fmt" + + "github.com/docker/docker/cli/compose/template" + "github.com/docker/docker/cli/compose/types" +) + +// Interpolate replaces variables in a string with the values from a mapping +func Interpolate(config types.Dict, section string, mapping template.Mapping) (types.Dict, error) { + out := types.Dict{} + + for name, item := range config { + if item == nil { + out[name] = nil + continue + } + interpolatedItem, err := interpolateSectionItem(name, item.(types.Dict), section, mapping) + if err != nil { + return nil, err + } + out[name] = interpolatedItem + } + + return out, nil +} + +func interpolateSectionItem( + name string, + item types.Dict, + section string, + mapping template.Mapping, +) (types.Dict, error) { + + out := types.Dict{} + + for key, value := range item { + interpolatedValue, err := recursiveInterpolate(value, mapping) + if err != nil { + return nil, fmt.Errorf( + "Invalid interpolation format for %#v option in %s %#v: %#v", + key, section, name, err.Template, + ) + } + out[key] = interpolatedValue + } + + return out, nil + +} + +func recursiveInterpolate( + value interface{}, + mapping template.Mapping, +) (interface{}, *template.InvalidTemplateError) { + + switch value := value.(type) { + + case string: + return template.Substitute(value, mapping) + + case types.Dict: + out := types.Dict{} + for key, elem := range value { + interpolatedElem, err := recursiveInterpolate(elem, mapping) + if err != nil { + return nil, err + } + out[key] = interpolatedElem + } + return out, nil + + case []interface{}: + out := make([]interface{}, len(value)) + for i, elem := range value { + interpolatedElem, err := recursiveInterpolate(elem, mapping) + if err != nil { + return nil, err + } + out[i] = interpolatedElem + } + return out, nil + + default: + return value, nil + + } + +} diff --git a/compose/interpolation/interpolation_test.go b/compose/interpolation/interpolation_test.go new file mode 100644 index 0000000000..c3921701b3 --- /dev/null +++ b/compose/interpolation/interpolation_test.go @@ -0,0 +1,59 @@ +package interpolation + +import ( + "testing" + + "github.com/stretchr/testify/assert" + + "github.com/docker/docker/cli/compose/types" +) + +var defaults = map[string]string{ + "USER": "jenny", + "FOO": "bar", +} + +func defaultMapping(name string) (string, bool) { + val, ok := defaults[name] + return val, ok +} + +func TestInterpolate(t *testing.T) { + services := types.Dict{ + "servicea": types.Dict{ + "image": "example:${USER}", + "volumes": []interface{}{"$FOO:/target"}, + "logging": types.Dict{ + "driver": "${FOO}", + "options": types.Dict{ + "user": "$USER", + }, + }, + }, + } + expected := types.Dict{ + "servicea": types.Dict{ + "image": "example:jenny", + "volumes": []interface{}{"bar:/target"}, + "logging": types.Dict{ + "driver": "bar", + "options": types.Dict{ + "user": "jenny", + }, + }, + }, + } + result, err := Interpolate(services, "service", defaultMapping) + assert.NoError(t, err) + assert.Equal(t, expected, result) +} + +func TestInvalidInterpolation(t *testing.T) { + services := types.Dict{ + "servicea": types.Dict{ + "image": "${", + }, + } + _, err := Interpolate(services, "service", defaultMapping) + assert.EqualError(t, err, `Invalid interpolation format for "image" option in service "servicea": "${"`) +} diff --git a/compose/loader/example1.env 
b/compose/loader/example1.env new file mode 100644 index 0000000000..3e7a059613 --- /dev/null +++ b/compose/loader/example1.env @@ -0,0 +1,8 @@ +# passed through +FOO=1 + +# overridden in example2.env +BAR=1 + +# overridden in full-example.yml +BAZ=1 diff --git a/compose/loader/example2.env b/compose/loader/example2.env new file mode 100644 index 0000000000..0920d5ab05 --- /dev/null +++ b/compose/loader/example2.env @@ -0,0 +1 @@ +BAR=2 diff --git a/compose/loader/full-example.yml b/compose/loader/full-example.yml new file mode 100644 index 0000000000..fb5686a380 --- /dev/null +++ b/compose/loader/full-example.yml @@ -0,0 +1,287 @@ +version: "3" + +services: + foo: + cap_add: + - ALL + + cap_drop: + - NET_ADMIN + - SYS_ADMIN + + cgroup_parent: m-executor-abcd + + # String or list + command: bundle exec thin -p 3000 + # command: ["bundle", "exec", "thin", "-p", "3000"] + + container_name: my-web-container + + depends_on: + - db + - redis + + deploy: + mode: replicated + replicas: 6 + labels: [FOO=BAR] + update_config: + parallelism: 3 + delay: 10s + failure_action: continue + monitor: 60s + max_failure_ratio: 0.3 + resources: + limits: + cpus: '0.001' + memory: 50M + reservations: + cpus: '0.0001' + memory: 20M + restart_policy: + condition: on_failure + delay: 5s + max_attempts: 3 + window: 120s + placement: + constraints: [node=foo] + + devices: + - "/dev/ttyUSB0:/dev/ttyUSB0" + + # String or list + # dns: 8.8.8.8 + dns: + - 8.8.8.8 + - 9.9.9.9 + + # String or list + # dns_search: example.com + dns_search: + - dc1.example.com + - dc2.example.com + + domainname: foo.com + + # String or list + # entrypoint: /code/entrypoint.sh -p 3000 + entrypoint: ["/code/entrypoint.sh", "-p", "3000"] + + # String or list + # env_file: .env + env_file: + - ./example1.env + - ./example2.env + + # Mapping or list + # Mapping values can be strings, numbers or null + # Booleans are not allowed - must be quoted + environment: + RACK_ENV: development + SHOW: 'true' + SESSION_SECRET: + BAZ: 3 + # environment: + # - RACK_ENV=development + # - SHOW=true + # - SESSION_SECRET + + # Items can be strings or numbers + expose: + - "3000" + - 8000 + + external_links: + - redis_1 + - project_db_1:mysql + - project_db_1:postgresql + + # Mapping or list + # Mapping values must be strings + # extra_hosts: + # somehost: "162.242.195.82" + # otherhost: "50.31.209.229" + extra_hosts: + - "somehost:162.242.195.82" + - "otherhost:50.31.209.229" + + hostname: foo + + healthcheck: + test: echo "hello world" + interval: 10s + timeout: 1s + retries: 5 + + # Any valid image reference - repo, tag, id, sha + image: redis + # image: ubuntu:14.04 + # image: tutum/influxdb + # image: example-registry.com:4000/postgresql + # image: a4bc65fd + # image: busybox@sha256:38a203e1986cf79639cfb9b2e1d6e773de84002feea2d4eb006b52004ee8502d + + ipc: host + + # Mapping or list + # Mapping values can be strings, numbers or null + labels: + com.example.description: "Accounting webapp" + com.example.number: 42 + com.example.empty-label: + # labels: + # - "com.example.description=Accounting webapp" + # - "com.example.number=42" + # - "com.example.empty-label" + + links: + - db + - db:database + - redis + + logging: + driver: syslog + options: + syslog-address: "tcp://192.168.0.42:123" + + mac_address: 02:42:ac:11:65:43 + + # network_mode: "bridge" + # network_mode: "host" + # network_mode: "none" + # Use the network mode of an arbitrary container from another service + # network_mode: "service:db" + # Use the network mode of another container, specified by 
name or id + # network_mode: "container:some-container" + network_mode: "container:0cfeab0f748b9a743dc3da582046357c6ef497631c1a016d28d2bf9b4f899f7b" + + networks: + some-network: + aliases: + - alias1 + - alias3 + other-network: + ipv4_address: 172.16.238.10 + ipv6_address: 2001:3984:3989::10 + other-other-network: + + pid: "host" + + ports: + - 3000 + - "3000-3005" + - "8000:8000" + - "9090-9091:8080-8081" + - "49100:22" + - "127.0.0.1:8001:8001" + - "127.0.0.1:5000-5010:5000-5010" + + privileged: true + + read_only: true + + restart: always + + security_opt: + - label=level:s0:c100,c200 + - label=type:svirt_apache_t + + stdin_open: true + + stop_grace_period: 20s + + stop_signal: SIGUSR1 + + # String or list + # tmpfs: /run + tmpfs: + - /run + - /tmp + + tty: true + + ulimits: + # Single number or mapping with soft + hard limits + nproc: 65535 + nofile: + soft: 20000 + hard: 40000 + + user: someone + + volumes: + # Just specify a path and let the Engine create a volume + - /var/lib/mysql + # Specify an absolute path mapping + - /opt/data:/var/lib/mysql + # Path on the host, relative to the Compose file + - .:/code + - ./static:/var/www/html + # User-relative path + - ~/configs:/etc/configs/:ro + # Named volume + - datavolume:/var/lib/mysql + + working_dir: /code + +networks: + # Entries can be null, which specifies simply that a network + # called "{project name}_some-network" should be created and + # use the default driver + some-network: + + other-network: + driver: overlay + + driver_opts: + # Values can be strings or numbers + foo: "bar" + baz: 1 + + ipam: + driver: overlay + # driver_opts: + # # Values can be strings or numbers + # com.docker.network.enable_ipv6: "true" + # com.docker.network.numeric_value: 1 + config: + - subnet: 172.16.238.0/24 + # gateway: 172.16.238.1 + - subnet: 2001:3984:3989::/64 + # gateway: 2001:3984:3989::1 + + external-network: + # Specifies that a pre-existing network called "external-network" + # can be referred to within this file as "external-network" + external: true + + other-external-network: + # Specifies that a pre-existing network called "my-cool-network" + # can be referred to within this file as "other-external-network" + external: + name: my-cool-network + +volumes: + # Entries can be null, which specifies simply that a volume + # called "{project name}_some-volume" should be created and + # use the default driver + some-volume: + + other-volume: + driver: flocker + + driver_opts: + # Values can be strings or numbers + foo: "bar" + baz: 1 + + external-volume: + # Specifies that a pre-existing volume called "external-volume" + # can be referred to within this file as "external-volume" + external: true + + other-external-volume: + # Specifies that a pre-existing volume called "my-cool-volume" + # can be referred to within this file as "other-external-volume" + external: + name: my-cool-volume diff --git a/compose/loader/loader.go b/compose/loader/loader.go new file mode 100644 index 0000000000..9e46b97594 --- /dev/null +++ b/compose/loader/loader.go @@ -0,0 +1,611 @@ +package loader + +import ( + "fmt" + "os" + "path" + "reflect" + "regexp" + "sort" + "strings" + + "github.com/docker/docker/cli/compose/interpolation" + "github.com/docker/docker/cli/compose/schema" + "github.com/docker/docker/cli/compose/types" + "github.com/docker/docker/runconfig/opts" + units "github.com/docker/go-units" + shellwords "github.com/mattn/go-shellwords" + "github.com/mitchellh/mapstructure" + yaml "gopkg.in/yaml.v2" +) + +var ( + fieldNameRegexp = 
regexp.MustCompile("[A-Z][a-z0-9]+") +) + +// ParseYAML reads the bytes from a file, parses the bytes into a mapping +// structure, and returns it. +func ParseYAML(source []byte) (types.Dict, error) { + var cfg interface{} + if err := yaml.Unmarshal(source, &cfg); err != nil { + return nil, err + } + cfgMap, ok := cfg.(map[interface{}]interface{}) + if !ok { + return nil, fmt.Errorf("Top-level object must be a mapping") + } + converted, err := convertToStringKeysRecursive(cfgMap, "") + if err != nil { + return nil, err + } + return converted.(types.Dict), nil +} + +// Load reads a ConfigDetails and returns a fully loaded configuration +func Load(configDetails types.ConfigDetails) (*types.Config, error) { + if len(configDetails.ConfigFiles) < 1 { + return nil, fmt.Errorf("No files specified") + } + if len(configDetails.ConfigFiles) > 1 { + return nil, fmt.Errorf("Multiple files are not yet supported") + } + + configDict := getConfigDict(configDetails) + + if services, ok := configDict["services"]; ok { + if servicesDict, ok := services.(types.Dict); ok { + forbidden := getProperties(servicesDict, types.ForbiddenProperties) + + if len(forbidden) > 0 { + return nil, &ForbiddenPropertiesError{Properties: forbidden} + } + } + } + + if err := schema.Validate(configDict); err != nil { + return nil, err + } + + cfg := types.Config{} + version := configDict["version"].(string) + if version != "3" && version != "3.0" { + return nil, fmt.Errorf(`Unsupported Compose file version: %#v. The only version supported is "3" (or "3.0")`, version) + } + + if services, ok := configDict["services"]; ok { + servicesConfig, err := interpolation.Interpolate(services.(types.Dict), "service", os.LookupEnv) + if err != nil { + return nil, err + } + + servicesList, err := loadServices(servicesConfig, configDetails.WorkingDir) + if err != nil { + return nil, err + } + + cfg.Services = servicesList + } + + if networks, ok := configDict["networks"]; ok { + networksConfig, err := interpolation.Interpolate(networks.(types.Dict), "network", os.LookupEnv) + if err != nil { + return nil, err + } + + networksMapping, err := loadNetworks(networksConfig) + if err != nil { + return nil, err + } + + cfg.Networks = networksMapping + } + + if volumes, ok := configDict["volumes"]; ok { + volumesConfig, err := interpolation.Interpolate(volumes.(types.Dict), "volume", os.LookupEnv) + if err != nil { + return nil, err + } + + volumesMapping, err := loadVolumes(volumesConfig) + if err != nil { + return nil, err + } + + cfg.Volumes = volumesMapping + } + + return &cfg, nil +} + +// GetUnsupportedProperties returns the list of any unsupported properties that are +// used in the Compose files. +func GetUnsupportedProperties(configDetails types.ConfigDetails) []string { + unsupported := map[string]bool{} + + for _, service := range getServices(getConfigDict(configDetails)) { + serviceDict := service.(types.Dict) + for _, property := range types.UnsupportedProperties { + if _, isSet := serviceDict[property]; isSet { + unsupported[property] = true + } + } + } + + return sortedKeys(unsupported) +} + +func sortedKeys(set map[string]bool) []string { + var keys []string + for key := range set { + keys = append(keys, key) + } + sort.Strings(keys) + return keys +} + +// GetDeprecatedProperties returns the list of any deprecated properties that +// are used in the compose files. 
+func GetDeprecatedProperties(configDetails types.ConfigDetails) map[string]string { + return getProperties(getServices(getConfigDict(configDetails)), types.DeprecatedProperties) +} + +func getProperties(services types.Dict, propertyMap map[string]string) map[string]string { + output := map[string]string{} + + for _, service := range services { + if serviceDict, ok := service.(types.Dict); ok { + for property, description := range propertyMap { + if _, isSet := serviceDict[property]; isSet { + output[property] = description + } + } + } + } + + return output +} + +// ForbiddenPropertiesError is returned when there are properties in the Compose +// file that are forbidden. +type ForbiddenPropertiesError struct { + Properties map[string]string +} + +func (e *ForbiddenPropertiesError) Error() string { + return "Configuration contains forbidden properties" +} + +// TODO: resolve multiple files into a single config +func getConfigDict(configDetails types.ConfigDetails) types.Dict { + return configDetails.ConfigFiles[0].Config +} + +func getServices(configDict types.Dict) types.Dict { + if services, ok := configDict["services"]; ok { + if servicesDict, ok := services.(types.Dict); ok { + return servicesDict + } + } + + return types.Dict{} +} + +func transform(source map[string]interface{}, target interface{}) error { + data := mapstructure.Metadata{} + config := &mapstructure.DecoderConfig{ + DecodeHook: mapstructure.ComposeDecodeHookFunc( + transformHook, + mapstructure.StringToTimeDurationHookFunc()), + Result: target, + Metadata: &data, + } + decoder, err := mapstructure.NewDecoder(config) + if err != nil { + return err + } + err = decoder.Decode(source) + // TODO: log unused keys + return err +} + +func transformHook( + source reflect.Type, + target reflect.Type, + data interface{}, +) (interface{}, error) { + switch target { + case reflect.TypeOf(types.External{}): + return transformExternal(source, target, data) + case reflect.TypeOf(make(map[string]string, 0)): + return transformMapStringString(source, target, data) + case reflect.TypeOf(types.UlimitsConfig{}): + return transformUlimits(source, target, data) + case reflect.TypeOf(types.UnitBytes(0)): + return loadSize(data) + } + switch target.Kind() { + case reflect.Struct: + return transformStruct(source, target, data) + } + return data, nil +} + +// keys needs to be converted to strings for jsonschema +// TODO: don't use types.Dict +func convertToStringKeysRecursive(value interface{}, keyPrefix string) (interface{}, error) { + if mapping, ok := value.(map[interface{}]interface{}); ok { + dict := make(types.Dict) + for key, entry := range mapping { + str, ok := key.(string) + if !ok { + var location string + if keyPrefix == "" { + location = "at top level" + } else { + location = fmt.Sprintf("in %s", keyPrefix) + } + return nil, fmt.Errorf("Non-string key %s: %#v", location, key) + } + var newKeyPrefix string + if keyPrefix == "" { + newKeyPrefix = str + } else { + newKeyPrefix = fmt.Sprintf("%s.%s", keyPrefix, str) + } + convertedEntry, err := convertToStringKeysRecursive(entry, newKeyPrefix) + if err != nil { + return nil, err + } + dict[str] = convertedEntry + } + return dict, nil + } + if list, ok := value.([]interface{}); ok { + var convertedList []interface{} + for index, entry := range list { + newKeyPrefix := fmt.Sprintf("%s[%d]", keyPrefix, index) + convertedEntry, err := convertToStringKeysRecursive(entry, newKeyPrefix) + if err != nil { + return nil, err + } + convertedList = append(convertedList, convertedEntry) + } + return 
convertedList, nil + } + return value, nil +} + +func loadServices(servicesDict types.Dict, workingDir string) ([]types.ServiceConfig, error) { + var services []types.ServiceConfig + + for name, serviceDef := range servicesDict { + serviceConfig, err := loadService(name, serviceDef.(types.Dict), workingDir) + if err != nil { + return nil, err + } + services = append(services, *serviceConfig) + } + + return services, nil +} + +func loadService(name string, serviceDict types.Dict, workingDir string) (*types.ServiceConfig, error) { + serviceConfig := &types.ServiceConfig{} + if err := transform(serviceDict, serviceConfig); err != nil { + return nil, err + } + serviceConfig.Name = name + + if err := resolveEnvironment(serviceConfig, serviceDict, workingDir); err != nil { + return nil, err + } + + if err := resolveVolumePaths(serviceConfig.Volumes, workingDir); err != nil { + return nil, err + } + + return serviceConfig, nil +} + +func resolveEnvironment(serviceConfig *types.ServiceConfig, serviceDict types.Dict, workingDir string) error { + environment := make(map[string]string) + + if envFileVal, ok := serviceDict["env_file"]; ok { + envFiles := loadStringOrListOfStrings(envFileVal) + + var envVars []string + + for _, file := range envFiles { + filePath := path.Join(workingDir, file) + fileVars, err := opts.ParseEnvFile(filePath) + if err != nil { + return err + } + envVars = append(envVars, fileVars...) + } + + for k, v := range opts.ConvertKVStringsToMap(envVars) { + environment[k] = v + } + } + + for k, v := range serviceConfig.Environment { + environment[k] = v + } + + serviceConfig.Environment = environment + + return nil +} + +func resolveVolumePaths(volumes []string, workingDir string) error { + for i, mapping := range volumes { + parts := strings.SplitN(mapping, ":", 2) + if len(parts) == 1 { + continue + } + + if strings.HasPrefix(parts[0], ".") { + parts[0] = path.Join(workingDir, parts[0]) + } + parts[0] = expandUser(parts[0]) + + volumes[i] = strings.Join(parts, ":") + } + + return nil +} + +// TODO: make this more robust +func expandUser(path string) string { + if strings.HasPrefix(path, "~") { + return strings.Replace(path, "~", os.Getenv("HOME"), 1) + } + return path +} + +func transformUlimits( + source reflect.Type, + target reflect.Type, + data interface{}, +) (interface{}, error) { + switch value := data.(type) { + case int: + return types.UlimitsConfig{Single: value}, nil + case types.Dict: + ulimit := types.UlimitsConfig{} + ulimit.Soft = value["soft"].(int) + ulimit.Hard = value["hard"].(int) + return ulimit, nil + default: + return data, fmt.Errorf("invalid type %T for ulimits", value) + } +} + +func loadNetworks(source types.Dict) (map[string]types.NetworkConfig, error) { + networks := make(map[string]types.NetworkConfig) + err := transform(source, &networks) + if err != nil { + return networks, err + } + for name, network := range networks { + if network.External.External && network.External.Name == "" { + network.External.Name = name + networks[name] = network + } + } + return networks, nil +} + +func loadVolumes(source types.Dict) (map[string]types.VolumeConfig, error) { + volumes := make(map[string]types.VolumeConfig) + err := transform(source, &volumes) + if err != nil { + return volumes, err + } + for name, volume := range volumes { + if volume.External.External && volume.External.Name == "" { + volume.External.Name = name + volumes[name] = volume + } + } + return volumes, nil +} + +func transformStruct( + source reflect.Type, + target reflect.Type, + data 
interface{}, +) (interface{}, error) { + structValue, ok := data.(map[string]interface{}) + if !ok { + // FIXME: this is necessary because of convertToStringKeysRecursive + structValue, ok = data.(types.Dict) + if !ok { + panic(fmt.Sprintf( + "transformStruct called with non-map type: %T, %s", data, data)) + } + } + + var err error + for i := 0; i < target.NumField(); i++ { + field := target.Field(i) + fieldTag := field.Tag.Get("compose") + + yamlName := toYAMLName(field.Name) + value, ok := structValue[yamlName] + if !ok { + continue + } + + structValue[yamlName], err = convertField( + fieldTag, reflect.TypeOf(value), field.Type, value) + if err != nil { + return nil, fmt.Errorf("field %s: %s", yamlName, err.Error()) + } + } + return structValue, nil +} + +func transformMapStringString( + source reflect.Type, + target reflect.Type, + data interface{}, +) (interface{}, error) { + switch value := data.(type) { + case map[string]interface{}: + return toMapStringString(value), nil + case types.Dict: + return toMapStringString(value), nil + case map[string]string: + return value, nil + default: + return data, fmt.Errorf("invalid type %T for map[string]string", value) + } +} + +func convertField( + fieldTag string, + source reflect.Type, + target reflect.Type, + data interface{}, +) (interface{}, error) { + switch fieldTag { + case "": + return data, nil + case "healthcheck": + return loadHealthcheck(data) + case "list_or_dict_equals": + return loadMappingOrList(data, "="), nil + case "list_or_dict_colon": + return loadMappingOrList(data, ":"), nil + case "list_or_struct_map": + return loadListOrStructMap(data, target) + case "string_or_list": + return loadStringOrListOfStrings(data), nil + case "list_of_strings_or_numbers": + return loadListOfStringsOrNumbers(data), nil + case "shell_command": + return loadShellCommand(data) + case "size": + return loadSize(data) + case "-": + return nil, nil + } + return data, nil +} + +func transformExternal( + source reflect.Type, + target reflect.Type, + data interface{}, +) (interface{}, error) { + switch value := data.(type) { + case bool: + return map[string]interface{}{"external": value}, nil + case types.Dict: + return map[string]interface{}{"external": true, "name": value["name"]}, nil + case map[string]interface{}: + return map[string]interface{}{"external": true, "name": value["name"]}, nil + default: + return data, fmt.Errorf("invalid type %T for external", value) + } +} + +func toYAMLName(name string) string { + nameParts := fieldNameRegexp.FindAllString(name, -1) + for i, p := range nameParts { + nameParts[i] = strings.ToLower(p) + } + return strings.Join(nameParts, "_") +} + +func loadListOrStructMap(value interface{}, target reflect.Type) (interface{}, error) { + if list, ok := value.([]interface{}); ok { + mapValue := map[interface{}]interface{}{} + for _, name := range list { + mapValue[name] = nil + } + return mapValue, nil + } + + return value, nil +} + +func loadListOfStringsOrNumbers(value interface{}) []string { + list := value.([]interface{}) + result := make([]string, len(list)) + for i, item := range list { + result[i] = fmt.Sprint(item) + } + return result +} + +func loadStringOrListOfStrings(value interface{}) []string { + if list, ok := value.([]interface{}); ok { + result := make([]string, len(list)) + for i, item := range list { + result[i] = fmt.Sprint(item) + } + return result + } + return []string{value.(string)} +} + +func loadMappingOrList(mappingOrList interface{}, sep string) map[string]string { + if mapping, ok := 
mappingOrList.(types.Dict); ok { + return toMapStringString(mapping) + } + if list, ok := mappingOrList.([]interface{}); ok { + result := make(map[string]string) + for _, value := range list { + parts := strings.SplitN(value.(string), sep, 2) + if len(parts) == 1 { + result[parts[0]] = "" + } else { + result[parts[0]] = parts[1] + } + } + return result + } + panic(fmt.Errorf("expected a map or a slice, got: %#v", mappingOrList)) +} + +func loadShellCommand(value interface{}) (interface{}, error) { + if str, ok := value.(string); ok { + return shellwords.Parse(str) + } + return value, nil +} + +func loadHealthcheck(value interface{}) (interface{}, error) { + if str, ok := value.(string); ok { + return append([]string{"CMD-SHELL"}, str), nil + } + return value, nil +} + +func loadSize(value interface{}) (int64, error) { + switch value := value.(type) { + case int: + return int64(value), nil + case string: + return units.RAMInBytes(value) + } + panic(fmt.Errorf("invalid type for size %T", value)) +} + +func toMapStringString(value map[string]interface{}) map[string]string { + output := make(map[string]string) + for key, value := range value { + output[key] = toString(value) + } + return output +} + +func toString(value interface{}) string { + if value == nil { + return "" + } + return fmt.Sprint(value) +} diff --git a/compose/loader/loader_test.go b/compose/loader/loader_test.go new file mode 100644 index 0000000000..e15be7c549 --- /dev/null +++ b/compose/loader/loader_test.go @@ -0,0 +1,782 @@ +package loader + +import ( + "fmt" + "io/ioutil" + "os" + "sort" + "testing" + "time" + + "github.com/docker/docker/cli/compose/types" + "github.com/stretchr/testify/assert" +) + +func buildConfigDetails(source types.Dict) types.ConfigDetails { + workingDir, err := os.Getwd() + if err != nil { + panic(err) + } + + return types.ConfigDetails{ + WorkingDir: workingDir, + ConfigFiles: []types.ConfigFile{ + {Filename: "filename.yml", Config: source}, + }, + Environment: nil, + } +} + +var sampleYAML = ` +version: "3" +services: + foo: + image: busybox + networks: + with_me: + bar: + image: busybox + environment: + - FOO=1 + networks: + - with_ipam +volumes: + hello: + driver: default + driver_opts: + beep: boop +networks: + default: + driver: bridge + driver_opts: + beep: boop + with_ipam: + ipam: + driver: default + config: + - subnet: 172.28.0.0/16 +` + +var sampleDict = types.Dict{ + "version": "3", + "services": types.Dict{ + "foo": types.Dict{ + "image": "busybox", + "networks": types.Dict{"with_me": nil}, + }, + "bar": types.Dict{ + "image": "busybox", + "environment": []interface{}{"FOO=1"}, + "networks": []interface{}{"with_ipam"}, + }, + }, + "volumes": types.Dict{ + "hello": types.Dict{ + "driver": "default", + "driver_opts": types.Dict{ + "beep": "boop", + }, + }, + }, + "networks": types.Dict{ + "default": types.Dict{ + "driver": "bridge", + "driver_opts": types.Dict{ + "beep": "boop", + }, + }, + "with_ipam": types.Dict{ + "ipam": types.Dict{ + "driver": "default", + "config": []interface{}{ + types.Dict{ + "subnet": "172.28.0.0/16", + }, + }, + }, + }, + }, +} + +var sampleConfig = types.Config{ + Services: []types.ServiceConfig{ + { + Name: "foo", + Image: "busybox", + Environment: map[string]string{}, + Networks: map[string]*types.ServiceNetworkConfig{ + "with_me": nil, + }, + }, + { + Name: "bar", + Image: "busybox", + Environment: map[string]string{"FOO": "1"}, + Networks: map[string]*types.ServiceNetworkConfig{ + "with_ipam": nil, + }, + }, + }, + Networks: 
map[string]types.NetworkConfig{ + "default": { + Driver: "bridge", + DriverOpts: map[string]string{ + "beep": "boop", + }, + }, + "with_ipam": { + Ipam: types.IPAMConfig{ + Driver: "default", + Config: []*types.IPAMPool{ + { + Subnet: "172.28.0.0/16", + }, + }, + }, + }, + }, + Volumes: map[string]types.VolumeConfig{ + "hello": { + Driver: "default", + DriverOpts: map[string]string{ + "beep": "boop", + }, + }, + }, +} + +func TestParseYAML(t *testing.T) { + dict, err := ParseYAML([]byte(sampleYAML)) + if !assert.NoError(t, err) { + return + } + assert.Equal(t, sampleDict, dict) +} + +func TestLoad(t *testing.T) { + actual, err := Load(buildConfigDetails(sampleDict)) + if !assert.NoError(t, err) { + return + } + assert.Equal(t, serviceSort(sampleConfig.Services), serviceSort(actual.Services)) + assert.Equal(t, sampleConfig.Networks, actual.Networks) + assert.Equal(t, sampleConfig.Volumes, actual.Volumes) +} + +func TestParseAndLoad(t *testing.T) { + actual, err := loadYAML(sampleYAML) + if !assert.NoError(t, err) { + return + } + assert.Equal(t, serviceSort(sampleConfig.Services), serviceSort(actual.Services)) + assert.Equal(t, sampleConfig.Networks, actual.Networks) + assert.Equal(t, sampleConfig.Volumes, actual.Volumes) +} + +func TestInvalidTopLevelObjectType(t *testing.T) { + _, err := loadYAML("1") + assert.Error(t, err) + assert.Contains(t, err.Error(), "Top-level object must be a mapping") + + _, err = loadYAML("\"hello\"") + assert.Error(t, err) + assert.Contains(t, err.Error(), "Top-level object must be a mapping") + + _, err = loadYAML("[\"hello\"]") + assert.Error(t, err) + assert.Contains(t, err.Error(), "Top-level object must be a mapping") +} + +func TestNonStringKeys(t *testing.T) { + _, err := loadYAML(` +version: "3" +123: + foo: + image: busybox +`) + assert.Error(t, err) + assert.Contains(t, err.Error(), "Non-string key at top level: 123") + + _, err = loadYAML(` +version: "3" +services: + foo: + image: busybox + 123: + image: busybox +`) + assert.Error(t, err) + assert.Contains(t, err.Error(), "Non-string key in services: 123") + + _, err = loadYAML(` +version: "3" +services: + foo: + image: busybox +networks: + default: + ipam: + config: + - 123: oh dear +`) + assert.Error(t, err) + assert.Contains(t, err.Error(), "Non-string key in networks.default.ipam.config[0]: 123") + + _, err = loadYAML(` +version: "3" +services: + dict-env: + image: busybox + environment: + 1: FOO +`) + assert.Error(t, err) + assert.Contains(t, err.Error(), "Non-string key in services.dict-env.environment: 1") +} + +func TestSupportedVersion(t *testing.T) { + _, err := loadYAML(` +version: "3" +services: + foo: + image: busybox +`) + assert.NoError(t, err) + + _, err = loadYAML(` +version: "3.0" +services: + foo: + image: busybox +`) + assert.NoError(t, err) +} + +func TestUnsupportedVersion(t *testing.T) { + _, err := loadYAML(` +version: "2" +services: + foo: + image: busybox +`) + assert.Error(t, err) + assert.Contains(t, err.Error(), "version") + + _, err = loadYAML(` +version: "2.0" +services: + foo: + image: busybox +`) + assert.Error(t, err) + assert.Contains(t, err.Error(), "version") +} + +func TestInvalidVersion(t *testing.T) { + _, err := loadYAML(` +version: 3 +services: + foo: + image: busybox +`) + assert.Error(t, err) + assert.Contains(t, err.Error(), "version must be a string") +} + +func TestV1Unsupported(t *testing.T) { + _, err := loadYAML(` +foo: + image: busybox +`) + assert.Error(t, err) +} + +func TestNonMappingObject(t *testing.T) { + _, err := loadYAML(` +version: "3" 
+services: + - foo: + image: busybox +`) + assert.Error(t, err) + assert.Contains(t, err.Error(), "services must be a mapping") + + _, err = loadYAML(` +version: "3" +services: + foo: busybox +`) + assert.Error(t, err) + assert.Contains(t, err.Error(), "services.foo must be a mapping") + + _, err = loadYAML(` +version: "3" +networks: + - default: + driver: bridge +`) + assert.Error(t, err) + assert.Contains(t, err.Error(), "networks must be a mapping") + + _, err = loadYAML(` +version: "3" +networks: + default: bridge +`) + assert.Error(t, err) + assert.Contains(t, err.Error(), "networks.default must be a mapping") + + _, err = loadYAML(` +version: "3" +volumes: + - data: + driver: local +`) + assert.Error(t, err) + assert.Contains(t, err.Error(), "volumes must be a mapping") + + _, err = loadYAML(` +version: "3" +volumes: + data: local +`) + assert.Error(t, err) + assert.Contains(t, err.Error(), "volumes.data must be a mapping") +} + +func TestNonStringImage(t *testing.T) { + _, err := loadYAML(` +version: "3" +services: + foo: + image: ["busybox", "latest"] +`) + assert.Error(t, err) + assert.Contains(t, err.Error(), "services.foo.image must be a string") +} + +func TestValidEnvironment(t *testing.T) { + config, err := loadYAML(` +version: "3" +services: + dict-env: + image: busybox + environment: + FOO: "1" + BAR: 2 + BAZ: 2.5 + QUUX: + list-env: + image: busybox + environment: + - FOO=1 + - BAR=2 + - BAZ=2.5 + - QUUX= +`) + assert.NoError(t, err) + + expected := map[string]string{ + "FOO": "1", + "BAR": "2", + "BAZ": "2.5", + "QUUX": "", + } + + assert.Equal(t, 2, len(config.Services)) + + for _, service := range config.Services { + assert.Equal(t, expected, service.Environment) + } +} + +func TestInvalidEnvironmentValue(t *testing.T) { + _, err := loadYAML(` +version: "3" +services: + dict-env: + image: busybox + environment: + FOO: ["1"] +`) + assert.Error(t, err) + assert.Contains(t, err.Error(), "services.dict-env.environment.FOO must be a string, number or null") +} + +func TestInvalidEnvironmentObject(t *testing.T) { + _, err := loadYAML(` +version: "3" +services: + dict-env: + image: busybox + environment: "FOO=1" +`) + assert.Error(t, err) + assert.Contains(t, err.Error(), "services.dict-env.environment must be a mapping") +} + +func TestEnvironmentInterpolation(t *testing.T) { + config, err := loadYAML(` +version: "3" +services: + test: + image: busybox + labels: + - home1=$HOME + - home2=${HOME} + - nonexistent=$NONEXISTENT + - default=${NONEXISTENT-default} +networks: + test: + driver: $HOME +volumes: + test: + driver: $HOME +`) + + assert.NoError(t, err) + + home := os.Getenv("HOME") + + expectedLabels := map[string]string{ + "home1": home, + "home2": home, + "nonexistent": "", + "default": "default", + } + + assert.Equal(t, expectedLabels, config.Services[0].Labels) + assert.Equal(t, home, config.Networks["test"].Driver) + assert.Equal(t, home, config.Volumes["test"].Driver) +} + +func TestUnsupportedProperties(t *testing.T) { + dict, err := ParseYAML([]byte(` +version: "3" +services: + web: + image: web + build: ./web + links: + - bar + db: + image: db + build: ./db +`)) + assert.NoError(t, err) + + configDetails := buildConfigDetails(dict) + + _, err = Load(configDetails) + assert.NoError(t, err) + + unsupported := GetUnsupportedProperties(configDetails) + assert.Equal(t, []string{"build", "links"}, unsupported) +} + +func TestDeprecatedProperties(t *testing.T) { + dict, err := ParseYAML([]byte(` +version: "3" +services: + web: + image: web + container_name: web + db: + 
image: db + container_name: db + expose: ["5434"] +`)) + assert.NoError(t, err) + + configDetails := buildConfigDetails(dict) + + _, err = Load(configDetails) + assert.NoError(t, err) + + deprecated := GetDeprecatedProperties(configDetails) + assert.Equal(t, 2, len(deprecated)) + assert.Contains(t, deprecated, "container_name") + assert.Contains(t, deprecated, "expose") +} + +func TestForbiddenProperties(t *testing.T) { + _, err := loadYAML(` +version: "3" +services: + foo: + image: busybox + volumes: + - /data + volume_driver: some-driver + bar: + extends: + service: foo +`) + + assert.Error(t, err) + assert.IsType(t, &ForbiddenPropertiesError{}, err) + fmt.Println(err) + forbidden := err.(*ForbiddenPropertiesError).Properties + + assert.Equal(t, 2, len(forbidden)) + assert.Contains(t, forbidden, "volume_driver") + assert.Contains(t, forbidden, "extends") +} + +func durationPtr(value time.Duration) *time.Duration { + return &value +} + +func int64Ptr(value int64) *int64 { + return &value +} + +func uint64Ptr(value uint64) *uint64 { + return &value +} + +func TestFullExample(t *testing.T) { + bytes, err := ioutil.ReadFile("full-example.yml") + assert.NoError(t, err) + + config, err := loadYAML(string(bytes)) + if !assert.NoError(t, err) { + return + } + + workingDir, err := os.Getwd() + assert.NoError(t, err) + + homeDir := os.Getenv("HOME") + stopGracePeriod := time.Duration(20 * time.Second) + + expectedServiceConfig := types.ServiceConfig{ + Name: "foo", + + CapAdd: []string{"ALL"}, + CapDrop: []string{"NET_ADMIN", "SYS_ADMIN"}, + CgroupParent: "m-executor-abcd", + Command: []string{"bundle", "exec", "thin", "-p", "3000"}, + ContainerName: "my-web-container", + DependsOn: []string{"db", "redis"}, + Deploy: types.DeployConfig{ + Mode: "replicated", + Replicas: uint64Ptr(6), + Labels: map[string]string{"FOO": "BAR"}, + UpdateConfig: &types.UpdateConfig{ + Parallelism: uint64Ptr(3), + Delay: time.Duration(10 * time.Second), + FailureAction: "continue", + Monitor: time.Duration(60 * time.Second), + MaxFailureRatio: 0.3, + }, + Resources: types.Resources{ + Limits: &types.Resource{ + NanoCPUs: "0.001", + MemoryBytes: 50 * 1024 * 1024, + }, + Reservations: &types.Resource{ + NanoCPUs: "0.0001", + MemoryBytes: 20 * 1024 * 1024, + }, + }, + RestartPolicy: &types.RestartPolicy{ + Condition: "on_failure", + Delay: durationPtr(5 * time.Second), + MaxAttempts: uint64Ptr(3), + Window: durationPtr(2 * time.Minute), + }, + Placement: types.Placement{ + Constraints: []string{"node=foo"}, + }, + }, + Devices: []string{"/dev/ttyUSB0:/dev/ttyUSB0"}, + DNS: []string{"8.8.8.8", "9.9.9.9"}, + DNSSearch: []string{"dc1.example.com", "dc2.example.com"}, + DomainName: "foo.com", + Entrypoint: []string{"/code/entrypoint.sh", "-p", "3000"}, + Environment: map[string]string{ + "RACK_ENV": "development", + "SHOW": "true", + "SESSION_SECRET": "", + "FOO": "1", + "BAR": "2", + "BAZ": "3", + }, + Expose: []string{"3000", "8000"}, + ExternalLinks: []string{ + "redis_1", + "project_db_1:mysql", + "project_db_1:postgresql", + }, + ExtraHosts: map[string]string{ + "otherhost": "50.31.209.229", + "somehost": "162.242.195.82", + }, + HealthCheck: &types.HealthCheckConfig{ + Test: []string{ + "CMD-SHELL", + "echo \"hello world\"", + }, + Interval: "10s", + Timeout: "1s", + Retries: uint64Ptr(5), + }, + Hostname: "foo", + Image: "redis", + Ipc: "host", + Labels: map[string]string{ + "com.example.description": "Accounting webapp", + "com.example.number": "42", + "com.example.empty-label": "", + }, + Links: []string{ + "db", + 
"db:database", + "redis", + }, + Logging: &types.LoggingConfig{ + Driver: "syslog", + Options: map[string]string{ + "syslog-address": "tcp://192.168.0.42:123", + }, + }, + MacAddress: "02:42:ac:11:65:43", + NetworkMode: "container:0cfeab0f748b9a743dc3da582046357c6ef497631c1a016d28d2bf9b4f899f7b", + Networks: map[string]*types.ServiceNetworkConfig{ + "some-network": { + Aliases: []string{"alias1", "alias3"}, + Ipv4Address: "", + Ipv6Address: "", + }, + "other-network": { + Ipv4Address: "172.16.238.10", + Ipv6Address: "2001:3984:3989::10", + }, + "other-other-network": nil, + }, + Pid: "host", + Ports: []string{ + "3000", + "3000-3005", + "8000:8000", + "9090-9091:8080-8081", + "49100:22", + "127.0.0.1:8001:8001", + "127.0.0.1:5000-5010:5000-5010", + }, + Privileged: true, + ReadOnly: true, + Restart: "always", + SecurityOpt: []string{ + "label=level:s0:c100,c200", + "label=type:svirt_apache_t", + }, + StdinOpen: true, + StopSignal: "SIGUSR1", + StopGracePeriod: &stopGracePeriod, + Tmpfs: []string{"/run", "/tmp"}, + Tty: true, + Ulimits: map[string]*types.UlimitsConfig{ + "nproc": { + Single: 65535, + }, + "nofile": { + Soft: 20000, + Hard: 40000, + }, + }, + User: "someone", + Volumes: []string{ + "/var/lib/mysql", + "/opt/data:/var/lib/mysql", + fmt.Sprintf("%s:/code", workingDir), + fmt.Sprintf("%s/static:/var/www/html", workingDir), + fmt.Sprintf("%s/configs:/etc/configs/:ro", homeDir), + "datavolume:/var/lib/mysql", + }, + WorkingDir: "/code", + } + + assert.Equal(t, []types.ServiceConfig{expectedServiceConfig}, config.Services) + + expectedNetworkConfig := map[string]types.NetworkConfig{ + "some-network": {}, + + "other-network": { + Driver: "overlay", + DriverOpts: map[string]string{ + "foo": "bar", + "baz": "1", + }, + Ipam: types.IPAMConfig{ + Driver: "overlay", + Config: []*types.IPAMPool{ + {Subnet: "172.16.238.0/24"}, + {Subnet: "2001:3984:3989::/64"}, + }, + }, + }, + + "external-network": { + External: types.External{ + Name: "external-network", + External: true, + }, + }, + + "other-external-network": { + External: types.External{ + Name: "my-cool-network", + External: true, + }, + }, + } + + assert.Equal(t, expectedNetworkConfig, config.Networks) + + expectedVolumeConfig := map[string]types.VolumeConfig{ + "some-volume": {}, + "other-volume": { + Driver: "flocker", + DriverOpts: map[string]string{ + "foo": "bar", + "baz": "1", + }, + }, + "external-volume": { + External: types.External{ + Name: "external-volume", + External: true, + }, + }, + "other-external-volume": { + External: types.External{ + Name: "my-cool-volume", + External: true, + }, + }, + } + + assert.Equal(t, expectedVolumeConfig, config.Volumes) +} + +func loadYAML(yaml string) (*types.Config, error) { + dict, err := ParseYAML([]byte(yaml)) + if err != nil { + return nil, err + } + + return Load(buildConfigDetails(dict)) +} + +func serviceSort(services []types.ServiceConfig) []types.ServiceConfig { + sort.Sort(servicesByName(services)) + return services +} + +type servicesByName []types.ServiceConfig + +func (sbn servicesByName) Len() int { return len(sbn) } +func (sbn servicesByName) Swap(i, j int) { sbn[i], sbn[j] = sbn[j], sbn[i] } +func (sbn servicesByName) Less(i, j int) bool { return sbn[i].Name < sbn[j].Name } diff --git a/compose/schema/bindata.go b/compose/schema/bindata.go new file mode 100644 index 0000000000..2acc7d29f1 --- /dev/null +++ b/compose/schema/bindata.go @@ -0,0 +1,237 @@ +// Code generated by go-bindata. +// sources: +// data/config_schema_v3.0.json +// DO NOT EDIT! 
+ +package schema + +import ( + "bytes" + "compress/gzip" + "fmt" + "io" + "io/ioutil" + "os" + "path/filepath" + "strings" + "time" +) + +func bindataRead(data []byte, name string) ([]byte, error) { + gz, err := gzip.NewReader(bytes.NewBuffer(data)) + if err != nil { + return nil, fmt.Errorf("Read %q: %v", name, err) + } + + var buf bytes.Buffer + _, err = io.Copy(&buf, gz) + clErr := gz.Close() + + if err != nil { + return nil, fmt.Errorf("Read %q: %v", name, err) + } + if clErr != nil { + return nil, err + } + + return buf.Bytes(), nil +} + +type asset struct { + bytes []byte + info os.FileInfo +} + +type bindataFileInfo struct { + name string + size int64 + mode os.FileMode + modTime time.Time +} + +func (fi bindataFileInfo) Name() string { + return fi.name +} +func (fi bindataFileInfo) Size() int64 { + return fi.size +} +func (fi bindataFileInfo) Mode() os.FileMode { + return fi.mode +} +func (fi bindataFileInfo) ModTime() time.Time { + return fi.modTime +} +func (fi bindataFileInfo) IsDir() bool { + return false +} +func (fi bindataFileInfo) Sys() interface{} { + return nil +} + +var _dataConfig_schema_v30Json = []byte("\x1f\x8b\x08\x00\x00\x09\x6e\x88\x00\xff\xec\x5a\x4d\x8f\xdb\x36\x13\xbe\xfb\x57\x08\x4a\x6e\xf1\xee\x06\x78\x83\x17\x68\x6e\x3d\xf6\xd4\x9e\xbb\x50\x04\x5a\x1a\xdb\xcc\x52\x24\x33\xa4\x9c\x75\x02\xff\xf7\x82\xfa\xb2\x48\x93\xa2\x6c\x2b\x4d\x0e\xbd\x2c\xd6\xe2\xcc\x70\xbe\xf8\xcc\x70\xa4\xef\xab\x24\x49\xdf\xaa\x62\x0f\x15\x49\x3f\x26\xe9\x5e\x6b\xf9\xf1\xe9\xe9\xb3\x12\xfc\xa1\x7d\xfa\x28\x70\xf7\x54\x22\xd9\xea\x87\xf7\x1f\x9e\xda\x67\x6f\xd2\xb5\xe1\xa3\xa5\x61\x29\x04\xdf\xd2\x5d\xde\xae\xe4\x87\xff\x3d\xbe\x7f\x34\xec\x2d\x89\x3e\x4a\x30\x44\x62\xf3\x19\x0a\xdd\x3e\x43\xf8\x52\x53\x04\xc3\xfc\x9c\x1e\x00\x15\x15\x3c\xcd\xd6\x2b\xb3\x26\x51\x48\x40\x4d\x41\xa5\x1f\x13\xa3\x5c\x92\x0c\x24\xfd\x83\x91\x58\xa5\x91\xf2\x5d\xda\x3c\x3e\x35\x12\x92\x24\x55\x80\x07\x5a\x8c\x24\x0c\xaa\xbe\x79\x3a\xcb\x7f\x1a\xc8\xd6\xae\xd4\x91\xb2\xcd\x73\x49\xb4\x06\xe4\x7f\x5d\xea\xd6\x2c\x7f\x7a\x26\x0f\xdf\x7e\x7f\xf8\xfb\xfd\xc3\x6f\x8f\xf9\x43\xf6\xee\xad\xb5\x6c\xfc\x8b\xb0\x6d\xb7\x2f\x61\x4b\x39\xd5\x54\xf0\x61\xff\x74\xa0\x3c\x75\xff\x9d\x86\x8d\x49\x59\x36\xc4\x84\x59\x7b\x6f\x09\x53\x60\xdb\xcc\x41\x7f\x15\xf8\x12\xb3\x79\x20\xfb\x49\x36\x77\xfb\x7b\x6c\xb6\xcd\x39\x08\x56\x57\xd1\x08\xf6\x54\x3f\xc9\x98\x76\xfb\xfb\xe2\xb7\xea\x8d\x9e\xa4\x6d\x29\x46\x7b\x37\x0a\x5a\xd9\xee\x73\x95\x2f\xdb\xc2\xbe\x1a\x9c\x15\xf0\x52\x09\x92\x89\xa3\x79\x16\xf0\x47\x4b\x50\x01\xd7\xe9\xe0\x82\x24\x49\x37\x35\x65\xa5\xeb\x51\xc1\xe1\x4f\x23\xe2\x79\xf4\x30\x49\xbe\xbb\x07\x7b\x24\xa7\x59\xb7\x7e\x85\x03\x3e\xac\x07\x6c\x19\xd6\x0b\xc1\x35\xbc\xea\xc6\xa8\xe9\xad\x5b\x17\x88\xe2\x05\x70\x4b\x19\xcc\xe5\x20\xb8\x53\x13\x2e\x63\x54\xe9\x5c\x60\x5e\xd2\x42\xa7\x27\x87\xfd\x42\x5e\x3c\x9f\x06\xd6\xd1\xaf\x6c\xe5\x11\x98\x16\x44\xe6\xa4\x2c\x2d\x3b\x08\x22\x39\xa6\xeb\x24\xa5\x1a\x2a\xe5\x37\x31\x49\x6b\x4e\xbf\xd4\xf0\x47\x47\xa2\xb1\x06\x57\x6e\x89\x42\x2e\x2f\x78\x87\xa2\x96\xb9\x24\x68\x12\x6c\xda\xfd\x69\x21\xaa\x8a\xf0\xa5\xb2\xee\x1a\x3b\x66\x78\x5e\x70\x4d\x28\x07\xcc\x39\xa9\x62\x89\x64\x4e\x1d\xf0\x52\xe5\x6d\xfd\x9b\x4c\xa3\x6d\xde\xf2\x2b\x47\xc0\x50\x0c\x17\x8d\x47\xc9\xa7\x12\xbb\x15\x63\x52\xdb\xe8\x96\x3a\x8c\xb9\x02\x82\xc5\xfe\x46\x7e\x51\x11\xca\xe7\xf8\x0e\xb8\xc6\xa3\x14\xb4\xcd\x97\x5f\x2e\x11\x80\x1f\xf2\x01\x4b\xae\x76\x03\xf0\x03\x45\xc1\xab\xfe\x34\xcc\x01\x98\x01\xe4\x0d\xff\xab\x14\x0a\x5c\xc7\x38\x06\x8e\x97\x06\x53\x2d\x9f\xf4\x1c\xcf\xbd\xe1\xeb\x24\xe5\x75\xb5\x01\x3
4\x2d\x9d\x45\xb9\x15\x58\x11\xa3\x6c\xbf\xf7\x68\xd9\xf2\xb4\x27\xf3\xc6\x0e\x1c\xdb\x60\xca\x3a\x61\x39\xa3\xfc\x65\xf9\x14\x87\x57\x8d\x24\xdf\x0b\xa5\xe7\x63\xf8\x88\x7d\x0f\x84\xe9\x7d\xb1\x87\xe2\x65\x82\x7d\x4c\x65\x71\x0b\xa5\xe7\x24\x39\xad\xc8\x2e\x4e\x24\x8b\x18\x09\x23\x1b\x60\x37\xd9\xb9\xa8\xf3\x47\x62\xc5\x6e\x67\x48\x43\x19\x77\xd1\xb9\x74\xcb\xb1\x9a\x5f\x22\x3d\x00\xce\x2d\xe0\x42\x9e\x1b\x2e\x77\x31\xde\x80\x24\xf1\xee\xd3\x22\xfd\xf4\xd8\x36\x9f\x13\xa7\xaa\xf9\x8f\xb1\x34\x73\xdb\x85\xc4\xa9\xfb\xbe\x27\x8e\x85\xf3\x1a\x0a\x2b\x2a\x15\x29\x4c\xdf\x80\xa0\x02\x71\x3d\x93\x76\xcd\x7e\x5e\x89\x32\x94\xa0\x17\xc4\xae\x6f\x82\x48\x7d\x75\x21\x4c\x6e\xea\x1f\x67\x85\x2e\x7a\x81\x88\x58\x13\x52\x6f\xae\x9a\x67\x75\xe3\x29\xd6\xd0\x11\x46\x89\x82\xf8\x61\x0f\x3a\xd2\x92\x46\xe5\xe1\xc3\xcc\x9c\xf0\xf1\xfe\x7f\x92\x37\xc0\x1a\x94\x39\xbf\x47\x8e\x88\x3a\xab\xd2\x1c\x37\x9f\x22\x59\xe4\xb4\xfd\xe0\x16\x5e\xd2\x32\x8c\x15\x0d\x42\x8c\x0f\x98\x14\xa8\x2f\x4e\xd7\xbf\x53\xee\xdb\xad\xef\xae\xf6\x12\xe9\x81\x32\xd8\x81\x7d\x6b\xd9\x08\xc1\x80\x70\x0b\x7a\x10\x48\x99\x0b\xce\x8e\x33\x28\x95\x26\x18\xbd\x50\x28\x28\x6a\xa4\xfa\x98\x0b\xa9\x17\xef\x33\xd4\xbe\xca\x15\xfd\x06\x76\x34\xcf\x78\xdf\x09\xca\x2c\x1e\x5d\x52\x9e\x0b\x09\x3c\x6a\xa2\xd2\x42\xe6\x8a\xee\x38\x61\x51\x33\x0d\xe9\x0e\x49\x01\xb9\x04\xa4\xa2\xf4\x31\xac\xc7\xb1\x2d\x6b\x24\x26\x9f\x2d\x31\xba\x92\xdb\x1b\x6f\x07\x5a\xc7\x63\x56\x33\x5a\xd1\x70\x32\x7b\x50\x72\x06\x90\xb7\x20\xee\xc7\xee\x09\xdc\x3e\x6b\x4a\xb9\x86\x1d\xa0\x0f\xee\x26\x5a\x87\xe9\xce\x61\x46\xcb\xb0\x27\x68\x47\x69\x42\x8f\x86\x41\x89\xad\xf6\x33\xf8\x1a\x0a\xaf\x5e\xd6\x04\xb7\x91\xb7\xee\x14\xc9\xbc\xf4\x57\x61\xb2\xab\x46\x16\x84\xc5\x93\x17\x16\x6b\x15\xed\xee\xc6\xf3\xc5\x45\x4f\xb2\x69\x61\x4c\x66\x97\xd4\xaf\xc2\xca\x51\xf7\x8a\x09\xaf\x73\x9b\xe8\x05\xf8\x66\x7d\x63\x52\x77\xde\xf7\x3c\x24\x5c\x5f\x25\xce\x53\xd2\xc0\xe0\xcf\xe4\x07\x1e\x2c\xf0\xf0\xf9\x54\xd3\x0a\x44\xad\x23\x54\x08\x1a\xa9\xe3\xf9\x0e\xe9\x2c\x61\xa0\x7e\xcd\x4b\x7b\x49\x15\xd9\x38\xf3\xbf\x01\xa3\x6e\x0a\x6f\x72\x1e\xae\xf6\x97\xf9\xa9\xe0\x8e\x28\x17\x88\xed\x44\x6f\x3e\x0a\x99\x64\xb4\x20\x2a\x86\x32\x77\x5c\x21\x6b\x59\x12\x0d\x79\xfb\x2a\xe9\x2a\x5c\x9f\x00\x74\x49\x90\x30\x06\x8c\xaa\x6a\x0e\x40\xa6\x25\x30\x72\xbc\xa9\xe0\x35\xec\x5b\x42\x59\x8d\x90\x93\x42\x77\x6f\xab\x22\x99\x99\x56\x82\x53\x2d\xbc\x48\x31\x6f\xcb\x8a\xbc\xe6\xfd\xb6\x0d\x89\xf7\x58\x05\x1b\xaf\xb9\xb7\xbf\x51\x26\x28\x51\x63\x71\xe1\xec\x9b\x43\x74\x2e\xe4\x81\x8c\xe9\x77\xbc\x30\x1d\x41\x19\x50\x1a\x2e\xe7\x51\xfe\x68\xdd\xe8\x3a\xc1\x5c\x0a\x46\x8b\xe3\x52\x16\x16\x82\xb7\x4e\x9e\x93\x10\x77\x66\xa0\x49\x07\xd3\xe7\x54\x52\x47\x0f\x6b\xc3\xf0\x95\xf2\x52\x7c\xbd\x62\xc3\xe5\x52\x49\x32\x52\x80\x83\x77\xf7\x3a\x5a\x69\x24\x94\xeb\xab\xcb\xfa\xbd\x66\xdd\x51\xd5\x87\xfc\x8c\xa0\xfe\x40\x17\x7f\xd7\x19\x40\xfa\x42\xd6\xd1\x89\x4d\x05\x95\x40\x6f\x02\x2e\xf0\x6e\x3a\x66\x62\x4f\xb6\x40\x55\x9b\x35\xe2\xeb\xa8\xcc\x8d\x6e\xf1\xab\x44\x7c\x8c\x97\xc5\x01\x89\x4a\x52\x2d\x75\x3a\x66\x0f\x3d\x53\x6f\x0d\x4e\xa6\x87\x05\x49\x78\x60\x10\xd3\x3a\xae\x7b\x47\xa1\xea\x0d\x07\xff\x3d\xfd\xf2\x0a\xe1\x7b\x13\x3b\xff\x0e\x72\x0a\xdf\x38\xee\x03\xbd\xfe\x7d\x45\x20\xaa\xcf\x43\x27\xb9\x1e\x7c\x95\xcd\x0e\x71\xf0\x65\xc1\x72\xfa\x5f\xd9\xe0\xdd\x81\x19\xdd\xb7\x15\x11\xc8\xe8\xa8\xfe\x43\x8c\x5f\x26\xbf\x26\x8a\xe2\x8d\xb7\x83\x2b\x92\xc6\x19\x2b\x8d\x92\xe7\xf2\xea\x38\x15\xe7\xd9\x43\xf1\x8e\x23\xb3\xd5\x70\xc9\x3c\xdf\xad\xd9\x10\x3a\x35\x71\xe8\x49\x02\x43\x52\x67\xd3\xce\x79\xd3\x96\x2f\x98\xb6\x8f\xef\x26\x
0a\xc5\xd4\xcb\xab\x1f\x84\xb0\x0b\x4c\x73\xfc\x31\x75\xba\xcb\xde\xbb\x97\x1f\x5f\x05\x90\x6a\xc4\x7f\xf1\x29\x96\xb1\x93\x1f\x2f\x46\x1b\xdf\xed\x31\x5b\xfb\x19\x55\x66\xf9\xc7\x21\x69\x5f\x05\x8f\x70\x22\x1b\x37\xdc\xa1\x30\x7a\x3f\xd0\x72\x87\x7c\xfd\x87\x52\xd9\xf4\x61\x5f\xf5\x7f\x4f\xab\xd3\xea\x9f\x00\x00\x00\xff\xff\xd1\xeb\xc9\xb9\x5c\x2a\x00\x00") + +func dataConfig_schema_v30JsonBytes() ([]byte, error) { + return bindataRead( + _dataConfig_schema_v30Json, + "data/config_schema_v3.0.json", + ) +} + +func dataConfig_schema_v30Json() (*asset, error) { + bytes, err := dataConfig_schema_v30JsonBytes() + if err != nil { + return nil, err + } + + info := bindataFileInfo{name: "data/config_schema_v3.0.json", size: 0, mode: os.FileMode(0), modTime: time.Unix(0, 0)} + a := &asset{bytes: bytes, info: info} + return a, nil +} + +// Asset loads and returns the asset for the given name. +// It returns an error if the asset could not be found or +// could not be loaded. +func Asset(name string) ([]byte, error) { + cannonicalName := strings.Replace(name, "\\", "/", -1) + if f, ok := _bindata[cannonicalName]; ok { + a, err := f() + if err != nil { + return nil, fmt.Errorf("Asset %s can't read by error: %v", name, err) + } + return a.bytes, nil + } + return nil, fmt.Errorf("Asset %s not found", name) +} + +// MustAsset is like Asset but panics when Asset would return an error. +// It simplifies safe initialization of global variables. +func MustAsset(name string) []byte { + a, err := Asset(name) + if err != nil { + panic("asset: Asset(" + name + "): " + err.Error()) + } + + return a +} + +// AssetInfo loads and returns the asset info for the given name. +// It returns an error if the asset could not be found or +// could not be loaded. +func AssetInfo(name string) (os.FileInfo, error) { + cannonicalName := strings.Replace(name, "\\", "/", -1) + if f, ok := _bindata[cannonicalName]; ok { + a, err := f() + if err != nil { + return nil, fmt.Errorf("AssetInfo %s can't read by error: %v", name, err) + } + return a.info, nil + } + return nil, fmt.Errorf("AssetInfo %s not found", name) +} + +// AssetNames returns the names of the assets. +func AssetNames() []string { + names := make([]string, 0, len(_bindata)) + for name := range _bindata { + names = append(names, name) + } + return names +} + +// _bindata is a table, holding each asset generator, mapped to its name. +var _bindata = map[string]func() (*asset, error){ + "data/config_schema_v3.0.json": dataConfig_schema_v30Json, +} + +// AssetDir returns the file names below a certain +// directory embedded in the file by go-bindata. +// For example if you run go-bindata on data/... and data contains the +// following hierarchy: +// data/ +// foo.txt +// img/ +// a.png +// b.png +// then AssetDir("data") would return []string{"foo.txt", "img"} +// AssetDir("data/img") would return []string{"a.png", "b.png"} +// AssetDir("foo.txt") and AssetDir("notexist") would return an error +// AssetDir("") will return []string{"data"}. 
+func AssetDir(name string) ([]string, error) { + node := _bintree + if len(name) != 0 { + cannonicalName := strings.Replace(name, "\\", "/", -1) + pathList := strings.Split(cannonicalName, "/") + for _, p := range pathList { + node = node.Children[p] + if node == nil { + return nil, fmt.Errorf("Asset %s not found", name) + } + } + } + if node.Func != nil { + return nil, fmt.Errorf("Asset %s not found", name) + } + rv := make([]string, 0, len(node.Children)) + for childName := range node.Children { + rv = append(rv, childName) + } + return rv, nil +} + +type bintree struct { + Func func() (*asset, error) + Children map[string]*bintree +} +var _bintree = &bintree{nil, map[string]*bintree{ + "data": &bintree{nil, map[string]*bintree{ + "config_schema_v3.0.json": &bintree{dataConfig_schema_v30Json, map[string]*bintree{}}, + }}, +}} + +// RestoreAsset restores an asset under the given directory +func RestoreAsset(dir, name string) error { + data, err := Asset(name) + if err != nil { + return err + } + info, err := AssetInfo(name) + if err != nil { + return err + } + err = os.MkdirAll(_filePath(dir, filepath.Dir(name)), os.FileMode(0755)) + if err != nil { + return err + } + err = ioutil.WriteFile(_filePath(dir, name), data, info.Mode()) + if err != nil { + return err + } + err = os.Chtimes(_filePath(dir, name), info.ModTime(), info.ModTime()) + if err != nil { + return err + } + return nil +} + +// RestoreAssets restores an asset under the given directory recursively +func RestoreAssets(dir, name string) error { + children, err := AssetDir(name) + // File + if err != nil { + return RestoreAsset(dir, name) + } + // Dir + for _, child := range children { + err = RestoreAssets(dir, filepath.Join(name, child)) + if err != nil { + return err + } + } + return nil +} + +func _filePath(dir, name string) string { + cannonicalName := strings.Replace(name, "\\", "/", -1) + return filepath.Join(append([]string{dir}, strings.Split(cannonicalName, "/")...)...) 
+} + diff --git a/compose/schema/data/config_schema_v3.0.json b/compose/schema/data/config_schema_v3.0.json new file mode 100644 index 0000000000..520e57d5e2 --- /dev/null +++ b/compose/schema/data/config_schema_v3.0.json @@ -0,0 +1,379 @@ +{ + "$schema": "http://json-schema.org/draft-04/schema#", + "id": "config_schema_v3.0.json", + "type": "object", + "required": ["version"], + + "properties": { + "version": { + "type": "string" + }, + + "services": { + "id": "#/properties/services", + "type": "object", + "patternProperties": { + "^[a-zA-Z0-9._-]+$": { + "$ref": "#/definitions/service" + } + }, + "additionalProperties": false + }, + + "networks": { + "id": "#/properties/networks", + "type": "object", + "patternProperties": { + "^[a-zA-Z0-9._-]+$": { + "$ref": "#/definitions/network" + } + } + }, + + "volumes": { + "id": "#/properties/volumes", + "type": "object", + "patternProperties": { + "^[a-zA-Z0-9._-]+$": { + "$ref": "#/definitions/volume" + } + }, + "additionalProperties": false + } + }, + + "additionalProperties": false, + + "definitions": { + + "service": { + "id": "#/definitions/service", + "type": "object", + + "properties": { + "deploy": {"$ref": "#/definitions/deployment"}, + "build": { + "oneOf": [ + {"type": "string"}, + { + "type": "object", + "properties": { + "context": {"type": "string"}, + "dockerfile": {"type": "string"}, + "args": {"$ref": "#/definitions/list_or_dict"} + }, + "additionalProperties": false + } + ] + }, + "cap_add": {"type": "array", "items": {"type": "string"}, "uniqueItems": true}, + "cap_drop": {"type": "array", "items": {"type": "string"}, "uniqueItems": true}, + "cgroup_parent": {"type": "string"}, + "command": { + "oneOf": [ + {"type": "string"}, + {"type": "array", "items": {"type": "string"}} + ] + }, + "container_name": {"type": "string"}, + "depends_on": {"$ref": "#/definitions/list_of_strings"}, + "devices": {"type": "array", "items": {"type": "string"}, "uniqueItems": true}, + "dns": {"$ref": "#/definitions/string_or_list"}, + "dns_search": {"$ref": "#/definitions/string_or_list"}, + "domainname": {"type": "string"}, + "entrypoint": { + "oneOf": [ + {"type": "string"}, + {"type": "array", "items": {"type": "string"}} + ] + }, + "env_file": {"$ref": "#/definitions/string_or_list"}, + "environment": {"$ref": "#/definitions/list_or_dict"}, + + "expose": { + "type": "array", + "items": { + "type": ["string", "number"], + "format": "expose" + }, + "uniqueItems": true + }, + + "external_links": {"type": "array", "items": {"type": "string"}, "uniqueItems": true}, + "extra_hosts": {"$ref": "#/definitions/list_or_dict"}, + "healthcheck": {"$ref": "#/definitions/healthcheck"}, + "hostname": {"type": "string"}, + "image": {"type": "string"}, + "ipc": {"type": "string"}, + "labels": {"$ref": "#/definitions/list_or_dict"}, + "links": {"type": "array", "items": {"type": "string"}, "uniqueItems": true}, + + "logging": { + "type": "object", + + "properties": { + "driver": {"type": "string"}, + "options": { + "type": "object", + "patternProperties": { + "^.+$": {"type": ["string", "number", "null"]} + } + } + }, + "additionalProperties": false + }, + + "mac_address": {"type": "string"}, + "network_mode": {"type": "string"}, + + "networks": { + "oneOf": [ + {"$ref": "#/definitions/list_of_strings"}, + { + "type": "object", + "patternProperties": { + "^[a-zA-Z0-9._-]+$": { + "oneOf": [ + { + "type": "object", + "properties": { + "aliases": {"$ref": "#/definitions/list_of_strings"}, + "ipv4_address": {"type": "string"}, + "ipv6_address": {"type": "string"} + 
}, + "additionalProperties": false + }, + {"type": "null"} + ] + } + }, + "additionalProperties": false + } + ] + }, + "pid": {"type": ["string", "null"]}, + + "ports": { + "type": "array", + "items": { + "type": ["string", "number"], + "format": "ports" + }, + "uniqueItems": true + }, + + "privileged": {"type": "boolean"}, + "read_only": {"type": "boolean"}, + "restart": {"type": "string"}, + "security_opt": {"type": "array", "items": {"type": "string"}, "uniqueItems": true}, + "shm_size": {"type": ["number", "string"]}, + "stdin_open": {"type": "boolean"}, + "stop_signal": {"type": "string"}, + "stop_grace_period": {"type": "string", "format": "duration"}, + "tmpfs": {"$ref": "#/definitions/string_or_list"}, + "tty": {"type": "boolean"}, + "ulimits": { + "type": "object", + "patternProperties": { + "^[a-z]+$": { + "oneOf": [ + {"type": "integer"}, + { + "type":"object", + "properties": { + "hard": {"type": "integer"}, + "soft": {"type": "integer"} + }, + "required": ["soft", "hard"], + "additionalProperties": false + } + ] + } + } + }, + "user": {"type": "string"}, + "volumes": {"type": "array", "items": {"type": "string"}, "uniqueItems": true}, + "working_dir": {"type": "string"} + }, + "additionalProperties": false + }, + + "healthcheck": { + "id": "#/definitions/healthcheck", + "type": ["object", "null"], + "properties": { + "interval": {"type":"string"}, + "timeout": {"type":"string"}, + "retries": {"type": "number"}, + "test": { + "oneOf": [ + {"type": "string"}, + {"type": "array", "items": {"type": "string"}} + ] + }, + "disable": {"type": "boolean"} + }, + "additionalProperties": false + }, + "deployment": { + "id": "#/definitions/deployment", + "type": ["object", "null"], + "properties": { + "mode": {"type": "string"}, + "replicas": {"type": "integer"}, + "labels": {"$ref": "#/definitions/list_or_dict"}, + "update_config": { + "type": "object", + "properties": { + "parallelism": {"type": "integer"}, + "delay": {"type": "string", "format": "duration"}, + "failure_action": {"type": "string"}, + "monitor": {"type": "string", "format": "duration"}, + "max_failure_ratio": {"type": "number"} + }, + "additionalProperties": false + }, + "resources": { + "type": "object", + "properties": { + "limits": {"$ref": "#/definitions/resource"}, + "reservations": {"$ref": "#/definitions/resource"} + } + }, + "restart_policy": { + "type": "object", + "properties": { + "condition": {"type": "string"}, + "delay": {"type": "string", "format": "duration"}, + "max_attempts": {"type": "integer"}, + "window": {"type": "string", "format": "duration"} + }, + "additionalProperties": false + }, + "placement": { + "type": "object", + "properties": { + "constraints": {"type": "array", "items": {"type": "string"}} + }, + "additionalProperties": false + } + }, + "additionalProperties": false + }, + + "resource": { + "id": "#/definitions/resource", + "type": "object", + "properties": { + "cpus": {"type": "string"}, + "memory": {"type": "string"} + }, + "additionalProperties": false + }, + + "network": { + "id": "#/definitions/network", + "type": ["object", "null"], + "properties": { + "driver": {"type": "string"}, + "driver_opts": { + "type": "object", + "patternProperties": { + "^.+$": {"type": ["string", "number"]} + } + }, + "ipam": { + "type": "object", + "properties": { + "driver": {"type": "string"}, + "config": { + "type": "array", + "items": { + "type": "object", + "properties": { + "subnet": {"type": "string"} + }, + "additionalProperties": false + } + } + }, + "additionalProperties": false + }, + 
"external": { + "type": ["boolean", "object"], + "properties": { + "name": {"type": "string"} + }, + "additionalProperties": false + }, + "labels": {"$ref": "#/definitions/list_or_dict"} + }, + "additionalProperties": false + }, + + "volume": { + "id": "#/definitions/volume", + "type": ["object", "null"], + "properties": { + "driver": {"type": "string"}, + "driver_opts": { + "type": "object", + "patternProperties": { + "^.+$": {"type": ["string", "number"]} + } + }, + "external": { + "type": ["boolean", "object"], + "properties": { + "name": {"type": "string"} + } + } + }, + "labels": {"$ref": "#/definitions/list_or_dict"}, + "additionalProperties": false + }, + + "string_or_list": { + "oneOf": [ + {"type": "string"}, + {"$ref": "#/definitions/list_of_strings"} + ] + }, + + "list_of_strings": { + "type": "array", + "items": {"type": "string"}, + "uniqueItems": true + }, + + "list_or_dict": { + "oneOf": [ + { + "type": "object", + "patternProperties": { + ".+": { + "type": ["string", "number", "null"] + } + }, + "additionalProperties": false + }, + {"type": "array", "items": {"type": "string"}, "uniqueItems": true} + ] + }, + + "constraints": { + "service": { + "id": "#/definitions/constraints/service", + "anyOf": [ + {"required": ["build"]}, + {"required": ["image"]} + ], + "properties": { + "build": { + "required": ["context"] + } + } + } + } + } +} diff --git a/compose/schema/schema.go b/compose/schema/schema.go new file mode 100644 index 0000000000..6366cab48e --- /dev/null +++ b/compose/schema/schema.go @@ -0,0 +1,113 @@ +package schema + +//go:generate go-bindata -pkg schema -nometadata data + +import ( + "fmt" + "strings" + "time" + + "github.com/xeipuuv/gojsonschema" +) + +type portsFormatChecker struct{} + +func (checker portsFormatChecker) IsFormat(input string) bool { + // TODO: implement this + return true +} + +type durationFormatChecker struct{} + +func (checker durationFormatChecker) IsFormat(input string) bool { + _, err := time.ParseDuration(input) + return err == nil +} + +func init() { + gojsonschema.FormatCheckers.Add("expose", portsFormatChecker{}) + gojsonschema.FormatCheckers.Add("ports", portsFormatChecker{}) + gojsonschema.FormatCheckers.Add("duration", durationFormatChecker{}) +} + +// Validate uses the jsonschema to validate the configuration +func Validate(config map[string]interface{}) error { + schemaData, err := Asset("data/config_schema_v3.0.json") + if err != nil { + return err + } + + schemaLoader := gojsonschema.NewStringLoader(string(schemaData)) + dataLoader := gojsonschema.NewGoLoader(config) + + result, err := gojsonschema.Validate(schemaLoader, dataLoader) + if err != nil { + return err + } + + if !result.Valid() { + return toError(result) + } + + return nil +} + +func toError(result *gojsonschema.Result) error { + err := getMostSpecificError(result.Errors()) + description := getDescription(err) + return fmt.Errorf("%s %s", err.Field(), description) +} + +func getDescription(err gojsonschema.ResultError) string { + if err.Type() == "invalid_type" { + if expectedType, ok := err.Details()["expected"].(string); ok { + return fmt.Sprintf("must be a %s", humanReadableType(expectedType)) + } + } + + return err.Description() +} + +func humanReadableType(definition string) string { + if definition[0:1] == "[" { + allTypes := strings.Split(definition[1:len(definition)-1], ",") + for i, t := range allTypes { + allTypes[i] = humanReadableType(t) + } + return fmt.Sprintf( + "%s or %s", + strings.Join(allTypes[0:len(allTypes)-1], ", "), + 
allTypes[len(allTypes)-1], + ) + } + if definition == "object" { + return "mapping" + } + if definition == "array" { + return "list" + } + return definition +} + +func getMostSpecificError(errors []gojsonschema.ResultError) gojsonschema.ResultError { + var mostSpecificError gojsonschema.ResultError + + for _, err := range errors { + if mostSpecificError == nil { + mostSpecificError = err + } else if specificity(err) > specificity(mostSpecificError) { + mostSpecificError = err + } else if specificity(err) == specificity(mostSpecificError) { + // Invalid type errors win in a tie-breaker for most specific field name + if err.Type() == "invalid_type" && mostSpecificError.Type() != "invalid_type" { + mostSpecificError = err + } + } + } + + return mostSpecificError +} + +func specificity(err gojsonschema.ResultError) int { + return len(strings.Split(err.Field(), ".")) +} diff --git a/compose/schema/schema_test.go b/compose/schema/schema_test.go new file mode 100644 index 0000000000..be98f807de --- /dev/null +++ b/compose/schema/schema_test.go @@ -0,0 +1,35 @@ +package schema + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +type dict map[string]interface{} + +func TestValid(t *testing.T) { + config := dict{ + "version": "2.1", + "services": dict{ + "foo": dict{ + "image": "busybox", + }, + }, + } + + assert.NoError(t, Validate(config)) +} + +func TestUndefinedTopLevelOption(t *testing.T) { + config := dict{ + "version": "2.1", + "helicopters": dict{ + "foo": dict{ + "image": "busybox", + }, + }, + } + + assert.Error(t, Validate(config)) +} diff --git a/compose/template/template.go b/compose/template/template.go new file mode 100644 index 0000000000..28495baf50 --- /dev/null +++ b/compose/template/template.go @@ -0,0 +1,100 @@ +package template + +import ( + "fmt" + "regexp" + "strings" +) + +var delimiter = "\\$" +var substitution = "[_a-z][_a-z0-9]*(?::?-[^}]+)?" + +var patternString = fmt.Sprintf( + "%s(?i:(?P<escaped>%s)|(?P<named>%s)|{(?P<braced>%s)}|(?P<invalid>))", + delimiter, delimiter, substitution, substitution, +) + +var pattern = regexp.MustCompile(patternString) + +// InvalidTemplateError is returned when a variable template is not in a valid +// format +type InvalidTemplateError struct { + Template string +} + +func (e InvalidTemplateError) Error() string { + return fmt.Sprintf("Invalid template: %#v", e.Template) +} + +// Mapping is a user-supplied function which maps from variable names to values. +// Returns the value as a string and a bool indicating whether +// the value is present, to distinguish between an empty string +// and the absence of a value.
+type Mapping func(string) (string, bool) + +// Substitute variables in the string with their values +func Substitute(template string, mapping Mapping) (result string, err *InvalidTemplateError) { + result = pattern.ReplaceAllStringFunc(template, func(substring string) string { + matches := pattern.FindStringSubmatch(substring) + groups := make(map[string]string) + for i, name := range pattern.SubexpNames() { + if i != 0 { + groups[name] = matches[i] + } + } + + substitution := groups["named"] + if substitution == "" { + substitution = groups["braced"] + } + if substitution != "" { + // Soft default (fall back if unset or empty) + if strings.Contains(substitution, ":-") { + name, defaultValue := partition(substitution, ":-") + value, ok := mapping(name) + if !ok || value == "" { + return defaultValue + } + return value + } + + // Hard default (fall back if-and-only-if empty) + if strings.Contains(substitution, "-") { + name, defaultValue := partition(substitution, "-") + value, ok := mapping(name) + if !ok { + return defaultValue + } + return value + } + + // No default (fall back to empty string) + value, ok := mapping(substitution) + if !ok { + return "" + } + return value + } + + if escaped := groups["escaped"]; escaped != "" { + return escaped + } + + err = &InvalidTemplateError{Template: template} + return "" + }) + + return result, err +} + +// Split the string at the first occurrence of sep, and return the part before the separator, +// and the part after the separator. +// +// If the separator is not found, return the string itself, followed by an empty string. +func partition(s, sep string) (string, string) { + if strings.Contains(s, sep) { + parts := strings.SplitN(s, sep, 2) + return parts[0], parts[1] + } + return s, "" +} diff --git a/compose/template/template_test.go b/compose/template/template_test.go new file mode 100644 index 0000000000..6b81bf0a39 --- /dev/null +++ b/compose/template/template_test.go @@ -0,0 +1,83 @@ +package template + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +var defaults = map[string]string{ + "FOO": "first", + "BAR": "", +} + +func defaultMapping(name string) (string, bool) { + val, ok := defaults[name] + return val, ok +} + +func TestEscaped(t *testing.T) { + result, err := Substitute("$${foo}", defaultMapping) + assert.NoError(t, err) + assert.Equal(t, "${foo}", result) +} + +func TestInvalid(t *testing.T) { + invalidTemplates := []string{ + "${", + "$}", + "${}", + "${ }", + "${ foo}", + "${foo }", + "${foo!}", + } + + for _, template := range invalidTemplates { + _, err := Substitute(template, defaultMapping) + assert.Error(t, err) + assert.IsType(t, &InvalidTemplateError{}, err) + } +} + +func TestNoValueNoDefault(t *testing.T) { + for _, template := range []string{"This ${missing} var", "This ${BAR} var"} { + result, err := Substitute(template, defaultMapping) + assert.NoError(t, err) + assert.Equal(t, "This var", result) + } +} + +func TestValueNoDefault(t *testing.T) { + for _, template := range []string{"This $FOO var", "This ${FOO} var"} { + result, err := Substitute(template, defaultMapping) + assert.NoError(t, err) + assert.Equal(t, "This first var", result) + } +} + +func TestNoValueWithDefault(t *testing.T) { + for _, template := range []string{"ok ${missing:-def}", "ok ${missing-def}"} { + result, err := Substitute(template, defaultMapping) + assert.NoError(t, err) + assert.Equal(t, "ok def", result) + } +} + +func TestEmptyValueWithSoftDefault(t *testing.T) { + result, err := Substitute("ok ${BAR:-def}", 
defaultMapping) + assert.NoError(t, err) + assert.Equal(t, "ok def", result) +} + +func TestEmptyValueWithHardDefault(t *testing.T) { + result, err := Substitute("ok ${BAR-def}", defaultMapping) + assert.NoError(t, err) + assert.Equal(t, "ok ", result) +} + +func TestNonAlphanumericDefault(t *testing.T) { + result, err := Substitute("ok ${BAR:-/non:-alphanumeric}", defaultMapping) + assert.NoError(t, err) + assert.Equal(t, "ok /non:-alphanumeric", result) +} diff --git a/compose/types/types.go b/compose/types/types.go new file mode 100644 index 0000000000..45923b3460 --- /dev/null +++ b/compose/types/types.go @@ -0,0 +1,232 @@ +package types + +import ( + "time" +) + +// UnsupportedProperties not yet supported by this implementation of the compose file +var UnsupportedProperties = []string{ + "build", + "cap_add", + "cap_drop", + "cgroup_parent", + "devices", + "dns", + "dns_search", + "domainname", + "external_links", + "ipc", + "links", + "mac_address", + "network_mode", + "privileged", + "read_only", + "restart", + "security_opt", + "shm_size", + "stop_signal", + "tmpfs", +} + +// DeprecatedProperties that were removed from the v3 format, but their +// use should not impact the behaviour of the application. +var DeprecatedProperties = map[string]string{ + "container_name": "Setting the container name is not supported.", + "expose": "Exposing ports is unnecessary - services on the same network can access each other's containers on any port.", +} + +// ForbiddenProperties that are not supported in this implementation of the +// compose file. +var ForbiddenProperties = map[string]string{ + "extends": "Support for `extends` is not implemented yet. Use `docker-compose config` to generate a configuration with all `extends` options resolved, and deploy from that.", + "volume_driver": "Instead of setting the volume driver on the service, define a volume using the top-level `volumes` option and specify the driver there.", + "volumes_from": "To share a volume between services, define it using the top-level `volumes` option and reference it from each service that shares it using the service-level `volumes` option.", + "cpu_quota": "Set resource limits using deploy.resources", + "cpu_shares": "Set resource limits using deploy.resources", + "cpuset": "Set resource limits using deploy.resources", + "mem_limit": "Set resource limits using deploy.resources", + "memswap_limit": "Set resource limits using deploy.resources", +} + +// Dict is a mapping of strings to interface{} +type Dict map[string]interface{} + +// ConfigFile is a filename and the contents of the file as a Dict +type ConfigFile struct { + Filename string + Config Dict +} + +// ConfigDetails are the details about a group of ConfigFiles +type ConfigDetails struct { + WorkingDir string + ConfigFiles []ConfigFile + Environment map[string]string +} + +// Config is a full compose file configuration +type Config struct { + Services []ServiceConfig + Networks map[string]NetworkConfig + Volumes map[string]VolumeConfig +} + +// ServiceConfig is the configuration of one service +type ServiceConfig struct { + Name string + + CapAdd []string `mapstructure:"cap_add"` + CapDrop []string `mapstructure:"cap_drop"` + CgroupParent string `mapstructure:"cgroup_parent"` + Command []string `compose:"shell_command"` + ContainerName string `mapstructure:"container_name"` + DependsOn []string `mapstructure:"depends_on"` + Deploy DeployConfig + Devices []string + DNS []string `compose:"string_or_list"` + DNSSearch []string `mapstructure:"dns_search" 
compose:"string_or_list"` + DomainName string `mapstructure:"domainname"` + Entrypoint []string `compose:"shell_command"` + Environment map[string]string `compose:"list_or_dict_equals"` + Expose []string `compose:"list_of_strings_or_numbers"` + ExternalLinks []string `mapstructure:"external_links"` + ExtraHosts map[string]string `mapstructure:"extra_hosts" compose:"list_or_dict_colon"` + Hostname string + HealthCheck *HealthCheckConfig + Image string + Ipc string + Labels map[string]string `compose:"list_or_dict_equals"` + Links []string + Logging *LoggingConfig + MacAddress string `mapstructure:"mac_address"` + NetworkMode string `mapstructure:"network_mode"` + Networks map[string]*ServiceNetworkConfig `compose:"list_or_struct_map"` + Pid string + Ports []string `compose:"list_of_strings_or_numbers"` + Privileged bool + ReadOnly bool `mapstructure:"read_only"` + Restart string + SecurityOpt []string `mapstructure:"security_opt"` + StdinOpen bool `mapstructure:"stdin_open"` + StopGracePeriod *time.Duration `mapstructure:"stop_grace_period"` + StopSignal string `mapstructure:"stop_signal"` + Tmpfs []string `compose:"string_or_list"` + Tty bool `mapstructure:"tty"` + Ulimits map[string]*UlimitsConfig + User string + Volumes []string + WorkingDir string `mapstructure:"working_dir"` +} + +// LoggingConfig the logging configuration for a service +type LoggingConfig struct { + Driver string + Options map[string]string +} + +// DeployConfig the deployment configuration for a service +type DeployConfig struct { + Mode string + Replicas *uint64 + Labels map[string]string `compose:"list_or_dict_equals"` + UpdateConfig *UpdateConfig `mapstructure:"update_config"` + Resources Resources + RestartPolicy *RestartPolicy `mapstructure:"restart_policy"` + Placement Placement +} + +// HealthCheckConfig the healthcheck configuration for a service +type HealthCheckConfig struct { + Test []string `compose:"healthcheck"` + Timeout string + Interval string + Retries *uint64 + Disable bool +} + +// UpdateConfig the service update configuration +type UpdateConfig struct { + Parallelism *uint64 + Delay time.Duration + FailureAction string `mapstructure:"failure_action"` + Monitor time.Duration + MaxFailureRatio float32 `mapstructure:"max_failure_ratio"` +} + +// Resources the resource limits and reservations +type Resources struct { + Limits *Resource + Reservations *Resource +} + +// Resource is a resource to be limited or reserved +type Resource struct { + // TODO: types to convert from units and ratios + NanoCPUs string `mapstructure:"cpus"` + MemoryBytes UnitBytes `mapstructure:"memory"` +} + +// UnitBytes is the bytes type +type UnitBytes int64 + +// RestartPolicy the service restart policy +type RestartPolicy struct { + Condition string + Delay *time.Duration + MaxAttempts *uint64 `mapstructure:"max_attempts"` + Window *time.Duration +} + +// Placement constraints for the service +type Placement struct { + Constraints []string +} + +// ServiceNetworkConfig is the network configuration for a service +type ServiceNetworkConfig struct { + Aliases []string + Ipv4Address string `mapstructure:"ipv4_address"` + Ipv6Address string `mapstructure:"ipv6_address"` +} + +// UlimitsConfig the ulimit configuration +type UlimitsConfig struct { + Single int + Soft int + Hard int +} + +// NetworkConfig for a network +type NetworkConfig struct { + Driver string + DriverOpts map[string]string `mapstructure:"driver_opts"` + Ipam IPAMConfig + External External + Labels map[string]string `compose:"list_or_dict_equals"` +} + +// 
IPAMConfig for a network +type IPAMConfig struct { + Driver string + Config []*IPAMPool +} + +// IPAMPool for a network +type IPAMPool struct { + Subnet string +} + +// VolumeConfig for a volume +type VolumeConfig struct { + Driver string + DriverOpts map[string]string `mapstructure:"driver_opts"` + External External + Labels map[string]string `compose:"list_or_dict_equals"` +} + +// External identifies a Volume or Network as a reference to a resource that is +// not managed, and should already exist. +type External struct { + Name string + External bool +}
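Note for reviewers: the following is a minimal, hypothetical sketch (not part of this patch) of how the new packages fit together from a caller's point of view. schema.Validate checks a raw mapping against the embedded config_schema_v3.0.json, and interpolation.Interpolate resolves ${VAR} references through a template.Mapping such as os.LookupEnv. The example config, service name, and image are invented for illustration; only the Validate and Interpolate signatures shown in this diff are relied on.

package main

import (
	"fmt"
	"os"

	"github.com/docker/docker/cli/compose/interpolation"
	"github.com/docker/docker/cli/compose/schema"
	"github.com/docker/docker/cli/compose/types"
)

func main() {
	// A made-up config, already parsed from YAML into generic Go maps.
	config := map[string]interface{}{
		"version": "3",
		"services": map[string]interface{}{
			"web": map[string]interface{}{
				"image": "example/web:${TAG:-latest}",
			},
		},
	}

	// Validate the raw structure against the embedded v3.0 JSON schema.
	if err := schema.Validate(config); err != nil {
		fmt.Fprintln(os.Stderr, "invalid config:", err)
		os.Exit(1)
	}

	// Resolve ${VAR} references in the services section using the process
	// environment; os.LookupEnv already matches the template.Mapping signature.
	services := types.Dict{
		"web": types.Dict{
			"image": "example/web:${TAG:-latest}",
		},
	}
	resolved, err := interpolation.Interpolate(services, "service", os.LookupEnv)
	if err != nil {
		fmt.Fprintln(os.Stderr, err)
		os.Exit(1)
	}
	// With TAG unset, the soft default applies: "example/web:latest".
	fmt.Println(resolved["web"].(types.Dict)["image"])
}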
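Similarly, a hypothetical sketch of one way the UnsupportedProperties and ForbiddenProperties lists in compose/types could be consumed by a caller. The collectWarnings helper and the example service mapping are invented for illustration and are not how this patch wires the lists up.

package main

import (
	"fmt"

	"github.com/docker/docker/cli/compose/types"
)

// collectWarnings reports which keys of a parsed service mapping appear in the
// UnsupportedProperties and ForbiddenProperties lists; forbidden keys carry the
// replacement hint stored in the map.
func collectWarnings(service types.Dict) []string {
	var warnings []string
	for _, key := range types.UnsupportedProperties {
		if _, present := service[key]; present {
			warnings = append(warnings, fmt.Sprintf("%q: not supported by this implementation and will be ignored", key))
		}
	}
	for key, hint := range types.ForbiddenProperties {
		if _, present := service[key]; present {
			warnings = append(warnings, fmt.Sprintf("%q: %s", key, hint))
		}
	}
	return warnings
}

func main() {
	// Made-up service section containing one unsupported and one forbidden key.
	service := types.Dict{
		"image":        "busybox",
		"network_mode": "host",
		"mem_limit":    "512m",
	}
	for _, warning := range collectWarnings(service) {
		fmt.Println(warning)
	}
}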