New static configuration loading system.
Co-authored-by: Mathieu Lonjaret <mathieu.lonjaret@gmail.com>
parent d18edd6f77, commit 8d7eccad5d
165 changed files with 10894 additions and 6076 deletions
@@ -125,9 +125,9 @@ type HealthCheck struct {
 	Scheme string `json:"scheme,omitempty" toml:",omitempty"`
 	Path   string `json:"path,omitempty" toml:",omitempty"`
 	Port   int    `json:"port,omitempty" toml:",omitempty,omitzero"`
-	// FIXME change string to parse.Duration
+	// FIXME change string to types.Duration
 	Interval string `json:"interval,omitempty" toml:",omitempty"`
-	// FIXME change string to parse.Duration
+	// FIXME change string to types.Duration
 	Timeout  string `json:"timeout,omitempty" toml:",omitempty"`
 	Hostname string `json:"hostname,omitempty" toml:",omitempty"`
 	Headers  map[string]string `json:"headers,omitempty" toml:",omitempty"`
50  pkg/config/env/env.go  vendored  Normal file
@@ -0,0 +1,50 @@
// Package env implements encoding and decoding between environment variables and a typed Configuration.
package env

import (
	"strings"

	"github.com/containous/traefik/pkg/config/parser"
)

// Decode decodes the given environment variables into the given element.
// The operation goes through four stages roughly summarized as:
// env vars -> map
// map -> tree of untyped nodes
// untyped nodes -> nodes augmented with metadata such as kind (inferred from element)
// "typed" nodes -> typed element
func Decode(environ []string, element interface{}) error {
	vars := make(map[string]string)
	for _, evr := range environ {
		n := strings.SplitN(evr, "=", 2)
		if strings.HasPrefix(strings.ToUpper(n[0]), "TRAEFIK_") {
			key := strings.ReplaceAll(strings.ToLower(n[0]), "_", ".")
			vars[key] = n[1]
		}
	}

	return parser.Decode(vars, element)
}

// Encode encodes the configuration in element into the environment variables represented in the returned Flats.
// The operation goes through three stages roughly summarized as:
// typed configuration in element -> tree of untyped nodes
// untyped nodes -> nodes augmented with metadata such as kind (inferred from element)
// "typed" nodes -> environment variables with default values (determined by type/kind)
func Encode(element interface{}) ([]parser.Flat, error) {
	if element == nil {
		return nil, nil
	}

	node, err := parser.EncodeToNode(element, false)
	if err != nil {
		return nil, err
	}

	err = parser.AddMetadata(element, node)
	if err != nil {
		return nil, err
	}

	return parser.EncodeToFlat(element, node, parser.FlatOpts{Case: "upper", Separator: "_"})
}
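Taken together, Decode and Encode give the env provider a round trip between TRAEFIK_* variables and the typed configuration. Below is a minimal sketch of driving Decode; the Config struct, its field names, and the expected result are assumptions for illustration only, not part of this commit:

package main

import (
	"fmt"

	"github.com/containous/traefik/pkg/config/env"
)

// Config is a hypothetical configuration root for this sketch.
type Config struct {
	Debug bool
	Hosts map[string]string
}

func main() {
	// Environment variables as they would appear in os.Environ().
	environ := []string{
		"TRAEFIK_DEBUG=true",        // bool value, as in the "bool value" test case
		"TRAEFIK_HOSTS_WEB=10.0.0.1", // map entry, as in the "map string" test case (key comes out lowercased)
		"PATH=/usr/bin",             // ignored: no TRAEFIK_ prefix
	}

	cfg := &Config{}
	if err := env.Decode(environ, cfg); err != nil {
		panic(err)
	}

	fmt.Printf("%+v\n", cfg) // expected: &{Debug:true Hosts:map[web:10.0.0.1]}
}

Encode goes the other way, producing the flat list of TRAEFIK_* names and defaults that TestEncode below checks.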
498  pkg/config/env/env_test.go  vendored  Normal file
@ -0,0 +1,498 @@
|
|||
package env
|
||||
|
||||
import (
|
||||
"testing"
|
||||
|
||||
"github.com/containous/traefik/pkg/config/generator"
|
||||
"github.com/containous/traefik/pkg/config/parser"
|
||||
"github.com/stretchr/testify/assert"
|
||||
"github.com/stretchr/testify/require"
|
||||
)
|
||||
|
||||
func TestDecode(t *testing.T) {
|
||||
testCases := []struct {
|
||||
desc string
|
||||
environ []string
|
||||
element interface{}
|
||||
expected interface{}
|
||||
}{
|
||||
{
|
||||
desc: "no env vars",
|
||||
environ: nil,
|
||||
expected: nil,
|
||||
},
|
||||
{
|
||||
desc: "bool value",
|
||||
environ: []string{"TRAEFIK_FOO=true"},
|
||||
element: &struct {
|
||||
Foo bool
|
||||
}{},
|
||||
expected: &struct {
|
||||
Foo bool
|
||||
}{
|
||||
Foo: true,
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "equal",
|
||||
environ: []string{"TRAEFIK_FOO=bar"},
|
||||
element: &struct {
|
||||
Foo string
|
||||
}{},
|
||||
expected: &struct {
|
||||
Foo string
|
||||
}{
|
||||
Foo: "bar",
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "multiple bool flags without value",
|
||||
environ: []string{"TRAEFIK_FOO=true", "TRAEFIK_BAR=true"},
|
||||
element: &struct {
|
||||
Foo bool
|
||||
Bar bool
|
||||
}{},
|
||||
expected: &struct {
|
||||
Foo bool
|
||||
Bar bool
|
||||
}{
|
||||
Foo: true,
|
||||
Bar: true,
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "map string",
|
||||
environ: []string{"TRAEFIK_FOO_NAME=bar"},
|
||||
element: &struct {
|
||||
Foo map[string]string
|
||||
}{},
|
||||
expected: &struct {
|
||||
Foo map[string]string
|
||||
}{
|
||||
Foo: map[string]string{
|
||||
"name": "bar",
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "map struct",
|
||||
environ: []string{"TRAEFIK_FOO_NAME_VALUE=bar"},
|
||||
element: &struct {
|
||||
Foo map[string]struct{ Value string }
|
||||
}{},
|
||||
expected: &struct {
|
||||
Foo map[string]struct{ Value string }
|
||||
}{
|
||||
Foo: map[string]struct{ Value string }{
|
||||
"name": {
|
||||
Value: "bar",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "map struct with sub-struct",
|
||||
environ: []string{"TRAEFIK_FOO_NAME_BAR_VALUE=bar"},
|
||||
element: &struct {
|
||||
Foo map[string]struct {
|
||||
Bar *struct{ Value string }
|
||||
}
|
||||
}{},
|
||||
expected: &struct {
|
||||
Foo map[string]struct {
|
||||
Bar *struct{ Value string }
|
||||
}
|
||||
}{
|
||||
Foo: map[string]struct {
|
||||
Bar *struct{ Value string }
|
||||
}{
|
||||
"name": {
|
||||
Bar: &struct {
|
||||
Value string
|
||||
}{
|
||||
Value: "bar",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "map struct with sub-map",
|
||||
environ: []string{"TRAEFIK_FOO_NAME1_BAR_NAME2_VALUE=bar"},
|
||||
element: &struct {
|
||||
Foo map[string]struct {
|
||||
Bar map[string]struct{ Value string }
|
||||
}
|
||||
}{},
|
||||
expected: &struct {
|
||||
Foo map[string]struct {
|
||||
Bar map[string]struct{ Value string }
|
||||
}
|
||||
}{
|
||||
Foo: map[string]struct {
|
||||
Bar map[string]struct{ Value string }
|
||||
}{
|
||||
"name1": {
|
||||
Bar: map[string]struct{ Value string }{
|
||||
"name2": {
|
||||
Value: "bar",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "slice",
|
||||
environ: []string{"TRAEFIK_FOO=bar,baz"},
|
||||
element: &struct {
|
||||
Foo []string
|
||||
}{},
|
||||
expected: &struct {
|
||||
Foo []string
|
||||
}{
|
||||
Foo: []string{"bar", "baz"},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "struct pointer value",
|
||||
environ: []string{"TRAEFIK_FOO=true"},
|
||||
element: &struct {
|
||||
Foo *struct{ Field string } `label:"allowEmpty"`
|
||||
}{},
|
||||
expected: &struct {
|
||||
Foo *struct{ Field string } `label:"allowEmpty"`
|
||||
}{
|
||||
Foo: &struct{ Field string }{},
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
for _, test := range testCases {
|
||||
test := test
|
||||
t.Run(test.desc, func(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
err := Decode(test.environ, test.element)
|
||||
require.NoError(t, err)
|
||||
|
||||
assert.Equal(t, test.expected, test.element)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestEncode(t *testing.T) {
|
||||
element := &Ya{
|
||||
Foo: &Yaa{
|
||||
FieldIn1: "bar",
|
||||
FieldIn2: false,
|
||||
FieldIn3: 1,
|
||||
FieldIn4: map[string]string{
|
||||
parser.MapNamePlaceholder: "",
|
||||
},
|
||||
FieldIn5: map[string]int{
|
||||
parser.MapNamePlaceholder: 0,
|
||||
},
|
||||
FieldIn6: map[string]struct{ Field string }{
|
||||
parser.MapNamePlaceholder: {},
|
||||
},
|
||||
FieldIn7: map[string]struct{ Field map[string]string }{
|
||||
parser.MapNamePlaceholder: {
|
||||
Field: map[string]string{
|
||||
parser.MapNamePlaceholder: "",
|
||||
},
|
||||
},
|
||||
},
|
||||
FieldIn8: map[string]*struct{ Field string }{
|
||||
parser.MapNamePlaceholder: {},
|
||||
},
|
||||
FieldIn9: map[string]*struct{ Field map[string]string }{
|
||||
parser.MapNamePlaceholder: {
|
||||
Field: map[string]string{
|
||||
parser.MapNamePlaceholder: "",
|
||||
},
|
||||
},
|
||||
},
|
||||
FieldIn10: struct{ Field string }{},
|
||||
FieldIn11: &struct{ Field string }{},
|
||||
FieldIn12: func(v string) *string { return &v }(""),
|
||||
FieldIn13: func(v bool) *bool { return &v }(false),
|
||||
FieldIn14: func(v int) *int { return &v }(0),
|
||||
},
|
||||
Field1: "bir",
|
||||
Field2: true,
|
||||
Field3: 0,
|
||||
Field4: map[string]string{
|
||||
parser.MapNamePlaceholder: "",
|
||||
},
|
||||
Field5: map[string]int{
|
||||
parser.MapNamePlaceholder: 0,
|
||||
},
|
||||
Field6: map[string]struct{ Field string }{
|
||||
parser.MapNamePlaceholder: {},
|
||||
},
|
||||
Field7: map[string]struct{ Field map[string]string }{
|
||||
parser.MapNamePlaceholder: {
|
||||
Field: map[string]string{
|
||||
parser.MapNamePlaceholder: "",
|
||||
},
|
||||
},
|
||||
},
|
||||
Field8: map[string]*struct{ Field string }{
|
||||
parser.MapNamePlaceholder: {},
|
||||
},
|
||||
Field9: map[string]*struct{ Field map[string]string }{
|
||||
parser.MapNamePlaceholder: {
|
||||
Field: map[string]string{
|
||||
parser.MapNamePlaceholder: "",
|
||||
},
|
||||
},
|
||||
},
|
||||
Field10: struct{ Field string }{},
|
||||
Field11: &struct{ Field string }{},
|
||||
Field12: func(v string) *string { return &v }(""),
|
||||
Field13: func(v bool) *bool { return &v }(false),
|
||||
Field14: func(v int) *int { return &v }(0),
|
||||
Field15: []int{7},
|
||||
}
|
||||
generator.Generate(element)
|
||||
|
||||
flats, err := Encode(element)
|
||||
require.NoError(t, err)
|
||||
|
||||
expected := []parser.Flat{
|
||||
{
|
||||
Name: "TRAEFIK_FIELD1",
|
||||
Description: "",
|
||||
Default: "bir",
|
||||
},
|
||||
{
|
||||
Name: "TRAEFIK_FIELD10",
|
||||
Description: "",
|
||||
Default: "",
|
||||
},
|
||||
{
|
||||
Name: "TRAEFIK_FIELD10_FIELD",
|
||||
Description: "",
|
||||
Default: "",
|
||||
},
|
||||
{
|
||||
Name: "TRAEFIK_FIELD11_FIELD",
|
||||
Description: "",
|
||||
Default: "",
|
||||
},
|
||||
{
|
||||
Name: "TRAEFIK_FIELD12",
|
||||
Description: "",
|
||||
Default: "",
|
||||
},
|
||||
{
|
||||
Name: "TRAEFIK_FIELD13",
|
||||
Description: "",
|
||||
Default: "false",
|
||||
},
|
||||
{
|
||||
Name: "TRAEFIK_FIELD14",
|
||||
Description: "",
|
||||
Default: "0",
|
||||
},
|
||||
{
|
||||
Name: "TRAEFIK_FIELD15",
|
||||
Description: "",
|
||||
Default: "7",
|
||||
},
|
||||
{
|
||||
Name: "TRAEFIK_FIELD2",
|
||||
Description: "",
|
||||
Default: "true",
|
||||
},
|
||||
{
|
||||
Name: "TRAEFIK_FIELD3",
|
||||
Description: "",
|
||||
Default: "0",
|
||||
},
|
||||
{
|
||||
Name: "TRAEFIK_FIELD4_\u003cNAME\u003e",
|
||||
Description: "",
|
||||
Default: "",
|
||||
},
|
||||
{
|
||||
Name: "TRAEFIK_FIELD5_\u003cNAME\u003e",
|
||||
Description: "",
|
||||
Default: "0",
|
||||
},
|
||||
{
|
||||
Name: "TRAEFIK_FIELD6_\u003cNAME\u003e",
|
||||
Description: "",
|
||||
Default: "false",
|
||||
},
|
||||
{
|
||||
Name: "TRAEFIK_FIELD6_\u003cNAME\u003e_FIELD",
|
||||
Description: "",
|
||||
Default: "",
|
||||
},
|
||||
{
|
||||
Name: "TRAEFIK_FIELD7_\u003cNAME\u003e",
|
||||
Description: "",
|
||||
Default: "false",
|
||||
},
|
||||
{
|
||||
Name: "TRAEFIK_FIELD7_\u003cNAME\u003e_FIELD_\u003cNAME\u003e",
|
||||
Description: "",
|
||||
Default: "",
|
||||
},
|
||||
{
|
||||
Name: "TRAEFIK_FIELD8_\u003cNAME\u003e",
|
||||
Description: "",
|
||||
Default: "false",
|
||||
},
|
||||
{
|
||||
Name: "TRAEFIK_FIELD8_\u003cNAME\u003e_FIELD",
|
||||
Description: "",
|
||||
Default: "",
|
||||
},
|
||||
{
|
||||
Name: "TRAEFIK_FIELD9_\u003cNAME\u003e",
|
||||
Description: "",
|
||||
Default: "false",
|
||||
},
|
||||
{
|
||||
Name: "TRAEFIK_FIELD9_\u003cNAME\u003e_FIELD_\u003cNAME\u003e",
|
||||
Description: "",
|
||||
Default: "",
|
||||
},
|
||||
{
|
||||
Name: "TRAEFIK_FOO_FIELDIN1",
|
||||
Description: "",
|
||||
Default: "bar",
|
||||
},
|
||||
{
|
||||
Name: "TRAEFIK_FOO_FIELDIN10",
|
||||
Description: "",
|
||||
Default: "",
|
||||
},
|
||||
{
|
||||
Name: "TRAEFIK_FOO_FIELDIN10_FIELD",
|
||||
Description: "",
|
||||
Default: "",
|
||||
},
|
||||
{
|
||||
Name: "TRAEFIK_FOO_FIELDIN11_FIELD",
|
||||
Description: "",
|
||||
Default: "",
|
||||
},
|
||||
{
|
||||
Name: "TRAEFIK_FOO_FIELDIN12",
|
||||
Description: "",
|
||||
Default: "",
|
||||
},
|
||||
{
|
||||
Name: "TRAEFIK_FOO_FIELDIN13",
|
||||
Description: "",
|
||||
Default: "false",
|
||||
},
|
||||
{
|
||||
Name: "TRAEFIK_FOO_FIELDIN14",
|
||||
Description: "",
|
||||
Default: "0",
|
||||
},
|
||||
{
|
||||
Name: "TRAEFIK_FOO_FIELDIN2",
|
||||
Description: "",
|
||||
Default: "false",
|
||||
},
|
||||
{
|
||||
Name: "TRAEFIK_FOO_FIELDIN3",
|
||||
Description: "",
|
||||
Default: "1",
|
||||
},
|
||||
{
|
||||
Name: "TRAEFIK_FOO_FIELDIN4_\u003cNAME\u003e",
|
||||
Description: "",
|
||||
Default: "",
|
||||
},
|
||||
{
|
||||
Name: "TRAEFIK_FOO_FIELDIN5_\u003cNAME\u003e",
|
||||
Description: "",
|
||||
Default: "0",
|
||||
},
|
||||
{
|
||||
Name: "TRAEFIK_FOO_FIELDIN6_\u003cNAME\u003e",
|
||||
Description: "",
|
||||
Default: "false",
|
||||
},
|
||||
{
|
||||
Name: "TRAEFIK_FOO_FIELDIN6_\u003cNAME\u003e_FIELD",
|
||||
Description: "",
|
||||
Default: "",
|
||||
},
|
||||
{
|
||||
Name: "TRAEFIK_FOO_FIELDIN7_\u003cNAME\u003e",
|
||||
Description: "",
|
||||
Default: "false",
|
||||
},
|
||||
{
|
||||
Name: "TRAEFIK_FOO_FIELDIN7_\u003cNAME\u003e_FIELD_\u003cNAME\u003e",
|
||||
Description: "",
|
||||
Default: "",
|
||||
},
|
||||
{
|
||||
Name: "TRAEFIK_FOO_FIELDIN8_\u003cNAME\u003e",
|
||||
Description: "",
|
||||
Default: "false",
|
||||
},
|
||||
{
|
||||
Name: "TRAEFIK_FOO_FIELDIN8_\u003cNAME\u003e_FIELD",
|
||||
Description: "",
|
||||
Default: "",
|
||||
},
|
||||
{
|
||||
Name: "TRAEFIK_FOO_FIELDIN9_\u003cNAME\u003e",
|
||||
Description: "",
|
||||
Default: "false",
|
||||
},
|
||||
{
|
||||
Name: "TRAEFIK_FOO_FIELDIN9_\u003cNAME\u003e_FIELD_\u003cNAME\u003e",
|
||||
Description: "",
|
||||
Default: "",
|
||||
},
|
||||
}
|
||||
|
||||
assert.Equal(t, expected, flats)
|
||||
}
|
||||
|
||||
type Ya struct {
|
||||
Foo *Yaa
|
||||
Field1 string
|
||||
Field2 bool
|
||||
Field3 int
|
||||
Field4 map[string]string
|
||||
Field5 map[string]int
|
||||
Field6 map[string]struct{ Field string }
|
||||
Field7 map[string]struct{ Field map[string]string }
|
||||
Field8 map[string]*struct{ Field string }
|
||||
Field9 map[string]*struct{ Field map[string]string }
|
||||
Field10 struct{ Field string }
|
||||
Field11 *struct{ Field string }
|
||||
Field12 *string
|
||||
Field13 *bool
|
||||
Field14 *int
|
||||
Field15 []int
|
||||
}
|
||||
|
||||
type Yaa struct {
|
||||
FieldIn1 string
|
||||
FieldIn2 bool
|
||||
FieldIn3 int
|
||||
FieldIn4 map[string]string
|
||||
FieldIn5 map[string]int
|
||||
FieldIn6 map[string]struct{ Field string }
|
||||
FieldIn7 map[string]struct{ Field map[string]string }
|
||||
FieldIn8 map[string]*struct{ Field string }
|
||||
FieldIn9 map[string]*struct{ Field map[string]string }
|
||||
FieldIn10 struct{ Field string }
|
||||
FieldIn11 *struct{ Field string }
|
||||
FieldIn12 *string
|
||||
FieldIn13 *bool
|
||||
FieldIn14 *int
|
||||
}
|
31  pkg/config/file/file.go  Normal file
@@ -0,0 +1,31 @@
// Package file implements decoding between configuration in a file and a typed Configuration.
package file

import (
	"github.com/containous/traefik/pkg/config/parser"
)

// Decode decodes the given configuration file into the given element.
// The operation goes through three stages roughly summarized as:
// file contents -> tree of untyped nodes
// untyped nodes -> nodes augmented with metadata such as kind (inferred from element)
// "typed" nodes -> typed element
func Decode(filePath string, element interface{}) error {
	if element == nil {
		return nil
	}

	filters := getRootFieldNames(element)

	root, err := decodeFileToNode(filePath, filters...)
	if err != nil {
		return err
	}

	err = parser.AddMetadata(element, root)
	if err != nil {
		return err
	}

	return parser.Fill(element, root)
}
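Because Decode passes the element's root field names to decodeFileToNode as filters, top-level keys in the file that do not match any root field are skipped rather than treated as errors. A minimal sketch under that assumption; the Static struct and its fields are hypothetical, not part of the commit:

package main

import (
	"fmt"
	"io/ioutil"
	"os"

	"github.com/containous/traefik/pkg/config/file"
)

// Static is a hypothetical configuration root; its exported field names
// are what Decode uses to filter the file's top-level keys.
type Static struct {
	Global *struct{ Debug bool }
	Log    *struct{ Level string }
}

func main() {
	tmp, err := ioutil.TempFile("", "traefik-*.toml")
	if err != nil {
		panic(err)
	}
	defer os.Remove(tmp.Name())

	// "Unknown" is not a root field of Static, so it should be filtered out.
	_, _ = tmp.WriteString("[Global]\nDebug = true\n\n[Log]\nLevel = \"DEBUG\"\n\n[Unknown]\nFoo = \"bar\"\n")
	_ = tmp.Close()

	cfg := &Static{}
	if err := file.Decode(tmp.Name(), cfg); err != nil {
		panic(err)
	}

	// Expected (assuming pointer sub-structs are allocated as in TestDecode_TOML below):
	// &{Debug:true} &{Level:DEBUG}
	fmt.Printf("%+v %+v\n", cfg.Global, cfg.Log)
}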
86  pkg/config/file/file_node.go  Normal file
@@ -0,0 +1,86 @@
package file

import (
	"fmt"
	"io/ioutil"
	"path/filepath"
	"reflect"

	"github.com/BurntSushi/toml"
	"github.com/containous/traefik/pkg/config/parser"
	"gopkg.in/yaml.v2"
)

// decodeFileToNode decodes the configuration in filePath into a tree of untyped nodes.
// If filters is not empty, it skips any configuration element whose name is
// not among filters.
func decodeFileToNode(filePath string, filters ...string) (*parser.Node, error) {
	content, err := ioutil.ReadFile(filePath)
	if err != nil {
		return nil, err
	}

	data := make(map[string]interface{})

	switch filepath.Ext(filePath) {
	case ".toml":
		err = toml.Unmarshal(content, &data)
		if err != nil {
			return nil, err
		}

	case ".yml", ".yaml":
		var err error
		err = yaml.Unmarshal(content, data)
		if err != nil {
			return nil, err
		}

		return decodeRawToNode(data, filters...)

	default:
		return nil, fmt.Errorf("unsupported file extension: %s", filePath)
	}

	return decodeRawToNode(data, filters...)
}

func getRootFieldNames(element interface{}) []string {
	if element == nil {
		return nil
	}

	rootType := reflect.TypeOf(element)

	return getFieldNames(rootType)
}

func getFieldNames(rootType reflect.Type) []string {
	var names []string

	if rootType.Kind() == reflect.Ptr {
		rootType = rootType.Elem()
	}

	if rootType.Kind() != reflect.Struct {
		return nil
	}

	for i := 0; i < rootType.NumField(); i++ {
		field := rootType.Field(i)

		if !parser.IsExported(field) {
			continue
		}

		if field.Anonymous &&
			(field.Type.Kind() == reflect.Ptr && field.Type.Elem().Kind() == reflect.Struct || field.Type.Kind() == reflect.Struct) {
			names = append(names, getFieldNames(field.Type)...)
			continue
		}

		names = append(names, field.Name)
	}

	return names
}
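getFieldNames is what lets an anonymous embedded struct contribute its fields to the root filter list, a behavior exercised by Test_getRootFieldNames below. Here is a standalone sketch of that flattening rule with hypothetical types (Base, Derived, and fieldNames are not part of the commit, and the real getFieldNames additionally skips unexported fields and follows embedded struct pointers):

package main

import (
	"fmt"
	"reflect"
)

type Base struct {
	Foo string
	Fii string
}

// Derived embeds Base, so Foo and Fii count as root fields alongside Fuu.
type Derived struct {
	Base
	Fuu string
}

// fieldNames mimics the flattening rule used by getFieldNames.
func fieldNames(t reflect.Type) []string {
	if t.Kind() == reflect.Ptr {
		t = t.Elem()
	}

	var names []string
	for i := 0; i < t.NumField(); i++ {
		f := t.Field(i)
		if f.Anonymous && f.Type.Kind() == reflect.Struct {
			names = append(names, fieldNames(f.Type)...)
			continue
		}
		names = append(names, f.Name)
	}
	return names
}

func main() {
	fmt.Println(fieldNames(reflect.TypeOf(&Derived{}))) // [Foo Fii Fuu]
}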
599  pkg/config/file/file_node_test.go  Normal file
@ -0,0 +1,599 @@
|
|||
package file
|
||||
|
||||
import (
|
||||
"testing"
|
||||
|
||||
"github.com/containous/traefik/pkg/config/parser"
|
||||
"github.com/stretchr/testify/assert"
|
||||
)
|
||||
|
||||
func Test_getRootFieldNames(t *testing.T) {
|
||||
testCases := []struct {
|
||||
desc string
|
||||
element interface{}
|
||||
expected []string
|
||||
}{
|
||||
{
|
||||
desc: "simple fields",
|
||||
element: &Yo{},
|
||||
expected: []string{"Foo", "Fii", "Fuu", "Yi"},
|
||||
},
|
||||
{
|
||||
desc: "embedded struct",
|
||||
element: &Yu{},
|
||||
expected: []string{"Foo", "Fii", "Fuu"},
|
||||
},
|
||||
{
|
||||
desc: "embedded struct pointer",
|
||||
element: &Ye{},
|
||||
expected: []string{"Foo", "Fii", "Fuu"},
|
||||
},
|
||||
}
|
||||
|
||||
for _, test := range testCases {
|
||||
test := test
|
||||
t.Run(test.desc, func(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
names := getRootFieldNames(test.element)
|
||||
|
||||
assert.Equal(t, test.expected, names)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func Test_decodeFileToNode_compare(t *testing.T) {
|
||||
nodeToml, err := decodeFileToNode("./fixtures/sample.toml",
|
||||
"Global", "ServersTransport", "EntryPoints", "Providers", "API", "Metrics", "Ping", "Log", "AccessLog", "Tracing", "HostResolver", "ACME")
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
nodeYaml, err := decodeFileToNode("./fixtures/sample.yml")
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
assert.Equal(t, nodeToml, nodeYaml)
|
||||
}
|
||||
|
||||
func Test_decodeFileToNode_Toml(t *testing.T) {
|
||||
node, err := decodeFileToNode("./fixtures/sample.toml",
|
||||
"Global", "ServersTransport", "EntryPoints", "Providers", "API", "Metrics", "Ping", "Log", "AccessLog", "Tracing", "HostResolver", "ACME")
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
expected := &parser.Node{
|
||||
Name: "traefik",
|
||||
Children: []*parser.Node{
|
||||
{Name: "ACME",
|
||||
Children: []*parser.Node{
|
||||
{Name: "ACMELogging", Value: "true"},
|
||||
{Name: "CAServer", Value: "foobar"},
|
||||
{Name: "DNSChallenge", Children: []*parser.Node{
|
||||
{Name: "DelayBeforeCheck", Value: "42"},
|
||||
{Name: "DisablePropagationCheck", Value: "true"},
|
||||
{Name: "Provider", Value: "foobar"},
|
||||
{Name: "Resolvers", Value: "foobar,foobar"},
|
||||
}},
|
||||
{Name: "Domains", Children: []*parser.Node{
|
||||
{Name: "[0]", Children: []*parser.Node{
|
||||
{Name: "Main", Value: "foobar"},
|
||||
{Name: "SANs", Value: "foobar,foobar"},
|
||||
}},
|
||||
{Name: "[1]", Children: []*parser.Node{
|
||||
{Name: "Main", Value: "foobar"},
|
||||
{Name: "SANs", Value: "foobar,foobar"},
|
||||
}},
|
||||
}},
|
||||
{Name: "Email", Value: "foobar"},
|
||||
{Name: "EntryPoint", Value: "foobar"},
|
||||
{Name: "HTTPChallenge", Children: []*parser.Node{
|
||||
{Name: "EntryPoint", Value: "foobar"}}},
|
||||
{Name: "KeyType", Value: "foobar"},
|
||||
{Name: "OnHostRule", Value: "true"},
|
||||
{Name: "Storage", Value: "foobar"},
|
||||
{Name: "TLSChallenge"},
|
||||
},
|
||||
},
|
||||
{Name: "API", Children: []*parser.Node{
|
||||
{Name: "Dashboard", Value: "true"},
|
||||
{Name: "EntryPoint", Value: "foobar"},
|
||||
{Name: "Middlewares", Value: "foobar,foobar"},
|
||||
{Name: "Statistics", Children: []*parser.Node{
|
||||
{Name: "RecentErrors", Value: "42"}}}}},
|
||||
{Name: "AccessLog", Children: []*parser.Node{
|
||||
{Name: "BufferingSize", Value: "42"},
|
||||
{Name: "Fields", Children: []*parser.Node{
|
||||
{Name: "DefaultMode", Value: "foobar"},
|
||||
{Name: "Headers", Children: []*parser.Node{
|
||||
{Name: "DefaultMode", Value: "foobar"},
|
||||
{Name: "Names", Children: []*parser.Node{
|
||||
{Name: "name0", Value: "foobar"},
|
||||
{Name: "name1", Value: "foobar"}}}}},
|
||||
{Name: "Names", Children: []*parser.Node{
|
||||
{Name: "name0", Value: "foobar"},
|
||||
{Name: "name1", Value: "foobar"}}}}},
|
||||
{Name: "FilePath", Value: "foobar"},
|
||||
{Name: "Filters", Children: []*parser.Node{
|
||||
{Name: "MinDuration", Value: "42"},
|
||||
{Name: "RetryAttempts", Value: "true"},
|
||||
{Name: "StatusCodes", Value: "foobar,foobar"}}},
|
||||
{Name: "Format", Value: "foobar"}}},
|
||||
{Name: "EntryPoints", Children: []*parser.Node{
|
||||
{Name: "EntryPoint0", Children: []*parser.Node{
|
||||
{Name: "Address", Value: "foobar"},
|
||||
{Name: "ForwardedHeaders", Children: []*parser.Node{
|
||||
{Name: "Insecure", Value: "true"},
|
||||
{Name: "TrustedIPs", Value: "foobar,foobar"}}},
|
||||
{Name: "ProxyProtocol", Children: []*parser.Node{
|
||||
{Name: "Insecure", Value: "true"},
|
||||
{Name: "TrustedIPs", Value: "foobar,foobar"}}},
|
||||
{Name: "Transport", Children: []*parser.Node{
|
||||
{Name: "LifeCycle", Children: []*parser.Node{
|
||||
{Name: "GraceTimeOut", Value: "42"},
|
||||
{Name: "RequestAcceptGraceTimeout", Value: "42"}}},
|
||||
{Name: "RespondingTimeouts", Children: []*parser.Node{
|
||||
{Name: "IdleTimeout", Value: "42"},
|
||||
{Name: "ReadTimeout", Value: "42"},
|
||||
{Name: "WriteTimeout", Value: "42"}}}}}}}}},
|
||||
{Name: "Global", Children: []*parser.Node{
|
||||
{Name: "CheckNewVersion", Value: "true"},
|
||||
{Name: "Debug", Value: "true"},
|
||||
{Name: "SendAnonymousUsage", Value: "true"}}},
|
||||
{Name: "HostResolver", Children: []*parser.Node{
|
||||
{Name: "CnameFlattening", Value: "true"},
|
||||
{Name: "ResolvConfig", Value: "foobar"},
|
||||
{Name: "ResolvDepth", Value: "42"}}},
|
||||
{Name: "Log", Children: []*parser.Node{
|
||||
{Name: "FilePath", Value: "foobar"},
|
||||
{Name: "Format", Value: "foobar"},
|
||||
{Name: "Level", Value: "foobar"}}},
|
||||
{Name: "Metrics", Children: []*parser.Node{
|
||||
{Name: "Datadog", Children: []*parser.Node{
|
||||
{Name: "Address", Value: "foobar"},
|
||||
{Name: "PushInterval", Value: "10s"}}},
|
||||
{Name: "InfluxDB", Children: []*parser.Node{
|
||||
{Name: "Address", Value: "foobar"},
|
||||
{Name: "Database", Value: "foobar"},
|
||||
{Name: "Password", Value: "foobar"},
|
||||
{Name: "Protocol", Value: "foobar"},
|
||||
{Name: "PushInterval", Value: "10s"},
|
||||
{Name: "RetentionPolicy", Value: "foobar"},
|
||||
{Name: "Username", Value: "foobar"}}},
|
||||
{Name: "Prometheus", Children: []*parser.Node{
|
||||
{Name: "Buckets", Value: "42,42"},
|
||||
{Name: "EntryPoint", Value: "foobar"},
|
||||
{Name: "Middlewares", Value: "foobar,foobar"}}},
|
||||
{Name: "StatsD", Children: []*parser.Node{
|
||||
{Name: "Address", Value: "foobar"},
|
||||
{Name: "PushInterval", Value: "10s"}}}}},
|
||||
{Name: "Ping", Children: []*parser.Node{
|
||||
{Name: "EntryPoint", Value: "foobar"},
|
||||
{Name: "Middlewares", Value: "foobar,foobar"}}},
|
||||
{Name: "Providers", Children: []*parser.Node{
|
||||
{Name: "Docker", Children: []*parser.Node{
|
||||
{Name: "Constraints", Children: []*parser.Node{
|
||||
{Name: "[0]", Children: []*parser.Node{
|
||||
{Name: "Key", Value: "foobar"},
|
||||
{Name: "MustMatch", Value: "true"},
|
||||
{Name: "Value", Value: "foobar"},
|
||||
}},
|
||||
{Name: "[1]", Children: []*parser.Node{
|
||||
{Name: "Key", Value: "foobar"},
|
||||
{Name: "MustMatch", Value: "true"},
|
||||
{Name: "Value", Value: "foobar"},
|
||||
}},
|
||||
}},
|
||||
{Name: "DefaultRule", Value: "foobar"},
|
||||
{Name: "Endpoint", Value: "foobar"},
|
||||
{Name: "ExposedByDefault", Value: "true"},
|
||||
{Name: "Network", Value: "foobar"},
|
||||
{Name: "SwarmMode", Value: "true"},
|
||||
{Name: "SwarmModeRefreshSeconds", Value: "42"},
|
||||
{Name: "TLS", Children: []*parser.Node{
|
||||
{Name: "CA", Value: "foobar"},
|
||||
{Name: "CAOptional", Value: "true"},
|
||||
{Name: "Cert", Value: "foobar"},
|
||||
{Name: "InsecureSkipVerify", Value: "true"},
|
||||
{Name: "Key", Value: "foobar"}}},
|
||||
{Name: "UseBindPortIP", Value: "true"},
|
||||
{Name: "Watch", Value: "true"}}},
|
||||
{Name: "File", Children: []*parser.Node{
|
||||
{Name: "DebugLogGeneratedTemplate", Value: "true"},
|
||||
{Name: "Directory", Value: "foobar"},
|
||||
{Name: "Filename", Value: "foobar"},
|
||||
{Name: "TraefikFile", Value: "foobar"},
|
||||
{Name: "Watch", Value: "true"}}},
|
||||
{Name: "Kubernetes", Children: []*parser.Node{
|
||||
{Name: "CertAuthFilePath", Value: "foobar"},
|
||||
{Name: "DisablePassHostHeaders", Value: "true"},
|
||||
{Name: "Endpoint", Value: "foobar"},
|
||||
{Name: "IngressClass", Value: "foobar"},
|
||||
{Name: "IngressEndpoint", Children: []*parser.Node{
|
||||
{Name: "Hostname", Value: "foobar"},
|
||||
{Name: "IP", Value: "foobar"},
|
||||
{Name: "PublishedService", Value: "foobar"}}},
|
||||
{Name: "LabelSelector", Value: "foobar"},
|
||||
{Name: "Namespaces", Value: "foobar,foobar"},
|
||||
{Name: "Token", Value: "foobar"}}},
|
||||
{Name: "KubernetesCRD",
|
||||
Children: []*parser.Node{
|
||||
{Name: "CertAuthFilePath", Value: "foobar"},
|
||||
{Name: "DisablePassHostHeaders", Value: "true"},
|
||||
{Name: "Endpoint", Value: "foobar"},
|
||||
{Name: "IngressClass", Value: "foobar"},
|
||||
{Name: "LabelSelector", Value: "foobar"},
|
||||
{Name: "Namespaces", Value: "foobar,foobar"},
|
||||
{Name: "Token", Value: "foobar"}}},
|
||||
{Name: "Marathon", Children: []*parser.Node{
|
||||
{Name: "Basic", Children: []*parser.Node{
|
||||
{Name: "HTTPBasicAuthUser", Value: "foobar"},
|
||||
{Name: "HTTPBasicPassword", Value: "foobar"}}},
|
||||
{Name: "Constraints", Children: []*parser.Node{
|
||||
{Name: "[0]", Children: []*parser.Node{
|
||||
{Name: "Key", Value: "foobar"},
|
||||
{Name: "MustMatch", Value: "true"},
|
||||
{Name: "Value", Value: "foobar"},
|
||||
}},
|
||||
{Name: "[1]", Children: []*parser.Node{
|
||||
{Name: "Key", Value: "foobar"},
|
||||
{Name: "MustMatch", Value: "true"},
|
||||
{Name: "Value", Value: "foobar"},
|
||||
}},
|
||||
}},
|
||||
{Name: "DCOSToken", Value: "foobar"},
|
||||
{Name: "DefaultRule", Value: "foobar"},
|
||||
{Name: "DialerTimeout", Value: "42"},
|
||||
{Name: "Endpoint", Value: "foobar"},
|
||||
{Name: "ExposedByDefault", Value: "true"},
|
||||
{Name: "FilterMarathonConstraints", Value: "true"},
|
||||
{Name: "ForceTaskHostname", Value: "true"},
|
||||
{Name: "KeepAlive", Value: "42"},
|
||||
{Name: "RespectReadinessChecks", Value: "true"},
|
||||
{Name: "ResponseHeaderTimeout", Value: "42"},
|
||||
{Name: "TLS", Children: []*parser.Node{
|
||||
{Name: "CA", Value: "foobar"},
|
||||
{Name: "CAOptional", Value: "true"},
|
||||
{Name: "Cert", Value: "foobar"},
|
||||
{Name: "InsecureSkipVerify", Value: "true"},
|
||||
{Name: "Key", Value: "foobar"}}},
|
||||
{Name: "TLSHandshakeTimeout", Value: "42"},
|
||||
{Name: "Trace", Value: "true"},
|
||||
{Name: "Watch", Value: "true"}}},
|
||||
{Name: "ProvidersThrottleDuration", Value: "42"},
|
||||
{Name: "Rancher", Children: []*parser.Node{
|
||||
{Name: "Constraints", Children: []*parser.Node{
|
||||
{Name: "[0]", Children: []*parser.Node{
|
||||
{Name: "Key", Value: "foobar"},
|
||||
{Name: "MustMatch", Value: "true"},
|
||||
{Name: "Value", Value: "foobar"},
|
||||
}},
|
||||
{Name: "[1]", Children: []*parser.Node{
|
||||
{Name: "Key", Value: "foobar"},
|
||||
{Name: "MustMatch", Value: "true"},
|
||||
{Name: "Value", Value: "foobar"},
|
||||
}},
|
||||
}},
|
||||
{Name: "DefaultRule", Value: "foobar"},
|
||||
{Name: "EnableServiceHealthFilter", Value: "true"},
|
||||
{Name: "ExposedByDefault", Value: "true"},
|
||||
{Name: "IntervalPoll", Value: "true"},
|
||||
{Name: "Prefix", Value: "foobar"},
|
||||
{Name: "RefreshSeconds", Value: "42"},
|
||||
{Name: "Watch", Value: "true"}}},
|
||||
{Name: "Rest", Children: []*parser.Node{
|
||||
{Name: "EntryPoint", Value: "foobar"}}}}},
|
||||
{Name: "ServersTransport", Children: []*parser.Node{
|
||||
{Name: "ForwardingTimeouts", Children: []*parser.Node{
|
||||
{Name: "DialTimeout", Value: "42"},
|
||||
{Name: "ResponseHeaderTimeout", Value: "42"}}},
|
||||
{Name: "InsecureSkipVerify", Value: "true"},
|
||||
{Name: "MaxIdleConnsPerHost", Value: "42"},
|
||||
{Name: "RootCAs", Value: "foobar,foobar"}}},
|
||||
{Name: "Tracing", Children: []*parser.Node{
|
||||
{Name: "Backend", Value: "foobar"},
|
||||
{Name: "DataDog", Children: []*parser.Node{
|
||||
{Name: "BagagePrefixHeaderName", Value: "foobar"},
|
||||
{Name: "Debug", Value: "true"},
|
||||
{Name: "GlobalTag", Value: "foobar"},
|
||||
{Name: "LocalAgentHostPort", Value: "foobar"},
|
||||
{Name: "ParentIDHeaderName", Value: "foobar"},
|
||||
{Name: "PrioritySampling", Value: "true"},
|
||||
{Name: "SamplingPriorityHeaderName", Value: "foobar"},
|
||||
{Name: "TraceIDHeaderName", Value: "foobar"}}},
|
||||
{Name: "Instana", Children: []*parser.Node{
|
||||
{Name: "LocalAgentHost", Value: "foobar"},
|
||||
{Name: "LocalAgentPort", Value: "42"},
|
||||
{Name: "LogLevel", Value: "foobar"}}},
|
||||
{Name: "Jaeger", Children: []*parser.Node{
|
||||
{Name: "Gen128Bit", Value: "true"},
|
||||
{Name: "LocalAgentHostPort", Value: "foobar"},
|
||||
{Name: "Propagation", Value: "foobar"},
|
||||
{Name: "SamplingParam", Value: "42"},
|
||||
{Name: "SamplingServerURL", Value: "foobar"},
|
||||
{Name: "SamplingType", Value: "foobar"},
|
||||
{Name: "TraceContextHeaderName", Value: "foobar"}}},
|
||||
{Name: "ServiceName", Value: "foobar"},
|
||||
{Name: "SpanNameLimit", Value: "42"},
|
||||
{Name: "Zipkin", Children: []*parser.Node{
|
||||
{Name: "Debug", Value: "true"},
|
||||
{Name: "HTTPEndpoint", Value: "foobar"},
|
||||
{Name: "ID128Bit", Value: "true"},
|
||||
{Name: "SameSpan", Value: "true"},
|
||||
{Name: "SampleRate", Value: "42"}}}}}},
|
||||
}
|
||||
|
||||
assert.Equal(t, expected, node)
|
||||
}
|
||||
|
||||
func Test_decodeFileToNode_Yaml(t *testing.T) {
|
||||
node, err := decodeFileToNode("./fixtures/sample.yml")
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
expected := &parser.Node{
|
||||
Name: "traefik",
|
||||
Children: []*parser.Node{
|
||||
{Name: "ACME",
|
||||
Children: []*parser.Node{
|
||||
{Name: "ACMELogging", Value: "true"},
|
||||
{Name: "CAServer", Value: "foobar"},
|
||||
{Name: "DNSChallenge", Children: []*parser.Node{
|
||||
{Name: "DelayBeforeCheck", Value: "42"},
|
||||
{Name: "DisablePropagationCheck", Value: "true"},
|
||||
{Name: "Provider", Value: "foobar"},
|
||||
{Name: "Resolvers", Value: "foobar,foobar"},
|
||||
}},
|
||||
{Name: "Domains", Children: []*parser.Node{
|
||||
{Name: "[0]", Children: []*parser.Node{
|
||||
{Name: "Main", Value: "foobar"},
|
||||
{Name: "SANs", Value: "foobar,foobar"},
|
||||
}},
|
||||
{Name: "[1]", Children: []*parser.Node{
|
||||
{Name: "Main", Value: "foobar"},
|
||||
{Name: "SANs", Value: "foobar,foobar"},
|
||||
}},
|
||||
}},
|
||||
{Name: "Email", Value: "foobar"},
|
||||
{Name: "EntryPoint", Value: "foobar"},
|
||||
{Name: "HTTPChallenge", Children: []*parser.Node{
|
||||
{Name: "EntryPoint", Value: "foobar"}}},
|
||||
{Name: "KeyType", Value: "foobar"},
|
||||
{Name: "OnHostRule", Value: "true"},
|
||||
{Name: "Storage", Value: "foobar"},
|
||||
{Name: "TLSChallenge"},
|
||||
},
|
||||
},
|
||||
{Name: "API", Children: []*parser.Node{
|
||||
{Name: "Dashboard", Value: "true"},
|
||||
{Name: "EntryPoint", Value: "foobar"},
|
||||
{Name: "Middlewares", Value: "foobar,foobar"},
|
||||
{Name: "Statistics", Children: []*parser.Node{
|
||||
{Name: "RecentErrors", Value: "42"}}}}},
|
||||
{Name: "AccessLog", Children: []*parser.Node{
|
||||
{Name: "BufferingSize", Value: "42"},
|
||||
{Name: "Fields", Children: []*parser.Node{
|
||||
{Name: "DefaultMode", Value: "foobar"},
|
||||
{Name: "Headers", Children: []*parser.Node{
|
||||
{Name: "DefaultMode", Value: "foobar"},
|
||||
{Name: "Names", Children: []*parser.Node{
|
||||
{Name: "name0", Value: "foobar"},
|
||||
{Name: "name1", Value: "foobar"}}}}},
|
||||
{Name: "Names", Children: []*parser.Node{
|
||||
{Name: "name0", Value: "foobar"},
|
||||
{Name: "name1", Value: "foobar"}}}}},
|
||||
{Name: "FilePath", Value: "foobar"},
|
||||
{Name: "Filters", Children: []*parser.Node{
|
||||
{Name: "MinDuration", Value: "42"},
|
||||
{Name: "RetryAttempts", Value: "true"},
|
||||
{Name: "StatusCodes", Value: "foobar,foobar"}}},
|
||||
{Name: "Format", Value: "foobar"}}},
|
||||
{Name: "EntryPoints", Children: []*parser.Node{
|
||||
{Name: "EntryPoint0", Children: []*parser.Node{
|
||||
{Name: "Address", Value: "foobar"},
|
||||
{Name: "ForwardedHeaders", Children: []*parser.Node{
|
||||
{Name: "Insecure", Value: "true"},
|
||||
{Name: "TrustedIPs", Value: "foobar,foobar"}}},
|
||||
{Name: "ProxyProtocol", Children: []*parser.Node{
|
||||
{Name: "Insecure", Value: "true"},
|
||||
{Name: "TrustedIPs", Value: "foobar,foobar"}}},
|
||||
{Name: "Transport", Children: []*parser.Node{
|
||||
{Name: "LifeCycle", Children: []*parser.Node{
|
||||
{Name: "GraceTimeOut", Value: "42"},
|
||||
{Name: "RequestAcceptGraceTimeout", Value: "42"}}},
|
||||
{Name: "RespondingTimeouts", Children: []*parser.Node{
|
||||
{Name: "IdleTimeout", Value: "42"},
|
||||
{Name: "ReadTimeout", Value: "42"},
|
||||
{Name: "WriteTimeout", Value: "42"}}}}}}}}},
|
||||
{Name: "Global", Children: []*parser.Node{
|
||||
{Name: "CheckNewVersion", Value: "true"},
|
||||
{Name: "Debug", Value: "true"},
|
||||
{Name: "SendAnonymousUsage", Value: "true"}}},
|
||||
{Name: "HostResolver", Children: []*parser.Node{
|
||||
{Name: "CnameFlattening", Value: "true"},
|
||||
{Name: "ResolvConfig", Value: "foobar"},
|
||||
{Name: "ResolvDepth", Value: "42"}}},
|
||||
{Name: "Log", Children: []*parser.Node{
|
||||
{Name: "FilePath", Value: "foobar"},
|
||||
{Name: "Format", Value: "foobar"},
|
||||
{Name: "Level", Value: "foobar"}}},
|
||||
{Name: "Metrics", Children: []*parser.Node{
|
||||
{Name: "Datadog", Children: []*parser.Node{
|
||||
{Name: "Address", Value: "foobar"},
|
||||
{Name: "PushInterval", Value: "10s"}}},
|
||||
{Name: "InfluxDB", Children: []*parser.Node{
|
||||
{Name: "Address", Value: "foobar"},
|
||||
{Name: "Database", Value: "foobar"},
|
||||
{Name: "Password", Value: "foobar"},
|
||||
{Name: "Protocol", Value: "foobar"},
|
||||
{Name: "PushInterval", Value: "10s"},
|
||||
{Name: "RetentionPolicy", Value: "foobar"},
|
||||
{Name: "Username", Value: "foobar"}}},
|
||||
{Name: "Prometheus", Children: []*parser.Node{
|
||||
{Name: "Buckets", Value: "42,42"},
|
||||
{Name: "EntryPoint", Value: "foobar"},
|
||||
{Name: "Middlewares", Value: "foobar,foobar"}}},
|
||||
{Name: "StatsD", Children: []*parser.Node{
|
||||
{Name: "Address", Value: "foobar"},
|
||||
{Name: "PushInterval", Value: "10s"}}}}},
|
||||
{Name: "Ping", Children: []*parser.Node{
|
||||
{Name: "EntryPoint", Value: "foobar"},
|
||||
{Name: "Middlewares", Value: "foobar,foobar"}}},
|
||||
{Name: "Providers", Children: []*parser.Node{
|
||||
{Name: "Docker", Children: []*parser.Node{
|
||||
{Name: "Constraints", Children: []*parser.Node{
|
||||
{Name: "[0]", Children: []*parser.Node{
|
||||
{Name: "Key", Value: "foobar"},
|
||||
{Name: "MustMatch", Value: "true"},
|
||||
{Name: "Value", Value: "foobar"},
|
||||
}},
|
||||
{Name: "[1]", Children: []*parser.Node{
|
||||
{Name: "Key", Value: "foobar"},
|
||||
{Name: "MustMatch", Value: "true"},
|
||||
{Name: "Value", Value: "foobar"},
|
||||
}},
|
||||
}},
|
||||
{Name: "DefaultRule", Value: "foobar"},
|
||||
{Name: "Endpoint", Value: "foobar"},
|
||||
{Name: "ExposedByDefault", Value: "true"},
|
||||
{Name: "Network", Value: "foobar"},
|
||||
{Name: "SwarmMode", Value: "true"},
|
||||
{Name: "SwarmModeRefreshSeconds", Value: "42"},
|
||||
{Name: "TLS", Children: []*parser.Node{
|
||||
{Name: "CA", Value: "foobar"},
|
||||
{Name: "CAOptional", Value: "true"},
|
||||
{Name: "Cert", Value: "foobar"},
|
||||
{Name: "InsecureSkipVerify", Value: "true"},
|
||||
{Name: "Key", Value: "foobar"}}},
|
||||
{Name: "UseBindPortIP", Value: "true"},
|
||||
{Name: "Watch", Value: "true"}}},
|
||||
{Name: "File", Children: []*parser.Node{
|
||||
{Name: "DebugLogGeneratedTemplate", Value: "true"},
|
||||
{Name: "Directory", Value: "foobar"},
|
||||
{Name: "Filename", Value: "foobar"},
|
||||
{Name: "TraefikFile", Value: "foobar"},
|
||||
{Name: "Watch", Value: "true"}}},
|
||||
{Name: "Kubernetes", Children: []*parser.Node{
|
||||
{Name: "CertAuthFilePath", Value: "foobar"},
|
||||
{Name: "DisablePassHostHeaders", Value: "true"},
|
||||
{Name: "Endpoint", Value: "foobar"},
|
||||
{Name: "IngressClass", Value: "foobar"},
|
||||
{Name: "IngressEndpoint", Children: []*parser.Node{
|
||||
{Name: "Hostname", Value: "foobar"},
|
||||
{Name: "IP", Value: "foobar"},
|
||||
{Name: "PublishedService", Value: "foobar"}}},
|
||||
{Name: "LabelSelector", Value: "foobar"},
|
||||
{Name: "Namespaces", Value: "foobar,foobar"},
|
||||
{Name: "Token", Value: "foobar"}}},
|
||||
{Name: "KubernetesCRD",
|
||||
Children: []*parser.Node{
|
||||
{Name: "CertAuthFilePath", Value: "foobar"},
|
||||
{Name: "DisablePassHostHeaders", Value: "true"},
|
||||
{Name: "Endpoint", Value: "foobar"},
|
||||
{Name: "IngressClass", Value: "foobar"},
|
||||
{Name: "LabelSelector", Value: "foobar"},
|
||||
{Name: "Namespaces", Value: "foobar,foobar"},
|
||||
{Name: "Token", Value: "foobar"}}},
|
||||
{Name: "Marathon", Children: []*parser.Node{
|
||||
{Name: "Basic", Children: []*parser.Node{
|
||||
{Name: "HTTPBasicAuthUser", Value: "foobar"},
|
||||
{Name: "HTTPBasicPassword", Value: "foobar"}}},
|
||||
{Name: "Constraints", Children: []*parser.Node{
|
||||
{Name: "[0]", Children: []*parser.Node{
|
||||
{Name: "Key", Value: "foobar"},
|
||||
{Name: "MustMatch", Value: "true"},
|
||||
{Name: "Value", Value: "foobar"},
|
||||
}},
|
||||
{Name: "[1]", Children: []*parser.Node{
|
||||
{Name: "Key", Value: "foobar"},
|
||||
{Name: "MustMatch", Value: "true"},
|
||||
{Name: "Value", Value: "foobar"},
|
||||
}},
|
||||
}},
|
||||
{Name: "DCOSToken", Value: "foobar"},
|
||||
{Name: "DefaultRule", Value: "foobar"},
|
||||
{Name: "DialerTimeout", Value: "42"},
|
||||
{Name: "Endpoint", Value: "foobar"},
|
||||
{Name: "ExposedByDefault", Value: "true"},
|
||||
{Name: "FilterMarathonConstraints", Value: "true"},
|
||||
{Name: "ForceTaskHostname", Value: "true"},
|
||||
{Name: "KeepAlive", Value: "42"},
|
||||
{Name: "RespectReadinessChecks", Value: "true"},
|
||||
{Name: "ResponseHeaderTimeout", Value: "42"},
|
||||
{Name: "TLS", Children: []*parser.Node{
|
||||
{Name: "CA", Value: "foobar"},
|
||||
{Name: "CAOptional", Value: "true"},
|
||||
{Name: "Cert", Value: "foobar"},
|
||||
{Name: "InsecureSkipVerify", Value: "true"},
|
||||
{Name: "Key", Value: "foobar"}}},
|
||||
{Name: "TLSHandshakeTimeout", Value: "42"},
|
||||
{Name: "Trace", Value: "true"},
|
||||
{Name: "Watch", Value: "true"}}},
|
||||
{Name: "ProvidersThrottleDuration", Value: "42"},
|
||||
{Name: "Rancher", Children: []*parser.Node{
|
||||
{Name: "Constraints", Children: []*parser.Node{
|
||||
{Name: "[0]", Children: []*parser.Node{
|
||||
{Name: "Key", Value: "foobar"},
|
||||
{Name: "MustMatch", Value: "true"},
|
||||
{Name: "Value", Value: "foobar"},
|
||||
}},
|
||||
{Name: "[1]", Children: []*parser.Node{
|
||||
{Name: "Key", Value: "foobar"},
|
||||
{Name: "MustMatch", Value: "true"},
|
||||
{Name: "Value", Value: "foobar"},
|
||||
}},
|
||||
}},
|
||||
{Name: "DefaultRule", Value: "foobar"},
|
||||
{Name: "EnableServiceHealthFilter", Value: "true"},
|
||||
{Name: "ExposedByDefault", Value: "true"},
|
||||
{Name: "IntervalPoll", Value: "true"},
|
||||
{Name: "Prefix", Value: "foobar"},
|
||||
{Name: "RefreshSeconds", Value: "42"},
|
||||
{Name: "Watch", Value: "true"}}},
|
||||
{Name: "Rest", Children: []*parser.Node{
|
||||
{Name: "EntryPoint", Value: "foobar"}}}}},
|
||||
{Name: "ServersTransport", Children: []*parser.Node{
|
||||
{Name: "ForwardingTimeouts", Children: []*parser.Node{
|
||||
{Name: "DialTimeout", Value: "42"},
|
||||
{Name: "ResponseHeaderTimeout", Value: "42"}}},
|
||||
{Name: "InsecureSkipVerify", Value: "true"},
|
||||
{Name: "MaxIdleConnsPerHost", Value: "42"},
|
||||
{Name: "RootCAs", Value: "foobar,foobar"}}},
|
||||
{Name: "Tracing", Children: []*parser.Node{
|
||||
{Name: "Backend", Value: "foobar"},
|
||||
{Name: "DataDog", Children: []*parser.Node{
|
||||
{Name: "BagagePrefixHeaderName", Value: "foobar"},
|
||||
{Name: "Debug", Value: "true"},
|
||||
{Name: "GlobalTag", Value: "foobar"},
|
||||
{Name: "LocalAgentHostPort", Value: "foobar"},
|
||||
{Name: "ParentIDHeaderName", Value: "foobar"},
|
||||
{Name: "PrioritySampling", Value: "true"},
|
||||
{Name: "SamplingPriorityHeaderName", Value: "foobar"},
|
||||
{Name: "TraceIDHeaderName", Value: "foobar"}}},
|
||||
{Name: "Instana", Children: []*parser.Node{
|
||||
{Name: "LocalAgentHost", Value: "foobar"},
|
||||
{Name: "LocalAgentPort", Value: "42"},
|
||||
{Name: "LogLevel", Value: "foobar"}}},
|
||||
{Name: "Jaeger", Children: []*parser.Node{
|
||||
{Name: "Gen128Bit", Value: "true"},
|
||||
{Name: "LocalAgentHostPort", Value: "foobar"},
|
||||
{Name: "Propagation", Value: "foobar"},
|
||||
{Name: "SamplingParam", Value: "42"},
|
||||
{Name: "SamplingServerURL", Value: "foobar"},
|
||||
{Name: "SamplingType", Value: "foobar"},
|
||||
{Name: "TraceContextHeaderName", Value: "foobar"}}},
|
||||
{Name: "ServiceName", Value: "foobar"},
|
||||
{Name: "SpanNameLimit", Value: "42"},
|
||||
{Name: "Zipkin", Children: []*parser.Node{
|
||||
{Name: "Debug", Value: "true"},
|
||||
{Name: "HTTPEndpoint", Value: "foobar"},
|
||||
{Name: "ID128Bit", Value: "true"},
|
||||
{Name: "SameSpan", Value: "true"},
|
||||
{Name: "SampleRate", Value: "42"}}}}}},
|
||||
}
|
||||
|
||||
assert.Equal(t, expected, node)
|
||||
}
|
76  pkg/config/file/file_test.go  Normal file
@@ -0,0 +1,76 @@
package file

import (
	"io/ioutil"
	"os"
	"testing"

	"github.com/stretchr/testify/assert"
	"github.com/stretchr/testify/require"
)

func TestDecode_TOML(t *testing.T) {
	f, err := ioutil.TempFile("", "traefik-config-*.toml")
	require.NoError(t, err)
	defer func() {
		_ = os.Remove(f.Name())
	}()

	_, err = f.Write([]byte(`
foo = "bar"
fii = "bir"
[yi]
`))
	require.NoError(t, err)

	element := &Yo{
		Fuu: "test",
	}

	err = Decode(f.Name(), element)
	require.NoError(t, err)

	expected := &Yo{
		Foo: "bar",
		Fii: "bir",
		Fuu: "test",
		Yi: &Yi{
			Foo: "foo",
			Fii: "fii",
		},
	}
	assert.Equal(t, expected, element)
}

func TestDecode_YAML(t *testing.T) {
	f, err := ioutil.TempFile("", "traefik-config-*.yaml")
	require.NoError(t, err)
	defer func() {
		_ = os.Remove(f.Name())
	}()

	_, err = f.Write([]byte(`
foo: bar
fii: bir
yi: {}
`))
	require.NoError(t, err)

	element := &Yo{
		Fuu: "test",
	}

	err = Decode(f.Name(), element)
	require.NoError(t, err)

	expected := &Yo{
		Foo: "bar",
		Fii: "bir",
		Fuu: "test",
		Yi: &Yi{
			Foo: "foo",
			Fii: "fii",
		},
	}
	assert.Equal(t, expected, element)
}
539  pkg/config/file/fixtures/sample.toml  Normal file
@ -0,0 +1,539 @@
|
|||
[Global]
|
||||
Debug = true
|
||||
CheckNewVersion = true
|
||||
SendAnonymousUsage = true
|
||||
|
||||
[ServersTransport]
|
||||
InsecureSkipVerify = true
|
||||
RootCAs = ["foobar", "foobar"]
|
||||
MaxIdleConnsPerHost = 42
|
||||
[ServersTransport.ForwardingTimeouts]
|
||||
DialTimeout = 42
|
||||
ResponseHeaderTimeout = 42
|
||||
|
||||
[EntryPoints]
|
||||
|
||||
[EntryPoints.EntryPoint0]
|
||||
Address = "foobar"
|
||||
[EntryPoints.EntryPoint0.Transport]
|
||||
[EntryPoints.EntryPoint0.Transport.LifeCycle]
|
||||
RequestAcceptGraceTimeout = 42
|
||||
GraceTimeOut = 42
|
||||
[EntryPoints.EntryPoint0.Transport.RespondingTimeouts]
|
||||
ReadTimeout = 42
|
||||
WriteTimeout = 42
|
||||
IdleTimeout = 42
|
||||
[EntryPoints.EntryPoint0.ProxyProtocol]
|
||||
Insecure = true
|
||||
TrustedIPs = ["foobar", "foobar"]
|
||||
[EntryPoints.EntryPoint0.ForwardedHeaders]
|
||||
Insecure = true
|
||||
TrustedIPs = ["foobar", "foobar"]
|
||||
|
||||
[Providers]
|
||||
ProvidersThrottleDuration = 42
|
||||
|
||||
[Providers.Docker]
|
||||
Watch = true
|
||||
Endpoint = "foobar"
|
||||
DefaultRule = "foobar"
|
||||
ExposedByDefault = true
|
||||
UseBindPortIP = true
|
||||
SwarmMode = true
|
||||
Network = "foobar"
|
||||
SwarmModeRefreshSeconds = 42
|
||||
|
||||
[[Providers.Docker.Constraints]]
|
||||
Key = "foobar"
|
||||
MustMatch = true
|
||||
Value = "foobar"
|
||||
|
||||
[[Providers.Docker.Constraints]]
|
||||
Key = "foobar"
|
||||
MustMatch = true
|
||||
Value = "foobar"
|
||||
|
||||
[Providers.Docker.TLS]
|
||||
CA = "foobar"
|
||||
CAOptional = true
|
||||
Cert = "foobar"
|
||||
Key = "foobar"
|
||||
InsecureSkipVerify = true
|
||||
|
||||
[Providers.File]
|
||||
Directory = "foobar"
|
||||
Watch = true
|
||||
Filename = "foobar"
|
||||
DebugLogGeneratedTemplate = true
|
||||
TraefikFile = "foobar"
|
||||
|
||||
[Providers.Marathon]
|
||||
Trace = true
|
||||
Watch = true
|
||||
Endpoint = "foobar"
|
||||
DefaultRule = "foobar"
|
||||
ExposedByDefault = true
|
||||
DCOSToken = "foobar"
|
||||
FilterMarathonConstraints = true
|
||||
DialerTimeout = 42
|
||||
ResponseHeaderTimeout = 42
|
||||
TLSHandshakeTimeout = 42
|
||||
KeepAlive = 42
|
||||
ForceTaskHostname = true
|
||||
RespectReadinessChecks = true
|
||||
|
||||
[[Providers.Marathon.Constraints]]
|
||||
Key = "foobar"
|
||||
MustMatch = true
|
||||
Value = "foobar"
|
||||
|
||||
[[Providers.Marathon.Constraints]]
|
||||
Key = "foobar"
|
||||
MustMatch = true
|
||||
Value = "foobar"
|
||||
|
||||
[Providers.Marathon.TLS]
|
||||
CA = "foobar"
|
||||
CAOptional = true
|
||||
Cert = "foobar"
|
||||
Key = "foobar"
|
||||
InsecureSkipVerify = true
|
||||
[Providers.Marathon.Basic]
|
||||
HTTPBasicAuthUser = "foobar"
|
||||
HTTPBasicPassword = "foobar"
|
||||
|
||||
[Providers.Kubernetes]
|
||||
Endpoint = "foobar"
|
||||
Token = "foobar"
|
||||
CertAuthFilePath = "foobar"
|
||||
DisablePassHostHeaders = true
|
||||
Namespaces = ["foobar", "foobar"]
|
||||
LabelSelector = "foobar"
|
||||
IngressClass = "foobar"
|
||||
[Providers.Kubernetes.IngressEndpoint]
|
||||
IP = "foobar"
|
||||
Hostname = "foobar"
|
||||
PublishedService = "foobar"
|
||||
|
||||
[Providers.KubernetesCRD]
|
||||
Endpoint = "foobar"
|
||||
Token = "foobar"
|
||||
CertAuthFilePath = "foobar"
|
||||
DisablePassHostHeaders = true
|
||||
Namespaces = ["foobar", "foobar"]
|
||||
LabelSelector = "foobar"
|
||||
IngressClass = "foobar"
|
||||
|
||||
[Providers.Rest]
|
||||
EntryPoint = "foobar"
|
||||
|
||||
[Providers.Rancher]
|
||||
Watch = true
|
||||
DefaultRule = "foobar"
|
||||
ExposedByDefault = true
|
||||
EnableServiceHealthFilter = true
|
||||
RefreshSeconds = 42
|
||||
IntervalPoll = true
|
||||
Prefix = "foobar"
|
||||
|
||||
[[Providers.Rancher.Constraints]]
|
||||
Key = "foobar"
|
||||
MustMatch = true
|
||||
Value = "foobar"
|
||||
|
||||
[[Providers.Rancher.Constraints]]
|
||||
Key = "foobar"
|
||||
MustMatch = true
|
||||
Value = "foobar"
|
||||
|
||||
[API]
|
||||
EntryPoint = "foobar"
|
||||
Dashboard = true
|
||||
Middlewares = ["foobar", "foobar"]
|
||||
[API.Statistics]
|
||||
RecentErrors = 42
|
||||
|
||||
[Metrics]
|
||||
|
||||
[Metrics.Prometheus]
|
||||
Buckets = [42.0, 42.0]
|
||||
EntryPoint = "foobar"
|
||||
Middlewares = ["foobar", "foobar"]
|
||||
|
||||
[Metrics.Datadog]
|
||||
Address = "foobar"
|
||||
PushInterval = "10s"
|
||||
|
||||
[Metrics.StatsD]
|
||||
Address = "foobar"
|
||||
PushInterval = "10s"
|
||||
|
||||
[Metrics.InfluxDB]
|
||||
Address = "foobar"
|
||||
Protocol = "foobar"
|
||||
PushInterval = "10s"
|
||||
Database = "foobar"
|
||||
RetentionPolicy = "foobar"
|
||||
Username = "foobar"
|
||||
Password = "foobar"
|
||||
|
||||
[Ping]
|
||||
EntryPoint = "foobar"
|
||||
Middlewares = ["foobar", "foobar"]
|
||||
|
||||
[Log]
|
||||
Level = "foobar"
|
||||
FilePath = "foobar"
|
||||
Format = "foobar"
|
||||
|
||||
[AccessLog]
|
||||
FilePath = "foobar"
|
||||
Format = "foobar"
|
||||
BufferingSize = 42
|
||||
[AccessLog.Filters]
|
||||
StatusCodes = ["foobar", "foobar"]
|
||||
RetryAttempts = true
|
||||
MinDuration = 42
|
||||
[AccessLog.Fields]
|
||||
DefaultMode = "foobar"
|
||||
[AccessLog.Fields.Names]
|
||||
name0 = "foobar"
|
||||
name1 = "foobar"
|
||||
[AccessLog.Fields.Headers]
|
||||
DefaultMode = "foobar"
|
||||
[AccessLog.Fields.Headers.Names]
|
||||
name0 = "foobar"
|
||||
name1 = "foobar"
|
||||
|
||||
[Tracing]
|
||||
Backend = "foobar"
|
||||
ServiceName = "foobar"
|
||||
SpanNameLimit = 42
|
||||
|
||||
[Tracing.Jaeger]
|
||||
SamplingServerURL = "foobar"
|
||||
SamplingType = "foobar"
|
||||
SamplingParam = 42.0
|
||||
LocalAgentHostPort = "foobar"
|
||||
Gen128Bit = true
|
||||
Propagation = "foobar"
|
||||
TraceContextHeaderName = "foobar"
|
||||
|
||||
[Tracing.Zipkin]
|
||||
HTTPEndpoint = "foobar"
|
||||
SameSpan = true
|
||||
ID128Bit = true
|
||||
Debug = true
|
||||
SampleRate = 42.0
|
||||
|
||||
[Tracing.DataDog]
|
||||
LocalAgentHostPort = "foobar"
|
||||
GlobalTag = "foobar"
|
||||
Debug = true
|
||||
PrioritySampling = true
|
||||
TraceIDHeaderName = "foobar"
|
||||
ParentIDHeaderName = "foobar"
|
||||
SamplingPriorityHeaderName = "foobar"
|
||||
BagagePrefixHeaderName = "foobar"
|
||||
|
||||
[Tracing.Instana]
|
||||
LocalAgentHost = "foobar"
|
||||
LocalAgentPort = 42
|
||||
LogLevel = "foobar"
|
||||
|
||||
[HostResolver]
|
||||
CnameFlattening = true
|
||||
ResolvConfig = "foobar"
|
||||
ResolvDepth = 42
|
||||
|
||||
[ACME]
|
||||
Email = "foobar"
|
||||
ACMELogging = true
|
||||
CAServer = "foobar"
|
||||
Storage = "foobar"
|
||||
EntryPoint = "foobar"
|
||||
KeyType = "foobar"
|
||||
OnHostRule = true
|
||||
|
||||
[ACME.DNSChallenge]
|
||||
Provider = "foobar"
|
||||
DelayBeforeCheck = 42
|
||||
Resolvers = ["foobar", "foobar"]
|
||||
DisablePropagationCheck = true
|
||||
|
||||
[ACME.HTTPChallenge]
|
||||
EntryPoint = "foobar"
|
||||
|
||||
[ACME.TLSChallenge]
|
||||
|
||||
[[ACME.Domains]]
|
||||
Main = "foobar"
|
||||
SANs = ["foobar", "foobar"]
|
||||
|
||||
[[ACME.Domains]]
|
||||
Main = "foobar"
|
||||
SANs = ["foobar", "foobar"]
|
||||
|
||||
#### Dynamic configuration
|
||||
|
||||
[HTTP]
|
||||
|
||||
[HTTP.Routers]
|
||||
|
||||
[HTTP.Routers.Router0]
|
||||
EntryPoints = ["foobar", "foobar"]
|
||||
Middlewares = ["foobar", "foobar"]
|
||||
Service = "foobar"
|
||||
Rule = "foobar"
|
||||
priority = 42
|
||||
[HTTP.Routers.Router0.tls]
|
||||
|
||||
[HTTP.Middlewares]
|
||||
|
||||
[HTTP.Middlewares.Middleware0.AddPrefix]
|
||||
Prefix = "foobar"
|
||||
|
||||
[HTTP.Middlewares.Middleware1.StripPrefix]
|
||||
Prefixes = ["foobar", "foobar"]
|
||||
|
||||
[HTTP.Middlewares.Middleware2.StripPrefixRegex]
|
||||
Regex = ["foobar", "foobar"]
|
||||
|
||||
[HTTP.Middlewares.Middleware3.ReplacePath]
|
||||
Path = "foobar"
|
||||
|
||||
[HTTP.Middlewares.Middleware4.ReplacePathRegex]
|
||||
Regex = "foobar"
|
||||
Replacement = "foobar"
|
||||
|
||||
[HTTP.Middlewares.Middleware5.Chain]
|
||||
Middlewares = ["foobar", "foobar"]
|
||||
|
||||
[HTTP.Middlewares.Middleware6.IPWhiteList]
|
||||
SourceRange = ["foobar", "foobar"]
|
||||
|
||||
[HTTP.Middlewares.Middleware7.IPWhiteList.IPStrategy]
|
||||
Depth = 42
|
||||
ExcludedIPs = ["foobar", "foobar"]
|
||||
|
||||
[HTTP.Middlewares.Middleware8.Headers]
|
||||
AccessControlAllowCredentials = true
|
||||
AccessControlAllowHeaders = ["foobar", "foobar"]
|
||||
AccessControlAllowMethods = ["foobar", "foobar"]
|
||||
AccessControlAllowOrigin = "foobar"
|
||||
AccessControlExposeHeaders = ["foobar", "foobar"]
|
||||
AccessControlMaxAge = 42
|
||||
AddVaryHeader = true
|
||||
AllowedHosts = ["foobar", "foobar"]
|
||||
HostsProxyHeaders = ["foobar", "foobar"]
|
||||
SSLRedirect = true
|
||||
SSLTemporaryRedirect = true
|
||||
SSLHost = "foobar"
|
||||
SSLForceHost = true
|
||||
STSSeconds = 42
|
||||
STSIncludeSubdomains = true
|
||||
STSPreload = true
|
||||
ForceSTSHeader = true
|
||||
FrameDeny = true
|
||||
CustomFrameOptionsValue = "foobar"
|
||||
ContentTypeNosniff = true
|
||||
BrowserXSSFilter = true
|
||||
CustomBrowserXSSValue = "foobar"
|
||||
ContentSecurityPolicy = "foobar"
|
||||
PublicKey = "foobar"
|
||||
ReferrerPolicy = "foobar"
|
||||
IsDevelopment = true
|
||||
[HTTP.Middlewares.Middleware8.Headers.CustomRequestHeaders]
|
||||
name0 = "foobar"
|
||||
name1 = "foobar"
|
||||
[HTTP.Middlewares.Middleware8.Headers.CustomResponseHeaders]
|
||||
name0 = "foobar"
|
||||
name1 = "foobar"
|
||||
[HTTP.Middlewares.Middleware8.Headers.SSLProxyHeaders]
|
||||
name0 = "foobar"
|
||||
name1 = "foobar"
|
||||
|
||||
[HTTP.Middlewares.Middleware9.Errors]
|
||||
Status = ["foobar", "foobar"]
|
||||
Service = "foobar"
|
||||
Query = "foobar"
|
||||
|
||||
[HTTP.Middlewares.Middleware10.RateLimit]
|
||||
ExtractorFunc = "foobar"
|
||||
[HTTP.Middlewares.Middleware10.RateLimit.RateSet]
|
||||
[HTTP.Middlewares.Middleware10.RateLimit.RateSet.Rate0]
|
||||
Period = 42
|
||||
Average = 42
|
||||
Burst = 42
|
||||
[HTTP.Middlewares.Middleware10.RateLimit.RateSet.Rate1]
|
||||
Period = 42
|
||||
Average = 42
|
||||
Burst = 42
|
||||
|
||||
[HTTP.Middlewares.Middleware11.RedirectRegex]
|
||||
Regex = "foobar"
|
||||
Replacement = "foobar"
|
||||
Permanent = true
|
||||
|
||||
[HTTP.Middlewares.Middleware12.RedirectScheme]
|
||||
Scheme = "foobar"
|
||||
Port = "foobar"
|
||||
Permanent = true
|
||||
|
||||
[HTTP.Middlewares.Middleware13.BasicAuth]
|
||||
Users = ["foobar", "foobar"]
|
||||
UsersFile = "foobar"
|
||||
Realm = "foobar"
|
||||
RemoveHeader = true
|
||||
HeaderField = "foobar"
|
||||
|
||||
[HTTP.Middlewares.Middleware14.DigestAuth]
|
||||
Users = ["foobar", "foobar"]
|
||||
UsersFile = "foobar"
|
||||
RemoveHeader = true
|
||||
Realm = "foobar"
|
||||
HeaderField = "foobar"
|
||||
|
||||
[HTTP.Middlewares.Middleware15.ForwardAuth]
|
||||
Address = "foobar"
|
||||
TrustForwardHeader = true
|
||||
AuthResponseHeaders = ["foobar", "foobar"]
|
||||
[HTTP.Middlewares.Middleware15.ForwardAuth.TLS]
|
||||
CA = "foobar"
|
||||
CAOptional = true
|
||||
Cert = "foobar"
|
||||
Key = "foobar"
|
||||
InsecureSkipVerify = true
|
||||
|
||||
[HTTP.Middlewares.Middleware16.MaxConn]
|
||||
Amount = 42
|
||||
ExtractorFunc = "foobar"
|
||||
|
||||
[HTTP.Middlewares.Middleware17.Buffering]
|
||||
MaxRequestBodyBytes = 42
|
||||
MemRequestBodyBytes = 42
|
||||
MaxResponseBodyBytes = 42
|
||||
MemResponseBodyBytes = 42
|
||||
RetryExpression = "foobar"
|
||||
|
||||
[HTTP.Middlewares.Middleware18.CircuitBreaker]
|
||||
Expression = "foobar"
|
||||
|
||||
[HTTP.Middlewares.Middleware19.Compress]
|
||||
|
||||
[HTTP.Middlewares.Middleware20.PassTLSClientCert]
|
||||
PEM = true
|
||||
[HTTP.Middlewares.Middleware20.PassTLSClientCert.Info]
|
||||
NotAfter = true
|
||||
NotBefore = true
|
||||
Sans = true
|
||||
[HTTP.Middlewares.Middleware20.PassTLSClientCert.Info.Subject]
|
||||
Country = true
|
||||
Province = true
|
||||
Locality = true
|
||||
Organization = true
|
||||
CommonName = true
|
||||
SerialNumber = true
|
||||
DomainComponent = true
|
||||
[HTTP.Middlewares.Middleware20.PassTLSClientCert.Info.Issuer]
|
||||
Country = true
|
||||
Province = true
|
||||
Locality = true
|
||||
Organization = true
|
||||
CommonName = true
|
||||
SerialNumber = true
|
||||
DomainComponent = true
|
||||
|
||||
[HTTP.Middlewares.Middleware21.Retry]
|
||||
Attempts = 42
|
||||
|
||||
[HTTP.Services]
|
||||
[HTTP.Services.Service0]
|
||||
[HTTP.Services.Service0.LoadBalancer]
|
||||
Method = "foobar"
|
||||
PassHostHeader = true
|
||||
|
||||
[[HTTP.Services.Service0.LoadBalancer.Servers]]
|
||||
URL = "foobar"
|
||||
|
||||
[HTTP.Services.Service0.LoadBalancer.Stickiness]
|
||||
CookieName = "foobar"
|
||||
|
||||
[[HTTP.Services.Service0.LoadBalancer.Servers]]
|
||||
URL = "foobar"
|
||||
|
||||
[HTTP.Services.Service0.LoadBalancer.HealthCheck]
|
||||
Scheme = "foobar"
|
||||
Path = "foobar"
|
||||
Port = 42
|
||||
Interval = "foobar"
|
||||
Timeout = "foobar"
|
||||
Hostname = "foobar"
|
||||
[HTTP.Services.Service0.LoadBalancer.HealthCheck.Headers]
|
||||
name0 = "foobar"
|
||||
name1 = "foobar"
|
||||
[HTTP.Services.Service0.LoadBalancer.ResponseForwarding]
|
||||
FlushInterval = "foobar"
|
||||
|
||||
[TCP]
|
||||
|
||||
[TCP.Routers]
|
||||
|
||||
[TCP.Routers.TCPRouter0]
|
||||
EntryPoints = ["foobar", "foobar"]
|
||||
Service = "foobar"
|
||||
Rule = "foobar"
|
||||
[TCP.Routers.TCPRouter0.tls]
|
||||
passthrough = true
|
||||
|
||||
[TCP.Services]
|
||||
|
||||
[TCP.Services.TCPService0]
|
||||
[TCP.Services.TCPService0.LoadBalancer]
|
||||
Method = "foobar"
|
||||
|
||||
[[TCP.Services.TCPService0.LoadBalancer.Servers]]
|
||||
Address = "foobar"
|
||||
|
||||
[[TCP.Services.TCPService0.LoadBalancer.Servers]]
|
||||
Address = "foobar"
|
||||
|
||||
[[TLS]]
|
||||
Stores = ["foobar", "foobar"]
|
||||
[TLS.Certificate]
|
||||
CertFile = "foobar"
|
||||
KeyFile = "foobar"
|
||||
|
||||
[[TLS]]
|
||||
Stores = ["foobar", "foobar"]
|
||||
[TLS.Certificate]
|
||||
CertFile = "foobar"
|
||||
KeyFile = "foobar"
|
||||
|
||||
[TLSOptions]
|
||||
|
||||
[TLSOptions.TLS0]
|
||||
MinVersion = "foobar"
|
||||
CipherSuites = ["foobar", "foobar"]
|
||||
SniStrict = true
|
||||
[TLSOptions.TLS0.ClientCA]
|
||||
Files = ["foobar", "foobar"]
|
||||
Optional = true
|
||||
[TLSOptions.TLS1]
|
||||
MinVersion = "foobar"
|
||||
CipherSuites = ["foobar", "foobar"]
|
||||
SniStrict = true
|
||||
[TLSOptions.TLS1.ClientCA]
|
||||
Files = ["foobar", "foobar"]
|
||||
Optional = true
|
||||
|
||||
[TLSStores]
|
||||
|
||||
[TLSStores.Store0]
|
||||
[TLSStores.Store0.DefaultCertificate]
|
||||
CertFile = "foobar"
|
||||
KeyFile = "foobar"
|
||||
[TLSStores.Store1]
|
||||
[TLSStores.Store1.DefaultCertificate]
|
||||
CertFile = "foobar"
|
||||
KeyFile = "foobar"
|
257
pkg/config/file/fixtures/sample.yml
Normal file
@@ -0,0 +1,257 @@
Global:
|
||||
Debug: true
|
||||
CheckNewVersion: true
|
||||
SendAnonymousUsage: true
|
||||
ServersTransport:
|
||||
InsecureSkipVerify: true
|
||||
RootCAs:
|
||||
- foobar
|
||||
- foobar
|
||||
MaxIdleConnsPerHost: 42
|
||||
ForwardingTimeouts:
|
||||
DialTimeout: 42
|
||||
ResponseHeaderTimeout: 42
|
||||
EntryPoints:
|
||||
EntryPoint0:
|
||||
Address: foobar
|
||||
Transport:
|
||||
LifeCycle:
|
||||
RequestAcceptGraceTimeout: 42
|
||||
GraceTimeOut: 42
|
||||
RespondingTimeouts:
|
||||
ReadTimeout: 42
|
||||
WriteTimeout: 42
|
||||
IdleTimeout: 42
|
||||
ProxyProtocol:
|
||||
Insecure: true
|
||||
TrustedIPs:
|
||||
- foobar
|
||||
- foobar
|
||||
ForwardedHeaders:
|
||||
Insecure: true
|
||||
TrustedIPs:
|
||||
- foobar
|
||||
- foobar
|
||||
Providers:
|
||||
ProvidersThrottleDuration: 42
|
||||
Docker:
|
||||
Watch: true
|
||||
Endpoint: foobar
|
||||
DefaultRule: foobar
|
||||
ExposedByDefault: true
|
||||
UseBindPortIP: true
|
||||
SwarmMode: true
|
||||
Network: foobar
|
||||
SwarmModeRefreshSeconds: 42
|
||||
Constraints:
|
||||
- Key: foobar
|
||||
MustMatch: true
|
||||
Value: foobar
|
||||
- Key: foobar
|
||||
MustMatch: true
|
||||
Value: foobar
|
||||
TLS:
|
||||
CA: foobar
|
||||
CAOptional: true
|
||||
Cert: foobar
|
||||
Key: foobar
|
||||
InsecureSkipVerify: true
|
||||
File:
|
||||
Directory: foobar
|
||||
Watch: true
|
||||
Filename: foobar
|
||||
DebugLogGeneratedTemplate: true
|
||||
TraefikFile: foobar
|
||||
Marathon:
|
||||
Trace: true
|
||||
Watch: true
|
||||
Endpoint: foobar
|
||||
DefaultRule: foobar
|
||||
ExposedByDefault: true
|
||||
DCOSToken: foobar
|
||||
FilterMarathonConstraints: true
|
||||
DialerTimeout: 42
|
||||
ResponseHeaderTimeout: 42
|
||||
TLSHandshakeTimeout: 42
|
||||
KeepAlive: 42
|
||||
ForceTaskHostname: true
|
||||
RespectReadinessChecks: true
|
||||
Constraints:
|
||||
- Key: foobar
|
||||
MustMatch: true
|
||||
Value: foobar
|
||||
- Key: foobar
|
||||
MustMatch: true
|
||||
Value: foobar
|
||||
TLS:
|
||||
CA: foobar
|
||||
CAOptional: true
|
||||
Cert: foobar
|
||||
Key: foobar
|
||||
InsecureSkipVerify: true
|
||||
Basic:
|
||||
HTTPBasicAuthUser: foobar
|
||||
HTTPBasicPassword: foobar
|
||||
Kubernetes:
|
||||
Endpoint: foobar
|
||||
Token: foobar
|
||||
CertAuthFilePath: foobar
|
||||
DisablePassHostHeaders: true
|
||||
Namespaces:
|
||||
- foobar
|
||||
- foobar
|
||||
LabelSelector: foobar
|
||||
IngressClass: foobar
|
||||
IngressEndpoint:
|
||||
IP: foobar
|
||||
Hostname: foobar
|
||||
PublishedService: foobar
|
||||
KubernetesCRD:
|
||||
Endpoint: foobar
|
||||
Token: foobar
|
||||
CertAuthFilePath: foobar
|
||||
DisablePassHostHeaders: true
|
||||
Namespaces:
|
||||
- foobar
|
||||
- foobar
|
||||
LabelSelector: foobar
|
||||
IngressClass: foobar
|
||||
Rest:
|
||||
EntryPoint: foobar
|
||||
Rancher:
|
||||
Watch: true
|
||||
DefaultRule: foobar
|
||||
ExposedByDefault: true
|
||||
EnableServiceHealthFilter: true
|
||||
RefreshSeconds: 42
|
||||
IntervalPoll: true
|
||||
Prefix: foobar
|
||||
Constraints:
|
||||
- Key: foobar
|
||||
MustMatch: true
|
||||
Value: foobar
|
||||
- Key: foobar
|
||||
MustMatch: true
|
||||
Value: foobar
|
||||
API:
|
||||
EntryPoint: foobar
|
||||
Dashboard: true
|
||||
Middlewares:
|
||||
- foobar
|
||||
- foobar
|
||||
Statistics:
|
||||
RecentErrors: 42
|
||||
Metrics:
|
||||
Prometheus:
|
||||
Buckets:
|
||||
- 42
|
||||
- 42
|
||||
EntryPoint: foobar
|
||||
Middlewares:
|
||||
- foobar
|
||||
- foobar
|
||||
Datadog:
|
||||
Address: foobar
|
||||
PushInterval: 10s
|
||||
StatsD:
|
||||
Address: foobar
|
||||
PushInterval: 10s
|
||||
InfluxDB:
|
||||
Address: foobar
|
||||
Protocol: foobar
|
||||
PushInterval: 10s
|
||||
Database: foobar
|
||||
RetentionPolicy: foobar
|
||||
Username: foobar
|
||||
Password: foobar
|
||||
Ping:
|
||||
EntryPoint: foobar
|
||||
Middlewares:
|
||||
- foobar
|
||||
- foobar
|
||||
Log:
|
||||
Level: foobar
|
||||
FilePath: foobar
|
||||
Format: foobar
|
||||
AccessLog:
|
||||
FilePath: foobar
|
||||
Format: foobar
|
||||
BufferingSize: 42
|
||||
Filters:
|
||||
StatusCodes:
|
||||
- foobar
|
||||
- foobar
|
||||
RetryAttempts: true
|
||||
MinDuration: 42
|
||||
Fields:
|
||||
DefaultMode: foobar
|
||||
Names:
|
||||
name0: foobar
|
||||
name1: foobar
|
||||
Headers:
|
||||
DefaultMode: foobar
|
||||
Names:
|
||||
name0: foobar
|
||||
name1: foobar
|
||||
Tracing:
|
||||
Backend: foobar
|
||||
ServiceName: foobar
|
||||
SpanNameLimit: 42
|
||||
Jaeger:
|
||||
SamplingServerURL: foobar
|
||||
SamplingType: foobar
|
||||
SamplingParam: 42
|
||||
LocalAgentHostPort: foobar
|
||||
Gen128Bit: true
|
||||
Propagation: foobar
|
||||
TraceContextHeaderName: foobar
|
||||
Zipkin:
|
||||
HTTPEndpoint: foobar
|
||||
SameSpan: true
|
||||
ID128Bit: true
|
||||
Debug: true
|
||||
SampleRate: 42
|
||||
DataDog:
|
||||
LocalAgentHostPort: foobar
|
||||
GlobalTag: foobar
|
||||
Debug: true
|
||||
PrioritySampling: true
|
||||
TraceIDHeaderName: foobar
|
||||
ParentIDHeaderName: foobar
|
||||
SamplingPriorityHeaderName: foobar
|
||||
BagagePrefixHeaderName: foobar
|
||||
Instana:
|
||||
LocalAgentHost: foobar
|
||||
LocalAgentPort: 42
|
||||
LogLevel: foobar
|
||||
HostResolver:
|
||||
CnameFlattening: true
|
||||
ResolvConfig: foobar
|
||||
ResolvDepth: 42
|
||||
ACME:
|
||||
Email: foobar
|
||||
ACMELogging: true
|
||||
CAServer: foobar
|
||||
Storage: foobar
|
||||
EntryPoint: foobar
|
||||
KeyType: foobar
|
||||
OnHostRule: true
|
||||
DNSChallenge:
|
||||
Provider: foobar
|
||||
DelayBeforeCheck: 42
|
||||
Resolvers:
|
||||
- foobar
|
||||
- foobar
|
||||
DisablePropagationCheck: true
|
||||
HTTPChallenge:
|
||||
EntryPoint: foobar
|
||||
TLSChallenge: {}
|
||||
Domains:
|
||||
- Main: foobar
|
||||
SANs:
|
||||
- foobar
|
||||
- foobar
|
||||
- Main: foobar
|
||||
SANs:
|
||||
- foobar
|
||||
- foobar
|
34
pkg/config/file/fixtures_test.go
Normal file
@@ -0,0 +1,34 @@
package file

type bar string

type Yo struct {
    Foo string
    Fii string
    Fuu string
    Yi  *Yi `label:"allowEmpty"`
}

func (y *Yo) SetDefaults() {
    y.Foo = "foo"
    y.Fii = "fii"
}

type Yi struct {
    Foo string
    Fii string
    Fuu string
}

func (y *Yi) SetDefaults() {
    y.Foo = "foo"
    y.Fii = "fii"
}

type Yu struct {
    Yi
}

type Ye struct {
    *Yi
}
128
pkg/config/file/raw_node.go
Normal file
@@ -0,0 +1,128 @@
package file

import (
    "reflect"
    "sort"
    "strconv"
    "strings"

    "github.com/containous/traefik/pkg/config/parser"
)

func decodeRawToNode(data map[string]interface{}, filters ...string) (*parser.Node, error) {
    root := &parser.Node{
        Name: "traefik",
    }

    vData := reflect.ValueOf(data)
    decodeRaw(root, vData, filters...)

    return root, nil
}

func decodeRaw(node *parser.Node, vData reflect.Value, filters ...string) {
    sortedKeys := sortKeys(vData, filters)

    for _, key := range sortedKeys {
        value := reflect.ValueOf(vData.MapIndex(key).Interface())

        child := &parser.Node{Name: key.String()}

        switch value.Kind() {
        case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
            fallthrough
        case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64:
            fallthrough
        case reflect.Float32, reflect.Float64:
            fallthrough
        case reflect.Bool:
            fallthrough
        case reflect.String:
            child.Value = getSimpleValue(value)
        case reflect.Slice:
            var values []string

            for i := 0; i < value.Len(); i++ {
                item := value.Index(i)
                switch item.Kind() {
                case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
                    fallthrough
                case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64:
                    fallthrough
                case reflect.Bool:
                    fallthrough
                case reflect.String:
                    fallthrough
                case reflect.Map:
                    fallthrough
                case reflect.Interface:
                    sValue := reflect.ValueOf(item.Interface())
                    if sValue.Kind() == reflect.Map {
                        ch := &parser.Node{
                            Name: "[" + strconv.Itoa(i) + "]",
                        }

                        child.Children = append(child.Children, ch)
                        decodeRaw(ch, sValue)
                    } else {
                        values = append(values, getSimpleValue(sValue))
                    }
                default:
                    panic("Unsupported slice type: " + item.Kind().String())
                }
            }

            child.Value = strings.Join(values, ",")
        case reflect.Map:
            decodeRaw(child, value)
        default:
            panic("Unsupported type: " + value.Kind().String())
        }

        node.Children = append(node.Children, child)
    }
}

func getSimpleValue(item reflect.Value) string {
    switch item.Kind() {
    case reflect.String:
        return item.String()
    case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
        return strconv.FormatInt(item.Int(), 10)
    case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64:
        return strconv.FormatUint(item.Uint(), 10)
    case reflect.Float32, reflect.Float64:
        return strings.TrimSuffix(strconv.FormatFloat(item.Float(), 'f', 6, 64), ".000000")
    case reflect.Bool:
        return strconv.FormatBool(item.Bool())
    default:
        panic("Unsupported Simple value type: " + item.Kind().String())
    }
}

func sortKeys(vData reflect.Value, filters []string) []reflect.Value {
    var sortedKeys []reflect.Value

    for _, v := range vData.MapKeys() {
        rValue := reflect.ValueOf(v.Interface())
        key := rValue.String()

        if len(filters) == 0 {
            sortedKeys = append(sortedKeys, rValue)
            continue
        }

        for _, filter := range filters {
            if strings.EqualFold(key, filter) {
                sortedKeys = append(sortedKeys, rValue)
                continue
            }
        }
    }

    sort.Slice(sortedKeys, func(i, j int) bool {
        return sortedKeys[i].String() < sortedKeys[j].String()
    })

    return sortedKeys
}
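Not part of the diff: a minimal sketch (in the same file package) of what decodeRawToNode produces from the kind of untyped map a YAML or TOML unmarshal yields. dumpNode is a hypothetical helper added only to print the resulting tree; the map keys used here are illustrative, not Traefik's real configuration keys.

package file

import (
    "fmt"

    "github.com/containous/traefik/pkg/config/parser"
)

// dumpNode walks the node tree produced by decodeRawToNode and prints
// every leaf with its dotted path.
func dumpNode(prefix string, node *parser.Node) {
    name := prefix + node.Name
    if node.Value != "" {
        fmt.Printf("%s = %s\n", name, node.Value)
    }
    for _, child := range node.Children {
        dumpNode(name+".", child)
    }
}

func demoDecodeRawToNode() {
    // The shape an untyped YAML unmarshal typically produces:
    // string keys at the top level, interface{} keys below.
    data := map[string]interface{}{
        "entryPoints": map[interface{}]interface{}{
            "web": map[interface{}]interface{}{
                "address": ":80",
            },
        },
        "log": map[interface{}]interface{}{
            "level": "DEBUG",
        },
    }

    node, err := decodeRawToNode(data)
    if err != nil {
        panic(err)
    }

    // Prints, in key-sorted order:
    //   traefik.entryPoints.web.address = :80
    //   traefik.log.level = DEBUG
    dumpNode("", node)
}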
540
pkg/config/file/raw_node_test.go
Normal file
@@ -0,0 +1,540 @@
package file
|
||||
|
||||
import (
|
||||
"testing"
|
||||
|
||||
"github.com/containous/traefik/pkg/config/parser"
|
||||
"github.com/stretchr/testify/assert"
|
||||
"github.com/stretchr/testify/require"
|
||||
)
|
||||
|
||||
func Test_decodeRawToNode(t *testing.T) {
|
||||
testCases := []struct {
|
||||
desc string
|
||||
data map[string]interface{}
|
||||
expected *parser.Node
|
||||
}{
|
||||
{
|
||||
desc: "empty",
|
||||
data: map[string]interface{}{},
|
||||
expected: &parser.Node{
|
||||
Name: "traefik",
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "string",
|
||||
data: map[string]interface{}{
|
||||
"foo": "bar",
|
||||
},
|
||||
expected: &parser.Node{
|
||||
Name: "traefik",
|
||||
Children: []*parser.Node{
|
||||
{Name: "foo", Value: "bar"},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "string named type",
|
||||
data: map[string]interface{}{
|
||||
"foo": bar("bar"),
|
||||
},
|
||||
expected: &parser.Node{
|
||||
Name: "traefik",
|
||||
Children: []*parser.Node{
|
||||
{Name: "foo", Value: "bar"},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "bool",
|
||||
data: map[string]interface{}{
|
||||
"foo": true,
|
||||
},
|
||||
expected: &parser.Node{
|
||||
Name: "traefik",
|
||||
Children: []*parser.Node{
|
||||
{Name: "foo", Value: "true"},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "int",
|
||||
data: map[string]interface{}{
|
||||
"foo": 1,
|
||||
},
|
||||
expected: &parser.Node{
|
||||
Name: "traefik",
|
||||
Children: []*parser.Node{
|
||||
{Name: "foo", Value: "1"},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "int8",
|
||||
data: map[string]interface{}{
|
||||
"foo": int8(1),
|
||||
},
|
||||
expected: &parser.Node{
|
||||
Name: "traefik",
|
||||
Children: []*parser.Node{
|
||||
{Name: "foo", Value: "1"},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "int16",
|
||||
data: map[string]interface{}{
|
||||
"foo": int16(1),
|
||||
},
|
||||
expected: &parser.Node{
|
||||
Name: "traefik",
|
||||
Children: []*parser.Node{
|
||||
{Name: "foo", Value: "1"},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "int32",
|
||||
data: map[string]interface{}{
|
||||
"foo": int32(1),
|
||||
},
|
||||
expected: &parser.Node{
|
||||
Name: "traefik",
|
||||
Children: []*parser.Node{
|
||||
{Name: "foo", Value: "1"},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "int64",
|
||||
data: map[string]interface{}{
|
||||
"foo": int64(1),
|
||||
},
|
||||
expected: &parser.Node{
|
||||
Name: "traefik",
|
||||
Children: []*parser.Node{
|
||||
{Name: "foo", Value: "1"},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "uint",
|
||||
data: map[string]interface{}{
|
||||
"foo": uint(1),
|
||||
},
|
||||
expected: &parser.Node{
|
||||
Name: "traefik",
|
||||
Children: []*parser.Node{
|
||||
{Name: "foo", Value: "1"},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "uint8",
|
||||
data: map[string]interface{}{
|
||||
"foo": uint8(1),
|
||||
},
|
||||
expected: &parser.Node{
|
||||
Name: "traefik",
|
||||
Children: []*parser.Node{
|
||||
{Name: "foo", Value: "1"},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "uint16",
|
||||
data: map[string]interface{}{
|
||||
"foo": uint16(1),
|
||||
},
|
||||
expected: &parser.Node{
|
||||
Name: "traefik",
|
||||
Children: []*parser.Node{
|
||||
{Name: "foo", Value: "1"},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "uint32",
|
||||
data: map[string]interface{}{
|
||||
"foo": uint32(1),
|
||||
},
|
||||
expected: &parser.Node{
|
||||
Name: "traefik",
|
||||
Children: []*parser.Node{
|
||||
{Name: "foo", Value: "1"},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "uint64",
|
||||
data: map[string]interface{}{
|
||||
"foo": uint64(1),
|
||||
},
|
||||
expected: &parser.Node{
|
||||
Name: "traefik",
|
||||
Children: []*parser.Node{
|
||||
{Name: "foo", Value: "1"},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "float32",
|
||||
data: map[string]interface{}{
|
||||
"foo": float32(1),
|
||||
},
|
||||
expected: &parser.Node{
|
||||
Name: "traefik",
|
||||
Children: []*parser.Node{
|
||||
{Name: "foo", Value: "1"},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "float64",
|
||||
data: map[string]interface{}{
|
||||
"foo": float64(1),
|
||||
},
|
||||
expected: &parser.Node{
|
||||
Name: "traefik",
|
||||
Children: []*parser.Node{
|
||||
{Name: "foo", Value: "1"},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "string slice",
|
||||
data: map[string]interface{}{
|
||||
"foo": []string{"A", "B"},
|
||||
},
|
||||
expected: &parser.Node{
|
||||
Name: "traefik",
|
||||
Children: []*parser.Node{
|
||||
{Name: "foo", Value: "A,B"},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "int slice",
|
||||
data: map[string]interface{}{
|
||||
"foo": []int{1, 2},
|
||||
},
|
||||
expected: &parser.Node{
|
||||
Name: "traefik",
|
||||
Children: []*parser.Node{
|
||||
{Name: "foo", Value: "1,2"},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "int8 slice",
|
||||
data: map[string]interface{}{
|
||||
"foo": []int8{1, 2},
|
||||
},
|
||||
expected: &parser.Node{
|
||||
Name: "traefik",
|
||||
Children: []*parser.Node{
|
||||
{Name: "foo", Value: "1,2"},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "int16 slice",
|
||||
data: map[string]interface{}{
|
||||
"foo": []int16{1, 2},
|
||||
},
|
||||
expected: &parser.Node{
|
||||
Name: "traefik",
|
||||
Children: []*parser.Node{
|
||||
{Name: "foo", Value: "1,2"},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "int32 slice",
|
||||
data: map[string]interface{}{
|
||||
"foo": []int32{1, 2},
|
||||
},
|
||||
expected: &parser.Node{
|
||||
Name: "traefik",
|
||||
Children: []*parser.Node{
|
||||
{Name: "foo", Value: "1,2"},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "int64 slice",
|
||||
data: map[string]interface{}{
|
||||
"foo": []int64{1, 2},
|
||||
},
|
||||
expected: &parser.Node{
|
||||
Name: "traefik",
|
||||
Children: []*parser.Node{
|
||||
{Name: "foo", Value: "1,2"},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "bool slice",
|
||||
data: map[string]interface{}{
|
||||
"foo": []bool{true, false},
|
||||
},
|
||||
expected: &parser.Node{
|
||||
Name: "traefik",
|
||||
Children: []*parser.Node{
|
||||
{Name: "foo", Value: "true,false"},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "interface (string) slice",
|
||||
data: map[string]interface{}{
|
||||
"foo": []interface{}{"A", "B"},
|
||||
},
|
||||
expected: &parser.Node{
|
||||
Name: "traefik",
|
||||
Children: []*parser.Node{
|
||||
{Name: "foo", Value: "A,B"},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "interface (int) slice",
|
||||
data: map[string]interface{}{
|
||||
"foo": []interface{}{1, 2},
|
||||
},
|
||||
expected: &parser.Node{
|
||||
Name: "traefik",
|
||||
Children: []*parser.Node{
|
||||
{Name: "foo", Value: "1,2"},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "2 strings",
|
||||
data: map[string]interface{}{
|
||||
"foo": "bar",
|
||||
"fii": "bir",
|
||||
},
|
||||
expected: &parser.Node{
|
||||
Name: "traefik",
|
||||
Children: []*parser.Node{
|
||||
{Name: "fii", Value: "bir"},
|
||||
{Name: "foo", Value: "bar"},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "string, level 2",
|
||||
data: map[string]interface{}{
|
||||
"fii": map[interface{}]interface{}{
|
||||
"fuu": "bur",
|
||||
},
|
||||
},
|
||||
expected: &parser.Node{
|
||||
Name: "traefik",
|
||||
Children: []*parser.Node{
|
||||
{Name: "fii", Children: []*parser.Node{{Name: "fuu", Value: "bur"}}},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "int, level 2",
|
||||
data: map[string]interface{}{
|
||||
"fii": map[interface{}]interface{}{
|
||||
"fuu": 1,
|
||||
},
|
||||
},
|
||||
expected: &parser.Node{
|
||||
Name: "traefik",
|
||||
Children: []*parser.Node{
|
||||
{Name: "fii", Children: []*parser.Node{{Name: "fuu", Value: "1"}}},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "uint, level 2",
|
||||
data: map[string]interface{}{
|
||||
"fii": map[interface{}]interface{}{
|
||||
"fuu": uint(1),
|
||||
},
|
||||
},
|
||||
expected: &parser.Node{
|
||||
Name: "traefik",
|
||||
Children: []*parser.Node{
|
||||
{Name: "fii", Children: []*parser.Node{{Name: "fuu", Value: "1"}}},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "bool, level 2",
|
||||
data: map[string]interface{}{
|
||||
"fii": map[interface{}]interface{}{
|
||||
"fuu": true,
|
||||
},
|
||||
},
|
||||
expected: &parser.Node{
|
||||
Name: "traefik",
|
||||
Children: []*parser.Node{
|
||||
{Name: "fii", Children: []*parser.Node{{Name: "fuu", Value: "true"}}},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "string, level 3",
|
||||
data: map[string]interface{}{
|
||||
"foo": map[interface{}]interface{}{
|
||||
"fii": map[interface{}]interface{}{
|
||||
"fuu": "bur",
|
||||
},
|
||||
},
|
||||
},
|
||||
expected: &parser.Node{
|
||||
Name: "traefik",
|
||||
Children: []*parser.Node{
|
||||
{Name: "foo", Children: []*parser.Node{
|
||||
{Name: "fii", Children: []*parser.Node{{Name: "fuu", Value: "bur"}}},
|
||||
}},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "int, level 3",
|
||||
data: map[string]interface{}{
|
||||
"fii": map[interface{}]interface{}{
|
||||
"fuu": 1,
|
||||
},
|
||||
},
|
||||
expected: &parser.Node{
|
||||
Name: "traefik",
|
||||
Children: []*parser.Node{
|
||||
{Name: "fii", Children: []*parser.Node{{Name: "fuu", Value: "1"}}},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "uint, level 3",
|
||||
data: map[string]interface{}{
|
||||
"fii": map[interface{}]interface{}{
|
||||
"fuu": uint(1),
|
||||
},
|
||||
},
|
||||
expected: &parser.Node{
|
||||
Name: "traefik",
|
||||
Children: []*parser.Node{
|
||||
{Name: "fii", Children: []*parser.Node{{Name: "fuu", Value: "1"}}},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "bool, level 3",
|
||||
data: map[string]interface{}{
|
||||
"fii": map[interface{}]interface{}{
|
||||
"fuu": true,
|
||||
},
|
||||
},
|
||||
expected: &parser.Node{
|
||||
Name: "traefik",
|
||||
Children: []*parser.Node{
|
||||
{Name: "fii", Children: []*parser.Node{{Name: "fuu", Value: "true"}}},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "struct",
|
||||
data: map[string]interface{}{
|
||||
"foo": map[interface{}]interface{}{
|
||||
"field1": "C",
|
||||
"field2": "C",
|
||||
},
|
||||
},
|
||||
expected: &parser.Node{
|
||||
Name: "traefik",
|
||||
Children: []*parser.Node{
|
||||
{Name: "foo", Children: []*parser.Node{
|
||||
{Name: "field1", Value: "C"},
|
||||
{Name: "field2", Value: "C"},
|
||||
}},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "slice struct 1",
|
||||
data: map[string]interface{}{
|
||||
"foo": []map[string]interface{}{
|
||||
{"field1": "A", "field2": "A"},
|
||||
{"field1": "B", "field2": "B"},
|
||||
{"field2": "C", "field1": "C"},
|
||||
},
|
||||
},
|
||||
expected: &parser.Node{
|
||||
Name: "traefik",
|
||||
Children: []*parser.Node{
|
||||
{Name: "foo", Children: []*parser.Node{
|
||||
{Name: "[0]", Children: []*parser.Node{
|
||||
{Name: "field1", Value: "A"},
|
||||
{Name: "field2", Value: "A"},
|
||||
}},
|
||||
{Name: "[1]", Children: []*parser.Node{
|
||||
{Name: "field1", Value: "B"},
|
||||
{Name: "field2", Value: "B"},
|
||||
}},
|
||||
{Name: "[2]", Children: []*parser.Node{
|
||||
{Name: "field1", Value: "C"},
|
||||
{Name: "field2", Value: "C"},
|
||||
}},
|
||||
}},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "slice struct 2",
|
||||
data: map[string]interface{}{
|
||||
"foo": []interface{}{
|
||||
map[interface{}]interface{}{
|
||||
"field2": "A",
|
||||
"field1": "A",
|
||||
},
|
||||
map[interface{}]interface{}{
|
||||
"field1": "B",
|
||||
"field2": "B",
|
||||
},
|
||||
map[interface{}]interface{}{
|
||||
"field1": "C",
|
||||
"field2": "C",
|
||||
},
|
||||
},
|
||||
},
|
||||
expected: &parser.Node{
|
||||
Name: "traefik",
|
||||
Children: []*parser.Node{
|
||||
{Name: "foo", Children: []*parser.Node{
|
||||
{Name: "[0]", Children: []*parser.Node{
|
||||
{Name: "field1", Value: "A"},
|
||||
{Name: "field2", Value: "A"},
|
||||
}},
|
||||
{Name: "[1]", Children: []*parser.Node{
|
||||
{Name: "field1", Value: "B"},
|
||||
{Name: "field2", Value: "B"},
|
||||
}},
|
||||
{Name: "[2]", Children: []*parser.Node{
|
||||
{Name: "field1", Value: "C"},
|
||||
{Name: "field2", Value: "C"},
|
||||
}},
|
||||
}},
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
for _, test := range testCases {
|
||||
test := test
|
||||
t.Run(test.desc, func(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
node, err := decodeRawToNode(test.data)
|
||||
require.NoError(t, err)
|
||||
|
||||
assert.Equal(t, test.expected, node)
|
||||
})
|
||||
}
|
||||
}
|
44
pkg/config/flag/flag.go
Normal file
@@ -0,0 +1,44 @@
// Package flag implements encoding and decoding between flag arguments and a typed Configuration.
package flag

import (
    "github.com/containous/traefik/pkg/config/parser"
)

// Decode decodes the given flag arguments into the given element.
// The operation goes through four stages roughly summarized as:
// flag arguments -> parsed map of flags
// map -> tree of untyped nodes
// untyped nodes -> nodes augmented with metadata such as kind (inferred from element)
// "typed" nodes -> typed element
func Decode(args []string, element interface{}) error {
    ref, err := Parse(args, element)
    if err != nil {
        return err
    }

    return parser.Decode(ref, element)
}

// Encode encodes the configuration in element into the flags represented in the returned Flats.
// The operation goes through three stages roughly summarized as:
// typed configuration in element -> tree of untyped nodes
// untyped nodes -> nodes augmented with metadata such as kind (inferred from element)
// "typed" nodes -> flags with default values (determined by type/kind)
func Encode(element interface{}) ([]parser.Flat, error) {
    if element == nil {
        return nil, nil
    }

    node, err := parser.EncodeToNode(element, false)
    if err != nil {
        return nil, err
    }

    err = parser.AddMetadata(element, node)
    if err != nil {
        return nil, err
    }

    return parser.EncodeToFlat(element, node, parser.FlatOpts{Separator: ".", SkipRoot: true})
}
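Not part of the diff: a minimal usage sketch of flag.Decode above, using a made-up configuration struct (Debug, LogLevel, EntryPoint are illustrative names, not Traefik's real static configuration). It shows the argument forms the parser accepts: "--key=value", "--key value", and a bare "--key" for booleans.

package main

import (
    "fmt"

    "github.com/containous/traefik/pkg/config/flag"
)

// config is a hypothetical struct used only for this sketch.
type config struct {
    Debug      bool
    LogLevel   string
    EntryPoint map[string]string
}

func main() {
    cfg := &config{}

    // Bare bool flag, "=" form, and a map entry addressed by key.
    args := []string{"--debug", "--loglevel=DEBUG", "--entrypoint.web=:80"}

    if err := flag.Decode(args, cfg); err != nil {
        panic(err)
    }

    // Expected to print roughly: &{Debug:true LogLevel:DEBUG EntryPoint:map[web::80]}
    fmt.Printf("%+v\n", cfg)
}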
926
pkg/config/flag/flag_test.go
Normal file
@@ -0,0 +1,926 @@
package flag
|
||||
|
||||
import (
|
||||
"testing"
|
||||
"time"
|
||||
|
||||
"github.com/containous/traefik/pkg/config/generator"
|
||||
"github.com/containous/traefik/pkg/config/parser"
|
||||
"github.com/containous/traefik/pkg/types"
|
||||
"github.com/stretchr/testify/assert"
|
||||
"github.com/stretchr/testify/require"
|
||||
)
|
||||
|
||||
func TestDecode(t *testing.T) {
|
||||
testCases := []struct {
|
||||
desc string
|
||||
args []string
|
||||
element interface{}
|
||||
expected interface{}
|
||||
}{
|
||||
{
|
||||
desc: "no args",
|
||||
args: nil,
|
||||
expected: nil,
|
||||
},
|
||||
{
|
||||
desc: "types.Duration value",
|
||||
args: []string{"--foo=1"},
|
||||
element: &struct {
|
||||
Foo types.Duration
|
||||
}{},
|
||||
expected: &struct {
|
||||
Foo types.Duration
|
||||
}{
|
||||
Foo: types.Duration(1 * time.Second),
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "time.Duration value",
|
||||
args: []string{"--foo=1"},
|
||||
element: &struct {
|
||||
Foo time.Duration
|
||||
}{},
|
||||
expected: &struct {
|
||||
Foo time.Duration
|
||||
}{
|
||||
Foo: 1 * time.Nanosecond,
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "bool value",
|
||||
args: []string{"--foo"},
|
||||
element: &struct {
|
||||
Foo bool
|
||||
}{},
|
||||
expected: &struct {
|
||||
Foo bool
|
||||
}{
|
||||
Foo: true,
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "equal",
|
||||
args: []string{"--foo=bar"},
|
||||
element: &struct {
|
||||
Foo string
|
||||
}{},
|
||||
expected: &struct {
|
||||
Foo string
|
||||
}{
|
||||
Foo: "bar",
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "space separated",
|
||||
args: []string{"--foo", "bar"},
|
||||
element: &struct {
|
||||
Foo string
|
||||
}{},
|
||||
expected: &struct {
|
||||
Foo string
|
||||
}{
|
||||
Foo: "bar",
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "space separated with end of parameter",
|
||||
args: []string{"--foo=bir", "--", "--bar"},
|
||||
element: &struct {
|
||||
Foo string
|
||||
}{},
|
||||
expected: &struct {
|
||||
Foo string
|
||||
}{
|
||||
Foo: "bir",
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "multiple bool flags without value",
|
||||
args: []string{"--foo", "--bar"},
|
||||
element: &struct {
|
||||
Foo bool
|
||||
Bar bool
|
||||
}{},
|
||||
expected: &struct {
|
||||
Foo bool
|
||||
Bar bool
|
||||
}{
|
||||
Foo: true,
|
||||
Bar: true,
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "slice with several flags",
|
||||
args: []string{"--foo=bar", "--foo=baz"},
|
||||
element: &struct {
|
||||
Foo []string
|
||||
}{},
|
||||
expected: &struct {
|
||||
Foo []string
|
||||
}{
|
||||
Foo: []string{"bar", "baz"},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "map string",
|
||||
args: []string{"--foo.name=bar"},
|
||||
element: &struct {
|
||||
Foo map[string]string
|
||||
}{},
|
||||
expected: &struct {
|
||||
Foo map[string]string
|
||||
}{
|
||||
Foo: map[string]string{
|
||||
"name": "bar",
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "map struct",
|
||||
args: []string{"--foo.name.value=bar"},
|
||||
element: &struct {
|
||||
Foo map[string]struct{ Value string }
|
||||
}{},
|
||||
expected: &struct {
|
||||
Foo map[string]struct{ Value string }
|
||||
}{
|
||||
Foo: map[string]struct{ Value string }{
|
||||
"name": {
|
||||
Value: "bar",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "map struct with sub-struct",
|
||||
args: []string{"--foo.name.bar.value=bar"},
|
||||
element: &struct {
|
||||
Foo map[string]struct {
|
||||
Bar *struct{ Value string }
|
||||
}
|
||||
}{},
|
||||
expected: &struct {
|
||||
Foo map[string]struct {
|
||||
Bar *struct{ Value string }
|
||||
}
|
||||
}{
|
||||
Foo: map[string]struct {
|
||||
Bar *struct{ Value string }
|
||||
}{
|
||||
"name": {
|
||||
Bar: &struct {
|
||||
Value string
|
||||
}{
|
||||
Value: "bar",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "map struct with sub-map",
|
||||
args: []string{"--foo.name1.bar.name2.value=bar"},
|
||||
element: &struct {
|
||||
Foo map[string]struct {
|
||||
Bar map[string]struct{ Value string }
|
||||
}
|
||||
}{},
|
||||
expected: &struct {
|
||||
Foo map[string]struct {
|
||||
Bar map[string]struct{ Value string }
|
||||
}
|
||||
}{
|
||||
Foo: map[string]struct {
|
||||
Bar map[string]struct{ Value string }
|
||||
}{
|
||||
"name1": {
|
||||
Bar: map[string]struct{ Value string }{
|
||||
"name2": {
|
||||
Value: "bar",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "slice with several flags 2",
|
||||
args: []string{"--foo", "bar", "--foo", "baz"},
|
||||
element: &struct {
|
||||
Foo []string
|
||||
}{},
|
||||
expected: &struct {
|
||||
Foo []string
|
||||
}{
|
||||
Foo: []string{"bar", "baz"},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "slice with several flags 3",
|
||||
args: []string{"--foo", "bar", "--foo=", "--baz"},
|
||||
element: &struct {
|
||||
Foo []string
|
||||
Baz bool
|
||||
}{},
|
||||
expected: &struct {
|
||||
Foo []string
|
||||
Baz bool
|
||||
}{
|
||||
Foo: []string{"bar", ""},
|
||||
Baz: true,
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "slice with several flags 4",
|
||||
args: []string{"--foo", "bar", "--foo", "--baz"},
|
||||
element: &struct {
|
||||
Foo []string
|
||||
Baz bool
|
||||
}{},
|
||||
expected: &struct {
|
||||
Foo []string
|
||||
Baz bool
|
||||
}{
|
||||
Foo: []string{"bar", "--baz"},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "slice of struct",
|
||||
args: []string{
|
||||
"--foo[0].Field1", "bar", "--foo[0].Field2", "6",
|
||||
"--foo[1].Field1", "bur", "--foo[1].Field2", "2",
|
||||
},
|
||||
element: &struct {
|
||||
Foo []struct {
|
||||
Field1 string
|
||||
Field2 int
|
||||
}
|
||||
}{},
|
||||
expected: &struct {
|
||||
Foo []struct {
|
||||
Field1 string
|
||||
Field2 int
|
||||
}
|
||||
}{
|
||||
Foo: []struct {
|
||||
Field1 string
|
||||
Field2 int
|
||||
}{
|
||||
{
|
||||
Field1: "bar",
|
||||
Field2: 6,
|
||||
},
|
||||
{
|
||||
Field1: "bur",
|
||||
Field2: 2,
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "slice of pointer of struct",
|
||||
args: []string{
|
||||
"--foo[0].Field1", "bar", "--foo[0].Field2", "6",
|
||||
"--foo[1].Field1", "bur", "--foo[1].Field2", "2",
|
||||
},
|
||||
element: &struct {
|
||||
Foo []*struct {
|
||||
Field1 string
|
||||
Field2 int
|
||||
}
|
||||
}{},
|
||||
expected: &struct {
|
||||
Foo []*struct {
|
||||
Field1 string
|
||||
Field2 int
|
||||
}
|
||||
}{
|
||||
Foo: []*struct {
|
||||
Field1 string
|
||||
Field2 int
|
||||
}{
|
||||
{
|
||||
Field1: "bar",
|
||||
Field2: 6,
|
||||
},
|
||||
{
|
||||
Field1: "bur",
|
||||
Field2: 2,
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "multiple string flag",
|
||||
element: &struct {
|
||||
Foo string
|
||||
}{},
|
||||
args: []string{"--foo=bar", "--foo=baz"},
|
||||
expected: &struct {
|
||||
Foo string
|
||||
}{
|
||||
Foo: "baz",
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "multiple string flag 2",
|
||||
element: &struct {
|
||||
Foo string
|
||||
}{},
|
||||
args: []string{"--foo", "bar", "--foo", "baz"},
|
||||
expected: &struct {
|
||||
Foo string
|
||||
}{
|
||||
Foo: "baz",
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "string without value",
|
||||
element: &struct {
|
||||
Foo string
|
||||
Bar bool
|
||||
}{},
|
||||
args: []string{"--foo", "--bar"},
|
||||
expected: &struct {
|
||||
Foo string
|
||||
Bar bool
|
||||
}{
|
||||
Foo: "--bar",
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "struct pointer value",
|
||||
args: []string{"--foo"},
|
||||
element: &struct {
|
||||
Foo *struct{ Field string } `label:"allowEmpty"`
|
||||
}{},
|
||||
expected: &struct {
|
||||
Foo *struct{ Field string } `label:"allowEmpty"`
|
||||
}{
|
||||
Foo: &struct{ Field string }{},
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
for _, test := range testCases {
|
||||
test := test
|
||||
t.Run(test.desc, func(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
err := Decode(test.args, test.element)
|
||||
require.NoError(t, err)
|
||||
|
||||
assert.Equal(t, test.expected, test.element)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestEncode(t *testing.T) {
|
||||
testCases := []struct {
|
||||
desc string
|
||||
element interface{}
|
||||
expected []parser.Flat
|
||||
}{
|
||||
{
|
||||
desc: "string field",
|
||||
element: &struct {
|
||||
Field string `description:"field description"`
|
||||
}{
|
||||
Field: "test",
|
||||
},
|
||||
expected: []parser.Flat{{
|
||||
Name: "field",
|
||||
Description: "field description",
|
||||
Default: "test",
|
||||
}},
|
||||
},
|
||||
{
|
||||
desc: "int field",
|
||||
element: &struct {
|
||||
Field int `description:"field description"`
|
||||
}{
|
||||
Field: 6,
|
||||
},
|
||||
expected: []parser.Flat{{
|
||||
Name: "field",
|
||||
Description: "field description",
|
||||
Default: "6",
|
||||
}},
|
||||
},
|
||||
{
|
||||
desc: "bool field",
|
||||
element: &struct {
|
||||
Field bool `description:"field description"`
|
||||
}{
|
||||
Field: true,
|
||||
},
|
||||
expected: []parser.Flat{{
|
||||
Name: "field",
|
||||
Description: "field description",
|
||||
Default: "true",
|
||||
}},
|
||||
},
|
||||
{
|
||||
desc: "string pointer field",
|
||||
element: &struct {
|
||||
Field *string `description:"field description"`
|
||||
}{
|
||||
Field: func(v string) *string { return &v }("test"),
|
||||
},
|
||||
expected: []parser.Flat{{
|
||||
Name: "field",
|
||||
Description: "field description",
|
||||
Default: "test",
|
||||
}},
|
||||
},
|
||||
{
|
||||
desc: "int pointer field",
|
||||
element: &struct {
|
||||
Field *int `description:"field description"`
|
||||
}{
|
||||
Field: func(v int) *int { return &v }(6),
|
||||
},
|
||||
expected: []parser.Flat{{
|
||||
Name: "field",
|
||||
Description: "field description",
|
||||
Default: "6",
|
||||
}},
|
||||
},
|
||||
{
|
||||
desc: "bool pointer field",
|
||||
element: &struct {
|
||||
Field *bool `description:"field description"`
|
||||
}{
|
||||
Field: func(v bool) *bool { return &v }(true),
|
||||
},
|
||||
expected: []parser.Flat{{
|
||||
Name: "field",
|
||||
Description: "field description",
|
||||
Default: "true",
|
||||
}},
|
||||
},
|
||||
{
|
||||
desc: "slice of string field, no initial value",
|
||||
element: &struct {
|
||||
Field []string `description:"field description"`
|
||||
}{},
|
||||
expected: []parser.Flat{{
|
||||
Name: "field",
|
||||
Description: "field description",
|
||||
Default: "",
|
||||
}},
|
||||
},
|
||||
{
|
||||
desc: "slice of string field, with initial value",
|
||||
element: &struct {
|
||||
Field []string `description:"field description"`
|
||||
}{
|
||||
Field: []string{"foo", "bar"},
|
||||
},
|
||||
expected: []parser.Flat{{
|
||||
Name: "field",
|
||||
Description: "field description",
|
||||
Default: "foo, bar",
|
||||
}},
|
||||
},
|
||||
{
|
||||
desc: "slice of int field, no initial value",
|
||||
element: &struct {
|
||||
Field []int `description:"field description"`
|
||||
}{},
|
||||
expected: []parser.Flat{{
|
||||
Name: "field",
|
||||
Description: "field description",
|
||||
Default: "",
|
||||
}},
|
||||
},
|
||||
{
|
||||
desc: "slice of int field, with initial value",
|
||||
element: &struct {
|
||||
Field []int `description:"field description"`
|
||||
}{
|
||||
Field: []int{6, 3},
|
||||
},
|
||||
expected: []parser.Flat{{
|
||||
Name: "field",
|
||||
Description: "field description",
|
||||
Default: "6, 3",
|
||||
}},
|
||||
},
|
||||
{
|
||||
desc: "map string field",
|
||||
element: &struct {
|
||||
Field map[string]string `description:"field description"`
|
||||
}{
|
||||
Field: map[string]string{
|
||||
parser.MapNamePlaceholder: "",
|
||||
},
|
||||
},
|
||||
expected: []parser.Flat{{
|
||||
Name: "field.<name>",
|
||||
Description: "field description",
|
||||
Default: "",
|
||||
}},
|
||||
},
|
||||
{
|
||||
desc: "struct pointer field",
|
||||
element: &struct {
|
||||
Foo *struct {
|
||||
Field string `description:"field description"`
|
||||
} `description:"foo description"`
|
||||
}{
|
||||
Foo: &struct {
|
||||
Field string `description:"field description"`
|
||||
}{
|
||||
Field: "test",
|
||||
},
|
||||
},
|
||||
expected: []parser.Flat{
|
||||
{
|
||||
Name: "foo.field",
|
||||
Description: "field description",
|
||||
Default: "test",
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "struct pointer field, allow empty",
|
||||
element: &struct {
|
||||
Foo *struct {
|
||||
Field string `description:"field description"`
|
||||
} `description:"foo description" label:"allowEmpty"`
|
||||
}{
|
||||
Foo: &struct {
|
||||
Field string `description:"field description"`
|
||||
}{
|
||||
Field: "test",
|
||||
},
|
||||
},
|
||||
expected: []parser.Flat{
|
||||
{
|
||||
Name: "foo",
|
||||
Description: "foo description",
|
||||
Default: "false",
|
||||
},
|
||||
{
|
||||
Name: "foo.field",
|
||||
Description: "field description",
|
||||
Default: "test",
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "struct pointer field level 2",
|
||||
element: &struct {
|
||||
Foo *struct {
|
||||
Fii *struct {
|
||||
Field string `description:"field description"`
|
||||
} `description:"fii description"`
|
||||
} `description:"foo description"`
|
||||
}{
|
||||
Foo: &struct {
|
||||
Fii *struct {
|
||||
Field string `description:"field description"`
|
||||
} `description:"fii description"`
|
||||
}{
|
||||
Fii: &struct {
|
||||
Field string `description:"field description"`
|
||||
}{
|
||||
Field: "test",
|
||||
},
|
||||
},
|
||||
},
|
||||
expected: []parser.Flat{
|
||||
{
|
||||
Name: "foo.fii.field",
|
||||
Description: "field description",
|
||||
Default: "test",
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "struct pointer field level 2, allow empty",
|
||||
element: &struct {
|
||||
Foo *struct {
|
||||
Fii *struct {
|
||||
Field string `description:"field description"`
|
||||
} `description:"fii description" label:"allowEmpty"`
|
||||
} `description:"foo description" label:"allowEmpty"`
|
||||
}{
|
||||
Foo: &struct {
|
||||
Fii *struct {
|
||||
Field string `description:"field description"`
|
||||
} `description:"fii description" label:"allowEmpty"`
|
||||
}{
|
||||
Fii: &struct {
|
||||
Field string `description:"field description"`
|
||||
}{
|
||||
Field: "test",
|
||||
},
|
||||
},
|
||||
},
|
||||
expected: []parser.Flat{
|
||||
{
|
||||
Name: "foo",
|
||||
Description: "foo description",
|
||||
Default: "false",
|
||||
},
|
||||
{
|
||||
Name: "foo.fii",
|
||||
Description: "fii description",
|
||||
Default: "false",
|
||||
},
|
||||
{
|
||||
Name: "foo.fii.field",
|
||||
Description: "field description",
|
||||
Default: "test",
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "map string field level 2",
|
||||
element: &struct {
|
||||
Foo *struct {
|
||||
Fii map[string]string `description:"fii description"`
|
||||
} `description:"foo description"`
|
||||
}{
|
||||
Foo: &struct {
|
||||
Fii map[string]string `description:"fii description"`
|
||||
}{
|
||||
Fii: map[string]string{
|
||||
parser.MapNamePlaceholder: "",
|
||||
},
|
||||
},
|
||||
},
|
||||
expected: []parser.Flat{
|
||||
{
|
||||
Name: "foo.fii.<name>",
|
||||
Description: "fii description",
|
||||
Default: "",
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "map string pointer field level 2",
|
||||
element: &struct {
|
||||
Foo *struct {
|
||||
Fii map[string]*string `description:"fii description"`
|
||||
} `description:"foo description"`
|
||||
}{
|
||||
Foo: &struct {
|
||||
Fii map[string]*string `description:"fii description"`
|
||||
}{
|
||||
Fii: map[string]*string{
|
||||
parser.MapNamePlaceholder: func(v string) *string { return &v }(""),
|
||||
},
|
||||
},
|
||||
},
|
||||
expected: []parser.Flat{
|
||||
{
|
||||
Name: "foo.fii.<name>",
|
||||
Description: "fii description",
|
||||
Default: "",
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "map struct level 1",
|
||||
element: &struct {
|
||||
Foo map[string]struct {
|
||||
Field string `description:"field description"`
|
||||
Yo int `description:"yo description"`
|
||||
} `description:"foo description"`
|
||||
}{},
|
||||
expected: []parser.Flat{
|
||||
{
|
||||
Name: "foo.<name>",
|
||||
Description: "foo description",
|
||||
Default: "false",
|
||||
},
|
||||
{
|
||||
Name: "foo.<name>.field",
|
||||
Description: "field description",
|
||||
Default: "",
|
||||
},
|
||||
{
|
||||
Name: "foo.<name>.yo",
|
||||
Description: "yo description",
|
||||
Default: "0",
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "map struct pointer level 1",
|
||||
element: &struct {
|
||||
Foo map[string]*struct {
|
||||
Field string `description:"field description"`
|
||||
Yo string `description:"yo description"`
|
||||
} `description:"foo description"`
|
||||
}{},
|
||||
expected: []parser.Flat{
|
||||
{
|
||||
Name: "foo.<name>",
|
||||
Description: "foo description",
|
||||
Default: "false",
|
||||
},
|
||||
{
|
||||
Name: "foo.<name>.field",
|
||||
Description: "field description",
|
||||
Default: "",
|
||||
},
|
||||
{
|
||||
Name: "foo.<name>.yo",
|
||||
Description: "yo description",
|
||||
Default: "",
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "time duration field",
|
||||
element: &struct {
|
||||
Field time.Duration `description:"field description"`
|
||||
}{
|
||||
Field: 1 * time.Second,
|
||||
},
|
||||
expected: []parser.Flat{{
|
||||
Name: "field",
|
||||
Description: "field description",
|
||||
Default: "1s",
|
||||
}},
|
||||
},
|
||||
{
|
||||
desc: "time duration field map",
|
||||
element: &struct {
|
||||
Foo map[string]*struct {
|
||||
Field time.Duration `description:"field description"`
|
||||
} `description:"foo description"`
|
||||
}{
|
||||
Foo: map[string]*struct {
|
||||
Field time.Duration `description:"field description"`
|
||||
}{},
|
||||
},
|
||||
expected: []parser.Flat{
|
||||
{
|
||||
Name: "foo.<name>",
|
||||
Description: "foo description",
|
||||
Default: "false",
|
||||
},
|
||||
{
|
||||
Name: "foo.<name>.field",
|
||||
Description: "field description",
|
||||
Default: "0s",
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "time duration field map 2",
|
||||
element: &struct {
|
||||
Foo map[string]*struct {
|
||||
Fii *struct {
|
||||
Field time.Duration `description:"field description"`
|
||||
}
|
||||
} `description:"foo description"`
|
||||
}{
|
||||
Foo: map[string]*struct {
|
||||
Fii *struct {
|
||||
Field time.Duration `description:"field description"`
|
||||
}
|
||||
}{},
|
||||
},
|
||||
expected: []parser.Flat{
|
||||
{
|
||||
Name: "foo.<name>",
|
||||
Description: "foo description",
|
||||
Default: "false",
|
||||
},
|
||||
{
|
||||
Name: "foo.<name>.fii.field",
|
||||
Description: "field description",
|
||||
Default: "0s",
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "time duration field 2",
|
||||
element: &struct {
|
||||
Foo *struct {
|
||||
Field time.Duration `description:"field description"`
|
||||
}
|
||||
}{
|
||||
Foo: &struct {
|
||||
Field time.Duration `description:"field description"`
|
||||
}{
|
||||
Field: 1 * time.Second,
|
||||
},
|
||||
},
|
||||
expected: []parser.Flat{{
|
||||
Name: "foo.field",
|
||||
Description: "field description",
|
||||
Default: "1s",
|
||||
}},
|
||||
},
|
||||
{
|
||||
desc: "time duration field 3",
|
||||
element: &struct {
|
||||
Foo *struct {
|
||||
Fii *struct {
|
||||
Field time.Duration `description:"field description"`
|
||||
}
|
||||
}
|
||||
}{
|
||||
Foo: &struct {
|
||||
Fii *struct {
|
||||
Field time.Duration `description:"field description"`
|
||||
}
|
||||
}{
|
||||
Fii: &struct {
|
||||
Field time.Duration `description:"field description"`
|
||||
}{
|
||||
Field: 1 * time.Second,
|
||||
},
|
||||
},
|
||||
},
|
||||
expected: []parser.Flat{{
|
||||
Name: "foo.fii.field",
|
||||
Description: "field description",
|
||||
Default: "1s",
|
||||
}},
|
||||
},
|
||||
{
|
||||
desc: "time duration field",
|
||||
element: &struct {
|
||||
Field types.Duration `description:"field description"`
|
||||
}{
|
||||
Field: types.Duration(180 * time.Second),
|
||||
},
|
||||
expected: []parser.Flat{{
|
||||
Name: "field",
|
||||
Description: "field description",
|
||||
Default: "180",
|
||||
}},
|
||||
},
|
||||
{
|
||||
desc: "slice of struct",
|
||||
element: &struct {
|
||||
Foo *struct {
|
||||
Fii []struct {
|
||||
Field1 string `description:"field1 description"`
|
||||
Field2 int `description:"field2 description"`
|
||||
} `description:"fii description"`
|
||||
} `description:"foo description"`
|
||||
}{},
|
||||
expected: []parser.Flat{
|
||||
{
|
||||
Name: "foo.fii",
|
||||
Description: "fii description",
|
||||
Default: "",
|
||||
},
|
||||
{
|
||||
Name: "foo.fii[0].field1",
|
||||
Description: "field1 description",
|
||||
Default: "",
|
||||
},
|
||||
{
|
||||
Name: "foo.fii[0].field2",
|
||||
Description: "field2 description",
|
||||
Default: "0",
|
||||
},
|
||||
},
|
||||
},
|
||||
// Skipped: because realistically not needed in Traefik for now.
|
||||
// {
|
||||
// desc: "map of map field level 2",
|
||||
// element: &struct {
|
||||
// Foo *struct {
|
||||
// Fii map[string]map[string]string `description:"fii description"`
|
||||
// } `description:"foo description"`
|
||||
// }{
|
||||
// Foo: &struct {
|
||||
// Fii map[string]map[string]string `description:"fii description"`
|
||||
// }{
|
||||
// Fii: map[string]map[string]string{
|
||||
// parser.MapNamePlaceholder: {
|
||||
// parser.MapNamePlaceholder: "test",
|
||||
// },
|
||||
// },
|
||||
// },
|
||||
// },
|
||||
// expected: `XXX`,
|
||||
// },
|
||||
}
|
||||
|
||||
for _, test := range testCases {
|
||||
test := test
|
||||
t.Run(test.desc, func(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
generator.Generate(test.element)
|
||||
|
||||
entries, err := Encode(test.element)
|
||||
require.NoError(t, err)
|
||||
|
||||
assert.Equal(t, test.expected, entries)
|
||||
})
|
||||
}
|
||||
}
|
108
pkg/config/flag/flagparser.go
Normal file
@@ -0,0 +1,108 @@
package flag

import (
    "fmt"
    "reflect"
    "strings"
)

// Parse parses the command-line flag arguments into a map,
// using the type information in element to discriminate whether a flag is supposed to be a bool,
// and other such ambiguities.
func Parse(args []string, element interface{}) (map[string]string, error) {
    f := flagSet{
        flagTypes: getFlagTypes(element),
        args:      args,
        values:    make(map[string]string),
    }

    for {
        seen, err := f.parseOne()
        if seen {
            continue
        }
        if err == nil {
            break
        }
        return nil, err
    }
    return f.values, nil
}

type flagSet struct {
    flagTypes map[string]reflect.Kind
    args      []string
    values    map[string]string
}

func (f *flagSet) parseOne() (bool, error) {
    if len(f.args) == 0 {
        return false, nil
    }

    s := f.args[0]
    if len(s) < 2 || s[0] != '-' {
        return false, nil
    }
    numMinuses := 1
    if s[1] == '-' {
        numMinuses++
        if len(s) == 2 { // "--" terminates the flags
            f.args = f.args[1:]
            return false, nil
        }
    }

    name := s[numMinuses:]
    if len(name) == 0 || name[0] == '-' || name[0] == '=' {
        return false, fmt.Errorf("bad flag syntax: %s", s)
    }

    // it's a flag. does it have an argument?
    f.args = f.args[1:]
    hasValue := false
    value := ""
    for i := 1; i < len(name); i++ { // equals cannot be first
        if name[i] == '=' {
            value = name[i+1:]
            hasValue = true
            name = name[0:i]
            break
        }
    }

    if hasValue {
        f.setValue(name, value)
        return true, nil
    }

    if f.flagTypes[name] == reflect.Bool || f.flagTypes[name] == reflect.Ptr {
        f.setValue(name, "true")
        return true, nil
    }

    if len(f.args) > 0 {
        // value is the next arg
        hasValue = true
        value, f.args = f.args[0], f.args[1:]
    }

    if !hasValue {
        return false, fmt.Errorf("flag needs an argument: -%s", name)
    }

    f.setValue(name, value)
    return true, nil
}

func (f *flagSet) setValue(name string, value string) {
    n := strings.ToLower("traefik." + name)
    v, ok := f.values[n]

    if ok && f.flagTypes[name] == reflect.Slice {
        f.values[n] = v + "," + value
        return
    }

    f.values[n] = value
}
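Not part of the diff: a small sketch, inside the same flag package, of the ambiguity the flagTypes map resolves, as described in the Parse doc comment. The same bare "--foo" token becomes "true" when Foo is a bool, but swallows the next token when Foo is a string; the anonymous structs here are illustrative only.

package flag

import "fmt"

// demoParse contrasts how Parse treats a bare flag depending on the
// declared field kind. Errors are ignored for brevity.
func demoParse() {
    args := []string{"--foo", "--bar=baz"}

    asBool, _ := Parse(args, &struct {
        Foo bool
        Bar string
    }{})
    fmt.Println(asBool) // map[traefik.bar:baz traefik.foo:true]

    asString, _ := Parse(args, &struct {
        Foo string
        Bar string
    }{})
    fmt.Println(asString) // map[traefik.foo:--bar=baz]
}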
255
pkg/config/flag/flagparser_test.go
Normal file
@@ -0,0 +1,255 @@
package flag
|
||||
|
||||
import (
|
||||
"testing"
|
||||
|
||||
"github.com/stretchr/testify/assert"
|
||||
"github.com/stretchr/testify/require"
|
||||
)
|
||||
|
||||
func TestParse(t *testing.T) {
|
||||
testCases := []struct {
|
||||
desc string
|
||||
args []string
|
||||
element interface{}
|
||||
expected map[string]string
|
||||
}{
|
||||
{
|
||||
desc: "no args",
|
||||
args: nil,
|
||||
expected: map[string]string{},
|
||||
},
|
||||
{
|
||||
desc: "bool value",
|
||||
args: []string{"--foo"},
|
||||
element: &struct {
|
||||
Foo bool
|
||||
}{},
|
||||
expected: map[string]string{
|
||||
"traefik.foo": "true",
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "equal",
|
||||
args: []string{"--foo=bar"},
|
||||
element: &struct {
|
||||
Foo string
|
||||
}{},
|
||||
expected: map[string]string{
|
||||
"traefik.foo": "bar",
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "space separated",
|
||||
args: []string{"--foo", "bar"},
|
||||
element: &struct {
|
||||
Foo string
|
||||
}{},
|
||||
expected: map[string]string{
|
||||
"traefik.foo": "bar",
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "space separated with end of parameter",
|
||||
args: []string{"--foo=bir", "--", "--bar"},
|
||||
element: &struct {
|
||||
Foo string
|
||||
}{},
|
||||
expected: map[string]string{
|
||||
"traefik.foo": "bir",
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "multiple bool flags without value",
|
||||
args: []string{"--foo", "--bar"},
|
||||
element: &struct {
|
||||
Foo bool
|
||||
Bar bool
|
||||
}{},
|
||||
expected: map[string]string{
|
||||
"traefik.foo": "true",
|
||||
"traefik.bar": "true",
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "slice with several flags",
|
||||
args: []string{"--foo=bar", "--foo=baz"},
|
||||
element: &struct {
|
||||
Foo []string
|
||||
}{},
|
||||
expected: map[string]string{
|
||||
"traefik.foo": "bar,baz",
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "map string",
|
||||
args: []string{"--foo.name=bar"},
|
||||
element: &struct {
|
||||
Foo map[string]string
|
||||
}{},
|
||||
expected: map[string]string{
|
||||
"traefik.foo.name": "bar",
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "map struct",
|
||||
args: []string{"--foo.name.value=bar"},
|
||||
element: &struct {
|
||||
Foo map[string]struct{ Value string }
|
||||
}{},
|
||||
expected: map[string]string{
|
||||
"traefik.foo.name.value": "bar",
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "map struct with sub-struct",
|
||||
args: []string{"--foo.name.bar.value=bar"},
|
||||
element: &struct {
|
||||
Foo map[string]struct {
|
||||
Bar *struct{ Value string }
|
||||
}
|
||||
}{},
|
||||
expected: map[string]string{
|
||||
"traefik.foo.name.bar.value": "bar",
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "map struct with sub-map",
|
||||
args: []string{"--foo.name1.bar.name2.value=bar"},
|
||||
element: &struct {
|
||||
Foo map[string]struct {
|
||||
Bar map[string]struct{ Value string }
|
||||
}
|
||||
}{},
|
||||
expected: map[string]string{
|
||||
"traefik.foo.name1.bar.name2.value": "bar",
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "slice with several flags 2",
|
||||
args: []string{"--foo", "bar", "--foo", "baz"},
|
||||
element: &struct {
|
||||
Foo []string
|
||||
}{},
|
||||
expected: map[string]string{
|
||||
"traefik.foo": "bar,baz",
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "slice with several flags 3",
|
||||
args: []string{"--foo", "bar", "--foo=", "--baz"},
|
||||
element: &struct {
|
||||
Foo []string
|
||||
Baz bool
|
||||
}{},
|
||||
expected: map[string]string{
|
||||
"traefik.foo": "bar,",
|
||||
"traefik.baz": "true",
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "slice with several flags 4",
|
||||
args: []string{"--foo", "bar", "--foo", "--baz"},
|
||||
element: &struct {
|
||||
Foo []string
|
||||
Baz bool
|
||||
}{},
|
||||
expected: map[string]string{
|
||||
"traefik.foo": "bar,--baz",
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "multiple string flag",
|
||||
element: &struct {
|
||||
Foo string
|
||||
}{},
|
||||
args: []string{"--foo=bar", "--foo=baz"},
|
||||
expected: map[string]string{
|
||||
"traefik.foo": "baz",
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "multiple string flag 2",
|
||||
element: &struct {
|
||||
Foo string
|
||||
}{},
|
||||
args: []string{"--foo", "bar", "--foo", "baz"},
|
||||
expected: map[string]string{
|
||||
"traefik.foo": "baz",
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "string without value",
|
||||
element: &struct {
|
||||
Foo string
|
||||
Bar bool
|
||||
}{},
|
||||
args: []string{"--foo", "--bar"},
|
||||
expected: map[string]string{
|
||||
"traefik.foo": "--bar",
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "struct pointer value",
|
||||
args: []string{"--foo"},
|
||||
element: &struct {
|
||||
Foo *struct{ Field string }
|
||||
}{},
|
||||
expected: map[string]string{
|
||||
"traefik.foo": "true",
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
for _, test := range testCases {
|
||||
test := test
|
||||
t.Run(test.desc, func(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
fl, err := Parse(test.args, test.element)
|
||||
require.NoError(t, err)
|
||||
assert.Equal(t, test.expected, fl)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestParse_Errors(t *testing.T) {
|
||||
testCases := []struct {
|
||||
desc string
|
||||
args []string
|
||||
element interface{}
|
||||
}{
|
||||
{
|
||||
desc: "triple hyphen",
|
||||
args: []string{"---foo"},
|
||||
element: &struct {
|
||||
Foo bool
|
||||
}{},
|
||||
},
|
||||
{
|
||||
desc: "equal",
|
||||
args: []string{"--=foo"},
|
||||
element: &struct {
|
||||
Foo bool
|
||||
}{},
|
||||
},
|
||||
{
|
||||
desc: "string without value",
|
||||
element: &struct {
|
||||
Foo string
|
||||
Bar bool
|
||||
}{},
|
||||
args: []string{"--foo"},
|
||||
},
|
||||
}
|
||||
|
||||
for _, test := range testCases {
|
||||
test := test
|
||||
t.Run(test.desc, func(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
_, err := Parse(test.args, test.element)
|
||||
require.Error(t, err)
|
||||
})
|
||||
}
|
||||
}
60
pkg/config/flag/flagtype.go
Normal file

@@ -0,0 +1,60 @@
package flag

import (
	"reflect"
	"strings"

	"github.com/containous/traefik/pkg/config/parser"
)

func getFlagTypes(element interface{}) map[string]reflect.Kind {
	ref := map[string]reflect.Kind{}

	if element == nil {
		return ref
	}

	tp := reflect.TypeOf(element).Elem()

	addFlagType(ref, "", tp)

	return ref
}

func addFlagType(ref map[string]reflect.Kind, name string, typ reflect.Type) {
	switch typ.Kind() {
	case reflect.Bool, reflect.Slice:
		ref[name] = typ.Kind()

	case reflect.Map:
		addFlagType(ref, getName(name, parser.MapNamePlaceholder), typ.Elem())

	case reflect.Ptr:
		if typ.Elem().Kind() == reflect.Struct {
			ref[name] = typ.Kind()
		}
		addFlagType(ref, name, typ.Elem())

	case reflect.Struct:
		for j := 0; j < typ.NumField(); j++ {
			subField := typ.Field(j)

			if !parser.IsExported(subField) {
				continue
			}

			if subField.Anonymous {
				addFlagType(ref, getName(name), subField.Type)
			} else {
				addFlagType(ref, getName(name, subField.Name), subField.Type)
			}
		}

	default:
		// noop
	}
}

func getName(names ...string) string {
	return strings.TrimPrefix(strings.ToLower(strings.Join(names, ".")), ".")
}
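As a rough illustration of what this type map is used for (the struct below is invented for this sketch and the function is package-internal), only the kinds the argument parser must special-case are recorded: booleans and pointers to structs, which may appear as bare --flags, and slices, which may be repeated and are comma-joined by setValue.

package flag

import (
	"fmt"
	"reflect"
)

// sketchFlagTypes shows the kinds recorded for a hypothetical configuration.
func sketchFlagTypes() {
	type API struct{ Dashboard bool }
	type static struct {
		API         *API
		EntryPoints []string
		LogLevel    string
	}

	kinds := getFlagTypes(&static{})

	fmt.Println(kinds["api"] == reflect.Ptr)            // true: a bare --api is allowed
	fmt.Println(kinds["api.dashboard"] == reflect.Bool) // true: a bare --api.dashboard is allowed
	fmt.Println(kinds["entrypoints"] == reflect.Slice)  // true: --entrypoints may be repeated

	_, ok := kinds["loglevel"]
	fmt.Println(ok) // false: plain strings need no special handling
}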
226
pkg/config/flag/flagtype_test.go
Normal file

@@ -0,0 +1,226 @@
package flag
|
||||
|
||||
import (
|
||||
"reflect"
|
||||
"testing"
|
||||
|
||||
"github.com/containous/traefik/pkg/config/parser"
|
||||
"github.com/stretchr/testify/assert"
|
||||
)
|
||||
|
||||
func Test_getFlagTypes(t *testing.T) {
|
||||
testCases := []struct {
|
||||
desc string
|
||||
element interface{}
|
||||
expected map[string]reflect.Kind
|
||||
}{
|
||||
{
|
||||
desc: "nil",
|
||||
element: nil,
|
||||
expected: map[string]reflect.Kind{},
|
||||
},
|
||||
{
|
||||
desc: "no fields",
|
||||
element: &struct {
|
||||
}{},
|
||||
expected: map[string]reflect.Kind{},
|
||||
},
|
||||
{
|
||||
desc: "string field",
|
||||
element: &struct {
|
||||
Foo string
|
||||
}{},
|
||||
expected: map[string]reflect.Kind{},
|
||||
},
|
||||
{
|
||||
desc: "bool field level 0",
|
||||
element: &struct {
|
||||
Foo bool
|
||||
fii bool
|
||||
}{},
|
||||
expected: map[string]reflect.Kind{
|
||||
"foo": reflect.Bool,
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "bool field level 1",
|
||||
element: &struct {
|
||||
Foo struct {
|
||||
Field bool
|
||||
}
|
||||
}{},
|
||||
expected: map[string]reflect.Kind{
|
||||
"foo.field": reflect.Bool,
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "bool field level 2",
|
||||
element: &struct {
|
||||
Foo *struct {
|
||||
Fii *struct {
|
||||
Field bool
|
||||
}
|
||||
}
|
||||
}{},
|
||||
expected: map[string]reflect.Kind{
|
||||
"foo": reflect.Ptr,
|
||||
"foo.fii": reflect.Ptr,
|
||||
"foo.fii.field": reflect.Bool,
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "pointer field",
|
||||
element: &struct {
|
||||
Foo *struct {
|
||||
Field string
|
||||
}
|
||||
}{},
|
||||
expected: map[string]reflect.Kind{
|
||||
"foo": reflect.Ptr,
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "bool field level 3",
|
||||
element: &struct {
|
||||
Foo *struct {
|
||||
Fii *struct {
|
||||
Fuu *struct {
|
||||
Field bool
|
||||
}
|
||||
}
|
||||
}
|
||||
}{},
|
||||
expected: map[string]reflect.Kind{
|
||||
"foo": reflect.Ptr,
|
||||
"foo.fii": reflect.Ptr,
|
||||
"foo.fii.fuu": reflect.Ptr,
|
||||
"foo.fii.fuu.field": reflect.Bool,
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "map string",
|
||||
element: &struct {
|
||||
Foo map[string]string
|
||||
}{},
|
||||
expected: map[string]reflect.Kind{},
|
||||
},
|
||||
{
|
||||
desc: "map bool",
|
||||
element: &struct {
|
||||
Foo map[string]bool
|
||||
Fii struct{}
|
||||
}{},
|
||||
expected: map[string]reflect.Kind{
|
||||
"foo." + parser.MapNamePlaceholder: reflect.Bool,
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "map struct",
|
||||
element: &struct {
|
||||
Foo map[string]struct {
|
||||
Field bool
|
||||
}
|
||||
}{},
|
||||
expected: map[string]reflect.Kind{
|
||||
"foo." + parser.MapNamePlaceholder + ".field": reflect.Bool,
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "map map bool",
|
||||
element: &struct {
|
||||
Foo map[string]map[string]bool
|
||||
}{},
|
||||
expected: map[string]reflect.Kind{
|
||||
"foo." + parser.MapNamePlaceholder + "." + parser.MapNamePlaceholder: reflect.Bool,
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "map struct map",
|
||||
element: &struct {
|
||||
Foo map[string]struct {
|
||||
Fii map[string]bool
|
||||
}
|
||||
}{},
|
||||
expected: map[string]reflect.Kind{
|
||||
"foo." + parser.MapNamePlaceholder + ".fii." + parser.MapNamePlaceholder: reflect.Bool,
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "pointer bool field level 0",
|
||||
element: &struct {
|
||||
Foo *bool
|
||||
}{},
|
||||
expected: map[string]reflect.Kind{
|
||||
"foo": reflect.Bool,
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "pointer int field level 0",
|
||||
element: &struct {
|
||||
Foo *int
|
||||
}{},
|
||||
expected: map[string]reflect.Kind{},
|
||||
},
|
||||
{
|
||||
desc: "bool slice field level 0",
|
||||
element: &struct {
|
||||
Foo []bool
|
||||
}{},
|
||||
expected: map[string]reflect.Kind{
|
||||
"foo": reflect.Slice,
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "string slice field level 0",
|
||||
element: &struct {
|
||||
Foo []string
|
||||
}{},
|
||||
expected: map[string]reflect.Kind{
|
||||
"foo": reflect.Slice,
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "slice field level 1",
|
||||
element: &struct {
|
||||
Foo struct {
|
||||
Field []string
|
||||
}
|
||||
}{},
|
||||
expected: map[string]reflect.Kind{
|
||||
"foo.field": reflect.Slice,
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "map slice string",
|
||||
element: &struct {
|
||||
Foo map[string][]string
|
||||
}{},
|
||||
expected: map[string]reflect.Kind{
|
||||
"foo." + parser.MapNamePlaceholder: reflect.Slice,
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "embedded struct",
|
||||
element: &struct {
|
||||
Yo
|
||||
}{},
|
||||
expected: map[string]reflect.Kind{
|
||||
"foo": reflect.Bool,
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
for _, test := range testCases {
|
||||
test := test
|
||||
t.Run(test.desc, func(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
actual := getFlagTypes(test.element)
|
||||
assert.Equal(t, test.expected, actual)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
type Yo struct {
|
||||
Foo bool
|
||||
}
97
pkg/config/generator/generator.go
Normal file

@@ -0,0 +1,97 @@
// Package generator implements the custom initialization of all the fields of an empty interface.
package generator

import (
	"reflect"

	"github.com/containous/traefik/pkg/config/parser"
)

type initializer interface {
	SetDefaults()
}

// Generate recursively initializes an empty structure, calling SetDefaults on each field, when it applies.
func Generate(element interface{}) {
	if element == nil {
		return
	}

	generate(element)
}

func generate(element interface{}) {
	field := reflect.ValueOf(element)

	fill(field)
}

func fill(field reflect.Value) {
	switch field.Kind() {
	case reflect.Ptr:
		setPtr(field)
	case reflect.Struct:
		setStruct(field)
	case reflect.Map:
		setMap(field)
	case reflect.Slice:
		if field.Type().Elem().Kind() == reflect.Struct ||
			field.Type().Elem().Kind() == reflect.Ptr && field.Type().Elem().Elem().Kind() == reflect.Struct {
			slice := reflect.MakeSlice(field.Type(), 1, 1)
			field.Set(slice)

			// use Ptr to allow "SetDefaults"
			value := reflect.New(reflect.PtrTo(field.Type().Elem()))
			setPtr(value)

			elem := value.Elem().Elem()
			field.Index(0).Set(elem)
		} else if field.Len() == 0 {
			slice := reflect.MakeSlice(field.Type(), 0, 0)
			field.Set(slice)
		}
	}
}

func setPtr(field reflect.Value) {
	if field.IsNil() {
		field.Set(reflect.New(field.Type().Elem()))
	}

	if field.Type().Implements(reflect.TypeOf((*initializer)(nil)).Elem()) {
		method := field.MethodByName("SetDefaults")
		if method.IsValid() {
			method.Call([]reflect.Value{})
		}
	}

	fill(field.Elem())
}

func setStruct(field reflect.Value) {
	for i := 0; i < field.NumField(); i++ {
		fd := field.Field(i)
		structField := field.Type().Field(i)

		if structField.Tag.Get(parser.TagLabel) == "-" {
			continue
		}

		if parser.IsExported(structField) {
			fill(fd)
		}
	}
}

func setMap(field reflect.Value) {
	if field.IsNil() {
		field.Set(reflect.MakeMap(field.Type()))
	}

	ptrValue := reflect.New(reflect.PtrTo(field.Type().Elem()))
	fill(ptrValue)

	value := ptrValue.Elem().Elem()
	key := reflect.ValueOf(parser.MapNamePlaceholder)
	field.SetMapIndex(key, value)
}
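A minimal sketch of what Generate does in practice (the ProviderExample and StaticExample types are hypothetical): it allocates every nil pointer, seeds maps with a single placeholder entry keyed by parser.MapNamePlaceholder, and calls SetDefaults wherever a field's pointer type implements the initializer interface.

package main

import (
	"fmt"

	"github.com/containous/traefik/pkg/config/generator"
)

// ProviderExample is a hypothetical configuration section.
type ProviderExample struct {
	Endpoint string
	Watch    bool
}

// SetDefaults is picked up by Generate because *ProviderExample implements initializer.
func (p *ProviderExample) SetDefaults() {
	p.Endpoint = "unix:///var/run/docker.sock"
	p.Watch = true
}

// StaticExample is a hypothetical root configuration.
type StaticExample struct {
	Provider *ProviderExample
	Labels   map[string]string
}

func main() {
	cfg := &StaticExample{}
	generator.Generate(cfg)

	fmt.Println(cfg.Provider.Endpoint) // unix:///var/run/docker.sock
	fmt.Println(cfg.Provider.Watch)    // true
	fmt.Println(len(cfg.Labels))       // 1: the "<name>" placeholder entry
}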
439
pkg/config/generator/generator_test.go
Normal file

@@ -0,0 +1,439 @@
package generator
|
||||
|
||||
import (
|
||||
"testing"
|
||||
|
||||
"github.com/containous/traefik/pkg/config/parser"
|
||||
"github.com/stretchr/testify/assert"
|
||||
)
|
||||
|
||||
func TestGenerate(t *testing.T) {
|
||||
testCases := []struct {
|
||||
desc string
|
||||
element interface{}
|
||||
expected interface{}
|
||||
}{
|
||||
{
|
||||
desc: "nil",
|
||||
},
|
||||
{
|
||||
desc: "simple",
|
||||
element: &Ya{},
|
||||
expected: &Ya{
|
||||
Foo: &Yaa{
|
||||
FieldIn1: "",
|
||||
FieldIn2: false,
|
||||
FieldIn3: 0,
|
||||
FieldIn4: map[string]string{
|
||||
parser.MapNamePlaceholder: "",
|
||||
},
|
||||
FieldIn5: map[string]int{
|
||||
parser.MapNamePlaceholder: 0,
|
||||
},
|
||||
FieldIn6: map[string]struct{ Field string }{
|
||||
parser.MapNamePlaceholder: {},
|
||||
},
|
||||
FieldIn7: map[string]struct{ Field map[string]string }{
|
||||
parser.MapNamePlaceholder: {
|
||||
Field: map[string]string{
|
||||
parser.MapNamePlaceholder: "",
|
||||
},
|
||||
},
|
||||
},
|
||||
FieldIn8: map[string]*struct{ Field string }{
|
||||
parser.MapNamePlaceholder: {},
|
||||
},
|
||||
FieldIn9: map[string]*struct{ Field map[string]string }{
|
||||
parser.MapNamePlaceholder: {
|
||||
Field: map[string]string{
|
||||
parser.MapNamePlaceholder: "",
|
||||
},
|
||||
},
|
||||
},
|
||||
FieldIn10: struct{ Field string }{},
|
||||
FieldIn11: &struct{ Field string }{},
|
||||
FieldIn12: func(v string) *string { return &v }(""),
|
||||
FieldIn13: func(v bool) *bool { return &v }(false),
|
||||
FieldIn14: func(v int) *int { return &v }(0),
|
||||
},
|
||||
Field1: "",
|
||||
Field2: false,
|
||||
Field3: 0,
|
||||
Field4: map[string]string{
|
||||
parser.MapNamePlaceholder: "",
|
||||
},
|
||||
Field5: map[string]int{
|
||||
parser.MapNamePlaceholder: 0,
|
||||
},
|
||||
Field6: map[string]struct{ Field string }{
|
||||
parser.MapNamePlaceholder: {},
|
||||
},
|
||||
Field7: map[string]struct{ Field map[string]string }{
|
||||
parser.MapNamePlaceholder: {
|
||||
Field: map[string]string{
|
||||
parser.MapNamePlaceholder: "",
|
||||
},
|
||||
},
|
||||
},
|
||||
Field8: map[string]*struct{ Field string }{
|
||||
parser.MapNamePlaceholder: {},
|
||||
},
|
||||
Field9: map[string]*struct{ Field map[string]string }{
|
||||
parser.MapNamePlaceholder: {
|
||||
Field: map[string]string{
|
||||
parser.MapNamePlaceholder: "",
|
||||
},
|
||||
},
|
||||
},
|
||||
Field10: struct{ Field string }{},
|
||||
Field11: &struct{ Field string }{},
|
||||
Field12: func(v string) *string { return &v }(""),
|
||||
Field13: func(v bool) *bool { return &v }(false),
|
||||
Field14: func(v int) *int { return &v }(0),
|
||||
Field15: []int{},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "with initial state",
|
||||
element: &Ya{
|
||||
Foo: &Yaa{
|
||||
FieldIn1: "bar",
|
||||
FieldIn2: false,
|
||||
FieldIn3: 1,
|
||||
FieldIn4: nil,
|
||||
FieldIn5: nil,
|
||||
FieldIn6: nil,
|
||||
FieldIn7: nil,
|
||||
FieldIn8: nil,
|
||||
FieldIn9: nil,
|
||||
FieldIn10: struct{ Field string }{},
|
||||
FieldIn11: nil,
|
||||
FieldIn12: nil,
|
||||
FieldIn13: nil,
|
||||
FieldIn14: nil,
|
||||
},
|
||||
Field1: "bir",
|
||||
Field2: true,
|
||||
Field3: 0,
|
||||
Field4: nil,
|
||||
Field5: nil,
|
||||
Field6: nil,
|
||||
Field7: nil,
|
||||
Field8: nil,
|
||||
Field9: nil,
|
||||
Field10: struct{ Field string }{},
|
||||
Field11: nil,
|
||||
Field12: nil,
|
||||
Field13: nil,
|
||||
Field14: nil,
|
||||
Field15: []int{7},
|
||||
},
|
||||
expected: &Ya{
|
||||
Foo: &Yaa{
|
||||
FieldIn1: "bar",
|
||||
FieldIn2: false,
|
||||
FieldIn3: 1,
|
||||
FieldIn4: map[string]string{
|
||||
parser.MapNamePlaceholder: "",
|
||||
},
|
||||
FieldIn5: map[string]int{
|
||||
parser.MapNamePlaceholder: 0,
|
||||
},
|
||||
FieldIn6: map[string]struct{ Field string }{
|
||||
parser.MapNamePlaceholder: {},
|
||||
},
|
||||
FieldIn7: map[string]struct{ Field map[string]string }{
|
||||
parser.MapNamePlaceholder: {
|
||||
Field: map[string]string{
|
||||
parser.MapNamePlaceholder: "",
|
||||
},
|
||||
},
|
||||
},
|
||||
FieldIn8: map[string]*struct{ Field string }{
|
||||
parser.MapNamePlaceholder: {},
|
||||
},
|
||||
FieldIn9: map[string]*struct{ Field map[string]string }{
|
||||
parser.MapNamePlaceholder: {
|
||||
Field: map[string]string{
|
||||
parser.MapNamePlaceholder: "",
|
||||
},
|
||||
},
|
||||
},
|
||||
FieldIn10: struct{ Field string }{},
|
||||
FieldIn11: &struct{ Field string }{},
|
||||
FieldIn12: func(v string) *string { return &v }(""),
|
||||
FieldIn13: func(v bool) *bool { return &v }(false),
|
||||
FieldIn14: func(v int) *int { return &v }(0),
|
||||
},
|
||||
Field1: "bir",
|
||||
Field2: true,
|
||||
Field3: 0,
|
||||
Field4: map[string]string{
|
||||
parser.MapNamePlaceholder: "",
|
||||
},
|
||||
Field5: map[string]int{
|
||||
parser.MapNamePlaceholder: 0,
|
||||
},
|
||||
Field6: map[string]struct{ Field string }{
|
||||
parser.MapNamePlaceholder: {},
|
||||
},
|
||||
Field7: map[string]struct{ Field map[string]string }{
|
||||
parser.MapNamePlaceholder: {
|
||||
Field: map[string]string{
|
||||
parser.MapNamePlaceholder: "",
|
||||
},
|
||||
},
|
||||
},
|
||||
Field8: map[string]*struct{ Field string }{
|
||||
parser.MapNamePlaceholder: {},
|
||||
},
|
||||
Field9: map[string]*struct{ Field map[string]string }{
|
||||
parser.MapNamePlaceholder: {
|
||||
Field: map[string]string{
|
||||
parser.MapNamePlaceholder: "",
|
||||
},
|
||||
},
|
||||
},
|
||||
Field10: struct{ Field string }{},
|
||||
Field11: &struct{ Field string }{},
|
||||
Field12: func(v string) *string { return &v }(""),
|
||||
Field13: func(v bool) *bool { return &v }(false),
|
||||
Field14: func(v int) *int { return &v }(0),
|
||||
Field15: []int{7},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "setDefault",
|
||||
element: &Hu{},
|
||||
expected: &Hu{
|
||||
Foo: "hu",
|
||||
Fii: &Hi{
|
||||
Field: "hi",
|
||||
},
|
||||
Fuu: map[string]string{"<name>": ""},
|
||||
Fee: map[string]Hi{"<name>": {Field: "hi"}},
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
for _, test := range testCases {
|
||||
test := test
|
||||
t.Run(test.desc, func(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
Generate(test.element)
|
||||
|
||||
assert.Equal(t, test.expected, test.element)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func Test_generate(t *testing.T) {
|
||||
testCases := []struct {
|
||||
desc string
|
||||
element interface{}
|
||||
expected interface{}
|
||||
}{
|
||||
{
|
||||
desc: "struct pointer",
|
||||
element: &struct {
|
||||
Foo string
|
||||
Fii *struct{ Field string }
|
||||
}{},
|
||||
expected: &struct {
|
||||
Foo string
|
||||
Fii *struct{ Field string }
|
||||
}{
|
||||
Foo: "",
|
||||
Fii: &struct{ Field string }{
|
||||
Field: "",
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "string slice",
|
||||
element: &struct {
|
||||
Foo []string
|
||||
}{},
|
||||
expected: &struct {
|
||||
Foo []string
|
||||
}{
|
||||
Foo: []string{},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "int slice",
|
||||
element: &struct {
|
||||
Foo []int
|
||||
}{},
|
||||
expected: &struct {
|
||||
Foo []int
|
||||
}{
|
||||
Foo: []int{},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "struct slice",
|
||||
element: &struct {
|
||||
Foo []struct {
|
||||
Field string
|
||||
}
|
||||
}{},
|
||||
expected: &struct {
|
||||
Foo []struct {
|
||||
Field string
|
||||
}
|
||||
}{
|
||||
Foo: []struct {
|
||||
Field string
|
||||
}{
|
||||
{Field: ""},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "map string",
|
||||
element: &struct {
|
||||
Foo string
|
||||
Fii map[string]string
|
||||
}{},
|
||||
expected: &struct {
|
||||
Foo string
|
||||
Fii map[string]string
|
||||
}{
|
||||
Foo: "",
|
||||
Fii: map[string]string{
|
||||
parser.MapNamePlaceholder: "",
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "map struct",
|
||||
element: &struct {
|
||||
Foo string
|
||||
Fii map[string]struct{ Field string }
|
||||
}{},
|
||||
expected: &struct {
|
||||
Foo string
|
||||
Fii map[string]struct{ Field string }
|
||||
}{
|
||||
Foo: "",
|
||||
Fii: map[string]struct{ Field string }{
|
||||
parser.MapNamePlaceholder: {},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "map struct pointer level 2",
|
||||
element: &struct {
|
||||
Foo string
|
||||
Fuu *struct {
|
||||
Fii map[string]*struct{ Field string }
|
||||
}
|
||||
}{},
|
||||
expected: &struct {
|
||||
Foo string
|
||||
Fuu *struct {
|
||||
Fii map[string]*struct{ Field string }
|
||||
}
|
||||
}{
|
||||
Foo: "",
|
||||
Fuu: &struct {
|
||||
Fii map[string]*struct {
|
||||
Field string
|
||||
}
|
||||
}{
|
||||
Fii: map[string]*struct{ Field string }{
|
||||
parser.MapNamePlaceholder: {
|
||||
Field: "",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "SetDefaults",
|
||||
element: &Hu{},
|
||||
expected: &Hu{
|
||||
Foo: "hu",
|
||||
Fii: &Hi{
|
||||
Field: "hi",
|
||||
},
|
||||
Fuu: map[string]string{
|
||||
parser.MapNamePlaceholder: "",
|
||||
},
|
||||
Fee: map[string]Hi{
|
||||
parser.MapNamePlaceholder: {
|
||||
Field: "hi",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
for _, test := range testCases {
|
||||
test := test
|
||||
t.Run(test.desc, func(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
generate(test.element)
|
||||
|
||||
assert.Equal(t, test.expected, test.element)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
type Hu struct {
|
||||
Foo string
|
||||
Fii *Hi
|
||||
Fuu map[string]string
|
||||
Fee map[string]Hi
|
||||
}
|
||||
|
||||
func (h *Hu) SetDefaults() {
|
||||
h.Foo = "hu"
|
||||
}
|
||||
|
||||
type Hi struct {
|
||||
Field string
|
||||
}
|
||||
|
||||
func (h *Hi) SetDefaults() {
|
||||
h.Field = "hi"
|
||||
}
|
||||
|
||||
type Ya struct {
|
||||
Foo *Yaa
|
||||
Field1 string
|
||||
Field2 bool
|
||||
Field3 int
|
||||
Field4 map[string]string
|
||||
Field5 map[string]int
|
||||
Field6 map[string]struct{ Field string }
|
||||
Field7 map[string]struct{ Field map[string]string }
|
||||
Field8 map[string]*struct{ Field string }
|
||||
Field9 map[string]*struct{ Field map[string]string }
|
||||
Field10 struct{ Field string }
|
||||
Field11 *struct{ Field string }
|
||||
Field12 *string
|
||||
Field13 *bool
|
||||
Field14 *int
|
||||
Field15 []int
|
||||
}
|
||||
|
||||
type Yaa struct {
|
||||
FieldIn1 string
|
||||
FieldIn2 bool
|
||||
FieldIn3 int
|
||||
FieldIn4 map[string]string
|
||||
FieldIn5 map[string]int
|
||||
FieldIn6 map[string]struct{ Field string }
|
||||
FieldIn7 map[string]struct{ Field map[string]string }
|
||||
FieldIn8 map[string]*struct{ Field string }
|
||||
FieldIn9 map[string]*struct{ Field map[string]string }
|
||||
FieldIn10 struct{ Field string }
|
||||
FieldIn11 *struct{ Field string }
|
||||
FieldIn12 *string
|
||||
FieldIn13 *bool
|
||||
FieldIn14 *int
|
||||
}
33
pkg/config/label/label.go
Normal file

@@ -0,0 +1,33 @@
// Package label implements the decoding and encoding between flat labels and a typed Configuration.
package label

import (
	"github.com/containous/traefik/pkg/config"
	"github.com/containous/traefik/pkg/config/parser"
)

// DecodeConfiguration converts the labels to a configuration.
func DecodeConfiguration(labels map[string]string) (*config.Configuration, error) {
	conf := &config.Configuration{
		HTTP: &config.HTTPConfiguration{},
		TCP:  &config.TCPConfiguration{},
	}

	err := parser.Decode(labels, conf, "traefik.http", "traefik.tcp")
	if err != nil {
		return nil, err
	}

	return conf, nil
}

// EncodeConfiguration converts a configuration to labels.
func EncodeConfiguration(conf *config.Configuration) (map[string]string, error) {
	return parser.Encode(conf)
}

// Decode converts the labels to an element.
// labels -> [ node -> node + metadata (type) ] -> element (node)
func Decode(labels map[string]string, element interface{}, filters ...string) error {
	return parser.Decode(labels, element, filters...)
}
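A short sketch of the generic Decode (the Options and Middleware types and the label names are invented for this sketch, and the filter arguments are assumed to behave as in DecodeConfiguration above, i.e. only labels under the listed roots are decoded): a dotted label path becomes a map key and a field on the typed element.

package main

import (
	"fmt"

	"github.com/containous/traefik/pkg/config/label"
)

// Middleware and Options are hypothetical types used only for this sketch.
type Middleware struct {
	Prefix  string
	Retries int
}

type Options struct {
	Middlewares map[string]Middleware
}

func main() {
	labels := map[string]string{
		"traefik.middlewares.stripper.prefix":  "/api",
		"traefik.middlewares.stripper.retries": "3",
	}

	opts := &Options{}
	if err := label.Decode(labels, opts, "traefik.middlewares"); err != nil {
		panic(err)
	}

	fmt.Println(opts.Middlewares["stripper"].Prefix)  // /api
	fmt.Println(opts.Middlewares["stripper"].Retries) // 3
}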
1133
pkg/config/label/label_test.go
Normal file
File diff suppressed because it is too large
@@ -1,8 +1,8 @@
package config

import (
	"github.com/containous/flaeg/parse"
	"github.com/containous/traefik/pkg/ip"
	"github.com/containous/traefik/pkg/types"
)

// +k8s:deepcopy-gen=true

@@ -52,7 +52,7 @@ type Auth struct {

// BasicAuth holds the HTTP basic authentication configuration.
type BasicAuth struct {
	Users        `json:"users,omitempty" mapstructure:","`
	Users        Users  `json:"users,omitempty"`
	UsersFile    string `json:"usersFile,omitempty"`
	Realm        string `json:"realm,omitempty"`
	RemoveHeader bool   `json:"removeHeader,omitempty"`

@@ -93,7 +93,7 @@ type Compress struct{}

// DigestAuth holds the Digest HTTP authentication configuration.
type DigestAuth struct {
	Users        `json:"users,omitempty" mapstructure:","`
	Users        Users  `json:"users,omitempty"`
	UsersFile    string `json:"usersFile,omitempty"`
	RemoveHeader bool   `json:"removeHeader,omitempty"`
	Realm        string `json:"realm,omitempty" mapstructure:","`

@@ -273,7 +273,7 @@ type PassTLSClientCert struct {

// Rate holds the rate limiting configuration for a specific time period.
type Rate struct {
	Period  parse.Duration `json:"period,omitempty"`
	Period  types.Duration `json:"period,omitempty"`
	Average int64          `json:"average,omitempty"`
	Burst   int64          `json:"burst,omitempty"`
}
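The parse.Duration to types.Duration switch above ties into setDuration in element_fill.go below: for a types.Duration field, a bare integer is read as a number of seconds, while a value with an explicit unit goes through time.ParseDuration. A small sketch of that rule (the rateExample struct is illustrative, and parser.Decode is assumed to accept the flat map directly, as the env package does):

package main

import (
	"fmt"
	"time"

	"github.com/containous/traefik/pkg/config/parser"
	"github.com/containous/traefik/pkg/types"
)

// rateExample mirrors the Rate struct above, reduced to its duration field.
type rateExample struct {
	Period types.Duration
}

func main() {
	bare := &rateExample{}
	// A bare integer is interpreted as seconds for types.Duration.
	if err := parser.Decode(map[string]string{"traefik.period": "10"}, bare); err != nil {
		panic(err)
	}

	explicit := &rateExample{}
	// An explicit unit is parsed by time.ParseDuration.
	if err := parser.Decode(map[string]string{"traefik.period": "500ms"}, explicit); err != nil {
		panic(err)
	}

	fmt.Println(time.Duration(bare.Period))     // 10s
	fmt.Println(time.Duration(explicit.Period)) // 500ms
}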
327
pkg/config/parser/element_fill.go
Normal file

@@ -0,0 +1,327 @@
package parser
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"reflect"
|
||||
"strconv"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"github.com/containous/traefik/pkg/types"
|
||||
)
|
||||
|
||||
type initializer interface {
|
||||
SetDefaults()
|
||||
}
|
||||
|
||||
// Fill populates the fields of the element using the information in node.
|
||||
func Fill(element interface{}, node *Node) error {
|
||||
if element == nil || node == nil {
|
||||
return nil
|
||||
}
|
||||
|
||||
if node.Kind == 0 {
|
||||
return fmt.Errorf("missing node type: %s", node.Name)
|
||||
}
|
||||
|
||||
root := reflect.ValueOf(element)
|
||||
if root.Kind() == reflect.Struct {
|
||||
return fmt.Errorf("struct are not supported, use pointer instead")
|
||||
}
|
||||
|
||||
return fill(root.Elem(), node)
|
||||
}
|
||||
|
||||
func fill(field reflect.Value, node *Node) error {
|
||||
// related to allow-empty tag
|
||||
if node.Disabled {
|
||||
return nil
|
||||
}
|
||||
|
||||
switch field.Kind() {
|
||||
case reflect.String:
|
||||
field.SetString(node.Value)
|
||||
return nil
|
||||
case reflect.Bool:
|
||||
val, err := strconv.ParseBool(node.Value)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
field.SetBool(val)
|
||||
return nil
|
||||
case reflect.Int8:
|
||||
return setInt(field, node.Value, 8)
|
||||
case reflect.Int16:
|
||||
return setInt(field, node.Value, 16)
|
||||
case reflect.Int32:
|
||||
return setInt(field, node.Value, 32)
|
||||
case reflect.Int64, reflect.Int:
|
||||
return setInt(field, node.Value, 64)
|
||||
case reflect.Uint8:
|
||||
return setUint(field, node.Value, 8)
|
||||
case reflect.Uint16:
|
||||
return setUint(field, node.Value, 16)
|
||||
case reflect.Uint32:
|
||||
return setUint(field, node.Value, 32)
|
||||
case reflect.Uint64, reflect.Uint:
|
||||
return setUint(field, node.Value, 64)
|
||||
case reflect.Float32:
|
||||
return setFloat(field, node.Value, 32)
|
||||
case reflect.Float64:
|
||||
return setFloat(field, node.Value, 64)
|
||||
case reflect.Struct:
|
||||
return setStruct(field, node)
|
||||
case reflect.Ptr:
|
||||
return setPtr(field, node)
|
||||
case reflect.Map:
|
||||
return setMap(field, node)
|
||||
case reflect.Slice:
|
||||
return setSlice(field, node)
|
||||
default:
|
||||
return nil
|
||||
}
|
||||
}
|
||||
|
||||
func setPtr(field reflect.Value, node *Node) error {
|
||||
if field.IsNil() {
|
||||
field.Set(reflect.New(field.Type().Elem()))
|
||||
|
||||
if field.Type().Implements(reflect.TypeOf((*initializer)(nil)).Elem()) {
|
||||
method := field.MethodByName("SetDefaults")
|
||||
if method.IsValid() {
|
||||
method.Call([]reflect.Value{})
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return fill(field.Elem(), node)
|
||||
}
|
||||
|
||||
func setStruct(field reflect.Value, node *Node) error {
|
||||
for _, child := range node.Children {
|
||||
fd := field.FieldByName(child.FieldName)
|
||||
|
||||
zeroValue := reflect.Value{}
|
||||
if fd == zeroValue {
|
||||
return fmt.Errorf("field not found, node: %s (%s)", child.Name, child.FieldName)
|
||||
}
|
||||
|
||||
err := fill(fd, child)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func setSlice(field reflect.Value, node *Node) error {
|
||||
if field.Type().Elem().Kind() == reflect.Struct ||
|
||||
field.Type().Elem().Kind() == reflect.Ptr && field.Type().Elem().Elem().Kind() == reflect.Struct {
|
||||
return setSliceStruct(field, node)
|
||||
}
|
||||
|
||||
if len(node.Value) == 0 {
|
||||
return nil
|
||||
}
|
||||
|
||||
values := strings.Split(node.Value, ",")
|
||||
|
||||
slice := reflect.MakeSlice(field.Type(), len(values), len(values))
|
||||
field.Set(slice)
|
||||
|
||||
for i := 0; i < len(values); i++ {
|
||||
value := strings.TrimSpace(values[i])
|
||||
|
||||
switch field.Type().Elem().Kind() {
|
||||
case reflect.String:
|
||||
field.Index(i).SetString(value)
|
||||
case reflect.Int:
|
||||
val, err := strconv.ParseInt(value, 10, 64)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
field.Index(i).SetInt(val)
|
||||
case reflect.Int8:
|
||||
err := setInt(field.Index(i), value, 8)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
case reflect.Int16:
|
||||
err := setInt(field.Index(i), value, 16)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
case reflect.Int32:
|
||||
err := setInt(field.Index(i), value, 32)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
case reflect.Int64:
|
||||
err := setInt(field.Index(i), value, 64)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
case reflect.Uint:
|
||||
val, err := strconv.ParseUint(value, 10, 64)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
field.Index(i).SetUint(val)
|
||||
case reflect.Uint8:
|
||||
err := setUint(field.Index(i), value, 8)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
case reflect.Uint16:
|
||||
err := setUint(field.Index(i), value, 16)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
case reflect.Uint32:
|
||||
err := setUint(field.Index(i), value, 32)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
case reflect.Uint64:
|
||||
err := setUint(field.Index(i), value, 64)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
case reflect.Float32:
|
||||
err := setFloat(field.Index(i), value, 32)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
case reflect.Float64:
|
||||
err := setFloat(field.Index(i), value, 64)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
case reflect.Bool:
|
||||
val, err := strconv.ParseBool(value)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
field.Index(i).SetBool(val)
|
||||
default:
|
||||
return fmt.Errorf("unsupported type: %s", field.Type().Elem())
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func setSliceStruct(field reflect.Value, node *Node) error {
|
||||
if node.Tag.Get(TagLabelSliceAsStruct) != "" {
|
||||
return setSliceAsStruct(field, node)
|
||||
}
|
||||
|
||||
field.Set(reflect.MakeSlice(field.Type(), len(node.Children), len(node.Children)))
|
||||
|
||||
for i, child := range node.Children {
|
||||
// use Ptr to allow "SetDefaults"
|
||||
value := reflect.New(reflect.PtrTo(field.Type().Elem()))
|
||||
err := setPtr(value, child)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
field.Index(i).Set(value.Elem().Elem())
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func setSliceAsStruct(field reflect.Value, node *Node) error {
|
||||
if len(node.Children) == 0 {
|
||||
return fmt.Errorf("invalid slice: node %s", node.Name)
|
||||
}
|
||||
|
||||
// use Ptr to allow "SetDefaults"
|
||||
value := reflect.New(reflect.PtrTo(field.Type().Elem()))
|
||||
err := setPtr(value, node)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
elem := value.Elem().Elem()
|
||||
|
||||
field.Set(reflect.MakeSlice(field.Type(), 1, 1))
|
||||
field.Index(0).Set(elem)
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func setMap(field reflect.Value, node *Node) error {
|
||||
if field.IsNil() {
|
||||
field.Set(reflect.MakeMap(field.Type()))
|
||||
}
|
||||
|
||||
for _, child := range node.Children {
|
||||
ptrValue := reflect.New(reflect.PtrTo(field.Type().Elem()))
|
||||
|
||||
err := fill(ptrValue, child)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
value := ptrValue.Elem().Elem()
|
||||
|
||||
key := reflect.ValueOf(child.Name)
|
||||
field.SetMapIndex(key, value)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func setInt(field reflect.Value, value string, bitSize int) error {
|
||||
switch field.Type() {
|
||||
case reflect.TypeOf(types.Duration(0)):
|
||||
return setDuration(field, value, bitSize, time.Second)
|
||||
case reflect.TypeOf(time.Duration(0)):
|
||||
return setDuration(field, value, bitSize, time.Nanosecond)
|
||||
default:
|
||||
val, err := strconv.ParseInt(value, 10, bitSize)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
field.Set(reflect.ValueOf(val).Convert(field.Type()))
|
||||
return nil
|
||||
}
|
||||
}
|
||||
|
||||
func setDuration(field reflect.Value, value string, bitSize int, defaultUnit time.Duration) error {
|
||||
val, err := strconv.ParseInt(value, 10, bitSize)
|
||||
if err == nil {
|
||||
field.Set(reflect.ValueOf(time.Duration(val) * defaultUnit).Convert(field.Type()))
|
||||
return nil
|
||||
}
|
||||
|
||||
duration, err := time.ParseDuration(value)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
field.Set(reflect.ValueOf(duration).Convert(field.Type()))
|
||||
return nil
|
||||
}
|
||||
|
||||
func setUint(field reflect.Value, value string, bitSize int) error {
|
||||
val, err := strconv.ParseUint(value, 10, bitSize)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
field.Set(reflect.ValueOf(val).Convert(field.Type()))
|
||||
return nil
|
||||
}
|
||||
|
||||
func setFloat(field reflect.Value, value string, bitSize int) error {
|
||||
val, err := strconv.ParseFloat(value, bitSize)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
field.Set(reflect.ValueOf(val).Convert(field.Type()))
|
||||
return nil
|
||||
}
1431
pkg/config/parser/element_fill_test.go
Normal file
File diff suppressed because it is too large
203
pkg/config/parser/element_nodes.go
Normal file

@@ -0,0 +1,203 @@
package parser
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"reflect"
|
||||
"strconv"
|
||||
"strings"
|
||||
)
|
||||
|
||||
// EncodeToNode converts an element to a node.
|
||||
// element -> nodes
|
||||
func EncodeToNode(element interface{}, omitEmpty bool) (*Node, error) {
|
||||
rValue := reflect.ValueOf(element)
|
||||
node := &Node{Name: "traefik"}
|
||||
|
||||
encoder := encoderToNode{omitEmpty: omitEmpty}
|
||||
|
||||
err := encoder.setNodeValue(node, rValue)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return node, nil
|
||||
}
|
||||
|
||||
type encoderToNode struct {
|
||||
omitEmpty bool
|
||||
}
|
||||
|
||||
func (e encoderToNode) setNodeValue(node *Node, rValue reflect.Value) error {
|
||||
switch rValue.Kind() {
|
||||
case reflect.String:
|
||||
node.Value = rValue.String()
|
||||
case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
|
||||
node.Value = strconv.FormatInt(rValue.Int(), 10)
|
||||
case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64:
|
||||
node.Value = strconv.FormatUint(rValue.Uint(), 10)
|
||||
case reflect.Float32, reflect.Float64:
|
||||
node.Value = strconv.FormatFloat(rValue.Float(), 'f', 6, 64)
|
||||
case reflect.Bool:
|
||||
node.Value = strconv.FormatBool(rValue.Bool())
|
||||
case reflect.Struct:
|
||||
return e.setStructValue(node, rValue)
|
||||
case reflect.Ptr:
|
||||
return e.setNodeValue(node, rValue.Elem())
|
||||
case reflect.Map:
|
||||
return e.setMapValue(node, rValue)
|
||||
case reflect.Slice:
|
||||
return e.setSliceValue(node, rValue)
|
||||
default:
|
||||
// noop
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (e encoderToNode) setStructValue(node *Node, rValue reflect.Value) error {
|
||||
rType := rValue.Type()
|
||||
|
||||
for i := 0; i < rValue.NumField(); i++ {
|
||||
field := rType.Field(i)
|
||||
fieldValue := rValue.Field(i)
|
||||
|
||||
if !IsExported(field) {
|
||||
continue
|
||||
}
|
||||
|
||||
if field.Tag.Get(TagLabel) == "-" {
|
||||
continue
|
||||
}
|
||||
|
||||
if err := isSupportedType(field); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if e.isSkippedField(field, fieldValue) {
|
||||
continue
|
||||
}
|
||||
|
||||
nodeName := field.Name
|
||||
if field.Type.Kind() == reflect.Slice && len(field.Tag.Get(TagLabelSliceAsStruct)) != 0 {
|
||||
nodeName = field.Tag.Get(TagLabelSliceAsStruct)
|
||||
}
|
||||
|
||||
if field.Anonymous {
|
||||
if err := e.setNodeValue(node, fieldValue); err != nil {
|
||||
return err
|
||||
}
|
||||
continue
|
||||
}
|
||||
|
||||
child := &Node{Name: nodeName, FieldName: field.Name, Description: field.Tag.Get(TagDescription)}
|
||||
|
||||
if err := e.setNodeValue(child, fieldValue); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if field.Type.Kind() == reflect.Ptr {
|
||||
if field.Type.Elem().Kind() != reflect.Struct && fieldValue.IsNil() {
|
||||
continue
|
||||
}
|
||||
|
||||
if field.Type.Elem().Kind() == reflect.Struct && len(child.Children) == 0 {
|
||||
if field.Tag.Get(TagLabel) != TagLabelAllowEmpty {
|
||||
continue
|
||||
}
|
||||
|
||||
child.Value = "true"
|
||||
}
|
||||
}
|
||||
|
||||
node.Children = append(node.Children, child)
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (e encoderToNode) setMapValue(node *Node, rValue reflect.Value) error {
|
||||
for _, key := range rValue.MapKeys() {
|
||||
child := &Node{Name: key.String(), FieldName: key.String()}
|
||||
node.Children = append(node.Children, child)
|
||||
|
||||
if err := e.setNodeValue(child, rValue.MapIndex(key)); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (e encoderToNode) setSliceValue(node *Node, rValue reflect.Value) error {
|
||||
// label-slice-as-struct
|
||||
if rValue.Type().Elem().Kind() == reflect.Struct && !strings.EqualFold(node.Name, node.FieldName) {
|
||||
if rValue.Len() > 1 {
|
||||
return fmt.Errorf("node %s has too many slice entries: %d", node.Name, rValue.Len())
|
||||
}
|
||||
|
||||
return e.setNodeValue(node, rValue.Index(0))
|
||||
}
|
||||
|
||||
if rValue.Type().Elem().Kind() == reflect.Struct ||
|
||||
rValue.Type().Elem().Kind() == reflect.Ptr && rValue.Type().Elem().Elem().Kind() == reflect.Struct {
|
||||
for i := 0; i < rValue.Len(); i++ {
|
||||
child := &Node{Name: "[" + strconv.Itoa(i) + "]"}
|
||||
|
||||
eValue := rValue.Index(i)
|
||||
|
||||
err := e.setNodeValue(child, eValue)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
node.Children = append(node.Children, child)
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
var values []string
|
||||
|
||||
for i := 0; i < rValue.Len(); i++ {
|
||||
eValue := rValue.Index(i)
|
||||
|
||||
switch eValue.Kind() {
|
||||
case reflect.String:
|
||||
values = append(values, eValue.String())
|
||||
case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
|
||||
values = append(values, strconv.FormatInt(eValue.Int(), 10))
|
||||
case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64:
|
||||
values = append(values, strconv.FormatUint(eValue.Uint(), 10))
|
||||
case reflect.Float32, reflect.Float64:
|
||||
values = append(values, strconv.FormatFloat(eValue.Float(), 'f', 6, 64))
|
||||
case reflect.Bool:
|
||||
values = append(values, strconv.FormatBool(eValue.Bool()))
|
||||
default:
|
||||
// noop
|
||||
}
|
||||
}
|
||||
|
||||
node.Value = strings.Join(values, ", ")
|
||||
return nil
|
||||
}
|
||||
|
||||
func (e encoderToNode) isSkippedField(field reflect.StructField, fieldValue reflect.Value) bool {
|
||||
if e.omitEmpty && field.Type.Kind() == reflect.String && fieldValue.Len() == 0 {
|
||||
return true
|
||||
}
|
||||
|
||||
if field.Type.Kind() == reflect.Ptr && field.Type.Elem().Kind() == reflect.Struct && fieldValue.IsNil() {
|
||||
return true
|
||||
}
|
||||
|
||||
if e.omitEmpty && (field.Type.Kind() == reflect.Slice) &&
|
||||
(fieldValue.IsNil() || fieldValue.Len() == 0) {
|
||||
return true
|
||||
}
|
||||
|
||||
if (field.Type.Kind() == reflect.Map) &&
|
||||
(fieldValue.IsNil() || fieldValue.Len() == 0) {
|
||||
return true
|
||||
}
|
||||
|
||||
return false
|
||||
}
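For the encode direction, a small sketch (the sample struct is hypothetical): EncodeToNode walks a typed element and produces the intermediate node tree that the flat encoders then turn into labels, flags, or environment variables; with omitEmpty set, zero strings, empty slices, and nil pointers are skipped, mirroring isSkippedField above.

package main

import (
	"fmt"

	"github.com/containous/traefik/pkg/config/parser"
)

// sample is a hypothetical element used only for this sketch.
type sample struct {
	Name    string
	Servers []string
	Unset   string
}

func main() {
	node, err := parser.EncodeToNode(&sample{Name: "svc", Servers: []string{"s1", "s2"}}, true)
	if err != nil {
		panic(err)
	}

	// The root node is always named "traefik"; Unset is skipped because omitEmpty is true.
	fmt.Println(node.Name) // traefik
	for _, child := range node.Children {
		fmt.Printf("%s=%s\n", child.Name, child.Value) // Name=svc, then Servers=s1, s2
	}
}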
737
pkg/config/parser/element_nodes_test.go
Normal file

@@ -0,0 +1,737 @@
package parser
|
||||
|
||||
import (
|
||||
"testing"
|
||||
|
||||
"github.com/stretchr/testify/assert"
|
||||
"github.com/stretchr/testify/require"
|
||||
)
|
||||
|
||||
func TestEncodeToNode(t *testing.T) {
|
||||
type expected struct {
|
||||
node *Node
|
||||
error bool
|
||||
}
|
||||
|
||||
testCases := []struct {
|
||||
desc string
|
||||
element interface{}
|
||||
expected expected
|
||||
}{
|
||||
{
|
||||
desc: "Description",
|
||||
element: struct {
|
||||
Foo string `description:"text"`
|
||||
}{Foo: "bar"},
|
||||
expected: expected{node: &Node{Name: "traefik", Children: []*Node{
|
||||
{Name: "Foo", FieldName: "Foo", Value: "bar", Description: "text"},
|
||||
}},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "string",
|
||||
element: struct {
|
||||
Foo string
|
||||
}{Foo: "bar"},
|
||||
expected: expected{node: &Node{Name: "traefik", Children: []*Node{
|
||||
{Name: "Foo", FieldName: "Foo", Value: "bar"},
|
||||
}},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "2 string fields",
|
||||
element: struct {
|
||||
Foo string
|
||||
Fii string
|
||||
}{Foo: "bar", Fii: "hii"},
|
||||
expected: expected{node: &Node{Name: "traefik", Children: []*Node{
|
||||
{Name: "Foo", FieldName: "Foo", Value: "bar"},
|
||||
{Name: "Fii", FieldName: "Fii", Value: "hii"},
|
||||
}},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "int",
|
||||
element: struct {
|
||||
Foo int
|
||||
}{Foo: 1},
|
||||
expected: expected{node: &Node{Name: "traefik", Children: []*Node{
|
||||
{Name: "Foo", FieldName: "Foo", Value: "1"},
|
||||
}},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "int8",
|
||||
element: struct {
|
||||
Foo int8
|
||||
}{Foo: 2},
|
||||
expected: expected{node: &Node{Name: "traefik", Children: []*Node{
|
||||
{Name: "Foo", FieldName: "Foo", Value: "2"},
|
||||
}},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "int16",
|
||||
element: struct {
|
||||
Foo int16
|
||||
}{Foo: 2},
|
||||
expected: expected{node: &Node{Name: "traefik", Children: []*Node{
|
||||
{Name: "Foo", FieldName: "Foo", Value: "2"},
|
||||
}},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "int32",
|
||||
element: struct {
|
||||
Foo int32
|
||||
}{Foo: 2},
|
||||
expected: expected{node: &Node{Name: "traefik", Children: []*Node{
|
||||
{Name: "Foo", FieldName: "Foo", Value: "2"},
|
||||
}},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "int64",
|
||||
element: struct {
|
||||
Foo int64
|
||||
}{Foo: 2},
|
||||
expected: expected{node: &Node{Name: "traefik", Children: []*Node{
|
||||
{Name: "Foo", FieldName: "Foo", Value: "2"},
|
||||
}},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "uint",
|
||||
element: struct {
|
||||
Foo uint
|
||||
}{Foo: 1},
|
||||
expected: expected{node: &Node{Name: "traefik", Children: []*Node{
|
||||
{Name: "Foo", FieldName: "Foo", Value: "1"},
|
||||
}},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "uint8",
|
||||
element: struct {
|
||||
Foo uint8
|
||||
}{Foo: 2},
|
||||
expected: expected{node: &Node{Name: "traefik", Children: []*Node{
|
||||
{Name: "Foo", FieldName: "Foo", Value: "2"},
|
||||
}},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "uint16",
|
||||
element: struct {
|
||||
Foo uint16
|
||||
}{Foo: 2},
|
||||
expected: expected{node: &Node{Name: "traefik", Children: []*Node{
|
||||
{Name: "Foo", FieldName: "Foo", Value: "2"},
|
||||
}},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "uint32",
|
||||
element: struct {
|
||||
Foo uint32
|
||||
}{Foo: 2},
|
||||
expected: expected{node: &Node{Name: "traefik", Children: []*Node{
|
||||
{Name: "Foo", FieldName: "Foo", Value: "2"},
|
||||
}},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "uint64",
|
||||
element: struct {
|
||||
Foo uint64
|
||||
}{Foo: 2},
|
||||
expected: expected{node: &Node{Name: "traefik", Children: []*Node{
|
||||
{Name: "Foo", FieldName: "Foo", Value: "2"},
|
||||
}},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "float32",
|
||||
element: struct {
|
||||
Foo float32
|
||||
}{Foo: 1.12},
|
||||
expected: expected{node: &Node{Name: "traefik", Children: []*Node{
|
||||
{Name: "Foo", FieldName: "Foo", Value: "1.120000"},
|
||||
}},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "float64",
|
||||
element: struct {
|
||||
Foo float64
|
||||
}{Foo: 1.12},
|
||||
expected: expected{node: &Node{Name: "traefik", Children: []*Node{
|
||||
{Name: "Foo", FieldName: "Foo", Value: "1.120000"},
|
||||
}},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "bool",
|
||||
element: struct {
|
||||
Foo bool
|
||||
}{Foo: true},
|
||||
expected: expected{node: &Node{Name: "traefik", Children: []*Node{
|
||||
{Name: "Foo", FieldName: "Foo", Value: "true"},
|
||||
}},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "struct",
|
||||
element: struct {
|
||||
Foo struct {
|
||||
Fii string
|
||||
Fuu string
|
||||
}
|
||||
}{
|
||||
Foo: struct {
|
||||
Fii string
|
||||
Fuu string
|
||||
}{
|
||||
Fii: "hii",
|
||||
Fuu: "huu",
|
||||
},
|
||||
},
|
||||
expected: expected{node: &Node{Name: "traefik", Children: []*Node{
|
||||
{Name: "Foo", FieldName: "Foo", Children: []*Node{
|
||||
{Name: "Fii", FieldName: "Fii", Value: "hii"},
|
||||
{Name: "Fuu", FieldName: "Fuu", Value: "huu"},
|
||||
}},
|
||||
}},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "struct unexported field",
|
||||
element: struct {
|
||||
Foo struct {
|
||||
Fii string
|
||||
fuu string
|
||||
}
|
||||
}{
|
||||
Foo: struct {
|
||||
Fii string
|
||||
fuu string
|
||||
}{
|
||||
Fii: "hii",
|
||||
fuu: "huu",
|
||||
},
|
||||
},
|
||||
expected: expected{node: &Node{Name: "traefik", Children: []*Node{
|
||||
{Name: "Foo", FieldName: "Foo", Children: []*Node{
|
||||
{Name: "Fii", FieldName: "Fii", Value: "hii"},
|
||||
}},
|
||||
}},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "struct pointer",
|
||||
element: struct {
|
||||
Foo *struct {
|
||||
Fii string
|
||||
Fuu string
|
||||
}
|
||||
}{
|
||||
Foo: &struct {
|
||||
Fii string
|
||||
Fuu string
|
||||
}{
|
||||
Fii: "hii",
|
||||
Fuu: "huu",
|
||||
},
|
||||
},
|
||||
expected: expected{node: &Node{Name: "traefik", Children: []*Node{
|
||||
{Name: "Foo", FieldName: "Foo", Children: []*Node{
|
||||
{Name: "Fii", FieldName: "Fii", Value: "hii"},
|
||||
{Name: "Fuu", FieldName: "Fuu", Value: "huu"},
|
||||
}},
|
||||
}},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "string pointer",
|
||||
element: struct {
|
||||
Foo *struct {
|
||||
Fii *string
|
||||
Fuu string
|
||||
}
|
||||
}{
|
||||
Foo: &struct {
|
||||
Fii *string
|
||||
Fuu string
|
||||
}{
|
||||
Fii: func(v string) *string { return &v }("hii"),
|
||||
Fuu: "huu",
|
||||
},
|
||||
},
|
||||
expected: expected{node: &Node{Name: "traefik", Children: []*Node{
|
||||
{Name: "Foo", FieldName: "Foo", Children: []*Node{
|
||||
{Name: "Fii", FieldName: "Fii", Value: "hii"},
|
||||
{Name: "Fuu", FieldName: "Fuu", Value: "huu"},
|
||||
}},
|
||||
}},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "string nil pointer",
|
||||
element: struct {
|
||||
Foo *struct {
|
||||
Fii *string
|
||||
Fuu string
|
||||
}
|
||||
}{
|
||||
Foo: &struct {
|
||||
Fii *string
|
||||
Fuu string
|
||||
}{
|
||||
Fii: nil,
|
||||
Fuu: "huu",
|
||||
},
|
||||
},
|
||||
expected: expected{node: &Node{Name: "traefik", Children: []*Node{
|
||||
{Name: "Foo", FieldName: "Foo", Children: []*Node{
|
||||
{Name: "Fuu", FieldName: "Fuu", Value: "huu"},
|
||||
}},
|
||||
}},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "int pointer",
|
||||
element: struct {
|
||||
Foo *struct {
|
||||
Fii *int
|
||||
Fuu int
|
||||
}
|
||||
}{
|
||||
Foo: &struct {
|
||||
Fii *int
|
||||
Fuu int
|
||||
}{
|
||||
Fii: func(v int) *int { return &v }(6),
|
||||
Fuu: 4,
|
||||
},
|
||||
},
|
||||
expected: expected{node: &Node{Name: "traefik", Children: []*Node{
|
||||
{Name: "Foo", FieldName: "Foo", Children: []*Node{
|
||||
{Name: "Fii", FieldName: "Fii", Value: "6"},
|
||||
{Name: "Fuu", FieldName: "Fuu", Value: "4"},
|
||||
}},
|
||||
}},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "bool pointer",
|
||||
element: struct {
|
||||
Foo *struct {
|
||||
Fii *bool
|
||||
Fuu bool
|
||||
}
|
||||
}{
|
||||
Foo: &struct {
|
||||
Fii *bool
|
||||
Fuu bool
|
||||
}{
|
||||
Fii: func(v bool) *bool { return &v }(true),
|
||||
Fuu: true,
|
||||
},
|
||||
},
|
||||
expected: expected{node: &Node{Name: "traefik", Children: []*Node{
|
||||
{Name: "Foo", FieldName: "Foo", Children: []*Node{
|
||||
{Name: "Fii", FieldName: "Fii", Value: "true"},
|
||||
{Name: "Fuu", FieldName: "Fuu", Value: "true"},
|
||||
}},
|
||||
}},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "struct nil struct pointer",
|
||||
element: struct {
|
||||
Foo *struct {
|
||||
Fii *string
|
||||
Fuu string
|
||||
}
|
||||
}{
|
||||
Foo: nil,
|
||||
},
|
||||
expected: expected{node: &Node{Name: "traefik"}},
|
||||
},
|
||||
{
|
||||
desc: "struct pointer, not allowEmpty",
|
||||
element: struct {
|
||||
Foo *struct {
|
||||
Fii string
|
||||
Fuu string
|
||||
}
|
||||
}{
|
||||
Foo: &struct {
|
||||
Fii string
|
||||
Fuu string
|
||||
}{},
|
||||
},
|
||||
expected: expected{node: &Node{Name: "traefik"}},
|
||||
},
|
||||
{
|
||||
desc: "struct pointer, allowEmpty",
|
||||
element: struct {
|
||||
Foo *struct {
|
||||
Fii string
|
||||
Fuu string
|
||||
} `label:"allowEmpty"`
|
||||
}{
|
||||
Foo: &struct {
|
||||
Fii string
|
||||
Fuu string
|
||||
}{},
|
||||
},
|
||||
expected: expected{node: &Node{Name: "traefik", Children: []*Node{
|
||||
{Name: "Foo", FieldName: "Foo", Value: "true"},
|
||||
}},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "map",
|
||||
element: struct {
|
||||
Foo struct {
|
||||
Bar map[string]string
|
||||
}
|
||||
}{
|
||||
Foo: struct {
|
||||
Bar map[string]string
|
||||
}{
|
||||
Bar: map[string]string{
|
||||
"name1": "huu",
|
||||
},
|
||||
},
|
||||
},
|
||||
expected: expected{node: &Node{Name: "traefik", Children: []*Node{
|
||||
{Name: "Foo", FieldName: "Foo", Children: []*Node{
|
||||
{Name: "Bar", FieldName: "Bar", Children: []*Node{
|
||||
{Name: "name1", FieldName: "name1", Value: "huu"},
|
||||
}},
|
||||
}},
|
||||
}}},
|
||||
},
|
||||
{
|
||||
desc: "empty map",
|
||||
element: struct {
|
||||
Bar map[string]string
|
||||
}{
|
||||
Bar: map[string]string{},
|
||||
},
|
||||
expected: expected{node: &Node{Name: "traefik"}},
|
||||
},
|
||||
{
|
||||
desc: "map nil",
|
||||
element: struct {
|
||||
Bar map[string]string
|
||||
}{
|
||||
Bar: nil,
|
||||
},
|
||||
expected: expected{node: &Node{Name: "traefik"}},
|
||||
},
|
||||
{
|
||||
desc: "map with non string key",
|
||||
element: struct {
|
||||
Foo struct {
|
||||
Bar map[int]string
|
||||
}
|
||||
}{
|
||||
Foo: struct {
|
||||
Bar map[int]string
|
||||
}{
|
||||
Bar: map[int]string{
|
||||
1: "huu",
|
||||
},
|
||||
},
|
||||
},
|
||||
expected: expected{error: true},
|
||||
},
|
||||
{
|
||||
desc: "slice of string",
|
||||
element: struct{ Bar []string }{Bar: []string{"huu", "hii"}},
|
||||
expected: expected{node: &Node{Name: "traefik", Children: []*Node{
|
||||
{Name: "Bar", FieldName: "Bar", Value: "huu, hii"},
|
||||
}},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "slice of int",
|
||||
element: struct{ Bar []int }{Bar: []int{4, 2, 3}},
|
||||
expected: expected{node: &Node{Name: "traefik", Children: []*Node{
|
||||
{Name: "Bar", FieldName: "Bar", Value: "4, 2, 3"},
|
||||
}},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "slice of int8",
|
||||
element: struct{ Bar []int8 }{Bar: []int8{4, 2, 3}},
|
||||
expected: expected{node: &Node{Name: "traefik", Children: []*Node{
|
||||
{Name: "Bar", FieldName: "Bar", Value: "4, 2, 3"},
|
||||
}},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "slice of int16",
|
||||
element: struct{ Bar []int16 }{Bar: []int16{4, 2, 3}},
|
||||
expected: expected{node: &Node{Name: "traefik", Children: []*Node{
|
||||
{Name: "Bar", FieldName: "Bar", Value: "4, 2, 3"},
|
||||
}},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "slice of int32",
|
||||
element: struct{ Bar []int32 }{Bar: []int32{4, 2, 3}},
|
||||
expected: expected{node: &Node{Name: "traefik", Children: []*Node{
|
||||
{Name: "Bar", FieldName: "Bar", Value: "4, 2, 3"},
|
||||
}},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "slice of int64",
|
||||
element: struct{ Bar []int64 }{Bar: []int64{4, 2, 3}},
|
||||
expected: expected{node: &Node{Name: "traefik", Children: []*Node{
|
||||
{Name: "Bar", FieldName: "Bar", Value: "4, 2, 3"},
|
||||
}},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "slice of uint",
|
||||
element: struct{ Bar []uint }{Bar: []uint{4, 2, 3}},
|
||||
expected: expected{node: &Node{Name: "traefik", Children: []*Node{
|
||||
{Name: "Bar", FieldName: "Bar", Value: "4, 2, 3"},
|
||||
}},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "slice of uint8",
|
||||
element: struct{ Bar []uint8 }{Bar: []uint8{4, 2, 3}},
|
||||
expected: expected{node: &Node{Name: "traefik", Children: []*Node{
|
||||
{Name: "Bar", FieldName: "Bar", Value: "4, 2, 3"},
|
||||
}},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "slice of uint16",
|
||||
element: struct{ Bar []uint16 }{Bar: []uint16{4, 2, 3}},
|
||||
expected: expected{node: &Node{Name: "traefik", Children: []*Node{
|
||||
{Name: "Bar", FieldName: "Bar", Value: "4, 2, 3"},
|
||||
}},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "slice of uint32",
|
||||
element: struct{ Bar []uint32 }{Bar: []uint32{4, 2, 3}},
|
||||
expected: expected{node: &Node{Name: "traefik", Children: []*Node{
|
||||
{Name: "Bar", FieldName: "Bar", Value: "4, 2, 3"},
|
||||
}},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "slice of uint64",
|
||||
element: struct{ Bar []uint64 }{Bar: []uint64{4, 2, 3}},
|
||||
expected: expected{node: &Node{Name: "traefik", Children: []*Node{
|
||||
{Name: "Bar", FieldName: "Bar", Value: "4, 2, 3"},
|
||||
}},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "slice of float32",
|
||||
element: struct{ Bar []float32 }{Bar: []float32{4.1, 2, 3.2}},
|
||||
expected: expected{node: &Node{Name: "traefik", Children: []*Node{
|
||||
{Name: "Bar", FieldName: "Bar", Value: "4.100000, 2.000000, 3.200000"},
|
||||
}},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "slice of float64",
|
||||
element: struct{ Bar []float64 }{Bar: []float64{4.1, 2, 3.2}},
|
||||
expected: expected{node: &Node{Name: "traefik", Children: []*Node{
|
||||
{Name: "Bar", FieldName: "Bar", Value: "4.100000, 2.000000, 3.200000"},
|
||||
}},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "slice of bool",
|
||||
element: struct{ Bar []bool }{Bar: []bool{true, false, true}},
|
||||
expected: expected{node: &Node{Name: "traefik", Children: []*Node{
|
||||
{Name: "Bar", FieldName: "Bar", Value: "true, false, true"},
|
||||
}},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "slice label-slice-as-struct",
|
||||
element: &struct {
|
||||
Foo []struct {
|
||||
Bar string
|
||||
Bir string
|
||||
} `label-slice-as-struct:"Fii"`
|
||||
}{
|
||||
Foo: []struct {
|
||||
Bar string
|
||||
Bir string
|
||||
}{
|
||||
{
|
||||
Bar: "haa",
|
||||
Bir: "hii",
|
||||
},
|
||||
},
|
||||
},
|
||||
expected: expected{node: &Node{
|
||||
Name: "traefik",
|
||||
Children: []*Node{{
|
||||
Name: "Fii",
|
||||
FieldName: "Foo",
|
||||
Children: []*Node{
|
||||
{Name: "Bar", FieldName: "Bar", Value: "haa"},
|
||||
{Name: "Bir", FieldName: "Bir", Value: "hii"},
|
||||
},
|
||||
}},
|
||||
}},
|
||||
},
|
||||
{
|
||||
desc: "slice label-slice-as-struct several slice entries",
|
||||
element: &struct {
|
||||
Foo []struct {
|
||||
Bar string
|
||||
Bir string
|
||||
} `label-slice-as-struct:"Fii"`
|
||||
}{
|
||||
Foo: []struct {
|
||||
Bar string
|
||||
Bir string
|
||||
}{
|
||||
{
|
||||
Bar: "haa",
|
||||
Bir: "hii",
|
||||
},
|
||||
{
|
||||
Bar: "haa",
|
||||
Bir: "hii",
|
||||
},
|
||||
},
|
||||
},
|
||||
expected: expected{error: true},
|
||||
},
|
||||
{
|
||||
desc: "slice of struct",
|
||||
element: struct {
|
||||
Foo []struct {
|
||||
Field string
|
||||
}
|
||||
}{
|
||||
Foo: []struct {
|
||||
Field string
|
||||
}{
|
||||
{
|
||||
Field: "bar",
|
||||
},
|
||||
{
|
||||
Field: "bir",
|
||||
},
|
||||
},
|
||||
},
|
||||
expected: expected{node: &Node{Name: "traefik", Children: []*Node{
|
||||
{Name: "Foo", FieldName: "Foo", Children: []*Node{
|
||||
{Name: "[0]", Children: []*Node{
|
||||
{Name: "Field", FieldName: "Field", Value: "bar"},
|
||||
}},
|
||||
{Name: "[1]", Children: []*Node{
|
||||
{Name: "Field", FieldName: "Field", Value: "bir"},
|
||||
}},
|
||||
}},
|
||||
}}},
|
||||
},
|
||||
{
|
||||
desc: "slice of pointer of struct",
|
||||
element: struct {
|
||||
Foo []*struct {
|
||||
Field string
|
||||
}
|
||||
}{
|
||||
Foo: []*struct {
|
||||
Field string
|
||||
}{
|
||||
{Field: "bar"},
|
||||
{Field: "bir"},
|
||||
},
|
||||
},
|
||||
expected: expected{node: &Node{Name: "traefik", Children: []*Node{
|
||||
{Name: "Foo", FieldName: "Foo", Children: []*Node{
|
||||
{Name: "[0]", Children: []*Node{
|
||||
{Name: "Field", FieldName: "Field", Value: "bar"},
|
||||
}},
|
||||
{Name: "[1]", Children: []*Node{
|
||||
{Name: "Field", FieldName: "Field", Value: "bir"},
|
||||
}},
|
||||
}},
|
||||
}}},
|
||||
},
|
||||
{
|
||||
desc: "empty slice",
|
||||
element: struct {
|
||||
Bar []string
|
||||
}{
|
||||
Bar: []string{},
|
||||
},
|
||||
expected: expected{node: &Node{Name: "traefik"}},
|
||||
},
|
||||
{
|
||||
desc: "nil slice",
|
||||
element: struct {
|
||||
Bar []string
|
||||
}{
|
||||
Bar: nil,
|
||||
},
|
||||
expected: expected{node: &Node{Name: "traefik"}},
|
||||
},
|
||||
{
|
||||
desc: "ignore slice",
|
||||
element: struct {
|
||||
Bar []string `label:"-"`
|
||||
}{
|
||||
Bar: []string{"huu", "hii"},
|
||||
},
|
||||
expected: expected{node: &Node{Name: "traefik"}},
|
||||
},
|
||||
{
|
||||
desc: "embedded",
|
||||
element: struct {
|
||||
Foo struct{ FiiFoo }
|
||||
}{
|
||||
Foo: struct{ FiiFoo }{
|
||||
FiiFoo: FiiFoo{
|
||||
Fii: "hii",
|
||||
Fuu: "huu",
|
||||
},
|
||||
},
|
||||
},
|
||||
expected: expected{node: &Node{Name: "traefik", Children: []*Node{
|
||||
{Name: "Foo", FieldName: "Foo", Children: []*Node{
|
||||
{Name: "Fii", FieldName: "Fii", Value: "hii"},
|
||||
{Name: "Fuu", FieldName: "Fuu", Value: "huu"},
|
||||
}},
|
||||
}},
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
for _, test := range testCases {
|
||||
test := test
|
||||
t.Run(test.desc, func(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
node, err := EncodeToNode(test.element, true)
|
||||
|
||||
if test.expected.error {
|
||||
require.Error(t, err)
|
||||
} else {
|
||||
require.NoError(t, err)
|
||||
|
||||
assert.Equal(t, test.expected.node, node)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
166
pkg/config/parser/flat_encode.go
Normal file
|
@ -0,0 +1,166 @@
|
|||
package parser
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"reflect"
|
||||
"sort"
|
||||
"strconv"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"github.com/containous/traefik/pkg/types"
|
||||
)
|
||||
|
||||
const defaultPtrValue = "false"
|
||||
|
||||
// FlatOpts holds options used when encoding to Flat.
|
||||
type FlatOpts struct {
|
||||
Case string // "lower" or "upper", defaults to "lower".
|
||||
Separator string
|
||||
SkipRoot bool
|
||||
}
|
||||
|
||||
// Flat is a configuration item representation.
|
||||
type Flat struct {
|
||||
Name string
|
||||
Description string
|
||||
Default string
|
||||
}
|
||||
|
||||
// EncodeToFlat encodes a node to a Flat representation.
|
||||
// Even though the given node argument should have already been augmented with metadata such as kind,
|
||||
// the element (and its type information) is still needed to treat remaining edge cases.
|
||||
func EncodeToFlat(element interface{}, node *Node, opts FlatOpts) ([]Flat, error) {
|
||||
if element == nil || node == nil {
|
||||
return nil, nil
|
||||
}
|
||||
|
||||
if node.Kind == 0 {
|
||||
return nil, fmt.Errorf("missing node type: %s", node.Name)
|
||||
}
|
||||
|
||||
elem := reflect.ValueOf(element)
|
||||
if elem.Kind() == reflect.Struct {
|
||||
return nil, fmt.Errorf("structs are not supported, use pointer instead")
|
||||
}
|
||||
|
||||
encoder := encoderToFlat{FlatOpts: opts}
|
||||
|
||||
var entries []Flat
|
||||
if encoder.SkipRoot {
|
||||
for _, child := range node.Children {
|
||||
field := encoder.getField(elem.Elem(), child)
|
||||
entries = append(entries, encoder.createFlat(field, child.Name, child)...)
|
||||
}
|
||||
} else {
|
||||
entries = encoder.createFlat(elem, strings.ToLower(node.Name), node)
|
||||
}
|
||||
|
||||
sort.Slice(entries, func(i, j int) bool { return entries[i].Name < entries[j].Name })
|
||||
|
||||
return entries, nil
|
||||
}
|
||||
|
||||
type encoderToFlat struct {
|
||||
FlatOpts
|
||||
}
|
||||
|
||||
func (e encoderToFlat) createFlat(field reflect.Value, name string, node *Node) []Flat {
|
||||
var entries []Flat
|
||||
if node.Kind != reflect.Map && node.Description != "-" {
|
||||
if !(node.Kind == reflect.Ptr && len(node.Children) > 0) ||
|
||||
(node.Kind == reflect.Ptr && node.Tag.Get("label") == TagLabelAllowEmpty) {
|
||||
if node.Name[0] != '[' {
|
||||
entries = append(entries, Flat{
|
||||
Name: e.getName(name),
|
||||
Description: node.Description,
|
||||
Default: e.getNodeValue(e.getField(field, node), node),
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
for _, child := range node.Children {
|
||||
if node.Kind == reflect.Map {
|
||||
fChild := e.getField(field, child)
|
||||
|
||||
var v string
|
||||
if child.Kind == reflect.Struct {
|
||||
v = defaultPtrValue
|
||||
} else {
|
||||
v = e.getNodeValue(fChild, child)
|
||||
}
|
||||
|
||||
if node.Description != "-" {
|
||||
entries = append(entries, Flat{
|
||||
Name: e.getName(name, child.Name),
|
||||
Description: node.Description,
|
||||
Default: v,
|
||||
})
|
||||
}
|
||||
|
||||
if child.Kind == reflect.Struct || child.Kind == reflect.Ptr {
|
||||
for _, ch := range child.Children {
|
||||
f := e.getField(fChild, ch)
|
||||
n := e.getName(name, child.Name, ch.Name)
|
||||
entries = append(entries, e.createFlat(f, n, ch)...)
|
||||
}
|
||||
}
|
||||
} else {
|
||||
f := e.getField(field, child)
|
||||
n := e.getName(name, child.Name)
|
||||
entries = append(entries, e.createFlat(f, n, child)...)
|
||||
}
|
||||
}
|
||||
|
||||
return entries
|
||||
}
|
||||
|
||||
func (e encoderToFlat) getField(field reflect.Value, node *Node) reflect.Value {
|
||||
switch field.Kind() {
|
||||
case reflect.Struct:
|
||||
return field.FieldByName(node.FieldName)
|
||||
case reflect.Ptr:
|
||||
if field.Elem().Kind() == reflect.Struct {
|
||||
return field.Elem().FieldByName(node.FieldName)
|
||||
}
|
||||
return field.Elem()
|
||||
case reflect.Map:
|
||||
return field.MapIndex(reflect.ValueOf(node.FieldName))
|
||||
default:
|
||||
return field
|
||||
}
|
||||
}
|
||||
|
||||
func (e encoderToFlat) getNodeValue(field reflect.Value, node *Node) string {
|
||||
if node.Kind == reflect.Ptr && len(node.Children) > 0 {
|
||||
return defaultPtrValue
|
||||
}
|
||||
|
||||
if field.Kind() == reflect.Int64 {
|
||||
i, _ := strconv.ParseInt(node.Value, 10, 64)
|
||||
|
||||
switch field.Type() {
|
||||
case reflect.TypeOf(types.Duration(time.Second)):
|
||||
return strconv.Itoa(int(i) / int(time.Second))
|
||||
case reflect.TypeOf(time.Second):
|
||||
return time.Duration(i).String()
|
||||
}
|
||||
}
|
||||
|
||||
return node.Value
|
||||
}
|
||||
|
||||
func (e encoderToFlat) getName(names ...string) string {
|
||||
var name string
|
||||
if names[len(names)-1][0] == '[' {
|
||||
name = strings.Join(names, "")
|
||||
} else {
|
||||
name = strings.Join(names, e.Separator)
|
||||
}
|
||||
|
||||
if strings.EqualFold(e.Case, "upper") {
|
||||
return strings.ToUpper(name)
|
||||
}
|
||||
return strings.ToLower(name)
|
||||
}
|
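As a point of reference, a minimal sketch of how this flat encoder is typically driven end to end, following the encode, add-metadata, flatten sequence used by this package; the Sample struct, its field values, and the printed names are illustrative assumptions, not Traefik configuration:

package main

import (
	"fmt"

	"github.com/containous/traefik/pkg/config/parser"
)

// Sample is an illustrative configuration struct, not part of Traefik.
type Sample struct {
	Host string `description:"Sample host."`
	Port int    `description:"Sample port."`
}

func main() {
	element := &Sample{Host: "localhost", Port: 8080}

	// typed element -> tree of untyped nodes (same call as in parser.Encode)
	node, err := parser.EncodeToNode(element, true)
	if err != nil {
		panic(err)
	}

	// augment the nodes with kind/tag metadata inferred from the struct
	if err := parser.AddMetadata(element, node); err != nil {
		panic(err)
	}

	// flatten to upper-cased, underscore-separated entries
	flats, err := parser.EncodeToFlat(element, node, parser.FlatOpts{Case: "upper", Separator: "_"})
	if err != nil {
		panic(err)
	}

	for _, flat := range flats {
		// prints entries such as TRAEFIK_HOST=localhost and TRAEFIK_PORT=8080
		fmt.Printf("%s=%s\n", flat.Name, flat.Default)
	}
}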
1250
pkg/config/parser/flat_encode_test.go
Normal file
File diff suppressed because it is too large
95
pkg/config/parser/labels_decode.go
Normal file
|
@ -0,0 +1,95 @@
|
|||
package parser
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"sort"
|
||||
"strings"
|
||||
)
|
||||
|
||||
const labelRoot = "traefik"
|
||||
|
||||
// DecodeToNode converts the labels to a tree of nodes.
|
||||
// If any filters are present, labels which do not match the filters are skipped.
|
||||
func DecodeToNode(labels map[string]string, filters ...string) (*Node, error) {
|
||||
sortedKeys := sortKeys(labels, filters)
|
||||
|
||||
var node *Node
|
||||
for i, key := range sortedKeys {
|
||||
split := strings.Split(key, ".")
|
||||
|
||||
if split[0] != labelRoot {
|
||||
return nil, fmt.Errorf("invalid label root %s", split[0])
|
||||
}
|
||||
|
||||
var parts []string
|
||||
for _, v := range split {
|
||||
if v[0] == '[' {
|
||||
return nil, fmt.Errorf("invalid leading character '[' in field name (bracket is a slice delimiter): %s", v)
|
||||
}
|
||||
|
||||
if strings.HasSuffix(v, "]") && v[0] != '[' {
|
||||
indexLeft := strings.Index(v, "[")
|
||||
parts = append(parts, v[:indexLeft], v[indexLeft:])
|
||||
} else {
|
||||
parts = append(parts, v)
|
||||
}
|
||||
}
|
||||
|
||||
if i == 0 {
|
||||
node = &Node{}
|
||||
}
|
||||
decodeToNode(node, parts, labels[key])
|
||||
}
|
||||
|
||||
return node, nil
|
||||
}
|
||||
|
||||
func decodeToNode(root *Node, path []string, value string) {
|
||||
if len(root.Name) == 0 {
|
||||
root.Name = path[0]
|
||||
}
|
||||
|
||||
// a longer path means children; otherwise this node is a leaf holding the value
|
||||
if len(path) > 1 {
|
||||
if n := containsNode(root.Children, path[1]); n != nil {
|
||||
// the child already exists
|
||||
decodeToNode(n, path[1:], value)
|
||||
} else {
|
||||
// new child
|
||||
child := &Node{Name: path[1]}
|
||||
decodeToNode(child, path[1:], value)
|
||||
root.Children = append(root.Children, child)
|
||||
}
|
||||
} else {
|
||||
root.Value = value
|
||||
}
|
||||
}
|
||||
|
||||
func containsNode(nodes []*Node, name string) *Node {
|
||||
for _, n := range nodes {
|
||||
if name == n.Name {
|
||||
return n
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func sortKeys(labels map[string]string, filters []string) []string {
|
||||
var sortedKeys []string
|
||||
for key := range labels {
|
||||
if len(filters) == 0 {
|
||||
sortedKeys = append(sortedKeys, key)
|
||||
continue
|
||||
}
|
||||
|
||||
for _, filter := range filters {
|
||||
if len(key) >= len(filter) && strings.EqualFold(key[:len(filter)], filter) {
|
||||
sortedKeys = append(sortedKeys, key)
|
||||
continue
|
||||
}
|
||||
}
|
||||
}
|
||||
sort.Strings(sortedKeys)
|
||||
|
||||
return sortedKeys
|
||||
}
|
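For orientation, a small sketch of the label decoder in isolation; the label keys and the "traefik.foo" filter prefix are made up for the example:

package main

import (
	"fmt"

	"github.com/containous/traefik/pkg/config/parser"
)

func main() {
	labels := map[string]string{
		"traefik.foo.aaa": "bar",
		"traefik.foo.bbb": "bur",
		"traefik.fii":     "dropped by the filter below",
	}

	// keep only the keys matching the "traefik.foo" prefix
	node, err := parser.DecodeToNode(labels, "traefik.foo")
	if err != nil {
		panic(err)
	}

	// node is the "traefik" root with a single "foo" child holding the "aaa" and "bbb" leaves
	for _, leaf := range node.Children[0].Children {
		fmt.Println(leaf.Name, "=", leaf.Value)
	}
}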
236
pkg/config/parser/labels_decode_test.go
Normal file
|
@ -0,0 +1,236 @@
|
|||
package parser
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"testing"
|
||||
|
||||
"github.com/stretchr/testify/assert"
|
||||
"github.com/stretchr/testify/require"
|
||||
)
|
||||
|
||||
func TestDecodeToNode(t *testing.T) {
|
||||
type expected struct {
|
||||
error bool
|
||||
node *Node
|
||||
}
|
||||
|
||||
testCases := []struct {
|
||||
desc string
|
||||
in map[string]string
|
||||
filters []string
|
||||
expected expected
|
||||
}{
|
||||
{
|
||||
desc: "no label",
|
||||
in: map[string]string{},
|
||||
expected: expected{node: nil},
|
||||
},
|
||||
{
|
||||
desc: "level 1",
|
||||
in: map[string]string{
|
||||
"traefik.foo": "bar",
|
||||
},
|
||||
expected: expected{node: &Node{
|
||||
Name: "traefik",
|
||||
Children: []*Node{
|
||||
{Name: "foo", Value: "bar"},
|
||||
},
|
||||
}},
|
||||
},
|
||||
{
|
||||
desc: "level 1 empty value",
|
||||
in: map[string]string{
|
||||
"traefik.foo": "",
|
||||
},
|
||||
expected: expected{node: &Node{
|
||||
Name: "traefik",
|
||||
Children: []*Node{
|
||||
{Name: "foo", Value: ""},
|
||||
},
|
||||
}},
|
||||
},
|
||||
{
|
||||
desc: "level 2",
|
||||
in: map[string]string{
|
||||
"traefik.foo.bar": "bar",
|
||||
},
|
||||
expected: expected{node: &Node{
|
||||
Name: "traefik",
|
||||
Children: []*Node{{
|
||||
Name: "foo",
|
||||
Children: []*Node{
|
||||
{Name: "bar", Value: "bar"},
|
||||
},
|
||||
}},
|
||||
}},
|
||||
},
|
||||
{
|
||||
desc: "several entries, level 0",
|
||||
in: map[string]string{
|
||||
"traefik": "bar",
|
||||
"traefic": "bur",
|
||||
},
|
||||
expected: expected{error: true},
|
||||
},
|
||||
{
|
||||
desc: "several entries, prefix filter",
|
||||
in: map[string]string{
|
||||
"traefik.foo": "bar",
|
||||
"traefik.fii": "bir",
|
||||
},
|
||||
filters: []string{"traefik.Foo"},
|
||||
expected: expected{node: &Node{
|
||||
Name: "traefik",
|
||||
Children: []*Node{
|
||||
{Name: "foo", Value: "bar"},
|
||||
},
|
||||
}},
|
||||
},
|
||||
{
|
||||
desc: "several entries, level 1",
|
||||
in: map[string]string{
|
||||
"traefik.foo": "bar",
|
||||
"traefik.fii": "bur",
|
||||
},
|
||||
expected: expected{node: &Node{
|
||||
Name: "traefik",
|
||||
Children: []*Node{
|
||||
{Name: "fii", Value: "bur"},
|
||||
{Name: "foo", Value: "bar"},
|
||||
},
|
||||
}},
|
||||
},
|
||||
{
|
||||
desc: "several entries, level 2",
|
||||
in: map[string]string{
|
||||
"traefik.foo.aaa": "bar",
|
||||
"traefik.foo.bbb": "bur",
|
||||
},
|
||||
expected: expected{node: &Node{
|
||||
Name: "traefik",
|
||||
Children: []*Node{
|
||||
{Name: "foo", Children: []*Node{
|
||||
{Name: "aaa", Value: "bar"},
|
||||
{Name: "bbb", Value: "bur"},
|
||||
}},
|
||||
},
|
||||
}},
|
||||
},
|
||||
{
|
||||
desc: "several entries, level 2, 3 children",
|
||||
in: map[string]string{
|
||||
"traefik.foo.aaa": "bar",
|
||||
"traefik.foo.bbb": "bur",
|
||||
"traefik.foo.ccc": "bir",
|
||||
},
|
||||
expected: expected{node: &Node{
|
||||
Name: "traefik",
|
||||
Children: []*Node{
|
||||
{Name: "foo", Children: []*Node{
|
||||
{Name: "aaa", Value: "bar"},
|
||||
{Name: "bbb", Value: "bur"},
|
||||
{Name: "ccc", Value: "bir"},
|
||||
}},
|
||||
},
|
||||
}},
|
||||
},
|
||||
{
|
||||
desc: "several entries, level 3",
|
||||
in: map[string]string{
|
||||
"traefik.foo.bar.aaa": "bar",
|
||||
"traefik.foo.bar.bbb": "bur",
|
||||
},
|
||||
expected: expected{node: &Node{
|
||||
Name: "traefik",
|
||||
Children: []*Node{
|
||||
{Name: "foo", Children: []*Node{
|
||||
{Name: "bar", Children: []*Node{
|
||||
{Name: "aaa", Value: "bar"},
|
||||
{Name: "bbb", Value: "bur"},
|
||||
}},
|
||||
}},
|
||||
},
|
||||
}},
|
||||
},
|
||||
{
|
||||
desc: "several entries, level 3, 2 children level 1",
|
||||
in: map[string]string{
|
||||
"traefik.foo.bar.aaa": "bar",
|
||||
"traefik.foo.bar.bbb": "bur",
|
||||
"traefik.bar.foo.bbb": "bir",
|
||||
},
|
||||
expected: expected{node: &Node{
|
||||
Name: "traefik",
|
||||
Children: []*Node{
|
||||
{Name: "bar", Children: []*Node{
|
||||
{Name: "foo", Children: []*Node{
|
||||
{Name: "bbb", Value: "bir"},
|
||||
}},
|
||||
}},
|
||||
{Name: "foo", Children: []*Node{
|
||||
{Name: "bar", Children: []*Node{
|
||||
{Name: "aaa", Value: "bar"},
|
||||
{Name: "bbb", Value: "bur"},
|
||||
}},
|
||||
}},
|
||||
},
|
||||
}},
|
||||
},
|
||||
{
|
||||
desc: "several entries, slice syntax",
|
||||
in: map[string]string{
|
||||
"traefik.foo[0].aaa": "bar0",
|
||||
"traefik.foo[0].bbb": "bur0",
|
||||
"traefik.foo[1].aaa": "bar1",
|
||||
"traefik.foo[1].bbb": "bur1",
|
||||
},
|
||||
expected: expected{node: &Node{
|
||||
Name: "traefik",
|
||||
Children: []*Node{
|
||||
{Name: "foo", Children: []*Node{
|
||||
{Name: "[0]", Children: []*Node{
|
||||
{Name: "aaa", Value: "bar0"},
|
||||
{Name: "bbb", Value: "bur0"},
|
||||
}},
|
||||
{Name: "[1]", Children: []*Node{
|
||||
{Name: "aaa", Value: "bar1"},
|
||||
{Name: "bbb", Value: "bur1"},
|
||||
}},
|
||||
}},
|
||||
},
|
||||
}},
|
||||
},
|
||||
{
|
||||
desc: "several entries, invalid slice syntax",
|
||||
in: map[string]string{
|
||||
"traefik.foo.[0].aaa": "bar0",
|
||||
"traefik.foo.[0].bbb": "bur0",
|
||||
"traefik.foo.[1].aaa": "bar1",
|
||||
"traefik.foo.[1].bbb": "bur1",
|
||||
},
|
||||
expected: expected{error: true},
|
||||
},
|
||||
}
|
||||
|
||||
for _, test := range testCases {
|
||||
test := test
|
||||
t.Run(test.desc, func(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
out, err := DecodeToNode(test.in, test.filters...)
|
||||
|
||||
if test.expected.error {
|
||||
require.Error(t, err)
|
||||
} else {
|
||||
require.NoError(t, err)
|
||||
|
||||
if !assert.Equal(t, test.expected.node, out) {
|
||||
bytes, err := json.MarshalIndent(out, "", " ")
|
||||
require.NoError(t, err)
|
||||
fmt.Println(string(bytes))
|
||||
}
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
30
pkg/config/parser/labels_encode.go
Normal file
|
@ -0,0 +1,30 @@
|
|||
package parser
|
||||
|
||||
// EncodeNode converts a node to labels.
|
||||
// nodes -> labels
|
||||
func EncodeNode(node *Node) map[string]string {
|
||||
labels := make(map[string]string)
|
||||
encodeNode(labels, node.Name, node)
|
||||
return labels
|
||||
}
|
||||
|
||||
func encodeNode(labels map[string]string, root string, node *Node) {
|
||||
for _, child := range node.Children {
|
||||
if child.Disabled {
|
||||
continue
|
||||
}
|
||||
|
||||
var sep string
|
||||
if child.Name[0] != '[' {
|
||||
sep = "."
|
||||
}
|
||||
|
||||
childName := root + sep + child.Name
|
||||
|
||||
if len(child.Children) > 0 {
|
||||
encodeNode(labels, childName, child)
|
||||
} else if len(child.Name) > 0 {
|
||||
labels[childName] = child.Value
|
||||
}
|
||||
}
|
||||
}
|
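The reverse direction in a nutshell: a sketch of turning a hand-built node tree (shaped like the ones in the tests below) back into labels, including the bracketed slice naming:

package main

import (
	"fmt"

	"github.com/containous/traefik/pkg/config/parser"
)

func main() {
	node := &parser.Node{
		Name: "traefik",
		Children: []*parser.Node{
			{Name: "aaa", Value: "bar"},
			{Name: "foo", Children: []*parser.Node{
				// slice entries keep their "[i]" name and are joined without a dot
				{Name: "[0]", Children: []*parser.Node{{Name: "bbb", Value: "bur0"}}},
				{Name: "[1]", Children: []*parser.Node{{Name: "bbb", Value: "bur1"}}},
			}},
		},
	}

	labels := parser.EncodeNode(node)
	fmt.Println(labels)
	// map[traefik.aaa:bar traefik.foo[0].bbb:bur0 traefik.foo[1].bbb:bur1]
}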
180
pkg/config/parser/labels_encode_test.go
Normal file
|
@ -0,0 +1,180 @@
|
|||
package parser
|
||||
|
||||
import (
|
||||
"testing"
|
||||
|
||||
"github.com/stretchr/testify/assert"
|
||||
)
|
||||
|
||||
func TestEncodeNode(t *testing.T) {
|
||||
testCases := []struct {
|
||||
desc string
|
||||
node *Node
|
||||
expected map[string]string
|
||||
}{
|
||||
{
|
||||
desc: "1 label",
|
||||
node: &Node{
|
||||
Name: "traefik",
|
||||
Children: []*Node{
|
||||
{Name: "aaa", Value: "bar"},
|
||||
},
|
||||
},
|
||||
expected: map[string]string{
|
||||
"traefik.aaa": "bar",
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "2 labels",
|
||||
node: &Node{
|
||||
Name: "traefik",
|
||||
Children: []*Node{
|
||||
{Name: "aaa", Value: "bar"},
|
||||
{Name: "bbb", Value: "bur"},
|
||||
},
|
||||
},
|
||||
expected: map[string]string{
|
||||
"traefik.aaa": "bar",
|
||||
"traefik.bbb": "bur",
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "2 labels, 1 disabled",
|
||||
node: &Node{
|
||||
Name: "traefik",
|
||||
Children: []*Node{
|
||||
{Name: "aaa", Value: "bar"},
|
||||
{Name: "bbb", Value: "bur", Disabled: true},
|
||||
},
|
||||
},
|
||||
expected: map[string]string{
|
||||
"traefik.aaa": "bar",
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "2 levels",
|
||||
node: &Node{
|
||||
Name: "traefik",
|
||||
Children: []*Node{
|
||||
{Name: "foo", Children: []*Node{
|
||||
{Name: "aaa", Value: "bar"},
|
||||
}},
|
||||
},
|
||||
},
|
||||
expected: map[string]string{
|
||||
"traefik.foo.aaa": "bar",
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "3 levels",
|
||||
node: &Node{
|
||||
Name: "traefik",
|
||||
Children: []*Node{
|
||||
{Name: "foo", Children: []*Node{
|
||||
{Name: "bar", Children: []*Node{
|
||||
{Name: "aaa", Value: "bar"},
|
||||
}},
|
||||
}},
|
||||
},
|
||||
},
|
||||
expected: map[string]string{
|
||||
"traefik.foo.bar.aaa": "bar",
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "2 levels, same root",
|
||||
node: &Node{
|
||||
Name: "traefik",
|
||||
Children: []*Node{
|
||||
{Name: "foo", Children: []*Node{
|
||||
{Name: "bar", Children: []*Node{
|
||||
{Name: "aaa", Value: "bar"},
|
||||
{Name: "bbb", Value: "bur"},
|
||||
}},
|
||||
}},
|
||||
},
|
||||
},
|
||||
expected: map[string]string{
|
||||
"traefik.foo.bar.aaa": "bar",
|
||||
"traefik.foo.bar.bbb": "bur",
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "several levels, different root",
|
||||
node: &Node{
|
||||
Name: "traefik",
|
||||
Children: []*Node{
|
||||
{Name: "bar", Children: []*Node{
|
||||
{Name: "ccc", Value: "bir"},
|
||||
}},
|
||||
{Name: "foo", Children: []*Node{
|
||||
{Name: "bar", Children: []*Node{
|
||||
{Name: "aaa", Value: "bar"},
|
||||
}},
|
||||
}},
|
||||
},
|
||||
},
|
||||
expected: map[string]string{
|
||||
"traefik.foo.bar.aaa": "bar",
|
||||
"traefik.bar.ccc": "bir",
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "multiple labels, multiple levels",
|
||||
node: &Node{
|
||||
Name: "traefik",
|
||||
Children: []*Node{
|
||||
{Name: "bar", Children: []*Node{
|
||||
{Name: "ccc", Value: "bir"},
|
||||
}},
|
||||
{Name: "foo", Children: []*Node{
|
||||
{Name: "bar", Children: []*Node{
|
||||
{Name: "aaa", Value: "bar"},
|
||||
{Name: "bbb", Value: "bur"},
|
||||
}},
|
||||
}},
|
||||
},
|
||||
},
|
||||
expected: map[string]string{
|
||||
"traefik.foo.bar.aaa": "bar",
|
||||
"traefik.foo.bar.bbb": "bur",
|
||||
"traefik.bar.ccc": "bir",
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "slice of struct syntax",
|
||||
node: &Node{
|
||||
Name: "traefik",
|
||||
Children: []*Node{
|
||||
{Name: "foo", Children: []*Node{
|
||||
{Name: "[0]", Children: []*Node{
|
||||
{Name: "aaa", Value: "bar0"},
|
||||
{Name: "bbb", Value: "bur0"},
|
||||
}},
|
||||
{Name: "[1]", Children: []*Node{
|
||||
{Name: "aaa", Value: "bar1"},
|
||||
{Name: "bbb", Value: "bur1"},
|
||||
}},
|
||||
}},
|
||||
},
|
||||
},
|
||||
expected: map[string]string{
|
||||
"traefik.foo[0].aaa": "bar0",
|
||||
"traefik.foo[0].bbb": "bur0",
|
||||
"traefik.foo[1].aaa": "bar1",
|
||||
"traefik.foo[1].bbb": "bur1",
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
for _, test := range testCases {
|
||||
test := test
|
||||
t.Run(test.desc, func(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
labels := EncodeNode(test.node)
|
||||
|
||||
assert.Equal(t, test.expected, labels)
|
||||
})
|
||||
}
|
||||
}
|
18
pkg/config/parser/node.go
Normal file
|
@ -0,0 +1,18 @@
|
|||
package parser
|
||||
|
||||
import "reflect"
|
||||
|
||||
// MapNamePlaceholder is the placeholder for the map name.
|
||||
const MapNamePlaceholder = "<name>"
|
||||
|
||||
// Node is a label node.
|
||||
type Node struct {
|
||||
Name string `json:"name"`
|
||||
Description string `json:"description,omitempty"`
|
||||
FieldName string `json:"fieldName"`
|
||||
Value string `json:"value,omitempty"`
|
||||
Disabled bool `json:"disabled,omitempty"`
|
||||
Kind reflect.Kind `json:"kind,omitempty"`
|
||||
Tag reflect.StructTag `json:"tag,omitempty"`
|
||||
Children []*Node `json:"children,omitempty"`
|
||||
}
|
182
pkg/config/parser/nodes_metadata.go
Normal file
|
@ -0,0 +1,182 @@
|
|||
package parser
|
||||
|
||||
import (
|
||||
"errors"
|
||||
"fmt"
|
||||
"reflect"
|
||||
"strings"
|
||||
)
|
||||
|
||||
// AddMetadata adds metadata such as type, inferred from element, to a node.
|
||||
func AddMetadata(element interface{}, node *Node) error {
|
||||
if node == nil {
|
||||
return nil
|
||||
}
|
||||
|
||||
if len(node.Children) == 0 {
|
||||
return fmt.Errorf("invalid node %s: no child", node.Name)
|
||||
}
|
||||
|
||||
if element == nil {
|
||||
return errors.New("nil structure")
|
||||
}
|
||||
|
||||
rootType := reflect.TypeOf(element)
|
||||
node.Kind = rootType.Kind()
|
||||
|
||||
return browseChildren(rootType, node)
|
||||
}
|
||||
|
||||
func browseChildren(fType reflect.Type, node *Node) error {
|
||||
for _, child := range node.Children {
|
||||
if err := addMetadata(fType, child); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func addMetadata(rootType reflect.Type, node *Node) error {
|
||||
rType := rootType
|
||||
if rootType.Kind() == reflect.Ptr {
|
||||
rType = rootType.Elem()
|
||||
}
|
||||
|
||||
field, err := findTypedField(rType, node)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if err = isSupportedType(field); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
fType := field.Type
|
||||
node.Kind = fType.Kind()
|
||||
node.Tag = field.Tag
|
||||
|
||||
if fType.Kind() == reflect.Struct || fType.Kind() == reflect.Ptr && fType.Elem().Kind() == reflect.Struct ||
|
||||
fType.Kind() == reflect.Map {
|
||||
if len(node.Children) == 0 && field.Tag.Get(TagLabel) != TagLabelAllowEmpty {
|
||||
return fmt.Errorf("%s cannot be a standalone element (type %s)", node.Name, fType)
|
||||
}
|
||||
|
||||
node.Disabled = len(node.Value) > 0 && !strings.EqualFold(node.Value, "true") && field.Tag.Get(TagLabel) == TagLabelAllowEmpty
|
||||
}
|
||||
|
||||
if len(node.Children) == 0 {
|
||||
return nil
|
||||
}
|
||||
|
||||
if fType.Kind() == reflect.Struct || fType.Kind() == reflect.Ptr && fType.Elem().Kind() == reflect.Struct {
|
||||
return browseChildren(fType, node)
|
||||
}
|
||||
|
||||
if fType.Kind() == reflect.Map {
|
||||
for _, child := range node.Children {
|
||||
// elem is a map entry value type
|
||||
elem := fType.Elem()
|
||||
child.Kind = elem.Kind()
|
||||
|
||||
if elem.Kind() == reflect.Map || elem.Kind() == reflect.Struct ||
|
||||
(elem.Kind() == reflect.Ptr && elem.Elem().Kind() == reflect.Struct) {
|
||||
if err = browseChildren(elem, child); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
if fType.Kind() == reflect.Slice {
|
||||
if field.Tag.Get(TagLabelSliceAsStruct) != "" {
|
||||
return browseChildren(fType.Elem(), node)
|
||||
}
|
||||
|
||||
for _, ch := range node.Children {
|
||||
ch.Kind = fType.Elem().Kind()
|
||||
if err = browseChildren(fType.Elem(), ch); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
return fmt.Errorf("invalid node %s: %v", node.Name, fType.Kind())
|
||||
}
|
||||
|
||||
func findTypedField(rType reflect.Type, node *Node) (reflect.StructField, error) {
|
||||
for i := 0; i < rType.NumField(); i++ {
|
||||
cField := rType.Field(i)
|
||||
|
||||
fieldName := cField.Tag.Get(TagLabelSliceAsStruct)
|
||||
if len(fieldName) == 0 {
|
||||
fieldName = cField.Name
|
||||
}
|
||||
|
||||
if IsExported(cField) {
|
||||
if cField.Anonymous {
|
||||
if cField.Type.Kind() == reflect.Struct {
|
||||
structField, err := findTypedField(cField.Type, node)
|
||||
if err != nil {
|
||||
continue
|
||||
}
|
||||
return structField, nil
|
||||
}
|
||||
}
|
||||
|
||||
if strings.EqualFold(fieldName, node.Name) {
|
||||
node.FieldName = cField.Name
|
||||
return cField, nil
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
return reflect.StructField{}, fmt.Errorf("field not found, node: %s", node.Name)
|
||||
}
|
||||
|
||||
// IsExported reports whether f is exported.
|
||||
// https://golang.org/pkg/reflect/#StructField
|
||||
func IsExported(f reflect.StructField) bool {
|
||||
return f.PkgPath == ""
|
||||
}
|
||||
|
||||
func isSupportedType(field reflect.StructField) error {
|
||||
fType := field.Type
|
||||
|
||||
if fType.Kind() == reflect.Slice {
|
||||
switch fType.Elem().Kind() {
|
||||
case reflect.String,
|
||||
reflect.Bool,
|
||||
reflect.Int,
|
||||
reflect.Int8,
|
||||
reflect.Int16,
|
||||
reflect.Int32,
|
||||
reflect.Int64,
|
||||
reflect.Uint,
|
||||
reflect.Uint8,
|
||||
reflect.Uint16,
|
||||
reflect.Uint32,
|
||||
reflect.Uint64,
|
||||
reflect.Uintptr,
|
||||
reflect.Float32,
|
||||
reflect.Float64,
|
||||
reflect.Struct,
|
||||
reflect.Ptr:
|
||||
return nil
|
||||
default:
|
||||
return fmt.Errorf("unsupported slice type: %v", fType)
|
||||
}
|
||||
}
|
||||
|
||||
if fType.Kind() == reflect.Map && fType.Key().Kind() != reflect.String {
|
||||
return fmt.Errorf("unsupported map key type: %v", fType.Key())
|
||||
}
|
||||
|
||||
if fType.Kind() == reflect.Func {
|
||||
return fmt.Errorf("unsupported type: %v", fType)
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
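A small sketch of what this metadata pass adds to a freshly decoded tree; the sample struct and label values are invented for the example:

package main

import (
	"fmt"

	"github.com/containous/traefik/pkg/config/parser"
)

// sample is an illustrative configuration type, not part of Traefik.
type sample struct {
	Name string
	Size int
}

func main() {
	node, err := parser.DecodeToNode(map[string]string{
		"traefik.name": "foo",
		"traefik.size": "3",
	})
	if err != nil {
		panic(err)
	}

	// infer Kind, Tag and FieldName for every node from the struct definition
	if err := parser.AddMetadata(&sample{}, node); err != nil {
		panic(err)
	}

	for _, child := range node.Children {
		fmt.Println(child.FieldName, child.Kind) // Name string, Size int
	}
}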
1011
pkg/config/parser/nodes_metadata_test.go
Normal file
File diff suppressed because it is too large
38
pkg/config/parser/parser.go
Normal file
|
@ -0,0 +1,38 @@
|
|||
// Package parser implements decoding and encoding between a flat map of labels and a typed Configuration.
|
||||
package parser
|
||||
|
||||
// Decode decodes the given map of labels into the given element.
|
||||
// If any filters are present, labels which do not match the filters are skipped.
|
||||
// The operation goes through three stages roughly summarized as:
|
||||
// labels -> tree of untyped nodes
|
||||
// untyped nodes -> nodes augmented with metadata such as kind (inferred from element)
|
||||
// "typed" nodes -> typed element
|
||||
func Decode(labels map[string]string, element interface{}, filters ...string) error {
|
||||
node, err := DecodeToNode(labels, filters...)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
err = AddMetadata(element, node)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
err = Fill(element, node)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// Encode converts an element to labels.
|
||||
// element -> node (value) -> label (node)
|
||||
func Encode(element interface{}) (map[string]string, error) {
|
||||
node, err := EncodeToNode(element, true)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return EncodeNode(node), nil
|
||||
}
|
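Putting the pieces together, a hedged end-to-end sketch of the label round trip; the sample struct and label values are invented, and the printed key casing follows the field names as in the encoding tests above:

package main

import (
	"fmt"

	"github.com/containous/traefik/pkg/config/parser"
)

// sample is an illustrative configuration type, not part of Traefik.
type sample struct {
	Foo string
	Bar int
}

func main() {
	element := &sample{}

	// labels -> untyped nodes -> nodes with metadata -> typed element
	err := parser.Decode(map[string]string{
		"traefik.foo": "hello",
		"traefik.bar": "42",
	}, element)
	if err != nil {
		panic(err)
	}
	fmt.Printf("%+v\n", element) // &{Foo:hello Bar:42}

	// and back: typed element -> nodes -> labels
	labels, err := parser.Encode(element)
	if err != nil {
		panic(err)
	}
	fmt.Println(labels) // e.g. map[traefik.Bar:42 traefik.Foo:hello]
}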
18
pkg/config/parser/tags.go
Normal file
|
@ -0,0 +1,18 @@
|
|||
package parser
|
||||
|
||||
const (
|
||||
// TagLabel allows applying a custom behavior.
|
||||
// - "allowEmpty": allows to create an empty struct.
|
||||
// - "-": ignore the field.
|
||||
TagLabel = "label"
|
||||
|
||||
// TagLabelSliceAsStruct allows using a slice of structs by creating a single entry in the slice.
|
||||
// The value is the substitution name used in the label to access the slice.
|
||||
TagLabelSliceAsStruct = "label-slice-as-struct"
|
||||
|
||||
// TagDescription is the documentation for the field.
|
||||
TagDescription = "description"
|
||||
|
||||
// TagLabelAllowEmpty is related to TagLabel.
|
||||
TagLabelAllowEmpty = "allowEmpty"
|
||||
)
|
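To make the tag semantics concrete, a hedged sketch of a struct wired with these tags; every type and field name here is invented for illustration:

package main

// metricsOptions and hostOptions are illustrative sub-configurations, not Traefik types.
type metricsOptions struct {
	Prometheus bool
}

type hostOptions struct {
	Name string
}

type illustration struct {
	// label:"allowEmpty" lets a bare "traefik.metrics=true" enable the section
	// without requiring any child option to be set.
	Metrics *metricsOptions `label:"allowEmpty"`

	// label:"-" hides the field from label decoding and encoding.
	Internal string `label:"-"`

	// label-slice-as-struct exposes the slice through a single "host" entry,
	// e.g. "traefik.host.name=example.com" feeds a one-element slice.
	Hosts []hostOptions `label-slice-as-struct:"host"`

	// description is picked up as the documentation for the field.
	Address string `description:"Address the illustration listens on."`
}

func main() {
	_ = illustration{}
}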
|
@ -1,30 +1,30 @@
|
|||
package static
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"strings"
|
||||
|
||||
"github.com/containous/traefik/pkg/log"
|
||||
)
|
||||
|
||||
// EntryPoint holds the entry point configuration.
|
||||
type EntryPoint struct {
|
||||
Address string
|
||||
Transport *EntryPointsTransport
|
||||
ProxyProtocol *ProxyProtocol
|
||||
ForwardedHeaders *ForwardedHeaders
|
||||
Address string `description:"Entry point address."`
|
||||
Transport *EntryPointsTransport `description:"Configures communication between clients and Traefik."`
|
||||
ProxyProtocol *ProxyProtocol `description:"Proxy-Protocol configuration." label:"allowEmpty"`
|
||||
ForwardedHeaders *ForwardedHeaders `description:"Trust client forwarding headers."`
|
||||
}
|
||||
|
||||
// SetDefaults sets the default values.
|
||||
func (e *EntryPoint) SetDefaults() {
|
||||
e.Transport = &EntryPointsTransport{}
|
||||
e.Transport.SetDefaults()
|
||||
e.ForwardedHeaders = &ForwardedHeaders{}
|
||||
}
|
||||
|
||||
// ForwardedHeaders Trust client forwarding headers.
|
||||
type ForwardedHeaders struct {
|
||||
Insecure bool
|
||||
TrustedIPs []string
|
||||
Insecure bool `description:"Trust all forwarded headers." export:"true"`
|
||||
TrustedIPs []string `description:"Trust only forwarded headers from selected IPs."`
|
||||
}
|
||||
|
||||
// ProxyProtocol contains Proxy-Protocol configuration.
|
||||
type ProxyProtocol struct {
|
||||
Insecure bool `export:"true"`
|
||||
TrustedIPs []string
|
||||
Insecure bool `description:"Trust all." export:"true"`
|
||||
TrustedIPs []string `description:"Trust only selected IPs."`
|
||||
}
|
||||
|
||||
// EntryPoints holds the HTTP entry point list.
|
||||
|
@ -32,103 +32,14 @@ type EntryPoints map[string]*EntryPoint
|
|||
|
||||
// EntryPointsTransport configures communication between clients and Traefik.
|
||||
type EntryPointsTransport struct {
|
||||
LifeCycle *LifeCycle `description:"Timeouts influencing the server life cycle" export:"true"`
|
||||
RespondingTimeouts *RespondingTimeouts `description:"Timeouts for incoming requests to the Traefik instance" export:"true"`
|
||||
LifeCycle *LifeCycle `description:"Timeouts influencing the server life cycle." export:"true"`
|
||||
RespondingTimeouts *RespondingTimeouts `description:"Timeouts for incoming requests to the Traefik instance." export:"true"`
|
||||
}
|
||||
|
||||
// String is the method to format the flag's value, part of the flag.Value interface.
|
||||
// The String method's output will be used in diagnostics.
|
||||
func (ep EntryPoints) String() string {
|
||||
return fmt.Sprintf("%+v", map[string]*EntryPoint(ep))
|
||||
}
|
||||
|
||||
// Get returns the EntryPoints map.
|
||||
func (ep *EntryPoints) Get() interface{} {
|
||||
return *ep
|
||||
}
|
||||
|
||||
// SetValue sets the EntryPoints map with val.
|
||||
func (ep *EntryPoints) SetValue(val interface{}) {
|
||||
*ep = val.(EntryPoints)
|
||||
}
|
||||
|
||||
// Type is the type of the struct.
|
||||
func (ep *EntryPoints) Type() string {
|
||||
return "entrypoints"
|
||||
}
|
||||
|
||||
// Set is the method to set the flag value, part of the flag.Value interface.
|
||||
// Set's argument is a string to be parsed to set the flag.
|
||||
// It's a comma-separated list, so we split it.
|
||||
func (ep *EntryPoints) Set(value string) error {
|
||||
result := parseEntryPointsConfiguration(value)
|
||||
|
||||
(*ep)[result["name"]] = &EntryPoint{
|
||||
Address: result["address"],
|
||||
ProxyProtocol: makeEntryPointProxyProtocol(result),
|
||||
ForwardedHeaders: makeEntryPointForwardedHeaders(result),
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func makeEntryPointProxyProtocol(result map[string]string) *ProxyProtocol {
|
||||
var proxyProtocol *ProxyProtocol
|
||||
|
||||
ppTrustedIPs := result["proxyprotocol_trustedips"]
|
||||
if len(result["proxyprotocol_insecure"]) > 0 || len(ppTrustedIPs) > 0 {
|
||||
proxyProtocol = &ProxyProtocol{
|
||||
Insecure: toBool(result, "proxyprotocol_insecure"),
|
||||
}
|
||||
if len(ppTrustedIPs) > 0 {
|
||||
proxyProtocol.TrustedIPs = strings.Split(ppTrustedIPs, ",")
|
||||
}
|
||||
}
|
||||
|
||||
if proxyProtocol != nil && proxyProtocol.Insecure {
|
||||
log.Warn("ProxyProtocol.insecure:true is dangerous. Please use 'ProxyProtocol.TrustedIPs:IPs' and remove 'ProxyProtocol.insecure:true'")
|
||||
}
|
||||
|
||||
return proxyProtocol
|
||||
}
|
||||
|
||||
func parseEntryPointsConfiguration(raw string) map[string]string {
|
||||
sections := strings.Fields(raw)
|
||||
|
||||
config := make(map[string]string)
|
||||
for _, part := range sections {
|
||||
field := strings.SplitN(part, ":", 2)
|
||||
name := strings.ToLower(strings.Replace(field[0], ".", "_", -1))
|
||||
if len(field) > 1 {
|
||||
config[name] = field[1]
|
||||
} else {
|
||||
if strings.EqualFold(name, "TLS") {
|
||||
config["tls_acme"] = "TLS"
|
||||
} else {
|
||||
config[name] = ""
|
||||
}
|
||||
}
|
||||
}
|
||||
return config
|
||||
}
|
||||
|
||||
func toBool(conf map[string]string, key string) bool {
|
||||
if val, ok := conf[key]; ok {
|
||||
return strings.EqualFold(val, "true") ||
|
||||
strings.EqualFold(val, "enable") ||
|
||||
strings.EqualFold(val, "on")
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
func makeEntryPointForwardedHeaders(result map[string]string) *ForwardedHeaders {
|
||||
forwardedHeaders := &ForwardedHeaders{}
|
||||
forwardedHeaders.Insecure = toBool(result, "forwardedheaders_insecure")
|
||||
|
||||
fhTrustedIPs := result["forwardedheaders_trustedips"]
|
||||
if len(fhTrustedIPs) > 0 {
|
||||
forwardedHeaders.TrustedIPs = strings.Split(fhTrustedIPs, ",")
|
||||
}
|
||||
|
||||
return forwardedHeaders
|
||||
// SetDefaults sets the default values.
|
||||
func (t *EntryPointsTransport) SetDefaults() {
|
||||
t.LifeCycle = &LifeCycle{}
|
||||
t.LifeCycle.SetDefaults()
|
||||
t.RespondingTimeouts = &RespondingTimeouts{}
|
||||
t.RespondingTimeouts.SetDefaults()
|
||||
}
|
||||
|
|
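As a quick usage note for the new defaulting flow shown above, a hedged sketch; the import path is assumed from the "static" package name and the repository layout, and the address is arbitrary:

package main

import (
	"fmt"

	"github.com/containous/traefik/pkg/config/static" // path assumed, not shown in this diff
)

func main() {
	ep := &static.EntryPoint{Address: ":80"}
	// SetDefaults fills Transport (with its life cycle and responding timeout defaults)
	// and an empty ForwardedHeaders, so callers can skip nil checks.
	ep.SetDefaults()

	fmt.Printf("%+v\n", ep.Transport.RespondingTimeouts)
}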
|
@ -1,257 +0,0 @@
|
|||
package static
|
||||
|
||||
import (
|
||||
"testing"
|
||||
|
||||
"github.com/stretchr/testify/assert"
|
||||
"github.com/stretchr/testify/require"
|
||||
)
|
||||
|
||||
func Test_parseEntryPointsConfiguration(t *testing.T) {
|
||||
testCases := []struct {
|
||||
name string
|
||||
value string
|
||||
expectedResult map[string]string
|
||||
}{
|
||||
{
|
||||
name: "all parameters",
|
||||
value: "Name:foo " +
|
||||
"Address::8000 " +
|
||||
"CA:car " +
|
||||
"CA.Optional:true " +
|
||||
"Redirect.EntryPoint:https " +
|
||||
"Redirect.Regex:http://localhost/(.*) " +
|
||||
"Redirect.Replacement:http://mydomain/$1 " +
|
||||
"Redirect.Permanent:true " +
|
||||
"Compress:true " +
|
||||
"ProxyProtocol.TrustedIPs:192.168.0.1 " +
|
||||
"ForwardedHeaders.TrustedIPs:10.0.0.3/24,20.0.0.3/24 " +
|
||||
"Auth.Basic.Realm:myRealm " +
|
||||
"Auth.Basic.Users:test:$apr1$H6uskkkW$IgXLP6ewTrSuBkTrqE8wj/,test2:$apr1$d9hr9HBB$4HxwgUir3HP4EsggP/QNo0 " +
|
||||
"Auth.Basic.RemoveHeader:true " +
|
||||
"Auth.Digest.Users:test:traefik:a2688e031edb4be6a3797f3882655c05,test2:traefik:518845800f9e2bfb1f1f740ec24f074e " +
|
||||
"Auth.Digest.RemoveHeader:true " +
|
||||
"Auth.HeaderField:X-WebAuth-User " +
|
||||
"Auth.Forward.Address:https://authserver.com/auth " +
|
||||
"Auth.Forward.AuthResponseHeaders:X-Auth,X-Test,X-Secret " +
|
||||
"Auth.Forward.TrustForwardHeader:true " +
|
||||
"Auth.Forward.TLS.CA:path/to/local.crt " +
|
||||
"Auth.Forward.TLS.CAOptional:true " +
|
||||
"Auth.Forward.TLS.Cert:path/to/foo.cert " +
|
||||
"Auth.Forward.TLS.Key:path/to/foo.key " +
|
||||
"Auth.Forward.TLS.InsecureSkipVerify:true " +
|
||||
"WhiteList.SourceRange:10.42.0.0/16,152.89.1.33/32,afed:be44::/16 " +
|
||||
"WhiteList.IPStrategy.depth:3 " +
|
||||
"WhiteList.IPStrategy.ExcludedIPs:10.0.0.3/24,20.0.0.3/24 " +
|
||||
"ClientIPStrategy.depth:3 " +
|
||||
"ClientIPStrategy.ExcludedIPs:10.0.0.3/24,20.0.0.3/24 ",
|
||||
expectedResult: map[string]string{
|
||||
"address": ":8000",
|
||||
"auth_basic_realm": "myRealm",
|
||||
"auth_basic_users": "test:$apr1$H6uskkkW$IgXLP6ewTrSuBkTrqE8wj/,test2:$apr1$d9hr9HBB$4HxwgUir3HP4EsggP/QNo0",
|
||||
"auth_basic_removeheader": "true",
|
||||
"auth_digest_users": "test:traefik:a2688e031edb4be6a3797f3882655c05,test2:traefik:518845800f9e2bfb1f1f740ec24f074e",
|
||||
"auth_digest_removeheader": "true",
|
||||
"auth_forward_address": "https://authserver.com/auth",
|
||||
"auth_forward_authresponseheaders": "X-Auth,X-Test,X-Secret",
|
||||
"auth_forward_tls_ca": "path/to/local.crt",
|
||||
"auth_forward_tls_caoptional": "true",
|
||||
"auth_forward_tls_cert": "path/to/foo.cert",
|
||||
"auth_forward_tls_insecureskipverify": "true",
|
||||
"auth_forward_tls_key": "path/to/foo.key",
|
||||
"auth_forward_trustforwardheader": "true",
|
||||
"auth_headerfield": "X-WebAuth-User",
|
||||
"ca": "car",
|
||||
"ca_optional": "true",
|
||||
"compress": "true",
|
||||
"forwardedheaders_trustedips": "10.0.0.3/24,20.0.0.3/24",
|
||||
"name": "foo",
|
||||
"proxyprotocol_trustedips": "192.168.0.1",
|
||||
"redirect_entrypoint": "https",
|
||||
"redirect_permanent": "true",
|
||||
"redirect_regex": "http://localhost/(.*)",
|
||||
"redirect_replacement": "http://mydomain/$1",
|
||||
"whitelist_sourcerange": "10.42.0.0/16,152.89.1.33/32,afed:be44::/16",
|
||||
"whitelist_ipstrategy_depth": "3",
|
||||
"whitelist_ipstrategy_excludedips": "10.0.0.3/24,20.0.0.3/24",
|
||||
"clientipstrategy_depth": "3",
|
||||
"clientipstrategy_excludedips": "10.0.0.3/24,20.0.0.3/24",
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "compress on",
|
||||
value: "name:foo Compress:on",
|
||||
expectedResult: map[string]string{
|
||||
"name": "foo",
|
||||
"compress": "on",
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
for _, test := range testCases {
|
||||
test := test
|
||||
t.Run(test.name, func(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
conf := parseEntryPointsConfiguration(test.value)
|
||||
|
||||
assert.Len(t, conf, len(test.expectedResult))
|
||||
assert.Equal(t, test.expectedResult, conf)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func Test_toBool(t *testing.T) {
|
||||
testCases := []struct {
|
||||
name string
|
||||
value string
|
||||
key string
|
||||
expectedBool bool
|
||||
}{
|
||||
{
|
||||
name: "on",
|
||||
value: "on",
|
||||
key: "foo",
|
||||
expectedBool: true,
|
||||
},
|
||||
{
|
||||
name: "true",
|
||||
value: "true",
|
||||
key: "foo",
|
||||
expectedBool: true,
|
||||
},
|
||||
{
|
||||
name: "enable",
|
||||
value: "enable",
|
||||
key: "foo",
|
||||
expectedBool: true,
|
||||
},
|
||||
{
|
||||
name: "arbitrary string",
|
||||
value: "bar",
|
||||
key: "foo",
|
||||
expectedBool: false,
|
||||
},
|
||||
{
|
||||
name: "no existing entry",
|
||||
value: "bar",
|
||||
key: "fii",
|
||||
expectedBool: false,
|
||||
},
|
||||
}
|
||||
|
||||
for _, test := range testCases {
|
||||
test := test
|
||||
t.Run(test.name, func(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
conf := map[string]string{
|
||||
"foo": test.value,
|
||||
}
|
||||
|
||||
result := toBool(conf, test.key)
|
||||
|
||||
assert.Equal(t, test.expectedBool, result)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestEntryPoints_Set(t *testing.T) {
|
||||
testCases := []struct {
|
||||
name string
|
||||
expression string
|
||||
expectedEntryPointName string
|
||||
expectedEntryPoint *EntryPoint
|
||||
}{
|
||||
{
|
||||
name: "all parameters camelcase",
|
||||
expression: "Name:foo " +
|
||||
"Address::8000 " +
|
||||
"CA:car " +
|
||||
"CA.Optional:true " +
|
||||
"ProxyProtocol.TrustedIPs:192.168.0.1 ",
|
||||
expectedEntryPointName: "foo",
|
||||
expectedEntryPoint: &EntryPoint{
|
||||
Address: ":8000",
|
||||
ProxyProtocol: &ProxyProtocol{
|
||||
Insecure: false,
|
||||
TrustedIPs: []string{"192.168.0.1"},
|
||||
},
|
||||
ForwardedHeaders: &ForwardedHeaders{},
|
||||
// FIXME Test ServersTransport
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "all parameters lowercase",
|
||||
expression: "Name:foo " +
|
||||
"address::8000 " +
|
||||
"tls " +
|
||||
"tls.minversion:VersionTLS11 " +
|
||||
"tls.ciphersuites:TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256,TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA384,TLS_ECDHE_RSA_WITH_CHACHA20_POLY1305,TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA,TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA " +
|
||||
"ca:car " +
|
||||
"ca.Optional:true " +
|
||||
"proxyProtocol.TrustedIPs:192.168.0.1 ",
|
||||
expectedEntryPointName: "foo",
|
||||
expectedEntryPoint: &EntryPoint{
|
||||
Address: ":8000",
|
||||
ProxyProtocol: &ProxyProtocol{
|
||||
Insecure: false,
|
||||
TrustedIPs: []string{"192.168.0.1"},
|
||||
},
|
||||
ForwardedHeaders: &ForwardedHeaders{},
|
||||
// FIXME Test ServersTransport
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "default",
|
||||
expression: "Name:foo",
|
||||
expectedEntryPointName: "foo",
|
||||
expectedEntryPoint: &EntryPoint{
|
||||
ForwardedHeaders: &ForwardedHeaders{},
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "ProxyProtocol insecure true",
|
||||
expression: "Name:foo ProxyProtocol.insecure:true",
|
||||
expectedEntryPointName: "foo",
|
||||
expectedEntryPoint: &EntryPoint{
|
||||
ProxyProtocol: &ProxyProtocol{Insecure: true},
|
||||
ForwardedHeaders: &ForwardedHeaders{},
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "ProxyProtocol insecure false",
|
||||
expression: "Name:foo ProxyProtocol.insecure:false",
|
||||
expectedEntryPointName: "foo",
|
||||
expectedEntryPoint: &EntryPoint{
|
||||
ProxyProtocol: &ProxyProtocol{},
|
||||
ForwardedHeaders: &ForwardedHeaders{},
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "ProxyProtocol TrustedIPs",
|
||||
expression: "Name:foo ProxyProtocol.TrustedIPs:10.0.0.3/24,20.0.0.3/24",
|
||||
expectedEntryPointName: "foo",
|
||||
expectedEntryPoint: &EntryPoint{
|
||||
ProxyProtocol: &ProxyProtocol{
|
||||
TrustedIPs: []string{"10.0.0.3/24", "20.0.0.3/24"},
|
||||
},
|
||||
ForwardedHeaders: &ForwardedHeaders{},
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
for _, test := range testCases {
|
||||
test := test
|
||||
t.Run(test.name, func(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
eps := EntryPoints{}
|
||||
err := eps.Set(test.expression)
|
||||
require.NoError(t, err)
|
||||
|
||||
ep := eps[test.expectedEntryPointName]
|
||||
assert.EqualValues(t, test.expectedEntryPoint, ep)
|
||||
})
|
||||
}
|
||||
}
|
|
@ -5,7 +5,6 @@ import (
|
|||
"strings"
|
||||
"time"
|
||||
|
||||
"github.com/containous/flaeg/parse"
|
||||
"github.com/containous/traefik/pkg/log"
|
||||
"github.com/containous/traefik/pkg/ping"
|
||||
acmeprovider "github.com/containous/traefik/pkg/provider/acme"
|
||||
|
@ -47,99 +46,127 @@ const (
|
|||
type Configuration struct {
|
||||
Global *Global `description:"Global configuration options" export:"true"`
|
||||
|
||||
ServersTransport *ServersTransport `description:"Servers default transport" export:"true"`
|
||||
EntryPoints EntryPoints `description:"Entry points definition using format: --entryPoints='Name:http Address::8000' --entryPoints='Name:https Address::4442'" export:"true"`
|
||||
Providers *Providers `description:"Providers configuration" export:"true"`
|
||||
ServersTransport *ServersTransport `description:"Servers default transport." export:"true"`
|
||||
EntryPoints EntryPoints `description:"Entry points definition." export:"true"`
|
||||
Providers *Providers `description:"Providers configuration." export:"true"`
|
||||
|
||||
API *API `description:"Enable api/dashboard" export:"true"`
|
||||
Metrics *types.Metrics `description:"Enable a metrics exporter" export:"true"`
|
||||
Ping *ping.Handler `description:"Enable ping" export:"true"`
|
||||
API *API `description:"Enable api/dashboard." export:"true" label:"allowEmpty"`
|
||||
Metrics *types.Metrics `description:"Enable a metrics exporter." export:"true"`
|
||||
Ping *ping.Handler `description:"Enable ping." export:"true" label:"allowEmpty"`
|
||||
// Rest *rest.Provider `description:"Enable Rest backend with default settings" export:"true"`
|
||||
|
||||
Log *types.TraefikLog `description:"Traefik log settings" export:"true"`
|
||||
AccessLog *types.AccessLog `description:"Access log settings" export:"true"`
|
||||
Tracing *Tracing `description:"OpenTracing configuration" export:"true"`
|
||||
Log *types.TraefikLog `description:"Traefik log settings." export:"true"`
|
||||
AccessLog *types.AccessLog `description:"Access log settings." export:"true" label:"allowEmpty"`
|
||||
Tracing *Tracing `description:"OpenTracing configuration." export:"true" label:"allowEmpty"`
|
||||
|
||||
HostResolver *types.HostResolverConfig `description:"Enable CNAME Flattening" export:"true"`
|
||||
HostResolver *types.HostResolverConfig `description:"Enable CNAME Flattening." export:"true" label:"allowEmpty"`
|
||||
|
||||
ACME *acmeprovider.Configuration `description:"Enable ACME (Let's Encrypt): automatic SSL" export:"true"`
|
||||
ACME *acmeprovider.Configuration `description:"Enable ACME (Let's Encrypt): automatic SSL." export:"true"`
|
||||
}
|
||||
|
||||
// Global holds the global configuration.
|
||||
type Global struct {
|
||||
Debug bool `short:"d" description:"Enable debug mode" export:"true"`
|
||||
CheckNewVersion bool `description:"Periodically check if a new version has been released" export:"true"`
|
||||
SendAnonymousUsage *bool `description:"send periodically anonymous usage statistics" export:"true"`
|
||||
Debug bool `description:"Enable debug mode." export:"true"`
|
||||
CheckNewVersion bool `description:"Periodically check if a new version has been released." export:"true"`
|
||||
SendAnonymousUsage *bool `description:"Periodically send anonymous usage statistics. If the option is not specified, it will be enabled by default." export:"true"`
|
||||
}
|
||||
|
||||
// ServersTransport options to configure communication between Traefik and the servers
|
||||
type ServersTransport struct {
|
||||
InsecureSkipVerify bool `description:"Disable SSL certificate verification" export:"true"`
|
||||
RootCAs tls.FilesOrContents `description:"Add cert file for self-signed certificate"`
|
||||
MaxIdleConnsPerHost int `description:"If non-zero, controls the maximum idle (keep-alive) to keep per-host. If zero, DefaultMaxIdleConnsPerHost is used" export:"true"`
|
||||
ForwardingTimeouts *ForwardingTimeouts `description:"Timeouts for requests forwarded to the backend servers" export:"true"`
|
||||
InsecureSkipVerify bool `description:"Disable SSL certificate verification." export:"true"`
|
||||
RootCAs []tls.FileOrContent `description:"Add cert file for self-signed certificate."`
|
||||
MaxIdleConnsPerHost int `description:"If non-zero, controls the maximum idle (keep-alive) connections to keep per-host. If zero, DefaultMaxIdleConnsPerHost is used." export:"true"`
|
||||
ForwardingTimeouts *ForwardingTimeouts `description:"Timeouts for requests forwarded to the backend servers." export:"true"`
|
||||
}
|
||||
|
||||
// API holds the API configuration
|
||||
type API struct {
|
||||
EntryPoint string `description:"EntryPoint" export:"true"`
|
||||
Dashboard bool `description:"Activate dashboard" export:"true"`
|
||||
Statistics *types.Statistics `description:"Enable more detailed statistics" export:"true"`
|
||||
Middlewares []string `description:"Middleware list" export:"true"`
|
||||
DashboardAssets *assetfs.AssetFS `json:"-"`
|
||||
EntryPoint string `description:"EntryPoint." export:"true"`
|
||||
Dashboard bool `description:"Activate dashboard." export:"true"`
|
||||
Statistics *types.Statistics `description:"Enable more detailed statistics." export:"true" label:"allowEmpty"`
|
||||
Middlewares []string `description:"Middleware list." export:"true"`
|
||||
DashboardAssets *assetfs.AssetFS `json:"-" label:"-"`
|
||||
}
|
||||
|
||||
// SetDefaults sets the default values.
|
||||
func (a *API) SetDefaults() {
|
||||
a.EntryPoint = "traefik"
|
||||
a.Dashboard = true
|
||||
}
|
||||
|
||||
// RespondingTimeouts contains timeout configurations for incoming requests to the Traefik instance.
|
||||
type RespondingTimeouts struct {
|
||||
ReadTimeout parse.Duration `description:"ReadTimeout is the maximum duration for reading the entire request, including the body. If zero, no timeout is set" export:"true"`
|
||||
WriteTimeout parse.Duration `description:"WriteTimeout is the maximum duration before timing out writes of the response. If zero, no timeout is set" export:"true"`
|
||||
IdleTimeout parse.Duration `description:"IdleTimeout is the maximum amount duration an idle (keep-alive) connection will remain idle before closing itself. Defaults to 180 seconds. If zero, no timeout is set" export:"true"`
|
||||
ReadTimeout types.Duration `description:"ReadTimeout is the maximum duration for reading the entire request, including the body. If zero, no timeout is set." export:"true"`
|
||||
WriteTimeout types.Duration `description:"WriteTimeout is the maximum duration before timing out writes of the response. If zero, no timeout is set." export:"true"`
|
||||
IdleTimeout types.Duration `description:"IdleTimeout is the maximum duration an idle (keep-alive) connection will remain idle before closing itself. If zero, no timeout is set." export:"true"`
|
||||
}
|
||||
|
||||
// SetDefaults sets the default values.
|
||||
func (a *RespondingTimeouts) SetDefaults() {
|
||||
a.IdleTimeout = types.Duration(DefaultIdleTimeout)
|
||||
}
|
||||
|
||||
// ForwardingTimeouts contains timeout configurations for forwarding requests to the backend servers.
|
||||
type ForwardingTimeouts struct {
|
||||
DialTimeout parse.Duration `description:"The amount of time to wait until a connection to a backend server can be established. Defaults to 30 seconds. If zero, no timeout exists" export:"true"`
|
||||
ResponseHeaderTimeout parse.Duration `description:"The amount of time to wait for a server's response headers after fully writing the request (including its body, if any). If zero, no timeout exists" export:"true"`
|
||||
DialTimeout types.Duration `description:"The amount of time to wait until a connection to a backend server can be established. If zero, no timeout exists." export:"true"`
|
||||
ResponseHeaderTimeout types.Duration `description:"The amount of time to wait for a server's response headers after fully writing the request (including its body, if any). If zero, no timeout exists." export:"true"`
|
||||
}
|
||||
|
||||
// SetDefaults sets the default values.
|
||||
func (f *ForwardingTimeouts) SetDefaults() {
|
||||
f.DialTimeout = types.Duration(30 * time.Second)
|
||||
}
|
||||
|
||||
// LifeCycle contains configurations relevant to the lifecycle (such as the shutdown phase) of Traefik.
|
||||
type LifeCycle struct {
|
||||
RequestAcceptGraceTimeout parse.Duration `description:"Duration to keep accepting requests before Traefik initiates the graceful shutdown procedure"`
|
||||
GraceTimeOut parse.Duration `description:"Duration to give active requests a chance to finish before Traefik stops"`
|
||||
RequestAcceptGraceTimeout types.Duration `description:"Duration to keep accepting requests before Traefik initiates the graceful shutdown procedure."`
|
||||
GraceTimeOut types.Duration `description:"Duration to give active requests a chance to finish before Traefik stops."`
|
||||
}
|
||||
|
||||
// SetDefaults sets the default values.
|
||||
func (a *LifeCycle) SetDefaults() {
|
||||
a.GraceTimeOut = types.Duration(DefaultGraceTimeout)
|
||||
}
|
||||
|
||||
// Tracing holds the tracing configuration.
|
||||
type Tracing struct {
|
||||
Backend string `description:"Selects the tracing backend ('jaeger','zipkin','datadog','instana')." export:"true"`
|
||||
ServiceName string `description:"Set the name for this service" export:"true"`
|
||||
SpanNameLimit int `description:"Set the maximum character limit for Span names (default 0 = no limit)" export:"true"`
|
||||
Jaeger *jaeger.Config `description:"Settings for jaeger"`
|
||||
Zipkin *zipkin.Config `description:"Settings for zipkin"`
|
||||
DataDog *datadog.Config `description:"Settings for DataDog"`
|
||||
Instana *instana.Config `description:"Settings for Instana"`
|
||||
Haystack *haystack.Config `description:"Settings for Haystack"`
|
||||
ServiceName string `description:"Set the name for this service." export:"true"`
|
||||
SpanNameLimit int `description:"Set the maximum character limit for Span names (default 0 = no limit)." export:"true"`
|
||||
Jaeger *jaeger.Config `description:"Settings for jaeger." label:"allowEmpty"`
|
||||
Zipkin *zipkin.Config `description:"Settings for zipkin." label:"allowEmpty"`
|
||||
DataDog *datadog.Config `description:"Settings for DataDog." label:"allowEmpty"`
|
||||
Instana *instana.Config `description:"Settings for Instana." label:"allowEmpty"`
|
||||
Haystack *haystack.Config `description:"Settings for Haystack." label:"allowEmpty"`
|
||||
}
|
||||
|
||||
// SetDefaults sets the default values.
|
||||
func (t *Tracing) SetDefaults() {
|
||||
t.Backend = "jaeger"
|
||||
t.ServiceName = "traefik"
|
||||
t.SpanNameLimit = 0
|
||||
}
|
||||
|
||||
// Providers contains providers configuration
|
||||
type Providers struct {
|
||||
ProvidersThrottleDuration parse.Duration `description:"Backends throttle duration: minimum duration between 2 events from providers before applying a new configuration. It avoids unnecessary reloads if multiples events are sent in a short amount of time." export:"true"`
|
||||
Docker *docker.Provider `description:"Enable Docker backend with default settings" export:"true"`
|
||||
File *file.Provider `description:"Enable File backend with default settings" export:"true"`
|
||||
Marathon *marathon.Provider `description:"Enable Marathon backend with default settings" export:"true"`
|
||||
Kubernetes *ingress.Provider `description:"Enable Kubernetes backend with default settings" export:"true"`
|
||||
KubernetesCRD *crd.Provider `description:"Enable Kubernetes backend with default settings" export:"true"`
|
||||
Rest *rest.Provider `description:"Enable Rest backend with default settings" export:"true"`
|
||||
Rancher *rancher.Provider `description:"Enable Rancher backend with default settings" export:"true"`
|
||||
ProvidersThrottleDuration types.Duration `description:"Backends throttle duration: minimum duration between 2 events from providers before applying a new configuration. It avoids unnecessary reloads if multiple events are sent in a short amount of time." export:"true"`
|
||||
Docker *docker.Provider `description:"Enable Docker backend with default settings." export:"true" label:"allowEmpty"`
|
||||
File *file.Provider `description:"Enable File backend with default settings." export:"true" label:"allowEmpty"`
|
||||
Marathon *marathon.Provider `description:"Enable Marathon backend with default settings." export:"true" label:"allowEmpty"`
|
||||
Kubernetes *ingress.Provider `description:"Enable Kubernetes backend with default settings." export:"true" label:"allowEmpty"`
|
||||
KubernetesCRD *crd.Provider `description:"Enable Kubernetes backend with default settings." export:"true" label:"allowEmpty"`
|
||||
Rest *rest.Provider `description:"Enable Rest backend with default settings." export:"true" label:"allowEmpty"`
|
||||
Rancher *rancher.Provider `description:"Enable Rancher backend with default settings." export:"true" label:"allowEmpty"`
|
||||
}
|
||||
|
||||
// SetEffectiveConfiguration adds missing configuration parameters derived from existing ones.
|
||||
// It also takes care of maintaining backwards compatibility.
|
||||
func (c *Configuration) SetEffectiveConfiguration(configFile string) {
|
||||
if len(c.EntryPoints) == 0 {
|
||||
ep := &EntryPoint{Address: ":80"}
|
||||
ep.SetDefaults()
|
||||
c.EntryPoints = EntryPoints{
|
||||
"http": &EntryPoint{
|
||||
Address: ":80",
|
||||
},
|
||||
"http": ep,
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -148,33 +175,15 @@ func (c *Configuration) SetEffectiveConfiguration(configFile string) {
|
|||
(c.Metrics != nil && c.Metrics.Prometheus != nil && c.Metrics.Prometheus.EntryPoint == DefaultInternalEntryPointName) ||
|
||||
(c.Providers.Rest != nil && c.Providers.Rest.EntryPoint == DefaultInternalEntryPointName) {
|
||||
if _, ok := c.EntryPoints[DefaultInternalEntryPointName]; !ok {
|
||||
c.EntryPoints[DefaultInternalEntryPointName] = &EntryPoint{Address: ":8080"}
|
||||
}
|
||||
}
|
||||
|
||||
for _, entryPoint := range c.EntryPoints {
|
||||
if entryPoint.Transport == nil {
|
||||
entryPoint.Transport = &EntryPointsTransport{}
|
||||
}
|
||||
|
||||
// Make sure LifeCycle isn't nil to spare nil checks elsewhere.
|
||||
if entryPoint.Transport.LifeCycle == nil {
|
||||
entryPoint.Transport.LifeCycle = &LifeCycle{
|
||||
GraceTimeOut: parse.Duration(DefaultGraceTimeout),
|
||||
}
|
||||
entryPoint.Transport.RespondingTimeouts = &RespondingTimeouts{
|
||||
IdleTimeout: parse.Duration(DefaultIdleTimeout),
|
||||
}
|
||||
}
|
||||
|
||||
if entryPoint.ForwardedHeaders == nil {
|
||||
entryPoint.ForwardedHeaders = &ForwardedHeaders{}
|
||||
ep := &EntryPoint{Address: ":8080"}
|
||||
ep.SetDefaults()
|
||||
c.EntryPoints[DefaultInternalEntryPointName] = ep
|
||||
}
|
||||
}
|
||||
|
||||
if c.Providers.Docker != nil {
|
||||
if c.Providers.Docker.SwarmModeRefreshSeconds <= 0 {
|
||||
c.Providers.Docker.SwarmModeRefreshSeconds = 15
|
||||
c.Providers.Docker.SwarmModeRefreshSeconds = types.Duration(15 * time.Second)
|
||||
}
|
||||
}
|
||||
|
||||
|
|