Add KV store providers (dynamic configuration only)

Co-authored-by: Jean-Baptiste Doumenjou <jb.doumenjou@gmail.com>

parent 028683666d
commit 9b9f4be6a4

61 changed files with 5825 additions and 70 deletions
9  pkg/config/env/env.go  vendored
@@ -46,17 +46,20 @@ func Encode(element interface{}) ([]parser.Flat, error) {
 		return nil, nil
 	}
 
-	node, err := parser.EncodeToNode(element, parser.DefaultRootName, false)
+	etnOpts := parser.EncoderToNodeOpts{OmitEmpty: false, TagName: parser.TagLabel, AllowSliceAsStruct: true}
+	node, err := parser.EncodeToNode(element, parser.DefaultRootName, etnOpts)
 	if err != nil {
 		return nil, err
 	}
 
-	err = parser.AddMetadata(element, node)
+	metaOpts := parser.MetadataOpts{TagName: parser.TagLabel, AllowSliceAsStruct: true}
+	err = parser.AddMetadata(element, node, metaOpts)
 	if err != nil {
 		return nil, err
 	}
 
-	return parser.EncodeToFlat(element, node, parser.FlatOpts{Case: "upper", Separator: "_"})
+	flatOpts := parser.FlatOpts{Case: "upper", Separator: "_", TagName: parser.TagLabel}
+	return parser.EncodeToFlat(element, node, flatOpts)
 }
 
 func checkPrefix(prefix string) error {
@@ -22,10 +22,11 @@ func Decode(filePath string, element interface{}) error {
 		return err
 	}
 
-	err = parser.AddMetadata(element, root)
+	metaOpts := parser.MetadataOpts{TagName: parser.TagLabel, AllowSliceAsStruct: true}
+	err = parser.AddMetadata(element, root, metaOpts)
 	if err != nil {
 		return err
 	}
 
-	return parser.Fill(element, root)
+	return parser.Fill(element, root, parser.FillerOpts{AllowSliceAsStruct: true})
 }
@@ -30,15 +30,18 @@ func Encode(element interface{}) ([]parser.Flat, error) {
 		return nil, nil
 	}
 
-	node, err := parser.EncodeToNode(element, parser.DefaultRootName, false)
+	etnOpts := parser.EncoderToNodeOpts{OmitEmpty: false, TagName: parser.TagLabel, AllowSliceAsStruct: true}
+	node, err := parser.EncodeToNode(element, parser.DefaultRootName, etnOpts)
 	if err != nil {
 		return nil, err
 	}
 
-	err = parser.AddMetadata(element, node)
+	metaOpts := parser.MetadataOpts{TagName: parser.TagLabel, AllowSliceAsStruct: true}
+	err = parser.AddMetadata(element, node, metaOpts)
 	if err != nil {
 		return nil, err
 	}
 
-	return parser.EncodeToFlat(element, node, parser.FlatOpts{Separator: ".", SkipRoot: true})
+	flatOpts := parser.FlatOpts{Separator: ".", SkipRoot: true, TagName: parser.TagLabel}
+	return parser.EncodeToFlat(element, node, flatOpts)
 }
75  pkg/config/kv/kv.go  Normal file
@@ -0,0 +1,75 @@
package kv

import (
	"path"
	"reflect"

	"github.com/abronan/valkeyrie/store"
	"github.com/containous/traefik/v2/pkg/config/parser"
)

// Decode decodes the given KV pairs into the given element.
// The operation goes through three stages roughly summarized as:
// KV pairs -> tree of untyped nodes
// untyped nodes -> nodes augmented with metadata such as kind (inferred from element)
// "typed" nodes -> typed element
func Decode(pairs []*store.KVPair, element interface{}, rootName string) error {
	if element == nil {
		return nil
	}

	filters := getRootFieldNames(rootName, element)

	node, err := DecodeToNode(pairs, rootName, filters...)
	if err != nil {
		return err
	}

	metaOpts := parser.MetadataOpts{TagName: parser.TagLabel, AllowSliceAsStruct: false}
	err = parser.AddMetadata(element, node, metaOpts)
	if err != nil {
		return err
	}

	return parser.Fill(element, node, parser.FillerOpts{AllowSliceAsStruct: false})
}

func getRootFieldNames(rootName string, element interface{}) []string {
	if element == nil {
		return nil
	}

	rootType := reflect.TypeOf(element)

	return getFieldNames(rootName, rootType)
}

func getFieldNames(rootName string, rootType reflect.Type) []string {
	var names []string

	if rootType.Kind() == reflect.Ptr {
		rootType = rootType.Elem()
	}

	if rootType.Kind() != reflect.Struct {
		return nil
	}

	for i := 0; i < rootType.NumField(); i++ {
		field := rootType.Field(i)

		if !parser.IsExported(field) {
			continue
		}

		if field.Anonymous &&
			(field.Type.Kind() == reflect.Ptr && field.Type.Elem().Kind() == reflect.Struct || field.Type.Kind() == reflect.Struct) {
			names = append(names, getFieldNames(rootName, field.Type)...)
			continue
		}

		names = append(names, path.Join(rootName, field.Name))
	}

	return names
}
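For orientation, here is a minimal sketch (not part of the commit) of how this decoder is driven end to end; the Config struct and the keys are invented for illustration and mirror what the tests further down exercise.

package main

import (
	"fmt"

	"github.com/abronan/valkeyrie/store"
	"github.com/containous/traefik/v2/pkg/config/kv"
)

// Config is an illustrative target type; any exported struct works.
type Config struct {
	Host  string
	Ports []string
}

func main() {
	pairs := []*store.KVPair{
		{Key: "traefik/host", Value: []byte("example.com")},
		{Key: "traefik/ports/0", Value: []byte("80")},
		{Key: "traefik/ports/1", Value: []byte("443")},
	}

	cfg := &Config{}
	if err := kv.Decode(pairs, cfg, "traefik"); err != nil {
		panic(err)
	}

	// The indexed keys are folded into a single slice value.
	fmt.Printf("%+v\n", cfg) // &{Host:example.com Ports:[80 443]}
}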
128  pkg/config/kv/kv_node.go  Normal file
@ -0,0 +1,128 @@
|
|||
package kv
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"regexp"
|
||||
"sort"
|
||||
"strings"
|
||||
|
||||
"github.com/abronan/valkeyrie/store"
|
||||
"github.com/containous/traefik/v2/pkg/config/parser"
|
||||
)
|
||||
|
||||
// DecodeToNode converts the labels to a tree of nodes.
|
||||
// If any filters are present, labels which do not match the filters are skipped.
|
||||
func DecodeToNode(pairs []*store.KVPair, rootName string, filters ...string) (*parser.Node, error) {
|
||||
sortedPairs := filterPairs(pairs, filters)
|
||||
|
||||
exp := regexp.MustCompile(`^\d+$`)
|
||||
|
||||
var node *parser.Node
|
||||
|
||||
for i, pair := range sortedPairs {
|
||||
split := strings.FieldsFunc(pair.Key, func(c rune) bool { return c == '/' })
|
||||
|
||||
if split[0] != rootName {
|
||||
return nil, fmt.Errorf("invalid label root %s", split[0])
|
||||
}
|
||||
|
||||
var parts []string
|
||||
for _, fragment := range split {
|
||||
if exp.MatchString(fragment) {
|
||||
parts = append(parts, "["+fragment+"]")
|
||||
} else {
|
||||
parts = append(parts, fragment)
|
||||
}
|
||||
}
|
||||
|
||||
if i == 0 {
|
||||
node = &parser.Node{}
|
||||
}
|
||||
decodeToNode(node, parts, string(pair.Value))
|
||||
}
|
||||
|
||||
return node, nil
|
||||
}
|
||||
|
||||
func decodeToNode(root *parser.Node, path []string, value string) {
|
||||
if len(root.Name) == 0 {
|
||||
root.Name = path[0]
|
||||
}
|
||||
|
||||
// it's a leaf or not -> children
|
||||
if len(path) > 1 {
|
||||
if n := containsNode(root.Children, path[1]); n != nil {
|
||||
// the child already exists
|
||||
decodeToNode(n, path[1:], value)
|
||||
} else {
|
||||
// new child
|
||||
child := &parser.Node{Name: path[1]}
|
||||
decodeToNode(child, path[1:], value)
|
||||
root.Children = append(root.Children, child)
|
||||
}
|
||||
} else {
|
||||
root.Value = value
|
||||
}
|
||||
}
|
||||
|
||||
func containsNode(nodes []*parser.Node, name string) *parser.Node {
|
||||
for _, n := range nodes {
|
||||
if strings.EqualFold(name, n.Name) {
|
||||
return n
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func filterPairs(pairs []*store.KVPair, filters []string) []*store.KVPair {
|
||||
exp := regexp.MustCompile(`^(.+)/\d+$`)
|
||||
|
||||
sort.Slice(pairs, func(i, j int) bool {
|
||||
return pairs[i].Key < pairs[j].Key
|
||||
})
|
||||
|
||||
var simplePairs = map[string]*store.KVPair{}
|
||||
var slicePairs = map[string][]string{}
|
||||
|
||||
for _, pair := range pairs {
|
||||
if len(filters) == 0 {
|
||||
// Slice of simple type
|
||||
if exp.MatchString(pair.Key) {
|
||||
sanitizedKey := exp.FindStringSubmatch(pair.Key)[1]
|
||||
slicePairs[sanitizedKey] = append(slicePairs[sanitizedKey], string(pair.Value))
|
||||
} else {
|
||||
simplePairs[pair.Key] = pair
|
||||
}
|
||||
continue
|
||||
}
|
||||
|
||||
for _, filter := range filters {
|
||||
if len(pair.Key) >= len(filter) && strings.EqualFold(pair.Key[:len(filter)], filter) {
|
||||
// Slice of simple type
|
||||
if exp.MatchString(pair.Key) {
|
||||
sanitizedKey := exp.FindStringSubmatch(pair.Key)[1]
|
||||
slicePairs[sanitizedKey] = append(slicePairs[sanitizedKey], string(pair.Value))
|
||||
} else {
|
||||
simplePairs[pair.Key] = pair
|
||||
}
|
||||
continue
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
var sortedPairs []*store.KVPair
|
||||
for k, v := range slicePairs {
|
||||
delete(simplePairs, k)
|
||||
sortedPairs = append(sortedPairs, &store.KVPair{Key: k, Value: []byte(strings.Join(v, ","))})
|
||||
}
|
||||
|
||||
for _, v := range simplePairs {
|
||||
sortedPairs = append(sortedPairs, v)
|
||||
}
|
||||
|
||||
sort.Slice(sortedPairs, func(i, j int) bool {
|
||||
return sortedPairs[i].Key < sortedPairs[j].Key
|
||||
})
|
||||
|
||||
return sortedPairs
|
||||
}
|
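As a quick illustration (not part of the commit), numeric path segments become bracketed node names in the decoded tree; the keys and values below are invented.

package main

import (
	"fmt"

	"github.com/abronan/valkeyrie/store"
	"github.com/containous/traefik/v2/pkg/config/kv"
)

func main() {
	pairs := []*store.KVPair{
		{Key: "traefik/servers/0/url", Value: []byte("http://10.0.0.1")},
		{Key: "traefik/servers/1/url", Value: []byte("http://10.0.0.2")},
	}

	node, err := kv.DecodeToNode(pairs, "traefik")
	if err != nil {
		panic(err)
	}

	// Resulting tree: traefik -> servers -> [0]/url, [1]/url
	fmt.Println(node.Children[0].Children[0].Name) // "[0]"
}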
274  pkg/config/kv/kv_node_test.go  Normal file
@ -0,0 +1,274 @@
|
|||
package kv
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"testing"
|
||||
|
||||
"github.com/abronan/valkeyrie/store"
|
||||
"github.com/containous/traefik/v2/pkg/config/parser"
|
||||
"github.com/stretchr/testify/assert"
|
||||
"github.com/stretchr/testify/require"
|
||||
)
|
||||
|
||||
func TestDecodeToNode(t *testing.T) {
|
||||
type expected struct {
|
||||
error bool
|
||||
node *parser.Node
|
||||
}
|
||||
|
||||
testCases := []struct {
|
||||
desc string
|
||||
in map[string]string
|
||||
filters []string
|
||||
expected expected
|
||||
}{
|
||||
{
|
||||
desc: "no label",
|
||||
in: map[string]string{},
|
||||
expected: expected{node: nil},
|
||||
},
|
||||
{
|
||||
desc: "level 1",
|
||||
in: map[string]string{
|
||||
"traefik/foo": "bar",
|
||||
},
|
||||
expected: expected{node: &parser.Node{
|
||||
Name: "traefik",
|
||||
Children: []*parser.Node{
|
||||
{Name: "foo", Value: "bar"},
|
||||
},
|
||||
}},
|
||||
},
|
||||
{
|
||||
desc: "level 1 empty value",
|
||||
in: map[string]string{
|
||||
"traefik/foo": "",
|
||||
},
|
||||
expected: expected{node: &parser.Node{
|
||||
Name: "traefik",
|
||||
Children: []*parser.Node{
|
||||
{Name: "foo", Value: ""},
|
||||
},
|
||||
}},
|
||||
},
|
||||
{
|
||||
desc: "level 2",
|
||||
in: map[string]string{
|
||||
"traefik/foo/bar": "bar",
|
||||
},
|
||||
expected: expected{node: &parser.Node{
|
||||
Name: "traefik",
|
||||
Children: []*parser.Node{{
|
||||
Name: "foo",
|
||||
Children: []*parser.Node{
|
||||
{Name: "bar", Value: "bar"},
|
||||
},
|
||||
}},
|
||||
}},
|
||||
},
|
||||
{
|
||||
desc: "several entries, level 0",
|
||||
in: map[string]string{
|
||||
"traefik": "bar",
|
||||
"traefic": "bur",
|
||||
},
|
||||
expected: expected{error: true},
|
||||
},
|
||||
{
|
||||
desc: "several entries, prefix filter",
|
||||
in: map[string]string{
|
||||
"traefik/foo": "bar",
|
||||
"traefik/fii": "bir",
|
||||
},
|
||||
filters: []string{"traefik/Foo"},
|
||||
expected: expected{node: &parser.Node{
|
||||
Name: "traefik",
|
||||
Children: []*parser.Node{
|
||||
{Name: "foo", Value: "bar"},
|
||||
},
|
||||
}},
|
||||
},
|
||||
{
|
||||
desc: "several entries, level 1",
|
||||
in: map[string]string{
|
||||
"traefik/foo": "bar",
|
||||
"traefik/fii": "bur",
|
||||
},
|
||||
expected: expected{node: &parser.Node{
|
||||
Name: "traefik",
|
||||
Children: []*parser.Node{
|
||||
{Name: "fii", Value: "bur"},
|
||||
{Name: "foo", Value: "bar"},
|
||||
},
|
||||
}},
|
||||
},
|
||||
{
|
||||
desc: "several entries, level 2",
|
||||
in: map[string]string{
|
||||
"traefik/foo/aaa": "bar",
|
||||
"traefik/foo/bbb": "bur",
|
||||
},
|
||||
expected: expected{node: &parser.Node{
|
||||
Name: "traefik",
|
||||
Children: []*parser.Node{
|
||||
{Name: "foo", Children: []*parser.Node{
|
||||
{Name: "aaa", Value: "bar"},
|
||||
{Name: "bbb", Value: "bur"},
|
||||
}},
|
||||
},
|
||||
}},
|
||||
},
|
||||
{
|
||||
desc: "several entries, level 2, case insensitive",
|
||||
in: map[string]string{
|
||||
"traefik/foo/aaa": "bar",
|
||||
"traefik/Foo/bbb": "bur",
|
||||
},
|
||||
expected: expected{node: &parser.Node{
|
||||
Name: "traefik",
|
||||
Children: []*parser.Node{
|
||||
{Name: "Foo", Children: []*parser.Node{
|
||||
{Name: "bbb", Value: "bur"},
|
||||
{Name: "aaa", Value: "bar"},
|
||||
}},
|
||||
},
|
||||
}},
|
||||
},
|
||||
{
|
||||
desc: "several entries, level 2, 3 children",
|
||||
in: map[string]string{
|
||||
"traefik/foo/aaa": "bar",
|
||||
"traefik/foo/bbb": "bur",
|
||||
"traefik/foo/ccc": "bir",
|
||||
},
|
||||
expected: expected{node: &parser.Node{
|
||||
Name: "traefik",
|
||||
Children: []*parser.Node{
|
||||
{Name: "foo", Children: []*parser.Node{
|
||||
{Name: "aaa", Value: "bar"},
|
||||
{Name: "bbb", Value: "bur"},
|
||||
{Name: "ccc", Value: "bir"},
|
||||
}},
|
||||
},
|
||||
}},
|
||||
},
|
||||
{
|
||||
desc: "several entries, level 3",
|
||||
in: map[string]string{
|
||||
"traefik/foo/bar/aaa": "bar",
|
||||
"traefik/foo/bar/bbb": "bur",
|
||||
},
|
||||
expected: expected{node: &parser.Node{
|
||||
Name: "traefik",
|
||||
Children: []*parser.Node{
|
||||
{Name: "foo", Children: []*parser.Node{
|
||||
{Name: "bar", Children: []*parser.Node{
|
||||
{Name: "aaa", Value: "bar"},
|
||||
{Name: "bbb", Value: "bur"},
|
||||
}},
|
||||
}},
|
||||
},
|
||||
}},
|
||||
},
|
||||
{
|
||||
desc: "several entries, level 3, 2 children level 1",
|
||||
in: map[string]string{
|
||||
"traefik/foo/bar/aaa": "bar",
|
||||
"traefik/foo/bar/bbb": "bur",
|
||||
"traefik/bar/foo/bbb": "bir",
|
||||
},
|
||||
expected: expected{node: &parser.Node{
|
||||
Name: "traefik",
|
||||
Children: []*parser.Node{
|
||||
{Name: "bar", Children: []*parser.Node{
|
||||
{Name: "foo", Children: []*parser.Node{
|
||||
{Name: "bbb", Value: "bir"},
|
||||
}},
|
||||
}},
|
||||
{Name: "foo", Children: []*parser.Node{
|
||||
{Name: "bar", Children: []*parser.Node{
|
||||
{Name: "aaa", Value: "bar"},
|
||||
{Name: "bbb", Value: "bur"},
|
||||
}},
|
||||
}},
|
||||
},
|
||||
}},
|
||||
},
|
||||
{
|
||||
desc: "several entries, slice syntax",
|
||||
in: map[string]string{
|
||||
"traefik/foo/0/aaa": "bar0",
|
||||
"traefik/foo/0/bbb": "bur0",
|
||||
"traefik/foo/1/aaa": "bar1",
|
||||
"traefik/foo/1/bbb": "bur1",
|
||||
},
|
||||
expected: expected{node: &parser.Node{
|
||||
Name: "traefik",
|
||||
Children: []*parser.Node{
|
||||
{Name: "foo", Children: []*parser.Node{
|
||||
{Name: "[0]", Children: []*parser.Node{
|
||||
{Name: "aaa", Value: "bar0"},
|
||||
{Name: "bbb", Value: "bur0"},
|
||||
}},
|
||||
{Name: "[1]", Children: []*parser.Node{
|
||||
{Name: "aaa", Value: "bar1"},
|
||||
{Name: "bbb", Value: "bur1"},
|
||||
}},
|
||||
}},
|
||||
},
|
||||
}},
|
||||
},
|
||||
{
|
||||
desc: "several entries, slice in slice of struct",
|
||||
in: map[string]string{
|
||||
"traefik/foo/0/aaa/0": "bar0",
|
||||
"traefik/foo/0/aaa/1": "bar1",
|
||||
"traefik/foo/1/aaa/0": "bar2",
|
||||
"traefik/foo/1/aaa/1": "bar3",
|
||||
},
|
||||
expected: expected{node: &parser.Node{
|
||||
Name: "traefik",
|
||||
Children: []*parser.Node{
|
||||
{Name: "foo", Children: []*parser.Node{
|
||||
{Name: "[0]", Children: []*parser.Node{
|
||||
{Name: "aaa", Value: "bar0,bar1"},
|
||||
}},
|
||||
{Name: "[1]", Children: []*parser.Node{
|
||||
{Name: "aaa", Value: "bar2,bar3"},
|
||||
}},
|
||||
}},
|
||||
},
|
||||
}},
|
||||
},
|
||||
}
|
||||
|
||||
for _, test := range testCases {
|
||||
test := test
|
||||
t.Run(test.desc, func(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
out, err := DecodeToNode(mapToPairs(test.in), "traefik", test.filters...)
|
||||
|
||||
if test.expected.error {
|
||||
require.Error(t, err)
|
||||
} else {
|
||||
require.NoError(t, err)
|
||||
|
||||
if !assert.Equal(t, test.expected.node, out) {
|
||||
bytes, err := json.MarshalIndent(out, "", " ")
|
||||
require.NoError(t, err)
|
||||
fmt.Println(string(bytes))
|
||||
}
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func mapToPairs(in map[string]string) []*store.KVPair {
|
||||
var out []*store.KVPair
|
||||
for k, v := range in {
|
||||
out = append(out, &store.KVPair{Key: k, Value: []byte(v)})
|
||||
}
|
||||
return out
|
||||
}
|
63  pkg/config/kv/kv_test.go  Normal file
@ -0,0 +1,63 @@
|
|||
package kv
|
||||
|
||||
import (
|
||||
"testing"
|
||||
|
||||
"github.com/stretchr/testify/assert"
|
||||
"github.com/stretchr/testify/require"
|
||||
)
|
||||
|
||||
func TestDecode(t *testing.T) {
|
||||
pairs := mapToPairs(map[string]string{
|
||||
"traefik/fielda": "bar",
|
||||
"traefik/fieldb": "1",
|
||||
"traefik/fieldc": "true",
|
||||
"traefik/fieldd/0": "one",
|
||||
"traefik/fieldd/1": "two",
|
||||
"traefik/fielde": "",
|
||||
"traefik/fieldf/Test1": "A",
|
||||
"traefik/fieldf/Test2": "B",
|
||||
"traefik/fieldg/0/name": "A",
|
||||
"traefik/fieldg/1/name": "B",
|
||||
})
|
||||
|
||||
element := &sample{}
|
||||
|
||||
err := Decode(pairs, element, "traefik")
|
||||
require.NoError(t, err)
|
||||
|
||||
expected := &sample{
|
||||
FieldA: "bar",
|
||||
FieldB: 1,
|
||||
FieldC: true,
|
||||
FieldD: []string{"one", "two"},
|
||||
FieldE: &struct {
|
||||
Name string
|
||||
}{},
|
||||
FieldF: map[string]string{
|
||||
"Test1": "A",
|
||||
"Test2": "B",
|
||||
},
|
||||
FieldG: []sub{
|
||||
{Name: "A"},
|
||||
{Name: "B"},
|
||||
},
|
||||
}
|
||||
assert.Equal(t, expected, element)
|
||||
}
|
||||
|
||||
type sample struct {
|
||||
FieldA string
|
||||
FieldB int
|
||||
FieldC bool
|
||||
FieldD []string
|
||||
FieldE *struct {
|
||||
Name string
|
||||
} `label:"allowEmpty"`
|
||||
FieldF map[string]string
|
||||
FieldG []sub
|
||||
}
|
||||
|
||||
type sub struct {
|
||||
Name string
|
||||
}
|
|
@ -14,8 +14,22 @@ type initializer interface {
|
|||
SetDefaults()
|
||||
}
|
||||
|
||||
// FillerOpts Options for the filler.
|
||||
type FillerOpts struct {
|
||||
AllowSliceAsStruct bool
|
||||
}
|
||||
|
||||
// Fill populates the fields of the element using the information in node.
|
||||
func Fill(element interface{}, node *Node) error {
|
||||
func Fill(element interface{}, node *Node, opts FillerOpts) error {
|
||||
return filler{FillerOpts: opts}.Fill(element, node)
|
||||
}
|
||||
|
||||
type filler struct {
|
||||
FillerOpts
|
||||
}
|
||||
|
||||
// Fill populates the fields of the element using the information in node.
|
||||
func (f filler) Fill(element interface{}, node *Node) error {
|
||||
if element == nil || node == nil {
|
||||
return nil
|
||||
}
|
||||
|
@ -29,10 +43,10 @@ func Fill(element interface{}, node *Node) error {
|
|||
return fmt.Errorf("struct are not supported, use pointer instead")
|
||||
}
|
||||
|
||||
return fill(root.Elem(), node)
|
||||
return f.fill(root.Elem(), node)
|
||||
}
|
||||
|
||||
func fill(field reflect.Value, node *Node) error {
|
||||
func (f filler) fill(field reflect.Value, node *Node) error {
|
||||
// related to allow-empty tag
|
||||
if node.Disabled {
|
||||
return nil
|
||||
|
@ -70,19 +84,19 @@ func fill(field reflect.Value, node *Node) error {
|
|||
case reflect.Float64:
|
||||
return setFloat(field, node.Value, 64)
|
||||
case reflect.Struct:
|
||||
return setStruct(field, node)
|
||||
return f.setStruct(field, node)
|
||||
case reflect.Ptr:
|
||||
return setPtr(field, node)
|
||||
return f.setPtr(field, node)
|
||||
case reflect.Map:
|
||||
return setMap(field, node)
|
||||
return f.setMap(field, node)
|
||||
case reflect.Slice:
|
||||
return setSlice(field, node)
|
||||
return f.setSlice(field, node)
|
||||
default:
|
||||
return nil
|
||||
}
|
||||
}
|
||||
|
||||
func setPtr(field reflect.Value, node *Node) error {
|
||||
func (f filler) setPtr(field reflect.Value, node *Node) error {
|
||||
if field.IsNil() {
|
||||
field.Set(reflect.New(field.Type().Elem()))
|
||||
|
||||
|
@ -94,10 +108,10 @@ func setPtr(field reflect.Value, node *Node) error {
|
|||
}
|
||||
}
|
||||
|
||||
return fill(field.Elem(), node)
|
||||
return f.fill(field.Elem(), node)
|
||||
}
|
||||
|
||||
func setStruct(field reflect.Value, node *Node) error {
|
||||
func (f filler) setStruct(field reflect.Value, node *Node) error {
|
||||
for _, child := range node.Children {
|
||||
fd := field.FieldByName(child.FieldName)
|
||||
|
||||
|
@ -106,7 +120,7 @@ func setStruct(field reflect.Value, node *Node) error {
|
|||
return fmt.Errorf("field not found, node: %s (%s)", child.Name, child.FieldName)
|
||||
}
|
||||
|
||||
err := fill(fd, child)
|
||||
err := f.fill(fd, child)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
@ -115,10 +129,10 @@ func setStruct(field reflect.Value, node *Node) error {
|
|||
return nil
|
||||
}
|
||||
|
||||
func setSlice(field reflect.Value, node *Node) error {
|
||||
func (f filler) setSlice(field reflect.Value, node *Node) error {
|
||||
if field.Type().Elem().Kind() == reflect.Struct ||
|
||||
field.Type().Elem().Kind() == reflect.Ptr && field.Type().Elem().Elem().Kind() == reflect.Struct {
|
||||
return setSliceStruct(field, node)
|
||||
return f.setSliceStruct(field, node)
|
||||
}
|
||||
|
||||
if len(node.Value) == 0 {
|
||||
|
@ -211,9 +225,9 @@ func setSlice(field reflect.Value, node *Node) error {
|
|||
return nil
|
||||
}
|
||||
|
||||
func setSliceStruct(field reflect.Value, node *Node) error {
|
||||
if node.Tag.Get(TagLabelSliceAsStruct) != "" {
|
||||
return setSliceAsStruct(field, node)
|
||||
func (f filler) setSliceStruct(field reflect.Value, node *Node) error {
|
||||
if f.AllowSliceAsStruct && node.Tag.Get(TagLabelSliceAsStruct) != "" {
|
||||
return f.setSliceAsStruct(field, node)
|
||||
}
|
||||
|
||||
field.Set(reflect.MakeSlice(field.Type(), len(node.Children), len(node.Children)))
|
||||
|
@ -221,7 +235,7 @@ func setSliceStruct(field reflect.Value, node *Node) error {
|
|||
for i, child := range node.Children {
|
||||
// use Ptr to allow "SetDefaults"
|
||||
value := reflect.New(reflect.PtrTo(field.Type().Elem()))
|
||||
err := setPtr(value, child)
|
||||
err := f.setPtr(value, child)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
@ -232,14 +246,14 @@ func setSliceStruct(field reflect.Value, node *Node) error {
|
|||
return nil
|
||||
}
|
||||
|
||||
func setSliceAsStruct(field reflect.Value, node *Node) error {
|
||||
func (f filler) setSliceAsStruct(field reflect.Value, node *Node) error {
|
||||
if len(node.Children) == 0 {
|
||||
return fmt.Errorf("invalid slice: node %s", node.Name)
|
||||
}
|
||||
|
||||
// use Ptr to allow "SetDefaults"
|
||||
value := reflect.New(reflect.PtrTo(field.Type().Elem()))
|
||||
err := setPtr(value, node)
|
||||
err := f.setPtr(value, node)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
@ -252,7 +266,7 @@ func setSliceAsStruct(field reflect.Value, node *Node) error {
|
|||
return nil
|
||||
}
|
||||
|
||||
func setMap(field reflect.Value, node *Node) error {
|
||||
func (f filler) setMap(field reflect.Value, node *Node) error {
|
||||
if field.IsNil() {
|
||||
field.Set(reflect.MakeMap(field.Type()))
|
||||
}
|
||||
|
@ -260,7 +274,7 @@ func setMap(field reflect.Value, node *Node) error {
|
|||
for _, child := range node.Children {
|
||||
ptrValue := reflect.New(reflect.PtrTo(field.Type().Elem()))
|
||||
|
||||
err := fill(ptrValue, child)
|
||||
err := f.fill(ptrValue, child)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
|
|
@ -1390,7 +1390,7 @@ func TestFill(t *testing.T) {
|
|||
t.Run(test.desc, func(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
err := Fill(test.element, test.node)
|
||||
err := filler{FillerOpts: FillerOpts{AllowSliceAsStruct: true}}.Fill(test.element, test.node)
|
||||
if test.expected.error {
|
||||
require.Error(t, err)
|
||||
} else {
|
||||
|
|
|
@ -7,13 +7,20 @@ import (
|
|||
"strings"
|
||||
)
|
||||
|
||||
// EncoderToNodeOpts Options for the encoderToNode.
|
||||
type EncoderToNodeOpts struct {
|
||||
OmitEmpty bool
|
||||
TagName string
|
||||
AllowSliceAsStruct bool
|
||||
}
|
||||
|
||||
// EncodeToNode converts an element to a node.
|
||||
// element -> nodes
|
||||
func EncodeToNode(element interface{}, rootName string, omitEmpty bool) (*Node, error) {
|
||||
func EncodeToNode(element interface{}, rootName string, opts EncoderToNodeOpts) (*Node, error) {
|
||||
rValue := reflect.ValueOf(element)
|
||||
node := &Node{Name: rootName}
|
||||
|
||||
encoder := encoderToNode{omitEmpty: omitEmpty}
|
||||
encoder := encoderToNode{EncoderToNodeOpts: opts}
|
||||
|
||||
err := encoder.setNodeValue(node, rValue)
|
||||
if err != nil {
|
||||
|
@ -24,7 +31,7 @@ func EncodeToNode(element interface{}, rootName string, omitEmpty bool) (*Node,
|
|||
}
|
||||
|
||||
type encoderToNode struct {
|
||||
omitEmpty bool
|
||||
EncoderToNodeOpts
|
||||
}
|
||||
|
||||
func (e encoderToNode) setNodeValue(node *Node, rValue reflect.Value) error {
|
||||
|
@ -65,7 +72,7 @@ func (e encoderToNode) setStructValue(node *Node, rValue reflect.Value) error {
|
|||
continue
|
||||
}
|
||||
|
||||
if field.Tag.Get(TagLabel) == "-" {
|
||||
if field.Tag.Get(e.TagName) == "-" {
|
||||
continue
|
||||
}
|
||||
|
||||
|
@ -78,7 +85,7 @@ func (e encoderToNode) setStructValue(node *Node, rValue reflect.Value) error {
|
|||
}
|
||||
|
||||
nodeName := field.Name
|
||||
if field.Type.Kind() == reflect.Slice && len(field.Tag.Get(TagLabelSliceAsStruct)) != 0 {
|
||||
if e.AllowSliceAsStruct && field.Type.Kind() == reflect.Slice && len(field.Tag.Get(TagLabelSliceAsStruct)) != 0 {
|
||||
nodeName = field.Tag.Get(TagLabelSliceAsStruct)
|
||||
}
|
||||
|
||||
|
@ -101,7 +108,7 @@ func (e encoderToNode) setStructValue(node *Node, rValue reflect.Value) error {
|
|||
}
|
||||
|
||||
if field.Type.Elem().Kind() == reflect.Struct && len(child.Children) == 0 {
|
||||
if field.Tag.Get(TagLabel) != TagLabelAllowEmpty {
|
||||
if field.Tag.Get(e.TagName) != TagLabelAllowEmpty {
|
||||
continue
|
||||
}
|
||||
|
||||
|
@ -181,7 +188,7 @@ func (e encoderToNode) setSliceValue(node *Node, rValue reflect.Value) error {
|
|||
}
|
||||
|
||||
func (e encoderToNode) isSkippedField(field reflect.StructField, fieldValue reflect.Value) bool {
|
||||
if e.omitEmpty && field.Type.Kind() == reflect.String && fieldValue.Len() == 0 {
|
||||
if e.OmitEmpty && field.Type.Kind() == reflect.String && fieldValue.Len() == 0 {
|
||||
return true
|
||||
}
|
||||
|
||||
|
@ -189,7 +196,7 @@ func (e encoderToNode) isSkippedField(field reflect.StructField, fieldValue refl
|
|||
return true
|
||||
}
|
||||
|
||||
if e.omitEmpty && (field.Type.Kind() == reflect.Slice) &&
|
||||
if e.OmitEmpty && (field.Type.Kind() == reflect.Slice) &&
|
||||
(fieldValue.IsNil() || fieldValue.Len() == 0) {
|
||||
return true
|
||||
}
|
||||
|
|
|
@ -723,7 +723,8 @@ func TestEncodeToNode(t *testing.T) {
|
|||
t.Run(test.desc, func(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
node, err := EncodeToNode(test.element, DefaultRootName, true)
|
||||
etnOpts := EncoderToNodeOpts{OmitEmpty: true, TagName: TagLabel, AllowSliceAsStruct: true}
|
||||
node, err := EncodeToNode(test.element, DefaultRootName, etnOpts)
|
||||
|
||||
if test.expected.error {
|
||||
require.Error(t, err)
|
||||
|
|
|
@ -18,6 +18,7 @@ type FlatOpts struct {
|
|||
Case string // "lower" or "upper", defaults to "lower".
|
||||
Separator string
|
||||
SkipRoot bool
|
||||
TagName string
|
||||
}
|
||||
|
||||
// Flat is a configuration item representation.
|
||||
|
@ -69,7 +70,7 @@ func (e encoderToFlat) createFlat(field reflect.Value, name string, node *Node)
|
|||
var entries []Flat
|
||||
if node.Kind != reflect.Map && node.Description != "-" {
|
||||
if !(node.Kind == reflect.Ptr && len(node.Children) > 0) ||
|
||||
(node.Kind == reflect.Ptr && node.Tag.Get("label") == TagLabelAllowEmpty) {
|
||||
(node.Kind == reflect.Ptr && node.Tag.Get(e.TagName) == TagLabelAllowEmpty) {
|
||||
if node.Name[0] != '[' {
|
||||
entries = append(entries, Flat{
|
||||
Name: e.getName(name),
|
||||
|
|
|
@ -156,6 +156,7 @@ func TestEncodeToFlat(t *testing.T) {
|
|||
Case: "upper",
|
||||
Separator: "_",
|
||||
SkipRoot: false,
|
||||
TagName: TagLabel,
|
||||
},
|
||||
expected: []Flat{{
|
||||
Name: "TRAEFIK_FIELD",
|
||||
|
@ -1236,7 +1237,7 @@ func TestEncodeToFlat(t *testing.T) {
|
|||
|
||||
var opts FlatOpts
|
||||
if test.opts == nil {
|
||||
opts = FlatOpts{Separator: ".", SkipRoot: true}
|
||||
opts = FlatOpts{Separator: ".", SkipRoot: true, TagName: TagLabel}
|
||||
} else {
|
||||
opts = *test.opts
|
||||
}
|
||||
|
|
|
@ -7,8 +7,23 @@ import (
|
|||
"strings"
|
||||
)
|
||||
|
||||
// MetadataOpts Options for the metadata.
|
||||
type MetadataOpts struct {
|
||||
TagName string
|
||||
AllowSliceAsStruct bool
|
||||
}
|
||||
|
||||
// AddMetadata adds metadata such as type, inferred from element, to a node.
|
||||
func AddMetadata(element interface{}, node *Node) error {
|
||||
func AddMetadata(element interface{}, node *Node, opts MetadataOpts) error {
|
||||
return metadata{MetadataOpts: opts}.Add(element, node)
|
||||
}
|
||||
|
||||
type metadata struct {
|
||||
MetadataOpts
|
||||
}
|
||||
|
||||
// Add adds metadata such as type, inferred from element, to a node.
|
||||
func (m metadata) Add(element interface{}, node *Node) error {
|
||||
if node == nil {
|
||||
return nil
|
||||
}
|
||||
|
@ -24,25 +39,25 @@ func AddMetadata(element interface{}, node *Node) error {
|
|||
rootType := reflect.TypeOf(element)
|
||||
node.Kind = rootType.Kind()
|
||||
|
||||
return browseChildren(rootType, node)
|
||||
return m.browseChildren(rootType, node)
|
||||
}
|
||||
|
||||
func browseChildren(fType reflect.Type, node *Node) error {
|
||||
func (m metadata) browseChildren(fType reflect.Type, node *Node) error {
|
||||
for _, child := range node.Children {
|
||||
if err := addMetadata(fType, child); err != nil {
|
||||
if err := m.add(fType, child); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func addMetadata(rootType reflect.Type, node *Node) error {
|
||||
func (m metadata) add(rootType reflect.Type, node *Node) error {
|
||||
rType := rootType
|
||||
if rootType.Kind() == reflect.Ptr {
|
||||
rType = rootType.Elem()
|
||||
}
|
||||
|
||||
field, err := findTypedField(rType, node)
|
||||
field, err := m.findTypedField(rType, node)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
@ -57,11 +72,11 @@ func addMetadata(rootType reflect.Type, node *Node) error {
|
|||
|
||||
if fType.Kind() == reflect.Struct || fType.Kind() == reflect.Ptr && fType.Elem().Kind() == reflect.Struct ||
|
||||
fType.Kind() == reflect.Map {
|
||||
if len(node.Children) == 0 && field.Tag.Get(TagLabel) != TagLabelAllowEmpty {
|
||||
if len(node.Children) == 0 && field.Tag.Get(m.TagName) != TagLabelAllowEmpty {
|
||||
return fmt.Errorf("%s cannot be a standalone element (type %s)", node.Name, fType)
|
||||
}
|
||||
|
||||
node.Disabled = len(node.Value) > 0 && !strings.EqualFold(node.Value, "true") && field.Tag.Get(TagLabel) == TagLabelAllowEmpty
|
||||
node.Disabled = len(node.Value) > 0 && !strings.EqualFold(node.Value, "true") && field.Tag.Get(m.TagName) == TagLabelAllowEmpty
|
||||
}
|
||||
|
||||
if len(node.Children) == 0 {
|
||||
|
@ -69,7 +84,7 @@ func addMetadata(rootType reflect.Type, node *Node) error {
|
|||
}
|
||||
|
||||
if fType.Kind() == reflect.Struct || fType.Kind() == reflect.Ptr && fType.Elem().Kind() == reflect.Struct {
|
||||
return browseChildren(fType, node)
|
||||
return m.browseChildren(fType, node)
|
||||
}
|
||||
|
||||
if fType.Kind() == reflect.Map {
|
||||
|
@ -80,7 +95,7 @@ func addMetadata(rootType reflect.Type, node *Node) error {
|
|||
|
||||
if elem.Kind() == reflect.Map || elem.Kind() == reflect.Struct ||
|
||||
(elem.Kind() == reflect.Ptr && elem.Elem().Kind() == reflect.Struct) {
|
||||
if err = browseChildren(elem, child); err != nil {
|
||||
if err = m.browseChildren(elem, child); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
@ -89,13 +104,13 @@ func addMetadata(rootType reflect.Type, node *Node) error {
|
|||
}
|
||||
|
||||
if fType.Kind() == reflect.Slice {
|
||||
if field.Tag.Get(TagLabelSliceAsStruct) != "" {
|
||||
return browseChildren(fType.Elem(), node)
|
||||
if m.AllowSliceAsStruct && field.Tag.Get(TagLabelSliceAsStruct) != "" {
|
||||
return m.browseChildren(fType.Elem(), node)
|
||||
}
|
||||
|
||||
for _, ch := range node.Children {
|
||||
ch.Kind = fType.Elem().Kind()
|
||||
if err = browseChildren(fType.Elem(), ch); err != nil {
|
||||
if err = m.browseChildren(fType.Elem(), ch); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
@ -105,19 +120,19 @@ func addMetadata(rootType reflect.Type, node *Node) error {
|
|||
return fmt.Errorf("invalid node %s: %v", node.Name, fType.Kind())
|
||||
}
|
||||
|
||||
func findTypedField(rType reflect.Type, node *Node) (reflect.StructField, error) {
|
||||
func (m metadata) findTypedField(rType reflect.Type, node *Node) (reflect.StructField, error) {
|
||||
for i := 0; i < rType.NumField(); i++ {
|
||||
cField := rType.Field(i)
|
||||
|
||||
fieldName := cField.Tag.Get(TagLabelSliceAsStruct)
|
||||
if len(fieldName) == 0 {
|
||||
if !m.AllowSliceAsStruct || len(fieldName) == 0 {
|
||||
fieldName = cField.Name
|
||||
}
|
||||
|
||||
if IsExported(cField) {
|
||||
if cField.Anonymous {
|
||||
if cField.Type.Kind() == reflect.Struct {
|
||||
structField, err := findTypedField(cField.Type, node)
|
||||
structField, err := m.findTypedField(cField.Type, node)
|
||||
if err != nil {
|
||||
continue
|
||||
}
|
||||
|
|
|
@ -991,7 +991,7 @@ func TestAddMetadata(t *testing.T) {
|
|||
t.Run(test.desc, func(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
err := AddMetadata(test.structure, test.tree)
|
||||
err := metadata{MetadataOpts{TagName: TagLabel, AllowSliceAsStruct: true}}.Add(test.structure, test.tree)
|
||||
|
||||
if test.expected.error {
|
||||
assert.Error(t, err)
|
||||
|
|
|
@@ -13,12 +13,13 @@ func Decode(labels map[string]string, element interface{}, rootName string, filt
 		return err
 	}
 
-	err = AddMetadata(element, node)
+	metaOpts := MetadataOpts{TagName: TagLabel, AllowSliceAsStruct: true}
+	err = AddMetadata(element, node, metaOpts)
 	if err != nil {
 		return err
 	}
 
-	err = Fill(element, node)
+	err = Fill(element, node, FillerOpts{AllowSliceAsStruct: true})
 	if err != nil {
 		return err
 	}

@@ -29,7 +30,8 @@
 // Encode converts an element to labels.
 // element -> node (value) -> label (node)
 func Encode(element interface{}, rootName string) (map[string]string, error) {
-	node, err := EncodeToNode(element, rootName, true)
+	etnOpts := EncoderToNodeOpts{OmitEmpty: true, TagName: TagLabel, AllowSliceAsStruct: true}
+	node, err := EncodeToNode(element, rootName, etnOpts)
 	if err != nil {
 		return nil, err
 	}
@ -14,6 +14,10 @@ import (
|
|||
"github.com/containous/traefik/v2/pkg/provider/file"
|
||||
"github.com/containous/traefik/v2/pkg/provider/kubernetes/crd"
|
||||
"github.com/containous/traefik/v2/pkg/provider/kubernetes/ingress"
|
||||
"github.com/containous/traefik/v2/pkg/provider/kv/consul"
|
||||
"github.com/containous/traefik/v2/pkg/provider/kv/etcd"
|
||||
"github.com/containous/traefik/v2/pkg/provider/kv/redis"
|
||||
"github.com/containous/traefik/v2/pkg/provider/kv/zk"
|
||||
"github.com/containous/traefik/v2/pkg/provider/marathon"
|
||||
"github.com/containous/traefik/v2/pkg/provider/rancher"
|
||||
"github.com/containous/traefik/v2/pkg/provider/rest"
|
||||
|
@ -156,15 +160,21 @@ func (t *Tracing) SetDefaults() {
|
|||
|
||||
// Providers contains providers configuration
|
||||
type Providers struct {
|
||||
ProvidersThrottleDuration types.Duration `description:"Backends throttle duration: minimum duration between 2 events from providers before applying a new configuration. It avoids unnecessary reloads if multiples events are sent in a short amount of time." json:"providersThrottleDuration,omitempty" toml:"providersThrottleDuration,omitempty" yaml:"providersThrottleDuration,omitempty" export:"true"`
|
||||
Docker *docker.Provider `description:"Enable Docker backend with default settings." json:"docker,omitempty" toml:"docker,omitempty" yaml:"docker,omitempty" export:"true" label:"allowEmpty"`
|
||||
File *file.Provider `description:"Enable File backend with default settings." json:"file,omitempty" toml:"file,omitempty" yaml:"file,omitempty" export:"true"`
|
||||
Marathon *marathon.Provider `description:"Enable Marathon backend with default settings." json:"marathon,omitempty" toml:"marathon,omitempty" yaml:"marathon,omitempty" export:"true" label:"allowEmpty"`
|
||||
KubernetesIngress *ingress.Provider `description:"Enable Kubernetes backend with default settings." json:"kubernetesIngress,omitempty" toml:"kubernetesIngress,omitempty" yaml:"kubernetesIngress,omitempty" export:"true" label:"allowEmpty"`
|
||||
KubernetesCRD *crd.Provider `description:"Enable Kubernetes backend with default settings." json:"kubernetesCRD,omitempty" toml:"kubernetesCRD,omitempty" yaml:"kubernetesCRD,omitempty" export:"true" label:"allowEmpty"`
|
||||
Rest *rest.Provider `description:"Enable Rest backend with default settings." json:"rest,omitempty" toml:"rest,omitempty" yaml:"rest,omitempty" export:"true" label:"allowEmpty"`
|
||||
Rancher *rancher.Provider `description:"Enable Rancher backend with default settings." json:"rancher,omitempty" toml:"rancher,omitempty" yaml:"rancher,omitempty" export:"true" label:"allowEmpty"`
|
||||
ConsulCatalog *consulcatalog.Provider `description:"Enable ConsulCatalog backend with default settings." json:"consulCatalog,omitempty" toml:"consulCatalog,omitempty" yaml:"consulCatalog,omitempty"`
|
||||
ProvidersThrottleDuration types.Duration `description:"Backends throttle duration: minimum duration between 2 events from providers before applying a new configuration. It avoids unnecessary reloads if multiples events are sent in a short amount of time." json:"providersThrottleDuration,omitempty" toml:"providersThrottleDuration,omitempty" yaml:"providersThrottleDuration,omitempty" export:"true"`
|
||||
|
||||
Docker *docker.Provider `description:"Enable Docker backend with default settings." json:"docker,omitempty" toml:"docker,omitempty" yaml:"docker,omitempty" export:"true" label:"allowEmpty"`
|
||||
File *file.Provider `description:"Enable File backend with default settings." json:"file,omitempty" toml:"file,omitempty" yaml:"file,omitempty" export:"true"`
|
||||
Marathon *marathon.Provider `description:"Enable Marathon backend with default settings." json:"marathon,omitempty" toml:"marathon,omitempty" yaml:"marathon,omitempty" export:"true" label:"allowEmpty"`
|
||||
KubernetesIngress *ingress.Provider `description:"Enable Kubernetes backend with default settings." json:"kubernetesIngress,omitempty" toml:"kubernetesIngress,omitempty" yaml:"kubernetesIngress,omitempty" export:"true" label:"allowEmpty"`
|
||||
KubernetesCRD *crd.Provider `description:"Enable Kubernetes backend with default settings." json:"kubernetesCRD,omitempty" toml:"kubernetesCRD,omitempty" yaml:"kubernetesCRD,omitempty" export:"true" label:"allowEmpty"`
|
||||
Rest *rest.Provider `description:"Enable Rest backend with default settings." json:"rest,omitempty" toml:"rest,omitempty" yaml:"rest,omitempty" export:"true" label:"allowEmpty"`
|
||||
Rancher *rancher.Provider `description:"Enable Rancher backend with default settings." json:"rancher,omitempty" toml:"rancher,omitempty" yaml:"rancher,omitempty" export:"true" label:"allowEmpty"`
|
||||
ConsulCatalog *consulcatalog.Provider `description:"Enable ConsulCatalog backend with default settings." json:"consulCatalog,omitempty" toml:"consulCatalog,omitempty" yaml:"consulCatalog,omitempty"`
|
||||
|
||||
Consul *consul.Provider `description:"Enable Consul backend with default settings." json:"consul,omitempty" toml:"consul,omitempty" yaml:"consul,omitempty" export:"true" label:"allowEmpty"`
|
||||
Etcd *etcd.Provider `description:"Enable Etcd backend with default settings." json:"etcd,omitempty" toml:"etcd,omitempty" yaml:"etcd,omitempty" export:"true" label:"allowEmpty"`
|
||||
ZooKeeper *zk.Provider `description:"Enable ZooKeeper backend with default settings." json:"zooKeeper,omitempty" toml:"zooKeeper,omitempty" yaml:"zooKeeper,omitempty" export:"true" label:"allowEmpty"`
|
||||
Redis *redis.Provider `description:"Enable Redis backend with default settings." json:"redis,omitempty" toml:"redis,omitempty" yaml:"redis,omitempty" export:"true" label:"allowEmpty"`
|
||||
}
|
||||
|
||||
// SetEffectiveConfiguration adds missing configuration parameters derived from existing ones.
|
||||
|
|
|
@ -53,6 +53,22 @@ func NewProviderAggregator(conf static.Providers) ProviderAggregator {
|
|||
p.quietAddProvider(conf.ConsulCatalog)
|
||||
}
|
||||
|
||||
if conf.Consul != nil {
|
||||
p.quietAddProvider(conf.Consul)
|
||||
}
|
||||
|
||||
if conf.Etcd != nil {
|
||||
p.quietAddProvider(conf.Etcd)
|
||||
}
|
||||
|
||||
if conf.ZooKeeper != nil {
|
||||
p.quietAddProvider(conf.ZooKeeper)
|
||||
}
|
||||
|
||||
if conf.Redis != nil {
|
||||
p.quietAddProvider(conf.Redis)
|
||||
}
|
||||
|
||||
return p
|
||||
}
|
||||
|
||||
|
|
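A rough sketch, assuming the static configuration is built programmatically (normally it comes from file, flags, or env), of how one of the new provider fields plugs into the aggregator change above; the endpoint value is illustrative and the static/aggregator import paths are assumed from the repository layout.

package main

import (
	"github.com/containous/traefik/v2/pkg/config/static"
	"github.com/containous/traefik/v2/pkg/provider/aggregator"
	"github.com/containous/traefik/v2/pkg/provider/kv/etcd"
)

func main() {
	providers := static.Providers{Etcd: &etcd.Provider{}}
	providers.Etcd.SetDefaults() // RootKey "traefik", Endpoints ["127.0.0.1:2379"]
	providers.Etcd.Endpoints = []string{"etcd.example.com:2379"}

	// The aggregator now picks up the non-nil KV providers added in this commit.
	_ = aggregator.NewProviderAggregator(providers)
}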
25  pkg/provider/kv/consul/consul.go  Normal file
@@ -0,0 +1,25 @@
package consul

import (
	"github.com/abronan/valkeyrie/store"
	"github.com/containous/traefik/v2/pkg/provider"
	"github.com/containous/traefik/v2/pkg/provider/kv"
)

var _ provider.Provider = (*Provider)(nil)

// Provider holds configurations of the provider.
type Provider struct {
	kv.Provider
}

// SetDefaults sets the default values.
func (p *Provider) SetDefaults() {
	p.Provider.SetDefaults()
	p.Endpoints = []string{"127.0.0.1:8500"}
}

// Init the provider
func (p *Provider) Init() error {
	return p.Provider.Init(store.CONSUL, "consul")
}
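A minimal usage sketch (an assumption, not taken from the diff): the concrete provider only picks a backend and a default endpoint, everything else lives in the embedded kv.Provider shown below; the endpoint value is made up.

package main

import (
	"log"

	"github.com/containous/traefik/v2/pkg/provider/kv/consul"
)

func main() {
	p := &consul.Provider{}
	p.SetDefaults() // RootKey "traefik", Endpoints ["127.0.0.1:8500"]
	p.Endpoints = []string{"consul.internal:8500"} // illustrative endpoint

	// Init registers the Consul backend with valkeyrie and creates the KV client.
	if err := p.Init(); err != nil {
		log.Fatal(err)
	}
}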
25  pkg/provider/kv/etcd/etcd.go  Normal file
@@ -0,0 +1,25 @@
package etcd

import (
	"github.com/abronan/valkeyrie/store"
	"github.com/containous/traefik/v2/pkg/provider"
	"github.com/containous/traefik/v2/pkg/provider/kv"
)

var _ provider.Provider = (*Provider)(nil)

// Provider holds configurations of the provider.
type Provider struct {
	kv.Provider
}

// SetDefaults sets the default values.
func (p *Provider) SetDefaults() {
	p.Provider.SetDefaults()
	p.Endpoints = []string{"127.0.0.1:2379"}
}

// Init the provider
func (p *Provider) Init() error {
	return p.Provider.Init(store.ETCDV3, "etcd")
}
191  pkg/provider/kv/kv.go  Normal file
@ -0,0 +1,191 @@
|
|||
package kv
|
||||
|
||||
import (
|
||||
"context"
|
||||
"errors"
|
||||
"fmt"
|
||||
"path"
|
||||
"time"
|
||||
|
||||
"github.com/abronan/valkeyrie"
|
||||
"github.com/abronan/valkeyrie/store"
|
||||
"github.com/abronan/valkeyrie/store/consul"
|
||||
etcdv3 "github.com/abronan/valkeyrie/store/etcd/v3"
|
||||
"github.com/abronan/valkeyrie/store/redis"
|
||||
"github.com/abronan/valkeyrie/store/zookeeper"
|
||||
"github.com/cenkalti/backoff/v3"
|
||||
"github.com/containous/traefik/v2/pkg/config/dynamic"
|
||||
"github.com/containous/traefik/v2/pkg/config/kv"
|
||||
"github.com/containous/traefik/v2/pkg/job"
|
||||
"github.com/containous/traefik/v2/pkg/log"
|
||||
"github.com/containous/traefik/v2/pkg/safe"
|
||||
"github.com/containous/traefik/v2/pkg/types"
|
||||
)
|
||||
|
||||
// Provider holds configurations of the provider.
|
||||
type Provider struct {
|
||||
RootKey string `description:"Root key used for KV store" export:"true" json:"rootKey,omitempty" toml:"rootKey,omitempty" yaml:"rootKey,omitempty"`
|
||||
|
||||
Endpoints []string `description:"KV store endpoints" json:"endpoints,omitempty" toml:"endpoints,omitempty" yaml:"endpoints,omitempty"`
|
||||
Username string `description:"KV Username" json:"username,omitempty" toml:"username,omitempty" yaml:"username,omitempty"`
|
||||
Password string `description:"KV Password" json:"password,omitempty" toml:"password,omitempty" yaml:"password,omitempty"`
|
||||
TLS *types.ClientTLS `description:"Enable TLS support" export:"true" json:"tls,omitempty" toml:"tls,omitempty" yaml:"tls,omitempty"`
|
||||
|
||||
storeType store.Backend
|
||||
kvClient store.Store
|
||||
name string
|
||||
}
|
||||
|
||||
// SetDefaults sets the default values.
|
||||
func (p *Provider) SetDefaults() {
|
||||
p.RootKey = "traefik"
|
||||
}
|
||||
|
||||
// Init the provider
|
||||
func (p *Provider) Init(storeType store.Backend, name string) error {
|
||||
ctx := log.With(context.Background(), log.Str(log.ProviderName, string(storeType)))
|
||||
|
||||
p.storeType = storeType
|
||||
p.name = name
|
||||
|
||||
kvClient, err := p.createKVClient(ctx)
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to Connect to KV store: %w", err)
|
||||
}
|
||||
|
||||
p.kvClient = kvClient
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// Provide allows the docker provider to provide configurations to traefik using the given configuration channel.
|
||||
func (p *Provider) Provide(configurationChan chan<- dynamic.Message, pool *safe.Pool) error {
|
||||
ctx := log.With(context.Background(), log.Str(log.ProviderName, string(p.storeType)))
|
||||
|
||||
logger := log.FromContext(ctx)
|
||||
|
||||
operation := func() error {
|
||||
if _, err := p.kvClient.Exists(path.Join(p.RootKey, "qmslkjdfmqlskdjfmqlksjazçueznbvbwzlkajzebvkwjdcqmlsfj"), nil); err != nil {
|
||||
return fmt.Errorf("KV store connection error: %w", err)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
notify := func(err error, time time.Duration) {
|
||||
logger.Errorf("KV connection error: %+v, retrying in %s", err, time)
|
||||
}
|
||||
err := backoff.RetryNotify(safe.OperationWithRecover(operation), job.NewBackOff(backoff.NewExponentialBackOff()), notify)
|
||||
if err != nil {
|
||||
return fmt.Errorf("cannot connect to KV server: %w", err)
|
||||
}
|
||||
|
||||
configuration, err := p.buildConfiguration()
|
||||
if err != nil {
|
||||
logger.Errorf("Cannot build the configuration: %v", err)
|
||||
} else {
|
||||
configurationChan <- dynamic.Message{
|
||||
ProviderName: p.name,
|
||||
Configuration: configuration,
|
||||
}
|
||||
}
|
||||
|
||||
pool.Go(func(stop chan bool) {
|
||||
err := p.watchKv(ctx, configurationChan, p.RootKey, stop)
|
||||
if err != nil {
|
||||
logger.Errorf("Cannot watch KV store: %v", err)
|
||||
}
|
||||
})
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (p *Provider) watchKv(ctx context.Context, configurationChan chan<- dynamic.Message, prefix string, stop chan bool) error {
|
||||
operation := func() error {
|
||||
events, err := p.kvClient.WatchTree(p.RootKey, make(chan struct{}), nil)
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to watch KV: %w", err)
|
||||
}
|
||||
|
||||
for {
|
||||
select {
|
||||
case <-stop:
|
||||
return nil
|
||||
case _, ok := <-events:
|
||||
if !ok {
|
||||
return errors.New("the WatchTree channel is closed")
|
||||
}
|
||||
|
||||
configuration, errC := p.buildConfiguration()
|
||||
if errC != nil {
|
||||
return errC
|
||||
}
|
||||
|
||||
if configuration != nil {
|
||||
configurationChan <- dynamic.Message{
|
||||
ProviderName: string(p.storeType),
|
||||
Configuration: configuration,
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
notify := func(err error, time time.Duration) {
|
||||
log.FromContext(ctx).Errorf("KV connection error: %+v, retrying in %s", err, time)
|
||||
}
|
||||
err := backoff.RetryNotify(safe.OperationWithRecover(operation), job.NewBackOff(backoff.NewExponentialBackOff()), notify)
|
||||
if err != nil {
|
||||
return fmt.Errorf("cannot connect to KV server: %w", err)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (p *Provider) buildConfiguration() (*dynamic.Configuration, error) {
|
||||
pairs, err := p.kvClient.List(p.RootKey, nil)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
cfg := &dynamic.Configuration{}
|
||||
err = kv.Decode(pairs, cfg, p.RootKey)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return cfg, nil
|
||||
}
|
||||
|
||||
func (p *Provider) createKVClient(ctx context.Context) (store.Store, error) {
|
||||
storeConfig := &store.Config{
|
||||
ConnectionTimeout: 3 * time.Second,
|
||||
Bucket: "traefik",
|
||||
Username: p.Username,
|
||||
Password: p.Password,
|
||||
}
|
||||
|
||||
if p.TLS != nil {
|
||||
var err error
|
||||
storeConfig.TLS, err = p.TLS.CreateTLSConfig(ctx)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
|
||||
switch p.storeType {
|
||||
case store.CONSUL:
|
||||
consul.Register()
|
||||
case store.ETCDV3:
|
||||
etcdv3.Register()
|
||||
case store.ZK:
|
||||
zookeeper.Register()
|
||||
case store.REDIS:
|
||||
redis.Register()
|
||||
}
|
||||
|
||||
kvStore, err := valkeyrie.NewStore(p.storeType, p.Endpoints, storeConfig)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return &storeWrapper{Store: kvStore}, nil
|
||||
}
|
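To show how the pieces above fit together, here is a hedged sketch of driving a provider's Provide loop outside Traefik itself; the buffered channel, the single read, and the use of safe.NewPool stand in for what the configuration watcher normally does, and a reachable etcd endpoint is assumed.

package main

import (
	"context"
	"log"

	"github.com/containous/traefik/v2/pkg/config/dynamic"
	"github.com/containous/traefik/v2/pkg/provider/kv/etcd"
	"github.com/containous/traefik/v2/pkg/safe"
)

func main() {
	p := &etcd.Provider{}
	p.SetDefaults()

	if err := p.Init(); err != nil {
		log.Fatal(err)
	}

	// Buffered so the initial configuration push inside Provide does not block.
	configurationChan := make(chan dynamic.Message, 1)
	pool := safe.NewPool(context.Background())

	if err := p.Provide(configurationChan, pool); err != nil {
		log.Fatal(err)
	}

	msg := <-configurationChan
	log.Printf("received dynamic configuration from provider %q", msg.ProviderName)
}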
122  pkg/provider/kv/kv_mock_test.go  Normal file
@ -0,0 +1,122 @@
|
|||
package kv
|
||||
|
||||
import (
|
||||
"errors"
|
||||
"strings"
|
||||
|
||||
"github.com/abronan/valkeyrie/store"
|
||||
)
|
||||
|
||||
func newProviderMock(kvPairs []*store.KVPair) *Provider {
|
||||
return &Provider{
|
||||
RootKey: "traefik",
|
||||
kvClient: newKvClientMock(kvPairs, nil),
|
||||
}
|
||||
}
|
||||
|
||||
// Override Get/List to return a error
|
||||
type KvError struct {
|
||||
Get error
|
||||
List error
|
||||
}
|
||||
|
||||
// Extremely limited mock store so we can test initialization
|
||||
type Mock struct {
|
||||
Error KvError
|
||||
KVPairs []*store.KVPair
|
||||
WatchTreeMethod func() <-chan []*store.KVPair
|
||||
}
|
||||
|
||||
func newKvClientMock(kvPairs []*store.KVPair, err error) *Mock {
|
||||
mock := &Mock{
|
||||
KVPairs: kvPairs,
|
||||
}
|
||||
|
||||
if err != nil {
|
||||
mock.Error = KvError{
|
||||
Get: err,
|
||||
List: err,
|
||||
}
|
||||
}
|
||||
return mock
|
||||
}
|
||||
|
||||
func (s *Mock) Put(key string, value []byte, opts *store.WriteOptions) error {
|
||||
return errors.New("method Put not supported")
|
||||
}
|
||||
|
||||
func (s *Mock) Get(key string, options *store.ReadOptions) (*store.KVPair, error) {
|
||||
if err := s.Error.Get; err != nil {
|
||||
return nil, err
|
||||
}
|
||||
for _, kvPair := range s.KVPairs {
|
||||
if kvPair.Key == key {
|
||||
return kvPair, nil
|
||||
}
|
||||
}
|
||||
return nil, store.ErrKeyNotFound
|
||||
}
|
||||
|
||||
func (s *Mock) Delete(key string) error {
|
||||
return errors.New("method Delete not supported")
|
||||
}
|
||||
|
||||
// Exists mock
|
||||
func (s *Mock) Exists(key string, options *store.ReadOptions) (bool, error) {
|
||||
if err := s.Error.Get; err != nil {
|
||||
return false, err
|
||||
}
|
||||
for _, kvPair := range s.KVPairs {
|
||||
if strings.HasPrefix(kvPair.Key, key) {
|
||||
return true, nil
|
||||
}
|
||||
}
|
||||
return false, store.ErrKeyNotFound
|
||||
}
|
||||
|
||||
// Watch mock
|
||||
func (s *Mock) Watch(key string, stopCh <-chan struct{}, options *store.ReadOptions) (<-chan *store.KVPair, error) {
|
||||
return nil, errors.New("method Watch not supported")
|
||||
}
|
||||
|
||||
// WatchTree mock
|
||||
func (s *Mock) WatchTree(prefix string, stopCh <-chan struct{}, options *store.ReadOptions) (<-chan []*store.KVPair, error) {
|
||||
return s.WatchTreeMethod(), nil
|
||||
}
|
||||
|
||||
// NewLock mock
|
||||
func (s *Mock) NewLock(key string, options *store.LockOptions) (store.Locker, error) {
|
||||
return nil, errors.New("method NewLock not supported")
|
||||
}
|
||||
|
||||
// List mock
|
||||
func (s *Mock) List(prefix string, options *store.ReadOptions) ([]*store.KVPair, error) {
|
||||
if err := s.Error.List; err != nil {
|
||||
return nil, err
|
||||
}
|
||||
var kv []*store.KVPair
|
||||
for _, kvPair := range s.KVPairs {
|
||||
if strings.HasPrefix(kvPair.Key, prefix) { // FIXME && !strings.ContainsAny(strings.TrimPrefix(kvPair.Key, prefix), "/") {
|
||||
kv = append(kv, kvPair)
|
||||
}
|
||||
}
|
||||
return kv, nil
|
||||
}
|
||||
|
||||
// DeleteTree mock
|
||||
func (s *Mock) DeleteTree(prefix string) error {
|
||||
return errors.New("method DeleteTree not supported")
|
||||
}
|
||||
|
||||
// AtomicPut mock
|
||||
func (s *Mock) AtomicPut(key string, value []byte, previous *store.KVPair, opts *store.WriteOptions) (bool, *store.KVPair, error) {
|
||||
return false, nil, errors.New("method AtomicPut not supported")
|
||||
}
|
||||
|
||||
// AtomicDelete mock
|
||||
func (s *Mock) AtomicDelete(key string, previous *store.KVPair) (bool, error) {
|
||||
return false, errors.New("method AtomicDelete not supported")
|
||||
}
|
||||
|
||||
// Close mock
|
||||
func (s *Mock) Close() {}
|
892  pkg/provider/kv/kv_test.go  Normal file
@ -0,0 +1,892 @@
|
|||
package kv
|
||||
|
||||
import (
|
||||
"context"
|
||||
"errors"
|
||||
"testing"
|
||||
"time"
|
||||
|
||||
"github.com/abronan/valkeyrie/store"
|
||||
"github.com/containous/traefik/v2/pkg/config/dynamic"
|
||||
"github.com/containous/traefik/v2/pkg/tls"
|
||||
"github.com/containous/traefik/v2/pkg/types"
|
||||
"github.com/stretchr/testify/assert"
|
||||
"github.com/stretchr/testify/require"
|
||||
)
|
||||
|
||||
func Test_buildConfiguration(t *testing.T) {
provider := newProviderMock(mapToPairs(map[string]string{
"traefik/http/routers/Router0/entryPoints/0": "foobar",
"traefik/http/routers/Router0/entryPoints/1": "foobar",
"traefik/http/routers/Router0/middlewares/0": "foobar",
"traefik/http/routers/Router0/middlewares/1": "foobar",
"traefik/http/routers/Router0/service": "foobar",
"traefik/http/routers/Router0/rule": "foobar",
"traefik/http/routers/Router0/priority": "42",
"traefik/http/routers/Router0/tls": "",
"traefik/http/routers/Router1/rule": "foobar",
"traefik/http/routers/Router1/priority": "42",
"traefik/http/routers/Router1/tls/domains/0/main": "foobar",
"traefik/http/routers/Router1/tls/domains/0/sans/0": "foobar",
"traefik/http/routers/Router1/tls/domains/0/sans/1": "foobar",
"traefik/http/routers/Router1/tls/domains/1/main": "foobar",
"traefik/http/routers/Router1/tls/domains/1/sans/0": "foobar",
"traefik/http/routers/Router1/tls/domains/1/sans/1": "foobar",
"traefik/http/routers/Router1/tls/options": "foobar",
"traefik/http/routers/Router1/tls/certResolver": "foobar",
"traefik/http/routers/Router1/entryPoints/0": "foobar",
"traefik/http/routers/Router1/entryPoints/1": "foobar",
"traefik/http/routers/Router1/middlewares/0": "foobar",
"traefik/http/routers/Router1/middlewares/1": "foobar",
"traefik/http/routers/Router1/service": "foobar",
"traefik/http/services/Service01/loadBalancer/healthCheck/path": "foobar",
"traefik/http/services/Service01/loadBalancer/healthCheck/port": "42",
"traefik/http/services/Service01/loadBalancer/healthCheck/interval": "foobar",
"traefik/http/services/Service01/loadBalancer/healthCheck/timeout": "foobar",
"traefik/http/services/Service01/loadBalancer/healthCheck/hostname": "foobar",
"traefik/http/services/Service01/loadBalancer/healthCheck/headers/name0": "foobar",
"traefik/http/services/Service01/loadBalancer/healthCheck/headers/name1": "foobar",
"traefik/http/services/Service01/loadBalancer/healthCheck/scheme": "foobar",
"traefik/http/services/Service01/loadBalancer/responseForwarding/flushInterval": "foobar",
"traefik/http/services/Service01/loadBalancer/passHostHeader": "true",
"traefik/http/services/Service01/loadBalancer/sticky/cookie/name": "foobar",
"traefik/http/services/Service01/loadBalancer/sticky/cookie/secure": "true",
"traefik/http/services/Service01/loadBalancer/sticky/cookie/httpOnly": "true",
"traefik/http/services/Service01/loadBalancer/servers/0/url": "foobar",
"traefik/http/services/Service01/loadBalancer/servers/1/url": "foobar",
"traefik/http/services/Service02/mirroring/service": "foobar",
"traefik/http/services/Service02/mirroring/mirrors/0/name": "foobar",
"traefik/http/services/Service02/mirroring/mirrors/0/percent": "42",
"traefik/http/services/Service02/mirroring/mirrors/1/name": "foobar",
"traefik/http/services/Service02/mirroring/mirrors/1/percent": "42",
"traefik/http/services/Service03/weighted/sticky/cookie/name": "foobar",
"traefik/http/services/Service03/weighted/sticky/cookie/secure": "true",
"traefik/http/services/Service03/weighted/sticky/cookie/httpOnly": "true",
"traefik/http/services/Service03/weighted/services/0/name": "foobar",
"traefik/http/services/Service03/weighted/services/0/weight": "42",
"traefik/http/services/Service03/weighted/services/1/name": "foobar",
"traefik/http/services/Service03/weighted/services/1/weight": "42",
"traefik/http/middlewares/Middleware08/forwardAuth/authResponseHeaders/0": "foobar",
"traefik/http/middlewares/Middleware08/forwardAuth/authResponseHeaders/1": "foobar",
"traefik/http/middlewares/Middleware08/forwardAuth/tls/key": "foobar",
"traefik/http/middlewares/Middleware08/forwardAuth/tls/insecureSkipVerify": "true",
"traefik/http/middlewares/Middleware08/forwardAuth/tls/ca": "foobar",
"traefik/http/middlewares/Middleware08/forwardAuth/tls/caOptional": "true",
"traefik/http/middlewares/Middleware08/forwardAuth/tls/cert": "foobar",
"traefik/http/middlewares/Middleware08/forwardAuth/address": "foobar",
"traefik/http/middlewares/Middleware08/forwardAuth/trustForwardHeader": "true",
"traefik/http/middlewares/Middleware15/redirectScheme/scheme": "foobar",
"traefik/http/middlewares/Middleware15/redirectScheme/port": "foobar",
"traefik/http/middlewares/Middleware15/redirectScheme/permanent": "true",
"traefik/http/middlewares/Middleware17/replacePathRegex/regex": "foobar",
"traefik/http/middlewares/Middleware17/replacePathRegex/replacement": "foobar",
"traefik/http/middlewares/Middleware14/redirectRegex/regex": "foobar",
"traefik/http/middlewares/Middleware14/redirectRegex/replacement": "foobar",
"traefik/http/middlewares/Middleware14/redirectRegex/permanent": "true",
"traefik/http/middlewares/Middleware16/replacePath/path": "foobar",
"traefik/http/middlewares/Middleware06/digestAuth/removeHeader": "true",
"traefik/http/middlewares/Middleware06/digestAuth/realm": "foobar",
"traefik/http/middlewares/Middleware06/digestAuth/headerField": "foobar",
"traefik/http/middlewares/Middleware06/digestAuth/users/0": "foobar",
"traefik/http/middlewares/Middleware06/digestAuth/users/1": "foobar",
"traefik/http/middlewares/Middleware06/digestAuth/usersFile": "foobar",
"traefik/http/middlewares/Middleware09/headers/accessControlAllowHeaders/0": "foobar",
"traefik/http/middlewares/Middleware09/headers/accessControlAllowHeaders/1": "foobar",
"traefik/http/middlewares/Middleware09/headers/accessControlAllowOrigin": "foobar",
"traefik/http/middlewares/Middleware09/headers/contentTypeNosniff": "true",
"traefik/http/middlewares/Middleware09/headers/accessControlAllowCredentials": "true",
"traefik/http/middlewares/Middleware09/headers/featurePolicy": "foobar",
"traefik/http/middlewares/Middleware09/headers/forceSTSHeader": "true",
"traefik/http/middlewares/Middleware09/headers/sslRedirect": "true",
"traefik/http/middlewares/Middleware09/headers/sslHost": "foobar",
"traefik/http/middlewares/Middleware09/headers/sslForceHost": "true",
"traefik/http/middlewares/Middleware09/headers/sslProxyHeaders/name1": "foobar",
"traefik/http/middlewares/Middleware09/headers/sslProxyHeaders/name0": "foobar",
"traefik/http/middlewares/Middleware09/headers/allowedHosts/0": "foobar",
"traefik/http/middlewares/Middleware09/headers/allowedHosts/1": "foobar",
"traefik/http/middlewares/Middleware09/headers/stsPreload": "true",
"traefik/http/middlewares/Middleware09/headers/frameDeny": "true",
"traefik/http/middlewares/Middleware09/headers/isDevelopment": "true",
"traefik/http/middlewares/Middleware09/headers/customResponseHeaders/name1": "foobar",
"traefik/http/middlewares/Middleware09/headers/customResponseHeaders/name0": "foobar",
"traefik/http/middlewares/Middleware09/headers/accessControlAllowMethods/0": "foobar",
"traefik/http/middlewares/Middleware09/headers/accessControlAllowMethods/1": "foobar",
"traefik/http/middlewares/Middleware09/headers/stsSeconds": "42",
"traefik/http/middlewares/Middleware09/headers/stsIncludeSubdomains": "true",
"traefik/http/middlewares/Middleware09/headers/customFrameOptionsValue": "foobar",
"traefik/http/middlewares/Middleware09/headers/accessControlMaxAge": "42",
"traefik/http/middlewares/Middleware09/headers/addVaryHeader": "true",
"traefik/http/middlewares/Middleware09/headers/hostsProxyHeaders/0": "foobar",
"traefik/http/middlewares/Middleware09/headers/hostsProxyHeaders/1": "foobar",
"traefik/http/middlewares/Middleware09/headers/sslTemporaryRedirect": "true",
"traefik/http/middlewares/Middleware09/headers/customBrowserXSSValue": "foobar",
"traefik/http/middlewares/Middleware09/headers/referrerPolicy": "foobar",
"traefik/http/middlewares/Middleware09/headers/accessControlExposeHeaders/0": "foobar",
"traefik/http/middlewares/Middleware09/headers/accessControlExposeHeaders/1": "foobar",
"traefik/http/middlewares/Middleware09/headers/contentSecurityPolicy": "foobar",
"traefik/http/middlewares/Middleware09/headers/publicKey": "foobar",
"traefik/http/middlewares/Middleware09/headers/customRequestHeaders/name0": "foobar",
"traefik/http/middlewares/Middleware09/headers/customRequestHeaders/name1": "foobar",
"traefik/http/middlewares/Middleware09/headers/browserXssFilter": "true",
"traefik/http/middlewares/Middleware10/ipWhiteList/sourceRange/0": "foobar",
"traefik/http/middlewares/Middleware10/ipWhiteList/sourceRange/1": "foobar",
"traefik/http/middlewares/Middleware10/ipWhiteList/ipStrategy/excludedIPs/0": "foobar",
"traefik/http/middlewares/Middleware10/ipWhiteList/ipStrategy/excludedIPs/1": "foobar",
"traefik/http/middlewares/Middleware10/ipWhiteList/ipStrategy/depth": "42",
"traefik/http/middlewares/Middleware11/inFlightReq/amount": "42",
"traefik/http/middlewares/Middleware11/inFlightReq/sourceCriterion/requestHost": "true",
"traefik/http/middlewares/Middleware11/inFlightReq/sourceCriterion/ipStrategy/depth": "42",
"traefik/http/middlewares/Middleware11/inFlightReq/sourceCriterion/ipStrategy/excludedIPs/0": "foobar",
"traefik/http/middlewares/Middleware11/inFlightReq/sourceCriterion/ipStrategy/excludedIPs/1": "foobar",
"traefik/http/middlewares/Middleware11/inFlightReq/sourceCriterion/requestHeaderName": "foobar",
"traefik/http/middlewares/Middleware12/passTLSClientCert/pem": "true",
"traefik/http/middlewares/Middleware12/passTLSClientCert/info/notAfter": "true",
"traefik/http/middlewares/Middleware12/passTLSClientCert/info/notBefore": "true",
"traefik/http/middlewares/Middleware12/passTLSClientCert/info/sans": "true",
"traefik/http/middlewares/Middleware12/passTLSClientCert/info/subject/country": "true",
"traefik/http/middlewares/Middleware12/passTLSClientCert/info/subject/province": "true",
"traefik/http/middlewares/Middleware12/passTLSClientCert/info/subject/locality": "true",
"traefik/http/middlewares/Middleware12/passTLSClientCert/info/subject/organization": "true",
"traefik/http/middlewares/Middleware12/passTLSClientCert/info/subject/commonName": "true",
"traefik/http/middlewares/Middleware12/passTLSClientCert/info/subject/serialNumber": "true",
"traefik/http/middlewares/Middleware12/passTLSClientCert/info/subject/domainComponent": "true",
"traefik/http/middlewares/Middleware12/passTLSClientCert/info/issuer/country": "true",
"traefik/http/middlewares/Middleware12/passTLSClientCert/info/issuer/province": "true",
"traefik/http/middlewares/Middleware12/passTLSClientCert/info/issuer/locality": "true",
"traefik/http/middlewares/Middleware12/passTLSClientCert/info/issuer/organization": "true",
"traefik/http/middlewares/Middleware12/passTLSClientCert/info/issuer/commonName": "true",
"traefik/http/middlewares/Middleware12/passTLSClientCert/info/issuer/serialNumber": "true",
"traefik/http/middlewares/Middleware12/passTLSClientCert/info/issuer/domainComponent": "true",
"traefik/http/middlewares/Middleware00/addPrefix/prefix": "foobar",
"traefik/http/middlewares/Middleware03/chain/middlewares/0": "foobar",
"traefik/http/middlewares/Middleware03/chain/middlewares/1": "foobar",
"traefik/http/middlewares/Middleware04/circuitBreaker/expression": "foobar",
"traefik/http/middlewares/Middleware07/errors/status/0": "foobar",
"traefik/http/middlewares/Middleware07/errors/status/1": "foobar",
"traefik/http/middlewares/Middleware07/errors/service": "foobar",
"traefik/http/middlewares/Middleware07/errors/query": "foobar",
"traefik/http/middlewares/Middleware13/rateLimit/average": "42",
"traefik/http/middlewares/Middleware13/rateLimit/burst": "42",
"traefik/http/middlewares/Middleware13/rateLimit/sourceCriterion/requestHeaderName": "foobar",
"traefik/http/middlewares/Middleware13/rateLimit/sourceCriterion/requestHost": "true",
"traefik/http/middlewares/Middleware13/rateLimit/sourceCriterion/ipStrategy/depth": "42",
"traefik/http/middlewares/Middleware13/rateLimit/sourceCriterion/ipStrategy/excludedIPs/0": "foobar",
"traefik/http/middlewares/Middleware13/rateLimit/sourceCriterion/ipStrategy/excludedIPs/1": "foobar",
"traefik/http/middlewares/Middleware20/stripPrefixRegex/regex/0": "foobar",
"traefik/http/middlewares/Middleware20/stripPrefixRegex/regex/1": "foobar",
"traefik/http/middlewares/Middleware01/basicAuth/users/0": "foobar",
"traefik/http/middlewares/Middleware01/basicAuth/users/1": "foobar",
"traefik/http/middlewares/Middleware01/basicAuth/usersFile": "foobar",
"traefik/http/middlewares/Middleware01/basicAuth/realm": "foobar",
"traefik/http/middlewares/Middleware01/basicAuth/removeHeader": "true",
"traefik/http/middlewares/Middleware01/basicAuth/headerField": "foobar",
"traefik/http/middlewares/Middleware02/buffering/maxResponseBodyBytes": "42",
"traefik/http/middlewares/Middleware02/buffering/memResponseBodyBytes": "42",
"traefik/http/middlewares/Middleware02/buffering/retryExpression": "foobar",
"traefik/http/middlewares/Middleware02/buffering/maxRequestBodyBytes": "42",
"traefik/http/middlewares/Middleware02/buffering/memRequestBodyBytes": "42",
"traefik/http/middlewares/Middleware05/compress": "",
"traefik/http/middlewares/Middleware18/retry/attempts": "42",
"traefik/http/middlewares/Middleware19/stripPrefix/prefixes/0": "foobar",
"traefik/http/middlewares/Middleware19/stripPrefix/prefixes/1": "foobar",
"traefik/http/middlewares/Middleware19/stripPrefix/forceSlash": "true",
"traefik/tcp/routers/TCPRouter0/entryPoints/0": "foobar",
"traefik/tcp/routers/TCPRouter0/entryPoints/1": "foobar",
"traefik/tcp/routers/TCPRouter0/service": "foobar",
"traefik/tcp/routers/TCPRouter0/rule": "foobar",
"traefik/tcp/routers/TCPRouter0/tls/options": "foobar",
"traefik/tcp/routers/TCPRouter0/tls/certResolver": "foobar",
"traefik/tcp/routers/TCPRouter0/tls/domains/0/main": "foobar",
"traefik/tcp/routers/TCPRouter0/tls/domains/0/sans/0": "foobar",
"traefik/tcp/routers/TCPRouter0/tls/domains/0/sans/1": "foobar",
"traefik/tcp/routers/TCPRouter0/tls/domains/1/main": "foobar",
"traefik/tcp/routers/TCPRouter0/tls/domains/1/sans/0": "foobar",
"traefik/tcp/routers/TCPRouter0/tls/domains/1/sans/1": "foobar",
"traefik/tcp/routers/TCPRouter0/tls/passthrough": "true",
"traefik/tcp/routers/TCPRouter1/entryPoints/0": "foobar",
"traefik/tcp/routers/TCPRouter1/entryPoints/1": "foobar",
"traefik/tcp/routers/TCPRouter1/service": "foobar",
"traefik/tcp/routers/TCPRouter1/rule": "foobar",
"traefik/tcp/routers/TCPRouter1/tls/domains/0/main": "foobar",
"traefik/tcp/routers/TCPRouter1/tls/domains/0/sans/0": "foobar",
"traefik/tcp/routers/TCPRouter1/tls/domains/0/sans/1": "foobar",
"traefik/tcp/routers/TCPRouter1/tls/domains/1/main": "foobar",
"traefik/tcp/routers/TCPRouter1/tls/domains/1/sans/0": "foobar",
"traefik/tcp/routers/TCPRouter1/tls/domains/1/sans/1": "foobar",
"traefik/tcp/routers/TCPRouter1/tls/passthrough": "true",
"traefik/tcp/routers/TCPRouter1/tls/options": "foobar",
"traefik/tcp/routers/TCPRouter1/tls/certResolver": "foobar",
"traefik/tcp/services/TCPService01/loadBalancer/terminationDelay": "42",
"traefik/tcp/services/TCPService01/loadBalancer/servers/0/address": "foobar",
"traefik/tcp/services/TCPService01/loadBalancer/servers/1/address": "foobar",
"traefik/tcp/services/TCPService02/weighted/services/0/name": "foobar",
"traefik/tcp/services/TCPService02/weighted/services/0/weight": "42",
"traefik/tcp/services/TCPService02/weighted/services/1/name": "foobar",
"traefik/tcp/services/TCPService02/weighted/services/1/weight": "43",
"traefik/tls/options/Options0/minVersion": "foobar",
"traefik/tls/options/Options0/maxVersion": "foobar",
"traefik/tls/options/Options0/cipherSuites/0": "foobar",
"traefik/tls/options/Options0/cipherSuites/1": "foobar",
"traefik/tls/options/Options0/sniStrict": "true",
"traefik/tls/options/Options0/curvePreferences/0": "foobar",
"traefik/tls/options/Options0/curvePreferences/1": "foobar",
"traefik/tls/options/Options0/clientAuth/caFiles/0": "foobar",
"traefik/tls/options/Options0/clientAuth/caFiles/1": "foobar",
"traefik/tls/options/Options0/clientAuth/clientAuthType": "foobar",
"traefik/tls/options/Options1/sniStrict": "true",
"traefik/tls/options/Options1/curvePreferences/0": "foobar",
"traefik/tls/options/Options1/curvePreferences/1": "foobar",
"traefik/tls/options/Options1/clientAuth/caFiles/0": "foobar",
"traefik/tls/options/Options1/clientAuth/caFiles/1": "foobar",
"traefik/tls/options/Options1/clientAuth/clientAuthType": "foobar",
"traefik/tls/options/Options1/minVersion": "foobar",
"traefik/tls/options/Options1/maxVersion": "foobar",
"traefik/tls/options/Options1/cipherSuites/0": "foobar",
"traefik/tls/options/Options1/cipherSuites/1": "foobar",
"traefik/tls/stores/Store0/defaultCertificate/certFile": "foobar",
"traefik/tls/stores/Store0/defaultCertificate/keyFile": "foobar",
"traefik/tls/stores/Store1/defaultCertificate/certFile": "foobar",
"traefik/tls/stores/Store1/defaultCertificate/keyFile": "foobar",
"traefik/tls/certificates/0/certFile": "foobar",
"traefik/tls/certificates/0/keyFile": "foobar",
"traefik/tls/certificates/0/stores/0": "foobar",
"traefik/tls/certificates/0/stores/1": "foobar",
"traefik/tls/certificates/1/certFile": "foobar",
"traefik/tls/certificates/1/keyFile": "foobar",
"traefik/tls/certificates/1/stores/0": "foobar",
"traefik/tls/certificates/1/stores/1": "foobar",
}))

cfg, err := provider.buildConfiguration()
require.NoError(t, err)

expected := &dynamic.Configuration{
HTTP: &dynamic.HTTPConfiguration{
Routers: map[string]*dynamic.Router{
"Router1": {
EntryPoints: []string{
"foobar",
"foobar",
},
Middlewares: []string{
"foobar",
"foobar",
},
Service: "foobar",
Rule: "foobar",
Priority: 42,
TLS: &dynamic.RouterTLSConfig{
Options: "foobar",
CertResolver: "foobar",
Domains: []types.Domain{
{
Main: "foobar",
SANs: []string{
"foobar",
"foobar",
},
},
{
Main: "foobar",
SANs: []string{
"foobar",
"foobar",
},
},
},
},
},
"Router0": {
EntryPoints: []string{
"foobar",
"foobar",
},
Middlewares: []string{
"foobar",
"foobar",
},
Service: "foobar",
Rule: "foobar",
Priority: 42,
TLS: &dynamic.RouterTLSConfig{},
},
},
Middlewares: map[string]*dynamic.Middleware{
"Middleware10": {
IPWhiteList: &dynamic.IPWhiteList{
SourceRange: []string{
"foobar",
"foobar",
},
IPStrategy: &dynamic.IPStrategy{
Depth: 42,
ExcludedIPs: []string{
"foobar",
"foobar",
},
},
},
},
"Middleware13": {
RateLimit: &dynamic.RateLimit{
Average: 42,
Burst: 42,
SourceCriterion: &dynamic.SourceCriterion{
IPStrategy: &dynamic.IPStrategy{
Depth: 42,
ExcludedIPs: []string{
"foobar",
"foobar",
},
},
RequestHeaderName: "foobar",
RequestHost: true,
},
},
},
"Middleware19": {
StripPrefix: &dynamic.StripPrefix{
Prefixes: []string{
"foobar",
"foobar",
},
ForceSlash: true,
},
},
"Middleware00": {
AddPrefix: &dynamic.AddPrefix{
Prefix: "foobar",
},
},
"Middleware02": {
Buffering: &dynamic.Buffering{
MaxRequestBodyBytes: 42,
MemRequestBodyBytes: 42,
MaxResponseBodyBytes: 42,
MemResponseBodyBytes: 42,
RetryExpression: "foobar",
},
},
"Middleware04": {
CircuitBreaker: &dynamic.CircuitBreaker{
Expression: "foobar",
},
},
"Middleware05": {
Compress: &dynamic.Compress{},
},
"Middleware08": {
ForwardAuth: &dynamic.ForwardAuth{
Address: "foobar",
TLS: &dynamic.ClientTLS{
CA: "foobar",
CAOptional: true,
Cert: "foobar",
Key: "foobar",
InsecureSkipVerify: true,
},
TrustForwardHeader: true,
AuthResponseHeaders: []string{
"foobar",
"foobar",
},
},
},
"Middleware06": {
DigestAuth: &dynamic.DigestAuth{
Users: dynamic.Users{
"foobar",
"foobar",
},
UsersFile: "foobar",
RemoveHeader: true,
Realm: "foobar",
HeaderField: "foobar",
},
},
"Middleware18": {
Retry: &dynamic.Retry{
Attempts: 42,
},
},
"Middleware16": {
ReplacePath: &dynamic.ReplacePath{
Path: "foobar",
},
},
"Middleware20": {
StripPrefixRegex: &dynamic.StripPrefixRegex{
Regex: []string{
"foobar",
"foobar",
},
},
},
"Middleware03": {
Chain: &dynamic.Chain{
Middlewares: []string{
"foobar",
"foobar",
},
},
},
"Middleware11": {
InFlightReq: &dynamic.InFlightReq{
Amount: 42,
SourceCriterion: &dynamic.SourceCriterion{
IPStrategy: &dynamic.IPStrategy{
Depth: 42,
ExcludedIPs: []string{
"foobar",
"foobar",
},
},
RequestHeaderName: "foobar",
RequestHost: true,
},
},
},
"Middleware12": {
PassTLSClientCert: &dynamic.PassTLSClientCert{
PEM: true,
Info: &dynamic.TLSClientCertificateInfo{
NotAfter: true,
NotBefore: true,
Sans: true,
Subject: &dynamic.TLSCLientCertificateDNInfo{
Country: true,
Province: true,
Locality: true,
Organization: true,
CommonName: true,
SerialNumber: true,
DomainComponent: true,
},
Issuer: &dynamic.TLSCLientCertificateDNInfo{
Country: true,
Province: true,
Locality: true,
Organization: true,
CommonName: true,
SerialNumber: true,
DomainComponent: true,
},
},
},
},
"Middleware14": {
RedirectRegex: &dynamic.RedirectRegex{
Regex: "foobar",
Replacement: "foobar",
Permanent: true,
},
},
"Middleware15": {
RedirectScheme: &dynamic.RedirectScheme{
Scheme: "foobar",
Port: "foobar",
Permanent: true,
},
},
"Middleware01": {
BasicAuth: &dynamic.BasicAuth{
Users: dynamic.Users{
"foobar",
"foobar",
},
UsersFile: "foobar",
Realm: "foobar",
RemoveHeader: true,
HeaderField: "foobar",
},
},
"Middleware07": {
Errors: &dynamic.ErrorPage{
Status: []string{
"foobar",
"foobar",
},
Service: "foobar",
Query: "foobar",
},
},
"Middleware09": {
Headers: &dynamic.Headers{
CustomRequestHeaders: map[string]string{
"name0": "foobar",
"name1": "foobar",
},
CustomResponseHeaders: map[string]string{
"name0": "foobar",
"name1": "foobar",
},
AccessControlAllowCredentials: true,
AccessControlAllowHeaders: []string{
"foobar",
"foobar",
},
AccessControlAllowMethods: []string{
"foobar",
"foobar",
},
AccessControlAllowOrigin: "foobar",
AccessControlExposeHeaders: []string{
"foobar",
"foobar",
},
AccessControlMaxAge: 42,
AddVaryHeader: true,
AllowedHosts: []string{
"foobar",
"foobar",
},
HostsProxyHeaders: []string{
"foobar",
"foobar",
},
SSLRedirect: true,
SSLTemporaryRedirect: true,
SSLHost: "foobar",
SSLProxyHeaders: map[string]string{
"name1": "foobar",
"name0": "foobar",
},
SSLForceHost: true,
STSSeconds: 42,
STSIncludeSubdomains: true,
STSPreload: true,
ForceSTSHeader: true,
FrameDeny: true,
CustomFrameOptionsValue: "foobar",
ContentTypeNosniff: true,
BrowserXSSFilter: true,
CustomBrowserXSSValue: "foobar",
ContentSecurityPolicy: "foobar",
PublicKey: "foobar",
ReferrerPolicy: "foobar",
FeaturePolicy: "foobar",
IsDevelopment: true,
},
},
"Middleware17": {
ReplacePathRegex: &dynamic.ReplacePathRegex{
Regex: "foobar",
Replacement: "foobar",
},
},
},
Services: map[string]*dynamic.Service{
"Service01": {
LoadBalancer: &dynamic.ServersLoadBalancer{
Sticky: &dynamic.Sticky{
Cookie: &dynamic.Cookie{
Name: "foobar",
Secure: true,
HTTPOnly: true,
},
},
Servers: []dynamic.Server{
{
URL: "foobar",
Scheme: "http",
},
{
URL: "foobar",
Scheme: "http",
},
},
HealthCheck: &dynamic.HealthCheck{
Scheme: "foobar",
Path: "foobar",
Port: 42,
Interval: "foobar",
Timeout: "foobar",
Hostname: "foobar",
Headers: map[string]string{
"name0": "foobar",
"name1": "foobar",
},
},
PassHostHeader: func(v bool) *bool { return &v }(true),
ResponseForwarding: &dynamic.ResponseForwarding{
FlushInterval: "foobar",
},
},
},
"Service02": {
Mirroring: &dynamic.Mirroring{
Service: "foobar",
Mirrors: []dynamic.MirrorService{
{
Name: "foobar",
Percent: 42,
},
{
Name: "foobar",
Percent: 42,
},
},
},
},
"Service03": {
Weighted: &dynamic.WeightedRoundRobin{
Services: []dynamic.WRRService{
{
Name: "foobar",
Weight: func(v int) *int { return &v }(42),
},
{
Name: "foobar",
Weight: func(v int) *int { return &v }(42),
},
},
Sticky: &dynamic.Sticky{
Cookie: &dynamic.Cookie{
Name: "foobar",
Secure: true,
HTTPOnly: true,
},
},
},
},
},
},
TCP: &dynamic.TCPConfiguration{
Routers: map[string]*dynamic.TCPRouter{
"TCPRouter0": {
EntryPoints: []string{
"foobar",
"foobar",
},
Service: "foobar",
Rule: "foobar",
TLS: &dynamic.RouterTCPTLSConfig{
Passthrough: true,
Options: "foobar",
CertResolver: "foobar",
Domains: []types.Domain{
{
Main: "foobar",
SANs: []string{
"foobar",
"foobar",
},
},
{
Main: "foobar",
SANs: []string{
"foobar",
"foobar",
},
},
},
},
},
"TCPRouter1": {
EntryPoints: []string{
"foobar",
"foobar",
},
Service: "foobar",
Rule: "foobar",
TLS: &dynamic.RouterTCPTLSConfig{
Passthrough: true,
Options: "foobar",
CertResolver: "foobar",
Domains: []types.Domain{
{
Main: "foobar",
SANs: []string{
"foobar",
"foobar",
},
},
{
Main: "foobar",
SANs: []string{
"foobar",
"foobar",
},
},
},
},
},
},
Services: map[string]*dynamic.TCPService{
"TCPService01": {
LoadBalancer: &dynamic.TCPServersLoadBalancer{
TerminationDelay: func(v int) *int { return &v }(42),
Servers: []dynamic.TCPServer{
{Address: "foobar"},
{Address: "foobar"},
},
},
},
"TCPService02": {
Weighted: &dynamic.TCPWeightedRoundRobin{
Services: []dynamic.TCPWRRService{
{
Name: "foobar",
Weight: func(v int) *int { return &v }(42),
},
{
Name: "foobar",
Weight: func(v int) *int { return &v }(43),
},
},
},
},
},
},
TLS: &dynamic.TLSConfiguration{
Certificates: []*tls.CertAndStores{
{
Certificate: tls.Certificate{
CertFile: tls.FileOrContent("foobar"),
KeyFile: tls.FileOrContent("foobar"),
},
Stores: []string{
"foobar",
"foobar",
},
},
{
Certificate: tls.Certificate{
CertFile: tls.FileOrContent("foobar"),
KeyFile: tls.FileOrContent("foobar"),
},
Stores: []string{
"foobar",
"foobar",
},
},
},
Options: map[string]tls.Options{
"Options0": {
MinVersion: "foobar",
MaxVersion: "foobar",
CipherSuites: []string{
"foobar",
"foobar",
},
CurvePreferences: []string{
"foobar",
"foobar",
},
ClientAuth: tls.ClientAuth{
CAFiles: []tls.FileOrContent{
tls.FileOrContent("foobar"),
tls.FileOrContent("foobar"),
},
ClientAuthType: "foobar",
},
SniStrict: true,
},
"Options1": {
MinVersion: "foobar",
MaxVersion: "foobar",
CipherSuites: []string{
"foobar",
"foobar",
},
CurvePreferences: []string{
"foobar",
"foobar",
},
ClientAuth: tls.ClientAuth{
CAFiles: []tls.FileOrContent{
tls.FileOrContent("foobar"),
tls.FileOrContent("foobar"),
},
ClientAuthType: "foobar",
},
SniStrict: true,
},
},
Stores: map[string]tls.Store{
"Store0": {
DefaultCertificate: &tls.Certificate{
CertFile: tls.FileOrContent("foobar"),
KeyFile: tls.FileOrContent("foobar"),
},
},
"Store1": {
DefaultCertificate: &tls.Certificate{
CertFile: tls.FileOrContent("foobar"),
KeyFile: tls.FileOrContent("foobar"),
},
},
},
},
}

assert.Equal(t, expected, cfg)
}

func Test_buildConfiguration_KV_error(t *testing.T) {
provider := &Provider{
RootKey: "traefik",
kvClient: &Mock{
Error: KvError{
List: errors.New("OOPS"),
},
KVPairs: mapToPairs(map[string]string{
"traefik/foo": "bar",
}),
},
}

cfg, err := provider.buildConfiguration()
require.Error(t, err)
assert.Nil(t, cfg)
}

func TestKvWatchTree(t *testing.T) {
returnedChans := make(chan chan []*store.KVPair)
provider := Provider{
kvClient: &Mock{
WatchTreeMethod: func() <-chan []*store.KVPair {
c := make(chan []*store.KVPair, 10)
returnedChans <- c
return c
},
},
}

configChan := make(chan dynamic.Message)
go func() {
err := provider.watchKv(context.Background(), configChan, "prefix", make(chan bool, 1))
require.NoError(t, err)
}()

select {
case c1 := <-returnedChans:
c1 <- []*store.KVPair{}
<-configChan
close(c1) // WatchTree chans can close due to error
case <-time.After(1 * time.Second):
t.Fatalf("Failed to create a new WatchTree chan")
}

select {
case c2 := <-returnedChans:
c2 <- []*store.KVPair{}
<-configChan
case <-time.After(1 * time.Second):
t.Fatalf("Failed to create a new WatchTree chan")
}

select {
case <-configChan:
t.Fatalf("configChan should be empty")
default:
}
}

func mapToPairs(in map[string]string) []*store.KVPair {
var out []*store.KVPair
for k, v := range in {
out = append(out, &store.KVPair{Key: k, Value: []byte(v)})
}
return out
}
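
For readers skimming the large fixture above, here is a condensed sketch of the key-to-field mapping the test exercises. It is an editor's illustration, not part of the commit: it reuses the newProviderMock and mapToPairs helpers from this test file, and the rule value is invented for the example.

provider := newProviderMock(mapToPairs(map[string]string{
"traefik/http/routers/Router0/rule": "Host(`example.com`)",
"traefik/http/routers/Router0/priority": "42",
}))

// buildConfiguration walks the flat keys under the "traefik" root and fills the
// typed dynamic.Configuration, so cfg.HTTP.Routers["Router0"].Rule should be
// "Host(`example.com`)" and cfg.HTTP.Routers["Router0"].Priority should be 42.
cfg, err := provider.buildConfiguration()
_ = cfg
_ = err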

25
pkg/provider/kv/redis/redis.go
Normal file

@@ -0,0 +1,25 @@
package redis

import (
"github.com/abronan/valkeyrie/store"
"github.com/containous/traefik/v2/pkg/provider"
"github.com/containous/traefik/v2/pkg/provider/kv"
)

var _ provider.Provider = (*Provider)(nil)

// Provider holds configurations of the provider.
type Provider struct {
kv.Provider
}

// SetDefaults sets the default values.
func (p *Provider) SetDefaults() {
p.Provider.SetDefaults()
p.Endpoints = []string{"127.0.0.1:6379"}
}

// Init the provider
func (p *Provider) Init() error {
return p.Provider.Init(store.REDIS, "redis")
}
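
Nothing in the diff shows this provider being constructed outside of tests, so the following is a minimal sketch of programmatic use, an editor's illustration rather than part of the commit. Endpoints and RootKey come from the embedded kv.Provider; the endpoint address and the RootKey value here are assumptions for the example.

p := &redis.Provider{}
p.SetDefaults()                      // Endpoints defaults to 127.0.0.1:6379
p.Endpoints = []string{"redis:6379"} // assumed address, adjust to the real Redis instance
p.RootKey = "traefik"                // dynamic configuration keys are read under this prefix
if err := p.Init(); err != nil {
panic(err)
}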

118
pkg/provider/kv/storewrapper.go
Normal file

@@ -0,0 +1,118 @@
package kv

import (
"github.com/abronan/valkeyrie/store"
"github.com/containous/traefik/v2/pkg/log"
)

// storeWrapper logs every call made to the underlying KV store.
// It is a debugging aid and tolerates a nil Store.
type storeWrapper struct {
store.Store
}

func (s *storeWrapper) Put(key string, value []byte, options *store.WriteOptions) error {
log.WithoutContext().Debugf("Put: %s %s", key, string(value))

if s.Store == nil {
return nil
}
return s.Store.Put(key, value, options)
}

func (s *storeWrapper) Get(key string, options *store.ReadOptions) (*store.KVPair, error) {
log.WithoutContext().Debugf("Get: %s", key)

if s.Store == nil {
return nil, nil
}
return s.Store.Get(key, options)
}

func (s *storeWrapper) Delete(key string) error {
log.WithoutContext().Debugf("Delete: %s", key)

if s.Store == nil {
return nil
}
return s.Store.Delete(key)
}

func (s *storeWrapper) Exists(key string, options *store.ReadOptions) (bool, error) {
log.WithoutContext().Debugf("Exists: %s", key)

if s.Store == nil {
return true, nil
}
return s.Store.Exists(key, options)
}

func (s *storeWrapper) Watch(key string, stopCh <-chan struct{}, options *store.ReadOptions) (<-chan *store.KVPair, error) {
log.WithoutContext().Debugf("Watch: %s", key)

if s.Store == nil {
return nil, nil
}
return s.Store.Watch(key, stopCh, options)
}

func (s *storeWrapper) WatchTree(directory string, stopCh <-chan struct{}, options *store.ReadOptions) (<-chan []*store.KVPair, error) {
log.WithoutContext().Debugf("WatchTree: %s", directory)

if s.Store == nil {
return nil, nil
}
return s.Store.WatchTree(directory, stopCh, options)
}

func (s *storeWrapper) NewLock(key string, options *store.LockOptions) (store.Locker, error) {
log.WithoutContext().Debugf("NewLock: %s", key)

if s.Store == nil {
return nil, nil
}
return s.Store.NewLock(key, options)
}

func (s *storeWrapper) List(directory string, options *store.ReadOptions) ([]*store.KVPair, error) {
log.WithoutContext().Debugf("List: %s", directory)

if s.Store == nil {
return nil, nil
}
return s.Store.List(directory, options)
}

func (s *storeWrapper) DeleteTree(directory string) error {
log.WithoutContext().Debugf("DeleteTree: %s", directory)

if s.Store == nil {
return nil
}
return s.Store.DeleteTree(directory)
}

func (s *storeWrapper) AtomicPut(key string, value []byte, previous *store.KVPair, options *store.WriteOptions) (bool, *store.KVPair, error) {
log.WithoutContext().Debugf("AtomicPut: %s %s %v", key, string(value), previous)

if s.Store == nil {
return true, nil, nil
}
return s.Store.AtomicPut(key, value, previous, options)
}

func (s *storeWrapper) AtomicDelete(key string, previous *store.KVPair) (bool, error) {
log.WithoutContext().Debugf("AtomicDelete: %s %v", key, previous)

if s.Store == nil {
return true, nil
}
return s.Store.AtomicDelete(key, previous)
}

func (s *storeWrapper) Close() {
log.WithoutContext().Debugf("Close")

if s.Store == nil {
return
}
s.Store.Close()
}
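
As a quick illustration of what the wrapper buys, the sketch below (an editor's addition, not part of the commit) shows that every call is traced at debug level and that a nil underlying Store is tolerated, which is what keeps the mock-based tests above simple.

s := &storeWrapper{} // no underlying store attached
// Logs "Put: ..." and returns nil because Store is nil.
_ = s.Put("traefik/http/routers/Router0/rule", []byte("foobar"), nil)
// Logs "Get: ..."; pair is nil without a real backend.
pair, _ := s.Get("traefik/http/routers/Router0/rule", nil)
_ = pair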

25
pkg/provider/kv/zk/zk.go
Normal file

@@ -0,0 +1,25 @@
package zk

import (
"github.com/abronan/valkeyrie/store"
"github.com/containous/traefik/v2/pkg/provider"
"github.com/containous/traefik/v2/pkg/provider/kv"
)

var _ provider.Provider = (*Provider)(nil)

// Provider holds configurations of the provider.
type Provider struct {
kv.Provider
}

// SetDefaults sets the default values.
func (p *Provider) SetDefaults() {
p.Provider.SetDefaults()
p.Endpoints = []string{"127.0.0.1:2181"}
}

// Init the provider
func (p *Provider) Init() error {
return p.Provider.Init(store.ZK, "zookeeper")
}