Dynamic Configuration Refactoring
parent d3ae88f108
commit a09dfa3ce1
452 changed files with 21023 additions and 9419 deletions
@@ -1,212 +0,0 @@
package label

import (
    "fmt"
    "net/http"
    "regexp"
    "strconv"
    "strings"

    "github.com/containous/traefik/log"
)

const (
    mapEntrySeparator = "||"
    mapValueSeparator = ":"
)

// Default values
const (
    DefaultWeight = 1
    DefaultProtocol = "http"
    DefaultPassHostHeader = true
    DefaultPassTLSCert = false
    DefaultFrontendPriority = 0
    DefaultCircuitBreakerExpression = "NetworkErrorRatio() > 1"
    DefaultBackendLoadBalancerMethod = "wrr"
    DefaultBackendMaxconnExtractorFunc = "request.host"
    DefaultBackendLoadbalancerStickinessCookieName = ""
    DefaultBackendHealthCheckPort = 0
)

var (
    // RegexpFrontendErrorPage is used to extract error pages from labels
    RegexpFrontendErrorPage = regexp.MustCompile(`^traefik\.frontend\.errors\.(?P<name>[^ .]+)\.(?P<field>[^ .]+)$`)

    // RegexpFrontendRateLimit is used to extract rate limits from labels
    RegexpFrontendRateLimit = regexp.MustCompile(`^traefik\.frontend\.rateLimit\.rateSet\.(?P<name>[^ .]+)\.(?P<field>[^ .]+)$`)
)

// GetStringValue gets the string value associated with a label
func GetStringValue(labels map[string]string, labelName string, defaultValue string) string {
    if value, ok := labels[labelName]; ok && len(value) > 0 {
        return value
    }
    return defaultValue
}

// GetBoolValue gets the bool value associated with a label
func GetBoolValue(labels map[string]string, labelName string, defaultValue bool) bool {
    rawValue, ok := labels[labelName]
    if ok {
        v, err := strconv.ParseBool(rawValue)
        if err == nil {
            return v
        }
        log.Errorf("Unable to parse %q: %q, falling back to %v. %v", labelName, rawValue, defaultValue, err)
    }
    return defaultValue
}

// GetIntValue gets the int value associated with a label
func GetIntValue(labels map[string]string, labelName string, defaultValue int) int {
    if rawValue, ok := labels[labelName]; ok {
        value, err := strconv.Atoi(rawValue)
        if err == nil {
            return value
        }
        log.Errorf("Unable to parse %q: %q, falling back to %v. %v", labelName, rawValue, defaultValue, err)
    }
    return defaultValue
}

// GetInt64Value gets the int64 value associated with a label
func GetInt64Value(labels map[string]string, labelName string, defaultValue int64) int64 {
    if rawValue, ok := labels[labelName]; ok {
        value, err := strconv.ParseInt(rawValue, 10, 64)
        if err == nil {
            return value
        }
        log.Errorf("Unable to parse %q: %q, falling back to %v. %v", labelName, rawValue, defaultValue, err)
    }
    return defaultValue
}

// GetSliceStringValue gets a slice of strings associated with a label
func GetSliceStringValue(labels map[string]string, labelName string) []string {
    var value []string

    if values, ok := labels[labelName]; ok {
        value = SplitAndTrimString(values, ",")

        if len(value) == 0 {
            log.Debugf("Could not load %q.", labelName)
        }
    }
    return value
}

// ParseMapValue parses a label value into a map
func ParseMapValue(labelName, values string) map[string]string {
    mapValue := make(map[string]string)

    for _, parts := range strings.Split(values, mapEntrySeparator) {
        pair := strings.SplitN(parts, mapValueSeparator, 2)
        if len(pair) != 2 {
            log.Warnf("Could not load %q: %q, skipping...", labelName, parts)
        } else {
            mapValue[http.CanonicalHeaderKey(strings.TrimSpace(pair[0]))] = strings.TrimSpace(pair[1])
        }
    }

    if len(mapValue) == 0 {
        log.Errorf("Could not load %q, skipping...", labelName)
        return nil
    }
    return mapValue
}

// GetMapValue gets the map value associated with a label
func GetMapValue(labels map[string]string, labelName string) map[string]string {
    if values, ok := labels[labelName]; ok {

        if len(values) == 0 {
            log.Errorf("Missing value for %q, skipping...", labelName)
            return nil
        }

        return ParseMapValue(labelName, values)
    }

    return nil
}

// GetStringMultipleStrict gets multiple string values associated with several labels.
// It fails if any of the labels is missing.
func GetStringMultipleStrict(labels map[string]string, labelNames ...string) (map[string]string, error) {
    foundLabels := map[string]string{}
    for _, name := range labelNames {
        value := GetStringValue(labels, name, "")
        // Error out only if one of them is not defined.
        if len(value) == 0 {
            return nil, fmt.Errorf("label not found: %s", name)
        }
        foundLabels[name] = value
    }
    return foundLabels, nil
}

// Has checks if a non-empty value is associated with a label
func Has(labels map[string]string, labelName string) bool {
    value, ok := labels[labelName]
    return ok && len(value) > 0
}

// HasPrefix checks if a non-empty value is associated with at least one label with the given prefix
func HasPrefix(labels map[string]string, prefix string) bool {
    for name, value := range labels {
        if strings.HasPrefix(name, prefix) && len(value) > 0 {
            return true
        }
    }
    return false
}

// IsEnabled checks if a container is enabled in Traefik
func IsEnabled(labels map[string]string, exposedByDefault bool) bool {
    return GetBoolValue(labels, TraefikEnable, exposedByDefault)
}

// SplitAndTrimString splits separatedString at the separator character and trims each
// piece, filtering out empty pieces. Returns the list of pieces or nil if the input
// did not contain a non-empty piece.
func SplitAndTrimString(base string, sep string) []string {
    var trimmedStrings []string

    for _, s := range strings.Split(base, sep) {
        s = strings.TrimSpace(s)
        if len(s) > 0 {
            trimmedStrings = append(trimmedStrings, s)
        }
    }

    return trimmedStrings
}

// GetFuncString returns a func related to GetStringValue
func GetFuncString(labelName string, defaultValue string) func(map[string]string) string {
    return func(labels map[string]string) string {
        return GetStringValue(labels, labelName, defaultValue)
    }
}

// GetFuncInt returns a func related to GetIntValue
func GetFuncInt(labelName string, defaultValue int) func(map[string]string) int {
    return func(labels map[string]string) int {
        return GetIntValue(labels, labelName, defaultValue)
    }
}

// GetFuncBool returns a func related to GetBoolValue
func GetFuncBool(labelName string, defaultValue bool) func(map[string]string) bool {
    return func(labels map[string]string) bool {
        return GetBoolValue(labels, labelName, defaultValue)
    }
}

// GetFuncSliceString returns a func related to GetSliceStringValue
func GetFuncSliceString(labelName string) func(map[string]string) []string {
    return func(labels map[string]string) []string {
        return GetSliceStringValue(labels, labelName)
    }
}
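A minimal usage sketch of the helpers removed above, assuming the package import path github.com/containous/traefik/provider/label and a hypothetical label set; it shows the default-fallback behavior of the typed getters and the "||" / ":" syntax handled by GetMapValue.

package main

import (
    "fmt"

    "github.com/containous/traefik/provider/label" // assumed import path for the package above
)

func main() {
    // Hypothetical labels as a provider (e.g. Docker) would expose them.
    labels := map[string]string{
        "traefik.enable": "true",
        "traefik.weight": "10",
        "traefik.frontend.headers.customResponseHeaders": "X-Custom-Response-Header:True || Content-Type: text/plain",
    }

    // Typed getters fall back to the given default on missing or unparsable values.
    fmt.Println(label.IsEnabled(labels, false))                                      // true
    fmt.Println(label.GetIntValue(labels, label.TraefikWeight, label.DefaultWeight)) // 10

    // Map-valued labels use "||" between entries and ":" between key and value;
    // keys are canonicalized as HTTP header names.
    headers := label.GetMapValue(labels, label.TraefikFrontendResponseHeaders)
    fmt.Println(headers["X-Custom-Response-Header"]) // True
}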
@@ -1,692 +0,0 @@
package label

import (
    "strconv"
    "testing"

    "github.com/stretchr/testify/assert"
)

func TestSplitAndTrimString(t *testing.T) {
    testCases := []struct {
        desc string
        input string
        expected []string
    }{
        {
            desc: "empty string",
            input: "",
            expected: nil,
        }, {
            desc: "one piece",
            input: "foo",
            expected: []string{"foo"},
        }, {
            desc: "two pieces",
            input: "foo,bar",
            expected: []string{"foo", "bar"},
        }, {
            desc: "three pieces",
            input: "foo,bar,zoo",
            expected: []string{"foo", "bar", "zoo"},
        }, {
            desc: "two pieces with whitespace",
            input: " foo , bar ",
            expected: []string{"foo", "bar"},
        }, {
            desc: "consecutive commas",
            input: " foo ,, bar ",
            expected: []string{"foo", "bar"},
        }, {
            desc: "consecutive commas with whitespace",
            input: " foo , , bar ",
            expected: []string{"foo", "bar"},
        }, {
            desc: "leading and trailing commas",
            input: ",, foo , , bar,, , ",
            expected: []string{"foo", "bar"},
        }, {
            desc: "no valid pieces",
            input: ", , , ,, ,",
            expected: nil,
        },
    }

    for _, test := range testCases {
        test := test
        t.Run(test.desc, func(t *testing.T) {
            t.Parallel()
            actual := SplitAndTrimString(test.input, ",")
            assert.Equal(t, test.expected, actual)
        })
    }
}

func TestGetStringValue(t *testing.T) {
    testCases := []struct {
        desc string
        labels map[string]string
        labelName string
        defaultValue string
        expected string
    }{
        {
            desc: "empty labels map",
            labelName: "foo",
            defaultValue: "default",
            expected: "default",
        },
        {
            desc: "existing label",
            labels: map[string]string{
                "foo": "bar",
            },
            labelName: "foo",
            defaultValue: "default",
            expected: "bar",
        },
        {
            desc: "non existing label",
            labels: map[string]string{
                "foo": "bar",
            },
            labelName: "fii",
            defaultValue: "default",
            expected: "default",
        },
    }
    for _, test := range testCases {
        test := test
        t.Run(test.desc, func(t *testing.T) {
            t.Parallel()

            got := GetStringValue(test.labels, test.labelName, test.defaultValue)
            assert.Equal(t, test.expected, got)
        })
    }
}

func TestGetBoolValue(t *testing.T) {
    testCases := []struct {
        desc string
        labels map[string]string
        labelName string
        defaultValue bool
        expected bool
    }{
        {
            desc: "empty map",
            labelName: "foo",
        },
        {
            desc: "invalid boolean value",
            labels: map[string]string{
                "foo": "bar",
            },
            labelName: "foo",
            defaultValue: true,
            expected: true,
        },
        {
            desc: "valid boolean value: true",
            labels: map[string]string{
                "foo": "true",
            },
            labelName: "foo",
            defaultValue: false,
            expected: true,
        },
        {
            desc: "valid boolean value: false",
            labels: map[string]string{
                "foo": "false",
            },
            labelName: "foo",
            defaultValue: true,
            expected: false,
        },
    }
    for _, test := range testCases {
        test := test
        t.Run(test.desc, func(t *testing.T) {
            t.Parallel()

            got := GetBoolValue(test.labels, test.labelName, test.defaultValue)
            assert.Equal(t, test.expected, got)
        })
    }
}

func TestGetIntValue(t *testing.T) {
    testCases := []struct {
        desc string
        labels map[string]string
        labelName string
        defaultValue int
        expected int
    }{
        {
            desc: "empty map",
            labelName: "foo",
        },
        {
            desc: "invalid int value",
            labelName: "foo",
            labels: map[string]string{
                "foo": "bar",
            },
            defaultValue: 666,
            expected: 666,
        },
        {
            desc: "negative int value",
            labelName: "foo",
            labels: map[string]string{
                "foo": "-1",
            },
            defaultValue: 666,
            expected: -1,
        },
        {
            desc: "positive int value",
            labelName: "foo",
            labels: map[string]string{
                "foo": "1",
            },
            defaultValue: 666,
            expected: 1,
        },
    }
    for _, test := range testCases {
        test := test
        t.Run(test.desc, func(t *testing.T) {
            t.Parallel()

            got := GetIntValue(test.labels, test.labelName, test.defaultValue)
            assert.Equal(t, test.expected, got)
        })
    }
}

func TestGetInt64Value(t *testing.T) {
    testCases := []struct {
        desc string
        labels map[string]string
        labelName string
        defaultValue int64
        expected int64
    }{
        {
            desc: "empty map",
            labelName: "foo",
        },
        {
            desc: "invalid int value",
            labelName: "foo",
            labels: map[string]string{
                "foo": "bar",
            },
            defaultValue: 666,
            expected: 666,
        },
        {
            desc: "negative int value",
            labelName: "foo",
            labels: map[string]string{
                "foo": "-1",
            },
            defaultValue: 666,
            expected: -1,
        },
        {
            desc: "positive int value",
            labelName: "foo",
            labels: map[string]string{
                "foo": "1",
            },
            defaultValue: 666,
            expected: 1,
        },
    }
    for _, test := range testCases {
        test := test
        t.Run(test.desc, func(t *testing.T) {
            t.Parallel()

            got := GetInt64Value(test.labels, test.labelName, test.defaultValue)
            assert.Equal(t, test.expected, got)
        })
    }
}

func TestGetSliceStringValue(t *testing.T) {
    testCases := []struct {
        desc string
        labels map[string]string
        labelName string
        expected []string
    }{
        {
            desc: "empty map",
            labelName: "foo",
        },
        {
            desc: "empty value",
            labels: map[string]string{
                "foo": "",
            },
            labelName: "foo",
            expected: nil,
        },
        {
            desc: "one value, not split",
            labels: map[string]string{
                "foo": "bar",
            },
            labelName: "foo",
            expected: []string{"bar"},
        },
        {
            desc: "several values",
            labels: map[string]string{
                "foo": "bar,bir ,bur",
            },
            labelName: "foo",
            expected: []string{"bar", "bir", "bur"},
        },
    }
    for _, test := range testCases {
        test := test
        t.Run(test.desc, func(t *testing.T) {
            t.Parallel()

            got := GetSliceStringValue(test.labels, test.labelName)
            assert.EqualValues(t, test.expected, got)
        })
    }
}

func TestGetMapValue(t *testing.T) {
    testCases := []struct {
        desc string
        labels map[string]string
        labelName string
        expected map[string]string
    }{
        {
            desc: "empty map",
            labelName: "foo",
        },
        {
            desc: "existent label with empty entry",
            labelName: "foo",
            labels: map[string]string{
                "foo": "",
            },
            expected: nil,
        },
        {
            desc: "existent label with invalid entry",
            labelName: "foo",
            labels: map[string]string{
                "foo": "bar",
            },
            expected: nil,
        },
        {
            desc: "existent label with empty value",
            labelName: "foo",
            labels: map[string]string{
                "foo": "bar:",
            },
            expected: map[string]string{
                "Bar": "",
            },
        },
        {
            desc: "one entry",
            labelName: "foo",
            labels: map[string]string{
                "foo": " Access-Control-Allow-Methods:POST,GET,OPTIONS ",
            },
            expected: map[string]string{
                "Access-Control-Allow-Methods": "POST,GET,OPTIONS",
            },
        },
        {
            desc: "several entry",
            labelName: "foo",
            labels: map[string]string{
                "foo": "Access-Control-Allow-Methods:POST,GET,OPTIONS || Content-type: application/json; charset=utf-8",
            },
            expected: map[string]string{
                "Access-Control-Allow-Methods": "POST,GET,OPTIONS",
                "Content-Type": "application/json; charset=utf-8",
            },
        },
    }
    for _, test := range testCases {
        test := test
        t.Run(test.desc, func(t *testing.T) {
            t.Parallel()

            got := GetMapValue(test.labels, test.labelName)
            assert.EqualValues(t, test.expected, got)
        })
    }
}

func TestGetStringMultipleStrict(t *testing.T) {
    testCases := []struct {
        desc string
        labels map[string]string
        labelNames []string
        expected map[string]string
        expectedErr bool
    }{
        {
            desc: "empty labels names and empty labels map",
            labels: map[string]string{},
            expected: map[string]string{},
        },
        {
            desc: "empty labels names",
            labels: map[string]string{
                "foo": "bar",
                "fii": "bir",
            },
            expected: map[string]string{},
        },
        {
            desc: "one label missing",
            labels: map[string]string{
                "foo": "bar",
                "fii": "bir",
                "fyy": "byr",
            },
            labelNames: []string{"foo", "fii", "fuu"},
            expected: nil,
            expectedErr: true,
        },
        {
            desc: "all labels are present",
            labels: map[string]string{
                "foo": "bar",
                "fii": "bir",
                "fyy": "byr",
            },
            labelNames: []string{"foo", "fii"},
            expected: map[string]string{
                "foo": "bar",
                "fii": "bir",
            },
        },
    }
    for _, test := range testCases {
        test := test
        t.Run(test.desc, func(t *testing.T) {
            t.Parallel()

            got, err := GetStringMultipleStrict(test.labels, test.labelNames...)
            if (err != nil) != test.expectedErr {
                t.Errorf("error = %v, wantErr %v", err, test.expectedErr)
                return
            }
            assert.EqualValues(t, test.expected, got)
        })
    }
}

func TestHas(t *testing.T) {
    testCases := []struct {
        desc string
        labels map[string]string
        labelName string
        expected bool
    }{
        {
            desc: "nil labels map",
            labelName: "foo",
        },
        {
            desc: "nonexistent label",
            labels: map[string]string{
                "foo": "bar",
            },
            labelName: "fii",
            expected: false,
        },
        {
            desc: "existent label",
            labels: map[string]string{
                "foo": "bar",
            },
            labelName: "foo",
            expected: true,
        },
        {
            desc: "existent label with empty value",
            labels: map[string]string{
                "foo": "",
            },
            labelName: "foo",
            expected: false,
        },
    }
    for _, test := range testCases {
        test := test
        t.Run(test.desc, func(t *testing.T) {
            t.Parallel()

            got := Has(test.labels, test.labelName)
            assert.Equal(t, test.expected, got)
        })
    }
}

func TestIsEnabled(t *testing.T) {
    testCases := []struct {
        desc string
        labels map[string]string
        exposedByDefault bool
        expected bool
    }{
        {
            desc: "empty labels map & exposedByDefault true",
            exposedByDefault: true,
            expected: true,
        },
        {
            desc: "empty labels map & exposedByDefault false",
            exposedByDefault: false,
            expected: false,
        },
        {
            desc: "exposedByDefault false and label enable true",
            labels: map[string]string{
                TraefikEnable: "true",
            },
            exposedByDefault: false,
            expected: true,
        },
        {
            desc: "exposedByDefault false and label enable false",
            labels: map[string]string{
                TraefikEnable: "false",
            },
            exposedByDefault: false,
            expected: false,
        },
        {
            desc: "exposedByDefault true and label enable false",
            labels: map[string]string{
                TraefikEnable: "false",
            },
            exposedByDefault: true,
            expected: false,
        },
        {
            desc: "exposedByDefault true and label enable true",
            labels: map[string]string{
                TraefikEnable: "true",
            },
            exposedByDefault: true,
            expected: true,
        },
    }
    for _, test := range testCases {
        test := test
        t.Run(test.desc, func(t *testing.T) {
            t.Parallel()

            got := IsEnabled(test.labels, test.exposedByDefault)
            assert.Equal(t, test.expected, got)
        })
    }
}

func TestHasPrefix(t *testing.T) {
    testCases := []struct {
        desc string
        labels map[string]string
        prefix string
        expected bool
    }{
        {
            desc: "nil labels map",
            prefix: "foo",
            expected: false,
        },
        {
            desc: "nonexistent prefix",
            labels: map[string]string{
                "foo.carotte": "bar",
            },
            prefix: "fii",
            expected: false,
        },
        {
            desc: "existent prefix",
            labels: map[string]string{
                "foo.carotte": "bar",
            },
            prefix: "foo",
            expected: true,
        },
        {
            desc: "existent prefix with empty value",
            labels: map[string]string{
                "foo.carotte": "",
            },
            prefix: "foo",
            expected: false,
        },
    }
    for _, test := range testCases {
        test := test
        t.Run(test.desc, func(t *testing.T) {
            t.Parallel()

            got := HasPrefix(test.labels, test.prefix)
            assert.Equal(t, test.expected, got)
        })
    }
}

func TestGetFuncString(t *testing.T) {
    testCases := []struct {
        labels map[string]string
        labelName string
        defaultValue string
        expected string
    }{
        {
            labels: nil,
            labelName: TraefikProtocol,
            defaultValue: DefaultProtocol,
            expected: "http",
        },
        {
            labels: map[string]string{
                TraefikProtocol: "https",
            },
            labelName: TraefikProtocol,
            defaultValue: DefaultProtocol,
            expected: "https",
        },
    }

    for containerID, test := range testCases {
        test := test
        t.Run(test.labelName+strconv.Itoa(containerID), func(t *testing.T) {
            t.Parallel()

            actual := GetFuncString(test.labelName, test.defaultValue)(test.labels)
            assert.Equal(t, test.expected, actual)
        })
    }
}

func TestGetSliceString(t *testing.T) {
    testCases := []struct {
        desc string
        labels map[string]string
        labelName string
        expected []string
    }{
        {
            desc: "no whitelist-label",
            labels: nil,
            expected: nil,
        },
        {
            desc: "whitelist-label with empty string",
            labels: map[string]string{
                TraefikFrontendWhiteListSourceRange: "",
            },
            labelName: TraefikFrontendWhiteListSourceRange,
            expected: nil,
        },
        {
            desc: "whitelist-label with IPv4 mask",
            labels: map[string]string{
                TraefikFrontendWhiteListSourceRange: "1.2.3.4/16",
            },
            labelName: TraefikFrontendWhiteListSourceRange,
            expected: []string{
                "1.2.3.4/16",
            },
        },
        {
            desc: "whitelist-label with IPv6 mask",
            labels: map[string]string{
                TraefikFrontendWhiteListSourceRange: "fe80::/16",
            },
            labelName: TraefikFrontendWhiteListSourceRange,
            expected: []string{
                "fe80::/16",
            },
        },
        {
            desc: "whitelist-label with multiple masks",
            labels: map[string]string{
                TraefikFrontendWhiteListSourceRange: "1.1.1.1/24, 1234:abcd::42/32",
            },
            labelName: TraefikFrontendWhiteListSourceRange,
            expected: []string{
                "1.1.1.1/24",
                "1234:abcd::42/32",
            },
        },
    }

    for _, test := range testCases {
        test := test
        t.Run(test.desc, func(t *testing.T) {
            t.Parallel()

            actual := GetFuncSliceString(test.labelName)(test.labels)
            assert.EqualValues(t, test.expected, actual)
        })
    }
}
@@ -1,222 +0,0 @@
package label

// Traefik labels
const (
    Prefix = "traefik."
    SuffixBackend = "backend"
    SuffixDomain = "domain"
    SuffixEnable = "enable"
    SuffixPort = "port"
    SuffixPortName = "portName"
    SuffixPortIndex = "portIndex"
    SuffixProtocol = "protocol"
    SuffixTags = "tags"
    SuffixWeight = "weight"
    SuffixBackendID = "backend.id"
    SuffixBackendCircuitBreaker = "backend.circuitbreaker"
    SuffixBackendCircuitBreakerExpression = "backend.circuitbreaker.expression"
    SuffixBackendHealthCheckScheme = "backend.healthcheck.scheme"
    SuffixBackendHealthCheckPath = "backend.healthcheck.path"
    SuffixBackendHealthCheckPort = "backend.healthcheck.port"
    SuffixBackendHealthCheckInterval = "backend.healthcheck.interval"
    SuffixBackendHealthCheckTimeout = "backend.healthcheck.timeout"
    SuffixBackendHealthCheckHostname = "backend.healthcheck.hostname"
    SuffixBackendHealthCheckHeaders = "backend.healthcheck.headers"
    SuffixBackendLoadBalancer = "backend.loadbalancer"
    SuffixBackendLoadBalancerMethod = SuffixBackendLoadBalancer + ".method"
    SuffixBackendLoadBalancerStickiness = SuffixBackendLoadBalancer + ".stickiness"
    SuffixBackendLoadBalancerStickinessCookieName = SuffixBackendLoadBalancer + ".stickiness.cookieName"
    SuffixBackendMaxConnAmount = "backend.maxconn.amount"
    SuffixBackendMaxConnExtractorFunc = "backend.maxconn.extractorfunc"
    SuffixBackendBuffering = "backend.buffering"
    SuffixBackendResponseForwardingFlushInterval = "backend.responseForwarding.flushInterval"
    SuffixBackendBufferingMaxRequestBodyBytes = SuffixBackendBuffering + ".maxRequestBodyBytes"
    SuffixBackendBufferingMemRequestBodyBytes = SuffixBackendBuffering + ".memRequestBodyBytes"
    SuffixBackendBufferingMaxResponseBodyBytes = SuffixBackendBuffering + ".maxResponseBodyBytes"
    SuffixBackendBufferingMemResponseBodyBytes = SuffixBackendBuffering + ".memResponseBodyBytes"
    SuffixBackendBufferingRetryExpression = SuffixBackendBuffering + ".retryExpression"
    SuffixFrontend = "frontend"
    SuffixFrontendAuth = SuffixFrontend + ".auth"
    SuffixFrontendAuthBasic = SuffixFrontendAuth + ".basic"
    SuffixFrontendAuthBasicRealm = SuffixFrontendAuthBasic + ".realm"
    SuffixFrontendAuthBasicRemoveHeader = SuffixFrontendAuthBasic + ".removeHeader"
    SuffixFrontendAuthBasicUsers = SuffixFrontendAuthBasic + ".users"
    SuffixFrontendAuthBasicUsersFile = SuffixFrontendAuthBasic + ".usersFile"
    SuffixFrontendAuthDigest = SuffixFrontendAuth + ".digest"
    SuffixFrontendAuthDigestRemoveHeader = SuffixFrontendAuthDigest + ".removeHeader"
    SuffixFrontendAuthDigestUsers = SuffixFrontendAuthDigest + ".users"
    SuffixFrontendAuthDigestUsersFile = SuffixFrontendAuthDigest + ".usersFile"
    SuffixFrontendAuthForward = SuffixFrontendAuth + ".forward"
    SuffixFrontendAuthForwardAddress = SuffixFrontendAuthForward + ".address"
    SuffixFrontendAuthForwardAuthResponseHeaders = SuffixFrontendAuthForward + ".authResponseHeaders"
    SuffixFrontendAuthForwardTLS = SuffixFrontendAuthForward + ".tls"
    SuffixFrontendAuthForwardTLSCa = SuffixFrontendAuthForwardTLS + ".ca"
    SuffixFrontendAuthForwardTLSCaOptional = SuffixFrontendAuthForwardTLS + ".caOptional"
    SuffixFrontendAuthForwardTLSCert = SuffixFrontendAuthForwardTLS + ".cert"
    SuffixFrontendAuthForwardTLSInsecureSkipVerify = SuffixFrontendAuthForwardTLS + ".insecureSkipVerify"
    SuffixFrontendAuthForwardTLSKey = SuffixFrontendAuthForwardTLS + ".key"
    SuffixFrontendAuthForwardTrustForwardHeader = SuffixFrontendAuthForward + ".trustForwardHeader"
    SuffixFrontendAuthHeaderField = SuffixFrontendAuth + ".headerField"
    SuffixFrontendEntryPoints = "frontend.entryPoints"
    SuffixFrontendHeaders = "frontend.headers."
    SuffixFrontendRequestHeaders = SuffixFrontendHeaders + "customRequestHeaders"
    SuffixFrontendResponseHeaders = SuffixFrontendHeaders + "customResponseHeaders"
    SuffixFrontendHeadersAllowedHosts = SuffixFrontendHeaders + "allowedHosts"
    SuffixFrontendHeadersHostsProxyHeaders = SuffixFrontendHeaders + "hostsProxyHeaders"
    SuffixFrontendHeadersSSLForceHost = SuffixFrontendHeaders + "SSLForceHost"
    SuffixFrontendHeadersSSLRedirect = SuffixFrontendHeaders + "SSLRedirect"
    SuffixFrontendHeadersSSLTemporaryRedirect = SuffixFrontendHeaders + "SSLTemporaryRedirect"
    SuffixFrontendHeadersSSLHost = SuffixFrontendHeaders + "SSLHost"
    SuffixFrontendHeadersSSLProxyHeaders = SuffixFrontendHeaders + "SSLProxyHeaders"
    SuffixFrontendHeadersSTSSeconds = SuffixFrontendHeaders + "STSSeconds"
    SuffixFrontendHeadersSTSIncludeSubdomains = SuffixFrontendHeaders + "STSIncludeSubdomains"
    SuffixFrontendHeadersSTSPreload = SuffixFrontendHeaders + "STSPreload"
    SuffixFrontendHeadersForceSTSHeader = SuffixFrontendHeaders + "forceSTSHeader"
    SuffixFrontendHeadersFrameDeny = SuffixFrontendHeaders + "frameDeny"
    SuffixFrontendHeadersCustomFrameOptionsValue = SuffixFrontendHeaders + "customFrameOptionsValue"
    SuffixFrontendHeadersContentTypeNosniff = SuffixFrontendHeaders + "contentTypeNosniff"
    SuffixFrontendHeadersBrowserXSSFilter = SuffixFrontendHeaders + "browserXSSFilter"
    SuffixFrontendHeadersCustomBrowserXSSValue = SuffixFrontendHeaders + "customBrowserXSSValue"
    SuffixFrontendHeadersContentSecurityPolicy = SuffixFrontendHeaders + "contentSecurityPolicy"
    SuffixFrontendHeadersPublicKey = SuffixFrontendHeaders + "publicKey"
    SuffixFrontendHeadersReferrerPolicy = SuffixFrontendHeaders + "referrerPolicy"
    SuffixFrontendHeadersIsDevelopment = SuffixFrontendHeaders + "isDevelopment"
    SuffixFrontendPassHostHeader = "frontend.passHostHeader"
    SuffixFrontendPassTLSClientCert = "frontend.passTLSClientCert"
    SuffixFrontendPassTLSClientCertPem = SuffixFrontendPassTLSClientCert + ".pem"
    SuffixFrontendPassTLSClientCertInfos = SuffixFrontendPassTLSClientCert + ".infos"
    SuffixFrontendPassTLSClientCertInfosNotAfter = SuffixFrontendPassTLSClientCertInfos + ".notAfter"
    SuffixFrontendPassTLSClientCertInfosNotBefore = SuffixFrontendPassTLSClientCertInfos + ".notBefore"
    SuffixFrontendPassTLSClientCertInfosSans = SuffixFrontendPassTLSClientCertInfos + ".sans"
    SuffixFrontendPassTLSClientCertInfosSubject = SuffixFrontendPassTLSClientCertInfos + ".subject"
    SuffixFrontendPassTLSClientCertInfosSubjectCommonName = SuffixFrontendPassTLSClientCertInfosSubject + ".commonName"
    SuffixFrontendPassTLSClientCertInfosSubjectCountry = SuffixFrontendPassTLSClientCertInfosSubject + ".country"
    SuffixFrontendPassTLSClientCertInfosSubjectLocality = SuffixFrontendPassTLSClientCertInfosSubject + ".locality"
    SuffixFrontendPassTLSClientCertInfosSubjectOrganization = SuffixFrontendPassTLSClientCertInfosSubject + ".organization"
    SuffixFrontendPassTLSClientCertInfosSubjectProvince = SuffixFrontendPassTLSClientCertInfosSubject + ".province"
    SuffixFrontendPassTLSClientCertInfosSubjectSerialNumber = SuffixFrontendPassTLSClientCertInfosSubject + ".serialNumber"
    SuffixFrontendPassTLSCert = "frontend.passTLSCert" // Deprecated
    SuffixFrontendPriority = "frontend.priority"
    SuffixFrontendRateLimitExtractorFunc = "frontend.rateLimit.extractorFunc"
    SuffixFrontendRedirectEntryPoint = "frontend.redirect.entryPoint"
    SuffixFrontendRedirectRegex = "frontend.redirect.regex"
    SuffixFrontendRedirectReplacement = "frontend.redirect.replacement"
    SuffixFrontendRedirectPermanent = "frontend.redirect.permanent"
    SuffixFrontendRule = "frontend.rule"
    SuffixFrontendWhiteList = "frontend.whiteList."
    SuffixFrontendWhiteListSourceRange = SuffixFrontendWhiteList + "sourceRange"
    SuffixFrontendWhiteListIPStrategy = SuffixFrontendWhiteList + "ipStrategy"
    SuffixFrontendWhiteListIPStrategyDepth = SuffixFrontendWhiteListIPStrategy + ".depth"
    SuffixFrontendWhiteListIPStrategyExcludedIPS = SuffixFrontendWhiteListIPStrategy + ".excludedIPs"
    TraefikDomain = Prefix + SuffixDomain
    TraefikEnable = Prefix + SuffixEnable
    TraefikPort = Prefix + SuffixPort
    TraefikPortName = Prefix + SuffixPortName
    TraefikPortIndex = Prefix + SuffixPortIndex
    TraefikProtocol = Prefix + SuffixProtocol
    TraefikTags = Prefix + SuffixTags
    TraefikWeight = Prefix + SuffixWeight
    TraefikBackend = Prefix + SuffixBackend
    TraefikBackendID = Prefix + SuffixBackendID
    TraefikBackendCircuitBreaker = Prefix + SuffixBackendCircuitBreaker
    TraefikBackendCircuitBreakerExpression = Prefix + SuffixBackendCircuitBreakerExpression
    TraefikBackendHealthCheckScheme = Prefix + SuffixBackendHealthCheckScheme
    TraefikBackendHealthCheckPath = Prefix + SuffixBackendHealthCheckPath
    TraefikBackendHealthCheckPort = Prefix + SuffixBackendHealthCheckPort
    TraefikBackendHealthCheckInterval = Prefix + SuffixBackendHealthCheckInterval
    TraefikBackendHealthCheckTimeout = Prefix + SuffixBackendHealthCheckTimeout
    TraefikBackendHealthCheckHostname = Prefix + SuffixBackendHealthCheckHostname
    TraefikBackendHealthCheckHeaders = Prefix + SuffixBackendHealthCheckHeaders
    TraefikBackendLoadBalancer = Prefix + SuffixBackendLoadBalancer
    TraefikBackendLoadBalancerMethod = Prefix + SuffixBackendLoadBalancerMethod
    TraefikBackendLoadBalancerStickiness = Prefix + SuffixBackendLoadBalancerStickiness
    TraefikBackendLoadBalancerStickinessCookieName = Prefix + SuffixBackendLoadBalancerStickinessCookieName
    TraefikBackendMaxConnAmount = Prefix + SuffixBackendMaxConnAmount
    TraefikBackendMaxConnExtractorFunc = Prefix + SuffixBackendMaxConnExtractorFunc
    TraefikBackendBuffering = Prefix + SuffixBackendBuffering
    TraefikBackendResponseForwardingFlushInterval = Prefix + SuffixBackendResponseForwardingFlushInterval
    TraefikBackendBufferingMaxRequestBodyBytes = Prefix + SuffixBackendBufferingMaxRequestBodyBytes
    TraefikBackendBufferingMemRequestBodyBytes = Prefix + SuffixBackendBufferingMemRequestBodyBytes
    TraefikBackendBufferingMaxResponseBodyBytes = Prefix + SuffixBackendBufferingMaxResponseBodyBytes
    TraefikBackendBufferingMemResponseBodyBytes = Prefix + SuffixBackendBufferingMemResponseBodyBytes
    TraefikBackendBufferingRetryExpression = Prefix + SuffixBackendBufferingRetryExpression
    TraefikFrontend = Prefix + SuffixFrontend
    TraefikFrontendAuth = Prefix + SuffixFrontendAuth
    TraefikFrontendAuthBasic = Prefix + SuffixFrontendAuthBasic
    TraefikFrontendAuthBasicRealm = Prefix + SuffixFrontendAuthBasicRealm
    TraefikFrontendAuthBasicRemoveHeader = Prefix + SuffixFrontendAuthBasicRemoveHeader
    TraefikFrontendAuthBasicUsers = Prefix + SuffixFrontendAuthBasicUsers
    TraefikFrontendAuthBasicUsersFile = Prefix + SuffixFrontendAuthBasicUsersFile
    TraefikFrontendAuthDigest = Prefix + SuffixFrontendAuthDigest
    TraefikFrontendAuthDigestRemoveHeader = Prefix + SuffixFrontendAuthDigestRemoveHeader
    TraefikFrontendAuthDigestUsers = Prefix + SuffixFrontendAuthDigestUsers
    TraefikFrontendAuthDigestUsersFile = Prefix + SuffixFrontendAuthDigestUsersFile
    TraefikFrontendAuthForward = Prefix + SuffixFrontendAuthForward
    TraefikFrontendAuthForwardAddress = Prefix + SuffixFrontendAuthForwardAddress
    TraefikFrontendAuthForwardAuthResponseHeaders = Prefix + SuffixFrontendAuthForwardAuthResponseHeaders
    TraefikFrontendAuthForwardTLS = Prefix + SuffixFrontendAuthForwardTLS
    TraefikFrontendAuthForwardTLSCa = Prefix + SuffixFrontendAuthForwardTLSCa
    TraefikFrontendAuthForwardTLSCaOptional = Prefix + SuffixFrontendAuthForwardTLSCaOptional
    TraefikFrontendAuthForwardTLSCert = Prefix + SuffixFrontendAuthForwardTLSCert
    TraefikFrontendAuthForwardTLSInsecureSkipVerify = Prefix + SuffixFrontendAuthForwardTLSInsecureSkipVerify
    TraefikFrontendAuthForwardTLSKey = Prefix + SuffixFrontendAuthForwardTLSKey
    TraefikFrontendAuthForwardTrustForwardHeader = Prefix + SuffixFrontendAuthForwardTrustForwardHeader
    TraefikFrontendAuthHeaderField = Prefix + SuffixFrontendAuthHeaderField
    TraefikFrontendEntryPoints = Prefix + SuffixFrontendEntryPoints
    TraefikFrontendPassHostHeader = Prefix + SuffixFrontendPassHostHeader
    TraefikFrontendPassTLSClientCert = Prefix + SuffixFrontendPassTLSClientCert
    TraefikFrontendPassTLSClientCertPem = Prefix + SuffixFrontendPassTLSClientCertPem
    TraefikFrontendPassTLSClientCertInfos = Prefix + SuffixFrontendPassTLSClientCertInfos
    TraefikFrontendPassTLSClientCertInfosNotAfter = Prefix + SuffixFrontendPassTLSClientCertInfosNotAfter
    TraefikFrontendPassTLSClientCertInfosNotBefore = Prefix + SuffixFrontendPassTLSClientCertInfosNotBefore
    TraefikFrontendPassTLSClientCertInfosSans = Prefix + SuffixFrontendPassTLSClientCertInfosSans
    TraefikFrontendPassTLSClientCertInfosSubject = Prefix + SuffixFrontendPassTLSClientCertInfosSubject
    TraefikFrontendPassTLSClientCertInfosSubjectCommonName = Prefix + SuffixFrontendPassTLSClientCertInfosSubjectCommonName
    TraefikFrontendPassTLSClientCertInfosSubjectCountry = Prefix + SuffixFrontendPassTLSClientCertInfosSubjectCountry
    TraefikFrontendPassTLSClientCertInfosSubjectLocality = Prefix + SuffixFrontendPassTLSClientCertInfosSubjectLocality
    TraefikFrontendPassTLSClientCertInfosSubjectOrganization = Prefix + SuffixFrontendPassTLSClientCertInfosSubjectOrganization
    TraefikFrontendPassTLSClientCertInfosSubjectProvince = Prefix + SuffixFrontendPassTLSClientCertInfosSubjectProvince
    TraefikFrontendPassTLSClientCertInfosSubjectSerialNumber = Prefix + SuffixFrontendPassTLSClientCertInfosSubjectSerialNumber
    TraefikFrontendPassTLSCert = Prefix + SuffixFrontendPassTLSCert // Deprecated
    TraefikFrontendPriority = Prefix + SuffixFrontendPriority
    TraefikFrontendRateLimitExtractorFunc = Prefix + SuffixFrontendRateLimitExtractorFunc
    TraefikFrontendRedirectEntryPoint = Prefix + SuffixFrontendRedirectEntryPoint
    TraefikFrontendRedirectRegex = Prefix + SuffixFrontendRedirectRegex
    TraefikFrontendRedirectReplacement = Prefix + SuffixFrontendRedirectReplacement
    TraefikFrontendRedirectPermanent = Prefix + SuffixFrontendRedirectPermanent
    TraefikFrontendRule = Prefix + SuffixFrontendRule
    TraefikFrontendWhiteListSourceRange = Prefix + SuffixFrontendWhiteListSourceRange
    TraefikFrontendWhiteListIPStrategy = Prefix + SuffixFrontendWhiteListIPStrategy
    TraefikFrontendWhiteListIPStrategyDepth = Prefix + SuffixFrontendWhiteListIPStrategyDepth
    TraefikFrontendWhiteListIPStrategyExcludedIPS = Prefix + SuffixFrontendWhiteListIPStrategyExcludedIPS
    TraefikFrontendRequestHeaders = Prefix + SuffixFrontendRequestHeaders
    TraefikFrontendResponseHeaders = Prefix + SuffixFrontendResponseHeaders
    TraefikFrontendAllowedHosts = Prefix + SuffixFrontendHeadersAllowedHosts
    TraefikFrontendHostsProxyHeaders = Prefix + SuffixFrontendHeadersHostsProxyHeaders
    TraefikFrontendSSLForceHost = Prefix + SuffixFrontendHeadersSSLForceHost
    TraefikFrontendSSLRedirect = Prefix + SuffixFrontendHeadersSSLRedirect
    TraefikFrontendSSLTemporaryRedirect = Prefix + SuffixFrontendHeadersSSLTemporaryRedirect
    TraefikFrontendSSLHost = Prefix + SuffixFrontendHeadersSSLHost
    TraefikFrontendSSLProxyHeaders = Prefix + SuffixFrontendHeadersSSLProxyHeaders
    TraefikFrontendSTSSeconds = Prefix + SuffixFrontendHeadersSTSSeconds
    TraefikFrontendSTSIncludeSubdomains = Prefix + SuffixFrontendHeadersSTSIncludeSubdomains
    TraefikFrontendSTSPreload = Prefix + SuffixFrontendHeadersSTSPreload
    TraefikFrontendForceSTSHeader = Prefix + SuffixFrontendHeadersForceSTSHeader
    TraefikFrontendFrameDeny = Prefix + SuffixFrontendHeadersFrameDeny
    TraefikFrontendCustomFrameOptionsValue = Prefix + SuffixFrontendHeadersCustomFrameOptionsValue
    TraefikFrontendContentTypeNosniff = Prefix + SuffixFrontendHeadersContentTypeNosniff
    TraefikFrontendBrowserXSSFilter = Prefix + SuffixFrontendHeadersBrowserXSSFilter
    TraefikFrontendCustomBrowserXSSValue = Prefix + SuffixFrontendHeadersCustomBrowserXSSValue
    TraefikFrontendContentSecurityPolicy = Prefix + SuffixFrontendHeadersContentSecurityPolicy
    TraefikFrontendPublicKey = Prefix + SuffixFrontendHeadersPublicKey
    TraefikFrontendReferrerPolicy = Prefix + SuffixFrontendHeadersReferrerPolicy
    TraefikFrontendIsDevelopment = Prefix + SuffixFrontendHeadersIsDevelopment
    BaseFrontendErrorPage = "frontend.errors."
    SuffixErrorPageBackend = "backend"
    SuffixErrorPageQuery = "query"
    SuffixErrorPageStatus = "status"
    BaseFrontendRateLimit = "frontend.rateLimit.rateSet."
    SuffixRateLimitPeriod = "period"
    SuffixRateLimitAverage = "average"
    SuffixRateLimitBurst = "burst"
)
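A quick sketch of how the error-page names compose: those labels are built from the prefix, the base, a user-chosen set name ("foo" below is arbitrary), and a field suffix, which is exactly the shape RegexpFrontendErrorPage extracts.

package main

import (
    "fmt"
    "regexp"
)

func main() {
    // Values copied from the constant block above.
    const name = "traefik." + "frontend.errors." + "foo." + "status"

    re := regexp.MustCompile(`^traefik\.frontend\.errors\.(?P<name>[^ .]+)\.(?P<field>[^ .]+)$`)
    fmt.Println(name)                        // traefik.frontend.errors.foo.status
    fmt.Println(re.FindStringSubmatch(name)) // [traefik.frontend.errors.foo.status foo status]
}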
@ -1,417 +0,0 @@
|
|||
package label
|
||||
|
||||
import (
|
||||
"math"
|
||||
"regexp"
|
||||
"strconv"
|
||||
"strings"
|
||||
|
||||
"github.com/containous/flaeg/parse"
|
||||
"github.com/containous/traefik/log"
|
||||
"github.com/containous/traefik/types"
|
||||
)
|
||||
|
||||
// GetWhiteList Create white list from labels
|
||||
func GetWhiteList(labels map[string]string) *types.WhiteList {
|
||||
ranges := GetSliceStringValue(labels, TraefikFrontendWhiteListSourceRange)
|
||||
if len(ranges) == 0 {
|
||||
return nil
|
||||
}
|
||||
|
||||
return &types.WhiteList{
|
||||
SourceRange: ranges,
|
||||
IPStrategy: getIPStrategy(labels),
|
||||
}
|
||||
}
|
||||
|
||||
func getIPStrategy(labels map[string]string) *types.IPStrategy {
|
||||
ipStrategy := GetBoolValue(labels, TraefikFrontendWhiteListIPStrategy, false)
|
||||
depth := GetIntValue(labels, TraefikFrontendWhiteListIPStrategyDepth, 0)
|
||||
excludedIPs := GetSliceStringValue(labels, TraefikFrontendWhiteListIPStrategyExcludedIPS)
|
||||
|
||||
if depth == 0 && len(excludedIPs) == 0 && !ipStrategy {
|
||||
return nil
|
||||
}
|
||||
|
||||
return &types.IPStrategy{
|
||||
Depth: depth,
|
||||
ExcludedIPs: excludedIPs,
|
||||
}
|
||||
}
|
||||
|
||||
// GetRedirect Create redirect from labels
|
||||
func GetRedirect(labels map[string]string) *types.Redirect {
|
||||
permanent := GetBoolValue(labels, TraefikFrontendRedirectPermanent, false)
|
||||
|
||||
if Has(labels, TraefikFrontendRedirectEntryPoint) {
|
||||
return &types.Redirect{
|
||||
EntryPoint: GetStringValue(labels, TraefikFrontendRedirectEntryPoint, ""),
|
||||
Permanent: permanent,
|
||||
}
|
||||
}
|
||||
|
||||
if Has(labels, TraefikFrontendRedirectRegex) &&
|
||||
Has(labels, TraefikFrontendRedirectReplacement) {
|
||||
return &types.Redirect{
|
||||
Regex: GetStringValue(labels, TraefikFrontendRedirectRegex, ""),
|
||||
Replacement: GetStringValue(labels, TraefikFrontendRedirectReplacement, ""),
|
||||
Permanent: permanent,
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// GetTLSClientCert create TLS client header configuration from labels
|
||||
func GetTLSClientCert(labels map[string]string) *types.TLSClientHeaders {
|
||||
if !HasPrefix(labels, TraefikFrontendPassTLSClientCert) {
|
||||
return nil
|
||||
}
|
||||
|
||||
tlsClientHeaders := &types.TLSClientHeaders{
|
||||
PEM: GetBoolValue(labels, TraefikFrontendPassTLSClientCertPem, false),
|
||||
}
|
||||
|
||||
if HasPrefix(labels, TraefikFrontendPassTLSClientCertInfos) {
|
||||
infos := &types.TLSClientCertificateInfos{
|
||||
NotAfter: GetBoolValue(labels, TraefikFrontendPassTLSClientCertInfosNotAfter, false),
|
||||
NotBefore: GetBoolValue(labels, TraefikFrontendPassTLSClientCertInfosNotBefore, false),
|
||||
Sans: GetBoolValue(labels, TraefikFrontendPassTLSClientCertInfosSans, false),
|
||||
}
|
||||
|
||||
if HasPrefix(labels, TraefikFrontendPassTLSClientCertInfosSubject) {
|
||||
subject := &types.TLSCLientCertificateSubjectInfos{
|
||||
CommonName: GetBoolValue(labels, TraefikFrontendPassTLSClientCertInfosSubjectCommonName, false),
|
||||
Country: GetBoolValue(labels, TraefikFrontendPassTLSClientCertInfosSubjectCountry, false),
|
||||
Locality: GetBoolValue(labels, TraefikFrontendPassTLSClientCertInfosSubjectLocality, false),
|
||||
Organization: GetBoolValue(labels, TraefikFrontendPassTLSClientCertInfosSubjectOrganization, false),
|
||||
Province: GetBoolValue(labels, TraefikFrontendPassTLSClientCertInfosSubjectProvince, false),
|
||||
SerialNumber: GetBoolValue(labels, TraefikFrontendPassTLSClientCertInfosSubjectSerialNumber, false),
|
||||
}
|
||||
infos.Subject = subject
|
||||
}
|
||||
tlsClientHeaders.Infos = infos
|
||||
}
|
||||
return tlsClientHeaders
|
||||
}
|
||||
|
||||
// GetAuth Create auth from labels
|
||||
func GetAuth(labels map[string]string) *types.Auth {
|
||||
if !HasPrefix(labels, TraefikFrontendAuth) {
|
||||
return nil
|
||||
}
|
||||
|
||||
auth := &types.Auth{
|
||||
HeaderField: GetStringValue(labels, TraefikFrontendAuthHeaderField, ""),
|
||||
}
|
||||
|
||||
if HasPrefix(labels, TraefikFrontendAuthBasic) {
|
||||
auth.Basic = getAuthBasic(labels)
|
||||
} else if HasPrefix(labels, TraefikFrontendAuthDigest) {
|
||||
auth.Digest = getAuthDigest(labels)
|
||||
} else if HasPrefix(labels, TraefikFrontendAuthForward) {
|
||||
auth.Forward = getAuthForward(labels)
|
||||
}
|
||||
|
||||
return auth
|
||||
}
|
||||
|
||||
// getAuthBasic Create Basic Auth from labels
|
||||
func getAuthBasic(labels map[string]string) *types.Basic {
|
||||
basicAuth := &types.Basic{
|
||||
Realm: GetStringValue(labels, TraefikFrontendAuthBasicRealm, ""),
|
||||
UsersFile: GetStringValue(labels, TraefikFrontendAuthBasicUsersFile, ""),
|
||||
RemoveHeader: GetBoolValue(labels, TraefikFrontendAuthBasicRemoveHeader, false),
|
||||
}
|
||||
|
||||
// backward compatibility
|
||||
if Has(labels, TraefikFrontendAuthBasic) {
|
||||
basicAuth.Users = GetSliceStringValue(labels, TraefikFrontendAuthBasic)
|
||||
log.Warnf("Deprecated configuration found: %s. Please use %s.", TraefikFrontendAuthBasic, TraefikFrontendAuthBasicUsers)
|
||||
} else {
|
||||
basicAuth.Users = GetSliceStringValue(labels, TraefikFrontendAuthBasicUsers)
|
||||
}
|
||||
|
||||
return basicAuth
|
||||
}
|
||||
|
||||
// getAuthDigest Create Digest Auth from labels
|
||||
func getAuthDigest(labels map[string]string) *types.Digest {
|
||||
return &types.Digest{
|
||||
Users: GetSliceStringValue(labels, TraefikFrontendAuthDigestUsers),
|
||||
UsersFile: GetStringValue(labels, TraefikFrontendAuthDigestUsersFile, ""),
|
||||
RemoveHeader: GetBoolValue(labels, TraefikFrontendAuthDigestRemoveHeader, false),
|
||||
}
|
||||
}
|
||||
|
||||
// getAuthForward Create Forward Auth from labels
|
||||
func getAuthForward(labels map[string]string) *types.Forward {
|
||||
forwardAuth := &types.Forward{
|
||||
Address: GetStringValue(labels, TraefikFrontendAuthForwardAddress, ""),
|
||||
AuthResponseHeaders: GetSliceStringValue(labels, TraefikFrontendAuthForwardAuthResponseHeaders),
|
||||
TrustForwardHeader: GetBoolValue(labels, TraefikFrontendAuthForwardTrustForwardHeader, false),
|
||||
}
|
||||
|
||||
// TLS configuration
|
||||
if HasPrefix(labels, TraefikFrontendAuthForwardTLS) {
|
||||
forwardAuth.TLS = &types.ClientTLS{
|
||||
CA: GetStringValue(labels, TraefikFrontendAuthForwardTLSCa, ""),
|
||||
CAOptional: GetBoolValue(labels, TraefikFrontendAuthForwardTLSCaOptional, false),
|
||||
Cert: GetStringValue(labels, TraefikFrontendAuthForwardTLSCert, ""),
|
||||
InsecureSkipVerify: GetBoolValue(labels, TraefikFrontendAuthForwardTLSInsecureSkipVerify, false),
|
||||
Key: GetStringValue(labels, TraefikFrontendAuthForwardTLSKey, ""),
|
||||
}
|
||||
}
|
||||
|
||||
return forwardAuth
|
||||
}
|
||||
|
||||
// GetErrorPages Create error pages from labels
|
||||
func GetErrorPages(labels map[string]string) map[string]*types.ErrorPage {
|
||||
prefix := Prefix + BaseFrontendErrorPage
|
||||
return ParseErrorPages(labels, prefix, RegexpFrontendErrorPage)
|
||||
}
|
||||
|
||||
// ParseErrorPages parse error pages to create ErrorPage struct
|
||||
func ParseErrorPages(labels map[string]string, labelPrefix string, labelRegex *regexp.Regexp) map[string]*types.ErrorPage {
|
||||
var errorPages map[string]*types.ErrorPage
|
||||
|
||||
for lblName, value := range labels {
|
||||
if strings.HasPrefix(lblName, labelPrefix) {
|
||||
submatch := labelRegex.FindStringSubmatch(lblName)
|
||||
if len(submatch) != 3 {
|
||||
log.Errorf("Invalid page error label: %s, sub-match: %v", lblName, submatch)
|
||||
continue
|
||||
}
|
||||
|
||||
if errorPages == nil {
|
||||
errorPages = make(map[string]*types.ErrorPage)
|
||||
}
|
||||
|
||||
pageName := submatch[1]
|
||||
|
||||
ep, ok := errorPages[pageName]
|
||||
if !ok {
|
||||
ep = &types.ErrorPage{}
|
||||
errorPages[pageName] = ep
|
||||
}
|
||||
|
||||
switch submatch[2] {
|
||||
case SuffixErrorPageStatus:
|
||||
ep.Status = SplitAndTrimString(value, ",")
|
||||
case SuffixErrorPageQuery:
|
||||
ep.Query = value
|
||||
case SuffixErrorPageBackend:
|
||||
ep.Backend = value
|
||||
default:
|
||||
log.Errorf("Invalid page error label: %s", lblName)
|
||||
continue
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return errorPages
|
||||
}
|
||||
|
||||
// GetRateLimit Create rate limits from labels
|
||||
func GetRateLimit(labels map[string]string) *types.RateLimit {
|
||||
extractorFunc := GetStringValue(labels, TraefikFrontendRateLimitExtractorFunc, "")
|
||||
if len(extractorFunc) == 0 {
|
||||
return nil
|
||||
}
|
||||
|
||||
prefix := Prefix + BaseFrontendRateLimit
|
||||
limits := ParseRateSets(labels, prefix, RegexpFrontendRateLimit)
|
||||
|
||||
return &types.RateLimit{
|
||||
ExtractorFunc: extractorFunc,
|
||||
RateSet: limits,
|
||||
}
|
||||
}
|
||||
|
||||
// ParseRateSets parse rate limits to create Rate struct
|
||||
func ParseRateSets(labels map[string]string, labelPrefix string, labelRegex *regexp.Regexp) map[string]*types.Rate {
|
||||
var rateSets map[string]*types.Rate
|
||||
|
||||
for lblName, rawValue := range labels {
|
||||
if strings.HasPrefix(lblName, labelPrefix) && len(rawValue) > 0 {
|
||||
submatch := labelRegex.FindStringSubmatch(lblName)
|
||||
if len(submatch) != 3 {
|
||||
log.Errorf("Invalid rate limit label: %s, sub-match: %v", lblName, submatch)
|
||||
continue
|
||||
}
|
||||
|
||||
if rateSets == nil {
|
||||
rateSets = make(map[string]*types.Rate)
|
||||
}
|
||||
|
||||
limitName := submatch[1]
|
||||
|
||||
ep, ok := rateSets[limitName]
|
||||
if !ok {
|
||||
ep = &types.Rate{}
|
||||
rateSets[limitName] = ep
|
||||
}
|
||||
|
||||
switch submatch[2] {
|
||||
case "period":
|
||||
var d parse.Duration
|
||||
err := d.Set(rawValue)
|
||||
if err != nil {
|
||||
log.Errorf("Unable to parse %q: %q. %v", lblName, rawValue, err)
|
||||
continue
|
||||
}
|
||||
ep.Period = d
|
||||
case "average":
|
||||
value, err := strconv.ParseInt(rawValue, 10, 64)
|
||||
if err != nil {
|
||||
log.Errorf("Unable to parse %q: %q. %v", lblName, rawValue, err)
|
||||
continue
|
||||
}
|
||||
ep.Average = value
|
||||
case "burst":
|
||||
value, err := strconv.ParseInt(rawValue, 10, 64)
|
||||
if err != nil {
|
||||
log.Errorf("Unable to parse %q: %q. %v", lblName, rawValue, err)
|
||||
continue
|
||||
}
|
||||
ep.Burst = value
|
||||
default:
|
||||
log.Errorf("Invalid rate limit label: %s", lblName)
|
||||
continue
|
||||
}
|
||||
}
|
||||
}
|
||||
return rateSets
|
||||
}
|
||||
|
||||
// GetHeaders Create headers from labels
|
||||
func GetHeaders(labels map[string]string) *types.Headers {
|
||||
headers := &types.Headers{
|
||||
CustomRequestHeaders: GetMapValue(labels, TraefikFrontendRequestHeaders),
|
||||
CustomResponseHeaders: GetMapValue(labels, TraefikFrontendResponseHeaders),
|
||||
SSLProxyHeaders: GetMapValue(labels, TraefikFrontendSSLProxyHeaders),
|
||||
AllowedHosts: GetSliceStringValue(labels, TraefikFrontendAllowedHosts),
|
||||
HostsProxyHeaders: GetSliceStringValue(labels, TraefikFrontendHostsProxyHeaders),
|
||||
STSSeconds: GetInt64Value(labels, TraefikFrontendSTSSeconds, 0),
|
||||
SSLRedirect: GetBoolValue(labels, TraefikFrontendSSLRedirect, false),
|
||||
SSLTemporaryRedirect: GetBoolValue(labels, TraefikFrontendSSLTemporaryRedirect, false),
|
||||
SSLForceHost: GetBoolValue(labels, TraefikFrontendSSLForceHost, false),
|
||||
STSIncludeSubdomains: GetBoolValue(labels, TraefikFrontendSTSIncludeSubdomains, false),
|
||||
        STSPreload: GetBoolValue(labels, TraefikFrontendSTSPreload, false),
        ForceSTSHeader: GetBoolValue(labels, TraefikFrontendForceSTSHeader, false),
        FrameDeny: GetBoolValue(labels, TraefikFrontendFrameDeny, false),
        ContentTypeNosniff: GetBoolValue(labels, TraefikFrontendContentTypeNosniff, false),
        BrowserXSSFilter: GetBoolValue(labels, TraefikFrontendBrowserXSSFilter, false),
        IsDevelopment: GetBoolValue(labels, TraefikFrontendIsDevelopment, false),
        SSLHost: GetStringValue(labels, TraefikFrontendSSLHost, ""),
        CustomFrameOptionsValue: GetStringValue(labels, TraefikFrontendCustomFrameOptionsValue, ""),
        ContentSecurityPolicy: GetStringValue(labels, TraefikFrontendContentSecurityPolicy, ""),
        PublicKey: GetStringValue(labels, TraefikFrontendPublicKey, ""),
        ReferrerPolicy: GetStringValue(labels, TraefikFrontendReferrerPolicy, ""),
        CustomBrowserXSSValue: GetStringValue(labels, TraefikFrontendCustomBrowserXSSValue, ""),
    }

    if !headers.HasSecureHeadersDefined() && !headers.HasCustomHeadersDefined() {
        return nil
    }

    return headers
}

// GetMaxConn Create max connection from labels
func GetMaxConn(labels map[string]string) *types.MaxConn {
    amount := GetInt64Value(labels, TraefikBackendMaxConnAmount, math.MinInt64)
    extractorFunc := GetStringValue(labels, TraefikBackendMaxConnExtractorFunc, DefaultBackendMaxconnExtractorFunc)

    if amount == math.MinInt64 || len(extractorFunc) == 0 {
        return nil
    }

    return &types.MaxConn{
        Amount: amount,
        ExtractorFunc: extractorFunc,
    }
}

// GetHealthCheck Create health check from labels
func GetHealthCheck(labels map[string]string) *types.HealthCheck {
    path := GetStringValue(labels, TraefikBackendHealthCheckPath, "")
    if len(path) == 0 {
        return nil
    }

    scheme := GetStringValue(labels, TraefikBackendHealthCheckScheme, "")
    port := GetIntValue(labels, TraefikBackendHealthCheckPort, DefaultBackendHealthCheckPort)
    interval := GetStringValue(labels, TraefikBackendHealthCheckInterval, "")
    timeout := GetStringValue(labels, TraefikBackendHealthCheckTimeout, "")
    hostname := GetStringValue(labels, TraefikBackendHealthCheckHostname, "")
    headers := GetMapValue(labels, TraefikBackendHealthCheckHeaders)

    return &types.HealthCheck{
        Scheme: scheme,
        Path: path,
        Port: port,
        Interval: interval,
        Timeout: timeout,
        Hostname: hostname,
        Headers: headers,
    }
}

// GetResponseForwarding Create ResponseForwarding from labels
func GetResponseForwarding(labels map[string]string) *types.ResponseForwarding {
    if !HasPrefix(labels, TraefikBackendResponseForwardingFlushInterval) {
        return nil
    }

    value := GetStringValue(labels, TraefikBackendResponseForwardingFlushInterval, "0")

    return &types.ResponseForwarding{
        FlushInterval: value,
    }
}

// GetBuffering Create buffering from labels
func GetBuffering(labels map[string]string) *types.Buffering {
    if !HasPrefix(labels, TraefikBackendBuffering) {
        return nil
    }

    return &types.Buffering{
        MaxRequestBodyBytes: GetInt64Value(labels, TraefikBackendBufferingMaxRequestBodyBytes, 0),
        MaxResponseBodyBytes: GetInt64Value(labels, TraefikBackendBufferingMaxResponseBodyBytes, 0),
        MemRequestBodyBytes: GetInt64Value(labels, TraefikBackendBufferingMemRequestBodyBytes, 0),
        MemResponseBodyBytes: GetInt64Value(labels, TraefikBackendBufferingMemResponseBodyBytes, 0),
        RetryExpression: GetStringValue(labels, TraefikBackendBufferingRetryExpression, ""),
    }
}

// GetCircuitBreaker Create circuit breaker from labels
func GetCircuitBreaker(labels map[string]string) *types.CircuitBreaker {
    circuitBreaker := GetStringValue(labels, TraefikBackendCircuitBreakerExpression, "")
    if len(circuitBreaker) == 0 {
        return nil
    }
    return &types.CircuitBreaker{Expression: circuitBreaker}
}

// GetLoadBalancer Create load balancer from labels
func GetLoadBalancer(labels map[string]string) *types.LoadBalancer {
    if !HasPrefix(labels, TraefikBackendLoadBalancer) {
        return nil
    }

    method := GetStringValue(labels, TraefikBackendLoadBalancerMethod, DefaultBackendLoadBalancerMethod)

    lb := &types.LoadBalancer{
        Method: method,
    }

    if GetBoolValue(labels, TraefikBackendLoadBalancerStickiness, false) {
        cookieName := GetStringValue(labels, TraefikBackendLoadBalancerStickinessCookieName, DefaultBackendLoadbalancerStickinessCookieName)
        lb.Stickiness = &types.Stickiness{CookieName: cookieName}
    }

    return lb
}
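For orientation, a minimal sketch (not part of this commit) of how a provider could combine the helpers removed above to assemble a backend configuration from a label map. The wrapper function name is hypothetical, and the types.Backend field names are assumed to match those used elsewhere in this codebase.

// buildBackendFromLabels is an illustrative sketch, not code from this diff.
func buildBackendFromLabels(labels map[string]string) *types.Backend {
    return &types.Backend{
        LoadBalancer: GetLoadBalancer(labels), // nil unless traefik.backend.loadbalancer.* labels are present
        MaxConn: GetMaxConn(labels), // nil unless the amount label is set and an extractor func resolves
        HealthCheck: GetHealthCheck(labels), // nil unless a health check path label is set
        Buffering: GetBuffering(labels), // nil unless traefik.backend.buffering.* labels are present
        CircuitBreaker: GetCircuitBreaker(labels), // nil unless an expression label is set
    }
}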
@@ -1,996 +0,0 @@
package label

import (
    "testing"
    "time"

    "github.com/containous/flaeg/parse"
    "github.com/containous/traefik/types"
    "github.com/stretchr/testify/assert"
)

func TestParseErrorPages(t *testing.T) {
    testCases := []struct {
        desc string
        labels map[string]string
        expected map[string]*types.ErrorPage
    }{
        {
            desc: "2 errors pages",
            labels: map[string]string{
                Prefix + BaseFrontendErrorPage + "foo." + SuffixErrorPageStatus: "404",
                Prefix + BaseFrontendErrorPage + "foo." + SuffixErrorPageBackend: "foo_backend",
                Prefix + BaseFrontendErrorPage + "foo." + SuffixErrorPageQuery: "foo_query",
                Prefix + BaseFrontendErrorPage + "bar." + SuffixErrorPageStatus: "500,600",
                Prefix + BaseFrontendErrorPage + "bar." + SuffixErrorPageBackend: "bar_backend",
                Prefix + BaseFrontendErrorPage + "bar." + SuffixErrorPageQuery: "bar_query",
            },
            expected: map[string]*types.ErrorPage{
                "foo": {
                    Status: []string{"404"},
                    Query: "foo_query",
                    Backend: "foo_backend",
                },
                "bar": {
                    Status: []string{"500", "600"},
                    Query: "bar_query",
                    Backend: "bar_backend",
                },
            },
        },
        {
            desc: "only status field",
            labels: map[string]string{
                Prefix + BaseFrontendErrorPage + "foo." + SuffixErrorPageStatus: "404",
            },
            expected: map[string]*types.ErrorPage{
                "foo": {
                    Status: []string{"404"},
                },
            },
        },
        {
            desc: "invalid field",
            labels: map[string]string{
                Prefix + BaseFrontendErrorPage + "foo." + "courgette": "404",
            },
            expected: map[string]*types.ErrorPage{"foo": {}},
        },
        {
            desc: "no error pages labels",
            labels: map[string]string{},
            expected: nil,
        },
    }

    for _, test := range testCases {
        test := test
        t.Run(test.desc, func(t *testing.T) {
            t.Parallel()

            pages := ParseErrorPages(test.labels, Prefix+BaseFrontendErrorPage, RegexpFrontendErrorPage)

            assert.EqualValues(t, test.expected, pages)
        })
    }
}

func TestParseRateSets(t *testing.T) {
    testCases := []struct {
        desc string
        labels map[string]string
        expected map[string]*types.Rate
    }{
        {
            desc: "2 rate limits",
            labels: map[string]string{
                Prefix + BaseFrontendRateLimit + "foo." + SuffixRateLimitPeriod: "6",
                Prefix + BaseFrontendRateLimit + "foo." + SuffixRateLimitAverage: "12",
                Prefix + BaseFrontendRateLimit + "foo." + SuffixRateLimitBurst: "18",
                Prefix + BaseFrontendRateLimit + "bar." + SuffixRateLimitPeriod: "3",
                Prefix + BaseFrontendRateLimit + "bar." + SuffixRateLimitAverage: "6",
                Prefix + BaseFrontendRateLimit + "bar." + SuffixRateLimitBurst: "9",
            },
            expected: map[string]*types.Rate{
                "foo": {
                    Period: parse.Duration(6 * time.Second),
                    Average: 12,
                    Burst: 18,
                },
                "bar": {
                    Period: parse.Duration(3 * time.Second),
                    Average: 6,
                    Burst: 9,
                },
            },
        },
        {
            desc: "no rate limits labels",
            labels: map[string]string{},
            expected: nil,
        },
    }

    for _, test := range testCases {
        test := test
        t.Run(test.desc, func(t *testing.T) {
            t.Parallel()

            rateSets := ParseRateSets(test.labels, Prefix+BaseFrontendRateLimit, RegexpFrontendRateLimit)

            assert.EqualValues(t, test.expected, rateSets)
        })
    }
}

func TestWhiteList(t *testing.T) {
    testCases := []struct {
        desc string
        labels map[string]string
        expected *types.WhiteList
    }{
        {
            desc: "should return nil when no white list labels",
            labels: map[string]string{},
            expected: nil,
        },
        {
            desc: "should return a struct when only range",
            labels: map[string]string{
                TraefikFrontendWhiteListSourceRange: "10.10.10.10",
            },
            expected: &types.WhiteList{
                SourceRange: []string{
                    "10.10.10.10",
                },
            },
        },
        {
            desc: "should return a struct with ip strategy depth",
            labels: map[string]string{
                TraefikFrontendWhiteListSourceRange: "10.10.10.10",
                TraefikFrontendWhiteListIPStrategyDepth: "5",
            },
            expected: &types.WhiteList{
                SourceRange: []string{
                    "10.10.10.10",
                },
                IPStrategy: &types.IPStrategy{
                    Depth: 5,
                },
            },
        },
        {
            desc: "should return a struct with ip strategy depth and excluded ips",
            labels: map[string]string{
                TraefikFrontendWhiteListSourceRange: "10.10.10.10",
                TraefikFrontendWhiteListIPStrategyDepth: "5",
                TraefikFrontendWhiteListIPStrategyExcludedIPS: "10.10.10.10,10.10.10.11",
            },
            expected: &types.WhiteList{
                SourceRange: []string{
                    "10.10.10.10",
                },
                IPStrategy: &types.IPStrategy{
                    Depth: 5,
                    ExcludedIPs: []string{
                        "10.10.10.10",
                        "10.10.10.11",
                    },
                },
            },
        },
        {
            desc: "should return a struct with ip strategy (remoteAddr) with no depth and no excludedIPs",
            labels: map[string]string{
                TraefikFrontendWhiteListSourceRange: "10.10.10.10",
                TraefikFrontendWhiteListIPStrategy: "true",
            },
            expected: &types.WhiteList{
                SourceRange: []string{
                    "10.10.10.10",
                },
                IPStrategy: &types.IPStrategy{
                    Depth: 0,
                    ExcludedIPs: nil,
                },
            },
        },
        {
            desc: "should return a struct with ip strategy with depth",
            labels: map[string]string{
                TraefikFrontendWhiteListSourceRange: "10.10.10.10",
                TraefikFrontendWhiteListIPStrategy: "true",
                TraefikFrontendWhiteListIPStrategyDepth: "5",
            },
            expected: &types.WhiteList{
                SourceRange: []string{
                    "10.10.10.10",
                },
                IPStrategy: &types.IPStrategy{
                    Depth: 5,
                    ExcludedIPs: nil,
                },
            },
        },
    }

    for _, test := range testCases {
        test := test
        t.Run(test.desc, func(t *testing.T) {
            t.Parallel()

            actual := GetWhiteList(test.labels)
            assert.Equal(t, test.expected, actual)
        })
    }
}

func TestGetCircuitBreaker(t *testing.T) {
    testCases := []struct {
        desc string
        labels map[string]string
        expected *types.CircuitBreaker
    }{
        {
            desc: "should return nil when no CB label",
            labels: map[string]string{},
            expected: nil,
        },
        {
            desc: "should return a struct when CB label is set",
            labels: map[string]string{
                TraefikBackendCircuitBreakerExpression: "NetworkErrorRatio() > 0.5",
            },
            expected: &types.CircuitBreaker{
                Expression: "NetworkErrorRatio() > 0.5",
            },
        },
    }

    for _, test := range testCases {
        test := test
        t.Run(test.desc, func(t *testing.T) {
            t.Parallel()

            actual := GetCircuitBreaker(test.labels)

            assert.Equal(t, test.expected, actual)
        })
    }
}

func TestGetLoadBalancer(t *testing.T) {
    testCases := []struct {
        desc string
        labels map[string]string
        expected *types.LoadBalancer
    }{
        {
            desc: "should return nil when no LB labels",
            labels: map[string]string{},
            expected: nil,
        },
        {
            desc: "should return a struct when labels are set",
            labels: map[string]string{
                TraefikBackendLoadBalancerMethod: "drr",
                TraefikBackendLoadBalancerStickiness: "true",
                TraefikBackendLoadBalancerStickinessCookieName: "foo",
            },
            expected: &types.LoadBalancer{
                Method: "drr",
                Stickiness: &types.Stickiness{
                    CookieName: "foo",
                },
            },
        },
        {
            desc: "should return a nil Stickiness when Stickiness is not set",
            labels: map[string]string{
                TraefikBackendLoadBalancerMethod: "drr",
                TraefikBackendLoadBalancerStickinessCookieName: "foo",
            },
            expected: &types.LoadBalancer{
                Method: "drr",
                Stickiness: nil,
            },
        },
    }

    for _, test := range testCases {
        test := test
        t.Run(test.desc, func(t *testing.T) {
            t.Parallel()

            actual := GetLoadBalancer(test.labels)

            assert.Equal(t, test.expected, actual)
        })
    }
}

func TestGetMaxConn(t *testing.T) {
    testCases := []struct {
        desc string
        labels map[string]string
        expected *types.MaxConn
    }{
        {
            desc: "should return nil when no max conn labels",
            labels: map[string]string{},
            expected: nil,
        },
        {
            desc: "should return nil when no amount label",
            labels: map[string]string{
                TraefikBackendMaxConnExtractorFunc: "client.ip",
            },
            expected: nil,
        },
        {
            desc: "should return default when no empty extractorFunc label",
            labels: map[string]string{
                TraefikBackendMaxConnExtractorFunc: "",
                TraefikBackendMaxConnAmount: "666",
            },
            expected: &types.MaxConn{
                ExtractorFunc: "request.host",
                Amount: 666,
            },
        },
        {
            desc: "should return a struct when max conn labels are set",
            labels: map[string]string{
                TraefikBackendMaxConnExtractorFunc: "client.ip",
                TraefikBackendMaxConnAmount: "666",
            },
            expected: &types.MaxConn{
                ExtractorFunc: "client.ip",
                Amount: 666,
            },
        },
    }

    for _, test := range testCases {
        test := test
        t.Run(test.desc, func(t *testing.T) {
            t.Parallel()

            actual := GetMaxConn(test.labels)
            assert.Equal(t, test.expected, actual)
        })
    }
}

func TestGetHealthCheck(t *testing.T) {
    testCases := []struct {
        desc string
        labels map[string]string
        expected *types.HealthCheck
    }{
        {
            desc: "should return nil when no health check labels",
            labels: map[string]string{},
            expected: nil,
        },
        {
            desc: "should return nil when no health check Path label",
            labels: map[string]string{
                TraefikBackendHealthCheckPort: "80",
                TraefikBackendHealthCheckInterval: "6",
                TraefikBackendHealthCheckTimeout: "3",
            },
            expected: nil,
        },
        {
            desc: "should return a struct when health check labels are set",
            labels: map[string]string{
                TraefikBackendHealthCheckPath: "/health",
                TraefikBackendHealthCheckPort: "80",
                TraefikBackendHealthCheckInterval: "6",
                TraefikBackendHealthCheckTimeout: "3",
                TraefikBackendHealthCheckHeaders: "Foo:bar || Goo:bir",
                TraefikBackendHealthCheckHostname: "traefik",
                TraefikBackendHealthCheckScheme: "http",
            },
            expected: &types.HealthCheck{
                Scheme: "http",
                Path: "/health",
                Port: 80,
                Interval: "6",
                Timeout: "3",
                Hostname: "traefik",
                Headers: map[string]string{
                    "Foo": "bar",
                    "Goo": "bir",
                },
            },
        },
    }

    for _, test := range testCases {
        test := test
        t.Run(test.desc, func(t *testing.T) {
            t.Parallel()

            actual := GetHealthCheck(test.labels)

            assert.Equal(t, test.expected, actual)
        })
    }
}

func TestGetBuffering(t *testing.T) {
    testCases := []struct {
        desc string
        labels map[string]string
        expected *types.Buffering
    }{
        {
            desc: "should return nil when no buffering labels",
            labels: map[string]string{},
            expected: nil,
        },
        {
            desc: "should return a struct when buffering labels are set",
            labels: map[string]string{
                TraefikBackendBufferingMaxResponseBodyBytes: "10485760",
                TraefikBackendBufferingMemResponseBodyBytes: "2097152",
                TraefikBackendBufferingMaxRequestBodyBytes: "10485760",
                TraefikBackendBufferingMemRequestBodyBytes: "2097152",
                TraefikBackendBufferingRetryExpression: "IsNetworkError() && Attempts() <= 2",
            },
            expected: &types.Buffering{
                MaxResponseBodyBytes: 10485760,
                MemResponseBodyBytes: 2097152,
                MaxRequestBodyBytes: 10485760,
                MemRequestBodyBytes: 2097152,
                RetryExpression: "IsNetworkError() && Attempts() <= 2",
            },
        },
    }

    for _, test := range testCases {
        test := test
        t.Run(test.desc, func(t *testing.T) {
            t.Parallel()

            actual := GetBuffering(test.labels)

            assert.Equal(t, test.expected, actual)
        })
    }
}

func TestGetRedirect(t *testing.T) {
    testCases := []struct {
        desc string
        labels map[string]string
        expected *types.Redirect
    }{

        {
            desc: "should return nil when no redirect labels",
            labels: map[string]string{},
            expected: nil,
        },
        {
            desc: "should use only entry point tag when mix regex redirect and entry point redirect",
            labels: map[string]string{
                TraefikFrontendRedirectEntryPoint: "https",
                TraefikFrontendRedirectRegex: "(.*)",
                TraefikFrontendRedirectReplacement: "$1",
            },
            expected: &types.Redirect{
                EntryPoint: "https",
            },
        },
        {
            desc: "should return a struct when entry point redirect label",
            labels: map[string]string{
                TraefikFrontendRedirectEntryPoint: "https",
            },
            expected: &types.Redirect{
                EntryPoint: "https",
            },
        },
        {
            desc: "should return a struct when entry point redirect label (permanent)",
            labels: map[string]string{
                TraefikFrontendRedirectEntryPoint: "https",
                TraefikFrontendRedirectPermanent: "true",
            },
            expected: &types.Redirect{
                EntryPoint: "https",
                Permanent: true,
            },
        },
        {
            desc: "should return a struct when regex redirect labels",
            labels: map[string]string{
                TraefikFrontendRedirectRegex: "(.*)",
                TraefikFrontendRedirectReplacement: "$1",
            },
            expected: &types.Redirect{
                Regex: "(.*)",
                Replacement: "$1",
            },
        },
        {
            desc: "should return a struct when regex redirect labels (permanent)",
            labels: map[string]string{
                TraefikFrontendRedirectRegex: "(.*)",
                TraefikFrontendRedirectReplacement: "$1",
                TraefikFrontendRedirectPermanent: "true",
            },
            expected: &types.Redirect{
                Regex: "(.*)",
                Replacement: "$1",
                Permanent: true,
            },
        },
    }

    for _, test := range testCases {
        test := test
        t.Run(test.desc, func(t *testing.T) {
            t.Parallel()

            actual := GetRedirect(test.labels)

            assert.Equal(t, test.expected, actual)
        })
    }
}

func TestGetRateLimit(t *testing.T) {
    testCases := []struct {
        desc string
        labels map[string]string
        expected *types.RateLimit
    }{
        {
            desc: "should return nil when no rate limit labels",
            labels: map[string]string{},
            expected: nil,
        },
        {
            desc: "should return a struct when rate limit labels are defined",
            labels: map[string]string{
                TraefikFrontendRateLimitExtractorFunc: "client.ip",
                Prefix + BaseFrontendRateLimit + "foo." + SuffixRateLimitPeriod: "6",
                Prefix + BaseFrontendRateLimit + "foo." + SuffixRateLimitAverage: "12",
                Prefix + BaseFrontendRateLimit + "foo." + SuffixRateLimitBurst: "18",
                Prefix + BaseFrontendRateLimit + "bar." + SuffixRateLimitPeriod: "3",
                Prefix + BaseFrontendRateLimit + "bar." + SuffixRateLimitAverage: "6",
                Prefix + BaseFrontendRateLimit + "bar." + SuffixRateLimitBurst: "9",
            },
            expected: &types.RateLimit{
                ExtractorFunc: "client.ip",
                RateSet: map[string]*types.Rate{
                    "foo": {
                        Period: parse.Duration(6 * time.Second),
                        Average: 12,
                        Burst: 18,
                    },
                    "bar": {
                        Period: parse.Duration(3 * time.Second),
                        Average: 6,
                        Burst: 9,
                    },
                },
            },
        },
        {
            desc: "should return nil when ExtractorFunc is missing",
            labels: map[string]string{
                Prefix + BaseFrontendRateLimit + "foo." + SuffixRateLimitPeriod: "6",
                Prefix + BaseFrontendRateLimit + "foo." + SuffixRateLimitAverage: "12",
                Prefix + BaseFrontendRateLimit + "foo." + SuffixRateLimitBurst: "18",
                Prefix + BaseFrontendRateLimit + "bar." + SuffixRateLimitPeriod: "3",
                Prefix + BaseFrontendRateLimit + "bar." + SuffixRateLimitAverage: "6",
                Prefix + BaseFrontendRateLimit + "bar." + SuffixRateLimitBurst: "9",
            },
            expected: nil,
        },
    }

    for _, test := range testCases {
        test := test
        t.Run(test.desc, func(t *testing.T) {
            t.Parallel()

            actual := GetRateLimit(test.labels)

            assert.Equal(t, test.expected, actual)
        })
    }
}

func TestGetHeaders(t *testing.T) {
    testCases := []struct {
        desc string
        labels map[string]string
        expected *types.Headers
    }{
        {
            desc: "should return nil when no custom headers options are set",
            labels: map[string]string{},
            expected: nil,
        },
        {
            desc: "should return a struct when all custom headers options are set",
            labels: map[string]string{
                TraefikFrontendRequestHeaders: "Access-Control-Allow-Methods:POST,GET,OPTIONS || Content-type: application/json; charset=utf-8",
                TraefikFrontendResponseHeaders: "Access-Control-Allow-Methods:POST,GET,OPTIONS || Content-type: application/json; charset=utf-8",
                TraefikFrontendSSLProxyHeaders: "Access-Control-Allow-Methods:POST,GET,OPTIONS || Content-type: application/json; charset=utf-8",
                TraefikFrontendAllowedHosts: "foo,bar,bor",
                TraefikFrontendHostsProxyHeaders: "foo,bar,bor",
                TraefikFrontendSSLHost: "foo",
                TraefikFrontendCustomFrameOptionsValue: "foo",
                TraefikFrontendContentSecurityPolicy: "foo",
                TraefikFrontendPublicKey: "foo",
                TraefikFrontendReferrerPolicy: "foo",
                TraefikFrontendCustomBrowserXSSValue: "foo",
                TraefikFrontendSTSSeconds: "666",
                TraefikFrontendSSLRedirect: "true",
                TraefikFrontendSSLForceHost: "true",
                TraefikFrontendSSLTemporaryRedirect: "true",
                TraefikFrontendSTSIncludeSubdomains: "true",
                TraefikFrontendSTSPreload: "true",
                TraefikFrontendForceSTSHeader: "true",
                TraefikFrontendFrameDeny: "true",
                TraefikFrontendContentTypeNosniff: "true",
                TraefikFrontendBrowserXSSFilter: "true",
                TraefikFrontendIsDevelopment: "true",
            },
            expected: &types.Headers{
                CustomRequestHeaders: map[string]string{
                    "Access-Control-Allow-Methods": "POST,GET,OPTIONS",
                    "Content-Type": "application/json; charset=utf-8",
                },
                CustomResponseHeaders: map[string]string{
                    "Access-Control-Allow-Methods": "POST,GET,OPTIONS",
                    "Content-Type": "application/json; charset=utf-8",
                },
                SSLProxyHeaders: map[string]string{
                    "Access-Control-Allow-Methods": "POST,GET,OPTIONS",
                    "Content-Type": "application/json; charset=utf-8",
                },
                AllowedHosts: []string{"foo", "bar", "bor"},
                HostsProxyHeaders: []string{"foo", "bar", "bor"},
                SSLHost: "foo",
                CustomFrameOptionsValue: "foo",
                ContentSecurityPolicy: "foo",
                PublicKey: "foo",
                ReferrerPolicy: "foo",
                CustomBrowserXSSValue: "foo",
                STSSeconds: 666,
                SSLForceHost: true,
                SSLRedirect: true,
                SSLTemporaryRedirect: true,
                STSIncludeSubdomains: true,
                STSPreload: true,
                ForceSTSHeader: true,
                FrameDeny: true,
                ContentTypeNosniff: true,
                BrowserXSSFilter: true,
                IsDevelopment: true,
            },
        },
    }

    for _, test := range testCases {
        test := test
        t.Run(test.desc, func(t *testing.T) {
            t.Parallel()

            actual := GetHeaders(test.labels)

            assert.Equal(t, test.expected, actual)
        })
    }
}

func TestProviderGetErrorPages(t *testing.T) {
    testCases := []struct {
        desc string
        labels map[string]string
        expected map[string]*types.ErrorPage
    }{
        {
            desc: "should return nil when no tags",
            labels: map[string]string{},
            expected: nil,
        },
        {
            desc: "should return a map when tags are present",
            labels: map[string]string{
                Prefix + BaseFrontendErrorPage + "foo." + SuffixErrorPageStatus: "404",
                Prefix + BaseFrontendErrorPage + "foo." + SuffixErrorPageBackend: "foo_backend",
                Prefix + BaseFrontendErrorPage + "foo." + SuffixErrorPageQuery: "foo_query",
                Prefix + BaseFrontendErrorPage + "bar." + SuffixErrorPageStatus: "500,600",
                Prefix + BaseFrontendErrorPage + "bar." + SuffixErrorPageBackend: "bar_backend",
                Prefix + BaseFrontendErrorPage + "bar." + SuffixErrorPageQuery: "bar_query",
            },
            expected: map[string]*types.ErrorPage{
                "foo": {
                    Status: []string{"404"},
                    Query: "foo_query",
                    Backend: "foo_backend",
                },
                "bar": {
                    Status: []string{"500", "600"},
                    Query: "bar_query",
                    Backend: "bar_backend",
                },
            },
        },
    }

    for _, test := range testCases {
        test := test
        t.Run(test.desc, func(t *testing.T) {
            t.Parallel()

            result := GetErrorPages(test.labels)

            assert.Equal(t, test.expected, result)
        })
    }
}

func TestGetAuth(t *testing.T) {
    testCases := []struct {
        desc string
        labels map[string]string
        expected *types.Auth
    }{
        {
            desc: "should return nil when no tags",
            labels: map[string]string{},
            expected: nil,
        },
        {
            desc: "should return a basic auth",
            labels: map[string]string{
                TraefikFrontendAuthHeaderField: "myHeaderField",
                TraefikFrontendAuthBasicRealm: "myRealm",
                TraefikFrontendAuthBasicUsers: "user:pwd,user2:pwd2",
                TraefikFrontendAuthBasicUsersFile: "myUsersFile",
                TraefikFrontendAuthBasicRemoveHeader: "true",
            },
            expected: &types.Auth{
                HeaderField: "myHeaderField",
                Basic: &types.Basic{UsersFile: "myUsersFile", Users: []string{"user:pwd", "user2:pwd2"}, RemoveHeader: true, Realm: "myRealm"},
            },
        },
        {
            desc: "should return a digest auth",
            labels: map[string]string{
                TraefikFrontendAuthDigestRemoveHeader: "true",
                TraefikFrontendAuthHeaderField: "myHeaderField",
                TraefikFrontendAuthDigestUsers: "user:pwd,user2:pwd2",
                TraefikFrontendAuthDigestUsersFile: "myUsersFile",
            },
            expected: &types.Auth{
                HeaderField: "myHeaderField",
                Digest: &types.Digest{UsersFile: "myUsersFile", Users: []string{"user:pwd", "user2:pwd2"}, RemoveHeader: true},
            },
        },
        {
            desc: "should return a forward auth",
            labels: map[string]string{
                TraefikFrontendAuthHeaderField: "myHeaderField",
                TraefikFrontendAuthForwardAddress: "myAddress",
                TraefikFrontendAuthForwardTrustForwardHeader: "true",
                TraefikFrontendAuthForwardTLSCa: "ca.crt",
                TraefikFrontendAuthForwardTLSCaOptional: "true",
                TraefikFrontendAuthForwardTLSInsecureSkipVerify: "true",
                TraefikFrontendAuthForwardTLSKey: "myKey",
                TraefikFrontendAuthForwardTLSCert: "myCert",
            },
            expected: &types.Auth{
                HeaderField: "myHeaderField",
                Forward: &types.Forward{
                    TrustForwardHeader: true,
                    Address: "myAddress",
                    TLS: &types.ClientTLS{
                        InsecureSkipVerify: true,
                        CA: "ca.crt",
                        CAOptional: true,
                        Key: "myKey",
                        Cert: "myCert",
                    },
                },
            },
        },
    }

    for _, test := range testCases {
        test := test
        t.Run(test.desc, func(t *testing.T) {
            t.Parallel()

            result := GetAuth(test.labels)

            assert.Equal(t, test.expected, result)
        })
    }
}
func TestGetPassTLSClientCert(t *testing.T) {
    testCases := []struct {
        desc string
        labels map[string]string
        expected *types.TLSClientHeaders
    }{
        {
            desc: "should return nil when no tags",
            labels: map[string]string{},
            expected: nil,
        },
        {
            desc: "should return tlsClientHeaders with true pem flag",
            labels: map[string]string{
                TraefikFrontendPassTLSClientCertPem: "true",
            },
            expected: &types.TLSClientHeaders{
                PEM: true,
            },
        },
        {
            desc: "should return tlsClientHeaders with infos and NotAfter true",
            labels: map[string]string{
                TraefikFrontendPassTLSClientCertInfosNotAfter: "true",
            },
            expected: &types.TLSClientHeaders{
                Infos: &types.TLSClientCertificateInfos{
                    NotAfter: true,
                },
            },
        },
        {
            desc: "should return tlsClientHeaders with infos and NotBefore true",
            labels: map[string]string{
                TraefikFrontendPassTLSClientCertInfosNotBefore: "true",
            },
            expected: &types.TLSClientHeaders{
                Infos: &types.TLSClientCertificateInfos{
                    NotBefore: true,
                },
            },
        },
        {
            desc: "should return tlsClientHeaders with infos and sans true",
            labels: map[string]string{
                TraefikFrontendPassTLSClientCertInfosSans: "true",
            },
            expected: &types.TLSClientHeaders{
                Infos: &types.TLSClientCertificateInfos{
                    Sans: true,
                },
            },
        },
        {
            desc: "should return tlsClientHeaders with infos and subject with commonName true",
            labels: map[string]string{
                TraefikFrontendPassTLSClientCertInfosSubjectCommonName: "true",
            },
            expected: &types.TLSClientHeaders{
                Infos: &types.TLSClientCertificateInfos{
                    Subject: &types.TLSCLientCertificateSubjectInfos{
                        CommonName: true,
                    },
                },
            },
        },
        {
            desc: "should return tlsClientHeaders with infos and subject with country true",
            labels: map[string]string{
                TraefikFrontendPassTLSClientCertInfosSubjectCountry: "true",
            },
            expected: &types.TLSClientHeaders{
                Infos: &types.TLSClientCertificateInfos{
                    Subject: &types.TLSCLientCertificateSubjectInfos{
                        Country: true,
                    },
                },
            },
        },
        {
            desc: "should return tlsClientHeaders with infos and subject with locality true",
            labels: map[string]string{
                TraefikFrontendPassTLSClientCertInfosSubjectLocality: "true",
            },
            expected: &types.TLSClientHeaders{
                Infos: &types.TLSClientCertificateInfos{
                    Subject: &types.TLSCLientCertificateSubjectInfos{
                        Locality: true,
                    },
                },
            },
        },
        {
            desc: "should return tlsClientHeaders with infos and subject with organization true",
            labels: map[string]string{
                TraefikFrontendPassTLSClientCertInfosSubjectOrganization: "true",
            },
            expected: &types.TLSClientHeaders{
                Infos: &types.TLSClientCertificateInfos{
                    Subject: &types.TLSCLientCertificateSubjectInfos{
                        Organization: true,
                    },
                },
            },
        },
        {
            desc: "should return tlsClientHeaders with infos and subject with province true",
            labels: map[string]string{
                TraefikFrontendPassTLSClientCertInfosSubjectProvince: "true",
            },
            expected: &types.TLSClientHeaders{
                Infos: &types.TLSClientCertificateInfos{
                    Subject: &types.TLSCLientCertificateSubjectInfos{
                        Province: true,
                    },
                },
            },
        },
        {
            desc: "should return tlsClientHeaders with infos and subject with serialNumber true",
            labels: map[string]string{
                TraefikFrontendPassTLSClientCertInfosSubjectSerialNumber: "true",
            },
            expected: &types.TLSClientHeaders{
                Infos: &types.TLSClientCertificateInfos{
                    Subject: &types.TLSCLientCertificateSubjectInfos{
                        SerialNumber: true,
                    },
                },
            },
        },
        {
            desc: "should return tlsClientHeaders with all infos",
            labels: map[string]string{
                TraefikFrontendPassTLSClientCertPem: "true",
                TraefikFrontendPassTLSClientCertInfosNotAfter: "true",
                TraefikFrontendPassTLSClientCertInfosNotBefore: "true",
                TraefikFrontendPassTLSClientCertInfosSans: "true",
                TraefikFrontendPassTLSClientCertInfosSubjectCommonName: "true",
                TraefikFrontendPassTLSClientCertInfosSubjectCountry: "true",
                TraefikFrontendPassTLSClientCertInfosSubjectLocality: "true",
                TraefikFrontendPassTLSClientCertInfosSubjectOrganization: "true",
                TraefikFrontendPassTLSClientCertInfosSubjectProvince: "true",
                TraefikFrontendPassTLSClientCertInfosSubjectSerialNumber: "true",
            },
            expected: &types.TLSClientHeaders{
                PEM: true,
                Infos: &types.TLSClientCertificateInfos{
                    Sans: true,
                    NotBefore: true,
                    NotAfter: true,
                    Subject: &types.TLSCLientCertificateSubjectInfos{
                        Province: true,
                        Organization: true,
                        Locality: true,
                        Country: true,
                        CommonName: true,
                        SerialNumber: true,
                    },
                },
            },
        },
    }

    for _, test := range testCases {
        test := test
        t.Run(test.desc, func(t *testing.T) {
            t.Parallel()

            result := GetTLSClientCert(test.labels)

            assert.Equal(t, test.expected, result)
        })
    }
}
@@ -1,115 +0,0 @@
package label

import (
    "regexp"
    "strings"

    "github.com/containous/traefik/log"
)

var (
    // SegmentPropertiesRegexp used to extract the name of the segment and the name of the property for this segment
    // All properties are under the format traefik.<segment_name>.frontend.*= except the port/portIndex/weight/protocol/backend directly after traefik.<segment_name>.
    SegmentPropertiesRegexp = regexp.MustCompile(`^traefik\.(?P<segment_name>.+?)\.(?P<property_name>port|portIndex|portName|weight|protocol|backend|frontend\.(.+))$`)

    // PortRegexp used to extract the port label of the segment
    PortRegexp = regexp.MustCompile(`^traefik\.(?P<segment_name>.+?)\.port$`)
)

// SegmentPropertyValues is a map of segment properties
// an example value is: weight=42
type SegmentPropertyValues map[string]string

// SegmentProperties is a map of segment properties per segment,
// which we can get with label[segmentName][propertyName].
// It yields a property value.
type SegmentProperties map[string]SegmentPropertyValues

// FindSegmentSubmatch split segment labels
func FindSegmentSubmatch(name string) []string {
    matches := SegmentPropertiesRegexp.FindStringSubmatch(name)
    if matches == nil ||
        strings.HasPrefix(name, TraefikFrontend+".") ||
        strings.HasPrefix(name, TraefikBackend+".") {
        return nil
    }
    return matches
}

// ExtractTraefikLabels transform labels to segment labels
func ExtractTraefikLabels(originLabels map[string]string) SegmentProperties {
    allLabels := make(SegmentProperties)

    if _, ok := allLabels[""]; !ok {
        allLabels[""] = make(SegmentPropertyValues)
    }

    for name, value := range originLabels {
        if !strings.HasPrefix(name, Prefix) {
            continue
        }

        matches := FindSegmentSubmatch(name)
        if matches == nil {
            // Classic labels
            allLabels[""][name] = value
        } else {
            // segments labels
            var segmentName string
            var propertyName string
            for i, name := range SegmentPropertiesRegexp.SubexpNames() {
                // the group 0 is anonymous because it's always the root expression
                if i != 0 {
                    if name == "segment_name" {
                        segmentName = matches[i]
                    } else if name == "property_name" {
                        propertyName = matches[i]
                    }
                }
            }

            if _, ok := allLabels[segmentName]; !ok {
                allLabels[segmentName] = make(SegmentPropertyValues)
            }
            allLabels[segmentName][Prefix+propertyName] = value
        }
    }

    log.Debug("originLabels", originLabels)
    log.Debug("allLabels", allLabels)

    allLabels.mergeDefault()

    return allLabels
}

func (s SegmentProperties) mergeDefault() {
    // if SegmentProperties contains the default segment, merge each segments with the default segment
    if defaultLabels, okDefault := s[""]; okDefault {

        segmentsNames := s.GetSegmentNames()
        if len(defaultLabels) > 0 {
            for _, name := range segmentsNames {
                segmentLabels := s[name]
                for key, value := range defaultLabels {
                    if _, ok := segmentLabels[key]; !ok {
                        segmentLabels[key] = value
                    }
                }
            }
        }

        if len(segmentsNames) > 1 {
            delete(s, "")
        }
    }
}

// GetSegmentNames get all segment names
func (s SegmentProperties) GetSegmentNames() []string {
    var names []string
    for name := range s {
        names = append(names, name)
    }
    return names
}
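To make the segment grouping above concrete, here is a small illustrative sketch (not part of this commit); the function name and the label values are made up for the example, but the behaviour follows ExtractTraefikLabels and mergeDefault as defined above.

// exampleSegmentExtraction is an illustrative sketch, not code from this diff.
func exampleSegmentExtraction() {
    labels := map[string]string{
        "traefik.port":                  "80",   // default (classic) label
        "traefik.web.frontend.priority": "10",   // property of the "web" segment
        "traefik.web.port":              "8080", // overrides the default port for "web"
    }

    segments := ExtractTraefikLabels(labels)

    // The default segment "" is merged into the named segment and then dropped,
    // so only "web" remains:
    //   segments["web"]["traefik.port"]              == "8080"
    //   segments["web"]["traefik.frontend.priority"] == "10"
    _ = segments
}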
@@ -1,95 +0,0 @@
package label

import (
    "testing"

    "github.com/stretchr/testify/assert"
)

func TestExtractTraefikLabels(t *testing.T) {
    testCases := []struct {
        desc string
        prefix string
        originLabels map[string]string
        expected SegmentProperties
    }{
        {
            desc: "nil labels map",
            prefix: "traefik",
            originLabels: nil,
            expected: SegmentProperties{"": {}},
        },
        {
            desc: "container labels",
            prefix: "traefik",
            originLabels: map[string]string{
                "frontend.priority": "foo", // missing prefix: skip
                "traefik.port": "bar",
            },
            expected: SegmentProperties{
                "": {
                    "traefik.port": "bar",
                },
            },
        },
        {
            desc: "segment labels: only segment no default",
            prefix: "traefik",
            originLabels: map[string]string{
                "traefik.goo.frontend.priority": "A",
                "traefik.goo.port": "D",
                "traefik.port": "C",
            },
            expected: SegmentProperties{
                "goo": {
                    "traefik.frontend.priority": "A",
                    "traefik.port": "D",
                },
            },
        },
        {
            desc: "segment labels: use default",
            prefix: "traefik",
            originLabels: map[string]string{
                "traefik.guu.frontend.priority": "B",
                "traefik.port": "C",
            },
            expected: SegmentProperties{
                "guu": {
                    "traefik.frontend.priority": "B",
                    "traefik.port": "C",
                },
            },
        },
        {
            desc: "segment labels: several segments",
            prefix: "traefik",
            originLabels: map[string]string{
                "traefik.goo.frontend.priority": "A",
                "traefik.goo.port": "D",
                "traefik.guu.frontend.priority": "B",
                "traefik.port": "C",
            },
            expected: SegmentProperties{
                "goo": {
                    "traefik.frontend.priority": "A",
                    "traefik.port": "D",
                },
                "guu": {
                    "traefik.frontend.priority": "B",
                    "traefik.port": "C",
                },
            },
        },
    }

    for _, test := range testCases {
        test := test
        t.Run(test.desc, func(t *testing.T) {
            t.Parallel()

            actual := ExtractTraefikLabels(test.originLabels)
            assert.Equal(t, test.expected, actual)
        })
    }
}