mesh: add validation for the new pbmesh resources (#18410)

Adds validation for HTTPRoute, GRPCRoute, TCPRoute, DestinationPolicy, and ComputedRoutes.
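
All five validators follow the same shape: decode the resource payload, then accumulate field errors with go-multierror, wrapping each one in the resource package's error types (ErrInvalidField, ErrInvalidMapValue, ErrInvalidListElement) so the final message names the offending map key, list index, and field; these are exactly the strings the new tests assert on. Below is a minimal sketch of that wrapping, using only types that already appear in this diff (exampleWrapping itself is illustrative and not part of the change):

package types

import (
	"github.com/hashicorp/go-multierror"

	"github.com/hashicorp/consul/internal/resource"
)

// exampleWrapping is illustrative only and is not part of this commit.
// It shows how the validators below compose nested errors.
func exampleWrapping() error {
	var merr error

	// Equivalent to finding an empty "config" under ported_configs["http"].
	merr = multierror.Append(merr, resource.ErrInvalidMapValue{
		Map: "ported_configs",
		Key: "http",
		Wrapped: resource.ErrInvalidField{
			Name:    "config",
			Wrapped: resource.ErrEmpty,
		},
	})

	// merr.Error() contains:
	//   invalid value of key "http" within ported_configs: invalid "config" field: cannot be empty
	return merr
}
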
R.B. Boyer 2023-08-22 11:27:09 -05:00 committed by GitHub
parent 570c84d032
commit 55723c541e
15 changed files with 3633 additions and 11 deletions


@@ -4,6 +4,8 @@
package types

import (
	"github.com/hashicorp/go-multierror"

	"github.com/hashicorp/consul/internal/resource"
	pbmesh "github.com/hashicorp/consul/proto-public/pbmesh/v1alpha1"
	"github.com/hashicorp/consul/proto-public/pbresource"
@@ -27,6 +29,49 @@ func RegisterComputedRoutes(r resource.Registry) {
	r.Register(resource.Registration{
		Type:     ComputedRoutesV1Alpha1Type,
		Proto:    &pbmesh.ComputedRoutes{},
-		Validate: nil,
+		Validate: ValidateComputedRoutes,
	})
}
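// ValidateComputedRoutes is wired in above as the Validate hook for
// ComputedRoutes: it requires a non-empty ported_configs map whose entries
// each carry a config and at least one target.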
func ValidateComputedRoutes(res *pbresource.Resource) error {
var config pbmesh.ComputedRoutes
if err := res.Data.UnmarshalTo(&config); err != nil {
return resource.NewErrDataParse(&config, err)
}
var merr error
if len(config.PortedConfigs) == 0 {
merr = multierror.Append(merr, resource.ErrInvalidField{
Name: "ported_configs",
Wrapped: resource.ErrEmpty,
})
}
// TODO(rb): do more elaborate validation
for port, pmc := range config.PortedConfigs {
wrapErr := func(err error) error {
return resource.ErrInvalidMapValue{
Map: "ported_configs",
Key: port,
Wrapped: err,
}
}
if pmc.Config == nil {
merr = multierror.Append(merr, wrapErr(resource.ErrInvalidField{
Name: "config",
Wrapped: resource.ErrEmpty,
}))
}
if len(pmc.Targets) == 0 {
merr = multierror.Append(merr, wrapErr(resource.ErrInvalidField{
Name: "targets",
Wrapped: resource.ErrEmpty,
}))
}
}
return merr
}


@@ -0,0 +1,92 @@
// Copyright (c) HashiCorp, Inc.
// SPDX-License-Identifier: BUSL-1.1
package types
import (
"testing"
"github.com/stretchr/testify/require"
"github.com/hashicorp/consul/internal/resource/resourcetest"
pbmesh "github.com/hashicorp/consul/proto-public/pbmesh/v1alpha1"
"github.com/hashicorp/consul/proto/private/prototest"
"github.com/hashicorp/consul/sdk/testutil"
)
func TestValidateComputedRoutes(t *testing.T) {
type testcase struct {
routes *pbmesh.ComputedRoutes
expectErr string
}
run := func(t *testing.T, tc testcase) {
res := resourcetest.Resource(ComputedRoutesType, "api").
WithData(t, tc.routes).
Build()
err := ValidateComputedRoutes(res)
// Verify that validate didn't actually change the object.
got := resourcetest.MustDecode[*pbmesh.ComputedRoutes](t, res)
prototest.AssertDeepEqual(t, tc.routes, got.Data)
if tc.expectErr == "" {
require.NoError(t, err)
} else {
testutil.RequireErrorContains(t, err, tc.expectErr)
}
}
cases := map[string]testcase{
"empty": {
routes: &pbmesh.ComputedRoutes{},
expectErr: `invalid "ported_configs" field: cannot be empty`,
},
"empty config": {
routes: &pbmesh.ComputedRoutes{
PortedConfigs: map[string]*pbmesh.ComputedPortRoutes{
"http": {
Config: nil,
Targets: map[string]*pbmesh.BackendTargetDetails{
"foo": {},
},
},
},
},
expectErr: `invalid value of key "http" within ported_configs: invalid "config" field: cannot be empty`,
},
"empty targets": {
routes: &pbmesh.ComputedRoutes{
PortedConfigs: map[string]*pbmesh.ComputedPortRoutes{
"http": {
Config: &pbmesh.ComputedPortRoutes_Tcp{
Tcp: &pbmesh.InterpretedTCPRoute{},
},
},
},
},
expectErr: `invalid value of key "http" within ported_configs: invalid "targets" field: cannot be empty`,
},
"valid": {
routes: &pbmesh.ComputedRoutes{
PortedConfigs: map[string]*pbmesh.ComputedPortRoutes{
"http": {
Config: &pbmesh.ComputedPortRoutes_Tcp{
Tcp: &pbmesh.InterpretedTCPRoute{},
},
Targets: map[string]*pbmesh.BackendTargetDetails{
"foo": {},
},
},
},
},
},
}
for name, tc := range cases {
t.Run(name, func(t *testing.T) {
run(t, tc)
})
}
}


@@ -4,6 +4,11 @@
package types

import (
	"errors"
	"fmt"

	"github.com/hashicorp/go-multierror"

	"github.com/hashicorp/consul/internal/resource"
	pbmesh "github.com/hashicorp/consul/proto-public/pbmesh/v1alpha1"
	"github.com/hashicorp/consul/proto-public/pbresource"
@@ -27,6 +32,195 @@ func RegisterDestinationPolicy(r resource.Registry) {
	r.Register(resource.Registration{
		Type:     DestinationPolicyV1Alpha1Type,
		Proto:    &pbmesh.DestinationPolicy{},
-		Validate: nil,
+		Validate: ValidateDestinationPolicy,
	})
}
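// ValidateDestinationPolicy is the Validate hook for DestinationPolicy: it
// requires a non-empty port_configs map and checks each entry's timeouts,
// load balancer policy/config/hash policies, and locality prioritization mode.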
func ValidateDestinationPolicy(res *pbresource.Resource) error {
var policy pbmesh.DestinationPolicy
if err := res.Data.UnmarshalTo(&policy); err != nil {
return resource.NewErrDataParse(&policy, err)
}
var merr error
if len(policy.PortConfigs) == 0 {
merr = multierror.Append(merr, resource.ErrInvalidField{
Name: "port_configs",
Wrapped: resource.ErrEmpty,
})
}
for port, pc := range policy.PortConfigs {
wrapErr := func(err error) error {
return resource.ErrInvalidMapValue{
Map: "port_configs",
Key: port,
Wrapped: err,
}
}
if dur := pc.ConnectTimeout.AsDuration(); dur < 0 {
merr = multierror.Append(merr, wrapErr(resource.ErrInvalidField{
Name: "connect_timeout",
Wrapped: fmt.Errorf("'%v', must be >= 0", dur),
}))
}
if dur := pc.RequestTimeout.AsDuration(); dur < 0 {
merr = multierror.Append(merr, wrapErr(resource.ErrInvalidField{
Name: "request_timeout",
Wrapped: fmt.Errorf("'%v', must be >= 0", dur),
}))
}
if pc.LoadBalancer != nil {
lb := pc.LoadBalancer
wrapLBErr := func(err error) error {
return wrapErr(resource.ErrInvalidField{
Name: "load_balancer",
Wrapped: err,
})
}
switch lb.Policy {
case pbmesh.LoadBalancerPolicy_LOAD_BALANCER_POLICY_UNSPECIFIED:
// means just do the default
case pbmesh.LoadBalancerPolicy_LOAD_BALANCER_POLICY_RANDOM:
case pbmesh.LoadBalancerPolicy_LOAD_BALANCER_POLICY_ROUND_ROBIN:
case pbmesh.LoadBalancerPolicy_LOAD_BALANCER_POLICY_LEAST_REQUEST:
case pbmesh.LoadBalancerPolicy_LOAD_BALANCER_POLICY_MAGLEV:
case pbmesh.LoadBalancerPolicy_LOAD_BALANCER_POLICY_RING_HASH:
default:
merr = multierror.Append(merr, wrapLBErr(resource.ErrInvalidField{
Name: "policy",
Wrapped: fmt.Errorf("not a supported enum value: %v", lb.Policy),
}))
}
if lb.Policy != pbmesh.LoadBalancerPolicy_LOAD_BALANCER_POLICY_RING_HASH && lb.Config != nil {
if _, ok := lb.Config.(*pbmesh.LoadBalancer_RingHashConfig); ok {
merr = multierror.Append(merr, wrapLBErr(resource.ErrInvalidField{
Name: "config",
Wrapped: fmt.Errorf("ring_hash_config specified for incompatible load balancing policy %q", lb.Policy),
}))
}
}
if lb.Policy != pbmesh.LoadBalancerPolicy_LOAD_BALANCER_POLICY_LEAST_REQUEST && lb.Config != nil {
if _, ok := lb.Config.(*pbmesh.LoadBalancer_LeastRequestConfig); ok {
merr = multierror.Append(merr, wrapLBErr(resource.ErrInvalidField{
Name: "config",
Wrapped: fmt.Errorf("least_request_config specified for incompatible load balancing policy %q", lb.Policy),
}))
}
}
if !lb.Policy.IsHashBased() && len(lb.HashPolicies) > 0 {
merr = multierror.Append(merr, wrapLBErr(resource.ErrInvalidField{
Name: "hash_policies",
Wrapped: fmt.Errorf("hash_policies specified for non-hash-based policy %q", lb.Policy),
}))
}
LOOP:
for i, hp := range lb.HashPolicies {
wrapHPErr := func(err error) error {
return wrapLBErr(resource.ErrInvalidListElement{
Name: "hash_policies",
Index: i,
Wrapped: err,
})
}
var hasField bool
switch hp.Field {
case pbmesh.HashPolicyField_HASH_POLICY_FIELD_UNSPECIFIED:
case pbmesh.HashPolicyField_HASH_POLICY_FIELD_HEADER,
pbmesh.HashPolicyField_HASH_POLICY_FIELD_COOKIE,
pbmesh.HashPolicyField_HASH_POLICY_FIELD_QUERY_PARAMETER:
hasField = true
default:
merr = multierror.Append(merr, wrapHPErr(resource.ErrInvalidField{
Name: "field",
Wrapped: fmt.Errorf("not a supported enum value: %v", hp.Field),
}))
continue LOOP // no need to keep validating
}
if hp.SourceIp {
if hasField {
merr = multierror.Append(merr, wrapHPErr(resource.ErrInvalidField{
Name: "field",
Wrapped: fmt.Errorf("a single hash policy cannot hash both a source address and a %q", hp.Field),
}))
}
if hp.FieldValue != "" {
merr = multierror.Append(merr, wrapHPErr(resource.ErrInvalidField{
Name: "field_value",
Wrapped: errors.New("cannot be specified when hashing source_ip"),
}))
}
}
if hasField && hp.FieldValue == "" {
merr = multierror.Append(merr, wrapHPErr(resource.ErrInvalidField{
Name: "field_value",
Wrapped: fmt.Errorf("field %q was specified without a field_value", hp.Field),
}))
}
if hp.FieldValue != "" && !hasField {
merr = multierror.Append(merr, wrapHPErr(resource.ErrInvalidField{
Name: "field",
Wrapped: resource.ErrMissing,
}))
merr = multierror.Append(merr, wrapHPErr(resource.ErrInvalidField{
Name: "field_value",
Wrapped: errors.New("requires a field to apply to"),
}))
}
if hp.CookieConfig != nil {
if hp.Field != pbmesh.HashPolicyField_HASH_POLICY_FIELD_COOKIE {
merr = multierror.Append(merr, wrapHPErr(resource.ErrInvalidField{
Name: "cookie_config",
Wrapped: fmt.Errorf("incompatible with field %q", hp.Field),
}))
}
if hp.CookieConfig.Session && hp.CookieConfig.Ttl.AsDuration() != 0 {
merr = multierror.Append(merr, wrapHPErr(resource.ErrInvalidField{
Name: "cookie_config",
Wrapped: resource.ErrInvalidField{
Name: "ttl",
Wrapped: fmt.Errorf("a session cookie cannot have an associated TTL"),
},
}))
}
}
}
}
if pc.LocalityPrioritization != nil {
lp := pc.LocalityPrioritization
wrapLPErr := func(err error) error {
return wrapErr(resource.ErrInvalidField{
Name: "locality_prioritization",
Wrapped: err,
})
}
switch lp.Mode {
case pbmesh.LocalityPrioritizationMode_LOCALITY_PRIORITIZATION_MODE_UNSPECIFIED:
// means pbmesh.LocalityPrioritizationMode_LOCALITY_PRIORITIZATION_MODE_NONE
case pbmesh.LocalityPrioritizationMode_LOCALITY_PRIORITIZATION_MODE_NONE:
case pbmesh.LocalityPrioritizationMode_LOCALITY_PRIORITIZATION_MODE_FAILOVER:
default:
merr = multierror.Append(merr, wrapLPErr(resource.ErrInvalidField{
Name: "mode",
Wrapped: fmt.Errorf("not a supported enum value: %v", lp.Mode),
}))
}
}
}
return merr
}


@@ -0,0 +1,510 @@
// Copyright (c) HashiCorp, Inc.
// SPDX-License-Identifier: BUSL-1.1
package types
import (
"testing"
"time"
"github.com/stretchr/testify/require"
"google.golang.org/protobuf/types/known/durationpb"
"github.com/hashicorp/consul/internal/resource/resourcetest"
pbmesh "github.com/hashicorp/consul/proto-public/pbmesh/v1alpha1"
"github.com/hashicorp/consul/proto/private/prototest"
"github.com/hashicorp/consul/sdk/testutil"
)
func TestValidateDestinationPolicy(t *testing.T) {
type testcase struct {
policy *pbmesh.DestinationPolicy
expectErr string
expectErrs []string
}
run := func(t *testing.T, tc testcase) {
res := resourcetest.Resource(DestinationPolicyType, "api").
WithData(t, tc.policy).
Build()
err := ValidateDestinationPolicy(res)
// Verify that validate didn't actually change the object.
got := resourcetest.MustDecode[*pbmesh.DestinationPolicy](t, res)
prototest.AssertDeepEqual(t, tc.policy, got.Data)
if tc.expectErr != "" && len(tc.expectErrs) > 0 {
t.Fatalf("cannot test singular and list errors at the same time")
}
if tc.expectErr == "" && len(tc.expectErrs) == 0 {
require.NoError(t, err)
} else if tc.expectErr != "" {
testutil.RequireErrorContains(t, err, tc.expectErr)
} else {
for _, expectErr := range tc.expectErrs {
testutil.RequireErrorContains(t, err, expectErr)
}
}
}
cases := map[string]testcase{
// emptiness
"empty": {
policy: &pbmesh.DestinationPolicy{},
expectErr: `invalid "port_configs" field: cannot be empty`,
},
"good connect timeout": {
policy: &pbmesh.DestinationPolicy{
PortConfigs: map[string]*pbmesh.DestinationConfig{
"http": {
ConnectTimeout: durationpb.New(55 * time.Second),
},
},
},
},
"bad connect timeout": {
policy: &pbmesh.DestinationPolicy{
PortConfigs: map[string]*pbmesh.DestinationConfig{
"http": {
ConnectTimeout: durationpb.New(-55 * time.Second),
},
},
},
expectErr: `invalid value of key "http" within port_configs: invalid "connect_timeout" field: '-55s', must be >= 0`,
},
"good request timeout": {
policy: &pbmesh.DestinationPolicy{
PortConfigs: map[string]*pbmesh.DestinationConfig{
"http": {
RequestTimeout: durationpb.New(55 * time.Second),
},
},
},
},
"bad request timeout": {
policy: &pbmesh.DestinationPolicy{
PortConfigs: map[string]*pbmesh.DestinationConfig{
"http": {
RequestTimeout: durationpb.New(-55 * time.Second),
},
},
},
expectErr: `invalid value of key "http" within port_configs: invalid "request_timeout" field: '-55s', must be >= 0`,
},
// load balancer
"lbpolicy: missing enum": {
policy: &pbmesh.DestinationPolicy{
PortConfigs: map[string]*pbmesh.DestinationConfig{
"http": {
ConnectTimeout: durationpb.New(55 * time.Second),
LoadBalancer: &pbmesh.LoadBalancer{},
},
},
},
},
"lbpolicy: bad enum": {
policy: &pbmesh.DestinationPolicy{
PortConfigs: map[string]*pbmesh.DestinationConfig{
"http": {
ConnectTimeout: durationpb.New(55 * time.Second),
LoadBalancer: &pbmesh.LoadBalancer{
Policy: 99,
},
},
},
},
expectErr: `invalid value of key "http" within port_configs: invalid "load_balancer" field: invalid "policy" field: not a supported enum value: 99`,
},
"lbpolicy: supported": {
policy: &pbmesh.DestinationPolicy{
PortConfigs: map[string]*pbmesh.DestinationConfig{
"http": {
ConnectTimeout: durationpb.New(55 * time.Second),
LoadBalancer: &pbmesh.LoadBalancer{
Policy: pbmesh.LoadBalancerPolicy_LOAD_BALANCER_POLICY_RANDOM,
},
},
},
},
},
"lbpolicy: bad for least request config": {
policy: &pbmesh.DestinationPolicy{
PortConfigs: map[string]*pbmesh.DestinationConfig{
"http": {
ConnectTimeout: durationpb.New(55 * time.Second),
LoadBalancer: &pbmesh.LoadBalancer{
Policy: pbmesh.LoadBalancerPolicy_LOAD_BALANCER_POLICY_RING_HASH,
Config: &pbmesh.LoadBalancer_LeastRequestConfig{
LeastRequestConfig: &pbmesh.LeastRequestConfig{
ChoiceCount: 10,
},
},
},
},
},
},
expectErr: `invalid value of key "http" within port_configs: invalid "load_balancer" field: invalid "config" field: least_request_config specified for incompatible load balancing policy "LOAD_BALANCER_POLICY_RING_HASH"`,
},
"lbpolicy: bad for ring hash config": {
policy: &pbmesh.DestinationPolicy{
PortConfigs: map[string]*pbmesh.DestinationConfig{
"http": {
ConnectTimeout: durationpb.New(55 * time.Second),
LoadBalancer: &pbmesh.LoadBalancer{
Policy: pbmesh.LoadBalancerPolicy_LOAD_BALANCER_POLICY_LEAST_REQUEST,
Config: &pbmesh.LoadBalancer_RingHashConfig{
RingHashConfig: &pbmesh.RingHashConfig{
MinimumRingSize: 1024,
},
},
},
},
},
},
expectErr: `invalid value of key "http" within port_configs: invalid "load_balancer" field: invalid "config" field: ring_hash_config specified for incompatible load balancing policy "LOAD_BALANCER_POLICY_LEAST_REQUEST"`,
},
"lbpolicy: good for least request config": {
policy: &pbmesh.DestinationPolicy{
PortConfigs: map[string]*pbmesh.DestinationConfig{
"http": {
ConnectTimeout: durationpb.New(55 * time.Second),
LoadBalancer: &pbmesh.LoadBalancer{
Policy: pbmesh.LoadBalancerPolicy_LOAD_BALANCER_POLICY_LEAST_REQUEST,
Config: &pbmesh.LoadBalancer_LeastRequestConfig{
LeastRequestConfig: &pbmesh.LeastRequestConfig{
ChoiceCount: 10,
},
},
},
},
},
},
},
"lbpolicy: good for ring hash config": {
policy: &pbmesh.DestinationPolicy{
PortConfigs: map[string]*pbmesh.DestinationConfig{
"http": {
ConnectTimeout: durationpb.New(55 * time.Second),
LoadBalancer: &pbmesh.LoadBalancer{
Policy: pbmesh.LoadBalancerPolicy_LOAD_BALANCER_POLICY_RING_HASH,
Config: &pbmesh.LoadBalancer_RingHashConfig{
RingHashConfig: &pbmesh.RingHashConfig{
MinimumRingSize: 1024,
},
},
},
},
},
},
},
"lbpolicy: empty policy with hash policy": {
policy: &pbmesh.DestinationPolicy{
PortConfigs: map[string]*pbmesh.DestinationConfig{
"http": {
ConnectTimeout: durationpb.New(55 * time.Second),
LoadBalancer: &pbmesh.LoadBalancer{
HashPolicies: []*pbmesh.HashPolicy{
{SourceIp: true},
},
},
},
},
},
expectErr: `invalid value of key "http" within port_configs: invalid "load_balancer" field: invalid "hash_policies" field: hash_policies specified for non-hash-based policy "LOAD_BALANCER_POLICY_UNSPECIFIED"`,
},
"lbconfig: cookie config with header policy": {
policy: &pbmesh.DestinationPolicy{
PortConfigs: map[string]*pbmesh.DestinationConfig{
"http": {
ConnectTimeout: durationpb.New(55 * time.Second),
LoadBalancer: &pbmesh.LoadBalancer{
Policy: pbmesh.LoadBalancerPolicy_LOAD_BALANCER_POLICY_MAGLEV,
HashPolicies: []*pbmesh.HashPolicy{
{
Field: pbmesh.HashPolicyField_HASH_POLICY_FIELD_HEADER,
FieldValue: "x-user-id",
CookieConfig: &pbmesh.CookieConfig{
Ttl: durationpb.New(10 * time.Second),
Path: "/root",
},
},
},
},
},
},
},
expectErr: `invalid value of key "http" within port_configs: invalid "load_balancer" field: invalid element at index 0 of list "hash_policies": invalid "cookie_config" field: incompatible with field "HASH_POLICY_FIELD_HEADER"`,
},
"lbconfig: cannot generate session cookie with ttl": {
policy: &pbmesh.DestinationPolicy{
PortConfigs: map[string]*pbmesh.DestinationConfig{
"http": {
ConnectTimeout: durationpb.New(55 * time.Second),
LoadBalancer: &pbmesh.LoadBalancer{
Policy: pbmesh.LoadBalancerPolicy_LOAD_BALANCER_POLICY_MAGLEV,
HashPolicies: []*pbmesh.HashPolicy{
{
Field: pbmesh.HashPolicyField_HASH_POLICY_FIELD_COOKIE,
FieldValue: "good-cookie",
CookieConfig: &pbmesh.CookieConfig{
Session: true,
Ttl: durationpb.New(10 * time.Second),
},
},
},
},
},
},
},
expectErr: `invalid value of key "http" within port_configs: invalid "load_balancer" field: invalid element at index 0 of list "hash_policies": invalid "cookie_config" field: invalid "ttl" field: a session cookie cannot have an associated TTL`,
},
"lbconfig: valid cookie policy": {
policy: &pbmesh.DestinationPolicy{
PortConfigs: map[string]*pbmesh.DestinationConfig{
"http": {
ConnectTimeout: durationpb.New(55 * time.Second),
LoadBalancer: &pbmesh.LoadBalancer{
Policy: pbmesh.LoadBalancerPolicy_LOAD_BALANCER_POLICY_MAGLEV,
HashPolicies: []*pbmesh.HashPolicy{
{
Field: pbmesh.HashPolicyField_HASH_POLICY_FIELD_COOKIE,
FieldValue: "good-cookie",
CookieConfig: &pbmesh.CookieConfig{
Ttl: durationpb.New(10 * time.Second),
Path: "/oven",
},
},
},
},
},
},
},
},
"lbconfig: bad match field": {
policy: &pbmesh.DestinationPolicy{
PortConfigs: map[string]*pbmesh.DestinationConfig{
"http": {
ConnectTimeout: durationpb.New(55 * time.Second),
LoadBalancer: &pbmesh.LoadBalancer{
Policy: pbmesh.LoadBalancerPolicy_LOAD_BALANCER_POLICY_MAGLEV,
HashPolicies: []*pbmesh.HashPolicy{
{
Field: 99,
FieldValue: "X-Consul-Token",
},
},
},
},
},
},
expectErr: `invalid value of key "http" within port_configs: invalid "load_balancer" field: invalid element at index 0 of list "hash_policies": invalid "field" field: not a supported enum value: 99`,
},
"lbconfig: supported match field": {
policy: &pbmesh.DestinationPolicy{
PortConfigs: map[string]*pbmesh.DestinationConfig{
"http": {
ConnectTimeout: durationpb.New(55 * time.Second),
LoadBalancer: &pbmesh.LoadBalancer{
Policy: pbmesh.LoadBalancerPolicy_LOAD_BALANCER_POLICY_MAGLEV,
HashPolicies: []*pbmesh.HashPolicy{
{
Field: pbmesh.HashPolicyField_HASH_POLICY_FIELD_HEADER,
FieldValue: "X-Consul-Token",
},
},
},
},
},
},
},
"lbconfig: missing match field": {
policy: &pbmesh.DestinationPolicy{
PortConfigs: map[string]*pbmesh.DestinationConfig{
"http": {
ConnectTimeout: durationpb.New(55 * time.Second),
LoadBalancer: &pbmesh.LoadBalancer{
Policy: pbmesh.LoadBalancerPolicy_LOAD_BALANCER_POLICY_MAGLEV,
HashPolicies: []*pbmesh.HashPolicy{
{
FieldValue: "X-Consul-Token",
},
},
},
},
},
},
expectErrs: []string{
`invalid value of key "http" within port_configs: invalid "load_balancer" field: invalid element at index 0 of list "hash_policies": invalid "field" field: missing required field`,
`invalid value of key "http" within port_configs: invalid "load_balancer" field: invalid element at index 0 of list "hash_policies": invalid "field_value" field: requires a field to apply to`,
},
},
"lbconfig: cannot match on source address and custom field": {
policy: &pbmesh.DestinationPolicy{
PortConfigs: map[string]*pbmesh.DestinationConfig{
"http": {
ConnectTimeout: durationpb.New(55 * time.Second),
LoadBalancer: &pbmesh.LoadBalancer{
Policy: pbmesh.LoadBalancerPolicy_LOAD_BALANCER_POLICY_MAGLEV,
HashPolicies: []*pbmesh.HashPolicy{
{
Field: pbmesh.HashPolicyField_HASH_POLICY_FIELD_HEADER,
SourceIp: true,
},
},
},
},
},
},
expectErrs: []string{
`invalid value of key "http" within port_configs: invalid "load_balancer" field: invalid element at index 0 of list "hash_policies": invalid "field" field: a single hash policy cannot hash both a source address and a "HASH_POLICY_FIELD_HEADER"`,
`invalid value of key "http" within port_configs: invalid "load_balancer" field: invalid element at index 0 of list "hash_policies": invalid "field_value" field: field "HASH_POLICY_FIELD_HEADER" was specified without a field_value`,
},
},
"lbconfig: matchvalue not compatible with source address": {
policy: &pbmesh.DestinationPolicy{
PortConfigs: map[string]*pbmesh.DestinationConfig{
"http": {
ConnectTimeout: durationpb.New(55 * time.Second),
LoadBalancer: &pbmesh.LoadBalancer{
Policy: pbmesh.LoadBalancerPolicy_LOAD_BALANCER_POLICY_MAGLEV,
HashPolicies: []*pbmesh.HashPolicy{
{
FieldValue: "X-Consul-Token",
SourceIp: true,
},
},
},
},
},
},
expectErrs: []string{
`invalid value of key "http" within port_configs: invalid "load_balancer" field: invalid element at index 0 of list "hash_policies": invalid "field_value" field: cannot be specified when hashing source_ip`,
`invalid value of key "http" within port_configs: invalid "load_balancer" field: invalid element at index 0 of list "hash_policies": invalid "field_value" field: requires a field to apply to`,
},
},
"lbconfig: field without match value": {
policy: &pbmesh.DestinationPolicy{
PortConfigs: map[string]*pbmesh.DestinationConfig{
"http": {
ConnectTimeout: durationpb.New(55 * time.Second),
LoadBalancer: &pbmesh.LoadBalancer{
Policy: pbmesh.LoadBalancerPolicy_LOAD_BALANCER_POLICY_MAGLEV,
HashPolicies: []*pbmesh.HashPolicy{
{
Field: pbmesh.HashPolicyField_HASH_POLICY_FIELD_HEADER,
},
},
},
},
},
},
expectErr: `invalid value of key "http" within port_configs: invalid "load_balancer" field: invalid element at index 0 of list "hash_policies": invalid "field_value" field: field "HASH_POLICY_FIELD_HEADER" was specified without a field_value`,
},
"lbconfig: matchvalue without field": {
policy: &pbmesh.DestinationPolicy{
PortConfigs: map[string]*pbmesh.DestinationConfig{
"http": {
ConnectTimeout: durationpb.New(55 * time.Second),
LoadBalancer: &pbmesh.LoadBalancer{
Policy: pbmesh.LoadBalancerPolicy_LOAD_BALANCER_POLICY_MAGLEV,
HashPolicies: []*pbmesh.HashPolicy{
{
FieldValue: "my-cookie",
},
},
},
},
},
},
expectErr: `invalid value of key "http" within port_configs: invalid "load_balancer" field: invalid element at index 0 of list "hash_policies": invalid "field_value" field: requires a field to apply to`,
},
"lbconfig: ring hash kitchen sink": {
policy: &pbmesh.DestinationPolicy{
PortConfigs: map[string]*pbmesh.DestinationConfig{
"http": {
ConnectTimeout: durationpb.New(55 * time.Second),
LoadBalancer: &pbmesh.LoadBalancer{
Policy: pbmesh.LoadBalancerPolicy_LOAD_BALANCER_POLICY_RING_HASH,
Config: &pbmesh.LoadBalancer_RingHashConfig{
RingHashConfig: &pbmesh.RingHashConfig{
MaximumRingSize: 10,
MinimumRingSize: 2,
},
},
HashPolicies: []*pbmesh.HashPolicy{
{
Field: pbmesh.HashPolicyField_HASH_POLICY_FIELD_COOKIE,
FieldValue: "my-cookie",
},
{
Field: pbmesh.HashPolicyField_HASH_POLICY_FIELD_HEADER,
FieldValue: "alt-header",
Terminal: true,
},
},
},
},
},
},
},
"lbconfig: least request kitchen sink": {
policy: &pbmesh.DestinationPolicy{
PortConfigs: map[string]*pbmesh.DestinationConfig{
"http": {
ConnectTimeout: durationpb.New(55 * time.Second),
LoadBalancer: &pbmesh.LoadBalancer{
Policy: pbmesh.LoadBalancerPolicy_LOAD_BALANCER_POLICY_LEAST_REQUEST,
Config: &pbmesh.LoadBalancer_LeastRequestConfig{
LeastRequestConfig: &pbmesh.LeastRequestConfig{
ChoiceCount: 10,
},
},
},
},
},
},
},
"locality: good mode": {
policy: &pbmesh.DestinationPolicy{
PortConfigs: map[string]*pbmesh.DestinationConfig{
"http": {
LocalityPrioritization: &pbmesh.LocalityPrioritization{
Mode: pbmesh.LocalityPrioritizationMode_LOCALITY_PRIORITIZATION_MODE_FAILOVER,
},
},
},
},
},
"locality: unset mode": {
policy: &pbmesh.DestinationPolicy{
PortConfigs: map[string]*pbmesh.DestinationConfig{
"http": {
LocalityPrioritization: &pbmesh.LocalityPrioritization{
Mode: pbmesh.LocalityPrioritizationMode_LOCALITY_PRIORITIZATION_MODE_UNSPECIFIED,
},
},
},
},
},
"locality: bad mode": {
policy: &pbmesh.DestinationPolicy{
PortConfigs: map[string]*pbmesh.DestinationConfig{
"http": {
LocalityPrioritization: &pbmesh.LocalityPrioritization{
Mode: 99,
},
},
},
},
expectErr: `invalid value of key "http" within port_configs: invalid "locality_prioritization" field: invalid "mode" field: not a supported enum value: 99`,
},
}
for name, tc := range cases {
t.Run(name, func(t *testing.T) {
run(t, tc)
})
}
}


@@ -4,6 +4,11 @@
package types

import (
	"errors"
	"fmt"

	"github.com/hashicorp/go-multierror"

	"github.com/hashicorp/consul/internal/resource"
	pbmesh "github.com/hashicorp/consul/proto-public/pbmesh/v1alpha1"
	"github.com/hashicorp/consul/proto-public/pbresource"
@@ -25,8 +30,208 @@ var (
func RegisterGRPCRoute(r resource.Registry) {
	r.Register(resource.Registration{
		Type:     GRPCRouteV1Alpha1Type,
		Proto:    &pbmesh.GRPCRoute{},
-		Validate: nil,
+		// TODO(rb): normalize parent/backend ref tenancies in a Mutate hook
+		Validate: ValidateGRPCRoute,
	})
}
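// ValidateGRPCRoute is the Validate hook for GRPCRoute: it validates parent
// refs, rejects hostnames, and checks each rule's method and header matches,
// filters, backend refs, timeouts, and retries.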
func ValidateGRPCRoute(res *pbresource.Resource) error {
var route pbmesh.GRPCRoute
if err := res.Data.UnmarshalTo(&route); err != nil {
return resource.NewErrDataParse(&route, err)
}
var merr error
if err := validateParentRefs(route.ParentRefs); err != nil {
merr = multierror.Append(merr, err)
}
if len(route.Hostnames) > 0 {
merr = multierror.Append(merr, resource.ErrInvalidField{
Name: "hostnames",
Wrapped: errors.New("should not populate hostnames"),
})
}
for i, rule := range route.Rules {
wrapRuleErr := func(err error) error {
return resource.ErrInvalidListElement{
Name: "rules",
Index: i,
Wrapped: err,
}
}
for j, match := range rule.Matches {
wrapMatchErr := func(err error) error {
return wrapRuleErr(resource.ErrInvalidListElement{
Name: "matches",
Index: j,
Wrapped: err,
})
}
if match.Method != nil {
switch match.Method.Type {
case pbmesh.GRPCMethodMatchType_GRPC_METHOD_MATCH_TYPE_UNSPECIFIED:
merr = multierror.Append(merr, wrapMatchErr(
resource.ErrInvalidField{
Name: "type",
Wrapped: resource.ErrMissing,
},
))
case pbmesh.GRPCMethodMatchType_GRPC_METHOD_MATCH_TYPE_EXACT:
case pbmesh.GRPCMethodMatchType_GRPC_METHOD_MATCH_TYPE_REGEX:
default:
merr = multierror.Append(merr, wrapMatchErr(
resource.ErrInvalidField{
Name: "type",
Wrapped: fmt.Errorf("not a supported enum value: %v", match.Method.Type),
},
))
}
if match.Method.Service == "" && match.Method.Method == "" {
merr = multierror.Append(merr, wrapMatchErr(
resource.ErrInvalidField{
Name: "service",
Wrapped: errors.New("at least one of \"service\" or \"method\" must be set"),
},
))
}
}
for k, header := range match.Headers {
wrapMatchHeaderErr := func(err error) error {
return wrapRuleErr(resource.ErrInvalidListElement{
Name: "headers",
Index: k,
Wrapped: err,
})
}
if err := validateHeaderMatchType(header.Type); err != nil {
merr = multierror.Append(merr, wrapMatchHeaderErr(
resource.ErrInvalidField{
Name: "type",
Wrapped: err,
}),
)
}
if header.Name == "" {
merr = multierror.Append(merr, wrapMatchHeaderErr(
resource.ErrInvalidField{
Name: "name",
Wrapped: resource.ErrMissing,
}),
)
}
}
}
for j, filter := range rule.Filters {
wrapFilterErr := func(err error) error {
return wrapRuleErr(resource.ErrInvalidListElement{
Name: "filters",
Index: j,
Wrapped: err,
})
}
set := 0
if filter.RequestHeaderModifier != nil {
set++
}
if filter.ResponseHeaderModifier != nil {
set++
}
if filter.UrlRewrite != nil {
set++
if filter.UrlRewrite.PathPrefix == "" {
merr = multierror.Append(merr, wrapFilterErr(
resource.ErrInvalidField{
Name: "url_rewrite",
Wrapped: resource.ErrInvalidField{
Name: "path_prefix",
Wrapped: errors.New("field should not be empty if enclosing section is set"),
},
},
))
}
}
if set != 1 {
merr = multierror.Append(merr, wrapFilterErr(
errors.New("exactly one of request_header_modifier, response_header_modifier, or url_rewrite is required"),
))
}
}
if len(rule.BackendRefs) == 0 {
/*
BackendRefs (optional)
BackendRefs defines API objects where matching requests should be
sent. If unspecified, the rule performs no forwarding. If
unspecified and no filters are specified that would result in a
response being sent, a 404 error code is returned.
*/
merr = multierror.Append(merr, wrapRuleErr(
resource.ErrInvalidField{
Name: "backend_refs",
Wrapped: resource.ErrEmpty,
},
))
}
for j, hbref := range rule.BackendRefs {
wrapBackendRefErr := func(err error) error {
return wrapRuleErr(resource.ErrInvalidListElement{
Name: "backend_refs",
Index: j,
Wrapped: err,
})
}
for _, err := range validateBackendRef(hbref.BackendRef) {
merr = multierror.Append(merr, wrapBackendRefErr(
resource.ErrInvalidField{
Name: "backend_ref",
Wrapped: err,
},
))
}
if len(hbref.Filters) > 0 {
merr = multierror.Append(merr, wrapBackendRefErr(
resource.ErrInvalidField{
Name: "filters",
Wrapped: errors.New("filters are not supported at this level yet"),
},
))
}
}
if rule.Timeouts != nil {
for _, err := range validateHTTPTimeouts(rule.Timeouts) {
merr = multierror.Append(merr, wrapRuleErr(
resource.ErrInvalidField{
Name: "timeouts",
Wrapped: err,
},
))
}
}
if rule.Retries != nil {
for _, err := range validateHTTPRetries(rule.Retries) {
merr = multierror.Append(merr, wrapRuleErr(
resource.ErrInvalidField{
Name: "retries",
Wrapped: err,
},
))
}
}
}
return merr
}


@@ -0,0 +1,523 @@
// Copyright (c) HashiCorp, Inc.
// SPDX-License-Identifier: BUSL-1.1
package types
import (
"testing"
"github.com/stretchr/testify/require"
"github.com/hashicorp/consul/internal/catalog"
"github.com/hashicorp/consul/internal/resource/resourcetest"
pbmesh "github.com/hashicorp/consul/proto-public/pbmesh/v1alpha1"
"github.com/hashicorp/consul/proto/private/prototest"
"github.com/hashicorp/consul/sdk/testutil"
)
func TestValidateGRPCRoute(t *testing.T) {
type testcase struct {
route *pbmesh.GRPCRoute
expectErr string
}
run := func(t *testing.T, tc testcase) {
res := resourcetest.Resource(GRPCRouteType, "api").
WithData(t, tc.route).
Build()
err := ValidateGRPCRoute(res)
// Verify that validate didn't actually change the object.
got := resourcetest.MustDecode[*pbmesh.GRPCRoute](t, res)
prototest.AssertDeepEqual(t, tc.route, got.Data)
if tc.expectErr == "" {
require.NoError(t, err)
} else {
testutil.RequireErrorContains(t, err, tc.expectErr)
}
}
cases := map[string]testcase{
"hostnames not supported for services": {
route: &pbmesh.GRPCRoute{
ParentRefs: []*pbmesh.ParentReference{
newParentRef(catalog.ServiceType, "web", ""),
},
Hostnames: []string{"foo.local"},
},
expectErr: `invalid "hostnames" field: should not populate hostnames`,
},
"no rules": {
route: &pbmesh.GRPCRoute{
ParentRefs: []*pbmesh.ParentReference{
newParentRef(catalog.ServiceType, "web", ""),
},
},
},
"rules with no matches": {
route: &pbmesh.GRPCRoute{
ParentRefs: []*pbmesh.ParentReference{
newParentRef(catalog.ServiceType, "web", ""),
},
Rules: []*pbmesh.GRPCRouteRule{{
BackendRefs: []*pbmesh.GRPCBackendRef{{
BackendRef: newBackendRef(catalog.ServiceType, "api", ""),
}},
}},
},
},
"rules with matches that are empty": {
route: &pbmesh.GRPCRoute{
ParentRefs: []*pbmesh.ParentReference{
newParentRef(catalog.ServiceType, "web", ""),
},
Rules: []*pbmesh.GRPCRouteRule{{
Matches: []*pbmesh.GRPCRouteMatch{{
// none
}},
BackendRefs: []*pbmesh.GRPCBackendRef{{
BackendRef: newBackendRef(catalog.ServiceType, "api", ""),
}},
}},
},
},
"method match with no type is bad": {
route: &pbmesh.GRPCRoute{
ParentRefs: []*pbmesh.ParentReference{
newParentRef(catalog.ServiceType, "web", ""),
},
Rules: []*pbmesh.GRPCRouteRule{{
Matches: []*pbmesh.GRPCRouteMatch{{
Method: &pbmesh.GRPCMethodMatch{
Service: "foo",
},
}},
BackendRefs: []*pbmesh.GRPCBackendRef{{
BackendRef: newBackendRef(catalog.ServiceType, "api", ""),
}},
}},
},
expectErr: `invalid element at index 0 of list "rules": invalid element at index 0 of list "matches": invalid "type" field: missing required field`,
},
"method match with unknown type is bad": {
route: &pbmesh.GRPCRoute{
ParentRefs: []*pbmesh.ParentReference{
newParentRef(catalog.ServiceType, "web", ""),
},
Rules: []*pbmesh.GRPCRouteRule{{
Matches: []*pbmesh.GRPCRouteMatch{{
Method: &pbmesh.GRPCMethodMatch{
Type: 99,
Service: "foo",
},
}},
BackendRefs: []*pbmesh.GRPCBackendRef{{
BackendRef: newBackendRef(catalog.ServiceType, "api", ""),
}},
}},
},
expectErr: `invalid element at index 0 of list "rules": invalid element at index 0 of list "matches": invalid "type" field: not a supported enum value: 99`,
},
"method match with no service nor method is bad": {
route: &pbmesh.GRPCRoute{
ParentRefs: []*pbmesh.ParentReference{
newParentRef(catalog.ServiceType, "web", ""),
},
Rules: []*pbmesh.GRPCRouteRule{{
Matches: []*pbmesh.GRPCRouteMatch{{
Method: &pbmesh.GRPCMethodMatch{
Type: pbmesh.GRPCMethodMatchType_GRPC_METHOD_MATCH_TYPE_EXACT,
},
}},
BackendRefs: []*pbmesh.GRPCBackendRef{{
BackendRef: newBackendRef(catalog.ServiceType, "api", ""),
}},
}},
},
expectErr: `invalid element at index 0 of list "rules": invalid element at index 0 of list "matches": invalid "service" field: at least one of "service" or "method" must be set`,
},
"method match is good (1)": {
route: &pbmesh.GRPCRoute{
ParentRefs: []*pbmesh.ParentReference{
newParentRef(catalog.ServiceType, "web", ""),
},
Rules: []*pbmesh.GRPCRouteRule{{
Matches: []*pbmesh.GRPCRouteMatch{{
Method: &pbmesh.GRPCMethodMatch{
Type: pbmesh.GRPCMethodMatchType_GRPC_METHOD_MATCH_TYPE_EXACT,
Service: "foo",
},
}},
BackendRefs: []*pbmesh.GRPCBackendRef{{
BackendRef: newBackendRef(catalog.ServiceType, "api", ""),
}},
}},
},
},
"method match is good (2)": {
route: &pbmesh.GRPCRoute{
ParentRefs: []*pbmesh.ParentReference{
newParentRef(catalog.ServiceType, "web", ""),
},
Rules: []*pbmesh.GRPCRouteRule{{
Matches: []*pbmesh.GRPCRouteMatch{{
Method: &pbmesh.GRPCMethodMatch{
Type: pbmesh.GRPCMethodMatchType_GRPC_METHOD_MATCH_TYPE_EXACT,
Method: "bar",
},
}},
BackendRefs: []*pbmesh.GRPCBackendRef{{
BackendRef: newBackendRef(catalog.ServiceType, "api", ""),
}},
}},
},
},
"method match is good (3)": {
route: &pbmesh.GRPCRoute{
ParentRefs: []*pbmesh.ParentReference{
newParentRef(catalog.ServiceType, "web", ""),
},
Rules: []*pbmesh.GRPCRouteRule{{
Matches: []*pbmesh.GRPCRouteMatch{{
Method: &pbmesh.GRPCMethodMatch{
Type: pbmesh.GRPCMethodMatchType_GRPC_METHOD_MATCH_TYPE_EXACT,
Service: "foo",
Method: "bar",
},
}},
BackendRefs: []*pbmesh.GRPCBackendRef{{
BackendRef: newBackendRef(catalog.ServiceType, "api", ""),
}},
}},
},
},
"header match with no type is bad": {
route: &pbmesh.GRPCRoute{
ParentRefs: []*pbmesh.ParentReference{
newParentRef(catalog.ServiceType, "web", ""),
},
Rules: []*pbmesh.GRPCRouteRule{{
Matches: []*pbmesh.GRPCRouteMatch{{
Headers: []*pbmesh.GRPCHeaderMatch{{
Name: "x-foo",
}},
}},
BackendRefs: []*pbmesh.GRPCBackendRef{{
BackendRef: newBackendRef(catalog.ServiceType, "api", ""),
}},
}},
},
expectErr: `invalid element at index 0 of list "rules": invalid element at index 0 of list "headers": invalid "type" field: missing required field`,
},
"header match with unknown type is bad": {
route: &pbmesh.GRPCRoute{
ParentRefs: []*pbmesh.ParentReference{
newParentRef(catalog.ServiceType, "web", ""),
},
Rules: []*pbmesh.GRPCRouteRule{{
Matches: []*pbmesh.GRPCRouteMatch{{
Headers: []*pbmesh.GRPCHeaderMatch{{
Type: 99,
Name: "x-foo",
}},
}},
BackendRefs: []*pbmesh.GRPCBackendRef{{
BackendRef: newBackendRef(catalog.ServiceType, "api", ""),
}},
}},
},
expectErr: `invalid element at index 0 of list "rules": invalid element at index 0 of list "headers": invalid "type" field: not a supported enum value: 99`,
},
"header match with no name is bad": {
route: &pbmesh.GRPCRoute{
ParentRefs: []*pbmesh.ParentReference{
newParentRef(catalog.ServiceType, "web", ""),
},
Rules: []*pbmesh.GRPCRouteRule{{
Matches: []*pbmesh.GRPCRouteMatch{{
Headers: []*pbmesh.GRPCHeaderMatch{{
Type: pbmesh.HeaderMatchType_HEADER_MATCH_TYPE_EXACT,
}},
}},
BackendRefs: []*pbmesh.GRPCBackendRef{{
BackendRef: newBackendRef(catalog.ServiceType, "api", ""),
}},
}},
},
expectErr: `invalid element at index 0 of list "rules": invalid element at index 0 of list "headers": invalid "name" field: missing required field`,
},
"header match is good": {
route: &pbmesh.GRPCRoute{
ParentRefs: []*pbmesh.ParentReference{
newParentRef(catalog.ServiceType, "web", ""),
},
Rules: []*pbmesh.GRPCRouteRule{{
Matches: []*pbmesh.GRPCRouteMatch{{
Headers: []*pbmesh.GRPCHeaderMatch{{
Type: pbmesh.HeaderMatchType_HEADER_MATCH_TYPE_EXACT,
Name: "x-foo",
}},
}},
BackendRefs: []*pbmesh.GRPCBackendRef{{
BackendRef: newBackendRef(catalog.ServiceType, "api", ""),
}},
}},
},
},
"filter empty is bad": {
route: &pbmesh.GRPCRoute{
ParentRefs: []*pbmesh.ParentReference{
newParentRef(catalog.ServiceType, "web", ""),
},
Rules: []*pbmesh.GRPCRouteRule{{
Filters: []*pbmesh.GRPCRouteFilter{{
// none
}},
BackendRefs: []*pbmesh.GRPCBackendRef{{
BackendRef: newBackendRef(catalog.ServiceType, "api", ""),
}},
}},
},
expectErr: `invalid element at index 0 of list "rules": invalid element at index 0 of list "filters": exactly one of request_header_modifier, response_header_modifier, or url_rewrite`,
},
"filter req header mod is ok": {
route: &pbmesh.GRPCRoute{
ParentRefs: []*pbmesh.ParentReference{
newParentRef(catalog.ServiceType, "web", ""),
},
Rules: []*pbmesh.GRPCRouteRule{{
Filters: []*pbmesh.GRPCRouteFilter{{
RequestHeaderModifier: &pbmesh.HTTPHeaderFilter{},
}},
BackendRefs: []*pbmesh.GRPCBackendRef{{
BackendRef: newBackendRef(catalog.ServiceType, "api", ""),
}},
}},
},
},
"filter resp header mod is ok": {
route: &pbmesh.GRPCRoute{
ParentRefs: []*pbmesh.ParentReference{
newParentRef(catalog.ServiceType, "web", ""),
},
Rules: []*pbmesh.GRPCRouteRule{{
Filters: []*pbmesh.GRPCRouteFilter{{
ResponseHeaderModifier: &pbmesh.HTTPHeaderFilter{},
}},
BackendRefs: []*pbmesh.GRPCBackendRef{{
BackendRef: newBackendRef(catalog.ServiceType, "api", ""),
}},
}},
},
},
"filter rewrite header mod missing path prefix": {
route: &pbmesh.GRPCRoute{
ParentRefs: []*pbmesh.ParentReference{
newParentRef(catalog.ServiceType, "web", ""),
},
Rules: []*pbmesh.GRPCRouteRule{{
Filters: []*pbmesh.GRPCRouteFilter{{
UrlRewrite: &pbmesh.HTTPURLRewriteFilter{},
}},
BackendRefs: []*pbmesh.GRPCBackendRef{{
BackendRef: newBackendRef(catalog.ServiceType, "api", ""),
}},
}},
},
expectErr: `invalid element at index 0 of list "rules": invalid element at index 0 of list "filters": invalid "url_rewrite" field: invalid "path_prefix" field: field should not be empty if enclosing section is set`,
},
"filter rewrite header mod is ok": {
route: &pbmesh.GRPCRoute{
ParentRefs: []*pbmesh.ParentReference{
newParentRef(catalog.ServiceType, "web", ""),
},
Rules: []*pbmesh.GRPCRouteRule{{
Filters: []*pbmesh.GRPCRouteFilter{{
UrlRewrite: &pbmesh.HTTPURLRewriteFilter{
PathPrefix: "/blah",
},
}},
BackendRefs: []*pbmesh.GRPCBackendRef{{
BackendRef: newBackendRef(catalog.ServiceType, "api", ""),
}},
}},
},
},
"filter req+resp header mod is bad": {
route: &pbmesh.GRPCRoute{
ParentRefs: []*pbmesh.ParentReference{
newParentRef(catalog.ServiceType, "web", ""),
},
Rules: []*pbmesh.GRPCRouteRule{{
Filters: []*pbmesh.GRPCRouteFilter{{
RequestHeaderModifier: &pbmesh.HTTPHeaderFilter{},
ResponseHeaderModifier: &pbmesh.HTTPHeaderFilter{},
}},
BackendRefs: []*pbmesh.GRPCBackendRef{{
BackendRef: newBackendRef(catalog.ServiceType, "api", ""),
}},
}},
},
expectErr: `invalid element at index 0 of list "rules": invalid element at index 0 of list "filters": exactly one of request_header_modifier, response_header_modifier, or url_rewrite`,
},
"filter req+rewrite header mod is bad": {
route: &pbmesh.GRPCRoute{
ParentRefs: []*pbmesh.ParentReference{
newParentRef(catalog.ServiceType, "web", ""),
},
Rules: []*pbmesh.GRPCRouteRule{{
Filters: []*pbmesh.GRPCRouteFilter{{
RequestHeaderModifier: &pbmesh.HTTPHeaderFilter{},
UrlRewrite: &pbmesh.HTTPURLRewriteFilter{
PathPrefix: "/blah",
},
}},
BackendRefs: []*pbmesh.GRPCBackendRef{{
BackendRef: newBackendRef(catalog.ServiceType, "api", ""),
}},
}},
},
expectErr: `invalid element at index 0 of list "rules": invalid element at index 0 of list "filters": exactly one of request_header_modifier, response_header_modifier, or url_rewrite`,
},
"filter resp+rewrite header mod is bad": {
route: &pbmesh.GRPCRoute{
ParentRefs: []*pbmesh.ParentReference{
newParentRef(catalog.ServiceType, "web", ""),
},
Rules: []*pbmesh.GRPCRouteRule{{
Filters: []*pbmesh.GRPCRouteFilter{{
ResponseHeaderModifier: &pbmesh.HTTPHeaderFilter{},
UrlRewrite: &pbmesh.HTTPURLRewriteFilter{
PathPrefix: "/blah",
},
}},
BackendRefs: []*pbmesh.GRPCBackendRef{{
BackendRef: newBackendRef(catalog.ServiceType, "api", ""),
}},
}},
},
expectErr: `invalid element at index 0 of list "rules": invalid element at index 0 of list "filters": exactly one of request_header_modifier, response_header_modifier, or url_rewrite`,
},
"filter req+resp+rewrite header mod is bad": {
route: &pbmesh.GRPCRoute{
ParentRefs: []*pbmesh.ParentReference{
newParentRef(catalog.ServiceType, "web", ""),
},
Rules: []*pbmesh.GRPCRouteRule{{
Filters: []*pbmesh.GRPCRouteFilter{{
RequestHeaderModifier: &pbmesh.HTTPHeaderFilter{},
ResponseHeaderModifier: &pbmesh.HTTPHeaderFilter{},
UrlRewrite: &pbmesh.HTTPURLRewriteFilter{
PathPrefix: "/blah",
},
}},
BackendRefs: []*pbmesh.GRPCBackendRef{{
BackendRef: newBackendRef(catalog.ServiceType, "api", ""),
}},
}},
},
expectErr: `invalid element at index 0 of list "rules": invalid element at index 0 of list "filters": exactly one of request_header_modifier, response_header_modifier, or url_rewrite`,
},
"backend ref with filters is unsupported": {
route: &pbmesh.GRPCRoute{
ParentRefs: []*pbmesh.ParentReference{
newParentRef(catalog.ServiceType, "web", ""),
},
Rules: []*pbmesh.GRPCRouteRule{{
BackendRefs: []*pbmesh.GRPCBackendRef{{
BackendRef: newBackendRef(catalog.ServiceType, "api", ""),
Filters: []*pbmesh.GRPCRouteFilter{{
RequestHeaderModifier: &pbmesh.HTTPHeaderFilter{},
}},
}},
}},
},
expectErr: `invalid element at index 0 of list "rules": invalid element at index 0 of list "backend_refs": invalid "filters" field: filters are not supported at this level yet`,
},
"nil backend ref": {
route: &pbmesh.GRPCRoute{
ParentRefs: []*pbmesh.ParentReference{
newParentRef(catalog.ServiceType, "web", ""),
},
Rules: []*pbmesh.GRPCRouteRule{{
BackendRefs: []*pbmesh.GRPCBackendRef{nil},
}},
},
expectErr: `invalid element at index 0 of list "rules": invalid element at index 0 of list "backend_refs": invalid "backend_ref" field: missing required field`,
},
}
// Add common timeouts test cases.
for name, timeoutsTC := range getXRouteTimeoutsTestCases() {
cases["timeouts: "+name] = testcase{
route: &pbmesh.GRPCRoute{
ParentRefs: []*pbmesh.ParentReference{
newParentRef(catalog.ServiceType, "web", ""),
},
Rules: []*pbmesh.GRPCRouteRule{{
Timeouts: timeoutsTC.timeouts,
BackendRefs: []*pbmesh.GRPCBackendRef{{
BackendRef: newBackendRef(catalog.ServiceType, "api", ""),
}},
}},
},
expectErr: timeoutsTC.expectErr,
}
}
// Add common retries test cases.
for name, retriesTC := range getXRouteRetriesTestCases() {
cases["retries: "+name] = testcase{
route: &pbmesh.GRPCRoute{
ParentRefs: []*pbmesh.ParentReference{
newParentRef(catalog.ServiceType, "web", ""),
},
Rules: []*pbmesh.GRPCRouteRule{{
Retries: retriesTC.retries,
BackendRefs: []*pbmesh.GRPCBackendRef{{
BackendRef: newBackendRef(catalog.ServiceType, "api", ""),
}},
}},
},
expectErr: retriesTC.expectErr,
}
}
// Add common parent refs test cases.
for name, parentTC := range getXRouteParentRefTestCases() {
cases["parent-ref: "+name] = testcase{
route: &pbmesh.GRPCRoute{
ParentRefs: parentTC.refs,
},
expectErr: parentTC.expectErr,
}
}
// Add common backend ref test cases.
for name, backendTC := range getXRouteBackendRefTestCases() {
var refs []*pbmesh.GRPCBackendRef
for _, br := range backendTC.refs {
refs = append(refs, &pbmesh.GRPCBackendRef{
BackendRef: br,
})
}
cases["backend-ref: "+name] = testcase{
route: &pbmesh.GRPCRoute{
ParentRefs: []*pbmesh.ParentReference{
newParentRef(catalog.ServiceType, "web", ""),
},
Rules: []*pbmesh.GRPCRouteRule{
{BackendRefs: refs},
},
},
expectErr: backendTC.expectErr,
}
}
for name, tc := range cases {
t.Run(name, func(t *testing.T) {
run(t, tc)
})
}
}


@@ -4,6 +4,13 @@
package types

import (
	"errors"
	"fmt"
	"net/http"
	"strings"

	"github.com/hashicorp/go-multierror"

	"github.com/hashicorp/consul/internal/resource"
	pbmesh "github.com/hashicorp/consul/proto-public/pbmesh/v1alpha1"
	"github.com/hashicorp/consul/proto-public/pbresource"
@@ -27,6 +34,316 @@ func RegisterHTTPRoute(r resource.Registry) {
	r.Register(resource.Registration{
		Type:     HTTPRouteV1Alpha1Type,
		Proto:    &pbmesh.HTTPRoute{},
-		Validate: nil,
+		Mutate:   MutateHTTPRoute,
+		Validate: ValidateHTTPRoute,
	})
}
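// MutateHTTPRoute is the Mutate hook for HTTPRoute: it normalizes any match
// methods to uppercase and rewrites the resource data only if something
// changed.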
func MutateHTTPRoute(res *pbresource.Resource) error {
var route pbmesh.HTTPRoute
if err := res.Data.UnmarshalTo(&route); err != nil {
return resource.NewErrDataParse(&route, err)
}
changed := false
for _, rule := range route.Rules {
for _, match := range rule.Matches {
if match.Method != "" {
norm := strings.ToUpper(match.Method)
if match.Method != norm {
match.Method = norm
changed = true
}
}
}
}
// TODO(rb): normalize parent/backend ref tenancies
if !changed {
return nil
}
return res.Data.MarshalFrom(&route)
}
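// ValidateHTTPRoute is the Validate hook for HTTPRoute: it validates parent
// refs, rejects hostnames, and checks each rule's path, header, and query
// matches, the HTTP method, filters, backend refs, timeouts, and retries.
// It expects MutateHTTPRoute to have run first so methods are uppercase.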
func ValidateHTTPRoute(res *pbresource.Resource) error {
var route pbmesh.HTTPRoute
if err := res.Data.UnmarshalTo(&route); err != nil {
return resource.NewErrDataParse(&route, err)
}
var merr error
if err := validateParentRefs(route.ParentRefs); err != nil {
merr = multierror.Append(merr, err)
}
if len(route.Hostnames) > 0 {
merr = multierror.Append(merr, resource.ErrInvalidField{
Name: "hostnames",
Wrapped: errors.New("should not populate hostnames"),
})
}
for i, rule := range route.Rules {
wrapRuleErr := func(err error) error {
return resource.ErrInvalidListElement{
Name: "rules",
Index: i,
Wrapped: err,
}
}
for j, match := range rule.Matches {
wrapMatchErr := func(err error) error {
return wrapRuleErr(resource.ErrInvalidListElement{
Name: "matches",
Index: j,
Wrapped: err,
})
}
if match.Path != nil {
wrapMatchPathErr := func(err error) error {
return wrapMatchErr(resource.ErrInvalidField{
Name: "path",
Wrapped: err,
})
}
switch match.Path.Type {
case pbmesh.PathMatchType_PATH_MATCH_TYPE_UNSPECIFIED:
merr = multierror.Append(merr, wrapMatchPathErr(
resource.ErrInvalidField{
Name: "type",
Wrapped: resource.ErrMissing,
},
))
case pbmesh.PathMatchType_PATH_MATCH_TYPE_EXACT:
if !strings.HasPrefix(match.Path.Value, "/") {
merr = multierror.Append(merr, wrapMatchPathErr(
resource.ErrInvalidField{
Name: "value",
Wrapped: fmt.Errorf("exact patch value does not start with '/': %q", match.Path.Value),
},
))
}
case pbmesh.PathMatchType_PATH_MATCH_TYPE_PREFIX:
if !strings.HasPrefix(match.Path.Value, "/") {
merr = multierror.Append(merr, wrapMatchPathErr(
resource.ErrInvalidField{
Name: "value",
Wrapped: fmt.Errorf("prefix patch value does not start with '/': %q", match.Path.Value),
},
))
}
default:
merr = multierror.Append(merr, wrapMatchPathErr(
resource.ErrInvalidField{
Name: "type",
Wrapped: fmt.Errorf("not a supported enum value: %v", match.Path.Type),
},
))
}
}
for k, hdr := range match.Headers {
wrapMatchHeaderErr := func(err error) error {
return wrapMatchErr(resource.ErrInvalidListElement{
Name: "headers",
Index: k,
Wrapped: err,
})
}
if err := validateHeaderMatchType(hdr.Type); err != nil {
merr = multierror.Append(merr, wrapMatchHeaderErr(
resource.ErrInvalidField{
Name: "type",
Wrapped: err,
}),
)
}
if hdr.Name == "" {
merr = multierror.Append(merr, wrapMatchHeaderErr(
resource.ErrInvalidField{
Name: "name",
Wrapped: resource.ErrMissing,
}),
)
}
}
for k, qm := range match.QueryParams {
wrapMatchParamErr := func(err error) error {
return wrapMatchErr(resource.ErrInvalidListElement{
Name: "query_params",
Index: k,
Wrapped: err,
})
}
switch qm.Type {
case pbmesh.QueryParamMatchType_QUERY_PARAM_MATCH_TYPE_UNSPECIFIED:
merr = multierror.Append(merr, wrapMatchParamErr(
resource.ErrInvalidField{
Name: "type",
Wrapped: resource.ErrMissing,
}),
)
case pbmesh.QueryParamMatchType_QUERY_PARAM_MATCH_TYPE_EXACT:
case pbmesh.QueryParamMatchType_QUERY_PARAM_MATCH_TYPE_REGEX:
case pbmesh.QueryParamMatchType_QUERY_PARAM_MATCH_TYPE_PRESENT:
default:
merr = multierror.Append(merr, wrapMatchParamErr(
resource.ErrInvalidField{
Name: "type",
Wrapped: fmt.Errorf("not a supported enum value: %v", qm.Type),
},
))
}
if qm.Name == "" {
merr = multierror.Append(merr, wrapMatchParamErr(
resource.ErrInvalidField{
Name: "name",
Wrapped: resource.ErrMissing,
}),
)
}
}
if match.Method != "" && !isValidHTTPMethod(match.Method) {
merr = multierror.Append(merr, wrapMatchErr(
resource.ErrInvalidField{
Name: "method",
Wrapped: fmt.Errorf("not a valid http method: %q", match.Method),
},
))
}
}
for j, filter := range rule.Filters {
wrapFilterErr := func(err error) error {
return wrapRuleErr(resource.ErrInvalidListElement{
Name: "filters",
Index: j,
Wrapped: err,
})
}
set := 0
if filter.RequestHeaderModifier != nil {
set++
}
if filter.ResponseHeaderModifier != nil {
set++
}
if filter.UrlRewrite != nil {
set++
if filter.UrlRewrite.PathPrefix == "" {
merr = multierror.Append(merr, wrapFilterErr(
resource.ErrInvalidField{
Name: "url_rewrite",
Wrapped: resource.ErrInvalidField{
Name: "path_prefix",
Wrapped: errors.New("field should not be empty if enclosing section is set"),
},
},
))
}
}
if set != 1 {
merr = multierror.Append(merr, wrapFilterErr(
errors.New("exactly one of request_header_modifier, response_header_modifier, or url_rewrite is required"),
))
}
}
if len(rule.BackendRefs) == 0 {
/*
BackendRefs (optional)
BackendRefs defines API objects where matching requests should be
sent. If unspecified, the rule performs no forwarding. If
unspecified and no filters are specified that would result in a
response being sent, a 404 error code is returned.
*/
merr = multierror.Append(merr, wrapRuleErr(
resource.ErrInvalidField{
Name: "backend_refs",
Wrapped: resource.ErrEmpty,
},
))
}
for j, hbref := range rule.BackendRefs {
wrapBackendRefErr := func(err error) error {
return wrapRuleErr(resource.ErrInvalidListElement{
Name: "backend_refs",
Index: j,
Wrapped: err,
})
}
for _, err := range validateBackendRef(hbref.BackendRef) {
merr = multierror.Append(merr, wrapBackendRefErr(
resource.ErrInvalidField{
Name: "backend_ref",
Wrapped: err,
},
))
}
if len(hbref.Filters) > 0 {
merr = multierror.Append(merr, wrapBackendRefErr(
resource.ErrInvalidField{
Name: "filters",
Wrapped: errors.New("filters are not supported at this level yet"),
},
))
}
}
if rule.Timeouts != nil {
for _, err := range validateHTTPTimeouts(rule.Timeouts) {
merr = multierror.Append(merr, wrapRuleErr(
resource.ErrInvalidField{
Name: "timeouts",
Wrapped: err,
},
))
}
}
if rule.Retries != nil {
for _, err := range validateHTTPRetries(rule.Retries) {
merr = multierror.Append(merr, wrapRuleErr(
resource.ErrInvalidField{
Name: "retries",
Wrapped: err,
},
))
}
}
}
return merr
}
func isValidHTTPMethod(method string) bool {
switch method {
case http.MethodGet,
http.MethodHead,
http.MethodPost,
http.MethodPut,
http.MethodPatch,
http.MethodDelete,
http.MethodConnect,
http.MethodOptions,
http.MethodTrace:
return true
default:
return false
}
}


@@ -0,0 +1,980 @@
// Copyright (c) HashiCorp, Inc.
// SPDX-License-Identifier: BUSL-1.1
package types
import (
"testing"
"time"
"github.com/stretchr/testify/require"
"google.golang.org/protobuf/proto"
"google.golang.org/protobuf/types/known/durationpb"
"github.com/hashicorp/consul/internal/catalog"
"github.com/hashicorp/consul/internal/resource/resourcetest"
pbmesh "github.com/hashicorp/consul/proto-public/pbmesh/v1alpha1"
"github.com/hashicorp/consul/proto-public/pbresource"
"github.com/hashicorp/consul/proto/private/prototest"
"github.com/hashicorp/consul/sdk/testutil"
)
func TestMutateHTTPRoute(t *testing.T) {
type testcase struct {
route *pbmesh.HTTPRoute
expect *pbmesh.HTTPRoute
expectErr string
}
run := func(t *testing.T, tc testcase) {
res := resourcetest.Resource(HTTPRouteType, "api").
WithData(t, tc.route).
Build()
err := MutateHTTPRoute(res)
got := resourcetest.MustDecode[*pbmesh.HTTPRoute](t, res)
if tc.expectErr == "" {
require.NoError(t, err)
if tc.expect == nil {
tc.expect = proto.Clone(tc.route).(*pbmesh.HTTPRoute)
}
prototest.AssertDeepEqual(t, tc.expect, got.Data)
} else {
testutil.RequireErrorContains(t, err, tc.expectErr)
}
}
cases := map[string]testcase{
"no-rules": {
route: &pbmesh.HTTPRoute{},
},
"rules-with-no-matches": {
route: &pbmesh.HTTPRoute{
Rules: []*pbmesh.HTTPRouteRule{{
// none
}},
},
},
"rules-with-matches-no-methods": {
route: &pbmesh.HTTPRoute{
Rules: []*pbmesh.HTTPRouteRule{{
Matches: []*pbmesh.HTTPRouteMatch{{
Path: &pbmesh.HTTPPathMatch{
Type: pbmesh.PathMatchType_PATH_MATCH_TYPE_PREFIX,
Value: "/foo",
},
}},
}},
},
},
"rules-with-matches-methods-uppercase": {
route: &pbmesh.HTTPRoute{
Rules: []*pbmesh.HTTPRouteRule{{
Matches: []*pbmesh.HTTPRouteMatch{
{
Path: &pbmesh.HTTPPathMatch{
Type: pbmesh.PathMatchType_PATH_MATCH_TYPE_PREFIX,
Value: "/foo",
},
Method: "GET",
},
{
Path: &pbmesh.HTTPPathMatch{
Type: pbmesh.PathMatchType_PATH_MATCH_TYPE_PREFIX,
Value: "/bar",
},
Method: "POST",
},
},
}},
},
},
"rules-with-matches-methods-lowercase": {
route: &pbmesh.HTTPRoute{
Rules: []*pbmesh.HTTPRouteRule{{
Matches: []*pbmesh.HTTPRouteMatch{
{
Path: &pbmesh.HTTPPathMatch{
Type: pbmesh.PathMatchType_PATH_MATCH_TYPE_PREFIX,
Value: "/foo",
},
Method: "get",
},
{
Path: &pbmesh.HTTPPathMatch{
Type: pbmesh.PathMatchType_PATH_MATCH_TYPE_PREFIX,
Value: "/bar",
},
Method: "post",
},
},
}},
},
expect: &pbmesh.HTTPRoute{
Rules: []*pbmesh.HTTPRouteRule{{
Matches: []*pbmesh.HTTPRouteMatch{
{
Path: &pbmesh.HTTPPathMatch{
Type: pbmesh.PathMatchType_PATH_MATCH_TYPE_PREFIX,
Value: "/foo",
},
Method: "GET",
},
{
Path: &pbmesh.HTTPPathMatch{
Type: pbmesh.PathMatchType_PATH_MATCH_TYPE_PREFIX,
Value: "/bar",
},
Method: "POST",
},
},
}},
},
},
}
for name, tc := range cases {
t.Run(name, func(t *testing.T) {
run(t, tc)
})
}
}
func TestValidateHTTPRoute(t *testing.T) {
type testcase struct {
route *pbmesh.HTTPRoute
expectErr string
}
run := func(t *testing.T, tc testcase) {
res := resourcetest.Resource(HTTPRouteType, "api").
WithData(t, tc.route).
Build()
err := MutateHTTPRoute(res)
require.NoError(t, err)
err = ValidateHTTPRoute(res)
// Verify that validate didn't actually change the object.
got := resourcetest.MustDecode[*pbmesh.HTTPRoute](t, res)
prototest.AssertDeepEqual(t, tc.route, got.Data)
if tc.expectErr == "" {
require.NoError(t, err)
} else {
testutil.RequireErrorContains(t, err, tc.expectErr)
}
}
cases := map[string]testcase{
"hostnames not supported for services": {
route: &pbmesh.HTTPRoute{
ParentRefs: []*pbmesh.ParentReference{
newParentRef(catalog.ServiceType, "web", ""),
},
Hostnames: []string{"foo.local"},
},
expectErr: `invalid "hostnames" field: should not populate hostnames`,
},
"no rules": {
route: &pbmesh.HTTPRoute{
ParentRefs: []*pbmesh.ParentReference{
newParentRef(catalog.ServiceType, "web", ""),
},
},
},
"rules with no matches": {
route: &pbmesh.HTTPRoute{
ParentRefs: []*pbmesh.ParentReference{
newParentRef(catalog.ServiceType, "web", ""),
},
Rules: []*pbmesh.HTTPRouteRule{{
BackendRefs: []*pbmesh.HTTPBackendRef{{
BackendRef: newBackendRef(catalog.ServiceType, "api", ""),
}},
}},
},
},
"rules with matches that are empty": {
route: &pbmesh.HTTPRoute{
ParentRefs: []*pbmesh.ParentReference{
newParentRef(catalog.ServiceType, "web", ""),
},
Rules: []*pbmesh.HTTPRouteRule{{
Matches: []*pbmesh.HTTPRouteMatch{{
// none
}},
BackendRefs: []*pbmesh.HTTPBackendRef{{
BackendRef: newBackendRef(catalog.ServiceType, "api", ""),
}},
}},
},
},
"path match with no type is bad": {
route: &pbmesh.HTTPRoute{
ParentRefs: []*pbmesh.ParentReference{
newParentRef(catalog.ServiceType, "web", ""),
},
Rules: []*pbmesh.HTTPRouteRule{{
Matches: []*pbmesh.HTTPRouteMatch{{
Path: &pbmesh.HTTPPathMatch{
Value: "/foo",
},
}},
BackendRefs: []*pbmesh.HTTPBackendRef{{
BackendRef: newBackendRef(catalog.ServiceType, "api", ""),
}},
}},
},
expectErr: `invalid element at index 0 of list "rules": invalid element at index 0 of list "matches": invalid "path" field: invalid "type" field: missing required field`,
},
"path match with unknown type is bad": {
route: &pbmesh.HTTPRoute{
ParentRefs: []*pbmesh.ParentReference{
newParentRef(catalog.ServiceType, "web", ""),
},
Rules: []*pbmesh.HTTPRouteRule{{
Matches: []*pbmesh.HTTPRouteMatch{{
Path: &pbmesh.HTTPPathMatch{
Type: 99,
Value: "/foo",
},
}},
BackendRefs: []*pbmesh.HTTPBackendRef{{
BackendRef: newBackendRef(catalog.ServiceType, "api", ""),
}},
}},
},
expectErr: `invalid element at index 0 of list "rules": invalid element at index 0 of list "matches": invalid "path" field: invalid "type" field: not a supported enum value: 99`,
},
"exact path match with no leading slash is bad": {
route: &pbmesh.HTTPRoute{
ParentRefs: []*pbmesh.ParentReference{
newParentRef(catalog.ServiceType, "web", ""),
},
Rules: []*pbmesh.HTTPRouteRule{{
Matches: []*pbmesh.HTTPRouteMatch{{
Path: &pbmesh.HTTPPathMatch{
Type: pbmesh.PathMatchType_PATH_MATCH_TYPE_EXACT,
Value: "foo",
},
}},
BackendRefs: []*pbmesh.HTTPBackendRef{{
BackendRef: newBackendRef(catalog.ServiceType, "api", ""),
}},
}},
},
expectErr: `invalid element at index 0 of list "rules": invalid element at index 0 of list "matches": invalid "path" field: invalid "value" field: exact patch value does not start with '/': "foo"`,
},
"prefix path match with no leading slash is bad": {
route: &pbmesh.HTTPRoute{
ParentRefs: []*pbmesh.ParentReference{
newParentRef(catalog.ServiceType, "web", ""),
},
Rules: []*pbmesh.HTTPRouteRule{{
Matches: []*pbmesh.HTTPRouteMatch{{
Path: &pbmesh.HTTPPathMatch{
Type: pbmesh.PathMatchType_PATH_MATCH_TYPE_PREFIX,
Value: "foo",
},
}},
BackendRefs: []*pbmesh.HTTPBackendRef{{
BackendRef: newBackendRef(catalog.ServiceType, "api", ""),
}},
}},
},
expectErr: `invalid element at index 0 of list "rules": invalid element at index 0 of list "matches": invalid "path" field: invalid "value" field: prefix patch value does not start with '/': "foo"`,
},
"exact path match with leading slash is good": {
route: &pbmesh.HTTPRoute{
ParentRefs: []*pbmesh.ParentReference{
newParentRef(catalog.ServiceType, "web", ""),
},
Rules: []*pbmesh.HTTPRouteRule{{
Matches: []*pbmesh.HTTPRouteMatch{{
Path: &pbmesh.HTTPPathMatch{
Type: pbmesh.PathMatchType_PATH_MATCH_TYPE_EXACT,
Value: "/foo",
},
}},
BackendRefs: []*pbmesh.HTTPBackendRef{{
BackendRef: newBackendRef(catalog.ServiceType, "api", ""),
}},
}},
},
},
"prefix path match with leading slash is good": {
route: &pbmesh.HTTPRoute{
ParentRefs: []*pbmesh.ParentReference{
newParentRef(catalog.ServiceType, "web", ""),
},
Rules: []*pbmesh.HTTPRouteRule{{
Matches: []*pbmesh.HTTPRouteMatch{{
Path: &pbmesh.HTTPPathMatch{
Type: pbmesh.PathMatchType_PATH_MATCH_TYPE_PREFIX,
Value: "/foo",
},
}},
BackendRefs: []*pbmesh.HTTPBackendRef{{
BackendRef: newBackendRef(catalog.ServiceType, "api", ""),
}},
}},
},
},
"header match with no type is bad": {
route: &pbmesh.HTTPRoute{
ParentRefs: []*pbmesh.ParentReference{
newParentRef(catalog.ServiceType, "web", ""),
},
Rules: []*pbmesh.HTTPRouteRule{{
Matches: []*pbmesh.HTTPRouteMatch{{
Headers: []*pbmesh.HTTPHeaderMatch{{
Name: "x-foo",
}},
}},
BackendRefs: []*pbmesh.HTTPBackendRef{{
BackendRef: newBackendRef(catalog.ServiceType, "api", ""),
}},
}},
},
expectErr: `invalid element at index 0 of list "rules": invalid element at index 0 of list "matches": invalid element at index 0 of list "headers": invalid "type" field: missing required field`,
},
"header match with unknown type is bad": {
route: &pbmesh.HTTPRoute{
ParentRefs: []*pbmesh.ParentReference{
newParentRef(catalog.ServiceType, "web", ""),
},
Rules: []*pbmesh.HTTPRouteRule{{
Matches: []*pbmesh.HTTPRouteMatch{{
Headers: []*pbmesh.HTTPHeaderMatch{{
Type: 99,
Name: "x-foo",
}},
}},
BackendRefs: []*pbmesh.HTTPBackendRef{{
BackendRef: newBackendRef(catalog.ServiceType, "api", ""),
}},
}},
},
expectErr: `invalid element at index 0 of list "rules": invalid element at index 0 of list "matches": invalid element at index 0 of list "headers": invalid "type" field: not a supported enum value: 99`,
},
"header match with no name is bad": {
route: &pbmesh.HTTPRoute{
ParentRefs: []*pbmesh.ParentReference{
newParentRef(catalog.ServiceType, "web", ""),
},
Rules: []*pbmesh.HTTPRouteRule{{
Matches: []*pbmesh.HTTPRouteMatch{{
Headers: []*pbmesh.HTTPHeaderMatch{{
Type: pbmesh.HeaderMatchType_HEADER_MATCH_TYPE_EXACT,
}},
}},
BackendRefs: []*pbmesh.HTTPBackendRef{{
BackendRef: newBackendRef(catalog.ServiceType, "api", ""),
}},
}},
},
expectErr: `invalid element at index 0 of list "rules": invalid element at index 0 of list "matches": invalid element at index 0 of list "headers": invalid "name" field: missing required field`,
},
"header match is good": {
route: &pbmesh.HTTPRoute{
ParentRefs: []*pbmesh.ParentReference{
newParentRef(catalog.ServiceType, "web", ""),
},
Rules: []*pbmesh.HTTPRouteRule{{
Matches: []*pbmesh.HTTPRouteMatch{{
Headers: []*pbmesh.HTTPHeaderMatch{{
Type: pbmesh.HeaderMatchType_HEADER_MATCH_TYPE_EXACT,
Name: "x-foo",
}},
}},
BackendRefs: []*pbmesh.HTTPBackendRef{{
BackendRef: newBackendRef(catalog.ServiceType, "api", ""),
}},
}},
},
},
"queryparam match with no type is bad": {
route: &pbmesh.HTTPRoute{
ParentRefs: []*pbmesh.ParentReference{
newParentRef(catalog.ServiceType, "web", ""),
},
Rules: []*pbmesh.HTTPRouteRule{{
Matches: []*pbmesh.HTTPRouteMatch{{
QueryParams: []*pbmesh.HTTPQueryParamMatch{{
Name: "x-foo",
}},
}},
BackendRefs: []*pbmesh.HTTPBackendRef{{
BackendRef: newBackendRef(catalog.ServiceType, "api", ""),
}},
}},
},
expectErr: `invalid element at index 0 of list "rules": invalid element at index 0 of list "matches": invalid element at index 0 of list "query_params": invalid "type" field: missing required field`,
},
"queryparam match with unknown type is bad": {
route: &pbmesh.HTTPRoute{
ParentRefs: []*pbmesh.ParentReference{
newParentRef(catalog.ServiceType, "web", ""),
},
Rules: []*pbmesh.HTTPRouteRule{{
Matches: []*pbmesh.HTTPRouteMatch{{
QueryParams: []*pbmesh.HTTPQueryParamMatch{{
Type: 99,
Name: "x-foo",
}},
}},
BackendRefs: []*pbmesh.HTTPBackendRef{{
BackendRef: newBackendRef(catalog.ServiceType, "api", ""),
}},
}},
},
expectErr: `invalid element at index 0 of list "rules": invalid element at index 0 of list "matches": invalid element at index 0 of list "query_params": invalid "type" field: not a supported enum value: 99`,
},
"queryparam match with no name is bad": {
route: &pbmesh.HTTPRoute{
ParentRefs: []*pbmesh.ParentReference{
newParentRef(catalog.ServiceType, "web", ""),
},
Rules: []*pbmesh.HTTPRouteRule{{
Matches: []*pbmesh.HTTPRouteMatch{{
QueryParams: []*pbmesh.HTTPQueryParamMatch{{
Type: pbmesh.QueryParamMatchType_QUERY_PARAM_MATCH_TYPE_EXACT,
}},
}},
BackendRefs: []*pbmesh.HTTPBackendRef{{
BackendRef: newBackendRef(catalog.ServiceType, "api", ""),
}},
}},
},
expectErr: `invalid element at index 0 of list "rules": invalid element at index 0 of list "matches": invalid element at index 0 of list "query_params": invalid "name" field: missing required field`,
},
"queryparam match is good": {
route: &pbmesh.HTTPRoute{
ParentRefs: []*pbmesh.ParentReference{
newParentRef(catalog.ServiceType, "web", ""),
},
Rules: []*pbmesh.HTTPRouteRule{{
Matches: []*pbmesh.HTTPRouteMatch{{
QueryParams: []*pbmesh.HTTPQueryParamMatch{{
Type: pbmesh.QueryParamMatchType_QUERY_PARAM_MATCH_TYPE_EXACT,
Name: "x-foo",
}},
}},
BackendRefs: []*pbmesh.HTTPBackendRef{{
BackendRef: newBackendRef(catalog.ServiceType, "api", ""),
}},
}},
},
},
"method match is bad": {
route: &pbmesh.HTTPRoute{
ParentRefs: []*pbmesh.ParentReference{
newParentRef(catalog.ServiceType, "web", ""),
},
Rules: []*pbmesh.HTTPRouteRule{{
Matches: []*pbmesh.HTTPRouteMatch{{
Method: "BOB",
}},
BackendRefs: []*pbmesh.HTTPBackendRef{{
BackendRef: newBackendRef(catalog.ServiceType, "api", ""),
}},
}},
},
expectErr: `invalid element at index 0 of list "rules": invalid element at index 0 of list "matches": invalid "method" field: not a valid http method: "BOB"`,
},
"method match is good": {
route: &pbmesh.HTTPRoute{
ParentRefs: []*pbmesh.ParentReference{
newParentRef(catalog.ServiceType, "web", ""),
},
Rules: []*pbmesh.HTTPRouteRule{{
Matches: []*pbmesh.HTTPRouteMatch{{
Method: "DELETE",
}},
BackendRefs: []*pbmesh.HTTPBackendRef{{
BackendRef: newBackendRef(catalog.ServiceType, "api", ""),
}},
}},
},
},
"filter empty is bad": {
route: &pbmesh.HTTPRoute{
ParentRefs: []*pbmesh.ParentReference{
newParentRef(catalog.ServiceType, "web", ""),
},
Rules: []*pbmesh.HTTPRouteRule{{
Filters: []*pbmesh.HTTPRouteFilter{{
// none
}},
BackendRefs: []*pbmesh.HTTPBackendRef{{
BackendRef: newBackendRef(catalog.ServiceType, "api", ""),
}},
}},
},
expectErr: `invalid element at index 0 of list "rules": invalid element at index 0 of list "filters": exactly one of request_header_modifier, response_header_modifier, or url_rewrite`,
},
"filter req header mod is ok": {
route: &pbmesh.HTTPRoute{
ParentRefs: []*pbmesh.ParentReference{
newParentRef(catalog.ServiceType, "web", ""),
},
Rules: []*pbmesh.HTTPRouteRule{{
Filters: []*pbmesh.HTTPRouteFilter{{
RequestHeaderModifier: &pbmesh.HTTPHeaderFilter{},
}},
BackendRefs: []*pbmesh.HTTPBackendRef{{
BackendRef: newBackendRef(catalog.ServiceType, "api", ""),
}},
}},
},
},
"filter resp header mod is ok": {
route: &pbmesh.HTTPRoute{
ParentRefs: []*pbmesh.ParentReference{
newParentRef(catalog.ServiceType, "web", ""),
},
Rules: []*pbmesh.HTTPRouteRule{{
Filters: []*pbmesh.HTTPRouteFilter{{
ResponseHeaderModifier: &pbmesh.HTTPHeaderFilter{},
}},
BackendRefs: []*pbmesh.HTTPBackendRef{{
BackendRef: newBackendRef(catalog.ServiceType, "api", ""),
}},
}},
},
},
"filter rewrite header mod missing path prefix": {
route: &pbmesh.HTTPRoute{
ParentRefs: []*pbmesh.ParentReference{
newParentRef(catalog.ServiceType, "web", ""),
},
Rules: []*pbmesh.HTTPRouteRule{{
Filters: []*pbmesh.HTTPRouteFilter{{
UrlRewrite: &pbmesh.HTTPURLRewriteFilter{},
}},
BackendRefs: []*pbmesh.HTTPBackendRef{{
BackendRef: newBackendRef(catalog.ServiceType, "api", ""),
}},
}},
},
expectErr: `invalid element at index 0 of list "rules": invalid element at index 0 of list "filters": invalid "url_rewrite" field: invalid "path_prefix" field: field should not be empty if enclosing section is set`,
},
"filter rewrite header mod is ok": {
route: &pbmesh.HTTPRoute{
ParentRefs: []*pbmesh.ParentReference{
newParentRef(catalog.ServiceType, "web", ""),
},
Rules: []*pbmesh.HTTPRouteRule{{
Filters: []*pbmesh.HTTPRouteFilter{{
UrlRewrite: &pbmesh.HTTPURLRewriteFilter{
PathPrefix: "/blah",
},
}},
BackendRefs: []*pbmesh.HTTPBackendRef{{
BackendRef: newBackendRef(catalog.ServiceType, "api", ""),
}},
}},
},
},
"filter req+resp header mod is bad": {
route: &pbmesh.HTTPRoute{
ParentRefs: []*pbmesh.ParentReference{
newParentRef(catalog.ServiceType, "web", ""),
},
Rules: []*pbmesh.HTTPRouteRule{{
Filters: []*pbmesh.HTTPRouteFilter{{
RequestHeaderModifier: &pbmesh.HTTPHeaderFilter{},
ResponseHeaderModifier: &pbmesh.HTTPHeaderFilter{},
}},
BackendRefs: []*pbmesh.HTTPBackendRef{{
BackendRef: newBackendRef(catalog.ServiceType, "api", ""),
}},
}},
},
expectErr: `invalid element at index 0 of list "rules": invalid element at index 0 of list "filters": exactly one of request_header_modifier, response_header_modifier, or url_rewrite`,
},
"filter req+rewrite header mod is bad": {
route: &pbmesh.HTTPRoute{
ParentRefs: []*pbmesh.ParentReference{
newParentRef(catalog.ServiceType, "web", ""),
},
Rules: []*pbmesh.HTTPRouteRule{{
Filters: []*pbmesh.HTTPRouteFilter{{
RequestHeaderModifier: &pbmesh.HTTPHeaderFilter{},
UrlRewrite: &pbmesh.HTTPURLRewriteFilter{
PathPrefix: "/blah",
},
}},
BackendRefs: []*pbmesh.HTTPBackendRef{{
BackendRef: newBackendRef(catalog.ServiceType, "api", ""),
}},
}},
},
expectErr: `invalid element at index 0 of list "rules": invalid element at index 0 of list "filters": exactly one of request_header_modifier, response_header_modifier, or url_rewrite`,
},
"filter resp+rewrite header mod is bad": {
route: &pbmesh.HTTPRoute{
ParentRefs: []*pbmesh.ParentReference{
newParentRef(catalog.ServiceType, "web", ""),
},
Rules: []*pbmesh.HTTPRouteRule{{
Filters: []*pbmesh.HTTPRouteFilter{{
ResponseHeaderModifier: &pbmesh.HTTPHeaderFilter{},
UrlRewrite: &pbmesh.HTTPURLRewriteFilter{
PathPrefix: "/blah",
},
}},
BackendRefs: []*pbmesh.HTTPBackendRef{{
BackendRef: newBackendRef(catalog.ServiceType, "api", ""),
}},
}},
},
expectErr: `invalid element at index 0 of list "rules": invalid element at index 0 of list "filters": exactly one of request_header_modifier, response_header_modifier, or url_rewrite`,
},
"filter req+resp+rewrite header mod is bad": {
route: &pbmesh.HTTPRoute{
ParentRefs: []*pbmesh.ParentReference{
newParentRef(catalog.ServiceType, "web", ""),
},
Rules: []*pbmesh.HTTPRouteRule{{
Filters: []*pbmesh.HTTPRouteFilter{{
RequestHeaderModifier: &pbmesh.HTTPHeaderFilter{},
ResponseHeaderModifier: &pbmesh.HTTPHeaderFilter{},
UrlRewrite: &pbmesh.HTTPURLRewriteFilter{
PathPrefix: "/blah",
},
}},
BackendRefs: []*pbmesh.HTTPBackendRef{{
BackendRef: newBackendRef(catalog.ServiceType, "api", ""),
}},
}},
},
expectErr: `invalid element at index 0 of list "rules": invalid element at index 0 of list "filters": exactly one of request_header_modifier, response_header_modifier, or url_rewrite`,
},
"backend ref with filters is unsupported": {
route: &pbmesh.HTTPRoute{
ParentRefs: []*pbmesh.ParentReference{
newParentRef(catalog.ServiceType, "web", ""),
},
Rules: []*pbmesh.HTTPRouteRule{{
BackendRefs: []*pbmesh.HTTPBackendRef{{
BackendRef: newBackendRef(catalog.ServiceType, "api", ""),
Filters: []*pbmesh.HTTPRouteFilter{{
RequestHeaderModifier: &pbmesh.HTTPHeaderFilter{},
}},
}},
}},
},
expectErr: `invalid element at index 0 of list "rules": invalid element at index 0 of list "backend_refs": invalid "filters" field: filters are not supported at this level yet`,
},
"nil backend ref": {
route: &pbmesh.HTTPRoute{
ParentRefs: []*pbmesh.ParentReference{
newParentRef(catalog.ServiceType, "web", ""),
},
Rules: []*pbmesh.HTTPRouteRule{{
BackendRefs: []*pbmesh.HTTPBackendRef{nil},
}},
},
expectErr: `invalid element at index 0 of list "rules": invalid element at index 0 of list "backend_refs": invalid "backend_ref" field: missing required field`,
},
}
// Add common timeouts test cases.
for name, timeoutsTC := range getXRouteTimeoutsTestCases() {
cases["timeouts: "+name] = testcase{
route: &pbmesh.HTTPRoute{
ParentRefs: []*pbmesh.ParentReference{
newParentRef(catalog.ServiceType, "web", ""),
},
Rules: []*pbmesh.HTTPRouteRule{{
Timeouts: timeoutsTC.timeouts,
BackendRefs: []*pbmesh.HTTPBackendRef{{
BackendRef: newBackendRef(catalog.ServiceType, "api", ""),
}},
}},
},
expectErr: timeoutsTC.expectErr,
}
}
// Add common retries test cases.
for name, retriesTC := range getXRouteRetriesTestCases() {
cases["retries: "+name] = testcase{
route: &pbmesh.HTTPRoute{
ParentRefs: []*pbmesh.ParentReference{
newParentRef(catalog.ServiceType, "web", ""),
},
Rules: []*pbmesh.HTTPRouteRule{{
Retries: retriesTC.retries,
BackendRefs: []*pbmesh.HTTPBackendRef{{
BackendRef: newBackendRef(catalog.ServiceType, "api", ""),
}},
}},
},
expectErr: retriesTC.expectErr,
}
}
// Add common parent refs test cases.
for name, parentTC := range getXRouteParentRefTestCases() {
cases["parent-ref: "+name] = testcase{
route: &pbmesh.HTTPRoute{
ParentRefs: parentTC.refs,
},
expectErr: parentTC.expectErr,
}
}
// Add common backend ref test cases.
for name, backendTC := range getXRouteBackendRefTestCases() {
var refs []*pbmesh.HTTPBackendRef
for _, br := range backendTC.refs {
refs = append(refs, &pbmesh.HTTPBackendRef{
BackendRef: br,
})
}
cases["backend-ref: "+name] = testcase{
route: &pbmesh.HTTPRoute{
ParentRefs: []*pbmesh.ParentReference{
newParentRef(catalog.ServiceType, "web", ""),
},
Rules: []*pbmesh.HTTPRouteRule{
{BackendRefs: refs},
},
},
expectErr: backendTC.expectErr,
}
}
for name, tc := range cases {
t.Run(name, func(t *testing.T) {
run(t, tc)
})
}
}
type xRouteParentRefTestcase struct {
refs []*pbmesh.ParentReference
expectErr string
}
func getXRouteParentRefTestCases() map[string]xRouteParentRefTestcase {
return map[string]xRouteParentRefTestcase{
"no parent refs": {
expectErr: `invalid "parent_refs" field: cannot be empty`,
},
"parent ref with nil ref": {
refs: []*pbmesh.ParentReference{
newParentRef(catalog.ServiceType, "api", ""),
{
Ref: nil,
Port: "http",
},
},
expectErr: `invalid element at index 1 of list "parent_refs": invalid "ref" field: missing required field`,
},
"parent ref with bad type ref": {
refs: []*pbmesh.ParentReference{
newParentRef(catalog.ServiceType, "api", ""),
newParentRef(catalog.WorkloadType, "api", ""),
},
expectErr: `invalid element at index 1 of list "parent_refs": invalid "ref" field: reference must have type catalog.v1alpha1.Service`,
},
"parent ref with section": {
refs: []*pbmesh.ParentReference{
newParentRef(catalog.ServiceType, "api", ""),
{
Ref: resourcetest.Resource(catalog.ServiceType, "web").Reference("section2"),
Port: "http",
},
},
expectErr: `invalid element at index 1 of list "parent_refs": invalid "ref" field: invalid "section" field: section not supported for service parent refs`,
},
"duplicate exact parents": {
refs: []*pbmesh.ParentReference{
newParentRef(catalog.ServiceType, "api", "http"),
newParentRef(catalog.ServiceType, "api", "http"),
},
expectErr: `invalid element at index 1 of list "parent_refs": invalid "ref" field: parent ref "catalog.v1alpha1.Service/default.local.default/api" for port "http" exists twice`,
},
"duplicate wild parents": {
refs: []*pbmesh.ParentReference{
newParentRef(catalog.ServiceType, "api", ""),
newParentRef(catalog.ServiceType, "api", ""),
},
expectErr: `invalid element at index 1 of list "parent_refs": invalid "ref" field: parent ref "catalog.v1alpha1.Service/default.local.default/api" for wildcard port exists twice`,
},
"duplicate parents via exact+wild overlap": {
refs: []*pbmesh.ParentReference{
newParentRef(catalog.ServiceType, "api", "http"),
newParentRef(catalog.ServiceType, "api", ""),
},
expectErr: `invalid element at index 1 of list "parent_refs": invalid "ref" field: parent ref "catalog.v1alpha1.Service/default.local.default/api" for ports [http] covered by wildcard port already`,
},
"duplicate parents via exact+wild overlap (reversed)": {
refs: []*pbmesh.ParentReference{
newParentRef(catalog.ServiceType, "api", ""),
newParentRef(catalog.ServiceType, "api", "http"),
},
expectErr: `invalid element at index 1 of list "parent_refs": invalid "ref" field: parent ref "catalog.v1alpha1.Service/default.local.default/api" for port "http" covered by wildcard port already`,
},
"good single parent ref": {
refs: []*pbmesh.ParentReference{
newParentRef(catalog.ServiceType, "api", "http"),
},
},
"good multiple parent refs": {
refs: []*pbmesh.ParentReference{
newParentRef(catalog.ServiceType, "api", "http"),
newParentRef(catalog.ServiceType, "web", ""),
},
},
}
}
type xRouteBackendRefTestcase struct {
refs []*pbmesh.BackendReference
expectErr string
}
func getXRouteBackendRefTestCases() map[string]xRouteBackendRefTestcase {
return map[string]xRouteBackendRefTestcase{
"no backend refs": {
expectErr: `invalid "backend_refs" field: cannot be empty`,
},
"backend ref with nil ref": {
refs: []*pbmesh.BackendReference{
newBackendRef(catalog.ServiceType, "api", ""),
{
Ref: nil,
Port: "http",
},
},
expectErr: `invalid element at index 0 of list "rules": invalid element at index 1 of list "backend_refs": invalid "backend_ref" field: invalid "ref" field: missing required field`,
},
"backend ref with bad type ref": {
refs: []*pbmesh.BackendReference{
newBackendRef(catalog.ServiceType, "api", ""),
newBackendRef(catalog.WorkloadType, "api", ""),
},
expectErr: `invalid element at index 0 of list "rules": invalid element at index 1 of list "backend_refs": invalid "backend_ref" field: invalid "ref" field: reference must have type catalog.v1alpha1.Service`,
},
"backend ref with section": {
refs: []*pbmesh.BackendReference{
newBackendRef(catalog.ServiceType, "api", ""),
{
Ref: resourcetest.Resource(catalog.ServiceType, "web").Reference("section2"),
Port: "http",
},
},
expectErr: `invalid element at index 0 of list "rules": invalid element at index 1 of list "backend_refs": invalid "backend_ref" field: invalid "ref" field: invalid "section" field: section not supported for service backend refs`,
},
"backend ref with datacenter": {
refs: []*pbmesh.BackendReference{
newBackendRef(catalog.ServiceType, "api", ""),
{
Ref: newRef(catalog.ServiceType, "db"),
Port: "http",
Datacenter: "dc2",
},
},
expectErr: `invalid element at index 0 of list "rules": invalid element at index 1 of list "backend_refs": invalid "backend_ref" field: invalid "datacenter" field: datacenter is not yet supported on backend refs`,
},
"good backend ref": {
refs: []*pbmesh.BackendReference{
newBackendRef(catalog.ServiceType, "api", ""),
{
Ref: newRef(catalog.ServiceType, "db"),
Port: "http",
},
},
},
}
}
type xRouteTimeoutsTestcase struct {
timeouts *pbmesh.HTTPRouteTimeouts
expectErr string
}
func getXRouteTimeoutsTestCases() map[string]xRouteTimeoutsTestcase {
return map[string]xRouteTimeoutsTestcase{
"bad request": {
timeouts: &pbmesh.HTTPRouteTimeouts{
Request: durationpb.New(-1 * time.Second),
},
expectErr: `invalid element at index 0 of list "rules": invalid "timeouts" field: invalid "request" field: timeout cannot be negative: -1s`,
},
"bad backend request": {
timeouts: &pbmesh.HTTPRouteTimeouts{
BackendRequest: durationpb.New(-1 * time.Second),
},
expectErr: `invalid element at index 0 of list "rules": invalid "timeouts" field: invalid "backend_request" field: timeout cannot be negative: -1s`,
},
"bad idle": {
timeouts: &pbmesh.HTTPRouteTimeouts{
Idle: durationpb.New(-1 * time.Second),
},
expectErr: `invalid element at index 0 of list "rules": invalid "timeouts" field: invalid "idle" field: timeout cannot be negative: -1s`,
},
"good all": {
timeouts: &pbmesh.HTTPRouteTimeouts{
Request: durationpb.New(1 * time.Second),
BackendRequest: durationpb.New(2 * time.Second),
Idle: durationpb.New(3 * time.Second),
},
},
}
}
type xRouteRetriesTestcase struct {
retries *pbmesh.HTTPRouteRetries
expectErr string
}
func getXRouteRetriesTestCases() map[string]xRouteRetriesTestcase {
return map[string]xRouteRetriesTestcase{
"bad number": {
retries: &pbmesh.HTTPRouteRetries{
Number: -5,
},
expectErr: `invalid element at index 0 of list "rules": invalid "retries" field: invalid "number" field: cannot be negative: -5`,
},
"bad conditions": {
retries: &pbmesh.HTTPRouteRetries{
OnConditions: []string{"garbage"},
},
expectErr: `invalid element at index 0 of list "rules": invalid "retries" field: invalid element at index 0 of list "on_conditions": not a valid retry condition: "garbage"`,
},
"good all": {
retries: &pbmesh.HTTPRouteRetries{
Number: 5,
OnConditions: []string{"internal"},
},
},
}
}
func newRef(typ *pbresource.Type, name string) *pbresource.Reference {
return resourcetest.Resource(typ, name).Reference("")
}
func newBackendRef(typ *pbresource.Type, name, port string) *pbmesh.BackendReference {
return &pbmesh.BackendReference{
Ref: newRef(typ, name),
Port: port,
}
}
func newParentRef(typ *pbresource.Type, name, port string) *pbmesh.ParentReference {
return &pbmesh.ParentReference{
Ref: newRef(typ, name),
Port: port,
}
}
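These constructors keep the table entries above terse. Purely as an illustration (test-scoped, reusing the helpers and assertions already imported in this file), a minimal route that passes both hooks looks like:

// Illustrative only: mirrors the "rules with no matches" good case above.
route := &pbmesh.HTTPRoute{
	ParentRefs: []*pbmesh.ParentReference{
		newParentRef(catalog.ServiceType, "web", ""),
	},
	Rules: []*pbmesh.HTTPRouteRule{{
		BackendRefs: []*pbmesh.HTTPBackendRef{{
			BackendRef: newBackendRef(catalog.ServiceType, "api", ""),
		}},
	}},
}
res := resourcetest.Resource(HTTPRouteType, "api").WithData(t, route).Build()
require.NoError(t, MutateHTTPRoute(res))
require.NoError(t, ValidateHTTPRoute(res))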

@@ -25,8 +25,9 @@ var (
func RegisterProxyConfiguration(r resource.Registry) {
	r.Register(resource.Registration{
		Type:  ProxyConfigurationV1Alpha1Type,
		Proto: &pbmesh.ProxyConfiguration{},
		// TODO(rb): add validation for proxy configuration
		Validate: nil,
	})
}

@@ -4,6 +4,8 @@
package types

import (
	"github.com/hashicorp/go-multierror"

	"github.com/hashicorp/consul/internal/resource"
	pbmesh "github.com/hashicorp/consul/proto-public/pbmesh/v1alpha1"
	"github.com/hashicorp/consul/proto-public/pbresource"

@@ -25,8 +27,69 @@ var (

func RegisterTCPRoute(r resource.Registry) {
	r.Register(resource.Registration{
		Type:  TCPRouteV1Alpha1Type,
		Proto: &pbmesh.TCPRoute{},
		// TODO(rb): normalize parent/backend ref tenancies in a Mutate hook
		Validate: ValidateTCPRoute,
	})
}
func ValidateTCPRoute(res *pbresource.Resource) error {
var route pbmesh.TCPRoute
if err := res.Data.UnmarshalTo(&route); err != nil {
return resource.NewErrDataParse(&route, err)
}
var merr error
if err := validateParentRefs(route.ParentRefs); err != nil {
merr = multierror.Append(merr, err)
}
for i, rule := range route.Rules {
wrapRuleErr := func(err error) error {
return resource.ErrInvalidListElement{
Name: "rules",
Index: i,
Wrapped: err,
}
}
if len(rule.BackendRefs) == 0 {
/*
BackendRefs (optional)
BackendRefs defines API objects where matching requests should be
sent. If unspecified, the rule performs no forwarding. If
unspecified and no filters are specified that would result in a
response being sent, a 404 error code is returned.
*/
merr = multierror.Append(merr, wrapRuleErr(
resource.ErrInvalidField{
Name: "backend_refs",
Wrapped: resource.ErrEmpty,
},
))
}
for j, hbref := range rule.BackendRefs {
wrapBackendRefErr := func(err error) error {
return wrapRuleErr(resource.ErrInvalidListElement{
Name: "backend_refs",
Index: j,
Wrapped: err,
})
}
for _, err := range validateBackendRef(hbref.BackendRef) {
merr = multierror.Append(merr, wrapBackendRefErr(
resource.ErrInvalidField{
Name: "backend_ref",
Wrapped: err,
},
))
}
}
}
return merr
}
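Although the quoted Gateway API text allows BackendRefs to be left unspecified, this validator requires at least one backend ref per rule. A short test-scoped sketch (reusing the helpers defined alongside the other route tests in this change):

// A rule with no backend refs is rejected up front.
badRoute := &pbmesh.TCPRoute{
	ParentRefs: []*pbmesh.ParentReference{
		newParentRef(catalog.ServiceType, "web", ""),
	},
	Rules: []*pbmesh.TCPRouteRule{{}}, // no backend refs
}
res := resourcetest.Resource(TCPRouteType, "api").WithData(t, badRoute).Build()
err := ValidateTCPRoute(res)
// err contains: invalid element at index 0 of list "rules": invalid "backend_refs" field: cannot be empty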

@@ -0,0 +1,79 @@
// Copyright (c) HashiCorp, Inc.
// SPDX-License-Identifier: BUSL-1.1
package types
import (
"testing"
"github.com/stretchr/testify/require"
"github.com/hashicorp/consul/internal/catalog"
"github.com/hashicorp/consul/internal/resource/resourcetest"
pbmesh "github.com/hashicorp/consul/proto-public/pbmesh/v1alpha1"
"github.com/hashicorp/consul/proto/private/prototest"
"github.com/hashicorp/consul/sdk/testutil"
)
func TestValidateTCPRoute(t *testing.T) {
type testcase struct {
route *pbmesh.TCPRoute
expectErr string
}
run := func(t *testing.T, tc testcase) {
res := resourcetest.Resource(TCPRouteType, "api").
WithData(t, tc.route).
Build()
err := ValidateTCPRoute(res)
// Verify that validate didn't actually change the object.
got := resourcetest.MustDecode[*pbmesh.TCPRoute](t, res)
prototest.AssertDeepEqual(t, tc.route, got.Data)
if tc.expectErr == "" {
require.NoError(t, err)
} else {
testutil.RequireErrorContains(t, err, tc.expectErr)
}
}
cases := map[string]testcase{}
// Add common parent refs test cases.
for name, parentTC := range getXRouteParentRefTestCases() {
cases["parent-ref: "+name] = testcase{
route: &pbmesh.TCPRoute{
ParentRefs: parentTC.refs,
},
expectErr: parentTC.expectErr,
}
}
// Add common backend ref test cases.
for name, backendTC := range getXRouteBackendRefTestCases() {
var refs []*pbmesh.TCPBackendRef
for _, br := range backendTC.refs {
refs = append(refs, &pbmesh.TCPBackendRef{
BackendRef: br,
})
}
cases["backend-ref: "+name] = testcase{
route: &pbmesh.TCPRoute{
ParentRefs: []*pbmesh.ParentReference{
newParentRef(catalog.ServiceType, "web", ""),
},
Rules: []*pbmesh.TCPRouteRule{
{BackendRefs: refs},
},
},
expectErr: backendTC.expectErr,
}
}
for name, tc := range cases {
t.Run(name, func(t *testing.T) {
run(t, tc)
})
}
}

@@ -0,0 +1,52 @@
// Copyright (c) HashiCorp, Inc.
// SPDX-License-Identifier: BUSL-1.1
package types
import (
"github.com/hashicorp/consul/internal/catalog"
"github.com/hashicorp/consul/internal/resource"
"github.com/hashicorp/consul/proto-public/pbresource"
)
func IsRouteType(typ *pbresource.Type) bool {
switch {
case resource.EqualType(typ, HTTPRouteType),
resource.EqualType(typ, GRPCRouteType),
resource.EqualType(typ, TCPRouteType):
return true
}
return false
}
func IsFailoverPolicyType(typ *pbresource.Type) bool {
switch {
case resource.EqualType(typ, catalog.FailoverPolicyType):
return true
}
return false
}
func IsDestinationPolicyType(typ *pbresource.Type) bool {
switch {
case resource.EqualType(typ, DestinationPolicyType):
return true
}
return false
}
func IsServiceType(typ *pbresource.Type) bool {
switch {
case resource.EqualType(typ, catalog.ServiceType):
return true
}
return false
}
func IsComputedRoutesType(typ *pbresource.Type) bool {
switch {
case resource.EqualType(typ, ComputedRoutesType):
return true
}
return false
}
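These predicates are thin wrappers around resource.EqualType. A hypothetical caller (not part of this change) that only reacts to xRoute and ComputedRoutes resources could use them as a filter:

// Hypothetical filter: true only for the route-related types this package handles.
func isRoutingRelated(res *pbresource.Resource) bool {
	if res == nil || res.Id == nil || res.Id.Type == nil {
		return false
	}
	return IsRouteType(res.Id.Type) || IsComputedRoutesType(res.Id.Type)
}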

@@ -0,0 +1,296 @@
// Copyright (c) HashiCorp, Inc.
// SPDX-License-Identifier: BUSL-1.1
package types
import (
"errors"
"fmt"
"github.com/hashicorp/go-multierror"
"google.golang.org/protobuf/proto"
"github.com/hashicorp/consul/internal/catalog"
"github.com/hashicorp/consul/internal/resource"
pbmesh "github.com/hashicorp/consul/proto-public/pbmesh/v1alpha1"
)
type XRouteData interface {
proto.Message
XRouteWithRefs
}
type XRouteWithRefs interface {
GetParentRefs() []*pbmesh.ParentReference
GetUnderlyingBackendRefs() []*pbmesh.BackendReference
}
type portedRefKey struct {
Key resource.ReferenceKey
Port string
}
func validateParentRefs(parentRefs []*pbmesh.ParentReference) error {
var merr error
if len(parentRefs) == 0 {
merr = multierror.Append(merr, resource.ErrInvalidField{
Name: "parent_refs",
Wrapped: resource.ErrEmpty,
})
}
var (
seen = make(map[portedRefKey]struct{})
seenAny = make(map[resource.ReferenceKey][]string)
)
for i, parent := range parentRefs {
wrapErr := func(err error) error {
return resource.ErrInvalidListElement{
Name: "parent_refs",
Index: i,
Wrapped: err,
}
}
if parent.Ref == nil {
merr = multierror.Append(merr, wrapErr(
resource.ErrInvalidField{
Name: "ref",
Wrapped: resource.ErrMissing,
},
))
} else {
if !IsServiceType(parent.Ref.Type) {
merr = multierror.Append(merr, wrapErr(
resource.ErrInvalidField{
Name: "ref",
Wrapped: resource.ErrInvalidReferenceType{
AllowedType: catalog.ServiceType,
},
},
))
}
if parent.Ref.Section != "" {
merr = multierror.Append(merr, wrapErr(
resource.ErrInvalidField{
Name: "ref",
Wrapped: resource.ErrInvalidField{
Name: "section",
Wrapped: errors.New("section not supported for service parent refs"),
},
},
))
}
prk := portedRefKey{
Key: resource.NewReferenceKey(parent.Ref),
Port: parent.Port,
}
_, portExist := seen[prk]
if parent.Port == "" {
coveredPorts, exactExists := seenAny[prk.Key]
if portExist { // check for duplicate wild
merr = multierror.Append(merr, wrapErr(
resource.ErrInvalidField{
Name: "ref",
Wrapped: fmt.Errorf(
"parent ref %q for wildcard port exists twice",
resource.ReferenceToString(parent.Ref),
),
},
))
} else if exactExists { // check for existing exact
merr = multierror.Append(merr, wrapErr(
resource.ErrInvalidField{
Name: "ref",
Wrapped: fmt.Errorf(
"parent ref %q for ports %v covered by wildcard port already",
resource.ReferenceToString(parent.Ref),
coveredPorts,
),
},
))
} else {
seen[prk] = struct{}{}
}
} else {
prkWild := prk
prkWild.Port = ""
_, wildExist := seen[prkWild]
if portExist { // check for duplicate exact
merr = multierror.Append(merr, wrapErr(
resource.ErrInvalidField{
Name: "ref",
Wrapped: fmt.Errorf(
"parent ref %q for port %q exists twice",
resource.ReferenceToString(parent.Ref),
parent.Port,
),
},
))
} else if wildExist { // check for existing wild
merr = multierror.Append(merr, wrapErr(
resource.ErrInvalidField{
Name: "ref",
Wrapped: fmt.Errorf(
"parent ref %q for port %q covered by wildcard port already",
resource.ReferenceToString(parent.Ref),
parent.Port,
),
},
))
} else {
seen[prk] = struct{}{}
seenAny[prk.Key] = append(seenAny[prk.Key], parent.Port)
}
}
}
}
return merr
}
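The seen/seenAny bookkeeping above catches literal duplicates as well as exact ports that a wildcard entry already covers, in either order. A small sketch of inputs and outcomes, matching the shared parent-ref test cases earlier in this change (apiRef stands in for a valid *pbresource.Reference to a catalog Service):

// Accepted: the same service on two different explicit ports.
_ = validateParentRefs([]*pbmesh.ParentReference{
	{Ref: apiRef, Port: "http"},
	{Ref: apiRef, Port: "grpc"},
}) // returns nil

// Rejected: the wildcard entry ("" port) already covers the explicit "http" entry.
_ = validateParentRefs([]*pbmesh.ParentReference{
	{Ref: apiRef, Port: ""},
	{Ref: apiRef, Port: "http"},
}) // error: parent ref ... for port "http" covered by wildcard port already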
func validateBackendRef(backendRef *pbmesh.BackendReference) []error {
var errs []error
if backendRef == nil {
errs = append(errs, resource.ErrMissing)
} else if backendRef.Ref == nil {
errs = append(errs, resource.ErrInvalidField{
Name: "ref",
Wrapped: resource.ErrMissing,
})
} else {
if !IsServiceType(backendRef.Ref.Type) {
errs = append(errs, resource.ErrInvalidField{
Name: "ref",
Wrapped: resource.ErrInvalidReferenceType{
AllowedType: catalog.ServiceType,
},
})
}
if backendRef.Ref.Section != "" {
errs = append(errs, resource.ErrInvalidField{
Name: "ref",
Wrapped: resource.ErrInvalidField{
Name: "section",
Wrapped: errors.New("section not supported for service backend refs"),
},
})
}
if backendRef.Datacenter != "" {
errs = append(errs, resource.ErrInvalidField{
Name: "datacenter",
Wrapped: errors.New("datacenter is not yet supported on backend refs"),
})
}
}
return errs
}
func validateHeaderMatchType(typ pbmesh.HeaderMatchType) error {
switch typ {
case pbmesh.HeaderMatchType_HEADER_MATCH_TYPE_UNSPECIFIED:
return resource.ErrMissing
case pbmesh.HeaderMatchType_HEADER_MATCH_TYPE_EXACT:
case pbmesh.HeaderMatchType_HEADER_MATCH_TYPE_REGEX:
case pbmesh.HeaderMatchType_HEADER_MATCH_TYPE_PRESENT:
case pbmesh.HeaderMatchType_HEADER_MATCH_TYPE_PREFIX:
case pbmesh.HeaderMatchType_HEADER_MATCH_TYPE_SUFFIX:
default:
return fmt.Errorf("not a supported enum value: %v", typ)
}
return nil
}
func validateHTTPTimeouts(timeouts *pbmesh.HTTPRouteTimeouts) []error {
if timeouts == nil {
return nil
}
var errs []error
if timeouts.Request != nil {
val := timeouts.Request.AsDuration()
if val < 0 {
errs = append(errs, resource.ErrInvalidField{
Name: "request",
Wrapped: fmt.Errorf("timeout cannot be negative: %v", val),
})
}
}
if timeouts.BackendRequest != nil {
val := timeouts.BackendRequest.AsDuration()
if val < 0 {
errs = append(errs, resource.ErrInvalidField{
Name: "backend_request",
Wrapped: fmt.Errorf("timeout cannot be negative: %v", val),
})
}
}
if timeouts.Idle != nil {
val := timeouts.Idle.AsDuration()
if val < 0 {
errs = append(errs, resource.ErrInvalidField{
Name: "idle",
Wrapped: fmt.Errorf("timeout cannot be negative: %v", val),
})
}
}
return errs
}
func validateHTTPRetries(retries *pbmesh.HTTPRouteRetries) []error {
if retries == nil {
return nil
}
var errs []error
if retries.Number < 0 {
errs = append(errs, resource.ErrInvalidField{
Name: "number",
Wrapped: fmt.Errorf("cannot be negative: %v", retries.Number),
})
}
for i, condition := range retries.OnConditions {
if !isValidRetryCondition(condition) {
errs = append(errs, resource.ErrInvalidListElement{
Name: "on_conditions",
Index: i,
Wrapped: fmt.Errorf("not a valid retry condition: %q", condition),
})
}
}
return errs
}
func isValidRetryCondition(retryOn string) bool {
switch retryOn {
case "5xx",
"gateway-error",
"reset",
"connect-failure",
"envoy-ratelimited",
"retriable-4xx",
"refused-stream",
"cancelled",
"deadline-exceeded",
"internal",
"resource-exhausted",
"unavailable":
return true
default:
return false
}
}
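The accepted strings correspond to Envoy-style retry-on / retry-grpc-on condition names; anything outside the list is reported per element by validateHTTPRetries. A quick sketch with a mixed input:

// Only the unknown condition at index 1 produces an error.
errs := validateHTTPRetries(&pbmesh.HTTPRouteRetries{
	Number:       2,
	OnConditions: []string{"connect-failure", "garbage"},
})
// errs[0]: invalid element at index 1 of list "on_conditions": not a valid retry condition: "garbage"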

@@ -0,0 +1,91 @@
// Copyright (c) HashiCorp, Inc.
// SPDX-License-Identifier: BUSL-1.1
package meshv1alpha1
// GetUnderlyingBackendRefs will collect BackendReferences from all rules and
// bundle them up in one slice, unwrapping the HTTP-specifics in the process.
//
// This implements an XRouteWithRefs interface in the internal/mesh package.
//
// NOTE: no deduplication occurs.
func (x *HTTPRoute) GetUnderlyingBackendRefs() []*BackendReference {
if x == nil {
return nil
}
estimate := 0
for _, rule := range x.Rules {
estimate += len(rule.BackendRefs)
}
backendRefs := make([]*BackendReference, 0, estimate)
for _, rule := range x.Rules {
for _, backendRef := range rule.BackendRefs {
backendRefs = append(backendRefs, backendRef.BackendRef)
}
}
return backendRefs
}
// GetUnderlyingBackendRefs will collect BackendReferences from all rules and
// bundle them up in one slice, unwrapping the GRPC-specifics in the process.
//
// This implements an XRouteWithRefs interface in the internal/mesh package.
//
// NOTE: no deduplication occurs.
func (x *GRPCRoute) GetUnderlyingBackendRefs() []*BackendReference {
if x == nil {
return nil
}
estimate := 0
for _, rule := range x.Rules {
estimate += len(rule.BackendRefs)
}
backendRefs := make([]*BackendReference, 0, estimate)
for _, rule := range x.Rules {
for _, backendRef := range rule.BackendRefs {
backendRefs = append(backendRefs, backendRef.BackendRef)
}
}
return backendRefs
}
// GetUnderlyingBackendRefs will collect BackendReferences from all rules and
// bundle them up in one slice, unwrapping the TCP-specifics in the process.
//
// This implements an XRouteWithRefs interface in the internal/mesh package.
//
// NOTE: no deduplication occurs.
func (x *TCPRoute) GetUnderlyingBackendRefs() []*BackendReference {
if x == nil {
return nil
}
estimate := 0
for _, rule := range x.Rules {
estimate += len(rule.BackendRefs)
}
backendRefs := make([]*BackendReference, 0, estimate)
for _, rule := range x.Rules {
for _, backendRef := range rule.BackendRefs {
backendRefs = append(backendRefs, backendRef.BackendRef)
}
}
return backendRefs
}
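All three implementations are identical apart from the concrete rule type, so callers can treat any route generically through this accessor. A hypothetical helper (not part of this change):

// Collect the service names a route forwards to, regardless of protocol.
func backendServiceNames(route interface {
	GetUnderlyingBackendRefs() []*BackendReference
}) []string {
	var names []string
	for _, ref := range route.GetUnderlyingBackendRefs() {
		if ref != nil && ref.Ref != nil {
			names = append(names, ref.Ref.Name)
		}
	}
	return names
}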
// IsHashBased returns true if the policy is a hash-based policy such as maglev
// or ring hash.
func (p LoadBalancerPolicy) IsHashBased() bool {
switch p {
case LoadBalancerPolicy_LOAD_BALANCER_POLICY_MAGLEV,
LoadBalancerPolicy_LOAD_BALANCER_POLICY_RING_HASH:
return true
}
return false
}

@@ -0,0 +1,174 @@
// Copyright (c) HashiCorp, Inc.
// SPDX-License-Identifier: BUSL-1.1
package meshv1alpha1
import (
"testing"
"github.com/stretchr/testify/require"
"google.golang.org/protobuf/encoding/protojson"
"google.golang.org/protobuf/proto"
pbresource "github.com/hashicorp/consul/proto-public/pbresource"
)
type routeWithAddons interface {
proto.Message
GetUnderlyingBackendRefs() []*BackendReference
}
func TestXRoute_GetUnderlyingBackendRefs(t *testing.T) {
type testcase struct {
route routeWithAddons
expect []*BackendReference
}
run := func(t *testing.T, tc testcase) {
got := tc.route.GetUnderlyingBackendRefs()
require.ElementsMatch(t, stringifyList(tc.expect), stringifyList(got))
}
cases := map[string]testcase{
"http: nil": {
route: (*HTTPRoute)(nil),
},
"grpc: nil": {
route: (*GRPCRoute)(nil),
},
"tcp: nil": {
route: (*TCPRoute)(nil),
},
"http: kitchen sink": {
route: &HTTPRoute{
Rules: []*HTTPRouteRule{
{BackendRefs: []*HTTPBackendRef{
{BackendRef: newBackendRef("aa")},
}},
{BackendRefs: []*HTTPBackendRef{
{BackendRef: newBackendRef("bb")},
}},
{BackendRefs: []*HTTPBackendRef{
{BackendRef: newBackendRef("cc")},
{BackendRef: newBackendRef("dd")},
}},
{BackendRefs: []*HTTPBackendRef{
{BackendRef: newBackendRef("ee")},
{BackendRef: newBackendRef("ff")},
}},
},
},
expect: []*BackendReference{
newBackendRef("aa"),
newBackendRef("bb"),
newBackendRef("cc"),
newBackendRef("dd"),
newBackendRef("ee"),
newBackendRef("ff"),
},
},
"grpc: kitchen sink": {
route: &GRPCRoute{
Rules: []*GRPCRouteRule{
{BackendRefs: []*GRPCBackendRef{
{BackendRef: newBackendRef("aa")},
}},
{BackendRefs: []*GRPCBackendRef{
{BackendRef: newBackendRef("bb")},
}},
{BackendRefs: []*GRPCBackendRef{
{BackendRef: newBackendRef("cc")},
{BackendRef: newBackendRef("dd")},
}},
{BackendRefs: []*GRPCBackendRef{
{BackendRef: newBackendRef("ee")},
{BackendRef: newBackendRef("ff")},
}},
},
},
expect: []*BackendReference{
newBackendRef("aa"),
newBackendRef("bb"),
newBackendRef("cc"),
newBackendRef("dd"),
newBackendRef("ee"),
newBackendRef("ff"),
},
},
"tcp: kitchen sink": {
route: &TCPRoute{
Rules: []*TCPRouteRule{
{BackendRefs: []*TCPBackendRef{
{BackendRef: newBackendRef("aa")},
}},
{BackendRefs: []*TCPBackendRef{
{BackendRef: newBackendRef("bb")},
}},
{BackendRefs: []*TCPBackendRef{
{BackendRef: newBackendRef("cc")},
{BackendRef: newBackendRef("dd")},
}},
{BackendRefs: []*TCPBackendRef{
{BackendRef: newBackendRef("ee")},
{BackendRef: newBackendRef("ff")},
}},
},
},
expect: []*BackendReference{
newBackendRef("aa"),
newBackendRef("bb"),
newBackendRef("cc"),
newBackendRef("dd"),
newBackendRef("ee"),
newBackendRef("ff"),
},
},
}
for name, tc := range cases {
t.Run(name, func(t *testing.T) {
run(t, tc)
})
}
}
func protoToString[V proto.Message](pb V) string {
m := protojson.MarshalOptions{
Indent: " ",
}
gotJSON, err := m.Marshal(pb)
if err != nil {
return "<ERR: " + err.Error() + ">"
}
return string(gotJSON)
}
func newRouteRef(name string) *pbresource.Reference {
return &pbresource.Reference{
Type: &pbresource.Type{
Group: "fake",
GroupVersion: "v1alpha1",
Kind: "fake",
},
Tenancy: &pbresource.Tenancy{
Partition: "default",
Namespace: "default",
PeerName: "local",
},
Name: name,
}
}
func newBackendRef(name string) *BackendReference {
return &BackendReference{
Ref: newRouteRef(name),
}
}
func stringifyList[V proto.Message](list []V) []string {
out := make([]string, 0, len(list))
for _, item := range list {
out = append(out, protoToString(item))
}
return out
}