mirror of https://github.com/status-im/consul.git
mesh: add ComputedImplicitDestinations resource for future use (#20547)
Creates a new controller to generate ComputedImplicitDestinations resources by composing ComputedRoutes, Services, and ComputedTrafficPermissions to infer all ParentRef services that could possibly send some portion of traffic to a Service that has at least one accessible Workload Identity. A followup PR will rewire the sidecar controller to make use of this new resource.

As this is a performance optimization rather than a security feature, the following aspects of traffic permissions have been ignored:

- DENY rules
- port rules (all ports are allowed)

Also:

- Add some v2 TestController machinery to help test complex dependency mappers.
This commit is contained in:
parent 8c05e57ac1
commit 6742340878
@ -191,6 +191,57 @@ a `Baz` resource gets updated to no longer have a value, it should not be repres

3. Update the dependency mappers to query the cache index *in addition to* looking at the current state of the dependent resource. In our example above the `Baz` dependency mapper could use the [`MultiMapper`] to combine querying the cache for `Baz` types that currently should be associated with a `ComputedBaz` and querying the index added in step 2 for previous references.

#### Footgun: Needing Bound References

When an interior (mutable) foreign key pointer on watched data is used to
determine the resource's applicability in a dependency mapper, it is subject
to the "orphaned computed resource" problem.

(An example of this would be a ParentRef on an xRoute, or the Destination field
of a TrafficPermission.)

When you edit the mutable pointer to point elsewhere, the DependencyMapper will
only witness the NEW value and will trigger reconciles for things derived from
the NEW pointer, but side effects from a prior reconcile using the OLD pointer
will be orphaned until some other event triggers that reconcile (if ever).

This applies equally to all varieties of controller, whether the controller:

- creates computed resources
- only updates status conditions on existing resources
- has other external side effects (e.g. the xDS controller writes Envoy config over a stream)

To solve this we need to collect the list of bound references that were
"ingredients" into a computed resource's output and persist them on the newly
written resource. Then we load them up and index them such that we can use them
to AUGMENT a mapper event with additional maps using the OLD data as well.

We have only actively worked to solve this for the computed resource flavor of
controller:

1. The top level of the resource data protobuf needs a
   `BoundReferences []*pbresource.Reference` field.

2. Use a `*resource.BoundReferenceCollector` to capture any resource during
   `Reconcile` that directly contributes to the final output resource data
   payload.

3. Call `brc.List()` on the above and set it to the `BoundReferences` field on
   the computed resource before persisting.

4. Use `indexers.BoundRefsIndex` to index this field on the primary type of the
   controller.

5. Create `boundRefsMapper := dependency.CacheListMapper(ZZZ, boundRefsIndex.Name())`.

6. For each watched type, wrap its DependencyMapper with
   `dependency.MultiMapper(boundRefsMapper, ZZZ)`.

7. That's it.

This will cause each reconcile to index the prior list of inputs and augment
the results of future mapper events with historical references, as sketched
below.
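
To make the steps concrete, here is a minimal sketch (not part of this change) of a hypothetical `ComputedBaz` controller wired this way. The `pbexample` proto package, its types, and the reconciler's derivation logic are placeholders; the helpers (`indexers.BoundRefsIndex`, `dependency.CacheListMapper`, `dependency.MultiMapper`, `resource.NewBoundReferenceCollector`) are the ones described above and used by the controller added in this commit.

```go
package computedbaz

import (
	"context"

	"google.golang.org/protobuf/types/known/anypb"

	"github.com/hashicorp/consul/internal/controller"
	"github.com/hashicorp/consul/internal/controller/cache"
	"github.com/hashicorp/consul/internal/controller/cache/indexers"
	"github.com/hashicorp/consul/internal/controller/dependency"
	"github.com/hashicorp/consul/internal/resource"
	"github.com/hashicorp/consul/proto-public/pbresource"

	pbexample "example.com/placeholder/pbexample" // hypothetical proto package
)

// (4)+(5): index the persisted BoundReferences field and build a mapper that
// returns every ComputedBaz that previously bound the updated resource.
var boundRefsIndex = indexers.BoundRefsIndex[*pbexample.ComputedBaz]("bound-references")

func Controller() *controller.Controller {
	boundRefsMapper := dependency.CacheListMapper(pbexample.ComputedBazType, boundRefsIndex.Name())

	return controller.NewController("consul.io/computed-baz", pbexample.ComputedBazType, boundRefsIndex).
		WithWatch(pbexample.BazType,
			// (6) augment the normal mapping with the historical bound refs.
			dependency.MultiMapper(boundRefsMapper, dependency.ReplaceType(pbexample.ComputedBazType)),
		).
		WithReconciler(&reconciler{})
}

type reconciler struct{}

func (r *reconciler) Reconcile(ctx context.Context, rt controller.Runtime, req controller.Request) error {
	// (2) capture every "ingredient" resource while computing the output.
	brc := resource.NewBoundReferenceCollector()

	baz, err := cache.GetDecoded[*pbexample.Baz](rt.Cache, pbexample.BazType, "id",
		resource.ReplaceType(pbexample.BazType, req.ID))
	if err != nil || baz == nil {
		return err
	}
	brc.AddRefOrID(baz.Id)

	// (1)+(3) persist the collected refs on the computed resource itself.
	newData := &pbexample.ComputedBaz{ /* ...fields derived from baz... */ }
	newData.BoundReferences = brc.List()

	data, err := anypb.New(newData)
	if err != nil {
		return err
	}
	_, err = rt.Client.Write(ctx, &pbresource.WriteRequest{
		Resource: &pbresource.Resource{Id: req.ID, Data: data, Owner: baz.Resource.Id},
	})
	return err
}
```
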
### Custom Watches

In some cases, we may want to trigger reconciles for events that aren't generated from CRUD operations on resources, for example

@ -96,6 +96,10 @@ func ValidateSelector(sel *pbcatalog.WorkloadSelector, allowEmpty bool) error {
|
|||
return types.ValidateSelector(sel, allowEmpty)
|
||||
}
|
||||
|
||||
func ValidatePortName(id string) error {
|
||||
return types.ValidatePortName(id)
|
||||
}
|
||||
|
||||
func ValidateServicePortID(id string) error {
|
||||
return types.ValidateServicePortID(id)
|
||||
}
|
||||
|
|
|
@ -9,11 +9,12 @@ import (
|
|||
"strings"
|
||||
"time"
|
||||
|
||||
"github.com/hashicorp/go-hclog"
|
||||
|
||||
"github.com/hashicorp/consul/internal/controller/cache"
|
||||
"github.com/hashicorp/consul/internal/controller/cache/index"
|
||||
"github.com/hashicorp/consul/internal/resource"
|
||||
"github.com/hashicorp/consul/proto-public/pbresource"
|
||||
"github.com/hashicorp/go-hclog"
|
||||
)
|
||||
|
||||
// DependencyMapper is called when a dependency watched via WithWatch is changed
|
||||
|
@ -189,6 +190,27 @@ func (ctl *Controller) buildCache() cache.Cache {
|
|||
return c
|
||||
}
|
||||
|
||||
// dryRunMapper will trigger the appropriate DependencyMapper for an update of
|
||||
// the provided type and return the requested reconciles.
|
||||
//
|
||||
// This is mainly to be used by the TestController.
|
||||
func (ctl *Controller) dryRunMapper(
|
||||
ctx context.Context,
|
||||
rt Runtime,
|
||||
res *pbresource.Resource,
|
||||
) ([]Request, error) {
|
||||
if resource.EqualType(ctl.managedTypeWatch.watchedType, res.Id.Type) {
|
||||
return nil, nil // no-op
|
||||
}
|
||||
|
||||
for _, w := range ctl.watches {
|
||||
if resource.EqualType(w.watchedType, res.Id.Type) {
|
||||
return w.mapper(ctx, rt, res)
|
||||
}
|
||||
}
|
||||
return nil, fmt.Errorf("no mapper for type: %s", resource.TypeToString(res.Id.Type))
|
||||
}
|
||||
|
||||
// String returns a textual description of the controller, useful for debugging.
|
||||
func (ctl *Controller) String() string {
|
||||
watchedTypes := make([]string, 0, len(ctl.watches))
|
||||
|
|
|
@ -6,6 +6,8 @@ package dependency
|
|||
import (
|
||||
"context"
|
||||
|
||||
"google.golang.org/protobuf/proto"
|
||||
|
||||
"github.com/hashicorp/consul/internal/controller"
|
||||
"github.com/hashicorp/consul/internal/resource"
|
||||
"github.com/hashicorp/consul/proto-public/pbresource"
|
||||
|
@ -52,3 +54,15 @@ func ReplaceType(desiredType *pbresource.Type) controller.DependencyMapper {
|
|||
}, nil
|
||||
}
|
||||
}
|
||||
|
||||
type DecodedDependencyMapper[T proto.Message] func(context.Context, controller.Runtime, *resource.DecodedResource[T]) ([]controller.Request, error)
|
||||
|
||||
func MapDecoded[T proto.Message](mapper DecodedDependencyMapper[T]) controller.DependencyMapper {
|
||||
return func(ctx context.Context, rt controller.Runtime, res *pbresource.Resource) ([]controller.Request, error) {
|
||||
decoded, err := resource.Decode[T](res)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return mapper(ctx, rt, decoded)
|
||||
}
|
||||
}
|
||||
|
|
|
@ -8,10 +8,15 @@ import (
|
|||
"testing"
|
||||
|
||||
"github.com/google/go-cmp/cmp/cmpopts"
|
||||
"github.com/hashicorp/consul/internal/controller"
|
||||
"github.com/hashicorp/consul/proto-public/pbresource"
|
||||
"github.com/hashicorp/consul/proto/private/prototest"
|
||||
"github.com/stretchr/testify/require"
|
||||
|
||||
"github.com/hashicorp/consul/internal/controller"
|
||||
"github.com/hashicorp/consul/internal/resource"
|
||||
"github.com/hashicorp/consul/internal/resource/resourcetest"
|
||||
"github.com/hashicorp/consul/proto-public/pbresource"
|
||||
pbdemo "github.com/hashicorp/consul/proto/private/pbdemo/v1"
|
||||
"github.com/hashicorp/consul/proto/private/prototest"
|
||||
"github.com/hashicorp/consul/sdk/testutil"
|
||||
)
|
||||
|
||||
func resourceID(group string, version string, kind string, name string) *pbresource.ID {
|
||||
|
@ -137,3 +142,51 @@ func TestReplaceType(t *testing.T) {
|
|||
}
|
||||
prototest.AssertDeepEqual(t, expected, reqs[0].ID)
|
||||
}
|
||||
|
||||
func TestMapDecoded(t *testing.T) {
|
||||
mapper := MapDecoded[*pbdemo.Artist](func(_ context.Context, _ controller.Runtime, res *resource.DecodedResource[*pbdemo.Artist]) ([]controller.Request, error) {
|
||||
return []controller.Request{
|
||||
{
|
||||
ID: &pbresource.ID{
|
||||
Type: res.Id.Type,
|
||||
Tenancy: res.Id.Tenancy,
|
||||
// not realistic for how the Artist's Name is intended to be used, but we just want to pull
|
||||
// some data out of the decoded portion and return it.
|
||||
Name: res.Data.Name,
|
||||
},
|
||||
},
|
||||
}, nil
|
||||
})
|
||||
|
||||
for _, tenancy := range resourcetest.TestTenancies() {
|
||||
t.Run(resourcetest.AppendTenancyInfo(t.Name(), tenancy), func(t *testing.T) {
|
||||
ctx := testutil.TestContext(t)
|
||||
|
||||
res1 := resourcetest.Resource(pbdemo.ArtistType, "foo").
|
||||
WithTenancy(tenancy).
|
||||
WithData(t, &pbdemo.Artist{Name: "something"}).
|
||||
Build()
|
||||
|
||||
res2 := resourcetest.Resource(pbdemo.ArtistType, "foo").
|
||||
WithTenancy(tenancy).
|
||||
// Wrong data type here to force an error in the outer decoder
|
||||
WithData(t, &pbdemo.Album{Name: "else"}).
|
||||
Build()
|
||||
|
||||
reqs, err := mapper(ctx, controller.Runtime{}, res1)
|
||||
require.NoError(t, err)
|
||||
require.Len(t, reqs, 1)
|
||||
|
||||
expected := &pbresource.ID{
|
||||
Type: res1.Id.Type,
|
||||
Tenancy: res1.Id.Tenancy,
|
||||
Name: "something",
|
||||
}
|
||||
prototest.AssertDeepEqual(t, expected, reqs[0].ID)
|
||||
|
||||
reqs, err = mapper(ctx, controller.Runtime{}, res2)
|
||||
require.Nil(t, reqs)
|
||||
require.Error(t, err)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
|
|
@ -6,9 +6,10 @@ package controller
|
|||
import (
|
||||
"context"
|
||||
|
||||
"github.com/hashicorp/go-hclog"
|
||||
|
||||
"github.com/hashicorp/consul/internal/controller/cache"
|
||||
"github.com/hashicorp/consul/proto-public/pbresource"
|
||||
"github.com/hashicorp/go-hclog"
|
||||
)
|
||||
|
||||
// TestController is most useful when writing unit tests for a controller where
|
||||
|
@ -66,3 +67,13 @@ func (tc *TestController) Runtime() Runtime {
|
|||
Cache: tc.cache,
|
||||
}
|
||||
}
|
||||
|
||||
// DryRunMapper will trigger the appropriate DependencyMapper for an update of
|
||||
// the provided type and return the requested reconciles.
|
||||
//
|
||||
// Useful for testing just the DependencyMapper+Cache interactions for chains
// that are too complicated for a full controller interaction test to easily
// verify.
|
||||
func (tc *TestController) DryRunMapper(ctx context.Context, res *pbresource.Resource) ([]Request, error) {
|
||||
return tc.c.dryRunMapper(ctx, tc.Runtime(), res)
|
||||
}
|
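
The following sketch (not part of this commit) shows how a test might drive the new dependency mappers through `TestController.DryRunMapper`. It assumes the existing `controller.NewTestController` constructor and the `svctest` in-memory resource-service builder; the fixture names and assertions are illustrative only.

```go
package implicitdestinations

import (
	"testing"

	"github.com/stretchr/testify/require"

	svctest "github.com/hashicorp/consul/agent/grpc-external/services/resource/testing"
	"github.com/hashicorp/consul/internal/auth"
	"github.com/hashicorp/consul/internal/catalog"
	"github.com/hashicorp/consul/internal/controller"
	"github.com/hashicorp/consul/internal/mesh/internal/types"
	"github.com/hashicorp/consul/internal/resource"
	rtest "github.com/hashicorp/consul/internal/resource/resourcetest"
	pbauth "github.com/hashicorp/consul/proto-public/pbauth/v2beta1"
	pbmesh "github.com/hashicorp/consul/proto-public/pbmesh/v2beta1"
	"github.com/hashicorp/consul/sdk/testutil"
)

func TestDryRunMapper_ComputedTrafficPermissions(t *testing.T) {
	// Assumed test harness: an in-memory resource service plus the
	// TestController wrapper whose DryRunMapper method is added above.
	client := svctest.NewResourceServiceBuilder().
		WithRegisterFns(types.Register, catalog.RegisterTypes, auth.RegisterTypes).
		Run(t)
	tc := controller.NewTestController(Controller(false), client)

	// A CTP naming one exact source workload identity.
	ctp := rtest.Resource(pbauth.ComputedTrafficPermissionsType, "api").
		WithTenancy(resource.DefaultNamespacedTenancy()).
		WithData(t, &pbauth.ComputedTrafficPermissions{
			AllowPermissions: []*pbauth.Permission{{
				Sources: []*pbauth.Source{{
					IdentityName: "web",
					Partition:    "default",
					Namespace:    "default",
				}},
			}},
		}).
		Build()

	// Dry-run the dependency mappers: an update to this CTP should request a
	// reconcile of the source identity's ComputedImplicitDestinations.
	reqs, err := tc.DryRunMapper(testutil.TestContext(t), ctp)
	require.NoError(t, err)
	require.Len(t, reqs, 1)
	require.True(t, resource.EqualType(pbmesh.ComputedImplicitDestinationsType, reqs[0].ID.Type))
	require.Equal(t, "web", reqs[0].ID.Name)
}
```
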
@ -0,0 +1,88 @@
|
|||
// Copyright (c) HashiCorp, Inc.
|
||||
// SPDX-License-Identifier: BUSL-1.1
|
||||
|
||||
package implicitdestinations
|
||||
|
||||
import (
|
||||
"strings"
|
||||
"testing"
|
||||
|
||||
"github.com/oklog/ulid/v2"
|
||||
"github.com/stretchr/testify/require"
|
||||
|
||||
"github.com/hashicorp/consul/internal/auth"
|
||||
"github.com/hashicorp/consul/internal/mesh/internal/types"
|
||||
"github.com/hashicorp/consul/internal/resource"
|
||||
"github.com/hashicorp/consul/internal/resource/resourcetest"
|
||||
rtest "github.com/hashicorp/consul/internal/resource/resourcetest"
|
||||
pbauth "github.com/hashicorp/consul/proto-public/pbauth/v2beta1"
|
||||
"github.com/hashicorp/consul/proto-public/pbresource"
|
||||
)
|
||||
|
||||
// TODO: do this properly and export it from internal/auth/exports.go
|
||||
// This is a crude approximation suitable for this test.
|
||||
func ReconcileComputedTrafficPermissions(
|
||||
t *testing.T,
|
||||
client *rtest.Client,
|
||||
id *pbresource.ID,
|
||||
tpList ...*pbauth.TrafficPermissions,
|
||||
) *types.DecodedComputedTrafficPermissions {
|
||||
// TODO: allow this to take a nil client and still execute all of the proper validations etc.
|
||||
|
||||
require.True(t, resource.EqualType(pbauth.ComputedTrafficPermissionsType, id.GetType()))
|
||||
|
||||
registry := resource.NewRegistry()
|
||||
auth.RegisterTypes(registry)
|
||||
|
||||
merged := &pbauth.ComputedTrafficPermissions{}
|
||||
added := false
|
||||
for _, tp := range tpList {
|
||||
name := strings.ToLower(ulid.Make().String())
|
||||
|
||||
// Default to request aligned.
|
||||
if tp.Destination == nil {
|
||||
tp.Destination = &pbauth.Destination{}
|
||||
}
|
||||
if tp.Destination.IdentityName == "" {
|
||||
tp.Destination.IdentityName = id.Name
|
||||
}
|
||||
require.Equal(t, id.Name, tp.Destination.IdentityName)
|
||||
|
||||
res := rtest.Resource(pbauth.TrafficPermissionsType, name).
|
||||
WithTenancy(id.Tenancy).
|
||||
WithData(t, tp).
|
||||
Build()
|
||||
resourcetest.ValidateAndNormalize(t, registry, res)
|
||||
|
||||
dec := rtest.MustDecode[*pbauth.TrafficPermissions](t, res)
|
||||
|
||||
added = true
|
||||
|
||||
switch dec.Data.Action {
|
||||
case pbauth.Action_ACTION_ALLOW:
|
||||
merged.AllowPermissions = append(merged.AllowPermissions, dec.Data.Permissions...)
|
||||
case pbauth.Action_ACTION_DENY:
|
||||
merged.DenyPermissions = append(merged.DenyPermissions, dec.Data.Permissions...)
|
||||
default:
|
||||
t.Fatalf("Unexpected action: %v", dec.Data.Action)
|
||||
}
|
||||
}
|
||||
|
||||
if !added {
|
||||
merged.IsDefault = true
|
||||
}
|
||||
|
||||
var res *pbresource.Resource
|
||||
if client != nil {
|
||||
res = rtest.ResourceID(id).
|
||||
WithData(t, merged).
|
||||
Write(t, client)
|
||||
} else {
|
||||
res = rtest.ResourceID(id).
|
||||
WithData(t, merged).
|
||||
Build()
|
||||
resourcetest.ValidateAndNormalize(t, registry, res)
|
||||
}
|
||||
|
||||
return rtest.MustDecode[*pbauth.ComputedTrafficPermissions](t, res)
|
||||
}
|
|
@ -0,0 +1,314 @@
|
|||
// Copyright (c) HashiCorp, Inc.
|
||||
// SPDX-License-Identifier: BUSL-1.1
|
||||
|
||||
package implicitdestinations
|
||||
|
||||
import (
|
||||
"context"
|
||||
"sort"
|
||||
|
||||
"google.golang.org/protobuf/proto"
|
||||
"google.golang.org/protobuf/types/known/anypb"
|
||||
|
||||
"github.com/hashicorp/consul/internal/controller"
|
||||
"github.com/hashicorp/consul/internal/controller/cache"
|
||||
"github.com/hashicorp/consul/internal/controller/cache/index"
|
||||
"github.com/hashicorp/consul/internal/controller/dependency"
|
||||
"github.com/hashicorp/consul/internal/resource"
|
||||
"github.com/hashicorp/consul/internal/storage"
|
||||
pbauth "github.com/hashicorp/consul/proto-public/pbauth/v2beta1"
|
||||
pbcatalog "github.com/hashicorp/consul/proto-public/pbcatalog/v2beta1"
|
||||
pbmesh "github.com/hashicorp/consul/proto-public/pbmesh/v2beta1"
|
||||
"github.com/hashicorp/consul/proto-public/pbresource"
|
||||
)
|
||||
|
||||
// Future work: this can be optimized to omit:
|
||||
//
|
||||
// - destinations denied due to DENY TP
|
||||
// - ports not explicitly exposed by a CR or CTP
|
||||
|
||||
/*
|
||||
Data Relationships:
|
||||
|
||||
Reconcile:
|
||||
- read WI[source] (ignore)
|
||||
- list CTPs by WI[source]
|
||||
- turn CTP.id -> WI[backend].id
|
||||
- list SVCs by WI[backend]
|
||||
- list CRs by SVC[backend]
|
||||
- turn CR.id -> SVC[dest].id
|
||||
- emit SVC[dest]
|
||||
|
||||
DepMappers:
|
||||
- CR <list> SVC[backend] <list> WI[backend] <align> CTP <list> WI[source] <align> CID
|
||||
- SVC[backend] <list> WI[backend] <align> CTP <list> WI[source] <align> CID
|
||||
- CTP <list> WI[source] <align> CID
|
||||
- bound refs for all
|
||||
|
||||
*/
|
||||
|
||||
func Controller(globalDefaultAllow bool) *controller.Controller {
|
||||
m := &mapAndTransformer{globalDefaultAllow: globalDefaultAllow}
|
||||
|
||||
boundRefsMapper := dependency.CacheListMapper(pbmesh.ComputedImplicitDestinationsType, boundRefsIndex.Name())
|
||||
|
||||
return controller.NewController(ControllerID,
|
||||
pbmesh.ComputedImplicitDestinationsType,
|
||||
boundRefsIndex,
|
||||
).
|
||||
WithWatch(pbauth.WorkloadIdentityType,
|
||||
// BoundRefs: none
|
||||
dependency.ReplaceType(pbmesh.ComputedImplicitDestinationsType),
|
||||
).
|
||||
WithWatch(pbauth.ComputedTrafficPermissionsType,
|
||||
// BoundRefs: the WI source refs are interior up-pointers and may change.
|
||||
dependency.MultiMapper(boundRefsMapper, m.MapComputedTrafficPermissions),
|
||||
ctpBySourceWorkloadIdentityIndex,
|
||||
ctpByWildcardSourceIndexCreator(globalDefaultAllow),
|
||||
).
|
||||
WithWatch(pbcatalog.ServiceType,
|
||||
// BoundRefs: the WI slice in the status conds is an interior up-pointer and may change.
|
||||
dependency.MultiMapper(boundRefsMapper, m.MapService),
|
||||
serviceByWorkloadIdentityIndex,
|
||||
).
|
||||
WithWatch(pbmesh.ComputedRoutesType,
|
||||
// BoundRefs: the backend services are interior up-pointers and may change.
|
||||
dependency.MultiMapper(boundRefsMapper, m.MapComputedRoutes),
|
||||
computedRoutesByBackendServiceIndex,
|
||||
).
|
||||
WithReconciler(&reconciler{
|
||||
defaultAllow: globalDefaultAllow,
|
||||
})
|
||||
}
|
||||
|
||||
type reconciler struct {
|
||||
defaultAllow bool
|
||||
}
|
||||
|
||||
func (r *reconciler) Reconcile(ctx context.Context, rt controller.Runtime, req controller.Request) error {
|
||||
rt.Logger = rt.Logger.With("resource-id", req.ID, "controller", ControllerID)
|
||||
|
||||
wi := resource.ReplaceType(pbauth.WorkloadIdentityType, req.ID)
|
||||
|
||||
workloadIdentity, err := cache.GetDecoded[*pbauth.WorkloadIdentity](rt.Cache, pbauth.WorkloadIdentityType, "id", wi)
|
||||
if err != nil {
|
||||
rt.Logger.Error("error retrieving corresponding Workload Identity", "error", err)
|
||||
return err
|
||||
} else if workloadIdentity == nil {
|
||||
rt.Logger.Trace("workload identity has been deleted")
|
||||
return nil
|
||||
}
|
||||
|
||||
// Generate the new CID data and compare; if it differs, write the new resource, otherwise return without doing anything.
|
||||
newData, err := r.generateComputedImplicitDestinations(rt, wi)
|
||||
if err != nil {
|
||||
rt.Logger.Error("error generating computed implicit destinations", "error", err)
|
||||
// TODO: update the workload identity with this error as a status condition?
|
||||
return err
|
||||
}
|
||||
|
||||
oldData, err := resource.GetDecodedResource[*pbmesh.ComputedImplicitDestinations](ctx, rt.Client, req.ID)
|
||||
if err != nil {
|
||||
rt.Logger.Error("error retrieving computed implicit destinations", "error", err)
|
||||
return err
|
||||
}
|
||||
if oldData != nil && proto.Equal(oldData.Data, newData) {
|
||||
rt.Logger.Trace("computed implicit destinations have not changed")
|
||||
// there are no changes, and we can return early
|
||||
return nil
|
||||
}
|
||||
rt.Logger.Trace("computed implicit destinations have changed")
|
||||
|
||||
newCID, err := anypb.New(newData)
|
||||
if err != nil {
|
||||
rt.Logger.Error("error marshalling implicit destination data", "error", err)
|
||||
return err
|
||||
}
|
||||
rt.Logger.Trace("writing computed implicit destinations")
|
||||
|
||||
_, err = rt.Client.Write(ctx, &pbresource.WriteRequest{
|
||||
Resource: &pbresource.Resource{
|
||||
Id: req.ID,
|
||||
Data: newCID,
|
||||
Owner: workloadIdentity.Resource.Id,
|
||||
},
|
||||
})
|
||||
if err != nil {
|
||||
rt.Logger.Error("error writing new computed implicit destinations", "error", err)
|
||||
return err
|
||||
}
|
||||
rt.Logger.Trace("new computed implicit destinations were successfully written")
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// generateComputedImplicitDestinations will use all associated Traffic Permissions to create new ComputedImplicitDestinations data
|
||||
func (r *reconciler) generateComputedImplicitDestinations(rt controller.Runtime, cid *pbresource.ID) (*pbmesh.ComputedImplicitDestinations, error) {
|
||||
wiID := resource.ReplaceType(pbauth.WorkloadIdentityType, cid)
|
||||
|
||||
// Summary: list CTPs by WI[source]
|
||||
ctps, err := rt.Cache.List(
|
||||
pbauth.ComputedTrafficPermissionsType,
|
||||
ctpBySourceWorkloadIdentityIndex.Name(),
|
||||
wiID,
|
||||
)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// This covers a foo.bar.* wildcard.
|
||||
wildNameCTPs, err := rt.Cache.List(
|
||||
pbauth.ComputedTrafficPermissionsType,
|
||||
ctpByWildcardSourceIndexName,
|
||||
tenantedName{
|
||||
Partition: wiID.GetTenancy().GetPartition(),
|
||||
Namespace: wiID.GetTenancy().GetNamespace(),
|
||||
Name: storage.Wildcard,
|
||||
},
|
||||
)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
ctps = append(ctps, wildNameCTPs...)
|
||||
|
||||
// This covers a foo.*.* wildcard.
|
||||
wildNamespaceCTPs, err := rt.Cache.List(
|
||||
pbauth.ComputedTrafficPermissionsType,
|
||||
ctpByWildcardSourceIndexName,
|
||||
tenantedName{
|
||||
Partition: wiID.GetTenancy().GetPartition(),
|
||||
Namespace: storage.Wildcard,
|
||||
Name: storage.Wildcard,
|
||||
},
|
||||
)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
ctps = append(ctps, wildNamespaceCTPs...)
|
||||
|
||||
// This covers the default-allow + default-CTP option.
|
||||
wildPartitionCTPs, err := rt.Cache.List(
|
||||
pbauth.ComputedTrafficPermissionsType,
|
||||
ctpByWildcardSourceIndexName,
|
||||
tenantedName{
|
||||
Partition: storage.Wildcard,
|
||||
Namespace: storage.Wildcard,
|
||||
Name: storage.Wildcard,
|
||||
},
|
||||
)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
ctps = append(ctps, wildPartitionCTPs...)
|
||||
|
||||
var (
|
||||
out = &pbmesh.ComputedImplicitDestinations{}
|
||||
seenDest = make(map[resource.ReferenceKey]struct{})
|
||||
boundRefCollector = resource.NewBoundReferenceCollector()
|
||||
)
|
||||
for _, ctp := range ctps {
|
||||
// CTP is name aligned with WI[backend].
|
||||
backendWorkloadID := resource.ReplaceType(pbauth.WorkloadIdentityType, ctp.Id)
|
||||
|
||||
// Find all services that can reach this WI.
|
||||
svcList, err := cache.ListDecoded[*pbcatalog.Service](
|
||||
rt.Cache,
|
||||
pbcatalog.ServiceType,
|
||||
serviceByWorkloadIdentityIndex.Name(),
|
||||
backendWorkloadID,
|
||||
)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
for _, svc := range svcList {
|
||||
// Find all computed routes that have at least one backend target of this service.
|
||||
crList, err := rt.Cache.List(
|
||||
pbmesh.ComputedRoutesType,
|
||||
computedRoutesByBackendServiceIndex.Name(),
|
||||
svc.Id,
|
||||
)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// These are name-aligned with the service name that should go
|
||||
// directly into the implicit destination list.
|
||||
for _, cr := range crList {
|
||||
implDestSvcRef := resource.ReplaceType(pbcatalog.ServiceType, cr.Id)
|
||||
|
||||
rk := resource.NewReferenceKey(implDestSvcRef)
|
||||
if _, seen := seenDest[rk]; seen {
|
||||
continue
|
||||
}
|
||||
|
||||
// TODO: populate just the ports allowed by the underlying TPs.
|
||||
implDest := &pbmesh.ImplicitDestination{
|
||||
DestinationRef: resource.Reference(implDestSvcRef, ""),
|
||||
}
|
||||
|
||||
implDestSvc, err := cache.GetDecoded[*pbcatalog.Service](rt.Cache, pbcatalog.ServiceType, "id", implDestSvcRef)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
} else if implDestSvc == nil {
|
||||
continue // skip
|
||||
}
|
||||
|
||||
inMesh := false
|
||||
for _, port := range implDestSvc.Data.Ports {
|
||||
if port.Protocol == pbcatalog.Protocol_PROTOCOL_MESH {
|
||||
inMesh = true
|
||||
continue // skip
|
||||
}
|
||||
implDest.DestinationPorts = append(implDest.DestinationPorts, port.TargetPort)
|
||||
}
|
||||
if !inMesh {
|
||||
continue // skip
|
||||
}
|
||||
|
||||
// Add entire bound-ref lineage at once, since they're only
|
||||
// bound if they materially affect the computed resource.
|
||||
boundRefCollector.AddRefOrID(ctp.Id)
|
||||
boundRefCollector.AddRefOrID(svc.Id)
|
||||
boundRefCollector.AddRefOrID(cr.Id)
|
||||
boundRefCollector.AddRefOrID(implDestSvcRef)
|
||||
|
||||
sort.Strings(implDest.DestinationPorts)
|
||||
|
||||
out.Destinations = append(out.Destinations, implDest)
|
||||
seenDest[rk] = struct{}{}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Ensure a deterministic sort so we don't get into an infinite reconcile loop.
|
||||
sort.Slice(out.Destinations, func(i, j int) bool {
|
||||
a, b := out.Destinations[i], out.Destinations[j]
|
||||
return resource.LessReference(a.DestinationRef, b.DestinationRef)
|
||||
})
|
||||
|
||||
out.BoundReferences = boundRefCollector.List()
|
||||
|
||||
return out, nil
|
||||
}
|
||||
|
||||
func listAllWorkloadIdentities(
|
||||
cache cache.ReadOnlyCache,
|
||||
tenancy *pbresource.Tenancy,
|
||||
) ([]*pbresource.Reference, error) {
|
||||
// This is the same logic used by the sidecar controller to interpret CTPs. Here we
|
||||
// carry it to its logical conclusion and simply include all possible identities.
|
||||
iter, err := cache.ListIterator(pbauth.WorkloadIdentityType, "id", &pbresource.Reference{
|
||||
Type: pbauth.WorkloadIdentityType,
|
||||
Tenancy: tenancy,
|
||||
}, index.IndexQueryOptions{Prefix: true})
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
var out []*pbresource.Reference
|
||||
for res := iter.Next(); res != nil; res = iter.Next() {
|
||||
out = append(out, resource.Reference(res.Id, ""))
|
||||
}
|
||||
return out, nil
|
||||
}
|
File diff suppressed because it is too large
@ -0,0 +1,236 @@
|
|||
// Copyright (c) HashiCorp, Inc.
|
||||
// SPDX-License-Identifier: BUSL-1.1
|
||||
|
||||
package implicitdestinations
|
||||
|
||||
import (
|
||||
"golang.org/x/exp/maps"
|
||||
|
||||
"github.com/hashicorp/consul/internal/catalog"
|
||||
"github.com/hashicorp/consul/internal/controller/cache/index"
|
||||
"github.com/hashicorp/consul/internal/controller/cache/indexers"
|
||||
"github.com/hashicorp/consul/internal/mesh/internal/types"
|
||||
"github.com/hashicorp/consul/internal/resource"
|
||||
"github.com/hashicorp/consul/internal/storage"
|
||||
pbauth "github.com/hashicorp/consul/proto-public/pbauth/v2beta1"
|
||||
pbmesh "github.com/hashicorp/consul/proto-public/pbmesh/v2beta1"
|
||||
"github.com/hashicorp/consul/proto-public/pbresource"
|
||||
)
|
||||
|
||||
// When an interior (mutable) foreign key pointer on watched data is used to
|
||||
// determine the resource's applicability in a dependency mapper, it is
|
||||
// subject to the "orphaned computed resource" problem. When you edit the
|
||||
// mutable pointer to point elsewhere, the mapper will only witness the NEW
|
||||
// value and will trigger reconciles for things derived from the NEW pointer,
|
||||
// but side effects from a prior reconcile using the OLD pointer will be
|
||||
// orphaned until some other event triggers that reconcile (if ever).
|
||||
//
|
||||
// To solve this we need to collect the list of bound references that were
|
||||
// "ingredients" into a computed resource's output and persist them on the
|
||||
// newly written resource. Then we load them up and index them such that we can
|
||||
// use them to AUGMENT a mapper event with additional maps using the OLD data
|
||||
// as well.
|
||||
var boundRefsIndex = indexers.BoundRefsIndex[*pbmesh.ComputedImplicitDestinations]("bound-references")
|
||||
|
||||
// Cache: reverse SVC[*] => WI[*]
|
||||
var serviceByWorkloadIdentityIndex = indexers.RefOrIDIndex(
|
||||
"service-by-workload-identity",
|
||||
func(svc *types.DecodedService) []*pbresource.Reference {
|
||||
return getWorkloadIdentitiesFromService(svc.Resource)
|
||||
},
|
||||
)
|
||||
|
||||
// Cache: reverse CTP => WI[source]
|
||||
var ctpBySourceWorkloadIdentityIndex = indexers.RefOrIDIndex(
|
||||
"ctp-by-source-workload-identity",
|
||||
func(ctp *types.DecodedComputedTrafficPermissions) []*pbresource.Reference {
|
||||
// We ignore wildcards for this index.
|
||||
exact, _, _ := getSourceWorkloadIdentitiesFromCTP(ctp)
|
||||
return maps.Values(exact)
|
||||
},
|
||||
)
|
||||
|
||||
const ctpByWildcardSourceIndexName = "ctp-by-wildcard-source"
|
||||
|
||||
func ctpByWildcardSourceIndexCreator(globalDefaultAllow bool) *index.Index {
|
||||
return indexers.DecodedMultiIndexer(
|
||||
ctpByWildcardSourceIndexName,
|
||||
index.SingleValueFromArgs(func(tn tenantedName) ([]byte, error) {
|
||||
return indexFromTenantedName(tn), nil
|
||||
}),
|
||||
func(r *types.DecodedComputedTrafficPermissions) (bool, [][]byte, error) {
|
||||
var vals [][]byte
|
||||
|
||||
if r.Data.IsDefault && globalDefaultAllow {
|
||||
// Literally everything can reach it.
|
||||
vals = append(vals, indexFromTenantedName(tenantedName{
|
||||
Partition: storage.Wildcard,
|
||||
Namespace: storage.Wildcard,
|
||||
Name: storage.Wildcard,
|
||||
}))
|
||||
return true, vals, nil
|
||||
}
|
||||
|
||||
_, wildNameInNS, wildNSInPartition := getSourceWorkloadIdentitiesFromCTP(r)
|
||||
for _, tenancy := range wildNameInNS {
|
||||
// wildcard name
|
||||
vals = append(vals, indexFromTenantedName(tenantedName{
|
||||
Partition: tenancy.Partition,
|
||||
Namespace: tenancy.Namespace,
|
||||
Name: storage.Wildcard,
|
||||
}))
|
||||
}
|
||||
for _, partition := range wildNSInPartition {
|
||||
// wildcard name+ns
|
||||
vals = append(vals, indexFromTenantedName(tenantedName{
|
||||
Partition: partition,
|
||||
Namespace: storage.Wildcard,
|
||||
Name: storage.Wildcard,
|
||||
}))
|
||||
}
|
||||
|
||||
return true, vals, nil
|
||||
},
|
||||
)
|
||||
}
|
||||
|
||||
type tenantedName struct {
|
||||
Partition string
|
||||
Namespace string
|
||||
Name string
|
||||
}
|
||||
|
||||
func indexFromTenantedName(tn tenantedName) []byte {
|
||||
var b index.Builder
|
||||
b.String(tn.Partition)
|
||||
b.String(tn.Namespace)
|
||||
b.String(tn.Name)
|
||||
return b.Bytes()
|
||||
}
|
||||
|
||||
// Cache: reverse CR => SVC[backend]
|
||||
var computedRoutesByBackendServiceIndex = indexers.RefOrIDIndex(
|
||||
"computed-routes-by-backend-service",
|
||||
func(cr *types.DecodedComputedRoutes) []*pbresource.Reference {
|
||||
return getBackendServiceRefsFromComputedRoutes(cr)
|
||||
},
|
||||
)
|
||||
|
||||
func getWorkloadIdentitiesFromService(svc *pbresource.Resource) []*pbresource.Reference {
|
||||
ids := catalog.GetBoundIdentities(svc)
|
||||
|
||||
out := make([]*pbresource.Reference, 0, len(ids))
|
||||
for _, id := range ids {
|
||||
out = append(out, &pbresource.Reference{
|
||||
Type: pbauth.WorkloadIdentityType,
|
||||
Name: id,
|
||||
Tenancy: svc.Id.Tenancy,
|
||||
})
|
||||
}
|
||||
return out
|
||||
}
|
||||
|
||||
func getSourceWorkloadIdentitiesFromCTP(
|
||||
ctp *types.DecodedComputedTrafficPermissions,
|
||||
) (exact map[resource.ReferenceKey]*pbresource.Reference, wildNames []*pbresource.Tenancy, wildNS []string) {
|
||||
var (
|
||||
out = make(map[resource.ReferenceKey]*pbresource.Reference)
|
||||
wildNameInNS = make(map[string]*pbresource.Tenancy)
|
||||
wildNSInPartition = make(map[string]struct{})
|
||||
)
|
||||
|
||||
for _, perm := range ctp.Data.AllowPermissions {
|
||||
for _, src := range perm.Sources {
|
||||
srcType := determineSourceType(src)
|
||||
if srcType != sourceTypeLocal {
|
||||
// Partition / Peer / SamenessGroup are mutually exclusive.
|
||||
continue // Ignore these for now.
|
||||
}
|
||||
// It is assumed that src.Partition != "" at this point.
|
||||
|
||||
if src.IdentityName != "" {
|
||||
// exact
|
||||
ref := &pbresource.Reference{
|
||||
Type: pbauth.WorkloadIdentityType,
|
||||
Name: src.IdentityName,
|
||||
Tenancy: &pbresource.Tenancy{
|
||||
Partition: src.Partition,
|
||||
Namespace: src.Namespace,
|
||||
},
|
||||
}
|
||||
|
||||
rk := resource.NewReferenceKey(ref)
|
||||
if _, ok := out[rk]; !ok {
|
||||
out[rk] = ref
|
||||
}
|
||||
} else if src.Namespace != "" {
|
||||
// wildcard name
|
||||
tenancy := pbauth.SourceToTenancy(src)
|
||||
tenancyStr := resource.TenancyToString(tenancy)
|
||||
if _, ok := wildNameInNS[tenancyStr]; !ok {
|
||||
wildNameInNS[tenancyStr] = tenancy
|
||||
}
|
||||
continue
|
||||
} else {
|
||||
// wildcard name+ns
|
||||
if _, ok := wildNSInPartition[src.Partition]; !ok {
|
||||
wildNSInPartition[src.Partition] = struct{}{}
|
||||
}
|
||||
continue
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
var (
|
||||
sliceWildNameInNS []*pbresource.Tenancy
|
||||
sliceWildNSInPartition []string
|
||||
)
|
||||
if len(wildNameInNS) > 0 {
|
||||
sliceWildNameInNS = maps.Values(wildNameInNS)
|
||||
}
|
||||
if len(wildNSInPartition) > 0 {
|
||||
sliceWildNSInPartition = maps.Keys(wildNSInPartition)
|
||||
}
|
||||
|
||||
return out, sliceWildNameInNS, sliceWildNSInPartition
|
||||
}
|
||||
|
||||
func getBackendServiceRefsFromComputedRoutes(cr *types.DecodedComputedRoutes) []*pbresource.Reference {
|
||||
var (
|
||||
out []*pbresource.Reference
|
||||
seen = make(map[resource.ReferenceKey]struct{})
|
||||
)
|
||||
for _, pc := range cr.Data.PortedConfigs {
|
||||
for _, target := range pc.Targets {
|
||||
ref := target.BackendRef.Ref
|
||||
rk := resource.NewReferenceKey(ref)
|
||||
if _, ok := seen[rk]; !ok {
|
||||
out = append(out, ref)
|
||||
seen[rk] = struct{}{}
|
||||
}
|
||||
}
|
||||
}
|
||||
return out
|
||||
}
|
||||
|
||||
type sourceType int
|
||||
|
||||
const (
|
||||
sourceTypeLocal sourceType = iota
|
||||
sourceTypePeer
|
||||
sourceTypeSamenessGroup
|
||||
)
|
||||
|
||||
// These rules also exist in internal/auth/internal/types during TP validation.
|
||||
func determineSourceType(src *pbauth.Source) sourceType {
|
||||
srcPeer := src.GetPeer()
|
||||
|
||||
switch {
|
||||
case srcPeer != "" && srcPeer != "local":
|
||||
return sourceTypePeer
|
||||
case src.GetSamenessGroup() != "":
|
||||
return sourceTypeSamenessGroup
|
||||
default:
|
||||
return sourceTypeLocal
|
||||
}
|
||||
}
|
|
@ -0,0 +1,408 @@
|
|||
// Copyright (c) HashiCorp, Inc.
|
||||
// SPDX-License-Identifier: BUSL-1.1
|
||||
|
||||
package implicitdestinations
|
||||
|
||||
import (
|
||||
"testing"
|
||||
|
||||
"github.com/stretchr/testify/require"
|
||||
|
||||
"github.com/hashicorp/consul/internal/auth"
|
||||
"github.com/hashicorp/consul/internal/catalog"
|
||||
"github.com/hashicorp/consul/internal/mesh/internal/types"
|
||||
"github.com/hashicorp/consul/internal/resource"
|
||||
"github.com/hashicorp/consul/internal/resource/resourcetest"
|
||||
rtest "github.com/hashicorp/consul/internal/resource/resourcetest"
|
||||
pbauth "github.com/hashicorp/consul/proto-public/pbauth/v2beta1"
|
||||
pbcatalog "github.com/hashicorp/consul/proto-public/pbcatalog/v2beta1"
|
||||
pbmesh "github.com/hashicorp/consul/proto-public/pbmesh/v2beta1"
|
||||
"github.com/hashicorp/consul/proto-public/pbresource"
|
||||
"github.com/hashicorp/consul/proto/private/prototest"
|
||||
)
|
||||
|
||||
func TestGetWorkloadIdentitiesFromService(t *testing.T) {
|
||||
tenancy := resource.DefaultNamespacedTenancy()
|
||||
|
||||
build := func(conds ...*pbresource.Condition) *pbresource.Resource {
|
||||
b := rtest.Resource(pbcatalog.ServiceType, "web").
|
||||
WithTenancy(tenancy).
|
||||
WithData(t, &pbcatalog.Service{})
|
||||
if len(conds) > 0 {
|
||||
b.WithStatus(catalog.EndpointsStatusKey, &pbresource.Status{
|
||||
Conditions: conds,
|
||||
})
|
||||
}
|
||||
return b.Build()
|
||||
}
|
||||
|
||||
fooRef := &pbresource.Reference{
|
||||
Type: pbauth.WorkloadIdentityType,
|
||||
Tenancy: tenancy,
|
||||
Name: "foo",
|
||||
}
|
||||
barRef := &pbresource.Reference{
|
||||
Type: pbauth.WorkloadIdentityType,
|
||||
Tenancy: tenancy,
|
||||
Name: "bar",
|
||||
}
|
||||
|
||||
makeRefs := func(refs ...*pbresource.Reference) []*pbresource.Reference {
|
||||
return refs
|
||||
}
|
||||
|
||||
run := getWorkloadIdentitiesFromService
|
||||
|
||||
require.Empty(t, run(build(nil)))
|
||||
require.Empty(t, run(build(&pbresource.Condition{
|
||||
Type: catalog.StatusConditionBoundIdentities,
|
||||
State: pbresource.Condition_STATE_TRUE,
|
||||
Message: "",
|
||||
})))
|
||||
prototest.AssertDeepEqual(t, makeRefs(fooRef), run(build(&pbresource.Condition{
|
||||
Type: catalog.StatusConditionBoundIdentities,
|
||||
State: pbresource.Condition_STATE_TRUE,
|
||||
Message: "foo",
|
||||
})))
|
||||
require.Empty(t, run(build(&pbresource.Condition{
|
||||
Type: catalog.StatusConditionBoundIdentities,
|
||||
State: pbresource.Condition_STATE_FALSE,
|
||||
Message: "foo",
|
||||
})))
|
||||
prototest.AssertDeepEqual(t, makeRefs(barRef, fooRef), run(build(&pbresource.Condition{
|
||||
Type: catalog.StatusConditionBoundIdentities,
|
||||
State: pbresource.Condition_STATE_TRUE,
|
||||
Message: "bar,foo", // proper order
|
||||
})))
|
||||
prototest.AssertDeepEqual(t, makeRefs(barRef, fooRef), run(build(&pbresource.Condition{
|
||||
Type: catalog.StatusConditionBoundIdentities,
|
||||
State: pbresource.Condition_STATE_TRUE,
|
||||
Message: "foo,bar", // incorrect order gets fixed
|
||||
})))
|
||||
}
|
||||
|
||||
func TestGetSourceWorkloadIdentitiesFromCTP(t *testing.T) {
|
||||
registry := resource.NewRegistry()
|
||||
types.Register(registry)
|
||||
auth.RegisterTypes(registry)
|
||||
catalog.RegisterTypes(registry)
|
||||
|
||||
type testcase struct {
|
||||
ctp *types.DecodedComputedTrafficPermissions
|
||||
expectExact []*pbresource.Reference
|
||||
expectWildNameInNS []*pbresource.Tenancy
|
||||
expectWildNSInPartition []string
|
||||
}
|
||||
|
||||
run := func(t *testing.T, tc testcase) {
|
||||
expectExactMap := make(map[resource.ReferenceKey]*pbresource.Reference)
|
||||
for _, ref := range tc.expectExact {
|
||||
rk := resource.NewReferenceKey(ref)
|
||||
expectExactMap[rk] = ref
|
||||
}
|
||||
|
||||
gotExact, gotWildNameInNS, gotWildNSInPartition := getSourceWorkloadIdentitiesFromCTP(tc.ctp)
|
||||
prototest.AssertDeepEqual(t, expectExactMap, gotExact)
|
||||
prototest.AssertElementsMatch(t, tc.expectWildNameInNS, gotWildNameInNS)
|
||||
require.ElementsMatch(t, tc.expectWildNSInPartition, gotWildNSInPartition)
|
||||
}
|
||||
|
||||
tenancy := resource.DefaultNamespacedTenancy()
|
||||
|
||||
ctpID := &pbresource.ID{
|
||||
Type: pbauth.ComputedTrafficPermissionsType,
|
||||
Tenancy: tenancy,
|
||||
Name: "ctp1",
|
||||
}
|
||||
|
||||
newRef := func(name string) *pbresource.Reference {
|
||||
return &pbresource.Reference{
|
||||
Type: pbauth.WorkloadIdentityType,
|
||||
Tenancy: tenancy,
|
||||
Name: name,
|
||||
}
|
||||
}
|
||||
|
||||
cases := map[string]testcase{
|
||||
"empty": {
|
||||
ctp: ReconcileComputedTrafficPermissions(t, nil, ctpID),
|
||||
},
|
||||
"single include": {
|
||||
ctp: ReconcileComputedTrafficPermissions(t, nil, ctpID,
|
||||
&pbauth.TrafficPermissions{
|
||||
Action: pbauth.Action_ACTION_ALLOW,
|
||||
Permissions: []*pbauth.Permission{{
|
||||
Sources: []*pbauth.Source{
|
||||
{IdentityName: "foo"},
|
||||
},
|
||||
}},
|
||||
},
|
||||
),
|
||||
expectExact: []*pbresource.Reference{
|
||||
newRef("foo"),
|
||||
},
|
||||
},
|
||||
"multiple includes (1)": {
|
||||
ctp: ReconcileComputedTrafficPermissions(t, nil, ctpID,
|
||||
&pbauth.TrafficPermissions{
|
||||
Action: pbauth.Action_ACTION_ALLOW,
|
||||
Permissions: []*pbauth.Permission{{
|
||||
Sources: []*pbauth.Source{
|
||||
{IdentityName: "foo"},
|
||||
{IdentityName: "bar"},
|
||||
},
|
||||
}},
|
||||
},
|
||||
),
|
||||
expectExact: []*pbresource.Reference{
|
||||
newRef("foo"),
|
||||
newRef("bar"),
|
||||
},
|
||||
},
|
||||
"multiple includes (2)": {
|
||||
ctp: ReconcileComputedTrafficPermissions(t, nil, ctpID,
|
||||
&pbauth.TrafficPermissions{
|
||||
Action: pbauth.Action_ACTION_ALLOW,
|
||||
Permissions: []*pbauth.Permission{
|
||||
{Sources: []*pbauth.Source{{IdentityName: "foo"}}},
|
||||
{Sources: []*pbauth.Source{{IdentityName: "bar"}}},
|
||||
},
|
||||
},
|
||||
),
|
||||
expectExact: []*pbresource.Reference{
|
||||
newRef("foo"),
|
||||
newRef("bar"),
|
||||
},
|
||||
},
|
||||
"default ns wildcard (1) / excludes ignored": {
|
||||
ctp: ReconcileComputedTrafficPermissions(t, nil, ctpID,
|
||||
&pbauth.TrafficPermissions{
|
||||
Action: pbauth.Action_ACTION_ALLOW,
|
||||
Permissions: []*pbauth.Permission{{
|
||||
Sources: []*pbauth.Source{{
|
||||
Exclude: []*pbauth.ExcludeSource{{
|
||||
IdentityName: "bar",
|
||||
}},
|
||||
}},
|
||||
}},
|
||||
},
|
||||
),
|
||||
expectWildNSInPartition: []string{"default"},
|
||||
},
|
||||
"default ns wildcard (2)": {
|
||||
ctp: ReconcileComputedTrafficPermissions(t, nil, ctpID,
|
||||
&pbauth.TrafficPermissions{
|
||||
Action: pbauth.Action_ACTION_ALLOW,
|
||||
Permissions: []*pbauth.Permission{{
|
||||
Sources: []*pbauth.Source{
|
||||
{Partition: "default"},
|
||||
},
|
||||
}},
|
||||
},
|
||||
),
|
||||
expectWildNSInPartition: []string{"default"},
|
||||
},
|
||||
"multiple ns wildcards": {
|
||||
ctp: ReconcileComputedTrafficPermissions(t, nil, ctpID,
|
||||
&pbauth.TrafficPermissions{
|
||||
Action: pbauth.Action_ACTION_ALLOW,
|
||||
Permissions: []*pbauth.Permission{{
|
||||
Sources: []*pbauth.Source{
|
||||
{Partition: "foo"},
|
||||
{Partition: "bar"},
|
||||
},
|
||||
}},
|
||||
},
|
||||
),
|
||||
expectWildNSInPartition: []string{"bar", "foo"},
|
||||
},
|
||||
"multiple ns wildcards deduped": {
|
||||
ctp: ReconcileComputedTrafficPermissions(t, nil, ctpID,
|
||||
&pbauth.TrafficPermissions{
|
||||
Action: pbauth.Action_ACTION_ALLOW,
|
||||
Permissions: []*pbauth.Permission{{
|
||||
Sources: []*pbauth.Source{
|
||||
{Partition: "bar"},
|
||||
{Partition: "bar"},
|
||||
},
|
||||
}},
|
||||
},
|
||||
),
|
||||
expectWildNSInPartition: []string{"bar"},
|
||||
},
|
||||
"name wildcard": {
|
||||
ctp: ReconcileComputedTrafficPermissions(t, nil, ctpID,
|
||||
&pbauth.TrafficPermissions{
|
||||
Action: pbauth.Action_ACTION_ALLOW,
|
||||
Permissions: []*pbauth.Permission{{
|
||||
Sources: []*pbauth.Source{
|
||||
{Partition: "default", Namespace: "zim"},
|
||||
},
|
||||
}},
|
||||
},
|
||||
),
|
||||
expectWildNameInNS: []*pbresource.Tenancy{
|
||||
{Partition: "default", Namespace: "zim"},
|
||||
},
|
||||
},
|
||||
"multiple name wildcards": {
|
||||
ctp: ReconcileComputedTrafficPermissions(t, nil, ctpID,
|
||||
&pbauth.TrafficPermissions{
|
||||
Action: pbauth.Action_ACTION_ALLOW,
|
||||
Permissions: []*pbauth.Permission{{
|
||||
Sources: []*pbauth.Source{
|
||||
{Partition: "foo", Namespace: "zim"},
|
||||
{Partition: "bar", Namespace: "gir"},
|
||||
},
|
||||
}},
|
||||
},
|
||||
),
|
||||
expectWildNameInNS: []*pbresource.Tenancy{
|
||||
{Partition: "foo", Namespace: "zim"},
|
||||
{Partition: "bar", Namespace: "gir"},
|
||||
},
|
||||
},
|
||||
"multiple name wildcards deduped": {
|
||||
ctp: ReconcileComputedTrafficPermissions(t, nil, ctpID,
|
||||
&pbauth.TrafficPermissions{
|
||||
Action: pbauth.Action_ACTION_ALLOW,
|
||||
Permissions: []*pbauth.Permission{{
|
||||
Sources: []*pbauth.Source{
|
||||
{Partition: "foo", Namespace: "zim"},
|
||||
{Partition: "foo", Namespace: "zim"},
|
||||
},
|
||||
}},
|
||||
},
|
||||
),
|
||||
expectWildNameInNS: []*pbresource.Tenancy{
|
||||
{Partition: "foo", Namespace: "zim"},
|
||||
},
|
||||
},
|
||||
"some of each": {
|
||||
ctp: ReconcileComputedTrafficPermissions(t, nil, ctpID,
|
||||
&pbauth.TrafficPermissions{
|
||||
Action: pbauth.Action_ACTION_ALLOW,
|
||||
Permissions: []*pbauth.Permission{
|
||||
{
|
||||
Sources: []*pbauth.Source{
|
||||
{Partition: "foo", Namespace: "zim"},
|
||||
{Partition: "bar", Namespace: "gir"},
|
||||
{IdentityName: "dib"},
|
||||
},
|
||||
},
|
||||
{
|
||||
Sources: []*pbauth.Source{
|
||||
{Partition: "foo"},
|
||||
{Partition: "bar"},
|
||||
{IdentityName: "gaz"},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
),
|
||||
expectWildNameInNS: []*pbresource.Tenancy{
|
||||
{Partition: "foo", Namespace: "zim"},
|
||||
{Partition: "bar", Namespace: "gir"},
|
||||
},
|
||||
expectWildNSInPartition: []string{"bar", "foo"},
|
||||
expectExact: []*pbresource.Reference{
|
||||
newRef("dib"),
|
||||
newRef("gaz"),
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
for name, tc := range cases {
|
||||
t.Run(name, func(t *testing.T) {
|
||||
run(t, tc)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestGetBackendServiceRefsFromComputedRoutes(t *testing.T) {
|
||||
type testcase struct {
|
||||
cr *types.DecodedComputedRoutes
|
||||
expect []*pbresource.Reference
|
||||
}
|
||||
|
||||
run := func(t *testing.T, tc testcase) {
|
||||
got := getBackendServiceRefsFromComputedRoutes(tc.cr)
|
||||
prototest.AssertElementsMatch(t, tc.expect, got)
|
||||
}
|
||||
|
||||
tenancy := resource.DefaultNamespacedTenancy()
|
||||
|
||||
newRef := func(name string) *pbresource.Reference {
|
||||
return &pbresource.Reference{
|
||||
Type: pbcatalog.ServiceType,
|
||||
Tenancy: tenancy,
|
||||
Name: name,
|
||||
}
|
||||
}
|
||||
|
||||
cr1 := resourcetest.Resource(pbmesh.ComputedRoutesType, "cr1").
|
||||
WithTenancy(tenancy).
|
||||
WithData(t, &pbmesh.ComputedRoutes{
|
||||
PortedConfigs: map[string]*pbmesh.ComputedPortRoutes{
|
||||
"http": {
|
||||
Targets: map[string]*pbmesh.BackendTargetDetails{
|
||||
"opaque1": {
|
||||
BackendRef: &pbmesh.BackendReference{Ref: newRef("aaa")},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
}).
|
||||
Build()
|
||||
|
||||
cr2 := resourcetest.Resource(pbmesh.ComputedRoutesType, "cr2").
|
||||
WithTenancy(tenancy).
|
||||
WithData(t, &pbmesh.ComputedRoutes{
|
||||
PortedConfigs: map[string]*pbmesh.ComputedPortRoutes{
|
||||
"http": {
|
||||
Targets: map[string]*pbmesh.BackendTargetDetails{
|
||||
"opaque1": {
|
||||
BackendRef: &pbmesh.BackendReference{Ref: newRef("aaa")},
|
||||
},
|
||||
"opaque2": {
|
||||
BackendRef: &pbmesh.BackendReference{Ref: newRef("bbb")},
|
||||
},
|
||||
},
|
||||
},
|
||||
"grpc": {
|
||||
Targets: map[string]*pbmesh.BackendTargetDetails{
|
||||
"opaque2": {
|
||||
BackendRef: &pbmesh.BackendReference{Ref: newRef("bbb")},
|
||||
},
|
||||
"opaque3": {
|
||||
BackendRef: &pbmesh.BackendReference{Ref: newRef("ccc")},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
}).
|
||||
Build()
|
||||
|
||||
cases := map[string]testcase{
|
||||
"one": {
|
||||
cr: resourcetest.MustDecode[*pbmesh.ComputedRoutes](t, cr1),
|
||||
expect: []*pbresource.Reference{
|
||||
newRef("aaa"),
|
||||
},
|
||||
},
|
||||
"two": {
|
||||
cr: resourcetest.MustDecode[*pbmesh.ComputedRoutes](t, cr2),
|
||||
expect: []*pbresource.Reference{
|
||||
newRef("aaa"),
|
||||
newRef("bbb"),
|
||||
newRef("ccc"),
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
for name, tc := range cases {
|
||||
t.Run(name, func(t *testing.T) {
|
||||
run(t, tc)
|
||||
})
|
||||
}
|
||||
}
|
|
@ -0,0 +1,170 @@
|
|||
// Copyright (c) HashiCorp, Inc.
|
||||
// SPDX-License-Identifier: BUSL-1.1
|
||||
|
||||
package implicitdestinations
|
||||
|
||||
import (
|
||||
"context"
|
||||
|
||||
"golang.org/x/exp/maps"
|
||||
|
||||
"github.com/hashicorp/consul/internal/controller"
|
||||
"github.com/hashicorp/consul/internal/controller/cache"
|
||||
"github.com/hashicorp/consul/internal/controller/dependency"
|
||||
"github.com/hashicorp/consul/internal/mesh/internal/types"
|
||||
"github.com/hashicorp/consul/internal/resource"
|
||||
"github.com/hashicorp/consul/internal/storage"
|
||||
pbauth "github.com/hashicorp/consul/proto-public/pbauth/v2beta1"
|
||||
pbcatalog "github.com/hashicorp/consul/proto-public/pbcatalog/v2beta1"
|
||||
pbmesh "github.com/hashicorp/consul/proto-public/pbmesh/v2beta1"
|
||||
"github.com/hashicorp/consul/proto-public/pbresource"
|
||||
)
|
||||
|
||||
type mapAndTransformer struct {
|
||||
globalDefaultAllow bool
|
||||
}
|
||||
|
||||
// Note: these MapZZZ functions ignore the bound refs.
|
||||
|
||||
func (m *mapAndTransformer) MapComputedTrafficPermissions(ctx context.Context, rt controller.Runtime, res *pbresource.Resource) ([]controller.Request, error) {
|
||||
// Summary: CTP <list> WI[source] <align> CID
|
||||
|
||||
dm := dependency.MapDecoded[*pbauth.ComputedTrafficPermissions](
|
||||
// (1) turn CTP -> WI[source]
|
||||
m.mapComputedTrafficPermissionsToSourceWorkloadIdentities,
|
||||
)
|
||||
return dependency.WrapAndReplaceType(pbmesh.ComputedImplicitDestinationsType, dm)(ctx, rt, res)
|
||||
}
|
||||
|
||||
func (m *mapAndTransformer) MapService(ctx context.Context, rt controller.Runtime, res *pbresource.Resource) ([]controller.Request, error) {
|
||||
// Summary: SVC[backend] <list> WI[backend] <align> CTP <list> WI[source] <align> CID
|
||||
|
||||
dm := dependency.MapperWithTransform(
|
||||
// (2) turn WI[backend] -> CTP -> WI[source]
|
||||
m.mapBackendWorkloadIdentityToSourceWorkloadIdentity,
|
||||
// (1) turn SVC[backend] => WI[backend]
|
||||
m.transformServiceToWorkloadIdentities,
|
||||
)
|
||||
return dependency.WrapAndReplaceType(pbmesh.ComputedImplicitDestinationsType, dm)(ctx, rt, res)
|
||||
}
|
||||
|
||||
func (m *mapAndTransformer) MapComputedRoutes(ctx context.Context, rt controller.Runtime, res *pbresource.Resource) ([]controller.Request, error) {
|
||||
// Summary: CR <list> SVC[backend] <list> WI[backend] <align> CTP <list> WI[source] <align> CID
|
||||
|
||||
dm := dependency.MapperWithTransform(
|
||||
// (3) turn WI[backend] -> CTP -> WI[source]
|
||||
m.mapBackendWorkloadIdentityToSourceWorkloadIdentity,
|
||||
dependency.TransformChain(
|
||||
// (1) Turn CR -> SVC[backend]
|
||||
m.transformComputedRoutesToBackendServiceRefs,
|
||||
// (2) Turn SVC[backend] -> WI[backend]
|
||||
m.transformServiceToWorkloadIdentities,
|
||||
),
|
||||
)
|
||||
return dependency.WrapAndReplaceType(pbmesh.ComputedImplicitDestinationsType, dm)(ctx, rt, res)
|
||||
}
|
||||
|
||||
func (m *mapAndTransformer) mapComputedTrafficPermissionsToSourceWorkloadIdentities(ctx context.Context, rt controller.Runtime, ctp *types.DecodedComputedTrafficPermissions) ([]controller.Request, error) {
|
||||
refs, err := m.getSourceWorkloadIdentitiesFromCTPWithWildcardExpansion(rt.Cache, ctp)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return controller.MakeRequests(pbauth.WorkloadIdentityType, refs), nil
|
||||
}
|
||||
|
||||
func (m *mapAndTransformer) getSourceWorkloadIdentitiesFromCTPWithWildcardExpansion(
|
||||
cache cache.ReadOnlyCache,
|
||||
ctp *types.DecodedComputedTrafficPermissions,
|
||||
) ([]*pbresource.Reference, error) {
|
||||
if ctp.Data.IsDefault && m.globalDefaultAllow {
|
||||
return listAllWorkloadIdentities(cache, &pbresource.Tenancy{
|
||||
Partition: storage.Wildcard,
|
||||
Namespace: storage.Wildcard,
|
||||
})
|
||||
}
|
||||
|
||||
exact, wildNames, wildNS := getSourceWorkloadIdentitiesFromCTP(ctp)
|
||||
|
||||
for _, wildTenancy := range wildNames {
|
||||
got, err := listAllWorkloadIdentities(cache, wildTenancy)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
for _, ref := range got {
|
||||
rk := resource.NewReferenceKey(ref)
|
||||
if _, ok := exact[rk]; !ok {
|
||||
exact[rk] = ref
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
for _, wildPartition := range wildNS {
|
||||
got, err := listAllWorkloadIdentities(cache, &pbresource.Tenancy{
|
||||
Partition: wildPartition,
|
||||
Namespace: storage.Wildcard,
|
||||
})
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
for _, ref := range got {
|
||||
rk := resource.NewReferenceKey(ref)
|
||||
if _, ok := exact[rk]; !ok {
|
||||
exact[rk] = ref
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return maps.Values(exact), nil
|
||||
}
|
||||
|
||||
func (m *mapAndTransformer) mapBackendWorkloadIdentityToSourceWorkloadIdentity(ctx context.Context, rt controller.Runtime, wiRes *pbresource.Resource) ([]controller.Request, error) {
|
||||
ctpID := resource.ReplaceType(pbauth.ComputedTrafficPermissionsType, wiRes.Id)
|
||||
|
||||
ctp, err := cache.GetDecoded[*pbauth.ComputedTrafficPermissions](rt.Cache, pbauth.ComputedTrafficPermissionsType, "id", ctpID)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
} else if ctp == nil {
|
||||
return nil, nil
|
||||
}
|
||||
|
||||
return m.mapComputedTrafficPermissionsToSourceWorkloadIdentities(ctx, rt, ctp)
|
||||
}
|
||||
|
||||
func (m *mapAndTransformer) transformServiceToWorkloadIdentities(ctx context.Context, rt controller.Runtime, res *pbresource.Resource) ([]*pbresource.Resource, error) {
|
||||
// This is deliberately thin because WIs have no body, and we'll pass this to
|
||||
// another transformer immediately anyway, so it's largely an opaque
|
||||
// carrier for the WI name string only.
|
||||
|
||||
wiIDs := getWorkloadIdentitiesFromService(res)
|
||||
|
||||
out := make([]*pbresource.Resource, 0, len(wiIDs))
|
||||
for _, wiID := range wiIDs {
|
||||
wiLite := &pbresource.Resource{
|
||||
Id: resource.IDFromReference(wiID),
|
||||
}
|
||||
out = append(out, wiLite)
|
||||
}
|
||||
|
||||
return out, nil
|
||||
}
|
||||
|
||||
func (m *mapAndTransformer) transformComputedRoutesToBackendServiceRefs(ctx context.Context, rt controller.Runtime, res *pbresource.Resource) ([]*pbresource.Resource, error) {
|
||||
cr, err := resource.Decode[*pbmesh.ComputedRoutes](res)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
svcRefs := getBackendServiceRefsFromComputedRoutes(cr)
|
||||
|
||||
out := make([]*pbresource.Resource, 0, len(svcRefs))
|
||||
for _, svcRef := range svcRefs {
|
||||
svc, err := rt.Cache.Get(pbcatalog.ServiceType, "id", svcRef)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if svc != nil {
|
||||
out = append(out, svc)
|
||||
}
|
||||
}
|
||||
return out, nil
|
||||
}
|
|
@ -0,0 +1,7 @@
|
|||
// Copyright (c) HashiCorp, Inc.
|
||||
// SPDX-License-Identifier: BUSL-1.1
|
||||
package implicitdestinations
|
||||
|
||||
const (
|
||||
ControllerID = "consul.io/implicit-destinations"
|
||||
)
|
|
@ -8,6 +8,7 @@ import (
|
|||
|
||||
"github.com/hashicorp/consul/internal/mesh/internal/controllers/apigateways"
|
||||
"github.com/hashicorp/consul/internal/mesh/internal/controllers/gatewayproxy"
|
||||
"github.com/hashicorp/consul/internal/mesh/internal/controllers/implicitdestinations"
|
||||
"github.com/hashicorp/consul/internal/mesh/internal/controllers/meshconfiguration"
|
||||
|
||||
"github.com/hashicorp/consul/agent/leafcert"
|
||||
|
@ -55,6 +56,7 @@ func Register(mgr *controller.Manager, deps Dependencies) {
|
|||
|
||||
mgr.Register(proxyconfiguration.Controller(workloadselectionmapper.New[*pbmesh.ProxyConfiguration](pbmesh.ComputedProxyConfigurationType)))
|
||||
mgr.Register(explicitdestinations.Controller(mapper.New()))
|
||||
mgr.Register(implicitdestinations.Controller(deps.DefaultAllow))
|
||||
|
||||
mgr.Register(meshgateways.Controller())
|
||||
mgr.Register(meshconfiguration.Controller())
|
||||
|
|
|
@ -78,7 +78,7 @@ func compile(
|
|||
// future we could use the others, but for now they are harmless to include
|
||||
// in the produced resource and is beneficial from an audit/debugging
|
||||
// perspective to know all of the inputs that produced this output.
|
||||
boundRefCollector := NewBoundReferenceCollector()
|
||||
boundRefCollector := resource.NewBoundReferenceCollector()
|
||||
|
||||
parentServiceDec := related.GetService(parentServiceID)
|
||||
if parentServiceDec == nil {
|
||||
|
@ -441,7 +441,7 @@ func compileFailoverConfig(
|
|||
related *loader.RelatedResources,
|
||||
failoverConfig *pbcatalog.FailoverConfig,
|
||||
targets map[string]*pbmesh.BackendTargetDetails,
|
||||
brc *BoundReferenceCollector,
|
||||
brc *resource.BoundReferenceCollector,
|
||||
) *pbmesh.ComputedFailoverConfig {
|
||||
if failoverConfig == nil {
|
||||
return nil
|
||||
|
@ -522,7 +522,7 @@ func compileHTTPRouteNode(
|
|||
res *pbresource.Resource,
|
||||
route *pbmesh.HTTPRoute,
|
||||
serviceGetter serviceGetter,
|
||||
brc *BoundReferenceCollector,
|
||||
brc *resource.BoundReferenceCollector,
|
||||
) *inputRouteNode {
|
||||
route = protoClone(route)
|
||||
node := newInputRouteNode(port)
|
||||
|
@ -599,7 +599,7 @@ func compileGRPCRouteNode(
|
|||
res *pbresource.Resource,
|
||||
route *pbmesh.GRPCRoute,
|
||||
serviceGetter serviceGetter,
|
||||
brc *BoundReferenceCollector,
|
||||
brc *resource.BoundReferenceCollector,
|
||||
) *inputRouteNode {
|
||||
route = protoClone(route)
|
||||
|
||||
|
@ -669,7 +669,7 @@ func compileTCPRouteNode(
|
|||
res *pbresource.Resource,
|
||||
route *pbmesh.TCPRoute,
|
||||
serviceGetter serviceGetter,
|
||||
brc *BoundReferenceCollector,
|
||||
brc *resource.BoundReferenceCollector,
|
||||
) *inputRouteNode {
|
||||
route = protoClone(route)
|
||||
|
||||
|
|
|
@@ -0,0 +1,102 @@
// Copyright (c) HashiCorp, Inc.
// SPDX-License-Identifier: BUSL-1.1

package types

import (
	"github.com/hashicorp/go-multierror"

	"github.com/hashicorp/consul/acl"
	"github.com/hashicorp/consul/internal/catalog"
	"github.com/hashicorp/consul/internal/resource"
	pbmesh "github.com/hashicorp/consul/proto-public/pbmesh/v2beta1"
	"github.com/hashicorp/consul/proto-public/pbresource"
)

type DecodedComputedImplicitDestinations = resource.DecodedResource[*pbmesh.ComputedImplicitDestinations]

func RegisterComputedImplicitDestinations(r resource.Registry) {
	r.Register(resource.Registration{
		Type:  pbmesh.ComputedImplicitDestinationsType,
		Proto: &pbmesh.ComputedImplicitDestinations{},
		ACLs: &resource.ACLHooks{
			Read:  aclReadHookComputedImplicitDestinations,
			Write: aclWriteHookComputedImplicitDestinations,
			List:  resource.NoOpACLListHook,
		},
		Validate: ValidateComputedImplicitDestinations,
		Scope:    resource.ScopeNamespace,
	})
}

var ValidateComputedImplicitDestinations = resource.DecodeAndValidate(validateComputedImplicitDestinations)

func validateComputedImplicitDestinations(res *DecodedComputedImplicitDestinations) error {
	var merr error
	for i, implDest := range res.Data.Destinations {
		wrapErr := func(err error) error {
			return resource.ErrInvalidListElement{
				Name:    "destinations",
				Index:   i,
				Wrapped: err,
			}
		}
		if err := validateImplicitDestination(implDest, wrapErr); err != nil {
			merr = multierror.Append(merr, err)
		}
	}
	return merr
}

func validateImplicitDestination(p *pbmesh.ImplicitDestination, wrapErr func(error) error) error {
	var merr error

	wrapRefErr := func(err error) error {
		return wrapErr(resource.ErrInvalidField{
			Name:    "destination_ref",
			Wrapped: err,
		})
	}

	if refErr := catalog.ValidateLocalServiceRefNoSection(p.DestinationRef, wrapRefErr); refErr != nil {
		merr = multierror.Append(merr, refErr)
	}

	if len(p.DestinationPorts) == 0 {
		merr = multierror.Append(merr, wrapErr(resource.ErrInvalidField{
			Name:    "destination_ports",
			Wrapped: resource.ErrEmpty,
		}))
	} else {
		for i, port := range p.DestinationPorts {
			if portErr := catalog.ValidatePortName(port); portErr != nil {
				merr = multierror.Append(merr, wrapErr(resource.ErrInvalidListElement{
					Name:    "destination_ports",
					Index:   i,
					Wrapped: portErr,
				}))
			}
		}
	}

	return merr
}

func aclReadHookComputedImplicitDestinations(
	authorizer acl.Authorizer,
	authzCtx *acl.AuthorizerContext,
	id *pbresource.ID,
	res *pbresource.Resource,
) error {
	if id != nil {
		return authorizer.ToAllowAuthorizer().IdentityReadAllowed(id.Name, authzCtx)
	}
	if res != nil {
		return authorizer.ToAllowAuthorizer().IdentityReadAllowed(res.Id.Name, authzCtx)
	}
	return resource.ErrNeedResource
}

func aclWriteHookComputedImplicitDestinations(authorizer acl.Authorizer, authzContext *acl.AuthorizerContext, res *pbresource.Resource) error {
	return authorizer.ToAllowAuthorizer().OperatorWriteAllowed(authzContext)
}
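For orientation while reading the validator above, here is a minimal sketch (not part of this change) of a payload that should pass ValidateComputedImplicitDestinations: a Service-typed destination_ref with an explicit tenancy and at least one valid port name, assuming the pbmesh, pbcatalog, and pbresource packages imported elsewhere in this change. The service name, tenancy values, and port are hypothetical.

```go
// Hypothetical example payload; the name "api", the default/default tenancy,
// and the "http" port are illustrative values only.
var exampleComputedImplicitDestinations = &pbmesh.ComputedImplicitDestinations{
	Destinations: []*pbmesh.ImplicitDestination{{
		DestinationRef: &pbresource.Reference{
			Type:    pbcatalog.ServiceType,
			Tenancy: &pbresource.Tenancy{Partition: "default", Namespace: "default"},
			Name:    "api",
		},
		DestinationPorts: []string{"http"},
	}},
}
```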
@ -0,0 +1,268 @@
|
|||
// Copyright (c) HashiCorp, Inc.
|
||||
// SPDX-License-Identifier: BUSL-1.1
|
||||
|
||||
package types
|
||||
|
||||
import (
|
||||
"strings"
|
||||
"testing"
|
||||
|
||||
"github.com/stretchr/testify/require"
|
||||
|
||||
"github.com/hashicorp/consul/acl"
|
||||
"github.com/hashicorp/consul/agent/structs"
|
||||
"github.com/hashicorp/consul/internal/resource"
|
||||
"github.com/hashicorp/consul/internal/resource/resourcetest"
|
||||
pbcatalog "github.com/hashicorp/consul/proto-public/pbcatalog/v2beta1"
|
||||
pbmesh "github.com/hashicorp/consul/proto-public/pbmesh/v2beta1"
|
||||
"github.com/hashicorp/consul/proto-public/pbresource"
|
||||
"github.com/hashicorp/consul/proto/private/prototest"
|
||||
"github.com/hashicorp/consul/sdk/testutil"
|
||||
)
|
||||
|
||||
func TestValidateComputedImplicitDestinations(t *testing.T) {
|
||||
type testcase struct {
|
||||
data *pbmesh.ComputedImplicitDestinations
|
||||
expectErr string
|
||||
}
|
||||
run := func(t *testing.T, tc testcase) {
|
||||
res := resourcetest.Resource(pbmesh.ComputedImplicitDestinationsType, "api").
|
||||
WithTenancy(resource.DefaultNamespacedTenancy()).
|
||||
WithData(t, tc.data).
|
||||
Build()
|
||||
|
||||
err := ValidateComputedImplicitDestinations(res)
|
||||
|
||||
// Verify that validate didn't actually change the object.
|
||||
got := resourcetest.MustDecode[*pbmesh.ComputedImplicitDestinations](t, res)
|
||||
prototest.AssertDeepEqual(t, tc.data, got.Data)
|
||||
|
||||
if tc.expectErr == "" {
|
||||
require.NoError(t, err)
|
||||
} else {
|
||||
testutil.RequireErrorContains(t, err, tc.expectErr)
|
||||
}
|
||||
}
|
||||
|
||||
cases := map[string]testcase{
|
||||
// emptiness
|
||||
"empty": {
|
||||
data: &pbmesh.ComputedImplicitDestinations{},
|
||||
},
|
||||
"svc/nil ref": {
|
||||
data: &pbmesh.ComputedImplicitDestinations{
|
||||
Destinations: []*pbmesh.ImplicitDestination{
|
||||
{DestinationRef: nil},
|
||||
},
|
||||
},
|
||||
expectErr: `invalid element at index 0 of list "destinations": invalid "destination_ref" field: missing required field`,
|
||||
},
|
||||
"svc/bad type": {
|
||||
data: &pbmesh.ComputedImplicitDestinations{
|
||||
Destinations: []*pbmesh.ImplicitDestination{
|
||||
{DestinationRef: newRefWithTenancy(pbcatalog.WorkloadType, "default.default", "api")},
|
||||
},
|
||||
},
|
||||
expectErr: `invalid element at index 0 of list "destinations": invalid "destination_ref" field: invalid "type" field: reference must have type catalog.v2beta1.Service`,
|
||||
},
|
||||
"svc/nil tenancy": {
|
||||
data: &pbmesh.ComputedImplicitDestinations{
|
||||
Destinations: []*pbmesh.ImplicitDestination{
|
||||
{DestinationRef: &pbresource.Reference{Type: pbcatalog.ServiceType, Name: "api"}},
|
||||
},
|
||||
},
|
||||
expectErr: `invalid element at index 0 of list "destinations": invalid "destination_ref" field: invalid "tenancy" field: missing required field`,
|
||||
},
|
||||
"svc/bad dest tenancy/partition": {
|
||||
data: &pbmesh.ComputedImplicitDestinations{
|
||||
Destinations: []*pbmesh.ImplicitDestination{
|
||||
{DestinationRef: newRefWithTenancy(pbcatalog.ServiceType, ".bar", "api")},
|
||||
},
|
||||
},
|
||||
expectErr: `invalid element at index 0 of list "destinations": invalid "destination_ref" field: invalid "tenancy" field: invalid "partition" field: cannot be empty`,
|
||||
},
|
||||
"svc/bad dest tenancy/namespace": {
|
||||
data: &pbmesh.ComputedImplicitDestinations{
|
||||
Destinations: []*pbmesh.ImplicitDestination{
|
||||
{DestinationRef: newRefWithTenancy(pbcatalog.ServiceType, "foo", "api")},
|
||||
},
|
||||
},
|
||||
expectErr: `invalid element at index 0 of list "destinations": invalid "destination_ref" field: invalid "tenancy" field: invalid "namespace" field: cannot be empty`,
|
||||
},
|
||||
"no ports": {
|
||||
data: &pbmesh.ComputedImplicitDestinations{
|
||||
Destinations: []*pbmesh.ImplicitDestination{
|
||||
{
|
||||
DestinationRef: newRefWithTenancy(pbcatalog.ServiceType, "foo.bar", "api"),
|
||||
},
|
||||
},
|
||||
},
|
||||
expectErr: `invalid element at index 0 of list "destinations": invalid "destination_ports" field: cannot be empty`,
|
||||
},
|
||||
"bad port/empty": {
|
||||
data: &pbmesh.ComputedImplicitDestinations{
|
||||
Destinations: []*pbmesh.ImplicitDestination{
|
||||
{
|
||||
DestinationRef: newRefWithTenancy(pbcatalog.ServiceType, "foo.bar", "api"),
|
||||
DestinationPorts: []string{""},
|
||||
},
|
||||
},
|
||||
},
|
||||
expectErr: `invalid element at index 0 of list "destinations": invalid element at index 0 of list "destination_ports": cannot be empty`,
|
||||
},
|
||||
"bad port/no letters": {
|
||||
data: &pbmesh.ComputedImplicitDestinations{
|
||||
Destinations: []*pbmesh.ImplicitDestination{
|
||||
{
|
||||
DestinationRef: newRefWithTenancy(pbcatalog.ServiceType, "foo.bar", "api"),
|
||||
DestinationPorts: []string{"1234"},
|
||||
},
|
||||
},
|
||||
},
|
||||
expectErr: `invalid element at index 0 of list "destinations": invalid element at index 0 of list "destination_ports": value must be 1-15 characters`,
|
||||
},
|
||||
"bad port/too long": {
|
||||
data: &pbmesh.ComputedImplicitDestinations{
|
||||
Destinations: []*pbmesh.ImplicitDestination{
|
||||
{
|
||||
DestinationRef: newRefWithTenancy(pbcatalog.ServiceType, "foo.bar", "api"),
|
||||
DestinationPorts: []string{strings.Repeat("a", 16)},
|
||||
},
|
||||
},
|
||||
},
|
||||
expectErr: `invalid element at index 0 of list "destinations": invalid element at index 0 of list "destination_ports": value must be 1-15 characters`,
|
||||
},
|
||||
"normal": {
|
||||
data: &pbmesh.ComputedImplicitDestinations{
|
||||
Destinations: []*pbmesh.ImplicitDestination{
|
||||
{
|
||||
DestinationRef: newRefWithTenancy(pbcatalog.ServiceType, "foo.bar", "api"),
|
||||
DestinationPorts: []string{"p1"},
|
||||
},
|
||||
{
|
||||
DestinationRef: newRefWithTenancy(pbcatalog.ServiceType, "foo.zim", "api"),
|
||||
DestinationPorts: []string{"p2"},
|
||||
},
|
||||
{
|
||||
DestinationRef: newRefWithTenancy(pbcatalog.ServiceType, "gir.zim", "api"),
|
||||
DestinationPorts: []string{"p3"},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
for name, tc := range cases {
|
||||
t.Run(name, func(t *testing.T) {
|
||||
run(t, tc)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestComputedImplicitDestinationsACLs(t *testing.T) {
|
||||
// Wire up a registry to generically invoke hooks
|
||||
registry := resource.NewRegistry()
|
||||
Register(registry)
|
||||
|
||||
type testcase struct {
|
||||
rules string
|
||||
check func(t *testing.T, authz acl.Authorizer, res *pbresource.Resource)
|
||||
readOK string
|
||||
writeOK string
|
||||
listOK string
|
||||
}
|
||||
|
||||
const (
|
||||
DENY = "deny"
|
||||
ALLOW = "allow"
|
||||
DEFAULT = "default"
|
||||
)
|
||||
|
||||
checkF := func(t *testing.T, expect string, got error) {
|
||||
switch expect {
|
||||
case ALLOW:
|
||||
if acl.IsErrPermissionDenied(got) {
|
||||
t.Fatal("should be allowed")
|
||||
}
|
||||
case DENY:
|
||||
if !acl.IsErrPermissionDenied(got) {
|
||||
t.Fatal("should be denied")
|
||||
}
|
||||
case DEFAULT:
|
||||
require.Nil(t, got, "expected fallthrough decision")
|
||||
default:
|
||||
t.Fatalf("unexpected expectation: %q", expect)
|
||||
}
|
||||
}
|
||||
|
||||
reg, ok := registry.Resolve(pbmesh.ComputedImplicitDestinationsType)
|
||||
require.True(t, ok)
|
||||
|
||||
run := func(t *testing.T, tc testcase) {
|
||||
cidData := &pbmesh.ComputedImplicitDestinations{}
|
||||
res := resourcetest.Resource(pbmesh.ComputedImplicitDestinationsType, "wi1").
|
||||
WithTenancy(resource.DefaultNamespacedTenancy()).
|
||||
WithData(t, cidData).
|
||||
Build()
|
||||
resourcetest.ValidateAndNormalize(t, registry, res)
|
||||
|
||||
config := acl.Config{
|
||||
WildcardName: structs.WildcardSpecifier,
|
||||
}
|
||||
authz, err := acl.NewAuthorizerFromRules(tc.rules, &config, nil)
|
||||
require.NoError(t, err)
|
||||
authz = acl.NewChainedAuthorizer([]acl.Authorizer{authz, acl.DenyAll()})
|
||||
|
||||
t.Run("read", func(t *testing.T) {
|
||||
err := reg.ACLs.Read(authz, &acl.AuthorizerContext{}, res.Id, res)
|
||||
checkF(t, tc.readOK, err)
|
||||
})
|
||||
t.Run("write", func(t *testing.T) {
|
||||
err := reg.ACLs.Write(authz, &acl.AuthorizerContext{}, res)
|
||||
checkF(t, tc.writeOK, err)
|
||||
})
|
||||
t.Run("list", func(t *testing.T) {
|
||||
err := reg.ACLs.List(authz, &acl.AuthorizerContext{})
|
||||
checkF(t, tc.listOK, err)
|
||||
})
|
||||
}
|
||||
|
||||
cases := map[string]testcase{
|
||||
"no rules": {
|
||||
rules: ``,
|
||||
readOK: DENY,
|
||||
writeOK: DENY,
|
||||
listOK: DEFAULT,
|
||||
},
|
||||
"operator read": {
|
||||
rules: `operator = "read" `,
|
||||
readOK: DENY,
|
||||
writeOK: DENY,
|
||||
listOK: DEFAULT,
|
||||
},
|
||||
"operator write": {
|
||||
rules: `operator = "write" `,
|
||||
readOK: DENY,
|
||||
writeOK: ALLOW,
|
||||
listOK: DEFAULT,
|
||||
},
|
||||
"workload identity w1 read": {
|
||||
rules: `identity "wi1" { policy = "read" }`,
|
||||
readOK: ALLOW,
|
||||
writeOK: DENY,
|
||||
listOK: DEFAULT,
|
||||
},
|
||||
"workload identity w1 write": {
|
||||
rules: `identity "wi1" { policy = "write" }`,
|
||||
readOK: ALLOW,
|
||||
writeOK: DENY,
|
||||
listOK: DEFAULT,
|
||||
},
|
||||
}
|
||||
|
||||
for name, tc := range cases {
|
||||
t.Run(name, func(t *testing.T) {
|
||||
run(t, tc)
|
||||
})
|
||||
}
|
||||
}
|
|
@@ -19,7 +19,9 @@ type (
	DecodedDestinationsConfiguration = resource.DecodedResource[*pbmesh.DestinationsConfiguration]
	DecodedComputedRoutes = resource.DecodedResource[*pbmesh.ComputedRoutes]
+	DecodedComputedTrafficPermissions = resource.DecodedResource[*pbauth.ComputedTrafficPermissions]
+	DecodedTrafficPermissions = resource.DecodedResource[*pbauth.TrafficPermissions]
	DecodedComputedFailoverPolicy = resource.DecodedResource[*pbcatalog.ComputedFailoverPolicy]
	DecodedFailoverPolicy = resource.DecodedResource[*pbcatalog.FailoverPolicy]
	DecodedService = resource.DecodedResource[*pbcatalog.Service]
	DecodedServiceEndpoints = resource.DecodedResource[*pbcatalog.ServiceEndpoints]
	DecodedWorkload = resource.DecodedResource[*pbcatalog.Workload]
@@ -12,6 +12,7 @@ func Register(r resource.Registry) {
	RegisterComputedProxyConfiguration(r)
	RegisterDestinations(r)
	RegisterComputedExplicitDestinations(r)
+	RegisterComputedImplicitDestinations(r)
	RegisterProxyStateTemplate(r)
	RegisterHTTPRoute(r)
	RegisterTCPRoute(r)
@@ -1,17 +1,16 @@
// Copyright (c) HashiCorp, Inc.
// SPDX-License-Identifier: BUSL-1.1

-package routes
+package resource

import (
	"sort"

-	"github.com/hashicorp/consul/internal/resource"
	"github.com/hashicorp/consul/proto-public/pbresource"
)

type sectionRefKey struct {
-	resource.ReferenceKey
+	ReferenceKey
	Section string
}

@@ -36,17 +35,17 @@ func (c *BoundReferenceCollector) List() []*pbresource.Reference {
	}

	sort.Slice(out, func(i, j int) bool {
-		return resource.LessReference(out[i], out[j])
+		return LessReference(out[i], out[j])
	})

	return out
}

-func (c *BoundReferenceCollector) AddRefOrID(ref resource.ReferenceOrID) {
+func (c *BoundReferenceCollector) AddRefOrID(ref ReferenceOrID) {
	if c == nil {
		return
	}
-	c.AddRef(resource.ReferenceFromReferenceOrID(ref))
+	c.AddRef(ReferenceFromReferenceOrID(ref))
}

func (c *BoundReferenceCollector) AddRef(ref *pbresource.Reference) {
@@ -54,7 +53,7 @@ func (c *BoundReferenceCollector) AddRef(ref *pbresource.Reference) {
		return
	}
	srk := sectionRefKey{
-		ReferenceKey: resource.NewReferenceKey(ref),
+		ReferenceKey: NewReferenceKey(ref),
		Section:      ref.Section,
	}
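A reading aid, not part of the diff: with the collector now living in the resource package, a controller that emits a computed resource can thread it through roughly as sketched below. The helper name and its arguments are hypothetical; only NewBoundReferenceCollector, AddRefOrID, and List come from the code above.

```go
// Hypothetical helper: gather every input that contributed to the computed
// resource and persist the de-duplicated, sorted list alongside the output.
func buildComputedImplicitDestinations(
	dests []*pbmesh.ImplicitDestination,
	inputIDs []*pbresource.ID, // IDs of resources read while computing dests
) *pbmesh.ComputedImplicitDestinations {
	brc := resource.NewBoundReferenceCollector()
	for _, id := range inputIDs {
		brc.AddRefOrID(id) // duplicates collapse; List() returns a sorted slice
	}
	return &pbmesh.ComputedImplicitDestinations{
		Destinations:    dests,
		BoundReferences: brc.List(),
	}
}
```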
@@ -0,0 +1,28 @@
// Code generated by protoc-gen-go-binary. DO NOT EDIT.
// source: pbmesh/v2beta1/computed_implicit_destinations.proto

package meshv2beta1

import (
	"google.golang.org/protobuf/proto"
)

// MarshalBinary implements encoding.BinaryMarshaler
func (msg *ComputedImplicitDestinations) MarshalBinary() ([]byte, error) {
	return proto.Marshal(msg)
}

// UnmarshalBinary implements encoding.BinaryUnmarshaler
func (msg *ComputedImplicitDestinations) UnmarshalBinary(b []byte) error {
	return proto.Unmarshal(b, msg)
}

// MarshalBinary implements encoding.BinaryMarshaler
func (msg *ImplicitDestination) MarshalBinary() ([]byte, error) {
	return proto.Marshal(msg)
}

// UnmarshalBinary implements encoding.BinaryUnmarshaler
func (msg *ImplicitDestination) UnmarshalBinary(b []byte) error {
	return proto.Unmarshal(b, msg)
}
@ -0,0 +1,273 @@
|
|||
// Code generated by protoc-gen-go. DO NOT EDIT.
|
||||
// versions:
|
||||
// protoc-gen-go v1.31.0
|
||||
// protoc (unknown)
|
||||
// source: pbmesh/v2beta1/computed_implicit_destinations.proto
|
||||
|
||||
package meshv2beta1
|
||||
|
||||
import (
|
||||
pbresource "github.com/hashicorp/consul/proto-public/pbresource"
|
||||
protoreflect "google.golang.org/protobuf/reflect/protoreflect"
|
||||
protoimpl "google.golang.org/protobuf/runtime/protoimpl"
|
||||
reflect "reflect"
|
||||
sync "sync"
|
||||
)
|
||||
|
||||
const (
|
||||
// Verify that this generated code is sufficiently up-to-date.
|
||||
_ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion)
|
||||
// Verify that runtime/protoimpl is sufficiently up-to-date.
|
||||
_ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20)
|
||||
)
|
||||
|
||||
// ImplicitDestinations tracks destination services for a given workload identity.
|
||||
type ComputedImplicitDestinations struct {
|
||||
state protoimpl.MessageState
|
||||
sizeCache protoimpl.SizeCache
|
||||
unknownFields protoimpl.UnknownFields
|
||||
|
||||
// destinations is the list of destinations.
|
||||
Destinations []*ImplicitDestination `protobuf:"bytes,1,rep,name=destinations,proto3" json:"destinations,omitempty"`
|
||||
// BoundReferences is a slice of mixed type references of resources that were
|
||||
// involved in the formulation of this resource.
|
||||
BoundReferences []*pbresource.Reference `protobuf:"bytes,2,rep,name=bound_references,json=boundReferences,proto3" json:"bound_references,omitempty"`
|
||||
}
|
||||
|
||||
func (x *ComputedImplicitDestinations) Reset() {
|
||||
*x = ComputedImplicitDestinations{}
|
||||
if protoimpl.UnsafeEnabled {
|
||||
mi := &file_pbmesh_v2beta1_computed_implicit_destinations_proto_msgTypes[0]
|
||||
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
|
||||
ms.StoreMessageInfo(mi)
|
||||
}
|
||||
}
|
||||
|
||||
func (x *ComputedImplicitDestinations) String() string {
|
||||
return protoimpl.X.MessageStringOf(x)
|
||||
}
|
||||
|
||||
func (*ComputedImplicitDestinations) ProtoMessage() {}
|
||||
|
||||
func (x *ComputedImplicitDestinations) ProtoReflect() protoreflect.Message {
|
||||
mi := &file_pbmesh_v2beta1_computed_implicit_destinations_proto_msgTypes[0]
|
||||
if protoimpl.UnsafeEnabled && x != nil {
|
||||
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
|
||||
if ms.LoadMessageInfo() == nil {
|
||||
ms.StoreMessageInfo(mi)
|
||||
}
|
||||
return ms
|
||||
}
|
||||
return mi.MessageOf(x)
|
||||
}
|
||||
|
||||
// Deprecated: Use ComputedImplicitDestinations.ProtoReflect.Descriptor instead.
|
||||
func (*ComputedImplicitDestinations) Descriptor() ([]byte, []int) {
|
||||
return file_pbmesh_v2beta1_computed_implicit_destinations_proto_rawDescGZIP(), []int{0}
|
||||
}
|
||||
|
||||
func (x *ComputedImplicitDestinations) GetDestinations() []*ImplicitDestination {
|
||||
if x != nil {
|
||||
return x.Destinations
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (x *ComputedImplicitDestinations) GetBoundReferences() []*pbresource.Reference {
|
||||
if x != nil {
|
||||
return x.BoundReferences
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// ImplicitDestination contains a reference to a catalog service and a list of
|
||||
// port names that are allowed by TrafficPermissions.
|
||||
type ImplicitDestination struct {
|
||||
state protoimpl.MessageState
|
||||
sizeCache protoimpl.SizeCache
|
||||
unknownFields protoimpl.UnknownFields
|
||||
|
||||
DestinationRef *pbresource.Reference `protobuf:"bytes,1,opt,name=destination_ref,json=destinationRef,proto3" json:"destination_ref,omitempty"`
|
||||
DestinationPorts []string `protobuf:"bytes,2,rep,name=destination_ports,json=destinationPorts,proto3" json:"destination_ports,omitempty"`
|
||||
}
|
||||
|
||||
func (x *ImplicitDestination) Reset() {
|
||||
*x = ImplicitDestination{}
|
||||
if protoimpl.UnsafeEnabled {
|
||||
mi := &file_pbmesh_v2beta1_computed_implicit_destinations_proto_msgTypes[1]
|
||||
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
|
||||
ms.StoreMessageInfo(mi)
|
||||
}
|
||||
}
|
||||
|
||||
func (x *ImplicitDestination) String() string {
|
||||
return protoimpl.X.MessageStringOf(x)
|
||||
}
|
||||
|
||||
func (*ImplicitDestination) ProtoMessage() {}
|
||||
|
||||
func (x *ImplicitDestination) ProtoReflect() protoreflect.Message {
|
||||
mi := &file_pbmesh_v2beta1_computed_implicit_destinations_proto_msgTypes[1]
|
||||
if protoimpl.UnsafeEnabled && x != nil {
|
||||
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
|
||||
if ms.LoadMessageInfo() == nil {
|
||||
ms.StoreMessageInfo(mi)
|
||||
}
|
||||
return ms
|
||||
}
|
||||
return mi.MessageOf(x)
|
||||
}
|
||||
|
||||
// Deprecated: Use ImplicitDestination.ProtoReflect.Descriptor instead.
|
||||
func (*ImplicitDestination) Descriptor() ([]byte, []int) {
|
||||
return file_pbmesh_v2beta1_computed_implicit_destinations_proto_rawDescGZIP(), []int{1}
|
||||
}
|
||||
|
||||
func (x *ImplicitDestination) GetDestinationRef() *pbresource.Reference {
|
||||
if x != nil {
|
||||
return x.DestinationRef
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (x *ImplicitDestination) GetDestinationPorts() []string {
|
||||
if x != nil {
|
||||
return x.DestinationPorts
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
var File_pbmesh_v2beta1_computed_implicit_destinations_proto protoreflect.FileDescriptor
|
||||
|
||||
var file_pbmesh_v2beta1_computed_implicit_destinations_proto_rawDesc = []byte{
|
||||
0x0a, 0x33, 0x70, 0x62, 0x6d, 0x65, 0x73, 0x68, 0x2f, 0x76, 0x32, 0x62, 0x65, 0x74, 0x61, 0x31,
|
||||
0x2f, 0x63, 0x6f, 0x6d, 0x70, 0x75, 0x74, 0x65, 0x64, 0x5f, 0x69, 0x6d, 0x70, 0x6c, 0x69, 0x63,
|
||||
0x69, 0x74, 0x5f, 0x64, 0x65, 0x73, 0x74, 0x69, 0x6e, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x2e,
|
||||
0x70, 0x72, 0x6f, 0x74, 0x6f, 0x12, 0x1d, 0x68, 0x61, 0x73, 0x68, 0x69, 0x63, 0x6f, 0x72, 0x70,
|
||||
0x2e, 0x63, 0x6f, 0x6e, 0x73, 0x75, 0x6c, 0x2e, 0x6d, 0x65, 0x73, 0x68, 0x2e, 0x76, 0x32, 0x62,
|
||||
0x65, 0x74, 0x61, 0x31, 0x1a, 0x1c, 0x70, 0x62, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65,
|
||||
0x2f, 0x61, 0x6e, 0x6e, 0x6f, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x2e, 0x70, 0x72, 0x6f,
|
||||
0x74, 0x6f, 0x1a, 0x19, 0x70, 0x62, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x2f, 0x72,
|
||||
0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x22, 0xcf, 0x01,
|
||||
0x0a, 0x1c, 0x43, 0x6f, 0x6d, 0x70, 0x75, 0x74, 0x65, 0x64, 0x49, 0x6d, 0x70, 0x6c, 0x69, 0x63,
|
||||
0x69, 0x74, 0x44, 0x65, 0x73, 0x74, 0x69, 0x6e, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x12, 0x56,
|
||||
0x0a, 0x0c, 0x64, 0x65, 0x73, 0x74, 0x69, 0x6e, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x18, 0x01,
|
||||
0x20, 0x03, 0x28, 0x0b, 0x32, 0x32, 0x2e, 0x68, 0x61, 0x73, 0x68, 0x69, 0x63, 0x6f, 0x72, 0x70,
|
||||
0x2e, 0x63, 0x6f, 0x6e, 0x73, 0x75, 0x6c, 0x2e, 0x6d, 0x65, 0x73, 0x68, 0x2e, 0x76, 0x32, 0x62,
|
||||
0x65, 0x74, 0x61, 0x31, 0x2e, 0x49, 0x6d, 0x70, 0x6c, 0x69, 0x63, 0x69, 0x74, 0x44, 0x65, 0x73,
|
||||
0x74, 0x69, 0x6e, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x0c, 0x64, 0x65, 0x73, 0x74, 0x69, 0x6e,
|
||||
0x61, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x12, 0x4f, 0x0a, 0x10, 0x62, 0x6f, 0x75, 0x6e, 0x64, 0x5f,
|
||||
0x72, 0x65, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b,
|
||||
0x32, 0x24, 0x2e, 0x68, 0x61, 0x73, 0x68, 0x69, 0x63, 0x6f, 0x72, 0x70, 0x2e, 0x63, 0x6f, 0x6e,
|
||||
0x73, 0x75, 0x6c, 0x2e, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x2e, 0x52, 0x65, 0x66,
|
||||
0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x52, 0x0f, 0x62, 0x6f, 0x75, 0x6e, 0x64, 0x52, 0x65, 0x66,
|
||||
0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x73, 0x3a, 0x06, 0xa2, 0x93, 0x04, 0x02, 0x08, 0x03, 0x22,
|
||||
0x91, 0x01, 0x0a, 0x13, 0x49, 0x6d, 0x70, 0x6c, 0x69, 0x63, 0x69, 0x74, 0x44, 0x65, 0x73, 0x74,
|
||||
0x69, 0x6e, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x4d, 0x0a, 0x0f, 0x64, 0x65, 0x73, 0x74, 0x69,
|
||||
0x6e, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x72, 0x65, 0x66, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b,
|
||||
0x32, 0x24, 0x2e, 0x68, 0x61, 0x73, 0x68, 0x69, 0x63, 0x6f, 0x72, 0x70, 0x2e, 0x63, 0x6f, 0x6e,
|
||||
0x73, 0x75, 0x6c, 0x2e, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x2e, 0x52, 0x65, 0x66,
|
||||
0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x52, 0x0e, 0x64, 0x65, 0x73, 0x74, 0x69, 0x6e, 0x61, 0x74,
|
||||
0x69, 0x6f, 0x6e, 0x52, 0x65, 0x66, 0x12, 0x2b, 0x0a, 0x11, 0x64, 0x65, 0x73, 0x74, 0x69, 0x6e,
|
||||
0x61, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x70, 0x6f, 0x72, 0x74, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28,
|
||||
0x09, 0x52, 0x10, 0x64, 0x65, 0x73, 0x74, 0x69, 0x6e, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x50, 0x6f,
|
||||
0x72, 0x74, 0x73, 0x42, 0xa2, 0x02, 0x0a, 0x21, 0x63, 0x6f, 0x6d, 0x2e, 0x68, 0x61, 0x73, 0x68,
|
||||
0x69, 0x63, 0x6f, 0x72, 0x70, 0x2e, 0x63, 0x6f, 0x6e, 0x73, 0x75, 0x6c, 0x2e, 0x6d, 0x65, 0x73,
|
||||
0x68, 0x2e, 0x76, 0x32, 0x62, 0x65, 0x74, 0x61, 0x31, 0x42, 0x21, 0x43, 0x6f, 0x6d, 0x70, 0x75,
|
||||
0x74, 0x65, 0x64, 0x49, 0x6d, 0x70, 0x6c, 0x69, 0x63, 0x69, 0x74, 0x44, 0x65, 0x73, 0x74, 0x69,
|
||||
0x6e, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x50, 0x72, 0x6f, 0x74, 0x6f, 0x50, 0x01, 0x5a, 0x43,
|
||||
0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x68, 0x61, 0x73, 0x68, 0x69,
|
||||
0x63, 0x6f, 0x72, 0x70, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x75, 0x6c, 0x2f, 0x70, 0x72, 0x6f, 0x74,
|
||||
0x6f, 0x2d, 0x70, 0x75, 0x62, 0x6c, 0x69, 0x63, 0x2f, 0x70, 0x62, 0x6d, 0x65, 0x73, 0x68, 0x2f,
|
||||
0x76, 0x32, 0x62, 0x65, 0x74, 0x61, 0x31, 0x3b, 0x6d, 0x65, 0x73, 0x68, 0x76, 0x32, 0x62, 0x65,
|
||||
0x74, 0x61, 0x31, 0xa2, 0x02, 0x03, 0x48, 0x43, 0x4d, 0xaa, 0x02, 0x1d, 0x48, 0x61, 0x73, 0x68,
|
||||
0x69, 0x63, 0x6f, 0x72, 0x70, 0x2e, 0x43, 0x6f, 0x6e, 0x73, 0x75, 0x6c, 0x2e, 0x4d, 0x65, 0x73,
|
||||
0x68, 0x2e, 0x56, 0x32, 0x62, 0x65, 0x74, 0x61, 0x31, 0xca, 0x02, 0x1d, 0x48, 0x61, 0x73, 0x68,
|
||||
0x69, 0x63, 0x6f, 0x72, 0x70, 0x5c, 0x43, 0x6f, 0x6e, 0x73, 0x75, 0x6c, 0x5c, 0x4d, 0x65, 0x73,
|
||||
0x68, 0x5c, 0x56, 0x32, 0x62, 0x65, 0x74, 0x61, 0x31, 0xe2, 0x02, 0x29, 0x48, 0x61, 0x73, 0x68,
|
||||
0x69, 0x63, 0x6f, 0x72, 0x70, 0x5c, 0x43, 0x6f, 0x6e, 0x73, 0x75, 0x6c, 0x5c, 0x4d, 0x65, 0x73,
|
||||
0x68, 0x5c, 0x56, 0x32, 0x62, 0x65, 0x74, 0x61, 0x31, 0x5c, 0x47, 0x50, 0x42, 0x4d, 0x65, 0x74,
|
||||
0x61, 0x64, 0x61, 0x74, 0x61, 0xea, 0x02, 0x20, 0x48, 0x61, 0x73, 0x68, 0x69, 0x63, 0x6f, 0x72,
|
||||
0x70, 0x3a, 0x3a, 0x43, 0x6f, 0x6e, 0x73, 0x75, 0x6c, 0x3a, 0x3a, 0x4d, 0x65, 0x73, 0x68, 0x3a,
|
||||
0x3a, 0x56, 0x32, 0x62, 0x65, 0x74, 0x61, 0x31, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33,
|
||||
}
|
||||
|
||||
var (
|
||||
file_pbmesh_v2beta1_computed_implicit_destinations_proto_rawDescOnce sync.Once
|
||||
file_pbmesh_v2beta1_computed_implicit_destinations_proto_rawDescData = file_pbmesh_v2beta1_computed_implicit_destinations_proto_rawDesc
|
||||
)
|
||||
|
||||
func file_pbmesh_v2beta1_computed_implicit_destinations_proto_rawDescGZIP() []byte {
|
||||
file_pbmesh_v2beta1_computed_implicit_destinations_proto_rawDescOnce.Do(func() {
|
||||
file_pbmesh_v2beta1_computed_implicit_destinations_proto_rawDescData = protoimpl.X.CompressGZIP(file_pbmesh_v2beta1_computed_implicit_destinations_proto_rawDescData)
|
||||
})
|
||||
return file_pbmesh_v2beta1_computed_implicit_destinations_proto_rawDescData
|
||||
}
|
||||
|
||||
var file_pbmesh_v2beta1_computed_implicit_destinations_proto_msgTypes = make([]protoimpl.MessageInfo, 2)
|
||||
var file_pbmesh_v2beta1_computed_implicit_destinations_proto_goTypes = []interface{}{
|
||||
(*ComputedImplicitDestinations)(nil), // 0: hashicorp.consul.mesh.v2beta1.ComputedImplicitDestinations
|
||||
(*ImplicitDestination)(nil), // 1: hashicorp.consul.mesh.v2beta1.ImplicitDestination
|
||||
(*pbresource.Reference)(nil), // 2: hashicorp.consul.resource.Reference
|
||||
}
|
||||
var file_pbmesh_v2beta1_computed_implicit_destinations_proto_depIdxs = []int32{
|
||||
1, // 0: hashicorp.consul.mesh.v2beta1.ComputedImplicitDestinations.destinations:type_name -> hashicorp.consul.mesh.v2beta1.ImplicitDestination
|
||||
2, // 1: hashicorp.consul.mesh.v2beta1.ComputedImplicitDestinations.bound_references:type_name -> hashicorp.consul.resource.Reference
|
||||
2, // 2: hashicorp.consul.mesh.v2beta1.ImplicitDestination.destination_ref:type_name -> hashicorp.consul.resource.Reference
|
||||
3, // [3:3] is the sub-list for method output_type
|
||||
3, // [3:3] is the sub-list for method input_type
|
||||
3, // [3:3] is the sub-list for extension type_name
|
||||
3, // [3:3] is the sub-list for extension extendee
|
||||
0, // [0:3] is the sub-list for field type_name
|
||||
}
|
||||
|
||||
func init() { file_pbmesh_v2beta1_computed_implicit_destinations_proto_init() }
|
||||
func file_pbmesh_v2beta1_computed_implicit_destinations_proto_init() {
|
||||
if File_pbmesh_v2beta1_computed_implicit_destinations_proto != nil {
|
||||
return
|
||||
}
|
||||
if !protoimpl.UnsafeEnabled {
|
||||
file_pbmesh_v2beta1_computed_implicit_destinations_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} {
|
||||
switch v := v.(*ComputedImplicitDestinations); i {
|
||||
case 0:
|
||||
return &v.state
|
||||
case 1:
|
||||
return &v.sizeCache
|
||||
case 2:
|
||||
return &v.unknownFields
|
||||
default:
|
||||
return nil
|
||||
}
|
||||
}
|
||||
file_pbmesh_v2beta1_computed_implicit_destinations_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} {
|
||||
switch v := v.(*ImplicitDestination); i {
|
||||
case 0:
|
||||
return &v.state
|
||||
case 1:
|
||||
return &v.sizeCache
|
||||
case 2:
|
||||
return &v.unknownFields
|
||||
default:
|
||||
return nil
|
||||
}
|
||||
}
|
||||
}
|
||||
type x struct{}
|
||||
out := protoimpl.TypeBuilder{
|
||||
File: protoimpl.DescBuilder{
|
||||
GoPackagePath: reflect.TypeOf(x{}).PkgPath(),
|
||||
RawDescriptor: file_pbmesh_v2beta1_computed_implicit_destinations_proto_rawDesc,
|
||||
NumEnums: 0,
|
||||
NumMessages: 2,
|
||||
NumExtensions: 0,
|
||||
NumServices: 0,
|
||||
},
|
||||
GoTypes: file_pbmesh_v2beta1_computed_implicit_destinations_proto_goTypes,
|
||||
DependencyIndexes: file_pbmesh_v2beta1_computed_implicit_destinations_proto_depIdxs,
|
||||
MessageInfos: file_pbmesh_v2beta1_computed_implicit_destinations_proto_msgTypes,
|
||||
}.Build()
|
||||
File_pbmesh_v2beta1_computed_implicit_destinations_proto = out.File
|
||||
file_pbmesh_v2beta1_computed_implicit_destinations_proto_rawDesc = nil
|
||||
file_pbmesh_v2beta1_computed_implicit_destinations_proto_goTypes = nil
|
||||
file_pbmesh_v2beta1_computed_implicit_destinations_proto_depIdxs = nil
|
||||
}
|
|
@@ -0,0 +1,24 @@
syntax = "proto3";

package hashicorp.consul.mesh.v2beta1;

import "pbresource/annotations.proto";
import "pbresource/resource.proto";

// ImplicitDestinations tracks destination services for a given workload identity.
message ComputedImplicitDestinations {
  option (hashicorp.consul.resource.spec) = {scope: SCOPE_NAMESPACE};
  // destinations is the list of destinations.
  repeated ImplicitDestination destinations = 1;

  // BoundReferences is a slice of mixed type references of resources that were
  // involved in the formulation of this resource.
  repeated hashicorp.consul.resource.Reference bound_references = 2;
}

// ImplicitDestination contains a reference to a catalog service and a list of
// port names that are allowed by TrafficPermissions.
message ImplicitDestination {
  hashicorp.consul.resource.Reference destination_ref = 1;
  repeated string destination_ports = 2;
}
@@ -0,0 +1,48 @@
// Code generated by protoc-gen-deepcopy. DO NOT EDIT.
package meshv2beta1

import (
	proto "google.golang.org/protobuf/proto"
)

// DeepCopyInto supports using ComputedImplicitDestinations within kubernetes types, where deepcopy-gen is used.
func (in *ComputedImplicitDestinations) DeepCopyInto(out *ComputedImplicitDestinations) {
	proto.Reset(out)
	proto.Merge(out, proto.Clone(in))
}

// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ComputedImplicitDestinations. Required by controller-gen.
func (in *ComputedImplicitDestinations) DeepCopy() *ComputedImplicitDestinations {
	if in == nil {
		return nil
	}
	out := new(ComputedImplicitDestinations)
	in.DeepCopyInto(out)
	return out
}

// DeepCopyInterface is an autogenerated deepcopy function, copying the receiver, creating a new ComputedImplicitDestinations. Required by controller-gen.
func (in *ComputedImplicitDestinations) DeepCopyInterface() interface{} {
	return in.DeepCopy()
}

// DeepCopyInto supports using ImplicitDestination within kubernetes types, where deepcopy-gen is used.
func (in *ImplicitDestination) DeepCopyInto(out *ImplicitDestination) {
	proto.Reset(out)
	proto.Merge(out, proto.Clone(in))
}

// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ImplicitDestination. Required by controller-gen.
func (in *ImplicitDestination) DeepCopy() *ImplicitDestination {
	if in == nil {
		return nil
	}
	out := new(ImplicitDestination)
	in.DeepCopyInto(out)
	return out
}

// DeepCopyInterface is an autogenerated deepcopy function, copying the receiver, creating a new ImplicitDestination. Required by controller-gen.
func (in *ImplicitDestination) DeepCopyInterface() interface{} {
	return in.DeepCopy()
}
@@ -0,0 +1,33 @@
// Code generated by protoc-json-shim. DO NOT EDIT.
package meshv2beta1

import (
	protojson "google.golang.org/protobuf/encoding/protojson"
)

// MarshalJSON is a custom marshaler for ComputedImplicitDestinations
func (this *ComputedImplicitDestinations) MarshalJSON() ([]byte, error) {
	str, err := ComputedImplicitDestinationsMarshaler.Marshal(this)
	return []byte(str), err
}

// UnmarshalJSON is a custom unmarshaler for ComputedImplicitDestinations
func (this *ComputedImplicitDestinations) UnmarshalJSON(b []byte) error {
	return ComputedImplicitDestinationsUnmarshaler.Unmarshal(b, this)
}

// MarshalJSON is a custom marshaler for ImplicitDestination
func (this *ImplicitDestination) MarshalJSON() ([]byte, error) {
	str, err := ComputedImplicitDestinationsMarshaler.Marshal(this)
	return []byte(str), err
}

// UnmarshalJSON is a custom unmarshaler for ImplicitDestination
func (this *ImplicitDestination) UnmarshalJSON(b []byte) error {
	return ComputedImplicitDestinationsUnmarshaler.Unmarshal(b, this)
}

var (
	ComputedImplicitDestinationsMarshaler = &protojson.MarshalOptions{}
	ComputedImplicitDestinationsUnmarshaler = &protojson.UnmarshalOptions{DiscardUnknown: false}
)
@@ -12,6 +12,7 @@ const (

	APIGatewayKind = "APIGateway"
	ComputedExplicitDestinationsKind = "ComputedExplicitDestinations"
+	ComputedImplicitDestinationsKind = "ComputedImplicitDestinations"
	ComputedProxyConfigurationKind = "ComputedProxyConfiguration"
	ComputedRoutesKind = "ComputedRoutes"
	DestinationPolicyKind = "DestinationPolicy"
@@ -39,6 +40,12 @@ var (
		Kind:         ComputedExplicitDestinationsKind,
	}

+	ComputedImplicitDestinationsType = &pbresource.Type{
+		Group:        GroupName,
+		GroupVersion: Version,
+		Kind:         ComputedImplicitDestinationsKind,
+	}
+
	ComputedProxyConfigurationType = &pbresource.Type{
		Group:        GroupName,
		GroupVersion: Version,