diff --git a/control-plane/config/eventing-kafka-broker/100-channel/100-kafka-channel.yaml b/control-plane/config/eventing-kafka-broker/100-channel/100-kafka-channel.yaml index 1dceedae6e..6058c60cf7 100644 --- a/control-plane/config/eventing-kafka-broker/100-channel/100-kafka-channel.yaml +++ b/control-plane/config/eventing-kafka-broker/100-channel/100-kafka-channel.yaml @@ -86,6 +86,9 @@ spec: type: string CACerts: type: string + audience: + description: Audience is the OIDC audience for the deadLetterSink. + type: string retry: description: Retry is the minimum number of retries the sender should attempt when sending an event before moving it to the dead letter sink. type: integer @@ -132,6 +135,9 @@ spec: type: string CACerts: type: string + audience: + description: Audience is the OIDC audience for the deadLetterSink. + type: string retry: description: Retry is the minimum number of retries the sender should attempt when sending an event before moving it to the dead letter sink. type: integer @@ -147,15 +153,28 @@ spec: replyCACerts: description: replyCACerts is the CA certs to trust for the reply. type: string + replyAudience: + description: ReplyAudience is the OIDC audience for the replyUri. + type: string subscriberUri: description: SubscriberURI is the endpoint for the subscriber type: string subscriberCACerts: description: SubscriberCACerts is the CA certs to trust for the subscriber. type: string + subscriberAudience: + description: SubscriberAudience is the OIDC audience for the subscriberUri. + type: string uid: description: UID is used to understand the origin of the subscriber. type: string + auth: + description: Auth provides the relevant information for OIDC authentication. + type: object + properties: + serviceAccountName: + description: ServiceAccountName is the name of the generated service account used for this components OIDC authentication. + type: string status: description: Status represents the current state of the KafkaChannel. 
This data may be out of date. type: object @@ -239,6 +258,9 @@ spec: type: string deadLetterSinkCACerts: type: string + deadLetterSinkAudience: + description: OIDC audience of the dead letter sink. + type: string observedGeneration: description: ObservedGeneration is the 'Generation' of the Service that was last processed by the controller. type: integer @@ -262,6 +284,13 @@ spec: uid: description: UID is used to understand the origin of the subscriber. type: string + auth: + description: Auth provides the relevant information for OIDC authentication. + type: object + properties: + serviceAccountName: + description: ServiceAccountName is the name of the generated service account used for this components OIDC authentication. + type: string additionalPrinterColumns: - name: Ready type: string diff --git a/control-plane/config/eventing-kafka-broker/100-source/100-kafka-source.yaml b/control-plane/config/eventing-kafka-broker/100-source/100-kafka-source.yaml index ac3b38148a..ac5391ba80 100644 --- a/control-plane/config/eventing-kafka-broker/100-source/100-kafka-source.yaml +++ b/control-plane/config/eventing-kafka-broker/100-source/100-kafka-source.yaml @@ -87,9 +87,6 @@ spec: description: DeadLetterSink is the sink receiving event that could not be sent to a destination. type: object properties: - CACerts: - description: CACerts are Certification Authority (CA) certificates in PEM format according to https://www.rfc-editor.org/rfc/rfc7468. If set, these CAs are appended to the set of CAs provided by the Addressable target, if any. - type: string ref: description: Ref points to an Addressable. type: object @@ -118,6 +115,12 @@ spec: uri: description: URI can be an absolute URL(non-empty scheme and non-empty host) pointing to the target or a relative URI. Relative URIs will be resolved using the base URI retrieved from Ref. 
type: string + CACerts: + description: CACerts are Certification Authority (CA) certificates in PEM format according to https://www.rfc-editor.org/rfc/rfc7468. If set, these CAs are appended to the set of CAs provided by the Addressable target, if any. + type: string + audience: + description: Audience is the OIDC audience for the deadLetterSink. + type: string retry: description: Retry is the minimum number of retries the sender should attempt when sending an event before moving it to the dead letter sink. type: integer @@ -271,9 +274,6 @@ spec: description: Sink is a reference to an object that will resolve to a uri to use as the sink. type: object properties: - CACerts: - description: CACerts are Certification Authority (CA) certificates in PEM format according to https://www.rfc-editor.org/rfc/rfc7468. If set, these CAs are appended to the set of CAs provided by the Addressable target, if any. - type: string ref: description: Ref points to an Addressable. type: object @@ -302,6 +302,12 @@ spec: uri: description: URI can be an absolute URL(non-empty scheme and non-empty host) pointing to the target or a relative URI. Relative URIs will be resolved using the base URI retrieved from Ref. type: string + CACerts: + description: CACerts are Certification Authority (CA) certificates in PEM format according to https://www.rfc-editor.org/rfc/rfc7468. If set, these CAs are appended to the set of CAs provided by the Addressable target, if any. + type: string + audience: + description: Audience is the OIDC audience for the sink. + type: string topics: description: Topic topics to consume messages from type: array @@ -392,6 +398,16 @@ spec: sinkUri: description: SinkURI is the current active sink URI that has been configured for the Source. type: string + sinkAudience: + description: SinkAudience is the OIDC audience of the sink. + type: string + auth: + description: Auth provides the relevant information for OIDC authentication. 
+ type: object + properties: + serviceAccountName: + description: ServiceAccountName is the name of the generated service account used for this components OIDC authentication. + type: string subresources: status: {} scale: diff --git a/control-plane/config/eventing-kafka-broker/200-controller/200-controller-cluster-role.yaml b/control-plane/config/eventing-kafka-broker/200-controller/200-controller-cluster-role.yaml index f6dabf349c..bf6afcb4ff 100644 --- a/control-plane/config/eventing-kafka-broker/200-controller/200-controller-cluster-role.yaml +++ b/control-plane/config/eventing-kafka-broker/200-controller/200-controller-cluster-role.yaml @@ -140,6 +140,13 @@ rules: - update - create - delete + # To grant NamespacedBroker permissions to create OIDC tokens + - apiGroups: + - "" + resources: + - serviceaccounts/token + verbs: + - create # Scheduler permissions - apiGroups: diff --git a/control-plane/pkg/apis/internals/kafka/eventing/v1alpha1/consumer_group_types.go b/control-plane/pkg/apis/internals/kafka/eventing/v1alpha1/consumer_group_types.go index 15b19f8e22..6778c27e06 100644 --- a/control-plane/pkg/apis/internals/kafka/eventing/v1alpha1/consumer_group_types.go +++ b/control-plane/pkg/apis/internals/kafka/eventing/v1alpha1/consumer_group_types.go @@ -100,6 +100,10 @@ type ConsumerGroupSpec struct { // More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/labels/#label-selectors // +optional Selector map[string]string `json:"selector,omitempty" protobuf:"bytes,2,rep,name=selector"` + + // OIDCServiceAccountName is the name of service account used for this components + // OIDC authentication. 
+ OIDCServiceAccountName *string `json:"oidcServiceAccountName,omitempty"` } type ConsumerGroupStatus struct { @@ -120,6 +124,10 @@ type ConsumerGroupStatus struct { // +optional SubscriberCACerts *string `json:"subscriberCACerts,omitempty"` + // SubscriberAudience is the OIDC audience for the resolved URI + // +optional + SubscriberAudience *string `json:"subscriberAudience,omitempty"` + // DeliveryStatus contains a resolved URL to the dead letter sink address, and any other // resolved delivery options. eventingduckv1.DeliveryStatus `json:",inline"` diff --git a/control-plane/pkg/apis/internals/kafka/eventing/v1alpha1/consumer_types.go b/control-plane/pkg/apis/internals/kafka/eventing/v1alpha1/consumer_types.go index f4bbce0c4f..1861099c23 100644 --- a/control-plane/pkg/apis/internals/kafka/eventing/v1alpha1/consumer_types.go +++ b/control-plane/pkg/apis/internals/kafka/eventing/v1alpha1/consumer_types.go @@ -103,6 +103,10 @@ type ConsumerSpec struct { // PodBind represents a reference to the pod in which the consumer should be placed. PodBind *PodBind `json:"podBind"` + + // OIDCServiceAccountName is the name of the generated service account + // used for this components OIDC authentication. + OIDCServiceAccountName *string `json:"oidcServiceAccountName,omitempty"` } type ReplyStrategy struct { @@ -208,6 +212,10 @@ type ConsumerStatus struct { // +optional SubscriberCACerts *string `json:"subscriberCACerts,omitempty"` + // SubscriberAudience is the OIDC audience for the resolved URI + // +optional + SubscriberAudience *string `json:"subscriberAudience,omitempty"` + // DeliveryStatus contains a resolved URL to the dead letter sink address, and any other // resolved delivery options. 
eventingduck.DeliveryStatus `json:",inline"` diff --git a/control-plane/pkg/apis/internals/kafka/eventing/v1alpha1/zz_generated.deepcopy.go b/control-plane/pkg/apis/internals/kafka/eventing/v1alpha1/zz_generated.deepcopy.go index 3b0ec4bd77..ba53fa9199 100644 --- a/control-plane/pkg/apis/internals/kafka/eventing/v1alpha1/zz_generated.deepcopy.go +++ b/control-plane/pkg/apis/internals/kafka/eventing/v1alpha1/zz_generated.deepcopy.go @@ -221,6 +221,11 @@ func (in *ConsumerGroupSpec) DeepCopyInto(out *ConsumerGroupSpec) { (*out)[key] = val } } + if in.OIDCServiceAccountName != nil { + in, out := &in.OIDCServiceAccountName, &out.OIDCServiceAccountName + *out = new(string) + **out = **in + } return } @@ -249,6 +254,11 @@ func (in *ConsumerGroupStatus) DeepCopyInto(out *ConsumerGroupStatus) { *out = new(string) **out = **in } + if in.SubscriberAudience != nil { + in, out := &in.SubscriberAudience, &out.SubscriberAudience + *out = new(string) + **out = **in + } in.DeliveryStatus.DeepCopyInto(&out.DeliveryStatus) if in.Replicas != nil { in, out := &in.Replicas, &out.Replicas @@ -346,6 +356,11 @@ func (in *ConsumerSpec) DeepCopyInto(out *ConsumerSpec) { *out = new(PodBind) **out = **in } + if in.OIDCServiceAccountName != nil { + in, out := &in.OIDCServiceAccountName, &out.OIDCServiceAccountName + *out = new(string) + **out = **in + } return } @@ -373,6 +388,11 @@ func (in *ConsumerStatus) DeepCopyInto(out *ConsumerStatus) { *out = new(string) **out = **in } + if in.SubscriberAudience != nil { + in, out := &in.SubscriberAudience, &out.SubscriberAudience + *out = new(string) + **out = **in + } in.DeliveryStatus.DeepCopyInto(&out.DeliveryStatus) return } diff --git a/control-plane/pkg/apis/sources/v1beta1/kafka_lifecycle.go b/control-plane/pkg/apis/sources/v1beta1/kafka_lifecycle.go index b7457341ee..d8c966c2dd 100644 --- a/control-plane/pkg/apis/sources/v1beta1/kafka_lifecycle.go +++ b/control-plane/pkg/apis/sources/v1beta1/kafka_lifecycle.go @@ -46,6 +46,9 @@ const ( // 
KafkaConditionInitialOffsetsCommitted is True when the KafkaSource has committed the // initial offset of all claims KafkaConditionInitialOffsetsCommitted apis.ConditionType = "InitialOffsetsCommitted" + + // KafkaConditionOIDCIdentityCreated has status True when the KafkaSource has created an OIDC identity. + KafkaConditionOIDCIdentityCreated apis.ConditionType = "OIDCIdentityCreated" ) var ( @@ -54,6 +57,7 @@ var ( KafkaConditionDeployed, KafkaConditionConnectionEstablished, KafkaConditionInitialOffsetsCommitted, + KafkaConditionOIDCIdentityCreated, ) kafkaCondSetLock = sync.RWMutex{} @@ -91,6 +95,7 @@ func (s *KafkaSourceStatus) MarkSink(addr *duckv1.Addressable) { if addr.URL != nil && !addr.URL.IsEmpty() { s.SinkURI = addr.URL s.SinkCACerts = addr.CACerts + s.SinkAudience = addr.Audience KafkaSourceCondSet.Manage(s).MarkTrue(KafkaConditionSinkProvided) } else { KafkaSourceCondSet.Manage(s).MarkUnknown(KafkaConditionSinkProvided, "SinkEmpty", "Sink has resolved to empty.%s", "") @@ -160,6 +165,22 @@ func (s *KafkaSourceStatus) MarkInitialOffsetNotCommitted(reason, messageFormat KafkaSourceCondSet.Manage(s).MarkFalse(KafkaConditionInitialOffsetsCommitted, reason, messageFormat, messageA...) } +func (s *KafkaSourceStatus) MarkOIDCIdentityCreatedSucceeded() { + KafkaSourceCondSet.Manage(s).MarkTrue(KafkaConditionOIDCIdentityCreated) +} + +func (s *KafkaSourceStatus) MarkOIDCIdentityCreatedSucceededWithReason(reason, messageFormat string, messageA ...interface{}) { + KafkaSourceCondSet.Manage(s).MarkTrueWithReason(KafkaConditionOIDCIdentityCreated, reason, messageFormat, messageA...) +} + +func (s *KafkaSourceStatus) MarkOIDCIdentityCreatedFailed(reason, messageFormat string, messageA ...interface{}) { + KafkaSourceCondSet.Manage(s).MarkFalse(KafkaConditionOIDCIdentityCreated, reason, messageFormat, messageA...) 
+} + +func (s *KafkaSourceStatus) MarkOIDCIdentityCreatedUnknown(reason, messageFormat string, messageA ...interface{}) { + KafkaSourceCondSet.Manage(s).MarkUnknown(KafkaConditionOIDCIdentityCreated, reason, messageFormat, messageA...) +} + func (s *KafkaSourceStatus) UpdateConsumerGroupStatus(status string) { s.Claims = status } diff --git a/control-plane/pkg/contract/contract.pb.go b/control-plane/pkg/contract/contract.pb.go index 4b1d229997..373fc069ce 100644 --- a/control-plane/pkg/contract/contract.pb.go +++ b/control-plane/pkg/contract/contract.pb.go @@ -703,6 +703,7 @@ type DialectedFilter struct { unknownFields protoimpl.UnknownFields // Types that are assignable to Filter: + // // *DialectedFilter_Exact // *DialectedFilter_Prefix // *DialectedFilter_Suffix @@ -909,6 +910,8 @@ type EgressConfig struct { DeadLetter string `protobuf:"bytes,1,opt,name=deadLetter,proto3" json:"deadLetter,omitempty"` // Dead Letter CA Cert is the CA Cert used for HTTPS communication through dead letter DeadLetterCACerts string `protobuf:"bytes,6,opt,name=deadLetterCACerts,proto3" json:"deadLetterCACerts,omitempty"` + // Dead Letter Audience is the OIDC audience of the dead letter + DeadLetterAudience string `protobuf:"bytes,7,opt,name=deadLetterAudience,proto3" json:"deadLetterAudience,omitempty"` // retry is the minimum number of retries the sender should attempt when // sending an event before moving it to the dead letter sink. 
// @@ -968,6 +971,13 @@ func (x *EgressConfig) GetDeadLetterCACerts() string { return "" } +func (x *EgressConfig) GetDeadLetterAudience() string { + if x != nil { + return x.DeadLetterAudience + } + return "" +} + func (x *EgressConfig) GetRetry() uint32 { if x != nil { return x.Retry @@ -1007,13 +1017,18 @@ type Egress struct { Destination string `protobuf:"bytes,2,opt,name=destination,proto3" json:"destination,omitempty"` // destination CA Cert is the CA Cert used for HTTPS communication through destination DestinationCACerts string `protobuf:"bytes,15,opt,name=destinationCACerts,proto3" json:"destinationCACerts,omitempty"` + // OIDC audience of the destination + DestinationAudience string `protobuf:"bytes,17,opt,name=destinationAudience,proto3" json:"destinationAudience,omitempty"` // Types that are assignable to ReplyStrategy: + // // *Egress_ReplyUrl // *Egress_ReplyToOriginalTopic // *Egress_DiscardReply ReplyStrategy isEgress_ReplyStrategy `protobuf_oneof:"replyStrategy"` // replyUrl CA Cert is the CA Cert used for HTTPS communication through replyUrl ReplyUrlCACerts string `protobuf:"bytes,16,opt,name=replyUrlCACerts,proto3" json:"replyUrlCACerts,omitempty"` + // OIDC audience of the replyUrl + ReplyUrlAudience string `protobuf:"bytes,18,opt,name=replyUrlAudience,proto3" json:"replyUrlAudience,omitempty"` // A filter for performing exact match against Cloud Events attributes Filter *Filter `protobuf:"bytes,5,opt,name=filter,proto3" json:"filter,omitempty"` // Id of the egress @@ -1039,6 +1054,8 @@ type Egress struct { VReplicas int32 `protobuf:"varint,13,opt,name=vReplicas,proto3" json:"vReplicas,omitempty"` // Egress feature flags. FeatureFlags *EgressFeatureFlags `protobuf:"bytes,14,opt,name=featureFlags,proto3" json:"featureFlags,omitempty"` + // Name of the service account to use for OIDC authentication. 
+ OidcServiceAccountName string `protobuf:"bytes,19,opt,name=oidcServiceAccountName,proto3" json:"oidcServiceAccountName,omitempty"` } func (x *Egress) Reset() { @@ -1094,6 +1111,13 @@ func (x *Egress) GetDestinationCACerts() string { return "" } +func (x *Egress) GetDestinationAudience() string { + if x != nil { + return x.DestinationAudience + } + return "" +} + func (m *Egress) GetReplyStrategy() isEgress_ReplyStrategy { if m != nil { return m.ReplyStrategy @@ -1129,6 +1153,13 @@ func (x *Egress) GetReplyUrlCACerts() string { return "" } +func (x *Egress) GetReplyUrlAudience() string { + if x != nil { + return x.ReplyUrlAudience + } + return "" +} + func (x *Egress) GetFilter() *Filter { if x != nil { return x.Filter @@ -1192,6 +1223,13 @@ func (x *Egress) GetFeatureFlags() *EgressFeatureFlags { return nil } +func (x *Egress) GetOidcServiceAccountName() string { + if x != nil { + return x.OidcServiceAccountName + } + return "" +} + type isEgress_ReplyStrategy interface { isEgress_ReplyStrategy() } @@ -1295,6 +1333,8 @@ type Ingress struct { Host string `protobuf:"bytes,3,opt,name=host,proto3" json:"host,omitempty"` // whether to autocreate event types EnableAutoCreateEventTypes bool `protobuf:"varint,4,opt,name=enableAutoCreateEventTypes,proto3" json:"enableAutoCreateEventTypes,omitempty"` + // OIDC audience of this ingress + Audience string `protobuf:"bytes,5,opt,name=audience,proto3" json:"audience,omitempty"` } func (x *Ingress) Reset() { @@ -1357,6 +1397,13 @@ func (x *Ingress) GetEnableAutoCreateEventTypes() bool { return false } +func (x *Ingress) GetAudience() string { + if x != nil { + return x.Audience + } + return "" +} + // Kubernetes resource reference. 
type Reference struct { state protoimpl.MessageState @@ -1680,7 +1727,8 @@ type Resource struct { Uid string `protobuf:"bytes,1,opt,name=uid,proto3" json:"uid,omitempty"` // Topics name // Note: If there is an ingress configured, then this field must have exactly 1 element otherwise, - // if the resource does just dispatch from Kafka, then this topic list can contain multiple elements + // + // if the resource does just dispatch from Kafka, then this topic list can contain multiple elements Topics []string `protobuf:"bytes,2,rep,name=topics,proto3" json:"topics,omitempty"` // A comma separated list of host/port pairs to use for establishing the initial connection to the Kafka cluster. // Note: we're using a comma separated list simply because that's how java kafka client likes it. @@ -1692,6 +1740,7 @@ type Resource struct { // Optional egresses for this topic Egresses []*Egress `protobuf:"bytes,6,rep,name=egresses,proto3" json:"egresses,omitempty"` // Types that are assignable to Auth: + // // *Resource_AbsentAuth // *Resource_AuthSecret // *Resource_MultiAuthSecret @@ -1836,34 +1885,34 @@ type Resource_AuthSecret struct { // // Secret format: // - // protocol: (PLAINTEXT | SASL_PLAINTEXT | SSL | SASL_SSL) - // sasl.mechanism: (SCRAM-SHA-256 | SCRAM-SHA-512) - // ca.crt: - // user.crt: - // user.key: - // user: - // password: + // protocol: (PLAINTEXT | SASL_PLAINTEXT | SSL | SASL_SSL) + // sasl.mechanism: (SCRAM-SHA-256 | SCRAM-SHA-512) + // ca.crt: + // user.crt: + // user.key: + // user: + // password: // // Validation: // - protocol=PLAINTEXT // - protocol=SSL - // - required: - // - ca.crt - // - user.crt - // - user.key + // - required: + // - ca.crt + // - user.crt + // - user.key // - protocol=SASL_PLAINTEXT - // - required: - // - sasl.mechanism - // - user - // - password + // - required: + // - sasl.mechanism + // - user + // - password // - protocol=SASL_SSL - // - required: - // - sasl.mechanism - // - ca.crt - // - user.crt - // - user.key - // - 
user - // - password + // - required: + // - sasl.mechanism + // - ca.crt + // - user.crt + // - user.key + // - user + // - password AuthSecret *Reference `protobuf:"bytes,8,opt,name=authSecret,proto3,oneof"` } @@ -2008,13 +2057,16 @@ var file_contract_proto_rawDesc = []byte{ 0x74, 0x65, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, - 0x3a, 0x02, 0x38, 0x01, 0x22, 0xe6, 0x01, 0x0a, 0x0c, 0x45, 0x67, 0x72, 0x65, 0x73, 0x73, 0x43, + 0x3a, 0x02, 0x38, 0x01, 0x22, 0x96, 0x02, 0x0a, 0x0c, 0x45, 0x67, 0x72, 0x65, 0x73, 0x73, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x12, 0x1e, 0x0a, 0x0a, 0x64, 0x65, 0x61, 0x64, 0x4c, 0x65, 0x74, 0x74, 0x65, 0x72, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0a, 0x64, 0x65, 0x61, 0x64, 0x4c, 0x65, 0x74, 0x74, 0x65, 0x72, 0x12, 0x2c, 0x0a, 0x11, 0x64, 0x65, 0x61, 0x64, 0x4c, 0x65, 0x74, 0x74, 0x65, 0x72, 0x43, 0x41, 0x43, 0x65, 0x72, 0x74, 0x73, 0x18, 0x06, 0x20, 0x01, 0x28, 0x09, 0x52, 0x11, 0x64, 0x65, 0x61, 0x64, 0x4c, 0x65, 0x74, 0x74, 0x65, 0x72, 0x43, 0x41, 0x43, 0x65, - 0x72, 0x74, 0x73, 0x12, 0x14, 0x0a, 0x05, 0x72, 0x65, 0x74, 0x72, 0x79, 0x18, 0x02, 0x20, 0x01, + 0x72, 0x74, 0x73, 0x12, 0x2e, 0x0a, 0x12, 0x64, 0x65, 0x61, 0x64, 0x4c, 0x65, 0x74, 0x74, 0x65, + 0x72, 0x41, 0x75, 0x64, 0x69, 0x65, 0x6e, 0x63, 0x65, 0x18, 0x07, 0x20, 0x01, 0x28, 0x09, 0x52, + 0x12, 0x64, 0x65, 0x61, 0x64, 0x4c, 0x65, 0x74, 0x74, 0x65, 0x72, 0x41, 0x75, 0x64, 0x69, 0x65, + 0x6e, 0x63, 0x65, 0x12, 0x14, 0x0a, 0x05, 0x72, 0x65, 0x74, 0x72, 0x79, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0d, 0x52, 0x05, 0x72, 0x65, 0x74, 0x72, 0x79, 0x12, 0x34, 0x0a, 0x0d, 0x62, 0x61, 0x63, 0x6b, 0x6f, 0x66, 0x66, 0x50, 0x6f, 0x6c, 0x69, 0x63, 0x79, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x0e, 0x2e, 0x42, 0x61, 0x63, 0x6b, 0x6f, 0x66, 0x66, 0x50, 0x6f, 
0x6c, 0x69, 0x63, 0x79, @@ -2022,7 +2074,7 @@ var file_contract_proto_rawDesc = []byte{ 0x22, 0x0a, 0x0c, 0x62, 0x61, 0x63, 0x6b, 0x6f, 0x66, 0x66, 0x44, 0x65, 0x6c, 0x61, 0x79, 0x18, 0x04, 0x20, 0x01, 0x28, 0x04, 0x52, 0x0c, 0x62, 0x61, 0x63, 0x6b, 0x6f, 0x66, 0x66, 0x44, 0x65, 0x6c, 0x61, 0x79, 0x12, 0x18, 0x0a, 0x07, 0x74, 0x69, 0x6d, 0x65, 0x6f, 0x75, 0x74, 0x18, 0x05, - 0x20, 0x01, 0x28, 0x04, 0x52, 0x07, 0x74, 0x69, 0x6d, 0x65, 0x6f, 0x75, 0x74, 0x22, 0xc2, 0x05, + 0x20, 0x01, 0x28, 0x04, 0x52, 0x07, 0x74, 0x69, 0x6d, 0x65, 0x6f, 0x75, 0x74, 0x22, 0xd8, 0x06, 0x0a, 0x06, 0x45, 0x67, 0x72, 0x65, 0x73, 0x73, 0x12, 0x24, 0x0a, 0x0d, 0x63, 0x6f, 0x6e, 0x73, 0x75, 0x6d, 0x65, 0x72, 0x47, 0x72, 0x6f, 0x75, 0x70, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0d, 0x63, 0x6f, 0x6e, 0x73, 0x75, 0x6d, 0x65, 0x72, 0x47, 0x72, 0x6f, 0x75, 0x70, 0x12, 0x20, @@ -2031,170 +2083,181 @@ var file_contract_proto_rawDesc = []byte{ 0x12, 0x2e, 0x0a, 0x12, 0x64, 0x65, 0x73, 0x74, 0x69, 0x6e, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x43, 0x41, 0x43, 0x65, 0x72, 0x74, 0x73, 0x18, 0x0f, 0x20, 0x01, 0x28, 0x09, 0x52, 0x12, 0x64, 0x65, 0x73, 0x74, 0x69, 0x6e, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x43, 0x41, 0x43, 0x65, 0x72, 0x74, 0x73, - 0x12, 0x1c, 0x0a, 0x08, 0x72, 0x65, 0x70, 0x6c, 0x79, 0x55, 0x72, 0x6c, 0x18, 0x03, 0x20, 0x01, - 0x28, 0x09, 0x48, 0x00, 0x52, 0x08, 0x72, 0x65, 0x70, 0x6c, 0x79, 0x55, 0x72, 0x6c, 0x12, 0x3c, - 0x0a, 0x14, 0x72, 0x65, 0x70, 0x6c, 0x79, 0x54, 0x6f, 0x4f, 0x72, 0x69, 0x67, 0x69, 0x6e, 0x61, - 0x6c, 0x54, 0x6f, 0x70, 0x69, 0x63, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x06, 0x2e, 0x45, - 0x6d, 0x70, 0x74, 0x79, 0x48, 0x00, 0x52, 0x14, 0x72, 0x65, 0x70, 0x6c, 0x79, 0x54, 0x6f, 0x4f, - 0x72, 0x69, 0x67, 0x69, 0x6e, 0x61, 0x6c, 0x54, 0x6f, 0x70, 0x69, 0x63, 0x12, 0x2c, 0x0a, 0x0c, - 0x64, 0x69, 0x73, 0x63, 0x61, 0x72, 0x64, 0x52, 0x65, 0x70, 0x6c, 0x79, 0x18, 0x09, 0x20, 0x01, - 0x28, 0x0b, 0x32, 0x06, 0x2e, 0x45, 0x6d, 0x70, 0x74, 0x79, 0x48, 0x00, 0x52, 0x0c, 0x64, 0x69, 
- 0x73, 0x63, 0x61, 0x72, 0x64, 0x52, 0x65, 0x70, 0x6c, 0x79, 0x12, 0x28, 0x0a, 0x0f, 0x72, 0x65, - 0x70, 0x6c, 0x79, 0x55, 0x72, 0x6c, 0x43, 0x41, 0x43, 0x65, 0x72, 0x74, 0x73, 0x18, 0x10, 0x20, - 0x01, 0x28, 0x09, 0x52, 0x0f, 0x72, 0x65, 0x70, 0x6c, 0x79, 0x55, 0x72, 0x6c, 0x43, 0x41, 0x43, - 0x65, 0x72, 0x74, 0x73, 0x12, 0x1f, 0x0a, 0x06, 0x66, 0x69, 0x6c, 0x74, 0x65, 0x72, 0x18, 0x05, - 0x20, 0x01, 0x28, 0x0b, 0x32, 0x07, 0x2e, 0x46, 0x69, 0x6c, 0x74, 0x65, 0x72, 0x52, 0x06, 0x66, - 0x69, 0x6c, 0x74, 0x65, 0x72, 0x12, 0x10, 0x0a, 0x03, 0x75, 0x69, 0x64, 0x18, 0x06, 0x20, 0x01, - 0x28, 0x09, 0x52, 0x03, 0x75, 0x69, 0x64, 0x12, 0x31, 0x0a, 0x0c, 0x65, 0x67, 0x72, 0x65, 0x73, - 0x73, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x18, 0x07, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x0d, 0x2e, - 0x45, 0x67, 0x72, 0x65, 0x73, 0x73, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x52, 0x0c, 0x65, 0x67, - 0x72, 0x65, 0x73, 0x73, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x12, 0x34, 0x0a, 0x0d, 0x64, 0x65, - 0x6c, 0x69, 0x76, 0x65, 0x72, 0x79, 0x4f, 0x72, 0x64, 0x65, 0x72, 0x18, 0x08, 0x20, 0x01, 0x28, - 0x0e, 0x32, 0x0e, 0x2e, 0x44, 0x65, 0x6c, 0x69, 0x76, 0x65, 0x72, 0x79, 0x4f, 0x72, 0x64, 0x65, - 0x72, 0x52, 0x0d, 0x64, 0x65, 0x6c, 0x69, 0x76, 0x65, 0x72, 0x79, 0x4f, 0x72, 0x64, 0x65, 0x72, - 0x12, 0x22, 0x0a, 0x07, 0x6b, 0x65, 0x79, 0x54, 0x79, 0x70, 0x65, 0x18, 0x0a, 0x20, 0x01, 0x28, - 0x0e, 0x32, 0x08, 0x2e, 0x4b, 0x65, 0x79, 0x54, 0x79, 0x70, 0x65, 0x52, 0x07, 0x6b, 0x65, 0x79, - 0x54, 0x79, 0x70, 0x65, 0x12, 0x28, 0x0a, 0x09, 0x72, 0x65, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, - 0x65, 0x18, 0x0b, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x0a, 0x2e, 0x52, 0x65, 0x66, 0x65, 0x72, 0x65, - 0x6e, 0x63, 0x65, 0x52, 0x09, 0x72, 0x65, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x12, 0x3a, - 0x0a, 0x0f, 0x64, 0x69, 0x61, 0x6c, 0x65, 0x63, 0x74, 0x65, 0x64, 0x46, 0x69, 0x6c, 0x74, 0x65, - 0x72, 0x18, 0x0c, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x10, 0x2e, 0x44, 0x69, 0x61, 0x6c, 0x65, 0x63, - 0x74, 0x65, 0x64, 0x46, 0x69, 0x6c, 
0x74, 0x65, 0x72, 0x52, 0x0f, 0x64, 0x69, 0x61, 0x6c, 0x65, - 0x63, 0x74, 0x65, 0x64, 0x46, 0x69, 0x6c, 0x74, 0x65, 0x72, 0x12, 0x1c, 0x0a, 0x09, 0x76, 0x52, - 0x65, 0x70, 0x6c, 0x69, 0x63, 0x61, 0x73, 0x18, 0x0d, 0x20, 0x01, 0x28, 0x05, 0x52, 0x09, 0x76, - 0x52, 0x65, 0x70, 0x6c, 0x69, 0x63, 0x61, 0x73, 0x12, 0x37, 0x0a, 0x0c, 0x66, 0x65, 0x61, 0x74, - 0x75, 0x72, 0x65, 0x46, 0x6c, 0x61, 0x67, 0x73, 0x18, 0x0e, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x13, - 0x2e, 0x45, 0x67, 0x72, 0x65, 0x73, 0x73, 0x46, 0x65, 0x61, 0x74, 0x75, 0x72, 0x65, 0x46, 0x6c, - 0x61, 0x67, 0x73, 0x52, 0x0c, 0x66, 0x65, 0x61, 0x74, 0x75, 0x72, 0x65, 0x46, 0x6c, 0x61, 0x67, - 0x73, 0x42, 0x0f, 0x0a, 0x0d, 0x72, 0x65, 0x70, 0x6c, 0x79, 0x53, 0x74, 0x72, 0x61, 0x74, 0x65, - 0x67, 0x79, 0x22, 0x86, 0x01, 0x0a, 0x12, 0x45, 0x67, 0x72, 0x65, 0x73, 0x73, 0x46, 0x65, 0x61, - 0x74, 0x75, 0x72, 0x65, 0x46, 0x6c, 0x61, 0x67, 0x73, 0x12, 0x2c, 0x0a, 0x11, 0x65, 0x6e, 0x61, - 0x62, 0x6c, 0x65, 0x52, 0x61, 0x74, 0x65, 0x4c, 0x69, 0x6d, 0x69, 0x74, 0x65, 0x72, 0x18, 0x01, - 0x20, 0x01, 0x28, 0x08, 0x52, 0x11, 0x65, 0x6e, 0x61, 0x62, 0x6c, 0x65, 0x52, 0x61, 0x74, 0x65, - 0x4c, 0x69, 0x6d, 0x69, 0x74, 0x65, 0x72, 0x12, 0x42, 0x0a, 0x1c, 0x65, 0x6e, 0x61, 0x62, 0x6c, - 0x65, 0x4f, 0x72, 0x64, 0x65, 0x72, 0x65, 0x64, 0x45, 0x78, 0x65, 0x63, 0x75, 0x74, 0x6f, 0x72, - 0x4d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x73, 0x18, 0x02, 0x20, 0x01, 0x28, 0x08, 0x52, 0x1c, 0x65, - 0x6e, 0x61, 0x62, 0x6c, 0x65, 0x4f, 0x72, 0x64, 0x65, 0x72, 0x65, 0x64, 0x45, 0x78, 0x65, 0x63, - 0x75, 0x74, 0x6f, 0x72, 0x4d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x73, 0x22, 0xa1, 0x01, 0x0a, 0x07, - 0x49, 0x6e, 0x67, 0x72, 0x65, 0x73, 0x73, 0x12, 0x2e, 0x0a, 0x0b, 0x63, 0x6f, 0x6e, 0x74, 0x65, - 0x6e, 0x74, 0x4d, 0x6f, 0x64, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x0c, 0x2e, 0x43, - 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x4d, 0x6f, 0x64, 0x65, 0x52, 0x0b, 0x63, 0x6f, 0x6e, 0x74, - 0x65, 0x6e, 0x74, 0x4d, 0x6f, 0x64, 0x65, 0x12, 0x12, 0x0a, 0x04, 0x70, 
0x61, 0x74, 0x68, 0x18, - 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x70, 0x61, 0x74, 0x68, 0x12, 0x12, 0x0a, 0x04, 0x68, - 0x6f, 0x73, 0x74, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x68, 0x6f, 0x73, 0x74, 0x12, - 0x3e, 0x0a, 0x1a, 0x65, 0x6e, 0x61, 0x62, 0x6c, 0x65, 0x41, 0x75, 0x74, 0x6f, 0x43, 0x72, 0x65, - 0x61, 0x74, 0x65, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x54, 0x79, 0x70, 0x65, 0x73, 0x18, 0x04, 0x20, - 0x01, 0x28, 0x08, 0x52, 0x1a, 0x65, 0x6e, 0x61, 0x62, 0x6c, 0x65, 0x41, 0x75, 0x74, 0x6f, 0x43, - 0x72, 0x65, 0x61, 0x74, 0x65, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x54, 0x79, 0x70, 0x65, 0x73, 0x22, - 0xa3, 0x01, 0x0a, 0x09, 0x52, 0x65, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x12, 0x12, 0x0a, - 0x04, 0x75, 0x75, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x75, 0x75, 0x69, - 0x64, 0x12, 0x1c, 0x0a, 0x09, 0x6e, 0x61, 0x6d, 0x65, 0x73, 0x70, 0x61, 0x63, 0x65, 0x18, 0x02, - 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x6e, 0x61, 0x6d, 0x65, 0x73, 0x70, 0x61, 0x63, 0x65, 0x12, - 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, - 0x61, 0x6d, 0x65, 0x12, 0x18, 0x0a, 0x07, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x18, 0x04, - 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x12, 0x12, 0x0a, - 0x04, 0x6b, 0x69, 0x6e, 0x64, 0x18, 0x05, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6b, 0x69, 0x6e, - 0x64, 0x12, 0x22, 0x0a, 0x0c, 0x67, 0x72, 0x6f, 0x75, 0x70, 0x56, 0x65, 0x72, 0x73, 0x69, 0x6f, - 0x6e, 0x18, 0x06, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0c, 0x67, 0x72, 0x6f, 0x75, 0x70, 0x56, 0x65, - 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x22, 0x7f, 0x0a, 0x0f, 0x53, 0x65, 0x63, 0x72, 0x65, 0x74, 0x52, - 0x65, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x12, 0x28, 0x0a, 0x09, 0x72, 0x65, 0x66, 0x65, - 0x72, 0x65, 0x6e, 0x63, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x0a, 0x2e, 0x52, 0x65, - 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x52, 0x09, 0x72, 0x65, 0x66, 0x65, 0x72, 0x65, 0x6e, - 0x63, 0x65, 
0x12, 0x42, 0x0a, 0x12, 0x6b, 0x65, 0x79, 0x46, 0x69, 0x65, 0x6c, 0x64, 0x52, 0x65, - 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x12, - 0x2e, 0x4b, 0x65, 0x79, 0x46, 0x69, 0x65, 0x6c, 0x64, 0x52, 0x65, 0x66, 0x65, 0x72, 0x65, 0x6e, - 0x63, 0x65, 0x52, 0x12, 0x6b, 0x65, 0x79, 0x46, 0x69, 0x65, 0x6c, 0x64, 0x52, 0x65, 0x66, 0x65, - 0x72, 0x65, 0x6e, 0x63, 0x65, 0x73, 0x22, 0x55, 0x0a, 0x11, 0x4b, 0x65, 0x79, 0x46, 0x69, 0x65, - 0x6c, 0x64, 0x52, 0x65, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x12, 0x1c, 0x0a, 0x09, 0x73, - 0x65, 0x63, 0x72, 0x65, 0x74, 0x4b, 0x65, 0x79, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, - 0x73, 0x65, 0x63, 0x72, 0x65, 0x74, 0x4b, 0x65, 0x79, 0x12, 0x22, 0x0a, 0x05, 0x66, 0x69, 0x65, - 0x6c, 0x64, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x0c, 0x2e, 0x53, 0x65, 0x63, 0x72, 0x65, - 0x74, 0x46, 0x69, 0x65, 0x6c, 0x64, 0x52, 0x05, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x22, 0x6f, 0x0a, - 0x14, 0x4d, 0x75, 0x6c, 0x74, 0x69, 0x53, 0x65, 0x63, 0x72, 0x65, 0x74, 0x52, 0x65, 0x66, 0x65, - 0x72, 0x65, 0x6e, 0x63, 0x65, 0x12, 0x25, 0x0a, 0x08, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x63, 0x6f, - 0x6c, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x09, 0x2e, 0x50, 0x72, 0x6f, 0x74, 0x6f, 0x63, - 0x6f, 0x6c, 0x52, 0x08, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x63, 0x6f, 0x6c, 0x12, 0x30, 0x0a, 0x0a, - 0x72, 0x65, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, - 0x32, 0x10, 0x2e, 0x53, 0x65, 0x63, 0x72, 0x65, 0x74, 0x52, 0x65, 0x66, 0x65, 0x72, 0x65, 0x6e, - 0x63, 0x65, 0x52, 0x0a, 0x72, 0x65, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x73, 0x22, 0x9a, - 0x01, 0x0a, 0x13, 0x43, 0x6c, 0x6f, 0x75, 0x64, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x4f, 0x76, 0x65, - 0x72, 0x72, 0x69, 0x64, 0x65, 0x73, 0x12, 0x44, 0x0a, 0x0a, 0x65, 0x78, 0x74, 0x65, 0x6e, 0x73, - 0x69, 0x6f, 0x6e, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x24, 0x2e, 0x43, 0x6c, 0x6f, - 0x75, 0x64, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x4f, 
0x76, 0x65, 0x72, 0x72, 0x69, 0x64, 0x65, 0x73, - 0x2e, 0x45, 0x78, 0x74, 0x65, 0x6e, 0x73, 0x69, 0x6f, 0x6e, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, - 0x52, 0x0a, 0x65, 0x78, 0x74, 0x65, 0x6e, 0x73, 0x69, 0x6f, 0x6e, 0x73, 0x1a, 0x3d, 0x0a, 0x0f, - 0x45, 0x78, 0x74, 0x65, 0x6e, 0x73, 0x69, 0x6f, 0x6e, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, - 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, - 0x79, 0x12, 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, - 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x22, 0xf1, 0x03, 0x0a, 0x08, - 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x12, 0x10, 0x0a, 0x03, 0x75, 0x69, 0x64, 0x18, - 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x75, 0x69, 0x64, 0x12, 0x16, 0x0a, 0x06, 0x74, 0x6f, - 0x70, 0x69, 0x63, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x09, 0x52, 0x06, 0x74, 0x6f, 0x70, 0x69, - 0x63, 0x73, 0x12, 0x2a, 0x0a, 0x10, 0x62, 0x6f, 0x6f, 0x74, 0x73, 0x74, 0x72, 0x61, 0x70, 0x53, - 0x65, 0x72, 0x76, 0x65, 0x72, 0x73, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x10, 0x62, 0x6f, - 0x6f, 0x74, 0x73, 0x74, 0x72, 0x61, 0x70, 0x53, 0x65, 0x72, 0x76, 0x65, 0x72, 0x73, 0x12, 0x22, - 0x0a, 0x07, 0x69, 0x6e, 0x67, 0x72, 0x65, 0x73, 0x73, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0b, 0x32, - 0x08, 0x2e, 0x49, 0x6e, 0x67, 0x72, 0x65, 0x73, 0x73, 0x52, 0x07, 0x69, 0x6e, 0x67, 0x72, 0x65, - 0x73, 0x73, 0x12, 0x31, 0x0a, 0x0c, 0x65, 0x67, 0x72, 0x65, 0x73, 0x73, 0x43, 0x6f, 0x6e, 0x66, - 0x69, 0x67, 0x18, 0x05, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x0d, 0x2e, 0x45, 0x67, 0x72, 0x65, 0x73, - 0x73, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x52, 0x0c, 0x65, 0x67, 0x72, 0x65, 0x73, 0x73, 0x43, - 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x12, 0x23, 0x0a, 0x08, 0x65, 0x67, 0x72, 0x65, 0x73, 0x73, 0x65, - 0x73, 0x18, 0x06, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x07, 0x2e, 0x45, 0x67, 0x72, 0x65, 0x73, 0x73, - 0x52, 0x08, 0x65, 0x67, 0x72, 0x65, 0x73, 0x73, 0x65, 0x73, 0x12, 0x28, 0x0a, 0x0a, 
0x61, 0x62, - 0x73, 0x65, 0x6e, 0x74, 0x41, 0x75, 0x74, 0x68, 0x18, 0x07, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x06, - 0x2e, 0x45, 0x6d, 0x70, 0x74, 0x79, 0x48, 0x00, 0x52, 0x0a, 0x61, 0x62, 0x73, 0x65, 0x6e, 0x74, - 0x41, 0x75, 0x74, 0x68, 0x12, 0x2c, 0x0a, 0x0a, 0x61, 0x75, 0x74, 0x68, 0x53, 0x65, 0x63, 0x72, - 0x65, 0x74, 0x18, 0x08, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x0a, 0x2e, 0x52, 0x65, 0x66, 0x65, 0x72, - 0x65, 0x6e, 0x63, 0x65, 0x48, 0x00, 0x52, 0x0a, 0x61, 0x75, 0x74, 0x68, 0x53, 0x65, 0x63, 0x72, - 0x65, 0x74, 0x12, 0x41, 0x0a, 0x0f, 0x6d, 0x75, 0x6c, 0x74, 0x69, 0x41, 0x75, 0x74, 0x68, 0x53, - 0x65, 0x63, 0x72, 0x65, 0x74, 0x18, 0x09, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x15, 0x2e, 0x4d, 0x75, - 0x6c, 0x74, 0x69, 0x53, 0x65, 0x63, 0x72, 0x65, 0x74, 0x52, 0x65, 0x66, 0x65, 0x72, 0x65, 0x6e, - 0x63, 0x65, 0x48, 0x00, 0x52, 0x0f, 0x6d, 0x75, 0x6c, 0x74, 0x69, 0x41, 0x75, 0x74, 0x68, 0x53, - 0x65, 0x63, 0x72, 0x65, 0x74, 0x12, 0x46, 0x0a, 0x13, 0x63, 0x6c, 0x6f, 0x75, 0x64, 0x45, 0x76, - 0x65, 0x6e, 0x74, 0x4f, 0x76, 0x65, 0x72, 0x72, 0x69, 0x64, 0x65, 0x73, 0x18, 0x0a, 0x20, 0x01, - 0x28, 0x0b, 0x32, 0x14, 0x2e, 0x43, 0x6c, 0x6f, 0x75, 0x64, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x4f, - 0x76, 0x65, 0x72, 0x72, 0x69, 0x64, 0x65, 0x73, 0x52, 0x13, 0x63, 0x6c, 0x6f, 0x75, 0x64, 0x45, - 0x76, 0x65, 0x6e, 0x74, 0x4f, 0x76, 0x65, 0x72, 0x72, 0x69, 0x64, 0x65, 0x73, 0x12, 0x28, 0x0a, - 0x09, 0x72, 0x65, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x18, 0x0b, 0x20, 0x01, 0x28, 0x0b, - 0x32, 0x0a, 0x2e, 0x52, 0x65, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x52, 0x09, 0x72, 0x65, - 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x42, 0x06, 0x0a, 0x04, 0x41, 0x75, 0x74, 0x68, 0x22, - 0x77, 0x0a, 0x08, 0x43, 0x6f, 0x6e, 0x74, 0x72, 0x61, 0x63, 0x74, 0x12, 0x1e, 0x0a, 0x0a, 0x67, - 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x01, 0x20, 0x01, 0x28, 0x04, 0x52, - 0x0a, 0x67, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x27, 0x0a, 0x09, 0x72, - 0x65, 0x73, 0x6f, 0x75, 
0x72, 0x63, 0x65, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x09, - 0x2e, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x52, 0x09, 0x72, 0x65, 0x73, 0x6f, 0x75, - 0x72, 0x63, 0x65, 0x73, 0x12, 0x22, 0x0a, 0x0c, 0x74, 0x72, 0x75, 0x73, 0x74, 0x42, 0x75, 0x6e, - 0x64, 0x6c, 0x65, 0x73, 0x18, 0x03, 0x20, 0x03, 0x28, 0x09, 0x52, 0x0c, 0x74, 0x72, 0x75, 0x73, - 0x74, 0x42, 0x75, 0x6e, 0x64, 0x6c, 0x65, 0x73, 0x2a, 0x2c, 0x0a, 0x0d, 0x42, 0x61, 0x63, 0x6b, - 0x6f, 0x66, 0x66, 0x50, 0x6f, 0x6c, 0x69, 0x63, 0x79, 0x12, 0x0f, 0x0a, 0x0b, 0x45, 0x78, 0x70, - 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x69, 0x61, 0x6c, 0x10, 0x00, 0x12, 0x0a, 0x0a, 0x06, 0x4c, 0x69, - 0x6e, 0x65, 0x61, 0x72, 0x10, 0x01, 0x2a, 0x2b, 0x0a, 0x0d, 0x44, 0x65, 0x6c, 0x69, 0x76, 0x65, - 0x72, 0x79, 0x4f, 0x72, 0x64, 0x65, 0x72, 0x12, 0x0d, 0x0a, 0x09, 0x55, 0x4e, 0x4f, 0x52, 0x44, - 0x45, 0x52, 0x45, 0x44, 0x10, 0x00, 0x12, 0x0b, 0x0a, 0x07, 0x4f, 0x52, 0x44, 0x45, 0x52, 0x45, - 0x44, 0x10, 0x01, 0x2a, 0x3d, 0x0a, 0x07, 0x4b, 0x65, 0x79, 0x54, 0x79, 0x70, 0x65, 0x12, 0x0a, - 0x0a, 0x06, 0x53, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x10, 0x00, 0x12, 0x0b, 0x0a, 0x07, 0x49, 0x6e, - 0x74, 0x65, 0x67, 0x65, 0x72, 0x10, 0x01, 0x12, 0x0a, 0x0a, 0x06, 0x44, 0x6f, 0x75, 0x62, 0x6c, - 0x65, 0x10, 0x02, 0x12, 0x0d, 0x0a, 0x09, 0x42, 0x79, 0x74, 0x65, 0x41, 0x72, 0x72, 0x61, 0x79, - 0x10, 0x03, 0x2a, 0x29, 0x0a, 0x0b, 0x43, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x4d, 0x6f, 0x64, - 0x65, 0x12, 0x0a, 0x0a, 0x06, 0x42, 0x49, 0x4e, 0x41, 0x52, 0x59, 0x10, 0x00, 0x12, 0x0e, 0x0a, - 0x0a, 0x53, 0x54, 0x52, 0x55, 0x43, 0x54, 0x55, 0x52, 0x45, 0x44, 0x10, 0x01, 0x2a, 0x61, 0x0a, - 0x0b, 0x53, 0x65, 0x63, 0x72, 0x65, 0x74, 0x46, 0x69, 0x65, 0x6c, 0x64, 0x12, 0x12, 0x0a, 0x0e, - 0x53, 0x41, 0x53, 0x4c, 0x5f, 0x4d, 0x45, 0x43, 0x48, 0x41, 0x4e, 0x49, 0x53, 0x4d, 0x10, 0x00, - 0x12, 0x0a, 0x0a, 0x06, 0x43, 0x41, 0x5f, 0x43, 0x52, 0x54, 0x10, 0x01, 0x12, 0x0c, 0x0a, 0x08, - 0x55, 0x53, 0x45, 0x52, 0x5f, 0x43, 0x52, 0x54, 0x10, 0x02, 
0x12, 0x0c, 0x0a, 0x08, 0x55, 0x53, - 0x45, 0x52, 0x5f, 0x4b, 0x45, 0x59, 0x10, 0x03, 0x12, 0x08, 0x0a, 0x04, 0x55, 0x53, 0x45, 0x52, - 0x10, 0x04, 0x12, 0x0c, 0x0a, 0x08, 0x50, 0x41, 0x53, 0x53, 0x57, 0x4f, 0x52, 0x44, 0x10, 0x05, - 0x2a, 0x44, 0x0a, 0x08, 0x50, 0x72, 0x6f, 0x74, 0x6f, 0x63, 0x6f, 0x6c, 0x12, 0x0d, 0x0a, 0x09, - 0x50, 0x4c, 0x41, 0x49, 0x4e, 0x54, 0x45, 0x58, 0x54, 0x10, 0x00, 0x12, 0x12, 0x0a, 0x0e, 0x53, - 0x41, 0x53, 0x4c, 0x5f, 0x50, 0x4c, 0x41, 0x49, 0x4e, 0x54, 0x45, 0x58, 0x54, 0x10, 0x01, 0x12, - 0x07, 0x0a, 0x03, 0x53, 0x53, 0x4c, 0x10, 0x02, 0x12, 0x0c, 0x0a, 0x08, 0x53, 0x41, 0x53, 0x4c, - 0x5f, 0x53, 0x53, 0x4c, 0x10, 0x03, 0x42, 0x5b, 0x0a, 0x2a, 0x64, 0x65, 0x76, 0x2e, 0x6b, 0x6e, - 0x61, 0x74, 0x69, 0x76, 0x65, 0x2e, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x69, 0x6e, 0x67, 0x2e, 0x6b, - 0x61, 0x66, 0x6b, 0x61, 0x2e, 0x62, 0x72, 0x6f, 0x6b, 0x65, 0x72, 0x2e, 0x63, 0x6f, 0x6e, 0x74, - 0x72, 0x61, 0x63, 0x74, 0x42, 0x11, 0x44, 0x61, 0x74, 0x61, 0x50, 0x6c, 0x61, 0x6e, 0x65, 0x43, - 0x6f, 0x6e, 0x74, 0x72, 0x61, 0x63, 0x74, 0x5a, 0x1a, 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, - 0x2d, 0x70, 0x6c, 0x61, 0x6e, 0x65, 0x2f, 0x70, 0x6b, 0x67, 0x2f, 0x63, 0x6f, 0x6e, 0x74, 0x72, - 0x61, 0x63, 0x74, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, + 0x12, 0x30, 0x0a, 0x13, 0x64, 0x65, 0x73, 0x74, 0x69, 0x6e, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x41, + 0x75, 0x64, 0x69, 0x65, 0x6e, 0x63, 0x65, 0x18, 0x11, 0x20, 0x01, 0x28, 0x09, 0x52, 0x13, 0x64, + 0x65, 0x73, 0x74, 0x69, 0x6e, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x41, 0x75, 0x64, 0x69, 0x65, 0x6e, + 0x63, 0x65, 0x12, 0x1c, 0x0a, 0x08, 0x72, 0x65, 0x70, 0x6c, 0x79, 0x55, 0x72, 0x6c, 0x18, 0x03, + 0x20, 0x01, 0x28, 0x09, 0x48, 0x00, 0x52, 0x08, 0x72, 0x65, 0x70, 0x6c, 0x79, 0x55, 0x72, 0x6c, + 0x12, 0x3c, 0x0a, 0x14, 0x72, 0x65, 0x70, 0x6c, 0x79, 0x54, 0x6f, 0x4f, 0x72, 0x69, 0x67, 0x69, + 0x6e, 0x61, 0x6c, 0x54, 0x6f, 0x70, 0x69, 0x63, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x06, + 0x2e, 0x45, 0x6d, 0x70, 0x74, 
0x79, 0x48, 0x00, 0x52, 0x14, 0x72, 0x65, 0x70, 0x6c, 0x79, 0x54, + 0x6f, 0x4f, 0x72, 0x69, 0x67, 0x69, 0x6e, 0x61, 0x6c, 0x54, 0x6f, 0x70, 0x69, 0x63, 0x12, 0x2c, + 0x0a, 0x0c, 0x64, 0x69, 0x73, 0x63, 0x61, 0x72, 0x64, 0x52, 0x65, 0x70, 0x6c, 0x79, 0x18, 0x09, + 0x20, 0x01, 0x28, 0x0b, 0x32, 0x06, 0x2e, 0x45, 0x6d, 0x70, 0x74, 0x79, 0x48, 0x00, 0x52, 0x0c, + 0x64, 0x69, 0x73, 0x63, 0x61, 0x72, 0x64, 0x52, 0x65, 0x70, 0x6c, 0x79, 0x12, 0x28, 0x0a, 0x0f, + 0x72, 0x65, 0x70, 0x6c, 0x79, 0x55, 0x72, 0x6c, 0x43, 0x41, 0x43, 0x65, 0x72, 0x74, 0x73, 0x18, + 0x10, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0f, 0x72, 0x65, 0x70, 0x6c, 0x79, 0x55, 0x72, 0x6c, 0x43, + 0x41, 0x43, 0x65, 0x72, 0x74, 0x73, 0x12, 0x2a, 0x0a, 0x10, 0x72, 0x65, 0x70, 0x6c, 0x79, 0x55, + 0x72, 0x6c, 0x41, 0x75, 0x64, 0x69, 0x65, 0x6e, 0x63, 0x65, 0x18, 0x12, 0x20, 0x01, 0x28, 0x09, + 0x52, 0x10, 0x72, 0x65, 0x70, 0x6c, 0x79, 0x55, 0x72, 0x6c, 0x41, 0x75, 0x64, 0x69, 0x65, 0x6e, + 0x63, 0x65, 0x12, 0x1f, 0x0a, 0x06, 0x66, 0x69, 0x6c, 0x74, 0x65, 0x72, 0x18, 0x05, 0x20, 0x01, + 0x28, 0x0b, 0x32, 0x07, 0x2e, 0x46, 0x69, 0x6c, 0x74, 0x65, 0x72, 0x52, 0x06, 0x66, 0x69, 0x6c, + 0x74, 0x65, 0x72, 0x12, 0x10, 0x0a, 0x03, 0x75, 0x69, 0x64, 0x18, 0x06, 0x20, 0x01, 0x28, 0x09, + 0x52, 0x03, 0x75, 0x69, 0x64, 0x12, 0x31, 0x0a, 0x0c, 0x65, 0x67, 0x72, 0x65, 0x73, 0x73, 0x43, + 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x18, 0x07, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x0d, 0x2e, 0x45, 0x67, + 0x72, 0x65, 0x73, 0x73, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x52, 0x0c, 0x65, 0x67, 0x72, 0x65, + 0x73, 0x73, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x12, 0x34, 0x0a, 0x0d, 0x64, 0x65, 0x6c, 0x69, + 0x76, 0x65, 0x72, 0x79, 0x4f, 0x72, 0x64, 0x65, 0x72, 0x18, 0x08, 0x20, 0x01, 0x28, 0x0e, 0x32, + 0x0e, 0x2e, 0x44, 0x65, 0x6c, 0x69, 0x76, 0x65, 0x72, 0x79, 0x4f, 0x72, 0x64, 0x65, 0x72, 0x52, + 0x0d, 0x64, 0x65, 0x6c, 0x69, 0x76, 0x65, 0x72, 0x79, 0x4f, 0x72, 0x64, 0x65, 0x72, 0x12, 0x22, + 0x0a, 0x07, 0x6b, 0x65, 0x79, 0x54, 0x79, 0x70, 0x65, 0x18, 0x0a, 
0x20, 0x01, 0x28, 0x0e, 0x32, + 0x08, 0x2e, 0x4b, 0x65, 0x79, 0x54, 0x79, 0x70, 0x65, 0x52, 0x07, 0x6b, 0x65, 0x79, 0x54, 0x79, + 0x70, 0x65, 0x12, 0x28, 0x0a, 0x09, 0x72, 0x65, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x18, + 0x0b, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x0a, 0x2e, 0x52, 0x65, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, + 0x65, 0x52, 0x09, 0x72, 0x65, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x12, 0x3a, 0x0a, 0x0f, + 0x64, 0x69, 0x61, 0x6c, 0x65, 0x63, 0x74, 0x65, 0x64, 0x46, 0x69, 0x6c, 0x74, 0x65, 0x72, 0x18, + 0x0c, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x10, 0x2e, 0x44, 0x69, 0x61, 0x6c, 0x65, 0x63, 0x74, 0x65, + 0x64, 0x46, 0x69, 0x6c, 0x74, 0x65, 0x72, 0x52, 0x0f, 0x64, 0x69, 0x61, 0x6c, 0x65, 0x63, 0x74, + 0x65, 0x64, 0x46, 0x69, 0x6c, 0x74, 0x65, 0x72, 0x12, 0x1c, 0x0a, 0x09, 0x76, 0x52, 0x65, 0x70, + 0x6c, 0x69, 0x63, 0x61, 0x73, 0x18, 0x0d, 0x20, 0x01, 0x28, 0x05, 0x52, 0x09, 0x76, 0x52, 0x65, + 0x70, 0x6c, 0x69, 0x63, 0x61, 0x73, 0x12, 0x37, 0x0a, 0x0c, 0x66, 0x65, 0x61, 0x74, 0x75, 0x72, + 0x65, 0x46, 0x6c, 0x61, 0x67, 0x73, 0x18, 0x0e, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x13, 0x2e, 0x45, + 0x67, 0x72, 0x65, 0x73, 0x73, 0x46, 0x65, 0x61, 0x74, 0x75, 0x72, 0x65, 0x46, 0x6c, 0x61, 0x67, + 0x73, 0x52, 0x0c, 0x66, 0x65, 0x61, 0x74, 0x75, 0x72, 0x65, 0x46, 0x6c, 0x61, 0x67, 0x73, 0x12, + 0x36, 0x0a, 0x16, 0x6f, 0x69, 0x64, 0x63, 0x53, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x41, 0x63, + 0x63, 0x6f, 0x75, 0x6e, 0x74, 0x4e, 0x61, 0x6d, 0x65, 0x18, 0x13, 0x20, 0x01, 0x28, 0x09, 0x52, + 0x16, 0x6f, 0x69, 0x64, 0x63, 0x53, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x41, 0x63, 0x63, 0x6f, + 0x75, 0x6e, 0x74, 0x4e, 0x61, 0x6d, 0x65, 0x42, 0x0f, 0x0a, 0x0d, 0x72, 0x65, 0x70, 0x6c, 0x79, + 0x53, 0x74, 0x72, 0x61, 0x74, 0x65, 0x67, 0x79, 0x22, 0x86, 0x01, 0x0a, 0x12, 0x45, 0x67, 0x72, + 0x65, 0x73, 0x73, 0x46, 0x65, 0x61, 0x74, 0x75, 0x72, 0x65, 0x46, 0x6c, 0x61, 0x67, 0x73, 0x12, + 0x2c, 0x0a, 0x11, 0x65, 0x6e, 0x61, 0x62, 0x6c, 0x65, 0x52, 0x61, 0x74, 0x65, 0x4c, 0x69, 0x6d, + 0x69, 
0x74, 0x65, 0x72, 0x18, 0x01, 0x20, 0x01, 0x28, 0x08, 0x52, 0x11, 0x65, 0x6e, 0x61, 0x62, + 0x6c, 0x65, 0x52, 0x61, 0x74, 0x65, 0x4c, 0x69, 0x6d, 0x69, 0x74, 0x65, 0x72, 0x12, 0x42, 0x0a, + 0x1c, 0x65, 0x6e, 0x61, 0x62, 0x6c, 0x65, 0x4f, 0x72, 0x64, 0x65, 0x72, 0x65, 0x64, 0x45, 0x78, + 0x65, 0x63, 0x75, 0x74, 0x6f, 0x72, 0x4d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x73, 0x18, 0x02, 0x20, + 0x01, 0x28, 0x08, 0x52, 0x1c, 0x65, 0x6e, 0x61, 0x62, 0x6c, 0x65, 0x4f, 0x72, 0x64, 0x65, 0x72, + 0x65, 0x64, 0x45, 0x78, 0x65, 0x63, 0x75, 0x74, 0x6f, 0x72, 0x4d, 0x65, 0x74, 0x72, 0x69, 0x63, + 0x73, 0x22, 0xbd, 0x01, 0x0a, 0x07, 0x49, 0x6e, 0x67, 0x72, 0x65, 0x73, 0x73, 0x12, 0x2e, 0x0a, + 0x0b, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x4d, 0x6f, 0x64, 0x65, 0x18, 0x01, 0x20, 0x01, + 0x28, 0x0e, 0x32, 0x0c, 0x2e, 0x43, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x4d, 0x6f, 0x64, 0x65, + 0x52, 0x0b, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x4d, 0x6f, 0x64, 0x65, 0x12, 0x12, 0x0a, + 0x04, 0x70, 0x61, 0x74, 0x68, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x70, 0x61, 0x74, + 0x68, 0x12, 0x12, 0x0a, 0x04, 0x68, 0x6f, 0x73, 0x74, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, + 0x04, 0x68, 0x6f, 0x73, 0x74, 0x12, 0x3e, 0x0a, 0x1a, 0x65, 0x6e, 0x61, 0x62, 0x6c, 0x65, 0x41, + 0x75, 0x74, 0x6f, 0x43, 0x72, 0x65, 0x61, 0x74, 0x65, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x54, 0x79, + 0x70, 0x65, 0x73, 0x18, 0x04, 0x20, 0x01, 0x28, 0x08, 0x52, 0x1a, 0x65, 0x6e, 0x61, 0x62, 0x6c, + 0x65, 0x41, 0x75, 0x74, 0x6f, 0x43, 0x72, 0x65, 0x61, 0x74, 0x65, 0x45, 0x76, 0x65, 0x6e, 0x74, + 0x54, 0x79, 0x70, 0x65, 0x73, 0x12, 0x1a, 0x0a, 0x08, 0x61, 0x75, 0x64, 0x69, 0x65, 0x6e, 0x63, + 0x65, 0x18, 0x05, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x61, 0x75, 0x64, 0x69, 0x65, 0x6e, 0x63, + 0x65, 0x22, 0xa3, 0x01, 0x0a, 0x09, 0x52, 0x65, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x12, + 0x12, 0x0a, 0x04, 0x75, 0x75, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x75, + 0x75, 0x69, 0x64, 0x12, 0x1c, 0x0a, 0x09, 
0x6e, 0x61, 0x6d, 0x65, 0x73, 0x70, 0x61, 0x63, 0x65, + 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x6e, 0x61, 0x6d, 0x65, 0x73, 0x70, 0x61, 0x63, + 0x65, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, + 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x18, 0x0a, 0x07, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, + 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x12, + 0x12, 0x0a, 0x04, 0x6b, 0x69, 0x6e, 0x64, 0x18, 0x05, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6b, + 0x69, 0x6e, 0x64, 0x12, 0x22, 0x0a, 0x0c, 0x67, 0x72, 0x6f, 0x75, 0x70, 0x56, 0x65, 0x72, 0x73, + 0x69, 0x6f, 0x6e, 0x18, 0x06, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0c, 0x67, 0x72, 0x6f, 0x75, 0x70, + 0x56, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x22, 0x7f, 0x0a, 0x0f, 0x53, 0x65, 0x63, 0x72, 0x65, + 0x74, 0x52, 0x65, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x12, 0x28, 0x0a, 0x09, 0x72, 0x65, + 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x0a, 0x2e, + 0x52, 0x65, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x52, 0x09, 0x72, 0x65, 0x66, 0x65, 0x72, + 0x65, 0x6e, 0x63, 0x65, 0x12, 0x42, 0x0a, 0x12, 0x6b, 0x65, 0x79, 0x46, 0x69, 0x65, 0x6c, 0x64, + 0x52, 0x65, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, + 0x32, 0x12, 0x2e, 0x4b, 0x65, 0x79, 0x46, 0x69, 0x65, 0x6c, 0x64, 0x52, 0x65, 0x66, 0x65, 0x72, + 0x65, 0x6e, 0x63, 0x65, 0x52, 0x12, 0x6b, 0x65, 0x79, 0x46, 0x69, 0x65, 0x6c, 0x64, 0x52, 0x65, + 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x73, 0x22, 0x55, 0x0a, 0x11, 0x4b, 0x65, 0x79, 0x46, + 0x69, 0x65, 0x6c, 0x64, 0x52, 0x65, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x12, 0x1c, 0x0a, + 0x09, 0x73, 0x65, 0x63, 0x72, 0x65, 0x74, 0x4b, 0x65, 0x79, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, + 0x52, 0x09, 0x73, 0x65, 0x63, 0x72, 0x65, 0x74, 0x4b, 0x65, 0x79, 0x12, 0x22, 0x0a, 0x05, 0x66, + 0x69, 0x65, 0x6c, 0x64, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x0c, 0x2e, 
0x53, 0x65, 0x63, + 0x72, 0x65, 0x74, 0x46, 0x69, 0x65, 0x6c, 0x64, 0x52, 0x05, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x22, + 0x6f, 0x0a, 0x14, 0x4d, 0x75, 0x6c, 0x74, 0x69, 0x53, 0x65, 0x63, 0x72, 0x65, 0x74, 0x52, 0x65, + 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x12, 0x25, 0x0a, 0x08, 0x70, 0x72, 0x6f, 0x74, 0x6f, + 0x63, 0x6f, 0x6c, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x09, 0x2e, 0x50, 0x72, 0x6f, 0x74, + 0x6f, 0x63, 0x6f, 0x6c, 0x52, 0x08, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x63, 0x6f, 0x6c, 0x12, 0x30, + 0x0a, 0x0a, 0x72, 0x65, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x73, 0x18, 0x02, 0x20, 0x03, + 0x28, 0x0b, 0x32, 0x10, 0x2e, 0x53, 0x65, 0x63, 0x72, 0x65, 0x74, 0x52, 0x65, 0x66, 0x65, 0x72, + 0x65, 0x6e, 0x63, 0x65, 0x52, 0x0a, 0x72, 0x65, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x73, + 0x22, 0x9a, 0x01, 0x0a, 0x13, 0x43, 0x6c, 0x6f, 0x75, 0x64, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x4f, + 0x76, 0x65, 0x72, 0x72, 0x69, 0x64, 0x65, 0x73, 0x12, 0x44, 0x0a, 0x0a, 0x65, 0x78, 0x74, 0x65, + 0x6e, 0x73, 0x69, 0x6f, 0x6e, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x24, 0x2e, 0x43, + 0x6c, 0x6f, 0x75, 0x64, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x4f, 0x76, 0x65, 0x72, 0x72, 0x69, 0x64, + 0x65, 0x73, 0x2e, 0x45, 0x78, 0x74, 0x65, 0x6e, 0x73, 0x69, 0x6f, 0x6e, 0x73, 0x45, 0x6e, 0x74, + 0x72, 0x79, 0x52, 0x0a, 0x65, 0x78, 0x74, 0x65, 0x6e, 0x73, 0x69, 0x6f, 0x6e, 0x73, 0x1a, 0x3d, + 0x0a, 0x0f, 0x45, 0x78, 0x74, 0x65, 0x6e, 0x73, 0x69, 0x6f, 0x6e, 0x73, 0x45, 0x6e, 0x74, 0x72, + 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, + 0x6b, 0x65, 0x79, 0x12, 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, + 0x28, 0x09, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x22, 0xf1, 0x03, + 0x0a, 0x08, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x12, 0x10, 0x0a, 0x03, 0x75, 0x69, + 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x75, 0x69, 0x64, 0x12, 0x16, 0x0a, 0x06, + 0x74, 0x6f, 0x70, 
0x69, 0x63, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x09, 0x52, 0x06, 0x74, 0x6f, + 0x70, 0x69, 0x63, 0x73, 0x12, 0x2a, 0x0a, 0x10, 0x62, 0x6f, 0x6f, 0x74, 0x73, 0x74, 0x72, 0x61, + 0x70, 0x53, 0x65, 0x72, 0x76, 0x65, 0x72, 0x73, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x10, + 0x62, 0x6f, 0x6f, 0x74, 0x73, 0x74, 0x72, 0x61, 0x70, 0x53, 0x65, 0x72, 0x76, 0x65, 0x72, 0x73, + 0x12, 0x22, 0x0a, 0x07, 0x69, 0x6e, 0x67, 0x72, 0x65, 0x73, 0x73, 0x18, 0x04, 0x20, 0x01, 0x28, + 0x0b, 0x32, 0x08, 0x2e, 0x49, 0x6e, 0x67, 0x72, 0x65, 0x73, 0x73, 0x52, 0x07, 0x69, 0x6e, 0x67, + 0x72, 0x65, 0x73, 0x73, 0x12, 0x31, 0x0a, 0x0c, 0x65, 0x67, 0x72, 0x65, 0x73, 0x73, 0x43, 0x6f, + 0x6e, 0x66, 0x69, 0x67, 0x18, 0x05, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x0d, 0x2e, 0x45, 0x67, 0x72, + 0x65, 0x73, 0x73, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x52, 0x0c, 0x65, 0x67, 0x72, 0x65, 0x73, + 0x73, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x12, 0x23, 0x0a, 0x08, 0x65, 0x67, 0x72, 0x65, 0x73, + 0x73, 0x65, 0x73, 0x18, 0x06, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x07, 0x2e, 0x45, 0x67, 0x72, 0x65, + 0x73, 0x73, 0x52, 0x08, 0x65, 0x67, 0x72, 0x65, 0x73, 0x73, 0x65, 0x73, 0x12, 0x28, 0x0a, 0x0a, + 0x61, 0x62, 0x73, 0x65, 0x6e, 0x74, 0x41, 0x75, 0x74, 0x68, 0x18, 0x07, 0x20, 0x01, 0x28, 0x0b, + 0x32, 0x06, 0x2e, 0x45, 0x6d, 0x70, 0x74, 0x79, 0x48, 0x00, 0x52, 0x0a, 0x61, 0x62, 0x73, 0x65, + 0x6e, 0x74, 0x41, 0x75, 0x74, 0x68, 0x12, 0x2c, 0x0a, 0x0a, 0x61, 0x75, 0x74, 0x68, 0x53, 0x65, + 0x63, 0x72, 0x65, 0x74, 0x18, 0x08, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x0a, 0x2e, 0x52, 0x65, 0x66, + 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x48, 0x00, 0x52, 0x0a, 0x61, 0x75, 0x74, 0x68, 0x53, 0x65, + 0x63, 0x72, 0x65, 0x74, 0x12, 0x41, 0x0a, 0x0f, 0x6d, 0x75, 0x6c, 0x74, 0x69, 0x41, 0x75, 0x74, + 0x68, 0x53, 0x65, 0x63, 0x72, 0x65, 0x74, 0x18, 0x09, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x15, 0x2e, + 0x4d, 0x75, 0x6c, 0x74, 0x69, 0x53, 0x65, 0x63, 0x72, 0x65, 0x74, 0x52, 0x65, 0x66, 0x65, 0x72, + 0x65, 0x6e, 0x63, 0x65, 0x48, 0x00, 0x52, 0x0f, 0x6d, 
0x75, 0x6c, 0x74, 0x69, 0x41, 0x75, 0x74, + 0x68, 0x53, 0x65, 0x63, 0x72, 0x65, 0x74, 0x12, 0x46, 0x0a, 0x13, 0x63, 0x6c, 0x6f, 0x75, 0x64, + 0x45, 0x76, 0x65, 0x6e, 0x74, 0x4f, 0x76, 0x65, 0x72, 0x72, 0x69, 0x64, 0x65, 0x73, 0x18, 0x0a, + 0x20, 0x01, 0x28, 0x0b, 0x32, 0x14, 0x2e, 0x43, 0x6c, 0x6f, 0x75, 0x64, 0x45, 0x76, 0x65, 0x6e, + 0x74, 0x4f, 0x76, 0x65, 0x72, 0x72, 0x69, 0x64, 0x65, 0x73, 0x52, 0x13, 0x63, 0x6c, 0x6f, 0x75, + 0x64, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x4f, 0x76, 0x65, 0x72, 0x72, 0x69, 0x64, 0x65, 0x73, 0x12, + 0x28, 0x0a, 0x09, 0x72, 0x65, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x18, 0x0b, 0x20, 0x01, + 0x28, 0x0b, 0x32, 0x0a, 0x2e, 0x52, 0x65, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x52, 0x09, + 0x72, 0x65, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x42, 0x06, 0x0a, 0x04, 0x41, 0x75, 0x74, + 0x68, 0x22, 0x77, 0x0a, 0x08, 0x43, 0x6f, 0x6e, 0x74, 0x72, 0x61, 0x63, 0x74, 0x12, 0x1e, 0x0a, + 0x0a, 0x67, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x01, 0x20, 0x01, 0x28, + 0x04, 0x52, 0x0a, 0x67, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x27, 0x0a, + 0x09, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, + 0x32, 0x09, 0x2e, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x52, 0x09, 0x72, 0x65, 0x73, + 0x6f, 0x75, 0x72, 0x63, 0x65, 0x73, 0x12, 0x22, 0x0a, 0x0c, 0x74, 0x72, 0x75, 0x73, 0x74, 0x42, + 0x75, 0x6e, 0x64, 0x6c, 0x65, 0x73, 0x18, 0x03, 0x20, 0x03, 0x28, 0x09, 0x52, 0x0c, 0x74, 0x72, + 0x75, 0x73, 0x74, 0x42, 0x75, 0x6e, 0x64, 0x6c, 0x65, 0x73, 0x2a, 0x2c, 0x0a, 0x0d, 0x42, 0x61, + 0x63, 0x6b, 0x6f, 0x66, 0x66, 0x50, 0x6f, 0x6c, 0x69, 0x63, 0x79, 0x12, 0x0f, 0x0a, 0x0b, 0x45, + 0x78, 0x70, 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x69, 0x61, 0x6c, 0x10, 0x00, 0x12, 0x0a, 0x0a, 0x06, + 0x4c, 0x69, 0x6e, 0x65, 0x61, 0x72, 0x10, 0x01, 0x2a, 0x2b, 0x0a, 0x0d, 0x44, 0x65, 0x6c, 0x69, + 0x76, 0x65, 0x72, 0x79, 0x4f, 0x72, 0x64, 0x65, 0x72, 0x12, 0x0d, 0x0a, 0x09, 0x55, 0x4e, 
0x4f, + 0x52, 0x44, 0x45, 0x52, 0x45, 0x44, 0x10, 0x00, 0x12, 0x0b, 0x0a, 0x07, 0x4f, 0x52, 0x44, 0x45, + 0x52, 0x45, 0x44, 0x10, 0x01, 0x2a, 0x3d, 0x0a, 0x07, 0x4b, 0x65, 0x79, 0x54, 0x79, 0x70, 0x65, + 0x12, 0x0a, 0x0a, 0x06, 0x53, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x10, 0x00, 0x12, 0x0b, 0x0a, 0x07, + 0x49, 0x6e, 0x74, 0x65, 0x67, 0x65, 0x72, 0x10, 0x01, 0x12, 0x0a, 0x0a, 0x06, 0x44, 0x6f, 0x75, + 0x62, 0x6c, 0x65, 0x10, 0x02, 0x12, 0x0d, 0x0a, 0x09, 0x42, 0x79, 0x74, 0x65, 0x41, 0x72, 0x72, + 0x61, 0x79, 0x10, 0x03, 0x2a, 0x29, 0x0a, 0x0b, 0x43, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x4d, + 0x6f, 0x64, 0x65, 0x12, 0x0a, 0x0a, 0x06, 0x42, 0x49, 0x4e, 0x41, 0x52, 0x59, 0x10, 0x00, 0x12, + 0x0e, 0x0a, 0x0a, 0x53, 0x54, 0x52, 0x55, 0x43, 0x54, 0x55, 0x52, 0x45, 0x44, 0x10, 0x01, 0x2a, + 0x61, 0x0a, 0x0b, 0x53, 0x65, 0x63, 0x72, 0x65, 0x74, 0x46, 0x69, 0x65, 0x6c, 0x64, 0x12, 0x12, + 0x0a, 0x0e, 0x53, 0x41, 0x53, 0x4c, 0x5f, 0x4d, 0x45, 0x43, 0x48, 0x41, 0x4e, 0x49, 0x53, 0x4d, + 0x10, 0x00, 0x12, 0x0a, 0x0a, 0x06, 0x43, 0x41, 0x5f, 0x43, 0x52, 0x54, 0x10, 0x01, 0x12, 0x0c, + 0x0a, 0x08, 0x55, 0x53, 0x45, 0x52, 0x5f, 0x43, 0x52, 0x54, 0x10, 0x02, 0x12, 0x0c, 0x0a, 0x08, + 0x55, 0x53, 0x45, 0x52, 0x5f, 0x4b, 0x45, 0x59, 0x10, 0x03, 0x12, 0x08, 0x0a, 0x04, 0x55, 0x53, + 0x45, 0x52, 0x10, 0x04, 0x12, 0x0c, 0x0a, 0x08, 0x50, 0x41, 0x53, 0x53, 0x57, 0x4f, 0x52, 0x44, + 0x10, 0x05, 0x2a, 0x44, 0x0a, 0x08, 0x50, 0x72, 0x6f, 0x74, 0x6f, 0x63, 0x6f, 0x6c, 0x12, 0x0d, + 0x0a, 0x09, 0x50, 0x4c, 0x41, 0x49, 0x4e, 0x54, 0x45, 0x58, 0x54, 0x10, 0x00, 0x12, 0x12, 0x0a, + 0x0e, 0x53, 0x41, 0x53, 0x4c, 0x5f, 0x50, 0x4c, 0x41, 0x49, 0x4e, 0x54, 0x45, 0x58, 0x54, 0x10, + 0x01, 0x12, 0x07, 0x0a, 0x03, 0x53, 0x53, 0x4c, 0x10, 0x02, 0x12, 0x0c, 0x0a, 0x08, 0x53, 0x41, + 0x53, 0x4c, 0x5f, 0x53, 0x53, 0x4c, 0x10, 0x03, 0x42, 0x5b, 0x0a, 0x2a, 0x64, 0x65, 0x76, 0x2e, + 0x6b, 0x6e, 0x61, 0x74, 0x69, 0x76, 0x65, 0x2e, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x69, 0x6e, 0x67, + 0x2e, 0x6b, 0x61, 0x66, 0x6b, 
0x61, 0x2e, 0x62, 0x72, 0x6f, 0x6b, 0x65, 0x72, 0x2e, 0x63, 0x6f, + 0x6e, 0x74, 0x72, 0x61, 0x63, 0x74, 0x42, 0x11, 0x44, 0x61, 0x74, 0x61, 0x50, 0x6c, 0x61, 0x6e, + 0x65, 0x43, 0x6f, 0x6e, 0x74, 0x72, 0x61, 0x63, 0x74, 0x5a, 0x1a, 0x63, 0x6f, 0x6e, 0x74, 0x72, + 0x6f, 0x6c, 0x2d, 0x70, 0x6c, 0x61, 0x6e, 0x65, 0x2f, 0x70, 0x6b, 0x67, 0x2f, 0x63, 0x6f, 0x6e, + 0x74, 0x72, 0x61, 0x63, 0x74, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, } var ( diff --git a/control-plane/pkg/core/config/utils.go b/control-plane/pkg/core/config/utils.go index e7d8dff3cb..fa45f52966 100644 --- a/control-plane/pkg/core/config/utils.go +++ b/control-plane/pkg/core/config/utils.go @@ -77,6 +77,9 @@ func EgressConfigFromDelivery( if deadLetterSinkAddr.CACerts != nil { egressConfig.DeadLetterCACerts = *deadLetterSinkAddr.CACerts } + if deadLetterSinkAddr.Audience != nil { + egressConfig.DeadLetterAudience = *deadLetterSinkAddr.Audience + } } if delivery.Retry != nil { diff --git a/control-plane/pkg/receiver/address.go b/control-plane/pkg/receiver/address.go index 07deba0c16..9e57492b17 100644 --- a/control-plane/pkg/receiver/address.go +++ b/control-plane/pkg/receiver/address.go @@ -35,29 +35,32 @@ func Address(host string, object metav1.Object) *url.URL { } // HTTPAddress returns the addressable -func HTTPAddress(host string, object metav1.Object) duckv1.Addressable { +func HTTPAddress(host string, audience *string, object metav1.Object) duckv1.Addressable { httpAddress := duckv1.Addressable{ - Name: pointer.String("http"), - URL: apis.HTTP(host), + Name: pointer.String("http"), + URL: apis.HTTP(host), + Audience: audience, } httpAddress.URL.Path = fmt.Sprintf("/%s/%s", object.GetNamespace(), object.GetName()) return httpAddress } // HTTPAddress returns the addressable -func ChannelHTTPAddress(host string) duckv1.Addressable { +func ChannelHTTPAddress(host string, audience *string) duckv1.Addressable { httpAddress := duckv1.Addressable{ - Name: pointer.String("http"), - URL: 
apis.HTTP(host), + Name: pointer.String("http"), + URL: apis.HTTP(host), + Audience: audience, } return httpAddress } -func HTTPSAddress(host string, object metav1.Object, caCerts *string) duckv1.Addressable { +func HTTPSAddress(host string, audience *string, object metav1.Object, caCerts *string) duckv1.Addressable { httpsAddress := duckv1.Addressable{ - Name: pointer.String("https"), - URL: apis.HTTPS(host), - CACerts: caCerts, + Name: pointer.String("https"), + URL: apis.HTTPS(host), + CACerts: caCerts, + Audience: audience, } httpsAddress.URL.Path = fmt.Sprintf("/%s/%s", object.GetNamespace(), object.GetName()) return httpsAddress diff --git a/control-plane/pkg/receiver/address_test.go b/control-plane/pkg/receiver/address_test.go index cbbb3fe9f7..f17b8a0b8b 100644 --- a/control-plane/pkg/receiver/address_test.go +++ b/control-plane/pkg/receiver/address_test.go @@ -52,9 +52,11 @@ func TestHTTPSAddress(t *testing.T) { Name: "ks", }, } - httpsAddress := HTTPSAddress(host, ks, pointer.String(string(eventingtlstesting.CA))) + aud := "my-audience" + httpsAddress := HTTPSAddress(host, &aud, ks, pointer.String(string(eventingtlstesting.CA))) require.Equal(t, httpsAddress.URL.Host, host) + require.Equal(t, httpsAddress.Audience, &aud) require.Equal(t, httpsAddress.URL.Scheme, "https") require.Contains(t, httpsAddress.URL.Path, ks.GetNamespace()) require.Contains(t, httpsAddress.URL.Path, ks.GetName()) @@ -69,9 +71,11 @@ func TestHTTPAddress(t *testing.T) { Name: "ks", }, } - httpAddress := HTTPAddress(host, ks) + aud := "my-audience" + httpAddress := HTTPAddress(host, &aud, ks) require.Equal(t, host, httpAddress.URL.Host) + require.Equal(t, httpAddress.Audience, &aud) require.Equal(t, httpAddress.URL.Scheme, "http") require.Contains(t, httpAddress.URL.Path, ks.GetNamespace()) require.Contains(t, httpAddress.URL.Path, ks.GetName()) diff --git a/control-plane/pkg/reconciler/broker/broker.go b/control-plane/pkg/reconciler/broker/broker.go index c5538dd379..20cc869727 
100644 --- a/control-plane/pkg/reconciler/broker/broker.go +++ b/control-plane/pkg/reconciler/broker/broker.go @@ -22,6 +22,9 @@ import ( "strings" "time" + "knative.dev/eventing/pkg/auth" + "knative.dev/pkg/logging" + "go.uber.org/zap" corev1 "k8s.io/api/core/v1" apierrors "k8s.io/apimachinery/pkg/api/errors" @@ -247,8 +250,8 @@ func (r *Reconciler) reconcileKind(ctx context.Context, broker *eventing.Broker) return err } - httpAddress := receiver.HTTPAddress(ingressHost, broker) - httpsAddress := receiver.HTTPSAddress(ingressHost, broker, caCerts) + httpAddress := receiver.HTTPAddress(ingressHost, nil, broker) + httpsAddress := receiver.HTTPSAddress(ingressHost, nil, broker, caCerts) addressableStatus.Address = &httpAddress addressableStatus.Addresses = []duckv1.Addressable{httpAddress, httpsAddress} } else if transportEncryptionFlags.IsStrictTransportEncryption() { @@ -257,15 +260,14 @@ func (r *Reconciler) reconcileKind(ctx context.Context, broker *eventing.Broker) return err } - httpsAddress := receiver.HTTPSAddress(ingressHost, broker, caCerts) + httpsAddress := receiver.HTTPSAddress(ingressHost, nil, broker, caCerts) addressableStatus.Address = &httpsAddress addressableStatus.Addresses = []duckv1.Addressable{httpsAddress} } else { - httpAddress := receiver.HTTPAddress(ingressHost, broker) + httpAddress := receiver.HTTPAddress(ingressHost, nil, broker) addressableStatus.Address = &httpAddress addressableStatus.Addresses = []duckv1.Addressable{httpAddress} } - proberAddressable := prober.ProberAddressable{ AddressStatus: &addressableStatus, ResourceKey: types.NamespacedName{ @@ -282,6 +284,26 @@ func (r *Reconciler) reconcileKind(ctx context.Context, broker *eventing.Broker) broker.Status.Address = addressableStatus.Address broker.Status.Addresses = addressableStatus.Addresses + + if feature.FromContext(ctx).IsOIDCAuthentication() { + audience := auth.GetAudience(eventing.SchemeGroupVersion.WithKind("Broker"), broker.ObjectMeta) + 
logging.FromContext(ctx).Debugw("Setting the brokers audience", zap.String("audience", audience)) + broker.Status.Address.Audience = &audience + + for i := range broker.Status.Addresses { + broker.Status.Addresses[i].Audience = &audience + } + } else { + logging.FromContext(ctx).Debug("Clearing the brokers audience as OIDC is not enabled") + if broker.Status.Address != nil { + broker.Status.Address.Audience = nil + } + + for i := range broker.Status.Addresses { + broker.Status.Addresses[i].Audience = nil + } + } + broker.GetConditionSet().Manage(broker.GetStatus()).MarkTrue(base.ConditionAddressable) return nil @@ -360,7 +382,7 @@ func (r *Reconciler) finalizeKind(ctx context.Context, broker *eventing.Broker) // See (under discussions KIPs, unlikely to be accepted as they are): // - https://cwiki.apache.org/confluence/pages/viewpage.action?pageId=181306446 // - https://cwiki.apache.org/confluence/display/KAFKA/KIP-286%3A+producer.send%28%29+should+not+block+on+metadata+update - address := receiver.HTTPAddress(ingressHost, broker) + address := receiver.HTTPAddress(ingressHost, nil, broker) proberAddressable := prober.ProberAddressable{ AddressStatus: &duckv1.AddressStatus{ Address: &address, @@ -641,6 +663,10 @@ func (r *Reconciler) reconcilerBrokerResource(ctx context.Context, topic string, } } + if broker.Status.Address != nil && broker.Status.Address.Audience != nil { + resource.Ingress.Audience = *broker.Status.Address.Audience + } + egressConfig, err := coreconfig.EgressConfigFromDelivery(ctx, r.Resolver, broker, broker.Spec.Delivery, r.DefaultBackoffDelayMs) if err != nil { return nil, err diff --git a/control-plane/pkg/reconciler/broker/broker_test.go b/control-plane/pkg/reconciler/broker/broker_test.go index 6c46dd8ee1..08a1e3a1ac 100644 --- a/control-plane/pkg/reconciler/broker/broker_test.go +++ b/control-plane/pkg/reconciler/broker/broker_test.go @@ -23,6 +23,8 @@ import ( "testing" "text/template" + "knative.dev/eventing/pkg/auth" + 
"knative.dev/eventing-kafka-broker/control-plane/pkg/counter" "k8s.io/apimachinery/pkg/runtime/schema" @@ -106,6 +108,10 @@ var ( linear = eventingduck.BackoffPolicyLinear exponential = eventingduck.BackoffPolicyExponential customBrokerTopicTemplate = customTemplate() + brokerAudience = auth.GetAudience(eventing.SchemeGroupVersion.WithKind("Broker"), metav1.ObjectMeta{ + Name: BrokerName, + Namespace: BrokerNamespace, + }) ) var DefaultEnv = &config.Env{ @@ -121,7 +127,6 @@ var DefaultEnv = &config.Env{ func TestBrokerReconciler(t *testing.T) { eventing.RegisterAlternateBrokerConditionSet(base.IngressConditionSet) - t.Parallel() for _, f := range Formats { @@ -2221,6 +2226,105 @@ func brokerReconciliation(t *testing.T, format string, env config.Env) { }), }, }, + { + Name: "Should provision audience if authentication enabled", + Objects: []runtime.Object{ + NewBroker( + WithBrokerConfig(KReference(BrokerConfig(bootstrapServers, 20, 5, + BrokerAuthConfig("secret-1"), + ))), + ), + NewSSLSecret(ConfigMapNamespace, "secret-1"), + BrokerConfig(bootstrapServers, 20, 5, BrokerAuthConfig("secret-1")), + NewConfigMapWithBinaryData(env.DataPlaneConfigMapNamespace, env.ContractConfigMapName, nil), + NewService(), + BrokerReceiverPod(env.SystemNamespace, map[string]string{ + "annotation_to_preserve": "value_to_preserve", + }), + BrokerDispatcherPod(env.SystemNamespace, map[string]string{ + "annotation_to_preserve": "value_to_preserve", + }), + }, + Key: testKey, + WantEvents: []string{ + finalizerUpdatedEvent, + }, + WantUpdates: []clientgotesting.UpdateActionImpl{ + SecretFinalizerUpdate("secret-1", SecretFinalizerName), + ConfigMapUpdate(env.DataPlaneConfigMapNamespace, env.ContractConfigMapName, env.ContractConfigMapFormat, &contract.Contract{ + Resources: []*contract.Resource{ + { + Uid: BrokerUUID, + Topics: []string{BrokerTopic()}, + Ingress: &contract.Ingress{Path: receiver.Path(BrokerNamespace, BrokerName)}, + BootstrapServers: bootstrapServers, + Reference: 
BrokerReference(), + Auth: &contract.Resource_AuthSecret{ + AuthSecret: &contract.Reference{ + Uuid: SecretUUID, + Namespace: ConfigMapNamespace, + Name: "secret-1", + Version: SecretResourceVersion, + }, + }, + }, + }, + Generation: 1, + }), + BrokerReceiverPodUpdate(env.SystemNamespace, map[string]string{ + base.VolumeGenerationAnnotationKey: "1", + "annotation_to_preserve": "value_to_preserve", + }), + BrokerDispatcherPodUpdate(env.SystemNamespace, map[string]string{ + base.VolumeGenerationAnnotationKey: "1", + "annotation_to_preserve": "value_to_preserve", + }), + }, + WantPatches: []clientgotesting.PatchActionImpl{ + patchFinalizers(), + }, + WantStatusUpdates: []clientgotesting.UpdateActionImpl{ + { + Object: NewBroker( + WithBrokerConfig(KReference(BrokerConfig(bootstrapServers, 20, 5, + BrokerAuthConfig("secret-1"), + ))), + reconcilertesting.WithInitBrokerConditions, + StatusBrokerConfigMapUpdatedReady(&env), + StatusBrokerDataPlaneAvailable, + StatusBrokerConfigParsed, + StatusBrokerTopicReady, + BrokerConfigMapAnnotations(), + WithTopicStatusAnnotation(BrokerTopic()), + BrokerConfigMapSecretAnnotation("secret-1"), + BrokerAddressable(&env), + StatusBrokerProbeSucceeded, + WithBrokerAddresses([]duckv1.Addressable{ + { + Name: pointer.String("http"), + URL: brokerAddress, + Audience: &brokerAudience, + }, + }), + WithBrokerAddress(duckv1.Addressable{ + Name: pointer.String("http"), + URL: brokerAddress, + Audience: &brokerAudience, + }), + WithBrokerAddessable(), + ), + }, + }, + OtherTestData: map[string]interface{}{ + ExpectedTopicDetail: sarama.TopicDetail{ + NumPartitions: 20, + ReplicationFactor: 5, + }, + }, + Ctx: feature.ToContext(context.Background(), feature.Flags{ + feature.OIDCAuthentication: feature.Enabled, + }), + }, } for i := range table { diff --git a/control-plane/pkg/reconciler/broker/namespaced_broker.go b/control-plane/pkg/reconciler/broker/namespaced_broker.go index 4db94dede7..7a5d6c78d2 100644 --- 
a/control-plane/pkg/reconciler/broker/namespaced_broker.go +++ b/control-plane/pkg/reconciler/broker/namespaced_broker.go @@ -385,6 +385,7 @@ func (r *NamespacedReconciler) configMapsFromSystemNamespace(broker *eventing.Br configMaps := []string{ "config-kafka-broker-data-plane", "config-tracing", + "config-features", "kafka-config-logging", "config-openshift-trusted-cabundle", } diff --git a/control-plane/pkg/reconciler/broker/namespaced_broker_test.go b/control-plane/pkg/reconciler/broker/namespaced_broker_test.go index 6f8f41ebf9..1f4bcaefda 100644 --- a/control-plane/pkg/reconciler/broker/namespaced_broker_test.go +++ b/control-plane/pkg/reconciler/broker/namespaced_broker_test.go @@ -109,6 +109,7 @@ func namespacedBrokerReconciliation(t *testing.T, format string, env config.Env) DataPlaneConfigInitialOffset(ConsumerConfigKey, sources.OffsetLatest), ), reconcilertesting.NewConfigMap("config-tracing", SystemNamespace), + reconcilertesting.NewConfigMap("config-features", SystemNamespace), reconcilertesting.NewConfigMap("kafka-config-logging", SystemNamespace), NewConfigMapWithBinaryData(env.DataPlaneConfigMapNamespace, env.ContractConfigMapName, nil), NewService(), @@ -181,6 +182,14 @@ func namespacedBrokerReconciliation(t *testing.T, format string, env config.Env) WithNamespacedBrokerOwnerRef, WithNamespacedLabel, ), + ToManifestivalResource(t, + reconcilertesting.NewConfigMap( + "config-features", + BrokerNamespace, + ), + WithNamespacedBrokerOwnerRef, + WithNamespacedLabel, + ), ToManifestivalResource(t, reconcilertesting.NewConfigMap( "kafka-config-logging", @@ -360,6 +369,7 @@ func namespacedBrokerFinalization(t *testing.T, format string, env config.Env) { }, env.DataPlaneConfigMapNamespace, env.ContractConfigMapName, env.ContractConfigMapFormat), reconcilertesting.NewConfigMap(env.DataPlaneConfigConfigMapName, SystemNamespace), reconcilertesting.NewConfigMap("config-tracing", SystemNamespace), + reconcilertesting.NewConfigMap("config-features", 
SystemNamespace), reconcilertesting.NewConfigMap("kafka-config-logging", SystemNamespace), reconcilertesting.NewDeployment("kafka-broker-receiver", SystemNamespace), reconcilertesting.NewDeployment("kafka-broker-dispatcher", SystemNamespace), diff --git a/control-plane/pkg/reconciler/channel/channel.go b/control-plane/pkg/reconciler/channel/channel.go index 92030261c3..2d2cb918b2 100644 --- a/control-plane/pkg/reconciler/channel/channel.go +++ b/control-plane/pkg/reconciler/channel/channel.go @@ -22,10 +22,13 @@ import ( "strconv" "time" + "k8s.io/utils/pointer" + "knative.dev/eventing/pkg/auth" + "knative.dev/pkg/logging" + "k8s.io/apimachinery/pkg/api/equality" apierrors "k8s.io/apimachinery/pkg/api/errors" metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" - "k8s.io/utils/pointer" "knative.dev/eventing/pkg/apis/feature" messaging "knative.dev/eventing/pkg/apis/messaging/v1" "knative.dev/pkg/network" @@ -313,18 +316,27 @@ func (r *Reconciler) reconcileKind(ctx context.Context, channel *messagingv1beta return err } + featureFlags := feature.FromContext(ctx) + var audience *string + if featureFlags.IsOIDCAuthentication() { + audience = pointer.String(auth.GetAudience(messaging.SchemeGroupVersion.WithKind("KafkaChannel"), channel.ObjectMeta)) + logging.FromContext(ctx).Debugw("Setting the KafkaChannels audience", zap.String("audience", *audience)) + } else { + logging.FromContext(ctx).Debug("Clearing the KafkaChannels audience as OIDC is not enabled") + audience = nil + } + var addressableStatus duckv1.AddressStatus channelHttpsHost := network.GetServiceHostname(r.Env.IngressName, r.SystemNamespace) channelHttpHost := network.GetServiceHostname(channelService.Name, channel.Namespace) - transportEncryptionFlags := feature.FromContext(ctx) - if transportEncryptionFlags.IsPermissiveTransportEncryption() { + if featureFlags.IsPermissiveTransportEncryption() { caCerts, err := r.getCaCerts() if err != nil { return err } - httpAddress := 
receiver.ChannelHTTPAddress(channelHttpHost) - httpsAddress := receiver.HTTPSAddress(channelHttpsHost, channel, caCerts) + httpAddress := receiver.ChannelHTTPAddress(channelHttpHost, audience) + httpsAddress := receiver.HTTPSAddress(channelHttpsHost, audience, channel, caCerts) // Permissive mode: // - status.address http address with path-based routing // - status.addresses: @@ -332,7 +344,7 @@ func (r *Reconciler) reconcileKind(ctx context.Context, channel *messagingv1beta // - http address with path-based routing addressableStatus.Addresses = []duckv1.Addressable{httpsAddress, httpAddress} addressableStatus.Address = &httpAddress - } else if transportEncryptionFlags.IsStrictTransportEncryption() { + } else if featureFlags.IsStrictTransportEncryption() { // Strict mode: (only https addresses) // - status.address https address with path-based routing // - status.addresses: @@ -342,11 +354,11 @@ func (r *Reconciler) reconcileKind(ctx context.Context, channel *messagingv1beta return err } - httpsAddress := receiver.HTTPSAddress(channelHttpsHost, channel, caCerts) + httpsAddress := receiver.HTTPSAddress(channelHttpsHost, audience, channel, caCerts) addressableStatus.Addresses = []duckv1.Addressable{httpsAddress} addressableStatus.Address = &httpsAddress } else { - httpAddress := receiver.ChannelHTTPAddress(channelHttpHost) + httpAddress := receiver.ChannelHTTPAddress(channelHttpHost, audience) addressableStatus.Address = &httpAddress addressableStatus.Addresses = []duckv1.Addressable{httpAddress} } @@ -433,7 +445,7 @@ func (r *Reconciler) finalizeKind(ctx context.Context, channel *messagingv1beta1 // See (under discussions KIPs, unlikely to be accepted as they are): // - https://cwiki.apache.org/confluence/pages/viewpage.action?pageId=181306446 // - https://cwiki.apache.org/confluence/display/KAFKA/KIP-286%3A+producer.send%28%29+should+not+block+on+metadata+update - address := receiver.HTTPAddress(r.IngressHost, channel) + address := 
receiver.HTTPAddress(r.IngressHost, nil, channel) proberAddressable := prober.ProberAddressable{ AddressStatus: &duckv1.AddressStatus{ Address: &address, @@ -606,6 +618,12 @@ func (r *Reconciler) getSubscriberConfig(ctx context.Context, channel *messaging if subscriber.SubscriberCACerts != nil && *subscriber.SubscriberCACerts != "" { egress.DestinationCACerts = *subscriber.SubscriberCACerts } + if subscriber.SubscriberAudience != nil && *subscriber.SubscriberAudience != "" { + egress.DestinationAudience = *subscriber.SubscriberAudience + } + if subscriber.Auth != nil && subscriber.Auth.ServiceAccountName != nil { + egress.OidcServiceAccountName = *subscriber.Auth.ServiceAccountName + } if subscriptionName != "" { egress.Reference = &contract.Reference{ @@ -622,6 +640,9 @@ func (r *Reconciler) getSubscriberConfig(ctx context.Context, channel *messaging if subscriber.ReplyCACerts != nil && *subscriber.ReplyCACerts != "" { egress.ReplyUrlCACerts = *subscriber.ReplyCACerts } + if subscriber.ReplyAudience != nil && *subscriber.ReplyAudience != "" { + egress.ReplyUrlAudience = *subscriber.ReplyAudience + } } subscriptionEgressConfig, err := coreconfig.EgressConfigFromDelivery(ctx, r.Resolver, channel, subscriber.Delivery, r.DefaultBackoffDelayMs) @@ -701,6 +722,10 @@ func (r *Reconciler) getChannelContractResource(ctx context.Context, topic strin } } + if channel.Status.Address != nil && channel.Status.Address.Audience != nil { + resource.Ingress.Audience = *channel.Status.Address.Audience + } + egressConfig, err := coreconfig.EgressConfigFromDelivery(ctx, r.Resolver, channel, channel.Spec.Delivery, r.DefaultBackoffDelayMs) if err != nil { return nil, err diff --git a/control-plane/pkg/reconciler/channel/channel_test.go b/control-plane/pkg/reconciler/channel/channel_test.go index 9a4c901dcd..eda161a896 100644 --- a/control-plane/pkg/reconciler/channel/channel_test.go +++ b/control-plane/pkg/reconciler/channel/channel_test.go @@ -2005,6 +2005,91 @@ func 
TestReconcileKind(t *testing.T) { finalizerUpdatedEvent, }, }, + { + Name: "Reconciled normal - OIDC enabled", + Objects: []runtime.Object{ + NewChannel(), + NewService(), + NewPerChannelService(DefaultEnv), + ChannelReceiverPod(env.SystemNamespace, map[string]string{ + base.VolumeGenerationAnnotationKey: "0", + "annotation_to_preserve": "value_to_preserve", + }), + ChannelDispatcherPod(env.SystemNamespace, map[string]string{ + base.VolumeGenerationAnnotationKey: "0", + "annotation_to_preserve": "value_to_preserve", + }), + NewConfigMapWithTextData(system.Namespace(), DefaultEnv.GeneralConfigMapName, map[string]string{ + kafka.BootstrapServersConfigMapKey: ChannelBootstrapServers, + }), + }, + Key: testKey, + Ctx: feature.ToContext(context.Background(), feature.Flags{ + feature.OIDCAuthentication: feature.Enabled, + }), + WantUpdates: []clientgotesting.UpdateActionImpl{ + ConfigMapUpdate(env.DataPlaneConfigMapNamespace, env.ContractConfigMapName, env.ContractConfigMapFormat, &contract.Contract{ + Generation: 1, + Resources: []*contract.Resource{ + { + Uid: ChannelUUID, + Topics: []string{ChannelTopic()}, + BootstrapServers: ChannelBootstrapServers, + Reference: ChannelReference(), + Ingress: &contract.Ingress{ + Path: receiver.Path(ChannelNamespace, ChannelName), + Host: receiver.Host(ChannelNamespace, ChannelName), + }, + }, + }, + }), + ChannelReceiverPodUpdate(env.SystemNamespace, map[string]string{ + "annotation_to_preserve": "value_to_preserve", + base.VolumeGenerationAnnotationKey: "1", + }), + ChannelDispatcherPodUpdate(env.SystemNamespace, map[string]string{ + "annotation_to_preserve": "value_to_preserve", + base.VolumeGenerationAnnotationKey: "1", + }), + }, + SkipNamespaceValidation: true, // WantCreates compare the channel namespace with configmap namespace, so skip it + WantCreates: []runtime.Object{ + NewConfigMapWithBinaryData(env.DataPlaneConfigMapNamespace, env.ContractConfigMapName, nil), + }, + WantStatusUpdates: 
[]clientgotesting.UpdateActionImpl{ + { + Object: NewChannel( + WithInitKafkaChannelConditions, + StatusConfigParsed, + StatusConfigMapUpdatedReady(&env), + WithChannelTopicStatusAnnotation(ChannelTopic()), + StatusTopicReadyWithName(ChannelTopic()), + StatusDataPlaneAvailable, + ChannelAddressable(&env), + StatusProbeSucceeded, + WithChannelAddresses([]duckv1.Addressable{ + { + Name: pointer.String("http"), + URL: ChannelAddress(), + Audience: pointer.String(ChannelAudience), + }, + }), + WithChannelAddress(duckv1.Addressable{ + Name: pointer.String("http"), + URL: ChannelAddress(), + Audience: pointer.String(ChannelAudience), + }), + WithChannelAddessable(), + ), + }, + }, + WantPatches: []clientgotesting.PatchActionImpl{ + patchFinalizers(), + }, + WantEvents: []string{ + finalizerUpdatedEvent, + }, + }, } useTable(t, table, env) diff --git a/control-plane/pkg/reconciler/channel/controller.go b/control-plane/pkg/reconciler/channel/controller.go index e682e6d110..95538cbc84 100644 --- a/control-plane/pkg/reconciler/channel/controller.go +++ b/control-plane/pkg/reconciler/channel/controller.go @@ -87,7 +87,12 @@ func NewController(ctx context.Context, watcher configmap.Watcher, configs *conf logger := logging.FromContext(ctx) - featureStore := feature.NewStore(logging.FromContext(ctx).Named("feature-config-store")) + var globalResync func(obj interface{}) + featureStore := feature.NewStore(logging.FromContext(ctx).Named("feature-config-store"), func(name string, value interface{}) { + if globalResync != nil { + globalResync(nil) + } + }) featureStore.WatchConfigs(watcher) _, err := reconciler.GetOrCreateDataPlaneConfigMap(ctx) @@ -136,7 +141,7 @@ func NewController(ctx context.Context, watcher configmap.Watcher, configs *conf reconciler.Resolver = resolver.NewURIResolverFromTracker(ctx, impl.Tracker) - globalResync := func(_ interface{}) { + globalResync = func(_ interface{}) { impl.GlobalResync(channelInformer.Informer()) } diff --git 
a/control-plane/pkg/reconciler/channel/v2/channelv2.go b/control-plane/pkg/reconciler/channel/v2/channelv2.go index 40c434f105..8c2c7645f4 100644 --- a/control-plane/pkg/reconciler/channel/v2/channelv2.go +++ b/control-plane/pkg/reconciler/channel/v2/channelv2.go @@ -24,6 +24,10 @@ import ( "strings" "time" + "k8s.io/utils/pointer" + "knative.dev/eventing/pkg/auth" + "knative.dev/pkg/logging" + "github.com/IBM/sarama" "go.uber.org/multierr" "go.uber.org/zap" @@ -34,7 +38,6 @@ import ( "k8s.io/apimachinery/pkg/labels" "k8s.io/apimachinery/pkg/types" corelisters "k8s.io/client-go/listers/core/v1" - "k8s.io/utils/pointer" "knative.dev/eventing/pkg/apis/feature" "knative.dev/pkg/network" "knative.dev/pkg/resolver" @@ -278,18 +281,27 @@ func (r *Reconciler) ReconcileKind(ctx context.Context, channel *messagingv1beta return err } + featureFlags := feature.FromContext(ctx) + var audience *string + if featureFlags.IsOIDCAuthentication() { + audience = pointer.String(auth.GetAudience(messaging.SchemeGroupVersion.WithKind("KafkaChannel"), channel.ObjectMeta)) + logging.FromContext(ctx).Debugw("Setting the KafkaChannels audience", zap.String("audience", *audience)) + } else { + logging.FromContext(ctx).Debug("Clearing the KafkaChannels audience as OIDC is not enabled") + audience = nil + } + var addressableStatus duckv1.AddressStatus channelHttpsHost := network.GetServiceHostname(r.Env.IngressName, r.SystemNamespace) channelHttpHost := network.GetServiceHostname(channelService.Name, channel.Namespace) - transportEncryptionFlags := feature.FromContext(ctx) - if transportEncryptionFlags.IsPermissiveTransportEncryption() { + if featureFlags.IsPermissiveTransportEncryption() { caCerts, err := r.getCaCerts() if err != nil { return err } - httpAddress := receiver.ChannelHTTPAddress(channelHttpHost) - httpsAddress := receiver.HTTPSAddress(channelHttpsHost, channelService, caCerts) + httpAddress := receiver.ChannelHTTPAddress(channelHttpHost, audience) + httpsAddress := 
receiver.HTTPSAddress(channelHttpsHost, audience, channelService, caCerts) // Permissive mode: // - status.address http address with path-based routing // - status.addresses: @@ -297,7 +309,7 @@ func (r *Reconciler) ReconcileKind(ctx context.Context, channel *messagingv1beta // - http address with path-based routing addressableStatus.Addresses = []duckv1.Addressable{httpsAddress, httpAddress} addressableStatus.Address = &httpAddress - } else if transportEncryptionFlags.IsStrictTransportEncryption() { + } else if featureFlags.IsStrictTransportEncryption() { // Strict mode: (only https addresses) // - status.address https address with path-based routing // - status.addresses: @@ -307,11 +319,11 @@ func (r *Reconciler) ReconcileKind(ctx context.Context, channel *messagingv1beta return err } - httpsAddress := receiver.HTTPSAddress(channelHttpsHost, channelService, caCerts) + httpsAddress := receiver.HTTPSAddress(channelHttpsHost, audience, channelService, caCerts) addressableStatus.Addresses = []duckv1.Addressable{httpsAddress} addressableStatus.Address = &httpsAddress } else { - httpAddress := receiver.ChannelHTTPAddress(channelHttpHost) + httpAddress := receiver.ChannelHTTPAddress(channelHttpHost, audience) addressableStatus.Address = &httpAddress addressableStatus.Addresses = []duckv1.Addressable{httpAddress} } @@ -421,7 +433,7 @@ func (r *Reconciler) FinalizeKind(ctx context.Context, channel *messagingv1beta1 // See (under discussions KIPs, unlikely to be accepted as they are): // - https://cwiki.apache.org/confluence/pages/viewpage.action?pageId=181306446 // - https://cwiki.apache.org/confluence/display/KAFKA/KIP-286%3A+producer.send%28%29+should+not+block+on+metadata+update - address := receiver.HTTPAddress(r.IngressHost, channel) + address := receiver.HTTPAddress(r.IngressHost, nil, channel) proberAddressable := prober.ProberAddressable{ AddressStatus: &duckv1.AddressStatus{ Address: &address, @@ -691,6 +703,10 @@ func (r *Reconciler) 
getChannelContractResource(ctx context.Context, topic strin } } + if channel.Status.Address != nil && channel.Status.Address.Audience != nil { + resource.Ingress.Audience = *channel.Status.Address.Audience + } + egressConfig, err := coreconfig.EgressConfigFromDelivery(ctx, r.Resolver, channel, channel.Spec.Delivery, r.DefaultBackoffDelayMs) if err != nil { return nil, err diff --git a/control-plane/pkg/reconciler/channel/v2/channelv2_test.go b/control-plane/pkg/reconciler/channel/v2/channelv2_test.go index 84b7fbfac7..6a51b5b97f 100644 --- a/control-plane/pkg/reconciler/channel/v2/channelv2_test.go +++ b/control-plane/pkg/reconciler/channel/v2/channelv2_test.go @@ -1904,6 +1904,82 @@ func TestReconcileKind(t *testing.T) { finalizerUpdatedEvent, }, }, + + { + Name: "Reconciled normal - OIDC enabled", + Objects: []runtime.Object{ + NewChannel(), + NewConfigMapWithTextData(env.SystemNamespace, DefaultEnv.GeneralConfigMapName, map[string]string{ + kafka.BootstrapServersConfigMapKey: ChannelBootstrapServers, + }), + ChannelReceiverPod(env.SystemNamespace, map[string]string{ + base.VolumeGenerationAnnotationKey: "0", + "annotation_to_preserve": "value_to_preserve", + }), + }, + Key: testKey, + Ctx: feature.ToContext(context.Background(), feature.Flags{ + feature.OIDCAuthentication: feature.Enabled, + }), + WantUpdates: []clientgotesting.UpdateActionImpl{ + ConfigMapUpdate(env.DataPlaneConfigMapNamespace, env.ContractConfigMapName, env.ContractConfigMapFormat, &contract.Contract{ + Generation: 1, + Resources: []*contract.Resource{ + { + Uid: ChannelUUID, + Topics: []string{ChannelTopic()}, + BootstrapServers: ChannelBootstrapServers, + Reference: ChannelReference(), + Ingress: &contract.Ingress{ + Host: receiver.Host(ChannelNamespace, ChannelName), + }, + }, + }, + }), + ChannelReceiverPodUpdate(env.SystemNamespace, map[string]string{ + "annotation_to_preserve": "value_to_preserve", + base.VolumeGenerationAnnotationKey: "1", + }), + }, + SkipNamespaceValidation: true, // 
WantCreates compare the channel namespace with configmap namespace, so skip it + WantCreates: []runtime.Object{ + NewConfigMapWithBinaryData(env.DataPlaneConfigMapNamespace, env.ContractConfigMapName, nil), + NewPerChannelService(&env), + }, + WantStatusUpdates: []clientgotesting.UpdateActionImpl{ + { + Object: NewChannel( + WithInitKafkaChannelConditions, + StatusConfigParsed, + StatusConfigMapUpdatedReady(&env), + WithChannelTopicStatusAnnotation(ChannelTopic()), + StatusTopicReadyWithName(ChannelTopic()), + ChannelAddressable(&env), + StatusProbeSucceeded, + StatusChannelSubscribers(), + WithChannelAddresses([]duckv1.Addressable{ + { + Name: pointer.String("http"), + URL: ChannelAddress(), + Audience: pointer.String(ChannelAudience), + }, + }), + WithChannelAddress(duckv1.Addressable{ + Name: pointer.String("http"), + URL: ChannelAddress(), + Audience: pointer.String(ChannelAudience), + }), + WithChannelAddessable(), + ), + }, + }, + WantPatches: []clientgotesting.PatchActionImpl{ + patchFinalizers(), + }, + WantEvents: []string{ + finalizerUpdatedEvent, + }, + }, } table.Test(t, NewFactory(&env, func(ctx context.Context, listers *Listers, env *config.Env, row *TableRow) controller.Reconciler { diff --git a/control-plane/pkg/reconciler/consumer/consumer.go b/control-plane/pkg/reconciler/consumer/consumer.go index 684ed5485e..eda4cc81bc 100644 --- a/control-plane/pkg/reconciler/consumer/consumer.go +++ b/control-plane/pkg/reconciler/consumer/consumer.go @@ -141,6 +141,7 @@ func (r *Reconciler) reconcileContractEgress(ctx context.Context, c *kafkaintern } c.Status.SubscriberURI = destinationAddr.URL c.Status.SubscriberCACerts = destinationAddr.CACerts + c.Status.SubscriberAudience = destinationAddr.Audience egressConfig := &contract.EgressConfig{} if c.Spec.Delivery != nil { @@ -154,6 +155,9 @@ func (r *Reconciler) reconcileContractEgress(ctx context.Context, c *kafkaintern if egressConfig.DeadLetterCACerts != "" { c.Status.DeliveryStatus.DeadLetterSinkCACerts = 
pointer.String(egressConfig.DeadLetterCACerts) } + if egressConfig.DeadLetterAudience != "" { + c.Status.DeliveryStatus.DeadLetterSinkAudience = pointer.String(egressConfig.DeadLetterAudience) + } } egress := &contract.Egress{ @@ -176,11 +180,18 @@ func (r *Reconciler) reconcileContractEgress(ctx context.Context, c *kafkaintern if destinationAddr.CACerts != nil { egress.DestinationCACerts = *destinationAddr.CACerts } + if destinationAddr.Audience != nil { + egress.DestinationAudience = *destinationAddr.Audience + } if c.Spec.Configs.KeyType != nil { egress.KeyType = coreconfig.KeyTypeFromString(*c.Spec.Configs.KeyType) } + if c.Spec.OIDCServiceAccountName != nil { + egress.OidcServiceAccountName = *c.Spec.OIDCServiceAccountName + } + if err := r.reconcileReplyStrategy(ctx, c, egress); err != nil { return nil, fmt.Errorf("failed to reconcile reply strategy: %w", err) } @@ -290,6 +301,9 @@ func (r *Reconciler) reconcileReplyStrategy(ctx context.Context, c *kafkainterna if destination.CACerts != nil { egress.ReplyUrlCACerts = *destination.CACerts } + if destination.Audience != nil { + egress.ReplyUrlAudience = *destination.Audience + } return nil } if c.Spec.Reply.TopicReply != nil && c.Spec.Reply.TopicReply.Enabled { diff --git a/control-plane/pkg/reconciler/consumergroup/consumergroup.go b/control-plane/pkg/reconciler/consumergroup/consumergroup.go index 7aee0f6567..1776486a07 100644 --- a/control-plane/pkg/reconciler/consumergroup/consumergroup.go +++ b/control-plane/pkg/reconciler/consumergroup/consumergroup.go @@ -506,12 +506,18 @@ func (r *Reconciler) propagateStatus(ctx context.Context, cg *kafkainternals.Con if c.Status.SubscriberCACerts != nil { cg.Status.SubscriberCACerts = c.Status.SubscriberCACerts } + if c.Status.SubscriberAudience != nil { + cg.Status.SubscriberAudience = c.Status.SubscriberAudience + } if c.Status.DeliveryStatus.DeadLetterSinkURI != nil { cg.Status.DeliveryStatus.DeadLetterSinkURI = c.Status.DeadLetterSinkURI } if 
c.Status.DeliveryStatus.DeadLetterSinkCACerts != nil { cg.Status.DeliveryStatus.DeadLetterSinkCACerts = c.Status.DeadLetterSinkCACerts } + if c.Status.DeliveryStatus.DeadLetterSinkAudience != nil { + cg.Status.DeliveryStatus.DeadLetterSinkAudience = c.Status.DeadLetterSinkAudience + } } else if condition == nil { // Propagate only a single false condition cond := c.GetConditionSet().Manage(c.GetStatus()).GetTopLevelCondition() if cond.IsFalse() { @@ -530,6 +536,7 @@ func (r *Reconciler) propagateStatus(ctx context.Context, cg *kafkainternals.Con } cg.Status.SubscriberURI = subscriber.URL cg.Status.SubscriberCACerts = subscriber.CACerts + cg.Status.SubscriberAudience = subscriber.Audience } return condition, nil diff --git a/control-plane/pkg/reconciler/sink/kafka_sink.go b/control-plane/pkg/reconciler/sink/kafka_sink.go index 91ce8242cd..a77d09b915 100644 --- a/control-plane/pkg/reconciler/sink/kafka_sink.go +++ b/control-plane/pkg/reconciler/sink/kafka_sink.go @@ -211,6 +211,11 @@ func (r *Reconciler) reconcileKind(ctx context.Context, ks *eventing.KafkaSink) }, } } + + if ks.Status.Address != nil && ks.Status.Address.Audience != nil { + sinkConfig.Ingress.Audience = *ks.Status.Address.Audience + } + statusConditionManager.ConfigResolved() sinkIndex := coreconfig.FindResource(ct, ks.UID) @@ -255,8 +260,8 @@ func (r *Reconciler) reconcileKind(ctx context.Context, ks *eventing.KafkaSink) return err } - httpAddress := receiver.HTTPAddress(r.IngressHost, ks) - httpsAddress := receiver.HTTPSAddress(r.IngressHost, ks, caCerts) + httpAddress := receiver.HTTPAddress(r.IngressHost, nil, ks) + httpsAddress := receiver.HTTPSAddress(r.IngressHost, nil, ks, caCerts) // Permissive mode: // - status.address http address with path-based routing // - status.addresses: @@ -273,14 +278,14 @@ func (r *Reconciler) reconcileKind(ctx context.Context, ks *eventing.KafkaSink) if err != nil { return err } - httpsAddress := receiver.HTTPSAddress(r.IngressHost, ks, caCerts) + httpsAddress := 
receiver.HTTPSAddress(r.IngressHost, nil, ks, caCerts) addressableStatus.Address = &httpsAddress addressableStatus.Addresses = []duckv1.Addressable{httpsAddress} } else { // Disabled mode: // Unchange - httpAddress := receiver.HTTPAddress(r.IngressHost, ks) + httpAddress := receiver.HTTPAddress(r.IngressHost, nil, ks) addressableStatus.Address = &httpAddress addressableStatus.Addresses = []duckv1.Addressable{httpAddress} @@ -357,7 +362,7 @@ func (r *Reconciler) finalizeKind(ctx context.Context, ks *eventing.KafkaSink) e // See (under discussions KIPs, unlikely to be accepted as they are): // - https://cwiki.apache.org/confluence/pages/viewpage.action?pageId=181306446 // - https://cwiki.apache.org/confluence/display/KAFKA/KIP-286%3A+producer.send%28%29+should+not+block+on+metadata+update - address := receiver.HTTPAddress(r.IngressHost, ks) + address := receiver.HTTPAddress(r.IngressHost, nil, ks) proberAddressable := prober.ProberAddressable{ AddressStatus: &duckv1.AddressStatus{ Address: &address, diff --git a/control-plane/pkg/reconciler/source/controller.go b/control-plane/pkg/reconciler/source/controller.go index fb8fb503eb..a74ac95f50 100644 --- a/control-plane/pkg/reconciler/source/controller.go +++ b/control-plane/pkg/reconciler/source/controller.go @@ -19,6 +19,9 @@ package source import ( "context" + "knative.dev/eventing/pkg/apis/feature" + "knative.dev/pkg/logging" + "k8s.io/client-go/tools/cache" "knative.dev/pkg/configmap" "knative.dev/pkg/controller" @@ -34,29 +37,51 @@ import ( "knative.dev/eventing-kafka-broker/control-plane/pkg/reconciler/consumergroup" kedaclient "knative.dev/eventing-kafka-broker/third_party/pkg/client/injection/client" + + kubeclient "knative.dev/pkg/client/injection/kube/client" + serviceaccountinformer "knative.dev/pkg/client/injection/kube/informers/core/v1/serviceaccount" ) func NewController(ctx context.Context, watcher configmap.Watcher) *controller.Impl { kafkaInformer := kafkainformer.Get(ctx) consumerGroupInformer := 
consumergroupinformer.Get(ctx) + serviceaccountInformer := serviceaccountinformer.Get(ctx) sources.RegisterAlternateKafkaConditionSet(conditionSet) + var globalResync func() + featureStore := feature.NewStore(logging.FromContext(ctx).Named("feature-config-store"), func(name string, value interface{}) { + if globalResync != nil { + globalResync() + } + }) + featureStore.WatchConfigs(watcher) + r := &Reconciler{ - ConsumerGroupLister: consumerGroupInformer.Lister(), - InternalsClient: consumergroupclient.Get(ctx), - KedaClient: kedaclient.Get(ctx), - KafkaFeatureFlags: config.DefaultFeaturesConfig(), + KubeClient: kubeclient.Get(ctx), + ConsumerGroupLister: consumerGroupInformer.Lister(), + InternalsClient: consumergroupclient.Get(ctx), + KedaClient: kedaclient.Get(ctx), + KafkaFeatureFlags: config.DefaultFeaturesConfig(), + ServiceAccountLister: serviceaccountInformer.Lister(), } - impl := kafkasource.NewImpl(ctx, r) + impl := kafkasource.NewImpl(ctx, r, func(impl *controller.Impl) controller.Options { + return controller.Options{ + ConfigStore: featureStore, + } + }) + + globalResync = func() { + impl.GlobalResync(kafkaInformer.Informer()) + } kafkaInformer.Informer().AddEventHandler(controller.HandleAll(impl.Enqueue)) configStore := config.NewStore(ctx, func(name string, value *config.KafkaFeatureFlags) { r.KafkaFeatureFlags.Reset(value) - impl.GlobalResync(kafkaInformer.Informer()) + globalResync() }) configStore.WatchConfigs(watcher) @@ -65,5 +90,12 @@ func NewController(ctx context.Context, watcher configmap.Watcher) *controller.I FilterFunc: consumergroup.Filter("kafkasource"), Handler: controller.HandleAll(consumergroup.Enqueue("kafkasource", impl.EnqueueKey)), }) + + // Reconcile KafkaSource when the OIDC service account changes + serviceaccountInformer.Informer().AddEventHandler(cache.FilteringResourceEventHandler{ + FilterFunc: controller.FilterController(&sources.KafkaSource{}), + Handler: controller.HandleAll(impl.EnqueueControllerOf), + }) + return impl 
} diff --git a/control-plane/pkg/reconciler/source/controller_test.go b/control-plane/pkg/reconciler/source/controller_test.go index 3ccfefb94e..eaff75ab92 100644 --- a/control-plane/pkg/reconciler/source/controller_test.go +++ b/control-plane/pkg/reconciler/source/controller_test.go @@ -19,6 +19,8 @@ package source import ( "testing" + "knative.dev/eventing/pkg/apis/feature" + corev1 "k8s.io/api/core/v1" metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" "k8s.io/apimachinery/pkg/runtime" @@ -40,6 +42,7 @@ import ( "knative.dev/eventing-kafka-broker/control-plane/pkg/config" kedaclient "knative.dev/eventing-kafka-broker/third_party/pkg/client/injection/client/fake" + _ "knative.dev/pkg/client/injection/kube/informers/core/v1/serviceaccount/fake" ) func TestNewController(t *testing.T) { @@ -69,6 +72,10 @@ func TestNewController(t *testing.T) { ObjectMeta: metav1.ObjectMeta{ Name: "config-kafka-features", }, + }, &corev1.ConfigMap{ + ObjectMeta: metav1.ObjectMeta{ + Name: feature.FlagsConfigName, + }, })) if controller == nil { t.Error("failed to create controller: ") diff --git a/control-plane/pkg/reconciler/source/source.go b/control-plane/pkg/reconciler/source/source.go index 34306f2558..acd5387cdb 100644 --- a/control-plane/pkg/reconciler/source/source.go +++ b/control-plane/pkg/reconciler/source/source.go @@ -21,6 +21,11 @@ import ( "fmt" "strings" + "k8s.io/client-go/kubernetes" + corelisters "k8s.io/client-go/listers/core/v1" + "knative.dev/eventing/pkg/apis/feature" + "knative.dev/eventing/pkg/auth" + "k8s.io/apimachinery/pkg/api/equality" apierrors "k8s.io/apimachinery/pkg/api/errors" metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" @@ -57,10 +62,12 @@ var ( ) type Reconciler struct { - ConsumerGroupLister internalslst.ConsumerGroupLister - InternalsClient internalsclient.Interface - KedaClient kedaclientset.Interface - KafkaFeatureFlags *config.KafkaFeatureFlags + KubeClient kubernetes.Interface + ConsumerGroupLister internalslst.ConsumerGroupLister + 
InternalsClient internalsclient.Interface + KedaClient kedaclientset.Interface + KafkaFeatureFlags *config.KafkaFeatureFlags + ServiceAccountLister corelisters.ServiceAccountLister } func (r *Reconciler) ReconcileKind(ctx context.Context, ks *sources.KafkaSource) reconciler.Event { @@ -71,6 +78,13 @@ func (r *Reconciler) ReconcileKind(ctx context.Context, ks *sources.KafkaSource) } ks.Status.Selector = selector.String() + err = auth.SetupOIDCServiceAccount(ctx, feature.FromContext(ctx), r.ServiceAccountLister, r.KubeClient, sources.SchemeGroupVersion.WithKind("KafkaSource"), ks.ObjectMeta, &ks.Status, func(as *duckv1.AuthStatus) { + ks.Status.Auth = as + }) + if err != nil { + return fmt.Errorf("could not setup OIDC service account for KafkaSource %s/%s: %w", ks.Name, ks.Namespace, err) + } + cg, err := r.reconcileConsumerGroup(ctx, ks) if err != nil { ks.GetConditionSet().Manage(&ks.Status).MarkFalse(KafkaConditionConsumerGroup, "failed to reconcile consumer group", err.Error()) @@ -187,6 +201,10 @@ func (r Reconciler) reconcileConsumerGroup(ctx context.Context, ks *sources.Kafk expectedCg.Spec.Template.Spec.Configs.KeyType = &kt } + if ks.Status.Auth != nil { + expectedCg.Spec.Template.Spec.OIDCServiceAccountName = ks.Status.Auth.ServiceAccountName + } + // TODO: make keda annotation values configurable and maybe unexposed expectedCg.Annotations = keda.SetAutoscalingAnnotations(ks.Annotations) @@ -246,8 +264,9 @@ func propagateConsumerGroupStatus(cg *internalscg.ConsumerGroup, ks *sources.Kaf } } ks.Status.MarkSink(&duckv1.Addressable{ - URL: cg.Status.SubscriberURI, - CACerts: cg.Status.SubscriberCACerts, + URL: cg.Status.SubscriberURI, + CACerts: cg.Status.SubscriberCACerts, + Audience: cg.Status.SubscriberAudience, }) ks.Status.Placeable = cg.Status.Placeable if cg.Status.Replicas != nil { diff --git a/control-plane/pkg/reconciler/source/source_test.go b/control-plane/pkg/reconciler/source/source_test.go index dfec70ad63..cfa162741e 100644 --- 
a/control-plane/pkg/reconciler/source/source_test.go +++ b/control-plane/pkg/reconciler/source/source_test.go @@ -21,6 +21,9 @@ import ( "fmt" "testing" + "knative.dev/eventing/pkg/apis/feature" + "knative.dev/eventing/pkg/auth" + "github.com/google/go-cmp/cmp" corev1 "k8s.io/api/core/v1" metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" @@ -52,6 +55,8 @@ import ( . "knative.dev/eventing-kafka-broker/control-plane/pkg/reconciler/testing" kedaclient "knative.dev/eventing-kafka-broker/third_party/pkg/client/injection/client/fake" + + fakekubeclient "knative.dev/pkg/client/injection/kube/client/fake" ) const ( @@ -148,6 +153,7 @@ func TestReconcileKind(t *testing.T) { StatusSourceConsumerGroupUnknown(), StatusSourceSinkResolved(""), StatusSourceSelector(), + StatusSourceOIDCIdentityCreatedSucceededBecauseOIDCFeatureDisabled(), ), }, }, @@ -202,6 +208,7 @@ func TestReconcileKind(t *testing.T) { StatusSourceConsumerGroupUnknown(), StatusSourceSinkResolved(""), StatusSourceSelector(), + StatusSourceOIDCIdentityCreatedSucceededBecauseOIDCFeatureDisabled(), ), }, }, @@ -256,6 +263,7 @@ func TestReconcileKind(t *testing.T) { StatusSourceConsumerGroupUnknown(), StatusSourceSinkResolved(""), StatusSourceSelector(), + StatusSourceOIDCIdentityCreatedSucceededBecauseOIDCFeatureDisabled(), ), }, }, @@ -315,6 +323,7 @@ func TestReconcileKind(t *testing.T) { StatusSourceConsumerGroupUnknown(), StatusSourceSinkResolved(""), StatusSourceSelector(), + StatusSourceOIDCIdentityCreatedSucceededBecauseOIDCFeatureDisabled(), ), }, }, @@ -370,6 +379,7 @@ func TestReconcileKind(t *testing.T) { StatusSourceConsumerGroupUnknown(), StatusSourceSinkResolved(""), StatusSourceSelector(), + StatusSourceOIDCIdentityCreatedSucceededBecauseOIDCFeatureDisabled(), ), }, }, @@ -506,6 +516,7 @@ func TestReconcileKind(t *testing.T) { StatusSourceSinkResolved(""), SourceNetSaslTls(true), StatusSourceSelector(), + StatusSourceOIDCIdentityCreatedSucceededBecauseOIDCFeatureDisabled(), ), }, }, @@ -626,6 +637,7 @@ 
func TestReconcileKind(t *testing.T) { StatusSourceSinkResolved(""), SourceNetSaslTls(false), StatusSourceSelector(), + StatusSourceOIDCIdentityCreatedSucceededBecauseOIDCFeatureDisabled(), ), }, }, @@ -688,6 +700,7 @@ func TestReconcileKind(t *testing.T) { StatusSourceConsumerGroupUnknown(), StatusSourceSinkResolved(""), StatusSourceSelector(), + StatusSourceOIDCIdentityCreatedSucceededBecauseOIDCFeatureDisabled(), ), }, }, @@ -743,6 +756,7 @@ func TestReconcileKind(t *testing.T) { StatusSourceSinkResolved(""), StatusSourceSelector(), WithAutoscalingAnnotationsSource(), + StatusSourceOIDCIdentityCreatedSucceededBecauseOIDCFeatureDisabled(), ), }, }, @@ -832,6 +846,7 @@ func TestReconcileKind(t *testing.T) { StatusSourceConsumerGroup(), StatusSourceSinkResolved(""), StatusSourceSelector(), + StatusSourceOIDCIdentityCreatedSucceededBecauseOIDCFeatureDisabled(), ), }, }, @@ -897,6 +912,7 @@ func TestReconcileKind(t *testing.T) { StatusSourceConsumerGroup(), StatusSourceSinkResolved(""), StatusSourceSelector(), + StatusSourceOIDCIdentityCreatedSucceededBecauseOIDCFeatureDisabled(), ), }, }, @@ -963,6 +979,7 @@ func TestReconcileKind(t *testing.T) { StatusSourceSinkResolved(""), StatusSourceSelector(), WithAutoscalingAnnotationsSource(), + StatusSourceOIDCIdentityCreatedSucceededBecauseOIDCFeatureDisabled(), ), }, }, @@ -1027,6 +1044,7 @@ func TestReconcileKind(t *testing.T) { StatusSourceConsumerGroupUnknown(), StatusSourceSinkResolved(""), StatusSourceSelector(), + StatusSourceOIDCIdentityCreatedSucceededBecauseOIDCFeatureDisabled(), ), }, }, @@ -1080,6 +1098,7 @@ func TestReconcileKind(t *testing.T) { StatusSourceSinkResolved(""), StatusSourceSelector(), WithAutoscalingAnnotationsSource(), + StatusSourceOIDCIdentityCreatedSucceededBecauseOIDCFeatureDisabled(), ), }, }, @@ -1131,6 +1150,7 @@ func TestReconcileKind(t *testing.T) { StatusSourceConsumerGroupUnknown(), StatusSourceSinkResolved(""), StatusSourceSelector(), + 
StatusSourceOIDCIdentityCreatedSucceededBecauseOIDCFeatureDisabled(), ), }, }, @@ -1183,6 +1203,7 @@ func TestReconcileKind(t *testing.T) { StatusSourceConsumerGroupFailed("failed", "failed"), StatusSourceSinkResolved(""), StatusSourceSelector(), + StatusSourceOIDCIdentityCreatedSucceededBecauseOIDCFeatureDisabled(), ), }, }, @@ -1237,6 +1258,7 @@ func TestReconcileKind(t *testing.T) { StatusSourceSinkResolved(""), StatusSourceConsumerGroupReplicas(1), StatusSourceSelector(), + StatusSourceOIDCIdentityCreatedSucceededBecauseOIDCFeatureDisabled(), ), }, }, @@ -1325,6 +1347,7 @@ func TestReconcileKind(t *testing.T) { StatusSourceSinkResolved(""), StatusSourceConsumerGroupReplicas(1), StatusSourceSelector(), + StatusSourceOIDCIdentityCreatedSucceededBecauseOIDCFeatureDisabled(), ), }, }, @@ -1385,6 +1408,7 @@ func TestReconcileKind(t *testing.T) { StatusSourceSinkResolved(""), StatusSourceConsumerGroupReplicas(1), StatusSourceSelector(), + StatusSourceOIDCIdentityCreatedSucceededBecauseOIDCFeatureDisabled(), ), }, }, @@ -1439,6 +1463,7 @@ func TestReconcileKind(t *testing.T) { StatusSourceConsumerGroupUnknown(), StatusSourceSinkResolved(""), StatusSourceSelector(), + StatusSourceOIDCIdentityCreatedSucceededBecauseOIDCFeatureDisabled(), ), }, }, @@ -1493,6 +1518,67 @@ func TestReconcileKind(t *testing.T) { StatusSourceConsumerGroupUnknown(), StatusSourceSinkResolved(""), StatusSourceSelector(), + StatusSourceOIDCIdentityCreatedSucceededBecauseOIDCFeatureDisabled(), + ), + }, + }, + WantPatches: []clientgotesting.PatchActionImpl{ + patchFinalizers(), + }, + WantEvents: []string{ + finalizerUpdatedEvent, + }, + }, + { + Name: "Reconciled normal - with OIDC enabled", + Ctx: feature.ToContext(context.Background(), feature.Flags{ + feature.OIDCAuthentication: feature.Enabled, + }), + Objects: []runtime.Object{ + NewSource(), + }, + Key: testKey, + WantCreates: []runtime.Object{ + makeKafkaSourceOIDCServiceAccount(), + NewConsumerGroup( + WithConsumerGroupFinalizer(), + 
WithConsumerGroupName(SourceUUID), + WithConsumerGroupNamespace(SourceNamespace), + WithConsumerGroupOwnerRef(kmeta.NewControllerRef(NewSource())), + WithConsumerGroupMetaLabels(OwnerAsSourceLabel), + WithConsumerGroupLabels(ConsumerSourceLabel), + ConsumerGroupConsumerSpec(NewConsumerSpec( + ConsumerTopics(SourceTopics[0], SourceTopics[1]), + ConsumerConfigs( + ConsumerGroupIdConfig(SourceConsumerGroup), + ConsumerBootstrapServersConfig(SourceBootstrapServers), + ), + ConsumerAuth(NewConsumerSpecAuth()), + ConsumerDelivery( + NewConsumerSpecDelivery( + sources.Ordered, + NewConsumerTimeout("PT600S"), + NewConsumerRetry(10), + NewConsumerBackoffDelay("PT0.3S"), + NewConsumerBackoffPolicy(eventingduck.BackoffPolicyExponential), + ConsumerInitialOffset(sources.OffsetLatest), + ), + ), + ConsumerSubscriber(NewSourceSinkReference()), + ConsumerReply(ConsumerNoReply()), + ConsumerOIDCServiceAccountName(makeKafkaSourceOIDCServiceAccount().Name), + )), + ConsumerGroupReplicas(1), + ), + }, + WantStatusUpdates: []clientgotesting.UpdateActionImpl{ + { + Object: NewSource( + StatusSourceConsumerGroupUnknown(), + StatusSourceSinkResolved(""), + StatusSourceSelector(), + StatusSourceOIDCIdentityCreatedSucceeded(), + StatusSourceOIDCIdentity(makeKafkaSourceOIDCServiceAccount().Name), ), }, }, @@ -1523,10 +1609,12 @@ func TestReconcileKind(t *testing.T) { } reconciler := &Reconciler{ - ConsumerGroupLister: listers.GetConsumerGroupLister(), - InternalsClient: fakeconsumergroupinformer.Get(ctx), - KedaClient: kedaclient.Get(ctx), - KafkaFeatureFlags: configapis.DefaultFeaturesConfig(), + ConsumerGroupLister: listers.GetConsumerGroupLister(), + InternalsClient: fakeconsumergroupinformer.Get(ctx), + KedaClient: kedaclient.Get(ctx), + KafkaFeatureFlags: configapis.DefaultFeaturesConfig(), + ServiceAccountLister: listers.GetServiceAccountLister(), + KubeClient: fakekubeclient.Get(ctx), } reconciler.KafkaFeatureFlags = configapis.FromContext(store.ToContext(ctx)) @@ -1634,3 +1722,11 @@ 
func patchFinalizers() clientgotesting.PatchActionImpl { action.Patch = []byte(patch) return action } + +func makeKafkaSourceOIDCServiceAccount() *corev1.ServiceAccount { + return auth.GetOIDCServiceAccountForResource(sources.SchemeGroupVersion.WithKind("KafkaSource"), metav1.ObjectMeta{ + Name: SourceName, + Namespace: SourceNamespace, + UID: SourceUUID, + }) +} diff --git a/control-plane/pkg/reconciler/testing/objects_channel.go b/control-plane/pkg/reconciler/testing/objects_channel.go index b87873c204..76d3c5850c 100644 --- a/control-plane/pkg/reconciler/testing/objects_channel.go +++ b/control-plane/pkg/reconciler/testing/objects_channel.go @@ -52,6 +52,7 @@ const ( ChannelUUID = "c1234567-8901-2345-6789-123456789101" ChannelBootstrapServers = "kafka-1:9092,kafka-2:9093" ChannelServiceName = "kc-kn-channel" + ChannelAudience = "messaging.knative.dev/kafkachannel/" + ChannelNamespace + "/" + ChannelName Subscription1Name = "sub-1" Subscription2Name = "sub-2" diff --git a/control-plane/pkg/reconciler/testing/objects_consumer.go b/control-plane/pkg/reconciler/testing/objects_consumer.go index a4cebb4c3d..433295d315 100644 --- a/control-plane/pkg/reconciler/testing/objects_consumer.go +++ b/control-plane/pkg/reconciler/testing/objects_consumer.go @@ -200,6 +200,12 @@ func ConsumerTopics(topics ...string) ConsumerSpecOption { } } +func ConsumerOIDCServiceAccountName(sa string) ConsumerSpecOption { + return func(c *kafkainternals.ConsumerSpec) { + c.OIDCServiceAccountName = &sa + } +} + func ConsumerPlacement(pb kafkainternals.PodBind) ConsumerSpecOption { return func(c *kafkainternals.ConsumerSpec) { c.PodBind = &pb diff --git a/control-plane/pkg/reconciler/testing/objects_source.go b/control-plane/pkg/reconciler/testing/objects_source.go index 4762ece316..af11432cf7 100644 --- a/control-plane/pkg/reconciler/testing/objects_source.go +++ b/control-plane/pkg/reconciler/testing/objects_source.go @@ -17,6 +17,8 @@ package testing import ( + "fmt" + corev1 
"k8s.io/api/core/v1" metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" "k8s.io/apimachinery/pkg/runtime" @@ -24,6 +26,7 @@ import ( "knative.dev/eventing-kafka-broker/control-plane/pkg/apis/bindings/v1beta1" sources "knative.dev/eventing-kafka-broker/control-plane/pkg/apis/sources/v1beta1" eventingduck "knative.dev/eventing/pkg/apis/duck/v1" + "knative.dev/eventing/pkg/apis/feature" "knative.dev/eventing/pkg/eventingtls/eventingtlstesting" "knative.dev/pkg/apis" duckv1 "knative.dev/pkg/apis/duck/v1" @@ -252,6 +255,29 @@ func StatusSourceSinkNotResolved(err string) KRShapedOption { } } +func StatusSourceOIDCIdentityCreatedSucceededBecauseOIDCFeatureDisabled() KRShapedOption { + return func(obj duckv1.KRShaped) { + ks := obj.(*sources.KafkaSource) + ks.Status.MarkOIDCIdentityCreatedSucceededWithReason(fmt.Sprintf("%s feature disabled", feature.OIDCAuthentication), "") + } +} + +func StatusSourceOIDCIdentityCreatedSucceeded() KRShapedOption { + return func(obj duckv1.KRShaped) { + ks := obj.(*sources.KafkaSource) + ks.Status.MarkOIDCIdentityCreatedSucceeded() + } +} + +func StatusSourceOIDCIdentity(saName string) KRShapedOption { + return func(obj duckv1.KRShaped) { + ks := obj.(*sources.KafkaSource) + ks.Status.Auth = &duckv1.AuthStatus{ + ServiceAccountName: &saName, + } + } +} + func SourceReference() *contract.Reference { return &contract.Reference{ Namespace: SourceNamespace, diff --git a/control-plane/pkg/reconciler/trigger/controller.go b/control-plane/pkg/reconciler/trigger/controller.go index 2243f1ef8f..b9b78f32c1 100644 --- a/control-plane/pkg/reconciler/trigger/controller.go +++ b/control-plane/pkg/reconciler/trigger/controller.go @@ -44,6 +44,7 @@ import ( triggerinformer "knative.dev/eventing/pkg/client/injection/informers/eventing/v1/trigger" triggerreconciler "knative.dev/eventing/pkg/client/injection/reconciler/eventing/v1/trigger" eventinglisters "knative.dev/eventing/pkg/client/listers/eventing/v1" + serviceaccountinformer 
"knative.dev/pkg/client/injection/kube/informers/core/v1/serviceaccount" "knative.dev/eventing-kafka-broker/control-plane/pkg/config" "knative.dev/eventing-kafka-broker/control-plane/pkg/kafka" @@ -64,6 +65,7 @@ func NewController(ctx context.Context, watcher configmap.Watcher, configs *conf brokerInformer := brokerinformer.Get(ctx) triggerInformer := triggerinformer.Get(ctx) triggerLister := triggerInformer.Lister() + serviceaccountInformer := serviceaccountinformer.Get(ctx) reconciler := &Reconciler{ Reconciler: &base.Reconciler{ @@ -91,6 +93,7 @@ func NewController(ctx context.Context, watcher configmap.Watcher, configs *conf NewKafkaClient: sarama.NewClient, NewKafkaClusterAdminClient: sarama.NewClusterAdmin, InitOffsetsFunc: offset.InitOffsets, + ServiceAccountLister: serviceaccountInformer.Lister(), } impl := triggerreconciler.NewImpl(ctx, reconciler, func(impl *controller.Impl) controller.Options { @@ -121,6 +124,13 @@ func NewController(ctx context.Context, watcher configmap.Watcher, configs *conf impl.FilteredGlobalResync(filterTriggers(reconciler.BrokerLister, kafka.BrokerClass, FinalizerName), triggerInformer.Informer()) } + featureStore := feature.NewStore(logging.FromContext(ctx).Named("feature-config-store"), func(name string, value interface{}) { + if globalResync != nil { + globalResync(nil) + } + }) + featureStore.WatchConfigs(watcher) + configmapInformer.Informer().AddEventHandler(cache.FilteringResourceEventHandler{ FilterFunc: controller.FilterWithNameAndNamespace(configs.DataPlaneConfigMapNamespace, configs.ContractConfigMapName), Handler: cache.ResourceEventHandlerFuncs{ @@ -132,6 +142,12 @@ func NewController(ctx context.Context, watcher configmap.Watcher, configs *conf reconciler.Tracker = impl.Tracker secretinformer.Get(ctx).Informer().AddEventHandler(controller.HandleAll(reconciler.Tracker.OnChanged)) + // Reconcile Trigger when the OIDC service account changes + 
serviceaccountInformer.Informer().AddEventHandler(cache.FilteringResourceEventHandler{ + FilterFunc: controller.FilterController(&eventing.Trigger{}), + Handler: controller.HandleAll(impl.EnqueueControllerOf), + }) + return impl } diff --git a/control-plane/pkg/reconciler/trigger/controller_test.go b/control-plane/pkg/reconciler/trigger/controller_test.go index 36edb063a6..7fb6b03ad3 100644 --- a/control-plane/pkg/reconciler/trigger/controller_test.go +++ b/control-plane/pkg/reconciler/trigger/controller_test.go @@ -27,6 +27,7 @@ import ( _ "knative.dev/pkg/client/injection/kube/informers/core/v1/configmap/fake" _ "knative.dev/pkg/client/injection/kube/informers/core/v1/pod/fake" _ "knative.dev/pkg/client/injection/kube/informers/core/v1/secret/fake" + _ "knative.dev/pkg/client/injection/kube/informers/core/v1/serviceaccount/fake" "knative.dev/pkg/configmap" reconcilertesting "knative.dev/pkg/reconciler/testing" diff --git a/control-plane/pkg/reconciler/trigger/namespaced_controller.go b/control-plane/pkg/reconciler/trigger/namespaced_controller.go index 7694edb804..d6f4a1ab45 100644 --- a/control-plane/pkg/reconciler/trigger/namespaced_controller.go +++ b/control-plane/pkg/reconciler/trigger/namespaced_controller.go @@ -27,11 +27,13 @@ import ( configmapinformer "knative.dev/pkg/client/injection/kube/informers/core/v1/configmap" podinformer "knative.dev/pkg/client/injection/kube/informers/core/v1/pod" secretinformer "knative.dev/pkg/client/injection/kube/informers/core/v1/secret" + serviceaccountinformer "knative.dev/pkg/client/injection/kube/informers/core/v1/serviceaccount" "knative.dev/pkg/configmap" "knative.dev/pkg/controller" "knative.dev/pkg/logging" "knative.dev/pkg/resolver" + eventing "knative.dev/eventing/pkg/apis/eventing/v1" "knative.dev/eventing/pkg/apis/feature" eventingclient "knative.dev/eventing/pkg/client/injection/client" brokerinformer "knative.dev/eventing/pkg/client/injection/informers/eventing/v1/broker" @@ -57,6 +59,7 @@ func 
NewNamespacedController(ctx context.Context, watcher configmap.Watcher, con brokerInformer := brokerinformer.Get(ctx) triggerInformer := triggerinformer.Get(ctx) triggerLister := triggerInformer.Lister() + serviceaccountInformer := serviceaccountinformer.Get(ctx) reconciler := &NamespacedReconciler{ Reconciler: &base.Reconciler{ @@ -76,6 +79,7 @@ func NewNamespacedController(ctx context.Context, watcher configmap.Watcher, con }, BrokerLister: brokerInformer.Lister(), ConfigMapLister: configmapInformer.Lister(), + ServiceAccountLister: serviceaccountInformer.Lister(), EventingClient: eventingclient.Get(ctx), Env: configs, NewKafkaClient: sarama.NewClient, @@ -111,6 +115,13 @@ func NewNamespacedController(ctx context.Context, watcher configmap.Watcher, con impl.GlobalResync(brokerInformer.Informer()) } + featureStore := feature.NewStore(logging.FromContext(ctx).Named("feature-config-store"), func(name string, value interface{}) { + if globalResync != nil { + globalResync(nil) + } + }) + featureStore.WatchConfigs(watcher) + configmapInformer.Informer().AddEventHandler(cache.FilteringResourceEventHandler{ FilterFunc: kafka.FilterWithLabel(kafka.NamespacedBrokerDataplaneLabelKey, kafka.NamespacedBrokerDataplaneLabelValue), Handler: cache.ResourceEventHandlerFuncs{ @@ -126,5 +137,11 @@ func NewNamespacedController(ctx context.Context, watcher configmap.Watcher, con reconciler.Tracker = impl.Tracker secretinformer.Get(ctx).Informer().AddEventHandler(controller.HandleAll(reconciler.Tracker.OnChanged)) + // Reconcile Trigger when the OIDC service account changes + serviceaccountInformer.Informer().AddEventHandler(cache.FilteringResourceEventHandler{ + FilterFunc: controller.FilterController(&eventing.Trigger{}), + Handler: controller.HandleAll(impl.EnqueueControllerOf), + }) + return impl } diff --git a/control-plane/pkg/reconciler/trigger/namespaced_trigger.go b/control-plane/pkg/reconciler/trigger/namespaced_trigger.go index dc09aa8891..8752f501aa 100644 --- 
a/control-plane/pkg/reconciler/trigger/namespaced_trigger.go +++ b/control-plane/pkg/reconciler/trigger/namespaced_trigger.go @@ -37,10 +37,11 @@ type NamespacedReconciler struct { *base.Reconciler *FlagsHolder - BrokerLister eventinglisters.BrokerLister - ConfigMapLister corelisters.ConfigMapLister - EventingClient eventingclientset.Interface - Resolver *resolver.URIResolver + BrokerLister eventinglisters.BrokerLister + ConfigMapLister corelisters.ConfigMapLister + ServiceAccountLister corelisters.ServiceAccountLister + EventingClient eventingclientset.Interface + Resolver *resolver.URIResolver Env *config.Env @@ -81,11 +82,12 @@ func (r *NamespacedReconciler) createReconcilerForTriggerInstance(trigger *event FlagsHolder: &FlagsHolder{ Flags: r.Flags, }, - BrokerLister: r.BrokerLister, - ConfigMapLister: r.ConfigMapLister, - EventingClient: r.EventingClient, - Resolver: r.Resolver, - Env: r.Env, + BrokerLister: r.BrokerLister, + ConfigMapLister: r.ConfigMapLister, + ServiceAccountLister: r.ServiceAccountLister, + EventingClient: r.EventingClient, + Resolver: r.Resolver, + Env: r.Env, // override BrokerClass: kafka.NamespacedBrokerClass, DataPlaneConfigMapLabeler: kafka.NamespacedDataplaneLabelConfigmapOption, diff --git a/control-plane/pkg/reconciler/trigger/namespaced_trigger_test.go b/control-plane/pkg/reconciler/trigger/namespaced_trigger_test.go index 35323102a8..91f8b01001 100644 --- a/control-plane/pkg/reconciler/trigger/namespaced_trigger_test.go +++ b/control-plane/pkg/reconciler/trigger/namespaced_trigger_test.go @@ -129,6 +129,7 @@ func namespacedTriggerReconciliation(t *testing.T, format string, env config.Env withTriggerSubscriberResolvedSucceeded(contract.DeliveryOrder_UNORDERED), withTriggerStatusGroupIdAnnotation(triggerConsumerGroup), reconcilertesting.WithTriggerDeadLetterSinkNotConfigured(), + reconcilertesting.WithTriggerOIDCIdentityCreatedSucceededBecauseOIDCFeatureDisabled(), ), }, }, @@ -163,11 +164,12 @@ func useNamespacedTable(t *testing.T, 
table TableTest, env *config.Env) { FlagsHolder: &FlagsHolder{ Flags: nil, }, - BrokerLister: listers.GetBrokerLister(), - ConfigMapLister: listers.GetConfigMapLister(), - EventingClient: eventingclient.Get(ctx), - Resolver: nil, - Env: env, + BrokerLister: listers.GetBrokerLister(), + ConfigMapLister: listers.GetConfigMapLister(), + ServiceAccountLister: listers.GetServiceAccountLister(), + EventingClient: eventingclient.Get(ctx), + Resolver: nil, + Env: env, InitOffsetsFunc: func(ctx context.Context, kafkaClient sarama.Client, kafkaAdminClient sarama.ClusterAdmin, topics []string, consumerGroup string) (int32, error) { return 1, nil }, diff --git a/control-plane/pkg/reconciler/trigger/trigger.go b/control-plane/pkg/reconciler/trigger/trigger.go index a2393ad65d..52eace88fa 100644 --- a/control-plane/pkg/reconciler/trigger/trigger.go +++ b/control-plane/pkg/reconciler/trigger/trigger.go @@ -33,8 +33,10 @@ import ( eventing "knative.dev/eventing/pkg/apis/eventing/v1" "knative.dev/eventing/pkg/apis/feature" + "knative.dev/eventing/pkg/auth" eventingclientset "knative.dev/eventing/pkg/client/clientset/versioned" eventinglisters "knative.dev/eventing/pkg/client/listers/eventing/v1" + duckv1 "knative.dev/pkg/apis/duck/v1" apisconfig "knative.dev/eventing-kafka-broker/control-plane/pkg/apis/config" sources "knative.dev/eventing-kafka-broker/control-plane/pkg/apis/sources/v1beta1" @@ -61,10 +63,11 @@ type Reconciler struct { *base.Reconciler *FlagsHolder - BrokerLister eventinglisters.BrokerLister - ConfigMapLister corelisters.ConfigMapLister - EventingClient eventingclientset.Interface - Resolver *resolver.URIResolver + BrokerLister eventinglisters.BrokerLister + ConfigMapLister corelisters.ConfigMapLister + ServiceAccountLister corelisters.ServiceAccountLister + EventingClient eventingclientset.Interface + Resolver *resolver.URIResolver Env *config.Env @@ -87,7 +90,7 @@ type Reconciler struct { } func (r *Reconciler) ReconcileKind(ctx context.Context, trigger 
*eventing.Trigger) reconciler.Event { - trigger.Status.MarkOIDCIdentityCreatedNotSupported() + return retry.RetryOnConflict(retry.DefaultBackoff, func() error { return r.reconcileKind(ctx, trigger) }) @@ -95,7 +98,9 @@ func (r *Reconciler) ReconcileKind(ctx context.Context, trigger *eventing.Trigge func (r *Reconciler) reconcileKind(ctx context.Context, trigger *eventing.Trigger) reconciler.Event { logger := kafkalogging.CreateReconcileMethodLogger(ctx, trigger) - + errOIDC := auth.SetupOIDCServiceAccount(ctx, r.Flags, r.ServiceAccountLister, r.KubeClient, eventing.SchemeGroupVersion.WithKind("Trigger"), trigger.ObjectMeta, &trigger.Status, func(as *duckv1.AuthStatus) { + trigger.Status.Auth = as + }) statusConditionManager := statusConditionManager{ Trigger: trigger, Configs: r.Env, @@ -214,6 +219,9 @@ func (r *Reconciler) reconcileKind(ctx context.Context, trigger *eventing.Trigge } logger.Debug("Contract config map updated") + if errOIDC != nil { + return errOIDC + } return statusConditionManager.reconciled() } @@ -331,6 +339,12 @@ func (r *Reconciler) reconcileTriggerEgress(ctx context.Context, broker *eventin if destination.CACerts != nil { egress.DestinationCACerts = *destination.CACerts } + if destination.Audience != nil { + egress.DestinationAudience = *destination.Audience + } + if trigger.Status.Auth != nil && trigger.Status.Auth.ServiceAccountName != nil { + egress.OidcServiceAccountName = *trigger.Status.Auth.ServiceAccountName + } newFiltersEnabled := func() bool { r.FlagsLock.RLock() diff --git a/control-plane/pkg/reconciler/trigger/trigger_test.go b/control-plane/pkg/reconciler/trigger/trigger_test.go index 190ef7f96d..d5ea8b66dd 100644 --- a/control-plane/pkg/reconciler/trigger/trigger_test.go +++ b/control-plane/pkg/reconciler/trigger/trigger_test.go @@ -26,6 +26,7 @@ import ( "github.com/google/go-cmp/cmp" "google.golang.org/protobuf/testing/protocmp" corev1 "k8s.io/api/core/v1" + metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" 
"k8s.io/apimachinery/pkg/runtime" "k8s.io/apimachinery/pkg/types" clientgotesting "k8s.io/client-go/testing" @@ -46,6 +47,7 @@ import ( eventing "knative.dev/eventing/pkg/apis/eventing/v1" v1 "knative.dev/eventing/pkg/apis/eventing/v1" "knative.dev/eventing/pkg/apis/feature" + "knative.dev/eventing/pkg/auth" eventingclient "knative.dev/eventing/pkg/client/injection/client/fake" triggerreconciler "knative.dev/eventing/pkg/client/injection/reconciler/eventing/v1/trigger" "knative.dev/eventing/pkg/eventingtls/eventingtlstesting" @@ -195,6 +197,7 @@ func triggerReconciliation(t *testing.T, format string, env config.Env, useNewFi { Object: newTrigger( reconcilertesting.WithInitTriggerConditions, + reconcilertesting.WithTriggerOIDCIdentityCreatedSucceededBecauseOIDCFeatureDisabled(), reconcilertesting.WithTriggerSubscribed(), withSubscriberURI, reconcilertesting.WithTriggerDependencyReady(), @@ -266,6 +269,7 @@ func triggerReconciliation(t *testing.T, format string, env config.Env, useNewFi { Object: newTrigger( reconcilertesting.WithInitTriggerConditions, + reconcilertesting.WithTriggerOIDCIdentityCreatedSucceededBecauseOIDCFeatureDisabled(), reconcilertesting.WithTriggerSubscribed(), withSubscriberURI, reconcilertesting.WithTriggerDependencyReady(), @@ -343,6 +347,7 @@ func triggerReconciliation(t *testing.T, format string, env config.Env, useNewFi Object: newTrigger( withDelivery, reconcilertesting.WithInitTriggerConditions, + reconcilertesting.WithTriggerOIDCIdentityCreatedSucceededBecauseOIDCFeatureDisabled(), reconcilertesting.WithTriggerSubscribed(), withSubscriberURI, reconcilertesting.WithTriggerDependencyReady(), @@ -413,6 +418,7 @@ func triggerReconciliation(t *testing.T, format string, env config.Env, useNewFi { Object: newTrigger( reconcilertesting.WithInitTriggerConditions, + reconcilertesting.WithTriggerOIDCIdentityCreatedSucceededBecauseOIDCFeatureDisabled(), reconcilertesting.WithTriggerSubscribed(), withSubscriberURI, 
reconcilertesting.WithTriggerDependencyReady(), @@ -484,6 +490,7 @@ func triggerReconciliation(t *testing.T, format string, env config.Env, useNewFi { Object: newTrigger( reconcilertesting.WithInitTriggerConditions, + reconcilertesting.WithTriggerOIDCIdentityCreatedSucceededBecauseOIDCFeatureDisabled(), reconcilertesting.WithTriggerSubscribed(), withSubscriberURI, reconcilertesting.WithTriggerDependencyReady(), @@ -562,6 +569,7 @@ func triggerReconciliation(t *testing.T, format string, env config.Env, useNewFi Object: newTrigger( withDelivery, reconcilertesting.WithInitTriggerConditions, + reconcilertesting.WithTriggerOIDCIdentityCreatedSucceededBecauseOIDCFeatureDisabled(), reconcilertesting.WithTriggerSubscribed(), withSubscriberURI, reconcilertesting.WithTriggerDependencyReady(), @@ -725,6 +733,7 @@ func triggerReconciliation(t *testing.T, format string, env config.Env, useNewFi { Object: newTrigger( reconcilertesting.WithInitTriggerConditions, + reconcilertesting.WithTriggerOIDCIdentityCreatedSucceededBecauseOIDCFeatureDisabled(), reconcilertesting.WithTriggerSubscribed(), withSubscriberURI, reconcilertesting.WithTriggerDependencyReady(), @@ -774,6 +783,7 @@ func triggerReconciliation(t *testing.T, format string, env config.Env, useNewFi { Object: newTrigger( reconcilertesting.WithInitTriggerConditions, + reconcilertesting.WithTriggerOIDCIdentityCreatedSucceededBecauseOIDCFeatureDisabled(), reconcilertesting.WithTriggerBrokerFailed( "Broker not found in data plane map", fmt.Sprintf("config map: %s", env.DataPlaneConfigMapAsString()), @@ -818,6 +828,7 @@ func triggerReconciliation(t *testing.T, format string, env config.Env, useNewFi { Object: newTrigger( reconcilertesting.WithInitTriggerConditions, + reconcilertesting.WithTriggerOIDCIdentityCreatedSucceededBecauseOIDCFeatureDisabled(), reconcilertesting.WithTriggerBrokerFailed( "Broker not found in data plane map", fmt.Sprintf("config map: %s", env.DataPlaneConfigMapAsString()), @@ -843,6 +854,7 @@ func 
triggerReconciliation(t *testing.T, format string, env config.Env, useNewFi { Object: newTrigger( reconcilertesting.WithInitTriggerConditions, + reconcilertesting.WithTriggerOIDCIdentityCreatedSucceededBecauseOIDCFeatureDisabled(), ), }, }, @@ -867,6 +879,7 @@ func triggerReconciliation(t *testing.T, format string, env config.Env, useNewFi { Object: newTrigger( reconcilertesting.WithInitTriggerConditions, + reconcilertesting.WithTriggerOIDCIdentityCreatedSucceededBecauseOIDCFeatureDisabled(), reconcilertesting.WithTriggerBrokerFailed("wrong", ""), ), }, @@ -892,6 +905,7 @@ func triggerReconciliation(t *testing.T, format string, env config.Env, useNewFi { Object: newTrigger( reconcilertesting.WithInitTriggerConditions, + reconcilertesting.WithTriggerOIDCIdentityCreatedSucceededBecauseOIDCFeatureDisabled(), ), }, }, @@ -922,6 +936,7 @@ func triggerReconciliation(t *testing.T, format string, env config.Env, useNewFi { Object: newTrigger( reconcilertesting.WithInitTriggerConditions, + reconcilertesting.WithTriggerOIDCIdentityCreatedSucceededBecauseOIDCFeatureDisabled(), ), }, }, @@ -987,6 +1002,7 @@ func triggerReconciliation(t *testing.T, format string, env config.Env, useNewFi { Object: newTrigger( reconcilertesting.WithInitTriggerConditions, + reconcilertesting.WithTriggerOIDCIdentityCreatedSucceededBecauseOIDCFeatureDisabled(), ), }, }, @@ -1072,6 +1088,7 @@ func triggerReconciliation(t *testing.T, format string, env config.Env, useNewFi useNewFilters, ), reconcilertesting.WithInitTriggerConditions, + reconcilertesting.WithTriggerOIDCIdentityCreatedSucceededBecauseOIDCFeatureDisabled(), reconcilertesting.WithTriggerSubscribed(), withSubscriberURI, reconcilertesting.WithTriggerDependencyReady(), @@ -1382,6 +1399,7 @@ func triggerReconciliation(t *testing.T, format string, env config.Env, useNewFi useNewFilters, ), reconcilertesting.WithInitTriggerConditions, + reconcilertesting.WithTriggerOIDCIdentityCreatedSucceededBecauseOIDCFeatureDisabled(), 
reconcilertesting.WithTriggerSubscribed(), withSubscriberURI, reconcilertesting.WithTriggerDependencyReady(), @@ -1686,6 +1704,7 @@ func triggerReconciliation(t *testing.T, format string, env config.Env, useNewFi useNewFilters, ), reconcilertesting.WithInitTriggerConditions, + reconcilertesting.WithTriggerOIDCIdentityCreatedSucceededBecauseOIDCFeatureDisabled(), reconcilertesting.WithTriggerSubscribed(), withSubscriberURI, reconcilertesting.WithTriggerDependencyReady(), @@ -2082,6 +2101,7 @@ func triggerReconciliation(t *testing.T, format string, env config.Env, useNewFi { Object: newTrigger( reconcilertesting.WithInitTriggerConditions, + reconcilertesting.WithTriggerOIDCIdentityCreatedSucceededBecauseOIDCFeatureDisabled(), reconcilertesting.WithTriggerSubscribed(), withSubscriberURI, reconcilertesting.WithTriggerDependencyReady(), @@ -2108,6 +2128,7 @@ func triggerReconciliation(t *testing.T, format string, env config.Env, useNewFi { Object: newTrigger( reconcilertesting.WithInitTriggerConditions, + reconcilertesting.WithTriggerOIDCIdentityCreatedSucceededBecauseOIDCFeatureDisabled(), ), }, }, @@ -2118,6 +2139,163 @@ func triggerReconciliation(t *testing.T, format string, env config.Env, useNewFi patchFinalizers(), }, }, + { + Name: "OIDC: creates OIDC service account", + Ctx: feature.ToContext(context.Background(), feature.Flags{ + feature.OIDCAuthentication: feature.Enabled, + }), + Objects: []runtime.Object{ + NewBroker( + BrokerReady, + WithTopicStatusAnnotation(BrokerTopic()), + WithBootstrapServerStatusAnnotation(bootstrapServers), + ), + newTrigger(), + NewService(), + NewConfigMapFromContract(&contract.Contract{ + Resources: []*contract.Resource{ + { + Uid: BrokerUUID, + Topics: []string{BrokerTopic()}, + Ingress: &contract.Ingress{Path: receiver.Path(BrokerNamespace, BrokerName)}, + }, + }, + }, env.DataPlaneConfigMapNamespace, env.ContractConfigMapName, env.ContractConfigMapFormat), + BrokerDispatcherPod(env.SystemNamespace, nil), + 
DataPlaneConfigMap(env.DataPlaneConfigMapNamespace, env.DataPlaneConfigConfigMapName, brokerreconciler.ConsumerConfigKey, + DataPlaneConfigInitialOffset(brokerreconciler.ConsumerConfigKey, sources.OffsetLatest), + ), + }, + Key: testKey, + WantEvents: []string{ + finalizerUpdatedEvent, + }, + WantPatches: []clientgotesting.PatchActionImpl{ + patchFinalizers(), + }, + WantUpdates: []clientgotesting.UpdateActionImpl{ + ConfigMapUpdate(env.DataPlaneConfigMapNamespace, env.ContractConfigMapName, env.ContractConfigMapFormat, &contract.Contract{ + Resources: []*contract.Resource{ + { + Uid: BrokerUUID, + Topics: []string{BrokerTopic()}, + Ingress: &contract.Ingress{Path: receiver.Path(BrokerNamespace, BrokerName)}, + Egresses: []*contract.Egress{ + { + Destination: ServiceURL, + ConsumerGroup: triggerConsumerGroup, + Uid: TriggerUUID, + Reference: TriggerReference(), + OidcServiceAccountName: makeTriggerOIDCServiceAccount().Name, + }, + }, + }, + }, + Generation: 1, + }), + BrokerDispatcherPodUpdate(env.SystemNamespace, map[string]string{ + base.VolumeGenerationAnnotationKey: "1", + }), + }, + WantStatusUpdates: []clientgotesting.UpdateActionImpl{ + { + Object: newTrigger( + reconcilertesting.WithInitTriggerConditions, + reconcilertesting.WithTriggerOIDCIdentityCreatedSucceeded(), + reconcilertesting.WithTriggerOIDCServiceAccountName(makeTriggerOIDCServiceAccount().Name), + reconcilertesting.WithTriggerSubscribed(), + withSubscriberURI, + reconcilertesting.WithTriggerDependencyReady(), + reconcilertesting.WithTriggerBrokerReady(), + withTriggerSubscriberResolvedSucceeded(contract.DeliveryOrder_UNORDERED), + withTriggerStatusGroupIdAnnotation(triggerConsumerGroup), + reconcilertesting.WithTriggerDeadLetterSinkNotConfigured(), + ), + }, + }, + WantCreates: []runtime.Object{ + makeTriggerOIDCServiceAccount(), + }, + }, + { + Name: "OIDC: Trigger not ready on invalid OIDC service account", + Ctx: feature.ToContext(context.Background(), feature.Flags{ + 
feature.OIDCAuthentication: feature.Enabled, + }), + Objects: []runtime.Object{ + NewBroker( + BrokerReady, + WithTopicStatusAnnotation(BrokerTopic()), + WithBootstrapServerStatusAnnotation(bootstrapServers), + ), + newTrigger(), + makeTriggerOIDCServiceAccountWithoutOwnerRef(), + NewService(), + NewConfigMapFromContract(&contract.Contract{ + Resources: []*contract.Resource{ + { + Uid: BrokerUUID, + Topics: []string{BrokerTopic()}, + Ingress: &contract.Ingress{Path: receiver.Path(BrokerNamespace, BrokerName)}, + }, + }, + }, env.DataPlaneConfigMapNamespace, env.ContractConfigMapName, env.ContractConfigMapFormat), + BrokerDispatcherPod(env.SystemNamespace, nil), + DataPlaneConfigMap(env.DataPlaneConfigMapNamespace, env.DataPlaneConfigConfigMapName, brokerreconciler.ConsumerConfigKey, + DataPlaneConfigInitialOffset(brokerreconciler.ConsumerConfigKey, sources.OffsetLatest), + ), + }, + WantErr: true, + Key: testKey, + WantEvents: []string{ + finalizerUpdatedEvent, + Eventf(corev1.EventTypeWarning, "InternalError", fmt.Sprintf("service account %s not owned by Trigger %s", makeTriggerOIDCServiceAccountWithoutOwnerRef().Name, TriggerName)), + }, + WantPatches: []clientgotesting.PatchActionImpl{ + patchFinalizers(), + }, + WantUpdates: []clientgotesting.UpdateActionImpl{ + ConfigMapUpdate(env.DataPlaneConfigMapNamespace, env.ContractConfigMapName, env.ContractConfigMapFormat, &contract.Contract{ + Resources: []*contract.Resource{ + { + Uid: BrokerUUID, + Topics: []string{BrokerTopic()}, + Ingress: &contract.Ingress{Path: receiver.Path(BrokerNamespace, BrokerName)}, + Egresses: []*contract.Egress{ + { + Destination: ServiceURL, + ConsumerGroup: triggerConsumerGroup, + Uid: TriggerUUID, + Reference: TriggerReference(), + OidcServiceAccountName: makeTriggerOIDCServiceAccount().Name, + }, + }, + }, + }, + Generation: 1, + }), + BrokerDispatcherPodUpdate(env.SystemNamespace, map[string]string{ + base.VolumeGenerationAnnotationKey: "1", + }), + }, + WantStatusUpdates: 
[]clientgotesting.UpdateActionImpl{ + { + Object: newTrigger( + reconcilertesting.WithInitTriggerConditions, + reconcilertesting.WithTriggerOIDCIdentityCreatedFailed("Unable to resolve service account for OIDC authentication", fmt.Sprintf("service account %s not owned by Trigger %s", makeTriggerOIDCServiceAccountWithoutOwnerRef().Name, TriggerName)), + reconcilertesting.WithTriggerOIDCServiceAccountName(makeTriggerOIDCServiceAccountWithoutOwnerRef().Name), + reconcilertesting.WithTriggerSubscribed(), + withSubscriberURI, + reconcilertesting.WithTriggerDependencyUnknown("", ""), + reconcilertesting.WithTriggerBrokerReady(), + withTriggerStatusGroupIdAnnotation(triggerConsumerGroup), + reconcilertesting.WithTriggerDeadLetterSinkNotConfigured(), + withTriggerSubscriberResolvedSucceeded(contract.DeliveryOrder_UNORDERED), + reconcilertesting.WithTriggerSubscribedUnknown("", ""), + ), + }, + }, + }, } for i := range table { @@ -2228,6 +2406,7 @@ func triggerFinalizer(t *testing.T, format string, env config.Env) { { Object: newTrigger( reconcilertesting.WithInitTriggerConditions, + reconcilertesting.WithTriggerOIDCIdentityCreatedSucceededBecauseOIDCFeatureDisabled(), ), }, }, @@ -2258,6 +2437,7 @@ func triggerFinalizer(t *testing.T, format string, env config.Env) { { Object: newTrigger( reconcilertesting.WithInitTriggerConditions, + reconcilertesting.WithTriggerOIDCIdentityCreatedSucceededBecauseOIDCFeatureDisabled(), ), }, }, @@ -2282,6 +2462,7 @@ func triggerFinalizer(t *testing.T, format string, env config.Env) { { Object: newTrigger( reconcilertesting.WithInitTriggerConditions, + reconcilertesting.WithTriggerOIDCIdentityCreatedSucceededBecauseOIDCFeatureDisabled(), ), }, }, @@ -2448,6 +2629,7 @@ func triggerFinalizer(t *testing.T, format string, env config.Env) { { Object: newTrigger( reconcilertesting.WithInitTriggerConditions, + reconcilertesting.WithTriggerOIDCIdentityCreatedSucceededBecauseOIDCFeatureDisabled(), ), }, }, @@ -2614,6 +2796,7 @@ func 
triggerFinalizer(t *testing.T, format string, env config.Env) { { Object: newTrigger( reconcilertesting.WithInitTriggerConditions, + reconcilertesting.WithTriggerOIDCIdentityCreatedSucceededBecauseOIDCFeatureDisabled(), ), }, }, @@ -2846,6 +3029,7 @@ func triggerFinalizer(t *testing.T, format string, env config.Env) { { Object: newTrigger( reconcilertesting.WithInitTriggerConditions, + reconcilertesting.WithTriggerOIDCIdentityCreatedSucceededBecauseOIDCFeatureDisabled(), ), }, }, @@ -2928,6 +3112,7 @@ func triggerFinalizer(t *testing.T, format string, env config.Env) { withTriggerSubscriberResolvedSucceeded(contract.DeliveryOrder_UNORDERED), withTriggerStatusGroupIdAnnotation(triggerConsumerGroup), reconcilertesting.WithTriggerDeadLetterSinkResolvedSucceeded(), + reconcilertesting.WithTriggerOIDCIdentityCreatedSucceededBecauseOIDCFeatureDisabled(), reconcilertesting.WithTriggerStatusDeadLetterSinkURI(duckv1.Addressable{ URL: &apis.URL{ Scheme: "http", @@ -2985,6 +3170,7 @@ func useTableWithFlags(t *testing.T, table TableTest, env *config.Env, flags fea }, BrokerLister: listers.GetBrokerLister(), ConfigMapLister: listers.GetConfigMapLister(), + ServiceAccountLister: listers.GetServiceAccountLister(), EventingClient: eventingclient.Get(ctx), Resolver: nil, Env: env, @@ -3054,6 +3240,24 @@ func newTrigger(options ...reconcilertesting.TriggerOption) runtime.Object { ) } +func makeTriggerOIDCServiceAccount() *corev1.ServiceAccount { + return auth.GetOIDCServiceAccountForResource(v1.SchemeGroupVersion.WithKind("Trigger"), metav1.ObjectMeta{ + Name: TriggerName, + Namespace: TriggerNamespace, + UID: TriggerUUID, + }) +} + +func makeTriggerOIDCServiceAccountWithoutOwnerRef() *corev1.ServiceAccount { + sa := auth.GetOIDCServiceAccountForResource(v1.SchemeGroupVersion.WithKind("Trigger"), metav1.ObjectMeta{ + Name: TriggerName, + Namespace: TriggerNamespace, + UID: TriggerUUID, + }) + sa.OwnerReferences = nil + + return sa +} func newTriggerWithCert(options 
...reconcilertesting.TriggerOption) runtime.Object { return reconcilertesting.NewTrigger( TriggerName, diff --git a/control-plane/pkg/reconciler/trigger/v2/triggerv2.go b/control-plane/pkg/reconciler/trigger/v2/triggerv2.go index a5aab80fd4..dc049ed94c 100644 --- a/control-plane/pkg/reconciler/trigger/v2/triggerv2.go +++ b/control-plane/pkg/reconciler/trigger/v2/triggerv2.go @@ -229,6 +229,10 @@ func (r Reconciler) reconcileConsumerGroup(ctx context.Context, broker *eventing } } + if trigger.Status.Auth != nil { + expectedCg.Spec.OIDCServiceAccountName = trigger.Status.Auth.ServiceAccountName + } + cg, err := r.ConsumerGroupLister.ConsumerGroups(trigger.GetNamespace()).Get(groupId) //Get by consumer group name if err != nil && !apierrors.IsNotFound(err) { return nil, err diff --git a/data-plane/config/broker/200-broker-data-plane-cluster-role.yaml b/data-plane/config/broker/200-broker-data-plane-cluster-role.yaml index 4c3e69829f..900badd68e 100644 --- a/data-plane/config/broker/200-broker-data-plane-cluster-role.yaml +++ b/data-plane/config/broker/200-broker-data-plane-cluster-role.yaml @@ -29,3 +29,9 @@ rules: - get - list - watch + - apiGroups: + - "" + resources: + - serviceaccounts/token + verbs: + - create diff --git a/data-plane/config/broker/500-receiver.yaml b/data-plane/config/broker/500-receiver.yaml index e956da2a88..4a88d62c4e 100644 --- a/data-plane/config/broker/500-receiver.yaml +++ b/data-plane/config/broker/500-receiver.yaml @@ -76,6 +76,9 @@ spec: - mountPath: /etc/tracing name: config-tracing readOnly: true + - mountPath: /etc/features + name: config-features + readOnly: true - mountPath: /etc/receiver-tls-secret name: broker-receiver-tls-secret readOnly: true @@ -120,6 +123,8 @@ spec: value: "false" - name: CONFIG_TRACING_PATH value: "/etc/tracing" + - name: CONFIG_FEATURES_PATH + value: "/etc/features" # https://github.com/fabric8io/kubernetes-client/issues/2212 - name: HTTP2_DISABLE value: "true" @@ -175,6 +180,9 @@ spec: - name: 
config-tracing configMap: name: config-tracing + - name: config-features + configMap: + name: config-features - name: broker-receiver-tls-secret secret: secretName: kafka-broker-ingress-server-tls diff --git a/data-plane/config/channel/200-channel-data-plane-cluster-role.yaml b/data-plane/config/channel/200-channel-data-plane-cluster-role.yaml index b9b2bb92e4..aded352aef 100644 --- a/data-plane/config/channel/200-channel-data-plane-cluster-role.yaml +++ b/data-plane/config/channel/200-channel-data-plane-cluster-role.yaml @@ -29,3 +29,9 @@ rules: - get - list - watch + - apiGroups: + - "" + resources: + - serviceaccounts/token + verbs: + - create diff --git a/data-plane/config/channel/500-receiver.yaml b/data-plane/config/channel/500-receiver.yaml index 62f0ab3cd3..85db8a250b 100644 --- a/data-plane/config/channel/500-receiver.yaml +++ b/data-plane/config/channel/500-receiver.yaml @@ -76,6 +76,9 @@ spec: - mountPath: /etc/tracing name: config-tracing readOnly: true + - mountPath: /etc/features + name: config-features + readOnly: true - mountPath: /etc/receiver-tls-secret name: channel-receiver-tls-secret readOnly: true @@ -120,6 +123,8 @@ spec: value: "false" - name: CONFIG_TRACING_PATH value: "/etc/tracing" + - name: CONFIG_FEATURES_PATH + value: "/etc/features" # https://github.com/fabric8io/kubernetes-client/issues/2212 - name: HTTP2_DISABLE value: "true" @@ -175,6 +180,9 @@ spec: - name: config-tracing configMap: name: config-tracing + - name: config-features + configMap: + name: config-features - name: channel-receiver-tls-secret secret: secretName: kafka-channel-ingress-server-tls diff --git a/data-plane/config/sink/500-receiver.yaml b/data-plane/config/sink/500-receiver.yaml index 66bd32bc2a..e29d2ba988 100644 --- a/data-plane/config/sink/500-receiver.yaml +++ b/data-plane/config/sink/500-receiver.yaml @@ -76,6 +76,9 @@ spec: - mountPath: /etc/tracing name: config-tracing readOnly: true + - mountPath: /etc/features + name: config-features + readOnly: true - 
mountPath: /etc/receiver-tls-secret name: sink-receiver-tls-secret readOnly: true @@ -120,6 +123,8 @@ spec: value: "false" - name: CONFIG_TRACING_PATH value: "/etc/tracing" + - name: CONFIG_FEATURES_PATH + value: "/etc/features" # https://github.com/fabric8io/kubernetes-client/issues/2212 - name: HTTP2_DISABLE value: "true" @@ -175,6 +180,9 @@ spec: - name: config-tracing configMap: name: config-tracing + - name: config-features + configMap: + name: config-features - name: sink-receiver-tls-secret secret: secretName: kafka-sink-ingress-server-tls diff --git a/data-plane/config/source/200-source-data-plane-cluster-role.yaml b/data-plane/config/source/200-source-data-plane-cluster-role.yaml index 5131b2430c..3c2f973a79 100644 --- a/data-plane/config/source/200-source-data-plane-cluster-role.yaml +++ b/data-plane/config/source/200-source-data-plane-cluster-role.yaml @@ -29,3 +29,9 @@ rules: - get - list - watch + - apiGroups: + - "" + resources: + - serviceaccounts/token + verbs: + - create diff --git a/data-plane/contract/src/main/java/dev/knative/eventing/kafka/broker/contract/DataPlaneContract.java b/data-plane/contract/src/main/java/dev/knative/eventing/kafka/broker/contract/DataPlaneContract.java index 6358142cc5..577fa7a36f 100644 --- a/data-plane/contract/src/main/java/dev/knative/eventing/kafka/broker/contract/DataPlaneContract.java +++ b/data-plane/contract/src/main/java/dev/knative/eventing/kafka/broker/contract/DataPlaneContract.java @@ -9249,6 +9249,25 @@ public interface EgressConfigOrBuilder */ com.google.protobuf.ByteString getDeadLetterCACertsBytes(); + /** + *
+         * Dead Letter Audience is the OIDC audience of the dead letter
+         * 
+ * + * string deadLetterAudience = 7; + * @return The deadLetterAudience. + */ + java.lang.String getDeadLetterAudience(); + /** + *
+         * Dead Letter Audience is the OIDC audience of the dead letter
+         * 
+ * + * string deadLetterAudience = 7; + * @return The bytes for deadLetterAudience. + */ + com.google.protobuf.ByteString getDeadLetterAudienceBytes(); + /** *
          * retry is the minimum number of retries the sender should attempt when
@@ -9316,6 +9335,7 @@ private EgressConfig(com.google.protobuf.GeneratedMessageV3.Builder builder)
         private EgressConfig() {
             deadLetter_ = "";
             deadLetterCACerts_ = "";
+            deadLetterAudience_ = "";
             backoffPolicy_ = 0;
         }
 
@@ -9377,6 +9397,12 @@ private EgressConfig(
                             deadLetterCACerts_ = s;
                             break;
                         }
+                        case 58: {
+                            java.lang.String s = input.readStringRequireUtf8();
+
+                            deadLetterAudience_ = s;
+                            break;
+                        }
                         default: {
                             if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) {
                                 done = true;
@@ -9492,6 +9518,48 @@ public com.google.protobuf.ByteString getDeadLetterCACertsBytes() {
             }
         }
 
+        public static final int DEADLETTERAUDIENCE_FIELD_NUMBER = 7;
+        private volatile java.lang.Object deadLetterAudience_;
+        /**
+         * 
+         * Dead Letter Audience is the OIDC audience of the dead letter
+         * 
+ * + * string deadLetterAudience = 7; + * @return The deadLetterAudience. + */ + @java.lang.Override + public java.lang.String getDeadLetterAudience() { + java.lang.Object ref = deadLetterAudience_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + deadLetterAudience_ = s; + return s; + } + } + /** + *
+         * Dead Letter Audience is the OIDC audience of the dead letter
+         * 
+ * + * string deadLetterAudience = 7; + * @return The bytes for deadLetterAudience. + */ + @java.lang.Override + public com.google.protobuf.ByteString getDeadLetterAudienceBytes() { + java.lang.Object ref = deadLetterAudience_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); + deadLetterAudience_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + public static final int RETRY_FIELD_NUMBER = 2; private int retry_; /** @@ -9605,6 +9673,9 @@ public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io if (!getDeadLetterCACertsBytes().isEmpty()) { com.google.protobuf.GeneratedMessageV3.writeString(output, 6, deadLetterCACerts_); } + if (!getDeadLetterAudienceBytes().isEmpty()) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 7, deadLetterAudience_); + } unknownFields.writeTo(output); } @@ -9634,6 +9705,9 @@ public int getSerializedSize() { if (!getDeadLetterCACertsBytes().isEmpty()) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(6, deadLetterCACerts_); } + if (!getDeadLetterAudienceBytes().isEmpty()) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(7, deadLetterAudience_); + } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; @@ -9652,6 +9726,7 @@ public boolean equals(final java.lang.Object obj) { if (!getDeadLetter().equals(other.getDeadLetter())) return false; if (!getDeadLetterCACerts().equals(other.getDeadLetterCACerts())) return false; + if (!getDeadLetterAudience().equals(other.getDeadLetterAudience())) return false; if (getRetry() != other.getRetry()) return false; if (backoffPolicy_ != other.backoffPolicy_) return false; if (getBackoffDelay() != other.getBackoffDelay()) return false; @@ -9671,6 +9746,8 @@ public int hashCode() { hash = (53 * hash) + getDeadLetter().hashCode(); hash = (37 * hash) + DEADLETTERCACERTS_FIELD_NUMBER; 
hash = (53 * hash) + getDeadLetterCACerts().hashCode(); + hash = (37 * hash) + DEADLETTERAUDIENCE_FIELD_NUMBER; + hash = (53 * hash) + getDeadLetterAudience().hashCode(); hash = (37 * hash) + RETRY_FIELD_NUMBER; hash = (53 * hash) + getRetry(); hash = (37 * hash) + BACKOFFPOLICY_FIELD_NUMBER; @@ -9818,6 +9895,8 @@ public Builder clear() { deadLetterCACerts_ = ""; + deadLetterAudience_ = ""; + retry_ = 0; backoffPolicy_ = 0; @@ -9856,6 +9935,7 @@ public dev.knative.eventing.kafka.broker.contract.DataPlaneContract.EgressConfig new dev.knative.eventing.kafka.broker.contract.DataPlaneContract.EgressConfig(this); result.deadLetter_ = deadLetter_; result.deadLetterCACerts_ = deadLetterCACerts_; + result.deadLetterAudience_ = deadLetterAudience_; result.retry_ = retry_; result.backoffPolicy_ = backoffPolicy_; result.backoffDelay_ = backoffDelay_; @@ -9918,6 +9998,10 @@ public Builder mergeFrom(dev.knative.eventing.kafka.broker.contract.DataPlaneCon deadLetterCACerts_ = other.deadLetterCACerts_; onChanged(); } + if (!other.getDeadLetterAudience().isEmpty()) { + deadLetterAudience_ = other.deadLetterAudience_; + onChanged(); + } if (other.getRetry() != 0) { setRetry(other.getRetry()); } @@ -10142,6 +10226,97 @@ public Builder setDeadLetterCACertsBytes(com.google.protobuf.ByteString value) { return this; } + private java.lang.Object deadLetterAudience_ = ""; + /** + *
+             * Dead Letter Audience is the OIDC audience of the dead letter
+             * 
+ * + * string deadLetterAudience = 7; + * @return The deadLetterAudience. + */ + public java.lang.String getDeadLetterAudience() { + java.lang.Object ref = deadLetterAudience_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + deadLetterAudience_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + *
+             * Dead Letter Audience is the OIDC audience of the dead letter
+             * 
+ * + * string deadLetterAudience = 7; + * @return The bytes for deadLetterAudience. + */ + public com.google.protobuf.ByteString getDeadLetterAudienceBytes() { + java.lang.Object ref = deadLetterAudience_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); + deadLetterAudience_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + *
+             * Dead Letter Audience is the OIDC audience of the dead letter
+             * 
+ * + * string deadLetterAudience = 7; + * @param value The deadLetterAudience to set. + * @return This builder for chaining. + */ + public Builder setDeadLetterAudience(java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + + deadLetterAudience_ = value; + onChanged(); + return this; + } + /** + *
+             * Dead Letter Audience is the OIDC audience of the dead letter
+             * 
+ * + * string deadLetterAudience = 7; + * @return This builder for chaining. + */ + public Builder clearDeadLetterAudience() { + + deadLetterAudience_ = getDefaultInstance().getDeadLetterAudience(); + onChanged(); + return this; + } + /** + *
+             * Dead Letter Audience is the OIDC audience of the dead letter
+             * 
+ * + * string deadLetterAudience = 7; + * @param value The bytes for deadLetterAudience to set. + * @return This builder for chaining. + */ + public Builder setDeadLetterAudienceBytes(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + + deadLetterAudience_ = value; + onChanged(); + return this; + } + private int retry_; /** *
@@ -10469,6 +10644,25 @@ public interface EgressOrBuilder
          */
         com.google.protobuf.ByteString getDestinationCACertsBytes();
 
+        /**
+         * 
+         * OIDC audience of the destination
+         * 
+ * + * string destinationAudience = 17; + * @return The destinationAudience. + */ + java.lang.String getDestinationAudience(); + /** + *
+         * OIDC audience of the destination
+         * 
+ * + * string destinationAudience = 17; + * @return The bytes for destinationAudience. + */ + com.google.protobuf.ByteString getDestinationAudienceBytes(); + /** *
          * Send the response to an url
@@ -10570,6 +10764,25 @@ public interface EgressOrBuilder
          */
         com.google.protobuf.ByteString getReplyUrlCACertsBytes();
 
+        /**
+         * 
+         * OIDC audience of the replyUrl
+         * 
+ * + * string replyUrlAudience = 18; + * @return The replyUrlAudience. + */ + java.lang.String getReplyUrlAudience(); + /** + *
+         * OIDC audience of the replyUrl
+         * 
+ * + * string replyUrlAudience = 18; + * @return The bytes for replyUrlAudience. + */ + com.google.protobuf.ByteString getReplyUrlAudienceBytes(); + /** *
          * A filter for performing exact match against Cloud Events attributes
@@ -10806,6 +11019,25 @@ public interface EgressOrBuilder
         dev.knative.eventing.kafka.broker.contract.DataPlaneContract.EgressFeatureFlagsOrBuilder
                 getFeatureFlagsOrBuilder();
 
+        /**
+         * 
+         * Name of the service account to use for OIDC authentication.
+         * 
+ * + * string oidcServiceAccountName = 19; + * @return The oidcServiceAccountName. + */ + java.lang.String getOidcServiceAccountName(); + /** + *
+         * Name of the service account to use for OIDC authentication.
+         * 
+ * + * string oidcServiceAccountName = 19; + * @return The bytes for oidcServiceAccountName. + */ + com.google.protobuf.ByteString getOidcServiceAccountNameBytes(); + public dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Egress.ReplyStrategyCase getReplyStrategyCase(); } @@ -10826,11 +11058,14 @@ private Egress() { consumerGroup_ = ""; destination_ = ""; destinationCACerts_ = ""; + destinationAudience_ = ""; replyUrlCACerts_ = ""; + replyUrlAudience_ = ""; uid_ = ""; deliveryOrder_ = 0; keyType_ = 0; dialectedFilter_ = java.util.Collections.emptyList(); + oidcServiceAccountName_ = ""; } @java.lang.Override @@ -11031,6 +11266,24 @@ private Egress( replyUrlCACerts_ = s; break; } + case 138: { + java.lang.String s = input.readStringRequireUtf8(); + + destinationAudience_ = s; + break; + } + case 146: { + java.lang.String s = input.readStringRequireUtf8(); + + replyUrlAudience_ = s; + break; + } + case 154: { + java.lang.String s = input.readStringRequireUtf8(); + + oidcServiceAccountName_ = s; + break; + } default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; @@ -11240,6 +11493,48 @@ public com.google.protobuf.ByteString getDestinationCACertsBytes() { } } + public static final int DESTINATIONAUDIENCE_FIELD_NUMBER = 17; + private volatile java.lang.Object destinationAudience_; + /** + *
+         * OIDC audience of the destination
+         * 
+ * + * string destinationAudience = 17; + * @return The destinationAudience. + */ + @java.lang.Override + public java.lang.String getDestinationAudience() { + java.lang.Object ref = destinationAudience_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + destinationAudience_ = s; + return s; + } + } + /** + *
+         * OIDC audience of the destination
+         * 
+ * + * string destinationAudience = 17; + * @return The bytes for destinationAudience. + */ + @java.lang.Override + public com.google.protobuf.ByteString getDestinationAudienceBytes() { + java.lang.Object ref = destinationAudience_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); + destinationAudience_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + public static final int REPLYURL_FIELD_NUMBER = 3; /** *
@@ -11429,6 +11724,48 @@ public com.google.protobuf.ByteString getReplyUrlCACertsBytes() {
             }
         }
 
+        public static final int REPLYURLAUDIENCE_FIELD_NUMBER = 18;
+        private volatile java.lang.Object replyUrlAudience_;
+        /**
+         * 
+         * OIDC audience of the replyUrl
+         * 
+ * + * string replyUrlAudience = 18; + * @return The replyUrlAudience. + */ + @java.lang.Override + public java.lang.String getReplyUrlAudience() { + java.lang.Object ref = replyUrlAudience_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + replyUrlAudience_ = s; + return s; + } + } + /** + *
+         * OIDC audience of the replyUrl
+         * 
+ * + * string replyUrlAudience = 18; + * @return The bytes for replyUrlAudience. + */ + @java.lang.Override + public com.google.protobuf.ByteString getReplyUrlAudienceBytes() { + java.lang.Object ref = replyUrlAudience_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); + replyUrlAudience_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + public static final int FILTER_FIELD_NUMBER = 5; private dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Filter filter_; /** @@ -11793,6 +12130,48 @@ public dev.knative.eventing.kafka.broker.contract.DataPlaneContract.EgressFeatur return getFeatureFlags(); } + public static final int OIDCSERVICEACCOUNTNAME_FIELD_NUMBER = 19; + private volatile java.lang.Object oidcServiceAccountName_; + /** + *
+         * Name of the service account to use for OIDC authentication.
+         * 
+ * + * string oidcServiceAccountName = 19; + * @return The oidcServiceAccountName. + */ + @java.lang.Override + public java.lang.String getOidcServiceAccountName() { + java.lang.Object ref = oidcServiceAccountName_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + oidcServiceAccountName_ = s; + return s; + } + } + /** + *
+         * Name of the service account to use for OIDC authentication.
+         * 
+ * + * string oidcServiceAccountName = 19; + * @return The bytes for oidcServiceAccountName. + */ + @java.lang.Override + public com.google.protobuf.ByteString getOidcServiceAccountNameBytes() { + java.lang.Object ref = oidcServiceAccountName_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); + oidcServiceAccountName_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + private byte memoizedIsInitialized = -1; @java.lang.Override @@ -11859,6 +12238,15 @@ public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io if (!getReplyUrlCACertsBytes().isEmpty()) { com.google.protobuf.GeneratedMessageV3.writeString(output, 16, replyUrlCACerts_); } + if (!getDestinationAudienceBytes().isEmpty()) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 17, destinationAudience_); + } + if (!getReplyUrlAudienceBytes().isEmpty()) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 18, replyUrlAudience_); + } + if (!getOidcServiceAccountNameBytes().isEmpty()) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 19, oidcServiceAccountName_); + } unknownFields.writeTo(output); } @@ -11920,6 +12308,15 @@ public int getSerializedSize() { if (!getReplyUrlCACertsBytes().isEmpty()) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(16, replyUrlCACerts_); } + if (!getDestinationAudienceBytes().isEmpty()) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(17, destinationAudience_); + } + if (!getReplyUrlAudienceBytes().isEmpty()) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(18, replyUrlAudience_); + } + if (!getOidcServiceAccountNameBytes().isEmpty()) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(19, oidcServiceAccountName_); + } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; @@ -11939,7 +12336,9 @@ 
public boolean equals(final java.lang.Object obj) { if (!getConsumerGroup().equals(other.getConsumerGroup())) return false; if (!getDestination().equals(other.getDestination())) return false; if (!getDestinationCACerts().equals(other.getDestinationCACerts())) return false; + if (!getDestinationAudience().equals(other.getDestinationAudience())) return false; if (!getReplyUrlCACerts().equals(other.getReplyUrlCACerts())) return false; + if (!getReplyUrlAudience().equals(other.getReplyUrlAudience())) return false; if (hasFilter() != other.hasFilter()) return false; if (hasFilter()) { if (!getFilter().equals(other.getFilter())) return false; @@ -11961,6 +12360,7 @@ public boolean equals(final java.lang.Object obj) { if (hasFeatureFlags()) { if (!getFeatureFlags().equals(other.getFeatureFlags())) return false; } + if (!getOidcServiceAccountName().equals(other.getOidcServiceAccountName())) return false; if (!getReplyStrategyCase().equals(other.getReplyStrategyCase())) return false; switch (replyStrategyCase_) { case 3: @@ -11992,8 +12392,12 @@ public int hashCode() { hash = (53 * hash) + getDestination().hashCode(); hash = (37 * hash) + DESTINATIONCACERTS_FIELD_NUMBER; hash = (53 * hash) + getDestinationCACerts().hashCode(); + hash = (37 * hash) + DESTINATIONAUDIENCE_FIELD_NUMBER; + hash = (53 * hash) + getDestinationAudience().hashCode(); hash = (37 * hash) + REPLYURLCACERTS_FIELD_NUMBER; hash = (53 * hash) + getReplyUrlCACerts().hashCode(); + hash = (37 * hash) + REPLYURLAUDIENCE_FIELD_NUMBER; + hash = (53 * hash) + getReplyUrlAudience().hashCode(); if (hasFilter()) { hash = (37 * hash) + FILTER_FIELD_NUMBER; hash = (53 * hash) + getFilter().hashCode(); @@ -12022,6 +12426,8 @@ public int hashCode() { hash = (37 * hash) + FEATUREFLAGS_FIELD_NUMBER; hash = (53 * hash) + getFeatureFlags().hashCode(); } + hash = (37 * hash) + OIDCSERVICEACCOUNTNAME_FIELD_NUMBER; + hash = (53 * hash) + getOidcServiceAccountName().hashCode(); switch (replyStrategyCase_) { case 3: hash = (37 * 
hash) + REPLYURL_FIELD_NUMBER; @@ -12179,8 +12585,12 @@ public Builder clear() { destinationCACerts_ = ""; + destinationAudience_ = ""; + replyUrlCACerts_ = ""; + replyUrlAudience_ = ""; + if (filterBuilder_ == null) { filter_ = null; } else { @@ -12219,6 +12629,8 @@ public Builder clear() { featureFlags_ = null; featureFlagsBuilder_ = null; } + oidcServiceAccountName_ = ""; + replyStrategyCase_ = 0; replyStrategy_ = null; return this; @@ -12251,6 +12663,7 @@ public dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Egress build result.consumerGroup_ = consumerGroup_; result.destination_ = destination_; result.destinationCACerts_ = destinationCACerts_; + result.destinationAudience_ = destinationAudience_; if (replyStrategyCase_ == 3) { result.replyStrategy_ = replyStrategy_; } @@ -12269,6 +12682,7 @@ public dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Egress build } } result.replyUrlCACerts_ = replyUrlCACerts_; + result.replyUrlAudience_ = replyUrlAudience_; if (filterBuilder_ == null) { result.filter_ = filter_; } else { @@ -12302,6 +12716,7 @@ public dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Egress build } else { result.featureFlags_ = featureFlagsBuilder_.build(); } + result.oidcServiceAccountName_ = oidcServiceAccountName_; result.replyStrategyCase_ = replyStrategyCase_; onBuilt(); return result; @@ -12364,10 +12779,18 @@ public Builder mergeFrom(dev.knative.eventing.kafka.broker.contract.DataPlaneCon destinationCACerts_ = other.destinationCACerts_; onChanged(); } + if (!other.getDestinationAudience().isEmpty()) { + destinationAudience_ = other.destinationAudience_; + onChanged(); + } if (!other.getReplyUrlCACerts().isEmpty()) { replyUrlCACerts_ = other.replyUrlCACerts_; onChanged(); } + if (!other.getReplyUrlAudience().isEmpty()) { + replyUrlAudience_ = other.replyUrlAudience_; + onChanged(); + } if (other.hasFilter()) { mergeFilter(other.getFilter()); } @@ -12419,6 +12842,10 @@ public Builder 
mergeFrom(dev.knative.eventing.kafka.broker.contract.DataPlaneCon if (other.hasFeatureFlags()) { mergeFeatureFlags(other.getFeatureFlags()); } + if (!other.getOidcServiceAccountName().isEmpty()) { + oidcServiceAccountName_ = other.oidcServiceAccountName_; + onChanged(); + } switch (other.getReplyStrategyCase()) { case REPLYURL: { replyStrategyCase_ = 3; @@ -12699,7 +13126,98 @@ public com.google.protobuf.ByteString getDestinationCACertsBytes() { if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); - destinationCACerts_ = b; + destinationCACerts_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + *
+             * destination CA Cert is the CA Cert used for HTTPS communication through destination
+             * 
+ * + * string destinationCACerts = 15; + * @param value The destinationCACerts to set. + * @return This builder for chaining. + */ + public Builder setDestinationCACerts(java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + + destinationCACerts_ = value; + onChanged(); + return this; + } + /** + *
+             * destination CA Cert is the CA Cert used for HTTPS communication through destination
+             * 
+ * + * string destinationCACerts = 15; + * @return This builder for chaining. + */ + public Builder clearDestinationCACerts() { + + destinationCACerts_ = getDefaultInstance().getDestinationCACerts(); + onChanged(); + return this; + } + /** + *
+             * destination CA Cert is the CA Cert used for HTTPS communication through destination
+             * 
+ * + * string destinationCACerts = 15; + * @param value The bytes for destinationCACerts to set. + * @return This builder for chaining. + */ + public Builder setDestinationCACertsBytes(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + + destinationCACerts_ = value; + onChanged(); + return this; + } + + private java.lang.Object destinationAudience_ = ""; + /** + *
+             * OIDC audience of the destination
+             * 
+ * + * string destinationAudience = 17; + * @return The destinationAudience. + */ + public java.lang.String getDestinationAudience() { + java.lang.Object ref = destinationAudience_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + destinationAudience_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + *
+             * OIDC audience of the destination
+             * 
+ * + * string destinationAudience = 17; + * @return The bytes for destinationAudience. + */ + public com.google.protobuf.ByteString getDestinationAudienceBytes() { + java.lang.Object ref = destinationAudience_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); + destinationAudience_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; @@ -12707,52 +13225,52 @@ public com.google.protobuf.ByteString getDestinationCACertsBytes() { } /** *
-             * destination CA Cert is the CA Cert used for HTTPS communication through destination
+             * OIDC audience of the destination
              * 
* - * string destinationCACerts = 15; - * @param value The destinationCACerts to set. + * string destinationAudience = 17; + * @param value The destinationAudience to set. * @return This builder for chaining. */ - public Builder setDestinationCACerts(java.lang.String value) { + public Builder setDestinationAudience(java.lang.String value) { if (value == null) { throw new NullPointerException(); } - destinationCACerts_ = value; + destinationAudience_ = value; onChanged(); return this; } /** *
-             * destination CA Cert is the CA Cert used for HTTPS communication through destination
+             * OIDC audience of the destination
              * 
* - * string destinationCACerts = 15; + * string destinationAudience = 17; * @return This builder for chaining. */ - public Builder clearDestinationCACerts() { + public Builder clearDestinationAudience() { - destinationCACerts_ = getDefaultInstance().getDestinationCACerts(); + destinationAudience_ = getDefaultInstance().getDestinationAudience(); onChanged(); return this; } /** *
-             * destination CA Cert is the CA Cert used for HTTPS communication through destination
+             * OIDC audience of the destination
              * 
* - * string destinationCACerts = 15; - * @param value The bytes for destinationCACerts to set. + * string destinationAudience = 17; + * @param value The bytes for destinationAudience to set. * @return This builder for chaining. */ - public Builder setDestinationCACertsBytes(com.google.protobuf.ByteString value) { + public Builder setDestinationAudienceBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); - destinationCACerts_ = value; + destinationAudience_ = value; onChanged(); return this; } @@ -13351,6 +13869,97 @@ public Builder setReplyUrlCACertsBytes(com.google.protobuf.ByteString value) { return this; } + private java.lang.Object replyUrlAudience_ = ""; + /** + *
+             * OIDC audience of the replyUrl
+             * 
+ * + * string replyUrlAudience = 18; + * @return The replyUrlAudience. + */ + public java.lang.String getReplyUrlAudience() { + java.lang.Object ref = replyUrlAudience_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + replyUrlAudience_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + *
+             * OIDC audience of the replyUrl
+             * 
+ * + * string replyUrlAudience = 18; + * @return The bytes for replyUrlAudience. + */ + public com.google.protobuf.ByteString getReplyUrlAudienceBytes() { + java.lang.Object ref = replyUrlAudience_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); + replyUrlAudience_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + *
+             * OIDC audience of the replyUrl
+             * 
+ * + * string replyUrlAudience = 18; + * @param value The replyUrlAudience to set. + * @return This builder for chaining. + */ + public Builder setReplyUrlAudience(java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + + replyUrlAudience_ = value; + onChanged(); + return this; + } + /** + *
+             * OIDC audience of the replyUrl
+             * 
+ * + * string replyUrlAudience = 18; + * @return This builder for chaining. + */ + public Builder clearReplyUrlAudience() { + + replyUrlAudience_ = getDefaultInstance().getReplyUrlAudience(); + onChanged(); + return this; + } + /** + *
+             * OIDC audience of the replyUrl
+             * 
+ * + * string replyUrlAudience = 18; + * @param value The bytes for replyUrlAudience to set. + * @return This builder for chaining. + */ + public Builder setReplyUrlAudienceBytes(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + + replyUrlAudience_ = value; + onChanged(); + return this; + } + private dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Filter filter_; private com.google.protobuf.SingleFieldBuilderV3< dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Filter, @@ -14710,6 +15319,97 @@ public Builder clearFeatureFlags() { return featureFlagsBuilder_; } + private java.lang.Object oidcServiceAccountName_ = ""; + /** + *
+             * Name of the service account to use for OIDC authentication.
+             * 
+ * + * string oidcServiceAccountName = 19; + * @return The oidcServiceAccountName. + */ + public java.lang.String getOidcServiceAccountName() { + java.lang.Object ref = oidcServiceAccountName_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + oidcServiceAccountName_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + *
+             * Name of the service account to use for OIDC authentication.
+             * 
+ * + * string oidcServiceAccountName = 19; + * @return The bytes for oidcServiceAccountName. + */ + public com.google.protobuf.ByteString getOidcServiceAccountNameBytes() { + java.lang.Object ref = oidcServiceAccountName_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); + oidcServiceAccountName_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + *
+             * Name of the service account to use for OIDC authentication.
+             * 
+ * + * string oidcServiceAccountName = 19; + * @param value The oidcServiceAccountName to set. + * @return This builder for chaining. + */ + public Builder setOidcServiceAccountName(java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + + oidcServiceAccountName_ = value; + onChanged(); + return this; + } + /** + *
+             * Name of the service account to use for OIDC authentication.
+             * 
+ * + * string oidcServiceAccountName = 19; + * @return This builder for chaining. + */ + public Builder clearOidcServiceAccountName() { + + oidcServiceAccountName_ = getDefaultInstance().getOidcServiceAccountName(); + onChanged(); + return this; + } + /** + *
+             * Name of the service account to use for OIDC authentication.
+             * 
+ * + * string oidcServiceAccountName = 19; + * @param value The bytes for oidcServiceAccountName to set. + * @return This builder for chaining. + */ + public Builder setOidcServiceAccountNameBytes(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + + oidcServiceAccountName_ = value; + onChanged(); + return this; + } + @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); @@ -15437,6 +16137,25 @@ public interface IngressOrBuilder * @return The enableAutoCreateEventTypes. */ boolean getEnableAutoCreateEventTypes(); + + /** + *
+         * OIDC audience of this ingress
+         * 
+ * + * string audience = 5; + * @return The audience. + */ + java.lang.String getAudience(); + /** + *
+         * OIDC audience of this ingress
+         * 
+ * + * string audience = 5; + * @return The bytes for audience. + */ + com.google.protobuf.ByteString getAudienceBytes(); } /** *
@@ -15465,6 +16184,7 @@ private Ingress() {
             contentMode_ = 0;
             path_ = "";
             host_ = "";
+            audience_ = "";
         }
 
         @java.lang.Override
@@ -15517,6 +16237,12 @@ private Ingress(
                             enableAutoCreateEventTypes_ = input.readBool();
                             break;
                         }
+                        case 42: {
+                            java.lang.String s = input.readStringRequireUtf8();
+
+                            audience_ = s;
+                            break;
+                        }
                         default: {
                             if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) {
                                 done = true;
@@ -15679,6 +16405,48 @@ public boolean getEnableAutoCreateEventTypes() {
             return enableAutoCreateEventTypes_;
         }
 
+        public static final int AUDIENCE_FIELD_NUMBER = 5;
+        private volatile java.lang.Object audience_;
+        /**
+         * 
+         * OIDC audience of this ingress
+         * 
+ * + * string audience = 5; + * @return The audience. + */ + @java.lang.Override + public java.lang.String getAudience() { + java.lang.Object ref = audience_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + audience_ = s; + return s; + } + } + /** + *
+         * OIDC audience of this ingress
+         * 
+ * + * string audience = 5; + * @return The bytes for audience. + */ + @java.lang.Override + public com.google.protobuf.ByteString getAudienceBytes() { + java.lang.Object ref = audience_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); + audience_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + private byte memoizedIsInitialized = -1; @java.lang.Override @@ -15706,6 +16474,9 @@ public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io if (enableAutoCreateEventTypes_ != false) { output.writeBool(4, enableAutoCreateEventTypes_); } + if (!getAudienceBytes().isEmpty()) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 5, audience_); + } unknownFields.writeTo(output); } @@ -15728,6 +16499,9 @@ public int getSerializedSize() { if (enableAutoCreateEventTypes_ != false) { size += com.google.protobuf.CodedOutputStream.computeBoolSize(4, enableAutoCreateEventTypes_); } + if (!getAudienceBytes().isEmpty()) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(5, audience_); + } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; @@ -15748,6 +16522,7 @@ public boolean equals(final java.lang.Object obj) { if (!getPath().equals(other.getPath())) return false; if (!getHost().equals(other.getHost())) return false; if (getEnableAutoCreateEventTypes() != other.getEnableAutoCreateEventTypes()) return false; + if (!getAudience().equals(other.getAudience())) return false; if (!unknownFields.equals(other.unknownFields)) return false; return true; } @@ -15767,6 +16542,8 @@ public int hashCode() { hash = (53 * hash) + getHost().hashCode(); hash = (37 * hash) + ENABLEAUTOCREATEEVENTTYPES_FIELD_NUMBER; hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean(getEnableAutoCreateEventTypes()); + hash = (37 * hash) + AUDIENCE_FIELD_NUMBER; + hash = (53 * hash) + 
getAudience().hashCode(); hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; @@ -15918,6 +16695,8 @@ public Builder clear() { enableAutoCreateEventTypes_ = false; + audience_ = ""; + return this; } @@ -15948,6 +16727,7 @@ public dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Ingress buil result.path_ = path_; result.host_ = host_; result.enableAutoCreateEventTypes_ = enableAutoCreateEventTypes_; + result.audience_ = audience_; onBuilt(); return result; } @@ -16011,6 +16791,10 @@ public Builder mergeFrom(dev.knative.eventing.kafka.broker.contract.DataPlaneCon if (other.getEnableAutoCreateEventTypes() != false) { setEnableAutoCreateEventTypes(other.getEnableAutoCreateEventTypes()); } + if (!other.getAudience().isEmpty()) { + audience_ = other.audience_; + onChanged(); + } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; @@ -16345,6 +17129,97 @@ public Builder clearEnableAutoCreateEventTypes() { return this; } + private java.lang.Object audience_ = ""; + /** + *
+             * OIDC audience of this ingress
+             * 
+ * + * string audience = 5; + * @return The audience. + */ + public java.lang.String getAudience() { + java.lang.Object ref = audience_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + audience_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + *
+             * OIDC audience of this ingress
+             * 
+ * + * string audience = 5; + * @return The bytes for audience. + */ + public com.google.protobuf.ByteString getAudienceBytes() { + java.lang.Object ref = audience_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); + audience_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + *
+             * OIDC audience of this ingress
+             * 
+ * + * string audience = 5; + * @param value The audience to set. + * @return This builder for chaining. + */ + public Builder setAudience(java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + + audience_ = value; + onChanged(); + return this; + } + /** + *
+             * OIDC audience of this ingress
+             * 
+ * + * string audience = 5; + * @return This builder for chaining. + */ + public Builder clearAudience() { + + audience_ = getDefaultInstance().getAudience(); + onChanged(); + return this; + } + /** + *
+             * OIDC audience of this ingress
+             * 
+ * + * string audience = 5; + * @param value The bytes for audience to set. + * @return This builder for chaining. + */ + public Builder setAudienceBytes(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + + audience_ = value; + onChanged(); + return this; + } + @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); @@ -27174,64 +28049,67 @@ public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() { + "not\030\006 \001(\0132\004.NotH\000\022\027\n\005cesql\030\007 \001(\0132\006.CESQL" + "H\000B\010\n\006filter\"h\n\006Filter\022+\n\nattributes\030\001 \003" + "(\0132\027.Filter.AttributesEntry\0321\n\017Attribute" - + "sEntry\022\013\n\003key\030\001 \001(\t\022\r\n\005value\030\002 \001(\t:\0028\001\"\232" + + "sEntry\022\013\n\003key\030\001 \001(\t\022\r\n\005value\030\002 \001(\t:\0028\001\"\266" + "\001\n\014EgressConfig\022\022\n\ndeadLetter\030\001 \001(\t\022\031\n\021d" - + "eadLetterCACerts\030\006 \001(\t\022\r\n\005retry\030\002 \001(\r\022%\n" - + "\rbackoffPolicy\030\003 \001(\0162\016.BackoffPolicy\022\024\n\014" - + "backoffDelay\030\004 \001(\004\022\017\n\007timeout\030\005 \001(\004\"\353\003\n\006" - + "Egress\022\025\n\rconsumerGroup\030\001 \001(\t\022\023\n\013destina" - + "tion\030\002 \001(\t\022\032\n\022destinationCACerts\030\017 \001(\t\022\022" - + "\n\010replyUrl\030\003 \001(\tH\000\022&\n\024replyToOriginalTop" - + "ic\030\004 \001(\0132\006.EmptyH\000\022\036\n\014discardReply\030\t \001(\013" - + "2\006.EmptyH\000\022\027\n\017replyUrlCACerts\030\020 \001(\t\022\027\n\006f" - + "ilter\030\005 \001(\0132\007.Filter\022\013\n\003uid\030\006 \001(\t\022#\n\014egr" - + "essConfig\030\007 \001(\0132\r.EgressConfig\022%\n\rdelive" - + "ryOrder\030\010 \001(\0162\016.DeliveryOrder\022\031\n\007keyType" - + "\030\n 
\001(\0162\010.KeyType\022\035\n\treference\030\013 \001(\0132\n.Re" - + "ference\022)\n\017dialectedFilter\030\014 \003(\0132\020.Diale" - + "ctedFilter\022\021\n\tvReplicas\030\r \001(\005\022)\n\014feature" - + "Flags\030\016 \001(\0132\023.EgressFeatureFlagsB\017\n\rrepl" - + "yStrategy\"U\n\022EgressFeatureFlags\022\031\n\021enabl" - + "eRateLimiter\030\001 \001(\010\022$\n\034enableOrderedExecu" - + "torMetrics\030\002 \001(\010\"l\n\007Ingress\022!\n\013contentMo" - + "de\030\001 \001(\0162\014.ContentMode\022\014\n\004path\030\002 \001(\t\022\014\n\004" - + "host\030\003 \001(\t\022\"\n\032enableAutoCreateEventTypes" - + "\030\004 \001(\010\"o\n\tReference\022\014\n\004uuid\030\001 \001(\t\022\021\n\tnam" - + "espace\030\002 \001(\t\022\014\n\004name\030\003 \001(\t\022\017\n\007version\030\004 " - + "\001(\t\022\014\n\004kind\030\005 \001(\t\022\024\n\014groupVersion\030\006 \001(\t\"" - + "`\n\017SecretReference\022\035\n\treference\030\001 \001(\0132\n." - + "Reference\022.\n\022keyFieldReferences\030\002 \003(\0132\022." 
- + "KeyFieldReference\"C\n\021KeyFieldReference\022\021" - + "\n\tsecretKey\030\002 \001(\t\022\033\n\005field\030\003 \001(\0162\014.Secre" - + "tField\"Y\n\024MultiSecretReference\022\033\n\010protoc" - + "ol\030\001 \001(\0162\t.Protocol\022$\n\nreferences\030\002 \003(\0132" - + "\020.SecretReference\"\202\001\n\023CloudEventOverride" - + "s\0228\n\nextensions\030\001 \003(\0132$.CloudEventOverri" - + "des.ExtensionsEntry\0321\n\017ExtensionsEntry\022\013" - + "\n\003key\030\001 \001(\t\022\r\n\005value\030\002 \001(\t:\0028\001\"\350\002\n\010Resou" - + "rce\022\013\n\003uid\030\001 \001(\t\022\016\n\006topics\030\002 \003(\t\022\030\n\020boot" - + "strapServers\030\003 \001(\t\022\031\n\007ingress\030\004 \001(\0132\010.In" - + "gress\022#\n\014egressConfig\030\005 \001(\0132\r.EgressConf" - + "ig\022\031\n\010egresses\030\006 \003(\0132\007.Egress\022\034\n\nabsentA" - + "uth\030\007 \001(\0132\006.EmptyH\000\022 \n\nauthSecret\030\010 \001(\0132" - + "\n.ReferenceH\000\0220\n\017multiAuthSecret\030\t \001(\0132\025" - + ".MultiSecretReferenceH\000\0221\n\023cloudEventOve" - + "rrides\030\n \001(\0132\024.CloudEventOverrides\022\035\n\tre" - + "ference\030\013 \001(\0132\n.ReferenceB\006\n\004Auth\"R\n\010Con" - + "tract\022\022\n\ngeneration\030\001 \001(\004\022\034\n\tresources\030\002" - + " \003(\0132\t.Resource\022\024\n\014trustBundles\030\003 \003(\t*,\n" - + "\rBackoffPolicy\022\017\n\013Exponential\020\000\022\n\n\006Linea" - + "r\020\001*+\n\rDeliveryOrder\022\r\n\tUNORDERED\020\000\022\013\n\007O" - + "RDERED\020\001*=\n\007KeyType\022\n\n\006String\020\000\022\013\n\007Integ" - + "er\020\001\022\n\n\006Double\020\002\022\r\n\tByteArray\020\003*)\n\013Conte" - + "ntMode\022\n\n\006BINARY\020\000\022\016\n\nSTRUCTURED\020\001*a\n\013Se" - + "cretField\022\022\n\016SASL_MECHANISM\020\000\022\n\n\006CA_CRT\020" - + "\001\022\014\n\010USER_CRT\020\002\022\014\n\010USER_KEY\020\003\022\010\n\004USER\020\004\022" - + 
"\014\n\010PASSWORD\020\005*D\n\010Protocol\022\r\n\tPLAINTEXT\020\000" - + "\022\022\n\016SASL_PLAINTEXT\020\001\022\007\n\003SSL\020\002\022\014\n\010SASL_SS" - + "L\020\003B[\n*dev.knative.eventing.kafka.broker" - + ".contractB\021DataPlaneContractZ\032control-pl" - + "ane/pkg/contractb\006proto3" + + "eadLetterCACerts\030\006 \001(\t\022\032\n\022deadLetterAudi" + + "ence\030\007 \001(\t\022\r\n\005retry\030\002 \001(\r\022%\n\rbackoffPoli" + + "cy\030\003 \001(\0162\016.BackoffPolicy\022\024\n\014backoffDelay" + + "\030\004 \001(\004\022\017\n\007timeout\030\005 \001(\004\"\302\004\n\006Egress\022\025\n\rco" + + "nsumerGroup\030\001 \001(\t\022\023\n\013destination\030\002 \001(\t\022\032" + + "\n\022destinationCACerts\030\017 \001(\t\022\033\n\023destinatio" + + "nAudience\030\021 \001(\t\022\022\n\010replyUrl\030\003 \001(\tH\000\022&\n\024r" + + "eplyToOriginalTopic\030\004 \001(\0132\006.EmptyH\000\022\036\n\014d" + + "iscardReply\030\t \001(\0132\006.EmptyH\000\022\027\n\017replyUrlC" + + "ACerts\030\020 \001(\t\022\030\n\020replyUrlAudience\030\022 \001(\t\022\027" + + "\n\006filter\030\005 \001(\0132\007.Filter\022\013\n\003uid\030\006 \001(\t\022#\n\014" + + "egressConfig\030\007 \001(\0132\r.EgressConfig\022%\n\rdel" + + "iveryOrder\030\010 \001(\0162\016.DeliveryOrder\022\031\n\007keyT" + + "ype\030\n \001(\0162\010.KeyType\022\035\n\treference\030\013 \001(\0132\n" + + ".Reference\022)\n\017dialectedFilter\030\014 \003(\0132\020.Di" + + "alectedFilter\022\021\n\tvReplicas\030\r \001(\005\022)\n\014feat" + + "ureFlags\030\016 \001(\0132\023.EgressFeatureFlags\022\036\n\026o" + + "idcServiceAccountName\030\023 \001(\tB\017\n\rreplyStra" + + "tegy\"U\n\022EgressFeatureFlags\022\031\n\021enableRate" + + "Limiter\030\001 \001(\010\022$\n\034enableOrderedExecutorMe" + + "trics\030\002 \001(\010\"~\n\007Ingress\022!\n\013contentMode\030\001 " + + "\001(\0162\014.ContentMode\022\014\n\004path\030\002 \001(\t\022\014\n\004host\030" + + "\003 
\001(\t\022\"\n\032enableAutoCreateEventTypes\030\004 \001(" + + "\010\022\020\n\010audience\030\005 \001(\t\"o\n\tReference\022\014\n\004uuid" + + "\030\001 \001(\t\022\021\n\tnamespace\030\002 \001(\t\022\014\n\004name\030\003 \001(\t\022" + + "\017\n\007version\030\004 \001(\t\022\014\n\004kind\030\005 \001(\t\022\024\n\014groupV" + + "ersion\030\006 \001(\t\"`\n\017SecretReference\022\035\n\trefer" + + "ence\030\001 \001(\0132\n.Reference\022.\n\022keyFieldRefere" + + "nces\030\002 \003(\0132\022.KeyFieldReference\"C\n\021KeyFie" + + "ldReference\022\021\n\tsecretKey\030\002 \001(\t\022\033\n\005field\030" + + "\003 \001(\0162\014.SecretField\"Y\n\024MultiSecretRefere" + + "nce\022\033\n\010protocol\030\001 \001(\0162\t.Protocol\022$\n\nrefe" + + "rences\030\002 \003(\0132\020.SecretReference\"\202\001\n\023Cloud" + + "EventOverrides\0228\n\nextensions\030\001 \003(\0132$.Clo" + + "udEventOverrides.ExtensionsEntry\0321\n\017Exte" + + "nsionsEntry\022\013\n\003key\030\001 \001(\t\022\r\n\005value\030\002 \001(\t:" + + "\0028\001\"\350\002\n\010Resource\022\013\n\003uid\030\001 \001(\t\022\016\n\006topics\030" + + "\002 \003(\t\022\030\n\020bootstrapServers\030\003 \001(\t\022\031\n\007ingre" + + "ss\030\004 \001(\0132\010.Ingress\022#\n\014egressConfig\030\005 \001(\013" + + "2\r.EgressConfig\022\031\n\010egresses\030\006 \003(\0132\007.Egre" + + "ss\022\034\n\nabsentAuth\030\007 \001(\0132\006.EmptyH\000\022 \n\nauth" + + "Secret\030\010 \001(\0132\n.ReferenceH\000\0220\n\017multiAuthS" + + "ecret\030\t \001(\0132\025.MultiSecretReferenceH\000\0221\n\023" + + "cloudEventOverrides\030\n \001(\0132\024.CloudEventOv" + + "errides\022\035\n\treference\030\013 \001(\0132\n.ReferenceB\006" + + "\n\004Auth\"R\n\010Contract\022\022\n\ngeneration\030\001 \001(\004\022\034" + + "\n\tresources\030\002 \003(\0132\t.Resource\022\024\n\014trustBun" + + "dles\030\003 \003(\t*,\n\rBackoffPolicy\022\017\n\013Exponenti" + + 
"al\020\000\022\n\n\006Linear\020\001*+\n\rDeliveryOrder\022\r\n\tUNO" + + "RDERED\020\000\022\013\n\007ORDERED\020\001*=\n\007KeyType\022\n\n\006Stri" + + "ng\020\000\022\013\n\007Integer\020\001\022\n\n\006Double\020\002\022\r\n\tByteArr" + + "ay\020\003*)\n\013ContentMode\022\n\n\006BINARY\020\000\022\016\n\nSTRUC" + + "TURED\020\001*a\n\013SecretField\022\022\n\016SASL_MECHANISM" + + "\020\000\022\n\n\006CA_CRT\020\001\022\014\n\010USER_CRT\020\002\022\014\n\010USER_KEY" + + "\020\003\022\010\n\004USER\020\004\022\014\n\010PASSWORD\020\005*D\n\010Protocol\022\r" + + "\n\tPLAINTEXT\020\000\022\022\n\016SASL_PLAINTEXT\020\001\022\007\n\003SSL" + + "\020\002\022\014\n\010SASL_SSL\020\003B[\n*dev.knative.eventing" + + ".kafka.broker.contractB\021DataPlaneContrac" + + "tZ\032control-plane/pkg/contractb\006proto3" }; descriptor = com.google.protobuf.Descriptors.FileDescriptor.internalBuildGeneratedFileFrom( descriptorData, new com.google.protobuf.Descriptors.FileDescriptor[] {}); @@ -27317,7 +28195,13 @@ public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() { getDescriptor().getMessageTypes().get(10); internal_static_EgressConfig_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_EgressConfig_descriptor, new java.lang.String[] { - "DeadLetter", "DeadLetterCACerts", "Retry", "BackoffPolicy", "BackoffDelay", "Timeout", + "DeadLetter", + "DeadLetterCACerts", + "DeadLetterAudience", + "Retry", + "BackoffPolicy", + "BackoffDelay", + "Timeout", }); internal_static_Egress_descriptor = getDescriptor().getMessageTypes().get(11); internal_static_Egress_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( @@ -27325,10 +28209,12 @@ public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() { "ConsumerGroup", "Destination", "DestinationCACerts", + "DestinationAudience", "ReplyUrl", "ReplyToOriginalTopic", "DiscardReply", "ReplyUrlCACerts", + "ReplyUrlAudience", 
"Filter", "Uid", "EgressConfig", @@ -27338,6 +28224,7 @@ public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() { "DialectedFilter", "VReplicas", "FeatureFlags", + "OidcServiceAccountName", "ReplyStrategy", }); internal_static_EgressFeatureFlags_descriptor = @@ -27350,7 +28237,7 @@ public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() { internal_static_Ingress_descriptor = getDescriptor().getMessageTypes().get(13); internal_static_Ingress_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_Ingress_descriptor, new java.lang.String[] { - "ContentMode", "Path", "Host", "EnableAutoCreateEventTypes", + "ContentMode", "Path", "Host", "EnableAutoCreateEventTypes", "Audience", }); internal_static_Reference_descriptor = getDescriptor().getMessageTypes().get(14); internal_static_Reference_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( diff --git a/data-plane/core/src/main/java/dev/knative/eventing/kafka/broker/core/features/FeaturesConfig.java b/data-plane/core/src/main/java/dev/knative/eventing/kafka/broker/core/features/FeaturesConfig.java new file mode 100644 index 0000000000..939d3fc945 --- /dev/null +++ b/data-plane/core/src/main/java/dev/knative/eventing/kafka/broker/core/features/FeaturesConfig.java @@ -0,0 +1,55 @@ +/* + * Copyright © 2018 Knative Authors (knative-dev@googlegroups.com) + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package dev.knative.eventing.kafka.broker.core.features; + +import java.io.IOException; +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.Paths; +import java.util.HashMap; +import java.util.Map; + +public class FeaturesConfig { + + private final String DISABLED = "disabled"; + private final String ENABLED = "enabled"; + + public static final String KEY_AUTHENTICATION_OIDC = "authentication-oidc"; + + private final Map features; + + public FeaturesConfig(String path) throws IOException { + features = new HashMap<>(); + String[] keys = { + KEY_AUTHENTICATION_OIDC, + }; + + for (String key : keys) { + Path filePath = Paths.get(path, key); + if (Files.exists(filePath)) { + features.put(key, Files.readString(filePath)); + } + } + } + + public boolean isAuthenticationOIDC() { + return isEnabled(KEY_AUTHENTICATION_OIDC); + } + + private boolean isEnabled(String key) { + return features.getOrDefault(key, DISABLED).toLowerCase().equals(ENABLED); + } +} diff --git a/data-plane/core/src/main/java/dev/knative/eventing/kafka/broker/core/oidc/OIDCDiscoveryConfig.java b/data-plane/core/src/main/java/dev/knative/eventing/kafka/broker/core/oidc/OIDCDiscoveryConfig.java index 02d5d3b360..4e5d18c769 100644 --- a/data-plane/core/src/main/java/dev/knative/eventing/kafka/broker/core/oidc/OIDCDiscoveryConfig.java +++ b/data-plane/core/src/main/java/dev/knative/eventing/kafka/broker/core/oidc/OIDCDiscoveryConfig.java @@ -34,6 +34,8 @@ public class OIDCDiscoveryConfig { private static final Logger logger = LoggerFactory.getLogger(TokenVerifier.class); + private static final String OIDC_DISCOVERY_URL = "https://kubernetes.default.svc/.well-known/openid-configuration"; + private String issuer; private JwksVerificationKeyResolver jwksVerificationKeyResolver; @@ -58,13 +60,18 @@ public static Future build(Vertx vertx) { OIDCDiscoveryConfig 
oidcDiscoveryConfig = new OIDCDiscoveryConfig(); return webClient - .getAbs("https://kubernetes.default.svc/.well-known/openid-configuration") + .getAbs(OIDC_DISCOVERY_URL) .bearerTokenAuthentication(kubeConfig.getAutoOAuthToken()) .send() .compose(res -> { logger.debug("Got raw OIDC discovery info: " + res.bodyAsString()); try { + if (res.statusCode() != 200) { + return Future.failedFuture("Unexpected status (" + res.statusCode() + + ") on OIDC discovery endpoint: " + res.bodyAsString()); + } + ObjectMapper mapper = new ObjectMapper(); OIDCInfo oidcInfo = mapper.readValue(res.bodyAsString(), OIDCInfo.class); diff --git a/data-plane/core/src/main/java/dev/knative/eventing/kafka/broker/core/oidc/TokenProvider.java b/data-plane/core/src/main/java/dev/knative/eventing/kafka/broker/core/oidc/TokenProvider.java index d9f5c35daa..3ee9cf81c7 100644 --- a/data-plane/core/src/main/java/dev/knative/eventing/kafka/broker/core/oidc/TokenProvider.java +++ b/data-plane/core/src/main/java/dev/knative/eventing/kafka/broker/core/oidc/TokenProvider.java @@ -15,6 +15,8 @@ */ package dev.knative.eventing.kafka.broker.core.oidc; +import com.google.common.cache.Cache; +import com.google.common.cache.CacheBuilder; import dev.knative.eventing.kafka.broker.core.NamespacedName; import io.fabric8.kubernetes.api.model.authentication.TokenRequest; import io.fabric8.kubernetes.api.model.authentication.TokenRequestBuilder; @@ -22,36 +24,78 @@ import io.fabric8.kubernetes.client.ConfigBuilder; import io.fabric8.kubernetes.client.KubernetesClient; import io.fabric8.kubernetes.client.KubernetesClientBuilder; +import io.vertx.core.Future; +import io.vertx.core.Vertx; +import java.util.concurrent.TimeUnit; public class TokenProvider { + private static final long TOKEN_EXPIRATION_SECONDS = 3600L; // 1 hour + private static final long EXPIRATION_BUFFER_TIME_SECONDS = 300L; // 5 minutes + + private static final long CACHE_MAXIMUM_SIZE = 1000L; // Cache up to 1000 tokens + private static final long 
CACHE_EXPIRATION_TIME_SECONDS = + TOKEN_EXPIRATION_SECONDS - EXPIRATION_BUFFER_TIME_SECONDS; // Cache tokens for 55 minutes + private final KubernetesClient kubernetesClient; + private final Cache tokenCache; - public TokenProvider() { - Config clientConfig = new ConfigBuilder().build(); + private final Vertx vertx; + public TokenProvider(Vertx vertx) { + this.vertx = vertx; + Config clientConfig = new ConfigBuilder().build(); kubernetesClient = new KubernetesClientBuilder().withConfig(clientConfig).build(); - } - public String requestToken(NamespacedName serviceAccount, String audience) { - TokenRequest tokenRequest = new TokenRequestBuilder() - .withNewSpec() - .withAudiences(audience) - .withExpirationSeconds(3600L) - .endSpec() + this.tokenCache = CacheBuilder.newBuilder() + .expireAfterWrite(CACHE_EXPIRATION_TIME_SECONDS, TimeUnit.SECONDS) + .maximumSize(CACHE_MAXIMUM_SIZE) .build(); + } - tokenRequest = kubernetesClient - .serviceAccounts() - .inNamespace(serviceAccount.namespace()) - .withName(serviceAccount.name()) - .tokenRequest(tokenRequest); + public Future getToken(NamespacedName serviceAccount, String audience) { + String cacheKey = generateCacheKey(serviceAccount, audience); + String token = tokenCache.getIfPresent(cacheKey); - if (tokenRequest != null && tokenRequest.getStatus() != null) { - return tokenRequest.getStatus().getToken(); + if (token != null) { + return Future.succeededFuture(token); } else { - return null; + return requestToken(serviceAccount, audience).onSuccess(t -> { + if (t != null) { + tokenCache.put(cacheKey, t); + } + }); } } + + private Future requestToken(NamespacedName serviceAccount, String audience) { + return this.vertx.executeBlocking( + promise -> { + TokenRequest tokenRequest = new TokenRequestBuilder() + .withNewSpec() + .withAudiences(audience) + .withExpirationSeconds(TOKEN_EXPIRATION_SECONDS) + .endSpec() + .build(); + + tokenRequest = kubernetesClient + .serviceAccounts() + .inNamespace(serviceAccount.namespace()) 
+ .withName(serviceAccount.name()) + .tokenRequest(tokenRequest); + + if (tokenRequest != null && tokenRequest.getStatus() != null) { + promise.complete(tokenRequest.getStatus().getToken()); + } else { + promise.fail("could not request token for " + serviceAccount.name() + "/" + + serviceAccount.namespace()); + } + }, + false); + } + + private String generateCacheKey(NamespacedName serviceAccount, String audience) { + return serviceAccount.namespace() + "/" + serviceAccount.name() + "/" + audience; + } } diff --git a/data-plane/core/src/main/java/dev/knative/eventing/kafka/broker/core/oidc/TokenVerifier.java b/data-plane/core/src/main/java/dev/knative/eventing/kafka/broker/core/oidc/TokenVerifier.java index e12c764de3..521b4f4514 100644 --- a/data-plane/core/src/main/java/dev/knative/eventing/kafka/broker/core/oidc/TokenVerifier.java +++ b/data-plane/core/src/main/java/dev/knative/eventing/kafka/broker/core/oidc/TokenVerifier.java @@ -16,61 +16,11 @@ package dev.knative.eventing.kafka.broker.core.oidc; import io.vertx.core.Future; -import io.vertx.core.Vertx; import io.vertx.core.http.HttpServerRequest; import org.jose4j.jwt.JwtClaims; -import org.jose4j.jwt.consumer.InvalidJwtException; -import org.jose4j.jwt.consumer.JwtConsumer; -import org.jose4j.jwt.consumer.JwtConsumerBuilder; -import org.jose4j.jwt.consumer.JwtContext; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -public class TokenVerifier { +public interface TokenVerifier { + Future verify(String token, String expectedAudience); - private static final Logger logger = LoggerFactory.getLogger(TokenVerifier.class); - - private final Vertx vertx; - - private final OIDCDiscoveryConfig oidcDiscoveryConfig; - - public TokenVerifier(Vertx vertx, OIDCDiscoveryConfig oidcDiscoveryConfig) { - this.vertx = vertx; - this.oidcDiscoveryConfig = oidcDiscoveryConfig; - } - - public Future verify(String token, String expectedAudience) { - return this.vertx.executeBlocking(promise -> { - // execute blocking, as 
jose .process() is blocking - - JwtConsumer jwtConsumer = new JwtConsumerBuilder() - .setVerificationKeyResolver(this.oidcDiscoveryConfig.getJwksVerificationKeyResolver()) - .setExpectedAudience(expectedAudience) - .setExpectedIssuer(this.oidcDiscoveryConfig.getIssuer()) - .build(); - - try { - JwtContext jwtContext = jwtConsumer.process(token); - - promise.complete(jwtContext.getJwtClaims()); - } catch (InvalidJwtException e) { - promise.fail(e); - } - }); - } - - public Future verify(HttpServerRequest request, String expectedAudience) { - String authHeader = request.getHeader("Authorization"); - if (authHeader == null || authHeader.isEmpty()) { - return Future.failedFuture("Request didn't contain Authorization header"); // change to exception - } - - if (!authHeader.startsWith("Bearer ") && authHeader.length() <= "Bearer ".length()) { - return Future.failedFuture("Authorization header didn't contain Bearer token"); // change to exception - } - - String token = authHeader.substring("Bearer ".length()); - - return verify(token, expectedAudience); - } + Future verify(HttpServerRequest request, String expectedAudience); } diff --git a/data-plane/core/src/main/java/dev/knative/eventing/kafka/broker/core/oidc/TokenVerifierImpl.java b/data-plane/core/src/main/java/dev/knative/eventing/kafka/broker/core/oidc/TokenVerifierImpl.java new file mode 100644 index 0000000000..b5312e0bbb --- /dev/null +++ b/data-plane/core/src/main/java/dev/knative/eventing/kafka/broker/core/oidc/TokenVerifierImpl.java @@ -0,0 +1,87 @@ +/* + * Copyright © 2018 Knative Authors (knative-dev@googlegroups.com) + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package dev.knative.eventing.kafka.broker.core.oidc; + +import dev.knative.eventing.kafka.broker.core.features.FeaturesConfig; +import io.vertx.core.Future; +import io.vertx.core.Vertx; +import io.vertx.core.http.HttpServerRequest; +import org.jose4j.jwt.JwtClaims; +import org.jose4j.jwt.consumer.InvalidJwtException; +import org.jose4j.jwt.consumer.JwtConsumer; +import org.jose4j.jwt.consumer.JwtConsumerBuilder; +import org.jose4j.jwt.consumer.JwtContext; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class TokenVerifierImpl implements TokenVerifier { + + private static final Logger logger = LoggerFactory.getLogger(TokenVerifierImpl.class); + + private final Vertx vertx; + + private final OIDCDiscoveryConfig oidcDiscoveryConfig; + + public TokenVerifierImpl(Vertx vertx, OIDCDiscoveryConfig oidcDiscoveryConfig) { + this.vertx = vertx; + this.oidcDiscoveryConfig = oidcDiscoveryConfig; + } + + public Future verify(String token, String expectedAudience) { + return this.vertx.executeBlocking( + promise -> { + // execute blocking, as jose .process() is blocking + + if (oidcDiscoveryConfig == null) { + promise.fail( + "OIDC discovery config not initialized. This is most likely the case when the pod was started with an invalid OIDC config in place and then later the " + + FeaturesConfig.KEY_AUTHENTICATION_OIDC + + " flag was enabled. 
Restarting the pod should help."); + } + + JwtConsumer jwtConsumer = new JwtConsumerBuilder() + .setVerificationKeyResolver(this.oidcDiscoveryConfig.getJwksVerificationKeyResolver()) + .setExpectedAudience(expectedAudience) + .setExpectedIssuer(this.oidcDiscoveryConfig.getIssuer()) + .build(); + + try { + JwtContext jwtContext = jwtConsumer.process(token); + + promise.complete(jwtContext.getJwtClaims()); + } catch (InvalidJwtException e) { + promise.fail(e); + } + }, + false); + } + + public Future verify(final HttpServerRequest request, String expectedAudience) { + String authHeader = request.getHeader("Authorization"); + if (authHeader == null || authHeader.isEmpty()) { + return Future.failedFuture("Request didn't contain Authorization header"); + } + + if (!authHeader.startsWith("Bearer ") && authHeader.length() <= "Bearer ".length()) { + return Future.failedFuture("Authorization header didn't contain Bearer token"); + } + + String token = authHeader.substring("Bearer ".length()); + + request.pause(); + return verify(token, expectedAudience).onSuccess(v -> request.resume()); + } +} diff --git a/data-plane/core/src/main/java/dev/knative/eventing/kafka/broker/core/utils/BaseEnv.java b/data-plane/core/src/main/java/dev/knative/eventing/kafka/broker/core/utils/BaseEnv.java index 5b3743876b..9353006166 100644 --- a/data-plane/core/src/main/java/dev/knative/eventing/kafka/broker/core/utils/BaseEnv.java +++ b/data-plane/core/src/main/java/dev/knative/eventing/kafka/broker/core/utils/BaseEnv.java @@ -48,6 +48,9 @@ public class BaseEnv { public static final String CONFIG_TRACING_PATH = "CONFIG_TRACING_PATH"; private final String configTracingPath; + public static final String CONFIG_FEATURES_PATH = "CONFIG_FEATURES_PATH"; + private final String configFeaturesPath; + public static final String WAIT_STARTUP_SECONDS = "WAIT_STARTUP_SECONDS"; private final int waitStartupSeconds; @@ -61,6 +64,7 @@ public BaseEnv(Function envProvider) { this.producerConfigFilePath = 
requireNonNull(envProvider.apply(PRODUCER_CONFIG_FILE_PATH)); this.dataPlaneConfigFilePath = requireNonNull(envProvider.apply(DATA_PLANE_CONFIG_FILE_PATH)); this.configTracingPath = requireNonNull(envProvider.apply(CONFIG_TRACING_PATH)); + this.configFeaturesPath = envProvider.apply(CONFIG_FEATURES_PATH); this.waitStartupSeconds = Integer.parseInt(envProvider.apply(WAIT_STARTUP_SECONDS)); } @@ -100,6 +104,10 @@ public String getConfigTracingPath() { return configTracingPath; } + public String getConfigFeaturesPath() { + return configFeaturesPath; + } + public int getWaitStartupSeconds() { return waitStartupSeconds; } diff --git a/data-plane/core/src/test/java/dev/knative/eventing/kafka/broker/core/features/FeaturesConfigTest.java b/data-plane/core/src/test/java/dev/knative/eventing/kafka/broker/core/features/FeaturesConfigTest.java new file mode 100644 index 0000000000..0a008d483c --- /dev/null +++ b/data-plane/core/src/test/java/dev/knative/eventing/kafka/broker/core/features/FeaturesConfigTest.java @@ -0,0 +1,66 @@ +/* + * Copyright © 2018 Knative Authors (knative-dev@googlegroups.com) + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package dev.knative.eventing.kafka.broker.core.features; + +import java.io.File; +import java.io.FileWriter; +import java.io.IOException; +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.Paths; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; + +public class FeaturesConfigTest { + + private File featuresDir; + + @BeforeEach + public void setup() throws IOException { + featuresDir = Files.createTempDirectory("features").toFile(); + + Files.createFile(Path.of(featuresDir.getAbsolutePath(), FeaturesConfig.KEY_AUTHENTICATION_OIDC)) + .toFile(); + } + + @AfterEach + public void cleanup() throws IOException { + Files.walk(featuresDir.toPath()) + .filter(Files::isRegularFile) + .map(Path::toFile) + .forEach(File::delete); + + featuresDir.delete(); + } + + @Test + public void testFeaturesConfigAuthenticationOIDC() throws IOException { + FeaturesConfig fc = new FeaturesConfig(featuresDir.getAbsolutePath()); + Assertions.assertFalse(fc.isAuthenticationOIDC(), "should be false, if feature file is empty"); + + try (FileWriter writer = + new FileWriter(Paths.get(featuresDir.getAbsolutePath(), FeaturesConfig.KEY_AUTHENTICATION_OIDC) + .toString())) { + writer.write("enabled"); + } + + fc = new FeaturesConfig(featuresDir.getAbsolutePath()); + Assertions.assertTrue(fc.isAuthenticationOIDC(), "should be true, if feature is enabled"); + } +} diff --git a/data-plane/dispatcher/src/main/java/dev/knative/eventing/kafka/broker/dispatcher/impl/http/WebClientCloudEventSender.java b/data-plane/dispatcher/src/main/java/dev/knative/eventing/kafka/broker/dispatcher/impl/http/WebClientCloudEventSender.java index e944f90306..72e7cf35d1 100644 --- a/data-plane/dispatcher/src/main/java/dev/knative/eventing/kafka/broker/dispatcher/impl/http/WebClientCloudEventSender.java +++ 
b/data-plane/dispatcher/src/main/java/dev/knative/eventing/kafka/broker/dispatcher/impl/http/WebClientCloudEventSender.java @@ -18,7 +18,9 @@ import static dev.knative.eventing.kafka.broker.core.utils.Logging.keyValue; import dev.knative.eventing.kafka.broker.contract.DataPlaneContract; +import dev.knative.eventing.kafka.broker.core.NamespacedName; import dev.knative.eventing.kafka.broker.core.metrics.Metrics; +import dev.knative.eventing.kafka.broker.core.oidc.TokenProvider; import dev.knative.eventing.kafka.broker.core.tracing.TracingSpan; import dev.knative.eventing.kafka.broker.dispatcher.CloudEventSender; import dev.knative.eventing.kafka.broker.dispatcher.impl.ResponseFailureException; @@ -31,6 +33,7 @@ import io.vertx.core.Promise; import io.vertx.core.Vertx; import io.vertx.core.buffer.Buffer; +import io.vertx.ext.web.client.HttpRequest; import io.vertx.ext.web.client.HttpResponse; import io.vertx.ext.web.client.WebClient; import java.net.URI; @@ -49,6 +52,8 @@ public final class WebClientCloudEventSender implements CloudEventSender { private final WebClient client; private final String target; + private final String targetOIDCAudience; + private final NamespacedName oidcServiceAccount; private final ConsumerVerticleContext consumerVerticleContext; private final Promise closePromise = Promise.promise(); @@ -57,6 +62,7 @@ public final class WebClientCloudEventSender implements CloudEventSender { private final AtomicBoolean closed = new AtomicBoolean(false); private final AtomicInteger inFlightRequests = new AtomicInteger(0); + private final TokenProvider tokenProvider; /** * All args constructor. 
@@ -70,6 +76,8 @@ public WebClientCloudEventSender( final Vertx vertx, final WebClient client, final String target, + final String targetOIDCAudience, + final NamespacedName oidcServiceAccount, final ConsumerVerticleContext consumerVerticleContext, final Tags additionalTags) { Objects.requireNonNull(vertx); @@ -85,8 +93,11 @@ public WebClientCloudEventSender( this.vertx = vertx; this.client = client; this.target = target; + this.targetOIDCAudience = targetOIDCAudience; + this.oidcServiceAccount = oidcServiceAccount; this.consumerVerticleContext = consumerVerticleContext; this.retryPolicyFunc = computeRetryPolicy(consumerVerticleContext.getEgressConfig()); + this.tokenProvider = new TokenProvider(vertx); Metrics.eventDispatchInFlightCount( additionalTags.and(consumerVerticleContext.getTags()), this.inFlightRequests::get) @@ -113,6 +124,7 @@ private Future> send(final CloudEvent event, final int retr try { TracingSpan.decorateCurrentWithEvent(event); requestEmitted(); + // here we send the event send(event, promise).onComplete(v -> requestCompleted()); } catch (CloudEventRWException e) { logger.error( @@ -175,21 +187,39 @@ private void requestEmitted() { } private Future send(final CloudEvent event, final Promise> breaker) { - return VertxMessageFactory.createWriter(client.postAbs(target) - .timeout( - this.consumerVerticleContext.getEgressConfig().getTimeout() <= 0 - ? 
DEFAULT_TIMEOUT_MS - : this.consumerVerticleContext - .getEgressConfig() - .getTimeout()) - .putHeader("Prefer", "reply") - .putHeader( - "Kn-Namespace", - this.consumerVerticleContext - .getEgress() - .getReference() - .getNamespace())) - .writeBinary(event) + Future requestToken; + if (this.targetOIDCAudience.isEmpty()) { + requestToken = Future.succeededFuture(null); + } else { + requestToken = this.tokenProvider.getToken(this.oidcServiceAccount, this.targetOIDCAudience); + } + + return requestToken + .compose(token -> { + HttpRequest req = client.postAbs(target) + .timeout( + this.consumerVerticleContext + .getEgressConfig() + .getTimeout() + <= 0 + ? DEFAULT_TIMEOUT_MS + : this.consumerVerticleContext + .getEgressConfig() + .getTimeout()) + .putHeader("Prefer", "reply") + .putHeader( + "Kn-Namespace", + this.consumerVerticleContext + .getEgress() + .getReference() + .getNamespace()); + + if (token != null && !token.isEmpty()) { + req.putHeader("Authorization", "Bearer " + token); + } + + return VertxMessageFactory.createWriter(req).writeBinary(event); + }) .onFailure(ex -> { logError(event, ex); breaker.tryFail(ex); diff --git a/data-plane/dispatcher/src/main/java/dev/knative/eventing/kafka/broker/dispatcher/main/ConsumerVerticleBuilder.java b/data-plane/dispatcher/src/main/java/dev/knative/eventing/kafka/broker/dispatcher/main/ConsumerVerticleBuilder.java index c2d7b1813a..a6ee90a61a 100644 --- a/data-plane/dispatcher/src/main/java/dev/knative/eventing/kafka/broker/dispatcher/main/ConsumerVerticleBuilder.java +++ b/data-plane/dispatcher/src/main/java/dev/knative/eventing/kafka/broker/dispatcher/main/ConsumerVerticleBuilder.java @@ -18,6 +18,7 @@ import static dev.knative.eventing.kafka.broker.core.utils.Logging.keyValue; import dev.knative.eventing.kafka.broker.contract.DataPlaneContract; +import dev.knative.eventing.kafka.broker.core.NamespacedName; import dev.knative.eventing.kafka.broker.core.ReactiveKafkaConsumer; import 
dev.knative.eventing.kafka.broker.core.ReactiveKafkaProducer; import dev.knative.eventing.kafka.broker.core.metrics.Metrics; @@ -100,6 +101,7 @@ private void build(final Vertx vertx, final ConsumerVerticle consumerVerticle, f final var metricsCloser = Metrics.register(consumer.unwrap()); consumerVerticle.setCloser(metricsCloser); + // setting up cloud events sender final var egressSubscriberSender = createConsumerRecordSender(vertx); final var egressDeadLetterSender = createDeadLetterSinkRecordSender(vertx); final var responseHandler = createResponseHandler(vertx); @@ -237,6 +239,10 @@ private ResponseHandler createResponseHandler(final Vertx vertx) { createWebClientOptionsFromCACerts( consumerVerticleContext.getEgress().getReplyUrlCACerts())), consumerVerticleContext.getEgress().getReplyUrl(), + consumerVerticleContext.getEgress().getReplyUrlAudience(), + new NamespacedName( + consumerVerticleContext.getResource().getReference().getNamespace(), + consumerVerticleContext.getEgress().getOidcServiceAccountName()), consumerVerticleContext, Metrics.Tags.senderContext("reply"))); } @@ -262,6 +268,10 @@ private CloudEventSender createConsumerRecordSender(final Vertx vertx) { createWebClientOptionsFromCACerts( consumerVerticleContext.getEgress().getDestinationCACerts())), consumerVerticleContext.getEgress().getDestination(), + consumerVerticleContext.getEgress().getDestinationAudience(), + new NamespacedName( + consumerVerticleContext.getResource().getReference().getNamespace(), + consumerVerticleContext.getEgress().getOidcServiceAccountName()), consumerVerticleContext, Metrics.Tags.senderContext("subscriber")); } @@ -280,6 +290,10 @@ private CloudEventSender createDeadLetterSinkRecordSender(final Vertx vertx) { createWebClientOptionsFromCACerts( consumerVerticleContext.getEgressConfig().getDeadLetterCACerts())), consumerVerticleContext.getEgressConfig().getDeadLetter(), + consumerVerticleContext.getEgressConfig().getDeadLetterAudience(), + new NamespacedName( + 
consumerVerticleContext.getResource().getReference().getNamespace(), + consumerVerticleContext.getEgress().getOidcServiceAccountName()), consumerVerticleContext, Metrics.Tags.senderContext("deadlettersink")); } diff --git a/data-plane/dispatcher/src/test/java/dev/knative/eventing/kafka/broker/dispatcher/impl/http/WebClientCloudEventSenderTest.java b/data-plane/dispatcher/src/test/java/dev/knative/eventing/kafka/broker/dispatcher/impl/http/WebClientCloudEventSenderTest.java index f62ef25340..955bb6c529 100644 --- a/data-plane/dispatcher/src/test/java/dev/knative/eventing/kafka/broker/dispatcher/impl/http/WebClientCloudEventSenderTest.java +++ b/data-plane/dispatcher/src/test/java/dev/knative/eventing/kafka/broker/dispatcher/impl/http/WebClientCloudEventSenderTest.java @@ -27,6 +27,7 @@ import static org.mockito.Mockito.verify; import dev.knative.eventing.kafka.broker.contract.DataPlaneContract; +import dev.knative.eventing.kafka.broker.core.NamespacedName; import dev.knative.eventing.kafka.broker.core.metrics.Metrics; import dev.knative.eventing.kafka.broker.dispatcher.main.FakeConsumerVerticleContext; import io.cloudevents.core.builder.CloudEventBuilder; @@ -69,7 +70,13 @@ public void shouldWebClient(final Vertx vertx, final VertxTestContext context) { doNothing().when(webClient).close(); final var consumerRecordSender = new WebClientCloudEventSender( - vertx, webClient, "http://localhost:12345", FakeConsumerVerticleContext.get(), Tags.empty()); + vertx, + webClient, + "http://localhost:12345", + "", + new NamespacedName("", ""), + FakeConsumerVerticleContext.get(), + Tags.empty()); consumerRecordSender .close() @@ -115,6 +122,8 @@ public void shouldRetry(final Vertx vertx, final VertxTestContext context) vertx, WebClient.create(vertx), "http://localhost:" + port, + "", + new NamespacedName("", ""), FakeConsumerVerticleContext.get( FakeConsumerVerticleContext.get().getResource(), DataPlaneContract.Egress.newBuilder( @@ -174,6 +183,8 @@ public void 
shouldNotRetry(final Vertx vertx, final VertxTestContext context) vertx, WebClient.create(vertx), "http://localhost:" + port, + "", + new NamespacedName("", ""), FakeConsumerVerticleContext.get( FakeConsumerVerticleContext.get().getResource(), DataPlaneContract.Egress.newBuilder( @@ -236,6 +247,8 @@ public void shouldRetryAndFail(final Vertx vertx, final VertxTestContext context vertx, WebClient.create(vertx), "http://localhost:" + port, + "", + new NamespacedName("", ""), FakeConsumerVerticleContext.get( FakeConsumerVerticleContext.get().getResource(), DataPlaneContract.Egress.newBuilder( @@ -292,6 +305,8 @@ public void shouldNotRetryAndFail(final Vertx vertx, final VertxTestContext cont vertx, WebClient.create(vertx), "http://localhost:" + port, + "", + new NamespacedName("", ""), FakeConsumerVerticleContext.get( FakeConsumerVerticleContext.get().getResource(), DataPlaneContract.Egress.newBuilder( @@ -357,6 +372,8 @@ public void shouldTimeoutAndFail(final Vertx vertx, final VertxTestContext conte vertx, WebClient.create(vertx), "http://localhost:" + port, + "", + new NamespacedName("", ""), FakeConsumerVerticleContext.get( FakeConsumerVerticleContext.get().getResource(), DataPlaneContract.Egress.newBuilder( diff --git a/data-plane/receiver-loom/src/main/java/dev/knative/eventing/kafka/broker/receiverloom/Main.java b/data-plane/receiver-loom/src/main/java/dev/knative/eventing/kafka/broker/receiverloom/Main.java index ae325bfc90..b218b3767f 100644 --- a/data-plane/receiver-loom/src/main/java/dev/knative/eventing/kafka/broker/receiverloom/Main.java +++ b/data-plane/receiver-loom/src/main/java/dev/knative/eventing/kafka/broker/receiverloom/Main.java @@ -16,9 +16,10 @@ package dev.knative.eventing.kafka.broker.receiverloom; import java.io.IOException; +import java.util.concurrent.ExecutionException; public class Main { - public static void main(String[] args) throws IOException { + public static void main(String[] args) throws IOException, ExecutionException, 
InterruptedException { dev.knative.eventing.kafka.broker.receiver.main.Main.start(args, new LoomProducerFactory<>()); } } diff --git a/data-plane/receiver-vertx/src/main/java/dev/knative/eventing/kafka/broker/receiververtx/Main.java b/data-plane/receiver-vertx/src/main/java/dev/knative/eventing/kafka/broker/receiververtx/Main.java index 1c0dce4f4f..5365e96414 100644 --- a/data-plane/receiver-vertx/src/main/java/dev/knative/eventing/kafka/broker/receiververtx/Main.java +++ b/data-plane/receiver-vertx/src/main/java/dev/knative/eventing/kafka/broker/receiververtx/Main.java @@ -16,9 +16,10 @@ package dev.knative.eventing.kafka.broker.receiververtx; import java.io.IOException; +import java.util.concurrent.ExecutionException; public class Main { - public static void main(String[] args) throws IOException { + public static void main(String[] args) throws IOException, ExecutionException, InterruptedException { dev.knative.eventing.kafka.broker.receiver.main.Main.start(args, new VertxProducerFactory<>()); } } diff --git a/data-plane/receiver/src/main/java/dev/knative/eventing/kafka/broker/receiver/IngressProducer.java b/data-plane/receiver/src/main/java/dev/knative/eventing/kafka/broker/receiver/IngressProducer.java index 116067adc6..149191540d 100644 --- a/data-plane/receiver/src/main/java/dev/knative/eventing/kafka/broker/receiver/IngressProducer.java +++ b/data-plane/receiver/src/main/java/dev/knative/eventing/kafka/broker/receiver/IngressProducer.java @@ -50,4 +50,9 @@ default Future send(ProducerRecord record) { * @return the resource associated with this producer. */ DataPlaneContract.Reference getReference(); + + /** + * @return the OIDC audience for the ingress. 
+ */ + String getAudience(); } diff --git a/data-plane/receiver/src/main/java/dev/knative/eventing/kafka/broker/receiver/impl/IngressProducerReconcilableStore.java b/data-plane/receiver/src/main/java/dev/knative/eventing/kafka/broker/receiver/impl/IngressProducerReconcilableStore.java index 3926d2e3c2..1f3dafa51d 100644 --- a/data-plane/receiver/src/main/java/dev/knative/eventing/kafka/broker/receiver/impl/IngressProducerReconcilableStore.java +++ b/data-plane/receiver/src/main/java/dev/knative/eventing/kafka/broker/receiver/impl/IngressProducerReconcilableStore.java @@ -266,6 +266,7 @@ private static class IngressProducerImpl implements IngressProducer { private final String host; private final Properties producerProperties; private final DataPlaneContract.Reference reference; + private final String audience; IngressProducerImpl( final ReactiveKafkaProducer producer, @@ -276,6 +277,7 @@ private static class IngressProducerImpl implements IngressProducer { this.producer = producer; this.topic = resource.getTopics(0); this.reference = resource.getReference(); + this.audience = resource.getIngress().getAudience(); this.path = path; this.host = host; this.producerProperties = producerProperties; @@ -291,6 +293,11 @@ public String getTopic() { return topic; } + @Override + public String getAudience() { + return audience; + } + @Override public DataPlaneContract.Reference getReference() { return reference; diff --git a/data-plane/receiver/src/main/java/dev/knative/eventing/kafka/broker/receiver/impl/ReceiverVerticle.java b/data-plane/receiver/src/main/java/dev/knative/eventing/kafka/broker/receiver/impl/ReceiverVerticle.java index 115383e954..24e3415a7a 100644 --- a/data-plane/receiver/src/main/java/dev/knative/eventing/kafka/broker/receiver/impl/ReceiverVerticle.java +++ b/data-plane/receiver/src/main/java/dev/knative/eventing/kafka/broker/receiver/impl/ReceiverVerticle.java @@ -22,11 +22,15 @@ import static io.netty.handler.codec.http.HttpResponseStatus.OK; import 
dev.knative.eventing.kafka.broker.core.file.FileWatcher; +import dev.knative.eventing.kafka.broker.core.oidc.OIDCDiscoveryConfig; +import dev.knative.eventing.kafka.broker.core.oidc.TokenVerifier; +import dev.knative.eventing.kafka.broker.core.oidc.TokenVerifierImpl; import dev.knative.eventing.kafka.broker.core.reconciler.IngressReconcilerListener; import dev.knative.eventing.kafka.broker.core.reconciler.ResourcesReconciler; import dev.knative.eventing.kafka.broker.receiver.IngressProducer; import dev.knative.eventing.kafka.broker.receiver.IngressRequestHandler; import dev.knative.eventing.kafka.broker.receiver.RequestContext; +import dev.knative.eventing.kafka.broker.receiver.impl.handler.AuthenticationHandler; import dev.knative.eventing.kafka.broker.receiver.impl.handler.MethodNotAllowedHandler; import dev.knative.eventing.kafka.broker.receiver.impl.handler.ProbeHandler; import dev.knative.eventing.kafka.broker.receiver.main.ReceiverEnv; @@ -83,12 +87,13 @@ public class ReceiverVerticle extends AbstractVerticle implements Handler ingressProducerStoreFactory; private final IngressRequestHandler ingressRequestHandler; private final ReceiverEnv env; + private final OIDCDiscoveryConfig oidcDiscoveryConfig; + private AuthenticationHandler authenticationHandler; private HttpServer httpServer; private HttpServer httpsServer; private MessageConsumer messageConsumer; private IngressProducerReconcilableStore ingressProducerStore; - private FileWatcher secretWatcher; public ReceiverVerticle( @@ -97,7 +102,8 @@ public ReceiverVerticle( final HttpServerOptions httpsServerOptions, final Function ingressProducerStoreFactory, final IngressRequestHandler ingressRequestHandler, - final String secretVolumePath) { + final String secretVolumePath, + final OIDCDiscoveryConfig oidcDiscoveryConfig) { Objects.requireNonNull(env); Objects.requireNonNull(httpServerOptions); @@ -114,6 +120,7 @@ public ReceiverVerticle( this.secretVolume = new File(secretVolumePath); this.tlsKeyFile = new 
File(secretVolumePath + "/tls.key"); this.tlsCrtFile = new File(secretVolumePath + "/tls.crt"); + this.oidcDiscoveryConfig = oidcDiscoveryConfig; } public HttpServerOptions getHttpsServerOptions() { @@ -127,6 +134,9 @@ public void start(final Promise startPromise) { .watchIngress(IngressReconcilerListener.all(this.ingressProducerStore, this.ingressRequestHandler)) .buildAndListen(vertx); + TokenVerifier tokenVerifier = new TokenVerifierImpl(vertx, oidcDiscoveryConfig); + this.authenticationHandler = new AuthenticationHandler(tokenVerifier); + this.httpServer = vertx.createHttpServer(this.httpServerOptions); // check whether the secret volume is mounted @@ -200,10 +210,7 @@ public void stop(Promise stopPromise) throws Exception { } @Override - public void handle(HttpServerRequest request) { - - final var requestContext = new RequestContext(request); - + public void handle(final HttpServerRequest request) { // Look up for the ingress producer IngressProducer producer = this.ingressProducerStore.resolve(request.host(), request.path()); if (producer == null) { @@ -224,8 +231,11 @@ public void handle(HttpServerRequest request) { return; } - // Invoke the ingress request handler - this.ingressRequestHandler.handle(requestContext, producer); + this.authenticationHandler.handle(request, producer, req -> { + // Invoke the ingress request handler + final var requestContext = new RequestContext(req); + this.ingressRequestHandler.handle(requestContext, producer); + }); } public void updateServerConfig() { diff --git a/data-plane/receiver/src/main/java/dev/knative/eventing/kafka/broker/receiver/impl/handler/AuthenticationHandler.java b/data-plane/receiver/src/main/java/dev/knative/eventing/kafka/broker/receiver/impl/handler/AuthenticationHandler.java new file mode 100644 index 0000000000..a1283ac275 --- /dev/null +++ b/data-plane/receiver/src/main/java/dev/knative/eventing/kafka/broker/receiver/impl/handler/AuthenticationHandler.java @@ -0,0 +1,61 @@ +/* + * Copyright © 2018 
Knative Authors (knative-dev@googlegroups.com) + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package dev.knative.eventing.kafka.broker.receiver.impl.handler; + +import static dev.knative.eventing.kafka.broker.core.utils.Logging.keyValue; + +import dev.knative.eventing.kafka.broker.core.oidc.TokenVerifier; +import dev.knative.eventing.kafka.broker.receiver.IngressProducer; +import io.netty.handler.codec.http.HttpResponseStatus; +import io.vertx.core.Handler; +import io.vertx.core.http.HttpServerRequest; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * Handler checking that the provided request contained a valid JWT. + */ +public class AuthenticationHandler { + + private static final Logger logger = LoggerFactory.getLogger(AuthenticationHandler.class); + private final TokenVerifier tokenVerifier; + + public AuthenticationHandler(final TokenVerifier tokenVerifier) { + this.tokenVerifier = tokenVerifier; + } + + public void handle( + final HttpServerRequest request, final IngressProducer ingressInfo, final Handler next) { + if (ingressInfo.getAudience().isEmpty()) { + logger.debug("No audience for ingress set. 
Continue without authentication check..."); + next.handle(request); + return; + } + + tokenVerifier + .verify(request, ingressInfo.getAudience()) + .onFailure(e -> { + logger.debug("Failed to verify authentication of request: {}", keyValue("error", e.getMessage())); + request.response() + .setStatusCode(HttpResponseStatus.UNAUTHORIZED.code()) + .end(); + }) + .onSuccess(jwtClaims -> { + logger.debug("Request contained valid JWT. Continuing..."); + next.handle(request); + }); + } +} diff --git a/data-plane/receiver/src/main/java/dev/knative/eventing/kafka/broker/receiver/main/Main.java b/data-plane/receiver/src/main/java/dev/knative/eventing/kafka/broker/receiver/main/Main.java index 8638931f45..4d0f4cd902 100644 --- a/data-plane/receiver/src/main/java/dev/knative/eventing/kafka/broker/receiver/main/Main.java +++ b/data-plane/receiver/src/main/java/dev/knative/eventing/kafka/broker/receiver/main/Main.java @@ -20,8 +20,10 @@ import dev.knative.eventing.kafka.broker.core.ReactiveProducerFactory; import dev.knative.eventing.kafka.broker.core.eventbus.ContractMessageCodec; import dev.knative.eventing.kafka.broker.core.eventbus.ContractPublisher; +import dev.knative.eventing.kafka.broker.core.features.FeaturesConfig; import dev.knative.eventing.kafka.broker.core.file.FileWatcher; import dev.knative.eventing.kafka.broker.core.metrics.Metrics; +import dev.knative.eventing.kafka.broker.core.oidc.OIDCDiscoveryConfig; import dev.knative.eventing.kafka.broker.core.reconciler.impl.ResourcesReconcilerMessageHandler; import dev.knative.eventing.kafka.broker.core.tracing.TracingConfig; import dev.knative.eventing.kafka.broker.core.utils.Configurations; @@ -39,6 +41,7 @@ import java.io.File; import java.io.IOException; import java.util.Properties; +import java.util.concurrent.ExecutionException; import java.util.concurrent.TimeUnit; import java.util.function.Supplier; import org.apache.kafka.clients.producer.ProducerConfig; @@ -60,12 +63,14 @@ public class Main { * @param args 
command line arguments. */ public static void start(final String[] args, final ReactiveProducerFactory kafkaProducerFactory) - throws IOException { + throws IOException, ExecutionException, InterruptedException { ReceiverEnv env = new ReceiverEnv(System::getenv); OpenTelemetrySdk openTelemetry = TracingConfig.fromDir(env.getConfigTracingPath()).setup(); + FeaturesConfig featuresConfig = new FeaturesConfig(env.getConfigFeaturesPath()); + // Read producer properties and override some defaults Properties producerConfigs = Configurations.readPropertiesSync(env.getProducerConfigFilePath()); producerConfigs.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class); @@ -97,6 +102,24 @@ public static void start(final String[] args, final ReactiveProducerFactory kafk httpsServerOptions.setPort(env.getIngressTLSPort()); httpsServerOptions.setTracingPolicy(TracingPolicy.PROPAGATE); + // Setup OIDC discovery config + OIDCDiscoveryConfig oidcDiscoveryConfig = null; + try { + oidcDiscoveryConfig = OIDCDiscoveryConfig.build(vertx) + .toCompletionStage() + .toCompletableFuture() + .get(); + } catch (ExecutionException ex) { + if (featuresConfig.isAuthenticationOIDC()) { + logger.error("Could not load OIDC config while OIDC authentication feature is enabled."); + throw ex; + } else { + logger.warn( + "Could not load OIDC configuration. 
This will lead to problems, when the {} flag will be enabled later", + FeaturesConfig.KEY_AUTHENTICATION_OIDC); + } + } + // Configure the verticle to deploy and the deployment options final Supplier receiverVerticleFactory = new ReceiverVerticleFactory( env, @@ -104,7 +127,8 @@ public static void start(final String[] args, final ReactiveProducerFactory kafk Metrics.getRegistry(), httpServerOptions, httpsServerOptions, - kafkaProducerFactory); + kafkaProducerFactory, + oidcDiscoveryConfig); DeploymentOptions deploymentOptions = new DeploymentOptions().setInstances(Runtime.getRuntime().availableProcessors()); diff --git a/data-plane/receiver/src/main/java/dev/knative/eventing/kafka/broker/receiver/main/ReceiverVerticleFactory.java b/data-plane/receiver/src/main/java/dev/knative/eventing/kafka/broker/receiver/main/ReceiverVerticleFactory.java index 1647694589..72438f31ae 100644 --- a/data-plane/receiver/src/main/java/dev/knative/eventing/kafka/broker/receiver/main/ReceiverVerticleFactory.java +++ b/data-plane/receiver/src/main/java/dev/knative/eventing/kafka/broker/receiver/main/ReceiverVerticleFactory.java @@ -16,6 +16,7 @@ package dev.knative.eventing.kafka.broker.receiver.main; import dev.knative.eventing.kafka.broker.core.ReactiveProducerFactory; +import dev.knative.eventing.kafka.broker.core.oidc.OIDCDiscoveryConfig; import dev.knative.eventing.kafka.broker.core.security.AuthProvider; import dev.knative.eventing.kafka.broker.receiver.IngressRequestHandler; import dev.knative.eventing.kafka.broker.receiver.impl.IngressProducerReconcilableStore; @@ -39,6 +40,7 @@ class ReceiverVerticleFactory implements Supplier { private final String secretVolumePath = "/etc/receiver-tls-secret"; private final IngressRequestHandler ingressRequestHandler; + private final OIDCDiscoveryConfig oidcDiscoveryConfig; private ReactiveProducerFactory kafkaProducerFactory; @@ -48,16 +50,16 @@ class ReceiverVerticleFactory implements Supplier { final MeterRegistry metricsRegistry, final 
HttpServerOptions httpServerOptions, final HttpServerOptions httpsServerOptions, - final ReactiveProducerFactory kafkaProducerFactory) { - { - this.env = env; - this.producerConfigs = producerConfigs; - this.httpServerOptions = httpServerOptions; - this.httpsServerOptions = httpsServerOptions; - this.ingressRequestHandler = - new IngressRequestHandlerImpl(StrictRequestToRecordMapper.getInstance(), metricsRegistry); - this.kafkaProducerFactory = kafkaProducerFactory; - } + final ReactiveProducerFactory kafkaProducerFactory, + final OIDCDiscoveryConfig oidcDiscoveryConfig) { + this.env = env; + this.producerConfigs = producerConfigs; + this.httpServerOptions = httpServerOptions; + this.httpsServerOptions = httpsServerOptions; + this.ingressRequestHandler = + new IngressRequestHandlerImpl(StrictRequestToRecordMapper.getInstance(), metricsRegistry); + this.kafkaProducerFactory = kafkaProducerFactory; + this.oidcDiscoveryConfig = oidcDiscoveryConfig; } @Override @@ -71,6 +73,7 @@ public Verticle get() { producerConfigs, properties -> kafkaProducerFactory.create(v, properties)), this.ingressRequestHandler, - secretVolumePath); + secretVolumePath, + oidcDiscoveryConfig); } } diff --git a/data-plane/receiver/src/test/java/dev/knative/eventing/kafka/broker/receiver/impl/ReceiverVerticleTest.java b/data-plane/receiver/src/test/java/dev/knative/eventing/kafka/broker/receiver/impl/ReceiverVerticleTest.java index eb37cfecca..0d1684ed5f 100644 --- a/data-plane/receiver/src/test/java/dev/knative/eventing/kafka/broker/receiver/impl/ReceiverVerticleTest.java +++ b/data-plane/receiver/src/test/java/dev/knative/eventing/kafka/broker/receiver/impl/ReceiverVerticleTest.java @@ -149,7 +149,8 @@ public void setUpHTTP(final Vertx vertx, final VertxTestContext testContext) { httpsServerOptions, v -> store, new IngressRequestHandlerImpl(StrictRequestToRecordMapper.getInstance(), registry), - SECRET_VOLUME_PATH); + SECRET_VOLUME_PATH, + null); vertx.deployVerticle(verticle, 
testContext.succeeding(ar -> testContext.completeNow())); // Connect to the logger in ReceiverVerticle diff --git a/data-plane/receiver/src/test/java/dev/knative/eventing/kafka/broker/receiver/impl/ReceiverVerticleTracingTest.java b/data-plane/receiver/src/test/java/dev/knative/eventing/kafka/broker/receiver/impl/ReceiverVerticleTracingTest.java index 950d3bd314..bcc41cdde8 100644 --- a/data-plane/receiver/src/test/java/dev/knative/eventing/kafka/broker/receiver/impl/ReceiverVerticleTracingTest.java +++ b/data-plane/receiver/src/test/java/dev/knative/eventing/kafka/broker/receiver/impl/ReceiverVerticleTracingTest.java @@ -132,7 +132,8 @@ public void setup() throws ExecutionException, InterruptedException { httpsServerOptions, v -> store, new IngressRequestHandlerImpl(StrictRequestToRecordMapper.getInstance(), Metrics.getRegistry()), - SECRET_VOLUME_PATH); + SECRET_VOLUME_PATH, + null); vertx.deployVerticle(verticle).toCompletionStage().toCompletableFuture().get(); } diff --git a/data-plane/receiver/src/test/java/dev/knative/eventing/kafka/broker/receiver/impl/handler/AuthenticationHandlerTest.java b/data-plane/receiver/src/test/java/dev/knative/eventing/kafka/broker/receiver/impl/handler/AuthenticationHandlerTest.java new file mode 100644 index 0000000000..494f9b3814 --- /dev/null +++ b/data-plane/receiver/src/test/java/dev/knative/eventing/kafka/broker/receiver/impl/handler/AuthenticationHandlerTest.java @@ -0,0 +1,138 @@ +/* + * Copyright © 2018 Knative Authors (knative-dev@googlegroups.com) + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package dev.knative.eventing.kafka.broker.receiver.impl.handler; + +import static org.mockito.Mockito.*; +import static org.mockito.Mockito.when; + +import dev.knative.eventing.kafka.broker.contract.DataPlaneContract; +import dev.knative.eventing.kafka.broker.core.ReactiveKafkaProducer; +import dev.knative.eventing.kafka.broker.core.oidc.TokenVerifier; +import dev.knative.eventing.kafka.broker.receiver.IngressProducer; +import io.cloudevents.CloudEvent; +import io.netty.handler.codec.http.HttpResponseStatus; +import io.vertx.core.Future; +import io.vertx.core.Handler; +import io.vertx.core.http.HttpServerRequest; +import io.vertx.core.http.HttpServerResponse; +import org.jose4j.jwt.JwtClaims; +import org.junit.jupiter.api.Test; + +public class AuthenticationHandlerTest { + @Test + public void shouldReturnUnauthorizedWhenJWTValidationFails() { + final HttpServerRequest request = mock(HttpServerRequest.class); + final var response = mockResponse(request, HttpResponseStatus.UNAUTHORIZED.code()); + + TokenVerifier tokenVerifier = new TokenVerifier() { + @Override + public Future verify(String token, String expectedAudience) { + return Future.failedFuture("JWT validation failed"); + } + + @Override + public Future verify(HttpServerRequest request, String expectedAudience) { + return Future.failedFuture("JWT validation failed"); + } + }; + + final AuthenticationHandler authHandler = new AuthenticationHandler(tokenVerifier); + + authHandler.handle( + request, + new IngressProducer() { + @Override + public ReactiveKafkaProducer getKafkaProducer() { + return null; + } + + @Override + public String getTopic() { + return null; + } + + @Override + public DataPlaneContract.Reference getReference() { + return null; + } + + @Override + public String getAudience() { + return "some-required-audience"; + } + }, + mock(Handler.class)); + + verify(response, 
times(1)).setStatusCode(HttpResponseStatus.UNAUTHORIZED.code()); + verify(response, times(1)).end(); + } + + @Test + public void shouldContinueWithRequestWhenJWTSucceeds() { + final HttpServerRequest request = mock(HttpServerRequest.class); + final var next = mock(Handler.class); // mockHandler(request); + + TokenVerifier tokenVerifier = new TokenVerifier() { + @Override + public Future verify(String token, String expectedAudience) { + return Future.succeededFuture(new JwtClaims()); + } + + @Override + public Future verify(HttpServerRequest request, String expectedAudience) { + return Future.succeededFuture(new JwtClaims()); + } + }; + + final AuthenticationHandler authHandler = new AuthenticationHandler(tokenVerifier); + + authHandler.handle( + request, + new IngressProducer() { + @Override + public ReactiveKafkaProducer getKafkaProducer() { + return null; + } + + @Override + public String getTopic() { + return null; + } + + @Override + public DataPlaneContract.Reference getReference() { + return null; + } + + @Override + public String getAudience() { + return "some-required-audience"; + } + }, + next); + + verify(next, times(1)).handle(request); + } + + private static HttpServerResponse mockResponse(final HttpServerRequest request, final int statusCode) { + final var response = mock(HttpServerResponse.class); + when(response.setStatusCode(statusCode)).thenReturn(response); + when(request.response()).thenReturn(response); + + return response; + } +} diff --git a/data-plane/receiver/src/test/java/dev/knative/eventing/kafka/broker/receiver/impl/handler/IngressRequestHandlerImplTest.java b/data-plane/receiver/src/test/java/dev/knative/eventing/kafka/broker/receiver/impl/handler/IngressRequestHandlerImplTest.java index d95cc20d4c..462326bc71 100644 --- a/data-plane/receiver/src/test/java/dev/knative/eventing/kafka/broker/receiver/impl/handler/IngressRequestHandlerImplTest.java +++ 
b/data-plane/receiver/src/test/java/dev/knative/eventing/kafka/broker/receiver/impl/handler/IngressRequestHandlerImplTest.java @@ -98,6 +98,11 @@ public String getTopic() { public DataPlaneContract.Reference getReference() { return DataPlaneContract.Reference.newBuilder().build(); } + + @Override + public String getAudience() { + return ""; + } }); verifySetStatusCodeAndTerminateResponse(statusCode, response); @@ -129,6 +134,11 @@ public String getTopic() { public DataPlaneContract.Reference getReference() { return DataPlaneContract.Reference.newBuilder().build(); } + + @Override + public String getAudience() { + return ""; + } }); verifySetStatusCodeAndTerminateResponse(IngressRequestHandlerImpl.MAPPER_FAILED, response); diff --git a/data-plane/receiver/src/test/java/dev/knative/eventing/kafka/broker/receiver/main/ReceiverVerticleFactoryTest.java b/data-plane/receiver/src/test/java/dev/knative/eventing/kafka/broker/receiver/main/ReceiverVerticleFactoryTest.java index d18cad6157..970ca0d0f0 100644 --- a/data-plane/receiver/src/test/java/dev/knative/eventing/kafka/broker/receiver/main/ReceiverVerticleFactoryTest.java +++ b/data-plane/receiver/src/test/java/dev/knative/eventing/kafka/broker/receiver/main/ReceiverVerticleFactoryTest.java @@ -20,6 +20,7 @@ import static org.mockito.Mockito.mock; import dev.knative.eventing.kafka.broker.core.metrics.Metrics; +import dev.knative.eventing.kafka.broker.core.oidc.OIDCDiscoveryConfig; import dev.knative.eventing.kafka.broker.receiver.MockReactiveProducerFactory; import io.micrometer.core.instrument.MeterRegistry; import io.vertx.core.http.HttpServerOptions; @@ -42,7 +43,8 @@ public void shouldCreateMultipleReceiverVerticleInstances() { mock(MeterRegistry.class), mock(HttpServerOptions.class), mock(HttpServerOptions.class), - mock(MockReactiveProducerFactory.class)); + mock(MockReactiveProducerFactory.class), + mock(OIDCDiscoveryConfig.class)); assertThat(supplier.get()).isNotSameAs(supplier.get()); } diff --git 
a/data-plane/tests/src/test/java/dev/knative/eventing/kafka/broker/tests/AbstractDataPlaneTest.java b/data-plane/tests/src/test/java/dev/knative/eventing/kafka/broker/tests/AbstractDataPlaneTest.java index e8d1841e7c..02ce2b8b43 100644 --- a/data-plane/tests/src/test/java/dev/knative/eventing/kafka/broker/tests/AbstractDataPlaneTest.java +++ b/data-plane/tests/src/test/java/dev/knative/eventing/kafka/broker/tests/AbstractDataPlaneTest.java @@ -392,7 +392,8 @@ private ReceiverVerticle setUpReceiver(final Vertx vertx, final VertxTestContext AuthProvider.noAuth(), producerConfigs(), properties -> getReactiveProducerFactory() .create(v, properties)), new IngressRequestHandlerImpl(StrictRequestToRecordMapper.getInstance(), Metrics.getRegistry()), - SECRET_VOLUME_PATH); + SECRET_VOLUME_PATH, + null); final CountDownLatch latch = new CountDownLatch(1); vertx.deployVerticle(verticle, context.succeeding(h -> latch.countDown())); diff --git a/go.mod b/go.mod index 9a7f7c8a10..058776e3b5 100644 --- a/go.mod +++ b/go.mod @@ -35,10 +35,10 @@ require ( k8s.io/apiserver v0.28.5 k8s.io/client-go v0.28.5 k8s.io/utils v0.0.0-20230406110748-d93618cff8a2 - knative.dev/eventing v0.40.0 + knative.dev/eventing v0.40.2-0.20240220123738-40637813ad8d knative.dev/hack v0.0.0-20240123162936-f3f03ac0ab1a knative.dev/pkg v0.0.0-20240116073220-b488e7be5902 - knative.dev/reconciler-test v0.0.0-20240116084801-50276dfba7b3 + knative.dev/reconciler-test v0.0.0-20240206112124-e5d2639d7c5c sigs.k8s.io/controller-runtime v0.12.3 sigs.k8s.io/yaml v1.4.0 ) diff --git a/go.sum b/go.sum index da0779d6e5..263e5c7010 100644 --- a/go.sum +++ b/go.sum @@ -1273,14 +1273,14 @@ k8s.io/utils v0.0.0-20200912215256-4140de9c8800/go.mod h1:jPW/WVKK9YHAvNhRxK0md/ k8s.io/utils v0.0.0-20210819203725-bdf08cb9a70a/go.mod h1:jPW/WVKK9YHAvNhRxK0md/EJ228hCsBRufyofKtW8HA= k8s.io/utils v0.0.0-20230406110748-d93618cff8a2 h1:qY1Ad8PODbnymg2pRbkyMT/ylpTrCM8P2RJ0yroCyIk= k8s.io/utils v0.0.0-20230406110748-d93618cff8a2/go.mod 
h1:OLgZIPagt7ERELqWJFomSt595RzquPNLL48iOWgYOg0= -knative.dev/eventing v0.40.0 h1:zvMeKGBdQ5Us94Hdy7jmxpzyc1fdFnO4SS21+6nDSiU= -knative.dev/eventing v0.40.0/go.mod h1:+yUUIyvX9fn9bCSH3012kc8rG7YBbjvvxwy1Kr53dRc= +knative.dev/eventing v0.40.2-0.20240220123738-40637813ad8d h1:1ICSgDfbWgP02REn8A6h1pzNVzmxpfFlHyt5DeyKEhE= +knative.dev/eventing v0.40.2-0.20240220123738-40637813ad8d/go.mod h1:+yUUIyvX9fn9bCSH3012kc8rG7YBbjvvxwy1Kr53dRc= knative.dev/hack v0.0.0-20240123162936-f3f03ac0ab1a h1:+4Mdk0Lt3LGAVEI6vYyhfjBlVBx7sqS4wECtTkuXoSY= knative.dev/hack v0.0.0-20240123162936-f3f03ac0ab1a/go.mod h1:yk2OjGDsbEnQjfxdm0/HJKS2WqTLEFg/N6nUs6Rqx3Q= knative.dev/pkg v0.0.0-20240116073220-b488e7be5902 h1:H6+JJN23fhwYWCHY1339sY6uhIyoUwDy1a8dN233fdk= knative.dev/pkg v0.0.0-20240116073220-b488e7be5902/go.mod h1:NYk8mMYoLkO7CQWnNkti4YGGnvLxN6MIDbUvtgeo0C0= -knative.dev/reconciler-test v0.0.0-20240116084801-50276dfba7b3 h1:YgmYC7C3CH1urEryPvgS1PmVi6PZG2fXqGWDQ0snib4= -knative.dev/reconciler-test v0.0.0-20240116084801-50276dfba7b3/go.mod h1:PdI3uCI/8URA+hyBvWqZ2pwCIvX/4/nqCNsdW1cQauM= +knative.dev/reconciler-test v0.0.0-20240206112124-e5d2639d7c5c h1:aFTcLeISFwAv6F8ACidQ8RszREI16ax0PkTsGVibdoI= +knative.dev/reconciler-test v0.0.0-20240206112124-e5d2639d7c5c/go.mod h1:PdI3uCI/8URA+hyBvWqZ2pwCIvX/4/nqCNsdW1cQauM= pgregory.net/rapid v1.1.0 h1:CMa0sjHSru3puNx+J0MIAuiiEV4N0qj8/cMWGBBCsjw= pgregory.net/rapid v1.1.0/go.mod h1:PY5XlDGj0+V1FCq0o192FdRhpKHGTRIWBgqjDBTrq04= rsc.io/binaryregexp v0.2.0/go.mod h1:qTv7/COck+e2FymRvadv62gMdZztPaShugOCi3I+8D8= diff --git a/openshift/e2e-common.sh b/openshift/e2e-common.sh index 0aa0ebf047..f8c2f64c89 100755 --- a/openshift/e2e-common.sh +++ b/openshift/e2e-common.sh @@ -163,7 +163,7 @@ function run_e2e_encryption_auth_tests(){ oc wait --for=condition=Ready knativeeventing.operator.knative.dev knative-eventing -n "${EVENTING_NAMESPACE}" --timeout=900s || return $? 
- local regex="TLS" + local regex="TLS|OIDC" local test_name="${1:-}" local run_command="-run ${regex}" @@ -173,7 +173,7 @@ function run_e2e_encryption_auth_tests(){ local run_command="-run ^(${test_name})$" fi # check for test flags - RUN_FLAGS="-timeout=1h -run ${regex}" + RUN_FLAGS="-timeout=1h ${run_command}" go_test_e2e ${RUN_FLAGS} ./test/e2e_new --images.producer.file="${images_file}" || failed=$? return $failed diff --git a/openshift/release/artifacts/eventing-kafka-broker.yaml b/openshift/release/artifacts/eventing-kafka-broker.yaml index 19bd300080..28f0a1ae15 100644 --- a/openshift/release/artifacts/eventing-kafka-broker.yaml +++ b/openshift/release/artifacts/eventing-kafka-broker.yaml @@ -211,6 +211,12 @@ rules: - get - list - watch + - apiGroups: + - "" + resources: + - serviceaccounts/token + verbs: + - create --- --- @@ -516,6 +522,9 @@ spec: - mountPath: /etc/tracing name: config-tracing readOnly: true + - mountPath: /etc/features + name: config-features + readOnly: true - mountPath: /etc/receiver-tls-secret name: broker-receiver-tls-secret readOnly: true @@ -560,6 +569,8 @@ spec: value: "false" - name: CONFIG_TRACING_PATH value: "/etc/tracing" + - name: CONFIG_FEATURES_PATH + value: "/etc/features" # https://github.com/fabric8io/kubernetes-client/issues/2212 - name: HTTP2_DISABLE value: "true" @@ -615,6 +626,9 @@ spec: - name: config-tracing configMap: name: config-tracing + - name: config-features + configMap: + name: config-features - name: broker-receiver-tls-secret secret: secretName: kafka-broker-ingress-server-tls diff --git a/openshift/release/artifacts/eventing-kafka-channel.yaml b/openshift/release/artifacts/eventing-kafka-channel.yaml index 110c591556..bc5c0f2b6d 100644 --- a/openshift/release/artifacts/eventing-kafka-channel.yaml +++ b/openshift/release/artifacts/eventing-kafka-channel.yaml @@ -210,6 +210,12 @@ rules: - get - list - watch + - apiGroups: + - "" + resources: + - serviceaccounts/token + verbs: + - create --- --- @@ -515,6 
+521,9 @@ spec: - mountPath: /etc/tracing name: config-tracing readOnly: true + - mountPath: /etc/features + name: config-features + readOnly: true - mountPath: /etc/receiver-tls-secret name: channel-receiver-tls-secret readOnly: true @@ -559,6 +568,8 @@ spec: value: "false" - name: CONFIG_TRACING_PATH value: "/etc/tracing" + - name: CONFIG_FEATURES_PATH + value: "/etc/features" # https://github.com/fabric8io/kubernetes-client/issues/2212 - name: HTTP2_DISABLE value: "true" @@ -614,6 +625,9 @@ spec: - name: config-tracing configMap: name: config-tracing + - name: config-features + configMap: + name: config-features - name: channel-receiver-tls-secret secret: secretName: kafka-channel-ingress-server-tls diff --git a/openshift/release/artifacts/eventing-kafka-controller.yaml b/openshift/release/artifacts/eventing-kafka-controller.yaml index 2ee6563876..8daae397dc 100644 --- a/openshift/release/artifacts/eventing-kafka-controller.yaml +++ b/openshift/release/artifacts/eventing-kafka-controller.yaml @@ -319,9 +319,6 @@ spec: description: DeadLetterSink is the sink receiving event that could not be sent to a destination. type: object properties: - CACerts: - description: CACerts are Certification Authority (CA) certificates in PEM format according to https://www.rfc-editor.org/rfc/rfc7468. If set, these CAs are appended to the set of CAs provided by the Addressable target, if any. - type: string ref: description: Ref points to an Addressable. type: object @@ -350,6 +347,12 @@ spec: uri: description: URI can be an absolute URL(non-empty scheme and non-empty host) pointing to the target or a relative URI. Relative URIs will be resolved using the base URI retrieved from Ref. type: string + CACerts: + description: CACerts are Certification Authority (CA) certificates in PEM format according to https://www.rfc-editor.org/rfc/rfc7468. If set, these CAs are appended to the set of CAs provided by the Addressable target, if any. 
+ type: string + audience: + description: Audience is the OIDC audience for the deadLetterSink. + type: string retry: description: Retry is the minimum number of retries the sender should attempt when sending an event before moving it to the dead letter sink. type: integer @@ -503,9 +506,6 @@ spec: description: Sink is a reference to an object that will resolve to a uri to use as the sink. type: object properties: - CACerts: - description: CACerts are Certification Authority (CA) certificates in PEM format according to https://www.rfc-editor.org/rfc/rfc7468. If set, these CAs are appended to the set of CAs provided by the Addressable target, if any. - type: string ref: description: Ref points to an Addressable. type: object @@ -534,6 +534,12 @@ spec: uri: description: URI can be an absolute URL(non-empty scheme and non-empty host) pointing to the target or a relative URI. Relative URIs will be resolved using the base URI retrieved from Ref. type: string + CACerts: + description: CACerts are Certification Authority (CA) certificates in PEM format according to https://www.rfc-editor.org/rfc/rfc7468. If set, these CAs are appended to the set of CAs provided by the Addressable target, if any. + type: string + audience: + description: Audience is the OIDC audience for the sink. + type: string topics: description: Topic topics to consume messages from type: array @@ -624,6 +630,16 @@ spec: sinkUri: description: SinkURI is the current active sink URI that has been configured for the Source. type: string + sinkAudience: + description: SinkAudience is the OIDC audience of the sink. + type: string + auth: + description: Auth provides the relevant information for OIDC authentication. + type: object + properties: + serviceAccountName: + description: ServiceAccountName is the name of the generated service account used for this components OIDC authentication. 
+ type: string subresources: status: {} scale: @@ -872,6 +888,9 @@ spec: type: string CACerts: type: string + audience: + description: Audience is the OIDC audience for the deadLetterSink. + type: string retry: description: Retry is the minimum number of retries the sender should attempt when sending an event before moving it to the dead letter sink. type: integer @@ -918,6 +937,9 @@ spec: type: string CACerts: type: string + audience: + description: Audience is the OIDC audience for the deadLetterSink. + type: string retry: description: Retry is the minimum number of retries the sender should attempt when sending an event before moving it to the dead letter sink. type: integer @@ -933,15 +955,28 @@ spec: replyCACerts: description: replyCACerts is the CA certs to trust for the reply. type: string + replyAudience: + description: ReplyAudience is the OIDC audience for the replyUri. + type: string subscriberUri: description: SubscriberURI is the endpoint for the subscriber type: string subscriberCACerts: description: SubscriberCACerts is the CA certs to trust for the subscriber. type: string + subscriberAudience: + description: SubscriberAudience is the OIDC audience for the subscriberUri. + type: string uid: description: UID is used to understand the origin of the subscriber. type: string + auth: + description: Auth provides the relevant information for OIDC authentication. + type: object + properties: + serviceAccountName: + description: ServiceAccountName is the name of the generated service account used for this components OIDC authentication. + type: string status: description: Status represents the current state of the KafkaChannel. This data may be out of date. type: object @@ -1025,6 +1060,9 @@ spec: type: string deadLetterSinkCACerts: type: string + deadLetterSinkAudience: + description: OIDC audience of the dead letter sink. 
+ type: string observedGeneration: description: ObservedGeneration is the 'Generation' of the Service that was last processed by the controller. type: integer @@ -1048,6 +1086,13 @@ spec: uid: description: UID is used to understand the origin of the subscriber. type: string + auth: + description: Auth provides the relevant information for OIDC authentication. + type: object + properties: + serviceAccountName: + description: ServiceAccountName is the name of the generated service account used for this components OIDC authentication. + type: string additionalPrinterColumns: - name: Ready type: string @@ -1696,6 +1741,13 @@ rules: - update - create - delete + # To grant NamespacedBroker permissions to create OIDC tokens + - apiGroups: + - "" + resources: + - serviceaccounts/token + verbs: + - create # Scheduler permissions - apiGroups: diff --git a/openshift/release/artifacts/eventing-kafka-sink.yaml b/openshift/release/artifacts/eventing-kafka-sink.yaml index 6124451149..c9e93aaa42 100644 --- a/openshift/release/artifacts/eventing-kafka-sink.yaml +++ b/openshift/release/artifacts/eventing-kafka-sink.yaml @@ -258,6 +258,9 @@ spec: - mountPath: /etc/tracing name: config-tracing readOnly: true + - mountPath: /etc/features + name: config-features + readOnly: true - mountPath: /etc/receiver-tls-secret name: sink-receiver-tls-secret readOnly: true @@ -302,6 +305,8 @@ spec: value: "false" - name: CONFIG_TRACING_PATH value: "/etc/tracing" + - name: CONFIG_FEATURES_PATH + value: "/etc/features" # https://github.com/fabric8io/kubernetes-client/issues/2212 - name: HTTP2_DISABLE value: "true" @@ -357,6 +362,9 @@ spec: - name: config-tracing configMap: name: config-tracing + - name: config-features + configMap: + name: config-features - name: sink-receiver-tls-secret secret: secretName: kafka-sink-ingress-server-tls diff --git a/openshift/release/artifacts/eventing-kafka-source.yaml b/openshift/release/artifacts/eventing-kafka-source.yaml index ef0b588b50..dfd7a8046b 100644 --- 
a/openshift/release/artifacts/eventing-kafka-source.yaml +++ b/openshift/release/artifacts/eventing-kafka-source.yaml @@ -191,6 +191,12 @@ rules: - get - list - watch + - apiGroups: + - "" + resources: + - serviceaccounts/token + verbs: + - create --- --- diff --git a/openshift/release/artifacts/eventing-kafka.yaml b/openshift/release/artifacts/eventing-kafka.yaml index 46efcd8bf1..7196f42517 100644 --- a/openshift/release/artifacts/eventing-kafka.yaml +++ b/openshift/release/artifacts/eventing-kafka.yaml @@ -319,9 +319,6 @@ spec: description: DeadLetterSink is the sink receiving event that could not be sent to a destination. type: object properties: - CACerts: - description: CACerts are Certification Authority (CA) certificates in PEM format according to https://www.rfc-editor.org/rfc/rfc7468. If set, these CAs are appended to the set of CAs provided by the Addressable target, if any. - type: string ref: description: Ref points to an Addressable. type: object @@ -350,6 +347,12 @@ spec: uri: description: URI can be an absolute URL(non-empty scheme and non-empty host) pointing to the target or a relative URI. Relative URIs will be resolved using the base URI retrieved from Ref. type: string + CACerts: + description: CACerts are Certification Authority (CA) certificates in PEM format according to https://www.rfc-editor.org/rfc/rfc7468. If set, these CAs are appended to the set of CAs provided by the Addressable target, if any. + type: string + audience: + description: Audience is the OIDC audience for the deadLetterSink. + type: string retry: description: Retry is the minimum number of retries the sender should attempt when sending an event before moving it to the dead letter sink. type: integer @@ -503,9 +506,6 @@ spec: description: Sink is a reference to an object that will resolve to a uri to use as the sink. 
type: object properties: - CACerts: - description: CACerts are Certification Authority (CA) certificates in PEM format according to https://www.rfc-editor.org/rfc/rfc7468. If set, these CAs are appended to the set of CAs provided by the Addressable target, if any. - type: string ref: description: Ref points to an Addressable. type: object @@ -534,6 +534,12 @@ spec: uri: description: URI can be an absolute URL(non-empty scheme and non-empty host) pointing to the target or a relative URI. Relative URIs will be resolved using the base URI retrieved from Ref. type: string + CACerts: + description: CACerts are Certification Authority (CA) certificates in PEM format according to https://www.rfc-editor.org/rfc/rfc7468. If set, these CAs are appended to the set of CAs provided by the Addressable target, if any. + type: string + audience: + description: Audience is the OIDC audience for the sink. + type: string topics: description: Topic topics to consume messages from type: array @@ -624,6 +630,16 @@ spec: sinkUri: description: SinkURI is the current active sink URI that has been configured for the Source. type: string + sinkAudience: + description: SinkAudience is the OIDC audience of the sink. + type: string + auth: + description: Auth provides the relevant information for OIDC authentication. + type: object + properties: + serviceAccountName: + description: ServiceAccountName is the name of the generated service account used for this components OIDC authentication. + type: string subresources: status: {} scale: @@ -872,6 +888,9 @@ spec: type: string CACerts: type: string + audience: + description: Audience is the OIDC audience for the deadLetterSink. + type: string retry: description: Retry is the minimum number of retries the sender should attempt when sending an event before moving it to the dead letter sink. type: integer @@ -918,6 +937,9 @@ spec: type: string CACerts: type: string + audience: + description: Audience is the OIDC audience for the deadLetterSink. 
+ type: string retry: description: Retry is the minimum number of retries the sender should attempt when sending an event before moving it to the dead letter sink. type: integer @@ -933,15 +955,28 @@ spec: replyCACerts: description: replyCACerts is the CA certs to trust for the reply. type: string + replyAudience: + description: ReplyAudience is the OIDC audience for the replyUri. + type: string subscriberUri: description: SubscriberURI is the endpoint for the subscriber type: string subscriberCACerts: description: SubscriberCACerts is the CA certs to trust for the subscriber. type: string + subscriberAudience: + description: SubscriberAudience is the OIDC audience for the subscriberUri. + type: string uid: description: UID is used to understand the origin of the subscriber. type: string + auth: + description: Auth provides the relevant information for OIDC authentication. + type: object + properties: + serviceAccountName: + description: ServiceAccountName is the name of the generated service account used for this components OIDC authentication. + type: string status: description: Status represents the current state of the KafkaChannel. This data may be out of date. type: object @@ -1025,6 +1060,9 @@ spec: type: string deadLetterSinkCACerts: type: string + deadLetterSinkAudience: + description: OIDC audience of the dead letter sink. + type: string observedGeneration: description: ObservedGeneration is the 'Generation' of the Service that was last processed by the controller. type: integer @@ -1048,6 +1086,13 @@ spec: uid: description: UID is used to understand the origin of the subscriber. type: string + auth: + description: Auth provides the relevant information for OIDC authentication. + type: object + properties: + serviceAccountName: + description: ServiceAccountName is the name of the generated service account used for this components OIDC authentication. 
+ type: string additionalPrinterColumns: - name: Ready type: string @@ -1696,6 +1741,13 @@ rules: - update - create - delete + # To grant NamespacedBroker permissions to create OIDC tokens + - apiGroups: + - "" + resources: + - serviceaccounts/token + verbs: + - create # Scheduler permissions - apiGroups: @@ -3023,6 +3075,12 @@ rules: - get - list - watch + - apiGroups: + - "" + resources: + - serviceaccounts/token + verbs: + - create --- --- @@ -3465,6 +3523,12 @@ rules: - get - list - watch + - apiGroups: + - "" + resources: + - serviceaccounts/token + verbs: + - create --- --- @@ -3770,6 +3834,9 @@ spec: - mountPath: /etc/tracing name: config-tracing readOnly: true + - mountPath: /etc/features + name: config-features + readOnly: true - mountPath: /etc/receiver-tls-secret name: broker-receiver-tls-secret readOnly: true @@ -3814,6 +3881,8 @@ spec: value: "false" - name: CONFIG_TRACING_PATH value: "/etc/tracing" + - name: CONFIG_FEATURES_PATH + value: "/etc/features" # https://github.com/fabric8io/kubernetes-client/issues/2212 - name: HTTP2_DISABLE value: "true" @@ -3869,6 +3938,9 @@ spec: - name: config-tracing configMap: name: config-tracing + - name: config-features + configMap: + name: config-features - name: broker-receiver-tls-secret secret: secretName: kafka-broker-ingress-server-tls @@ -4124,6 +4196,12 @@ rules: - get - list - watch + - apiGroups: + - "" + resources: + - serviceaccounts/token + verbs: + - create --- --- @@ -4429,6 +4507,9 @@ spec: - mountPath: /etc/tracing name: config-tracing readOnly: true + - mountPath: /etc/features + name: config-features + readOnly: true - mountPath: /etc/receiver-tls-secret name: channel-receiver-tls-secret readOnly: true @@ -4473,6 +4554,8 @@ spec: value: "false" - name: CONFIG_TRACING_PATH value: "/etc/tracing" + - name: CONFIG_FEATURES_PATH + value: "/etc/features" # https://github.com/fabric8io/kubernetes-client/issues/2212 - name: HTTP2_DISABLE value: "true" @@ -4528,6 +4611,9 @@ spec: - name: config-tracing 
configMap: name: config-tracing + - name: config-features + configMap: + name: config-features - name: channel-receiver-tls-secret secret: secretName: kafka-channel-ingress-server-tls @@ -4826,6 +4912,9 @@ spec: - mountPath: /etc/tracing name: config-tracing readOnly: true + - mountPath: /etc/features + name: config-features + readOnly: true - mountPath: /etc/receiver-tls-secret name: sink-receiver-tls-secret readOnly: true @@ -4870,6 +4959,8 @@ spec: value: "false" - name: CONFIG_TRACING_PATH value: "/etc/tracing" + - name: CONFIG_FEATURES_PATH + value: "/etc/features" # https://github.com/fabric8io/kubernetes-client/issues/2212 - name: HTTP2_DISABLE value: "true" @@ -4925,6 +5016,9 @@ spec: - name: config-tracing configMap: name: config-tracing + - name: config-features + configMap: + name: config-features - name: sink-receiver-tls-secret secret: secretName: kafka-sink-ingress-server-tls diff --git a/proto/contract.proto b/proto/contract.proto index d1e7f5fe29..eab3baca13 100644 --- a/proto/contract.proto +++ b/proto/contract.proto @@ -78,6 +78,9 @@ message EgressConfig { // Dead Letter CA Cert is the CA Cert used for HTTPS communication through dead letter string deadLetterCACerts = 6; + // Dead Letter Audience is the OIDC audience of the dead letter + string deadLetterAudience = 7; + // retry is the minimum number of retries the sender should attempt when // sending an event before moving it to the dead letter sink. 
// @@ -117,6 +120,9 @@ message Egress { // destination CA Cert is the CA Cert used for HTTPS communication through destination string destinationCACerts = 15; + // OIDC audience of the destination + string destinationAudience = 17; + oneof replyStrategy { // Send the response to an url string replyUrl = 3; @@ -131,6 +137,9 @@ message Egress { // replyUrl CA Cert is the CA Cert used for HTTPS communication through replyUrl string replyUrlCACerts = 16; + // OIDC audience of the replyUrl + string replyUrlAudience = 18; + // A filter for performing exact match against Cloud Events attributes Filter filter = 5; @@ -164,6 +173,9 @@ message Egress { // Egress feature flags. EgressFeatureFlags featureFlags = 14; + + // Name of the service account to use for OIDC authentication. + string oidcServiceAccountName = 19; } message EgressFeatureFlags { @@ -200,6 +212,8 @@ message Ingress { // whether to autocreate event types bool enableAutoCreateEventTypes = 4; + // OIDC audience of this ingress + string audience = 5; } // Kubernetes resource reference. diff --git a/test/channel-reconciler-tests.sh b/test/channel-reconciler-tests.sh index 03a1942a78..2bd58ca226 100755 --- a/test/channel-reconciler-tests.sh +++ b/test/channel-reconciler-tests.sh @@ -3,3 +3,9 @@ source $(dirname $0)/e2e-common.sh go_test_e2e -tags=e2e,cloudevents -timeout=1h ./test/e2e_new_channel/... || fail_test "E2E (new - KafkaChannel) suite failed" + +echo "Running E2E Channel Reconciler Tests with OIDC authentication enabled" + +kubectl apply -Rf "$(dirname "$0")/config-oidc-authentication" + +go_test_e2e -timeout=1h ./test/e2e_new_channel/... 
-run OIDC || fail_test diff --git a/test/config-oidc-authentication/features.yaml b/test/config-oidc-authentication/features.yaml new file mode 100644 index 0000000000..ae227d1072 --- /dev/null +++ b/test/config-oidc-authentication/features.yaml @@ -0,0 +1,31 @@ +# Copyright 2021 The Knative Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +apiVersion: v1 +kind: ConfigMap +metadata: + name: config-features + namespace: knative-eventing + labels: + knative.dev/config-propagation: original + knative.dev/config-category: eventing +data: + kreference-group: "disabled" + delivery-retryafter: "disabled" + delivery-timeout: "enabled" + kreference-mapping: "disabled" + new-trigger-filters: "enabled" + transport-encryption: "strict" + eventtype-auto-create: "disabled" + authentication-oidc: "enabled" diff --git a/test/e2e_new/broker_test.go b/test/e2e_new/broker_test.go index 0f86cfd5b2..df73a76627 100644 --- a/test/e2e_new/broker_test.go +++ b/test/e2e_new/broker_test.go @@ -25,11 +25,16 @@ import ( "knative.dev/pkg/system" "knative.dev/reconciler-test/pkg/environment" + "knative.dev/reconciler-test/pkg/eventshub" "knative.dev/reconciler-test/pkg/feature" "knative.dev/reconciler-test/pkg/k8s" "knative.dev/reconciler-test/pkg/knative" "knative.dev/eventing-kafka-broker/test/rekt/features" + "knative.dev/eventing/test/rekt/features/broker" + brokereventingfeatures "knative.dev/eventing/test/rekt/features/broker" + "knative.dev/eventing/test/rekt/features/oidc" + 
brokerresources "knative.dev/eventing/test/rekt/resources/broker" ) const ( @@ -231,3 +236,37 @@ func TestNamespacedBrokerNamespaceDeletion(t *testing.T) { env.Test(ctx, t, features.SetupNamespacedBroker(name)) env.Test(ctx, t, features.CleanupNamespace(namespace)) } + +func TestBrokerSupportsOIDC(t *testing.T) { + t.Parallel() + + ctx, env := global.Environment( + knative.WithKnativeNamespace(system.Namespace()), + knative.WithLoggingConfig, + knative.WithTracingConfig, + k8s.WithEventListener, + environment.WithPollTimings(4*time.Second, 12*time.Minute), + environment.Managed(t), + eventshub.WithTLS(t), + ) + + name := feature.MakeRandomK8sName("broker") + env.Prerequisite(ctx, t, broker.GoesReady(name, brokerresources.WithEnvConfig()...)) + + env.TestSet(ctx, t, oidc.AddressableOIDCConformance(brokerresources.GVR(), "Broker", name, env.Namespace())) +} + +func TestBrokerSendsEventsWithOIDCSupport(t *testing.T) { + t.Parallel() + + ctx, env := global.Environment( + knative.WithKnativeNamespace(system.Namespace()), + knative.WithLoggingConfig, + knative.WithTracingConfig, + k8s.WithEventListener, + environment.Managed(t), + eventshub.WithTLS(t), + ) + + env.TestSet(ctx, t, brokereventingfeatures.BrokerSendEventWithOIDC()) +} diff --git a/test/e2e_new/templates/kafka-broker/broker.yaml b/test/e2e_new/templates/kafka-broker/broker.yaml index 5c462829ab..37150cc4ec 100644 --- a/test/e2e_new/templates/kafka-broker/broker.yaml +++ b/test/e2e_new/templates/kafka-broker/broker.yaml @@ -64,6 +64,13 @@ spec: {{ if .delivery.deadLetterSink.uri }} uri: {{ .delivery.deadLetterSink.uri }} {{ end }} + {{ if .delivery.deadLetterSink.CACerts }} + CACerts: |- + {{ .delivery.deadLetterSink.CACerts }} + {{ end }} + {{ if .delivery.deadLetterSink.audience }} + audience: {{ .delivery.deadLetterSink.audience }} + {{ end }} {{ end }} {{ if .delivery.retry }} retry: {{ .delivery.retry}} diff --git a/test/e2e_new_channel/kafka_channel_test.go b/test/e2e_new_channel/kafka_channel_test.go 
index a3f01e16c4..b90cbe22d8 100644 --- a/test/e2e_new_channel/kafka_channel_test.go +++ b/test/e2e_new_channel/kafka_channel_test.go @@ -23,14 +23,19 @@ import ( "testing" "time" + "knative.dev/eventing/test/rekt/features/channel" + "knative.dev/eventing/test/rekt/features/oidc" + "knative.dev/pkg/system" "knative.dev/reconciler-test/pkg/environment" + "knative.dev/reconciler-test/pkg/eventshub" "knative.dev/reconciler-test/pkg/feature" "knative.dev/reconciler-test/pkg/k8s" "knative.dev/reconciler-test/pkg/knative" "knative.dev/reconciler-test/pkg/state" "knative.dev/eventing-kafka-broker/test/rekt/features/kafkachannel" + kafkachannelresource "knative.dev/eventing-kafka-broker/test/rekt/resources/kafkachannel" ) const ( @@ -69,3 +74,38 @@ func TestKafkaChannelReadiness(t *testing.T) { env.Test(ctx, t, f) } } + +func TestKafkaChannelDispatcherAuthenticatesWithOIDC(t *testing.T) { + t.Parallel() + + ctx, env := global.Environment( + knative.WithKnativeNamespace(system.Namespace()), + knative.WithLoggingConfig, + knative.WithTracingConfig, + k8s.WithEventListener, + environment.Managed(t), + eventshub.WithTLS(t), + ) + + env.Test(ctx, t, channel.DispatcherAuthenticatesRequestsWithOIDC()) +} + +func TestKafkaChannelOIDC(t *testing.T) { + // Run Test In Parallel With Others + t.Parallel() + + ctx, env := global.Environment( + knative.WithKnativeNamespace(system.Namespace()), + knative.WithLoggingConfig, + knative.WithTracingConfig, + k8s.WithEventListener, + environment.WithPollTimings(2*time.Second, 12*time.Minute), + environment.Managed(t), + eventshub.WithTLS(t), + ) + + name := feature.MakeRandomK8sName("kafkaChannel") + env.Prerequisite(ctx, t, channel.ImplGoesReady(name)) + + env.TestSet(ctx, t, oidc.AddressableOIDCConformance(kafkachannelresource.GVR(), "KafkaChannel", name, env.Namespace())) +} diff --git a/test/reconciler-tests.sh b/test/reconciler-tests.sh index 57fbab16ef..c94b04ff17 100755 --- a/test/reconciler-tests.sh +++ b/test/reconciler-tests.sh @@ 
-54,6 +54,12 @@ kubectl apply -Rf "$(dirname "$0")/config-transport-encryption" go_test_e2e -timeout=1h ./test/e2e_new -run TLS || fail_test +echo "Running E2E Reconciler Tests with OIDC authentication enabled" + +kubectl apply -Rf "$(dirname "$0")/config-oidc-authentication" + +go_test_e2e -timeout=1h ./test/e2e_new -run OIDC || fail_test + if ! ${LOCAL_DEVELOPMENT}; then go_test_e2e -tags=sacura -timeout=40m ./test/e2e/... || fail_test "E2E (sacura) suite failed" fi diff --git a/vendor/knative.dev/eventing/test/e2e-rekt-tests.sh b/vendor/knative.dev/eventing/test/e2e-rekt-tests.sh index bb0b386c32..4f8bcb0e25 100644 --- a/vendor/knative.dev/eventing/test/e2e-rekt-tests.sh +++ b/vendor/knative.dev/eventing/test/e2e-rekt-tests.sh @@ -44,4 +44,10 @@ kubectl apply -Rf "$(dirname "$0")/config-transport-encryption" go_test_e2e -timeout=1h ./test/rekt -run TLS || fail_test +echo "Running E2E OIDC Reconciler Tests" + +kubectl apply -Rf "$(dirname "$0")/config-authentication-oidc" + +go_test_e2e -timeout=1h ./test/rekt -run OIDC || fail_test + success diff --git a/vendor/knative.dev/eventing/test/rekt/features/broker/oidc_feature.go b/vendor/knative.dev/eventing/test/rekt/features/broker/oidc_feature.go new file mode 100644 index 0000000000..4642995dde --- /dev/null +++ b/vendor/knative.dev/eventing/test/rekt/features/broker/oidc_feature.go @@ -0,0 +1,223 @@ +/* +Copyright 2024 The Knative Authors + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+*/ + +package broker + +import ( + "context" + + "knative.dev/pkg/apis" + + "github.com/cloudevents/sdk-go/v2/test" + "github.com/google/uuid" + "knative.dev/eventing/test/rekt/features/featureflags" + "knative.dev/eventing/test/rekt/resources/broker" + "knative.dev/eventing/test/rekt/resources/delivery" + "knative.dev/eventing/test/rekt/resources/trigger" + duckv1 "knative.dev/pkg/apis/duck/v1" + "knative.dev/reconciler-test/pkg/eventshub" + eventassert "knative.dev/reconciler-test/pkg/eventshub/assert" + "knative.dev/reconciler-test/pkg/feature" + "knative.dev/reconciler-test/pkg/resources/service" +) + +func BrokerSendEventWithOIDC() *feature.FeatureSet { + return &feature.FeatureSet{ + Name: "Broker send events with OIDC support", + Features: []*feature.Feature{ + BrokerSendEventWithOIDCTokenToSubscriber(), + BrokerSendEventWithOIDCTokenToReply(), + BrokerSendEventWithOIDCTokenToDLS(), + }, + } +} + +func BrokerSendEventWithOIDCTokenToSubscriber() *feature.Feature { + f := feature.NewFeatureNamed("Broker supports flow with OIDC tokens") + + f.Prerequisite("OIDC Authentication is enabled", featureflags.AuthenticationOIDCEnabled()) + f.Prerequisite("transport encryption is strict", featureflags.TransportEncryptionStrict()) + f.Prerequisite("should not run when Istio is enabled", featureflags.IstioDisabled()) + + source := feature.MakeRandomK8sName("source") + brokerName := feature.MakeRandomK8sName("broker") + sink := feature.MakeRandomK8sName("sink") + triggerName := feature.MakeRandomK8sName("triggerName") + sinkAudience := "sink-audience" + + event := test.FullEvent() + + // Install the broker + f.Setup("install broker", broker.Install(brokerName, broker.WithEnvConfig()...)) + f.Setup("broker is ready", broker.IsReady(brokerName)) + f.Setup("broker is addressable", broker.IsAddressable(brokerName)) + + // Install the sink + f.Setup("install sink", eventshub.Install( + sink, + eventshub.StartReceiverTLS, + eventshub.OIDCReceiverAudience(sinkAudience), + )) + + 
f.Setup("Install the trigger", func(ctx context.Context, t feature.T) { + d := service.AsDestinationRef(sink) + d.CACerts = eventshub.GetCaCerts(ctx) + d.Audience = &sinkAudience + trigger.Install(triggerName, brokerName, trigger.WithSubscriberFromDestination(d))(ctx, t) + }) + f.Setup("trigger goes ready", trigger.IsReady(triggerName)) + + // Send event + f.Requirement("install source", eventshub.Install( + source, + eventshub.StartSenderToResourceTLS(broker.GVR(), brokerName, nil), + eventshub.InputEvent(event), + )) + + f.Alpha("Broker"). + Must("handles event with valid OIDC token", eventassert.OnStore(sink).MatchReceivedEvent(test.HasId(event.ID())).Exact(1)) + + return f +} + +func BrokerSendEventWithOIDCTokenToDLS() *feature.Feature { + f := feature.NewFeature() + + f.Prerequisite("OIDC Authentication is enabled", featureflags.AuthenticationOIDCEnabled()) + f.Prerequisite("transport encryption is strict", featureflags.TransportEncryptionStrict()) + f.Prerequisite("should not run when Istio is enabled", featureflags.IstioDisabled()) + + brokerName := feature.MakeRandomK8sName("broker") + dls := feature.MakeRandomK8sName("dls") + triggerName := feature.MakeRandomK8sName("trigger") + source := feature.MakeRandomK8sName("source") + dlsAudience := "dls-audience" + + event := test.FullEvent() + event.SetID(uuid.New().String()) + + // Install DLS sink + f.Setup("install dead letter sink", eventshub.Install(dls, + eventshub.OIDCReceiverAudience(dlsAudience), + eventshub.StartReceiverTLS)) + + f.Setup("install broker", func(ctx context.Context, t feature.T) { + brokerConfig := append(broker.WithEnvConfig(), + delivery.WithDeadLetterSinkFromDestination(&duckv1.Destination{ + Ref: service.AsKReference(dls), + Audience: &dlsAudience, + CACerts: eventshub.GetCaCerts(ctx), + })) + broker.Install(brokerName, brokerConfig...)(ctx, t) + }) + + f.Setup("Broker is ready", broker.IsReady(brokerName)) + + f.Setup("Install the trigger", func(ctx context.Context, t feature.T) { + 
// create an empty destination ref + d := duckv1.Destination{} + d.CACerts = eventshub.GetCaCerts(ctx) + d.URI, _ = apis.ParseURL("bad://uri") + trigger.Install(triggerName, brokerName, trigger.WithSubscriberFromDestination(&d))(ctx, t) + + }) + + f.Setup("trigger is ready", trigger.IsReady(triggerName)) + + // Send events after data plane is ready. + f.Requirement("install source", eventshub.Install(source, + eventshub.StartSenderToResourceTLS(broker.GVR(), brokerName, nil), + eventshub.InputEvent(event), + )) + + // Assert events ended up where we expected. + f.Stable("broker with DLS"). + Must("deliver event to DLQ", eventassert.OnStore(dls).MatchReceivedEvent(test.HasId(event.ID())).AtLeast(1)) + + return f +} + +func BrokerSendEventWithOIDCTokenToReply() *feature.Feature { + //1. An event is sent to a broker. + //2. A trigger routes this event to a subscriber. + //3. The subscriber processes and replies to the event. + //4. A helper trigger routes the reply to a designated sink. + //5. The test verifies that the reply reaches the sink with the expected modifications. 
+ f := feature.NewFeature() + + f.Prerequisite("OIDC Authentication is enabled", featureflags.AuthenticationOIDCEnabled()) + f.Prerequisite("transport encryption is strict", featureflags.TransportEncryptionStrict()) + f.Prerequisite("should not run when Istio is enabled", featureflags.IstioDisabled()) + + brokerName := feature.MakeRandomK8sName("broker") + subscriber := feature.MakeRandomK8sName("subscriber") + reply := feature.MakeRandomK8sName("reply") + triggerName := feature.MakeRandomK8sName("trigger") + helperTriggerName := feature.MakeRandomK8sName("helper-trigger") + source := feature.MakeRandomK8sName("source") + + event := test.FullEvent() + event.SetID(uuid.New().String()) + + replyEventType := "reply-type" + replyEventSource := "reply-source" + + // Install subscriber + f.Setup("install subscriber", eventshub.Install(subscriber, + eventshub.ReplyWithTransformedEvent(replyEventType, replyEventSource, ""), + eventshub.StartReceiverTLS)) + + // Install sink for reply + // Hint: we don't need to require OIDC auth at the reply sink, because the + // actual reply is sent to the broker ingress, which must support OIDC. This + // reply sink is only to check that the reply was sent and routed correctly. 
+ f.Setup("install sink for reply", eventshub.Install(reply, + eventshub.StartReceiverTLS)) + + // Install broker + f.Setup("install broker", broker.Install(brokerName, broker.WithEnvConfig()...)) + f.Setup("Broker is ready", broker.IsReady(brokerName)) + + f.Setup("install the trigger", func(ctx context.Context, t feature.T) { + d := service.AsDestinationRef(subscriber) + d.CACerts = eventshub.GetCaCerts(ctx) + trigger.Install(triggerName, brokerName, trigger.WithSubscriberFromDestination(d), trigger.WithFilter(map[string]string{ + "type": event.Type(), + }))(ctx, t) + }) + + f.Setup("trigger is ready", trigger.IsReady(triggerName)) + + f.Setup("install the trigger and specify the CA cert of the destination", func(ctx context.Context, t feature.T) { + d := service.AsDestinationRef(reply) + d.CACerts = eventshub.GetCaCerts(ctx) + trigger.Install(helperTriggerName, brokerName, trigger.WithSubscriberFromDestination(d), trigger.WithFilter(map[string]string{ + "type": replyEventType, + }))(ctx, t) + }) + + // Send events after data plane is ready. + f.Requirement("install source", eventshub.Install(source, + eventshub.StartSenderToResourceTLS(broker.GVR(), brokerName, nil), + eventshub.InputEvent(event), + )) + + // Assert events ended up where we expected. + f.Stable("broker with reply"). + Must("deliver event to reply sink", eventassert.OnStore(reply).MatchReceivedEvent(test.HasSource(replyEventSource)).AtLeast(1)) + + return f +} diff --git a/vendor/knative.dev/eventing/test/rekt/features/channel/oidc_feature.go b/vendor/knative.dev/eventing/test/rekt/features/channel/oidc_feature.go new file mode 100644 index 0000000000..b4e4f3abb0 --- /dev/null +++ b/vendor/knative.dev/eventing/test/rekt/features/channel/oidc_feature.go @@ -0,0 +1,66 @@ +/* +Copyright 2024 The Knative Authors + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. 
+You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ + +package channel + +import ( + "context" + + "github.com/cloudevents/sdk-go/v2/test" + "knative.dev/eventing/test/rekt/features/featureflags" + "knative.dev/eventing/test/rekt/resources/channel_impl" + "knative.dev/eventing/test/rekt/resources/subscription" + "knative.dev/reconciler-test/pkg/eventshub" + "knative.dev/reconciler-test/pkg/eventshub/assert" + "knative.dev/reconciler-test/pkg/feature" + "knative.dev/reconciler-test/pkg/resources/service" +) + +func DispatcherAuthenticatesRequestsWithOIDC() *feature.Feature { + f := feature.NewFeatureNamed("Channel dispatcher authenticates requests with OIDC") + + f.Prerequisite("OIDC authentication is enabled", featureflags.AuthenticationOIDCEnabled()) + f.Prerequisite("transport encryption is strict", featureflags.TransportEncryptionStrict()) + f.Prerequisite("should not run when Istio is enabled", featureflags.IstioDisabled()) + + source := feature.MakeRandomK8sName("source") + channelName := feature.MakeRandomK8sName("channel") + sink := feature.MakeRandomK8sName("sink") + subscriptionName := feature.MakeRandomK8sName("subscription") + receiverAudience := feature.MakeRandomK8sName("receiver") + + f.Setup("install channel", channel_impl.Install(channelName)) + f.Setup("channel is ready", channel_impl.IsReady(channelName)) + f.Setup("install sink", eventshub.Install(sink, eventshub.OIDCReceiverAudience(receiverAudience), eventshub.StartReceiverTLS)) + + f.Setup("install subscription", func(ctx context.Context, t feature.T) { + d := service.AsDestinationRef(sink) + d.CACerts = eventshub.GetCaCerts(ctx) + d.Audience = 
&receiverAudience + subscription.Install(subscriptionName, + subscription.WithChannel(channel_impl.AsRef(channelName)), + subscription.WithSubscriberFromDestination(d))(ctx, t) + }) + + f.Setup("subscription is ready", subscription.IsReady(subscriptionName)) + + event := test.FullEvent() + f.Requirement("install source", eventshub.Install(source, eventshub.InputEvent(event), eventshub.StartSenderToResourceTLS(channel_impl.GVR(), channelName, nil))) + + f.Alpha("channel dispatcher").Must("authenticate requests with OIDC", assert.OnStore(sink).MatchReceivedEvent(test.HasId(event.ID())).AtLeast(1)) + + return f +} diff --git a/vendor/knative.dev/eventing/test/rekt/features/oidc/addressable_oidc_conformance.go b/vendor/knative.dev/eventing/test/rekt/features/oidc/addressable_oidc_conformance.go new file mode 100644 index 0000000000..bee8f85adb --- /dev/null +++ b/vendor/knative.dev/eventing/test/rekt/features/oidc/addressable_oidc_conformance.go @@ -0,0 +1,189 @@ +/* +Copyright 2024 The Knative Authors + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+*/ + +package oidc + +import ( + "fmt" + + "knative.dev/eventing/test/rekt/features/featureflags" + + "github.com/cloudevents/sdk-go/v2/test" + metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" + "k8s.io/apimachinery/pkg/runtime/schema" + "knative.dev/eventing/pkg/auth" + "knative.dev/eventing/test/rekt/resources/addressable" + "knative.dev/reconciler-test/pkg/eventshub" + eventassert "knative.dev/reconciler-test/pkg/eventshub/assert" + "knative.dev/reconciler-test/pkg/feature" + "knative.dev/reconciler-test/pkg/k8s" +) + +func AddressableOIDCConformance(gvr schema.GroupVersionResource, kind, name, namespace string) *feature.FeatureSet { + fs := feature.FeatureSet{ + Name: fmt.Sprintf("%s handles requests with OIDC tokens correctly", kind), + Features: AddressableOIDCTokenConformance(gvr, kind, name).Features, + } + + fs.Features = append(fs.Features, + AddressableHasAudiencePopulated(gvr, kind, name, namespace), + ) + + return &fs +} + +func AddressableOIDCTokenConformance(gvr schema.GroupVersionResource, kind, name string) *feature.FeatureSet { + fs := feature.FeatureSet{ + Name: fmt.Sprintf("%s handles requests with OIDC tokens correctly", kind), + Features: []*feature.Feature{ + addressableRejectInvalidAudience(gvr, kind, name), + addressableRejectCorruptedSignature(gvr, kind, name), + addressableRejectExpiredToken(gvr, kind, name), + addressableAllowsValidRequest(gvr, kind, name), + }, + } + + return &fs +} + +func AddressableHasAudiencePopulated(gvr schema.GroupVersionResource, kind, name, namespace string) *feature.Feature { + f := feature.NewFeatureNamed(fmt.Sprintf("%s populates its .status.address.audience correctly", kind)) + + f.Prerequisite("OIDC authentication is enabled", featureflags.AuthenticationOIDCEnabled()) + + f.Requirement(fmt.Sprintf("%s is ready", kind), k8s.IsReady(gvr, name)) + f.Requirement(fmt.Sprintf("%s is addressable", kind), k8s.IsAddressable(gvr, name)) + + expectedAudience := auth.GetAudience(gvr.GroupVersion().WithKind(kind), 
metav1.ObjectMeta{ + Name: name, + Namespace: namespace, + }) + + f.Alpha(kind).Must("have audience set", addressable.ValidateAddress(gvr, name, addressable.AssertAddressWithAudience(expectedAudience))) + + return f +} + +func addressableRejectInvalidAudience(gvr schema.GroupVersionResource, kind, name string) *feature.Feature { + f := feature.NewFeatureNamed(fmt.Sprintf("%s reject event for wrong OIDC audience", kind)) + + f.Prerequisite("OIDC authentication is enabled", featureflags.AuthenticationOIDCEnabled()) + f.Prerequisite("transport encryption is strict", featureflags.TransportEncryptionStrict()) + f.Prerequisite("should not run when Istio is enabled", featureflags.IstioDisabled()) + + source := feature.MakeRandomK8sName("source") + + event := test.FullEvent() + + f.Requirement(fmt.Sprintf("%s is ready", kind), k8s.IsReady(gvr, name)) + f.Requirement(fmt.Sprintf("%s is addressable", kind), k8s.IsAddressable(gvr, name)) + + f.Requirement("install source", eventshub.Install( + source, + eventshub.StartSenderToResourceTLS(gvr, name, nil), + eventshub.OIDCInvalidAudience(), + eventshub.InputEvent(event), + )) + + f.Alpha(kind). + Must("event sent", eventassert.OnStore(source).MatchSentEvent(test.HasId(event.ID())).Exact(1)). 
+ Must("get 401 on response", eventassert.OnStore(source).Match(eventassert.MatchStatusCode(401)).Exact(1)) + + return f +} + +func addressableRejectExpiredToken(gvr schema.GroupVersionResource, kind, name string) *feature.Feature { + f := feature.NewFeatureNamed(fmt.Sprintf("%s reject event with expired OIDC token", kind)) + + f.Prerequisite("OIDC authentication is enabled", featureflags.AuthenticationOIDCEnabled()) + f.Prerequisite("transport encryption is strict", featureflags.TransportEncryptionStrict()) + f.Prerequisite("should not run when Istio is enabled", featureflags.IstioDisabled()) + + source := feature.MakeRandomK8sName("source") + + event := test.FullEvent() + + f.Requirement(fmt.Sprintf("%s is ready", kind), k8s.IsReady(gvr, name)) + f.Requirement(fmt.Sprintf("%s is addressable", kind), k8s.IsAddressable(gvr, name)) + + f.Requirement("install source", eventshub.Install( + source, + eventshub.StartSenderToResourceTLS(gvr, name, nil), + eventshub.OIDCExpiredToken(), + eventshub.InputEvent(event), + )) + + f.Alpha(kind). + Must("event sent", eventassert.OnStore(source).MatchSentEvent(test.HasId(event.ID())).Exact(1)). 
+ Must("get 401 on response", eventassert.OnStore(source).Match(eventassert.MatchStatusCode(401)).Exact(1)) + + return f +} + +func addressableRejectCorruptedSignature(gvr schema.GroupVersionResource, kind, name string) *feature.Feature { + f := feature.NewFeatureNamed(fmt.Sprintf("%s reject event with corrupted OIDC token signature", kind)) + + f.Prerequisite("OIDC authentication is enabled", featureflags.AuthenticationOIDCEnabled()) + f.Prerequisite("transport encryption is strict", featureflags.TransportEncryptionStrict()) + f.Prerequisite("should not run when Istio is enabled", featureflags.IstioDisabled()) + + source := feature.MakeRandomK8sName("source") + + event := test.FullEvent() + + f.Requirement(fmt.Sprintf("%s is ready", kind), k8s.IsReady(gvr, name)) + f.Requirement(fmt.Sprintf("%s is addressable", kind), k8s.IsAddressable(gvr, name)) + + f.Requirement("install source", eventshub.Install( + source, + eventshub.StartSenderToResourceTLS(gvr, name, nil), + eventshub.OIDCCorruptedSignature(), + eventshub.InputEvent(event), + )) + + f.Alpha(kind). + Must("event sent", eventassert.OnStore(source).MatchSentEvent(test.HasId(event.ID())).Exact(1)). 
+ Must("get 401 on response", eventassert.OnStore(source).Match(eventassert.MatchStatusCode(401)).Exact(1)) + + return f +} + +func addressableAllowsValidRequest(gvr schema.GroupVersionResource, kind, name string) *feature.Feature { + f := feature.NewFeatureNamed(fmt.Sprintf("%s handles event with valid OIDC token", kind)) + + f.Prerequisite("OIDC authentication is enabled", featureflags.AuthenticationOIDCEnabled()) + f.Prerequisite("transport encryption is strict", featureflags.TransportEncryptionStrict()) + f.Prerequisite("should not run when Istio is enabled", featureflags.IstioDisabled()) + + source := feature.MakeRandomK8sName("source") + + event := test.FullEvent() + + f.Requirement(fmt.Sprintf("%s is ready", kind), k8s.IsReady(gvr, name)) + f.Requirement(fmt.Sprintf("%s is addressable", kind), k8s.IsAddressable(gvr, name)) + + f.Requirement("install source", eventshub.Install( + source, + eventshub.StartSenderToResourceTLS(gvr, name, nil), + eventshub.InputEvent(event), + )) + + f.Alpha(kind). + Must("event sent", eventassert.OnStore(source).MatchSentEvent(test.HasId(event.ID())).Exact(1)). + Must("get 202 on response", eventassert.OnStore(source).Match(eventassert.MatchStatusCode(202)).Exact(1)) + + return f +} diff --git a/vendor/knative.dev/pkg/client/injection/kube/informers/core/v1/serviceaccount/fake/fake.go b/vendor/knative.dev/pkg/client/injection/kube/informers/core/v1/serviceaccount/fake/fake.go new file mode 100644 index 0000000000..81c0c20000 --- /dev/null +++ b/vendor/knative.dev/pkg/client/injection/kube/informers/core/v1/serviceaccount/fake/fake.go @@ -0,0 +1,40 @@ +/* +Copyright 2022 The Knative Authors + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. 
+You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ + +// Code generated by injection-gen. DO NOT EDIT. + +package fake + +import ( + context "context" + + serviceaccount "knative.dev/pkg/client/injection/kube/informers/core/v1/serviceaccount" + fake "knative.dev/pkg/client/injection/kube/informers/factory/fake" + controller "knative.dev/pkg/controller" + injection "knative.dev/pkg/injection" +) + +var Get = serviceaccount.Get + +func init() { + injection.Fake.RegisterInformer(withInformer) +} + +func withInformer(ctx context.Context) (context.Context, controller.Informer) { + f := fake.Get(ctx) + inf := f.Core().V1().ServiceAccounts() + return context.WithValue(ctx, serviceaccount.Key{}, inf), inf.Informer() +} diff --git a/vendor/knative.dev/reconciler-test/pkg/eventshub/assert/step.go b/vendor/knative.dev/reconciler-test/pkg/eventshub/assert/step.go index 2bbff311c5..21464fc8a5 100644 --- a/vendor/knative.dev/reconciler-test/pkg/eventshub/assert/step.go +++ b/vendor/knative.dev/reconciler-test/pkg/eventshub/assert/step.go @@ -1,8 +1,10 @@ package assert import ( + "bytes" "context" "encoding/json" + "encoding/pem" "fmt" cetest "github.com/cloudevents/sdk-go/v2/test" @@ -144,13 +146,40 @@ func MatchPeerCertificatesFromSecret(namespace, name string, key string) eventsh return fmt.Errorf("failed to match peer certificates, connection is not TLS") } - for _, cert := range info.Connection.TLS.PemPeerCertificates { - if cert == string(value) { - return nil + // secret value can, in general, be a certificate chain (a sequence of PEM-encoded certificate blocks) + valueBlock, valueRest := pem.Decode(value) + if valueBlock == 
nil { + // error if there's not even a single certificate in the value + return fmt.Errorf("failed to decode secret certificate:\n%s", string(value)) + } + // for each certificate in the chain, check if it's present in info.Connection.TLS.PemPeerCertificates + for valueBlock != nil { + found := false + for _, cert := range info.Connection.TLS.PemPeerCertificates { + certBlock, _ := pem.Decode([]byte(cert)) + if certBlock == nil { + return fmt.Errorf("failed to decode peer certificate:\n%s", cert) + } + + if certBlock.Type == valueBlock.Type && string(certBlock.Bytes) == string(valueBlock.Bytes) { + found = true + break + } + } + + if !found { + pemBytes, _ := json.MarshalIndent(info.Connection.TLS.PemPeerCertificates, "", " ") + return fmt.Errorf("failed to find peer certificate with value\n%s\nin:\n%s", string(value), string(pemBytes)) } + + valueBlock, valueRest = pem.Decode(valueRest) + } + + // any non-whitespace suffix not parsed as a PEM is suspicious, so we treat it as an error: + if "" != string(bytes.TrimSpace(valueRest)) { + return fmt.Errorf("failed to decode secret certificate starting with\n%s\nin:\n%s", string(valueRest), string(value)) } - bytes, _ := json.MarshalIndent(info.Connection.TLS.PemPeerCertificates, "", " ") - return fmt.Errorf("failed to find peer certificate with value\n%s\nin:\n%s", string(value), string(bytes)) + return nil } } diff --git a/vendor/modules.txt b/vendor/modules.txt index 1d5cca132e..ab208f9481 100644 --- a/vendor/modules.txt +++ b/vendor/modules.txt @@ -1326,7 +1326,7 @@ k8s.io/utils/net k8s.io/utils/pointer k8s.io/utils/strings/slices k8s.io/utils/trace -# knative.dev/eventing v0.40.0 +# knative.dev/eventing v0.40.2-0.20240220123738-40637813ad8d ## explicit; go 1.21 knative.dev/eventing/cmd/event_display knative.dev/eventing/cmd/heartbeats @@ -1462,6 +1462,7 @@ knative.dev/eventing/test/rekt/features/channel knative.dev/eventing/test/rekt/features/featureflags knative.dev/eventing/test/rekt/features/knconf 
knative.dev/eventing/test/rekt/features/new_trigger_filters +knative.dev/eventing/test/rekt/features/oidc knative.dev/eventing/test/rekt/features/source knative.dev/eventing/test/rekt/features/trigger knative.dev/eventing/test/rekt/resources/account_role @@ -1533,6 +1534,7 @@ knative.dev/pkg/client/injection/kube/informers/core/v1/secret/fake knative.dev/pkg/client/injection/kube/informers/core/v1/service knative.dev/pkg/client/injection/kube/informers/core/v1/service/fake knative.dev/pkg/client/injection/kube/informers/core/v1/serviceaccount +knative.dev/pkg/client/injection/kube/informers/core/v1/serviceaccount/fake knative.dev/pkg/client/injection/kube/informers/factory knative.dev/pkg/client/injection/kube/informers/factory/fake knative.dev/pkg/client/injection/kube/informers/factory/filtered @@ -1615,7 +1617,7 @@ knative.dev/pkg/webhook/json knative.dev/pkg/webhook/resourcesemantics knative.dev/pkg/webhook/resourcesemantics/defaulting knative.dev/pkg/webhook/resourcesemantics/validation -# knative.dev/reconciler-test v0.0.0-20240116084801-50276dfba7b3 +# knative.dev/reconciler-test v0.0.0-20240206112124-e5d2639d7c5c ## explicit; go 1.20 knative.dev/reconciler-test/cmd/eventshub knative.dev/reconciler-test/pkg/environment