diff --git a/control-plane/config/200-controller-cluster-role.yaml b/control-plane/config/200-controller-cluster-role.yaml index 039f752375..d94025aa03 100644 --- a/control-plane/config/200-controller-cluster-role.yaml +++ b/control-plane/config/200-controller-cluster-role.yaml @@ -60,6 +60,15 @@ rules: - patch - watch + - apiGroups: + - "*" + resources: + - secrets + verbs: + - list + - get + - watch + # Eventing resources and statuses we care about - apiGroups: - "eventing.knative.dev" diff --git a/control-plane/config/sink/100-kafka-sink.yaml b/control-plane/config/sink/100-kafka-sink.yaml index 1a034eb383..b41bd009ed 100644 --- a/control-plane/config/sink/100-kafka-sink.yaml +++ b/control-plane/config/sink/100-kafka-sink.yaml @@ -82,6 +82,25 @@ spec: - structured - binary default: structured + auth: + description: 'Auth configurations' + type: object + properties: + secret: + description: 'Auth secret' + type: object + properties: + ref: + # TODO add format in description (?) + description: | + Secret reference. + type: object + required: + - name + properties: + name: + description: 'Secret name' + type: string status: description: 'Status represents the current state of the Broker. This data may be out of date.' type: object diff --git a/control-plane/pkg/apis/eventing/v1alpha1/kafka_sink_defaults.go b/control-plane/pkg/apis/eventing/v1alpha1/kafka_sink_defaults.go index 0887735b59..0e9edd5a59 100644 --- a/control-plane/pkg/apis/eventing/v1alpha1/kafka_sink_defaults.go +++ b/control-plane/pkg/apis/eventing/v1alpha1/kafka_sink_defaults.go @@ -27,7 +27,7 @@ func (ks *KafkaSink) SetDefaults(ctx context.Context) { func (kss *KafkaSinkSpec) SetDefaults(ctx context.Context) { defaultMode := ModeStructured - if kss.ContentMode == nil { + if kss.ContentMode == nil || *kss.ContentMode == "" { kss.ContentMode = &defaultMode } } diff --git a/control-plane/pkg/apis/eventing/v1alpha1/kafka_sink_types.go b/control-plane/pkg/apis/eventing/v1alpha1/kafka_sink_types.go index da27e3c829..1814e2e2f5 100644 --- a/control-plane/pkg/apis/eventing/v1alpha1/kafka_sink_types.go +++ b/control-plane/pkg/apis/eventing/v1alpha1/kafka_sink_types.go @@ -23,6 +23,7 @@ import ( "k8s.io/apimachinery/pkg/util/sets" "knative.dev/pkg/apis" duckv1 "knative.dev/pkg/apis/duck/v1" + "knative.dev/pkg/kmeta" ) const ( @@ -60,6 +61,7 @@ var _ apis.Defaultable = (*KafkaSink)(nil) var _ runtime.Object = (*KafkaSink)(nil) var _ duckv1.KRShaped = (*KafkaSink)(nil) var _ apis.Convertible = (*KafkaSink)(nil) +var _ kmeta.OwnerRefable = (*KafkaSink)(nil) // KafkaSinkSpec defines the desired state of the Kafka Sink. type KafkaSinkSpec struct { @@ -91,6 +93,24 @@ type KafkaSinkSpec struct { // // +optional ContentMode *string `json:"contentMode,omitempty"` + + // Auth configurations. + Auth *Auth `json:"auth,omitempty"` +} + +type Auth struct { + // Auth Secret + Secret *Secret `json:"secret,omitempty"` +} + +type Secret struct { + // Secret reference for SASL and SSL configurations. + Ref *SecretReference `json:"ref,omitempty"` +} + +type SecretReference struct { + // Secret name. + Name string `json:"name"` } // KafkaSinkStatus represents the current state of the KafkaSink. 
@@ -129,3 +149,7 @@ func (ks *KafkaSink) GetUntypedSpec() interface{} { func (ks *KafkaSink) GetStatus() *duckv1.Status { return &ks.Status.Status } + +func (kss KafkaSinkSpec) HasAuthConfig() bool { + return kss.Auth != nil && kss.Auth.Secret != nil && kss.Auth.Secret.Ref != nil +} diff --git a/control-plane/pkg/apis/eventing/v1alpha1/kafka_sink_validation.go b/control-plane/pkg/apis/eventing/v1alpha1/kafka_sink_validation.go index 0ce8830b47..854e505fb1 100644 --- a/control-plane/pkg/apis/eventing/v1alpha1/kafka_sink_validation.go +++ b/control-plane/pkg/apis/eventing/v1alpha1/kafka_sink_validation.go @@ -62,6 +62,10 @@ func (kss *KafkaSinkSpec) Validate(ctx context.Context) *apis.FieldError { errs = errs.Also(apis.ErrInvalidValue(*kss.NumPartitions, "numPartitions")) } + if kss.HasAuthConfig() && kss.Auth.Secret.Ref.Name == "" { + errs = errs.Also(apis.ErrInvalidValue("", "auth.secret.ref.name")) + } + return errs } diff --git a/control-plane/pkg/apis/eventing/v1alpha1/kafka_sink_validation_test.go b/control-plane/pkg/apis/eventing/v1alpha1/kafka_sink_validation_test.go index 66254e67e8..377b8177b3 100644 --- a/control-plane/pkg/apis/eventing/v1alpha1/kafka_sink_validation_test.go +++ b/control-plane/pkg/apis/eventing/v1alpha1/kafka_sink_validation_test.go @@ -127,6 +127,19 @@ func TestKafkaSink_Validate(t *testing.T) { ctx: context.Background(), want: apis.ErrInvalidValue("-10", "spec.replicationFactor"), }, + { + name: "invalid secret name", + ks: &KafkaSink{ + Spec: KafkaSinkSpec{ + Topic: "topic-name-1", + BootstrapServers: []string{"broker-1:9092"}, + ContentMode: pointer.StringPtr(ModeStructured), + Auth: &Auth{Secret: &Secret{Ref: &SecretReference{}}}, + }, + }, + ctx: context.Background(), + want: apis.ErrInvalidValue("", "spec.auth.secret.ref.name"), + }, { name: "immutable replication factor", ks: &KafkaSink{ diff --git a/control-plane/pkg/apis/eventing/v1alpha1/zz_generated.deepcopy.go b/control-plane/pkg/apis/eventing/v1alpha1/zz_generated.deepcopy.go index 30cad9a964..1fc0ddcdd0 100644 --- a/control-plane/pkg/apis/eventing/v1alpha1/zz_generated.deepcopy.go +++ b/control-plane/pkg/apis/eventing/v1alpha1/zz_generated.deepcopy.go @@ -24,6 +24,27 @@ import ( runtime "k8s.io/apimachinery/pkg/runtime" ) +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *Auth) DeepCopyInto(out *Auth) { + *out = *in + if in.Secret != nil { + in, out := &in.Secret, &out.Secret + *out = new(Secret) + (*in).DeepCopyInto(*out) + } + return +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Auth. +func (in *Auth) DeepCopy() *Auth { + if in == nil { + return nil + } + out := new(Auth) + in.DeepCopyInto(out) + return out +} + // DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. func (in *KafkaSink) DeepCopyInto(out *KafkaSink) { *out = *in @@ -108,6 +129,11 @@ func (in *KafkaSinkSpec) DeepCopyInto(out *KafkaSinkSpec) { *out = new(string) **out = **in } + if in.Auth != nil { + in, out := &in.Auth, &out.Auth + *out = new(Auth) + (*in).DeepCopyInto(*out) + } return } @@ -138,3 +164,40 @@ func (in *KafkaSinkStatus) DeepCopy() *KafkaSinkStatus { in.DeepCopyInto(out) return out } + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
+func (in *Secret) DeepCopyInto(out *Secret) { + *out = *in + if in.Ref != nil { + in, out := &in.Ref, &out.Ref + *out = new(SecretReference) + **out = **in + } + return +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Secret. +func (in *Secret) DeepCopy() *Secret { + if in == nil { + return nil + } + out := new(Secret) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *SecretReference) DeepCopyInto(out *SecretReference) { + *out = *in + return +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new SecretReference. +func (in *SecretReference) DeepCopy() *SecretReference { + if in == nil { + return nil + } + out := new(SecretReference) + in.DeepCopyInto(out) + return out +} diff --git a/control-plane/pkg/contract/contract.pb.go b/control-plane/pkg/contract/contract.pb.go index 822bdb0348..60cf43be6b 100644 --- a/control-plane/pkg/contract/contract.pb.go +++ b/control-plane/pkg/contract/contract.pb.go @@ -465,6 +465,82 @@ func (*Ingress_Path) isIngress_IngressType() {} func (*Ingress_Host) isIngress_IngressType() {} +// Kubernetes resource reference. +type Reference struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // Object id. + Uuid string `protobuf:"bytes,1,opt,name=uuid,proto3" json:"uuid,omitempty"` + // Object namespace. + Namespace string `protobuf:"bytes,2,opt,name=namespace,proto3" json:"namespace,omitempty"` + // Object name. + Name string `protobuf:"bytes,3,opt,name=name,proto3" json:"name,omitempty"` + // Object version. + Version string `protobuf:"bytes,4,opt,name=version,proto3" json:"version,omitempty"` +} + +func (x *Reference) Reset() { + *x = Reference{} + if protoimpl.UnsafeEnabled { + mi := &file_proto_def_contract_proto_msgTypes[4] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *Reference) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*Reference) ProtoMessage() {} + +func (x *Reference) ProtoReflect() protoreflect.Message { + mi := &file_proto_def_contract_proto_msgTypes[4] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use Reference.ProtoReflect.Descriptor instead. 
+func (*Reference) Descriptor() ([]byte, []int) { + return file_proto_def_contract_proto_rawDescGZIP(), []int{4} +} + +func (x *Reference) GetUuid() string { + if x != nil { + return x.Uuid + } + return "" +} + +func (x *Reference) GetNamespace() string { + if x != nil { + return x.Namespace + } + return "" +} + +func (x *Reference) GetName() string { + if x != nil { + return x.Name + } + return "" +} + +func (x *Reference) GetVersion() string { + if x != nil { + return x.Version + } + return "" +} + type Resource struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache @@ -486,12 +562,16 @@ type Resource struct { EgressConfig *EgressConfig `protobuf:"bytes,5,opt,name=egressConfig,proto3" json:"egressConfig,omitempty"` // Optional egresses for this topic Egresses []*Egress `protobuf:"bytes,6,rep,name=egresses,proto3" json:"egresses,omitempty"` + // Types that are assignable to Auth: + // *Resource_AbsentAuth + // *Resource_AuthSecret + Auth isResource_Auth `protobuf_oneof:"Auth"` } func (x *Resource) Reset() { *x = Resource{} if protoimpl.UnsafeEnabled { - mi := &file_proto_def_contract_proto_msgTypes[4] + mi := &file_proto_def_contract_proto_msgTypes[5] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -504,7 +584,7 @@ func (x *Resource) String() string { func (*Resource) ProtoMessage() {} func (x *Resource) ProtoReflect() protoreflect.Message { - mi := &file_proto_def_contract_proto_msgTypes[4] + mi := &file_proto_def_contract_proto_msgTypes[5] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -517,7 +597,7 @@ func (x *Resource) ProtoReflect() protoreflect.Message { // Deprecated: Use Resource.ProtoReflect.Descriptor instead. func (*Resource) Descriptor() ([]byte, []int) { - return file_proto_def_contract_proto_rawDescGZIP(), []int{4} + return file_proto_def_contract_proto_rawDescGZIP(), []int{5} } func (x *Resource) GetUid() string { @@ -562,6 +642,76 @@ func (x *Resource) GetEgresses() []*Egress { return nil } +func (m *Resource) GetAuth() isResource_Auth { + if m != nil { + return m.Auth + } + return nil +} + +func (x *Resource) GetAbsentAuth() *empty.Empty { + if x, ok := x.GetAuth().(*Resource_AbsentAuth); ok { + return x.AbsentAuth + } + return nil +} + +func (x *Resource) GetAuthSecret() *Reference { + if x, ok := x.GetAuth().(*Resource_AuthSecret); ok { + return x.AuthSecret + } + return nil +} + +type isResource_Auth interface { + isResource_Auth() +} + +type Resource_AbsentAuth struct { + // No auth configured. + AbsentAuth *empty.Empty `protobuf:"bytes,7,opt,name=absentAuth,proto3,oneof"` +} + +type Resource_AuthSecret struct { + // Secret reference. 
+ // + // Secret format: + // + // protocol: (PLAINTEXT | SASL_PLAINTEXT | SSL | SASL_SSL) + // sasl.mechanism: (SCRAM-SHA-256 | SCRAM-SHA-512) + // ca.crt: + // user.crt: + // user.key: + // user: + // password: + // + // Validation: + // - protocol=PLAINTEXT + // - protocol=SSL + // - required: + // - ca.crt + // - user.crt + // - user.key + // - protocol=SASL_PLAINTEXT + // - required: + // - sasl.mechanism + // - user + // - password + // - protocol=SASL_SSL + // - required: + // - sasl.mechanism + // - ca.crt + // - user.crt + // - user.key + // - user + // - password + AuthSecret *Reference `protobuf:"bytes,8,opt,name=authSecret,proto3,oneof"` +} + +func (*Resource_AbsentAuth) isResource_Auth() {} + +func (*Resource_AuthSecret) isResource_Auth() {} + type Contract struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache @@ -576,7 +726,7 @@ type Contract struct { func (x *Contract) Reset() { *x = Contract{} if protoimpl.UnsafeEnabled { - mi := &file_proto_def_contract_proto_msgTypes[5] + mi := &file_proto_def_contract_proto_msgTypes[6] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -589,7 +739,7 @@ func (x *Contract) String() string { func (*Contract) ProtoMessage() {} func (x *Contract) ProtoReflect() protoreflect.Message { - mi := &file_proto_def_contract_proto_msgTypes[5] + mi := &file_proto_def_contract_proto_msgTypes[6] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -602,7 +752,7 @@ func (x *Contract) ProtoReflect() protoreflect.Message { // Deprecated: Use Contract.ProtoReflect.Descriptor instead. func (*Contract) Descriptor() ([]byte, []int) { - return file_proto_def_contract_proto_rawDescGZIP(), []int{5} + return file_proto_def_contract_proto_rawDescGZIP(), []int{6} } func (x *Contract) GetGeneration() uint64 { @@ -667,38 +817,52 @@ var file_proto_def_contract_proto_rawDesc = []byte{ 0x68, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x48, 0x00, 0x52, 0x04, 0x70, 0x61, 0x74, 0x68, 0x12, 0x14, 0x0a, 0x04, 0x68, 0x6f, 0x73, 0x74, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x48, 0x00, 0x52, 0x04, 0x68, 0x6f, 0x73, 0x74, 0x42, 0x0d, 0x0a, 0x0b, 0x69, 0x6e, 0x67, 0x72, 0x65, 0x73, 0x73, - 0x54, 0x79, 0x70, 0x65, 0x22, 0xdc, 0x01, 0x0a, 0x08, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, - 0x65, 0x12, 0x10, 0x0a, 0x03, 0x75, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, - 0x75, 0x69, 0x64, 0x12, 0x16, 0x0a, 0x06, 0x74, 0x6f, 0x70, 0x69, 0x63, 0x73, 0x18, 0x02, 0x20, - 0x03, 0x28, 0x09, 0x52, 0x06, 0x74, 0x6f, 0x70, 0x69, 0x63, 0x73, 0x12, 0x2a, 0x0a, 0x10, 0x62, - 0x6f, 0x6f, 0x74, 0x73, 0x74, 0x72, 0x61, 0x70, 0x53, 0x65, 0x72, 0x76, 0x65, 0x72, 0x73, 0x18, - 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x10, 0x62, 0x6f, 0x6f, 0x74, 0x73, 0x74, 0x72, 0x61, 0x70, - 0x53, 0x65, 0x72, 0x76, 0x65, 0x72, 0x73, 0x12, 0x22, 0x0a, 0x07, 0x69, 0x6e, 0x67, 0x72, 0x65, - 0x73, 0x73, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x08, 0x2e, 0x49, 0x6e, 0x67, 0x72, 0x65, - 0x73, 0x73, 0x52, 0x07, 0x69, 0x6e, 0x67, 0x72, 0x65, 0x73, 0x73, 0x12, 0x31, 0x0a, 0x0c, 0x65, - 0x67, 0x72, 0x65, 0x73, 0x73, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x18, 0x05, 0x20, 0x01, 0x28, - 0x0b, 0x32, 0x0d, 0x2e, 0x45, 0x67, 0x72, 0x65, 0x73, 0x73, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, - 0x52, 0x0c, 0x65, 0x67, 0x72, 0x65, 0x73, 0x73, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x12, 0x23, - 0x0a, 0x08, 0x65, 0x67, 0x72, 0x65, 0x73, 0x73, 0x65, 0x73, 0x18, 0x06, 0x20, 0x03, 0x28, 0x0b, - 0x32, 0x07, 0x2e, 0x45, 0x67, 
0x72, 0x65, 0x73, 0x73, 0x52, 0x08, 0x65, 0x67, 0x72, 0x65, 0x73, - 0x73, 0x65, 0x73, 0x22, 0x53, 0x0a, 0x08, 0x43, 0x6f, 0x6e, 0x74, 0x72, 0x61, 0x63, 0x74, 0x12, - 0x1e, 0x0a, 0x0a, 0x67, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x01, 0x20, - 0x01, 0x28, 0x04, 0x52, 0x0a, 0x67, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x12, - 0x27, 0x0a, 0x09, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x73, 0x18, 0x02, 0x20, 0x03, - 0x28, 0x0b, 0x32, 0x09, 0x2e, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x52, 0x09, 0x72, - 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x73, 0x2a, 0x2c, 0x0a, 0x0d, 0x42, 0x61, 0x63, 0x6b, - 0x6f, 0x66, 0x66, 0x50, 0x6f, 0x6c, 0x69, 0x63, 0x79, 0x12, 0x0f, 0x0a, 0x0b, 0x45, 0x78, 0x70, - 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x69, 0x61, 0x6c, 0x10, 0x00, 0x12, 0x0a, 0x0a, 0x06, 0x4c, 0x69, - 0x6e, 0x65, 0x61, 0x72, 0x10, 0x01, 0x2a, 0x29, 0x0a, 0x0b, 0x43, 0x6f, 0x6e, 0x74, 0x65, 0x6e, - 0x74, 0x4d, 0x6f, 0x64, 0x65, 0x12, 0x0a, 0x0a, 0x06, 0x42, 0x49, 0x4e, 0x41, 0x52, 0x59, 0x10, - 0x00, 0x12, 0x0e, 0x0a, 0x0a, 0x53, 0x54, 0x52, 0x55, 0x43, 0x54, 0x55, 0x52, 0x45, 0x44, 0x10, - 0x01, 0x42, 0x5b, 0x0a, 0x2a, 0x64, 0x65, 0x76, 0x2e, 0x6b, 0x6e, 0x61, 0x74, 0x69, 0x76, 0x65, - 0x2e, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x69, 0x6e, 0x67, 0x2e, 0x6b, 0x61, 0x66, 0x6b, 0x61, 0x2e, - 0x62, 0x72, 0x6f, 0x6b, 0x65, 0x72, 0x2e, 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x61, 0x63, 0x74, 0x42, - 0x11, 0x44, 0x61, 0x74, 0x61, 0x50, 0x6c, 0x61, 0x6e, 0x65, 0x43, 0x6f, 0x6e, 0x74, 0x72, 0x61, - 0x63, 0x74, 0x5a, 0x1a, 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, 0x2d, 0x70, 0x6c, 0x61, 0x6e, - 0x65, 0x2f, 0x70, 0x6b, 0x67, 0x2f, 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x61, 0x63, 0x74, 0x62, 0x06, - 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, + 0x54, 0x79, 0x70, 0x65, 0x22, 0x6b, 0x0a, 0x09, 0x52, 0x65, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, + 0x65, 0x12, 0x12, 0x0a, 0x04, 0x75, 0x75, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, + 0x04, 0x75, 0x75, 0x69, 0x64, 0x12, 0x1c, 0x0a, 0x09, 0x6e, 0x61, 0x6d, 0x65, 0x73, 0x70, 0x61, + 0x63, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x6e, 0x61, 0x6d, 0x65, 0x73, 0x70, + 0x61, 0x63, 0x65, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, + 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x18, 0x0a, 0x07, 0x76, 0x65, 0x72, 0x73, 0x69, + 0x6f, 0x6e, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, + 0x6e, 0x22, 0xcc, 0x02, 0x0a, 0x08, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x12, 0x10, + 0x0a, 0x03, 0x75, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x75, 0x69, 0x64, + 0x12, 0x16, 0x0a, 0x06, 0x74, 0x6f, 0x70, 0x69, 0x63, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x09, + 0x52, 0x06, 0x74, 0x6f, 0x70, 0x69, 0x63, 0x73, 0x12, 0x2a, 0x0a, 0x10, 0x62, 0x6f, 0x6f, 0x74, + 0x73, 0x74, 0x72, 0x61, 0x70, 0x53, 0x65, 0x72, 0x76, 0x65, 0x72, 0x73, 0x18, 0x03, 0x20, 0x01, + 0x28, 0x09, 0x52, 0x10, 0x62, 0x6f, 0x6f, 0x74, 0x73, 0x74, 0x72, 0x61, 0x70, 0x53, 0x65, 0x72, + 0x76, 0x65, 0x72, 0x73, 0x12, 0x22, 0x0a, 0x07, 0x69, 0x6e, 0x67, 0x72, 0x65, 0x73, 0x73, 0x18, + 0x04, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x08, 0x2e, 0x49, 0x6e, 0x67, 0x72, 0x65, 0x73, 0x73, 0x52, + 0x07, 0x69, 0x6e, 0x67, 0x72, 0x65, 0x73, 0x73, 0x12, 0x31, 0x0a, 0x0c, 0x65, 0x67, 0x72, 0x65, + 0x73, 0x73, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x18, 0x05, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x0d, + 0x2e, 0x45, 0x67, 0x72, 0x65, 0x73, 0x73, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x52, 0x0c, 0x65, + 0x67, 0x72, 0x65, 
0x73, 0x73, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x12, 0x23, 0x0a, 0x08, 0x65, + 0x67, 0x72, 0x65, 0x73, 0x73, 0x65, 0x73, 0x18, 0x06, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x07, 0x2e, + 0x45, 0x67, 0x72, 0x65, 0x73, 0x73, 0x52, 0x08, 0x65, 0x67, 0x72, 0x65, 0x73, 0x73, 0x65, 0x73, + 0x12, 0x38, 0x0a, 0x0a, 0x61, 0x62, 0x73, 0x65, 0x6e, 0x74, 0x41, 0x75, 0x74, 0x68, 0x18, 0x07, + 0x20, 0x01, 0x28, 0x0b, 0x32, 0x16, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, + 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x45, 0x6d, 0x70, 0x74, 0x79, 0x48, 0x00, 0x52, 0x0a, + 0x61, 0x62, 0x73, 0x65, 0x6e, 0x74, 0x41, 0x75, 0x74, 0x68, 0x12, 0x2c, 0x0a, 0x0a, 0x61, 0x75, + 0x74, 0x68, 0x53, 0x65, 0x63, 0x72, 0x65, 0x74, 0x18, 0x08, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x0a, + 0x2e, 0x52, 0x65, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x48, 0x00, 0x52, 0x0a, 0x61, 0x75, + 0x74, 0x68, 0x53, 0x65, 0x63, 0x72, 0x65, 0x74, 0x42, 0x06, 0x0a, 0x04, 0x41, 0x75, 0x74, 0x68, + 0x22, 0x53, 0x0a, 0x08, 0x43, 0x6f, 0x6e, 0x74, 0x72, 0x61, 0x63, 0x74, 0x12, 0x1e, 0x0a, 0x0a, + 0x67, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x01, 0x20, 0x01, 0x28, 0x04, + 0x52, 0x0a, 0x67, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x27, 0x0a, 0x09, + 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, + 0x09, 0x2e, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x52, 0x09, 0x72, 0x65, 0x73, 0x6f, + 0x75, 0x72, 0x63, 0x65, 0x73, 0x2a, 0x2c, 0x0a, 0x0d, 0x42, 0x61, 0x63, 0x6b, 0x6f, 0x66, 0x66, + 0x50, 0x6f, 0x6c, 0x69, 0x63, 0x79, 0x12, 0x0f, 0x0a, 0x0b, 0x45, 0x78, 0x70, 0x6f, 0x6e, 0x65, + 0x6e, 0x74, 0x69, 0x61, 0x6c, 0x10, 0x00, 0x12, 0x0a, 0x0a, 0x06, 0x4c, 0x69, 0x6e, 0x65, 0x61, + 0x72, 0x10, 0x01, 0x2a, 0x29, 0x0a, 0x0b, 0x43, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x4d, 0x6f, + 0x64, 0x65, 0x12, 0x0a, 0x0a, 0x06, 0x42, 0x49, 0x4e, 0x41, 0x52, 0x59, 0x10, 0x00, 0x12, 0x0e, + 0x0a, 0x0a, 0x53, 0x54, 0x52, 0x55, 0x43, 0x54, 0x55, 0x52, 0x45, 0x44, 0x10, 0x01, 0x42, 0x5b, + 0x0a, 0x2a, 0x64, 0x65, 0x76, 0x2e, 0x6b, 0x6e, 0x61, 0x74, 0x69, 0x76, 0x65, 0x2e, 0x65, 0x76, + 0x65, 0x6e, 0x74, 0x69, 0x6e, 0x67, 0x2e, 0x6b, 0x61, 0x66, 0x6b, 0x61, 0x2e, 0x62, 0x72, 0x6f, + 0x6b, 0x65, 0x72, 0x2e, 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x61, 0x63, 0x74, 0x42, 0x11, 0x44, 0x61, + 0x74, 0x61, 0x50, 0x6c, 0x61, 0x6e, 0x65, 0x43, 0x6f, 0x6e, 0x74, 0x72, 0x61, 0x63, 0x74, 0x5a, + 0x1a, 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, 0x2d, 0x70, 0x6c, 0x61, 0x6e, 0x65, 0x2f, 0x70, + 0x6b, 0x67, 0x2f, 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x61, 0x63, 0x74, 0x62, 0x06, 0x70, 0x72, 0x6f, + 0x74, 0x6f, 0x33, } var ( @@ -714,7 +878,7 @@ func file_proto_def_contract_proto_rawDescGZIP() []byte { } var file_proto_def_contract_proto_enumTypes = make([]protoimpl.EnumInfo, 2) -var file_proto_def_contract_proto_msgTypes = make([]protoimpl.MessageInfo, 7) +var file_proto_def_contract_proto_msgTypes = make([]protoimpl.MessageInfo, 8) var file_proto_def_contract_proto_goTypes = []interface{}{ (BackoffPolicy)(0), // 0: BackoffPolicy (ContentMode)(0), // 1: ContentMode @@ -722,26 +886,29 @@ var file_proto_def_contract_proto_goTypes = []interface{}{ (*EgressConfig)(nil), // 3: EgressConfig (*Egress)(nil), // 4: Egress (*Ingress)(nil), // 5: Ingress - (*Resource)(nil), // 6: Resource - (*Contract)(nil), // 7: Contract - nil, // 8: Filter.AttributesEntry - (*empty.Empty)(nil), // 9: google.protobuf.Empty + (*Reference)(nil), // 6: Reference + (*Resource)(nil), // 7: Resource + (*Contract)(nil), // 8: Contract + 
nil, // 9: Filter.AttributesEntry + (*empty.Empty)(nil), // 10: google.protobuf.Empty } var file_proto_def_contract_proto_depIdxs = []int32{ - 8, // 0: Filter.attributes:type_name -> Filter.AttributesEntry - 0, // 1: EgressConfig.backoffPolicy:type_name -> BackoffPolicy - 9, // 2: Egress.replyToOriginalTopic:type_name -> google.protobuf.Empty - 2, // 3: Egress.filter:type_name -> Filter - 1, // 4: Ingress.contentMode:type_name -> ContentMode - 5, // 5: Resource.ingress:type_name -> Ingress - 3, // 6: Resource.egressConfig:type_name -> EgressConfig - 4, // 7: Resource.egresses:type_name -> Egress - 6, // 8: Contract.resources:type_name -> Resource - 9, // [9:9] is the sub-list for method output_type - 9, // [9:9] is the sub-list for method input_type - 9, // [9:9] is the sub-list for extension type_name - 9, // [9:9] is the sub-list for extension extendee - 0, // [0:9] is the sub-list for field type_name + 9, // 0: Filter.attributes:type_name -> Filter.AttributesEntry + 0, // 1: EgressConfig.backoffPolicy:type_name -> BackoffPolicy + 10, // 2: Egress.replyToOriginalTopic:type_name -> google.protobuf.Empty + 2, // 3: Egress.filter:type_name -> Filter + 1, // 4: Ingress.contentMode:type_name -> ContentMode + 5, // 5: Resource.ingress:type_name -> Ingress + 3, // 6: Resource.egressConfig:type_name -> EgressConfig + 4, // 7: Resource.egresses:type_name -> Egress + 10, // 8: Resource.absentAuth:type_name -> google.protobuf.Empty + 6, // 9: Resource.authSecret:type_name -> Reference + 7, // 10: Contract.resources:type_name -> Resource + 11, // [11:11] is the sub-list for method output_type + 11, // [11:11] is the sub-list for method input_type + 11, // [11:11] is the sub-list for extension type_name + 11, // [11:11] is the sub-list for extension extendee + 0, // [0:11] is the sub-list for field type_name } func init() { file_proto_def_contract_proto_init() } @@ -799,7 +966,7 @@ func file_proto_def_contract_proto_init() { } } file_proto_def_contract_proto_msgTypes[4].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*Resource); i { + switch v := v.(*Reference); i { case 0: return &v.state case 1: @@ -811,6 +978,18 @@ func file_proto_def_contract_proto_init() { } } file_proto_def_contract_proto_msgTypes[5].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*Resource); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_proto_def_contract_proto_msgTypes[6].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*Contract); i { case 0: return &v.state @@ -831,13 +1010,17 @@ func file_proto_def_contract_proto_init() { (*Ingress_Path)(nil), (*Ingress_Host)(nil), } + file_proto_def_contract_proto_msgTypes[5].OneofWrappers = []interface{}{ + (*Resource_AbsentAuth)(nil), + (*Resource_AuthSecret)(nil), + } type x struct{} out := protoimpl.TypeBuilder{ File: protoimpl.DescBuilder{ GoPackagePath: reflect.TypeOf(x{}).PkgPath(), RawDescriptor: file_proto_def_contract_proto_rawDesc, NumEnums: 2, - NumMessages: 7, + NumMessages: 8, NumExtensions: 0, NumServices: 0, }, diff --git a/control-plane/pkg/reconciler/base/reconciler.go b/control-plane/pkg/reconciler/base/reconciler.go index 765582be9c..f8a0bc2b9e 100644 --- a/control-plane/pkg/reconciler/base/reconciler.go +++ b/control-plane/pkg/reconciler/base/reconciler.go @@ -20,11 +20,9 @@ import ( "context" "fmt" + "go.uber.org/zap" "google.golang.org/protobuf/encoding/protojson" "google.golang.org/protobuf/proto" - 
"knative.dev/eventing-kafka-broker/control-plane/pkg/contract" - - "go.uber.org/zap" corev1 "k8s.io/api/core/v1" apierrors "k8s.io/apimachinery/pkg/api/errors" metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" @@ -32,6 +30,10 @@ import ( "k8s.io/client-go/kubernetes" corelisters "k8s.io/client-go/listers/core/v1" "k8s.io/client-go/util/retry" + "knative.dev/pkg/tracker" + + "knative.dev/eventing-kafka-broker/control-plane/pkg/contract" + "knative.dev/eventing-kafka-broker/control-plane/pkg/security" ) const ( @@ -58,8 +60,12 @@ const ( // Base reconciler for broker and trigger reconciler. // It contains common logic for both trigger and broker reconciler. type Reconciler struct { - KubeClient kubernetes.Interface - PodLister corelisters.PodLister + KubeClient kubernetes.Interface + PodLister corelisters.PodLister + SecretLister corelisters.SecretLister + + SecretTracker tracker.Interface + ConfigMapTracker tracker.Interface DataPlaneConfigMapNamespace string DataPlaneConfigMapName string @@ -255,3 +261,44 @@ func (r *Reconciler) receiverSelector() labels.Selector { func (r *Reconciler) dispatcherSelector() labels.Selector { return labels.SelectorFromSet(map[string]string{"app": r.DispatcherLabel}) } + +func (r *Reconciler) SecretProviderFunc() security.SecretProviderFunc { + return security.DefaultSecretProviderFunc(r.SecretLister, r.KubeClient) +} + +func (r *Reconciler) TrackSecret(secret *corev1.Secret, parent metav1.Object) error { + if secret == nil { + return nil + } + ref := tracker.Reference{ + // Do not use cm.APIVersion and cm.Kind since they might be empty when they've been pulled from a lister. + APIVersion: "v1", + Kind: "Secret", + Namespace: secret.Namespace, + Name: secret.Name, + } + return r.SecretTracker.TrackReference(ref, parent) +} + +func (r *Reconciler) TrackConfigMap(cm *corev1.ConfigMap, parent metav1.Object) error { + if cm == nil { + return nil + } + ref := tracker.Reference{ + // Do not use cm.APIVersion and cm.Kind since they might be empty when they've been pulled from a lister. + APIVersion: "v1", + Kind: "ConfigMap", + Namespace: cm.Namespace, + Name: cm.Name, + } + return r.ConfigMapTracker.TrackReference(ref, parent) +} + +func (r *Reconciler) OnDeleteObserver(obj interface{}) { + if r.ConfigMapTracker != nil { + r.ConfigMapTracker.OnDeletedObserver(obj) + } + if r.SecretTracker != nil { + r.SecretTracker.OnDeletedObserver(obj) + } +} diff --git a/control-plane/pkg/reconciler/base/reconciler_test.go b/control-plane/pkg/reconciler/base/reconciler_test.go new file mode 100644 index 0000000000..ec424cb0f0 --- /dev/null +++ b/control-plane/pkg/reconciler/base/reconciler_test.go @@ -0,0 +1,49 @@ +/* + * Copyright 2020 The Knative Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package base + +import ( + "testing" + "time" + + "github.com/stretchr/testify/assert" + corev1 "k8s.io/api/core/v1" + metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" + "k8s.io/apimachinery/pkg/types" + eventing "knative.dev/eventing/pkg/apis/eventing/v1beta1" + "knative.dev/pkg/tracker" +) + +func TestTrackConfigMap(t *testing.T) { + + r := &Reconciler{ + ConfigMapTracker: tracker.New(func(name types.NamespacedName) {}, time.Second), + } + + cm := &corev1.ConfigMap{ + TypeMeta: metav1.TypeMeta{ + Kind: "ConfigMap", + APIVersion: "v1", + }, + ObjectMeta: metav1.ObjectMeta{ + Namespace: "my-namespace", + Name: "my-name", + }, + } + err := r.TrackConfigMap(cm, &eventing.Broker{}) + assert.Nil(t, err) +} diff --git a/control-plane/pkg/reconciler/broker/broker.go b/control-plane/pkg/reconciler/broker/broker.go index 97297ee634..abe9acacd6 100644 --- a/control-plane/pkg/reconciler/broker/broker.go +++ b/control-plane/pkg/reconciler/broker/broker.go @@ -23,8 +23,6 @@ import ( "strings" "sync" - "knative.dev/eventing-kafka-broker/control-plane/pkg/contract" - "github.com/Shopify/sarama" "go.uber.org/zap" corev1 "k8s.io/api/core/v1" @@ -37,11 +35,13 @@ import ( "knative.dev/pkg/resolver" "knative.dev/eventing-kafka-broker/control-plane/pkg/config" + "knative.dev/eventing-kafka-broker/control-plane/pkg/contract" coreconfig "knative.dev/eventing-kafka-broker/control-plane/pkg/core/config" "knative.dev/eventing-kafka-broker/control-plane/pkg/log" "knative.dev/eventing-kafka-broker/control-plane/pkg/receiver" "knative.dev/eventing-kafka-broker/control-plane/pkg/reconciler/base" "knative.dev/eventing-kafka-broker/control-plane/pkg/reconciler/kafka" + "knative.dev/eventing-kafka-broker/control-plane/pkg/security" ) const ( @@ -95,15 +95,37 @@ func (r *Reconciler) reconcileKind(ctx context.Context, broker *eventing.Broker) } statusConditionManager.DataPlaneAvailable() - topicConfig, err := r.topicConfig(logger, broker) + topicConfig, brokerConfig, err := r.topicConfig(logger, broker) if err != nil { return statusConditionManager.FailedToResolveConfig(err) } statusConditionManager.ConfigResolved() + if err := r.TrackConfigMap(brokerConfig, broker); err != nil { + return fmt.Errorf("failed to track broker config: %w", err) + } + logger.Debug("config resolved", zap.Any("config", topicConfig)) - topic, err := r.ClusterAdmin.CreateTopic(logger, kafka.Topic(TopicPrefix, broker), topicConfig) + securityOption, secret, err := security.NewOptionFromSecret(ctx, &security.MTConfigMapSecretLocator{ConfigMap: brokerConfig}, r.SecretProviderFunc()) + if err != nil { + return fmt.Errorf("failed to create security (auth) option: %w", err) + } + + if secret != nil { + logger.Debug("Secret reference", + zap.String("apiVersion", secret.APIVersion), + zap.String("name", secret.Name), + zap.String("namespace", secret.Namespace), + zap.String("kind", secret.Kind), + ) + } + + if err := r.TrackSecret(secret, broker); err != nil { + return fmt.Errorf("failed to track secret: %w", err) + } + + topic, err := r.ClusterAdmin.CreateTopic(logger, kafka.Topic(TopicPrefix, broker), topicConfig, securityOption) if err != nil { return statusConditionManager.FailedToCreateTopic(topic, err) } @@ -131,7 +153,7 @@ func (r *Reconciler) reconcileKind(ctx context.Context, broker *eventing.Broker) ) // Get resource configuration. 
- brokerResource, err := r.getBrokerResource(ctx, topic, broker, topicConfig) + brokerResource, err := r.getBrokerResource(ctx, topic, broker, secret, topicConfig) if err != nil { return statusConditionManager.FailedToGetConfig(err) } @@ -240,13 +262,18 @@ func (r *Reconciler) finalizeKind(ctx context.Context, broker *eventing.Broker) // eventually be seen by the dispatcher pod and resources will be deleted accordingly. } - topicConfig, err := r.topicConfig(logger, broker) + topicConfig, brokerConfig, err := r.topicConfig(logger, broker) if err != nil { return fmt.Errorf("failed to resolve broker config: %w", err) } - bootstrapServers := topicConfig.BootstrapServers - topic, err := r.ClusterAdmin.DeleteTopic(kafka.Topic(TopicPrefix, broker), bootstrapServers) + authProvider := &security.MTConfigMapSecretLocator{ConfigMap: brokerConfig} + securityOption, _, err := security.NewOptionFromSecret(ctx, authProvider, r.SecretProviderFunc()) + if err != nil { + return fmt.Errorf("failed to create security (auth) option: %w", err) + } + + topic, err := r.ClusterAdmin.DeleteTopic(kafka.Topic(TopicPrefix, broker), topicConfig.BootstrapServers, securityOption) if err != nil { return err } @@ -260,16 +287,17 @@ func incrementContractGeneration(generation uint64) uint64 { return (generation + 1) % (math.MaxUint64 - 1) } -func (r *Reconciler) topicConfig(logger *zap.Logger, broker *eventing.Broker) (*kafka.TopicConfig, error) { +func (r *Reconciler) topicConfig(logger *zap.Logger, broker *eventing.Broker) (*kafka.TopicConfig, *corev1.ConfigMap, error) { logger.Debug("broker config", zap.Any("broker.spec.config", broker.Spec.Config)) if broker.Spec.Config == nil { - return r.defaultConfig() + tc, err := r.defaultConfig() + return tc, nil, err } if strings.ToLower(broker.Spec.Config.Kind) != "configmap" { // TODO: is there any constant? 
- return nil, fmt.Errorf("supported config Kind: ConfigMap - got %s", broker.Spec.Config.Kind) + return nil, nil, fmt.Errorf("supported config Kind: ConfigMap - got %s", broker.Spec.Config.Kind) } namespace := broker.Spec.Config.Namespace @@ -279,15 +307,15 @@ func (r *Reconciler) topicConfig(logger *zap.Logger, broker *eventing.Broker) (* } cm, err := r.ConfigMapLister.ConfigMaps(namespace).Get(broker.Spec.Config.Name) if err != nil { - return nil, fmt.Errorf("failed to get configmap %s/%s: %w", namespace, broker.Spec.Config.Name, err) + return nil, nil, fmt.Errorf("failed to get configmap %s/%s: %w", namespace, broker.Spec.Config.Name, err) } brokerConfig, err := configFromConfigMap(logger, cm) if err != nil { - return nil, err + return nil, cm, err } - return brokerConfig, nil + return brokerConfig, cm, nil } func (r *Reconciler) defaultTopicDetail() sarama.TopicDetail { @@ -311,7 +339,7 @@ func (r *Reconciler) defaultConfig() (*kafka.TopicConfig, error) { }, nil } -func (r *Reconciler) getBrokerResource(ctx context.Context, topic string, broker *eventing.Broker, config *kafka.TopicConfig) (*contract.Resource, error) { +func (r *Reconciler) getBrokerResource(ctx context.Context, topic string, broker *eventing.Broker, secret *corev1.Secret, config *kafka.TopicConfig) (*contract.Resource, error) { res := &contract.Resource{ Uid: string(broker.UID), Topics: []string{topic}, @@ -323,6 +351,17 @@ func (r *Reconciler) getBrokerResource(ctx context.Context, topic string, broker BootstrapServers: config.GetBootstrapServers(), } + if secret != nil { + res.Auth = &contract.Resource_AuthSecret{ + AuthSecret: &contract.Reference{ + Uuid: string(secret.UID), + Namespace: secret.Namespace, + Name: secret.Name, + Version: secret.ResourceVersion, + }, + } + } + delivery := broker.Spec.Delivery if delivery != nil { diff --git a/control-plane/pkg/reconciler/broker/broker_config.go b/control-plane/pkg/reconciler/broker/broker_config.go index 31d09dcc4a..b942f3d114 100644 --- a/control-plane/pkg/reconciler/broker/broker_config.go +++ b/control-plane/pkg/reconciler/broker/broker_config.go @@ -45,10 +45,12 @@ func configFromConfigMap(logger *zap.Logger, cm *corev1.ConfigMap) (*kafka.Topic if topicDetail.NumPartitions <= 0 || replicationFactor <= 0 || bootstrapServers == "" { return nil, fmt.Errorf( - "invalid configuration - numPartitions: %d - replicationFactor: %d - bootstrapServers: %s", + "invalid configuration - numPartitions: %d - replicationFactor: %d - bootstrapServers: %s - ConfigMap data: %v", topicDetail.NumPartitions, replicationFactor, - bootstrapServers) + bootstrapServers, + cm.Data, + ) } topicDetail.ReplicationFactor = int16(replicationFactor) diff --git a/control-plane/pkg/reconciler/broker/broker_test.go b/control-plane/pkg/reconciler/broker/broker_test.go index 19314b41e2..973e25e71a 100644 --- a/control-plane/pkg/reconciler/broker/broker_test.go +++ b/control-plane/pkg/reconciler/broker/broker_test.go @@ -876,6 +876,86 @@ func brokerReconciliation(t *testing.T, format string, configs Configs) { }, }, }, + { + Name: "Reconciled normal - with auth config", + Objects: []runtime.Object{ + NewBroker( + WithBrokerConfig(KReference(BrokerConfig(bootstrapServers, 20, 5, + BrokerAuthConfig("secret-1"), + ))), + ), + NewSSLSecret(ConfigMapNamespace, "secret-1"), + BrokerConfig(bootstrapServers, 20, 5, BrokerAuthConfig("secret-1")), + NewConfigMap(&configs, nil), + NewService(), + BrokerReceiverPod(configs.SystemNamespace, map[string]string{ + base.VolumeGenerationAnnotationKey: "1", + 
"annotation_to_preserve": "value_to_preserve", + }), + BrokerDispatcherPod(configs.SystemNamespace, map[string]string{ + base.VolumeGenerationAnnotationKey: "2", + "annotation_to_preserve": "value_to_preserve", + }), + }, + Key: testKey, + WantEvents: []string{ + finalizerUpdatedEvent, + }, + WantUpdates: []clientgotesting.UpdateActionImpl{ + ConfigMapUpdate(&configs, &contract.Contract{ + Resources: []*contract.Resource{ + { + Uid: BrokerUUID, + Topics: []string{BrokerTopic()}, + Ingress: &contract.Ingress{ContentMode: contract.ContentMode_BINARY, IngressType: &contract.Ingress_Path{Path: receiver.Path(BrokerNamespace, BrokerName)}}, + BootstrapServers: bootstrapServers, + Auth: &contract.Resource_AuthSecret{ + AuthSecret: &contract.Reference{ + Uuid: SecretUUID, + Namespace: ConfigMapNamespace, + Name: "secret-1", + Version: SecretResourceVersion, + }, + }, + }, + }, + Generation: 1, + }), + BrokerReceiverPodUpdate(configs.SystemNamespace, map[string]string{ + base.VolumeGenerationAnnotationKey: "1", + "annotation_to_preserve": "value_to_preserve", + }), + BrokerDispatcherPodUpdate(configs.SystemNamespace, map[string]string{ + base.VolumeGenerationAnnotationKey: "1", + "annotation_to_preserve": "value_to_preserve", + }), + }, + WantPatches: []clientgotesting.PatchActionImpl{ + patchFinalizers(), + }, + WantStatusUpdates: []clientgotesting.UpdateActionImpl{ + { + Object: NewBroker( + WithBrokerConfig(KReference(BrokerConfig(bootstrapServers, 20, 5, + BrokerAuthConfig("secret-1"), + ))), + reconcilertesting.WithInitBrokerConditions, + BrokerConfigMapUpdatedReady(&configs), + BrokerDataPlaneAvailable, + BrokerConfigParsed, + BrokerTopicReady, + BrokerAddressable(&configs), + ), + }, + }, + OtherTestData: map[string]interface{}{ + BootstrapServersConfigMapKey: bootstrapServers, + ExpectedTopicDetail: sarama.TopicDetail{ + NumPartitions: 20, + ReplicationFactor: 5, + }, + }, + }, { Name: "Failed to parse broker config - not found", Objects: []runtime.Object{ @@ -1681,6 +1761,7 @@ func useTable(t *testing.T, table TableTest, configs *Configs) { Reconciler: &base.Reconciler{ KubeClient: kubeclient.Get(ctx), PodLister: listers.GetPodLister(), + SecretLister: listers.GetSecretLister(), DataPlaneConfigMapNamespace: configs.DataPlaneConfigMapNamespace, DataPlaneConfigMapName: configs.DataPlaneConfigMapName, DataPlaneConfigFormat: configs.DataPlaneConfigFormat, @@ -1704,6 +1785,9 @@ func useTable(t *testing.T, table TableTest, configs *Configs) { } reconciler.SetBootstrapServers(bootstrapServers) + reconciler.ConfigMapTracker = &FakeTracker{} + reconciler.SecretTracker = &FakeTracker{} + r := brokerreconciler.NewReconciler( ctx, logging.FromContext(ctx), diff --git a/control-plane/pkg/reconciler/broker/controller.go b/control-plane/pkg/reconciler/broker/controller.go index 7d8b382047..c33256d819 100644 --- a/control-plane/pkg/reconciler/broker/controller.go +++ b/control-plane/pkg/reconciler/broker/controller.go @@ -23,6 +23,7 @@ import ( "github.com/Shopify/sarama" "go.uber.org/zap" + corev1 "k8s.io/api/core/v1" metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" "k8s.io/client-go/tools/cache" eventing "knative.dev/eventing/pkg/apis/eventing/v1" @@ -31,11 +32,13 @@ import ( "knative.dev/pkg/controller" "knative.dev/pkg/logging" "knative.dev/pkg/resolver" + "knative.dev/pkg/tracker" brokerinformer "knative.dev/eventing/pkg/client/injection/informers/eventing/v1/broker" brokerreconciler "knative.dev/eventing/pkg/client/injection/reconciler/eventing/v1/broker" configmapinformer 
"knative.dev/pkg/client/injection/kube/informers/core/v1/configmap" podinformer "knative.dev/pkg/client/injection/kube/informers/core/v1/pod" + secretinformer "knative.dev/pkg/client/injection/kube/informers/core/v1/secret" "knative.dev/eventing-kafka-broker/control-plane/pkg/config" "knative.dev/eventing-kafka-broker/control-plane/pkg/reconciler/base" @@ -61,6 +64,7 @@ func NewController(ctx context.Context, watcher configmap.Watcher, configs *Conf Reconciler: &base.Reconciler{ KubeClient: kubeclient.Get(ctx), PodLister: podinformer.Get(ctx).Lister(), + SecretLister: secretinformer.Get(ctx).Lister(), DataPlaneConfigMapNamespace: configs.DataPlaneConfigMapNamespace, DataPlaneConfigMapName: configs.DataPlaneConfigMapName, DataPlaneConfigFormat: configs.DataPlaneConfigFormat, @@ -120,6 +124,27 @@ func NewController(ctx context.Context, watcher configmap.Watcher, configs *Conf }, }) + reconciler.SecretTracker = tracker.New(impl.EnqueueKey, controller.GetTrackerLease(ctx)) + secretinformer.Get(ctx).Informer().AddEventHandler(controller.HandleAll(reconciler.SecretTracker.OnChanged)) + + reconciler.ConfigMapTracker = tracker.New(impl.EnqueueKey, controller.GetTrackerLease(ctx)) + configmapinformer.Get(ctx).Informer().AddEventHandler(controller.HandleAll( + // Call the tracker's OnChanged method, but we've seen the objects + // coming through this path missing TypeMeta, so ensure it is properly + // populated. + controller.EnsureTypeMeta( + reconciler.ConfigMapTracker.OnChanged, + corev1.SchemeGroupVersion.WithKind("ConfigMap"), + ), + )) + + brokerInformer.Informer().AddEventHandler(cache.FilteringResourceEventHandler{ + FilterFunc: kafka.BrokerClassFilter(), + Handler: cache.ResourceEventHandlerFuncs{ + DeleteFunc: reconciler.OnDeleteObserver, + }, + }) + cm, err := reconciler.KubeClient.CoreV1().ConfigMaps(configs.SystemNamespace).Get(ctx, configs.GeneralConfigMapName, metav1.GetOptions{}) if err != nil { panic(fmt.Errorf("failed to get config map %s/%s: %w", configs.SystemNamespace, configs.GeneralConfigMapName, err)) diff --git a/control-plane/pkg/reconciler/broker/controller_test.go b/control-plane/pkg/reconciler/broker/controller_test.go index 71ba497a0f..ae0caa44e7 100644 --- a/control-plane/pkg/reconciler/broker/controller_test.go +++ b/control-plane/pkg/reconciler/broker/controller_test.go @@ -29,6 +29,7 @@ import ( fakekubeclient "knative.dev/pkg/client/injection/kube/client/fake" _ "knative.dev/pkg/client/injection/kube/informers/core/v1/configmap/fake" _ "knative.dev/pkg/client/injection/kube/informers/core/v1/pod/fake" + _ "knative.dev/pkg/client/injection/kube/informers/core/v1/secret/fake" "knative.dev/pkg/configmap" dynamicclient "knative.dev/pkg/injection/clients/dynamicclient/fake" reconcilertesting "knative.dev/pkg/reconciler/testing" diff --git a/control-plane/pkg/reconciler/broker/testdata/ca.crt b/control-plane/pkg/reconciler/broker/testdata/ca.crt new file mode 120000 index 0000000000..63004fe697 --- /dev/null +++ b/control-plane/pkg/reconciler/broker/testdata/ca.crt @@ -0,0 +1 @@ +../../../security/testdata/ca.crt \ No newline at end of file diff --git a/control-plane/pkg/reconciler/broker/testdata/user.crt b/control-plane/pkg/reconciler/broker/testdata/user.crt new file mode 120000 index 0000000000..202a9e41fa --- /dev/null +++ b/control-plane/pkg/reconciler/broker/testdata/user.crt @@ -0,0 +1 @@ +../../../security/testdata/user.crt \ No newline at end of file diff --git a/control-plane/pkg/reconciler/broker/testdata/user.key 
b/control-plane/pkg/reconciler/broker/testdata/user.key new file mode 120000 index 0000000000..19fe6fc64b --- /dev/null +++ b/control-plane/pkg/reconciler/broker/testdata/user.key @@ -0,0 +1 @@ +../../../security/testdata/user.key \ No newline at end of file diff --git a/control-plane/pkg/reconciler/kafka/admin.go b/control-plane/pkg/reconciler/kafka/admin.go index 99716ec8e7..45084b8873 100644 --- a/control-plane/pkg/reconciler/kafka/admin.go +++ b/control-plane/pkg/reconciler/kafka/admin.go @@ -20,6 +20,8 @@ import ( "fmt" "github.com/Shopify/sarama" + + "knative.dev/eventing-kafka-broker/control-plane/pkg/security" ) // NewClusterAdminFunc creates new sarama.ClusterAdmin. @@ -36,14 +38,19 @@ func AdminConfig() *sarama.Config { // GetClusterAdmin creates a new sarama.ClusterAdmin. // // The caller is responsible for closing the sarama.ClusterAdmin. -func GetClusterAdmin(adminFunc NewClusterAdminFunc, bootstrapServers []string) (sarama.ClusterAdmin, error) { - return GetClusterAdminFromConfig(adminFunc, AdminConfig(), bootstrapServers) +func GetClusterAdmin(adminFunc NewClusterAdminFunc, bootstrapServers []string, secOptions security.ConfigOption) (sarama.ClusterAdmin, error) { + return GetClusterAdminFromConfig(adminFunc, AdminConfig(), bootstrapServers, secOptions) } // GetClusterAdminFromConfig creates a new sarama.ClusterAdmin. // // The caller is responsible for closing the sarama.ClusterAdmin. -func GetClusterAdminFromConfig(adminFunc NewClusterAdminFunc, config *sarama.Config, bootstrapServers []string) (sarama.ClusterAdmin, error) { +func GetClusterAdminFromConfig(adminFunc NewClusterAdminFunc, config *sarama.Config, bootstrapServers []string, secOptions security.ConfigOption) (sarama.ClusterAdmin, error) { + + err := secOptions(config) + if err != nil { + return nil, err + } kafkaClusterAdmin, err := adminFunc(bootstrapServers, config) if err != nil { diff --git a/control-plane/pkg/reconciler/kafka/topic.go b/control-plane/pkg/reconciler/kafka/topic.go index ac25f61559..bbabef261a 100644 --- a/control-plane/pkg/reconciler/kafka/topic.go +++ b/control-plane/pkg/reconciler/kafka/topic.go @@ -23,6 +23,8 @@ import ( "github.com/Shopify/sarama" "go.uber.org/zap" metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" + + "knative.dev/eventing-kafka-broker/control-plane/pkg/security" ) // TopicConfig contains configurations for creating a topic. 
@@ -89,9 +91,9 @@ func DeleteTopic(admin sarama.ClusterAdmin, topic string) (string, error) { return topic, nil } -func (f NewClusterAdminFunc) CreateTopic(logger *zap.Logger, topic string, config *TopicConfig) (string, error) { +func (f NewClusterAdminFunc) CreateTopic(logger *zap.Logger, topic string, config *TopicConfig, secOptions security.ConfigOption) (string, error) { - kafkaClusterAdmin, err := GetClusterAdmin(f, config.BootstrapServers) + kafkaClusterAdmin, err := GetClusterAdmin(f, config.BootstrapServers, secOptions) if err != nil { return topic, err } @@ -100,9 +102,9 @@ func (f NewClusterAdminFunc) CreateTopic(logger *zap.Logger, topic string, confi return CreateTopic(kafkaClusterAdmin, logger, topic, config) } -func (f NewClusterAdminFunc) DeleteTopic(topic string, bootstrapServers []string) (string, error) { +func (f NewClusterAdminFunc) DeleteTopic(topic string, bootstrapServers []string, secOptions security.ConfigOption) (string, error) { - kafkaClusterAdmin, err := GetClusterAdmin(f, bootstrapServers) + kafkaClusterAdmin, err := GetClusterAdmin(f, bootstrapServers, secOptions) if err != nil { return topic, err } @@ -111,9 +113,9 @@ func (f NewClusterAdminFunc) DeleteTopic(topic string, bootstrapServers []string return DeleteTopic(kafkaClusterAdmin, topic) } -func (f NewClusterAdminFunc) IsTopicPresentAndValid(topic string, bootstrapServers []string) (bool, error) { +func (f NewClusterAdminFunc) IsTopicPresentAndValid(topic string, bootstrapServers []string, secOptions security.ConfigOption) (bool, error) { - kafkaClusterAdmin, err := GetClusterAdmin(f, bootstrapServers) + kafkaClusterAdmin, err := GetClusterAdmin(f, bootstrapServers, secOptions) if err != nil { return false, err } diff --git a/control-plane/pkg/reconciler/kafka/topic_test.go b/control-plane/pkg/reconciler/kafka/topic_test.go index e68bf09f07..69d34875a3 100644 --- a/control-plane/pkg/reconciler/kafka/topic_test.go +++ b/control-plane/pkg/reconciler/kafka/topic_test.go @@ -11,6 +11,7 @@ import ( eventing "knative.dev/eventing/pkg/apis/eventing/v1" kafkatesting "knative.dev/eventing-kafka-broker/control-plane/pkg/reconciler/kafka/testing" + "knative.dev/eventing-kafka-broker/control-plane/pkg/security" ) func TestCreateTopic(t *testing.T) { @@ -295,7 +296,7 @@ func TestCreateTopicTopicAlreadyExists(t *testing.T) { return ca, nil } - topicRet, err := f.CreateTopic(zap.NewNop(), topic, &TopicConfig{}) + topicRet, err := f.CreateTopic(zap.NewNop(), topic, &TopicConfig{}, security.NoOp) assert.Equal(t, topicRet, topic, "expected topic %s go %s", topic, topicRet) assert.Nil(t, err, "expected nil error on topic already exists") @@ -316,7 +317,7 @@ func TestNewClusterAdminFuncDeleteTopicCloseClusterAdmin(t *testing.T) { return ca, nil }) - got, err := f.DeleteTopic("topic-name-1", []string{}) + got, err := f.DeleteTopic("topic-name-1", []string{}, security.NoOp) if err != nil { t.Errorf("DeleteTopic() error = %v, wantErr %v", err, false) return @@ -401,7 +402,7 @@ func TestNewClusterAdminFuncIsTopicPresent(t *testing.T) { } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { - got, err := tt.f.IsTopicPresentAndValid(tt.args.topic, tt.args.bootstrapServers) + got, err := tt.f.IsTopicPresentAndValid(tt.args.topic, tt.args.bootstrapServers, security.NoOp) if (err != nil) != tt.wantErr { t.Errorf("IsTopicPresentAndValid() error = %v, wantErr %v", err, tt.wantErr) return @@ -431,7 +432,7 @@ func TestNewClusterAdminFuncIsTopicPresentCloseClusterAdmin(t *testing.T) { return ca, nil }) - got, err := 
f.IsTopicPresentAndValid("topic-name-1", []string{}) + got, err := f.IsTopicPresentAndValid("topic-name-1", []string{}, security.NoOp) assert.Nil(t, err, "IsTopicPresentAndValid() error = %v, wantErr %v", err, false) assert.True(t, got, "IsTopicPresentAndValid() got = %v, want %v", got, true) diff --git a/control-plane/pkg/reconciler/sink/auth_provider.go b/control-plane/pkg/reconciler/sink/auth_provider.go new file mode 100644 index 0000000000..20ce371a89 --- /dev/null +++ b/control-plane/pkg/reconciler/sink/auth_provider.go @@ -0,0 +1,36 @@ +/* + * Copyright 2020 The Knative Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package sink + +import ( + eventing "knative.dev/eventing-kafka-broker/control-plane/pkg/apis/eventing/v1alpha1" +) + +type SecretLocator struct { + *eventing.KafkaSink +} + +func (ks *SecretLocator) SecretName() (string, bool, error) { + if !ks.Spec.HasAuthConfig() { + return "", false, nil + } + return ks.Spec.Auth.Secret.Ref.Name, true, nil +} + +func (ks *SecretLocator) SecretNamespace() (string, bool, error) { + return ks.Namespace, true, nil +} diff --git a/control-plane/pkg/reconciler/sink/controller.go b/control-plane/pkg/reconciler/sink/controller.go index e07d07c446..aa97b81450 100644 --- a/control-plane/pkg/reconciler/sink/controller.go +++ b/control-plane/pkg/reconciler/sink/controller.go @@ -21,13 +21,16 @@ import ( "github.com/Shopify/sarama" "go.uber.org/zap" + corev1 "k8s.io/api/core/v1" "k8s.io/client-go/tools/cache" kubeclient "knative.dev/pkg/client/injection/kube/client" configmapinformer "knative.dev/pkg/client/injection/kube/informers/core/v1/configmap" podinformer "knative.dev/pkg/client/injection/kube/informers/core/v1/pod" + secretinformer "knative.dev/pkg/client/injection/kube/informers/core/v1/secret" "knative.dev/pkg/configmap" "knative.dev/pkg/controller" "knative.dev/pkg/logging" + "knative.dev/pkg/tracker" eventing "knative.dev/eventing-kafka-broker/control-plane/pkg/apis/eventing/v1alpha1" sinkinformer "knative.dev/eventing-kafka-broker/control-plane/pkg/client/injection/informers/eventing/v1alpha1/kafkasink" @@ -48,6 +51,7 @@ func NewController(ctx context.Context, _ configmap.Watcher, configs *config.Env Reconciler: &base.Reconciler{ KubeClient: kubeclient.Get(ctx), PodLister: podinformer.Get(ctx).Lister(), + SecretLister: secretinformer.Get(ctx).Lister(), DataPlaneConfigMapNamespace: configs.DataPlaneConfigMapNamespace, DataPlaneConfigMapName: configs.DataPlaneConfigMapName, DataPlaneConfigFormat: configs.DataPlaneConfigFormat, @@ -89,5 +93,20 @@ func NewController(ctx context.Context, _ configmap.Watcher, configs *config.Env }, }) + reconciler.SecretTracker = tracker.New(impl.EnqueueKey, controller.GetTrackerLease(ctx)) + secretinformer.Get(ctx).Informer().AddEventHandler(controller.HandleAll( + // Call the tracker's OnChanged method, but we've seen the objects + // coming through this path missing TypeMeta, so ensure it is properly + // populated. 
+ controller.EnsureTypeMeta( + reconciler.SecretTracker.OnChanged, + corev1.SchemeGroupVersion.WithKind("Secret"), + ), + )) + + sinkInformer.Informer().AddEventHandler(cache.ResourceEventHandlerFuncs{ + DeleteFunc: reconciler.OnDeleteObserver, + }) + return impl } diff --git a/control-plane/pkg/reconciler/sink/controller_test.go b/control-plane/pkg/reconciler/sink/controller_test.go index 42ca3f0fbe..7ca57c5b3d 100644 --- a/control-plane/pkg/reconciler/sink/controller_test.go +++ b/control-plane/pkg/reconciler/sink/controller_test.go @@ -25,6 +25,7 @@ import ( _ "knative.dev/pkg/client/injection/kube/client/fake" _ "knative.dev/pkg/client/injection/kube/informers/core/v1/configmap/fake" _ "knative.dev/pkg/client/injection/kube/informers/core/v1/pod/fake" + _ "knative.dev/pkg/client/injection/kube/informers/core/v1/secret/fake" _ "knative.dev/eventing-kafka-broker/control-plane/pkg/client/injection/informers/eventing/v1alpha1/kafkasink/fake" "knative.dev/eventing-kafka-broker/control-plane/pkg/config" diff --git a/control-plane/pkg/reconciler/sink/kafka_sink.go b/control-plane/pkg/reconciler/sink/kafka_sink.go index a6b1f68865..46699529da 100644 --- a/control-plane/pkg/reconciler/sink/kafka_sink.go +++ b/control-plane/pkg/reconciler/sink/kafka_sink.go @@ -21,8 +21,6 @@ import ( "fmt" "math" - "knative.dev/eventing-kafka-broker/control-plane/pkg/contract" - "github.com/Shopify/sarama" "go.uber.org/zap" corelisters "k8s.io/client-go/listers/core/v1" @@ -32,11 +30,13 @@ import ( eventing "knative.dev/eventing-kafka-broker/control-plane/pkg/apis/eventing/v1alpha1" "knative.dev/eventing-kafka-broker/control-plane/pkg/config" + "knative.dev/eventing-kafka-broker/control-plane/pkg/contract" coreconfig "knative.dev/eventing-kafka-broker/control-plane/pkg/core/config" "knative.dev/eventing-kafka-broker/control-plane/pkg/log" "knative.dev/eventing-kafka-broker/control-plane/pkg/receiver" "knative.dev/eventing-kafka-broker/control-plane/pkg/reconciler/base" "knative.dev/eventing-kafka-broker/control-plane/pkg/reconciler/kafka" + "knative.dev/eventing-kafka-broker/control-plane/pkg/security" ) const ( @@ -82,13 +82,22 @@ func (r *Reconciler) reconcileKind(ctx context.Context, ks *eventing.KafkaSink) ks.GetStatus().Annotations = make(map[string]string, 1) } + securityOption, secret, err := security.NewOptionFromSecret(ctx, &SecretLocator{KafkaSink: ks}, r.SecretProviderFunc()) + if err != nil { + return fmt.Errorf("failed to create auth option: %w", err) + } + + if err := r.TrackSecret(secret, ks); err != nil { + return fmt.Errorf("failed to track secret: %w", err) + } + if ks.Spec.NumPartitions != nil && ks.Spec.ReplicationFactor != nil { ks.GetStatus().Annotations[base.TopicOwnerAnnotation] = ControllerTopicOwner topicConfig := topicConfigFromSinkSpec(&ks.Spec) - topic, err := r.ClusterAdmin.CreateTopic(logger, ks.Spec.Topic, topicConfig) + topic, err := r.ClusterAdmin.CreateTopic(logger, ks.Spec.Topic, topicConfig, securityOption) if err != nil { return statusConditionManager.FailedToCreateTopic(topic, err) } @@ -98,7 +107,7 @@ func (r *Reconciler) reconcileKind(ctx context.Context, ks *eventing.KafkaSink) ks.GetStatus().Annotations[base.TopicOwnerAnnotation] = ExternalTopicOwner - isPresentAndValid, err := r.ClusterAdmin.IsTopicPresentAndValid(ks.Spec.Topic, ks.Spec.BootstrapServers) + isPresentAndValid, err := r.ClusterAdmin.IsTopicPresentAndValid(ks.Spec.Topic, ks.Spec.BootstrapServers, securityOption) if err != nil { return statusConditionManager.TopicNotPresentOrInvalidErr(err) } @@ -143,6 
+152,16 @@ func (r *Reconciler) reconcileKind(ctx context.Context, ks *eventing.KafkaSink) }, BootstrapServers: kafka.BootstrapServersCommaSeparated(ks.Spec.BootstrapServers), } + if ks.Spec.HasAuthConfig() { + sinkConfig.Auth = &contract.Resource_AuthSecret{ + AuthSecret: &contract.Reference{ + Uuid: string(secret.UID), + Namespace: secret.Namespace, + Name: secret.Name, + Version: secret.ResourceVersion, + }, + } + } statusConditionManager.ConfigResolved() sinkIndex := coreconfig.FindResource(ct, ks.UID) @@ -224,7 +243,11 @@ func (r *Reconciler) finalizeKind(ctx context.Context, ks *eventing.KafkaSink) e } if ks.GetStatus().Annotations[base.TopicOwnerAnnotation] == ControllerTopicOwner { - topic, err := r.ClusterAdmin.DeleteTopic(ks.Spec.Topic, ks.Spec.BootstrapServers) + securityOption, _, err := security.NewOptionFromSecret(ctx, &SecretLocator{KafkaSink: ks}, r.SecretProviderFunc()) + if err != nil { + return fmt.Errorf("failed to create security (auth) option: %w", err) + } + topic, err := r.ClusterAdmin.DeleteTopic(ks.Spec.Topic, ks.Spec.BootstrapServers, securityOption) if err != nil { return err } diff --git a/control-plane/pkg/reconciler/sink/kafka_sink_test.go b/control-plane/pkg/reconciler/sink/kafka_sink_test.go index d032767980..95891d20b2 100644 --- a/control-plane/pkg/reconciler/sink/kafka_sink_test.go +++ b/control-plane/pkg/reconciler/sink/kafka_sink_test.go @@ -146,6 +146,69 @@ func sinkReconciliation(t *testing.T, format string, configs broker.Configs) { }, }, }, + { + Name: "Reconciled normal - with auth config", + Objects: []runtime.Object{ + NewSink( + SinkControllerOwnsTopic, + SinkAuthSecretRef("secret-1"), + ), + NewSSLSecret(SinkNamespace, "secret-1"), + NewConfigMap(&configs, nil), + SinkReceiverPod(configs.SystemNamespace, map[string]string{ + base.VolumeGenerationAnnotationKey: "1", + "annotation_to_preserve": "value_to_preserve", + }), + }, + Key: testKey, + WantEvents: []string{ + finalizerUpdatedEvent, + }, + WantUpdates: []clientgotesting.UpdateActionImpl{ + ConfigMapUpdate(&configs, &contract.Contract{ + Resources: []*contract.Resource{ + { + Uid: SinkUUID, + Topics: []string{SinkTopic()}, + BootstrapServers: bootstrapServers, + Ingress: &contract.Ingress{ContentMode: contract.ContentMode_STRUCTURED, IngressType: &contract.Ingress_Path{Path: receiver.Path(SinkNamespace, SinkName)}}, + Auth: &contract.Resource_AuthSecret{ + AuthSecret: &contract.Reference{ + Uuid: SecretUUID, + Namespace: SinkNamespace, + Name: "secret-1", + Version: SecretResourceVersion, + }, + }, + }, + }, + Generation: 1, + }), + SinkReceiverPodUpdate(configs.SystemNamespace, map[string]string{ + base.VolumeGenerationAnnotationKey: "1", + "annotation_to_preserve": "value_to_preserve", + }), + }, + WantPatches: []clientgotesting.PatchActionImpl{ + patchFinalizers(), + }, + WantStatusUpdates: []clientgotesting.UpdateActionImpl{ + { + Object: NewSink( + SinkControllerOwnsTopic, + SinkAuthSecretRef("secret-1"), + InitSinkConditions, + SinkDataPlaneAvailable, + SinkConfigParsed, + BootstrapServers(bootstrapServersArr), + SinkConfigMapUpdatedReady(&configs.Env), + SinkTopicReady, + SinkTopicReadyWithOwner(SinkTopic(), sink.ControllerTopicOwner), + SinkAddressable(&configs.Env), + ), + }, + }, + }, { Name: "Reconciled normal - no topic owner", Objects: []runtime.Object{ @@ -961,6 +1024,7 @@ func useTable(t *testing.T, table TableTest, configs *broker.Configs) { Reconciler: &base.Reconciler{ KubeClient: kubeclient.Get(ctx), PodLister: listers.GetPodLister(), + SecretLister: 
listers.GetSecretLister(), DataPlaneConfigMapNamespace: configs.DataPlaneConfigMapNamespace, DataPlaneConfigMapName: configs.DataPlaneConfigMapName, DataPlaneConfigFormat: configs.DataPlaneConfigFormat, @@ -983,6 +1047,8 @@ func useTable(t *testing.T, table TableTest, configs *broker.Configs) { Configs: &configs.Env, } + reconciler.SecretTracker = &FakeTracker{} + return sinkreconciler.NewReconciler( ctx, logging.FromContext(ctx), diff --git a/control-plane/pkg/reconciler/sink/testdata/ca.crt b/control-plane/pkg/reconciler/sink/testdata/ca.crt new file mode 120000 index 0000000000..63004fe697 --- /dev/null +++ b/control-plane/pkg/reconciler/sink/testdata/ca.crt @@ -0,0 +1 @@ +../../../security/testdata/ca.crt \ No newline at end of file diff --git a/control-plane/pkg/reconciler/sink/testdata/user.crt b/control-plane/pkg/reconciler/sink/testdata/user.crt new file mode 120000 index 0000000000..202a9e41fa --- /dev/null +++ b/control-plane/pkg/reconciler/sink/testdata/user.crt @@ -0,0 +1 @@ +../../../security/testdata/user.crt \ No newline at end of file diff --git a/control-plane/pkg/reconciler/sink/testdata/user.key b/control-plane/pkg/reconciler/sink/testdata/user.key new file mode 120000 index 0000000000..19fe6fc64b --- /dev/null +++ b/control-plane/pkg/reconciler/sink/testdata/user.key @@ -0,0 +1 @@ +../../../security/testdata/user.key \ No newline at end of file diff --git a/control-plane/pkg/reconciler/testing/listers.go b/control-plane/pkg/reconciler/testing/listers.go index 03598558a5..08c14a5df5 100644 --- a/control-plane/pkg/reconciler/testing/listers.go +++ b/control-plane/pkg/reconciler/testing/listers.go @@ -92,6 +92,10 @@ func (l *Listers) GetPodLister() corelisters.PodLister { return corelisters.NewPodLister(l.indexerFor(&corev1.Pod{})) } +func (l *Listers) GetSecretLister() corelisters.SecretLister { + return corelisters.NewSecretLister(l.indexerFor(&corev1.Secret{})) +} + func (l *Listers) GetTriggerLister() eventinglisters.TriggerLister { return eventinglisters.NewTriggerLister(l.indexerFor(&eventing.Trigger{})) } diff --git a/control-plane/pkg/reconciler/testing/objects_broker.go b/control-plane/pkg/reconciler/testing/objects_broker.go index 68e0a308b1..610c81a4a5 100644 --- a/control-plane/pkg/reconciler/testing/objects_broker.go +++ b/control-plane/pkg/reconciler/testing/objects_broker.go @@ -35,6 +35,7 @@ import ( "knative.dev/eventing-kafka-broker/control-plane/pkg/reconciler/base" . 
"knative.dev/eventing-kafka-broker/control-plane/pkg/reconciler/broker" "knative.dev/eventing-kafka-broker/control-plane/pkg/reconciler/kafka" + "knative.dev/eventing-kafka-broker/control-plane/pkg/security" ) const ( @@ -111,8 +112,10 @@ func WithBrokerConfig(reference *duckv1.KReference) func(*eventing.Broker) { } } -func BrokerConfig(bootstrapServers string, numPartitions, replicationFactor int) *corev1.ConfigMap { - return &corev1.ConfigMap{ +type CMOption func(cm *corev1.ConfigMap) + +func BrokerConfig(bootstrapServers string, numPartitions, replicationFactor int, options ...CMOption) *corev1.ConfigMap { + cm := &corev1.ConfigMap{ ObjectMeta: metav1.ObjectMeta{ Namespace: ConfigMapNamespace, Name: ConfigMapName, @@ -123,6 +126,19 @@ func BrokerConfig(bootstrapServers string, numPartitions, replicationFactor int) DefaultTopicNumPartitionConfigMapKey: fmt.Sprintf("%d", numPartitions), }, } + for _, opt := range options { + opt(cm) + } + return cm +} + +func BrokerAuthConfig(name string) CMOption { + return func(cm *corev1.ConfigMap) { + if cm.Data == nil { + cm.Data = make(map[string]string, 1) + } + cm.Data[security.AuthSecretNameKey] = name + } } func KReference(configMap *corev1.ConfigMap) *duckv1.KReference { diff --git a/control-plane/pkg/reconciler/testing/objects_common.go b/control-plane/pkg/reconciler/testing/objects_common.go index c1c6a300db..3c59174f34 100644 --- a/control-plane/pkg/reconciler/testing/objects_common.go +++ b/control-plane/pkg/reconciler/testing/objects_common.go @@ -17,7 +17,10 @@ package testing import ( + "io/ioutil" + "knative.dev/eventing-kafka-broker/control-plane/pkg/contract" + "knative.dev/eventing-kafka-broker/control-plane/pkg/security" "google.golang.org/protobuf/encoding/protojson" "google.golang.org/protobuf/proto" @@ -41,6 +44,9 @@ const ( ServiceURL = "http://test-service.test-service-namespace.svc.cluster.local/" TriggerUUID = "e7185016-5d98-4b54-84e8-3b1cd4acc6b5" + + SecretResourceVersion = "1234" + SecretUUID = "a7185016-5d98-4b54-84e8-3b1cd4acc6b6" ) var ( @@ -102,3 +108,42 @@ func ConfigMapUpdate(configs *Configs, contract *contract.Contract) clientgotest NewConfigMapFromContract(contract, configs), ) } + +func NewSSLSecret(ns, name string) *corev1.Secret { + + ca, userKey, userCert := loadCerts() + + return &corev1.Secret{ + ObjectMeta: metav1.ObjectMeta{ + Namespace: ns, + Name: name, + ResourceVersion: SecretResourceVersion, + UID: SecretUUID, + }, + Data: map[string][]byte{ + security.ProtocolKey: []byte(security.ProtocolSSL), + security.CaCertificateKey: ca, + security.UserKey: userKey, + security.UserCertificate: userCert, + }, + } +} + +func loadCerts() (ca, userKey, userCert []byte) { + ca, err := ioutil.ReadFile("testdata/ca.crt") + if err != nil { + panic(err) + } + + userKey, err = ioutil.ReadFile("testdata/user.key") + if err != nil { + panic(err) + } + + userCert, err = ioutil.ReadFile("testdata/user.crt") + if err != nil { + panic(err) + } + + return ca, userKey, userCert +} diff --git a/control-plane/pkg/reconciler/testing/objects_sink.go b/control-plane/pkg/reconciler/testing/objects_sink.go index 7f7d7eaeab..45c0c5538c 100644 --- a/control-plane/pkg/reconciler/testing/objects_sink.go +++ b/control-plane/pkg/reconciler/testing/objects_sink.go @@ -177,6 +177,18 @@ func SinkControllerDontOwnTopic(sink *eventing.KafkaSink) { sink.GetStatus().Annotations[base.TopicOwnerAnnotation] = sinkreconciler.ExternalTopicOwner } +func SinkAuthSecretRef(name string) func(sink *eventing.KafkaSink) { + return func(sink *eventing.KafkaSink) 
{ + sink.Spec.Auth = &eventing.Auth{ + Secret: &eventing.Secret{ + Ref: &eventing.SecretReference{ + Name: name, + }, + }, + } + } +} + func allocateStatusAnnotations(sink *eventing.KafkaSink) { if sink.GetStatus().Annotations == nil { sink.GetStatus().Annotations = make(map[string]string, 1) diff --git a/control-plane/pkg/reconciler/trigger/controller.go b/control-plane/pkg/reconciler/trigger/controller.go index bc017338cb..a9783a25c2 100644 --- a/control-plane/pkg/reconciler/trigger/controller.go +++ b/control-plane/pkg/reconciler/trigger/controller.go @@ -25,6 +25,7 @@ import ( kubeclient "knative.dev/pkg/client/injection/kube/client" configmapinformer "knative.dev/pkg/client/injection/kube/informers/core/v1/configmap" podinformer "knative.dev/pkg/client/injection/kube/informers/core/v1/pod" + secretinformer "knative.dev/pkg/client/injection/kube/informers/core/v1/secret" "knative.dev/pkg/configmap" "knative.dev/pkg/controller" "knative.dev/pkg/logging" @@ -62,6 +63,7 @@ func NewController(ctx context.Context, _ configmap.Watcher, configs *config.Env Reconciler: &base.Reconciler{ KubeClient: kubeclient.Get(ctx), PodLister: podinformer.Get(ctx).Lister(), + SecretLister: secretinformer.Get(ctx).Lister(), DataPlaneConfigMapNamespace: configs.DataPlaneConfigMapNamespace, DataPlaneConfigMapName: configs.DataPlaneConfigMapName, DataPlaneConfigFormat: configs.DataPlaneConfigFormat, diff --git a/control-plane/pkg/reconciler/trigger/controller_test.go b/control-plane/pkg/reconciler/trigger/controller_test.go index 556ed3ff68..b67feef99f 100644 --- a/control-plane/pkg/reconciler/trigger/controller_test.go +++ b/control-plane/pkg/reconciler/trigger/controller_test.go @@ -31,6 +31,7 @@ import ( _ "knative.dev/pkg/client/injection/ducks/duck/v1/addressable/fake" _ "knative.dev/pkg/client/injection/kube/informers/core/v1/configmap/fake" _ "knative.dev/pkg/client/injection/kube/informers/core/v1/pod/fake" + _ "knative.dev/pkg/client/injection/kube/informers/core/v1/secret/fake" "knative.dev/eventing-kafka-broker/control-plane/pkg/config" ) diff --git a/control-plane/pkg/reconciler/trigger/trigger_test.go b/control-plane/pkg/reconciler/trigger/trigger_test.go index 3413e0fd76..ff1c6df107 100644 --- a/control-plane/pkg/reconciler/trigger/trigger_test.go +++ b/control-plane/pkg/reconciler/trigger/trigger_test.go @@ -1884,6 +1884,7 @@ func useTable(t *testing.T, table TableTest, configs *broker.Configs) { Reconciler: &base.Reconciler{ KubeClient: kubeclient.Get(ctx), PodLister: listers.GetPodLister(), + SecretLister: listers.GetSecretLister(), DataPlaneConfigMapNamespace: configs.DataPlaneConfigMapNamespace, DataPlaneConfigMapName: configs.DataPlaneConfigMapName, DataPlaneConfigFormat: configs.DataPlaneConfigFormat, diff --git a/control-plane/pkg/security/config.go b/control-plane/pkg/security/config.go new file mode 100644 index 0000000000..4c449b0e33 --- /dev/null +++ b/control-plane/pkg/security/config.go @@ -0,0 +1,120 @@ +/* + * Copyright 2020 The Knative Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package security + +import ( + "context" + "fmt" + + "github.com/Shopify/sarama" + corev1 "k8s.io/api/core/v1" + apierrors "k8s.io/apimachinery/pkg/api/errors" + metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" + "k8s.io/client-go/kubernetes" + corelisters "k8s.io/client-go/listers/core/v1" +) + +const ( + AuthSecretNameKey = "auth.secret.ref.name" /* #nosec G101 */ /* Potential hardcoded credentials (gosec) */ +) + +// SecretLocator locates a secret in a cluster. +type SecretLocator interface { + // SecretName returns the secret name. + // It returns true if the name should be used and false if it should be ignored. + SecretName() (string, bool, error) + + // SecretNamespace returns the secret namespace. + // It returns true if the namespace should be used and false if it should be ignored. + SecretNamespace() (string, bool, error) +} + +// SecretProviderFunc provides a secret given a namespace/name pair. +type SecretProviderFunc func(ctx context.Context, namespace, name string) (*corev1.Secret, error) + +func NewOptionFromSecret(ctx context.Context, config SecretLocator, secretProviderFunc SecretProviderFunc) (ConfigOption, *corev1.Secret, error) { + + name, ok, err := config.SecretName() + if err != nil { + return nil, nil, err + } + if !ok { + // No auth config, return a no-op config option. + return NoOp, nil, nil + } + ns, ok, err := config.SecretNamespace() + if err != nil { + return nil, nil, err + } + if !ok { + // No auth config, return a no-op config option. + return NoOp, nil, nil + } + + secret, err := secretProviderFunc(ctx, ns, name) + if err != nil { + return nil, nil, err + } + + return secretData(secret.Data), secret, nil +} + +// DefaultSecretProviderFunc is a secret provider that gets the secret from the local cache and, when it is not +// found there, falls back to the kube client to check whether the secret actually exists. +func DefaultSecretProviderFunc(lister corelisters.SecretLister, kc kubernetes.Interface) SecretProviderFunc { + return func(ctx context.Context, namespace, name string) (*corev1.Secret, error) { + secret, err := lister.Secrets(namespace).Get(name) + if err != nil { + if apierrors.IsNotFound(err) { + // The lister cache may be stale, double-check with the API server. + secret, err = kc.CoreV1().Secrets(namespace).Get(ctx, name, metav1.GetOptions{}) + if err != nil { + return nil, fmt.Errorf("failed to get secret %s/%s: %w", namespace, name, err) + } + return secret, nil + } + return nil, fmt.Errorf("failed to get secret %s/%s: %w", namespace, name, err) + } + + return secret, nil + } +} + +// NoOp is a no-op ConfigOption. +func NoOp(*sarama.Config) error { + return nil +} + +// MTConfigMapSecretLocator is a SecretLocator that locates a secret using a reference in a ConfigMap. +// +// The name is taken from the data field using the key: AuthSecretNameKey. +// The namespace is the same namespace of the ConfigMap.
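Reviewer note (illustration only, not part of the patch): a usage sketch for the pieces above plus the MTConfigMapSecretLocator defined just below, showing how a caller can turn a ConfigMap reference into a configured sarama.Config. The consumer package and function name are hypothetical; the security package symbols are the ones added here.

package example // hypothetical consumer

import (
	"context"

	"github.com/Shopify/sarama"
	corev1 "k8s.io/api/core/v1"
	"k8s.io/client-go/kubernetes"
	corelisters "k8s.io/client-go/listers/core/v1"

	"knative.dev/eventing-kafka-broker/control-plane/pkg/security"
)

// newSaramaConfig resolves the Secret referenced by the ConfigMap key
// "auth.secret.ref.name" and applies it to a fresh sarama.Config.
// When the key is absent, NewOptionFromSecret returns the NoOp option
// and the config is left untouched.
func newSaramaConfig(ctx context.Context, cm *corev1.ConfigMap, lister corelisters.SecretLister, kc kubernetes.Interface) (*sarama.Config, error) {
	locator := &security.MTConfigMapSecretLocator{ConfigMap: cm}
	opt, _, err := security.NewOptionFromSecret(ctx, locator, security.DefaultSecretProviderFunc(lister, kc))
	if err != nil {
		return nil, err
	}
	config := sarama.NewConfig()
	if err := opt(config); err != nil {
		return nil, err
	}
	return config, nil
}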
+type MTConfigMapSecretLocator struct { + *corev1.ConfigMap +} + +func (cmp *MTConfigMapSecretLocator) SecretName() (string, bool, error) { + if cmp.ConfigMap == nil { + return "", false, nil + } + v, ok := cmp.Data[AuthSecretNameKey] + return v, ok, nil +} + +func (cmp *MTConfigMapSecretLocator) SecretNamespace() (string, bool, error) { + return cmp.Namespace, true, nil +} diff --git a/control-plane/pkg/security/config_test.go b/control-plane/pkg/security/config_test.go new file mode 100644 index 0000000000..c7df3e9493 --- /dev/null +++ b/control-plane/pkg/security/config_test.go @@ -0,0 +1,187 @@ +/* + * Copyright 2020 The Knative Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package security + +import ( + "context" + "io" + "testing" + + "github.com/google/go-cmp/cmp" + "github.com/stretchr/testify/assert" + corev1 "k8s.io/api/core/v1" + metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" + "k8s.io/client-go/kubernetes" + corev1listers "k8s.io/client-go/listers/core/v1" + kubeclient "knative.dev/pkg/client/injection/kube/client/fake" + secretinformer "knative.dev/pkg/client/injection/kube/informers/core/v1/secret" + reconcilertesting "knative.dev/pkg/reconciler/testing" + + _ "knative.dev/pkg/client/injection/kube/informers/core/v1/secret/fake" +) + +type SecretProviderFuncMock struct { + secret *corev1.Secret + err error + + wantName string + wantNamespace string + t *testing.T +} + +func (sp *SecretProviderFuncMock) F(ctx context.Context, namespace, name string) (*corev1.Secret, error) { + assert.NotNil(sp.t, ctx) + assert.Equal(sp.t, sp.wantNamespace, namespace) + assert.Equal(sp.t, sp.wantName, name) + return sp.secret, sp.err +} + +func TestNewOptionFromSecret(t *testing.T) { + + tests := []struct { + name string + ctx context.Context + config SecretLocator + secretProviderFunc SecretProviderFunc + want ConfigOption + wantSecret *corev1.Secret + wantConfigOption bool + wantErr bool + }{ + { + name: "happy case", + ctx: context.Background(), + config: &MTConfigMapSecretLocator{ + &corev1.ConfigMap{ + ObjectMeta: metav1.ObjectMeta{ + Namespace: "my-ns", + Name: "my-name", + }, + Data: map[string]string{ + AuthSecretNameKey: "my-name", + }, + }, + }, + secretProviderFunc: (&SecretProviderFuncMock{ + secret: &corev1.Secret{ + Data: map[string][]byte{ + ProtocolKey: []byte(ProtocolPlaintext), + }, + }, + err: nil, + wantName: "my-name", + wantNamespace: "my-ns", + t: t, + }).F, + wantConfigOption: true, + wantSecret: &corev1.Secret{ + Data: map[string][]byte{ + ProtocolKey: []byte(ProtocolPlaintext), + }, + }, + wantErr: false, + }, + { + name: "secret provider error", + ctx: context.Background(), + config: &MTConfigMapSecretLocator{ + &corev1.ConfigMap{ + ObjectMeta: metav1.ObjectMeta{ + Namespace: "my-ns", + Name: "my-name", + }, + Data: map[string]string{ + AuthSecretNameKey: "my-name", + }, + }, + }, + secretProviderFunc: (&SecretProviderFuncMock{ + secret: &corev1.Secret{ + Data: map[string][]byte{ + ProtocolKey: []byte(ProtocolPlaintext), + }, + }, + err: 
io.EOF, + wantName: "my-name", + wantNamespace: "my-ns", + t: t, + }).F, + wantConfigOption: false, + wantSecret: nil, + wantErr: true, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + configOption, secret, err := NewOptionFromSecret(tt.ctx, tt.config, tt.secretProviderFunc) + if (err != nil) != tt.wantErr { + t.Errorf("NewOptionFromSecret() error = %v, wantErr %v", err, tt.wantErr) + return + } + if (configOption != nil) != tt.wantConfigOption { + t.Errorf("NewOptionFromSecret() configOption want %v got %p", tt.wantConfigOption, configOption) + } + if diff := cmp.Diff(tt.wantSecret, secret); diff != "" { + t.Errorf("NewOptionFromSecret() secret = %v, want %v diff: %s", secret, tt.wantSecret, diff) + } + }) + } +} + +func TestDefaultSecretProviderFunc(t *testing.T) { + + ctx, _ := reconcilertesting.SetupFakeContext(t) + + tests := []struct { + name string + lister corev1listers.SecretLister + kc kubernetes.Interface + want bool + + secretNamespace string + secretName string + wantSecret *corev1.Secret + wantErr bool + }{ + { + name: "not found", + lister: secretinformer.Get(ctx).Lister(), + kc: kubeclient.Get(ctx), + want: true, + + secretNamespace: "my-namespace", + secretName: "my-name", + wantSecret: nil, + wantErr: true, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + got := DefaultSecretProviderFunc(tt.lister, tt.kc) + if (got != nil) != tt.want { + t.Errorf("DefaultSecretProviderFunc() = %p, want %v", got, tt.want) + } + if got != nil { + if s, err := got(ctx, tt.secretNamespace, tt.secretName); (err != nil) != tt.wantErr { + t.Errorf("got() got error %v want %v", err, tt.wantErr) + } else if diff := cmp.Diff(tt.wantSecret, s); diff != "" { + t.Errorf("diff %v", diff) + } + } + }) + } +} diff --git a/control-plane/pkg/security/scram.go b/control-plane/pkg/security/scram.go new file mode 100644 index 0000000000..f8dd6f4177 --- /dev/null +++ b/control-plane/pkg/security/scram.go @@ -0,0 +1,65 @@ +/* + * Copyright 2020 The Knative Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package security + +import ( + "crypto/sha256" + "crypto/sha512" + "hash" + + "github.com/Shopify/sarama" + "github.com/xdg/scram" +) + +var ( + // sha256HashGenerator hash generator function for SCRAM conversation. + sha256HashGenerator scram.HashGeneratorFcn = func() hash.Hash { return sha256.New() } + + // sha512HashGenerator hash generator function for SCRAM conversation.
+ sha512HashGenerator scram.HashGeneratorFcn = func() hash.Hash { return sha512.New() } +) + +type xdgScramClient struct { + *scram.Client + *scram.ClientConversation + scram.HashGeneratorFcn +} + +func (x *xdgScramClient) Begin(userName, password, authzID string) (err error) { + x.Client, err = x.HashGeneratorFcn.NewClient(userName, password, authzID) + if err != nil { + return err + } + x.ClientConversation = x.Client.NewConversation() + return nil +} + +func (x *xdgScramClient) Step(challenge string) (string, error) { + return x.ClientConversation.Step(challenge) +} + +func (x *xdgScramClient) Done() bool { + return x.ClientConversation.Done() +} + +func sha256ScramClientGeneratorFunc() sarama.SCRAMClient { + return &xdgScramClient{HashGeneratorFcn: sha256HashGenerator} +} + +func sha512ScramClientGeneratorFunc() sarama.SCRAMClient { + return &xdgScramClient{HashGeneratorFcn: sha512HashGenerator} +} diff --git a/control-plane/pkg/security/scram_test.go b/control-plane/pkg/security/scram_test.go new file mode 100644 index 0000000000..6fa41345fd --- /dev/null +++ b/control-plane/pkg/security/scram_test.go @@ -0,0 +1,47 @@ +/* + * Copyright 2020 The Knative Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package security + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestSha256ScramClientGeneratorFunc(t *testing.T) { + c := sha256ScramClientGeneratorFunc() + assert.NotNil(t, c) +} + +func TestSha512ScramClientGeneratorFunc(t *testing.T) { + c := sha512ScramClientGeneratorFunc() + assert.NotNil(t, c) +} + +func TestNoChallenge(t *testing.T) { + c := sha512ScramClientGeneratorFunc() + assert.Nil(t, c.Begin("user", "pass", "authz")) + assert.False(t, c.Done()) +} + +func TestWithChallenge(t *testing.T) { + c := sha512ScramClientGeneratorFunc() + assert.Nil(t, c.Begin("user", "pass", "authz")) + _, err := c.Step("step") + assert.Nil(t, err) + assert.False(t, c.Done()) +} diff --git a/control-plane/pkg/security/secret.go b/control-plane/pkg/security/secret.go new file mode 100644 index 0000000000..efc78ed2e4 --- /dev/null +++ b/control-plane/pkg/security/secret.go @@ -0,0 +1,189 @@ +/* + * Copyright 2020 The Knative Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package security + +import ( + "crypto/tls" + "crypto/x509" + "fmt" + + "github.com/Shopify/sarama" +) + +const ( + ProtocolKey = "protocol" + + CaCertificateKey = "ca.crt" + + UserCertificate = "user.crt" + UserKey = "user.key" + + SaslMechanismKey = "sasl.mechanism" + SaslUserKey = "user" + SaslPasswordKey = "password" + + ProtocolPlaintext = "PLAINTEXT" + ProtocolSASLPlaintext = "SASL_PLAINTEXT" + ProtocolSSL = "SSL" + ProtocolSASLSSL = "SASL_SSL" + + SaslPlain = "PLAIN" + SaslScramSha256 = "SCRAM-SHA-256" + SaslScramSha512 = "SCRAM-SHA-512" +) + +var supportedProtocols = fmt.Sprintf("%v", []string{ + ProtocolPlaintext, + ProtocolSASLPlaintext, + ProtocolSSL, + ProtocolSASLSSL, +}) + +type ConfigOption func(config *sarama.Config) error + +func options(config *sarama.Config, options ...ConfigOption) error { + for _, opt := range options { + if err := opt(config); err != nil { + return err + } + } + return nil +} + +func secretData(data map[string][]byte) ConfigOption { + return func(config *sarama.Config) error { + + protocolBytes, ok := data[ProtocolKey] + if !ok { + return fmt.Errorf("protocol required (key: %s) supported protocols: %s", ProtocolKey, supportedProtocols) + } + + protocol := string(protocolBytes) + if protocol == ProtocolPlaintext { + return nil + } + + if protocol == ProtocolSASLPlaintext { + return options(config, + saslConfig(protocol, data), + ) + } + + if protocol == ProtocolSSL { + return options(config, + sslConfig(protocol, data), + ) + } + + if protocol == ProtocolSASLSSL { + return options(config, + saslConfig(protocol, data), + sslConfig(protocol, data), + ) + } + + return fmt.Errorf("protocol %s unsupported (key: %s), supported protocols: %s", protocol, ProtocolKey, supportedProtocols) + } +} + +func saslConfig(protocol string, data map[string][]byte) ConfigOption { + return func(config *sarama.Config) error { + + // Supported mechanism SASL/PLAIN or SASL/SCRAM. 
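Reviewer note (illustration only, not part of the patch): a sketch of a Secret that secretData above accepts for SASL_SSL with SCRAM-SHA-512; the namespace, name, and credential values are placeholders.

package example // hypothetical, mirrors the keys consumed by secretData/saslConfig/sslConfig

import (
	corev1 "k8s.io/api/core/v1"
	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
)

func exampleAuthSecret(caPEM []byte) *corev1.Secret {
	return &corev1.Secret{
		ObjectMeta: metav1.ObjectMeta{Namespace: "my-namespace", Name: "my-kafka-auth"},
		Data: map[string][]byte{
			"protocol":       []byte("SASL_SSL"),
			"sasl.mechanism": []byte("SCRAM-SHA-512"),
			"user":           []byte("my-user"),
			"password":       []byte("my-password"),
			"ca.crt":         caPEM,
			// user.crt and user.key are required only for the SSL protocol
			// (client certificates); SASL_SSL needs just the CA bundle.
		},
	}
}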
+ givenSASLMechanism, ok := data[SaslMechanismKey] + if !ok { + return fmt.Errorf("[protocol %s] SASL mechanism required (key: %s)", protocol, SaslMechanismKey) + } + saslMechanism := string(givenSASLMechanism) + + user, ok := data[SaslUserKey] + if !ok || len(user) == 0 { + return fmt.Errorf("[protocol %s] SASL user required (key: %s)", protocol, SaslUserKey) + } + + password, ok := data[SaslPasswordKey] + if !ok || len(password) == 0 { + return fmt.Errorf("[protocol %s] SASL password required (key: %s)", protocol, SaslPasswordKey) + } + + config.Net.SASL.Enable = true + config.Net.SASL.Handshake = true + config.Net.SASL.User = string(user) + config.Net.SASL.Password = string(password) + + if saslMechanism == SaslPlain { + config.Net.SASL.Mechanism = sarama.SASLTypePlaintext + return nil + } + + if saslMechanism == SaslScramSha512 { + config.Net.SASL.Mechanism = sarama.SASLTypeSCRAMSHA512 + config.Net.SASL.SCRAMClientGeneratorFunc = sha512ScramClientGeneratorFunc + return nil + } + + if saslMechanism == SaslScramSha256 { + config.Net.SASL.Mechanism = sarama.SASLTypeSCRAMSHA256 + config.Net.SASL.SCRAMClientGeneratorFunc = sha256ScramClientGeneratorFunc + return nil + } + + return fmt.Errorf("[protocol %s] unsupported SASL mechanism (key: %s)", protocol, SaslMechanismKey) + } +} + +func sslConfig(protocol string, data map[string][]byte) ConfigOption { + return func(config *sarama.Config) error { + + caCert, ok := data[CaCertificateKey] + if !ok { + return fmt.Errorf("[protocol %s] required CA certificate (key: %s)", protocol, CaCertificateKey) + } + + var tlsCerts []tls.Certificate + if protocol == ProtocolSSL { + userKeyCert, ok := data[UserKey] + if !ok { + return fmt.Errorf("[protocol %s] required user key (key: %s)", protocol, UserKey) + } + + userCert, ok := data[UserCertificate] + if !ok { + return fmt.Errorf("[protocol %s] required user certificate (key: %s)", protocol, UserCertificate) + } + + tlsCert, err := tls.X509KeyPair(userCert, userKeyCert) + if err != nil { + return fmt.Errorf("[protocol %s] failed to load x.509 key pair: %w", protocol, err) + } + tlsCerts = []tls.Certificate{tlsCert} + } + + certPool := x509.NewCertPool() + certPool.AppendCertsFromPEM(caCert) + + config.Net.TLS.Enable = true + config.Net.TLS.Config = &tls.Config{ + MinVersion: tls.VersionTLS12, + MaxVersion: tls.VersionTLS13, + Certificates: tlsCerts, + RootCAs: certPool, + } + + return nil + } +} diff --git a/control-plane/pkg/security/secret_test.go b/control-plane/pkg/security/secret_test.go new file mode 100644 index 0000000000..f81867aee5 --- /dev/null +++ b/control-plane/pkg/security/secret_test.go @@ -0,0 +1,273 @@ +/* + * Copyright 2020 The Knative Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License.
+ */ + +package security + +import ( + "io/ioutil" + "testing" + + "github.com/Shopify/sarama" + "github.com/stretchr/testify/assert" +) + +func TestNoProtocol(t *testing.T) { + config := sarama.NewConfig() + + err := options(config, secretData(map[string][]byte{})) + + assert.NotNil(t, err) +} + +func TestUnsupportedProtocol(t *testing.T) { + secret := map[string][]byte{ + "protocol": []byte("PLAIN"), + } + config := sarama.NewConfig() + + err := options(config, secretData(secret)) + + assert.NotNil(t, err) +} + +func TestPlaintext(t *testing.T) { + secret := map[string][]byte{ + "protocol": []byte("PLAINTEXT"), + } + config := sarama.NewConfig() + + err := options(config, secretData(secret)) + + assert.Nil(t, err) +} + +func TestSASLPlain(t *testing.T) { + secret := map[string][]byte{ + "protocol": []byte("SASL_PLAINTEXT"), + "sasl.mechanism": []byte("PLAIN"), + "user": []byte("my-user-name"), + "password": []byte("my-user-password"), + } + config := sarama.NewConfig() + + err := options(config, secretData(secret)) + + assert.Nil(t, err) + assert.True(t, config.Net.SASL.Enable) + assert.True(t, config.Net.SASL.Handshake) + assert.Equal(t, sarama.SASLMechanism(sarama.SASLTypePlaintext), config.Net.SASL.Mechanism) + assert.Equal(t, "my-user-name", config.Net.SASL.User) + assert.Equal(t, "my-user-password", config.Net.SASL.Password) +} + +func TestSASLPlainLSCRAM256(t *testing.T) { + secret := map[string][]byte{ + "protocol": []byte("SASL_PLAINTEXT"), + "sasl.mechanism": []byte("SCRAM-SHA-256"), + "user": []byte("my-user-name"), + "password": []byte("my-user-password"), + } + config := sarama.NewConfig() + + err := options(config, secretData(secret)) + + assert.Nil(t, err) + assert.True(t, config.Net.SASL.Enable) + assert.True(t, config.Net.SASL.Handshake) + assert.Equal(t, sarama.SASLMechanism(sarama.SASLTypeSCRAMSHA256), config.Net.SASL.Mechanism) + assert.NotNil(t, config.Net.SASL.SCRAMClientGeneratorFunc) + assert.Equal(t, "my-user-name", config.Net.SASL.User) + assert.Equal(t, "my-user-password", config.Net.SASL.Password) +} + +func TestSASLPlainLSCRAM512(t *testing.T) { + secret := map[string][]byte{ + "protocol": []byte("SASL_PLAINTEXT"), + "sasl.mechanism": []byte("SCRAM-SHA-512"), + "user": []byte("my-user-name"), + "password": []byte("my-user-password"), + } + config := sarama.NewConfig() + + err := options(config, secretData(secret)) + + assert.Nil(t, err) + assert.True(t, config.Net.SASL.Enable) + assert.True(t, config.Net.SASL.Handshake) + assert.Equal(t, sarama.SASLMechanism(sarama.SASLTypeSCRAMSHA512), config.Net.SASL.Mechanism) + assert.NotNil(t, config.Net.SASL.SCRAMClientGeneratorFunc) + assert.Equal(t, "my-user-name", config.Net.SASL.User) + assert.Equal(t, "my-user-password", config.Net.SASL.Password) +} + +func TestSASLPlainSCRAM513(t *testing.T) { + secret := map[string][]byte{ + "protocol": []byte("SASL_PLAINTEXT"), + "sasl.mechanism": []byte("SCRAM-SHA-513"), + "user": []byte("my-user-name"), + "password": []byte("my-user-password"), + } + config := sarama.NewConfig() + + err := options(config, secretData(secret)) + + assert.NotNil(t, err) +} + +func TestSASLPlainLSCRAM512NoUser(t *testing.T) { + secret := map[string][]byte{ + "protocol": []byte("SASL_PLAINTEXT"), + "sasl.mechanism": []byte("SCRAM-SHA-512"), + "user": []byte(""), + "password": []byte("my-user-password"), + } + config := sarama.NewConfig() + + err := options(config, secretData(secret)) + + assert.NotNil(t, err) +} + +func TestSASLPlainLSCRAM512NoPassword(t *testing.T) { + secret := map[string][]byte{ 
+ "protocol": []byte("SASL_PLAINTEXT"), + "sasl.mechanism": []byte("SCRAM-SHA-512"), + "user": []byte("my-user-name"), + "password": []byte(""), + } + config := sarama.NewConfig() + + err := options(config, secretData(secret)) + + assert.NotNil(t, err) +} + +func TestSSL(t *testing.T) { + ca, userKey, userCert := loadCerts(t) + + secret := map[string][]byte{ + "protocol": []byte("SSL"), + "user.key": userKey, + "user.crt": userCert, + "ca.crt": ca, + } + config := sarama.NewConfig() + + err := options(config, secretData(secret)) + + assert.Nil(t, err) + assert.True(t, config.Net.TLS.Enable) + assert.Greater(t, len(config.Net.TLS.Config.Certificates), 0) + assert.NotNil(t, config.Net.TLS.Config.RootCAs) + assert.Greater(t, len(config.Net.TLS.Config.RootCAs.Subjects()), 0) +} + +func TestSASLPLainSSL(t *testing.T) { + ca, userKey, userCert := loadCerts(t) + + secret := map[string][]byte{ + "protocol": []byte("SASL_SSL"), + "sasl.mechanism": []byte("PLAIN"), + "user.key": userKey, + "user.crt": userCert, + "ca.crt": ca, + "user": []byte("my-user-name"), + "password": []byte("my-user-password"), + } + config := sarama.NewConfig() + + err := options(config, secretData(secret)) + + assert.Nil(t, err) + assert.True(t, config.Net.TLS.Enable) + assert.Equal(t, len(config.Net.TLS.Config.Certificates), 0) + assert.NotNil(t, config.Net.TLS.Config.RootCAs) + assert.Greater(t, len(config.Net.TLS.Config.RootCAs.Subjects()), 0) + assert.True(t, config.Net.SASL.Enable) + assert.True(t, config.Net.SASL.Handshake) + assert.Equal(t, sarama.SASLMechanism(sarama.SASLTypePlaintext), config.Net.SASL.Mechanism) + assert.Equal(t, "my-user-name", config.Net.SASL.User) + assert.Equal(t, "my-user-password", config.Net.SASL.Password) +} + +func TestSASLSCRAM256SSL(t *testing.T) { + ca, userKey, userCert := loadCerts(t) + + secret := map[string][]byte{ + "protocol": []byte("SASL_SSL"), + "sasl.mechanism": []byte("SCRAM-SHA-256"), + "ca.crt": ca, + "user.crt": userCert, + "user.key": userKey, + "user": []byte("my-user-name"), + "password": []byte("my-user-password"), + } + config := sarama.NewConfig() + + err := options(config, secretData(secret)) + + assert.Nil(t, err) + assert.True(t, config.Net.TLS.Enable) + assert.Equal(t, len(config.Net.TLS.Config.Certificates), 0) + assert.NotNil(t, config.Net.TLS.Config.RootCAs) + assert.Greater(t, len(config.Net.TLS.Config.RootCAs.Subjects()), 0) + assert.True(t, config.Net.SASL.Enable) + assert.True(t, config.Net.SASL.Handshake) + assert.Equal(t, sarama.SASLMechanism(sarama.SASLTypeSCRAMSHA256), config.Net.SASL.Mechanism) + assert.Equal(t, "my-user-name", config.Net.SASL.User) + assert.Equal(t, "my-user-password", config.Net.SASL.Password) +} + +func TestSASLSCRAM512SSL(t *testing.T) { + ca, userKey, userCert := loadCerts(t) + + secret := map[string][]byte{ + "protocol": []byte("SASL_SSL"), + "sasl.mechanism": []byte("SCRAM-SHA-512"), + "ca.crt": ca, + "user.crt": userCert, + "user.key": userKey, + "user": []byte("my-user-name"), + "password": []byte("my-user-password"), + } + config := sarama.NewConfig() + + err := options(config, secretData(secret)) + + assert.Nil(t, err) + assert.True(t, config.Net.TLS.Enable) + assert.Equal(t, len(config.Net.TLS.Config.Certificates), 0) + assert.NotNil(t, config.Net.TLS.Config.RootCAs) + assert.Greater(t, len(config.Net.TLS.Config.RootCAs.Subjects()), 0) + assert.True(t, config.Net.SASL.Enable) + assert.True(t, config.Net.SASL.Handshake) + assert.Equal(t, sarama.SASLMechanism(sarama.SASLTypeSCRAMSHA512), config.Net.SASL.Mechanism) + 
assert.Equal(t, "my-user-name", config.Net.SASL.User) + assert.Equal(t, "my-user-password", config.Net.SASL.Password) +} + +func loadCerts(t *testing.T) (ca, userKey, userCert []byte) { + ca, err := ioutil.ReadFile("testdata/ca.crt") + assert.Nil(t, err) + + userKey, err = ioutil.ReadFile("testdata/user.key") + assert.Nil(t, err) + + userCert, err = ioutil.ReadFile("testdata/user.crt") + assert.Nil(t, err) + + return ca, userKey, userCert +} diff --git a/control-plane/pkg/security/testdata/ca.crt b/control-plane/pkg/security/testdata/ca.crt new file mode 100644 index 0000000000..6ebdd8f82e --- /dev/null +++ b/control-plane/pkg/security/testdata/ca.crt @@ -0,0 +1,20 @@ +-----BEGIN CERTIFICATE----- +MIIDLTCCAhWgAwIBAgIJAOjtl0zhGBvpMA0GCSqGSIb3DQEBCwUAMC0xEzARBgNV +BAoMCmlvLnN0cmltemkxFjAUBgNVBAMMDWNsdXN0ZXItY2EgdjAwHhcNMjAxMjIw +MDgzNzU4WhcNMjExMjIwMDgzNzU4WjAtMRMwEQYDVQQKDAppby5zdHJpbXppMRYw +FAYDVQQDDA1jbHVzdGVyLWNhIHYwMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIB +CgKCAQEAy7rIo+UwJh5dL6PhUfDe9wRuLgOf1ZeZmabd++eLc2kWL1r6TO8X034n +CerkREfjF+MjDoK30z9xvEURThoSi20a4i/Cb39on9T0AgOr5qCSrqlN9n4KtRey +ZLnKKA5QyLAM6kzyyvIg4PVwWCWFTQSicDPzqd2OmH6jtogD50FkbaP7LcyrKnWf +64gcR9CCEAcrO8tJdhcZP2Slxg+RvupVjXK1rdZcI6/liZ3Jp4hzApSRN30x/8wU +5eJYAtzaeWUvJ0Yq/7BH7uY8J+2Hwh+shhi5K98HBAKeISwuIJEQrWmmUer8WGp1 +IcBZqXbkd4dBXuFa0chO0gSKvzjKpQIDAQABo1AwTjAdBgNVHQ4EFgQUeascji1L +C2voPwDAlPL6iz8TzncwHwYDVR0jBBgwFoAUeascji1LC2voPwDAlPL6iz8Tzncw +DAYDVR0TBAUwAwEB/zANBgkqhkiG9w0BAQsFAAOCAQEAIEL2uustCrpPas06LyoR +VR6QFHQJDcMgdL0CZFE46uLSgGupXO0ybmPP2ymBJ1zDNxx1qskNTwBsfBJLBAj6 +8LfJmhfw98QK8YQDJ/Xhx3fcVxjn6NjJ3RYOyb5bqSIGGCQZRmbMjerf71KMhP3X +rdYg2hVoCvfRcfP2G0jbWtMRK4+MlB3oEvhIvQQW1dw4sohw32HaNJnzb7dErEDB +Ha2zVM47CcNezdWYUD5NQzFqCRypgrIONafQI2S+Ck7aKOiqF03QSug4wizRbKhT +uYpQg59dUIOBebg0roRF326H2x6kFGn5L2o+TROrZeeXT8vyIl2R33o3E+ULpuw+ +Vw== +-----END CERTIFICATE----- diff --git a/control-plane/pkg/security/testdata/user.crt b/control-plane/pkg/security/testdata/user.crt new file mode 100644 index 0000000000..540c46f2af --- /dev/null +++ b/control-plane/pkg/security/testdata/user.crt @@ -0,0 +1,17 @@ +-----BEGIN CERTIFICATE----- +MIICvzCCAacCCQCCB7cNEWYczDANBgkqhkiG9w0BAQsFADAtMRMwEQYDVQQKDApp +by5zdHJpbXppMRYwFAYDVQQDDA1jbGllbnRzLWNhIHYwMB4XDTIwMTIyMDExMjQ0 +OVoXDTIxMTIyMDExMjQ0OVowFjEUMBIGA1UEAwwLbXktdGxzLXVzZXIwggEiMA0G +CSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDVzpeggunvw1WFK7MLwonVCdIZDwKK +wIeQZZ4QiRcjQK6HaTrb6+lUgxS4d16EXjn8nFyJrngaohnpKd43Uilb3j6g/5KW +zVm0RmAmkkLIyp7yoF2MHYz/4IjenHwbKKee6YznRM+v/OmnThexHwjY3OFCrYhC +00CbsWMruitm192oJh6yTm0BFw5tVbEqb5Y7CNNYMaaVXLHDJaoUYiylXbKa/xLH +yuc4GFQVggGoHeV582eNrQ6Hz2hkzpmVGsK6nGydkdeEzeL9zb4BH60tZARbTGWp +22V7ygBF7jgTVuevIjWYMuaNDdi7JHj8rD7KXhD4MsUwLukEBktMVuQhAgMBAAEw +DQYJKoZIhvcNAQELBQADggEBAM+rqGT16PmUcqsdG2AQd0NIgt6PV1R5kdgoOo3o +Cx3tDiBsGHSb0utHl3wCQnYi6K6FoCMdwmaqqfNi42JtehWEkI8wLUrtn7ZNbm3X +GIm4mrUQAfJLfolRrNl3G28e/AqB/DfJHOnE7bpQPD2yRag+nD40KcV5siJ7Elwk +PlPnYJeY94NBMzDxADAOpamcjZmtDraWRB2s0OGwXbGO9xZ7yXnVrQKR5TW0zRH8 +bnWEU0tKfw+rosZsq+JYZGVqVmJTWIUBxBrQvD6Dy2Dhiq6qqgrXGYuvKcQND+6R +T/ksWByG+mrI8alQ+tIdtwopQVSsLvtdWkDlMoAudxfb5tc= +-----END CERTIFICATE----- diff --git a/control-plane/pkg/security/testdata/user.key b/control-plane/pkg/security/testdata/user.key new file mode 100644 index 0000000000..0f0e7af9e6 --- /dev/null +++ b/control-plane/pkg/security/testdata/user.key @@ -0,0 +1,28 @@ +-----BEGIN PRIVATE KEY----- +MIIEvgIBADANBgkqhkiG9w0BAQEFAASCBKgwggSkAgEAAoIBAQDVzpeggunvw1WF +K7MLwonVCdIZDwKKwIeQZZ4QiRcjQK6HaTrb6+lUgxS4d16EXjn8nFyJrngaohnp 
+Kd43Uilb3j6g/5KWzVm0RmAmkkLIyp7yoF2MHYz/4IjenHwbKKee6YznRM+v/Omn +ThexHwjY3OFCrYhC00CbsWMruitm192oJh6yTm0BFw5tVbEqb5Y7CNNYMaaVXLHD +JaoUYiylXbKa/xLHyuc4GFQVggGoHeV582eNrQ6Hz2hkzpmVGsK6nGydkdeEzeL9 +zb4BH60tZARbTGWp22V7ygBF7jgTVuevIjWYMuaNDdi7JHj8rD7KXhD4MsUwLukE +BktMVuQhAgMBAAECggEATX3QNOvd7/mDIH4jKar7m8wwasam7DU1rR83d9TIHw3N +c+Su4ZCSg//yXZ1eE2m3aJgfbgNfIoAIFhsHoFNWDCbz2hRnQ+LvlUhAhxpRvEa7 +70IyRZtcRE1ULiLXVFyazBzn5dZp0+Y9pVbHfXm7EiaUOj98+INqkkNrpyiVnHMN +F9r5wfY+uPMPDfXtDfvWoSWIk4kXZkU8NEW9IGJJCWj2AT29VHqVqe5obWdAWQCc +3XCNvip+OwaNJ1JtB84eTAD6NDyjgvdywCiJ1SrxSUuYYljsYhK0XjubtOJa/9ot +xMI6SzuyKL+BAnH0YSZ77BHVd78G0cSuqcg3eFRUpQKBgQDs+9pXoAkT8e32K1Fv +G6h4r7IB7RKf3PmZu7L0dCxUkfQssv+UvlgG7JZbL5OPsiit8ISYfJj0o6r1XsdF +Ehro8rN3mtUafUPwjrI9T+fwWBlug7LhqE2WRsM0i4dy7btuSqIBnhW7aalG1qWv +tBB2VL2/tLbfkYNBZOugNBDLJwKBgQDm9qOY+e9g5HZLaHilIFMp3P0FrZ9bDQyI +sUgqgeY5uVrOpFmQs4qg38Nuc4qlu8qrIDpzCjiv0AXh1EtrSwTfOOY04FkBXRS7 +p+FxpmC9z7W3zY/6+tluU/2lc50UYah53hJuyQsv8LWhd3r5Dz2sIUiSQclcMm/F +FRk17QEDdwKBgFn1qQbGW2vU1M4z12/kAen+WAZfJkjFK4LuO5qMVx+rYBNvx5GB +SA0o1lK7FrrfmI2e1dbExZNoZcdLOT3BB7ISH4pxnh+2GYpBwOuBX7xe1RicqdT7 +fKhlKEjUBpDcKeBOOH2AsyM3kuivJqoyImWo+XbCdQEdTcQPOVrRKObVAoGBAI/G +h27qX47EibNFlJzOc5bC36uSOno2tE+/dHoppQTYEuRCQRMoAStGQGVYfb3QGkjO +5jPI8CLZ7192hYCEEcNXq07NPfrwU8DBsEDMXrqqO6fwyill0MayeFpvjyAp7vP2 +PylB87njMV4Tf7TdPPoTOhb55vyC1TsteCUupYvfAoGBALT7SVGwZ+ew+BENoADC +gcLE/qHcM0xxHE0xxrVojvwQ0llRd9jdfMbI4TWL/X9JXU1APg7UGUSo1NoOUzTL +DRS20y84+sj5kL2FkhsLFV9cZnvEqogv5WicbrgbFJmHszqlAfNSOWpOmyvwlobo +7s4NC2Vg8fA3DbiPtga/F+Ts +-----END PRIVATE KEY----- diff --git a/data-plane/THIRD-PARTY.txt b/data-plane/THIRD-PARTY.txt index 1009db6f11..6fcadb5d2d 100644 --- a/data-plane/THIRD-PARTY.txt +++ b/data-plane/THIRD-PARTY.txt @@ -1,24 +1,30 @@ -Lists of 111 third-party dependencies. +Lists of 155 third-party dependencies. 
(Eclipse Public License - v 1.0) (GNU Lesser General Public License) Logback Classic Module (ch.qos.logback:logback-classic:1.2.3 - http://logback.qos.ch/logback-classic) (Eclipse Public License - v 1.0) (GNU Lesser General Public License) Logback Core Module (ch.qos.logback:logback-core:1.2.3 - http://logback.qos.ch/logback-core) (The Apache Software License, Version 2.0) Jackson-annotations (com.fasterxml.jackson.core:jackson-annotations:2.11.3 - http://github.com/FasterXML/jackson) (The Apache Software License, Version 2.0) Jackson-core (com.fasterxml.jackson.core:jackson-core:2.11.3 - https://github.com/FasterXML/jackson-core) (The Apache Software License, Version 2.0) jackson-databind (com.fasterxml.jackson.core:jackson-databind:2.11.3 - http://github.com/FasterXML/jackson) (The Apache Software License, Version 2.0) Jackson-dataformat-CSV (com.fasterxml.jackson.dataformat:jackson-dataformat-csv:2.12.0 - https://github.com/FasterXML/jackson-dataformats-text) + (The Apache Software License, Version 2.0) Jackson-dataformat-YAML (com.fasterxml.jackson.dataformat:jackson-dataformat-yaml:2.11.3 - https://github.com/FasterXML/jackson-dataformats-text) (The Apache Software License, Version 2.0) Jackson datatype: jdk8 (com.fasterxml.jackson.datatype:jackson-datatype-jdk8:2.12.0 - https://github.com/FasterXML/jackson-modules-java8/jackson-datatype-jdk8) (The Apache Software License, Version 2.0) Jackson datatype: JSR310 (com.fasterxml.jackson.datatype:jackson-datatype-jsr310:2.12.0 - https://github.com/FasterXML/jackson-modules-java8/jackson-datatype-jsr310) + (The Apache Software License, Version 2.0) Jackson module: JAXB Annotations (com.fasterxml.jackson.module:jackson-module-jaxb-annotations:2.12.0 - https://github.com/FasterXML/jackson-modules-base) (The Apache Software License, Version 2.0) jackson-module-scala (com.fasterxml.jackson.module:jackson-module-scala_2.12:2.12.0 - http://wiki.fasterxml.com/JacksonModuleScala) - (BSD 2-Clause License) zstd-jni (com.github.luben:zstd-jni:1.4.4-7 - https://github.com/luben/zstd-jni) + (BSD 2-Clause License) zstd-jni (com.github.luben:zstd-jni:1.4.5-6 - https://github.com/luben/zstd-jni) + (The Apache Software License, Version 2.0) Generex (com.github.mifmif:generex:1.0.2 - https://github.com/mifmif/Generex/tree/master) (The Apache Software License, Version 2.0) FindBugs-jsr305 (com.google.code.findbugs:jsr305:3.0.2 - http://findbugs.sourceforge.net/) (Apache 2.0) Gson (com.google.code.gson:gson:2.8.6 - https://github.com/google/gson/gson) (Apache 2.0) error-prone annotations (com.google.errorprone:error_prone_annotations:2.3.4 - http://nexus.sonatype.org/oss-repository-hosting.html/error_prone_parent/error_prone_annotations) (The Apache Software License, Version 2.0) Guava InternalFutureFailureAccess and InternalFutures (com.google.guava:failureaccess:1.0.1 - https://github.com/google/guava/failureaccess) (Apache License, Version 2.0) Guava: Google Core Libraries for Java (com.google.guava:guava:29.0-android - https://github.com/google/guava/guava) + (Apache License, Version 2.0) Guava: Google Core Libraries for Java (com.google.guava:guava:30.0-jre - https://github.com/google/guava/guava) (The Apache Software License, Version 2.0) Guava ListenableFuture only (com.google.guava:listenablefuture:9999.0-empty-to-avoid-conflict-with-guava - https://github.com/google/guava/listenablefuture) (The Apache Software License, Version 2.0) J2ObjC Annotations (com.google.j2objc:j2objc-annotations:1.3 - https://github.com/google/j2objc/) (3-Clause BSD License) 
Protocol Buffers [Core] (com.google.protobuf:protobuf-java:3.14.0 - https://developers.google.com/protocol-buffers/protobuf-java/) (3-Clause BSD License) Protocol Buffers [Util] (com.google.protobuf:protobuf-java-util:3.14.0 - https://developers.google.com/protocol-buffers/protobuf-java-util/) + (Apache 2.0) OkHttp Logging Interceptor (com.squareup.okhttp3:logging-interceptor:3.12.12 - https://github.com/square/okhttp/logging-interceptor) + (Apache 2.0) MockWebServer (com.squareup.okhttp3:mockwebserver:3.12.6 - https://github.com/square/okhttp/mockwebserver) (Apache 2.0) OkHttp (com.squareup.okhttp3:okhttp:3.14.7 - https://github.com/square/okhttp/okhttp) (Apache 2.0) Okio (com.squareup.okio:okio:1.17.2 - https://github.com/square/okio/okio) (Apache 2.0 License) scala-logging (com.typesafe.scala-logging:scala-logging_2.12:3.9.2 - https://github.com/lightbend/scala-logging) @@ -28,15 +34,40 @@ Lists of 111 third-party dependencies. (Unknown license) core (dev.knative.eventing.kafka.broker:core:1.0-SNAPSHOT - no url defined) (Unknown license) dispatcher (dev.knative.eventing.kafka.broker:dispatcher:1.0-SNAPSHOT - no url defined) (Unknown license) receiver (dev.knative.eventing.kafka.broker:receiver:1.0-SNAPSHOT - no url defined) + (BSD) Automaton (dk.brics.automaton:automaton:1.11-8 - http://www.brics.dk/automaton/) (The Apache Software License, Version 2.0) CloudEvents - API (io.cloudevents:cloudevents-api:2.0.0.RC2 - https://cloudevents.github.io/sdk-java/cloudevents-api/) (The Apache Software License, Version 2.0) CloudEvents - Core (io.cloudevents:cloudevents-core:2.0.0.RC2 - https://cloudevents.github.io/sdk-java/cloudevents-core/) (The Apache Software License, Version 2.0) CloudEvents - Vert.x Http Binding (io.cloudevents:cloudevents-http-vertx:2.0.0.RC2 - https://cloudevents.github.io/sdk-java/cloudevents-http-vertx/) (The Apache Software License, Version 2.0) CloudEvents - JSON Jackson (io.cloudevents:cloudevents-json-jackson:2.0.0.RC2 - https://cloudevents.github.io/sdk-java/cloudevents-json-jackson/) (The Apache Software License, Version 2.0) CloudEvents - Kafka Binding (io.cloudevents:cloudevents-kafka:2.0.0.RC2 - https://cloudevents.github.io/sdk-java/cloudevents-kafka/) - (Apache Software License 2.0) Debezium API (io.debezium:debezium-api:1.3.1.Final - https://debezium.io/debezium-api) - (Apache Software License 2.0) Debezium Core (io.debezium:debezium-core:1.3.1.Final - https://debezium.io/debezium-core) - (The Apache Software License, Version 2.0) micrometer-core (io.micrometer:micrometer-core:1.6.2 - https://github.com/micrometer-metrics/micrometer) - (The Apache Software License, Version 2.0) micrometer-registry-prometheus (io.micrometer:micrometer-registry-prometheus:1.6.2 - https://github.com/micrometer-metrics/micrometer) + (Apache Software License 2.0) Debezium API (io.debezium:debezium-api:1.4.0.Final - https://debezium.io/debezium-api) + (Apache Software License 2.0) Debezium Core (io.debezium:debezium-core:1.4.0.Final - https://debezium.io/debezium-core) + (Apache License, Version 2.0) Fabric8 :: Kubernetes :: Java Client (io.fabric8:kubernetes-client:5.0.0-alpha-3 - http://fabric8.io/kubernetes-client/) + (Apache License, Version 2.0) Fabric8 :: Kubernetes Model :: Admission Registration, Authentication and Authorization (io.fabric8:kubernetes-model-admissionregistration:5.0.0-alpha-3 - http://fabric8.io/kubernetes-model-generator/kubernetes-model-admissionregistration/) + (Apache License, Version 2.0) Fabric8 :: Kubernetes Model :: API Extensions 
(io.fabric8:kubernetes-model-apiextensions:5.0.0-alpha-3 - http://fabric8.io/kubernetes-model-generator/kubernetes-model-apiextensions/) + (Apache License, Version 2.0) Fabric8 :: Kubernetes Model :: Apps (io.fabric8:kubernetes-model-apps:5.0.0-alpha-3 - http://fabric8.io/kubernetes-model-generator/kubernetes-model-apps/) + (Apache License, Version 2.0) Fabric8 :: Kubernetes Model :: Autoscaling (io.fabric8:kubernetes-model-autoscaling:5.0.0-alpha-3 - http://fabric8.io/kubernetes-model-generator/kubernetes-model-autoscaling/) + (Apache License, Version 2.0) Fabric8 :: Kubernetes Model :: Batch (io.fabric8:kubernetes-model-batch:5.0.0-alpha-3 - http://fabric8.io/kubernetes-model-generator/kubernetes-model-batch/) + (Apache License, Version 2.0) Fabric8 :: Kubernetes Model :: Certificates (io.fabric8:kubernetes-model-certificates:5.0.0-alpha-3 - http://fabric8.io/kubernetes-model-generator/kubernetes-model-certificates/) + (Apache License, Version 2.0) Fabric8 :: Kubernetes Model :: Common (io.fabric8:kubernetes-model-common:5.0.0-alpha-3 - http://fabric8.io/kubernetes-model-generator/kubernetes-model-common/) + (Apache License, Version 2.0) Fabric8 :: Kubernetes Model :: Coordination (io.fabric8:kubernetes-model-coordination:5.0.0-alpha-3 - http://fabric8.io/kubernetes-model-generator/kubernetes-model-coordination/) + (Apache License, Version 2.0) Fabric8 :: Kubernetes Model :: Core (io.fabric8:kubernetes-model-core:5.0.0-alpha-3 - http://fabric8.io/kubernetes-model-generator/kubernetes-model-core/) + (Apache License, Version 2.0) Fabric8 :: Kubernetes Model :: Discovery (io.fabric8:kubernetes-model-discovery:5.0.0-alpha-3 - http://fabric8.io/kubernetes-model-generator/kubernetes-model-discovery/) + (Apache License, Version 2.0) Fabric8 :: Kubernetes Model :: Events (io.fabric8:kubernetes-model-events:5.0.0-alpha-3 - http://fabric8.io/kubernetes-model-generator/kubernetes-model-events/) + (Apache License, Version 2.0) Fabric8 :: Kubernetes Model :: Extensions (io.fabric8:kubernetes-model-extensions:5.0.0-alpha-3 - http://fabric8.io/kubernetes-model-generator/kubernetes-model-extensions/) + (Apache License, Version 2.0) Fabric8 :: Kubernetes Model :: Metrics (io.fabric8:kubernetes-model-metrics:5.0.0-alpha-3 - http://fabric8.io/kubernetes-model-generator/kubernetes-model-metrics/) + (Apache License, Version 2.0) Fabric8 :: Kubernetes Model :: Networking (io.fabric8:kubernetes-model-networking:5.0.0-alpha-3 - http://fabric8.io/kubernetes-model-generator/kubernetes-model-networking/) + (Apache License, Version 2.0) Fabric8 :: Kubernetes Model :: Node (io.fabric8:kubernetes-model-node:5.0.0-alpha-3 - http://fabric8.io/kubernetes-model-generator/kubernetes-model-node/) + (Apache License, Version 2.0) Fabric8 :: Kubernetes Model :: Policy (io.fabric8:kubernetes-model-policy:5.0.0-alpha-3 - http://fabric8.io/kubernetes-model-generator/kubernetes-model-policy/) + (Apache License, Version 2.0) Fabric8 :: Kubernetes Model :: RBAC (io.fabric8:kubernetes-model-rbac:5.0.0-alpha-3 - http://fabric8.io/kubernetes-model-generator/kubernetes-model-rbac/) + (Apache License, Version 2.0) Fabric8 :: Kubernetes Model :: Scheduling (io.fabric8:kubernetes-model-scheduling:5.0.0-alpha-3 - http://fabric8.io/kubernetes-model-generator/kubernetes-model-scheduling/) + (Apache License, Version 2.0) Fabric8 :: Kubernetes Model :: Settings (io.fabric8:kubernetes-model-settings:5.0.0-alpha-3 - http://fabric8.io/kubernetes-model-generator/kubernetes-model-settings/) + (Apache License, Version 2.0) Fabric8 :: Kubernetes 
Model :: Storage Class (io.fabric8:kubernetes-model-storageclass:5.0.0-alpha-3 - http://fabric8.io/kubernetes-model-generator/kubernetes-model-storageclass/) + (Apache License, Version 2.0) Fabric8 :: Kubernetes :: Server Mock (io.fabric8:kubernetes-server-mock:5.0.0-alpha-3 - http://fabric8.io/kubernetes-server-mock/) + (Apache License, Version 2.0) Fabric8 :: Mock Web Server (io.fabric8:mockwebserver:0.1.8 - http://fabric8.io/) + (The Apache Software License, Version 2.0) zjsonpatch (io.fabric8:zjsonpatch:0.3.0 - https://github.com/fabric8io/zjsonpatch/) + (The Apache Software License, Version 2.0) micrometer-core (io.micrometer:micrometer-core:1.6.3 - https://github.com/micrometer-metrics/micrometer) + (The Apache Software License, Version 2.0) micrometer-registry-prometheus (io.micrometer:micrometer-registry-prometheus:1.6.3 - https://github.com/micrometer-metrics/micrometer) (Apache License, Version 2.0) Netty/Buffer (io.netty:netty-buffer:4.1.49.Final - https://netty.io/netty-buffer/) (Apache License, Version 2.0) Netty/Codec (io.netty:netty-codec:4.1.49.Final - https://netty.io/netty-codec/) (Apache License, Version 2.0) Netty/Codec/DNS (io.netty:netty-codec-dns:4.1.49.Final - https://netty.io/netty-codec-dns/) @@ -65,6 +96,10 @@ Lists of 111 third-party dependencies. (The Apache License, Version 2.0) OpenTelemetry Java (io.opentelemetry:opentelemetry-sdk-trace:0.13.1 - https://github.com/open-telemetry/opentelemetry-java) (The Apache Software License, Version 2.0) Prometheus Java Simpleclient (io.prometheus:simpleclient:0.9.0 - http://github.com/prometheus/client_java/simpleclient) (The Apache Software License, Version 2.0) Prometheus Java Simpleclient Common (io.prometheus:simpleclient_common:0.9.0 - http://github.com/prometheus/client_java/simpleclient_common) + (The Apache Software License, Version 2.0) Sundrio :: Annotations :: Builder (io.sundr:builder-annotations:0.13.1 - https://github.com/sundrio/sundrio/annotations/builder-annotations) + (The Apache Software License, Version 2.0) Sundrio :: Annotations :: Resourcecify (io.sundr:resourcecify-annotations:0.13.1 - https://github.com/sundrio/sundrio/annotations/resourcecify-annotations) + (The Apache Software License, Version 2.0) Sundrio :: Code generation (io.sundr:sundr-codegen:0.13.1 - https://github.com/sundrio/sundrio/sundr-codegen) + (The Apache Software License, Version 2.0) Sundrio :: Core (io.sundr:sundr-core:0.13.1 - https://github.com/sundrio/sundrio/sundr-core) (Eclipse Public License - v 1.0) (The Apache Software License, Version 2.0) vertx-auth-common (io.vertx:vertx-auth-common:4.0.0 - http://nexus.sonatype.org/oss-repository-hosting.html/vertx-parent/vertx-ext/vertx-ext-parent/vertx-auth/vertx-auth-common) (Eclipse Public License - v 1.0) (The Apache Software License, Version 2.0) vertx-circuit-breaker (io.vertx:vertx-circuit-breaker:4.0.0 - http://nexus.sonatype.org/oss-repository-hosting.html/vertx-parent/vertx-ext/vertx-ext-parent/vertx-circuit-breaker) (Eclipse Public License - v 2.0) (The Apache Software License, Version 2.0) Vert.x Core (io.vertx:vertx-core:4.0.0 - http://nexus.sonatype.org/oss-repository-hosting.html/vertx-parent/vertx-core) @@ -76,12 +111,18 @@ Lists of 111 third-party dependencies. 
(The Apache Software License, Version 2.0) Zipkin Reporter: Core (io.zipkin.reporter2:zipkin-reporter:2.12.2 - https://github.com/openzipkin/zipkin-reporter-java/zipkin-reporter) (The Apache Software License, Version 2.0) Zipkin Sender: OkHttp 3 (io.zipkin.reporter2:zipkin-sender-okhttp3:2.12.2 - https://github.com/openzipkin/zipkin-reporter-java/zipkin-sender-okhttp3) (The Apache Software License, Version 2.0) Zipkin Core Library (io.zipkin.zipkin2:zipkin:2.21.0 - https://github.com/openzipkin/zipkin/zipkin) - (Apache License, Version 2.0) Byte Buddy (without dependencies) (net.bytebuddy:byte-buddy:1.10.18 - https://bytebuddy.net/byte-buddy) - (Apache License, Version 2.0) Byte Buddy agent (net.bytebuddy:byte-buddy-agent:1.10.18 - https://bytebuddy.net/byte-buddy-agent) + (EDL 1.0) JavaBeans Activation Framework API jar (jakarta.activation:jakarta.activation-api:1.2.1 - https://github.com/eclipse-ee4j/jaf/jakarta.activation-api) + (Eclipse Distribution License - v 1.0) jakarta.xml.bind-api (jakarta.xml.bind:jakarta.xml.bind-api:2.3.2 - https://github.com/eclipse-ee4j/jaxb-api/jakarta.xml.bind-api) + (CDDL + GPLv2 with classpath exception) javax.annotation API (javax.annotation:javax.annotation-api:1.3.2 - http://jcp.org/en/jsr/detail?id=250) + (CDDL 1.1) (GPL2 w/ CPE) jaxb-api (javax.xml.bind:jaxb-api:2.3.0 - https://github.com/javaee/jaxb-spec/jaxb-api) + (Eclipse Public License 1.0) JUnit (junit:junit:4.13.1 - http://junit.org) + (Apache License, Version 2.0) Byte Buddy (without dependencies) (net.bytebuddy:byte-buddy:1.10.19 - https://bytebuddy.net/byte-buddy) + (Apache License, Version 2.0) Byte Buddy agent (net.bytebuddy:byte-buddy-agent:1.10.19 - https://bytebuddy.net/byte-buddy-agent) (Apache License, Version 2.0) (MIT License) Logstash Logback Encoder (net.logstash.logback:logstash-logback-encoder:6.5 - https://github.com/logstash/logstash-logback-encoder) (The MIT License) JOpt Simple (net.sf.jopt-simple:jopt-simple:5.0.4 - http://jopt-simple.github.io/jopt-simple) - (The Apache Software License, Version 2.0) Apache Kafka (org.apache.kafka:kafka-clients:2.6.0 - https://kafka.apache.org) - (The Apache Software License, Version 2.0) Apache Kafka (org.apache.kafka:kafka_2.12:2.6.0 - https://kafka.apache.org) + (The Apache Software License, Version 2.0) Apache Kafka (org.apache.kafka:kafka-clients:2.7.0 - https://kafka.apache.org) + (The Apache Software License, Version 2.0) Apache Kafka (org.apache.kafka:kafka-raft:2.7.0 - https://kafka.apache.org) + (The Apache Software License, Version 2.0) Apache Kafka (org.apache.kafka:kafka_2.12:2.7.0 - https://kafka.apache.org) (Apache License, Version 2.0) Apache Yetus - Audience Annotations (org.apache.yetus:audience-annotations:0.5.0 - https://yetus.apache.org/audience-annotations) (Apache License, Version 2.0) Apache ZooKeeper - Server (org.apache.zookeeper:zookeeper:3.5.8 - http://zookeeper.apache.org/zookeeper) (Apache License, Version 2.0) Apache ZooKeeper - Jute (org.apache.zookeeper:zookeeper-jute:3.5.8 - http://zookeeper.apache.org/zookeeper-jute) @@ -90,24 +131,27 @@ Lists of 111 third-party dependencies. 
(Apache 2.0) Awaitility (org.awaitility:awaitility:4.0.3 - http://awaitility.org) (GNU General Public License, version 2 (GPL2), with the classpath exception) (The MIT License) Checker Qual (org.checkerframework:checker-compat-qual:2.5.5 - https://checkerframework.org) (BSD Licence 3) Hamcrest (org.hamcrest:hamcrest:2.1 - http://hamcrest.org/JavaHamcrest/) + (New BSD License) Hamcrest Core (org.hamcrest:hamcrest-core:1.3 - https://github.com/hamcrest/JavaHamcrest/hamcrest-core) (BSD-2-Clause) (Public Domain, per Creative Commons CC0) HdrHistogram (org.hdrhistogram:HdrHistogram:2.1.12 - http://hdrhistogram.github.io/HdrHistogram/) (Apache License 2.0) (LGPL 2.1) (MPL 1.1) Javassist (org.javassist:javassist:3.26.0-GA - http://www.javassist.org/) (Eclipse Public License v2.0) JUnit Jupiter (Aggregator) (org.junit.jupiter:junit-jupiter:5.7.0 - https://junit.org/junit5/) (Eclipse Public License v2.0) JUnit Jupiter API (org.junit.jupiter:junit-jupiter-api:5.7.0 - https://junit.org/junit5/) (Eclipse Public License v2.0) JUnit Jupiter Engine (org.junit.jupiter:junit-jupiter-engine:5.7.0 - https://junit.org/junit5/) + (Eclipse Public License v2.0) JUnit Jupiter Migration Support (org.junit.jupiter:junit-jupiter-migrationsupport:5.7.0 - https://junit.org/junit5/) (Eclipse Public License v2.0) JUnit Jupiter Params (org.junit.jupiter:junit-jupiter-params:5.7.0 - https://junit.org/junit5/) (Eclipse Public License v2.0) JUnit Platform Commons (org.junit.platform:junit-platform-commons:1.7.0 - https://junit.org/junit5/) (Eclipse Public License v2.0) JUnit Platform Engine API (org.junit.platform:junit-platform-engine:1.7.0 - https://junit.org/junit5/) (Public Domain, per Creative Commons CC0) LatencyUtils (org.latencyutils:LatencyUtils:2.0.3 - http://latencyutils.github.io/LatencyUtils/) (The Apache Software License, Version 2.0) LZ4 and xxHash (org.lz4:lz4-java:1.7.1 - https://github.com/lz4/lz4-java) - (The MIT License) mockito-core (org.mockito:mockito-core:3.6.28 - https://github.com/mockito/mockito) - (The MIT License) mockito-junit-jupiter (org.mockito:mockito-junit-jupiter:3.6.28 - https://github.com/mockito/mockito) + (The MIT License) mockito-core (org.mockito:mockito-core:3.7.7 - https://github.com/mockito/mockito) + (The MIT License) mockito-junit-jupiter (org.mockito:mockito-junit-jupiter:3.7.7 - https://github.com/mockito/mockito) (Apache License, Version 2.0) Objenesis (org.objenesis:objenesis:3.1 - http://objenesis.org) (The Apache License, Version 2.0) org.opentest4j:opentest4j (org.opentest4j:opentest4j:1.2.0 - https://github.com/ota4j-team/opentest4j) (The New BSD License) (WTFPL) Reflections (org.reflections:reflections:0.9.12 - http://github.com/ronmamo/reflections) - (Apache-2.0) Scala Library (org.scala-lang:scala-library:2.12.11 - https://www.scala-lang.org/) - (Apache-2.0) Scala Compiler (org.scala-lang:scala-reflect:2.12.11 - https://www.scala-lang.org/) - (Apache-2.0) scala-collection-compat (org.scala-lang.modules:scala-collection-compat_2.12:2.1.6 - http://www.scala-lang.org/) + (Apache-2.0) Scala Library (org.scala-lang:scala-library:2.12.12 - https://www.scala-lang.org/) + (Apache-2.0) Scala Compiler (org.scala-lang:scala-reflect:2.12.12 - https://www.scala-lang.org/) + (Apache-2.0) scala-collection-compat (org.scala-lang.modules:scala-collection-compat_2.12:2.2.0 - http://www.scala-lang.org/) (Apache-2.0) scala-java8-compat (org.scala-lang.modules:scala-java8-compat_2.12:0.9.1 - http://www.scala-lang.org/) (MIT License) SLF4J API Module (org.slf4j:slf4j-api:1.7.30 - 
http://www.slf4j.org) - (The Apache Software License, Version 2.0) snappy-java (org.xerial.snappy:snappy-java:1.1.7.3 - https://github.com/xerial/snappy-java) + (Apache-2.0) snappy-java (org.xerial.snappy:snappy-java:1.1.7.7 - https://github.com/xerial/snappy-java) + (Apache License, Version 2.0) SnakeYAML (org.yaml:snakeyaml:1.26 - http://www.snakeyaml.org) diff --git a/data-plane/config/200-data-plane-cluster-role.yaml b/data-plane/config/200-data-plane-cluster-role.yaml new file mode 100644 index 0000000000..3bead0edb8 --- /dev/null +++ b/data-plane/config/200-data-plane-cluster-role.yaml @@ -0,0 +1,29 @@ +--- + +# Copyright 2020 The Knative Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +apiVersion: rbac.authorization.k8s.io/v1 +kind: ClusterRole +metadata: + name: knative-kafka-data-plane + labels: + kafka.eventing.knative.dev/release: devel +rules: + - apiGroups: + - "*" + resources: + - secrets + verbs: + - get diff --git a/data-plane/config/200-data-plane-service-account.yaml b/data-plane/config/200-data-plane-service-account.yaml new file mode 100644 index 0000000000..a1ab5f424d --- /dev/null +++ b/data-plane/config/200-data-plane-service-account.yaml @@ -0,0 +1,23 @@ +--- + +# Copyright 2020 The Knative Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +apiVersion: v1 +kind: ServiceAccount +metadata: + name: knative-kafka-data-plane + namespace: knative-eventing + labels: + kafka.eventing.knative.dev/release: devel diff --git a/data-plane/config/201-data-plane-cluster-role-binding.yaml b/data-plane/config/201-data-plane-cluster-role-binding.yaml new file mode 100644 index 0000000000..07c69ea609 --- /dev/null +++ b/data-plane/config/201-data-plane-cluster-role-binding.yaml @@ -0,0 +1,30 @@ +--- + +# Copyright 2020 The Knative Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +apiVersion: rbac.authorization.k8s.io/v1 +kind: ClusterRoleBinding +metadata: + name: knative-kafka-data-plane + labels: + kafka.eventing.knative.dev/release: devel +subjects: + - kind: ServiceAccount + name: knative-kafka-data-plane + namespace: knative-eventing +roleRef: + kind: ClusterRole + name: knative-kafka-data-plane + apiGroup: rbac.authorization.k8s.io diff --git a/data-plane/config/sink/template/500-receiver.yaml b/data-plane/config/sink/template/500-receiver.yaml index 9ab8eab066..86a429d957 100644 --- a/data-plane/config/sink/template/500-receiver.yaml +++ b/data-plane/config/sink/template/500-receiver.yaml @@ -33,6 +33,7 @@ spec: app: kafka-sink-receiver kafka.eventing.knative.dev/release: devel spec: + serviceAccountName: knative-kafka-data-plane securityContext: runAsNonRoot: true runAsUser: 65532 @@ -101,6 +102,9 @@ spec: value: "false" - name: CONFIG_TRACING_PATH value: "/etc/tracing" + # https://github.com/fabric8io/kubernetes-client/issues/2212 + - name: HTTP2_DISABLE + value: "true" command: - "java" args: diff --git a/data-plane/config/template/500-dispatcher.yaml b/data-plane/config/template/500-dispatcher.yaml index ff08e8ef78..6b5041a607 100644 --- a/data-plane/config/template/500-dispatcher.yaml +++ b/data-plane/config/template/500-dispatcher.yaml @@ -33,6 +33,7 @@ spec: app: kafka-broker-dispatcher kafka.eventing.knative.dev/release: devel spec: + serviceAccountName: knative-kafka-data-plane securityContext: runAsNonRoot: true runAsUser: 65532 @@ -90,6 +91,9 @@ spec: value: "false" - name: CONFIG_TRACING_PATH value: "/etc/tracing" + # https://github.com/fabric8io/kubernetes-client/issues/2212 + - name: HTTP2_DISABLE + value: "true" command: - "java" args: diff --git a/data-plane/config/template/500-receiver.yaml b/data-plane/config/template/500-receiver.yaml index 646c10fa49..6fb2135490 100644 --- a/data-plane/config/template/500-receiver.yaml +++ b/data-plane/config/template/500-receiver.yaml @@ -33,6 +33,7 @@ spec: app: kafka-broker-receiver kafka.eventing.knative.dev/release: devel spec: + serviceAccountName: knative-kafka-data-plane securityContext: runAsNonRoot: true runAsUser: 65532 @@ -101,6 +102,9 @@ spec: value: "false" - name: CONFIG_TRACING_PATH value: "/etc/tracing" + # https://github.com/fabric8io/kubernetes-client/issues/2212 + - name: HTTP2_DISABLE + value: "true" command: - "java" args: diff --git a/data-plane/contract/src/main/java/dev/knative/eventing/kafka/broker/contract/DataPlaneContract.java b/data-plane/contract/src/main/java/dev/knative/eventing/kafka/broker/contract/DataPlaneContract.java index 2eb23fed5f..f368a49b0c 100644 --- a/data-plane/contract/src/main/java/dev/knative/eventing/kafka/broker/contract/DataPlaneContract.java +++ b/data-plane/contract/src/main/java/dev/knative/eventing/kafka/broker/contract/DataPlaneContract.java @@ -4877,6 +4877,1176 @@ public dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Ingress getD } + public interface ReferenceOrBuilder extends + // @@protoc_insertion_point(interface_extends:Reference) + com.google.protobuf.MessageOrBuilder { + + /** + *
+     * Object id.
+     * 
+ * + * string uuid = 1; + * @return The uuid. + */ + java.lang.String getUuid(); + /** + *
+     * Object id.
+     * 
+ * + * string uuid = 1; + * @return The bytes for uuid. + */ + com.google.protobuf.ByteString + getUuidBytes(); + + /** + *
+     * Object namespace.
+     * 
+ * + * string namespace = 2; + * @return The namespace. + */ + java.lang.String getNamespace(); + /** + *
+     * Object namespace.
+     * 
+ * + * string namespace = 2; + * @return The bytes for namespace. + */ + com.google.protobuf.ByteString + getNamespaceBytes(); + + /** + *
+     * Object name.
+     * 
+ * + * string name = 3; + * @return The name. + */ + java.lang.String getName(); + /** + *
+     * Object name.
+     * 
+ * + * string name = 3; + * @return The bytes for name. + */ + com.google.protobuf.ByteString + getNameBytes(); + + /** + *
+     * Object version.
+     * 
+ * + * string version = 4; + * @return The version. + */ + java.lang.String getVersion(); + /** + *
+     * Object version.
+     * 
+ * + * string version = 4; + * @return The bytes for version. + */ + com.google.protobuf.ByteString + getVersionBytes(); + } + /** + *
+   * Kubernetes resource reference.
+   * 
+ * + * Protobuf type {@code Reference} + */ + public static final class Reference extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:Reference) + ReferenceOrBuilder { + private static final long serialVersionUID = 0L; + // Use Reference.newBuilder() to construct. + private Reference(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + private Reference() { + uuid_ = ""; + namespace_ = ""; + name_ = ""; + version_ = ""; + } + + @java.lang.Override + @SuppressWarnings({"unused"}) + protected java.lang.Object newInstance( + UnusedPrivateParameter unused) { + return new Reference(); + } + + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private Reference( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + this(); + if (extensionRegistry == null) { + throw new java.lang.NullPointerException(); + } + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + case 10: { + java.lang.String s = input.readStringRequireUtf8(); + + uuid_ = s; + break; + } + case 18: { + java.lang.String s = input.readStringRequireUtf8(); + + namespace_ = s; + break; + } + case 26: { + java.lang.String s = input.readStringRequireUtf8(); + + name_ = s; + break; + } + case 34: { + java.lang.String s = input.readStringRequireUtf8(); + + version_ = s; + break; + } + default: { + if (!parseUnknownField( + input, unknownFields, extensionRegistry, tag)) { + done = true; + } + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return dev.knative.eventing.kafka.broker.contract.DataPlaneContract.internal_static_Reference_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return dev.knative.eventing.kafka.broker.contract.DataPlaneContract.internal_static_Reference_fieldAccessorTable + .ensureFieldAccessorsInitialized( + dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Reference.class, dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Reference.Builder.class); + } + + public static final int UUID_FIELD_NUMBER = 1; + private volatile java.lang.Object uuid_; + /** + *
+     * Object id.
+     * 
+ * + * string uuid = 1; + * @return The uuid. + */ + @java.lang.Override + public java.lang.String getUuid() { + java.lang.Object ref = uuid_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + uuid_ = s; + return s; + } + } + /** + *
+     * Object id.
+     * 
+ * + * string uuid = 1; + * @return The bytes for uuid. + */ + @java.lang.Override + public com.google.protobuf.ByteString + getUuidBytes() { + java.lang.Object ref = uuid_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + uuid_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + public static final int NAMESPACE_FIELD_NUMBER = 2; + private volatile java.lang.Object namespace_; + /** + *
+     * Object namespace.
+     * 
+ * + * string namespace = 2; + * @return The namespace. + */ + @java.lang.Override + public java.lang.String getNamespace() { + java.lang.Object ref = namespace_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + namespace_ = s; + return s; + } + } + /** + *
+     * Object namespace.
+     * 
+ * + * string namespace = 2; + * @return The bytes for namespace. + */ + @java.lang.Override + public com.google.protobuf.ByteString + getNamespaceBytes() { + java.lang.Object ref = namespace_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + namespace_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + public static final int NAME_FIELD_NUMBER = 3; + private volatile java.lang.Object name_; + /** + *
+     * Object name.
+     * 
+ * + * string name = 3; + * @return The name. + */ + @java.lang.Override + public java.lang.String getName() { + java.lang.Object ref = name_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + name_ = s; + return s; + } + } + /** + *
+     * Object name.
+     * 
+ * + * string name = 3; + * @return The bytes for name. + */ + @java.lang.Override + public com.google.protobuf.ByteString + getNameBytes() { + java.lang.Object ref = name_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + name_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + public static final int VERSION_FIELD_NUMBER = 4; + private volatile java.lang.Object version_; + /** + *
+     * Object version.
+     * 
+ * + * string version = 4; + * @return The version. + */ + @java.lang.Override + public java.lang.String getVersion() { + java.lang.Object ref = version_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + version_ = s; + return s; + } + } + /** + *
+     * Object version.
+     * 
+ * + * string version = 4; + * @return The bytes for version. + */ + @java.lang.Override + public com.google.protobuf.ByteString + getVersionBytes() { + java.lang.Object ref = version_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + version_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + private byte memoizedIsInitialized = -1; + @java.lang.Override + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + @java.lang.Override + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + if (!getUuidBytes().isEmpty()) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 1, uuid_); + } + if (!getNamespaceBytes().isEmpty()) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 2, namespace_); + } + if (!getNameBytes().isEmpty()) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 3, name_); + } + if (!getVersionBytes().isEmpty()) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 4, version_); + } + unknownFields.writeTo(output); + } + + @java.lang.Override + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + if (!getUuidBytes().isEmpty()) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, uuid_); + } + if (!getNamespaceBytes().isEmpty()) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, namespace_); + } + if (!getNameBytes().isEmpty()) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, name_); + } + if (!getVersionBytes().isEmpty()) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(4, version_); + } + size += unknownFields.getSerializedSize(); + memoizedSize = size; + return size; + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Reference)) { + return super.equals(obj); + } + dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Reference other = (dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Reference) obj; + + if (!getUuid() + .equals(other.getUuid())) return false; + if (!getNamespace() + .equals(other.getNamespace())) return false; + if (!getName() + .equals(other.getName())) return false; + if (!getVersion() + .equals(other.getVersion())) return false; + if (!unknownFields.equals(other.unknownFields)) return false; + return true; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptor().hashCode(); + hash = (37 * hash) + UUID_FIELD_NUMBER; + hash = (53 * hash) + getUuid().hashCode(); + hash = (37 * hash) + NAMESPACE_FIELD_NUMBER; + hash = (53 * hash) + getNamespace().hashCode(); + hash = (37 * hash) + NAME_FIELD_NUMBER; + hash = (53 * hash) + getName().hashCode(); + hash = (37 * hash) + VERSION_FIELD_NUMBER; + hash = (53 * hash) + getVersion().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Reference parseFrom( + java.nio.ByteBuffer data) + 
throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Reference parseFrom( + java.nio.ByteBuffer data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Reference parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Reference parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Reference parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Reference parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Reference parseFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Reference parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + public static dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Reference parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); + } + public static dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Reference parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); + } + public static dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Reference parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Reference parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + @java.lang.Override + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + public static Builder newBuilder(dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Reference 
prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + @java.lang.Override + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + *
+     * Kubernetes resource reference.
+     * 
+ * + * Protobuf type {@code Reference} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:Reference) + dev.knative.eventing.kafka.broker.contract.DataPlaneContract.ReferenceOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return dev.knative.eventing.kafka.broker.contract.DataPlaneContract.internal_static_Reference_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return dev.knative.eventing.kafka.broker.contract.DataPlaneContract.internal_static_Reference_fieldAccessorTable + .ensureFieldAccessorsInitialized( + dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Reference.class, dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Reference.Builder.class); + } + + // Construct using dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Reference.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { + } + } + @java.lang.Override + public Builder clear() { + super.clear(); + uuid_ = ""; + + namespace_ = ""; + + name_ = ""; + + version_ = ""; + + return this; + } + + @java.lang.Override + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return dev.knative.eventing.kafka.broker.contract.DataPlaneContract.internal_static_Reference_descriptor; + } + + @java.lang.Override + public dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Reference getDefaultInstanceForType() { + return dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Reference.getDefaultInstance(); + } + + @java.lang.Override + public dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Reference build() { + dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Reference result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + @java.lang.Override + public dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Reference buildPartial() { + dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Reference result = new dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Reference(this); + result.uuid_ = uuid_; + result.namespace_ = namespace_; + result.name_ = name_; + result.version_ = version_; + onBuilt(); + return result; + } + + @java.lang.Override + public Builder clone() { + return super.clone(); + } + @java.lang.Override + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + java.lang.Object value) { + return super.setField(field, value); + } + @java.lang.Override + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return super.clearField(field); + } + @java.lang.Override + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return super.clearOneof(oneof); + } + @java.lang.Override + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, java.lang.Object value) { + return super.setRepeatedField(field, index, value); + } + @java.lang.Override + public Builder 
addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + java.lang.Object value) { + return super.addRepeatedField(field, value); + } + @java.lang.Override + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Reference) { + return mergeFrom((dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Reference)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Reference other) { + if (other == dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Reference.getDefaultInstance()) return this; + if (!other.getUuid().isEmpty()) { + uuid_ = other.uuid_; + onChanged(); + } + if (!other.getNamespace().isEmpty()) { + namespace_ = other.namespace_; + onChanged(); + } + if (!other.getName().isEmpty()) { + name_ = other.name_; + onChanged(); + } + if (!other.getVersion().isEmpty()) { + version_ = other.version_; + onChanged(); + } + this.mergeUnknownFields(other.unknownFields); + onChanged(); + return this; + } + + @java.lang.Override + public final boolean isInitialized() { + return true; + } + + @java.lang.Override + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Reference parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Reference) e.getUnfinishedMessage(); + throw e.unwrapIOException(); + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + + private java.lang.Object uuid_ = ""; + /** + *
+       * Object id.
+       * 
+ * + * string uuid = 1; + * @return The uuid. + */ + public java.lang.String getUuid() { + java.lang.Object ref = uuid_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + uuid_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + *
+       * Object id.
+       * 
+ * + * string uuid = 1; + * @return The bytes for uuid. + */ + public com.google.protobuf.ByteString + getUuidBytes() { + java.lang.Object ref = uuid_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + uuid_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + *
+       * Object id.
+       * 
+ * + * string uuid = 1; + * @param value The uuid to set. + * @return This builder for chaining. + */ + public Builder setUuid( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + + uuid_ = value; + onChanged(); + return this; + } + /** + *
+       * Object id.
+       * 
+ * + * string uuid = 1; + * @return This builder for chaining. + */ + public Builder clearUuid() { + + uuid_ = getDefaultInstance().getUuid(); + onChanged(); + return this; + } + /** + *
+       * Object id.
+       * 
+ * + * string uuid = 1; + * @param value The bytes for uuid to set. + * @return This builder for chaining. + */ + public Builder setUuidBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + + uuid_ = value; + onChanged(); + return this; + } + + private java.lang.Object namespace_ = ""; + /** + *
+       * Object namespace.
+       * 
+ * + * string namespace = 2; + * @return The namespace. + */ + public java.lang.String getNamespace() { + java.lang.Object ref = namespace_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + namespace_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + *
+       * Object namespace.
+       * 
+ * + * string namespace = 2; + * @return The bytes for namespace. + */ + public com.google.protobuf.ByteString + getNamespaceBytes() { + java.lang.Object ref = namespace_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + namespace_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + *
+       * Object namespace.
+       * 
+ * + * string namespace = 2; + * @param value The namespace to set. + * @return This builder for chaining. + */ + public Builder setNamespace( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + + namespace_ = value; + onChanged(); + return this; + } + /** + *
+       * Object namespace.
+       * 
+ * + * string namespace = 2; + * @return This builder for chaining. + */ + public Builder clearNamespace() { + + namespace_ = getDefaultInstance().getNamespace(); + onChanged(); + return this; + } + /** + *
+       * Object namespace.
+       * 
+ * + * string namespace = 2; + * @param value The bytes for namespace to set. + * @return This builder for chaining. + */ + public Builder setNamespaceBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + + namespace_ = value; + onChanged(); + return this; + } + + private java.lang.Object name_ = ""; + /** + *
+       * Object name.
+       * 
+ * + * string name = 3; + * @return The name. + */ + public java.lang.String getName() { + java.lang.Object ref = name_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + name_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + *
+       * Object name.
+       * 
+ * + * string name = 3; + * @return The bytes for name. + */ + public com.google.protobuf.ByteString + getNameBytes() { + java.lang.Object ref = name_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + name_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + *
+       * Object name.
+       * 
+ * + * string name = 3; + * @param value The name to set. + * @return This builder for chaining. + */ + public Builder setName( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + + name_ = value; + onChanged(); + return this; + } + /** + *
+       * Object name.
+       * 
+ * + * string name = 3; + * @return This builder for chaining. + */ + public Builder clearName() { + + name_ = getDefaultInstance().getName(); + onChanged(); + return this; + } + /** + *
+       * Object name.
+       * 
+ * + * string name = 3; + * @param value The bytes for name to set. + * @return This builder for chaining. + */ + public Builder setNameBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + + name_ = value; + onChanged(); + return this; + } + + private java.lang.Object version_ = ""; + /** + *
+       * Object version.
+       * 
+ * + * string version = 4; + * @return The version. + */ + public java.lang.String getVersion() { + java.lang.Object ref = version_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + version_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + *
+       * Object version.
+       * 
+ * + * string version = 4; + * @return The bytes for version. + */ + public com.google.protobuf.ByteString + getVersionBytes() { + java.lang.Object ref = version_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + version_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + *
+       * Object version.
+       * 
+ * + * string version = 4; + * @param value The version to set. + * @return This builder for chaining. + */ + public Builder setVersion( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + + version_ = value; + onChanged(); + return this; + } + /** + *
+       * Object version.
+       * 
+ * + * string version = 4; + * @return This builder for chaining. + */ + public Builder clearVersion() { + + version_ = getDefaultInstance().getVersion(); + onChanged(); + return this; + } + /** + *
+       * Object version.
+       * 
+ * + * string version = 4; + * @param value The bytes for version to set. + * @return This builder for chaining. + */ + public Builder setVersionBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + + version_ = value; + onChanged(); + return this; + } + @java.lang.Override + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + @java.lang.Override + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + + + // @@protoc_insertion_point(builder_scope:Reference) + } + + // @@protoc_insertion_point(class_scope:Reference) + private static final dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Reference DEFAULT_INSTANCE; + static { + DEFAULT_INSTANCE = new dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Reference(); + } + + public static dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Reference getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + @java.lang.Override + public Reference parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new Reference(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + @java.lang.Override + public dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Reference getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + + } + public interface ResourceOrBuilder extends // @@protoc_insertion_point(interface_extends:Resource) com.google.protobuf.MessageOrBuilder { @@ -5071,6 +6241,146 @@ public interface ResourceOrBuilder extends */ dev.knative.eventing.kafka.broker.contract.DataPlaneContract.EgressOrBuilder getEgressesOrBuilder( int index); + + /** + *
+     * No auth configured.
+     * 
+ * + * .google.protobuf.Empty absentAuth = 7; + * @return Whether the absentAuth field is set. + */ + boolean hasAbsentAuth(); + /** + *
+     * No auth configured.
+     * 
+ * + * .google.protobuf.Empty absentAuth = 7; + * @return The absentAuth. + */ + com.google.protobuf.Empty getAbsentAuth(); + /** + *
+     * No auth configured.
+     * 
+ * + * .google.protobuf.Empty absentAuth = 7; + */ + com.google.protobuf.EmptyOrBuilder getAbsentAuthOrBuilder(); + + /** + *
+     * Secret reference.
+     * Secret format:
+     *   protocol: (PLAINTEXT | SASL_PLAINTEXT | SSL | SASL_SSL)
+     *   sasl.mechanism: (SCRAM-SHA-256 | SCRAM-SHA-512)
+     *   ca.crt: <CA PEM certificate>
+     *   user.crt: <User PEM certificate>
+     *   user.key: <User PEM key>
+     *   user: <SASL username>
+     *   password: <SASL password>
+     * Validation:
+     *   - protocol=PLAINTEXT
+     *   - protocol=SSL
+     *     - required:
+     *       - ca.crt
+     *       - user.crt
+     *       - user.key
+     *   - protocol=SASL_PLAINTEXT
+     *     - required:
+     *       - sasl.mechanism
+     *       - user
+     *       - password
+     *   - protocol=SASL_SSL
+     *     - required:
+     *       - sasl.mechanism
+     *       - ca.crt
+     *       - user.crt
+     *       - user.key
+     *       - user
+     *       - password
+     * 
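For illustration only (this sketch is not part of the diff): a Secret that satisfies the SASL_SSL branch of the validation rules documented in the comment above could look like the following. The Secret name and namespace are hypothetical, and the PEM and credential values are placeholders.

apiVersion: v1
kind: Secret
metadata:
  name: my-kafka-auth          # hypothetical name; the contract's authSecret Reference would point at this object
  namespace: knative-eventing  # hypothetical namespace for this example
type: Opaque
stringData:
  protocol: SASL_SSL
  sasl.mechanism: SCRAM-SHA-512
  ca.crt: |
    -----BEGIN CERTIFICATE-----
    <placeholder CA PEM certificate>
    -----END CERTIFICATE-----
  user.crt: |
    -----BEGIN CERTIFICATE-----
    <placeholder user PEM certificate>
    -----END CERTIFICATE-----
  user.key: |
    -----BEGIN PRIVATE KEY-----
    <placeholder user PEM key>
    -----END PRIVATE KEY-----
  user: placeholder-sasl-username
  password: placeholder-sasl-password

Per the validation rules above, the SASL_SSL protocol requires all of sasl.mechanism, ca.crt, user.crt, user.key, user, and password; the other protocols need only the subset listed for them.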
+ * + * .Reference authSecret = 8; + * @return Whether the authSecret field is set. + */ + boolean hasAuthSecret(); + /** + *
+     * Secret reference.
+     * Secret format:
+     *   protocol: (PLAINTEXT | SASL_PLAINTEXT | SSL | SASL_SSL)
+     *   sasl.mechanism: (SCRAM-SHA-256 | SCRAM-SHA-512)
+     *   ca.crt: <CA PEM certificate>
+     *   user.crt: <User PEM certificate>
+     *   user.key: <User PEM key>
+     *   user: <SASL username>
+     *   password: <SASL password>
+     * Validation:
+     *   - protocol=PLAINTEXT
+     *   - protocol=SSL
+     *     - required:
+     *       - ca.crt
+     *       - user.crt
+     *       - user.key
+     *   - protocol=SASL_PLAINTEXT
+     *     - required:
+     *       - sasl.mechanism
+     *       - user
+     *       - password
+     *   - protocol=SASL_SSL
+     *     - required:
+     *       - sasl.mechanism
+     *       - ca.crt
+     *       - user.crt
+     *       - user.key
+     *       - user
+     *       - password
+     * 
+ * + * .Reference authSecret = 8; + * @return The authSecret. + */ + dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Reference getAuthSecret(); + /** + *
+     * Secret reference.
+     * Secret format:
+     *   protocol: (PLAINTEXT | SASL_PLAINTEXT | SSL | SASL_SSL)
+     *   sasl.mechanism: (SCRAM-SHA-256 | SCRAM-SHA-512)
+     *   ca.crt: <CA PEM certificate>
+     *   user.crt: <User PEM certificate>
+     *   user.key: <User PEM key>
+     *   user: <SASL username>
+     *   password: <SASL password>
+     * Validation:
+     *   - protocol=PLAINTEXT
+     *   - protocol=SSL
+     *     - required:
+     *       - ca.crt
+     *       - user.crt
+     *       - user.key
+     *   - protocol=SASL_PLAINTEXT
+     *     - required:
+     *       - sasl.mechanism
+     *       - user
+     *       - password
+     *   - protocol=SASL_SSL
+     *     - required:
+     *       - sasl.mechanism
+     *       - ca.crt
+     *       - user.crt
+     *       - user.key
+     *       - user
+     *       - password
+     * 
+ * + * .Reference authSecret = 8; + */ + dev.knative.eventing.kafka.broker.contract.DataPlaneContract.ReferenceOrBuilder getAuthSecretOrBuilder(); + + public dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Resource.AuthCase getAuthCase(); } /** * Protobuf type {@code Resource} @@ -5178,6 +6488,34 @@ private Resource( input.readMessage(dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Egress.parser(), extensionRegistry)); break; } + case 58: { + com.google.protobuf.Empty.Builder subBuilder = null; + if (authCase_ == 7) { + subBuilder = ((com.google.protobuf.Empty) auth_).toBuilder(); + } + auth_ = + input.readMessage(com.google.protobuf.Empty.parser(), extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom((com.google.protobuf.Empty) auth_); + auth_ = subBuilder.buildPartial(); + } + authCase_ = 7; + break; + } + case 66: { + dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Reference.Builder subBuilder = null; + if (authCase_ == 8) { + subBuilder = ((dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Reference) auth_).toBuilder(); + } + auth_ = + input.readMessage(dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Reference.parser(), extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom((dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Reference) auth_); + auth_ = subBuilder.buildPartial(); + } + authCase_ = 8; + break; + } default: { if (!parseUnknownField( input, unknownFields, extensionRegistry, tag)) { @@ -5216,6 +6554,47 @@ private Resource( dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Resource.class, dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Resource.Builder.class); } + private int authCase_ = 0; + private java.lang.Object auth_; + public enum AuthCase + implements com.google.protobuf.Internal.EnumLite, + com.google.protobuf.AbstractMessage.InternalOneOfEnum { + ABSENTAUTH(7), + AUTHSECRET(8), + AUTH_NOT_SET(0); + private final int value; + private AuthCase(int value) { + this.value = value; + } + /** + * @param value The number of the enum to look for. + * @return The enum associated with the given number. + * @deprecated Use {@link #forNumber(int)} instead. + */ + @java.lang.Deprecated + public static AuthCase valueOf(int value) { + return forNumber(value); + } + + public static AuthCase forNumber(int value) { + switch (value) { + case 7: return ABSENTAUTH; + case 8: return AUTHSECRET; + case 0: return AUTH_NOT_SET; + default: return null; + } + } + public int getNumber() { + return this.value; + } + }; + + public AuthCase + getAuthCase() { + return AuthCase.forNumber( + authCase_); + } + public static final int UID_FIELD_NUMBER = 1; private volatile java.lang.Object uid_; /** @@ -5480,31 +6859,201 @@ public java.util.Listrepeated .Egress egresses = 6; */ @java.lang.Override - public int getEgressesCount() { - return egresses_.size(); + public int getEgressesCount() { + return egresses_.size(); + } + /** + *
+     * Optional egresses for this topic
+     * 
+ * + * repeated .Egress egresses = 6; + */ + @java.lang.Override + public dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Egress getEgresses(int index) { + return egresses_.get(index); + } + /** + *
+     * Optional egresses for this topic
+     * 
+ * + * repeated .Egress egresses = 6; + */ + @java.lang.Override + public dev.knative.eventing.kafka.broker.contract.DataPlaneContract.EgressOrBuilder getEgressesOrBuilder( + int index) { + return egresses_.get(index); + } + + public static final int ABSENTAUTH_FIELD_NUMBER = 7; + /** + *
+     * No auth configured.
+     * 
+ * + * .google.protobuf.Empty absentAuth = 7; + * @return Whether the absentAuth field is set. + */ + @java.lang.Override + public boolean hasAbsentAuth() { + return authCase_ == 7; + } + /** + *
+     * No auth configured.
+     * 
+ * + * .google.protobuf.Empty absentAuth = 7; + * @return The absentAuth. + */ + @java.lang.Override + public com.google.protobuf.Empty getAbsentAuth() { + if (authCase_ == 7) { + return (com.google.protobuf.Empty) auth_; + } + return com.google.protobuf.Empty.getDefaultInstance(); + } + /** + *
+     * No auth configured.
+     * 
+ * + * .google.protobuf.Empty absentAuth = 7; + */ + @java.lang.Override + public com.google.protobuf.EmptyOrBuilder getAbsentAuthOrBuilder() { + if (authCase_ == 7) { + return (com.google.protobuf.Empty) auth_; + } + return com.google.protobuf.Empty.getDefaultInstance(); + } + + public static final int AUTHSECRET_FIELD_NUMBER = 8; + /** + *
+     * Secret reference.
+     * Secret format:
+     *   protocol: (PLAINTEXT | SASL_PLAINTEXT | SSL | SASL_SSL)
+     *   sasl.mechanism: (SCRAM-SHA-256 | SCRAM-SHA-512)
+     *   ca.crt: <CA PEM certificate>
+     *   user.crt: <User PEM certificate>
+     *   user.key: <User PEM key>
+     *   user: <SASL username>
+     *   password: <SASL password>
+     * Validation:
+     *   - protocol=PLAINTEXT
+     *   - protocol=SSL
+     *     - required:
+     *       - ca.crt
+     *       - user.crt
+     *       - user.key
+     *   - protocol=SASL_PLAINTEXT
+     *     - required:
+     *       - sasl.mechanism
+     *       - user
+     *       - password
+     *   - protocol=SASL_SSL
+     *     - required:
+     *       - sasl.mechanism
+     *       - ca.crt
+     *       - user.crt
+     *       - user.key
+     *       - user
+     *       - password
+     * 
+ * + * .Reference authSecret = 8; + * @return Whether the authSecret field is set. + */ + @java.lang.Override + public boolean hasAuthSecret() { + return authCase_ == 8; } /** *
-     * Optional egresses for this topic
+     * Secret reference.
+     * Secret format:
+     *   protocol: (PLAINTEXT | SASL_PLAINTEXT | SSL | SASL_SSL)
+     *   sasl.mechanism: (SCRAM-SHA-256 | SCRAM-SHA-512)
+     *   ca.crt: <CA PEM certificate>
+     *   user.crt: <User PEM certificate>
+     *   user.key: <User PEM key>
+     *   user: <SASL username>
+     *   password: <SASL password>
+     * Validation:
+     *   - protocol=PLAINTEXT
+     *   - protocol=SSL
+     *     - required:
+     *       - ca.crt
+     *       - user.crt
+     *       - user.key
+     *   - protocol=SASL_PLAINTEXT
+     *     - required:
+     *       - sasl.mechanism
+     *       - user
+     *       - password
+     *   - protocol=SASL_SSL
+     *     - required:
+     *       - sasl.mechanism
+     *       - ca.crt
+     *       - user.crt
+     *       - user.key
+     *       - user
+     *       - password
      * 
* - * repeated .Egress egresses = 6; + * .Reference authSecret = 8; + * @return The authSecret. */ @java.lang.Override - public dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Egress getEgresses(int index) { - return egresses_.get(index); + public dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Reference getAuthSecret() { + if (authCase_ == 8) { + return (dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Reference) auth_; + } + return dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Reference.getDefaultInstance(); } /** *
-     * Optional egresses for this topic
+     * Secret reference.
+     * Secret format:
+     *   protocol: (PLAINTEXT | SASL_PLAINTEXT | SSL | SASL_SSL)
+     *   sasl.mechanism: (SCRAM-SHA-256 | SCRAM-SHA-512)
+     *   ca.crt: <CA PEM certificate>
+     *   user.crt: <User PEM certificate>
+     *   user.key: <User PEM key>
+     *   user: <SASL username>
+     *   password: <SASL password>
+     * Validation:
+     *   - protocol=PLAINTEXT
+     *   - protocol=SSL
+     *     - required:
+     *       - ca.crt
+     *       - user.crt
+     *       - user.key
+     *   - protocol=SASL_PLAINTEXT
+     *     - required:
+     *       - sasl.mechanism
+     *       - user
+     *       - password
+     *   - protocol=SASL_SSL
+     *     - required:
+     *       - sasl.mechanism
+     *       - ca.crt
+     *       - user.crt
+     *       - user.key
+     *       - user
+     *       - password
      * 
* - * repeated .Egress egresses = 6; + * .Reference authSecret = 8; */ @java.lang.Override - public dev.knative.eventing.kafka.broker.contract.DataPlaneContract.EgressOrBuilder getEgressesOrBuilder( - int index) { - return egresses_.get(index); + public dev.knative.eventing.kafka.broker.contract.DataPlaneContract.ReferenceOrBuilder getAuthSecretOrBuilder() { + if (authCase_ == 8) { + return (dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Reference) auth_; + } + return dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Reference.getDefaultInstance(); } private byte memoizedIsInitialized = -1; @@ -5539,6 +7088,12 @@ public void writeTo(com.google.protobuf.CodedOutputStream output) for (int i = 0; i < egresses_.size(); i++) { output.writeMessage(6, egresses_.get(i)); } + if (authCase_ == 7) { + output.writeMessage(7, (com.google.protobuf.Empty) auth_); + } + if (authCase_ == 8) { + output.writeMessage(8, (dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Reference) auth_); + } unknownFields.writeTo(output); } @@ -5574,6 +7129,14 @@ public int getSerializedSize() { size += com.google.protobuf.CodedOutputStream .computeMessageSize(6, egresses_.get(i)); } + if (authCase_ == 7) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(7, (com.google.protobuf.Empty) auth_); + } + if (authCase_ == 8) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(8, (dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Reference) auth_); + } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; @@ -5607,6 +7170,19 @@ public boolean equals(final java.lang.Object obj) { } if (!getEgressesList() .equals(other.getEgressesList())) return false; + if (!getAuthCase().equals(other.getAuthCase())) return false; + switch (authCase_) { + case 7: + if (!getAbsentAuth() + .equals(other.getAbsentAuth())) return false; + break; + case 8: + if (!getAuthSecret() + .equals(other.getAuthSecret())) return false; + break; + case 0: + default: + } if (!unknownFields.equals(other.unknownFields)) return false; return true; } @@ -5638,6 +7214,18 @@ public int hashCode() { hash = (37 * hash) + EGRESSES_FIELD_NUMBER; hash = (53 * hash) + getEgressesList().hashCode(); } + switch (authCase_) { + case 7: + hash = (37 * hash) + ABSENTAUTH_FIELD_NUMBER; + hash = (53 * hash) + getAbsentAuth().hashCode(); + break; + case 8: + hash = (37 * hash) + AUTHSECRET_FIELD_NUMBER; + hash = (53 * hash) + getAuthSecret().hashCode(); + break; + case 0: + default: + } hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; @@ -5796,6 +7384,8 @@ public Builder clear() { } else { egressesBuilder_.clear(); } + authCase_ = 0; + auth_ = null; return this; } @@ -5849,6 +7439,21 @@ public dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Resource bui } else { result.egresses_ = egressesBuilder_.build(); } + if (authCase_ == 7) { + if (absentAuthBuilder_ == null) { + result.auth_ = auth_; + } else { + result.auth_ = absentAuthBuilder_.build(); + } + } + if (authCase_ == 8) { + if (authSecretBuilder_ == null) { + result.auth_ = auth_; + } else { + result.auth_ = authSecretBuilder_.build(); + } + } + result.authCase_ = authCase_; onBuilt(); return result; } @@ -5947,6 +7552,19 @@ public Builder mergeFrom(dev.knative.eventing.kafka.broker.contract.DataPlaneCon } } } + switch (other.getAuthCase()) { + case ABSENTAUTH: { + mergeAbsentAuth(other.getAbsentAuth()); + break; + } + case AUTHSECRET: { + 
mergeAuthSecret(other.getAuthSecret()); + break; + } + case AUTH_NOT_SET: { + break; + } + } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; @@ -5975,6 +7593,21 @@ public Builder mergeFrom( } return this; } + private int authCase_ = 0; + private java.lang.Object auth_; + public AuthCase + getAuthCase() { + return AuthCase.forNumber( + authCase_); + } + + public Builder clearAuth() { + authCase_ = 0; + auth_ = null; + onChanged(); + return this; + } + private int bitField0_; private java.lang.Object uid_ = ""; @@ -6964,6 +8597,612 @@ public dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Egress.Build } return egressesBuilder_; } + + private com.google.protobuf.SingleFieldBuilderV3< + com.google.protobuf.Empty, com.google.protobuf.Empty.Builder, com.google.protobuf.EmptyOrBuilder> absentAuthBuilder_; + /** + *
+       * No auth configured.
+       * 
+ * + * .google.protobuf.Empty absentAuth = 7; + * @return Whether the absentAuth field is set. + */ + @java.lang.Override + public boolean hasAbsentAuth() { + return authCase_ == 7; + } + /** + *
+       * No auth configured.
+       * 
+ * + * .google.protobuf.Empty absentAuth = 7; + * @return The absentAuth. + */ + @java.lang.Override + public com.google.protobuf.Empty getAbsentAuth() { + if (absentAuthBuilder_ == null) { + if (authCase_ == 7) { + return (com.google.protobuf.Empty) auth_; + } + return com.google.protobuf.Empty.getDefaultInstance(); + } else { + if (authCase_ == 7) { + return absentAuthBuilder_.getMessage(); + } + return com.google.protobuf.Empty.getDefaultInstance(); + } + } + /** + *
+       * No auth configured.
+       * 
+ * + * .google.protobuf.Empty absentAuth = 7; + */ + public Builder setAbsentAuth(com.google.protobuf.Empty value) { + if (absentAuthBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + auth_ = value; + onChanged(); + } else { + absentAuthBuilder_.setMessage(value); + } + authCase_ = 7; + return this; + } + /** + *
+       * No auth configured.
+       * 
+ * + * .google.protobuf.Empty absentAuth = 7; + */ + public Builder setAbsentAuth( + com.google.protobuf.Empty.Builder builderForValue) { + if (absentAuthBuilder_ == null) { + auth_ = builderForValue.build(); + onChanged(); + } else { + absentAuthBuilder_.setMessage(builderForValue.build()); + } + authCase_ = 7; + return this; + } + /** + *
+       * No auth configured.
+       * 
+ * + * .google.protobuf.Empty absentAuth = 7; + */ + public Builder mergeAbsentAuth(com.google.protobuf.Empty value) { + if (absentAuthBuilder_ == null) { + if (authCase_ == 7 && + auth_ != com.google.protobuf.Empty.getDefaultInstance()) { + auth_ = com.google.protobuf.Empty.newBuilder((com.google.protobuf.Empty) auth_) + .mergeFrom(value).buildPartial(); + } else { + auth_ = value; + } + onChanged(); + } else { + if (authCase_ == 7) { + absentAuthBuilder_.mergeFrom(value); + } + absentAuthBuilder_.setMessage(value); + } + authCase_ = 7; + return this; + } + /** + *
+       * No auth configured.
+       * 
+ * + * .google.protobuf.Empty absentAuth = 7; + */ + public Builder clearAbsentAuth() { + if (absentAuthBuilder_ == null) { + if (authCase_ == 7) { + authCase_ = 0; + auth_ = null; + onChanged(); + } + } else { + if (authCase_ == 7) { + authCase_ = 0; + auth_ = null; + } + absentAuthBuilder_.clear(); + } + return this; + } + /** + *
+       * No auth configured.
+       * 
+ * + * .google.protobuf.Empty absentAuth = 7; + */ + public com.google.protobuf.Empty.Builder getAbsentAuthBuilder() { + return getAbsentAuthFieldBuilder().getBuilder(); + } + /** + *
+       * No auth configured.
+       * 
+ * + * .google.protobuf.Empty absentAuth = 7; + */ + @java.lang.Override + public com.google.protobuf.EmptyOrBuilder getAbsentAuthOrBuilder() { + if ((authCase_ == 7) && (absentAuthBuilder_ != null)) { + return absentAuthBuilder_.getMessageOrBuilder(); + } else { + if (authCase_ == 7) { + return (com.google.protobuf.Empty) auth_; + } + return com.google.protobuf.Empty.getDefaultInstance(); + } + } + /** + *
+       * No auth configured.
+       * 
+ * + * .google.protobuf.Empty absentAuth = 7; + */ + private com.google.protobuf.SingleFieldBuilderV3< + com.google.protobuf.Empty, com.google.protobuf.Empty.Builder, com.google.protobuf.EmptyOrBuilder> + getAbsentAuthFieldBuilder() { + if (absentAuthBuilder_ == null) { + if (!(authCase_ == 7)) { + auth_ = com.google.protobuf.Empty.getDefaultInstance(); + } + absentAuthBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< + com.google.protobuf.Empty, com.google.protobuf.Empty.Builder, com.google.protobuf.EmptyOrBuilder>( + (com.google.protobuf.Empty) auth_, + getParentForChildren(), + isClean()); + auth_ = null; + } + authCase_ = 7; + onChanged();; + return absentAuthBuilder_; + } + + private com.google.protobuf.SingleFieldBuilderV3< + dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Reference, dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Reference.Builder, dev.knative.eventing.kafka.broker.contract.DataPlaneContract.ReferenceOrBuilder> authSecretBuilder_; + /** + *
+       * Secret reference.
+       * Secret format:
+       *   protocol: (PLAINTEXT | SASL_PLAINTEXT | SSL | SASL_SSL)
+       *   sasl.mechanism: (SCRAM-SHA-256 | SCRAM-SHA-512)
+       *   ca.crt: <CA PEM certificate>
+       *   user.crt: <User PEM certificate>
+       *   user.key: <User PEM key>
+       *   user: <SASL username>
+       *   password: <SASL password>
+       * Validation:
+       *   - protocol=PLAINTEXT
+       *   - protocol=SSL
+       *     - required:
+       *       - ca.crt
+       *       - user.crt
+       *       - user.key
+       *   - protocol=SASL_PLAINTEXT
+       *     - required:
+       *       - sasl.mechanism
+       *       - user
+       *       - password
+       *   - protocol=SASL_SSL
+       *     - required:
+       *       - sasl.mechanism
+       *       - ca.crt
+       *       - user.crt
+       *       - user.key
+       *       - user
+       *       - password
+       * 
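+       * For illustration (assuming the layout above), the decoded data of a
+       * SASL_SSL secret could look like:
+       *   protocol: SASL_SSL
+       *   sasl.mechanism: SCRAM-SHA-512
+       *   ca.crt: <CA PEM certificate>
+       *   user.crt: <User PEM certificate>
+       *   user.key: <User PEM key>
+       *   user: alice
+       *   password: alice-secret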
+ * + * .Reference authSecret = 8; + * @return Whether the authSecret field is set. + */ + @java.lang.Override + public boolean hasAuthSecret() { + return authCase_ == 8; + } + /** + *
+       * Secret reference.
+       * Secret format:
+       *   protocol: (PLAINTEXT | SASL_PLAINTEXT | SSL | SASL_SSL)
+       *   sasl.mechanism: (SCRAM-SHA-256 | SCRAM-SHA-512)
+       *   ca.crt: <CA PEM certificate>
+       *   user.crt: <User PEM certificate>
+       *   user.key: <User PEM key>
+       *   user: <SASL username>
+       *   password: <SASL password>
+       * Validation:
+       *   - protocol=PLAINTEXT
+       *   - protocol=SSL
+       *     - required:
+       *       - ca.crt
+       *       - user.crt
+       *       - user.key
+       *   - protocol=SASL_PLAINTEXT
+       *     - required:
+       *       - sasl.mechanism
+       *       - user
+       *       - password
+       *   - protocol=SASL_SSL
+       *     - required:
+       *       - sasl.mechanism
+       *       - ca.crt
+       *       - user.crt
+       *       - user.key
+       *       - user
+       *       - password
+       * 
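+       * The data plane performs a corresponding check in CredentialsValidator,
+       * which returns null for a valid secret and a descriptive error otherwise.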
+ * + * .Reference authSecret = 8; + * @return The authSecret. + */ + @java.lang.Override + public dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Reference getAuthSecret() { + if (authSecretBuilder_ == null) { + if (authCase_ == 8) { + return (dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Reference) auth_; + } + return dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Reference.getDefaultInstance(); + } else { + if (authCase_ == 8) { + return authSecretBuilder_.getMessage(); + } + return dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Reference.getDefaultInstance(); + } + } + /** + *
+       * Secret reference.
+       * Secret format:
+       *   protocol: (PLAINTEXT | SASL_PLAINTEXT | SSL | SASL_SSL)
+       *   sasl.mechanism: (SCRAM-SHA-256 | SCRAM-SHA-512)
+       *   ca.crt: <CA PEM certificate>
+       *   user.crt: <User PEM certificate>
+       *   user.key: <User PEM key>
+       *   user: <SASL username>
+       *   password: <SASL password>
+       * Validation:
+       *   - protocol=PLAINTEXT
+       *   - protocol=SSL
+       *     - required:
+       *       - ca.crt
+       *       - user.crt
+       *       - user.key
+       *   - protocol=SASL_PLAINTEXT
+       *     - required:
+       *       - sasl.mechanism
+       *       - user
+       *       - password
+       *   - protocol=SASL_SSL
+       *     - required:
+       *       - sasl.mechanism
+       *       - ca.crt
+       *       - user.crt
+       *       - user.key
+       *       - user
+       *       - password
+       * 
+ * + * .Reference authSecret = 8; + */ + public Builder setAuthSecret(dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Reference value) { + if (authSecretBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + auth_ = value; + onChanged(); + } else { + authSecretBuilder_.setMessage(value); + } + authCase_ = 8; + return this; + } + /** + *
+       * Secret reference.
+       * Secret format:
+       *   protocol: (PLAINTEXT | SASL_PLAINTEXT | SSL | SASL_SSL)
+       *   sasl.mechanism: (SCRAM-SHA-256 | SCRAM-SHA-512)
+       *   ca.crt: <CA PEM certificate>
+       *   user.crt: <User PEM certificate>
+       *   user.key: <User PEM key>
+       *   user: <SASL username>
+       *   password: <SASL password>
+       * Validation:
+       *   - protocol=PLAINTEXT
+       *   - protocol=SSL
+       *     - required:
+       *       - ca.crt
+       *       - user.crt
+       *       - user.key
+       *   - protocol=SASL_PLAINTEXT
+       *     - required:
+       *       - sasl.mechanism
+       *       - user
+       *       - password
+       *   - protocol=SASL_SSL
+       *     - required:
+       *       - sasl.mechanism
+       *       - ca.crt
+       *       - user.crt
+       *       - user.key
+       *       - user
+       *       - password
+       * 
+ * + * .Reference authSecret = 8; + */ + public Builder setAuthSecret( + dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Reference.Builder builderForValue) { + if (authSecretBuilder_ == null) { + auth_ = builderForValue.build(); + onChanged(); + } else { + authSecretBuilder_.setMessage(builderForValue.build()); + } + authCase_ = 8; + return this; + } + /** + *
+       * Secret reference.
+       * Secret format:
+       *   protocol: (PLAINTEXT | SASL_PLAINTEXT | SSL | SASL_SSL)
+       *   sasl.mechanism: (SCRAM-SHA-256 | SCRAM-SHA-512)
+       *   ca.crt: <CA PEM certificate>
+       *   user.crt: <User PEM certificate>
+       *   user.key: <User PEM key>
+       *   user: <SASL username>
+       *   password: <SASL password>
+       * Validation:
+       *   - protocol=PLAINTEXT
+       *   - protocol=SSL
+       *     - required:
+       *       - ca.crt
+       *       - user.crt
+       *       - user.key
+       *   - protocol=SASL_PLAINTEXT
+       *     - required:
+       *       - sasl.mechanism
+       *       - user
+       *       - password
+       *   - protocol=SASL_SSL
+       *     - required:
+       *       - sasl.mechanism
+       *       - ca.crt
+       *       - user.crt
+       *       - user.key
+       *       - user
+       *       - password
+       * 
+ * + * .Reference authSecret = 8; + */ + public Builder mergeAuthSecret(dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Reference value) { + if (authSecretBuilder_ == null) { + if (authCase_ == 8 && + auth_ != dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Reference.getDefaultInstance()) { + auth_ = dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Reference.newBuilder((dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Reference) auth_) + .mergeFrom(value).buildPartial(); + } else { + auth_ = value; + } + onChanged(); + } else { + if (authCase_ == 8) { + authSecretBuilder_.mergeFrom(value); + } + authSecretBuilder_.setMessage(value); + } + authCase_ = 8; + return this; + } + /** + *
+       * Secret reference.
+       * Secret format:
+       *   protocol: (PLAINTEXT | SASL_PLAINTEXT | SSL | SASL_SSL)
+       *   sasl.mechanism: (SCRAM-SHA-256 | SCRAM-SHA-512)
+       *   ca.crt: <CA PEM certificate>
+       *   user.crt: <User PEM certificate>
+       *   user.key: <User PEM key>
+       *   user: <SASL username>
+       *   password: <SASL password>
+       * Validation:
+       *   - protocol=PLAINTEXT
+       *   - protocol=SSL
+       *     - required:
+       *       - ca.crt
+       *       - user.crt
+       *       - user.key
+       *   - protocol=SASL_PLAINTEXT
+       *     - required:
+       *       - sasl.mechanism
+       *       - user
+       *       - password
+       *   - protocol=SASL_SSL
+       *     - required:
+       *       - sasl.mechanism
+       *       - ca.crt
+       *       - user.crt
+       *       - user.key
+       *       - user
+       *       - password
+       * 
+ * + * .Reference authSecret = 8; + */ + public Builder clearAuthSecret() { + if (authSecretBuilder_ == null) { + if (authCase_ == 8) { + authCase_ = 0; + auth_ = null; + onChanged(); + } + } else { + if (authCase_ == 8) { + authCase_ = 0; + auth_ = null; + } + authSecretBuilder_.clear(); + } + return this; + } + /** + *
+       * Secret reference.
+       * Secret format:
+       *   protocol: (PLAINTEXT | SASL_PLAINTEXT | SSL | SASL_SSL)
+       *   sasl.mechanism: (SCRAM-SHA-256 | SCRAM-SHA-512)
+       *   ca.crt: <CA PEM certificate>
+       *   user.crt: <User PEM certificate>
+       *   user.key: <User PEM key>
+       *   user: <SASL username>
+       *   password: <SASL password>
+       * Validation:
+       *   - protocol=PLAINTEXT
+       *   - protocol=SSL
+       *     - required:
+       *       - ca.crt
+       *       - user.crt
+       *       - user.key
+       *   - protocol=SASL_PLAINTEXT
+       *     - required:
+       *       - sasl.mechanism
+       *       - user
+       *       - password
+       *   - protocol=SASL_SSL
+       *     - required:
+       *       - sasl.mechanism
+       *       - ca.crt
+       *       - user.crt
+       *       - user.key
+       *       - user
+       *       - password
+       * 
+ * + * .Reference authSecret = 8; + */ + public dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Reference.Builder getAuthSecretBuilder() { + return getAuthSecretFieldBuilder().getBuilder(); + } + /** + *
+       * Secret reference.
+       * Secret format:
+       *   protocol: (PLAINTEXT | SASL_PLAINTEXT | SSL | SASL_SSL)
+       *   sasl.mechanism: (SCRAM-SHA-256 | SCRAM-SHA-512)
+       *   ca.crt: <CA PEM certificate>
+       *   user.crt: <User PEM certificate>
+       *   user.key: <User PEM key>
+       *   user: <SASL username>
+       *   password: <SASL password>
+       * Validation:
+       *   - protocol=PLAINTEXT
+       *   - protocol=SSL
+       *     - required:
+       *       - ca.crt
+       *       - user.crt
+       *       - user.key
+       *   - protocol=SASL_PLAINTEXT
+       *     - required:
+       *       - sasl.mechanism
+       *       - user
+       *       - password
+       *   - protocol=SASL_SSL
+       *     - required:
+       *       - sasl.mechanism
+       *       - ca.crt
+       *       - user.crt
+       *       - user.key
+       *       - user
+       *       - password
+       * 
+ * + * .Reference authSecret = 8; + */ + @java.lang.Override + public dev.knative.eventing.kafka.broker.contract.DataPlaneContract.ReferenceOrBuilder getAuthSecretOrBuilder() { + if ((authCase_ == 8) && (authSecretBuilder_ != null)) { + return authSecretBuilder_.getMessageOrBuilder(); + } else { + if (authCase_ == 8) { + return (dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Reference) auth_; + } + return dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Reference.getDefaultInstance(); + } + } + /** + *
+       * Secret reference.
+       * Secret format:
+       *   protocol: (PLAINTEXT | SASL_PLAINTEXT | SSL | SASL_SSL)
+       *   sasl.mechanism: (SCRAM-SHA-256 | SCRAM-SHA-512)
+       *   ca.crt: <CA PEM certificate>
+       *   user.crt: <User PEM certificate>
+       *   user.key: <User PEM key>
+       *   user: <SASL username>
+       *   password: <SASL password>
+       * Validation:
+       *   - protocol=PLAINTEXT
+       *   - protocol=SSL
+       *     - required:
+       *       - ca.crt
+       *       - user.crt
+       *       - user.key
+       *   - protocol=SASL_PLAINTEXT
+       *     - required:
+       *       - sasl.mechanism
+       *       - user
+       *       - password
+       *   - protocol=SASL_SSL
+       *     - required:
+       *       - sasl.mechanism
+       *       - ca.crt
+       *       - user.crt
+       *       - user.key
+       *       - user
+       *       - password
+       * 
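+       * For reference, a valid secret is translated by KafkaClientsAuth into
+       * Kafka client settings: security.protocol, PEM-typed
+       * ssl.truststore.certificates, ssl.keystore.certificate.chain,
+       * ssl.keystore.key, sasl.mechanism and sasl.jaas.config.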
+ * + * .Reference authSecret = 8; + */ + private com.google.protobuf.SingleFieldBuilderV3< + dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Reference, dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Reference.Builder, dev.knative.eventing.kafka.broker.contract.DataPlaneContract.ReferenceOrBuilder> + getAuthSecretFieldBuilder() { + if (authSecretBuilder_ == null) { + if (!(authCase_ == 8)) { + auth_ = dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Reference.getDefaultInstance(); + } + authSecretBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< + dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Reference, dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Reference.Builder, dev.knative.eventing.kafka.broker.contract.DataPlaneContract.ReferenceOrBuilder>( + (dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Reference) auth_, + getParentForChildren(), + isClean()); + auth_ = null; + } + authCase_ = 8; + onChanged();; + return authSecretBuilder_; + } @java.lang.Override public final Builder setUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { @@ -7931,6 +10170,11 @@ public dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Contract get private static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_Ingress_fieldAccessorTable; + private static final com.google.protobuf.Descriptors.Descriptor + internal_static_Reference_descriptor; + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_Reference_fieldAccessorTable; private static final com.google.protobuf.Descriptors.Descriptor internal_static_Resource_descriptor; private static final @@ -7963,18 +10207,22 @@ public dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Contract get "tyH\000\022\027\n\006filter\030\005 \001(\0132\007.Filter\022\013\n\003uid\030\006 \001" + "(\tB\017\n\rreplyStrategy\"[\n\007Ingress\022!\n\013conten" + "tMode\030\001 \001(\0162\014.ContentMode\022\016\n\004path\030\002 \001(\tH" + - "\000\022\016\n\004host\030\003 \001(\tH\000B\r\n\013ingressType\"\234\001\n\010Res" + - "ource\022\013\n\003uid\030\001 \001(\t\022\016\n\006topics\030\002 \003(\t\022\030\n\020bo" + - "otstrapServers\030\003 \001(\t\022\031\n\007ingress\030\004 \001(\0132\010." 
+ - "Ingress\022#\n\014egressConfig\030\005 \001(\0132\r.EgressCo" + - "nfig\022\031\n\010egresses\030\006 \003(\0132\007.Egress\"<\n\010Contr" + - "act\022\022\n\ngeneration\030\001 \001(\004\022\034\n\tresources\030\002 \003" + - "(\0132\t.Resource*,\n\rBackoffPolicy\022\017\n\013Expone" + - "ntial\020\000\022\n\n\006Linear\020\001*)\n\013ContentMode\022\n\n\006BI" + - "NARY\020\000\022\016\n\nSTRUCTURED\020\001B[\n*dev.knative.ev" + - "enting.kafka.broker.contractB\021DataPlaneC" + - "ontractZ\032control-plane/pkg/contractb\006pro" + - "to3" + "\000\022\016\n\004host\030\003 \001(\tH\000B\r\n\013ingressType\"K\n\tRefe" + + "rence\022\014\n\004uuid\030\001 \001(\t\022\021\n\tnamespace\030\002 \001(\t\022\014" + + "\n\004name\030\003 \001(\t\022\017\n\007version\030\004 \001(\t\"\364\001\n\010Resour" + + "ce\022\013\n\003uid\030\001 \001(\t\022\016\n\006topics\030\002 \003(\t\022\030\n\020boots" + + "trapServers\030\003 \001(\t\022\031\n\007ingress\030\004 \001(\0132\010.Ing" + + "ress\022#\n\014egressConfig\030\005 \001(\0132\r.EgressConfi" + + "g\022\031\n\010egresses\030\006 \003(\0132\007.Egress\022,\n\nabsentAu" + + "th\030\007 \001(\0132\026.google.protobuf.EmptyH\000\022 \n\nau" + + "thSecret\030\010 \001(\0132\n.ReferenceH\000B\006\n\004Auth\"<\n\010" + + "Contract\022\022\n\ngeneration\030\001 \001(\004\022\034\n\tresource" + + "s\030\002 \003(\0132\t.Resource*,\n\rBackoffPolicy\022\017\n\013E" + + "xponential\020\000\022\n\n\006Linear\020\001*)\n\013ContentMode\022" + + "\n\n\006BINARY\020\000\022\016\n\nSTRUCTURED\020\001B[\n*dev.knati" + + "ve.eventing.kafka.broker.contractB\021DataP" + + "laneContractZ\032control-plane/pkg/contract" + + "b\006proto3" }; descriptor = com.google.protobuf.Descriptors.FileDescriptor .internalBuildGeneratedFileFrom(descriptorData, @@ -8011,14 +10259,20 @@ public dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Contract get com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_Ingress_descriptor, new java.lang.String[] { "ContentMode", "Path", "Host", "IngressType", }); - internal_static_Resource_descriptor = + internal_static_Reference_descriptor = getDescriptor().getMessageTypes().get(4); + internal_static_Reference_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_Reference_descriptor, + new java.lang.String[] { "Uuid", "Namespace", "Name", "Version", }); + internal_static_Resource_descriptor = + getDescriptor().getMessageTypes().get(5); internal_static_Resource_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_Resource_descriptor, - new java.lang.String[] { "Uid", "Topics", "BootstrapServers", "Ingress", "EgressConfig", "Egresses", }); + new java.lang.String[] { "Uid", "Topics", "BootstrapServers", "Ingress", "EgressConfig", "Egresses", "AbsentAuth", "AuthSecret", "Auth", }); internal_static_Contract_descriptor = - getDescriptor().getMessageTypes().get(5); + getDescriptor().getMessageTypes().get(6); internal_static_Contract_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_Contract_descriptor, diff --git a/data-plane/core/pom.xml b/data-plane/core/pom.xml index 0ccee6e7c0..88ed51148a 100644 --- a/data-plane/core/pom.xml +++ b/data-plane/core/pom.xml @@ -17,8 +17,8 @@ --> + xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" + xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd"> data-plane 
dev.knative.eventing.kafka.broker @@ -35,6 +35,10 @@ ${project.version} + + org.apache.kafka + kafka-clients + io.vertx vertx-kafka-client @@ -44,6 +48,16 @@ vertx-web-client + + io.fabric8 + kubernetes-client + + + io.fabric8 + kubernetes-server-mock + test + + com.fasterxml.jackson.core jackson-core @@ -132,11 +146,27 @@ protobuf-java-util + + io.cloudevents + cloudevents-kafka + test + + + org.apache.kafka + kafka-clients + + + org.junit.jupiter junit-jupiter test + + org.junit.jupiter + junit-jupiter-migrationsupport + test + io.vertx vertx-junit5 diff --git a/data-plane/core/src/main/java/dev/knative/eventing/kafka/broker/core/reconciler/impl/ResourcesReconcilerImpl.java b/data-plane/core/src/main/java/dev/knative/eventing/kafka/broker/core/reconciler/impl/ResourcesReconcilerImpl.java index ace1267a67..348f956c3d 100644 --- a/data-plane/core/src/main/java/dev/knative/eventing/kafka/broker/core/reconciler/impl/ResourcesReconcilerImpl.java +++ b/data-plane/core/src/main/java/dev/knative/eventing/kafka/broker/core/reconciler/impl/ResourcesReconcilerImpl.java @@ -22,6 +22,7 @@ import dev.knative.eventing.kafka.broker.core.utils.CollectionsUtils; import io.vertx.core.CompositeFuture; import io.vertx.core.Future; + import java.util.ArrayList; import java.util.Collection; import java.util.HashMap; @@ -204,6 +205,8 @@ private boolean resourceEquals(DataPlaneContract.Resource r1, DataPlaneContract. && Objects.equals(r1.getTopicsList(), r2.getTopicsList()) && Objects.equals(r1.getBootstrapServers(), r2.getBootstrapServers()) && Objects.equals(r1.getIngress(), r2.getIngress()) + && Objects.equals(r1.hasAbsentAuth(), r2.hasAbsentAuth()) + && Objects.equals(r1.getAuthSecret(), r2.getAuthSecret()) // In the case of ingress reconcile, do we really care about this one? && Objects.equals(r1.getEgressConfig(), r2.getEgressConfig()); } diff --git a/data-plane/core/src/main/java/dev/knative/eventing/kafka/broker/core/security/AuthProvider.java b/data-plane/core/src/main/java/dev/knative/eventing/kafka/broker/core/security/AuthProvider.java new file mode 100644 index 0000000000..285c235bdc --- /dev/null +++ b/data-plane/core/src/main/java/dev/knative/eventing/kafka/broker/core/security/AuthProvider.java @@ -0,0 +1,38 @@ +/* + * Copyright © 2018 Knative Authors (knative-dev@googlegroups.com) + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package dev.knative.eventing.kafka.broker.core.security; + +import io.fabric8.kubernetes.client.DefaultKubernetesClient; +import io.vertx.core.Future; + +/** + * AuthProvider provides auth credentials. + */ +@FunctionalInterface +public interface AuthProvider { + + static AuthProvider kubernetes() { + return new KubernetesAuthProvider(new DefaultKubernetesClient()); + } + + /** + * Get credentials from given a location represented by namespace and name. + * + * @return A failed or succeeded future with valid credentials. 
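+   *         Implementations such as KubernetesAuthProvider fail the future when
+   *         the secret cannot be read or its content does not pass validation.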
+ */ + Future getCredentials(final String namespace, final String name); +} diff --git a/data-plane/core/src/main/java/dev/knative/eventing/kafka/broker/core/security/Credentials.java b/data-plane/core/src/main/java/dev/knative/eventing/kafka/broker/core/security/Credentials.java new file mode 100644 index 0000000000..480615c1f0 --- /dev/null +++ b/data-plane/core/src/main/java/dev/knative/eventing/kafka/broker/core/security/Credentials.java @@ -0,0 +1,73 @@ +/* + * Copyright © 2018 Knative Authors (knative-dev@googlegroups.com) + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package dev.knative.eventing.kafka.broker.core.security; + +import org.apache.kafka.common.security.auth.SecurityProtocol; + +public interface Credentials { + + /** + * @return CA certificate. + */ + String caCertificates(); + + /** + * @return user certificate. + */ + String userCertificate(); + + /** + * @return user key. + */ + String userKey(); + + /** + * Client key: security.protocol + * + * @return Security protocol or null if not specified. + */ + SecurityProtocol securityProtocol(); + + /** + * Client key: sasl.mechanism + * + * @return SASL mechanism or null if not specified. + */ + String SASLMechanism(); + + /** + * Client config: + * sasl.jaas.config=org.apache.kafka.common.security.scram.ScramLoginModule required \ + * username="alice" \ + * password="alice-secret"; + * + * @return username. + * @see SASL Scram + */ + String SASLUsername(); + + /** + * Client config: + * sasl.jaas.config=org.apache.kafka.common.security.scram.ScramLoginModule required \ + * username="alice" \ + * password="alice-secret"; + * + * @return password. + * @see SASL Scram + */ + String SASLPassword(); +} diff --git a/data-plane/core/src/main/java/dev/knative/eventing/kafka/broker/core/security/CredentialsValidator.java b/data-plane/core/src/main/java/dev/knative/eventing/kafka/broker/core/security/CredentialsValidator.java new file mode 100644 index 0000000000..d0e3aebc20 --- /dev/null +++ b/data-plane/core/src/main/java/dev/knative/eventing/kafka/broker/core/security/CredentialsValidator.java @@ -0,0 +1,94 @@ +/* + * Copyright © 2018 Knative Authors (knative-dev@googlegroups.com) + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package dev.knative.eventing.kafka.broker.core.security; + +import org.apache.kafka.common.security.auth.SecurityProtocol; + +class CredentialsValidator { + + private CredentialsValidator() { + } + + static String validate(final Credentials credentials) { + + final var securityProtocol = credentials.securityProtocol(); + if (securityProtocol == null) { + return "No security protocol specified"; + } + + if (is(SecurityProtocol.PLAINTEXT, securityProtocol)) { + return null; + } + + if (is(SecurityProtocol.SSL, securityProtocol)) { + if (anyBlank(credentials.userCertificate(), credentials.userKey(), credentials.caCertificates())) { + return "Security protocol " + securityProtocol.name + ": invalid user certificate or user key or CA certificates"; + } + return null; + } + + final var SASLMechanism = credentials.SASLMechanism(); + if (is(SecurityProtocol.SASL_PLAINTEXT, securityProtocol)) { + if (isInvalidSASLMechanism(SASLMechanism)) { + return "Security protocol " + securityProtocol.name + ": invalid SASL mechanism, expected SCRAM-SHA-256 or SCRAM-SHA-512 got " + SASLMechanism; + } + if (anyBlank(credentials.SASLUsername(), credentials.SASLPassword())) { + return "Security protocol " + securityProtocol.name + ": invalid SASL username or password"; + } + return null; + } + + if (is(SecurityProtocol.SASL_SSL, securityProtocol)) { + if (anyBlank(credentials.caCertificates())) { + return "Security protocol " + securityProtocol.name + ": invalid truststore"; + } + if (isInvalidSASLMechanism(SASLMechanism)) { + return "Security protocol " + securityProtocol.name + ": invalid SASL mechanism, expected SCRAM-SHA-256 or SCRAM-SHA-512 got " + SASLMechanism; + } + if (anyBlank(credentials.SASLUsername(), credentials.SASLPassword())) { + return "Security protocol " + securityProtocol.name + ": invalid SASL username or password"; + } + return null; + } + return "Unsupported security protocol " + securityProtocol.name; + } + + private static boolean is(final SecurityProtocol s1, final SecurityProtocol s2) { + return s1.name.equals(s2.name); + } + + private static boolean is(final String s1, String s2) { + return s1.equals(s2); + } + + private static boolean isBlank(final String s) { + return s == null || s.isBlank(); + } + + private static boolean isInvalidSASLMechanism(final String SASLMechanism) { + return !(is("SCRAM-SHA-256", SASLMechanism) || is("SCRAM-SHA-512", SASLMechanism)); + } + + private static boolean anyBlank(final String... stores) { + for (final var s : stores) { + if (isBlank(s)) { + return true; + } + } + return false; + } +} diff --git a/data-plane/core/src/main/java/dev/knative/eventing/kafka/broker/core/security/KafkaClientsAuth.java b/data-plane/core/src/main/java/dev/knative/eventing/kafka/broker/core/security/KafkaClientsAuth.java new file mode 100644 index 0000000000..eb35f4d9bc --- /dev/null +++ b/data-plane/core/src/main/java/dev/knative/eventing/kafka/broker/core/security/KafkaClientsAuth.java @@ -0,0 +1,99 @@ +/* + * Copyright © 2018 Knative Authors (knative-dev@googlegroups.com) + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package dev.knative.eventing.kafka.broker.core.security; + +import io.vertx.core.Future; +import org.apache.kafka.clients.CommonClientConfigs; +import org.apache.kafka.common.config.SaslConfigs; +import org.apache.kafka.common.config.SslConfigs; +import org.apache.kafka.common.security.plain.PlainLoginModule; +import org.apache.kafka.common.security.scram.ScramLoginModule; +import org.apache.kafka.common.security.ssl.DefaultSslEngineFactory; + +import java.util.Map; +import java.util.Properties; +import java.util.function.BiConsumer; + +public class KafkaClientsAuth { + + public static Future updateConfigsFromProps(final Credentials credentials, + final Properties properties) { + return clientsProperties(properties::setProperty, credentials) + .map(r -> properties); + } + + public static Future> updateProducerConfigs(final Credentials credentials, + final Map configs) { + return clientsProperties(configs::put, credentials) + .map(r -> configs); + } + + public static Future> updateConsumerConfigs(final Credentials credentials, + final Map configs) { + return clientsProperties(configs::put, credentials) + .map(r -> configs); + } + + private static Future clientsProperties(final BiConsumer propertiesSetter, final Credentials credentials) { + final var protocol = credentials.securityProtocol(); + if (protocol == null) { + return Future.succeededFuture(); + } + + propertiesSetter.accept(CommonClientConfigs.SECURITY_PROTOCOL_CONFIG, protocol.name); + return switch (protocol) { + case PLAINTEXT -> Future.succeededFuture(); + case SSL -> ssl(propertiesSetter, credentials); + case SASL_PLAINTEXT -> sasl(propertiesSetter, credentials); + case SASL_SSL -> ssl(propertiesSetter, credentials).compose(r -> sasl(propertiesSetter, credentials)); + }; + } + + private static Future sasl(final BiConsumer propertiesSetter, final Credentials credentials) { + final var mechanism = credentials.SASLMechanism(); + if (mechanism == null) { + return Future.failedFuture("SASL mechanism required"); + } + propertiesSetter.accept(SaslConfigs.SASL_MECHANISM, mechanism); + if ("PLAIN".equals(mechanism)) { + propertiesSetter.accept(SaslConfigs.SASL_JAAS_CONFIG, String.format( + PlainLoginModule.class.getName() + " required username=\"%s\" password=\"%s\";", + credentials.SASLUsername(), + credentials.SASLPassword() + )); + } else { + propertiesSetter.accept(SaslConfigs.SASL_JAAS_CONFIG, String.format( + ScramLoginModule.class.getName() + " required username=\"%s\" password=\"%s\";", + credentials.SASLUsername(), + credentials.SASLPassword() + )); + } + return Future.succeededFuture(); + } + + private static Future ssl(final BiConsumer propertiesSetter, final Credentials credentials) { + propertiesSetter.accept(SslConfigs.SSL_TRUSTSTORE_TYPE_CONFIG, DefaultSslEngineFactory.PEM_TYPE); + propertiesSetter.accept(SslConfigs.SSL_TRUSTSTORE_CERTIFICATES_CONFIG, credentials.caCertificates()); + final var keystore = credentials.userCertificate(); + if (keystore != null) { + propertiesSetter.accept(SslConfigs.SSL_KEYSTORE_CERTIFICATE_CHAIN_CONFIG, credentials.userCertificate()); + propertiesSetter.accept(SslConfigs.SSL_KEYSTORE_KEY_CONFIG, credentials.userKey()); + propertiesSetter.accept(SslConfigs.SSL_KEYSTORE_TYPE_CONFIG, DefaultSslEngineFactory.PEM_TYPE); + } + return Future.succeededFuture(); + } +} diff --git a/data-plane/core/src/main/java/dev/knative/eventing/kafka/broker/core/security/KubernetesAuthProvider.java 
b/data-plane/core/src/main/java/dev/knative/eventing/kafka/broker/core/security/KubernetesAuthProvider.java new file mode 100644 index 0000000000..e98c2d7d9f --- /dev/null +++ b/data-plane/core/src/main/java/dev/knative/eventing/kafka/broker/core/security/KubernetesAuthProvider.java @@ -0,0 +1,59 @@ +/* + * Copyright © 2018 Knative Authors (knative-dev@googlegroups.com) + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package dev.knative.eventing.kafka.broker.core.security; + +import io.fabric8.kubernetes.api.model.Secret; +import io.fabric8.kubernetes.client.KubernetesClient; +import io.vertx.core.Future; +import io.vertx.core.Vertx; + +class KubernetesAuthProvider implements AuthProvider { + + private final KubernetesClient kubernetesClient; + + KubernetesAuthProvider(final KubernetesClient client) { + this.kubernetesClient = client; + } + + @Override + public Future getCredentials(final String namespace, final String name) { + return Vertx.currentContext().executeBlocking(p -> { + Secret secret; + try { + secret = kubernetesClient.secrets() + .inNamespace(namespace) + .withName(name) + .get(); + } catch (final Exception ex) { + p.fail(ex); + return; + } + if (secret == null) { + p.fail(String.format("Secret %s/%s null", namespace, name)); + return; + } + + final var credentials = new KubernetesCredentials(secret); + final var error = CredentialsValidator.validate(credentials); + if (error != null) { + p.fail(error); + return; + } + p.complete(credentials); + }); + } +} diff --git a/data-plane/core/src/main/java/dev/knative/eventing/kafka/broker/core/security/KubernetesCredentials.java b/data-plane/core/src/main/java/dev/knative/eventing/kafka/broker/core/security/KubernetesCredentials.java new file mode 100644 index 0000000000..2a6331e4c6 --- /dev/null +++ b/data-plane/core/src/main/java/dev/knative/eventing/kafka/broker/core/security/KubernetesCredentials.java @@ -0,0 +1,172 @@ +/* + * Copyright © 2018 Knative Authors (knative-dev@googlegroups.com) + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package dev.knative.eventing.kafka.broker.core.security; + +import io.fabric8.kubernetes.api.model.Secret; +import org.apache.kafka.common.security.auth.SecurityProtocol; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import javax.annotation.Nullable; +import java.util.Base64; + +import static net.logstash.logback.argument.StructuredArguments.keyValue; + +class KubernetesCredentials implements Credentials { + + private final static Logger logger = LoggerFactory.getLogger(KubernetesCredentials.class); + + static final String CA_CERTIFICATE_KEY = "ca.crt"; + + static final String USER_CERTIFICATE_KEY = "user.crt"; + static final String USER_KEY_KEY = "user.key"; + + static final String USERNAME_KEY = "user"; + static final String PASSWORD_KEY = "password"; + + static final String SECURITY_PROTOCOL = "protocol"; + static final String SASL_MECHANISM = "sasl.mechanism"; + + private final Secret secret; + + private String caCertificates; + private String userCertificate; + private String userKey; + private SecurityProtocol securityProtocol; + private String SASLMechanism; + private String SASLUsername; + private String SASLPassword; + + KubernetesCredentials(final Secret secret) { + this.secret = secret; + } + + @Override + public String caCertificates() { + if (secret == null || secret.getData() == null) { + return null; + } + if (caCertificates == null) { + final var truststore = secret.getData().get(CA_CERTIFICATE_KEY); + if (truststore == null) { + return null; + } + this.caCertificates = new String(Base64.getDecoder().decode(truststore)); + } + return this.caCertificates; + } + + @Override + public String userCertificate() { + if (secret == null || secret.getData() == null) { + return null; + } + if (userCertificate == null) { + final var keystore = secret.getData().get(USER_CERTIFICATE_KEY); + if (keystore == null) { + return null; + } + this.userCertificate = new String(Base64.getDecoder().decode(keystore)); + } + return userCertificate; + } + + @Override + public String userKey() { + if (secret == null || secret.getData() == null) { + return null; + } + if (userKey == null) { + final var userKey = secret.getData().get(USER_KEY_KEY); + if (userKey == null) { + return null; + } + this.userKey = new String(Base64.getDecoder().decode(userKey)); + } + return userKey; + } + + + @Override + public SecurityProtocol securityProtocol() { + if (secret == null || secret.getData() == null) { + return null; + } + if (securityProtocol == null) { + final var protocolStr = secret.getData().get(SECURITY_PROTOCOL); + if (protocolStr == null) { + return null; + } + final var protocol = new String(Base64.getDecoder().decode(protocolStr)); + if (!SecurityProtocol.names().contains(protocol)) { + logger.debug("Security protocol {}", keyValue(SECURITY_PROTOCOL, protocol)); + return null; + } + this.securityProtocol = SecurityProtocol.forName(protocol); + } + return this.securityProtocol; + } + + @Override + public String SASLMechanism() { + if (secret == null || secret.getData() == null) { + return null; + } + if (SASLMechanism == null) { + final var SASLMechanism = secret.getData().get(SASL_MECHANISM); + if (SASLMechanism == null) { + return null; + } + this.SASLMechanism = switch (new String(Base64.getDecoder().decode(SASLMechanism))) { + case "SCRAM-SHA-256" -> "SCRAM-SHA-256"; + case "SCRAM-SHA-512" -> "SCRAM-SHA-512"; + default -> null; + }; + } + return this.SASLMechanism; + } + + @Override + public String SASLUsername() { + if (secret == null || secret.getData() == null) { + return 
null; + } + if (SASLUsername == null) { + final var SASLUsername = secret.getData().get(USERNAME_KEY); + if (SASLUsername == null) { + return null; + } + this.SASLUsername = new String(Base64.getDecoder().decode(SASLUsername)); + } + return this.SASLUsername; + } + + @Override + public String SASLPassword() { + if (secret == null || secret.getData() == null) { + return null; + } + if (SASLPassword == null) { + final var SASLPassword = secret.getData().get(PASSWORD_KEY); + if (SASLPassword == null) { + return null; + } + this.SASLPassword = new String(Base64.getDecoder().decode(SASLPassword)); + } + return this.SASLPassword; + } +} diff --git a/data-plane/core/src/main/java/dev/knative/eventing/kafka/broker/core/security/PlaintextCredentials.java b/data-plane/core/src/main/java/dev/knative/eventing/kafka/broker/core/security/PlaintextCredentials.java new file mode 100644 index 0000000000..2961b5a02e --- /dev/null +++ b/data-plane/core/src/main/java/dev/knative/eventing/kafka/broker/core/security/PlaintextCredentials.java @@ -0,0 +1,57 @@ +/* + * Copyright © 2018 Knative Authors (knative-dev@googlegroups.com) + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package dev.knative.eventing.kafka.broker.core.security; + +import org.apache.kafka.common.security.auth.SecurityProtocol; + +public class PlaintextCredentials implements Credentials { + + @Override + public String caCertificates() { + return null; + } + + @Override + public String userCertificate() { + return null; + } + + @Override + public String userKey() { + return null; + } + + @Override + public SecurityProtocol securityProtocol() { + return SecurityProtocol.PLAINTEXT; + } + + @Override + public String SASLMechanism() { + return null; + } + + @Override + public String SASLUsername() { + return null; + } + + @Override + public String SASLPassword() { + return null; + } +} diff --git a/data-plane/core/src/test/java/dev/knative/eventing/kafka/broker/core/reconciler/impl/ResourcesReconcilerImplTest.java b/data-plane/core/src/test/java/dev/knative/eventing/kafka/broker/core/reconciler/impl/ResourcesReconcilerImplTest.java index dc0e11cff9..1afdc52816 100644 --- a/data-plane/core/src/test/java/dev/knative/eventing/kafka/broker/core/reconciler/impl/ResourcesReconcilerImplTest.java +++ b/data-plane/core/src/test/java/dev/knative/eventing/kafka/broker/core/reconciler/impl/ResourcesReconcilerImplTest.java @@ -15,16 +15,18 @@ */ package dev.knative.eventing.kafka.broker.core.reconciler.impl; +import dev.knative.eventing.kafka.broker.contract.DataPlaneContract; +import org.junit.jupiter.api.Test; + +import java.util.Collections; +import java.util.List; +import java.util.UUID; + import static dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Egress; import static dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Filter; import static dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Resource; import static org.assertj.core.api.Assertions.assertThatThrownBy; -import 
dev.knative.eventing.kafka.broker.contract.DataPlaneContract; -import java.util.Collections; -import java.util.List; -import org.junit.jupiter.api.Test; - class ResourcesReconcilerImplTest { @Test @@ -117,6 +119,48 @@ void reconcileIngressAndUpdateIngressAtSecondStep() { .run(); } + @Test + void reconcileIngressAndAddAuthConfigAtSecondStepAndUpdateAuthConfigAtThirdStep() { + final var uuid = UUID.randomUUID().toString(); + new ResourceReconcilerTestRunner() + .enableIngressListener() + .reconcile(List.of( + baseResource("1-1234") + .setIngress(DataPlaneContract.Ingress.newBuilder().setPath("/hello")) + .build() + )) + .expect() + .newIngress("1-1234") + .then() + .reconcile(List.of( + baseResource("1-1234") + .setIngress(DataPlaneContract.Ingress.newBuilder().setPath("/hello/world")) + .setAuthSecret(DataPlaneContract.Reference.newBuilder() + .setName("n1") + .setNamespace("ns1") + .setUuid(uuid) + .setVersion("1")) + .build() + )) + .expect() + .updatedIngress("1-1234") + .then() + .reconcile(List.of( + baseResource("1-1234") + .setIngress(DataPlaneContract.Ingress.newBuilder().setPath("/hello")) + .setAuthSecret(DataPlaneContract.Reference.newBuilder() + .setName("n1") + .setNamespace("ns1") + .setUuid(uuid) + .setVersion("2")) + .build() + )) + .expect() + .updatedIngress("1-1234") + .then() + .run(); + } + @Test void reconcileIngressAddUpdateAndRemoveResource() { new ResourceReconcilerTestRunner() @@ -239,6 +283,60 @@ void reconcileEgressModifyingTheGlobalEgressConfig() { .run(); } + @Test + void reconcileEgressModifyingAuthConfig() { + final var uuid = UUID.randomUUID().toString(); + new ResourceReconcilerTestRunner() + .enableEgressListener() + .reconcile(List.of( + baseResource("1-1234") + .addEgresses(egress("aaa")) + .addEgresses(egress("bbb")) + .addEgresses(egress("ccc")) + .build() + )) + .expect() + .newEgress("aaa") + .newEgress("bbb") + .newEgress("ccc") + .then() + .reconcile(List.of( + baseResource("1-1234") + .setAuthSecret(DataPlaneContract.Reference.newBuilder() + .setName("n1") + .setNamespace("ns1") + .setUuid(uuid) + .setVersion("1")) + .addEgresses(egress("aaa")) + .addEgresses(egress("bbb")) + .addEgresses(egress("ccc")) + .build() + )) + .expect() + .updatedEgress("aaa") + .updatedEgress("bbb") + .updatedEgress("ccc") + .then() + .reconcile(List.of( + baseResource("1-1234") + .setAuthSecret(DataPlaneContract.Reference.newBuilder() + .setName("n1") + .setNamespace("ns1") + .setUuid(uuid) + .setVersion("2")) + .addEgresses(egress("aaa")) + .addEgresses(egress("bbb")) + .addEgresses(egress("ccc")) + .build() + )) + .expect() + .updatedEgress("aaa") + .updatedEgress("bbb") + .updatedEgress("ccc") + .then() + .run(); + } + @Test void reconcileEgressAddingAndRemovingResource() { new ResourceReconcilerTestRunner() diff --git a/data-plane/core/src/test/java/dev/knative/eventing/kafka/broker/core/security/CredentialsValidatorTest.java b/data-plane/core/src/test/java/dev/knative/eventing/kafka/broker/core/security/CredentialsValidatorTest.java new file mode 100644 index 0000000000..248702ba78 --- /dev/null +++ b/data-plane/core/src/test/java/dev/knative/eventing/kafka/broker/core/security/CredentialsValidatorTest.java @@ -0,0 +1,285 @@ +/* + * Copyright © 2018 Knative Authors (knative-dev@googlegroups.com) + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package dev.knative.eventing.kafka.broker.core.security; + +import org.apache.kafka.common.security.auth.SecurityProtocol; +import org.junit.jupiter.api.Test; + +import static org.assertj.core.api.Assertions.assertThat; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +public class CredentialsValidatorTest { + + @Test + public void securityProtocolPlaintextValid() { + final var credential = mock(Credentials.class); + + when(credential.securityProtocol()).thenReturn(SecurityProtocol.PLAINTEXT); + + assertThat(CredentialsValidator.validate(credential)).isNull(); + } + + @Test + public void securityProtocolSslValid() { + final var credential = mock(Credentials.class); + + when(credential.securityProtocol()).thenReturn(SecurityProtocol.SSL); + when(credential.userCertificate()).thenReturn("abc"); + when(credential.userKey()).thenReturn("key"); + when(credential.caCertificates()).thenReturn("xyz"); + + assertThat(CredentialsValidator.validate(credential)).isNull(); + } + + @Test + public void securityProtocolSslInvalidNoUserCert() { + final var credential = mock(Credentials.class); + + when(credential.securityProtocol()).thenReturn(SecurityProtocol.SSL); + when(credential.userCertificate()).thenReturn(" "); + when(credential.userKey()).thenReturn("my-key"); + when(credential.caCertificates()).thenReturn("xyz"); + + assertThat(CredentialsValidator.validate(credential)).isNotEmpty(); + } + + @Test + public void securityProtocolSslInvalidNoUserKey() { + final var credential = mock(Credentials.class); + + when(credential.securityProtocol()).thenReturn(SecurityProtocol.SSL); + when(credential.userCertificate()).thenReturn("xyz"); + when(credential.userKey()).thenReturn(" "); + when(credential.caCertificates()).thenReturn("my-cert"); + + assertThat(CredentialsValidator.validate(credential)).isNotEmpty(); + } + + @Test + public void securityProtocolSslInvalidNoCACert() { + final var credential = mock(Credentials.class); + + when(credential.securityProtocol()).thenReturn(SecurityProtocol.SSL); + when(credential.userCertificate()).thenReturn("xyz"); + when(credential.userKey()).thenReturn("my-key"); + when(credential.caCertificates()).thenReturn(" "); + + assertThat(CredentialsValidator.validate(credential)).isNotEmpty(); + } + + @Test + public void securityProtocolSaslPlaintextScramSha256Valid() { + securityProtocolSaslPlaintextScramValid("SCRAM-SHA-256"); + } + + @Test + public void securityProtocolSaslPlaintextScramSha512Valid() { + securityProtocolSaslPlaintextScramValid("SCRAM-SHA-512"); + } + + private static void securityProtocolSaslPlaintextScramValid(final String mechanism) { + + final var credential = mock(Credentials.class); + + when(credential.securityProtocol()).thenReturn(SecurityProtocol.SASL_PLAINTEXT); + when(credential.SASLMechanism()).thenReturn(mechanism); + when(credential.SASLUsername()).thenReturn("aaa"); + when(credential.SASLPassword()).thenReturn("bbb"); + + assertThat(CredentialsValidator.validate(credential)).isNull(); + } + + @Test + public void securityProtocolSaslPlaintextScramSha513InValid() { + final var 
credential = mock(Credentials.class); + + when(credential.securityProtocol()).thenReturn(SecurityProtocol.SASL_PLAINTEXT); + when(credential.SASLMechanism()).thenReturn("SCRAM-SHA-513"); + when(credential.SASLUsername()).thenReturn("aaa"); + when(credential.SASLPassword()).thenReturn("bbb"); + + assertThat(CredentialsValidator.validate(credential)).isNotEmpty(); + } + + @Test + public void securityProtocolSaslPLAINTEXT_ScramSha51NoUsernameInValid() { + final var credential = mock(Credentials.class); + + when(credential.securityProtocol()).thenReturn(SecurityProtocol.SASL_PLAINTEXT); + when(credential.SASLMechanism()).thenReturn("SCRAM-SHA-512"); + when(credential.SASLUsername()).thenReturn(" "); + when(credential.SASLPassword()).thenReturn("bbb"); + + assertThat(CredentialsValidator.validate(credential)).isNotEmpty(); + } + + @Test + public void securityProtocolSaslPLAINTEXT_ScramSha51NoPasswordInValid() { + final var credential = mock(Credentials.class); + + when(credential.securityProtocol()).thenReturn(SecurityProtocol.SASL_PLAINTEXT); + when(credential.SASLMechanism()).thenReturn("SCRAM-SHA-512"); + when(credential.SASLUsername()).thenReturn("bbb"); + when(credential.SASLPassword()).thenReturn(" "); + + assertThat(CredentialsValidator.validate(credential)).isNotEmpty(); + } + + @Test + public void securityProtocolSaslSslScramSha256Valid() { + final var credential = mock(Credentials.class); + + when(credential.securityProtocol()).thenReturn(SecurityProtocol.SASL_SSL); + when(credential.caCertificates()).thenReturn("xyz"); + when(credential.SASLMechanism()).thenReturn("SCRAM-SHA-256"); + when(credential.SASLUsername()).thenReturn("aaa"); + when(credential.SASLPassword()).thenReturn("bbb"); + + assertThat(CredentialsValidator.validate(credential)).isNull(); + } + + @Test + public void securityProtocolSaslSslScramSha513InValid() { + final var credential = mock(Credentials.class); + + when(credential.securityProtocol()).thenReturn(SecurityProtocol.SASL_SSL); + when(credential.caCertificates()).thenReturn("xyz"); + when(credential.SASLMechanism()).thenReturn("SCRAM-SHA-513"); + when(credential.SASLUsername()).thenReturn("aaa"); + when(credential.SASLPassword()).thenReturn("bbb"); + + assertThat(CredentialsValidator.validate(credential)).isNotEmpty(); + } + + @Test + public void securityProtocolSaslSslScramSha51NoUsernameInValid() { + final var credential = mock(Credentials.class); + + when(credential.securityProtocol()).thenReturn(SecurityProtocol.SASL_SSL); + when(credential.userCertificate()).thenReturn("abc"); + when(credential.caCertificates()).thenReturn("xyz"); + when(credential.SASLMechanism()).thenReturn("SCRAM-SHA-512"); + when(credential.SASLUsername()).thenReturn(" "); + when(credential.SASLPassword()).thenReturn("bbb"); + + assertThat(CredentialsValidator.validate(credential)).isNotEmpty(); + } + + @Test + public void securityProtocolSaslSslScramSha51NoPasswordInValid() { + final var credential = mock(Credentials.class); + + when(credential.securityProtocol()).thenReturn(SecurityProtocol.SASL_SSL); + when(credential.userCertificate()).thenReturn("abc"); + when(credential.caCertificates()).thenReturn("xyz"); + when(credential.SASLMechanism()).thenReturn("SCRAM-SHA-512"); + when(credential.SASLUsername()).thenReturn("bbb"); + when(credential.SASLPassword()).thenReturn(" "); + + assertThat(CredentialsValidator.validate(credential)).isNotEmpty(); + } + + @Test + public void securityProtocolSaslSslScramSha51NoTruststoreInValid() { + final var credential = mock(Credentials.class); + + 
when(credential.securityProtocol()).thenReturn(SecurityProtocol.SASL_SSL); + when(credential.userCertificate()).thenReturn("abc"); + when(credential.caCertificates()).thenReturn(" "); + when(credential.SASLMechanism()).thenReturn("SCRAM-SHA-512"); + when(credential.SASLUsername()).thenReturn("bbb"); + when(credential.SASLPassword()).thenReturn("ccc"); + + assertThat(CredentialsValidator.validate(credential)).isNotEmpty(); + } + + @Test + public void securityProtocolSaslSslScramSha51Valid() { + final var credential = mock(Credentials.class); + + when(credential.securityProtocol()).thenReturn(SecurityProtocol.SASL_SSL); + when(credential.userCertificate()).thenReturn("abc"); + when(credential.caCertificates()).thenReturn("xyz"); + when(credential.SASLMechanism()).thenReturn("SCRAM-SHA-512"); + when(credential.SASLUsername()).thenReturn("aaa"); + when(credential.SASLPassword()).thenReturn("bbb"); + + assertThat(CredentialsValidator.validate(credential)).isNull(); + } + + @Test + public void securityProtocolSaslSslScramSha256NoUsernameInValid() { + final var credential = mock(Credentials.class); + + when(credential.securityProtocol()).thenReturn(SecurityProtocol.SASL_SSL); + when(credential.userCertificate()).thenReturn("abc"); + when(credential.caCertificates()).thenReturn("xyz"); + when(credential.SASLMechanism()).thenReturn("SCRAM-SHA-256"); + when(credential.SASLUsername()).thenReturn(" "); + when(credential.SASLPassword()).thenReturn("bbb"); + + assertThat(CredentialsValidator.validate(credential)).isNotEmpty(); + } + + @Test + public void securityProtocolSaslSslScramSha256NoPasswordInValid() { + final var credential = mock(Credentials.class); + + when(credential.securityProtocol()).thenReturn(SecurityProtocol.SASL_SSL); + when(credential.caCertificates()).thenReturn("xyz"); + when(credential.SASLMechanism()).thenReturn("SCRAM-SHA-512"); + when(credential.SASLUsername()).thenReturn("bbb"); + when(credential.SASLPassword()).thenReturn(" "); + + assertThat(CredentialsValidator.validate(credential)).isNotEmpty(); + } + + @Test + public void securityProtocolSaslSslScramSha256NoTruststoreInValid() { + final var credential = mock(Credentials.class); + + when(credential.securityProtocol()).thenReturn(SecurityProtocol.SASL_SSL); + when(credential.caCertificates()).thenReturn(" "); + when(credential.SASLMechanism()).thenReturn("SCRAM-SHA-512"); + when(credential.SASLUsername()).thenReturn("bbb"); + when(credential.SASLPassword()).thenReturn("ccc"); + + assertThat(CredentialsValidator.validate(credential)).isNotEmpty(); + } + + @Test + public void securityProtocolSaslSslScramSha256NoCACertInValid() { + final var credential = mock(Credentials.class); + + when(credential.securityProtocol()).thenReturn(SecurityProtocol.SASL_SSL); + when(credential.SASLMechanism()).thenReturn("SCRAM-SHA-512"); + when(credential.SASLUsername()).thenReturn("bbb"); + when(credential.SASLPassword()).thenReturn("ccc"); + + assertThat(CredentialsValidator.validate(credential)).isNotEmpty(); + } + + @Test + public void securityProtocol_nullInValid() { + final var credential = mock(Credentials.class); + + when(credential.securityProtocol()).thenReturn(null); + + assertThat(CredentialsValidator.validate(credential)).isNotEmpty(); + } +} diff --git a/data-plane/core/src/test/java/dev/knative/eventing/kafka/broker/core/security/KafkaClientsAuthTest.java b/data-plane/core/src/test/java/dev/knative/eventing/kafka/broker/core/security/KafkaClientsAuthTest.java new file mode 100644 index 0000000000..7ee8d6b885 --- /dev/null +++ 
b/data-plane/core/src/test/java/dev/knative/eventing/kafka/broker/core/security/KafkaClientsAuthTest.java @@ -0,0 +1,184 @@ +/* + * Copyright © 2018 Knative Authors (knative-dev@googlegroups.com) + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package dev.knative.eventing.kafka.broker.core.security; + +import org.apache.kafka.clients.CommonClientConfigs; +import org.apache.kafka.common.config.SaslConfigs; +import org.apache.kafka.common.config.SslConfigs; +import org.apache.kafka.common.security.auth.SecurityProtocol; +import org.apache.kafka.common.security.plain.PlainLoginModule; +import org.apache.kafka.common.security.scram.ScramLoginModule; +import org.apache.kafka.common.security.ssl.DefaultSslEngineFactory; +import org.junit.jupiter.api.Test; + +import javax.security.auth.spi.LoginModule; +import java.util.HashMap; +import java.util.Properties; + +import static org.assertj.core.api.Assertions.assertThat; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +public class KafkaClientsAuthTest { + + @Test + public void shouldConfigureSaslScram512Ssl() { + shouldConfigureSaslSsl(ScramLoginModule.class, "SCRAM-SHA-512"); + } + + @Test + public void shouldConfigureSaslScram256Ssl() { + shouldConfigureSaslSsl(ScramLoginModule.class, "SCRAM-SHA-256"); + } + + @Test + public void shouldConfigureSaslPlainSsl() { + shouldConfigureSaslSsl(PlainLoginModule.class, "PLAIN"); + } + + private static void shouldConfigureSaslSsl(final Class module, final String mechanism) { + final var props = new Properties(); + + final var credentials = mock(Credentials.class); + when(credentials.securityProtocol()).thenReturn(SecurityProtocol.SASL_SSL); + when(credentials.caCertificates()).thenReturn("xyz"); + when(credentials.SASLMechanism()).thenReturn(mechanism); + when(credentials.SASLUsername()).thenReturn("aaa"); + when(credentials.SASLPassword()).thenReturn("bbb"); + + assertThat(KafkaClientsAuth.updateConfigsFromProps(credentials, props).succeeded()).isTrue(); + + final var expected = new Properties(); + expected.setProperty(CommonClientConfigs.SECURITY_PROTOCOL_CONFIG, SecurityProtocol.SASL_SSL.name()); + expected.setProperty(SslConfigs.SSL_TRUSTSTORE_TYPE_CONFIG, DefaultSslEngineFactory.PEM_TYPE); + expected.setProperty(SslConfigs.SSL_TRUSTSTORE_CERTIFICATES_CONFIG, "xyz"); + expected.setProperty(SaslConfigs.SASL_MECHANISM, mechanism); + expected.setProperty( + SaslConfigs.SASL_JAAS_CONFIG, + module.getName() + " required username=\"" + credentials.SASLUsername() + "\" password=\"" + credentials.SASLPassword() + "\";" + ); + + assertThat(props).isEqualTo(expected); + + final var producerConfigs = new HashMap(); + final var consumerConfigs = new HashMap(); + + assertThat(KafkaClientsAuth.updateProducerConfigs(credentials, producerConfigs).succeeded()).isTrue(); + assertThat(KafkaClientsAuth.updateConsumerConfigs(credentials, consumerConfigs).succeeded()).isTrue(); + + assertThat(producerConfigs).isEqualTo(expected); + 
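// For the SCRAM mechanisms the expected sasl.jaas.config built above renders as
// (illustrative, using the mocked credentials):
//   org.apache.kafka.common.security.scram.ScramLoginModule required username="aaa" password="bbb";
// and the consumer config map checked below is expected to carry exactly the same
// SASL_SSL settings as the producer one.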
assertThat(consumerConfigs).isEqualTo(expected); + } + + @Test + public void shouldConfigureSsl() { + final var props = new Properties(); + + final var credentials = mock(Credentials.class); + when(credentials.securityProtocol()).thenReturn(SecurityProtocol.SSL); + when(credentials.userCertificate()).thenReturn("abc"); + when(credentials.userKey()).thenReturn("key"); + when(credentials.caCertificates()).thenReturn("xyz"); + + assertThat(KafkaClientsAuth.updateConfigsFromProps(credentials, props).succeeded()).isTrue(); + + final var expected = new Properties(); + expected.setProperty(CommonClientConfigs.SECURITY_PROTOCOL_CONFIG, SecurityProtocol.SSL.name()); + expected.setProperty(SslConfigs.SSL_TRUSTSTORE_TYPE_CONFIG, DefaultSslEngineFactory.PEM_TYPE); + expected.setProperty(SslConfigs.SSL_TRUSTSTORE_CERTIFICATES_CONFIG, "xyz"); + expected.setProperty(SslConfigs.SSL_KEYSTORE_TYPE_CONFIG, DefaultSslEngineFactory.PEM_TYPE); + expected.setProperty(SslConfigs.SSL_KEYSTORE_CERTIFICATE_CHAIN_CONFIG, "abc"); + expected.setProperty(SslConfigs.SSL_KEYSTORE_KEY_CONFIG, "key"); + + assertThat(props).isEqualTo(expected); + + final var producerConfigs = new HashMap(); + final var consumerConfigs = new HashMap(); + + assertThat(KafkaClientsAuth.updateProducerConfigs(credentials, producerConfigs).succeeded()).isTrue(); + assertThat(KafkaClientsAuth.updateConsumerConfigs(credentials, consumerConfigs).succeeded()).isTrue(); + + assertThat(producerConfigs).isEqualTo(expected); + assertThat(consumerConfigs).isEqualTo(expected); + } + + @Test + public void shouldConfigureSaslPlaintextScram512() { + shouldConfigureSaslPlaintext("SCRAM-SHA-512"); + } + + @Test + public void shouldConfigureSaslPlaintextScram256() { + shouldConfigureSaslPlaintext("SCRAM-SHA-256"); + } + + @Test + public void shouldConfigurePlaintext() { + final var props = new Properties(); + + final var credentials = mock(Credentials.class); + when(credentials.securityProtocol()).thenReturn(SecurityProtocol.PLAINTEXT); + + assertThat(KafkaClientsAuth.updateConfigsFromProps(credentials, props).succeeded()).isTrue(); + + final var expected = new Properties(); + expected.setProperty(CommonClientConfigs.SECURITY_PROTOCOL_CONFIG, SecurityProtocol.PLAINTEXT.name()); + + assertThat(props).isEqualTo(expected); + + final var producerConfigs = new HashMap(); + final var consumerConfigs = new HashMap(); + + assertThat(KafkaClientsAuth.updateProducerConfigs(credentials, producerConfigs).succeeded()).isTrue(); + assertThat(KafkaClientsAuth.updateConsumerConfigs(credentials, consumerConfigs).succeeded()).isTrue(); + + assertThat(producerConfigs).isEqualTo(expected); + assertThat(consumerConfigs).isEqualTo(expected); + } + + private static void shouldConfigureSaslPlaintext(final String mechanism) { + final var props = new Properties(); + + final var credentials = mock(Credentials.class); + when(credentials.securityProtocol()).thenReturn(SecurityProtocol.SASL_PLAINTEXT); + when(credentials.SASLMechanism()).thenReturn(mechanism); + when(credentials.SASLUsername()).thenReturn("aaa"); + when(credentials.SASLPassword()).thenReturn("bbb"); + + assertThat(KafkaClientsAuth.updateConfigsFromProps(credentials, props).succeeded()).isTrue(); + + final var expected = new Properties(); + + expected.setProperty(CommonClientConfigs.SECURITY_PROTOCOL_CONFIG, SecurityProtocol.SASL_PLAINTEXT.name()); + expected.setProperty(SaslConfigs.SASL_MECHANISM, mechanism); + expected.setProperty( + SaslConfigs.SASL_JAAS_CONFIG, + ScramLoginModule.class.getName() + " required username=\"" + 
credentials.SASLUsername() + "\" password=\"" + credentials.SASLPassword() + "\";" + ); + + assertThat(props).isEqualTo(expected); + + final var producerConfigs = new HashMap(); + final var consumerConfigs = new HashMap(); + + assertThat(KafkaClientsAuth.updateProducerConfigs(credentials, producerConfigs).succeeded()).isTrue(); + assertThat(KafkaClientsAuth.updateConsumerConfigs(credentials, consumerConfigs).succeeded()).isTrue(); + + assertThat(producerConfigs).isEqualTo(expected); + assertThat(consumerConfigs).isEqualTo(expected); + } +} diff --git a/data-plane/core/src/test/java/dev/knative/eventing/kafka/broker/core/security/KubernetesAuthProviderTest.java b/data-plane/core/src/test/java/dev/knative/eventing/kafka/broker/core/security/KubernetesAuthProviderTest.java new file mode 100644 index 0000000000..af5f829350 --- /dev/null +++ b/data-plane/core/src/test/java/dev/knative/eventing/kafka/broker/core/security/KubernetesAuthProviderTest.java @@ -0,0 +1,137 @@ +/* + * Copyright © 2018 Knative Authors (knative-dev@googlegroups.com) + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package dev.knative.eventing.kafka.broker.core.security; + +import io.fabric8.kubernetes.api.model.SecretBuilder; +import io.fabric8.kubernetes.client.server.mock.KubernetesServer; +import io.vertx.core.Vertx; +import io.vertx.junit5.VertxExtension; +import io.vertx.junit5.VertxTestContext; +import org.apache.kafka.common.security.auth.SecurityProtocol; +import org.junit.Rule; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.junit.jupiter.migrationsupport.rules.EnableRuleMigrationSupport; + +import java.util.AbstractMap; +import java.util.Base64; +import java.util.HashMap; +import java.util.Map; +import java.util.stream.Collectors; + +import static org.assertj.core.api.Assertions.assertThat; + +@ExtendWith(VertxExtension.class) +@EnableRuleMigrationSupport +public class KubernetesAuthProviderTest { + + @Rule + public KubernetesServer server = new KubernetesServer(true, true); + + @Test + public void getCredentialsFromSecret(final Vertx vertx, final VertxTestContext context) { + final var client = server.getClient(); + + final var data = new HashMap(); + + data.put(KubernetesCredentials.SECURITY_PROTOCOL, SecurityProtocol.SASL_SSL.name); + data.put(KubernetesCredentials.SASL_MECHANISM, "SCRAM-SHA-512"); + + data.put(KubernetesCredentials.USER_CERTIFICATE_KEY, "my-user-cert"); + data.put(KubernetesCredentials.USER_KEY_KEY, "my-user-key"); + + data.put(KubernetesCredentials.USERNAME_KEY, "my-username"); + data.put(KubernetesCredentials.PASSWORD_KEY, "my-user-password"); + + data.put(KubernetesCredentials.CA_CERTIFICATE_KEY, "my-ca-certificate"); + + final var secret = new SecretBuilder() + .withNewMetadata() + .withName("my-secret-name") + .withNamespace("my-secret-namespace") + .endMetadata() + .withData( + data.entrySet().stream() + .map(e -> new AbstractMap.SimpleImmutableEntry<>(e.getKey(), 
Base64.getEncoder().encodeToString(e.getValue().getBytes()))) + .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)) + ) + .build(); + + client.secrets().inNamespace("my-secret-namespace").create(secret); + + final var provider = new KubernetesAuthProvider(server.getClient()); + + vertx.runOnContext(r -> { + final var credentialsFuture = provider.getCredentials("my-secret-namespace", "my-secret-name"); + + credentialsFuture + .onFailure(context::failNow) + .onSuccess(credentials -> context.verify(() -> { + + assertThat(credentials.SASLMechanism()).isEqualTo(data.get(KubernetesCredentials.SASL_MECHANISM)); + assertThat(credentials.securityProtocol()).isEqualTo(SecurityProtocol.forName(data.get(KubernetesCredentials.SECURITY_PROTOCOL))); + + assertThat(credentials.userCertificate()).isEqualTo(data.get(KubernetesCredentials.USER_CERTIFICATE_KEY)); + + assertThat(credentials.caCertificates()).isEqualTo(data.get(KubernetesCredentials.CA_CERTIFICATE_KEY)); + + context.completeNow(); + })); + }); + } + + @Test + public void shouldFailOnSecretNotFound(final Vertx vertx, final VertxTestContext context) { + + final var provider = new KubernetesAuthProvider(server.getClient()); + + vertx.runOnContext(r -> provider.getCredentials("my-secret-namespace", "my-secret-name-not-found") + .onSuccess(ignored -> context.failNow("Unexpected success: expected not found error")) + .onFailure(cause -> context.completeNow()) + ); + } + + @Test + public void shouldFailOnInvalidSecret(final Vertx vertx, final VertxTestContext context) { + + final var provider = new KubernetesAuthProvider(server.getClient()); + + final var data = new HashMap(); + + data.put(KubernetesCredentials.SECURITY_PROTOCOL, SecurityProtocol.SASL_SSL.name); + data.put(KubernetesCredentials.SASL_MECHANISM, "SCRAM-SHA-512"); + + final var secret = new SecretBuilder() + .withNewMetadata() + .withName("my-secret-name-invalid") + .withNamespace("my-secret-namespace") + .endMetadata() + .withData( + data.entrySet().stream() + .map(e -> new AbstractMap.SimpleImmutableEntry<>(e.getKey(), Base64.getEncoder().encodeToString(e.getValue().getBytes()))) + .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)) + ) + .build(); + + server.getClient().secrets().inNamespace("my-secret-namespace").create(secret); + + vertx.runOnContext(r -> provider.getCredentials("my-secret-namespace", "my-secret-name-invalid") + .onSuccess(ignored -> context.failNow("Unexpected success: expected invalid secret error")) + .onFailure(cause -> context.completeNow()) + ); + } +} diff --git a/data-plane/core/src/test/java/dev/knative/eventing/kafka/broker/core/security/KubernetesCredentialsTest.java b/data-plane/core/src/test/java/dev/knative/eventing/kafka/broker/core/security/KubernetesCredentialsTest.java new file mode 100644 index 0000000000..b0d729d8e5 --- /dev/null +++ b/data-plane/core/src/test/java/dev/knative/eventing/kafka/broker/core/security/KubernetesCredentialsTest.java @@ -0,0 +1,147 @@ +/* + * Copyright © 2018 Knative Authors (knative-dev@googlegroups.com) + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package dev.knative.eventing.kafka.broker.core.security; + +import io.fabric8.kubernetes.api.model.Secret; +import io.fabric8.kubernetes.api.model.SecretBuilder; +import org.apache.kafka.common.security.auth.SecurityProtocol; +import org.junit.jupiter.api.Test; + +import java.util.AbstractMap; +import java.util.Base64; +import java.util.HashMap; +import java.util.Map; +import java.util.stream.Collectors; + +import static org.assertj.core.api.Assertions.assertThat; + +public class KubernetesCredentialsTest { + + @Test + public void getKubernetesCredentialsFromSecret() { + + final var data = Map.of( + KubernetesCredentials.CA_CERTIFICATE_KEY, "CA_CERT", + KubernetesCredentials.USER_CERTIFICATE_KEY, "USER_CERT", + KubernetesCredentials.USER_KEY_KEY, "USER_KEY", + KubernetesCredentials.SASL_MECHANISM, "SCRAM-SHA-256", + KubernetesCredentials.SECURITY_PROTOCOL, SecurityProtocol.SASL_SSL.name, + KubernetesCredentials.USERNAME_KEY, "USERNAME", + KubernetesCredentials.PASSWORD_KEY, "PASSWORD" + ); + + final var credentials = new KubernetesCredentials( + new SecretBuilder() + .withNewMetadata() + .withNamespace("ns1") + .withName("name1") + .endMetadata() + .withData( + base64(data) + ) + .build() + ); + + for (int i = 0; i < 2; i++) { + assertThat(credentials.securityProtocol()).isEqualTo(SecurityProtocol.forName(data.get(KubernetesCredentials.SECURITY_PROTOCOL))); + assertAll(data, credentials); + } + } + + @Test + public void getKubernetesCredentialsFromEmptySecret() { + kubernetesCredentialsFromInvalidSecret( + new SecretBuilder() + .withNewMetadata() + .withNamespace("ns1") + .withName("name1") + .endMetadata() + .withData(new HashMap<>()) + .build() + ); + } + + @Test + public void getKubernetesCredentialsFromNullSecret() { + kubernetesCredentialsFromInvalidSecret(null); + } + + private static void kubernetesCredentialsFromInvalidSecret(final Secret secret) { + final var credentials = new KubernetesCredentials(secret); + + for (int i = 0; i < 2; i++) { + assertThat(credentials.securityProtocol()).isNull(); + assertAll(new HashMap<>(), credentials); + } + } + + @Test + public void getKubernetesCredentialsFromNullSecretData() { + final var credentials = new KubernetesCredentials( + new SecretBuilder() + .withNewMetadata() + .withNamespace("ns1") + .withName("name1") + .endMetadata() + .build() + ); + + for (int i = 0; i < 2; i++) { + assertThat(credentials.securityProtocol()).isNull(); + assertAll(new HashMap<>(), credentials); + } + } + + private static void assertAll(final Map data, final KubernetesCredentials credentials) { + assertThat(credentials.SASLMechanism()).isEqualTo(data.get(KubernetesCredentials.SASL_MECHANISM)); + assertThat(credentials.caCertificates()).isEqualTo(data.get(KubernetesCredentials.CA_CERTIFICATE_KEY)); + assertThat(credentials.userCertificate()).isEqualTo(data.get(KubernetesCredentials.USER_CERTIFICATE_KEY)); + assertThat(credentials.userKey()).isEqualTo(data.get(KubernetesCredentials.USER_KEY_KEY)); + assertThat(credentials.SASLUsername()).isEqualTo(data.get(KubernetesCredentials.USERNAME_KEY)); + assertThat(credentials.SASLPassword()).isEqualTo(data.get(KubernetesCredentials.PASSWORD_KEY)); + } + + @Test + public void unknownSecurityProtocolReturnsNull() { + + final var data = Map.of( + KubernetesCredentials.SECURITY_PROTOCOL, "SASSO_PLAINTEXT" + ); + + final var credentials = new KubernetesCredentials( + new SecretBuilder() + .withNewMetadata() + 
.withNamespace("ns1") + .withName("name1") + .endMetadata() + .withData( + base64(data) + ) + .build() + ); + + for (int i = 0; i < 2; i++) { + assertThat(credentials.securityProtocol()).isNull(); + } + } + + private static Map base64(Map data) { + return data.entrySet().stream() + .map(e -> new AbstractMap.SimpleImmutableEntry<>(e.getKey(), Base64.getEncoder().encodeToString(e.getValue().getBytes()))) + .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)); + } +} diff --git a/data-plane/core/src/test/java/dev/knative/eventing/kafka/broker/core/testing/CloudEventSerializerMock.java b/data-plane/core/src/test/java/dev/knative/eventing/kafka/broker/core/testing/CloudEventSerializerMock.java new file mode 100644 index 0000000000..c6023c4b97 --- /dev/null +++ b/data-plane/core/src/test/java/dev/knative/eventing/kafka/broker/core/testing/CloudEventSerializerMock.java @@ -0,0 +1,31 @@ +/* + * Copyright © 2018 Knative Authors (knative-dev@googlegroups.com) + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package dev.knative.eventing.kafka.broker.core.testing; + +import io.cloudevents.CloudEvent; +import io.cloudevents.kafka.CloudEventSerializer; + +// Workaround: +// Kafka 2.7 producer mock calls serialize(topic, ce) because +// "just to throw ClassCastException if serializers are not the proper ones to serialize key/value" +// https://github.com/apache/kafka/blob/3db46769baa379a0775bcd76396d24d637a55768/clients/src/main/java/org/apache/kafka/clients/producer/MockProducer.java#L306-L308 +public class CloudEventSerializerMock extends CloudEventSerializer { + @Override + public byte[] serialize(final String topic, final CloudEvent data) { + return null; + } +} diff --git a/data-plane/dispatcher/pom.xml b/data-plane/dispatcher/pom.xml index d204b8acce..40b059fb02 100644 --- a/data-plane/dispatcher/pom.xml +++ b/data-plane/dispatcher/pom.xml @@ -17,8 +17,8 @@ --> + xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" + xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd"> data-plane dev.knative.eventing.kafka.broker diff --git a/data-plane/dispatcher/src/main/java/dev/knative/eventing/kafka/broker/dispatcher/ConsumerVerticle.java b/data-plane/dispatcher/src/main/java/dev/knative/eventing/kafka/broker/dispatcher/ConsumerVerticle.java index cbc85ee188..5337530d23 100644 --- a/data-plane/dispatcher/src/main/java/dev/knative/eventing/kafka/broker/dispatcher/ConsumerVerticle.java +++ b/data-plane/dispatcher/src/main/java/dev/knative/eventing/kafka/broker/dispatcher/ConsumerVerticle.java @@ -18,15 +18,17 @@ import dev.knative.eventing.kafka.broker.core.metrics.Metrics; import io.vertx.core.AbstractVerticle; import io.vertx.core.CompositeFuture; +import io.vertx.core.Future; import io.vertx.core.Promise; import io.vertx.core.Vertx; import io.vertx.kafka.client.consumer.KafkaConsumer; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + import java.util.Objects; import java.util.Set; import java.util.function.BiFunction; 
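// Both factory types below are now asynchronous, so credentials can be resolved before the
// Kafka client is created. A minimal sketch of a consumer factory under that assumption,
// using the types introduced in this PR (credentialsFuture and baseConfigs are placeholder
// names for illustration):
//   Function<Vertx, Future<KafkaConsumer<K, V>>> consumerFactory =
//       vertx -> credentialsFuture
//           .compose(c -> KafkaClientsAuth.updateConsumerConfigs(c, baseConfigs))
//           .map(cfg -> KafkaConsumer.create(vertx, new KafkaClientOptions().setConfig(cfg)));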
import java.util.function.Function; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; /** * ConsumerVerticle is responsible for manging the consumer lifecycle. @@ -43,8 +45,8 @@ public final class ConsumerVerticle extends AbstractVerticle { private ConsumerRecordHandler handler; private final Set topics; - private final Function> consumerFactory; - private final BiFunction, ConsumerRecordHandler> recordHandler; + private final Function>> consumerFactory; + private final BiFunction, Future>> recordHandlerFactory; /** * All args constructor. @@ -54,16 +56,16 @@ public final class ConsumerVerticle extends AbstractVerticle { * @param recordHandlerFactory record handler factory. */ public ConsumerVerticle( - final Function> consumerFactory, + final Function>> consumerFactory, final Set topics, - final BiFunction, ConsumerRecordHandler> recordHandlerFactory) { + final BiFunction, Future>> recordHandlerFactory) { Objects.requireNonNull(consumerFactory, "provide consumerFactory"); Objects.requireNonNull(topics, "provide topic"); Objects.requireNonNull(recordHandlerFactory, "provide recordHandlerFactory"); this.topics = topics; - this.recordHandler = recordHandlerFactory; + this.recordHandlerFactory = recordHandlerFactory; this.consumerFactory = consumerFactory; } @@ -72,13 +74,25 @@ public ConsumerVerticle( */ @Override public void start(Promise startPromise) { - this.consumer = consumerFactory.apply(vertx); - this.consumerMeterBinder = Metrics.register(this.consumer.unwrap()); - this.handler = recordHandler.apply(vertx, this.consumer); + consumerFactory.apply(vertx) + .onSuccess(consumer -> { + if (consumer == null) { + startPromise.fail("Consumer cannot be null"); + return; + } - consumer.handler(handler); - consumer.exceptionHandler(startPromise::tryFail); - consumer.subscribe(topics, startPromise); + this.consumer = consumer; + this.consumerMeterBinder = Metrics.register(this.consumer.unwrap()); + recordHandlerFactory.apply(vertx, this.consumer) + .onSuccess(h -> { + this.handler = h; + this.consumer.handler(this.handler); + this.consumer.exceptionHandler(startPromise::tryFail); + this.consumer.subscribe(this.topics, startPromise); + }) + .onFailure(startPromise::tryFail); + }) + .onFailure(startPromise::fail); } /** diff --git a/data-plane/dispatcher/src/main/java/dev/knative/eventing/kafka/broker/dispatcher/ConsumerVerticleFactory.java b/data-plane/dispatcher/src/main/java/dev/knative/eventing/kafka/broker/dispatcher/ConsumerVerticleFactory.java index e3e94c306b..2c2731751b 100644 --- a/data-plane/dispatcher/src/main/java/dev/knative/eventing/kafka/broker/dispatcher/ConsumerVerticleFactory.java +++ b/data-plane/dispatcher/src/main/java/dev/knative/eventing/kafka/broker/dispatcher/ConsumerVerticleFactory.java @@ -18,6 +18,8 @@ import dev.knative.eventing.kafka.broker.contract.DataPlaneContract; import io.vertx.core.AbstractVerticle; +import java.util.concurrent.ExecutionException; + /** * ConsumerVerticleFactory is responsible for instantiating consumer verticles. */ @@ -30,5 +32,5 @@ public interface ConsumerVerticleFactory { * @param egress trigger data. * @return a new consumer verticle. 
*/ - AbstractVerticle get(final DataPlaneContract.Resource resource, final DataPlaneContract.Egress egress); + AbstractVerticle get(final DataPlaneContract.Resource resource, final DataPlaneContract.Egress egress) throws ExecutionException, InterruptedException; } diff --git a/data-plane/dispatcher/src/main/java/dev/knative/eventing/kafka/broker/dispatcher/Main.java b/data-plane/dispatcher/src/main/java/dev/knative/eventing/kafka/broker/dispatcher/Main.java index db23528cd6..039c90688e 100644 --- a/data-plane/dispatcher/src/main/java/dev/knative/eventing/kafka/broker/dispatcher/Main.java +++ b/data-plane/dispatcher/src/main/java/dev/knative/eventing/kafka/broker/dispatcher/Main.java @@ -15,13 +15,12 @@ */ package dev.knative.eventing.kafka.broker.dispatcher; -import static net.logstash.logback.argument.StructuredArguments.keyValue; - import dev.knative.eventing.kafka.broker.core.eventbus.ContractMessageCodec; import dev.knative.eventing.kafka.broker.core.eventbus.ContractPublisher; import dev.knative.eventing.kafka.broker.core.file.FileWatcher; import dev.knative.eventing.kafka.broker.core.metrics.Metrics; import dev.knative.eventing.kafka.broker.core.reconciler.impl.ResourcesReconcilerMessageHandler; +import dev.knative.eventing.kafka.broker.core.security.AuthProvider; import dev.knative.eventing.kafka.broker.core.tracing.OpenTelemetryVertxTracingFactory; import dev.knative.eventing.kafka.broker.core.tracing.Tracing; import dev.knative.eventing.kafka.broker.core.tracing.TracingConfig; @@ -37,17 +36,20 @@ import io.vertx.core.tracing.TracingOptions; import io.vertx.core.tracing.TracingPolicy; import io.vertx.ext.web.client.WebClientOptions; -import java.io.File; -import java.io.IOException; -import java.nio.file.FileSystems; -import java.util.concurrent.CountDownLatch; -import java.util.concurrent.TimeUnit; import net.logstash.logback.encoder.LogstashEncoder; import org.apache.kafka.clients.consumer.ConsumerConfig; import org.apache.kafka.clients.producer.ProducerConfig; import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import java.io.File; +import java.io.IOException; +import java.nio.file.FileSystems; +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.TimeUnit; + +import static net.logstash.logback.argument.StructuredArguments.keyValue; + public class Main { // Micrometer employs a naming convention that separates lowercase words with a '.' (dot) character. 
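The hunk below wires a Kubernetes-backed AuthProvider into the dispatcher's consumer verticle
factory, so per-resource credentials can be read from Secrets when consumers are created. A
minimal sketch of that wiring, assuming the factory constructed here is the
HttpConsumerVerticleFactory shown later in this diff:

    final var consumerVerticleFactory = new HttpConsumerVerticleFactory(
        consumerRecordOffsetStrategyFactory,
        consumerConfig,
        clientOptions,        // WebClientOptions for the egress HTTP client
        producerConfig,
        AuthProvider.kubernetes());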
@@ -117,7 +119,8 @@ public static void main(final String[] args) throws IOException { consumerRecordOffsetStrategyFactory, consumerConfig, clientOptions, - producerConfig + producerConfig, + AuthProvider.kubernetes() ); final var consumerDeployerVerticle = new ConsumerDeployerVerticle( diff --git a/data-plane/dispatcher/src/main/java/dev/knative/eventing/kafka/broker/dispatcher/http/HttpConsumerVerticleFactory.java b/data-plane/dispatcher/src/main/java/dev/knative/eventing/kafka/broker/dispatcher/http/HttpConsumerVerticleFactory.java index 3723946d6d..1ecb1a88e3 100644 --- a/data-plane/dispatcher/src/main/java/dev/knative/eventing/kafka/broker/dispatcher/http/HttpConsumerVerticleFactory.java +++ b/data-plane/dispatcher/src/main/java/dev/knative/eventing/kafka/broker/dispatcher/http/HttpConsumerVerticleFactory.java @@ -15,14 +15,14 @@ */ package dev.knative.eventing.kafka.broker.dispatcher.http; -import static org.apache.kafka.clients.consumer.ConsumerConfig.GROUP_ID_CONFIG; - import dev.knative.eventing.kafka.broker.contract.DataPlaneContract; -import dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Egress; import dev.knative.eventing.kafka.broker.contract.DataPlaneContract.EgressConfig; -import dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Resource; import dev.knative.eventing.kafka.broker.core.filter.Filter; import dev.knative.eventing.kafka.broker.core.filter.impl.AttributesFilter; +import dev.knative.eventing.kafka.broker.core.security.AuthProvider; +import dev.knative.eventing.kafka.broker.core.security.Credentials; +import dev.knative.eventing.kafka.broker.core.security.KafkaClientsAuth; +import dev.knative.eventing.kafka.broker.core.security.PlaintextCredentials; import dev.knative.eventing.kafka.broker.dispatcher.ConsumerRecordHandler; import dev.knative.eventing.kafka.broker.dispatcher.ConsumerRecordOffsetStrategyFactory; import dev.knative.eventing.kafka.broker.dispatcher.ConsumerRecordSender; @@ -43,6 +43,9 @@ import io.vertx.kafka.client.common.KafkaClientOptions; import io.vertx.kafka.client.consumer.KafkaConsumer; import io.vertx.kafka.client.producer.KafkaProducer; +import org.apache.kafka.clients.consumer.ConsumerConfig; +import org.apache.kafka.clients.producer.ProducerConfig; + import java.util.AbstractMap.SimpleImmutableEntry; import java.util.HashMap; import java.util.HashSet; @@ -53,8 +56,8 @@ import java.util.function.BiFunction; import java.util.function.Function; import java.util.stream.Collectors; -import org.apache.kafka.clients.consumer.ConsumerConfig; -import org.apache.kafka.clients.producer.ProducerConfig; + +import static org.apache.kafka.clients.consumer.ConsumerConfig.GROUP_ID_CONFIG; public class HttpConsumerVerticleFactory implements ConsumerVerticleFactory { @@ -65,6 +68,7 @@ public class HttpConsumerVerticleFactory implements ConsumerVerticleFactory { private final WebClientOptions webClientOptions; private final Map producerConfigs; private final ConsumerRecordOffsetStrategyFactory consumerRecordOffsetStrategyFactory; + private final AuthProvider authProvider; /** * All args constructor. @@ -73,12 +77,14 @@ public class HttpConsumerVerticleFactory implements ConsumerVerticleFactory { * @param consumerConfigs base consumer configurations. * @param webClientOptions web client options. * @param producerConfigs base producer configurations. + * @param authProvider auth provider. 
*/ public HttpConsumerVerticleFactory( final ConsumerRecordOffsetStrategyFactory consumerRecordOffsetStrategyFactory, final Properties consumerConfigs, final WebClientOptions webClientOptions, - final Properties producerConfigs) { + final Properties producerConfigs, + final AuthProvider authProvider) { Objects.requireNonNull(consumerRecordOffsetStrategyFactory, "provide consumerRecordOffsetStrategyFactory"); Objects.requireNonNull(consumerConfigs, "provide consumerConfigs"); @@ -95,6 +101,7 @@ public HttpConsumerVerticleFactory( .map(e -> new SimpleImmutableEntry<>(e.getKey().toString(), e.getValue().toString())) .collect(Collectors.toMap(Entry::getKey, Entry::getValue)); this.webClientOptions = webClientOptions; + this.authProvider = authProvider; } /** @@ -106,11 +113,33 @@ public AbstractVerticle get(final DataPlaneContract.Resource resource, final Dat Objects.requireNonNull(resource, "provide resource"); Objects.requireNonNull(egress, "provide egress"); - final Function> consumerFactory = createConsumerFactory(resource, egress); + // Consumer and producer configs are shared objects and they act as a prototype for each instance. + final var consumerConfigs = new HashMap<>(this.consumerConfigs); + consumerConfigs.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, resource.getBootstrapServers()); + consumerConfigs.put(GROUP_ID_CONFIG, egress.getConsumerGroup()); + + final var producerConfigs = new HashMap<>(this.producerConfigs); + producerConfigs.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, resource.getBootstrapServers()); + producerConfigs.put(ProducerConfig.INTERCEPTOR_CLASSES_CONFIG, PartitionKeyExtensionInterceptor.class.getName()); + + final Future credentialsFuture = resource.hasAuthSecret() ? + authProvider.getCredentials(resource.getAuthSecret().getNamespace(), resource.getAuthSecret().getName()) + : Future.succeededFuture(new PlaintextCredentials()); + + final Function>> consumerFactory = createConsumerFactory( + consumerConfigs, + resource, + credentialsFuture + ); + + final Function>> producerFactory = createProducerFactory( + producerConfigs, + resource, + credentialsFuture + ); - final BiFunction, ConsumerRecordHandler>> recordHandlerFactory = (vertx, consumer) -> { + final BiFunction, Future>>> recordHandlerFactory = (vertx, consumer) -> { - final var producer = createProducer(vertx, resource, egress); final var circuitBreakerOptions = createCircuitBreakerOptions(resource); final var egressConfig = resource.getEgressConfig(); @@ -125,48 +154,46 @@ public AbstractVerticle get(final DataPlaneContract.Resource resource, final Dat ? NO_DLQ_SENDER : createConsumerRecordSender(vertx, egressConfig.getDeadLetter(), circuitBreakerOptions, egressConfig); - return new ConsumerRecordHandler<>( - egressSubscriberSender, - egress.hasFilter() ? new AttributesFilter(egress.getFilter().getAttributesMap()) : Filter.noop(), - this.consumerRecordOffsetStrategyFactory.get(consumer, resource, egress), - new HttpSinkResponseHandler(vertx, resource.getTopics(0), producer), - egressDeadLetterSender - ); + return producerFactory.apply(vertx) + .map(producer -> new ConsumerRecordHandler<>( + egressSubscriberSender, + egress.hasFilter() ? 
new AttributesFilter(egress.getFilter().getAttributesMap()) : Filter.noop(), + this.consumerRecordOffsetStrategyFactory.get(consumer, resource, egress), + new HttpSinkResponseHandler(vertx, resource.getTopics(0), producer), + egressDeadLetterSender + )); }; return new ConsumerVerticle<>(consumerFactory, new HashSet<>(resource.getTopicsList()), recordHandlerFactory); } - protected Function> createConsumerFactory( + protected Function>> createConsumerFactory( + final Map consumerConfigs, final DataPlaneContract.Resource resource, - final DataPlaneContract.Egress egress) { - - // this.consumerConfigs is a shared object and it acts as a prototype for each consumer instance. - final var consumerConfigs = new HashMap<>(this.consumerConfigs); - - consumerConfigs.put(GROUP_ID_CONFIG, egress.getConsumerGroup()); - consumerConfigs.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, resource.getBootstrapServers()); + final Future credentialsFuture) { + return vertx -> credentialsFuture + .compose(credentials -> KafkaClientsAuth.updateConsumerConfigs(credentials, consumerConfigs)) + // Note: Do not use consumerConfigs as parameter, use configs (return value) + .map(configs -> createConsumer(vertx, configs)); + } + private static KafkaConsumer createConsumer(final Vertx vertx, + final Map consumerConfigs) { final var opt = new KafkaClientOptions() .setConfig(consumerConfigs) .setTracingPolicy(TracingPolicy.PROPAGATE); - return vertx -> KafkaConsumer.create(vertx, opt); + return KafkaConsumer.create(vertx, opt); } - protected KafkaProducer createProducer( - final Vertx vertx, - final Resource resource, - final Egress egress) { - - // producerConfigs is a shared object and it acts as a prototype for each producer instance. - final var producerConfigs = new HashMap<>(this.producerConfigs); - - // TODO create a single producer per bootstrap servers. 
- producerConfigs.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, resource.getBootstrapServers()); - producerConfigs.put(ProducerConfig.INTERCEPTOR_CLASSES_CONFIG, PartitionKeyExtensionInterceptor.class.getName()); - - return KafkaProducer.create(vertx, producerConfigs); + protected Function>> createProducerFactory( + final Map producerConfigs, + final DataPlaneContract.Resource resource, + final Future credentialsFuture) { + return vertx -> credentialsFuture + .compose(credentials -> KafkaClientsAuth.updateProducerConfigs(credentials, producerConfigs)) + // Note: Do not use producerConfigs as parameter, use configs (return value) + .map(configs -> KafkaProducer.create(vertx, configs)); } private ConsumerRecordSender> createConsumerRecordSender( diff --git a/data-plane/dispatcher/src/test/java/dev/knative/eventing/kafka/broker/dispatcher/ConsumerVerticleTest.java b/data-plane/dispatcher/src/test/java/dev/knative/eventing/kafka/broker/dispatcher/ConsumerVerticleTest.java index 9871c013e6..034bd09205 100644 --- a/data-plane/dispatcher/src/test/java/dev/knative/eventing/kafka/broker/dispatcher/ConsumerVerticleTest.java +++ b/data-plane/dispatcher/src/test/java/dev/knative/eventing/kafka/broker/dispatcher/ConsumerVerticleTest.java @@ -15,13 +15,6 @@ */ package dev.knative.eventing.kafka.broker.dispatcher; -import static org.assertj.core.api.Assertions.assertThat; -import static org.mockito.ArgumentMatchers.any; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.times; -import static org.mockito.Mockito.verify; -import static org.mockito.Mockito.when; - import dev.knative.eventing.kafka.broker.core.metrics.Metrics; import io.vertx.core.AsyncResult; import io.vertx.core.Future; @@ -33,10 +26,6 @@ import io.vertx.kafka.client.consumer.KafkaConsumer; import io.vertx.micrometer.MicrometerMetricsOptions; import io.vertx.micrometer.backends.BackendRegistries; -import java.util.Arrays; -import java.util.Set; -import java.util.concurrent.atomic.AtomicBoolean; -import java.util.stream.Collectors; import org.apache.kafka.clients.consumer.MockConsumer; import org.apache.kafka.clients.consumer.OffsetResetStrategy; import org.apache.kafka.common.TopicPartition; @@ -44,6 +33,18 @@ import org.junit.jupiter.api.Test; import org.junit.jupiter.api.extension.ExtendWith; +import java.util.Arrays; +import java.util.Set; +import java.util.concurrent.atomic.AtomicBoolean; +import java.util.stream.Collectors; + +import static org.assertj.core.api.Assertions.assertThat; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; + @ExtendWith(VertxExtension.class) public class ConsumerVerticleTest { @@ -58,9 +59,9 @@ public void subscribedToTopic(final Vertx vertx, final VertxTestContext context) final var topic = "topic1"; final var verticle = new ConsumerVerticle<>( - v -> KafkaConsumer.create(v, consumer), + v -> Future.succeededFuture(KafkaConsumer.create(v, consumer)), Set.of(topic), - (a, b) -> new ConsumerRecordHandler<>( + (a, b) -> Future.succeededFuture(new ConsumerRecordHandler<>( ConsumerRecordSender.create(Future.failedFuture("subscriber send called"), Future.succeededFuture()), value -> false, (ConsumerRecordOffsetStrategy) mock(ConsumerRecordOffsetStrategy.class), @@ -69,7 +70,7 @@ public void subscribedToTopic(final Vertx vertx, final VertxTestContext context) response -> Future.succeededFuture() ), 
ConsumerRecordSender.create(Future.failedFuture("DLQ send called"), Future.succeededFuture()) - ) + )) ); final Promise promise = Promise.promise(); @@ -91,9 +92,9 @@ public void stop(final Vertx vertx, final VertxTestContext context) { final var topic = "topic1"; final var verticle = new ConsumerVerticle<>( - v -> KafkaConsumer.create(v, consumer), + v -> Future.succeededFuture(KafkaConsumer.create(v, consumer)), Set.of(topic), - (a, b) -> new ConsumerRecordHandler<>( + (a, b) -> Future.succeededFuture(new ConsumerRecordHandler<>( ConsumerRecordSender.create(Future.failedFuture("subscriber send called"), Future.succeededFuture()), value -> false, (ConsumerRecordOffsetStrategy) mock(ConsumerRecordOffsetStrategy.class), @@ -102,7 +103,7 @@ public void stop(final Vertx vertx, final VertxTestContext context) { response -> Future.succeededFuture() ), ConsumerRecordSender.create(Future.failedFuture("DLQ send called"), Future.succeededFuture()) - ) + )) ); final Promise deployPromise = Promise.promise(); @@ -152,9 +153,9 @@ public void shouldCloseEverything(final Vertx vertx, final VertxTestContext cont final var sinkClosed = new AtomicBoolean(false); final var verticle = new ConsumerVerticle<>( - v -> consumer, + v -> Future.succeededFuture(consumer), Arrays.stream(topics).collect(Collectors.toSet()), - (v, c) -> new ConsumerRecordHandler<>( + (v, c) -> Future.succeededFuture(new ConsumerRecordHandler<>( new ConsumerRecordSenderMock<>( () -> { consumerRecordSenderClosed.set(true); @@ -178,7 +179,7 @@ record -> Future.succeededFuture() }, record -> Future.succeededFuture() ) - ) + )) ); vertx.deployVerticle(verticle) diff --git a/data-plane/dispatcher/src/test/java/dev/knative/eventing/kafka/broker/dispatcher/http/HttpConsumerVerticleFactoryTest.java b/data-plane/dispatcher/src/test/java/dev/knative/eventing/kafka/broker/dispatcher/http/HttpConsumerVerticleFactoryTest.java index c325f58370..188838ba37 100644 --- a/data-plane/dispatcher/src/test/java/dev/knative/eventing/kafka/broker/dispatcher/http/HttpConsumerVerticleFactoryTest.java +++ b/data-plane/dispatcher/src/test/java/dev/knative/eventing/kafka/broker/dispatcher/http/HttpConsumerVerticleFactoryTest.java @@ -15,21 +15,12 @@ */ package dev.knative.eventing.kafka.broker.dispatcher.http; -import static org.apache.kafka.clients.consumer.ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG; -import static org.apache.kafka.clients.consumer.ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG; -import static org.apache.kafka.clients.consumer.ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG; -import static org.apache.kafka.clients.producer.ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG; -import static org.apache.kafka.clients.producer.ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG; -import static org.apache.kafka.clients.producer.ProducerConfig.INTERCEPTOR_CLASSES_CONFIG; -import static org.assertj.core.api.Assertions.assertThat; -import static org.junit.jupiter.api.Assertions.assertDoesNotThrow; -import static org.mockito.Mockito.mock; - import com.google.protobuf.Empty; import dev.knative.eventing.kafka.broker.contract.DataPlaneContract; import dev.knative.eventing.kafka.broker.contract.DataPlaneContract.BackoffPolicy; import dev.knative.eventing.kafka.broker.contract.DataPlaneContract.EgressConfig; import dev.knative.eventing.kafka.broker.core.metrics.Metrics; +import dev.knative.eventing.kafka.broker.core.security.AuthProvider; import dev.knative.eventing.kafka.broker.dispatcher.ConsumerRecordOffsetStrategyFactory; import io.cloudevents.kafka.CloudEventDeserializer; 
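// The tests below construct the factory with mock(AuthProvider.class), so no Kubernetes
// Secret lookup is attempted; production wiring passes AuthProvider.kubernetes() instead,
// as in the dispatcher Main changes above.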
import io.cloudevents.kafka.CloudEventSerializer; @@ -40,12 +31,23 @@ import io.vertx.junit5.VertxExtension; import io.vertx.micrometer.MicrometerMetricsOptions; import io.vertx.micrometer.backends.BackendRegistries; -import java.util.Properties; import org.apache.kafka.common.serialization.StringDeserializer; import org.apache.kafka.common.serialization.StringSerializer; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.extension.ExtendWith; +import java.util.Properties; + +import static org.apache.kafka.clients.consumer.ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG; +import static org.apache.kafka.clients.consumer.ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG; +import static org.apache.kafka.clients.consumer.ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG; +import static org.apache.kafka.clients.producer.ProducerConfig.INTERCEPTOR_CLASSES_CONFIG; +import static org.apache.kafka.clients.producer.ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG; +import static org.apache.kafka.clients.producer.ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG; +import static org.assertj.core.api.Assertions.assertThat; +import static org.junit.jupiter.api.Assertions.assertDoesNotThrow; +import static org.mockito.Mockito.mock; + @ExtendWith(VertxExtension.class) public class HttpConsumerVerticleFactoryTest { @@ -71,8 +73,8 @@ public void shouldAlwaysSucceed(final Vertx vertx) { ConsumerRecordOffsetStrategyFactory.unordered(mock(Counter.class)), consumerProperties, new WebClientOptions(), - producerConfigs - ); + producerConfigs, + mock(AuthProvider.class)); final var egress = DataPlaneContract.Egress.newBuilder() .setConsumerGroup("1234") @@ -114,8 +116,8 @@ public void shouldNotThrowIllegalArgumentExceptionIfNotDLQ() { ConsumerRecordOffsetStrategyFactory.unordered(mock(Counter.class)), consumerProperties, new WebClientOptions(), - producerConfigs - ); + producerConfigs, + mock(AuthProvider.class)); final var egress = DataPlaneContract.Egress.newBuilder() .setConsumerGroup("1234") diff --git a/data-plane/dispatcher/src/test/java/dev/knative/eventing/kafka/broker/dispatcher/http/HttpSinkResponseHandlerTest.java b/data-plane/dispatcher/src/test/java/dev/knative/eventing/kafka/broker/dispatcher/http/HttpSinkResponseHandlerTest.java index 01257230c7..ee0df25cc9 100644 --- a/data-plane/dispatcher/src/test/java/dev/knative/eventing/kafka/broker/dispatcher/http/HttpSinkResponseHandlerTest.java +++ b/data-plane/dispatcher/src/test/java/dev/knative/eventing/kafka/broker/dispatcher/http/HttpSinkResponseHandlerTest.java @@ -15,16 +15,11 @@ */ package dev.knative.eventing.kafka.broker.dispatcher.http; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.times; -import static org.mockito.Mockito.verify; -import static org.mockito.Mockito.when; - import dev.knative.eventing.kafka.broker.core.metrics.Metrics; +import dev.knative.eventing.kafka.broker.core.testing.CloudEventSerializerMock; import io.cloudevents.CloudEvent; import io.cloudevents.core.provider.EventFormatProvider; import io.cloudevents.core.v1.CloudEventBuilder; -import io.cloudevents.kafka.CloudEventSerializer; import io.vertx.core.Future; import io.vertx.core.MultiMap; import io.vertx.core.Vertx; @@ -36,8 +31,6 @@ import io.vertx.kafka.client.producer.KafkaProducer; import io.vertx.micrometer.MicrometerMetricsOptions; import io.vertx.micrometer.backends.BackendRegistries; -import java.net.URI; -import java.util.concurrent.CountDownLatch; import org.apache.kafka.clients.producer.MockProducer; import 
org.apache.kafka.clients.producer.ProducerRecord; import org.apache.kafka.common.serialization.StringSerializer; @@ -47,6 +40,14 @@ import org.junit.jupiter.api.parallel.Execution; import org.junit.jupiter.api.parallel.ExecutionMode; +import java.net.URI; +import java.util.concurrent.CountDownLatch; + +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; + @Execution(ExecutionMode.CONCURRENT) @ExtendWith(VertxExtension.class) public class HttpSinkResponseHandlerTest { @@ -62,7 +63,7 @@ public void shouldSucceedOnUnknownEncodingAndEmptyResponse(final Vertx vertx, fi final var producer = new MockProducer<>( true, new StringSerializer(), - new CloudEventSerializer() + new CloudEventSerializerMock() ); final var handler = new HttpSinkResponseHandler( vertx, @@ -86,7 +87,7 @@ public void shouldSucceedOnUnknownEncodingAndNullResponseBody(final Vertx vertx, final var producer = new MockProducer<>( true, new StringSerializer(), - new CloudEventSerializer() + new CloudEventSerializerMock() ); final var handler = new HttpSinkResponseHandler( vertx, @@ -112,7 +113,7 @@ public void shouldFailOnUnknownEncodingAndNonEmptyResponse(final Vertx vertx, fi final var producer = new MockProducer<>( true, new StringSerializer(), - new CloudEventSerializer() + new CloudEventSerializerMock() ); final var handler = new HttpSinkResponseHandler( vertx, @@ -138,7 +139,7 @@ public void shouldSendRecord(final Vertx vertx, final VertxTestContext context) final var producer = new MockProducer<>( true, new StringSerializer(), - new CloudEventSerializer() + new CloudEventSerializerMock() ); final var handler = new HttpSinkResponseHandler( vertx, diff --git a/data-plane/dispatcher/src/test/java/dev/knative/eventing/kafka/broker/dispatcher/integration/ConsumerVerticleFactoryMock.java b/data-plane/dispatcher/src/test/java/dev/knative/eventing/kafka/broker/dispatcher/integration/ConsumerVerticleFactoryMock.java index cd1e338986..0d08c68428 100644 --- a/data-plane/dispatcher/src/test/java/dev/knative/eventing/kafka/broker/dispatcher/integration/ConsumerVerticleFactoryMock.java +++ b/data-plane/dispatcher/src/test/java/dev/knative/eventing/kafka/broker/dispatcher/integration/ConsumerVerticleFactoryMock.java @@ -16,22 +16,17 @@ package dev.knative.eventing.kafka.broker.dispatcher.integration; import dev.knative.eventing.kafka.broker.contract.DataPlaneContract; -import dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Egress; -import dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Resource; +import dev.knative.eventing.kafka.broker.core.security.AuthProvider; +import dev.knative.eventing.kafka.broker.core.security.Credentials; import dev.knative.eventing.kafka.broker.dispatcher.ConsumerRecordOffsetStrategyFactory; import dev.knative.eventing.kafka.broker.dispatcher.http.HttpConsumerVerticleFactory; import io.cloudevents.CloudEvent; import io.cloudevents.kafka.CloudEventSerializer; +import io.vertx.core.Future; import io.vertx.core.Vertx; import io.vertx.ext.web.client.WebClientOptions; import io.vertx.kafka.client.consumer.KafkaConsumer; import io.vertx.kafka.client.producer.KafkaProducer; -import java.util.List; -import java.util.Map; -import java.util.Properties; -import java.util.concurrent.ConcurrentHashMap; -import java.util.function.Function; -import java.util.stream.Collectors; import org.apache.kafka.clients.consumer.ConsumerRecord; import org.apache.kafka.clients.consumer.MockConsumer; 
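// This mock overrides the asynchronous factory hooks introduced in this PR and returns
// already-succeeded futures, e.g. Future.succeededFuture(KafkaProducer.create(vertx, producer)),
// so the integration tests never resolve real credentials.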
import org.apache.kafka.clients.consumer.OffsetResetStrategy; @@ -39,6 +34,15 @@ import org.apache.kafka.common.TopicPartition; import org.apache.kafka.common.serialization.StringSerializer; +import java.util.List; +import java.util.Map; +import java.util.Properties; +import java.util.concurrent.ConcurrentHashMap; +import java.util.function.Function; +import java.util.stream.Collectors; + +import static org.mockito.Mockito.mock; + public class ConsumerVerticleFactoryMock extends HttpConsumerVerticleFactory { // trigger.id() -> Mock*er @@ -52,38 +56,37 @@ public ConsumerVerticleFactoryMock( final Properties producerConfigs, final ConsumerRecordOffsetStrategyFactory consumerRecordOffsetStrategyFactory) { - super(consumerRecordOffsetStrategyFactory, consumerConfigs, new WebClientOptions(), producerConfigs); + super(consumerRecordOffsetStrategyFactory, consumerConfigs, new WebClientOptions(), producerConfigs, mock(AuthProvider.class)); mockProducer = new ConcurrentHashMap<>(); mockConsumer = new ConcurrentHashMap<>(); } @Override - protected KafkaProducer createProducer( - final Vertx vertx, - final Resource resource, - final Egress egress) { - - final var producer = new MockProducer<>( - true, - new StringSerializer(), - new CloudEventSerializer() - ); + protected Function>> createProducerFactory( + final Map producerConfigs, + final DataPlaneContract.Resource resource, + final Future credentialsFuture) { - mockProducer.put(egress.getConsumerGroup(), producer); + return vertx -> { + final var producer = new MockProducer<>( + true, + new StringSerializer(), + new CloudEventSerializer() + ); - return KafkaProducer.create(vertx, producer); + return Future.succeededFuture(KafkaProducer.create(vertx, producer)); + }; } @Override - protected Function> createConsumerFactory( + protected Function>> createConsumerFactory( + final Map consumerConfigs, final DataPlaneContract.Resource resource, - final DataPlaneContract.Egress egress) { + final Future credentialsFuture) { return vertx -> { final var consumer = new MockConsumer(OffsetResetStrategy.LATEST); - mockConsumer.put(egress.getConsumerGroup(), consumer); - consumer.schedulePollTask(() -> { consumer.unsubscribe(); @@ -105,7 +108,7 @@ protected Function> createConsumerFacto } }); - return KafkaConsumer.create(vertx, consumer); + return Future.succeededFuture(KafkaConsumer.create(vertx, consumer)); }; } diff --git a/data-plane/docker/generate_jdk.sh b/data-plane/docker/generate_jdk.sh index ef7a097130..81b43842c7 100755 --- a/data-plane/docker/generate_jdk.sh +++ b/data-plane/docker/generate_jdk.sh @@ -28,7 +28,7 @@ echo "Computed mods = '$MODS'" # Remove compiler, sql, management modules MODS=$(echo $MODS | tr , '\n' | sed '/^java.compiler/d' | sed -z 's/\n/,/g;s/,$/\n/') # Patch adding the dns -MODS="$MODS,jdk.naming.dns" +MODS="$MODS,jdk.naming.dns,jdk.crypto.ec" echo "Patched modules shipped with the generated jdk = '$MODS'" jlink --verbose --no-header-files --no-man-pages --compress=2 --strip-debug --add-modules "$MODS" --output /app/jdk diff --git a/data-plane/pom.xml b/data-plane/pom.xml index d2b6869179..3acc7b75bc 100644 --- a/data-plane/pom.xml +++ b/data-plane/pom.xml @@ -17,8 +17,8 @@ --> + xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" + xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd"> 4.0.0 dev.knative.eventing.kafka.broker @@ -55,6 +55,9 @@ 4.0.3 5.7.0 3.7.7 + 5.0.0-alpha-3 + 2.7.0 + 1.4.0.Final @@ -87,6 +90,18 @@ ${vertx.version} pom import + + + org.apache.kafka + 
kafka-clients + + + + + + org.apache.kafka + kafka-clients + ${kafka.version} @@ -106,6 +121,31 @@ import + + + io.fabric8 + kubernetes-client + ${fabric8.kubernetes.version} + + + okhttp + com.squareup.okhttp3 + + + + + io.fabric8 + kubernetes-server-mock + ${fabric8.kubernetes.version} + test + + + okhttp + com.squareup.okhttp3 + + + + com.fasterxml.jackson @@ -193,6 +233,7 @@ io.vertx vertx-junit5 ${vertx.version} + test org.junit.jupiter @@ -208,11 +249,19 @@ org.junit.jupiter junit-jupiter ${junit.jupiter.version} + test + + + org.junit.jupiter + junit-jupiter-migrationsupport + ${junit.jupiter.version} + test org.mockito mockito-junit-jupiter ${mokito.junit.jupiter.version} + test org.junit.jupiter @@ -232,6 +281,30 @@ ${awaitility.version} test + + + + io.debezium + debezium-core + ${debezium.version} + test + + + io.debezium + debezium-core + ${debezium.version} + test-jar + test + + + org.apache.kafka + kafka_2.12 + ${kafka.version} + test + diff --git a/data-plane/receiver/pom.xml b/data-plane/receiver/pom.xml index 4b368eb39c..87c5e08b27 100644 --- a/data-plane/receiver/pom.xml +++ b/data-plane/receiver/pom.xml @@ -17,8 +17,8 @@ --> + xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" + xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd"> data-plane dev.knative.eventing.kafka.broker diff --git a/data-plane/receiver/src/main/java/dev/knative/eventing/kafka/broker/receiver/Main.java b/data-plane/receiver/src/main/java/dev/knative/eventing/kafka/broker/receiver/Main.java index b3717a8de1..695fd71f66 100644 --- a/data-plane/receiver/src/main/java/dev/knative/eventing/kafka/broker/receiver/Main.java +++ b/data-plane/receiver/src/main/java/dev/knative/eventing/kafka/broker/receiver/Main.java @@ -15,13 +15,12 @@ */ package dev.knative.eventing.kafka.broker.receiver; -import static net.logstash.logback.argument.StructuredArguments.keyValue; - import dev.knative.eventing.kafka.broker.core.eventbus.ContractMessageCodec; import dev.knative.eventing.kafka.broker.core.eventbus.ContractPublisher; import dev.knative.eventing.kafka.broker.core.file.FileWatcher; import dev.knative.eventing.kafka.broker.core.metrics.Metrics; import dev.knative.eventing.kafka.broker.core.reconciler.impl.ResourcesReconcilerMessageHandler; +import dev.knative.eventing.kafka.broker.core.security.AuthProvider; import dev.knative.eventing.kafka.broker.core.tracing.OpenTelemetryVertxTracingFactory; import dev.knative.eventing.kafka.broker.core.tracing.Tracing; import dev.knative.eventing.kafka.broker.core.tracing.TracingConfig; @@ -29,7 +28,6 @@ import dev.knative.eventing.kafka.broker.core.utils.Shutdown; import io.cloudevents.CloudEvent; import io.cloudevents.kafka.CloudEventSerializer; -import io.micrometer.core.instrument.binder.kafka.KafkaClientMetrics; import io.opentelemetry.api.OpenTelemetry; import io.vertx.core.Vertx; import io.vertx.core.VertxOptions; @@ -37,17 +35,20 @@ import io.vertx.core.tracing.TracingOptions; import io.vertx.core.tracing.TracingPolicy; import io.vertx.kafka.client.producer.KafkaProducer; +import net.logstash.logback.encoder.LogstashEncoder; +import org.apache.kafka.clients.producer.ProducerConfig; +import org.apache.kafka.common.serialization.StringSerializer; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + import java.io.File; import java.io.IOException; import java.nio.file.FileSystems; import java.util.concurrent.CountDownLatch; import java.util.concurrent.TimeUnit; import java.util.function.Function; -import 
net.logstash.logback.encoder.LogstashEncoder; -import org.apache.kafka.clients.producer.ProducerConfig; -import org.apache.kafka.common.serialization.StringSerializer; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; + +import static net.logstash.logback.argument.StructuredArguments.keyValue; public class Main { @@ -106,6 +107,7 @@ public static void main(final String[] args) throws IOException { final Function> handlerFactory = v -> new RequestMapper<>( v, + AuthProvider.kubernetes(), producerConfigs, new CloudEventRequestToRecordMapper(vertx), properties -> KafkaProducer.create(v, properties), diff --git a/data-plane/receiver/src/main/java/dev/knative/eventing/kafka/broker/receiver/RequestMapper.java b/data-plane/receiver/src/main/java/dev/knative/eventing/kafka/broker/receiver/RequestMapper.java index 0346d85e96..b1a97ac1ea 100644 --- a/data-plane/receiver/src/main/java/dev/knative/eventing/kafka/broker/receiver/RequestMapper.java +++ b/data-plane/receiver/src/main/java/dev/knative/eventing/kafka/broker/receiver/RequestMapper.java @@ -15,15 +15,11 @@ */ package dev.knative.eventing.kafka.broker.receiver; -import static io.netty.handler.codec.http.HttpResponseStatus.ACCEPTED; -import static io.netty.handler.codec.http.HttpResponseStatus.BAD_REQUEST; -import static io.netty.handler.codec.http.HttpResponseStatus.NOT_FOUND; -import static io.netty.handler.codec.http.HttpResponseStatus.SERVICE_UNAVAILABLE; -import static net.logstash.logback.argument.StructuredArguments.keyValue; - import dev.knative.eventing.kafka.broker.contract.DataPlaneContract; import dev.knative.eventing.kafka.broker.core.metrics.Metrics; import dev.knative.eventing.kafka.broker.core.reconciler.IngressReconcilerListener; +import dev.knative.eventing.kafka.broker.core.security.AuthProvider; +import dev.knative.eventing.kafka.broker.core.security.KafkaClientsAuth; import io.cloudevents.core.message.Encoding; import io.cloudevents.jackson.JsonFormat; import io.cloudevents.kafka.CloudEventSerializer; @@ -34,14 +30,21 @@ import io.vertx.core.Vertx; import io.vertx.core.http.HttpServerRequest; import io.vertx.kafka.client.producer.KafkaProducer; +import org.apache.kafka.clients.producer.ProducerConfig; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + import java.util.HashMap; import java.util.Map; import java.util.Objects; import java.util.Properties; import java.util.function.Function; -import org.apache.kafka.clients.producer.ProducerConfig; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; + +import static io.netty.handler.codec.http.HttpResponseStatus.ACCEPTED; +import static io.netty.handler.codec.http.HttpResponseStatus.BAD_REQUEST; +import static io.netty.handler.codec.http.HttpResponseStatus.NOT_FOUND; +import static io.netty.handler.codec.http.HttpResponseStatus.SERVICE_UNAVAILABLE; +import static net.logstash.logback.argument.StructuredArguments.keyValue; /** * RequestHandler is responsible for mapping HTTP requests to Kafka records, sending records to Kafka through the Kafka @@ -72,18 +75,11 @@ public class RequestMapper implements Handler, IngressR private final Counter badRequestCounter; private final Counter produceEventsCounter; private final Vertx vertx; + private final AuthProvider authProvider; - /** - * Create a new Request handler. 
- * - * @param producerConfigs common producers configurations - * @param requestToRecordMapper request to record mapper - * @param producerCreator creates a producer - * @param badRequestCounter count bad request responses - * @param produceEventsCounter count events sent to Kafka - */ public RequestMapper( final Vertx vertx, + final AuthProvider authProvider, final Properties producerConfigs, final RequestToRecordMapper requestToRecordMapper, final Function> producerCreator, @@ -96,6 +92,7 @@ public RequestMapper( Objects.requireNonNull(producerCreator, "provide producerCreator"); this.vertx = vertx; + this.authProvider = authProvider; this.producerConfigs = producerConfigs; this.requestToRecordMapper = requestToRecordMapper; this.producerCreator = producerCreator; @@ -162,8 +159,20 @@ public void handle(final HttpServerRequest request) { public Future onNewIngress( DataPlaneContract.Resource resource, DataPlaneContract.Ingress ingress) { - // Compute the properties + final var producerProps = (Properties) this.producerConfigs.clone(); + if (resource.hasAuthSecret()) { + return authProvider.getCredentials(resource.getAuthSecret().getNamespace(), resource.getAuthSecret().getName()) + .compose(credentials -> KafkaClientsAuth.updateConfigsFromProps(credentials, producerProps)) + .compose(configs -> onNewIngress(resource, ingress, configs)); + } + return onNewIngress(resource, ingress, producerProps); + } + + private Future onNewIngress(final DataPlaneContract.Resource resource, + final DataPlaneContract.Ingress ingress, + final Properties producerProps) { + // Compute the properties. producerProps.setProperty(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, resource.getBootstrapServers()); if (ingress.getContentMode() != DataPlaneContract.ContentMode.UNRECOGNIZED) { producerProps.setProperty(CloudEventSerializer.ENCODING_CONFIG, encoding(ingress.getContentMode())); @@ -293,7 +302,7 @@ private static class IngressInfo { private final Properties producerProperties; IngressInfo(final KafkaProducer producer, final String topic, final String path, - final Properties producerProperties) { + final Properties producerProperties) { this.producer = producer; this.topic = topic; this.path = path; diff --git a/data-plane/receiver/src/test/java/dev/knative/eventing/kafka/broker/receiver/ReceiverVerticleTest.java b/data-plane/receiver/src/test/java/dev/knative/eventing/kafka/broker/receiver/ReceiverVerticleTest.java index 4dd30b3f26..64eaf9626d 100644 --- a/data-plane/receiver/src/test/java/dev/knative/eventing/kafka/broker/receiver/ReceiverVerticleTest.java +++ b/data-plane/receiver/src/test/java/dev/knative/eventing/kafka/broker/receiver/ReceiverVerticleTest.java @@ -15,9 +15,6 @@ */ package dev.knative.eventing.kafka.broker.receiver; -import static org.assertj.core.api.Assertions.assertThat; -import static org.mockito.Mockito.mock; - import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.databind.node.FloatNode; import com.fasterxml.jackson.databind.node.TextNode; @@ -26,6 +23,7 @@ import dev.knative.eventing.kafka.broker.core.eventbus.ContractPublisher; import dev.knative.eventing.kafka.broker.core.metrics.Metrics; import dev.knative.eventing.kafka.broker.core.reconciler.impl.ResourcesReconcilerMessageHandler; +import dev.knative.eventing.kafka.broker.core.testing.CloudEventSerializerMock; import io.cloudevents.CloudEvent; import io.cloudevents.core.v1.CloudEventBuilder; import io.cloudevents.http.vertx.VertxMessageFactory; @@ -45,13 +43,6 @@ import 
io.vertx.kafka.client.producer.KafkaProducer; import io.vertx.micrometer.MicrometerMetricsOptions; import io.vertx.micrometer.backends.BackendRegistries; -import java.net.URI; -import java.util.Arrays; -import java.util.List; -import java.util.Properties; -import java.util.concurrent.CountDownLatch; -import java.util.concurrent.TimeUnit; -import java.util.function.Function; import org.apache.kafka.clients.producer.MockProducer; import org.apache.kafka.clients.producer.ProducerRecord; import org.apache.kafka.common.serialization.StringSerializer; @@ -61,6 +52,17 @@ import org.junit.jupiter.params.ParameterizedTest; import org.junit.jupiter.params.provider.MethodSource; +import java.net.URI; +import java.util.Arrays; +import java.util.List; +import java.util.Properties; +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.TimeUnit; +import java.util.function.Function; + +import static org.assertj.core.api.Assertions.assertThat; +import static org.mockito.Mockito.mock; + @ExtendWith(VertxExtension.class) public class ReceiverVerticleTest { @@ -86,7 +88,7 @@ public void setUp(final Vertx vertx, final VertxTestContext testContext) { ReceiverVerticleTest.mockProducer = new MockProducer<>( true, new StringSerializer(), - new CloudEventSerializer() + new CloudEventSerializerMock() ); KafkaProducer producer = KafkaProducer.create(vertx, mockProducer); @@ -95,6 +97,7 @@ public void setUp(final Vertx vertx, final VertxTestContext testContext) { handler = new RequestMapper<>( vertx, + null, new Properties(), new CloudEventRequestToRecordMapper(vertx), properties -> producer, diff --git a/data-plane/receiver/src/test/java/dev/knative/eventing/kafka/broker/receiver/RequestMapperTest.java b/data-plane/receiver/src/test/java/dev/knative/eventing/kafka/broker/receiver/RequestMapperTest.java index a12c2fd2b4..c51bd6540c 100644 --- a/data-plane/receiver/src/test/java/dev/knative/eventing/kafka/broker/receiver/RequestMapperTest.java +++ b/data-plane/receiver/src/test/java/dev/knative/eventing/kafka/broker/receiver/RequestMapperTest.java @@ -15,16 +15,6 @@ */ package dev.knative.eventing.kafka.broker.receiver; -import static org.assertj.core.api.Assertions.assertThat; -import static org.assertj.core.api.Assertions.entry; -import static org.assertj.core.api.InstanceOfAssertFactories.map; -import static org.junit.jupiter.api.Assertions.fail; -import static org.mockito.ArgumentMatchers.any; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.times; -import static org.mockito.Mockito.verify; -import static org.mockito.Mockito.when; - import dev.knative.eventing.kafka.broker.contract.DataPlaneContract; import dev.knative.eventing.kafka.broker.contract.DataPlaneContract.Resource; import dev.knative.eventing.kafka.broker.core.metrics.Metrics; @@ -43,6 +33,10 @@ import io.vertx.kafka.client.producer.impl.KafkaProducerRecordImpl; import io.vertx.micrometer.MicrometerMetricsOptions; import io.vertx.micrometer.backends.BackendRegistries; +import org.apache.kafka.clients.producer.MockProducer; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; + import java.util.Collections; import java.util.List; import java.util.Map; @@ -51,9 +45,16 @@ import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicInteger; import java.util.function.BiConsumer; -import org.apache.kafka.clients.producer.MockProducer; -import org.junit.jupiter.api.Test; -import org.junit.jupiter.api.extension.ExtendWith; + +import static 
org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.entry; +import static org.assertj.core.api.InstanceOfAssertFactories.map; +import static org.junit.jupiter.api.Assertions.fail; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; @ExtendWith(VertxExtension.class) public class RequestMapperTest { @@ -106,6 +107,7 @@ final var record = new KafkaProducerRecordImpl<>( final var handler = new RequestMapper<>( mock(Vertx.class), + null, new Properties(), mapper, properties -> producer, @@ -149,6 +151,7 @@ public void shouldReturnBadRequestIfNoRecordCanBeCreated(final Vertx vertx) thro final var handler = new RequestMapper( vertx, + null, new Properties(), mapper, properties -> producer, @@ -417,6 +420,7 @@ private void testRequestMapper( final var handler = new RequestMapper( vertx, + null, new Properties(), (request, topic) -> Future.succeededFuture(), properties -> { diff --git a/data-plane/tests/pom.xml b/data-plane/tests/pom.xml index 47bafd7fa0..9d1db58c52 100644 --- a/data-plane/tests/pom.xml +++ b/data-plane/tests/pom.xml @@ -28,11 +28,6 @@ tests - - 1.4.0.Final - 2.6.0 - - @@ -82,27 +77,20 @@ mockito-junit-jupiter test - io.debezium debezium-core - ${debezium.version} test io.debezium debezium-core - ${debezium.version} test-jar test org.apache.kafka kafka_2.12 - ${kafka.version} test diff --git a/data-plane/tests/src/test/java/dev/knative/eventing/kafka/broker/tests/DataPlaneTest.java b/data-plane/tests/src/test/java/dev/knative/eventing/kafka/broker/tests/DataPlaneTest.java index e898acea0a..126ba219da 100644 --- a/data-plane/tests/src/test/java/dev/knative/eventing/kafka/broker/tests/DataPlaneTest.java +++ b/data-plane/tests/src/test/java/dev/knative/eventing/kafka/broker/tests/DataPlaneTest.java @@ -15,20 +15,12 @@ */ package dev.knative.eventing.kafka.broker.tests; -import static java.lang.String.format; -import static org.apache.kafka.clients.consumer.ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG; -import static org.apache.kafka.clients.consumer.ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG; -import static org.apache.kafka.clients.consumer.ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG; -import static org.apache.kafka.clients.producer.ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG; -import static org.apache.kafka.clients.producer.ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG; -import static org.assertj.core.api.Assertions.assertThat; -import static org.mockito.Mockito.mock; - import dev.knative.eventing.kafka.broker.contract.DataPlaneContract; import dev.knative.eventing.kafka.broker.core.eventbus.ContractMessageCodec; import dev.knative.eventing.kafka.broker.core.eventbus.ContractPublisher; import dev.knative.eventing.kafka.broker.core.metrics.Metrics; import dev.knative.eventing.kafka.broker.core.reconciler.impl.ResourcesReconcilerMessageHandler; +import dev.knative.eventing.kafka.broker.core.security.AuthProvider; import dev.knative.eventing.kafka.broker.dispatcher.ConsumerDeployerVerticle; import dev.knative.eventing.kafka.broker.dispatcher.ConsumerRecordOffsetStrategyFactory; import dev.knative.eventing.kafka.broker.dispatcher.http.HttpConsumerVerticleFactory; @@ -54,23 +46,31 @@ import io.vertx.kafka.client.producer.KafkaProducer; import io.vertx.micrometer.MicrometerMetricsOptions; import io.vertx.micrometer.backends.BackendRegistries; +import 
org.apache.kafka.common.serialization.StringDeserializer; +import org.apache.kafka.common.serialization.StringSerializer; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; + import java.io.File; import java.io.IOException; import java.net.URI; import java.util.Properties; import java.util.UUID; -import java.util.concurrent.Callable; import java.util.concurrent.CountDownLatch; import java.util.concurrent.TimeUnit; import java.util.function.Function; -import org.apache.kafka.common.serialization.StringDeserializer; -import org.apache.kafka.common.serialization.StringSerializer; -import org.awaitility.core.ConditionTimeoutException; -import org.junit.jupiter.api.AfterAll; -import org.junit.jupiter.api.BeforeAll; -import org.junit.jupiter.api.Test; -import org.junit.jupiter.api.extension.ExtendWith; + +import static java.lang.String.format; +import static org.apache.kafka.clients.consumer.ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG; +import static org.apache.kafka.clients.consumer.ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG; +import static org.apache.kafka.clients.consumer.ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG; +import static org.apache.kafka.clients.producer.ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG; +import static org.apache.kafka.clients.producer.ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG; +import static org.assertj.core.api.Assertions.assertThat; import static org.awaitility.Awaitility.await; +import static org.mockito.Mockito.mock; @ExtendWith(VertxExtension.class) public class DataPlaneTest { @@ -203,7 +203,7 @@ public void execute(final Vertx vertx, final VertxTestContext context) throws Ex new ContractPublisher(vertx.eventBus(), ResourcesReconcilerMessageHandler.ADDRESS) .accept(DataPlaneContract.Contract.newBuilder().addResources(resource).build()); - await().atMost(10, TimeUnit.SECONDS).untilAsserted(() -> assertThat(vertx.deploymentIDs()).hasSize(resource.getEgressesCount()+ NUM_RESOURCES + NUM_SYSTEM_VERTICLES)); + await().atMost(10, TimeUnit.SECONDS).untilAsserted(() -> assertThat(vertx.deploymentIDs()).hasSize(resource.getEgressesCount() + NUM_RESOURCES + NUM_SYSTEM_VERTICLES)); // start service vertx.createHttpServer() @@ -302,8 +302,8 @@ private static ConsumerDeployerVerticle setUpDispatcher(final Vertx vertx, final consumerRecordOffsetStrategyFactory, consumerConfigs, new WebClientOptions(), - producerConfigs - ); + producerConfigs, + mock(AuthProvider.class)); final var verticle = new ConsumerDeployerVerticle( consumerVerticleFactory, @@ -323,6 +323,7 @@ private static ReceiverVerticle setUpReceiver( final Function> handlerFactory = v -> new RequestMapper<>( vertx, + null, producerConfigs(), new CloudEventRequestToRecordMapper(v), properties -> KafkaProducer.create(v, properties), diff --git a/go.mod b/go.mod index 5d0ad4248f..1a40dffbaf 100644 --- a/go.mod +++ b/go.mod @@ -13,7 +13,9 @@ require ( github.com/pierdipi/sacura v0.0.0-20201003135834-e90e3a725ff9 github.com/rickb777/date v1.14.1 github.com/stretchr/testify v1.6.1 + github.com/xdg/scram v0.0.0-20180814205039-7eeb5667e42c go.uber.org/zap v1.16.0 + golang.org/x/crypto v0.0.0-20201217014255-9d1352758620 // indirect google.golang.org/protobuf v1.25.0 k8s.io/api v0.18.12 k8s.io/apiextensions-apiserver v0.18.12 diff --git a/go.sum b/go.sum index bfb823f2dd..4762aadec2 100644 --- a/go.sum +++ b/go.sum @@ -701,7 +701,9 @@ github.com/valyala/bytebufferpool v1.0.0 
h1:GqA5TC/0021Y/b9FG4Oi9Mr3q7XYx6Kllzaw github.com/valyala/bytebufferpool v1.0.0/go.mod h1:6bBcMArwyJ5K/AmCkWv1jt77kVWyCJ6HpOuEn7z0Csc= github.com/vektah/gqlparser v1.1.2/go.mod h1:1ycwN7Ij5njmMkPPAOaRFY4rET2Enx7IkVv3vaXspKw= github.com/wavesoftware/go-ensure v1.0.0/go.mod h1:K2UAFSwMTvpiRGay/M3aEYYuurcR8S4A6HkQlJPV8k4= +github.com/xdg/scram v0.0.0-20180814205039-7eeb5667e42c h1:u40Z8hqBAAQyv+vATcGgV0YCnDjqSL7/q/JyPhhJSPk= github.com/xdg/scram v0.0.0-20180814205039-7eeb5667e42c/go.mod h1:lB8K/P019DLNhemzwFU4jHLhdvlE6uDZjXFejJXr49I= +github.com/xdg/stringprep v1.0.0 h1:d9X0esnoa3dFsV0FG35rAT0RIhYFlPq7MiP+DW89La0= github.com/xdg/stringprep v1.0.0/go.mod h1:Jhud4/sHMO4oL310DaZAKk9ZaJ08SJfe+sJh0HrGL1Y= github.com/xiang90/probing v0.0.0-20190116061207-43a291ad63a2/go.mod h1:UETIi67q53MR2AWcXfiuqkDkRtnGDLqkBTpCHuJHxtU= github.com/xordataexchange/crypt v0.0.3-0.20170626215501-b2862e3d0a77/go.mod h1:aYKd//L2LvnjZzWKhF00oedf4jCCReLcmhLdhm1A27Q= @@ -764,6 +766,8 @@ golang.org/x/crypto v0.0.0-20200510223506-06a226fb4e37/go.mod h1:LzIPMQfyMNhhGPh golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= golang.org/x/crypto v0.0.0-20201002170205-7f63de1d35b0 h1:hb9wdF1z5waM+dSIICn1l0DkLVDT3hqhhQsDNUmHPRE= golang.org/x/crypto v0.0.0-20201002170205-7f63de1d35b0/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= +golang.org/x/crypto v0.0.0-20201217014255-9d1352758620 h1:3wPMTskHO3+O6jqTEXyFcsnuxMQOqYSaHsDxcbUXpqA= +golang.org/x/crypto v0.0.0-20201217014255-9d1352758620/go.mod h1:jdWPYTVW3xRLrWPugEBEK3UY2ZEsg3UU495nc5E+M+I= golang.org/x/exp v0.0.0-20180321215751-8460e604b9de/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= golang.org/x/exp v0.0.0-20190125153040-c74c464bbbf2/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= @@ -904,6 +908,7 @@ golang.org/x/sys v0.0.0-20191001151750-bb3f8db39f24/go.mod h1:h1NjWce9XRLGQEsW7w golang.org/x/sys v0.0.0-20191005200804-aed5e4c7ecf9/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20191010194322-b09406accb47/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20191022100944-742c48ecaeb7/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20191026070338-33540a1f6037/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20191113165036-4c7a9d0fe056/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20191120155948-bd437916bb0e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20191204072324-ce4227a45e2e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= @@ -933,6 +938,8 @@ golang.org/x/sys v0.0.0-20201015000850-e3ed0017c211/go.mod h1:h1NjWce9XRLGQEsW7w golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20201201145000-ef89a241ccb3 h1:kzM6+9dur93BcC2kVlYl34cHU+TYZLanmpSJHVMmL64= golang.org/x/sys v0.0.0-20201201145000-ef89a241ccb3/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/term v0.0.0-20201117132131-f5c789dd3221/go.mod h1:Nr5EML6q2oocZ2LXRh80K7BxOlk5/8JxuGnuhpl+muw= +golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1 h1:v+OssWQX+hTHEmOBgwxdZxK4zHq3yOs8F9J7mk0PY8E= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= golang.org/x/text v0.0.0-20160726164857-2910a502d2bf/go.mod 
h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.0.0-20170915032832-14c0d48ead0c/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= diff --git a/proto/def/contract.proto b/proto/def/contract.proto index c87550c28a..f1ad7a719d 100644 --- a/proto/def/contract.proto +++ b/proto/def/contract.proto @@ -86,6 +86,22 @@ message Ingress { } } +// Kubernetes resource reference. +message Reference { + + // Object id. + string uuid = 1; + + // Object namespace. + string namespace = 2; + + // Object name. + string name = 3; + + // Object version. + string version = 4; +} + message Resource { // Id of the resource // It's the same as the Kubernetes resource uid @@ -108,6 +124,45 @@ message Resource { // Optional egresses for this topic repeated Egress egresses = 6; + + oneof Auth { + // No auth configured. + google.protobuf.Empty absentAuth = 7; + + // Secret reference. + // + // Secret format: + // + // protocol: (PLAINTEXT | SASL_PLAINTEXT | SSL | SASL_SSL) + // sasl.mechanism: (SCRAM-SHA-256 | SCRAM-SHA-512) + // ca.crt: + // user.crt: + // user.key: + // user: + // password: + // + // Validation: + // - protocol=PLAINTEXT + // - protocol=SSL + // - required: + // - ca.crt + // - user.crt + // - user.key + // - protocol=SASL_PLAINTEXT + // - required: + // - sasl.mechanism + // - user + // - password + // - protocol=SASL_SSL + // - required: + // - sasl.mechanism + // - ca.crt + // - user.crt + // - user.key + // - user + // - password + Reference authSecret = 8; + } } message Contract { diff --git a/test/e2e/broker_sasl_ssl_test.go b/test/e2e/broker_sasl_ssl_test.go new file mode 100644 index 0000000000..688f99fb41 --- /dev/null +++ b/test/e2e/broker_sasl_ssl_test.go @@ -0,0 +1,219 @@ +// +build e2e + +/* + * Copyright 2020 The Knative Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package e2e + +import ( + "context" + "testing" + "time" + + cloudevents "github.com/cloudevents/sdk-go/v2" + . "github.com/cloudevents/sdk-go/v2/test" + "github.com/google/uuid" + "github.com/stretchr/testify/assert" + corev1 "k8s.io/api/core/v1" + metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" + "k8s.io/apiserver/pkg/storage/names" + "k8s.io/client-go/util/retry" + testlib "knative.dev/eventing/test/lib" + "knative.dev/eventing/test/lib/recordevents" + "knative.dev/eventing/test/lib/resources" + "knative.dev/eventing/test/lib/sender" + duckv1 "knative.dev/pkg/apis/duck/v1" + + "knative.dev/eventing-kafka-broker/control-plane/pkg/reconciler/kafka" + . 
"knative.dev/eventing-kafka-broker/test/pkg/testing" +) + +func brokerAuth(t *testing.T, secretProvider SecretProvider, configProvider ConfigProvider) { + + RunMultiple(t, func(t *testing.T) { + + ctx := context.Background() + + const ( + brokerName = "broker" + triggerName = "trigger" + subscriber = "subscriber" + configMap = "config-broker" + secretName = "broker-auth" + + eventType = "type1" + eventSource = "source1" + eventBody = `{"msg":"e2e-auth-body"}` + senderName = "sender" + ) + + client := testlib.Setup(t, true) + defer testlib.TearDown(client) + + client.CreateConfigMapOrFail( + configMap, + client.Namespace, + configProvider(secretName, client), + ) + + client.CreateBrokerV1OrFail( + brokerName, + resources.WithBrokerClassForBrokerV1(kafka.BrokerClass), + resources.WithConfigForBrokerV1(&duckv1.KReference{ + APIVersion: "v1", + Kind: "ConfigMap", + Namespace: client.Namespace, + Name: configMap, + }), + ) + + // secret doesn't exist, so broker won't become ready. + time.Sleep(time.Second * 30) + br, err := client.Eventing.EventingV1().Brokers(client.Namespace).Get(ctx, brokerName, metav1.GetOptions{}) + assert.Nil(t, err) + assert.False(t, br.Status.IsReady(), "secret %s/%s doesn't exist, so broker must no be ready", client.Namespace, secretName) + + secretData := secretProvider(t, client) + + secret := &corev1.Secret{ + ObjectMeta: metav1.ObjectMeta{ + Namespace: client.Namespace, + Name: secretName, + }, + Data: secretData, + } + + _, err = client.Kube.CoreV1().Secrets(client.Namespace).Create(ctx, secret, metav1.CreateOptions{}) + assert.Nil(t, err) + + // Trigger a reconciliation by updating the referenced ConfigMap in broker.spec.config. + err = retry.RetryOnConflict(retry.DefaultBackoff, func() error { + config, err := client.Kube.CoreV1().ConfigMaps(client.Namespace).Get(ctx, configMap, metav1.GetOptions{}) + if err != nil { + return nil + } + + if config.Labels == nil { + config.Labels = make(map[string]string, 1) + } + config.Labels["test.eventing.knative.dev/updated"] = names.SimpleNameGenerator.GenerateName("now") + + _, err = client.Kube.CoreV1().ConfigMaps(client.Namespace).Update(ctx, config, metav1.UpdateOptions{}) + return err + }) + assert.Nil(t, err) + + client.WaitForResourceReadyOrFail(brokerName, testlib.BrokerTypeMeta) + + eventTracker, _ := recordevents.StartEventRecordOrFail(ctx, client, subscriber, recordevents.AddTracing()) + + client.CreateTriggerV1OrFail( + triggerName, + resources.WithBrokerV1(brokerName), + resources.WithSubscriberServiceRefForTriggerV1(subscriber), + ) + + client.WaitForAllTestResourcesReadyOrFail(ctx) + + id := uuid.New().String() + eventToSend := cloudevents.NewEvent() + eventToSend.SetID(id) + eventToSend.SetType(eventType) + eventToSend.SetSource(eventSource) + err = eventToSend.SetData(cloudevents.ApplicationJSON, []byte(eventBody)) + assert.Nil(t, err) + + client.SendEventToAddressable( + ctx, + senderName, + brokerName, + testlib.BrokerTypeMeta, + eventToSend, + sender.EnableTracing(), + ) + + eventTracker.AssertAtLeast(1, recordevents.MatchEvent( + HasId(id), + HasSource(eventSource), + HasType(eventType), + HasData([]byte(eventBody)), + )) + }) +} + +func TestBrokerAuthPlaintext(t *testing.T) { + + brokerAuth( + t, + Plaintext, + func(secretName string, client *testlib.Client) map[string]string { + return map[string]string{ + "default.topic.replication.factor": "2", + "default.topic.partitions": "2", + "bootstrap.servers": BootstrapServersPlaintext, + "auth.secret.ref.name": secretName, + } + }, + ) +} + +func 
TestBrokerAuthSsl(t *testing.T) { + + brokerAuth( + t, + Ssl, + func(secretName string, client *testlib.Client) map[string]string { + return map[string]string{ + "default.topic.replication.factor": "2", + "default.topic.partitions": "2", + "bootstrap.servers": BootstrapServersSsl, + "auth.secret.ref.name": secretName, + } + }, + ) +} + +func TestBrokerAuthSaslPlaintextScram512(t *testing.T) { + + brokerAuth( + t, + SaslPlaintextScram512, + func(secretName string, client *testlib.Client) map[string]string { + return map[string]string{ + "default.topic.replication.factor": "2", + "default.topic.partitions": "2", + "bootstrap.servers": BootstrapServersSaslPlaintext, + "auth.secret.ref.name": secretName, + } + }, + ) +} + +func TestBrokerAuthSslSaslScram512(t *testing.T) { + + brokerAuth( + t, + SslSaslScram512, + func(secretName string, client *testlib.Client) map[string]string { + return map[string]string{ + "default.topic.replication.factor": "2", + "default.topic.partitions": "2", + "bootstrap.servers": BootstrapServersSslSaslScram, + "auth.secret.ref.name": secretName, + } + }, + ) +} diff --git a/test/e2e/broker_trigger_sink_test.go b/test/e2e/broker_trigger_sink_test.go index 66d0d18d6e..d300b889ec 100644 --- a/test/e2e/broker_trigger_sink_test.go +++ b/test/e2e/broker_trigger_sink_test.go @@ -64,7 +64,7 @@ func TestBrokerV1TriggersV1SinkV1Alpha1(t *testing.T) { Topic: "kafka-sink-" + client.Namespace, NumPartitions: pointer.Int32Ptr(10), ReplicationFactor: func(rf int16) *int16 { return &rf }(1), - BootstrapServers: testingpkg.BootstrapServersArr, + BootstrapServers: testingpkg.BootstrapServersPlaintextArr, } createFunc := sink.CreatorV1Alpha1(clientSet, kss) diff --git a/test/e2e/broker_trigger_test.go b/test/e2e/broker_trigger_test.go index 6a8a723e5b..fa192865d3 100644 --- a/test/e2e/broker_trigger_test.go +++ b/test/e2e/broker_trigger_test.go @@ -138,7 +138,7 @@ func TestBrokerTrigger(t *testing.T) { )) config := &kafkatest.Config{ - BootstrapServers: testingpkg.BootstrapServers, + BootstrapServers: testingpkg.BootstrapServersPlaintext, ReplicationFactor: defaultReplicationFactor, NumPartitions: defaultNumPartitions, Topic: kafka.Topic(broker.TopicPrefix, br), @@ -187,7 +187,7 @@ func TestBrokerWithConfig(t *testing.T) { cm := client.CreateConfigMapOrFail(configMapName, client.Namespace, map[string]string{ broker.DefaultTopicNumPartitionConfigMapKey: fmt.Sprintf("%d", numPartitions), broker.DefaultTopicReplicationFactorConfigMapKey: fmt.Sprintf("%d", replicationFactor), - broker.BootstrapServersConfigMapKey: testingpkg.BootstrapServers, + broker.BootstrapServersConfigMapKey: testingpkg.BootstrapServersPlaintext, }) br := client.CreateBrokerV1OrFail( @@ -249,7 +249,7 @@ func TestBrokerWithConfig(t *testing.T) { t.Logf("Verify num partitions and replication factor") config := &kafkatest.Config{ - BootstrapServers: testingpkg.BootstrapServers, + BootstrapServers: testingpkg.BootstrapServersPlaintext, ReplicationFactor: replicationFactor, NumPartitions: numPartitions, Topic: kafka.Topic(broker.TopicPrefix, br), diff --git a/test/e2e/delete_cm_test.go b/test/e2e/delete_cm_test.go index cafc2e763b..74e9e6452f 100644 --- a/test/e2e/delete_cm_test.go +++ b/test/e2e/delete_cm_test.go @@ -184,7 +184,7 @@ func TestDeleteSinkConfigMap(t *testing.T) { Topic: "kafka-sink-" + client.Namespace, NumPartitions: pointer.Int32Ptr(10), ReplicationFactor: func(rf int16) *int16 { return &rf }(1), - BootstrapServers: testingpkg.BootstrapServersArr, + BootstrapServers: 
testingpkg.BootstrapServersPlaintextArr, } createFunc := sink.CreatorV1Alpha1(clientSet, kss) diff --git a/test/e2e/kafka_sink_test.go b/test/e2e/kafka_sink_test.go index 612cfc888d..f266640e9e 100644 --- a/test/e2e/kafka_sink_test.go +++ b/test/e2e/kafka_sink_test.go @@ -19,9 +19,12 @@ package e2e import ( + "context" "testing" - "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + corev1 "k8s.io/api/core/v1" + metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" "k8s.io/apimachinery/pkg/types" "k8s.io/utils/pointer" testlib "knative.dev/eventing/test/lib" @@ -30,90 +33,58 @@ import ( eventingv1alpha1clientset "knative.dev/eventing-kafka-broker/control-plane/pkg/client/clientset/versioned/typed/eventing/v1alpha1" "knative.dev/eventing-kafka-broker/test/pkg/addressable" "knative.dev/eventing-kafka-broker/test/pkg/sink" - testingpkg "knative.dev/eventing-kafka-broker/test/pkg/testing" + . "knative.dev/eventing-kafka-broker/test/pkg/testing" ) -func TestKafkaSinkV1Alpha1DefaultContentMode(t *testing.T) { - testingpkg.RunMultiple(t, func(t *testing.T) { - - client := testlib.Setup(t, false) - defer testlib.TearDown(client) - - clientSet, err := eventingv1alpha1clientset.NewForConfig(client.Config) - assert.Nil(t, err) - - // Create a KafkaSink with the following spec. - - kss := eventingv1alpha1.KafkaSinkSpec{ - Topic: "kafka-sink-" + client.Namespace, - NumPartitions: pointer.Int32Ptr(10), - ReplicationFactor: func(rf int16) *int16 { return &rf }(1), - BootstrapServers: testingpkg.BootstrapServersArr, - } - - createFunc := sink.CreatorV1Alpha1(clientSet, kss) - - kafkaSink, err := createFunc(types.NamespacedName{ - Namespace: client.Namespace, - Name: "kafka-sink", - }) - assert.Nil(t, err) - - client.WaitForResourceReadyOrFail(kafkaSink.Name, &kafkaSink.TypeMeta) - - // Send events to the KafkaSink. - ids := addressable.Send(t, kafkaSink) +const ( + sinkSecretName = "secret-test" +) - // Read events from the topic. - sink.Verify(t, client, eventingv1alpha1.ModeStructured, kss.Topic, ids) +func TestKafkaSinkV1Alpha1DefaultContentMode(t *testing.T) { + testKafkaSink(t, eventingv1alpha1.ModeStructured, nil, func(kss *eventingv1alpha1.KafkaSinkSpec) error { + kss.ContentMode = pointer.StringPtr("") + return nil }) } func TestKafkaSinkV1Alpha1StructuredContentMode(t *testing.T) { - testingpkg.RunMultiple(t, func(t *testing.T) { - - client := testlib.Setup(t, false) - defer testlib.TearDown(client) - - clientSet, err := eventingv1alpha1clientset.NewForConfig(client.Config) - assert.Nil(t, err) + testKafkaSink(t, eventingv1alpha1.ModeStructured, nil) +} - // Create a KafkaSink with the following spec. 
+func TestKafkaSinkV1Alpha1BinaryContentMode(t *testing.T) { + testKafkaSink(t, eventingv1alpha1.ModeBinary, nil) +} - kss := eventingv1alpha1.KafkaSinkSpec{ - Topic: "kafka-sink-" + client.Namespace, - NumPartitions: pointer.Int32Ptr(10), - ReplicationFactor: func(rf int16) *int16 { return &rf }(1), - BootstrapServers: testingpkg.BootstrapServersArr, - ContentMode: pointer.StringPtr(eventingv1alpha1.ModeStructured), - } +func TestKafkaSinkV1Alpha1AuthPlaintext(t *testing.T) { + testKafkaSink(t, eventingv1alpha1.ModeStructured, Plaintext, withBootstrap(BootstrapServersPlaintextArr), withSecret) +} - createFunc := sink.CreatorV1Alpha1(clientSet, kss) +func TestKafkaSinkV1Alpha1AuthSsl(t *testing.T) { + testKafkaSink(t, eventingv1alpha1.ModeStructured, Ssl, withBootstrap(BootstrapServersSslArr), withSecret) +} - kafkaSink, err := createFunc(types.NamespacedName{ - Namespace: client.Namespace, - Name: "kafka-sink", - }) - assert.Nil(t, err) +func TestKafkaSinkV1Alpha1AuthSaslPlaintextScram512(t *testing.T) { + testKafkaSink(t, eventingv1alpha1.ModeStructured, SaslPlaintextScram512, withBootstrap(BootstrapServersSaslPlaintextArr), withSecret) +} - client.WaitForResourceReadyOrFail(kafkaSink.Name, &kafkaSink.TypeMeta) +func TestKafkaSinkV1Alpha1AuthSslSaslScram512(t *testing.T) { + testKafkaSink(t, eventingv1alpha1.ModeStructured, SslSaslScram512, withBootstrap(BootstrapServersSslSaslScramArr), withSecret) +} - // Send events to the KafkaSink. - ids := addressable.Send(t, kafkaSink) +func testKafkaSink(t *testing.T, mode string, sp SecretProvider, opts ...func(kss *eventingv1alpha1.KafkaSinkSpec) error) { + RunMultiple(t, func(t *testing.T) { - // Read events from the topic. - sink.Verify(t, client, eventingv1alpha1.ModeStructured, kss.Topic, ids) - }) -} + ctx := context.Background() -func TestKafkaSinkV1Alpha1BinaryContentMode(t *testing.T) { - testingpkg.RunMultiple(t, func(t *testing.T) { + const ( + kafkaSinkName = "kafka-sink" + ) client := testlib.Setup(t, false) defer testlib.TearDown(client) clientSet, err := eventingv1alpha1clientset.NewForConfig(client.Config) - assert.Nil(t, err) + require.Nil(t, err) // Create a KafkaSink with the following spec. 
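
As a rough sketch of what the TestKafkaSinkV1Alpha1Auth* variants above assemble programmatically, the "secret-test" Secret for the SASL_PLAINTEXT / SCRAM-SHA-512 case could be created by hand as follows. The key names follow the SaslPlaintextScram512 provider in test/pkg/testing/auth.go; reading the password out of the Strimzi-generated my-sasl-user Secret in the kafka namespace is an assumption about the local test cluster.

# Sketch only: mirrors the secret the sink auth e2e tests create in-cluster.
kubectl create secret generic secret-test \
  --from-literal=protocol=SASL_PLAINTEXT \
  --from-literal=sasl.mechanism=SCRAM-SHA-512 \
  --from-literal=user=my-sasl-user \
  --from-literal=password="$(kubectl -n kafka get secret my-sasl-user -o jsonpath='{.data.password}' | base64 -d)"
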
@@ -121,17 +92,40 @@ func TestKafkaSinkV1Alpha1BinaryContentMode(t *testing.T) { Topic: "kafka-sink-" + client.Namespace, NumPartitions: pointer.Int32Ptr(10), ReplicationFactor: func(rf int16) *int16 { return &rf }(1), - BootstrapServers: testingpkg.BootstrapServersArr, - ContentMode: pointer.StringPtr(eventingv1alpha1.ModeBinary), + BootstrapServers: BootstrapServersPlaintextArr, + ContentMode: pointer.StringPtr(mode), + } + for _, opt := range opts { + require.Nil(t, opt(&kss)) + } + + t.Log(kss) + + if sp != nil { + secretData := sp(t, client) + secret := &corev1.Secret{ + ObjectMeta: metav1.ObjectMeta{ + Namespace: client.Namespace, + Name: sinkSecretName, + }, + Data: secretData, + } + secret, err = client.Kube.CoreV1().Secrets(client.Namespace).Create(ctx, secret, metav1.CreateOptions{}) + require.Nil(t, err) + client.Tracker.Add(corev1.GroupName, "v1", "Secret", secret.Namespace, secret.Name) } createFunc := sink.CreatorV1Alpha1(clientSet, kss) kafkaSink, err := createFunc(types.NamespacedName{ Namespace: client.Namespace, - Name: "kafka-sink", + Name: kafkaSinkName, }) - assert.Nil(t, err) + require.Nil(t, err) + + ks, err := clientSet.KafkaSinks(client.Namespace).Get(ctx, kafkaSinkName, metav1.GetOptions{}) + require.Nil(t, err) + client.Tracker.AddObj(ks) client.WaitForResourceReadyOrFail(kafkaSink.Name, &kafkaSink.TypeMeta) @@ -139,6 +133,24 @@ func TestKafkaSinkV1Alpha1BinaryContentMode(t *testing.T) { ids := addressable.Send(t, kafkaSink) // Read events from the topic. - sink.Verify(t, client, eventingv1alpha1.ModeBinary, kss.Topic, ids) + sink.Verify(t, client, mode, kss.Topic, ids) }) } + +func withSecret(kss *eventingv1alpha1.KafkaSinkSpec) error { + kss.Auth = &eventingv1alpha1.Auth{ + Secret: &eventingv1alpha1.Secret{ + Ref: &eventingv1alpha1.SecretReference{ + Name: sinkSecretName, + }, + }, + } + return nil +} + +func withBootstrap(bs []string) func(kss *eventingv1alpha1.KafkaSinkSpec) error { + return func(kss *eventingv1alpha1.KafkaSinkSpec) error { + kss.BootstrapServers = bs + return nil + } +} diff --git a/test/kafka/kafka-ephemeral.yaml b/test/kafka/kafka-ephemeral.yaml index 8328f742e7..108e1fb15c 100644 --- a/test/kafka/kafka-ephemeral.yaml +++ b/test/kafka/kafka-ephemeral.yaml @@ -21,8 +21,34 @@ spec: version: 2.6.0 replicas: 3 listeners: - plain: {} - tls: {} + # PLAINTEXT + - name: plain + port: 9092 + type: internal + tls: false + # SSL + - name: tls + port: 9093 + type: internal + tls: true + authentication: + type: tls + # protocol=SASL_PLAINTEXT + # sasl.mechanism=SCRAM-SHA-512 + - name: saslplain + port: 9094 + type: internal + tls: false + authentication: + type: scram-sha-512 + # protocol=SASL_SSL + # sasl.mechanism=SCRAM-SHA-512 + - name: saslssl + port: 9095 + type: internal + tls: true + authentication: + type: scram-sha-512 config: offsets.topic.replication.factor: 3 transaction.state.log.replication.factor: 3 @@ -36,5 +62,5 @@ spec: storage: type: ephemeral entityOperator: - topicOperator: {} - userOperator: {} + topicOperator: { } + userOperator: { } diff --git a/test/kafka/kafka_setup.sh b/test/kafka/kafka_setup.sh index 8890230496..f46f1ffbfa 100755 --- a/test/kafka/kafka_setup.sh +++ b/test/kafka/kafka_setup.sh @@ -18,7 +18,6 @@ set -e source $(dirname $0)/../../vendor/knative.dev/hack/e2e-tests.sh - kubectl create namespace kafka --dry-run -o yaml | kubectl apply -f - sleep 5 @@ -29,6 +28,8 @@ cat $(dirname $0)/strimzi-cluster-operator.yaml | sed "s/cluster.local/${CLUSTER sleep 5 kubectl -n kafka apply -f $(dirname $0)/kafka-ephemeral.yaml 
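
The broker-side counterpart of the flow exercised by brokerAuth can be sketched with plain kubectl as well. The ConfigMap keys come from the ConfigProvider maps in test/e2e/broker_sasl_ssl_test.go, the Secret keys from the Ssl SecretProvider and the contract.proto comment, and the bootstrap address is the SSL listener constant from test/pkg/testing/run.go; the certificate file paths are assumptions.

# Sketch only: an SSL auth Secret plus a broker ConfigMap that references it
# via auth.secret.ref.name (certificate file paths are assumptions).
kubectl create secret generic broker-auth \
  --from-literal=protocol=SSL \
  --from-file=ca.crt=./ca.crt \
  --from-file=user.crt=./user.crt \
  --from-file=user.key=./user.key

kubectl create configmap config-broker \
  --from-literal=default.topic.replication.factor=2 \
  --from-literal=default.topic.partitions=2 \
  --from-literal=bootstrap.servers=my-cluster-kafka-bootstrap.kafka:9093 \
  --from-literal=auth.secret.ref.name=broker-auth
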
+kubectl apply -n kafka -f $(dirname $0)/user-tls.yaml +kubectl apply -n kafka -f $(dirname $0)/user-sasl-scram-512.yaml sleep 5 diff --git a/test/kafka/user-sasl-scram-512.yaml b/test/kafka/user-sasl-scram-512.yaml new file mode 100644 index 0000000000..bb32d4c995 --- /dev/null +++ b/test/kafka/user-sasl-scram-512.yaml @@ -0,0 +1,58 @@ +# Copyright 2020 The Knative Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +apiVersion: kafka.strimzi.io/v1beta1 +kind: KafkaUser +metadata: + name: my-sasl-user + labels: + strimzi.io/cluster: my-cluster +spec: + authentication: + type: scram-sha-512 + authorization: + type: simple + acls: + # Example ACL rules for consuming from knative-messaging-kafka using consumer group my-group + - resource: + type: topic + name: "*" + operation: Read + host: "*" + - resource: + type: topic + name: "*" + operation: Describe + host: "*" + - resource: + type: group + name: "*" + operation: Read + host: "*" + # Example ACL rules for producing to topic knative-messaging-kafka + - resource: + type: topic + name: "*" + operation: Write + host: "*" + - resource: + type: topic + name: "*" + operation: Create + host: "*" + - resource: + type: topic + name: "*" + operation: Describe + host: "*" diff --git a/test/kafka/user-tls.yaml b/test/kafka/user-tls.yaml new file mode 100644 index 0000000000..18cacc70bb --- /dev/null +++ b/test/kafka/user-tls.yaml @@ -0,0 +1,58 @@ +# Copyright 2020 The Knative Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +apiVersion: kafka.strimzi.io/v1beta1 +kind: KafkaUser +metadata: + name: my-tls-user + labels: + strimzi.io/cluster: my-cluster +spec: + authentication: + type: tls + authorization: + type: simple + acls: + # Example ACL rules for consuming from a topic. + - resource: + type: topic + name: "*" + operation: Read + host: "*" + - resource: + type: topic + name: "*" + operation: Describe + host: "*" + - resource: + type: group + name: "*" + operation: Read + host: "*" + # Example ACL rules for producing to a topic. 
+ - resource: + type: topic + name: "*" + operation: Write + host: "*" + - resource: + type: topic + name: "*" + operation: Create + host: "*" + - resource: + type: topic + name: "*" + operation: Describe + host: "*" diff --git a/test/pkg/sink/verify.go b/test/pkg/sink/verify.go index f8b3e38ab7..3489b19f4b 100644 --- a/test/pkg/sink/verify.go +++ b/test/pkg/sink/verify.go @@ -39,7 +39,7 @@ func Verify(t *testing.T, client *testlib.Client, mode, topic string, ids []stri Name: names.SimpleNameGenerator.GenerateName("verify-messages"), }, &kafkatest.ConsumerConfig{ - BootstrapServers: testingpkg.BootstrapServers, + BootstrapServers: testingpkg.BootstrapServersPlaintext, Topic: topic, IDS: strings.Join(ids, ","), ContentMode: mode, diff --git a/test/pkg/testing/auth.go b/test/pkg/testing/auth.go new file mode 100644 index 0000000000..d8876b4c59 --- /dev/null +++ b/test/pkg/testing/auth.go @@ -0,0 +1,80 @@ +/* + * Copyright 2021 The Knative Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package testing + +import ( + "context" + "testing" + + "github.com/stretchr/testify/assert" + metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" + testlib "knative.dev/eventing/test/lib" +) + +type SecretProvider func(t *testing.T, client *testlib.Client) map[string][]byte + +func Plaintext(t *testing.T, _ *testlib.Client) map[string][]byte { + return map[string][]byte{ + "protocol": []byte("PLAINTEXT"), + } +} + +func Ssl(t *testing.T, client *testlib.Client) map[string][]byte { + caSecret, err := client.Kube.CoreV1().Secrets(KafkaClusterNamespace).Get(context.Background(), CaSecretName, metav1.GetOptions{}) + assert.Nil(t, err) + + tlsUserSecret, err := client.Kube.CoreV1().Secrets(KafkaClusterNamespace).Get(context.Background(), TlsUserSecretName, metav1.GetOptions{}) + assert.Nil(t, err) + + return map[string][]byte{ + "protocol": []byte("SSL"), + "ca.crt": caSecret.Data["ca.crt"], + "user.crt": tlsUserSecret.Data["user.crt"], + "user.key": tlsUserSecret.Data["user.key"], + } +} + +func SaslPlaintextScram512(t *testing.T, client *testlib.Client) map[string][]byte { + + saslUserSecret, err := client.Kube.CoreV1().Secrets(KafkaClusterNamespace).Get(context.Background(), SaslUserSecretName, metav1.GetOptions{}) + assert.Nil(t, err) + + return map[string][]byte{ + "protocol": []byte("SASL_PLAINTEXT"), + "sasl.mechanism": []byte("SCRAM-SHA-512"), + "user": []byte(SaslUserSecretName), + "password": saslUserSecret.Data["password"], + } +} + +func SslSaslScram512(t *testing.T, client *testlib.Client) map[string][]byte { + caSecret, err := client.Kube.CoreV1().Secrets(KafkaClusterNamespace).Get(context.Background(), CaSecretName, metav1.GetOptions{}) + assert.Nil(t, err) + + saslUserSecret, err := client.Kube.CoreV1().Secrets(KafkaClusterNamespace).Get(context.Background(), SaslUserSecretName, metav1.GetOptions{}) + assert.Nil(t, err) + + return map[string][]byte{ + "protocol": []byte("SASL_SSL"), + "sasl.mechanism": []byte("SCRAM-SHA-512"), + "ca.crt": caSecret.Data["ca.crt"], + "user": 
[]byte(SaslUserSecretName), + "password": saslUserSecret.Data["password"], + } +} + +type ConfigProvider func(secretName string, client *testlib.Client) map[string]string diff --git a/test/pkg/testing/run.go b/test/pkg/testing/run.go index 50ee3e46fe..e21cb807af 100644 --- a/test/pkg/testing/run.go +++ b/test/pkg/testing/run.go @@ -26,12 +26,23 @@ const ( rerunTimes = 5 // Kafka bootstrap server. - BootstrapServers = "my-cluster-kafka-bootstrap.kafka:9092" + BootstrapServersPlaintext = "my-cluster-kafka-bootstrap.kafka:9092" + BootstrapServersSsl = "my-cluster-kafka-bootstrap.kafka:9093" + BootstrapServersSaslPlaintext = "my-cluster-kafka-bootstrap.kafka:9094" + BootstrapServersSslSaslScram = "my-cluster-kafka-bootstrap.kafka:9095" + + KafkaClusterNamespace = "kafka" + TlsUserSecretName = "my-tls-user" + SaslUserSecretName = "my-sasl-user" + CaSecretName = "my-cluster-cluster-ca-cert" ) var ( // Kafka bootstrap server as array. - BootstrapServersArr = []string{BootstrapServers} + BootstrapServersPlaintextArr = []string{BootstrapServersPlaintext} + BootstrapServersSslArr = []string{BootstrapServersSsl} + BootstrapServersSaslPlaintextArr = []string{BootstrapServersSaslPlaintext} + BootstrapServersSslSaslScramArr = []string{BootstrapServersSslSaslScram} ) // RunMultiple run test function f `rerunTimes` times. diff --git a/third_party/VENDOR-LICENSE/github.com/xdg/scram/LICENSE b/third_party/VENDOR-LICENSE/github.com/xdg/scram/LICENSE new file mode 100644 index 0000000000..67db858821 --- /dev/null +++ b/third_party/VENDOR-LICENSE/github.com/xdg/scram/LICENSE @@ -0,0 +1,175 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. 
For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. diff --git a/third_party/VENDOR-LICENSE/github.com/xdg/stringprep/LICENSE b/third_party/VENDOR-LICENSE/github.com/xdg/stringprep/LICENSE new file mode 100644 index 0000000000..67db858821 --- /dev/null +++ b/third_party/VENDOR-LICENSE/github.com/xdg/stringprep/LICENSE @@ -0,0 +1,175 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). 
+ + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. diff --git a/third_party/VENDOR-LICENSE/golang.org/x/term/LICENSE b/third_party/VENDOR-LICENSE/golang.org/x/term/LICENSE new file mode 100644 index 0000000000..6a66aea5ea --- /dev/null +++ b/third_party/VENDOR-LICENSE/golang.org/x/term/LICENSE @@ -0,0 +1,27 @@ +Copyright (c) 2009 The Go Authors. All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright +notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above +copyright notice, this list of conditions and the following disclaimer +in the documentation and/or other materials provided with the +distribution. + * Neither the name of Google Inc. nor the names of its +contributors may be used to endorse or promote products derived from +this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
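The vendored github.com/xdg/scram and github.com/xdg/stringprep packages that follow provide the SCRAM conversation logic and SASLprep normalization a Sarama-based Kafka client needs for SASL/SCRAM authentication. As a rough sketch of how they are typically wired in (assuming the Shopify/sarama API; the adapter and helper names below are illustrative and not part of this change):

```go
package security

import (
	"github.com/Shopify/sarama"
	"github.com/xdg/scram"
)

// xdgSCRAMClient adapts an xdg/scram conversation to sarama.SCRAMClient.
type xdgSCRAMClient struct {
	*scram.Client
	*scram.ClientConversation
	scram.HashGeneratorFcn
}

// Begin creates a new SCRAM client and conversation for this handshake.
func (x *xdgSCRAMClient) Begin(userName, password, authzID string) (err error) {
	x.Client, err = x.HashGeneratorFcn.NewClient(userName, password, authzID)
	if err != nil {
		return err
	}
	x.ClientConversation = x.Client.NewConversation()
	return nil
}

// Step forwards a server challenge to the underlying conversation.
func (x *xdgSCRAMClient) Step(challenge string) (string, error) {
	return x.ClientConversation.Step(challenge)
}

// Done reports whether the conversation has finished.
func (x *xdgSCRAMClient) Done() bool {
	return x.ClientConversation.Done()
}

// withSCRAMSHA256 enables SASL/SCRAM-SHA-256 on a Sarama config (illustrative helper).
func withSCRAMSHA256(config *sarama.Config, user, password string) {
	config.Net.SASL.Enable = true
	config.Net.SASL.User = user
	config.Net.SASL.Password = password
	config.Net.SASL.Mechanism = sarama.SASLTypeSCRAMSHA256
	config.Net.SASL.SCRAMClientGeneratorFunc = func() sarama.SCRAMClient {
		return &xdgSCRAMClient{HashGeneratorFcn: scram.SHA256}
	}
}
```

Sarama calls Begin, Step and Done on the adapter during the handshake, and NewClient in turn runs SASLprep from the vendored stringprep package over the username and password.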
diff --git a/vendor/github.com/xdg/scram/.gitignore b/vendor/github.com/xdg/scram/.gitignore new file mode 100644 index 0000000000..e69de29bb2 diff --git a/vendor/github.com/xdg/scram/.travis.yml b/vendor/github.com/xdg/scram/.travis.yml new file mode 100644 index 0000000000..f391327ea9 --- /dev/null +++ b/vendor/github.com/xdg/scram/.travis.yml @@ -0,0 +1,11 @@ +language: go +sudo: false +go: + - "1.7" + - "1.8" + - "1.9" + - "1.10" + - master +matrix: + allow_failures: + - go: master diff --git a/vendor/github.com/xdg/scram/LICENSE b/vendor/github.com/xdg/scram/LICENSE new file mode 100644 index 0000000000..67db858821 --- /dev/null +++ b/vendor/github.com/xdg/scram/LICENSE @@ -0,0 +1,175 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. 
The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
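The README that follows documents the client half of the vendored scram package; the same sources also ship the server half (server.go and server_conv.go further below). A minimal end-to-end sketch that drives the vendored client and server conversations against each other, with an in-memory credential lookup standing in for a real user store and purely illustrative username, password, and salt values:

```go
package main

import (
	"fmt"

	"github.com/xdg/scram"
)

func main() {
	// Derive the credentials a server would normally keep in its user store.
	client, err := scram.SHA256.NewClient("mulder", "trustno1", "")
	if err != nil {
		panic(err)
	}
	kf := scram.KeyFactors{Salt: "pinch-of-salt", Iters: 4096}
	stored := client.GetStoredCredentials(kf)

	// Credential lookup callback: return StoredCredentials for a username.
	lookup := func(user string) (scram.StoredCredentials, error) {
		if user != "mulder" {
			return scram.StoredCredentials{}, fmt.Errorf("unknown user %q", user)
		}
		return stored, nil
	}

	server, err := scram.SHA256.NewServer(lookup)
	if err != nil {
		panic(err)
	}

	// Drive the two conversations against each other until both complete.
	cconv, sconv := client.NewConversation(), server.NewConversation()
	var clientMsg, serverMsg string
	for !cconv.Done() {
		if clientMsg, err = cconv.Step(serverMsg); err != nil {
			panic(err)
		}
		if sconv.Done() {
			break
		}
		if serverMsg, err = sconv.Step(clientMsg); err != nil {
			panic(err)
		}
	}
	fmt.Println("authenticated:", sconv.Valid(), "user:", sconv.Username())
}
```

Both Valid() calls report true only after the final verifier exchange, matching the Step state machines in client_conv.go and server_conv.go.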
diff --git a/vendor/github.com/xdg/scram/README.md b/vendor/github.com/xdg/scram/README.md new file mode 100644 index 0000000000..6782d94d74 --- /dev/null +++ b/vendor/github.com/xdg/scram/README.md @@ -0,0 +1,71 @@ +[![GoDoc](https://godoc.org/github.com/xdg/scram?status.svg)](https://godoc.org/github.com/xdg/scram) +[![Build Status](https://travis-ci.org/xdg/scram.svg?branch=master)](https://travis-ci.org/xdg/scram) + +# scram – Go implementation of RFC-5802 + +## Description + +Package scram provides client and server implementations of the Salted +Challenge Response Authentication Mechanism (SCRAM) described in +[RFC-5802](https://tools.ietf.org/html/rfc5802) and +[RFC-7677](https://tools.ietf.org/html/rfc7677). + +It includes both client and server side support. + +Channel binding and extensions are not (yet) supported. + +## Examples + +### Client side + + package main + + import "github.com/xdg/scram" + + func main() { + // Get Client with username, password and (optional) authorization ID. + clientSHA1, err := scram.SHA1.NewClient("mulder", "trustno1", "") + if err != nil { + panic(err) + } + + // Prepare the authentication conversation. Use the empty string as the + // initial server message argument to start the conversation. + conv := clientSHA1.NewConversation() + var serverMsg string + + // Get the first message, send it and read the response. + firstMsg, err := conv.Step(serverMsg) + if err != nil { + panic(err) + } + serverMsg = sendClientMsg(firstMsg) + + // Get the second message, send it, and read the response. + secondMsg, err := conv.Step(serverMsg) + if err != nil { + panic(err) + } + serverMsg = sendClientMsg(secondMsg) + + // Validate the server's final message. We have no further message to + // send so ignore that return value. + _, err = conv.Step(serverMsg) + if err != nil { + panic(err) + } + + return + } + + func sendClientMsg(s string) string { + // A real implementation would send this to a server and read a reply. + return "" + } + +## Copyright and License + +Copyright 2018 by David A. Golden. All rights reserved. + +Licensed under the Apache License, Version 2.0 (the "License"). You may +obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 diff --git a/vendor/github.com/xdg/scram/client.go b/vendor/github.com/xdg/scram/client.go new file mode 100644 index 0000000000..ca0c4c711c --- /dev/null +++ b/vendor/github.com/xdg/scram/client.go @@ -0,0 +1,130 @@ +// Copyright 2018 by David A. Golden. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. You may obtain +// a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 + +package scram + +import ( + "sync" + + "golang.org/x/crypto/pbkdf2" +) + +// Client implements the client side of SCRAM authentication. It holds +// configuration values needed to initialize new client-side conversations for +// a specific username, password and authorization ID tuple. Client caches +// the computationally-expensive parts of a SCRAM conversation as described in +// RFC-5802. If repeated authentication conversations may be required for a +// user (e.g. disconnect/reconnect), the user's Client should be preserved. +// +// For security reasons, Clients have a default minimum PBKDF2 iteration count +// of 4096. If a server requests a smaller iteration count, an authentication +// conversation will error. 
+// +// A Client can also be used by a server application to construct the hashed +// authentication values to be stored for a new user. See StoredCredentials() +// for more. +type Client struct { + sync.RWMutex + username string + password string + authzID string + minIters int + nonceGen NonceGeneratorFcn + hashGen HashGeneratorFcn + cache map[KeyFactors]derivedKeys +} + +func newClient(username, password, authzID string, fcn HashGeneratorFcn) *Client { + return &Client{ + username: username, + password: password, + authzID: authzID, + minIters: 4096, + nonceGen: defaultNonceGenerator, + hashGen: fcn, + cache: make(map[KeyFactors]derivedKeys), + } +} + +// WithMinIterations changes minimum required PBKDF2 iteration count. +func (c *Client) WithMinIterations(n int) *Client { + c.Lock() + defer c.Unlock() + c.minIters = n + return c +} + +// WithNonceGenerator replaces the default nonce generator (base64 encoding of +// 24 bytes from crypto/rand) with a custom generator. This is provided for +// testing or for users with custom nonce requirements. +func (c *Client) WithNonceGenerator(ng NonceGeneratorFcn) *Client { + c.Lock() + defer c.Unlock() + c.nonceGen = ng + return c +} + +// NewConversation constructs a client-side authentication conversation. +// Conversations cannot be reused, so this must be called for each new +// authentication attempt. +func (c *Client) NewConversation() *ClientConversation { + c.RLock() + defer c.RUnlock() + return &ClientConversation{ + client: c, + nonceGen: c.nonceGen, + hashGen: c.hashGen, + minIters: c.minIters, + } +} + +func (c *Client) getDerivedKeys(kf KeyFactors) derivedKeys { + dk, ok := c.getCache(kf) + if !ok { + dk = c.computeKeys(kf) + c.setCache(kf, dk) + } + return dk +} + +// GetStoredCredentials takes a salt and iteration count structure and +// provides the values that must be stored by a server to authentication a +// user. These values are what the Server credential lookup function must +// return for a given username. +func (c *Client) GetStoredCredentials(kf KeyFactors) StoredCredentials { + dk := c.getDerivedKeys(kf) + return StoredCredentials{ + KeyFactors: kf, + StoredKey: dk.StoredKey, + ServerKey: dk.ServerKey, + } +} + +func (c *Client) computeKeys(kf KeyFactors) derivedKeys { + h := c.hashGen() + saltedPassword := pbkdf2.Key([]byte(c.password), []byte(kf.Salt), kf.Iters, h.Size(), c.hashGen) + clientKey := computeHMAC(c.hashGen, saltedPassword, []byte("Client Key")) + + return derivedKeys{ + ClientKey: clientKey, + StoredKey: computeHash(c.hashGen, clientKey), + ServerKey: computeHMAC(c.hashGen, saltedPassword, []byte("Server Key")), + } +} + +func (c *Client) getCache(kf KeyFactors) (derivedKeys, bool) { + c.RLock() + defer c.RUnlock() + dk, ok := c.cache[kf] + return dk, ok +} + +func (c *Client) setCache(kf KeyFactors, dk derivedKeys) { + c.Lock() + defer c.Unlock() + c.cache[kf] = dk + return +} diff --git a/vendor/github.com/xdg/scram/client_conv.go b/vendor/github.com/xdg/scram/client_conv.go new file mode 100644 index 0000000000..834056889e --- /dev/null +++ b/vendor/github.com/xdg/scram/client_conv.go @@ -0,0 +1,149 @@ +// Copyright 2018 by David A. Golden. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. 
You may obtain +// a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 + +package scram + +import ( + "crypto/hmac" + "encoding/base64" + "errors" + "fmt" + "strings" +) + +type clientState int + +const ( + clientStarting clientState = iota + clientFirst + clientFinal + clientDone +) + +// ClientConversation implements the client-side of an authentication +// conversation with a server. A new conversation must be created for +// each authentication attempt. +type ClientConversation struct { + client *Client + nonceGen NonceGeneratorFcn + hashGen HashGeneratorFcn + minIters int + state clientState + valid bool + gs2 string + nonce string + c1b string + serveSig []byte +} + +// Step takes a string provided from a server (or just an empty string for the +// very first conversation step) and attempts to move the authentication +// conversation forward. It returns a string to be sent to the server or an +// error if the server message is invalid. Calling Step after a conversation +// completes is also an error. +func (cc *ClientConversation) Step(challenge string) (response string, err error) { + switch cc.state { + case clientStarting: + cc.state = clientFirst + response, err = cc.firstMsg() + case clientFirst: + cc.state = clientFinal + response, err = cc.finalMsg(challenge) + case clientFinal: + cc.state = clientDone + response, err = cc.validateServer(challenge) + default: + response, err = "", errors.New("Conversation already completed") + } + return +} + +// Done returns true if the conversation is completed or has errored. +func (cc *ClientConversation) Done() bool { + return cc.state == clientDone +} + +// Valid returns true if the conversation successfully authenticated with the +// server, including counter-validation that the server actually has the +// user's stored credentials. 
+func (cc *ClientConversation) Valid() bool { + return cc.valid +} + +func (cc *ClientConversation) firstMsg() (string, error) { + // Values are cached for use in final message parameters + cc.gs2 = cc.gs2Header() + cc.nonce = cc.client.nonceGen() + cc.c1b = fmt.Sprintf("n=%s,r=%s", encodeName(cc.client.username), cc.nonce) + + return cc.gs2 + cc.c1b, nil +} + +func (cc *ClientConversation) finalMsg(s1 string) (string, error) { + msg, err := parseServerFirst(s1) + if err != nil { + return "", err + } + + // Check nonce prefix and update + if !strings.HasPrefix(msg.nonce, cc.nonce) { + return "", errors.New("server nonce did not extend client nonce") + } + cc.nonce = msg.nonce + + // Check iteration count vs minimum + if msg.iters < cc.minIters { + return "", fmt.Errorf("server requested too few iterations (%d)", msg.iters) + } + + // Create client-final-message-without-proof + c2wop := fmt.Sprintf( + "c=%s,r=%s", + base64.StdEncoding.EncodeToString([]byte(cc.gs2)), + cc.nonce, + ) + + // Create auth message + authMsg := cc.c1b + "," + s1 + "," + c2wop + + // Get derived keys from client cache + dk := cc.client.getDerivedKeys(KeyFactors{Salt: string(msg.salt), Iters: msg.iters}) + + // Create proof as clientkey XOR clientsignature + clientSignature := computeHMAC(cc.hashGen, dk.StoredKey, []byte(authMsg)) + clientProof := xorBytes(dk.ClientKey, clientSignature) + proof := base64.StdEncoding.EncodeToString(clientProof) + + // Cache ServerSignature for later validation + cc.serveSig = computeHMAC(cc.hashGen, dk.ServerKey, []byte(authMsg)) + + return fmt.Sprintf("%s,p=%s", c2wop, proof), nil +} + +func (cc *ClientConversation) validateServer(s2 string) (string, error) { + msg, err := parseServerFinal(s2) + if err != nil { + return "", err + } + + if len(msg.err) > 0 { + return "", fmt.Errorf("server error: %s", msg.err) + } + + if !hmac.Equal(msg.verifier, cc.serveSig) { + return "", errors.New("server validation failed") + } + + cc.valid = true + return "", nil +} + +func (cc *ClientConversation) gs2Header() string { + if cc.client.authzID == "" { + return "n,," + } + return fmt.Sprintf("n,%s,", encodeName(cc.client.authzID)) +} diff --git a/vendor/github.com/xdg/scram/common.go b/vendor/github.com/xdg/scram/common.go new file mode 100644 index 0000000000..cb705cb74e --- /dev/null +++ b/vendor/github.com/xdg/scram/common.go @@ -0,0 +1,97 @@ +// Copyright 2018 by David A. Golden. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. You may obtain +// a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 + +package scram + +import ( + "crypto/hmac" + "crypto/rand" + "encoding/base64" + "strings" +) + +// NonceGeneratorFcn defines a function that returns a string of high-quality +// random printable ASCII characters EXCLUDING the comma (',') character. The +// default nonce generator provides Base64 encoding of 24 bytes from +// crypto/rand. +type NonceGeneratorFcn func() string + +// derivedKeys collects the three cryptographically derived values +// into one struct for caching. +type derivedKeys struct { + ClientKey []byte + StoredKey []byte + ServerKey []byte +} + +// KeyFactors represent the two server-provided factors needed to compute +// client credentials for authentication. Salt is decoded bytes (i.e. not +// base64), but in string form so that KeyFactors can be used as a map key for +// cached credentials. 
+type KeyFactors struct { + Salt string + Iters int +} + +// StoredCredentials are the values that a server must store for a given +// username to allow authentication. They include the salt and iteration +// count, plus the derived values to authenticate a client and for the server +// to authenticate itself back to the client. +// +// NOTE: these are specific to a given hash function. To allow a user to +// authenticate with either SCRAM-SHA-1 or SCRAM-SHA-256, two sets of +// StoredCredentials must be created and stored, one for each hash function. +type StoredCredentials struct { + KeyFactors + StoredKey []byte + ServerKey []byte +} + +// CredentialLookup is a callback to provide StoredCredentials for a given +// username. This is used to configure Server objects. +// +// NOTE: these are specific to a given hash function. The callback provided +// to a Server with a given hash function must provide the corresponding +// StoredCredentials. +type CredentialLookup func(string) (StoredCredentials, error) + +func defaultNonceGenerator() string { + raw := make([]byte, 24) + nonce := make([]byte, base64.StdEncoding.EncodedLen(len(raw))) + rand.Read(raw) + base64.StdEncoding.Encode(nonce, raw) + return string(nonce) +} + +func encodeName(s string) string { + return strings.Replace(strings.Replace(s, "=", "=3D", -1), ",", "=2C", -1) +} + +func decodeName(s string) (string, error) { + // TODO Check for = not followed by 2C or 3D + return strings.Replace(strings.Replace(s, "=2C", ",", -1), "=3D", "=", -1), nil +} + +func computeHash(hg HashGeneratorFcn, b []byte) []byte { + h := hg() + h.Write(b) + return h.Sum(nil) +} + +func computeHMAC(hg HashGeneratorFcn, key, data []byte) []byte { + mac := hmac.New(hg, key) + mac.Write(data) + return mac.Sum(nil) +} + +func xorBytes(a, b []byte) []byte { + // TODO check a & b are same length, or just xor to smallest + xor := make([]byte, len(a)) + for i := range a { + xor[i] = a[i] ^ b[i] + } + return xor +} diff --git a/vendor/github.com/xdg/scram/doc.go b/vendor/github.com/xdg/scram/doc.go new file mode 100644 index 0000000000..d43bee6071 --- /dev/null +++ b/vendor/github.com/xdg/scram/doc.go @@ -0,0 +1,24 @@ +// Copyright 2018 by David A. Golden. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. You may obtain +// a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 + +// Package scram provides client and server implementations of the Salted +// Challenge Response Authentication Mechanism (SCRAM) described in RFC-5802 +// and RFC-7677. +// +// Usage +// +// The scram package provides two variables, `SHA1` and `SHA256`, that are +// used to construct Client or Server objects. +// +// clientSHA1, err := scram.SHA1.NewClient(username, password, authID) +// clientSHA256, err := scram.SHA256.NewClient(username, password, authID) +// +// serverSHA1, err := scram.SHA1.NewServer(credentialLookupFcn) +// serverSHA256, err := scram.SHA256.NewServer(credentialLookupFcn) +// +// These objects are used to construct ClientConversation or +// ServerConversation objects that are used to carry out authentication. +package scram diff --git a/vendor/github.com/xdg/scram/parse.go b/vendor/github.com/xdg/scram/parse.go new file mode 100644 index 0000000000..722f6043d3 --- /dev/null +++ b/vendor/github.com/xdg/scram/parse.go @@ -0,0 +1,205 @@ +// Copyright 2018 by David A. Golden. All rights reserved. 
+// +// Licensed under the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. You may obtain +// a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 + +package scram + +import ( + "encoding/base64" + "errors" + "fmt" + "strconv" + "strings" +) + +type c1Msg struct { + gs2Header string + authzID string + username string + nonce string + c1b string +} + +type c2Msg struct { + cbind []byte + nonce string + proof []byte + c2wop string +} + +type s1Msg struct { + nonce string + salt []byte + iters int +} + +type s2Msg struct { + verifier []byte + err string +} + +func parseField(s, k string) (string, error) { + t := strings.TrimPrefix(s, k+"=") + if t == s { + return "", fmt.Errorf("error parsing '%s' for field '%s'", s, k) + } + return t, nil +} + +func parseGS2Flag(s string) (string, error) { + if s[0] == 'p' { + return "", fmt.Errorf("channel binding requested but not supported") + } + + if s == "n" || s == "y" { + return s, nil + } + + return "", fmt.Errorf("error parsing '%s' for gs2 flag", s) +} + +func parseFieldBase64(s, k string) ([]byte, error) { + raw, err := parseField(s, k) + if err != nil { + return nil, err + } + + dec, err := base64.StdEncoding.DecodeString(raw) + if err != nil { + return nil, err + } + + return dec, nil +} + +func parseFieldInt(s, k string) (int, error) { + raw, err := parseField(s, k) + if err != nil { + return 0, err + } + + num, err := strconv.Atoi(raw) + if err != nil { + return 0, fmt.Errorf("error parsing field '%s': %v", k, err) + } + + return num, nil +} + +func parseClientFirst(c1 string) (msg c1Msg, err error) { + + fields := strings.Split(c1, ",") + if len(fields) < 4 { + err = errors.New("not enough fields in first server message") + return + } + + gs2flag, err := parseGS2Flag(fields[0]) + if err != nil { + return + } + + // 'a' field is optional + if len(fields[1]) > 0 { + msg.authzID, err = parseField(fields[1], "a") + if err != nil { + return + } + } + + // Recombine and save the gs2 header + msg.gs2Header = gs2flag + "," + msg.authzID + "," + + // Check for unsupported extensions field "m". + if strings.HasPrefix(fields[2], "m=") { + err = errors.New("SCRAM message extensions are not supported") + return + } + + msg.username, err = parseField(fields[2], "n") + if err != nil { + return + } + + msg.nonce, err = parseField(fields[3], "r") + if err != nil { + return + } + + msg.c1b = strings.Join(fields[2:], ",") + + return +} + +func parseClientFinal(c2 string) (msg c2Msg, err error) { + fields := strings.Split(c2, ",") + if len(fields) < 3 { + err = errors.New("not enough fields in first server message") + return + } + + msg.cbind, err = parseFieldBase64(fields[0], "c") + if err != nil { + return + } + + msg.nonce, err = parseField(fields[1], "r") + if err != nil { + return + } + + // Extension fields may come between nonce and proof, so we + // grab the *last* fields as proof. + msg.proof, err = parseFieldBase64(fields[len(fields)-1], "p") + if err != nil { + return + } + + msg.c2wop = c2[:strings.LastIndex(c2, ",")] + + return +} + +func parseServerFirst(s1 string) (msg s1Msg, err error) { + + // Check for unsupported extensions field "m". 
+ if strings.HasPrefix(s1, "m=") { + err = errors.New("SCRAM message extensions are not supported") + return + } + + fields := strings.Split(s1, ",") + if len(fields) < 3 { + err = errors.New("not enough fields in first server message") + return + } + + msg.nonce, err = parseField(fields[0], "r") + if err != nil { + return + } + + msg.salt, err = parseFieldBase64(fields[1], "s") + if err != nil { + return + } + + msg.iters, err = parseFieldInt(fields[2], "i") + + return +} + +func parseServerFinal(s2 string) (msg s2Msg, err error) { + fields := strings.Split(s2, ",") + + msg.verifier, err = parseFieldBase64(fields[0], "v") + if err == nil { + return + } + + msg.err, err = parseField(fields[0], "e") + + return +} diff --git a/vendor/github.com/xdg/scram/scram.go b/vendor/github.com/xdg/scram/scram.go new file mode 100644 index 0000000000..9e9836afe7 --- /dev/null +++ b/vendor/github.com/xdg/scram/scram.go @@ -0,0 +1,66 @@ +// Copyright 2018 by David A. Golden. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. You may obtain +// a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 + +package scram + +import ( + "crypto/sha1" + "crypto/sha256" + "fmt" + "hash" + + "github.com/xdg/stringprep" +) + +// HashGeneratorFcn abstracts a factory function that returns a hash.Hash +// value to be used for SCRAM operations. Generally, one would use the +// provided package variables, `scram.SHA1` and `scram.SHA256`, for the most +// common forms of SCRAM. +type HashGeneratorFcn func() hash.Hash + +// SHA1 is a function that returns a crypto/sha1 hasher and should be used to +// create Client objects configured for SHA-1 hashing. +var SHA1 HashGeneratorFcn = func() hash.Hash { return sha1.New() } + +// SHA256 is a function that returns a crypto/sha256 hasher and should be used +// to create Client objects configured for SHA-256 hashing. +var SHA256 HashGeneratorFcn = func() hash.Hash { return sha256.New() } + +// NewClient constructs a SCRAM client component based on a given hash.Hash +// factory receiver. This constructor will normalize the username, password +// and authzID via the SASLprep algorithm, as recommended by RFC-5802. If +// SASLprep fails, the method returns an error. +func (f HashGeneratorFcn) NewClient(username, password, authzID string) (*Client, error) { + var userprep, passprep, authprep string + var err error + + if userprep, err = stringprep.SASLprep.Prepare(username); err != nil { + return nil, fmt.Errorf("Error SASLprepping username '%s': %v", username, err) + } + if passprep, err = stringprep.SASLprep.Prepare(password); err != nil { + return nil, fmt.Errorf("Error SASLprepping password '%s': %v", password, err) + } + if authprep, err = stringprep.SASLprep.Prepare(authzID); err != nil { + return nil, fmt.Errorf("Error SASLprepping authzID '%s': %v", authzID, err) + } + + return newClient(userprep, passprep, authprep, f), nil +} + +// NewClientUnprepped acts like NewClient, except none of the arguments will +// be normalized via SASLprep. This is not generally recommended, but is +// provided for users that may have custom normalization needs. +func (f HashGeneratorFcn) NewClientUnprepped(username, password, authzID string) (*Client, error) { + return newClient(username, password, authzID, f), nil +} + +// NewServer constructs a SCRAM server component based on a given hash.Hash +// factory receiver. 
To be maximally generic, it uses dependency injection to +// handle credential lookup, which is the process of turning a username string +// into a struct with stored credentials for authentication. +func (f HashGeneratorFcn) NewServer(cl CredentialLookup) (*Server, error) { + return newServer(cl, f) +} diff --git a/vendor/github.com/xdg/scram/server.go b/vendor/github.com/xdg/scram/server.go new file mode 100644 index 0000000000..b119b36156 --- /dev/null +++ b/vendor/github.com/xdg/scram/server.go @@ -0,0 +1,50 @@ +// Copyright 2018 by David A. Golden. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. You may obtain +// a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 + +package scram + +import "sync" + +// Server implements the server side of SCRAM authentication. It holds +// configuration values needed to initialize new server-side conversations. +// Generally, this can be persistent within an application. +type Server struct { + sync.RWMutex + credentialCB CredentialLookup + nonceGen NonceGeneratorFcn + hashGen HashGeneratorFcn +} + +func newServer(cl CredentialLookup, fcn HashGeneratorFcn) (*Server, error) { + return &Server{ + credentialCB: cl, + nonceGen: defaultNonceGenerator, + hashGen: fcn, + }, nil +} + +// WithNonceGenerator replaces the default nonce generator (base64 encoding of +// 24 bytes from crypto/rand) with a custom generator. This is provided for +// testing or for users with custom nonce requirements. +func (s *Server) WithNonceGenerator(ng NonceGeneratorFcn) *Server { + s.Lock() + defer s.Unlock() + s.nonceGen = ng + return s +} + +// NewConversation constructs a server-side authentication conversation. +// Conversations cannot be reused, so this must be called for each new +// authentication attempt. +func (s *Server) NewConversation() *ServerConversation { + s.RLock() + defer s.RUnlock() + return &ServerConversation{ + nonceGen: s.nonceGen, + hashGen: s.hashGen, + credentialCB: s.credentialCB, + } +} diff --git a/vendor/github.com/xdg/scram/server_conv.go b/vendor/github.com/xdg/scram/server_conv.go new file mode 100644 index 0000000000..9c8838c38a --- /dev/null +++ b/vendor/github.com/xdg/scram/server_conv.go @@ -0,0 +1,151 @@ +// Copyright 2018 by David A. Golden. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. You may obtain +// a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 + +package scram + +import ( + "crypto/hmac" + "encoding/base64" + "errors" + "fmt" +) + +type serverState int + +const ( + serverFirst serverState = iota + serverFinal + serverDone +) + +// ServerConversation implements the server-side of an authentication +// conversation with a client. A new conversation must be created for +// each authentication attempt. +type ServerConversation struct { + nonceGen NonceGeneratorFcn + hashGen HashGeneratorFcn + credentialCB CredentialLookup + state serverState + credential StoredCredentials + valid bool + gs2Header string + username string + authzID string + nonce string + c1b string + s1 string +} + +// Step takes a string provided from a client and attempts to move the +// authentication conversation forward. It returns a string to be sent to the +// client or an error if the client message is invalid. Calling Step after a +// conversation completes is also an error. 
+func (sc *ServerConversation) Step(challenge string) (response string, err error) { + switch sc.state { + case serverFirst: + sc.state = serverFinal + response, err = sc.firstMsg(challenge) + case serverFinal: + sc.state = serverDone + response, err = sc.finalMsg(challenge) + default: + response, err = "", errors.New("Conversation already completed") + } + return +} + +// Done returns true if the conversation is completed or has errored. +func (sc *ServerConversation) Done() bool { + return sc.state == serverDone +} + +// Valid returns true if the conversation successfully authenticated the +// client. +func (sc *ServerConversation) Valid() bool { + return sc.valid +} + +// Username returns the client-provided username. This is valid to call +// if the first conversation Step() is successful. +func (sc *ServerConversation) Username() string { + return sc.username +} + +// AuthzID returns the (optional) client-provided authorization identity, if +// any. If one was not provided, it returns the empty string. This is valid +// to call if the first conversation Step() is successful. +func (sc *ServerConversation) AuthzID() string { + return sc.authzID +} + +func (sc *ServerConversation) firstMsg(c1 string) (string, error) { + msg, err := parseClientFirst(c1) + if err != nil { + sc.state = serverDone + return "", err + } + + sc.gs2Header = msg.gs2Header + sc.username = msg.username + sc.authzID = msg.authzID + + sc.credential, err = sc.credentialCB(msg.username) + if err != nil { + sc.state = serverDone + return "e=unknown-user", err + } + + sc.nonce = msg.nonce + sc.nonceGen() + sc.c1b = msg.c1b + sc.s1 = fmt.Sprintf("r=%s,s=%s,i=%d", + sc.nonce, + base64.StdEncoding.EncodeToString([]byte(sc.credential.Salt)), + sc.credential.Iters, + ) + + return sc.s1, nil +} + +// For errors, returns server error message as well as non-nil error. Callers +// can choose whether to send server error or not. +func (sc *ServerConversation) finalMsg(c2 string) (string, error) { + msg, err := parseClientFinal(c2) + if err != nil { + return "", err + } + + // Check channel binding matches what we expect; in this case, we expect + // just the gs2 header we received as we don't support channel binding + // with a data payload. If we add binding, we need to independently + // compute the header to match here. 
+ if string(msg.cbind) != sc.gs2Header { + return "e=channel-bindings-dont-match", fmt.Errorf("channel binding received '%s' doesn't match expected '%s'", msg.cbind, sc.gs2Header) + } + + // Check nonce received matches what we sent + if msg.nonce != sc.nonce { + return "e=other-error", errors.New("nonce received did not match nonce sent") + } + + // Create auth message + authMsg := sc.c1b + "," + sc.s1 + "," + msg.c2wop + + // Retrieve ClientKey from proof and verify it + clientSignature := computeHMAC(sc.hashGen, sc.credential.StoredKey, []byte(authMsg)) + clientKey := xorBytes([]byte(msg.proof), clientSignature) + storedKey := computeHash(sc.hashGen, clientKey) + + // Compare with constant-time function + if !hmac.Equal(storedKey, sc.credential.StoredKey) { + return "e=invalid-proof", errors.New("challenge proof invalid") + } + + sc.valid = true + + // Compute and return server verifier + serverSignature := computeHMAC(sc.hashGen, sc.credential.ServerKey, []byte(authMsg)) + return "v=" + base64.StdEncoding.EncodeToString(serverSignature), nil +} diff --git a/vendor/github.com/xdg/stringprep/.gitignore b/vendor/github.com/xdg/stringprep/.gitignore new file mode 100644 index 0000000000..e69de29bb2 diff --git a/vendor/github.com/xdg/stringprep/.travis.yml b/vendor/github.com/xdg/stringprep/.travis.yml new file mode 100644 index 0000000000..376c53a74d --- /dev/null +++ b/vendor/github.com/xdg/stringprep/.travis.yml @@ -0,0 +1,10 @@ +language: go +sudo: false +go: + - 1.7 + - 1.8 + - 1.9 + - master +matrix: + allow_failures: + - go: master diff --git a/vendor/github.com/xdg/stringprep/LICENSE b/vendor/github.com/xdg/stringprep/LICENSE new file mode 100644 index 0000000000..67db858821 --- /dev/null +++ b/vendor/github.com/xdg/stringprep/LICENSE @@ -0,0 +1,175 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). 
+ + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. diff --git a/vendor/github.com/xdg/stringprep/README.md b/vendor/github.com/xdg/stringprep/README.md new file mode 100644 index 0000000000..87279e3ebf --- /dev/null +++ b/vendor/github.com/xdg/stringprep/README.md @@ -0,0 +1,27 @@ +[![GoDoc](https://godoc.org/github.com/xdg/stringprep?status.svg)](https://godoc.org/github.com/xdg/stringprep) +[![Build Status](https://travis-ci.org/xdg/stringprep.svg?branch=master)](https://travis-ci.org/xdg/stringprep) + +# stringprep – Go implementation of RFC-3454 stringprep and RFC-4013 SASLprep + +## Synopsis + +``` + import "github.com/xdg/stringprep" + + prepped := stringprep.SASLprep.Prepare("TrustNô1") + +``` + +## Description + +This library provides an implementation of the stringprep algorithm +(RFC-3454) in Go, including all data tables. + +A pre-built SASLprep (RFC-4013) profile is provided as well. + +## Copyright and License + +Copyright 2018 by David A. Golden. All rights reserved. + +Licensed under the Apache License, Version 2.0 (the "License"). You may +obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 diff --git a/vendor/github.com/xdg/stringprep/bidi.go b/vendor/github.com/xdg/stringprep/bidi.go new file mode 100644 index 0000000000..6f6d321dfd --- /dev/null +++ b/vendor/github.com/xdg/stringprep/bidi.go @@ -0,0 +1,73 @@ +// Copyright 2018 by David A. Golden. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. 
You may obtain +// a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 + +package stringprep + +var errHasLCat = "BiDi string can't have runes from category L" +var errFirstRune = "BiDi string first rune must have category R or AL" +var errLastRune = "BiDi string last rune must have category R or AL" + +// Check for prohibited characters from table C.8 +func checkBiDiProhibitedRune(s string) error { + for _, r := range s { + if TableC8.Contains(r) { + return Error{Msg: errProhibited, Rune: r} + } + } + return nil +} + +// Check for LCat characters from table D.2 +func checkBiDiLCat(s string) error { + for _, r := range s { + if TableD2.Contains(r) { + return Error{Msg: errHasLCat, Rune: r} + } + } + return nil +} + +// Check first and last characters are in table D.1; requires non-empty string +func checkBadFirstAndLastRandALCat(s string) error { + rs := []rune(s) + if !TableD1.Contains(rs[0]) { + return Error{Msg: errFirstRune, Rune: rs[0]} + } + n := len(rs) - 1 + if !TableD1.Contains(rs[n]) { + return Error{Msg: errLastRune, Rune: rs[n]} + } + return nil +} + +// Look for RandALCat characters from table D.1 +func hasBiDiRandALCat(s string) bool { + for _, r := range s { + if TableD1.Contains(r) { + return true + } + } + return false +} + +// Check that BiDi rules are satisfied ; let empty string pass this rule +func passesBiDiRules(s string) error { + if len(s) == 0 { + return nil + } + if err := checkBiDiProhibitedRune(s); err != nil { + return err + } + if hasBiDiRandALCat(s) { + if err := checkBiDiLCat(s); err != nil { + return err + } + if err := checkBadFirstAndLastRandALCat(s); err != nil { + return err + } + } + return nil +} diff --git a/vendor/github.com/xdg/stringprep/doc.go b/vendor/github.com/xdg/stringprep/doc.go new file mode 100644 index 0000000000..b319e081b7 --- /dev/null +++ b/vendor/github.com/xdg/stringprep/doc.go @@ -0,0 +1,10 @@ +// Copyright 2018 by David A. Golden. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. You may obtain +// a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 + +// Package stringprep provides data tables and algorithms for RFC-3454, +// including errata (as of 2018-02). It also provides a profile for +// SASLprep as defined in RFC-4013. +package stringprep diff --git a/vendor/github.com/xdg/stringprep/error.go b/vendor/github.com/xdg/stringprep/error.go new file mode 100644 index 0000000000..7403e49911 --- /dev/null +++ b/vendor/github.com/xdg/stringprep/error.go @@ -0,0 +1,14 @@ +package stringprep + +import "fmt" + +// Error describes problems encountered during stringprep, including what rune +// was problematic. +type Error struct { + Msg string + Rune rune +} + +func (e Error) Error() string { + return fmt.Sprintf("%s (rune: '\\u%04x')", e.Msg, e.Rune) +} diff --git a/vendor/github.com/xdg/stringprep/map.go b/vendor/github.com/xdg/stringprep/map.go new file mode 100644 index 0000000000..e56a0dd43e --- /dev/null +++ b/vendor/github.com/xdg/stringprep/map.go @@ -0,0 +1,21 @@ +// Copyright 2018 by David A. Golden. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. You may obtain +// a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 + +package stringprep + +// Mapping represents a stringprep mapping, from a single rune to zero or more +// runes. 
+type Mapping map[rune][]rune + +// Map maps a rune to a (possibly empty) rune slice via a stringprep Mapping. +// The ok return value is false if the rune was not found. +func (m Mapping) Map(r rune) (replacement []rune, ok bool) { + rs, ok := m[r] + if !ok { + return nil, false + } + return rs, true +} diff --git a/vendor/github.com/xdg/stringprep/profile.go b/vendor/github.com/xdg/stringprep/profile.go new file mode 100644 index 0000000000..5a73be9e54 --- /dev/null +++ b/vendor/github.com/xdg/stringprep/profile.go @@ -0,0 +1,75 @@ +package stringprep + +import ( + "golang.org/x/text/unicode/norm" +) + +// Profile represents a stringprep profile. +type Profile struct { + Mappings []Mapping + Normalize bool + Prohibits []Set + CheckBiDi bool +} + +var errProhibited = "prohibited character" + +// Prepare transforms an input string to an output string following +// the rules defined in the profile as defined by RFC-3454. +func (p Profile) Prepare(s string) (string, error) { + // Optimistically, assume output will be same length as input + temp := make([]rune, 0, len(s)) + + // Apply maps + for _, r := range s { + rs, ok := p.applyMaps(r) + if ok { + temp = append(temp, rs...) + } else { + temp = append(temp, r) + } + } + + // Normalize + var out string + if p.Normalize { + out = norm.NFKC.String(string(temp)) + } else { + out = string(temp) + } + + // Check prohibited + for _, r := range out { + if p.runeIsProhibited(r) { + return "", Error{Msg: errProhibited, Rune: r} + } + } + + // Check BiDi allowed + if p.CheckBiDi { + if err := passesBiDiRules(out); err != nil { + return "", err + } + } + + return out, nil +} + +func (p Profile) applyMaps(r rune) ([]rune, bool) { + for _, m := range p.Mappings { + rs, ok := m.Map(r) + if ok { + return rs, true + } + } + return nil, false +} + +func (p Profile) runeIsProhibited(r rune) bool { + for _, s := range p.Prohibits { + if s.Contains(r) { + return true + } + } + return false +} diff --git a/vendor/github.com/xdg/stringprep/saslprep.go b/vendor/github.com/xdg/stringprep/saslprep.go new file mode 100644 index 0000000000..40013488bf --- /dev/null +++ b/vendor/github.com/xdg/stringprep/saslprep.go @@ -0,0 +1,52 @@ +package stringprep + +var mapNonASCIISpaceToASCIISpace = Mapping{ + 0x00A0: []rune{0x0020}, + 0x1680: []rune{0x0020}, + 0x2000: []rune{0x0020}, + 0x2001: []rune{0x0020}, + 0x2002: []rune{0x0020}, + 0x2003: []rune{0x0020}, + 0x2004: []rune{0x0020}, + 0x2005: []rune{0x0020}, + 0x2006: []rune{0x0020}, + 0x2007: []rune{0x0020}, + 0x2008: []rune{0x0020}, + 0x2009: []rune{0x0020}, + 0x200A: []rune{0x0020}, + 0x200B: []rune{0x0020}, + 0x202F: []rune{0x0020}, + 0x205F: []rune{0x0020}, + 0x3000: []rune{0x0020}, +} + +// SASLprep is a pre-defined stringprep profile for user names and passwords +// as described in RFC-4013. +// +// Because the stringprep distinction between query and stored strings was +// intended for compatibility across profile versions, but SASLprep was never +// updated and is now deprecated, this profile only operates in stored +// strings mode, prohibiting unassigned code points. 
+var SASLprep Profile = saslprep + +var saslprep = Profile{ + Mappings: []Mapping{ + TableB1, + mapNonASCIISpaceToASCIISpace, + }, + Normalize: true, + Prohibits: []Set{ + TableA1, + TableC1_2, + TableC2_1, + TableC2_2, + TableC3, + TableC4, + TableC5, + TableC6, + TableC7, + TableC8, + TableC9, + }, + CheckBiDi: true, +} diff --git a/vendor/github.com/xdg/stringprep/set.go b/vendor/github.com/xdg/stringprep/set.go new file mode 100644 index 0000000000..c837e28c88 --- /dev/null +++ b/vendor/github.com/xdg/stringprep/set.go @@ -0,0 +1,36 @@ +// Copyright 2018 by David A. Golden. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. You may obtain +// a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 + +package stringprep + +import "sort" + +// RuneRange represents a close-ended range of runes: [N,M]. For a range +// consisting of a single rune, N and M will be equal. +type RuneRange [2]rune + +// Contains returns true if a rune is within the bounds of the RuneRange. +func (rr RuneRange) Contains(r rune) bool { + return rr[0] <= r && r <= rr[1] +} + +func (rr RuneRange) isAbove(r rune) bool { + return r <= rr[0] +} + +// Set represents a stringprep data table used to identify runes of a +// particular type. +type Set []RuneRange + +// Contains returns true if a rune is within any of the RuneRanges in the +// Set. +func (s Set) Contains(r rune) bool { + i := sort.Search(len(s), func(i int) bool { return s[i].Contains(r) || s[i].isAbove(r) }) + if i < len(s) && s[i].Contains(r) { + return true + } + return false +} diff --git a/vendor/github.com/xdg/stringprep/tables.go b/vendor/github.com/xdg/stringprep/tables.go new file mode 100644 index 0000000000..c3fc1fa0fa --- /dev/null +++ b/vendor/github.com/xdg/stringprep/tables.go @@ -0,0 +1,3215 @@ +// Copyright 2018 by David A. Golden. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. 
You may obtain +// a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 + +package stringprep + +var tableA1 = Set{ + RuneRange{0x0221, 0x0221}, + RuneRange{0x0234, 0x024F}, + RuneRange{0x02AE, 0x02AF}, + RuneRange{0x02EF, 0x02FF}, + RuneRange{0x0350, 0x035F}, + RuneRange{0x0370, 0x0373}, + RuneRange{0x0376, 0x0379}, + RuneRange{0x037B, 0x037D}, + RuneRange{0x037F, 0x0383}, + RuneRange{0x038B, 0x038B}, + RuneRange{0x038D, 0x038D}, + RuneRange{0x03A2, 0x03A2}, + RuneRange{0x03CF, 0x03CF}, + RuneRange{0x03F7, 0x03FF}, + RuneRange{0x0487, 0x0487}, + RuneRange{0x04CF, 0x04CF}, + RuneRange{0x04F6, 0x04F7}, + RuneRange{0x04FA, 0x04FF}, + RuneRange{0x0510, 0x0530}, + RuneRange{0x0557, 0x0558}, + RuneRange{0x0560, 0x0560}, + RuneRange{0x0588, 0x0588}, + RuneRange{0x058B, 0x0590}, + RuneRange{0x05A2, 0x05A2}, + RuneRange{0x05BA, 0x05BA}, + RuneRange{0x05C5, 0x05CF}, + RuneRange{0x05EB, 0x05EF}, + RuneRange{0x05F5, 0x060B}, + RuneRange{0x060D, 0x061A}, + RuneRange{0x061C, 0x061E}, + RuneRange{0x0620, 0x0620}, + RuneRange{0x063B, 0x063F}, + RuneRange{0x0656, 0x065F}, + RuneRange{0x06EE, 0x06EF}, + RuneRange{0x06FF, 0x06FF}, + RuneRange{0x070E, 0x070E}, + RuneRange{0x072D, 0x072F}, + RuneRange{0x074B, 0x077F}, + RuneRange{0x07B2, 0x0900}, + RuneRange{0x0904, 0x0904}, + RuneRange{0x093A, 0x093B}, + RuneRange{0x094E, 0x094F}, + RuneRange{0x0955, 0x0957}, + RuneRange{0x0971, 0x0980}, + RuneRange{0x0984, 0x0984}, + RuneRange{0x098D, 0x098E}, + RuneRange{0x0991, 0x0992}, + RuneRange{0x09A9, 0x09A9}, + RuneRange{0x09B1, 0x09B1}, + RuneRange{0x09B3, 0x09B5}, + RuneRange{0x09BA, 0x09BB}, + RuneRange{0x09BD, 0x09BD}, + RuneRange{0x09C5, 0x09C6}, + RuneRange{0x09C9, 0x09CA}, + RuneRange{0x09CE, 0x09D6}, + RuneRange{0x09D8, 0x09DB}, + RuneRange{0x09DE, 0x09DE}, + RuneRange{0x09E4, 0x09E5}, + RuneRange{0x09FB, 0x0A01}, + RuneRange{0x0A03, 0x0A04}, + RuneRange{0x0A0B, 0x0A0E}, + RuneRange{0x0A11, 0x0A12}, + RuneRange{0x0A29, 0x0A29}, + RuneRange{0x0A31, 0x0A31}, + RuneRange{0x0A34, 0x0A34}, + RuneRange{0x0A37, 0x0A37}, + RuneRange{0x0A3A, 0x0A3B}, + RuneRange{0x0A3D, 0x0A3D}, + RuneRange{0x0A43, 0x0A46}, + RuneRange{0x0A49, 0x0A4A}, + RuneRange{0x0A4E, 0x0A58}, + RuneRange{0x0A5D, 0x0A5D}, + RuneRange{0x0A5F, 0x0A65}, + RuneRange{0x0A75, 0x0A80}, + RuneRange{0x0A84, 0x0A84}, + RuneRange{0x0A8C, 0x0A8C}, + RuneRange{0x0A8E, 0x0A8E}, + RuneRange{0x0A92, 0x0A92}, + RuneRange{0x0AA9, 0x0AA9}, + RuneRange{0x0AB1, 0x0AB1}, + RuneRange{0x0AB4, 0x0AB4}, + RuneRange{0x0ABA, 0x0ABB}, + RuneRange{0x0AC6, 0x0AC6}, + RuneRange{0x0ACA, 0x0ACA}, + RuneRange{0x0ACE, 0x0ACF}, + RuneRange{0x0AD1, 0x0ADF}, + RuneRange{0x0AE1, 0x0AE5}, + RuneRange{0x0AF0, 0x0B00}, + RuneRange{0x0B04, 0x0B04}, + RuneRange{0x0B0D, 0x0B0E}, + RuneRange{0x0B11, 0x0B12}, + RuneRange{0x0B29, 0x0B29}, + RuneRange{0x0B31, 0x0B31}, + RuneRange{0x0B34, 0x0B35}, + RuneRange{0x0B3A, 0x0B3B}, + RuneRange{0x0B44, 0x0B46}, + RuneRange{0x0B49, 0x0B4A}, + RuneRange{0x0B4E, 0x0B55}, + RuneRange{0x0B58, 0x0B5B}, + RuneRange{0x0B5E, 0x0B5E}, + RuneRange{0x0B62, 0x0B65}, + RuneRange{0x0B71, 0x0B81}, + RuneRange{0x0B84, 0x0B84}, + RuneRange{0x0B8B, 0x0B8D}, + RuneRange{0x0B91, 0x0B91}, + RuneRange{0x0B96, 0x0B98}, + RuneRange{0x0B9B, 0x0B9B}, + RuneRange{0x0B9D, 0x0B9D}, + RuneRange{0x0BA0, 0x0BA2}, + RuneRange{0x0BA5, 0x0BA7}, + RuneRange{0x0BAB, 0x0BAD}, + RuneRange{0x0BB6, 0x0BB6}, + RuneRange{0x0BBA, 0x0BBD}, + RuneRange{0x0BC3, 0x0BC5}, + RuneRange{0x0BC9, 0x0BC9}, + RuneRange{0x0BCE, 0x0BD6}, + RuneRange{0x0BD8, 0x0BE6}, + RuneRange{0x0BF3, 0x0C00}, + 
RuneRange{0x0C04, 0x0C04}, + RuneRange{0x0C0D, 0x0C0D}, + RuneRange{0x0C11, 0x0C11}, + RuneRange{0x0C29, 0x0C29}, + RuneRange{0x0C34, 0x0C34}, + RuneRange{0x0C3A, 0x0C3D}, + RuneRange{0x0C45, 0x0C45}, + RuneRange{0x0C49, 0x0C49}, + RuneRange{0x0C4E, 0x0C54}, + RuneRange{0x0C57, 0x0C5F}, + RuneRange{0x0C62, 0x0C65}, + RuneRange{0x0C70, 0x0C81}, + RuneRange{0x0C84, 0x0C84}, + RuneRange{0x0C8D, 0x0C8D}, + RuneRange{0x0C91, 0x0C91}, + RuneRange{0x0CA9, 0x0CA9}, + RuneRange{0x0CB4, 0x0CB4}, + RuneRange{0x0CBA, 0x0CBD}, + RuneRange{0x0CC5, 0x0CC5}, + RuneRange{0x0CC9, 0x0CC9}, + RuneRange{0x0CCE, 0x0CD4}, + RuneRange{0x0CD7, 0x0CDD}, + RuneRange{0x0CDF, 0x0CDF}, + RuneRange{0x0CE2, 0x0CE5}, + RuneRange{0x0CF0, 0x0D01}, + RuneRange{0x0D04, 0x0D04}, + RuneRange{0x0D0D, 0x0D0D}, + RuneRange{0x0D11, 0x0D11}, + RuneRange{0x0D29, 0x0D29}, + RuneRange{0x0D3A, 0x0D3D}, + RuneRange{0x0D44, 0x0D45}, + RuneRange{0x0D49, 0x0D49}, + RuneRange{0x0D4E, 0x0D56}, + RuneRange{0x0D58, 0x0D5F}, + RuneRange{0x0D62, 0x0D65}, + RuneRange{0x0D70, 0x0D81}, + RuneRange{0x0D84, 0x0D84}, + RuneRange{0x0D97, 0x0D99}, + RuneRange{0x0DB2, 0x0DB2}, + RuneRange{0x0DBC, 0x0DBC}, + RuneRange{0x0DBE, 0x0DBF}, + RuneRange{0x0DC7, 0x0DC9}, + RuneRange{0x0DCB, 0x0DCE}, + RuneRange{0x0DD5, 0x0DD5}, + RuneRange{0x0DD7, 0x0DD7}, + RuneRange{0x0DE0, 0x0DF1}, + RuneRange{0x0DF5, 0x0E00}, + RuneRange{0x0E3B, 0x0E3E}, + RuneRange{0x0E5C, 0x0E80}, + RuneRange{0x0E83, 0x0E83}, + RuneRange{0x0E85, 0x0E86}, + RuneRange{0x0E89, 0x0E89}, + RuneRange{0x0E8B, 0x0E8C}, + RuneRange{0x0E8E, 0x0E93}, + RuneRange{0x0E98, 0x0E98}, + RuneRange{0x0EA0, 0x0EA0}, + RuneRange{0x0EA4, 0x0EA4}, + RuneRange{0x0EA6, 0x0EA6}, + RuneRange{0x0EA8, 0x0EA9}, + RuneRange{0x0EAC, 0x0EAC}, + RuneRange{0x0EBA, 0x0EBA}, + RuneRange{0x0EBE, 0x0EBF}, + RuneRange{0x0EC5, 0x0EC5}, + RuneRange{0x0EC7, 0x0EC7}, + RuneRange{0x0ECE, 0x0ECF}, + RuneRange{0x0EDA, 0x0EDB}, + RuneRange{0x0EDE, 0x0EFF}, + RuneRange{0x0F48, 0x0F48}, + RuneRange{0x0F6B, 0x0F70}, + RuneRange{0x0F8C, 0x0F8F}, + RuneRange{0x0F98, 0x0F98}, + RuneRange{0x0FBD, 0x0FBD}, + RuneRange{0x0FCD, 0x0FCE}, + RuneRange{0x0FD0, 0x0FFF}, + RuneRange{0x1022, 0x1022}, + RuneRange{0x1028, 0x1028}, + RuneRange{0x102B, 0x102B}, + RuneRange{0x1033, 0x1035}, + RuneRange{0x103A, 0x103F}, + RuneRange{0x105A, 0x109F}, + RuneRange{0x10C6, 0x10CF}, + RuneRange{0x10F9, 0x10FA}, + RuneRange{0x10FC, 0x10FF}, + RuneRange{0x115A, 0x115E}, + RuneRange{0x11A3, 0x11A7}, + RuneRange{0x11FA, 0x11FF}, + RuneRange{0x1207, 0x1207}, + RuneRange{0x1247, 0x1247}, + RuneRange{0x1249, 0x1249}, + RuneRange{0x124E, 0x124F}, + RuneRange{0x1257, 0x1257}, + RuneRange{0x1259, 0x1259}, + RuneRange{0x125E, 0x125F}, + RuneRange{0x1287, 0x1287}, + RuneRange{0x1289, 0x1289}, + RuneRange{0x128E, 0x128F}, + RuneRange{0x12AF, 0x12AF}, + RuneRange{0x12B1, 0x12B1}, + RuneRange{0x12B6, 0x12B7}, + RuneRange{0x12BF, 0x12BF}, + RuneRange{0x12C1, 0x12C1}, + RuneRange{0x12C6, 0x12C7}, + RuneRange{0x12CF, 0x12CF}, + RuneRange{0x12D7, 0x12D7}, + RuneRange{0x12EF, 0x12EF}, + RuneRange{0x130F, 0x130F}, + RuneRange{0x1311, 0x1311}, + RuneRange{0x1316, 0x1317}, + RuneRange{0x131F, 0x131F}, + RuneRange{0x1347, 0x1347}, + RuneRange{0x135B, 0x1360}, + RuneRange{0x137D, 0x139F}, + RuneRange{0x13F5, 0x1400}, + RuneRange{0x1677, 0x167F}, + RuneRange{0x169D, 0x169F}, + RuneRange{0x16F1, 0x16FF}, + RuneRange{0x170D, 0x170D}, + RuneRange{0x1715, 0x171F}, + RuneRange{0x1737, 0x173F}, + RuneRange{0x1754, 0x175F}, + RuneRange{0x176D, 0x176D}, + RuneRange{0x1771, 0x1771}, + 
RuneRange{0x1774, 0x177F}, + RuneRange{0x17DD, 0x17DF}, + RuneRange{0x17EA, 0x17FF}, + RuneRange{0x180F, 0x180F}, + RuneRange{0x181A, 0x181F}, + RuneRange{0x1878, 0x187F}, + RuneRange{0x18AA, 0x1DFF}, + RuneRange{0x1E9C, 0x1E9F}, + RuneRange{0x1EFA, 0x1EFF}, + RuneRange{0x1F16, 0x1F17}, + RuneRange{0x1F1E, 0x1F1F}, + RuneRange{0x1F46, 0x1F47}, + RuneRange{0x1F4E, 0x1F4F}, + RuneRange{0x1F58, 0x1F58}, + RuneRange{0x1F5A, 0x1F5A}, + RuneRange{0x1F5C, 0x1F5C}, + RuneRange{0x1F5E, 0x1F5E}, + RuneRange{0x1F7E, 0x1F7F}, + RuneRange{0x1FB5, 0x1FB5}, + RuneRange{0x1FC5, 0x1FC5}, + RuneRange{0x1FD4, 0x1FD5}, + RuneRange{0x1FDC, 0x1FDC}, + RuneRange{0x1FF0, 0x1FF1}, + RuneRange{0x1FF5, 0x1FF5}, + RuneRange{0x1FFF, 0x1FFF}, + RuneRange{0x2053, 0x2056}, + RuneRange{0x2058, 0x205E}, + RuneRange{0x2064, 0x2069}, + RuneRange{0x2072, 0x2073}, + RuneRange{0x208F, 0x209F}, + RuneRange{0x20B2, 0x20CF}, + RuneRange{0x20EB, 0x20FF}, + RuneRange{0x213B, 0x213C}, + RuneRange{0x214C, 0x2152}, + RuneRange{0x2184, 0x218F}, + RuneRange{0x23CF, 0x23FF}, + RuneRange{0x2427, 0x243F}, + RuneRange{0x244B, 0x245F}, + RuneRange{0x24FF, 0x24FF}, + RuneRange{0x2614, 0x2615}, + RuneRange{0x2618, 0x2618}, + RuneRange{0x267E, 0x267F}, + RuneRange{0x268A, 0x2700}, + RuneRange{0x2705, 0x2705}, + RuneRange{0x270A, 0x270B}, + RuneRange{0x2728, 0x2728}, + RuneRange{0x274C, 0x274C}, + RuneRange{0x274E, 0x274E}, + RuneRange{0x2753, 0x2755}, + RuneRange{0x2757, 0x2757}, + RuneRange{0x275F, 0x2760}, + RuneRange{0x2795, 0x2797}, + RuneRange{0x27B0, 0x27B0}, + RuneRange{0x27BF, 0x27CF}, + RuneRange{0x27EC, 0x27EF}, + RuneRange{0x2B00, 0x2E7F}, + RuneRange{0x2E9A, 0x2E9A}, + RuneRange{0x2EF4, 0x2EFF}, + RuneRange{0x2FD6, 0x2FEF}, + RuneRange{0x2FFC, 0x2FFF}, + RuneRange{0x3040, 0x3040}, + RuneRange{0x3097, 0x3098}, + RuneRange{0x3100, 0x3104}, + RuneRange{0x312D, 0x3130}, + RuneRange{0x318F, 0x318F}, + RuneRange{0x31B8, 0x31EF}, + RuneRange{0x321D, 0x321F}, + RuneRange{0x3244, 0x3250}, + RuneRange{0x327C, 0x327E}, + RuneRange{0x32CC, 0x32CF}, + RuneRange{0x32FF, 0x32FF}, + RuneRange{0x3377, 0x337A}, + RuneRange{0x33DE, 0x33DF}, + RuneRange{0x33FF, 0x33FF}, + RuneRange{0x4DB6, 0x4DFF}, + RuneRange{0x9FA6, 0x9FFF}, + RuneRange{0xA48D, 0xA48F}, + RuneRange{0xA4C7, 0xABFF}, + RuneRange{0xD7A4, 0xD7FF}, + RuneRange{0xFA2E, 0xFA2F}, + RuneRange{0xFA6B, 0xFAFF}, + RuneRange{0xFB07, 0xFB12}, + RuneRange{0xFB18, 0xFB1C}, + RuneRange{0xFB37, 0xFB37}, + RuneRange{0xFB3D, 0xFB3D}, + RuneRange{0xFB3F, 0xFB3F}, + RuneRange{0xFB42, 0xFB42}, + RuneRange{0xFB45, 0xFB45}, + RuneRange{0xFBB2, 0xFBD2}, + RuneRange{0xFD40, 0xFD4F}, + RuneRange{0xFD90, 0xFD91}, + RuneRange{0xFDC8, 0xFDCF}, + RuneRange{0xFDFD, 0xFDFF}, + RuneRange{0xFE10, 0xFE1F}, + RuneRange{0xFE24, 0xFE2F}, + RuneRange{0xFE47, 0xFE48}, + RuneRange{0xFE53, 0xFE53}, + RuneRange{0xFE67, 0xFE67}, + RuneRange{0xFE6C, 0xFE6F}, + RuneRange{0xFE75, 0xFE75}, + RuneRange{0xFEFD, 0xFEFE}, + RuneRange{0xFF00, 0xFF00}, + RuneRange{0xFFBF, 0xFFC1}, + RuneRange{0xFFC8, 0xFFC9}, + RuneRange{0xFFD0, 0xFFD1}, + RuneRange{0xFFD8, 0xFFD9}, + RuneRange{0xFFDD, 0xFFDF}, + RuneRange{0xFFE7, 0xFFE7}, + RuneRange{0xFFEF, 0xFFF8}, + RuneRange{0x10000, 0x102FF}, + RuneRange{0x1031F, 0x1031F}, + RuneRange{0x10324, 0x1032F}, + RuneRange{0x1034B, 0x103FF}, + RuneRange{0x10426, 0x10427}, + RuneRange{0x1044E, 0x1CFFF}, + RuneRange{0x1D0F6, 0x1D0FF}, + RuneRange{0x1D127, 0x1D129}, + RuneRange{0x1D1DE, 0x1D3FF}, + RuneRange{0x1D455, 0x1D455}, + RuneRange{0x1D49D, 0x1D49D}, + RuneRange{0x1D4A0, 0x1D4A1}, + RuneRange{0x1D4A3, 
0x1D4A4}, + RuneRange{0x1D4A7, 0x1D4A8}, + RuneRange{0x1D4AD, 0x1D4AD}, + RuneRange{0x1D4BA, 0x1D4BA}, + RuneRange{0x1D4BC, 0x1D4BC}, + RuneRange{0x1D4C1, 0x1D4C1}, + RuneRange{0x1D4C4, 0x1D4C4}, + RuneRange{0x1D506, 0x1D506}, + RuneRange{0x1D50B, 0x1D50C}, + RuneRange{0x1D515, 0x1D515}, + RuneRange{0x1D51D, 0x1D51D}, + RuneRange{0x1D53A, 0x1D53A}, + RuneRange{0x1D53F, 0x1D53F}, + RuneRange{0x1D545, 0x1D545}, + RuneRange{0x1D547, 0x1D549}, + RuneRange{0x1D551, 0x1D551}, + RuneRange{0x1D6A4, 0x1D6A7}, + RuneRange{0x1D7CA, 0x1D7CD}, + RuneRange{0x1D800, 0x1FFFD}, + RuneRange{0x2A6D7, 0x2F7FF}, + RuneRange{0x2FA1E, 0x2FFFD}, + RuneRange{0x30000, 0x3FFFD}, + RuneRange{0x40000, 0x4FFFD}, + RuneRange{0x50000, 0x5FFFD}, + RuneRange{0x60000, 0x6FFFD}, + RuneRange{0x70000, 0x7FFFD}, + RuneRange{0x80000, 0x8FFFD}, + RuneRange{0x90000, 0x9FFFD}, + RuneRange{0xA0000, 0xAFFFD}, + RuneRange{0xB0000, 0xBFFFD}, + RuneRange{0xC0000, 0xCFFFD}, + RuneRange{0xD0000, 0xDFFFD}, + RuneRange{0xE0000, 0xE0000}, + RuneRange{0xE0002, 0xE001F}, + RuneRange{0xE0080, 0xEFFFD}, +} + +// TableA1 represents RFC-3454 Table A.1. +var TableA1 Set = tableA1 + +var tableB1 = Mapping{ + 0x00AD: []rune{}, // Map to nothing + 0x034F: []rune{}, // Map to nothing + 0x180B: []rune{}, // Map to nothing + 0x180C: []rune{}, // Map to nothing + 0x180D: []rune{}, // Map to nothing + 0x200B: []rune{}, // Map to nothing + 0x200C: []rune{}, // Map to nothing + 0x200D: []rune{}, // Map to nothing + 0x2060: []rune{}, // Map to nothing + 0xFE00: []rune{}, // Map to nothing + 0xFE01: []rune{}, // Map to nothing + 0xFE02: []rune{}, // Map to nothing + 0xFE03: []rune{}, // Map to nothing + 0xFE04: []rune{}, // Map to nothing + 0xFE05: []rune{}, // Map to nothing + 0xFE06: []rune{}, // Map to nothing + 0xFE07: []rune{}, // Map to nothing + 0xFE08: []rune{}, // Map to nothing + 0xFE09: []rune{}, // Map to nothing + 0xFE0A: []rune{}, // Map to nothing + 0xFE0B: []rune{}, // Map to nothing + 0xFE0C: []rune{}, // Map to nothing + 0xFE0D: []rune{}, // Map to nothing + 0xFE0E: []rune{}, // Map to nothing + 0xFE0F: []rune{}, // Map to nothing + 0xFEFF: []rune{}, // Map to nothing +} + +// TableB1 represents RFC-3454 Table B.1. 
+var TableB1 Mapping = tableB1 + +var tableB2 = Mapping{ + 0x0041: []rune{0x0061}, // Case map + 0x0042: []rune{0x0062}, // Case map + 0x0043: []rune{0x0063}, // Case map + 0x0044: []rune{0x0064}, // Case map + 0x0045: []rune{0x0065}, // Case map + 0x0046: []rune{0x0066}, // Case map + 0x0047: []rune{0x0067}, // Case map + 0x0048: []rune{0x0068}, // Case map + 0x0049: []rune{0x0069}, // Case map + 0x004A: []rune{0x006A}, // Case map + 0x004B: []rune{0x006B}, // Case map + 0x004C: []rune{0x006C}, // Case map + 0x004D: []rune{0x006D}, // Case map + 0x004E: []rune{0x006E}, // Case map + 0x004F: []rune{0x006F}, // Case map + 0x0050: []rune{0x0070}, // Case map + 0x0051: []rune{0x0071}, // Case map + 0x0052: []rune{0x0072}, // Case map + 0x0053: []rune{0x0073}, // Case map + 0x0054: []rune{0x0074}, // Case map + 0x0055: []rune{0x0075}, // Case map + 0x0056: []rune{0x0076}, // Case map + 0x0057: []rune{0x0077}, // Case map + 0x0058: []rune{0x0078}, // Case map + 0x0059: []rune{0x0079}, // Case map + 0x005A: []rune{0x007A}, // Case map + 0x00B5: []rune{0x03BC}, // Case map + 0x00C0: []rune{0x00E0}, // Case map + 0x00C1: []rune{0x00E1}, // Case map + 0x00C2: []rune{0x00E2}, // Case map + 0x00C3: []rune{0x00E3}, // Case map + 0x00C4: []rune{0x00E4}, // Case map + 0x00C5: []rune{0x00E5}, // Case map + 0x00C6: []rune{0x00E6}, // Case map + 0x00C7: []rune{0x00E7}, // Case map + 0x00C8: []rune{0x00E8}, // Case map + 0x00C9: []rune{0x00E9}, // Case map + 0x00CA: []rune{0x00EA}, // Case map + 0x00CB: []rune{0x00EB}, // Case map + 0x00CC: []rune{0x00EC}, // Case map + 0x00CD: []rune{0x00ED}, // Case map + 0x00CE: []rune{0x00EE}, // Case map + 0x00CF: []rune{0x00EF}, // Case map + 0x00D0: []rune{0x00F0}, // Case map + 0x00D1: []rune{0x00F1}, // Case map + 0x00D2: []rune{0x00F2}, // Case map + 0x00D3: []rune{0x00F3}, // Case map + 0x00D4: []rune{0x00F4}, // Case map + 0x00D5: []rune{0x00F5}, // Case map + 0x00D6: []rune{0x00F6}, // Case map + 0x00D8: []rune{0x00F8}, // Case map + 0x00D9: []rune{0x00F9}, // Case map + 0x00DA: []rune{0x00FA}, // Case map + 0x00DB: []rune{0x00FB}, // Case map + 0x00DC: []rune{0x00FC}, // Case map + 0x00DD: []rune{0x00FD}, // Case map + 0x00DE: []rune{0x00FE}, // Case map + 0x00DF: []rune{0x0073, 0x0073}, // Case map + 0x0100: []rune{0x0101}, // Case map + 0x0102: []rune{0x0103}, // Case map + 0x0104: []rune{0x0105}, // Case map + 0x0106: []rune{0x0107}, // Case map + 0x0108: []rune{0x0109}, // Case map + 0x010A: []rune{0x010B}, // Case map + 0x010C: []rune{0x010D}, // Case map + 0x010E: []rune{0x010F}, // Case map + 0x0110: []rune{0x0111}, // Case map + 0x0112: []rune{0x0113}, // Case map + 0x0114: []rune{0x0115}, // Case map + 0x0116: []rune{0x0117}, // Case map + 0x0118: []rune{0x0119}, // Case map + 0x011A: []rune{0x011B}, // Case map + 0x011C: []rune{0x011D}, // Case map + 0x011E: []rune{0x011F}, // Case map + 0x0120: []rune{0x0121}, // Case map + 0x0122: []rune{0x0123}, // Case map + 0x0124: []rune{0x0125}, // Case map + 0x0126: []rune{0x0127}, // Case map + 0x0128: []rune{0x0129}, // Case map + 0x012A: []rune{0x012B}, // Case map + 0x012C: []rune{0x012D}, // Case map + 0x012E: []rune{0x012F}, // Case map + 0x0130: []rune{0x0069, 0x0307}, // Case map + 0x0132: []rune{0x0133}, // Case map + 0x0134: []rune{0x0135}, // Case map + 0x0136: []rune{0x0137}, // Case map + 0x0139: []rune{0x013A}, // Case map + 0x013B: []rune{0x013C}, // Case map + 0x013D: []rune{0x013E}, // Case map + 0x013F: []rune{0x0140}, // Case map + 0x0141: []rune{0x0142}, // Case map + 0x0143: 
[]rune{0x0144}, // Case map + 0x0145: []rune{0x0146}, // Case map + 0x0147: []rune{0x0148}, // Case map + 0x0149: []rune{0x02BC, 0x006E}, // Case map + 0x014A: []rune{0x014B}, // Case map + 0x014C: []rune{0x014D}, // Case map + 0x014E: []rune{0x014F}, // Case map + 0x0150: []rune{0x0151}, // Case map + 0x0152: []rune{0x0153}, // Case map + 0x0154: []rune{0x0155}, // Case map + 0x0156: []rune{0x0157}, // Case map + 0x0158: []rune{0x0159}, // Case map + 0x015A: []rune{0x015B}, // Case map + 0x015C: []rune{0x015D}, // Case map + 0x015E: []rune{0x015F}, // Case map + 0x0160: []rune{0x0161}, // Case map + 0x0162: []rune{0x0163}, // Case map + 0x0164: []rune{0x0165}, // Case map + 0x0166: []rune{0x0167}, // Case map + 0x0168: []rune{0x0169}, // Case map + 0x016A: []rune{0x016B}, // Case map + 0x016C: []rune{0x016D}, // Case map + 0x016E: []rune{0x016F}, // Case map + 0x0170: []rune{0x0171}, // Case map + 0x0172: []rune{0x0173}, // Case map + 0x0174: []rune{0x0175}, // Case map + 0x0176: []rune{0x0177}, // Case map + 0x0178: []rune{0x00FF}, // Case map + 0x0179: []rune{0x017A}, // Case map + 0x017B: []rune{0x017C}, // Case map + 0x017D: []rune{0x017E}, // Case map + 0x017F: []rune{0x0073}, // Case map + 0x0181: []rune{0x0253}, // Case map + 0x0182: []rune{0x0183}, // Case map + 0x0184: []rune{0x0185}, // Case map + 0x0186: []rune{0x0254}, // Case map + 0x0187: []rune{0x0188}, // Case map + 0x0189: []rune{0x0256}, // Case map + 0x018A: []rune{0x0257}, // Case map + 0x018B: []rune{0x018C}, // Case map + 0x018E: []rune{0x01DD}, // Case map + 0x018F: []rune{0x0259}, // Case map + 0x0190: []rune{0x025B}, // Case map + 0x0191: []rune{0x0192}, // Case map + 0x0193: []rune{0x0260}, // Case map + 0x0194: []rune{0x0263}, // Case map + 0x0196: []rune{0x0269}, // Case map + 0x0197: []rune{0x0268}, // Case map + 0x0198: []rune{0x0199}, // Case map + 0x019C: []rune{0x026F}, // Case map + 0x019D: []rune{0x0272}, // Case map + 0x019F: []rune{0x0275}, // Case map + 0x01A0: []rune{0x01A1}, // Case map + 0x01A2: []rune{0x01A3}, // Case map + 0x01A4: []rune{0x01A5}, // Case map + 0x01A6: []rune{0x0280}, // Case map + 0x01A7: []rune{0x01A8}, // Case map + 0x01A9: []rune{0x0283}, // Case map + 0x01AC: []rune{0x01AD}, // Case map + 0x01AE: []rune{0x0288}, // Case map + 0x01AF: []rune{0x01B0}, // Case map + 0x01B1: []rune{0x028A}, // Case map + 0x01B2: []rune{0x028B}, // Case map + 0x01B3: []rune{0x01B4}, // Case map + 0x01B5: []rune{0x01B6}, // Case map + 0x01B7: []rune{0x0292}, // Case map + 0x01B8: []rune{0x01B9}, // Case map + 0x01BC: []rune{0x01BD}, // Case map + 0x01C4: []rune{0x01C6}, // Case map + 0x01C5: []rune{0x01C6}, // Case map + 0x01C7: []rune{0x01C9}, // Case map + 0x01C8: []rune{0x01C9}, // Case map + 0x01CA: []rune{0x01CC}, // Case map + 0x01CB: []rune{0x01CC}, // Case map + 0x01CD: []rune{0x01CE}, // Case map + 0x01CF: []rune{0x01D0}, // Case map + 0x01D1: []rune{0x01D2}, // Case map + 0x01D3: []rune{0x01D4}, // Case map + 0x01D5: []rune{0x01D6}, // Case map + 0x01D7: []rune{0x01D8}, // Case map + 0x01D9: []rune{0x01DA}, // Case map + 0x01DB: []rune{0x01DC}, // Case map + 0x01DE: []rune{0x01DF}, // Case map + 0x01E0: []rune{0x01E1}, // Case map + 0x01E2: []rune{0x01E3}, // Case map + 0x01E4: []rune{0x01E5}, // Case map + 0x01E6: []rune{0x01E7}, // Case map + 0x01E8: []rune{0x01E9}, // Case map + 0x01EA: []rune{0x01EB}, // Case map + 0x01EC: []rune{0x01ED}, // Case map + 0x01EE: []rune{0x01EF}, // Case map + 0x01F0: []rune{0x006A, 0x030C}, // Case map + 0x01F1: []rune{0x01F3}, // Case map + 0x01F2: 
[]rune{0x01F3}, // Case map + 0x01F4: []rune{0x01F5}, // Case map + 0x01F6: []rune{0x0195}, // Case map + 0x01F7: []rune{0x01BF}, // Case map + 0x01F8: []rune{0x01F9}, // Case map + 0x01FA: []rune{0x01FB}, // Case map + 0x01FC: []rune{0x01FD}, // Case map + 0x01FE: []rune{0x01FF}, // Case map + 0x0200: []rune{0x0201}, // Case map + 0x0202: []rune{0x0203}, // Case map + 0x0204: []rune{0x0205}, // Case map + 0x0206: []rune{0x0207}, // Case map + 0x0208: []rune{0x0209}, // Case map + 0x020A: []rune{0x020B}, // Case map + 0x020C: []rune{0x020D}, // Case map + 0x020E: []rune{0x020F}, // Case map + 0x0210: []rune{0x0211}, // Case map + 0x0212: []rune{0x0213}, // Case map + 0x0214: []rune{0x0215}, // Case map + 0x0216: []rune{0x0217}, // Case map + 0x0218: []rune{0x0219}, // Case map + 0x021A: []rune{0x021B}, // Case map + 0x021C: []rune{0x021D}, // Case map + 0x021E: []rune{0x021F}, // Case map + 0x0220: []rune{0x019E}, // Case map + 0x0222: []rune{0x0223}, // Case map + 0x0224: []rune{0x0225}, // Case map + 0x0226: []rune{0x0227}, // Case map + 0x0228: []rune{0x0229}, // Case map + 0x022A: []rune{0x022B}, // Case map + 0x022C: []rune{0x022D}, // Case map + 0x022E: []rune{0x022F}, // Case map + 0x0230: []rune{0x0231}, // Case map + 0x0232: []rune{0x0233}, // Case map + 0x0345: []rune{0x03B9}, // Case map + 0x037A: []rune{0x0020, 0x03B9}, // Additional folding + 0x0386: []rune{0x03AC}, // Case map + 0x0388: []rune{0x03AD}, // Case map + 0x0389: []rune{0x03AE}, // Case map + 0x038A: []rune{0x03AF}, // Case map + 0x038C: []rune{0x03CC}, // Case map + 0x038E: []rune{0x03CD}, // Case map + 0x038F: []rune{0x03CE}, // Case map + 0x0390: []rune{0x03B9, 0x0308, 0x0301}, // Case map + 0x0391: []rune{0x03B1}, // Case map + 0x0392: []rune{0x03B2}, // Case map + 0x0393: []rune{0x03B3}, // Case map + 0x0394: []rune{0x03B4}, // Case map + 0x0395: []rune{0x03B5}, // Case map + 0x0396: []rune{0x03B6}, // Case map + 0x0397: []rune{0x03B7}, // Case map + 0x0398: []rune{0x03B8}, // Case map + 0x0399: []rune{0x03B9}, // Case map + 0x039A: []rune{0x03BA}, // Case map + 0x039B: []rune{0x03BB}, // Case map + 0x039C: []rune{0x03BC}, // Case map + 0x039D: []rune{0x03BD}, // Case map + 0x039E: []rune{0x03BE}, // Case map + 0x039F: []rune{0x03BF}, // Case map + 0x03A0: []rune{0x03C0}, // Case map + 0x03A1: []rune{0x03C1}, // Case map + 0x03A3: []rune{0x03C3}, // Case map + 0x03A4: []rune{0x03C4}, // Case map + 0x03A5: []rune{0x03C5}, // Case map + 0x03A6: []rune{0x03C6}, // Case map + 0x03A7: []rune{0x03C7}, // Case map + 0x03A8: []rune{0x03C8}, // Case map + 0x03A9: []rune{0x03C9}, // Case map + 0x03AA: []rune{0x03CA}, // Case map + 0x03AB: []rune{0x03CB}, // Case map + 0x03B0: []rune{0x03C5, 0x0308, 0x0301}, // Case map + 0x03C2: []rune{0x03C3}, // Case map + 0x03D0: []rune{0x03B2}, // Case map + 0x03D1: []rune{0x03B8}, // Case map + 0x03D2: []rune{0x03C5}, // Additional folding + 0x03D3: []rune{0x03CD}, // Additional folding + 0x03D4: []rune{0x03CB}, // Additional folding + 0x03D5: []rune{0x03C6}, // Case map + 0x03D6: []rune{0x03C0}, // Case map + 0x03D8: []rune{0x03D9}, // Case map + 0x03DA: []rune{0x03DB}, // Case map + 0x03DC: []rune{0x03DD}, // Case map + 0x03DE: []rune{0x03DF}, // Case map + 0x03E0: []rune{0x03E1}, // Case map + 0x03E2: []rune{0x03E3}, // Case map + 0x03E4: []rune{0x03E5}, // Case map + 0x03E6: []rune{0x03E7}, // Case map + 0x03E8: []rune{0x03E9}, // Case map + 0x03EA: []rune{0x03EB}, // Case map + 0x03EC: []rune{0x03ED}, // Case map + 0x03EE: []rune{0x03EF}, // Case map + 0x03F0: []rune{0x03BA}, 
// Case map + 0x03F1: []rune{0x03C1}, // Case map + 0x03F2: []rune{0x03C3}, // Case map + 0x03F4: []rune{0x03B8}, // Case map + 0x03F5: []rune{0x03B5}, // Case map + 0x0400: []rune{0x0450}, // Case map + 0x0401: []rune{0x0451}, // Case map + 0x0402: []rune{0x0452}, // Case map + 0x0403: []rune{0x0453}, // Case map + 0x0404: []rune{0x0454}, // Case map + 0x0405: []rune{0x0455}, // Case map + 0x0406: []rune{0x0456}, // Case map + 0x0407: []rune{0x0457}, // Case map + 0x0408: []rune{0x0458}, // Case map + 0x0409: []rune{0x0459}, // Case map + 0x040A: []rune{0x045A}, // Case map + 0x040B: []rune{0x045B}, // Case map + 0x040C: []rune{0x045C}, // Case map + 0x040D: []rune{0x045D}, // Case map + 0x040E: []rune{0x045E}, // Case map + 0x040F: []rune{0x045F}, // Case map + 0x0410: []rune{0x0430}, // Case map + 0x0411: []rune{0x0431}, // Case map + 0x0412: []rune{0x0432}, // Case map + 0x0413: []rune{0x0433}, // Case map + 0x0414: []rune{0x0434}, // Case map + 0x0415: []rune{0x0435}, // Case map + 0x0416: []rune{0x0436}, // Case map + 0x0417: []rune{0x0437}, // Case map + 0x0418: []rune{0x0438}, // Case map + 0x0419: []rune{0x0439}, // Case map + 0x041A: []rune{0x043A}, // Case map + 0x041B: []rune{0x043B}, // Case map + 0x041C: []rune{0x043C}, // Case map + 0x041D: []rune{0x043D}, // Case map + 0x041E: []rune{0x043E}, // Case map + 0x041F: []rune{0x043F}, // Case map + 0x0420: []rune{0x0440}, // Case map + 0x0421: []rune{0x0441}, // Case map + 0x0422: []rune{0x0442}, // Case map + 0x0423: []rune{0x0443}, // Case map + 0x0424: []rune{0x0444}, // Case map + 0x0425: []rune{0x0445}, // Case map + 0x0426: []rune{0x0446}, // Case map + 0x0427: []rune{0x0447}, // Case map + 0x0428: []rune{0x0448}, // Case map + 0x0429: []rune{0x0449}, // Case map + 0x042A: []rune{0x044A}, // Case map + 0x042B: []rune{0x044B}, // Case map + 0x042C: []rune{0x044C}, // Case map + 0x042D: []rune{0x044D}, // Case map + 0x042E: []rune{0x044E}, // Case map + 0x042F: []rune{0x044F}, // Case map + 0x0460: []rune{0x0461}, // Case map + 0x0462: []rune{0x0463}, // Case map + 0x0464: []rune{0x0465}, // Case map + 0x0466: []rune{0x0467}, // Case map + 0x0468: []rune{0x0469}, // Case map + 0x046A: []rune{0x046B}, // Case map + 0x046C: []rune{0x046D}, // Case map + 0x046E: []rune{0x046F}, // Case map + 0x0470: []rune{0x0471}, // Case map + 0x0472: []rune{0x0473}, // Case map + 0x0474: []rune{0x0475}, // Case map + 0x0476: []rune{0x0477}, // Case map + 0x0478: []rune{0x0479}, // Case map + 0x047A: []rune{0x047B}, // Case map + 0x047C: []rune{0x047D}, // Case map + 0x047E: []rune{0x047F}, // Case map + 0x0480: []rune{0x0481}, // Case map + 0x048A: []rune{0x048B}, // Case map + 0x048C: []rune{0x048D}, // Case map + 0x048E: []rune{0x048F}, // Case map + 0x0490: []rune{0x0491}, // Case map + 0x0492: []rune{0x0493}, // Case map + 0x0494: []rune{0x0495}, // Case map + 0x0496: []rune{0x0497}, // Case map + 0x0498: []rune{0x0499}, // Case map + 0x049A: []rune{0x049B}, // Case map + 0x049C: []rune{0x049D}, // Case map + 0x049E: []rune{0x049F}, // Case map + 0x04A0: []rune{0x04A1}, // Case map + 0x04A2: []rune{0x04A3}, // Case map + 0x04A4: []rune{0x04A5}, // Case map + 0x04A6: []rune{0x04A7}, // Case map + 0x04A8: []rune{0x04A9}, // Case map + 0x04AA: []rune{0x04AB}, // Case map + 0x04AC: []rune{0x04AD}, // Case map + 0x04AE: []rune{0x04AF}, // Case map + 0x04B0: []rune{0x04B1}, // Case map + 0x04B2: []rune{0x04B3}, // Case map + 0x04B4: []rune{0x04B5}, // Case map + 0x04B6: []rune{0x04B7}, // Case map + 0x04B8: []rune{0x04B9}, // Case map + 
0x04BA: []rune{0x04BB}, // Case map + 0x04BC: []rune{0x04BD}, // Case map + 0x04BE: []rune{0x04BF}, // Case map + 0x04C1: []rune{0x04C2}, // Case map + 0x04C3: []rune{0x04C4}, // Case map + 0x04C5: []rune{0x04C6}, // Case map + 0x04C7: []rune{0x04C8}, // Case map + 0x04C9: []rune{0x04CA}, // Case map + 0x04CB: []rune{0x04CC}, // Case map + 0x04CD: []rune{0x04CE}, // Case map + 0x04D0: []rune{0x04D1}, // Case map + 0x04D2: []rune{0x04D3}, // Case map + 0x04D4: []rune{0x04D5}, // Case map + 0x04D6: []rune{0x04D7}, // Case map + 0x04D8: []rune{0x04D9}, // Case map + 0x04DA: []rune{0x04DB}, // Case map + 0x04DC: []rune{0x04DD}, // Case map + 0x04DE: []rune{0x04DF}, // Case map + 0x04E0: []rune{0x04E1}, // Case map + 0x04E2: []rune{0x04E3}, // Case map + 0x04E4: []rune{0x04E5}, // Case map + 0x04E6: []rune{0x04E7}, // Case map + 0x04E8: []rune{0x04E9}, // Case map + 0x04EA: []rune{0x04EB}, // Case map + 0x04EC: []rune{0x04ED}, // Case map + 0x04EE: []rune{0x04EF}, // Case map + 0x04F0: []rune{0x04F1}, // Case map + 0x04F2: []rune{0x04F3}, // Case map + 0x04F4: []rune{0x04F5}, // Case map + 0x04F8: []rune{0x04F9}, // Case map + 0x0500: []rune{0x0501}, // Case map + 0x0502: []rune{0x0503}, // Case map + 0x0504: []rune{0x0505}, // Case map + 0x0506: []rune{0x0507}, // Case map + 0x0508: []rune{0x0509}, // Case map + 0x050A: []rune{0x050B}, // Case map + 0x050C: []rune{0x050D}, // Case map + 0x050E: []rune{0x050F}, // Case map + 0x0531: []rune{0x0561}, // Case map + 0x0532: []rune{0x0562}, // Case map + 0x0533: []rune{0x0563}, // Case map + 0x0534: []rune{0x0564}, // Case map + 0x0535: []rune{0x0565}, // Case map + 0x0536: []rune{0x0566}, // Case map + 0x0537: []rune{0x0567}, // Case map + 0x0538: []rune{0x0568}, // Case map + 0x0539: []rune{0x0569}, // Case map + 0x053A: []rune{0x056A}, // Case map + 0x053B: []rune{0x056B}, // Case map + 0x053C: []rune{0x056C}, // Case map + 0x053D: []rune{0x056D}, // Case map + 0x053E: []rune{0x056E}, // Case map + 0x053F: []rune{0x056F}, // Case map + 0x0540: []rune{0x0570}, // Case map + 0x0541: []rune{0x0571}, // Case map + 0x0542: []rune{0x0572}, // Case map + 0x0543: []rune{0x0573}, // Case map + 0x0544: []rune{0x0574}, // Case map + 0x0545: []rune{0x0575}, // Case map + 0x0546: []rune{0x0576}, // Case map + 0x0547: []rune{0x0577}, // Case map + 0x0548: []rune{0x0578}, // Case map + 0x0549: []rune{0x0579}, // Case map + 0x054A: []rune{0x057A}, // Case map + 0x054B: []rune{0x057B}, // Case map + 0x054C: []rune{0x057C}, // Case map + 0x054D: []rune{0x057D}, // Case map + 0x054E: []rune{0x057E}, // Case map + 0x054F: []rune{0x057F}, // Case map + 0x0550: []rune{0x0580}, // Case map + 0x0551: []rune{0x0581}, // Case map + 0x0552: []rune{0x0582}, // Case map + 0x0553: []rune{0x0583}, // Case map + 0x0554: []rune{0x0584}, // Case map + 0x0555: []rune{0x0585}, // Case map + 0x0556: []rune{0x0586}, // Case map + 0x0587: []rune{0x0565, 0x0582}, // Case map + 0x1E00: []rune{0x1E01}, // Case map + 0x1E02: []rune{0x1E03}, // Case map + 0x1E04: []rune{0x1E05}, // Case map + 0x1E06: []rune{0x1E07}, // Case map + 0x1E08: []rune{0x1E09}, // Case map + 0x1E0A: []rune{0x1E0B}, // Case map + 0x1E0C: []rune{0x1E0D}, // Case map + 0x1E0E: []rune{0x1E0F}, // Case map + 0x1E10: []rune{0x1E11}, // Case map + 0x1E12: []rune{0x1E13}, // Case map + 0x1E14: []rune{0x1E15}, // Case map + 0x1E16: []rune{0x1E17}, // Case map + 0x1E18: []rune{0x1E19}, // Case map + 0x1E1A: []rune{0x1E1B}, // Case map + 0x1E1C: []rune{0x1E1D}, // Case map + 0x1E1E: []rune{0x1E1F}, // Case map + 0x1E20: 
[]rune{0x1E21}, // Case map + 0x1E22: []rune{0x1E23}, // Case map + 0x1E24: []rune{0x1E25}, // Case map + 0x1E26: []rune{0x1E27}, // Case map + 0x1E28: []rune{0x1E29}, // Case map + 0x1E2A: []rune{0x1E2B}, // Case map + 0x1E2C: []rune{0x1E2D}, // Case map + 0x1E2E: []rune{0x1E2F}, // Case map + 0x1E30: []rune{0x1E31}, // Case map + 0x1E32: []rune{0x1E33}, // Case map + 0x1E34: []rune{0x1E35}, // Case map + 0x1E36: []rune{0x1E37}, // Case map + 0x1E38: []rune{0x1E39}, // Case map + 0x1E3A: []rune{0x1E3B}, // Case map + 0x1E3C: []rune{0x1E3D}, // Case map + 0x1E3E: []rune{0x1E3F}, // Case map + 0x1E40: []rune{0x1E41}, // Case map + 0x1E42: []rune{0x1E43}, // Case map + 0x1E44: []rune{0x1E45}, // Case map + 0x1E46: []rune{0x1E47}, // Case map + 0x1E48: []rune{0x1E49}, // Case map + 0x1E4A: []rune{0x1E4B}, // Case map + 0x1E4C: []rune{0x1E4D}, // Case map + 0x1E4E: []rune{0x1E4F}, // Case map + 0x1E50: []rune{0x1E51}, // Case map + 0x1E52: []rune{0x1E53}, // Case map + 0x1E54: []rune{0x1E55}, // Case map + 0x1E56: []rune{0x1E57}, // Case map + 0x1E58: []rune{0x1E59}, // Case map + 0x1E5A: []rune{0x1E5B}, // Case map + 0x1E5C: []rune{0x1E5D}, // Case map + 0x1E5E: []rune{0x1E5F}, // Case map + 0x1E60: []rune{0x1E61}, // Case map + 0x1E62: []rune{0x1E63}, // Case map + 0x1E64: []rune{0x1E65}, // Case map + 0x1E66: []rune{0x1E67}, // Case map + 0x1E68: []rune{0x1E69}, // Case map + 0x1E6A: []rune{0x1E6B}, // Case map + 0x1E6C: []rune{0x1E6D}, // Case map + 0x1E6E: []rune{0x1E6F}, // Case map + 0x1E70: []rune{0x1E71}, // Case map + 0x1E72: []rune{0x1E73}, // Case map + 0x1E74: []rune{0x1E75}, // Case map + 0x1E76: []rune{0x1E77}, // Case map + 0x1E78: []rune{0x1E79}, // Case map + 0x1E7A: []rune{0x1E7B}, // Case map + 0x1E7C: []rune{0x1E7D}, // Case map + 0x1E7E: []rune{0x1E7F}, // Case map + 0x1E80: []rune{0x1E81}, // Case map + 0x1E82: []rune{0x1E83}, // Case map + 0x1E84: []rune{0x1E85}, // Case map + 0x1E86: []rune{0x1E87}, // Case map + 0x1E88: []rune{0x1E89}, // Case map + 0x1E8A: []rune{0x1E8B}, // Case map + 0x1E8C: []rune{0x1E8D}, // Case map + 0x1E8E: []rune{0x1E8F}, // Case map + 0x1E90: []rune{0x1E91}, // Case map + 0x1E92: []rune{0x1E93}, // Case map + 0x1E94: []rune{0x1E95}, // Case map + 0x1E96: []rune{0x0068, 0x0331}, // Case map + 0x1E97: []rune{0x0074, 0x0308}, // Case map + 0x1E98: []rune{0x0077, 0x030A}, // Case map + 0x1E99: []rune{0x0079, 0x030A}, // Case map + 0x1E9A: []rune{0x0061, 0x02BE}, // Case map + 0x1E9B: []rune{0x1E61}, // Case map + 0x1EA0: []rune{0x1EA1}, // Case map + 0x1EA2: []rune{0x1EA3}, // Case map + 0x1EA4: []rune{0x1EA5}, // Case map + 0x1EA6: []rune{0x1EA7}, // Case map + 0x1EA8: []rune{0x1EA9}, // Case map + 0x1EAA: []rune{0x1EAB}, // Case map + 0x1EAC: []rune{0x1EAD}, // Case map + 0x1EAE: []rune{0x1EAF}, // Case map + 0x1EB0: []rune{0x1EB1}, // Case map + 0x1EB2: []rune{0x1EB3}, // Case map + 0x1EB4: []rune{0x1EB5}, // Case map + 0x1EB6: []rune{0x1EB7}, // Case map + 0x1EB8: []rune{0x1EB9}, // Case map + 0x1EBA: []rune{0x1EBB}, // Case map + 0x1EBC: []rune{0x1EBD}, // Case map + 0x1EBE: []rune{0x1EBF}, // Case map + 0x1EC0: []rune{0x1EC1}, // Case map + 0x1EC2: []rune{0x1EC3}, // Case map + 0x1EC4: []rune{0x1EC5}, // Case map + 0x1EC6: []rune{0x1EC7}, // Case map + 0x1EC8: []rune{0x1EC9}, // Case map + 0x1ECA: []rune{0x1ECB}, // Case map + 0x1ECC: []rune{0x1ECD}, // Case map + 0x1ECE: []rune{0x1ECF}, // Case map + 0x1ED0: []rune{0x1ED1}, // Case map + 0x1ED2: []rune{0x1ED3}, // Case map + 0x1ED4: []rune{0x1ED5}, // Case map + 0x1ED6: []rune{0x1ED7}, // 
Case map + 0x1ED8: []rune{0x1ED9}, // Case map + 0x1EDA: []rune{0x1EDB}, // Case map + 0x1EDC: []rune{0x1EDD}, // Case map + 0x1EDE: []rune{0x1EDF}, // Case map + 0x1EE0: []rune{0x1EE1}, // Case map + 0x1EE2: []rune{0x1EE3}, // Case map + 0x1EE4: []rune{0x1EE5}, // Case map + 0x1EE6: []rune{0x1EE7}, // Case map + 0x1EE8: []rune{0x1EE9}, // Case map + 0x1EEA: []rune{0x1EEB}, // Case map + 0x1EEC: []rune{0x1EED}, // Case map + 0x1EEE: []rune{0x1EEF}, // Case map + 0x1EF0: []rune{0x1EF1}, // Case map + 0x1EF2: []rune{0x1EF3}, // Case map + 0x1EF4: []rune{0x1EF5}, // Case map + 0x1EF6: []rune{0x1EF7}, // Case map + 0x1EF8: []rune{0x1EF9}, // Case map + 0x1F08: []rune{0x1F00}, // Case map + 0x1F09: []rune{0x1F01}, // Case map + 0x1F0A: []rune{0x1F02}, // Case map + 0x1F0B: []rune{0x1F03}, // Case map + 0x1F0C: []rune{0x1F04}, // Case map + 0x1F0D: []rune{0x1F05}, // Case map + 0x1F0E: []rune{0x1F06}, // Case map + 0x1F0F: []rune{0x1F07}, // Case map + 0x1F18: []rune{0x1F10}, // Case map + 0x1F19: []rune{0x1F11}, // Case map + 0x1F1A: []rune{0x1F12}, // Case map + 0x1F1B: []rune{0x1F13}, // Case map + 0x1F1C: []rune{0x1F14}, // Case map + 0x1F1D: []rune{0x1F15}, // Case map + 0x1F28: []rune{0x1F20}, // Case map + 0x1F29: []rune{0x1F21}, // Case map + 0x1F2A: []rune{0x1F22}, // Case map + 0x1F2B: []rune{0x1F23}, // Case map + 0x1F2C: []rune{0x1F24}, // Case map + 0x1F2D: []rune{0x1F25}, // Case map + 0x1F2E: []rune{0x1F26}, // Case map + 0x1F2F: []rune{0x1F27}, // Case map + 0x1F38: []rune{0x1F30}, // Case map + 0x1F39: []rune{0x1F31}, // Case map + 0x1F3A: []rune{0x1F32}, // Case map + 0x1F3B: []rune{0x1F33}, // Case map + 0x1F3C: []rune{0x1F34}, // Case map + 0x1F3D: []rune{0x1F35}, // Case map + 0x1F3E: []rune{0x1F36}, // Case map + 0x1F3F: []rune{0x1F37}, // Case map + 0x1F48: []rune{0x1F40}, // Case map + 0x1F49: []rune{0x1F41}, // Case map + 0x1F4A: []rune{0x1F42}, // Case map + 0x1F4B: []rune{0x1F43}, // Case map + 0x1F4C: []rune{0x1F44}, // Case map + 0x1F4D: []rune{0x1F45}, // Case map + 0x1F50: []rune{0x03C5, 0x0313}, // Case map + 0x1F52: []rune{0x03C5, 0x0313, 0x0300}, // Case map + 0x1F54: []rune{0x03C5, 0x0313, 0x0301}, // Case map + 0x1F56: []rune{0x03C5, 0x0313, 0x0342}, // Case map + 0x1F59: []rune{0x1F51}, // Case map + 0x1F5B: []rune{0x1F53}, // Case map + 0x1F5D: []rune{0x1F55}, // Case map + 0x1F5F: []rune{0x1F57}, // Case map + 0x1F68: []rune{0x1F60}, // Case map + 0x1F69: []rune{0x1F61}, // Case map + 0x1F6A: []rune{0x1F62}, // Case map + 0x1F6B: []rune{0x1F63}, // Case map + 0x1F6C: []rune{0x1F64}, // Case map + 0x1F6D: []rune{0x1F65}, // Case map + 0x1F6E: []rune{0x1F66}, // Case map + 0x1F6F: []rune{0x1F67}, // Case map + 0x1F80: []rune{0x1F00, 0x03B9}, // Case map + 0x1F81: []rune{0x1F01, 0x03B9}, // Case map + 0x1F82: []rune{0x1F02, 0x03B9}, // Case map + 0x1F83: []rune{0x1F03, 0x03B9}, // Case map + 0x1F84: []rune{0x1F04, 0x03B9}, // Case map + 0x1F85: []rune{0x1F05, 0x03B9}, // Case map + 0x1F86: []rune{0x1F06, 0x03B9}, // Case map + 0x1F87: []rune{0x1F07, 0x03B9}, // Case map + 0x1F88: []rune{0x1F00, 0x03B9}, // Case map + 0x1F89: []rune{0x1F01, 0x03B9}, // Case map + 0x1F8A: []rune{0x1F02, 0x03B9}, // Case map + 0x1F8B: []rune{0x1F03, 0x03B9}, // Case map + 0x1F8C: []rune{0x1F04, 0x03B9}, // Case map + 0x1F8D: []rune{0x1F05, 0x03B9}, // Case map + 0x1F8E: []rune{0x1F06, 0x03B9}, // Case map + 0x1F8F: []rune{0x1F07, 0x03B9}, // Case map + 0x1F90: []rune{0x1F20, 0x03B9}, // Case map + 0x1F91: []rune{0x1F21, 0x03B9}, // Case map + 0x1F92: []rune{0x1F22, 0x03B9}, // 
Case map + 0x1F93: []rune{0x1F23, 0x03B9}, // Case map + 0x1F94: []rune{0x1F24, 0x03B9}, // Case map + 0x1F95: []rune{0x1F25, 0x03B9}, // Case map + 0x1F96: []rune{0x1F26, 0x03B9}, // Case map + 0x1F97: []rune{0x1F27, 0x03B9}, // Case map + 0x1F98: []rune{0x1F20, 0x03B9}, // Case map + 0x1F99: []rune{0x1F21, 0x03B9}, // Case map + 0x1F9A: []rune{0x1F22, 0x03B9}, // Case map + 0x1F9B: []rune{0x1F23, 0x03B9}, // Case map + 0x1F9C: []rune{0x1F24, 0x03B9}, // Case map + 0x1F9D: []rune{0x1F25, 0x03B9}, // Case map + 0x1F9E: []rune{0x1F26, 0x03B9}, // Case map + 0x1F9F: []rune{0x1F27, 0x03B9}, // Case map + 0x1FA0: []rune{0x1F60, 0x03B9}, // Case map + 0x1FA1: []rune{0x1F61, 0x03B9}, // Case map + 0x1FA2: []rune{0x1F62, 0x03B9}, // Case map + 0x1FA3: []rune{0x1F63, 0x03B9}, // Case map + 0x1FA4: []rune{0x1F64, 0x03B9}, // Case map + 0x1FA5: []rune{0x1F65, 0x03B9}, // Case map + 0x1FA6: []rune{0x1F66, 0x03B9}, // Case map + 0x1FA7: []rune{0x1F67, 0x03B9}, // Case map + 0x1FA8: []rune{0x1F60, 0x03B9}, // Case map + 0x1FA9: []rune{0x1F61, 0x03B9}, // Case map + 0x1FAA: []rune{0x1F62, 0x03B9}, // Case map + 0x1FAB: []rune{0x1F63, 0x03B9}, // Case map + 0x1FAC: []rune{0x1F64, 0x03B9}, // Case map + 0x1FAD: []rune{0x1F65, 0x03B9}, // Case map + 0x1FAE: []rune{0x1F66, 0x03B9}, // Case map + 0x1FAF: []rune{0x1F67, 0x03B9}, // Case map + 0x1FB2: []rune{0x1F70, 0x03B9}, // Case map + 0x1FB3: []rune{0x03B1, 0x03B9}, // Case map + 0x1FB4: []rune{0x03AC, 0x03B9}, // Case map + 0x1FB6: []rune{0x03B1, 0x0342}, // Case map + 0x1FB7: []rune{0x03B1, 0x0342, 0x03B9}, // Case map + 0x1FB8: []rune{0x1FB0}, // Case map + 0x1FB9: []rune{0x1FB1}, // Case map + 0x1FBA: []rune{0x1F70}, // Case map + 0x1FBB: []rune{0x1F71}, // Case map + 0x1FBC: []rune{0x03B1, 0x03B9}, // Case map + 0x1FBE: []rune{0x03B9}, // Case map + 0x1FC2: []rune{0x1F74, 0x03B9}, // Case map + 0x1FC3: []rune{0x03B7, 0x03B9}, // Case map + 0x1FC4: []rune{0x03AE, 0x03B9}, // Case map + 0x1FC6: []rune{0x03B7, 0x0342}, // Case map + 0x1FC7: []rune{0x03B7, 0x0342, 0x03B9}, // Case map + 0x1FC8: []rune{0x1F72}, // Case map + 0x1FC9: []rune{0x1F73}, // Case map + 0x1FCA: []rune{0x1F74}, // Case map + 0x1FCB: []rune{0x1F75}, // Case map + 0x1FCC: []rune{0x03B7, 0x03B9}, // Case map + 0x1FD2: []rune{0x03B9, 0x0308, 0x0300}, // Case map + 0x1FD3: []rune{0x03B9, 0x0308, 0x0301}, // Case map + 0x1FD6: []rune{0x03B9, 0x0342}, // Case map + 0x1FD7: []rune{0x03B9, 0x0308, 0x0342}, // Case map + 0x1FD8: []rune{0x1FD0}, // Case map + 0x1FD9: []rune{0x1FD1}, // Case map + 0x1FDA: []rune{0x1F76}, // Case map + 0x1FDB: []rune{0x1F77}, // Case map + 0x1FE2: []rune{0x03C5, 0x0308, 0x0300}, // Case map + 0x1FE3: []rune{0x03C5, 0x0308, 0x0301}, // Case map + 0x1FE4: []rune{0x03C1, 0x0313}, // Case map + 0x1FE6: []rune{0x03C5, 0x0342}, // Case map + 0x1FE7: []rune{0x03C5, 0x0308, 0x0342}, // Case map + 0x1FE8: []rune{0x1FE0}, // Case map + 0x1FE9: []rune{0x1FE1}, // Case map + 0x1FEA: []rune{0x1F7A}, // Case map + 0x1FEB: []rune{0x1F7B}, // Case map + 0x1FEC: []rune{0x1FE5}, // Case map + 0x1FF2: []rune{0x1F7C, 0x03B9}, // Case map + 0x1FF3: []rune{0x03C9, 0x03B9}, // Case map + 0x1FF4: []rune{0x03CE, 0x03B9}, // Case map + 0x1FF6: []rune{0x03C9, 0x0342}, // Case map + 0x1FF7: []rune{0x03C9, 0x0342, 0x03B9}, // Case map + 0x1FF8: []rune{0x1F78}, // Case map + 0x1FF9: []rune{0x1F79}, // Case map + 0x1FFA: []rune{0x1F7C}, // Case map + 0x1FFB: []rune{0x1F7D}, // Case map + 0x1FFC: []rune{0x03C9, 0x03B9}, // Case map + 0x20A8: []rune{0x0072, 0x0073}, // Additional folding + 
0x2102: []rune{0x0063}, // Additional folding + 0x2103: []rune{0x00B0, 0x0063}, // Additional folding + 0x2107: []rune{0x025B}, // Additional folding + 0x2109: []rune{0x00B0, 0x0066}, // Additional folding + 0x210B: []rune{0x0068}, // Additional folding + 0x210C: []rune{0x0068}, // Additional folding + 0x210D: []rune{0x0068}, // Additional folding + 0x2110: []rune{0x0069}, // Additional folding + 0x2111: []rune{0x0069}, // Additional folding + 0x2112: []rune{0x006C}, // Additional folding + 0x2115: []rune{0x006E}, // Additional folding + 0x2116: []rune{0x006E, 0x006F}, // Additional folding + 0x2119: []rune{0x0070}, // Additional folding + 0x211A: []rune{0x0071}, // Additional folding + 0x211B: []rune{0x0072}, // Additional folding + 0x211C: []rune{0x0072}, // Additional folding + 0x211D: []rune{0x0072}, // Additional folding + 0x2120: []rune{0x0073, 0x006D}, // Additional folding + 0x2121: []rune{0x0074, 0x0065, 0x006C}, // Additional folding + 0x2122: []rune{0x0074, 0x006D}, // Additional folding + 0x2124: []rune{0x007A}, // Additional folding + 0x2126: []rune{0x03C9}, // Case map + 0x2128: []rune{0x007A}, // Additional folding + 0x212A: []rune{0x006B}, // Case map + 0x212B: []rune{0x00E5}, // Case map + 0x212C: []rune{0x0062}, // Additional folding + 0x212D: []rune{0x0063}, // Additional folding + 0x2130: []rune{0x0065}, // Additional folding + 0x2131: []rune{0x0066}, // Additional folding + 0x2133: []rune{0x006D}, // Additional folding + 0x213E: []rune{0x03B3}, // Additional folding + 0x213F: []rune{0x03C0}, // Additional folding + 0x2145: []rune{0x0064}, // Additional folding + 0x2160: []rune{0x2170}, // Case map + 0x2161: []rune{0x2171}, // Case map + 0x2162: []rune{0x2172}, // Case map + 0x2163: []rune{0x2173}, // Case map + 0x2164: []rune{0x2174}, // Case map + 0x2165: []rune{0x2175}, // Case map + 0x2166: []rune{0x2176}, // Case map + 0x2167: []rune{0x2177}, // Case map + 0x2168: []rune{0x2178}, // Case map + 0x2169: []rune{0x2179}, // Case map + 0x216A: []rune{0x217A}, // Case map + 0x216B: []rune{0x217B}, // Case map + 0x216C: []rune{0x217C}, // Case map + 0x216D: []rune{0x217D}, // Case map + 0x216E: []rune{0x217E}, // Case map + 0x216F: []rune{0x217F}, // Case map + 0x24B6: []rune{0x24D0}, // Case map + 0x24B7: []rune{0x24D1}, // Case map + 0x24B8: []rune{0x24D2}, // Case map + 0x24B9: []rune{0x24D3}, // Case map + 0x24BA: []rune{0x24D4}, // Case map + 0x24BB: []rune{0x24D5}, // Case map + 0x24BC: []rune{0x24D6}, // Case map + 0x24BD: []rune{0x24D7}, // Case map + 0x24BE: []rune{0x24D8}, // Case map + 0x24BF: []rune{0x24D9}, // Case map + 0x24C0: []rune{0x24DA}, // Case map + 0x24C1: []rune{0x24DB}, // Case map + 0x24C2: []rune{0x24DC}, // Case map + 0x24C3: []rune{0x24DD}, // Case map + 0x24C4: []rune{0x24DE}, // Case map + 0x24C5: []rune{0x24DF}, // Case map + 0x24C6: []rune{0x24E0}, // Case map + 0x24C7: []rune{0x24E1}, // Case map + 0x24C8: []rune{0x24E2}, // Case map + 0x24C9: []rune{0x24E3}, // Case map + 0x24CA: []rune{0x24E4}, // Case map + 0x24CB: []rune{0x24E5}, // Case map + 0x24CC: []rune{0x24E6}, // Case map + 0x24CD: []rune{0x24E7}, // Case map + 0x24CE: []rune{0x24E8}, // Case map + 0x24CF: []rune{0x24E9}, // Case map + 0x3371: []rune{0x0068, 0x0070, 0x0061}, // Additional folding + 0x3373: []rune{0x0061, 0x0075}, // Additional folding + 0x3375: []rune{0x006F, 0x0076}, // Additional folding + 0x3380: []rune{0x0070, 0x0061}, // Additional folding + 0x3381: []rune{0x006E, 0x0061}, // Additional folding + 0x3382: []rune{0x03BC, 0x0061}, // Additional folding + 
0x3383: []rune{0x006D, 0x0061}, // Additional folding + 0x3384: []rune{0x006B, 0x0061}, // Additional folding + 0x3385: []rune{0x006B, 0x0062}, // Additional folding + 0x3386: []rune{0x006D, 0x0062}, // Additional folding + 0x3387: []rune{0x0067, 0x0062}, // Additional folding + 0x338A: []rune{0x0070, 0x0066}, // Additional folding + 0x338B: []rune{0x006E, 0x0066}, // Additional folding + 0x338C: []rune{0x03BC, 0x0066}, // Additional folding + 0x3390: []rune{0x0068, 0x007A}, // Additional folding + 0x3391: []rune{0x006B, 0x0068, 0x007A}, // Additional folding + 0x3392: []rune{0x006D, 0x0068, 0x007A}, // Additional folding + 0x3393: []rune{0x0067, 0x0068, 0x007A}, // Additional folding + 0x3394: []rune{0x0074, 0x0068, 0x007A}, // Additional folding + 0x33A9: []rune{0x0070, 0x0061}, // Additional folding + 0x33AA: []rune{0x006B, 0x0070, 0x0061}, // Additional folding + 0x33AB: []rune{0x006D, 0x0070, 0x0061}, // Additional folding + 0x33AC: []rune{0x0067, 0x0070, 0x0061}, // Additional folding + 0x33B4: []rune{0x0070, 0x0076}, // Additional folding + 0x33B5: []rune{0x006E, 0x0076}, // Additional folding + 0x33B6: []rune{0x03BC, 0x0076}, // Additional folding + 0x33B7: []rune{0x006D, 0x0076}, // Additional folding + 0x33B8: []rune{0x006B, 0x0076}, // Additional folding + 0x33B9: []rune{0x006D, 0x0076}, // Additional folding + 0x33BA: []rune{0x0070, 0x0077}, // Additional folding + 0x33BB: []rune{0x006E, 0x0077}, // Additional folding + 0x33BC: []rune{0x03BC, 0x0077}, // Additional folding + 0x33BD: []rune{0x006D, 0x0077}, // Additional folding + 0x33BE: []rune{0x006B, 0x0077}, // Additional folding + 0x33BF: []rune{0x006D, 0x0077}, // Additional folding + 0x33C0: []rune{0x006B, 0x03C9}, // Additional folding + 0x33C1: []rune{0x006D, 0x03C9}, // Additional folding + 0x33C3: []rune{0x0062, 0x0071}, // Additional folding + 0x33C6: []rune{0x0063, 0x2215, 0x006B, 0x0067}, // Additional folding + 0x33C7: []rune{0x0063, 0x006F, 0x002E}, // Additional folding + 0x33C8: []rune{0x0064, 0x0062}, // Additional folding + 0x33C9: []rune{0x0067, 0x0079}, // Additional folding + 0x33CB: []rune{0x0068, 0x0070}, // Additional folding + 0x33CD: []rune{0x006B, 0x006B}, // Additional folding + 0x33CE: []rune{0x006B, 0x006D}, // Additional folding + 0x33D7: []rune{0x0070, 0x0068}, // Additional folding + 0x33D9: []rune{0x0070, 0x0070, 0x006D}, // Additional folding + 0x33DA: []rune{0x0070, 0x0072}, // Additional folding + 0x33DC: []rune{0x0073, 0x0076}, // Additional folding + 0x33DD: []rune{0x0077, 0x0062}, // Additional folding + 0xFB00: []rune{0x0066, 0x0066}, // Case map + 0xFB01: []rune{0x0066, 0x0069}, // Case map + 0xFB02: []rune{0x0066, 0x006C}, // Case map + 0xFB03: []rune{0x0066, 0x0066, 0x0069}, // Case map + 0xFB04: []rune{0x0066, 0x0066, 0x006C}, // Case map + 0xFB05: []rune{0x0073, 0x0074}, // Case map + 0xFB06: []rune{0x0073, 0x0074}, // Case map + 0xFB13: []rune{0x0574, 0x0576}, // Case map + 0xFB14: []rune{0x0574, 0x0565}, // Case map + 0xFB15: []rune{0x0574, 0x056B}, // Case map + 0xFB16: []rune{0x057E, 0x0576}, // Case map + 0xFB17: []rune{0x0574, 0x056D}, // Case map + 0xFF21: []rune{0xFF41}, // Case map + 0xFF22: []rune{0xFF42}, // Case map + 0xFF23: []rune{0xFF43}, // Case map + 0xFF24: []rune{0xFF44}, // Case map + 0xFF25: []rune{0xFF45}, // Case map + 0xFF26: []rune{0xFF46}, // Case map + 0xFF27: []rune{0xFF47}, // Case map + 0xFF28: []rune{0xFF48}, // Case map + 0xFF29: []rune{0xFF49}, // Case map + 0xFF2A: []rune{0xFF4A}, // Case map + 0xFF2B: []rune{0xFF4B}, // Case map + 0xFF2C: 
[]rune{0xFF4C}, // Case map + 0xFF2D: []rune{0xFF4D}, // Case map + 0xFF2E: []rune{0xFF4E}, // Case map + 0xFF2F: []rune{0xFF4F}, // Case map + 0xFF30: []rune{0xFF50}, // Case map + 0xFF31: []rune{0xFF51}, // Case map + 0xFF32: []rune{0xFF52}, // Case map + 0xFF33: []rune{0xFF53}, // Case map + 0xFF34: []rune{0xFF54}, // Case map + 0xFF35: []rune{0xFF55}, // Case map + 0xFF36: []rune{0xFF56}, // Case map + 0xFF37: []rune{0xFF57}, // Case map + 0xFF38: []rune{0xFF58}, // Case map + 0xFF39: []rune{0xFF59}, // Case map + 0xFF3A: []rune{0xFF5A}, // Case map + 0x10400: []rune{0x10428}, // Case map + 0x10401: []rune{0x10429}, // Case map + 0x10402: []rune{0x1042A}, // Case map + 0x10403: []rune{0x1042B}, // Case map + 0x10404: []rune{0x1042C}, // Case map + 0x10405: []rune{0x1042D}, // Case map + 0x10406: []rune{0x1042E}, // Case map + 0x10407: []rune{0x1042F}, // Case map + 0x10408: []rune{0x10430}, // Case map + 0x10409: []rune{0x10431}, // Case map + 0x1040A: []rune{0x10432}, // Case map + 0x1040B: []rune{0x10433}, // Case map + 0x1040C: []rune{0x10434}, // Case map + 0x1040D: []rune{0x10435}, // Case map + 0x1040E: []rune{0x10436}, // Case map + 0x1040F: []rune{0x10437}, // Case map + 0x10410: []rune{0x10438}, // Case map + 0x10411: []rune{0x10439}, // Case map + 0x10412: []rune{0x1043A}, // Case map + 0x10413: []rune{0x1043B}, // Case map + 0x10414: []rune{0x1043C}, // Case map + 0x10415: []rune{0x1043D}, // Case map + 0x10416: []rune{0x1043E}, // Case map + 0x10417: []rune{0x1043F}, // Case map + 0x10418: []rune{0x10440}, // Case map + 0x10419: []rune{0x10441}, // Case map + 0x1041A: []rune{0x10442}, // Case map + 0x1041B: []rune{0x10443}, // Case map + 0x1041C: []rune{0x10444}, // Case map + 0x1041D: []rune{0x10445}, // Case map + 0x1041E: []rune{0x10446}, // Case map + 0x1041F: []rune{0x10447}, // Case map + 0x10420: []rune{0x10448}, // Case map + 0x10421: []rune{0x10449}, // Case map + 0x10422: []rune{0x1044A}, // Case map + 0x10423: []rune{0x1044B}, // Case map + 0x10424: []rune{0x1044C}, // Case map + 0x10425: []rune{0x1044D}, // Case map + 0x1D400: []rune{0x0061}, // Additional folding + 0x1D401: []rune{0x0062}, // Additional folding + 0x1D402: []rune{0x0063}, // Additional folding + 0x1D403: []rune{0x0064}, // Additional folding + 0x1D404: []rune{0x0065}, // Additional folding + 0x1D405: []rune{0x0066}, // Additional folding + 0x1D406: []rune{0x0067}, // Additional folding + 0x1D407: []rune{0x0068}, // Additional folding + 0x1D408: []rune{0x0069}, // Additional folding + 0x1D409: []rune{0x006A}, // Additional folding + 0x1D40A: []rune{0x006B}, // Additional folding + 0x1D40B: []rune{0x006C}, // Additional folding + 0x1D40C: []rune{0x006D}, // Additional folding + 0x1D40D: []rune{0x006E}, // Additional folding + 0x1D40E: []rune{0x006F}, // Additional folding + 0x1D40F: []rune{0x0070}, // Additional folding + 0x1D410: []rune{0x0071}, // Additional folding + 0x1D411: []rune{0x0072}, // Additional folding + 0x1D412: []rune{0x0073}, // Additional folding + 0x1D413: []rune{0x0074}, // Additional folding + 0x1D414: []rune{0x0075}, // Additional folding + 0x1D415: []rune{0x0076}, // Additional folding + 0x1D416: []rune{0x0077}, // Additional folding + 0x1D417: []rune{0x0078}, // Additional folding + 0x1D418: []rune{0x0079}, // Additional folding + 0x1D419: []rune{0x007A}, // Additional folding + 0x1D434: []rune{0x0061}, // Additional folding + 0x1D435: []rune{0x0062}, // Additional folding + 0x1D436: []rune{0x0063}, // Additional folding + 0x1D437: []rune{0x0064}, // Additional folding + 
0x1D438: []rune{0x0065}, // Additional folding + 0x1D439: []rune{0x0066}, // Additional folding + 0x1D43A: []rune{0x0067}, // Additional folding + 0x1D43B: []rune{0x0068}, // Additional folding + 0x1D43C: []rune{0x0069}, // Additional folding + 0x1D43D: []rune{0x006A}, // Additional folding + 0x1D43E: []rune{0x006B}, // Additional folding + 0x1D43F: []rune{0x006C}, // Additional folding + 0x1D440: []rune{0x006D}, // Additional folding + 0x1D441: []rune{0x006E}, // Additional folding + 0x1D442: []rune{0x006F}, // Additional folding + 0x1D443: []rune{0x0070}, // Additional folding + 0x1D444: []rune{0x0071}, // Additional folding + 0x1D445: []rune{0x0072}, // Additional folding + 0x1D446: []rune{0x0073}, // Additional folding + 0x1D447: []rune{0x0074}, // Additional folding + 0x1D448: []rune{0x0075}, // Additional folding + 0x1D449: []rune{0x0076}, // Additional folding + 0x1D44A: []rune{0x0077}, // Additional folding + 0x1D44B: []rune{0x0078}, // Additional folding + 0x1D44C: []rune{0x0079}, // Additional folding + 0x1D44D: []rune{0x007A}, // Additional folding + 0x1D468: []rune{0x0061}, // Additional folding + 0x1D469: []rune{0x0062}, // Additional folding + 0x1D46A: []rune{0x0063}, // Additional folding + 0x1D46B: []rune{0x0064}, // Additional folding + 0x1D46C: []rune{0x0065}, // Additional folding + 0x1D46D: []rune{0x0066}, // Additional folding + 0x1D46E: []rune{0x0067}, // Additional folding + 0x1D46F: []rune{0x0068}, // Additional folding + 0x1D470: []rune{0x0069}, // Additional folding + 0x1D471: []rune{0x006A}, // Additional folding + 0x1D472: []rune{0x006B}, // Additional folding + 0x1D473: []rune{0x006C}, // Additional folding + 0x1D474: []rune{0x006D}, // Additional folding + 0x1D475: []rune{0x006E}, // Additional folding + 0x1D476: []rune{0x006F}, // Additional folding + 0x1D477: []rune{0x0070}, // Additional folding + 0x1D478: []rune{0x0071}, // Additional folding + 0x1D479: []rune{0x0072}, // Additional folding + 0x1D47A: []rune{0x0073}, // Additional folding + 0x1D47B: []rune{0x0074}, // Additional folding + 0x1D47C: []rune{0x0075}, // Additional folding + 0x1D47D: []rune{0x0076}, // Additional folding + 0x1D47E: []rune{0x0077}, // Additional folding + 0x1D47F: []rune{0x0078}, // Additional folding + 0x1D480: []rune{0x0079}, // Additional folding + 0x1D481: []rune{0x007A}, // Additional folding + 0x1D49C: []rune{0x0061}, // Additional folding + 0x1D49E: []rune{0x0063}, // Additional folding + 0x1D49F: []rune{0x0064}, // Additional folding + 0x1D4A2: []rune{0x0067}, // Additional folding + 0x1D4A5: []rune{0x006A}, // Additional folding + 0x1D4A6: []rune{0x006B}, // Additional folding + 0x1D4A9: []rune{0x006E}, // Additional folding + 0x1D4AA: []rune{0x006F}, // Additional folding + 0x1D4AB: []rune{0x0070}, // Additional folding + 0x1D4AC: []rune{0x0071}, // Additional folding + 0x1D4AE: []rune{0x0073}, // Additional folding + 0x1D4AF: []rune{0x0074}, // Additional folding + 0x1D4B0: []rune{0x0075}, // Additional folding + 0x1D4B1: []rune{0x0076}, // Additional folding + 0x1D4B2: []rune{0x0077}, // Additional folding + 0x1D4B3: []rune{0x0078}, // Additional folding + 0x1D4B4: []rune{0x0079}, // Additional folding + 0x1D4B5: []rune{0x007A}, // Additional folding + 0x1D4D0: []rune{0x0061}, // Additional folding + 0x1D4D1: []rune{0x0062}, // Additional folding + 0x1D4D2: []rune{0x0063}, // Additional folding + 0x1D4D3: []rune{0x0064}, // Additional folding + 0x1D4D4: []rune{0x0065}, // Additional folding + 0x1D4D5: []rune{0x0066}, // Additional folding + 0x1D4D6: []rune{0x0067}, 
// Additional folding + 0x1D4D7: []rune{0x0068}, // Additional folding + 0x1D4D8: []rune{0x0069}, // Additional folding + 0x1D4D9: []rune{0x006A}, // Additional folding + 0x1D4DA: []rune{0x006B}, // Additional folding + 0x1D4DB: []rune{0x006C}, // Additional folding + 0x1D4DC: []rune{0x006D}, // Additional folding + 0x1D4DD: []rune{0x006E}, // Additional folding + 0x1D4DE: []rune{0x006F}, // Additional folding + 0x1D4DF: []rune{0x0070}, // Additional folding + 0x1D4E0: []rune{0x0071}, // Additional folding + 0x1D4E1: []rune{0x0072}, // Additional folding + 0x1D4E2: []rune{0x0073}, // Additional folding + 0x1D4E3: []rune{0x0074}, // Additional folding + 0x1D4E4: []rune{0x0075}, // Additional folding + 0x1D4E5: []rune{0x0076}, // Additional folding + 0x1D4E6: []rune{0x0077}, // Additional folding + 0x1D4E7: []rune{0x0078}, // Additional folding + 0x1D4E8: []rune{0x0079}, // Additional folding + 0x1D4E9: []rune{0x007A}, // Additional folding + 0x1D504: []rune{0x0061}, // Additional folding + 0x1D505: []rune{0x0062}, // Additional folding + 0x1D507: []rune{0x0064}, // Additional folding + 0x1D508: []rune{0x0065}, // Additional folding + 0x1D509: []rune{0x0066}, // Additional folding + 0x1D50A: []rune{0x0067}, // Additional folding + 0x1D50D: []rune{0x006A}, // Additional folding + 0x1D50E: []rune{0x006B}, // Additional folding + 0x1D50F: []rune{0x006C}, // Additional folding + 0x1D510: []rune{0x006D}, // Additional folding + 0x1D511: []rune{0x006E}, // Additional folding + 0x1D512: []rune{0x006F}, // Additional folding + 0x1D513: []rune{0x0070}, // Additional folding + 0x1D514: []rune{0x0071}, // Additional folding + 0x1D516: []rune{0x0073}, // Additional folding + 0x1D517: []rune{0x0074}, // Additional folding + 0x1D518: []rune{0x0075}, // Additional folding + 0x1D519: []rune{0x0076}, // Additional folding + 0x1D51A: []rune{0x0077}, // Additional folding + 0x1D51B: []rune{0x0078}, // Additional folding + 0x1D51C: []rune{0x0079}, // Additional folding + 0x1D538: []rune{0x0061}, // Additional folding + 0x1D539: []rune{0x0062}, // Additional folding + 0x1D53B: []rune{0x0064}, // Additional folding + 0x1D53C: []rune{0x0065}, // Additional folding + 0x1D53D: []rune{0x0066}, // Additional folding + 0x1D53E: []rune{0x0067}, // Additional folding + 0x1D540: []rune{0x0069}, // Additional folding + 0x1D541: []rune{0x006A}, // Additional folding + 0x1D542: []rune{0x006B}, // Additional folding + 0x1D543: []rune{0x006C}, // Additional folding + 0x1D544: []rune{0x006D}, // Additional folding + 0x1D546: []rune{0x006F}, // Additional folding + 0x1D54A: []rune{0x0073}, // Additional folding + 0x1D54B: []rune{0x0074}, // Additional folding + 0x1D54C: []rune{0x0075}, // Additional folding + 0x1D54D: []rune{0x0076}, // Additional folding + 0x1D54E: []rune{0x0077}, // Additional folding + 0x1D54F: []rune{0x0078}, // Additional folding + 0x1D550: []rune{0x0079}, // Additional folding + 0x1D56C: []rune{0x0061}, // Additional folding + 0x1D56D: []rune{0x0062}, // Additional folding + 0x1D56E: []rune{0x0063}, // Additional folding + 0x1D56F: []rune{0x0064}, // Additional folding + 0x1D570: []rune{0x0065}, // Additional folding + 0x1D571: []rune{0x0066}, // Additional folding + 0x1D572: []rune{0x0067}, // Additional folding + 0x1D573: []rune{0x0068}, // Additional folding + 0x1D574: []rune{0x0069}, // Additional folding + 0x1D575: []rune{0x006A}, // Additional folding + 0x1D576: []rune{0x006B}, // Additional folding + 0x1D577: []rune{0x006C}, // Additional folding + 0x1D578: []rune{0x006D}, // Additional folding + 
0x1D579: []rune{0x006E}, // Additional folding + 0x1D57A: []rune{0x006F}, // Additional folding + 0x1D57B: []rune{0x0070}, // Additional folding + 0x1D57C: []rune{0x0071}, // Additional folding + 0x1D57D: []rune{0x0072}, // Additional folding + 0x1D57E: []rune{0x0073}, // Additional folding + 0x1D57F: []rune{0x0074}, // Additional folding + 0x1D580: []rune{0x0075}, // Additional folding + 0x1D581: []rune{0x0076}, // Additional folding + 0x1D582: []rune{0x0077}, // Additional folding + 0x1D583: []rune{0x0078}, // Additional folding + 0x1D584: []rune{0x0079}, // Additional folding + 0x1D585: []rune{0x007A}, // Additional folding + 0x1D5A0: []rune{0x0061}, // Additional folding + 0x1D5A1: []rune{0x0062}, // Additional folding + 0x1D5A2: []rune{0x0063}, // Additional folding + 0x1D5A3: []rune{0x0064}, // Additional folding + 0x1D5A4: []rune{0x0065}, // Additional folding + 0x1D5A5: []rune{0x0066}, // Additional folding + 0x1D5A6: []rune{0x0067}, // Additional folding + 0x1D5A7: []rune{0x0068}, // Additional folding + 0x1D5A8: []rune{0x0069}, // Additional folding + 0x1D5A9: []rune{0x006A}, // Additional folding + 0x1D5AA: []rune{0x006B}, // Additional folding + 0x1D5AB: []rune{0x006C}, // Additional folding + 0x1D5AC: []rune{0x006D}, // Additional folding + 0x1D5AD: []rune{0x006E}, // Additional folding + 0x1D5AE: []rune{0x006F}, // Additional folding + 0x1D5AF: []rune{0x0070}, // Additional folding + 0x1D5B0: []rune{0x0071}, // Additional folding + 0x1D5B1: []rune{0x0072}, // Additional folding + 0x1D5B2: []rune{0x0073}, // Additional folding + 0x1D5B3: []rune{0x0074}, // Additional folding + 0x1D5B4: []rune{0x0075}, // Additional folding + 0x1D5B5: []rune{0x0076}, // Additional folding + 0x1D5B6: []rune{0x0077}, // Additional folding + 0x1D5B7: []rune{0x0078}, // Additional folding + 0x1D5B8: []rune{0x0079}, // Additional folding + 0x1D5B9: []rune{0x007A}, // Additional folding + 0x1D5D4: []rune{0x0061}, // Additional folding + 0x1D5D5: []rune{0x0062}, // Additional folding + 0x1D5D6: []rune{0x0063}, // Additional folding + 0x1D5D7: []rune{0x0064}, // Additional folding + 0x1D5D8: []rune{0x0065}, // Additional folding + 0x1D5D9: []rune{0x0066}, // Additional folding + 0x1D5DA: []rune{0x0067}, // Additional folding + 0x1D5DB: []rune{0x0068}, // Additional folding + 0x1D5DC: []rune{0x0069}, // Additional folding + 0x1D5DD: []rune{0x006A}, // Additional folding + 0x1D5DE: []rune{0x006B}, // Additional folding + 0x1D5DF: []rune{0x006C}, // Additional folding + 0x1D5E0: []rune{0x006D}, // Additional folding + 0x1D5E1: []rune{0x006E}, // Additional folding + 0x1D5E2: []rune{0x006F}, // Additional folding + 0x1D5E3: []rune{0x0070}, // Additional folding + 0x1D5E4: []rune{0x0071}, // Additional folding + 0x1D5E5: []rune{0x0072}, // Additional folding + 0x1D5E6: []rune{0x0073}, // Additional folding + 0x1D5E7: []rune{0x0074}, // Additional folding + 0x1D5E8: []rune{0x0075}, // Additional folding + 0x1D5E9: []rune{0x0076}, // Additional folding + 0x1D5EA: []rune{0x0077}, // Additional folding + 0x1D5EB: []rune{0x0078}, // Additional folding + 0x1D5EC: []rune{0x0079}, // Additional folding + 0x1D5ED: []rune{0x007A}, // Additional folding + 0x1D608: []rune{0x0061}, // Additional folding + 0x1D609: []rune{0x0062}, // Additional folding + 0x1D60A: []rune{0x0063}, // Additional folding + 0x1D60B: []rune{0x0064}, // Additional folding + 0x1D60C: []rune{0x0065}, // Additional folding + 0x1D60D: []rune{0x0066}, // Additional folding + 0x1D60E: []rune{0x0067}, // Additional folding + 0x1D60F: []rune{0x0068}, 
// Additional folding + 0x1D610: []rune{0x0069}, // Additional folding + 0x1D611: []rune{0x006A}, // Additional folding + 0x1D612: []rune{0x006B}, // Additional folding + 0x1D613: []rune{0x006C}, // Additional folding + 0x1D614: []rune{0x006D}, // Additional folding + 0x1D615: []rune{0x006E}, // Additional folding + 0x1D616: []rune{0x006F}, // Additional folding + 0x1D617: []rune{0x0070}, // Additional folding + 0x1D618: []rune{0x0071}, // Additional folding + 0x1D619: []rune{0x0072}, // Additional folding + 0x1D61A: []rune{0x0073}, // Additional folding + 0x1D61B: []rune{0x0074}, // Additional folding + 0x1D61C: []rune{0x0075}, // Additional folding + 0x1D61D: []rune{0x0076}, // Additional folding + 0x1D61E: []rune{0x0077}, // Additional folding + 0x1D61F: []rune{0x0078}, // Additional folding + 0x1D620: []rune{0x0079}, // Additional folding + 0x1D621: []rune{0x007A}, // Additional folding + 0x1D63C: []rune{0x0061}, // Additional folding + 0x1D63D: []rune{0x0062}, // Additional folding + 0x1D63E: []rune{0x0063}, // Additional folding + 0x1D63F: []rune{0x0064}, // Additional folding + 0x1D640: []rune{0x0065}, // Additional folding + 0x1D641: []rune{0x0066}, // Additional folding + 0x1D642: []rune{0x0067}, // Additional folding + 0x1D643: []rune{0x0068}, // Additional folding + 0x1D644: []rune{0x0069}, // Additional folding + 0x1D645: []rune{0x006A}, // Additional folding + 0x1D646: []rune{0x006B}, // Additional folding + 0x1D647: []rune{0x006C}, // Additional folding + 0x1D648: []rune{0x006D}, // Additional folding + 0x1D649: []rune{0x006E}, // Additional folding + 0x1D64A: []rune{0x006F}, // Additional folding + 0x1D64B: []rune{0x0070}, // Additional folding + 0x1D64C: []rune{0x0071}, // Additional folding + 0x1D64D: []rune{0x0072}, // Additional folding + 0x1D64E: []rune{0x0073}, // Additional folding + 0x1D64F: []rune{0x0074}, // Additional folding + 0x1D650: []rune{0x0075}, // Additional folding + 0x1D651: []rune{0x0076}, // Additional folding + 0x1D652: []rune{0x0077}, // Additional folding + 0x1D653: []rune{0x0078}, // Additional folding + 0x1D654: []rune{0x0079}, // Additional folding + 0x1D655: []rune{0x007A}, // Additional folding + 0x1D670: []rune{0x0061}, // Additional folding + 0x1D671: []rune{0x0062}, // Additional folding + 0x1D672: []rune{0x0063}, // Additional folding + 0x1D673: []rune{0x0064}, // Additional folding + 0x1D674: []rune{0x0065}, // Additional folding + 0x1D675: []rune{0x0066}, // Additional folding + 0x1D676: []rune{0x0067}, // Additional folding + 0x1D677: []rune{0x0068}, // Additional folding + 0x1D678: []rune{0x0069}, // Additional folding + 0x1D679: []rune{0x006A}, // Additional folding + 0x1D67A: []rune{0x006B}, // Additional folding + 0x1D67B: []rune{0x006C}, // Additional folding + 0x1D67C: []rune{0x006D}, // Additional folding + 0x1D67D: []rune{0x006E}, // Additional folding + 0x1D67E: []rune{0x006F}, // Additional folding + 0x1D67F: []rune{0x0070}, // Additional folding + 0x1D680: []rune{0x0071}, // Additional folding + 0x1D681: []rune{0x0072}, // Additional folding + 0x1D682: []rune{0x0073}, // Additional folding + 0x1D683: []rune{0x0074}, // Additional folding + 0x1D684: []rune{0x0075}, // Additional folding + 0x1D685: []rune{0x0076}, // Additional folding + 0x1D686: []rune{0x0077}, // Additional folding + 0x1D687: []rune{0x0078}, // Additional folding + 0x1D688: []rune{0x0079}, // Additional folding + 0x1D689: []rune{0x007A}, // Additional folding + 0x1D6A8: []rune{0x03B1}, // Additional folding + 0x1D6A9: []rune{0x03B2}, // Additional folding + 
0x1D6AA: []rune{0x03B3}, // Additional folding + 0x1D6AB: []rune{0x03B4}, // Additional folding + 0x1D6AC: []rune{0x03B5}, // Additional folding + 0x1D6AD: []rune{0x03B6}, // Additional folding + 0x1D6AE: []rune{0x03B7}, // Additional folding + 0x1D6AF: []rune{0x03B8}, // Additional folding + 0x1D6B0: []rune{0x03B9}, // Additional folding + 0x1D6B1: []rune{0x03BA}, // Additional folding + 0x1D6B2: []rune{0x03BB}, // Additional folding + 0x1D6B3: []rune{0x03BC}, // Additional folding + 0x1D6B4: []rune{0x03BD}, // Additional folding + 0x1D6B5: []rune{0x03BE}, // Additional folding + 0x1D6B6: []rune{0x03BF}, // Additional folding + 0x1D6B7: []rune{0x03C0}, // Additional folding + 0x1D6B8: []rune{0x03C1}, // Additional folding + 0x1D6B9: []rune{0x03B8}, // Additional folding + 0x1D6BA: []rune{0x03C3}, // Additional folding + 0x1D6BB: []rune{0x03C4}, // Additional folding + 0x1D6BC: []rune{0x03C5}, // Additional folding + 0x1D6BD: []rune{0x03C6}, // Additional folding + 0x1D6BE: []rune{0x03C7}, // Additional folding + 0x1D6BF: []rune{0x03C8}, // Additional folding + 0x1D6C0: []rune{0x03C9}, // Additional folding + 0x1D6D3: []rune{0x03C3}, // Additional folding + 0x1D6E2: []rune{0x03B1}, // Additional folding + 0x1D6E3: []rune{0x03B2}, // Additional folding + 0x1D6E4: []rune{0x03B3}, // Additional folding + 0x1D6E5: []rune{0x03B4}, // Additional folding + 0x1D6E6: []rune{0x03B5}, // Additional folding + 0x1D6E7: []rune{0x03B6}, // Additional folding + 0x1D6E8: []rune{0x03B7}, // Additional folding + 0x1D6E9: []rune{0x03B8}, // Additional folding + 0x1D6EA: []rune{0x03B9}, // Additional folding + 0x1D6EB: []rune{0x03BA}, // Additional folding + 0x1D6EC: []rune{0x03BB}, // Additional folding + 0x1D6ED: []rune{0x03BC}, // Additional folding + 0x1D6EE: []rune{0x03BD}, // Additional folding + 0x1D6EF: []rune{0x03BE}, // Additional folding + 0x1D6F0: []rune{0x03BF}, // Additional folding + 0x1D6F1: []rune{0x03C0}, // Additional folding + 0x1D6F2: []rune{0x03C1}, // Additional folding + 0x1D6F3: []rune{0x03B8}, // Additional folding + 0x1D6F4: []rune{0x03C3}, // Additional folding + 0x1D6F5: []rune{0x03C4}, // Additional folding + 0x1D6F6: []rune{0x03C5}, // Additional folding + 0x1D6F7: []rune{0x03C6}, // Additional folding + 0x1D6F8: []rune{0x03C7}, // Additional folding + 0x1D6F9: []rune{0x03C8}, // Additional folding + 0x1D6FA: []rune{0x03C9}, // Additional folding + 0x1D70D: []rune{0x03C3}, // Additional folding + 0x1D71C: []rune{0x03B1}, // Additional folding + 0x1D71D: []rune{0x03B2}, // Additional folding + 0x1D71E: []rune{0x03B3}, // Additional folding + 0x1D71F: []rune{0x03B4}, // Additional folding + 0x1D720: []rune{0x03B5}, // Additional folding + 0x1D721: []rune{0x03B6}, // Additional folding + 0x1D722: []rune{0x03B7}, // Additional folding + 0x1D723: []rune{0x03B8}, // Additional folding + 0x1D724: []rune{0x03B9}, // Additional folding + 0x1D725: []rune{0x03BA}, // Additional folding + 0x1D726: []rune{0x03BB}, // Additional folding + 0x1D727: []rune{0x03BC}, // Additional folding + 0x1D728: []rune{0x03BD}, // Additional folding + 0x1D729: []rune{0x03BE}, // Additional folding + 0x1D72A: []rune{0x03BF}, // Additional folding + 0x1D72B: []rune{0x03C0}, // Additional folding + 0x1D72C: []rune{0x03C1}, // Additional folding + 0x1D72D: []rune{0x03B8}, // Additional folding + 0x1D72E: []rune{0x03C3}, // Additional folding + 0x1D72F: []rune{0x03C4}, // Additional folding + 0x1D730: []rune{0x03C5}, // Additional folding + 0x1D731: []rune{0x03C6}, // Additional folding + 0x1D732: []rune{0x03C7}, 
// Additional folding + 0x1D733: []rune{0x03C8}, // Additional folding + 0x1D734: []rune{0x03C9}, // Additional folding + 0x1D747: []rune{0x03C3}, // Additional folding + 0x1D756: []rune{0x03B1}, // Additional folding + 0x1D757: []rune{0x03B2}, // Additional folding + 0x1D758: []rune{0x03B3}, // Additional folding + 0x1D759: []rune{0x03B4}, // Additional folding + 0x1D75A: []rune{0x03B5}, // Additional folding + 0x1D75B: []rune{0x03B6}, // Additional folding + 0x1D75C: []rune{0x03B7}, // Additional folding + 0x1D75D: []rune{0x03B8}, // Additional folding + 0x1D75E: []rune{0x03B9}, // Additional folding + 0x1D75F: []rune{0x03BA}, // Additional folding + 0x1D760: []rune{0x03BB}, // Additional folding + 0x1D761: []rune{0x03BC}, // Additional folding + 0x1D762: []rune{0x03BD}, // Additional folding + 0x1D763: []rune{0x03BE}, // Additional folding + 0x1D764: []rune{0x03BF}, // Additional folding + 0x1D765: []rune{0x03C0}, // Additional folding + 0x1D766: []rune{0x03C1}, // Additional folding + 0x1D767: []rune{0x03B8}, // Additional folding + 0x1D768: []rune{0x03C3}, // Additional folding + 0x1D769: []rune{0x03C4}, // Additional folding + 0x1D76A: []rune{0x03C5}, // Additional folding + 0x1D76B: []rune{0x03C6}, // Additional folding + 0x1D76C: []rune{0x03C7}, // Additional folding + 0x1D76D: []rune{0x03C8}, // Additional folding + 0x1D76E: []rune{0x03C9}, // Additional folding + 0x1D781: []rune{0x03C3}, // Additional folding + 0x1D790: []rune{0x03B1}, // Additional folding + 0x1D791: []rune{0x03B2}, // Additional folding + 0x1D792: []rune{0x03B3}, // Additional folding + 0x1D793: []rune{0x03B4}, // Additional folding + 0x1D794: []rune{0x03B5}, // Additional folding + 0x1D795: []rune{0x03B6}, // Additional folding + 0x1D796: []rune{0x03B7}, // Additional folding + 0x1D797: []rune{0x03B8}, // Additional folding + 0x1D798: []rune{0x03B9}, // Additional folding + 0x1D799: []rune{0x03BA}, // Additional folding + 0x1D79A: []rune{0x03BB}, // Additional folding + 0x1D79B: []rune{0x03BC}, // Additional folding + 0x1D79C: []rune{0x03BD}, // Additional folding + 0x1D79D: []rune{0x03BE}, // Additional folding + 0x1D79E: []rune{0x03BF}, // Additional folding + 0x1D79F: []rune{0x03C0}, // Additional folding + 0x1D7A0: []rune{0x03C1}, // Additional folding + 0x1D7A1: []rune{0x03B8}, // Additional folding + 0x1D7A2: []rune{0x03C3}, // Additional folding + 0x1D7A3: []rune{0x03C4}, // Additional folding + 0x1D7A4: []rune{0x03C5}, // Additional folding + 0x1D7A5: []rune{0x03C6}, // Additional folding + 0x1D7A6: []rune{0x03C7}, // Additional folding + 0x1D7A7: []rune{0x03C8}, // Additional folding + 0x1D7A8: []rune{0x03C9}, // Additional folding + 0x1D7BB: []rune{0x03C3}, // Additional folding +} + +// TableB2 represents RFC-3454 Table B.2. 
+var TableB2 Mapping = tableB2 + +var tableB3 = Mapping{ + 0x0041: []rune{0x0061}, // Case map + 0x0042: []rune{0x0062}, // Case map + 0x0043: []rune{0x0063}, // Case map + 0x0044: []rune{0x0064}, // Case map + 0x0045: []rune{0x0065}, // Case map + 0x0046: []rune{0x0066}, // Case map + 0x0047: []rune{0x0067}, // Case map + 0x0048: []rune{0x0068}, // Case map + 0x0049: []rune{0x0069}, // Case map + 0x004A: []rune{0x006A}, // Case map + 0x004B: []rune{0x006B}, // Case map + 0x004C: []rune{0x006C}, // Case map + 0x004D: []rune{0x006D}, // Case map + 0x004E: []rune{0x006E}, // Case map + 0x004F: []rune{0x006F}, // Case map + 0x0050: []rune{0x0070}, // Case map + 0x0051: []rune{0x0071}, // Case map + 0x0052: []rune{0x0072}, // Case map + 0x0053: []rune{0x0073}, // Case map + 0x0054: []rune{0x0074}, // Case map + 0x0055: []rune{0x0075}, // Case map + 0x0056: []rune{0x0076}, // Case map + 0x0057: []rune{0x0077}, // Case map + 0x0058: []rune{0x0078}, // Case map + 0x0059: []rune{0x0079}, // Case map + 0x005A: []rune{0x007A}, // Case map + 0x00B5: []rune{0x03BC}, // Case map + 0x00C0: []rune{0x00E0}, // Case map + 0x00C1: []rune{0x00E1}, // Case map + 0x00C2: []rune{0x00E2}, // Case map + 0x00C3: []rune{0x00E3}, // Case map + 0x00C4: []rune{0x00E4}, // Case map + 0x00C5: []rune{0x00E5}, // Case map + 0x00C6: []rune{0x00E6}, // Case map + 0x00C7: []rune{0x00E7}, // Case map + 0x00C8: []rune{0x00E8}, // Case map + 0x00C9: []rune{0x00E9}, // Case map + 0x00CA: []rune{0x00EA}, // Case map + 0x00CB: []rune{0x00EB}, // Case map + 0x00CC: []rune{0x00EC}, // Case map + 0x00CD: []rune{0x00ED}, // Case map + 0x00CE: []rune{0x00EE}, // Case map + 0x00CF: []rune{0x00EF}, // Case map + 0x00D0: []rune{0x00F0}, // Case map + 0x00D1: []rune{0x00F1}, // Case map + 0x00D2: []rune{0x00F2}, // Case map + 0x00D3: []rune{0x00F3}, // Case map + 0x00D4: []rune{0x00F4}, // Case map + 0x00D5: []rune{0x00F5}, // Case map + 0x00D6: []rune{0x00F6}, // Case map + 0x00D8: []rune{0x00F8}, // Case map + 0x00D9: []rune{0x00F9}, // Case map + 0x00DA: []rune{0x00FA}, // Case map + 0x00DB: []rune{0x00FB}, // Case map + 0x00DC: []rune{0x00FC}, // Case map + 0x00DD: []rune{0x00FD}, // Case map + 0x00DE: []rune{0x00FE}, // Case map + 0x00DF: []rune{0x0073, 0x0073}, // Case map + 0x0100: []rune{0x0101}, // Case map + 0x0102: []rune{0x0103}, // Case map + 0x0104: []rune{0x0105}, // Case map + 0x0106: []rune{0x0107}, // Case map + 0x0108: []rune{0x0109}, // Case map + 0x010A: []rune{0x010B}, // Case map + 0x010C: []rune{0x010D}, // Case map + 0x010E: []rune{0x010F}, // Case map + 0x0110: []rune{0x0111}, // Case map + 0x0112: []rune{0x0113}, // Case map + 0x0114: []rune{0x0115}, // Case map + 0x0116: []rune{0x0117}, // Case map + 0x0118: []rune{0x0119}, // Case map + 0x011A: []rune{0x011B}, // Case map + 0x011C: []rune{0x011D}, // Case map + 0x011E: []rune{0x011F}, // Case map + 0x0120: []rune{0x0121}, // Case map + 0x0122: []rune{0x0123}, // Case map + 0x0124: []rune{0x0125}, // Case map + 0x0126: []rune{0x0127}, // Case map + 0x0128: []rune{0x0129}, // Case map + 0x012A: []rune{0x012B}, // Case map + 0x012C: []rune{0x012D}, // Case map + 0x012E: []rune{0x012F}, // Case map + 0x0130: []rune{0x0069, 0x0307}, // Case map + 0x0132: []rune{0x0133}, // Case map + 0x0134: []rune{0x0135}, // Case map + 0x0136: []rune{0x0137}, // Case map + 0x0139: []rune{0x013A}, // Case map + 0x013B: []rune{0x013C}, // Case map + 0x013D: []rune{0x013E}, // Case map + 0x013F: []rune{0x0140}, // Case map + 0x0141: []rune{0x0142}, // Case map + 0x0143: 
[]rune{0x0144}, // Case map + 0x0145: []rune{0x0146}, // Case map + 0x0147: []rune{0x0148}, // Case map + 0x0149: []rune{0x02BC, 0x006E}, // Case map + 0x014A: []rune{0x014B}, // Case map + 0x014C: []rune{0x014D}, // Case map + 0x014E: []rune{0x014F}, // Case map + 0x0150: []rune{0x0151}, // Case map + 0x0152: []rune{0x0153}, // Case map + 0x0154: []rune{0x0155}, // Case map + 0x0156: []rune{0x0157}, // Case map + 0x0158: []rune{0x0159}, // Case map + 0x015A: []rune{0x015B}, // Case map + 0x015C: []rune{0x015D}, // Case map + 0x015E: []rune{0x015F}, // Case map + 0x0160: []rune{0x0161}, // Case map + 0x0162: []rune{0x0163}, // Case map + 0x0164: []rune{0x0165}, // Case map + 0x0166: []rune{0x0167}, // Case map + 0x0168: []rune{0x0169}, // Case map + 0x016A: []rune{0x016B}, // Case map + 0x016C: []rune{0x016D}, // Case map + 0x016E: []rune{0x016F}, // Case map + 0x0170: []rune{0x0171}, // Case map + 0x0172: []rune{0x0173}, // Case map + 0x0174: []rune{0x0175}, // Case map + 0x0176: []rune{0x0177}, // Case map + 0x0178: []rune{0x00FF}, // Case map + 0x0179: []rune{0x017A}, // Case map + 0x017B: []rune{0x017C}, // Case map + 0x017D: []rune{0x017E}, // Case map + 0x017F: []rune{0x0073}, // Case map + 0x0181: []rune{0x0253}, // Case map + 0x0182: []rune{0x0183}, // Case map + 0x0184: []rune{0x0185}, // Case map + 0x0186: []rune{0x0254}, // Case map + 0x0187: []rune{0x0188}, // Case map + 0x0189: []rune{0x0256}, // Case map + 0x018A: []rune{0x0257}, // Case map + 0x018B: []rune{0x018C}, // Case map + 0x018E: []rune{0x01DD}, // Case map + 0x018F: []rune{0x0259}, // Case map + 0x0190: []rune{0x025B}, // Case map + 0x0191: []rune{0x0192}, // Case map + 0x0193: []rune{0x0260}, // Case map + 0x0194: []rune{0x0263}, // Case map + 0x0196: []rune{0x0269}, // Case map + 0x0197: []rune{0x0268}, // Case map + 0x0198: []rune{0x0199}, // Case map + 0x019C: []rune{0x026F}, // Case map + 0x019D: []rune{0x0272}, // Case map + 0x019F: []rune{0x0275}, // Case map + 0x01A0: []rune{0x01A1}, // Case map + 0x01A2: []rune{0x01A3}, // Case map + 0x01A4: []rune{0x01A5}, // Case map + 0x01A6: []rune{0x0280}, // Case map + 0x01A7: []rune{0x01A8}, // Case map + 0x01A9: []rune{0x0283}, // Case map + 0x01AC: []rune{0x01AD}, // Case map + 0x01AE: []rune{0x0288}, // Case map + 0x01AF: []rune{0x01B0}, // Case map + 0x01B1: []rune{0x028A}, // Case map + 0x01B2: []rune{0x028B}, // Case map + 0x01B3: []rune{0x01B4}, // Case map + 0x01B5: []rune{0x01B6}, // Case map + 0x01B7: []rune{0x0292}, // Case map + 0x01B8: []rune{0x01B9}, // Case map + 0x01BC: []rune{0x01BD}, // Case map + 0x01C4: []rune{0x01C6}, // Case map + 0x01C5: []rune{0x01C6}, // Case map + 0x01C7: []rune{0x01C9}, // Case map + 0x01C8: []rune{0x01C9}, // Case map + 0x01CA: []rune{0x01CC}, // Case map + 0x01CB: []rune{0x01CC}, // Case map + 0x01CD: []rune{0x01CE}, // Case map + 0x01CF: []rune{0x01D0}, // Case map + 0x01D1: []rune{0x01D2}, // Case map + 0x01D3: []rune{0x01D4}, // Case map + 0x01D5: []rune{0x01D6}, // Case map + 0x01D7: []rune{0x01D8}, // Case map + 0x01D9: []rune{0x01DA}, // Case map + 0x01DB: []rune{0x01DC}, // Case map + 0x01DE: []rune{0x01DF}, // Case map + 0x01E0: []rune{0x01E1}, // Case map + 0x01E2: []rune{0x01E3}, // Case map + 0x01E4: []rune{0x01E5}, // Case map + 0x01E6: []rune{0x01E7}, // Case map + 0x01E8: []rune{0x01E9}, // Case map + 0x01EA: []rune{0x01EB}, // Case map + 0x01EC: []rune{0x01ED}, // Case map + 0x01EE: []rune{0x01EF}, // Case map + 0x01F0: []rune{0x006A, 0x030C}, // Case map + 0x01F1: []rune{0x01F3}, // Case map + 0x01F2: 
[]rune{0x01F3}, // Case map + 0x01F4: []rune{0x01F5}, // Case map + 0x01F6: []rune{0x0195}, // Case map + 0x01F7: []rune{0x01BF}, // Case map + 0x01F8: []rune{0x01F9}, // Case map + 0x01FA: []rune{0x01FB}, // Case map + 0x01FC: []rune{0x01FD}, // Case map + 0x01FE: []rune{0x01FF}, // Case map + 0x0200: []rune{0x0201}, // Case map + 0x0202: []rune{0x0203}, // Case map + 0x0204: []rune{0x0205}, // Case map + 0x0206: []rune{0x0207}, // Case map + 0x0208: []rune{0x0209}, // Case map + 0x020A: []rune{0x020B}, // Case map + 0x020C: []rune{0x020D}, // Case map + 0x020E: []rune{0x020F}, // Case map + 0x0210: []rune{0x0211}, // Case map + 0x0212: []rune{0x0213}, // Case map + 0x0214: []rune{0x0215}, // Case map + 0x0216: []rune{0x0217}, // Case map + 0x0218: []rune{0x0219}, // Case map + 0x021A: []rune{0x021B}, // Case map + 0x021C: []rune{0x021D}, // Case map + 0x021E: []rune{0x021F}, // Case map + 0x0220: []rune{0x019E}, // Case map + 0x0222: []rune{0x0223}, // Case map + 0x0224: []rune{0x0225}, // Case map + 0x0226: []rune{0x0227}, // Case map + 0x0228: []rune{0x0229}, // Case map + 0x022A: []rune{0x022B}, // Case map + 0x022C: []rune{0x022D}, // Case map + 0x022E: []rune{0x022F}, // Case map + 0x0230: []rune{0x0231}, // Case map + 0x0232: []rune{0x0233}, // Case map + 0x0345: []rune{0x03B9}, // Case map + 0x0386: []rune{0x03AC}, // Case map + 0x0388: []rune{0x03AD}, // Case map + 0x0389: []rune{0x03AE}, // Case map + 0x038A: []rune{0x03AF}, // Case map + 0x038C: []rune{0x03CC}, // Case map + 0x038E: []rune{0x03CD}, // Case map + 0x038F: []rune{0x03CE}, // Case map + 0x0390: []rune{0x03B9, 0x0308, 0x0301}, // Case map + 0x0391: []rune{0x03B1}, // Case map + 0x0392: []rune{0x03B2}, // Case map + 0x0393: []rune{0x03B3}, // Case map + 0x0394: []rune{0x03B4}, // Case map + 0x0395: []rune{0x03B5}, // Case map + 0x0396: []rune{0x03B6}, // Case map + 0x0397: []rune{0x03B7}, // Case map + 0x0398: []rune{0x03B8}, // Case map + 0x0399: []rune{0x03B9}, // Case map + 0x039A: []rune{0x03BA}, // Case map + 0x039B: []rune{0x03BB}, // Case map + 0x039C: []rune{0x03BC}, // Case map + 0x039D: []rune{0x03BD}, // Case map + 0x039E: []rune{0x03BE}, // Case map + 0x039F: []rune{0x03BF}, // Case map + 0x03A0: []rune{0x03C0}, // Case map + 0x03A1: []rune{0x03C1}, // Case map + 0x03A3: []rune{0x03C3}, // Case map + 0x03A4: []rune{0x03C4}, // Case map + 0x03A5: []rune{0x03C5}, // Case map + 0x03A6: []rune{0x03C6}, // Case map + 0x03A7: []rune{0x03C7}, // Case map + 0x03A8: []rune{0x03C8}, // Case map + 0x03A9: []rune{0x03C9}, // Case map + 0x03AA: []rune{0x03CA}, // Case map + 0x03AB: []rune{0x03CB}, // Case map + 0x03B0: []rune{0x03C5, 0x0308, 0x0301}, // Case map + 0x03C2: []rune{0x03C3}, // Case map + 0x03D0: []rune{0x03B2}, // Case map + 0x03D1: []rune{0x03B8}, // Case map + 0x03D5: []rune{0x03C6}, // Case map + 0x03D6: []rune{0x03C0}, // Case map + 0x03D8: []rune{0x03D9}, // Case map + 0x03DA: []rune{0x03DB}, // Case map + 0x03DC: []rune{0x03DD}, // Case map + 0x03DE: []rune{0x03DF}, // Case map + 0x03E0: []rune{0x03E1}, // Case map + 0x03E2: []rune{0x03E3}, // Case map + 0x03E4: []rune{0x03E5}, // Case map + 0x03E6: []rune{0x03E7}, // Case map + 0x03E8: []rune{0x03E9}, // Case map + 0x03EA: []rune{0x03EB}, // Case map + 0x03EC: []rune{0x03ED}, // Case map + 0x03EE: []rune{0x03EF}, // Case map + 0x03F0: []rune{0x03BA}, // Case map + 0x03F1: []rune{0x03C1}, // Case map + 0x03F2: []rune{0x03C3}, // Case map + 0x03F4: []rune{0x03B8}, // Case map + 0x03F5: []rune{0x03B5}, // Case map + 0x0400: []rune{0x0450}, // Case 
map + 0x0401: []rune{0x0451}, // Case map + 0x0402: []rune{0x0452}, // Case map + 0x0403: []rune{0x0453}, // Case map + 0x0404: []rune{0x0454}, // Case map + 0x0405: []rune{0x0455}, // Case map + 0x0406: []rune{0x0456}, // Case map + 0x0407: []rune{0x0457}, // Case map + 0x0408: []rune{0x0458}, // Case map + 0x0409: []rune{0x0459}, // Case map + 0x040A: []rune{0x045A}, // Case map + 0x040B: []rune{0x045B}, // Case map + 0x040C: []rune{0x045C}, // Case map + 0x040D: []rune{0x045D}, // Case map + 0x040E: []rune{0x045E}, // Case map + 0x040F: []rune{0x045F}, // Case map + 0x0410: []rune{0x0430}, // Case map + 0x0411: []rune{0x0431}, // Case map + 0x0412: []rune{0x0432}, // Case map + 0x0413: []rune{0x0433}, // Case map + 0x0414: []rune{0x0434}, // Case map + 0x0415: []rune{0x0435}, // Case map + 0x0416: []rune{0x0436}, // Case map + 0x0417: []rune{0x0437}, // Case map + 0x0418: []rune{0x0438}, // Case map + 0x0419: []rune{0x0439}, // Case map + 0x041A: []rune{0x043A}, // Case map + 0x041B: []rune{0x043B}, // Case map + 0x041C: []rune{0x043C}, // Case map + 0x041D: []rune{0x043D}, // Case map + 0x041E: []rune{0x043E}, // Case map + 0x041F: []rune{0x043F}, // Case map + 0x0420: []rune{0x0440}, // Case map + 0x0421: []rune{0x0441}, // Case map + 0x0422: []rune{0x0442}, // Case map + 0x0423: []rune{0x0443}, // Case map + 0x0424: []rune{0x0444}, // Case map + 0x0425: []rune{0x0445}, // Case map + 0x0426: []rune{0x0446}, // Case map + 0x0427: []rune{0x0447}, // Case map + 0x0428: []rune{0x0448}, // Case map + 0x0429: []rune{0x0449}, // Case map + 0x042A: []rune{0x044A}, // Case map + 0x042B: []rune{0x044B}, // Case map + 0x042C: []rune{0x044C}, // Case map + 0x042D: []rune{0x044D}, // Case map + 0x042E: []rune{0x044E}, // Case map + 0x042F: []rune{0x044F}, // Case map + 0x0460: []rune{0x0461}, // Case map + 0x0462: []rune{0x0463}, // Case map + 0x0464: []rune{0x0465}, // Case map + 0x0466: []rune{0x0467}, // Case map + 0x0468: []rune{0x0469}, // Case map + 0x046A: []rune{0x046B}, // Case map + 0x046C: []rune{0x046D}, // Case map + 0x046E: []rune{0x046F}, // Case map + 0x0470: []rune{0x0471}, // Case map + 0x0472: []rune{0x0473}, // Case map + 0x0474: []rune{0x0475}, // Case map + 0x0476: []rune{0x0477}, // Case map + 0x0478: []rune{0x0479}, // Case map + 0x047A: []rune{0x047B}, // Case map + 0x047C: []rune{0x047D}, // Case map + 0x047E: []rune{0x047F}, // Case map + 0x0480: []rune{0x0481}, // Case map + 0x048A: []rune{0x048B}, // Case map + 0x048C: []rune{0x048D}, // Case map + 0x048E: []rune{0x048F}, // Case map + 0x0490: []rune{0x0491}, // Case map + 0x0492: []rune{0x0493}, // Case map + 0x0494: []rune{0x0495}, // Case map + 0x0496: []rune{0x0497}, // Case map + 0x0498: []rune{0x0499}, // Case map + 0x049A: []rune{0x049B}, // Case map + 0x049C: []rune{0x049D}, // Case map + 0x049E: []rune{0x049F}, // Case map + 0x04A0: []rune{0x04A1}, // Case map + 0x04A2: []rune{0x04A3}, // Case map + 0x04A4: []rune{0x04A5}, // Case map + 0x04A6: []rune{0x04A7}, // Case map + 0x04A8: []rune{0x04A9}, // Case map + 0x04AA: []rune{0x04AB}, // Case map + 0x04AC: []rune{0x04AD}, // Case map + 0x04AE: []rune{0x04AF}, // Case map + 0x04B0: []rune{0x04B1}, // Case map + 0x04B2: []rune{0x04B3}, // Case map + 0x04B4: []rune{0x04B5}, // Case map + 0x04B6: []rune{0x04B7}, // Case map + 0x04B8: []rune{0x04B9}, // Case map + 0x04BA: []rune{0x04BB}, // Case map + 0x04BC: []rune{0x04BD}, // Case map + 0x04BE: []rune{0x04BF}, // Case map + 0x04C1: []rune{0x04C2}, // Case map + 0x04C3: []rune{0x04C4}, // Case map + 0x04C5: 
[]rune{0x04C6}, // Case map + 0x04C7: []rune{0x04C8}, // Case map + 0x04C9: []rune{0x04CA}, // Case map + 0x04CB: []rune{0x04CC}, // Case map + 0x04CD: []rune{0x04CE}, // Case map + 0x04D0: []rune{0x04D1}, // Case map + 0x04D2: []rune{0x04D3}, // Case map + 0x04D4: []rune{0x04D5}, // Case map + 0x04D6: []rune{0x04D7}, // Case map + 0x04D8: []rune{0x04D9}, // Case map + 0x04DA: []rune{0x04DB}, // Case map + 0x04DC: []rune{0x04DD}, // Case map + 0x04DE: []rune{0x04DF}, // Case map + 0x04E0: []rune{0x04E1}, // Case map + 0x04E2: []rune{0x04E3}, // Case map + 0x04E4: []rune{0x04E5}, // Case map + 0x04E6: []rune{0x04E7}, // Case map + 0x04E8: []rune{0x04E9}, // Case map + 0x04EA: []rune{0x04EB}, // Case map + 0x04EC: []rune{0x04ED}, // Case map + 0x04EE: []rune{0x04EF}, // Case map + 0x04F0: []rune{0x04F1}, // Case map + 0x04F2: []rune{0x04F3}, // Case map + 0x04F4: []rune{0x04F5}, // Case map + 0x04F8: []rune{0x04F9}, // Case map + 0x0500: []rune{0x0501}, // Case map + 0x0502: []rune{0x0503}, // Case map + 0x0504: []rune{0x0505}, // Case map + 0x0506: []rune{0x0507}, // Case map + 0x0508: []rune{0x0509}, // Case map + 0x050A: []rune{0x050B}, // Case map + 0x050C: []rune{0x050D}, // Case map + 0x050E: []rune{0x050F}, // Case map + 0x0531: []rune{0x0561}, // Case map + 0x0532: []rune{0x0562}, // Case map + 0x0533: []rune{0x0563}, // Case map + 0x0534: []rune{0x0564}, // Case map + 0x0535: []rune{0x0565}, // Case map + 0x0536: []rune{0x0566}, // Case map + 0x0537: []rune{0x0567}, // Case map + 0x0538: []rune{0x0568}, // Case map + 0x0539: []rune{0x0569}, // Case map + 0x053A: []rune{0x056A}, // Case map + 0x053B: []rune{0x056B}, // Case map + 0x053C: []rune{0x056C}, // Case map + 0x053D: []rune{0x056D}, // Case map + 0x053E: []rune{0x056E}, // Case map + 0x053F: []rune{0x056F}, // Case map + 0x0540: []rune{0x0570}, // Case map + 0x0541: []rune{0x0571}, // Case map + 0x0542: []rune{0x0572}, // Case map + 0x0543: []rune{0x0573}, // Case map + 0x0544: []rune{0x0574}, // Case map + 0x0545: []rune{0x0575}, // Case map + 0x0546: []rune{0x0576}, // Case map + 0x0547: []rune{0x0577}, // Case map + 0x0548: []rune{0x0578}, // Case map + 0x0549: []rune{0x0579}, // Case map + 0x054A: []rune{0x057A}, // Case map + 0x054B: []rune{0x057B}, // Case map + 0x054C: []rune{0x057C}, // Case map + 0x054D: []rune{0x057D}, // Case map + 0x054E: []rune{0x057E}, // Case map + 0x054F: []rune{0x057F}, // Case map + 0x0550: []rune{0x0580}, // Case map + 0x0551: []rune{0x0581}, // Case map + 0x0552: []rune{0x0582}, // Case map + 0x0553: []rune{0x0583}, // Case map + 0x0554: []rune{0x0584}, // Case map + 0x0555: []rune{0x0585}, // Case map + 0x0556: []rune{0x0586}, // Case map + 0x0587: []rune{0x0565, 0x0582}, // Case map + 0x1E00: []rune{0x1E01}, // Case map + 0x1E02: []rune{0x1E03}, // Case map + 0x1E04: []rune{0x1E05}, // Case map + 0x1E06: []rune{0x1E07}, // Case map + 0x1E08: []rune{0x1E09}, // Case map + 0x1E0A: []rune{0x1E0B}, // Case map + 0x1E0C: []rune{0x1E0D}, // Case map + 0x1E0E: []rune{0x1E0F}, // Case map + 0x1E10: []rune{0x1E11}, // Case map + 0x1E12: []rune{0x1E13}, // Case map + 0x1E14: []rune{0x1E15}, // Case map + 0x1E16: []rune{0x1E17}, // Case map + 0x1E18: []rune{0x1E19}, // Case map + 0x1E1A: []rune{0x1E1B}, // Case map + 0x1E1C: []rune{0x1E1D}, // Case map + 0x1E1E: []rune{0x1E1F}, // Case map + 0x1E20: []rune{0x1E21}, // Case map + 0x1E22: []rune{0x1E23}, // Case map + 0x1E24: []rune{0x1E25}, // Case map + 0x1E26: []rune{0x1E27}, // Case map + 0x1E28: []rune{0x1E29}, // Case map + 0x1E2A: 
[]rune{0x1E2B}, // Case map + 0x1E2C: []rune{0x1E2D}, // Case map + 0x1E2E: []rune{0x1E2F}, // Case map + 0x1E30: []rune{0x1E31}, // Case map + 0x1E32: []rune{0x1E33}, // Case map + 0x1E34: []rune{0x1E35}, // Case map + 0x1E36: []rune{0x1E37}, // Case map + 0x1E38: []rune{0x1E39}, // Case map + 0x1E3A: []rune{0x1E3B}, // Case map + 0x1E3C: []rune{0x1E3D}, // Case map + 0x1E3E: []rune{0x1E3F}, // Case map + 0x1E40: []rune{0x1E41}, // Case map + 0x1E42: []rune{0x1E43}, // Case map + 0x1E44: []rune{0x1E45}, // Case map + 0x1E46: []rune{0x1E47}, // Case map + 0x1E48: []rune{0x1E49}, // Case map + 0x1E4A: []rune{0x1E4B}, // Case map + 0x1E4C: []rune{0x1E4D}, // Case map + 0x1E4E: []rune{0x1E4F}, // Case map + 0x1E50: []rune{0x1E51}, // Case map + 0x1E52: []rune{0x1E53}, // Case map + 0x1E54: []rune{0x1E55}, // Case map + 0x1E56: []rune{0x1E57}, // Case map + 0x1E58: []rune{0x1E59}, // Case map + 0x1E5A: []rune{0x1E5B}, // Case map + 0x1E5C: []rune{0x1E5D}, // Case map + 0x1E5E: []rune{0x1E5F}, // Case map + 0x1E60: []rune{0x1E61}, // Case map + 0x1E62: []rune{0x1E63}, // Case map + 0x1E64: []rune{0x1E65}, // Case map + 0x1E66: []rune{0x1E67}, // Case map + 0x1E68: []rune{0x1E69}, // Case map + 0x1E6A: []rune{0x1E6B}, // Case map + 0x1E6C: []rune{0x1E6D}, // Case map + 0x1E6E: []rune{0x1E6F}, // Case map + 0x1E70: []rune{0x1E71}, // Case map + 0x1E72: []rune{0x1E73}, // Case map + 0x1E74: []rune{0x1E75}, // Case map + 0x1E76: []rune{0x1E77}, // Case map + 0x1E78: []rune{0x1E79}, // Case map + 0x1E7A: []rune{0x1E7B}, // Case map + 0x1E7C: []rune{0x1E7D}, // Case map + 0x1E7E: []rune{0x1E7F}, // Case map + 0x1E80: []rune{0x1E81}, // Case map + 0x1E82: []rune{0x1E83}, // Case map + 0x1E84: []rune{0x1E85}, // Case map + 0x1E86: []rune{0x1E87}, // Case map + 0x1E88: []rune{0x1E89}, // Case map + 0x1E8A: []rune{0x1E8B}, // Case map + 0x1E8C: []rune{0x1E8D}, // Case map + 0x1E8E: []rune{0x1E8F}, // Case map + 0x1E90: []rune{0x1E91}, // Case map + 0x1E92: []rune{0x1E93}, // Case map + 0x1E94: []rune{0x1E95}, // Case map + 0x1E96: []rune{0x0068, 0x0331}, // Case map + 0x1E97: []rune{0x0074, 0x0308}, // Case map + 0x1E98: []rune{0x0077, 0x030A}, // Case map + 0x1E99: []rune{0x0079, 0x030A}, // Case map + 0x1E9A: []rune{0x0061, 0x02BE}, // Case map + 0x1E9B: []rune{0x1E61}, // Case map + 0x1EA0: []rune{0x1EA1}, // Case map + 0x1EA2: []rune{0x1EA3}, // Case map + 0x1EA4: []rune{0x1EA5}, // Case map + 0x1EA6: []rune{0x1EA7}, // Case map + 0x1EA8: []rune{0x1EA9}, // Case map + 0x1EAA: []rune{0x1EAB}, // Case map + 0x1EAC: []rune{0x1EAD}, // Case map + 0x1EAE: []rune{0x1EAF}, // Case map + 0x1EB0: []rune{0x1EB1}, // Case map + 0x1EB2: []rune{0x1EB3}, // Case map + 0x1EB4: []rune{0x1EB5}, // Case map + 0x1EB6: []rune{0x1EB7}, // Case map + 0x1EB8: []rune{0x1EB9}, // Case map + 0x1EBA: []rune{0x1EBB}, // Case map + 0x1EBC: []rune{0x1EBD}, // Case map + 0x1EBE: []rune{0x1EBF}, // Case map + 0x1EC0: []rune{0x1EC1}, // Case map + 0x1EC2: []rune{0x1EC3}, // Case map + 0x1EC4: []rune{0x1EC5}, // Case map + 0x1EC6: []rune{0x1EC7}, // Case map + 0x1EC8: []rune{0x1EC9}, // Case map + 0x1ECA: []rune{0x1ECB}, // Case map + 0x1ECC: []rune{0x1ECD}, // Case map + 0x1ECE: []rune{0x1ECF}, // Case map + 0x1ED0: []rune{0x1ED1}, // Case map + 0x1ED2: []rune{0x1ED3}, // Case map + 0x1ED4: []rune{0x1ED5}, // Case map + 0x1ED6: []rune{0x1ED7}, // Case map + 0x1ED8: []rune{0x1ED9}, // Case map + 0x1EDA: []rune{0x1EDB}, // Case map + 0x1EDC: []rune{0x1EDD}, // Case map + 0x1EDE: []rune{0x1EDF}, // Case map + 0x1EE0: []rune{0x1EE1}, // 
Case map + 0x1EE2: []rune{0x1EE3}, // Case map + 0x1EE4: []rune{0x1EE5}, // Case map + 0x1EE6: []rune{0x1EE7}, // Case map + 0x1EE8: []rune{0x1EE9}, // Case map + 0x1EEA: []rune{0x1EEB}, // Case map + 0x1EEC: []rune{0x1EED}, // Case map + 0x1EEE: []rune{0x1EEF}, // Case map + 0x1EF0: []rune{0x1EF1}, // Case map + 0x1EF2: []rune{0x1EF3}, // Case map + 0x1EF4: []rune{0x1EF5}, // Case map + 0x1EF6: []rune{0x1EF7}, // Case map + 0x1EF8: []rune{0x1EF9}, // Case map + 0x1F08: []rune{0x1F00}, // Case map + 0x1F09: []rune{0x1F01}, // Case map + 0x1F0A: []rune{0x1F02}, // Case map + 0x1F0B: []rune{0x1F03}, // Case map + 0x1F0C: []rune{0x1F04}, // Case map + 0x1F0D: []rune{0x1F05}, // Case map + 0x1F0E: []rune{0x1F06}, // Case map + 0x1F0F: []rune{0x1F07}, // Case map + 0x1F18: []rune{0x1F10}, // Case map + 0x1F19: []rune{0x1F11}, // Case map + 0x1F1A: []rune{0x1F12}, // Case map + 0x1F1B: []rune{0x1F13}, // Case map + 0x1F1C: []rune{0x1F14}, // Case map + 0x1F1D: []rune{0x1F15}, // Case map + 0x1F28: []rune{0x1F20}, // Case map + 0x1F29: []rune{0x1F21}, // Case map + 0x1F2A: []rune{0x1F22}, // Case map + 0x1F2B: []rune{0x1F23}, // Case map + 0x1F2C: []rune{0x1F24}, // Case map + 0x1F2D: []rune{0x1F25}, // Case map + 0x1F2E: []rune{0x1F26}, // Case map + 0x1F2F: []rune{0x1F27}, // Case map + 0x1F38: []rune{0x1F30}, // Case map + 0x1F39: []rune{0x1F31}, // Case map + 0x1F3A: []rune{0x1F32}, // Case map + 0x1F3B: []rune{0x1F33}, // Case map + 0x1F3C: []rune{0x1F34}, // Case map + 0x1F3D: []rune{0x1F35}, // Case map + 0x1F3E: []rune{0x1F36}, // Case map + 0x1F3F: []rune{0x1F37}, // Case map + 0x1F48: []rune{0x1F40}, // Case map + 0x1F49: []rune{0x1F41}, // Case map + 0x1F4A: []rune{0x1F42}, // Case map + 0x1F4B: []rune{0x1F43}, // Case map + 0x1F4C: []rune{0x1F44}, // Case map + 0x1F4D: []rune{0x1F45}, // Case map + 0x1F50: []rune{0x03C5, 0x0313}, // Case map + 0x1F52: []rune{0x03C5, 0x0313, 0x0300}, // Case map + 0x1F54: []rune{0x03C5, 0x0313, 0x0301}, // Case map + 0x1F56: []rune{0x03C5, 0x0313, 0x0342}, // Case map + 0x1F59: []rune{0x1F51}, // Case map + 0x1F5B: []rune{0x1F53}, // Case map + 0x1F5D: []rune{0x1F55}, // Case map + 0x1F5F: []rune{0x1F57}, // Case map + 0x1F68: []rune{0x1F60}, // Case map + 0x1F69: []rune{0x1F61}, // Case map + 0x1F6A: []rune{0x1F62}, // Case map + 0x1F6B: []rune{0x1F63}, // Case map + 0x1F6C: []rune{0x1F64}, // Case map + 0x1F6D: []rune{0x1F65}, // Case map + 0x1F6E: []rune{0x1F66}, // Case map + 0x1F6F: []rune{0x1F67}, // Case map + 0x1F80: []rune{0x1F00, 0x03B9}, // Case map + 0x1F81: []rune{0x1F01, 0x03B9}, // Case map + 0x1F82: []rune{0x1F02, 0x03B9}, // Case map + 0x1F83: []rune{0x1F03, 0x03B9}, // Case map + 0x1F84: []rune{0x1F04, 0x03B9}, // Case map + 0x1F85: []rune{0x1F05, 0x03B9}, // Case map + 0x1F86: []rune{0x1F06, 0x03B9}, // Case map + 0x1F87: []rune{0x1F07, 0x03B9}, // Case map + 0x1F88: []rune{0x1F00, 0x03B9}, // Case map + 0x1F89: []rune{0x1F01, 0x03B9}, // Case map + 0x1F8A: []rune{0x1F02, 0x03B9}, // Case map + 0x1F8B: []rune{0x1F03, 0x03B9}, // Case map + 0x1F8C: []rune{0x1F04, 0x03B9}, // Case map + 0x1F8D: []rune{0x1F05, 0x03B9}, // Case map + 0x1F8E: []rune{0x1F06, 0x03B9}, // Case map + 0x1F8F: []rune{0x1F07, 0x03B9}, // Case map + 0x1F90: []rune{0x1F20, 0x03B9}, // Case map + 0x1F91: []rune{0x1F21, 0x03B9}, // Case map + 0x1F92: []rune{0x1F22, 0x03B9}, // Case map + 0x1F93: []rune{0x1F23, 0x03B9}, // Case map + 0x1F94: []rune{0x1F24, 0x03B9}, // Case map + 0x1F95: []rune{0x1F25, 0x03B9}, // Case map + 0x1F96: []rune{0x1F26, 0x03B9}, // Case map 
+ 0x1F97: []rune{0x1F27, 0x03B9}, // Case map + 0x1F98: []rune{0x1F20, 0x03B9}, // Case map + 0x1F99: []rune{0x1F21, 0x03B9}, // Case map + 0x1F9A: []rune{0x1F22, 0x03B9}, // Case map + 0x1F9B: []rune{0x1F23, 0x03B9}, // Case map + 0x1F9C: []rune{0x1F24, 0x03B9}, // Case map + 0x1F9D: []rune{0x1F25, 0x03B9}, // Case map + 0x1F9E: []rune{0x1F26, 0x03B9}, // Case map + 0x1F9F: []rune{0x1F27, 0x03B9}, // Case map + 0x1FA0: []rune{0x1F60, 0x03B9}, // Case map + 0x1FA1: []rune{0x1F61, 0x03B9}, // Case map + 0x1FA2: []rune{0x1F62, 0x03B9}, // Case map + 0x1FA3: []rune{0x1F63, 0x03B9}, // Case map + 0x1FA4: []rune{0x1F64, 0x03B9}, // Case map + 0x1FA5: []rune{0x1F65, 0x03B9}, // Case map + 0x1FA6: []rune{0x1F66, 0x03B9}, // Case map + 0x1FA7: []rune{0x1F67, 0x03B9}, // Case map + 0x1FA8: []rune{0x1F60, 0x03B9}, // Case map + 0x1FA9: []rune{0x1F61, 0x03B9}, // Case map + 0x1FAA: []rune{0x1F62, 0x03B9}, // Case map + 0x1FAB: []rune{0x1F63, 0x03B9}, // Case map + 0x1FAC: []rune{0x1F64, 0x03B9}, // Case map + 0x1FAD: []rune{0x1F65, 0x03B9}, // Case map + 0x1FAE: []rune{0x1F66, 0x03B9}, // Case map + 0x1FAF: []rune{0x1F67, 0x03B9}, // Case map + 0x1FB2: []rune{0x1F70, 0x03B9}, // Case map + 0x1FB3: []rune{0x03B1, 0x03B9}, // Case map + 0x1FB4: []rune{0x03AC, 0x03B9}, // Case map + 0x1FB6: []rune{0x03B1, 0x0342}, // Case map + 0x1FB7: []rune{0x03B1, 0x0342, 0x03B9}, // Case map + 0x1FB8: []rune{0x1FB0}, // Case map + 0x1FB9: []rune{0x1FB1}, // Case map + 0x1FBA: []rune{0x1F70}, // Case map + 0x1FBB: []rune{0x1F71}, // Case map + 0x1FBC: []rune{0x03B1, 0x03B9}, // Case map + 0x1FBE: []rune{0x03B9}, // Case map + 0x1FC2: []rune{0x1F74, 0x03B9}, // Case map + 0x1FC3: []rune{0x03B7, 0x03B9}, // Case map + 0x1FC4: []rune{0x03AE, 0x03B9}, // Case map + 0x1FC6: []rune{0x03B7, 0x0342}, // Case map + 0x1FC7: []rune{0x03B7, 0x0342, 0x03B9}, // Case map + 0x1FC8: []rune{0x1F72}, // Case map + 0x1FC9: []rune{0x1F73}, // Case map + 0x1FCA: []rune{0x1F74}, // Case map + 0x1FCB: []rune{0x1F75}, // Case map + 0x1FCC: []rune{0x03B7, 0x03B9}, // Case map + 0x1FD2: []rune{0x03B9, 0x0308, 0x0300}, // Case map + 0x1FD3: []rune{0x03B9, 0x0308, 0x0301}, // Case map + 0x1FD6: []rune{0x03B9, 0x0342}, // Case map + 0x1FD7: []rune{0x03B9, 0x0308, 0x0342}, // Case map + 0x1FD8: []rune{0x1FD0}, // Case map + 0x1FD9: []rune{0x1FD1}, // Case map + 0x1FDA: []rune{0x1F76}, // Case map + 0x1FDB: []rune{0x1F77}, // Case map + 0x1FE2: []rune{0x03C5, 0x0308, 0x0300}, // Case map + 0x1FE3: []rune{0x03C5, 0x0308, 0x0301}, // Case map + 0x1FE4: []rune{0x03C1, 0x0313}, // Case map + 0x1FE6: []rune{0x03C5, 0x0342}, // Case map + 0x1FE7: []rune{0x03C5, 0x0308, 0x0342}, // Case map + 0x1FE8: []rune{0x1FE0}, // Case map + 0x1FE9: []rune{0x1FE1}, // Case map + 0x1FEA: []rune{0x1F7A}, // Case map + 0x1FEB: []rune{0x1F7B}, // Case map + 0x1FEC: []rune{0x1FE5}, // Case map + 0x1FF2: []rune{0x1F7C, 0x03B9}, // Case map + 0x1FF3: []rune{0x03C9, 0x03B9}, // Case map + 0x1FF4: []rune{0x03CE, 0x03B9}, // Case map + 0x1FF6: []rune{0x03C9, 0x0342}, // Case map + 0x1FF7: []rune{0x03C9, 0x0342, 0x03B9}, // Case map + 0x1FF8: []rune{0x1F78}, // Case map + 0x1FF9: []rune{0x1F79}, // Case map + 0x1FFA: []rune{0x1F7C}, // Case map + 0x1FFB: []rune{0x1F7D}, // Case map + 0x1FFC: []rune{0x03C9, 0x03B9}, // Case map + 0x2126: []rune{0x03C9}, // Case map + 0x212A: []rune{0x006B}, // Case map + 0x212B: []rune{0x00E5}, // Case map + 0x2160: []rune{0x2170}, // Case map + 0x2161: []rune{0x2171}, // Case map + 0x2162: []rune{0x2172}, // Case map + 0x2163: []rune{0x2173}, 
// Case map + 0x2164: []rune{0x2174}, // Case map + 0x2165: []rune{0x2175}, // Case map + 0x2166: []rune{0x2176}, // Case map + 0x2167: []rune{0x2177}, // Case map + 0x2168: []rune{0x2178}, // Case map + 0x2169: []rune{0x2179}, // Case map + 0x216A: []rune{0x217A}, // Case map + 0x216B: []rune{0x217B}, // Case map + 0x216C: []rune{0x217C}, // Case map + 0x216D: []rune{0x217D}, // Case map + 0x216E: []rune{0x217E}, // Case map + 0x216F: []rune{0x217F}, // Case map + 0x24B6: []rune{0x24D0}, // Case map + 0x24B7: []rune{0x24D1}, // Case map + 0x24B8: []rune{0x24D2}, // Case map + 0x24B9: []rune{0x24D3}, // Case map + 0x24BA: []rune{0x24D4}, // Case map + 0x24BB: []rune{0x24D5}, // Case map + 0x24BC: []rune{0x24D6}, // Case map + 0x24BD: []rune{0x24D7}, // Case map + 0x24BE: []rune{0x24D8}, // Case map + 0x24BF: []rune{0x24D9}, // Case map + 0x24C0: []rune{0x24DA}, // Case map + 0x24C1: []rune{0x24DB}, // Case map + 0x24C2: []rune{0x24DC}, // Case map + 0x24C3: []rune{0x24DD}, // Case map + 0x24C4: []rune{0x24DE}, // Case map + 0x24C5: []rune{0x24DF}, // Case map + 0x24C6: []rune{0x24E0}, // Case map + 0x24C7: []rune{0x24E1}, // Case map + 0x24C8: []rune{0x24E2}, // Case map + 0x24C9: []rune{0x24E3}, // Case map + 0x24CA: []rune{0x24E4}, // Case map + 0x24CB: []rune{0x24E5}, // Case map + 0x24CC: []rune{0x24E6}, // Case map + 0x24CD: []rune{0x24E7}, // Case map + 0x24CE: []rune{0x24E8}, // Case map + 0x24CF: []rune{0x24E9}, // Case map + 0xFB00: []rune{0x0066, 0x0066}, // Case map + 0xFB01: []rune{0x0066, 0x0069}, // Case map + 0xFB02: []rune{0x0066, 0x006C}, // Case map + 0xFB03: []rune{0x0066, 0x0066, 0x0069}, // Case map + 0xFB04: []rune{0x0066, 0x0066, 0x006C}, // Case map + 0xFB05: []rune{0x0073, 0x0074}, // Case map + 0xFB06: []rune{0x0073, 0x0074}, // Case map + 0xFB13: []rune{0x0574, 0x0576}, // Case map + 0xFB14: []rune{0x0574, 0x0565}, // Case map + 0xFB15: []rune{0x0574, 0x056B}, // Case map + 0xFB16: []rune{0x057E, 0x0576}, // Case map + 0xFB17: []rune{0x0574, 0x056D}, // Case map + 0xFF21: []rune{0xFF41}, // Case map + 0xFF22: []rune{0xFF42}, // Case map + 0xFF23: []rune{0xFF43}, // Case map + 0xFF24: []rune{0xFF44}, // Case map + 0xFF25: []rune{0xFF45}, // Case map + 0xFF26: []rune{0xFF46}, // Case map + 0xFF27: []rune{0xFF47}, // Case map + 0xFF28: []rune{0xFF48}, // Case map + 0xFF29: []rune{0xFF49}, // Case map + 0xFF2A: []rune{0xFF4A}, // Case map + 0xFF2B: []rune{0xFF4B}, // Case map + 0xFF2C: []rune{0xFF4C}, // Case map + 0xFF2D: []rune{0xFF4D}, // Case map + 0xFF2E: []rune{0xFF4E}, // Case map + 0xFF2F: []rune{0xFF4F}, // Case map + 0xFF30: []rune{0xFF50}, // Case map + 0xFF31: []rune{0xFF51}, // Case map + 0xFF32: []rune{0xFF52}, // Case map + 0xFF33: []rune{0xFF53}, // Case map + 0xFF34: []rune{0xFF54}, // Case map + 0xFF35: []rune{0xFF55}, // Case map + 0xFF36: []rune{0xFF56}, // Case map + 0xFF37: []rune{0xFF57}, // Case map + 0xFF38: []rune{0xFF58}, // Case map + 0xFF39: []rune{0xFF59}, // Case map + 0xFF3A: []rune{0xFF5A}, // Case map + 0x10400: []rune{0x10428}, // Case map + 0x10401: []rune{0x10429}, // Case map + 0x10402: []rune{0x1042A}, // Case map + 0x10403: []rune{0x1042B}, // Case map + 0x10404: []rune{0x1042C}, // Case map + 0x10405: []rune{0x1042D}, // Case map + 0x10406: []rune{0x1042E}, // Case map + 0x10407: []rune{0x1042F}, // Case map + 0x10408: []rune{0x10430}, // Case map + 0x10409: []rune{0x10431}, // Case map + 0x1040A: []rune{0x10432}, // Case map + 0x1040B: []rune{0x10433}, // Case map + 0x1040C: []rune{0x10434}, // Case map + 0x1040D: 
[]rune{0x10435}, // Case map
+ 0x1040E: []rune{0x10436}, // Case map
+ 0x1040F: []rune{0x10437}, // Case map
+ 0x10410: []rune{0x10438}, // Case map
+ 0x10411: []rune{0x10439}, // Case map
+ 0x10412: []rune{0x1043A}, // Case map
+ 0x10413: []rune{0x1043B}, // Case map
+ 0x10414: []rune{0x1043C}, // Case map
+ 0x10415: []rune{0x1043D}, // Case map
+ 0x10416: []rune{0x1043E}, // Case map
+ 0x10417: []rune{0x1043F}, // Case map
+ 0x10418: []rune{0x10440}, // Case map
+ 0x10419: []rune{0x10441}, // Case map
+ 0x1041A: []rune{0x10442}, // Case map
+ 0x1041B: []rune{0x10443}, // Case map
+ 0x1041C: []rune{0x10444}, // Case map
+ 0x1041D: []rune{0x10445}, // Case map
+ 0x1041E: []rune{0x10446}, // Case map
+ 0x1041F: []rune{0x10447}, // Case map
+ 0x10420: []rune{0x10448}, // Case map
+ 0x10421: []rune{0x10449}, // Case map
+ 0x10422: []rune{0x1044A}, // Case map
+ 0x10423: []rune{0x1044B}, // Case map
+ 0x10424: []rune{0x1044C}, // Case map
+ 0x10425: []rune{0x1044D}, // Case map
+}
+
+// TableB3 represents RFC-3454 Table B.3.
+var TableB3 Mapping = tableB3
+
+var tableC1_1 = Set{
+ RuneRange{0x0020, 0x0020}, // SPACE
+}
+
+// TableC1_1 represents RFC-3454 Table C.1.1.
+var TableC1_1 Set = tableC1_1
+
+var tableC1_2 = Set{
+ RuneRange{0x00A0, 0x00A0}, // NO-BREAK SPACE
+ RuneRange{0x1680, 0x1680}, // OGHAM SPACE MARK
+ RuneRange{0x2000, 0x2000}, // EN QUAD
+ RuneRange{0x2001, 0x2001}, // EM QUAD
+ RuneRange{0x2002, 0x2002}, // EN SPACE
+ RuneRange{0x2003, 0x2003}, // EM SPACE
+ RuneRange{0x2004, 0x2004}, // THREE-PER-EM SPACE
+ RuneRange{0x2005, 0x2005}, // FOUR-PER-EM SPACE
+ RuneRange{0x2006, 0x2006}, // SIX-PER-EM SPACE
+ RuneRange{0x2007, 0x2007}, // FIGURE SPACE
+ RuneRange{0x2008, 0x2008}, // PUNCTUATION SPACE
+ RuneRange{0x2009, 0x2009}, // THIN SPACE
+ RuneRange{0x200A, 0x200A}, // HAIR SPACE
+ RuneRange{0x200B, 0x200B}, // ZERO WIDTH SPACE
+ RuneRange{0x202F, 0x202F}, // NARROW NO-BREAK SPACE
+ RuneRange{0x205F, 0x205F}, // MEDIUM MATHEMATICAL SPACE
+ RuneRange{0x3000, 0x3000}, // IDEOGRAPHIC SPACE
+}
+
+// TableC1_2 represents RFC-3454 Table C.1.2.
+var TableC1_2 Set = tableC1_2
+
+var tableC2_1 = Set{
+ RuneRange{0x0000, 0x001F}, // [CONTROL CHARACTERS]
+ RuneRange{0x007F, 0x007F}, // DELETE
+}
+
+// TableC2_1 represents RFC-3454 Table C.2.1.
+var TableC2_1 Set = tableC2_1
+
+var tableC2_2 = Set{
+ RuneRange{0x0080, 0x009F}, // [CONTROL CHARACTERS]
+ RuneRange{0x06DD, 0x06DD}, // ARABIC END OF AYAH
+ RuneRange{0x070F, 0x070F}, // SYRIAC ABBREVIATION MARK
+ RuneRange{0x180E, 0x180E}, // MONGOLIAN VOWEL SEPARATOR
+ RuneRange{0x200C, 0x200C}, // ZERO WIDTH NON-JOINER
+ RuneRange{0x200D, 0x200D}, // ZERO WIDTH JOINER
+ RuneRange{0x2028, 0x2028}, // LINE SEPARATOR
+ RuneRange{0x2029, 0x2029}, // PARAGRAPH SEPARATOR
+ RuneRange{0x2060, 0x2060}, // WORD JOINER
+ RuneRange{0x2061, 0x2061}, // FUNCTION APPLICATION
+ RuneRange{0x2062, 0x2062}, // INVISIBLE TIMES
+ RuneRange{0x2063, 0x2063}, // INVISIBLE SEPARATOR
+ RuneRange{0x206A, 0x206F}, // [CONTROL CHARACTERS]
+ RuneRange{0xFEFF, 0xFEFF}, // ZERO WIDTH NO-BREAK SPACE
+ RuneRange{0xFFF9, 0xFFFC}, // [CONTROL CHARACTERS]
+ RuneRange{0x1D173, 0x1D17A}, // [MUSICAL CONTROL CHARACTERS]
+}
+
+// TableC2_2 represents RFC-3454 Table C.2.2.
+var TableC2_2 Set = tableC2_2
+
+var tableC3 = Set{
+ RuneRange{0xE000, 0xF8FF}, // [PRIVATE USE, PLANE 0]
+ RuneRange{0xF0000, 0xFFFFD}, // [PRIVATE USE, PLANE 15]
+ RuneRange{0x100000, 0x10FFFD}, // [PRIVATE USE, PLANE 16]
+}
+
+// TableC3 represents RFC-3454 Table C.3.
+var TableC3 Set = tableC3
+
+var tableC4 = Set{
+ RuneRange{0xFDD0, 0xFDEF}, // [NONCHARACTER CODE POINTS]
+ RuneRange{0xFFFE, 0xFFFF}, // [NONCHARACTER CODE POINTS]
+ RuneRange{0x1FFFE, 0x1FFFF}, // [NONCHARACTER CODE POINTS]
+ RuneRange{0x2FFFE, 0x2FFFF}, // [NONCHARACTER CODE POINTS]
+ RuneRange{0x3FFFE, 0x3FFFF}, // [NONCHARACTER CODE POINTS]
+ RuneRange{0x4FFFE, 0x4FFFF}, // [NONCHARACTER CODE POINTS]
+ RuneRange{0x5FFFE, 0x5FFFF}, // [NONCHARACTER CODE POINTS]
+ RuneRange{0x6FFFE, 0x6FFFF}, // [NONCHARACTER CODE POINTS]
+ RuneRange{0x7FFFE, 0x7FFFF}, // [NONCHARACTER CODE POINTS]
+ RuneRange{0x8FFFE, 0x8FFFF}, // [NONCHARACTER CODE POINTS]
+ RuneRange{0x9FFFE, 0x9FFFF}, // [NONCHARACTER CODE POINTS]
+ RuneRange{0xAFFFE, 0xAFFFF}, // [NONCHARACTER CODE POINTS]
+ RuneRange{0xBFFFE, 0xBFFFF}, // [NONCHARACTER CODE POINTS]
+ RuneRange{0xCFFFE, 0xCFFFF}, // [NONCHARACTER CODE POINTS]
+ RuneRange{0xDFFFE, 0xDFFFF}, // [NONCHARACTER CODE POINTS]
+ RuneRange{0xEFFFE, 0xEFFFF}, // [NONCHARACTER CODE POINTS]
+ RuneRange{0xFFFFE, 0xFFFFF}, // [NONCHARACTER CODE POINTS]
+ RuneRange{0x10FFFE, 0x10FFFF}, // [NONCHARACTER CODE POINTS]
+}
+
+// TableC4 represents RFC-3454 Table C.4.
+var TableC4 Set = tableC4
+
+var tableC5 = Set{
+ RuneRange{0xD800, 0xDFFF}, // [SURROGATE CODES]
+}
+
+// TableC5 represents RFC-3454 Table C.5.
+var TableC5 Set = tableC5
+
+var tableC6 = Set{
+ RuneRange{0xFFF9, 0xFFF9}, // INTERLINEAR ANNOTATION ANCHOR
+ RuneRange{0xFFFA, 0xFFFA}, // INTERLINEAR ANNOTATION SEPARATOR
+ RuneRange{0xFFFB, 0xFFFB}, // INTERLINEAR ANNOTATION TERMINATOR
+ RuneRange{0xFFFC, 0xFFFC}, // OBJECT REPLACEMENT CHARACTER
+ RuneRange{0xFFFD, 0xFFFD}, // REPLACEMENT CHARACTER
+}
+
+// TableC6 represents RFC-3454 Table C.6.
+var TableC6 Set = tableC6
+
+var tableC7 = Set{
+ RuneRange{0x2FF0, 0x2FFB}, // [IDEOGRAPHIC DESCRIPTION CHARACTERS]
+}
+
+// TableC7 represents RFC-3454 Table C.7.
+var TableC7 Set = tableC7
+
+var tableC8 = Set{
+ RuneRange{0x0340, 0x0340}, // COMBINING GRAVE TONE MARK
+ RuneRange{0x0341, 0x0341}, // COMBINING ACUTE TONE MARK
+ RuneRange{0x200E, 0x200E}, // LEFT-TO-RIGHT MARK
+ RuneRange{0x200F, 0x200F}, // RIGHT-TO-LEFT MARK
+ RuneRange{0x202A, 0x202A}, // LEFT-TO-RIGHT EMBEDDING
+ RuneRange{0x202B, 0x202B}, // RIGHT-TO-LEFT EMBEDDING
+ RuneRange{0x202C, 0x202C}, // POP DIRECTIONAL FORMATTING
+ RuneRange{0x202D, 0x202D}, // LEFT-TO-RIGHT OVERRIDE
+ RuneRange{0x202E, 0x202E}, // RIGHT-TO-LEFT OVERRIDE
+ RuneRange{0x206A, 0x206A}, // INHIBIT SYMMETRIC SWAPPING
+ RuneRange{0x206B, 0x206B}, // ACTIVATE SYMMETRIC SWAPPING
+ RuneRange{0x206C, 0x206C}, // INHIBIT ARABIC FORM SHAPING
+ RuneRange{0x206D, 0x206D}, // ACTIVATE ARABIC FORM SHAPING
+ RuneRange{0x206E, 0x206E}, // NATIONAL DIGIT SHAPES
+ RuneRange{0x206F, 0x206F}, // NOMINAL DIGIT SHAPES
+}
+
+// TableC8 represents RFC-3454 Table C.8.
+var TableC8 Set = tableC8
+
+var tableC9 = Set{
+ RuneRange{0xE0001, 0xE0001}, // LANGUAGE TAG
+ RuneRange{0xE0020, 0xE007F}, // [TAGGING CHARACTERS]
+}
+
+// TableC9 represents RFC-3454 Table C.9.
+var TableC9 Set = tableC9 + +var tableD1 = Set{ + RuneRange{0x05BE, 0x05BE}, + RuneRange{0x05C0, 0x05C0}, + RuneRange{0x05C3, 0x05C3}, + RuneRange{0x05D0, 0x05EA}, + RuneRange{0x05F0, 0x05F4}, + RuneRange{0x061B, 0x061B}, + RuneRange{0x061F, 0x061F}, + RuneRange{0x0621, 0x063A}, + RuneRange{0x0640, 0x064A}, + RuneRange{0x066D, 0x066F}, + RuneRange{0x0671, 0x06D5}, + RuneRange{0x06DD, 0x06DD}, + RuneRange{0x06E5, 0x06E6}, + RuneRange{0x06FA, 0x06FE}, + RuneRange{0x0700, 0x070D}, + RuneRange{0x0710, 0x0710}, + RuneRange{0x0712, 0x072C}, + RuneRange{0x0780, 0x07A5}, + RuneRange{0x07B1, 0x07B1}, + RuneRange{0x200F, 0x200F}, + RuneRange{0xFB1D, 0xFB1D}, + RuneRange{0xFB1F, 0xFB28}, + RuneRange{0xFB2A, 0xFB36}, + RuneRange{0xFB38, 0xFB3C}, + RuneRange{0xFB3E, 0xFB3E}, + RuneRange{0xFB40, 0xFB41}, + RuneRange{0xFB43, 0xFB44}, + RuneRange{0xFB46, 0xFBB1}, + RuneRange{0xFBD3, 0xFD3D}, + RuneRange{0xFD50, 0xFD8F}, + RuneRange{0xFD92, 0xFDC7}, + RuneRange{0xFDF0, 0xFDFC}, + RuneRange{0xFE70, 0xFE74}, + RuneRange{0xFE76, 0xFEFC}, +} + +// TableD1 represents RFC-3454 Table D.1. +var TableD1 Set = tableD1 + +var tableD2 = Set{ + RuneRange{0x0041, 0x005A}, + RuneRange{0x0061, 0x007A}, + RuneRange{0x00AA, 0x00AA}, + RuneRange{0x00B5, 0x00B5}, + RuneRange{0x00BA, 0x00BA}, + RuneRange{0x00C0, 0x00D6}, + RuneRange{0x00D8, 0x00F6}, + RuneRange{0x00F8, 0x0220}, + RuneRange{0x0222, 0x0233}, + RuneRange{0x0250, 0x02AD}, + RuneRange{0x02B0, 0x02B8}, + RuneRange{0x02BB, 0x02C1}, + RuneRange{0x02D0, 0x02D1}, + RuneRange{0x02E0, 0x02E4}, + RuneRange{0x02EE, 0x02EE}, + RuneRange{0x037A, 0x037A}, + RuneRange{0x0386, 0x0386}, + RuneRange{0x0388, 0x038A}, + RuneRange{0x038C, 0x038C}, + RuneRange{0x038E, 0x03A1}, + RuneRange{0x03A3, 0x03CE}, + RuneRange{0x03D0, 0x03F5}, + RuneRange{0x0400, 0x0482}, + RuneRange{0x048A, 0x04CE}, + RuneRange{0x04D0, 0x04F5}, + RuneRange{0x04F8, 0x04F9}, + RuneRange{0x0500, 0x050F}, + RuneRange{0x0531, 0x0556}, + RuneRange{0x0559, 0x055F}, + RuneRange{0x0561, 0x0587}, + RuneRange{0x0589, 0x0589}, + RuneRange{0x0903, 0x0903}, + RuneRange{0x0905, 0x0939}, + RuneRange{0x093D, 0x0940}, + RuneRange{0x0949, 0x094C}, + RuneRange{0x0950, 0x0950}, + RuneRange{0x0958, 0x0961}, + RuneRange{0x0964, 0x0970}, + RuneRange{0x0982, 0x0983}, + RuneRange{0x0985, 0x098C}, + RuneRange{0x098F, 0x0990}, + RuneRange{0x0993, 0x09A8}, + RuneRange{0x09AA, 0x09B0}, + RuneRange{0x09B2, 0x09B2}, + RuneRange{0x09B6, 0x09B9}, + RuneRange{0x09BE, 0x09C0}, + RuneRange{0x09C7, 0x09C8}, + RuneRange{0x09CB, 0x09CC}, + RuneRange{0x09D7, 0x09D7}, + RuneRange{0x09DC, 0x09DD}, + RuneRange{0x09DF, 0x09E1}, + RuneRange{0x09E6, 0x09F1}, + RuneRange{0x09F4, 0x09FA}, + RuneRange{0x0A05, 0x0A0A}, + RuneRange{0x0A0F, 0x0A10}, + RuneRange{0x0A13, 0x0A28}, + RuneRange{0x0A2A, 0x0A30}, + RuneRange{0x0A32, 0x0A33}, + RuneRange{0x0A35, 0x0A36}, + RuneRange{0x0A38, 0x0A39}, + RuneRange{0x0A3E, 0x0A40}, + RuneRange{0x0A59, 0x0A5C}, + RuneRange{0x0A5E, 0x0A5E}, + RuneRange{0x0A66, 0x0A6F}, + RuneRange{0x0A72, 0x0A74}, + RuneRange{0x0A83, 0x0A83}, + RuneRange{0x0A85, 0x0A8B}, + RuneRange{0x0A8D, 0x0A8D}, + RuneRange{0x0A8F, 0x0A91}, + RuneRange{0x0A93, 0x0AA8}, + RuneRange{0x0AAA, 0x0AB0}, + RuneRange{0x0AB2, 0x0AB3}, + RuneRange{0x0AB5, 0x0AB9}, + RuneRange{0x0ABD, 0x0AC0}, + RuneRange{0x0AC9, 0x0AC9}, + RuneRange{0x0ACB, 0x0ACC}, + RuneRange{0x0AD0, 0x0AD0}, + RuneRange{0x0AE0, 0x0AE0}, + RuneRange{0x0AE6, 0x0AEF}, + RuneRange{0x0B02, 0x0B03}, + RuneRange{0x0B05, 0x0B0C}, + RuneRange{0x0B0F, 0x0B10}, + RuneRange{0x0B13, 0x0B28}, + 
RuneRange{0x0B2A, 0x0B30}, + RuneRange{0x0B32, 0x0B33}, + RuneRange{0x0B36, 0x0B39}, + RuneRange{0x0B3D, 0x0B3E}, + RuneRange{0x0B40, 0x0B40}, + RuneRange{0x0B47, 0x0B48}, + RuneRange{0x0B4B, 0x0B4C}, + RuneRange{0x0B57, 0x0B57}, + RuneRange{0x0B5C, 0x0B5D}, + RuneRange{0x0B5F, 0x0B61}, + RuneRange{0x0B66, 0x0B70}, + RuneRange{0x0B83, 0x0B83}, + RuneRange{0x0B85, 0x0B8A}, + RuneRange{0x0B8E, 0x0B90}, + RuneRange{0x0B92, 0x0B95}, + RuneRange{0x0B99, 0x0B9A}, + RuneRange{0x0B9C, 0x0B9C}, + RuneRange{0x0B9E, 0x0B9F}, + RuneRange{0x0BA3, 0x0BA4}, + RuneRange{0x0BA8, 0x0BAA}, + RuneRange{0x0BAE, 0x0BB5}, + RuneRange{0x0BB7, 0x0BB9}, + RuneRange{0x0BBE, 0x0BBF}, + RuneRange{0x0BC1, 0x0BC2}, + RuneRange{0x0BC6, 0x0BC8}, + RuneRange{0x0BCA, 0x0BCC}, + RuneRange{0x0BD7, 0x0BD7}, + RuneRange{0x0BE7, 0x0BF2}, + RuneRange{0x0C01, 0x0C03}, + RuneRange{0x0C05, 0x0C0C}, + RuneRange{0x0C0E, 0x0C10}, + RuneRange{0x0C12, 0x0C28}, + RuneRange{0x0C2A, 0x0C33}, + RuneRange{0x0C35, 0x0C39}, + RuneRange{0x0C41, 0x0C44}, + RuneRange{0x0C60, 0x0C61}, + RuneRange{0x0C66, 0x0C6F}, + RuneRange{0x0C82, 0x0C83}, + RuneRange{0x0C85, 0x0C8C}, + RuneRange{0x0C8E, 0x0C90}, + RuneRange{0x0C92, 0x0CA8}, + RuneRange{0x0CAA, 0x0CB3}, + RuneRange{0x0CB5, 0x0CB9}, + RuneRange{0x0CBE, 0x0CBE}, + RuneRange{0x0CC0, 0x0CC4}, + RuneRange{0x0CC7, 0x0CC8}, + RuneRange{0x0CCA, 0x0CCB}, + RuneRange{0x0CD5, 0x0CD6}, + RuneRange{0x0CDE, 0x0CDE}, + RuneRange{0x0CE0, 0x0CE1}, + RuneRange{0x0CE6, 0x0CEF}, + RuneRange{0x0D02, 0x0D03}, + RuneRange{0x0D05, 0x0D0C}, + RuneRange{0x0D0E, 0x0D10}, + RuneRange{0x0D12, 0x0D28}, + RuneRange{0x0D2A, 0x0D39}, + RuneRange{0x0D3E, 0x0D40}, + RuneRange{0x0D46, 0x0D48}, + RuneRange{0x0D4A, 0x0D4C}, + RuneRange{0x0D57, 0x0D57}, + RuneRange{0x0D60, 0x0D61}, + RuneRange{0x0D66, 0x0D6F}, + RuneRange{0x0D82, 0x0D83}, + RuneRange{0x0D85, 0x0D96}, + RuneRange{0x0D9A, 0x0DB1}, + RuneRange{0x0DB3, 0x0DBB}, + RuneRange{0x0DBD, 0x0DBD}, + RuneRange{0x0DC0, 0x0DC6}, + RuneRange{0x0DCF, 0x0DD1}, + RuneRange{0x0DD8, 0x0DDF}, + RuneRange{0x0DF2, 0x0DF4}, + RuneRange{0x0E01, 0x0E30}, + RuneRange{0x0E32, 0x0E33}, + RuneRange{0x0E40, 0x0E46}, + RuneRange{0x0E4F, 0x0E5B}, + RuneRange{0x0E81, 0x0E82}, + RuneRange{0x0E84, 0x0E84}, + RuneRange{0x0E87, 0x0E88}, + RuneRange{0x0E8A, 0x0E8A}, + RuneRange{0x0E8D, 0x0E8D}, + RuneRange{0x0E94, 0x0E97}, + RuneRange{0x0E99, 0x0E9F}, + RuneRange{0x0EA1, 0x0EA3}, + RuneRange{0x0EA5, 0x0EA5}, + RuneRange{0x0EA7, 0x0EA7}, + RuneRange{0x0EAA, 0x0EAB}, + RuneRange{0x0EAD, 0x0EB0}, + RuneRange{0x0EB2, 0x0EB3}, + RuneRange{0x0EBD, 0x0EBD}, + RuneRange{0x0EC0, 0x0EC4}, + RuneRange{0x0EC6, 0x0EC6}, + RuneRange{0x0ED0, 0x0ED9}, + RuneRange{0x0EDC, 0x0EDD}, + RuneRange{0x0F00, 0x0F17}, + RuneRange{0x0F1A, 0x0F34}, + RuneRange{0x0F36, 0x0F36}, + RuneRange{0x0F38, 0x0F38}, + RuneRange{0x0F3E, 0x0F47}, + RuneRange{0x0F49, 0x0F6A}, + RuneRange{0x0F7F, 0x0F7F}, + RuneRange{0x0F85, 0x0F85}, + RuneRange{0x0F88, 0x0F8B}, + RuneRange{0x0FBE, 0x0FC5}, + RuneRange{0x0FC7, 0x0FCC}, + RuneRange{0x0FCF, 0x0FCF}, + RuneRange{0x1000, 0x1021}, + RuneRange{0x1023, 0x1027}, + RuneRange{0x1029, 0x102A}, + RuneRange{0x102C, 0x102C}, + RuneRange{0x1031, 0x1031}, + RuneRange{0x1038, 0x1038}, + RuneRange{0x1040, 0x1057}, + RuneRange{0x10A0, 0x10C5}, + RuneRange{0x10D0, 0x10F8}, + RuneRange{0x10FB, 0x10FB}, + RuneRange{0x1100, 0x1159}, + RuneRange{0x115F, 0x11A2}, + RuneRange{0x11A8, 0x11F9}, + RuneRange{0x1200, 0x1206}, + RuneRange{0x1208, 0x1246}, + RuneRange{0x1248, 0x1248}, + RuneRange{0x124A, 0x124D}, + 
RuneRange{0x1250, 0x1256}, + RuneRange{0x1258, 0x1258}, + RuneRange{0x125A, 0x125D}, + RuneRange{0x1260, 0x1286}, + RuneRange{0x1288, 0x1288}, + RuneRange{0x128A, 0x128D}, + RuneRange{0x1290, 0x12AE}, + RuneRange{0x12B0, 0x12B0}, + RuneRange{0x12B2, 0x12B5}, + RuneRange{0x12B8, 0x12BE}, + RuneRange{0x12C0, 0x12C0}, + RuneRange{0x12C2, 0x12C5}, + RuneRange{0x12C8, 0x12CE}, + RuneRange{0x12D0, 0x12D6}, + RuneRange{0x12D8, 0x12EE}, + RuneRange{0x12F0, 0x130E}, + RuneRange{0x1310, 0x1310}, + RuneRange{0x1312, 0x1315}, + RuneRange{0x1318, 0x131E}, + RuneRange{0x1320, 0x1346}, + RuneRange{0x1348, 0x135A}, + RuneRange{0x1361, 0x137C}, + RuneRange{0x13A0, 0x13F4}, + RuneRange{0x1401, 0x1676}, + RuneRange{0x1681, 0x169A}, + RuneRange{0x16A0, 0x16F0}, + RuneRange{0x1700, 0x170C}, + RuneRange{0x170E, 0x1711}, + RuneRange{0x1720, 0x1731}, + RuneRange{0x1735, 0x1736}, + RuneRange{0x1740, 0x1751}, + RuneRange{0x1760, 0x176C}, + RuneRange{0x176E, 0x1770}, + RuneRange{0x1780, 0x17B6}, + RuneRange{0x17BE, 0x17C5}, + RuneRange{0x17C7, 0x17C8}, + RuneRange{0x17D4, 0x17DA}, + RuneRange{0x17DC, 0x17DC}, + RuneRange{0x17E0, 0x17E9}, + RuneRange{0x1810, 0x1819}, + RuneRange{0x1820, 0x1877}, + RuneRange{0x1880, 0x18A8}, + RuneRange{0x1E00, 0x1E9B}, + RuneRange{0x1EA0, 0x1EF9}, + RuneRange{0x1F00, 0x1F15}, + RuneRange{0x1F18, 0x1F1D}, + RuneRange{0x1F20, 0x1F45}, + RuneRange{0x1F48, 0x1F4D}, + RuneRange{0x1F50, 0x1F57}, + RuneRange{0x1F59, 0x1F59}, + RuneRange{0x1F5B, 0x1F5B}, + RuneRange{0x1F5D, 0x1F5D}, + RuneRange{0x1F5F, 0x1F7D}, + RuneRange{0x1F80, 0x1FB4}, + RuneRange{0x1FB6, 0x1FBC}, + RuneRange{0x1FBE, 0x1FBE}, + RuneRange{0x1FC2, 0x1FC4}, + RuneRange{0x1FC6, 0x1FCC}, + RuneRange{0x1FD0, 0x1FD3}, + RuneRange{0x1FD6, 0x1FDB}, + RuneRange{0x1FE0, 0x1FEC}, + RuneRange{0x1FF2, 0x1FF4}, + RuneRange{0x1FF6, 0x1FFC}, + RuneRange{0x200E, 0x200E}, + RuneRange{0x2071, 0x2071}, + RuneRange{0x207F, 0x207F}, + RuneRange{0x2102, 0x2102}, + RuneRange{0x2107, 0x2107}, + RuneRange{0x210A, 0x2113}, + RuneRange{0x2115, 0x2115}, + RuneRange{0x2119, 0x211D}, + RuneRange{0x2124, 0x2124}, + RuneRange{0x2126, 0x2126}, + RuneRange{0x2128, 0x2128}, + RuneRange{0x212A, 0x212D}, + RuneRange{0x212F, 0x2131}, + RuneRange{0x2133, 0x2139}, + RuneRange{0x213D, 0x213F}, + RuneRange{0x2145, 0x2149}, + RuneRange{0x2160, 0x2183}, + RuneRange{0x2336, 0x237A}, + RuneRange{0x2395, 0x2395}, + RuneRange{0x249C, 0x24E9}, + RuneRange{0x3005, 0x3007}, + RuneRange{0x3021, 0x3029}, + RuneRange{0x3031, 0x3035}, + RuneRange{0x3038, 0x303C}, + RuneRange{0x3041, 0x3096}, + RuneRange{0x309D, 0x309F}, + RuneRange{0x30A1, 0x30FA}, + RuneRange{0x30FC, 0x30FF}, + RuneRange{0x3105, 0x312C}, + RuneRange{0x3131, 0x318E}, + RuneRange{0x3190, 0x31B7}, + RuneRange{0x31F0, 0x321C}, + RuneRange{0x3220, 0x3243}, + RuneRange{0x3260, 0x327B}, + RuneRange{0x327F, 0x32B0}, + RuneRange{0x32C0, 0x32CB}, + RuneRange{0x32D0, 0x32FE}, + RuneRange{0x3300, 0x3376}, + RuneRange{0x337B, 0x33DD}, + RuneRange{0x33E0, 0x33FE}, + RuneRange{0x3400, 0x4DB5}, + RuneRange{0x4E00, 0x9FA5}, + RuneRange{0xA000, 0xA48C}, + RuneRange{0xAC00, 0xD7A3}, + RuneRange{0xD800, 0xFA2D}, + RuneRange{0xFA30, 0xFA6A}, + RuneRange{0xFB00, 0xFB06}, + RuneRange{0xFB13, 0xFB17}, + RuneRange{0xFF21, 0xFF3A}, + RuneRange{0xFF41, 0xFF5A}, + RuneRange{0xFF66, 0xFFBE}, + RuneRange{0xFFC2, 0xFFC7}, + RuneRange{0xFFCA, 0xFFCF}, + RuneRange{0xFFD2, 0xFFD7}, + RuneRange{0xFFDA, 0xFFDC}, + RuneRange{0x10300, 0x1031E}, + RuneRange{0x10320, 0x10323}, + RuneRange{0x10330, 0x1034A}, + RuneRange{0x10400, 0x10425}, + 
RuneRange{0x10428, 0x1044D}, + RuneRange{0x1D000, 0x1D0F5}, + RuneRange{0x1D100, 0x1D126}, + RuneRange{0x1D12A, 0x1D166}, + RuneRange{0x1D16A, 0x1D172}, + RuneRange{0x1D183, 0x1D184}, + RuneRange{0x1D18C, 0x1D1A9}, + RuneRange{0x1D1AE, 0x1D1DD}, + RuneRange{0x1D400, 0x1D454}, + RuneRange{0x1D456, 0x1D49C}, + RuneRange{0x1D49E, 0x1D49F}, + RuneRange{0x1D4A2, 0x1D4A2}, + RuneRange{0x1D4A5, 0x1D4A6}, + RuneRange{0x1D4A9, 0x1D4AC}, + RuneRange{0x1D4AE, 0x1D4B9}, + RuneRange{0x1D4BB, 0x1D4BB}, + RuneRange{0x1D4BD, 0x1D4C0}, + RuneRange{0x1D4C2, 0x1D4C3}, + RuneRange{0x1D4C5, 0x1D505}, + RuneRange{0x1D507, 0x1D50A}, + RuneRange{0x1D50D, 0x1D514}, + RuneRange{0x1D516, 0x1D51C}, + RuneRange{0x1D51E, 0x1D539}, + RuneRange{0x1D53B, 0x1D53E}, + RuneRange{0x1D540, 0x1D544}, + RuneRange{0x1D546, 0x1D546}, + RuneRange{0x1D54A, 0x1D550}, + RuneRange{0x1D552, 0x1D6A3}, + RuneRange{0x1D6A8, 0x1D7C9}, + RuneRange{0x20000, 0x2A6D6}, + RuneRange{0x2F800, 0x2FA1D}, + RuneRange{0xF0000, 0xFFFFD}, + RuneRange{0x100000, 0x10FFFD}, +} + +// TableD2 represents RFC-3454 Table D.2. +var TableD2 Set = tableD2 diff --git a/vendor/golang.org/x/crypto/ssh/terminal/terminal.go b/vendor/golang.org/x/crypto/ssh/terminal/terminal.go index 2ffb97bfb8..a4d1919a9e 100644 --- a/vendor/golang.org/x/crypto/ssh/terminal/terminal.go +++ b/vendor/golang.org/x/crypto/ssh/terminal/terminal.go @@ -2,986 +2,75 @@ // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. +// Package terminal provides support functions for dealing with terminals, as +// commonly found on UNIX systems. +// +// Deprecated: this package moved to golang.org/x/term. package terminal import ( - "bytes" "io" - "runtime" - "strconv" - "sync" - "unicode/utf8" + + "golang.org/x/term" ) // EscapeCodes contains escape sequences that can be written to the terminal in // order to achieve different styles of text. -type EscapeCodes struct { - // Foreground colors - Black, Red, Green, Yellow, Blue, Magenta, Cyan, White []byte - - // Reset all attributes - Reset []byte -} - -var vt100EscapeCodes = EscapeCodes{ - Black: []byte{keyEscape, '[', '3', '0', 'm'}, - Red: []byte{keyEscape, '[', '3', '1', 'm'}, - Green: []byte{keyEscape, '[', '3', '2', 'm'}, - Yellow: []byte{keyEscape, '[', '3', '3', 'm'}, - Blue: []byte{keyEscape, '[', '3', '4', 'm'}, - Magenta: []byte{keyEscape, '[', '3', '5', 'm'}, - Cyan: []byte{keyEscape, '[', '3', '6', 'm'}, - White: []byte{keyEscape, '[', '3', '7', 'm'}, - - Reset: []byte{keyEscape, '[', '0', 'm'}, -} +type EscapeCodes = term.EscapeCodes // Terminal contains the state for running a VT100 terminal that is capable of // reading lines of input. -type Terminal struct { - // AutoCompleteCallback, if non-null, is called for each keypress with - // the full input line and the current position of the cursor (in - // bytes, as an index into |line|). If it returns ok=false, the key - // press is processed normally. Otherwise it returns a replacement line - // and the new cursor position. - AutoCompleteCallback func(line string, pos int, key rune) (newLine string, newPos int, ok bool) - - // Escape contains a pointer to the escape codes for this terminal. - // It's always a valid pointer, although the escape codes themselves - // may be empty if the terminal doesn't support them. - Escape *EscapeCodes - - // lock protects the terminal and the state in this object from - // concurrent processing of a key press and a Write() call. 
- lock sync.Mutex - - c io.ReadWriter - prompt []rune - - // line is the current line being entered. - line []rune - // pos is the logical position of the cursor in line - pos int - // echo is true if local echo is enabled - echo bool - // pasteActive is true iff there is a bracketed paste operation in - // progress. - pasteActive bool - - // cursorX contains the current X value of the cursor where the left - // edge is 0. cursorY contains the row number where the first row of - // the current line is 0. - cursorX, cursorY int - // maxLine is the greatest value of cursorY so far. - maxLine int - - termWidth, termHeight int - - // outBuf contains the terminal data to be sent. - outBuf []byte - // remainder contains the remainder of any partial key sequences after - // a read. It aliases into inBuf. - remainder []byte - inBuf [256]byte - - // history contains previously entered commands so that they can be - // accessed with the up and down keys. - history stRingBuffer - // historyIndex stores the currently accessed history entry, where zero - // means the immediately previous entry. - historyIndex int - // When navigating up and down the history it's possible to return to - // the incomplete, initial line. That value is stored in - // historyPending. - historyPending string -} +type Terminal = term.Terminal // NewTerminal runs a VT100 terminal on the given ReadWriter. If the ReadWriter is // a local terminal, that terminal must first have been put into raw mode. // prompt is a string that is written at the start of each input line (i.e. // "> "). func NewTerminal(c io.ReadWriter, prompt string) *Terminal { - return &Terminal{ - Escape: &vt100EscapeCodes, - c: c, - prompt: []rune(prompt), - termWidth: 80, - termHeight: 24, - echo: true, - historyIndex: -1, - } -} - -const ( - keyCtrlC = 3 - keyCtrlD = 4 - keyCtrlU = 21 - keyEnter = '\r' - keyEscape = 27 - keyBackspace = 127 - keyUnknown = 0xd800 /* UTF-16 surrogate area */ + iota - keyUp - keyDown - keyLeft - keyRight - keyAltLeft - keyAltRight - keyHome - keyEnd - keyDeleteWord - keyDeleteLine - keyClearScreen - keyPasteStart - keyPasteEnd -) - -var ( - crlf = []byte{'\r', '\n'} - pasteStart = []byte{keyEscape, '[', '2', '0', '0', '~'} - pasteEnd = []byte{keyEscape, '[', '2', '0', '1', '~'} -) - -// bytesToKey tries to parse a key sequence from b. If successful, it returns -// the key and the remainder of the input. Otherwise it returns utf8.RuneError. 
-func bytesToKey(b []byte, pasteActive bool) (rune, []byte) { - if len(b) == 0 { - return utf8.RuneError, nil - } - - if !pasteActive { - switch b[0] { - case 1: // ^A - return keyHome, b[1:] - case 2: // ^B - return keyLeft, b[1:] - case 5: // ^E - return keyEnd, b[1:] - case 6: // ^F - return keyRight, b[1:] - case 8: // ^H - return keyBackspace, b[1:] - case 11: // ^K - return keyDeleteLine, b[1:] - case 12: // ^L - return keyClearScreen, b[1:] - case 23: // ^W - return keyDeleteWord, b[1:] - case 14: // ^N - return keyDown, b[1:] - case 16: // ^P - return keyUp, b[1:] - } - } - - if b[0] != keyEscape { - if !utf8.FullRune(b) { - return utf8.RuneError, b - } - r, l := utf8.DecodeRune(b) - return r, b[l:] - } - - if !pasteActive && len(b) >= 3 && b[0] == keyEscape && b[1] == '[' { - switch b[2] { - case 'A': - return keyUp, b[3:] - case 'B': - return keyDown, b[3:] - case 'C': - return keyRight, b[3:] - case 'D': - return keyLeft, b[3:] - case 'H': - return keyHome, b[3:] - case 'F': - return keyEnd, b[3:] - } - } - - if !pasteActive && len(b) >= 6 && b[0] == keyEscape && b[1] == '[' && b[2] == '1' && b[3] == ';' && b[4] == '3' { - switch b[5] { - case 'C': - return keyAltRight, b[6:] - case 'D': - return keyAltLeft, b[6:] - } - } - - if !pasteActive && len(b) >= 6 && bytes.Equal(b[:6], pasteStart) { - return keyPasteStart, b[6:] - } - - if pasteActive && len(b) >= 6 && bytes.Equal(b[:6], pasteEnd) { - return keyPasteEnd, b[6:] - } - - // If we get here then we have a key that we don't recognise, or a - // partial sequence. It's not clear how one should find the end of a - // sequence without knowing them all, but it seems that [a-zA-Z~] only - // appears at the end of a sequence. - for i, c := range b[0:] { - if c >= 'a' && c <= 'z' || c >= 'A' && c <= 'Z' || c == '~' { - return keyUnknown, b[i+1:] - } - } - - return utf8.RuneError, b -} - -// queue appends data to the end of t.outBuf -func (t *Terminal) queue(data []rune) { - t.outBuf = append(t.outBuf, []byte(string(data))...) -} - -var eraseUnderCursor = []rune{' ', keyEscape, '[', 'D'} -var space = []rune{' '} - -func isPrintable(key rune) bool { - isInSurrogateArea := key >= 0xd800 && key <= 0xdbff - return key >= 32 && !isInSurrogateArea -} - -// moveCursorToPos appends data to t.outBuf which will move the cursor to the -// given, logical position in the text. -func (t *Terminal) moveCursorToPos(pos int) { - if !t.echo { - return - } - - x := visualLength(t.prompt) + pos - y := x / t.termWidth - x = x % t.termWidth - - up := 0 - if y < t.cursorY { - up = t.cursorY - y - } - - down := 0 - if y > t.cursorY { - down = y - t.cursorY - } - - left := 0 - if x < t.cursorX { - left = t.cursorX - x - } - - right := 0 - if x > t.cursorX { - right = x - t.cursorX - } - - t.cursorX = x - t.cursorY = y - t.move(up, down, left, right) -} - -func (t *Terminal) move(up, down, left, right int) { - m := []rune{} - - // 1 unit up can be expressed as ^[[A or ^[A - // 5 units up can be expressed as ^[[5A - - if up == 1 { - m = append(m, keyEscape, '[', 'A') - } else if up > 1 { - m = append(m, keyEscape, '[') - m = append(m, []rune(strconv.Itoa(up))...) - m = append(m, 'A') - } - - if down == 1 { - m = append(m, keyEscape, '[', 'B') - } else if down > 1 { - m = append(m, keyEscape, '[') - m = append(m, []rune(strconv.Itoa(down))...) - m = append(m, 'B') - } - - if right == 1 { - m = append(m, keyEscape, '[', 'C') - } else if right > 1 { - m = append(m, keyEscape, '[') - m = append(m, []rune(strconv.Itoa(right))...) 
- m = append(m, 'C') - } - - if left == 1 { - m = append(m, keyEscape, '[', 'D') - } else if left > 1 { - m = append(m, keyEscape, '[') - m = append(m, []rune(strconv.Itoa(left))...) - m = append(m, 'D') - } - - t.queue(m) -} - -func (t *Terminal) clearLineToRight() { - op := []rune{keyEscape, '[', 'K'} - t.queue(op) -} - -const maxLineLength = 4096 - -func (t *Terminal) setLine(newLine []rune, newPos int) { - if t.echo { - t.moveCursorToPos(0) - t.writeLine(newLine) - for i := len(newLine); i < len(t.line); i++ { - t.writeLine(space) - } - t.moveCursorToPos(newPos) - } - t.line = newLine - t.pos = newPos -} - -func (t *Terminal) advanceCursor(places int) { - t.cursorX += places - t.cursorY += t.cursorX / t.termWidth - if t.cursorY > t.maxLine { - t.maxLine = t.cursorY - } - t.cursorX = t.cursorX % t.termWidth - - if places > 0 && t.cursorX == 0 { - // Normally terminals will advance the current position - // when writing a character. But that doesn't happen - // for the last character in a line. However, when - // writing a character (except a new line) that causes - // a line wrap, the position will be advanced two - // places. - // - // So, if we are stopping at the end of a line, we - // need to write a newline so that our cursor can be - // advanced to the next line. - t.outBuf = append(t.outBuf, '\r', '\n') - } -} - -func (t *Terminal) eraseNPreviousChars(n int) { - if n == 0 { - return - } - - if t.pos < n { - n = t.pos - } - t.pos -= n - t.moveCursorToPos(t.pos) - - copy(t.line[t.pos:], t.line[n+t.pos:]) - t.line = t.line[:len(t.line)-n] - if t.echo { - t.writeLine(t.line[t.pos:]) - for i := 0; i < n; i++ { - t.queue(space) - } - t.advanceCursor(n) - t.moveCursorToPos(t.pos) - } -} - -// countToLeftWord returns then number of characters from the cursor to the -// start of the previous word. -func (t *Terminal) countToLeftWord() int { - if t.pos == 0 { - return 0 - } - - pos := t.pos - 1 - for pos > 0 { - if t.line[pos] != ' ' { - break - } - pos-- - } - for pos > 0 { - if t.line[pos] == ' ' { - pos++ - break - } - pos-- - } - - return t.pos - pos -} - -// countToRightWord returns then number of characters from the cursor to the -// start of the next word. -func (t *Terminal) countToRightWord() int { - pos := t.pos - for pos < len(t.line) { - if t.line[pos] == ' ' { - break - } - pos++ - } - for pos < len(t.line) { - if t.line[pos] != ' ' { - break - } - pos++ - } - return pos - t.pos -} - -// visualLength returns the number of visible glyphs in s. -func visualLength(runes []rune) int { - inEscapeSeq := false - length := 0 - - for _, r := range runes { - switch { - case inEscapeSeq: - if (r >= 'a' && r <= 'z') || (r >= 'A' && r <= 'Z') { - inEscapeSeq = false - } - case r == '\x1b': - inEscapeSeq = true - default: - length++ - } - } - - return length -} - -// handleKey processes the given key and, optionally, returns a line of text -// that the user has entered. -func (t *Terminal) handleKey(key rune) (line string, ok bool) { - if t.pasteActive && key != keyEnter { - t.addKeyToLine(key) - return - } - - switch key { - case keyBackspace: - if t.pos == 0 { - return - } - t.eraseNPreviousChars(1) - case keyAltLeft: - // move left by a word. - t.pos -= t.countToLeftWord() - t.moveCursorToPos(t.pos) - case keyAltRight: - // move right by a word. 
- t.pos += t.countToRightWord() - t.moveCursorToPos(t.pos) - case keyLeft: - if t.pos == 0 { - return - } - t.pos-- - t.moveCursorToPos(t.pos) - case keyRight: - if t.pos == len(t.line) { - return - } - t.pos++ - t.moveCursorToPos(t.pos) - case keyHome: - if t.pos == 0 { - return - } - t.pos = 0 - t.moveCursorToPos(t.pos) - case keyEnd: - if t.pos == len(t.line) { - return - } - t.pos = len(t.line) - t.moveCursorToPos(t.pos) - case keyUp: - entry, ok := t.history.NthPreviousEntry(t.historyIndex + 1) - if !ok { - return "", false - } - if t.historyIndex == -1 { - t.historyPending = string(t.line) - } - t.historyIndex++ - runes := []rune(entry) - t.setLine(runes, len(runes)) - case keyDown: - switch t.historyIndex { - case -1: - return - case 0: - runes := []rune(t.historyPending) - t.setLine(runes, len(runes)) - t.historyIndex-- - default: - entry, ok := t.history.NthPreviousEntry(t.historyIndex - 1) - if ok { - t.historyIndex-- - runes := []rune(entry) - t.setLine(runes, len(runes)) - } - } - case keyEnter: - t.moveCursorToPos(len(t.line)) - t.queue([]rune("\r\n")) - line = string(t.line) - ok = true - t.line = t.line[:0] - t.pos = 0 - t.cursorX = 0 - t.cursorY = 0 - t.maxLine = 0 - case keyDeleteWord: - // Delete zero or more spaces and then one or more characters. - t.eraseNPreviousChars(t.countToLeftWord()) - case keyDeleteLine: - // Delete everything from the current cursor position to the - // end of line. - for i := t.pos; i < len(t.line); i++ { - t.queue(space) - t.advanceCursor(1) - } - t.line = t.line[:t.pos] - t.moveCursorToPos(t.pos) - case keyCtrlD: - // Erase the character under the current position. - // The EOF case when the line is empty is handled in - // readLine(). - if t.pos < len(t.line) { - t.pos++ - t.eraseNPreviousChars(1) - } - case keyCtrlU: - t.eraseNPreviousChars(t.pos) - case keyClearScreen: - // Erases the screen and moves the cursor to the home position. - t.queue([]rune("\x1b[2J\x1b[H")) - t.queue(t.prompt) - t.cursorX, t.cursorY = 0, 0 - t.advanceCursor(visualLength(t.prompt)) - t.setLine(t.line, t.pos) - default: - if t.AutoCompleteCallback != nil { - prefix := string(t.line[:t.pos]) - suffix := string(t.line[t.pos:]) - - t.lock.Unlock() - newLine, newPos, completeOk := t.AutoCompleteCallback(prefix+suffix, len(prefix), key) - t.lock.Lock() - - if completeOk { - t.setLine([]rune(newLine), utf8.RuneCount([]byte(newLine)[:newPos])) - return - } - } - if !isPrintable(key) { - return - } - if len(t.line) == maxLineLength { - return - } - t.addKeyToLine(key) - } - return -} - -// addKeyToLine inserts the given key at the current position in the current -// line. -func (t *Terminal) addKeyToLine(key rune) { - if len(t.line) == cap(t.line) { - newLine := make([]rune, len(t.line), 2*(1+len(t.line))) - copy(newLine, t.line) - t.line = newLine - } - t.line = t.line[:len(t.line)+1] - copy(t.line[t.pos+1:], t.line[t.pos:]) - t.line[t.pos] = key - if t.echo { - t.writeLine(t.line[t.pos:]) - } - t.pos++ - t.moveCursorToPos(t.pos) -} - -func (t *Terminal) writeLine(line []rune) { - for len(line) != 0 { - remainingOnLine := t.termWidth - t.cursorX - todo := len(line) - if todo > remainingOnLine { - todo = remainingOnLine - } - t.queue(line[:todo]) - t.advanceCursor(visualLength(line[:todo])) - line = line[todo:] - } -} - -// writeWithCRLF writes buf to w but replaces all occurrences of \n with \r\n. 
-func writeWithCRLF(w io.Writer, buf []byte) (n int, err error) { - for len(buf) > 0 { - i := bytes.IndexByte(buf, '\n') - todo := len(buf) - if i >= 0 { - todo = i - } - - var nn int - nn, err = w.Write(buf[:todo]) - n += nn - if err != nil { - return n, err - } - buf = buf[todo:] - - if i >= 0 { - if _, err = w.Write(crlf); err != nil { - return n, err - } - n++ - buf = buf[1:] - } - } - - return n, nil -} - -func (t *Terminal) Write(buf []byte) (n int, err error) { - t.lock.Lock() - defer t.lock.Unlock() - - if t.cursorX == 0 && t.cursorY == 0 { - // This is the easy case: there's nothing on the screen that we - // have to move out of the way. - return writeWithCRLF(t.c, buf) - } - - // We have a prompt and possibly user input on the screen. We - // have to clear it first. - t.move(0 /* up */, 0 /* down */, t.cursorX /* left */, 0 /* right */) - t.cursorX = 0 - t.clearLineToRight() - - for t.cursorY > 0 { - t.move(1 /* up */, 0, 0, 0) - t.cursorY-- - t.clearLineToRight() - } - - if _, err = t.c.Write(t.outBuf); err != nil { - return - } - t.outBuf = t.outBuf[:0] - - if n, err = writeWithCRLF(t.c, buf); err != nil { - return - } - - t.writeLine(t.prompt) - if t.echo { - t.writeLine(t.line) - } - - t.moveCursorToPos(t.pos) - - if _, err = t.c.Write(t.outBuf); err != nil { - return - } - t.outBuf = t.outBuf[:0] - return -} - -// ReadPassword temporarily changes the prompt and reads a password, without -// echo, from the terminal. -func (t *Terminal) ReadPassword(prompt string) (line string, err error) { - t.lock.Lock() - defer t.lock.Unlock() - - oldPrompt := t.prompt - t.prompt = []rune(prompt) - t.echo = false - - line, err = t.readLine() - - t.prompt = oldPrompt - t.echo = true - - return -} - -// ReadLine returns a line of input from the terminal. -func (t *Terminal) ReadLine() (line string, err error) { - t.lock.Lock() - defer t.lock.Unlock() - - return t.readLine() -} - -func (t *Terminal) readLine() (line string, err error) { - // t.lock must be held at this point - - if t.cursorX == 0 && t.cursorY == 0 { - t.writeLine(t.prompt) - t.c.Write(t.outBuf) - t.outBuf = t.outBuf[:0] - } - - lineIsPasted := t.pasteActive - - for { - rest := t.remainder - lineOk := false - for !lineOk { - var key rune - key, rest = bytesToKey(rest, t.pasteActive) - if key == utf8.RuneError { - break - } - if !t.pasteActive { - if key == keyCtrlD { - if len(t.line) == 0 { - return "", io.EOF - } - } - if key == keyCtrlC { - return "", io.EOF - } - if key == keyPasteStart { - t.pasteActive = true - if len(t.line) == 0 { - lineIsPasted = true - } - continue - } - } else if key == keyPasteEnd { - t.pasteActive = false - continue - } - if !t.pasteActive { - lineIsPasted = false - } - line, lineOk = t.handleKey(key) - } - if len(rest) > 0 { - n := copy(t.inBuf[:], rest) - t.remainder = t.inBuf[:n] - } else { - t.remainder = nil - } - t.c.Write(t.outBuf) - t.outBuf = t.outBuf[:0] - if lineOk { - if t.echo { - t.historyIndex = -1 - t.history.Add(line) - } - if lineIsPasted { - err = ErrPasteIndicator - } - return - } - - // t.remainder is a slice at the beginning of t.inBuf - // containing a partial key sequence - readBuf := t.inBuf[len(t.remainder):] - var n int - - t.lock.Unlock() - n, err = t.c.Read(readBuf) - t.lock.Lock() - - if err != nil { - return - } - - t.remainder = t.inBuf[:n+len(t.remainder)] - } -} - -// SetPrompt sets the prompt to be used when reading subsequent lines. 
-func (t *Terminal) SetPrompt(prompt string) { - t.lock.Lock() - defer t.lock.Unlock() - - t.prompt = []rune(prompt) -} - -func (t *Terminal) clearAndRepaintLinePlusNPrevious(numPrevLines int) { - // Move cursor to column zero at the start of the line. - t.move(t.cursorY, 0, t.cursorX, 0) - t.cursorX, t.cursorY = 0, 0 - t.clearLineToRight() - for t.cursorY < numPrevLines { - // Move down a line - t.move(0, 1, 0, 0) - t.cursorY++ - t.clearLineToRight() - } - // Move back to beginning. - t.move(t.cursorY, 0, 0, 0) - t.cursorX, t.cursorY = 0, 0 - - t.queue(t.prompt) - t.advanceCursor(visualLength(t.prompt)) - t.writeLine(t.line) - t.moveCursorToPos(t.pos) -} - -func (t *Terminal) SetSize(width, height int) error { - t.lock.Lock() - defer t.lock.Unlock() - - if width == 0 { - width = 1 - } - - oldWidth := t.termWidth - t.termWidth, t.termHeight = width, height - - switch { - case width == oldWidth: - // If the width didn't change then nothing else needs to be - // done. - return nil - case len(t.line) == 0 && t.cursorX == 0 && t.cursorY == 0: - // If there is nothing on current line and no prompt printed, - // just do nothing - return nil - case width < oldWidth: - // Some terminals (e.g. xterm) will truncate lines that were - // too long when shinking. Others, (e.g. gnome-terminal) will - // attempt to wrap them. For the former, repainting t.maxLine - // works great, but that behaviour goes badly wrong in the case - // of the latter because they have doubled every full line. - - // We assume that we are working on a terminal that wraps lines - // and adjust the cursor position based on every previous line - // wrapping and turning into two. This causes the prompt on - // xterms to move upwards, which isn't great, but it avoids a - // huge mess with gnome-terminal. - if t.cursorX >= t.termWidth { - t.cursorX = t.termWidth - 1 - } - t.cursorY *= 2 - t.clearAndRepaintLinePlusNPrevious(t.maxLine * 2) - case width > oldWidth: - // If the terminal expands then our position calculations will - // be wrong in the future because we think the cursor is - // |t.pos| chars into the string, but there will be a gap at - // the end of any wrapped line. - // - // But the position will actually be correct until we move, so - // we can move back to the beginning and repaint everything. - t.clearAndRepaintLinePlusNPrevious(t.maxLine) - } - - _, err := t.c.Write(t.outBuf) - t.outBuf = t.outBuf[:0] - return err -} - -type pasteIndicatorError struct{} - -func (pasteIndicatorError) Error() string { - return "terminal: ErrPasteIndicator not correctly handled" + return term.NewTerminal(c, prompt) } // ErrPasteIndicator may be returned from ReadLine as the error, in addition // to valid line data. It indicates that bracketed paste mode is enabled and // that the returned line consists only of pasted data. Programs may wish to // interpret pasted data more literally than typed data. -var ErrPasteIndicator = pasteIndicatorError{} +var ErrPasteIndicator = term.ErrPasteIndicator -// SetBracketedPasteMode requests that the terminal bracket paste operations -// with markers. Not all terminals support this but, if it is supported, then -// enabling this mode will stop any autocomplete callback from running due to -// pastes. Additionally, any lines that are completely pasted will be returned -// from ReadLine with the error set to ErrPasteIndicator. 
-func (t *Terminal) SetBracketedPasteMode(on bool) { - if on { - io.WriteString(t.c, "\x1b[?2004h") - } else { - io.WriteString(t.c, "\x1b[?2004l") - } -} +// State contains the state of a terminal. +type State = term.State -// stRingBuffer is a ring buffer of strings. -type stRingBuffer struct { - // entries contains max elements. - entries []string - max int - // head contains the index of the element most recently added to the ring. - head int - // size contains the number of elements in the ring. - size int +// IsTerminal returns whether the given file descriptor is a terminal. +func IsTerminal(fd int) bool { + return term.IsTerminal(fd) } -func (s *stRingBuffer) Add(a string) { - if s.entries == nil { - const defaultNumEntries = 100 - s.entries = make([]string, defaultNumEntries) - s.max = defaultNumEntries - } +// ReadPassword reads a line of input from a terminal without local echo. This +// is commonly used for inputting passwords and other sensitive data. The slice +// returned does not include the \n. +func ReadPassword(fd int) ([]byte, error) { + return term.ReadPassword(fd) +} - s.head = (s.head + 1) % s.max - s.entries[s.head] = a - if s.size < s.max { - s.size++ - } +// MakeRaw puts the terminal connected to the given file descriptor into raw +// mode and returns the previous state of the terminal so that it can be +// restored. +func MakeRaw(fd int) (*State, error) { + return term.MakeRaw(fd) } -// NthPreviousEntry returns the value passed to the nth previous call to Add. -// If n is zero then the immediately prior value is returned, if one, then the -// next most recent, and so on. If such an element doesn't exist then ok is -// false. -func (s *stRingBuffer) NthPreviousEntry(n int) (value string, ok bool) { - if n >= s.size { - return "", false - } - index := s.head - n - if index < 0 { - index += s.max - } - return s.entries[index], true +// Restore restores the terminal connected to the given file descriptor to a +// previous state. +func Restore(fd int, oldState *State) error { + return term.Restore(fd, oldState) } -// readPasswordLine reads from reader until it finds \n or io.EOF. -// The slice returned does not include the \n. -// readPasswordLine also ignores any \r it finds. -// Windows uses \r as end of line. So, on Windows, readPasswordLine -// reads until it finds \r and ignores any \n it finds during processing. -func readPasswordLine(reader io.Reader) ([]byte, error) { - var buf [1]byte - var ret []byte +// GetState returns the current state of a terminal which may be useful to +// restore the terminal after a signal. +func GetState(fd int) (*State, error) { + return term.GetState(fd) +} - for { - n, err := reader.Read(buf[:]) - if n > 0 { - switch buf[0] { - case '\b': - if len(ret) > 0 { - ret = ret[:len(ret)-1] - } - case '\n': - if runtime.GOOS != "windows" { - return ret, nil - } - // otherwise ignore \n - case '\r': - if runtime.GOOS == "windows" { - return ret, nil - } - // otherwise ignore \r - default: - ret = append(ret, buf[0]) - } - continue - } - if err != nil { - if err == io.EOF && len(ret) > 0 { - return ret, nil - } - return ret, err - } - } +// GetSize returns the dimensions of the given terminal. +func GetSize(fd int) (width, height int, err error) { + return term.GetSize(fd) } diff --git a/vendor/golang.org/x/sys/plan9/asm.s b/vendor/golang.org/x/sys/plan9/asm.s new file mode 100644 index 0000000000..06449ebfa9 --- /dev/null +++ b/vendor/golang.org/x/sys/plan9/asm.s @@ -0,0 +1,8 @@ +// Copyright 2014 The Go Authors. All rights reserved. 
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+#include "textflag.h"
+
+TEXT ·use(SB),NOSPLIT,$0
+	RET
diff --git a/vendor/golang.org/x/sys/plan9/asm_plan9_386.s b/vendor/golang.org/x/sys/plan9/asm_plan9_386.s
new file mode 100644
index 0000000000..bc5cab1f34
--- /dev/null
+++ b/vendor/golang.org/x/sys/plan9/asm_plan9_386.s
@@ -0,0 +1,30 @@
+// Copyright 2009 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+#include "textflag.h"
+
+//
+// System call support for 386, Plan 9
+//
+
+// Just jump to package syscall's implementation for all these functions.
+// The runtime may know about them.
+
+TEXT ·Syscall(SB),NOSPLIT,$0-32
+	JMP	syscall·Syscall(SB)
+
+TEXT ·Syscall6(SB),NOSPLIT,$0-44
+	JMP	syscall·Syscall6(SB)
+
+TEXT ·RawSyscall(SB),NOSPLIT,$0-28
+	JMP	syscall·RawSyscall(SB)
+
+TEXT ·RawSyscall6(SB),NOSPLIT,$0-40
+	JMP	syscall·RawSyscall6(SB)
+
+TEXT ·seek(SB),NOSPLIT,$0-36
+	JMP	syscall·seek(SB)
+
+TEXT ·exit(SB),NOSPLIT,$4-4
+	JMP	syscall·exit(SB)
diff --git a/vendor/golang.org/x/sys/plan9/asm_plan9_amd64.s b/vendor/golang.org/x/sys/plan9/asm_plan9_amd64.s
new file mode 100644
index 0000000000..d3448e6750
--- /dev/null
+++ b/vendor/golang.org/x/sys/plan9/asm_plan9_amd64.s
@@ -0,0 +1,30 @@
+// Copyright 2009 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+#include "textflag.h"
+
+//
+// System call support for amd64, Plan 9
+//
+
+// Just jump to package syscall's implementation for all these functions.
+// The runtime may know about them.
+
+TEXT ·Syscall(SB),NOSPLIT,$0-64
+	JMP	syscall·Syscall(SB)
+
+TEXT ·Syscall6(SB),NOSPLIT,$0-88
+	JMP	syscall·Syscall6(SB)
+
+TEXT ·RawSyscall(SB),NOSPLIT,$0-56
+	JMP	syscall·RawSyscall(SB)
+
+TEXT ·RawSyscall6(SB),NOSPLIT,$0-80
+	JMP	syscall·RawSyscall6(SB)
+
+TEXT ·seek(SB),NOSPLIT,$0-56
+	JMP	syscall·seek(SB)
+
+TEXT ·exit(SB),NOSPLIT,$8-8
+	JMP	syscall·exit(SB)
diff --git a/vendor/golang.org/x/sys/plan9/asm_plan9_arm.s b/vendor/golang.org/x/sys/plan9/asm_plan9_arm.s
new file mode 100644
index 0000000000..afb7c0a9b9
--- /dev/null
+++ b/vendor/golang.org/x/sys/plan9/asm_plan9_arm.s
@@ -0,0 +1,25 @@
+// Copyright 2009 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+#include "textflag.h"
+
+// System call support for plan9 on arm
+
+// Just jump to package syscall's implementation for all these functions.
+// The runtime may know about them.
+ +TEXT ·Syscall(SB),NOSPLIT,$0-32 + JMP syscall·Syscall(SB) + +TEXT ·Syscall6(SB),NOSPLIT,$0-44 + JMP syscall·Syscall6(SB) + +TEXT ·RawSyscall(SB),NOSPLIT,$0-28 + JMP syscall·RawSyscall(SB) + +TEXT ·RawSyscall6(SB),NOSPLIT,$0-40 + JMP syscall·RawSyscall6(SB) + +TEXT ·seek(SB),NOSPLIT,$0-36 + JMP syscall·exit(SB) diff --git a/vendor/golang.org/x/sys/plan9/const_plan9.go b/vendor/golang.org/x/sys/plan9/const_plan9.go new file mode 100644 index 0000000000..b4e85a3a9d --- /dev/null +++ b/vendor/golang.org/x/sys/plan9/const_plan9.go @@ -0,0 +1,70 @@ +package plan9 + +// Plan 9 Constants + +// Open modes +const ( + O_RDONLY = 0 + O_WRONLY = 1 + O_RDWR = 2 + O_TRUNC = 16 + O_CLOEXEC = 32 + O_EXCL = 0x1000 +) + +// Rfork flags +const ( + RFNAMEG = 1 << 0 + RFENVG = 1 << 1 + RFFDG = 1 << 2 + RFNOTEG = 1 << 3 + RFPROC = 1 << 4 + RFMEM = 1 << 5 + RFNOWAIT = 1 << 6 + RFCNAMEG = 1 << 10 + RFCENVG = 1 << 11 + RFCFDG = 1 << 12 + RFREND = 1 << 13 + RFNOMNT = 1 << 14 +) + +// Qid.Type bits +const ( + QTDIR = 0x80 + QTAPPEND = 0x40 + QTEXCL = 0x20 + QTMOUNT = 0x10 + QTAUTH = 0x08 + QTTMP = 0x04 + QTFILE = 0x00 +) + +// Dir.Mode bits +const ( + DMDIR = 0x80000000 + DMAPPEND = 0x40000000 + DMEXCL = 0x20000000 + DMMOUNT = 0x10000000 + DMAUTH = 0x08000000 + DMTMP = 0x04000000 + DMREAD = 0x4 + DMWRITE = 0x2 + DMEXEC = 0x1 +) + +const ( + STATMAX = 65535 + ERRMAX = 128 + STATFIXLEN = 49 +) + +// Mount and bind flags +const ( + MREPL = 0x0000 + MBEFORE = 0x0001 + MAFTER = 0x0002 + MORDER = 0x0003 + MCREATE = 0x0004 + MCACHE = 0x0010 + MMASK = 0x0017 +) diff --git a/vendor/golang.org/x/sys/plan9/dir_plan9.go b/vendor/golang.org/x/sys/plan9/dir_plan9.go new file mode 100644 index 0000000000..0955e0c53e --- /dev/null +++ b/vendor/golang.org/x/sys/plan9/dir_plan9.go @@ -0,0 +1,212 @@ +// Copyright 2012 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Plan 9 directory marshalling. See intro(5). + +package plan9 + +import "errors" + +var ( + ErrShortStat = errors.New("stat buffer too short") + ErrBadStat = errors.New("malformed stat buffer") + ErrBadName = errors.New("bad character in file name") +) + +// A Qid represents a 9P server's unique identification for a file. +type Qid struct { + Path uint64 // the file server's unique identification for the file + Vers uint32 // version number for given Path + Type uint8 // the type of the file (plan9.QTDIR for example) +} + +// A Dir contains the metadata for a file. +type Dir struct { + // system-modified data + Type uint16 // server type + Dev uint32 // server subtype + + // file data + Qid Qid // unique id from server + Mode uint32 // permissions + Atime uint32 // last read time + Mtime uint32 // last write time + Length int64 // file length + Name string // last element of path + Uid string // owner name + Gid string // group name + Muid string // last modifier name +} + +var nullDir = Dir{ + Type: ^uint16(0), + Dev: ^uint32(0), + Qid: Qid{ + Path: ^uint64(0), + Vers: ^uint32(0), + Type: ^uint8(0), + }, + Mode: ^uint32(0), + Atime: ^uint32(0), + Mtime: ^uint32(0), + Length: ^int64(0), +} + +// Null assigns special "don't touch" values to members of d to +// avoid modifying them during plan9.Wstat. +func (d *Dir) Null() { *d = nullDir } + +// Marshal encodes a 9P stat message corresponding to d into b +// +// If there isn't enough space in b for a stat message, ErrShortStat is returned. 
+func (d *Dir) Marshal(b []byte) (n int, err error) { + n = STATFIXLEN + len(d.Name) + len(d.Uid) + len(d.Gid) + len(d.Muid) + if n > len(b) { + return n, ErrShortStat + } + + for _, c := range d.Name { + if c == '/' { + return n, ErrBadName + } + } + + b = pbit16(b, uint16(n)-2) + b = pbit16(b, d.Type) + b = pbit32(b, d.Dev) + b = pbit8(b, d.Qid.Type) + b = pbit32(b, d.Qid.Vers) + b = pbit64(b, d.Qid.Path) + b = pbit32(b, d.Mode) + b = pbit32(b, d.Atime) + b = pbit32(b, d.Mtime) + b = pbit64(b, uint64(d.Length)) + b = pstring(b, d.Name) + b = pstring(b, d.Uid) + b = pstring(b, d.Gid) + b = pstring(b, d.Muid) + + return n, nil +} + +// UnmarshalDir decodes a single 9P stat message from b and returns the resulting Dir. +// +// If b is too small to hold a valid stat message, ErrShortStat is returned. +// +// If the stat message itself is invalid, ErrBadStat is returned. +func UnmarshalDir(b []byte) (*Dir, error) { + if len(b) < STATFIXLEN { + return nil, ErrShortStat + } + size, buf := gbit16(b) + if len(b) != int(size)+2 { + return nil, ErrBadStat + } + b = buf + + var d Dir + d.Type, b = gbit16(b) + d.Dev, b = gbit32(b) + d.Qid.Type, b = gbit8(b) + d.Qid.Vers, b = gbit32(b) + d.Qid.Path, b = gbit64(b) + d.Mode, b = gbit32(b) + d.Atime, b = gbit32(b) + d.Mtime, b = gbit32(b) + + n, b := gbit64(b) + d.Length = int64(n) + + var ok bool + if d.Name, b, ok = gstring(b); !ok { + return nil, ErrBadStat + } + if d.Uid, b, ok = gstring(b); !ok { + return nil, ErrBadStat + } + if d.Gid, b, ok = gstring(b); !ok { + return nil, ErrBadStat + } + if d.Muid, b, ok = gstring(b); !ok { + return nil, ErrBadStat + } + + return &d, nil +} + +// pbit8 copies the 8-bit number v to b and returns the remaining slice of b. +func pbit8(b []byte, v uint8) []byte { + b[0] = byte(v) + return b[1:] +} + +// pbit16 copies the 16-bit number v to b in little-endian order and returns the remaining slice of b. +func pbit16(b []byte, v uint16) []byte { + b[0] = byte(v) + b[1] = byte(v >> 8) + return b[2:] +} + +// pbit32 copies the 32-bit number v to b in little-endian order and returns the remaining slice of b. +func pbit32(b []byte, v uint32) []byte { + b[0] = byte(v) + b[1] = byte(v >> 8) + b[2] = byte(v >> 16) + b[3] = byte(v >> 24) + return b[4:] +} + +// pbit64 copies the 64-bit number v to b in little-endian order and returns the remaining slice of b. +func pbit64(b []byte, v uint64) []byte { + b[0] = byte(v) + b[1] = byte(v >> 8) + b[2] = byte(v >> 16) + b[3] = byte(v >> 24) + b[4] = byte(v >> 32) + b[5] = byte(v >> 40) + b[6] = byte(v >> 48) + b[7] = byte(v >> 56) + return b[8:] +} + +// pstring copies the string s to b, prepending it with a 16-bit length in little-endian order, and +// returning the remaining slice of b.. +func pstring(b []byte, s string) []byte { + b = pbit16(b, uint16(len(s))) + n := copy(b, s) + return b[n:] +} + +// gbit8 reads an 8-bit number from b and returns it with the remaining slice of b. +func gbit8(b []byte) (uint8, []byte) { + return uint8(b[0]), b[1:] +} + +// gbit16 reads a 16-bit number in little-endian order from b and returns it with the remaining slice of b. +func gbit16(b []byte) (uint16, []byte) { + return uint16(b[0]) | uint16(b[1])<<8, b[2:] +} + +// gbit32 reads a 32-bit number in little-endian order from b and returns it with the remaining slice of b. 
+func gbit32(b []byte) (uint32, []byte) { + return uint32(b[0]) | uint32(b[1])<<8 | uint32(b[2])<<16 | uint32(b[3])<<24, b[4:] +} + +// gbit64 reads a 64-bit number in little-endian order from b and returns it with the remaining slice of b. +func gbit64(b []byte) (uint64, []byte) { + lo := uint32(b[0]) | uint32(b[1])<<8 | uint32(b[2])<<16 | uint32(b[3])<<24 + hi := uint32(b[4]) | uint32(b[5])<<8 | uint32(b[6])<<16 | uint32(b[7])<<24 + return uint64(lo) | uint64(hi)<<32, b[8:] +} + +// gstring reads a string from b, prefixed with a 16-bit length in little-endian order. +// It returns the string with the remaining slice of b and a boolean. If the length is +// greater than the number of bytes in b, the boolean will be false. +func gstring(b []byte) (string, []byte, bool) { + n, b := gbit16(b) + if int(n) > len(b) { + return "", b, false + } + return string(b[:n]), b[n:], true +} diff --git a/vendor/golang.org/x/sys/plan9/env_plan9.go b/vendor/golang.org/x/sys/plan9/env_plan9.go new file mode 100644 index 0000000000..8f1918004f --- /dev/null +++ b/vendor/golang.org/x/sys/plan9/env_plan9.go @@ -0,0 +1,31 @@ +// Copyright 2011 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Plan 9 environment variables. + +package plan9 + +import ( + "syscall" +) + +func Getenv(key string) (value string, found bool) { + return syscall.Getenv(key) +} + +func Setenv(key, value string) error { + return syscall.Setenv(key, value) +} + +func Clearenv() { + syscall.Clearenv() +} + +func Environ() []string { + return syscall.Environ() +} + +func Unsetenv(key string) error { + return syscall.Unsetenv(key) +} diff --git a/vendor/golang.org/x/sys/plan9/errors_plan9.go b/vendor/golang.org/x/sys/plan9/errors_plan9.go new file mode 100644 index 0000000000..65fe74d3ef --- /dev/null +++ b/vendor/golang.org/x/sys/plan9/errors_plan9.go @@ -0,0 +1,50 @@ +// Copyright 2011 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package plan9 + +import "syscall" + +// Constants +const ( + // Invented values to support what package os expects. + O_CREAT = 0x02000 + O_APPEND = 0x00400 + O_NOCTTY = 0x00000 + O_NONBLOCK = 0x00000 + O_SYNC = 0x00000 + O_ASYNC = 0x00000 + + S_IFMT = 0x1f000 + S_IFIFO = 0x1000 + S_IFCHR = 0x2000 + S_IFDIR = 0x4000 + S_IFBLK = 0x6000 + S_IFREG = 0x8000 + S_IFLNK = 0xa000 + S_IFSOCK = 0xc000 +) + +// Errors +var ( + EINVAL = syscall.NewError("bad arg in system call") + ENOTDIR = syscall.NewError("not a directory") + EISDIR = syscall.NewError("file is a directory") + ENOENT = syscall.NewError("file does not exist") + EEXIST = syscall.NewError("file already exists") + EMFILE = syscall.NewError("no free file descriptors") + EIO = syscall.NewError("i/o error") + ENAMETOOLONG = syscall.NewError("file name too long") + EINTR = syscall.NewError("interrupted") + EPERM = syscall.NewError("permission denied") + EBUSY = syscall.NewError("no free devices") + ETIMEDOUT = syscall.NewError("connection timed out") + EPLAN9 = syscall.NewError("not supported by plan 9") + + // The following errors do not correspond to any + // Plan 9 system messages. Invented to support + // what package os and others expect. 
+	EACCES = syscall.NewError("access permission denied")
+	EAFNOSUPPORT = syscall.NewError("address family not supported by protocol")
+)
diff --git a/vendor/golang.org/x/sys/plan9/mkall.sh b/vendor/golang.org/x/sys/plan9/mkall.sh
new file mode 100644
index 0000000000..1650fbcc74
--- /dev/null
+++ b/vendor/golang.org/x/sys/plan9/mkall.sh
@@ -0,0 +1,150 @@
+#!/usr/bin/env bash
+# Copyright 2009 The Go Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file.
+
+# The plan9 package provides access to the raw system call
+# interface of the underlying operating system. Porting Go to
+# a new architecture/operating system combination requires
+# some manual effort, though there are tools that automate
+# much of the process. The auto-generated files have names
+# beginning with z.
+#
+# This script runs or (given -n) prints suggested commands to generate z files
+# for the current system. Running those commands is not automatic.
+# This script is documentation more than anything else.
+#
+# * asm_${GOOS}_${GOARCH}.s
+#
+# This hand-written assembly file implements system call dispatch.
+# There are three entry points:
+#
+# func Syscall(trap, a1, a2, a3 uintptr) (r1, r2, err uintptr);
+# func Syscall6(trap, a1, a2, a3, a4, a5, a6 uintptr) (r1, r2, err uintptr);
+# func RawSyscall(trap, a1, a2, a3 uintptr) (r1, r2, err uintptr);
+#
+# The first and second are the standard ones; they differ only in
+# how many arguments can be passed to the kernel.
+# The third is for low-level use by the ForkExec wrapper;
+# unlike the first two, it does not call into the scheduler to
+# let it know that a system call is running.
+#
+# * syscall_${GOOS}.go
+#
+# This hand-written Go file implements system calls that need
+# special handling and lists "//sys" comments giving prototypes
+# for ones that can be auto-generated. Mksyscall reads those
+# comments to generate the stubs.
+#
+# * syscall_${GOOS}_${GOARCH}.go
+#
+# Same as syscall_${GOOS}.go except that it contains code specific
+# to ${GOOS} on one particular architecture.
+#
+# * types_${GOOS}.c
+#
+# This hand-written C file includes standard C headers and then
+# creates typedef or enum names beginning with a dollar sign
+# (use of $ in variable names is a gcc extension). The hardest
+# part about preparing this file is figuring out which headers to
+# include and which symbols need to be #defined to get the
+# actual data structures that pass through to the kernel system calls.
+# Some C libraries present alternate versions for binary compatibility
+# and translate them on the way in and out of system calls, but
+# there is almost always a #define that can get the real ones.
+# See types_darwin.c and types_linux.c for examples.
+#
+# * zerror_${GOOS}_${GOARCH}.go
+#
+# This machine-generated file defines the system's error numbers,
+# error strings, and signal numbers. The generator is "mkerrors.sh".
+# Usually no arguments are needed, but mkerrors.sh will pass its
+# arguments on to godefs.
+#
+# * zsyscall_${GOOS}_${GOARCH}.go
+#
+# Generated by mksyscall.pl; see syscall_${GOOS}.go above.
+#
+# * zsysnum_${GOOS}_${GOARCH}.go
+#
+# Generated by mksysnum_${GOOS}.
+#
+# * ztypes_${GOOS}_${GOARCH}.go
+#
+# Generated by godefs; see types_${GOOS}.c above.
+ +GOOSARCH="${GOOS}_${GOARCH}" + +# defaults +mksyscall="go run mksyscall.go" +mkerrors="./mkerrors.sh" +zerrors="zerrors_$GOOSARCH.go" +mksysctl="" +zsysctl="zsysctl_$GOOSARCH.go" +mksysnum= +mktypes= +run="sh" + +case "$1" in +-syscalls) + for i in zsyscall*go + do + sed 1q $i | sed 's;^// ;;' | sh > _$i && gofmt < _$i > $i + rm _$i + done + exit 0 + ;; +-n) + run="cat" + shift +esac + +case "$#" in +0) + ;; +*) + echo 'usage: mkall.sh [-n]' 1>&2 + exit 2 +esac + +case "$GOOSARCH" in +_* | *_ | _) + echo 'undefined $GOOS_$GOARCH:' "$GOOSARCH" 1>&2 + exit 1 + ;; +plan9_386) + mkerrors= + mksyscall="go run mksyscall.go -l32 -plan9 -tags plan9,386" + mksysnum="./mksysnum_plan9.sh /n/sources/plan9/sys/src/libc/9syscall/sys.h" + mktypes="XXX" + ;; +plan9_amd64) + mkerrors= + mksyscall="go run mksyscall.go -l32 -plan9 -tags plan9,amd64" + mksysnum="./mksysnum_plan9.sh /n/sources/plan9/sys/src/libc/9syscall/sys.h" + mktypes="XXX" + ;; +plan9_arm) + mkerrors= + mksyscall="go run mksyscall.go -l32 -plan9 -tags plan9,arm" + mksysnum="./mksysnum_plan9.sh /n/sources/plan9/sys/src/libc/9syscall/sys.h" + mktypes="XXX" + ;; +*) + echo 'unrecognized $GOOS_$GOARCH: ' "$GOOSARCH" 1>&2 + exit 1 + ;; +esac + +( + if [ -n "$mkerrors" ]; then echo "$mkerrors |gofmt >$zerrors"; fi + case "$GOOS" in + plan9) + syscall_goos="syscall_$GOOS.go" + if [ -n "$mksyscall" ]; then echo "$mksyscall $syscall_goos |gofmt >zsyscall_$GOOSARCH.go"; fi + ;; + esac + if [ -n "$mksysctl" ]; then echo "$mksysctl |gofmt >$zsysctl"; fi + if [ -n "$mksysnum" ]; then echo "$mksysnum |gofmt >zsysnum_$GOOSARCH.go"; fi + if [ -n "$mktypes" ]; then echo "$mktypes types_$GOOS.go |gofmt >ztypes_$GOOSARCH.go"; fi +) | $run diff --git a/vendor/golang.org/x/sys/plan9/mkerrors.sh b/vendor/golang.org/x/sys/plan9/mkerrors.sh new file mode 100644 index 0000000000..85309c4a5b --- /dev/null +++ b/vendor/golang.org/x/sys/plan9/mkerrors.sh @@ -0,0 +1,246 @@ +#!/usr/bin/env bash +# Copyright 2009 The Go Authors. All rights reserved. +# Use of this source code is governed by a BSD-style +# license that can be found in the LICENSE file. + +# Generate Go code listing errors and other #defined constant +# values (ENAMETOOLONG etc.), by asking the preprocessor +# about the definitions. + +unset LANG +export LC_ALL=C +export LC_CTYPE=C + +CC=${CC:-gcc} + +uname=$(uname) + +includes=' +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +' + +ccflags="$@" + +# Write go tool cgo -godefs input. 
+( + echo package plan9 + echo + echo '/*' + indirect="includes_$(uname)" + echo "${!indirect} $includes" + echo '*/' + echo 'import "C"' + echo + echo 'const (' + + # The gcc command line prints all the #defines + # it encounters while processing the input + echo "${!indirect} $includes" | $CC -x c - -E -dM $ccflags | + awk ' + $1 != "#define" || $2 ~ /\(/ || $3 == "" {next} + + $2 ~ /^E([ABCD]X|[BIS]P|[SD]I|S|FL)$/ {next} # 386 registers + $2 ~ /^(SIGEV_|SIGSTKSZ|SIGRT(MIN|MAX))/ {next} + $2 ~ /^(SCM_SRCRT)$/ {next} + $2 ~ /^(MAP_FAILED)$/ {next} + + $2 !~ /^ETH_/ && + $2 !~ /^EPROC_/ && + $2 !~ /^EQUIV_/ && + $2 !~ /^EXPR_/ && + $2 ~ /^E[A-Z0-9_]+$/ || + $2 ~ /^B[0-9_]+$/ || + $2 ~ /^V[A-Z0-9]+$/ || + $2 ~ /^CS[A-Z0-9]/ || + $2 ~ /^I(SIG|CANON|CRNL|EXTEN|MAXBEL|STRIP|UTF8)$/ || + $2 ~ /^IGN/ || + $2 ~ /^IX(ON|ANY|OFF)$/ || + $2 ~ /^IN(LCR|PCK)$/ || + $2 ~ /(^FLU?SH)|(FLU?SH$)/ || + $2 ~ /^C(LOCAL|READ)$/ || + $2 == "BRKINT" || + $2 == "HUPCL" || + $2 == "PENDIN" || + $2 == "TOSTOP" || + $2 ~ /^PAR/ || + $2 ~ /^SIG[^_]/ || + $2 ~ /^O[CNPFP][A-Z]+[^_][A-Z]+$/ || + $2 ~ /^IN_/ || + $2 ~ /^LOCK_(SH|EX|NB|UN)$/ || + $2 ~ /^(AF|SOCK|SO|SOL|IPPROTO|IP|IPV6|ICMP6|TCP|EVFILT|NOTE|EV|SHUT|PROT|MAP|PACKET|MSG|SCM|MCL|DT|MADV|PR)_/ || + $2 == "ICMPV6_FILTER" || + $2 == "SOMAXCONN" || + $2 == "NAME_MAX" || + $2 == "IFNAMSIZ" || + $2 ~ /^CTL_(MAXNAME|NET|QUERY)$/ || + $2 ~ /^SYSCTL_VERS/ || + $2 ~ /^(MS|MNT)_/ || + $2 ~ /^TUN(SET|GET|ATTACH|DETACH)/ || + $2 ~ /^(O|F|FD|NAME|S|PTRACE|PT)_/ || + $2 ~ /^LINUX_REBOOT_CMD_/ || + $2 ~ /^LINUX_REBOOT_MAGIC[12]$/ || + $2 !~ "NLA_TYPE_MASK" && + $2 ~ /^(NETLINK|NLM|NLMSG|NLA|IFA|IFAN|RT|RTCF|RTN|RTPROT|RTNH|ARPHRD|ETH_P)_/ || + $2 ~ /^SIOC/ || + $2 ~ /^TIOC/ || + $2 !~ "RTF_BITS" && + $2 ~ /^(IFF|IFT|NET_RT|RTM|RTF|RTV|RTA|RTAX)_/ || + $2 ~ /^BIOC/ || + $2 ~ /^RUSAGE_(SELF|CHILDREN|THREAD)/ || + $2 ~ /^RLIMIT_(AS|CORE|CPU|DATA|FSIZE|NOFILE|STACK)|RLIM_INFINITY/ || + $2 ~ /^PRIO_(PROCESS|PGRP|USER)/ || + $2 ~ /^CLONE_[A-Z_]+/ || + $2 !~ /^(BPF_TIMEVAL)$/ && + $2 ~ /^(BPF|DLT)_/ || + $2 !~ "WMESGLEN" && + $2 ~ /^W[A-Z0-9]+$/ {printf("\t%s = C.%s\n", $2, $2)} + $2 ~ /^__WCOREFLAG$/ {next} + $2 ~ /^__W[A-Z0-9]+$/ {printf("\t%s = C.%s\n", substr($2,3), $2)} + + {next} + ' | sort + + echo ')' +) >_const.go + +# Pull out the error names for later. +errors=$( + echo '#include ' | $CC -x c - -E -dM $ccflags | + awk '$1=="#define" && $2 ~ /^E[A-Z0-9_]+$/ { print $2 }' | + sort +) + +# Pull out the signal names for later. +signals=$( + echo '#include ' | $CC -x c - -E -dM $ccflags | + awk '$1=="#define" && $2 ~ /^SIG[A-Z0-9]+$/ { print $2 }' | + egrep -v '(SIGSTKSIZE|SIGSTKSZ|SIGRT)' | + sort +) + +# Again, writing regexps to a file. +echo '#include ' | $CC -x c - -E -dM $ccflags | + awk '$1=="#define" && $2 ~ /^E[A-Z0-9_]+$/ { print "^\t" $2 "[ \t]*=" }' | + sort >_error.grep +echo '#include ' | $CC -x c - -E -dM $ccflags | + awk '$1=="#define" && $2 ~ /^SIG[A-Z0-9]+$/ { print "^\t" $2 "[ \t]*=" }' | + egrep -v '(SIGSTKSIZE|SIGSTKSZ|SIGRT)' | + sort >_signal.grep + +echo '// mkerrors.sh' "$@" +echo '// Code generated by the command above; DO NOT EDIT.' +echo +go tool cgo -godefs -- "$@" _const.go >_error.out +cat _error.out | grep -vf _error.grep | grep -vf _signal.grep +echo +echo '// Errors' +echo 'const (' +cat _error.out | grep -f _error.grep | sed 's/=\(.*\)/= Errno(\1)/' +echo ')' + +echo +echo '// Signals' +echo 'const (' +cat _error.out | grep -f _signal.grep | sed 's/=\(.*\)/= Signal(\1)/' +echo ')' + +# Run C program to print error and syscall strings. 
+( + echo -E " +#include +#include +#include +#include +#include +#include + +#define nelem(x) (sizeof(x)/sizeof((x)[0])) + +enum { A = 'A', Z = 'Z', a = 'a', z = 'z' }; // avoid need for single quotes below + +int errors[] = { +" + for i in $errors + do + echo -E ' '$i, + done + + echo -E " +}; + +int signals[] = { +" + for i in $signals + do + echo -E ' '$i, + done + + # Use -E because on some systems bash builtin interprets \n itself. + echo -E ' +}; + +static int +intcmp(const void *a, const void *b) +{ + return *(int*)a - *(int*)b; +} + +int +main(void) +{ + int i, j, e; + char buf[1024], *p; + + printf("\n\n// Error table\n"); + printf("var errors = [...]string {\n"); + qsort(errors, nelem(errors), sizeof errors[0], intcmp); + for(i=0; i 0 && errors[i-1] == e) + continue; + strcpy(buf, strerror(e)); + // lowercase first letter: Bad -> bad, but STREAM -> STREAM. + if(A <= buf[0] && buf[0] <= Z && a <= buf[1] && buf[1] <= z) + buf[0] += a - A; + printf("\t%d: \"%s\",\n", e, buf); + } + printf("}\n\n"); + + printf("\n\n// Signal table\n"); + printf("var signals = [...]string {\n"); + qsort(signals, nelem(signals), sizeof signals[0], intcmp); + for(i=0; i 0 && signals[i-1] == e) + continue; + strcpy(buf, strsignal(e)); + // lowercase first letter: Bad -> bad, but STREAM -> STREAM. + if(A <= buf[0] && buf[0] <= Z && a <= buf[1] && buf[1] <= z) + buf[0] += a - A; + // cut trailing : number. + p = strrchr(buf, ":"[0]); + if(p) + *p = '\0'; + printf("\t%d: \"%s\",\n", e, buf); + } + printf("}\n\n"); + + return 0; +} + +' +) >_errors.c + +$CC $ccflags -o _errors _errors.c && $GORUN ./_errors && rm -f _errors.c _errors _const.go _error.grep _signal.grep _error.out diff --git a/vendor/golang.org/x/sys/plan9/mksysnum_plan9.sh b/vendor/golang.org/x/sys/plan9/mksysnum_plan9.sh new file mode 100644 index 0000000000..3c3ab05810 --- /dev/null +++ b/vendor/golang.org/x/sys/plan9/mksysnum_plan9.sh @@ -0,0 +1,23 @@ +#!/bin/sh +# Copyright 2009 The Go Authors. All rights reserved. +# Use of this source code is governed by a BSD-style +# license that can be found in the LICENSE file. + +COMMAND="mksysnum_plan9.sh $@" + +cat <= 10 { + buf[i] = byte(val%10 + '0') + i-- + val /= 10 + } + buf[i] = byte(val + '0') + return string(buf[i:]) +} diff --git a/vendor/golang.org/x/sys/plan9/syscall.go b/vendor/golang.org/x/sys/plan9/syscall.go new file mode 100644 index 0000000000..e7363a2f54 --- /dev/null +++ b/vendor/golang.org/x/sys/plan9/syscall.go @@ -0,0 +1,116 @@ +// Copyright 2009 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// +build plan9 + +// Package plan9 contains an interface to the low-level operating system +// primitives. OS details vary depending on the underlying system, and +// by default, godoc will display the OS-specific documentation for the current +// system. If you want godoc to display documentation for another +// system, set $GOOS and $GOARCH to the desired system. For example, if +// you want to view documentation for freebsd/arm on linux/amd64, set $GOOS +// to freebsd and $GOARCH to arm. +// +// The primary use of this package is inside other packages that provide a more +// portable interface to the system, such as "os", "time" and "net". Use +// those packages rather than this one if you can. +// +// For details of the functions and data types in this package consult +// the manuals for the appropriate operating system. 
+// +// These calls return err == nil to indicate success; otherwise +// err represents an operating system error describing the failure and +// holds a value of type syscall.ErrorString. +package plan9 // import "golang.org/x/sys/plan9" + +import ( + "bytes" + "strings" + "unsafe" + + "golang.org/x/sys/internal/unsafeheader" +) + +// ByteSliceFromString returns a NUL-terminated slice of bytes +// containing the text of s. If s contains a NUL byte at any +// location, it returns (nil, EINVAL). +func ByteSliceFromString(s string) ([]byte, error) { + if strings.IndexByte(s, 0) != -1 { + return nil, EINVAL + } + a := make([]byte, len(s)+1) + copy(a, s) + return a, nil +} + +// BytePtrFromString returns a pointer to a NUL-terminated array of +// bytes containing the text of s. If s contains a NUL byte at any +// location, it returns (nil, EINVAL). +func BytePtrFromString(s string) (*byte, error) { + a, err := ByteSliceFromString(s) + if err != nil { + return nil, err + } + return &a[0], nil +} + +// ByteSliceToString returns a string form of the text represented by the slice s, with a terminating NUL and any +// bytes after the NUL removed. +func ByteSliceToString(s []byte) string { + if i := bytes.IndexByte(s, 0); i != -1 { + s = s[:i] + } + return string(s) +} + +// BytePtrToString takes a pointer to a sequence of text and returns the corresponding string. +// If the pointer is nil, it returns the empty string. It assumes that the text sequence is terminated +// at a zero byte; if the zero byte is not present, the program may crash. +func BytePtrToString(p *byte) string { + if p == nil { + return "" + } + if *p == 0 { + return "" + } + + // Find NUL terminator. + n := 0 + for ptr := unsafe.Pointer(p); *(*byte)(ptr) != 0; n++ { + ptr = unsafe.Pointer(uintptr(ptr) + 1) + } + + var s []byte + h := (*unsafeheader.Slice)(unsafe.Pointer(&s)) + h.Data = unsafe.Pointer(p) + h.Len = n + h.Cap = n + + return string(s) +} + +// Single-word zero for use when we need a valid pointer to 0 bytes. +// See mksyscall.pl. +var _zero uintptr + +func (ts *Timespec) Unix() (sec int64, nsec int64) { + return int64(ts.Sec), int64(ts.Nsec) +} + +func (tv *Timeval) Unix() (sec int64, nsec int64) { + return int64(tv.Sec), int64(tv.Usec) * 1000 +} + +func (ts *Timespec) Nano() int64 { + return int64(ts.Sec)*1e9 + int64(ts.Nsec) +} + +func (tv *Timeval) Nano() int64 { + return int64(tv.Sec)*1e9 + int64(tv.Usec)*1000 +} + +// use is a no-op, but the compiler cannot see that it is. +// Calling use(p) ensures that p is kept live until that point. +//go:noescape +func use(p unsafe.Pointer) diff --git a/vendor/golang.org/x/sys/plan9/syscall_plan9.go b/vendor/golang.org/x/sys/plan9/syscall_plan9.go new file mode 100644 index 0000000000..84e1471481 --- /dev/null +++ b/vendor/golang.org/x/sys/plan9/syscall_plan9.go @@ -0,0 +1,349 @@ +// Copyright 2011 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Plan 9 system calls. +// This file is compiled as ordinary Go code, +// but it is also input to mksyscall, +// which parses the //sys lines and generates system call stubs. +// Note that sometimes we use a lowercase //sys name and +// wrap it in our own nicer implementation. + +package plan9 + +import ( + "bytes" + "syscall" + "unsafe" +) + +// A Note is a string describing a process note. +// It implements the os.Signal interface. 
+type Note string + +func (n Note) Signal() {} + +func (n Note) String() string { + return string(n) +} + +var ( + Stdin = 0 + Stdout = 1 + Stderr = 2 +) + +// For testing: clients can set this flag to force +// creation of IPv6 sockets to return EAFNOSUPPORT. +var SocketDisableIPv6 bool + +func Syscall(trap, a1, a2, a3 uintptr) (r1, r2 uintptr, err syscall.ErrorString) +func Syscall6(trap, a1, a2, a3, a4, a5, a6 uintptr) (r1, r2 uintptr, err syscall.ErrorString) +func RawSyscall(trap, a1, a2, a3 uintptr) (r1, r2, err uintptr) +func RawSyscall6(trap, a1, a2, a3, a4, a5, a6 uintptr) (r1, r2, err uintptr) + +func atoi(b []byte) (n uint) { + n = 0 + for i := 0; i < len(b); i++ { + n = n*10 + uint(b[i]-'0') + } + return +} + +func cstring(s []byte) string { + i := bytes.IndexByte(s, 0) + if i == -1 { + i = len(s) + } + return string(s[:i]) +} + +func errstr() string { + var buf [ERRMAX]byte + + RawSyscall(SYS_ERRSTR, uintptr(unsafe.Pointer(&buf[0])), uintptr(len(buf)), 0) + + buf[len(buf)-1] = 0 + return cstring(buf[:]) +} + +// Implemented in assembly to import from runtime. +func exit(code int) + +func Exit(code int) { exit(code) } + +func readnum(path string) (uint, error) { + var b [12]byte + + fd, e := Open(path, O_RDONLY) + if e != nil { + return 0, e + } + defer Close(fd) + + n, e := Pread(fd, b[:], 0) + + if e != nil { + return 0, e + } + + m := 0 + for ; m < n && b[m] == ' '; m++ { + } + + return atoi(b[m : n-1]), nil +} + +func Getpid() (pid int) { + n, _ := readnum("#c/pid") + return int(n) +} + +func Getppid() (ppid int) { + n, _ := readnum("#c/ppid") + return int(n) +} + +func Read(fd int, p []byte) (n int, err error) { + return Pread(fd, p, -1) +} + +func Write(fd int, p []byte) (n int, err error) { + return Pwrite(fd, p, -1) +} + +var ioSync int64 + +//sys fd2path(fd int, buf []byte) (err error) +func Fd2path(fd int) (path string, err error) { + var buf [512]byte + + e := fd2path(fd, buf[:]) + if e != nil { + return "", e + } + return cstring(buf[:]), nil +} + +//sys pipe(p *[2]int32) (err error) +func Pipe(p []int) (err error) { + if len(p) != 2 { + return syscall.ErrorString("bad arg in system call") + } + var pp [2]int32 + err = pipe(&pp) + p[0] = int(pp[0]) + p[1] = int(pp[1]) + return +} + +// Underlying system call writes to newoffset via pointer. +// Implemented in assembly to avoid allocation. 
+func seek(placeholder uintptr, fd int, offset int64, whence int) (newoffset int64, err string) + +func Seek(fd int, offset int64, whence int) (newoffset int64, err error) { + newoffset, e := seek(0, fd, offset, whence) + + if newoffset == -1 { + err = syscall.ErrorString(e) + } + return +} + +func Mkdir(path string, mode uint32) (err error) { + fd, err := Create(path, O_RDONLY, DMDIR|mode) + + if fd != -1 { + Close(fd) + } + + return +} + +type Waitmsg struct { + Pid int + Time [3]uint32 + Msg string +} + +func (w Waitmsg) Exited() bool { return true } +func (w Waitmsg) Signaled() bool { return false } + +func (w Waitmsg) ExitStatus() int { + if len(w.Msg) == 0 { + // a normal exit returns no message + return 0 + } + return 1 +} + +//sys await(s []byte) (n int, err error) +func Await(w *Waitmsg) (err error) { + var buf [512]byte + var f [5][]byte + + n, err := await(buf[:]) + + if err != nil || w == nil { + return + } + + nf := 0 + p := 0 + for i := 0; i < n && nf < len(f)-1; i++ { + if buf[i] == ' ' { + f[nf] = buf[p:i] + p = i + 1 + nf++ + } + } + f[nf] = buf[p:] + nf++ + + if nf != len(f) { + return syscall.ErrorString("invalid wait message") + } + w.Pid = int(atoi(f[0])) + w.Time[0] = uint32(atoi(f[1])) + w.Time[1] = uint32(atoi(f[2])) + w.Time[2] = uint32(atoi(f[3])) + w.Msg = cstring(f[4]) + if w.Msg == "''" { + // await() returns '' for no error + w.Msg = "" + } + return +} + +func Unmount(name, old string) (err error) { + fixwd() + oldp, err := BytePtrFromString(old) + if err != nil { + return err + } + oldptr := uintptr(unsafe.Pointer(oldp)) + + var r0 uintptr + var e syscall.ErrorString + + // bind(2) man page: If name is zero, everything bound or mounted upon old is unbound or unmounted. + if name == "" { + r0, _, e = Syscall(SYS_UNMOUNT, _zero, oldptr, 0) + } else { + namep, err := BytePtrFromString(name) + if err != nil { + return err + } + r0, _, e = Syscall(SYS_UNMOUNT, uintptr(unsafe.Pointer(namep)), oldptr, 0) + } + + if int32(r0) == -1 { + err = e + } + return +} + +func Fchdir(fd int) (err error) { + path, err := Fd2path(fd) + + if err != nil { + return + } + + return Chdir(path) +} + +type Timespec struct { + Sec int32 + Nsec int32 +} + +type Timeval struct { + Sec int32 + Usec int32 +} + +func NsecToTimeval(nsec int64) (tv Timeval) { + nsec += 999 // round up to microsecond + tv.Usec = int32(nsec % 1e9 / 1e3) + tv.Sec = int32(nsec / 1e9) + return +} + +func nsec() int64 { + var scratch int64 + + r0, _, _ := Syscall(SYS_NSEC, uintptr(unsafe.Pointer(&scratch)), 0, 0) + // TODO(aram): remove hack after I fix _nsec in the pc64 kernel. 
+ if r0 == 0 { + return scratch + } + return int64(r0) +} + +func Gettimeofday(tv *Timeval) error { + nsec := nsec() + *tv = NsecToTimeval(nsec) + return nil +} + +func Getpagesize() int { return 0x1000 } + +func Getegid() (egid int) { return -1 } +func Geteuid() (euid int) { return -1 } +func Getgid() (gid int) { return -1 } +func Getuid() (uid int) { return -1 } + +func Getgroups() (gids []int, err error) { + return make([]int, 0), nil +} + +//sys open(path string, mode int) (fd int, err error) +func Open(path string, mode int) (fd int, err error) { + fixwd() + return open(path, mode) +} + +//sys create(path string, mode int, perm uint32) (fd int, err error) +func Create(path string, mode int, perm uint32) (fd int, err error) { + fixwd() + return create(path, mode, perm) +} + +//sys remove(path string) (err error) +func Remove(path string) error { + fixwd() + return remove(path) +} + +//sys stat(path string, edir []byte) (n int, err error) +func Stat(path string, edir []byte) (n int, err error) { + fixwd() + return stat(path, edir) +} + +//sys bind(name string, old string, flag int) (err error) +func Bind(name string, old string, flag int) (err error) { + fixwd() + return bind(name, old, flag) +} + +//sys mount(fd int, afd int, old string, flag int, aname string) (err error) +func Mount(fd int, afd int, old string, flag int, aname string) (err error) { + fixwd() + return mount(fd, afd, old, flag, aname) +} + +//sys wstat(path string, edir []byte) (err error) +func Wstat(path string, edir []byte) (err error) { + fixwd() + return wstat(path, edir) +} + +//sys chdir(path string) (err error) +//sys Dup(oldfd int, newfd int) (fd int, err error) +//sys Pread(fd int, p []byte, offset int64) (n int, err error) +//sys Pwrite(fd int, p []byte, offset int64) (n int, err error) +//sys Close(fd int) (err error) +//sys Fstat(fd int, edir []byte) (n int, err error) +//sys Fwstat(fd int, edir []byte) (err error) diff --git a/vendor/golang.org/x/sys/plan9/zsyscall_plan9_386.go b/vendor/golang.org/x/sys/plan9/zsyscall_plan9_386.go new file mode 100644 index 0000000000..6819bc2094 --- /dev/null +++ b/vendor/golang.org/x/sys/plan9/zsyscall_plan9_386.go @@ -0,0 +1,284 @@ +// go run mksyscall.go -l32 -plan9 -tags plan9,386 syscall_plan9.go +// Code generated by the command above; see README.md. DO NOT EDIT. 
+ +// +build plan9,386 + +package plan9 + +import "unsafe" + +// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT + +func fd2path(fd int, buf []byte) (err error) { + var _p0 unsafe.Pointer + if len(buf) > 0 { + _p0 = unsafe.Pointer(&buf[0]) + } else { + _p0 = unsafe.Pointer(&_zero) + } + r0, _, e1 := Syscall(SYS_FD2PATH, uintptr(fd), uintptr(_p0), uintptr(len(buf))) + if int32(r0) == -1 { + err = e1 + } + return +} + +// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT + +func pipe(p *[2]int32) (err error) { + r0, _, e1 := Syscall(SYS_PIPE, uintptr(unsafe.Pointer(p)), 0, 0) + if int32(r0) == -1 { + err = e1 + } + return +} + +// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT + +func await(s []byte) (n int, err error) { + var _p0 unsafe.Pointer + if len(s) > 0 { + _p0 = unsafe.Pointer(&s[0]) + } else { + _p0 = unsafe.Pointer(&_zero) + } + r0, _, e1 := Syscall(SYS_AWAIT, uintptr(_p0), uintptr(len(s)), 0) + n = int(r0) + if int32(r0) == -1 { + err = e1 + } + return +} + +// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT + +func open(path string, mode int) (fd int, err error) { + var _p0 *byte + _p0, err = BytePtrFromString(path) + if err != nil { + return + } + r0, _, e1 := Syscall(SYS_OPEN, uintptr(unsafe.Pointer(_p0)), uintptr(mode), 0) + fd = int(r0) + if int32(r0) == -1 { + err = e1 + } + return +} + +// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT + +func create(path string, mode int, perm uint32) (fd int, err error) { + var _p0 *byte + _p0, err = BytePtrFromString(path) + if err != nil { + return + } + r0, _, e1 := Syscall(SYS_CREATE, uintptr(unsafe.Pointer(_p0)), uintptr(mode), uintptr(perm)) + fd = int(r0) + if int32(r0) == -1 { + err = e1 + } + return +} + +// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT + +func remove(path string) (err error) { + var _p0 *byte + _p0, err = BytePtrFromString(path) + if err != nil { + return + } + r0, _, e1 := Syscall(SYS_REMOVE, uintptr(unsafe.Pointer(_p0)), 0, 0) + if int32(r0) == -1 { + err = e1 + } + return +} + +// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT + +func stat(path string, edir []byte) (n int, err error) { + var _p0 *byte + _p0, err = BytePtrFromString(path) + if err != nil { + return + } + var _p1 unsafe.Pointer + if len(edir) > 0 { + _p1 = unsafe.Pointer(&edir[0]) + } else { + _p1 = unsafe.Pointer(&_zero) + } + r0, _, e1 := Syscall(SYS_STAT, uintptr(unsafe.Pointer(_p0)), uintptr(_p1), uintptr(len(edir))) + n = int(r0) + if int32(r0) == -1 { + err = e1 + } + return +} + +// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT + +func bind(name string, old string, flag int) (err error) { + var _p0 *byte + _p0, err = BytePtrFromString(name) + if err != nil { + return + } + var _p1 *byte + _p1, err = BytePtrFromString(old) + if err != nil { + return + } + r0, _, e1 := Syscall(SYS_BIND, uintptr(unsafe.Pointer(_p0)), uintptr(unsafe.Pointer(_p1)), uintptr(flag)) + if int32(r0) == -1 { + err = e1 + } + return +} + +// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT + +func mount(fd int, afd int, old string, flag int, aname string) (err error) { + var _p0 *byte + _p0, err = BytePtrFromString(old) + if err != nil { + return + } + var _p1 *byte + _p1, err = BytePtrFromString(aname) + if err != nil { + return + } + r0, _, e1 := Syscall6(SYS_MOUNT, uintptr(fd), uintptr(afd), uintptr(unsafe.Pointer(_p0)), uintptr(flag), uintptr(unsafe.Pointer(_p1)), 0) + if int32(r0) == -1 { + err = e1 + } + return +} + +// THIS FILE 
IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT + +func wstat(path string, edir []byte) (err error) { + var _p0 *byte + _p0, err = BytePtrFromString(path) + if err != nil { + return + } + var _p1 unsafe.Pointer + if len(edir) > 0 { + _p1 = unsafe.Pointer(&edir[0]) + } else { + _p1 = unsafe.Pointer(&_zero) + } + r0, _, e1 := Syscall(SYS_WSTAT, uintptr(unsafe.Pointer(_p0)), uintptr(_p1), uintptr(len(edir))) + if int32(r0) == -1 { + err = e1 + } + return +} + +// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT + +func chdir(path string) (err error) { + var _p0 *byte + _p0, err = BytePtrFromString(path) + if err != nil { + return + } + r0, _, e1 := Syscall(SYS_CHDIR, uintptr(unsafe.Pointer(_p0)), 0, 0) + if int32(r0) == -1 { + err = e1 + } + return +} + +// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT + +func Dup(oldfd int, newfd int) (fd int, err error) { + r0, _, e1 := Syscall(SYS_DUP, uintptr(oldfd), uintptr(newfd), 0) + fd = int(r0) + if int32(r0) == -1 { + err = e1 + } + return +} + +// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT + +func Pread(fd int, p []byte, offset int64) (n int, err error) { + var _p0 unsafe.Pointer + if len(p) > 0 { + _p0 = unsafe.Pointer(&p[0]) + } else { + _p0 = unsafe.Pointer(&_zero) + } + r0, _, e1 := Syscall6(SYS_PREAD, uintptr(fd), uintptr(_p0), uintptr(len(p)), uintptr(offset), uintptr(offset>>32), 0) + n = int(r0) + if int32(r0) == -1 { + err = e1 + } + return +} + +// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT + +func Pwrite(fd int, p []byte, offset int64) (n int, err error) { + var _p0 unsafe.Pointer + if len(p) > 0 { + _p0 = unsafe.Pointer(&p[0]) + } else { + _p0 = unsafe.Pointer(&_zero) + } + r0, _, e1 := Syscall6(SYS_PWRITE, uintptr(fd), uintptr(_p0), uintptr(len(p)), uintptr(offset), uintptr(offset>>32), 0) + n = int(r0) + if int32(r0) == -1 { + err = e1 + } + return +} + +// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT + +func Close(fd int) (err error) { + r0, _, e1 := Syscall(SYS_CLOSE, uintptr(fd), 0, 0) + if int32(r0) == -1 { + err = e1 + } + return +} + +// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT + +func Fstat(fd int, edir []byte) (n int, err error) { + var _p0 unsafe.Pointer + if len(edir) > 0 { + _p0 = unsafe.Pointer(&edir[0]) + } else { + _p0 = unsafe.Pointer(&_zero) + } + r0, _, e1 := Syscall(SYS_FSTAT, uintptr(fd), uintptr(_p0), uintptr(len(edir))) + n = int(r0) + if int32(r0) == -1 { + err = e1 + } + return +} + +// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT + +func Fwstat(fd int, edir []byte) (err error) { + var _p0 unsafe.Pointer + if len(edir) > 0 { + _p0 = unsafe.Pointer(&edir[0]) + } else { + _p0 = unsafe.Pointer(&_zero) + } + r0, _, e1 := Syscall(SYS_FWSTAT, uintptr(fd), uintptr(_p0), uintptr(len(edir))) + if int32(r0) == -1 { + err = e1 + } + return +} diff --git a/vendor/golang.org/x/sys/plan9/zsyscall_plan9_amd64.go b/vendor/golang.org/x/sys/plan9/zsyscall_plan9_amd64.go new file mode 100644 index 0000000000..418abbbfc7 --- /dev/null +++ b/vendor/golang.org/x/sys/plan9/zsyscall_plan9_amd64.go @@ -0,0 +1,284 @@ +// go run mksyscall.go -l32 -plan9 -tags plan9,amd64 syscall_plan9.go +// Code generated by the command above; see README.md. DO NOT EDIT. 
+ +// +build plan9,amd64 + +package plan9 + +import "unsafe" + +// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT + +func fd2path(fd int, buf []byte) (err error) { + var _p0 unsafe.Pointer + if len(buf) > 0 { + _p0 = unsafe.Pointer(&buf[0]) + } else { + _p0 = unsafe.Pointer(&_zero) + } + r0, _, e1 := Syscall(SYS_FD2PATH, uintptr(fd), uintptr(_p0), uintptr(len(buf))) + if int32(r0) == -1 { + err = e1 + } + return +} + +// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT + +func pipe(p *[2]int32) (err error) { + r0, _, e1 := Syscall(SYS_PIPE, uintptr(unsafe.Pointer(p)), 0, 0) + if int32(r0) == -1 { + err = e1 + } + return +} + +// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT + +func await(s []byte) (n int, err error) { + var _p0 unsafe.Pointer + if len(s) > 0 { + _p0 = unsafe.Pointer(&s[0]) + } else { + _p0 = unsafe.Pointer(&_zero) + } + r0, _, e1 := Syscall(SYS_AWAIT, uintptr(_p0), uintptr(len(s)), 0) + n = int(r0) + if int32(r0) == -1 { + err = e1 + } + return +} + +// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT + +func open(path string, mode int) (fd int, err error) { + var _p0 *byte + _p0, err = BytePtrFromString(path) + if err != nil { + return + } + r0, _, e1 := Syscall(SYS_OPEN, uintptr(unsafe.Pointer(_p0)), uintptr(mode), 0) + fd = int(r0) + if int32(r0) == -1 { + err = e1 + } + return +} + +// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT + +func create(path string, mode int, perm uint32) (fd int, err error) { + var _p0 *byte + _p0, err = BytePtrFromString(path) + if err != nil { + return + } + r0, _, e1 := Syscall(SYS_CREATE, uintptr(unsafe.Pointer(_p0)), uintptr(mode), uintptr(perm)) + fd = int(r0) + if int32(r0) == -1 { + err = e1 + } + return +} + +// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT + +func remove(path string) (err error) { + var _p0 *byte + _p0, err = BytePtrFromString(path) + if err != nil { + return + } + r0, _, e1 := Syscall(SYS_REMOVE, uintptr(unsafe.Pointer(_p0)), 0, 0) + if int32(r0) == -1 { + err = e1 + } + return +} + +// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT + +func stat(path string, edir []byte) (n int, err error) { + var _p0 *byte + _p0, err = BytePtrFromString(path) + if err != nil { + return + } + var _p1 unsafe.Pointer + if len(edir) > 0 { + _p1 = unsafe.Pointer(&edir[0]) + } else { + _p1 = unsafe.Pointer(&_zero) + } + r0, _, e1 := Syscall(SYS_STAT, uintptr(unsafe.Pointer(_p0)), uintptr(_p1), uintptr(len(edir))) + n = int(r0) + if int32(r0) == -1 { + err = e1 + } + return +} + +// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT + +func bind(name string, old string, flag int) (err error) { + var _p0 *byte + _p0, err = BytePtrFromString(name) + if err != nil { + return + } + var _p1 *byte + _p1, err = BytePtrFromString(old) + if err != nil { + return + } + r0, _, e1 := Syscall(SYS_BIND, uintptr(unsafe.Pointer(_p0)), uintptr(unsafe.Pointer(_p1)), uintptr(flag)) + if int32(r0) == -1 { + err = e1 + } + return +} + +// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT + +func mount(fd int, afd int, old string, flag int, aname string) (err error) { + var _p0 *byte + _p0, err = BytePtrFromString(old) + if err != nil { + return + } + var _p1 *byte + _p1, err = BytePtrFromString(aname) + if err != nil { + return + } + r0, _, e1 := Syscall6(SYS_MOUNT, uintptr(fd), uintptr(afd), uintptr(unsafe.Pointer(_p0)), uintptr(flag), uintptr(unsafe.Pointer(_p1)), 0) + if int32(r0) == -1 { + err = e1 + } + return +} + +// THIS FILE 
IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT + +func wstat(path string, edir []byte) (err error) { + var _p0 *byte + _p0, err = BytePtrFromString(path) + if err != nil { + return + } + var _p1 unsafe.Pointer + if len(edir) > 0 { + _p1 = unsafe.Pointer(&edir[0]) + } else { + _p1 = unsafe.Pointer(&_zero) + } + r0, _, e1 := Syscall(SYS_WSTAT, uintptr(unsafe.Pointer(_p0)), uintptr(_p1), uintptr(len(edir))) + if int32(r0) == -1 { + err = e1 + } + return +} + +// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT + +func chdir(path string) (err error) { + var _p0 *byte + _p0, err = BytePtrFromString(path) + if err != nil { + return + } + r0, _, e1 := Syscall(SYS_CHDIR, uintptr(unsafe.Pointer(_p0)), 0, 0) + if int32(r0) == -1 { + err = e1 + } + return +} + +// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT + +func Dup(oldfd int, newfd int) (fd int, err error) { + r0, _, e1 := Syscall(SYS_DUP, uintptr(oldfd), uintptr(newfd), 0) + fd = int(r0) + if int32(r0) == -1 { + err = e1 + } + return +} + +// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT + +func Pread(fd int, p []byte, offset int64) (n int, err error) { + var _p0 unsafe.Pointer + if len(p) > 0 { + _p0 = unsafe.Pointer(&p[0]) + } else { + _p0 = unsafe.Pointer(&_zero) + } + r0, _, e1 := Syscall6(SYS_PREAD, uintptr(fd), uintptr(_p0), uintptr(len(p)), uintptr(offset), uintptr(offset>>32), 0) + n = int(r0) + if int32(r0) == -1 { + err = e1 + } + return +} + +// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT + +func Pwrite(fd int, p []byte, offset int64) (n int, err error) { + var _p0 unsafe.Pointer + if len(p) > 0 { + _p0 = unsafe.Pointer(&p[0]) + } else { + _p0 = unsafe.Pointer(&_zero) + } + r0, _, e1 := Syscall6(SYS_PWRITE, uintptr(fd), uintptr(_p0), uintptr(len(p)), uintptr(offset), uintptr(offset>>32), 0) + n = int(r0) + if int32(r0) == -1 { + err = e1 + } + return +} + +// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT + +func Close(fd int) (err error) { + r0, _, e1 := Syscall(SYS_CLOSE, uintptr(fd), 0, 0) + if int32(r0) == -1 { + err = e1 + } + return +} + +// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT + +func Fstat(fd int, edir []byte) (n int, err error) { + var _p0 unsafe.Pointer + if len(edir) > 0 { + _p0 = unsafe.Pointer(&edir[0]) + } else { + _p0 = unsafe.Pointer(&_zero) + } + r0, _, e1 := Syscall(SYS_FSTAT, uintptr(fd), uintptr(_p0), uintptr(len(edir))) + n = int(r0) + if int32(r0) == -1 { + err = e1 + } + return +} + +// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT + +func Fwstat(fd int, edir []byte) (err error) { + var _p0 unsafe.Pointer + if len(edir) > 0 { + _p0 = unsafe.Pointer(&edir[0]) + } else { + _p0 = unsafe.Pointer(&_zero) + } + r0, _, e1 := Syscall(SYS_FWSTAT, uintptr(fd), uintptr(_p0), uintptr(len(edir))) + if int32(r0) == -1 { + err = e1 + } + return +} diff --git a/vendor/golang.org/x/sys/plan9/zsyscall_plan9_arm.go b/vendor/golang.org/x/sys/plan9/zsyscall_plan9_arm.go new file mode 100644 index 0000000000..3e8a1a58ca --- /dev/null +++ b/vendor/golang.org/x/sys/plan9/zsyscall_plan9_arm.go @@ -0,0 +1,284 @@ +// go run mksyscall.go -l32 -plan9 -tags plan9,arm syscall_plan9.go +// Code generated by the command above; see README.md. DO NOT EDIT. 
+ +// +build plan9,arm + +package plan9 + +import "unsafe" + +// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT + +func fd2path(fd int, buf []byte) (err error) { + var _p0 unsafe.Pointer + if len(buf) > 0 { + _p0 = unsafe.Pointer(&buf[0]) + } else { + _p0 = unsafe.Pointer(&_zero) + } + r0, _, e1 := Syscall(SYS_FD2PATH, uintptr(fd), uintptr(_p0), uintptr(len(buf))) + if int32(r0) == -1 { + err = e1 + } + return +} + +// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT + +func pipe(p *[2]int32) (err error) { + r0, _, e1 := Syscall(SYS_PIPE, uintptr(unsafe.Pointer(p)), 0, 0) + if int32(r0) == -1 { + err = e1 + } + return +} + +// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT + +func await(s []byte) (n int, err error) { + var _p0 unsafe.Pointer + if len(s) > 0 { + _p0 = unsafe.Pointer(&s[0]) + } else { + _p0 = unsafe.Pointer(&_zero) + } + r0, _, e1 := Syscall(SYS_AWAIT, uintptr(_p0), uintptr(len(s)), 0) + n = int(r0) + if int32(r0) == -1 { + err = e1 + } + return +} + +// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT + +func open(path string, mode int) (fd int, err error) { + var _p0 *byte + _p0, err = BytePtrFromString(path) + if err != nil { + return + } + r0, _, e1 := Syscall(SYS_OPEN, uintptr(unsafe.Pointer(_p0)), uintptr(mode), 0) + fd = int(r0) + if int32(r0) == -1 { + err = e1 + } + return +} + +// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT + +func create(path string, mode int, perm uint32) (fd int, err error) { + var _p0 *byte + _p0, err = BytePtrFromString(path) + if err != nil { + return + } + r0, _, e1 := Syscall(SYS_CREATE, uintptr(unsafe.Pointer(_p0)), uintptr(mode), uintptr(perm)) + fd = int(r0) + if int32(r0) == -1 { + err = e1 + } + return +} + +// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT + +func remove(path string) (err error) { + var _p0 *byte + _p0, err = BytePtrFromString(path) + if err != nil { + return + } + r0, _, e1 := Syscall(SYS_REMOVE, uintptr(unsafe.Pointer(_p0)), 0, 0) + if int32(r0) == -1 { + err = e1 + } + return +} + +// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT + +func stat(path string, edir []byte) (n int, err error) { + var _p0 *byte + _p0, err = BytePtrFromString(path) + if err != nil { + return + } + var _p1 unsafe.Pointer + if len(edir) > 0 { + _p1 = unsafe.Pointer(&edir[0]) + } else { + _p1 = unsafe.Pointer(&_zero) + } + r0, _, e1 := Syscall(SYS_STAT, uintptr(unsafe.Pointer(_p0)), uintptr(_p1), uintptr(len(edir))) + n = int(r0) + if int32(r0) == -1 { + err = e1 + } + return +} + +// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT + +func bind(name string, old string, flag int) (err error) { + var _p0 *byte + _p0, err = BytePtrFromString(name) + if err != nil { + return + } + var _p1 *byte + _p1, err = BytePtrFromString(old) + if err != nil { + return + } + r0, _, e1 := Syscall(SYS_BIND, uintptr(unsafe.Pointer(_p0)), uintptr(unsafe.Pointer(_p1)), uintptr(flag)) + if int32(r0) == -1 { + err = e1 + } + return +} + +// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT + +func mount(fd int, afd int, old string, flag int, aname string) (err error) { + var _p0 *byte + _p0, err = BytePtrFromString(old) + if err != nil { + return + } + var _p1 *byte + _p1, err = BytePtrFromString(aname) + if err != nil { + return + } + r0, _, e1 := Syscall6(SYS_MOUNT, uintptr(fd), uintptr(afd), uintptr(unsafe.Pointer(_p0)), uintptr(flag), uintptr(unsafe.Pointer(_p1)), 0) + if int32(r0) == -1 { + err = e1 + } + return +} + +// THIS FILE 
IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT + +func wstat(path string, edir []byte) (err error) { + var _p0 *byte + _p0, err = BytePtrFromString(path) + if err != nil { + return + } + var _p1 unsafe.Pointer + if len(edir) > 0 { + _p1 = unsafe.Pointer(&edir[0]) + } else { + _p1 = unsafe.Pointer(&_zero) + } + r0, _, e1 := Syscall(SYS_WSTAT, uintptr(unsafe.Pointer(_p0)), uintptr(_p1), uintptr(len(edir))) + if int32(r0) == -1 { + err = e1 + } + return +} + +// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT + +func chdir(path string) (err error) { + var _p0 *byte + _p0, err = BytePtrFromString(path) + if err != nil { + return + } + r0, _, e1 := Syscall(SYS_CHDIR, uintptr(unsafe.Pointer(_p0)), 0, 0) + if int32(r0) == -1 { + err = e1 + } + return +} + +// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT + +func Dup(oldfd int, newfd int) (fd int, err error) { + r0, _, e1 := Syscall(SYS_DUP, uintptr(oldfd), uintptr(newfd), 0) + fd = int(r0) + if int32(r0) == -1 { + err = e1 + } + return +} + +// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT + +func Pread(fd int, p []byte, offset int64) (n int, err error) { + var _p0 unsafe.Pointer + if len(p) > 0 { + _p0 = unsafe.Pointer(&p[0]) + } else { + _p0 = unsafe.Pointer(&_zero) + } + r0, _, e1 := Syscall6(SYS_PREAD, uintptr(fd), uintptr(_p0), uintptr(len(p)), uintptr(offset), uintptr(offset>>32), 0) + n = int(r0) + if int32(r0) == -1 { + err = e1 + } + return +} + +// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT + +func Pwrite(fd int, p []byte, offset int64) (n int, err error) { + var _p0 unsafe.Pointer + if len(p) > 0 { + _p0 = unsafe.Pointer(&p[0]) + } else { + _p0 = unsafe.Pointer(&_zero) + } + r0, _, e1 := Syscall6(SYS_PWRITE, uintptr(fd), uintptr(_p0), uintptr(len(p)), uintptr(offset), uintptr(offset>>32), 0) + n = int(r0) + if int32(r0) == -1 { + err = e1 + } + return +} + +// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT + +func Close(fd int) (err error) { + r0, _, e1 := Syscall(SYS_CLOSE, uintptr(fd), 0, 0) + if int32(r0) == -1 { + err = e1 + } + return +} + +// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT + +func Fstat(fd int, edir []byte) (n int, err error) { + var _p0 unsafe.Pointer + if len(edir) > 0 { + _p0 = unsafe.Pointer(&edir[0]) + } else { + _p0 = unsafe.Pointer(&_zero) + } + r0, _, e1 := Syscall(SYS_FSTAT, uintptr(fd), uintptr(_p0), uintptr(len(edir))) + n = int(r0) + if int32(r0) == -1 { + err = e1 + } + return +} + +// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT + +func Fwstat(fd int, edir []byte) (err error) { + var _p0 unsafe.Pointer + if len(edir) > 0 { + _p0 = unsafe.Pointer(&edir[0]) + } else { + _p0 = unsafe.Pointer(&_zero) + } + r0, _, e1 := Syscall(SYS_FWSTAT, uintptr(fd), uintptr(_p0), uintptr(len(edir))) + if int32(r0) == -1 { + err = e1 + } + return +} diff --git a/vendor/golang.org/x/sys/plan9/zsysnum_plan9.go b/vendor/golang.org/x/sys/plan9/zsysnum_plan9.go new file mode 100644 index 0000000000..22e8abd43d --- /dev/null +++ b/vendor/golang.org/x/sys/plan9/zsysnum_plan9.go @@ -0,0 +1,49 @@ +// mksysnum_plan9.sh /opt/plan9/sys/src/libc/9syscall/sys.h +// MACHINE GENERATED BY THE ABOVE COMMAND; DO NOT EDIT + +package plan9 + +const ( + SYS_SYSR1 = 0 + SYS_BIND = 2 + SYS_CHDIR = 3 + SYS_CLOSE = 4 + SYS_DUP = 5 + SYS_ALARM = 6 + SYS_EXEC = 7 + SYS_EXITS = 8 + SYS_FAUTH = 10 + SYS_SEGBRK = 12 + SYS_OPEN = 14 + SYS_OSEEK = 16 + SYS_SLEEP = 17 + SYS_RFORK = 19 + SYS_PIPE = 21 + SYS_CREATE = 22 + SYS_FD2PATH = 23 + 
SYS_BRK_ = 24 + SYS_REMOVE = 25 + SYS_NOTIFY = 28 + SYS_NOTED = 29 + SYS_SEGATTACH = 30 + SYS_SEGDETACH = 31 + SYS_SEGFREE = 32 + SYS_SEGFLUSH = 33 + SYS_RENDEZVOUS = 34 + SYS_UNMOUNT = 35 + SYS_SEMACQUIRE = 37 + SYS_SEMRELEASE = 38 + SYS_SEEK = 39 + SYS_FVERSION = 40 + SYS_ERRSTR = 41 + SYS_STAT = 42 + SYS_FSTAT = 43 + SYS_WSTAT = 44 + SYS_FWSTAT = 45 + SYS_MOUNT = 46 + SYS_AWAIT = 47 + SYS_PREAD = 50 + SYS_PWRITE = 51 + SYS_TSEMACQUIRE = 52 + SYS_NSEC = 53 +) diff --git a/vendor/golang.org/x/term/AUTHORS b/vendor/golang.org/x/term/AUTHORS new file mode 100644 index 0000000000..15167cd746 --- /dev/null +++ b/vendor/golang.org/x/term/AUTHORS @@ -0,0 +1,3 @@ +# This source code refers to The Go Authors for copyright purposes. +# The master list of authors is in the main Go distribution, +# visible at http://tip.golang.org/AUTHORS. diff --git a/vendor/golang.org/x/term/CONTRIBUTING.md b/vendor/golang.org/x/term/CONTRIBUTING.md new file mode 100644 index 0000000000..d0485e887a --- /dev/null +++ b/vendor/golang.org/x/term/CONTRIBUTING.md @@ -0,0 +1,26 @@ +# Contributing to Go + +Go is an open source project. + +It is the work of hundreds of contributors. We appreciate your help! + +## Filing issues + +When [filing an issue](https://golang.org/issue/new), make sure to answer these five questions: + +1. What version of Go are you using (`go version`)? +2. What operating system and processor architecture are you using? +3. What did you do? +4. What did you expect to see? +5. What did you see instead? + +General questions should go to the [golang-nuts mailing list](https://groups.google.com/group/golang-nuts) instead of the issue tracker. +The gophers there will answer or ask you to file an issue if you've tripped over a bug. + +## Contributing code + +Please read the [Contribution Guidelines](https://golang.org/doc/contribute.html) +before sending patches. + +Unless otherwise noted, the Go source files are distributed under +the BSD-style license found in the LICENSE file. diff --git a/vendor/golang.org/x/term/CONTRIBUTORS b/vendor/golang.org/x/term/CONTRIBUTORS new file mode 100644 index 0000000000..1c4577e968 --- /dev/null +++ b/vendor/golang.org/x/term/CONTRIBUTORS @@ -0,0 +1,3 @@ +# This source code was written by the Go contributors. +# The master list of contributors is in the main Go distribution, +# visible at http://tip.golang.org/CONTRIBUTORS. diff --git a/vendor/golang.org/x/term/LICENSE b/vendor/golang.org/x/term/LICENSE new file mode 100644 index 0000000000..6a66aea5ea --- /dev/null +++ b/vendor/golang.org/x/term/LICENSE @@ -0,0 +1,27 @@ +Copyright (c) 2009 The Go Authors. All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright +notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above +copyright notice, this list of conditions and the following disclaimer +in the documentation and/or other materials provided with the +distribution. + * Neither the name of Google Inc. nor the names of its +contributors may be used to endorse or promote products derived from +this software without specific prior written permission. 
+ +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/vendor/golang.org/x/term/PATENTS b/vendor/golang.org/x/term/PATENTS new file mode 100644 index 0000000000..733099041f --- /dev/null +++ b/vendor/golang.org/x/term/PATENTS @@ -0,0 +1,22 @@ +Additional IP Rights Grant (Patents) + +"This implementation" means the copyrightable works distributed by +Google as part of the Go project. + +Google hereby grants to You a perpetual, worldwide, non-exclusive, +no-charge, royalty-free, irrevocable (except as stated in this section) +patent license to make, have made, use, offer to sell, sell, import, +transfer and otherwise run, modify and propagate the contents of this +implementation of Go, where such license applies only to those patent +claims, both currently owned or controlled by Google and acquired in +the future, licensable by Google that are necessarily infringed by this +implementation of Go. This grant does not include claims that would be +infringed only as a consequence of further modification of this +implementation. If you or your agent or exclusive licensee institute or +order or agree to the institution of patent litigation against any +entity (including a cross-claim or counterclaim in a lawsuit) alleging +that this implementation of Go or any code incorporated within this +implementation of Go constitutes direct or contributory patent +infringement, or inducement of patent infringement, then any patent +rights granted to you under this License for this implementation of Go +shall terminate as of the date such litigation is filed. diff --git a/vendor/golang.org/x/term/README.md b/vendor/golang.org/x/term/README.md new file mode 100644 index 0000000000..e0f390cb22 --- /dev/null +++ b/vendor/golang.org/x/term/README.md @@ -0,0 +1,17 @@ +# Go terminal/console support + +This repository provides Go terminal and console support packages. + +## Download/Install + +The easiest way to install is to run `go get -u golang.org/x/term`. You can +also manually git clone the repository to `$GOPATH/src/golang.org/x/term`. + +## Report Issues / Send Patches + +This repository uses Gerrit for code changes. To learn how to submit changes to +this repository, see https://golang.org/doc/contribute.html. + +The main issue tracker for the term repository is located at +https://github.com/golang/go/issues. Prefix your issue with "x/term:" in the +subject line, so it is easy to find. 
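The README above introduces golang.org/x/term; the files that follow in this diff define its public surface (`IsTerminal`, `MakeRaw`/`Restore`, `GetSize`, `ReadPassword`). Below is a brief, hedged usage sketch of that API on a supported platform; the prompt string and output formatting are illustrative, not part of the vendored code.

```go
package main

import (
	"fmt"
	"os"

	"golang.org/x/term"
)

func main() {
	fd := int(os.Stdin.Fd())
	if !term.IsTerminal(fd) {
		fmt.Fprintln(os.Stderr, "stdin is not a terminal")
		os.Exit(1)
	}

	// Visible dimensions of the terminal (scrollback height is not included).
	if width, height, err := term.GetSize(fd); err == nil {
		fmt.Printf("terminal size: %dx%d\n", width, height)
	}

	// Read a secret without echoing it; the returned slice has no trailing \n.
	fmt.Print("password: ")
	pw, err := term.ReadPassword(fd)
	fmt.Println()
	if err != nil {
		fmt.Fprintln(os.Stderr, "read failed:", err)
		os.Exit(1)
	}
	fmt.Printf("read %d bytes\n", len(pw))
}
```

Raw-mode use follows the same fd-based pattern: `term.MakeRaw(fd)` returns the previous `*State`, which is handed back to `term.Restore` in a deferred call, as the package documentation in term.go shows.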
diff --git a/vendor/golang.org/x/term/go.mod b/vendor/golang.org/x/term/go.mod new file mode 100644 index 0000000000..d45f52851e --- /dev/null +++ b/vendor/golang.org/x/term/go.mod @@ -0,0 +1,5 @@ +module golang.org/x/term + +go 1.11 + +require golang.org/x/sys v0.0.0-20201119102817-f84b799fce68 diff --git a/vendor/golang.org/x/term/go.sum b/vendor/golang.org/x/term/go.sum new file mode 100644 index 0000000000..de9e09c654 --- /dev/null +++ b/vendor/golang.org/x/term/go.sum @@ -0,0 +1,2 @@ +golang.org/x/sys v0.0.0-20201119102817-f84b799fce68 h1:nxC68pudNYkKU6jWhgrqdreuFiOQWj1Fs7T3VrH4Pjw= +golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= diff --git a/vendor/golang.org/x/crypto/ssh/terminal/util_plan9.go b/vendor/golang.org/x/term/term.go similarity index 56% rename from vendor/golang.org/x/crypto/ssh/terminal/util_plan9.go rename to vendor/golang.org/x/term/term.go index 9317ac7ede..69931cc84c 100644 --- a/vendor/golang.org/x/crypto/ssh/terminal/util_plan9.go +++ b/vendor/golang.org/x/term/term.go @@ -1,8 +1,8 @@ -// Copyright 2016 The Go Authors. All rights reserved. +// Copyright 2019 The Go Authors. All rights reserved. // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. -// Package terminal provides support functions for dealing with terminals, as +// Package term provides support functions for dealing with terminals, as // commonly found on UNIX systems. // // Putting a terminal into raw mode is the most common requirement: @@ -12,47 +12,47 @@ // panic(err) // } // defer terminal.Restore(0, oldState) -package terminal +package term -import ( - "fmt" - "runtime" -) - -type State struct{} +// State contains the state of a terminal. +type State struct { + state +} // IsTerminal returns whether the given file descriptor is a terminal. func IsTerminal(fd int) bool { - return false + return isTerminal(fd) } -// MakeRaw put the terminal connected to the given file descriptor into raw +// MakeRaw puts the terminal connected to the given file descriptor into raw // mode and returns the previous state of the terminal so that it can be // restored. func MakeRaw(fd int) (*State, error) { - return nil, fmt.Errorf("terminal: MakeRaw not implemented on %s/%s", runtime.GOOS, runtime.GOARCH) + return makeRaw(fd) } // GetState returns the current state of a terminal which may be useful to // restore the terminal after a signal. func GetState(fd int) (*State, error) { - return nil, fmt.Errorf("terminal: GetState not implemented on %s/%s", runtime.GOOS, runtime.GOARCH) + return getState(fd) } // Restore restores the terminal connected to the given file descriptor to a // previous state. -func Restore(fd int, state *State) error { - return fmt.Errorf("terminal: Restore not implemented on %s/%s", runtime.GOOS, runtime.GOARCH) +func Restore(fd int, oldState *State) error { + return restore(fd, oldState) } -// GetSize returns the dimensions of the given terminal. +// GetSize returns the visible dimensions of the given terminal. +// +// These dimensions don't include any scrollback buffer height. func GetSize(fd int) (width, height int, err error) { - return 0, 0, fmt.Errorf("terminal: GetSize not implemented on %s/%s", runtime.GOOS, runtime.GOARCH) + return getSize(fd) } // ReadPassword reads a line of input from a terminal without local echo. This // is commonly used for inputting passwords and other sensitive data. The slice // returned does not include the \n. 
func ReadPassword(fd int) ([]byte, error) { - return nil, fmt.Errorf("terminal: ReadPassword not implemented on %s/%s", runtime.GOOS, runtime.GOARCH) + return readPassword(fd) } diff --git a/vendor/golang.org/x/term/term_plan9.go b/vendor/golang.org/x/term/term_plan9.go new file mode 100644 index 0000000000..21afa55cdb --- /dev/null +++ b/vendor/golang.org/x/term/term_plan9.go @@ -0,0 +1,42 @@ +// Copyright 2019 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package term + +import ( + "fmt" + "runtime" + + "golang.org/x/sys/plan9" +) + +type state struct{} + +func isTerminal(fd int) bool { + path, err := plan9.Fd2path(fd) + if err != nil { + return false + } + return path == "/dev/cons" || path == "/mnt/term/dev/cons" +} + +func makeRaw(fd int) (*State, error) { + return nil, fmt.Errorf("terminal: MakeRaw not implemented on %s/%s", runtime.GOOS, runtime.GOARCH) +} + +func getState(fd int) (*State, error) { + return nil, fmt.Errorf("terminal: GetState not implemented on %s/%s", runtime.GOOS, runtime.GOARCH) +} + +func restore(fd int, state *State) error { + return fmt.Errorf("terminal: Restore not implemented on %s/%s", runtime.GOOS, runtime.GOARCH) +} + +func getSize(fd int) (width, height int, err error) { + return 0, 0, fmt.Errorf("terminal: GetSize not implemented on %s/%s", runtime.GOOS, runtime.GOARCH) +} + +func readPassword(fd int) ([]byte, error) { + return nil, fmt.Errorf("terminal: ReadPassword not implemented on %s/%s", runtime.GOOS, runtime.GOARCH) +} diff --git a/vendor/golang.org/x/crypto/ssh/terminal/util_solaris.go b/vendor/golang.org/x/term/term_solaris.go similarity index 61% rename from vendor/golang.org/x/crypto/ssh/terminal/util_solaris.go rename to vendor/golang.org/x/term/term_solaris.go index 3d5f06a9f0..b9da29744b 100644 --- a/vendor/golang.org/x/crypto/ssh/terminal/util_solaris.go +++ b/vendor/golang.org/x/term/term_solaris.go @@ -1,32 +1,27 @@ -// Copyright 2015 The Go Authors. All rights reserved. +// Copyright 2019 The Go Authors. All rights reserved. // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. -// +build solaris - -package terminal // import "golang.org/x/crypto/ssh/terminal" +package term import ( - "golang.org/x/sys/unix" "io" "syscall" + + "golang.org/x/sys/unix" ) // State contains the state of a terminal. -type State struct { +type state struct { termios unix.Termios } -// IsTerminal returns whether the given file descriptor is a terminal. -func IsTerminal(fd int) bool { +func isTerminal(fd int) bool { _, err := unix.IoctlGetTermio(fd, unix.TCGETA) return err == nil } -// ReadPassword reads a line of input from a terminal without local echo. This -// is commonly used for inputting passwords and other sensitive data. The slice -// returned does not include the \n. -func ReadPassword(fd int) ([]byte, error) { +func readPassword(fd int) ([]byte, error) { // see also: http://src.illumos.org/source/xref/illumos-gate/usr/src/lib/libast/common/uwin/getpass.c val, err := unix.IoctlGetTermios(fd, unix.TCGETS) if err != nil { @@ -70,17 +65,14 @@ func ReadPassword(fd int) ([]byte, error) { return ret, nil } -// MakeRaw puts the terminal connected to the given file descriptor into raw -// mode and returns the previous state of the terminal so that it can be -// restored. 
-// see http://cr.illumos.org/~webrev/andy_js/1060/ -func MakeRaw(fd int) (*State, error) { +func makeRaw(fd int) (*State, error) { + // see http://cr.illumos.org/~webrev/andy_js/1060/ termios, err := unix.IoctlGetTermios(fd, unix.TCGETS) if err != nil { return nil, err } - oldState := State{termios: *termios} + oldState := State{state{termios: *termios}} termios.Iflag &^= unix.IGNBRK | unix.BRKINT | unix.PARMRK | unix.ISTRIP | unix.INLCR | unix.IGNCR | unix.ICRNL | unix.IXON termios.Oflag &^= unix.OPOST @@ -97,25 +89,20 @@ func MakeRaw(fd int) (*State, error) { return &oldState, nil } -// Restore restores the terminal connected to the given file descriptor to a -// previous state. -func Restore(fd int, oldState *State) error { +func restore(fd int, oldState *State) error { return unix.IoctlSetTermios(fd, unix.TCSETS, &oldState.termios) } -// GetState returns the current state of a terminal which may be useful to -// restore the terminal after a signal. -func GetState(fd int) (*State, error) { +func getState(fd int) (*State, error) { termios, err := unix.IoctlGetTermios(fd, unix.TCGETS) if err != nil { return nil, err } - return &State{termios: *termios}, nil + return &State{state{termios: *termios}}, nil } -// GetSize returns the dimensions of the given terminal. -func GetSize(fd int) (width, height int, err error) { +func getSize(fd int) (width, height int, err error) { ws, err := unix.IoctlGetWinsize(fd, unix.TIOCGWINSZ) if err != nil { return 0, 0, err diff --git a/vendor/golang.org/x/crypto/ssh/terminal/util.go b/vendor/golang.org/x/term/term_unix.go similarity index 53% rename from vendor/golang.org/x/crypto/ssh/terminal/util.go rename to vendor/golang.org/x/term/term_unix.go index 3911040840..4c60e457d0 100644 --- a/vendor/golang.org/x/crypto/ssh/terminal/util.go +++ b/vendor/golang.org/x/term/term_unix.go @@ -1,46 +1,31 @@ -// Copyright 2011 The Go Authors. All rights reserved. +// Copyright 2019 The Go Authors. All rights reserved. // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. -// +build aix darwin dragonfly freebsd linux,!appengine netbsd openbsd +// +build aix darwin dragonfly freebsd linux netbsd openbsd zos -// Package terminal provides support functions for dealing with terminals, as -// commonly found on UNIX systems. -// -// Putting a terminal into raw mode is the most common requirement: -// -// oldState, err := terminal.MakeRaw(0) -// if err != nil { -// panic(err) -// } -// defer terminal.Restore(0, oldState) -package terminal // import "golang.org/x/crypto/ssh/terminal" +package term import ( "golang.org/x/sys/unix" ) -// State contains the state of a terminal. -type State struct { +type state struct { termios unix.Termios } -// IsTerminal returns whether the given file descriptor is a terminal. -func IsTerminal(fd int) bool { +func isTerminal(fd int) bool { _, err := unix.IoctlGetTermios(fd, ioctlReadTermios) return err == nil } -// MakeRaw put the terminal connected to the given file descriptor into raw -// mode and returns the previous state of the terminal so that it can be -// restored. -func MakeRaw(fd int) (*State, error) { +func makeRaw(fd int) (*State, error) { termios, err := unix.IoctlGetTermios(fd, ioctlReadTermios) if err != nil { return nil, err } - oldState := State{termios: *termios} + oldState := State{state{termios: *termios}} // This attempts to replicate the behaviour documented for cfmakeraw in // the termios(3) manpage. 
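The comment above refers to cfmakeraw from termios(3); the flag manipulation itself falls between the hunks shown in this diff. As a hedged sketch of what cfmakeraw-style raw mode amounts to (flag names are from golang.org/x/sys/unix, `termios` is assumed to be a previously fetched `*unix.Termios`, and the vendored makeRaw variants differ slightly per OS):

```go
raw := *termios
// Disable input translation, break handling and software flow control.
raw.Iflag &^= unix.IGNBRK | unix.BRKINT | unix.PARMRK | unix.ISTRIP |
	unix.INLCR | unix.IGNCR | unix.ICRNL | unix.IXON
// Disable output post-processing.
raw.Oflag &^= unix.OPOST
// Disable echo, canonical (line-buffered) input, signals and extensions.
raw.Lflag &^= unix.ECHO | unix.ECHONL | unix.ICANON | unix.ISIG | unix.IEXTEN
// 8-bit characters, no parity.
raw.Cflag &^= unix.CSIZE | unix.PARENB
raw.Cflag |= unix.CS8
// Return from read after one byte, with no inter-byte timeout.
raw.Cc[unix.VMIN] = 1
raw.Cc[unix.VTIME] = 0
```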
@@ -58,25 +43,20 @@ func MakeRaw(fd int) (*State, error) { return &oldState, nil } -// GetState returns the current state of a terminal which may be useful to -// restore the terminal after a signal. -func GetState(fd int) (*State, error) { +func getState(fd int) (*State, error) { termios, err := unix.IoctlGetTermios(fd, ioctlReadTermios) if err != nil { return nil, err } - return &State{termios: *termios}, nil + return &State{state{termios: *termios}}, nil } -// Restore restores the terminal connected to the given file descriptor to a -// previous state. -func Restore(fd int, state *State) error { +func restore(fd int, state *State) error { return unix.IoctlSetTermios(fd, ioctlWriteTermios, &state.termios) } -// GetSize returns the dimensions of the given terminal. -func GetSize(fd int) (width, height int, err error) { +func getSize(fd int) (width, height int, err error) { ws, err := unix.IoctlGetWinsize(fd, unix.TIOCGWINSZ) if err != nil { return -1, -1, err @@ -91,10 +71,7 @@ func (r passwordReader) Read(buf []byte) (int, error) { return unix.Read(int(r), buf) } -// ReadPassword reads a line of input from a terminal without local echo. This -// is commonly used for inputting passwords and other sensitive data. The slice -// returned does not include the \n. -func ReadPassword(fd int) ([]byte, error) { +func readPassword(fd int) ([]byte, error) { termios, err := unix.IoctlGetTermios(fd, ioctlReadTermios) if err != nil { return nil, err diff --git a/vendor/golang.org/x/crypto/ssh/terminal/util_linux.go b/vendor/golang.org/x/term/term_unix_aix.go similarity index 74% rename from vendor/golang.org/x/crypto/ssh/terminal/util_linux.go rename to vendor/golang.org/x/term/term_unix_aix.go index 5fadfe8a1d..2d5efd26ad 100644 --- a/vendor/golang.org/x/crypto/ssh/terminal/util_linux.go +++ b/vendor/golang.org/x/term/term_unix_aix.go @@ -1,8 +1,8 @@ -// Copyright 2013 The Go Authors. All rights reserved. +// Copyright 2019 The Go Authors. All rights reserved. // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. -package terminal +package term import "golang.org/x/sys/unix" diff --git a/vendor/golang.org/x/crypto/ssh/terminal/util_bsd.go b/vendor/golang.org/x/term/term_unix_bsd.go similarity index 95% rename from vendor/golang.org/x/crypto/ssh/terminal/util_bsd.go rename to vendor/golang.org/x/term/term_unix_bsd.go index cb23a59049..3342be00b4 100644 --- a/vendor/golang.org/x/crypto/ssh/terminal/util_bsd.go +++ b/vendor/golang.org/x/term/term_unix_bsd.go @@ -4,7 +4,7 @@ // +build darwin dragonfly freebsd netbsd openbsd -package terminal +package term import "golang.org/x/sys/unix" diff --git a/vendor/golang.org/x/crypto/ssh/terminal/util_aix.go b/vendor/golang.org/x/term/term_unix_linux.go similarity index 71% rename from vendor/golang.org/x/crypto/ssh/terminal/util_aix.go rename to vendor/golang.org/x/term/term_unix_linux.go index dfcd627859..2d5efd26ad 100644 --- a/vendor/golang.org/x/crypto/ssh/terminal/util_aix.go +++ b/vendor/golang.org/x/term/term_unix_linux.go @@ -1,10 +1,8 @@ -// Copyright 2018 The Go Authors. All rights reserved. +// Copyright 2019 The Go Authors. All rights reserved. // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. 
-// +build aix - -package terminal +package term import "golang.org/x/sys/unix" diff --git a/vendor/golang.org/x/term/term_unix_zos.go b/vendor/golang.org/x/term/term_unix_zos.go new file mode 100644 index 0000000000..b85ab89989 --- /dev/null +++ b/vendor/golang.org/x/term/term_unix_zos.go @@ -0,0 +1,10 @@ +// Copyright 2020 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package term + +import "golang.org/x/sys/unix" + +const ioctlReadTermios = unix.TCGETS +const ioctlWriteTermios = unix.TCSETS diff --git a/vendor/golang.org/x/term/term_unsupported.go b/vendor/golang.org/x/term/term_unsupported.go new file mode 100644 index 0000000000..8b5d1bad00 --- /dev/null +++ b/vendor/golang.org/x/term/term_unsupported.go @@ -0,0 +1,38 @@ +// Copyright 2019 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// +build !aix,!darwin,!dragonfly,!freebsd,!linux,!netbsd,!openbsd,!zos,!windows,!solaris,!plan9 + +package term + +import ( + "fmt" + "runtime" +) + +type state struct{} + +func isTerminal(fd int) bool { + return false +} + +func makeRaw(fd int) (*State, error) { + return nil, fmt.Errorf("terminal: MakeRaw not implemented on %s/%s", runtime.GOOS, runtime.GOARCH) +} + +func getState(fd int) (*State, error) { + return nil, fmt.Errorf("terminal: GetState not implemented on %s/%s", runtime.GOOS, runtime.GOARCH) +} + +func restore(fd int, state *State) error { + return fmt.Errorf("terminal: Restore not implemented on %s/%s", runtime.GOOS, runtime.GOARCH) +} + +func getSize(fd int) (width, height int, err error) { + return 0, 0, fmt.Errorf("terminal: GetSize not implemented on %s/%s", runtime.GOOS, runtime.GOARCH) +} + +func readPassword(fd int) ([]byte, error) { + return nil, fmt.Errorf("terminal: ReadPassword not implemented on %s/%s", runtime.GOOS, runtime.GOARCH) +} diff --git a/vendor/golang.org/x/crypto/ssh/terminal/util_windows.go b/vendor/golang.org/x/term/term_windows.go similarity index 53% rename from vendor/golang.org/x/crypto/ssh/terminal/util_windows.go rename to vendor/golang.org/x/term/term_windows.go index f614e9cb60..465f560604 100644 --- a/vendor/golang.org/x/crypto/ssh/terminal/util_windows.go +++ b/vendor/golang.org/x/term/term_windows.go @@ -1,20 +1,8 @@ -// Copyright 2011 The Go Authors. All rights reserved. +// Copyright 2019 The Go Authors. All rights reserved. // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. -// +build windows - -// Package terminal provides support functions for dealing with terminals, as -// commonly found on UNIX systems. -// -// Putting a terminal into raw mode is the most common requirement: -// -// oldState, err := terminal.MakeRaw(0) -// if err != nil { -// panic(err) -// } -// defer terminal.Restore(0, oldState) -package terminal +package term import ( "os" @@ -22,21 +10,17 @@ import ( "golang.org/x/sys/windows" ) -type State struct { +type state struct { mode uint32 } -// IsTerminal returns whether the given file descriptor is a terminal. -func IsTerminal(fd int) bool { +func isTerminal(fd int) bool { var st uint32 err := windows.GetConsoleMode(windows.Handle(fd), &st) return err == nil } -// MakeRaw put the terminal connected to the given file descriptor into raw -// mode and returns the previous state of the terminal so that it can be -// restored. 
-func MakeRaw(fd int) (*State, error) { +func makeRaw(fd int) (*State, error) { var st uint32 if err := windows.GetConsoleMode(windows.Handle(fd), &st); err != nil { return nil, err @@ -45,29 +29,22 @@ func MakeRaw(fd int) (*State, error) { if err := windows.SetConsoleMode(windows.Handle(fd), raw); err != nil { return nil, err } - return &State{st}, nil + return &State{state{st}}, nil } -// GetState returns the current state of a terminal which may be useful to -// restore the terminal after a signal. -func GetState(fd int) (*State, error) { +func getState(fd int) (*State, error) { var st uint32 if err := windows.GetConsoleMode(windows.Handle(fd), &st); err != nil { return nil, err } - return &State{st}, nil + return &State{state{st}}, nil } -// Restore restores the terminal connected to the given file descriptor to a -// previous state. -func Restore(fd int, state *State) error { +func restore(fd int, state *State) error { return windows.SetConsoleMode(windows.Handle(fd), state.mode) } -// GetSize returns the visible dimensions of the given terminal. -// -// These dimensions don't include any scrollback buffer height. -func GetSize(fd int) (width, height int, err error) { +func getSize(fd int) (width, height int, err error) { var info windows.ConsoleScreenBufferInfo if err := windows.GetConsoleScreenBufferInfo(windows.Handle(fd), &info); err != nil { return 0, 0, err @@ -75,10 +52,7 @@ func GetSize(fd int) (width, height int, err error) { return int(info.Window.Right - info.Window.Left + 1), int(info.Window.Bottom - info.Window.Top + 1), nil } -// ReadPassword reads a line of input from a terminal without local echo. This -// is commonly used for inputting passwords and other sensitive data. The slice -// returned does not include the \n. -func ReadPassword(fd int) ([]byte, error) { +func readPassword(fd int) ([]byte, error) { var st uint32 if err := windows.GetConsoleMode(windows.Handle(fd), &st); err != nil { return nil, err diff --git a/vendor/golang.org/x/term/terminal.go b/vendor/golang.org/x/term/terminal.go new file mode 100644 index 0000000000..535ab8257c --- /dev/null +++ b/vendor/golang.org/x/term/terminal.go @@ -0,0 +1,987 @@ +// Copyright 2011 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package term + +import ( + "bytes" + "io" + "runtime" + "strconv" + "sync" + "unicode/utf8" +) + +// EscapeCodes contains escape sequences that can be written to the terminal in +// order to achieve different styles of text. +type EscapeCodes struct { + // Foreground colors + Black, Red, Green, Yellow, Blue, Magenta, Cyan, White []byte + + // Reset all attributes + Reset []byte +} + +var vt100EscapeCodes = EscapeCodes{ + Black: []byte{keyEscape, '[', '3', '0', 'm'}, + Red: []byte{keyEscape, '[', '3', '1', 'm'}, + Green: []byte{keyEscape, '[', '3', '2', 'm'}, + Yellow: []byte{keyEscape, '[', '3', '3', 'm'}, + Blue: []byte{keyEscape, '[', '3', '4', 'm'}, + Magenta: []byte{keyEscape, '[', '3', '5', 'm'}, + Cyan: []byte{keyEscape, '[', '3', '6', 'm'}, + White: []byte{keyEscape, '[', '3', '7', 'm'}, + + Reset: []byte{keyEscape, '[', '0', 'm'}, +} + +// Terminal contains the state for running a VT100 terminal that is capable of +// reading lines of input. +type Terminal struct { + // AutoCompleteCallback, if non-null, is called for each keypress with + // the full input line and the current position of the cursor (in + // bytes, as an index into |line|). 
If it returns ok=false, the key + // press is processed normally. Otherwise it returns a replacement line + // and the new cursor position. + AutoCompleteCallback func(line string, pos int, key rune) (newLine string, newPos int, ok bool) + + // Escape contains a pointer to the escape codes for this terminal. + // It's always a valid pointer, although the escape codes themselves + // may be empty if the terminal doesn't support them. + Escape *EscapeCodes + + // lock protects the terminal and the state in this object from + // concurrent processing of a key press and a Write() call. + lock sync.Mutex + + c io.ReadWriter + prompt []rune + + // line is the current line being entered. + line []rune + // pos is the logical position of the cursor in line + pos int + // echo is true if local echo is enabled + echo bool + // pasteActive is true iff there is a bracketed paste operation in + // progress. + pasteActive bool + + // cursorX contains the current X value of the cursor where the left + // edge is 0. cursorY contains the row number where the first row of + // the current line is 0. + cursorX, cursorY int + // maxLine is the greatest value of cursorY so far. + maxLine int + + termWidth, termHeight int + + // outBuf contains the terminal data to be sent. + outBuf []byte + // remainder contains the remainder of any partial key sequences after + // a read. It aliases into inBuf. + remainder []byte + inBuf [256]byte + + // history contains previously entered commands so that they can be + // accessed with the up and down keys. + history stRingBuffer + // historyIndex stores the currently accessed history entry, where zero + // means the immediately previous entry. + historyIndex int + // When navigating up and down the history it's possible to return to + // the incomplete, initial line. That value is stored in + // historyPending. + historyPending string +} + +// NewTerminal runs a VT100 terminal on the given ReadWriter. If the ReadWriter is +// a local terminal, that terminal must first have been put into raw mode. +// prompt is a string that is written at the start of each input line (i.e. +// "> "). +func NewTerminal(c io.ReadWriter, prompt string) *Terminal { + return &Terminal{ + Escape: &vt100EscapeCodes, + c: c, + prompt: []rune(prompt), + termWidth: 80, + termHeight: 24, + echo: true, + historyIndex: -1, + } +} + +const ( + keyCtrlC = 3 + keyCtrlD = 4 + keyCtrlU = 21 + keyEnter = '\r' + keyEscape = 27 + keyBackspace = 127 + keyUnknown = 0xd800 /* UTF-16 surrogate area */ + iota + keyUp + keyDown + keyLeft + keyRight + keyAltLeft + keyAltRight + keyHome + keyEnd + keyDeleteWord + keyDeleteLine + keyClearScreen + keyPasteStart + keyPasteEnd +) + +var ( + crlf = []byte{'\r', '\n'} + pasteStart = []byte{keyEscape, '[', '2', '0', '0', '~'} + pasteEnd = []byte{keyEscape, '[', '2', '0', '1', '~'} +) + +// bytesToKey tries to parse a key sequence from b. If successful, it returns +// the key and the remainder of the input. Otherwise it returns utf8.RuneError. 
+func bytesToKey(b []byte, pasteActive bool) (rune, []byte) { + if len(b) == 0 { + return utf8.RuneError, nil + } + + if !pasteActive { + switch b[0] { + case 1: // ^A + return keyHome, b[1:] + case 2: // ^B + return keyLeft, b[1:] + case 5: // ^E + return keyEnd, b[1:] + case 6: // ^F + return keyRight, b[1:] + case 8: // ^H + return keyBackspace, b[1:] + case 11: // ^K + return keyDeleteLine, b[1:] + case 12: // ^L + return keyClearScreen, b[1:] + case 23: // ^W + return keyDeleteWord, b[1:] + case 14: // ^N + return keyDown, b[1:] + case 16: // ^P + return keyUp, b[1:] + } + } + + if b[0] != keyEscape { + if !utf8.FullRune(b) { + return utf8.RuneError, b + } + r, l := utf8.DecodeRune(b) + return r, b[l:] + } + + if !pasteActive && len(b) >= 3 && b[0] == keyEscape && b[1] == '[' { + switch b[2] { + case 'A': + return keyUp, b[3:] + case 'B': + return keyDown, b[3:] + case 'C': + return keyRight, b[3:] + case 'D': + return keyLeft, b[3:] + case 'H': + return keyHome, b[3:] + case 'F': + return keyEnd, b[3:] + } + } + + if !pasteActive && len(b) >= 6 && b[0] == keyEscape && b[1] == '[' && b[2] == '1' && b[3] == ';' && b[4] == '3' { + switch b[5] { + case 'C': + return keyAltRight, b[6:] + case 'D': + return keyAltLeft, b[6:] + } + } + + if !pasteActive && len(b) >= 6 && bytes.Equal(b[:6], pasteStart) { + return keyPasteStart, b[6:] + } + + if pasteActive && len(b) >= 6 && bytes.Equal(b[:6], pasteEnd) { + return keyPasteEnd, b[6:] + } + + // If we get here then we have a key that we don't recognise, or a + // partial sequence. It's not clear how one should find the end of a + // sequence without knowing them all, but it seems that [a-zA-Z~] only + // appears at the end of a sequence. + for i, c := range b[0:] { + if c >= 'a' && c <= 'z' || c >= 'A' && c <= 'Z' || c == '~' { + return keyUnknown, b[i+1:] + } + } + + return utf8.RuneError, b +} + +// queue appends data to the end of t.outBuf +func (t *Terminal) queue(data []rune) { + t.outBuf = append(t.outBuf, []byte(string(data))...) +} + +var eraseUnderCursor = []rune{' ', keyEscape, '[', 'D'} +var space = []rune{' '} + +func isPrintable(key rune) bool { + isInSurrogateArea := key >= 0xd800 && key <= 0xdbff + return key >= 32 && !isInSurrogateArea +} + +// moveCursorToPos appends data to t.outBuf which will move the cursor to the +// given, logical position in the text. +func (t *Terminal) moveCursorToPos(pos int) { + if !t.echo { + return + } + + x := visualLength(t.prompt) + pos + y := x / t.termWidth + x = x % t.termWidth + + up := 0 + if y < t.cursorY { + up = t.cursorY - y + } + + down := 0 + if y > t.cursorY { + down = y - t.cursorY + } + + left := 0 + if x < t.cursorX { + left = t.cursorX - x + } + + right := 0 + if x > t.cursorX { + right = x - t.cursorX + } + + t.cursorX = x + t.cursorY = y + t.move(up, down, left, right) +} + +func (t *Terminal) move(up, down, left, right int) { + m := []rune{} + + // 1 unit up can be expressed as ^[[A or ^[A + // 5 units up can be expressed as ^[[5A + + if up == 1 { + m = append(m, keyEscape, '[', 'A') + } else if up > 1 { + m = append(m, keyEscape, '[') + m = append(m, []rune(strconv.Itoa(up))...) + m = append(m, 'A') + } + + if down == 1 { + m = append(m, keyEscape, '[', 'B') + } else if down > 1 { + m = append(m, keyEscape, '[') + m = append(m, []rune(strconv.Itoa(down))...) + m = append(m, 'B') + } + + if right == 1 { + m = append(m, keyEscape, '[', 'C') + } else if right > 1 { + m = append(m, keyEscape, '[') + m = append(m, []rune(strconv.Itoa(right))...) 
+ m = append(m, 'C') + } + + if left == 1 { + m = append(m, keyEscape, '[', 'D') + } else if left > 1 { + m = append(m, keyEscape, '[') + m = append(m, []rune(strconv.Itoa(left))...) + m = append(m, 'D') + } + + t.queue(m) +} + +func (t *Terminal) clearLineToRight() { + op := []rune{keyEscape, '[', 'K'} + t.queue(op) +} + +const maxLineLength = 4096 + +func (t *Terminal) setLine(newLine []rune, newPos int) { + if t.echo { + t.moveCursorToPos(0) + t.writeLine(newLine) + for i := len(newLine); i < len(t.line); i++ { + t.writeLine(space) + } + t.moveCursorToPos(newPos) + } + t.line = newLine + t.pos = newPos +} + +func (t *Terminal) advanceCursor(places int) { + t.cursorX += places + t.cursorY += t.cursorX / t.termWidth + if t.cursorY > t.maxLine { + t.maxLine = t.cursorY + } + t.cursorX = t.cursorX % t.termWidth + + if places > 0 && t.cursorX == 0 { + // Normally terminals will advance the current position + // when writing a character. But that doesn't happen + // for the last character in a line. However, when + // writing a character (except a new line) that causes + // a line wrap, the position will be advanced two + // places. + // + // So, if we are stopping at the end of a line, we + // need to write a newline so that our cursor can be + // advanced to the next line. + t.outBuf = append(t.outBuf, '\r', '\n') + } +} + +func (t *Terminal) eraseNPreviousChars(n int) { + if n == 0 { + return + } + + if t.pos < n { + n = t.pos + } + t.pos -= n + t.moveCursorToPos(t.pos) + + copy(t.line[t.pos:], t.line[n+t.pos:]) + t.line = t.line[:len(t.line)-n] + if t.echo { + t.writeLine(t.line[t.pos:]) + for i := 0; i < n; i++ { + t.queue(space) + } + t.advanceCursor(n) + t.moveCursorToPos(t.pos) + } +} + +// countToLeftWord returns then number of characters from the cursor to the +// start of the previous word. +func (t *Terminal) countToLeftWord() int { + if t.pos == 0 { + return 0 + } + + pos := t.pos - 1 + for pos > 0 { + if t.line[pos] != ' ' { + break + } + pos-- + } + for pos > 0 { + if t.line[pos] == ' ' { + pos++ + break + } + pos-- + } + + return t.pos - pos +} + +// countToRightWord returns then number of characters from the cursor to the +// start of the next word. +func (t *Terminal) countToRightWord() int { + pos := t.pos + for pos < len(t.line) { + if t.line[pos] == ' ' { + break + } + pos++ + } + for pos < len(t.line) { + if t.line[pos] != ' ' { + break + } + pos++ + } + return pos - t.pos +} + +// visualLength returns the number of visible glyphs in s. +func visualLength(runes []rune) int { + inEscapeSeq := false + length := 0 + + for _, r := range runes { + switch { + case inEscapeSeq: + if (r >= 'a' && r <= 'z') || (r >= 'A' && r <= 'Z') { + inEscapeSeq = false + } + case r == '\x1b': + inEscapeSeq = true + default: + length++ + } + } + + return length +} + +// handleKey processes the given key and, optionally, returns a line of text +// that the user has entered. +func (t *Terminal) handleKey(key rune) (line string, ok bool) { + if t.pasteActive && key != keyEnter { + t.addKeyToLine(key) + return + } + + switch key { + case keyBackspace: + if t.pos == 0 { + return + } + t.eraseNPreviousChars(1) + case keyAltLeft: + // move left by a word. + t.pos -= t.countToLeftWord() + t.moveCursorToPos(t.pos) + case keyAltRight: + // move right by a word. 
+ t.pos += t.countToRightWord() + t.moveCursorToPos(t.pos) + case keyLeft: + if t.pos == 0 { + return + } + t.pos-- + t.moveCursorToPos(t.pos) + case keyRight: + if t.pos == len(t.line) { + return + } + t.pos++ + t.moveCursorToPos(t.pos) + case keyHome: + if t.pos == 0 { + return + } + t.pos = 0 + t.moveCursorToPos(t.pos) + case keyEnd: + if t.pos == len(t.line) { + return + } + t.pos = len(t.line) + t.moveCursorToPos(t.pos) + case keyUp: + entry, ok := t.history.NthPreviousEntry(t.historyIndex + 1) + if !ok { + return "", false + } + if t.historyIndex == -1 { + t.historyPending = string(t.line) + } + t.historyIndex++ + runes := []rune(entry) + t.setLine(runes, len(runes)) + case keyDown: + switch t.historyIndex { + case -1: + return + case 0: + runes := []rune(t.historyPending) + t.setLine(runes, len(runes)) + t.historyIndex-- + default: + entry, ok := t.history.NthPreviousEntry(t.historyIndex - 1) + if ok { + t.historyIndex-- + runes := []rune(entry) + t.setLine(runes, len(runes)) + } + } + case keyEnter: + t.moveCursorToPos(len(t.line)) + t.queue([]rune("\r\n")) + line = string(t.line) + ok = true + t.line = t.line[:0] + t.pos = 0 + t.cursorX = 0 + t.cursorY = 0 + t.maxLine = 0 + case keyDeleteWord: + // Delete zero or more spaces and then one or more characters. + t.eraseNPreviousChars(t.countToLeftWord()) + case keyDeleteLine: + // Delete everything from the current cursor position to the + // end of line. + for i := t.pos; i < len(t.line); i++ { + t.queue(space) + t.advanceCursor(1) + } + t.line = t.line[:t.pos] + t.moveCursorToPos(t.pos) + case keyCtrlD: + // Erase the character under the current position. + // The EOF case when the line is empty is handled in + // readLine(). + if t.pos < len(t.line) { + t.pos++ + t.eraseNPreviousChars(1) + } + case keyCtrlU: + t.eraseNPreviousChars(t.pos) + case keyClearScreen: + // Erases the screen and moves the cursor to the home position. + t.queue([]rune("\x1b[2J\x1b[H")) + t.queue(t.prompt) + t.cursorX, t.cursorY = 0, 0 + t.advanceCursor(visualLength(t.prompt)) + t.setLine(t.line, t.pos) + default: + if t.AutoCompleteCallback != nil { + prefix := string(t.line[:t.pos]) + suffix := string(t.line[t.pos:]) + + t.lock.Unlock() + newLine, newPos, completeOk := t.AutoCompleteCallback(prefix+suffix, len(prefix), key) + t.lock.Lock() + + if completeOk { + t.setLine([]rune(newLine), utf8.RuneCount([]byte(newLine)[:newPos])) + return + } + } + if !isPrintable(key) { + return + } + if len(t.line) == maxLineLength { + return + } + t.addKeyToLine(key) + } + return +} + +// addKeyToLine inserts the given key at the current position in the current +// line. +func (t *Terminal) addKeyToLine(key rune) { + if len(t.line) == cap(t.line) { + newLine := make([]rune, len(t.line), 2*(1+len(t.line))) + copy(newLine, t.line) + t.line = newLine + } + t.line = t.line[:len(t.line)+1] + copy(t.line[t.pos+1:], t.line[t.pos:]) + t.line[t.pos] = key + if t.echo { + t.writeLine(t.line[t.pos:]) + } + t.pos++ + t.moveCursorToPos(t.pos) +} + +func (t *Terminal) writeLine(line []rune) { + for len(line) != 0 { + remainingOnLine := t.termWidth - t.cursorX + todo := len(line) + if todo > remainingOnLine { + todo = remainingOnLine + } + t.queue(line[:todo]) + t.advanceCursor(visualLength(line[:todo])) + line = line[todo:] + } +} + +// writeWithCRLF writes buf to w but replaces all occurrences of \n with \r\n. 
+func writeWithCRLF(w io.Writer, buf []byte) (n int, err error) { + for len(buf) > 0 { + i := bytes.IndexByte(buf, '\n') + todo := len(buf) + if i >= 0 { + todo = i + } + + var nn int + nn, err = w.Write(buf[:todo]) + n += nn + if err != nil { + return n, err + } + buf = buf[todo:] + + if i >= 0 { + if _, err = w.Write(crlf); err != nil { + return n, err + } + n++ + buf = buf[1:] + } + } + + return n, nil +} + +func (t *Terminal) Write(buf []byte) (n int, err error) { + t.lock.Lock() + defer t.lock.Unlock() + + if t.cursorX == 0 && t.cursorY == 0 { + // This is the easy case: there's nothing on the screen that we + // have to move out of the way. + return writeWithCRLF(t.c, buf) + } + + // We have a prompt and possibly user input on the screen. We + // have to clear it first. + t.move(0 /* up */, 0 /* down */, t.cursorX /* left */, 0 /* right */) + t.cursorX = 0 + t.clearLineToRight() + + for t.cursorY > 0 { + t.move(1 /* up */, 0, 0, 0) + t.cursorY-- + t.clearLineToRight() + } + + if _, err = t.c.Write(t.outBuf); err != nil { + return + } + t.outBuf = t.outBuf[:0] + + if n, err = writeWithCRLF(t.c, buf); err != nil { + return + } + + t.writeLine(t.prompt) + if t.echo { + t.writeLine(t.line) + } + + t.moveCursorToPos(t.pos) + + if _, err = t.c.Write(t.outBuf); err != nil { + return + } + t.outBuf = t.outBuf[:0] + return +} + +// ReadPassword temporarily changes the prompt and reads a password, without +// echo, from the terminal. +func (t *Terminal) ReadPassword(prompt string) (line string, err error) { + t.lock.Lock() + defer t.lock.Unlock() + + oldPrompt := t.prompt + t.prompt = []rune(prompt) + t.echo = false + + line, err = t.readLine() + + t.prompt = oldPrompt + t.echo = true + + return +} + +// ReadLine returns a line of input from the terminal. +func (t *Terminal) ReadLine() (line string, err error) { + t.lock.Lock() + defer t.lock.Unlock() + + return t.readLine() +} + +func (t *Terminal) readLine() (line string, err error) { + // t.lock must be held at this point + + if t.cursorX == 0 && t.cursorY == 0 { + t.writeLine(t.prompt) + t.c.Write(t.outBuf) + t.outBuf = t.outBuf[:0] + } + + lineIsPasted := t.pasteActive + + for { + rest := t.remainder + lineOk := false + for !lineOk { + var key rune + key, rest = bytesToKey(rest, t.pasteActive) + if key == utf8.RuneError { + break + } + if !t.pasteActive { + if key == keyCtrlD { + if len(t.line) == 0 { + return "", io.EOF + } + } + if key == keyCtrlC { + return "", io.EOF + } + if key == keyPasteStart { + t.pasteActive = true + if len(t.line) == 0 { + lineIsPasted = true + } + continue + } + } else if key == keyPasteEnd { + t.pasteActive = false + continue + } + if !t.pasteActive { + lineIsPasted = false + } + line, lineOk = t.handleKey(key) + } + if len(rest) > 0 { + n := copy(t.inBuf[:], rest) + t.remainder = t.inBuf[:n] + } else { + t.remainder = nil + } + t.c.Write(t.outBuf) + t.outBuf = t.outBuf[:0] + if lineOk { + if t.echo { + t.historyIndex = -1 + t.history.Add(line) + } + if lineIsPasted { + err = ErrPasteIndicator + } + return + } + + // t.remainder is a slice at the beginning of t.inBuf + // containing a partial key sequence + readBuf := t.inBuf[len(t.remainder):] + var n int + + t.lock.Unlock() + n, err = t.c.Read(readBuf) + t.lock.Lock() + + if err != nil { + return + } + + t.remainder = t.inBuf[:n+len(t.remainder)] + } +} + +// SetPrompt sets the prompt to be used when reading subsequent lines. 
+func (t *Terminal) SetPrompt(prompt string) { + t.lock.Lock() + defer t.lock.Unlock() + + t.prompt = []rune(prompt) +} + +func (t *Terminal) clearAndRepaintLinePlusNPrevious(numPrevLines int) { + // Move cursor to column zero at the start of the line. + t.move(t.cursorY, 0, t.cursorX, 0) + t.cursorX, t.cursorY = 0, 0 + t.clearLineToRight() + for t.cursorY < numPrevLines { + // Move down a line + t.move(0, 1, 0, 0) + t.cursorY++ + t.clearLineToRight() + } + // Move back to beginning. + t.move(t.cursorY, 0, 0, 0) + t.cursorX, t.cursorY = 0, 0 + + t.queue(t.prompt) + t.advanceCursor(visualLength(t.prompt)) + t.writeLine(t.line) + t.moveCursorToPos(t.pos) +} + +func (t *Terminal) SetSize(width, height int) error { + t.lock.Lock() + defer t.lock.Unlock() + + if width == 0 { + width = 1 + } + + oldWidth := t.termWidth + t.termWidth, t.termHeight = width, height + + switch { + case width == oldWidth: + // If the width didn't change then nothing else needs to be + // done. + return nil + case len(t.line) == 0 && t.cursorX == 0 && t.cursorY == 0: + // If there is nothing on current line and no prompt printed, + // just do nothing + return nil + case width < oldWidth: + // Some terminals (e.g. xterm) will truncate lines that were + // too long when shinking. Others, (e.g. gnome-terminal) will + // attempt to wrap them. For the former, repainting t.maxLine + // works great, but that behaviour goes badly wrong in the case + // of the latter because they have doubled every full line. + + // We assume that we are working on a terminal that wraps lines + // and adjust the cursor position based on every previous line + // wrapping and turning into two. This causes the prompt on + // xterms to move upwards, which isn't great, but it avoids a + // huge mess with gnome-terminal. + if t.cursorX >= t.termWidth { + t.cursorX = t.termWidth - 1 + } + t.cursorY *= 2 + t.clearAndRepaintLinePlusNPrevious(t.maxLine * 2) + case width > oldWidth: + // If the terminal expands then our position calculations will + // be wrong in the future because we think the cursor is + // |t.pos| chars into the string, but there will be a gap at + // the end of any wrapped line. + // + // But the position will actually be correct until we move, so + // we can move back to the beginning and repaint everything. + t.clearAndRepaintLinePlusNPrevious(t.maxLine) + } + + _, err := t.c.Write(t.outBuf) + t.outBuf = t.outBuf[:0] + return err +} + +type pasteIndicatorError struct{} + +func (pasteIndicatorError) Error() string { + return "terminal: ErrPasteIndicator not correctly handled" +} + +// ErrPasteIndicator may be returned from ReadLine as the error, in addition +// to valid line data. It indicates that bracketed paste mode is enabled and +// that the returned line consists only of pasted data. Programs may wish to +// interpret pasted data more literally than typed data. +var ErrPasteIndicator = pasteIndicatorError{} + +// SetBracketedPasteMode requests that the terminal bracket paste operations +// with markers. Not all terminals support this but, if it is supported, then +// enabling this mode will stop any autocomplete callback from running due to +// pastes. Additionally, any lines that are completely pasted will be returned +// from ReadLine with the error set to ErrPasteIndicator. +func (t *Terminal) SetBracketedPasteMode(on bool) { + if on { + io.WriteString(t.c, "\x1b[?2004h") + } else { + io.WriteString(t.c, "\x1b[?2004l") + } +} + +// stRingBuffer is a ring buffer of strings. 
+type stRingBuffer struct { + // entries contains max elements. + entries []string + max int + // head contains the index of the element most recently added to the ring. + head int + // size contains the number of elements in the ring. + size int +} + +func (s *stRingBuffer) Add(a string) { + if s.entries == nil { + const defaultNumEntries = 100 + s.entries = make([]string, defaultNumEntries) + s.max = defaultNumEntries + } + + s.head = (s.head + 1) % s.max + s.entries[s.head] = a + if s.size < s.max { + s.size++ + } +} + +// NthPreviousEntry returns the value passed to the nth previous call to Add. +// If n is zero then the immediately prior value is returned, if one, then the +// next most recent, and so on. If such an element doesn't exist then ok is +// false. +func (s *stRingBuffer) NthPreviousEntry(n int) (value string, ok bool) { + if n >= s.size { + return "", false + } + index := s.head - n + if index < 0 { + index += s.max + } + return s.entries[index], true +} + +// readPasswordLine reads from reader until it finds \n or io.EOF. +// The slice returned does not include the \n. +// readPasswordLine also ignores any \r it finds. +// Windows uses \r as end of line. So, on Windows, readPasswordLine +// reads until it finds \r and ignores any \n it finds during processing. +func readPasswordLine(reader io.Reader) ([]byte, error) { + var buf [1]byte + var ret []byte + + for { + n, err := reader.Read(buf[:]) + if n > 0 { + switch buf[0] { + case '\b': + if len(ret) > 0 { + ret = ret[:len(ret)-1] + } + case '\n': + if runtime.GOOS != "windows" { + return ret, nil + } + // otherwise ignore \n + case '\r': + if runtime.GOOS == "windows" { + return ret, nil + } + // otherwise ignore \r + default: + ret = append(ret, buf[0]) + } + continue + } + if err != nil { + if err == io.EOF && len(ret) > 0 { + return ret, nil + } + return ret, err + } + } +} diff --git a/vendor/knative.dev/pkg/client/injection/kube/informers/core/v1/secret/fake/fake.go b/vendor/knative.dev/pkg/client/injection/kube/informers/core/v1/secret/fake/fake.go new file mode 100644 index 0000000000..48b2532d76 --- /dev/null +++ b/vendor/knative.dev/pkg/client/injection/kube/informers/core/v1/secret/fake/fake.go @@ -0,0 +1,40 @@ +/* +Copyright 2020 The Knative Authors + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ + +// Code generated by injection-gen. DO NOT EDIT. 
+ +package fake + +import ( + context "context" + + secret "knative.dev/pkg/client/injection/kube/informers/core/v1/secret" + fake "knative.dev/pkg/client/injection/kube/informers/factory/fake" + controller "knative.dev/pkg/controller" + injection "knative.dev/pkg/injection" +) + +var Get = secret.Get + +func init() { + injection.Fake.RegisterInformer(withInformer) +} + +func withInformer(ctx context.Context) (context.Context, controller.Informer) { + f := fake.Get(ctx) + inf := f.Core().V1().Secrets() + return context.WithValue(ctx, secret.Key{}, inf), inf.Informer() +} diff --git a/vendor/knative.dev/pkg/client/injection/kube/informers/core/v1/secret/secret.go b/vendor/knative.dev/pkg/client/injection/kube/informers/core/v1/secret/secret.go new file mode 100644 index 0000000000..5442d08b77 --- /dev/null +++ b/vendor/knative.dev/pkg/client/injection/kube/informers/core/v1/secret/secret.go @@ -0,0 +1,52 @@ +/* +Copyright 2020 The Knative Authors + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ + +// Code generated by injection-gen. DO NOT EDIT. + +package secret + +import ( + context "context" + + v1 "k8s.io/client-go/informers/core/v1" + factory "knative.dev/pkg/client/injection/kube/informers/factory" + controller "knative.dev/pkg/controller" + injection "knative.dev/pkg/injection" + logging "knative.dev/pkg/logging" +) + +func init() { + injection.Default.RegisterInformer(withInformer) +} + +// Key is used for associating the Informer inside the context.Context. +type Key struct{} + +func withInformer(ctx context.Context) (context.Context, controller.Informer) { + f := factory.Get(ctx) + inf := f.Core().V1().Secrets() + return context.WithValue(ctx, Key{}, inf), inf.Informer() +} + +// Get extracts the typed informer from the context. 
+func Get(ctx context.Context) v1.SecretInformer { + untyped := ctx.Value(Key{}) + if untyped == nil { + logging.FromContext(ctx).Panic( + "Unable to fetch k8s.io/client-go/informers/core/v1.SecretInformer from context.") + } + return untyped.(v1.SecretInformer) +} diff --git a/vendor/modules.txt b/vendor/modules.txt index d807dd0da6..259fafc501 100644 --- a/vendor/modules.txt +++ b/vendor/modules.txt @@ -311,6 +311,11 @@ github.com/stretchr/testify/assert github.com/stretchr/testify/require # github.com/tsenart/vegeta/v12 v12.8.4 github.com/tsenart/vegeta/v12/lib +# github.com/xdg/scram v0.0.0-20180814205039-7eeb5667e42c +## explicit +github.com/xdg/scram +# github.com/xdg/stringprep v1.0.0 +github.com/xdg/stringprep # go.opencensus.io v0.22.5 go.opencensus.io go.opencensus.io/internal @@ -355,7 +360,8 @@ go.uber.org/zap/internal/exit go.uber.org/zap/internal/ztest go.uber.org/zap/zapcore go.uber.org/zap/zaptest -# golang.org/x/crypto v0.0.0-20201002170205-7f63de1d35b0 +# golang.org/x/crypto v0.0.0-20201217014255-9d1352758620 +## explicit golang.org/x/crypto/cast5 golang.org/x/crypto/md4 golang.org/x/crypto/openpgp @@ -395,10 +401,13 @@ golang.org/x/sync/errgroup golang.org/x/sync/semaphore # golang.org/x/sys v0.0.0-20201201145000-ef89a241ccb3 golang.org/x/sys/internal/unsafeheader +golang.org/x/sys/plan9 golang.org/x/sys/unix golang.org/x/sys/windows golang.org/x/sys/windows/registry golang.org/x/sys/windows/svc/eventlog +# golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1 +golang.org/x/term # golang.org/x/text v0.3.4 golang.org/x/text/secure/bidirule golang.org/x/text/transform @@ -1203,6 +1212,8 @@ knative.dev/pkg/client/injection/kube/informers/core/v1/configmap knative.dev/pkg/client/injection/kube/informers/core/v1/configmap/fake knative.dev/pkg/client/injection/kube/informers/core/v1/pod knative.dev/pkg/client/injection/kube/informers/core/v1/pod/fake +knative.dev/pkg/client/injection/kube/informers/core/v1/secret +knative.dev/pkg/client/injection/kube/informers/core/v1/secret/fake knative.dev/pkg/client/injection/kube/informers/factory knative.dev/pkg/client/injection/kube/informers/factory/fake knative.dev/pkg/codegen/cmd/injection-gen
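
The raw-mode helpers renamed above (makeRaw, restore, getState, getSize) keep their platform-specific bodies but lose their exported names; callers are expected to go through the package-level wrappers of golang.org/x/term, which mirror the usage pattern shown in the removed terminal package comment. A minimal sketch, assuming the conventional exported surface (IsTerminal, MakeRaw, Restore, GetSize) over os.Stdin:

package main

import (
	"fmt"
	"os"

	"golang.org/x/term"
)

func main() {
	fd := int(os.Stdin.Fd())
	if !term.IsTerminal(fd) {
		fmt.Fprintln(os.Stderr, "stdin is not a terminal")
		return
	}

	// Switch to raw mode and restore the saved State on exit; State embeds the
	// platform-specific state struct defined in the files above.
	oldState, err := term.MakeRaw(fd)
	if err != nil {
		panic(err)
	}
	defer term.Restore(fd, oldState)

	if width, height, err := term.GetSize(fd); err == nil {
		// In raw mode OPOST is cleared, so emit an explicit carriage return.
		fmt.Printf("terminal size: %dx%d\r\n", width, height)
	}
}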
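
The vendored terminal.go implements a complete VT100 line editor on top of any io.ReadWriter that is already in raw mode. A hedged sketch of a read loop that exercises ReadLine, ReadPassword and bracketed paste mode; the runShell function and its handle callback are illustrative, not part of the vendored code:

package main

import (
	"io"
	"strings"

	"golang.org/x/term"
)

// runShell drives a term.Terminal over conn (for example an SSH channel or a
// raw-mode stdin/stdout pair). handle is a hypothetical callback that
// consumes each completed line.
func runShell(conn io.ReadWriter, handle func(string)) error {
	t := term.NewTerminal(conn, "> ")

	// With bracketed paste on, lines that consist only of pasted data are
	// returned together with ErrPasteIndicator.
	t.SetBracketedPasteMode(true)
	defer t.SetBracketedPasteMode(false)

	for {
		line, err := t.ReadLine()
		switch {
		case err == term.ErrPasteIndicator:
			// Pasted-only input: the line data is still valid, treat it literally.
		case err == io.EOF:
			// Ctrl-C, or Ctrl-D on an empty line.
			return nil
		case err != nil:
			return err
		}

		if strings.TrimSpace(line) == "passwd" {
			// ReadPassword temporarily swaps the prompt and disables echo.
			if _, err := t.ReadPassword("new password: "); err != nil {
				return err
			}
			continue
		}
		handle(line)
	}
}

SetSize should additionally be called whenever the caller learns of a window-size change, so that the editor's wrapping and cursor math stays correct.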
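
The new secret injection package registers a shared Secret informer with knative.dev/pkg's injection framework, and the fake package mirrors it for tests. A hedged sketch of reading a Secret through it; the package name and resolveSecret helper are illustrative, not part of this change:

package reconcilersketch // illustrative name

import (
	"context"
	"fmt"

	corev1 "k8s.io/api/core/v1"

	secretinformer "knative.dev/pkg/client/injection/kube/informers/core/v1/secret"
)

// resolveSecret reads a Secret from the injected informer's cache.
// secretinformer.Get panics unless the informer was registered via injection
// (or via the fake package in tests), so this must run in an
// injection-managed context.
func resolveSecret(ctx context.Context, namespace, name string) (*corev1.Secret, error) {
	lister := secretinformer.Get(ctx).Lister()

	s, err := lister.Secrets(namespace).Get(name)
	if err != nil {
		return nil, fmt.Errorf("failed to get secret %s/%s: %w", namespace, name, err)
	}
	return s, nil
}

In unit tests, importing knative.dev/pkg/client/injection/kube/informers/core/v1/secret/fake is enough to register the same informer against the fake factory, so Get keeps resolving.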