diff --git a/codegen/schema.go b/codegen/schema.go index 7ae052d2..3741e296 100644 --- a/codegen/schema.go +++ b/codegen/schema.go @@ -12,19 +12,19 @@ import ( var defaultScalarTypes = schema.SchemaResponseScalarTypes{ "String": schema.ScalarType{ AggregateFunctions: schema.ScalarTypeAggregateFunctions{}, - ComparisonOperators: schema.ScalarTypeComparisonOperators{}, + ComparisonOperators: map[string]schema.ComparisonOperatorDefinition{}, }, "Int": schema.ScalarType{ AggregateFunctions: schema.ScalarTypeAggregateFunctions{}, - ComparisonOperators: schema.ScalarTypeComparisonOperators{}, + ComparisonOperators: map[string]schema.ComparisonOperatorDefinition{}, }, "Float": schema.ScalarType{ AggregateFunctions: schema.ScalarTypeAggregateFunctions{}, - ComparisonOperators: schema.ScalarTypeComparisonOperators{}, + ComparisonOperators: map[string]schema.ComparisonOperatorDefinition{}, }, "Boolean": schema.ScalarType{ AggregateFunctions: schema.ScalarTypeAggregateFunctions{}, - ComparisonOperators: schema.ScalarTypeComparisonOperators{}, + ComparisonOperators: map[string]schema.ComparisonOperatorDefinition{}, }, } diff --git a/connector/server.go b/connector/server.go index e7fda2be..4361a01d 100644 --- a/connector/server.go +++ b/connector/server.go @@ -9,6 +9,7 @@ import ( "net/http" "os" "os/signal" + "strings" "time" "github.com/hasura/ndc-sdk-go/schema" @@ -232,10 +233,10 @@ func (s *Server[RawConfiguration, Configuration, State]) Query(w http.ResponseWr s.telemetry.queryLatencyHistogram.Record(r.Context(), time.Since(startTime).Seconds(), metric.WithAttributes(collectionAttr)) } -func (s *Server[RawConfiguration, Configuration, State]) Explain(w http.ResponseWriter, r *http.Request) { +func (s *Server[RawConfiguration, Configuration, State]) QueryExplain(w http.ResponseWriter, r *http.Request) { startTime := time.Now() logger := GetLogger(r.Context()) - ctx, span := s.telemetry.Tracer.Start(r.Context(), "Explain", trace.WithSpanKind(trace.SpanKindServer)) + ctx, span := s.telemetry.Tracer.Start(r.Context(), "Query Explain", trace.WithSpanKind(trace.SpanKindServer)) defer span.End() attributes := []attribute.KeyValue{} @@ -266,7 +267,69 @@ func (s *Server[RawConfiguration, Configuration, State]) Explain(w http.Response execCtx, execSpan := s.telemetry.Tracer.Start(ctx, "Execute Explain") defer execSpan.End() - response, err := s.connector.Explain(execCtx, s.configuration, s.state, &body) + response, err := s.connector.QueryExplain(execCtx, s.configuration, s.state, &body) + if err != nil { + status := writeError(w, logger, err) + statusAttributes := []attribute.KeyValue{ + attribute.String("status", "failed"), + attribute.String("reason", fmt.Sprintf("%d", status)), + } + span.SetAttributes(attributes...) 
+ s.telemetry.explainCounter.Add(r.Context(), 1, metric.WithAttributes(append(attributes, statusAttributes...)...)) + return + } + execSpan.End() + + statusAttribute := attribute.String("status", "success") + span.SetAttributes(statusAttribute) + _, responseSpan := s.telemetry.Tracer.Start(ctx, "Response") + writeJson(w, logger, http.StatusOK, response) + responseSpan.End() + s.telemetry.explainCounter.Add(r.Context(), 1, metric.WithAttributes(append(attributes, statusAttribute)...)) + + // record latency for success requests only + s.telemetry.explainLatencyHistogram.Record(r.Context(), time.Since(startTime).Seconds(), metric.WithAttributes(collectionAttr)) +} + +func (s *Server[RawConfiguration, Configuration, State]) MutationExplain(w http.ResponseWriter, r *http.Request) { + startTime := time.Now() + logger := GetLogger(r.Context()) + ctx, span := s.telemetry.Tracer.Start(r.Context(), "Mutation Explain", trace.WithSpanKind(trace.SpanKindServer)) + defer span.End() + + attributes := []attribute.KeyValue{} + _, decodeSpan := s.telemetry.Tracer.Start(ctx, "Decode JSON Body") + defer decodeSpan.End() + var body schema.MutationRequest + if err := json.NewDecoder(r.Body).Decode(&body); err != nil { + writeJson(w, logger, http.StatusBadRequest, schema.ErrorResponse{ + Message: "failed to decode json request body", + Details: map[string]any{ + "cause": err.Error(), + }, + }) + + attributes := []attribute.KeyValue{ + attribute.String("status", "failed"), + attribute.String("reason", "json_decode"), + } + span.SetAttributes(attributes...) + s.telemetry.explainCounter.Add(r.Context(), 1, metric.WithAttributes(attributes...)) + return + } + decodeSpan.End() + + var operationNames []string + for _, op := range body.Operations { + operationNames = append(operationNames, op.Name) + } + collectionAttr := attribute.String("operations", strings.Join(operationNames, ",")) + attributes = append(attributes, collectionAttr) + span.SetAttributes(attributes...) 
+ execCtx, execSpan := s.telemetry.Tracer.Start(ctx, "Execute Explain") + defer execSpan.End() + + response, err := s.connector.MutationExplain(execCtx, s.configuration, s.state, &body) if err != nil { status := writeError(w, logger, err) statusAttributes := []attribute.KeyValue{ @@ -349,9 +412,10 @@ func (s *Server[RawConfiguration, Configuration, State]) buildHandler() *http.Se router.Use("/capabilities", http.MethodGet, s.withAuth(s.GetCapabilities)) router.Use("/schema", http.MethodGet, s.withAuth(s.GetSchema)) router.Use("/query", http.MethodPost, s.withAuth(s.Query)) - router.Use("/explain", http.MethodPost, s.withAuth(s.Explain)) + router.Use("/query/explain", http.MethodPost, s.withAuth(s.QueryExplain)) + router.Use("/mutation/explain", http.MethodPost, s.withAuth(s.MutationExplain)) router.Use("/mutation", http.MethodPost, s.withAuth(s.Mutation)) - router.Use("/health", http.MethodGet, s.withAuth(s.Health)) + router.Use("/health", http.MethodGet, s.Health) router.Use("/metrics", http.MethodGet, s.withAuth(promhttp.Handler().ServeHTTP)) return router.Build() diff --git a/connector/server_test.go b/connector/server_test.go index 49ea9ae5..934a6438 100644 --- a/connector/server_test.go +++ b/connector/server_test.go @@ -31,7 +31,7 @@ type mockState struct{} type mockConnector struct{} var mockCapabilities = schema.CapabilitiesResponse{ - Versions: "^0.1.0", + Version: "^0.1.0", Capabilities: schema.Capabilities{ Query: schema.QueryCapabilities{ Aggregates: schema.LeafCapability{}, @@ -48,10 +48,8 @@ var mockSchema = schema.SchemaResponse{ ScalarTypes: schema.SchemaResponseScalarTypes{ "String": schema.ScalarType{ AggregateFunctions: schema.ScalarTypeAggregateFunctions{}, - ComparisonOperators: schema.ScalarTypeComparisonOperators{ - "like": schema.ComparisonOperatorDefinition{ - ArgumentType: schema.NewNamedType("String").Encode(), - }, + ComparisonOperators: map[string]schema.ComparisonOperatorDefinition{ + "like": schema.NewComparisonOperatorCustom(schema.NewNamedType("String")).Encode(), }, }, "Int": schema.ScalarType{ @@ -63,7 +61,7 @@ var mockSchema = schema.SchemaResponse{ ResultType: schema.NewNullableNamedType("Int").Encode(), }, }, - ComparisonOperators: schema.ScalarTypeComparisonOperators{}, + ComparisonOperators: map[string]schema.ComparisonOperatorDefinition{}, }, }, ObjectTypes: schema.SchemaResponseObjectTypes{ @@ -157,7 +155,13 @@ func (mc *mockConnector) GetCapabilities(configuration *mockConfiguration) *sche func (mc *mockConnector) GetSchema(configuration *mockConfiguration) (*schema.SchemaResponse, error) { return &mockSchema, nil } -func (mc *mockConnector) Explain(ctx context.Context, configuration *mockConfiguration, state *mockState, request *schema.QueryRequest) (*schema.ExplainResponse, error) { +func (mc *mockConnector) QueryExplain(ctx context.Context, configuration *mockConfiguration, state *mockState, request *schema.QueryRequest) (*schema.ExplainResponse, error) { + return &schema.ExplainResponse{ + Details: schema.ExplainResponseDetails{}, + }, nil +} + +func (mc *mockConnector) MutationExplain(ctx context.Context, configuration *mockConfiguration, state *mockState, request *schema.MutationRequest) (*schema.ExplainResponse, error) { return &schema.ExplainResponse{ Details: schema.ExplainResponseDetails{}, }, nil diff --git a/connector/types.go b/connector/types.go index aa683a8b..13f5ced7 100644 --- a/connector/types.go +++ b/connector/types.go @@ -67,11 +67,17 @@ type Connector[RawConfiguration any, Configuration any, State any] interface { // [schema 
endpoint]: https://hasura.github.io/ndc-spec/specification/schema/index.html GetSchema(configuration *Configuration) (*schema.SchemaResponse, error) - // Explain a query by creating an execution plan. + // QueryExplain explains a query by creating an execution plan. // This function implements the [explain endpoint] from the NDC specification. // // [explain endpoint]: https://hasura.github.io/ndc-spec/specification/explain.html - Explain(ctx context.Context, configuration *Configuration, state *State, request *schema.QueryRequest) (*schema.ExplainResponse, error) + QueryExplain(ctx context.Context, configuration *Configuration, state *State, request *schema.QueryRequest) (*schema.ExplainResponse, error) + + // MutationExplain explains a mutation by creating an execution plan. + // This function implements the [explain endpoint] from the NDC specification. + // + // [explain endpoint]: https://hasura.github.io/ndc-spec/specification/explain.html + MutationExplain(ctx context.Context, configuration *Configuration, state *State, request *schema.MutationRequest) (*schema.ExplainResponse, error) // Execute a mutation. // diff --git a/example/codegen/connector.go b/example/codegen/connector.go index 9def775c..0a1f5763 100644 --- a/example/codegen/connector.go +++ b/example/codegen/connector.go @@ -40,7 +40,7 @@ func (mc *Connector) HealthCheck(ctx context.Context, configuration *Configurati func (mc *Connector) GetCapabilities(configuration *Configuration) *schema.CapabilitiesResponse { return &schema.CapabilitiesResponse{ - Versions: "^0.1.0", + Version: "^0.1.0", Capabilities: schema.Capabilities{ Query: schema.QueryCapabilities{ Variables: schema.LeafCapability{}, @@ -49,6 +49,10 @@ func (mc *Connector) GetCapabilities(configuration *Configuration) *schema.Capab } } -func (mc *Connector) Explain(ctx context.Context, configuration *Configuration, state *State, request *schema.QueryRequest) (*schema.ExplainResponse, error) { - return nil, schema.NotSupportedError("explain has not been supported yet", nil) +func (mc *Connector) QueryExplain(ctx context.Context, configuration *Configuration, state *State, request *schema.QueryRequest) (*schema.ExplainResponse, error) { + return nil, schema.NotSupportedError("query explain has not been supported yet", nil) +} + +func (mc *Connector) MutationExplain(ctx context.Context, configuration *Configuration, state *State, request *schema.MutationRequest) (*schema.ExplainResponse, error) { + return nil, schema.NotSupportedError("mutation explain has not been supported yet", nil) } diff --git a/example/reference/connector.go b/example/reference/connector.go index e3e801a1..deec108a 100644 --- a/example/reference/connector.go +++ b/example/reference/connector.go @@ -85,7 +85,7 @@ func (mc *Connector) HealthCheck(ctx context.Context, configuration *Configurati func (mc *Connector) GetCapabilities(configuration *Configuration) *schema.CapabilitiesResponse { return &schema.CapabilitiesResponse{ - Versions: "^0.1.0", + Version: "^0.1.0", Capabilities: schema.Capabilities{ Query: schema.QueryCapabilities{ Aggregates: schema.LeafCapability{}, @@ -104,10 +104,8 @@ func (mc *Connector) GetSchema(configuration *Configuration) (*schema.SchemaResp ScalarTypes: schema.SchemaResponseScalarTypes{ "String": schema.ScalarType{ AggregateFunctions: schema.ScalarTypeAggregateFunctions{}, - ComparisonOperators: schema.ScalarTypeComparisonOperators{ - "like": schema.ComparisonOperatorDefinition{ - ArgumentType: schema.NewNamedType("String").Encode(), - }, + ComparisonOperators: 
map[string]schema.ComparisonOperatorDefinition{ + "like": schema.NewComparisonOperatorCustom(schema.NewNamedType("String")).Encode(), }, }, "Int": schema.ScalarType{ @@ -224,7 +222,13 @@ func (mc *Connector) GetSchema(configuration *Configuration) (*schema.SchemaResp }, nil } -func (mc *Connector) Explain(ctx context.Context, configuration *Configuration, state *State, request *schema.QueryRequest) (*schema.ExplainResponse, error) { +func (mc *Connector) QueryExplain(ctx context.Context, configuration *Configuration, state *State, request *schema.QueryRequest) (*schema.ExplainResponse, error) { + return &schema.ExplainResponse{ + Details: schema.ExplainResponseDetails{}, + }, nil +} + +func (mc *Connector) MutationExplain(ctx context.Context, configuration *Configuration, state *State, request *schema.MutationRequest) (*schema.ExplainResponse, error) { return &schema.ExplainResponse{ Details: schema.ExplainResponseDetails{}, }, nil diff --git a/schema/extend.go b/schema/extend.go index 8cc6336c..c6def4ed 100644 --- a/schema/extend.go +++ b/schema/extend.go @@ -17,15 +17,17 @@ var ( type TypeEnum string const ( - TypeNamed TypeEnum = "named" - TypeNullable TypeEnum = "nullable" - TypeArray TypeEnum = "array" + TypeNamed TypeEnum = "named" + TypeNullable TypeEnum = "nullable" + TypeArray TypeEnum = "array" + TypePredicate TypeEnum = "predicate" ) var enumValues_Type = []TypeEnum{ TypeNamed, TypeNullable, TypeArray, + TypePredicate, } // ParseTypeEnum parses a type enum from string @@ -92,6 +94,9 @@ func (j *Type) UnmarshalJSON(b []byte) error { if err := json.Unmarshal(rawName, &name); err != nil { return fmt.Errorf("field name in Type: %s", err) } + if name == "" { + return fmt.Errorf("field name in Type: required") + } result["name"] = name case TypeNullable: rawUnderlyingType, ok := raw["underlying_type"] @@ -113,6 +118,19 @@ func (j *Type) UnmarshalJSON(b []byte) error { return fmt.Errorf("field element_type in Type: %s", err) } result["element_type"] = elementType + case TypePredicate: + rawName, ok := raw["object_type_name"] + if !ok { + return errors.New("field object_type_name in Type is required for predicate type") + } + var objectTypeName string + if err := json.Unmarshal(rawName, &objectTypeName); err != nil { + return fmt.Errorf("field object_type_name in Type: %s", err) + } + if objectTypeName == "" { + return fmt.Errorf("field object_type_name in Type: required") + } + result["object_type_name"] = objectTypeName } *j = result return nil @@ -201,6 +219,22 @@ func (ty Type) AsArray() (*ArrayType, error) { }, nil } +// AsPredicate tries to convert the current type to PredicateType +func (ty Type) AsPredicate() (*PredicateType, error) { + t, err := ty.Type() + if err != nil { + return nil, err + } + if t != TypePredicate { + return nil, fmt.Errorf("invalid type; expected %s, got %s", TypePredicate, t) + } + + return &PredicateType{ + Type: t, + ObjectTypeName: getStringValueByKey(ty, "object_type_name"), + }, nil +} + // Interface returns the TypeEncoder interface func (ty Type) Interface() (TypeEncoder, error) { t, err := ty.Type() @@ -215,6 +249,8 @@ func (ty Type) Interface() (TypeEncoder, error) { return ty.AsNullable() case TypeArray: return ty.AsArray() + case TypePredicate: + return ty.AsPredicate() default: return nil, fmt.Errorf("invalid type: %s", t) } @@ -310,6 +346,29 @@ func NewArrayType(elementType TypeEncoder) *ArrayType { } } +// PredicateType represents a predicate type for a given object type +type PredicateType struct { + Type TypeEnum `json:"type" 
mapstructure:"type"` + // The name of the object type of the predicate + ObjectTypeName string `json:"object_type_name" mapstructure:"object_type_name"` +} + +// NewPredicateType creates a new PredicateType instance +func NewPredicateType(objectTypeName string) *PredicateType { + return &PredicateType{ + Type: TypePredicate, + ObjectTypeName: objectTypeName, + } +} + +// Encode returns the raw Type instance +func (ty PredicateType) Encode() Type { + return map[string]any{ + "type": ty.Type, + "object_type_name": ty.ObjectTypeName, + } +} + // ArgumentType represents an argument type enum type ArgumentType string @@ -542,6 +601,16 @@ func (j *Field) UnmarshalJSON(b []byte) error { } results["column"] = column + + // decode fields + rawFields, ok := raw["fields"] + if ok { + var fields NestedField + if err = json.Unmarshal(rawFields, &fields); err != nil { + return fmt.Errorf("field fields in Field: %s", err) + } + results["fields"] = fields + } case FieldTypeRelationship: relationship, err := unmarshalStringFromJsonMap(raw, "relationship", true) if err != nil { @@ -608,10 +677,21 @@ func (j Field) AsColumn() (*ColumnField, error) { if column == "" { return nil, errors.New("ColumnField.column is required") } - return &ColumnField{ + + result := &ColumnField{ Type: t, Column: column, - }, nil + } + rawFields, ok := j["fields"] + if ok && rawFields != nil { + fields, ok := rawFields.(NestedField) + if !ok { + return nil, fmt.Errorf("invalid ColumnField.fields type; expected NestedField, got %+v", rawFields) + } + result.Fields = fields + } + + return result, nil } // AsRelationship tries to convert the current type to RelationshipField @@ -676,6 +756,10 @@ type ColumnField struct { Type FieldType `json:"type" mapstructure:"type"` // Column name Column string `json:"column" mapstructure:"column"` + // When the type of the column is a (possibly-nullable) array or object, + // the caller can request a subset of the complete column data, by specifying fields to fetch here. + // If omitted, the column data will be fetched in full. 
+ Fields NestedField `json:"fields" mapstructure:"fields"` } // Encode converts the instance to raw Field @@ -683,14 +767,16 @@ func (f ColumnField) Encode() Field { return Field{ "type": f.Type, "column": f.Column, + "fields": f.Fields, } } // NewColumnField creates a new ColumnField instance -func NewColumnField(column string) *ColumnField { +func NewColumnField(column string, fields NestedField) *ColumnField { return &ColumnField{ Type: FieldTypeColumn, Column: column, + Fields: fields, } } @@ -865,13 +951,12 @@ type ComparisonTarget struct { type ExpressionType string const ( - ExpressionTypeAnd ExpressionType = "and" - ExpressionTypeOr ExpressionType = "or" - ExpressionTypeNot ExpressionType = "not" - ExpressionTypeUnaryComparisonOperator ExpressionType = "unary_comparison_operator" - ExpressionTypeBinaryComparisonOperator ExpressionType = "binary_comparison_operator" - ExpressionTypeBinaryArrayComparisonOperator ExpressionType = "binary_array_comparison_operator" - ExpressionTypeExists ExpressionType = "exists" + ExpressionTypeAnd ExpressionType = "and" + ExpressionTypeOr ExpressionType = "or" + ExpressionTypeNot ExpressionType = "not" + ExpressionTypeUnaryComparisonOperator ExpressionType = "unary_comparison_operator" + ExpressionTypeBinaryComparisonOperator ExpressionType = "binary_comparison_operator" + ExpressionTypeExists ExpressionType = "exists" ) var enumValues_ExpressionType = []ExpressionType{ @@ -880,7 +965,6 @@ var enumValues_ExpressionType = []ExpressionType{ ExpressionTypeNot, ExpressionTypeUnaryComparisonOperator, ExpressionTypeBinaryComparisonOperator, - ExpressionTypeBinaryArrayComparisonOperator, ExpressionTypeExists, } @@ -910,45 +994,6 @@ func (j *ExpressionType) UnmarshalJSON(b []byte) error { return nil } -// BinaryComparisonOperatorType represents a binary comparison operator type enum -type BinaryComparisonOperatorType string - -const ( - BinaryComparisonOperatorTypeEqual BinaryComparisonOperatorType = "equal" - BinaryComparisonOperatorTypeOther BinaryComparisonOperatorType = "other" -) - -var enumValues_BinaryComparisonOperatorType = []BinaryComparisonOperatorType{ - BinaryComparisonOperatorTypeEqual, - BinaryComparisonOperatorTypeOther, -} - -// ParseBinaryComparisonOperatorType parses a comparison target type argument type from string -func ParseBinaryComparisonOperatorType(input string) (*BinaryComparisonOperatorType, error) { - if !Contains(enumValues_BinaryComparisonOperatorType, BinaryComparisonOperatorType(input)) { - return nil, fmt.Errorf("failed to parse BinaryComparisonOperatorType, expect one of %v", enumValues_BinaryComparisonOperatorType) - } - result := BinaryComparisonOperatorType(input) - - return &result, nil -} - -// UnmarshalJSON implements json.Unmarshaler. 
-func (j *BinaryComparisonOperatorType) UnmarshalJSON(b []byte) error { - var rawValue string - if err := json.Unmarshal(b, &rawValue); err != nil { - return err - } - - value, err := ParseBinaryComparisonOperatorType(rawValue) - if err != nil { - return err - } - - *j = *value - return nil -} - // ComparisonValueType represents a comparison value type enum type ComparisonValueType string @@ -1450,12 +1495,6 @@ func (ei ExistsInCollectionUnrelated) Encode() ExistsInCollection { } } -// BinaryComparisonOperator represents a binary comparison operator object -type BinaryComparisonOperator struct { - Type BinaryComparisonOperatorType `json:"type" mapstructure:"type"` - Name string `json:"name,omitempty" mapstructure:"name"` -} - // Expression represents the query expression object type Expression map[string]any @@ -1525,10 +1564,14 @@ func (j *Expression) UnmarshalJSON(b []byte) error { if !ok { return fmt.Errorf("field operator in Expression is required for '%s' type", ty) } - var operator BinaryComparisonOperator + var operator string if err := json.Unmarshal(rawOperator, &operator); err != nil { return fmt.Errorf("field operator in Expression: %s", err) } + + if operator == "" { + return fmt.Errorf("field operator in Expression is required for '%s' type", ty) + } result["operator"] = operator rawColumn, ok := raw["column"] @@ -1550,46 +1593,15 @@ func (j *Expression) UnmarshalJSON(b []byte) error { return fmt.Errorf("field value in Expression: %s", err) } result["value"] = value - case ExpressionTypeBinaryArrayComparisonOperator: - rawOperator, ok := raw["operator"] - if !ok { - return fmt.Errorf("field operator in Expression is required for '%s' type", ty) - } - var operator BinaryArrayComparisonOperator - if err := json.Unmarshal(rawOperator, &operator); err != nil { - return fmt.Errorf("field operator in Expression: %s", err) - } - result["operator"] = operator - - rawColumn, ok := raw["column"] - if !ok { - return fmt.Errorf("field column in Expression is required for '%s' type", ty) - } - var column ComparisonTarget - if err := json.Unmarshal(rawColumn, &column); err != nil { - return fmt.Errorf("field column in Expression: %s", err) - } - result["column"] = column - - rawValues, ok := raw["values"] - if !ok { - return fmt.Errorf("field values in Expression is required for '%s' type", ty) - } - var values []ComparisonValue - if err := json.Unmarshal(rawValues, &values); err != nil { - return fmt.Errorf("field values in Expression: %s", err) - } - result["values"] = values case ExpressionTypeExists: - rawWhere, ok := raw["where"] - if !ok { - return fmt.Errorf("field where in Expression is required for '%s' type", ty) - } - var where Expression - if err := json.Unmarshal(rawWhere, &where); err != nil { - return fmt.Errorf("field where in Expression: %s", err) + rawPredicate, ok := raw["predicate"] + if ok { + var predicate Expression + if err := json.Unmarshal(rawPredicate, &predicate); err != nil { + return fmt.Errorf("field predicate in Expression: %s", err) + } + result["predicate"] = predicate } - result["where"] = where rawInCollection, ok := raw["in_collection"] if !ok { @@ -1751,15 +1763,6 @@ func (j Expression) AsBinaryComparisonOperator() (*ExpressionBinaryComparisonOpe return nil, fmt.Errorf("invalid type; expected: %s, got: %s", ExpressionTypeBinaryComparisonOperator, t) } - rawOperator, ok := j["operator"] - if !ok { - return nil, errors.New("ExpressionBinaryComparisonOperator.operator is required") - } - operator, ok := rawOperator.(BinaryComparisonOperator) - if !ok { - 
return nil, fmt.Errorf("invalid ExpressionBinaryComparisonOperator.operator type; expected: BinaryComparisonOperator, got: %+v", rawOperator) - } - rawColumn, ok := j["column"] if !ok { return nil, errors.New("ExpressionBinaryComparisonOperator.column is required") @@ -1780,57 +1783,12 @@ func (j Expression) AsBinaryComparisonOperator() (*ExpressionBinaryComparisonOpe return &ExpressionBinaryComparisonOperator{ Type: t, - Operator: operator, + Operator: getStringValueByKey(j, "operator"), Column: column, Value: value, }, nil } -// AsBinaryArrayComparisonOperator tries to convert the instance to ExpressionBinaryArrayComparisonOperator instance -func (j Expression) AsBinaryArrayComparisonOperator() (*ExpressionBinaryArrayComparisonOperator, error) { - t, err := j.Type() - if err != nil { - return nil, err - } - if t != ExpressionTypeBinaryArrayComparisonOperator { - return nil, fmt.Errorf("invalid type; expected: %s, got: %s", ExpressionTypeBinaryArrayComparisonOperator, t) - } - - rawOperator, ok := j["operator"] - if !ok { - return nil, errors.New("ExpressionBinaryComparisonOperator.operator is required") - } - operator, ok := rawOperator.(BinaryArrayComparisonOperator) - if !ok { - return nil, fmt.Errorf("invalid ExpressionBinaryArrayComparisonOperator.operator type; expected: BinaryArrayComparisonOperator, got: %+v", rawOperator) - } - - rawColumn, ok := j["column"] - if !ok { - return nil, errors.New("ExpressionBinaryComparisonOperator.column is required") - } - column, ok := rawColumn.(ComparisonTarget) - if !ok { - return nil, fmt.Errorf("invalid ExpressionBinaryArrayComparisonOperator.column type; expected: ComparisonTarget, got: %+v", rawColumn) - } - - rawValues, ok := j["values"] - if !ok { - return nil, errors.New("ExpressionBinaryComparisonOperator.values is required") - } - values, ok := rawValues.([]ComparisonValue) - if !ok { - return nil, fmt.Errorf("invalid ExpressionBinaryArrayComparisonOperator.values type; expected: []ComparisonValue, got: %+v", rawValues) - } - - return &ExpressionBinaryArrayComparisonOperator{ - Type: t, - Operator: operator, - Column: column, - Values: values, - }, nil -} - // AsExists tries to convert the instance to ExpressionExists instance func (j Expression) AsExists() (*ExpressionExists, error) { t, err := j.Type() @@ -1841,15 +1799,6 @@ func (j Expression) AsExists() (*ExpressionExists, error) { return nil, fmt.Errorf("invalid type; expected: %s, got: %s", ExpressionTypeExists, t) } - rawWhere, ok := j["where"] - if !ok { - return nil, errors.New("ExpressionExists.where is required") - } - where, ok := rawWhere.(Expression) - if !ok { - return nil, fmt.Errorf("invalid ExpressionExists.where type; expected: Expression, got: %+v", rawWhere) - } - rawInCollection, ok := j["in_collection"] if !ok { return nil, errors.New("ExpressionExists.in_collection is required") @@ -1859,11 +1808,19 @@ func (j Expression) AsExists() (*ExpressionExists, error) { return nil, fmt.Errorf("invalid ExpressionExists.in_collection type; expected: ExistsInCollection, got: %+v", rawInCollection) } - return &ExpressionExists{ + result := &ExpressionExists{ Type: t, - Where: where, InCollection: inCollection, - }, nil + } + rawPredicate, ok := j["predicate"] + if ok && rawPredicate != nil { + predicate, ok := rawPredicate.(Expression) + if !ok { + return nil, fmt.Errorf("invalid ExpressionExists.predicate type; expected: Expression, got: %+v", rawPredicate) + } + result.Predicate = predicate + } + return result, nil } // Interface tries to convert the instance to the 
ExpressionEncoder interface @@ -1883,8 +1840,6 @@ func (j Expression) Interface() (ExpressionEncoder, error) { return j.AsUnaryComparisonOperator() case ExpressionTypeBinaryComparisonOperator: return j.AsBinaryComparisonOperator() - case ExpressionTypeBinaryArrayComparisonOperator: - return j.AsBinaryArrayComparisonOperator() case ExpressionTypeExists: return j.AsExists() default: @@ -1967,10 +1922,10 @@ func (exp ExpressionUnaryComparisonOperator) Encode() Expression { // // [binary operator expression]: https://hasura.github.io/ndc-spec/specification/queries/filtering.html?highlight=expression#unary-operators type ExpressionBinaryComparisonOperator struct { - Type ExpressionType `json:"type" mapstructure:"type"` - Operator BinaryComparisonOperator `json:"operator" mapstructure:"operator"` - Column ComparisonTarget `json:"column" mapstructure:"column"` - Value ComparisonValue `json:"value" mapstructure:"value"` + Type ExpressionType `json:"type" mapstructure:"type"` + Operator string `json:"operator" mapstructure:"operator"` + Column ComparisonTarget `json:"column" mapstructure:"column"` + Value ComparisonValue `json:"value" mapstructure:"value"` } // Encode converts the instance to a raw Expression @@ -1983,32 +1938,12 @@ func (exp ExpressionBinaryComparisonOperator) Encode() Expression { } } -// ExpressionBinaryArrayComparisonOperator is an object which represents an [binary array-valued comparison operators expression] -// -// [binary array-valued comparison operators expression]: https://hasura.github.io/ndc-spec/specification/queries/filtering.html?highlight=expression#binary-array-valued-comparison-operators -type ExpressionBinaryArrayComparisonOperator struct { - Type ExpressionType `json:"type" mapstructure:"type"` - Operator BinaryArrayComparisonOperator `json:"operator" mapstructure:"operator"` - Column ComparisonTarget `json:"column" mapstructure:"column"` - Values []ComparisonValue `json:"values" mapstructure:"values"` -} - -// Encode converts the instance to a raw Expression -func (exp ExpressionBinaryArrayComparisonOperator) Encode() Expression { - return Expression{ - "type": exp.Type, - "operator": exp.Operator, - "column": exp.Column, - "values": exp.Values, - } -} - // ExpressionExists is an object which represents an [EXISTS expression] // // [EXISTS expression]: https://hasura.github.io/ndc-spec/specification/queries/filtering.html?highlight=expression#exists-expressions type ExpressionExists struct { Type ExpressionType `json:"type" mapstructure:"type"` - Where Expression `json:"where" mapstructure:"where"` + Predicate Expression `json:"predicate" mapstructure:"predicate"` InCollection ExistsInCollection `json:"in_collection" mapstructure:"in_collection"` } @@ -2016,7 +1951,7 @@ type ExpressionExists struct { func (exp ExpressionExists) Encode() Expression { return Expression{ "type": exp.Type, - "where": exp.Where, + "predicate": exp.Predicate, "in_collection": exp.InCollection, } } @@ -2651,3 +2586,472 @@ func (ob OrderByStarCountAggregate) Encode() OrderByTarget { "path": ob.Path, } } + +// ComparisonOperatorDefinitionType represents a binary comparison operator type enum +type ComparisonOperatorDefinitionType string + +const ( + ComparisonOperatorDefinitionTypeEqual ComparisonOperatorDefinitionType = "equal" + ComparisonOperatorDefinitionTypeIn ComparisonOperatorDefinitionType = "in" + ComparisonOperatorDefinitionTypeCustom ComparisonOperatorDefinitionType = "custom" +) + +var enumValues_ComparisonOperatorDefinitionType = []ComparisonOperatorDefinitionType{ + 
ComparisonOperatorDefinitionTypeEqual, + ComparisonOperatorDefinitionTypeIn, + ComparisonOperatorDefinitionTypeCustom, +} + +// ParseComparisonOperatorDefinitionType parses a type of a comparison operator definition +func ParseComparisonOperatorDefinitionType(input string) (*ComparisonOperatorDefinitionType, error) { + if !Contains(enumValues_ComparisonOperatorDefinitionType, ComparisonOperatorDefinitionType(input)) { + return nil, fmt.Errorf("failed to parse ComparisonOperatorDefinitionType, expect one of %v", enumValues_ComparisonOperatorDefinitionType) + } + result := ComparisonOperatorDefinitionType(input) + + return &result, nil +} + +// UnmarshalJSON implements json.Unmarshaler. +func (j *ComparisonOperatorDefinitionType) UnmarshalJSON(b []byte) error { + var rawValue string + if err := json.Unmarshal(b, &rawValue); err != nil { + return err + } + + value, err := ParseComparisonOperatorDefinitionType(rawValue) + if err != nil { + return err + } + + *j = *value + return nil +} + +// ComparisonOperatorDefinition the definition of a comparison operator on a scalar type +type ComparisonOperatorDefinition map[string]any + +// UnmarshalJSON implements json.Unmarshaler. +func (j *ComparisonOperatorDefinition) UnmarshalJSON(b []byte) error { + var raw map[string]json.RawMessage + if err := json.Unmarshal(b, &raw); err != nil { + return err + } + + rawType, ok := raw["type"] + if !ok { + return errors.New("field type in ComparisonOperatorDefinition: required") + } + + var ty ComparisonOperatorDefinitionType + if err := json.Unmarshal(rawType, &ty); err != nil { + return fmt.Errorf("field type in ComparisonOperatorDefinition: %s", err) + } + + result := map[string]any{ + "type": ty, + } + switch ty { + case ComparisonOperatorDefinitionTypeEqual: + case ComparisonOperatorDefinitionTypeIn: + case ComparisonOperatorDefinitionTypeCustom: + rawArgumentType, ok := raw["argument_type"] + if !ok { + return errors.New("field argument_type in ComparisonOperatorDefinition is required for custom type") + } + var argumentType Type + if err := json.Unmarshal(rawArgumentType, &argumentType); err != nil { + return fmt.Errorf("field argument_type in ComparisonOperatorDefinition: %s", err) + } + result["argument_type"] = argumentType + } + *j = result + return nil +} + +// Type gets the type enum of the current type +func (j ComparisonOperatorDefinition) Type() (ComparisonOperatorDefinitionType, error) { + t, ok := j["type"] + if !ok { + return ComparisonOperatorDefinitionType(""), errTypeRequired + } + switch raw := t.(type) { + case string: + v, err := ParseComparisonOperatorDefinitionType(raw) + if err != nil { + return ComparisonOperatorDefinitionType(""), err + } + return *v, nil + case ComparisonOperatorDefinitionType: + return raw, nil + default: + return ComparisonOperatorDefinitionType(""), fmt.Errorf("invalid type: %+v", t) + } +} + +// AsEqual tries to convert the instance to ComparisonOperatorEqual type +func (j ComparisonOperatorDefinition) AsEqual() (*ComparisonOperatorEqual, error) { + t, err := j.Type() + if err != nil { + return nil, err + } + if t != ComparisonOperatorDefinitionTypeEqual { + return nil, fmt.Errorf("invalid type; expected: %s, got: %s", ComparisonOperatorDefinitionTypeEqual, t) + } + + return &ComparisonOperatorEqual{ + Type: t, + }, nil +} + +// AsIn tries to convert the instance to ComparisonOperatorIn type +func (j ComparisonOperatorDefinition) AsIn() (*ComparisonOperatorIn, error) { + t, err := j.Type() + if err != nil { + return nil, err + } + if t != 
ComparisonOperatorDefinitionTypeIn { + return nil, fmt.Errorf("invalid type; expected: %s, got: %s", ComparisonOperatorDefinitionTypeIn, t) + } + + return &ComparisonOperatorIn{ + Type: t, + }, nil +} + +// AsCustom tries to convert the instance to ComparisonOperatorCustom type +func (j ComparisonOperatorDefinition) AsCustom() (*ComparisonOperatorCustom, error) { + t, err := j.Type() + if err != nil { + return nil, err + } + if t != ComparisonOperatorDefinitionTypeCustom { + return nil, fmt.Errorf("invalid type; expected: %s, got: %s", ComparisonOperatorDefinitionTypeCustom, t) + } + + rawArg, ok := j["argument_type"] + if !ok { + return nil, errors.New("ComparisonOperatorCustom.argument_type is required") + } + + arg, ok := rawArg.(Type) + if !ok { + return nil, fmt.Errorf("invalid ComparisonOperatorCustom.argument_type type; expected: Type, got: %+v", rawArg) + } + + return &ComparisonOperatorCustom{ + Type: t, + ArgumentType: arg, + }, nil +} + +// Interface tries to convert the instance to ComparisonOperatorDefinitionEncoder interface +func (j ComparisonOperatorDefinition) Interface() (ComparisonOperatorDefinitionEncoder, error) { + t, err := j.Type() + if err != nil { + return nil, err + } + + switch t { + case ComparisonOperatorDefinitionTypeEqual: + return j.AsEqual() + case ComparisonOperatorDefinitionTypeIn: + return j.AsIn() + case ComparisonOperatorDefinitionTypeCustom: + return j.AsCustom() + default: + return nil, fmt.Errorf("invalid type: %s", t) + } +} + +// ComparisonOperatorDefinitionEncoder abstracts the serialization interface for ComparisonOperatorDefinition +type ComparisonOperatorDefinitionEncoder interface { + Encode() ComparisonOperatorDefinition +} + +// ComparisonOperatorEqual presents an equal comparison operator +type ComparisonOperatorEqual struct { + Type ComparisonOperatorDefinitionType `json:"type" mapstructure:"type"` +} + +// NewComparisonOperatorEqual create a new ComparisonOperatorEqual instance +func NewComparisonOperatorEqual() *ComparisonOperatorEqual { + return &ComparisonOperatorEqual{ + Type: ComparisonOperatorDefinitionTypeEqual, + } +} + +// Encode converts the instance to raw ComparisonOperatorDefinition +func (ob ComparisonOperatorEqual) Encode() ComparisonOperatorDefinition { + return ComparisonOperatorDefinition{ + "type": ob.Type, + } +} + +// ComparisonOperatorIn presents an in comparison operator +type ComparisonOperatorIn struct { + Type ComparisonOperatorDefinitionType `json:"type" mapstructure:"type"` +} + +// NewComparisonOperatorIn create a new ComparisonOperatorIn instance +func NewComparisonOperatorIn() *ComparisonOperatorIn { + return &ComparisonOperatorIn{ + Type: ComparisonOperatorDefinitionTypeIn, + } +} + +// Encode converts the instance to raw ComparisonOperatorDefinition +func (ob ComparisonOperatorIn) Encode() ComparisonOperatorDefinition { + return ComparisonOperatorDefinition{ + "type": ob.Type, + } +} + +// ComparisonOperatorCustom presents a custom comparison operator +type ComparisonOperatorCustom struct { + Type ComparisonOperatorDefinitionType `json:"type" mapstructure:"type"` + // The type of the argument to this operator + ArgumentType Type `json:"argument_type" mapstructure:"argument_type"` +} + +// NewComparisonOperatorCustom create a new ComparisonOperatorCustom instance +func NewComparisonOperatorCustom(argumentType TypeEncoder) *ComparisonOperatorCustom { + return &ComparisonOperatorCustom{ + Type: ComparisonOperatorDefinitionTypeCustom, + ArgumentType: argumentType.Encode(), + } +} + +// Encode converts the 
instance to raw ComparisonOperatorDefinition +func (ob ComparisonOperatorCustom) Encode() ComparisonOperatorDefinition { + return ComparisonOperatorDefinition{ + "type": ob.Type, + "argument_type": ob.ArgumentType, + } +} + +// NestedFieldType represents a nested field type enum +type NestedFieldType string + +const ( + NestedFieldTypeObject NestedFieldType = "object" + NestedFieldTypeArray NestedFieldType = "array" +) + +var enumValues_NestedFieldType = []NestedFieldType{ + NestedFieldTypeObject, + NestedFieldTypeArray, +} + +// ParseNestedFieldType parses the type of nested field +func ParseNestedFieldType(input string) (*NestedFieldType, error) { + if !Contains(enumValues_NestedFieldType, NestedFieldType(input)) { + return nil, fmt.Errorf("failed to parse NestedFieldType, expect one of %v", enumValues_NestedFieldType) + } + result := NestedFieldType(input) + + return &result, nil +} + +// UnmarshalJSON implements json.Unmarshaler. +func (j *NestedFieldType) UnmarshalJSON(b []byte) error { + var rawValue string + if err := json.Unmarshal(b, &rawValue); err != nil { + return err + } + + value, err := ParseNestedFieldType(rawValue) + if err != nil { + return err + } + + *j = *value + return nil +} + +// NestedField represents a nested field +type NestedField map[string]any + +// UnmarshalJSON implements json.Unmarshaler. +func (j *NestedField) UnmarshalJSON(b []byte) error { + var raw map[string]json.RawMessage + if err := json.Unmarshal(b, &raw); err != nil { + return err + } + + rawType, ok := raw["type"] + if !ok { + return errors.New("field type in NestedField: required") + } + + var ty NestedFieldType + if err := json.Unmarshal(rawType, &ty); err != nil { + return fmt.Errorf("field type in NestedField: %s", err) + } + + result := map[string]any{ + "type": ty, + } + switch ty { + case NestedFieldTypeObject: + rawFields, ok := raw["fields"] + if !ok { + return errors.New("field fields in NestedField is required for object type") + } + var fields map[string]Field + if err := json.Unmarshal(rawFields, &fields); err != nil { + return fmt.Errorf("field fields in NestedField object: %s", err) + } + result["fields"] = fields + case NestedFieldTypeArray: + rawFields, ok := raw["fields"] + if !ok { + return errors.New("field fields in NestedField is required for array type") + } + var fields NestedField + if err := json.Unmarshal(rawFields, &fields); err != nil { + return fmt.Errorf("field fields in NestedField array: %s", err) + } + result["fields"] = fields + } + *j = result + return nil +} + +// Type gets the type enum of the current type +func (j NestedField) Type() (NestedFieldType, error) { + t, ok := j["type"] + if !ok { + return NestedFieldType(""), errTypeRequired + } + switch raw := t.(type) { + case string: + v, err := ParseNestedFieldType(raw) + if err != nil { + return NestedFieldType(""), err + } + return *v, nil + case NestedFieldType: + return raw, nil + default: + return NestedFieldType(""), fmt.Errorf("invalid type: %+v", t) + } +} + +// AsObject tries to convert the instance to NestedObject type +func (j NestedField) AsObject() (*NestedObject, error) { + t, err := j.Type() + if err != nil { + return nil, err + } + if t != NestedFieldTypeObject { + return nil, fmt.Errorf("invalid type; expected: %s, got: %s", NestedFieldTypeObject, t) + } + + rawFields, ok := j["fields"] + if !ok { + return nil, errors.New("NestedObject.fields is required") + } + + fields, ok := rawFields.(map[string]Field) + if !ok { + return nil, fmt.Errorf("invalid NestedObject.fields type; expected: 
map[string]Field, got: %+v", rawFields) + } + + return &NestedObject{ + Type: t, + Fields: fields, + }, nil +} + +// AsArray tries to convert the instance to NestedArray type +func (j NestedField) AsArray() (*NestedArray, error) { + t, err := j.Type() + if err != nil { + return nil, err + } + if t != NestedFieldTypeArray { + return nil, fmt.Errorf("invalid type; expected: %s, got: %s", NestedFieldTypeArray, t) + } + + rawFields, ok := j["fields"] + if !ok { + return nil, errors.New("NestedArray.fields is required") + } + + fields, ok := rawFields.(NestedField) + if !ok { + return nil, fmt.Errorf("invalid NestedArray.fields type; expected: NestedField, got: %+v", rawFields) + } + + return &NestedArray{ + Type: t, + Fields: fields, + }, nil +} + +// Interface tries to convert the instance to NestedFieldEncoder interface +func (j NestedField) Interface() (NestedFieldEncoder, error) { + t, err := j.Type() + if err != nil { + return nil, err + } + + switch t { + case NestedFieldTypeObject: + return j.AsObject() + case NestedFieldTypeArray: + return j.AsArray() + default: + return nil, fmt.Errorf("invalid type: %s", t) + } +} + +// NestedFieldEncoder abstracts the serialization interface for NestedField +type NestedFieldEncoder interface { + Encode() NestedField +} + +// NestedObject presents a nested object field +type NestedObject struct { + Type NestedFieldType `json:"type" mapstructure:"type"` + Fields map[string]Field `json:"fields" mapstructure:"fields"` +} + +// NewNestedObject create a new NestedObject instance +func NewNestedObject(fields map[string]Field) *NestedObject { + return &NestedObject{ + Type: NestedFieldTypeObject, + Fields: fields, + } +} + +// Encode converts the instance to raw NestedField +func (ob NestedObject) Encode() NestedField { + return NestedField{ + "type": ob.Type, + "fields": ob.Fields, + } +} + +// NestedArray presents a nested array field +type NestedArray struct { + Type NestedFieldType `json:"type" mapstructure:"type"` + Fields NestedField `json:"fields" mapstructure:"fields"` +} + +// NewNestedArray create a new NestedArray instance +func NewNestedArray(fields NestedFieldEncoder) *NestedArray { + return &NestedArray{ + Type: NestedFieldTypeArray, + Fields: fields.Encode(), + } +} + +// Encode converts the instance to raw NestedField +func (ob NestedArray) Encode() NestedField { + return NestedField{ + "type": ob.Type, + "fields": ob.Fields, + } +} diff --git a/schema/schema.generated.go b/schema/schema.generated.go index 666e3844..299efa38 100644 --- a/schema/schema.generated.go +++ b/schema/schema.generated.go @@ -20,14 +20,10 @@ type ArgumentInfo struct { Type Type `json:"type" yaml:"type" mapstructure:"type"` } -type BinaryArrayComparisonOperator string - -const BinaryArrayComparisonOperatorIn BinaryArrayComparisonOperator = "in" - // Describes the features of the specification which a data connector implements. type Capabilities struct { - // Explain corresponds to the JSON schema field "explain". - Explain interface{} `json:"explain,omitempty" yaml:"explain,omitempty" mapstructure:"explain,omitempty"` + // Mutation corresponds to the JSON schema field "mutation". + Mutation MutationCapabilities `json:"mutation" yaml:"mutation" mapstructure:"mutation"` // Query corresponds to the JSON schema field "query". Query QueryCapabilities `json:"query" yaml:"query" mapstructure:"query"` @@ -40,8 +36,8 @@ type CapabilitiesResponse struct { // Capabilities corresponds to the JSON schema field "capabilities". 
Capabilities Capabilities `json:"capabilities" yaml:"capabilities" mapstructure:"capabilities"` - // Versions corresponds to the JSON schema field "versions". - Versions string `json:"versions" yaml:"versions" mapstructure:"versions"` + // Version corresponds to the JSON schema field "version". + Version string `json:"version" yaml:"version" mapstructure:"version"` } type CollectionInfo struct { @@ -77,10 +73,6 @@ type CollectionInfoForeignKeys map[string]ForeignKeyConstraint type CollectionInfoUniquenessConstraints map[string]UniquenessConstraint // The definition of a comparison operator on a scalar type -type ComparisonOperatorDefinition struct { - // The type of the argument to this operator - ArgumentType Type `json:"argument_type" yaml:"argument_type" mapstructure:"argument_type"` -} type ErrorResponse struct { // Any additional structured information about the error @@ -136,6 +128,14 @@ type FunctionInfoArguments map[string]ArgumentInfo // empty struct to allow for future sub-capabilities. type LeafCapability map[string]interface{} +type MutationCapabilities struct { + // Does the connector support explaining mutations + Explain interface{} `json:"explain,omitempty" yaml:"explain,omitempty" mapstructure:"explain,omitempty"` + + // Does the connector support executing multiple mutations in a transaction. + Transactional interface{} `json:"transactional,omitempty" yaml:"transactional,omitempty" mapstructure:"transactional,omitempty"` +} + type MutationOperationResults struct { // The number of rows affected by the mutation operation AffectedRows int `json:"affected_rows" yaml:"affected_rows" mapstructure:"affected_rows"` @@ -204,7 +204,7 @@ type PathElement struct { Arguments PathElementArguments `json:"arguments" yaml:"arguments" mapstructure:"arguments"` // A predicate expression to apply to the target collection - Predicate Expression `json:"predicate" yaml:"predicate" mapstructure:"predicate"` + Predicate Expression `json:"predicate,omitempty" yaml:"predicate,omitempty" mapstructure:"predicate,omitempty"` // The name of the relationship to follow Relationship string `json:"relationship" yaml:"relationship" mapstructure:"relationship"` @@ -246,8 +246,8 @@ type Query struct { // OrderBy corresponds to the JSON schema field "order_by". OrderBy *OrderBy `json:"order_by,omitempty" yaml:"order_by,omitempty" mapstructure:"order_by,omitempty"` - // Where corresponds to the JSON schema field "where". - Where Expression `json:"where,omitempty" yaml:"where,omitempty" mapstructure:"where,omitempty"` + // Predicate corresponds to the JSON schema field "predicate". 
+ Predicate Expression `json:"predicate,omitempty" yaml:"predicate,omitempty" mapstructure:"predicate,omitempty"` } // Aggregate fields of the query @@ -257,6 +257,9 @@ type QueryCapabilities struct { // Does the connector support aggregate queries Aggregates interface{} `json:"aggregates,omitempty" yaml:"aggregates,omitempty" mapstructure:"aggregates,omitempty"` + // Does the connector support explaining queries + Explain interface{} `json:"explain,omitempty" yaml:"explain,omitempty" mapstructure:"explain,omitempty"` + // Does the connector support queries which use variables Variables interface{} `json:"variables,omitempty" yaml:"variables,omitempty" mapstructure:"variables,omitempty"` } @@ -328,273 +331,244 @@ type RelationshipColumnMapping map[string]string type RelationshipType string -const RelationshipTypeArray RelationshipType = "array" -const RelationshipTypeObject RelationshipType = "object" - -type RowFieldValue interface{} - -type RowSet struct { - // The results of the aggregates returned by the query - Aggregates RowSetAggregates `json:"aggregates,omitempty" yaml:"aggregates,omitempty" mapstructure:"aggregates,omitempty"` - - // The rows returned by the query, corresponding to the query's fields - Rows []Row `json:"rows,omitempty" yaml:"rows,omitempty" mapstructure:"rows,omitempty"` -} - -// The results of the aggregates returned by the query -type RowSetAggregates map[string]interface{} - -type Row any - -// The definition of a scalar type, i.e. types that can be used as the types of -// columns. -type ScalarType struct { - // A map from aggregate function names to their definitions. Result type names - // must be defined scalar types declared in ScalarTypesCapabilities. - AggregateFunctions ScalarTypeAggregateFunctions `json:"aggregate_functions" yaml:"aggregate_functions" mapstructure:"aggregate_functions"` - - // A map from comparison operator names to their definitions. Argument type names - // must be defined scalar types declared in ScalarTypesCapabilities. - ComparisonOperators ScalarTypeComparisonOperators `json:"comparison_operators" yaml:"comparison_operators" mapstructure:"comparison_operators"` -} - -// A map from aggregate function names to their definitions. Result type names must -// be defined scalar types declared in ScalarTypesCapabilities. -type ScalarTypeAggregateFunctions map[string]AggregateFunctionDefinition - -// A map from comparison operator names to their definitions. Argument type names -// must be defined scalar types declared in ScalarTypesCapabilities. -type ScalarTypeComparisonOperators map[string]ComparisonOperatorDefinition - // UnmarshalJSON implements json.Unmarshaler. 
-func (j *MutationRequest) UnmarshalJSON(b []byte) error { +func (j *QueryRequest) UnmarshalJSON(b []byte) error { var raw map[string]interface{} if err := json.Unmarshal(b, &raw); err != nil { return err } + if v, ok := raw["arguments"]; !ok || v == nil { + return fmt.Errorf("field arguments in QueryRequest: required") + } + if v, ok := raw["collection"]; !ok || v == nil { + return fmt.Errorf("field collection in QueryRequest: required") + } if v, ok := raw["collection_relationships"]; !ok || v == nil { - return fmt.Errorf("field collection_relationships in MutationRequest: required") + return fmt.Errorf("field collection_relationships in QueryRequest: required") } - if v, ok := raw["operations"]; !ok || v == nil { - return fmt.Errorf("field operations in MutationRequest: required") + if v, ok := raw["query"]; !ok || v == nil { + return fmt.Errorf("field query in QueryRequest: required") } - type Plain MutationRequest + type Plain QueryRequest var plain Plain if err := json.Unmarshal(b, &plain); err != nil { return err } - *j = MutationRequest(plain) + *j = QueryRequest(plain) return nil } +var enumValues_OrderDirection = []interface{}{ + "asc", + "desc", +} + // UnmarshalJSON implements json.Unmarshaler. -func (j *OrderByElement) UnmarshalJSON(b []byte) error { +func (j *OrderDirection) UnmarshalJSON(b []byte) error { + var v string + if err := json.Unmarshal(b, &v); err != nil { + return err + } + var ok bool + for _, expected := range enumValues_OrderDirection { + if reflect.DeepEqual(v, expected) { + ok = true + break + } + } + if !ok { + return fmt.Errorf("invalid value (expected one of %#v): %#v", enumValues_OrderDirection, v) + } + *j = OrderDirection(v) + return nil +} + +var enumValues_RelationshipType = []interface{}{ + "object", + "array", +} + +// UnmarshalJSON implements json.Unmarshaler. +func (j *Capabilities) UnmarshalJSON(b []byte) error { var raw map[string]interface{} if err := json.Unmarshal(b, &raw); err != nil { return err } - if v, ok := raw["order_direction"]; !ok || v == nil { - return fmt.Errorf("field order_direction in OrderByElement: required") + if v, ok := raw["mutation"]; !ok || v == nil { + return fmt.Errorf("field mutation in Capabilities: required") } - if v, ok := raw["target"]; !ok || v == nil { - return fmt.Errorf("field target in OrderByElement: required") + if v, ok := raw["query"]; !ok || v == nil { + return fmt.Errorf("field query in Capabilities: required") } - type Plain OrderByElement + type Plain Capabilities var plain Plain if err := json.Unmarshal(b, &plain); err != nil { return err } - *j = OrderByElement(plain) + *j = Capabilities(plain) return nil } // UnmarshalJSON implements json.Unmarshaler. -func (j *ErrorResponse) UnmarshalJSON(b []byte) error { +func (j *ObjectType) UnmarshalJSON(b []byte) error { var raw map[string]interface{} if err := json.Unmarshal(b, &raw); err != nil { return err } - if v, ok := raw["details"]; !ok || v == nil { - return fmt.Errorf("field details in ErrorResponse: required") - } - if v, ok := raw["message"]; !ok || v == nil { - return fmt.Errorf("field message in ErrorResponse: required") + if v, ok := raw["fields"]; !ok || v == nil { + return fmt.Errorf("field fields in ObjectType: required") } - type Plain ErrorResponse + type Plain ObjectType var plain Plain if err := json.Unmarshal(b, &plain); err != nil { return err } - *j = ErrorResponse(plain) + *j = ObjectType(plain) return nil } +const RelationshipTypeObject RelationshipType = "object" + // UnmarshalJSON implements json.Unmarshaler. 
-func (j *OrderBy) UnmarshalJSON(b []byte) error { +func (j *OrderByElement) UnmarshalJSON(b []byte) error { var raw map[string]interface{} if err := json.Unmarshal(b, &raw); err != nil { return err } - if v, ok := raw["elements"]; !ok || v == nil { - return fmt.Errorf("field elements in OrderBy: required") + if v, ok := raw["order_direction"]; !ok || v == nil { + return fmt.Errorf("field order_direction in OrderByElement: required") } - type Plain OrderBy + if v, ok := raw["target"]; !ok || v == nil { + return fmt.Errorf("field target in OrderByElement: required") + } + type Plain OrderByElement var plain Plain if err := json.Unmarshal(b, &plain); err != nil { return err } - *j = OrderBy(plain) + *j = OrderByElement(plain) return nil } // UnmarshalJSON implements json.Unmarshaler. -func (j *ArgumentInfo) UnmarshalJSON(b []byte) error { +func (j *MutationOperationResults) UnmarshalJSON(b []byte) error { var raw map[string]interface{} if err := json.Unmarshal(b, &raw); err != nil { return err } - if v, ok := raw["type"]; !ok || v == nil { - return fmt.Errorf("field type in ArgumentInfo: required") + if v, ok := raw["affected_rows"]; !ok || v == nil { + return fmt.Errorf("field affected_rows in MutationOperationResults: required") } - type Plain ArgumentInfo + type Plain MutationOperationResults var plain Plain if err := json.Unmarshal(b, &plain); err != nil { return err } - *j = ArgumentInfo(plain) + *j = MutationOperationResults(plain) return nil } // UnmarshalJSON implements json.Unmarshaler. -func (j *RelationshipType) UnmarshalJSON(b []byte) error { - var v string - if err := json.Unmarshal(b, &v); err != nil { +func (j *OrderBy) UnmarshalJSON(b []byte) error { + var raw map[string]interface{} + if err := json.Unmarshal(b, &raw); err != nil { return err } - var ok bool - for _, expected := range enumValues_RelationshipType { - if reflect.DeepEqual(v, expected) { - ok = true - break - } + if v, ok := raw["elements"]; !ok || v == nil { + return fmt.Errorf("field elements in OrderBy: required") } - if !ok { - return fmt.Errorf("invalid value (expected one of %#v): %#v", enumValues_RelationshipType, v) + type Plain OrderBy + var plain Plain + if err := json.Unmarshal(b, &plain); err != nil { + return err } - *j = RelationshipType(v) + *j = OrderBy(plain) return nil } -var enumValues_RelationshipType = []interface{}{ - "object", - "array", -} - // UnmarshalJSON implements json.Unmarshaler. -func (j *PathElement) UnmarshalJSON(b []byte) error { +func (j *CapabilitiesResponse) UnmarshalJSON(b []byte) error { var raw map[string]interface{} if err := json.Unmarshal(b, &raw); err != nil { return err } - if v, ok := raw["arguments"]; !ok || v == nil { - return fmt.Errorf("field arguments in PathElement: required") - } - if v, ok := raw["predicate"]; !ok || v == nil { - return fmt.Errorf("field predicate in PathElement: required") + if v, ok := raw["capabilities"]; !ok || v == nil { + return fmt.Errorf("field capabilities in CapabilitiesResponse: required") } - if v, ok := raw["relationship"]; !ok || v == nil { - return fmt.Errorf("field relationship in PathElement: required") + if v, ok := raw["version"]; !ok || v == nil { + return fmt.Errorf("field version in CapabilitiesResponse: required") } - type Plain PathElement + type Plain CapabilitiesResponse var plain Plain if err := json.Unmarshal(b, &plain); err != nil { return err } - *j = PathElement(plain) + *j = CapabilitiesResponse(plain) return nil } // UnmarshalJSON implements json.Unmarshaler. 
-func (j *MutationOperationResults) UnmarshalJSON(b []byte) error {
+func (j *ForeignKeyConstraint) UnmarshalJSON(b []byte) error {
 	var raw map[string]interface{}
 	if err := json.Unmarshal(b, &raw); err != nil {
 		return err
 	}
-	if v, ok := raw["affected_rows"]; !ok || v == nil {
-		return fmt.Errorf("field affected_rows in MutationOperationResults: required")
+	if v, ok := raw["column_mapping"]; !ok || v == nil {
+		return fmt.Errorf("field column_mapping in ForeignKeyConstraint: required")
 	}
-	type Plain MutationOperationResults
+	if v, ok := raw["foreign_collection"]; !ok || v == nil {
+		return fmt.Errorf("field foreign_collection in ForeignKeyConstraint: required")
+	}
+	type Plain ForeignKeyConstraint
 	var plain Plain
 	if err := json.Unmarshal(b, &plain); err != nil {
 		return err
 	}
-	*j = MutationOperationResults(plain)
+	*j = ForeignKeyConstraint(plain)
 	return nil
 }
 
-var enumValues_BinaryArrayComparisonOperator = []interface{}{
-	"in",
+type UniquenessConstraint struct {
+	// A list of columns which this constraint requires to be unique
+	UniqueColumns []string `json:"unique_columns" yaml:"unique_columns" mapstructure:"unique_columns"`
 }
 
 // UnmarshalJSON implements json.Unmarshaler.
-func (j *ProcedureInfo) UnmarshalJSON(b []byte) error {
+func (j *PathElement) UnmarshalJSON(b []byte) error {
 	var raw map[string]interface{}
 	if err := json.Unmarshal(b, &raw); err != nil {
 		return err
 	}
 	if v, ok := raw["arguments"]; !ok || v == nil {
-		return fmt.Errorf("field arguments in ProcedureInfo: required")
-	}
-	if v, ok := raw["name"]; !ok || v == nil {
-		return fmt.Errorf("field name in ProcedureInfo: required")
+		return fmt.Errorf("field arguments in PathElement: required")
 	}
-	if v, ok := raw["result_type"]; !ok || v == nil {
-		return fmt.Errorf("field result_type in ProcedureInfo: required")
+	if v, ok := raw["relationship"]; !ok || v == nil {
+		return fmt.Errorf("field relationship in PathElement: required")
 	}
-	type Plain ProcedureInfo
+	type Plain PathElement
 	var plain Plain
 	if err := json.Unmarshal(b, &plain); err != nil {
 		return err
 	}
-	*j = ProcedureInfo(plain)
-	return nil
-}
-
-// UnmarshalJSON implements json.Unmarshaler.
-func (j *BinaryArrayComparisonOperator) UnmarshalJSON(b []byte) error {
-	var v string
-	if err := json.Unmarshal(b, &v); err != nil {
-		return err
-	}
-	var ok bool
-	for _, expected := range enumValues_BinaryArrayComparisonOperator {
-		if reflect.DeepEqual(v, expected) {
-			ok = true
-			break
-		}
-	}
-	if !ok {
-		return fmt.Errorf("invalid value (expected one of %#v): %#v", enumValues_BinaryArrayComparisonOperator, v)
-	}
-	*j = BinaryArrayComparisonOperator(v)
+	*j = PathElement(plain)
 	return nil
 }
 
 // UnmarshalJSON implements json.Unmarshaler.
-func (j *Capabilities) UnmarshalJSON(b []byte) error {
+func (j *UniquenessConstraint) UnmarshalJSON(b []byte) error {
 	var raw map[string]interface{}
 	if err := json.Unmarshal(b, &raw); err != nil {
 		return err
 	}
-	if v, ok := raw["query"]; !ok || v == nil {
-		return fmt.Errorf("field query in Capabilities: required")
+	if v, ok := raw["unique_columns"]; !ok || v == nil {
+		return fmt.Errorf("field unique_columns in UniquenessConstraint: required")
 	}
-	type Plain Capabilities
+	type Plain UniquenessConstraint
 	var plain Plain
 	if err := json.Unmarshal(b, &plain); err != nil {
 		return err
 	}
-	*j = Capabilities(plain)
+	*j = UniquenessConstraint(plain)
 	return nil
 }
 
@@ -623,112 +597,97 @@ func (j *FunctionInfo) UnmarshalJSON(b []byte) error {
 }
 
 // UnmarshalJSON implements json.Unmarshaler.
-func (j *CapabilitiesResponse) UnmarshalJSON(b []byte) error {
+func (j *ProcedureInfo) UnmarshalJSON(b []byte) error {
 	var raw map[string]interface{}
 	if err := json.Unmarshal(b, &raw); err != nil {
 		return err
 	}
-	if v, ok := raw["capabilities"]; !ok || v == nil {
-		return fmt.Errorf("field capabilities in CapabilitiesResponse: required")
+	if v, ok := raw["arguments"]; !ok || v == nil {
+		return fmt.Errorf("field arguments in ProcedureInfo: required")
 	}
-	if v, ok := raw["versions"]; !ok || v == nil {
-		return fmt.Errorf("field versions in CapabilitiesResponse: required")
+	if v, ok := raw["name"]; !ok || v == nil {
+		return fmt.Errorf("field name in ProcedureInfo: required")
 	}
-	type Plain CapabilitiesResponse
+	if v, ok := raw["result_type"]; !ok || v == nil {
+		return fmt.Errorf("field result_type in ProcedureInfo: required")
+	}
+	type Plain ProcedureInfo
 	var plain Plain
 	if err := json.Unmarshal(b, &plain); err != nil {
 		return err
 	}
-	*j = CapabilitiesResponse(plain)
+	*j = ProcedureInfo(plain)
 	return nil
 }
 
 // UnmarshalJSON implements json.Unmarshaler.
-func (j *ForeignKeyConstraint) UnmarshalJSON(b []byte) error {
+func (j *CollectionInfo) UnmarshalJSON(b []byte) error {
 	var raw map[string]interface{}
 	if err := json.Unmarshal(b, &raw); err != nil {
 		return err
 	}
-	if v, ok := raw["column_mapping"]; !ok || v == nil {
-		return fmt.Errorf("field column_mapping in ForeignKeyConstraint: required")
+	if v, ok := raw["arguments"]; !ok || v == nil {
+		return fmt.Errorf("field arguments in CollectionInfo: required")
 	}
-	if v, ok := raw["foreign_collection"]; !ok || v == nil {
-		return fmt.Errorf("field foreign_collection in ForeignKeyConstraint: required")
+	if v, ok := raw["foreign_keys"]; !ok || v == nil {
		return fmt.Errorf("field foreign_keys in CollectionInfo: required")
 	}
-	type Plain ForeignKeyConstraint
-	var plain Plain
-	if err := json.Unmarshal(b, &plain); err != nil {
-		return err
+	if v, ok := raw["name"]; !ok || v == nil {
+		return fmt.Errorf("field name in CollectionInfo: required")
 	}
-	*j = ForeignKeyConstraint(plain)
-	return nil
-}
-
-type UniquenessConstraint struct {
-	// A list of columns which this constraint requires to be unique
-	UniqueColumns []string `json:"unique_columns" yaml:"unique_columns" mapstructure:"unique_columns"`
-}
-
-// UnmarshalJSON implements json.Unmarshaler.
-func (j *UniquenessConstraint) UnmarshalJSON(b []byte) error {
-	var raw map[string]interface{}
-	if err := json.Unmarshal(b, &raw); err != nil {
-		return err
+	if v, ok := raw["type"]; !ok || v == nil {
+		return fmt.Errorf("field type in CollectionInfo: required")
 	}
-	if v, ok := raw["unique_columns"]; !ok || v == nil {
-		return fmt.Errorf("field unique_columns in UniquenessConstraint: required")
+	if v, ok := raw["uniqueness_constraints"]; !ok || v == nil {
+		return fmt.Errorf("field uniqueness_constraints in CollectionInfo: required")
 	}
-	type Plain UniquenessConstraint
+	type Plain CollectionInfo
 	var plain Plain
 	if err := json.Unmarshal(b, &plain); err != nil {
 		return err
 	}
-	*j = UniquenessConstraint(plain)
+	*j = CollectionInfo(plain)
 	return nil
 }
 
+const RelationshipTypeArray RelationshipType = "array"
+
 // UnmarshalJSON implements json.Unmarshaler.
-func (j *QueryRequest) UnmarshalJSON(b []byte) error {
+func (j *ErrorResponse) UnmarshalJSON(b []byte) error {
 	var raw map[string]interface{}
 	if err := json.Unmarshal(b, &raw); err != nil {
 		return err
 	}
-	if v, ok := raw["arguments"]; !ok || v == nil {
-		return fmt.Errorf("field arguments in QueryRequest: required")
-	}
-	if v, ok := raw["collection"]; !ok || v == nil {
-		return fmt.Errorf("field collection in QueryRequest: required")
-	}
-	if v, ok := raw["collection_relationships"]; !ok || v == nil {
-		return fmt.Errorf("field collection_relationships in QueryRequest: required")
+	if v, ok := raw["details"]; !ok || v == nil {
+		return fmt.Errorf("field details in ErrorResponse: required")
 	}
-	if v, ok := raw["query"]; !ok || v == nil {
-		return fmt.Errorf("field query in QueryRequest: required")
+	if v, ok := raw["message"]; !ok || v == nil {
+		return fmt.Errorf("field message in ErrorResponse: required")
 	}
-	type Plain QueryRequest
+	type Plain ErrorResponse
 	var plain Plain
 	if err := json.Unmarshal(b, &plain); err != nil {
 		return err
 	}
-	*j = QueryRequest(plain)
+	*j = ErrorResponse(plain)
 	return nil
 }
 
 // UnmarshalJSON implements json.Unmarshaler.
-func (j *AggregateFunctionDefinition) UnmarshalJSON(b []byte) error {
+func (j *ObjectField) UnmarshalJSON(b []byte) error {
 	var raw map[string]interface{}
 	if err := json.Unmarshal(b, &raw); err != nil {
 		return err
 	}
-	if v, ok := raw["result_type"]; !ok || v == nil {
-		return fmt.Errorf("field result_type in AggregateFunctionDefinition: required")
+	if v, ok := raw["type"]; !ok || v == nil {
+		return fmt.Errorf("field type in ObjectField: required")
 	}
-	type Plain AggregateFunctionDefinition
+	type Plain ObjectField
 	var plain Plain
 	if err := json.Unmarshal(b, &plain); err != nil {
 		return err
 	}
-	*j = AggregateFunctionDefinition(plain)
+	*j = ObjectField(plain)
 	return nil
 }
 
@@ -760,150 +719,152 @@ func (j *Relationship) UnmarshalJSON(b []byte) error {
 }
 
 // UnmarshalJSON implements json.Unmarshaler.
-func (j *OrderDirection) UnmarshalJSON(b []byte) error {
-	var v string
-	if err := json.Unmarshal(b, &v); err != nil {
-		return err
-	}
-	var ok bool
-	for _, expected := range enumValues_OrderDirection {
-		if reflect.DeepEqual(v, expected) {
-			ok = true
-			break
-		}
-	}
-	if !ok {
-		return fmt.Errorf("invalid value (expected one of %#v): %#v", enumValues_OrderDirection, v)
-	}
-	*j = OrderDirection(v)
-	return nil
-}
-
-// UnmarshalJSON implements json.Unmarshaler.
-func (j *ExplainResponse) UnmarshalJSON(b []byte) error {
+func (j *MutationResponse) UnmarshalJSON(b []byte) error {
 	var raw map[string]interface{}
 	if err := json.Unmarshal(b, &raw); err != nil {
 		return err
 	}
-	if v, ok := raw["details"]; !ok || v == nil {
-		return fmt.Errorf("field details in ExplainResponse: required")
+	if v, ok := raw["operation_results"]; !ok || v == nil {
+		return fmt.Errorf("field operation_results in MutationResponse: required")
 	}
-	type Plain ExplainResponse
+	type Plain MutationResponse
 	var plain Plain
 	if err := json.Unmarshal(b, &plain); err != nil {
 		return err
 	}
-	*j = ExplainResponse(plain)
+	*j = MutationResponse(plain)
 	return nil
 }
 
 // UnmarshalJSON implements json.Unmarshaler.
-func (j *CollectionInfo) UnmarshalJSON(b []byte) error {
+func (j *AggregateFunctionDefinition) UnmarshalJSON(b []byte) error {
 	var raw map[string]interface{}
 	if err := json.Unmarshal(b, &raw); err != nil {
 		return err
 	}
-	if v, ok := raw["arguments"]; !ok || v == nil {
-		return fmt.Errorf("field arguments in CollectionInfo: required")
-	}
-	if v, ok := raw["foreign_keys"]; !ok || v == nil {
-		return fmt.Errorf("field foreign_keys in CollectionInfo: required")
-	}
-	if v, ok := raw["name"]; !ok || v == nil {
-		return fmt.Errorf("field name in CollectionInfo: required")
-	}
-	if v, ok := raw["type"]; !ok || v == nil {
-		return fmt.Errorf("field type in CollectionInfo: required")
-	}
-	if v, ok := raw["uniqueness_constraints"]; !ok || v == nil {
-		return fmt.Errorf("field uniqueness_constraints in CollectionInfo: required")
+	if v, ok := raw["result_type"]; !ok || v == nil {
+		return fmt.Errorf("field result_type in AggregateFunctionDefinition: required")
 	}
-	type Plain CollectionInfo
+	type Plain AggregateFunctionDefinition
 	var plain Plain
 	if err := json.Unmarshal(b, &plain); err != nil {
 		return err
 	}
-	*j = CollectionInfo(plain)
+	*j = AggregateFunctionDefinition(plain)
 	return nil
 }
 
 // UnmarshalJSON implements json.Unmarshaler.
-func (j *ComparisonOperatorDefinition) UnmarshalJSON(b []byte) error {
-	var raw map[string]interface{}
-	if err := json.Unmarshal(b, &raw); err != nil {
+func (j *RelationshipType) UnmarshalJSON(b []byte) error {
+	var v string
+	if err := json.Unmarshal(b, &v); err != nil {
 		return err
 	}
-	if v, ok := raw["argument_type"]; !ok || v == nil {
-		return fmt.Errorf("field argument_type in ComparisonOperatorDefinition: required")
+	var ok bool
+	for _, expected := range enumValues_RelationshipType {
+		if reflect.DeepEqual(v, expected) {
+			ok = true
+			break
+		}
 	}
-	type Plain ComparisonOperatorDefinition
-	var plain Plain
-	if err := json.Unmarshal(b, &plain); err != nil {
-		return err
+	if !ok {
+		return fmt.Errorf("invalid value (expected one of %#v): %#v", enumValues_RelationshipType, v)
 	}
-	*j = ComparisonOperatorDefinition(plain)
+	*j = RelationshipType(v)
 	return nil
 }
 
-var enumValues_OrderDirection = []interface{}{
-	"asc",
-	"desc",
+// The results of the aggregates returned by the query
+type RowSetAggregates map[string]interface{}
+
+type Row any
+
+type RowSet struct {
+	// The results of the aggregates returned by the query
+	Aggregates RowSetAggregates `json:"aggregates,omitempty" yaml:"aggregates,omitempty" mapstructure:"aggregates,omitempty"`
+
+	// The rows returned by the query, corresponding to the query's fields
+	Rows []Row `json:"rows,omitempty" yaml:"rows,omitempty" mapstructure:"rows,omitempty"`
 }
 
 // UnmarshalJSON implements json.Unmarshaler.
-func (j *MutationResponse) UnmarshalJSON(b []byte) error {
+func (j *MutationRequest) UnmarshalJSON(b []byte) error {
 	var raw map[string]interface{}
 	if err := json.Unmarshal(b, &raw); err != nil {
 		return err
 	}
-	if v, ok := raw["operation_results"]; !ok || v == nil {
-		return fmt.Errorf("field operation_results in MutationResponse: required")
+	if v, ok := raw["collection_relationships"]; !ok || v == nil {
+		return fmt.Errorf("field collection_relationships in MutationRequest: required")
 	}
-	type Plain MutationResponse
+	if v, ok := raw["operations"]; !ok || v == nil {
+		return fmt.Errorf("field operations in MutationRequest: required")
+	}
+	type Plain MutationRequest
 	var plain Plain
 	if err := json.Unmarshal(b, &plain); err != nil {
 		return err
 	}
-	*j = MutationResponse(plain)
+	*j = MutationRequest(plain)
 	return nil
 }
 
 // UnmarshalJSON implements json.Unmarshaler.
-func (j *ObjectField) UnmarshalJSON(b []byte) error {
+func (j *ArgumentInfo) UnmarshalJSON(b []byte) error {
 	var raw map[string]interface{}
 	if err := json.Unmarshal(b, &raw); err != nil {
 		return err
 	}
 	if v, ok := raw["type"]; !ok || v == nil {
-		return fmt.Errorf("field type in ObjectField: required")
+		return fmt.Errorf("field type in ArgumentInfo: required")
 	}
-	type Plain ObjectField
+	type Plain ArgumentInfo
 	var plain Plain
 	if err := json.Unmarshal(b, &plain); err != nil {
 		return err
 	}
-	*j = ObjectField(plain)
+	*j = ArgumentInfo(plain)
 	return nil
 }
 
 // UnmarshalJSON implements json.Unmarshaler.
-func (j *ObjectType) UnmarshalJSON(b []byte) error {
+func (j *ExplainResponse) UnmarshalJSON(b []byte) error {
 	var raw map[string]interface{}
 	if err := json.Unmarshal(b, &raw); err != nil {
 		return err
 	}
-	if v, ok := raw["fields"]; !ok || v == nil {
-		return fmt.Errorf("field fields in ObjectType: required")
+	if v, ok := raw["details"]; !ok || v == nil {
+		return fmt.Errorf("field details in ExplainResponse: required")
 	}
-	type Plain ObjectType
+	type Plain ExplainResponse
 	var plain Plain
 	if err := json.Unmarshal(b, &plain); err != nil {
 		return err
 	}
-	*j = ObjectType(plain)
+	*j = ExplainResponse(plain)
 	return nil
 }
 
+type RowFieldValue interface{}
+
+// A map from aggregate function names to their definitions. Result type names must
+// be defined scalar types declared in ScalarTypesCapabilities.
+type ScalarTypeAggregateFunctions map[string]AggregateFunctionDefinition
+
+// A map from comparison operator names to their definitions. Argument type names
+// must be defined scalar types declared in ScalarTypesCapabilities.
+
+// The definition of a scalar type, i.e. types that can be used as the types of
+// columns.
+type ScalarType struct {
+	// A map from aggregate function names to their definitions. Result type names
+	// must be defined scalar types declared in ScalarTypesCapabilities.
+	AggregateFunctions ScalarTypeAggregateFunctions `json:"aggregate_functions" yaml:"aggregate_functions" mapstructure:"aggregate_functions"`
+
+	// A map from comparison operator names to their definitions. Argument type names
+	// must be defined scalar types declared in ScalarTypesCapabilities.
+	ComparisonOperators map[string]ComparisonOperatorDefinition `json:"comparison_operators" yaml:"comparison_operators" mapstructure:"comparison_operators"`
+}
+
 // UnmarshalJSON implements json.Unmarshaler.
 func (j *ScalarType) UnmarshalJSON(b []byte) error {
 	var raw map[string]interface{}
diff --git a/schema/schema.generated.json b/schema/schema.generated.json
index dbf101fb..ac239921 100644
--- a/schema/schema.generated.json
+++ b/schema/schema.generated.json
@@ -48,10 +48,10 @@
       "type": "object",
       "required": [
         "capabilities",
-        "versions"
+        "version"
       ],
       "properties": {
-        "versions": {
+        "version": {
           "type": "string"
         },
         "capabilities": {
@@ -64,21 +64,15 @@
       "description": "Describes the features of the specification which a data connector implements.",
       "type": "object",
       "required": [
+        "mutation",
         "query"
       ],
       "properties": {
         "query": {
           "$ref": "#/definitions/QueryCapabilities"
         },
-        "explain": {
-          "anyOf": [
-            {
-              "$ref": "#/definitions/LeafCapability"
-            },
-            {
-              "type": "null"
-            }
-          ]
+        "mutation": {
+          "$ref": "#/definitions/MutationCapabilities"
         },
         "relationships": {
           "anyOf": [
@@ -117,6 +111,17 @@
               "type": "null"
             }
           ]
+        },
+        "explain": {
+          "description": "Does the connector support explaining queries",
+          "anyOf": [
+            {
+              "$ref": "#/definitions/LeafCapability"
+            },
+            {
+              "type": "null"
+            }
+          ]
         }
       }
     },
@@ -124,6 +129,34 @@
       "description": "A unit value to indicate a particular leaf capability is supported. This is an empty struct to allow for future sub-capabilities.",
       "type": "object"
     },
+    "MutationCapabilities": {
+      "title": "Mutation Capabilities",
+      "type": "object",
+      "properties": {
+        "transactional": {
+          "description": "Does the connector support executing multiple mutations in a transaction.",
+          "anyOf": [
+            {
+              "$ref": "#/definitions/LeafCapability"
+            },
+            {
+              "type": "null"
+            }
+          ]
+        },
+        "explain": {
+          "description": "Does the connector support explaining mutations",
+          "anyOf": [
+            {
+              "$ref": "#/definitions/LeafCapability"
+            },
+            {
+              "type": "null"
+            }
+          ]
+        }
+      }
+    },
     "RelationshipCapabilities": {
       "title": "Relationship Capabilities",
       "type": "object",
@@ -314,26 +347,85 @@
             ]
           }
        }
+        },
+        {
+          "description": "A predicate type for a given object type",
+          "type": "object",
+          "required": [
+            "object_type_name",
+            "type"
+          ],
+          "properties": {
+            "type": {
+              "type": "string",
+              "enum": [
+                "predicate"
+              ]
+            },
+            "object_type_name": {
+              "description": "The object type name",
+              "type": "string"
+            }
+          }
        }
      ]
    },
    "ComparisonOperatorDefinition": {
      "title": "Comparison Operator Definition",
      "description": "The definition of a comparison operator on a scalar type",
-      "type": "object",
-      "required": [
-        "argument_type"
-      ],
-      "properties": {
-        "argument_type": {
-          "description": "The type of the argument to this operator",
-          "allOf": [
-            {
-              "$ref": "#/definitions/Type"
+      "oneOf": [
+        {
+          "type": "object",
+          "required": [
+            "type"
+          ],
+          "properties": {
+            "type": {
+              "type": "string",
+              "enum": [
+                "equal"
+              ]
            }
-          ]
+          }
+        },
+        {
+          "type": "object",
+          "required": [
+            "type"
+          ],
+          "properties": {
+            "type": {
+              "type": "string",
+              "enum": [
+                "in"
+              ]
+            }
+          }
+        },
+        {
+          "type": "object",
+          "required": [
+            "argument_type",
+            "type"
+          ],
+          "properties": {
+            "type": {
+              "type": "string",
+              "enum": [
+                "custom"
+              ]
+            },
+            "argument_type": {
+              "description": "The type of the argument to this operator",
+              "allOf": [
+                {
+                  "$ref": "#/definitions/Type"
+                }
+              ]
+            }
+          }
        }
-      }
+      ]
    },
    "ObjectType": {
      "title": "Object Type",
@@ -670,7 +762,7 @@
           }
         ]
       },
-      "where": {
+      "predicate": {
         "anyOf": [
           {
             "$ref": "#/definitions/Expression"
@@ -767,6 +859,17 @@
       },
       "column": {
         "type": "string"
+      },
+      "fields": {
+        "description": "When the type of the column is a (possibly-nullable) array or object, the caller can request a subset of the complete column data, by specifying fields to fetch here. If omitted, the column data will be fetched in full.",
+        "anyOf": [
+          {
+            "$ref": "#/definitions/NestedField"
+          },
+          {
+            "type": "null"
+          }
+        ]
       }
     }
   },
@@ -803,6 +906,52 @@
       }
     ]
   },
+    "NestedField": {
+      "title": "NestedField",
+      "oneOf": [
+        {
+          "title": "NestedObject",
+          "type": "object",
+          "required": [
+            "fields",
+            "type"
+          ],
+          "properties": {
+            "type": {
+              "type": "string",
+              "enum": [
+                "object"
+              ]
+            },
+            "fields": {
+              "type": "object",
+              "additionalProperties": {
+                "$ref": "#/definitions/Field"
+              }
+            }
+          }
+        },
+        {
+          "title": "NestedArray",
+          "type": "object",
+          "required": [
+            "fields",
+            "type"
+          ],
+          "properties": {
+            "type": {
+              "type": "string",
+              "enum": [
+                "array"
+              ]
+            },
+            "fields": {
+              "$ref": "#/definitions/NestedField"
+            }
+          }
+        }
+      ]
+    },
    "RelationshipArgument": {
      "title": "Relationship Argument",
      "oneOf": [
@@ -993,7 +1142,6 @@
      "type": "object",
      "required": [
        "arguments",
-        "predicate",
        "relationship"
      ],
      "properties": {
@@ -1010,9 +1158,12 @@
        },
        "predicate": {
          "description": "A predicate expression to apply to the target collection",
-          "allOf": [
+          "anyOf": [
            {
              "$ref": "#/definitions/Expression"
+            },
+            {
+              "type": "null"
            }
          ]
        }
@@ -1122,48 +1273,18 @@
            "$ref": "#/definitions/ComparisonTarget"
          },
          "operator": {
-            "$ref": "#/definitions/BinaryComparisonOperator"
+            "type": "string"
          },
          "value": {
            "$ref": "#/definitions/ComparisonValue"
          }
        }
      },
-      {
-        "type": "object",
-        "required": [
-          "column",
-          "operator",
-          "type",
-          "values"
-        ],
-        "properties": {
-          "type": {
-            "type": "string",
-            "enum": [
-              "binary_array_comparison_operator"
-            ]
-          },
-          "column": {
-            "$ref": "#/definitions/ComparisonTarget"
-          },
-          "operator": {
-            "$ref": "#/definitions/BinaryArrayComparisonOperator"
-          },
-          "values": {
-            "type": "array",
-            "items": {
-              "$ref": "#/definitions/ComparisonValue"
-            }
-          }
-        }
-      },
      {
        "type": "object",
        "required": [
          "in_collection",
-          "type",
-          "where"
+          "type"
        ],
        "properties": {
          "type": {
@@ -1175,8 +1296,15 @@
          "in_collection": {
            "$ref": "#/definitions/ExistsInCollection"
          },
-          "where": {
-            "$ref": "#/definitions/Expression"
+          "predicate": {
+            "anyOf": [
+              {
+                "$ref": "#/definitions/Expression"
+              },
+              {
+                "type": "null"
+              }
+            ]
          }
        }
      }
@@ -1240,43 +1368,6 @@
        "is_null"
      ]
    },
-    "BinaryComparisonOperator": {
-      "title": "Binary Comparison Operator",
-      "oneOf": [
-        {
-          "type": "object",
-          "required": [
-            "type"
-          ],
-          "properties": {
-            "type": {
-              "type": "string",
-              "enum": [
-                "equal"
-              ]
-            }
-          }
-        },
-        {
-          "type": "object",
-          "required": [
-            "name",
-            "type"
-          ],
-          "properties": {
-            "type": {
-              "type": "string",
-              "enum": [
-                "other"
-              ]
-            },
-            "name": {
-              "type": "string"
-            }
-          }
-        }
-      ]
-    },
    "ComparisonValue": {
      "title": "Comparison Value",
      "oneOf": [
@@ -1334,13 +1425,6 @@
        }
      ]
    },
-    "BinaryArrayComparisonOperator": {
-      "title": "Binary Array Comparison Operator",
-      "type": "string",
-      "enum": [
-        "in"
-      ]
-    },
    "ExistsInCollection": {
      "title": "Exists In Collection",
      "oneOf": [
diff --git a/schema/schema_test.go b/schema/schema_test.go
index 0fc70fbd..567f8351 100644
--- a/schema/schema_test.go
+++ b/schema/schema_test.go
@@ -222,10 +222,8 @@ func TestSchemaResponse(t *testing.T) {
 	} else if len(stringScalar.AggregateFunctions) != 0 {
 		t.Errorf("Int scalar in SchemaResponse: expected no aggregate function, got: %+v", stringScalar.AggregateFunctions)
 		t.FailNow()
-	} else if !internal.DeepEqual(stringScalar.ComparisonOperators, ScalarTypeComparisonOperators{
-		"like": ComparisonOperatorDefinition{
-			ArgumentType: NewNamedType("String").Encode(),
-		},
+	} else if !internal.DeepEqual(stringScalar.ComparisonOperators, map[string]ComparisonOperatorDefinition{
+		"like": NewComparisonOperatorCustom(NewNamedType("String")).Encode(),
 	}) {
 		t.Errorf("String scalar in SchemaResponse: expected equal comparison operators; %+v", stringScalar.ComparisonOperators)
 		t.FailNow()
@@ -437,8 +435,8 @@ func TestQueryRequest(t *testing.T) {
 			Collection: "authors",
 			Query: Query{
 				Fields: QueryFields{
-					"first_name": NewColumnField("first_name").Encode(),
-					"last_name":  NewColumnField("last_name").Encode(),
+					"first_name": NewColumnField("first_name", nil).Encode(),
+					"last_name":  NewColumnField("last_name", nil).Encode(),
 					"articles": NewRelationshipField(
 						Query{
 							Aggregates: QueryAggregates{
diff --git a/typegen/regenerate-schema.sh b/typegen/regenerate-schema.sh
index 23398bf6..c60cade5 100755
--- a/typegen/regenerate-schema.sh
+++ b/typegen/regenerate-schema.sh
@@ -48,6 +48,10 @@ sed -i 's/Type interface{}/Type Type/g' ../schema/schema.generated.go
 sed -i 's/QueryFields map\[string\]interface{}/QueryFields map[string]Field/g' ../schema/schema.generated.go
 sed -i 's/type ComparisonValue interface{}//g' ../schema/schema.generated.go
 sed -i 's/type ExistsInCollection interface{}//g' ../schema/schema.generated.go
+sed -i 's/type ComparisonOperatorDefinition interface{}//g' ../schema/schema.generated.go
+sed -i 's/type NestedField interface{}//g' ../schema/schema.generated.go
+sed -i 's/type ScalarTypeComparisonOperators map\[string\]interface{}//g' ../schema/schema.generated.go
+sed -i 's/ScalarTypeComparisonOperators/map[string]ComparisonOperatorDefinition/g' ../schema/schema.generated.go
 
 # format codes
 gofmt -w -s ../
\ No newline at end of file