From d9902ccdf06df97ea92598f882228410557288ec Mon Sep 17 00:00:00 2001
From: Lucian Jones
Date: Mon, 27 Sep 2021 11:09:46 +1300
Subject: [PATCH 01/13] Rewritten execution pipeline

The execution pipeline has been rewritten to improve readability in some areas, and to fix the result marshalling bugs that were popping up. A summary of the changes:

* support for the Node interface method of boundary field lookup has been dropped. It was only supported for a brief window and everything should be using the better @boundary method.
* execution results are now collected before the merging step. This means all results are stored in their own map and will not be mutated as other steps execute.
* null bubbling as per the GraphQL spec is now done in its own step.
* when formatting the response, the selection set is now used as the source of truth. This is done to better accommodate queries using fragments that previously broke in some scenarios. These cases should now be fixed.
* there is no more partial unmarshalling; all results are unmarshalled into either map[string]interface{} or []interface{}. This means there are no steps of the pipeline that get conditionally skipped, as was previously the case.
* tracing is gone from the execution-related code as it was not being used.

Co-authored-by: Nicolas Maquet
Co-authored-by: Malcolm Lockyer
---
 client.go          |    7 +
 execution.go       |  582 ++-----
 execution_test.go  | 3615 ++++++++++++++++++++++++++++++++++++--------
 go.mod             |    3 +-
 go.sum             |    7 +
 merge.go           |   10 +-
 plan.go            |   32 +-
 plan_test.go       |   69 +-
 plugin.go          |    4 +-
 query_execution.go |  898 +++++++++++
 10 files changed, 4103 insertions(+), 1124 deletions(-)
 create mode 100644 query_execution.go

diff --git a/client.go b/client.go
index e2489f4f..ca344f01 100644
--- a/client.go
+++ b/client.go
@@ -13,6 +13,7 @@ import (
 	"time"
 
 	opentracing "github.com/opentracing/opentracing-go"
+	"github.com/vektah/gqlparser/v2/ast"
 )
 
 // GraphQLClient is a GraphQL client.
@@ -144,6 +145,11 @@ func NewRequest(body string) *Request {
 	}
 }
 
+func (r *Request) WithHeaders(headers http.Header) *Request {
+	r.Headers = headers
+	return r
+}
+
 // Response is a GraphQL response
 type Response struct {
 	Errors GraphqlErrors `json:"errors"`
@@ -157,6 +163,7 @@ type GraphqlErrors []GraphqlError
 // GraphqlError is a single GraphQL error
 type GraphqlError struct {
 	Message    string                 `json:"message"`
+	Path       ast.Path               `json:"path,omitempty"`
 	Extensions map[string]interface{} `json:"extensions"`
 }
 
diff --git a/execution.go b/execution.go
index 54884985..aa5489b2 100644
--- a/execution.go
+++ b/execution.go
@@ -3,13 +3,8 @@ package bramble
 import (
 	"context"
 	"encoding/json"
-	"errors"
 	"fmt"
-	"reflect"
-	"runtime/debug"
-	"strings"
 	"sync"
-	"sync/atomic"
 	"time"
 
 	"github.com/99designs/gqlgen/graphql"
@@ -46,7 +41,7 @@ type ExecutableSchema struct {
 	Locations       FieldURLMap
 	IsBoundary      map[string]bool
 	Services        map[string]*Service
-	BoundaryQueries BoundaryQueriesMap
+	BoundaryQueries BoundaryFieldsMap
 	GraphqlClient   *GraphQLClient
 	Tracer          opentracing.Tracer
 	MaxRequestsPerQuery int64
@@ -115,7 +110,7 @@ func (s *ExecutableSchema) UpdateSchema(forceRebuild bool) error {
 		return fmt.Errorf("update of service %v caused schema error: %w", updatedServices, err)
 	}
 
-	boundaryQueries := buildBoundaryQueriesMap(services...)
+	boundaryQueries := buildBoundaryFieldsMap(services...)
 	locations := buildFieldURLMap(services...)
 	isBoundary := buildIsBoundaryMap(services...)
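// An illustrative sketch of the rewritten pipeline described in the commit
// message above, assuming the signatures shown in the ExecuteQuery hunk below
// and in query_execution.go. The wrapper name runPipeline and the
// *queryExecution type name are assumptions, and error handling (including the
// errNullBubbledToRoot case) is elided.
//
//	func runPipeline(qe *queryExecution, op *ast.OperationDefinition, plan *QueryPlan) ([]byte, gqlerror.List) {
//		// 1. Execute the plan: each step records into its own executionResult
//		//    and is never mutated while other steps run.
//		results, errs := qe.Execute(plan)
//		// 2. Merge the per-step results into a single response map.
//		merged, _ := mergeExecutionResults(results)
//		// 3. Bubble up nulls for non-nullable fields, per the GraphQL spec.
//		bubbleErrs, _ := bubbleUpNullValuesInPlace(qe.schema, op.SelectionSet, merged)
//		errs = append(errs, bubbleErrs...)
//		// 4. Format the response, using the selection set as the source of truth.
//		body, _ := formatResponseData(qe.schema, op.SelectionSet, merged)
//		return []byte(body), errs
//	}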
@@ -135,7 +130,6 @@ func (s *ExecutableSchema) Exec(ctx context.Context) graphql.ResponseHandler { return s.ExecuteQuery } -// ExecuteQuery executes an incoming query func (s *ExecutableSchema) ExecuteQuery(ctx context.Context) *graphql.Response { start := time.Now() @@ -145,8 +139,6 @@ func (s *ExecutableSchema) ExecuteQuery(ctx context.Context) *graphql.Response { s.mutex.RLock() defer s.mutex.RUnlock() - result := make(map[string]interface{}) - variables := map[string]interface{}{} if graphql.HasOperationContext(ctx) { reqctx := graphql.GetOperationContext(ctx) @@ -169,15 +161,6 @@ func (s *ExecutableSchema) ExecuteQuery(ctx context.Context) *graphql.Response { if hasPerms { filteredSchema = perms.FilterSchema(s.MergedSchema) } - for _, f := range selectionSetToFields(op.SelectionSet) { - switch f.Name { - case "__type": - name := f.Arguments.ForName("name").Value.Raw - result[f.Alias] = s.resolveType(ctx, filteredSchema, &ast.Type{NamedType: name}, f.SelectionSet) - case "__schema": - result[f.Alias] = s.resolveSchema(ctx, filteredSchema, f.SelectionSet) - } - } plan, err := Plan(&PlanningContext{ Operation: op, @@ -194,9 +177,17 @@ func (s *ExecutableSchema) ExecuteQuery(ctx context.Context) *graphql.Response { AddField(ctx, "operation.name", op.Name) AddField(ctx, "operation.type", op.Operation) - qe := newQueryExecution(s.GraphqlClient, s.Schema(), s.Tracer, s.MaxRequestsPerQuery, s.BoundaryQueries) - executionErrors := qe.execute(ctx, plan, result) - errs = append(errs, executionErrors...) + qe := newQueryExecution(ctx, s.GraphqlClient, s.Schema(), s.BoundaryQueries, int32(s.MaxRequestsPerQuery)) + results, executeErrs := qe.Execute(plan) + if len(executeErrs) > 0 { + return &graphql.Response{ + Errors: executeErrs, + } + } + + timings := make(map[string]interface{}) + timings["execution"] = time.Since(start).Round(time.Millisecond).String() + extensions := make(map[string]interface{}) if debugInfo, ok := ctx.Value(DebugKey).(DebugInfo); ok { if debugInfo.Query { @@ -208,12 +199,6 @@ func (s *ExecutableSchema) ExecuteQuery(ctx context.Context) *graphql.Response { if debugInfo.Plan { extensions["plan"] = plan } - if debugInfo.Timing { - extensions["timing"] = time.Since(start).Round(time.Millisecond).String() - } - if debugInfo.TraceID { - extensions["traceid"] = TraceIDFromContext(ctx) - } } for _, plugin := range s.plugins { @@ -222,11 +207,22 @@ func (s *ExecutableSchema) ExecuteQuery(ctx context.Context) *graphql.Response { } } - for name, value := range extensions { - graphql.RegisterExtension(ctx, name, value) + for _, result := range results { + errs = append(errs, result.Errors...) } - res, err := marshalResult(result, op.SelectionSet, s.MergedSchema, &ast.Type{NamedType: strings.Title(string(op.Operation))}) + introspectionData := s.resolveIntrospectionFields(ctx, op.SelectionSet, filteredSchema) + if len(introspectionData) > 0 { + results = append([]executionResult{ + { + ServiceURL: internalServiceName, + Data: introspectionData, + }, + }, results...) 
+ } + + mergeStart := time.Now() + mergedResult, err := mergeExecutionResults(results) if err != nil { errs = append(errs, &gqlerror.Error{Message: err.Error()}) AddField(ctx, "errors", errs) @@ -234,15 +230,50 @@ func (s *ExecutableSchema) ExecuteQuery(ctx context.Context) *graphql.Response { Errors: errs, } } + timings["merge"] = time.Since(mergeStart).Round(time.Millisecond).String() + + bubbleErrs, err := bubbleUpNullValuesInPlace(qe.schema, op.SelectionSet, mergedResult) + if err == errNullBubbledToRoot { + mergedResult = nil + } else if err != nil { + errs = append(errs, &gqlerror.Error{Message: err.Error()}) + AddField(ctx, "errors", errs) + return &graphql.Response{ + Errors: errs, + } + } + + errs = append(errs, bubbleErrs...) + + formattingStart := time.Now() + formattedResponse, err := formatResponseData(qe.schema, op.SelectionSet, mergedResult) + if err != nil { + errs = append(errs, &gqlerror.Error{Message: err.Error()}) + AddField(ctx, "errors", errs) + return &graphql.Response{ + Errors: errs, + } + } + timings["format"] = time.Since(formattingStart).Round(time.Millisecond).String() + + if debugInfo, ok := ctx.Value(DebugKey).(DebugInfo); ok { + if debugInfo.Timing { + extensions["timing"] = timings + } + } + + for name, value := range extensions { + graphql.RegisterExtension(ctx, name, value) + } if len(errs) > 0 { AddField(ctx, "errors", errs) } + return &graphql.Response{ - Data: res, + Data: []byte(formattedResponse), Errors: errs, } - } // TraceIDFromContext retrieves the trace ID from the context if it exists. @@ -270,6 +301,21 @@ func (s *ExecutableSchema) Complexity(typeName, fieldName string, childComplexit return 0, false } +func (s *ExecutableSchema) resolveIntrospectionFields(ctx context.Context, selectionSet ast.SelectionSet, filteredSchema *ast.Schema) map[string]interface{} { + introspectionResult := make(map[string]interface{}) + for _, f := range selectionSetToFields(selectionSet) { + switch f.Name { + case "__type": + name := f.Arguments.ForName("name").Value.Raw + introspectionResult[f.Alias] = s.resolveType(ctx, filteredSchema, &ast.Type{NamedType: name}, f.SelectionSet) + case "__schema": + introspectionResult[f.Alias] = s.resolveSchema(ctx, filteredSchema, f.SelectionSet) + } + } + + return introspectionResult +} + func (s *ExecutableSchema) resolveSchema(ctx context.Context, schema *ast.Schema, selectionSet ast.SelectionSet) map[string]interface{} { result := make(map[string]interface{}) @@ -558,152 +604,6 @@ func hasDeprecatedDirective(directives ast.DirectiveList) (bool, *string) { return false, nil } -// QueryExecution is a single query execution -type QueryExecution struct { - Schema *ast.Schema - Errors []*gqlerror.Error - RequestCount int64 - - maxRequest int64 - tracer opentracing.Tracer - wg sync.WaitGroup - m sync.Mutex - graphqlClient *GraphQLClient - boundaryQueries BoundaryQueriesMap -} - -func newQueryExecution(client *GraphQLClient, schema *ast.Schema, tracer opentracing.Tracer, maxRequest int64, boundaryQueries BoundaryQueriesMap) *QueryExecution { - return &QueryExecution{ - Schema: schema, - graphqlClient: client, - tracer: tracer, - maxRequest: maxRequest, - boundaryQueries: boundaryQueries, - } -} - -func (e *QueryExecution) execute(ctx context.Context, plan *QueryPlan, resData map[string]interface{}) []*gqlerror.Error { - e.wg.Add(len(plan.RootSteps)) - for _, step := range plan.RootSteps { - if step.ServiceURL == internalServiceName { - e.executeBrambleStep(ctx, step, resData) - continue - } - go e.executeRootStep(ctx, step, 
resData) - } - - e.wg.Wait() - - if e.RequestCount > e.maxRequest { - e.Errors = append(e.Errors, &gqlerror.Error{ - Message: fmt.Sprintf("query exceeded max requests count of %d with %d requests, data will be incomplete", e.maxRequest, e.RequestCount), - }) - } - - return e.Errors -} - -func (e *QueryExecution) addError(ctx context.Context, step *QueryPlanStep, err error) { - var path ast.Path - for _, p := range step.InsertionPoint { - path = append(path, ast.PathName(p)) - } - - var locs []gqlerror.Location - for _, f := range selectionSetToFields(step.SelectionSet) { - pos := f.GetPosition() - if pos == nil { - continue - } - locs = append(locs, gqlerror.Location{Line: pos.Line, Column: pos.Column}) - - // if the field has a subset it's part of the path - if len(f.SelectionSet) > 0 { - path = append(path, ast.PathName(f.Alias)) - } - } - - e.m.Lock() - defer e.m.Unlock() - - var gqlErr GraphqlErrors - if errors.As(err, &gqlErr) { - for _, ge := range gqlErr { - extensions := ge.Extensions - if extensions == nil { - extensions = make(map[string]interface{}) - } - extensions["selectionSet"] = formatSelectionSetSingleLine(ctx, e.Schema, step.SelectionSet) - extensions["serviceName"] = step.ServiceName - extensions["serviceUrl"] = step.ServiceURL - - e.Errors = append(e.Errors, &gqlerror.Error{ - Message: ge.Message, - Path: path, - Locations: locs, - Extensions: extensions, - }) - } - } else { - e.Errors = append(e.Errors, &gqlerror.Error{ - Message: err.Error(), - Path: path, - Locations: locs, - Extensions: map[string]interface{}{ - "selectionSet": formatSelectionSetSingleLine(ctx, e.Schema, step.SelectionSet), - }, - }) - } -} - -func (e *QueryExecution) executeRootStep(ctx context.Context, step *QueryPlanStep, result map[string]interface{}) { - defer e.wg.Done() - defer func() { - if r := recover(); r != nil { - AddField(ctx, "panic", map[string]interface{}{ - "err": r, - "stacktrace": string(debug.Stack()), - }) - e.addError(ctx, step, errors.New("an error happened during query execution")) - } - }() - - if e.tracer != nil { - contextSpan := opentracing.SpanFromContext(ctx) - if contextSpan != nil { - span := e.tracer.StartSpan(step.ServiceName, opentracing.ChildOf(contextSpan.Context())) - ctx = opentracing.ContextWithSpan(ctx, span) - defer span.Finish() - } - } - - q := formatSelectionSet(ctx, e.Schema, step.SelectionSet) - if step.ParentType == mutationObjectName { - q = "mutation " + q - } else { - q = "query " + q - } - - resp := map[string]json.RawMessage{} - promHTTPInFlightGauge.Inc() - req := NewRequest(q) - req.Headers = GetOutgoingRequestHeadersFromContext(ctx) - err := e.graphqlClient.Request(ctx, step.ServiceURL, req, &resp) - promHTTPInFlightGauge.Dec() - if err != nil { - e.addError(ctx, step, err) - } - - e.m.Lock() - mergeMaps(result, jsonMapToInterfaceMap(resp)) - e.m.Unlock() - - for _, subStep := range step.Then { - e.wg.Add(1) - go e.executeChildStep(ctx, subStep, result) - } -} - func jsonMapToInterfaceMap(m map[string]json.RawMessage) map[string]interface{} { res := make(map[string]interface{}, len(m)) for k, v := range m { @@ -713,204 +613,6 @@ func jsonMapToInterfaceMap(m map[string]json.RawMessage) map[string]interface{} return res } -// executeChildStep executes a child step. It finds the insertion targets for -// the step's insertion point and queries the specified service using the node -// query type. 
-func (e *QueryExecution) executeChildStep(ctx context.Context, step *QueryPlanStep, result map[string]interface{}) { - defer e.wg.Done() - defer func() { - if r := recover(); r != nil { - AddField(ctx, "panic", map[string]interface{}{ - "err": r, - "stacktrace": string(debug.Stack()), - }) - e.addError(ctx, step, errors.New("an error happened during query execution")) - } - }() - - if e.tracer != nil { - contextSpan := opentracing.SpanFromContext(ctx) - if contextSpan != nil { - span := e.tracer.StartSpan(step.ServiceName, opentracing.ChildOf(contextSpan.Context())) - ctx = opentracing.ContextWithSpan(ctx, span) - defer span.Finish() - } - } - - e.m.Lock() - result = prepareMapForInsertion(step.InsertionPoint, result).(map[string]interface{}) - e.m.Unlock() - - insertionPoints := buildInsertionSlice(step.InsertionPoint, result) - if len(insertionPoints) == 0 { - return - } - - atomic.AddInt64(&e.RequestCount, 1) - - if e.RequestCount > e.maxRequest { - return - } - - boundaryQuery := e.boundaryQueries.Query(step.ServiceURL, step.ParentType) - selectionSet := formatSelectionSet(ctx, e.Schema, step.SelectionSet) - var b strings.Builder - - b.WriteString("{") - if boundaryQuery.Array { - var ids string - for _, ip := range insertionPoints { - ids += fmt.Sprintf("%q ", ip.ID) - } - b.WriteString(fmt.Sprintf("_result: %s(ids: [%s]) %s", boundaryQuery.Query, ids, selectionSet)) - } else { - for i, ip := range insertionPoints { - b.WriteString(fmt.Sprintf("%s: %s(id: %q) { ... on %s %s } ", nodeAlias(i), boundaryQuery.Query, ip.ID, step.ParentType, selectionSet)) - } - } - b.WriteString("}") - - query := b.String() - - if boundaryQuery.Array { - if len(step.Then) == 0 { - resp := struct { - Result []map[string]json.RawMessage `json:"_result"` - }{} - promHTTPInFlightGauge.Inc() - req := NewRequest(query) - req.Headers = GetOutgoingRequestHeadersFromContext(ctx) - err := e.graphqlClient.Request(ctx, step.ServiceURL, req, &resp) - promHTTPInFlightGauge.Dec() - if err != nil { - e.addError(ctx, step, err) - } - if len(resp.Result) != len(insertionPoints) { - e.addError(ctx, step, fmt.Errorf("error while querying %s: service returned incorrect number of elements", step.ServiceURL)) - return - } - e.m.Lock() - for i := range insertionPoints { - for k, v := range resp.Result[i] { - insertionPoints[i].Target[k] = v - } - } - e.m.Unlock() - return - } - - resp := struct { - Result []map[string]interface{} `json:"_result"` - }{} - promHTTPInFlightGauge.Inc() - req := NewRequest(query) - req.Headers = GetOutgoingRequestHeadersFromContext(ctx) - err := e.graphqlClient.Request(ctx, step.ServiceURL, req, &resp) - promHTTPInFlightGauge.Dec() - if err != nil { - e.addError(ctx, step, err) - return - } - if len(resp.Result) != len(insertionPoints) { - e.addError(ctx, step, fmt.Errorf("error while querying %s: service returned incorrect number of elements", step.ServiceURL)) - return - } - e.m.Lock() - for i := range insertionPoints { - for k, v := range resp.Result[i] { - insertionPoints[i].Target[k] = v - } - } - e.m.Unlock() - - for _, subStep := range step.Then { - e.wg.Add(1) - go e.executeChildStep(ctx, subStep, result) - } - return - } - - // If there's no sub-calls on the data we want to store it as returned. - // This is to preserve fields order with inline fragments on unions, as we - // have no way to determine which type was matched. - // e.g.: { ... on Cat { name, age } ... 
on Dog { age, name } } - if len(step.Then) == 0 { - resp := map[string]map[string]json.RawMessage{} - promHTTPInFlightGauge.Inc() - req := NewRequest(query) - req.Headers = GetOutgoingRequestHeadersFromContext(ctx) - err := e.graphqlClient.Request(ctx, step.ServiceURL, req, &resp) - promHTTPInFlightGauge.Dec() - if err != nil { - e.addError(ctx, step, err) - return - } - if len(resp) != len(insertionPoints) { - e.addError(ctx, step, fmt.Errorf("error while querying %s: service returned incorrect number of elements", step.ServiceURL)) - return - } - e.m.Lock() - for i := range insertionPoints { - for k, v := range resp[nodeAlias(i)] { - insertionPoints[i].Target[k] = v - } - } - e.m.Unlock() - return - } - - resp := map[string]map[string]interface{}{} - promHTTPInFlightGauge.Inc() - req := NewRequest(query) - req.Headers = GetOutgoingRequestHeadersFromContext(ctx) - err := e.graphqlClient.Request(ctx, step.ServiceURL, req, &resp) - promHTTPInFlightGauge.Dec() - if err != nil { - e.addError(ctx, step, err) - return - } - if len(resp) != len(insertionPoints) { - e.addError(ctx, step, fmt.Errorf("error while querying %s: service returned incorrect number of elements", step.ServiceURL)) - return - } - e.m.Lock() - for i := range insertionPoints { - for k, v := range resp[nodeAlias(i)] { - insertionPoints[i].Target[k] = v - } - } - e.m.Unlock() - - for _, subStep := range step.Then { - e.wg.Add(1) - go e.executeChildStep(ctx, subStep, result) - } -} - -// executeBrambleStep executes the Bramble-specific operations -func (e *QueryExecution) executeBrambleStep(ctx context.Context, step *QueryPlanStep, result map[string]interface{}) { - m := buildTypenameResponseMap(step.SelectionSet, step.ParentType) - mergeMaps(result, m) - e.wg.Done() -} - -// buildTypenameResponseMap recursively builds the response map for `__typename` -// fields. This is used for namespaces as they do not belong to any service. 
-func buildTypenameResponseMap(ss ast.SelectionSet, currentType string) map[string]interface{} { - res := make(map[string]interface{}) - for _, f := range selectionSetToFields(ss) { - if len(f.SelectionSet) > 0 { - res[f.Alias] = buildTypenameResponseMap(f.SelectionSet, f.Definition.Type.Name()) - continue - } - - if f.Name == "__typename" { - res[f.Alias] = currentType - } - } - return res -} - func nodeAlias(i int) string { return fmt.Sprintf("_%d", i) } @@ -963,113 +665,14 @@ func mergeMaps(dst, src map[string]interface{}) { } } -type insertionTarget struct { - ID string - Target map[string]interface{} -} - -// prepareMapForInsertion recursively traverses the result map to the insertion -// point and unmarshals any json.RawMessage it finds on the way -func prepareMapForInsertion(insertionPoint []string, in interface{}) interface{} { - if len(insertionPoint) == 0 { - switch in := in.(type) { - case json.RawMessage: - var i interface{} - _ = json.Unmarshal([]byte(in), &i) - switch i := i.(type) { - case map[string]interface{}, []interface{}: - return i - case nil: - return nil - default: - panic("unknown type after unmarshalling") - } - default: - return in - } - } - - switch in := in.(type) { - case map[string]interface{}: - in[insertionPoint[0]] = prepareMapForInsertion(insertionPoint[1:], in[insertionPoint[0]]) - return in - case json.RawMessage: - var m map[string]interface{} - _ = json.Unmarshal([]byte(in), &m) - if m == nil { - return nil - } - m[insertionPoint[0]] = prepareMapForInsertion(insertionPoint[1:], m[insertionPoint[0]]) - return m - case []interface{}: - for i, e := range in { - in[i] = prepareMapForInsertion(insertionPoint, e) - } - return in - case nil: - return nil - default: - panic(fmt.Sprintf("unhandled type: %s", reflect.TypeOf(in).Name())) - } -} - -// buildInsertionSlice returns the list of maps where the data should be inserted -// It recursively traverses maps and list to find the insertion points. -// For example, if we have "insertionPoint" [movie, compTitles] and "in" -// movie { compTitles: [ -// { id: 1 }, -// { id: 2 } -// ] } -// we want to return [{ id: 1 }, { id: 2 }] -func buildInsertionSlice(insertionPoint []string, in interface{}) []insertionTarget { - if len(insertionPoint) == 0 { - switch in := in.(type) { - case map[string]interface{}: - eid := "" - if id, ok := in["_id"]; ok { - eid = id.(string) - } else if id, ok := in["id"]; ok { - eid = id.(string) - } - - if eid == "" { - return nil - } - - return []insertionTarget{{ - ID: eid, - Target: in, - }} - case []interface{}: - var result []insertionTarget - for _, e := range in { - result = append(result, buildInsertionSlice(insertionPoint, e)...) - } - return result - case json.RawMessage: - var m map[string]interface{} - _ = json.Unmarshal([]byte(in), &m) - return buildInsertionSlice(nil, m) - case nil: - return nil - default: - panic(fmt.Sprintf("unhandled insertion point type: %q", reflect.TypeOf(in).Name())) - } - } - - switch in := in.(type) { - case map[string]interface{}: - return buildInsertionSlice(insertionPoint[1:], in[insertionPoint[0]]) - case []interface{}: - var result []insertionTarget - for _, e := range in { - result = append(result, buildInsertionSlice(insertionPoint, e)...) 
- } - return result - case nil: - return nil - default: - panic(fmt.Sprintf("unhandled insertion point type: %s", reflect.TypeOf(in).Name())) +func (s *ExecutableSchema) evaluateSkipAndInclude(vars map[string]interface{}, op *ast.OperationDefinition) *ast.OperationDefinition { + return &ast.OperationDefinition{ + Operation: op.Operation, + Name: op.Name, + VariableDefinitions: op.VariableDefinitions, + Directives: op.Directives, + SelectionSet: s.evaluateSkipAndIncludeRec(vars, op.SelectionSet), + Position: op.Position, } } @@ -1141,17 +744,6 @@ func (s *ExecutableSchema) evaluateSkipAndIncludeRec(vars map[string]interface{} return result } -func (s *ExecutableSchema) evaluateSkipAndInclude(vars map[string]interface{}, op *ast.OperationDefinition) *ast.OperationDefinition { - return &ast.OperationDefinition{ - Operation: op.Operation, - Name: op.Name, - VariableDefinitions: op.VariableDefinitions, - Directives: op.Directives, - SelectionSet: s.evaluateSkipAndIncludeRec(vars, op.SelectionSet), - Position: op.Position, - } -} - func removeSkipAndInclude(directives ast.DirectiveList) ast.DirectiveList { var result ast.DirectiveList for _, d := range directives { diff --git a/execution_test.go b/execution_test.go index a65f6d21..eac9f152 100644 --- a/execution_test.go +++ b/execution_test.go @@ -21,160 +21,6 @@ import ( "github.com/vektah/gqlparser/v2/gqlerror" ) -func TestFederatedQueryFragmentSpreads(t *testing.T) { - serviceA := testService{ - schema: ` - directive @boundary on OBJECT - interface Snapshot { - id: ID! - name: String! - } - - type Gizmo @boundary { - id: ID! - } - - type SnapshotImplementation implements Snapshot { - id: ID! - name: String! - gizmos: [Gizmo!]! - } - - type Query { - snapshot(id: ID!): Snapshot! - }`, - handler: http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { - w.Write([]byte(` - { - "data": { - "snapshot": { - "id": "100", - "name": "foo", - "gizmos": [{ "id": "1" }] - } - } - }`)) - }), - } - - serviceB := testService{ - schema: ` - directive @boundary on OBJECT - type Gizmo @boundary { - id: ID! - name: String! - } - - type Query { - gizmo(id: ID!): Gizmo @boundary - }`, - handler: http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { - w.Write([]byte(` - { - "data": { - "_0": { - "id": "1", - "name": "Gizmo #1" - } - } - }`)) - }), - } - - t.Run("with inline fragment spread", func(t *testing.T) { - f := &queryExecutionFixture{ - services: []testService{serviceA, serviceB}, - query: ` - query Foo { - snapshot(id: "foo") { - id - name - ... on SnapshotImplementation { - gizmos { - id - name - } - } - } - }`, - expected: ` - { - "snapshot": { - "id": "100", - "name": "foo", - "gizmos": [{ "id": "1", "name": "Gizmo #1" }] - } - }`, - } - - f.checkSuccess(t) - }) - - t.Run("with named fragment spread", func(t *testing.T) { - f := &queryExecutionFixture{ - services: []testService{serviceA, serviceB}, - query: ` - query Foo { - snapshot(id: "foo") { - id - name - ... NamedFragment - } - } - - fragment NamedFragment on SnapshotImplementation { - gizmos { - id - name - } - }`, - expected: ` - { - "snapshot": { - "id": "100", - "name": "foo", - "gizmos": [{ "id": "1", "name": "Gizmo #1" }] - } - }`, - } - - f.checkSuccess(t) - }) - - t.Run("with nested fragment spread", func(t *testing.T) { - f := &queryExecutionFixture{ - services: []testService{serviceA, serviceB}, - query: ` - query Foo { - snapshot(id: "foo") { - ... NamedFragment - } - } - - fragment NamedFragment on Snapshot { - id - name - ... 
on SnapshotImplementation { - gizmos { - id - name - } - } - }`, - expected: ` - { - "snapshot": { - "id": "100", - "name": "foo", - "gizmos": [{ "id": "1", "name": "Gizmo #1" }] - } - }`, - } - - f.checkSuccess(t) - }) -} - func TestIntrospectionQuery(t *testing.T) { schema := ` union MovieOrCinema = Movie | Cinema @@ -234,7 +80,7 @@ func TestIntrospectionQuery(t *testing.T) { ctx := testContextWithoutVariables(query.Operations[0]) resp := es.ExecuteQuery(ctx) - assert.JSONEq(t, ` + require.JSONEq(t, ` { "__type": { "description": "A bit like a film", @@ -256,7 +102,7 @@ func TestIntrospectionQuery(t *testing.T) { ctx := testContextWithoutVariables(query.Operations[0]) resp := es.ExecuteQuery(ctx) - assert.JSONEq(t, ` + require.JSONEq(t, ` { "movie": { "desc": "A bit like a film", @@ -294,7 +140,7 @@ func TestIntrospectionQuery(t *testing.T) { }`) ctx := testContextWithoutVariables(query.Operations[0]) resp := es.ExecuteQuery(ctx) - assert.JSONEq(t, ` + require.JSONEq(t, ` { "__type": { "fields": [ @@ -357,14 +203,17 @@ func TestIntrospectionQuery(t *testing.T) { } fragment TypeInfo on __Type { - description - kind - name + description + kind + name } `) ctx := testContextWithoutVariables(query.Operations[0]) resp := es.ExecuteQuery(ctx) - assert.JSONEq(t, ` + errsJSON, err := json.Marshal(resp.Errors) + require.NoError(t, err) + require.Nil(t, resp.Errors, fmt.Sprintf("errors: %s", errsJSON)) + require.JSONEq(t, ` { "__type": { "description": "A bit like a film", @@ -389,7 +238,7 @@ func TestIntrospectionQuery(t *testing.T) { `) ctx := testContextWithoutVariables(query.Operations[0]) resp := es.ExecuteQuery(ctx) - assert.JSONEq(t, ` + require.JSONEq(t, ` { "__type": { "enumValues": [ @@ -446,7 +295,7 @@ func TestIntrospectionQuery(t *testing.T) { `) ctx := testContextWithoutVariables(query.Operations[0]) resp := es.ExecuteQuery(ctx) - assert.JSONEq(t, ` + require.JSONEq(t, ` { "__type": { "possibleTypes": [ @@ -472,7 +321,7 @@ func TestIntrospectionQuery(t *testing.T) { ctx := testContextWithoutVariables(query.Operations[0]) resp := es.ExecuteQuery(ctx) - assert.JSONEq(t, ` + require.JSONEq(t, ` { "__type": { "kind": "OBJECT", @@ -562,7 +411,7 @@ func TestIntrospectionQuery(t *testing.T) { } `), &expected) require.NoError(t, err) - assert.ElementsMatch(t, expected.Schema.Directives, actual.Schema.Directives) + require.ElementsMatch(t, expected.Schema.Directives, actual.Schema.Directives) }) t.Run("__schema", func(t *testing.T) { @@ -583,7 +432,7 @@ func TestIntrospectionQuery(t *testing.T) { `) ctx := testContextWithoutVariables(query.Operations[0]) resp := es.ExecuteQuery(ctx) - assert.JSONEq(t, ` + require.JSONEq(t, ` { "__schema": { "queryType": { @@ -597,25 +446,34 @@ func TestIntrospectionQuery(t *testing.T) { }) } -func TestQueryExecutionWithSingleService(t *testing.T) { +func TestQueryWithNamespace(t *testing.T) { f := &queryExecutionFixture{ services: []testService{ { - schema: `type Movie { + schema: ` + directive @namespace on OBJECT + + type NamespacedMovie { id: ID! title: String } + type NamespaceQuery @namespace { + movie(id: ID!): NamespacedMovie! + } + type Query { - movie(id: ID!): Movie! + namespace: NamespaceQuery! 
} `, handler: http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { w.Write([]byte(`{ "data": { - "movie": { - "id": "1", - "title": "Test title" + "namespace": { + "movie": { + "id": "1", + "title": "Test title" + } } } }`)) @@ -623,15 +481,21 @@ func TestQueryExecutionWithSingleService(t *testing.T) { }, }, query: `{ - movie(id: "1") { - id - title + namespace { + movie(id: "1") { + id + title + } + __typename } }`, expected: `{ - "movie": { - "id": "1", - "title": "Test title" + "namespace": { + "movie": { + "id": "1", + "title": "Test title" + }, + "__typename": "NamespaceQuery" } }`, } @@ -639,53 +503,31 @@ func TestQueryExecutionWithSingleService(t *testing.T) { f.checkSuccess(t) } -func TestQueryExecutionMultipleServices(t *testing.T) { +func TestQueryError(t *testing.T) { f := &queryExecutionFixture{ services: []testService{ { - schema: `directive @boundary on OBJECT - type Movie @boundary { + schema: `type Movie { id: ID! title: String } type Query { movie(id: ID!): Movie! - }`, - handler: http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { - w.Write([]byte(`{ - "data": { - "movie": { - "id": "1", - "title": "Test title" - } - } - } - `)) - }), - }, - { - schema: `directive @boundary on OBJECT - interface Node { id: ID! } - - type Movie @boundary { - id: ID! - release: Int } - - type Query { - node(id: ID!): Node! - }`, + `, handler: http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { w.Write([]byte(`{ - "data": { - "_0": { - "id": "1", - "release": 2007 + "errors": [ + { + "message": "Movie does not exist", + "path": ["movie"], + "extensions": { + "code": "NOT_FOUND" + } } - } - } - `)) + ] + }`)) }), }, }, @@ -693,120 +535,2909 @@ func TestQueryExecutionMultipleServices(t *testing.T) { movie(id: "1") { id title - release - } - }`, - expected: `{ - "movie": { - "id": "1", - "title": "Test title", - "release": 2007 } }`, + errors: gqlerror.List{ + &gqlerror.Error{ + Message: "Movie does not exist", + Path: ast.Path{ast.PathName("movie")}, + Locations: []gqlerror.Location{ + {Line: 2, Column: 4}, + }, + Extensions: map[string]interface{}{ + "code": "NOT_FOUND", + "selectionSet": `{ movie(id: "1") { id title } }`, + "serviceName": "", + }, + }, + &gqlerror.Error{ + Message: `got a null response for non-nullable field "movie"`, + Path: ast.Path{ast.PathName("movie")}, + }, + }, } - f.checkSuccess(t) + f.run(t) } -func TestQueryExecutionNamespaceAndFragmentSpread(t *testing.T) { - f := &queryExecutionFixture{ - services: []testService{ - { - schema: ` - directive @namespace on OBJECT - type Foo { - id: ID! - } - - type MyNamespace @namespace { - foo: Foo! - } +func TestFederatedQueryFragmentSpreads(t *testing.T) { + serviceA := testService{ + schema: ` + directive @boundary on OBJECT + interface Snapshot { + id: ID! + name: String! + } - type Query { - ns: MyNamespace! - }`, - handler: http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { - w.Write([]byte(`{ - "data": { - "ns": { - "foo": { - "id": "1" - } + type Gizmo @boundary { + id: ID! + } + + type Gadget @boundary { + id: ID! + } + + type GizmoImplementation implements Snapshot { + id: ID! + name: String! + gizmos: [Gizmo!]! + } + + type GadgetImplementation implements Snapshot { + id: ID! + name: String! + gadgets: [Gadget!]! + } + + type Query { + snapshot(id: ID!): Snapshot! 
+ }`, + handler: http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + body, _ := io.ReadAll(r.Body) + if strings.Contains(string(body), "GIZMO1") { + w.Write([]byte(` + { + "data": { + "snapshot": { + "id": "100", + "name": "foo", + "gizmos": [{ "id": "GIZMO1" }], + "__typename": "GizmoImplementation" + } + } + }`)) + } else { + w.Write([]byte(` + { + "data": { + "snapshot": { + "id": "100", + "name": "foo", + "gadgets": [{ "id": "GADGET1" }], + "__typename": "GadgetImplementation" + } + } + }`)) + + } + }), + } + + serviceB := testService{ + schema: ` + directive @boundary on OBJECT | FIELD_DEFINITION + type Gizmo @boundary { + id: ID! + name: String! + } + + type Agent { + name: String! + country: String! + } + + type Gadget @boundary { + id: ID! + name: String! + agents: [Agent!]! + } + + type Query { + gizmo(id: ID!): Gizmo @boundary + gadgets(id: [ID!]!): [Gadget!]! @boundary + }`, + handler: http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + body, _ := io.ReadAll(r.Body) + if strings.Contains(string(body), "GIZMO1") { + w.Write([]byte(` + { + "data": { + "_0": { + "id": "GIZMO1", + "name": "Gizmo #1" + } + } + }`)) + } else { + w.Write([]byte(` + { + "data": { + "_result": [ + { + "id": "GADGET1", + "name": "Gadget #1", + "agents": [ + { + "name": "James Bond", + "country": "UK", + "__typename": "Agent" + } + ] } + ] + } + }`)) + } + }), + } + + t.Run("with inline fragment spread", func(t *testing.T) { + f := &queryExecutionFixture{ + services: []testService{serviceA, serviceB}, + query: ` + query Foo { + snapshot(id: "GIZMO1") { + id + name + ... on GizmoImplementation { + gizmos { + id + name + } + } + } + }`, + expected: ` + { + "snapshot": { + "id": "100", + "name": "foo", + "gizmos": [{ "id": "GIZMO1", "name": "Gizmo #1" }] + } + }`, + } + + f.checkSuccess(t) + }) + + t.Run("with overlap in field and fragment selection", func(t *testing.T) { + f := &queryExecutionFixture{ + services: []testService{serviceA, serviceB}, + query: ` + query Foo { + snapshot(id: "GIZMO1") { + id + name + ... on GizmoImplementation { + id + name + gizmos { + id + name + } + } + } + }`, + expected: ` + { + "snapshot": { + "id": "100", + "name": "foo", + "gizmos": [{ "id": "GIZMO1", "name": "Gizmo #1" }] + } + }`, + } + + f.checkSuccess(t) + }) + + t.Run("with named fragment spread", func(t *testing.T) { + f := &queryExecutionFixture{ + services: []testService{serviceA, serviceB}, + query: ` + query Foo { + snapshot(id: "GIZMO1") { + id + name + ... NamedFragment + } + } + + fragment NamedFragment on GizmoImplementation { + gizmos { + id + name + } + }`, + expected: ` + { + "snapshot": { + "id": "100", + "name": "foo", + "gizmos": [{ "id": "GIZMO1", "name": "Gizmo #1" }] + } + }`, + } + + f.checkSuccess(t) + }) + + t.Run("with nested fragment spread", func(t *testing.T) { + f := &queryExecutionFixture{ + services: []testService{serviceA, serviceB}, + query: ` + query Foo { + snapshot(id: "GIZMO1") { + ... NamedFragment + } + } + + fragment NamedFragment on Snapshot { + id + name + ... on GizmoImplementation { + gizmos { + id + name + } + } + }`, + expected: ` + { + "snapshot": { + "id": "100", + "name": "foo", + "gizmos": [{ "id": "GIZMO1", "name": "Gizmo #1" }] + } + }`, + } + + f.checkSuccess(t) + }) + + t.Run("with multiple implementation fragment spreads (gizmo implementation)", func(t *testing.T) { + f := &queryExecutionFixture{ + services: []testService{serviceA, serviceB}, + query: ` + query { + snapshot(id: "GIZMO1") { + id + ... 
NamedFragment + } + } + + fragment NamedFragment on Snapshot { + name + ... on GizmoImplementation { + gizmos { + id + name + } + } + ... on GadgetImplementation { + gadgets { + id + name + } + } + }`, + expected: ` + { + "snapshot": { + "id": "100", + "name": "foo", + "gizmos": [{ "id": "GIZMO1", "name": "Gizmo #1" }] + } + }`, + } + + f.checkSuccess(t) + }) + + t.Run("with multiple implementation fragment spreads (gadget implementation)", func(t *testing.T) { + f := &queryExecutionFixture{ + services: []testService{serviceA, serviceB}, + query: ` + query Foo { + snapshot(id: "GADGET1") { + ... NamedFragment + } + } + + fragment GadgetFragment on GadgetImplementation { + gadgets { + id + name + agents { + name + ... on Agent { + country + } + } + } + } + + fragment NamedFragment on Snapshot { + id + name + ... on GizmoImplementation { + gizmos { + id + name + } + } + ... GadgetFragment + }`, + expected: ` + { + "snapshot": { + "id": "100", + "name": "foo", + "gadgets": [ + { + "id": "GADGET1", + "name": "Gadget #1", + "agents": [ + {"name": "James Bond", "country": "UK"} + ] } + ] + } + }`, + } + + f.checkSuccess(t) + }) + +} + +func TestQueryExecutionMultipleServices(t *testing.T) { + f := &queryExecutionFixture{ + services: []testService{ + { + schema: `directive @boundary on OBJECT + type Movie @boundary { + id: ID! + title: String + } + + type Query { + movie(id: ID!): Movie! + }`, + handler: http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + w.Write([]byte(`{ + "data": { + "movie": { + "id": "1", + "title": "Test title" + } + } + } + `)) + }), + }, + { + schema: `directive @boundary on OBJECT | FIELD_DEFINITION + + type Movie @boundary { + id: ID! + release: Int + } + + type Query { + movie(id: ID!): Movie! @boundary + }`, + handler: http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + w.Write([]byte(`{ + "data": { + "_0": { + "id": "1", + "release": 2007 + } + } + } + `)) + }), + }, + }, + query: `{ + movie(id: "1") { + id + title + release + } + }`, + expected: `{ + "movie": { + "id": "1", + "title": "Test title", + "release": 2007 + } + }`, + } + + f.checkSuccess(t) +} + +func TestQueryExecutionNamespaceAndFragmentSpread(t *testing.T) { + f := &queryExecutionFixture{ + services: []testService{ + { + schema: ` + directive @namespace on OBJECT + type Foo { + id: ID! + } + + type MyNamespace @namespace { + foo: Foo! + } + + type Query { + ns: MyNamespace! + }`, + handler: http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + w.Write([]byte(`{ + "data": { + "ns": { + "foo": { + "id": "1" + } + } + } + } + `)) + }), + }, + { + schema: ` + directive @namespace on OBJECT + interface Person { name: String! } + + type Movie { + title: String! + } + + type Director implements Person { + name: String! + movies: [Movie!] + } + + type MyNamespace @namespace { + somePerson: Person! + } + + type Query { + ns: MyNamespace! + }`, + handler: http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + w.Write([]byte(`{ + "data": { + "ns": { + "somePerson": { + "name": "Luc Besson", + "movies": [ + {"title": "The Big Blue"} + ], + "__typename": "Person" + } + } + } + } + `)) + }), + }, + }, + query: `{ + ns { + somePerson { + ... 
on Director { + name + movies { + title + } + } + } + foo { + id + } + } + }`, + expected: `{ + "ns": { + "somePerson": { + "name": "Luc Besson", + "movies": [ + {"title": "The Big Blue"} + ] + }, + "foo": { + "id": "1" + } + } + }`, + } + + f.run(t) +} + +func TestQueryExecutionWithNullResponse(t *testing.T) { + f := &queryExecutionFixture{ + services: []testService{ + { + schema: `directive @boundary on OBJECT + type Movie @boundary { + id: ID! + } + + type Query { + movies: [Movie!] + }`, + handler: http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + w.Write([]byte(`{ + "data": { + "movies": null + } + } + `)) + }), + }, + { + schema: `directive @boundary on OBJECT | FIELD_DEFINITION + + type Movie @boundary { + id: ID! + title: String + } + + type Query { + movie(id: ID!): Movie! @boundary + }`, + handler: http.HandlerFunc(func(http.ResponseWriter, *http.Request) { + require.Fail(t, "handler should not be called") + }), + }, + }, + query: `{ + movies { + id + title + } + }`, + expected: `{ + "movies": null + }`, + } + + f.checkSuccess(t) +} + +func TestQueryExecutionWithSingleService(t *testing.T) { + f := &queryExecutionFixture{ + services: []testService{ + { + schema: `type Movie { + id: ID! + title: String + } + + type Query { + movie(id: ID!): Movie! + } + `, + handler: http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + w.Write([]byte(`{ + "data": { + "movie": { + "id": "1", + "title": "Test title" + } + } + }`)) + }), + }, + }, + query: `{ + movie(id: "1") { + id + title + } + }`, + expected: `{ + "movie": { + "id": "1", + "title": "Test title" + } + }`, + } + + f.checkSuccess(t) +} + +func TestQueryWithArrayBoundaryFieldsAndMultipleChildrenSteps(t *testing.T) { + f := &queryExecutionFixture{ + services: []testService{ + { + schema: `directive @boundary on OBJECT | FIELD_DEFINITION + + type Movie @boundary { + id: ID! + title: String + } + + type Query { + randomMovie: Movie! + movies(ids: [ID!]!): [Movie]! @boundary + }`, + handler: http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + b, _ := io.ReadAll(r.Body) + if strings.Contains(string(b), "randomMovie") { + w.Write([]byte(`{ + "data": { + "randomMovie": { + "id": "1", + "title": "Movie 1" + } + } + } + `)) + } else { + w.Write([]byte(`{ + "data": { + "_result": [ + { "id": "2", "title": "Movie 2" }, + { "id": "3", "title": "Movie 3" }, + { "id": "4", "title": "Movie 4" } + ] + } + } + `)) + } + }), + }, + { + schema: `directive @boundary on OBJECT | FIELD_DEFINITION + + type Movie @boundary { + id: ID! + compTitles: [Movie!]! + } + + type Query { + movies(ids: [ID!]): [Movie]! @boundary + }`, + handler: http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + w.Write([]byte(`{ + "data": { + "_result": [ + { + "_id": "1", + "compTitles": [ + {"id": "2"}, + {"id": "3"}, + {"id": "4"} + ] + } + ] + } + } + `)) + }), + }, + }, + query: `{ + randomMovie { + id + title + compTitles { + id + title + } + } + }`, + expected: `{ + "randomMovie": + { + "id": "1", + "title": "Movie 1", + "compTitles": [ + { "id": "2", "title": "Movie 2" }, + { "id": "3", "title": "Movie 3" }, + { "id": "4", "title": "Movie 4" } + ] + } + }`, + } + + f.checkSuccess(t) +} + +func TestQueryWithBoundaryFieldsAndNullsAboveInsertionPoint(t *testing.T) { + f := &queryExecutionFixture{ + services: []testService{ + { + schema: `directive @boundary on OBJECT | FIELD_DEFINITION + directive @namespace on OBJECT + + type Movie @boundary { + id: ID! 
+ title: String + director: Person + } + + type Person @boundary { + id: ID! + } + + type Namespace @namespace { + movies: [Movie!]! + } + + type Query { + ns: Namespace! + }`, + handler: http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + response := jsonToInterfaceMap(`{ + "data": { + "ns": { + "movies": [ + { + "id": "MOVIE1", + "title": "Movie #1", + "director": { "id": "DIRECTOR1" } + }, + { + "id": "MOVIE2", + "title": "Movie #2", + "director": null + } + ] + } + } + } + `) + if err := json.NewEncoder(w).Encode(response); err != nil { + t.Error(err) + } + }), + }, + { + schema: `directive @boundary on OBJECT | FIELD_DEFINITION + + type Person @boundary { + id: ID! + name: String! + } + + type Query { + person(id: ID!): Person @boundary + }`, + handler: http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + w.Write([]byte(`{ + "data": { + "_0": { + "_id": "DIRECTOR1", + "name": "David Fincher" + } + } + }`)) + }), + }, + }, + query: `{ + ns { + movies { + id + title + director { + id + name + } + } + } + }`, + expected: `{ + "ns": { + "movies": [ + { + "id": "MOVIE1", + "title": "Movie #1", + "director": { + "id": "DIRECTOR1", + "name": "David Fincher" + } + }, + { + "id": "MOVIE2", + "title": "Movie #2", + "director": null + } + ] + } + }`, + } + + f.checkSuccess(t) +} + +func TestExtractBoundaryIDs(t *testing.T) { + dataJSON := `{ + "gizmos": [ + { + "id": "1", + "name": "Gizmo 1", + "owner": { + "_id": "1" + } + }, + { + "id": "2", + "name": "Gizmo 2", + "owner": { + "id": "1" + } + }, + { + "id": "3", + "name": "Gizmo 3", + "owner": { + "_id": "2" + } + }, + { + "id": "4", + "name": "Gizmo 4", + "owner": { + "id": "5" + } + } + ] + }` + data := map[string]interface{}{} + expected := []string{"1", "1", "2", "5"} + insertionPoint := []string{"gizmos", "owner"} + require.NoError(t, json.Unmarshal([]byte(dataJSON), &data)) + result, err := extractBoundaryIDs(data, insertionPoint) + require.NoError(t, err) + require.Equal(t, expected, result) +} + +func TestTrimInsertionPointForNestedBoundaryQuery(t *testing.T) { + dataJSON := `[ + { + "id": "1", + "name": "Gizmo 1", + "owner": { + "_id": "1" + } + }, + { + "id": "2", + "name": "Gizmo 2", + "owner": { + "id": "1" + } + }, + { + "id": "3", + "name": "Gizmo 3", + "owner": { + "_id": "2" + } + }, + { + "id": "4", + "name": "Gizmo 4", + "owner": { + "id": "5" + } + } + ]` + insertionPoint := []string{"namespace", "gizmos", "owner"} + expected := []string{"owner"} + result, err := trimInsertionPointForNestedBoundaryStep(jsonToInterfaceSlice(dataJSON), insertionPoint) + require.NoError(t, err) + require.Equal(t, expected, result) +} + +func TestBuildBoundaryQueryDocuments(t *testing.T) { + ddl := ` + type Gizmo { + id: ID! + color: String! + owner: Owner + } + + type Owner { + id: ID! + name: String! + } + + type Query { + gizmos: [Gizmo!]! + getOwners(ids: [ID!]!): [Owner!]! 
+ } + ` + schema := gqlparser.MustLoadSchema(&ast.Source{Name: "fixture", Input: ddl}) + boundaryField := BoundaryField{Field: "getOwners", Array: true} + ids := []string{"1", "2", "3"} + selectionSet := []ast.Selection{ + &ast.Field{ + Alias: "_id", + Name: "id", + Definition: schema.Types["Owner"].Fields.ForName("id"), + ObjectDefinition: schema.Types["Owner"], + }, + &ast.Field{ + Alias: "name", + Name: "name", + Definition: schema.Types["Owner"].Fields.ForName("name"), + ObjectDefinition: schema.Types["Owner"], + }, + } + step := &QueryPlanStep{ + ServiceURL: "http://example.com:8080", + ServiceName: "test", + ParentType: "Gizmo", + SelectionSet: selectionSet, + InsertionPoint: []string{"gizmos", "owner"}, + Then: nil, + } + expected := []string{`{ _result: getOwners(ids: ["1", "2", "3"]) { _id: id name } }`} + ctx := testContextWithoutVariables(nil) + docs, err := buildBoundaryQueryDocuments(ctx, schema, step, ids, boundaryField, 1) + require.NoError(t, err) + require.Equal(t, expected, docs) +} + +func TestBuildNonArrayBoundaryQueryDocuments(t *testing.T) { + ddl := ` + type Gizmo { + id: ID! + color: String! + owner: Owner + } + + type Owner { + id: ID! + name: String! + } + + type Query { + gizmos: [Gizmo!]! + getOwner(id: ID!): Owner! + } + ` + schema := gqlparser.MustLoadSchema(&ast.Source{Name: "fixture", Input: ddl}) + boundaryField := BoundaryField{Field: "getOwner", Array: false} + ids := []string{"1", "2", "3"} + selectionSet := []ast.Selection{ + &ast.Field{ + Alias: "_id", + Name: "id", + Definition: schema.Types["Owner"].Fields.ForName("id"), + ObjectDefinition: schema.Types["Owner"], + }, + &ast.Field{ + Alias: "name", + Name: "name", + Definition: schema.Types["Owner"].Fields.ForName("name"), + ObjectDefinition: schema.Types["Owner"], + }, + } + step := &QueryPlanStep{ + ServiceURL: "http://example.com:8080", + ServiceName: "test", + ParentType: "Gizmo", + SelectionSet: selectionSet, + InsertionPoint: []string{"gizmos", "owner"}, + Then: nil, + } + expected := []string{`{ _0: getOwner(id: "1") { _id: id name } _1: getOwner(id: "2") { _id: id name } _2: getOwner(id: "3") { _id: id name } }`} + ctx := testContextWithoutVariables(nil) + docs, err := buildBoundaryQueryDocuments(ctx, schema, step, ids, boundaryField, 10) + require.NoError(t, err) + require.Equal(t, expected, docs) +} + +func TestBuildBatchedNonArrayBoundaryQueryDocuments(t *testing.T) { + ddl := ` + type Gizmo { + id: ID! + color: String! + owner: Owner + } + + type Owner { + id: ID! + name: String! + } + + type Query { + gizmos: [Gizmo!]! + getOwner(id: ID!): Owner! 
+ } + ` + schema := gqlparser.MustLoadSchema(&ast.Source{Name: "fixture", Input: ddl}) + boundaryField := BoundaryField{Field: "getOwner", Array: false} + ids := []string{"1", "2", "3"} + selectionSet := []ast.Selection{ + &ast.Field{ + Alias: "_id", + Name: "id", + Definition: schema.Types["Owner"].Fields.ForName("id"), + ObjectDefinition: schema.Types["Owner"], + }, + &ast.Field{ + Alias: "name", + Name: "name", + Definition: schema.Types["Owner"].Fields.ForName("name"), + ObjectDefinition: schema.Types["Owner"], + }, + } + step := &QueryPlanStep{ + ServiceURL: "http://example.com:8080", + ServiceName: "test", + ParentType: "Gizmo", + SelectionSet: selectionSet, + InsertionPoint: []string{"gizmos", "owner"}, + Then: nil, + } + expected := []string{`{ _0: getOwner(id: "1") { _id: id name } _1: getOwner(id: "2") { _id: id name } }`, `{ _2: getOwner(id: "3") { _id: id name } }`} + ctx := testContextWithoutVariables(nil) + docs, err := buildBoundaryQueryDocuments(ctx, schema, step, ids, boundaryField, 2) + require.NoError(t, err) + require.Equal(t, expected, docs) +} + +func TestMergeExecutionResults(t *testing.T) { + t.Run("merges single map", func(t *testing.T) { + inputMap := jsonToInterfaceMap(`{ + "gizmo": { + "id": "1", + "color": "Gizmo A" + } + }`) + + result := executionResult{ + ServiceURL: "http://service-a", + InsertionPoint: []string{}, + Data: inputMap, + } + + mergedMap, err := mergeExecutionResults([]executionResult{result}) + + require.NoError(t, err) + require.Equal(t, inputMap, mergedMap) + }) + + t.Run("merges two top level results", func(t *testing.T) { + inputMapA := jsonToInterfaceMap(`{ + "gizmoA": { + "id": "1", + "color": "Gizmo A" + } + }`) + + resultA := executionResult{ + ServiceURL: "http://service-a", + InsertionPoint: []string{}, + Data: inputMapA, + } + + inputMapB := jsonToInterfaceMap(`{ + "gizmoB": { + "id": "2", + "color": "Gizmo B" + } + }`) + + resultB := executionResult{ + ServiceURL: "http://service-b", + InsertionPoint: []string{}, + Data: inputMapB, + } + + mergedMap, err := mergeExecutionResults([]executionResult{resultA, resultB}) + + expected := jsonToInterfaceMap(`{ + "gizmoA": { + "id": "1", + "color": "Gizmo A" + }, + "gizmoB": { + "id": "2", + "color": "Gizmo B" + } + }`) + + require.NoError(t, err) + require.Equal(t, expected, mergedMap) + }) + + t.Run("merges mid level array", func(t *testing.T) { + inputMapA := jsonToInterfaceMap(`{ + "gizmo": { + "id": "1", + "gadgets": [{"id": "GADGET1", "owner": { "id": "OWNER1" }}, {"id": "GADGET3", "owner": { "id": "OWNER3" }}, {"id": "GADGET2", "owner": null}] + } + }`) + + resultA := executionResult{ + ServiceURL: "http://service-a", + InsertionPoint: []string{}, + Data: inputMapA, + } + + inputMapB := jsonToInterfaceSlice(`[ + { + "id": "OWNER1", + "name": "008" + } + ]`) + + resultB := executionResult{ + ServiceURL: "http://service-b", + InsertionPoint: []string{"gizmo", "gadgets", "owner"}, + Data: inputMapB, + } + + mergedMap, err := mergeExecutionResults([]executionResult{resultA, resultB}) + + expected := jsonToInterfaceMap(` + { + "gizmo": { + "gadgets": [ + { + "id": "GADGET1", + "owner": { + "id": "OWNER1", + "name": "008" + } + }, + { + "id": "GADGET3", + "owner": { + "id": "OWNER3" + } + }, + { + "id": "GADGET2", + "owner": null + } + ], + "id": "1" + } + }`) + + require.NoError(t, err) + require.Equal(t, expected, mergedMap) + }) + + t.Run("merges nested mid level array", func(t *testing.T) { + inputMapA := jsonToInterfaceMap(`{ + "gizmo": { + "id": "1", + "gadgets": [[{"id": "GADGET1", 
"owner": { "id": "OWNER1" }}, {"id": "GADGET3", "owner": { "id": "OWNER3" }}], [{"id": "GADGET2", "owner": null}]] + } + }`) + + resultA := executionResult{ + ServiceURL: "http://service-a", + InsertionPoint: []string{}, + Data: inputMapA, + } + + inputMapB := jsonToInterfaceSlice(`[ + { + "id": "OWNER1", + "name": "008" + } + ]`) + + resultB := executionResult{ + ServiceURL: "http://service-b", + InsertionPoint: []string{"gizmo", "gadgets", "owner"}, + Data: inputMapB, + } + + mergedMap, err := mergeExecutionResults([]executionResult{resultA, resultB}) + + expected := jsonToInterfaceMap(` + { + "gizmo": { + "gadgets": [ + [ + { + "id": "GADGET1", + "owner": { + "id": "OWNER1", + "name": "008" + } + }, + { + "id": "GADGET3", + "owner": { + "id": "OWNER3" + } + } + ], + [ + { + "id": "GADGET2", + "owner": null + } + ] + ], + "id": "1" + } + }`) + + require.NoError(t, err) + require.Equal(t, expected, mergedMap) + }) + + t.Run("merges root step with child step (root step returns object, boundary field is non array)", func(t *testing.T) { + inputMapA := jsonToInterfaceMap(`{ + "gizmo": { + "id": "1", + "color": "Gizmo A", + "owner": { + "_id": "1" + } + } + }`) + + resultA := executionResult{ + ServiceURL: "http://service-a", + InsertionPoint: []string{}, + Data: inputMapA, + } + + inputSliceB := jsonToInterfaceSlice(`[ + { + "_id": "1", + "name": "Owner A" + } + ]`) + + resultB := executionResult{ + ServiceURL: "http://service-b", + InsertionPoint: []string{"gizmo", "owner"}, + Data: inputSliceB, + } + + mergedMap, err := mergeExecutionResults([]executionResult{resultA, resultB}) + + expected := jsonToInterfaceMap(`{ + "gizmo": { + "id": "1", + "color": "Gizmo A", + "owner": { + "_id": "1", + "name": "Owner A" + } + } + }`) + + require.NoError(t, err) + require.Equal(t, expected, mergedMap) + }) + + t.Run("merges root step with child step (root step returns array, boundary field is non array)", func(t *testing.T) { + inputMapA := jsonToInterfaceMap(`{ + "gizmos": [ + { + "id": "1", + "color": "RED", + "owner": { + "_id": "4" + } + }, + { + "id": "2", + "color": "GREEN", + "owner": { + "_id": "5" + } + }, + { + "id": "3", + "color": "BLUE", + "owner": { + "_id": "6" + } + } + ] + }`) + + resultA := executionResult{ + ServiceURL: "http://service-a", + InsertionPoint: []string{}, + Data: inputMapA, + } + + inputSliceB := jsonToInterfaceSlice(`[ + { + "_id": "4", + "name": "Owner A" + }, + { + "_id": "5", + "name": "Owner B" + }, + { + "_id": "6", + "name": "Owner C" + } + ]`) + + resultB := executionResult{ + ServiceURL: "http://service-b", + InsertionPoint: []string{"gizmos", "owner"}, + Data: inputSliceB, + } + + mergedMap, err := mergeExecutionResults([]executionResult{resultA, resultB}) + + expected := jsonToInterfaceMap(`{ + "gizmos": [ + { + "id": "1", + "color": "RED", + "owner": { + "_id": "4", + "name": "Owner A" + } + }, + { + "id": "2", + "color": "GREEN", + "owner": { + "_id": "5", + "name": "Owner B" + } + }, + { + "id": "3", + "color": "BLUE", + "owner": { + "_id": "6", + "name": "Owner C" + } + } + ] + }`) + + require.NoError(t, err) + require.Equal(t, expected, mergedMap) + }) + + t.Run("merges root step with child step (root step returns array, boundary field is array)", func(t *testing.T) { + inputMapA := jsonToInterfaceMap(`{ + "gizmos": [ + { + "id": "1", + "color": "RED", + "owner": { + "_id": "4" + } + }, + { + "id": "2", + "color": "GREEN", + "owner": { + "_id": "5" + } + }, + { + "id": "3", + "color": "BLUE", + "owner": { + "_id": "6" + } + } + ] + }`) + + resultA := 
executionResult{ + ServiceURL: "http://service-a", + InsertionPoint: []string{}, + Data: inputMapA, + } + + inputSliceB := jsonToInterfaceSlice(`[ + { + "_id": "4", + "name": "Owner A" + }, + { + "_id": "5", + "name": "Owner B" + }, + { + "_id": "6", + "name": "Owner C" + } + ]`) + + resultB := executionResult{ + ServiceURL: "http://service-b", + InsertionPoint: []string{"gizmos", "owner"}, + Data: inputSliceB, + } + + mergedMap, err := mergeExecutionResults([]executionResult{resultA, resultB}) + + expected := jsonToInterfaceMap(`{ + "gizmos": [ + { + "id": "1", + "color": "RED", + "owner": { + "_id": "4", + "name": "Owner A" + } + }, + { + "id": "2", + "color": "GREEN", + "owner": { + "_id": "5", + "name": "Owner B" + } + }, + { + "id": "3", + "color": "BLUE", + "owner": { + "_id": "6", + "name": "Owner C" + } + } + ] + }`) + + require.NoError(t, err) + require.Equal(t, expected, mergedMap) + }) + + t.Run("allows using both 'id' and '_id'", func(t *testing.T) { + inputMapA := jsonToInterfaceMap(`{ + "gizmos": [ + { + "id": "1", + "color": "RED", + "owner": { + "id": "4" + } + }, + { + "id": "2", + "color": "GREEN", + "owner": { + "id": "5" + } + }, + { + "id": "3", + "color": "BLUE", + "owner": { + "_id": "6" + } + } + ] + }`) + + resultA := executionResult{ + ServiceURL: "http://service-a", + InsertionPoint: []string{}, + Data: inputMapA, + } + + inputSliceB := jsonToInterfaceSlice(`[ + { + "_id": "4", + "name": "Owner A" + }, + { + "id": "5", + "name": "Owner B" + }, + { + "id": "6", + "name": "Owner C" + } + ]`) + + resultB := executionResult{ + ServiceURL: "http://service-b", + InsertionPoint: []string{"gizmos", "owner"}, + Data: inputSliceB, + } + + mergedMap, err := mergeExecutionResults([]executionResult{resultA, resultB}) + + expected := jsonToInterfaceMap(`{ + "gizmos": [ + { + "id": "1", + "color": "RED", + "owner": { + "id": "4", + "name": "Owner A" + } + }, + { + "id": "2", + "color": "GREEN", + "owner": { + "id": "5", + "name": "Owner B" + } + }, + { + "id": "3", + "color": "BLUE", + "owner": { + "_id": "6", + "name": "Owner C" + } + } + ] + }`) + + require.NoError(t, err) + require.Equal(t, expected, mergedMap) + }) +} + +func TestUnionAndTrimSelectionSet(t *testing.T) { + schemaString := ` + directive @boundary on OBJECT + interface Tool { + id: ID! + name: String! + } + + union GadgetOrGizmo = Gadget | Gizmo + + type Gizmo @boundary { + id: ID! + } + + type Gadget @boundary { + id: ID! + } + + type Agent { + id: ID! + name: String! + country: Country! + } + + type Country { + id: ID! + name: String! + } + + type GizmoImplementation implements Tool { + id: ID! + name: String! + gizmos: [Gizmo!]! + } + + type GadgetImplementation implements Tool { + id: ID! + name: String! + gadgets: [Gadget!]! + } + + type Query { + tool(id: ID!): Tool! 
+ }` + + schema := gqlparser.MustLoadSchema(&ast.Source{Input: schemaString}) + ctx := testContextWithoutVariables(nil) + + t.Run("does not touch simple selection sets", func(t *testing.T) { + selectionSet := ast.SelectionSet{ + &ast.Field{ + Alias: "id", + Name: "id", + Definition: schema.Types["Agent"].Fields.ForName("id"), + ObjectDefinition: schema.Types["Agent"], + }, + &ast.Field{ + Alias: "name", + Name: "name", + Definition: schema.Types["Agent"].Fields.ForName("name"), + ObjectDefinition: schema.Types["Agent"], + }, + &ast.Field{ + Alias: "country", + Name: "country", + Definition: schema.Types["Agent"].Fields.ForName("country"), + ObjectDefinition: schema.Types["Agent"], + SelectionSet: []ast.Selection{ + &ast.Field{ + Alias: "id", + Name: "id", + Definition: schema.Types["Country"].Fields.ForName("id"), + ObjectDefinition: schema.Types["Country"], + }, + &ast.Field{ + Alias: "name", + Name: "name", + Definition: schema.Types["Country"].Fields.ForName("name"), + ObjectDefinition: schema.Types["Country"], + }, + }, + }, + } + + filtered, err := unionAndTrimSelectionSet("", schema, selectionSet) + require.NoError(t, err) + require.Equal(t, selectionSet, filtered) + }) + + t.Run("removes field duplicates from inline fragment", func(t *testing.T) { + initialSelectionSet := ast.SelectionSet{ + &ast.Field{ + Alias: "id", + Name: "id", + Definition: schema.Types["Tool"].Fields.ForName("id"), + ObjectDefinition: schema.Types["Tool"], + }, + &ast.Field{ + Alias: "name", + Name: "name", + Definition: schema.Types["Tool"].Fields.ForName("name"), + ObjectDefinition: schema.Types["Tool"], + }, + &ast.InlineFragment{ + TypeCondition: schema.Types["GizmoImplementation"].Name, + SelectionSet: []ast.Selection{ + &ast.Field{ + Alias: "id", + Name: "id", + Definition: schema.Types["GizmoImplementation"].Fields.ForName("id"), + ObjectDefinition: schema.Types["GizmoImplementation"], + }, + &ast.Field{ + Alias: "name", + Name: "name", + Definition: schema.Types["GizmoImplementation"].Fields.ForName("name"), + ObjectDefinition: schema.Types["GizmoImplementation"], + }, + &ast.Field{ + Alias: "gizmos", + Name: "gizmos", + Definition: schema.Types["GizmoImplementation"].Fields.ForName("gizmos"), + ObjectDefinition: schema.Types["GizmoImplementation"], + SelectionSet: []ast.Selection{ + &ast.Field{ + Alias: "id", + Name: "id", + Definition: schema.Types["Gizmo"].Fields.ForName("id"), + ObjectDefinition: schema.Types["Gizmo"], + }, + }, + }, + }, + ObjectDefinition: schema.Types["GizmoImplementation"], + }, + } + + expected := ast.SelectionSet{ + &ast.Field{ + Alias: "id", + Name: "id", + Definition: schema.Types["Tool"].Fields.ForName("id"), + ObjectDefinition: schema.Types["Tool"], + }, + &ast.Field{ + Alias: "name", + Name: "name", + Definition: schema.Types["Tool"].Fields.ForName("name"), + ObjectDefinition: schema.Types["Tool"], + }, + &ast.InlineFragment{ + TypeCondition: schema.Types["GizmoImplementation"].Name, + SelectionSet: []ast.Selection{ + &ast.Field{ + Alias: "gizmos", + Name: "gizmos", + Definition: schema.Types["GizmoImplementation"].Fields.ForName("gizmos"), + ObjectDefinition: schema.Types["GizmoImplementation"], + SelectionSet: []ast.Selection{ + &ast.Field{ + Alias: "id", + Name: "id", + Definition: schema.Types["Gizmo"].Fields.ForName("id"), + ObjectDefinition: schema.Types["Gizmo"], + }, + }, + }, + }, + ObjectDefinition: schema.Types["GizmoImplementation"], + }, + } + + filtered, err := unionAndTrimSelectionSet("GizmoImplementation", schema, initialSelectionSet) + require.NoError(t, 
err) + require.Equal(t, formatSelectionSetSingleLine(ctx, schema, expected), formatSelectionSetSingleLine(ctx, schema, filtered)) + }) + + t.Run("removes inline fragment if it only contains duplicate selections", func(t *testing.T) { + initialSelectionSet := ast.SelectionSet{ + &ast.Field{ + Alias: "id", + Name: "id", + Definition: schema.Types["Tool"].Fields.ForName("id"), + ObjectDefinition: schema.Types["Tool"], + }, + &ast.Field{ + Alias: "name", + Name: "name", + Definition: schema.Types["Tool"].Fields.ForName("name"), + ObjectDefinition: schema.Types["Tool"], + }, + &ast.InlineFragment{ + TypeCondition: schema.Types["GizmoImplementation"].Name, + SelectionSet: []ast.Selection{ + &ast.Field{ + Alias: "id", + Name: "id", + Definition: schema.Types["GizmoImplementation"].Fields.ForName("id"), + ObjectDefinition: schema.Types["GizmoImplementation"], + }, + &ast.Field{ + Alias: "name", + Name: "name", + Definition: schema.Types["GizmoImplementation"].Fields.ForName("name"), + ObjectDefinition: schema.Types["GizmoImplementation"], + }, + }, + ObjectDefinition: schema.Types["GizmoImplementation"], + }, + } + + expected := ast.SelectionSet{ + &ast.Field{ + Alias: "id", + Name: "id", + Definition: schema.Types["Tool"].Fields.ForName("id"), + ObjectDefinition: schema.Types["Tool"], + }, + &ast.Field{ + Alias: "name", + Name: "name", + Definition: schema.Types["Tool"].Fields.ForName("name"), + ObjectDefinition: schema.Types["Tool"], + }, + } + + filtered, err := unionAndTrimSelectionSet("GizmoImplementation", schema, initialSelectionSet) + require.NoError(t, err) + require.Equal(t, formatSelectionSetSingleLine(ctx, schema, expected), formatSelectionSetSingleLine(ctx, schema, filtered)) + }) + + t.Run("removes inline fragment that does not match typename", func(t *testing.T) { + initialSelectionSet := ast.SelectionSet{ + &ast.Field{ + Alias: "id", + Name: "id", + Definition: schema.Types["Tool"].Fields.ForName("id"), + ObjectDefinition: schema.Types["Tool"], + }, + &ast.InlineFragment{ + TypeCondition: schema.Types["GizmoImplementation"].Name, + SelectionSet: []ast.Selection{ + &ast.Field{ + Alias: "id", + Name: "id", + Definition: schema.Types["GizmoImplementation"].Fields.ForName("id"), + ObjectDefinition: schema.Types["GizmoImplementation"], + }, + &ast.Field{ + Alias: "name", + Name: "name", + Definition: schema.Types["GizmoImplementation"].Fields.ForName("name"), + ObjectDefinition: schema.Types["GizmoImplementation"], + }, + }, + ObjectDefinition: schema.Types["GizmoImplementation"], + }, + &ast.InlineFragment{ + TypeCondition: schema.Types["GadgetImplementation"].Name, + SelectionSet: []ast.Selection{ + &ast.Field{ + Alias: "name", + Name: "name", + Definition: schema.Types["GadgetImplementation"].Fields.ForName("name"), + ObjectDefinition: schema.Types["GadgetImplementation"], + }, + }, + ObjectDefinition: schema.Types["GadgetImplementation"], + }, + } + + expected := ast.SelectionSet{ + &ast.Field{ + Alias: "id", + Name: "id", + Definition: schema.Types["Tool"].Fields.ForName("id"), + ObjectDefinition: schema.Types["Tool"], + }, + &ast.InlineFragment{ + TypeCondition: schema.Types["GizmoImplementation"].Name, + SelectionSet: []ast.Selection{ + &ast.Field{ + Alias: "name", + Name: "name", + Definition: schema.Types["GizmoImplementation"].Fields.ForName("name"), + ObjectDefinition: schema.Types["GizmoImplementation"], + }, + }, + ObjectDefinition: schema.Types["GizmoImplementation"], + }, + } + + filtered, err := unionAndTrimSelectionSet("GizmoImplementation", schema, initialSelectionSet) 
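+		// Trimming is done against the concrete type "GizmoImplementation": the
+		// GadgetImplementation fragment is dropped because its type condition does not
+		// match, and the duplicated "id" selection is removed from the matching
+		// fragment, leaving only its fragment-specific "name" field.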
+ require.NoError(t, err) + require.Equal(t, formatSelectionSetSingleLine(ctx, schema, expected), formatSelectionSetSingleLine(ctx, schema, filtered)) + }) + + t.Run("works with unions", func(t *testing.T) { + initialSelectionSet := ast.SelectionSet{ + &ast.InlineFragment{ + TypeCondition: schema.Types["Gizmo"].Name, + SelectionSet: []ast.Selection{ + &ast.Field{ + Alias: "id", + Name: "id", + Definition: schema.Types["Gizmo"].Fields.ForName("id"), + ObjectDefinition: schema.Types["Gizmo"], + }, + }, + ObjectDefinition: schema.Types["GadgetOrGizmo"], + }, + &ast.InlineFragment{ + TypeCondition: schema.Types["Gadget"].Name, + SelectionSet: []ast.Selection{ + &ast.Field{ + Alias: "name", + Name: "name", + Definition: schema.Types["Gadget"].Fields.ForName("name"), + ObjectDefinition: schema.Types["Gadget"], + }, + }, + ObjectDefinition: schema.Types["GadgetOrGizmo"], + }, + } + + expected := ast.SelectionSet{ + &ast.InlineFragment{ + TypeCondition: schema.Types["Gadget"].Name, + SelectionSet: []ast.Selection{ + &ast.Field{ + Alias: "name", + Name: "name", + Definition: schema.Types["Gadget"].Fields.ForName("name"), + ObjectDefinition: schema.Types["Gadget"], + }, + }, + ObjectDefinition: schema.Types["GadgetOrGizmo"], + }, + } + + filtered, err := unionAndTrimSelectionSet("Gadget", schema, initialSelectionSet) + require.NoError(t, err) + require.Equal(t, formatSelectionSetSingleLine(ctx, schema, expected), formatSelectionSetSingleLine(ctx, schema, filtered)) + }) +} + +func TestBubbleUpNullValuesInPlace(t *testing.T) { + t.Run("no expected or unexpected nulls", func(t *testing.T) { + ddl := ` + type Gizmo { + id: ID! + color: String! + owner: Owner + } + + type Owner { + id: ID! + name: String! + } + + type Query { + gizmos: [Gizmo!]! + getOwners(ids: [ID!]!): [Owner!]! + }` + + result := jsonToInterfaceMap(` + { + "gizmos": [ + { "id": "GIZMO1" }, + { "id": "GIZMO2" }, + { "id": "GIZMO3" } + ] + } + `) + + schema := gqlparser.MustLoadSchema(&ast.Source{Name: "fixture", Input: ddl}) + + query := ` + { + gizmos { + id + } + }` + + document := gqlparser.MustLoadQuery(schema, query) + errs, err := bubbleUpNullValuesInPlace(schema, document.Operations[0].SelectionSet, result) + require.NoError(t, err) + require.Nil(t, errs) + }) + + t.Run("1 expected null (bubble to root)", func(t *testing.T) { + ddl := ` + type Gizmo { + id: ID! + color: String! + owner: Owner + } + + type Owner { + id: ID! + name: String! + } + + type Query { + gizmos: [Gizmo!]! + getOwners(ids: [ID!]!): [Owner!]! + }` + + result := jsonToInterfaceMap(` + { + "gizmos": [ + { "id": "GIZMO1", "color": "RED" }, + { "id": "GIZMO2", "color": "GREEN" }, + { "id": "GIZMO3", "color": null } + ] + } + `) + + schema := gqlparser.MustLoadSchema(&ast.Source{Name: "fixture", Input: ddl}) + + query := ` + { + gizmos { + id + color + } + }` + + document := gqlparser.MustLoadQuery(schema, query) + errs, err := bubbleUpNullValuesInPlace(schema, document.Operations[0].SelectionSet, result) + require.Equal(t, errNullBubbledToRoot, err) + require.Len(t, errs, 1) + }) + + t.Run("1 expected null (bubble to middle)", func(t *testing.T) { + ddl := ` + type Gizmo { + id: ID! + color: String! + owner: Owner + } + + type Owner { + id: ID! + name: String! + } + + type Query { + gizmos: [Gizmo!] + getOwners(ids: [ID!]!): [Owner!]! 
+ }` + + result := jsonToInterfaceMap(` + { + "gizmos": [ + { "id": "GIZMO1", "color": "RED" }, + { "id": "GIZMO2", "color": "GREEN" }, + { "id": "GIZMO3", "color": null } + ] + } + `) + + schema := gqlparser.MustLoadSchema(&ast.Source{Name: "fixture", Input: ddl}) + + query := ` + { + gizmos { + id + color + } + }` + + document := gqlparser.MustLoadQuery(schema, query) + errs, err := bubbleUpNullValuesInPlace(schema, document.Operations[0].SelectionSet, result) + require.NoError(t, err) + require.Equal(t, []*gqlerror.Error([]*gqlerror.Error{ + { + Message: `got a null response for non-nullable field "color"`, + Path: ast.Path{ast.PathName("gizmos"), ast.PathIndex(2), ast.PathName("color")}, + Extensions: nil, + }}), errs) + require.Equal(t, jsonToInterfaceMap(`{ "gizmos": null }`), result) + }) + + t.Run("all nulls (bubble to middle)", func(t *testing.T) { + ddl := ` + type Gizmo { + id: ID! + color: String! + owner: Owner + } + + type Owner { + id: ID! + name: String! + } + + type Query { + gizmos: [Gizmo!] + getOwners(ids: [ID!]!): [Owner!]! + }` + + result := jsonToInterfaceMap(` + { + "gizmos": [ + { "id": "GIZMO1", "color": null }, + { "id": "GIZMO2", "color": null }, + { "id": "GIZMO3", "color": null } + ] + } + `) + + schema := gqlparser.MustLoadSchema(&ast.Source{Name: "fixture", Input: ddl}) + + query := ` + { + gizmos { + id + color + } + }` + + document := gqlparser.MustLoadQuery(schema, query) + errs, err := bubbleUpNullValuesInPlace(schema, document.Operations[0].SelectionSet, result) + require.NoError(t, err) + require.Equal(t, []*gqlerror.Error([]*gqlerror.Error{ + { + Message: `got a null response for non-nullable field "color"`, + Path: ast.Path{ast.PathName("gizmos"), ast.PathIndex(0), ast.PathName("color")}, + Extensions: nil, + }, + { + Message: `got a null response for non-nullable field "color"`, + Path: ast.Path{ast.PathName("gizmos"), ast.PathIndex(1), ast.PathName("color")}, + Extensions: nil, + }, + { + Message: `got a null response for non-nullable field "color"`, + Path: ast.Path{ast.PathName("gizmos"), ast.PathIndex(2), ast.PathName("color")}, + Extensions: nil, + }, + }), errs) + require.Equal(t, jsonToInterfaceMap(`{ "gizmos": null }`), result) + }) + + t.Run("1 expected null (bubble to middle in array)", func(t *testing.T) { + ddl := ` + type Gizmo { + id: ID! + color: String! + owner: Owner + } + + type Owner { + id: ID! + name: String! + } + + type Query { + gizmos: [Gizmo]! + getOwners(ids: [ID!]!): [Owner!]! + }` + + result := jsonToInterfaceMap(` + { + "gizmos": [ + { "id": "GIZMO1", "color": "RED" }, + { "id": "GIZMO3", "color": null }, + { "id": "GIZMO2", "color": "GREEN" } + ] + } + `) + + schema := gqlparser.MustLoadSchema(&ast.Source{Name: "fixture", Input: ddl}) + + query := ` + { + gizmos { + id + color + } + }` + + document := gqlparser.MustLoadQuery(schema, query) + errs, err := bubbleUpNullValuesInPlace(schema, document.Operations[0].SelectionSet, result) + require.NoError(t, err) + require.Equal(t, []*gqlerror.Error([]*gqlerror.Error{ + { + Message: `got a null response for non-nullable field "color"`, + Path: ast.Path{ast.PathName("gizmos"), ast.PathIndex(1), ast.PathName("color")}, + Extensions: nil, + }}), errs) + require.Equal(t, jsonToInterfaceMap(`{ "gizmos": [ { "id": "GIZMO1", "color": "RED" }, null, { "id": "GIZMO2", "color": "GREEN" } ] }`), result) + }) + + t.Run("0 expected nulls", func(t *testing.T) { + ddl := ` + type Gizmo { + id: ID! + color: String + owner: Owner + } + + type Owner { + id: ID! + name: String! 
+ } + + type Query { + gizmos: [Gizmo!]! + getOwners(ids: [ID!]!): [Owner!]! + }` + + resultJSON := `{ + "gizmos": [ + { "id": "GIZMO1", "color": "RED" }, + { "id": "GIZMO2", "color": "GREEN" }, + { "id": "GIZMO3", "color": null } + ] + }` + + schema := gqlparser.MustLoadSchema(&ast.Source{Name: "fixture", Input: ddl}) + + query := ` + { + gizmos { + id + color + } + }` + + document := gqlparser.MustLoadQuery(schema, query) + result := jsonToInterfaceMap(resultJSON) + errs, err := bubbleUpNullValuesInPlace(schema, document.Operations[0].SelectionSet, result) + require.NoError(t, err) + require.Empty(t, errs) + require.Equal(t, jsonToInterfaceMap(resultJSON), result) + }) + + t.Run("works with fragment spreads", func(t *testing.T) { + ddl := ` + type Gizmo { + id: ID! + color: String! + owner: Owner + } + + type Owner { + id: ID! + name: String! + } + + type Query { + gizmos: [Gizmo]! + getOwners(ids: [ID!]!): [Owner!]! + }` + + resultJSON := `{ + "gizmos": [ + { "id": "GIZMO1", "color": "RED", "__typename": "Gizmo" }, + { "id": "GIZMO2", "color": "GREEN", "__typename": "Gizmo" }, + { "id": "GIZMO3", "color": null, "__typename": "Gizmo" } + ] + }` + + schema := gqlparser.MustLoadSchema(&ast.Source{Name: "fixture", Input: ddl}) + + query := ` + fragment GizmoDetails on Gizmo { + id + color + __typename + } + { + gizmos { + ...GizmoDetails + } + } + ` + + document := gqlparser.MustLoadQuery(schema, query) + + result := jsonToInterfaceMap(resultJSON) + + errs, err := bubbleUpNullValuesInPlace(schema, document.Operations[0].SelectionSet, result) + require.NoError(t, err) + require.Equal(t, []*gqlerror.Error([]*gqlerror.Error{ + { + Message: `got a null response for non-nullable field "color"`, + Path: ast.Path{ast.PathName("gizmos"), ast.PathIndex(2), ast.PathName("color")}, + Extensions: nil, + }}), errs) + require.Equal(t, jsonToInterfaceMap(`{ "gizmos": [ { "id": "GIZMO1", "color": "RED", "__typename": "Gizmo" }, { "id": "GIZMO2", "color": "GREEN", "__typename": "Gizmo" }, null ] }`), result) + }) + + t.Run("works with inline fragments", func(t *testing.T) { + ddl := ` + type Gizmo { + id: ID! + color: String! + owner: Owner + } + + type Owner { + id: ID! + name: String! + } + + type Query { + gizmos: [Gizmo]! + getOwners(ids: [ID!]!): [Owner!]! + }` + + resultJSON := `{ + "gizmos": [ + { "id": "GIZMO1", "color": "RED", "__typename": "Gizmo" }, + { "id": "GIZMO2", "color": "GREEN", "__typename": "Gizmo" }, + { "id": "GIZMO3", "color": null, "__typename": "Gizmo" } + ] + }` + + schema := gqlparser.MustLoadSchema(&ast.Source{Name: "fixture", Input: ddl}) + + query := ` + { + gizmos { + ... on Gizmo { + id + color + __typename + } + } + } + ` + + document := gqlparser.MustLoadQuery(schema, query) + result := jsonToInterfaceMap(resultJSON) + errs, err := bubbleUpNullValuesInPlace(schema, document.Operations[0].SelectionSet, result) + require.NoError(t, err) + require.Equal(t, []*gqlerror.Error([]*gqlerror.Error{ + { + Message: `got a null response for non-nullable field "color"`, + Path: ast.Path{ast.PathName("gizmos"), ast.PathIndex(2), ast.PathName("color")}, + Extensions: nil, + }}), errs) + require.Equal(t, jsonToInterfaceMap(`{ "gizmos": [ { "id": "GIZMO1", "color": "RED", "__typename": "Gizmo" }, { "id": "GIZMO2", "color": "GREEN", "__typename": "Gizmo" }, null ] }`), result) + }) + + t.Run("inline fragment inside interface", func(t *testing.T) { + ddl := ` + interface Critter { + id: ID! + } + + type Gizmo implements Critter { + id: ID! + color: String! 
+ } + + type Gremlin implements Critter { + id: ID! + name: String! + } + + type Query { + critters: [Critter]! + }` + + resultJSON := `{ + "critters": [ + { "id": "GIZMO1", "color": "RED", "__typename": "Gizmo" }, + { "id": "GREMLIN1", "name": "Spikey", "__typename": "Gremlin" }, + { "id": "GIZMO2", "color": null, "__typename": "Gizmo" } + ] + }` + + schema := gqlparser.MustLoadSchema(&ast.Source{Name: "fixture", Input: ddl}) + + query := ` + { + critters { + id + ... on Gizmo { + color + __typename + } + ... on Gremlin { + name + __typename + } + } + } + ` + + document := gqlparser.MustLoadQuery(schema, query) + result := jsonToInterfaceMap(resultJSON) + errs, err := bubbleUpNullValuesInPlace(schema, document.Operations[0].SelectionSet, result) + require.NoError(t, err) + require.Equal(t, []*gqlerror.Error([]*gqlerror.Error{ + { + Message: `got a null response for non-nullable field "color"`, + Path: ast.Path{ast.PathName("critters"), ast.PathIndex(2), ast.PathName("color")}, + Extensions: nil, + }}), errs) + require.Equal(t, jsonToInterfaceMap(`{ "critters": [ { "id": "GIZMO1", "color": "RED", "__typename": "Gizmo" }, { "id": "GREMLIN1", "name": "Spikey", "__typename": "Gremlin" }, null ] }`), result) + }) + + t.Run("fragment spread inside interface", func(t *testing.T) { + ddl := ` + interface Critter { + id: ID! + } + + type Gizmo implements Critter { + id: ID! + color: String! + } + + type Gremlin implements Critter { + id: ID! + name: String! + } + + type Query { + critters: [Critter]! + }` + + resultJSON := `{ + "critters": [ + { "id": "GIZMO1", "color": "RED", "__typename": "Gizmo" }, + { "id": "GREMLIN1", "name": "Spikey", "__typename": "Gremlin" }, + { "id": "GIZMO2", "color": null, "__typename": "Gizmo" } + ] + }` + + schema := gqlparser.MustLoadSchema(&ast.Source{Name: "fixture", Input: ddl}) + + query := ` + fragment CritterDetails on Critter { + ... on Gizmo { + color + __typename + } + ... on Gremlin { + name + __typename + } + } + + { + critters { + id + ... CritterDetails + } + } + ` + + document := gqlparser.MustLoadQuery(schema, query) + result := jsonToInterfaceMap(resultJSON) + errs, err := bubbleUpNullValuesInPlace(schema, document.Operations[0].SelectionSet, result) + require.NoError(t, err) + require.Equal(t, []*gqlerror.Error([]*gqlerror.Error{ + { + Message: `got a null response for non-nullable field "color"`, + Path: ast.Path{ast.PathName("critters"), ast.PathIndex(2), ast.PathName("color")}, + Extensions: nil, + }}), errs) + require.Equal(t, jsonToInterfaceMap(`{ "critters": [ { "id": "GIZMO1", "color": "RED", "__typename": "Gizmo" }, { "id": "GREMLIN1", "name": "Spikey", "__typename": "Gremlin" }, null ] }`), result) + }) +} + +func TestFormatResponseBody(t *testing.T) { + t.Run("simple response with no errors", func(t *testing.T) { + ddl := ` + type Gizmo { + id: ID! + color: String! + owner: Owner + } + + type Owner { + id: ID! + name: String! + } + + type Query { + gizmos: [Gizmo!]! 
+ }` + + result := jsonToInterfaceMap(` + { + "gizmos": [ + { "color": "RED","owner": { "name": "Owner1", "id": "1" }, "id": "GIZMO1" }, + { "color": "BLUE","owner": { "name": "Owner2", "id": "2" }, "id": "GIZMO2" }, + { "color": "GREEN","owner": { "name": "Owner3", "id": "3" }, "id": "GIZMO3" } + ] + } + `) + + schema := gqlparser.MustLoadSchema(&ast.Source{Name: "fixture", Input: ddl}) + + query := ` + { + gizmos { + id + color + owner { + id + name } - `)) - }), - }, + } + }` + + expectedJSON := ` { - schema: ` - directive @namespace on OBJECT - interface Person { name: String! } + "gizmos": [ + { "id": "GIZMO1", "color": "RED", "owner": { "id": "1", "name": "Owner1" } }, + { "id": "GIZMO2", "color": "BLUE", "owner": { "id": "2", "name": "Owner2" } }, + { "id": "GIZMO3", "color": "GREEN", "owner": { "id": "3", "name": "Owner3" } } + ] + }` - type Movie { - title: String! - } + document := gqlparser.MustLoadQuery(schema, query) + bodyJSON, err := formatResponseData(schema, document.Operations[0].SelectionSet, result) + require.NoError(t, err) + require.JSONEq(t, expectedJSON, bodyJSON) + }) - type Director implements Person { - name: String! - movies: [Movie!] - } + t.Run("null data", func(t *testing.T) { + ddl := ` + type Gizmo { + id: ID! + color: String! + owner: Owner + } - type MyNamespace @namespace { - somePerson: Person! - } + type Owner { + id: ID! + name: String + } - type Query { - ns: MyNamespace! - }`, - handler: http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { - w.Write([]byte(`{ - "data": { - "ns": { - "somePerson": { - "name": "Luc Besson", - "movies": [ - {"title": "The Big Blue"} - ] - } - } - } + type Query { + gizmos: [Gizmo!]! + gizmo: Gizmo! + }` + + result := jsonToInterfaceMap(` + { + "gizmos": [ + { "color": "RED","owner": null, "id": "GIZMO1" }, + { "color": "BLUE","owner": { "name": "Owner2", "id": "2" }, "id": "GIZMO2" }, + { "color": "GREEN","owner": { "name": null, "id": "3" }, "id": "GIZMO3" } + ] + } + `) + + schema := gqlparser.MustLoadSchema(&ast.Source{Name: "fixture", Input: ddl}) + + query := ` + { + gizmos { + id + color + owner { + id + name } - `)) - }), - }, - }, - query: `{ - ns { - somePerson { - ... on Director { - name - movies { - title + } + }` + + expectedJSON := ` + { + "gizmos": [ + { "id": "GIZMO1", "color": "RED", "owner": null }, + { "id": "GIZMO2", "color": "BLUE", "owner": { "id": "2", "name": "Owner2" } }, + { "id": "GIZMO3", "color": "GREEN", "owner": { "id": "3", "name": null } } + ] + }` + + document := gqlparser.MustLoadQuery(schema, query) + bodyJSON, err := formatResponseData(schema, document.Operations[0].SelectionSet, result) + require.NoError(t, err) + require.JSONEq(t, expectedJSON, bodyJSON) + }) + + t.Run("simple response with errors", func(t *testing.T) { + ddl := ` + type Gizmo { + id: ID! + color: String! + owner: Owner + } + + type Owner { + id: ID! + name: String! + } + + type Query { + gizmos: [Gizmo!]! 
+ }` + + result := jsonToInterfaceMap(` + { + "gizmos": [ + { "color": "RED","owner": { "name": "Owner1", "id": "1" }, "id": "GIZMO1" }, + { "color": "BLUE","owner": { "name": "Owner2", "id": "2" }, "id": "GIZMO2" }, + { "color": "GREEN","owner": { "name": "Owner3", "id": "3" }, "id": "GIZMO3" } + ] + } + `) + + schema := gqlparser.MustLoadSchema(&ast.Source{Name: "fixture", Input: ddl}) + + query := ` + { + gizmos { + id + color + owner { + id + name } } + }` + + expectedJSON := ` + { + "gizmos": [ + { "id": "GIZMO1", "color": "RED", "owner": { "id": "1", "name": "Owner1" } }, + { "id": "GIZMO2", "color": "BLUE", "owner": { "id": "2", "name": "Owner2" } }, + { "id": "GIZMO3", "color": "GREEN", "owner": { "id": "3", "name": "Owner3" } } + ] + }` + + document := gqlparser.MustLoadQuery(schema, query) + bodyJSON, err := formatResponseData(schema, document.Operations[0].SelectionSet, result) + require.NoError(t, err) + require.JSONEq(t, expectedJSON, bodyJSON) + }) + + t.Run("field selection overlaps with fragment selection", func(t *testing.T) { + ddl := ` + interface Gizmo { + id: ID! + name: String! + } + + type Owner { + id: ID! + fullName: String! + } + + type Gadget implements Gizmo { + id: ID! + name: String! + owner: Owner + } + + type Query { + gizmo: Gizmo! + } + ` + + result := jsonToInterfaceMap(`{ + "gizmo": { + "id": "GADGET1", + "name": "Gadget #1", + "owner": { + "id": "OWNER1", + "fullName": "James Bond" + }, + "__typename": "Gadget" + } + } + `) + + schema := gqlparser.MustLoadSchema(&ast.Source{Name: "fixture", Input: ddl}) + + query := ` + query Gizmo { + gizmo { + __typename + ...GizmoDetails + } + } + + fragment GizmoDetails on Gizmo { + id + name + ... on Gadget { + id + name + owner { + id + fullName + } + } + }` + + expectedJSON := ` + { + "gizmo": { + "id": "GADGET1", + "name": "Gadget #1", + "owner": { + "id": "OWNER1", + "fullName": "James Bond" + }, + "__typename": "Gadget" + } + }` + + document := gqlparser.MustLoadQuery(schema, query) + bodyJSON, err := formatResponseData(schema, document.Operations[0].SelectionSet, result) + require.NoError(t, err) + require.JSONEq(t, expectedJSON, bodyJSON) + }) + + t.Run("field selection entirely overlaps with fragment selection", func(t *testing.T) { + ddl := ` + interface Gizmo { + id: ID! + name: String! + } + + type Owner { + id: ID! + fullName: String! + } + + type Gadget implements Gizmo { + id: ID! + name: String! + owner: Owner + } + + type Query { + gizmo: Gizmo! + } + ` + + result := jsonToInterfaceMap(`{ + "gizmo": { + "id": "GADGET1", + "name": "Gadget #1", + "__typename": "Gadget" + } + } + `) + + schema := gqlparser.MustLoadSchema(&ast.Source{Name: "fixture", Input: ddl}) + + query := ` + query Gizmo { + gizmo { + ...GizmoDetails + __typename } - foo { + } + + fragment GizmoDetails on Gizmo { + id + name + ... on Gadget { id + name + } + }` + + expectedJSON := ` + { + "gizmo": { + "id": "GADGET1", + "name": "Gadget #1", + "__typename": "Gadget" + } + }` + + document := gqlparser.MustLoadQuery(schema, query) + bodyJSON, err := formatResponseData(schema, document.Operations[0].SelectionSet, result) + require.NoError(t, err) + require.JSONEq(t, expectedJSON, bodyJSON) + }) + + t.Run("multiple implementation fragment spreads", func(t *testing.T) { + ddl := ` + interface Gizmo { + id: ID! + name: String! + } + + type Owner { + id: ID! + fullName: String! + } + + type Gadget implements Gizmo { + id: ID! + name: String! + owner: Owner + } + + type Tool implements Gizmo { + id: ID! + name: String! + category: String! 
+ } + + type Query { + gizmo: Gizmo! + } + ` + + result := jsonToInterfaceMap(`{ + "gizmo": { + "id": "GADGET1", + "name": "Gadget #1", + "__typename": "Gadget" } } - }`, - expected: `{ - "ns": { - "somePerson": { - "name": "Luc Besson", - "movies": [ - {"title": "The Big Blue"} - ] - }, - "foo": { - "id": "1" + `) + + schema := gqlparser.MustLoadSchema(&ast.Source{Name: "fixture", Input: ddl}) + + query := ` + query Gizmo { + gizmo { + ...GizmoDetails + __typename } } - }`, - } - f.run(t) + fragment GizmoDetails on Gizmo { + id + name + ... on Gadget { + id + name + } + ... on Tool { + category + } + }` + + expectedJSON := ` + { + "gizmo": { + "id": "GADGET1", + "name": "Gadget #1", + "__typename": "Gadget" + } + }` + + document := gqlparser.MustLoadQuery(schema, query) + bodyJSON, err := formatResponseData(schema, document.Operations[0].SelectionSet, result) + require.NoError(t, err) + require.JSONEq(t, expectedJSON, bodyJSON) + }) + + t.Run("multiple implementation fragment spreads (bottom fragment matches)", func(t *testing.T) { + ddl := ` + interface Gizmo { + id: ID! + name: String! + } + + type Owner { + id: ID! + fullName: String! + } + + type Gadget implements Gizmo { + id: ID! + name: String! + owner: Owner + } + + type Tool implements Gizmo { + id: ID! + name: String! + category: String! + } + + type Query { + gizmo: Gizmo! + } + ` + + result := jsonToInterfaceMap(`{ + "gizmo": { + "id": "TOOL1", + "name": "Tool #1", + "category": "Screwdriver", + "__typename": "Tool" + } + } + `) + + schema := gqlparser.MustLoadSchema(&ast.Source{Name: "fixture", Input: ddl}) + + query := ` + query Gizmo { + gizmo { + ...GizmoDetails + __typename + } + } + + fragment GizmoDetails on Gizmo { + id + name + ... on Gadget { + id + name + } + ... on Tool { + category + } + }` + + expectedJSON := ` + { + "gizmo": { + "id": "TOOL1", + "name": "Tool #1", + "category": "Screwdriver", + "__typename": "Tool" + } + }` + + document := gqlparser.MustLoadQuery(schema, query) + bodyJSON, err := formatResponseData(schema, document.Operations[0].SelectionSet, result) + require.NoError(t, err) + require.JSONEq(t, expectedJSON, bodyJSON) + }) } func TestQueryExecutionWithTypename(t *testing.T) { @@ -926,8 +3557,8 @@ func TestQueryExecutionWithTypenameAndNamespaces(t *testing.T) { f.checkSuccess(t) } -func TestQueryExecutionWithMultipleNodeQueries(t *testing.T) { - schema1 := `directive @boundary on OBJECT +func TestQueryExecutionWithMultipleBoundaryQueries(t *testing.T) { + schema1 := `directive @boundary on OBJECT | FIELD_DEFINITION type Movie @boundary { id: ID! title: String @@ -936,17 +3567,16 @@ func TestQueryExecutionWithMultipleNodeQueries(t *testing.T) { type Query { movies: [Movie!]! }` - schema2 := `directive @boundary on OBJECT - interface Node { id: ID! } + schema2 := `directive @boundary on OBJECT | FIELD_DEFINITION - type Movie implements Node @boundary { + type Movie @boundary { id: ID! release: Int } type Query { - node(id: ID!): Node! 
- }` + movie(id: ID!): Movie @boundary + }` f := &queryExecutionFixture{ services: []testService{ @@ -956,9 +3586,9 @@ func TestQueryExecutionWithMultipleNodeQueries(t *testing.T) { w.Write([]byte(`{ "data": { "movies": [ - { "id": "1", "title": "Test title 1" }, - { "id": "2", "title": "Test title 2" }, - { "id": "3", "title": "Test title 3" } + { "id": "1", "title": "Test title 1" }, + { "id": "2", "title": "Test title 2" }, + { "id": "3", "title": "Test title 3" } ] } } @@ -970,11 +3600,6 @@ func TestQueryExecutionWithMultipleNodeQueries(t *testing.T) { handler: http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { var q map[string]string json.NewDecoder(r.Body).Decode(&q) - assertQueriesEqual(t, schema2, `{ - _0: node(id: "1") { ... on Movie { _id: id release } } - _1: node(id: "2") { ... on Movie { _id: id release } } - _2: node(id: "3") { ... on Movie { _id: id release } } - }`, q["query"]) w.Write([]byte(`{ "data": { "_0": { "id": "1", "release": 2007 }, @@ -1018,16 +3643,15 @@ func TestQueryExecutionWithMultipleNodeQueries(t *testing.T) { } func TestQueryExecutionMultipleServicesWithArray(t *testing.T) { - schema1 := `directive @boundary on OBJECT - interface Node { id: ID! } + schema1 := `directive @boundary on OBJECT | FIELD_DEFINITION - type Movie implements Node @boundary { + type Movie @boundary { id: ID! title: String } type Query { - node(id: ID!): Node + _movie(id: ID!): Movie @boundary movie(id: ID!): Movie! }` @@ -1043,7 +3667,7 @@ func TestQueryExecutionMultipleServicesWithArray(t *testing.T) { for _, s := range query.Operations[0].SelectionSet { ids = append(ids, s.(*ast.Field).Arguments[0].Value.Raw) } - if query.Operations[0].SelectionSet[0].(*ast.Field).Name == "node" { + if query.Operations[0].SelectionSet[0].(*ast.Field).Name == "_movie" { var res string for i, id := range ids { if i != 0 { @@ -1069,16 +3693,15 @@ func TestQueryExecutionMultipleServicesWithArray(t *testing.T) { }), }, { - schema: `directive @boundary on OBJECT - interface Node { id: ID! } + schema: `directive @boundary on OBJECT | FIELD_DEFINITION - type Movie implements Node @boundary { + type Movie @boundary { id: ID! compTitles: [Movie] } type Query { - node(id: ID!): Node + movie(id: ID!): Movie @boundary }`, handler: http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { w.Write([]byte(`{ @@ -1167,10 +3790,9 @@ func TestQueryExecutionMultipleServicesWithEmptyArray(t *testing.T) { f := &queryExecutionFixture{ services: []testService{ { - schema: `directive @boundary on OBJECT - interface Node { id: ID! } + schema: `directive @boundary on OBJECT | FIELD_DEFINITION - type Movie implements Node @boundary { + type Movie @boundary { id: ID! } @@ -1186,16 +3808,15 @@ func TestQueryExecutionMultipleServicesWithEmptyArray(t *testing.T) { }), }, { - schema: `directive @boundary on OBJECT - interface Node { id: ID! } + schema: `directive @boundary on OBJECT | FIELD_DEFINITION - type Movie implements Node @boundary { + type Movie @boundary { id: ID! title: String } type Query { - node(id: ID!): Node + movie(id: ID!): Movie @boundary }`, handler: http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { t.Fatal("service should not be called on empty array") @@ -1217,16 +3838,15 @@ func TestQueryExecutionMultipleServicesWithEmptyArray(t *testing.T) { } func TestQueryExecutionMultipleServicesWithNestedArrays(t *testing.T) { - schema1 := `directive @boundary on OBJECT - interface Node { id: ID! 
} + schema1 := `directive @boundary on OBJECT | FIELD_DEFINITION - type Movie implements Node @boundary { + type Movie @boundary { id: ID! title: String } type Query { - node(id: ID!): Node + _movie(id: ID!): Movie @boundary movie(id: ID!): Movie! }` services := []testService{ @@ -1240,7 +3860,7 @@ func TestQueryExecutionMultipleServicesWithNestedArrays(t *testing.T) { for _, s := range query.Operations[0].SelectionSet { ids = append(ids, s.(*ast.Field).Arguments[0].Value.Raw) } - if query.Operations[0].SelectionSet[0].(*ast.Field).Name == "node" { + if query.Operations[0].SelectionSet[0].(*ast.Field).Name == "_movie" { var res string for i, id := range ids { if i != 0 { @@ -1266,16 +3886,15 @@ func TestQueryExecutionMultipleServicesWithNestedArrays(t *testing.T) { }), }, { - schema: `directive @boundary on OBJECT - interface Node { id: ID! } + schema: `directive @boundary on OBJECT | FIELD_DEFINITION - type Movie implements Node @boundary { + type Movie @boundary { id: ID! compTitles: [[Movie]] } type Query { - node(id: ID!): Node + movie(id: ID!): Movie @boundary }`, handler: http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { w.Write([]byte(`{ @@ -1330,11 +3949,11 @@ func TestQueryExecutionMultipleServicesWithNestedArrays(t *testing.T) { f.checkSuccess(t) } -func TestQueryExecutionEmptyNodeResponse(t *testing.T) { +func TestQueryExecutionEmptyBoundaryResponse(t *testing.T) { f := &queryExecutionFixture{ services: []testService{ { - schema: `directive @boundary on OBJECT + schema: `directive @boundary on OBJECT | FIELD_DEFINITION type Movie @boundary { id: ID! title: String @@ -1356,8 +3975,7 @@ func TestQueryExecutionEmptyNodeResponse(t *testing.T) { }), }, { - schema: `directive @boundary on OBJECT - interface Node { id: ID! } + schema: `directive @boundary on OBJECT | FIELD_DEFINITION type Movie @boundary { id: ID! @@ -1365,7 +3983,7 @@ func TestQueryExecutionEmptyNodeResponse(t *testing.T) { } type Query { - node(id: ID!): Node! + movie(id: ID!): Movie @boundary }`, handler: http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { w.Write([]byte(`{ @@ -1396,58 +4014,6 @@ func TestQueryExecutionEmptyNodeResponse(t *testing.T) { f.checkSuccess(t) } -func TestQueryExecutionWithNullResponse(t *testing.T) { - f := &queryExecutionFixture{ - services: []testService{ - { - schema: `directive @boundary on OBJECT - type Movie @boundary { - id: ID! - } - - type Query { - movies: [Movie!] - }`, - handler: http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { - w.Write([]byte(`{ - "data": { - "movies": null - } - } - `)) - }), - }, - { - schema: `directive @boundary on OBJECT - interface Node { id: ID! } - - type Movie @boundary { - id: ID! - title: String - } - - type Query { - node(id: ID!): Node! - }`, - handler: http.HandlerFunc(func(http.ResponseWriter, *http.Request) { - assert.Fail(t, "handler should not be called") - }), - }, - }, - query: `{ - movies { - id - title - } - }`, - expected: `{ - "movies": null - }`, - } - - f.checkSuccess(t) -} - func TestQueryExecutionWithNullResponseAndSubBoundaryType(t *testing.T) { f := &queryExecutionFixture{ services: []testService{ @@ -1471,7 +4037,7 @@ func TestQueryExecutionWithNullResponseAndSubBoundaryType(t *testing.T) { }), }, { - schema: `directive @boundary on OBJECT + schema: `directive @boundary on OBJECT | FIELD_DEFINITION interface Node { id: ID! } type Movie @boundary { @@ -1480,7 +4046,7 @@ func TestQueryExecutionWithNullResponseAndSubBoundaryType(t *testing.T) { } type Query { - node(id: ID!): Node! 
+ movie(id: ID!): Movie @boundary }`, handler: http.HandlerFunc(func(http.ResponseWriter, *http.Request) { assert.Fail(t, "handler should not be called") @@ -1505,7 +4071,7 @@ func TestQueryExecutionWithNullResponseAndSubBoundaryType(t *testing.T) { } func TestQueryExecutionWithInputObject(t *testing.T) { - schema1 := `directive @boundary on OBJECT + schema1 := `directive @boundary on OBJECT | FIELD_DEFINITION type Movie @boundary { id: ID! title: String @@ -1551,8 +4117,7 @@ func TestQueryExecutionWithInputObject(t *testing.T) { }), }, { - schema: `directive @boundary on OBJECT - interface Node { id: ID! } + schema: `directive @boundary on OBJECT | FIELD_DEFINITION type Movie @boundary { id: ID! @@ -1560,7 +4125,7 @@ func TestQueryExecutionWithInputObject(t *testing.T) { } type Query { - node(id: ID!): Node! + movie(id: ID!): Movie! @boundary }`, handler: http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { w.Write([]byte(`{ @@ -1626,8 +4191,7 @@ func TestQueryExecutionMultipleObjects(t *testing.T) { }), }, { - schema: `directive @boundary on OBJECT - interface Node { id: ID! } + schema: `directive @boundary on OBJECT | FIELD_DEFINITION type Movie @boundary { id: ID! @@ -1635,7 +4199,7 @@ func TestQueryExecutionMultipleObjects(t *testing.T) { } type Query { - node(id: ID!): Node! + movie(id: ID!): Movie! @boundary movies: [Movie!] }`, handler: http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { @@ -1716,8 +4280,7 @@ func TestQueryExecutionMultipleServicesWithSkipTrueDirectives(t *testing.T) { }), }, { - schema: `directive @boundary on OBJECT - interface Node { id: ID! } + schema: `directive @boundary on OBJECT | FIELD_DEFINITION type Gizmo { foo: String! bar: String! @@ -1727,7 +4290,7 @@ func TestQueryExecutionMultipleServicesWithSkipTrueDirectives(t *testing.T) { gizmo: Gizmo } type Query { - node(id: ID!): Node! + movie(id: ID!): Movie @boundary }`, handler: http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { panic("should not be called") @@ -1782,8 +4345,7 @@ func TestQueryExecutionMultipleServicesWithSkipFalseDirectives(t *testing.T) { }), }, { - schema: `directive @boundary on OBJECT - interface Node { id: ID! } + schema: `directive @boundary on OBJECT | FIELD_DEFINITION type Gizmo { foo: String! bar: String! @@ -1793,7 +4355,7 @@ func TestQueryExecutionMultipleServicesWithSkipFalseDirectives(t *testing.T) { gizmo: Gizmo } type Query { - node(id: ID!): Node! + movie(id: ID!): Movie @boundary }`, handler: http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { w.Write([]byte(`{ @@ -1865,8 +4427,7 @@ func TestQueryExecutionMultipleServicesWithIncludeFalseDirectives(t *testing.T) }), }, { - schema: `directive @boundary on OBJECT - interface Node { id: ID! } + schema: `directive @boundary on OBJECT | FIELD_DEFINITION type Gizmo { foo: String! bar: String! @@ -1876,7 +4437,7 @@ func TestQueryExecutionMultipleServicesWithIncludeFalseDirectives(t *testing.T) gizmo: Gizmo } type Query { - node(id: ID!): Node! + movie(id: ID!): Movie @boundary }`, handler: http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { panic("should not be called") @@ -1931,8 +4492,7 @@ func TestQueryExecutionMultipleServicesWithIncludeTrueDirectives(t *testing.T) { }), }, { - schema: `directive @boundary on OBJECT - interface Node { id: ID! } + schema: `directive @boundary on OBJECT | FIELD_DEFINITION type Gizmo { foo: String! bar: String! 
@@ -1942,7 +4502,7 @@ func TestQueryExecutionMultipleServicesWithIncludeTrueDirectives(t *testing.T) { gizmo: Gizmo } type Query { - node(id: ID!): Node! + movie(id: ID!): Movie @boundary }`, handler: http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { w.Write([]byte(`{ @@ -2023,14 +4583,13 @@ func TestMutationExecution(t *testing.T) { }), }, { - schema: `directive @boundary on OBJECT - interface Node { id: ID! } + schema: `directive @boundary on OBJECT | FIELD_DEFINITION type Movie @boundary { id: ID! release: Int } type Query { - node(id: ID!): Node! + movie(id: ID!): Movie @boundary }`, handler: http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { w.Write([]byte(`{ @@ -2061,13 +4620,13 @@ func TestMutationExecution(t *testing.T) { f.checkSuccess(t) } + func TestQueryExecutionWithUnions(t *testing.T) { f := &queryExecutionFixture{ services: []testService{ { schema: ` - interface Node { id: ID! } - directive @boundary on OBJECT + directive @boundary on OBJECT | FIELD_DEFINITION type Dog { name: String! age: Int } type Cat { name: String! age: Int } @@ -2080,7 +4639,7 @@ func TestQueryExecutionWithUnions(t *testing.T) { } type Query { - node(id: ID!): Node + animal(id: ID!): Animal @boundary animals: [Animal]! }`, handler: http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { @@ -2089,9 +4648,9 @@ func TestQueryExecutionWithUnions(t *testing.T) { w.Write([]byte(`{ "data": { "foo": [ - { "name": "fido", "age": 4 }, - { "name": "felix", "age": 2 }, - { "age": 20, "name": "ka" } + { "name": "fido", "age": 4, "__typename": "Dog" }, + { "name": "felix", "age": 2, "__typename": "Cat" }, + { "age": 20, "name": "ka", "__typename": "Snake" } ] } } @@ -2103,7 +4662,8 @@ func TestQueryExecutionWithUnions(t *testing.T) { "_id": "2", "pet": { "name": "felix", - "age": 2 + "age": 2, + "__typename": "Cat" } } } @@ -2176,11 +4736,10 @@ func TestQueryExecutionWithNamespaces(t *testing.T) { services: []testService{ { schema: ` - directive @boundary on OBJECT + directive @boundary on OBJECT | FIELD_DEFINITION directive @namespace on OBJECT - interface Node { id: ID! } - type Cat implements Node @boundary { + type Cat @boundary { id: ID! name: String! } @@ -2195,16 +4754,17 @@ func TestQueryExecutionWithNamespaces(t *testing.T) { type Query { animals: AnimalsQuery! - node(id: ID!): Node! + cat(id: ID!): Cat @boundary } `, handler: http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { b, _ := ioutil.ReadAll(r.Body) - if strings.Contains(string(b), "node") { + if strings.Contains(string(b), "CA7") { w.Write([]byte(`{ "data": { "_0": { + "_id": "CA7", "name": "Felix" } } @@ -2229,11 +4789,10 @@ func TestQueryExecutionWithNamespaces(t *testing.T) { }, { schema: ` - directive @boundary on OBJECT + directive @boundary on OBJECT | FIELD_DEFINITION directive @namespace on OBJECT - interface Node { id: ID! } - type Cat implements Node @boundary { + type Cat @boundary { id: ID! } @@ -2503,160 +5062,6 @@ func TestQueryWithArrayBoundaryFields(t *testing.T) { f.checkSuccess(t) } -func TestQueryWithArrayBoundaryFieldsAndMultipleChildrenSteps(t *testing.T) { - f := &queryExecutionFixture{ - services: []testService{ - { - schema: `directive @boundary on OBJECT | FIELD_DEFINITION - - type Movie @boundary { - id: ID! - title: String - } - - type Query { - randomMovie: Movie! - movies(ids: [ID!]!): [Movie]! 
@boundary - }`, - handler: http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { - b, _ := io.ReadAll(r.Body) - if strings.Contains(string(b), "randomMovie") { - w.Write([]byte(`{ - "data": { - "randomMovie": { - "id": "1", - "title": "Movie 1" - } - } - } - `)) - } else { - w.Write([]byte(`{ - "data": { - "_result": [ - { "id": 2, "title": "Movie 2" }, - { "id": 3, "title": "Movie 3" }, - { "id": 4, "title": "Movie 4" } - ] - } - } - `)) - } - }), - }, - { - schema: `directive @boundary on OBJECT | FIELD_DEFINITION - - type Movie @boundary { - id: ID! - compTitles: [Movie!]! - } - - type Query { - movies(ids: [ID!]): [Movie]! @boundary - }`, - handler: http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { - w.Write([]byte(`{ - "data": { - "_result": [ - { - "_id": "1", - "compTitles": [ - {"id": "2"}, - {"id": "3"}, - {"id": "4"} - ] - } - ] - } - } - `)) - }), - }, - }, - query: `{ - randomMovie { - id - title - compTitles { - id - title - } - } - }`, - expected: `{ - "randomMovie": - { - "id": "1", - "title": "Movie 1", - "compTitles": [ - { "id": 2, "title": "Movie 2" }, - { "id": 3, "title": "Movie 3" }, - { "id": 4, "title": "Movie 4" } - ] - } - }`, - } - - f.checkSuccess(t) -} - -func TestQueryError(t *testing.T) { - f := &queryExecutionFixture{ - services: []testService{ - { - schema: `type Movie { - id: ID! - title: String - } - - type Query { - movie(id: ID!): Movie! - } - `, - handler: http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { - w.Write([]byte(`{ - "errors": [ - { - "message": "Movie does not exist", - "path": ["movie"], - "extensions": { - "code": "NOT_FOUND" - } - } - ] - }`)) - }), - }, - }, - query: `{ - movie(id: "1") { - id - title - } - }`, - errors: gqlerror.List{ - &gqlerror.Error{ - Message: "Movie does not exist", - Path: ast.Path{ast.PathName("movie")}, - Locations: []gqlerror.Location{ - {Line: 2, Column: 4}, - }, - Extensions: map[string]interface{}{ - "code": "NOT_FOUND", - "selectionSet": `{ movie(id: "1") { id title } }`, - "serviceName": "", - }, - }, - &gqlerror.Error{ - Message: `got a null response for non-nullable field "movie"`, - }, - }, - } - - f.run(t) -} - type testService struct { schema string handler http.Handler @@ -2675,7 +5080,7 @@ type queryExecutionFixture struct { func (f *queryExecutionFixture) checkSuccess(t *testing.T) { f.run(t) - assert.Empty(t, f.resp.Errors) + require.Empty(t, f.resp.Errors) jsonEqWithOrder(t, f.expected, string(f.resp.Data)) } @@ -2701,7 +5106,7 @@ func (f *queryExecutionFixture) run(t *testing.T) { es := newExecutableSchema(nil, 50, nil, services...) es.MergedSchema = merged - es.BoundaryQueries = buildBoundaryQueriesMap(services...) + es.BoundaryQueries = buildBoundaryFieldsMap(services...) es.Locations = buildFieldURLMap(services...) es.IsBoundary = buildIsBoundaryMap(services...) 
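+	// Parse and validate the test query against the merged schema before executing it.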
query := gqlparser.MustLoadQuery(merged, f.query) @@ -2717,17 +5122,37 @@ func (f *queryExecutionFixture) run(t *testing.T) { f.resp.Extensions = graphql.GetExtensions(ctx) if len(f.errors) == 0 { - assert.Empty(t, f.resp.Errors) + require.Empty(t, f.resp.Errors) jsonEqWithOrder(t, f.expected, string(f.resp.Data)) } else { require.Equal(t, len(f.errors), len(f.resp.Errors)) for i := range f.errors { delete(f.resp.Errors[i].Extensions, "serviceUrl") - assert.Equal(t, *f.errors[i], *f.resp.Errors[i]) + require.Equal(t, *f.errors[i], *f.resp.Errors[i]) } } } +func jsonToInterfaceMap(jsonString string) map[string]interface{} { + var outputMap map[string]interface{} + err := json.Unmarshal([]byte(jsonString), &outputMap) + if err != nil { + panic(err) + } + + return outputMap +} + +func jsonToInterfaceSlice(jsonString string) []interface{} { + var outputSlice []interface{} + err := json.Unmarshal([]byte(jsonString), &outputSlice) + if err != nil { + panic(err) + } + + return outputSlice +} + // jsonEqWithOrder checks that the JSON are equals, including the order of the // fields func jsonEqWithOrder(t *testing.T, expected, actual string) { diff --git a/go.mod b/go.mod index a8260e57..dca64dd4 100644 --- a/go.mod +++ b/go.mod @@ -25,12 +25,13 @@ require ( github.com/prometheus/procfs v0.0.10 // indirect github.com/rs/cors v1.7.0 github.com/sirupsen/logrus v1.4.2 - github.com/stretchr/testify v1.5.1 + github.com/stretchr/testify v1.7.0 github.com/uber/jaeger-client-go v2.22.1+incompatible github.com/uber/jaeger-lib v2.2.0+incompatible // indirect github.com/vektah/gqlparser/v2 v2.0.1 go.uber.org/atomic v1.6.0 // indirect golang.org/x/lint v0.0.0-20201208152925-83fdc39ff7b5 // indirect + golang.org/x/sync v0.0.0-20210220032951-036812b2e83c golang.org/x/tools v0.1.0 // indirect google.golang.org/protobuf v1.25.0 // indirect gopkg.in/square/go-jose.v2 v2.5.1 diff --git a/go.sum b/go.sum index 383e50b7..90272fb2 100644 --- a/go.sum +++ b/go.sum @@ -149,6 +149,7 @@ github.com/sirupsen/logrus v1.2.0/go.mod h1:LxeOpSwHxABJmUn/MG1IvRgCAasNZTLOkJPx github.com/sirupsen/logrus v1.4.2 h1:SPIRibHv4MatM3XXNO2BJeFLZwZ2LvZgfQ5+UNI2im4= github.com/sirupsen/logrus v1.4.2/go.mod h1:tLMulIdttU9McNUspp0xgXVQah82FyeX6MwdIuYE2rE= github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= +github.com/stretchr/objx v0.1.1 h1:2vfRuCMp5sSVIDSqO8oNnWJq7mPa6KVP3iPIwFBuy8A= github.com/stretchr/objx v0.1.1/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= github.com/stretchr/testify v1.2.1/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= @@ -156,6 +157,8 @@ github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UV github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4= github.com/stretchr/testify v1.5.1 h1:nOGnQDM7FYENwehXlg/kFVnos3rEvtKTjRvOWSzb6H4= github.com/stretchr/testify v1.5.1/go.mod h1:5W2xD1RspED5o8YsWQXVCued0rvSQ+mT+I5cxcmMvtA= +github.com/stretchr/testify v1.7.0 h1:nwc3DEeHmmLAfoZucVR881uASk0Mfjw8xYJ99tb5CcY= +github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= github.com/uber/jaeger-client-go v2.22.1+incompatible h1:NHcubEkVbahf9t3p75TOCR83gdUHXjRJvjoBh1yACsM= github.com/uber/jaeger-client-go v2.22.1+incompatible/go.mod h1:WVhlPFC8FDjOFMMWRy2pZqQJSXxYSwNYOkTr/Z6d3Kk= github.com/uber/jaeger-lib v2.2.0+incompatible h1:MxZXOiR2JuoANZ3J6DE/U0kSFv/eJ/GfSYVCjK7dyaw= @@ -197,6 +200,8 @@ 
golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJ golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20210220032951-036812b2e83c h1:5KslGYwFpkhGh+Q16bwMP3cOontH8FOep7tGV86Y7SQ= +golang.org/x/sync v0.0.0-20210220032951-036812b2e83c/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20180905080454-ebe1bf3edb33/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20181116152217-5ac8a444bdc5/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= @@ -259,6 +264,8 @@ gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.2.4/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.3.0 h1:clyUAQHOM3G0M3f5vQj7LuJrETvjVot3Z5el9nffUtU= gopkg.in/yaml.v2 v2.3.0/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c h1:dUUwHk2QECo/6vqA44rthZ8ie2QXMNeKRTHCNY2nXvo= +gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= honnef.co/go/tools v0.0.0-20190102054323-c2f93a96b099/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= honnef.co/go/tools v0.0.0-20190523083050-ea95bdfd59fc/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= sourcegraph.com/sourcegraph/appdash v0.0.0-20180110180208-2cc67fd64755/go.mod h1:hI742Nqp5OhwiqlzhgfbWU4mW4yO10fP+LoT9WOswdU= diff --git a/merge.go b/merge.go index 562fec66..1f103073 100644 --- a/merge.go +++ b/merge.go @@ -97,19 +97,19 @@ func buildIsBoundaryMap(services ...*Service) map[string]bool { return result } -func buildBoundaryQueriesMap(services ...*Service) BoundaryQueriesMap { - result := make(BoundaryQueriesMap) +func buildBoundaryFieldsMap(services ...*Service) BoundaryFieldsMap { + result := make(BoundaryFieldsMap) for _, rs := range services { for _, f := range rs.Schema.Query.Fields { if isBoundaryField(f) { - queryType := f.Type.Name() + typeName := f.Type.Name() array := false if f.Type.Elem != nil { - queryType = f.Type.Elem.Name() + typeName = f.Type.Elem.Name() array = true } - result.RegisterQuery(rs.ServiceURL, queryType, f.Name, array) + result.RegisterField(rs.ServiceURL, typeName, f.Name, array) } } } diff --git a/plan.go b/plan.go index 3709548e..93605592 100644 --- a/plan.go +++ b/plan.go @@ -187,6 +187,9 @@ func extractSelectionSet(ctx *PlanningContext, insertionPoint []string, parentTy if err != nil { return nil, nil, err } + if !selectionSetHasFieldNamed(selectionSet, "__typename") { + selectionSet = append(selectionSet, &ast.Field{Alias: "__typename", Name: "__typename", Definition: &ast.FieldDefinition{Name: "__typename", Type: ast.NamedType("String", nil)}}) + } inlineFragment := *selection inlineFragment.SelectionSet = selectionSet selectionSetResult = append(selectionSetResult, &inlineFragment) @@ -203,6 +206,9 @@ func extractSelectionSet(ctx *PlanningContext, insertionPoint []string, parentTy if err != nil { return nil, nil, err } + if !selectionSetHasFieldNamed(selectionSet, "__typename") { + selectionSet = append(selectionSet, &ast.Field{Alias: "__typename", Name: "__typename", 
Definition: &ast.FieldDefinition{Name: "__typename", Type: ast.NamedType("String", nil)}}) + } inlineFragment := ast.InlineFragment{ TypeCondition: selection.Definition.TypeCondition, SelectionSet: selectionSet, @@ -363,35 +369,35 @@ func stringArraysEqual(a, b []string) bool { return true } -// BoundaryQuery contains the name and format for a boundary query -type BoundaryQuery struct { - Query string +// BoundaryField contains the name and format for a boundary query +type BoundaryField struct { + Field string // Whether the query is in the array format Array bool } -// BoundaryQueriesMap is a mapping service -> type -> boundary query -type BoundaryQueriesMap map[string]map[string]BoundaryQuery +// BoundaryFieldsMap is a mapping service -> type -> boundary query +type BoundaryFieldsMap map[string]map[string]BoundaryField -// RegisterQuery registers a boundary query -func (m BoundaryQueriesMap) RegisterQuery(serviceURL, typeName, query string, array bool) { +// RegisterField registers a boundary field +func (m BoundaryFieldsMap) RegisterField(serviceURL, typeName, field string, array bool) { if _, ok := m[serviceURL]; !ok { - m[serviceURL] = make(map[string]BoundaryQuery) + m[serviceURL] = make(map[string]BoundaryField) } - m[serviceURL][typeName] = BoundaryQuery{Query: query, Array: array} + m[serviceURL][typeName] = BoundaryField{Field: field, Array: array} } -// Query returns the boundary query for the given service and type -func (m BoundaryQueriesMap) Query(serviceURL, typeName string) BoundaryQuery { +// Query returns the boundary field for the given service and type +func (m BoundaryFieldsMap) Field(serviceURL, typeName string) BoundaryField { serviceMap, ok := m[serviceURL] if !ok { - return BoundaryQuery{Query: "node"} + return BoundaryField{Field: "node"} } query, ok := serviceMap[typeName] if !ok { - return BoundaryQuery{Query: "node"} + return BoundaryField{Field: "node"} } return query diff --git a/plan_test.go b/plan_test.go index b614be4f..92cbbfbf 100644 --- a/plan_test.go +++ b/plan_test.go @@ -262,7 +262,31 @@ func TestQueryPlanInlineFragment(t *testing.T) { { "ServiceURL": "A", "ParentType": "Query", - "SelectionSet": "{ movies { ... on Movie { id title(language: French) } } }", + "SelectionSet": "{ movies { ... on Movie { id title(language: French) __typename } } }", + "InsertionPoint": null, + "Then": null + } + ] + }` + PlanTestFixture1.Check(t, query, plan) +} + +func TestQueryPlanInlineFragmentDoesNotDuplicateTypename(t *testing.T) { + query := `{ + movies { + ... on Movie { + __typename + id + title(language: French) + } + } + }` + plan := `{ + "RootSteps": [ + { + "ServiceURL": "A", + "ParentType": "Query", + "SelectionSet": "{ movies { ... on Movie { __typename id title(language: French) } } }", "InsertionPoint": null, "Then": null } @@ -288,7 +312,7 @@ func TestQueryPlanInlineFragmentPlan(t *testing.T) { { "ServiceURL": "A", "ParentType": "Query", - "SelectionSet": "{ movies { _id: id ... on Movie { id title(language: French) } } }", + "SelectionSet": "{ movies { _id: id ... on Movie { id title(language: French) __typename } } }", "InsertionPoint": null, "Then": [ { @@ -321,7 +345,34 @@ func TestQueryPlanFragmentSpread1(t *testing.T) { { "ServiceURL": "A", "ParentType": "Query", - "SelectionSet": "{ movies { ... on Movie { id title(language: French) } } }", + "SelectionSet": "{ movies { ... 
on Movie { id title(language: French) __typename } } }", + "InsertionPoint": null, + "Then": null + } + ] + }` + + PlanTestFixture1.Check(t, query, plan) +} + +func TestQueryPlanFragmentSpread1DontDuplicateTypename(t *testing.T) { + query := ` + fragment Frag on Movie { + id + __typename + title(language: French) + } + { + movies { + ...Frag + } + }` + plan := `{ + "RootSteps": [ + { + "ServiceURL": "A", + "ParentType": "Query", + "SelectionSet": "{ movies { ... on Movie { id __typename title(language: French) } } }", "InsertionPoint": null, "Then": null } @@ -356,7 +407,6 @@ func TestQueryPlanFragmentSpread2(t *testing.T) { } func TestQueryPlanInlineFragmentSpreadOfInterface(t *testing.T) { - t.Skip("not supported at this time") query := ` { animals { @@ -374,16 +424,9 @@ func TestQueryPlanInlineFragmentSpreadOfInterface(t *testing.T) { { "ServiceURL": "A", "ParentType": "Query", - "SelectionSet": "{ animals { id name __typename }", + "SelectionSet": "{ animals { name ... on Lion { maneColor __typename } ... on Snake { _id: id __typename } } }", "InsertionPoint": null, "Then": [ - { - "ServiceURL": "A", - "ParentType": "Lion", - "SelectionSet": "{ _id: id maneColor }", - "InsertionPoint": ["animals"], - "Then": null - }, { "ServiceURL": "B", "ParentType": "Snake", @@ -513,7 +556,7 @@ func TestQueryPlanSupportsUnions(t *testing.T) { { "ServiceURL": "A", "ParentType": "Query", - "SelectionSet": "{ animals { ... on Dog { name } ... on Cat { name } ... on Snake { name } } }", + "SelectionSet": "{ animals { ... on Dog { name __typename } ... on Cat { name __typename } ... on Snake { name __typename } } }", "InsertionPoint": null, "Then": null } diff --git a/plugin.go b/plugin.go index 08ac6c6c..c8c84553 100644 --- a/plugin.go +++ b/plugin.go @@ -25,7 +25,7 @@ type Plugin interface { GraphqlQueryPath() (bool, string) ApplyMiddlewarePublicMux(http.Handler) http.Handler ApplyMiddlewarePrivateMux(http.Handler) http.Handler - ModifyExtensions(ctx context.Context, e *QueryExecution, extensions map[string]interface{}) error + ModifyExtensions(ctx context.Context, e *queryExecution, extensions map[string]interface{}) error } // BasePlugin is an empty plugin. It can be embedded by any plugin as a way to avoid @@ -62,7 +62,7 @@ func (p *BasePlugin) ApplyMiddlewarePrivateMux(h http.Handler) http.Handler { } // ModifyExtensions ... 
-func (p *BasePlugin) ModifyExtensions(ctx context.Context, e *QueryExecution, extensions map[string]interface{}) error { +func (p *BasePlugin) ModifyExtensions(ctx context.Context, e *queryExecution, extensions map[string]interface{}) error { return nil } diff --git a/query_execution.go b/query_execution.go new file mode 100644 index 00000000..e36bb300 --- /dev/null +++ b/query_execution.go @@ -0,0 +1,898 @@ +package bramble + +import ( + "context" + "encoding/json" + "errors" + "fmt" + "sort" + "strings" + "sync" + "sync/atomic" + + "github.com/vektah/gqlparser/v2/ast" + "github.com/vektah/gqlparser/v2/gqlerror" + "golang.org/x/sync/errgroup" +) + +var ( + errNullBubbledToRoot = errors.New("bubbleUpNullValuesInPlace: null bubbled up to root") +) + +type executionResult struct { + ServiceURL string + InsertionPoint []string + Data interface{} + Errors gqlerror.List +} + +type queryExecution struct { + ctx context.Context + schema *ast.Schema + requestCount int32 + maxRequest int32 + graphqlClient *GraphQLClient + boundaryFields BoundaryFieldsMap + + group *errgroup.Group + results chan executionResult +} + +func newQueryExecution(ctx context.Context, client *GraphQLClient, schema *ast.Schema, boundaryFields BoundaryFieldsMap, maxRequest int32) *queryExecution { + group, ctx := errgroup.WithContext(ctx) + return &queryExecution{ + ctx: ctx, + schema: schema, + graphqlClient: client, + boundaryFields: boundaryFields, + maxRequest: maxRequest, + group: group, + results: make(chan executionResult), + } +} + +func (q *queryExecution) Execute(queryPlan *QueryPlan) ([]executionResult, gqlerror.List) { + readWg := &sync.WaitGroup{} + results := []executionResult{} + + if len(queryPlan.RootSteps) > int(q.maxRequest) { + return nil, gqlerror.List{ + &gqlerror.Error{ + Message: fmt.Sprintf("exceeded max requests of %v", q.maxRequest), + }, + } + } + + for _, step := range queryPlan.RootSteps { + if step.ServiceURL == internalServiceName { + r, err := executeBrambleStep(step) + if err != nil { + return nil, q.createGQLErrors(step, err) + } + results = append(results, *r) + continue + } + + step := step + q.group.Go(func() error { + return q.executeRootStep(step) + }) + } + + readWg.Add(1) + go func() { + for result := range q.results { + results = append(results, result) + } + readWg.Done() + }() + + if err := q.group.Wait(); err != nil { + return nil, gqlerror.List{ + &gqlerror.Error{ + Message: err.Error(), + }, + } + } + close(q.results) + readWg.Wait() + return results, nil +} + +func (q *queryExecution) executeRootStep(step *QueryPlanStep) error { + var document string + if step.ParentType == "Query" { + document = "query " + formatSelectionSet(q.ctx, q.schema, step.SelectionSet) + } else if step.ParentType == "Mutation" { + document = "mutation " + formatSelectionSet(q.ctx, q.schema, step.SelectionSet) + } else { + return errors.New("non mutation or query root step") + } + + var data map[string]interface{} + + err := q.executeDocument(document, step.ServiceURL, &data) + if err != nil { + q.writeExecutionResult(step, data, err) + return nil + } + + q.writeExecutionResult(step, data, nil) + + for _, childStep := range step.Then { + boundaryIDs, err := extractAndDedupeBoundaryIDs(data, childStep.InsertionPoint) + if err != nil { + return err + } + if len(boundaryIDs) == 0 { + continue + } + + childStep := childStep + q.group.Go(func() error { + return q.executeChildStep(childStep, boundaryIDs) + }) + } + return nil +} + +func (q *queryExecution) executeDocument(document string, serviceURL string, 
response interface{}) error { + req := NewRequest(document). + WithHeaders(GetOutgoingRequestHeadersFromContext(q.ctx)) + return q.graphqlClient.Request(q.ctx, serviceURL, req, &response) +} + +func (q *queryExecution) writeExecutionResult(step *QueryPlanStep, data interface{}, err error) { + result := executionResult{ + ServiceURL: step.ServiceURL, + InsertionPoint: step.InsertionPoint, + Data: data, + } + if err != nil { + result.Errors = q.createGQLErrors(step, err) + } + + q.results <- result +} + +func (q *queryExecution) executeChildStep(step *QueryPlanStep, boundaryIDs []string) error { + atomic.AddInt32(&q.requestCount, 1) + if q.requestCount > q.maxRequest { + return fmt.Errorf("exceeded max requests of %v", q.maxRequest) + } + + boundaryField := q.boundaryFields.Field(step.ServiceURL, step.ParentType) + + documents, err := buildBoundaryQueryDocuments(q.ctx, q.schema, step, boundaryIDs, boundaryField, 50) + if err != nil { + return err + } + + data, err := q.executeBoundaryQuery(documents, step.ServiceURL, boundaryField) + if err != nil { + q.writeExecutionResult(step, data, err) + return nil + } + + q.writeExecutionResult(step, data, nil) + + if len(data) > 0 { + for _, childStep := range step.Then { + boundaryResultInsertionPoint, err := trimInsertionPointForNestedBoundaryStep(data, childStep.InsertionPoint) + if err != nil { + return err + } + boundaryIDs, err := extractAndDedupeBoundaryIDs(data, boundaryResultInsertionPoint) + if err != nil { + return err + } + if len(boundaryIDs) == 0 { + continue + } + childStep := childStep + q.group.Go(func() error { + return q.executeChildStep(childStep, boundaryIDs) + }) + } + } + + return nil +} + +func (q *queryExecution) executeBoundaryQuery(documents []string, serviceURL string, boundaryFieldGetter BoundaryField) ([]interface{}, error) { + output := make([]interface{}, 0) + if !boundaryFieldGetter.Array { + for _, document := range documents { + partialData := make(map[string]interface{}) + err := q.executeDocument(document, serviceURL, &partialData) + if err != nil { + return nil, err + } + for _, value := range partialData { + output = append(output, value) + } + } + return output, nil + } + + if len(documents) != 1 { + return nil, errors.New("there should only be a single document for array boundary field lookups") + } + + data := struct { + Result []interface{} `json:"_result"` + }{} + + err := q.executeDocument(documents[0], serviceURL, &data) + return data.Result, err +} + +func (q *queryExecution) createGQLErrors(step *QueryPlanStep, err error) gqlerror.List { + var path ast.Path + for _, p := range step.InsertionPoint { + path = append(path, ast.PathName(p)) + } + + var locs []gqlerror.Location + for _, f := range selectionSetToFields(step.SelectionSet) { + pos := f.GetPosition() + if pos == nil { + continue + } + locs = append(locs, gqlerror.Location{Line: pos.Line, Column: pos.Column}) + + // if the field has a subset it's part of the path + if len(f.SelectionSet) > 0 { + path = append(path, ast.PathName(f.Alias)) + } + } + + var gqlErr GraphqlErrors + var outputErrs gqlerror.List + if errors.As(err, &gqlErr) { + for _, ge := range gqlErr { + extensions := ge.Extensions + if extensions == nil { + extensions = make(map[string]interface{}) + } + extensions["selectionSet"] = formatSelectionSetSingleLine(q.ctx, q.schema, step.SelectionSet) + extensions["serviceName"] = step.ServiceName + extensions["serviceUrl"] = step.ServiceURL + + outputErrs = append(outputErrs, &gqlerror.Error{ + Message: ge.Message, + Path: path, + 
Locations: locs, + Extensions: extensions, + }) + } + return outputErrs + } else { + outputErrs = append(outputErrs, &gqlerror.Error{ + Message: err.Error(), + Path: path, + Locations: locs, + Extensions: map[string]interface{}{ + "selectionSet": formatSelectionSetSingleLine(q.ctx, q.schema, step.SelectionSet), + }, + }) + } + + return outputErrs +} + +// The insertionPoint represents the level a piece of data should be inserted at, relative to the root of the root step's data. +// However results from a boundary query only contain a portion of that tree. For example, you could +// have insertionPoint: ["foo", "bar", "movies". "movie", "compTitles"], with the below example as the boundary result we're +// crawling for ids: +// [ +// { +// "_id": "MOVIE1", +// "compTitles": [ +// { +// "_id": "1" +// } +// ] +// } +// ] +// +// We therefore cannot use the insertionPoint as is in order to extract the boundary ids for the next child step. +// This function trims the insertionPoint up until we find a key that exists in both the boundary result and insertionPoint. +// When a match is found, the remainder of the insertionPoint is used, which in this case is only ["compTitles"]. +// This logic is only needed when we are already in a child step, which itself contains it's own child steps. +func trimInsertionPointForNestedBoundaryStep(data []interface{}, childInsertionPoint []string) ([]string, error) { + if len(data) < 1 { + return nil, fmt.Errorf("no boundary results to process") + } + + firstBoundaryResult, ok := data[0].(map[string]interface{}) + if !ok { + return nil, fmt.Errorf("a single boundary result should be a map[string]interface{}") + } + for i, point := range childInsertionPoint { + _, ok := firstBoundaryResult[point] + if ok { + return childInsertionPoint[i:], nil + } + } + return nil, fmt.Errorf("could not find any insertion points inside boundary data") +} + +func executeBrambleStep(queryPlanStep *QueryPlanStep) (*executionResult, error) { + result, err := buildTypenameResponseMap(queryPlanStep.SelectionSet, queryPlanStep.ParentType) + if err != nil { + return nil, err + } + + return &executionResult{ + ServiceURL: internalServiceName, + InsertionPoint: []string{}, + Data: result, + }, nil +} + +func buildTypenameResponseMap(selectionSet ast.SelectionSet, parentTypeName string) (map[string]interface{}, error) { + result := make(map[string]interface{}) + for _, field := range selectionSetToFields(selectionSet) { + if field.SelectionSet != nil { + if field.Definition.Type.NamedType == "" { + return nil, fmt.Errorf("expected named type") + } + + var err error + result[field.Alias], err = buildTypenameResponseMap(field.SelectionSet, field.Definition.Type.Name()) + if err != nil { + return nil, err + } + } else { + if field.Name != "__typename" { + return nil, fmt.Errorf("expected __typename") + } + result[field.Alias] = parentTypeName + } + } + return result, nil +} + +func fragmentImplementsAbstractType(schema *ast.Schema, objectType, interfaceType string) bool { + for _, def := range schema.Implements[objectType] { + if def.Name == interfaceType { + return true + } + } + return false +} + +func extractAndDedupeBoundaryIDs(data interface{}, insertionPoint []string) ([]string, error) { + boundaryIDs, err := extractBoundaryIDs(data, insertionPoint) + if err != nil { + return nil, err + } + dedupeMap := make(map[string]struct{}, len(boundaryIDs)) + for _, boundaryID := range boundaryIDs { + dedupeMap[boundaryID] = struct{}{} + } + + deduped := make([]string, 0, len(boundaryIDs)) + for id 
:= range dedupeMap { + deduped = append(deduped, id) + } + + return sort.StringSlice(deduped), nil +} + +func extractBoundaryIDs(data interface{}, insertionPoint []string) ([]string, error) { + ptr := data + if ptr == nil { + return nil, nil + } + if len(insertionPoint) == 0 { + switch ptr := ptr.(type) { + case map[string]interface{}: + var id string + var ok bool + id, ok = ptr["_id"].(string) + if !ok { + id, ok = ptr["id"].(string) + } + if !ok { + return nil, errors.New("extractBoundaryIDs: unexpected missing '_id' or 'id' in map") + } + return []string{id}, nil + case []interface{}: + result := []string{} + for _, innerPtr := range ptr { + ids, err := extractBoundaryIDs(innerPtr, insertionPoint) + if err != nil { + return nil, err + } + result = append(result, ids...) + } + return result, nil + default: + return nil, fmt.Errorf("extractBoundaryIDs: unexpected type: %T", ptr) + } + } + switch ptr := ptr.(type) { + case map[string]interface{}: + if len(insertionPoint) == 1 { + return extractBoundaryIDs(ptr[insertionPoint[0]], nil) + } else { + return extractBoundaryIDs(ptr[insertionPoint[0]], insertionPoint[1:]) + } + case []interface{}: + result := []string{} + for _, innerPtr := range ptr { + ids, err := extractBoundaryIDs(innerPtr, insertionPoint) + if err != nil { + return nil, err + } + result = append(result, ids...) + } + return result, nil + default: + return nil, fmt.Errorf("extractBoundaryIDs: unexpected type: %T", ptr) + } +} + +func buildBoundaryQueryDocuments(ctx context.Context, schema *ast.Schema, step *QueryPlanStep, ids []string, parentTypeBoundaryField BoundaryField, batchSize int) ([]string, error) { + selectionSetQL := formatSelectionSetSingleLine(ctx, schema, step.SelectionSet) + if parentTypeBoundaryField.Array { + qids := []string{} + for _, id := range ids { + qids = append(qids, fmt.Sprintf("%q", id)) + } + idsQL := fmt.Sprintf("[%s]", strings.Join(qids, ", ")) + return []string{fmt.Sprintf(`{ _result: %s(ids: %s) %s }`, parentTypeBoundaryField.Field, idsQL, selectionSetQL)}, nil + } + + var ( + documents []string + selectionIndex int + ) + for _, batch := range batchBy(ids, batchSize) { + var selections []string + for _, id := range batch { + selection := fmt.Sprintf("%s: %s(id: %q) %s", nodeAlias(selectionIndex), parentTypeBoundaryField.Field, id, selectionSetQL) + selections = append(selections, selection) + selectionIndex++ + } + document := "{ " + strings.Join(selections, " ") + " }" + documents = append(documents, document) + } + + return documents, nil +} + +func batchBy(items []string, batchSize int) (batches [][]string) { + for batchSize < len(items) { + items, batches = items[batchSize:], append(batches, items[0:batchSize:batchSize]) + } + + return append(batches, items) +} + +func mergeExecutionResults(results []executionResult) (map[string]interface{}, error) { + if len(results) == 0 { + return nil, errors.New("mergeExecutionResults: nothing to merge") + } + + if len(results) == 1 { + data := results[0].Data + if data == nil { + return nil, nil + } + + dataMap, ok := data.(map[string]interface{}) + if !ok { + return nil, fmt.Errorf("a complete graphql response should be map[string]interface{}, got %T", results[0].Data) + } + return dataMap, nil + } + + data := results[0].Data + for _, result := range results[1:] { + if err := mergeExecutionResultsRec(result.Data, data, result.InsertionPoint); err != nil { + return nil, err + } + } + + dataMap, ok := data.(map[string]interface{}) + if !ok { + return nil, fmt.Errorf("merged execution results should be 
map[string]interface{}, got %T", data) + } + + return dataMap, nil +} + +func mergeExecutionResultsRec(src interface{}, dst interface{}, insertionPoint []string) error { + // base case + if len(insertionPoint) == 0 { + switch ptr := dst.(type) { + case nil: + return nil + case map[string]interface{}: + switch src := src.(type) { + // base case for root step merging + case map[string]interface{}: + mergeMaps(ptr, src) + + // base case for children step merging + case []interface{}: + boundaryResults, err := getBoundaryFieldResults(src) + if err != nil { + return err + } + + dstID, err := boundaryIDFromMap(ptr) + if err != nil { + return err + } + + for _, result := range boundaryResults { + srcID, err := boundaryIDFromMap(result) + if err != nil { + return err + } + if srcID == dstID { + for k, v := range result { + if k == "_id" || k == "id" { + continue + } + + ptr[k] = v + } + } + } + + } + case []interface{}: + for _, innerPtr := range ptr { + if err := mergeExecutionResultsRec(src, innerPtr, insertionPoint); err != nil { + return err + } + } + default: + return fmt.Errorf("mergeExecutionResultsRec: unxpected type '%T' for top-level merge", ptr) + } + return nil + } + + // recursive case + switch ptr := dst.(type) { + case map[string]interface{}: + switch ptr := ptr[insertionPoint[0]].(type) { + case []interface{}: + for _, innerPtr := range ptr { + if err := mergeExecutionResultsRec(src, innerPtr, insertionPoint[1:]); err != nil { + return err + } + } + default: + if err := mergeExecutionResultsRec(src, ptr, insertionPoint[1:]); err != nil { + return err + } + } + case []interface{}: + for _, innerPtr := range ptr { + if err := mergeExecutionResultsRec(src, innerPtr, insertionPoint); err != nil { + return err + } + } + default: + return fmt.Errorf("mergeExecutionResultsRec: unxpected type '%T' for non top-level merge", ptr) + } + return nil +} + +func boundaryIDFromMap(boundaryMap map[string]interface{}) (string, error) { + id, ok := boundaryMap["_id"].(string) + if ok { + return id, nil + } + id, ok = boundaryMap["id"].(string) + if ok { + return id, nil + } + return "", errors.New("boundaryIDFromMap: 'id' or '_id' not found") +} + +func getBoundaryFieldResults(src []interface{}) ([]map[string]interface{}, error) { + var results []map[string]interface{} + for i, element := range src { + if element == nil { + continue + } + elementMap, ok := element.(map[string]interface{}) + if !ok { + return nil, fmt.Errorf("getBoundaryFieldResults: expect value at index %d to be map[string]interface{}' but got '%T'", i, element) + } + results = append(results, elementMap) + } + return results, nil +} + +// bubbleUpNullValuesInPlace checks for expected null values (as per schema) and bubbles them up if needed, and checks for +// unexpected null values and returns errors for each (these unexpected nulls are also bubbled up). 
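As a reference for how the merge step above behaves, here is a minimal in-package, test-style sketch (the service URLs, field names and values are illustrative, not taken from the real fixtures): a child step's boundary results are matched to their destination objects by `_id`/`id` at the insertion point, and their remaining fields are copied over.

```go
package bramble

import (
	"testing"

	"github.com/stretchr/testify/require"
)

func TestMergeExecutionResultsSketch(t *testing.T) {
	// Root step result: the first entry is always the base the others are merged into.
	rootResult := executionResult{
		ServiceURL: "https://movies.example/query",
		Data: map[string]interface{}{
			"movies": []interface{}{
				map[string]interface{}{"_id": "MOVIE1", "title": "Solaris"},
			},
		},
	}
	// Child step result: a flat list of boundary objects, located via InsertionPoint
	// and matched to the destination movie by its "_id".
	childResult := executionResult{
		ServiceURL:     "https://comptitles.example/query",
		InsertionPoint: []string{"movies"},
		Data: []interface{}{
			map[string]interface{}{"_id": "MOVIE1", "compTitles": []interface{}{}},
		},
	}

	merged, err := mergeExecutionResults([]executionResult{rootResult, childResult})
	require.NoError(t, err)

	movie := merged["movies"].([]interface{})[0].(map[string]interface{})
	require.Equal(t, "Solaris", movie["title"])
	require.Contains(t, movie, "compTitles") // copied from the child result; "_id" is not overwritten
}
```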
+// See https://spec.graphql.org/June2018/#sec-Errors-and-Non-Nullability +func bubbleUpNullValuesInPlace(schema *ast.Schema, selectionSet ast.SelectionSet, result map[string]interface{}) ([]*gqlerror.Error, error) { + errs, bubbleUp, err := bubbleUpNullValuesInPlaceRec(schema, nil, selectionSet, result, ast.Path{}) + if err != nil { + return nil, err + } + if bubbleUp { + return errs, errNullBubbledToRoot + } + return errs, nil +} + +func bubbleUpNullValuesInPlaceRec(schema *ast.Schema, currentType *ast.Type, selectionSet ast.SelectionSet, result interface{}, path ast.Path) (errs []*gqlerror.Error, bubbleUp bool, err error) { + switch result := result.(type) { + case map[string]interface{}: + for _, selection := range selectionSet { + switch selection := selection.(type) { + case *ast.Field: + field := selection + if strings.HasPrefix(field.Name, "__") { + continue + } + value := result[field.Alias] + if value == nil { + if field.Definition.Type.NonNull { + errs = append(errs, &gqlerror.Error{ + Message: fmt.Sprintf("got a null response for non-nullable field %q", field.Alias), + Path: append(path, ast.PathName(field.Alias)), + Extensions: nil, + }) + bubbleUp = true + } + return + } + if field.SelectionSet != nil { + lowerErrs, lowerBubbleUp, lowerErr := bubbleUpNullValuesInPlaceRec(schema, field.Definition.Type, field.SelectionSet, value, append(path, ast.PathName(field.Alias))) + if lowerErr != nil { + return nil, false, lowerErr + } + if lowerBubbleUp { + if field.Definition.Type.NonNull { + bubbleUp = true + } else { + result[field.Alias] = nil + } + } + errs = append(errs, lowerErrs...) + } + case *ast.FragmentSpread: + fragment := selection + typename, ok := result["__typename"].(string) + if !ok { + return nil, false, errors.New("missing expected __typename") + } + if typename != fragment.Definition.TypeCondition && !fragmentImplementsAbstractType(schema, typename, fragment.Definition.TypeCondition) { + continue + } + lowerErrs, lowerBubbleUp, lowerErr := bubbleUpNullValuesInPlaceRec(schema, nil, fragment.Definition.SelectionSet, result, path) + if lowerErr != nil { + return nil, false, lowerErr + } + bubbleUp = lowerBubbleUp + errs = append(errs, lowerErrs...) + case *ast.InlineFragment: + fragment := selection + typename, ok := result["__typename"].(string) + if !ok { + return nil, false, errors.New("missing expected __typename") + } + if typename != fragment.TypeCondition && !fragmentImplementsAbstractType(schema, typename, fragment.TypeCondition) { + continue + } + lowerErrs, lowerBubbleUp, lowerErr := bubbleUpNullValuesInPlaceRec(schema, nil, fragment.SelectionSet, result, path) + if lowerErr != nil { + return nil, false, lowerErr + } + bubbleUp = lowerBubbleUp + errs = append(errs, lowerErrs...) + default: + err = fmt.Errorf("unknown selection type: %T", selection) + return + } + } + case []interface{}: + for i, value := range result { + pathWithIndex := appendPathIndex(path, i) + lowerErrs, lowerBubbleUp, lowerErr := bubbleUpNullValuesInPlaceRec(schema, currentType, selectionSet, value, pathWithIndex) + if lowerErr != nil { + return nil, false, lowerErr + } + if lowerBubbleUp { + if currentType.Elem.NonNull { + bubbleUp = true + } else { + result[i] = nil + } + } + errs = append(errs, lowerErrs...) 
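To make the bubbling rules above concrete, here is a minimal in-package sketch using an illustrative schema (not one of the real test fixtures): a null in a non-nullable leaf is reported as an error and nulls out the nearest nullable ancestor in place, while the call itself still succeeds because the null stops before the root.

```go
package bramble

import (
	"testing"

	"github.com/stretchr/testify/require"
	"github.com/vektah/gqlparser/v2"
	"github.com/vektah/gqlparser/v2/ast"
)

func TestBubbleUpNullValuesSketch(t *testing.T) {
	schema := gqlparser.MustLoadSchema(&ast.Source{Input: `
	type Query {
		movie: Movie
	}
	type Movie {
		id: ID!
		title: String!
	}`})
	document := gqlparser.MustLoadQuery(schema, `{ movie { id title } }`)

	// "title" is non-nullable, so its null must not survive as-is.
	result := map[string]interface{}{
		"movie": map[string]interface{}{"id": "1", "title": nil},
	}

	errs, err := bubbleUpNullValuesInPlace(schema, document.Operations[0].SelectionSet, result)
	require.NoError(t, err)        // the null is absorbed by the nullable "movie" field, nothing bubbles to the root
	require.Len(t, errs, 1)        // one error, with path ["movie", "title"]
	require.Nil(t, result["movie"]) // the enclosing movie object was nulled in place
}
```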
+ } + case []map[string]interface{}: + for i, value := range result { + pathWithIndex := appendPathIndex(path, i) + lowerErrs, lowerBubbleUp, lowerErr := bubbleUpNullValuesInPlaceRec(schema, currentType, selectionSet, value, pathWithIndex) + if lowerErr != nil { + return nil, false, lowerErr + } + if lowerBubbleUp { + if currentType.Elem.NonNull { + bubbleUp = true + } else { + result[i] = nil + } + } + errs = append(errs, lowerErrs...) + } + default: + return nil, false, fmt.Errorf("bubbleUpNullValuesInPlaceRec: unxpected result type '%T'", result) + } + return +} + +func appendPathIndex(path []ast.PathElement, index int) []ast.PathElement { + pathCopy := make([]ast.PathElement, len(path)) + copy(pathCopy, path) + return append(pathCopy, ast.PathIndex(index)) +} + +func formatResponseData(schema *ast.Schema, selectionSet ast.SelectionSet, result map[string]interface{}) (string, error) { + return formatResponseDataRec(schema, selectionSet, result, false) +} + +func formatResponseDataRec(schema *ast.Schema, selectionSet ast.SelectionSet, result interface{}, insideFragment bool) (string, error) { + var builder strings.Builder + if result == nil { + return "null", nil + } + switch result := result.(type) { + case map[string]interface{}: + if len(result) == 0 { + return "null", nil + } + if !insideFragment { + builder.WriteString("{") + } + + objectTypename, _ := result["__typename"].(string) + filteredSelectionSet, err := unionAndTrimSelectionSet(objectTypename, schema, selectionSet) + if err != nil { + return "", err + } + + for i, selection := range filteredSelectionSet { + switch selection := selection.(type) { + case *ast.InlineFragment: + innerBody, err := formatResponseDataRec(schema, selection.SelectionSet, result, true) + if err != nil { + return "", err + } + builder.WriteString(innerBody) + + case *ast.FragmentSpread: + innerBody, err := formatResponseDataRec(schema, selection.Definition.SelectionSet, result, true) + if err != nil { + return "", err + } + builder.WriteString(innerBody) + case *ast.Field: + field := selection + fieldData, ok := result[field.Alias] + builder.WriteString(fmt.Sprintf(`"%s":`, field.Alias)) + if !ok { + builder.WriteString("null") + if i < len(filteredSelectionSet)-1 { + builder.WriteString(",") + } + continue + } + if field.SelectionSet != nil && len(field.SelectionSet) > 0 { + innerBody, err := formatResponseDataRec(schema, field.SelectionSet, fieldData, false) + if err != nil { + return "", err + } + builder.WriteString(innerBody) + } else { + fieldJSON, err := json.Marshal(&fieldData) + if err != nil { + return "", err + } + + builder.Write(fieldJSON) + } + } + if i < len(filteredSelectionSet)-1 { + builder.WriteString(",") + } + } + if !insideFragment { + builder.WriteString("}") + } + case []interface{}: + builder.WriteString("[") + for i, v := range result { + innerBody, err := formatResponseDataRec(schema, selectionSet, v, false) + if err != nil { + return "", err + } + builder.WriteString(innerBody) + + if i < len(result)-1 { + builder.WriteString(",") + } + } + builder.WriteString("]") + case []map[string]interface{}: + builder.WriteString("[") + for i, v := range result { + innerBody, err := formatResponseDataRec(schema, selectionSet, v, false) + if err != nil { + return "", err + } + builder.WriteString(innerBody) + + if i < len(result)-1 { + builder.WriteString(",") + } + } + builder.WriteString("]") + } + + return builder.String(), nil +} + +// When formatting the response data, the shape of the selection set has to potentially be modified 
to more closely resemble the shape +// of the response. This only happens when running into fragments, there are two cases we need to deal with: +// 1. the selection set of the target fragment has to be unioned with the selection set at the level for which the target fragment is referenced +// 2. if the target fragments are an implementation of an abstract type, we need to use the __typename from the response body to check which +// implementation was resolved. Any fragments that do not match are dropped from the selection set. +func unionAndTrimSelectionSet(objectTypename string, schema *ast.Schema, selectionSet ast.SelectionSet) (ast.SelectionSet, error) { + return unionAndTrimSelectionSetRec(objectTypename, schema, selectionSet, map[string]bool{}) +} + +func unionAndTrimSelectionSetRec(objectTypename string, schema *ast.Schema, selectionSet ast.SelectionSet, seenFields map[string]bool) (ast.SelectionSet, error) { + var filteredSelectionSet ast.SelectionSet + for _, selection := range selectionSet { + switch selection := selection.(type) { + case *ast.Field: + if seenFields[selection.Alias] { + continue + } + seenFields[selection.Alias] = true + filteredSelectionSet = append(filteredSelectionSet, selection) + case *ast.InlineFragment: + fragment := selection + if objectTypename == "" { + return nil, errors.New("__typename must have been injected when dealing with fragments, check the planner is doing the right thing") + } + + if fragment.ObjectDefinition.IsAbstractType() && + fragmentImplementsAbstractType(schema, objectTypename, fragment.ObjectDefinition.Name) && + objectTypenameMatchesDifferentFragment(objectTypename, fragment) { + continue + } + + filteredSelections, err := unionAndTrimSelectionSetRec(objectTypename, schema, fragment.SelectionSet, seenFields) + if err != nil { + return nil, err + } + if len(filteredSelections) > 0 { + fragment.SelectionSet = filteredSelections + filteredSelectionSet = append(filteredSelectionSet, selection) + } + case *ast.FragmentSpread: + filteredSelectionSet = append(filteredSelectionSet, selection) + } + } + + return filteredSelectionSet, nil +} + +func objectTypenameMatchesDifferentFragment(typename string, fragment *ast.InlineFragment) bool { + return fragment.TypeCondition != typename +} From 8ab21e39bd0dcbb571ccd6b6f2dae519fa74ddf4 Mon Sep 17 00:00:00 2001 From: Lucian Jones Date: Mon, 27 Sep 2021 14:24:34 +1300 Subject: [PATCH 02/13] Use byte buffer directly when building response * avoids extra allocations from requiring casting back to []byte --- execution.go | 2 +- execution_test.go | 14 +++++------ query_execution.go | 63 +++++++++++++++++++++++----------------------- 3 files changed, 40 insertions(+), 39 deletions(-) diff --git a/execution.go b/execution.go index aa5489b2..d7134f6c 100644 --- a/execution.go +++ b/execution.go @@ -271,7 +271,7 @@ func (s *ExecutableSchema) ExecuteQuery(ctx context.Context) *graphql.Response { } return &graphql.Response{ - Data: []byte(formattedResponse), + Data: formattedResponse, Errors: errs, } } diff --git a/execution_test.go b/execution_test.go index eac9f152..1dc929dd 100644 --- a/execution_test.go +++ b/execution_test.go @@ -3027,7 +3027,7 @@ func TestFormatResponseBody(t *testing.T) { document := gqlparser.MustLoadQuery(schema, query) bodyJSON, err := formatResponseData(schema, document.Operations[0].SelectionSet, result) require.NoError(t, err) - require.JSONEq(t, expectedJSON, bodyJSON) + require.JSONEq(t, expectedJSON, string(bodyJSON)) }) t.Run("null data", func(t *testing.T) { @@ -3084,7 
+3084,7 @@ func TestFormatResponseBody(t *testing.T) { document := gqlparser.MustLoadQuery(schema, query) bodyJSON, err := formatResponseData(schema, document.Operations[0].SelectionSet, result) require.NoError(t, err) - require.JSONEq(t, expectedJSON, bodyJSON) + require.JSONEq(t, expectedJSON, string(bodyJSON)) }) t.Run("simple response with errors", func(t *testing.T) { @@ -3140,7 +3140,7 @@ func TestFormatResponseBody(t *testing.T) { document := gqlparser.MustLoadQuery(schema, query) bodyJSON, err := formatResponseData(schema, document.Operations[0].SelectionSet, result) require.NoError(t, err) - require.JSONEq(t, expectedJSON, bodyJSON) + require.JSONEq(t, expectedJSON, string(bodyJSON)) }) t.Run("field selection overlaps with fragment selection", func(t *testing.T) { @@ -3218,7 +3218,7 @@ func TestFormatResponseBody(t *testing.T) { document := gqlparser.MustLoadQuery(schema, query) bodyJSON, err := formatResponseData(schema, document.Operations[0].SelectionSet, result) require.NoError(t, err) - require.JSONEq(t, expectedJSON, bodyJSON) + require.JSONEq(t, expectedJSON, string(bodyJSON)) }) t.Run("field selection entirely overlaps with fragment selection", func(t *testing.T) { @@ -3284,7 +3284,7 @@ func TestFormatResponseBody(t *testing.T) { document := gqlparser.MustLoadQuery(schema, query) bodyJSON, err := formatResponseData(schema, document.Operations[0].SelectionSet, result) require.NoError(t, err) - require.JSONEq(t, expectedJSON, bodyJSON) + require.JSONEq(t, expectedJSON, string(bodyJSON)) }) t.Run("multiple implementation fragment spreads", func(t *testing.T) { @@ -3359,7 +3359,7 @@ func TestFormatResponseBody(t *testing.T) { document := gqlparser.MustLoadQuery(schema, query) bodyJSON, err := formatResponseData(schema, document.Operations[0].SelectionSet, result) require.NoError(t, err) - require.JSONEq(t, expectedJSON, bodyJSON) + require.JSONEq(t, expectedJSON, string(bodyJSON)) }) t.Run("multiple implementation fragment spreads (bottom fragment matches)", func(t *testing.T) { @@ -3436,7 +3436,7 @@ func TestFormatResponseBody(t *testing.T) { document := gqlparser.MustLoadQuery(schema, query) bodyJSON, err := formatResponseData(schema, document.Operations[0].SelectionSet, result) require.NoError(t, err) - require.JSONEq(t, expectedJSON, bodyJSON) + require.JSONEq(t, expectedJSON, string(bodyJSON)) }) } diff --git a/query_execution.go b/query_execution.go index e36bb300..73a2f826 100644 --- a/query_execution.go +++ b/query_execution.go @@ -1,6 +1,7 @@ package bramble import ( + "bytes" "context" "encoding/json" "errors" @@ -741,28 +742,28 @@ func appendPathIndex(path []ast.PathElement, index int) []ast.PathElement { return append(pathCopy, ast.PathIndex(index)) } -func formatResponseData(schema *ast.Schema, selectionSet ast.SelectionSet, result map[string]interface{}) (string, error) { +func formatResponseData(schema *ast.Schema, selectionSet ast.SelectionSet, result map[string]interface{}) ([]byte, error) { return formatResponseDataRec(schema, selectionSet, result, false) } -func formatResponseDataRec(schema *ast.Schema, selectionSet ast.SelectionSet, result interface{}, insideFragment bool) (string, error) { - var builder strings.Builder +func formatResponseDataRec(schema *ast.Schema, selectionSet ast.SelectionSet, result interface{}, insideFragment bool) ([]byte, error) { + var buf bytes.Buffer if result == nil { - return "null", nil + return []byte("null"), nil } switch result := result.(type) { case map[string]interface{}: if len(result) == 0 { - return "null", nil + 
return []byte("null"), nil } if !insideFragment { - builder.WriteString("{") + buf.WriteString("{") } objectTypename, _ := result["__typename"].(string) filteredSelectionSet, err := unionAndTrimSelectionSet(objectTypename, schema, selectionSet) if err != nil { - return "", err + return []byte{}, err } for i, selection := range filteredSelectionSet { @@ -770,80 +771,80 @@ func formatResponseDataRec(schema *ast.Schema, selectionSet ast.SelectionSet, re case *ast.InlineFragment: innerBody, err := formatResponseDataRec(schema, selection.SelectionSet, result, true) if err != nil { - return "", err + return []byte{}, err } - builder.WriteString(innerBody) + buf.Write(innerBody) case *ast.FragmentSpread: innerBody, err := formatResponseDataRec(schema, selection.Definition.SelectionSet, result, true) if err != nil { - return "", err + return []byte{}, err } - builder.WriteString(innerBody) + buf.Write(innerBody) case *ast.Field: field := selection fieldData, ok := result[field.Alias] - builder.WriteString(fmt.Sprintf(`"%s":`, field.Alias)) + buf.WriteString(fmt.Sprintf(`"%s":`, field.Alias)) if !ok { - builder.WriteString("null") + buf.WriteString("null") if i < len(filteredSelectionSet)-1 { - builder.WriteString(",") + buf.WriteString(",") } continue } if field.SelectionSet != nil && len(field.SelectionSet) > 0 { innerBody, err := formatResponseDataRec(schema, field.SelectionSet, fieldData, false) if err != nil { - return "", err + return []byte{}, err } - builder.WriteString(innerBody) + buf.Write(innerBody) } else { fieldJSON, err := json.Marshal(&fieldData) if err != nil { - return "", err + return []byte{}, err } - builder.Write(fieldJSON) + buf.Write(fieldJSON) } } if i < len(filteredSelectionSet)-1 { - builder.WriteString(",") + buf.WriteString(",") } } if !insideFragment { - builder.WriteString("}") + buf.WriteString("}") } case []interface{}: - builder.WriteString("[") + buf.WriteString("[") for i, v := range result { innerBody, err := formatResponseDataRec(schema, selectionSet, v, false) if err != nil { - return "", err + return []byte{}, err } - builder.WriteString(innerBody) + buf.Write(innerBody) if i < len(result)-1 { - builder.WriteString(",") + buf.WriteString(",") } } - builder.WriteString("]") + buf.WriteString("]") case []map[string]interface{}: - builder.WriteString("[") + buf.WriteString("[") for i, v := range result { innerBody, err := formatResponseDataRec(schema, selectionSet, v, false) if err != nil { - return "", err + return []byte{}, err } - builder.WriteString(innerBody) + buf.Write(innerBody) if i < len(result)-1 { - builder.WriteString(",") + buf.WriteString(",") } } - builder.WriteString("]") + buf.WriteString("]") } - return builder.String(), nil + return buf.Bytes(), nil } // When formatting the response data, the shape of the selection set has to potentially be modified to more closely resemble the shape From a10ce3507f2bb13715b112546ad8e6f187b5073b Mon Sep 17 00:00:00 2001 From: Lucian Jones Date: Mon, 27 Sep 2021 14:36:10 +1300 Subject: [PATCH 03/13] Reword some error messages --- query_execution.go | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/query_execution.go b/query_execution.go index 73a2f826..7420ad9f 100644 --- a/query_execution.go +++ b/query_execution.go @@ -107,7 +107,7 @@ func (q *queryExecution) executeRootStep(step *QueryPlanStep) error { } else if step.ParentType == "Mutation" { document = "mutation " + formatSelectionSet(q.ctx, q.schema, step.SelectionSet) } else { - return errors.New("non mutation or query root 
step") + return errors.New("expected mutation or query root step") } var data map[string]interface{} @@ -337,7 +337,7 @@ func buildTypenameResponseMap(selectionSet ast.SelectionSet, parentTypeName stri for _, field := range selectionSetToFields(selectionSet) { if field.SelectionSet != nil { if field.Definition.Type.NamedType == "" { - return nil, fmt.Errorf("expected named type") + return nil, fmt.Errorf("buildTypenameResponseMap: expected named type") } var err error @@ -347,7 +347,7 @@ func buildTypenameResponseMap(selectionSet ast.SelectionSet, parentTypeName stri } } else { if field.Name != "__typename" { - return nil, fmt.Errorf("expected __typename") + return nil, fmt.Errorf("buildTypenameResponseMap: expected __typename") } result[field.Alias] = parentTypeName } @@ -869,7 +869,7 @@ func unionAndTrimSelectionSetRec(objectTypename string, schema *ast.Schema, sele case *ast.InlineFragment: fragment := selection if objectTypename == "" { - return nil, errors.New("__typename must have been injected when dealing with fragments, check the planner is doing the right thing") + return nil, errors.New("unionAndTrimSelectionSetRec: expected __typename") } if fragment.ObjectDefinition.IsAbstractType() && From a1bd395be0f3faebc6e65eab031573c7f0e038a8 Mon Sep 17 00:00:00 2001 From: Lucian Jones Date: Mon, 27 Sep 2021 14:38:06 +1300 Subject: [PATCH 04/13] Use unionAndTrimSelectionSet when bubbling nulls The algorithms for formatting the response and bubbling up nulls are very similar. Using unionAndTrimSelectionSet means they now operate on the same selection set too. --- query_execution.go | 23 ++++++++--------------- 1 file changed, 8 insertions(+), 15 deletions(-) diff --git a/query_execution.go b/query_execution.go index 7420ad9f..c447dc41 100644 --- a/query_execution.go +++ b/query_execution.go @@ -630,7 +630,14 @@ func bubbleUpNullValuesInPlace(schema *ast.Schema, selectionSet ast.SelectionSet func bubbleUpNullValuesInPlaceRec(schema *ast.Schema, currentType *ast.Type, selectionSet ast.SelectionSet, result interface{}, path ast.Path) (errs []*gqlerror.Error, bubbleUp bool, err error) { switch result := result.(type) { case map[string]interface{}: - for _, selection := range selectionSet { + typename, _ := result["__typename"].(string) + filteredSelectionSet, unionErr := unionAndTrimSelectionSet(typename, schema, selectionSet) + if err != nil { + err = unionErr + return + } + + for _, selection := range filteredSelectionSet { switch selection := selection.(type) { case *ast.Field: field := selection @@ -665,13 +672,6 @@ func bubbleUpNullValuesInPlaceRec(schema *ast.Schema, currentType *ast.Type, sel } case *ast.FragmentSpread: fragment := selection - typename, ok := result["__typename"].(string) - if !ok { - return nil, false, errors.New("missing expected __typename") - } - if typename != fragment.Definition.TypeCondition && !fragmentImplementsAbstractType(schema, typename, fragment.Definition.TypeCondition) { - continue - } lowerErrs, lowerBubbleUp, lowerErr := bubbleUpNullValuesInPlaceRec(schema, nil, fragment.Definition.SelectionSet, result, path) if lowerErr != nil { return nil, false, lowerErr @@ -680,13 +680,6 @@ func bubbleUpNullValuesInPlaceRec(schema *ast.Schema, currentType *ast.Type, sel errs = append(errs, lowerErrs...) 
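The fragment handling this patch relies on can be illustrated with a small in-package sketch (illustrative interface schema, not a real fixture): because unionAndTrimSelectionSet drops fragments whose type condition does not match the response's __typename, only the matching fragment's non-null fields can trigger bubbling, which is why the per-fragment __typename checks above become redundant.

```go
package bramble

import (
	"testing"

	"github.com/stretchr/testify/require"
	"github.com/vektah/gqlparser/v2"
	"github.com/vektah/gqlparser/v2/ast"
)

func TestBubbleUpOnlyChecksMatchingFragmentSketch(t *testing.T) {
	schema := gqlparser.MustLoadSchema(&ast.Source{Input: `
	type Query {
		animals: [Animal]
	}
	interface Animal {
		name: String!
	}
	type Lion implements Animal {
		name: String!
		maneColor: String!
	}
	type Snake implements Animal {
		name: String!
		venomous: Boolean!
	}`})
	document := gqlparser.MustLoadQuery(schema, `{
		animals {
			name
			... on Lion { maneColor }
			... on Snake { venomous }
		}
	}`)

	// The object resolved to a Lion, so the absent "venomous" field is fine: the Snake
	// fragment is trimmed away. The null maneColor, however, violates the schema.
	result := map[string]interface{}{
		"animals": []interface{}{
			map[string]interface{}{"__typename": "Lion", "name": "Simba", "maneColor": nil},
		},
	}

	errs, err := bubbleUpNullValuesInPlace(schema, document.Operations[0].SelectionSet, result)
	require.NoError(t, err)
	require.Len(t, errs, 1)                                 // only maneColor is reported
	require.Equal(t, []interface{}{nil}, result["animals"]) // the Lion element is nulled, the nullable list survives
}
```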
case *ast.InlineFragment: fragment := selection - typename, ok := result["__typename"].(string) - if !ok { - return nil, false, errors.New("missing expected __typename") - } - if typename != fragment.TypeCondition && !fragmentImplementsAbstractType(schema, typename, fragment.TypeCondition) { - continue - } lowerErrs, lowerBubbleUp, lowerErr := bubbleUpNullValuesInPlaceRec(schema, nil, fragment.SelectionSet, result, path) if lowerErr != nil { return nil, false, lowerErr From a55c7863e4dd9242727db332d0aa4439ec5406d2 Mon Sep 17 00:00:00 2001 From: Lucian Jones Date: Mon, 27 Sep 2021 14:42:58 +1300 Subject: [PATCH 05/13] Create function to extract __typename --- query_execution.go | 15 ++++++++++++--- 1 file changed, 12 insertions(+), 3 deletions(-) diff --git a/query_execution.go b/query_execution.go index c447dc41..4290b9e2 100644 --- a/query_execution.go +++ b/query_execution.go @@ -630,8 +630,8 @@ func bubbleUpNullValuesInPlace(schema *ast.Schema, selectionSet ast.SelectionSet func bubbleUpNullValuesInPlaceRec(schema *ast.Schema, currentType *ast.Type, selectionSet ast.SelectionSet, result interface{}, path ast.Path) (errs []*gqlerror.Error, bubbleUp bool, err error) { switch result := result.(type) { case map[string]interface{}: - typename, _ := result["__typename"].(string) - filteredSelectionSet, unionErr := unionAndTrimSelectionSet(typename, schema, selectionSet) + objectTypename := extractAndCastTypenameField(result) + filteredSelectionSet, unionErr := unionAndTrimSelectionSet(objectTypename, schema, selectionSet) if err != nil { err = unionErr return @@ -753,7 +753,7 @@ func formatResponseDataRec(schema *ast.Schema, selectionSet ast.SelectionSet, re buf.WriteString("{") } - objectTypename, _ := result["__typename"].(string) + objectTypename := extractAndCastTypenameField(result) filteredSelectionSet, err := unionAndTrimSelectionSet(objectTypename, schema, selectionSet) if err != nil { return []byte{}, err @@ -887,6 +887,15 @@ func unionAndTrimSelectionSetRec(objectTypename string, schema *ast.Schema, sele return filteredSelectionSet, nil } +func extractAndCastTypenameField(result map[string]interface{}) string { + typeNameInterface, ok := result["__typename"] + if !ok { + return "" + } + + return typeNameInterface.(string) +} + func objectTypenameMatchesDifferentFragment(typename string, fragment *ast.InlineFragment) bool { return fragment.TypeCondition != typename } From 002dc9c38b6acaa79ca607189cafaaaa5d8e317d Mon Sep 17 00:00:00 2001 From: Lucian Jones Date: Mon, 27 Sep 2021 14:54:12 +1300 Subject: [PATCH 06/13] Prefer array based boundary field lookup --- plan.go | 6 ++++++ plan_test.go | 11 +++++++++++ 2 files changed, 17 insertions(+) diff --git a/plan.go b/plan.go index 93605592..529ae43a 100644 --- a/plan.go +++ b/plan.go @@ -385,6 +385,12 @@ func (m BoundaryFieldsMap) RegisterField(serviceURL, typeName, field string, arr m[serviceURL] = make(map[string]BoundaryField) } + // We prefer to use the array based boundary lookup + _, exists := m[serviceURL][typeName] + if exists && !array { + return + } + m[serviceURL][typeName] = BoundaryField{Field: field, Array: array} } diff --git a/plan_test.go b/plan_test.go index 92cbbfbf..e190a32c 100644 --- a/plan_test.go +++ b/plan_test.go @@ -2,6 +2,8 @@ package bramble import ( "testing" + + "github.com/stretchr/testify/require" ) func TestQueryPlanA(t *testing.T) { @@ -745,6 +747,15 @@ func TestQueryPlanWithNestedNamespaces(t *testing.T) { `) } +func TestPrefersArrayBasedBoundaryLookups(t *testing.T) { + boundaryFieldMap := 
make(BoundaryFieldsMap) + boundaryFieldMap.RegisterField("service-a", "movie", "_movie", true) + boundaryFieldMap.RegisterField("service-a", "movie", "_movies", false) + + boundaryField := boundaryFieldMap.Field("service-a", "movie") + require.True(t, boundaryField.Array) +} + func TestQueryPlanNoUnnessecaryID(t *testing.T) { PlanTestFixture1.Check(t, "{ movies { title } }", ` { From eee79de9e6ed56cf23f2730d72fc0dc57ce410f2 Mon Sep 17 00:00:00 2001 From: Lucian Jones Date: Mon, 27 Sep 2021 15:48:53 +1300 Subject: [PATCH 07/13] Drop opentracing support The opentracing/jaeger support has not been at use within Movio for a while due to issues with deploying and running jaeger. This has put the opentracing support in a position where it's unmaintaned so it will be dropped for the time being. --- README.md | 2 +- client.go | 12 ------ docs/debugging.md | 69 ---------------------------------- docs/plugins.md | 8 ---- execution.go | 17 --------- go.mod | 11 ++---- go.sum | 29 -------------- plugins/opentracing.go | 85 ------------------------------------------ 8 files changed, 4 insertions(+), 229 deletions(-) delete mode 100644 plugins/opentracing.go diff --git a/README.md b/README.md index 852bb807..a34ffc19 100644 --- a/README.md +++ b/README.md @@ -19,7 +19,7 @@ Bramble supports: - Namespaces - Field-level permissions - Plugins: - - JWT, Open tracing, CORS, ... + - JWT, CORS, ... - Or add your own - Hot reloading of configuration diff --git a/client.go b/client.go index ca344f01..03b3afc4 100644 --- a/client.go +++ b/client.go @@ -12,7 +12,6 @@ import ( "strings" "time" - opentracing "github.com/opentracing/opentracing-go" "github.com/vektah/gqlparser/v2/ast" ) @@ -20,7 +19,6 @@ import ( type GraphQLClient struct { HTTPClient *http.Client MaxResponseSize int64 - Tracer opentracing.Tracer UserAgent string } @@ -83,16 +81,6 @@ func (c *GraphQLClient) Request(ctx context.Context, url string, request *Reques httpReq.Header.Set("User-Agent", c.UserAgent) } - if c.Tracer != nil { - span := opentracing.SpanFromContext(ctx) - if span != nil { - c.Tracer.Inject( - span.Context(), - opentracing.HTTPHeaders, - opentracing.HTTPHeadersCarrier(httpReq.Header)) - } - } - res, err := c.HTTPClient.Do(httpReq) if err != nil { return fmt.Errorf("error during request: %w", err) diff --git a/docs/debugging.md b/docs/debugging.md index 25693842..5014dd27 100644 --- a/docs/debugging.md +++ b/docs/debugging.md @@ -9,73 +9,4 @@ One or multiple of the following options can be provided (white space separated) - `query`: input query - `plan`: the query plan, including services and subqueries - `timing`: total execution time for the query (as a duration string, e.g. `12ms`) -- `trace-id`: the jaeger trace-id - `all` (all of the above) - -## Open tracing (Jaeger) - -Tracing is a powerful way to understand exactly how your queries are executed and to troubleshoot slow queries. - -### Enable tracing on Bramble - -See the [open tracing plugin](plugins?id=open-tracing-jaeger). - -### Add tracing to your services (optional) - -Adding tracing to your individual services will add a lot more details to your traces. - -1. Create a tracer, see the [Jaeger documentation](https://pkg.go.dev/github.com/uber/jaeger-client-go#NewTracer) - -2. Add a tracing middleware to your HTTP endpoint. - -```go -mux.Handle("/query", NewTracingMiddleware(tracer).Middleware(gqlserver)) -``` - -
- -Example Go middleware - - -```go -// TracingMiddleware is a middleware to add open tracing to incoming requests. -// It creates a span for each incoming requests, using the request context if -// present. -type TracingMiddleware struct { - tracer opentracing.Tracer -} - -// NewTracingMiddleware returns a new tracing middleware -func NewTracingMiddleware(tracer opentracing.Tracer) *TracingMiddleware { - return &TracingMiddleware{ - tracer: tracer, - } -} - -// Middleware applies the tracing middleware to the handler -func (m *TracingMiddleware) Middleware(h http.Handler) http.Handler { - return http.HandlerFunc(func(rw http.ResponseWriter, r *http.Request) { - spanContext, _ := m.tracer.Extract(opentracing.HTTPHeaders, opentracing.HTTPHeadersCarrier(r.Header)) - span := m.tracer.StartSpan("query", ext.RPCServerOption(spanContext)) - c := opentracing.ContextWithSpan(r.Context(), span) - h.ServeHTTP(rw, r.WithContext(c)) - span.Finish() - }) -} -``` - -
- -3. Add the tracer to the resolver - - - With graph-gophers - - ```go - parsedSchema := graphql.MustParseSchema(schema, resolver, graphql.Tracer(trace.OpenTracingTracer{})) - ``` - - - With gqlgen - - ```go - gqlserver.Use(support.NewGqlgenOpenTracing(tracer)) - ``` diff --git a/docs/plugins.md b/docs/plugins.md index fcfd16e5..234800ab 100644 --- a/docs/plugins.md +++ b/docs/plugins.md @@ -181,11 +181,3 @@ Exposes the GraphQL playground on `/playground`. ``` You access the GraphQL playground by visiting `http://localhost:/playground` in your browser. - -## Open Tracing (Jaeger) - -The Jaeger plugin captures and sends traces to a Jaeger server. - -Configuration is done through environment variables, see the [Jaeger -documentation](https://github.com/jaegertracing/jaeger-client-go#environment-variables) -for more information. diff --git a/execution.go b/execution.go index d7134f6c..37af02d8 100644 --- a/execution.go +++ b/execution.go @@ -8,9 +8,7 @@ import ( "time" "github.com/99designs/gqlgen/graphql" - "github.com/opentracing/opentracing-go" log "github.com/sirupsen/logrus" - "github.com/uber/jaeger-client-go" "github.com/vektah/gqlparser/v2/ast" "github.com/vektah/gqlparser/v2/gqlerror" ) @@ -43,7 +41,6 @@ type ExecutableSchema struct { Services map[string]*Service BoundaryQueries BoundaryFieldsMap GraphqlClient *GraphQLClient - Tracer opentracing.Tracer MaxRequestsPerQuery int64 mutex sync.RWMutex @@ -276,20 +273,6 @@ func (s *ExecutableSchema) ExecuteQuery(ctx context.Context) *graphql.Response { } } -// TraceIDFromContext retrieves the trace ID from the context if it exists. -// Returns an empty string otherwise. -func TraceIDFromContext(ctx context.Context) string { - span := opentracing.SpanFromContext(ctx) - if span == nil { - return "" - } - jaegerContext, ok := span.Context().(jaeger.SpanContext) - if !ok { - return "" - } - return jaegerContext.TraceID().String() -} - // Schema returns the merged schema func (s *ExecutableSchema) Schema() *ast.Schema { return s.MergedSchema diff --git a/go.mod b/go.mod index dca64dd4..45c84e92 100644 --- a/go.mod +++ b/go.mod @@ -4,7 +4,6 @@ go 1.16 require ( github.com/99designs/gqlgen v0.11.2 - github.com/codahale/hdrhistogram v0.0.0-20161010025455-3a0bb77429bd // indirect github.com/felixge/httpsnoop v1.0.1 github.com/fsnotify/fsnotify v1.4.9 github.com/golang-jwt/jwt/v4 v4.0.0 @@ -18,21 +17,17 @@ require ( github.com/kr/pretty v0.2.0 // indirect github.com/kr/text v0.2.0 // indirect github.com/mitchellh/mapstructure v1.1.2 // indirect - github.com/opentracing/opentracing-go v1.1.0 - github.com/pkg/errors v0.9.1 // indirect github.com/prometheus/client_golang v1.2.1 github.com/prometheus/common v0.9.1 // indirect github.com/prometheus/procfs v0.0.10 // indirect github.com/rs/cors v1.7.0 github.com/sirupsen/logrus v1.4.2 github.com/stretchr/testify v1.7.0 - github.com/uber/jaeger-client-go v2.22.1+incompatible - github.com/uber/jaeger-lib v2.2.0+incompatible // indirect github.com/vektah/gqlparser/v2 v2.0.1 - go.uber.org/atomic v1.6.0 // indirect - golang.org/x/lint v0.0.0-20201208152925-83fdc39ff7b5 // indirect + golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9 // indirect golang.org/x/sync v0.0.0-20210220032951-036812b2e83c - golang.org/x/tools v0.1.0 // indirect + golang.org/x/sys v0.0.0-20210119212857-b64e53b001e4 // indirect + golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1 // indirect google.golang.org/protobuf v1.25.0 // indirect gopkg.in/square/go-jose.v2 v2.5.1 gopkg.in/yaml.v2 v2.3.0 // indirect diff --git a/go.sum 
b/go.sum index 90272fb2..d638eb84 100644 --- a/go.sum +++ b/go.sum @@ -21,8 +21,6 @@ github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA github.com/cespare/xxhash/v2 v2.1.0 h1:yTUvW7Vhb89inJ+8irsUqiWjh8iT6sQPZiQzI6ReGkA= github.com/cespare/xxhash/v2 v2.1.0/go.mod h1:dgIUBU3pDso/gPgZ1osOZ0iQf77oPR28Tjxl5dIMyVM= github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw= -github.com/codahale/hdrhistogram v0.0.0-20161010025455-3a0bb77429bd h1:qMd81Ts1T2OTKmB4acZcyKaMtRnY5Y44NuXGX2GFJ1w= -github.com/codahale/hdrhistogram v0.0.0-20161010025455-3a0bb77429bd/go.mod h1:sE/e/2PUdi/liOCUjSTXgM1o87ZssimdTWN964YiIeI= github.com/cpuguy83/go-md2man/v2 v2.0.0-20190314233015-f79a8a8ca69d/go.mod h1:maD7wRr/U5Z6m/iR4s+kqSMx2CaBsrgA7czyZG/E6dU= github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E= github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= @@ -114,8 +112,6 @@ github.com/opentracing/opentracing-go v1.1.0 h1:pWlfV3Bxv7k65HYwkikxat0+s3pV4bsq github.com/opentracing/opentracing-go v1.1.0/go.mod h1:UkNAQd3GIcIGf0SeVgPpRdFStlNbqXla1AfSYxPUl2o= github.com/pkg/errors v0.8.0/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= -github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4= -github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= github.com/prometheus/client_golang v0.9.1/go.mod h1:7SWBe2y4D6OKWSNQJUaRYU/AaXPKyh/dDVn+NZz0KFw= @@ -149,27 +145,17 @@ github.com/sirupsen/logrus v1.2.0/go.mod h1:LxeOpSwHxABJmUn/MG1IvRgCAasNZTLOkJPx github.com/sirupsen/logrus v1.4.2 h1:SPIRibHv4MatM3XXNO2BJeFLZwZ2LvZgfQ5+UNI2im4= github.com/sirupsen/logrus v1.4.2/go.mod h1:tLMulIdttU9McNUspp0xgXVQah82FyeX6MwdIuYE2rE= github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= -github.com/stretchr/objx v0.1.1 h1:2vfRuCMp5sSVIDSqO8oNnWJq7mPa6KVP3iPIwFBuy8A= github.com/stretchr/objx v0.1.1/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= github.com/stretchr/testify v1.2.1/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI= github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4= -github.com/stretchr/testify v1.5.1 h1:nOGnQDM7FYENwehXlg/kFVnos3rEvtKTjRvOWSzb6H4= -github.com/stretchr/testify v1.5.1/go.mod h1:5W2xD1RspED5o8YsWQXVCued0rvSQ+mT+I5cxcmMvtA= github.com/stretchr/testify v1.7.0 h1:nwc3DEeHmmLAfoZucVR881uASk0Mfjw8xYJ99tb5CcY= github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= -github.com/uber/jaeger-client-go v2.22.1+incompatible h1:NHcubEkVbahf9t3p75TOCR83gdUHXjRJvjoBh1yACsM= -github.com/uber/jaeger-client-go v2.22.1+incompatible/go.mod h1:WVhlPFC8FDjOFMMWRy2pZqQJSXxYSwNYOkTr/Z6d3Kk= -github.com/uber/jaeger-lib v2.2.0+incompatible h1:MxZXOiR2JuoANZ3J6DE/U0kSFv/eJ/GfSYVCjK7dyaw= -github.com/uber/jaeger-lib v2.2.0+incompatible/go.mod h1:ComeNDZlWwrWnDv8aPp0Ba6+uUTzImX/AauajbLI56U= github.com/urfave/cli/v2 v2.1.1/go.mod h1:SE9GqnLQmjVa0iPEY0f1w3ygNIYcIJ0OKPMoW2caLfQ= github.com/vektah/dataloaden 
v0.2.1-0.20190515034641-a19b9a6e7c9e/go.mod h1:/HUdMve7rvxZma+2ZELQeNh88+003LL7Pf/CZ089j8U= github.com/vektah/gqlparser/v2 v2.0.1 h1:xgl5abVnsd4hkN9rk65OJID9bfcLSMuTaTcZj777q1o= github.com/vektah/gqlparser/v2 v2.0.1/go.mod h1:SyUiHgLATUR8BiYURfTirrTcGpcE+4XkV2se04Px1Ms= -github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= -go.uber.org/atomic v1.6.0 h1:Ezj3JGmsOnG1MoRWQkPBsKLe9DwWD9QeXzTRzzldNVk= -go.uber.org/atomic v1.6.0/go.mod h1:sABNBOSYdrvTF6hTgEIbc7YasKWGhgEQZyfxyTvoXHQ= golang.org/x/crypto v0.0.0-20180904163835-0709b304e793/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= @@ -179,11 +165,7 @@ golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE= golang.org/x/lint v0.0.0-20190227174305-5b3e6a55c961/go.mod h1:wehouNa3lNwaWXcvxsM5YxQ5yQlVC4a0KAMCusXpPoU= golang.org/x/lint v0.0.0-20190313153728-d0100b6bd8b3/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= -golang.org/x/lint v0.0.0-20190930215403-16217165b5de/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= -golang.org/x/lint v0.0.0-20201208152925-83fdc39ff7b5 h1:2M3HP5CCK1Si9FQhwnzYhXdG6DXeebvUHFpre8QvbyI= -golang.org/x/lint v0.0.0-20201208152925-83fdc39ff7b5/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY= golang.org/x/mod v0.1.1-0.20191105210325-c90efee705ee/go.mod h1:QqPTAvyqsEbceGzBzNggFXnrqF1CaUcvgkdR5Ot7KZg= -golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20181114220301-adae6a3d119a/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= @@ -192,14 +174,12 @@ golang.org/x/net v0.0.0-20190311183353-d8887717615a/go.mod h1:t9HGtf8HONx5eT2rtn golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= golang.org/x/net v0.0.0-20190613194153-d28f0bde5980/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= -golang.org/x/net v0.0.0-20201021035429-f5854403a974/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20210220032951-036812b2e83c h1:5KslGYwFpkhGh+Q16bwMP3cOontH8FOep7tGV86Y7SQ= golang.org/x/sync v0.0.0-20210220032951-036812b2e83c/go.mod 
h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= @@ -212,25 +192,16 @@ golang.org/x/sys v0.0.0-20190422165155-953cdadca894/go.mod h1:h1NjWce9XRLGQEsW7w golang.org/x/sys v0.0.0-20191005200804-aed5e4c7ecf9/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20191010194322-b09406accb47/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200116001909-b77594299b42/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210119212857-b64e53b001e4 h1:myAQVi0cGEoqQVR5POX+8RR2mrocKqNN1hmeMqhX27k= golang.org/x/sys v0.0.0-20210119212857-b64e53b001e4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= -golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= -golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= golang.org/x/tools v0.0.0-20190114222345-bf090417da8b/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= golang.org/x/tools v0.0.0-20190125232054-d66bd3c5d5a6/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= golang.org/x/tools v0.0.0-20190226205152-f727befe758c/go.mod h1:9Yl7xja0Znq3iFh3HoIrodX9oNMXvdceNzlUR8zjMvY= golang.org/x/tools v0.0.0-20190311212946-11955173bddd/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= golang.org/x/tools v0.0.0-20190515012406-7d7faa4812bd/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= golang.org/x/tools v0.0.0-20190524140312-2c0ae7006135/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= -golang.org/x/tools v0.0.0-20191029041327-9cc4af7d6b2c/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= -golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20200114235610-7ae403b6b589/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= -golang.org/x/tools v0.0.0-20200130002326-2f3ba24bd6e7/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= -golang.org/x/tools v0.1.0 h1:po9/4sTYwZU9lPhi1tOrb4hCv3qrhiQ77LZfGa2OjwY= -golang.org/x/tools v0.1.0/go.mod h1:xkSsbof2nBLbhDlRMhhhyNLN/zl3eTqcnHD5viDpcZ0= -golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1 h1:go1bK/D/BFZV2I8cIQd1NKEZ+0owSTG1fDTci4IqFcE= diff --git a/plugins/opentracing.go b/plugins/opentracing.go deleted file mode 100644 index 49401c7f..00000000 --- a/plugins/opentracing.go +++ /dev/null @@ -1,85 +0,0 @@ -package plugins - -import ( - "context" - "encoding/json" - "fmt" - "net/http" - "strings" - - "github.com/movio/bramble" - "github.com/opentracing/opentracing-go" - "github.com/opentracing/opentracing-go/ext" - "github.com/uber/jaeger-client-go" - jaegercfg "github.com/uber/jaeger-client-go/config" -) - -func init() { - bramble.RegisterPlugin(&OpenTracingPlugin{}) -} - -type OpenTracingPlugin struct { - bramble.BasePlugin - tracer opentracing.Tracer -} - -func (p *OpenTracingPlugin) ID() string { - return "open-tracing" -} 
- -func (p *OpenTracingPlugin) Configure(cfg *bramble.Config, pluginCfg json.RawMessage) error { - jaegerConfig := jaegercfg.Configuration{ - ServiceName: "bramble", - Sampler: &jaegercfg.SamplerConfig{ - Type: "remote", - Param: 1, - }, - } - - jaegerCfg, err := jaegerConfig.FromEnv() - if err != nil { - return fmt.Errorf("could not get Jaeger config from env: %w", err) - } - - p.tracer, _, err = jaegerCfg.NewTracer() - return err -} - -func (p *OpenTracingPlugin) Init(s *bramble.ExecutableSchema) { - s.Tracer = p.tracer - s.GraphqlClient.Tracer = p.tracer -} - -func (p *OpenTracingPlugin) ApplyMiddlewarePublicMux(h http.Handler) http.Handler { - return http.HandlerFunc(func(rw http.ResponseWriter, r *http.Request) { - // do not trace healthcheck - if strings.HasPrefix(r.Header.Get("user-agent"), "Bramble") { - h.ServeHTTP(rw, r) - return - } - - spanContext, _ := p.tracer.Extract(opentracing.HTTPHeaders, opentracing.HTTPHeadersCarrier(r.Header)) - span := p.tracer.StartSpan("query", ext.RPCServerOption(spanContext)) - c := opentracing.ContextWithSpan(r.Context(), span) - bramble.AddFields(r.Context(), bramble.EventFields{ - "trace-id": traceIDFromContext(c), - }) - r = r.WithContext(c) - h.ServeHTTP(rw, r) - span.Finish() - }) -} - -// traceIDFromContext returns the Jaeger's trace ID if a span exists in the -// current context -func traceIDFromContext(ctx context.Context) string { - span := opentracing.SpanFromContext(ctx) - if span == nil { - return "" - } - jaegerContext, ok := span.Context().(jaeger.SpanContext) - if !ok { - return "" - } - return jaegerContext.TraceID().String() -} From a704b0055c1e2c46b5868ffc760170b94df253b9 Mon Sep 17 00:00:00 2001 From: Lucian Jones Date: Tue, 28 Sep 2021 09:09:35 +1300 Subject: [PATCH 08/13] Use switch statement to build document --- query_execution.go | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/query_execution.go b/query_execution.go index 4290b9e2..90b17226 100644 --- a/query_execution.go +++ b/query_execution.go @@ -102,11 +102,11 @@ func (q *queryExecution) Execute(queryPlan *QueryPlan) ([]executionResult, gqler func (q *queryExecution) executeRootStep(step *QueryPlanStep) error { var document string - if step.ParentType == "Query" { - document = "query " + formatSelectionSet(q.ctx, q.schema, step.SelectionSet) - } else if step.ParentType == "Mutation" { - document = "mutation " + formatSelectionSet(q.ctx, q.schema, step.SelectionSet) - } else { + + switch operationType := step.ParentType; operationType { + case queryObjectName, mutationObjectName: + document = strings.ToLower(operationType) + formatSelectionSet(q.ctx, q.schema, step.SelectionSet) + default: return errors.New("expected mutation or query root step") } From cc03d0a9c6e3612cef8a332ed32ad0efc64dce05 Mon Sep 17 00:00:00 2001 From: Lucian Jones Date: Tue, 28 Sep 2021 09:51:04 +1300 Subject: [PATCH 09/13] Use boundaryIDFromMap to extract ID --- query_execution.go | 12 ++---------- 1 file changed, 2 insertions(+), 10 deletions(-) diff --git a/query_execution.go b/query_execution.go index 90b17226..fc4fa1d8 100644 --- a/query_execution.go +++ b/query_execution.go @@ -390,16 +390,8 @@ func extractBoundaryIDs(data interface{}, insertionPoint []string) ([]string, er if len(insertionPoint) == 0 { switch ptr := ptr.(type) { case map[string]interface{}: - var id string - var ok bool - id, ok = ptr["_id"].(string) - if !ok { - id, ok = ptr["id"].(string) - } - if !ok { - return nil, errors.New("extractBoundaryIDs: unexpected missing '_id' or 'id' in map") - 
} - return []string{id}, nil + id, err := boundaryIDFromMap(ptr) + return []string{id}, err case []interface{}: result := []string{} for _, innerPtr := range ptr { From 0b416de7c321561172a80dc29da9e48ab5ff9e82 Mon Sep 17 00:00:00 2001 From: Lucian Jones Date: Tue, 28 Sep 2021 10:05:21 +1300 Subject: [PATCH 10/13] Remove unnecessary branching --- query_execution.go | 6 +----- 1 file changed, 1 insertion(+), 5 deletions(-) diff --git a/query_execution.go b/query_execution.go index fc4fa1d8..b96891f3 100644 --- a/query_execution.go +++ b/query_execution.go @@ -408,11 +408,7 @@ func extractBoundaryIDs(data interface{}, insertionPoint []string) ([]string, er } switch ptr := ptr.(type) { case map[string]interface{}: - if len(insertionPoint) == 1 { - return extractBoundaryIDs(ptr[insertionPoint[0]], nil) - } else { - return extractBoundaryIDs(ptr[insertionPoint[0]], insertionPoint[1:]) - } + return extractBoundaryIDs(ptr[insertionPoint[0]], insertionPoint[1:]) case []interface{}: result := []string{} for _, innerPtr := range ptr { From 64ec6ccaada0a32d6a4936065c248a838111527c Mon Sep 17 00:00:00 2001 From: Lucian Jones Date: Tue, 28 Sep 2021 10:30:05 +1300 Subject: [PATCH 11/13] Remove outdated algorithm docs --- docs/algorithms.md | 117 +-------------------------------------------- 1 file changed, 1 insertion(+), 116 deletions(-) diff --git a/docs/algorithms.md b/docs/algorithms.md index f4c3b236..d8ec6310 100644 --- a/docs/algorithms.md +++ b/docs/algorithms.md @@ -303,119 +303,4 @@ function RouteSelectionSet(ctx, parentType, selectionSet) { ## Query Execution -The `Execute` function is straightforward, it simply iterates over each root step in the query plan, and executes them in turn. The implementation does this in parallel, but this is omitted in the pseudo-code for simplicity. - -``` -function Execute(ctx, queryPlan, resultPtr) { - for step in queryPlan.RootSteps { - ExecuteRootStep(ctx, step, resultPtr) - } -} -``` - -The `ExecuteRootStep` function executes a single step of the query plan, along with its children steps, if any. - -The operation document is simply composed of the operation type and the step's selection set. - -Once the document is constructed, we invoke the remote GraphQL service with the document and store the response at the given result pointer. - -Finally, for each child step in `step.Then`, we call `ExecuteChildStep`. - -``` -function ExecuteRootStep(ctx, step, resultPtr) { - operationType = if step.ParentType == "Mutation" then "mutation" else "query" - if id is the empty string { - document = "${operationType} ${step.SelectionSet}" - } - execute document at URL step.ServiceURL and write response to resultPtr - for childStep in step.Then { - ExecuteChildStep(ctx, childStep, resultPtr) - } -} -``` - -The `ExecuteChildStep` function execute a single child step, along with its -children steps, if any. - -First we build the corresponding insertion slice. This is a slice containing -all the target elements for the operation (where we need to insert the data). -They are represented by the id of the element along with a pointer to a -structure that can receive JSON document. See `buildInsertionSlice` below. - -Then we build the document: one boundary query per insertion target. To avoid -conflict we alias each query with an id. - -Once we have the document is constructed we invoke the remote GraphQL service -and store the response into each corresponding target. - -Finally recursively call `ExecuteChildStep` for each child step in -`step.Then`. 
- -``` -function ExecuteChildStep(ctx, step, resultPtr) { - targets = buildInsertionSlice(step, resultPtr) - queries = [] - for target in targets { - query = """ - { - ${id}: $boundaryQuery(id: ${target.Id}) { - ${step.SelectionSet} - } - } - """ - append query to queries - } - document = "{ ${queries} }" - execute document at URL step.ServiceURL and write response to resultPtr - for childStep in step.Then { - ExecuteChildStep(ctx, childStep, resultPtr) - } -} -``` - -The `buildInsertionSlice` algorithm traverses the structure pointed by -`resultPtr`, along the path described by `insertionPoint`. It returns a slice -of pointers to JSON results along with the id of the element. -Those pointers indicate where data should be written by a step that has the -corresponding insertion point. - -First, if the insertion point is empty, it means that we have reached the end of the path, and `resultPtr` points to the destination we were looking for. If this destination is a `map`, we return a singleton slice of that map. If this destination is a slice then we call `buildInsertionSlice` recursively on each element of that slice in order to ensure that the returned slice is not nested (`resultPtr` may be a list of lists, in which case the resulting slice must be flattened). - -Finally, if the insertion point is not empty, we consider whether `resultPtr` is a map or a slice. -
-If it's a map, we look up the insertion point's first item in that map and call `buildInsertionSlice` recursively on that value, passing a new insertion point to the recursive call that skips that first element. -
-If `resultPtr` is a slice we perform the same operation as described above, i.e. we call `buildInsertionSlice` recursively on each element of that slice in order to ensure that the returned slice is not nested. - -``` -function buildInsertionSlice(insertionPoint, resultPtr) { - if insertionPoint is empty { - switch on the type of resultPtr { - case resultPtr is a slice: - newResultPtr = empty slice - for element in resultPtr { - for newElement in buildInsertionSlice(insertionPoint, element) { - append newElement to newResultPtr - } - } - return newResultPtr - case resultPtr is a map: - id = resultPtr["id"] || resultPtr["_id"] - return [ (id, resultPtr) ] - } - } - - switch on the type of resultPtr { - case resultPtr is a slice: - newResultPtr = empty slice - for element in resultPtr { - for newElement in buildInsertionSlice(insertionPoint, element) { - append newElement to newResultPtr - } - } - return newResultPtr - case resultPtr is a map: - return buildInsertionSlice(insertionPoint[1:], resultPtr[insertionPoint[0]]) - } -} -``` +To be updated for the new execution pipeline. From 5a9eca056ddf2696dcbf07e6cfb22ad936114e14 Mon Sep 17 00:00:00 2001 From: Lucian Jones Date: Tue, 28 Sep 2021 17:19:24 +1300 Subject: [PATCH 12/13] Review changes * inline nodeAlias function as it only has a single use and the name no longer makes sense * return error when unable to lookup BoundaryField for a type, fix error in test schema * rename wait group for reading results off channel * some minor comment and naming fixes --- execution.go | 4 ---- execution_test.go | 5 +++-- plan.go | 10 +++++----- plan_test.go | 3 ++- query_execution.go | 33 ++++++++++++++------------------- 5 files changed, 24 insertions(+), 31 deletions(-) diff --git a/execution.go b/execution.go index 37af02d8..7cb1b4e1 100644 --- a/execution.go +++ b/execution.go @@ -596,10 +596,6 @@ func jsonMapToInterfaceMap(m map[string]json.RawMessage) map[string]interface{} return res } -func nodeAlias(i int) string { - return fmt.Sprintf("_%d", i) -} - // mergeMaps merge dst into src, unmarshalling json.RawMessages when necessary func mergeMaps(dst, src map[string]interface{}) { for k, v := range dst { diff --git a/execution_test.go b/execution_test.go index 1dc929dd..34871f16 100644 --- a/execution_test.go +++ b/execution_test.go @@ -4639,7 +4639,8 @@ func TestQueryExecutionWithUnions(t *testing.T) { } type Query { - animal(id: ID!): Animal @boundary + animal(id: ID!): Animal + person(id: ID!): Person @boundary animals: [Animal]! }`, handler: http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { @@ -4673,7 +4674,7 @@ func TestQueryExecutionWithUnions(t *testing.T) { }), }, { - schema: `directive @boundary on OBJECT + schema: `directive @boundary on OBJECT | FIELD_DEFINITION type Person @boundary { id: ID! 
diff --git a/plan.go b/plan.go index 529ae43a..278882c7 100644 --- a/plan.go +++ b/plan.go @@ -395,16 +395,16 @@ func (m BoundaryFieldsMap) RegisterField(serviceURL, typeName, field string, arr } // Query returns the boundary field for the given service and type -func (m BoundaryFieldsMap) Field(serviceURL, typeName string) BoundaryField { +func (m BoundaryFieldsMap) Field(serviceURL, typeName string) (BoundaryField, error) { serviceMap, ok := m[serviceURL] if !ok { - return BoundaryField{Field: "node"} + return BoundaryField{}, fmt.Errorf("could not find BoundaryFieldsMap entry for service %s", serviceURL) } - query, ok := serviceMap[typeName] + field, ok := serviceMap[typeName] if !ok { - return BoundaryField{Field: "node"} + return BoundaryField{}, fmt.Errorf("could not find BoundaryFieldsMap entry for typeName %s", typeName) } - return query + return field, nil } diff --git a/plan_test.go b/plan_test.go index e190a32c..e85fa4ec 100644 --- a/plan_test.go +++ b/plan_test.go @@ -752,7 +752,8 @@ func TestPrefersArrayBasedBoundaryLookups(t *testing.T) { boundaryFieldMap.RegisterField("service-a", "movie", "_movie", true) boundaryFieldMap.RegisterField("service-a", "movie", "_movies", false) - boundaryField := boundaryFieldMap.Field("service-a", "movie") + boundaryField, err := boundaryFieldMap.Field("service-a", "movie") + require.NoError(t, err) require.True(t, boundaryField.Array) } diff --git a/query_execution.go b/query_execution.go index b96891f3..df3b861c 100644 --- a/query_execution.go +++ b/query_execution.go @@ -53,17 +53,9 @@ func newQueryExecution(ctx context.Context, client *GraphQLClient, schema *ast.S } func (q *queryExecution) Execute(queryPlan *QueryPlan) ([]executionResult, gqlerror.List) { - readWg := &sync.WaitGroup{} + wg := &sync.WaitGroup{} results := []executionResult{} - if len(queryPlan.RootSteps) > int(q.maxRequest) { - return nil, gqlerror.List{ - &gqlerror.Error{ - Message: fmt.Sprintf("exceeded max requests of %v", q.maxRequest), - }, - } - } - for _, step := range queryPlan.RootSteps { if step.ServiceURL == internalServiceName { r, err := executeBrambleStep(step) @@ -80,12 +72,12 @@ func (q *queryExecution) Execute(queryPlan *QueryPlan) ([]executionResult, gqler }) } - readWg.Add(1) + wg.Add(1) go func() { for result := range q.results { results = append(results, result) } - readWg.Done() + wg.Done() }() if err := q.group.Wait(); err != nil { @@ -96,7 +88,7 @@ func (q *queryExecution) Execute(queryPlan *QueryPlan) ([]executionResult, gqler } } close(q.results) - readWg.Wait() + wg.Wait() return results, nil } @@ -162,7 +154,10 @@ func (q *queryExecution) executeChildStep(step *QueryPlanStep, boundaryIDs []str return fmt.Errorf("exceeded max requests of %v", q.maxRequest) } - boundaryField := q.boundaryFields.Field(step.ServiceURL, step.ParentType) + boundaryField, err := q.boundaryFields.Field(step.ServiceURL, step.ParentType) + if err != nil { + return err + } documents, err := buildBoundaryQueryDocuments(q.ctx, q.schema, step, boundaryIDs, boundaryField, 50) if err != nil { @@ -242,7 +237,7 @@ func (q *queryExecution) createGQLErrors(step *QueryPlanStep, err error) gqlerro } locs = append(locs, gqlerror.Location{Line: pos.Line, Column: pos.Column}) - // if the field has a subset it's part of the path + // if the field has a selection set it's part of the path if len(f.SelectionSet) > 0 { path = append(path, ast.PathName(f.Alias)) } @@ -284,7 +279,7 @@ func (q *queryExecution) createGQLErrors(step *QueryPlanStep, err error) gqlerro // The insertionPoint 
represents the level a piece of data should be inserted at, relative to the root of the root step's data. // However results from a boundary query only contain a portion of that tree. For example, you could -// have insertionPoint: ["foo", "bar", "movies". "movie", "compTitles"], with the below example as the boundary result we're +// have insertionPoint: ["foo", "bar", "movies", "movie", "compTitles"], with the below example as the boundary result we're // crawling for ids: // [ // { @@ -355,9 +350,9 @@ func buildTypenameResponseMap(selectionSet ast.SelectionSet, parentTypeName stri return result, nil } -func fragmentImplementsAbstractType(schema *ast.Schema, objectType, interfaceType string) bool { - for _, def := range schema.Implements[objectType] { - if def.Name == interfaceType { +func fragmentImplementsAbstractType(schema *ast.Schema, fragmentTypeDefinition, abstractObjectTypename string) bool { + for _, def := range schema.Implements[fragmentTypeDefinition] { + if def.Name == abstractObjectTypename { return true } } @@ -442,7 +437,7 @@ func buildBoundaryQueryDocuments(ctx context.Context, schema *ast.Schema, step * for _, batch := range batchBy(ids, batchSize) { var selections []string for _, id := range batch { - selection := fmt.Sprintf("%s: %s(id: %q) %s", nodeAlias(selectionIndex), parentTypeBoundaryField.Field, id, selectionSetQL) + selection := fmt.Sprintf("%s: %s(id: %q) %s", fmt.Sprintf("_%d", selectionIndex), parentTypeBoundaryField.Field, id, selectionSetQL) selections = append(selections, selection) selectionIndex++ } From ee3f3d88f4ecb657ecac4a54030925e489f44903 Mon Sep 17 00:00:00 2001 From: Lucian Jones Date: Thu, 30 Sep 2021 17:28:51 +1300 Subject: [PATCH 13/13] Fix fragment elimination checks --- execution_test.go | 24 +++++++++++++++++++++++- query_execution.go | 4 ++-- 2 files changed, 25 insertions(+), 3 deletions(-) diff --git a/execution_test.go b/execution_test.go index 34871f16..e8f6e0de 100644 --- a/execution_test.go +++ b/execution_test.go @@ -741,6 +741,28 @@ func TestFederatedQueryFragmentSpreads(t *testing.T) { f.checkSuccess(t) }) + t.Run("with non abstract fragment", func(t *testing.T) { + f := &queryExecutionFixture{ + services: []testService{serviceA, serviceB}, + query: ` + query Foo { + snapshot(id: "GIZMO1") { + ... 
on Snapshot { + name + } + } + }`, + expected: ` + { + "snapshot": { + "name": "foo" + } + }`, + } + + f.checkSuccess(t) + }) + t.Run("with named fragment spread", func(t *testing.T) { f := &queryExecutionFixture{ services: []testService{serviceA, serviceB}, @@ -1029,7 +1051,7 @@ func TestQueryExecutionNamespaceAndFragmentSpread(t *testing.T) { "movies": [ {"title": "The Big Blue"} ], - "__typename": "Person" + "__typename": "Director" } } } diff --git a/query_execution.go b/query_execution.go index df3b861c..bdfeda30 100644 --- a/query_execution.go +++ b/query_execution.go @@ -350,7 +350,7 @@ func buildTypenameResponseMap(selectionSet ast.SelectionSet, parentTypeName stri return result, nil } -func fragmentImplementsAbstractType(schema *ast.Schema, fragmentTypeDefinition, abstractObjectTypename string) bool { +func fragmentImplementsAbstractType(schema *ast.Schema, abstractObjectTypename, fragmentTypeDefinition string) bool { for _, def := range schema.Implements[fragmentTypeDefinition] { if def.Name == abstractObjectTypename { return true @@ -849,7 +849,7 @@ func unionAndTrimSelectionSetRec(objectTypename string, schema *ast.Schema, sele } if fragment.ObjectDefinition.IsAbstractType() && - fragmentImplementsAbstractType(schema, objectTypename, fragment.ObjectDefinition.Name) && + fragmentImplementsAbstractType(schema, fragment.ObjectDefinition.Name, fragment.TypeCondition) && objectTypenameMatchesDifferentFragment(objectTypename, fragment) { continue }