diff --git a/request/graphql/schema/descriptions.go b/request/graphql/schema/descriptions.go index e720a770fb..6f967046b9 100644 --- a/request/graphql/schema/descriptions.go +++ b/request/graphql/schema/descriptions.go @@ -74,3 +74,111 @@ var ( client.FieldKind_FOREIGN_OBJECT_ARRAY: client.NONE_CRDT, } ) + +const ( + dockeyArgDescription string = ` +An optional dockey parameter for this field. Only documents with + the given dockey will be returned. If no documents match, the result + will be null/empty. +` + dockeysArgDescription string = ` +An optional set of dockeys for this field. Only documents with a dockey + matching a dockey in the given set will be returned. If no documents match, + the result will be null/empty. If an empty set is provided, this argument will + be ignored. +` + cidArgDescription string = ` +An optional value that specifies the commit ID of the document to return. + This CID does not need to be the most recent for the document; if it + corresponds to an older version, the document will be returned + in the state it was in at the time of that commit. If a matching commit is + not found, then an empty set will be returned. +` + singleFieldFilterArgDescription string = ` +An optional filter for this join; if the related record does + not meet the filter criteria, the host record will still be returned, + but the value of this field will be null. +` + listFieldFilterArgDescription string = ` +An optional filter for this join; if none of the related records meet the filter + criteria, the host record will still be returned, but the value of this field will + be empty. +` + selectFilterArgDescription string = ` +An optional filter for this select; only documents matching the given criteria + will be returned. +` + aggregateFilterArgDescription string = ` +An optional filter for this aggregate; only documents matching the given criteria + will be aggregated. +` + showDeletedArgDescription string = ` +An optional value that specifies whether deleted documents may be + returned. This argument will propagate down through any child selects/joins. +` + createDocumentDescription string = ` +Creates a single document of this type using the data provided. +` + createDataArgDescription string = ` +The JSON representation of the document you wish to create. Required. +` + updateDocumentsDescription string = ` +Updates documents in this collection using the data provided. Only documents + matching any provided criteria will be updated; if no criteria are provided, + the update will be applied to all documents in the collection. +` + updateIDArgDescription string = ` +An optional dockey value that will limit the update to the document with + a matching dockey. If no matching document is found, the operation will + succeed, but no documents will be updated. +` + updateIDsArgDescription string = ` +An optional set of dockey values that will limit the update to documents + with a matching dockey. If no matching documents are found, the operation will + succeed, but no documents will be updated. +` + updateFilterArgDescription string = ` +An optional filter for this update that will limit the update to the documents + matching the given criteria. If no matching documents are found, the operation + will succeed, but no documents will be updated. +` + updateDataArgDescription string = ` +The JSON representation of the fields to update and their new values. Required. + Fields not explicitly mentioned here will not be updated. +`
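Illustrative sketch only, not part of the patch itself: assuming the User collection defined by the integration tests later in this diff (type User { name: String age: Int }), with placeholder dockeys and the filter shape of the generated <Type>FilterArg inputs, the update and delete arguments described above are used roughly as follows:

    mutation {
      update_User(ids: ["bae-123", "bae-456"], data: "{\"age\": 59}") {
        _key
        age
      }
      delete_User(filter: {age: {_gt: 70}}) {
        _key
      }
    }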
+ deleteDocumentsDescription string = ` +Deletes documents in this collection matching any provided criteria. If no + criteria are provided, all documents in the collection will be deleted. +` + deleteIDArgDescription string = ` +An optional dockey value that will limit the delete to the document with + a matching dockey. If no matching document is found, the operation will + succeed, but no documents will be deleted. +` + deleteIDsArgDescription string = ` +An optional set of dockey values that will limit the delete to documents with + a matching dockey. If no matching documents are found, the operation will + succeed, but no documents will be deleted. If an empty set is provided, no + documents will be deleted. +` + deleteFilterArgDescription string = ` +An optional filter for this delete that will limit the delete to documents + matching the given criteria. If no matching documents are found, the operation + will succeed, but no documents will be deleted. +` + keyFieldDescription string = ` +The immutable primary key (dockey) value for this document. +` + groupFieldDescription string = ` +The group field may be used to return a set of records belonging to the group. + It must be used alongside a 'groupBy' argument on the parent selector. It may + contain any field on the type being grouped, including those used by the + groupBy. +` + deletedFieldDescription string = ` +Indicates whether this document has been deleted. +` + versionFieldDescription string = ` +Returns the head commit for this document. +` +) diff --git a/request/graphql/schema/generate.go b/request/graphql/schema/generate.go index 2d811a068d..7688900b2d 100644 --- a/request/graphql/schema/generate.go +++ b/request/graphql/schema/generate.go @@ -148,7 +148,8 @@ func (g *Generator) generate(ctx context.Context, collections []client.Collectio if _, isAggregate := request.Aggregates[def.Name]; isAggregate { for name, aggregateTarget := range def.Args { expandedField := &gql.InputObjectFieldConfig{ - Type: g.manager.schema.TypeMap()[name+"FilterArg"], + Description: aggregateFilterArgDescription, + Type: g.manager.schema.TypeMap()[name+"FilterArg"], } aggregateTarget.Type.(*gql.InputObject).AddFieldConfig(request.FilterClause, expandedField) } @@ -281,7 +282,8 @@ func (g *Generator) createExpandedFieldAggregate( if filterType, canHaveFilter := g.manager.schema.TypeMap()[filterTypeName]; canHaveFilter { // Sometimes a filter is not permitted, for example when aggregating `_version` expandedField := &gql.InputObjectFieldConfig{ - Type: filterType, + Description: aggregateFilterArgDescription, + Type: filterType, } aggregateTarget.Type.(*gql.InputObject).AddFieldConfig("filter", expandedField) } @@ -296,11 +298,14 @@ func (g *Generator) createExpandedFieldSingle( ) (*gql.Field, error) { typeName := t.Name() field := &gql.Field{ - // @todo: Handle collection name from @collection directive - Name: f.Name, - Type: t, + Name: f.Name, + Description: f.Description, + Type: t, Args: gql.FieldConfigArgument{ - "filter": schemaTypes.NewArgConfig(g.manager.schema.TypeMap()[typeName+"FilterArg"]), + "filter": schemaTypes.NewArgConfig( + g.manager.schema.TypeMap()[typeName+"FilterArg"], + singleFieldFilterArgDescription, + ), }, } return field, nil @@ -313,19 +318,26 @@ func (g *Generator) createExpandedFieldList( ) (*gql.Field, error) { typeName := t.Name() field := &gql.Field{ - // @todo: Handle collection name from @collection directive - Name: f.Name, - Type: gql.NewList(t), + Name: f.Name, + Description: f.Description, +
Type: gql.NewList(t), Args: gql.FieldConfigArgument{ - "dockey": schemaTypes.NewArgConfig(gql.String), - "dockeys": schemaTypes.NewArgConfig(gql.NewList(gql.NewNonNull(gql.String))), - "filter": schemaTypes.NewArgConfig(g.manager.schema.TypeMap()[typeName+"FilterArg"]), + "dockey": schemaTypes.NewArgConfig(gql.String, dockeyArgDescription), + "dockeys": schemaTypes.NewArgConfig(gql.NewList(gql.NewNonNull(gql.String)), dockeysArgDescription), + "filter": schemaTypes.NewArgConfig( + g.manager.schema.TypeMap()[typeName+"FilterArg"], + listFieldFilterArgDescription, + ), "groupBy": schemaTypes.NewArgConfig( gql.NewList(gql.NewNonNull(g.manager.schema.TypeMap()[typeName+"Fields"])), + schemaTypes.GroupByArgDescription, + ), + "order": schemaTypes.NewArgConfig( + g.manager.schema.TypeMap()[typeName+"OrderArg"], + schemaTypes.OrderArgDescription, ), - "order": schemaTypes.NewArgConfig(g.manager.schema.TypeMap()[typeName+"OrderArg"]), - request.LimitClause: schemaTypes.NewArgConfig(gql.Int), - request.OffsetClause: schemaTypes.NewArgConfig(gql.Int), + request.LimitClause: schemaTypes.NewArgConfig(gql.Int, schemaTypes.LimitArgDescription), + request.OffsetClause: schemaTypes.NewArgConfig(gql.Int, schemaTypes.OffsetArgDescription), }, } @@ -370,9 +382,20 @@ func (g *Generator) buildTypes( fields := gql.Fields{} // automatically add the _key: ID field to the type - fields[request.KeyFieldName] = &gql.Field{Type: gql.ID} + fields[request.KeyFieldName] = &gql.Field{ + Description: keyFieldDescription, + Type: gql.ID, + } for _, field := range fieldDescriptions { + if field.Name == request.KeyFieldName { + // The `_key` field is included in the fieldDescriptions, + // but we do not wish to override the standard definition + // with the collection held definition (particularly the + // description) + continue + } + var ttype gql.Type if field.Kind == client.FieldKind_FOREIGN_OBJECT { var ok bool @@ -402,11 +425,15 @@ func (g *Generator) buildTypes( // add _version field fields[request.VersionFieldName] = &gql.Field{ - Type: gql.NewList(schemaTypes.CommitObject), + Description: versionFieldDescription, + Type: gql.NewList(schemaTypes.CommitObject), } // add _deleted field - fields[request.DeletedFieldName] = &gql.Field{Type: gql.Boolean} + fields[request.DeletedFieldName] = &gql.Field{ + Description: deletedFieldDescription, + Type: gql.Boolean, + } gqlType, ok := g.manager.schema.TypeMap()[collection.Name] if !ok { @@ -414,7 +441,8 @@ func (g *Generator) buildTypes( } fields[request.GroupFieldName] = &gql.Field{ - Type: gql.NewList(gqlType), + Description: groupFieldDescription, + Type: gql.NewList(gqlType), } return fields, nil @@ -503,13 +531,14 @@ func (g *Generator) genAggregateFields(ctx context.Context) error { func genTopLevelCount(topLevelCountInputs map[string]*gql.InputObject) *gql.Field { topLevelCountField := gql.Field{ - Name: request.CountFieldName, - Type: gql.Int, - Args: gql.FieldConfigArgument{}, + Name: request.CountFieldName, + Description: schemaTypes.CountFieldDescription, + Type: gql.Int, + Args: gql.FieldConfigArgument{}, } for name, inputObject := range topLevelCountInputs { - topLevelCountField.Args[name] = schemaTypes.NewArgConfig(inputObject) + topLevelCountField.Args[name] = schemaTypes.NewArgConfig(inputObject, inputObject.Description()) } return &topLevelCountField @@ -517,20 +546,22 @@ func genTopLevelCount(topLevelCountInputs map[string]*gql.InputObject) *gql.Fiel func genTopLevelNumericAggregates(topLevelNumericAggInputs map[string]*gql.InputObject) []*gql.Field { 
topLevelSumField := gql.Field{ - Name: request.SumFieldName, - Type: gql.Float, - Args: gql.FieldConfigArgument{}, + Name: request.SumFieldName, + Description: schemaTypes.SumFieldDescription, + Type: gql.Float, + Args: gql.FieldConfigArgument{}, } topLevelAverageField := gql.Field{ - Name: request.AverageFieldName, - Type: gql.Float, - Args: gql.FieldConfigArgument{}, + Name: request.AverageFieldName, + Description: schemaTypes.AverageFieldDescription, + Type: gql.Float, + Args: gql.FieldConfigArgument{}, } for name, inputObject := range topLevelNumericAggInputs { - topLevelSumField.Args[name] = schemaTypes.NewArgConfig(inputObject) - topLevelAverageField.Args[name] = schemaTypes.NewArgConfig(inputObject) + topLevelSumField.Args[name] = schemaTypes.NewArgConfig(inputObject, inputObject.Description()) + topLevelAverageField.Args[name] = schemaTypes.NewArgConfig(inputObject, inputObject.Description()) } return []*gql.Field{&topLevelSumField, &topLevelAverageField} @@ -560,13 +591,14 @@ func (g *Generator) genCountFieldConfig(obj *gql.Object) (gql.Field, error) { } field := gql.Field{ - Name: request.CountFieldName, - Type: gql.Int, - Args: gql.FieldConfigArgument{}, + Name: request.CountFieldName, + Description: schemaTypes.CountFieldDescription, + Type: gql.Int, + Args: gql.FieldConfigArgument{}, } for name, inputObject := range childTypesByFieldName { - field.Args[name] = schemaTypes.NewArgConfig(inputObject) + field.Args[name] = schemaTypes.NewArgConfig(inputObject, inputObject.Description()) } return field, nil @@ -599,13 +631,14 @@ func (g *Generator) genSumFieldConfig(obj *gql.Object) (gql.Field, error) { } field := gql.Field{ - Name: request.SumFieldName, - Type: gql.Float, - Args: gql.FieldConfigArgument{}, + Name: request.SumFieldName, + Description: schemaTypes.SumFieldDescription, + Type: gql.Float, + Args: gql.FieldConfigArgument{}, } for name, inputObject := range childTypesByFieldName { - field.Args[name] = schemaTypes.NewArgConfig(inputObject) + field.Args[name] = schemaTypes.NewArgConfig(inputObject, inputObject.Description()) } return field, nil @@ -638,13 +671,14 @@ func (g *Generator) genAverageFieldConfig(obj *gql.Object) (gql.Field, error) { } field := gql.Field{ - Name: request.AverageFieldName, - Type: gql.Float, - Args: gql.FieldConfigArgument{}, + Name: request.AverageFieldName, + Description: schemaTypes.AverageFieldDescription, + Type: gql.Float, + Args: gql.FieldConfigArgument{}, } for name, inputObject := range childTypesByFieldName { - field.Args[name] = schemaTypes.NewArgConfig(inputObject) + field.Args[name] = schemaTypes.NewArgConfig(inputObject, inputObject.Description()) } return field, nil @@ -667,15 +701,15 @@ func (g *Generator) genNumericInlineArraySelectorObject(obj *gql.Object) []*gql. 
Fields: gql.InputObjectConfigFieldMap{ request.LimitClause: &gql.InputObjectFieldConfig{ Type: gql.Int, - Description: "The maximum number of child items to aggregate.", + Description: schemaTypes.LimitArgDescription, }, request.OffsetClause: &gql.InputObjectFieldConfig{ Type: gql.Int, - Description: "The index from which to start aggregating items.", + Description: schemaTypes.OffsetArgDescription, }, request.OrderClause: &gql.InputObjectFieldConfig{ Type: g.manager.schema.TypeMap()["Ordering"], - Description: "The order in which to aggregate items.", + Description: schemaTypes.OrderArgDescription, }, }, }) @@ -700,11 +734,11 @@ func (g *Generator) genCountBaseArgInputs(obj *gql.Object) *gql.InputObject { Fields: gql.InputObjectConfigFieldMap{ request.LimitClause: &gql.InputObjectFieldConfig{ Type: gql.Int, - Description: "The maximum number of child items to count.", + Description: schemaTypes.LimitArgDescription, }, request.OffsetClause: &gql.InputObjectFieldConfig{ Type: gql.Int, - Description: "The index from which to start counting items.", + Description: schemaTypes.OffsetArgDescription, }, }, }) @@ -728,11 +762,11 @@ func (g *Generator) genCountInlineArrayInputs(obj *gql.Object) []*gql.InputObjec Fields: gql.InputObjectConfigFieldMap{ request.LimitClause: &gql.InputObjectFieldConfig{ Type: gql.Int, - Description: "The maximum number of child items to count.", + Description: schemaTypes.LimitArgDescription, }, request.OffsetClause: &gql.InputObjectFieldConfig{ Type: gql.Int, - Description: "The index from which to start counting items.", + Description: schemaTypes.OffsetArgDescription, }, }, }) @@ -802,15 +836,15 @@ func (g *Generator) genNumericAggregateBaseArgInputs(obj *gql.Object) *gql.Input }, request.LimitClause: &gql.InputObjectFieldConfig{ Type: gql.Int, - Description: "The maximum number of child items to aggregate.", + Description: schemaTypes.LimitArgDescription, }, request.OffsetClause: &gql.InputObjectFieldConfig{ Type: gql.Int, - Description: "The index from which to start aggregating items.", + Description: schemaTypes.OffsetArgDescription, }, request.OrderClause: &gql.InputObjectFieldConfig{ Type: g.manager.schema.TypeMap()[genTypeName(obj, "OrderArg")], - Description: "The order in which to aggregate items.", + Description: schemaTypes.OrderArgDescription, }, }, nil } @@ -823,8 +857,9 @@ func (g *Generator) genNumericAggregateBaseArgInputs(obj *gql.Object) *gql.Input func appendCommitChildGroupField() { schemaTypes.CommitObject.Fields()[request.GroupFieldName] = &gql.FieldDefinition{ - Name: request.GroupFieldName, - Type: gql.NewList(schemaTypes.CommitObject), + Name: request.GroupFieldName, + Description: groupFieldDescription, + Type: gql.NewList(schemaTypes.CommitObject), } } @@ -888,11 +923,11 @@ func (g *Generator) genTypeMutationFields( func (g *Generator) genTypeMutationCreateField(obj *gql.Object) (*gql.Field, error) { field := &gql.Field{ - // @todo: Handle collection name from @collection directive - Name: "create_" + obj.Name(), - Type: obj, + Name: "create_" + obj.Name(), + Description: createDocumentDescription, + Type: obj, Args: gql.FieldConfigArgument{ - "data": schemaTypes.NewArgConfig(gql.String), + "data": schemaTypes.NewArgConfig(gql.String, createDataArgDescription), }, } return field, nil @@ -903,14 +938,14 @@ func (g *Generator) genTypeMutationUpdateField( filter *gql.InputObject, ) (*gql.Field, error) { field := &gql.Field{ - // @todo: Handle collection name from @collection directive - Name: "update_" + obj.Name(), - Type: gql.NewList(obj), 
+ Name: "update_" + obj.Name(), + Description: updateDocumentsDescription, + Type: gql.NewList(obj), Args: gql.FieldConfigArgument{ - "id": schemaTypes.NewArgConfig(gql.ID), - "ids": schemaTypes.NewArgConfig(gql.NewList(gql.ID)), - "filter": schemaTypes.NewArgConfig(filter), - "data": schemaTypes.NewArgConfig(gql.String), + "id": schemaTypes.NewArgConfig(gql.ID, updateIDArgDescription), + "ids": schemaTypes.NewArgConfig(gql.NewList(gql.ID), updateIDsArgDescription), + "filter": schemaTypes.NewArgConfig(filter, updateFilterArgDescription), + "data": schemaTypes.NewArgConfig(gql.String, updateDataArgDescription), }, } return field, nil @@ -921,20 +956,18 @@ func (g *Generator) genTypeMutationDeleteField( filter *gql.InputObject, ) (*gql.Field, error) { field := &gql.Field{ - // @todo: Handle collection name from @collection directive - Name: "delete_" + obj.Name(), - Type: gql.NewList(obj), + Name: "delete_" + obj.Name(), + Description: deleteDocumentsDescription, + Type: gql.NewList(obj), Args: gql.FieldConfigArgument{ - "id": schemaTypes.NewArgConfig(gql.ID), - "ids": schemaTypes.NewArgConfig(gql.NewList(gql.ID)), - "filter": schemaTypes.NewArgConfig(filter), - // "data": newArgConfig(gql.String), + "id": schemaTypes.NewArgConfig(gql.ID, deleteIDArgDescription), + "ids": schemaTypes.NewArgConfig(gql.NewList(gql.ID), deleteIDsArgDescription), + "filter": schemaTypes.NewArgConfig(filter, deleteFilterArgDescription), }, } return field, nil } -// enum {Type.Name}Fields { ... } func (g *Generator) genTypeFieldsEnum(obj *gql.Object) *gql.Enum { enumFieldsCfg := gql.EnumConfig{ Name: genTypeName(obj, "Fields"), @@ -959,15 +992,17 @@ func (g *Generator) genTypeFilterArgInput(obj *gql.Object) *gql.InputObject { func() (gql.InputObjectConfigFieldMap, error) { fields := gql.InputObjectConfigFieldMap{} - // conditionals - compoundListType := &gql.InputObjectFieldConfig{ - Type: gql.NewList(selfRefType), + fields["_and"] = &gql.InputObjectFieldConfig{ + Description: schemaTypes.AndOperatorDescription, + Type: gql.NewList(selfRefType), + } + fields["_or"] = &gql.InputObjectFieldConfig{ + Description: schemaTypes.OrOperatorDescription, + Type: gql.NewList(selfRefType), } - - fields["_and"] = compoundListType - fields["_or"] = compoundListType fields["_not"] = &gql.InputObjectFieldConfig{ - Type: selfRefType, + Description: schemaTypes.NotOperatorDescription, + Type: selfRefType, } // generate basic filter operator blocks @@ -1121,19 +1156,22 @@ func (g *Generator) genTypeQueryableFieldList( g.manager.schema.TypeMap()[config.order.Name()] = config.order field := &gql.Field{ - // @todo: Handle collection name from @collection directive - Name: name, - Type: gql.NewList(obj), + Name: name, + Description: obj.Description(), + Type: gql.NewList(obj), Args: gql.FieldConfigArgument{ - "dockey": schemaTypes.NewArgConfig(gql.String), - "dockeys": schemaTypes.NewArgConfig(gql.NewList(gql.NewNonNull(gql.String))), - "cid": schemaTypes.NewArgConfig(gql.String), - "filter": schemaTypes.NewArgConfig(config.filter), - "groupBy": schemaTypes.NewArgConfig(gql.NewList(gql.NewNonNull(config.groupBy))), - "order": schemaTypes.NewArgConfig(config.order), - request.ShowDeleted: schemaTypes.NewArgConfig(gql.Boolean), - request.LimitClause: schemaTypes.NewArgConfig(gql.Int), - request.OffsetClause: schemaTypes.NewArgConfig(gql.Int), + "dockey": schemaTypes.NewArgConfig(gql.String, dockeyArgDescription), + "dockeys": schemaTypes.NewArgConfig(gql.NewList(gql.NewNonNull(gql.String)), dockeysArgDescription), + "cid": 
schemaTypes.NewArgConfig(gql.String, cidArgDescription), + "filter": schemaTypes.NewArgConfig(config.filter, selectFilterArgDescription), + "groupBy": schemaTypes.NewArgConfig( + gql.NewList(gql.NewNonNull(config.groupBy)), + schemaTypes.GroupByArgDescription, + ), + "order": schemaTypes.NewArgConfig(config.order, schemaTypes.OrderArgDescription), + request.ShowDeleted: schemaTypes.NewArgConfig(gql.Boolean, showDeletedArgDescription), + request.LimitClause: schemaTypes.NewArgConfig(gql.Int, schemaTypes.LimitArgDescription), + request.OffsetClause: schemaTypes.NewArgConfig(gql.Int, schemaTypes.OffsetArgDescription), }, } diff --git a/request/graphql/schema/manager.go b/request/graphql/schema/manager.go index 9d6313c384..045a4e33ae 100644 --- a/request/graphql/schema/manager.go +++ b/request/graphql/schema/manager.go @@ -159,7 +159,6 @@ func defaultTypes() []gql.Type { schemaTypes.CommitsOrderArg, schemaTypes.CommitLinkObject, schemaTypes.CommitObject, - schemaTypes.DeltaObject, schemaTypes.ExplainEnum, } diff --git a/request/graphql/schema/types/base.go b/request/graphql/schema/types/base.go index 896afe78fc..6a48e91349 100644 --- a/request/graphql/schema/types/base.go +++ b/request/graphql/schema/types/base.go @@ -16,262 +16,336 @@ import ( // BooleanOperatorBlock filter block for boolean types. var BooleanOperatorBlock = gql.NewInputObject(gql.InputObjectConfig{ - Name: "BooleanOperatorBlock", + Name: "BooleanOperatorBlock", + Description: booleanOperatorBlockDescription, Fields: gql.InputObjectConfigFieldMap{ "_eq": &gql.InputObjectFieldConfig{ - Type: gql.Boolean, + Description: eqOperatorDescription, + Type: gql.Boolean, }, "_ne": &gql.InputObjectFieldConfig{ - Type: gql.Boolean, + Description: neOperatorDescription, + Type: gql.Boolean, }, "_in": &gql.InputObjectFieldConfig{ - Type: gql.NewList(gql.Boolean), + Description: inOperatorDescription, + Type: gql.NewList(gql.Boolean), }, "_nin": &gql.InputObjectFieldConfig{ - Type: gql.NewList(gql.Boolean), + Description: ninOperatorDescription, + Type: gql.NewList(gql.Boolean), }, }, }) // NotNullBooleanOperatorBlock filter block for boolean! types. var NotNullBooleanOperatorBlock = gql.NewInputObject(gql.InputObjectConfig{ - Name: "NotNullBooleanOperatorBlock", + Name: "NotNullBooleanOperatorBlock", + Description: notNullBooleanOperatorBlockDescription, Fields: gql.InputObjectConfigFieldMap{ "_eq": &gql.InputObjectFieldConfig{ - Type: gql.Boolean, + Description: eqOperatorDescription, + Type: gql.Boolean, }, "_ne": &gql.InputObjectFieldConfig{ - Type: gql.Boolean, + Description: neOperatorDescription, + Type: gql.Boolean, }, "_in": &gql.InputObjectFieldConfig{ - Type: gql.NewList(gql.NewNonNull(gql.Boolean)), + Description: inOperatorDescription, + Type: gql.NewList(gql.NewNonNull(gql.Boolean)), }, "_nin": &gql.InputObjectFieldConfig{ - Type: gql.NewList(gql.NewNonNull(gql.Boolean)), + Description: ninOperatorDescription, + Type: gql.NewList(gql.NewNonNull(gql.Boolean)), }, }, }) // DateTimeOperatorBlock filter block for DateTime types. 
var DateTimeOperatorBlock = gql.NewInputObject(gql.InputObjectConfig{ - Name: "DateTimeOperatorBlock", + Name: "DateTimeOperatorBlock", + Description: dateTimeOperatorBlockDescription, Fields: gql.InputObjectConfigFieldMap{ "_eq": &gql.InputObjectFieldConfig{ - Type: gql.DateTime, + Description: eqOperatorDescription, + Type: gql.DateTime, }, "_ne": &gql.InputObjectFieldConfig{ - Type: gql.DateTime, + Description: neOperatorDescription, + Type: gql.DateTime, }, "_gt": &gql.InputObjectFieldConfig{ - Type: gql.DateTime, + Description: gtOperatorDescription, + Type: gql.DateTime, }, "_ge": &gql.InputObjectFieldConfig{ - Type: gql.DateTime, + Description: geOperatorDescription, + Type: gql.DateTime, }, "_lt": &gql.InputObjectFieldConfig{ - Type: gql.DateTime, + Description: ltOperatorDescription, + Type: gql.DateTime, }, "_le": &gql.InputObjectFieldConfig{ - Type: gql.DateTime, + Description: leOperatorDescription, + Type: gql.DateTime, }, "_in": &gql.InputObjectFieldConfig{ - Type: gql.NewList(gql.DateTime), + Description: inOperatorDescription, + Type: gql.NewList(gql.DateTime), }, "_nin": &gql.InputObjectFieldConfig{ - Type: gql.NewList(gql.DateTime), + Description: ninOperatorDescription, + Type: gql.NewList(gql.DateTime), }, }, }) // FloatOperatorBlock filter block for Float types. var FloatOperatorBlock = gql.NewInputObject(gql.InputObjectConfig{ - Name: "FloatOperatorBlock", + Name: "FloatOperatorBlock", + Description: floatOperatorBlockDescription, Fields: gql.InputObjectConfigFieldMap{ "_eq": &gql.InputObjectFieldConfig{ - Type: gql.Float, + Description: eqOperatorDescription, + Type: gql.Float, }, "_ne": &gql.InputObjectFieldConfig{ - Type: gql.Float, + Description: neOperatorDescription, + Type: gql.Float, }, "_gt": &gql.InputObjectFieldConfig{ - Type: gql.Float, + Description: gtOperatorDescription, + Type: gql.Float, }, "_ge": &gql.InputObjectFieldConfig{ - Type: gql.Float, + Description: geOperatorDescription, + Type: gql.Float, }, "_lt": &gql.InputObjectFieldConfig{ - Type: gql.Float, + Description: ltOperatorDescription, + Type: gql.Float, }, "_le": &gql.InputObjectFieldConfig{ - Type: gql.Float, + Description: leOperatorDescription, + Type: gql.Float, }, "_in": &gql.InputObjectFieldConfig{ - Type: gql.NewList(gql.Float), + Description: inOperatorDescription, + Type: gql.NewList(gql.Float), }, "_nin": &gql.InputObjectFieldConfig{ - Type: gql.NewList(gql.Float), + Description: ninOperatorDescription, + Type: gql.NewList(gql.Float), }, }, }) // NotNullFloatOperatorBlock filter block for Float! types. 
var NotNullFloatOperatorBlock = gql.NewInputObject(gql.InputObjectConfig{ - Name: "NotNullFloatOperatorBlock", + Name: "NotNullFloatOperatorBlock", + Description: notNullFloatOperatorBlockDescription, Fields: gql.InputObjectConfigFieldMap{ "_eq": &gql.InputObjectFieldConfig{ - Type: gql.Float, + Description: eqOperatorDescription, + Type: gql.Float, }, "_ne": &gql.InputObjectFieldConfig{ - Type: gql.Float, + Description: neOperatorDescription, + Type: gql.Float, }, "_gt": &gql.InputObjectFieldConfig{ - Type: gql.Float, + Description: gtOperatorDescription, + Type: gql.Float, }, "_ge": &gql.InputObjectFieldConfig{ - Type: gql.Float, + Description: geOperatorDescription, + Type: gql.Float, }, "_lt": &gql.InputObjectFieldConfig{ - Type: gql.Float, + Description: ltOperatorDescription, + Type: gql.Float, }, "_le": &gql.InputObjectFieldConfig{ - Type: gql.Float, + Description: leOperatorDescription, + Type: gql.Float, }, "_in": &gql.InputObjectFieldConfig{ - Type: gql.NewList(gql.NewNonNull(gql.Float)), + Description: inOperatorDescription, + Type: gql.NewList(gql.NewNonNull(gql.Float)), }, "_nin": &gql.InputObjectFieldConfig{ - Type: gql.NewList(gql.NewNonNull(gql.Float)), + Description: ninOperatorDescription, + Type: gql.NewList(gql.NewNonNull(gql.Float)), }, }, }) // IntOperatorBlock filter block for Int types. var IntOperatorBlock = gql.NewInputObject(gql.InputObjectConfig{ - Name: "IntOperatorBlock", + Name: "IntOperatorBlock", + Description: intOperatorBlockDescription, Fields: gql.InputObjectConfigFieldMap{ "_eq": &gql.InputObjectFieldConfig{ - Type: gql.Int, + Description: eqOperatorDescription, + Type: gql.Int, }, "_ne": &gql.InputObjectFieldConfig{ - Type: gql.Int, + Description: neOperatorDescription, + Type: gql.Int, }, "_gt": &gql.InputObjectFieldConfig{ - Type: gql.Int, + Description: gtOperatorDescription, + Type: gql.Int, }, "_ge": &gql.InputObjectFieldConfig{ - Type: gql.Int, + Description: geOperatorDescription, + Type: gql.Int, }, "_lt": &gql.InputObjectFieldConfig{ - Type: gql.Int, + Description: ltOperatorDescription, + Type: gql.Int, }, "_le": &gql.InputObjectFieldConfig{ - Type: gql.Int, + Description: leOperatorDescription, + Type: gql.Int, }, "_in": &gql.InputObjectFieldConfig{ - Type: gql.NewList(gql.Int), + Description: inOperatorDescription, + Type: gql.NewList(gql.Int), }, "_nin": &gql.InputObjectFieldConfig{ - Type: gql.NewList(gql.Int), + Description: ninOperatorDescription, + Type: gql.NewList(gql.Int), }, }, }) // NotNullIntOperatorBlock filter block for Int! types. 
var NotNullIntOperatorBlock = gql.NewInputObject(gql.InputObjectConfig{ - Name: "NotNullIntOperatorBlock", + Name: "NotNullIntOperatorBlock", + Description: notNullIntOperatorBlockDescription, Fields: gql.InputObjectConfigFieldMap{ "_eq": &gql.InputObjectFieldConfig{ - Type: gql.Int, + Description: eqOperatorDescription, + Type: gql.Int, }, "_ne": &gql.InputObjectFieldConfig{ - Type: gql.Int, + Description: neOperatorDescription, + Type: gql.Int, }, "_gt": &gql.InputObjectFieldConfig{ - Type: gql.Int, + Description: gtOperatorDescription, + Type: gql.Int, }, "_ge": &gql.InputObjectFieldConfig{ - Type: gql.Int, + Description: geOperatorDescription, + Type: gql.Int, }, "_lt": &gql.InputObjectFieldConfig{ - Type: gql.Int, + Description: ltOperatorDescription, + Type: gql.Int, }, "_le": &gql.InputObjectFieldConfig{ - Type: gql.Int, + Description: leOperatorDescription, + Type: gql.Int, }, "_in": &gql.InputObjectFieldConfig{ - Type: gql.NewList(gql.NewNonNull(gql.Int)), + Description: inOperatorDescription, + Type: gql.NewList(gql.NewNonNull(gql.Int)), }, "_nin": &gql.InputObjectFieldConfig{ - Type: gql.NewList(gql.NewNonNull(gql.Int)), + Description: ninOperatorDescription, + Type: gql.NewList(gql.NewNonNull(gql.Int)), }, }, }) // StringOperatorBlock filter block for string types. var StringOperatorBlock = gql.NewInputObject(gql.InputObjectConfig{ - Name: "StringOperatorBlock", + Name: "StringOperatorBlock", + Description: stringOperatorBlockDescription, Fields: gql.InputObjectConfigFieldMap{ "_eq": &gql.InputObjectFieldConfig{ - Type: gql.String, + Description: eqOperatorDescription, + Type: gql.String, }, "_ne": &gql.InputObjectFieldConfig{ - Type: gql.String, + Description: neOperatorDescription, + Type: gql.String, }, "_in": &gql.InputObjectFieldConfig{ - Type: gql.NewList(gql.String), + Description: inOperatorDescription, + Type: gql.NewList(gql.String), }, "_nin": &gql.InputObjectFieldConfig{ - Type: gql.NewList(gql.String), + Description: ninOperatorDescription, + Type: gql.NewList(gql.String), }, "_like": &gql.InputObjectFieldConfig{ - Type: gql.String, + Description: likeStringOperatorDescription, + Type: gql.String, }, "_nlike": &gql.InputObjectFieldConfig{ - Type: gql.String, + Description: nlikeStringOperatorDescription, + Type: gql.String, }, }, }) // NotNullstringOperatorBlock filter block for string! types. var NotNullstringOperatorBlock = gql.NewInputObject(gql.InputObjectConfig{ - Name: "NotNullStringOperatorBlock", + Name: "NotNullStringOperatorBlock", + Description: notNullStringOperatorBlockDescription, Fields: gql.InputObjectConfigFieldMap{ "_eq": &gql.InputObjectFieldConfig{ - Type: gql.String, + Description: eqOperatorDescription, + Type: gql.String, }, "_ne": &gql.InputObjectFieldConfig{ - Type: gql.String, + Description: neOperatorDescription, + Type: gql.String, }, "_in": &gql.InputObjectFieldConfig{ - Type: gql.NewList(gql.NewNonNull(gql.String)), + Description: inOperatorDescription, + Type: gql.NewList(gql.NewNonNull(gql.String)), }, "_nin": &gql.InputObjectFieldConfig{ - Type: gql.NewList(gql.NewNonNull(gql.String)), + Description: ninOperatorDescription, + Type: gql.NewList(gql.NewNonNull(gql.String)), }, "_like": &gql.InputObjectFieldConfig{ - Type: gql.String, + Description: likeStringOperatorDescription, + Type: gql.String, }, "_nlike": &gql.InputObjectFieldConfig{ - Type: gql.String, + Description: nlikeStringOperatorDescription, + Type: gql.String, }, }, }) // IdOperatorBlock filter block for ID types. 
var IdOperatorBlock = gql.NewInputObject(gql.InputObjectConfig{ - Name: "IDOperatorBlock", + Name: "IDOperatorBlock", + Description: idOperatorBlockDescription, Fields: gql.InputObjectConfigFieldMap{ "_eq": &gql.InputObjectFieldConfig{ - Type: gql.ID, + Description: eqOperatorDescription, + Type: gql.ID, }, "_ne": &gql.InputObjectFieldConfig{ - Type: gql.ID, + Description: neOperatorDescription, + Type: gql.ID, }, "_in": &gql.InputObjectFieldConfig{ - Type: gql.NewList(gql.NewNonNull(gql.ID)), + Description: inOperatorDescription, + Type: gql.NewList(gql.NewNonNull(gql.ID)), }, "_nin": &gql.InputObjectFieldConfig{ - Type: gql.NewList(gql.NewNonNull(gql.ID)), + Description: ninOperatorDescription, + Type: gql.NewList(gql.NewNonNull(gql.ID)), }, }, }) diff --git a/request/graphql/schema/types/commits.go b/request/graphql/schema/types/commits.go index 3fe7b6e572..69f5af025e 100644 --- a/request/graphql/schema/types/commits.go +++ b/request/graphql/schema/types/commits.go @@ -19,9 +19,13 @@ import ( var ( // Helper only for `commit` below. commitCountFieldArg = gql.NewEnum(gql.EnumConfig{ - Name: "commitCountFieldArg", + Name: "commitCountFieldArg", + Description: CountFieldDescription, Values: gql.EnumValueConfigMap{ - "links": &gql.EnumValueConfig{Value: "links"}, + "links": &gql.EnumValueConfig{ + Description: commitLinksDescription, + Value: "links", + }, }, }) @@ -40,31 +44,40 @@ var ( // Any self referential type needs to be initialized // inside the init() func CommitObject = gql.NewObject(gql.ObjectConfig{ - Name: request.CommitTypeName, + Name: request.CommitTypeName, + Description: commitDescription, Fields: gql.Fields{ "height": &gql.Field{ - Type: gql.Int, + Description: commitHeightFieldDescription, + Type: gql.Int, }, "cid": &gql.Field{ - Type: gql.String, + Description: commitCIDFieldDescription, + Type: gql.String, }, "dockey": &gql.Field{ - Type: gql.String, + Description: commitDockeyFieldDescription, + Type: gql.String, }, "collectionID": &gql.Field{ - Type: gql.Int, + Description: commitCollectionIDFieldDescription, + Type: gql.Int, }, "schemaVersionId": &gql.Field{ - Type: gql.String, + Description: commitSchemaVersionIDFieldDescription, + Type: gql.String, }, "delta": &gql.Field{ - Type: gql.String, + Description: commitDeltaFieldDescription, + Type: gql.String, }, "links": &gql.Field{ - Type: gql.NewList(CommitLinkObject), + Description: commitLinksDescription, + Type: gql.NewList(CommitLinkObject), }, "_count": &gql.Field{ - Type: gql.Int, + Description: CountFieldDescription, + Type: gql.Int, Args: gql.FieldConfigArgument{ "field": &gql.ArgumentConfig{ Type: commitCountFieldArg, @@ -74,48 +87,43 @@ var ( }, }) - // Delta represents a Delta State update for a CRDT - // type Delta { - // Payload: String - // } - DeltaObject = gql.NewObject(gql.ObjectConfig{ - Name: "Delta", - Fields: gql.Fields{ - "payload": &gql.Field{ - Type: gql.String, - }, - }, - }) - // CommitLink is a named DAG link between commits. 
// This is primary used for CompositeDAG CRDTs CommitLinkObject = gql.NewObject(gql.ObjectConfig{ - Name: "CommitLink", + Name: "CommitLink", + Description: commitLinksDescription, Fields: gql.Fields{ "name": &gql.Field{ - Type: gql.String, + Description: commitLinkNameFieldDescription, + Type: gql.String, }, "cid": &gql.Field{ - Type: gql.String, + Description: commitLinkCIDFieldDescription, + Type: gql.String, }, }, }) CommitsOrderArg = gql.NewInputObject( gql.InputObjectConfig{ - Name: "commitsOrderArg", + Name: "commitsOrderArg", + Description: OrderArgDescription, Fields: gql.InputObjectConfigFieldMap{ "height": &gql.InputObjectFieldConfig{ - Type: OrderingEnum, + Description: commitHeightFieldDescription, + Type: OrderingEnum, }, "cid": &gql.InputObjectFieldConfig{ - Type: OrderingEnum, + Description: commitCIDFieldDescription, + Type: OrderingEnum, }, "dockey": &gql.InputObjectFieldConfig{ - Type: OrderingEnum, + Description: commitDockeyFieldDescription, + Type: OrderingEnum, }, "collectionID": &gql.InputObjectFieldConfig{ - Type: OrderingEnum, + Description: commitCollectionIDFieldDescription, + Type: OrderingEnum, }, }, }, @@ -123,43 +131,59 @@ var ( commitFields = gql.NewEnum( gql.EnumConfig{ - Name: "commitFields", + Name: "commitFields", + Description: commitFieldsEnumDescription, Values: gql.EnumValueConfigMap{ - "height": &gql.EnumValueConfig{Value: "height"}, - "cid": &gql.EnumValueConfig{Value: "cid"}, - "dockey": &gql.EnumValueConfig{Value: "dockey"}, - "collectionID": &gql.EnumValueConfig{Value: "collectionID"}, + "height": &gql.EnumValueConfig{ + Value: "height", + Description: commitHeightFieldDescription, + }, + "cid": &gql.EnumValueConfig{ + Value: "cid", + Description: commitCIDFieldDescription, + }, + "dockey": &gql.EnumValueConfig{ + Value: "dockey", + Description: commitDockeyFieldDescription, + }, + "collectionID": &gql.EnumValueConfig{ + Value: "collectionID", + Description: commitCollectionIDFieldDescription, + }, }, }, ) QueryCommits = &gql.Field{ - Name: "commits", - Type: gql.NewList(CommitObject), + Name: "commits", + Description: commitsQueryDescription, + Type: gql.NewList(CommitObject), Args: gql.FieldConfigArgument{ - "dockey": NewArgConfig(gql.ID), - "field": NewArgConfig(gql.String), - "order": NewArgConfig(CommitsOrderArg), - "cid": NewArgConfig(gql.ID), + "dockey": NewArgConfig(gql.ID, commitDockeyArgDescription), + "field": NewArgConfig(gql.String, commitFieldArgDescription), + "order": NewArgConfig(CommitsOrderArg, OrderArgDescription), + "cid": NewArgConfig(gql.ID, commitCIDArgDescription), "groupBy": NewArgConfig( gql.NewList( gql.NewNonNull( commitFields, ), ), + GroupByArgDescription, ), - request.LimitClause: NewArgConfig(gql.Int), - request.OffsetClause: NewArgConfig(gql.Int), - request.DepthClause: NewArgConfig(gql.Int), + request.LimitClause: NewArgConfig(gql.Int, LimitArgDescription), + request.OffsetClause: NewArgConfig(gql.Int, OffsetArgDescription), + request.DepthClause: NewArgConfig(gql.Int, commitDepthArgDescription), }, } QueryLatestCommits = &gql.Field{ - Name: "latestCommits", - Type: gql.NewList(CommitObject), + Name: "latestCommits", + Description: latestCommitsQueryDescription, + Type: gql.NewList(CommitObject), Args: gql.FieldConfigArgument{ - "dockey": NewArgConfig(gql.NewNonNull(gql.ID)), - "field": NewArgConfig(gql.String), + "dockey": NewArgConfig(gql.NewNonNull(gql.ID), commitDockeyArgDescription), + "field": NewArgConfig(gql.String, commitFieldArgDescription), }, } ) diff --git 
a/request/graphql/schema/types/descriptions.go b/request/graphql/schema/types/descriptions.go new file mode 100644 index 0000000000..7f51753969 --- /dev/null +++ b/request/graphql/schema/types/descriptions.go @@ -0,0 +1,223 @@ +// Copyright 2022 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package types + +const ( + OrderArgDescription string = ` +An optional set of field-orders which may be used to sort the results. An + empty set will be ignored. +` + GroupByArgDescription string = ` +An optional set of fields by which to group the contents of this field. + If this argument is provided, only fields used to group may be rendered in + the immediate child selector. Additional fields may be selected by using + the '_group' selector within the immediate child selector. If an empty set + is provided, the restrictions mentioned still apply, although all results + will appear within the same group. +` + LimitArgDescription string = ` +An optional value that caps the number of results to the number provided. + A limit of zero will be ignored. +` + OffsetArgDescription string = ` +An optional value that skips the given number of results that would have + otherwise been returned. Commonly used alongside the 'limit' argument, + this argument will still work on its own. +` + commitDescription string = ` +Commit represents an individual commit to a MerkleCRDT; every mutation to a + document will result in a new commit per modified field, and one composite + commit composed of the field-level commits and, in the case of an update, + the prior composite commit. +` + commitDockeyArgDescription string = ` +An optional dockey parameter for this commit query. Only commits for a document + with a matching dockey will be returned. If no documents match, the result + set will be empty. +` + commitFieldArgDescription string = ` +An optional field ID parameter for this commit query. Only commits for a field + matching this ID will be returned. Specifying 'C' will limit the results to + composite (document level) commits only; otherwise field IDs are numeric. If no + fields match, the result set will be empty. +` + commitCIDArgDescription string = ` +An optional value that specifies the commit ID of the commits to return. If a + matching commit is not found, then an empty set will be returned. +` + commitDepthArgDescription string = ` +An optional value that specifies the maximum depth to which the commit DAG + should be traversed from matching commits. +` + commitLinksDescription string = ` +Child commits in the DAG that contribute to the composition of this commit. + Composite commits will link to the field commits for the fields modified during + the single mutation. +` + commitHeightFieldDescription string = ` +Height represents the location of the commit in the DAG. All commits (composite + and field level) on create will have a height of '1'; each subsequent local update + will increment this by one for the new commits. +` + commitCIDFieldDescription string = ` +The unique CID of this commit, and the primary means through which to safely identify + a specific commit. +` + commitDockeyFieldDescription string = ` +The dockey of the document that this commit is for. +`
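Illustrative sketch only, not part of the patch itself: a commits query exercising the arguments described above might look as follows (the dockey value is a placeholder; 'C' selects composite commits as noted in commitFieldArgDescription):

    query {
      commits(dockey: "bae-123", field: "C", depth: 1, order: {height: DESC}) {
        cid
        height
        links {
          name
          cid
        }
      }
    }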
+ commitCollectionIDFieldDescription string = ` +The ID of the collection that this commit was committed against. +` + commitSchemaVersionIDFieldDescription string = ` +The ID of the schema version that this commit was committed against. This ID allows one + to determine the state of the data model at the time of commit. +` + commitDeltaFieldDescription string = ` +The CBOR-encoded representation of the value that is saved as part of this commit. +` + commitLinkNameFieldDescription string = ` +The name of the field that this linked commit mutated. +` + commitLinkCIDFieldDescription string = ` +The CID of this linked commit. +` + commitFieldsEnumDescription string = ` +The set of fields by which a commits query may be grouped. +` + commitsQueryDescription string = ` +Returns a set of commits matching any provided criteria. If no arguments are + provided, all commits in the system will be returned. +` + latestCommitsQueryDescription string = ` +Returns a set of head commits matching any provided criteria. If no arguments are + provided, all head commits in the system will be returned. If no 'field' argument + is provided, only composite commits will be returned. This is equivalent to + a 'commits' query with Depth: 1 and a differing 'field' default value. +` + CountFieldDescription string = ` +Returns the total number of items within the specified child sets. If multiple child + sets are specified, the combined total of all of them will be returned as a single value. +` + SumFieldDescription string = ` +Returns the total sum of the specified field values within the specified child sets. If + multiple fields/sets are specified, the combined sum of all of them will be returned as + a single value. +` + AverageFieldDescription string = ` +Returns the average of the specified field values within the specified child sets. If + multiple fields/sets are specified, the combined average of all items within each set + (true average, not an average of averages) will be returned as a single value. +` + booleanOperatorBlockDescription string = ` +The set of filter operators available for use when filtering on Boolean + values. +` + notNullBooleanOperatorBlockDescription string = ` +The set of filter operators available for use when filtering on Boolean! + values. +` + dateTimeOperatorBlockDescription string = ` +The set of filter operators available for use when filtering on DateTime + values. +` + floatOperatorBlockDescription string = ` +The set of filter operators available for use when filtering on Float + values. +` + notNullFloatOperatorBlockDescription string = ` +The set of filter operators available for use when filtering on Float! + values. +` + intOperatorBlockDescription string = ` +The set of filter operators available for use when filtering on Int + values. +` + notNullIntOperatorBlockDescription string = ` +The set of filter operators available for use when filtering on Int! + values. +` + stringOperatorBlockDescription string = ` +The set of filter operators available for use when filtering on String + values. +` + notNullStringOperatorBlockDescription string = ` +The set of filter operators available for use when filtering on String! + values. +` + idOperatorBlockDescription string = ` +The set of filter operators available for use when filtering on ID + values. +`
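Illustrative sketch only, not part of the patch itself: these operator blocks are nested under field names inside the generated FilterArg inputs, so a filter over the User collection from the integration tests below might look like:

    query {
      User(filter: {_or: [{name: {_like: "%Ritchie"}}, {age: {_gt: 21}}]}) {
        name
        age
      }
    }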
+ eqOperatorDescription string = ` +The equality operator - if the target matches the value, the check will pass. +` + neOperatorDescription string = ` +The inequality operator - if the target does not match the value, the check will pass. +` + inOperatorDescription string = ` +The contains operator - if the target value is within the given set, the check will pass. +` + ninOperatorDescription string = ` +The not-contains operator - if the target value is not within the given set, the + check will pass. +` + gtOperatorDescription string = ` +The greater than operator - if the target value is greater than the given value, the + check will pass. +` + geOperatorDescription string = ` +The greater than or equal to operator - if the target value is greater than or equal to the + given value, the check will pass. +` + ltOperatorDescription string = ` +The less than operator - if the target value is less than the given value, the check will pass. +` + leOperatorDescription string = ` +The less than or equal to operator - if the target value is less than or equal to the + given value, the check will pass. +` + likeStringOperatorDescription string = ` +The like operator - if the target value contains the given sub-string, the check will pass. '%' + characters may be used as wildcards, for example '_like: "%Ritchie"' would match on strings + ending in 'Ritchie'. +` + nlikeStringOperatorDescription string = ` +The not-like operator - if the target value does not contain the given sub-string, the check will + pass. '%' characters may be used as wildcards, for example '_nlike: "%Ritchie"' would match on + the string 'Quentin Tarantino'. +` + AndOperatorDescription string = ` +The and operator - all checks within this clause must pass in order for this check to pass. +` + OrOperatorDescription string = ` +The or operator - at least one check within this clause must pass in order for this check to pass. +` + NotOperatorDescription string = ` +The negative operator - this check will only pass if all checks within it fail. +` + ascOrderDescription string = ` +Sort the results in ascending order, e.g. null,1,2,3,a,b,c. +` + descOrderDescription string = ` +Sort the results in descending order, e.g. c,b,a,3,2,1,null. +` + primaryDirectiveDescription string = ` +Indicate the primary side of a one-to-one relationship. +` + relationDirectiveDescription string = ` +Allows the explicit definition of relationship attributes instead of using the system-generated + defaults. +` + relationDirectiveNameArgDescription string = ` +Explicitly define the name of the relationship instead of using the system-generated defaults. +` +) diff --git a/request/graphql/schema/types/types.go b/request/graphql/schema/types/types.go index dbbb09a0b9..ae01b30255 100644 --- a/request/graphql/schema/types/types.go +++ b/request/graphql/schema/types/types.go @@ -30,10 +30,12 @@ var ( Name: "Ordering", Values: gql.EnumValueConfigMap{ "ASC": &gql.EnumValueConfig{ - Value: 0, + Description: ascOrderDescription, + Value: 0, }, "DESC": &gql.EnumValueConfig{ - Value: 1, + Description: descOrderDescription, + Value: 1, }, }, }) @@ -74,7 +76,8 @@ var ( // PrimaryDirective @primary is used to indicate the primary // side of a one-to-one relationship. PrimaryDirective = gql.NewDirective(gql.DirectiveConfig{ - Name: PrimaryLabel, + Name: PrimaryLabel, + Description: primaryDirectiveDescription, Locations: []string{ gql.DirectiveLocationFieldDefinition, }, @@ -85,10 +88,12 @@ var ( // if you don't want to use the default generated relationship // name.
RelationDirective = gql.NewDirective(gql.DirectiveConfig{ - Name: RelationLabel, + Name: RelationLabel, + Description: relationDirectiveDescription, Args: gql.FieldConfigArgument{ "name": &gql.ArgumentConfig{ - Type: gql.String, + Description: relationDirectiveNameArgDescription, + Type: gql.String, }, }, Locations: []string{ @@ -97,8 +102,9 @@ var ( }) ) -func NewArgConfig(t gql.Type) *gql.ArgumentConfig { +func NewArgConfig(t gql.Type, description string) *gql.ArgumentConfig { return &gql.ArgumentConfig{ - Type: t, + Type: t, + Description: description, } } diff --git a/tests/integration/mutation/simple/delete/multi_ids_test.go b/tests/integration/mutation/simple/delete/multi_ids_test.go index 479938f0ef..72d1026f4e 100644 --- a/tests/integration/mutation/simple/delete/multi_ids_test.go +++ b/tests/integration/mutation/simple/delete/multi_ids_test.go @@ -405,3 +405,61 @@ func TestDeletionOfMultipleDocumentsUsingSingleKeyWithShowDeletedDocumentQuery_S testUtils.ExecuteTestCase(t, []string{"User"}, test) } + +func TestDeletionOfMultipleDocumentsUsingEmptySet(t *testing.T) { + test := testUtils.TestCase{ + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type User { + name: String + age: Int + } + `, + }, + testUtils.CreateDoc{ + CollectionID: 0, + Doc: `{ + "name": "John", + "age": 43 + }`, + }, + testUtils.CreateDoc{ + CollectionID: 0, + Doc: `{ + "name": "Andy", + "age": 74 + }`, + }, + testUtils.Request{ + Request: `mutation { + delete_User(ids: []){ + _key + } + }`, + Results: []map[string]any{}, + }, + testUtils.Request{ + // Make sure no documents have been deleted + Request: `query { + User { + name + age + } + }`, + Results: []map[string]any{ + { + "name": "Andy", + "age": uint64(74), + }, + { + "name": "John", + "age": uint64(43), + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, []string{"User"}, test) +} diff --git a/tests/integration/mutation/simple/update/with_filter_test.go b/tests/integration/mutation/simple/update/with_filter_test.go index c61bd3513a..d844c35337 100644 --- a/tests/integration/mutation/simple/update/with_filter_test.go +++ b/tests/integration/mutation/simple/update/with_filter_test.go @@ -231,3 +231,29 @@ func TestSimpleMutationUpdateWithIdEqualsFilter(t *testing.T) { ExecuteTestCase(t, test) } + +func TestSimpleMutationUpdateWithNonExistantId(t *testing.T) { + test := testUtils.RequestTestCase{ + Description: "Simple update mutation with non existant id", + Request: `mutation { + update_user(id: "bae-does-not-exist", data: "{\"points\": 59}") { + _key + name + points + } + }`, + Docs: map[int][]string{ + 0: { + `{ + "name": "John", + "age": 27, + "verified": true, + "points": 42.1 + }`, + }, + }, + Results: []map[string]any{}, + } + + ExecuteTestCase(t, test) +} diff --git a/tests/integration/query/simple/with_group_test.go b/tests/integration/query/simple/with_group_test.go index c4ec5317c0..2388959181 100644 --- a/tests/integration/query/simple/with_group_test.go +++ b/tests/integration/query/simple/with_group_test.go @@ -16,6 +16,45 @@ import ( testUtils "github.com/sourcenetwork/defradb/tests/integration" ) +func TestQuerySimpleWithGroupByEmpty(t *testing.T) { + test := testUtils.RequestTestCase{ + Description: "Simple query with group by empty set, children", + Request: `query { + users(groupBy: []) { + _group { + Name + } + } + }`, + Docs: map[int][]string{ + 0: { + `{ + "Name": "John", + "Age": 32 + }`, + `{ + "Name": "Bob", + "Age": 32 + }`, + }, + }, + Results: []map[string]any{ + { + "_group": []map[string]any{ + { + "Name": "Bob", + 
}, + { + "Name": "John", + }, + }, + }, + }, + } + + executeTestCase(t, test) +} + func TestQuerySimpleWithGroupByNumber(t *testing.T) { test := testUtils.RequestTestCase{ Description: "Simple query with group by number, no children", diff --git a/tests/integration/query/simple/with_limit_offset_test.go b/tests/integration/query/simple/with_limit_offset_test.go index f91440d2fc..3d056fa50d 100644 --- a/tests/integration/query/simple/with_limit_offset_test.go +++ b/tests/integration/query/simple/with_limit_offset_test.go @@ -16,6 +16,39 @@ import ( testUtils "github.com/sourcenetwork/defradb/tests/integration" ) +func TestQuerySimpleWithLimit0(t *testing.T) { + test := testUtils.RequestTestCase{ + Description: "Simple query with limit 0", + Request: `query { + users(limit: 0) { + Name + } + }`, + Docs: map[int][]string{ + 0: { + `{ + "Name": "John", + "Age": 21 + }`, + `{ + "Name": "Bob", + "Age": 32 + }`, + }, + }, + Results: []map[string]any{ + { + "Name": "Bob", + }, + { + "Name": "John", + }, + }, + } + + executeTestCase(t, test) +} + func TestQuerySimpleWithLimit(t *testing.T) { tests := []testUtils.RequestTestCase{ { diff --git a/tests/integration/query/simple/with_order_test.go b/tests/integration/query/simple/with_order_test.go index c4bacb86ff..16d36cd5e1 100644 --- a/tests/integration/query/simple/with_order_test.go +++ b/tests/integration/query/simple/with_order_test.go @@ -16,6 +16,49 @@ import ( testUtils "github.com/sourcenetwork/defradb/tests/integration" ) +func TestQuerySimpleWithEmptyOrder(t *testing.T) { + test := testUtils.RequestTestCase{ + Description: "Simple query with empty order", + Request: `query { + users(order: {}) { + Name + } + }`, + Docs: map[int][]string{ + 0: { + `{ + "Name": "John", + "Age": 21 + }`, + `{ + "Name": "Bob", + "Age": 32 + }`, + `{ + "Name": "Carlo", + "Age": 55 + }`, + }, + }, + Results: []map[string]any{ + { + "Name": "Bob", + "Age": uint64(32), + }, + { + "Name": "John", + "Age": uint64(21), + }, + { + "Name": "Carlo", + "Age": uint64(55), + }, + }, + } + + executeTestCase(t, test) +} + func TestQuerySimpleWithNumericOrderAscending(t *testing.T) { test := testUtils.RequestTestCase{ Description: "Simple query with basic order ASC",