From b84579472c24d3deacaf1d1d4033440cc9a99a1d Mon Sep 17 00:00:00 2001 From: Islam Aleiv Date: Mon, 11 Dec 2023 12:02:59 +0100 Subject: [PATCH 01/15] Make fields lowercase --- tests/integration/index/create_drop_test.go | 16 +++++----- tests/integration/index/create_get_test.go | 10 +++--- tests/integration/index/create_test.go | 34 ++++++++++----------- tests/integration/index/drop_test.go | 16 +++++----- tests/integration/index/get_test.go | 4 +-- 5 files changed, 40 insertions(+), 40 deletions(-) diff --git a/tests/integration/index/create_drop_test.go b/tests/integration/index/create_drop_test.go index e9f27bfe5e..6bb9da4dbb 100644 --- a/tests/integration/index/create_drop_test.go +++ b/tests/integration/index/create_drop_test.go @@ -23,8 +23,8 @@ func TestIndexDrop_ShouldNotHinderQuerying(t *testing.T) { testUtils.SchemaUpdate{ Schema: ` type Users { - Name: String @index - Age: Int + name: String @index + age: Int } `, }, @@ -33,8 +33,8 @@ func TestIndexDrop_ShouldNotHinderQuerying(t *testing.T) { // bae-52b9170d-b77a-5887-b877-cbdbb99b009f Doc: ` { - "Name": "John", - "Age": 21 + "name": "John", + "age": 21 }`, }, testUtils.DropIndex{ @@ -45,14 +45,14 @@ func TestIndexDrop_ShouldNotHinderQuerying(t *testing.T) { Request: ` query { Users { - Name - Age + name + age } }`, Results: []map[string]any{ { - "Name": "John", - "Age": int64(21), + "name": "John", + "age": int64(21), }, }, }, diff --git a/tests/integration/index/create_get_test.go b/tests/integration/index/create_get_test.go index 2e758bb637..c22b6ef99b 100644 --- a/tests/integration/index/create_get_test.go +++ b/tests/integration/index/create_get_test.go @@ -23,9 +23,9 @@ func TestIndexGet_ShouldReturnListOfExistingIndexes(t *testing.T) { Actions: []any{ testUtils.SchemaUpdate{ Schema: ` - type Users @index(name: "age_index", fields: ["Age"]) { - Name: String @index(name: "name_index") - Age: Int + type Users @index(name: "age_index", fields: ["age"]) { + name: String @index(name: "name_index") + age: Int } `, }, @@ -37,7 +37,7 @@ func TestIndexGet_ShouldReturnListOfExistingIndexes(t *testing.T) { ID: 1, Fields: []client.IndexedFieldDescription{ { - Name: "Name", + Name: "name", Direction: client.Ascending, }, }, @@ -47,7 +47,7 @@ func TestIndexGet_ShouldReturnListOfExistingIndexes(t *testing.T) { ID: 2, Fields: []client.IndexedFieldDescription{ { - Name: "Age", + Name: "age", Direction: client.Ascending, }, }, diff --git a/tests/integration/index/create_test.go b/tests/integration/index/create_test.go index 692b329079..8aed37bfc8 100644 --- a/tests/integration/index/create_test.go +++ b/tests/integration/index/create_test.go @@ -24,8 +24,8 @@ func TestIndexCreateWithCollection_ShouldNotHinderQuerying(t *testing.T) { testUtils.SchemaUpdate{ Schema: ` type Users { - Name: String @index - Age: Int + name: String @index + age: Int } `, }, @@ -34,22 +34,22 @@ func TestIndexCreateWithCollection_ShouldNotHinderQuerying(t *testing.T) { // bae-52b9170d-b77a-5887-b877-cbdbb99b009f Doc: ` { - "Name": "John", - "Age": 21 + "name": "John", + "age": 21 }`, }, testUtils.Request{ Request: ` query { Users { - Name - Age + name + age } }`, Results: []map[string]any{ { - "Name": "John", - "Age": int64(21), + "name": "John", + "age": int64(21), }, }, }, @@ -66,8 +66,8 @@ func TestIndexCreate_ShouldNotHinderQuerying(t *testing.T) { testUtils.SchemaUpdate{ Schema: ` type Users { - Name: String - Age: Int + name: String + age: Int } `, }, @@ -76,27 +76,27 @@ func TestIndexCreate_ShouldNotHinderQuerying(t *testing.T) { // 
bae-52b9170d-b77a-5887-b877-cbdbb99b009f Doc: ` { - "Name": "John", - "Age": 21 + "name": "John", + "age": 21 }`, }, testUtils.CreateIndex{ CollectionID: 0, IndexName: "some_index", - FieldName: "Name", + FieldName: "name", }, testUtils.Request{ Request: ` query { Users { - Name - Age + name + age } }`, Results: []map[string]any{ { - "Name": "John", - "Age": int64(21), + "name": "John", + "age": int64(21), }, }, }, diff --git a/tests/integration/index/drop_test.go b/tests/integration/index/drop_test.go index ab03e1df50..eabe89ca2f 100644 --- a/tests/integration/index/drop_test.go +++ b/tests/integration/index/drop_test.go @@ -23,8 +23,8 @@ func TestIndexDrop_IfIndexDoesNotExist_ReturnError(t *testing.T) { testUtils.SchemaUpdate{ Schema: ` type Users { - Name: String - Age: Int + name: String + age: Int } `, }, @@ -33,8 +33,8 @@ func TestIndexDrop_IfIndexDoesNotExist_ReturnError(t *testing.T) { // bae-52b9170d-b77a-5887-b877-cbdbb99b009f Doc: ` { - "Name": "John", - "Age": 21 + "name": "John", + "age": 21 }`, }, testUtils.DropIndex{ @@ -46,14 +46,14 @@ func TestIndexDrop_IfIndexDoesNotExist_ReturnError(t *testing.T) { Request: ` query { Users { - Name - Age + name + age } }`, Results: []map[string]any{ { - "Name": "John", - "Age": int64(21), + "name": "John", + "age": int64(21), }, }, }, diff --git a/tests/integration/index/get_test.go b/tests/integration/index/get_test.go index 09308a51cf..a5ca4ddf5a 100644 --- a/tests/integration/index/get_test.go +++ b/tests/integration/index/get_test.go @@ -24,8 +24,8 @@ func TestIndexGet_IfThereAreNoIndexes_ReturnEmptyList(t *testing.T) { testUtils.SchemaUpdate{ Schema: ` type Users { - Name: String - Age: Int + name: String + age: Int } `, }, From 26b83b7f98e0a3aa80ce6d297d64a6b65bec9429 Mon Sep 17 00:00:00 2001 From: Islam Aleiv Date: Mon, 11 Dec 2023 12:07:09 +0100 Subject: [PATCH 02/15] Unique index parsing --- client/index.go | 2 + request/graphql/schema/collection.go | 12 +++ .../{index_test.go => index_parse_test.go} | 77 +++++++++++++++++-- request/graphql/schema/types/types.go | 1 + 4 files changed, 87 insertions(+), 5 deletions(-) rename request/graphql/schema/{index_test.go => index_parse_test.go} (80%) diff --git a/client/index.go b/client/index.go index 69f0362017..5e2d397394 100644 --- a/client/index.go +++ b/client/index.go @@ -36,6 +36,8 @@ type IndexDescription struct { ID uint32 // Fields contains the fields that are being indexed. Fields []IndexedFieldDescription + // Unique indicates whether the index is unique. + Unique bool } // CollectIndexedFields returns all fields that are indexed by all collection indexes. 
diff --git a/request/graphql/schema/collection.go b/request/graphql/schema/collection.go index ed77a9d614..fd4c354a32 100644 --- a/request/graphql/schema/collection.go +++ b/request/graphql/schema/collection.go @@ -186,6 +186,12 @@ func fieldIndexFromAST(field *ast.FieldDefinition, directive *ast.Directive) (cl if !IsValidIndexName(desc.Name) { return client.IndexDescription{}, NewErrIndexWithInvalidName(desc.Name) } + case types.IndexDirectivePropUnique: + boolVal, ok := arg.Value.(*ast.BooleanValue) + if !ok { + return client.IndexDescription{}, ErrIndexWithInvalidArg + } + desc.Unique = boolVal.Value default: return client.IndexDescription{}, ErrIndexWithUnknownArg } @@ -227,6 +233,12 @@ func indexFromAST(directive *ast.Directive) (client.IndexDescription, error) { if !ok { return client.IndexDescription{}, ErrIndexWithInvalidArg } + case types.IndexDirectivePropUnique: + boolVal, ok := arg.Value.(*ast.BooleanValue) + if !ok { + return client.IndexDescription{}, ErrIndexWithInvalidArg + } + desc.Unique = boolVal.Value default: return client.IndexDescription{}, ErrIndexWithUnknownArg } diff --git a/request/graphql/schema/index_test.go b/request/graphql/schema/index_parse_test.go similarity index 80% rename from request/graphql/schema/index_test.go rename to request/graphql/schema/index_parse_test.go index 155a17fbf6..ca1ce32696 100644 --- a/request/graphql/schema/index_test.go +++ b/request/graphql/schema/index_parse_test.go @@ -19,7 +19,7 @@ import ( "github.com/sourcenetwork/defradb/client" ) -func TestStructIndex(t *testing.T) { +func TestParseIndexOnStruct(t *testing.T) { cases := []indexTestCase{ { description: "Index with a single field", @@ -30,6 +30,7 @@ func TestStructIndex(t *testing.T) { Fields: []client.IndexedFieldDescription{ {Name: "name", Direction: client.Ascending}, }, + Unique: false, }, }, }, @@ -45,6 +46,30 @@ func TestStructIndex(t *testing.T) { }, }, }, + { + description: "Unique index", + sdl: `type user @index(fields: ["name"], unique: true) {}`, + targetDescriptions: []client.IndexDescription{ + { + Fields: []client.IndexedFieldDescription{ + {Name: "name", Direction: client.Ascending}, + }, + Unique: true, + }, + }, + }, + { + description: "Index explicitly not unique", + sdl: `type user @index(fields: ["name"], unique: false) {}`, + targetDescriptions: []client.IndexDescription{ + { + Fields: []client.IndexedFieldDescription{ + {Name: "name", Direction: client.Ascending}, + }, + Unique: false, + }, + }, + }, { description: "Index with explicit ascending field", sdl: `type user @index(fields: ["name"], directions: [ASC]) {}`, @@ -96,11 +121,11 @@ func TestStructIndex(t *testing.T) { } } -func TestInvalidStructIndex(t *testing.T) { +func TestParseInvalidIndexOnStruct(t *testing.T) { cases := []invalidIndexTestCase{ { description: "missing 'fields' argument", - sdl: `type user @index(name: "userIndex") {}`, + sdl: `type user @index(name: "userIndex", unique: true) {}`, expectedErr: errIndexMissingFields, }, { @@ -133,6 +158,11 @@ func TestInvalidStructIndex(t *testing.T) { sdl: `type user @index(name: "user!name", fields: ["name"]) {}`, expectedErr: errIndexInvalidArgument, }, + { + description: "invalid 'unique' value type", + sdl: `type user @index(fields: ["name"], unique: "true") {}`, + expectedErr: errIndexInvalidArgument, + }, { description: "invalid 'fields' value type (not a list)", sdl: `type user @index(fields: "name") {}`, @@ -175,7 +205,7 @@ func TestInvalidStructIndex(t *testing.T) { } } -func TestFieldIndex(t *testing.T) { +func 
TestParseIndexOnField(t *testing.T) { cases := []indexTestCase{ { description: "field index", @@ -188,6 +218,7 @@ func TestFieldIndex(t *testing.T) { Fields: []client.IndexedFieldDescription{ {Name: "name", Direction: client.Ascending}, }, + Unique: false, }, }, }, @@ -202,6 +233,35 @@ func TestFieldIndex(t *testing.T) { Fields: []client.IndexedFieldDescription{ {Name: "name", Direction: client.Ascending}, }, + Unique: false, + }, + }, + }, + { + description: "unique field index", + sdl: `type user { + name: String @index(unique: true) + }`, + targetDescriptions: []client.IndexDescription{ + { + Fields: []client.IndexedFieldDescription{ + {Name: "name", Direction: client.Ascending}, + }, + Unique: true, + }, + }, + }, + { + description: "field index explicitly not unique", + sdl: `type user { + name: String @index(unique: false) + }`, + targetDescriptions: []client.IndexDescription{ + { + Fields: []client.IndexedFieldDescription{ + {Name: "name", Direction: client.Ascending}, + }, + Unique: false, }, }, }, @@ -212,7 +272,7 @@ func TestFieldIndex(t *testing.T) { } } -func TestInvalidFieldIndex(t *testing.T) { +func TestParseInvalidIndexOnField(t *testing.T) { cases := []invalidIndexTestCase{ { description: "forbidden 'field' argument", @@ -263,6 +323,13 @@ func TestInvalidFieldIndex(t *testing.T) { }`, expectedErr: errIndexInvalidName, }, + { + description: "invalid 'unique' value type", + sdl: `type user { + name: String @index(unique: "true") + }`, + expectedErr: errIndexInvalidArgument, + }, } for _, test := range cases { diff --git a/request/graphql/schema/types/types.go b/request/graphql/schema/types/types.go index c28ef566ea..065dadaa6d 100644 --- a/request/graphql/schema/types/types.go +++ b/request/graphql/schema/types/types.go @@ -26,6 +26,7 @@ const ( IndexDirectiveLabel = "index" IndexDirectivePropName = "name" + IndexDirectivePropUnique = "unique" IndexDirectivePropFields = "fields" IndexDirectivePropDirections = "directions" ) From 6acb7b24449f677f9ea3bfb7304cfddd92269377 Mon Sep 17 00:00:00 2001 From: Islam Aleiv Date: Mon, 11 Dec 2023 12:08:25 +0100 Subject: [PATCH 03/15] Query with unique index --- db/errors.go | 150 +++--- db/fetcher/indexer.go | 22 +- db/fetcher/indexer_iterators.go | 176 +++++-- db/index.go | 222 ++++++--- db/index_test.go | 9 +- db/indexed_docs_test.go | 4 +- tests/integration/index/create_unique_test.go | 189 +++++++ ...uery_with_unique_index_only_filter_test.go | 464 ++++++++++++++++++ tests/integration/test_case.go | 3 + 9 files changed, 1049 insertions(+), 190 deletions(-) create mode 100644 tests/integration/index/create_unique_test.go create mode 100644 tests/integration/index/query_with_unique_index_only_filter_test.go diff --git a/db/errors.go b/db/errors.go index 17e82c6738..353a82868e 100644 --- a/db/errors.go +++ b/db/errors.go @@ -16,76 +16,77 @@ import ( ) const ( - errFailedToGetHeads string = "failed to get document heads" - errFailedToCreateCollectionQuery string = "failed to create collection prefix query" - errFailedToGetCollection string = "failed to get collection" - errFailedToGetAllCollections string = "failed to get all collections" - errDocVerification string = "the document verification failed" - errAddingP2PCollection string = "cannot add collection ID" - errRemovingP2PCollection string = "cannot remove collection ID" - errAddCollectionWithPatch string = "unknown collection, adding collections via patch is not supported" - errCollectionIDDoesntMatch string = "CollectionID does not match existing" - errSchemaRootDoesntMatch 
string = "SchemaRoot does not match existing" - errCannotModifySchemaName string = "modifying the schema name is not supported" - errCannotSetVersionID string = "setting the VersionID is not supported. It is updated automatically" - errCannotSetFieldID string = "explicitly setting a field ID value is not supported" - errRelationalFieldMissingSchema string = "a `Schema` [name] must be provided when adding a new relation field" - errRelationalFieldInvalidRelationType string = "invalid RelationType" - errRelationalFieldMissingIDField string = "missing id field for relation object field" - errRelationalFieldMissingRelationName string = "missing relation name" - errPrimarySideNotDefined string = "primary side of relation not defined" - errPrimarySideOnMany string = "cannot set the many side of a relation as primary" - errBothSidesPrimary string = "both sides of a relation cannot be primary" - errRelatedFieldKindMismatch string = "invalid Kind of the related field" - errRelatedFieldRelationTypeMismatch string = "invalid RelationType of the related field" - errRelationalFieldIDInvalidType string = "relational id field of invalid kind" - errDuplicateField string = "duplicate field" - errCannotMutateField string = "mutating an existing field is not supported" - errCannotMoveField string = "moving fields is not currently supported" - errInvalidCRDTType string = "only default or LWW (last writer wins) CRDT types are supported" - errCannotDeleteField string = "deleting an existing field is not supported" - errFieldKindNotFound string = "no type found for given name" - errFieldKindDoesNotMatchFieldSchema string = "field Kind does not match field Schema" - errSchemaNotFound string = "no schema found for given name" - errDocumentAlreadyExists string = "a document with the given dockey already exists" - errDocumentDeleted string = "a document with the given dockey has been deleted" - errIndexMissingFields string = "index missing fields" - errNonZeroIndexIDProvided string = "non-zero index ID provided" - errIndexFieldMissingName string = "index field missing name" - errIndexFieldMissingDirection string = "index field missing direction" - errIndexSingleFieldWrongDirection string = "wrong direction for index with a single field" - errIndexWithNameAlreadyExists string = "index with name already exists" - errInvalidStoredIndex string = "invalid stored index" - errInvalidStoredIndexKey string = "invalid stored index key" - errNonExistingFieldForIndex string = "creating an index on a non-existing property" - errCollectionDoesntExisting string = "collection with given name doesn't exist" - errFailedToStoreIndexedField string = "failed to store indexed field" - errFailedToReadStoredIndexDesc string = "failed to read stored index description" - errCanNotDeleteIndexedField string = "can not delete indexed field" - errCanNotAddIndexWithPatch string = "adding indexes via patch is not supported" - errCanNotDropIndexWithPatch string = "dropping indexes via patch is not supported" - errCanNotChangeIndexWithPatch string = "changing indexes via patch is not supported" - errIndexWithNameDoesNotExists string = "index with name doesn't exists" - errCorruptedIndex string = "corrupted index. 
Please delete and recreate the index" - errInvalidFieldValue string = "invalid field value" - errUnsupportedIndexFieldType string = "unsupported index field type" - errIndexDescriptionHasNoFields string = "index description has no fields" - errIndexDescHasNonExistingField string = "index description has non existing field" - errFieldOrAliasToFieldNotExist string = "The given field or alias to field does not exist" - errCreateFile string = "failed to create file" - errOpenFile string = "failed to open file" - errCloseFile string = "failed to close file" - errRemoveFile string = "failed to remove file" - errFailedToReadByte string = "failed to read byte" - errFailedToWriteString string = "failed to write string" - errJSONDecode string = "failed to decode JSON" - errDocFromMap string = "failed to create a new doc from map" - errDocCreate string = "failed to save a new doc to collection" - errDocUpdate string = "failed to update doc to collection" - errExpectedJSONObject string = "expected JSON object" - errExpectedJSONArray string = "expected JSON array" - errOneOneAlreadyLinked string = "target document is already linked to another document" - errIndexDoesNotMatchName string = "the index used does not match the given name" + errFailedToGetHeads string = "failed to get document heads" + errFailedToCreateCollectionQuery string = "failed to create collection prefix query" + errFailedToGetCollection string = "failed to get collection" + errFailedToGetAllCollections string = "failed to get all collections" + errDocVerification string = "the document verification failed" + errAddingP2PCollection string = "cannot add collection ID" + errRemovingP2PCollection string = "cannot remove collection ID" + errAddCollectionWithPatch string = "unknown collection, adding collections via patch is not supported" + errCollectionIDDoesntMatch string = "CollectionID does not match existing" + errSchemaRootDoesntMatch string = "SchemaRoot does not match existing" + errCannotModifySchemaName string = "modifying the schema name is not supported" + errCannotSetVersionID string = "setting the VersionID is not supported. 
It is updated automatically" + errCannotSetFieldID string = "explicitly setting a field ID value is not supported" + errRelationalFieldMissingSchema string = "a `Schema` [name] must be provided when adding a new relation field" + errRelationalFieldInvalidRelationType string = "invalid RelationType" + errRelationalFieldMissingIDField string = "missing id field for relation object field" + errRelationalFieldMissingRelationName string = "missing relation name" + errPrimarySideNotDefined string = "primary side of relation not defined" + errPrimarySideOnMany string = "cannot set the many side of a relation as primary" + errBothSidesPrimary string = "both sides of a relation cannot be primary" + errRelatedFieldKindMismatch string = "invalid Kind of the related field" + errRelatedFieldRelationTypeMismatch string = "invalid RelationType of the related field" + errRelationalFieldIDInvalidType string = "relational id field of invalid kind" + errDuplicateField string = "duplicate field" + errCannotMutateField string = "mutating an existing field is not supported" + errCannotMoveField string = "moving fields is not currently supported" + errInvalidCRDTType string = "only default or LWW (last writer wins) CRDT types are supported" + errCannotDeleteField string = "deleting an existing field is not supported" + errFieldKindNotFound string = "no type found for given name" + errFieldKindDoesNotMatchFieldSchema string = "field Kind does not match field Schema" + errSchemaNotFound string = "no schema found for given name" + errDocumentAlreadyExists string = "a document with the given dockey already exists" + errDocumentDeleted string = "a document with the given dockey has been deleted" + errIndexMissingFields string = "index missing fields" + errNonZeroIndexIDProvided string = "non-zero index ID provided" + errIndexFieldMissingName string = "index field missing name" + errIndexFieldMissingDirection string = "index field missing direction" + errIndexSingleFieldWrongDirection string = "wrong direction for index with a single field" + errIndexWithNameAlreadyExists string = "index with name already exists" + errInvalidStoredIndex string = "invalid stored index" + errInvalidStoredIndexKey string = "invalid stored index key" + errNonExistingFieldForIndex string = "creating an index on a non-existing property" + errCollectionDoesntExisting string = "collection with given name doesn't exist" + errFailedToStoreIndexedField string = "failed to store indexed field" + errFailedToReadStoredIndexDesc string = "failed to read stored index description" + errCanNotDeleteIndexedField string = "can not delete indexed field" + errCanNotAddIndexWithPatch string = "adding indexes via patch is not supported" + errCanNotDropIndexWithPatch string = "dropping indexes via patch is not supported" + errCanNotChangeIndexWithPatch string = "changing indexes via patch is not supported" + errIndexWithNameDoesNotExists string = "index with name doesn't exists" + errCorruptedIndex string = "corrupted index. 
Please delete and recreate the index" + errInvalidFieldValue string = "invalid field value" + errUnsupportedIndexFieldType string = "unsupported index field type" + errIndexDescriptionHasNoFields string = "index description has no fields" + errIndexDescHasNonExistingField string = "index description has non existing field" + errFieldOrAliasToFieldNotExist string = "The given field or alias to field does not exist" + errCreateFile string = "failed to create file" + errOpenFile string = "failed to open file" + errCloseFile string = "failed to close file" + errRemoveFile string = "failed to remove file" + errFailedToReadByte string = "failed to read byte" + errFailedToWriteString string = "failed to write string" + errJSONDecode string = "failed to decode JSON" + errDocFromMap string = "failed to create a new doc from map" + errDocCreate string = "failed to save a new doc to collection" + errDocUpdate string = "failed to update doc to collection" + errExpectedJSONObject string = "expected JSON object" + errExpectedJSONArray string = "expected JSON array" + errOneOneAlreadyLinked string = "target document is already linked to another document" + errIndexDoesNotMatchName string = "the index used does not match the given name" + errCanNotIndexNonUniqueField string = "can not create doc that violates unique index" ) var ( @@ -631,3 +632,12 @@ func NewErrIndexDoesNotMatchName(index, name string) error { errors.NewKV("Name", name), ) } + +func NewErrCanNotIndexNonUniqueField(dockey, fieldName string, value any) error { + return errors.New( + errCanNotIndexNonUniqueField, + errors.NewKV("Dockey", dockey), + errors.NewKV("Field name", fieldName), + errors.NewKV("Field value", value), + ) +} diff --git a/db/fetcher/indexer.go b/db/fetcher/indexer.go index a0ee94d0b9..04ff33f2aa 100644 --- a/db/fetcher/indexer.go +++ b/db/fetcher/indexer.go @@ -32,6 +32,7 @@ type IndexFetcher struct { mapping *core.DocumentMapping indexedField client.FieldDescription docFields []client.FieldDescription + indexDesc client.IndexDescription indexIter indexIterator indexDataStoreKey core.IndexDataStoreKey execInfo ExecInfo @@ -70,6 +71,7 @@ func (f *IndexFetcher) Init( for _, index := range col.Description().Indexes { if index.Fields[0].Name == f.indexedField.Name { + f.indexDesc = index f.indexDataStoreKey.IndexID = index.ID break } @@ -84,7 +86,7 @@ func (f *IndexFetcher) Init( } } - iter, err := createIndexIterator(f.indexDataStoreKey, f.indexFilter, &f.execInfo) + iter, err := createIndexIterator(f.indexDataStoreKey, f.indexFilter, &f.execInfo, f.indexDesc.Unique) if err != nil { return err } @@ -112,28 +114,32 @@ func (f *IndexFetcher) FetchNext(ctx context.Context) (EncodedDocument, ExecInfo for { f.doc.Reset() - indexKey, hasValue, err := f.indexIter.Next() - if err != nil { - return nil, ExecInfo{}, err + res := f.indexIter.Next() + if res.err != nil { + return nil, ExecInfo{}, res.err } - if !hasValue { + if !res.foundKey { return nil, f.execInfo, nil } property := &encProperty{ Desc: f.indexedField, - Raw: indexKey.FieldValues[0], + Raw: res.key.FieldValues[0], } - f.doc.key = indexKey.FieldValues[1] + if f.indexDesc.Unique { + f.doc.key = res.value + } else { + f.doc.key = res.key.FieldValues[1] + } f.doc.properties[f.indexedField] = property f.execInfo.FieldsFetched++ if f.docFetcher != nil && len(f.docFields) > 0 { targetKey := base.MakeDocKey(f.col.Description(), string(f.doc.key)) spans := core.NewSpans(core.NewSpan(targetKey, targetKey.PrefixEnd())) - err = f.docFetcher.Start(ctx, spans) + err := 
f.docFetcher.Start(ctx, spans) if err != nil { return nil, ExecInfo{}, err } diff --git a/db/fetcher/indexer_iterators.go b/db/fetcher/indexer_iterators.go index b563c9b3a3..d892918644 100644 --- a/db/fetcher/indexer_iterators.go +++ b/db/fetcher/indexer_iterators.go @@ -45,45 +45,52 @@ const ( // For example, iteration over condition _eq and _gt will have completely different logic. type indexIterator interface { Init(context.Context, datastore.DSReaderWriter) error - Next() (core.IndexDataStoreKey, bool, error) + Next() indexIterResult Close() error } +type indexIterResult struct { + key core.IndexDataStoreKey + foundKey bool + value []byte + err error +} + type queryResultIterator struct { resultIter query.Results } -func (i queryResultIterator) Next() (core.IndexDataStoreKey, bool, error) { +func (i *queryResultIterator) Next() indexIterResult { res, hasVal := i.resultIter.NextSync() if res.Error != nil { - return core.IndexDataStoreKey{}, false, res.Error + return indexIterResult{err: res.Error} } if !hasVal { - return core.IndexDataStoreKey{}, false, nil + return indexIterResult{} } key, err := core.NewIndexDataStoreKey(res.Key) if err != nil { - return core.IndexDataStoreKey{}, false, err + return indexIterResult{err: err} } - return key, true, nil + return indexIterResult{key: key, value: res.Value, foundKey: true} } -func (i queryResultIterator) Close() error { +func (i *queryResultIterator) Close() error { return i.resultIter.Close() } -type eqIndexIterator struct { +type eqPrefixIndexIterator struct { + filterValueHolder + indexKey core.IndexDataStoreKey + execInfo *ExecInfo + queryResultIterator - indexKey core.IndexDataStoreKey - filterVal []byte - execInfo *ExecInfo } -func (i *eqIndexIterator) Init(ctx context.Context, store datastore.DSReaderWriter) error { - i.indexKey.FieldValues = [][]byte{i.filterVal} +func (i *eqPrefixIndexIterator) Init(ctx context.Context, store datastore.DSReaderWriter) error { + i.indexKey.FieldValues = [][]byte{i.value} resultIter, err := store.Query(ctx, query.Query{ - Prefix: i.indexKey.ToString(), - KeysOnly: true, + Prefix: i.indexKey.ToString(), }) if err != nil { return err @@ -92,16 +99,62 @@ func (i *eqIndexIterator) Init(ctx context.Context, store datastore.DSReaderWrit return nil } -func (i *eqIndexIterator) Next() (core.IndexDataStoreKey, bool, error) { - key, hasValue, err := i.queryResultIterator.Next() - if hasValue { +func (i *eqPrefixIndexIterator) Next() indexIterResult { + res := i.queryResultIterator.Next() + if res.foundKey { i.execInfo.IndexesFetched++ } - return key, hasValue, err + return res +} + +type filterValueIndexIterator interface { + indexIterator + SetFilterValue([]byte) +} + +type filterValueHolder struct { + value []byte +} + +func (h *filterValueHolder) SetFilterValue(value []byte) { + h.value = value +} + +type eqSingleIndexIterator struct { + filterValueHolder + indexKey core.IndexDataStoreKey + execInfo *ExecInfo + + ctx context.Context + store datastore.DSReaderWriter +} + +func (i *eqSingleIndexIterator) Init(ctx context.Context, store datastore.DSReaderWriter) error { + i.ctx = ctx + i.store = store + return nil +} + +func (i *eqSingleIndexIterator) Next() indexIterResult { + if i.store == nil { + return indexIterResult{} + } + i.indexKey.FieldValues = [][]byte{i.value} + val, err := i.store.Get(i.ctx, i.indexKey.ToDS()) + if err != nil { + return indexIterResult{err: err} + } + i.store = nil + i.execInfo.IndexesFetched++ + return indexIterResult{key: i.indexKey, value: val, foundKey: true} +} + +func (i 
*eqSingleIndexIterator) Close() error { + return nil } type inIndexIterator struct { - eqIndexIterator + filterValueIndexIterator filterValues [][]byte nextValIndex int ctx context.Context @@ -110,22 +163,18 @@ type inIndexIterator struct { } func newInIndexIterator( - indexKey core.IndexDataStoreKey, + indexIter filterValueIndexIterator, filterValues [][]byte, - execInfo *ExecInfo, ) *inIndexIterator { return &inIndexIterator{ - eqIndexIterator: eqIndexIterator{ - indexKey: indexKey, - execInfo: execInfo, - }, - filterValues: filterValues, + filterValueIndexIterator: indexIter, + filterValues: filterValues, } } func (i *inIndexIterator) nextIterator() (bool, error) { if i.nextValIndex > 0 { - err := i.eqIndexIterator.Close() + err := i.filterValueIndexIterator.Close() if err != nil { return false, err } @@ -135,8 +184,8 @@ func (i *inIndexIterator) nextIterator() (bool, error) { return false, nil } - i.filterVal = i.filterValues[i.nextValIndex] - err := i.eqIndexIterator.Init(i.ctx, i.store) + i.SetFilterValue(i.filterValues[i.nextValIndex]) + err := i.filterValueIndexIterator.Init(i.ctx, i.store) if err != nil { return false, err } @@ -152,22 +201,23 @@ func (i *inIndexIterator) Init(ctx context.Context, store datastore.DSReaderWrit return err } -func (i *inIndexIterator) Next() (core.IndexDataStoreKey, bool, error) { +func (i *inIndexIterator) Next() indexIterResult { for i.hasIterator { - key, hasValue, err := i.eqIndexIterator.Next() - if err != nil { - return core.IndexDataStoreKey{}, false, err + res := i.filterValueIndexIterator.Next() + if res.err != nil { + return res } - if !hasValue { + if !res.foundKey { + var err error i.hasIterator, err = i.nextIterator() if err != nil { - return core.IndexDataStoreKey{}, false, err + return indexIterResult{err: err} } continue } - return key, true, nil + return res } - return core.IndexDataStoreKey{}, false, nil + return indexIterResult{} } func (i *inIndexIterator) Close() error { @@ -220,9 +270,8 @@ func (i *scanningIndexIterator) Init(ctx context.Context, store datastore.DSRead i.filter.matcher = &execInfoIndexMatcherDecorator{matcher: i.matcher, execInfo: i.execInfo} iter, err := store.Query(ctx, query.Query{ - Prefix: i.indexKey.ToString(), - KeysOnly: true, - Filters: []query.Filter{&i.filter}, + Prefix: i.indexKey.ToString(), + Filters: []query.Filter{&i.filter}, }) if err != nil { return err @@ -232,12 +281,12 @@ func (i *scanningIndexIterator) Init(ctx context.Context, store datastore.DSRead return nil } -func (i *scanningIndexIterator) Next() (core.IndexDataStoreKey, bool, error) { - key, hasValue, err := i.queryResultIterator.Next() +func (i *scanningIndexIterator) Next() indexIterResult { + res := i.queryResultIterator.Next() if i.filter.err != nil { - return core.IndexDataStoreKey{}, false, i.filter.err + return indexIterResult{err: i.filter.err} } - return key, hasValue, err + return res } // checks if the stored index value satisfies the condition @@ -348,6 +397,7 @@ func createIndexIterator( indexDataStoreKey core.IndexDataStoreKey, indexFilterConditions *mapper.Filter, execInfo *ExecInfo, + isUnique bool, ) (indexIterator, error) { var op string var filterVal any @@ -373,11 +423,23 @@ func createIndexIterator( switch op { case opEq: - return &eqIndexIterator{ - indexKey: indexDataStoreKey, - filterVal: valueBytes, - execInfo: execInfo, - }, nil + if isUnique { + return &eqSingleIndexIterator{ + indexKey: indexDataStoreKey, + filterValueHolder: filterValueHolder{ + value: valueBytes, + }, + execInfo: execInfo, + }, nil + } 
else { + return &eqPrefixIndexIterator{ + indexKey: indexDataStoreKey, + filterValueHolder: filterValueHolder{ + value: valueBytes, + }, + execInfo: execInfo, + }, nil + } case opGt: return &scanningIndexIterator{ indexKey: indexDataStoreKey, @@ -438,7 +500,19 @@ func createIndexIterator( valArr = append(valArr, valueBytes) } if op == opIn { - return newInIndexIterator(indexDataStoreKey, valArr, execInfo), nil + var iter filterValueIndexIterator + if isUnique { + iter = &eqSingleIndexIterator{ + indexKey: indexDataStoreKey, + execInfo: execInfo, + } + } else { + iter = &eqPrefixIndexIterator{ + indexKey: indexDataStoreKey, + execInfo: execInfo, + } + } + return newInIndexIterator(iter, valArr), nil } else { return &scanningIndexIterator{ indexKey: indexDataStoreKey, diff --git a/db/index.go b/db/index.go index 5d43bddb21..804eac492e 100644 --- a/db/index.go +++ b/db/index.go @@ -90,44 +90,32 @@ func NewCollectionIndex( if len(desc.Fields) == 0 { return nil, NewErrIndexDescHasNoFields(desc) } - index := &collectionSimpleIndex{collection: collection, desc: desc} field, foundField := collection.Schema().GetField(desc.Fields[0].Name) if !foundField { return nil, NewErrIndexDescHasNonExistingField(desc, desc.Fields[0].Name) } - var e error - index.fieldDesc = field - index.validateFieldFunc, e = getFieldValidateFunc(field.Kind) - return index, e + base := collectionBaseIndex{collection: collection, desc: desc} + base.fieldDesc = field + var err error + base.validateFieldFunc, err = getFieldValidateFunc(field.Kind) + if err != nil { + return nil, err + } + if desc.Unique { + return &collectionUniqueIndex{collectionBaseIndex: base}, nil + } else { + return &collectionSimpleIndex{collectionBaseIndex: base}, nil + } } -// collectionSimpleIndex is an non-unique index that indexes documents by a single field. -// Single-field indexes store values only in ascending order. -type collectionSimpleIndex struct { +type collectionBaseIndex struct { collection client.Collection desc client.IndexDescription validateFieldFunc func(any) bool fieldDesc client.FieldDescription } -var _ CollectionIndex = (*collectionSimpleIndex)(nil) - -func (i *collectionSimpleIndex) getDocumentsIndexKey( - doc *client.Document, -) (core.IndexDataStoreKey, error) { - fieldValue, err := i.getDocFieldValue(doc) - if err != nil { - return core.IndexDataStoreKey{}, err - } - - indexDataStoreKey := core.IndexDataStoreKey{} - indexDataStoreKey.CollectionID = i.collection.ID() - indexDataStoreKey.IndexID = i.desc.ID - indexDataStoreKey.FieldValues = [][]byte{fieldValue, []byte(doc.Key().String())} - return indexDataStoreKey, nil -} - -func (i *collectionSimpleIndex) getDocFieldValue(doc *client.Document) ([]byte, error) { +func (i *collectionBaseIndex) getDocFieldValue(doc *client.Document) ([]byte, error) { // collectionSimpleIndex only supports single field indexes, that's why we // can safely access the first field indexedFieldName := i.desc.Fields[0].Name @@ -146,35 +134,26 @@ func (i *collectionSimpleIndex) getDocFieldValue(doc *client.Document) ([]byte, return writeableVal.Bytes() } -// Save indexes a document by storing the indexed field value. 
-func (i *collectionSimpleIndex) Save( - ctx context.Context, - txn datastore.Txn, +func (i *collectionBaseIndex) getDocumentsIndexKey( doc *client.Document, -) error { - key, err := i.getDocumentsIndexKey(doc) - if err != nil { - return err - } - err = txn.Datastore().Put(ctx, key.ToDS(), []byte{}) +) (core.IndexDataStoreKey, error) { + fieldValue, err := i.getDocFieldValue(doc) if err != nil { - return NewErrFailedToStoreIndexedField(key.ToDS().String(), err) + return core.IndexDataStoreKey{}, err } - return nil + + indexDataStoreKey := core.IndexDataStoreKey{} + indexDataStoreKey.CollectionID = i.collection.ID() + indexDataStoreKey.IndexID = i.desc.ID + indexDataStoreKey.FieldValues = [][]byte{fieldValue} + return indexDataStoreKey, nil } -// Update updates indexed field values of an existing document. -// It removes the old document from the index and adds the new one. -func (i *collectionSimpleIndex) Update( +func (i *collectionBaseIndex) deleteIndexKey( ctx context.Context, txn datastore.Txn, - oldDoc *client.Document, - newDoc *client.Document, + key core.IndexDataStoreKey, ) error { - key, err := i.getDocumentsIndexKey(oldDoc) - if err != nil { - return err - } exists, err := txn.Datastore().Has(ctx, key.ToDS()) if err != nil { return err @@ -182,16 +161,12 @@ func (i *collectionSimpleIndex) Update( if !exists { return NewErrCorruptedIndex(i.desc.Name) } - err = txn.Datastore().Delete(ctx, key.ToDS()) - if err != nil { - return err - } - return i.Save(ctx, txn, newDoc) + return txn.Datastore().Delete(ctx, key.ToDS()) } // RemoveAll remove all artifacts of the index from the storage, i.e. all index // field values for all documents. -func (i *collectionSimpleIndex) RemoveAll(ctx context.Context, txn datastore.Txn) error { +func (i *collectionBaseIndex) RemoveAll(ctx context.Context, txn datastore.Txn) error { prefixKey := core.IndexDataStoreKey{} prefixKey.CollectionID = i.collection.ID() prefixKey.IndexID = i.desc.ID @@ -212,11 +187,148 @@ func (i *collectionSimpleIndex) RemoveAll(ctx context.Context, txn datastore.Txn } // Name returns the name of the index -func (i *collectionSimpleIndex) Name() string { +func (i *collectionBaseIndex) Name() string { return i.desc.Name } // Description returns the description of the index -func (i *collectionSimpleIndex) Description() client.IndexDescription { +func (i *collectionBaseIndex) Description() client.IndexDescription { return i.desc } + +// collectionSimpleIndex is an non-unique index that indexes documents by a single field. +// Single-field indexes store values only in ascending order. +type collectionSimpleIndex struct { + collectionBaseIndex +} + +var _ CollectionIndex = (*collectionSimpleIndex)(nil) + +func (i *collectionSimpleIndex) getDocumentsIndexKey( + doc *client.Document, +) (core.IndexDataStoreKey, error) { + key, err := i.collectionBaseIndex.getDocumentsIndexKey(doc) + if err != nil { + return core.IndexDataStoreKey{}, err + } + + key.FieldValues = append(key.FieldValues, []byte(doc.Key().String())) + return key, nil +} + +// Save indexes a document by storing the indexed field value. 
+func (i *collectionSimpleIndex) Save( + ctx context.Context, + txn datastore.Txn, + doc *client.Document, +) error { + key, err := i.getDocumentsIndexKey(doc) + if err != nil { + return err + } + err = txn.Datastore().Put(ctx, key.ToDS(), []byte{}) + if err != nil { + return NewErrFailedToStoreIndexedField(key.ToDS().String(), err) + } + return nil +} + +func (i *collectionSimpleIndex) Update( + ctx context.Context, + txn datastore.Txn, + oldDoc *client.Document, + newDoc *client.Document, +) error { + err := i.deleteDocIndex(ctx, txn, oldDoc) + if err != nil { + return err + } + return i.Save(ctx, txn, newDoc) +} + +func (i *collectionSimpleIndex) deleteDocIndex( + ctx context.Context, + txn datastore.Txn, + doc *client.Document, +) error { + key, err := i.getDocumentsIndexKey(doc) + if err != nil { + return err + } + return i.deleteIndexKey(ctx, txn, key) +} + +type collectionUniqueIndex struct { + collectionBaseIndex +} + +var _ CollectionIndex = (*collectionUniqueIndex)(nil) + +func (i *collectionUniqueIndex) Save( + ctx context.Context, + txn datastore.Txn, + doc *client.Document, +) error { + key, err := i.getDocumentsIndexKey(doc) + if err != nil { + return err + } + exists, err := txn.Datastore().Has(ctx, key.ToDS()) + if err != nil { + return err + } + if exists { + return i.newUniqueIndexError(doc) + } + err = txn.Datastore().Put(ctx, key.ToDS(), []byte(doc.Key().String())) + if err != nil { + return NewErrFailedToStoreIndexedField(key.ToDS().String(), err) + } + return nil +} + +func (i *collectionUniqueIndex) newUniqueIndexError( + doc *client.Document, +) error { + fieldVal, err := doc.GetValue(i.fieldDesc.Name) + if err != nil { + return err + } + return NewErrCanNotIndexNonUniqueField(doc.Key().String(), i.fieldDesc.Name, fieldVal.Value()) +} + +func (i *collectionUniqueIndex) Update( + ctx context.Context, + txn datastore.Txn, + oldDoc *client.Document, + newDoc *client.Document, +) error { + newKey, err := i.getDocumentsIndexKey(newDoc) + if err != nil { + return err + } + exists, err := txn.Datastore().Has(ctx, newKey.ToDS()) + if err != nil { + return err + } + if exists { + return i.newUniqueIndexError(newDoc) + } + err = i.deleteDocIndex(ctx, txn, oldDoc) + if err != nil { + return err + } + return i.Save(ctx, txn, newDoc) +} + +func (i *collectionUniqueIndex) deleteDocIndex( + ctx context.Context, + txn datastore.Txn, + doc *client.Document, +) error { + key, err := i.getDocumentsIndexKey(doc) + if err != nil { + return err + } + return i.deleteIndexKey(ctx, txn, key) +} diff --git a/db/index_test.go b/db/index_test.go index e85fd9bfb4..06ecb9148b 100644 --- a/db/index_test.go +++ b/db/index_test.go @@ -58,7 +58,7 @@ type indexTestFixture struct { t *testing.T } -func (f *indexTestFixture) getUsersCollectionDesc() client.Collection { +func (f *indexTestFixture) addUsersCollection() client.Collection { _, err := f.db.AddSchema( f.ctx, fmt.Sprintf( @@ -129,7 +129,7 @@ func newIndexTestFixtureBare(t *testing.T) *indexTestFixture { func newIndexTestFixture(t *testing.T) *indexTestFixture { f := newIndexTestFixtureBare(t) - f.users = f.getUsersCollectionDesc() + f.users = f.addUsersCollection() return f } @@ -278,6 +278,7 @@ func TestCreateIndex_IfValidInput_CreateIndex(t *testing.T) { assert.NoError(t, err) assert.Equal(t, desc.Name, resultDesc.Name) assert.Equal(t, desc.Fields, resultDesc.Fields) + assert.Equal(t, desc.Unique, resultDesc.Unique) } func TestCreateIndex_IfFieldNameIsEmpty_ReturnError(t *testing.T) { @@ -414,7 +415,7 @@ func 
TestCreateIndex_IfPropertyDoesntExist_ReturnError(t *testing.T) { func TestCreateIndex_WithMultipleCollectionsAndIndexes_AssignIncrementedIDPerCollection(t *testing.T) { f := newIndexTestFixtureBare(t) - users := f.getUsersCollectionDesc() + users := f.addUsersCollection() products := f.getProductsCollectionDesc() makeIndex := func(fieldName string) client.IndexDescription { @@ -856,7 +857,7 @@ func TestCollectionGetIndexes_IfFailsToCreateTxn_ShouldNotCache(t *testing.T) { func TestCollectionGetIndexes_IfStoredIndexWithUnsupportedType_ReturnError(t *testing.T) { f := newIndexTestFixtureBare(t) - f.getUsersCollectionDesc() + f.addUsersCollection() const unsupportedKind = client.FieldKind_BOOL_ARRAY _, err := f.db.AddSchema( diff --git a/db/indexed_docs_test.go b/db/indexed_docs_test.go index bb569bdc6c..cf24a73e87 100644 --- a/db/indexed_docs_test.go +++ b/db/indexed_docs_test.go @@ -354,7 +354,7 @@ func TestNonUnique_IfIndexIntField_StoreIt(t *testing.T) { func TestNonUnique_IfMultipleCollectionsWithIndexes_StoreIndexWithCollectionID(t *testing.T) { f := newIndexTestFixtureBare(t) - users := f.getUsersCollectionDesc() + users := f.addUsersCollection() products := f.getProductsCollectionDesc() _, err := f.createCollectionIndexFor(users.Name(), getUsersIndexDescOnName()) @@ -631,7 +631,7 @@ func TestNonUniqueCreate_IfDatastoreFailsToStoreIndex_ReturnError(t *testing.T) func TestNonUniqueDrop_ShouldDeleteStoredIndexedFields(t *testing.T) { f := newIndexTestFixtureBare(t) - users := f.getUsersCollectionDesc() + users := f.addUsersCollection() _, err := f.createCollectionIndexFor(users.Name(), getUsersIndexDescOnName()) require.NoError(f.t, err) _, err = f.createCollectionIndexFor(users.Name(), getUsersIndexDescOnAge()) diff --git a/tests/integration/index/create_unique_test.go b/tests/integration/index/create_unique_test.go new file mode 100644 index 0000000000..66ca1dfd36 --- /dev/null +++ b/tests/integration/index/create_unique_test.go @@ -0,0 +1,189 @@ +// Copyright 2023 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. 
+ +package index + +import ( + "testing" + + "github.com/sourcenetwork/defradb/client" + "github.com/sourcenetwork/defradb/db" + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +const johnDockey = "bae-f54b9689-e06e-5e3a-89b3-f3aee8e64ca7" + +func TestCreateUniqueIndex_IfFieldValuesAreNotUnique_ReturnError(t *testing.T) { + + test := testUtils.TestCase{ + Description: "If field is not unique, creating of unique index fails", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type User { + name: String + age: Int + } + `, + }, + testUtils.CreateDoc{ + CollectionID: 0, + Doc: ` + { + "name": "John", + "age": 21 + }`, + }, + testUtils.CreateDoc{ + CollectionID: 0, + Doc: ` + { + "name": "Andy", + "age": 22 + }`, + }, + testUtils.CreateDoc{ + CollectionID: 0, + Doc: ` + { + "name": "Shahzad", + "age": 21 + }`, + }, + testUtils.CreateIndex{ + CollectionID: 0, + FieldName: "age", + Unique: true, + ExpectedError: db.NewErrCanNotIndexNonUniqueField(johnDockey, "age", 21).Error(), + }, + testUtils.GetIndexes{ + CollectionID: 0, + ExpectedIndexes: []client.IndexDescription{}, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestCreateUniqueIndex_UponAddingDocWithExistingFieldValue_ReturnError(t *testing.T) { + test := testUtils.TestCase{ + Description: "adding a new doc with existing value for indexed field should fail", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type User { + name: String + age: Int @index(unique: true, name: "age_unique_index") + } + `, + }, + testUtils.CreateDoc{ + CollectionID: 0, + Doc: ` + { + "name": "Shahzad", + "age": 21 + }`, + }, + testUtils.CreateDoc{ + CollectionID: 0, + Doc: ` + { + "name": "John", + "age": 21 + }`, + ExpectedError: db.NewErrCanNotIndexNonUniqueField(johnDockey, "age", 21).Error(), + }, + testUtils.Request{ + Request: `query { + User(filter: {name: {_eq: "John"}}) { + name + } + }`, + Results: []map[string]any{}, + }, + testUtils.GetIndexes{ + CollectionID: 0, + ExpectedIndexes: []client.IndexDescription{ + { + Name: "age_unique_index", + ID: 1, + Unique: true, + Fields: []client.IndexedFieldDescription{ + { + Name: "age", + Direction: client.Ascending, + }, + }, + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestCreateUniqueIndex_IfFieldValuesAreUnique_Succeed(t *testing.T) { + test := testUtils.TestCase{ + Description: "create unique index if all docs have unique field values", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + name: String + age: Int + } + `, + }, + testUtils.CreateDoc{ + CollectionID: 0, + Doc: ` + { + "name": "John", + "age": 21 + }`, + }, + testUtils.CreateDoc{ + CollectionID: 0, + Doc: ` + { + "name": "Shahzad", + "age": 22 + }`, + }, + testUtils.CreateIndex{ + CollectionID: 0, + IndexName: "age_unique_index", + FieldName: "age", + Unique: true, + }, + testUtils.GetIndexes{ + CollectionID: 0, + ExpectedIndexes: []client.IndexDescription{ + { + Name: "age_unique_index", + ID: 1, + Unique: true, + Fields: []client.IndexedFieldDescription{ + { + Name: "age", + Direction: client.Ascending, + }, + }, + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/index/query_with_unique_index_only_filter_test.go b/tests/integration/index/query_with_unique_index_only_filter_test.go new file mode 100644 index 0000000000..bba4028cd3 --- /dev/null +++ b/tests/integration/index/query_with_unique_index_only_filter_test.go @@ -0,0 +1,464 @@ +// Copyright 2023 Democratized Data Foundation +// +// Use of 
this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package index + +import ( + "testing" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + + +func TestQueryWithUniqueIndex_WithEqualFilter_ShouldFetch(t *testing.T) { + req := `query { + User(filter: {name: {_eq: "Islam"}}) { + name + } + }` + test := testUtils.TestCase{ + Description: "Test index filtering with _eq filter", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type User { + name: String @index(unique: true) + }`, + }, + testUtils.CreatePredefinedDocs{ + Docs: getUserDocs(), + }, + testUtils.Request{ + Request: req, + Results: []map[string]any{ + {"name": "Islam"}, + }, + }, + testUtils.Request{ + Request: makeExplainQuery(req), + Asserter: testUtils.NewExplainAsserter().WithDocFetches(1).WithFieldFetches(1).WithIndexFetches(1), + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestQueryWithUniqueIndex_WithGreaterThanFilter_ShouldFetch(t *testing.T) { + req := `query { + User(filter: {age: {_gt: 48}}) { + name + } + }` + test := testUtils.TestCase{ + Description: "Test index filtering with _gt filter", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type User { + name: String + age: Int @index(unique: true) + }`, + }, + testUtils.CreatePredefinedDocs{ + Docs: getUserDocs(), + }, + testUtils.Request{ + Request: req, + Results: []map[string]any{ + {"name": "Chris"}, + }, + }, + testUtils.Request{ + Request: makeExplainQuery(req), + Asserter: testUtils.NewExplainAsserter().WithDocFetches(1).WithFieldFetches(2).WithIndexFetches(10), + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestQueryWithUniqueIndex_WithGreaterOrEqualFilter_ShouldFetch(t *testing.T) { + req := `query { + User(filter: {age: {_ge: 48}}) { + name + } + }` + test := testUtils.TestCase{ + Description: "Test index filtering with _ge filter", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type User { + name: String + age: Int @index(unique: true) + }`, + }, + testUtils.CreatePredefinedDocs{ + Docs: getUserDocs(), + }, + testUtils.Request{ + Request: req, + Results: []map[string]any{ + {"name": "Keenan"}, + {"name": "Chris"}, + }, + }, + testUtils.Request{ + Request: makeExplainQuery(req), + Asserter: testUtils.NewExplainAsserter().WithDocFetches(2).WithFieldFetches(4).WithIndexFetches(10), + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestQueryWithUniqueIndex_WithLessThanFilter_ShouldFetch(t *testing.T) { + req := `query { + User(filter: {age: {_lt: 22}}) { + name + } + }` + test := testUtils.TestCase{ + Description: "Test index filtering with _lt filter", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type User { + name: String + age: Int @index(unique: true) + }`, + }, + testUtils.CreatePredefinedDocs{ + Docs: getUserDocs(), + }, + testUtils.Request{ + Request: req, + Results: []map[string]any{ + {"name": "Shahzad"}, + }, + }, + testUtils.Request{ + Request: makeExplainQuery(req), + Asserter: testUtils.NewExplainAsserter().WithDocFetches(1).WithFieldFetches(2).WithIndexFetches(10), + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestQueryWithUniqueIndex_WithLessOrEqualFilter_ShouldFetch(t *testing.T) { + req := `query { + User(filter: {age: {_le: 23}}) { + name + } + 
}` + test := testUtils.TestCase{ + Description: "Test index filtering with _le filter", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type User { + name: String + age: Int @index(unique: true) + }`, + }, + testUtils.CreatePredefinedDocs{ + Docs: getUserDocs(), + }, + testUtils.Request{ + Request: req, + Results: []map[string]any{ + {"name": "Shahzad"}, + {"name": "Bruno"}, + }, + }, + testUtils.Request{ + Request: makeExplainQuery(req), + Asserter: testUtils.NewExplainAsserter().WithDocFetches(2).WithFieldFetches(4).WithIndexFetches(10), + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestQueryWithUniqueIndex_WithNotEqualFilter_ShouldFetch(t *testing.T) { + req := `query { + User(filter: {name: {_ne: "Islam"}}) { + name + } + }` + test := testUtils.TestCase{ + Description: "Test index filtering with _ne filter", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type User { + name: String @index(unique: true) + age: Int + }`, + }, + testUtils.CreatePredefinedDocs{ + Docs: getUserDocs(), + }, + testUtils.Request{ + Request: req, + Results: []map[string]any{ + {"name": "Roy"}, + {"name": "Addo"}, + {"name": "Andy"}, + {"name": "Fred"}, + {"name": "John"}, + {"name": "Bruno"}, + {"name": "Chris"}, + {"name": "Keenan"}, + {"name": "Shahzad"}, + }, + }, + testUtils.Request{ + Request: makeExplainQuery(req), + Asserter: testUtils.NewExplainAsserter().WithDocFetches(9).WithFieldFetches(9).WithIndexFetches(10), + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestQueryWithUniqueIndex_WithInFilter_ShouldFetch(t *testing.T) { + req := `query { + User(filter: {age: {_in: [20, 33]}}) { + name + } + }` + test := testUtils.TestCase{ + Description: "Test index filtering with _in filter", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type User { + name: String + age: Int @index(unique: true) + }`, + }, + testUtils.CreatePredefinedDocs{ + Docs: getUserDocs(), + }, + testUtils.Request{ + Request: req, + Results: []map[string]any{ + {"name": "Shahzad"}, + {"name": "Andy"}, + }, + }, + testUtils.Request{ + Request: makeExplainQuery(req), + Asserter: testUtils.NewExplainAsserter().WithDocFetches(2).WithFieldFetches(4).WithIndexFetches(2), + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestQueryWithUniqueIndex_WithNotInFilter_ShouldFetch(t *testing.T) { + req := `query { + User(filter: {age: {_nin: [20, 23, 28, 33, 42, 55]}}) { + name + } + }` + test := testUtils.TestCase{ + Description: "Test index filtering with _nin filter", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type User { + name: String + age: Int @index(unique: true) + }`, + }, + testUtils.CreatePredefinedDocs{ + Docs: getUserDocs(), + }, + testUtils.Request{ + Request: req, + Results: []map[string]any{ + {"name": "John"}, + {"name": "Islam"}, + {"name": "Roy"}, + {"name": "Keenan"}, + }, + }, + testUtils.Request{ + Request: makeExplainQuery(req), + Asserter: testUtils.NewExplainAsserter().WithDocFetches(4).WithFieldFetches(8).WithIndexFetches(10), + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestQueryWithUniqueIndex_WithLikeFilter_ShouldFetch(t *testing.T) { + req1 := `query { + User(filter: {email: {_like: "a%"}}) { + name + } + }` + req2 := `query { + User(filter: {email: {_like: "%d@gmail.com"}}) { + name + } + }` + req3 := `query { + User(filter: {email: {_like: "%e%"}}) { + name + } + }` + req4 := `query { + User(filter: {email: {_like: "fred@gmail.com"}}) { + name + } + }` + req5 := `query { + User(filter: {email: {_like: 
"a%@gmail.com"}}) { + name + } + }` + req6 := `query { + User(filter: {email: {_like: "a%com%m"}}) { + name + } + }` + test := testUtils.TestCase{ + Description: "Test index filtering with _like filter", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type User { + name: String + email: String @index(unique: true) + }`, + }, + testUtils.CreatePredefinedDocs{ + Docs: getUserDocs(), + }, + testUtils.Request{ + Request: req1, + Results: []map[string]any{ + {"name": "Addo"}, + {"name": "Andy"}, + }, + }, + testUtils.Request{ + Request: makeExplainQuery(req1), + Asserter: testUtils.NewExplainAsserter().WithDocFetches(2).WithFieldFetches(4).WithIndexFetches(10), + }, + testUtils.Request{ + Request: req2, + Results: []map[string]any{ + {"name": "Fred"}, + {"name": "Shahzad"}, + }, + }, + testUtils.Request{ + Request: makeExplainQuery(req2), + Asserter: testUtils.NewExplainAsserter().WithDocFetches(2).WithFieldFetches(4).WithIndexFetches(10), + }, + testUtils.Request{ + Request: req3, + Results: []map[string]any{ + {"name": "Fred"}, + {"name": "Keenan"}, + }, + }, + testUtils.Request{ + Request: makeExplainQuery(req3), + Asserter: testUtils.NewExplainAsserter().WithDocFetches(2).WithFieldFetches(4).WithIndexFetches(10), + }, + testUtils.Request{ + Request: req4, + Results: []map[string]any{ + {"name": "Fred"}, + }, + }, + testUtils.Request{ + Request: makeExplainQuery(req4), + Asserter: testUtils.NewExplainAsserter().WithDocFetches(1).WithFieldFetches(2).WithIndexFetches(10), + }, + testUtils.Request{ + Request: req5, + Results: []map[string]any{ + {"name": "Addo"}, + {"name": "Andy"}, + }, + }, + testUtils.Request{ + Request: makeExplainQuery(req5), + Asserter: testUtils.NewExplainAsserter().WithDocFetches(2).WithFieldFetches(4).WithIndexFetches(10), + }, + testUtils.Request{ + Request: req6, + Results: []map[string]any{}, + }, + testUtils.Request{ + Request: makeExplainQuery(req6), + Asserter: testUtils.NewExplainAsserter().WithDocFetches(0).WithFieldFetches(0).WithIndexFetches(10), + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestQueryWithUniqueIndex_WithNotLikeFilter_ShouldFetch(t *testing.T) { + req := `query { + User(filter: {name: {_nlike: "%h%"}}) { + name + } + }` + test := testUtils.TestCase{ + Description: "Test index filtering with _nlike filter", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type User { + name: String @index(unique: true) + age: Int + }`, + }, + testUtils.CreatePredefinedDocs{ + Docs: getUserDocs(), + }, + testUtils.Request{ + Request: req, + Results: []map[string]any{ + {"name": "Roy"}, + {"name": "Addo"}, + {"name": "Andy"}, + {"name": "Fred"}, + {"name": "Bruno"}, + {"name": "Islam"}, + {"name": "Keenan"}, + }, + }, + testUtils.Request{ + Request: makeExplainQuery(req), + Asserter: testUtils.NewExplainAsserter().WithDocFetches(7).WithFieldFetches(7).WithIndexFetches(10), + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/test_case.go b/tests/integration/test_case.go index 6ebe6242b3..ecb00e602e 100644 --- a/tests/integration/test_case.go +++ b/tests/integration/test_case.go @@ -228,6 +228,9 @@ type CreateIndex struct { // The directions of the 'FieldsNames' to index. Used only for composite indexes. Directions []client.IndexDirection + // If Unique is true, the index will be created as a unique index. + Unique bool + // Any error expected from the action. Optional. 
// // String can be a partial, and the test will pass if an error is returned that From 4fcace480db7cc3586b97ad181b847529c9704ca Mon Sep 17 00:00:00 2001 From: Islam Aleiv Date: Mon, 11 Dec 2023 13:01:16 +0100 Subject: [PATCH 04/15] Fix lint --- db/errors.go | 142 +++++++++--------- ...uery_with_unique_index_only_filter_test.go | 1 - 2 files changed, 71 insertions(+), 72 deletions(-) diff --git a/db/errors.go b/db/errors.go index 353a82868e..1413c1289d 100644 --- a/db/errors.go +++ b/db/errors.go @@ -16,77 +16,77 @@ import ( ) const ( - errFailedToGetHeads string = "failed to get document heads" - errFailedToCreateCollectionQuery string = "failed to create collection prefix query" - errFailedToGetCollection string = "failed to get collection" - errFailedToGetAllCollections string = "failed to get all collections" - errDocVerification string = "the document verification failed" - errAddingP2PCollection string = "cannot add collection ID" - errRemovingP2PCollection string = "cannot remove collection ID" - errAddCollectionWithPatch string = "unknown collection, adding collections via patch is not supported" - errCollectionIDDoesntMatch string = "CollectionID does not match existing" - errSchemaRootDoesntMatch string = "SchemaRoot does not match existing" - errCannotModifySchemaName string = "modifying the schema name is not supported" - errCannotSetVersionID string = "setting the VersionID is not supported. It is updated automatically" - errCannotSetFieldID string = "explicitly setting a field ID value is not supported" - errRelationalFieldMissingSchema string = "a `Schema` [name] must be provided when adding a new relation field" - errRelationalFieldInvalidRelationType string = "invalid RelationType" - errRelationalFieldMissingIDField string = "missing id field for relation object field" - errRelationalFieldMissingRelationName string = "missing relation name" - errPrimarySideNotDefined string = "primary side of relation not defined" - errPrimarySideOnMany string = "cannot set the many side of a relation as primary" - errBothSidesPrimary string = "both sides of a relation cannot be primary" - errRelatedFieldKindMismatch string = "invalid Kind of the related field" - errRelatedFieldRelationTypeMismatch string = "invalid RelationType of the related field" - errRelationalFieldIDInvalidType string = "relational id field of invalid kind" - errDuplicateField string = "duplicate field" - errCannotMutateField string = "mutating an existing field is not supported" - errCannotMoveField string = "moving fields is not currently supported" - errInvalidCRDTType string = "only default or LWW (last writer wins) CRDT types are supported" - errCannotDeleteField string = "deleting an existing field is not supported" - errFieldKindNotFound string = "no type found for given name" - errFieldKindDoesNotMatchFieldSchema string = "field Kind does not match field Schema" - errSchemaNotFound string = "no schema found for given name" - errDocumentAlreadyExists string = "a document with the given dockey already exists" - errDocumentDeleted string = "a document with the given dockey has been deleted" - errIndexMissingFields string = "index missing fields" - errNonZeroIndexIDProvided string = "non-zero index ID provided" - errIndexFieldMissingName string = "index field missing name" - errIndexFieldMissingDirection string = "index field missing direction" - errIndexSingleFieldWrongDirection string = "wrong direction for index with a single field" - errIndexWithNameAlreadyExists string = "index with name already exists" - 
errInvalidStoredIndex string = "invalid stored index" - errInvalidStoredIndexKey string = "invalid stored index key" - errNonExistingFieldForIndex string = "creating an index on a non-existing property" - errCollectionDoesntExisting string = "collection with given name doesn't exist" - errFailedToStoreIndexedField string = "failed to store indexed field" - errFailedToReadStoredIndexDesc string = "failed to read stored index description" - errCanNotDeleteIndexedField string = "can not delete indexed field" - errCanNotAddIndexWithPatch string = "adding indexes via patch is not supported" - errCanNotDropIndexWithPatch string = "dropping indexes via patch is not supported" - errCanNotChangeIndexWithPatch string = "changing indexes via patch is not supported" - errIndexWithNameDoesNotExists string = "index with name doesn't exists" - errCorruptedIndex string = "corrupted index. Please delete and recreate the index" - errInvalidFieldValue string = "invalid field value" - errUnsupportedIndexFieldType string = "unsupported index field type" - errIndexDescriptionHasNoFields string = "index description has no fields" - errIndexDescHasNonExistingField string = "index description has non existing field" - errFieldOrAliasToFieldNotExist string = "The given field or alias to field does not exist" - errCreateFile string = "failed to create file" - errOpenFile string = "failed to open file" - errCloseFile string = "failed to close file" - errRemoveFile string = "failed to remove file" - errFailedToReadByte string = "failed to read byte" - errFailedToWriteString string = "failed to write string" - errJSONDecode string = "failed to decode JSON" - errDocFromMap string = "failed to create a new doc from map" - errDocCreate string = "failed to save a new doc to collection" - errDocUpdate string = "failed to update doc to collection" - errExpectedJSONObject string = "expected JSON object" - errExpectedJSONArray string = "expected JSON array" - errOneOneAlreadyLinked string = "target document is already linked to another document" - errIndexDoesNotMatchName string = "the index used does not match the given name" - errCanNotIndexNonUniqueField string = "can not create doc that violates unique index" + errFailedToGetHeads string = "failed to get document heads" + errFailedToCreateCollectionQuery string = "failed to create collection prefix query" + errFailedToGetCollection string = "failed to get collection" + errFailedToGetAllCollections string = "failed to get all collections" + errDocVerification string = "the document verification failed" + errAddingP2PCollection string = "cannot add collection ID" + errRemovingP2PCollection string = "cannot remove collection ID" + errAddCollectionWithPatch string = "unknown collection, adding collections via patch is not supported" + errCollectionIDDoesntMatch string = "CollectionID does not match existing" + errSchemaRootDoesntMatch string = "SchemaRoot does not match existing" + errCannotModifySchemaName string = "modifying the schema name is not supported" + errCannotSetVersionID string = "setting the VersionID is not supported. 
It is updated automatically" + errCannotSetFieldID string = "explicitly setting a field ID value is not supported" + errRelationalFieldMissingSchema string = "a `Schema` [name] must be provided when adding a new relation field" + errRelationalFieldInvalidRelationType string = "invalid RelationType" + errRelationalFieldMissingIDField string = "missing id field for relation object field" + errRelationalFieldMissingRelationName string = "missing relation name" + errPrimarySideNotDefined string = "primary side of relation not defined" + errPrimarySideOnMany string = "cannot set the many side of a relation as primary" + errBothSidesPrimary string = "both sides of a relation cannot be primary" + errRelatedFieldKindMismatch string = "invalid Kind of the related field" + errRelatedFieldRelationTypeMismatch string = "invalid RelationType of the related field" + errRelationalFieldIDInvalidType string = "relational id field of invalid kind" + errDuplicateField string = "duplicate field" + errCannotMutateField string = "mutating an existing field is not supported" + errCannotMoveField string = "moving fields is not currently supported" + errInvalidCRDTType string = "only default or LWW (last writer wins) CRDT types are supported" + errCannotDeleteField string = "deleting an existing field is not supported" + errFieldKindNotFound string = "no type found for given name" + errFieldKindDoesNotMatchFieldSchema string = "field Kind does not match field Schema" + errSchemaNotFound string = "no schema found for given name" + errDocumentAlreadyExists string = "a document with the given dockey already exists" + errDocumentDeleted string = "a document with the given dockey has been deleted" + errIndexMissingFields string = "index missing fields" + errNonZeroIndexIDProvided string = "non-zero index ID provided" + errIndexFieldMissingName string = "index field missing name" + errIndexFieldMissingDirection string = "index field missing direction" + errIndexSingleFieldWrongDirection string = "wrong direction for index with a single field" + errIndexWithNameAlreadyExists string = "index with name already exists" + errInvalidStoredIndex string = "invalid stored index" + errInvalidStoredIndexKey string = "invalid stored index key" + errNonExistingFieldForIndex string = "creating an index on a non-existing property" + errCollectionDoesntExisting string = "collection with given name doesn't exist" + errFailedToStoreIndexedField string = "failed to store indexed field" + errFailedToReadStoredIndexDesc string = "failed to read stored index description" + errCanNotDeleteIndexedField string = "can not delete indexed field" + errCanNotAddIndexWithPatch string = "adding indexes via patch is not supported" + errCanNotDropIndexWithPatch string = "dropping indexes via patch is not supported" + errCanNotChangeIndexWithPatch string = "changing indexes via patch is not supported" + errIndexWithNameDoesNotExists string = "index with name doesn't exists" + errCorruptedIndex string = "corrupted index. 
Please delete and recreate the index" + errInvalidFieldValue string = "invalid field value" + errUnsupportedIndexFieldType string = "unsupported index field type" + errIndexDescriptionHasNoFields string = "index description has no fields" + errIndexDescHasNonExistingField string = "index description has non existing field" + errFieldOrAliasToFieldNotExist string = "The given field or alias to field does not exist" + errCreateFile string = "failed to create file" + errOpenFile string = "failed to open file" + errCloseFile string = "failed to close file" + errRemoveFile string = "failed to remove file" + errFailedToReadByte string = "failed to read byte" + errFailedToWriteString string = "failed to write string" + errJSONDecode string = "failed to decode JSON" + errDocFromMap string = "failed to create a new doc from map" + errDocCreate string = "failed to save a new doc to collection" + errDocUpdate string = "failed to update doc to collection" + errExpectedJSONObject string = "expected JSON object" + errExpectedJSONArray string = "expected JSON array" + errOneOneAlreadyLinked string = "target document is already linked to another document" + errIndexDoesNotMatchName string = "the index used does not match the given name" + errCanNotIndexNonUniqueField string = "can not create doc that violates unique index" ) var ( diff --git a/tests/integration/index/query_with_unique_index_only_filter_test.go b/tests/integration/index/query_with_unique_index_only_filter_test.go index bba4028cd3..54ac7b2d8d 100644 --- a/tests/integration/index/query_with_unique_index_only_filter_test.go +++ b/tests/integration/index/query_with_unique_index_only_filter_test.go @@ -16,7 +16,6 @@ import ( testUtils "github.com/sourcenetwork/defradb/tests/integration" ) - func TestQueryWithUniqueIndex_WithEqualFilter_ShouldFetch(t *testing.T) { req := `query { User(filter: {name: {_eq: "Islam"}}) { From d5d12ac3d3f769a14c9b6d5234102fb31e032952 Mon Sep 17 00:00:00 2001 From: Islam Aleiv Date: Mon, 11 Dec 2023 15:27:09 +0100 Subject: [PATCH 05/15] Add more tests for unique indexes --- db/collection_index.go | 4 +- db/index_test.go | 13 ++++++ db/indexed_docs_test.go | 96 +++++++++++++++++++++++++++++++++++++++-- 3 files changed, 109 insertions(+), 4 deletions(-) diff --git a/db/collection_index.go b/db/collection_index.go index 278586902b..88476bf481 100644 --- a/db/collection_index.go +++ b/db/collection_index.go @@ -13,6 +13,7 @@ package db import ( "context" "encoding/json" + "errors" "fmt" "strconv" "strings" @@ -222,7 +223,8 @@ func (c *collection) createIndex( c.indexes = append(c.indexes, colIndex) err = c.indexExistingDocs(ctx, txn, colIndex) if err != nil { - return nil, err + removeErr := colIndex.RemoveAll(ctx, txn) + return nil, errors.Join(err, removeErr) } return colIndex, nil } diff --git a/db/index_test.go b/db/index_test.go index 06ecb9148b..33e85275c7 100644 --- a/db/index_test.go +++ b/db/index_test.go @@ -182,6 +182,19 @@ func (f *indexTestFixture) createUserCollectionIndexOnName() client.IndexDescrip return newDesc } +func makeUnique(indexDesc client.IndexDescription) client.IndexDescription { + indexDesc.Unique = true + return indexDesc +} + +func (f *indexTestFixture) createUserCollectionUniqueIndexOnName() client.IndexDescription { + indexDesc := makeUnique(getUsersIndexDescOnName()) + newDesc, err := f.createCollectionIndexFor(f.users.Name(), indexDesc) + require.NoError(f.t, err) + f.commitTxn() + return newDesc +} + func (f *indexTestFixture) createUserCollectionIndexOnAge() client.IndexDescription { 
newDesc, err := f.createCollectionIndexFor(f.users.Name(), getUsersIndexDescOnAge()) require.NoError(f.t, err) diff --git a/db/indexed_docs_test.go b/db/indexed_docs_test.go index cf24a73e87..948245ce87 100644 --- a/db/indexed_docs_test.go +++ b/db/indexed_docs_test.go @@ -173,7 +173,10 @@ func (b *indexKeyBuilder) Build() core.IndexDataStoreKey { fieldBytesVal, err = writeableVal.Bytes() require.NoError(b.f.t, err) - key.FieldValues = [][]byte{fieldBytesVal, []byte(b.doc.Key().String())} + key.FieldValues = [][]byte{fieldBytesVal} + if !b.isUnique { + key.FieldValues = append(key.FieldValues, []byte(b.doc.Key().String())) + } } else if len(b.values) > 0 { key.FieldValues = b.values } @@ -623,9 +626,10 @@ func TestNonUniqueCreate_IfDatastoreFailsToStoreIndex_ReturnError(t *testing.T) invalidKeyString := fieldKeyString + "/doesn't matter/" // Insert an invalid key within the document prefix, this will generate an error within the fetcher. - f.db.multistore.Datastore().Put(f.ctx, ipfsDatastore.NewKey(invalidKeyString), []byte("doesn't matter")) + err := f.db.multistore.Datastore().Put(f.ctx, ipfsDatastore.NewKey(invalidKeyString), []byte("doesn't matter")) + require.NoError(f.t, err) - _, err := f.users.CreateIndex(f.ctx, getUsersIndexDescOnName()) + _, err = f.users.CreateIndex(f.ctx, getUsersIndexDescOnName()) require.ErrorIs(f.t, err, core.ErrInvalidKey) } @@ -1004,3 +1008,89 @@ func (encdoc *shimEncodedDocument) Reset() { encdoc.status = 0 encdoc.properties = map[client.FieldDescription]any{} } + +func TestUniqueCreate_ShouldIndexExistingDocs(t *testing.T) { + f := newIndexTestFixture(t) + defer f.db.Close() + + doc1 := f.newUserDoc("John", 21) + f.saveDocToCollection(doc1, f.users) + doc2 := f.newUserDoc("Islam", 18) + f.saveDocToCollection(doc2, f.users) + + f.createUserCollectionUniqueIndexOnName() + + key1 := newIndexKeyBuilder(f).Col(usersColName).Field(usersNameFieldName).Unique().Doc(doc1).Build() + key2 := newIndexKeyBuilder(f).Col(usersColName).Field(usersNameFieldName).Unique().Doc(doc2).Build() + + data, err := f.txn.Datastore().Get(f.ctx, key1.ToDS()) + require.NoError(t, err, key1.ToString()) + assert.Equal(t, data, []byte(doc1.Key().String())) + data, err = f.txn.Datastore().Get(f.ctx, key2.ToDS()) + require.NoError(t, err) + assert.Equal(t, data, []byte(doc2.Key().String())) +} + +func TestUniqueCreate_IfFailsToIndex_ShouldNotLeaveArtifacts(t *testing.T) { + f := newIndexTestFixture(t) + defer f.db.Close() + + doc1 := f.newUserDoc("John", 21) + f.saveDocToCollection(doc1, f.users) + doc2 := f.newUserDoc("John", 18) + f.saveDocToCollection(doc2, f.users) + + indexDesc := makeUnique(getUsersIndexDescOnName()) + _, err := f.createCollectionIndexFor(f.users.Name(), indexDesc) + require.Error(t, err) + + // We assume here that the newly created index (that failed to index) got an ID of 1. + key := core.IndexDataStoreKey{CollectionID: f.users.ID(), IndexID: 1} + + assert.Len(t, f.getPrefixFromDataStore(key.ToString()), 0) +} + +func TestUnique_IfIndexedFieldIsNil_StoreItAsNil(t *testing.T) { + f := newIndexTestFixture(t) + defer f.db.Close() + f.createUserCollectionUniqueIndexOnName() + + docJSON, err := json.Marshal(struct { + Age int `json:"age"` + }{Age: 44}) + require.NoError(f.t, err) + + doc, err := client.NewDocFromJSON(docJSON) + require.NoError(f.t, err) + + f.saveDocToCollection(doc, f.users) + + key := newIndexKeyBuilder(f).Col(usersColName).Field(usersNameFieldName).Unique().Doc(doc). 
+ Values([]byte(nil)).Build() + + data, err := f.txn.Datastore().Get(f.ctx, key.ToDS()) + require.NoError(t, err) + assert.Equal(t, data, []byte(doc.Key().String())) +} + +func TestUniqueDrop_ShouldDeleteStoredIndexedFields(t *testing.T) { + f := newIndexTestFixtureBare(t) + users := f.addUsersCollection() + _, err := f.createCollectionIndexFor(users.Name(), makeUnique(getUsersIndexDescOnName())) + require.NoError(f.t, err) + _, err = f.createCollectionIndexFor(users.Name(), makeUnique(getUsersIndexDescOnAge())) + require.NoError(f.t, err) + f.commitTxn() + + f.saveDocToCollection(f.newUserDoc("John", 21), users) + f.saveDocToCollection(f.newUserDoc("Islam", 23), users) + + userNameKey := newIndexKeyBuilder(f).Col(usersColName).Field(usersNameFieldName).Build() + userAgeKey := newIndexKeyBuilder(f).Col(usersColName).Field(usersAgeFieldName).Build() + + err = f.dropIndex(usersColName, testUsersColIndexAge) + require.NoError(f.t, err) + + assert.Len(t, f.getPrefixFromDataStore(userNameKey.ToString()), 2) + assert.Len(t, f.getPrefixFromDataStore(userAgeKey.ToString()), 0) +} From e6ad1a203723ac6e4e4b635a3d304ecec72485e3 Mon Sep 17 00:00:00 2001 From: Islam Aleiv Date: Mon, 11 Dec 2023 15:29:12 +0100 Subject: [PATCH 06/15] Fix lint --- tests/integration/index/create_unique_test.go | 1 - 1 file changed, 1 deletion(-) diff --git a/tests/integration/index/create_unique_test.go b/tests/integration/index/create_unique_test.go index 66ca1dfd36..dfff0bbdee 100644 --- a/tests/integration/index/create_unique_test.go +++ b/tests/integration/index/create_unique_test.go @@ -21,7 +21,6 @@ import ( const johnDockey = "bae-f54b9689-e06e-5e3a-89b3-f3aee8e64ca7" func TestCreateUniqueIndex_IfFieldValuesAreNotUnique_ReturnError(t *testing.T) { - test := testUtils.TestCase{ Description: "If field is not unique, creating of unique index fails", Actions: []any{ From 80145ee18d42e02cdb6e0d94a01066ee7b46eddf Mon Sep 17 00:00:00 2001 From: Islam Aleiv Date: Mon, 11 Dec 2023 16:35:44 +0100 Subject: [PATCH 07/15] Add "unique" flag to CreateIndex CLI command --- cli/index_create.go | 6 +++++- tests/clients/cli/wrapper_collection.go | 7 ++++++- tests/integration/utils2.go | 1 + 3 files changed, 12 insertions(+), 2 deletions(-) diff --git a/cli/index_create.go b/cli/index_create.go index 42866267fc..099eb7e7a6 100644 --- a/cli/index_create.go +++ b/cli/index_create.go @@ -21,12 +21,14 @@ func MakeIndexCreateCommand() *cobra.Command { var collectionArg string var nameArg string var fieldsArg []string + var uniqueArg bool var cmd = &cobra.Command{ - Use: "create -c --collection --fields [-n --name ]", + Use: "create -c --collection --fields [-n --name ] [--unique]", Short: "Creates a secondary index on a collection's field(s)", Long: `Creates a secondary index on a collection's field(s). The --name flag is optional. If not provided, a name will be generated automatically. +The --unique flag is optional. If provided, the index will be unique. 
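A possible invocation combining the new --unique flag with the flags registered below (a sketch only; the 'email' field name is assumed, not taken from this patch):
  defradb client index create --collection Users --fields email --unique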
Example: create an index for 'Users' collection on 'name' field: defradb client index create --collection Users --fields name @@ -44,6 +46,7 @@ Example: create a named index for 'Users' collection on 'name' field: desc := client.IndexDescription{ Name: nameArg, Fields: fields, + Unique: uniqueArg, } col, err := store.GetCollectionByName(cmd.Context(), collectionArg) if err != nil { @@ -62,6 +65,7 @@ Example: create a named index for 'Users' collection on 'name' field: cmd.Flags().StringVarP(&collectionArg, "collection", "c", "", "Collection name") cmd.Flags().StringVarP(&nameArg, "name", "n", "", "Index name") cmd.Flags().StringSliceVar(&fieldsArg, "fields", []string{}, "Fields to index") + cmd.Flags().BoolVarP(&uniqueArg, "unique", "u", false, "Make the index unique") return cmd } diff --git a/tests/clients/cli/wrapper_collection.go b/tests/clients/cli/wrapper_collection.go index 0ce3c92836..f29135d201 100644 --- a/tests/clients/cli/wrapper_collection.go +++ b/tests/clients/cli/wrapper_collection.go @@ -366,7 +366,12 @@ func (c *Collection) CreateIndex( ) (index client.IndexDescription, err error) { args := []string{"client", "index", "create"} args = append(args, "--collection", c.Description().Name) - args = append(args, "--name", indexDesc.Name) + if indexDesc.Name != "" { + args = append(args, "--name", indexDesc.Name) + } + if indexDesc.Unique { + args = append(args, "--unique") + } fields := make([]string, len(indexDesc.Fields)) for i := range indexDesc.Fields { diff --git a/tests/integration/utils2.go b/tests/integration/utils2.go index d414cc1ca4..87d8487ccc 100644 --- a/tests/integration/utils2.go +++ b/tests/integration/utils2.go @@ -1319,6 +1319,7 @@ func createIndex( }) } } + indexDesc.Unique = action.Unique err := withRetry( actionNodes, nodeID, From 21942de0cf5722a92cbc62176be6eda7a5197205 Mon Sep 17 00:00:00 2001 From: Islam Aleiv Date: Tue, 12 Dec 2023 12:06:10 +0100 Subject: [PATCH 08/15] Add a test for update with unique index --- db/indexed_docs_test.go | 47 +++++++++++++++++++++++++++++++++++++++++ 1 file changed, 47 insertions(+) diff --git a/db/indexed_docs_test.go b/db/indexed_docs_test.go index 948245ce87..103c454054 100644 --- a/db/indexed_docs_test.go +++ b/db/indexed_docs_test.go @@ -1094,3 +1094,50 @@ func TestUniqueDrop_ShouldDeleteStoredIndexedFields(t *testing.T) { assert.Len(t, f.getPrefixFromDataStore(userNameKey.ToString()), 2) assert.Len(t, f.getPrefixFromDataStore(userAgeKey.ToString()), 0) } + +func TestUniqueUpdate_ShouldDeleteOldValueAndStoreNewOne(t *testing.T) { + f := newIndexTestFixture(t) + defer f.db.Close() + f.createUserCollectionUniqueIndexOnName() + + cases := []struct { + Name string + NewValue string + Exec func(doc *client.Document) error + }{ + { + Name: "update", + NewValue: "Islam", + Exec: func(doc *client.Document) error { + return f.users.Update(f.ctx, doc) + }, + }, + { + Name: "save", + NewValue: "Andy", + Exec: func(doc *client.Document) error { + return f.users.Save(f.ctx, doc) + }, + }, + } + + doc := f.newUserDoc("John", 21) + f.saveDocToCollection(doc, f.users) + + for _, tc := range cases { + oldKey := newIndexKeyBuilder(f).Col(usersColName).Field(usersNameFieldName).Unique().Doc(doc).Build() + + err := doc.Set(usersNameFieldName, tc.NewValue) + require.NoError(t, err) + err = tc.Exec(doc) + require.NoError(t, err) + f.commitTxn() + + newKey := newIndexKeyBuilder(f).Col(usersColName).Field(usersNameFieldName).Unique().Doc(doc).Build() + + _, err = f.txn.Datastore().Get(f.ctx, oldKey.ToDS()) + require.Error(t, err) + _, err 
= f.txn.Datastore().Get(f.ctx, newKey.ToDS()) + require.NoError(t, err) + } +} From eef1ffd57040f21969714795a23771dc8b8aa645 Mon Sep 17 00:00:00 2001 From: Islam Aleiv Date: Tue, 12 Dec 2023 18:12:25 +0100 Subject: [PATCH 09/15] Fix GetIndex via http --- http/client_collection.go | 2 +- http/handler_collection.go | 8 ++++++-- tests/integration/index/create_unique_test.go | 4 ++-- 3 files changed, 9 insertions(+), 5 deletions(-) diff --git a/http/client_collection.go b/http/client_collection.go index 9f56594db7..35ca21ce4f 100644 --- a/http/client_collection.go +++ b/http/client_collection.go @@ -424,5 +424,5 @@ func (c *Collection) GetIndexes(ctx context.Context) ([]client.IndexDescription, if err := c.http.requestJson(req, &indexes); err != nil { return nil, err } - return c.Description().Indexes, nil + return indexes, nil } diff --git a/http/handler_collection.go b/http/handler_collection.go index a5622f1336..69f08d7073 100644 --- a/http/handler_collection.go +++ b/http/handler_collection.go @@ -312,13 +312,17 @@ func (s *collectionHandler) CreateIndex(rw http.ResponseWriter, req *http.Reques } func (s *collectionHandler) GetIndexes(rw http.ResponseWriter, req *http.Request) { - col := req.Context().Value(colContextKey).(client.Collection) + store := req.Context().Value(storeContextKey).(client.Store) + indexesMap, err := store.GetAllIndexes(req.Context()) - indexes, err := col.GetIndexes(req.Context()) if err != nil { responseJSON(rw, http.StatusBadRequest, errorResponse{err}) return } + indexes := make([]client.IndexDescription, 0, len(indexesMap)) + for _, index := range indexesMap { + indexes = append(indexes, index...) + } responseJSON(rw, http.StatusOK, indexes) } diff --git a/tests/integration/index/create_unique_test.go b/tests/integration/index/create_unique_test.go index dfff0bbdee..f4f0313a4f 100644 --- a/tests/integration/index/create_unique_test.go +++ b/tests/integration/index/create_unique_test.go @@ -72,7 +72,7 @@ func TestCreateUniqueIndex_IfFieldValuesAreNotUnique_ReturnError(t *testing.T) { testUtils.ExecuteTestCase(t, test) } -func TestCreateUniqueIndex_UponAddingDocWithExistingFieldValue_ReturnError(t *testing.T) { +func TestUniqueIndexCreate_UponAddingDocWithExistingFieldValue_ReturnError(t *testing.T) { test := testUtils.TestCase{ Description: "adding a new doc with existing value for indexed field should fail", Actions: []any{ @@ -131,7 +131,7 @@ func TestCreateUniqueIndex_UponAddingDocWithExistingFieldValue_ReturnError(t *te testUtils.ExecuteTestCase(t, test) } -func TestCreateUniqueIndex_IfFieldValuesAreUnique_Succeed(t *testing.T) { +func TestUniqueIndexCreate_IfFieldValuesAreUnique_Succeed(t *testing.T) { test := testUtils.TestCase{ Description: "create unique index if all docs have unique field values", Actions: []any{ From 0423ae13715d96cde81841746c7c3ea30a1cb8da Mon Sep 17 00:00:00 2001 From: Islam Aleiv Date: Wed, 13 Dec 2023 08:19:30 +0100 Subject: [PATCH 10/15] Use consistently "User" instead of "Users" --- tests/integration/index/create_drop_test.go | 4 ++-- tests/integration/index/create_get_test.go | 2 +- tests/integration/index/create_test.go | 4 ++-- tests/integration/index/create_unique_test.go | 2 +- tests/integration/index/drop_test.go | 4 ++-- tests/integration/index/get_test.go | 2 +- 6 files changed, 9 insertions(+), 9 deletions(-) diff --git a/tests/integration/index/create_drop_test.go b/tests/integration/index/create_drop_test.go index 6bb9da4dbb..0680ea7aed 100644 --- a/tests/integration/index/create_drop_test.go +++ 
b/tests/integration/index/create_drop_test.go @@ -22,7 +22,7 @@ func TestIndexDrop_ShouldNotHinderQuerying(t *testing.T) { Actions: []any{ testUtils.SchemaUpdate{ Schema: ` - type Users { + type User { name: String @index age: Int } @@ -44,7 +44,7 @@ func TestIndexDrop_ShouldNotHinderQuerying(t *testing.T) { testUtils.Request{ Request: ` query { - Users { + User { name age } diff --git a/tests/integration/index/create_get_test.go b/tests/integration/index/create_get_test.go index c22b6ef99b..6ec0962c17 100644 --- a/tests/integration/index/create_get_test.go +++ b/tests/integration/index/create_get_test.go @@ -23,7 +23,7 @@ func TestIndexGet_ShouldReturnListOfExistingIndexes(t *testing.T) { Actions: []any{ testUtils.SchemaUpdate{ Schema: ` - type Users @index(name: "age_index", fields: ["age"]) { + type User @index(name: "age_index", fields: ["age"]) { name: String @index(name: "name_index") age: Int } diff --git a/tests/integration/index/create_test.go b/tests/integration/index/create_test.go index 8aed37bfc8..ce3f94080a 100644 --- a/tests/integration/index/create_test.go +++ b/tests/integration/index/create_test.go @@ -65,7 +65,7 @@ func TestIndexCreate_ShouldNotHinderQuerying(t *testing.T) { Actions: []any{ testUtils.SchemaUpdate{ Schema: ` - type Users { + type User { name: String age: Int } @@ -88,7 +88,7 @@ func TestIndexCreate_ShouldNotHinderQuerying(t *testing.T) { testUtils.Request{ Request: ` query { - Users { + User { name age } diff --git a/tests/integration/index/create_unique_test.go b/tests/integration/index/create_unique_test.go index f4f0313a4f..0cea5023e6 100644 --- a/tests/integration/index/create_unique_test.go +++ b/tests/integration/index/create_unique_test.go @@ -137,7 +137,7 @@ func TestUniqueIndexCreate_IfFieldValuesAreUnique_Succeed(t *testing.T) { Actions: []any{ testUtils.SchemaUpdate{ Schema: ` - type Users { + type User { name: String age: Int } diff --git a/tests/integration/index/drop_test.go b/tests/integration/index/drop_test.go index eabe89ca2f..96e136c332 100644 --- a/tests/integration/index/drop_test.go +++ b/tests/integration/index/drop_test.go @@ -22,7 +22,7 @@ func TestIndexDrop_IfIndexDoesNotExist_ReturnError(t *testing.T) { Actions: []any{ testUtils.SchemaUpdate{ Schema: ` - type Users { + type User { name: String age: Int } @@ -45,7 +45,7 @@ func TestIndexDrop_IfIndexDoesNotExist_ReturnError(t *testing.T) { testUtils.Request{ Request: ` query { - Users { + User { name age } diff --git a/tests/integration/index/get_test.go b/tests/integration/index/get_test.go index a5ca4ddf5a..f29d9046cc 100644 --- a/tests/integration/index/get_test.go +++ b/tests/integration/index/get_test.go @@ -23,7 +23,7 @@ func TestIndexGet_IfThereAreNoIndexes_ReturnEmptyList(t *testing.T) { Actions: []any{ testUtils.SchemaUpdate{ Schema: ` - type Users { + type User { name: String age: Int } From 6b13af835612c99b019b196c5f1aac907c220460 Mon Sep 17 00:00:00 2001 From: Islam Aleiv Date: Wed, 13 Dec 2023 12:33:36 +0100 Subject: [PATCH 11/15] Add file for change detector --- docs/data_format_changes/i2131-rename-schema-root.md | 3 +++ 1 file changed, 3 insertions(+) create mode 100644 docs/data_format_changes/i2131-rename-schema-root.md diff --git a/docs/data_format_changes/i2131-rename-schema-root.md b/docs/data_format_changes/i2131-rename-schema-root.md new file mode 100644 index 0000000000..f71258c0d8 --- /dev/null +++ b/docs/data_format_changes/i2131-rename-schema-root.md @@ -0,0 +1,3 @@ +# Changed some tests so that they are consistent with others + +Change collection names 
from "Users" to "User" and made all fields start with lower case letters. \ No newline at end of file From 0e3280e0ae1a3ef065b4195acb4611213653e39d Mon Sep 17 00:00:00 2001 From: Islam Aleiv Date: Wed, 13 Dec 2023 14:06:07 +0100 Subject: [PATCH 12/15] Fix race --- tests/gen/cli/util_test.go | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/tests/gen/cli/util_test.go b/tests/gen/cli/util_test.go index 2e93f7b146..07f027ef7a 100644 --- a/tests/gen/cli/util_test.go +++ b/tests/gen/cli/util_test.go @@ -74,14 +74,14 @@ func start(ctx context.Context, cfg *config.Config) (*defraInstance, error) { cfg.API.Address = server.AssignedAddr() // run the server in a separate goroutine - go func() { - log.FeedbackInfo(ctx, fmt.Sprintf("Providing HTTP API at %s.", cfg.API.AddressToURL())) + go func(apiAddress string) { + log.FeedbackInfo(ctx, fmt.Sprintf("Providing HTTP API at %s.", apiAddress)) if err := server.Run(ctx); err != nil && !errors.Is(err, http.ErrServerClosed) { log.FeedbackErrorE(ctx, "Failed to run the HTTP server", err) db.Close() os.Exit(1) } - }() + }(cfg.API.AddressToURL()) return &defraInstance{ db: db, From 07027af045a6425da6033f797992668811f715e9 Mon Sep 17 00:00:00 2001 From: Islam Aleiv Date: Mon, 18 Dec 2023 19:47:46 +0100 Subject: [PATCH 13/15] make index iterator return error directly --- db/fetcher/indexer.go | 6 ++-- db/fetcher/indexer_iterators.go | 50 ++++++++++++++++----------------- 2 files changed, 27 insertions(+), 29 deletions(-) diff --git a/db/fetcher/indexer.go b/db/fetcher/indexer.go index 04ff33f2aa..6b4833d00f 100644 --- a/db/fetcher/indexer.go +++ b/db/fetcher/indexer.go @@ -114,9 +114,9 @@ func (f *IndexFetcher) FetchNext(ctx context.Context) (EncodedDocument, ExecInfo for { f.doc.Reset() - res := f.indexIter.Next() - if res.err != nil { - return nil, ExecInfo{}, res.err + res, err := f.indexIter.Next() + if err != nil { + return nil, ExecInfo{}, err } if !res.foundKey { diff --git a/db/fetcher/indexer_iterators.go b/db/fetcher/indexer_iterators.go index d892918644..3b2bd1f996 100644 --- a/db/fetcher/indexer_iterators.go +++ b/db/fetcher/indexer_iterators.go @@ -45,7 +45,7 @@ const ( // For example, iteration over condition _eq and _gt will have completely different logic. 
type indexIterator interface { Init(context.Context, datastore.DSReaderWriter) error - Next() indexIterResult + Next() (indexIterResult, error) Close() error } @@ -53,26 +53,25 @@ type indexIterResult struct { key core.IndexDataStoreKey foundKey bool value []byte - err error } type queryResultIterator struct { resultIter query.Results } -func (i *queryResultIterator) Next() indexIterResult { +func (i *queryResultIterator) Next() (indexIterResult, error) { res, hasVal := i.resultIter.NextSync() if res.Error != nil { - return indexIterResult{err: res.Error} + return indexIterResult{}, res.Error } if !hasVal { - return indexIterResult{} + return indexIterResult{}, nil } key, err := core.NewIndexDataStoreKey(res.Key) if err != nil { - return indexIterResult{err: err} + return indexIterResult{}, err } - return indexIterResult{key: key, value: res.Value, foundKey: true} + return indexIterResult{key: key, value: res.Value, foundKey: true}, nil } func (i *queryResultIterator) Close() error { @@ -99,12 +98,12 @@ func (i *eqPrefixIndexIterator) Init(ctx context.Context, store datastore.DSRead return nil } -func (i *eqPrefixIndexIterator) Next() indexIterResult { - res := i.queryResultIterator.Next() +func (i *eqPrefixIndexIterator) Next() (indexIterResult, error) { + res, err := i.queryResultIterator.Next() if res.foundKey { i.execInfo.IndexesFetched++ } - return res + return res, err } type filterValueIndexIterator interface { @@ -135,18 +134,18 @@ func (i *eqSingleIndexIterator) Init(ctx context.Context, store datastore.DSRead return nil } -func (i *eqSingleIndexIterator) Next() indexIterResult { +func (i *eqSingleIndexIterator) Next() (indexIterResult, error) { if i.store == nil { - return indexIterResult{} + return indexIterResult{}, nil } i.indexKey.FieldValues = [][]byte{i.value} val, err := i.store.Get(i.ctx, i.indexKey.ToDS()) if err != nil { - return indexIterResult{err: err} + return indexIterResult{}, err } i.store = nil i.execInfo.IndexesFetched++ - return indexIterResult{key: i.indexKey, value: val, foundKey: true} + return indexIterResult{key: i.indexKey, value: val, foundKey: true}, nil } func (i *eqSingleIndexIterator) Close() error { @@ -201,23 +200,22 @@ func (i *inIndexIterator) Init(ctx context.Context, store datastore.DSReaderWrit return err } -func (i *inIndexIterator) Next() indexIterResult { +func (i *inIndexIterator) Next() (indexIterResult, error) { for i.hasIterator { - res := i.filterValueIndexIterator.Next() - if res.err != nil { - return res + res, err := i.filterValueIndexIterator.Next() + if err != nil { + return indexIterResult{}, err } if !res.foundKey { - var err error i.hasIterator, err = i.nextIterator() if err != nil { - return indexIterResult{err: err} + return indexIterResult{}, err } continue } - return res + return res, nil } - return indexIterResult{} + return indexIterResult{}, nil } func (i *inIndexIterator) Close() error { @@ -281,12 +279,12 @@ func (i *scanningIndexIterator) Init(ctx context.Context, store datastore.DSRead return nil } -func (i *scanningIndexIterator) Next() indexIterResult { - res := i.queryResultIterator.Next() +func (i *scanningIndexIterator) Next() (indexIterResult, error) { + res, err := i.queryResultIterator.Next() if i.filter.err != nil { - return indexIterResult{err: i.filter.err} + return indexIterResult{}, i.filter.err } - return res + return res, err } // checks if the stored index value satisfies the condition From 3d681e4b500041fa139040a92f053f5b8d2969c7 Mon Sep 17 00:00:00 2001 From: Islam Aleiv Date: Tue, 19 Dec 2023 
09:57:41 +0100 Subject: [PATCH 14/15] Explicitly commit txn after saving a doc --- db/indexed_docs_test.go | 1 + 1 file changed, 1 insertion(+) diff --git a/db/indexed_docs_test.go b/db/indexed_docs_test.go index 103c454054..5037084aec 100644 --- a/db/indexed_docs_test.go +++ b/db/indexed_docs_test.go @@ -50,6 +50,7 @@ type productDoc struct { func (f *indexTestFixture) saveDocToCollection(doc *client.Document, col client.Collection) { err := col.Create(f.ctx, doc) require.NoError(f.t, err) + f.commitTxn() f.txn, err = f.db.NewTxn(f.ctx, false) require.NoError(f.t, err) } From 0017a0e1824ad567464c07472839345f49c6d747 Mon Sep 17 00:00:00 2001 From: Islam Aleiv Date: Tue, 19 Dec 2023 11:34:14 +0100 Subject: [PATCH 15/15] Make transaction state consistent --- db/collection_index.go | 4 +--- db/index_test.go | 15 +++++---------- db/indexed_docs_test.go | 19 ------------------- 3 files changed, 6 insertions(+), 32 deletions(-) diff --git a/db/collection_index.go b/db/collection_index.go index 88476bf481..278586902b 100644 --- a/db/collection_index.go +++ b/db/collection_index.go @@ -13,7 +13,6 @@ package db import ( "context" "encoding/json" - "errors" "fmt" "strconv" "strings" @@ -223,8 +222,7 @@ func (c *collection) createIndex( c.indexes = append(c.indexes, colIndex) err = c.indexExistingDocs(ctx, txn, colIndex) if err != nil { - removeErr := colIndex.RemoveAll(ctx, txn) - return nil, errors.Join(err, removeErr) + return nil, err } return colIndex, nil } diff --git a/db/index_test.go b/db/index_test.go index 33e85275c7..911228e649 100644 --- a/db/index_test.go +++ b/db/index_test.go @@ -178,7 +178,6 @@ func getProductsIndexDescOnCategory() client.IndexDescription { func (f *indexTestFixture) createUserCollectionIndexOnName() client.IndexDescription { newDesc, err := f.createCollectionIndexFor(f.users.Name(), getUsersIndexDescOnName()) require.NoError(f.t, err) - f.commitTxn() return newDesc } @@ -191,14 +190,12 @@ func (f *indexTestFixture) createUserCollectionUniqueIndexOnName() client.IndexD indexDesc := makeUnique(getUsersIndexDescOnName()) newDesc, err := f.createCollectionIndexFor(f.users.Name(), indexDesc) require.NoError(f.t, err) - f.commitTxn() return newDesc } func (f *indexTestFixture) createUserCollectionIndexOnAge() client.IndexDescription { newDesc, err := f.createCollectionIndexFor(f.users.Name(), getUsersIndexDescOnAge()) require.NoError(f.t, err) - f.commitTxn() return newDesc } @@ -239,7 +236,11 @@ func (f *indexTestFixture) createCollectionIndexFor( collectionName string, desc client.IndexDescription, ) (client.IndexDescription, error) { - return f.db.createCollectionIndex(f.ctx, f.txn, collectionName, desc) + index, err := f.db.createCollectionIndex(f.ctx, f.txn, collectionName, desc) + if err == nil { + f.commitTxn() + } + return index, err } func (f *indexTestFixture) getAllIndexes() (map[client.CollectionName][]client.IndexDescription, error) { @@ -525,7 +526,6 @@ func TestCreateIndex_IfAttemptToIndexOnUnsupportedType_ReturnError(t *testing.T) _, err = f.createCollectionIndexFor(collection.Name(), indexDesc) require.ErrorIs(f.t, err, NewErrUnsupportedIndexFieldType(unsupportedKind)) - f.commitTxn() } func TestGetIndexes_ShouldReturnListOfAllExistingIndexes(t *testing.T) { @@ -539,8 +539,6 @@ func TestGetIndexes_ShouldReturnListOfAllExistingIndexes(t *testing.T) { _, err := f.createCollectionIndexFor(usersColName, usersIndexDesc) assert.NoError(t, err) - f.commitTxn() - f.getProductsCollectionDesc() productsIndexDesc := client.IndexDescription{ Name: 
"products_description_index", @@ -665,8 +663,6 @@ func TestGetCollectionIndexes_ShouldReturnListOfCollectionIndexes(t *testing.T) _, err := f.createCollectionIndexFor(usersColName, usersIndexDesc) assert.NoError(t, err) - f.commitTxn() - f.getProductsCollectionDesc() productsIndexDesc := client.IndexDescription{ Name: "products_description_index", @@ -1018,7 +1014,6 @@ func TestCollectionGetIndexes_ShouldReturnIndexesInOrderedByName(t *testing.T) { _, err := f.createCollectionIndexFor(collection.Name(), indexDesc) require.NoError(t, err) } - f.commitTxn() indexes, err := collection.GetIndexes(f.ctx) require.NoError(t, err) diff --git a/db/indexed_docs_test.go b/db/indexed_docs_test.go index 5037084aec..4110463c09 100644 --- a/db/indexed_docs_test.go +++ b/db/indexed_docs_test.go @@ -1032,25 +1032,6 @@ func TestUniqueCreate_ShouldIndexExistingDocs(t *testing.T) { assert.Equal(t, data, []byte(doc2.Key().String())) } -func TestUniqueCreate_IfFailsToIndex_ShouldNotLeaveArtifacts(t *testing.T) { - f := newIndexTestFixture(t) - defer f.db.Close() - - doc1 := f.newUserDoc("John", 21) - f.saveDocToCollection(doc1, f.users) - doc2 := f.newUserDoc("John", 18) - f.saveDocToCollection(doc2, f.users) - - indexDesc := makeUnique(getUsersIndexDescOnName()) - _, err := f.createCollectionIndexFor(f.users.Name(), indexDesc) - require.Error(t, err) - - // We assume here that the newly created index (that failed to index) got an ID of 1. - key := core.IndexDataStoreKey{CollectionID: f.users.ID(), IndexID: 1} - - assert.Len(t, f.getPrefixFromDataStore(key.ToString()), 0) -} - func TestUnique_IfIndexedFieldIsNil_StoreItAsNil(t *testing.T) { f := newIndexTestFixture(t) defer f.db.Close()