diff --git a/tests/integration/acp/p2p/create_test.go b/tests/integration/acp/p2p/create_test.go
new file mode 100644
index 0000000000..8775a553d7
--- /dev/null
+++ b/tests/integration/acp/p2p/create_test.go
@@ -0,0 +1,132 @@
+// Copyright 2024 Democratized Data Foundation
+//
+// Use of this software is governed by the Business Source License
+// included in the file licenses/BSL.txt.
+//
+// As of the Change Date specified in that file, in accordance with
+// the Business Source License, use of this software will be governed
+// by the Apache License, Version 2.0, included in the file
+// licenses/APL.txt.
+
+package test_acp_p2p
+
+import (
+	"fmt"
+	"testing"
+
+	"github.com/sourcenetwork/immutable"
+
+	testUtils "github.com/sourcenetwork/defradb/tests/integration"
+)
+
+func TestACP_P2PCreatePrivateDocumentsOnDifferentNodes_SourceHubACP(t *testing.T) {
+	expectedPolicyID := "fc56b7509c20ac8ce682b3b9b4fdaad868a9c70dda6ec16720298be64f16e9a4"
+
+	test := testUtils.TestCase{
+
+		Description: "Test acp, p2p create private documents on different nodes, with source-hub",
+
+		SupportedACPTypes: immutable.Some(
+			[]testUtils.ACPType{
+				testUtils.SourceHubACPType,
+			},
+		),
+
+		Actions: []any{
+			testUtils.RandomNetworkingConfig(),
+
+			testUtils.RandomNetworkingConfig(),
+
+			testUtils.AddPolicy{
+
+				Identity: immutable.Some(1),
+
+				Policy: `
+                    name: Test Policy
+
+                    description: A Policy
+
+                    actor:
+                      name: actor
+
+                    resources:
+                      users:
+                        permissions:
+                          read:
+                            expr: owner + reader + writer
+
+                          write:
+                            expr: owner + writer
+
+                          nothing:
+                            expr: dummy
+
+                        relations:
+                          owner:
+                            types:
+                              - actor
+
+                          reader:
+                            types:
+                              - actor
+
+                          writer:
+                            types:
+                              - actor
+
+                          admin:
+                            manages:
+                              - reader
+                            types:
+                              - actor
+
+                          dummy:
+                            types:
+                              - actor
+                `,
+
+				ExpectedPolicyID: expectedPolicyID,
+			},
+
+			testUtils.SchemaUpdate{
+				Schema: fmt.Sprintf(`
+						type Users @policy(
+							id: "%s",
+							resource: "users"
+						) {
+							name: String
+							age: Int
+						}
+					`,
+					expectedPolicyID,
+				),
+			},
+
+			testUtils.CreateDoc{
+				Identity: immutable.Some(1),
+
+				NodeID: immutable.Some(0),
+
+				CollectionID: 0,
+
+				DocMap: map[string]any{
+					"name": "Shahzad",
+				},
+			},
+
+			testUtils.CreateDoc{
+				Identity: immutable.Some(1),
+
+				NodeID: immutable.Some(1),
+
+				CollectionID: 0,
+
+				DocMap: map[string]any{
+					"name": "Shahzad Lone",
+				},
+			},
+		},
+	}
+
+	testUtils.ExecuteTestCase(t, test)
+}
diff --git a/tests/integration/acp/p2p/delete_test.go b/tests/integration/acp/p2p/delete_test.go
new file mode 100644
index 0000000000..59cae4cde9
--- /dev/null
+++ b/tests/integration/acp/p2p/delete_test.go
@@ -0,0 +1,159 @@
+// Copyright 2024 Democratized Data Foundation
+//
+// Use of this software is governed by the Business Source License
+// included in the file licenses/BSL.txt.
+//
+// As of the Change Date specified in that file, in accordance with
+// the Business Source License, use of this software will be governed
+// by the Apache License, Version 2.0, included in the file
+// licenses/APL.txt.
+
+package test_acp_p2p
+
+import (
+	"fmt"
+	"testing"
+
+	"github.com/sourcenetwork/immutable"
+
+	testUtils "github.com/sourcenetwork/defradb/tests/integration"
+)
+
+func TestACP_P2PDeletePrivateDocumentsOnDifferentNodes_SourceHubACP(t *testing.T) {
+	expectedPolicyID := "fc56b7509c20ac8ce682b3b9b4fdaad868a9c70dda6ec16720298be64f16e9a4"
+
+	test := testUtils.TestCase{
+
+		Description: "Test acp, p2p delete private documents on different nodes, with source-hub",
+
+		SupportedACPTypes: immutable.Some(
+			[]testUtils.ACPType{
+				testUtils.SourceHubACPType,
+			},
+		),
+
+		Actions: []any{
+			testUtils.RandomNetworkingConfig(),
+
+			testUtils.RandomNetworkingConfig(),
+
+			testUtils.AddPolicy{
+
+				Identity: immutable.Some(1),
+
+				Policy: `
+                    name: Test Policy
+
+                    description: A Policy
+
+                    actor:
+                      name: actor
+
+                    resources:
+                      users:
+                        permissions:
+                          read:
+                            expr: owner + reader + writer
+
+                          write:
+                            expr: owner + writer
+
+                          nothing:
+                            expr: dummy
+
+                        relations:
+                          owner:
+                            types:
+                              - actor
+
+                          reader:
+                            types:
+                              - actor
+
+                          writer:
+                            types:
+                              - actor
+
+                          admin:
+                            manages:
+                              - reader
+                            types:
+                              - actor
+
+                          dummy:
+                            types:
+                              - actor
+                `,
+
+				ExpectedPolicyID: expectedPolicyID,
+			},
+
+			testUtils.SchemaUpdate{
+				Schema: fmt.Sprintf(`
+						type Users @policy(
+							id: "%s",
+							resource: "users"
+						) {
+							name: String
+							age: Int
+						}
+					`,
+					expectedPolicyID,
+				),
+			},
+
+			testUtils.ConfigureReplicator{
+				SourceNodeID: 0,
+				TargetNodeID: 1,
+			},
+
+			testUtils.CreateDoc{
+				Identity: immutable.Some(1),
+
+				NodeID: immutable.Some(0),
+
+				CollectionID: 0,
+
+				DocMap: map[string]any{
+					"name": "Shahzad",
+				},
+			},
+
+			testUtils.CreateDoc{
+				Identity: immutable.Some(1),
+
+				NodeID: immutable.Some(1),
+
+				CollectionID: 0,
+
+				DocMap: map[string]any{
+					"name": "Shahzad Lone",
+				},
+			},
+
+			testUtils.WaitForSync{},
+
+			testUtils.DeleteDoc{
+				Identity: immutable.Some(1),
+
+				NodeID: immutable.Some(0),
+
+				CollectionID: 0,
+
+				DocID: 0,
+			},
+
+			testUtils.DeleteDoc{
+				Identity: immutable.Some(1),
+
+				NodeID: immutable.Some(1),
+
+				CollectionID: 0,
+
+				DocID: 1,
+			},
+		},
+	}
+
+	testUtils.ExecuteTestCase(t, test)
+}
diff --git a/tests/integration/acp/p2p/update_test.go b/tests/integration/acp/p2p/update_test.go
new file mode 100644
index 0000000000..339babee10
--- /dev/null
+++ b/tests/integration/acp/p2p/update_test.go
@@ -0,0 +1,171 @@
+// Copyright 2024 Democratized Data Foundation
+//
+// Use of this software is governed by the Business Source License
+// included in the file licenses/BSL.txt.
+//
+// As of the Change Date specified in that file, in accordance with
+// the Business Source License, use of this software will be governed
+// by the Apache License, Version 2.0, included in the file
+// licenses/APL.txt.
+
+package test_acp_p2p
+
+import (
+	"fmt"
+	"testing"
+
+	"github.com/sourcenetwork/immutable"
+
+	testUtils "github.com/sourcenetwork/defradb/tests/integration"
+)
+
+func TestACP_P2PUpdatePrivateDocumentsOnDifferentNodes_SourceHubACP(t *testing.T) {
+	expectedPolicyID := "fc56b7509c20ac8ce682b3b9b4fdaad868a9c70dda6ec16720298be64f16e9a4"
+
+	test := testUtils.TestCase{
+
+		Description: "Test acp, p2p update private documents on different nodes, with source-hub",
+
+		SupportedACPTypes: immutable.Some(
+			[]testUtils.ACPType{
+				testUtils.SourceHubACPType,
+			},
+		),
+
+		Actions: []any{
+			testUtils.RandomNetworkingConfig(),
+
+			testUtils.RandomNetworkingConfig(),
+
+			testUtils.AddPolicy{
+
+				Identity: immutable.Some(1),
+
+				Policy: `
+                    name: Test Policy
+
+                    description: A Policy
+
+                    actor:
+                      name: actor
+
+                    resources:
+                      users:
+                        permissions:
+                          read:
+                            expr: owner + reader + writer
+
+                          write:
+                            expr: owner + writer
+
+                          nothing:
+                            expr: dummy
+
+                        relations:
+                          owner:
+                            types:
+                              - actor
+
+                          reader:
+                            types:
+                              - actor
+
+                          writer:
+                            types:
+                              - actor
+
+                          admin:
+                            manages:
+                              - reader
+                            types:
+                              - actor
+
+                          dummy:
+                            types:
+                              - actor
+                `,
+
+				ExpectedPolicyID: expectedPolicyID,
+			},
+
+			testUtils.SchemaUpdate{
+				Schema: fmt.Sprintf(`
+						type Users @policy(
+							id: "%s",
+							resource: "users"
+						) {
+							name: String
+							age: Int
+						}
+					`,
+					expectedPolicyID,
+				),
+			},
+
+			testUtils.ConfigureReplicator{
+				SourceNodeID: 0,
+				TargetNodeID: 1,
+			},
+
+			testUtils.CreateDoc{
+				Identity: immutable.Some(1),
+
+				NodeID: immutable.Some(0),
+
+				CollectionID: 0,
+
+				DocMap: map[string]any{
+					"name": "Shahzad",
+				},
+			},
+
+			testUtils.CreateDoc{
+				Identity: immutable.Some(1),
+
+				NodeID: immutable.Some(1),
+
+				CollectionID: 0,
+
+				DocMap: map[string]any{
+					"name": "Shahzad Lone",
+				},
+			},
+
+			testUtils.WaitForSync{},
+
+			testUtils.UpdateDoc{
+				Identity: immutable.Some(1),
+
+				NodeID: immutable.Some(0),
+
+				CollectionID: 0,
+
+				DocID: 0,
+
+				Doc: `
+					{
+						"name": "ShahzadLone"
+					}
+				`,
+			},
+
+			testUtils.UpdateDoc{
+				Identity: immutable.Some(1),
+
+				NodeID: immutable.Some(1),
+
+				CollectionID: 0,
+
+				DocID: 1,
+
+				Doc: `
+					{
+						"name": "ShahzadLone"
+					}
+				`,
+			},
+		},
+	}
+
+	testUtils.ExecuteTestCase(t, test)
+}
diff --git a/tests/integration/test_case.go b/tests/integration/test_case.go
index 9b0bce913b..f102294e97 100644
--- a/tests/integration/test_case.go
+++ b/tests/integration/test_case.go
@@ -718,7 +718,8 @@ type ClientIntrospectionRequest struct {
 type BackupExport struct {
 	// NodeID may hold the ID (index) of a node to generate the backup from.
 	//
-	// If a value is not provided the indexes will be retrieved from the first nodes.
+	// If a value is not provided, the backup export will be done for all nodes.
+	// todo: https://github.com/sourcenetwork/defradb/issues/3067
 	NodeID immutable.Option[int]
 
 	// The backup configuration.
@@ -738,7 +739,8 @@ type BackupExport struct {
 type BackupImport struct {
 	// NodeID may hold the ID (index) of a node to generate the backup from.
 	//
-	// If a value is not provided the indexes will be retrieved from the first nodes.
+	// If a value is not provided, the backup import will be done for all nodes.
+	// todo: https://github.com/sourcenetwork/defradb/issues/3067
 	NodeID immutable.Option[int]
 
 	// The backup file path.
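Every utils.go change below follows the same dispatch shape: when action.NodeID is set, the action runs only against that node; otherwise it is applied to every node, with each call wrapped in withRetryOnNode. The following standalone Go sketch (not part of the patch; the option type and forTargetNodes helper are simplified stand-ins invented purely for illustration) shows that branching in isolation:

package main

import "fmt"

// option is a minimal stand-in for immutable.Option[int], used only in this sketch.
type option struct {
	set   bool
	value int
}

// forTargetNodes applies fn to the node selected by nodeID when it is set, otherwise
// to every node — mirroring the if/else branching added to createDoc, deleteDoc,
// updateDoc, createIndex, dropIndex, backupExport and backupImport below.
func forTargetNodes(nodeID option, nodeCount int, fn func(node int) error) error {
	if nodeID.set {
		return fn(nodeID.value)
	}
	for n := 0; n < nodeCount; n++ {
		if err := fn(n); err != nil {
			return err
		}
	}
	return nil
}

func main() {
	// With nodeID unset the action runs on all nodes, as in the `else` branches below.
	_ = forTargetNodes(option{}, 2, func(node int) error {
		fmt.Println("acting on node", node)
		return nil
	})
}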
diff --git a/tests/integration/utils.go b/tests/integration/utils.go
index 85ba2f870d..e6ab296140 100644
--- a/tests/integration/utils.go
+++ b/tests/integration/utils.go
@@ -572,6 +572,12 @@ func getNodes(nodeID immutable.Option[int], nodes []clients.Client) []clients.Cl
 //
 // If nodeID has a value it will return collections for that node only, otherwise all collections across all
 // nodes will be returned.
+//
+// WARNING:
+// The caller must not assume that the returned collections are indexed by node index when the given
+// index is greater than 0. For example, if collections are requested with nodeID=2, the result contains
+// a single element (at index 0) holding that node's collections, which the caller could easily mistake
+// for the collections of node 0.
 func getNodeCollections(nodeID immutable.Option[int], collections [][]client.Collection) [][]client.Collection {
 	if !nodeID.HasValue() {
 		return collections
@@ -931,11 +937,12 @@ func getIndexes(
 	}
 
 	var expectedErrorRaised bool
-	actionNodes := getNodes(action.NodeID, s.nodes)
-	for nodeID, collections := range getNodeCollections(action.NodeID, s.collections) {
-		err := withRetry(
-			actionNodes,
-			nodeID,
+
+	if action.NodeID.HasValue() {
+		nodeID := action.NodeID.Value()
+		collections := s.collections[nodeID]
+		err := withRetryOnNode(
+			s.nodes[nodeID],
 			func() error {
 				actualIndexes, err := collections[action.CollectionID].GetIndexes(s.ctx)
 				if err != nil {
@@ -950,6 +957,25 @@
 		)
 		expectedErrorRaised = expectedErrorRaised ||
 			AssertError(s.t, s.testCase.Description, err, action.ExpectedError)
+	} else {
+		for nodeID, collections := range s.collections {
+			err := withRetryOnNode(
+				s.nodes[nodeID],
+				func() error {
+					actualIndexes, err := collections[action.CollectionID].GetIndexes(s.ctx)
+					if err != nil {
+						return err
+					}
+
+					assertIndexesListsEqual(action.ExpectedIndexes,
+						actualIndexes, s.t, s.testCase.Description)
+
+					return nil
+				},
+			)
+			expectedErrorRaised = expectedErrorRaised ||
+				AssertError(s.t, s.testCase.Description, err, action.ExpectedError)
+		}
 	}
 
 	assertExpectedErrorRaised(s.t, s.testCase.Description, action.ExpectedError, expectedErrorRaised)
@@ -1206,18 +1232,43 @@
 
 	var expectedErrorRaised bool
 	var docIDs []client.DocID
-	actionNodes := getNodes(action.NodeID, s.nodes)
-	for nodeID, collections := range getNodeCollections(action.NodeID, s.collections) {
-		err := withRetry(
-			actionNodes,
-			nodeID,
+
+	if action.NodeID.HasValue() {
+		actionNode := s.nodes[action.NodeID.Value()]
+		collections := s.collections[action.NodeID.Value()]
+		err := withRetryOnNode(
+			actionNode,
 			func() error {
 				var err error
-				docIDs, err = mutation(s, action, actionNodes[nodeID], nodeID, collections[action.CollectionID])
+				docIDs, err = mutation(
+					s,
+					action,
+					actionNode,
+					action.NodeID.Value(),
+					collections[action.CollectionID],
+				)
 				return err
 			},
 		)
 		expectedErrorRaised = AssertError(s.t, s.testCase.Description, err, action.ExpectedError)
+	} else {
+		for nodeID, collections := range s.collections {
+			err := withRetryOnNode(
+				s.nodes[nodeID],
+				func() error {
+					var err error
+					docIDs, err = mutation(
+						s,
+						action,
+						s.nodes[nodeID],
+						nodeID,
+						collections[action.CollectionID],
+					)
+					return err
+				},
+			)
+			expectedErrorRaised = AssertError(s.t, s.testCase.Description, err, action.ExpectedError)
+		}
 	}
 
 	assertExpectedErrorRaised(s.t, s.testCase.Description, action.ExpectedError, expectedErrorRaised)
@@ -1389,20 +1440,34 @@ func deleteDoc(
 	docID := s.docIDs[action.CollectionID][action.DocID]
 
 	var expectedErrorRaised bool
-	actionNodes := getNodes(action.NodeID, s.nodes)
-	for nodeID, collections := range getNodeCollections(action.NodeID, s.collections) {
+
+	if action.NodeID.HasValue() {
+		nodeID := action.NodeID.Value()
+		actionNode := s.nodes[nodeID]
+		collections := s.collections[nodeID]
 		identity := getIdentity(s, nodeID, action.Identity)
 		ctx := db.SetContextIdentity(s.ctx, identity)
-
-		err := withRetry(
-			actionNodes,
-			nodeID,
+		err := withRetryOnNode(
+			actionNode,
 			func() error {
 				_, err := collections[action.CollectionID].Delete(ctx, docID)
 				return err
 			},
 		)
 		expectedErrorRaised = AssertError(s.t, s.testCase.Description, err, action.ExpectedError)
+	} else {
+		for nodeID, collections := range s.collections {
+			identity := getIdentity(s, nodeID, action.Identity)
+			ctx := db.SetContextIdentity(s.ctx, identity)
+			err := withRetryOnNode(
+				s.nodes[nodeID],
+				func() error {
+					_, err := collections[action.CollectionID].Delete(ctx, docID)
+					return err
+				},
+			)
+			expectedErrorRaised = AssertError(s.t, s.testCase.Description, err, action.ExpectedError)
+		}
 	}
 
 	assertExpectedErrorRaised(s.t, s.testCase.Description, action.ExpectedError, expectedErrorRaised)
@@ -1433,16 +1498,41 @@
 	}
 
 	var expectedErrorRaised bool
-	actionNodes := getNodes(action.NodeID, s.nodes)
-	for nodeID, collections := range getNodeCollections(action.NodeID, s.collections) {
-		err := withRetry(
-			actionNodes,
-			nodeID,
+
+	if action.NodeID.HasValue() {
+		nodeID := action.NodeID.Value()
+		collections := s.collections[nodeID]
+		actionNode := s.nodes[nodeID]
+		err := withRetryOnNode(
+			actionNode,
 			func() error {
-				return mutation(s, action, actionNodes[nodeID], nodeID, collections[action.CollectionID])
+				return mutation(
+					s,
+					action,
+					actionNode,
+					nodeID,
+					collections[action.CollectionID],
+				)
 			},
 		)
 		expectedErrorRaised = AssertError(s.t, s.testCase.Description, err, action.ExpectedError)
+	} else {
+		for nodeID, collections := range s.collections {
+			actionNode := s.nodes[nodeID]
+			err := withRetryOnNode(
+				actionNode,
+				func() error {
+					return mutation(
+						s,
+						action,
+						actionNode,
+						nodeID,
+						collections[action.CollectionID],
+					)
+				},
+			)
+			expectedErrorRaised = AssertError(s.t, s.testCase.Description, err, action.ExpectedError)
+		}
 	}
 
 	assertExpectedErrorRaised(s.t, s.testCase.Description, action.ExpectedError, expectedErrorRaised)
@@ -1531,14 +1621,13 @@ func updateDocViaGQL(
 func updateWithFilter(s *state, action UpdateWithFilter) {
 	var res *client.UpdateResult
 	var expectedErrorRaised bool
-	actionNodes := getNodes(action.NodeID, s.nodes)
-	for nodeID, collections := range getNodeCollections(action.NodeID, s.collections) {
+	if action.NodeID.HasValue() {
+		nodeID := action.NodeID.Value()
+		collections := s.collections[nodeID]
 		identity := getIdentity(s, nodeID, action.Identity)
 		ctx := db.SetContextIdentity(s.ctx, identity)
-
-		err := withRetry(
-			actionNodes,
-			nodeID,
+		err := withRetryOnNode(
+			s.nodes[nodeID],
 			func() error {
 				var err error
 				res, err = collections[action.CollectionID].UpdateWithFilter(ctx, action.Filter, action.Updater)
@@ -1546,6 +1635,20 @@ func updateWithFilter(s *state, action UpdateWithFilter) {
 			},
 		)
 		expectedErrorRaised = AssertError(s.t, s.testCase.Description, err, action.ExpectedError)
+	} else {
+		for nodeID, collections := range s.collections {
+			identity := getIdentity(s, nodeID, action.Identity)
+			ctx := db.SetContextIdentity(s.ctx, identity)
+			err := withRetryOnNode(
+				s.nodes[nodeID],
+				func() error {
+					var err error
+					res, err = collections[action.CollectionID].UpdateWithFilter(ctx, action.Filter, action.Updater)
+					return err
+				},
+			)
+			expectedErrorRaised = AssertError(s.t, s.testCase.Description, err, action.ExpectedError)
+		}
 	}
 
 	assertExpectedErrorRaised(s.t, s.testCase.Description, action.ExpectedError, expectedErrorRaised)
@@ -1562,11 +1665,15 @@ func createIndex(
 ) {
 	if action.CollectionID >= len(s.indexes) {
 		// Expand the slice if required, so that the index can be accessed by collection index
-		s.indexes = append(s.indexes,
-			make([][][]client.IndexDescription, action.CollectionID-len(s.indexes)+1)...)
+		s.indexes = append(
+			s.indexes,
+			make([][][]client.IndexDescription, action.CollectionID-len(s.indexes)+1)...,
+		)
 	}
-	actionNodes := getNodes(action.NodeID, s.nodes)
-	for nodeID, collections := range getNodeCollections(action.NodeID, s.collections) {
+
+	if action.NodeID.HasValue() {
+		nodeID := action.NodeID.Value()
+		collections := s.collections[nodeID]
 		indexDesc := client.IndexDescription{
 			Name: action.IndexName,
 		}
@@ -1584,23 +1691,64 @@ func createIndex(
 				})
 			}
 		}
+
 		indexDesc.Unique = action.Unique
-		err := withRetry(
-			actionNodes,
-			nodeID,
+		err := withRetryOnNode(
+			s.nodes[nodeID],
 			func() error {
 				desc, err := collections[action.CollectionID].CreateIndex(s.ctx, indexDesc)
 				if err != nil {
 					return err
 				}
-				s.indexes[nodeID][action.CollectionID] =
-					append(s.indexes[nodeID][action.CollectionID], desc)
+				s.indexes[nodeID][action.CollectionID] = append(
+					s.indexes[nodeID][action.CollectionID],
+					desc,
+				)
 				return nil
 			},
 		)
 		if AssertError(s.t, s.testCase.Description, err, action.ExpectedError) {
 			return
 		}
+	} else {
+		for nodeID, collections := range s.collections {
+			indexDesc := client.IndexDescription{
+				Name: action.IndexName,
+			}
+			if action.FieldName != "" {
+				indexDesc.Fields = []client.IndexedFieldDescription{
+					{
+						Name: action.FieldName,
+					},
+				}
+			} else if len(action.Fields) > 0 {
+				for i := range action.Fields {
+					indexDesc.Fields = append(indexDesc.Fields, client.IndexedFieldDescription{
+						Name:       action.Fields[i].Name,
+						Descending: action.Fields[i].Descending,
+					})
+				}
+			}
+
+			indexDesc.Unique = action.Unique
+			err := withRetryOnNode(
+				s.nodes[nodeID],
+				func() error {
+					desc, err := collections[action.CollectionID].CreateIndex(s.ctx, indexDesc)
+					if err != nil {
+						return err
+					}
+					s.indexes[nodeID][action.CollectionID] = append(
+						s.indexes[nodeID][action.CollectionID],
+						desc,
+					)
+					return nil
+				},
+			)
+			if AssertError(s.t, s.testCase.Description, err, action.ExpectedError) {
+				return
+			}
+		}
 	}
 
 	assertExpectedErrorRaised(s.t, s.testCase.Description, action.ExpectedError, false)
@@ -1612,21 +1760,38 @@ func dropIndex(
 	action DropIndex,
 ) {
 	var expectedErrorRaised bool
-	actionNodes := getNodes(action.NodeID, s.nodes)
-	for nodeID, collections := range getNodeCollections(action.NodeID, s.collections) {
+
+	if action.NodeID.HasValue() {
+		nodeID := action.NodeID.Value()
+		collections := s.collections[nodeID]
+
 		indexName := action.IndexName
 		if indexName == "" {
 			indexName = s.indexes[nodeID][action.CollectionID][action.IndexID].Name
 		}
 
-		err := withRetry(
-			actionNodes,
-			nodeID,
+		err := withRetryOnNode(
+			s.nodes[nodeID],
 			func() error {
 				return collections[action.CollectionID].DropIndex(s.ctx, indexName)
 			},
 		)
 		expectedErrorRaised = AssertError(s.t, s.testCase.Description, err, action.ExpectedError)
+	} else {
+		for nodeID, collections := range s.collections {
+			indexName := action.IndexName
+			if indexName == "" {
+				indexName = s.indexes[nodeID][action.CollectionID][action.IndexID].Name
+			}
+
+			err := withRetryOnNode(
+				s.nodes[nodeID],
+				func() error {
+					return collections[action.CollectionID].DropIndex(s.ctx, indexName)
+				},
+			)
+			expectedErrorRaised = AssertError(s.t, s.testCase.Description, err, action.ExpectedError)
+		}
 	}
 
 	assertExpectedErrorRaised(s.t, s.testCase.Description, action.ExpectedError, expectedErrorRaised)
@@ -1642,11 +1807,12 @@ func backupExport(
 	}
 
 	var expectedErrorRaised bool
-	actionNodes := getNodes(action.NodeID, s.nodes)
-	for nodeID, node := range actionNodes {
-		err := withRetry(
-			actionNodes,
-			nodeID,
+
+	if action.NodeID.HasValue() {
+		nodeID := action.NodeID.Value()
+		node := s.nodes[nodeID]
+		err := withRetryOnNode(
+			node,
 			func() error { return node.BasicExport(s.ctx, &action.Config) },
 		)
 		expectedErrorRaised = AssertError(s.t, s.testCase.Description, err, action.ExpectedError)
@@ -1654,7 +1820,20 @@
 		if !expectedErrorRaised {
 			assertBackupContent(s.t, action.ExpectedContent, action.Config.Filepath)
 		}
+	} else {
+		for _, node := range s.nodes {
+			err := withRetryOnNode(
+				node,
+				func() error { return node.BasicExport(s.ctx, &action.Config) },
+			)
+			expectedErrorRaised = AssertError(s.t, s.testCase.Description, err, action.ExpectedError)
+
+			if !expectedErrorRaised {
+				assertBackupContent(s.t, action.ExpectedContent, action.Config.Filepath)
+			}
+		}
 	}
+
 	assertExpectedErrorRaised(s.t, s.testCase.Description, action.ExpectedError, expectedErrorRaised)
 }
@@ -1672,31 +1851,40 @@
 	_ = os.WriteFile(action.Filepath, []byte(action.ImportContent), 0664)
 
 	var expectedErrorRaised bool
-	actionNodes := getNodes(action.NodeID, s.nodes)
-	for nodeID, node := range actionNodes {
-		err := withRetry(
-			actionNodes,
-			nodeID,
+
+	if action.NodeID.HasValue() {
+		nodeID := action.NodeID.Value()
+		node := s.nodes[nodeID]
+		err := withRetryOnNode(
+			node,
 			func() error { return node.BasicImport(s.ctx, action.Filepath) },
 		)
 		expectedErrorRaised = AssertError(s.t, s.testCase.Description, err, action.ExpectedError)
+	} else {
+		for _, node := range s.nodes {
+			err := withRetryOnNode(
+				node,
+				func() error { return node.BasicImport(s.ctx, action.Filepath) },
+			)
+			expectedErrorRaised = AssertError(s.t, s.testCase.Description, err, action.ExpectedError)
+		}
 	}
+
 	assertExpectedErrorRaised(s.t, s.testCase.Description, action.ExpectedError, expectedErrorRaised)
 }
 
-// withRetry attempts to perform the given action, retrying up to a DB-defined
+// withRetryOnNode attempts to perform the given action, retrying up to a DB-defined
 // maximum attempt count if a transaction conflict error is returned.
 //
 // If a P2P-sync commit for the given document is already in progress this
 // Save call can fail as the transaction will conflict. We don't want to worry
 // about this in our tests so we just retry a few times until it works (or the
 // retry limit is breached - important in case this is a different error)
-func withRetry(
-	nodes []clients.Client,
-	nodeID int,
+func withRetryOnNode(
+	node clients.Client,
 	action func() error,
 ) error {
-	for i := 0; i < nodes[nodeID].MaxTxnRetries(); i++ {
+	for i := 0; i < node.MaxTxnRetries(); i++ {
 		err := action()
 		if errors.Is(err, datastore.ErrTxnConflict) {
 			time.Sleep(100 * time.Millisecond)