diff --git a/CHANGELOG.md b/CHANGELOG.md index 9261b1355b6..99d5a1c01bb 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -35,6 +35,8 @@ as necessary. Empty sections will not end in the release notes. [projectnessie.org](https://projectnessie.org/guides/iceberg-rest/#customizing-nessie-commit-author-et-al). - Introduce new `JDBC2` version store type, which is has the same functionality as the `JDBC` version store type, but uses way less columns, which reduces storage overhead for example in PostgreSQL a lot. +- Introduce new `MONGODB2` version store type, which has the same functionality as the `MONGODB` version + store type, but uses far fewer attributes, which reduces storage overhead. ### Changes @@ -43,6 +45,9 @@ as necessary. Empty sections will not end in the release notes. - The current version store type `JDBC` is deprecated, please migrate to the new `JDBC2` version store type. Please use the [Nessie Server Admin Tool](https://projectnessie.org/nessie-latest/export_import) to migrate from the `JDBC` version store type to `JDBC2`. +- The current version store type `MONGODB` is deprecated; please migrate to the new `MONGODB2` version store + type. Please use the [Nessie Server Admin Tool](https://projectnessie.org/nessie-latest/export_import) + to migrate from the `MONGODB` version store type to `MONGODB2`. ### Fixes diff --git a/bom/build.gradle.kts b/bom/build.gradle.kts index bd0a11ba85d..f9e3bb32e96 100644 --- a/bom/build.gradle.kts +++ b/bom/build.gradle.kts @@ -95,6 +95,8 @@ dependencies { api(project(":nessie-versioned-storage-jdbc2-tests")) api(project(":nessie-versioned-storage-mongodb")) api(project(":nessie-versioned-storage-mongodb-tests")) + api(project(":nessie-versioned-storage-mongodb2")) + api(project(":nessie-versioned-storage-mongodb2-tests")) api(project(":nessie-versioned-storage-rocksdb")) api(project(":nessie-versioned-storage-rocksdb-tests")) api(project(":nessie-versioned-storage-store")) diff --git a/build-tools-integration-tests/build.gradle b/build-tools-integration-tests/build.gradle index 086254870de..2194a62d971 100644 --- a/build-tools-integration-tests/build.gradle +++ b/build-tools-integration-tests/build.gradle @@ -30,7 +30,7 @@ dependencies { implementation("org.projectnessie.nessie:nessie-versioned-storage-cache") implementation("org.projectnessie.nessie:nessie-versioned-storage-common-serialize") implementation("org.projectnessie.nessie:nessie-versioned-storage-inmemory-tests") - implementation("org.projectnessie.nessie:nessie-versioned-storage-mongodb") + implementation("org.projectnessie.nessie:nessie-versioned-storage-mongodb2") implementation("org.projectnessie.nessie:nessie-versioned-storage-rocksdb-tests") implementation("org.projectnessie.nessie:nessie-versioned-storage-store") implementation("org.projectnessie.nessie:nessie-versioned-storage-testextension") diff --git a/build-tools-integration-tests/pom.xml b/build-tools-integration-tests/pom.xml index dc7cef7c99a..06a3a091015 100644 --- a/build-tools-integration-tests/pom.xml +++ b/build-tools-integration-tests/pom.xml @@ -68,7 +68,7 @@ <groupId>org.projectnessie.nessie</groupId> - <artifactId>nessie-versioned-storage-mongodb</artifactId> + <artifactId>nessie-versioned-storage-mongodb2</artifactId> <groupId>org.projectnessie.nessie</groupId> diff --git a/build-tools-integration-tests/src/test/java/org/projectnessie/buildtools/TestClassesAvailability.java b/build-tools-integration-tests/src/test/java/org/projectnessie/buildtools/TestClassesAvailability.java index 02d1e803b61..bd8ad7b23f3 100644 ---
a/build-tools-integration-tests/src/test/java/org/projectnessie/buildtools/TestClassesAvailability.java +++ b/build-tools-integration-tests/src/test/java/org/projectnessie/buildtools/TestClassesAvailability.java @@ -34,8 +34,8 @@ public class TestClassesAvailability { "org.projectnessie.versioned.storage.inmemorytests.InmemoryBackendTestFactory", // from nessie-versioned-storage-rocksdb-tests "org.projectnessie.versioned.storage.rocksdbtests.RocksDBBackendTestFactory", - // from nessie-versioned-storage-mongodb - "org.projectnessie.versioned.storage.mongodb.MongoDBBackendFactory", + // from nessie-versioned-storage-mongodb2 + "org.projectnessie.versioned.storage.mongodb2.MongoDB2BackendFactory", // from nessie-versioned-storage-cache "org.projectnessie.versioned.storage.cache.PersistCaches", // from nessie-versioned-storage-testextension diff --git a/build-tools-integration-tests/src/test/java/org/projectnessie/buildtools/TestPublishedPoms.java b/build-tools-integration-tests/src/test/java/org/projectnessie/buildtools/TestPublishedPoms.java index 223fcb6725e..71aa59b88c9 100644 --- a/build-tools-integration-tests/src/test/java/org/projectnessie/buildtools/TestPublishedPoms.java +++ b/build-tools-integration-tests/src/test/java/org/projectnessie/buildtools/TestPublishedPoms.java @@ -100,7 +100,7 @@ static void setup() { "nessie-versioned-storage-common-serialize", "nessie-versioned-storage-common-tests", "nessie-versioned-storage-inmemory", - "nessie-versioned-storage-mongodb", + "nessie-versioned-storage-mongodb2", "nessie-versioned-storage-rocksdb", "nessie-versioned-storage-store", "nessie-versioned-storage-testextension", diff --git a/gradle/projects.main.properties b/gradle/projects.main.properties index 3617bdd4b4e..0cfb5cc0207 100644 --- a/gradle/projects.main.properties +++ b/gradle/projects.main.properties @@ -82,6 +82,8 @@ nessie-versioned-storage-jdbc2=versioned/storage/jdbc2 nessie-versioned-storage-jdbc2-tests=versioned/storage/jdbc2-tests nessie-versioned-storage-mongodb=versioned/storage/mongodb nessie-versioned-storage-mongodb-tests=versioned/storage/mongodb-tests +nessie-versioned-storage-mongodb2=versioned/storage/mongodb2 +nessie-versioned-storage-mongodb2-tests=versioned/storage/mongodb2-tests nessie-versioned-storage-rocksdb=versioned/storage/rocksdb nessie-versioned-storage-rocksdb-tests=versioned/storage/rocksdb-tests nessie-versioned-storage-store=versioned/storage/store diff --git a/helm/nessie/values.yaml b/helm/nessie/values.yaml index 705adca9060..600dba7f3a7 100644 --- a/helm/nessie/values.yaml +++ b/helm/nessie/values.yaml @@ -28,8 +28,9 @@ imagePullSecrets: [] # `quarkus.log.category."io.smallrye.config".level: DEBUG` logLevel: INFO -# -- Which type of version store to use: IN_MEMORY, ROCKSDB, DYNAMODB, MONGODB, CASSANDRA, JDBC2, BIGTABLE. +# -- Which type of version store to use: IN_MEMORY, ROCKSDB, DYNAMODB, MONGODB2, CASSANDRA, JDBC2, BIGTABLE. # Note: the version store type JDBC is deprecated, please use the Nessie Server Admin Tool to migrate to JDBC2. +# Note: the version store type MONGODB is deprecated, please use the Nessie Server Admin Tool to migrate to MONGODB2. versionStoreType: IN_MEMORY # Cassandra settings. Only required when using CASSANDRA version store type; ignored otherwise. 
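The new `MONGODB2` backend can also be instantiated directly through its factory, which is what the `MongoDB2BackendBuilder` and the integration tests further down in this diff do. A minimal usage sketch, assuming a reachable MongoDB instance; the class name, connection string and database name below are illustrative placeholders, while the factory, config, `Backend`, `Persist` and `StoreConfig` types come from the classes added in this change:

```java
import com.mongodb.client.MongoClient;
import com.mongodb.client.MongoClients;
import org.projectnessie.versioned.storage.common.config.StoreConfig;
import org.projectnessie.versioned.storage.common.persist.Backend;
import org.projectnessie.versioned.storage.common.persist.Persist;
import org.projectnessie.versioned.storage.mongodb2.MongoDB2BackendConfig;
import org.projectnessie.versioned.storage.mongodb2.MongoDB2BackendFactory;

public class MongoDB2UsageSketch {
  public static void main(String[] args) throws Exception {
    // Placeholder connection string and database name.
    try (MongoClient client = MongoClients.create("mongodb://localhost:27017")) {
      MongoDB2BackendConfig config =
          MongoDB2BackendConfig.builder()
              .client(client)
              .databaseName("nessie")
              .build();
      // MongoDB2BackendFactory builds the backend with closeClient=false, so closing
      // the Backend does not close the externally managed MongoClient.
      try (Backend backend = new MongoDB2BackendFactory().buildBackend(config)) {
        backend.setupSchema(); // resolves the "refs2" and "objs2" collections
        Persist persist = backend.createFactory().newPersist(StoreConfig.Adjustable.empty());
        System.out.println("Version store backend: " + persist.name());
      }
    }
  }
}
```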
diff --git a/servers/quarkus-common/build.gradle.kts b/servers/quarkus-common/build.gradle.kts index d552b0163bc..7d7ed7291a7 100644 --- a/servers/quarkus-common/build.gradle.kts +++ b/servers/quarkus-common/build.gradle.kts @@ -49,6 +49,7 @@ dependencies { implementation(project(":nessie-versioned-storage-jdbc")) implementation(project(":nessie-versioned-storage-jdbc2")) implementation(project(":nessie-versioned-storage-mongodb")) + implementation(project(":nessie-versioned-storage-mongodb2")) implementation(project(":nessie-versioned-storage-rocksdb")) implementation(project(":nessie-versioned-storage-store")) diff --git a/servers/quarkus-common/src/main/java/org/projectnessie/quarkus/providers/storage/MongoDB2BackendBuilder.java b/servers/quarkus-common/src/main/java/org/projectnessie/quarkus/providers/storage/MongoDB2BackendBuilder.java new file mode 100644 index 00000000000..e5210109e8f --- /dev/null +++ b/servers/quarkus-common/src/main/java/org/projectnessie/quarkus/providers/storage/MongoDB2BackendBuilder.java @@ -0,0 +1,51 @@ +/* + * Copyright (C) 2022 Dremio + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.projectnessie.quarkus.providers.storage; + +import static org.projectnessie.quarkus.config.VersionStoreConfig.VersionStoreType.MONGODB2; + +import com.mongodb.client.MongoClient; +import io.quarkus.arc.Arc; +import io.quarkus.mongodb.runtime.MongoClientBeanUtil; +import io.quarkus.mongodb.runtime.MongoClients; +import jakarta.enterprise.context.Dependent; +import jakarta.inject.Inject; +import org.eclipse.microprofile.config.inject.ConfigProperty; +import org.projectnessie.quarkus.providers.versionstore.StoreType; +import org.projectnessie.versioned.storage.common.persist.Backend; +import org.projectnessie.versioned.storage.mongodb2.MongoDB2BackendConfig; +import org.projectnessie.versioned.storage.mongodb2.MongoDB2BackendFactory; + +@StoreType(MONGODB2) +@Dependent +public class MongoDB2BackendBuilder implements BackendBuilder { + + @Inject + @ConfigProperty(name = "quarkus.mongodb.database") + String databaseName; + + @Override + public Backend buildBackend() { + MongoClients mongoClients = Arc.container().instance(MongoClients.class).get(); + MongoClient client = + mongoClients.createMongoClient(MongoClientBeanUtil.DEFAULT_MONGOCLIENT_NAME); + + MongoDB2BackendFactory factory = new MongoDB2BackendFactory(); + MongoDB2BackendConfig c = + MongoDB2BackendConfig.builder().databaseName(databaseName).client(client).build(); + return factory.buildBackend(c); + } +} diff --git a/servers/quarkus-config/src/main/java/org/projectnessie/quarkus/config/VersionStoreConfig.java b/servers/quarkus-config/src/main/java/org/projectnessie/quarkus/config/VersionStoreConfig.java index 44173ef6427..860817d90d0 100644 --- a/servers/quarkus-config/src/main/java/org/projectnessie/quarkus/config/VersionStoreConfig.java +++ b/servers/quarkus-config/src/main/java/org/projectnessie/quarkus/config/VersionStoreConfig.java @@ -29,7 +29,10 @@ enum VersionStoreType { IN_MEMORY, 
ROCKSDB, DYNAMODB, + /** MongoDB variant using many distinct attributes. */ MONGODB, + /** MongoDB variant using few attributes, saves storage overhead. */ + MONGODB2, CASSANDRA, /** JDBC variant using many distinct columns. */ JDBC, diff --git a/servers/quarkus-server/src/main/resources/application.properties b/servers/quarkus-server/src/main/resources/application.properties index 71300e50127..8bdabf73c2c 100644 --- a/servers/quarkus-server/src/main/resources/application.properties +++ b/servers/quarkus-server/src/main/resources/application.properties @@ -146,8 +146,9 @@ nessie.server.send-stacktrace-to-client=false # nessie.server.authorization.rules.allow_listing_reflog=\ # op=='VIEW_REFLOG' && role=='admin_user' -### which type of version store to use: IN_MEMORY, ROCKSDB, DYNAMODB, MONGODB, CASSANDRA, JDBC2, BIGTABLE. +### which type of version store to use: IN_MEMORY, ROCKSDB, DYNAMODB, MONGODB2, CASSANDRA, JDBC2, BIGTABLE. # Note: the version store type JDBC is deprecated, please use the Nessie Server Admin Tool to migrate to JDBC2. +# Note: the version store type MONGODB is deprecated, please use the Nessie Server Admin Tool to migrate to MONGODB2. nessie.version.store.type=IN_MEMORY # Object cache size as a value relative to the JVM's max heap size. The `cache-capacity-fraction-adjust-mb` diff --git a/servers/quarkus-tests/build.gradle.kts b/servers/quarkus-tests/build.gradle.kts index ce58a5d7680..6a2188d6019 100644 --- a/servers/quarkus-tests/build.gradle.kts +++ b/servers/quarkus-tests/build.gradle.kts @@ -35,6 +35,7 @@ dependencies { implementation(project(":nessie-versioned-storage-jdbc-tests")) implementation(project(":nessie-versioned-storage-jdbc2-tests")) implementation(project(":nessie-versioned-storage-mongodb-tests")) + implementation(project(":nessie-versioned-storage-mongodb2-tests")) implementation(project(":nessie-versioned-storage-rocksdb-tests")) implementation(project(":nessie-versioned-storage-testextension")) implementation(project(":nessie-container-spec-helper")) diff --git a/servers/quarkus-tests/src/main/java/org/projectnessie/quarkus/tests/profiles/MongoTestResourceLifecycleManager.java b/servers/quarkus-tests/src/main/java/org/projectnessie/quarkus/tests/profiles/MongoTestResourceLifecycleManager.java index 7f711ec7225..0402967fa2e 100644 --- a/servers/quarkus-tests/src/main/java/org/projectnessie/quarkus/tests/profiles/MongoTestResourceLifecycleManager.java +++ b/servers/quarkus-tests/src/main/java/org/projectnessie/quarkus/tests/profiles/MongoTestResourceLifecycleManager.java @@ -19,12 +19,12 @@ import io.quarkus.test.common.QuarkusTestResourceLifecycleManager; import java.util.Map; import java.util.Optional; -import org.projectnessie.versioned.storage.mongodbtests.MongoDBBackendTestFactory; +import org.projectnessie.versioned.storage.mongodbtests2.MongoDB2BackendTestFactory; public class MongoTestResourceLifecycleManager implements QuarkusTestResourceLifecycleManager, DevServicesContext.ContextAware { - private MongoDBBackendTestFactory mongo; + private MongoDB2BackendTestFactory mongo; private Optional containerNetworkId; @@ -35,7 +35,7 @@ public void setIntegrationTestContext(DevServicesContext context) { @Override public Map start() { - mongo = new MongoDBBackendTestFactory(); + mongo = new MongoDB2BackendTestFactory(); try { mongo.start(containerNetworkId); diff --git a/servers/quarkus-tests/src/main/java/org/projectnessie/quarkus/tests/profiles/QuarkusTestProfilePersistMongo.java 
b/servers/quarkus-tests/src/main/java/org/projectnessie/quarkus/tests/profiles/QuarkusTestProfilePersistMongo.java index 93e98f67c7c..8e89cfdda52 100644 --- a/servers/quarkus-tests/src/main/java/org/projectnessie/quarkus/tests/profiles/QuarkusTestProfilePersistMongo.java +++ b/servers/quarkus-tests/src/main/java/org/projectnessie/quarkus/tests/profiles/QuarkusTestProfilePersistMongo.java @@ -15,7 +15,7 @@ */ package org.projectnessie.quarkus.tests.profiles; -import static org.projectnessie.quarkus.config.VersionStoreConfig.VersionStoreType.MONGODB; +import static org.projectnessie.quarkus.config.VersionStoreConfig.VersionStoreType.MONGODB2; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; @@ -28,7 +28,7 @@ public class QuarkusTestProfilePersistMongo extends BaseConfigProfile { public Map getConfigOverrides() { return ImmutableMap.builder() .putAll(super.getConfigOverrides()) - .put("nessie.version.store.type", MONGODB.name()) + .put("nessie.version.store.type", MONGODB2.name()) .put("quarkus.mongodb.write-concern.journal", "false") .build(); } diff --git a/servers/services-bench/build.gradle.kts b/servers/services-bench/build.gradle.kts index 378cf0ecf0f..1189569f4f6 100644 --- a/servers/services-bench/build.gradle.kts +++ b/servers/services-bench/build.gradle.kts @@ -47,6 +47,7 @@ dependencies { jmhRuntimeOnly(project(":nessie-versioned-storage-cassandra")) jmhRuntimeOnly(project(":nessie-versioned-storage-rocksdb")) jmhRuntimeOnly(project(":nessie-versioned-storage-mongodb")) + jmhRuntimeOnly(project(":nessie-versioned-storage-mongodb2")) jmhRuntimeOnly(project(":nessie-versioned-storage-dynamodb")) jmhRuntimeOnly(project(":nessie-versioned-storage-jdbc")) jmhRuntimeOnly(project(":nessie-versioned-storage-jdbc2")) diff --git a/servers/services/build.gradle.kts b/servers/services/build.gradle.kts index 1889fa3dbc8..525b73ab17a 100644 --- a/servers/services/build.gradle.kts +++ b/servers/services/build.gradle.kts @@ -71,6 +71,7 @@ dependencies { intTestImplementation(project(":nessie-versioned-storage-cassandra-tests")) intTestImplementation(project(":nessie-versioned-storage-rocksdb-tests")) intTestImplementation(project(":nessie-versioned-storage-mongodb-tests")) + intTestImplementation(project(":nessie-versioned-storage-mongodb2-tests")) intTestImplementation(project(":nessie-versioned-storage-dynamodb-tests")) intTestRuntimeOnly(platform(libs.testcontainers.bom)) intTestRuntimeOnly("org.testcontainers:testcontainers") diff --git a/servers/services/src/intTest/java/org/projectnessie/services/impl/ITApiImplsPersistMongoDB.java b/servers/services/src/intTest/java/org/projectnessie/services/impl/ITApiImplsPersistMongoDB.java index eba568f9105..9df8b7a03d3 100644 --- a/servers/services/src/intTest/java/org/projectnessie/services/impl/ITApiImplsPersistMongoDB.java +++ b/servers/services/src/intTest/java/org/projectnessie/services/impl/ITApiImplsPersistMongoDB.java @@ -18,11 +18,11 @@ import org.junit.jupiter.api.condition.DisabledOnOs; import org.junit.jupiter.api.condition.OS; import org.junit.jupiter.api.extension.ExtendWith; -import org.projectnessie.versioned.storage.mongodbtests.MongoDBBackendTestFactory; +import org.projectnessie.versioned.storage.mongodbtests2.MongoDB2BackendTestFactory; import org.projectnessie.versioned.storage.testextension.NessieBackend; import org.projectnessie.versioned.storage.testextension.PersistExtension; @ExtendWith(PersistExtension.class) -@NessieBackend(MongoDBBackendTestFactory.class) 
+@NessieBackend(MongoDB2BackendTestFactory.class) @DisabledOnOs(OS.WINDOWS) // testcontainers does not support Windows public class ITApiImplsPersistMongoDB extends BaseTestApis {} diff --git a/site/in-dev/configuration.md b/site/in-dev/configuration.md index e075c64d06c..ca63f6569a6 100644 --- a/site/in-dev/configuration.md +++ b/site/in-dev/configuration.md @@ -194,7 +194,7 @@ Related Quarkus settings: | "in memory" | only for development and local testing | `IN_MEMORY` | Do not use for any serious use case. | | RocksDB | production, single node only | `ROCKSDB` | | | Google BigTable | production | `BIGTABLE` | | -| MongoDB | production | `MONGODB` | | +| MongoDB | production | `MONGODB2` & `MONGODB` (deprecated) | | | Amazon DynamoDB | beta, only tested against the simulator | `DYNAMODB` | Not recommended for use with Nessie Catalog (Iceberg REST) due to its restrictive row-size limit. | | PostgreSQL | production | `JDBC2` & `JDBC` (deprecated) | | | H2 | only for development and local testing | `JDBC2` & `JDBC` (deprecated) | Do not use for any serious use case. | @@ -204,6 +204,11 @@ Related Quarkus settings: | Apache Cassandra | experimental, known issues | `CASSANDRA` | Known to raise user-facing errors due to Cassandra's concept of letting the driver timeout too early, or database timeouts. | | ScyllaDB | experimental, known issues | `CASSANDRA` | Known to raise user-facing errors due to Cassandra's concept of letting the driver timeout too early, or database timeouts. Known to be slow in container based testing. Unclear how good Scylla's LWT implementation performs. | +!!! warn + Prefer the `MONGODB2` version store type over the `MONGODB` version store type, because it has much less storage overhead. + The `MONGODB` version store type is _deprecated for removal_; please use the + [Nessie Server Admin Tool](export_import.md) to migrate from the `MONGODB` version store type to `MONGODB2`. + !!! warn Prefer the `JDBC2` version store type over the `JDBC` version store type, because it has way less storage overhead. The `JDBC` version store type is _deprecated for removal_, please use the [Nessie Server Admin Tool](export_import.md) to migrate from the `JDBC` version store type to `JDBC2`. @@ -273,7 +278,7 @@ Related Quarkus settings: #### MongoDB Version Store Settings -When setting `nessie.version.store.type=MONGODB` which enables MongoDB as the version store used by the Nessie server, the following configurations are applicable in combination with `nessie.version.store.type`. +When setting `nessie.version.store.type=MONGODB2`, which enables MongoDB as the version store used by the Nessie server, the following configurations apply in combination with `nessie.version.store.type`.
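A minimal configuration sketch for the new store type; only `nessie.version.store.type` and the two `quarkus.mongodb.*` keys shown here appear in this change, and the concrete values (database name, host) are placeholders:

```properties
# Use the new MongoDB version store type; the older MONGODB type is deprecated.
nessie.version.store.type=MONGODB2
# Standard Quarkus MongoDB client settings; the values below are placeholders.
quarkus.mongodb.database=nessie
quarkus.mongodb.connection-string=mongodb://localhost:27017
```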
Related Quarkus settings: diff --git a/tools/doc-generator/doclet/build.gradle.kts b/tools/doc-generator/doclet/build.gradle.kts index 07f4487ce85..c43423ad213 100644 --- a/tools/doc-generator/doclet/build.gradle.kts +++ b/tools/doc-generator/doclet/build.gradle.kts @@ -35,7 +35,7 @@ val genProjectPaths = ":nessie-versioned-storage-inmemory", ":nessie-versioned-storage-jdbc", ":nessie-versioned-storage-jdbc2", - ":nessie-versioned-storage-mongodb", + ":nessie-versioned-storage-mongodb2", ":nessie-versioned-storage-rocksdb" ) diff --git a/tools/doc-generator/site-gen/build.gradle.kts b/tools/doc-generator/site-gen/build.gradle.kts index aa068e68922..ef8ba079c31 100644 --- a/tools/doc-generator/site-gen/build.gradle.kts +++ b/tools/doc-generator/site-gen/build.gradle.kts @@ -44,6 +44,7 @@ val genProjectPaths = listOf( ":nessie-versioned-storage-jdbc", ":nessie-versioned-storage-jdbc2", ":nessie-versioned-storage-mongodb", + ":nessie-versioned-storage-mongodb2", ":nessie-versioned-storage-rocksdb", ":nessie-catalog-files-impl", ":nessie-catalog-service-common" diff --git a/tools/server-admin/build.gradle.kts b/tools/server-admin/build.gradle.kts index 6270a88a347..7a9d13149d2 100644 --- a/tools/server-admin/build.gradle.kts +++ b/tools/server-admin/build.gradle.kts @@ -62,6 +62,7 @@ dependencies { implementation(project(":nessie-versioned-storage-jdbc")) implementation(project(":nessie-versioned-storage-jdbc2")) implementation(project(":nessie-versioned-storage-mongodb")) + implementation(project(":nessie-versioned-storage-mongodb2")) implementation(project(":nessie-versioned-storage-rocksdb")) implementation(project(":nessie-catalog-service-common")) @@ -92,6 +93,7 @@ dependencies { testFixturesApi(project(":nessie-quarkus-tests")) testFixturesApi(project(":nessie-versioned-tests")) intTestImplementation(project(":nessie-versioned-storage-mongodb-tests")) + intTestImplementation(project(":nessie-versioned-storage-mongodb2-tests")) intTestImplementation(project(":nessie-versioned-storage-jdbc-tests")) intTestImplementation(project(":nessie-versioned-storage-jdbc2-tests")) intTestImplementation(project(":nessie-versioned-storage-cassandra-tests")) diff --git a/tools/server-admin/src/intTest/java/org/projectnessie/tools/admin/cli/ITExportImport.java b/tools/server-admin/src/intTest/java/org/projectnessie/tools/admin/cli/ITExportImport.java index df8eddb0804..ca1a07c1c00 100644 --- a/tools/server-admin/src/intTest/java/org/projectnessie/tools/admin/cli/ITExportImport.java +++ b/tools/server-admin/src/intTest/java/org/projectnessie/tools/admin/cli/ITExportImport.java @@ -77,7 +77,7 @@ public class ITExportImport { private static final String VERSION_STORES = - "(ROCKSDB|DYNAMODB|MONGODB|CASSANDRA|JDBC2|BIGTABLE)"; + "(ROCKSDB|DYNAMODB|MONGODB2|CASSANDRA|JDBC2|BIGTABLE)"; @InjectSoftAssertions private SoftAssertions soft; diff --git a/tools/server-admin/src/intTest/java/org/projectnessie/tools/admin/cli/NessieServerAdminTestBackends.java b/tools/server-admin/src/intTest/java/org/projectnessie/tools/admin/cli/NessieServerAdminTestBackends.java index 701179901a3..c5b66cba4d0 100644 --- a/tools/server-admin/src/intTest/java/org/projectnessie/tools/admin/cli/NessieServerAdminTestBackends.java +++ b/tools/server-admin/src/intTest/java/org/projectnessie/tools/admin/cli/NessieServerAdminTestBackends.java @@ -23,19 +23,19 @@ import org.projectnessie.versioned.storage.jdbc2tests.MariaDBBackendTestFactory; import org.projectnessie.versioned.storage.jdbc2tests.MySQLBackendTestFactory; import 
org.projectnessie.versioned.storage.jdbc2tests.PostgreSQLBackendTestFactory; -import org.projectnessie.versioned.storage.mongodbtests.MongoDBBackendTestFactory; +import org.projectnessie.versioned.storage.mongodbtests2.MongoDB2BackendTestFactory; import org.projectnessie.versioned.storage.testextension.BackendTestFactory; public enum NessieServerAdminTestBackends { mongo { @Override BackendTestFactory backendFactory() { - return new MongoDBBackendTestFactory(); + return new MongoDB2BackendTestFactory(); } @Override Map quarkusConfig() { - return Map.of("nessie.version.store.type", VersionStoreType.MONGODB.name()); + return Map.of("nessie.version.store.type", VersionStoreType.MONGODB2.name()); } }, diff --git a/versioned/storage/mongodb2-tests/build.gradle.kts b/versioned/storage/mongodb2-tests/build.gradle.kts new file mode 100644 index 00000000000..e3abd1991c7 --- /dev/null +++ b/versioned/storage/mongodb2-tests/build.gradle.kts @@ -0,0 +1,42 @@ +/* + * Copyright (C) 2022 Dremio + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +plugins { + id("nessie-conventions-server") + id("nessie-jacoco") +} + +publishingHelper { mavenName = "Nessie - Storage - MongoDB2 - Tests" } + +description = "Base test code for creating test backends using MongoDB." + +dependencies { + implementation(project(":nessie-versioned-storage-mongodb2")) + implementation(project(":nessie-versioned-storage-common")) + implementation(project(":nessie-versioned-storage-testextension")) + implementation(project(":nessie-container-spec-helper")) + + implementation(libs.slf4j.api) + + compileOnly(libs.immutables.builder) + compileOnly(libs.immutables.value.annotations) + annotationProcessor(libs.immutables.value.processor) + + implementation(libs.mongodb.driver.sync) + + implementation(platform(libs.testcontainers.bom)) + implementation("org.testcontainers:mongodb") +} diff --git a/versioned/storage/mongodb2-tests/src/main/java/org/projectnessie/versioned/storage/mongodbtests2/MongoClientProducer.java b/versioned/storage/mongodb2-tests/src/main/java/org/projectnessie/versioned/storage/mongodbtests2/MongoClientProducer.java new file mode 100644 index 00000000000..e572410949d --- /dev/null +++ b/versioned/storage/mongodb2-tests/src/main/java/org/projectnessie/versioned/storage/mongodbtests2/MongoClientProducer.java @@ -0,0 +1,35 @@ +/* + * Copyright (C) 2024 Dremio + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.projectnessie.versioned.storage.mongodbtests2; + +import com.mongodb.client.MongoClient; +import com.mongodb.client.MongoClients; +import org.immutables.value.Value; +import org.projectnessie.versioned.storage.mongodbtests2.ImmutableMongoClientProducer.Builder; + +@Value.Immutable +public abstract class MongoClientProducer { + + static Builder builder() { + return ImmutableMongoClientProducer.builder(); + } + + abstract String connectionString(); + + public MongoClient createClient() { + return MongoClients.create(connectionString()); + } +} diff --git a/versioned/storage/mongodb2-tests/src/main/java/org/projectnessie/versioned/storage/mongodbtests2/MongoDB2BackendTestFactory.java b/versioned/storage/mongodb2-tests/src/main/java/org/projectnessie/versioned/storage/mongodbtests2/MongoDB2BackendTestFactory.java new file mode 100644 index 00000000000..8b1c6b3136e --- /dev/null +++ b/versioned/storage/mongodb2-tests/src/main/java/org/projectnessie/versioned/storage/mongodbtests2/MongoDB2BackendTestFactory.java @@ -0,0 +1,136 @@ +/* + * Copyright (C) 2024 Dremio + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.projectnessie.versioned.storage.mongodbtests2; + +import com.mongodb.client.MongoClient; +import java.util.HashMap; +import java.util.Map; +import java.util.Optional; +import org.projectnessie.nessie.testing.containerspec.ContainerSpecHelper; +import org.projectnessie.versioned.storage.common.persist.Backend; +import org.projectnessie.versioned.storage.mongodb2.MongoDB2Backend; +import org.projectnessie.versioned.storage.mongodb2.MongoDB2BackendConfig; +import org.projectnessie.versioned.storage.mongodb2.MongoDB2BackendFactory; +import org.projectnessie.versioned.storage.testextension.BackendTestFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.testcontainers.containers.ContainerLaunchException; +import org.testcontainers.containers.MongoDBContainer; +import org.testcontainers.containers.output.Slf4jLogConsumer; +import org.testcontainers.utility.DockerImageName; + +public class MongoDB2BackendTestFactory implements BackendTestFactory { + private static final Logger LOGGER = LoggerFactory.getLogger(MongoDB2BackendTestFactory.class); + public static final String MONGO_DB_NAME = "test"; + public static final int MONGO_PORT = 27017; + + private MongoDBContainer container; + private String connectionString; + + @Override + public String getName() { + return MongoDB2BackendFactory.NAME; + } + + @Override + public void start(Optional containerNetworkId) { + if (container != null) { + throw new IllegalStateException("Already started"); + } + + DockerImageName dockerImage = + ContainerSpecHelper.builder() + .name("mongodb") + .containerClass(MongoDB2BackendTestFactory.class) + .build() + .dockerImageName(null) + .asCompatibleSubstituteFor("mongo"); + + for (int retry = 0; ; retry++) { + MongoDBContainer c = + new MongoDBContainer(dockerImage).withLogConsumer(new Slf4jLogConsumer(LOGGER)); + 
containerNetworkId.ifPresent(c::withNetworkMode); + try { + c.start(); + container = c; + break; + } catch (ContainerLaunchException e) { + c.close(); + if (e.getCause() != null && retry < 3) { + LOGGER.warn("Launch of container {} failed, will retry...", c.getDockerImageName(), e); + continue; + } + LOGGER.error("Launch of container {} failed", c.getDockerImageName(), e); + throw new RuntimeException(e); + } + } + + connectionString = container.getReplicaSetUrl(MONGO_DB_NAME); + + if (containerNetworkId.isPresent()) { + String hostPort = container.getHost() + ':' + container.getMappedPort(MONGO_PORT); + String networkHostPort = + container.getCurrentContainerInfo().getConfig().getHostName() + ':' + MONGO_PORT; + connectionString = connectionString.replace(hostPort, networkHostPort); + } + } + + @Override + public Backend createNewBackend() { + MongoClient client = buildNewClient(); + + MongoDB2BackendConfig config = + MongoDB2BackendConfig.builder().databaseName(MONGO_DB_NAME).client(client).build(); + + return new MongoDB2Backend(config, true); + } + + public MongoClient buildNewClient() { + return MongoClientProducer.builder().connectionString(connectionString).build().createClient(); + } + + public String getDatabaseName() { + return MONGO_DB_NAME; + } + + public String getConnectionString() { + return connectionString; + } + + @Override + public void start() { + start(Optional.empty()); + } + + @Override + public void stop() { + try { + if (container != null) { + container.stop(); + } + } finally { + container = null; + } + } + + @Override + public Map getQuarkusConfig() { + Map config = new HashMap<>(); + config.put("quarkus.mongodb.connection-string", connectionString); + config.put("quarkus.mongodb.database", MONGO_DB_NAME); + return config; + } +} diff --git a/versioned/storage/mongodb2-tests/src/main/resources/META-INF/services/org.projectnessie.versioned.storage.testextension.BackendTestFactory b/versioned/storage/mongodb2-tests/src/main/resources/META-INF/services/org.projectnessie.versioned.storage.testextension.BackendTestFactory new file mode 100644 index 00000000000..30999497eac --- /dev/null +++ b/versioned/storage/mongodb2-tests/src/main/resources/META-INF/services/org.projectnessie.versioned.storage.testextension.BackendTestFactory @@ -0,0 +1,16 @@ +# +# Copyright (C) 2022 Dremio +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +org.projectnessie.versioned.storage.mongodbtests2.MongoDB2BackendTestFactory diff --git a/versioned/storage/mongodb2-tests/src/main/resources/org/projectnessie/versioned/storage/mongodbtests2/Dockerfile-mongodb-version b/versioned/storage/mongodb2-tests/src/main/resources/org/projectnessie/versioned/storage/mongodbtests2/Dockerfile-mongodb-version new file mode 100644 index 00000000000..ecf68bea9f9 --- /dev/null +++ b/versioned/storage/mongodb2-tests/src/main/resources/org/projectnessie/versioned/storage/mongodbtests2/Dockerfile-mongodb-version @@ -0,0 +1,3 @@ +# Dockerfile to provide the image name and tag to a test. 
+# Version is managed by Renovate - do not edit. +FROM mongo:7.0.12 diff --git a/versioned/storage/mongodb2/build.gradle.kts b/versioned/storage/mongodb2/build.gradle.kts new file mode 100644 index 00000000000..f0322ab6338 --- /dev/null +++ b/versioned/storage/mongodb2/build.gradle.kts @@ -0,0 +1,66 @@ +/* + * Copyright (C) 2022 Dremio + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +import org.apache.tools.ant.taskdefs.condition.Os + +plugins { + id("nessie-conventions-server") + id("nessie-jacoco") +} + +publishingHelper { mavenName = "Nessie - Storage - MongoDB2" } + +description = "Storage implementation for MongoDB." + +dependencies { + implementation(project(":nessie-versioned-storage-common")) + implementation(project(":nessie-versioned-storage-common-proto")) + implementation(project(":nessie-versioned-storage-common-serialize")) + + compileOnly(libs.jakarta.validation.api) + compileOnly(libs.jakarta.annotation.api) + + compileOnly(libs.errorprone.annotations) + implementation(libs.agrona) + implementation(libs.guava) + implementation(libs.slf4j.api) + + implementation(libs.mongodb.driver.sync) + + compileOnly(libs.immutables.builder) + compileOnly(libs.immutables.value.annotations) + annotationProcessor(libs.immutables.value.processor) + + intTestImplementation(project(":nessie-versioned-storage-mongodb2-tests")) + intTestImplementation(project(":nessie-versioned-storage-common-tests")) + intTestImplementation(project(":nessie-versioned-storage-testextension")) + intTestImplementation(project(":nessie-versioned-tests")) + intTestRuntimeOnly(platform(libs.testcontainers.bom)) + intTestRuntimeOnly("org.testcontainers:mongodb") + intTestImplementation(platform(libs.junit.bom)) + intTestImplementation(libs.bundles.junit.testing) + intTestRuntimeOnly(libs.logback.classic) +} + +// Testcontainers is not supported on Windows :( +if (Os.isFamily(Os.FAMILY_WINDOWS)) { + tasks.withType().configureEach { this.enabled = false } +} + +// Issue w/ testcontainers/podman in GH workflows :( +if (Os.isFamily(Os.FAMILY_MAC) && System.getenv("CI") != null) { + tasks.named("intTest").configure { this.enabled = false } +} diff --git a/versioned/storage/mongodb2/src/intTest/java/org/projectnessie/versioned/storage/mongodb2/ITMongoDBBackendFactory.java b/versioned/storage/mongodb2/src/intTest/java/org/projectnessie/versioned/storage/mongodb2/ITMongoDBBackendFactory.java new file mode 100644 index 00000000000..94960ac4195 --- /dev/null +++ b/versioned/storage/mongodb2/src/intTest/java/org/projectnessie/versioned/storage/mongodb2/ITMongoDBBackendFactory.java @@ -0,0 +1,128 @@ +/* + * Copyright (C) 2024 Dremio + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.projectnessie.versioned.storage.mongodb2; + +import static org.projectnessie.versioned.storage.common.logic.Logics.repositoryLogic; + +import com.mongodb.client.MongoClient; +import org.assertj.core.api.SoftAssertions; +import org.assertj.core.api.junit.jupiter.InjectSoftAssertions; +import org.assertj.core.api.junit.jupiter.SoftAssertionsExtension; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.projectnessie.versioned.storage.common.config.StoreConfig; +import org.projectnessie.versioned.storage.common.logic.RepositoryDescription; +import org.projectnessie.versioned.storage.common.logic.RepositoryLogic; +import org.projectnessie.versioned.storage.common.persist.Backend; +import org.projectnessie.versioned.storage.common.persist.BackendFactory; +import org.projectnessie.versioned.storage.common.persist.Persist; +import org.projectnessie.versioned.storage.common.persist.PersistFactory; +import org.projectnessie.versioned.storage.common.persist.PersistLoader; +import org.projectnessie.versioned.storage.mongodbtests2.MongoDB2BackendTestFactory; + +@ExtendWith(SoftAssertionsExtension.class) +public class ITMongoDBBackendFactory { + @InjectSoftAssertions protected SoftAssertions soft; + + @Test + public void productionLike() throws Exception { + MongoDB2BackendTestFactory testFactory = new MongoDB2BackendTestFactory(); + testFactory.start(); + try { + BackendFactory factory = + PersistLoader.findFactoryByName(MongoDB2BackendFactory.NAME); + soft.assertThat(factory).isNotNull().isInstanceOf(MongoDB2BackendFactory.class); + + try (MongoClient client = testFactory.buildNewClient()) { + String dbname = "dbname"; + RepositoryDescription repoDesc; + try (Backend backend = + factory.buildBackend( + MongoDB2BackendConfig.builder().client(client).databaseName(dbname).build())) { + soft.assertThat(backend).isNotNull().isInstanceOf(MongoDB2Backend.class); + backend.setupSchema(); + PersistFactory persistFactory = backend.createFactory(); + soft.assertThat(persistFactory).isNotNull().isInstanceOf(MongoDB2PersistFactory.class); + Persist persist = persistFactory.newPersist(StoreConfig.Adjustable.empty()); + soft.assertThat(persist).isNotNull().isInstanceOf(MongoDB2Persist.class); + + RepositoryLogic repositoryLogic = repositoryLogic(persist); + repositoryLogic.initialize("initializeAgain"); + repoDesc = repositoryLogic.fetchRepositoryDescription(); + soft.assertThat(repoDesc).isNotNull(); + } + + try (Backend backend = + factory.buildBackend( + MongoDB2BackendConfig.builder().client(client).databaseName(dbname).build())) { + soft.assertThat(backend).isNotNull().isInstanceOf(MongoDB2Backend.class); + backend.setupSchema(); + PersistFactory persistFactory = backend.createFactory(); + soft.assertThat(persistFactory).isNotNull().isInstanceOf(MongoDB2PersistFactory.class); + Persist persist = persistFactory.newPersist(StoreConfig.Adjustable.empty()); + soft.assertThat(persist).isNotNull().isInstanceOf(MongoDB2Persist.class); + + RepositoryLogic repositoryLogic = repositoryLogic(persist); + 
repositoryLogic.initialize("initializeAgain"); + soft.assertThat(repositoryLogic.fetchRepositoryDescription()).isEqualTo(repoDesc); + } + } + } finally { + testFactory.stop(); + } + } + + @Test + public void backendTestFactory() throws Exception { + MongoDB2BackendTestFactory testFactory = new MongoDB2BackendTestFactory(); + testFactory.start(); + try { + BackendFactory factory = + PersistLoader.findFactoryByName(MongoDB2BackendFactory.NAME); + soft.assertThat(factory).isNotNull().isInstanceOf(MongoDB2BackendFactory.class); + + RepositoryDescription repoDesc; + try (Backend backend = testFactory.createNewBackend()) { + soft.assertThat(backend).isNotNull().isInstanceOf(MongoDB2Backend.class); + backend.setupSchema(); + PersistFactory persistFactory = backend.createFactory(); + soft.assertThat(persistFactory).isNotNull().isInstanceOf(MongoDB2PersistFactory.class); + Persist persist = persistFactory.newPersist(StoreConfig.Adjustable.empty()); + soft.assertThat(persist).isNotNull().isInstanceOf(MongoDB2Persist.class); + + RepositoryLogic repositoryLogic = repositoryLogic(persist); + repositoryLogic.initialize("initializeAgain"); + repoDesc = repositoryLogic.fetchRepositoryDescription(); + soft.assertThat(repoDesc).isNotNull(); + } + + try (Backend backend = testFactory.createNewBackend()) { + soft.assertThat(backend).isNotNull().isInstanceOf(MongoDB2Backend.class); + backend.setupSchema(); + PersistFactory persistFactory = backend.createFactory(); + soft.assertThat(persistFactory).isNotNull().isInstanceOf(MongoDB2PersistFactory.class); + Persist persist = persistFactory.newPersist(StoreConfig.Adjustable.empty()); + soft.assertThat(persist).isNotNull().isInstanceOf(MongoDB2Persist.class); + + RepositoryLogic repositoryLogic = repositoryLogic(persist); + repositoryLogic.initialize("initializeAgain"); + soft.assertThat(repositoryLogic.fetchRepositoryDescription()).isEqualTo(repoDesc); + } + } finally { + testFactory.stop(); + } + } +} diff --git a/versioned/storage/mongodb2/src/intTest/java/org/projectnessie/versioned/storage/mongodb2/ITMongoDBPersist.java b/versioned/storage/mongodb2/src/intTest/java/org/projectnessie/versioned/storage/mongodb2/ITMongoDBPersist.java new file mode 100644 index 00000000000..e79128f5e92 --- /dev/null +++ b/versioned/storage/mongodb2/src/intTest/java/org/projectnessie/versioned/storage/mongodb2/ITMongoDBPersist.java @@ -0,0 +1,23 @@ +/* + * Copyright (C) 2024 Dremio + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.projectnessie.versioned.storage.mongodb2; + +import org.projectnessie.versioned.storage.commontests.AbstractPersistTests; +import org.projectnessie.versioned.storage.mongodbtests2.MongoDB2BackendTestFactory; +import org.projectnessie.versioned.storage.testextension.NessieBackend; + +@NessieBackend(MongoDB2BackendTestFactory.class) +public class ITMongoDBPersist extends AbstractPersistTests {} diff --git a/versioned/storage/mongodb2/src/intTest/java/org/projectnessie/versioned/storage/mongodb2/ITMongoDBVersionStore.java b/versioned/storage/mongodb2/src/intTest/java/org/projectnessie/versioned/storage/mongodb2/ITMongoDBVersionStore.java new file mode 100644 index 00000000000..ab4cd44bd54 --- /dev/null +++ b/versioned/storage/mongodb2/src/intTest/java/org/projectnessie/versioned/storage/mongodb2/ITMongoDBVersionStore.java @@ -0,0 +1,23 @@ +/* + * Copyright (C) 2024 Dremio + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.projectnessie.versioned.storage.mongodb2; + +import org.projectnessie.versioned.storage.commontests.AbstractVersionStoreTests; +import org.projectnessie.versioned.storage.mongodbtests2.MongoDB2BackendTestFactory; +import org.projectnessie.versioned.storage.testextension.NessieBackend; + +@NessieBackend(MongoDB2BackendTestFactory.class) +public class ITMongoDBVersionStore extends AbstractVersionStoreTests {} diff --git a/versioned/storage/mongodb2/src/main/java/org/projectnessie/versioned/storage/mongodb2/MongoDB2Backend.java b/versioned/storage/mongodb2/src/main/java/org/projectnessie/versioned/storage/mongodb2/MongoDB2Backend.java new file mode 100644 index 00000000000..4927e683184 --- /dev/null +++ b/versioned/storage/mongodb2/src/main/java/org/projectnessie/versioned/storage/mongodb2/MongoDB2Backend.java @@ -0,0 +1,100 @@ +/* + * Copyright (C) 2024 Dremio + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.projectnessie.versioned.storage.mongodb2; + +import static com.mongodb.client.model.Filters.in; +import static org.projectnessie.versioned.storage.mongodb2.MongoDB2Constants.ID_REPO_PATH; +import static org.projectnessie.versioned.storage.mongodb2.MongoDB2Constants.TABLE_OBJS; +import static org.projectnessie.versioned.storage.mongodb2.MongoDB2Constants.TABLE_REFS; + +import com.mongodb.client.MongoClient; +import com.mongodb.client.MongoCollection; +import com.mongodb.client.MongoDatabase; +import jakarta.annotation.Nonnull; +import java.util.Objects; +import java.util.Optional; +import java.util.Set; +import java.util.stream.Stream; +import org.bson.Document; +import org.bson.conversions.Bson; +import org.projectnessie.versioned.storage.common.persist.Backend; +import org.projectnessie.versioned.storage.common.persist.PersistFactory; + +public class MongoDB2Backend implements Backend { + + private final MongoDB2BackendConfig config; + private final MongoClient client; + private final boolean closeClient; + private MongoCollection refs; + private MongoCollection objs; + + public MongoDB2Backend(@Nonnull MongoDB2BackendConfig config, boolean closeClient) { + this.config = config; + this.client = config.client(); + this.closeClient = closeClient; + } + + @Nonnull + MongoCollection refs() { + return refs; + } + + @Nonnull + MongoCollection objs() { + return objs; + } + + private synchronized void initialize() { + if (refs == null) { + String databaseName = config.databaseName(); + MongoDatabase database = + client.getDatabase(Objects.requireNonNull(databaseName, "Database name must be set")); + + refs = database.getCollection(TABLE_REFS); + objs = database.getCollection(TABLE_OBJS); + } + } + + @Override + @Nonnull + public PersistFactory createFactory() { + initialize(); + return new MongoDB2PersistFactory(this); + } + + @Override + public synchronized void close() { + if (closeClient) { + client.close(); + } + } + + @Override + public Optional setupSchema() { + initialize(); + return Optional.of("database name: " + config.databaseName()); + } + + @Override + public void eraseRepositories(Set repositoryIds) { + if (repositoryIds == null || repositoryIds.isEmpty()) { + return; + } + + Bson repoIdFilter = in(ID_REPO_PATH, repositoryIds); + Stream.of(refs(), objs()).forEach(coll -> coll.deleteMany(repoIdFilter)); + } +} diff --git a/versioned/storage/mongodb2/src/main/java/org/projectnessie/versioned/storage/mongodb2/MongoDB2BackendBaseConfig.java b/versioned/storage/mongodb2/src/main/java/org/projectnessie/versioned/storage/mongodb2/MongoDB2BackendBaseConfig.java new file mode 100644 index 00000000000..72150018f92 --- /dev/null +++ b/versioned/storage/mongodb2/src/main/java/org/projectnessie/versioned/storage/mongodb2/MongoDB2BackendBaseConfig.java @@ -0,0 +1,20 @@ +/* + * Copyright (C) 2024 Dremio + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.projectnessie.versioned.storage.mongodb2; + +public interface MongoDB2BackendBaseConfig { + String databaseName(); +} diff --git a/versioned/storage/mongodb2/src/main/java/org/projectnessie/versioned/storage/mongodb2/MongoDB2BackendConfig.java b/versioned/storage/mongodb2/src/main/java/org/projectnessie/versioned/storage/mongodb2/MongoDB2BackendConfig.java new file mode 100644 index 00000000000..16b2862a5e7 --- /dev/null +++ b/versioned/storage/mongodb2/src/main/java/org/projectnessie/versioned/storage/mongodb2/MongoDB2BackendConfig.java @@ -0,0 +1,29 @@ +/* + * Copyright (C) 2024 Dremio + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.projectnessie.versioned.storage.mongodb2; + +import com.mongodb.client.MongoClient; +import org.immutables.value.Value; + +@Value.Immutable +public interface MongoDB2BackendConfig extends MongoDB2BackendBaseConfig { + + MongoClient client(); + + static ImmutableMongoDB2BackendConfig.Builder builder() { + return ImmutableMongoDB2BackendConfig.builder(); + } +} diff --git a/versioned/storage/mongodb2/src/main/java/org/projectnessie/versioned/storage/mongodb2/MongoDB2BackendFactory.java b/versioned/storage/mongodb2/src/main/java/org/projectnessie/versioned/storage/mongodb2/MongoDB2BackendFactory.java new file mode 100644 index 00000000000..f4693344336 --- /dev/null +++ b/versioned/storage/mongodb2/src/main/java/org/projectnessie/versioned/storage/mongodb2/MongoDB2BackendFactory.java @@ -0,0 +1,42 @@ +/* + * Copyright (C) 2024 Dremio + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.projectnessie.versioned.storage.mongodb2; + +import jakarta.annotation.Nonnull; +import org.projectnessie.versioned.storage.common.persist.BackendFactory; + +public class MongoDB2BackendFactory implements BackendFactory { + + public static final String NAME = "MongoDB"; + + @Override + @Nonnull + public String name() { + return NAME; + } + + @Override + @Nonnull + public MongoDB2BackendConfig newConfigInstance() { + return MongoDB2BackendConfig.builder().build(); + } + + @Override + @Nonnull + public MongoDB2Backend buildBackend(@Nonnull MongoDB2BackendConfig config) { + return new MongoDB2Backend(config, false); + } +} diff --git a/versioned/storage/mongodb2/src/main/java/org/projectnessie/versioned/storage/mongodb2/MongoDB2Constants.java b/versioned/storage/mongodb2/src/main/java/org/projectnessie/versioned/storage/mongodb2/MongoDB2Constants.java new file mode 100644 index 00000000000..5b7c019e198 --- /dev/null +++ b/versioned/storage/mongodb2/src/main/java/org/projectnessie/versioned/storage/mongodb2/MongoDB2Constants.java @@ -0,0 +1,41 @@ +/* + * Copyright (C) 2024 Dremio + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.projectnessie.versioned.storage.mongodb2; + +final class MongoDB2Constants { + + static final String TABLE_REFS = "refs2"; + static final String TABLE_OBJS = "objs2"; + + static final String ID_PROPERTY_NAME = "_id"; + + static final String COL_REFERENCES_NAME = "n"; + static final String COL_REFERENCES_POINTER = "p"; + static final String COL_REFERENCES_DELETED = "d"; + static final String COL_REFERENCES_CREATED_AT = "c"; + static final String COL_REFERENCES_EXTENDED_INFO = "e"; + static final String COL_REFERENCES_PREVIOUS = "h"; + + static final String COL_OBJ_ID = "i"; + static final String COL_REPO = "r"; + static final String COL_OBJ_TYPE = "y"; + static final String COL_OBJ_VERS = "V"; + static final String COL_OBJ_VALUE = "v"; + + static final String ID_REPO_PATH = ID_PROPERTY_NAME + "." + COL_REPO; + + private MongoDB2Constants() {} +} diff --git a/versioned/storage/mongodb2/src/main/java/org/projectnessie/versioned/storage/mongodb2/MongoDB2Persist.java b/versioned/storage/mongodb2/src/main/java/org/projectnessie/versioned/storage/mongodb2/MongoDB2Persist.java new file mode 100644 index 00000000000..4db6a1fec2b --- /dev/null +++ b/versioned/storage/mongodb2/src/main/java/org/projectnessie/versioned/storage/mongodb2/MongoDB2Persist.java @@ -0,0 +1,786 @@ +/* + * Copyright (C) 2024 Dremio + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.projectnessie.versioned.storage.mongodb2; + +import static com.google.common.base.Preconditions.checkArgument; +import static com.mongodb.ErrorCategory.DUPLICATE_KEY; +import static com.mongodb.client.model.Filters.and; +import static com.mongodb.client.model.Filters.eq; +import static com.mongodb.client.model.Filters.exists; +import static com.mongodb.client.model.Filters.in; +import static com.mongodb.client.model.Filters.not; +import static com.mongodb.client.model.Updates.set; +import static java.util.Collections.emptyList; +import static java.util.Collections.singleton; +import static java.util.stream.Collectors.toList; +import static org.projectnessie.versioned.storage.common.persist.ObjTypes.objTypeByName; +import static org.projectnessie.versioned.storage.common.persist.Reference.reference; +import static org.projectnessie.versioned.storage.mongodb2.MongoDB2Constants.COL_OBJ_ID; +import static org.projectnessie.versioned.storage.mongodb2.MongoDB2Constants.COL_OBJ_TYPE; +import static org.projectnessie.versioned.storage.mongodb2.MongoDB2Constants.COL_OBJ_VALUE; +import static org.projectnessie.versioned.storage.mongodb2.MongoDB2Constants.COL_OBJ_VERS; +import static org.projectnessie.versioned.storage.mongodb2.MongoDB2Constants.COL_REFERENCES_CREATED_AT; +import static org.projectnessie.versioned.storage.mongodb2.MongoDB2Constants.COL_REFERENCES_DELETED; +import static org.projectnessie.versioned.storage.mongodb2.MongoDB2Constants.COL_REFERENCES_EXTENDED_INFO; +import static org.projectnessie.versioned.storage.mongodb2.MongoDB2Constants.COL_REFERENCES_NAME; +import static org.projectnessie.versioned.storage.mongodb2.MongoDB2Constants.COL_REFERENCES_POINTER; +import static org.projectnessie.versioned.storage.mongodb2.MongoDB2Constants.COL_REFERENCES_PREVIOUS; +import static org.projectnessie.versioned.storage.mongodb2.MongoDB2Constants.COL_REPO; +import static org.projectnessie.versioned.storage.mongodb2.MongoDB2Constants.ID_PROPERTY_NAME; +import static org.projectnessie.versioned.storage.mongodb2.MongoDB2Constants.ID_REPO_PATH; +import static org.projectnessie.versioned.storage.mongodb2.MongoDB2Serde.binaryToObjId; +import static org.projectnessie.versioned.storage.mongodb2.MongoDB2Serde.objIdToBinary; +import static org.projectnessie.versioned.storage.serialize.ProtoSerialization.deserializeObj; +import static org.projectnessie.versioned.storage.serialize.ProtoSerialization.deserializePreviousPointers; +import static org.projectnessie.versioned.storage.serialize.ProtoSerialization.serializeObj; +import static org.projectnessie.versioned.storage.serialize.ProtoSerialization.serializePreviousPointers; + +import com.google.common.collect.AbstractIterator; +import com.mongodb.MongoBulkWriteException; +import com.mongodb.MongoException; +import com.mongodb.MongoExecutionTimeoutException; +import com.mongodb.MongoInterruptedException; +import com.mongodb.MongoServerUnavailableException; +import com.mongodb.MongoSocketReadTimeoutException; +import com.mongodb.MongoTimeoutException; +import com.mongodb.MongoWriteException; +import com.mongodb.WriteError; +import com.mongodb.bulk.BulkWriteError; +import com.mongodb.bulk.BulkWriteInsert; +import com.mongodb.bulk.BulkWriteResult; +import com.mongodb.client.FindIterable; +import com.mongodb.client.MongoCursor; +import com.mongodb.client.model.InsertOneModel; +import com.mongodb.client.model.ReplaceOneModel; +import 
com.mongodb.client.model.ReplaceOptions; +import com.mongodb.client.model.Updates; +import com.mongodb.client.model.WriteModel; +import com.mongodb.client.result.DeleteResult; +import com.mongodb.client.result.UpdateResult; +import jakarta.annotation.Nonnull; +import java.lang.reflect.Array; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; +import java.util.Objects; +import java.util.Set; +import java.util.stream.Stream; +import org.agrona.collections.Hashing; +import org.agrona.collections.Object2IntHashMap; +import org.bson.Document; +import org.bson.conversions.Bson; +import org.bson.types.Binary; +import org.projectnessie.versioned.storage.common.config.StoreConfig; +import org.projectnessie.versioned.storage.common.exceptions.ObjNotFoundException; +import org.projectnessie.versioned.storage.common.exceptions.ObjTooLargeException; +import org.projectnessie.versioned.storage.common.exceptions.RefAlreadyExistsException; +import org.projectnessie.versioned.storage.common.exceptions.RefConditionFailedException; +import org.projectnessie.versioned.storage.common.exceptions.RefNotFoundException; +import org.projectnessie.versioned.storage.common.exceptions.UnknownOperationResultException; +import org.projectnessie.versioned.storage.common.objtypes.UpdateableObj; +import org.projectnessie.versioned.storage.common.persist.CloseableIterator; +import org.projectnessie.versioned.storage.common.persist.Obj; +import org.projectnessie.versioned.storage.common.persist.ObjId; +import org.projectnessie.versioned.storage.common.persist.ObjType; +import org.projectnessie.versioned.storage.common.persist.Persist; +import org.projectnessie.versioned.storage.common.persist.Reference; + +public class MongoDB2Persist implements Persist { + + private final StoreConfig config; + private final MongoDB2Backend backend; + + MongoDB2Persist(MongoDB2Backend backend, StoreConfig config) { + this.config = config; + this.backend = backend; + } + + @Nonnull + @Override + public String name() { + return MongoDB2BackendFactory.NAME; + } + + @Nonnull + @Override + public StoreConfig config() { + return config; + } + + private Document idRefDoc(Reference reference) { + return idRefDoc(reference.name()); + } + + private Document idRefDoc(String name) { + Document idDoc = new Document(); + idDoc.put(COL_REPO, config.repositoryId()); + idDoc.put(COL_REFERENCES_NAME, name); + return idDoc; + } + + private Document idObjDoc(ObjId id) { + Document idDoc = new Document(); + idDoc.put(COL_REPO, config.repositoryId()); + idDoc.put(COL_OBJ_ID, objIdToBinary(id)); + return idDoc; + } + + @Nonnull + @Override + public Reference addReference(@Nonnull Reference reference) throws RefAlreadyExistsException { + checkArgument(!reference.deleted(), "Deleted references must not be added"); + + Document doc = new Document(); + doc.put(ID_PROPERTY_NAME, idRefDoc(reference)); + doc.put(COL_REFERENCES_POINTER, objIdToBinary(reference.pointer())); + doc.put(COL_REFERENCES_DELETED, reference.deleted()); + long createdAtMicros = reference.createdAtMicros(); + if (createdAtMicros != 0L) { + doc.put(COL_REFERENCES_CREATED_AT, createdAtMicros); + } + ObjId extendedInfoObj = reference.extendedInfoObj(); + if (extendedInfoObj != null) { + doc.put(COL_REFERENCES_EXTENDED_INFO, objIdToBinary(extendedInfoObj)); + } + byte[] previous = serializePreviousPointers(reference.previousPointers()); + if (previous != null) { + doc.put(COL_REFERENCES_PREVIOUS, new Binary(previous)); + } + try { + backend.refs().insertOne(doc); + } catch 
(MongoWriteException e) { + if (e.getError().getCategory() == DUPLICATE_KEY) { + throw new RefAlreadyExistsException(fetchReference(reference.name())); + } + throw unhandledException(e); + } catch (RuntimeException e) { + throw unhandledException(e); + } + return reference; + } + + @Nonnull + @Override + public Reference markReferenceAsDeleted(@Nonnull Reference reference) + throws RefNotFoundException, RefConditionFailedException { + reference = reference.withDeleted(false); + + UpdateResult result; + try { + result = + backend + .refs() + .updateOne(referenceCondition(reference), set(COL_REFERENCES_DELETED, true)); + } catch (RuntimeException e) { + throw unhandledException(e); + } + + if (result.getModifiedCount() != 1) { + Reference ex = fetchReference(reference.name()); + if (ex == null) { + throw new RefNotFoundException(reference.name()); + } + throw new RefConditionFailedException(ex); + } + + return reference.withDeleted(true); + } + + private Bson referenceCondition(Reference reference) { + List filters = new ArrayList<>(5); + filters.add(eq(ID_PROPERTY_NAME, idRefDoc(reference))); + filters.add(eq(COL_REFERENCES_POINTER, objIdToBinary(reference.pointer()))); + filters.add(eq(COL_REFERENCES_DELETED, reference.deleted())); + long createdAt = reference.createdAtMicros(); + if (createdAt != 0L) { + filters.add(eq(COL_REFERENCES_CREATED_AT, createdAt)); + } else { + filters.add(not(exists(COL_REFERENCES_CREATED_AT))); + } + ObjId extendedInfoObj = reference.extendedInfoObj(); + if (extendedInfoObj != null) { + filters.add(eq(COL_REFERENCES_EXTENDED_INFO, objIdToBinary(extendedInfoObj))); + } else { + filters.add(not(exists(COL_REFERENCES_EXTENDED_INFO))); + } + + return and(filters); + } + + @Nonnull + @Override + public Reference updateReferencePointer(@Nonnull Reference reference, @Nonnull ObjId newPointer) + throws RefNotFoundException, RefConditionFailedException { + reference = reference.withDeleted(false); + + Reference updated = reference.forNewPointer(newPointer, config); + List updates = new ArrayList<>(); + updates.add(set(COL_REFERENCES_POINTER, objIdToBinary(newPointer))); + byte[] previous = serializePreviousPointers(updated.previousPointers()); + if (previous != null) { + updates.add(set(COL_REFERENCES_PREVIOUS, new Binary(previous))); + } + + UpdateResult result; + try { + result = backend.refs().updateOne(referenceCondition(reference), updates); + } catch (RuntimeException e) { + throw unhandledException(e); + } + + if (result.getModifiedCount() != 1) { + if (result.getMatchedCount() == 1) { + // not updated + return updated; + } + + Reference ex = fetchReference(reference.name()); + if (ex == null) { + throw new RefNotFoundException(reference.name()); + } + throw new RefConditionFailedException(ex); + } + + return updated; + } + + @Override + public void purgeReference(@Nonnull Reference reference) + throws RefNotFoundException, RefConditionFailedException { + reference = reference.withDeleted(true); + + DeleteResult result; + try { + result = backend.refs().deleteOne(referenceCondition(reference)); + } catch (RuntimeException e) { + throw unhandledException(e); + } + + if (result.getDeletedCount() != 1) { + Reference ex = fetchReference(reference.name()); + if (ex == null) { + throw new RefNotFoundException(reference.name()); + } + throw new RefConditionFailedException(ex); + } + } + + @Override + public Reference fetchReference(@Nonnull String name) { + FindIterable result; + try { + result = backend.refs().find(eq(ID_PROPERTY_NAME, idRefDoc(name))); + } catch 
(RuntimeException e) { + throw unhandledException(e); + } + + Document doc = result.first(); + if (doc == null) { + return null; + } + + Binary prev = doc.get(COL_REFERENCES_PREVIOUS, Binary.class); + List previous = + prev != null ? deserializePreviousPointers(prev.getData()) : emptyList(); + return reference( + name, + binaryToObjId(doc.get(COL_REFERENCES_POINTER, Binary.class)), + doc.getBoolean(COL_REFERENCES_DELETED), + refCreatedAt(doc), + binaryToObjId(doc.get(COL_REFERENCES_EXTENDED_INFO, Binary.class)), + previous); + } + + private static Long refCreatedAt(Document doc) { + return doc.containsKey(COL_REFERENCES_CREATED_AT) ? doc.getLong(COL_REFERENCES_CREATED_AT) : 0L; + } + + @Nonnull + @Override + public Reference[] fetchReferences(@Nonnull String[] names) { + List nameIdDocs = + Arrays.stream(names).filter(Objects::nonNull).map(this::idRefDoc).collect(toList()); + FindIterable result; + try { + result = backend.refs().find(in(ID_PROPERTY_NAME, nameIdDocs)); + } catch (RuntimeException e) { + throw unhandledException(e); + } + + Reference[] r = new Reference[names.length]; + + for (Document doc : result) { + String name = doc.get(ID_PROPERTY_NAME, Document.class).getString(COL_REFERENCES_NAME); + Binary prev = doc.get(COL_REFERENCES_PREVIOUS, Binary.class); + List previous = + prev != null ? deserializePreviousPointers(prev.getData()) : emptyList(); + Reference reference = + reference( + name, + binaryToObjId(doc.get(COL_REFERENCES_POINTER, Binary.class)), + doc.getBoolean(COL_REFERENCES_DELETED), + refCreatedAt(doc), + binaryToObjId(doc.get(COL_REFERENCES_EXTENDED_INFO, Binary.class)), + previous); + for (int i = 0; i < names.length; i++) { + if (name.equals(names[i])) { + r[i] = reference; + } + } + } + + return r; + } + + @Override + @Nonnull + public T fetchTypedObj( + @Nonnull ObjId id, ObjType type, @Nonnull Class typeClass) throws ObjNotFoundException { + FindIterable result; + try { + result = + type != null + ? 
backend + .objs() + .find(and(eq(ID_PROPERTY_NAME, idObjDoc(id)), eq(COL_OBJ_TYPE, type.shortName()))) + : backend.objs().find(eq(ID_PROPERTY_NAME, idObjDoc(id))); + } catch (RuntimeException e) { + throw unhandledException(e); + } + + Document doc = result.first(); + + T obj = docToObj(id, doc, type, typeClass); + if (obj == null) { + throw new ObjNotFoundException(id); + } + return obj; + } + + @Override + @Nonnull + public ObjType fetchObjType(@Nonnull ObjId id) throws ObjNotFoundException { + FindIterable result; + try { + result = backend.objs().find(eq(ID_PROPERTY_NAME, idObjDoc(id))); + } catch (RuntimeException e) { + throw unhandledException(e); + } + + Document doc = result.first(); + if (doc == null) { + throw new ObjNotFoundException(id); + } + + return objTypeByName(doc.getString(COL_OBJ_TYPE)); + } + + @Override + public T[] fetchTypedObjsIfExist( + @Nonnull ObjId[] ids, ObjType type, @Nonnull Class typeClass) { + List list = new ArrayList<>(ids.length); + Object2IntHashMap idToIndex = + new Object2IntHashMap<>(ids.length * 2, Hashing.DEFAULT_LOAD_FACTOR, -1); + @SuppressWarnings("unchecked") + T[] r = (T[]) Array.newInstance(typeClass, ids.length); + for (int i = 0; i < ids.length; i++) { + ObjId id = ids[i]; + if (id != null) { + list.add(idObjDoc(id)); + idToIndex.put(id, i); + } + } + + if (!list.isEmpty()) { + fetchObjsPage(r, list, idToIndex, type, typeClass); + } + + return r; + } + + private void fetchObjsPage( + Obj[] r, + List list, + Object2IntHashMap idToIndex, + ObjType type, + Class typeClass) { + FindIterable result; + try { + result = backend.objs().find(in(ID_PROPERTY_NAME, list)); + } catch (RuntimeException e) { + throw unhandledException(e); + } + for (Document doc : result) { + T obj = docToObj(doc, type, typeClass); + if (obj != null) { + int idx = idToIndex.getValue(obj.id()); + if (idx != -1) { + r[idx] = obj; + } + } + } + } + + @Override + public boolean storeObj(@Nonnull Obj obj, boolean ignoreSoftSizeRestrictions) + throws ObjTooLargeException { + Document doc = objToDoc(obj, ignoreSoftSizeRestrictions); + try { + backend.objs().insertOne(doc); + } catch (MongoWriteException e) { + if (e.getError().getCategory() == DUPLICATE_KEY) { + return false; + } + throw handleMongoWriteException(e); + } catch (RuntimeException e) { + throw unhandledException(e); + } + + return true; + } + + @Nonnull + @Override + public boolean[] storeObjs(@Nonnull Obj[] objs) throws ObjTooLargeException { + List> docs = new ArrayList<>(objs.length); + for (Obj obj : objs) { + if (obj != null) { + docs.add(new InsertOneModel<>(objToDoc(obj, false))); + } + } + + boolean[] r = new boolean[objs.length]; + + List> inserts = new ArrayList<>(docs); + while (!inserts.isEmpty()) { + try { + BulkWriteResult res = backend.objs().bulkWrite(inserts); + for (BulkWriteInsert insert : res.getInserts()) { + ObjId id = objIdFromBulkWriteInsert(insert); + r[objIdIndex(objs, id)] = id != null; + } + break; + } catch (MongoBulkWriteException e) { + // Handle "insert of already existing objects". + // + // MongoDB returns a BulkWriteResult of what _would_ have succeeded. Use that information + // to retry the bulk write to make progress. 
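+        //
+        // Only duplicate-key write errors are tolerated here: they mean the object already
+        // exists, so its slot in the returned boolean[] simply stays false. Any other error
+        // category is rethrown. The insert models reported in the partial BulkWriteResult are
+        // re-submitted until a bulkWrite() completes cleanly or nothing is left to retry.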
+ List errs = e.getWriteErrors(); + for (BulkWriteError err : errs) { + if (err.getCategory() != DUPLICATE_KEY) { + throw handleMongoWriteError(e, err); + } + } + BulkWriteResult res = e.getWriteResult(); + inserts.clear(); + res.getInserts().stream() + .map(MongoDB2Persist::objIdFromBulkWriteInsert) + .mapToInt(id -> objIdIndex(objs, id)) + .mapToObj(docs::get) + .forEach(inserts::add); + } catch (RuntimeException e) { + throw unhandledException(e); + } + } + return r; + } + + private static ObjId objIdFromDoc(Document doc) { + return binaryToObjId(doc.get(ID_PROPERTY_NAME, Document.class).get(COL_OBJ_ID, Binary.class)); + } + + private static int objIdIndex(Obj[] objs, ObjId id) { + for (int i = 0; i < objs.length; i++) { + if (id.equals(objs[i].id())) { + return i; + } + } + throw new IllegalArgumentException("ObjId " + id + " not in objs"); + } + + private static ObjId objIdFromBulkWriteInsert(BulkWriteInsert insert) { + return ObjId.objIdFromByteArray(insert.getId().asDocument().getBinary(COL_OBJ_ID).getData()); + } + + @Override + public void deleteObj(@Nonnull ObjId id) { + try { + backend.objs().deleteOne(eq(ID_PROPERTY_NAME, idObjDoc(id))); + } catch (RuntimeException e) { + throw unhandledException(e); + } + } + + @Override + public void deleteObjs(@Nonnull ObjId[] ids) { + List list = + Stream.of(ids).filter(Objects::nonNull).map(this::idObjDoc).collect(toList()); + if (list.isEmpty()) { + return; + } + try { + backend.objs().deleteMany(in(ID_PROPERTY_NAME, list)); + } catch (RuntimeException e) { + throw unhandledException(e); + } + } + + @Override + public void upsertObj(@Nonnull Obj obj) throws ObjTooLargeException { + ObjId id = obj.id(); + checkArgument(id != null, "Obj to store must have a non-null ID"); + + ReplaceOptions options = upsertOptions(); + + Document doc = objToDoc(obj, false); + UpdateResult result; + try { + result = backend.objs().replaceOne(eq(ID_PROPERTY_NAME, idObjDoc(id)), doc, options); + } catch (RuntimeException e) { + throw unhandledException(e); + } + if (!result.wasAcknowledged()) { + throw new RuntimeException("Upsert not acknowledged"); + } + } + + private static ReplaceOptions upsertOptions() { + // A `ReplaceOneModel` with the default replace options (upsert==false) silently does just + // nothing. 
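+    // Setting upsert==true makes replaceOne() insert the document when no match exists and
+    // replace it otherwise, which is the put-or-replace semantic upsertObj()/upsertObjs() need.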
+ ReplaceOptions options = new ReplaceOptions(); + options.upsert(true); + return options; + } + + @Override + public void upsertObjs(@Nonnull Obj[] objs) throws ObjTooLargeException { + ReplaceOptions options = upsertOptions(); + + List> docs = new ArrayList<>(objs.length); + for (Obj obj : objs) { + if (obj != null) { + ObjId id = obj.id(); + docs.add( + new ReplaceOneModel<>( + eq(ID_PROPERTY_NAME, idObjDoc(id)), objToDoc(obj, false), options)); + } + } + + List> updates = new ArrayList<>(docs); + if (!updates.isEmpty()) { + BulkWriteResult res; + try { + res = backend.objs().bulkWrite(updates); + } catch (RuntimeException e) { + throw unhandledException(e); + } + if (!res.wasAcknowledged()) { + throw new RuntimeException("Upsert not acknowledged"); + } + } + } + + @Override + public boolean deleteConditional(@Nonnull UpdateableObj obj) { + ObjId id = obj.id(); + ObjType type = obj.type(); + + try { + return backend + .objs() + .findOneAndDelete( + and( + eq(ID_PROPERTY_NAME, idObjDoc(id)), + eq(COL_OBJ_TYPE, type.shortName()), + eq(COL_OBJ_VERS, obj.versionToken()))) + != null; + } catch (RuntimeException e) { + throw unhandledException(e); + } + } + + @Override + public boolean updateConditional(@Nonnull UpdateableObj expected, @Nonnull UpdateableObj newValue) + throws ObjTooLargeException { + ObjId id = expected.id(); + ObjType type = expected.type(); + String expectedVersion = expected.versionToken(); + + checkArgument(id != null && id.equals(newValue.id())); + checkArgument(expected.type().equals(newValue.type())); + checkArgument(!expected.versionToken().equals(newValue.versionToken())); + + Document doc = objToDoc(newValue, false); + + List updates = + doc.entrySet().stream() + .filter(e -> !ID_PROPERTY_NAME.equals(e.getKey())) + .map(e -> set(e.getKey(), e.getValue())) + .collect(toList()); + + Bson update = Updates.combine(updates); + + try { + return backend + .objs() + .findOneAndUpdate( + and( + eq(ID_PROPERTY_NAME, idObjDoc(id)), + eq(COL_OBJ_TYPE, type.shortName()), + eq(COL_OBJ_VERS, expectedVersion)), + update) + != null; + } catch (RuntimeException e) { + throw unhandledException(e); + } + } + + @Nonnull + @Override + public CloseableIterator scanAllObjects(@Nonnull Set returnedObjTypes) { + return new ScanAllObjectsIterator(returnedObjTypes); + } + + @Override + public void erase() { + backend.eraseRepositories(singleton(config.repositoryId())); + } + + private T docToObj(Document doc, ObjType type, Class typeClass) { + ObjId id = objIdFromDoc(doc); + return docToObj(id, doc, type, typeClass); + } + + private T docToObj( + @Nonnull ObjId id, Document doc, ObjType t, @SuppressWarnings("unused") Class typeClass) { + if (doc == null) { + return null; + } + ObjType type = objTypeByName(doc.getString(COL_OBJ_TYPE)); + if (t != null && !t.equals(type)) { + return null; + } + Binary bin = doc.get(COL_OBJ_VALUE, Binary.class); + String versionToken = doc.getString(COL_OBJ_VERS); + Obj obj = deserializeObj(id, bin.getData(), versionToken); + @SuppressWarnings("unchecked") + T r = (T) obj; + return r; + } + + private Document objToDoc(@Nonnull Obj obj, boolean ignoreSoftSizeRestrictions) + throws ObjTooLargeException { + ObjId id = obj.id(); + checkArgument(id != null, "Obj to store must have a non-null ID"); + + ObjType type = obj.type(); + + Document doc = new Document(); + Document inner = new Document(); + doc.put(ID_PROPERTY_NAME, idObjDoc(id)); + doc.put(COL_OBJ_TYPE, type.shortName()); + UpdateableObj.extractVersionToken(obj).ifPresent(token -> doc.put(COL_OBJ_VERS, 
token)); + int incrementalIndexSizeLimit = + ignoreSoftSizeRestrictions ? Integer.MAX_VALUE : effectiveIncrementalIndexSizeLimit(); + int indexSizeLimit = + ignoreSoftSizeRestrictions ? Integer.MAX_VALUE : effectiveIndexSegmentSizeLimit(); + byte[] serialized = serializeObj(obj, incrementalIndexSizeLimit, indexSizeLimit, false); + doc.put(COL_OBJ_VALUE, new Binary(serialized)); + return doc; + } + + private class ScanAllObjectsIterator extends AbstractIterator + implements CloseableIterator { + + private final MongoCursor result; + + public ScanAllObjectsIterator(Set returnedObjTypes) { + List objTypeShortNames = + returnedObjTypes.stream().map(ObjType::shortName).collect(toList()); + try { + result = + backend + .objs() + .find( + and( + eq(ID_REPO_PATH, config.repositoryId()), + in(COL_OBJ_TYPE, objTypeShortNames))) + .iterator(); + } catch (RuntimeException e) { + throw unhandledException(e); + } + } + + @Override + protected Obj computeNext() { + if (!result.hasNext()) { + return endOfData(); + } + + try { + Document doc = result.next(); + return docToObj(doc, null, Obj.class); + } catch (RuntimeException e) { + throw unhandledException(e); + } + } + + @Override + public void close() { + result.close(); + } + } + + static RuntimeException unhandledException(RuntimeException e) { + if (e instanceof MongoInterruptedException + || e instanceof MongoTimeoutException + || e instanceof MongoServerUnavailableException + || e instanceof MongoSocketReadTimeoutException + || e instanceof MongoExecutionTimeoutException) { + return new UnknownOperationResultException(e); + } + if (e instanceof MongoWriteException) { + return handleMongoWriteException((MongoWriteException) e); + } + if (e instanceof MongoBulkWriteException) { + MongoBulkWriteException specific = (MongoBulkWriteException) e; + for (BulkWriteError error : specific.getWriteErrors()) { + switch (error.getCategory()) { + case EXECUTION_TIMEOUT: + case UNCATEGORIZED: + return new UnknownOperationResultException(e); + default: + break; + } + } + } + return e; + } + + static RuntimeException handleMongoWriteException(MongoWriteException e) { + return handleMongoWriteError(e, e.getError()); + } + + static RuntimeException handleMongoWriteError(MongoException e, WriteError error) { + switch (error.getCategory()) { + case EXECUTION_TIMEOUT: + case UNCATEGORIZED: + return new UnknownOperationResultException(e); + default: + return e; + } + } +} diff --git a/versioned/storage/mongodb2/src/main/java/org/projectnessie/versioned/storage/mongodb2/MongoDB2PersistFactory.java b/versioned/storage/mongodb2/src/main/java/org/projectnessie/versioned/storage/mongodb2/MongoDB2PersistFactory.java new file mode 100644 index 00000000000..e3140b838ec --- /dev/null +++ b/versioned/storage/mongodb2/src/main/java/org/projectnessie/versioned/storage/mongodb2/MongoDB2PersistFactory.java @@ -0,0 +1,36 @@ +/* + * Copyright (C) 2024 Dremio + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */
+package org.projectnessie.versioned.storage.mongodb2;
+
+import jakarta.annotation.Nonnull;
+import org.projectnessie.versioned.storage.common.config.StoreConfig;
+import org.projectnessie.versioned.storage.common.persist.Persist;
+import org.projectnessie.versioned.storage.common.persist.PersistFactory;
+
+final class MongoDB2PersistFactory implements PersistFactory {
+
+  private final MongoDB2Backend backend;
+
+  MongoDB2PersistFactory(MongoDB2Backend backend) {
+    this.backend = backend;
+  }
+
+  @Override
+  @Nonnull
+  public Persist newPersist(@Nonnull StoreConfig config) {
+    return new MongoDB2Persist(backend, config);
+  }
+}
diff --git a/versioned/storage/mongodb2/src/main/java/org/projectnessie/versioned/storage/mongodb2/MongoDB2Serde.java b/versioned/storage/mongodb2/src/main/java/org/projectnessie/versioned/storage/mongodb2/MongoDB2Serde.java
new file mode 100644
index 00000000000..192baaa0041
--- /dev/null
+++ b/versioned/storage/mongodb2/src/main/java/org/projectnessie/versioned/storage/mongodb2/MongoDB2Serde.java
@@ -0,0 +1,111 @@
+/*
+ * Copyright (C) 2024 Dremio
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.projectnessie.versioned.storage.mongodb2;
+
+import static java.util.Collections.emptyList;
+import static java.util.stream.Collectors.toList;
+import static org.projectnessie.nessie.relocated.protobuf.UnsafeByteOperations.unsafeWrap;
+import static org.projectnessie.versioned.storage.common.indexes.StoreKey.keyFromString;
+import static org.projectnessie.versioned.storage.common.objtypes.IndexStripe.indexStripe;
+
+import jakarta.annotation.Nonnull;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.function.Consumer;
+import org.bson.Document;
+import org.bson.types.Binary;
+import org.projectnessie.nessie.relocated.protobuf.ByteString;
+import org.projectnessie.versioned.storage.common.objtypes.IndexStripe;
+import org.projectnessie.versioned.storage.common.persist.ObjId;
+
+public final class MongoDB2Serde {
+
+  private static final String COL_STRIPES_FIRST_KEY = "f";
+  private static final String COL_STRIPES_LAST_KEY = "l";
+  private static final String COL_STRIPES_SEGMENT = "s";
+
+  private MongoDB2Serde() {}
+
+  public static Binary bytesToBinary(ByteString bytes) {
+    return new Binary(bytes.toByteArray());
+  }
+
+  public static ByteString binaryToBytes(Binary binary) {
+    return binary != null ? unsafeWrap(binary.getData()) : null;
+  }
+
+  public static Binary objIdToBinary(ObjId id) {
+    return new Binary(id.asByteArray());
+  }
+
+  public static ObjId binaryToObjId(Binary id) {
+    return id != null ? ObjId.objIdFromByteArray(id.getData()) : null;
+  }
+
+  public static void objIdsToDoc(Document doc, String n, List<ObjId> ids) {
+    if (ids == null || ids.isEmpty()) {
+      return;
+    }
+    doc.put(n, objIdsToBinary(ids));
+  }
+
+  public static List<Binary> objIdsToBinary(List<ObjId> ids) {
+    if (ids == null) {
+      return emptyList();
+    }
+    return ids.stream().map(MongoDB2Serde::objIdToBinary).collect(toList());
+  }
+
+  public static List<ObjId> binaryToObjIds(List<Binary> ids) {
+    if (ids == null) {
+      return emptyList();
+    }
+    return ids.stream().map(MongoDB2Serde::binaryToObjId).collect(toList());
+  }
+
+  public static void binaryToObjIds(List<Binary> ids, Consumer<ObjId> receiver) {
+    if (ids != null) {
+      ids.stream().map(MongoDB2Serde::binaryToObjId).forEach(receiver);
+    }
+  }
+
+  public static void fromStripesDocList(
+      Document doc, String attrName, Consumer<IndexStripe> consumer) {
+    List<Document> refIndexStripes = doc.getList(attrName, Document.class);
+    if (refIndexStripes != null) {
+      for (Document seg : refIndexStripes) {
+        consumer.accept(
+            indexStripe(
+                keyFromString(seg.getString(COL_STRIPES_FIRST_KEY)),
+                keyFromString(seg.getString(COL_STRIPES_LAST_KEY)),
+                binaryToObjId(seg.get(COL_STRIPES_SEGMENT, Binary.class))));
+      }
+    }
+  }
+
+  @Nonnull
+  public static List<Document> stripesToDocs(List<IndexStripe> stripes) {
+    List<Document> stripesDocs = new ArrayList<>();
+    for (IndexStripe stripe : stripes) {
+      Document sv = new Document();
+      sv.put(COL_STRIPES_FIRST_KEY, stripe.firstKey().rawString());
+      sv.put(COL_STRIPES_LAST_KEY, stripe.lastKey().rawString());
+      sv.put(COL_STRIPES_SEGMENT, objIdToBinary(stripe.segment()));
+      stripesDocs.add(sv);
+    }
+    return stripesDocs;
+  }
+}
diff --git a/versioned/storage/mongodb2/src/main/resources/META-INF/services/org.projectnessie.versioned.storage.common.persist.BackendFactory b/versioned/storage/mongodb2/src/main/resources/META-INF/services/org.projectnessie.versioned.storage.common.persist.BackendFactory
new file mode 100644
index 00000000000..69e631d597c
--- /dev/null
+++ b/versioned/storage/mongodb2/src/main/resources/META-INF/services/org.projectnessie.versioned.storage.common.persist.BackendFactory
@@ -0,0 +1,16 @@
+#
+# Copyright (C) 2022 Dremio
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+org.projectnessie.versioned.storage.mongodb2.MongoDB2BackendFactory