diff --git a/.editorconfig b/.editorconfig
deleted file mode 100644
index ba5b292..0000000
--- a/.editorconfig
+++ /dev/null
@@ -1,19 +0,0 @@
-# .editorconfig
-# http://editorconfig.org
-
-root = true
-
-[*]
-charset = utf-8
-indent_size = 2
-end_of_line = lf
-indent_style = space
-max_line_length = 120
-trim_trailing_whitespace = true
-
-[*.md]
-max_line_length = 80
-trim_trailing_whitespace = false
-
-[Makefile]
-indent_style = tab
diff --git a/.gitattributes b/.gitattributes
new file mode 100644
index 0000000..059ac35
--- /dev/null
+++ b/.gitattributes
@@ -0,0 +1,15 @@
+*.sh text eol=lf
+*.bat text eol=crlf
+
+pk_generated_parent.pom linguist-generated=true
+dependencies.md linguist-generated=true
+doc/changes/changelog.md linguist-generated=true
+.github/workflows/broken_links_checker.yml linguist-generated=true
+.github/workflows/ci-build-next-java.yml linguist-generated=true
+.github/workflows/ci-build.yml linguist-generated=true
+.github/workflows/dependencies_check.yml linguist-generated=true
+.github/workflows/release_droid_prepare_original_checksum.yml linguist-generated=true
+.github/workflows/release_droid_print_quick_checksum.yml linguist-generated=true
+.github/workflows/release_droid_upload_github_release_assets.yml linguist-generated=true
+.settings/org.eclipse.jdt.core.prefs linguist-generated=true
+.settings/org.eclipse.jdt.ui.prefs linguist-generated=true
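Note: the eol rules pin shell scripts to LF and batch files to CRLF, while the linguist-generated entries collapse those files in GitHub diffs and exclude them from language statistics. A minimal sanity check with stock git, using hypothetical file names:

    # git check-attr prints the attributes git resolves for a path
    git check-attr text eol -- tools/example.sh
    # expected: tools/example.sh: text: set
    #           tools/example.sh: eol: lf
    git check-attr linguist-generated -- dependencies.md
    # expected: dependencies.md: linguist-generated: true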
diff --git a/.github/workflows/broken_link_checker.yml b/.github/workflows/broken_link_checker.yml
deleted file mode 100644
index b67cb03..0000000
--- a/.github/workflows/broken_link_checker.yml
+++ /dev/null
@@ -1,16 +0,0 @@
-name: Broken Links Checker
-
-on:
- schedule:
- - cron: "0 5 * * *"
- push:
-
-jobs:
- linkChecker:
- runs-on: ubuntu-latest
- steps:
- - uses: actions/checkout@v2
- - uses: gaurav-nelson/github-action-markdown-link-check@v1
- with:
- use-quiet-mode: 'yes'
- use-verbose-mode: 'yes'
diff --git a/.github/workflows/broken_links_checker.yml b/.github/workflows/broken_links_checker.yml
new file mode 100644
index 0000000..f2079ec
--- /dev/null
+++ b/.github/workflows/broken_links_checker.yml
@@ -0,0 +1,32 @@
+name: Broken Links Checker
+
+on:
+ schedule:
+ - cron: "0 5 * * 0"
+ push:
+ branches:
+ - main
+ pull_request:
+
+jobs:
+ linkChecker:
+ runs-on: ubuntu-latest
+ concurrency:
+ group: ${{ github.workflow }}-${{ github.ref }}
+ cancel-in-progress: true
+ steps:
+ - uses: actions/checkout@v3
+ - name: Configure broken links checker
+ run: |
+ mkdir -p ./target
+ echo '{"aliveStatusCodes": [429, 200], "ignorePatterns": [' \
+ '{"pattern": "^https?://(www|dev).mysql.com/"},' \
+            '{"pattern": "^https?://(www.)?opensource.org"},' \
+            '{"pattern": "^https?://(www.)?eclipse.org"},' \
+ '{"pattern": "^https?://projects.eclipse.org"}' \
+ ']}' > ./target/broken_links_checker.json
+ - uses: gaurav-nelson/github-action-markdown-link-check@v1
+ with:
+ use-quiet-mode: 'yes'
+ use-verbose-mode: 'yes'
+ config-file: ./target/broken_links_checker.json
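Note: the configure step assembles the link checker's ignore list as JSON from shell string fragments; the commas between the pattern objects above are required for the result to parse. A quick local validity check, assuming python3 is on the PATH:

    # json.tool pretty-prints valid JSON and exits non-zero on a parse error
    python3 -m json.tool ./target/broken_links_checker.json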
diff --git a/.github/workflows/ci-build-next-java.yml b/.github/workflows/ci-build-next-java.yml
new file mode 100644
index 0000000..e0c15cf
--- /dev/null
+++ b/.github/workflows/ci-build-next-java.yml
@@ -0,0 +1,35 @@
+name: CI Build next Java
+
+on:
+ push:
+ branches:
+ - main
+ pull_request:
+
+jobs:
+ java-17-compatibility:
+ runs-on: ubuntu-latest
+ concurrency:
+ group: ${{ github.workflow }}-${{ github.ref }}
+ cancel-in-progress: true
+ steps:
+ - name: Checkout the repository
+ uses: actions/checkout@v3
+ with:
+ fetch-depth: 0
+ - name: Set up JDK 17
+ uses: actions/setup-java@v3
+ with:
+ distribution: 'temurin'
+ java-version: 17
+ cache: 'maven'
+ - name: Run tests and build with Maven
+ run: |
+ mvn --batch-mode --update-snapshots clean package -DtrimStackTrace=false \
+ -Dorg.slf4j.simpleLogger.log.org.apache.maven.cli.transfer.Slf4jMavenTransferListener=warn
+ - name: Publish Test Report
+ uses: scacap/action-surefire-report@v1
+ if: ${{ always() && github.event.pull_request.head.repo.full_name == github.repository && github.actor != 'dependabot[bot]' }}
+ with:
+ github_token: ${{ secrets.GITHUB_TOKEN }}
+ fail_if_no_tests: false
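Note: this workflow only verifies that the build still passes on Java 17; it publishes nothing. A sketch for reproducing it locally, assuming a Temurin 17 install (the JAVA_HOME path is an assumption, adjust for your machine):

    JAVA_HOME=/usr/lib/jvm/temurin-17-jdk \
        mvn --batch-mode --update-snapshots clean package -DtrimStackTrace=false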
diff --git a/.github/workflows/ci-build.yml b/.github/workflows/ci-build.yml
index 1deb2a0..e520870 100644
--- a/.github/workflows/ci-build.yml
+++ b/.github/workflows/ci-build.yml
@@ -1,75 +1,55 @@
name: CI Build
on:
- - push
+ push:
+ branches:
+ - main
+ pull_request:
jobs:
build:
- name: Build with Scala ${{ matrix.scala }} and Exasol ${{ matrix.exasol-docker-version }}
runs-on: ubuntu-latest
- strategy:
- fail-fast: false
- matrix:
- scala: [ 2.13.8 ]
- exasol-docker-version: [ 7.0.14, 7.1.4 ]
-
+ concurrency:
+ group: ${{ github.workflow }}-${{ github.ref }}
+ cancel-in-progress: true
steps:
- - name: Checkout the Repository
- uses: actions/checkout@v2
+ - name: Checkout the repository
+ uses: actions/checkout@v3
with:
fetch-depth: 0
-
- - name: Setup Java and Scala with JDK 11
- uses: olafurpg/setup-scala@v10
+ - name: Set up JDK 11
+ uses: actions/setup-java@v3
with:
+ distribution: 'temurin'
java-version: 11
-
- - name: Pull Docker Images
+ cache: 'maven'
+ - name: Cache SonarCloud packages
+ uses: actions/cache@v3
+ with:
+ path: ~/.sonar/cache
+ key: ${{ runner.os }}-sonar
+ restore-keys: ${{ runner.os }}-sonar
+ - name: Enable testcontainer reuse
+ run: echo 'testcontainers.reuse.enable=true' > "$HOME/.testcontainers.properties"
+ - name: Run tests and build with Maven
run: |
- docker pull exasol/docker-db:${{ matrix.exasol-docker-version }}
-
- - name: Cache Local SBT Dependencies
- uses: actions/cache@v2
+ mvn --batch-mode clean verify \
+ -Dorg.slf4j.simpleLogger.log.org.apache.maven.cli.transfer.Slf4jMavenTransferListener=warn \
+ -DtrimStackTrace=false
+ - name: Publish Test Report
+ uses: scacap/action-surefire-report@v1
+ if: ${{ always() && github.event.pull_request.head.repo.full_name == github.repository && github.actor != 'dependabot[bot]' }}
with:
- path: |
- ~/.sbt
- ~/.ivy2/cache
- ~/.coursier/cache/v1
- ~/.cache/coursier/v1
- key: ${{ runner.os }}-sbt-cache-${{ hashFiles('**/*.sbt') }}-${{ hashFiles('project/build.properties') }}
- restore-keys: |
- ${{ runner.os }}-sbt-cache-
-
- - name: Run CI
- run: ./scripts/ci.sh
- env:
- SCALA_VERSION: ${{ matrix.scala }}
- EXASOL_DOCKER_VERSION: ${{ matrix.exasol-docker-version }}
-
- - name: Upload Coverage Results to Coveralls
- run: sbt ++${{ matrix.scala }} coveralls
- env:
- COVERALLS_REPO_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-
- # This required because of the sonarcloud-github-action docker volume mapping.
- - name: Prepare for Sonar Cloud Scan
- if: startsWith(matrix.exasol-docker-version, '7.1')
+ github_token: ${{ secrets.GITHUB_TOKEN }}
+ - name: Sonar analysis
+ if: ${{ env.SONAR_TOKEN != null }}
run: |
- find . -name scoverage.xml -exec sed -i \
- 's#/home/runner/work/kafka-connector-extension/kafka-connector-extension#/github/workspace#g' {} +
-
- - name: Sonar Cloud Scan
- if: startsWith(matrix.exasol-docker-version, '7.1')
- uses: sonarsource/sonarcloud-github-action@master
+ mvn --batch-mode org.sonarsource.scanner.maven:sonar-maven-plugin:sonar \
+ -Dorg.slf4j.simpleLogger.log.org.apache.maven.cli.transfer.Slf4jMavenTransferListener=warn \
+ -DtrimStackTrace=false \
+ -Dsonar.organization=exasol \
+ -Dsonar.host.url=https://sonarcloud.io \
+ -Dsonar.login=$SONAR_TOKEN
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- SONAR_TOKEN: ${{ secrets.SONAR_TOKEN }}
-
- - name: Cleanup
- run: |
- rm -rf "$HOME/.ivy2/local" || true
- find $HOME/Library/Caches/Coursier/v1 -name "ivydata-*.properties" -delete || true
- find $HOME/.ivy2/cache -name "ivydata-*.properties" -delete || true
- find $HOME/.cache/coursier/v1 -name "ivydata-*.properties" -delete || true
- find $HOME/.coursier/cache -name "*.lock" -delete || true
- find $HOME/.sbt -name "*.lock" -delete || true
+ SONAR_TOKEN: ${{ secrets.SONAR_TOKEN }}
\ No newline at end of file
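Note: the "Enable testcontainer reuse" step opts the runner into Testcontainers' container reuse, so integration-test containers survive across Maven invocations instead of being recreated each time. To inspect or reclaim them locally (the org.testcontainers label is what testcontainers-java sets on its containers; treat the filter as an assumption):

    cat "$HOME/.testcontainers.properties"               # should contain testcontainers.reuse.enable=true
    docker ps --filter "label=org.testcontainers=true"   # list containers started by Testcontainers
    docker rm -f $(docker ps -q --filter "label=org.testcontainers=true")   # remove them when done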
diff --git a/.github/workflows/dependencies_check.yml b/.github/workflows/dependencies_check.yml
new file mode 100644
index 0000000..b2ab231
--- /dev/null
+++ b/.github/workflows/dependencies_check.yml
@@ -0,0 +1,20 @@
+name: Dependencies Check
+
+on:
+ schedule:
+ - cron: "0 2 * * *"
+
+jobs:
+ build:
+ runs-on: ubuntu-latest
+
+ steps:
+ - uses: actions/checkout@v3
+ - name: Set up JDK 11
+ uses: actions/setup-java@v3
+ with:
+ distribution: 'temurin'
+ java-version: 11
+ cache: 'maven'
+ - name: Checking dependencies for vulnerabilities
+ run: mvn --batch-mode org.sonatype.ossindex.maven:ossindex-maven-plugin:audit -f pom.xml
\ No newline at end of file
diff --git a/.github/workflows/linting.yml b/.github/workflows/linting.yml
new file mode 100644
index 0000000..24d5657
--- /dev/null
+++ b/.github/workflows/linting.yml
@@ -0,0 +1,21 @@
+name: Linting
+
+on:
+ push:
+ branches:
+ - main
+ pull_request:
+
+jobs:
+ build:
+ runs-on: ubuntu-latest
+ concurrency:
+ group: ${{ github.workflow }}-${{ github.ref }}
+ cancel-in-progress: true
+ steps:
+ - name: Checkout the repository
+ uses: actions/checkout@v3
+ with:
+ fetch-depth: 0
+      - name: Run scalastyle and scalafix checks
+ run: mvn --batch-mode clean compile test-compile scalastyle:check scalafix:scalafix
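Note: scalastyle:check fails the build on style violations, while scalafix:scalafix applies the rules from .scalafix.conf and can rewrite sources. To run the same checks locally without letting scalafix modify files (the CHECK mode flag is an assumption about the scalafix-maven-plugin, not confirmed for the version pinned here):

    mvn --batch-mode clean compile test-compile scalastyle:check scalafix:scalafix -Dscalafix.mode=CHECK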
diff --git a/.github/workflows/release_droid_prepare_original_checksum.yml b/.github/workflows/release_droid_prepare_original_checksum.yml
index f3cb63b..4a980f8 100644
--- a/.github/workflows/release_droid_prepare_original_checksum.yml
+++ b/.github/workflows/release_droid_prepare_original_checksum.yml
@@ -8,20 +8,24 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Checkout the repository
- uses: actions/checkout@v2
- - name: Setup Scala
- uses: olafurpg/setup-scala@v10
+ uses: actions/checkout@v3
with:
- java-version: adopt@1.11
- - name: Assembly with SBT
- run: sbt assembly
- - name: Running tests
- run: sbt test it:test
+ fetch-depth: 0
+ - name: Set up JDK 11
+ uses: actions/setup-java@v3
+ with:
+ distribution: 'temurin'
+ java-version: 11
+ cache: 'maven'
+ - name: Enable testcontainer reuse
+ run: echo 'testcontainers.reuse.enable=true' > "$HOME/.testcontainers.properties"
+ - name: Run tests and build with Maven
+ run: mvn --batch-mode clean verify --file pom.xml
- name: Prepare checksum
- run: find target/scala*/stripped -name *.jar -exec sha256sum "{}" + > original_checksum
+        run: find target -maxdepth 1 -name '*.jar' -exec sha256sum "{}" + > original_checksum
- name: Upload checksum to the artifactory
- uses: actions/upload-artifact@v2
+ uses: actions/upload-artifact@v3
with:
name: original_checksum
retention-days: 5
- path: original_checksum
+ path: original_checksum
\ No newline at end of file
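Note: original_checksum records SHA-256 sums of the built JARs so a later rebuild can be compared against the original release artifacts. Verification is the standard sha256sum round trip, run from the repository root after rebuilding:

    sha256sum --check original_checksum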
diff --git a/.github/workflows/release_droid_print_quick_checksum.yml b/.github/workflows/release_droid_print_quick_checksum.yml
index 1e30d6b..8add957 100644
--- a/.github/workflows/release_droid_print_quick_checksum.yml
+++ b/.github/workflows/release_droid_print_quick_checksum.yml
@@ -8,12 +8,17 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Checkout the repository
- uses: actions/checkout@v2
- - name: Setup Scala
- uses: olafurpg/setup-scala@v10
+ uses: actions/checkout@v3
with:
- java-version: adopt@1.11
- - name: Assembly with SBT skipping tests
- run: sbt assembly
- - name: Prepare checksum
- run: echo 'checksum_start==';find target/scala*/stripped -name *.jar -exec sha256sum "{}" + | xargs;echo '==checksum_end'
+ fetch-depth: 0
+ - name: Set up JDK 11
+ uses: actions/setup-java@v3
+ with:
+ distribution: 'temurin'
+ java-version: 11
+ cache: 'maven'
+ - name: Build with Maven skipping tests
+ run: mvn --batch-mode clean verify -DskipTests
+ - name: Print checksum
+        run: echo 'checksum_start==';find target -maxdepth 1 -name '*.jar' -exec sha256sum "{}" + | xargs;echo '==checksum_end'
+
diff --git a/.github/workflows/release_droid_upload_github_release_assets.yml b/.github/workflows/release_droid_upload_github_release_assets.yml
index f956263..7350faf 100644
--- a/.github/workflows/release_droid_upload_github_release_assets.yml
+++ b/.github/workflows/release_droid_upload_github_release_assets.yml
@@ -4,7 +4,7 @@ on:
workflow_dispatch:
inputs:
upload_url:
- description: 'Upload URL'
+ description: 'Assets upload URL'
required: true
jobs:
@@ -12,24 +12,33 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Checkout the repository
- uses: actions/checkout@v2
- - name: Setup Scala
- uses: olafurpg/setup-scala@v10
+ uses: actions/checkout@v3
with:
- java-version: adopt@1.11
- - name: Assembly with SBT skipping tests
- run: sbt assembly
+ fetch-depth: 0
+ - name: Set up JDK 11
+ uses: actions/setup-java@v3
+ with:
+ distribution: 'temurin'
+ java-version: 11
+ cache: 'maven'
+ - name: Build with Maven skipping tests
+ run: mvn --batch-mode clean verify -DskipTests
- name: Generate sha256sum files
run: |
- cd target/scala-2.13/stripped/
- find *.jar -exec bash -c 'sha256sum {} > {}.sha256' \;
+ cd target
+ find . -maxdepth 1 -name \*.jar -exec bash -c 'sha256sum {} > {}.sha256' \;
- name: Upload assets to the GitHub release draft
uses: shogo82148/actions-upload-release-asset@v1
with:
upload_url: ${{ github.event.inputs.upload_url }}
- asset_path: target/scala-2.13/stripped/*.jar
+ asset_path: target/*.jar
- name: Upload sha256sum files
uses: shogo82148/actions-upload-release-asset@v1
with:
upload_url: ${{ github.event.inputs.upload_url }}
- asset_path: target/scala-2.13/stripped/*.sha256
+ asset_path: target/*.sha256
+ - name: Upload error-code-report
+ uses: shogo82148/actions-upload-release-asset@v1
+ with:
+ upload_url: ${{ github.event.inputs.upload_url }}
+ asset_path: target/error_code_report.json
diff --git a/.gitignore b/.gitignore
index 257cc15..6c483df 100644
--- a/.gitignore
+++ b/.gitignore
@@ -43,7 +43,6 @@ tmp/
.classpath
.project
.scala_dependencies
-.settings/
*.sc
# Ensime
@@ -57,3 +56,19 @@ tmp/
# Windows
Desktop.ini
Thumbs.db
+
+*.swp
+local
+.dbeaver*
+**/*.log
+.directory
+venv/
+.idea
+target
+dependency-reduced-pom.xml
+pom.xml.versionsBackup
+~*
+*.lock
+*.orig
+*.md.html
+*.flattened-pom.xml
diff --git a/.project-keeper.yml b/.project-keeper.yml
new file mode 100644
index 0000000..987bafe
--- /dev/null
+++ b/.project-keeper.yml
@@ -0,0 +1,6 @@
+sources:
+ - type: maven
+ path: pom.xml
+ modules:
+ - jar_artifact
+ - integration_tests
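Note: this file points Project Keeper at the Maven build and declares the jar_artifact and integration_tests modules; Project Keeper is what maintains the linguist-generated files flagged in .gitattributes above. Typical local usage, with goal names as provided by the project-keeper-maven-plugin (treat them as assumptions if your version differs):

    mvn project-keeper:verify   # fail if the generated files are out of date
    mvn project-keeper:fix      # regenerate them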
diff --git a/.scalafix.conf b/.scalafix.conf
new file mode 100644
index 0000000..354da92
--- /dev/null
+++ b/.scalafix.conf
@@ -0,0 +1,45 @@
+rules = [
+ // ExplicitResultTypes
+ Disable
+ DisableSyntax
+ LeakingImplicitClassVal
+ MissingFinal
+ NoAutoTupling
+ NoValInForComprehension
+ ProcedureSyntax
+ OrganizeImports
+ RemoveUnused
+]
+
+Disable {
+ ifSynthetic = [
+ "scala/Option.option2Iterable"
+ "scala/Predef.any2stringadd"
+ ]
+}
+
+DisableSyntax {
+ noFinalize = true
+ noImplicitConversion = false
+ noImplicitObject = true
+ noSemicolons = true
+ noTabs = true
+ noValPatterns = true
+ noXml = true
+}
+
+ExplicitResultTypes {
+ fatalWarnings = true
+ skipSimpleDefinitions = false
+}
+
+OrganizeImports.removeUnused = true
+OrganizeImports {
+ groups = ["re:javax?\\.", "scala.", "com.exasol.", "*"]
+ groupExplicitlyImportedImplicitsSeparately = true
+}
+
+RemoveUnused {
+ // handled by OrganizeImports
+ imports = false
+}
diff --git a/.scalafmt.conf b/.scalafmt.conf
index 1afa520..74fc012 100644
--- a/.scalafmt.conf
+++ b/.scalafmt.conf
@@ -1,5 +1,6 @@
-version = 3.0.0-RC6
+version = 3.7.3
project.git = true
+runner.dialect = scala213
maxColumn = 120
docstrings.style = Asterisk
diff --git a/.settings/org.eclipse.jdt.core.prefs b/.settings/org.eclipse.jdt.core.prefs
new file mode 100644
index 0000000..8b5a9aa
--- /dev/null
+++ b/.settings/org.eclipse.jdt.core.prefs
@@ -0,0 +1,502 @@
+eclipse.preferences.version=1
+org.eclipse.jdt.core.compiler.annotation.inheritNullAnnotations=disabled
+org.eclipse.jdt.core.compiler.annotation.missingNonNullByDefaultAnnotation=ignore
+org.eclipse.jdt.core.compiler.annotation.nonnull=org.eclipse.jdt.annotation.NonNull
+org.eclipse.jdt.core.compiler.annotation.nonnull.secondary=
+org.eclipse.jdt.core.compiler.annotation.nonnullbydefault=org.eclipse.jdt.annotation.NonNullByDefault
+org.eclipse.jdt.core.compiler.annotation.nonnullbydefault.secondary=
+org.eclipse.jdt.core.compiler.annotation.nullable=org.eclipse.jdt.annotation.Nullable
+org.eclipse.jdt.core.compiler.annotation.nullable.secondary=
+org.eclipse.jdt.core.compiler.annotation.nullanalysis=disabled
+org.eclipse.jdt.core.compiler.codegen.inlineJsrBytecode=enabled
+org.eclipse.jdt.core.compiler.codegen.methodParameters=do not generate
+org.eclipse.jdt.core.compiler.codegen.targetPlatform=11
+org.eclipse.jdt.core.compiler.codegen.unusedLocal=preserve
+org.eclipse.jdt.core.compiler.compliance=11
+org.eclipse.jdt.core.compiler.debug.lineNumber=generate
+org.eclipse.jdt.core.compiler.debug.localVariable=generate
+org.eclipse.jdt.core.compiler.debug.sourceFile=generate
+org.eclipse.jdt.core.compiler.problem.APILeak=warning
+org.eclipse.jdt.core.compiler.problem.annotationSuperInterface=warning
+org.eclipse.jdt.core.compiler.problem.assertIdentifier=error
+org.eclipse.jdt.core.compiler.problem.autoboxing=ignore
+org.eclipse.jdt.core.compiler.problem.comparingIdentical=warning
+org.eclipse.jdt.core.compiler.problem.deadCode=warning
+org.eclipse.jdt.core.compiler.problem.deprecation=warning
+org.eclipse.jdt.core.compiler.problem.deprecationInDeprecatedCode=disabled
+org.eclipse.jdt.core.compiler.problem.deprecationWhenOverridingDeprecatedMethod=disabled
+org.eclipse.jdt.core.compiler.problem.discouragedReference=warning
+org.eclipse.jdt.core.compiler.problem.emptyStatement=ignore
+org.eclipse.jdt.core.compiler.problem.enablePreviewFeatures=disabled
+org.eclipse.jdt.core.compiler.problem.enumIdentifier=error
+org.eclipse.jdt.core.compiler.problem.explicitlyClosedAutoCloseable=ignore
+org.eclipse.jdt.core.compiler.problem.fallthroughCase=ignore
+org.eclipse.jdt.core.compiler.problem.fatalOptionalError=disabled
+org.eclipse.jdt.core.compiler.problem.fieldHiding=ignore
+org.eclipse.jdt.core.compiler.problem.finalParameterBound=warning
+org.eclipse.jdt.core.compiler.problem.finallyBlockNotCompletingNormally=warning
+org.eclipse.jdt.core.compiler.problem.forbiddenReference=warning
+org.eclipse.jdt.core.compiler.problem.hiddenCatchBlock=warning
+org.eclipse.jdt.core.compiler.problem.includeNullInfoFromAsserts=disabled
+org.eclipse.jdt.core.compiler.problem.incompatibleNonInheritedInterfaceMethod=warning
+org.eclipse.jdt.core.compiler.problem.incompleteEnumSwitch=warning
+org.eclipse.jdt.core.compiler.problem.indirectStaticAccess=ignore
+org.eclipse.jdt.core.compiler.problem.localVariableHiding=ignore
+org.eclipse.jdt.core.compiler.problem.methodWithConstructorName=warning
+org.eclipse.jdt.core.compiler.problem.missingDefaultCase=ignore
+org.eclipse.jdt.core.compiler.problem.missingDeprecatedAnnotation=ignore
+org.eclipse.jdt.core.compiler.problem.missingEnumCaseDespiteDefault=disabled
+org.eclipse.jdt.core.compiler.problem.missingHashCodeMethod=ignore
+org.eclipse.jdt.core.compiler.problem.missingOverrideAnnotation=warning
+org.eclipse.jdt.core.compiler.problem.missingOverrideAnnotationForInterfaceMethodImplementation=enabled
+org.eclipse.jdt.core.compiler.problem.missingSerialVersion=warning
+org.eclipse.jdt.core.compiler.problem.missingSynchronizedOnInheritedMethod=ignore
+org.eclipse.jdt.core.compiler.problem.noEffectAssignment=warning
+org.eclipse.jdt.core.compiler.problem.noImplicitStringConversion=warning
+org.eclipse.jdt.core.compiler.problem.nonExternalizedStringLiteral=ignore
+org.eclipse.jdt.core.compiler.problem.nonnullParameterAnnotationDropped=warning
+org.eclipse.jdt.core.compiler.problem.nonnullTypeVariableFromLegacyInvocation=warning
+org.eclipse.jdt.core.compiler.problem.nullAnnotationInferenceConflict=error
+org.eclipse.jdt.core.compiler.problem.nullReference=warning
+org.eclipse.jdt.core.compiler.problem.nullSpecViolation=error
+org.eclipse.jdt.core.compiler.problem.nullUncheckedConversion=warning
+org.eclipse.jdt.core.compiler.problem.overridingPackageDefaultMethod=warning
+org.eclipse.jdt.core.compiler.problem.parameterAssignment=ignore
+org.eclipse.jdt.core.compiler.problem.pessimisticNullAnalysisForFreeTypeVariables=warning
+org.eclipse.jdt.core.compiler.problem.possibleAccidentalBooleanAssignment=ignore
+org.eclipse.jdt.core.compiler.problem.potentialNullReference=ignore
+org.eclipse.jdt.core.compiler.problem.potentiallyUnclosedCloseable=ignore
+org.eclipse.jdt.core.compiler.problem.rawTypeReference=warning
+org.eclipse.jdt.core.compiler.problem.redundantNullAnnotation=warning
+org.eclipse.jdt.core.compiler.problem.redundantNullCheck=ignore
+org.eclipse.jdt.core.compiler.problem.redundantSpecificationOfTypeArguments=ignore
+org.eclipse.jdt.core.compiler.problem.redundantSuperinterface=ignore
+org.eclipse.jdt.core.compiler.problem.reportMethodCanBePotentiallyStatic=ignore
+org.eclipse.jdt.core.compiler.problem.reportMethodCanBeStatic=ignore
+org.eclipse.jdt.core.compiler.problem.reportPreviewFeatures=warning
+org.eclipse.jdt.core.compiler.problem.specialParameterHidingField=disabled
+org.eclipse.jdt.core.compiler.problem.staticAccessReceiver=warning
+org.eclipse.jdt.core.compiler.problem.suppressOptionalErrors=disabled
+org.eclipse.jdt.core.compiler.problem.suppressWarnings=enabled
+org.eclipse.jdt.core.compiler.problem.syntacticNullAnalysisForFields=disabled
+org.eclipse.jdt.core.compiler.problem.syntheticAccessEmulation=ignore
+org.eclipse.jdt.core.compiler.problem.terminalDeprecation=warning
+org.eclipse.jdt.core.compiler.problem.typeParameterHiding=warning
+org.eclipse.jdt.core.compiler.problem.unavoidableGenericTypeProblems=enabled
+org.eclipse.jdt.core.compiler.problem.uncheckedTypeOperation=warning
+org.eclipse.jdt.core.compiler.problem.unclosedCloseable=warning
+org.eclipse.jdt.core.compiler.problem.undocumentedEmptyBlock=ignore
+org.eclipse.jdt.core.compiler.problem.unhandledWarningToken=ignore
+org.eclipse.jdt.core.compiler.problem.unlikelyCollectionMethodArgumentType=warning
+org.eclipse.jdt.core.compiler.problem.unlikelyCollectionMethodArgumentTypeStrict=disabled
+org.eclipse.jdt.core.compiler.problem.unlikelyEqualsArgumentType=info
+org.eclipse.jdt.core.compiler.problem.unnecessaryElse=ignore
+org.eclipse.jdt.core.compiler.problem.unnecessaryTypeCheck=ignore
+org.eclipse.jdt.core.compiler.problem.unqualifiedFieldAccess=ignore
+org.eclipse.jdt.core.compiler.problem.unstableAutoModuleName=warning
+org.eclipse.jdt.core.compiler.problem.unusedDeclaredThrownException=ignore
+org.eclipse.jdt.core.compiler.problem.unusedDeclaredThrownExceptionExemptExceptionAndThrowable=enabled
+org.eclipse.jdt.core.compiler.problem.unusedDeclaredThrownExceptionIncludeDocCommentReference=enabled
+org.eclipse.jdt.core.compiler.problem.unusedDeclaredThrownExceptionWhenOverriding=disabled
+org.eclipse.jdt.core.compiler.problem.unusedExceptionParameter=ignore
+org.eclipse.jdt.core.compiler.problem.unusedImport=warning
+org.eclipse.jdt.core.compiler.problem.unusedLabel=warning
+org.eclipse.jdt.core.compiler.problem.unusedLocal=warning
+org.eclipse.jdt.core.compiler.problem.unusedObjectAllocation=ignore
+org.eclipse.jdt.core.compiler.problem.unusedParameter=ignore
+org.eclipse.jdt.core.compiler.problem.unusedParameterIncludeDocCommentReference=enabled
+org.eclipse.jdt.core.compiler.problem.unusedParameterWhenImplementingAbstract=disabled
+org.eclipse.jdt.core.compiler.problem.unusedParameterWhenOverridingConcrete=disabled
+org.eclipse.jdt.core.compiler.problem.unusedPrivateMember=warning
+org.eclipse.jdt.core.compiler.problem.unusedTypeParameter=ignore
+org.eclipse.jdt.core.compiler.problem.unusedWarningToken=warning
+org.eclipse.jdt.core.compiler.problem.varargsArgumentNeedCast=warning
+org.eclipse.jdt.core.compiler.processAnnotations=enabled
+org.eclipse.jdt.core.compiler.release=disabled
+org.eclipse.jdt.core.compiler.source=11
+org.eclipse.jdt.core.formatter.align_assignment_statements_on_columns=false
+org.eclipse.jdt.core.formatter.align_fields_grouping_blank_lines=2147483647
+org.eclipse.jdt.core.formatter.align_type_members_on_columns=false
+org.eclipse.jdt.core.formatter.align_variable_declarations_on_columns=false
+org.eclipse.jdt.core.formatter.align_with_spaces=false
+org.eclipse.jdt.core.formatter.alignment_for_additive_operator=16
+org.eclipse.jdt.core.formatter.alignment_for_annotations_on_enum_constant=49
+org.eclipse.jdt.core.formatter.alignment_for_annotations_on_field=49
+org.eclipse.jdt.core.formatter.alignment_for_annotations_on_local_variable=49
+org.eclipse.jdt.core.formatter.alignment_for_annotations_on_method=49
+org.eclipse.jdt.core.formatter.alignment_for_annotations_on_package=49
+org.eclipse.jdt.core.formatter.alignment_for_annotations_on_parameter=0
+org.eclipse.jdt.core.formatter.alignment_for_annotations_on_type=49
+org.eclipse.jdt.core.formatter.alignment_for_arguments_in_allocation_expression=16
+org.eclipse.jdt.core.formatter.alignment_for_arguments_in_annotation=0
+org.eclipse.jdt.core.formatter.alignment_for_arguments_in_enum_constant=16
+org.eclipse.jdt.core.formatter.alignment_for_arguments_in_explicit_constructor_call=16
+org.eclipse.jdt.core.formatter.alignment_for_arguments_in_method_invocation=16
+org.eclipse.jdt.core.formatter.alignment_for_arguments_in_qualified_allocation_expression=16
+org.eclipse.jdt.core.formatter.alignment_for_assertion_message=0
+org.eclipse.jdt.core.formatter.alignment_for_assignment=0
+org.eclipse.jdt.core.formatter.alignment_for_bitwise_operator=16
+org.eclipse.jdt.core.formatter.alignment_for_compact_if=16
+org.eclipse.jdt.core.formatter.alignment_for_compact_loops=16
+org.eclipse.jdt.core.formatter.alignment_for_conditional_expression=80
+org.eclipse.jdt.core.formatter.alignment_for_conditional_expression_chain=0
+org.eclipse.jdt.core.formatter.alignment_for_enum_constants=16
+org.eclipse.jdt.core.formatter.alignment_for_expressions_in_array_initializer=16
+org.eclipse.jdt.core.formatter.alignment_for_expressions_in_for_loop_header=0
+org.eclipse.jdt.core.formatter.alignment_for_logical_operator=16
+org.eclipse.jdt.core.formatter.alignment_for_method_declaration=0
+org.eclipse.jdt.core.formatter.alignment_for_module_statements=16
+org.eclipse.jdt.core.formatter.alignment_for_multiple_fields=16
+org.eclipse.jdt.core.formatter.alignment_for_multiplicative_operator=16
+org.eclipse.jdt.core.formatter.alignment_for_parameterized_type_references=0
+org.eclipse.jdt.core.formatter.alignment_for_parameters_in_constructor_declaration=16
+org.eclipse.jdt.core.formatter.alignment_for_parameters_in_method_declaration=16
+org.eclipse.jdt.core.formatter.alignment_for_record_components=16
+org.eclipse.jdt.core.formatter.alignment_for_relational_operator=0
+org.eclipse.jdt.core.formatter.alignment_for_resources_in_try=80
+org.eclipse.jdt.core.formatter.alignment_for_selector_in_method_invocation=16
+org.eclipse.jdt.core.formatter.alignment_for_shift_operator=0
+org.eclipse.jdt.core.formatter.alignment_for_string_concatenation=16
+org.eclipse.jdt.core.formatter.alignment_for_superclass_in_type_declaration=16
+org.eclipse.jdt.core.formatter.alignment_for_superinterfaces_in_enum_declaration=16
+org.eclipse.jdt.core.formatter.alignment_for_superinterfaces_in_record_declaration=16
+org.eclipse.jdt.core.formatter.alignment_for_superinterfaces_in_type_declaration=16
+org.eclipse.jdt.core.formatter.alignment_for_throws_clause_in_constructor_declaration=16
+org.eclipse.jdt.core.formatter.alignment_for_throws_clause_in_method_declaration=16
+org.eclipse.jdt.core.formatter.alignment_for_type_annotations=0
+org.eclipse.jdt.core.formatter.alignment_for_type_arguments=0
+org.eclipse.jdt.core.formatter.alignment_for_type_parameters=0
+org.eclipse.jdt.core.formatter.alignment_for_union_type_in_multicatch=16
+org.eclipse.jdt.core.formatter.blank_lines_after_imports=1
+org.eclipse.jdt.core.formatter.blank_lines_after_last_class_body_declaration=0
+org.eclipse.jdt.core.formatter.blank_lines_after_package=1
+org.eclipse.jdt.core.formatter.blank_lines_before_abstract_method=1
+org.eclipse.jdt.core.formatter.blank_lines_before_field=0
+org.eclipse.jdt.core.formatter.blank_lines_before_first_class_body_declaration=0
+org.eclipse.jdt.core.formatter.blank_lines_before_imports=1
+org.eclipse.jdt.core.formatter.blank_lines_before_member_type=1
+org.eclipse.jdt.core.formatter.blank_lines_before_method=1
+org.eclipse.jdt.core.formatter.blank_lines_before_new_chunk=1
+org.eclipse.jdt.core.formatter.blank_lines_before_package=0
+org.eclipse.jdt.core.formatter.blank_lines_between_import_groups=1
+org.eclipse.jdt.core.formatter.blank_lines_between_statement_group_in_switch=0
+org.eclipse.jdt.core.formatter.blank_lines_between_type_declarations=1
+org.eclipse.jdt.core.formatter.brace_position_for_annotation_type_declaration=end_of_line
+org.eclipse.jdt.core.formatter.brace_position_for_anonymous_type_declaration=end_of_line
+org.eclipse.jdt.core.formatter.brace_position_for_array_initializer=end_of_line
+org.eclipse.jdt.core.formatter.brace_position_for_block=end_of_line
+org.eclipse.jdt.core.formatter.brace_position_for_block_in_case=end_of_line
+org.eclipse.jdt.core.formatter.brace_position_for_constructor_declaration=end_of_line
+org.eclipse.jdt.core.formatter.brace_position_for_enum_constant=end_of_line
+org.eclipse.jdt.core.formatter.brace_position_for_enum_declaration=end_of_line
+org.eclipse.jdt.core.formatter.brace_position_for_lambda_body=end_of_line
+org.eclipse.jdt.core.formatter.brace_position_for_method_declaration=end_of_line
+org.eclipse.jdt.core.formatter.brace_position_for_record_constructor=end_of_line
+org.eclipse.jdt.core.formatter.brace_position_for_record_declaration=end_of_line
+org.eclipse.jdt.core.formatter.brace_position_for_switch=end_of_line
+org.eclipse.jdt.core.formatter.brace_position_for_type_declaration=end_of_line
+org.eclipse.jdt.core.formatter.comment.align_tags_descriptions_grouped=true
+org.eclipse.jdt.core.formatter.comment.align_tags_names_descriptions=false
+org.eclipse.jdt.core.formatter.comment.clear_blank_lines_in_block_comment=false
+org.eclipse.jdt.core.formatter.comment.clear_blank_lines_in_javadoc_comment=false
+org.eclipse.jdt.core.formatter.comment.count_line_length_from_starting_position=false
+org.eclipse.jdt.core.formatter.comment.format_block_comments=true
+org.eclipse.jdt.core.formatter.comment.format_header=false
+org.eclipse.jdt.core.formatter.comment.format_html=true
+org.eclipse.jdt.core.formatter.comment.format_javadoc_comments=true
+org.eclipse.jdt.core.formatter.comment.format_line_comments=true
+org.eclipse.jdt.core.formatter.comment.format_source_code=true
+org.eclipse.jdt.core.formatter.comment.indent_parameter_description=false
+org.eclipse.jdt.core.formatter.comment.indent_root_tags=false
+org.eclipse.jdt.core.formatter.comment.indent_tag_description=false
+org.eclipse.jdt.core.formatter.comment.insert_new_line_before_root_tags=insert
+org.eclipse.jdt.core.formatter.comment.insert_new_line_between_different_tags=do not insert
+org.eclipse.jdt.core.formatter.comment.insert_new_line_for_parameter=do not insert
+org.eclipse.jdt.core.formatter.comment.line_length=120
+org.eclipse.jdt.core.formatter.comment.new_lines_at_block_boundaries=true
+org.eclipse.jdt.core.formatter.comment.new_lines_at_javadoc_boundaries=true
+org.eclipse.jdt.core.formatter.comment.preserve_white_space_between_code_and_line_comments=false
+org.eclipse.jdt.core.formatter.compact_else_if=true
+org.eclipse.jdt.core.formatter.continuation_indentation=2
+org.eclipse.jdt.core.formatter.continuation_indentation_for_array_initializer=2
+org.eclipse.jdt.core.formatter.disabling_tag=@formatter\:off
+org.eclipse.jdt.core.formatter.enabling_tag=@formatter\:on
+org.eclipse.jdt.core.formatter.format_guardian_clause_on_one_line=false
+org.eclipse.jdt.core.formatter.format_line_comment_starting_on_first_column=false
+org.eclipse.jdt.core.formatter.indent_body_declarations_compare_to_annotation_declaration_header=true
+org.eclipse.jdt.core.formatter.indent_body_declarations_compare_to_enum_constant_header=true
+org.eclipse.jdt.core.formatter.indent_body_declarations_compare_to_enum_declaration_header=true
+org.eclipse.jdt.core.formatter.indent_body_declarations_compare_to_record_header=true
+org.eclipse.jdt.core.formatter.indent_body_declarations_compare_to_type_header=true
+org.eclipse.jdt.core.formatter.indent_breaks_compare_to_cases=true
+org.eclipse.jdt.core.formatter.indent_empty_lines=false
+org.eclipse.jdt.core.formatter.indent_statements_compare_to_block=true
+org.eclipse.jdt.core.formatter.indent_statements_compare_to_body=true
+org.eclipse.jdt.core.formatter.indent_switchstatements_compare_to_cases=true
+org.eclipse.jdt.core.formatter.indent_switchstatements_compare_to_switch=false
+org.eclipse.jdt.core.formatter.indentation.size=4
+org.eclipse.jdt.core.formatter.insert_new_line_after_annotation_on_enum_constant=insert
+org.eclipse.jdt.core.formatter.insert_new_line_after_annotation_on_field=insert
+org.eclipse.jdt.core.formatter.insert_new_line_after_annotation_on_local_variable=insert
+org.eclipse.jdt.core.formatter.insert_new_line_after_annotation_on_method=insert
+org.eclipse.jdt.core.formatter.insert_new_line_after_annotation_on_package=insert
+org.eclipse.jdt.core.formatter.insert_new_line_after_annotation_on_parameter=do not insert
+org.eclipse.jdt.core.formatter.insert_new_line_after_annotation_on_type=insert
+org.eclipse.jdt.core.formatter.insert_new_line_after_label=do not insert
+org.eclipse.jdt.core.formatter.insert_new_line_after_opening_brace_in_array_initializer=do not insert
+org.eclipse.jdt.core.formatter.insert_new_line_after_type_annotation=do not insert
+org.eclipse.jdt.core.formatter.insert_new_line_at_end_of_file_if_missing=do not insert
+org.eclipse.jdt.core.formatter.insert_new_line_before_catch_in_try_statement=do not insert
+org.eclipse.jdt.core.formatter.insert_new_line_before_closing_brace_in_array_initializer=do not insert
+org.eclipse.jdt.core.formatter.insert_new_line_before_else_in_if_statement=do not insert
+org.eclipse.jdt.core.formatter.insert_new_line_before_finally_in_try_statement=do not insert
+org.eclipse.jdt.core.formatter.insert_new_line_before_while_in_do_statement=do not insert
+org.eclipse.jdt.core.formatter.insert_space_after_additive_operator=insert
+org.eclipse.jdt.core.formatter.insert_space_after_and_in_type_parameter=insert
+org.eclipse.jdt.core.formatter.insert_space_after_arrow_in_switch_case=insert
+org.eclipse.jdt.core.formatter.insert_space_after_arrow_in_switch_default=insert
+org.eclipse.jdt.core.formatter.insert_space_after_assignment_operator=insert
+org.eclipse.jdt.core.formatter.insert_space_after_at_in_annotation=do not insert
+org.eclipse.jdt.core.formatter.insert_space_after_at_in_annotation_type_declaration=do not insert
+org.eclipse.jdt.core.formatter.insert_space_after_bitwise_operator=insert
+org.eclipse.jdt.core.formatter.insert_space_after_closing_angle_bracket_in_type_arguments=do not insert
+org.eclipse.jdt.core.formatter.insert_space_after_closing_angle_bracket_in_type_parameters=insert
+org.eclipse.jdt.core.formatter.insert_space_after_closing_brace_in_block=insert
+org.eclipse.jdt.core.formatter.insert_space_after_closing_paren_in_cast=insert
+org.eclipse.jdt.core.formatter.insert_space_after_colon_in_assert=insert
+org.eclipse.jdt.core.formatter.insert_space_after_colon_in_case=insert
+org.eclipse.jdt.core.formatter.insert_space_after_colon_in_conditional=insert
+org.eclipse.jdt.core.formatter.insert_space_after_colon_in_for=insert
+org.eclipse.jdt.core.formatter.insert_space_after_colon_in_labeled_statement=insert
+org.eclipse.jdt.core.formatter.insert_space_after_comma_in_allocation_expression=insert
+org.eclipse.jdt.core.formatter.insert_space_after_comma_in_annotation=insert
+org.eclipse.jdt.core.formatter.insert_space_after_comma_in_array_initializer=insert
+org.eclipse.jdt.core.formatter.insert_space_after_comma_in_constructor_declaration_parameters=insert
+org.eclipse.jdt.core.formatter.insert_space_after_comma_in_constructor_declaration_throws=insert
+org.eclipse.jdt.core.formatter.insert_space_after_comma_in_enum_constant_arguments=insert
+org.eclipse.jdt.core.formatter.insert_space_after_comma_in_enum_declarations=insert
+org.eclipse.jdt.core.formatter.insert_space_after_comma_in_explicitconstructorcall_arguments=insert
+org.eclipse.jdt.core.formatter.insert_space_after_comma_in_for_increments=insert
+org.eclipse.jdt.core.formatter.insert_space_after_comma_in_for_inits=insert
+org.eclipse.jdt.core.formatter.insert_space_after_comma_in_method_declaration_parameters=insert
+org.eclipse.jdt.core.formatter.insert_space_after_comma_in_method_declaration_throws=insert
+org.eclipse.jdt.core.formatter.insert_space_after_comma_in_method_invocation_arguments=insert
+org.eclipse.jdt.core.formatter.insert_space_after_comma_in_multiple_field_declarations=insert
+org.eclipse.jdt.core.formatter.insert_space_after_comma_in_multiple_local_declarations=insert
+org.eclipse.jdt.core.formatter.insert_space_after_comma_in_parameterized_type_reference=insert
+org.eclipse.jdt.core.formatter.insert_space_after_comma_in_record_components=insert
+org.eclipse.jdt.core.formatter.insert_space_after_comma_in_superinterfaces=insert
+org.eclipse.jdt.core.formatter.insert_space_after_comma_in_switch_case_expressions=insert
+org.eclipse.jdt.core.formatter.insert_space_after_comma_in_type_arguments=insert
+org.eclipse.jdt.core.formatter.insert_space_after_comma_in_type_parameters=insert
+org.eclipse.jdt.core.formatter.insert_space_after_ellipsis=insert
+org.eclipse.jdt.core.formatter.insert_space_after_lambda_arrow=insert
+org.eclipse.jdt.core.formatter.insert_space_after_logical_operator=insert
+org.eclipse.jdt.core.formatter.insert_space_after_multiplicative_operator=insert
+org.eclipse.jdt.core.formatter.insert_space_after_not_operator=do not insert
+org.eclipse.jdt.core.formatter.insert_space_after_opening_angle_bracket_in_parameterized_type_reference=do not insert
+org.eclipse.jdt.core.formatter.insert_space_after_opening_angle_bracket_in_type_arguments=do not insert
+org.eclipse.jdt.core.formatter.insert_space_after_opening_angle_bracket_in_type_parameters=do not insert
+org.eclipse.jdt.core.formatter.insert_space_after_opening_brace_in_array_initializer=insert
+org.eclipse.jdt.core.formatter.insert_space_after_opening_bracket_in_array_allocation_expression=do not insert
+org.eclipse.jdt.core.formatter.insert_space_after_opening_bracket_in_array_reference=do not insert
+org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_annotation=do not insert
+org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_cast=do not insert
+org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_catch=do not insert
+org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_constructor_declaration=do not insert
+org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_enum_constant=do not insert
+org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_for=do not insert
+org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_if=do not insert
+org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_method_declaration=do not insert
+org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_method_invocation=do not insert
+org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_parenthesized_expression=do not insert
+org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_record_declaration=do not insert
+org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_switch=do not insert
+org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_synchronized=do not insert
+org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_try=do not insert
+org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_while=do not insert
+org.eclipse.jdt.core.formatter.insert_space_after_postfix_operator=do not insert
+org.eclipse.jdt.core.formatter.insert_space_after_prefix_operator=do not insert
+org.eclipse.jdt.core.formatter.insert_space_after_question_in_conditional=insert
+org.eclipse.jdt.core.formatter.insert_space_after_question_in_wildcard=do not insert
+org.eclipse.jdt.core.formatter.insert_space_after_relational_operator=insert
+org.eclipse.jdt.core.formatter.insert_space_after_semicolon_in_for=insert
+org.eclipse.jdt.core.formatter.insert_space_after_semicolon_in_try_resources=insert
+org.eclipse.jdt.core.formatter.insert_space_after_shift_operator=insert
+org.eclipse.jdt.core.formatter.insert_space_after_string_concatenation=insert
+org.eclipse.jdt.core.formatter.insert_space_after_unary_operator=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_additive_operator=insert
+org.eclipse.jdt.core.formatter.insert_space_before_and_in_type_parameter=insert
+org.eclipse.jdt.core.formatter.insert_space_before_arrow_in_switch_case=insert
+org.eclipse.jdt.core.formatter.insert_space_before_arrow_in_switch_default=insert
+org.eclipse.jdt.core.formatter.insert_space_before_assignment_operator=insert
+org.eclipse.jdt.core.formatter.insert_space_before_at_in_annotation_type_declaration=insert
+org.eclipse.jdt.core.formatter.insert_space_before_bitwise_operator=insert
+org.eclipse.jdt.core.formatter.insert_space_before_closing_angle_bracket_in_parameterized_type_reference=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_closing_angle_bracket_in_type_arguments=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_closing_angle_bracket_in_type_parameters=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_closing_brace_in_array_initializer=insert
+org.eclipse.jdt.core.formatter.insert_space_before_closing_bracket_in_array_allocation_expression=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_closing_bracket_in_array_reference=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_annotation=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_cast=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_catch=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_constructor_declaration=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_enum_constant=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_for=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_if=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_method_declaration=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_method_invocation=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_parenthesized_expression=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_record_declaration=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_switch=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_synchronized=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_try=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_while=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_colon_in_assert=insert
+org.eclipse.jdt.core.formatter.insert_space_before_colon_in_case=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_colon_in_conditional=insert
+org.eclipse.jdt.core.formatter.insert_space_before_colon_in_default=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_colon_in_for=insert
+org.eclipse.jdt.core.formatter.insert_space_before_colon_in_labeled_statement=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_comma_in_allocation_expression=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_comma_in_annotation=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_comma_in_array_initializer=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_comma_in_constructor_declaration_parameters=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_comma_in_constructor_declaration_throws=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_comma_in_enum_constant_arguments=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_comma_in_enum_declarations=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_comma_in_explicitconstructorcall_arguments=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_comma_in_for_increments=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_comma_in_for_inits=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_comma_in_method_declaration_parameters=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_comma_in_method_declaration_throws=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_comma_in_method_invocation_arguments=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_comma_in_multiple_field_declarations=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_comma_in_multiple_local_declarations=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_comma_in_parameterized_type_reference=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_comma_in_record_components=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_comma_in_superinterfaces=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_comma_in_switch_case_expressions=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_comma_in_type_arguments=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_comma_in_type_parameters=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_ellipsis=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_lambda_arrow=insert
+org.eclipse.jdt.core.formatter.insert_space_before_logical_operator=insert
+org.eclipse.jdt.core.formatter.insert_space_before_multiplicative_operator=insert
+org.eclipse.jdt.core.formatter.insert_space_before_opening_angle_bracket_in_parameterized_type_reference=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_opening_angle_bracket_in_type_arguments=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_opening_angle_bracket_in_type_parameters=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_annotation_type_declaration=insert
+org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_anonymous_type_declaration=insert
+org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_array_initializer=insert
+org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_block=insert
+org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_constructor_declaration=insert
+org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_enum_constant=insert
+org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_enum_declaration=insert
+org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_method_declaration=insert
+org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_record_constructor=insert
+org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_record_declaration=insert
+org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_switch=insert
+org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_type_declaration=insert
+org.eclipse.jdt.core.formatter.insert_space_before_opening_bracket_in_array_allocation_expression=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_opening_bracket_in_array_reference=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_opening_bracket_in_array_type_reference=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_annotation=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_annotation_type_member_declaration=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_catch=insert
+org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_constructor_declaration=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_enum_constant=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_for=insert
+org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_if=insert
+org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_method_declaration=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_method_invocation=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_parenthesized_expression=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_record_declaration=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_switch=insert
+org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_synchronized=insert
+org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_try=insert
+org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_while=insert
+org.eclipse.jdt.core.formatter.insert_space_before_parenthesized_expression_in_return=insert
+org.eclipse.jdt.core.formatter.insert_space_before_parenthesized_expression_in_throw=insert
+org.eclipse.jdt.core.formatter.insert_space_before_postfix_operator=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_prefix_operator=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_question_in_conditional=insert
+org.eclipse.jdt.core.formatter.insert_space_before_question_in_wildcard=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_relational_operator=insert
+org.eclipse.jdt.core.formatter.insert_space_before_semicolon=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_semicolon_in_for=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_semicolon_in_try_resources=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_shift_operator=insert
+org.eclipse.jdt.core.formatter.insert_space_before_string_concatenation=insert
+org.eclipse.jdt.core.formatter.insert_space_before_unary_operator=do not insert
+org.eclipse.jdt.core.formatter.insert_space_between_brackets_in_array_type_reference=do not insert
+org.eclipse.jdt.core.formatter.insert_space_between_empty_braces_in_array_initializer=do not insert
+org.eclipse.jdt.core.formatter.insert_space_between_empty_brackets_in_array_allocation_expression=do not insert
+org.eclipse.jdt.core.formatter.insert_space_between_empty_parens_in_annotation_type_member_declaration=do not insert
+org.eclipse.jdt.core.formatter.insert_space_between_empty_parens_in_constructor_declaration=do not insert
+org.eclipse.jdt.core.formatter.insert_space_between_empty_parens_in_enum_constant=do not insert
+org.eclipse.jdt.core.formatter.insert_space_between_empty_parens_in_method_declaration=do not insert
+org.eclipse.jdt.core.formatter.insert_space_between_empty_parens_in_method_invocation=do not insert
+org.eclipse.jdt.core.formatter.join_lines_in_comments=true
+org.eclipse.jdt.core.formatter.join_wrapped_lines=true
+org.eclipse.jdt.core.formatter.keep_annotation_declaration_on_one_line=one_line_never
+org.eclipse.jdt.core.formatter.keep_anonymous_type_declaration_on_one_line=one_line_never
+org.eclipse.jdt.core.formatter.keep_code_block_on_one_line=one_line_never
+org.eclipse.jdt.core.formatter.keep_else_statement_on_same_line=false
+org.eclipse.jdt.core.formatter.keep_empty_array_initializer_on_one_line=false
+org.eclipse.jdt.core.formatter.keep_enum_constant_declaration_on_one_line=one_line_never
+org.eclipse.jdt.core.formatter.keep_enum_declaration_on_one_line=one_line_never
+org.eclipse.jdt.core.formatter.keep_if_then_body_block_on_one_line=one_line_never
+org.eclipse.jdt.core.formatter.keep_imple_if_on_one_line=false
+org.eclipse.jdt.core.formatter.keep_lambda_body_block_on_one_line=one_line_never
+org.eclipse.jdt.core.formatter.keep_loop_body_block_on_one_line=one_line_never
+org.eclipse.jdt.core.formatter.keep_method_body_on_one_line=one_line_never
+org.eclipse.jdt.core.formatter.keep_record_constructor_on_one_line=one_line_never
+org.eclipse.jdt.core.formatter.keep_record_declaration_on_one_line=one_line_never
+org.eclipse.jdt.core.formatter.keep_simple_do_while_body_on_same_line=false
+org.eclipse.jdt.core.formatter.keep_simple_for_body_on_same_line=false
+org.eclipse.jdt.core.formatter.keep_simple_getter_setter_on_one_line=false
+org.eclipse.jdt.core.formatter.keep_simple_while_body_on_same_line=false
+org.eclipse.jdt.core.formatter.keep_then_statement_on_same_line=false
+org.eclipse.jdt.core.formatter.keep_type_declaration_on_one_line=one_line_never
+org.eclipse.jdt.core.formatter.lineSplit=120
+org.eclipse.jdt.core.formatter.never_indent_block_comments_on_first_column=false
+org.eclipse.jdt.core.formatter.never_indent_line_comments_on_first_column=false
+org.eclipse.jdt.core.formatter.number_of_blank_lines_after_code_block=0
+org.eclipse.jdt.core.formatter.number_of_blank_lines_at_beginning_of_code_block=0
+org.eclipse.jdt.core.formatter.number_of_blank_lines_at_beginning_of_method_body=0
+org.eclipse.jdt.core.formatter.number_of_blank_lines_at_end_of_code_block=0
+org.eclipse.jdt.core.formatter.number_of_blank_lines_at_end_of_method_body=0
+org.eclipse.jdt.core.formatter.number_of_blank_lines_before_code_block=0
+org.eclipse.jdt.core.formatter.number_of_empty_lines_to_preserve=1
+org.eclipse.jdt.core.formatter.parentheses_positions_in_annotation=common_lines
+org.eclipse.jdt.core.formatter.parentheses_positions_in_catch_clause=common_lines
+org.eclipse.jdt.core.formatter.parentheses_positions_in_enum_constant_declaration=common_lines
+org.eclipse.jdt.core.formatter.parentheses_positions_in_for_statment=common_lines
+org.eclipse.jdt.core.formatter.parentheses_positions_in_if_while_statement=common_lines
+org.eclipse.jdt.core.formatter.parentheses_positions_in_lambda_declaration=common_lines
+org.eclipse.jdt.core.formatter.parentheses_positions_in_method_delcaration=common_lines
+org.eclipse.jdt.core.formatter.parentheses_positions_in_method_invocation=common_lines
+org.eclipse.jdt.core.formatter.parentheses_positions_in_record_declaration=common_lines
+org.eclipse.jdt.core.formatter.parentheses_positions_in_switch_statement=common_lines
+org.eclipse.jdt.core.formatter.parentheses_positions_in_try_clause=common_lines
+org.eclipse.jdt.core.formatter.put_empty_statement_on_new_line=true
+org.eclipse.jdt.core.formatter.tabulation.char=space
+org.eclipse.jdt.core.formatter.tabulation.size=4
+org.eclipse.jdt.core.formatter.text_block_indentation=0
+org.eclipse.jdt.core.formatter.use_on_off_tags=false
+org.eclipse.jdt.core.formatter.use_tabs_only_for_leading_indentations=true
+org.eclipse.jdt.core.formatter.wrap_before_additive_operator=true
+org.eclipse.jdt.core.formatter.wrap_before_assertion_message_operator=true
+org.eclipse.jdt.core.formatter.wrap_before_assignment_operator=false
+org.eclipse.jdt.core.formatter.wrap_before_bitwise_operator=true
+org.eclipse.jdt.core.formatter.wrap_before_conditional_operator=true
+org.eclipse.jdt.core.formatter.wrap_before_logical_operator=true
+org.eclipse.jdt.core.formatter.wrap_before_multiplicative_operator=true
+org.eclipse.jdt.core.formatter.wrap_before_or_operator_multicatch=true
+org.eclipse.jdt.core.formatter.wrap_before_relational_operator=true
+org.eclipse.jdt.core.formatter.wrap_before_shift_operator=true
+org.eclipse.jdt.core.formatter.wrap_before_string_concatenation=true
+org.eclipse.jdt.core.formatter.wrap_outer_expressions_when_nested=true
+org.eclipse.jdt.core.javaFormatter=org.eclipse.jdt.core.defaultJavaFormatter
diff --git a/.settings/org.eclipse.jdt.ui.prefs b/.settings/org.eclipse.jdt.ui.prefs
new file mode 100644
index 0000000..1add06a
--- /dev/null
+++ b/.settings/org.eclipse.jdt.ui.prefs
@@ -0,0 +1,205 @@
+cleanup.add_default_serial_version_id=true
+cleanup.add_generated_serial_version_id=false
+cleanup.add_missing_annotations=true
+cleanup.add_missing_deprecated_annotations=true
+cleanup.add_missing_methods=false
+cleanup.add_missing_nls_tags=false
+cleanup.add_missing_override_annotations=true
+cleanup.add_missing_override_annotations_interface_methods=true
+cleanup.add_serial_version_id=false
+cleanup.always_use_blocks=true
+cleanup.always_use_parentheses_in_expressions=false
+cleanup.always_use_this_for_non_static_field_access=true
+cleanup.always_use_this_for_non_static_method_access=false
+cleanup.convert_functional_interfaces=true
+cleanup.convert_to_enhanced_for_loop=true
+cleanup.correct_indentation=true
+cleanup.format_source_code=true
+cleanup.format_source_code_changes_only=false
+cleanup.insert_inferred_type_arguments=false
+cleanup.make_local_variable_final=true
+cleanup.make_parameters_final=true
+cleanup.make_private_fields_final=true
+cleanup.make_type_abstract_if_missing_method=false
+cleanup.make_variable_declarations_final=true
+cleanup.never_use_blocks=false
+cleanup.never_use_parentheses_in_expressions=true
+cleanup.organize_imports=false
+cleanup.qualify_static_field_accesses_with_declaring_class=false
+cleanup.qualify_static_member_accesses_through_instances_with_declaring_class=true
+cleanup.qualify_static_member_accesses_through_subtypes_with_declaring_class=true
+cleanup.qualify_static_member_accesses_with_declaring_class=true
+cleanup.qualify_static_method_accesses_with_declaring_class=false
+cleanup.remove_private_constructors=true
+cleanup.remove_redundant_modifiers=false
+cleanup.remove_redundant_semicolons=true
+cleanup.remove_redundant_type_arguments=true
+cleanup.remove_trailing_whitespaces=true
+cleanup.remove_trailing_whitespaces_all=true
+cleanup.remove_trailing_whitespaces_ignore_empty=false
+cleanup.remove_unnecessary_casts=true
+cleanup.remove_unnecessary_nls_tags=true
+cleanup.remove_unused_imports=true
+cleanup.remove_unused_local_variables=false
+cleanup.remove_unused_private_fields=true
+cleanup.remove_unused_private_members=true
+cleanup.remove_unused_private_methods=true
+cleanup.remove_unused_private_types=true
+cleanup.sort_members=false
+cleanup.sort_members_all=false
+cleanup.use_anonymous_class_creation=false
+cleanup.use_blocks=true
+cleanup.use_blocks_only_for_return_and_throw=false
+cleanup.use_lambda=true
+cleanup.use_parentheses_in_expressions=true
+cleanup.use_this_for_non_static_field_access=true
+cleanup.use_this_for_non_static_field_access_only_if_necessary=false
+cleanup.use_this_for_non_static_method_access=false
+cleanup.use_this_for_non_static_method_access_only_if_necessary=true
+cleanup_profile=_Exasol
+cleanup_settings_version=2
+eclipse.preferences.version=1
+editor_save_participant_org.eclipse.jdt.ui.postsavelistener.cleanup=true
+formatter_profile=_Exasol
+formatter_settings_version=21
+org.eclipse.jdt.ui.ignorelowercasenames=true
+org.eclipse.jdt.ui.importorder=java;javax;org;com;
+org.eclipse.jdt.ui.ondemandthreshold=3
+org.eclipse.jdt.ui.staticondemandthreshold=3
+sp_cleanup.add_all=false
+sp_cleanup.add_default_serial_version_id=true
+sp_cleanup.add_generated_serial_version_id=false
+sp_cleanup.add_missing_annotations=true
+sp_cleanup.add_missing_deprecated_annotations=true
+sp_cleanup.add_missing_methods=false
+sp_cleanup.add_missing_nls_tags=false
+sp_cleanup.add_missing_override_annotations=true
+sp_cleanup.add_missing_override_annotations_interface_methods=true
+sp_cleanup.add_serial_version_id=false
+sp_cleanup.always_use_blocks=true
+sp_cleanup.always_use_parentheses_in_expressions=true
+sp_cleanup.always_use_this_for_non_static_field_access=true
+sp_cleanup.always_use_this_for_non_static_method_access=false
+sp_cleanup.array_with_curly=false
+sp_cleanup.arrays_fill=false
+sp_cleanup.bitwise_conditional_expression=false
+sp_cleanup.boolean_literal=false
+sp_cleanup.boolean_value_rather_than_comparison=false
+sp_cleanup.break_loop=false
+sp_cleanup.collection_cloning=false
+sp_cleanup.comparing_on_criteria=false
+sp_cleanup.comparison_statement=false
+sp_cleanup.controlflow_merge=false
+sp_cleanup.convert_functional_interfaces=true
+sp_cleanup.convert_to_enhanced_for_loop=true
+sp_cleanup.convert_to_enhanced_for_loop_if_loop_var_used=false
+sp_cleanup.convert_to_switch_expressions=false
+sp_cleanup.correct_indentation=true
+sp_cleanup.do_while_rather_than_while=false
+sp_cleanup.double_negation=false
+sp_cleanup.else_if=false
+sp_cleanup.embedded_if=false
+sp_cleanup.evaluate_nullable=false
+sp_cleanup.extract_increment=false
+sp_cleanup.format_source_code=true
+sp_cleanup.format_source_code_changes_only=false
+sp_cleanup.hash=false
+sp_cleanup.if_condition=false
+sp_cleanup.insert_inferred_type_arguments=false
+sp_cleanup.instanceof=false
+sp_cleanup.instanceof_keyword=false
+sp_cleanup.invert_equals=false
+sp_cleanup.join=false
+sp_cleanup.lazy_logical_operator=false
+sp_cleanup.make_local_variable_final=true
+sp_cleanup.make_parameters_final=true
+sp_cleanup.make_private_fields_final=true
+sp_cleanup.make_type_abstract_if_missing_method=false
+sp_cleanup.make_variable_declarations_final=true
+sp_cleanup.map_cloning=false
+sp_cleanup.merge_conditional_blocks=false
+sp_cleanup.multi_catch=false
+sp_cleanup.never_use_blocks=false
+sp_cleanup.never_use_parentheses_in_expressions=false
+sp_cleanup.no_string_creation=false
+sp_cleanup.no_super=false
+sp_cleanup.number_suffix=false
+sp_cleanup.objects_equals=false
+sp_cleanup.on_save_use_additional_actions=true
+sp_cleanup.one_if_rather_than_duplicate_blocks_that_fall_through=false
+sp_cleanup.operand_factorization=false
+sp_cleanup.organize_imports=true
+sp_cleanup.overridden_assignment=false
+sp_cleanup.plain_replacement=false
+sp_cleanup.precompile_regex=false
+sp_cleanup.primitive_comparison=false
+sp_cleanup.primitive_parsing=false
+sp_cleanup.primitive_rather_than_wrapper=false
+sp_cleanup.primitive_serialization=false
+sp_cleanup.pull_out_if_from_if_else=false
+sp_cleanup.pull_up_assignment=false
+sp_cleanup.push_down_negation=false
+sp_cleanup.qualify_static_field_accesses_with_declaring_class=false
+sp_cleanup.qualify_static_member_accesses_through_instances_with_declaring_class=true
+sp_cleanup.qualify_static_member_accesses_through_subtypes_with_declaring_class=true
+sp_cleanup.qualify_static_member_accesses_with_declaring_class=true
+sp_cleanup.qualify_static_method_accesses_with_declaring_class=false
+sp_cleanup.reduce_indentation=false
+sp_cleanup.redundant_comparator=false
+sp_cleanup.redundant_falling_through_block_end=false
+sp_cleanup.remove_private_constructors=true
+sp_cleanup.remove_redundant_modifiers=false
+sp_cleanup.remove_redundant_semicolons=true
+sp_cleanup.remove_redundant_type_arguments=true
+sp_cleanup.remove_trailing_whitespaces=true
+sp_cleanup.remove_trailing_whitespaces_all=true
+sp_cleanup.remove_trailing_whitespaces_ignore_empty=false
+sp_cleanup.remove_unnecessary_array_creation=false
+sp_cleanup.remove_unnecessary_casts=true
+sp_cleanup.remove_unnecessary_nls_tags=true
+sp_cleanup.remove_unused_imports=true
+sp_cleanup.remove_unused_local_variables=false
+sp_cleanup.remove_unused_private_fields=true
+sp_cleanup.remove_unused_private_members=false
+sp_cleanup.remove_unused_private_methods=true
+sp_cleanup.remove_unused_private_types=true
+sp_cleanup.return_expression=false
+sp_cleanup.simplify_lambda_expression_and_method_ref=false
+sp_cleanup.single_used_field=false
+sp_cleanup.sort_members=false
+sp_cleanup.sort_members_all=false
+sp_cleanup.standard_comparison=false
+sp_cleanup.static_inner_class=false
+sp_cleanup.strictly_equal_or_different=false
+sp_cleanup.stringbuffer_to_stringbuilder=false
+sp_cleanup.stringbuilder=false
+sp_cleanup.stringbuilder_for_local_vars=false
+sp_cleanup.substring=false
+sp_cleanup.switch=false
+sp_cleanup.system_property=false
+sp_cleanup.system_property_boolean=false
+sp_cleanup.system_property_file_encoding=false
+sp_cleanup.system_property_file_separator=false
+sp_cleanup.system_property_line_separator=false
+sp_cleanup.system_property_path_separator=false
+sp_cleanup.ternary_operator=false
+sp_cleanup.try_with_resource=false
+sp_cleanup.unlooped_while=false
+sp_cleanup.unreachable_block=false
+sp_cleanup.use_anonymous_class_creation=false
+sp_cleanup.use_autoboxing=false
+sp_cleanup.use_blocks=true
+sp_cleanup.use_blocks_only_for_return_and_throw=false
+sp_cleanup.use_directly_map_method=false
+sp_cleanup.use_lambda=true
+sp_cleanup.use_parentheses_in_expressions=true
+sp_cleanup.use_string_is_blank=false
+sp_cleanup.use_this_for_non_static_field_access=true
+sp_cleanup.use_this_for_non_static_field_access_only_if_necessary=false
+sp_cleanup.use_this_for_non_static_method_access=false
+sp_cleanup.use_this_for_non_static_method_access_only_if_necessary=true
+sp_cleanup.use_unboxing=false
+sp_cleanup.use_var=false
+sp_cleanup.useless_continue=false
+sp_cleanup.useless_return=false
+sp_cleanup.valueof_rather_than_instantiation=false
diff --git a/.vscode/settings.json b/.vscode/settings.json
new file mode 100644
index 0000000..f938933
--- /dev/null
+++ b/.vscode/settings.json
@@ -0,0 +1,17 @@
+{
+ "editor.formatOnSave": true,
+ "editor.codeActionsOnSave": {
+ "source.organizeImports": true,
+ "source.generate.finalModifiers": true,
+ "source.fixAll": true
+ },
+ "java.codeGeneration.useBlocks": true,
+ "java.saveActions.organizeImports": true,
+ "java.sources.organizeImports.starThreshold": 3,
+ "java.sources.organizeImports.staticStarThreshold": 3,
+ "java.test.config": {
+ "vmArgs": [
+ "-Djava.util.logging.config.file=src/test/resources/logging.properties"
+ ]
+ }
+}
diff --git a/README.md b/README.md
index a6fddf9..d810d2d 100644
--- a/README.md
+++ b/README.md
@@ -1,8 +1,6 @@
# Kafka Connector Extension
[![Build Status](https://github.com/exasol/kafka-connector-extension/actions/workflows/ci-build.yml/badge.svg)](https://github.com/exasol/kafka-connector-extension/actions/workflows/ci-build.yml)
-[![Coveralls](https://img.shields.io/coveralls/exasol/kafka-connector-extension.svg)](https://coveralls.io/github/exasol/kafka-connector-extension)
-[![GitHub Release](https://img.shields.io/github/release/exasol/kafka-connector-extension.svg?logo=github)](https://github.com/exasol/kafka-connector-extension/releases/latest)
[![Quality Gate Status](https://sonarcloud.io/api/project_badges/measure?project=com.exasol%3Akafka-connector-extension&metric=alert_status)](https://sonarcloud.io/dashboard?id=com.exasol%3Akafka-connector-extension)
@@ -33,69 +31,10 @@ and importing data from a Kafka topic into an Exasol table.
Additional resources:
-* [Tech Blog: How to import data from Apache Kafka with Exasol][tech-blog-part1]
-* [Tech Blog: How to import data from Apache Kafka using our user-defined functions][tech-blog-part2]
+* [Tech Blog: How to import data from Apache Kafka with Exasol](https://community.exasol.com/t5/tech-blog/how-to-import-data-from-apache-kafka-with-exasol/ba-p/1409)
+* [Tech Blog: How to import data from Apache Kafka using our user-defined functions](https://community.exasol.com/t5/tech-blog/how-to-import-data-from-apache-kafka-using-our-user-defined/ba-p/1699)
## Information for Contributors
* [Developer Guide](doc/development/developer_guide.md)
-* [Releasing Guide](doc/development/releasing.md)
-
-## Dependencies
-
-### Runtime Dependencies
-
-| Dependency | Purpose | License |
-|---------------------------------------------|-----------------------------------------------------------------|--------------------|
-| [Exasol Import Export UDF Common][ieudf] | Common Import Export Library for Scala UDFs | MIT License |
-| [Apache Kafka Clients][kafka-clients-link] | An Apache Kafka client support for Java / Scala | Apache License 2.0 |
-| [Kafka Avro Serializer][kafka-avro-link] | Support for serializing / deserializing Avro formats with Kafka | Apache License 2.0 |
-| [Scala Collection Compat][scala-compat-link]| Support for cross building Scala 2.13.x code on older versions | Apache License 2.0 |
-
-### Test Dependencies
-
-| Dependency | Purpose | License |
-|---------------------------------------------|-----------------------------------------------------------------|--------------------|
-| [Scalatest][scalatest-link] | Testing tool for Scala and Java developers | Apache License 2.0 |
-| [Scalatest Plus][scalatestplus-link] | Integration support between Scalatest and Mockito | Apache License 2.0 |
-| [Mockito Core][mockitocore-link] | Mocking framework for unit tests | MIT License |
-| [Embedded Kafka Schema Registry][kafka-link]| In memory instances of Kafka and Schema registry for tests | MIT License |
-
-### Compiler Plugin Dependencies
-
-These plugins help with project development.
-
-| Plugin Name | Purpose | License |
-|---------------------------------------------|-----------------------------------------------------------------|----------------------|
-| [SBT Wartremover][sbt-wartremover-link] | Flexible Scala code linting tool | Apache License 2.0 |
-| [SBT Wartremover Contrib][sbt-wcontrib-link]| Community managed additional warts for wartremover | Apache License 2.0 |
-| [SBT Assembly][sbt-assembly-link] | Create fat jars with all project dependencies | MIT License |
-| [SBT API Mappings][sbt-apimapping-link] | A plugin that fetches API mappings for common Scala libraries | Apache License 2.0 |
-| [SBT Scoverage][sbt-scoverage-link] | Integrates the scoverage code coverage library | Apache License 2.0 |
-| [SBT Coveralls][sbt-coveralls-link] | Uploads scala code coverage results to https://coveralls.io | Apache License 2.0 |
-| [SBT Updates][sbt-updates-link] | Checks Maven and Ivy repositories for dependency updates | BSD 3-Clause License |
-| [SBT Scalafmt][sbt-scalafmt-link] | A plugin for https://scalameta.org/scalafmt/ formatting | Apache License 2.0 |
-| [SBT Scalastyle][sbt-style-link] | A plugin for http://www.scalastyle.org/ Scala style checker | Apache License 2.0 |
-| [SBT Dependency Graph][sbt-depgraph-link] | A plugin for visualizing dependency graph of your project | Apache License 2.0 |
-
-[ieudf]: https://github.com/exasol/import-export-udf-common-scala
-[kafka-clients-link]: https://github.com/apache/kafka/tree/trunk/clients
-[kafka-avro-link]: https://github.com/confluentinc/schema-registry/tree/master/avro-serializer
-[scala-compat-link]: https://github.com/scala/scala-collection-compat
-[scalatest-link]: http://www.scalatest.org/
-[scalatestplus-link]: https://github.com/scalatest/scalatestplus-mockito
-[mockitocore-link]: https://site.mockito.org/
-[kafka-link]: https://github.com/embeddedkafka/embedded-kafka-schema-registry
-[sbt-wartremover-link]: http://github.com/puffnfresh/wartremover
-[sbt-wcontrib-link]: http://github.com/wartremover/wartremover-contrib
-[sbt-assembly-link]: https://github.com/sbt/sbt-assembly
-[sbt-apimapping-link]: https://github.com/ThoughtWorksInc/sbt-api-mappings
-[sbt-scoverage-link]: http://github.com/scoverage/sbt-scoverage
-[sbt-coveralls-link]: https://github.com/scoverage/sbt-coveralls
-[sbt-updates-link]: http://github.com/rtimush/sbt-updates
-[sbt-scalafmt-link]: https://github.com/scalameta/sbt-scalafmt
-[sbt-style-link]: https://github.com/scalastyle/scalastyle-sbt-plugin
-[sbt-depgraph-link]: https://github.com/jrudolph/sbt-dependency-graph
-[sbt-expdep-link]: https://github.com/cb372/sbt-explicit-dependencies
-[tech-blog-part1]: https://community.exasol.com/t5/tech-blog/how-to-import-data-from-apache-kafka-with-exasol/ba-p/1409
-[tech-blog-part2]: https://community.exasol.com/t5/tech-blog/how-to-import-data-from-apache-kafka-using-our-user-defined/ba-p/1699
+* [Dependencies](dependencies.md)
diff --git a/build.sbt b/build.sbt
deleted file mode 100644
index e0ea796..0000000
--- a/build.sbt
+++ /dev/null
@@ -1,31 +0,0 @@
-import com.exasol.cloudetl.sbt.Dependencies
-import com.exasol.cloudetl.sbt.IntegrationTestPlugin
-import com.exasol.cloudetl.sbt.Settings
-
-lazy val orgSettings = Seq(
- name := "kafka-connector-extension",
- description := "Exasol Kafka Connector Extension",
- organization := "com.exasol",
- organizationHomepage := Some(url("http://www.exasol.com"))
-)
-
-lazy val buildSettings = Seq(
- scalaVersion := "2.13.8"
-)
-
-lazy val root =
- project
- .in(file("."))
- .settings(moduleName := "exasol-kafka-connector-extension")
- .settings(version := "1.5.3")
- .settings(orgSettings)
- .settings(buildSettings)
- .settings(Settings.projectSettings(scalaVersion))
- .settings(
- resolvers ++= Dependencies.Resolvers,
- libraryDependencies ++= Dependencies.AllDependencies,
- excludeDependencies ++= Dependencies.ExcludedDependencies
- )
- .enablePlugins(IntegrationTestPlugin, ReproducibleBuildsPlugin)
-
-addCommandAlias("pluginUpdates", ";reload plugins;dependencyUpdates;reload return")
diff --git a/dependencies.md b/dependencies.md
new file mode 100644
index 0000000..06c7ed5
--- /dev/null
+++ b/dependencies.md
@@ -0,0 +1,126 @@
+
+# Dependencies
+
+## Compile Dependencies
+
+| Dependency | License |
+| ------------------------------------------ | --------------------------------- |
+| [Scala Library][0] | [Apache-2.0][1] |
+| [Import Export UDF Common Scala][2] | [MIT License][3] |
+| [error-reporting-java][4] | [MIT License][5] |
+| [kafka-avro-serializer][6] | [Apache License 2.0][7] |
+| [scala-collection-compat][8] | [Apache-2.0][1] |
+| [Guava: Google Core Libraries for Java][9] | [Apache License, Version 2.0][10] |
+
+## Test Dependencies
+
+| Dependency | License |
+| ------------------------------------------ | ----------------------------------------- |
+| [scalatest][11] | [the Apache License, ASL Version 2.0][12] |
+| [scalatestplus-mockito][13] | [Apache-2.0][12] |
+| [mockito-core][14] | [The MIT License][15] |
+| [Test containers for Exasol on Docker][16] | [MIT License][17] |
+| [Test Database Builder for Java][18] | [MIT License][19] |
+| [Matcher for SQL Result Sets][20] | [MIT License][21] |
+| [embedded-kafka-schema-registry][22] | [MIT][23] |
+| [kafka-streams-avro-serde][24] | [Apache License 2.0][7] |
+| [avro4s-core][25] | [MIT][23] |
+| [Netty/Handler][26] | [Apache License, Version 2.0][1] |
+
+## Plugin Dependencies
+
+| Dependency | License |
+| ------------------------------------------------------- | ---------------------------------------------- |
+| [SonarQube Scanner for Maven][27] | [GNU LGPL 3][28] |
+| [Apache Maven Compiler Plugin][29] | [Apache-2.0][30] |
+| [Apache Maven Enforcer Plugin][31] | [Apache-2.0][30] |
+| [Maven Flatten Plugin][32] | [Apache Software License][30] |
+| [org.sonatype.ossindex.maven:ossindex-maven-plugin][33] | [ASL2][10] |
+| [scala-maven-plugin][34] | [Public domain (Unlicense)][35] |
+| [ScalaTest Maven Plugin][36] | [the Apache License, ASL Version 2.0][12] |
+| [Apache Maven Javadoc Plugin][37] | [Apache-2.0][30] |
+| [Maven Surefire Plugin][38] | [Apache-2.0][30] |
+| [Versions Maven Plugin][39] | [Apache License, Version 2.0][30] |
+| [duplicate-finder-maven-plugin Maven Mojo][40] | [Apache License 2.0][7] |
+| [Apache Maven Assembly Plugin][41] | [Apache-2.0][30] |
+| [Apache Maven JAR Plugin][42] | [Apache License, Version 2.0][30] |
+| [Artifact reference checker and unifier][43] | [MIT License][44] |
+| [Maven Failsafe Plugin][45] | [Apache-2.0][30] |
+| [JaCoCo :: Maven Plugin][46] | [Eclipse Public License 2.0][47] |
+| [error-code-crawler-maven-plugin][48] | [MIT License][49] |
+| [Reproducible Build Maven Plugin][50] | [Apache 2.0][10] |
+| [Project keeper maven plugin][51] | [The MIT License][52] |
+| [OpenFastTrace Maven Plugin][53] | [GNU General Public License v3.0][54] |
+| [Scalastyle Maven Plugin][55] | [Apache 2.0][7] |
+| [spotless-maven-plugin][56] | [The Apache Software License, Version 2.0][30] |
+| [scalafix-maven-plugin][57] | [BSD-3-Clause][58] |
+| [Maven Clean Plugin][59] | [The Apache Software License, Version 2.0][10] |
+| [Maven Resources Plugin][60] | [The Apache Software License, Version 2.0][10] |
+| [Maven Install Plugin][61] | [The Apache Software License, Version 2.0][10] |
+| [Maven Deploy Plugin][62] | [The Apache Software License, Version 2.0][10] |
+| [Maven Site Plugin 3][63] | [The Apache Software License, Version 2.0][10] |
+
+[0]: https://www.scala-lang.org/
+[1]: https://www.apache.org/licenses/LICENSE-2.0
+[2]: https://github.com/exasol/import-export-udf-common-scala/
+[3]: https://github.com/exasol/import-export-udf-common-scala/blob/main/LICENSE
+[4]: https://github.com/exasol/error-reporting-java/
+[5]: https://github.com/exasol/error-reporting-java/blob/main/LICENSE
+[6]: http://confluent.io/kafka-avro-serializer
+[7]: http://www.apache.org/licenses/LICENSE-2.0.html
+[8]: http://www.scala-lang.org/
+[9]: https://github.com/google/guava
+[10]: http://www.apache.org/licenses/LICENSE-2.0.txt
+[11]: http://www.scalatest.org
+[12]: http://www.apache.org/licenses/LICENSE-2.0
+[13]: https://github.com/scalatest/scalatestplus-mockito
+[14]: https://github.com/mockito/mockito
+[15]: https://github.com/mockito/mockito/blob/main/LICENSE
+[16]: https://github.com/exasol/exasol-testcontainers/
+[17]: https://github.com/exasol/exasol-testcontainers/blob/main/LICENSE
+[18]: https://github.com/exasol/test-db-builder-java/
+[19]: https://github.com/exasol/test-db-builder-java/blob/main/LICENSE
+[20]: https://github.com/exasol/hamcrest-resultset-matcher/
+[21]: https://github.com/exasol/hamcrest-resultset-matcher/blob/main/LICENSE
+[22]: https://github.com/embeddedkafka/embedded-kafka-schema-registry
+[23]: https://opensource.org/licenses/MIT
+[24]: http://confluent.io/kafka-streams-avro-serde
+[25]: https://github.com/sksamuel/avro4s
+[26]: https://netty.io/netty-handler/
+[27]: http://sonarsource.github.io/sonar-scanner-maven/
+[28]: http://www.gnu.org/licenses/lgpl.txt
+[29]: https://maven.apache.org/plugins/maven-compiler-plugin/
+[30]: https://www.apache.org/licenses/LICENSE-2.0.txt
+[31]: https://maven.apache.org/enforcer/maven-enforcer-plugin/
+[32]: https://www.mojohaus.org/flatten-maven-plugin/
+[33]: https://sonatype.github.io/ossindex-maven/maven-plugin/
+[34]: http://github.com/davidB/scala-maven-plugin
+[35]: http://unlicense.org/
+[36]: https://www.scalatest.org/user_guide/using_the_scalatest_maven_plugin
+[37]: https://maven.apache.org/plugins/maven-javadoc-plugin/
+[38]: https://maven.apache.org/surefire/maven-surefire-plugin/
+[39]: https://www.mojohaus.org/versions/versions-maven-plugin/
+[40]: https://basepom.github.io/duplicate-finder-maven-plugin
+[41]: https://maven.apache.org/plugins/maven-assembly-plugin/
+[42]: https://maven.apache.org/plugins/maven-jar-plugin/
+[43]: https://github.com/exasol/artifact-reference-checker-maven-plugin/
+[44]: https://github.com/exasol/artifact-reference-checker-maven-plugin/blob/main/LICENSE
+[45]: https://maven.apache.org/surefire/maven-failsafe-plugin/
+[46]: https://www.jacoco.org/jacoco/trunk/doc/maven.html
+[47]: https://www.eclipse.org/legal/epl-2.0/
+[48]: https://github.com/exasol/error-code-crawler-maven-plugin/
+[49]: https://github.com/exasol/error-code-crawler-maven-plugin/blob/main/LICENSE
+[50]: http://zlika.github.io/reproducible-build-maven-plugin
+[51]: https://github.com/exasol/project-keeper/
+[52]: https://github.com/exasol/project-keeper/blob/main/LICENSE
+[53]: https://github.com/itsallcode/openfasttrace-maven-plugin
+[54]: https://www.gnu.org/licenses/gpl-3.0.html
+[55]: http://www.scalastyle.org
+[56]: https://github.com/diffplug/spotless
+[57]: https://github.com/evis/scalafix-maven-plugin
+[58]: https://opensource.org/licenses/BSD-3-Clause
+[59]: http://maven.apache.org/plugins/maven-clean-plugin/
+[60]: http://maven.apache.org/plugins/maven-resources-plugin/
+[61]: http://maven.apache.org/plugins/maven-install-plugin/
+[62]: http://maven.apache.org/plugins/maven-deploy-plugin/
+[63]: http://maven.apache.org/plugins/maven-site-plugin/
diff --git a/doc/changes/changelog.md b/doc/changes/changelog.md
index b06bb48..64d930e 100644
--- a/doc/changes/changelog.md
+++ b/doc/changes/changelog.md
@@ -1,5 +1,6 @@
-# Releases
+# Changes
+* [1.6.0](changes_1.6.0.md)
* [1.5.3](changes_1.5.3.md)
* [1.5.2](changes_1.5.2.md)
* [1.5.1](changes_1.5.1.md)
diff --git a/doc/changes/changes_1.6.0.md b/doc/changes/changes_1.6.0.md
new file mode 100644
index 0000000..f8231cb
--- /dev/null
+++ b/doc/changes/changes_1.6.0.md
@@ -0,0 +1,66 @@
+# Exasol Kafka Connector Extension 1.6.0, released 2023-07-25
+
+Code name: Migrated to `mvn` build
+
+## Summary
+
+In this release, we migrated from the Scala `sbt` build system to the Java `maven` build system. This change enables us to make full use of our tooling infrastructure, such as GitHub workflows and automated dependency checks. We also updated all dependencies to their latest versions.
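+
+A full build, for example, now runs through the standard Maven lifecycle (note that the integration tests assume a running Docker daemon, since they start an Exasol test container):
+
+```sh
+# Compiles the Scala sources, runs unit and integration tests and all static checks
+mvn verify
+```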
+
+## Refactoring
+
+* #69: Migrated from `sbt` to `maven` build system.
+
+## Dependency Updates
+
+### Compile Dependency Updates
+
+* Added `com.exasol:error-reporting-java:1.0.1`
+* Added `com.exasol:import-export-udf-common-scala_2.13:1.1.1`
+* Added `com.google.guava:guava:32.1.1-jre`
+* Added `io.confluent:kafka-avro-serializer:7.4.1`
+* Added `org.scala-lang.modules:scala-collection-compat_2.13:2.11.0`
+* Added `org.scala-lang:scala-library:2.13.3`
+
+### Test Dependency Updates
+
+* Added `com.exasol:exasol-testcontainers:6.6.1`
+* Added `com.exasol:hamcrest-resultset-matcher:1.6.0`
+* Added `com.exasol:test-db-builder-java:3.4.2`
+* Added `com.sksamuel.avro4s:avro4s-core_2.13:4.1.1`
+* Added `io.confluent:kafka-streams-avro-serde:7.4.1`
+* Added `io.github.embeddedkafka:embedded-kafka-schema-registry_2.13:7.4.1`
+* Added `io.netty:netty-handler:4.1.95.Final`
+* Added `org.mockito:mockito-core:5.4.0`
+* Added `org.scalatestplus:scalatestplus-mockito_2.13:1.0.0-M2`
+* Added `org.scalatest:scalatest_2.13:3.2.16`
+
+### Plugin Dependency Updates
+
+* Added `com.diffplug.spotless:spotless-maven-plugin:2.37.0`
+* Added `com.exasol:artifact-reference-checker-maven-plugin:0.4.2`
+* Added `com.exasol:error-code-crawler-maven-plugin:1.3.0`
+* Added `com.exasol:project-keeper-maven-plugin:2.9.9`
+* Added `io.github.evis:scalafix-maven-plugin_2.13:0.1.8_0.11.0`
+* Added `io.github.zlika:reproducible-build-maven-plugin:0.16`
+* Added `net.alchim31.maven:scala-maven-plugin:4.8.1`
+* Added `org.apache.maven.plugins:maven-assembly-plugin:3.6.0`
+* Added `org.apache.maven.plugins:maven-clean-plugin:2.5`
+* Added `org.apache.maven.plugins:maven-compiler-plugin:3.11.0`
+* Added `org.apache.maven.plugins:maven-deploy-plugin:2.7`
+* Added `org.apache.maven.plugins:maven-enforcer-plugin:3.3.0`
+* Added `org.apache.maven.plugins:maven-failsafe-plugin:3.1.2`
+* Added `org.apache.maven.plugins:maven-install-plugin:2.4`
+* Added `org.apache.maven.plugins:maven-jar-plugin:3.3.0`
+* Added `org.apache.maven.plugins:maven-javadoc-plugin:3.5.0`
+* Added `org.apache.maven.plugins:maven-resources-plugin:2.6`
+* Added `org.apache.maven.plugins:maven-site-plugin:3.3`
+* Added `org.apache.maven.plugins:maven-surefire-plugin:3.1.2`
+* Added `org.basepom.maven:duplicate-finder-maven-plugin:2.0.1`
+* Added `org.codehaus.mojo:flatten-maven-plugin:1.5.0`
+* Added `org.codehaus.mojo:versions-maven-plugin:2.16.0`
+* Added `org.itsallcode:openfasttrace-maven-plugin:1.6.2`
+* Added `org.jacoco:jacoco-maven-plugin:0.8.10`
+* Added `org.scalastyle:scalastyle-maven-plugin:1.0.0`
+* Added `org.scalatest:scalatest-maven-plugin:2.2.0`
+* Added `org.sonarsource.scanner.maven:sonar-maven-plugin:3.9.1.2184`
+* Added `org.sonatype.ossindex.maven:ossindex-maven-plugin:3.2.0`
diff --git a/doc/development/releasing.md b/doc/development/releasing.md
deleted file mode 100644
index 77d68cf..0000000
--- a/doc/development/releasing.md
+++ /dev/null
@@ -1,41 +0,0 @@
-# Releasing
-
-Currently, the releasing is performed using the git tags and artifacts are
-uploaded to the [Github releases][gh-releases].
-
-## Pre Release Process
-
-Update the `doc/changes/changelog.md` and `doc/changes/changes_VERSION.md` files
-with the summary of all the changes since the last release.
-
-Please make sure to update any other necessary files, for example, `README.md`
-or user guide with new features or updated new versions.
-
-## Releasing Steps
-
-Follow these steps in order to create a new release:
-
-- Run `./scripts/ci.sh` and make sure everything is working.
-- Add a git tag, for example, `git tag -a 0.4.4 -m "Release version 0.4.4"`.
-- Push tags to remote, `git push --tags`.
-
-Please make sure that the new version tag follows the [Semantic Versioning
-2.0.0](https://semver.org/).
-
-The next Travis CI run will detect the new tag and create a new Github release
-and publish the artifacts.
-
-## Post Release Process
-
-After the release process, the new [Github release][gh-releases] notes should be
-added. It should be the same as the pre-release update to the
-`doc/changes/changes_VERSION.md` file.
-
-Click on the "Edit release" button on the latest release version on the Github
-releases page, and add the release notes.
-
-## Using Release Robot
-
-TODO
-
-[gh-releases]: https://github.com/exasol/kafka-connector-extension/releases
diff --git a/pk_generated_parent.pom b/pk_generated_parent.pom
new file mode 100644
index 0000000..17f3e34
--- /dev/null
+++ b/pk_generated_parent.pom
@@ -0,0 +1,306 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+    <modelVersion>4.0.0</modelVersion>
+    <groupId>com.exasol</groupId>
+    <artifactId>kafka-connector-extension-generated-parent</artifactId>
+    <version>1.6.0</version>
+    <packaging>pom</packaging>
+    <properties>
+        <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
+        <project.reporting.outputEncoding>UTF-8</project.reporting.outputEncoding>
+        <java.version>11</java.version>
+    </properties>
+    <licenses>
+        <license>
+            <name>The MIT License (MIT)</name>
+            <url>https://github.com/exasol/kafka-connector-extension/blob/main/LICENSE</url>
+            <distribution>repo</distribution>
+        </license>
+    </licenses>
+    <developers>
+        <developer>
+            <name>Exasol</name>
+            <email>opensource@exasol.com</email>
+            <organization>Exasol AG</organization>
+            <organizationUrl>https://www.exasol.com/</organizationUrl>
+        </developer>
+    </developers>
+    <scm>
+        <connection>scm:git:https://github.com/exasol/kafka-connector-extension.git</connection>
+        <developerConnection>scm:git:https://github.com/exasol/kafka-connector-extension.git</developerConnection>
+        <url>https://github.com/exasol/kafka-connector-extension/</url>
+    </scm>
+    <build>
+        <plugins>
+            <plugin>
+                <groupId>org.sonarsource.scanner.maven</groupId>
+                <artifactId>sonar-maven-plugin</artifactId>
+                <version>3.9.1.2184</version>
+            </plugin>
+            <plugin>
+                <groupId>org.apache.maven.plugins</groupId>
+                <artifactId>maven-compiler-plugin</artifactId>
+                <version>3.11.0</version>
+                <configuration>
+                    <release>${java.version}</release>
+                </configuration>
+            </plugin>
+            <plugin>
+                <groupId>org.apache.maven.plugins</groupId>
+                <artifactId>maven-enforcer-plugin</artifactId>
+                <version>3.3.0</version>
+                <executions>
+                    <execution>
+                        <id>enforce-maven</id>
+                        <goals>
+                            <goal>enforce</goal>
+                        </goals>
+                        <configuration>
+                            <rules>
+                                <requireMavenVersion>
+                                    <version>[3.8.7,3.9.0)</version>
+                                </requireMavenVersion>
+                            </rules>
+                        </configuration>
+                    </execution>
+                </executions>
+            </plugin>
+            <plugin>
+                <groupId>org.codehaus.mojo</groupId>
+                <artifactId>flatten-maven-plugin</artifactId>
+                <version>1.5.0</version>
+                <configuration>
+                    <updatePomFile>true</updatePomFile>
+                    <flattenMode>oss</flattenMode>
+                </configuration>
+                <executions>
+                    <execution>
+                        <id>flatten</id>
+                        <phase>process-resources</phase>
+                        <goals>
+                            <goal>flatten</goal>
+                        </goals>
+                    </execution>
+                    <execution>
+                        <id>flatten.clean</id>
+                        <phase>clean</phase>
+                        <goals>
+                            <goal>clean</goal>
+                        </goals>
+                    </execution>
+                </executions>
+            </plugin>
+            <plugin>
+                <groupId>org.sonatype.ossindex.maven</groupId>
+                <artifactId>ossindex-maven-plugin</artifactId>
+                <version>3.2.0</version>
+                <executions>
+                    <execution>
+                        <id>audit</id>
+                        <phase>package</phase>
+                        <goals>
+                            <goal>audit</goal>
+                        </goals>
+                    </execution>
+                </executions>
+            </plugin>
+            <plugin>
+                <groupId>org.apache.maven.plugins</groupId>
+                <artifactId>maven-surefire-plugin</artifactId>
+                <version>3.1.2</version>
+                <configuration>
+                    <argLine>-Djava.util.logging.config.file=src/test/resources/logging.properties ${argLine}</argLine>
+                    <excludedGroups>${test.excludeTags}</excludedGroups>
+                </configuration>
+            </plugin>
+            <plugin>
+                <groupId>org.codehaus.mojo</groupId>
+                <artifactId>versions-maven-plugin</artifactId>
+                <version>2.16.0</version>
+                <executions>
+                    <execution>
+                        <id>display-updates</id>
+                        <phase>package</phase>
+                        <goals>
+                            <goal>display-plugin-updates</goal>
+                            <goal>display-dependency-updates</goal>
+                        </goals>
+                    </execution>
+                </executions>
+                <configuration>
+                    <rulesUri>file:///${project.basedir}/versionsMavenPluginRules.xml</rulesUri>
+                </configuration>
+            </plugin>
+            <plugin>
+                <groupId>org.basepom.maven</groupId>
+                <artifactId>duplicate-finder-maven-plugin</artifactId>
+                <version>2.0.1</version>
+                <executions>
+                    <execution>
+                        <id>default</id>
+                        <phase>verify</phase>
+                        <goals>
+                            <goal>check</goal>
+                        </goals>
+                    </execution>
+                </executions>
+                <configuration>
+                    <checkCompileClasspath>true</checkCompileClasspath>
+                    <checkRuntimeClasspath>true</checkRuntimeClasspath>
+                    <checkTestClasspath>true</checkTestClasspath>
+                    <failBuildInCaseOfDifferentContentConflict>true</failBuildInCaseOfDifferentContentConflict>
+                    <failBuildInCaseOfEqualContentConflict>true</failBuildInCaseOfEqualContentConflict>
+                    <failBuildInCaseOfConflict>true</failBuildInCaseOfConflict>
+                    <printEqualFiles>false</printEqualFiles>
+                    <preferLocal>true</preferLocal>
+                    <useDefaultResultFile>true</useDefaultResultFile>
+                    <useResultFile>false</useResultFile>
+                </configuration>
+            </plugin>
+            <plugin>
+                <groupId>org.apache.maven.plugins</groupId>
+                <artifactId>maven-assembly-plugin</artifactId>
+                <version>3.6.0</version>
+                <configuration>
+                    <descriptors>
+                        <descriptor>src/assembly/all-dependencies.xml</descriptor>
+                    </descriptors>
+                    <finalName>NAME_OF_YOUR_JAR</finalName>
+                    <appendAssemblyId>false</appendAssemblyId>
+                    <archive>
+                        <manifestEntries>
+                            <Multi-Release>true</Multi-Release>
+                        </manifestEntries>
+                    </archive>
+                </configuration>
+                <executions>
+                    <execution>
+                        <id>make-assembly</id>
+                        <phase>package</phase>
+                        <goals>
+                            <goal>single</goal>
+                        </goals>
+                    </execution>
+                </executions>
+            </plugin>
+            <plugin>
+                <groupId>org.apache.maven.plugins</groupId>
+                <artifactId>maven-jar-plugin</artifactId>
+                <version>3.3.0</version>
+                <executions>
+                    <execution>
+                        <id>default-jar</id>
+                        <phase>none</phase>
+                    </execution>
+                </executions>
+            </plugin>
+            <plugin>
+                <groupId>com.exasol</groupId>
+                <artifactId>artifact-reference-checker-maven-plugin</artifactId>
+                <version>0.4.2</version>
+                <executions>
+                    <execution>
+                        <phase>verify</phase>
+                        <goals>
+                            <goal>verify</goal>
+                        </goals>
+                    </execution>
+                </executions>
+            </plugin>
+            <plugin>
+                <groupId>org.apache.maven.plugins</groupId>
+                <artifactId>maven-failsafe-plugin</artifactId>
+                <version>3.1.2</version>
+                <configuration>
+                    <argLine>-Djava.util.logging.config.file=src/test/resources/logging.properties ${argLine}</argLine>
+                    <excludedGroups>${test.excludeTags}</excludedGroups>
+                </configuration>
+                <executions>
+                    <execution>
+                        <id>verify</id>
+                        <goals>
+                            <goal>integration-test</goal>
+                            <goal>verify</goal>
+                        </goals>
+                    </execution>
+                </executions>
+            </plugin>
+            <plugin>
+                <groupId>org.jacoco</groupId>
+                <artifactId>jacoco-maven-plugin</artifactId>
+                <version>0.8.10</version>
+                <executions>
+                    <execution>
+                        <id>prepare-agent</id>
+                        <goals>
+                            <goal>prepare-agent</goal>
+                        </goals>
+                    </execution>
+                    <execution>
+                        <id>merge-results</id>
+                        <phase>verify</phase>
+                        <goals>
+                            <goal>merge</goal>
+                        </goals>
+                        <configuration>
+                            <fileSets>
+                                <fileSet>
+                                    <directory>${project.build.directory}/</directory>
+                                    <includes>
+                                        <include>jacoco*.exec</include>
+                                    </includes>
+                                </fileSet>
+                            </fileSets>
+                            <destFile>${project.build.directory}/aggregate.exec</destFile>
+                        </configuration>
+                    </execution>
+                    <execution>
+                        <id>report</id>
+                        <phase>verify</phase>
+                        <goals>
+                            <goal>report</goal>
+                        </goals>
+                        <configuration>
+                            <dataFile>${project.build.directory}/aggregate.exec</dataFile>
+                        </configuration>
+                    </execution>
+                </executions>
+            </plugin>
+            <plugin>
+                <groupId>com.exasol</groupId>
+                <artifactId>error-code-crawler-maven-plugin</artifactId>
+                <version>1.3.0</version>
+                <executions>
+                    <execution>
+                        <id>verify</id>
+                        <goals>
+                            <goal>verify</goal>
+                        </goals>
+                    </execution>
+                </executions>
+            </plugin>
+            <plugin>
+                <groupId>io.github.zlika</groupId>
+                <artifactId>reproducible-build-maven-plugin</artifactId>
+                <version>0.16</version>
+                <executions>
+                    <execution>
+                        <id>strip-jar</id>
+                        <phase>package</phase>
+                        <goals>
+                            <goal>strip-jar</goal>
+                        </goals>
+                    </execution>
+                </executions>
+            </plugin>
+        </plugins>
+    </build>
+</project>
diff --git a/pom.xml b/pom.xml
new file mode 100644
index 0000000..815c6ca
--- /dev/null
+++ b/pom.xml
@@ -0,0 +1,480 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+    <modelVersion>4.0.0</modelVersion>
+    <groupId>com.exasol</groupId>
+    <artifactId>kafka-connector-extension</artifactId>
+    <version>1.6.0</version>
+    <name>Exasol Kafka Connector Extension</name>
+    <description>Exasol Kafka Extension for accessing Apache Kafka</description>
+    <url>https://github.com/exasol/kafka-connector-extension/</url>
+    <parent>
+        <artifactId>kafka-connector-extension-generated-parent</artifactId>
+        <groupId>com.exasol</groupId>
+        <version>1.6.0</version>
+        <relativePath>pk_generated_parent.pom</relativePath>
+    </parent>
+    <properties>
+        <scala.version>2.13.3</scala.version>
+        <scala.compat.version>2.13</scala.compat.version>
+        <sonar.sources>.</sonar.sources>
+        <sonar.inclusions>src/main/**</sonar.inclusions>
+    </properties>
+    <repositories>
+        <repository>
+            <id>io.confluent</id>
+            <url>https://packages.confluent.io/maven/</url>
+            <snapshots>
+                <enabled>false</enabled>
+            </snapshots>
+        </repository>
+    </repositories>
+    <dependencies>
+        <dependency>
+            <groupId>org.scala-lang</groupId>
+            <artifactId>scala-library</artifactId>
+            <version>${scala.version}</version>
+        </dependency>
+        <dependency>
+            <groupId>com.exasol</groupId>
+            <artifactId>import-export-udf-common-scala_${scala.compat.version}</artifactId>
+            <version>1.1.1</version>
+            <exclusions>
+                <exclusion>
+                    <groupId>org.slf4j</groupId>
+                    <artifactId>slf4j-simple</artifactId>
+                </exclusion>
+                <exclusion>
+                    <groupId>org.apache.avro</groupId>
+                    <artifactId>avro</artifactId>
+                </exclusion>
+                <exclusion>
+                    <groupId>org.apache.commons</groupId>
+                    <artifactId>commons-compress</artifactId>
+                </exclusion>
+            </exclusions>
+        </dependency>
+        <dependency>
+            <groupId>com.exasol</groupId>
+            <artifactId>error-reporting-java</artifactId>
+            <version>1.0.1</version>
+        </dependency>
+        <dependency>
+            <groupId>io.confluent</groupId>
+            <artifactId>kafka-avro-serializer</artifactId>
+            <version>7.4.1</version>
+            <exclusions>
+                <exclusion>
+                    <groupId>org.slf4j</groupId>
+                    <artifactId>slf4j-api</artifactId>
+                </exclusion>
+                <exclusion>
+                    <groupId>org.apache.avro</groupId>
+                    <artifactId>avro</artifactId>
+                </exclusion>
+                <exclusion>
+                    <groupId>com.google.guava</groupId>
+                    <artifactId>guava</artifactId>
+                </exclusion>
+                <exclusion>
+                    <groupId>org.apache.commons</groupId>
+                    <artifactId>commons-lang3</artifactId>
+                </exclusion>
+                <exclusion>
+                    <groupId>io.swagger</groupId>
+                    <artifactId>swagger-core</artifactId>
+                </exclusion>
+                <exclusion>
+                    <groupId>io.swagger</groupId>
+                    <artifactId>swagger-models</artifactId>
+                </exclusion>
+                <exclusion>
+                    <groupId>com.fasterxml.jackson.core</groupId>
+                    <artifactId>jackson-databind</artifactId>
+                </exclusion>
+            </exclusions>
+        </dependency>
+        <dependency>
+            <groupId>org.scala-lang.modules</groupId>
+            <artifactId>scala-collection-compat_${scala.compat.version}</artifactId>
+            <version>2.11.0</version>
+        </dependency>
+        <dependency>
+            <groupId>com.google.guava</groupId>
+            <artifactId>guava</artifactId>
+            <version>32.1.1-jre</version>
+        </dependency>
+        <dependency>
+            <groupId>org.scalatest</groupId>
+            <artifactId>scalatest_${scala.compat.version}</artifactId>
+            <version>3.2.16</version>
+            <scope>test</scope>
+        </dependency>
+        <dependency>
+            <groupId>org.scalatestplus</groupId>
+            <artifactId>scalatestplus-mockito_${scala.compat.version}</artifactId>
+            <version>1.0.0-M2</version>
+            <scope>test</scope>
+        </dependency>
+        <dependency>
+            <groupId>org.mockito</groupId>
+            <artifactId>mockito-core</artifactId>
+            <version>5.4.0</version>
+            <scope>test</scope>
+        </dependency>
+        <dependency>
+            <groupId>com.exasol</groupId>
+            <artifactId>exasol-testcontainers</artifactId>
+            <version>6.6.1</version>
+            <scope>test</scope>
+        </dependency>
+        <dependency>
+            <groupId>com.exasol</groupId>
+            <artifactId>test-db-builder-java</artifactId>
+            <version>3.4.2</version>
+            <scope>test</scope>
+        </dependency>
+        <dependency>
+            <groupId>com.exasol</groupId>
+            <artifactId>hamcrest-resultset-matcher</artifactId>
+            <version>1.6.0</version>
+            <scope>test</scope>
+        </dependency>
+        <dependency>
+            <groupId>io.github.embeddedkafka</groupId>
+            <artifactId>embedded-kafka-schema-registry_${scala.compat.version}</artifactId>
+            <version>7.4.1</version>
+            <scope>test</scope>
+            <exclusions>
+                <exclusion>
+                    <groupId>log4j</groupId>
+                    <artifactId>log4j</artifactId>
+                </exclusion>
+                <exclusion>
+                    <groupId>org.jetbrains.kotlin</groupId>
+                    <artifactId>kotlin-reflect</artifactId>
+                </exclusion>
+                <exclusion>
+                    <groupId>com.fasterxml.jackson.core</groupId>
+                    <artifactId>jackson-annotations</artifactId>
+                </exclusion>
+                <exclusion>
+                    <groupId>com.fasterxml.jackson.core</groupId>
+                    <artifactId>jackson-core</artifactId>
+                </exclusion>
+                <exclusion>
+                    <groupId>com.fasterxml.jackson.core</groupId>
+                    <artifactId>jackson-databind</artifactId>
+                </exclusion>
+            </exclusions>
+        </dependency>
+        <dependency>
+            <groupId>io.confluent</groupId>
+            <artifactId>kafka-streams-avro-serde</artifactId>
+            <version>7.4.1</version>
+            <scope>test</scope>
+            <exclusions>
+                <exclusion>
+                    <groupId>org.apache.avro</groupId>
+                    <artifactId>avro</artifactId>
+                </exclusion>
+            </exclusions>
+        </dependency>
+        <dependency>
+            <groupId>com.sksamuel.avro4s</groupId>
+            <artifactId>avro4s-core_${scala.compat.version}</artifactId>
+            <version>4.1.1</version>
+            <scope>test</scope>
+        </dependency>
+        <dependency>
+            <groupId>io.netty</groupId>
+            <artifactId>netty-handler</artifactId>
+            <version>4.1.95.Final</version>
+            <scope>test</scope>
+        </dependency>
+    </dependencies>
+    <build>
+        <plugins>
+            <plugin>
+                <groupId>net.alchim31.maven</groupId>
+                <artifactId>scala-maven-plugin</artifactId>
+                <version>4.8.1</version>
+                <executions>
+                    <execution>
+                        <id>scala-compile-first</id>
+                        <phase>process-resources</phase>
+                        <goals>
+                            <goal>add-source</goal>
+                            <goal>compile</goal>
+                        </goals>
+                    </execution>
+                    <execution>
+                        <id>scala-test-compile</id>
+                        <phase>process-test-resources</phase>
+                        <goals>
+                            <goal>testCompile</goal>
+                        </goals>
+                    </execution>
+                    <execution>
+                        <id>attach-scaladocs</id>
+                        <phase>verify</phase>
+                        <goals>
+                            <goal>doc</goal>
+                            <goal>doc-jar</goal>
+                        </goals>
+                    </execution>
+                </executions>
+                <configuration>
+                    <scalaVersion>${scala.version}</scalaVersion>
+                    <scalaCompatVersion>${scala.compat.version}</scalaCompatVersion>
+                    <checkMultipleScalaVersions>true</checkMultipleScalaVersions>
+                    <failOnMultipleScalaVersions>true</failOnMultipleScalaVersions>
+                    <recompileMode>incremental</recompileMode>
+                    <args>
+                        <arg>-unchecked</arg>
+                        <arg>-deprecation</arg>
+                        <arg>-feature</arg>
+                        <arg>-explaintypes</arg>
+                        <arg>-Xcheckinit</arg>
+                        <arg>-Xfatal-warnings</arg>
+                        <arg>-Xlint:_</arg>
+                        <arg>-Ywarn-dead-code</arg>
+                        <arg>-Ywarn-numeric-widen</arg>
+                        <arg>-Ywarn-value-discard</arg>
+                        <arg>-Ywarn-extra-implicit</arg>
+                        <arg>-Ywarn-unused:_</arg>
+                    </args>
+                    <javacArgs>
+                        <javacArg>-source</javacArg>
+                        <javacArg>${java.version}</javacArg>
+                        <javacArg>-target</javacArg>
+                        <javacArg>${java.version}</javacArg>
+                        <javacArg>-deprecation</javacArg>
+                        <javacArg>-parameters</javacArg>
+                        <javacArg>-Xlint:all</javacArg>
+                    </javacArgs>
+                    <jvmArgs>
+                        <jvmArg>-Xmx2048m</jvmArg>
+                        <jvmArg>-Xss64m</jvmArg>
+                    </jvmArgs>
+                    <compilerPlugins>
+                        <compilerPlugin>
+                            <groupId>org.scalameta</groupId>
+                            <artifactId>semanticdb-scalac_${scala.version}</artifactId>
+                            <version>4.8.0</version>
+                        </compilerPlugin>
+                    </compilerPlugins>
+                </configuration>
+            </plugin>
+            <plugin>
+                <groupId>org.scalatest</groupId>
+                <artifactId>scalatest-maven-plugin</artifactId>
+                <version>2.2.0</version>
+                <configuration>
+                    <junitxml>.</junitxml>
+                    <filereports>TestSuite.txt</filereports>
+                    <argLine>-Djava.util.logging.config.file=src/test/resources/logging.properties ${argLine}</argLine>
+                </configuration>
+                <executions>
+                    <execution>
+                        <id>test</id>
+                        <goals>
+                            <goal>test</goal>
+                        </goals>
+                        <configuration>
+                            <suffixes>(?&lt;!IT)</suffixes>
+                            <filereports>TestSuite.txt</filereports>
+                            <reportsDirectory>${project.build.directory}/surefire-reports</reportsDirectory>
+                        </configuration>
+                    </execution>
+                    <execution>
+                        <id>integration-test</id>
+                        <phase>integration-test</phase>
+                        <goals>
+                            <goal>test</goal>
+                        </goals>
+                        <configuration>
+                            <suffixes>(?&lt;=IT)</suffixes>
+                            <filereports>IntegrationTestSuite.txt</filereports>
+                            <reportsDirectory>${project.build.directory}/failsafe-reports</reportsDirectory>
+                        </configuration>
+                    </execution>
+                </executions>
+            </plugin>
+            <plugin>
+                <groupId>org.apache.maven.plugins</groupId>
+                <artifactId>maven-javadoc-plugin</artifactId>
+                <version>3.5.0</version>
+                <executions>
+                    <execution>
+                        <id>attach-javadocs</id>
+                        <goals>
+                            <goal>jar</goal>
+                        </goals>
+                    </execution>
+                </executions>
+                <configuration>
+                    <encoding>UTF-8</encoding>
+                    <quiet>true</quiet>
+                    <failOnError>true</failOnError>
+                    <failOnWarnings>true</failOnWarnings>
+                    <sourcepath>${project.basedir}/src/main/java</sourcepath>
+                </configuration>
+            </plugin>
+            <plugin>
+                <groupId>org.apache.maven.plugins</groupId>
+                <artifactId>maven-surefire-plugin</artifactId>
+                <executions>
+                    <execution>
+                        <id>default-test</id>
+                        <phase>none</phase>
+                    </execution>
+                </executions>
+            </plugin>
+            <plugin>
+                <groupId>org.apache.maven.plugins</groupId>
+                <artifactId>maven-failsafe-plugin</artifactId>
+                <configuration>
+                    <failIfNoTests>false</failIfNoTests>
+                </configuration>
+            </plugin>
+            <plugin>
+                <groupId>org.apache.maven.plugins</groupId>
+                <artifactId>maven-assembly-plugin</artifactId>
+                <configuration>
+                    <finalName>exasol-${project.artifactId}-${project.version}</finalName>
+                </configuration>
+            </plugin>
+            <plugin>
+                <groupId>org.sonatype.ossindex.maven</groupId>
+                <artifactId>ossindex-maven-plugin</artifactId>
+                <configuration>
+                    <skip>${ossindex.skip}</skip>
+                    <excludeVulnerabilityIds>
+                        <exclude>CVE-2022-36944</exclude>
+                    </excludeVulnerabilityIds>
+                </configuration>
+            </plugin>
+            <plugin>
+                <groupId>org.basepom.maven</groupId>
+                <artifactId>duplicate-finder-maven-plugin</artifactId>
+                <configuration>
+                    <skip>${duplicate-finder.skip}</skip>
+                    <failBuildInCaseOfDifferentContentConflict>false</failBuildInCaseOfDifferentContentConflict>
+                    <failBuildInCaseOfEqualContentConflict>false</failBuildInCaseOfEqualContentConflict>
+                    <failBuildInCaseOfConflict>false</failBuildInCaseOfConflict>
+                    <ignoredDependencies>
+                        <dependency>
+                            <groupId>com.exasol</groupId>
+                            <artifactId>import-export-udf-common-scala_${scala.compat.version}</artifactId>
+                            <version>1.1.1</version>
+                        </dependency>
+                    </ignoredDependencies>
+                </configuration>
+            </plugin>
+            <plugin>
+                <groupId>com.exasol</groupId>
+                <artifactId>project-keeper-maven-plugin</artifactId>
+                <version>2.9.9</version>
+                <executions>
+                    <execution>
+                        <goals>
+                            <goal>verify</goal>
+                        </goals>
+                    </execution>
+                </executions>
+            </plugin>
+            <plugin>
+                <groupId>org.itsallcode</groupId>
+                <artifactId>openfasttrace-maven-plugin</artifactId>
+                <version>1.6.2</version>
+                <executions>
+                    <execution>
+                        <id>trace-requirements</id>
+                        <goals>
+                            <goal>trace</goal>
+                        </goals>
+                    </execution>
+                </executions>
+                <configuration>
+                    <reportFormat>html</reportFormat>
+                    <reportVerbosity>ALL</reportVerbosity>
+                    <failBuild>true</failBuild>
+                </configuration>
+            </plugin>
+            <plugin>
+                <groupId>org.scalastyle</groupId>
+                <artifactId>scalastyle-maven-plugin</artifactId>
+                <version>1.0.0</version>
+                <configuration>
+                    <verbose>false</verbose>
+                    <failOnViolation>true</failOnViolation>
+                    <includeTestSourceDirectory>true</includeTestSourceDirectory>
+                    <failOnWarning>false</failOnWarning>
+                    <sourceDirectory>${project.basedir}/src/main/scala</sourceDirectory>
+                    <testSourceDirectory>${project.basedir}/src/test/scala</testSourceDirectory>
+                    <configLocation>${project.basedir}/scalastyle-config.xml</configLocation>
+                    <outputFile>${project.build.directory}/scalastyle-output.xml</outputFile>
+                    <outputEncoding>UTF-8</outputEncoding>
+                </configuration>
+                <executions>
+                    <execution>
+                        <goals>
+                            <goal>check</goal>
+                        </goals>
+                    </execution>
+                </executions>
+            </plugin>
+            <plugin>
+                <groupId>com.diffplug.spotless</groupId>
+                <artifactId>spotless-maven-plugin</artifactId>
+                <version>2.37.0</version>
+                <configuration>
+                    <scala>
+                        <scalafmt>
+                            <file>${project.basedir}/.scalafmt.conf</file>
+                        </scalafmt>
+                    </scala>
+                </configuration>
+                <executions>
+                    <execution>
+                        <goals>
+                            <goal>check</goal>
+                        </goals>
+                    </execution>
+                </executions>
+            </plugin>
+            <plugin>
+                <groupId>io.github.evis</groupId>
+                <artifactId>scalafix-maven-plugin_${scala.compat.version}</artifactId>
+                <version>0.1.8_0.11.0</version>
+                <dependencies>
+                    <dependency>
+                        <groupId>com.geirsson</groupId>
+                        <artifactId>metaconfig-pprint_${scala.compat.version}</artifactId>
+                        <version>0.11.1</version>
+                    </dependency>
+                    <dependency>
+                        <groupId>com.github.vovapolu</groupId>
+                        <artifactId>scaluzzi_${scala.compat.version}</artifactId>
+                        <version>0.1.23</version>
+                    </dependency>
+                    <dependency>
+                        <groupId>com.nequissimus</groupId>
+                        <artifactId>sort-imports_2.13</artifactId>
+                        <version>0.6.1</version>
+                    </dependency>
+                </dependencies>
+                <configuration>
+                    <mode>CHECK</mode>
+                </configuration>
+            </plugin>
+        </plugins>
+    </build>
+</project>
diff --git a/project/Compilation.scala b/project/Compilation.scala
deleted file mode 100644
index 0aa5e47..0000000
--- a/project/Compilation.scala
+++ /dev/null
@@ -1,134 +0,0 @@
-package com.exasol.cloudetl.sbt
-
-import sbt._
-import wartremover.Wart
-import wartremover.Warts
-
-/** Compiler related settings (flags, warts, lints) */
-object Compilation {
-
- def compilerFlagsFn(scalaVersion: String): Seq[String] =
- CrossVersion.partialVersion(scalaVersion) match {
- case Some((2, 13)) => CompilerFlags.filter(_ != "-Xfuture")
- case Some((2, 12)) => CompilerFlags ++ Scala12CompilerFlags
- case Some((2, 11)) => CompilerFlags
- case _ => CompilerFlags
- }
-
- def consoleFlagsFn(scalaVersion: String): Seq[String] =
- compilerFlagsFn(scalaVersion).filterNot(
- Set(
- "-Xfatal-warnings",
- "-Ywarn-unused-import",
- "-Ywarn-unused:imports"
- )
- )
-
- // format: off
- /** Compiler flags specific to Scala version 2.12.x */
- private val Scala12CompilerFlags: Seq[String] = Seq(
- "-Xlint:constant", // Evaluation of a constant arithmetic expression results in an error.
- "-Xlint:by-name-right-associative", // By-name parameter of right associative operator.
- "-Xlint:nullary-override", // Warn when non-nullary `def f()' overrides nullary `def f'.
- "-Xlint:unsound-match", // Pattern match may not be typesafe.
- "-Yno-adapted-args", // Do not adapt an argument list (either by inserting () or creating a tuple) to match the receiver.
- "-Ywarn-extra-implicit", // Warn when more than one implicit parameter section is defined.
- "-Ywarn-inaccessible", // Warn about inaccessible types in method signatures.
- "-Ywarn-infer-any", // Warn when a type argument is inferred to be `Any`.
- "-Ywarn-nullary-override", // Warn when non-nullary `def f()' overrides nullary `def f'.
- "-Ywarn-nullary-unit", // Warn when nullary methods return Unit.
- "-Ywarn-unused:implicits", // Warn if an implicit parameter is unused.
- "-Ywarn-unused:imports", // Warn if an import selector is not referenced.
- "-Ywarn-unused:locals", // Warn if a local definition is unused.
- // "-Ywarn-unused:params", // Warn if a value parameter is unused.
- "-Ywarn-unused:patvars", // Warn if a variable bound in a pattern is unused.
- "-Ywarn-unused:privates", // Warn if a private member is unused.
- "-Ypartial-unification" // Enable partial unification in type constructor inference
- )
-
- /**
- * Compiler flags specific to Scala versions 2.10.x and 2.11.x
- *
- * From tpolecat, https://tpolecat.github.io/2017/04/25/scalac-flags.html
- */
- private val CompilerFlags: Seq[String] = Seq(
- "-encoding", "utf-8", // Specify character encoding used by source files.
- "-deprecation", // Emit warning and location for usages of deprecated APIs.
- "-explaintypes", // Explain type errors in more detail.
- "-feature", // Emit warning and location for usages of features that should be imported explicitly.
- "-language:existentials", // Existential types (besides wildcard types) can be written and inferred
- "-language:experimental.macros", // Allow macro definition (besides implementation and application)
- "-language:higherKinds", // Allow higher-kinded types
- "-language:implicitConversions", // Allow definition of implicit functions called views
- "-unchecked", // Enable additional warnings where generated code depends on assumptions.
- "-Xcheckinit", // Wrap field accessors to throw an exception on uninitialized access.
- "-Xfatal-warnings", // Fail the compilation if there are any warnings.
- "-Xfuture", // Turn on future language features.
- "-Xlint:adapted-args", // Warn if an argument list is modified to match the receiver.
- "-Xlint:delayedinit-select", // Selecting member of DelayedInit.
- "-Xlint:doc-detached", // A Scaladoc comment appears to be detached from its element.
- "-Xlint:inaccessible", // Warn about inaccessible types in method signatures.
- "-Xlint:infer-any", // Warn when a type argument is inferred to be `Any`.
- "-Xlint:missing-interpolator", // A string literal appears to be missing an interpolator id.
- "-Xlint:nullary-unit", // Warn when nullary methods return Unit.
- "-Xlint:option-implicit", // Option.apply used implicit view.
- "-Xlint:package-object-classes", // Class or object defined in package object.
- "-Xlint:poly-implicit-overload", // Parameterized overloaded implicit methods are not visible as view bounds.
- "-Xlint:private-shadow", // A private field (or class parameter) shadows a superclass field.
- "-Xlint:stars-align", // Pattern sequence wildcard must align with sequence component.
- "-Xlint:type-parameter-shadow", // A local type parameter shadows a type already in scope.
- "-Ywarn-dead-code", // Warn when dead code is identified.
- "-Ywarn-numeric-widen", // Warn when numerics are widened.
- "-Ywarn-value-discard" // Warn when non-Unit expression results are unused.
- )
- // format: on
-
- val JavacCompilerFlags: Seq[String] = Seq(
- "-encoding",
- "UTF-8",
- "-deprecation",
- "-parameters",
- "-Xlint:all"
- )
-
- private def contribWart(name: String) =
- Wart.custom(s"org.wartremover.contrib.warts.$name")
-
- private val ExtraWartremoverFlags = Seq(
- contribWart("Apply"),
- contribWart("ExposedTuples"),
- contribWart("MissingOverride"),
- contribWart("NoNeedForMonad"),
- contribWart("OldTime"),
- contribWart("SealedCaseClass"),
- contribWart("SomeApply"),
- contribWart("SymbolicName"),
- contribWart("UnintendedLaziness"),
- contribWart("UnsafeInheritance")
- )
-
- val WartremoverFlags: Seq[Wart] = ExtraWartremoverFlags ++ Warts.allBut(
- Wart.Any,
- Wart.AsInstanceOf,
- Wart.Equals,
- Wart.IsInstanceOf,
- Wart.Null,
- Wart.MutableDataStructures,
- Wart.Overloading,
- Wart.StringPlusAny,
- Wart.Throw,
- Wart.Var,
- Wart.While
- )
-
- val WartremoverTestFlags: Seq[Wart] = ExtraWartremoverFlags ++ Warts.allBut(
- Wart.Any,
- Wart.IsInstanceOf,
- Wart.NonUnitStatements,
- Wart.Null,
- Wart.Overloading,
- Wart.StringPlusAny,
- Wart.Var
- )
-
-}
diff --git a/project/Dependencies.scala b/project/Dependencies.scala
deleted file mode 100644
index 2ac1eb7..0000000
--- a/project/Dependencies.scala
+++ /dev/null
@@ -1,70 +0,0 @@
-package com.exasol.cloudetl.sbt
-
-import sbt.{ExclusionRule, _}
-import sbt.librarymanagement.InclExclRule
-
-/** A list of required dependencies */
-object Dependencies {
-
- // Runtime dependencies versions
- private val ImportExportUDFVersion = "0.3.1"
- private val KafkaClientsVersion = "3.0.0"
- private val KafkaAvroSerializerVersion = "7.0.1"
- private val ScalaCollectionCompatVersion = "2.6.0"
-
- // Test dependencies versions
- private val ScalaTestVersion = "3.2.10"
- private val ScalaTestPlusVersion = "1.0.0-M2"
- private val MockitoCoreVersion = "4.2.0"
- private val KafkaSchemaRegistryVersion = "7.0.0"
- private val ExasolTestDBBuilderVersion = "3.2.2"
- private val ExasolTestContainersVersion = "5.1.1"
- private val ExasolHamcrestMatcherVersion = "1.5.1"
-
- val Resolvers: Seq[Resolver] = Seq(
- "jitpack.io" at "https://jitpack.io",
- "Confluent Maven Repo" at "https://packages.confluent.io/maven/",
- "Exasol Releases" at "https://maven.exasol.com/artifactory/exasol-releases"
- )
-
- lazy val RuntimeDependencies: Seq[ModuleID] = Seq(
- "com.exasol" %% "import-export-udf-common-scala" % ImportExportUDFVersion,
- "com.exasol" % "error-reporting-java" % "0.4.1",
- "org.apache.kafka" % "kafka-clients" % KafkaClientsVersion,
- "io.confluent" % "kafka-avro-serializer" % KafkaAvroSerializerVersion
- exclude ("org.slf4j", "slf4j-api")
- exclude ("org.apache.avro", "avro")
- exclude ("org.apache.commons", "commons-lang3")
- exclude ("com.google.guava", "guava")
- exclude ("com.fasterxml.jackson.core", "jackson-databind")
- exclude ("io.swagger", "swagger-core")
- exclude ("io.swagger", "swagger-models"),
- "org.scala-lang.modules" %% "scala-collection-compat" % ScalaCollectionCompatVersion,
- "com.google.guava" % "guava" % "31.0.1-jre"
- )
-
- lazy val TestDependencies: Seq[ModuleID] = Seq(
- "org.scalatest" %% "scalatest" % ScalaTestVersion,
- "org.scalatestplus" %% "scalatestplus-mockito" % ScalaTestPlusVersion,
- "org.mockito" % "mockito-core" % MockitoCoreVersion,
- "com.exasol" % "exasol-testcontainers" % ExasolTestContainersVersion,
- "com.exasol" % "test-db-builder-java" % ExasolTestDBBuilderVersion,
- "com.exasol" % "hamcrest-resultset-matcher" % ExasolHamcrestMatcherVersion,
- "io.github.embeddedkafka" %% "embedded-kafka-schema-registry" % KafkaSchemaRegistryVersion
- exclude ("log4j", "log4j")
- exclude ("org.jetbrains.kotlin", "kotlin-reflect")
- exclude ("com.fasterxml.jackson.core", "jackson-annotations")
- exclude ("com.fasterxml.jackson.core", "jackson-core")
- exclude ("com.fasterxml.jackson.core", "jackson-databind"),
- "io.confluent" % "kafka-streams-avro-serde" % KafkaAvroSerializerVersion
- exclude ("org.apache.avro", "avro"),
- "com.sksamuel.avro4s" %% "avro4s-core" % "4.0.12"
- ).map(_ % Test)
-
- lazy val AllDependencies: Seq[ModuleID] = RuntimeDependencies ++ TestDependencies
-
- lazy val ExcludedDependencies: Seq[InclExclRule] = Seq(
- ExclusionRule("org.openjfx", "javafx.base")
- )
-
-}
diff --git a/project/IntegrationTestPlugin.scala b/project/IntegrationTestPlugin.scala
deleted file mode 100644
index 6923a3b..0000000
--- a/project/IntegrationTestPlugin.scala
+++ /dev/null
@@ -1,23 +0,0 @@
-package com.exasol.cloudetl.sbt
-
-import sbt._
-import org.scalastyle.sbt.ScalastylePlugin
-import org.scalafmt.sbt.ScalafmtPlugin
-
-/** A plugin for creating an integration test task and settings */
-object IntegrationTestPlugin extends AutoPlugin {
-
- /**
- * Ensure the scalastyle and scalafmt plugins are loaded before integration test plugin, so
- * that, we can enable them for the integration test sources
- */
- override def requires: Plugins = ScalastylePlugin && ScalafmtPlugin
-
- /** Add integration test settings to the projects */
- override val projectSettings: Seq[Setting[_]] =
- IntegrationTestSettings.settings
-
- /** Add the IntegrationTest configuration to the projects */
- override val projectConfigurations: Seq[Configuration] =
- IntegrationTestSettings.configurations
-}
diff --git a/project/IntegrationTestSettings.scala b/project/IntegrationTestSettings.scala
deleted file mode 100644
index fad8271..0000000
--- a/project/IntegrationTestSettings.scala
+++ /dev/null
@@ -1,35 +0,0 @@
-package com.exasol.cloudetl.sbt
-
-import sbt._
-import sbt.Keys._
-import sbt.Def.Setting
-import org.scalastyle.sbt.ScalastylePlugin
-import org.scalafmt.sbt.ScalafmtPlugin
-
-/** Settings for running integration tests */
-object IntegrationTestSettings {
-
- /**
- * Extend Test settings in integration tests, so for instance dependencies in `% test` are also
- * available for integration tests
- */
- lazy val IntegrationTestConfig: Configuration = config("it").extend(Test)
-
- /** Integration test related configurations */
- lazy val configurations: Seq[Configuration] = Seq(IntegrationTestConfig)
-
- /** Integration test settings to add to the projects */
- lazy val settings: Seq[Setting[_]] = {
- val itSettings =
- ScalastylePlugin.rawScalastyleSettings ++
- ScalafmtPlugin.scalafmtConfigSettings ++
- Seq(
- fork := true,
- parallelExecution := false,
- scalaSource := baseDirectory.value / "src/it/scala"
- )
-
- Seq.concat(Defaults.itSettings, inConfig(IntegrationTest)(itSettings))
- }
-
-}
diff --git a/project/Settings.scala b/project/Settings.scala
deleted file mode 100644
index 3e48314..0000000
--- a/project/Settings.scala
+++ /dev/null
@@ -1,91 +0,0 @@
-package com.exasol.cloudetl.sbt
-
-import sbt._
-import sbt.Keys._
-
-import sbtassembly.MergeStrategy
-import sbtassembly.PathList
-import sbtassembly.AssemblyPlugin.autoImport._
-
-import scoverage.ScoverageSbtPlugin.autoImport._
-import org.scalastyle.sbt.ScalastylePlugin.autoImport._
-import wartremover.WartRemover.autoImport.wartremoverErrors
-
-/** A list of (boilerplate) settings for build process */
-object Settings {
-
- def projectSettings(scalaVersion: SettingKey[String]): Seq[Setting[_]] =
- buildSettings(scalaVersion) ++ miscSettings ++ scalaStyleSettings ++ assemblySettings
-
- def buildSettings(scalaVersion: SettingKey[String]): Seq[Setting[_]] = Seq(
- // Compiler settings
- scalacOptions ++= Compilation.compilerFlagsFn(scalaVersion.value),
- Compile / console / scalacOptions := Compilation.consoleFlagsFn(scalaVersion.value),
- javacOptions ++= Compilation.JavacCompilerFlags,
- Compile / compileOrder := CompileOrder.JavaThenScala
- )
-
- def miscSettings(): Seq[Setting[_]] = Seq(
- // Wartremover settings
- Compile / compile / wartremoverErrors := Compilation.WartremoverFlags,
- Test / compile / wartremoverErrors := Compilation.WartremoverTestFlags,
- // General settings
- Global / cancelable := true,
- // Scoverage settings
- coverageOutputHTML := true,
- coverageOutputXML := true,
- coverageOutputCobertura := true,
- coverageFailOnMinimum := false
- )
-
- /** Creates a Scalastyle tasks that run with unit and integration tests. */
- def scalaStyleSettings(): Seq[Setting[_]] = {
- lazy val mainScalastyle = taskKey[Unit]("mainScalastyle")
- lazy val testScalastyle = taskKey[Unit]("testScalastyle")
- Seq(
- scalastyleFailOnError := true,
- Compile / scalastyleConfig := (ThisBuild / baseDirectory).value / "project" / "scalastyle-config.xml",
- Test / scalastyleConfig := (ThisBuild / baseDirectory).value / "project" / "scalastyle-test-config.xml",
- mainScalastyle := (Compile / scalastyle).toTask("").value,
- testScalastyle := (Compile / scalastyle).toTask("").value,
- Test / test := (Test / test).dependsOn(mainScalastyle).value,
- Test / test := (Test / test).dependsOn(testScalastyle).value
- )
- }
-
- /**
- * Creates settings for integration tests.
- *
- * Use only when [[IntegrationTestPlugin]] is enabled.
- */
- def integrationTestSettings(): Seq[Setting[_]] = {
- lazy val mainScalastyle = taskKey[Unit]("mainScalastyle")
- lazy val itTestScalastyle = taskKey[Unit]("itTestScalastyle")
- Seq(
- IntegrationTest / scalastyleConfig := (Test / scalastyleConfig).value,
- IntegrationTest / scalastyleSources := Seq((IntegrationTest / scalaSource).value),
- mainScalastyle := (Compile / scalastyle).toTask("").value,
- itTestScalastyle := (IntegrationTest / scalastyle).toTask("").value,
- IntegrationTest / test := (IntegrationTest / test).dependsOn(mainScalastyle).value,
- IntegrationTest / test := (IntegrationTest / test).dependsOn(itTestScalastyle).value
- )
- }
-
- def assemblySettings(): Seq[Setting[_]] = Seq(
- assembly / test := {},
- assembly / logLevel := Level.Info,
- assembly / assemblyJarName := moduleName.value + "-" + version.value + ".jar",
- assembly / assemblyMergeStrategy := {
- case PathList("META-INF", xs @ _*) => MergeStrategy.discard
- case x => MergeStrategy.first
- },
- assembly / assemblyExcludedJars := {
- val cp = (assembly / fullClasspath).value
- val exludeSet = Set.empty[String]
- cp.filter { jar =>
- exludeSet(jar.data.getName)
- }
- }
- )
-
-}
diff --git a/project/build.properties b/project/build.properties
deleted file mode 100644
index 3161d21..0000000
--- a/project/build.properties
+++ /dev/null
@@ -1 +0,0 @@
-sbt.version=1.6.1
diff --git a/project/plugins.sbt b/project/plugins.sbt
deleted file mode 100644
index 1dd8f2c..0000000
--- a/project/plugins.sbt
+++ /dev/null
@@ -1,46 +0,0 @@
-// Adds a `scalafmt` sbt plugin
-// https://github.com/scalameta/sbt-scalafmt
-addSbtPlugin("org.scalameta" % "sbt-scalafmt" % "2.4.6")
-
-// Adds a `wartremover` a flexible Scala code linting tool
-// http://github.com/puffnfresh/wartremover
-addSbtPlugin("org.wartremover" % "sbt-wartremover" % "2.4.16")
-
-// Adds Contrib Warts
-// http://github.com/wartremover/wartremover-contrib/
-addSbtPlugin("org.wartremover" % "sbt-wartremover-contrib" % "1.3.13")
-
-// Adds a `assembly` task to create a fat JAR with all of its
-// dependencies
-// https://github.com/sbt/sbt-assembly
-addSbtPlugin("com.eed3si9n" % "sbt-assembly" % "1.1.0")
-
-// Adds most common doc api mappings
-// https://github.com/ThoughtWorksInc/sbt-api-mappings
-addSbtPlugin("com.thoughtworks.sbt-api-mappings" % "sbt-api-mappings" % "3.0.0")
-
-// Adds Scala Code Coverage (Scoverage) used during unit tests
-// http://github.com/scoverage/sbt-scoverage
-addSbtPlugin("org.scoverage" % "sbt-scoverage" % "1.9.3")
-
-// Adds SBT Coveralls plugin for uploading Scala code coverage to
-// https://coveralls.io
-// https://github.com/scoverage/sbt-coveralls
-addSbtPlugin("org.scoverage" % "sbt-coveralls" % "1.3.1")
-
-// Adds a `dependencyUpdates` task to check Maven repositories for
-// dependency updates
-// http://github.com/rtimush/sbt-updates
-addSbtPlugin("com.timushev.sbt" % "sbt-updates" % "0.6.1")
-
-// Adds `scalastyle` a coding style checker and enforcer
-// https://github.com/scalastyle/scalastyle-sbt-plugin
-addSbtPlugin("org.scalastyle" % "scalastyle-sbt-plugin" % "1.0.0")
-
-// Adds a `dependencyUpdates` task to check for dependency updates
-// https://github.com/jrudolph/sbt-dependency-graph
-addSbtPlugin("net.virtual-void" % "sbt-dependency-graph" % "0.10.0-RC1")
-
-// Adds a `sbt-reproducible-builds` plugin
-// https://github.com/raboof/sbt-reproducible-builds
-addSbtPlugin("net.bzzt" % "sbt-reproducible-builds" % "0.30")
diff --git a/project/project/plugins.sbt b/project/project/plugins.sbt
deleted file mode 100644
index a0d7e79..0000000
--- a/project/project/plugins.sbt
+++ /dev/null
@@ -1,3 +0,0 @@
-// Used to get updates for plugins
-// see https://github.com/rtimush/sbt-updates/issues/10
-addSbtPlugin("com.timushev.sbt" % "sbt-updates" % "0.6.1")
diff --git a/project/scalastyle-test-config.xml b/project/scalastyle-test-config.xml
deleted file mode 100644
index cef24b7..0000000
--- a/project/scalastyle-test-config.xml
+++ /dev/null
@@ -1,479 +0,0 @@
-<scalastyle commentFilter="enabled">
- <name>Exasol Cloud ETL Scalastyle Configurations (adapted from Spark Scala Style Guide)</name>
- <!-- Whitespace, brace and Scaladoc style checks; single-space rules around the tokens
-      EQUALS, ELSE, TRY, CATCH, FINALLY, LARROW, RARROW and ARROW, COMMA, COLON, IF, DO,
-      WHILE, FOR, MATCH; bans on println, mutable.SynchronizedBuffer, Class.forName,
-      Await.result, Await.ready, JavaConversions, org.apache.commons.lang and extractOpt;
-      import grouping java,scala,exasol,others (javax?\..*, scala\..*, com\.exasol\..*, .*). -->
-</scalastyle>
diff --git a/release_config.yml b/release_config.yml
index 10049a6..45f75e8 100644
--- a/release_config.yml
+++ b/release_config.yml
@@ -1,3 +1,3 @@
release-platforms:
- GitHub
- - Jira
+language: Java
diff --git a/project/scalastyle-config.xml b/scalastyle-config.xml
similarity index 98%
rename from project/scalastyle-config.xml
rename to scalastyle-config.xml
index 3c41d85..add90ca 100644
--- a/project/scalastyle-config.xml
+++ b/scalastyle-config.xml
@@ -334,7 +334,7 @@
java,scala,exasol,others
javax?\..*
diff --git a/scripts/ci.sh b/scripts/ci.sh
deleted file mode 100755
index 4591376..0000000
--- a/scripts/ci.sh
+++ /dev/null
@@ -1,137 +0,0 @@
-#!/usr/bin/env bash
-
-set -o errtrace -o nounset -o pipefail -o errexit
-
-# Goto parent (base) directory of this script
-BASE_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )"/.. && pwd )"
-cd "$BASE_DIR"
-
-DEFAULT_SCALA_VERSION=2.13.8
-
-if [[ -z "${SCALA_VERSION:-}" ]]; then
- echo "Environment variable SCALA_VERSION is not set"
- echo "Using DEFAULT_SCALA_VERSION: $DEFAULT_SCALA_VERSION"
- SCALA_VERSION=$DEFAULT_SCALA_VERSION
-fi
-
-run_self_check () {
- echo "############################################"
- echo "# #"
- echo "# Self Script Check #"
- echo "# #"
- echo "############################################"
- # Don't fail here, failing later at the end when all shell scripts are checked anyway.
- shellcheck "$BASE_DIR"/scripts/ci.sh \
- && echo "Self-check succeeded!" || echo "Self-check failed!"
-}
-
-run_cleaning () {
- echo "############################################"
- echo "# #"
- echo "# Clean and Assembly #"
- echo "# #"
- echo "############################################"
- sbt ++$SCALA_VERSION clean assembly
-}
-
-run_formatting_checks () {
- echo "############################################"
- echo "# #"
- echo "# ScalaFmt Checks #"
- echo "# #"
- echo "############################################"
- sbt ++$SCALA_VERSION scalafmtSbtCheck scalafmtCheckAll
-}
-
-run_unit_tests () {
- echo "############################################"
- echo "# #"
- echo "# Unit Testing #"
- echo "# #"
- echo "############################################"
- sbt ++$SCALA_VERSION coverage test
-}
-
-run_integration_tests () {
- echo "############################################"
- echo "# #"
- echo "# Integration Testing #"
- echo "# #"
- echo "############################################"
- sbt ++$SCALA_VERSION ";coverage ;IntegrationTest / test"
-}
-
-run_coverage_report () {
- echo "############################################"
- echo "# #"
- echo "# Coverage Report #"
- echo "# #"
- echo "############################################"
- sbt ++$SCALA_VERSION coverageReport
-}
-
-run_api_doc () {
- echo "############################################"
- echo "# #"
- echo "# Generating API Documentaion #"
- echo "# #"
- echo "############################################"
- sbt ++$SCALA_VERSION doc
-}
-
-run_dependency_info () {
- echo "############################################"
- echo "# #"
- echo "# Dependency Information #"
- echo "# #"
- echo "############################################"
- sbt ++$SCALA_VERSION dependencyUpdates pluginUpdates dependencyTree
-}
-
-run_shell_check () {
- echo "############################################"
- echo "# #"
- echo "# Shellcheck #"
- echo "# #"
- echo "############################################"
- find . -name "*.sh" -print0 | xargs -n 1 -0 shellcheck
-}
-
-run_assembly () {
- echo "############################################"
- echo "# #"
- echo "# Assembling Binary Artifact #"
- echo "# #"
- echo "############################################"
- sbt ++$SCALA_VERSION assembly
-}
-
-run_clean_worktree_check () {
- echo "############################################"
- echo "# #"
- echo "# Check for Clean Worktree #"
- echo "# #"
- echo "############################################"
- # To be executed after all other steps, to ensure that there is no uncommitted code and there
- # are no untracked files, which means .gitignore is complete and all code is part of a
- # reviewable commit.
- GIT_STATUS="$(git status --porcelain)"
- if [[ $GIT_STATUS ]]; then
- echo "Your worktree is not clean,"
- echo "there is either uncommitted code or there are untracked files:"
- echo "${GIT_STATUS}"
- exit 1
- fi
-}
-
-run_self_check
-run_cleaning
-run_formatting_checks
-run_unit_tests
-run_integration_tests
-run_coverage_report
-run_api_doc
-run_dependency_info
-run_shell_check
-run_assembly
-run_clean_worktree_check
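
The deleted script's final stage is a general technique worth noting: a non-empty `git status --porcelain` means the build produced uncommitted or untracked files. A rough Scala equivalent of that check, assuming a git binary on the PATH:

    import scala.sys.process._

    object CleanWorktreeCheck extends App {
      val status = "git status --porcelain".!!.trim
      if (status.nonEmpty) {
        Console.err.println(s"Worktree is not clean:\n$status")
        sys.exit(1)
      }
    }
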
diff --git a/sonar-project.properties b/sonar-project.properties
deleted file mode 100644
index 1da07ea..0000000
--- a/sonar-project.properties
+++ /dev/null
@@ -1,8 +0,0 @@
-sonar.host.url=https://sonarcloud.io
-sonar.organization=exasol
-sonar.projectKey=com.exasol:kafka-connector-extension
-sonar.sources=src/main/scala
-sonar.tests=src/it,src/test
-sonar.exclusions=project/Dependencies.scala
-sonar.scala.coverage.reportPaths=target/scala-2.13/scoverage-report/scoverage.xml
-sonar.sourceEncoding=UTF-8
diff --git a/src/assembly/all-dependencies.xml b/src/assembly/all-dependencies.xml
new file mode 100644
index 0000000..efe5abc
--- /dev/null
+++ b/src/assembly/all-dependencies.xml
@@ -0,0 +1,22 @@
+<!-- [XML tags reconstructed; original markup was stripped during extraction] -->
+<assembly xmlns="http://maven.apache.org/ASSEMBLY/2.1.0"
+          xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+          xsi:schemaLocation="http://maven.apache.org/ASSEMBLY/2.1.0 http://maven.apache.org/xsd/assembly-2.1.0.xsd">
+    <id>all-dependencies</id>
+    <formats>
+        <format>jar</format>
+    </formats>
+    <includeBaseDirectory>false</includeBaseDirectory>
+    <containerDescriptorHandlers>
+        <containerDescriptorHandler>
+            <handlerName>metaInf-services</handlerName>
+        </containerDescriptorHandler>
+    </containerDescriptorHandlers>
+    <dependencySets>
+        <dependencySet>
+            <useProjectArtifact>true</useProjectArtifact>
+            <scope>runtime</scope>
+            <outputDirectory>/</outputDirectory>
+        </dependencySet>
+    </dependencySets>
+</assembly>
diff --git a/src/main/scala/com/exasol/cloudetl/kafka/KafkaConsumerFactory.scala b/src/main/scala/com/exasol/cloudetl/kafka/KafkaConsumerFactory.scala
index 4731411..b57fac7 100644
--- a/src/main/scala/com/exasol/cloudetl/kafka/KafkaConsumerFactory.scala
+++ b/src/main/scala/com/exasol/cloudetl/kafka/KafkaConsumerFactory.scala
@@ -1,10 +1,5 @@
package com.exasol.cloudetl.kafka
-import java.nio.file.Files
-import java.nio.file.Paths
-
-import com.exasol.ExaMetadata
-import com.exasol.cloudetl.kafka.KafkaConsumerProperties._
import com.exasol.errorreporting.ExaError
import org.apache.kafka.clients.consumer.KafkaConsumer
diff --git a/src/main/scala/com/exasol/cloudetl/kafka/KafkaConsumerProperties.scala b/src/main/scala/com/exasol/cloudetl/kafka/KafkaConsumerProperties.scala
index f574a93..2f71ab0 100644
--- a/src/main/scala/com/exasol/cloudetl/kafka/KafkaConsumerProperties.scala
+++ b/src/main/scala/com/exasol/cloudetl/kafka/KafkaConsumerProperties.scala
@@ -5,7 +5,8 @@ import java.nio.file.Paths
import java.util.Locale
import scala.collection.mutable.{Map => MMap}
-import scala.io.{Codec, Source}
+import scala.io.Codec
+import scala.io.Source
import scala.jdk.CollectionConverters._
import com.exasol.ExaMetadata
@@ -299,7 +300,6 @@ class KafkaConsumerProperties(private val properties: Map[String, String]) exten
}
/** Returns the Kafka consumer properties as Java map. */
- @SuppressWarnings(Array("org.wartremover.warts.NonUnitStatements"))
final def getProperties(): java.util.Map[String, AnyRef] = {
val props = MMap.empty[String, String]
props.put(ENABLE_AUTO_COMMIT.kafkaPropertyName, ENABLE_AUTO_COMMIT.defaultValue)
diff --git a/src/main/scala/com/exasol/cloudetl/kafka/consumer/KafkaRecordConsumer.scala b/src/main/scala/com/exasol/cloudetl/kafka/consumer/KafkaRecordConsumer.scala
index 498f002..6683531 100644
--- a/src/main/scala/com/exasol/cloudetl/kafka/consumer/KafkaRecordConsumer.scala
+++ b/src/main/scala/com/exasol/cloudetl/kafka/consumer/KafkaRecordConsumer.scala
@@ -6,8 +6,8 @@ import java.util.Arrays
import scala.jdk.CollectionConverters._
import com.exasol.ExaIterator
-import com.exasol.cloudetl.kafka._
import com.exasol.cloudetl.kafka.KafkaConnectorConstants._
+import com.exasol.cloudetl.kafka._
import com.exasol.cloudetl.kafka.deserialization._
import com.exasol.errorreporting.ExaError
@@ -75,7 +75,6 @@ class KafkaRecordConsumer(
private[this] type FieldType = Map[FieldSpecification, Seq[Any]]
// This is okay, since it is only overridden in tests.
- @SuppressWarnings(Array("org.wartremover.contrib.warts.UnsafeInheritance"))
protected def getRecordConsumer(): KafkaConsumer[FieldType, FieldType] = {
val topicPartition = new TopicPartition(topic, partitionId)
val recordFields = FieldParser.get(properties.getRecordFields())
diff --git a/src/main/scala/com/exasol/cloudetl/kafka/deserialization/AsStringDeserializer.scala b/src/main/scala/com/exasol/cloudetl/kafka/deserialization/AsStringDeserializer.scala
index 90fc004..d6df9bd 100644
--- a/src/main/scala/com/exasol/cloudetl/kafka/deserialization/AsStringDeserializer.scala
+++ b/src/main/scala/com/exasol/cloudetl/kafka/deserialization/AsStringDeserializer.scala
@@ -1,6 +1,7 @@
package com.exasol.cloudetl.kafka.deserialization
-import org.apache.kafka.common.serialization.{Deserializer, StringDeserializer}
+import org.apache.kafka.common.serialization.Deserializer
+import org.apache.kafka.common.serialization.StringDeserializer
/**
* Emits the string representation of the record as a single-element sequence.
diff --git a/src/main/scala/com/exasol/cloudetl/kafka/deserialization/AvroDeserialization.scala b/src/main/scala/com/exasol/cloudetl/kafka/deserialization/AvroDeserialization.scala
index 38ce627..644408e 100644
--- a/src/main/scala/com/exasol/cloudetl/kafka/deserialization/AvroDeserialization.scala
+++ b/src/main/scala/com/exasol/cloudetl/kafka/deserialization/AvroDeserialization.scala
@@ -1,14 +1,16 @@
package com.exasol.cloudetl.kafka.deserialization
-import scala.jdk.CollectionConverters.MapHasAsJava
-
-import com.exasol.cloudetl.kafka.{KafkaConnectorException, KafkaConsumerProperties}
+import com.exasol.cloudetl.kafka.KafkaConnectorException
+import com.exasol.cloudetl.kafka.KafkaConsumerProperties
import com.exasol.errorreporting.ExaError
-import io.confluent.kafka.serializers.{AbstractKafkaSchemaSerDeConfig, KafkaAvroDeserializer}
+import io.confluent.kafka.serializers.AbstractKafkaSchemaSerDeConfig
+import io.confluent.kafka.serializers.KafkaAvroDeserializer
import org.apache.avro.generic.GenericRecord
import org.apache.kafka.common.serialization.Deserializer
+import scala.jdk.CollectionConverters.MapHasAsJava
+
/**
* Creates deserializers for Avro records that are serialized with the Confluent schema registry.
*/
diff --git a/src/main/scala/com/exasol/cloudetl/kafka/deserialization/DeserializationFactory.scala b/src/main/scala/com/exasol/cloudetl/kafka/deserialization/DeserializationFactory.scala
index 6c3d71d..d5eb62b 100644
--- a/src/main/scala/com/exasol/cloudetl/kafka/deserialization/DeserializationFactory.scala
+++ b/src/main/scala/com/exasol/cloudetl/kafka/deserialization/DeserializationFactory.scala
@@ -1,6 +1,7 @@
package com.exasol.cloudetl.kafka.deserialization
-import com.exasol.cloudetl.kafka.{KafkaConnectorException, KafkaConsumerProperties}
+import com.exasol.cloudetl.kafka.KafkaConnectorException
+import com.exasol.cloudetl.kafka.KafkaConsumerProperties
import com.exasol.errorreporting.ExaError
import org.apache.kafka.common.serialization.Deserializer
diff --git a/src/main/scala/com/exasol/cloudetl/kafka/deserialization/GenericRecordDeserializer.scala b/src/main/scala/com/exasol/cloudetl/kafka/deserialization/GenericRecordDeserializer.scala
index d198c28..4f97e7f 100644
--- a/src/main/scala/com/exasol/cloudetl/kafka/deserialization/GenericRecordDeserializer.scala
+++ b/src/main/scala/com/exasol/cloudetl/kafka/deserialization/GenericRecordDeserializer.scala
@@ -1,12 +1,12 @@
package com.exasol.cloudetl.kafka.deserialization
-import scala.jdk.CollectionConverters.CollectionHasAsScala
-
import com.exasol.common.avro.AvroConverter
import org.apache.avro.generic.GenericRecord
import org.apache.kafka.common.serialization.Deserializer
+import scala.jdk.CollectionConverters.CollectionHasAsScala
+
/**
* Extracts a set of fields from an Avro [[org.apache.avro.generic.GenericRecord]].
*
diff --git a/src/main/scala/com/exasol/cloudetl/kafka/deserialization/JsonDeserialization.scala b/src/main/scala/com/exasol/cloudetl/kafka/deserialization/JsonDeserialization.scala
index 9a13ca5..d0e9f0e 100644
--- a/src/main/scala/com/exasol/cloudetl/kafka/deserialization/JsonDeserialization.scala
+++ b/src/main/scala/com/exasol/cloudetl/kafka/deserialization/JsonDeserialization.scala
@@ -1,9 +1,11 @@
package com.exasol.cloudetl.kafka.deserialization
-import com.exasol.cloudetl.kafka.{KafkaConnectorException, KafkaConsumerProperties}
+import com.exasol.cloudetl.kafka.KafkaConnectorException
+import com.exasol.cloudetl.kafka.KafkaConsumerProperties
import com.exasol.errorreporting.ExaError
-import org.apache.kafka.common.serialization.{Deserializer, StringDeserializer}
+import org.apache.kafka.common.serialization.Deserializer
+import org.apache.kafka.common.serialization.StringDeserializer
/**
* Creates deserializers for JSON records.
diff --git a/src/main/scala/com/exasol/cloudetl/kafka/deserialization/JsonDeserializer.scala b/src/main/scala/com/exasol/cloudetl/kafka/deserialization/JsonDeserializer.scala
index 487527f..5f4a347 100644
--- a/src/main/scala/com/exasol/cloudetl/kafka/deserialization/JsonDeserializer.scala
+++ b/src/main/scala/com/exasol/cloudetl/kafka/deserialization/JsonDeserializer.scala
@@ -1,12 +1,17 @@
package com.exasol.cloudetl.kafka.deserialization
import com.exasol.cloudetl.kafka.KafkaConnectorException
-import com.exasol.cloudetl.kafka.deserialization.JsonDeserializer.{jsonNodeToObject, objectMapper}
+import com.exasol.cloudetl.kafka.deserialization.JsonDeserializer.jsonNodeToObject
+import com.exasol.cloudetl.kafka.deserialization.JsonDeserializer.objectMapper
import com.exasol.errorreporting.ExaError
-import com.fasterxml.jackson.databind.{JsonNode, ObjectMapper}
-import com.fasterxml.jackson.databind.node.JsonNodeType.{BOOLEAN, NUMBER, STRING}
-import org.apache.kafka.common.serialization.{Deserializer, StringDeserializer}
+import com.fasterxml.jackson.databind.JsonNode
+import com.fasterxml.jackson.databind.ObjectMapper
+import com.fasterxml.jackson.databind.node.JsonNodeType.BOOLEAN
+import com.fasterxml.jackson.databind.node.JsonNodeType.NUMBER
+import com.fasterxml.jackson.databind.node.JsonNodeType.STRING
+import org.apache.kafka.common.serialization.Deserializer
+import org.apache.kafka.common.serialization.StringDeserializer
class JsonDeserializer(fieldSpecs: Seq[FieldSpecification], stringDeserializer: StringDeserializer)
extends Deserializer[Map[FieldSpecification, Seq[Any]]] {
@@ -33,7 +38,6 @@ object JsonDeserializer {
private val objectMapper = new ObjectMapper
- @SuppressWarnings(Array("org.wartremover.warts.ToString"))
private def jsonNodeToObject(jsonNode: JsonNode): Any =
jsonNode.getNodeType match {
case STRING => jsonNode.asText()
diff --git a/src/main/scala/com/exasol/cloudetl/kafka/deserialization/StringDeserialization.scala b/src/main/scala/com/exasol/cloudetl/kafka/deserialization/StringDeserialization.scala
index fb1da8e..5b6e324 100644
--- a/src/main/scala/com/exasol/cloudetl/kafka/deserialization/StringDeserialization.scala
+++ b/src/main/scala/com/exasol/cloudetl/kafka/deserialization/StringDeserialization.scala
@@ -1,6 +1,7 @@
package com.exasol.cloudetl.kafka.deserialization
-import com.exasol.cloudetl.kafka.{KafkaConnectorException, KafkaConsumerProperties}
+import com.exasol.cloudetl.kafka.KafkaConnectorException
+import com.exasol.cloudetl.kafka.KafkaConsumerProperties
import com.exasol.errorreporting.ExaError
import org.apache.kafka.common.serialization.Deserializer
diff --git a/src/test/resources/logging.properties b/src/test/resources/logging.properties
new file mode 100644
index 0000000..8c97abe
--- /dev/null
+++ b/src/test/resources/logging.properties
@@ -0,0 +1,6 @@
+handlers=java.util.logging.ConsoleHandler
+.level=INFO
+java.util.logging.ConsoleHandler.level=ALL
+java.util.logging.ConsoleHandler.formatter=java.util.logging.SimpleFormatter
+java.util.logging.SimpleFormatter.format=%1$tF %1$tT.%1$tL [%4$-7s] %5$s %n
+com.exasol.level=ALL
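
The new test logging configuration is picked up via the standard java.util.logging mechanism. A minimal sketch of loading it programmatically, equivalent to passing -Djava.util.logging.config.file on the JVM command line (assumes the file is on the classpath, as under src/test/resources):

    import java.util.logging.LogManager
    import java.util.logging.Logger

    object LoggingSetupSketch extends App {
      val config = getClass.getResourceAsStream("/logging.properties")
      LogManager.getLogManager().readConfiguration(config)
      Logger.getLogger("com.exasol.sketch").info("formatted by SimpleFormatter")
    }
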
diff --git a/src/test/scala/com/exasol/cloudetl/kafka/JsonArgumentMatcher.scala b/src/test/scala/com/exasol/cloudetl/kafka/JsonArgumentMatcher.scala
index 4edb070..cfbb03b 100644
--- a/src/test/scala/com/exasol/cloudetl/kafka/JsonArgumentMatcher.scala
+++ b/src/test/scala/com/exasol/cloudetl/kafka/JsonArgumentMatcher.scala
@@ -4,8 +4,6 @@ import com.exasol.common.json.JsonMapper
import com.fasterxml.jackson.databind.JsonNode
import org.mockito.ArgumentMatcher
-import org.scalactic.TypeCheckedTripleEquals.convertToCheckingEqualizer
-import org.scalatest.Assertions.{===, unconstrainedEquality}
/**
* The order of JSON fields is not deterministic, therefore, we need a custom
@@ -16,5 +14,5 @@ class JsonArgumentMatcher(expectedJson: String) extends ArgumentMatcher[String]
private[this] val expectedJsonNode = JsonMapper.fromJson[JsonNode](expectedJson)
override final def matches(argument: String): Boolean =
- JsonMapper.fromJson[JsonNode](argument) === expectedJsonNode
+ JsonMapper.fromJson[JsonNode](argument).equals(expectedJsonNode)
}
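
The switch from scalatest's === to plain equals works because Jackson's ObjectNode compares fields irrespective of order. A small sketch, assuming jackson-databind on the classpath:

    import com.fasterxml.jackson.databind.ObjectMapper

    object JsonEqualitySketch extends App {
      val mapper = new ObjectMapper()
      val left = mapper.readTree("""{"a": 1, "b": 2}""")
      val right = mapper.readTree("""{"b": 2, "a": 1}""")
      println(left.equals(right)) // true: field order does not affect equality
    }
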
diff --git a/src/test/scala/com/exasol/cloudetl/kafka/KafkaConsumerFactoryTest.scala b/src/test/scala/com/exasol/cloudetl/kafka/KafkaConsumerFactoryTest.scala
index d1f9a87..ef9aa6a 100644
--- a/src/test/scala/com/exasol/cloudetl/kafka/KafkaConsumerFactoryTest.scala
+++ b/src/test/scala/com/exasol/cloudetl/kafka/KafkaConsumerFactoryTest.scala
@@ -4,7 +4,6 @@ import com.exasol.ExaMetadata
import org.apache.kafka.clients.consumer.KafkaConsumer
import org.apache.kafka.common.serialization.VoidDeserializer
-import org.mockito.Mockito.when
import org.scalatest.funsuite.AnyFunSuite
import org.scalatestplus.mockito.MockitoSugar
diff --git a/src/test/scala/com/exasol/cloudetl/kafka/KafkaConsumerPropertiesTest.scala b/src/test/scala/com/exasol/cloudetl/kafka/KafkaConsumerPropertiesTest.scala
index 7bbddb4..8abede0 100644
--- a/src/test/scala/com/exasol/cloudetl/kafka/KafkaConsumerPropertiesTest.scala
+++ b/src/test/scala/com/exasol/cloudetl/kafka/KafkaConsumerPropertiesTest.scala
@@ -3,7 +3,8 @@ package com.exasol.cloudetl.kafka
import java.nio.file.Path
import java.nio.file.Paths
-import com.exasol.{ExaConnectionInformation, ExaMetadata}
+import com.exasol.ExaConnectionInformation
+import com.exasol.ExaMetadata
import com.exasol.cloudetl.kafka.KafkaConsumerProperties._
import org.mockito.Mockito.when
@@ -552,7 +553,6 @@ class KafkaConsumerPropertiesTest extends AnyFunSuite with BeforeAndAfterEach wi
assert(properties.getProperties().get(SSL_TRUSTSTORE_PASSWORD.kafkaPropertyName) === "tspw")
}
- @SuppressWarnings(Array("org.wartremover.warts.DefaultArguments")) // fine in tests
private[this] def getSecurityEnabledConsumerProperties(
securityProtocol: String,
keystoreFile: Option[Path] = None,
diff --git a/src/test/scala/com/exasol/cloudetl/kafka/PathTest.scala b/src/test/scala/com/exasol/cloudetl/kafka/PathTest.scala
index 5741077..f943753 100644
--- a/src/test/scala/com/exasol/cloudetl/kafka/PathTest.scala
+++ b/src/test/scala/com/exasol/cloudetl/kafka/PathTest.scala
@@ -27,7 +27,6 @@ trait PathTest extends AnyFunSuite with BeforeAndAfterEach with MockitoSugar {
private[kafka] var importSpec: ExaImportSpecification = _
private[kafka] var exportSpec: ExaExportSpecification = _
- @SuppressWarnings(Array("org.wartremover.contrib.warts.UnsafeInheritance"))
override def beforeEach(): Unit = {
metadata = mock[ExaMetadata]
importSpec = mock[ExaImportSpecification]
diff --git a/src/test/scala/com/exasol/cloudetl/kafka/consumer/KafkaRecordConsumerTest.scala b/src/test/scala/com/exasol/cloudetl/kafka/consumer/KafkaRecordConsumerTest.scala
index bd3b25f..ac68a48 100644
--- a/src/test/scala/com/exasol/cloudetl/kafka/consumer/KafkaRecordConsumerTest.scala
+++ b/src/test/scala/com/exasol/cloudetl/kafka/consumer/KafkaRecordConsumerTest.scala
@@ -1,9 +1,9 @@
package com.exasol.cloudetl.kafka
import java.time.Duration
-import java.util.{Map => JMap}
import java.util.Arrays
import java.util.Collections
+import java.util.{Map => JMap}
import com.exasol.ExaIterator
import com.exasol.cloudetl.kafka.consumer.KafkaRecordConsumer
@@ -104,8 +104,7 @@ class KafkaRecordConsumerTest extends AnyFunSuite with BeforeAndAfterEach with M
.thenReturn(emptyConsumerRecords)
.thenThrow(new RuntimeException("test should not poll twice"))
when(consumer.position(topicPartition)).thenReturn(4L)
- KafkaImportChecker(consumeAllOffsetsProperties, startOffset = defaultEndOffset - 1)
- .assertEmitCount(0)
+ KafkaImportChecker(consumeAllOffsetsProperties, startOffset = defaultEndOffset - 1).assertEmitCount(0)
}
test("returns with empty records and offset reset") {
@@ -150,7 +149,6 @@ class KafkaRecordConsumerTest extends AnyFunSuite with BeforeAndAfterEach with M
assertExpectedException(new AuthenticationException("authError"), "E-KCE-23", Option("authError"))
}
- @SuppressWarnings(Array("org.wartremover.warts.DefaultArguments"))
private[this] def assertExpectedException(
exception: Exception,
errorCode: String,
@@ -185,7 +183,6 @@ class KafkaRecordConsumerTest extends AnyFunSuite with BeforeAndAfterEach with M
}
// It is alright to use default arguments in tests.
- @SuppressWarnings(Array("org.wartremover.warts.DefaultArguments"))
case class KafkaImportChecker(
additionalProperties: Map[String, String] = Map.empty[String, String],
startOffset: Long = 0L
@@ -193,7 +190,7 @@ class KafkaRecordConsumerTest extends AnyFunSuite with BeforeAndAfterEach with M
final def assertEmitCount(count: Int): Unit = {
val properties = new KafkaConsumerProperties(defaultProperties ++ additionalProperties)
TestKafkaRecordConsumer(properties, startOffset).emit(iterator)
- verify(iterator, times(count)).emit(Seq(any[Object]): _*)
+ verify(iterator, times(count)).emit(any(classOf[Array[Object]]))
}
}
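
The verification change above recurs throughout these tests: instead of expanding a Seq of matchers into varargs, a single array matcher now stands in for the whole Object... list. A minimal sketch of the new shape, assuming ExaIterator.emit is the Java varargs method used in these tests:

    import com.exasol.ExaIterator
    import org.mockito.ArgumentMatchers.any
    import org.mockito.Mockito.times
    import org.mockito.Mockito.verify

    object EmitVerificationSketch {
      // One Array matcher covers the entire varargs list, so the check no longer
      // depends on how many columns were emitted.
      def assertEmitCount(iterator: ExaIterator, count: Int): Unit =
        verify(iterator, times(count)).emit(any(classOf[Array[Object]]))
    }
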
diff --git a/src/test/scala/com/exasol/cloudetl/kafka/deserialization/GenericRecordDeserializerTest.scala b/src/test/scala/com/exasol/cloudetl/kafka/deserialization/GenericRecordDeserializerTest.scala
index 5099ed0..d45f99f 100644
--- a/src/test/scala/com/exasol/cloudetl/kafka/deserialization/GenericRecordDeserializerTest.scala
+++ b/src/test/scala/com/exasol/cloudetl/kafka/deserialization/GenericRecordDeserializerTest.scala
@@ -7,7 +7,8 @@ import com.exasol.common.json.JsonMapper
import com.fasterxml.jackson.databind.JsonNode
import org.apache.avro.SchemaBuilder
-import org.apache.avro.generic.{GenericRecord, GenericRecordBuilder}
+import org.apache.avro.generic.GenericRecord
+import org.apache.avro.generic.GenericRecordBuilder
import org.apache.kafka.common.serialization.Deserializer
import org.mockito.ArgumentMatchers
import org.mockito.Mockito.when
diff --git a/src/test/scala/com/exasol/cloudetl/kafka/deserialization/JsonDeserializerTest.scala b/src/test/scala/com/exasol/cloudetl/kafka/deserialization/JsonDeserializerTest.scala
index 6efa834..c605221 100644
--- a/src/test/scala/com/exasol/cloudetl/kafka/deserialization/JsonDeserializerTest.scala
+++ b/src/test/scala/com/exasol/cloudetl/kafka/deserialization/JsonDeserializerTest.scala
@@ -104,16 +104,15 @@ class JsonDeserializerTest extends AnyFunSuite {
test("must fail when all fields are referenced") {
intercept[KafkaConnectorException] {
- val row = new JsonDeserializer(
- Seq(RecordValueFields),
- new StringDeserializer
- ).deserialize(
- "randomTopic",
- """
- |{
- | "number": 1
- |}""".stripMargin.getBytes(StandardCharsets.UTF_8)
- )
+ new JsonDeserializer(Seq(RecordValueFields), new StringDeserializer)
+ .deserialize(
+ "randomTopic",
+ """
+ |{
+ | "number": 1
+ |}
+ |""".stripMargin.getBytes(StandardCharsets.UTF_8)
+ )
}
}
diff --git a/src/it/scala/com/exasol/cloudetl/kafka/KafkaIntegrationTest.scala b/src/test/scala/com/exasol/cloudetl/kafka/ittests/KafkaIntegrationTest.scala
similarity index 65%
rename from src/it/scala/com/exasol/cloudetl/kafka/KafkaIntegrationTest.scala
rename to src/test/scala/com/exasol/cloudetl/kafka/ittests/KafkaIntegrationTest.scala
index 9898e50..3631084 100644
--- a/src/it/scala/com/exasol/cloudetl/kafka/KafkaIntegrationTest.scala
+++ b/src/test/scala/com/exasol/cloudetl/kafka/ittests/KafkaIntegrationTest.scala
@@ -1,7 +1,9 @@
package com.exasol.cloudetl.kafka
import scala.util.Random
+
import com.exasol.ExaIterator
+
import io.github.embeddedkafka.schemaregistry.EmbeddedKafka
import org.mockito.ArgumentMatchers.argThat
import org.mockito.Mockito.when
@@ -21,11 +23,7 @@ trait KafkaIntegrationTest
var properties: Map[String, String] = _
val bootstrapServers = "localhost:6001"
-
- val defaultProperties = Map(
- "BOOTSTRAP_SERVERS" -> bootstrapServers,
- "TABLE_NAME" -> "exasolTable"
- )
+ val defaultProperties = Map("BOOTSTRAP_SERVERS" -> bootstrapServers, "TABLE_NAME" -> "exasolTable")
def getTopic(): String =
Random.alphanumeric.take(4).mkString
@@ -47,20 +45,18 @@ trait KafkaIntegrationTest
()
}
- final def mockExasolIterator(
- params: Map[String, String],
- partitions: Seq[Int],
- offsets: Seq[Long]
- ): ExaIterator = {
+ final def mockExasolIterator(params: Map[String, String], partitions: Seq[Int], offsets: Seq[Long]): ExaIterator = {
val mockedIterator = mock[ExaIterator]
when(mockedIterator.getString(0)).thenReturn(KafkaConsumerProperties(params).mkString())
- val brokersHead :: brokersTail = Seq.fill(partitions.size - 1)(true) ++ Seq(false)
- when(mockedIterator.next()).thenReturn(brokersHead, brokersTail: _*)
- val partitionsHead :: partitionsTail = partitions.map(Integer.valueOf)
- when(mockedIterator.getInteger(1)).thenReturn(partitionsHead, partitionsTail: _*)
- val offsetsHead :: offsetsTail = offsets.map(java.lang.Long.valueOf)
- when(mockedIterator.getLong(2)).thenReturn(offsetsHead, offsetsTail: _*)
+ val brokers = Seq.fill(partitions.size - 1)(true) ++ Seq(false)
+ when(mockedIterator.next()).thenReturn(brokers(0), brokers.tail: _*)
+
+ val partitionIds = partitions.map(Integer.valueOf(_))
+ when(mockedIterator.getInteger(1)).thenReturn(partitionIds(0), partitionIds.tail: _*)
+
+ val offsetIds = offsets.map(java.lang.Long.valueOf(_))
+ when(mockedIterator.getLong(2)).thenReturn(offsetIds(0), offsetIds.tail: _*)
mockedIterator
}
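
The rewritten stubbing avoids binding `head :: tail` in a val, which newer Scala compilers flag because the pattern throws MatchError on an empty list. A sketch of the indexing-based style now used, with a hypothetical NumberSource trait standing in for the mocked iterator:

    import org.mockito.Mockito.mock
    import org.mockito.Mockito.when

    object ThenReturnChainSketch {
      trait NumberSource { def next(): Integer }

      def stubSequence(values: Seq[Integer]): NumberSource = {
        val source = mock(classOf[NumberSource])
        // First element on the first call, then the remaining elements in order.
        when(source.next()).thenReturn(values(0), values.tail: _*)
        source
      }
    }
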
diff --git a/src/it/scala/com/exasol/cloudetl/kafka/KafkaTopicDataImporterAvroIT.scala b/src/test/scala/com/exasol/cloudetl/kafka/ittests/KafkaTopicDataImporterAvroIT.scala
similarity index 87%
rename from src/it/scala/com/exasol/cloudetl/kafka/KafkaTopicDataImporterAvroIT.scala
rename to src/test/scala/com/exasol/cloudetl/kafka/ittests/KafkaTopicDataImporterAvroIT.scala
index b0157b2..9d0ea57 100644
--- a/src/it/scala/com/exasol/cloudetl/kafka/KafkaTopicDataImporterAvroIT.scala
+++ b/src/test/scala/com/exasol/cloudetl/kafka/ittests/KafkaTopicDataImporterAvroIT.scala
@@ -5,7 +5,8 @@ import scala.jdk.CollectionConverters._
import com.exasol.cloudetl.kafka.KafkaTopicDataImporterAvroIT.schemaRegistryUrl
import io.confluent.kafka.serializers.KafkaAvroSerializer
-import org.apache.avro.{AvroRuntimeException, Schema}
+import org.apache.avro.AvroRuntimeException
+import org.apache.avro.Schema
import org.apache.avro.specific.SpecificRecordBase
import org.apache.kafka.common.serialization.Serializer
@@ -20,9 +21,14 @@ class KafkaTopicDataImporterAvroIT extends KafkaIntegrationTest {
serializer.configure(properties.asJava, false)
serializer.asInstanceOf[Serializer[AvroRecord]]
}
+
+}
+
+object KafkaTopicDataImporterAvroIT {
+ val schemaRegistryUrl = "http://localhost:6002"
}
-case class AvroRecord(var col_str: String, var col_int: Int, var col_long: Long) extends SpecificRecordBase {
+final case class AvroRecord(var col_str: String, var col_int: Int, var col_long: Long) extends SpecificRecordBase {
private[this] val avroRecordSchema =
new Schema.Parser().parse(
@@ -53,15 +59,11 @@ case class AvroRecord(var col_str: String, var col_int: Int, var col_long: Long)
case (utf8: org.apache.avro.util.Utf8) => utf8.toString
case _ => value.asInstanceOf[String]
}
- case 1 =>
- col_int = value.asInstanceOf[Int]
- case 2 =>
- col_long = value.asInstanceOf[Long]
+ case 1 => col_int = value.asInstanceOf[Int]
+ case 2 => col_long = value.asInstanceOf[Long]
case _ => throw new AvroRuntimeException(s"Unknown index $index!")
}
override def getSchema(): Schema = avroRecordSchema
-}
-object KafkaTopicDataImporterAvroIT {
- val schemaRegistryUrl = "http://localhost:6002"
+
}
diff --git a/src/it/scala/com/exasol/cloudetl/kafka/KafkaTopicDataImporterAvroToColumnsIT.scala b/src/test/scala/com/exasol/cloudetl/kafka/ittests/KafkaTopicDataImporterAvroToColumnsIT.scala
similarity index 71%
rename from src/it/scala/com/exasol/cloudetl/kafka/KafkaTopicDataImporterAvroToColumnsIT.scala
rename to src/test/scala/com/exasol/cloudetl/kafka/ittests/KafkaTopicDataImporterAvroToColumnsIT.scala
index cc5189f..ca62de0 100644
--- a/src/it/scala/com/exasol/cloudetl/kafka/KafkaTopicDataImporterAvroToColumnsIT.scala
+++ b/src/test/scala/com/exasol/cloudetl/kafka/ittests/KafkaTopicDataImporterAvroToColumnsIT.scala
@@ -1,19 +1,24 @@
package com.exasol.cloudetl.kafka
-import java.lang.{Integer => JInt, Long => JLong}
+import java.lang.{Integer => JInt}
+import java.lang.{Long => JLong}
import java.util.Collections
-import scala.jdk.CollectionConverters.{CollectionHasAsScala, MapHasAsJava, MapHasAsScala}
-
-import com.exasol.{ExaDataTypeException, ExaMetadata}
+import com.exasol.ExaDataTypeException
+import com.exasol.ExaMetadata
import org.apache.kafka.clients.admin.RecordsToDelete
import org.apache.kafka.common.TopicPartition
import org.mockito.ArgumentMatchers._
-import org.mockito.Mockito.{times, verify, when}
+import org.mockito.Mockito.times
+import org.mockito.Mockito.verify
+import org.mockito.Mockito.when
import org.mockito.invocation.InvocationOnMock
import org.mockito.stubbing.Answer
+import scala.jdk.CollectionConverters.CollectionHasAsScala
+import scala.jdk.CollectionConverters.MapHasAsJava
+
class KafkaTopicDataImporterAvroToColumnsIT extends KafkaTopicDataImporterAvroIT {
test("run emits records from starting initial offset") {
@@ -21,25 +26,25 @@ class KafkaTopicDataImporterAvroToColumnsIT extends KafkaTopicDataImporterAvroIT
publishToKafka(topic, AvroRecord("abc", 3, 13))
publishToKafka(topic, AvroRecord("hello", 4, 14))
- val iter = mockExasolIterator(properties, Seq(0), Seq(-1))
- KafkaTopicDataImporter.run(getMockedMetadata(), iter)
+ val mockedIterator = mockExasolIterator(properties, Seq(0), Seq(-1))
+ KafkaTopicDataImporter.run(getMockedMetadata(), mockedIterator)
- verify(iter, times(2)).emit(Seq(any[Object]): _*)
- verify(iter, times(2)).emit(
+ verify(mockedIterator, times(2)).emit(any(classOf[Array[Object]]))
+ verify(mockedIterator, times(2)).emit(
anyString(),
anyInt().asInstanceOf[JInt],
anyLong().asInstanceOf[JLong],
anyInt().asInstanceOf[JInt],
anyLong().asInstanceOf[JLong]
)
- verify(iter, times(1)).emit(
+ verify(mockedIterator, times(1)).emit(
"abc",
JInt.valueOf(3),
JLong.valueOf(13),
JInt.valueOf(0),
JLong.valueOf(0)
)
- verify(iter, times(1)).emit(
+ verify(mockedIterator, times(1)).emit(
"hello",
JInt.valueOf(4),
JLong.valueOf(14),
@@ -50,44 +55,37 @@ class KafkaTopicDataImporterAvroToColumnsIT extends KafkaTopicDataImporterAvroIT
test("run emits records when the starting offset of the topic is greater zero") {
createCustomTopic(topic)
-
val startingOffset = 12
- 0.until(startingOffset)
- .foreach { recordNr =>
- publishToKafka(
- topic,
- AvroRecord(
- "Some record that we delete" +
- "to ensure the offset does not start at zero",
- recordNr,
- 13
- )
- )
- }
+ 0.until(startingOffset).foreach { recordNr =>
+ publishToKafka(
+ topic,
+ AvroRecord("Some record that we delete to ensure the offset does not start at zero", recordNr, 13)
+ )
+ }
deleteRecordsFromTopic(topic, startingOffset)
publishToKafka(topic, AvroRecord("abc", 3, 13))
publishToKafka(topic, AvroRecord("hello", 4, 14))
- val iter = mockExasolIterator(properties, Seq(0), Seq(-1))
- KafkaTopicDataImporter.run(getMockedMetadata(), iter)
+ val mockedIterator = mockExasolIterator(properties, Seq(0), Seq(-1))
+ KafkaTopicDataImporter.run(getMockedMetadata(), mockedIterator)
- verify(iter, times(2)).emit(Seq(any[Object]): _*)
- verify(iter, times(2)).emit(
+ verify(mockedIterator, times(2)).emit(any(classOf[Array[Object]]))
+ verify(mockedIterator, times(2)).emit(
anyString(),
anyInt().asInstanceOf[JInt],
anyLong().asInstanceOf[JLong],
anyInt().asInstanceOf[JInt],
anyLong().asInstanceOf[JLong]
)
- verify(iter, times(1)).emit(
+ verify(mockedIterator, times(1)).emit(
"abc",
JInt.valueOf(3),
JLong.valueOf(13),
JInt.valueOf(0),
JLong.valueOf(startingOffset + 0L)
)
- verify(iter, times(1)).emit(
+ verify(mockedIterator, times(1)).emit(
"hello",
JInt.valueOf(4),
JLong.valueOf(14),
@@ -104,25 +102,25 @@ class KafkaTopicDataImporterAvroToColumnsIT extends KafkaTopicDataImporterAvroIT
publishToKafka(topic, AvroRecord("xyz", 13, 23))
// records at 0, 1 are already read, committed
- val iter = mockExasolIterator(properties, Seq(0), Seq(1))
- KafkaTopicDataImporter.run(getMockedMetadata(), iter)
+ val mockedIterator = mockExasolIterator(properties, Seq(0), Seq(1))
+ KafkaTopicDataImporter.run(getMockedMetadata(), mockedIterator)
- verify(iter, times(2)).emit(Seq(any[Object]): _*)
- verify(iter, times(2)).emit(
+ verify(mockedIterator, times(2)).emit(any(classOf[Array[Object]]))
+ verify(mockedIterator, times(2)).emit(
anyString(),
anyInt().asInstanceOf[JInt],
anyLong().asInstanceOf[JLong],
anyInt().asInstanceOf[JInt],
anyLong().asInstanceOf[JLong]
)
- verify(iter, times(1)).emit(
+ verify(mockedIterator, times(1)).emit(
"def",
JInt.valueOf(7),
JLong.valueOf(17),
JInt.valueOf(0),
JLong.valueOf(2)
)
- verify(iter, times(1)).emit(
+ verify(mockedIterator, times(1)).emit(
"xyz",
JInt.valueOf(13),
JLong.valueOf(23),
@@ -145,11 +143,11 @@ class KafkaTopicDataImporterAvroToColumnsIT extends KafkaTopicDataImporterAvroIT
publishToKafka(topic, AvroRecord("last", 11, 22))
// consume in two batches, each with 2 records
- val iter = mockExasolIterator(newProperties, Seq(0), Seq(-1))
- KafkaTopicDataImporter.run(getMockedMetadata(), iter)
+ val mockedIterator = mockExasolIterator(newProperties, Seq(0), Seq(-1))
+ KafkaTopicDataImporter.run(getMockedMetadata(), mockedIterator)
- verify(iter, times(4)).emit(Seq(any[Object]): _*)
- verify(iter, times(4)).emit(
+ verify(mockedIterator, times(4)).emit(any(classOf[Array[Object]]))
+ verify(mockedIterator, times(4)).emit(
anyString(),
anyInt().asInstanceOf[JInt],
anyLong().asInstanceOf[JLong],
@@ -166,14 +164,14 @@ class KafkaTopicDataImporterAvroToColumnsIT extends KafkaTopicDataImporterAvroIT
"CONSUME_ALL_OFFSETS" -> "true"
)
createCustomTopic(topic)
- for (i <- 1 to 5) {
+ for { i <- 1 to 5 } {
publishToKafka(topic, AvroRecord(s"$i", i, i.toLong))
}
- val iter = mockExasolIterator(newProperties, Seq(0), Seq(-1))
- KafkaTopicDataImporter.run(getMockedMetadata(), iter)
+ val mockedIterator = mockExasolIterator(newProperties, Seq(0), Seq(-1))
+ KafkaTopicDataImporter.run(getMockedMetadata(), mockedIterator)
- verify(iter, times(5)).emit(Seq(any[Object]): _*)
- verify(iter, times(5)).emit(
+ verify(mockedIterator, times(5)).emit(any(classOf[Array[Object]]))
+ verify(mockedIterator, times(5)).emit(
anyString(),
anyInt().asInstanceOf[JInt],
anyLong().asInstanceOf[JLong],
@@ -219,11 +217,9 @@ class KafkaTopicDataImporterAvroToColumnsIT extends KafkaTopicDataImporterAvroIT
withAdminClient { client =>
val allPartitions = client
.describeTopics(Collections.singletonList(topic))
- .values
- .asScala
- .values
- .head
+ .allTopicNames()
.get()
+ .get(topic)
.partitions()
.asScala
.map(tpi => new TopicPartition(topic, tpi.partition()))
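
The admin-client rewrite at the end of this file tracks a Kafka API change: DescribeTopicsResult now exposes allTopicNames(), a single future over a map from topic name to description, replacing the per-topic futures previously reached through .values. A sketch of the new call shape, assuming Kafka clients 3.x:

    import java.util.Collections

    import scala.jdk.CollectionConverters._

    import org.apache.kafka.clients.admin.Admin
    import org.apache.kafka.common.TopicPartition

    object TopicPartitionsSketch {
      def partitionsOf(client: Admin, topic: String): Seq[TopicPartition] =
        client
          .describeTopics(Collections.singletonList(topic))
          .allTopicNames() // one future over Map[topic name, TopicDescription]
          .get()
          .get(topic)
          .partitions()
          .asScala
          .map(info => new TopicPartition(topic, info.partition()))
          .toSeq
    }
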
diff --git a/src/it/scala/com/exasol/cloudetl/kafka/KafkaTopicDataImporterAvroToJsonIT.scala b/src/test/scala/com/exasol/cloudetl/kafka/ittests/KafkaTopicDataImporterAvroToJsonIT.scala
similarity index 68%
rename from src/it/scala/com/exasol/cloudetl/kafka/KafkaTopicDataImporterAvroToJsonIT.scala
rename to src/test/scala/com/exasol/cloudetl/kafka/ittests/KafkaTopicDataImporterAvroToJsonIT.scala
index 86858ae..8414954 100644
--- a/src/it/scala/com/exasol/cloudetl/kafka/KafkaTopicDataImporterAvroToJsonIT.scala
+++ b/src/test/scala/com/exasol/cloudetl/kafka/ittests/KafkaTopicDataImporterAvroToJsonIT.scala
@@ -7,42 +7,38 @@ import com.exasol.ExaMetadata
import org.mockito.ArgumentMatchers
import org.mockito.ArgumentMatchers._
-import org.mockito.Mockito.{times, verify, when}
+import org.mockito.Mockito.times
+import org.mockito.Mockito.verify
+import org.mockito.Mockito.when
import org.mockito.invocation.InvocationOnMock
import org.mockito.stubbing.Answer
class KafkaTopicDataImporterAvroToJsonIT extends KafkaTopicDataImporterAvroIT {
test("run emits records from starting initial offset") {
- val newProperties = properties ++ Map(
- "AS_JSON_DOC" -> "true"
- )
+ val newProperties = properties ++ Map("AS_JSON_DOC" -> "true")
createCustomTopic(topic)
publishToKafka(topic, AvroRecord("{'Value':'abc'}", 3, 13))
publishToKafka(topic, AvroRecord("{'Value':'hello'}", 4, 14))
publishToKafka(topic, AvroRecord("{'Value':'xyz'}", 5, 15))
- val iter = mockExasolIterator(newProperties, Seq(0), Seq(-1))
- KafkaTopicDataImporter.run(getMockedMetadata(), iter)
+ val mockedIterator = mockExasolIterator(newProperties, Seq(0), Seq(-1))
+ KafkaTopicDataImporter.run(getMockedMetadata(), mockedIterator)
- verify(iter, times(3)).emit(Seq(any[Object]): _*)
- verify(iter, times(3)).emit(
- anyString(),
- anyInt().asInstanceOf[JInt],
- anyLong().asInstanceOf[JLong]
- )
+ verify(mockedIterator, times(3)).emit(any(classOf[Array[Object]]))
+ verify(mockedIterator, times(3)).emit(anyString(), anyInt().asInstanceOf[JInt], anyLong().asInstanceOf[JLong])
- verify(iter, times(1)).emit(
+ verify(mockedIterator, times(1)).emit(
jsonMatcher("{\"col_str\": \"{'Value':'abc'}\", \"col_int\": 3, \"col_long\": 13}"),
ArgumentMatchers.eq(JInt.valueOf(0)),
ArgumentMatchers.eq(JLong.valueOf(0))
)
- verify(iter, times(1)).emit(
+ verify(mockedIterator, times(1)).emit(
jsonMatcher("{\"col_str\": \"{'Value':'hello'}\", \"col_int\": 4, \"col_long\": 14}"),
ArgumentMatchers.eq(JInt.valueOf(0)),
ArgumentMatchers.eq(JLong.valueOf(1))
)
- verify(iter, times(1)).emit(
+ verify(mockedIterator, times(1)).emit(
jsonMatcher("{\"col_str\": \"{'Value':'xyz'}\", \"col_int\": 5, \"col_long\": 15}"),
ArgumentMatchers.eq(JInt.valueOf(0)),
ArgumentMatchers.eq(JLong.valueOf(2))
@@ -50,9 +46,7 @@ class KafkaTopicDataImporterAvroToJsonIT extends KafkaTopicDataImporterAvroIT {
}
test("run emits records starting from provided offset") {
- val newProperties = properties ++ Map(
- "AS_JSON_DOC" -> "true"
- )
+ val newProperties = properties ++ Map("AS_JSON_DOC" -> "true")
createCustomTopic(topic)
publishToKafka(topic, AvroRecord("{'Value':'abc'}", 3, 13))
publishToKafka(topic, AvroRecord("{'Value':'hello'}", 4, 14))
@@ -60,21 +54,17 @@ class KafkaTopicDataImporterAvroToJsonIT extends KafkaTopicDataImporterAvroIT {
publishToKafka(topic, AvroRecord("{'Value':'xyz'}", 13, 23))
// records at 0, 1 are already read, committed
- val iter = mockExasolIterator(newProperties, Seq(0), Seq(1))
- KafkaTopicDataImporter.run(getMockedMetadata(), iter)
-
- verify(iter, times(2)).emit(Seq(any[Object]): _*)
- verify(iter, times(2)).emit(
- anyString(),
- anyInt().asInstanceOf[JInt],
- anyLong().asInstanceOf[JLong]
- )
- verify(iter, times(1)).emit(
+ val mockedIterator = mockExasolIterator(newProperties, Seq(0), Seq(1))
+ KafkaTopicDataImporter.run(getMockedMetadata(), mockedIterator)
+
+ verify(mockedIterator, times(2)).emit(any(classOf[Array[Object]]))
+ verify(mockedIterator, times(2)).emit(anyString(), anyInt().asInstanceOf[JInt], anyLong().asInstanceOf[JLong])
+ verify(mockedIterator, times(1)).emit(
jsonMatcher("{\"col_str\": \"{'Value':'def'}\", \"col_int\": 7, \"col_long\": 17}"),
ArgumentMatchers.eq(JInt.valueOf(0)),
ArgumentMatchers.eq(JLong.valueOf(2))
)
- verify(iter, times(1)).emit(
+ verify(mockedIterator, times(1)).emit(
jsonMatcher("{\"col_str\": \"{'Value':'xyz'}\", \"col_int\": 13, \"col_long\": 23}"),
ArgumentMatchers.eq(JInt.valueOf(0)),
ArgumentMatchers.eq(JLong.valueOf(3))
@@ -95,15 +85,11 @@ class KafkaTopicDataImporterAvroToJsonIT extends KafkaTopicDataImporterAvroIT {
publishToKafka(topic, AvroRecord("{'Value':'xyz'}", 13, 23))
// consume in two batches, each with 2 records
- val iter = mockExasolIterator(newProperties, Seq(0), Seq(-1))
- KafkaTopicDataImporter.run(getMockedMetadata(), iter)
-
- verify(iter, times(4)).emit(Seq(any[Object]): _*)
- verify(iter, times(4)).emit(
- anyString(),
- anyInt().asInstanceOf[JInt],
- anyLong().asInstanceOf[JLong]
- )
+ val mockedIterator = mockExasolIterator(newProperties, Seq(0), Seq(-1))
+ KafkaTopicDataImporter.run(getMockedMetadata(), mockedIterator)
+
+ verify(mockedIterator, times(4)).emit(any(classOf[Array[Object]]))
+ verify(mockedIterator, times(4)).emit(anyString(), anyInt().asInstanceOf[JInt], anyLong().asInstanceOf[JLong])
}
private[this] def getMockedMetadata(): ExaMetadata = {
diff --git a/src/it/scala/com/exasol/cloudetl/kafka/KafkaTopicDataImporterJsonToColumnsIT.scala b/src/test/scala/com/exasol/cloudetl/kafka/ittests/KafkaTopicDataImporterJsonToColumnsIT.scala
similarity index 66%
rename from src/it/scala/com/exasol/cloudetl/kafka/KafkaTopicDataImporterJsonToColumnsIT.scala
rename to src/test/scala/com/exasol/cloudetl/kafka/ittests/KafkaTopicDataImporterJsonToColumnsIT.scala
index ad501df..2944dfa 100644
--- a/src/it/scala/com/exasol/cloudetl/kafka/KafkaTopicDataImporterJsonToColumnsIT.scala
+++ b/src/test/scala/com/exasol/cloudetl/kafka/ittests/KafkaTopicDataImporterJsonToColumnsIT.scala
@@ -1,12 +1,16 @@
package com.exasol.cloudetl.kafka
-import java.lang.{Integer => JInt, Long => JLong}
+import java.lang.{Integer => JInt}
+import java.lang.{Long => JLong}
import com.exasol.ExaMetadata
-import org.apache.kafka.common.serialization.{Serializer, StringSerializer}
+import org.apache.kafka.common.serialization.Serializer
+import org.apache.kafka.common.serialization.StringSerializer
import org.mockito.ArgumentMatchers._
-import org.mockito.Mockito.{times, verify, when}
+import org.mockito.Mockito.times
+import org.mockito.Mockito.verify
+import org.mockito.Mockito.when
import org.mockito.invocation.InvocationOnMock
import org.mockito.stubbing.Answer
@@ -42,10 +46,10 @@ class KafkaTopicDataImporterJsonToColumnsIT extends KafkaIntegrationTest {
|}""".stripMargin
)
- val iter = mockExasolIterator(properties, Seq(0), Seq(-1))
- val meta = mock[ExaMetadata]
- when(meta.getOutputColumnCount()).thenReturn(5L)
- when(meta.getOutputColumnType(anyInt())).thenAnswer(new Answer[Class[_]]() {
+ val mockedIterator = mockExasolIterator(properties, Seq(0), Seq(-1))
+ val exasolMetadata = mock[ExaMetadata]
+ when(exasolMetadata.getOutputColumnCount()).thenReturn(5L)
+ when(exasolMetadata.getOutputColumnType(anyInt())).thenAnswer(new Answer[Class[_]]() {
override def answer(invocation: InvocationOnMock): Class[_] = {
val columnIndex = invocation.getArguments()(0).asInstanceOf[JInt]
Seq(
@@ -57,17 +61,17 @@ class KafkaTopicDataImporterJsonToColumnsIT extends KafkaIntegrationTest {
)(columnIndex)
}
})
- KafkaTopicDataImporter.run(meta, iter)
+ KafkaTopicDataImporter.run(exasolMetadata, mockedIterator)
- verify(iter, times(2)).emit(Seq(any[Object]): _*)
- verify(iter, times(1)).emit(
+ verify(mockedIterator, times(2)).emit(any(classOf[Array[Object]]))
+ verify(mockedIterator, times(1)).emit(
"val1",
JInt.valueOf(11),
"""{"field":"value"}""",
JInt.valueOf(0),
JLong.valueOf(0)
)
- verify(iter, times(1)).emit(
+ verify(mockedIterator, times(1)).emit(
"val2",
JInt.valueOf(22),
null,
@@ -75,4 +79,5 @@ class KafkaTopicDataImporterJsonToColumnsIT extends KafkaIntegrationTest {
JLong.valueOf(1)
)
}
+
}
diff --git a/src/it/scala/com/exasol/cloudetl/kafka/KafkaTopicDataImporterJsonToJsonIT.scala b/src/test/scala/com/exasol/cloudetl/kafka/ittests/KafkaTopicDataImporterJsonToJsonIT.scala
similarity index 74%
rename from src/it/scala/com/exasol/cloudetl/kafka/KafkaTopicDataImporterJsonToJsonIT.scala
rename to src/test/scala/com/exasol/cloudetl/kafka/ittests/KafkaTopicDataImporterJsonToJsonIT.scala
index b869d81..7abcd70 100644
--- a/src/it/scala/com/exasol/cloudetl/kafka/KafkaTopicDataImporterJsonToJsonIT.scala
+++ b/src/test/scala/com/exasol/cloudetl/kafka/ittests/KafkaTopicDataImporterJsonToJsonIT.scala
@@ -1,13 +1,17 @@
package com.exasol.cloudetl.kafka
-import java.lang.{Integer => JInt, Long => JLong}
+import java.lang.{Integer => JInt}
+import java.lang.{Long => JLong}
import com.exasol.ExaMetadata
-import org.apache.kafka.common.serialization.{Serializer, StringSerializer}
+import org.apache.kafka.common.serialization.Serializer
+import org.apache.kafka.common.serialization.StringSerializer
import org.mockito.ArgumentMatchers
import org.mockito.ArgumentMatchers._
-import org.mockito.Mockito.{times, verify, when}
+import org.mockito.Mockito.times
+import org.mockito.Mockito.verify
+import org.mockito.Mockito.when
import org.mockito.invocation.InvocationOnMock
import org.mockito.stubbing.Answer
@@ -40,7 +44,7 @@ class KafkaTopicDataImporterJsonToJsonIT extends KafkaIntegrationTest {
publishToKafka(topic, inputRecord1)
publishToKafka(topic, inputRecord2)
- val iter = mockExasolIterator(properties, Seq(0), Seq(-1))
+ val mockedIterator = mockExasolIterator(properties, Seq(0), Seq(-1))
val meta = mock[ExaMetadata]
when(meta.getOutputColumnCount()).thenReturn(3L)
when(meta.getOutputColumnType(anyInt())).thenAnswer(new Answer[Class[_]]() {
@@ -53,18 +57,19 @@ class KafkaTopicDataImporterJsonToJsonIT extends KafkaIntegrationTest {
)(columnIndex)
}
})
- KafkaTopicDataImporter.run(meta, iter)
+ KafkaTopicDataImporter.run(meta, mockedIterator)
- verify(iter, times(2)).emit(Seq(any[Object]): _*)
- verify(iter, times(1)).emit(
+ verify(mockedIterator, times(2)).emit(any(classOf[Array[Object]]))
+ verify(mockedIterator, times(1)).emit(
jsonMatcher(inputRecord1),
ArgumentMatchers.eq(JInt.valueOf(0)),
ArgumentMatchers.eq(JLong.valueOf(0))
)
- verify(iter, times(1)).emit(
+ verify(mockedIterator, times(1)).emit(
jsonMatcher(inputRecord2),
ArgumentMatchers.eq(JInt.valueOf(0)),
ArgumentMatchers.eq(JLong.valueOf(1))
)
}
+
}
diff --git a/src/it/scala/com/exasol/cloudetl/kafka/KafkaTopicDataImporterNullValueIT.scala b/src/test/scala/com/exasol/cloudetl/kafka/ittests/KafkaTopicDataImporterNullValueIT.scala
similarity index 66%
rename from src/it/scala/com/exasol/cloudetl/kafka/KafkaTopicDataImporterNullValueIT.scala
rename to src/test/scala/com/exasol/cloudetl/kafka/ittests/KafkaTopicDataImporterNullValueIT.scala
index fa2421e..405904e 100644
--- a/src/it/scala/com/exasol/cloudetl/kafka/KafkaTopicDataImporterNullValueIT.scala
+++ b/src/test/scala/com/exasol/cloudetl/kafka/ittests/KafkaTopicDataImporterNullValueIT.scala
@@ -5,18 +5,19 @@ import java.lang.{Long => JLong}
import scala.jdk.CollectionConverters._
-import com.exasol.ExaDataTypeException
import com.exasol.ExaMetadata
import com.exasol.cloudetl.kafka.KafkaTopicDataImporterAvroIT.schemaRegistryUrl
-import org.apache.kafka.common.TopicPartition
+import org.apache.kafka.clients.producer.ProducerRecord
import org.apache.kafka.common.serialization.Serializer
import org.apache.kafka.common.serialization.StringSerializer
-import org.apache.kafka.clients.producer.ProducerRecord
import org.mockito.ArgumentMatchers._
-import org.mockito.Mockito.{times, verify, when}
+import org.mockito.Mockito.times
+import org.mockito.Mockito.verify
+import org.mockito.Mockito.when
class KafkaTopicDataImporterNullValueIT extends KafkaIntegrationTest {
+
override def additionalProperties: Map[String, String] =
Map("SCHEMA_REGISTRY_URL" -> schemaRegistryUrl)
@@ -33,14 +34,14 @@ class KafkaTopicDataImporterNullValueIT extends KafkaIntegrationTest {
publishToKafka(new ProducerRecord(topic, null: String))
publishToKafka(new ProducerRecord(topic, null: String))
- val iter = mockExasolIterator(properties, Seq(0), Seq(-1))
- val meta = mock[ExaMetadata]
- when(meta.getOutputColumnCount()).thenReturn(4)
- KafkaTopicDataImporter.run(meta, iter)
+ val mockedIterator = mockExasolIterator(properties, Seq(0), Seq(-1))
+ val exasolMetadata = mock[ExaMetadata]
+ when(exasolMetadata.getOutputColumnCount()).thenReturn(4)
+ KafkaTopicDataImporter.run(exasolMetadata, mockedIterator)
- verify(iter, times(2)).emit(Seq(any[Object]): _*)
- verify(iter, times(1)).emit(null, null, JInt.valueOf(0), JLong.valueOf(0))
- verify(iter, times(1)).emit(null, null, JInt.valueOf(0), JLong.valueOf(1))
+ verify(mockedIterator, times(2)).emit(any(classOf[Array[Object]]))
+ verify(mockedIterator, times(1)).emit(null, null, JInt.valueOf(0), JLong.valueOf(0))
+ verify(mockedIterator, times(1)).emit(null, null, JInt.valueOf(0), JLong.valueOf(1))
}
}
diff --git a/src/it/scala/com/exasol/cloudetl/kafka/KafkaTopicMetadataReaderIT.scala b/src/test/scala/com/exasol/cloudetl/kafka/ittests/KafkaTopicMetadataReaderIT.scala
similarity index 96%
rename from src/it/scala/com/exasol/cloudetl/kafka/KafkaTopicMetadataReaderIT.scala
rename to src/test/scala/com/exasol/cloudetl/kafka/ittests/KafkaTopicMetadataReaderIT.scala
index d929011..6c5c4ae 100644
--- a/src/it/scala/com/exasol/cloudetl/kafka/KafkaTopicMetadataReaderIT.scala
+++ b/src/test/scala/com/exasol/cloudetl/kafka/ittests/KafkaTopicMetadataReaderIT.scala
@@ -10,9 +10,6 @@ import com.exasol.ExaMetadata
import org.mockito.ArgumentMatchers._
import org.mockito.Mockito._
-@SuppressWarnings(
- Array("org.wartremover.warts.AsInstanceOf", "org.wartremover.contrib.warts.SymbolicName")
-)
class KafkaTopicMetadataReaderIT extends KafkaIntegrationTest {
override def additionalProperties: Map[String, String] =
@@ -51,8 +48,7 @@ class KafkaTopicMetadataReaderIT extends KafkaIntegrationTest {
verify(iter, times(1)).emit(JInt.valueOf(2), JLong.valueOf(-1))
}
- // Do not emit partitionId maxOffset pairs if partitionId is not
- // available in topic partitions
+ // Do not emit partitionId maxOffset pairs if partitionId is not available in topic partitions
test("run emits partitionId maxOffset pairs with fewer topic partitions") {
createCustomTopic(topic, partitions = 2)
val iter = mockExasolIterator(properties, Seq(1, 3), Seq(7, 17))
diff --git a/src/it/scala/com/exasol/cloudetl/kafka/RecordFieldSpecificationIT.scala b/src/test/scala/com/exasol/cloudetl/kafka/ittests/RecordFieldSpecificationIT.scala
similarity index 75%
rename from src/it/scala/com/exasol/cloudetl/kafka/RecordFieldSpecificationIT.scala
rename to src/test/scala/com/exasol/cloudetl/kafka/ittests/RecordFieldSpecificationIT.scala
index cadb34a..e37416a 100644
--- a/src/it/scala/com/exasol/cloudetl/kafka/RecordFieldSpecificationIT.scala
+++ b/src/test/scala/com/exasol/cloudetl/kafka/ittests/RecordFieldSpecificationIT.scala
@@ -1,8 +1,7 @@
package com.exasol.cloudetl.kafka
-import java.lang.{Integer => JInt, Long => JLong}
-
-import scala.jdk.CollectionConverters.CollectionHasAsScala
+import java.lang.{Integer => JInt}
+import java.lang.{Long => JLong}
import com.exasol.ExaMetadata
import com.exasol.common.json.JsonMapper
@@ -10,52 +9,22 @@ import com.exasol.common.json.JsonMapper
import com.fasterxml.jackson.databind.JsonNode
import org.apache.kafka.clients.producer.ProducerRecord
import org.apache.kafka.common.serialization.StringSerializer
-import org.mockito.{ArgumentCaptor, Mockito}
+import org.mockito.ArgumentCaptor
import org.mockito.ArgumentMatchers.anyInt
-import org.mockito.Mockito.{times, when}
+import org.mockito.Mockito.times
+import org.mockito.Mockito.verify
+import org.mockito.Mockito.when
import org.mockito.invocation.InvocationOnMock
import org.mockito.stubbing.Answer
/**
- * A test class that tests the {@code RECORD_FIELDS} integration and not the offset and partition
- * handling.
+ * Tests the {@code RECORD_FIELDS} integration rather than the offset and partition handling.
*/
class RecordFieldSpecificationIT extends KafkaTopicDataImporterAvroIT {
private[this] val customRecord = AvroRecord("abc", 3, 13)
-
implicit val stringSerializer = new StringSerializer
- private[this] def assertJson(actual: String, expected: String): Unit = {
- assert(JsonMapper.fromJson[JsonNode](actual) === JsonMapper.fromJson[JsonNode](expected))
- ()
- }
-
- private[this] def getEmittedValues(recordFieldsStmt: String, outputColumnTypes: Seq[Class[_]]): Seq[Any] = {
- val iter = mockExasolIterator(
- properties ++ Map("RECORD_FIELDS" -> recordFieldsStmt),
- Seq(0),
- Seq(-1)
- )
- val outputColumnTypesWithMeta = outputColumnTypes ++ Seq(classOf[JInt], classOf[JLong])
- val columnCount = outputColumnTypesWithMeta.size
- val meta = mock[ExaMetadata]
- when(meta.getOutputColumnCount()).thenReturn(columnCount)
- when(meta.getOutputColumnType(anyInt())).thenAnswer(new Answer[Class[_]]() {
- override def answer(invocation: InvocationOnMock): Class[_] = {
- val columnIndex = invocation.getArguments()(0).asInstanceOf[JInt]
- outputColumnTypesWithMeta(columnIndex)
- }
- })
- KafkaTopicDataImporter.run(meta, iter)
-
- val captor = ArgumentCaptor.forClass[Any, Any](classOf[Any])
- Mockito.verify(iter, times(1)).emit(captor.capture())
-
- val valuesEmitted = captor.getAllValues.asScala
- valuesEmitted.slice(0, valuesEmitted.size - 2).toSeq
- }
-
test("default must be 'value.*': All fields from the record") {
createCustomTopic(topic)
publishToKafka(topic, customRecord)
@@ -115,11 +84,7 @@ class RecordFieldSpecificationIT extends KafkaTopicDataImporterAvroIT {
test("must handle null values combined with a present key and timestamps") {
createCustomTopic(topic)
publishToKafka(topic, "theKey", null.asInstanceOf[AvroRecord])
-
- val values = getEmittedValues(
- "key, timestamp, value",
- Seq(classOf[String], classOf[JLong], classOf[Any])
- )
+ val values = getEmittedValues("key, timestamp, value", Seq(classOf[String], classOf[JLong], classOf[Any]))
assert(values.size === 3)
assert(values(0) === "theKey")
assert(values(1).isInstanceOf[JLong])
@@ -130,21 +95,38 @@ class RecordFieldSpecificationIT extends KafkaTopicDataImporterAvroIT {
createCustomTopic(topic)
val recordTimestamp = 123456L
withProducer[String, AvroRecord, Unit] { producer =>
- producer.send(
- new ProducerRecord[String, AvroRecord](
- topic,
- 0,
- recordTimestamp,
- "record_key",
- customRecord
- )
- )
+ producer.send(new ProducerRecord[String, AvroRecord](topic, 0, recordTimestamp, "record_key", customRecord))
()
}
- val values = getEmittedValues(
- "timestamp, value.str_col, key",
- Seq(classOf[JLong], classOf[Any], classOf[String])
- )
+ val values = getEmittedValues("timestamp, value.str_col, key", Seq(classOf[JLong], classOf[Any], classOf[String]))
assert(values === Seq(recordTimestamp, null, "record_key"))
}
+
+ private[this] def assertJson(actual: String, expected: String): Unit = {
+ assert(JsonMapper.fromJson[JsonNode](actual) === JsonMapper.fromJson[JsonNode](expected))
+ ()
+ }
+
+ private[this] def getEmittedValues(recordFieldsStmt: String, outputColumnTypes: Seq[Class[_]]): Seq[Any] = {
+ val mockedIterator = mockExasolIterator(properties ++ Map("RECORD_FIELDS" -> recordFieldsStmt), Seq(0), Seq(-1))
+ val outputColumnTypesWithMeta = outputColumnTypes ++ Seq(classOf[JInt], classOf[JLong])
+ val columnCount = outputColumnTypesWithMeta.size
+ val exasolMetadata = mock[ExaMetadata]
+ when(exasolMetadata.getOutputColumnCount()).thenReturn(columnCount)
+ when(exasolMetadata.getOutputColumnType(anyInt())).thenAnswer(new Answer[Class[_]]() {
+ override def answer(invocation: InvocationOnMock): Class[_] = {
+ val columnIndex = invocation.getArguments()(0).asInstanceOf[JInt]
+ outputColumnTypesWithMeta(columnIndex)
+ }
+ })
+ KafkaTopicDataImporter.run(exasolMetadata, mockedIterator)
+
+ val captor = ArgumentCaptor.forClass(classOf[Array[Any]])
+ verify(mockedIterator, times(1)).emit(captor.capture())
+
+ val valuesEmitted = captor.getValue().toSeq
+ // Don't need the last two values (partition and offset)
+ valuesEmitted.slice(0, valuesEmitted.size - 2)
+ }
+
}
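
Capturing the varargs call with an Array captor mirrors the matcher change above: getValue() returns the whole argument array in one piece. A sketch of the slicing used here, where the trailing two entries are the partition id and offset metadata columns:

    import org.mockito.ArgumentCaptor

    object VarargsCaptorSketch {
      def emittedColumns(captor: ArgumentCaptor[Array[Any]]): Seq[Any] =
        captor.getValue().toSeq.dropRight(2) // drop partition id and offset
    }
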
diff --git a/src/it/scala/com/exasol/cloudetl/kafka/docker/BaseDockerIntegrationTest.scala b/src/test/scala/com/exasol/cloudetl/kafka/ittests/docker/BaseDockerIntegrationTest.scala
similarity index 90%
rename from src/it/scala/com/exasol/cloudetl/kafka/docker/BaseDockerIntegrationTest.scala
rename to src/test/scala/com/exasol/cloudetl/kafka/ittests/docker/BaseDockerIntegrationTest.scala
index 4c8283b..8cca4dc 100644
--- a/src/it/scala/com/exasol/cloudetl/kafka/docker/BaseDockerIntegrationTest.scala
+++ b/src/test/scala/com/exasol/cloudetl/kafka/ittests/docker/BaseDockerIntegrationTest.scala
@@ -14,9 +14,8 @@ import org.scalatest.BeforeAndAfterAll
import org.scalatest.funsuite.AnyFunSuite
trait BaseDockerIntegrationTest extends AnyFunSuite with BeforeAndAfterAll {
- private[this] val JAR_DIRECTORY_PATTERN = "scala-"
private[this] val JAR_NAME_PATTERN = "exasol-kafka-connector-extension-"
- private[this] val DEFAULT_EXASOL_DOCKER_IMAGE = "7.1.4"
+ private[this] val DEFAULT_EXASOL_DOCKER_IMAGE = "7.1.21"
val network = DockerNamedNetwork("kafka-it-tests", true)
val exasolContainer = {
@@ -59,10 +58,8 @@ trait BaseDockerIntegrationTest extends AnyFunSuite with BeforeAndAfterAll {
private[this] def getConnection(): java.sql.Connection =
exasolContainer.createConnection("")
- private[this] def getAssembledJarName(): String = {
- val jarDir = findFileOrDirectory("target", JAR_DIRECTORY_PATTERN)
- findFileOrDirectory("target/" + jarDir, JAR_NAME_PATTERN)
- }
+ private[this] def getAssembledJarName(): String =
+ findFileOrDirectory("target/", JAR_NAME_PATTERN)
private[this] def createKafkaImportDeploymentScripts(): Unit = {
val jarPath = s"/buckets/bfsdefault/default/$assembledJarName"
@@ -97,8 +94,7 @@ trait BaseDockerIntegrationTest extends AnyFunSuite with BeforeAndAfterAll {
}
private[this] def uploadJarToBucket(): Unit = {
- val jarDir = findFileOrDirectory("target", JAR_DIRECTORY_PATTERN)
- val jarPath = Paths.get("target", jarDir, assembledJarName)
+ val jarPath = Paths.get("target", assembledJarName)
exasolContainer.getDefaultBucket.uploadFile(jarPath, assembledJarName)
}
diff --git a/src/it/scala/com/exasol/cloudetl/kafka/docker/BaseKafkaDockerIntegrationTest.scala b/src/test/scala/com/exasol/cloudetl/kafka/ittests/docker/BaseKafkaDockerIntegrationTest.scala
similarity index 97%
rename from src/it/scala/com/exasol/cloudetl/kafka/docker/BaseKafkaDockerIntegrationTest.scala
rename to src/test/scala/com/exasol/cloudetl/kafka/ittests/docker/BaseKafkaDockerIntegrationTest.scala
index a062420..74fcb76 100644
--- a/src/it/scala/com/exasol/cloudetl/kafka/docker/BaseKafkaDockerIntegrationTest.scala
+++ b/src/test/scala/com/exasol/cloudetl/kafka/ittests/docker/BaseKafkaDockerIntegrationTest.scala
@@ -3,7 +3,6 @@ package com.exasol.cloudetl.kafka
import java.util.List
import java.util.Map
import java.util.concurrent.TimeUnit
-import java.util.stream.Stream
import com.exasol.cloudetl.kafka.serde._
@@ -15,12 +14,10 @@ import org.apache.kafka.common.serialization.StringSerializer
import org.testcontainers.containers.Container
import org.testcontainers.containers.GenericContainer
import org.testcontainers.containers.wait.strategy.Wait
-import org.testcontainers.lifecycle.Startables
import org.testcontainers.utility.DockerImageName
trait BaseKafkaDockerIntegrationTest extends BaseDockerIntegrationTest {
-
- private[this] val DEFAULT_CONFLUENT_PLATFORM_VERSION = "7.0.1"
+ private[this] val DEFAULT_CONFLUENT_PLATFORM_VERSION = "7.4.1"
private[this] val ZOOKEEPER_PORT = 2181
private[this] val KAFKA_EXTERNAL_PORT = 29092
private[this] val SCHEMA_REGISTRY_PORT = 8081
diff --git a/src/it/scala/com/exasol/cloudetl/kafka/docker/DockerNamedNetwork.scala b/src/test/scala/com/exasol/cloudetl/kafka/ittests/docker/DockerNamedNetwork.scala
similarity index 89%
rename from src/it/scala/com/exasol/cloudetl/kafka/docker/DockerNamedNetwork.scala
rename to src/test/scala/com/exasol/cloudetl/kafka/ittests/docker/DockerNamedNetwork.scala
index d6c0a26..72c1388 100644
--- a/src/it/scala/com/exasol/cloudetl/kafka/docker/DockerNamedNetwork.scala
+++ b/src/test/scala/com/exasol/cloudetl/kafka/ittests/docker/DockerNamedNetwork.scala
@@ -7,14 +7,12 @@ import org.junit.runner.Description
import org.junit.runners.model.Statement
import org.testcontainers.DockerClientFactory
import org.testcontainers.containers.Network
-import org.testcontainers.utility.ResourceReaper
/**
* A reusable docker network.
*
- * At the moment, the docker container {@code reuse} is ignored when a
- * network is attached to a container. This class creates docker network
- * that can be attached to reusable container.
+ * At the moment, the docker container {@code reuse} flag is ignored when a network is attached to a container. This
+ * class creates a docker network that can be attached to a reusable container.
*
* @param name name of the docker network
* @param reuse boolean value to indicate reusability
@@ -32,7 +30,8 @@ class DockerNamedNetwork(name: String, reuse: Boolean) extends Network with Lazy
+ s"the network manually using 'docker network rm $id'."
)
} else {
- ResourceReaper.instance().removeNetworkById(id)
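+      // Remove the network directly via the shared docker client; the trailing () keeps the branch's type Unit.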
+ DockerClientFactory.lazyClient().removeNetworkCmd(id).exec()
+ ()
}
override def apply(base: Statement, description: Description): Statement =
@@ -77,4 +76,5 @@ object DockerNamedNetwork {
}
namedNetworks.get(name)
}
+
}
diff --git a/src/it/scala/com/exasol/cloudetl/kafka/docker/KafkaContainer.scala b/src/test/scala/com/exasol/cloudetl/kafka/ittests/docker/KafkaContainer.scala
similarity index 79%
rename from src/it/scala/com/exasol/cloudetl/kafka/docker/KafkaContainer.scala
rename to src/test/scala/com/exasol/cloudetl/kafka/ittests/docker/KafkaContainer.scala
index 07b3d6f..a72ebb3 100644
--- a/src/it/scala/com/exasol/cloudetl/kafka/docker/KafkaContainer.scala
+++ b/src/test/scala/com/exasol/cloudetl/kafka/ittests/docker/KafkaContainer.scala
@@ -6,7 +6,7 @@ import org.testcontainers.utility.DockerImageName
/**
* A Kafka container that exposes a fixed port.
*
- * This is required since [[GenericContainer]] does not provide public API for exposing fixed ports.
+ * This is required since {@link GenericContainer} does not provide a public API for exposing fixed ports.
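+ *
+ * Usage sketch (the image name is for illustration only): {{{ new KafkaContainer(DockerImageName.parse("confluentinc/cp-kafka:7.4.1"), 29092) }}}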
*/
class KafkaContainer(image: DockerImageName, fixedPort: Int = 29092) extends GenericContainer(image) {
diff --git a/src/it/scala/com/exasol/cloudetl/kafka/docker/KafkaImportIT.scala b/src/test/scala/com/exasol/cloudetl/kafka/ittests/docker/KafkaImportIT.scala
similarity index 98%
rename from src/it/scala/com/exasol/cloudetl/kafka/docker/KafkaImportIT.scala
rename to src/test/scala/com/exasol/cloudetl/kafka/ittests/docker/KafkaImportIT.scala
index b009beb..2504304 100644
--- a/src/it/scala/com/exasol/cloudetl/kafka/docker/KafkaImportIT.scala
+++ b/src/test/scala/com/exasol/cloudetl/kafka/ittests/docker/KafkaImportIT.scala
@@ -2,12 +2,13 @@ package com.exasol.cloudetl.kafka
import java.sql.ResultSet
import java.sql.Timestamp
-import java.util.{TimeZone, UUID}
+import java.util.TimeZone
+import java.util.UUID
-import com.exasol.cloudetl.kafka.serde._
import com.exasol.cloudetl.kafka.serde.AvroRecordFormat.Implicits._
import com.exasol.cloudetl.kafka.serde.AvroSerdes.Implicits._
import com.exasol.cloudetl.kafka.serde.PrimitiveSerdes.Implicits._
+import com.exasol.cloudetl.kafka.serde._
import com.exasol.dbbuilder.dialects.Table
import com.exasol.matcher.ResultSetStructureMatcher.table
import com.exasol.matcher.TypeMatchMode.NO_JAVA_TYPE_CHECK
@@ -41,7 +42,7 @@ class KafkaImportIT extends BaseKafkaDockerIntegrationTest with BeforeAndAfterEa
}
test("import longs as timestamp values") {
- case class TimestampRecord(timestamp: Long)
+ final case class TimestampRecord(timestamp: Long)
implicit val timestampRecordValueSerde = valueAvroSerde[TimestampRecord](getExternalSchemaRegistryUrl())
TimeZone.setDefault(exasolContainer.getTimeZone())
diff --git a/src/it/scala/com/exasol/cloudetl/kafka/docker/SchemaRegistryContainer.scala b/src/test/scala/com/exasol/cloudetl/kafka/ittests/docker/SchemaRegistryContainer.scala
similarity index 100%
rename from src/it/scala/com/exasol/cloudetl/kafka/docker/SchemaRegistryContainer.scala
rename to src/test/scala/com/exasol/cloudetl/kafka/ittests/docker/SchemaRegistryContainer.scala
diff --git a/src/it/scala/com/exasol/cloudetl/kafka/serde/AvroRecordFormat.scala b/src/test/scala/com/exasol/cloudetl/kafka/ittests/serde/AvroRecordFormat.scala
similarity index 99%
rename from src/it/scala/com/exasol/cloudetl/kafka/serde/AvroRecordFormat.scala
rename to src/test/scala/com/exasol/cloudetl/kafka/ittests/serde/AvroRecordFormat.scala
index 78c0e85..6089660 100644
--- a/src/it/scala/com/exasol/cloudetl/kafka/serde/AvroRecordFormat.scala
+++ b/src/test/scala/com/exasol/cloudetl/kafka/ittests/serde/AvroRecordFormat.scala
@@ -1,14 +1,12 @@
package com.exasol.cloudetl.kafka.serde
-import com.sksamuel.avro4s.Encoder
import com.sksamuel.avro4s.Decoder
+import com.sksamuel.avro4s.Encoder
import com.sksamuel.avro4s.RecordFormat
import com.sksamuel.avro4s.SchemaFor
trait AvroRecordFormat {
-
implicit def avroRecordFormat[T: Encoder: Decoder: SchemaFor]: RecordFormat[T] = RecordFormat[T]
-
}
object AvroRecordFormat {
diff --git a/src/it/scala/com/exasol/cloudetl/kafka/serde/AvroSerdes.scala b/src/test/scala/com/exasol/cloudetl/kafka/ittests/serde/AvroSerdes.scala
similarity index 97%
rename from src/it/scala/com/exasol/cloudetl/kafka/serde/AvroSerdes.scala
rename to src/test/scala/com/exasol/cloudetl/kafka/ittests/serde/AvroSerdes.scala
index 7a4b358..82caf2a 100644
--- a/src/it/scala/com/exasol/cloudetl/kafka/serde/AvroSerdes.scala
+++ b/src/test/scala/com/exasol/cloudetl/kafka/ittests/serde/AvroSerdes.scala
@@ -3,12 +3,14 @@ package com.exasol.cloudetl.kafka.serde
import java.util.Map
import com.sksamuel.avro4s.RecordFormat
+import io.confluent.kafka.serializers.AbstractKafkaSchemaSerDeConfig
+import io.confluent.kafka.streams.serdes.avro.GenericAvroSerde
+import org.apache.kafka.common.serialization.Deserializer
import org.apache.kafka.common.serialization.Serde
import org.apache.kafka.common.serialization.Serdes
import org.apache.kafka.common.serialization.Serializer
-import org.apache.kafka.common.serialization.Deserializer
-import io.confluent.kafka.serializers.AbstractKafkaSchemaSerDeConfig
-import io.confluent.kafka.streams.serdes.avro.GenericAvroSerde
+
+import scala.language.implicitConversions
trait AvroSerdes {
diff --git a/src/it/scala/com/exasol/cloudetl/kafka/serde/PrimitiveSerdes.scala b/src/test/scala/com/exasol/cloudetl/kafka/ittests/serde/PrimitiveSerdes.scala
similarity index 99%
rename from src/it/scala/com/exasol/cloudetl/kafka/serde/PrimitiveSerdes.scala
rename to src/test/scala/com/exasol/cloudetl/kafka/ittests/serde/PrimitiveSerdes.scala
index ba41788..5a928a6 100644
--- a/src/it/scala/com/exasol/cloudetl/kafka/serde/PrimitiveSerdes.scala
+++ b/src/test/scala/com/exasol/cloudetl/kafka/ittests/serde/PrimitiveSerdes.scala
@@ -3,11 +3,9 @@ package com.exasol.cloudetl.kafka.serde
import org.apache.kafka.common.serialization.Serdes
trait PrimitiveSerdes {
-
implicit val stringValueSerde = Serdes.String().asInstanceOf[ValueSerde[String]]
// Other types (int, long, float, double) can be added similarly when required
-
}
object PrimitiveSerdes {
diff --git a/src/it/scala/com/exasol/cloudetl/kafka/serde/package.scala b/src/test/scala/com/exasol/cloudetl/kafka/ittests/serde/package.scala
similarity index 99%
rename from src/it/scala/com/exasol/cloudetl/kafka/serde/package.scala
rename to src/test/scala/com/exasol/cloudetl/kafka/ittests/serde/package.scala
index fa81bee..80661e1 100644
--- a/src/it/scala/com/exasol/cloudetl/kafka/serde/package.scala
+++ b/src/test/scala/com/exasol/cloudetl/kafka/ittests/serde/package.scala
@@ -6,11 +6,9 @@ import org.apache.kafka.common.serialization.Serializer
sealed trait HasValueSerde
package object serde {
-
type ValueSerde[T] = Serde[T] with HasValueSerde
object ValueSerializer {
def apply[T](implicit valueSerde: ValueSerde[T]): Serializer[T] = valueSerde.serializer()
}
-
}
diff --git a/versionsMavenPluginRules.xml b/versionsMavenPluginRules.xml
new file mode 100644
index 0000000..35bd03d
--- /dev/null
+++ b/versionsMavenPluginRules.xml
@@ -0,0 +1,18 @@
+<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
+<!--
+  Rules for the versions-maven-plugin: pre-release versions (alpha, beta, RC, CR and milestone builds)
+-->
+<ruleset comparisonMethod="maven" xmlns="http://mojo.codehaus.org/versions-maven-plugin/rule/2.0.0"
+         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://mojo.codehaus.org/versions-maven-plugin/rule/2.0.0 http://mojo.codehaus.org/versions-maven-plugin/xsd/rule-2.0.0.xsd">
+    <ignoreVersions>
+        <ignoreVersion type="regex">(?i).*Alpha(?:-?[\d.]+)?</ignoreVersion>
+        <ignoreVersion type="regex">(?i).*a(?:-?[\d.]+)?</ignoreVersion>
+        <ignoreVersion type="regex">(?i).*Beta(?:-?[\d.]+)?</ignoreVersion>
+        <ignoreVersion type="regex">(?i).*-B(?:-?[\d.]+)?</ignoreVersion>
+        <ignoreVersion type="regex">(?i).*-b(?:-?[\d.]+)?</ignoreVersion>
+        <ignoreVersion type="regex">(?i).*RC(?:-?[\d.]+)?</ignoreVersion>
+        <ignoreVersion type="regex">(?i).*CR(?:-?[\d.]+)?</ignoreVersion>
+        <ignoreVersion type="regex">(?i).*M(?:-?[\d.]+)?</ignoreVersion>
+    </ignoreVersions>
+</ruleset>
\ No newline at end of file