diff --git a/.evergreen/config.yml b/.evergreen/config.yml index d435a01c..43878a66 100644 --- a/.evergreen/config.yml +++ b/.evergreen/config.yml @@ -262,7 +262,7 @@ functions: working_dir: "src" script: | ${PREPARE_SHELL} - SCALA_VERSION="${SCALA}" SPARK_VERSION="${SPARK}" .evergreen/run-static-checks.sh + .evergreen/run-static-checks.sh "run unit tests": - command: shell.exec @@ -326,15 +326,6 @@ post: variables: - &run-on run_on: "ubuntu2004-small" - - &exclude-spec - # Scala 2.12 only works with 3.1.3 spark - # Scala 2.13 doesn't work on 3.1.3 spark - exclude_spec: - - { scala: "2.12", spark: "3.2.4" } - - { scala: "2.12", spark: "3.3.4" } - - { scala: "2.12", spark: "3.4.2" } - - { scala: "2.12", spark: "3.5.1" } - - { scala: "2.13", spark: "3.1.3" } tasks: - name: "static-checks-task" @@ -412,76 +403,29 @@ axes: variables: TOPOLOGY: "sharded_cluster" - - id: "scala" - display_name: "Scala" - values: - - id: "2.12" - display_name: "Scala 2.12" - variables: - SCALA: "2.12" - - id: "2.13" - display_name: "Scala 2.13" - variables: - SCALA: "2.13" - - - id: "spark" - display_name: "Spark" - values: - - id: "3.1.3" - display_name: "Spark 3.1.3" - variables: - SPARK: "3.1.3" - - id: "3.2.4" - display_name: "Spark 3.2.4" - variables: - SPARK: "3.2.4" - - id: "3.3.4" - display_name: "Spark 3.3.4" - variables: - SPARK: "3.3.4" - - id: "3.4.2" - display_name: "Spark 3.4.2" - variables: - SPARK: "3.4.2" - - id: "3.5.1" - display_name: "Spark 3.5.1" - variables: - SPARK: "3.5.1" - buildvariants: - - matrix_name: "static-checks" - matrix_spec: { scala: "*", spark: "*" } - display_name: "Static checks: ${scala} ${spark}" - <<: *exclude-spec + - name: "static-checks" + display_name: "Static checks" tags: ["static-check"] tasks: - name: "static-checks-task" - - matrix_name: "unit-tests" - matrix_spec: { scala: "*", spark: "*" } - <<: *exclude-spec - display_name: "Units tests: ${scala} ${spark}" + - name: "unit-tests" + display_name: "Unit tests" tags: ["unit-test"] tasks: 
- name: "unit-test-task" - - matrix_name: "integration-tests-2-12" - matrix_spec: { scala: "2.12", spark: "3.1.3", version: ["7.0"], topology: "replicaset"} - display_name: "Integration tests: ${scala} ${spark} ${version} ${topology}" - tags: ["integration-test"] - tasks: - - name: "integration-test-task" - - matrix_name: "integration-tests-2-13" - matrix_spec: { scala: "2.13", spark: ["3.2.4", "3.5.1"], version: ["4.4", "7.0", "latest"], topology: "replicaset" } - display_name: "Integration tests: ${scala} ${spark} ${version} ${topology}" + matrix_spec: { version: ["4.4", "7.0", "latest"], topology: "replicaset" } + display_name: "Integration tests: ${version} ${topology}" tags: ["integration-test"] tasks: - name: "integration-test-task" - matrix_name: "integration-tests-2-13-sharded" - matrix_spec: { scala: "2.13", spark: [ "3.2.4", "3.5.1" ], version: ["4.4", "7.0", "latest" ], topology: "sharded" } + matrix_spec: { version: ["4.4", "7.0", "latest" ], topology: "sharded" } - display_name: "Integration tests: ${scala} ${spark} ${version} ${topology}" + display_name: "Integration tests: ${version} ${topology}" tags: [ "sharded-integration-test" ] tasks: diff --git a/.evergreen/publish.sh b/.evergreen/publish.sh index 9c182ed2..29bbdc42 100755 --- a/.evergreen/publish.sh +++ b/.evergreen/publish.sh @@ -10,8 +10,8 @@ set -o errexit # Exit the script with error if any of the commands fail echo "Publishing" -export JDK11="/opt/java/jdk11" -export JAVA_HOME=$JDK11 +export JDK17="/opt/java/jdk17" +export JAVA_HOME=$JDK17 RELEASE=${RELEASE:false} @@ -29,5 +29,4 @@ fi SYSTEM_PROPERTIES="-Dorg.gradle.internal.publish.checksums.insecure=true" ./gradlew -version -./gradlew ${SYSTEM_PROPERTIES} --stacktrace --info ${TASK} -DscalaVersion=2.12 -DsparkVersion=3.1.2 -./gradlew ${SYSTEM_PROPERTIES} --stacktrace --info ${TASK} -DscalaVersion=2.13 -DsparkVersion=3.2.2 +./gradlew ${SYSTEM_PROPERTIES} --stacktrace --info ${TASK} diff --git a/.evergreen/run-integration-tests.sh b/.evergreen/run-integration-tests.sh index a5a6810a..cabcfd8d 100755 
--- a/.evergreen/run-integration-tests.sh +++ b/.evergreen/run-integration-tests.sh @@ -5,13 +5,9 @@ set -o errexit # Exit the script with error if any of the commands fail # Supported/used environment variables: # MONGODB_URI Set the suggested connection MONGODB_URI (including credentials and topology info) -# SCALA_VERSION The Scala version to compile with -# SPARK_VERSION The spark version to test against MONGODB_URI=${MONGODB_URI:-} -SCALA_VERSION=${SCALA_VERSION:-2.12} -SPARK_VERSION=${SPARK_VERSION:-3.1.2} -export JAVA_HOME="/opt/java/jdk11" +export JAVA_HOME="/opt/java/jdk17" ############################################ # Main Program # @@ -21,4 +17,4 @@ export JAVA_HOME="/opt/java/jdk11" echo "Running tests connecting to $MONGODB_URI on JDK${JAVA_VERSION}" ./gradlew -version -./gradlew -Dorg.mongodb.test.uri=${MONGODB_URI} --stacktrace --info integrationTest -DscalaVersion=$SCALA_VERSION -DsparkVersion=$SPARK_VERSION +./gradlew -Dorg.mongodb.test.uri=${MONGODB_URI} --stacktrace --info integrationTest diff --git a/.evergreen/run-sharded-integration-tests.sh b/.evergreen/run-sharded-integration-tests.sh index f4838693..9b88e516 100755 --- a/.evergreen/run-sharded-integration-tests.sh +++ b/.evergreen/run-sharded-integration-tests.sh @@ -5,13 +5,9 @@ set -o errexit # Exit the script with error if any of the commands fail # Supported/used environment variables: # MONGODB_URI Set the suggested connection MONGODB_URI (including credentials and topology info) -# SCALA_VERSION The Scala version to compile with -# SPARK_VERSION The spark version to test against MONGODB_URI=${MONGODB_URI:-} -SCALA_VERSION=${SCALA_VERSION:-2.12} -SPARK_VERSION=${SPARK_VERSION:-3.1.2} -export JAVA_HOME="/opt/java/jdk11" +export JAVA_HOME="/opt/java/jdk17" ############################################ # Main Program # @@ -21,4 +17,4 @@ export JAVA_HOME="/opt/java/jdk11" echo "Running tests connecting to $MONGODB_URI on JDK${JAVA_VERSION}" ./gradlew -version -./gradlew 
-Dorg.mongodb.test.uri=${MONGODB_URI} --stacktrace --info integrationTest --tests "com.mongodb.spark.sql.connector.read.partitioner.ShardedPartitionerTest" -DscalaVersion=$SCALA_VERSION -DsparkVersion=$SPARK_VERSION +./gradlew -Dorg.mongodb.test.uri=${MONGODB_URI} --stacktrace --info integrationTest --tests "com.mongodb.spark.sql.connector.read.partitioner.ShardedPartitionerTest" diff --git a/.evergreen/run-static-checks.sh b/.evergreen/run-static-checks.sh index 76ce064a..068821de 100755 --- a/.evergreen/run-static-checks.sh +++ b/.evergreen/run-static-checks.sh @@ -4,12 +4,7 @@ set -o xtrace # Write all commands first to stderr set -o errexit # Exit the script with error if any of the commands fail # Supported/used environment variables: -# SCALA_VERSION The Scala version to compile with -# SPARK_VERSION The spark version to test against -SCALA_VERSION=${SCALA_VERSION:-2.12} -SPARK_VERSION=${SPARK_VERSION:-3.1.2} - -export JAVA_HOME="/opt/java/jdk11" +export JAVA_HOME="/opt/java/jdk17" ############################################ # Main Program # @@ -19,4 +14,4 @@ echo "Compiling and running checks" # We always compile with the latest version of java ./gradlew -version -./gradlew -PxmlReports.enabled=true --info -x test -x integrationTest clean check jar testClasses javadoc -DscalaVersion=$SCALA_VERSION -DsparkVersion=$SPARK_VERSION +./gradlew -PxmlReports.enabled=true --info -x test -x integrationTest clean check jar testClasses javadoc diff --git a/.evergreen/run-unit-tests.sh b/.evergreen/run-unit-tests.sh index 27f61e35..fe6c3abc 100755 --- a/.evergreen/run-unit-tests.sh +++ b/.evergreen/run-unit-tests.sh @@ -4,12 +4,8 @@ set -o xtrace # Write all commands first to stderr set -o errexit # Exit the script with error if any of the commands fail # Supported/used environment variables: -# SCALA_VERSION The Scala version to compile with -# SPARK_VERSION The spark version to test against -SCALA_VERSION=${SCALA_VERSION:-2.12} -SPARK_VERSION=${SPARK_VERSION:-3.1.2} 
-export JAVA_HOME="/opt/java/jdk11" +export JAVA_HOME="/opt/java/jdk17" ############################################ # Main Program # @@ -18,4 +14,4 @@ export JAVA_HOME="/opt/java/jdk11" echo "Running unit tests on JDK${JAVA_VERSION}" ./gradlew -version -./gradlew --stacktrace --info test -DscalaVersion=$SCALA_VERSION -DsparkVersion=$SPARK_VERSION +./gradlew --stacktrace --info test diff --git a/build.gradle.kts b/build.gradle.kts index 67cb0b42..e2b86940 100644 --- a/build.gradle.kts +++ b/build.gradle.kts @@ -14,7 +14,9 @@ * limitations under the License. */ -import java.io.ByteArrayOutputStream +import com.github.spotbugs.snom.Confidence +import com.github.spotbugs.snom.Effort +import com.github.spotbugs.snom.SpotBugsTask import java.time.Duration buildscript { @@ -25,34 +27,28 @@ buildscript { plugins { idea - `java-library` + java `maven-publish` signing checkstyle id("com.github.gmazzo.buildconfig") version "3.0.2" - id("com.github.spotbugs") version "4.7.9" + id("com.github.spotbugs") version "6.4.2" id("com.diffplug.spotless") version "6.19.0" - id("com.github.johnrengelman.shadow") version "7.0.0" + id("com.github.johnrengelman.shadow") version "8.1.1" id("io.github.gradle-nexus.publish-plugin") version "2.0.0" } -version = "10.5.1-SNAPSHOT" +version = "11.0.0-SNAPSHOT" group = "org.mongodb.spark" description = "The official MongoDB Apache Spark Connect Connector." 
-java { - sourceCompatibility = JavaVersion.VERSION_1_8 - targetCompatibility = JavaVersion.VERSION_1_8 -} - repositories { mavenCentral() } -// Usage: ./gradlew -DscalaVersion=2.12 -DsparkVersion=3.1.4 -val scalaVersion = System.getProperty("scalaVersion", "2.13") -val sparkVersion = System.getProperty("sparkVersion", "3.5.1") +val scalaVersion = "2.13" +val sparkVersion = "4.0.1" extra.apply { set("annotationsVersion", "22.0.0") @@ -66,14 +62,13 @@ extra.apply { set("mockitoVersion", "3.12.4") // Integration test dependencies - set("commons-lang3", "3.12.0") + set("commons-lang3", "3.18.0") } sourceSets { main { java { - val scalaInteropSrcDir = if (scalaVersion == "2.12") "java_scala_212" else "java_scala_213" - srcDirs("src/main/java", "src/main/$scalaInteropSrcDir") + srcDirs("src/main/java", "src/main/java_scala_213") } } } @@ -97,22 +92,17 @@ dependencies { testImplementation("org.apache.spark:spark-streaming_$scalaVersion:$sparkVersion") // Unit Tests - testImplementation(platform("org.junit:junit-bom:5.8.1")) + testImplementation(platform("org.junit:junit-bom:5.13.4")) testImplementation("org.junit.jupiter:junit-jupiter") testImplementation("org.mockito:mockito-junit-jupiter:${project.extra["mockitoVersion"]}") testImplementation("org.apiguardian:apiguardian-api:1.1.2") // https://github.com/gradle/gradle/issues/18627 + testRuntimeOnly("org.junit.platform:junit-platform-launcher") // Integration Tests testImplementation("org.apache.commons:commons-lang3:${project.extra["commons-lang3"]}") testImplementation("org.jetbrains:annotations:${project.extra["annotationsVersion"]}") } -val defaultJdkVersion: Int = 11 - -java { - toolchain.languageVersion.set(JavaLanguageVersion.of(defaultJdkVersion)) -} - tasks.withType { options.encoding = "UTF-8" options.release.set(8) @@ -121,22 +111,29 @@ tasks.withType { // =========================== // Build Config // =========================== +// Gets the git version val gitVersion: String by lazy { - val describeStdOut = 
ByteArrayOutputStream() - exec { - commandLine = listOf("git", "describe", "--tags", "--always", "--dirty") - standardOutput = describeStdOut - } - describeStdOut.toString().substring(1).trim() + providers + .exec { + isIgnoreExitValue = true + commandLine("git", "describe", "--tags", "--always", "--dirty") + } + .standardOutput + .asText + .map { it.trim().removePrefix("r") } + .getOrElse("UNKNOWN") } val gitDiffNameOnly: String by lazy { - val describeStdOut = ByteArrayOutputStream() - exec { - commandLine = listOf("git", "diff", "--name-only") - standardOutput = describeStdOut - } - describeStdOut.toString().replaceIndent(" - ") + providers + .exec { + isIgnoreExitValue = true + commandLine("git", "diff", "--name-only") + } + .standardOutput + .asText + .map { it.trim().replaceIndent("-") } + .getOrElse(" ") } buildConfig { @@ -156,7 +153,7 @@ sourceSets.create("integrationTest") { runtimeClasspath += output + compileClasspath + sourceSets["test"].runtimeClasspath } -tasks.create("integrationTest", Test::class.java) { +tasks.register("integrationTest") { description = "Runs the integration tests" group = "verification" testClassesDirs = sourceSets["integrationTest"].output.classesDirs @@ -173,13 +170,7 @@ tasks.withType { events("passed", "skipped", "failed") } - val javaVersion: Int = (project.findProperty("javaVersion") as String? ?: defaultJdkVersion.toString()).toInt() logger.info("Running tests using JDK$javaVersion") - javaLauncher.set( - javaToolchains.launcherFor { - languageVersion.set(JavaLanguageVersion.of(javaVersion)) - }, - ) systemProperties(mapOf("org.mongodb.test.uri" to System.getProperty("org.mongodb.test.uri", ""))) @@ -190,6 +181,7 @@ tasks.withType { executable = javaExecutablesPath.absolutePath } } + jvmArgs("--add-opens=java.base/sun.util.calendar=ALL-UNNAMED") addTestListener(object : TestListener { override fun beforeTest(testDescriptor: TestDescriptor?) 
{} @@ -226,14 +218,14 @@ checkstyle { spotbugs { excludeFilter.set(project.file("config/spotbugs/exclude.xml")) showProgress.set(true) - setReportLevel("high") - setEffort("max") + reportLevel.set(Confidence.HIGH) + effort.set(Effort.MAX) } -tasks.withType { - enabled = baseName.equals("main") - reports.maybeCreate("html").isEnabled = !project.hasProperty("xmlReports.enabled") - reports.maybeCreate("xml").isEnabled = project.hasProperty("xmlReports.enabled") +tasks.withType { + enabled = getBaseName().equals("main", ignoreCase = true) + reports.maybeCreate("html").getRequired().set(!project.hasProperty("xmlReports.enabled")) + reports.maybeCreate("xml").getRequired().set(project.hasProperty("xmlReports.enabled")) } // Spotless is used to lint and reformat source files. diff --git a/config/checkstyle/checkstyle.xml b/config/checkstyle/checkstyle.xml index 863fb960..88186aa2 100644 --- a/config/checkstyle/checkstyle.xml +++ b/config/checkstyle/checkstyle.xml @@ -87,6 +87,7 @@ + diff --git a/gradle.properties b/gradle.properties index f2644b0d..4c9bc018 100644 --- a/gradle.properties +++ b/gradle.properties @@ -1,2 +1,2 @@ org.gradle.java.installations.auto-download=false -org.gradle.java.installations.fromEnv=JDK8,JDK11 +org.gradle.java.installations.fromEnv=JDK8,JDK17 diff --git a/gradle/wrapper/gradle-wrapper.jar b/gradle/wrapper/gradle-wrapper.jar index 7454180f..8bdaf60c 100644 Binary files a/gradle/wrapper/gradle-wrapper.jar and b/gradle/wrapper/gradle-wrapper.jar differ diff --git a/gradle/wrapper/gradle-wrapper.properties b/gradle/wrapper/gradle-wrapper.properties index 84d1f85f..2a84e188 100644 --- a/gradle/wrapper/gradle-wrapper.properties +++ b/gradle/wrapper/gradle-wrapper.properties @@ -1,5 +1,7 @@ distributionBase=GRADLE_USER_HOME distributionPath=wrapper/dists -distributionUrl=https\://services.gradle.org/distributions/gradle-7.3.1-bin.zip +distributionUrl=https\://services.gradle.org/distributions/gradle-9.0.0-bin.zip +networkTimeout=10000 
+validateDistributionUrl=true zipStoreBase=GRADLE_USER_HOME zipStorePath=wrapper/dists diff --git a/gradlew b/gradlew index 1b6c7873..adff685a 100755 --- a/gradlew +++ b/gradlew @@ -1,7 +1,7 @@ #!/bin/sh # -# Copyright © 2015-2021 the original authors. +# Copyright © 2015 the original authors. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -15,6 +15,8 @@ # See the License for the specific language governing permissions and # limitations under the License. # +# SPDX-License-Identifier: Apache-2.0 +# ############################################################################## # @@ -55,7 +57,7 @@ # Darwin, MinGW, and NonStop. # # (3) This script is generated from the Groovy template -# https://github.com/gradle/gradle/blob/master/subprojects/plugins/src/main/resources/org/gradle/api/internal/plugins/unixStartScript.txt +# https://github.com/gradle/gradle/blob/HEAD/platforms/jvm/plugins-application/src/main/resources/org/gradle/api/internal/plugins/unixStartScript.txt # within the Gradle project. # # You can find Gradle at https://github.com/gradle/gradle/. @@ -80,13 +82,11 @@ do esac done -APP_HOME=$( cd "${APP_HOME:-./}" && pwd -P ) || exit - -APP_NAME="Gradle" +# This is normally unused +# shellcheck disable=SC2034 APP_BASE_NAME=${0##*/} - -# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. -DEFAULT_JVM_OPTS='"-Xmx64m" "-Xms64m"' +# Discard cd standard output in case $CDPATH is set (https://github.com/gradle/gradle/issues/25036) +APP_HOME=$( cd -P "${APP_HOME:-./}" > /dev/null && printf '%s\n' "$PWD" ) || exit # Use the maximum available, or set MAX_FD != -1 to use that value. MAX_FD=maximum @@ -114,7 +114,6 @@ case "$( uname )" in #( NONSTOP* ) nonstop=true ;; esac -CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar # Determine the Java command to use to start the JVM. 
@@ -133,22 +132,29 @@ location of your Java installation." fi else JAVACMD=java - which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. + if ! command -v java >/dev/null 2>&1 + then + die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. Please set the JAVA_HOME variable in your environment to match the location of your Java installation." + fi fi # Increase the maximum file descriptors if we can. if ! "$cygwin" && ! "$darwin" && ! "$nonstop" ; then case $MAX_FD in #( max*) + # In POSIX sh, ulimit -H is undefined. That's why the result is checked to see if it worked. + # shellcheck disable=SC2039,SC3045 MAX_FD=$( ulimit -H -n ) || warn "Could not query maximum file descriptor limit" esac case $MAX_FD in #( '' | soft) :;; #( *) + # In POSIX sh, ulimit -n is undefined. That's why the result is checked to see if it worked. + # shellcheck disable=SC2039,SC3045 ulimit -n "$MAX_FD" || warn "Could not set maximum file descriptor limit to $MAX_FD" esac @@ -165,7 +171,6 @@ fi # For Cygwin or MSYS, switch paths to Windows format before running java if "$cygwin" || "$msys" ; then APP_HOME=$( cygpath --path --mixed "$APP_HOME" ) - CLASSPATH=$( cygpath --path --mixed "$CLASSPATH" ) JAVACMD=$( cygpath --unix "$JAVACMD" ) @@ -193,18 +198,27 @@ if "$cygwin" || "$msys" ; then done fi -# Collect all arguments for the java command; -# * $DEFAULT_JVM_OPTS, $JAVA_OPTS, and $GRADLE_OPTS can contain fragments of -# shell script including quotes and variable substitutions, so put them in -# double quotes to make sure that they get re-expanded; and -# * put everything else in single quotes, so that it's not re-expanded. + +# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. 
+DEFAULT_JVM_OPTS='"-Xmx64m" "-Xms64m"' + +# Collect all arguments for the java command: +# * DEFAULT_JVM_OPTS, JAVA_OPTS, and optsEnvironmentVar are not allowed to contain shell fragments, +# and any embedded shellness will be escaped. +# * For example: A user cannot expect ${Hostname} to be expanded, as it is an environment variable and will be +# treated as '${Hostname}' itself on the command line. set -- \ "-Dorg.gradle.appname=$APP_BASE_NAME" \ - -classpath "$CLASSPATH" \ - org.gradle.wrapper.GradleWrapperMain \ + -jar "$APP_HOME/gradle/wrapper/gradle-wrapper.jar" \ "$@" +# Stop when "xargs" is not available. +if ! command -v xargs >/dev/null 2>&1 +then + die "xargs is not available" +fi + # Use "xargs" to parse quoted args. # # With -n1 it outputs one arg per line, with the quotes and backslashes removed. diff --git a/gradlew.bat b/gradlew.bat index ac1b06f9..e509b2dd 100644 --- a/gradlew.bat +++ b/gradlew.bat @@ -13,8 +13,10 @@ @rem See the License for the specific language governing permissions and @rem limitations under the License. @rem +@rem SPDX-License-Identifier: Apache-2.0 +@rem -@if "%DEBUG%" == "" @echo off +@if "%DEBUG%"=="" @echo off @rem ########################################################################## @rem @rem Gradle startup script for Windows @@ -25,7 +27,8 @@ if "%OS%"=="Windows_NT" setlocal set DIRNAME=%~dp0 -if "%DIRNAME%" == "" set DIRNAME=. +if "%DIRNAME%"=="" set DIRNAME=. +@rem This is normally unused set APP_BASE_NAME=%~n0 set APP_HOME=%DIRNAME% @@ -40,13 +43,13 @@ if defined JAVA_HOME goto findJavaFromJavaHome set JAVA_EXE=java.exe %JAVA_EXE% -version >NUL 2>&1 -if "%ERRORLEVEL%" == "0" goto execute +if %ERRORLEVEL% equ 0 goto execute -echo. -echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. -echo. -echo Please set the JAVA_HOME variable in your environment to match the -echo location of your Java installation. +echo. 
1>&2 +echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. 1>&2 +echo. 1>&2 +echo Please set the JAVA_HOME variable in your environment to match the 1>&2 +echo location of your Java installation. 1>&2 goto fail @@ -56,32 +59,33 @@ set JAVA_EXE=%JAVA_HOME%/bin/java.exe if exist "%JAVA_EXE%" goto execute -echo. -echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME% -echo. -echo Please set the JAVA_HOME variable in your environment to match the -echo location of your Java installation. +echo. 1>&2 +echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME% 1>&2 +echo. 1>&2 +echo Please set the JAVA_HOME variable in your environment to match the 1>&2 +echo location of your Java installation. 1>&2 goto fail :execute @rem Setup the command line -set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar @rem Execute Gradle -"%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %* +"%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -jar "%APP_HOME%\gradle\wrapper\gradle-wrapper.jar" %* :end @rem End local scope for the variables with windows NT shell -if "%ERRORLEVEL%"=="0" goto mainEnd +if %ERRORLEVEL% equ 0 goto mainEnd :fail rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of rem the _cmd.exe /c_ return code! 
-if not "" == "%GRADLE_EXIT_CONSOLE%" exit 1 -exit /b 1 +set EXIT_CODE=%ERRORLEVEL% +if %EXIT_CODE% equ 0 set EXIT_CODE=1 +if not ""=="%GRADLE_EXIT_CONSOLE%" exit %EXIT_CODE% +exit /b %EXIT_CODE% :mainEnd if "%OS%"=="Windows_NT" endlocal diff --git a/src/integrationTest/java/com/mongodb/spark/sql/connector/mongodb/MongoSparkConnectorHelper.java b/src/integrationTest/java/com/mongodb/spark/sql/connector/mongodb/MongoSparkConnectorHelper.java index d3ceddb6..bb343b39 100644 --- a/src/integrationTest/java/com/mongodb/spark/sql/connector/mongodb/MongoSparkConnectorHelper.java +++ b/src/integrationTest/java/com/mongodb/spark/sql/connector/mongodb/MongoSparkConnectorHelper.java @@ -170,8 +170,8 @@ public synchronized SparkContext getOrCreateSparkContext( synchronized void resetSparkContext() { if (sparkContext != null) { sparkContext.stop(); - SparkSession.clearActiveSession(); - SparkSession.clearDefaultSession(); + SparkSession.setActiveSession(null); + SparkSession.setDefaultSession(null); } sparkContext = null; } @@ -226,8 +226,8 @@ private void loadSampleData( format(template, "00000", "_10000", "00000", 1, "")) .getByteBuffer() .limit(); - String sampleString = - RandomStringUtils.randomAlphabetic(totalDocumentSize - sampleDataWithEmptySampleStringSize); + String sampleString = RandomStringUtils.insecure() + .nextAlphabetic(totalDocumentSize - sampleDataWithEmptySampleStringSize); List sampleDocuments = IntStream.range(0, numberOfDocuments) .boxed() diff --git a/src/integrationTest/java/com/mongodb/spark/sql/connector/read/MongoBatchTest.java b/src/integrationTest/java/com/mongodb/spark/sql/connector/read/MongoBatchTest.java index 8dcdce43..45e8ebca 100644 --- a/src/integrationTest/java/com/mongodb/spark/sql/connector/read/MongoBatchTest.java +++ b/src/integrationTest/java/com/mongodb/spark/sql/connector/read/MongoBatchTest.java @@ -691,7 +691,7 @@ void testLogsCommentsInProfilerLogs() { getCollection().drop(); getCollection().insertMany(collectionData); - 
ReadConfig readConfig = MongoConfig.readConfig(asJava(spark.initialSessionOptions())) + ReadConfig readConfig = MongoConfig.readConfig(asJava(spark.conf().getAll())) .withOption(COMMENT_CONFIG, TEST_COMMENT); assertCommentsInProfile( diff --git a/src/integrationTest/java/com/mongodb/spark/sql/connector/write/MongoSparkConnectorWriteTest.java b/src/integrationTest/java/com/mongodb/spark/sql/connector/write/MongoSparkConnectorWriteTest.java index 8353c924..8d773338 100644 --- a/src/integrationTest/java/com/mongodb/spark/sql/connector/write/MongoSparkConnectorWriteTest.java +++ b/src/integrationTest/java/com/mongodb/spark/sql/connector/write/MongoSparkConnectorWriteTest.java @@ -241,7 +241,7 @@ void testLogCommentsInProfilerLogs() { SparkSession spark = getOrCreateSparkSession(); Dataset df = spark.read().json(WRITE_RESOURCES_JSON_PATH); - WriteConfig writeConfig = MongoConfig.writeConfig(asJava(spark.initialSessionOptions())) + WriteConfig writeConfig = MongoConfig.writeConfig(asJava(spark.conf().getAll())) .withOption(COMMENT_CONFIG, TEST_COMMENT); assertCommentsInProfile( @@ -264,7 +264,7 @@ void testLogCommentsInProfilerLogs() { void testLogCommentsInProfilerLogsStreamingWrites() { SparkSession spark = getOrCreateSparkSession(); - WriteConfig writeConfig = MongoConfig.writeConfig(asJava(spark.initialSessionOptions())) + WriteConfig writeConfig = MongoConfig.writeConfig(asJava(spark.conf().getAll())) .withOption(COMMENT_CONFIG, TEST_COMMENT); assertCommentsInProfile( diff --git a/src/main/java_scala_212/com/mongodb/spark/sql/connector/interop/JavaScala.java b/src/main/java_scala_212/com/mongodb/spark/sql/connector/interop/JavaScala.java deleted file mode 100644 index 37f9e47f..00000000 --- a/src/main/java_scala_212/com/mongodb/spark/sql/connector/interop/JavaScala.java +++ /dev/null @@ -1,94 +0,0 @@ -/* - * Copyright 2008-present MongoDB, Inc. 
- * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under t1 - * - */ - -package com.mongodb.spark.sql.connector.interop; - -import java.util.List; -import java.util.Map; -import scala.Tuple2; -import scala.collection.JavaConverters; -import scala.collection.immutable.Map$; -import scala.collection.mutable.Builder; - -/** Utils object to convert Java To Scala to enable cross build */ -@SuppressWarnings("deprecated") -public final class JavaScala { - private JavaScala() {} - - /** - * Wrapper to convert a java map to a scala map - * - * @param data java collection - * @param key - * @param value - * @return scala collection - */ - public static scala.collection.Map asScala(final Map data) { - return JavaConverters.mapAsScalaMap(data); - } - - /** - * Wrapper to convert a java map to an immutable scala map - * - * @param data java collection - * @param key - * @param value - * @return scala collection - */ - public static scala.collection.immutable.Map asScalaImmutable(final Map data) { - Builder, scala.collection.immutable.Map> mapBuilder = - Map$.MODULE$.newBuilder(); - for (Map.Entry entry : data.entrySet()) { - mapBuilder.$plus$eq(new Tuple2<>(entry.getKey(), entry.getValue())); - } - return mapBuilder.result(); - } - - /** - * Wrapper to convert a java list to a scala seq - * - * @param data java collection - * @param value - * @return scala collection - */ - public static scala.collection.Seq asScala(final List data) { - return 
JavaConverters.collectionAsScalaIterable(data).toSeq(); - } - - /** - * Wrapper to convert a scala map to a java map - * - * @param data java collection - * @param key - * @param value - * @return scala collection - */ - public static Map asJava(final scala.collection.Map data) { - return JavaConverters.mapAsJavaMap(data); - } - - /** - * Wrapper to convert a scala seq to a java list - * - * @param data java collection - * @param value - * @return scala collection - */ - public static List asJava(final scala.collection.Seq data) { - return JavaConverters.seqAsJavaList(data); - } -} diff --git a/src/main/java_scala_212/com/mongodb/spark/sql/connector/interop/package-info.java b/src/main/java_scala_212/com/mongodb/spark/sql/connector/interop/package-info.java deleted file mode 100644 index 3685447a..00000000 --- a/src/main/java_scala_212/com/mongodb/spark/sql/connector/interop/package-info.java +++ /dev/null @@ -1,20 +0,0 @@ -/* - * Copyright 2008-present MongoDB, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ -@ApiStatus.Internal -package com.mongodb.spark.sql.connector.interop; - -import org.jetbrains.annotations.ApiStatus;