diff --git a/.github/docker/build_image.sh b/.github/docker/build_image.sh index a126fb02..c5b04182 100755 --- a/.github/docker/build_image.sh +++ b/.github/docker/build_image.sh @@ -33,7 +33,7 @@ cd ../.. # Build docker image docker build \ ---build-arg SPARK_URL=https://archive.apache.org/dist/spark/spark-3.4.0/spark-3.4.0-bin-hadoop3.tgz \ +--build-arg SPARK_URL=https://archive.apache.org/dist/spark/spark-3.5.1/spark-3.5.1-bin-hadoop3.tgz \ ---build-arg SPARK_BINARY_NAME=spark-3.4.0-bin-hadoop3.tgz \ +--build-arg SPARK_BINARY_NAME=spark-3.5.1-bin-hadoop3.tgz \ --build-arg JDBC_URL=https://repo1.maven.org/maven2/net/snowflake/snowflake-jdbc/${TEST_JDBC_VERSION}/$JDBC_JAR_NAME \ --build-arg JDBC_BINARY_NAME=$JDBC_JAR_NAME \ diff --git a/.github/workflows/ClusterTest.yml b/.github/workflows/ClusterTest.yml index 656f3a65..fd40b497 100644 --- a/.github/workflows/ClusterTest.yml +++ b/.github/workflows/ClusterTest.yml @@ -13,13 +13,13 @@ jobs: strategy: matrix: scala_version: [ '2.12.11' ] - spark_version: [ '3.4.0' ] + spark_version: [ '3.5.1' ] use_copy_unload: [ 'true' ] cloud_provider: [ 'gcp' ] env: SNOWFLAKE_TEST_CONFIG_SECRET: ${{ secrets.SNOWFLAKE_TEST_CONFIG_SECRET }} - TEST_SPARK_VERSION: '3.4' - DOCKER_IMAGE_TAG: 'snowflakedb/spark-base:3.4.0' + TEST_SPARK_VERSION: '3.5' + DOCKER_IMAGE_TAG: 'snowflakedb/spark-base:3.5.1' TEST_SCALA_VERSION: '2.12' TEST_COMPILE_SCALA_VERSION: '2.12.11' TEST_SPARK_CONNECTOR_VERSION: '2.15.0' diff --git a/.github/workflows/IntegrationTest_2.12.yml b/.github/workflows/IntegrationTest_2.12.yml index 80235ce8..5e1ac85b 100644 --- a/.github/workflows/IntegrationTest_2.12.yml +++ b/.github/workflows/IntegrationTest_2.12.yml @@ -13,7 +13,7 @@ jobs: strategy: matrix: scala_version: [ '2.12.11' ] - spark_version: [ '3.4.0' ] + spark_version: [ '3.5.1' ] use_copy_unload: [ 'true', 'false' ] cloud_provider: [ 'aws', 'azure' ] # run_query_in_async can be removed after async mode is stable diff --git a/.github/workflows/IntegrationTest_2.13.yml b/.github/workflows/IntegrationTest_2.13.yml index 
99999310..9121aecb 100644 --- a/.github/workflows/IntegrationTest_2.13.yml +++ b/.github/workflows/IntegrationTest_2.13.yml @@ -13,7 +13,7 @@ jobs: strategy: matrix: scala_version: [ '2.13.9' ] - spark_version: [ '3.4.0' ] + spark_version: [ '3.5.1' ] use_copy_unload: [ 'true', 'false' ] cloud_provider: [ 'aws', 'azure' ] # run_query_in_async can be removed after async mode is stable diff --git a/.github/workflows/IntegrationTest_gcp_2.12.yml b/.github/workflows/IntegrationTest_gcp_2.12.yml index 674e0b8f..9fa8d505 100644 --- a/.github/workflows/IntegrationTest_gcp_2.12.yml +++ b/.github/workflows/IntegrationTest_gcp_2.12.yml @@ -13,7 +13,7 @@ jobs: strategy: matrix: scala_version: [ '2.12.11' ] - spark_version: [ '3.4.0' ] + spark_version: [ '3.5.1' ] use_copy_unload: [ 'false' ] cloud_provider: [ 'gcp' ] # run_query_in_async can be removed after async mode is stable diff --git a/.github/workflows/IntegrationTest_gcp_2.13.yml b/.github/workflows/IntegrationTest_gcp_2.13.yml index 4a0f2b64..5508b420 100644 --- a/.github/workflows/IntegrationTest_gcp_2.13.yml +++ b/.github/workflows/IntegrationTest_gcp_2.13.yml @@ -13,7 +13,7 @@ jobs: strategy: matrix: scala_version: [ '2.13.9' ] - spark_version: [ '3.4.0' ] + spark_version: [ '3.5.1' ] use_copy_unload: [ 'false' ] cloud_provider: [ 'gcp' ] # run_query_in_async can be removed after async mode is stable diff --git a/ClusterTest/build.sbt b/ClusterTest/build.sbt index da670026..8d10426c 100644 --- a/ClusterTest/build.sbt +++ b/ClusterTest/build.sbt @@ -16,8 +16,8 @@ val sparkConnectorVersion = "2.15.0" val scalaVersionMajor = "2.12" -val sparkVersionMajor = "3.4" -val sparkVersion = s"${sparkVersionMajor}.0" +val sparkVersionMajor = "3.5" +val sparkVersion = s"${sparkVersionMajor}.1" val testSparkVersion = sys.props.get("spark.testVersion").getOrElse(sparkVersion) unmanagedJars in Compile += file(s"../target/scala-${scalaVersionMajor}/" + diff --git a/build.sbt b/build.sbt index 6298d289..8cdca2a0 100644 --- 
a/build.sbt +++ b/build.sbt @@ -17,7 +17,7 @@ import scala.util.Properties val sparkVersion = "3.5" -val testSparkVersion = sys.props.get("spark.testVersion").getOrElse("3.5.0") +val testSparkVersion = sys.props.get("spark.testVersion").getOrElse("3.5.1") /* * Don't change the variable name "sparkConnectorVersion" because diff --git a/src/main/scala/net/snowflake/spark/snowflake/SnowflakeConnectorUtils.scala b/src/main/scala/net/snowflake/spark/snowflake/SnowflakeConnectorUtils.scala index f1b50b53..5cbde0ec 100644 --- a/src/main/scala/net/snowflake/spark/snowflake/SnowflakeConnectorUtils.scala +++ b/src/main/scala/net/snowflake/spark/snowflake/SnowflakeConnectorUtils.scala @@ -34,7 +34,7 @@ object SnowflakeConnectorUtils { * Check Spark version, if Spark version matches SUPPORT_SPARK_VERSION enable PushDown, * otherwise disable it. */ - val SUPPORT_SPARK_VERSION = "3.4" + val SUPPORT_SPARK_VERSION = "3.5" def checkVersionAndEnablePushdown(session: SparkSession): Boolean = if (session.version.startsWith(SUPPORT_SPARK_VERSION)) { diff --git a/src/main/scala/net/snowflake/spark/snowflake/pushdowns/querygeneration/MiscStatement.scala b/src/main/scala/net/snowflake/spark/snowflake/pushdowns/querygeneration/MiscStatement.scala index 30d3c50e..3c79a7cc 100644 --- a/src/main/scala/net/snowflake/spark/snowflake/pushdowns/querygeneration/MiscStatement.scala +++ b/src/main/scala/net/snowflake/spark/snowflake/pushdowns/querygeneration/MiscStatement.scala @@ -122,7 +122,7 @@ private[querygeneration] object MiscStatement { // Spark 3.4 introduce join hint. The join hint doesn't affect correctness. // So it can be ignored in the pushdown process // https://github.com/apache/spark/commit/0fa9c554fc0b3940a47c3d1c6a5a17ca9a8cee8e - case ScalarSubquery(subquery, _, _, joinCond, _) if joinCond.isEmpty => + case ScalarSubquery(subquery, _, _, joinCond, _, _) if joinCond.isEmpty => blockStatement(new QueryBuilder(subquery).statement) case UnscaledValue(child) =>