Skip to content

Commit

Permalink
Support Spark 3.5
Browse files Browse the repository at this point in the history
  • Loading branch information
sfc-gh-bli committed May 13, 2024
1 parent 860b7df commit d496c46
Show file tree
Hide file tree
Showing 10 changed files with 13 additions and 13 deletions.
2 changes: 1 addition & 1 deletion .github/docker/build_image.sh
Original file line number Diff line number Diff line change
Expand Up @@ -33,7 +33,7 @@ cd ../..

# Build docker image
docker build \
--build-arg SPARK_URL=https://archive.apache.org/dist/spark/spark-3.4.0/spark-3.4.0-bin-hadoop3.tgz \
--build-arg SPARK_URL=https://archive.apache.org/dist/spark/spark-3.5.1/spark-3.5.1-bin-hadoop3.tgz \
--build-arg SPARK_BINARY_NAME=spark-3.5.1-bin-hadoop3.tgz \
--build-arg JDBC_URL=https://repo1.maven.org/maven2/net/snowflake/snowflake-jdbc/${TEST_JDBC_VERSION}/$JDBC_JAR_NAME \
--build-arg JDBC_BINARY_NAME=$JDBC_JAR_NAME \
Expand Down
6 changes: 3 additions & 3 deletions .github/workflows/ClusterTest.yml
Original file line number Diff line number Diff line change
Expand Up @@ -13,13 +13,13 @@ jobs:
strategy:
matrix:
scala_version: [ '2.12.11' ]
spark_version: [ '3.4.0' ]
spark_version: [ '3.5.1' ]
use_copy_unload: [ 'true' ]
cloud_provider: [ 'gcp' ]
env:
SNOWFLAKE_TEST_CONFIG_SECRET: ${{ secrets.SNOWFLAKE_TEST_CONFIG_SECRET }}
TEST_SPARK_VERSION: '3.4'
DOCKER_IMAGE_TAG: 'snowflakedb/spark-base:3.4.0'
TEST_SPARK_VERSION: '3.5'
DOCKER_IMAGE_TAG: 'snowflakedb/spark-base:3.5.1'
TEST_SCALA_VERSION: '2.12'
TEST_COMPILE_SCALA_VERSION: '2.12.11'
TEST_SPARK_CONNECTOR_VERSION: '2.15.0'
Expand Down
2 changes: 1 addition & 1 deletion .github/workflows/IntegrationTest_2.12.yml
Original file line number Diff line number Diff line change
Expand Up @@ -13,7 +13,7 @@ jobs:
strategy:
matrix:
scala_version: [ '2.12.11' ]
spark_version: [ '3.4.0' ]
spark_version: [ '3.5.1' ]
use_copy_unload: [ 'true', 'false' ]
cloud_provider: [ 'aws', 'azure' ]
# run_query_in_async can be removed after async mode is stable
Expand Down
2 changes: 1 addition & 1 deletion .github/workflows/IntegrationTest_2.13.yml
Original file line number Diff line number Diff line change
Expand Up @@ -13,7 +13,7 @@ jobs:
strategy:
matrix:
scala_version: [ '2.13.9' ]
spark_version: [ '3.4.0' ]
spark_version: [ '3.5.1' ]
use_copy_unload: [ 'true', 'false' ]
cloud_provider: [ 'aws', 'azure' ]
# run_query_in_async can be removed after async mode is stable
Expand Down
2 changes: 1 addition & 1 deletion .github/workflows/IntegrationTest_gcp_2.12.yml
Original file line number Diff line number Diff line change
Expand Up @@ -13,7 +13,7 @@ jobs:
strategy:
matrix:
scala_version: [ '2.12.11' ]
spark_version: [ '3.4.0' ]
spark_version: [ '3.5.1' ]
use_copy_unload: [ 'false' ]
cloud_provider: [ 'gcp' ]
# run_query_in_async can be removed after async mode is stable
Expand Down
2 changes: 1 addition & 1 deletion .github/workflows/IntegrationTest_gcp_2.13.yml
Original file line number Diff line number Diff line change
Expand Up @@ -13,7 +13,7 @@ jobs:
strategy:
matrix:
scala_version: [ '2.13.9' ]
spark_version: [ '3.4.0' ]
spark_version: [ '3.5.1' ]
use_copy_unload: [ 'false' ]
cloud_provider: [ 'gcp' ]
# run_query_in_async can be removed after async mode is stable
Expand Down
4 changes: 2 additions & 2 deletions ClusterTest/build.sbt
Original file line number Diff line number Diff line change
Expand Up @@ -16,8 +16,8 @@

val sparkConnectorVersion = "2.15.0"
val scalaVersionMajor = "2.12"
val sparkVersionMajor = "3.4"
val sparkVersion = s"${sparkVersionMajor}.0"
val sparkVersionMajor = "3.5"
val sparkVersion = s"${sparkVersionMajor}.1"
val testSparkVersion = sys.props.get("spark.testVersion").getOrElse(sparkVersion)

unmanagedJars in Compile += file(s"../target/scala-${scalaVersionMajor}/" +
Expand Down
2 changes: 1 addition & 1 deletion build.sbt
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,7 @@
import scala.util.Properties

val sparkVersion = "3.5"
val testSparkVersion = sys.props.get("spark.testVersion").getOrElse("3.5.0")
val testSparkVersion = sys.props.get("spark.testVersion").getOrElse("3.5.1")

/*
* Don't change the variable name "sparkConnectorVersion" because
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -34,7 +34,7 @@ object SnowflakeConnectorUtils {
* Check Spark version, if Spark version matches SUPPORT_SPARK_VERSION enable PushDown,
* otherwise disable it.
*/
val SUPPORT_SPARK_VERSION = "3.4"
val SUPPORT_SPARK_VERSION = "3.5"

def checkVersionAndEnablePushdown(session: SparkSession): Boolean =
if (session.version.startsWith(SUPPORT_SPARK_VERSION)) {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -122,7 +122,7 @@ private[querygeneration] object MiscStatement {
// Spark 3.4 introduced join hints. A join hint doesn't affect correctness.
// So it can be ignored in the pushdown process
// https://github.com/apache/spark/commit/0fa9c554fc0b3940a47c3d1c6a5a17ca9a8cee8e
case ScalarSubquery(subquery, _, _, joinCond, _) if joinCond.isEmpty =>
case ScalarSubquery(subquery, _, _, joinCond, _, _) if joinCond.isEmpty =>
blockStatement(new QueryBuilder(subquery).statement)

case UnscaledValue(child) =>
Expand Down

0 comments on commit d496c46

Please sign in to comment.