From 3286b0301ade30639eef3e6d787d6dff1234f568 Mon Sep 17 00:00:00 2001
From: chgl
Date: Sun, 31 Mar 2024 20:51:16 +0200
Subject: [PATCH] chore(deps): updated delta to 3.1.0 and spark to 3.5.1

---
 LICENSE                                   |  2 +-
 fhir-server/pom.xml                       |  2 +-
 fhirpath/pom.xml                          |  8 ++++----
 lib/R/R/dependencies.R                    |  2 +-
 lib/python/examples/query.py              |  2 +-
 lib/python/pathling/context.py            |  2 +-
 lib/python/pom.xml                        |  2 +-
 lib/python/requirements/package.txt       |  2 +-
 lib/python/tests/conftest.py              |  2 +-
 library-api/pom.xml                       |  2 +-
 pom.xml                                   |  8 ++++----
 site/docs/libraries/installation/spark.md | 17 ++++++++---------
 12 files changed, 25 insertions(+), 26 deletions(-)

diff --git a/LICENSE b/LICENSE
index 0e0ba8bf6b..7623319917 100644
--- a/LICENSE
+++ b/LICENSE
@@ -202,7 +202,7 @@ separate files distributed with the Software.
 * (Apache License, Version 2.0) Apache Commons BeanUtils (commons-beanutils:commons-beanutils:1.9.4 - https://commons.apache.org/proper/commons-beanutils/)
 * (Apache License, Version 2.0) Apache Commons IO (commons-io:commons-io:2.13.0 - https://commons.apache.org/proper/commons-io/)
 * (Apache License, Version 2.0) Commons Lang (commons-lang:commons-lang:2.6 - http://commons.apache.org/lang/)
-* (Apache License, Version 2.0) delta-core (io.delta:delta-core_2.12:2.4.0 - https://delta.io/)
+* (Apache License, Version 2.0) delta-spark (io.delta:delta-spark_2.12:3.1.0 - https://delta.io/)
 * (Apache License, Version 2.0) micrometer-registry-prometheus (io.micrometer:micrometer-registry-prometheus:1.11.3 - https://github.com/micrometer-metrics/micrometer)
 * (MIT License) Sentry SDK (io.sentry:sentry:6.6.0 - https://github.com/getsentry/sentry-java)
 * (Apache License, Version 2.0) Jakarta Bean Validation API (jakarta.validation:jakarta.validation-api:2.0.2 - https://beanvalidation.org)
diff --git a/fhir-server/pom.xml b/fhir-server/pom.xml
index 5ec4d88594..e093143d32 100644
--- a/fhir-server/pom.xml
+++ b/fhir-server/pom.xml
@@ -65,7 +65,7 @@
     </dependency>
     <dependency>
       <groupId>io.delta</groupId>
-      <artifactId>delta-core_${pathling.scalaVersion}</artifactId>
+      <artifactId>delta-spark_${pathling.scalaVersion}</artifactId>
     </dependency>
     <dependency>
       <groupId>org.apache.hadoop</groupId>
diff --git a/fhirpath/pom.xml b/fhirpath/pom.xml
index ba17ab397c..38df5b2785 100644
--- a/fhirpath/pom.xml
+++ b/fhirpath/pom.xml
@@ -51,7 +51,7 @@
     </dependency>
     <dependency>
       <groupId>io.delta</groupId>
-      <artifactId>delta-core_${pathling.scalaVersion}</artifactId>
+      <artifactId>delta-spark_${pathling.scalaVersion}</artifactId>
       <scope>provided</scope>
     </dependency>
     <dependency>
@@ -270,9 +270,9 @@
             ${pathling.fhirServerDockerRepo}
-            ${pathling.fhirServerDockerTag}
-            ${project.version}
-            ${project.majorVersion}
+            ${pathling.fhirServerDockerTag}-delta3
+            ${project.version}-delta3
+            ${project.majorVersion}-delta3
             ${git.commit.id}
diff --git a/lib/R/R/dependencies.R b/lib/R/R/dependencies.R
index 3540621b21..c792c675a4 100644
--- a/lib/R/R/dependencies.R
+++ b/lib/R/R/dependencies.R
@@ -18,7 +18,7 @@ spark_dependencies <- function(spark_version, scala_version, ...) {
   sparklyr::spark_dependency(
     packages = c(
       paste0("au.csiro.pathling:library-runtime:", pathling_version()),
-      paste0("io.delta:delta-core_", spark_info$scala_version, ":", spark_info$delta_version),
+      paste0("io.delta:delta-spark_", spark_info$scala_version, ":", spark_info$delta_version),
       paste0("org.apache.hadoop:hadoop-aws:", spark_info$hadoop_version)
     )
   )
diff --git a/lib/python/examples/query.py b/lib/python/examples/query.py
index f52e20db48..e837cb4617 100755
--- a/lib/python/examples/query.py
+++ b/lib/python/examples/query.py
@@ -36,7 +36,7 @@
 spark = (
     SparkSession.builder.config(
         "spark.jars.packages",
-        f"au.csiro.pathling:library-runtime:{__java_version__},io.delta:delta-core_2.12:2.2.0",
+        f"au.csiro.pathling:library-runtime:{__java_version__},io.delta:delta-spark_2.12:3.1.0",
     )
     .config(
         "spark.sql.extensions",
diff --git a/lib/python/pathling/context.py b/lib/python/pathling/context.py
index fb887615c8..72e0e27e3f 100644
--- a/lib/python/pathling/context.py
+++ b/lib/python/pathling/context.py
@@ -184,7 +184,7 @@ def _new_spark_session():
         SparkSession.builder.config(
             "spark.jars.packages",
             f"au.csiro.pathling:library-runtime:{__java_version__},"
-            f"io.delta:delta-core_{__scala_version__}:{__delta_version__},"
+            f"io.delta:delta-spark_{__scala_version__}:{__delta_version__},"
             f"org.apache.hadoop:hadoop-aws:{__hadoop_version__}",
         )
         .config(
diff --git a/lib/python/pom.xml b/lib/python/pom.xml
index 47133b47dc..e6066dae02 100644
--- a/lib/python/pom.xml
+++ b/lib/python/pom.xml
@@ -225,7 +225,7 @@
             --jars
             ${project.build.directory}/dependency/*
             --packages
-            au.csiro.pathling:library-runtime:${project.version},io.delta:delta-core_${pathling.scalaVersion}:${pathling.deltaVersion}
+            au.csiro.pathling:library-runtime:${project.version},io.delta:delta-spark_${pathling.scalaVersion}:${pathling.deltaVersion}
             --conf
             spark.sql.extensions=io.delta.sql.DeltaSparkSessionExtension
             --conf
diff --git a/lib/python/requirements/package.txt b/lib/python/requirements/package.txt
index 7de8ec0292..15af9fa8a4 100644
--- a/lib/python/requirements/package.txt
+++ b/lib/python/requirements/package.txt
@@ -1,2 +1,2 @@
-pyspark==3.4.1
+pyspark==3.5.1
 deprecated==1.2.14
diff --git a/lib/python/tests/conftest.py b/lib/python/tests/conftest.py
index e3b249f6d6..3aee982b7c 100644
--- a/lib/python/tests/conftest.py
+++ b/lib/python/tests/conftest.py
@@ -74,7 +74,7 @@ def pathling_ctx(request, temp_warehouse_dir):
         .config(
             "spark.jars.packages",
             f"au.csiro.pathling:library-runtime:{__java_version__},"
-            f"io.delta:delta-core_{__scala_version__}:{__delta_version__},"
+            f"io.delta:delta-spark_{__scala_version__}:{__delta_version__},"
             f"org.apache.hadoop:hadoop-aws:{__hadoop_version__}",
         )
         .config("spark.sql.extensions", "io.delta.sql.DeltaSparkSessionExtension")
diff --git a/library-api/pom.xml b/library-api/pom.xml
index 0e4c7996e2..36e4c30392 100644
--- a/library-api/pom.xml
+++ b/library-api/pom.xml
@@ -70,7 +70,7 @@
     </dependency>
     <dependency>
      <groupId>io.delta</groupId>
-      <artifactId>delta-core_${pathling.scalaVersion}</artifactId>
+      <artifactId>delta-spark_${pathling.scalaVersion}</artifactId>
       <scope>provided</scope>
     </dependency>
     <dependency>
diff --git a/pom.xml b/pom.xml
index ba84385ff3..011317bb41 100644
--- a/pom.xml
+++ b/pom.xml
@@ -72,10 +72,10 @@
     6
     UTF-8
-    3.4.2
+    3.5.1
     2.12
     2.12.17
-    2.4.0
+    3.1.0
     3.3.4
     3
@@ -312,7 +312,7 @@
     </dependency>
     <dependency>
       <groupId>io.delta</groupId>
-      <artifactId>delta-core_${pathling.scalaVersion}</artifactId>
+      <artifactId>delta-spark_${pathling.scalaVersion}</artifactId>
       <version>${pathling.deltaVersion}</version>
     </dependency>
     <dependency>
@@ -628,7 +628,7 @@
       <artifactId>ivy</artifactId>
       <version>2.5.1</version>
-
+
       <groupId>org.apache.zookeeper</groupId>
diff --git a/site/docs/libraries/installation/spark.md b/site/docs/libraries/installation/spark.md
index e0ecfc66c5..669b755c38 100644
--- a/site/docs/libraries/installation/spark.md
+++ b/site/docs/libraries/installation/spark.md
@@ -36,7 +36,7 @@ spark = (
     SparkSession.builder.config(
         "spark.jars.packages",
         "au.csiro.pathling:library-runtime:6.4.2,"
-        "io.delta:delta-core_2.12:2.4.0,"
+        "io.delta:delta-spark_2.12:3.1.0,"
     )
     .config(
         "spark.sql.extensions", "io.delta.sql.DeltaSparkSessionExtension"
@@ -58,12 +58,12 @@
 library(sparklyr)
 library(pathling)

 sc <- spark_connect(master = "local",
-                    packages = c(paste("au.csiro.pathling:library-runtime:", pathling_version()),
-                                 "io.delta:delta-core_2.12:2.4.0"),
+                    packages = c(paste("au.csiro.pathling:library-runtime:", pathling_version()),
+                                 "io.delta:delta-spark_2.12:3.1.0"),
                     config = list("sparklyr.shell.conf" = c(
                       "spark.sql.extensions=io.delta.sql.DeltaSparkSessionExtension",
                       "spark.sql.catalog.spark_catalog=org.apache.spark.sql.delta.catalog.DeltaCatalog"
-                    )), version = "3.4.0")
+                    )), version = "3.5.0")

 pc <- pathling_connect(sc)
@@ -76,7 +76,7 @@ import au.csiro.pathling.library.PathlingContext
 val spark = SparkSession.builder
   .config("spark.jars.packages",
     "au.csiro.pathling:library-runtime:6.4.2," +
-    "io.delta:delta-core_2.12:2.4.0")
+    "io.delta:delta-spark_2.12:3.1.0")
   .config("spark.sql.extensions", "io.delta.sql.DeltaSparkSessionExtension")
   .config("spark.sql.catalog.spark_catalog",
     "org.apache.spark.sql.delta.catalog.DeltaCatalog")
@@ -96,10 +96,10 @@
 class MyApp {
     public static void main(String[] args) {
         SparkSession spark = SparkSession.builder()
-                .config("spark.jars.packages", 
+                .config("spark.jars.packages",
                         "au.csiro.pathling:library-runtime:6.4.2," +
-                        "io.delta:delta-core_2.12:2.4.0")
-                .config("spark.sql.extensions", 
+                        "io.delta:delta-spark_2.12:3.1.0")
+                .config("spark.sql.extensions",
                         "io.delta.sql.DeltaSparkSessionExtension")
                 .config("spark.sql.catalog.spark_catalog",
                         "org.apache.spark.sql.delta.catalog.DeltaCatalog")
@@ -143,4 +143,3 @@ RUN pip install --quiet --no-cache-dir pathling && \
     fix-permissions "${CONDA_DIR}" && \
     fix-permissions "/home/${NB_USER}"
 ```
-