Skip to content

Commit

Permalink
chore(deps): update delta to 3.1.0 and spark to 3.5.1
Browse files Browse the repository at this point in the history
  • Loading branch information
chgl committed Mar 31, 2024
1 parent 720bbb8 commit 3286b03
Show file tree
Hide file tree
Showing 12 changed files with 25 additions and 26 deletions.
2 changes: 1 addition & 1 deletion LICENSE
Original file line number Diff line number Diff line change
Expand Up @@ -202,7 +202,7 @@ separate files distributed with the Software.
* (Apache License, Version 2.0) Apache Commons BeanUtils (commons-beanutils:commons-beanutils:1.9.4 - https://commons.apache.org/proper/commons-beanutils/)
* (Apache License, Version 2.0) Apache Commons IO (commons-io:commons-io:2.13.0 - https://commons.apache.org/proper/commons-io/)
* (Apache License, Version 2.0) Commons Lang (commons-lang:commons-lang:2.6 - http://commons.apache.org/lang/)
* (Apache License, Version 2.0) delta-core (io.delta:delta-core_2.12:2.4.0 - https://delta.io/)
* (Apache License, Version 2.0) delta-spark (io.delta:delta-spark_2.12:3.1.0 - https://delta.io/)
* (Apache License, Version 2.0) micrometer-registry-prometheus (io.micrometer:micrometer-registry-prometheus:1.11.3 - https://github.com/micrometer-metrics/micrometer)
* (MIT License) Sentry SDK (io.sentry:sentry:6.6.0 - https://github.com/getsentry/sentry-java)
* (Apache License, Version 2.0) Jakarta Bean Validation API (jakarta.validation:jakarta.validation-api:2.0.2 - https://beanvalidation.org)
Expand Down
2 changes: 1 addition & 1 deletion fhir-server/pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -65,7 +65,7 @@
</dependency>
<dependency>
<groupId>io.delta</groupId>
<artifactId>delta-core_${pathling.scalaVersion}</artifactId>
<artifactId>delta-spark_${pathling.scalaVersion}</artifactId>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
Expand Down
8 changes: 4 additions & 4 deletions fhirpath/pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -51,7 +51,7 @@
</dependency>
<dependency>
<groupId>io.delta</groupId>
<artifactId>delta-core_${pathling.scalaVersion}</artifactId>
<artifactId>delta-spark_${pathling.scalaVersion}</artifactId>
<scope>provided</scope>
</dependency>
<dependency>
Expand Down Expand Up @@ -270,9 +270,9 @@
<to>
<image>${pathling.fhirServerDockerRepo}</image>
<tags>
<tag>${pathling.fhirServerDockerTag}</tag>
<tag>${project.version}</tag>
<tag>${project.majorVersion}</tag>
<tag>${pathling.fhirServerDockerTag}-delta3</tag>
<tag>${project.version}-delta3</tag>
<tag>${project.majorVersion}-delta3</tag>
<tag>${git.commit.id}</tag>
</tags>
</to>
Expand Down
2 changes: 1 addition & 1 deletion lib/R/R/dependencies.R
Original file line number Diff line number Diff line change
Expand Up @@ -18,7 +18,7 @@ spark_dependencies <- function(spark_version, scala_version, ...) {
sparklyr::spark_dependency(
packages = c(
paste0("au.csiro.pathling:library-runtime:", pathling_version()),
paste0("io.delta:delta-core_", spark_info$scala_version, ":", spark_info$delta_version),
paste0("io.delta:delta-spark_", spark_info$scala_version, ":", spark_info$delta_version),
paste0("org.apache.hadoop:hadoop-aws:", spark_info$hadoop_version)
)
)
Expand Down
2 changes: 1 addition & 1 deletion lib/python/examples/query.py
Original file line number Diff line number Diff line change
Expand Up @@ -36,7 +36,7 @@
spark = (
SparkSession.builder.config(
"spark.jars.packages",
f"au.csiro.pathling:library-runtime:{__java_version__},io.delta:delta-core_2.12:2.2.0",
f"au.csiro.pathling:library-runtime:{__java_version__},io.delta:delta-spark_2.12:3.1.0",
)
.config(
"spark.sql.extensions",
Expand Down
2 changes: 1 addition & 1 deletion lib/python/pathling/context.py
Original file line number Diff line number Diff line change
Expand Up @@ -184,7 +184,7 @@ def _new_spark_session():
SparkSession.builder.config(
"spark.jars.packages",
f"au.csiro.pathling:library-runtime:{__java_version__},"
f"io.delta:delta-core_{__scala_version__}:{__delta_version__},"
f"io.delta:delta-spark_{__scala_version__}:{__delta_version__},"
f"org.apache.hadoop:hadoop-aws:{__hadoop_version__}",
)
.config(
Expand Down
2 changes: 1 addition & 1 deletion lib/python/pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -225,7 +225,7 @@
<argument>--jars</argument>
<argument>${project.build.directory}/dependency/*</argument>
<argument>--packages</argument>
<argument>au.csiro.pathling:library-runtime:${project.version},io.delta:delta-core_${pathling.scalaVersion}:${pathling.deltaVersion}</argument>
<argument>au.csiro.pathling:library-runtime:${project.version},io.delta:delta-spark_${pathling.scalaVersion}:${pathling.deltaVersion}</argument>
<argument>--conf</argument>
<argument>spark.sql.extensions=io.delta.sql.DeltaSparkSessionExtension</argument>
<argument>--conf</argument>
Expand Down
2 changes: 1 addition & 1 deletion lib/python/requirements/package.txt
Original file line number Diff line number Diff line change
@@ -1,2 +1,2 @@
pyspark==3.4.1
pyspark==3.5.1
deprecated==1.2.14
2 changes: 1 addition & 1 deletion lib/python/tests/conftest.py
Original file line number Diff line number Diff line change
Expand Up @@ -74,7 +74,7 @@ def pathling_ctx(request, temp_warehouse_dir):
.config(
"spark.jars.packages",
f"au.csiro.pathling:library-runtime:{__java_version__},"
f"io.delta:delta-core_{__scala_version__}:{__delta_version__},"
f"io.delta:delta-spark_{__scala_version__}:{__delta_version__},"
f"org.apache.hadoop:hadoop-aws:{__hadoop_version__}",
)
.config("spark.sql.extensions", "io.delta.sql.DeltaSparkSessionExtension")
Expand Down
2 changes: 1 addition & 1 deletion library-api/pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -70,7 +70,7 @@
</dependency>
<dependency>
<groupId>io.delta</groupId>
<artifactId>delta-core_${pathling.scalaVersion}</artifactId>
<artifactId>delta-spark_${pathling.scalaVersion}</artifactId>
<scope>provided</scope>
</dependency>
<dependency>
Expand Down
8 changes: 4 additions & 4 deletions pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -72,10 +72,10 @@
<project.majorVersion>6</project.majorVersion>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>

<pathling.sparkVersion>3.4.2</pathling.sparkVersion>
<pathling.sparkVersion>3.5.1</pathling.sparkVersion>
<pathling.scalaVersion>2.12</pathling.scalaVersion>
<pathling.scalaFullVersion>2.12.17</pathling.scalaFullVersion>
<pathling.deltaVersion>2.4.0</pathling.deltaVersion>
<pathling.deltaVersion>3.1.0</pathling.deltaVersion>
<!-- Hadoop is currently pinned to 3.3.4 because of this vulnerability: CVE-2022-26612 -->
<pathling.hadoopVersion>3.3.4</pathling.hadoopVersion>
<pathling.hadoopMajorVersion>3</pathling.hadoopMajorVersion>
Expand Down Expand Up @@ -312,7 +312,7 @@
</dependency>
<dependency>
<groupId>io.delta</groupId>
<artifactId>delta-core_${pathling.scalaVersion}</artifactId>
<artifactId>delta-spark_${pathling.scalaVersion}</artifactId>
<version>${pathling.deltaVersion}</version>
</dependency>
<dependency>
Expand Down Expand Up @@ -628,7 +628,7 @@
<artifactId>ivy</artifactId>
<version>2.5.1</version>
</dependency>

<!-- CVE-2023-44981 -->
<dependency>
<groupId>org.apache.zookeeper</groupId>
Expand Down
17 changes: 8 additions & 9 deletions site/docs/libraries/installation/spark.md
Original file line number Diff line number Diff line change
Expand Up @@ -36,7 +36,7 @@ spark = (
SparkSession.builder.config(
"spark.jars.packages",
"au.csiro.pathling:library-runtime:6.4.2,"
"io.delta:delta-core_2.12:2.4.0,"
"io.delta:delta-spark_2.12:3.1.0,"
)
.config(
"spark.sql.extensions", "io.delta.sql.DeltaSparkSessionExtension"
Expand All @@ -58,12 +58,12 @@ library(sparklyr)
library(pathling)

sc <- spark_connect(master = "local",
packages = c(paste("au.csiro.pathling:library-runtime:", pathling_version()),
"io.delta:delta-core_2.12:2.4.0"),
                    packages = c(paste0("au.csiro.pathling:library-runtime:", pathling_version()),
"io.delta:delta-spark_2.12:3.1.0"),
config = list("sparklyr.shell.conf" = c(
"spark.sql.extensions=io.delta.sql.DeltaSparkSessionExtension",
"spark.sql.catalog.spark_catalog=org.apache.spark.sql.delta.catalog.DeltaCatalog"
)), version = "3.4.0")
)), version = "3.5.0")

pc <- pathling_connect(sc)
```
Expand All @@ -76,7 +76,7 @@ import au.csiro.pathling.library.PathlingContext

val spark = SparkSession.builder
.config("spark.jars.packages", "au.csiro.pathling:library-runtime:6.4.2," +
"io.delta:delta-core_2.12:2.4.0")
"io.delta:delta-spark_2.12:3.1.0")
.config("spark.sql.extensions", "io.delta.sql.DeltaSparkSessionExtension")
.config("spark.sql.catalog.spark_catalog",
"org.apache.spark.sql.delta.catalog.DeltaCatalog")
Expand All @@ -96,10 +96,10 @@ class MyApp {

public static void main(String[] args) {
SparkSession spark = SparkSession.builder()
.config("spark.jars.packages",
.config("spark.jars.packages",
"au.csiro.pathling:library-runtime:6.4.2," +
"io.delta:delta-core_2.12:2.4.0")
.config("spark.sql.extensions",
"io.delta:delta-spark_2.12:3.1.0")
.config("spark.sql.extensions",
"io.delta.sql.DeltaSparkSessionExtension")
.config("spark.sql.catalog.spark_catalog",
"org.apache.spark.sql.delta.catalog.DeltaCatalog")
Expand Down Expand Up @@ -143,4 +143,3 @@ RUN pip install --quiet --no-cache-dir pathling && \
fix-permissions "${CONDA_DIR}" && \
fix-permissions "/home/${NB_USER}"
```

0 comments on commit 3286b03

Please sign in to comment.