Skip to content

Commit

Permalink
feat: update spark testing
Browse files Browse the repository at this point in the history
  • Loading branch information
vorel99 committed Sep 20, 2023
1 parent 80ffcac commit 8373d2e
Show file tree
Hide file tree
Showing 4 changed files with 36 additions and 18 deletions.
34 changes: 17 additions & 17 deletions .github/workflows/tests.yml
Original file line number Diff line number Diff line change
Expand Up @@ -81,9 +81,9 @@ jobs:
${{ runner.os }}-${{ matrix.pandas }}-pip-
- run: |
pip install --upgrade pip setuptools wheel
pip install -r requirements.txt "${{ matrix.pandas }}" "${{ matrix.numpy }}"
pip install -r requirements-test.txt
- run: make install
make install_test_pandas
- run: pip install "${{ matrix.pandas }}" "${{ matrix.numpy }}"

- run: make test

Expand Down Expand Up @@ -129,9 +129,9 @@ jobs:
${{ runner.os }}-${{ matrix.pandas }}-pip-
- run: |
pip install --upgrade pip setuptools wheel
pip install -r requirements.txt "${{ matrix.pandas }}" "${{ matrix.numpy }}"
pip install -r requirements-test.txt
- run: make install
make install_test_pandas
- run: pip install "${{ matrix.pandas }}" "${{ matrix.numpy }}"

- run: make test_cov

Expand All @@ -144,9 +144,9 @@ jobs:
${{ runner.os }}-${{ matrix.pandas }}-pip-
- run: |
pip install --upgrade pip setuptools wheel
pip install -r requirements.txt "${{ matrix.pandas }}" "${{ matrix.numpy }}"
pip install -r requirements-test.txt
- run: make install
make install_test_pandas
- run: pip install "${{ matrix.pandas }}" "${{ matrix.numpy }}"
- run: make test_cov
- run: codecov -F py${{ matrix.python-version }}-${{ matrix.os }}-${{ matrix.pandas }}-${{ matrix.numpy }}

Expand Down Expand Up @@ -181,7 +181,6 @@ jobs:
SPARK_VERSION: ${{ matrix.spark }}
HADOOP_VERSION: ${{ matrix.hadoop }}
SPARK_DIRECTORY: ${{ github.workspace }}/../
SPARK_HOME: ${{ github.workspace }}/../spark/
steps:
- uses: actions/checkout@v4
- name: Setup python
Expand Down Expand Up @@ -212,15 +211,16 @@ jobs:
${{ runner.os }}-${{ matrix.pandas }}-pip-
- run: |
pip install --upgrade pip setuptools wheel
pip install pytest-spark>=0.6.0 pyarrow==1.0.1 pyspark=="${{ matrix.spark }}"
pip install -r requirements.txt
pip install -r requirements-test.txt
pip install "${{ matrix.pandas }}" "${{ matrix.numpy }}"
make install_test_spark
- run: pip install "${{ matrix.pandas }}" "${{ matrix.numpy }}" pyspark=="${{ matrix.spark }}"
# Make sure the proper version of pandas is installed after everything
- run: |
pip show pandas numpy pyspark
- if: ${{ matrix.spark != '3.0.1' }}
run: echo "ARROW_PRE_0_15_IPC_FORMAT=1" >> $GITHUB_ENV
- run: echo "SPARK_LOCAL_IP=127.0.0.1" >> $GITHUB_ENV
- run: make install
- run: make install-spark-ci
- run: pip install -r requirements-spark.txt # Make sure the proper version of pandas is installed after everything
- run: |
sudo apt-get update
sudo apt-get -y install openjdk-8-jdk
- run: make test_spark

1 change: 1 addition & 0 deletions Makefile
Original file line number Diff line number Diff line change
Expand Up @@ -35,6 +35,7 @@ package:
install_dev:
rm -rf $(VENV)
python -m venv $(VENV)
$(PYTHON) -m pip install --upgrade pip
$(PYTHON) -m pip install -e ".[dev, test]"

install_test_pandas:
Expand Down
2 changes: 1 addition & 1 deletion pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -34,7 +34,7 @@ classifiers=[

dependencies = [
"scipy>=1.4.1, <1.12",
"pandas>1.1, <2.1, !=1.4.0",
"pandas>1.1, <2.0, !=1.4.0",
"matplotlib>=3.2, <=3.7.3",
"pydantic>=1.8.1, <2",
"PyYAML>=5.0.0, <6.1",
Expand Down
17 changes: 17 additions & 0 deletions tests/backends/spark_backend/conftest.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,17 @@
import pytest
from pyspark.sql import SparkSession


@pytest.fixture(scope="session")
def spark_session():
    """Session-scoped SparkSession for local test runs.

    Yields a single shared session configured for minimal, deterministic
    resource usage (one core, one executor, one shuffle partition, loopback
    driver address), and stops it once the test session finishes.
    """
    builder = SparkSession.builder.master("local[1]").appName("local-tests")
    # Keep the cluster footprint as small as possible so CI runs stay fast.
    settings = {
        "spark.executor.cores": "1",
        "spark.executor.instances": "1",
        "spark.sql.shuffle.partitions": "1",
        "spark.driver.bindAddress": "127.0.0.1",
    }
    for key, value in settings.items():
        builder = builder.config(key, value)
    session = builder.getOrCreate()
    yield session
    session.stop()

0 comments on commit 8373d2e

Please sign in to comment.