fix: Test for helpers and fixture scope
dashmug committed Aug 8, 2024
1 parent dd090c9 commit c49f99f
Showing 14 changed files with 32 additions and 14 deletions.
2 changes: 1 addition & 1 deletion .github/workflows/ci.yml
@@ -48,7 +48,7 @@ jobs:
- name: Check if requirements.txt is up-to-date
run: poetry export --with=dev --output docker/requirements.txt && git diff --exit-code
- name: Run tests that do not need the Glue container
run: poetry run pytest test/test_options.py
run: poetry run pytest test/test_*.py
- name: Build python packages
run: poetry build

1 change: 1 addition & 0 deletions .gitignore
@@ -13,3 +13,4 @@ wheels/
.coverage
coverage
htmlcov
test-results
2 changes: 1 addition & 1 deletion Makefile
@@ -76,7 +76,7 @@ checks: format typecheck importcheck test

.PHONY: clean
clean: ## Delete generated artifacts
@rm -rfv __pycache__ .coverage .import_linter_cache .mypy_cache .pytest_cache .ruff_cache dist htmlcov
@rm -rfv __pycache__ .coverage .import_linter_cache .mypy_cache .pytest_cache .ruff_cache coverage dist htmlcov test-results


.PHONY: publish
2 changes: 1 addition & 1 deletion pyproject.toml
@@ -112,7 +112,7 @@ disallow_incomplete_defs = false
disallow_untyped_defs = false

[tool.pytest.ini_options]
addopts = "-p no:cacheprovider"
addopts = "-p no:cacheprovider --junitxml=test-results/results.xml"
filterwarnings = [
"ignore::FutureWarning:pyspark.sql.context",
"ignore::UserWarning:test.test_options",
1 change: 1 addition & 0 deletions sonar-project.properties
@@ -8,3 +8,4 @@ sonar.pullrequest.github.summary_comment=true
sonar.python.version=3.9,3.10
sonar.python.coverage.reportPaths=coverage/results.xml
sonar.scm.provider=git
sonar.python.xunit.reportPath=test-results/results.xml
18 changes: 17 additions & 1 deletion src/glue_utils/helpers.py
@@ -15,7 +15,23 @@ def generate_partitioned_path(
partition_separator: str = "/",
key_value_separator: str = "=",
) -> str:
"""e.g. Given an ordered dictionary of strings, return a partitioned path."""
"""Generate a partitioned path from a dictionary of partitions.
Parameters
----------
partitions : dict[str, str]
A dictionary containing the partitions and their corresponding values.
partition_separator : str, optional
The separator to use between partitions in the generated path. Default is "/".
key_value_separator : str, optional
The separator to use between partition keys and values in the generated path. Default is "=".
Returns
-------
str
The generated partitioned path.
"""
if isinstance(partitions, dict) and not isinstance(partitions, OrderedDict):
warn(
"Regular dictionaries are unordered and may not produce the expected path. Use collections.OrderedDict instead.",
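To make the documented behavior concrete, here is a minimal usage sketch. It is not part of the commit: the import path glue_utils.helpers is assumed from the file location, and the expected output is inferred from the docstring's default separators.

from collections import OrderedDict

from glue_utils.helpers import generate_partitioned_path  # assumed import path

# Keys and values joined with "=" and segments joined with "/", per the
# documented defaults, should yield a Hive-style path such as
# "year=2024/month=08/day=08".
partitions = OrderedDict([("year", "2024"), ("month", "08"), ("day", "08")])
path = generate_partitioned_path(partitions)

# Passing a plain dict triggers the warning shown above, since key ordering
# is not guaranteed; collections.OrderedDict is the recommended input.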
2 changes: 1 addition & 1 deletion src/glue_utils/pyspark/context/documentdb.py
@@ -22,7 +22,7 @@ def create_dynamic_frame_from_documentdb(
connection_options: DocumentDBSourceConnectionOptions,
transformation_ctx: str = "",
) -> DynamicFrame:
"""Create a dynamic frame from a DocumentDB data source.
"""Create a DynamicFrame from a DocumentDB data source.
Parameters
----------
4 changes: 2 additions & 2 deletions src/glue_utils/pyspark/context/dynamodb.py
@@ -23,7 +23,7 @@ def create_dynamic_frame_from_dynamodb(
connection_options: DynamoDBSourceConnectionOptions,
transformation_ctx: str = "",
) -> DynamicFrame:
"""Create a dynamic frame from a DynamoDB data source.
"""Create a DynamicFrame from a DynamoDB data source.
This uses the AWS Glue DynamoDB ETL connector.
@@ -46,7 +46,7 @@ def create_dynamic_frame_from_dynamodb_export(
connection_options: DynamoDBExportSourceConnectionOptions,
transformation_ctx: str = "",
) -> DynamicFrame:
"""Create a dynamic frame from a DynamoDB data source.
"""Create a DynamicFrame from a DynamoDB data source.
This uses the AWS Glue DynamoDB Export connector.
2 changes: 1 addition & 1 deletion src/glue_utils/pyspark/context/kafka.py
@@ -22,7 +22,7 @@ def create_dynamic_frame_from_kafka(
connection_options: KafkaSourceConnectionOptions,
transformation_ctx: str = "",
) -> DynamicFrame:
"""Create a dynamic frame from a Kafka data source.
"""Create a DynamicFrame from a Kafka data source.
Parameters
----------
2 changes: 1 addition & 1 deletion src/glue_utils/pyspark/context/kinesis.py
@@ -22,7 +22,7 @@ def create_dynamic_frame_from_kinesis(
connection_options: KinesisSourceConnectionOptions,
transformation_ctx: str = "",
) -> DynamicFrame:
"""Create a dynamic frame from a Kinesis data source.
"""Create a DynamicFrame from a Kinesis data source.
Parameters
----------
2 changes: 1 addition & 1 deletion src/glue_utils/pyspark/context/mongodb.py
@@ -22,7 +22,7 @@ def create_dynamic_frame_from_mongodb(
connection_options: MongoDBSourceConnectionOptions,
transformation_ctx: str = "",
) -> DynamicFrame:
"""Create a dynamic frame from a Mongodb data source.
"""Create a DynamicFrame from a Mongodb data source.
Parameters
----------
2 changes: 1 addition & 1 deletion src/glue_utils/pyspark/context/opensearch.py
@@ -22,7 +22,7 @@ def create_dynamic_frame_from_opensearch(
connection_options: OpenSearchSourceConnectionOptions,
transformation_ctx: str = "",
) -> DynamicFrame:
"""Create a dynamic frame from a OpenSearch data source.
"""Create a DynamicFrame from a OpenSearch data source.
Parameters
----------
4 changes: 2 additions & 2 deletions src/glue_utils/pyspark/format_options.py
@@ -2,7 +2,7 @@

from __future__ import annotations

from typing import Literal, TypedDict, Union
from typing import Literal, TypedDict


class S3FormatOptions(TypedDict, total=False):
@@ -24,7 +24,7 @@ class CSVFormatOptions(S3FormatOptions, total=False):

separator: str
escaper: str
quoteChar: Union[str, Literal[-1]]
quoteChar: str | Literal[-1]
multiLine: bool
withHeader: bool
writeHeader: bool
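As a hedged illustration of what the revised quoteChar annotation allows (the import path glue_utils.pyspark.format_options is assumed from the file location):

from glue_utils.pyspark.format_options import CSVFormatOptions  # assumed import path

# total=False makes every key optional, so only the relevant options are set.
# Per AWS Glue's CSV format options, quoteChar accepts a single character or
# the literal -1 to turn quoting off, which is what str | Literal[-1] encodes.
csv_options: CSVFormatOptions = {
    "separator": ",",
    "withHeader": True,
    "quoteChar": -1,
}

The pipe syntax should be safe on the Python 3.9/3.10 targets declared in sonar-project.properties because the module already uses from __future__ import annotations, so the annotation is only evaluated if something calls typing.get_type_hints on it at runtime.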
2 changes: 1 addition & 1 deletion test/pyspark/context/conftest.py
@@ -5,7 +5,7 @@
from pyspark import SparkContext


@pytest.fixture(scope="session")
@pytest.fixture(scope="module")
def glue_pyspark_context():
sc = SparkContext.getOrCreate()
yield GluePySparkContext(sc)
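To spell out what narrowing the fixture scope from "session" to "module" means in practice, here is a hypothetical test module; apart from glue_pyspark_context, none of the names below come from the commit.

_seen = []


def test_first_use(glue_pyspark_context):
    # The first test in this module triggers the fixture setup.
    _seen.append(glue_pyspark_context)
    assert glue_pyspark_context is not None


def test_reused_within_module(glue_pyspark_context):
    # With scope="module", later tests in the same file receive the exact same
    # GluePySparkContext instance (this relies on pytest's default in-file
    # ordering), while tests in a different module get a fresh one instead of
    # a session-wide singleton.
    assert glue_pyspark_context is _seen[0]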
