diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 0d2d19ba..23b907d7 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -30,31 +30,13 @@ repos: files: 'cloud\/.*\.py' # Static type and code checkers below - - - repo: https://github.com/PyCQA/flake8 - rev: 6.0.0 + - repo: https://github.com/charliermarsh/ruff-pre-commit + rev: 'v0.6.0' hooks: - - id: flake8 - additional_dependencies: - - flake8-comprehensions - - flake8-print - - flake8-mutable - - flake8-pytest-style - - flake8-printf-formatting - - 'flake8-simplify==0.19.2' - - 'flake8-type-checking==2.1.3' - args: [ '--enable-extensions=G' ] + - id: ruff + args: [ --fix ] - repo: https://github.com/python-poetry/poetry rev: 1.5.0 hooks: - id: poetry-check - - - - repo: https://github.com/pre-commit/mirrors-mypy - rev: v0.942 - hooks: - - id: mypy - files: 'aladdin/.*|tests/.*' - additional_dependencies: - - types-freezegun diff --git a/Dockerfile b/Dockerfile index f2f95edb..80a53e43 100644 --- a/Dockerfile +++ b/Dockerfile @@ -7,14 +7,11 @@ RUN poetry config virtualenvs.create false RUN pip install pip --upgrade COPY ./pyproject.toml /opt/app/pyproject.toml -# COPY ./poetry.lock /opt/app/poetry.lock +COPY ./poetry.lock /opt/app/poetry.lock RUN mkdir /opt/app/aligned RUN poetry install --no-dev --no-root --extras "redis psql server aws" COPY ./aligned /opt/app/aligned -# COPY /. opt/app/aligned - ENTRYPOINT ["python", "-m", "aligned.cli"] -# RUN pip install -U 'opt/app/aligned[redis,aws,psql,server,text]' diff --git a/Dockerfile.test b/Dockerfile.test index da9e042c..7f64b4fc 100644 --- a/Dockerfile.test +++ b/Dockerfile.test @@ -7,9 +7,10 @@ RUN poetry config virtualenvs.create false RUN pip install pip --upgrade COPY ./pyproject.toml /opt/app/pyproject.toml +COPY ./poetry.lock /opt/app/poetry.lock RUN mkdir /opt/app/aligned -RUN poetry install --no-root --all-extras +RUN poetry install --no-root --extras "redis pandera kafka ollama mlflow lancedb" COPY ./aligned /opt/app/aligned COPY ./conftest.py /opt/app/conftest.py diff --git a/aligned/__init__.py b/aligned/__init__.py index 48b7e22a..ea75e1d4 100644 --- a/aligned/__init__.py +++ b/aligned/__init__.py @@ -24,7 +24,7 @@ from aligned.data_source.stream_data_source import HttpStreamSource from aligned.data_source.batch_data_source import CustomMethodDataSource from aligned.feature_store import ContractStore, FeatureStore -from aligned.feature_view import feature_view, combined_feature_view, check_schema +from aligned.feature_view import feature_view, check_schema from aligned.schemas.text_vectoriser import EmbeddingModel from aligned.sources.kafka import KafkaConfig from aligned.sources.local import FileSource, Directory, ParquetConfig, CsvConfig @@ -77,7 +77,6 @@ 'Json', 'EmbeddingModel', 'feature_view', - 'combined_feature_view', 'model_contract', # Aggregation 'CustomAggregation', diff --git a/aligned/active_learning/job.py b/aligned/active_learning/job.py index e0cd0271..48c7e158 100644 --- a/aligned/active_learning/job.py +++ b/aligned/active_learning/job.py @@ -1,9 +1,11 @@ +from __future__ import annotations + import logging from dataclasses import dataclass -import pandas as pd import polars as pl +from aligned.lazy_imports import pandas as pd from aligned.active_learning.selection import ActiveLearningMetric, ActiveLearningSelection from aligned.active_learning.write_policy import ActiveLearningWritePolicy from aligned.retrival_job import RetrivalJob diff --git a/aligned/checks.py b/aligned/checks.py index 87e928b6..94b3914f 100644 --- 
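Note on the recurring `from aligned.lazy_imports import pandas as pd` rewrite in this and the following files: the shim module itself is not part of this diff, so the following is only a hedged sketch of the pattern it implies, deferring the pandas import until first attribute access. All names in the sketch are assumptions.

```
# Hypothetical sketch of a lazy-import shim in the spirit of aligned.lazy_imports.
# Not the actual implementation, which this diff does not show.
import importlib
from types import ModuleType
from typing import Any


class LazyModule:
    def __init__(self, module_name: str) -> None:
        self._module_name = module_name
        self._module: ModuleType | None = None

    def __getattr__(self, name: str) -> Any:
        # Import on first use, so pandas is only required on the
        # code paths that actually touch it.
        if self._module is None:
            self._module = importlib.import_module(self._module_name)
        return getattr(self._module, name)


pandas = LazyModule('pandas')
```

A shim like this keeps `pd.DataFrame` annotations importable even when pandas is absent, provided annotation evaluation is deferred, which is consistent with the `from __future__ import annotations` lines added in the same files.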
a/aligned/checks.py +++ b/aligned/checks.py @@ -37,7 +37,7 @@ def as_markdown(self) -> str: def feature_exist(feature: FeatureReference, store: ContractStore) -> bool: loc = feature.location - if loc.location == 'model': + if loc.location_type == 'model': model = store.model(loc.name).model all_features = model.predictions_view.full_schema else: diff --git a/aligned/cli.py b/aligned/cli.py index 59788e6a..7170a004 100644 --- a/aligned/cli.py +++ b/aligned/cli.py @@ -152,7 +152,12 @@ async def compile(repo_path: str, reference_file: str, env_file: str, ignore_fil repo_def = await RepoReader.definition_from_path(dir, excludes) - await file.write(repo_def.to_json(omit_none=True).encode('utf-8')) + data = repo_def.to_json(omit_none=True) + if isinstance(data, str): + data_bytes = data.encode('utf-8') + else: + data_bytes = data + await file.write(data_bytes) else: click.echo(f'No repo file found at {dir}') diff --git a/aligned/compiler/feature_factory.py b/aligned/compiler/feature_factory.py index 05966aaf..99c22442 100644 --- a/aligned/compiler/feature_factory.py +++ b/aligned/compiler/feature_factory.py @@ -4,7 +4,7 @@ from datetime import timedelta from typing import TYPE_CHECKING, Any, Callable, Generic, TypeVar -import pandas as pd +from aligned.lazy_imports import pandas as pd import polars as pl from aligned.compiler.vector_index_factory import VectorIndexFactory @@ -25,7 +25,7 @@ from aligned.schemas.derivied_feature import DerivedFeature, AggregateOver from aligned.schemas.event_trigger import EventTrigger as EventTriggerSchema from aligned.schemas.feature import EventTimestamp as EventTimestampFeature -from aligned.schemas.feature import Feature, FeatureLocation, FeatureReference, FeatureType +from aligned.schemas.feature import Feature, FeatureLocation, FeatureReference, FeatureType, StaticFeatureTags from aligned.schemas.literal_value import LiteralValue from aligned.schemas.target import ClassificationTarget as ClassificationTargetSchemas from aligned.schemas.target import ClassTargetProbability @@ -65,12 +65,12 @@ def using_features(self) -> list[FeatureFactory]: class AggregationTransformationFactory: def aggregate_over( - self, group_by: list[FeatureReference], time_columns: FeatureReference | None + self, group_by: list[FeatureReference], time_column: FeatureReference | None ) -> AggregateOver: raise NotImplementedError(type(self)) -T = TypeVar('T') +T = TypeVar('T', bound='FeatureFactory') @dataclass @@ -88,14 +88,14 @@ class TargetProbability: def __hash__(self) -> int: return self._name.__hash__() - def __set_name__(self, owner, name): + def __set_name__(self, owner: str, name: str) -> None: self._name = name def compile(self) -> ClassTargetProbability: assert self._name, 'Missing the name of the feature' return ClassTargetProbability( outcome=LiteralValue.from_value(self.of_value), - feature=Feature(self._name, dtype=FeatureType.float()), + feature=Feature(self._name, dtype=FeatureType.floating_point()), ) @@ -110,11 +110,11 @@ def compile_hidden_features( hidden_features: int, var_name: str, entities: set[Feature], -): +) -> tuple[set[Feature], set[DerivedFeature]]: aggregations = [] - features = set() - derived_features = set() + features: set[Feature] = set() + derived_features: set[DerivedFeature] = set() if feature.transformation: # Adding features that is not stored in the view @@ -182,7 +182,7 @@ class RecommendationTarget(FeatureReferencable): _name: str | None = field(default=None) _location: FeatureLocation | None = field(default=None) - def __set_name__(self, 
owner, name): + def __set_name__(self, owner: str, name: str) -> None: self._name = name def feature_reference(self) -> FeatureReference: @@ -214,7 +214,7 @@ class RegressionLabel(FeatureReferencable): _name: str | None = field(default=None) _location: FeatureLocation | None = field(default=None) - def __set_name__(self, owner, name): + def __set_name__(self, owner: str, name: str) -> None: self._name = name def feature_reference(self) -> FeatureReference: @@ -232,15 +232,18 @@ def listen_to_ground_truth_event(self, stream: StreamDataSource) -> RegressionLa ) def send_ground_truth_event(self, when: Bool, sink_to: StreamDataSource) -> RegressionLabel: - assert when.dtype == FeatureType.bool(), 'A trigger needs a boolean condition' + assert when.dtype == FeatureType.boolean(), 'A trigger needs a boolean condition' return RegressionLabel( self.feature, EventTrigger(when, sink_to), ground_truth_event=self.ground_truth_event ) def compile(self) -> RegressionTargetSchemas: + + assert self._name + on_ground_truth_event = self.ground_truth_event - trigger = self.event_trigger + trigger = None if self.event_trigger: event = self.event_trigger @@ -261,6 +264,9 @@ def compile(self) -> RegressionTargetSchemas: ) +GenericClassificationT = TypeVar('GenericClassificationT', bound='CanBeClassificationLabel') + + @dataclass class CanBeClassificationLabel: @@ -268,7 +274,7 @@ class CanBeClassificationLabel: event_trigger: EventTrigger | None = field(default=None) ground_truth_event: StreamDataSource | None = field(default=None) - def as_classification_label(self: T) -> T: + def as_classification_label(self: GenericClassificationT) -> GenericClassificationT: """ Tells Aligned that this feature is a classification target in a model_contract. @@ -278,7 +284,7 @@ def as_classification_label(self: T) -> T: assert isinstance(self, FeatureFactory) assert isinstance(self, CanBeClassificationLabel) - new_value = self.copy_type() # type: ignore + new_value: T = self.copy_type() # type: ignore new_value.ground_truth_feature = self return new_value @@ -287,14 +293,16 @@ def prediction_feature(self) -> Feature: return self.feature() raise ValueError(f'{self} is not a feature factory, and can therefore not be a feature') - def listen_to_ground_truth_event(self: T, stream: StreamDataSource) -> T: - assert isinstance(self, CanBeClassificationLabel) - + def listen_to_ground_truth_event( + self: GenericClassificationT, stream: StreamDataSource + ) -> GenericClassificationT: self.ground_truth_event = stream return self - def send_ground_truth_event(self: T, when: Bool, sink_to: StreamDataSource) -> T: - assert when.dtype == FeatureType.bool(), 'A trigger needs a boolean condition' + def send_ground_truth_event( + self: GenericClassificationT, when: Bool, sink_to: StreamDataSource + ) -> GenericClassificationT: + assert when.dtype == FeatureType.boolean(), 'A trigger needs a boolean condition' assert isinstance(self, CanBeClassificationLabel) self.event_trigger = EventTrigger(when, sink_to) @@ -338,7 +346,7 @@ def compile_classification_target(self) -> ClassificationTargetSchemas | None: pred_feature = self.prediction_feature() on_ground_truth_event = self.ground_truth_event - trigger = self.event_trigger + trigger = None if self.event_trigger: event = self.event_trigger @@ -380,12 +388,15 @@ class FeatureFactory(FeatureReferencable): _name: str | None = None _location: FeatureLocation | None = None _description: str | None = None + _default_value: LiteralValue | None = None + tags: set[str] | None = None transformation: 
TransformationFactory | None = None constraints: set[Constraint] | None = None - def __set_name__(self, owner, name): - self._name = name + def __set_name__(self, owner: str, name: str) -> None: + if self._name is None: + self._name = name @property def dtype(self) -> FeatureType: @@ -411,13 +422,20 @@ def feature_reference(self) -> FeatureReference: ) return FeatureReference(self.name, self._location, self.dtype) + def with_tag(self: T, key: str) -> T: + if self.tags is None: + self.tags = set() + self.tags.add(key) + return self + def feature(self) -> Feature: return Feature( name=self.name, dtype=self.dtype, description=self._description, - tags=None, + tags=list(self.tags) if self.tags else None, constraints=self.constraints, + default_value=self._default_value, ) def as_regression_label(self) -> RegressionLabel: @@ -441,6 +459,9 @@ def as_regression_target(self) -> RegressionLabel: def as_recommendation_target(self) -> RecommendationTarget: return RecommendationTarget(self) + def as_annotated_by(self: T) -> T: + return self.with_tag(StaticFeatureTags.is_annotated_by) + def compile(self) -> DerivedFeature: if not self.transformation: @@ -453,7 +474,7 @@ def compile(self) -> DerivedFeature: transformation=self.transformation.compile(), depth=self.depth(), description=self._description, - tags=None, + tags=list(self.tags) if self.tags else None, constraints=self.constraints, ) @@ -489,7 +510,7 @@ def add_values(feature: FeatureFactory) -> None: return values def copy_type(self: T) -> T: - raise NotImplementedError() + raise NotImplementedError(type(self)) def fill_na(self: T, value: FeatureFactory | Any) -> T: from aligned.compiler.transformation_factory import FillMissingFactory @@ -506,7 +527,7 @@ def fill_na(self: T, value: FeatureFactory | Any) -> T: return instance # type: ignore [return-value] def transformed_using_features_pandas( - self: T, using_features: list[FeatureFactory], transformation: Callable[[pd.DataFrame, pd.Series]] + self: T, using_features: list[FeatureFactory], transformation: Callable[[pd.DataFrame], pd.Series] ) -> T: from aligned.compiler.transformation_factory import PandasTransformationFactory @@ -574,6 +595,11 @@ def is_required(self: T) -> T: def is_optional(self: T) -> T: self._add_constraint(Optional()) # type: ignore[attr-defined] + self._default_value = LiteralValue.from_value(None) + return self + + def default_value(self: T, value: Any) -> T: + self._default_value = LiteralValue.from_value(value) return self def _add_constraint(self, constraint: Constraint) -> None: @@ -594,7 +620,11 @@ def is_not_null(self) -> Bool: instance.transformation = NotNullFactory(self) return instance - def referencing(self, entity: FeatureFactory) -> FeatureFactory: + def with_name(self: T, name: str) -> T: + self._name = name + return self + + def referencing(self: T, entity: FeatureFactory) -> T: from aligned.schemas.constraint_types import ReferencingColumn self._add_constraint(ReferencingColumn(entity.feature_reference())) @@ -604,7 +634,7 @@ def referencing(self, entity: FeatureFactory) -> FeatureFactory: class CouldBeModelVersion: def as_model_version(self) -> ModelVersion: if isinstance(self, FeatureFactory): - return ModelVersion(self) + return ModelVersion(self).with_tag(StaticFeatureTags.is_model_version) raise ValueError(f'{self} is not a feature factory, and can therefore not be a model version') @@ -612,7 +642,7 @@ def as_model_version(self) -> ModelVersion: class CouldBeEntityFeature: def as_entity(self) -> Entity: if isinstance(self, FeatureFactory): - 
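The new fluent helpers here (`with_tag`, `default_value`, `with_name`) lean on `T = TypeVar('T', bound='FeatureFactory')` so that chained calls keep the concrete factory type instead of widening to `FeatureFactory`. A hedged usage sketch, assuming the usual `@feature_view` decorator shape; the view and file names are invented:

```
from aligned import feature_view, FileSource, String

@feature_view(name='users', source=FileSource.csv_at('users.csv'))
class Users:
    user_id = String().as_entity()

    # default_value stores a LiteralValue on the compiled Feature,
    # and with_tag attaches a free-form tag to it.
    country = String().is_optional().default_value('unknown').with_tag('pii')
```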
return Entity(self) + return Entity(self).with_tag(StaticFeatureTags.is_entity) raise ValueError(f'{self} is not a feature factory, and can therefore not be an entity') @@ -649,7 +679,7 @@ def is_in(self, values: list[Any]) -> Bool: class ComparableFeature(EquatableFeature): - def __lt__(self, right: object) -> Bool: + def __lt__(self, right: float) -> Bool: from aligned.compiler.transformation_factory import LowerThenFactory instance = Bool() @@ -919,21 +949,17 @@ def day_of_year(self) -> Int32: class Bool(EquatableFeature, LogicalOperatableFeature, CanBeClassificationLabel): - - _is_shadow_model_flag: bool = field(default=False) - @property def dtype(self) -> FeatureType: - return FeatureType.bool() + return FeatureType.boolean() def copy_type(self) -> Bool: if self.constraints and Optional() in self.constraints: return Bool().is_optional() return Bool() - def is_shadow_model_flag(self: Bool, is_shadow: bool = True) -> Bool: - self._is_shadow_model_flag = is_shadow - return self + def is_shadow_model_flag(self: Bool) -> Bool: + return self.with_tag(StaticFeatureTags.is_shadow_model) class Float(ArithmeticFeature, DecimalOperations): @@ -944,7 +970,7 @@ def copy_type(self) -> Float: @property def dtype(self) -> FeatureType: - return FeatureType.float() + return FeatureType.floating_point() def aggregate(self) -> ArithmeticAggregation: return ArithmeticAggregation(self) @@ -1185,7 +1211,7 @@ def ollama_embedding(self, model: str, embedding_size: int, host_env: str | None def ollama_generate(self, model: str, system: str | None = None, host_env: str | None = None) -> String: from aligned.compiler.transformation_factory import OllamaGenerate - feature = Json() + feature = String() feature.transformation = OllamaGenerate(model, system or '', self, host_env) return feature @@ -1260,6 +1286,9 @@ def copy_type(self: Json) -> Json: return Json().is_optional() return Json() + def as_input_features(self) -> Json: + return self.with_tag(StaticFeatureTags.is_input_features) + @property def dtype(self) -> FeatureType: return FeatureType.json() @@ -1308,6 +1337,15 @@ def __init__(self, dtype: FeatureFactory): def aggregate(self) -> CategoricalAggregation: return CategoricalAggregation(self) + def feature(self) -> Feature: + return Feature( + name=self.name, + dtype=self.dtype, + description=self._description, + tags=None, + constraints=self._dtype.constraints, + ) + class Timestamp(DateFeature, ArithmeticFeature): @@ -1408,6 +1446,24 @@ def copy_type(self) -> List: def dtype(self) -> FeatureType: return FeatureType.array() + def feature(self) -> Feature: + from aligned.schemas.constraints import ListConstraint + + feat = super().feature() + if self.sub_type.constraints: + feat.constraints = (feat.constraints or set()).union( + {ListConstraint(list(self.sub_type.constraints))} + ) + return feat + + def max_length(self, value: int) -> List: + self._add_constraint(MaxLength(value)) + return self + + def min_length(self, value: int) -> List: + self._add_constraint(MinLength(value)) + return self + def contains(self, value: Any) -> Bool: from aligned.compiler.transformation_factory import ArrayContainsFactory diff --git a/aligned/compiler/model.py b/aligned/compiler/model.py index dd8f19e7..a5afca4f 100644 --- a/aligned/compiler/model.py +++ b/aligned/compiler/model.py @@ -18,11 +18,14 @@ RecommendationTarget, RegressionLabel, TargetProbability, - ModelVersion, ) -from aligned.data_source.batch_data_source import BatchDataSource +from aligned.data_source.batch_data_source import CodableBatchDataSource, 
DummyDataSource from aligned.data_source.stream_data_source import StreamDataSource -from aligned.feature_view.feature_view import FeatureView, FeatureViewWrapper +from aligned.feature_view.feature_view import ( + FeatureView, + FeatureViewMetadata, + FeatureViewWrapper, +) from aligned.exposed_model.interface import ExposedModel from aligned.request.retrival_request import RetrivalRequest from aligned.retrival_job import ConvertableToRetrivalJob, PredictionJob, RetrivalJob @@ -56,10 +59,10 @@ class ModelMetadata: tags: list[str] | None = field(default=None) description: str | None = field(default=None) - output_source: BatchDataSource | None = field(default=None) + output_source: CodableBatchDataSource | None = field(default=None) output_stream: StreamDataSource | None = field(default=None) - application_source: BatchDataSource | None = field(default=None) + application_source: CodableBatchDataSource | None = field(default=None) acceptable_freshness: timedelta | None = field(default=None) unacceptable_freshness: timedelta | None = field(default=None) @@ -69,6 +72,17 @@ class ModelMetadata: dataset_store: DatasetStore | None = field(default=None) + def as_view_meatadata(self) -> FeatureViewMetadata: + return FeatureViewMetadata( + name=self.name, + source=self.output_source or DummyDataSource(), + contacts=self.contacts, + tags=self.tags, + description=self.description, + acceptable_freshness=self.acceptable_freshness, + unacceptable_freshness=self.unacceptable_freshness, + ) + @dataclass class ModelContractWrapper(Generic[T]): @@ -76,6 +90,10 @@ class ModelContractWrapper(Generic[T]): metadata: ModelMetadata contract: Type[T] + @property + def location(self) -> FeatureLocation: + return FeatureLocation.model(self.metadata.name) + def __call__(self) -> T: # Needs to compiile the model to set the location for the view features _ = self.compile() @@ -89,7 +107,7 @@ def __call__(self) -> T: value = getattr(contract, attribute) if isinstance(value, FeatureFactory): - value._location = FeatureLocation.model(self.metadata.name) + value._location = self.location setattr(contract, attribute, copy.deepcopy(value)) setattr(contract, '__model_wrapper__', self) @@ -98,6 +116,49 @@ def __call__(self) -> T: def compile(self) -> ModelSchema: return compile_with_metadata(self.contract(), self.metadata) + def as_view_wrapper(self) -> FeatureViewWrapper[T]: + + return FeatureViewWrapper(self.metadata.as_view_meatadata(), self.contract()) + + def with_schema( + self, + name: str, + source: CodableBatchDataSource | FeatureViewWrapper, + materialized_source: CodableBatchDataSource | None = None, + entities: dict[str, FeatureFactory] | None = None, + additional_features: dict[str, FeatureFactory] | None = None, + copy_default_values: bool = False, + copy_transformations: bool = False, + ) -> FeatureViewWrapper[T]: + + return self.as_view_wrapper().with_schema( + name=name, + source=source, + materialized_source=materialized_source, + entities=entities, + additional_features=additional_features, + copy_default_values=copy_default_values, + copy_transformations=copy_transformations, + ) + + def as_langchain_retriver( + self, + number_of_docs: int = 5, + needed_views: list[FeatureViewWrapper | ModelContractWrapper] | None = None, + ): + from aligned.exposed_model.langchain import AlignedRetriver + from aligned.sources.vector_index import VectorIndex + + source = self.metadata.output_source + if not isinstance(source, VectorIndex): + raise ValueError(f"Found no vector index in source: {source}") + + store = 
self.query(needed_views) + + index_name = source.vector_index_name() or self.metadata.name + + return AlignedRetriver(store=store.store, index_name=index_name, number_of_docs=number_of_docs) + def query( self, needed_views: list[FeatureViewWrapper | ModelContractWrapper] | None = None ) -> ModelFeatureStore: @@ -147,7 +208,7 @@ def as_view(self) -> CompiledFeatureView | None: return view.as_view(self.metadata.name) def filter( - self, name: str, where: Callable[[T], Bool], application_source: BatchDataSource | None = None + self, name: str, where: Callable[[T], Bool], application_source: CodableBatchDataSource | None = None ) -> ModelContractWrapper[T]: from aligned.data_source.batch_data_source import FilteredDataSource @@ -176,7 +237,7 @@ def filter( return ModelContractWrapper(metadata=meta, contract=self.contract) - def as_source(self) -> BatchDataSource: + def as_source(self) -> CodableBatchDataSource: from aligned.schemas.model import ModelSource compiled_model = self.compile() @@ -193,7 +254,7 @@ def join( on_left: str | FeatureFactory | list[str] | list[FeatureFactory], on_right: str | FeatureFactory | list[str] | list[FeatureFactory], how: str = 'inner', - ) -> BatchDataSource: + ) -> CodableBatchDataSource: from aligned.data_source.batch_data_source import join_source from aligned.schemas.model import ModelSource @@ -214,7 +275,9 @@ def join( how=how, ) - def join_asof(self, view: FeatureViewWrapper, on_left: list[str], on_right: list[str]) -> BatchDataSource: + def join_asof( + self, view: FeatureViewWrapper, on_left: list[str], on_right: list[str] + ) -> CodableBatchDataSource: from aligned.data_source.batch_data_source import join_asof_source from aligned.schemas.model import ModelSource @@ -267,9 +330,9 @@ def model_contract( contacts: list[str] | None = None, tags: list[str] | None = None, description: str | None = None, - output_source: BatchDataSource | None = None, + output_source: CodableBatchDataSource | None = None, output_stream: StreamDataSource | None = None, - application_source: BatchDataSource | None = None, + application_source: CodableBatchDataSource | None = None, dataset_store: DatasetStore | StorageFileReference | None = None, exposed_at_url: str | None = None, exposed_model: ExposedModel | None = None, @@ -291,7 +354,7 @@ def decorator(cls: Type[T]) -> ModelContractWrapper[T]: feat.as_reference(FeatureLocation.feature_view(compiled_view.name)) for feat in request.request_result.features ] - unwrapped_input_features.extend(features) + unwrapped_input_features.extend(features) # type: ignore elif isinstance(feature, ModelContractWrapper): compiled_model = feature.compile() request = compiled_model.predictions_view.request('') @@ -299,7 +362,7 @@ def decorator(cls: Type[T]) -> ModelContractWrapper[T]: feat.as_reference(FeatureLocation.model(compiled_model.name)) for feat in request.request_result.features ] - unwrapped_input_features.extend(features) + unwrapped_input_features.extend(features) # type: ignore else: unwrapped_input_features.append(feature) @@ -361,7 +424,6 @@ class MyModel(ModelContract): entities=set(), features=set(), derived_features=set(), - model_version_column=None, source=metadata.output_source, application_source=metadata.application_source, stream_source=metadata.output_stream, @@ -371,6 +433,11 @@ class MyModel(ModelContract): acceptable_freshness=metadata.acceptable_freshness, unacceptable_freshness=metadata.unacceptable_freshness, ) + + assert inference_view.classification_targets is not None + assert 
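On the contract side, a hedged sketch of how the retyped `CodableBatchDataSource` arguments are used from the decorator. The `name` and `input_features` parameters sit outside the hunks shown here, so treat those names as assumptions; the feature names are invented, and `Users` is the view sketched earlier.

```
from aligned import model_contract, FileSource, String, Bool, EventTimestamp

@model_contract(
    name='churn_model',  # assumed parameter, not visible in this hunk
    input_features=[Users().country],  # assumed parameter as well
    output_source=FileSource.parquet_at('churn_predictions.parquet'),
)
class ChurnModel:
    user_id = String().as_entity()
    predicted_at = EventTimestamp()
    will_churn = Bool().as_classification_label()
```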
inference_view.regression_targets is not None + assert inference_view.recommendation_targets is not None + probability_features: dict[str, set[TargetProbability]] = {} hidden_features = 0 @@ -380,12 +447,11 @@ class MyModel(ModelContract): for var_name in var_names: feature = getattr(model, var_name) if isinstance(feature, FeatureFactory): - assert feature._name + assert ( + feature._name + ), f"Expected name but found none in model: {metadata.name} for feature {var_name}" feature._location = FeatureLocation.model(metadata.name) - if isinstance(feature, ModelVersion): - inference_view.model_version_column = feature.feature() - if isinstance(feature, FeatureView): compiled = feature.compile() inference_view.entities.update(compiled.entities) @@ -412,7 +478,9 @@ class MyModel(ModelContract): inference_view.event_timestamp = feature.event_timestamp() elif isinstance(feature, TargetProbability): + assert isinstance(feature.target, FeatureFactory) feature_name = feature.target._name + assert feature_name assert feature._name assert feature.target._name in classification_targets, 'Target must be a classification target.' @@ -422,7 +490,7 @@ class MyModel(ModelContract): inference_view.features.add( Feature( var_name, - FeatureType.float(), + FeatureType.floating_point(), f"The probability of target named {feature_name} being '{feature.of_value}'.", ) ) @@ -484,9 +552,6 @@ def sort_key(x: tuple[int, FeatureFactory]) -> int: else: inference_view.features.add(feature.feature()) - if isinstance(feature, Bool) and feature._is_shadow_model_flag: - inference_view.is_shadow_model_flag = feature.feature() - # Needs to run after the feature views have compiled features = metadata.features.compile() @@ -502,7 +567,9 @@ def sort_key(x: tuple[int, FeatureFactory]) -> int: dtype=transformation.dtype, transformation=transformation, depending_on={ - FeatureReference(feat, FeatureLocation.model(metadata.name), dtype=FeatureType.float()) + FeatureReference( + feat, FeatureLocation.model(metadata.name), dtype=FeatureType.floating_point() + ) for feat in transformation.column_mappings.keys() }, depth=1, diff --git a/aligned/compiler/repo_reader.py b/aligned/compiler/repo_reader.py index bd61d1ef..a3a9a3a3 100644 --- a/aligned/compiler/repo_reader.py +++ b/aligned/compiler/repo_reader.py @@ -5,10 +5,8 @@ from typing import Any from aligned.compiler.model import ModelContractWrapper -from aligned.enricher import Enricher -from aligned.feature_view.combined_view import CombinedFeatureViewWrapper from aligned.feature_view.feature_view import FeatureViewWrapper -from aligned.schemas.repo_definition import EnricherReference, RepoDefinition, RepoMetadata, RepoReference +from aligned.schemas.repo_definition import RepoDefinition, RepoMetadata, RepoReference from pathlib import Path @@ -83,33 +81,22 @@ class RepoReader: """ @staticmethod - async def definition_from_path(repo_path: Path, excludes: list[str] | None = None) -> RepoDefinition: - - excluded_files: list[Path] = [] - for exclude in excludes or []: - excluded_files.extend(repo_path.resolve().glob(exclude)) + async def definition_from_files(files: list[Path], root_path: Path) -> RepoDefinition: - metadata = RepoMetadata(created_at=datetime.now(), name=repo_path.name, github_url=None) + metadata = RepoMetadata(created_at=datetime.now(), name=root_path.name, github_url=None) repo = RepoDefinition( metadata=metadata, feature_views=set(), - combined_feature_views=set(), models=set(), - enrichers=[], ) - feature_view_names: dict[str, str] = {} - - for py_file in 
find_files(repo_path): - if py_file in excluded_files: - continue - + for py_file in files: imports = imports_for(py_file) + module_path = path_to_py_module(py_file, root_path) - module_path = path_to_py_module(py_file, repo_path) - - if module_path.startswith('aladdin') or module_path.startswith('.') or module_path.endswith('__'): + if module_path.startswith('aligned') or module_path.startswith('.') or module_path.endswith('__'): # Skip no feature defintion modules + logger.debug(f"Skipping module {module_path}") continue module = import_module(module_path) @@ -120,44 +107,28 @@ async def definition_from_path(repo_path: Path, excludes: list[str] | None = Non obj = getattr(module, attribute) - if isinstance(obj, Enricher): - repo.enrichers.append( - EnricherReference(module=module_path, attribute_name=attribute, enricher=obj) - ) - elif isinstance(obj, FeatureViewWrapper): + if isinstance(obj, FeatureViewWrapper): repo.feature_views.add(obj.compile()) - elif isinstance(obj, CombinedFeatureViewWrapper): - repo.combined_feature_views.add(obj.compile()) elif isinstance(obj, ModelContractWrapper): repo.models.add(obj.compile()) - else: - classes = super_classes_in(obj) - if 'ModelContract' in classes: - repo.models.add(obj.compile()) - elif 'FeatureView' in classes: - fv = obj.compile() - if fv.name in feature_view_names: - raise Exception( - ( - f'Duplicate feature view names: {fv.name},', - f' in {py_file}, and {feature_view_names[fv.name]}', - ) - ) - feature_view_names[fv.name] = py_file.as_posix() - repo.feature_views.add(fv) - elif 'CombinedFeatureView' in classes: - fv = obj.compile() - if fv.name in feature_view_names: - raise Exception( - ( - f'Duplicate feature view names: {fv.name},', - f' in {py_file}, and {feature_view_names[fv.name]}', - ) - ) - feature_view_names[fv.name] = py_file.as_posix() - repo.combined_feature_views.add(fv) return repo + @staticmethod + async def definition_from_glob(root_path: Path, glob: str) -> RepoDefinition: + return await RepoReader.definition_from_files(files=list(root_path.glob(glob)), root_path=root_path) + + @staticmethod + async def definition_from_path(repo_path: Path, excludes: list[str] | None = None) -> RepoDefinition: + + excluded_files: list[Path] = [] + for exclude in excludes or []: + excluded_files.extend(repo_path.resolve().glob(exclude)) + + return await RepoReader.definition_from_files( + files=[py_file for py_file in find_files(repo_path) if py_file not in excluded_files], + root_path=repo_path, + ) + @staticmethod def reference_from_path(repo_path: Path, file: Path) -> RepoReference: diff --git a/aligned/compiler/transformation_factory.py b/aligned/compiler/transformation_factory.py index cffd54c5..6cba0aed 100644 --- a/aligned/compiler/transformation_factory.py +++ b/aligned/compiler/transformation_factory.py @@ -1,12 +1,14 @@ +from __future__ import annotations + import logging from dataclasses import dataclass, field from datetime import timedelta # noqa: TC003 from typing import Any, Callable -import pandas as pd import polars as pl from aligned import AwsS3Config +from aligned.lazy_imports import pandas as pd from aligned.compiler.feature_factory import FeatureFactory, Transformation, TransformationFactory from aligned.schemas.transformation import FillNaValuesColumns, LiteralValue, EmbeddingModel @@ -195,7 +197,7 @@ def compile(self) -> Transformation: @dataclass class LowerThenFactory(TransformationFactory): - value: float + value: Any in_feature: FeatureFactory @property @@ -204,14 +206,18 @@ def using_features(self) -> 
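The refactor here splits file discovery out of `definition_from_path`, so a compile step can now be driven by a glob directly. A small usage sketch with an invented layout:

```
from pathlib import Path
from aligned.compiler.repo_reader import RepoReader


async def compile_contracts() -> None:
    # Collects every FeatureViewWrapper and ModelContractWrapper
    # found in the matching modules.
    definition = await RepoReader.definition_from_glob(
        root_path=Path('.'),
        glob='contracts/**/*.py',
    )
    print(len(definition.feature_views), len(definition.models))
```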
list[FeatureFactory]: def compile(self) -> Transformation: from aligned.schemas.transformation import LowerThen as LTTransformation + from aligned.schemas.transformation import LowerThenCol - return LTTransformation(self.in_feature.name, self.value) + if isinstance(self.value, FeatureFactory): + return LowerThenCol(self.in_feature.name, self.value.name) + else: + return LTTransformation(self.in_feature.name, self.value) @dataclass class LowerThenOrEqualFactory(TransformationFactory): - value: float + value: Any in_feature: FeatureFactory @property @@ -220,6 +226,10 @@ def using_features(self) -> list[FeatureFactory]: def compile(self) -> Transformation: from aligned.schemas.transformation import LowerThenOrEqual as LTETransformation + from aligned.schemas.transformation import LowerThenOrEqualCol + + if isinstance(self.value, FeatureFactory): + return LowerThenOrEqualCol(self.in_feature.name, self.value.name) return LTETransformation(self.in_feature.name, self.value) @@ -645,9 +655,11 @@ def compile(self) -> Transformation: dtype=self.dtype.dtype, ) else: + function_name = dill.source.getname(self.method) + assert isinstance(function_name, str), f"Expected string got {type(function_name)}" return PandasFunctionTransformation( code=inspect.getsource(self.method), - function_name=dill.source.getname(self.method), + function_name=function_name, dtype=self.dtype.dtype, ) @@ -672,7 +684,10 @@ def compile(self) -> Transformation: from aligned.schemas.transformation import PolarsFunctionTransformation, PolarsLambdaTransformation if isinstance(self.method, pl.Expr): - method = lambda df, alias: self.method # type: ignore + + def method(df: pl.DataFrame, alias: str) -> pl.Expr: + return self.method # type: ignore + return PolarsLambdaTransformation(method=dill.dumps(method), code='', dtype=self.dtype.dtype) else: code = inspect.getsource(self.method) @@ -682,9 +697,11 @@ def compile(self) -> Transformation: method=dill.dumps(self.method), code=code.strip(), dtype=self.dtype.dtype ) else: + function_name = dill.source.getname(self.method) + assert isinstance(function_name, str), f"Expected string got {type(function_name)}" return PolarsFunctionTransformation( code=code, - function_name=dill.source.getname(self.method), + function_name=function_name, dtype=self.dtype.dtype, ) @@ -709,11 +726,9 @@ def using_features(self) -> list[FeatureFactory]: return [self.feature] def compile(self) -> Transformation: - from aligned.schemas.transformation import Mean + from aligned.schemas.transformation import MeanAggregation - return Mean( - key=self.feature.name, group_keys=[feat.name for feat in self.group_by] if self.group_by else None - ) + return MeanAggregation(key=self.feature.name) def copy(self) -> 'MeanTransfomrationFactory': return MeanTransfomrationFactory(self.feature, self.over, self.group_by) @@ -783,7 +798,7 @@ def compile(self) -> Transformation: from aligned.schemas.transformation import AppendConstString, AppendStrings if isinstance(self.second_feature, LiteralValue): - return AppendConstString(self.first_feature.name, self.second_feature.value) + return AppendConstString(self.first_feature.name, self.second_feature.python_value) else: return AppendStrings(self.first_feature.name, self.second_feature.name, self.separator) diff --git a/aligned/data_file.py b/aligned/data_file.py index 0f9b5471..e5375a25 100644 --- a/aligned/data_file.py +++ b/aligned/data_file.py @@ -1,5 +1,7 @@ -import pandas as pd +from __future__ import annotations + import polars as pl +from aligned.lazy_imports import 
pandas as pd def upsert_on_column(columns: list[str], new_data: pl.LazyFrame, existing_data: pl.LazyFrame) -> pl.LazyFrame: diff --git a/aligned/data_source/batch_data_source.py b/aligned/data_source/batch_data_source.py index 65b359e5..91a0d951 100644 --- a/aligned/data_source/batch_data_source.py +++ b/aligned/data_source/batch_data_source.py @@ -28,7 +28,7 @@ class BatchDataSourceFactory: - supported_data_sources: dict[str, type[BatchDataSource]] + supported_data_sources: dict[str, type[CodableBatchDataSource]] _shared: BatchDataSourceFactory | None = None @@ -79,6 +79,7 @@ def __init__(self) -> None: CustomMethodDataSource, ModelSource, StackSource, + LoadedAtSource, ] self.supported_data_sources = {source.type_name: source for source in source_types} @@ -93,22 +94,13 @@ def shared(cls) -> BatchDataSourceFactory: class BatchSourceModification: - source: BatchDataSource + source: CodableBatchDataSource def wrap_job(self, job: RetrivalJob) -> RetrivalJob: raise NotImplementedError() -class BatchDataSource(Codable, SerializableType): - """ - A definition to where a specific pice of data can be found. - E.g: A database table, a file, a web service, etc. - - Ths can thereafter be combined with other BatchDataSources in order to create a rich dataset. - """ - - type_name: str - +class BatchDataSource: def job_group_key(self) -> str: """ A key defining which sources can be grouped together in one request. @@ -121,108 +113,9 @@ def source_id(self) -> str: """ return self.job_group_key() - def _serialize(self) -> dict: - assert ( - self.type_name in BatchDataSourceFactory.shared().supported_data_sources - ), f'Unknown type_name: {self.type_name}' - return self.to_dict() - def __hash__(self) -> int: return hash(self.job_group_key()) - def transform_with_polars( - self, - method: Callable[[pl.LazyFrame], Awaitable[pl.LazyFrame]] | Callable[[pl.LazyFrame], pl.LazyFrame], - ) -> BatchDataSource: - async def all(request: RetrivalRequest, limit: int | None) -> pl.LazyFrame: - import inspect - - df = await self.all_data(request, limit).to_lazy_polars() - - if inspect.iscoroutinefunction(method): - return await method(df) - else: - return method(df) - - async def all_between_dates( - request: RetrivalRequest, start_date: datetime, end_date: datetime - ) -> pl.LazyFrame: - import inspect - - df = await self.all_between_dates(request, start_date, end_date).to_lazy_polars() - - if inspect.iscoroutinefunction(method): - return await method(df) - else: - return method(df) - - async def features_for(entities: RetrivalJob, request: RetrivalRequest) -> pl.LazyFrame: - import inspect - - df = await self.features_for(entities, request).to_lazy_polars() - - if inspect.iscoroutinefunction(method): - return await method(df) - else: - return method(df) - - return CustomMethodDataSource.from_methods( - all_data=all, - all_between_dates=all_between_dates, - features_for=features_for, - depends_on_sources=self.location_id(), - ) - - def contains_config(self, config: Any) -> bool: - """ - Checks if a data source contains a source config. - This can be used to select different sources based on the data sources to connect to. 
- - ``` - config = PostgreSQLConfig(env_var='MY_APP_DB_URL') - source = config.table('my_table') - - print(source.contains_config(config)) - >> True - - store = await FileSource.json_at("features.json").feature_store() - views = store.views_with_config(config) - print(len(views)) - >> 3 - ``` - - Args: - config: The config to check for - - Returns: - bool: If the config is contained in the source - """ - if isinstance(config, BatchDataSource): - return config.to_dict() == self.to_dict() - return False - - @classmethod - def _deserialize(cls, value: dict) -> BatchDataSource: - name_type = value['type_name'] - if name_type not in BatchDataSourceFactory.shared().supported_data_sources: - raise ValueError( - f"Unknown batch data source id: '{name_type}'.\nRemember to add the" - ' data source to the BatchDataSourceFactory.supported_data_sources if' - ' it is a custom type.' - ) - del value['type_name'] - data_class = BatchDataSourceFactory.shared().supported_data_sources[name_type] - return data_class.from_dict(value) - - def all_columns(self, limit: int | None = None) -> RetrivalJob: - return self.all(RequestResult(set(), set(), None), limit=limit) - - def all(self, result: RequestResult, limit: int | None = None) -> RetrivalJob: - return self.all_data( - result.as_retrival_request('read_all', location=FeatureLocation.feature_view('read_all')), - limit=limit, - ) - def all_data(self, request: RetrivalRequest, limit: int | None) -> RetrivalJob: if isinstance(self, BatchSourceModification): return self.wrap_job(self.source.all_data(request, limit)) @@ -256,7 +149,9 @@ def multi_source_features_for( cls: type[T], facts: RetrivalJob, requests: list[tuple[T, RetrivalRequest]] ) -> RetrivalJob: - sources = {source.job_group_key() for source, _ in requests if isinstance(source, BatchDataSource)} + sources = { + source.job_group_key() for source, _ in requests if isinstance(source, CodableBatchDataSource) + } if len(sources) != 1: raise NotImplementedError( f'Type: {cls} have not implemented how to load fact data with multiple sources.' 
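The deserializer contract stays the same after the move below: a source round-trips through its `type_name`, and custom types must be registered up front. A minimal sketch of that registration, with every name invented:

```
from dataclasses import dataclass
from aligned.data_source.batch_data_source import (
    BatchDataSourceFactory,
    CodableBatchDataSource,
)


@dataclass
class MyApiSource(CodableBatchDataSource):
    # Minimal codable source; a real one would also implement the
    # data-loading methods such as all_data.
    url: str
    type_name: str = 'my_api'

    def job_group_key(self) -> str:
        return self.type_name


# Without this, _deserialize raises "Unknown batch data source id: 'my_api'".
BatchDataSourceFactory.shared().supported_data_sources['my_api'] = MyApiSource
```

Relatedly, the `from_load` helper added further down in this file is a convenience over `from_methods` for sources where one loader answers both `all_data` and `all_between_dates`. A hedged usage sketch with an invented file name:

```
import polars as pl
from aligned.data_source.batch_data_source import CustomMethodDataSource
from aligned.request.retrival_request import RetrivalRequest


async def load_snapshot(request: RetrivalRequest) -> pl.LazyFrame:
    # The same frame is returned regardless of the requested date range,
    # matching how from_load wires both methods to one loader.
    return pl.scan_parquet('snapshot.parquet')


source = CustomMethodDataSource.from_load(load_snapshot)
```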
@@ -291,8 +186,18 @@ async def schema(self) -> dict[str, FeatureType]: """ if isinstance(self, BatchSourceModification): return await self.source.schema() + raise NotImplementedError(f'`schema()` is not implemented for {type(self)}.') + def all_columns(self, limit: int | None = None) -> RetrivalJob: + return self.all(RequestResult(set(), set(), None), limit=limit) + + def all(self, result: RequestResult, limit: int | None = None) -> RetrivalJob: + return self.all_data( + result.as_retrival_request('read_all', location=FeatureLocation.feature_view('read_all')), + limit=limit, + ) + async def feature_view_code(self, view_name: str) -> str: """Setup the code needed to represent the data source as a feature view @@ -348,7 +253,8 @@ async def freshness(self, event_timestamp: EventTimestamp) -> datetime | None: raise NotImplementedError(f'Freshness is not implemented for {type(self)}.') - def filter(self, condition: DerivedFeature | Feature) -> BatchDataSource: + def filter(self, condition: DerivedFeature | Feature) -> CodableBatchDataSource: + assert isinstance(self, CodableBatchDataSource) return FilteredDataSource(self, condition) def location_id(self) -> set[FeatureLocation]: @@ -358,11 +264,91 @@ def depends_on(self) -> set[FeatureLocation]: return set() def tags(self) -> list[str]: - return [self.type_name] + if isinstance(self, CodableBatchDataSource): + return [self.type_name] + return [] + + def with_loaded_at(self) -> CodableBatchDataSource: + if isinstance(self, CodableBatchDataSource): + return LoadedAtSource(self) + raise NotImplementedError(type(self)) + + def transform_with_polars( + self, + method: Callable[[pl.LazyFrame], Awaitable[pl.LazyFrame]] | Callable[[pl.LazyFrame], pl.LazyFrame], + ) -> CodableBatchDataSource: + async def all(request: RetrivalRequest, limit: int | None) -> pl.LazyFrame: + import inspect + + df = await self.all_data(request, limit).to_lazy_polars() + + if inspect.iscoroutinefunction(method): + return await method(df) + else: + return method(df) # type: ignore + + async def all_between_dates( + request: RetrivalRequest, start_date: datetime, end_date: datetime + ) -> pl.LazyFrame: + import inspect + + df = await self.all_between_dates(request, start_date, end_date).to_lazy_polars() + + if inspect.iscoroutinefunction(method): + return await method(df) + else: + return method(df) # type: ignore + + async def features_for(entities: RetrivalJob, request: RetrivalRequest) -> pl.LazyFrame: + import inspect + + df = await self.features_for(entities, request).to_lazy_polars() + + if inspect.iscoroutinefunction(method): + return await method(df) + else: + return method(df) # type: ignore + + return CustomMethodDataSource.from_methods( + all_data=all, + all_between_dates=all_between_dates, + features_for=features_for, + depends_on_sources=self.location_id(), + ) + + +class CodableBatchDataSource(Codable, SerializableType, BatchDataSource): + """ + A definition to where a specific piece of data can be found. + E.g: A database table, a file, a web service, etc. + + This can thereafter be combined with other BatchDataSources in order to create a rich dataset. 
+ """ + + type_name: str + + def _serialize(self) -> dict: + assert ( + self.type_name in BatchDataSourceFactory.shared().supported_data_sources + ), f'Unknown type_name: {self.type_name}' + return self.to_dict() + + @classmethod + def _deserialize(cls, value: dict) -> CodableBatchDataSource: + name_type = value['type_name'] + if name_type not in BatchDataSourceFactory.shared().supported_data_sources: + raise ValueError( + f"Unknown batch data source id: '{name_type}'.\nRemember to add the" + ' data source to the BatchDataSourceFactory.supported_data_sources if' + ' it is a custom type.' + ) + del value['type_name'] + data_class = BatchDataSourceFactory.shared().supported_data_sources[name_type] + return data_class.from_dict(value) @dataclass -class CustomMethodDataSource(BatchDataSource): +class CustomMethodDataSource(CodableBatchDataSource): all_data_method: bytes all_between_dates_method: bytes @@ -374,6 +360,10 @@ class CustomMethodDataSource(BatchDataSource): def job_group_key(self) -> str: return 'custom_method' + @property + def to_markdown(self) -> str: + return '### Custom method\n\nThis uses dill which can be unsafe in some scenarios.' + def all_data(self, request: RetrivalRequest, limit: int | None) -> RetrivalJob: from aligned.retrival_job import CustomLazyPolarsJob import dill @@ -414,6 +404,23 @@ def multi_source_features_for( source, request = requests[0] return source.features_for(facts, request) # type: ignore + @staticmethod + def from_load( + method: Callable[[RetrivalRequest], Coroutine[None, None, pl.LazyFrame]], + depends_on: set[FeatureLocation] | None = None, + ) -> 'CustomMethodDataSource': + async def all(request: RetrivalRequest, limit: int | None) -> pl.LazyFrame: + return await method(request) + + async def all_between( + request: RetrivalRequest, start_date: datetime, end_date: datetime + ) -> pl.LazyFrame: + return await method(request) + + return CustomMethodDataSource.from_methods( + all_data=all, all_between_dates=all_between, depends_on_sources=depends_on + ) + @staticmethod def from_methods( all_data: Callable[[RetrivalRequest, int | None], Coroutine[None, None, pl.LazyFrame]] | None = None, @@ -452,10 +459,10 @@ def depends_on(self) -> set[FeatureLocation]: @dataclass -class FilteredDataSource(BatchDataSource): +class FilteredDataSource(CodableBatchDataSource): - source: BatchDataSource - condition: DerivedFeature | Feature + source: CodableBatchDataSource + condition: DerivedFeature | Feature | str type_name: str = 'subset' @@ -466,13 +473,15 @@ async def schema(self) -> dict[str, FeatureType]: return await self.source.schema() @classmethod - def multi_source_features_for( + def multi_source_features_for( # type: ignore cls: type[FilteredDataSource], facts: RetrivalJob, requests: list[tuple[FilteredDataSource, RetrivalRequest]], ) -> RetrivalJob: - sources = {source.job_group_key() for source, _ in requests if isinstance(source, BatchDataSource)} + sources = { + source.job_group_key() for source, _ in requests if isinstance(source, CodableBatchDataSource) + } if len(sources) != 1: raise NotImplementedError( f'Type: {cls} have not implemented how to load fact data with multiple sources.' 
@@ -481,10 +490,14 @@ def multi_source_features_for( if isinstance(source.condition, DerivedFeature): request.derived_features.add(source.condition) - else: + condition = source.condition + elif isinstance(source.condition, Feature): request.features.add(source.condition) + condition = source.condition + else: + condition = pl.Expr.deserialize(source.condition) - return source.source.features_for(facts, request).filter(source.condition) + return source.source.features_for(facts, request).filter(condition) async def freshness(self, event_timestamp: EventTimestamp) -> datetime | None: return await self.source.freshness(event_timestamp) @@ -493,10 +506,10 @@ def all_between_dates( self, request: RetrivalRequest, start_date: datetime, end_date: datetime ) -> RetrivalJob: - if isinstance(self.condition, Feature): - request.features.add(self.condition) - else: + if isinstance(self.condition, DerivedFeature): request.derived_features.add(self.condition) + elif isinstance(self.condition, Feature): + request.features.add(self.condition) return ( self.source.all_between_dates(request, start_date, end_date) @@ -509,7 +522,7 @@ def all_data(self, request: RetrivalRequest, limit: int | None) -> RetrivalJob: if isinstance(self.condition, DerivedFeature): request.derived_features.add(self.condition) - else: + elif isinstance(self.condition, Feature): request.features.add(self.condition) return ( @@ -537,7 +550,9 @@ def resolve_keys(keys: str | FeatureFactory | list[str] | list[FeatureFactory]) return keys # type: ignore -def model_prediction_instance_source(model: Any) -> tuple[BatchDataSource, RetrivalRequest] | Exception: +def model_prediction_instance_source( + model: Any, +) -> tuple[CodableBatchDataSource, RetrivalRequest] | Exception: from aligned.schemas.feature_view import FeatureViewReferenceSource from aligned.compiler.model import ModelContractWrapper @@ -560,7 +575,7 @@ def model_prediction_instance_source(model: Any) -> tuple[BatchDataSource, Retri ) -def view_wrapper_instance_source(view: Any) -> tuple[BatchDataSource, RetrivalRequest] | Exception: +def view_wrapper_instance_source(view: Any) -> tuple[CodableBatchDataSource, RetrivalRequest] | Exception: from aligned.feature_view.feature_view import FeatureViewWrapper from aligned.schemas.feature_view import FeatureViewReferenceSource @@ -582,7 +597,7 @@ def view_wrapper_instance_source(view: Any) -> tuple[BatchDataSource, RetrivalRe def join_asof_source( - source: BatchDataSource, + source: CodableBatchDataSource, left_request: RetrivalRequest, view: Any, left_on: list[str] | None = None, @@ -619,7 +634,7 @@ def join_asof_source( def join_source( - source: BatchDataSource, + source: CodableBatchDataSource, view: Any, on_left: str | FeatureFactory | list[str] | list[FeatureFactory] | None = None, on_right: str | FeatureFactory | list[str] | list[FeatureFactory] | None = None, @@ -669,11 +684,11 @@ def join_source( @dataclass -class JoinAsofDataSource(BatchDataSource): +class JoinAsofDataSource(CodableBatchDataSource): - source: BatchDataSource + source: CodableBatchDataSource left_request: RetrivalRequest - right_source: BatchDataSource + right_source: CodableBatchDataSource right_request: RetrivalRequest left_event_timestamp: str @@ -801,10 +816,10 @@ def depends_on(self) -> set[FeatureLocation]: @dataclass -class StackSource(BatchDataSource): +class StackSource(CodableBatchDataSource): - top: BatchDataSource - bottom: BatchDataSource + top: CodableBatchDataSource + bottom: CodableBatchDataSource source_column: str | None = None @@ -869,7 
+884,7 @@ def all_data(self, request: RetrivalRequest, limit: int | None) -> RetrivalJob: ) @classmethod - def multi_source_features_for( + def multi_source_features_for( # type: ignore cls, facts: RetrivalJob, requests: list[tuple[StackSource, RetrivalRequest]] ) -> RetrivalJob: sources = {source.job_group_key() for source, _ in requests} @@ -892,8 +907,8 @@ def features_for(self, facts: RetrivalJob, request: RetrivalRequest) -> Retrival if config: sub_request = self.sub_request(request, config) - top = self.top.features_for(facts, sub_request) - bottom = self.bottom.features_for(facts, sub_request) + top = self.top.features_for(facts, sub_request).drop_invalid() + bottom = self.bottom.features_for(facts, sub_request).drop_invalid() stack_job = StackJob(top=top, bottom=bottom, source_column=config) @@ -917,12 +932,78 @@ def depends_on(self) -> set[FeatureLocation]: return self.top.depends_on().union(self.bottom.depends_on()) +def request_without_event_timestamp(request: RetrivalRequest) -> RetrivalRequest: + return RetrivalRequest( + request.name, + location=request.location, + features=request.features, + entities=request.entities, + derived_features=request.derived_features, + aggregated_features=request.aggregated_features, + features_to_include=request.features_to_include, + event_timestamp_request=None, + ) + + +@dataclass +class LoadedAtSource(CodableBatchDataSource): + + source: CodableBatchDataSource + type_name: str = 'loaded_at' + + @property + def to_markdown(self) -> str: + source_markdown = self.source.to_markdown if hasattr(self.source, 'to_markdown') else str(self.source) + + return f"""### Loaded At Source + +Adding a loaded at timestamp to the source: +{source_markdown} +""" # noqa + + def job_group_key(self) -> str: + return self.source.job_group_key() + + async def schema(self) -> dict[str, FeatureType]: + return await self.source.schema() + + def all_data(self, request: RetrivalRequest, limit: int | None) -> RetrivalJob: + from aligned.retrival_job import LoadedAtJob + + return LoadedAtJob(self.source.all_data(request_without_event_timestamp(request), limit), request) + + def all_between_dates( + self, request: RetrivalRequest, start_date: datetime, end_date: datetime + ) -> RetrivalJob: + from aligned.retrival_job import LoadedAtJob + + return LoadedAtJob( + self.all_data(request, limit=None), + request, + ) + + def depends_on(self) -> set[FeatureLocation]: + return self.source.depends_on() + + async def freshness(self, event_timestamp: EventTimestamp) -> datetime | None: + return None + + @classmethod + def multi_source_features_for( # type: ignore + cls: type[CodableBatchDataSource], + facts: RetrivalJob, + requests: list[tuple[CodableBatchDataSource, RetrivalRequest]], + ) -> RetrivalJob: + + return type(requests[0][0]).multi_source_features_for(facts, requests) + + @dataclass -class JoinDataSource(BatchDataSource): +class JoinDataSource(CodableBatchDataSource): - source: BatchDataSource + source: CodableBatchDataSource left_request: RetrivalRequest - right_source: BatchDataSource + right_source: CodableBatchDataSource right_request: RetrivalRequest left_on: list[str] right_on: list[str] @@ -1036,7 +1117,7 @@ def feature_identifier_for(self, columns: list[str]) -> list[str]: return [reverse_map.get(column, column) for column in columns] -def data_for_request(request: RetrivalRequest, size: int) -> pl.DataFrame: +def random_values_for(feature: Feature, size: int, seed: int | None = None) -> pl.Series: from aligned.schemas.constraints import ( InDomain, LowerBound, 
@@ -1045,75 +1126,114 @@ def data_for_request(request: RetrivalRequest, size: int) -> pl.DataFrame: UpperBound, UpperBoundInclusive, Optional, + ListConstraint, ) import numpy as np - needed_features = request.features.union(request.entities) - schema = {feature.name: feature.dtype.polars_type for feature in needed_features} + if seed: + np.random.seed(seed) + + dtype = feature.dtype + + choices: list[Any] | None = None + max_value: float | None = None + min_value: float | None = None + + is_optional = False + is_unique = False + + for constraints in feature.constraints or set(): + if isinstance(constraints, InDomain): + choices = constraints.values + elif isinstance(constraints, LowerBound): + min_value = constraints.value + elif isinstance(constraints, LowerBoundInclusive): + min_value = constraints.value + elif isinstance(constraints, UpperBound): + max_value = constraints.value + elif isinstance(constraints, UpperBoundInclusive): + max_value = constraints.value + elif isinstance(constraints, Unique): + is_unique = True + elif isinstance(constraints, Optional): + is_optional = True + + if dtype == FeatureType.boolean(): + values = np.random.choice([True, False], size=size) + elif dtype.is_numeric: + if is_unique: + values = np.arange(0, size, dtype=dtype.pandas_type) + else: + values = np.random.random(size) - exprs = {} + if max_value and min_value: + values = values * (max_value - min_value) + min_value + elif max_value is not None: + values = values * max_value + elif min_value is not None: + values = values * 1000 + min_value - for feature in needed_features: - dtype = feature.dtype - - choices: list[Any] | None = None - max_value: float | None = None - min_value: float | None = None - - is_optional = False - is_unique = False - - for constraints in feature.constraints or set(): - if isinstance(constraints, InDomain): - choices = constraints.values - elif isinstance(constraints, LowerBound): - min_value = constraints.value - elif isinstance(constraints, LowerBoundInclusive): - min_value = constraints.value - elif isinstance(constraints, UpperBound): - max_value = constraints.value - elif isinstance(constraints, UpperBoundInclusive): - max_value = constraints.value - elif isinstance(constraints, Unique): - is_unique = True - elif isinstance(constraints, Optional): - is_optional = True - - if dtype == FeatureType.bool(): - values = np.random.choice([True, False], size=size) - elif dtype.is_numeric: - if is_unique: - values = np.arange(0, size, dtype=dtype.pandas_type) - else: - values = np.random.random(size) * 1000 + if 'float' not in dtype.name: + values = np.round(values) - if max_value is not None: - values = values * max_value + elif dtype.is_datetime: + values = [ + datetime.now(tz=timezone.utc) - np.random.random() * timedelta(days=365) for _ in range(size) + ] + elif dtype.is_array: + subtype = dtype.array_subtype() + _sub_constraints: list[ListConstraint] = [ + constraint + for constraint in feature.constraints or set() + if isinstance(constraint, ListConstraint) + ] + sub_constraints = None + if _sub_constraints: + sub_constraints = set(_sub_constraints[0].constraints) - if min_value is not None: - values = values - min_value - elif dtype.is_datetime: + if subtype is None: + values = np.random.random((size, 4)) + else: values = [ - datetime.now(tz=timezone.utc) - np.random.random() * timedelta(days=365) for _ in range(size) + random_values_for(Feature('dd', dtype=subtype, constraints=sub_constraints), 4) + for _ in range(size) ] - elif dtype.is_embedding: - embedding_size = 
dtype.embedding_size() or 10 - values = np.random.random((size, embedding_size)) + elif dtype.is_embedding: + embedding_size = dtype.embedding_size() or 10 + values = np.random.random((size, embedding_size)) + else: + if choices: + values = np.random.choice(choices, size=size) else: - if choices: - values = np.random.choice(choices, size=size) - else: - values = np.random.choice(list('abcde'), size=size) + values = np.random.choice(list('abcde'), size=size) - if is_optional: - values = np.where(np.random.random(size) > 0.5, values, np.NaN) + pl_vals = pl.Series(values=values) + if is_optional: + pl_vals = pl_vals.set(pl.Series(values=np.random.random(size) > 0.5), value=None) - exprs[feature.name] = values + return pl_vals - return pl.DataFrame(exprs, schema=schema) +async def data_for_request(request: RetrivalRequest, size: int, seed: int | None = None) -> pl.DataFrame: + from aligned.retrival_job import RetrivalJob + + needed_features = request.features.union(request.entities) + if request.event_timestamp: + needed_features.add(request.event_timestamp.as_feature()) -class DummyDataSource(BatchDataSource): + schema = {feature.name: feature.dtype.polars_type for feature in needed_features} + + exprs = {} + + for feature in sorted(needed_features, key=lambda f: f.name): + logger.info(f"Generating data for {feature.name}") + exprs[feature.name] = random_values_for(feature, size, seed) + + job = RetrivalJob.from_polars_df(pl.DataFrame(exprs, schema=schema), request=[request]) + return await job.derive_features().to_polars() + + +class DummyDataSource(CodableBatchDataSource): """ The DummyDataBatchSource is a data source that generates random data for a given request. This can be useful for testing and development purposes. @@ -1135,8 +1255,14 @@ class MyView: ``` """ + default_data_size: int + seed: int | None type_name: str = 'dummy_data' + def __init__(self, default_data_size: int = 10_000, seed: int | None = None): + self.default_data_size = default_data_size + self.seed = seed + def job_group_key(self) -> str: return self.type_name @@ -1146,7 +1272,7 @@ def multi_source_features_for( ) -> RetrivalJob: async def random_features_for(facts: RetrivalJob, request: RetrivalRequest) -> pl.LazyFrame: df = await facts.to_polars() - random = data_for_request(request, df.height).lazy() + random = (await data_for_request(request, df.height)).lazy() join_columns = set(request.all_returned_columns) - set(df.columns) return df.hstack(random.select(pl.col(join_columns)).collect()).lazy() @@ -1159,7 +1285,7 @@ def all_data(self, request: RetrivalRequest, limit: int | None = None) -> Retriv from aligned import CustomMethodDataSource async def all_data(request: RetrivalRequest, limit: int | None = None) -> pl.LazyFrame: - return data_for_request(request, limit or 100).lazy() + return (await data_for_request(request, limit or self.default_data_size)).lazy() return CustomMethodDataSource.from_methods(all_data=all_data).all_data(request, limit) @@ -1171,7 +1297,7 @@ def all_between_dates( async def between_date( request: RetrivalRequest, start_date: datetime, end_date: datetime ) -> pl.LazyFrame: - return data_for_request(request, 100).lazy() + return (await data_for_request(request, self.default_data_size)).lazy() return CustomMethodDataSource.from_methods(all_between_dates=between_date).all_between_dates( request, start_date, end_date diff --git a/aligned/data_source/model_predictor.py b/aligned/data_source/model_predictor.py index 6ac96fa5..8eec8afe 100644 --- a/aligned/data_source/model_predictor.py +++ 
b/aligned/data_source/model_predictor.py @@ -3,6 +3,7 @@ from datetime import datetime from aligned.feature_store import ModelFeatureStore +from aligned.data_source.batch_data_source import BatchDataSource from aligned.request.retrival_request import RetrivalRequest from aligned.schemas.feature import FeatureLocation, FeatureType from aligned.schemas.model import Model @@ -10,7 +11,7 @@ @dataclass -class PredictModelSource: +class PredictModelSource(BatchDataSource): store: ModelFeatureStore type_name: str = 'pred_model_source' @@ -39,7 +40,7 @@ def all_data(self, request: RetrivalRequest, limit: int | None = None) -> Retriv ) location = reqs.needed_requests[0].location - if location.location != 'feature_view': + if location.location_type != 'feature_view': raise NotImplementedError( f'Type: {type(self)} have not implemented how to load fact data with multiple sources.' ) @@ -57,7 +58,7 @@ def all_between_dates( ) location = reqs.needed_requests[0].location - if location.location != 'feature_view': + if location.location_type != 'feature_view': raise NotImplementedError( f'Type: {type(self)} have not implemented how to load fact data with multiple sources.' ) @@ -69,7 +70,7 @@ def features_for(self, facts: RetrivalJob, request: RetrivalRequest) -> Retrival return self.store.predict_over(facts).with_request([request]) @classmethod - def multi_source_features_for( + def multi_source_features_for( # type: ignore cls, facts: RetrivalJob, requests: list[tuple[PredictModelSource, RetrivalRequest]] ) -> RetrivalJob: diff --git a/aligned/data_source/tests/test_batch_source.py b/aligned/data_source/tests/test_batch_source.py index 4f129f1b..532adbc6 100644 --- a/aligned/data_source/tests/test_batch_source.py +++ b/aligned/data_source/tests/test_batch_source.py @@ -1,6 +1,6 @@ import polars as pl import json -from aligned.data_source.batch_data_source import BatchDataSource +from aligned.data_source.batch_data_source import CodableBatchDataSource from aligned.request.retrival_request import RetrivalRequest from aligned.sources.local import CsvFileSource import pytest @@ -11,7 +11,7 @@ async def test_custom_transformation_as_lambda(scan_without_datetime: CsvFileSou new_source = scan_without_datetime.transform_with_polars( lambda df: df.with_columns(bucket=pl.col('id').mod(3)) - .groupby('bucket') + .group_by('bucket') .agg( pl.col('radius_mean').sum().alias('sum_radius_mean'), ) @@ -21,7 +21,7 @@ async def test_custom_transformation_as_lambda(scan_without_datetime: CsvFileSou source_as_json = new_source.to_json() - ds = BatchDataSource._deserialize(json.loads(source_as_json)) + ds = CodableBatchDataSource._deserialize(json.loads(source_as_json)) new_df = await ds.all_data(RetrivalRequest.all_data(), limit=None).to_polars() assert new_df.sort('bucket').equals(df.sort('bucket').select(new_df.columns)) @@ -32,7 +32,7 @@ async def test_custom_transformation_as_function(scan_without_datetime: CsvFileS async def custom_function(df: pl.LazyFrame) -> pl.LazyFrame: return ( df.with_columns(bucket=pl.col('id').mod(3)) - .groupby('bucket') + .group_by('bucket') .agg( pl.col('radius_mean').sum().alias('sum_radius_mean'), ) @@ -44,7 +44,7 @@ async def custom_function(df: pl.LazyFrame) -> pl.LazyFrame: source_as_json = new_source.to_json() - ds = BatchDataSource._deserialize(json.loads(source_as_json)) + ds = CodableBatchDataSource._deserialize(json.loads(source_as_json)) new_df = await ds.all_data(RetrivalRequest.all_data(), limit=None).to_polars() assert 
new_df.sort('bucket').equals(df.sort('bucket').select(new_df.columns)) diff --git a/aligned/enricher.py b/aligned/enricher.py deleted file mode 100644 index 5f0e8d11..00000000 --- a/aligned/enricher.py +++ /dev/null @@ -1,227 +0,0 @@ -from __future__ import annotations - -import logging -from abc import ABC, abstractmethod -from dataclasses import dataclass, field -from datetime import datetime, timedelta -from pathlib import Path - -import pandas as pd -from mashumaro.types import SerializableType - -from aligned.schemas.codable import Codable -from aligned.sources.redis import RedisConfig - -logger = logging.getLogger(__name__) - - -@dataclass -class TimespanSelector(Codable): - timespand: timedelta - time_column: str - - -class StatisticEricher: - def std( - self, columns: set[str], time: TimespanSelector | None = None, limit: int | None = None - ) -> Enricher: - raise NotImplementedError() - - def mean( - self, columns: set[str], time: TimespanSelector | None = None, limit: int | None = None - ) -> Enricher: - raise NotImplementedError() - - -class Enricher(ABC, Codable, SerializableType): - - name: str - - def _serialize(self) -> dict: - return self.to_dict() - - @classmethod - def _deserialize(cls, value: dict) -> Enricher: - name_type = value['name'] - del value['name'] - data_class = SupportedEnrichers.shared().types[name_type] - return data_class.from_dict(value) - - def lock(self, lock_name: str, redis_config: RedisConfig, timeout: int = 60) -> Enricher: - return RedisLockEnricher(lock_name=lock_name, enricher=self, config=redis_config, timeout=timeout) - - def cache(self, ttl: timedelta, cache_key: str) -> Enricher: - return FileCacheEnricher(ttl, cache_key, self) - - @abstractmethod - async def as_df(self) -> pd.DataFrame: - pass - - -class SupportedEnrichers: - - types: dict[str, type[Enricher]] - - _shared: SupportedEnrichers | None = None - - def __init__(self) -> None: - self.types = {} - - default_types: list[type[Enricher]] = [RedisLockEnricher, FileCacheEnricher, SqlDatabaseEnricher] - for enrich_type in default_types: - self.add(enrich_type) - - def add(self, enrich_type: type[Enricher]) -> None: - self.types[enrich_type.name] = enrich_type - - @classmethod - def shared(cls) -> SupportedEnrichers: - if cls._shared: - return cls._shared - cls._shared = SupportedEnrichers() - return cls._shared - - -@dataclass -class RedisLockEnricher(Enricher): - - enricher: Enricher - config: RedisConfig - lock_name: str - timeout: int - name: str = 'redis_lock' - - def __init__(self, lock_name: str, enricher: Enricher, config: RedisConfig, timeout: int): - self.lock_name = lock_name - self.config = config - self.enricher = enricher - self.timeout = timeout - - async def as_df(self) -> pd.DataFrame: - redis = self.config.redis() - async with redis.lock(self.lock_name, timeout=self.timeout) as _: - return await self.enricher.as_df() - - -@dataclass -class CsvFileSelectedEnricher(Enricher): - file: str - time: TimespanSelector | None = field(default=None) - limit: int | None = field(default=None) - name: str = 'selective_file' - - async def as_df(self) -> pd.DataFrame: - dates_to_parse = None - if self.time: - dates_to_parse = [self.time.time_column] - - uri = self.file - path = Path(self.file) - if 'http' not in path.parts[0]: - uri = str(path.absolute()) - - if self.limit: - file = pd.read_csv(uri, nrows=self.limit, parse_dates=dates_to_parse) - else: - file = pd.read_csv(uri, nrows=self.limit, parse_dates=dates_to_parse) - - if not self.time: - return file - - date = datetime.now() - 
self.time.timespand - selector = file[self.time.time_column] >= date - return file.loc[selector] - - -@dataclass -class CsvFileEnricher(Enricher): - - file: str - name: str = 'file' - - def selector( - self, time: TimespanSelector | None = None, limit: int | None = None - ) -> CsvFileSelectedEnricher: - return CsvFileSelectedEnricher(self.file, time=time, limit=limit) - - async def as_df(self) -> pd.DataFrame: - return pd.read_csv(self.file) - - -@dataclass -class LoadedStatEnricher(Enricher): - - stat: str - columns: list[str] - enricher: Enricher - mapping_keys: dict[str, str] = field(default_factory=dict) - - async def as_df(self) -> pd.DataFrame: - data = await self.enricher.as_df() - renamed = data.rename(columns=self.mapping_keys) - if self.stat == 'mean': - return renamed[self.columns].mean() - elif self.stat == 'std': - return renamed[self.columns].std() - else: - raise ValueError(f'Not supporting stat: {self.stat}') - - -@dataclass -class FileCacheEnricher(Enricher): - - ttl: timedelta - file_path: str - enricher: Enricher - name: str = 'file_cache' - - def is_out_of_date_cache(self) -> bool: - file_uri = Path(self.file_path).absolute() - try: - # Checks last modified metadata field - modified_at = datetime.fromtimestamp(file_uri.stat().st_mtime) - compare = datetime.now() - self.ttl - return modified_at < compare - except FileNotFoundError: - return True - - async def as_df(self) -> pd.DataFrame: - file_uri = Path(self.file_path).absolute() - - if self.is_out_of_date_cache(): - logger.info('Fetching from source') - data: pd.DataFrame = await self.enricher.as_df() - file_uri.parent.mkdir(exist_ok=True, parents=True) - logger.info(f'Storing cache at file {file_uri.as_uri()}') - data.to_parquet(file_uri) - else: - logger.info('Loading cache') - data = pd.read_parquet(file_uri) - return data - - -@dataclass -class SqlDatabaseEnricher(Enricher): - - query: str - values: dict | None - url_env: str - name: str = 'sql' - - def __init__(self, url_env: str, query: str, values: dict | None = None) -> None: - self.query = query - self.values = values - self.url_env = url_env - - async def as_df(self) -> pd.DataFrame: - import os - - import connectorx as cx - - df = cx.read_sql(os.environ[self.url_env], self.query, return_type='pandas') - - for name, dtype in df.dtypes.iteritems(): - if dtype == 'object': # Need to convert the databases UUID type - df[name] = df[name].astype('str') - - return df diff --git a/aligned/exposed_model/interface.py b/aligned/exposed_model/interface.py index 18b8d5b4..49e8f3c4 100644 --- a/aligned/exposed_model/interface.py +++ b/aligned/exposed_model/interface.py @@ -1,14 +1,14 @@ from __future__ import annotations import polars as pl -from typing import TYPE_CHECKING, Callable, Coroutine +from typing import TYPE_CHECKING, Any, AsyncIterable, Callable, Coroutine from dataclasses import dataclass from aligned.retrival_job import RetrivalJob from aligned.schemas.codable import Codable from mashumaro.types import SerializableType import logging -from aligned.schemas.feature import Feature, FeatureReference +from aligned.schemas.feature import Feature, FeatureLocation, FeatureReference if TYPE_CHECKING: from aligned.feature_store import ModelFeatureStore @@ -21,7 +21,7 @@ class PredictorFactory: supported_predictors: dict[str, type[ExposedModel]] _shared: PredictorFactory | None = None - def __init__(self): + def __init__(self) -> None: from aligned.exposed_model.mlflow import MLFlowServer, InMemMLFlowAlias from aligned.exposed_model.ollama import 
OllamaGeneratePredictor, OllamaEmbeddingPredictor
@@ -48,6 +48,24 @@ def shared(cls) -> PredictorFactory:
         return cls._shared


+class PromptModel:
+    @property
+    def precomputed_prompt_key(self) -> str | None:
+        """
+        This is the property that contains the fully compiled prompt,
+        meaning a user can bypass the prompt templating step.
+
+        This is useful in some scenarios where we want to do similarity search
+        when the prompt components do not make sense to provide.
+        """
+        return None
+
+
+class VersionedModel:
+    async def model_version(self) -> str:
+        raise NotImplementedError(type(self))
+
+
 class ExposedModel(Codable, SerializableType):

     model_type: str
@@ -60,6 +78,14 @@ def exposed_at_url(self) -> str | None:
     def as_markdown(self) -> str:
         raise NotImplementedError(type(self))

+    async def depends_on(self) -> list[FeatureLocation]:
+        """
+        The data artefacts that the model depends on, beyond the input features.
+        This is useful for e.g. RAG systems, as we can describe which documents a model depends on,
+        or something like a vector database that we assume to be up to date.
+        """
+        return []
+
     async def needed_features(self, store: ModelFeatureStore) -> list[FeatureReference]:
         raise NotImplementedError(type(self))

@@ -129,6 +155,7 @@ def ollama_embedding(
         input_features_versions: str,
         prompt_template: str,
         embedding_name: str | None = None,
+        precomputed_prompt_key: str = 'full_prompt',
     ) -> 'ExposedModel':
         from aligned.exposed_model.ollama import OllamaEmbeddingPredictor

@@ -138,6 +165,7 @@ def ollama_embedding(
             prompt_template=prompt_template,
             input_features_versions=input_features_versions,
             embedding_name=embedding_name or 'embedding',
+            precomputed_prompt_key_overwrite=precomputed_prompt_key,
         )

     @staticmethod
@@ -145,7 +173,7 @@ def in_memory_mlflow(
         model_name: str,
         model_alias: str,
         model_contract_version_tag: str | None = None,
-    ):
+    ) -> 'ExposedModel':
         from aligned.exposed_model.mlflow import in_memory_mlflow

         return in_memory_mlflow(
@@ -159,20 +187,23 @@ def mlflow_server(
         host: str,
         model_alias: str | None = None,
         model_name: str | None = None,
-        model_contract_version_tag: str | None = None,
         timeout: int = 30,
-    ):
+    ) -> 'ExposedModel':
         from aligned.exposed_model.mlflow import mlflow_server

         return mlflow_server(
             host=host,
             model_name=model_name,
             model_alias=model_alias,
-            model_contract_version_tag=model_contract_version_tag,
             timeout=timeout,
         )


+class StreamablePredictor:
+    async def stream_predict(self, input: dict[str, Any]) -> AsyncIterable[dict[str, Any]]:
+        raise NotImplementedError(type(self))
+
+
 @dataclass
 class DillPredictor(ExposedModel):

@@ -316,7 +347,7 @@ async def needed_features(self, store: ModelFeatureStore) -> list[FeatureReferen
         return features

     async def needed_entities(self, store: ModelFeatureStore) -> set[Feature]:
-        entities = set()
+        entities: set[Feature] = set()
         for model, _ in self.models:
             entities = entities.union(await model.needed_entities(store))
         return entities

@@ -325,7 +356,7 @@ async def run_polars(self, values: RetrivalJob, store: ModelFeatureStore) -> pl.
         import random

         total_weight = sum([weight for _, weight in self.models])
-        total_sum = 0
+        total_sum: float = 0.0

         random_value = random.random()

@@ -383,13 +414,10 @@ async def function_wrapper(values: RetrivalJob, store: ModelFeatureStore) -> pl.
         if len(pred_columns) != 1:
             raise ValueError(f"Expected exactly one prediction column, got {len(pred_columns)} columns.")

-        feature_request = store.features_for(values).log_each_job()
-        input_features = feature_request.request_result.feature_columns
+        feature_request = store.features_for(values)
         features = await feature_request.to_polars()

-        result = features.with_columns(
-            function(features.select(input_features)).alias(list(pred_columns)[0].name)
-        )
+        result = features.with_columns(function(features).alias(next(iter(pred_columns)).name))

         return result

     return DillFunction(function=dill.dumps(function_wrapper))
diff --git a/aligned/exposed_model/langchain.py b/aligned/exposed_model/langchain.py
new file mode 100644
index 00000000..6ff2f221
--- /dev/null
+++ b/aligned/exposed_model/langchain.py
@@ -0,0 +1,229 @@
+from dataclasses import dataclass, field
+from typing import Any, AsyncIterable
+from aligned.compiler.model import ModelContractWrapper
+from aligned.data_source.batch_data_source import CodableBatchDataSource, CustomMethodDataSource
+from aligned.feature_view.feature_view import FeatureViewWrapper
+import polars as pl
+from datetime import datetime
+
+from aligned.exposed_model.interface import ExposedModel, PromptModel, StreamablePredictor, RetrivalJob
+from aligned.feature_store import ContractStore, ModelFeatureStore
+from aligned.request.retrival_request import RetrivalRequest
+from aligned.schemas.feature import FeatureLocation
+
+try:
+    from langchain_core.language_models.base import LanguageModelLike
+    from langchain_core.retrievers import BaseRetriever
+    from langchain_core.callbacks.manager import (
+        AsyncCallbackManagerForRetrieverRun,
+        CallbackManagerForRetrieverRun,
+    )
+    from langchain_core.documents.base import Document
+except ModuleNotFoundError:
+
+    class BaseRetriever:
+        pass
+
+    class LanguageModelLike:
+        pass
+
+
+class AlignedRetriver(BaseRetriever):
+
+    store: ContractStore
+    index_name: str
+    number_of_docs: int
+
+    def __str__(self) -> str:
+        return f"Aligned Retriever - Loading {self.number_of_docs} documents from '{self.index_name}'"
+
+    def _get_relevant_documents(
+        self, query: str, *, run_manager: 'CallbackManagerForRetrieverRun'
+    ) -> list['Document']:
+        raise NotImplementedError()
+
+    async def _aget_relevant_documents(
+        self, query: str, *, run_manager: 'AsyncCallbackManagerForRetrieverRun'
+    ) -> list['Document']:
+
+        store = self.store
+        index = store.vector_index(self.index_name)
+        embed_model = store.model(index.model.name)
+
+        assert (
+            embed_model.has_exposed_model()
+        ), f"The model {index.model.name} does not have an exposed model, which means we cannot use it."
+
+        exposed_model = embed_model.model.exposed_model
+
+        if isinstance(exposed_model, PromptModel) and exposed_model.precomputed_prompt_key:
+            input_name = exposed_model.precomputed_prompt_key
+        else:
+            inputs = list(embed_model.model.feature_references())
+            assert len(inputs) == 1, (
+                f"The model has more than one input: {len(inputs)}. "
+                f"Unclear what to name the query: '{query}'. "
+                'This can be fixed by making sure the underlying model is a '
+                '`PromptModel` with a `precomputed_prompt_key`.'
+ ) + input_name = inputs[0].name + + embedding = await store.model(embed_model.model.name).predict_over({input_name: [query]}).to_polars() + + embedding_output = [ + feature.name + for feature in embed_model.prediction_request().all_returned_features + if not feature.dtype.is_embedding or feature.dtype.is_array + ] + + documents = await index.nearest_n_to( + entities=embedding.select(pl.exclude(input_name)), number_of_records=self.number_of_docs + ).to_polars() + + documents = documents.with_columns( + page_content=pl.concat_str( + [pl.col(col).cast(pl.String) for col in embedding_output], separator='\n\n' + ) + ) + + return [Document(**doc) for doc in documents.to_dicts()] + + +@dataclass +class LangChain(ExposedModel, StreamablePredictor): + + chain_bytes: bytes + chain_output: str + output_key: str + + depends_on_data: list[FeatureLocation] = field(default_factory=list) + + @property + def chain(self) -> LanguageModelLike: + from dill import loads + + return loads(self.chain_bytes) + + @property + def as_markdown(self) -> str: + return f"A LangChain model looking like {str(self.chain)}" + + @staticmethod + def from_chain( + chain: LanguageModelLike, + chain_output: str = 'answer', + output_key: str = 'answer', + depends_on: list[FeatureLocation | FeatureViewWrapper | ModelContractWrapper] | None = None, + ) -> 'LangChain': + from dill import dumps + + return LangChain( + dumps(chain), + output_key=output_key, + chain_output=chain_output, + depends_on_data=[] + if depends_on is None + else [loc if isinstance(loc, FeatureLocation) else loc.location for loc in depends_on], + ) + + @property + def version(self) -> str: + from hashlib import sha256 + + return sha256(self.chain_bytes, usedforsecurity=False).hexdigest() + + async def depends_on(self) -> list[FeatureLocation]: + return self.depends_on_data + + async def run_polars(self, values: RetrivalJob, store: ModelFeatureStore) -> pl.DataFrame: + responses = [] + pred_view = store.model.predictions_view + df = await values.to_polars() + for question in df.to_dicts(): + responses.append((await self.chain.ainvoke(question))[self.chain_output]) + + if pred_view.model_version_column: + df = df.with_columns(pl.lit(self.version).alias(pred_view.model_version_column.name)) + + if pred_view.event_timestamp: + df = df.with_columns(pl.lit(datetime.utcnow()).alias(pred_view.event_timestamp.name)) + + return df.hstack([pl.Series(name=self.output_key, values=responses)]) + + async def stream_predict(self, input: dict[str, Any]) -> AsyncIterable[dict[str, Any]]: + async for output in self.chain.astream(input): + try: + if isinstance(output, dict): + value = output + else: + value = output.model_dump() + except AttributeError: + value = output.dict() + + if self.output_key != self.chain_output and self.chain_output in value: + value[self.output_key] = value[self.chain_output] + yield value + + +def web_chunks_source(pages: list[str]) -> CodableBatchDataSource: + async def load(request: RetrivalRequest) -> pl.LazyFrame: + import polars as pl + from datetime import timezone, datetime + from langchain_community.document_loaders import WebBaseLoader + from langchain_text_splitters import RecursiveCharacterTextSplitter + + all_splits = [] + + splitter = RecursiveCharacterTextSplitter(chunk_size=500, chunk_overlap=0) + for page in pages: + loader = WebBaseLoader(page) + data = loader.load() + + all_splits.extend(splitter.split_documents(data)) + + flattend_data = [] + for doc in all_splits: + flattend_data.append(dict(page_content=doc.page_content, 
**doc.metadata)) + + df = pl.DataFrame(flattend_data) + return df.with_columns( + loaded_at=pl.lit(datetime.now(tz=timezone.utc)), chunk_hash=pl.col('page_content').hash() + ).lazy() + + return CustomMethodDataSource.from_load(load) + + +def file_chunks_source(directory: str, glob: str = '**') -> CodableBatchDataSource: + async def load(request: RetrivalRequest) -> pl.LazyFrame: + import polars as pl + from datetime import timezone, datetime + from langchain_community.document_loaders import DirectoryLoader, TextLoader, PythonLoader + from langchain_text_splitters import RecursiveCharacterTextSplitter, Language + import logging + + logger = logging.getLogger(__name__) + + loader_cls = TextLoader + splitter = RecursiveCharacterTextSplitter(chunk_size=500, chunk_overlap=0) + + if glob.endswith('.py'): + loader_cls = PythonLoader + splitter = RecursiveCharacterTextSplitter.from_language( + language=Language.PYTHON, chunk_size=500, chunk_overlap=0 + ) + + logger.info(loader_cls) + logger.info(splitter) + + loader = DirectoryLoader(directory, glob=glob, loader_cls=loader_cls) + + flattend_data = [] + for doc in splitter.split_documents(loader.load()): + flattend_data.append(dict(page_content=doc.page_content, **doc.metadata)) + + df = pl.DataFrame(flattend_data) + return df.with_columns( + loaded_at=pl.lit(datetime.now(tz=timezone.utc)), chunk_hash=pl.col('page_content').hash() + ).lazy() + + return CustomMethodDataSource.from_load(load) diff --git a/aligned/exposed_model/ollama.py b/aligned/exposed_model/ollama.py index 1e4142e8..5d848113 100644 --- a/aligned/exposed_model/ollama.py +++ b/aligned/exposed_model/ollama.py @@ -1,11 +1,14 @@ from dataclasses import dataclass +from datetime import datetime, timedelta, timezone from aligned.compiler.model import ModelContractWrapper from aligned.compiler.feature_factory import ( + CouldBeModelVersion, Embedding, Entity, FeatureFactory, FeatureReferencable, Int32, + ModelVersion, String, List, Int64, @@ -13,8 +16,8 @@ ) import logging -from aligned.data_source.batch_data_source import BatchDataSource -from aligned.exposed_model.interface import ExposedModel +from aligned.data_source.batch_data_source import CodableBatchDataSource +from aligned.exposed_model.interface import ExposedModel, PromptModel from aligned.schemas.feature import Feature, FeatureReference from aligned.retrival_job import RetrivalJob import polars as pl @@ -99,7 +102,7 @@ async def run_polars(self, values: RetrivalJob, store: ModelFeatureStore) -> pl. @dataclass -class OllamaEmbeddingPredictor(ExposedModel): +class OllamaEmbeddingPredictor(ExposedModel, PromptModel): endpoint: str model_name: str @@ -108,8 +111,13 @@ class OllamaEmbeddingPredictor(ExposedModel): prompt_template: str input_features_versions: str + precomputed_prompt_key_overwrite: str model_type: str = 'ollama_embedding' + @property + def precomputed_prompt_key(self) -> str | None: + return self.precomputed_prompt_key_overwrite + @property def exposed_at_url(self) -> str | None: return self.endpoint @@ -164,21 +172,31 @@ async def run_polars(self, values: RetrivalJob, store: ModelFeatureStore) -> pl. 
expected_cols = [feat.name for feat in store.feature_references_for(self.input_features_versions)]
         entities = await values.to_polars()
-        missing_cols = set(expected_cols) - set(entities.columns)
-        if missing_cols:
-            entities = (
-                await store.using_version(self.input_features_versions).features_for(values).to_polars()
-            )
-        prompts = entities
+        prompts = []

-        ret_vals = []
+        if self.precomputed_prompt_key_overwrite in entities.columns:
+            prompts = entities[self.precomputed_prompt_key_overwrite].to_list()
+        else:
+            missing_cols = set(expected_cols) - set(entities.columns)
+            if missing_cols:
+                entities = (
+                    await store.using_version(self.input_features_versions).features_for(values).to_polars()
+                )

-        for index, value in enumerate(prompts.iter_rows(named=True)):
-            logger.info(f"Processing row {index + 1}/{len(prompts)}")
+            for index, value in enumerate(entities.iter_rows(named=True)):
+                logger.info(f"Processing row {index + 1}/{entities.height}")

-            prompt = self.prompt_template.format(**value)
+                prompt = self.prompt_template.format(**value)
+                prompts.append(prompt)
+
+            entities = entities.with_columns(
+                pl.Series(name=self.precomputed_prompt_key_overwrite, values=prompts)
+            )
+
+        ret_vals = []
+        for prompt in prompts:
             response = await client.embeddings(self.model_name, prompt)

             if isinstance(response, dict):
@@ -188,10 +206,27 @@ async def run_polars(self, values: RetrivalJob, store: ModelFeatureStore) -> pl.
             ret_vals.append(embedding)

-        model_version = f"{self.prompt_template_hash()} -> {self.model_name}"
-        return prompts.hstack([pl.Series(name=self.embedding_name, values=ret_vals)]).with_columns(
-            pl.lit(model_version).alias('model_version')
-        )
+        pred_view = store.model.predictions_view
+        if pred_view.model_version_column:
+            model_version = f"{self.prompt_template_hash()} -> {self.model_name}"
+            model_version_name = pred_view.model_version_column.name
+            entities = entities.with_columns(pl.lit(model_version).alias(model_version_name))
+
+        if pred_view.event_timestamp:
+            new_et = pred_view.event_timestamp.name
+            existing_et = values.request_result.event_timestamp
+            need_to_add_et = new_et not in entities.columns
+
+            if existing_et and need_to_add_et and existing_et in entities.columns:
+                logger.info(f"Using existing event timestamp `{existing_et}` as the new timestamp.")
+                entities = entities.with_columns(pl.col(existing_et).alias(new_et))
+            elif need_to_add_et:
+                logger.info('No event timestamp found; using the current time as the timestamp.')
+                entities = entities.with_columns(
+                    pl.lit(datetime.now(tz=timezone.utc)).alias(pred_view.event_timestamp.name)
+                )
+
+        return entities.hstack([pl.Series(name=self.embedding_name, values=ret_vals)])


 def ollama_generate(
@@ -215,6 +250,7 @@ def ollama_embedding(
     input_features_versions: str,
     prompt_template: str,
     embedding_name: str | None = None,
+    precomputed_prompt_key: str = 'full_prompt',
 ) -> 'OllamaEmbeddingPredictor':

     return OllamaEmbeddingPredictor(
@@ -223,6 +259,7 @@ def ollama_embedding(
         prompt_template=prompt_template,
         input_features_versions=input_features_versions,
         embedding_name=embedding_name or 'embedding',
+        precomputed_prompt_key_overwrite=precomputed_prompt_key,
     )

@@ -251,7 +288,7 @@ def ollama_generate_contract(
     endpoint: str,
     model: str,
     entities: list[FeatureFactory] | FeatureFactory,
-    output_source: BatchDataSource | None = None,
+    output_source: CodableBatchDataSource | None = None,
     contacts: list[str] | None = None,
     tags: list[str] | None = None,
 ) -> ModelContractWrapper[OllamaGeneration]:
@@ -315,11 +352,16 @@ def ollama_embedding_contract(
endpoint: str, model: str, entities: list[FeatureFactory] | FeatureFactory, - output_source: BatchDataSource | None = None, + output_source: CodableBatchDataSource | None = None, prompt_template: str | None = None, embedding_size: int | None = None, contacts: list[str] | None = None, tags: list[str] | None = None, + precomputed_prompt_key: str = 'full_prompt', + model_version_field: FeatureFactory | None = None, + additional_metadata: list[FeatureFactory] | None = None, + acceptable_freshness: timedelta | None = None, + unacceptable_freshness: timedelta | None = None, ): from aligned import model_contract, FeatureInputVersions @@ -359,14 +401,18 @@ def ollama_embedding_contract( input_features_versions='default', prompt_template=prompt_template, embedding_name='embedding', + precomputed_prompt_key=precomputed_prompt_key, ), output_source=output_source, contacts=contacts, tags=tags, + acceptable_freshness=acceptable_freshness, + unacceptable_freshness=unacceptable_freshness, ) class OllamaEmbedding: - + updated_at = EventTimestamp() embedding = Embedding(embedding_size=emb_size) + full_prompt = String().with_name(precomputed_prompt_key) if not isinstance(entities, list): entities = [entities] @@ -384,6 +430,32 @@ class OllamaEmbedding: setattr(OllamaEmbedding.contract, entity.name, new_entity) + def add_feature(feature: FeatureFactory) -> None: + + assert feature._name, ( + 'Trying to add a feature without any name. ' + 'Consider using the `.with_name(...)` to manually set it.' + ) + if feature._location is None: + setattr(OllamaEmbedding.contract, feature.name, feature) + return + + feature_copy = feature.copy_type() + feature_copy._name = feature._name + feature_copy.constraints = feature.constraints.copy() if feature.constraints else None + setattr(OllamaEmbedding.contract, feature_copy.name, feature_copy) + + if model_version_field is not None: + if isinstance(model_version_field, ModelVersion): + add_feature(model_version_field) + elif isinstance(model_version_field, CouldBeModelVersion): + add_feature(model_version_field.as_model_version().with_name(model_version_field.name)) + else: + raise ValueError(f"Feature {model_version_field} can not be a model version.") + + for feature in additional_metadata or []: + add_feature(feature) + return OllamaEmbedding # type: ignore @@ -394,7 +466,7 @@ def ollama_classification_contract( model: str, entities: list[FeatureFactory] | FeatureFactory, ground_truth: FeatureFactory, - output_source: BatchDataSource | None = None, + output_source: CodableBatchDataSource | None = None, prompt_template: str | None = None, contacts: list[str] | None = None, tags: list[str] | None = None, diff --git a/aligned/exposed_model/tests/test_model.py b/aligned/exposed_model/tests/test_model.py index cefd683f..2b704c4c 100644 --- a/aligned/exposed_model/tests/test_model.py +++ b/aligned/exposed_model/tests/test_model.py @@ -92,9 +92,9 @@ class MyModelContract2: entities = {'entity_id': ['a', 'b'], 'x': [1, 2]} pred_job = MyModelContract2.predict_over(entities, needed_views=[InputFeatureView, MyModelContract]) - preds = await pred_job.to_polars() - assert set(pred_job.request_result.feature_columns) == {'x', 'prediction', 'other_pred'} + + preds = await pred_job.to_polars() assert preds['other_pred'].to_list() == [6, 12] diff --git a/aligned/feature_source.py b/aligned/feature_source.py index 4f07de47..b81c4608 100644 --- a/aligned/feature_source.py +++ b/aligned/feature_source.py @@ -1,7 +1,7 @@ from __future__ import annotations from dataclasses import dataclass 
-from typing import TYPE_CHECKING +from typing import TYPE_CHECKING, Mapping import asyncio @@ -56,7 +56,7 @@ class BatchFeatureSource(FeatureSource, RangeFeatureSource): This class will then know how to strucutre the query in the correct way """ - sources: dict[str, BatchDataSource] + sources: Mapping[str, BatchDataSource] @property def source_types(self) -> dict[str, type[BatchDataSource]]: diff --git a/aligned/feature_store.py b/aligned/feature_store.py index 178c87f9..bf2a820f 100644 --- a/aligned/feature_store.py +++ b/aligned/feature_store.py @@ -1,21 +1,23 @@ from __future__ import annotations import polars as pl -import pandas as pd +from aligned.lazy_imports import pandas as pd import logging from collections import defaultdict from dataclasses import dataclass, field from datetime import datetime, timedelta -from importlib import import_module -from typing import Any, Union +from typing import Union, TypeVar, Callable from prometheus_client import Histogram from aligned.compiler.model import ModelContractWrapper from aligned.data_file import DataFileReference, upsert_on_column -from aligned.data_source.batch_data_source import BatchDataSource, ColumnFeatureMappable -from aligned.enricher import Enricher +from aligned.data_source.batch_data_source import ( + CodableBatchDataSource, + ColumnFeatureMappable, + BatchDataSource, +) from aligned.exceptions import UnableToFindFileException from aligned.feature_source import ( BatchFeatureSource, @@ -24,7 +26,6 @@ RangeFeatureSource, WritableFeatureSource, ) -from aligned.feature_view.combined_view import CombinedFeatureView, CompiledCombinedFeatureView from aligned.feature_view.feature_view import FeatureView, FeatureViewWrapper from aligned.request.retrival_request import FeatureRequest, RetrivalRequest from aligned.retrival_job import ( @@ -41,7 +42,7 @@ from aligned.schemas.folder import DatasetStore from aligned.schemas.model import EventTrigger from aligned.schemas.model import Model as ModelSchema -from aligned.schemas.repo_definition import EnricherReference, RepoDefinition, RepoMetadata +from aligned.schemas.repo_definition import RepoDefinition, RepoMetadata from aligned.sources.vector_index import VectorIndex logger = logging.getLogger(__name__) @@ -53,6 +54,7 @@ ) FeatureSourceable = Union[FeatureSource, FeatureSourceFactory, None] +T = TypeVar('T') @dataclass @@ -63,7 +65,7 @@ class SourceRequest: """ location: FeatureLocation - source: BatchDataSource + source: CodableBatchDataSource request: RetrivalRequest @@ -103,7 +105,6 @@ class ContractStore: feature_source: FeatureSource feature_views: dict[str, CompiledFeatureView] - combined_feature_views: dict[str, CompiledCombinedFeatureView] models: dict[str, ModelSchema] vector_indexes: dict[str, ModelSchema] @@ -114,13 +115,11 @@ def all_models(self) -> list[str]: def __init__( self, feature_views: dict[str, CompiledFeatureView], - combined_feature_views: dict[str, CompiledCombinedFeatureView], models: dict[str, ModelSchema], feature_source: FeatureSource, vector_indexes: dict[str, ModelSchema] | None = None, ) -> None: self.feature_source = feature_source - self.combined_feature_views = combined_feature_views self.feature_views = feature_views self.models = models self.vector_indexes = vector_indexes or {} @@ -149,42 +148,6 @@ def empty() -> ContractStore: def experimental() -> ContractStore: return ContractStore.empty() - @staticmethod - def register_enrichers(enrichers: list[EnricherReference]) -> None: - from types import ModuleType - - class DynamicEnricher(ModuleType): - 
def __init__(self, values: dict[str, Enricher]) -> None: - for key, item in values.items(): - self.__setattr__(key, item) - - def set_module(path: str, module_class: DynamicEnricher) -> None: - import sys - - components = path.split('.') - cum_path = '' - - for component in components: - cum_path += f'.{component}' - if cum_path.startswith('.'): - cum_path = cum_path[1:] - - try: - sys.modules[cum_path] = import_module(cum_path) - except Exception: - logger.info(f'Setting enricher at {cum_path}') - sys.modules[cum_path] = module_class - - grouped_enrichers: dict[str, list[EnricherReference]] = defaultdict(list) - - for enricher in enrichers: - grouped_enrichers[enricher.module].append(enricher) - - for module, values in grouped_enrichers.items(): - set_module( - module, DynamicEnricher({enricher.attribute_name: enricher.enricher for enricher in values}) - ) - @staticmethod def from_definition(repo: RepoDefinition) -> ContractStore: """Creates a feature store based on a repo definition @@ -203,13 +166,8 @@ def from_definition(repo: RepoDefinition) -> ContractStore: Returns: FeatureStore: A ready to use feature store """ - combined_feature_views = {fv.name: fv for fv in repo.combined_feature_views} - - ContractStore.register_enrichers(repo.enrichers) - store = ContractStore( feature_views={}, - combined_feature_views=combined_feature_views, models={}, feature_source=BatchFeatureSource({}), ) @@ -226,9 +184,7 @@ def repo_definition(self) -> RepoDefinition: return RepoDefinition( metadata=RepoMetadata(datetime.utcnow(), name='feature_store_location.py'), feature_views=set(self.feature_views.values()), - combined_feature_views=set(self.combined_feature_views.values()), models=set(self.models.values()), - enrichers=[], ) def combined_with(self, other: ContractStore) -> ContractStore: @@ -272,6 +228,22 @@ async def from_reference_at_path( repo_def = await RepoDefinition.from_reference_at_path(path, reference_file) return ContractStore.from_definition(repo_def) + @staticmethod + async def from_glob(glob: str) -> ContractStore: + """Reads and generates a feature store based on the given glob path. + + This will read the feature views, services etc in a given repo and generate a feature store. + This can be used for fast development purposes. + + Args: + glob (str): the files to read. E.g. `src/**/*.py` + + Returns: + ContractStore: The generated contract store + """ + definition = await RepoDefinition.from_glob(glob) + return ContractStore.from_definition(definition) + @staticmethod async def from_dir(path: str = '.') -> ContractStore: """Reads and generates a feature store based on the given directory's content. 
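The new `from_glob` entry point pairs naturally with a quick smoke test during development. Below is a minimal sketch of that workflow; the `src/**/*.py` pattern is a placeholder for wherever the contracts actually live, and the printed attributes (`feature_views`, `all_models`) are the ones `ContractStore` already exposes:

```python
import asyncio

from aligned import ContractStore


async def main() -> None:
    # Compile every feature view and model contract found by the glob.
    store = await ContractStore.from_glob('src/**/*.py')

    # Inspect what was registered, mirroring the fast-development
    # use case the docstring describes.
    print('views:', list(store.feature_views))
    print('models:', store.all_models)


asyncio.run(main())
```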
@@ -295,9 +267,10 @@ async def from_dir(path: str = '.') -> ContractStore: def execute_sql(self, query: str) -> RetrivalJob: import polars as pl import sqlglot + import sqlglot.expressions as exp expr = sqlglot.parse_one(query) - select_expr = expr.find_all(sqlglot.exp.Select) + select_expr = expr.find_all(exp.Select) tables = set() table_alias: dict[str, str] = {} @@ -315,7 +288,7 @@ def execute_sql(self, query: str) -> RetrivalJob: for expr in select_expr: - for table in expr.find_all(sqlglot.exp.Table): + for table in expr.find_all(exp.Table): tables.add(table.name) table_alias[table.alias_or_name] = table.name @@ -328,13 +301,13 @@ def execute_sql(self, query: str) -> RetrivalJob: else: unique_column_table_lookup[column] = table.name - if expr.find(sqlglot.exp.Star): + if expr.find(exp.Star): for table in tables: table_columns[table].update( all_table_columns.get(table, set()).union(all_model_columns.get(table, set())) ) else: - for column in expr.find_all(sqlglot.exp.Column): + for column in expr.find_all(exp.Column): source_table = table_alias.get(column.table) if source_table: @@ -362,10 +335,10 @@ async def run_query() -> pl.LazyFrame: for req in feature_request.needed_requests: - if req.location.location == 'feature_view': + if req.location.location_type == 'feature_view': view = self.feature_view(req.location.name).select(req.all_feature_names).all() dfs[req.location.name] = await view.to_lazy_polars() - elif req.location.location == 'model': + elif req.location.location_type == 'model': model = ( self.model(req.location.name).all_predictions().select_columns(req.all_feature_names) ) @@ -433,7 +406,7 @@ def features_for( else: for request in requests.needed_requests: if view.name == request.location.name: - feature_names.update(request.all_feature_names) + feature_names.update(request.all_returned_columns) if not isinstance(entities, RetrivalJob): entities = RetrivalJob.from_convertable(entities, requests) @@ -458,7 +431,7 @@ def features_for( new_request = FeatureRequest(requests.location, requests.features_to_include, loaded_requests) return self.features_for_request(new_request, entities, feature_names) - def model(self, name: str) -> ModelFeatureStore: + def model(self, model: str | ModelContractWrapper) -> ModelFeatureStore: """ Selects a model for easy of use. 
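With this change, `model()` accepts either the registered name or the `ModelContractWrapper` produced by the `@model_contract` decorator. A small sketch of the two call styles, assuming a loaded `store` and a decorated contract class `MyModelContract` (both names are illustrative, not part of this diff):

```python
# Lookup by name, as before.
by_name = store.model('my_model_contract')

# New: lookup by the wrapper class itself; the name is resolved
# from `model.location.name`, so both calls hit the same schema.
by_wrapper = store.model(MyModelContract)

assert by_name.model.name == by_wrapper.model.name
```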
@@ -470,17 +443,21 @@ def model(self, name: str) -> ModelFeatureStore: Returns: ModelFeatureStore: A new store that containes the selected model """ - model = self.models[name] - return ModelFeatureStore(model, self) + if isinstance(model, ModelContractWrapper): + name = model.location.name + else: + name = model + + return ModelFeatureStore(self.models[name], self) def vector_index(self, name: str) -> VectorIndexStore: - return VectorIndexStore(self, self.vector_indexes[name]) + return VectorIndexStore(self, self.vector_indexes[name], index_name=name) def event_triggers_for(self, feature_view: str) -> set[EventTrigger]: triggers = self.feature_views[feature_view].event_triggers or set() for model in self.models.values(): for target in model.predictions_view.classification_targets or set(): - if target.event_trigger and target.estimating.location.location == feature_view: + if target.event_trigger and target.estimating.location.location_type == feature_view: triggers.add(target.event_trigger) return triggers @@ -488,7 +465,6 @@ def event_triggers_for(self, feature_view: str) -> set[EventTrigger]: def _requests_for( feature_request: RawStringFeatureRequest, feature_views: dict[str, CompiledFeatureView], - combined_feature_views: dict[str, CompiledCombinedFeatureView], models: dict[str, ModelSchema], event_timestamp_column: str | None = None, model_version_as_entity: bool | None = None, @@ -501,7 +477,7 @@ def _requests_for( for location in feature_request.locations: location_name = location.name - if location.location == 'model': + if location.location_type == 'model': model = models[location_name] view = model.predictions_view if len(features[location]) == 1 and list(features[location])[0] == '*': @@ -513,16 +489,6 @@ def _requests_for( requests.append(request) entity_names.update(request.entity_names) - elif location_name in combined_feature_views: - cfv = combined_feature_views[location_name] - if len(features[location]) == 1 and list(features[location])[0] == '*': - sub_requests = cfv.request_all - else: - sub_requests = cfv.requests_for(features[location]) - requests.extend(sub_requests.needed_requests) - for request in sub_requests.needed_requests: - entity_names.update(request.entity_names) - elif location_name in feature_views: feature_view = feature_views[location_name] @@ -551,8 +517,7 @@ def _requests_for( entity_names.update(sub_request.entity_names) else: raise ValueError( - f'Unable to find: {location_name}, ' - f'availible views are: {combined_feature_views.keys()}, and: {feature_views.keys()}' + f'Unable to find: {location_name}, ' f'availible views are: {feature_views.keys()}' ) if event_timestamp_column: @@ -589,13 +554,12 @@ def requests_for( return ContractStore._requests_for( feature_request, self.feature_views, - self.combined_feature_views, self.models, event_timestamp_column=event_timestamp_column, model_version_as_entity=model_version_as_entity, ) - def feature_view(self, view: str) -> FeatureViewStore: + def feature_view(self, view: str | FeatureViewWrapper) -> FeatureViewStore: """ Selects a feature view based on a name. 
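Threading `index_name` into `VectorIndexStore` above is what lets the `as_langchain_retriver` helper, added later in this diff, look the index back up from inside LangChain. A hedged sketch of the intended flow; `'documents'` and the query string are placeholders, and `ainvoke` assumes a langchain-core version where `BaseRetriever` exposes the async Runnable interface (the retriever only implements the async path):

```python
# Select a registered vector index; the store now remembers its name.
index = store.vector_index('documents')

# Wrap it as a LangChain retriever (requires the langchain extras).
retriever = index.as_langchain_retriver(number_of_docs=5)

# Embeds the query via the index's exposed model, then fetches the
# closest documents from the vector source.
docs = await retriever.ainvoke('How do I configure a batch source?')
```

@@ -470,17 +443,21 @@ def model(self, name: str) -> ModelFeatureStore:
         Returns:
             ModelFeatureStore: A new store that containes the selected model
         """
-        model = self.models[name]
-        return ModelFeatureStore(model, self)
+        if isinstance(model, ModelContractWrapper):
+            name = model.location.name
+        else:
+            name = model
+
+        return ModelFeatureStore(self.models[name], self)

     def vector_index(self, name: str) -> VectorIndexStore:
-        return VectorIndexStore(self, self.vector_indexes[name])
+        return VectorIndexStore(self, self.vector_indexes[name], index_name=name)

     def event_triggers_for(self, feature_view: str) -> set[EventTrigger]:
         triggers = self.feature_views[feature_view].event_triggers or set()
         for model in self.models.values():
             for target in model.predictions_view.classification_targets or set():
-                if target.event_trigger and target.estimating.location.location == feature_view:
+                if target.event_trigger and target.estimating.location.location_type == feature_view:
                     triggers.add(target.event_trigger)
         return triggers

@@ -488,7 +465,6 @@ def event_triggers_for(self, feature_view: str) -> set[EventTrigger]:
     def _requests_for(
         feature_request: RawStringFeatureRequest,
         feature_views: dict[str, CompiledFeatureView],
-        combined_feature_views: dict[str, CompiledCombinedFeatureView],
         models: dict[str, ModelSchema],
         event_timestamp_column: str | None = None,
         model_version_as_entity: bool | None = None,
@@ -501,7 +477,7 @@ def _requests_for(
         for location in feature_request.locations:
             location_name = location.name
-            if location.location == 'model':
+            if location.location_type == 'model':
                 model = models[location_name]
                 view = model.predictions_view
                 if len(features[location]) == 1 and list(features[location])[0] == '*':
@@ -513,16 +489,6 @@ def _requests_for(
                 requests.append(request)
                 entity_names.update(request.entity_names)

-            elif location_name in combined_feature_views:
-                cfv = combined_feature_views[location_name]
-                if len(features[location]) == 1 and list(features[location])[0] == '*':
-                    sub_requests = cfv.request_all
-                else:
-                    sub_requests = cfv.requests_for(features[location])
-                requests.extend(sub_requests.needed_requests)
-                for request in sub_requests.needed_requests:
-                    entity_names.update(request.entity_names)
-
             elif location_name in feature_views:
                 feature_view = feature_views[location_name]

@@ -551,8 +517,7 @@ def _requests_for(
                 entity_names.update(sub_request.entity_names)
             else:
                 raise ValueError(
-                    f'Unable to find: {location_name}, '
-                    f'availible views are: {combined_feature_views.keys()}, and: {feature_views.keys()}'
+                    f'Unable to find: {location_name}, ' f'available views are: {feature_views.keys()}'
                 )

         if event_timestamp_column:
@@ -589,13 +554,12 @@ def requests_for(
         return ContractStore._requests_for(
             feature_request,
             self.feature_views,
-            self.combined_feature_views,
             self.models,
             event_timestamp_column=event_timestamp_column,
             model_version_as_entity=model_version_as_entity,
         )

-    def feature_view(self, view: str) -> FeatureViewStore:
+    def feature_view(self, view: str | FeatureViewWrapper) -> FeatureViewStore:
         """
         Selects a feature view based on a name.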
@@ -608,16 +572,16 @@ def feature_view(self, view: str) -> FeatureViewStore: Args: view (str): The name of the feature view - Raises: - CombinedFeatureViewQuerying: If the name is a combined feature view - Returns: FeatureViewStore: The selected feature view ready for querying """ - if view in self.combined_feature_views: - return FeatureViewStore(self, self.combined_feature_views[view], set()) - feature_view = self.feature_views[view] - return FeatureViewStore(self, feature_view, self.event_triggers_for(view)) + if isinstance(view, FeatureViewWrapper): + view_name = view.location.name + else: + view_name = view + + feature_view = self.feature_views[view_name] + return FeatureViewStore(self, feature_view, self.event_triggers_for(view_name)) def add_view(self, view: CompiledFeatureView | FeatureView | FeatureViewWrapper) -> None: """ @@ -662,6 +626,7 @@ class MyFeatureView: self.feature_views[view.name] = view if isinstance(self.feature_source, BatchFeatureSource): + assert isinstance(self.feature_source.sources, dict) self.feature_source.sources[FeatureLocation.feature_view(view.name).identifier] = ( view.materialized_source or view.source ) @@ -674,13 +639,6 @@ def add_feature_view(self, feature_view: FeatureView | FeatureViewWrapper | Comp else: self.add_compiled_view(feature_view) - def add_combined_feature_view(self, feature_view: CombinedFeatureView) -> None: - compiled_view = type(feature_view).compile() - self.combined_feature_views[compiled_view.name] = compiled_view - - def add_combined_view(self, compiled_view: CompiledCombinedFeatureView) -> None: - self.combined_feature_views[compiled_view.name] = compiled_view - def add_model(self, model: ModelContractWrapper) -> None: """ Compiles and adds the model to the store @@ -710,6 +668,7 @@ def add_compiled_model(self, model: ModelSchema) -> None: self.vector_indexes[index_name] = model if isinstance(self.feature_source, BatchFeatureSource) and source is not None: + assert isinstance(self.feature_source.sources, dict) self.feature_source.sources[FeatureLocation.model(model.name).identifier] = source def with_source(self, source: FeatureSourceable = None) -> ContractStore: @@ -730,6 +689,8 @@ def with_source(self, source: FeatureSourceable = None) -> ContractStore: """ if isinstance(source, FeatureSourceFactory): feature_source = source.feature_source() + elif isinstance(source, FeatureSource): + feature_source = source elif source is None: sources = { FeatureLocation.feature_view(view.name).identifier: view.source @@ -740,8 +701,6 @@ def with_source(self, source: FeatureSourceable = None) -> ContractStore: if model.predictions_view.source is not None } feature_source = source or BatchFeatureSource(sources=sources) - elif isinstance(source, FeatureSource): - feature_source = source else: raise ValueError( 'Setting a dedicated source needs to be either a FeatureSource, ' @@ -750,11 +709,26 @@ def with_source(self, source: FeatureSourceable = None) -> ContractStore: return ContractStore( feature_views=self.feature_views, - combined_feature_views=self.combined_feature_views, models=self.models, feature_source=feature_source, ) + def sources_of_type(self, source_type: type[T], function: Callable[[T, FeatureLocation], None]) -> None: + + if not isinstance(self.feature_source, BatchFeatureSource): + raise ValueError( + f'.update_source_for(...) 
needs a `BatchFeatureSource`, got {type(self.feature_source)}' + ) + + assert isinstance(self.feature_source.sources, dict), 'Can only operate on a dict' + + for location, source in self.feature_source.sources.items(): + if not isinstance(source, source_type): + continue + + loc = FeatureLocation.from_string(location) + function(source, loc) + def update_source_for(self, location: FeatureLocation | str, source: BatchDataSource) -> ContractStore: if not isinstance(self.feature_source, BatchFeatureSource): raise ValueError( @@ -765,11 +739,11 @@ def update_source_for(self, location: FeatureLocation | str, source: BatchDataSo location = FeatureLocation.from_string(location) new_source = self.feature_source + assert isinstance(new_source.sources, dict) new_source.sources[location.identifier] = source return ContractStore( feature_views=self.feature_views, - combined_feature_views=self.combined_feature_views, models=self.models, feature_source=new_source, ) @@ -801,7 +775,6 @@ def use_application_sources(self) -> ContractStore: } return ContractStore( feature_views=self.feature_views, - combined_feature_views=self.combined_feature_views, models=self.models, feature_source=BatchFeatureSource(sources=sources), ) @@ -818,44 +791,12 @@ def model_features_for(self, view_name: str) -> set[str]: ) return all_model_features - def views_with_config(self, config: Any) -> list[SourceRequest]: - """ - Returns the feature views where the config match. - - ```python - source = PostgreSQLConfig(env_var='SOURCE_URL') - store.views_with_conifg(source) - ``` - - Args: - config (Any): The config to find views for - - Returns: - list[SourceRequest]: A list of data sources, the request and it's location - """ - views: list[SourceRequest] = [] - for view in self.feature_views.values(): - request = view.request_all.needed_requests[0] - if view.source.contains_config(config): - views.append(SourceRequest(FeatureLocation.feature_view(view.name), view.source, request)) - - if view.application_source and view.application_source.contains_config(config): - views.append( - SourceRequest(FeatureLocation.feature_view(view.name), view.application_source, request) - ) - return views - def write_request_for(self, location: FeatureLocation) -> RetrivalRequest: - if location.location == 'feature_view': + if location.location_type == 'feature_view': return self.feature_views[location.name].request_all.needed_requests[0] - elif location.location == 'model': + elif location.location_type == 'model': return self.models[location.name].predictions_view.request('write', model_version_as_entity=True) - elif location.location == 'combined_view': - raise NotImplementedError( - 'Have not implemented write requests for combined views. ' - 'Please consider contributing and add a PR.' 
- ) else: raise ValueError(f"Unable to write to location: '{location}'.") @@ -890,8 +831,10 @@ async def insert_into( try: existing_df = await source.to_lazy_polars() - write_df = pl.concat([new_df, existing_df.select(columns)], how='vertical_relaxed') - except UnableToFindFileException: + write_df = ( + pl.concat([new_df, existing_df.select(columns)], how='vertical_relaxed').collect().lazy() + ) + except (UnableToFindFileException, pl.ComputeError): write_df = new_df if isinstance(source, ColumnFeatureMappable): @@ -933,8 +876,9 @@ async def upsert_into( try: existing_df = await source.to_lazy_polars() write_df = upsert_on_column(entities, new_df, existing_df) - except UnableToFindFileException: + except (UnableToFindFileException, pl.ComputeError): write_df = new_df + await source.write_polars(write_df) else: raise ValueError(f'The source {type(source)} do not support writes') @@ -986,6 +930,19 @@ def location(self) -> FeatureLocation: def dataset_store(self) -> DatasetStore | None: return self.model.dataset_store + def has_one_source_for_input_features(self) -> bool: + """ + If the input features are from the same source. + + This can be interesting to know in order to automatically predict over + the input. + E.g. predict over all data in the source. + """ + version = self.selected_version or self.model.features.default_version + features = self.model.features.features_for(version) + locations = {feature.location for feature in features} + return len(locations) == 1 + def raw_string_features(self, except_features: set[str]) -> set[str]: version = self.selected_version or self.model.features.default_version @@ -1097,9 +1054,7 @@ def features_for( else: features = self.raw_string_features(set()) - job = self.store.features_for( - entities, list(features), event_timestamp_column=event_timestamp_column - ).with_request(request.needed_requests) + job = self.store.features_for(entities, list(features), event_timestamp_column=event_timestamp_column) if isinstance(entities, (dict, pl.DataFrame, pd.DataFrame)): @@ -1125,6 +1080,9 @@ def features_for( return job async def freshness(self) -> dict[FeatureLocation, datetime | None]: + return await self.input_freshness() + + async def input_freshness(self) -> dict[FeatureLocation, datetime | None]: from aligned.schemas.feature import EventTimestamp locs: dict[FeatureLocation, EventTimestamp] = {} @@ -1133,8 +1091,29 @@ async def freshness(self) -> dict[FeatureLocation, datetime | None]: if req.event_timestamp: locs[req.location] = req.event_timestamp + if self.model.exposed_model: + additional_model_deps = await self.model.exposed_model.depends_on() + for loc in additional_model_deps: + if loc in locs: + continue + + if loc.location_type == 'model': + event_timestamp = self.store.model(loc.name).prediction_request().event_timestamp + else: + event_timestamp = self.store.feature_view(loc.name).request.event_timestamp + + if event_timestamp: + locs[loc] = event_timestamp + return await self.store.feature_source.freshness_for(locs) + async def prediction_freshness(self) -> datetime | None: + pred_req = self.prediction_request() + if not pred_req.event_timestamp: + return None + freshness = await self.store.feature_source.freshness_for({self.location: pred_req.event_timestamp}) + return freshness[self.location] + def with_labels(self, label_refs: set[FeatureReference] | None = None) -> SupervisedModelFeatureStore: """Will also load the labels for the model @@ -1621,6 +1600,9 @@ def select_columns(self, columns: list[str], limit: int | None = None) -> 
Retriv def all(self, limit: int | None = None) -> RetrivalJob: return self.all_columns(limit) + def filter(self, filter: pl.Expr | str) -> RetrivalJob: + return self.all().filter(filter) + def all_columns(self, limit: int | None = None) -> RetrivalJob: if not isinstance(self.source, RangeFeatureSource): raise ValueError(f'The source ({self.source}) needs to conform to RangeFeatureSource') @@ -1786,7 +1768,7 @@ async def batch_write(self, values: ConvertableToRetrivalJob | RetrivalJob) -> N # job = job.filter(self.feature_filter) with feature_view_write_time.labels(self.view.name).time(): - await self.source.insert(job, job.retrival_requests) + await self.source.insert(job, job.retrival_requests[0]) async def freshness(self) -> datetime | None: @@ -1805,8 +1787,9 @@ class VectorIndexStore: store: ContractStore model: ModelSchema + index_name: str - def __init__(self, store: ContractStore, model: ModelSchema): + def __init__(self, store: ContractStore, model: ModelSchema, index_name: str): if model.predictions_view.source is None: raise ValueError(f"An output source on the model {model.name} is needed") @@ -1819,6 +1802,7 @@ def __init__(self, store: ContractStore, model: ModelSchema): self.store = store self.model = model + self.index_name = index_name def nearest_n_to( self, entities: RetrivalJob | ConvertableToRetrivalJob, number_of_records: int @@ -1857,3 +1841,8 @@ def contains_embedding() -> bool: features: RetrivalJob = self.store.features_for(entities, features=[feature_ref.identifier]) return source.nearest_n_to(features, number_of_records, response) + + def as_langchain_retriver(self, number_of_docs: int = 5): + from aligned.exposed_model.langchain import AlignedRetriver + + return AlignedRetriver(store=self.store, index_name=self.index_name, number_of_docs=number_of_docs) diff --git a/aligned/feature_view/__init__.py b/aligned/feature_view/__init__.py index eeb9db73..a090938b 100644 --- a/aligned/feature_view/__init__.py +++ b/aligned/feature_view/__init__.py @@ -1,10 +1,6 @@ -from aligned.feature_view.combined_view import ( - combined_feature_view, -) from aligned.feature_view.feature_view import feature_view, check_schema __all__ = [ 'feature_view', - 'combined_feature_view', 'check_schema', ] diff --git a/aligned/feature_view/combined_view.py b/aligned/feature_view/combined_view.py deleted file mode 100644 index cc87d78a..00000000 --- a/aligned/feature_view/combined_view.py +++ /dev/null @@ -1,159 +0,0 @@ -import logging -from abc import ABC, abstractproperty -from dataclasses import dataclass -from typing import Generic, TypeVar, Any, Type, Callable, TYPE_CHECKING - -from aligned.compiler.feature_factory import FeatureFactory -from aligned.feature_view.feature_view import FeatureView -from aligned.request.retrival_request import RetrivalRequest -from aligned.schemas.derivied_feature import DerivedFeature -from aligned.schemas.feature import FeatureLocation -from aligned.schemas.feature_view import CompiledCombinedFeatureView, CompiledFeatureView - -if TYPE_CHECKING: - from aligned.feature_store import FeatureViewStore - -logger = logging.getLogger(__name__) - -T = TypeVar('T') - - -@dataclass -class CombinedFeatureViewMetadata: - name: str - description: str | None = None - tags: dict[str, str] | None = None - owner: str | None = None - - -@dataclass -class CombinedFeatureViewWrapper(Generic[T]): - - metadata: CombinedFeatureViewMetadata - view: Type[T] - - def __call__(self) -> T: - # Needed to make sure that the `location` is set in the view's features - _ = self.compile() - 
return self.view() - - def compile(self) -> CompiledCombinedFeatureView: - return CombinedFeatureView.compile_with_metadata(self.view(), self.metadata) - - def query(self) -> 'FeatureViewStore': - """Makes it possible to query the feature view for features - - ```python - @feature_view(...) - class SomeView: - - id = Int32().as_entity() - - a = Int32() - b = Int32() - - data = await SomeView.query().features_for({ - "id": [1, 2, 3], - }).to_pandas() - ``` - - Returns: - FeatureViewStore: Returns a queryable `FeatureViewStore` containing the feature view - """ - from aligned import ContractStore - - store = ContractStore.experimental() - store.add_combined_view(self.compile()) - return store.feature_view(self.metadata.name) - - async def process(self, data: dict[str, list[Any]]) -> list[dict]: - df = await self.query().process_input(data).to_lazy_polars() - return df.collect().to_dicts() - - -def combined_feature_view( - name: str, description: str, tags: dict[str, str] | None = None, owner: str | None = None -) -> Callable[[Type[T]], CombinedFeatureViewWrapper[T]]: - """ - Wraps a view as a combined view - - ```python - @combined_feature_view( - name="my_combined_view", - description="some description" - ) - class MyView: - - other = OtherView() - another = AnotherView() - - y = other.x * another.y - ``` - """ - - def decorator(cls: Type[T]) -> CombinedFeatureViewWrapper[T]: - return CombinedFeatureViewWrapper( - CombinedFeatureViewMetadata(name, description, tags=tags, owner=owner), cls - ) - - return decorator - - -class CombinedFeatureView(ABC): - @abstractproperty - def metadata(self) -> CombinedFeatureViewMetadata: - raise NotImplementedError(f'Need to add a metadata field to in {self}') - - @staticmethod - def _needed_features( - depending_on: list[FeatureFactory], feature_views: dict[FeatureLocation, CompiledFeatureView] - ) -> list[RetrivalRequest]: - - feature_refs: dict[CompiledFeatureView, set[str]] = {} - - for feature_dep in depending_on: - view = feature_views[feature_dep._location] - feature_refs.setdefault(view, set()).add(feature_dep.name) - - return [ - feature_view.request_for(features).needed_requests[0] - for feature_view, features in feature_refs.items() - ] - - @classmethod - def compile(cls) -> CompiledCombinedFeatureView: - instance = cls() - return CombinedFeatureView.compile_with_metadata(instance, instance.metadata) - - @staticmethod - def compile_with_metadata( - view: Any, metadata: CombinedFeatureViewMetadata - ) -> CompiledCombinedFeatureView: - transformations: set[DerivedFeature] = set() - var_names = [name for name in view.__dir__() if not name.startswith('_')] - - requests: dict[str, list[RetrivalRequest]] = {} - feature_view_deps: dict[FeatureLocation, CompiledFeatureView] = {} - - for var_name in var_names: - feature = getattr(view, var_name) - if isinstance(feature, FeatureView): - # Needs to compile the view one more time. unfortunally.. 
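Since `combined_feature_view` is removed wholesale here, the migration path (exercised by the updated tests further down in this diff) is to source one `feature_view` from another view, or from a join of views:

```python
from aligned import feature_view, Int32, FileSource

@feature_view(name='test', source=FileSource.csv_at('test_data/test.csv'))
class Test:
    some_id = Int32().as_entity()
    feature = Int32()
    derived_feature = feature * 10

test = Test()

# A view sourced from another view replaces the old combined view
@feature_view(name='test_ref', source=Test)  # type: ignore
class TestRef:
    some_id = Int32().as_entity()
    new_feature = test.derived_feature * 5
```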
- # not optimal as the view will be duplicated in the definition file - feature_view_deps[FeatureLocation.feature_view(feature.metadata.name)] = feature.compile() - if isinstance(feature, FeatureFactory): - feature._location = FeatureLocation.combined_view(var_name) - if not feature.transformation: - logger.info('Feature had no transformation, which do not make sense in a CombinedView') - continue - requests[var_name] = CombinedFeatureView._needed_features( - feature.transformation.using_features, feature_view_deps - ) - - transformations.add(feature.compile()) - - return CompiledCombinedFeatureView( - name=metadata.name, - features=transformations, - feature_referances=requests, - ) diff --git a/aligned/feature_view/feature_view.py b/aligned/feature_view/feature_view.py index 1c63dc85..0456f419 100644 --- a/aligned/feature_view/feature_view.py +++ b/aligned/feature_view/feature_view.py @@ -3,7 +3,6 @@ import copy import logging import polars as pl -import pandas as pd from datetime import timedelta from abc import ABC, abstractproperty @@ -11,6 +10,7 @@ from typing import TYPE_CHECKING, Any, TypeVar, Generic, Type, Callable from uuid import uuid4 +from aligned.lazy_imports import pandas as pd from aligned.compiler.feature_factory import ( AggregationTransformationFactory, Embedding, @@ -19,7 +19,7 @@ Bool, ) from aligned.data_source.batch_data_source import ( - BatchDataSource, + CodableBatchDataSource, JoinAsofDataSource, JoinDataSource, join_asof_source, @@ -31,7 +31,7 @@ from aligned.schemas.derivied_feature import ( AggregatedFeature, ) -from aligned.schemas.feature import FeatureLocation, FeatureReference +from aligned.schemas.feature import FeatureLocation, FeatureReference, StaticFeatureTags from aligned.schemas.feature_view import CompiledFeatureView from aligned.compiler.feature_factory import FeatureFactory @@ -43,7 +43,7 @@ # Enables code compleation in the select method T = TypeVar('T') -ConvertableData = TypeVar('ConvertableData', dict, pl.DataFrame, pd.DataFrame) +ConvertableData = TypeVar('ConvertableData', dict, pl.DataFrame, 'pd.DataFrame') logger = logging.getLogger(__name__) @@ -52,11 +52,11 @@ @dataclass class FeatureViewMetadata: name: str - source: BatchDataSource + source: CodableBatchDataSource description: str | None = field(default=None) stream_source: StreamDataSource | None = field(default=None) - application_source: BatchDataSource | None = field(default=None) - materialized_source: BatchDataSource | None = field(default=None) + application_source: CodableBatchDataSource | None = field(default=None) + materialized_source: CodableBatchDataSource | None = field(default=None) materialize_from: datetime | None = field(default=None) contacts: list[str] | None = field(default=None) tags: list[str] | None = field(default=None) @@ -79,26 +79,26 @@ def from_compiled(view: CompiledFeatureView) -> FeatureViewMetadata: ) -def resolve_source(source: BatchDataSource | FeatureViewWrapper) -> BatchDataSource: +def resolve_source(source: CodableBatchDataSource | FeatureViewWrapper) -> CodableBatchDataSource: if isinstance(source, FeatureViewWrapper): from aligned.schemas.feature_view import FeatureViewReferenceSource compiled = source.compile() return FeatureViewReferenceSource(compiled, FeatureLocation.feature_view(compiled.name)) - elif isinstance(source, BatchDataSource): + elif isinstance(source, CodableBatchDataSource): return source else: raise ValueError(f'Unable to use source: {type(source)} - {source}') def feature_view( - source: BatchDataSource | FeatureViewWrapper, 
+ source: CodableBatchDataSource | FeatureViewWrapper, name: str | None = None, description: str | None = None, stream_source: StreamDataSource | None = None, - application_source: BatchDataSource | None = None, - materialized_source: BatchDataSource | None = None, + application_source: CodableBatchDataSource | None = None, + materialized_source: CodableBatchDataSource | None = None, materialize_from: datetime | None = None, contacts: list[str] | None = None, tags: list[str] | None = None, @@ -128,6 +128,31 @@ def decorator(cls: Type[T]) -> FeatureViewWrapper[T]: return decorator +def annotated_feature_view( + annotation_source: FeatureLocation, + annotated_source: CodableBatchDataSource | FeatureViewWrapper, + name: str | None = None, + description: str | None = None, + contacts: list[str] | None = None, + tags: list[str] | None = None, +) -> Callable[[Type[T]], FeatureViewWrapper[T]]: + def decorator(cls: Type[T]) -> FeatureViewWrapper[T]: + + used_name = name or str(cls.__name__).lower() + used_description = description or str(cls.__doc__) + + metadata = FeatureViewMetadata( + used_name, + resolve_source(annotated_source), + description=used_description, + contacts=contacts, + tags=tags, + ) + return FeatureViewWrapper(metadata, cls()) + + return decorator + + def set_location_for_features_in(view: Any, location: FeatureLocation) -> Any: for attribute in dir(view): if attribute.startswith('__'): @@ -142,12 +167,30 @@ def set_location_for_features_in(view: Any, location: FeatureLocation) -> Any: return view +@dataclass +class AnnotatedViewWrapper(Generic[T]): + + annotated_view: FeatureLocation + view_wrapper: FeatureViewWrapper[T] + + def compile(self) -> None: + view = self.view_wrapper.compile() + annotated_by = [ + feat for feat in view.features if feat.tags and StaticFeatureTags.is_annotated_by in feat.tags + ] + assert len(annotated_by) <= 1 + + @dataclass class FeatureViewWrapper(Generic[T]): metadata: FeatureViewMetadata view: T + @property + def location(self) -> FeatureLocation: + return FeatureLocation.feature_view(self.metadata.name) + def __call__(self) -> T: view = copy.deepcopy(self.view) view = set_location_for_features_in(view, FeatureLocation.feature_view(self.metadata.name)) @@ -161,8 +204,8 @@ def compile(self) -> CompiledFeatureView: return FeatureView.compile_with_metadata(view, self.metadata) def vstack( - self, source: BatchDataSource | FeatureViewWrapper, source_column: str | None = None - ) -> BatchDataSource: + self, source: CodableBatchDataSource | FeatureViewWrapper, source_column: str | None = None + ) -> CodableBatchDataSource: from aligned.data_source.batch_data_source import StackSource return StackSource( @@ -170,7 +213,10 @@ def vstack( ) def filter( - self, name: str, where: Callable[[T], Bool], materialize_source: BatchDataSource | None = None + self, + name: str, + where: Callable[[T], Bool] | pl.Expr, + materialize_source: CodableBatchDataSource | None = None, ) -> FeatureViewWrapper[T]: from aligned.data_source.batch_data_source import FilteredDataSource @@ -180,21 +226,24 @@ def filter( meta.name = name meta.materialized_source = materialize_source - condition = where(self.__call__()) - main_source = FeatureViewReferenceSource( self.compile(), FeatureLocation.feature_view(self.metadata.name) ) - if not condition._name: - condition._name = str(uuid4()) - condition._location = FeatureLocation.feature_view(name) - - if condition.transformation: - meta.source = FilteredDataSource(main_source, condition.compile()) + if isinstance(where, pl.Expr): + 
filter = where.meta.serialize() else: - meta.source = FilteredDataSource(main_source, condition.feature()) + condition = where(self.__call__()) + + if not condition._name: + condition._name = str(uuid4()) + condition._location = FeatureLocation.feature_view(name) + if condition.transformation: + filter = condition.compile() + else: + filter = condition.feature() + meta.source = FilteredDataSource(main_source, filter) return FeatureViewWrapper(metadata=meta, view=self.view) def join( @@ -240,13 +289,72 @@ def join_asof( right_on=right_on, ) + def with_schema( + self, + name: str, + source: CodableBatchDataSource | FeatureViewWrapper, + materialized_source: CodableBatchDataSource | None = None, + entities: dict[str, FeatureFactory] | None = None, + additional_features: dict[str, FeatureFactory] | None = None, + copy_default_values: bool = False, + copy_transformations: bool = False, + ) -> FeatureViewWrapper[T]: + + meta = copy.deepcopy(self.metadata) + meta.name = name + meta.source = resolve_source(source) + meta.materialized_source = None + + if materialized_source: + meta.materialized_source = resolve_source(materialized_source) + + view = copy.deepcopy(self.view) + compiled = self.compile() + + for agg_feature in compiled.aggregated_features: + org_feature: FeatureFactory = getattr(view, agg_feature.derived_feature.name) + feature = org_feature.copy_type() + feature.transformation = None + feature.tags = set(agg_feature.derived_feature.tags or []) + if copy_transformations: + feature.transformation = copy.deepcopy(org_feature.transformation) + if copy_default_values: + feature._default_value = org_feature._default_value + setattr(view, agg_feature.derived_feature.name, feature) + + for derived_feature in compiled.derived_features: + org_feature: FeatureFactory = getattr(view, derived_feature.name) + feature = org_feature.copy_type() + feature.transformation = None + if copy_transformations: + feature.transformation = copy.deepcopy(org_feature.transformation) + feature.tags = set(derived_feature.tags or []) + if copy_default_values: + feature._default_value = org_feature._default_value + setattr(view, derived_feature.name, feature) + + if entities is not None: + for name, feature in entities.items(): + setattr(view, name, feature.as_entity()) # type: ignore + + for entity in compiled.entities: + if entity.name in entities: + continue + + setattr(view, entity.name, None) + + if additional_features is not None: + for name, feature in additional_features.items(): + setattr(view, name, feature) + + return FeatureViewWrapper(meta, view) + def with_source( self, named: str, - source: BatchDataSource | FeatureViewWrapper, - materialized_source: BatchDataSource | None = None, + source: CodableBatchDataSource | FeatureViewWrapper, + materialized_source: CodableBatchDataSource | None = None, ) -> FeatureViewWrapper[T]: - meta = copy.deepcopy(self.metadata) meta.name = named meta.source = resolve_source(source) @@ -255,7 +363,7 @@ def with_source( if materialized_source: meta.materialized_source = resolve_source(materialized_source) - return FeatureViewWrapper(meta, self.view) + return FeatureViewWrapper(metadata=meta, view=self.view) def with_entity_renaming(self, named: str, renames: dict[str, str] | str) -> FeatureViewWrapper[T]: from aligned.data_source.batch_data_source import ColumnFeatureMappable @@ -333,7 +441,7 @@ async def process(self, data: ConvertableToRetrivalJob) -> list[dict]: df = await self.query().process_input(data).to_lazy_polars() return df.collect().to_dicts() - async def 
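The `filter` rework just above means a view can now be filtered with a raw polars expression, serialized via `Expr.meta.serialize()` onto the `FilteredDataSource`, in addition to the existing callable form. Both forms on the `Transaction` view from this diff's tests (the callable variant assumes the view exposes an `amount` feature, which those tests suggest):

```python
import polars as pl

# Existing callable form: build the condition from the view's own features
Expences = Transaction.filter(name='expence', where=lambda view: view.amount > 0)

# New expression form: serialized and stored on the FilteredDataSource
Expences = Transaction.filter(name='expence', where=pl.col('amount') > 0)
```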
freshness_in_source(self, source: BatchDataSource) -> datetime | None: + async def freshness_in_source(self, source: CodableBatchDataSource) -> datetime | None: """ Returns the freshest datetime for a provided source @@ -422,7 +530,7 @@ def drop_invalid(self, data: ConvertableData, validator: Validator | None = None else: raise ValueError(f'Invalid data type: {type(data)}') - def as_source(self, renames: dict[str, str] | None = None) -> BatchDataSource: + def as_source(self, renames: dict[str, str] | None = None) -> CodableBatchDataSource: from aligned.schemas.feature_view import FeatureViewReferenceSource return FeatureViewReferenceSource( @@ -444,11 +552,11 @@ def metadata(self) -> FeatureViewMetadata: @staticmethod def metadata_with( name: str, - batch_source: BatchDataSource, + batch_source: CodableBatchDataSource, description: str | None = None, stream_source: StreamDataSource | None = None, - application_source: BatchDataSource | None = None, - staging_source: BatchDataSource | None = None, + application_source: CodableBatchDataSource | None = None, + staging_source: CodableBatchDataSource | None = None, contacts: list[str] | None = None, tags: list[str] | None = None, ) -> FeatureViewMetadata: @@ -481,7 +589,9 @@ async def batch_source_freshness(cls) -> datetime | None: return await FeatureView.freshness_in_source(compiled, compiled.source) @staticmethod - async def freshness_in_source(view: CompiledFeatureView, source: BatchDataSource) -> datetime | None: + async def freshness_in_source( + view: CompiledFeatureView, source: CodableBatchDataSource + ) -> datetime | None: if not view.event_timestamp: raise ValueError( f'The feature view: {view.name}, needs an event timestamp', @@ -608,7 +718,6 @@ def sort_key(x: tuple[int, FeatureFactory]) -> int: ' FeatureViewDefinition. 
Check that this is the case for' f' {type(view).__name__}' ) - view.features.add(compiled_feature) view.event_timestamp = feature.event_timestamp() else: view.features.add(compiled_feature) @@ -765,7 +874,9 @@ def func_wrapper(*args, **kwargs) -> Any: # type: ignore from typing import _AnnotatedAlias # type: ignore params_to_check = { - name: value for name, value in func.__annotations__.items() if type(value) == _AnnotatedAlias + name: value + for name, value in func.__annotations__.items() + if type(value) == _AnnotatedAlias # noqa: E721 } function_args = func.__code__.co_varnames diff --git a/aligned/feature_view/tests/test_check_schema.py b/aligned/feature_view/tests/test_check_schema.py index 9109aeec..f2d7bfcb 100644 --- a/aligned/feature_view/tests/test_check_schema.py +++ b/aligned/feature_view/tests/test_check_schema.py @@ -2,7 +2,7 @@ from aligned import Bool, Float, String, feature_view, FileSource from aligned.feature_view.feature_view import check_schema from typing import Annotated -import pandas as pd +from aligned.lazy_imports import pandas as pd @feature_view( diff --git a/aligned/feature_view/tests/test_combined_view.py b/aligned/feature_view/tests/test_combined_view.py deleted file mode 100644 index 51947934..00000000 --- a/aligned/feature_view/tests/test_combined_view.py +++ /dev/null @@ -1,103 +0,0 @@ -import pytest - -from aligned import ContractStore, feature_view, Int32, Int64, FileSource - - -@pytest.mark.asyncio -async def test_combined_view(combined_feature_store: ContractStore) -> None: - - entities = {'passenger_id': [1, 2, 3, 4, None], 'scan_id': [842302, 84300903, 843786, None, 842301]} - result_job = combined_feature_store.features_for( - entities, - features=[ - 'combined:some_feature', - 'combined:other_feature', - ], - ) - result = await result_job.log_each_job().to_pandas() - - assert 'some_feature' in result.columns - assert 'other_feature' in result.columns - - assert result.shape == (len(entities['passenger_id']), 4) - assert result.isna().sum().sum() == 4 + 2 - - -@pytest.mark.asyncio -async def test_combined_view_get_all_features(combined_feature_store: ContractStore) -> None: - - entities = {'passenger_id': [1, 2, 3, 4, None], 'scan_id': [842302, 84300903, 843786, None, 842301]} - result = await combined_feature_store.features_for(entities, features=['combined:*']).to_pandas() - - assert 'some_feature' in result.columns - assert 'other_feature' in result.columns - - assert result.shape == (len(entities['passenger_id']), 4) - assert result.isna().sum().sum() == 4 + 2 - - -@pytest.mark.asyncio -async def test_new_combined_solution() -> None: - import pandas as pd - - expected_df = pd.DataFrame({'other_id': [6, 5], 'new_feature': [600, 400], 'some_id': [1, 2]}) - - @feature_view(name='test', source=FileSource.csv_at('test_data/test.csv')) - class Test: - some_id = Int64().as_entity() - - feature = Int64() - - derived_feature = feature * 10 - - @feature_view(name='other', source=FileSource.csv_at('test_data/other.csv')) - class Other: - - other_id = Int64().as_entity() - some_id = Int64() - - other_feature = Int64() - - test_feature = other_feature * 10 - - test = Test() - other = Other() - - @feature_view(name='combined', source=Test.join(other, on=test.some_id)) # type: ignore - class Combined: - some_id = Int64().as_entity() - - new_feature = test.derived_feature * other.test_feature - - result = await Combined.query().all().to_pandas() # type: ignore - - new_df = result.sort_values('some_id', 
ascending=True)[expected_df.columns].reset_index(drop=True) - assert new_df.equals(expected_df) - - -@pytest.mark.asyncio -async def test_view_reference() -> None: - import pandas as pd - - expected_df = pd.DataFrame({'new_feature': [100, 100, 100], 'some_id': [1, 2, 3]}) - - @feature_view(name='test', source=FileSource.csv_at('test_data/test.csv')) - class Test: - some_id = Int32().as_entity() - - feature = Int32() - - derived_feature = feature * 10 - - test = Test() - - @feature_view(name='test_ref', source=Test) # type: ignore - class TestRef: - some_id = Int32().as_entity() - - new_feature = test.derived_feature * 5 - - result = await TestRef.query().all().to_pandas() # type: ignore - result['new_feature'] = result['new_feature'].astype('int64') - result['some_id'] = result['some_id'].astype('int64') - assert result[expected_df.columns].equals(expected_df) diff --git a/aligned/jobs/tests/test_derived_job.py b/aligned/jobs/tests/test_derived_job.py index e383fd07..93c6f946 100644 --- a/aligned/jobs/tests/test_derived_job.py +++ b/aligned/jobs/tests/test_derived_job.py @@ -1,8 +1,11 @@ +from __future__ import annotations + from datetime import datetime, timedelta -import pandas as pd +import polars as pl import pytest +from aligned.lazy_imports import pandas as pd from aligned import feature_view, Float, String, FileSource from aligned.compiler.model import model_contract from aligned.feature_store import ContractStore @@ -125,6 +128,60 @@ def feature_store() -> ContractStore: return store +@pytest.mark.asyncio +async def test_without_derived_features(): + df = await Transaction.query().all().to_polars() + + assert 'is_expence' in df.columns + + without_job = Transaction.query().all().remove_derived_features() + without_df = await without_job.to_polars() + + assert 'is_expence' not in without_df.columns + + feature_columns = without_job.request_result.feature_columns + assert 'is_expence' not in feature_columns + + +def test_with_schema() -> None: + + Test = Transaction.with_schema( + name='test', + source=FileSource.parquet_at('test_data/transactions.parquet'), + entities=dict( # noqa: C408 + other_id=String(), + ), + additional_features=dict( # noqa: C408 + other=Float(), + ), + ) + transaction = Transaction.compile() + + assert len(transaction.derived_features) > 1 + + view = Test.compile() + assert len(view.entities) == 1 + + assert len(view.derived_features) == 0 + assert len(view.aggregated_features) == 0 + assert ( + len(view.features) == len({feat.name for feat in transaction.full_schema - transaction.entities}) + 1 + ) + + assert list(view.entities)[0].name == 'other_id' + + +@pytest.mark.asyncio +async def test_polars_filter_source() -> None: + + Expences = Transaction.filter(name='expence', where=pl.col('amount') > 0) # type: ignore + data = await Expences.query().all().to_lazy_polars() + + df = data.collect() + + assert df.height == 4 + + @pytest.mark.asyncio async def test_aggregate_over_derived() -> None: diff --git a/aligned/lazy_imports.py b/aligned/lazy_imports.py new file mode 100644 index 00000000..c7e987e4 --- /dev/null +++ b/aligned/lazy_imports.py @@ -0,0 +1,141 @@ +# Code copied from the polars package, as they handle optional deps very well +from __future__ import annotations + +import re +import sys +from importlib import import_module +from importlib.util import find_spec +from types import ModuleType +from typing import TYPE_CHECKING, Any, ClassVar + + +class _LazyModule(ModuleType): + """ + Module that can act both as a lazy-loader and as a proxy. 
+ + Notes + ----- + We do NOT register this module with `sys.modules` so as not to cause + confusion in the global environment. This way we have a valid proxy + module for our own use, but it lives *exclusively* within polars. + """ + + __lazy__ = True + + _mod_pfx: ClassVar[dict[str, str]] = { + 'numpy': 'np.', + 'pandas': 'pd.', + 'pyarrow': 'pa.', + } + + def __init__( + self, + module_name: str, + *, + module_available: bool, + ) -> None: + """ + Initialise lazy-loading proxy module. + + Parameters + ---------- + module_name : str + the name of the module to lazy-load (if available). + + module_available : bool + indicate if the referenced module is actually available (we will proxy it + in both cases, but raise a helpful error when invoked if it doesn't exist). + """ + self._module_available = module_available + self._module_name = module_name + self._globals = globals() + super().__init__(module_name) + + def _import(self) -> ModuleType: + # import the referenced module, replacing the proxy in this module's globals + module = import_module(self.__name__) + self._globals[self._module_name] = module + self.__dict__.update(module.__dict__) + return module + + def __getattr__(self, name: str) -> Any: + # have "hasattr('__wrapped__')" return False without triggering import + # (it's for decorators, not modules, but keeps "make doctest" happy) + if name == '__wrapped__': + msg = f"{self._module_name!r} object has no attribute {name!r}" + raise AttributeError(msg) + + # accessing the proxy module's attributes triggers import of the real thing + if self._module_available: + # import the module and return the requested attribute + module = self._import() + return getattr(module, name) + + # user has not installed the proxied/lazy module + elif name == '__name__': + return self._module_name + elif re.match(r'^__\w+__$', name) and name != '__version__': + # allow some minimal introspection on private module + # attrs to avoid unnecessary error-handling elsewhere + return None + else: + # all other attribute access raises a helpful exception + pfx = self._mod_pfx.get(self._module_name, '') + msg = f"{pfx}{name} requires {self._module_name!r} module to be installed" + raise ModuleNotFoundError(msg) from None + + +def _lazy_import(module_name: str) -> tuple[ModuleType, bool]: + """ + Lazy import the given module; avoids up-front import costs. + + Parameters + ---------- + module_name : str + name of the module to import, eg: "pyarrow". + + Notes + ----- + If the requested module is not available (eg: has not been installed), a proxy + module is created in its place, which raises an exception on any attribute + access. This allows for import and use as normal, without requiring explicit + guard conditions - if the module is never used, no exception occurs; if it + is, then a helpful exception is raised. + + Returns + ------- + tuple of (Module, bool) + A lazy-loading module and a boolean indicating if the requested/underlying + module exists (if not, the returned module is a proxy). 
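The rest of this diff consumes these proxies through a single import site, so call-sites keep the familiar `pd.` prefix and only fail, with the helpful message described above, if pandas is genuinely absent:

```python
from aligned.lazy_imports import pandas as pd  # a proxy; nothing is imported yet

def to_frame(data: dict) -> 'pd.DataFrame':
    # First attribute access triggers the real import, or raises
    # "pd.DataFrame requires 'pandas' module to be installed"
    return pd.DataFrame(data)
```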
+ """ + # check if module is LOADED + if module_name in sys.modules: + return sys.modules[module_name], True + + # check if module is AVAILABLE + try: + module_spec = find_spec(module_name) + module_available = not (module_spec is None or module_spec.loader is None) + except ModuleNotFoundError: + module_available = False + + # create lazy/proxy module that imports the real one on first use + # (or raises an explanatory ModuleNotFoundError if not available) + return ( + _LazyModule( + module_name=module_name, + module_available=module_available, + ), + module_available, + ) + + +_PANDAS_AVAILABLE = True +_PANDERA_AVAILABLE = True + +if TYPE_CHECKING: + import pandas + import pandera +else: + pandas, _PANDAS_AVAILABLE = _lazy_import('pandas') + pandera, _PANDERA_AVAILABLE = _lazy_import('pandera') diff --git a/aligned/local/job.py b/aligned/local/job.py index 6a85eec6..4e127c50 100644 --- a/aligned/local/job.py +++ b/aligned/local/job.py @@ -1,18 +1,18 @@ +from __future__ import annotations from dataclasses import dataclass, field from typing import Callable from pytz import timezone from datetime import datetime -import pandas as pd import polars as pl +from aligned.lazy_imports import pandas as pd from aligned.request.retrival_request import AggregatedFeature, AggregateOver, RetrivalRequest from aligned.retrival_job import RequestResult, RetrivalJob from aligned.schemas.date_formatter import DateFormatter from aligned.schemas.feature import Feature -from aligned.sources.local import DataFileReference -from aligned.schemas.constraints import Optional +from aligned.data_file import DataFileReference import logging logger = logging.getLogger(__name__) @@ -25,10 +25,14 @@ class LiteralRetrivalJob(RetrivalJob): def __init__(self, df: pl.LazyFrame | pd.DataFrame, requests: list[RetrivalRequest]) -> None: self.requests = requests - if isinstance(df, pd.DataFrame): + if isinstance(df, pl.DataFrame): + self.df = df.lazy() + elif isinstance(df, pl.LazyFrame): + self.df = df + elif isinstance(df, pd.DataFrame): self.df = pl.from_pandas(df).lazy() else: - self.df = df + raise ValueError(f"Unsupported type {type(df)}") @property def loaded_columns(self) -> list[str]: @@ -67,7 +71,7 @@ async def aggregate(request: RetrivalRequest, core_data: pl.LazyFrame) -> pl.Laz raise ValueError(f'Aggregation needs to be an expression, got {tran}') exprs.append(tran.alias(feat.name)) - return core_data.groupby(first_over.group_by_names).agg(exprs) + return core_data.group_by(first_over.group_by_names).agg(exprs) group_by_names = first_over.group_by_names @@ -93,18 +97,18 @@ async def aggregate(request: RetrivalRequest, core_data: pl.LazyFrame) -> pl.Laz if over.window.every_interval: sub = ( - sorted_data.groupby_dynamic( + sorted_data.group_by_dynamic( time_name, every=over.window.every_interval, period=over.window.time_window, - by=over.group_by_names, + group_by=over.group_by_names, offset=-over.window.time_window, ) .agg(exprs) .with_columns(pl.col(time_name) + over.window.time_window) ).filter(pl.col(time_name) <= sorted_data.select(pl.col(time_name).max()).collect()[0, 0]) else: - sub = sorted_data.groupby_rolling( + sub = sorted_data.group_by_rolling( time_name, period=over.window.time_window, by=over.group_by_names, @@ -202,6 +206,7 @@ def describe(self) -> str: async def file_transform_polars(self, df: pl.LazyFrame) -> pl.LazyFrame: from aligned.data_source.batch_data_source import ColumnFeatureMappable + from aligned.sources.local import fill_missing_in_request if not self.request.features_to_include: 
return df @@ -221,25 +226,12 @@ async def file_transform_polars(self, df: pl.LazyFrame) -> pl.LazyFrame: request_features = self.source.feature_identifier_for(all_names) feature_column_map = dict(zip(all_names, request_features)) + df = fill_missing_in_request(self.request, df, feature_column_map) renames = { org_name: wanted_name for org_name, wanted_name in zip(request_features, all_names) if org_name != wanted_name } - - optional_constraint = Optional() - optional_features = [ - feature - for feature in self.request.features - if ( - feature.constraints - and optional_constraint in feature.constraints - and feature_column_map.get(feature.name, feature.name) not in df.columns - ) - ] - if optional_features: - df = df.with_columns([pl.lit(None).alias(feature.name) for feature in optional_features]) - if renames: df = df.rename(mapping=renames) @@ -280,6 +272,7 @@ def retrival_requests(self) -> list[RetrivalRequest]: def file_transform_polars(self, df: pl.LazyFrame) -> pl.LazyFrame: from aligned.data_source.batch_data_source import ColumnFeatureMappable + from aligned.sources.local import fill_missing_in_request if not self.request.features_to_include: return df @@ -296,18 +289,7 @@ def file_transform_polars(self, df: pl.LazyFrame) -> pl.LazyFrame: request_features = self.source.feature_identifier_for(all_names) feature_column_map = dict(zip(all_names, request_features)) - optional_constraint = Optional() - optional_features = [ - feature - for feature in self.request.features - if ( - feature.constraints - and optional_constraint in feature.constraints - and feature_column_map.get(feature.name, feature.name) not in df.columns - ) - ] - if optional_features: - df = df.with_columns([pl.lit(None).alias(feature.name) for feature in optional_features]) + df = fill_missing_in_request(self.request, df, feature_column_map) df = df.rename(mapping=dict(zip(request_features, all_names))) event_timestamp_column = self.request.event_timestamp.name @@ -319,8 +301,8 @@ def file_transform_polars(self, df: pl.LazyFrame) -> pl.LazyFrame: end_date = self.end_date.replace(tzinfo=None) else: tz = timezone(time_zone) - start_date = tz.localize(self.start_date) - end_date = tz.localize(self.end_date) + start_date = self.start_date.astimezone(tz) + end_date = self.end_date.astimezone(tz) return df.filter(pl.col(event_timestamp_column).is_between(start_date, end_date)) @@ -363,7 +345,7 @@ async def aggregate_over( else: raise NotImplementedError('Only expressions are supported for file data source') - return subset.groupby(group_by).agg(transformations) + return subset.group_by(group_by).agg(transformations) @dataclass @@ -399,10 +381,18 @@ async def file_transformations(self, df: pl.LazyFrame) -> pl.LazyFrame: pl.LazyFrame: The subset of the source which is needed for the request """ from aligned.data_source.batch_data_source import ColumnFeatureMappable + from aligned.sources.local import fill_missing_in_request all_features: set[Feature] = set() + date_features: set[str] = set() + for request in self.requests: all_features.update(request.all_required_features) + if request.event_timestamp: + date_features.add(request.event_timestamp.name) + for feature in request.features: + if feature.dtype.is_datetime: + date_features.add(feature.name) result = await self.facts.to_lazy_polars() event_timestamp_col = 'aligned_event_timestamp' @@ -422,6 +412,7 @@ async def file_transformations(self, df: pl.LazyFrame) -> pl.LazyFrame: row_id_name = 'row_id' result = result.with_row_index(row_id_name) + for request in 
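Both call-sites above now delegate optional-column handling to `fill_missing_in_request`, whose body is not part of this diff. Presumably it is the deleted inline logic factored out, roughly along these lines:

```python
import polars as pl
from aligned.request.retrival_request import RetrivalRequest
from aligned.schemas.constraints import Optional

# Presumed shape of aligned.sources.local.fill_missing_in_request (not shown in this diff)
def fill_missing_in_request(
    request: RetrivalRequest, df: pl.LazyFrame, feature_column_map: dict[str, str]
) -> pl.LazyFrame:
    optional = Optional()
    missing = [
        feature.name
        for feature in request.features
        if feature.constraints
        and optional in feature.constraints
        and feature_column_map.get(feature.name, feature.name) not in df.columns
    ]
    if missing:
        df = df.with_columns([pl.lit(None).alias(name) for name in missing])
    return df
```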
self.requests: entity_names = request.entity_names @@ -433,17 +424,7 @@ async def file_transformations(self, df: pl.LazyFrame) -> pl.LazyFrame: request_features = self.source.feature_identifier_for(list(all_names)) feature_column_map = dict(zip(all_names, request_features)) - optional_constraint = Optional() - optional_features = [ - feature - for feature in request.features - if feature.constraints is not None - and optional_constraint in feature.constraints - and feature_column_map.get(feature.name, feature.name) not in df.columns - ] - if optional_features: - df = df.with_columns([pl.lit(None).alias(feature.name) for feature in optional_features]) - + df = fill_missing_in_request(request, df, feature_column_map) for derived_feature in request.derived_features: if derived_feature.name in df.columns: all_names.add(derived_feature.name) @@ -475,7 +456,6 @@ async def file_transformations(self, df: pl.LazyFrame) -> pl.LazyFrame: if isinstance(self.source, ColumnFeatureMappable): request_features = self.source.feature_identifier_for(all_names) - df = decode_timestamps(df, request, self.date_formatter) feature_df = df.select(request_features) renames = { @@ -486,6 +466,8 @@ async def file_transformations(self, df: pl.LazyFrame) -> pl.LazyFrame: if renames: feature_df = feature_df.rename(renames) + feature_df = decode_timestamps(feature_df, request, self.date_formatter) + for entity in request.entities: feature_df = feature_df.with_columns(pl.col(entity.name).cast(entity.dtype.polars_type)) result = result.with_columns(pl.col(entity.name).cast(entity.dtype.polars_type)) @@ -519,13 +501,17 @@ async def file_transformations(self, df: pl.LazyFrame) -> pl.LazyFrame: new_result = new_result.filter( pl.col(field).is_null() | (pl.col(field) <= pl.col(event_timestamp_col)) ) - new_result = new_result.sort(field, descending=True).select(pl.exclude(field)) + new_result = new_result.sort(field, descending=True, nulls_last=True).select( + pl.exclude(field) + ) elif request.event_timestamp: new_result = new_result.sort([row_id_name, request.event_timestamp.name], descending=True) unique = new_result.unique(subset=row_id_name, keep='first') column_selects.remove('row_id') - result = result.join(unique.select(pl.exclude(column_selects)), on=row_id_name, how='left') + result = result.join( + unique.select(pl.exclude(column_selects)), on=row_id_name, how='left', coalesce=True + ) result = result.select(pl.exclude('.*_right')) if did_rename_event_timestamp: diff --git a/aligned/local/tests/test_jobs.py b/aligned/local/tests/test_jobs.py index 50d0436f..bd85e407 100644 --- a/aligned/local/tests/test_jobs.py +++ b/aligned/local/tests/test_jobs.py @@ -1,6 +1,8 @@ -import pandas as pd +from __future__ import annotations + import pytest +from aligned.lazy_imports import pandas as pd from aligned import ContractStore, FileSource from aligned.local.job import FileFullJob from aligned.retrival_job import RetrivalRequest diff --git a/aligned/psql/jobs.py b/aligned/psql/jobs.py index 1134b070..7fc8e7fa 100644 --- a/aligned/psql/jobs.py +++ b/aligned/psql/jobs.py @@ -4,9 +4,9 @@ import logging from dataclasses import dataclass, field -import pandas as pd import polars as pl +from aligned.lazy_imports import pandas as pd from aligned.request.retrival_request import RequestResult, RetrivalRequest from aligned.retrival_job import RetrivalJob from aligned.schemas.derivied_feature import AggregatedFeature, AggregateOver, DerivedFeature @@ -112,6 +112,7 @@ class PostgreSqlJob(RetrivalJob): query: str requests: 
list[RetrivalRequest] = field(default_factory=list) + @property def request_result(self) -> RequestResult: return RequestResult.from_request_list(self.retrival_requests) @@ -141,7 +142,7 @@ async def to_lazy_polars(self) -> pl.LazyFrame: def describe(self) -> str: return f'PostgreSQL Job: \n{self.query}\n' - def filter(self, condition: str | Feature | DerivedFeature) -> RetrivalJob: + def filter(self, condition: str | Feature | DerivedFeature | pl.Expr) -> RetrivalJob: query = f'SELECT * FROM ({self.query}) as values WHERE ' @@ -381,7 +382,7 @@ def sql_aggregated_request( aggregates = { SqlColumn( - feature.derived_feature.transformation.as_psql(), + feature.derived_feature.transformation.as_psql(), # type: ignore feature.name, ) for feature in features @@ -416,7 +417,7 @@ def sql_aggregated_request( join_conditions.append(event_timestamp_clause) field_selects = request.all_required_feature_names.union({'entities.*'}) - field_identifiers = source.feature_identifier_for(field_selects) + field_identifiers = source.feature_identifier_for(list(field_selects)) selects = { SqlColumn(db_field_name, feature) for feature, db_field_name in zip(field_selects, field_identifiers) diff --git a/aligned/redis/job.py b/aligned/redis/job.py index 3a143fdf..30203ba6 100644 --- a/aligned/redis/job.py +++ b/aligned/redis/job.py @@ -1,8 +1,10 @@ +from __future__ import annotations + from dataclasses import dataclass -import pandas as pd import polars as pl +from aligned.lazy_imports import pandas as pd from aligned.request.retrival_request import RetrivalRequest from aligned.retrival_job import RequestResult, RetrivalJob from aligned.schemas.feature import FeatureType @@ -69,7 +71,7 @@ async def to_lazy_polars(self) -> pl.LazyFrame: ).select(pl.exclude(redis_combine_id)) for feature in request.returned_features: - if feature.dtype == FeatureType.bool(): + if feature.dtype == FeatureType.boolean(): reqs = reqs.with_columns(pl.col(feature.name).cast(pl.Int8).cast(pl.Boolean)) elif reqs[feature.name].dtype == pl.Utf8 and ( feature.dtype == FeatureType.int32() or feature.dtype == FeatureType.int64() diff --git a/aligned/redshift/jobs.py b/aligned/redshift/jobs.py index 11d49d90..1a0d596f 100644 --- a/aligned/redshift/jobs.py +++ b/aligned/redshift/jobs.py @@ -3,9 +3,9 @@ import logging from dataclasses import dataclass, field -import pandas as pd import polars as pl +from aligned.lazy_imports import pandas as pd from aligned.psql.jobs import PostgreSqlJob from aligned.redshift.sql_job import SqlColumn, TableFetch from aligned.request.retrival_request import RequestResult, RetrivalRequest diff --git a/aligned/request/retrival_request.py b/aligned/request/retrival_request.py index 47e2184e..2c2b9c42 100644 --- a/aligned/request/retrival_request.py +++ b/aligned/request/retrival_request.py @@ -113,7 +113,7 @@ def is_dependent_on_agg_feature(feature: DerivedFeature) -> bool: @property def all_returned_columns(self) -> list[str]: - return [feature.name for feature in self.all_returned_features] + return sorted([feature.name for feature in self.all_returned_features]) @property def returned_features(self) -> set[Feature]: @@ -146,8 +146,10 @@ def all_required_feature_names(self) -> set[str]: @property def all_features(self) -> set[Feature]: - return self.features.union(self.derived_features).union( - {feature.derived_feature for feature in self.aggregated_features} + return self.features.union( + {feat for feat in self.derived_features if not feat.name.isnumeric()} + ).union( + {feature.derived_feature for feature in 
self.aggregated_features if not feature.name.isnumeric()} ) @property @@ -308,25 +310,30 @@ def rename_entities(self, mapping: dict[str, str]) -> 'RetrivalRequest': @staticmethod def unsafe_combine(requests: list['RetrivalRequest']) -> 'RetrivalRequest': - result_request = RetrivalRequest( - name=requests[0].name, - location=requests[0].location, - entities=set(), - features=set(), - derived_features=set(), - aggregated_features=set(), - event_timestamp_request=requests[0].event_timestamp_request, - ) + entities = set() + features = set() + derived_features = set() + aggregated_features = set() + event_timestamp_request = None + for request in requests: - result_request.derived_features.update(request.derived_features) - result_request.features.update(request.features) - result_request.entities.update(request.entities) - result_request.aggregated_features.update(request.aggregated_features) + derived_features.update(request.derived_features) + features.update(request.features) + entities.update(request.entities) + aggregated_features.update(request.aggregated_features) - if result_request.event_timestamp_request is None: - result_request.event_timestamp_request = request.event_timestamp_request + if event_timestamp_request is None: + event_timestamp_request = request.event_timestamp_request - return result_request + return RetrivalRequest( + name=requests[0].name, + location=requests[0].location, + entities=entities, + features=features, + derived_features=derived_features, + aggregated_features=aggregated_features, + event_timestamp_request=event_timestamp_request, + ) @dataclass diff --git a/aligned/retrival_job.py b/aligned/retrival_job.py index 6fbd3eda..059359f0 100644 --- a/aligned/retrival_job.py +++ b/aligned/retrival_job.py @@ -1,6 +1,8 @@ from __future__ import annotations +from io import StringIO from aligned.schemas.date_formatter import DateFormatter +from pytz import timezone import asyncio import logging import timeit @@ -8,10 +10,11 @@ from collections import defaultdict from dataclasses import dataclass, field from datetime import datetime -from typing import TYPE_CHECKING, Callable, Collection, Union, TypeVar, Coroutine, Any +from typing import TYPE_CHECKING, Callable, Collection, Literal, Union, TypeVar, Coroutine, Any -import pandas as pd import polars as pl +from aligned.lazy_imports import pandas as pd + from polars.type_aliases import TimeUnit from prometheus_client import Histogram @@ -21,13 +24,7 @@ from aligned.schemas.feature import Feature, FeatureType from aligned.schemas.derivied_feature import DerivedFeature from aligned.schemas.vector_storage import VectorIndex -from aligned.split_strategy import ( - SplitDataSet, - SplitStrategy, - SupervisedDataSet, - TrainTestSet, - TrainTestValidateSet, -) +from aligned.split_strategy import SupervisedDataSet from aligned.validation.interface import Validator, PolarsValidator if TYPE_CHECKING: @@ -53,7 +50,7 @@ def split( column = data[event_timestamp_column] if column.dtype != 'datetime64[ns]': column = pd.to_datetime(data[event_timestamp_column]) - data = data.iloc[column.sort_values().index] + data = data.iloc[column.sort_values().index] # type: ignore group_size = data.shape[0] start_index = round(group_size * start_ratio) @@ -83,35 +80,6 @@ def subset_polars( return data[start_index:end_index] -def split_polars( - data: pl.DataFrame, start_ratio: float, end_ratio: float, event_timestamp_column: str | None = None -) -> pd.Series: - - row_name = 'row_nr' - data = data.with_row_count(row_name) - - if 
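The `unsafe_combine` rewrite builds the merged request up front instead of mutating a half-constructed instance; behaviour is unchanged (the first request's name and location win, everything else is unioned). The new `write_to_source` further down in this diff leans on it like so:

```python
from aligned.request.retrival_request import RetrivalRequest

async def write_all(job, source) -> None:
    # Mirrors the write_to_source hunk later in this diff: merge the
    # per-view requests so the source receives exactly one request.
    requests = job.retrival_requests
    request = RetrivalRequest.unsafe_combine(requests) if len(requests) > 1 else requests[0]
    await source.insert(job, request)
```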
event_timestamp_column: - data = data.sort(event_timestamp_column) - # values = data.select( - # [ - # pl.col(event_timestamp_column).quantile(start_ratio).alias('start_value'), - # pl.col(event_timestamp_column).quantile(end_ratio).alias('end_value'), - # ] - # ) - # return data.filter( - # pl.col(event_timestamp_column).is_between(values[0, 'start_value'], values[0, 'end_value']) - # ).collect() - - group_size = data.shape[0] - start_index = round(group_size * start_ratio) - end_index = round(group_size * end_ratio) - - if end_index >= group_size: - return data[start_index:][row_name].to_pandas() - else: - return data[start_index:end_index][row_name].to_pandas() - - def fraction_from_job(job: RetrivalJob) -> float | None: if isinstance(job, SubsetJob): return job.fraction @@ -142,6 +110,7 @@ async def store_dataset_at_directory( dataset_store: DatasetStore | StorageFileReference, metadata: DatasetMetadata | None = None, id: str | None = None, + tags: list[str] | None = None, ) -> TrainTestJob: from uuid import uuid4 from aligned.schemas.folder import DatasetMetadata @@ -151,6 +120,7 @@ async def store_dataset_at_directory( id=id or str(uuid4()), name='train_test - ' + datetime.now().strftime('%Y-%m-%d %H:%M:%S'), description='A train and test dataset.', + tags=tags, ) run_dir = directory.sub_directory(metadata.id) @@ -176,7 +146,7 @@ async def store_dataset( StorageFileSource, DatasetStore, ) - from aligned.data_source.batch_data_source import BatchDataSource + from aligned.data_source.batch_data_source import CodableBatchDataSource request_result = self.train_job.request_result @@ -193,16 +163,16 @@ async def store_dataset( if test_size is None: test_size = fraction_from_job(self.test_job) - if not isinstance(test_source, BatchDataSource): + if not isinstance(test_source, CodableBatchDataSource): raise ValueError('test_source should be a BatchDataSource') - if not isinstance(train_source, BatchDataSource): + if not isinstance(train_source, CodableBatchDataSource): raise ValueError('train_source should be a BatchDataSource') test_metadata = TrainDatasetMetadata( id=metadata.id, name=metadata.name, - request_result=request_result, + content=request_result, description=metadata.description, train_size_fraction=train_size, test_size_fraction=test_size, @@ -263,6 +233,7 @@ async def store_dataset_at_directory( dataset_store: DatasetStore | StorageFileReference, metadata: DatasetMetadata | None = None, id: str | None = None, + tags: list[str] | None = None, ) -> TrainTestValidateJob: from uuid import uuid4 from aligned.schemas.folder import DatasetMetadata @@ -272,6 +243,7 @@ async def store_dataset_at_directory( id=id or str(uuid4()), name='train_test_validate - ' + datetime.now().strftime('%Y-%m-%d %H:%M:%S'), description='A train, test and validation dataset.', + tags=tags, ) run_dir = directory.sub_directory(metadata.id) @@ -300,7 +272,7 @@ async def store_dataset( DatasetMetadata, DatasetStore, ) - from aligned.data_source.batch_data_source import BatchDataSource + from aligned.data_source.batch_data_source import CodableBatchDataSource from aligned.sources.local import StorageFileSource from uuid import uuid4 @@ -329,19 +301,19 @@ async def store_dataset( if validation_size is None: validation_size = fraction_from_job(self.validate_job) - if not isinstance(test_source, BatchDataSource): + if not isinstance(test_source, CodableBatchDataSource): raise ValueError('test_source should be a BatchDataSource') - if not isinstance(train_source, BatchDataSource): + if not isinstance(train_source, 
CodableBatchDataSource): raise ValueError('train_source should be a BatchDataSource') - if not isinstance(validate_source, BatchDataSource): + if not isinstance(validate_source, CodableBatchDataSource): raise ValueError('validation_source should be a BatchDataSource') test_metadata = TrainDatasetMetadata( id=metadata.id, name=metadata.name, - request_result=request_result, + content=request_result, description=metadata.description, train_size_fraction=train_size, test_size_fraction=test_size, @@ -439,9 +411,6 @@ def should_filter_null_targets(self, should_filter: bool) -> SupervisedJob: def request_result(self) -> RequestResult: return self.job.request_result - def train_set(self, train_size: float) -> SupervisedTrainJob: - return SupervisedTrainJob(self, train_size) - def train_test( self, train_size: float, splitter_factory: Callable[[SplitConfig], SplitterCallable] | None = None ) -> TrainTestJob: @@ -569,88 +538,7 @@ def describe(self) -> str: return f'{self.job.describe()} with target columns {self.target_columns}' -@dataclass -class SupervisedTrainJob: - - job: SupervisedJob - train_size: float - - async def to_pandas(self) -> TrainTestSet[pd.DataFrame]: - core_data = await self.job.to_lazy_polars() - data = core_data.data.collect() - data = data.to_pandas() - - test_ratio_start = self.train_size - return TrainTestSet( - data=data, - entity_columns=core_data.entity_columns, - features=core_data.feature_columns, - target_columns=core_data.target_columns, - train_index=split(data, 0, test_ratio_start, core_data.event_timestamp_column), - test_index=split(data, test_ratio_start, 1, core_data.event_timestamp_column), - event_timestamp_column=core_data.event_timestamp_column, - ) - - async def to_polars(self) -> TrainTestSet[pl.DataFrame]: - # Use the pandas method, as the split is not created for polars yet - # A but unsure if I should use the same index concept for polars - core_data = await self.job.to_lazy_polars() - - data = core_data.data.collect() - - return TrainTestSet( - data=data, - entity_columns=core_data.entity_columns, - features=core_data.feature_columns, - target_columns=core_data.target_columns, - train_index=split_polars(data, 0, self.train_size, core_data.event_timestamp_column), - test_index=split_polars(data, self.train_size, 1, core_data.event_timestamp_column), - event_timestamp_column=core_data.event_timestamp_column, - ) - - def validation_set(self, validation_size: float) -> SupervisedValidationJob: - return SupervisedValidationJob(self, validation_size) - - -@dataclass -class SupervisedValidationJob: - - job: SupervisedTrainJob - validation_size: float - - async def to_pandas(self) -> TrainTestValidateSet[pd.DataFrame]: - data = await self.job.to_pandas() - - test_start = self.job.train_size - validate_start = test_start + self.validation_size - - return TrainTestValidateSet( - data=data.data, - entity_columns=set(data.entity_columns), - features=data.features, - target=data.target_columns, - train_index=split(data.data, 0, test_start, data.event_timestamp_column), - test_index=split(data.data, test_start, validate_start, data.event_timestamp_column), - validate_index=split(data.data, validate_start, 1, data.event_timestamp_column), - event_timestamp_column=data.event_timestamp_column, - ) - - async def to_polars(self) -> TrainTestValidateSet[pl.DataFrame]: - data = await self.to_pandas() - - return TrainTestValidateSet( - data=pl.from_pandas(data.data), - entity_columns=data.entity_columns, - features=data.feature_columns, - target=data.labels, - 
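With `SupervisedTrainJob` and `SupervisedValidationJob` deleted above, splitting now lives entirely on the `TrainTestJob` path. A sketch, assuming some retrieval job `job` with a `target` column (the `train_job`/`test_job` field names are as they appear in this diff):

```python
async def make_split(job) -> None:
    # 80/20 split over the in-memory cached job
    split = job.train_test(train_size=0.8, target_column='target')

    train_df = await split.train_job.to_polars()
    test_df = await split.test_job.to_polars()
    print(train_df.height, test_df.height)
```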
train_index=data.train_index, - test_index=data.test_index, - validate_index=data.validate_index, - event_timestamp_column=data.event_timestamp_column, - ) - - -ConvertableToRetrivalJob = Union[dict[str, list], pd.DataFrame, pl.DataFrame, pl.LazyFrame] +ConvertableToRetrivalJob = Union[dict[str, list], 'pd.DataFrame', pl.DataFrame, pl.LazyFrame] class RetrivalJob(ABC): @@ -689,6 +577,9 @@ def describe(self) -> str: raise NotImplementedError(f'Describe not implemented for {self.__class__.__name__}') def remove_derived_features(self) -> RetrivalJob: + return self.without_derived_features() + + def without_derived_features(self) -> RetrivalJob: if isinstance(self, ModificationJob): return self.copy_with(self.job.remove_derived_features()) return self @@ -754,7 +645,11 @@ def split( return (job, job.with_dataset_index(1)) def join( - self, job: RetrivalJob, method: str, left_on: str | list[str], right_on: str | list[str] + self, + job: RetrivalJob, + method: Literal['inner', 'left', 'outer'], + left_on: str | list[str], + right_on: str | list[str], ) -> RetrivalJob: if isinstance(left_on, str): @@ -768,7 +663,11 @@ def join( def on_load(self, on_load: Callable[[], Coroutine[Any, Any, None]]) -> RetrivalJob: return OnLoadJob(self, on_load) - def filter(self, condition: str | Feature | DerivedFeature) -> RetrivalJob: + def filter(self, condition: str | Feature | DerivedFeature | pl.Expr) -> RetrivalJob: + """ + Filters based on a condition referencing either a feature, + a feature name, or a polars expression to filter on. + """ if isinstance(self, ModificationJob): return self.copy_with(self.job.filter(condition)) return FilteredJob(self, condition) @@ -794,12 +693,6 @@ def cached_at(self, location: DataFileReference | str) -> RetrivalJob: else: return FileCachedJob(location, self).derive_features() - def test_size(self, test_size: float, target_column: str) -> SupervisedTrainJob: - return SupervisedJob(self, {target_column}).train_set(train_size=1 - test_size) - - def train_set(self, train_size: float, target_column: str) -> SupervisedTrainJob: - return SupervisedJob(self, {target_column}).train_set(train_size=train_size) - def train_test(self, train_size: float, target_column: str) -> TrainTestJob: cached = InMemoryCacheJob(self) @@ -1010,7 +903,13 @@ async def write_to_source(self, source: WritableFeatureSource | DataFileReferenc if isinstance(source, DataFileReference): await source.write_polars(await self.to_lazy_polars()) else: - await source.insert(self, self.retrival_requests) + requests = self.retrival_requests + if len(requests) > 1: + request = RetrivalRequest.unsafe_combine(requests) + else: + assert len(requests) == 1, 'No requests. 
This should not happen and is a bug' + request = requests[0] + await source.insert(self, request) JobType = TypeVar('JobType') @@ -1130,7 +1029,7 @@ class ReturnInvalidJob(RetrivalJob, ModificationJob): def describe(self) -> str: expressions = [ - expr.is_not().alias(f"not {name}") + expr.not_().alias(f"not {name}") for expr, name in polars_filter_expressions_from(list(self.request_result.features)) ] @@ -1138,7 +1037,7 @@ def describe(self) -> str: async def to_lazy_polars(self) -> pl.LazyFrame: raw_exprs = polars_filter_expressions_from(list(self.request_result.features)) - expressions = [expr.is_not().alias(f"not {name}") for expr, name in raw_exprs] + expressions = [expr.not_().alias(f"not {name}") for expr, name in raw_exprs] if self.should_return_validation: condition_cols = [f"not {name}" for _, name in raw_exprs] @@ -1163,17 +1062,17 @@ async def to_pandas(self) -> pd.DataFrame: class CustomPolarsJob(RetrivalJob, ModificationJob): job: RetrivalJob - polars_method: CustomPolarsTransform + polars_function: CustomPolarsTransform async def to_lazy_polars(self) -> pl.LazyFrame: import inspect df = await self.job.to_lazy_polars() - if inspect.iscoroutinefunction(self.polars_method): - return await self.polars_method(df) + if inspect.iscoroutinefunction(self.polars_function): + return await self.polars_function(df) else: - return self.polars_method(df) # type: ignore + return self.polars_function(df) # type: ignore async def to_pandas(self) -> pd.DataFrame: df = await self.job.to_lazy_polars() @@ -1447,7 +1346,7 @@ def describe(self) -> str: @dataclass class JoinJobs(RetrivalJob): - method: str + method: Literal['inner', 'left', 'outer'] left_job: RetrivalJob right_job: RetrivalJob @@ -1480,15 +1379,15 @@ async def to_lazy_polars(self) -> pl.LazyFrame: # Need to ensure that the data types are the same. 
Otherwise will the join fail for left_col, right_col in zip(self.left_on, self.right_on): - polars_type = [ + polars_types = [ feature for feature in return_request.features.union(return_request.entities) if feature.name == left_col ] - if not polars_type: + if not polars_types: raise ValueError(f'Unable to find {left_col} in left request {return_request}.') - polars_type = polars_type[0].dtype.polars_type + polars_type = polars_types[0].dtype.polars_type left_column_dtypes = dict(zip(left.columns, left.dtypes)) right_column_dtypes = dict(zip(right.columns, right.dtypes)) @@ -1528,13 +1427,18 @@ def describe(self) -> str: class FilteredJob(RetrivalJob, ModificationJob): job: RetrivalJob - condition: DerivedFeature | Feature | str + condition: Feature | str | pl.Expr | DerivedFeature async def to_lazy_polars(self) -> pl.LazyFrame: df = await self.job.to_lazy_polars() if isinstance(self.condition, str): - col = pl.col(self.condition) + try: + col = pl.Expr.deserialize(StringIO(self.condition)) + except Exception: + col = pl.col(self.condition) + elif isinstance(self.condition, pl.Expr): + col = self.condition elif isinstance(self.condition, DerivedFeature): expr = await self.condition.transformation.transform_polars(df, self.condition.name) if isinstance(expr, pl.Expr): @@ -1551,6 +1455,8 @@ async def to_lazy_polars(self) -> pl.LazyFrame: async def to_pandas(self) -> pd.DataFrame: df = await self.job.to_pandas() + if isinstance(self.condition, pl.Expr): + return (await self.to_polars()).to_pandas() if isinstance(self.condition, str): mask = df[self.condition] elif isinstance(self.condition, DerivedFeature): @@ -1566,11 +1472,6 @@ def describe(self) -> str: return f'{self.job.describe()} -> Filter based on {self.condition}' -class JoinBuilder: - - joins: list[str] - - @dataclass class RenameJob(RetrivalJob, ModificationJob): @@ -1781,7 +1682,7 @@ def request_result(self) -> RequestResult: def retrival_requests(self) -> list[RetrivalRequest]: return self.job.retrival_requests - def filter(self, condition: str | Feature | DerivedFeature) -> RetrivalJob: + def filter(self, condition: str | Feature | DerivedFeature | pl.Expr) -> RetrivalJob: if isinstance(condition, str): column_name = condition @@ -1862,7 +1763,20 @@ def drop_invalid(self, validator: Validator | None = None) -> RetrivalJob: return DropInvalidJob(self, validator or PolarsValidator()) def remove_derived_features(self) -> RetrivalJob: - return self.job.remove_derived_features() + new_requests = [] + for req in self.job.retrival_requests: + new_requests.append( + RetrivalRequest( + req.name, + location=req.location, + features=req.features, + entities=req.entities, + derived_features=set(), + aggregated_features=req.aggregated_features, + event_timestamp_request=req.event_timestamp_request, + ) + ) + return self.job.without_derived_features().with_request(new_requests) @dataclass @@ -2021,6 +1935,7 @@ async def to_lazy_polars(self) -> pl.LazyFrame: aggregations = self.aggregated_features[window] + assert window.window is not None required_features = set(window.group_by).union([window.window.time_column]) for agg in aggregations: required_features.update(agg.derived_feature.depending_on) @@ -2091,7 +2006,7 @@ async def to_pandas(self) -> AsyncIterator[pd.DataFrame]: class RawFileCachedJob(RetrivalJob, ModificationJob): location: DataFileReference - job: DerivedFeatureJob + job: RetrivalJob @property def request_result(self) -> RequestResult: @@ -2105,6 +2020,7 @@ async def to_pandas(self) -> pd.DataFrame: from aligned.local.job 
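For the `FilteredJob` change above: a string condition is now first tried as a serialized polars expression, falling back to a plain column name. The round-trip, assuming a polars version where `Expr.meta.serialize()` returns `str` (newer versions return bytes and would need the bytes-based variant):

```python
import polars as pl
from io import StringIO

expr = pl.col('amount') > 0
payload = expr.meta.serialize()  # what FeatureViewWrapper.filter stores on the source

try:
    restored = pl.Expr.deserialize(StringIO(payload))  # serialized-expression path
except Exception:
    restored = pl.col(payload)  # fallback: treat the string as a column name
```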
import FileFullJob from aligned.sources.local import LiteralReference + assert isinstance(self.job, DerivedFeatureJob) try: logger.debug('Trying to read cache file') df = await self.location.read_pandas() @@ -2129,6 +2045,50 @@ def remove_derived_features(self) -> RetrivalJob: return self.job.remove_derived_features() +@dataclass +class LoadedAtJob(RetrivalJob, ModificationJob): + + job: RetrivalJob + request: RetrivalRequest + + @property + def request_result(self) -> RequestResult: + return self.job.request_result + + @property + def retrival_requests(self) -> list[RetrivalRequest]: + return self.job.retrival_requests + + async def to_pandas(self) -> pd.DataFrame: + df = await self.job.to_pandas() + if not self.request.event_timestamp: + return df + + name = self.request.event_timestamp.name + timezone_name = self.request.event_timestamp.dtype.datetime_timezone + if timezone_name: + tz = timezone(timezone_name) + df[name] = datetime.now(tz=tz) + else: + df[name] = datetime.now() + return df + + async def to_lazy_polars(self) -> pl.LazyFrame: + + df = await self.job.to_lazy_polars() + if not self.request.event_timestamp: + return df + + name = self.request.event_timestamp.name + timezone_name = self.request.event_timestamp.dtype.datetime_timezone + if timezone_name: + tz = timezone(timezone_name) + col = pl.lit(datetime.now(tz=tz)) + else: + col = pl.lit(datetime.now()) + return df.with_columns(col.alias(name)) + + @dataclass class FileCachedJob(RetrivalJob, ModificationJob): @@ -2177,29 +2137,6 @@ def remove_derived_features(self) -> RetrivalJob: return self.job.remove_derived_features() -@dataclass -class SplitJob: - - job: RetrivalJob - target_column: str - strategy: SplitStrategy - - @property - def request_result(self) -> RequestResult: - return self.job.request_result - - @property - def retrival_requests(self) -> list[RetrivalRequest]: - return self.job.retrival_requests - - async def use_pandas(self) -> SplitDataSet[pd.DataFrame]: - data = await self.job.to_pandas() - return self.strategy.split_pandas(data, self.target_column) - - def remove_derived_features(self) -> RetrivalJob: - return self.job.remove_derived_features() - - @dataclass class WithRequests(RetrivalJob, ModificationJob): @@ -2293,7 +2230,7 @@ async def to_lazy_polars(self) -> pl.LazyFrame: logger.debug(f'Skipping feature {feature.name}, already correct type') continue - if feature.dtype == FeatureType.bool(): + if feature.dtype == FeatureType.boolean(): df = df.with_columns(pl.col(feature.name).cast(pl.Int8).cast(pl.Boolean)) elif (feature.dtype.is_array) or (feature.dtype.is_embedding): dtype = df.select(feature.name).dtypes[0] @@ -2539,6 +2476,9 @@ class CustomLazyPolarsJob(RetrivalJob): def retrival_requests(self) -> list[RetrivalRequest]: return [self.request] + def describe(self) -> str: + return f"Custom Lazy Polars Job returning {self.request.all_returned_columns}" + @property def request_result(self) -> RequestResult: return RequestResult.from_request(self.request) @@ -2602,24 +2542,48 @@ def retrival_requests(self) -> list[RetrivalRequest]: def describe(self) -> str: added = self.added_features() feature_names = {feat.name for feat in added} - return f"""{self.job.describe()} - -> predicting using model {self.model.name} with {feature_names} added features""" + return ( + f"{self.job.describe()} \n" + f"-> predicting using model {self.model.name} with {feature_names} added features" + ) async def to_pandas(self) -> pd.DataFrame: return await self.job.to_pandas() async def to_lazy_polars(self) -> 
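Annotation: the new `LoadedAtJob` stamps the wrapped request's event-timestamp column with the load time. A minimal polars sketch of the effect, using a hypothetical `loaded_at` column name:

```python
from datetime import datetime, timezone
import polars as pl

df = pl.LazyFrame({'passenger_id': [1, 2]})
# Equivalent of what LoadedAtJob appends for a UTC event timestamp.
stamped = df.with_columns(
    pl.lit(datetime.now(tz=timezone.utc)).alias('loaded_at')
)
```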
@@ -2177,29 +2137,6 @@ def remove_derived_features(self) -> RetrivalJob:
        return self.job.remove_derived_features()


-@dataclass
-class SplitJob:
-
-    job: RetrivalJob
-    target_column: str
-    strategy: SplitStrategy
-
-    @property
-    def request_result(self) -> RequestResult:
-        return self.job.request_result
-
-    @property
-    def retrival_requests(self) -> list[RetrivalRequest]:
-        return self.job.retrival_requests
-
-    async def use_pandas(self) -> SplitDataSet[pd.DataFrame]:
-        data = await self.job.to_pandas()
-        return self.strategy.split_pandas(data, self.target_column)
-
-    def remove_derived_features(self) -> RetrivalJob:
-        return self.job.remove_derived_features()
-
-
@dataclass
class WithRequests(RetrivalJob, ModificationJob):

@@ -2293,7 +2230,7 @@ async def to_lazy_polars(self) -> pl.LazyFrame:
                logger.debug(f'Skipping feature {feature.name}, already correct type')
                continue

-            if feature.dtype == FeatureType.bool():
+            if feature.dtype == FeatureType.boolean():
                df = df.with_columns(pl.col(feature.name).cast(pl.Int8).cast(pl.Boolean))
            elif (feature.dtype.is_array) or (feature.dtype.is_embedding):
                dtype = df.select(feature.name).dtypes[0]

@@ -2539,6 +2476,9 @@ class CustomLazyPolarsJob(RetrivalJob):
    def retrival_requests(self) -> list[RetrivalRequest]:
        return [self.request]

+    def describe(self) -> str:
+        return f"Custom Lazy Polars Job returning {self.request.all_returned_columns}"
+
    @property
    def request_result(self) -> RequestResult:
        return RequestResult.from_request(self.request)

@@ -2602,24 +2542,48 @@ def retrival_requests(self) -> list[RetrivalRequest]:
    def describe(self) -> str:
        added = self.added_features()
        feature_names = {feat.name for feat in added}
-        return f"""{self.job.describe()}
-        -> predicting using model {self.model.name} with {feature_names} added features"""
+        return (
+            f"{self.job.describe()} \n"
+            f"-> predicting using model {self.model.name} with {feature_names} added features"
+        )

    async def to_pandas(self) -> pd.DataFrame:
        return await self.job.to_pandas()

    async def to_lazy_polars(self) -> pl.LazyFrame:
+        from aligned.exposed_model.interface import VersionedModel
+        from datetime import datetime, timezone
+
        predictor = self.model.exposed_model
        if not predictor:
            raise ValueError('No predictor defined for model')

+        output = self.model.predictions_view
+        model_version_column = output.model_version_column
+
        df = await predictor.run_polars(
            self.job,
            self.store.model(self.model.name),
        )
+        if output.event_timestamp and output.event_timestamp.name not in df.columns:
+            df = df.with_columns(
+                pl.lit(datetime.now(timezone.utc)).alias(output.event_timestamp.name),
+            )
+
+        if (
+            model_version_column
+            and isinstance(predictor, VersionedModel)
+            and model_version_column.name not in df.columns
+        ):
+            df = df.with_columns(
+                pl.lit(await predictor.model_version()).alias(model_version_column.name),
+            )
        return df.lazy()

-    def filter(self, condition: str | Feature | DerivedFeature) -> RetrivalJob:
+    def log_each_job(self, logger_func: Callable[[object], None] | None = None) -> RetrivalJob:
+        return PredictionJob(self.job.log_each_job(logger_func), self.model, self.store)
+
+    def filter(self, condition: str | Feature | DerivedFeature | pl.Expr) -> RetrivalJob:
+        return PredictionJob(self.job.filter(condition), self.model, self.store)

    def remove_derived_features(self) -> RetrivalJob:
diff --git a/aligned/schemas/constraints.py b/aligned/schemas/constraints.py
index 4778bff3..f66ea773 100644
--- a/aligned/schemas/constraints.py
+++ b/aligned/schemas/constraints.py
@@ -53,6 +53,7 @@ def __init__(self) -> None:
            Unique,
            Regex,
            ReferencingColumn,
+            ListConstraint,
        ]:
            self.add(tran_type)

@@ -202,3 +203,12 @@ class InDomain(Constraint):

    def __hash__(self) -> int:
        return hash(self.name)
+
+
+@dataclass
+class ListConstraint(Constraint):
+    constraints: list[Constraint]
+    name = 'list'
+
+    def __hash__(self) -> int:
+        return hash(self.name)
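Annotation: `ListConstraint` composes several constraints into one codable value. A small usage sketch; `Unique` is registered in the same `SupportedConstraints` list above, and `InDomain` is assumed to take its allowed values positionally, per its definition in this file:

```python
from aligned.schemas.constraints import InDomain, ListConstraint, Unique

# A hypothetical composite constraint: unique and limited to two values.
composed = ListConstraint(constraints=[Unique(), InDomain(['a', 'b'])])
assert composed.name == 'list'
```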
diff --git a/aligned/schemas/date_formatter.py b/aligned/schemas/date_formatter.py
index 80d5b55d..d45b0c08 100644
--- a/aligned/schemas/date_formatter.py
+++ b/aligned/schemas/date_formatter.py
@@ -16,7 +16,7 @@ class AllDateFormatters:
    @classmethod
    def shared(cls) -> AllDateFormatters:
        if cls._shared is None:
-            formatters = [Timestamp, StringDateFormatter, NoopFormatter]
+            formatters: list[type[DateFormatter]] = [Timestamp, StringDateFormatter, NoopFormatter]
            cls._shared = AllDateFormatters({formatter.name(): formatter for formatter in formatters})
        return cls._shared

@@ -39,15 +39,15 @@ def _serialize(self) -> dict:
        return data

    @classmethod
-    def _deserialize(cls, data: dict) -> DateFormatter:
-        formatter_name = data.pop('name')
+    def _deserialize(cls, value: dict) -> DateFormatter:
+        formatter_name = value.pop('name')
        formatters = AllDateFormatters.shared().supported_formatters
        if formatter_name not in formatters:
            raise ValueError(
                f"Unknown formatter name: {formatter_name}. Supported formatters: {formatters.keys()}"
            )
        formatter_class = formatters[formatter_name]
-        return formatter_class.from_dict(data)
+        return formatter_class.from_dict(value)

    @staticmethod
    def string_format(format: str) -> StringDateFormatter:

@@ -55,7 +55,7 @@ def string_format(format: str) -> StringDateFormatter:
    @staticmethod
    def iso_8601() -> StringDateFormatter:
-        return StringDateFormatter('%Y-%m-%dT%H:%M:%S%.f+%Z')
+        return StringDateFormatter('%Y-%m-%dT%H:%M:%S%.f%Z', time_zone='UTC')

    @staticmethod
    def unix_timestamp(time_unit: TimeUnit = 'us', time_zone: str | None = 'UTC') -> Timestamp:
diff --git a/aligned/schemas/derivied_feature.py b/aligned/schemas/derivied_feature.py
index 1d4874ba..7c5866a0 100644
--- a/aligned/schemas/derivied_feature.py
+++ b/aligned/schemas/derivied_feature.py
@@ -22,7 +22,7 @@ def __init__(
        transformation: Transformation,
        depth: int,
        description: str | None = None,
-        tags: dict[str, str] | None = None,
+        tags: list[str] | None = None,
        constraints: set[Constraint] | None = None,
    ):
        self.name = name
@@ -33,6 +33,7 @@
        self.description = description
        self.tags = tags
        self.constraints = constraints
+        self.default_value = None

    def __pre_serialize__(self) -> DerivedFeature:
        from aligned.schemas.transformation import SupportedTransformations
diff --git a/aligned/schemas/event_trigger.py b/aligned/schemas/event_trigger.py
index a81c7fcd..919aa5b6 100644
--- a/aligned/schemas/event_trigger.py
+++ b/aligned/schemas/event_trigger.py
@@ -1,9 +1,10 @@
+from __future__ import annotations
import logging
from dataclasses import dataclass

-import pandas as pd
import polars as pl

+from aligned.lazy_imports import pandas as pd
from aligned.data_source.stream_data_source import StreamDataSource
from aligned.retrival_job import RequestResult
from aligned.schemas.codable import Codable
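Annotation: several modules in this patch swap the eager `import pandas as pd` for `from aligned.lazy_imports import pandas as pd`, so pandas is only imported when it is actually touched. A minimal sketch of how such a shim can work; the real `aligned.lazy_imports` implementation may differ:

```python
import importlib
from types import ModuleType
from typing import Any


class LazyModule:
    """Defers the import until the first attribute access."""

    def __init__(self, module_name: str) -> None:
        self._module_name = module_name
        self._module: ModuleType | None = None

    def __getattr__(self, attr: str) -> Any:
        if self._module is None:
            self._module = importlib.import_module(self._module_name)
        return getattr(self._module, attr)


pandas = LazyModule('pandas')  # no import cost until pandas.<attr> is used
```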
diff --git a/aligned/schemas/feature.py b/aligned/schemas/feature.py
index af8e61f9..a927baff 100644
--- a/aligned/schemas/feature.py
+++ b/aligned/schemas/feature.py
@@ -8,11 +8,20 @@
from aligned.schemas.codable import Codable
from aligned.schemas.constraints import Constraint
+from aligned.schemas.literal_value import LiteralValue

if TYPE_CHECKING:
    from aligned.compiler.feature_factory import FeatureFactory


+class StaticFeatureTags:
+    is_shadow_model = 'is_shadow_model'
+    is_model_version = 'is_model_version'
+    is_entity = 'is_entity'
+    is_annotated_by = 'is_annotated_by'
+    is_input_features = 'is_input_features'
+
+
NAME_POLARS_MAPPING = [
    ('string', pl.Utf8),
    ('int8', pl.Int8),

@@ -132,7 +141,7 @@ def pandas_type(self) -> str | type:
        }[self.name]

    @property
-    def polars_type(self) -> type:
+    def polars_type(self) -> pl.DataType:
        if self.is_datetime:
            time_zone = self.datetime_timezone
            return pl.Datetime(time_zone=time_zone)  # type: ignore

@@ -166,7 +175,9 @@ def feature_factory(self) -> FeatureFactory:
            return ff.List(FeatureType(name=sub_type).feature_factory)

        if self.is_embedding:
-            return ff.Embedding(embedding_size=self.embedding_size())
+            embedding_size = self.embedding_size()
+            assert embedding_size, 'Missing embedding size'
+            return ff.Embedding(embedding_size=embedding_size)

        return {
            'string': ff.String(),

@@ -259,7 +270,7 @@ def int32() -> FeatureType:
        return FeatureType(name='int32')

    @staticmethod
-    def bool() -> FeatureType:
+    def boolean() -> FeatureType:
        return FeatureType(name='bool')

    @staticmethod
@@ -267,7 +278,7 @@ def int64() -> FeatureType:
        return FeatureType(name='int64')

    @staticmethod
-    def float() -> FeatureType:
+    def floating_point() -> FeatureType:
        return FeatureType(name='float')

    @staticmethod
@@ -293,8 +304,10 @@ def json() -> FeatureType:
        return FeatureType(name='json')

    @staticmethod
-    def array() -> FeatureType:
-        return FeatureType(name='array')
+    def array(sub_type: FeatureType | None = None) -> FeatureType:
+        if sub_type is None:
+            return FeatureType(name='array')
+        return FeatureType(name=f'array-{sub_type.name}')

    @staticmethod
    def embedding(size: int) -> FeatureType:

@@ -315,15 +328,17 @@ class Feature(Codable):
    name: str
    dtype: FeatureType
    description: str | None = None
-    tags: dict[str, str] | None = None
+    tags: list[str] | None = None

    constraints: set[Constraint] | None = None
+    default_value: LiteralValue | None = None

    def __pre_serialize__(self) -> Feature:
        assert isinstance(self.name, str)
        assert isinstance(self.dtype, FeatureType)
        assert isinstance(self.description, str) or self.description is None
-        assert isinstance(self.tags, dict) or self.tags is None
+        assert isinstance(self.tags, list) or self.tags is None
+        assert isinstance(self.default_value, LiteralValue) or self.default_value is None
        if self.constraints:
            for constraint in self.constraints:
                assert isinstance(constraint, Constraint)

@@ -337,6 +352,7 @@ def renamed(self, new_name: str) -> Feature:
            description=self.description,
            tags=self.tags,
            constraints=self.constraints,
+            default_value=self.default_value,
        )

    def as_reference(self, location: FeatureLocation) -> FeatureReference:

@@ -361,7 +377,7 @@ class EventTimestamp(Codable):
    name: str
    ttl: int | None = None
    description: str | None = None
-    tags: dict[str, str] | None = None
+    tags: set[str] | None = None
    dtype: FeatureType = field(default_factory=lambda: FeatureType.datetime())

    def __hash__(self) -> int:

@@ -378,24 +394,24 @@ def as_feature(self) -> Feature:
            name=self.name,
            dtype=self.dtype,
            description=self.description,
-            tags=self.tags,
+            tags=list(self.tags or set()),
        )


@dataclass
class FeatureLocation(Codable):
    name: str
-    location: Literal['feature_view', 'combined_view', 'model']
+    location_type: Literal['feature_view', 'combined_view', 'model']

    @property
    def identifier(self) -> str:
        return str(self)

    def __str__(self) -> str:
-        return f'{self.location}:{self.name}'
+        return f'{self.location_type}:{self.name}'

    def __hash__(self) -> int:
-        return (self.name + self.location).__hash__()
+        return (self.name + self.location_type).__hash__()

    @staticmethod
    def feature_view(name: str) -> FeatureLocation:

@@ -412,7 +428,13 @@ def model(name: str) -> FeatureLocation:
    @staticmethod
    def from_string(string: str) -> FeatureLocation:
        splits = string.split(':')
-        return FeatureLocation(name=splits[1], location=splits[0])
+        location_type = splits[0]
+        assert location_type in [
+            'feature_view',
+            'combined_view',
+            'model',
+        ], f"Unexpected location type {location_type}"
+        return FeatureLocation(name=splits[1], location_type=location_type)  # type: ignore


@dataclass
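Annotation: with the rename from `location` to `location_type`, string round-trips keep the same `<type>:<name>` wire format, and the dtype factories keep their serialized names. A small check that mirrors the hunks above directly:

```python
from aligned.schemas.feature import FeatureLocation, FeatureType

loc = FeatureLocation.from_string('feature_view:taxi')
assert loc.location_type == 'feature_view'
assert str(loc) == 'feature_view:taxi'

# Only the factory methods were renamed; the type names are unchanged.
assert FeatureType.boolean().name == 'bool'
assert FeatureType.floating_point().name == 'float'
```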
diff --git a/aligned/schemas/feature_view.py b/aligned/schemas/feature_view.py
index 7f1a67cd..09d3d0be 100644
--- a/aligned/schemas/feature_view.py
+++ b/aligned/schemas/feature_view.py
@@ -6,7 +6,7 @@
from dataclasses import dataclass, field

-from aligned.data_source.batch_data_source import BatchDataSource
+from aligned.data_source.batch_data_source import CodableBatchDataSource
from aligned.data_source.stream_data_source import StreamDataSource
from aligned.request.retrival_request import FeatureRequest, RetrivalRequest
from aligned.schemas.codable import Codable

@@ -19,10 +19,20 @@
from aligned.retrival_job import RetrivalJob


+@dataclass
+class AnnotatedView(Codable):
+    name: str
+
+    annotated_view: FeatureLocation
+    annotated_by: Feature | None
+
+    view: CompiledFeatureView
+
+
@dataclass
class CompiledFeatureView(Codable):
    name: str
-    source: BatchDataSource
+    source: CodableBatchDataSource

    entities: set[Feature]
    features: set[Feature]

@@ -34,8 +44,8 @@ class CompiledFeatureView(Codable):
    event_timestamp: EventTimestamp | None = field(default=None)

    stream_data_source: StreamDataSource | None = field(default=None)
-    application_source: BatchDataSource | None = field(default=None)
-    materialized_source: BatchDataSource | None = field(default=None)
+    application_source: CodableBatchDataSource | None = field(default=None)
+    materialized_source: CodableBatchDataSource | None = field(default=None)

    materialize_from: datetime | None = field(default=None)

@@ -49,7 +59,7 @@ class CompiledFeatureView(Codable):
    def __pre_serialize__(self) -> CompiledFeatureView:
        assert isinstance(self.name, str)
-        assert isinstance(self.source, BatchDataSource)
+        assert isinstance(self.source, CodableBatchDataSource)

        for entity in self.entities:
            assert isinstance(entity, Feature)

@@ -69,7 +79,7 @@ def __pre_serialize__(self) -> CompiledFeatureView:
        if self.stream_data_source is not None:
            assert isinstance(self.stream_data_source, StreamDataSource)
        if self.application_source is not None:
-            assert isinstance(self.application_source, BatchDataSource)
+            assert isinstance(self.application_source, CodableBatchDataSource)
        if self.event_triggers is not None:
            for event_trigger in self.event_triggers:
                assert isinstance(event_trigger, EventTrigger)

@@ -85,7 +95,11 @@ def __pre_serialize__(self) -> CompiledFeatureView:
    @property
    def full_schema(self) -> set[Feature]:
-        return self.entities.union(self.features).union(self.derived_features)
+        schema = self.entities.union(self.features).union(self.derived_features)
+        if self.event_timestamp:
+            schema.add(self.event_timestamp.as_feature())
+        schema.update({feature.derived_feature for feature in self.aggregated_features})
+        return schema

    @property
    def entitiy_names(self) -> set[str]:

@@ -105,6 +119,7 @@ def request_all(self) -> FeatureRequest:
                    derived_features=self.derived_features,
                    aggregated_features=self.aggregated_features,
                    event_timestamp=self.event_timestamp,
+                    features_to_include={feature.name for feature in self.full_schema},
                )
            ],
        )

@@ -120,9 +135,12 @@ def request_for(self, feature_names: set[str]) -> FeatureRequest:
        }
        derived_aggregated_feautres = {feature.derived_feature for feature in self.aggregated_features}

+        if self.event_timestamp and self.event_timestamp.name in feature_names:
+            features.add(self.event_timestamp.as_feature())
+
        def dependent_features_for(
            feature: DerivedFeature,
-        ) -> tuple[set[Feature], set[Feature], set[AggregatedFeature]]:
+        ) -> tuple[set[Feature], set[DerivedFeature], set[AggregatedFeature]]:
            core_features = set()
            derived_features = set()
            aggregated_features = set()

@@ -218,109 +236,7 @@ def __str__(self) -> str:


@dataclass
-class CompiledCombinedFeatureView(Codable):
-    name: str
-    features: set[DerivedFeature]  # FIXME: Should combine this and feature_referances into one class.
-    feature_referances: dict[str, list[RetrivalRequest]]
-    event_triggers: set[EventTrigger] | None = field(default=None)
-
-    @property
-    def entity_features(self) -> set[Feature]:
-        values = set()
-        for requests in self.feature_referances.values():
-            for request in requests:
-                values.update(request.entities)
-        return values
-
-    @property
-    def entity_names(self) -> set[str]:
-        return {feature.name for feature in self.entity_features}
-
-    @property
-    def request_all(self) -> FeatureRequest:
-        requests: dict[str, RetrivalRequest] = {}
-        entities = set()
-        for sub_requests in self.feature_referances.values():
-            for request in sub_requests:
-                entities.update(request.entities)
-                if request.location not in requests:
-                    requests[request.location] = RetrivalRequest(
-                        name=request.name,
-                        location=request.location,
-                        entities=request.entities,
-                        features=set(),
-                        derived_features=set(),
-                        event_timestamp=request.event_timestamp,
-                    )
-                requests[request.location].derived_features.update(request.derived_features)
-                requests[request.location].features.update(request.features)
-                requests[request.location].entities.update(request.entities)
-
-        requests[self.name] = RetrivalRequest(
-            name=self.name,
-            location=FeatureLocation.combined_view(self.name),
-            entities=entities,
-            features=set(),
-            derived_features=self.features,
-            event_timestamp=None,
-        )
-
-        return FeatureRequest(
-            self.name,
-            features_to_include={feature.name for feature in self.features.union(entities)},
-            needed_requests=RetrivalRequest.combine(list(requests.values())),
-        )
-
-    def requests_for(self, feature_names: set[str]) -> FeatureRequest:
-        entities = self.entity_names
-        dependent_views: dict[str, RetrivalRequest] = {}
-        for feature in feature_names:
-            if feature in entities:
-                continue
-
-            if feature not in self.feature_referances.keys():
-                raise ValueError(f'Invalid feature {feature} in {self.name}')
-
-            requests = self.feature_referances[feature]
-            for request in requests:
-                if request.location not in dependent_views:
-                    dependent_views[request.location] = RetrivalRequest(
-                        name=request.name,
-                        location=request.location,
-                        entities=request.entities,
-                        features=set(),
-                        derived_features=set(),
-                        aggregated_features=set(),
-                        event_timestamp=request.event_timestamp,
-                    )
-                current = dependent_views[request.location]
-                current.derived_features = current.derived_features.union(request.derived_features)
-                current.features = current.features.union(request.features)
-                current.aggregated_features = current.aggregated_features.union(request.aggregated_features)
-                dependent_views[request.location] = current
-
-        dependent_views[self.name] = RetrivalRequest(  # Add the request we want
-            name=self.name,
-            location=FeatureLocation.combined_view(self.name),
-            entities=self.entity_features,
-            features=set(),
-            derived_features={feature for feature in self.features if feature.name in feature_names},
-            aggregated_features=set(),
-            event_timestamp=None,
-        )
-
-        return FeatureRequest(
-            FeatureLocation.combined_view(self.name),
-            features_to_include=feature_names,
-            needed_requests=list(dependent_views.values()),
-        )
-
-    def __hash__(self) -> int:
-        return hash(self.name)
-
-
-@dataclass
-class FeatureViewReferenceSource(BatchDataSource):
+class FeatureViewReferenceSource(CodableBatchDataSource):
    view: CompiledFeatureView
    location: FeatureLocation

@@ -366,14 +282,16 @@ def sub_request(self, request: RetrivalRequest) -> RetrivalRequest:
        return sub_request.needed_requests[0]

    @classmethod
-    def multi_source_features_for(
+    def multi_source_features_for(  # type: ignore
        cls: type[FeatureViewReferenceSource],
        facts: RetrivalJob,
        requests: list[tuple[FeatureViewReferenceSource, RetrivalRequest]],
    ) -> RetrivalJob:
        from aligned.local.job import FileFactualJob

-        sources = {source.job_group_key() for source, _ in requests if isinstance(source, BatchDataSource)}
+        sources = {
+            source.job_group_key() for source, _ in requests if isinstance(source, CodableBatchDataSource)
+        }
        if len(sources) != 1:
            raise NotImplementedError(
                f'Type: {cls} have not implemented how to load fact data with multiple sources.'
            )
diff --git a/aligned/schemas/folder.py b/aligned/schemas/folder.py
index b573f7b5..78e47eb4 100644
--- a/aligned/schemas/folder.py
+++ b/aligned/schemas/folder.py
@@ -1,12 +1,12 @@
from __future__ import annotations

from dataclasses import dataclass, field
-from typing import Protocol, TypeVar
+from typing import Protocol, Sequence, TypeVar
from uuid import uuid4
-from datetime import datetime
+from datetime import datetime, timezone
from mashumaro.types import SerializableType

-from aligned.data_source.batch_data_source import BatchDataSource
+from aligned.data_source.batch_data_source import CodableBatchDataSource
from aligned.request.retrival_request import RequestResult
from aligned.sources.local import Deletable, StorageFileSource

@@ -56,7 +56,7 @@ class DatasetMetadata(Codable):
@dataclass
class SingleDatasetMetadata(Codable):
-    source: BatchDataSource
+    source: CodableBatchDataSource
    content: RequestResult
    created_at: datetime = field(default_factory=datetime.utcnow)
    id: str = field(default_factory=lambda: str(uuid4()))

@@ -80,23 +80,19 @@ def format_as_job(self, job: T) -> T:


@dataclass
-class TrainDatasetMetadata(Codable):
+class TrainDatasetMetadata(Codable, DatasetMetadataInterface):

    name: str | None

-    request_result: RequestResult
-
-    @property
-    def content(self) -> RequestResult:
-        return self.request_result
+    content: RequestResult

-    train_dataset: BatchDataSource
-    test_dataset: BatchDataSource
+    train_dataset: CodableBatchDataSource
+    test_dataset: CodableBatchDataSource

-    validation_dataset: BatchDataSource | None = field(default=None)
+    validation_dataset: CodableBatchDataSource | None = field(default=None)

    id: str = field(default_factory=lambda: str(uuid4()))
-    created_at: datetime = field(default_factory=datetime.utcnow)
+    created_at: datetime = field(default_factory=lambda: datetime.now(timezone.utc))

    train_size_fraction: float | None = field(default=None)
    test_size_fraction: float | None = field(default=None)

@@ -121,7 +117,7 @@ def as_datasets(self) -> list[SingleDatasetMetadata]:
            datasets.append(
                SingleDatasetMetadata(
                    source=source,
-                    content=self.request_result,
+                    content=self.content,
                    created_at=self.created_at,
                    id=self.id,
                    name=self.name,
                )
            )

@@ -150,7 +146,7 @@ def all_datasets(self) -> list[SingleDatasetMetadata]:
        return datasets

    @property
-    def all(self) -> list[DatasetMetadataInterface]:
+    def all(self) -> Sequence[DatasetMetadataInterface]:
        return self.raw_data + self.train_test + self.train_test_validation

@@ -219,7 +215,7 @@ async def list_datasets(self) -> GroupedDatasetList:
            train_test_validation=[],
        )

-    def index_of(self, metadata_id: str, array: list[DatasetMetadataInterface]) -> int | None:
+    def index_of(self, metadata_id: str, array: Sequence[DatasetMetadataInterface]) -> int | None:

        for i, dataset in enumerate(array):
            if dataset.id == metadata_id:

@@ -296,7 +292,7 @@ async def metadata_for(self, dataset_id: str) -> DatasetMetadataInterface | None:
    async def delete_metadata_for(self, dataset_id: str) -> DatasetMetadataInterface | None:
        datasets = await self.list_datasets()

-        async def delete_dataset(source: BatchDataSource):
+        async def delete_dataset(source: CodableBatchDataSource):
            if isinstance(source, Deletable):
                await source.delete()
diff --git a/aligned/schemas/literal_value.py b/aligned/schemas/literal_value.py
index 2184e1dd..fa8ea855 100644
--- a/aligned/schemas/literal_value.py
+++ b/aligned/schemas/literal_value.py
@@ -1,5 +1,6 @@
from __future__ import annotations

+from typing import TYPE_CHECKING
from dataclasses import dataclass
from datetime import date, datetime
from typing import Any

@@ -7,7 +8,9 @@
from mashumaro.types import SerializableType

from aligned.schemas.codable import Codable
-from aligned.schemas.feature import FeatureType
+
+if TYPE_CHECKING:
+    from aligned.schemas.feature import FeatureType


class SupportedLiteralValues:
@@ -18,7 +21,17 @@ class SupportedLiteralValues:
    def __init__(self) -> None:
        self.values = {}
-        for lit in [IntValue, FloatValue, BoolValue, DateValue, DatetimeValue, StringValue, ArrayValue]:
+        lits: list[type[LiteralValue]] = [
+            IntValue,
+            FloatValue,
+            BoolValue,
+            DateValue,
+            DatetimeValue,
+            StringValue,
+            ArrayValue,
+            NullValue,
+        ]
+        for lit in lits:
            self.values[lit.name] = lit

    @classmethod
@@ -38,6 +51,8 @@ def python_value(self) -> Any:

    @property
    def dtype(self) -> FeatureType:
+        from aligned.schemas.feature import FeatureType
+
        return FeatureType(self.name)

    def _serialize(self) -> dict:

@@ -66,6 +81,8 @@ def from_value(value: Any) -> LiteralValue:
            return StringValue(value)
        elif isinstance(value, list):
            return ArrayValue([LiteralValue.from_value(val) for val in value])
+        elif value is None:
+            return NullValue()
        raise ValueError(f'Unable to find literal value for type {type(value)}')

@@ -129,6 +146,15 @@ def python_value(self) -> Any:
        return self.value


+@dataclass
+class NullValue(LiteralValue):
+    name = 'null'
+
+    @property
+    def python_value(self) -> Any:
+        return None
+
+
@dataclass
class ArrayValue(LiteralValue):
    value: list[LiteralValue]
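Annotation: with the new `NullValue` variant, `None` now round-trips through the literal-value machinery instead of raising. Mirroring the hunks above:

```python
from aligned.schemas.literal_value import LiteralValue

value = LiteralValue.from_value(None)
assert value.name == 'null'
assert value.python_value is None
```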
diff --git a/aligned/schemas/model.py b/aligned/schemas/model.py
index 786f325d..5d4f1db6 100644
--- a/aligned/schemas/model.py
+++ b/aligned/schemas/model.py
@@ -4,7 +4,7 @@
from aligned.request.retrival_request import EventTimestampRequest, FeatureRequest, RetrivalRequest
from aligned.schemas.codable import Codable
-from aligned.schemas.feature import FeatureLocation, FeatureType
+from aligned.schemas.feature import FeatureLocation, FeatureType, StaticFeatureTags
from aligned.schemas.feature import EventTimestamp, Feature, FeatureReference
from aligned.schemas.event_trigger import EventTrigger
from aligned.schemas.target import ClassificationTarget, RecommendationTarget, RegressionTarget

@@ -13,7 +13,7 @@
from aligned.schemas.folder import DatasetStore
from aligned.exposed_model.interface import ExposedModel
from aligned.data_source.stream_data_source import StreamDataSource
-from aligned.data_source.batch_data_source import BatchDataSource
+from aligned.data_source.batch_data_source import CodableBatchDataSource
from aligned.retrival_job import RetrivalJob

logger = logging.getLogger(__name__)

@@ -61,11 +61,8 @@ class PredictionsView(Codable):
    derived_features: set[DerivedFeature]
    event_timestamp: EventTimestamp | None = field(default=None)

-    model_version_column: Feature | None = field(default=None)
-    is_shadow_model_flag: Feature | None = field(default=None)
-
-    source: BatchDataSource | None = field(default=None)
-    application_source: BatchDataSource | None = field(default=None)
+    source: CodableBatchDataSource | None = field(default=None)
+    application_source: CodableBatchDataSource | None = field(default=None)
    stream_source: StreamDataSource | None = field(default=None)

    regression_targets: set[RegressionTarget] | None = field(default=None)

@@ -75,6 +72,27 @@
    acceptable_freshness: timedelta | None = field(default=None)
    unacceptable_freshness: timedelta | None = field(default=None)

+    @property
+    def is_shadow_model_flag(self) -> Feature | None:
+        for feature in self.features:
+            if feature.tags and StaticFeatureTags.is_shadow_model in feature.tags:
+                return feature
+        return None
+
+    @property
+    def model_version_column(self) -> Feature | None:
+        for feature in self.features:
+            if feature.tags and StaticFeatureTags.is_model_version in feature.tags:
+                return feature
+        return None
+
+    @property
+    def logged_features(self) -> Feature | None:
+        for feature in self.features:
+            if feature.tags and StaticFeatureTags.is_input_features in feature.tags:
+                return feature
+        return None
+
    def as_view(self, name: str) -> CompiledFeatureView | None:
        if not self.source:
            return None
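Annotation: the shadow-model flag and model-version column are no longer stored as dedicated fields; they are resolved by tag lookup over the view's features. A minimal sketch of a feature that the new `model_version_column` property would pick up:

```python
from aligned.schemas.feature import Feature, FeatureType, StaticFeatureTags

# A PredictionsView whose `features` set contains this feature will
# now return it from the `model_version_column` property.
version_column = Feature(
    name='model_version',
    dtype=FeatureType.string(),
    tags=[StaticFeatureTags.is_model_version],
)
```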
@@ -217,7 +235,7 @@ def request_all_predictions(self) -> FeatureRequest:


@dataclass
-class ModelSource(BatchDataSource):
+class ModelSource(CodableBatchDataSource):

    model: Model
    pred_view: CompiledFeatureView

@@ -266,7 +284,7 @@ def features_for(self, facts: RetrivalJob, request: RetrivalRequest) -> RetrivalJob:
        return self.source().features_for(facts, request)

    @classmethod
-    def multi_source_features_for(
+    def multi_source_features_for(  # type: ignore
        cls, facts: RetrivalJob, requests: list[tuple['ModelSource', RetrivalRequest]]
    ) -> RetrivalJob:
diff --git a/aligned/schemas/record_coders.py b/aligned/schemas/record_coders.py
index 328a7785..7cc7c5b2 100644
--- a/aligned/schemas/record_coders.py
+++ b/aligned/schemas/record_coders.py
@@ -28,10 +28,10 @@ def _deserialize(cls, value: dict) -> 'RecordCoder':
        return data_class.from_dict(value)

    def decode(self, records: list[dict]) -> list[dict]:
-        pass
+        raise NotImplementedError(type(self))

    def encode(self, records: list[dict]) -> list[dict]:
-        pass
+        raise NotImplementedError(type(self))


class SupportedRecordCoders:
diff --git a/aligned/schemas/repo_definition.py b/aligned/schemas/repo_definition.py
index ac210ce6..4670feb3 100644
--- a/aligned/schemas/repo_definition.py
+++ b/aligned/schemas/repo_definition.py
@@ -7,10 +7,9 @@
from pathlib import Path
from typing import TYPE_CHECKING

-from aligned.enricher import Enricher
from aligned.feature_source import FeatureSource, FeatureSourceFactory
from aligned.schemas.codable import Codable
-from aligned.schemas.feature_view import CompiledCombinedFeatureView, CompiledFeatureView
+from aligned.schemas.feature_view import CompiledFeatureView
from aligned.schemas.model import Model

if TYPE_CHECKING:

@@ -61,14 +60,7 @@ def feature_server(self, source: FeatureSource) -> FastAPI | FeatureSource:
        if not (selected_file := self.selected_file):
            raise ValueError('No selected file to serve features from')

-        try:
-            feature_store = asyncio.get_event_loop().run_until_complete(selected_file.feature_store())
-        except RuntimeError:
-            import nest_asyncio
-
-            nest_asyncio.apply()
-            feature_store = asyncio.new_event_loop().run_until_complete(selected_file.feature_store())
-
+        feature_store = asyncio.get_event_loop().run_until_complete(selected_file.feature_store())
        return FastAPIServer.app(feature_store)

    @staticmethod
@@ -123,24 +115,10 @@ def from_reference(

        from aligned.server import FastAPIServer

-        try:
-            feature_store = asyncio.get_event_loop().run_until_complete(reference.feature_store())
-        except RuntimeError:
-            import nest_asyncio
-
-            nest_asyncio.apply()
-            feature_store = asyncio.new_event_loop().run_until_complete(reference.feature_store())
-
+        feature_store = asyncio.get_event_loop().run_until_complete(reference.feature_store())
        return FastAPIServer.app(feature_store.with_source(online_source))


-@dataclass
-class EnricherReference(Codable):
-    module: str
-    attribute_name: str
-    enricher: Enricher
-
-
@dataclass
class RepoMetadata(Codable):
    created_at: datetime

@@ -155,22 +133,15 @@ class RepoDefinition(Codable):
    metadata: RepoMetadata

    feature_views: set[CompiledFeatureView] = field(default_factory=set)
-    combined_feature_views: set[CompiledCombinedFeatureView] = field(default_factory=set)
    models: set[Model] = field(default_factory=set)
-    enrichers: list[EnricherReference] = field(default_factory=list)

-    def to_dict(self, **kwargs: dict) -> dict:
+    def to_dict(self, **kwargs: dict) -> dict:  # type: ignore
        for view in self.feature_views:
            assert isinstance(view, CompiledFeatureView)

-        for view in self.combined_feature_views:
-            assert isinstance(view, CompiledCombinedFeatureView)
-
        for model in self.models:
            assert isinstance(model, Model)

-        for enricher in self.enrichers:
-            assert isinstance(enricher, EnricherReference)
        return super().to_dict(**kwargs)

    @staticmethod
@@ -205,6 +176,13 @@ async def from_path(path: str) -> RepoDefinition:
        dir_path = Path.cwd() if path == '.' else Path(path).absolute()
        return await RepoReader.definition_from_path(dir_path)

+    @staticmethod
+    async def from_glob(glob: str, root_dir: Path | None = None) -> RepoDefinition:
+        from aligned.compiler.repo_reader import RepoReader
+
+        dir_path = Path.cwd() if root_dir is None else root_dir
+        return await RepoReader.definition_from_glob(dir_path, glob=glob)
+

    # def add_old_version(self, old_version: "RepoDefinition") -> "RepoDefinition":
    #     views: dict[str, VersionedData[CompiledFeatureView]] = {}
diff --git a/aligned/schemas/text_vectoriser.py b/aligned/schemas/text_vectoriser.py
index bbf0de78..6a540797 100644
--- a/aligned/schemas/text_vectoriser.py
+++ b/aligned/schemas/text_vectoriser.py
@@ -6,8 +6,8 @@
from typing import Any

import numpy as np
-import pandas as pd
import polars as pl
+from aligned.lazy_imports import pandas as pd
from mashumaro.types import SerializableType
from pydantic import BaseModel, ValidationError

@@ -60,7 +60,7 @@ def _deserialize(cls, value: dict) -> EmbeddingModel:
        return data_class.from_dict(value)

-    async def load_model(self):
+    async def load_model(self) -> None:
        pass

    async def vectorise_pandas(self, texts: pd.Series) -> pd.Series:

@@ -170,7 +170,7 @@ def vector(tokens: list[str]) -> list[float]:
            [pl.col(f'{text_key}_tokens').apply(vector, return_dtype=pl.List(pl.Float64)).alias(output_key)]
        )

-    async def load_model(self):
+    async def load_model(self) -> None:
        import gensim.downloader as gensim_downloader

        self.loaded_model = gensim_downloader.load(self.model_name)

@@ -238,7 +238,7 @@ async def embeddings(self, input: list[str]) -> OpenAiResponse:
            logger.error(f'Error parsing OpenAi Embeddings API response: {e}')
            raise e

-    async def load_model(self):
+    async def load_model(self) -> None:
        pass

    async def vectorise_pandas(self, texts: pd.Series) -> pd.Series:

@@ -268,7 +268,7 @@ def embedding_size(self) -> int | None:
        return len(model.encode(['test'])[0])

-    async def load_model(self):
+    async def load_model(self) -> None:
        from sentence_transformers import SentenceTransformer

        self.loaded_model = SentenceTransformer(self.model)
diff --git a/aligned/schemas/transformation.py b/aligned/schemas/transformation.py
index ee9ee142..8de9e839 100644
--- a/aligned/schemas/transformation.py
+++ b/aligned/schemas/transformation.py
@@ -7,10 +7,10 @@
from typing import TYPE_CHECKING, Any, Callable

import numpy as np
-import pandas as pd
import polars as pl
from mashumaro.types import SerializableType

+from aligned.lazy_imports import pandas as pd
from aligned.schemas.codable import Codable
from aligned.schemas.feature import FeatureType
from aligned.schemas.literal_value import LiteralValue

@@ -42,7 +42,7 @@ def input_polars(self) -> pl.DataFrame:
    def output_polars(self) -> pl.Series:
        try:
            values = pl.Series(self.output).fill_nan(None)
-            if self.transformation.dtype == FeatureType.bool():
+            if self.transformation.dtype == FeatureType.boolean():
                return values.cast(pl.Boolean)
            else:
                return values

@@ -116,14 +116,14 @@ async def run_transformation_test_polars(cls) -> None:
            assert (set(test.input_polars.columns) - set(output_df.columns)) == set()
            expected = test.output_polars
-            if test.transformation.dtype == FeatureType.bool():
+            if test.transformation.dtype == FeatureType.boolean():
                is_correct = output.equals(test.output_polars.alias(alias))
                assert is_correct, (
                    f'Output for {cls.__name__} is not correct.,'
                    f'\nGot: {output},\nexpected: {test.output_polars}'
                )
            else:
-                assert_series_equal(expected.alias(alias), output, check_names=False, check_dtype=False)
+                assert_series_equal(expected.alias(alias), output, check_names=False, check_dtypes=False)
        except AttributeError:
            raise AssertionError(
                f'Error for transformation {cls.__name__}. Could be missing a return in the transformation'
            )

@@ -139,7 +139,7 @@ async def run_transformation_test_pandas(cls) -> None:
        with suppress(NotImplementedError):
            test = cls.test_definition()
            output = await test.transformation.transform_pandas(test.input_pandas)
-            if test.transformation.dtype == FeatureType.bool():
+            if test.transformation.dtype == FeatureType.boolean():
                is_correct = np.all(output == test.output_pandas) | output.equals(test.output_pandas)
                assert is_correct, (
                    f'Output for {cls.__name__} is not correct.,'

@@ -351,7 +351,8 @@ class PolarsFunctionTransformation(Transformation):
    async def transform_pandas(self, df: pd.DataFrame) -> pd.Series:
        polars_df = await self.transform_polars(pl.from_pandas(df).lazy(), self.function_name)
-        return polars_df.collect().to_pandas()[self.function_name]
+        assert isinstance(polars_df, pl.LazyFrame)
+        return polars_df.collect().to_pandas()[self.function_name]  # type: ignore

    async def transform_polars(self, df: pl.LazyFrame, alias: str) -> pl.LazyFrame | pl.Expr:
        if self.function_name not in locals():

@@ -396,10 +397,10 @@ class NotNull(Transformation):
    key: str

    name: str = 'not_null'
-    dtype: FeatureType = FeatureType.bool()
+    dtype: FeatureType = FeatureType.boolean()

    async def transform_pandas(self, df: pd.DataFrame) -> pd.Series:
-        return df[self.key].notnull()
+        return df[self.key].notnull()  # type: ignore

    async def transform_polars(self, df: pl.LazyFrame, alias: str) -> pl.LazyFrame | pl.Expr:
        return df.with_columns(pl.col(self.key).is_not_null().alias(alias))

@@ -420,7 +421,7 @@ class Equals(Transformation):
    other_key: str

    name: str = 'equals_feature'
-    dtype: FeatureType = FeatureType.bool()
+    dtype: FeatureType = FeatureType.boolean()

    async def transform_pandas(self, df: pd.DataFrame) -> pd.Series:
        return df[self.key] == df[self.other_key]

@@ -447,7 +448,7 @@ class EqualsLiteral(Transformation):
    value: LiteralValue

    name: str = 'equals'
-    dtype: FeatureType = FeatureType.bool()
+    dtype: FeatureType = FeatureType.boolean()

    def __init__(self, key: str, value: LiteralValue) -> None:
        self.key = key

@@ -475,7 +476,7 @@ class And(Transformation):
    second_key: str

    name: str = 'and'
-    dtype: FeatureType = FeatureType.bool()
+    dtype: FeatureType = FeatureType.boolean()

    def __init__(self, first_key: str, second_key: str) -> None:
        self.first_key = first_key

@@ -513,7 +514,7 @@ class Or(Transformation):
    second_key: str

    name: str = 'or'
-    dtype: FeatureType = FeatureType.bool()
+    dtype: FeatureType = FeatureType.boolean()

    def __init__(self, first_key: str, second_key: str) -> None:
        self.first_key = first_key

@@ -545,7 +546,7 @@ class Inverse(Transformation):
    key: str

    name: str = 'inverse'
-    dtype: FeatureType = FeatureType.bool()
+    dtype: FeatureType = FeatureType.boolean()

    def __init__(self, key: str) -> None:
        self.key = key

@@ -553,8 +554,8 @@ def __init__(self, key: str) -> None:
    async def transform_pandas(self, df: pd.DataFrame) -> pd.Series:
        return gracefull_transformation(
            df,
-            is_valid_mask=~(df[self.key].isnull()),
-            transformation=lambda dfv: ~dfv[self.key].astype('bool'),
+            is_valid_mask=~(df[self.key].isnull()),  # type: ignore
+            transformation=lambda dfv: ~dfv[self.key].astype('bool'),  # type: ignore
        )

    async def transform_polars(self, df: pl.LazyFrame, alias: str) -> pl.LazyFrame | pl.Expr:

@@ -576,7 +577,7 @@ class NotEquals(Transformation):
    other_key: str

    name: str = 'not-equals-feature'
-    dtype: FeatureType = FeatureType.bool()
+    dtype: FeatureType = FeatureType.boolean()

    async def transform_pandas(self, df: pd.DataFrame) -> pd.Series:
        return df[self.key] != df[self.other_key]

@@ -603,7 +604,7 @@ class NotEqualsLiteral(Transformation):
    value: LiteralValue

    name: str = 'not-equals'
-    dtype: FeatureType = FeatureType.bool()
+    dtype: FeatureType = FeatureType.boolean()

    def __init__(self, key: str, value: Any) -> None:
        self.key = key

@@ -634,7 +635,7 @@ class GreaterThenValue(Transformation):
    value: float

    name: str = 'gt'
-    dtype: FeatureType = FeatureType.bool()
+    dtype: FeatureType = FeatureType.boolean()

    async def transform_pandas(self, df: pd.DataFrame) -> pd.Series:
        return df[self.key] > self.value

@@ -658,7 +659,7 @@ class GreaterThen(Transformation):
    right_key: str

    name: str = field(default='gtf')
-    dtype: FeatureType = field(default=FeatureType.bool())
+    dtype: FeatureType = field(default=FeatureType.boolean())

    async def transform_pandas(self, df: pd.DataFrame) -> pd.Series:
        return df[self.left_key] > df[self.right_key]

@@ -684,7 +685,7 @@ class GreaterThenOrEqual(Transformation):
    value: float

    name: str = 'gte'
-    dtype: FeatureType = FeatureType.bool()
+    dtype: FeatureType = FeatureType.boolean()

    def __init__(self, key: str, value: float) -> None:
        self.key = key

@@ -711,6 +712,70 @@ def test_definition() -> TransformationTestDefinition:
        )


+@dataclass
+class LowerThenCol(Transformation):
+
+    key: str
+    right_col: str
+
+    name: str = 'lt'
+    dtype: FeatureType = FeatureType.boolean()
+
+    def __init__(self, key: str, right_col: str) -> None:
+        self.key = key
+        self.right_col = right_col
+
+    async def transform_pandas(self, df: pd.DataFrame) -> pd.Series:
+        return gracefull_transformation(
+            df,
+            is_valid_mask=~(df[self.key].isna() | df[self.key].isnull()),
+            transformation=lambda dfv: dfv[self.key] < dfv[self.right_col],
+        )
+
+    async def transform_polars(self, df: pl.LazyFrame, alias: str) -> pl.LazyFrame | pl.Expr:
+        return df.with_columns((pl.col(self.key) < pl.col(self.right_col)).alias(alias))
+
+    @staticmethod
+    def test_definition() -> TransformationTestDefinition:
+        from numpy import nan
+
+        return TransformationTestDefinition(
+            LowerThen(key='x', value=2), input={'x': [1, 2, 3, None]}, output=[True, False, False, nan]
+        )
+
+
+@dataclass
+class LowerThenOrEqualCol(Transformation):
+
+    key: str
+    right_col: str
+
+    name: str = 'lte'
+    dtype: FeatureType = FeatureType.boolean()
+
+    def __init__(self, key: str, right_col: str) -> None:
+        self.key = key
+        self.right_col = right_col
+
+    async def transform_polars(self, df: pl.LazyFrame, alias: str) -> pl.LazyFrame | pl.Expr:
+        return pl.col(self.key) <= pl.col(self.right_col)
+
+    async def transform_pandas(self, df: pd.DataFrame) -> pd.Series:
+        return gracefull_transformation(
+            df,
+            is_valid_mask=~(df[self.key].isna() | df[self.key].isnull()),
+            transformation=lambda dfv: dfv[self.key] <= dfv[self.right_col],
+        )
+
+    @staticmethod
+    def test_definition() -> TransformationTestDefinition:
+        from numpy import nan
+
+        return TransformationTestDefinition(
+            LowerThenOrEqual(key='x', value=2), input={'x': [1, 2, 3, None]}, output=[True, True, False, nan]
+        )
+
+
@dataclass
class LowerThen(Transformation):

@@ -718,7 +783,7 @@ class LowerThen(Transformation):
    value: float

    name: str = 'lt'
-    dtype: FeatureType = FeatureType.bool()
+    dtype: FeatureType = FeatureType.boolean()

    def __init__(self, key: str, value: float) -> None:
        self.key = key

@@ -750,7 +815,7 @@ class LowerThenOrEqual(Transformation):
    value: float

    name: str = 'lte'
-    dtype: FeatureType = FeatureType.bool()
+    dtype: FeatureType = FeatureType.boolean()

    def __init__(self, key: str, value: float) -> None:
        self.key = key

@@ -782,7 +847,7 @@ class SubtractionValue(Transformation, PsqlTransformation, RedshiftTransformatio
    behind: LiteralValue

    name: str = 'sub_val'
-    dtype: FeatureType = FeatureType.float()
+    dtype: FeatureType = FeatureType.floating_point()

    def __init__(self, front: str, behind: LiteralValue) -> None:
        self.front = front

@@ -794,7 +859,7 @@ async def transform_polars(self, df: pl.LazyFrame, alias: str) -> pl.LazyFrame |
    async def transform_pandas(self, df: pd.DataFrame) -> pd.Series:
        return gracefull_transformation(
            df,
-            is_valid_mask=~(df[self.front].isna()),
+            is_valid_mask=~(df[self.front].isna()),  # type: ignore
            transformation=lambda dfv: dfv[self.front] - self.behind.python_value,
        )

@@ -819,7 +884,7 @@ class Subtraction(Transformation, PsqlTransformation, RedshiftTransformation):
    behind: str

    name: str = 'sub'
-    dtype: FeatureType = FeatureType.float()
+    dtype: FeatureType = FeatureType.floating_point()

    def __init__(self, front: str, behind: str) -> None:
        self.front = front

@@ -856,7 +921,7 @@ class AdditionValue(Transformation):
    value: LiteralValue

    name: str = 'add_value'
-    dtype: FeatureType = FeatureType.float()
+    dtype: FeatureType = FeatureType.floating_point()

    async def transform_pandas(self, df: pd.DataFrame) -> pd.Series:
        return df[self.feature] + self.value.python_value

@@ -882,7 +947,7 @@ class Multiply(Transformation, PsqlTransformation, RedshiftTransformation):
    behind: str

    name: str = 'mul'
-    dtype: FeatureType = FeatureType.float()
+    dtype: FeatureType = FeatureType.floating_point()

    def __init__(self, front: str, behind: str) -> None:
        self.front = front

@@ -905,7 +970,7 @@ class MultiplyValue(Transformation, PsqlTransformation, RedshiftTransformation):
    value: LiteralValue

    name: str = 'mul_val'
-    dtype: FeatureType = FeatureType.float()
+    dtype: FeatureType = FeatureType.floating_point()

    def __init__(self, key: str, value: LiteralValue) -> None:
        self.key = key

@@ -928,7 +993,7 @@ class Addition(Transformation, PsqlTransformation, RedshiftTransformation):
    behind: str

    name: str = 'add'
-    dtype: FeatureType = FeatureType.float()
+    dtype: FeatureType = FeatureType.floating_point()

    def __init__(self, front: str, behind: str) -> None:
        self.front = front

@@ -966,7 +1031,7 @@ class TimeDifference(Transformation, PsqlTransformation, RedshiftTransformation)
    unit: str

    name: str = 'time-diff'
-    dtype: FeatureType = FeatureType.float()
+    dtype: FeatureType = FeatureType.floating_point()

    def __init__(self, front: str, behind: str, unit: str = 's') -> None:
        self.front = front

@@ -977,7 +1042,7 @@ async def transform_pandas(self, df: pd.DataFrame) -> pd.Series:
        return gracefull_transformation(
            df,
            is_valid_mask=~(df[self.front].isna() | df[self.behind].isna()),
-            transformation=lambda dfv: (dfv[self.front] - dfv[self.behind]) / np.timedelta64(1, self.unit),
+            transformation=lambda dfv: (dfv[self.front] - dfv[self.behind]) / np.timedelta64(1, self.unit),  # type: ignore
        )

    async def transform_polars(self, df: pl.LazyFrame, alias: str) -> pl.LazyFrame | pl.Expr:

@@ -1018,7 +1083,7 @@ class Logarithm(Transformation):
    key: str

    name: str = 'log'
-    dtype: FeatureType = FeatureType.float()
+    dtype: FeatureType = FeatureType.floating_point()

    def __init__(self, key: str) -> None:
        self.key = key

@@ -1027,7 +1092,7 @@ async def transform_pandas(self, df: pd.DataFrame) -> pd.Series:
        return gracefull_transformation(
            df,
            is_valid_mask=~(df[self.key].isna() | (df[self.key] <= 0)),
-            transformation=lambda dfv: np.log(dfv[self.key]),
+            transformation=lambda dfv: np.log(dfv[self.key]),  # type: ignore
        )

    async def transform_polars(self, df: pl.LazyFrame, alias: str) -> pl.LazyFrame | pl.Expr:

@@ -1050,7 +1115,7 @@ class LogarithmOnePluss(Transformation):
    key: str

    name: str = 'log1p'
-    dtype: FeatureType = FeatureType.float()
+    dtype: FeatureType = FeatureType.floating_point()

    def __init__(self, key: str) -> None:
        self.key = key

@@ -1059,7 +1124,7 @@ async def transform_pandas(self, df: pd.DataFrame) -> pd.Series:
        return gracefull_transformation(
            df,
            is_valid_mask=~(df[self.key].isna() | (df[self.key] <= -1)),
-            transformation=lambda dfv: np.log1p(dfv[self.key]),
+            transformation=lambda dfv: np.log1p(dfv[self.key]),  # type: ignore
        )

    async def transform_polars(self, df: pl.LazyFrame, alias: str) -> pl.LazyFrame | pl.Expr:

@@ -1086,7 +1151,7 @@ class ToNumerical(Transformation):
    key: str

    name: str = 'to-num'
-    dtype: FeatureType = FeatureType.float()
+    dtype: FeatureType = FeatureType.floating_point()

    def __init__(self, key: str) -> None:
        self.key = key

@@ -1094,7 +1159,7 @@ def __init__(self, key: str) -> None:
    async def transform_pandas(self, df: pd.DataFrame) -> pd.Series:
        from pandas import to_numeric

-        return to_numeric(df[self.key], errors='coerce')
+        return to_numeric(df[self.key], errors='coerce')  # type: ignore

    async def transform_polars(self, df: pl.LazyFrame, alias: str) -> pl.LazyFrame | pl.Expr:
        return pl.col(self.key).cast(pl.Float64)

@@ -1127,8 +1192,8 @@ async def transform_pandas(self, df: pd.DataFrame) -> pd.Series:
        return gracefull_transformation(
            df,
-            is_valid_mask=~(df[self.key].isna()),
-            transformation=lambda dfv: getattr(dfv[self.key].dt, self.component),
+            is_valid_mask=~(df[self.key].isna()),  # type: ignore
+            transformation=lambda dfv: getattr(dfv[self.key].dt, self.component),  # type: ignore
        )

    async def transform_polars(self, df: pl.LazyFrame, alias: str) -> pl.LazyFrame | pl.Expr:

@@ -1214,7 +1279,7 @@ class ArrayAtIndex(Transformation):
    index: int

    name: str = 'array_at_index'
-    dtype: FeatureType = FeatureType.bool()
+    dtype: FeatureType = FeatureType.boolean()

    async def transform_pandas(self, df: pd.DataFrame) -> pd.Series:
        return pl.Series(df[self.key]).list.get(self.index).to_pandas()

@@ -1243,7 +1308,7 @@ class ArrayContains(Transformation):
    value: LiteralValue

    name: str = 'array_contains'
-    dtype: FeatureType = FeatureType.bool()
+    dtype: FeatureType = FeatureType.boolean()

    def __init__(self, key: str, value: Any | LiteralValue) -> None:
        self.key = key

@@ -1279,7 +1344,7 @@ class Contains(Transformation):
    value: str

    name: str = 'contains'
-    dtype: FeatureType = FeatureType.bool()
+    dtype: FeatureType = FeatureType.boolean()

    def __init__(self, key: str, value: str) -> None:
        self.key = key

@@ -1288,8 +1353,8 @@ def __init__(self, key: str, value: str) -> None:
    async def transform_pandas(self, df: pd.DataFrame) -> pd.Series:
        return gracefull_transformation(
            df,
-            is_valid_mask=~(df[self.key].isna()),
-            transformation=lambda dfv: dfv[self.key].astype('str').str.contains(self.value),
+            is_valid_mask=~(df[self.key].isna()),  # type: ignore
+            transformation=lambda dfv: dfv[self.key].astype('str').str.contains(self.value),  # type: ignore
        )

    async def transform_polars(self, df: pl.LazyFrame, alias: str) -> pl.LazyFrame | pl.Expr:

@@ -1322,7 +1387,7 @@ def __init__(self, key: str, orders: list[str]) -> None:
        self.orders = orders

    async def transform_pandas(self, df: pd.DataFrame) -> pd.Series:
-        return df[self.key].map(self.orders_dict)
+        return df[self.key].map(self.orders_dict)  # type: ignore

    async def transform_polars(self, df: pl.LazyFrame, alias: str) -> pl.LazyFrame | pl.Expr:
        mapper = pl.DataFrame({self.key: list(self.orders), alias: list(range(0, len(self.orders)))})

@@ -1359,7 +1424,7 @@ async def transform_pandas(self, df: pd.DataFrame) -> pd.Series:
        for k, v in self.values:
            temp_df.loc[mask] = temp_df.loc[mask].str.replace(k, v, regex=True)

-        return temp_df
+        return temp_df  # type: ignore

    async def transform_polars(self, df: pl.LazyFrame, alias: str) -> pl.LazyFrame | pl.Expr:
        collected = df.collect()

@@ -1391,7 +1456,7 @@ class Ratio(Transformation):
    denumerator: str

    name: str = 'ratio'
-    dtype: FeatureType = FeatureType.float()
+    dtype: FeatureType = FeatureType.floating_point()

    def __init__(self, numerator: str, denumerator: str) -> None:
        self.numerator = numerator

@@ -1432,7 +1497,7 @@ class DivideDenumeratorValue(Transformation):
    denumerator: LiteralValue

    name: str = 'div_denum_val'
-    dtype: FeatureType = FeatureType.float()
+    dtype: FeatureType = FeatureType.floating_point()

    def __init__(self, numerator: str, denumerator: LiteralValue) -> None:
        self.numerator = numerator

@@ -1442,7 +1507,7 @@ def __init__(self, numerator: str, denumerator: LiteralValue) -> None:
    async def transform_pandas(self, df: pd.DataFrame) -> pd.Series:
        return gracefull_transformation(
            df,
-            is_valid_mask=~(df[self.numerator].isna()),
+            is_valid_mask=~(df[self.numerator].isna()),  # type: ignore
            transformation=lambda dfv: dfv[self.numerator].astype(float) / self.denumerator.python_value,
        )

@@ -1467,10 +1532,10 @@ class IsIn(Transformation):
    key: str

    name = 'isin'
-    dtype = FeatureType.bool()
+    dtype = FeatureType.boolean()

    async def transform_pandas(self, df: pd.DataFrame) -> pd.Series:
-        return df[self.key].isin(self.values)
+        return df[self.key].isin(self.values)  # type: ignore

    async def transform_polars(self, df: pl.LazyFrame, alias: str) -> pl.LazyFrame | pl.Expr:
        return pl.col(self.key).is_in(self.values)
@@ -1494,10 +1559,10 @@ class FillNaValuesColumns(Transformation):
    name: str = 'fill_missing_key'

    async def transform_pandas(self, df: pd.DataFrame) -> pd.Series:
-        return df[self.key].fillna(df[self.fill_key])
+        return df[self.key].fillna(df[self.fill_key])  # type: ignore

    async def transform_polars(self, df: pl.LazyFrame, alias: str) -> pl.LazyFrame | pl.Expr:
-        if self.dtype == FeatureType.float():
+        if self.dtype == FeatureType.floating_point():
            return pl.col(self.key).fill_nan(pl.col(self.fill_key)).fill_null(pl.col(self.fill_key))
        else:

@@ -1525,10 +1590,10 @@ class FillNaValues(Transformation):
    name: str = 'fill_missing'

    async def transform_pandas(self, df: pd.DataFrame) -> pd.Series:
-        return df[self.key].fillna(self.value.python_value)
+        return df[self.key].fillna(self.value.python_value)  # type: ignore

    async def transform_polars(self, df: pl.LazyFrame, alias: str) -> pl.LazyFrame | pl.Expr:
-        if self.dtype == FeatureType.float():
+        if self.dtype == FeatureType.floating_point():
            return pl.col(self.key).fill_nan(self.value.python_value).fill_null(self.value.python_value)
        else:

@@ -1554,7 +1619,7 @@ class CopyTransformation(Transformation):
    name: str = 'nothing'

    async def transform_pandas(self, df: pd.DataFrame) -> pd.Series:
-        return df[self.key]
+        return df[self.key]  # type: ignore

    async def transform_polars(self, df: pl.LazyFrame, alias: str) -> pl.LazyFrame | pl.Expr:
        return pl.col(self.key).alias(alias)

@@ -1571,7 +1636,7 @@ class Floor(Transformation):
    async def transform_pandas(self, df: pd.DataFrame) -> pd.Series:
        from numpy import floor

-        return floor(df[self.key])
+        return floor(df[self.key])  # type: ignore

    async def transform_polars(self, df: pl.LazyFrame, alias: str) -> pl.LazyFrame | pl.Expr:
        return pl.col(self.key).floor().alias(alias)

@@ -1596,7 +1661,7 @@ class Ceil(Transformation):
    async def transform_pandas(self, df: pd.DataFrame) -> pd.Series:
        from numpy import ceil

-        return ceil(df[self.key])
+        return ceil(df[self.key])  # type: ignore

    async def transform_polars(self, df: pl.LazyFrame, alias: str) -> pl.LazyFrame | pl.Expr:
        return pl.col(self.key).ceil().alias(alias)

@@ -1621,7 +1686,7 @@ class Round(Transformation):
    async def transform_pandas(self, df: pd.DataFrame) -> pd.Series:
        from numpy import round

-        return round(df[self.key])
+        return round(df[self.key])  # type: ignore

    async def transform_polars(self, df: pl.LazyFrame, alias: str) -> pl.LazyFrame | pl.Expr:
        return pl.col(self.key).round(0).alias(alias)

@@ -1639,14 +1704,14 @@ def test_definition() -> TransformationTestDefinition:
class Absolute(Transformation):

    key: str
-    dtype: FeatureType = FeatureType.float()
+    dtype: FeatureType = FeatureType.floating_point()

    name: str = 'abs'

    async def transform_pandas(self, df: pd.DataFrame) -> pd.Series:
        from numpy import abs

-        return abs(df[self.key])
+        return abs(df[self.key])  # type: ignore

    async def transform_polars(self, df: pl.LazyFrame, alias: str) -> pl.LazyFrame | pl.Expr:
        return pl.col(self.key).abs().alias(alias)

@@ -1667,19 +1732,19 @@ class MapArgMax(Transformation):
    name = 'map_arg_max'

    @property
-    def dtype(self) -> FeatureType:
+    def dtype(self) -> FeatureType:  # type: ignore
        return list(self.column_mappings.values())[0].dtype

    async def transform_pandas(self, df: pd.DataFrame) -> pd.Series:
        pl_df = await self.transform_polars(pl.from_pandas(df).lazy(), 'feature')
-        return pl_df.collect().to_pandas()['feature']
+        return pl_df.collect().to_pandas()['feature']  # type: ignore

    async def transform_polars(self, df: pl.LazyFrame, alias: str) -> pl.LazyFrame | pl.Expr:
        expr: pl.Expr = pl.lit(None)

        if len(self.column_mappings) == 1:
            key, value = list(self.column_mappings.items())[0]
-            if self.dtype == FeatureType.bool():
+            if self.dtype == FeatureType.boolean():
                expr = pl.when(pl.col(key) > 0.5).then(value.python_value).otherwise(not value.python_value)
            elif self.dtype == FeatureType.string():
                expr = (

@@ -1729,7 +1794,7 @@ class WordVectoriser(Transformation):
    dtype = FeatureType.embedding(768)

    async def transform_pandas(self, df: pd.DataFrame) -> pd.Series:
-        return await self.model.vectorise_pandas(df[self.key])
+        return await self.model.vectorise_pandas(df[self.key])  # type: ignore

    async def transform_polars(self, df: pl.LazyFrame, alias: str) -> pl.LazyFrame | pl.Expr:
        return await self.model.vectorise_polars(df, self.key, alias)

@@ -1771,7 +1836,7 @@ class GrayscaleImage(Transformation):
    async def transform_polars(self, df: pl.LazyFrame, alias: str) -> pl.LazyFrame | pl.Expr:
        import numpy as np

-        def grayscale(images):
+        def grayscale(images) -> pl.Series:
            return pl.Series(
                [np.mean(image, axis=2) if len(image.shape) == 3 else image for image in images.to_list()]
            )

@@ -1785,7 +1850,7 @@ class Power(Transformation):
    key: str
    power: LiteralValue
    name = 'power'
-    dtype = FeatureType.float()
+    dtype = FeatureType.floating_point()

    async def transform_pandas(self, df: pd.DataFrame) -> pd.Series:
        return df[self.key] ** self.power.python_value

@@ -1798,9 +1863,9 @@ async def transform_polars(self, df: pl.LazyFrame, alias: str) -> pl.LazyFrame |
class PowerFeature(Transformation):

    key: str
-    power_key: float
+    power_key: str
    name = 'power_feat'
-    dtype = FeatureType.float()
+    dtype = FeatureType.floating_point()

    async def transform_pandas(self, df: pd.DataFrame) -> pd.Series:
        return df[self.key] ** df[self.power_key]

@@ -1873,14 +1938,12 @@ class ConcatStringAggregation(Transformation, PsqlTransformation, RedshiftTransf
    dtype = FeatureType.string()

    async def transform_pandas(self, df: pd.DataFrame) -> pd.Series:
-        return (
-            (await self.transform_polars(pl.from_pandas(df).lazy(), self.name))
-            .collect()
-            .to_pandas()[self.name]
-        )
+        pdf = await self.transform_polars(pl.from_pandas(df).lazy(), self.name)
+        assert isinstance(pdf, pl.LazyFrame)
+        return pdf.collect().to_pandas()[self.name]  # type: ignore

    async def transform_polars(self, df: pl.LazyFrame, alias: str) -> pl.LazyFrame | pl.Expr:
-        return df.with_columns(pl.concat_str(pl.col(self.key), sep=self.separator).alias(alias))
+        return df.with_columns(pl.concat_str(pl.col(self.key), separator=self.separator).alias(alias))

    def as_psql(self) -> str:
        return f"array_to_string(array_agg({self.key}), '{self.separator}')"

@@ -1895,7 +1958,7 @@ class SumAggregation(Transformation, PsqlTransformation, RedshiftTransformation)
    key: str

    name = 'sum_agg'
-    dtype = FeatureType.float()
+    dtype = FeatureType.floating_point()

    async def transform_pandas(self, df: pd.DataFrame) -> pd.Series:
        raise NotImplementedError()

@@ -1913,7 +1976,7 @@ class MeanAggregation(Transformation, PsqlTransformation, RedshiftTransformation
    key: str

    name = 'mean_agg'
-    dtype = FeatureType.float()
+    dtype = FeatureType.floating_point()

    async def transform_pandas(self, df: pd.DataFrame) -> pd.Series:
        raise NotImplementedError()

@@ -1931,7 +1994,7 @@ class MinAggregation(Transformation, PsqlTransformation, RedshiftTransformation)
    key: str

    name = 'min_agg'
-    dtype = FeatureType.float()
+    dtype = FeatureType.floating_point()

    async def transform_pandas(self, df: pd.DataFrame) -> pd.Series:
        raise NotImplementedError()

@@ -1949,7 +2012,7 @@ class MaxAggregation(Transformation, PsqlTransformation, RedshiftTransformation)
    key: str

    name = 'max_agg'
-    dtype = FeatureType.float()
+    dtype = FeatureType.floating_point()

    async def transform_pandas(self, df: pd.DataFrame) -> pd.Series:
        raise NotImplementedError()

@@ -1967,7 +2030,7 @@ class CountAggregation(Transformation, PsqlTransformation, RedshiftTransformatio
    key: str

    name = 'count_agg'
-    dtype = FeatureType.float()
+    dtype = FeatureType.floating_point()

    async def transform_pandas(self, df: pd.DataFrame) -> pd.Series:
        raise NotImplementedError()

@@ -1985,7 +2048,7 @@ class CountDistinctAggregation(Transformation, PsqlTransformation, RedshiftTrans
    key: str

    name = 'count_distinct_agg'
-    dtype = FeatureType.float()
+    dtype = FeatureType.floating_point()

    async def transform_pandas(self, df: pd.DataFrame) -> pd.Series:
        raise NotImplementedError()

@@ -2003,7 +2066,7 @@ class StdAggregation(Transformation, PsqlTransformation, RedshiftTransformation)
    key: str

    name = 'std_agg'
-    dtype = FeatureType.float()
+    dtype = FeatureType.floating_point()

    async def transform_pandas(self, df: pd.DataFrame) -> pd.Series:
        raise NotImplementedError()

@@ -2021,7 +2084,7 @@ class VarianceAggregation(Transformation, PsqlTransformation, RedshiftTransforma
    key: str

    name = 'var_agg'
-    dtype = FeatureType.float()
+    dtype = FeatureType.floating_point()

    async def transform_pandas(self, df: pd.DataFrame) -> pd.Series:
        raise NotImplementedError()

@@ -2039,7 +2102,7 @@ class MedianAggregation(Transformation, PsqlTransformation, RedshiftTransformati
    key: str

    name = 'median_agg'
-    dtype = FeatureType.float()
+    dtype = FeatureType.floating_point()

    async def transform_pandas(self, df: pd.DataFrame) -> pd.Series:
        raise NotImplementedError()

@@ -2058,7 +2121,7 @@ class PercentileAggregation(Transformation, PsqlTransformation, RedshiftTransfor
    percentile: float

    name = 'percentile_agg'
-    dtype = FeatureType.float()
+    dtype = FeatureType.floating_point()

    async def transform_pandas(self, df: pd.DataFrame) -> pd.Series:
        raise NotImplementedError()

@@ -2078,10 +2141,10 @@ class Clip(Transformation, PsqlTransformation, RedshiftTransformation):
    upper: LiteralValue

    name = 'clip'
-    dtype = FeatureType.float()
+    dtype = FeatureType.floating_point()

    async def transform_pandas(self, df: pd.DataFrame) -> pd.Series:
-        return df[self.key].clip(lower=self.lower.python_value, upper=self.upper.python_value)
+        return df[self.key].clip(lower=self.lower.python_value, upper=self.upper.python_value)  # type: ignore

    async def transform_polars(self, df: pl.LazyFrame, alias: str) -> pl.LazyFrame | pl.Expr:
        return pl.col(self.key).clip(lower_bound=self.lower.python_value, upper_bound=self.upper.python_value)

@@ -2117,7 +2180,7 @@ async def transform_pandas(self, df: pd.DataFrame) -> pd.Series:
        from httpx import AsyncClient

        s3 = S3Client(AsyncClient(), config=self.config.s3_config)
-        return df[self.key].apply(lambda x: s3.signed_download_url(x, max_age=self.max_age_seconds))
+        return df[self.key].apply(lambda x: s3.signed_download_url(x, max_age=self.max_age_seconds))  # type: ignore

    async def transform_polars(self, df: pl.LazyFrame, alias: str) -> pl.LazyFrame | pl.Expr:
        from aioaws.s3 import S3Client

@@ -2146,9 +2209,9 @@ async def transform_pandas(self, df: pd.DataFrame) -> pd.Series:
        tran = await self.transform_polars(data, 'feature')

        if isinstance(tran, pl.LazyFrame):
-            return tran.collect().to_pandas()['feature']
+            return tran.collect().to_pandas()['feature']  # type: ignore

-        return data.select(tran).collect().to_pandas()['feature']
+        return data.select(tran).collect().to_pandas()['feature']  # type: ignore

    async def transform_polars(self, df: pl.LazyFrame, alias: str) -> pl.LazyFrame | pl.Expr:
        if df.schema[self.key].is_(pl.Utf8):

@@ -2183,7 +2246,7 @@ async def transform_pandas(self, df: pd.DataFrame) -> pd.Series:
        for index, row in df.iterrows():
            response.iloc[index] = await client.generate(
                model=self.model,
-                prompt=row[self.key],
+                prompt=row[self.key],  # type: ignore
                system=self.system,
            )

@@ -2239,7 +2302,8 @@ async def transform_pandas(self, df: pd.DataFrame) -> pd.Series:
        response = pd.Series([[]] * df.shape[0])

        for index, row in df.iterrows():
-            response.iloc[index] = await client.embeddings(self.model, row[self.key])['embedding']
+            embedded = await client.embeddings(self.model, row[self.key])  # type: ignore
+            response.iloc[index] = embedded['embedding']

        return response
data.select(tran).collect().to_pandas()['feature'] + return data.select(tran).collect().to_pandas()['feature'] # type: ignore async def transform_polars(self, df: pl.LazyFrame, alias: str) -> pl.LazyFrame | pl.Expr: if df.schema[self.key].is_(pl.Utf8): @@ -2183,7 +2246,7 @@ async def transform_pandas(self, df: pd.DataFrame) -> pd.Series: for index, row in df.iterrows(): response.iloc[index] = await client.generate( model=self.model, - prompt=row[self.key], + prompt=row[self.key], # type: ignore system=self.system, ) @@ -2239,7 +2302,8 @@ async def transform_pandas(self, df: pd.DataFrame) -> pd.Series: response = pd.Series([[]] * df.shape[0]) for index, row in df.iterrows(): - response.iloc[index] = await client.embeddings(self.model, row[self.key])['embedding'] + embedded = await client.embeddings(self.model, row[self.key]) # type: ignore + response.iloc[index] = embedded['embedding'] return response diff --git a/aligned/schemas/vector_storage.py b/aligned/schemas/vector_storage.py index 2ac3e02a..3d11f960 100644 --- a/aligned/schemas/vector_storage.py +++ b/aligned/schemas/vector_storage.py @@ -16,7 +16,7 @@ class VectorStorageFactory: _shared: VectorStorageFactory | None = None - def __init__(self): + def __init__(self) -> None: self.supported_storages = {} for storage in VectorStorage.__subclasses__(): @@ -84,7 +84,7 @@ def pa_dtype(dtype: FeatureType) -> pa.DataType: return pa_types[dtype.name] if dtype.is_datetime: - return pa.float64() + return pa.timestamp('us', tz=dtype.datetime_timezone) if dtype.is_embedding: embedding_size = dtype.embedding_size() diff --git a/aligned/server.py b/aligned/server.py index 9a843c4a..467bed27 100644 --- a/aligned/server.py +++ b/aligned/server.py @@ -107,7 +107,9 @@ def model_path(name: str, feature_store: ContractStore, app: FastAPI) -> None: from aligned.feature_store import RawStringFeatureRequest model = feature_store.models[name] - features = {f'{feature.location.identifier}:{feature.name}' for feature in model.features} + features = { + f'{feature.location.identifier}:{feature.name}' for feature in model.features.default_features + } feature_request = feature_store.requests_for(RawStringFeatureRequest(features)) entities: set[Feature] = set() @@ -146,7 +148,7 @@ def model_path(name: str, feature_store: ContractStore, app: FastAPI) -> None: # Using POST as this can have a body with the fact / entity table @app.post(f'/models/{name}', openapi_extra=featch_api_schema) - async def get_model(entity_values: dict) -> str: + async def get_model(entity_values: dict) -> Response: missing_entities = {entity.name for entity in entities if entity.name not in entity_values} if missing_entities: raise HTTPException(status_code=400, detail=f'Missing entity values {missing_entities}') @@ -230,7 +232,7 @@ async def root() -> RedirectResponse: return RedirectResponse('/docs') @app.post('/features') - async def features(payload: APIFeatureRequest) -> dict: + async def features(payload: APIFeatureRequest) -> Response: import json df = await feature_store.features_for( @@ -252,9 +254,10 @@ def run( app = FastAPIServer.app(feature_store) - uvicorn.run(app, host=host or '127.0.0.1', port=port or 8000, workers=workers or workers) + uvicorn.run(app, host=host or '127.0.0.1', port=port or 8000, workers=workers or workers) # type: ignore +@dataclass class FeatureServer: definition_reference: StorageFileReference diff --git a/aligned/sources/azure_blob_storage.py b/aligned/sources/azure_blob_storage.py index 0d964510..bb3a198e 100644 --- 
a/aligned/sources/azure_blob_storage.py +++ b/aligned/sources/azure_blob_storage.py @@ -7,9 +7,8 @@ from io import BytesIO from pathlib import Path -import pandas as pd import polars as pl -from aligned.data_source.batch_data_source import BatchDataSource, ColumnFeatureMappable +from aligned.data_source.batch_data_source import CodableBatchDataSource, ColumnFeatureMappable from aligned.exceptions import UnableToFindFileException from aligned.feature_source import WritableFeatureSource from aligned.local.job import FileDateJob, FileFactualJob, FileFullJob @@ -19,18 +18,25 @@ from aligned.sources.local import ( CsvConfig, DataFileReference, + Deletable, + DeltaFileConfig, ParquetConfig, StorageFileReference, Directory, data_file_freshness, + upsert_on_column, ) from aligned.storage import Storage from httpx import HTTPStatusError +from aligned.lazy_imports import pandas as pd try: - from azure.storage.blob import BlobServiceClient + from azure.storage.blob import BlobServiceClient # type: ignore except ModuleNotFoundError: - BlobServiceClient = None + + class BlobServiceClient: + pass + logger = logging.getLogger(__name__) @@ -79,12 +85,14 @@ def parquet_at( self, path: str, mapping_keys: dict[str, str] | None = None, + config: ParquetConfig | None = None, date_formatter: DateFormatter | None = None, ) -> AzureBlobParquetDataSource: return AzureBlobParquetDataSource( self, path, mapping_keys=mapping_keys or {}, + parquet_config=config or ParquetConfig(), date_formatter=date_formatter or DateFormatter.noop(), ) @@ -109,6 +117,7 @@ def csv_at( self, path: str, mapping_keys: dict[str, str] | None = None, + csv_config: CsvConfig | None = None, date_formatter: DateFormatter | None = None, ) -> AzureBlobCsvDataSource: return AzureBlobCsvDataSource( @@ -116,12 +125,14 @@ def csv_at( path, mapping_keys=mapping_keys or {}, date_formatter=date_formatter or DateFormatter.unix_timestamp(), + csv_config=csv_config or CsvConfig(), ) def delta_at( self, path: str, mapping_keys: dict[str, str] | None = None, + config: DeltaFileConfig | None = None, date_formatter: DateFormatter | None = None, ) -> AzureBlobDeltaDataSource: return AzureBlobDeltaDataSource( @@ -180,6 +191,11 @@ def storage(self) -> BlobStorage: return BlobStorage(self) +class AzureConfigurable: + 'Something that contains an azure config' + config: AzureBlobConfig + + @dataclass class AzureBlobDirectory(Directory): @@ -193,12 +209,14 @@ def parquet_at( self, path: str, mapping_keys: dict[str, str] | None = None, + config: ParquetConfig | None = None, date_formatter: DateFormatter | None = None, ) -> AzureBlobParquetDataSource: sub_path = self.sub_path / path return self.config.parquet_at( sub_path.as_posix(), mapping_keys=mapping_keys, + config=config, date_formatter=date_formatter or DateFormatter.noop(), ) @@ -223,21 +241,28 @@ def csv_at( self, path: str, mapping_keys: dict[str, str] | None = None, + csv_config: CsvConfig | None = None, date_formatter: DateFormatter | None = None, ) -> AzureBlobCsvDataSource: sub_path = self.sub_path / path return self.config.csv_at( - sub_path.as_posix(), date_formatter=date_formatter or DateFormatter.unix_timestamp() + sub_path.as_posix(), + mapping_keys=mapping_keys, + date_formatter=date_formatter or DateFormatter.unix_timestamp(), + csv_config=csv_config or CsvConfig(), ) def delta_at( self, path: str, mapping_keys: dict[str, str] | None = None, + config: DeltaFileConfig | None = None, date_formatter: DateFormatter | None = None, ) -> AzureBlobDeltaDataSource: sub_path = self.sub_path / path - return 
self.config.delta_at(sub_path.as_posix(), mapping_keys, date_formatter=date_formatter) + return self.config.delta_at( + sub_path.as_posix(), mapping_keys, config=config, date_formatter=date_formatter + ) def sub_directory(self, path: str) -> AzureBlobDirectory: return AzureBlobDirectory(self.config, self.sub_path / path) @@ -290,9 +315,7 @@ async def write(self, content: bytes) -> None: @dataclass class AzureBlobCsvDataSource( - BatchDataSource, - DataFileReference, - ColumnFeatureMappable, + CodableBatchDataSource, DataFileReference, ColumnFeatureMappable, AzureConfigurable ): config: AzureBlobConfig path: str @@ -370,7 +393,7 @@ async def write(self, job: RetrivalJob, requests: list[RetrivalRequest]) -> None await self.write_polars(df.select(features)) @classmethod - def multi_source_features_for( + def multi_source_features_for( # type: ignore cls, facts: RetrivalJob, requests: list[tuple[AzureBlobCsvDataSource, RetrivalRequest]] ) -> RetrivalJob: @@ -408,7 +431,14 @@ def all_between_dates( @dataclass -class AzureBlobPartitionedParquetDataSource(BatchDataSource, DataFileReference, ColumnFeatureMappable): +class AzureBlobPartitionedParquetDataSource( + CodableBatchDataSource, + DataFileReference, + ColumnFeatureMappable, + Deletable, + WritableFeatureSource, + AzureConfigurable, +): config: AzureBlobConfig directory: str partition_keys: list[str] @@ -465,12 +495,25 @@ async def write_pandas(self, df: pd.DataFrame) -> None: await self.write_polars(pl.from_pandas(df).lazy()) async def write_polars(self, df: pl.LazyFrame) -> None: - url = f"az://{self.directory}" - creds = self.config.read_creds() - df.collect().to_pandas().to_parquet(url, partition_cols=self.partition_keys, storage_options=creds) + from adlfs import AzureBlobFileSystem + from pyarrow.parquet import write_to_dataset + + fs = AzureBlobFileSystem(**self.config.read_creds()) # type: ignore + + pyarrow_options = { + 'partition_cols': self.partition_keys, + 'filesystem': fs, + 'compression': 'zstd', + } + + write_to_dataset( + table=df.collect().to_arrow(), + root_path=self.directory, + **(pyarrow_options or {}), + ) @classmethod - def multi_source_features_for( + def multi_source_features_for( # type: ignore cls, facts: RetrivalJob, requests: list[tuple[AzureBlobParquetDataSource, RetrivalRequest]] ) -> RetrivalJob: @@ -511,12 +554,83 @@ async def insert(self, job: RetrivalJob, request: RetrivalRequest) -> None: df = await job.select(features).to_lazy_polars() await self.write_polars(df) + async def upsert(self, job: RetrivalJob, request: RetrivalRequest) -> None: + from adlfs import AzureBlobFileSystem + + fs = AzureBlobFileSystem(**self.config.read_creds()) # type: ignore + + def delete_directory_recursively(directory_path: str) -> None: + paths = fs.find(directory_path) + + for path in paths: + if fs.info(path)['type'] == 'directory': + delete_directory_recursively(path) + else: + fs.rm(path) + + fs.rmdir(directory_path) + + upsert_on = sorted(request.entity_names) + + df = await job.select(request.all_returned_columns).to_polars() + unique_partitions = df.select(self.partition_keys).unique() + + filters: list[pl.Expr] = [] + for row in unique_partitions.iter_rows(named=True): + current: pl.Expr | None = None + + for key, value in row.items(): + if current is not None: + current = current & (pl.col(key) == value) + else: + current = pl.col(key) == value + + if current is not None: + filters.append(current) + + try: + existing_df = (await self.to_lazy_polars()).filter(*filters) + write_df = upsert_on_column(upsert_on, 
df.lazy(), existing_df).collect() + except (UnableToFindFileException, pl.ComputeError): + write_df = df.lazy() + + for row in unique_partitions.iter_rows(named=True): + + dir = Path(self.directory) + for partition_key in self.partition_keys: + dir = dir / f"{partition_key}={row[partition_key]}" + + if fs.exists(dir.as_posix()): + delete_directory_recursively(dir.as_posix()) + + await self.write_polars(write_df.lazy()) + + async def delete(self) -> None: + from adlfs import AzureBlobFileSystem + + fs = AzureBlobFileSystem(**self.config.read_creds()) # type: ignore + + def delete_directory_recursively(directory_path: str) -> None: + paths = fs.find(directory_path) + + for path in paths: + if fs.info(path)['type'] == 'directory': + delete_directory_recursively(path) + else: + fs.rm(path) + + fs.rmdir(directory_path) + + delete_directory_recursively(self.directory) + + async def overwrite(self, job: RetrivalJob, request: RetrivalRequest) -> None: + await self.delete() + await self.insert(job, request) + @dataclass class AzureBlobParquetDataSource( - BatchDataSource, - DataFileReference, - ColumnFeatureMappable, + CodableBatchDataSource, DataFileReference, ColumnFeatureMappable, AzureConfigurable ): config: AzureBlobConfig path: str @@ -591,7 +705,7 @@ async def write_polars(self, df: pl.LazyFrame) -> None: df.collect().to_pandas().to_parquet(url, storage_options=creds) @classmethod - def multi_source_features_for( + def multi_source_features_for( # type: ignore cls, facts: RetrivalJob, requests: list[tuple[AzureBlobParquetDataSource, RetrivalRequest]] ) -> RetrivalJob: @@ -630,10 +744,12 @@ def all_between_dates( @dataclass class AzureBlobDeltaDataSource( - BatchDataSource, + CodableBatchDataSource, DataFileReference, ColumnFeatureMappable, WritableFeatureSource, + Deletable, + AzureConfigurable, ): config: AzureBlobConfig path: str @@ -687,7 +803,7 @@ async def schema(self) -> dict[str, FeatureType]: raise UnableToFindFileException() from error @classmethod - def multi_source_features_for( + def multi_source_features_for( # type: ignore cls, facts: RetrivalJob, requests: list[tuple[AzureBlobDeltaDataSource, RetrivalRequest]] ) -> RetrivalJob: @@ -857,3 +973,14 @@ async def upsert(self, job: RetrivalJob, request: RetrivalRequest) -> None: storage_options=self.config.read_creds(), delta_write_options={'schema': pa.schema(schema)}, ) + + async def delete(self) -> None: + from deltalake import DeltaTable + + url = f"az://{self.path}" + table = DeltaTable(url, storage_options=self.config.read_creds()) + table.delete() + + async def overwrite(self, job: RetrivalJob, request: RetrivalRequest) -> None: + await self.delete() + await self.insert(job, request) diff --git a/aligned/sources/lancedb.py b/aligned/sources/lancedb.py index dfd78e17..d0217ed0 100644 --- a/aligned/sources/lancedb.py +++ b/aligned/sources/lancedb.py @@ -1,11 +1,13 @@ from dataclasses import dataclass from typing import TYPE_CHECKING import polars as pl -from aligned.data_source.batch_data_source import BatchDataSource +from datetime import datetime +from aligned.data_source.batch_data_source import CodableBatchDataSource from aligned.feature_source import WritableFeatureSource from aligned.request.retrival_request import RetrivalRequest -from aligned.schemas.feature import Feature +from aligned.schemas.feature import Feature, EventTimestamp from aligned.sources.local import Deletable +import logging from aligned.sources.vector_index import VectorIndex @@ -18,16 +20,19 @@ except ImportError: lancedb = None +logger = 
logging.getLogger(__name__) + + @dataclass class LanceDBConfig: path: str - async def connect(self) -> 'lancedb.AsyncConnection': + async def connect(self) -> 'lancedb.AsyncConnection': # type: ignore + assert lancedb is not None, '`lancedb` is not installed' return await lancedb.connect_async(self.path) - async def connect_to_table(self, table: str) -> 'lancedb.AsyncTable': + async def connect_to_table(self, table: str) -> 'lancedb.AsyncTable': # type: ignore conn = await self.connect() return await conn.open_table(table) @@ -36,7 +41,7 @@ def table(self, name: str) -> 'LanceDbTable': @dataclass -class LanceDbTable(VectorIndex, BatchDataSource, WritableFeatureSource, Deletable): +class LanceDbTable(VectorIndex, CodableBatchDataSource, WritableFeatureSource, Deletable): table_name: str config: LanceDBConfig @@ -55,10 +60,75 @@ def as_vector_index(self, name: str) -> 'LanceDbTable': self._vector_index_name = name return self + async def freshness(self, event_timestamp: EventTimestamp) -> datetime | None: + from lancedb import AsyncTable + + try: + lance_table: AsyncTable = await self.config.connect_to_table(self.table_name) + table = await lance_table.query().select([event_timestamp.name]).to_arrow() + df = pl.from_arrow(table) + if isinstance(df, pl.Series): + col = df + else: + col = df.get_column(event_timestamp.name) + + max_value = col.max() + if max_value is not None: + assert isinstance(max_value, datetime) + + return max_value + except ValueError: + logger.info(f"Unable to load freshness. Assuming that the table '{self.table_name}' does not exist") + return None + + async def create(self, request: RetrivalRequest) -> None: + from aligned.schemas.vector_storage import pyarrow_schema + + db = await self.config.connect() + schema = pyarrow_schema(list(request.all_returned_features)) + + await db.create_table(self.table_name, schema=schema) + + async def upsert(self, job: 'RetrivalJob', request: RetrivalRequest) -> None: + import lancedb + + upsert_keys = list(request.entity_names) + + conn = lancedb.connect(self.config.path) + table = conn.open_table(self.table_name) + + df = await job.to_polars() + if df.is_empty(): + return + + arrow_table = df.to_arrow() + + # There seems to be a bug when passing in an iterator, + # as lancedb tries to access .iter(), which does not always exist + ( + table.merge_insert(upsert_keys[0] if len(upsert_keys) == 1 else upsert_keys) + .when_matched_update_all() + .when_not_matched_insert_all() + .execute(arrow_table) + ) + async def insert(self, job: 'RetrivalJob', request: RetrivalRequest) -> None: - table = await self.config.connect_to_table(self.table_name) - df = (await job.to_polars()).to_arrow() - await table.add(df) + try: + table = await self.config.connect_to_table(self.table_name) + except ValueError: + await self.create(request) + table = await self.config.connect_to_table(self.table_name) + + df = await job.to_polars() + if df.is_empty(): + return + + arrow_table = df.to_arrow() + await table.add(arrow_table) + + async def overwrite(self, job: 'RetrivalJob', request: RetrivalRequest) -> None: + await self.delete() + await self.insert(job, request) async def delete(self) -> None: conn = await self.config.connect() @@ -82,7 +152,7 @@ async def load() -> pl.LazyFrame: return RetrivalJob.from_lazy_function(load, request) def nearest_n_to( - self, data: 'RetrivalJob', number_of_records: int, retrival_request: RetrivalRequest + self, data: 'RetrivalJob', number_of_records: int, request: RetrivalRequest ) -> 'RetrivalJob': from aligned.retrival_job import 
RetrivalJob @@ -99,9 +169,8 @@ def first_embedding(features: set[Feature]) -> Feature | None: result: pl.DataFrame | None = None embedding = first_embedding(data.request_result.features) - assert embedding, 'Expected to a least find one embedding in the input data' - + org_columns = df.columns df_cols = len(df.columns) for item in df.iter_rows(named=True): @@ -114,7 +183,12 @@ def first_embedding(features: set[Feature]) -> Feature | None: polars_df = polars_df.select(pl.exclude('_distance')) if df_cols > 1: - polars_df = polars_df.hstack(pl.DataFrame(item).select(pl.exclude(embedding.name))) + logger.info(f"Stacking {polars_df.columns} and {item.keys()}") + polars_df = polars_df.select(pl.exclude(org_columns)).hstack( + pl.DataFrame([item] * polars_df.height) + .select(org_columns) + .select(pl.exclude(embedding.name)) + ) if result is None: result = polars_df @@ -126,10 +200,10 @@ def first_embedding(features: set[Feature]) -> Feature | None: else: return result.lazy() - return RetrivalJob.from_lazy_function(load, retrival_request) + return RetrivalJob.from_lazy_function(load, request) @classmethod - def multi_source_features_for( + def multi_source_features_for( # type: ignore cls, facts: 'RetrivalJob', requests: list[tuple['LanceDbTable', RetrivalRequest]] ) -> 'RetrivalJob': from aligned.retrival_job import RetrivalJob diff --git a/aligned/sources/local.py b/aligned/sources/local.py index 0cc0af77..e8eb84f7 100644 --- a/aligned/sources/local.py +++ b/aligned/sources/local.py @@ -1,25 +1,25 @@ from __future__ import annotations import logging +import shutil from dataclasses import dataclass, field from pathlib import Path from typing import TYPE_CHECKING, Literal, Protocol from uuid import uuid4 -import pandas as pd +from aligned.lazy_imports import pandas as pd import polars as pl from httpx import HTTPStatusError from aligned.data_file import DataFileReference, upsert_on_column -from aligned.data_source.batch_data_source import BatchDataSource, ColumnFeatureMappable -from aligned.enricher import CsvFileEnricher, Enricher, LoadedStatEnricher, TimespanSelector +from aligned.data_source.batch_data_source import CodableBatchDataSource, ColumnFeatureMappable from aligned.exceptions import UnableToFindFileException from aligned.local.job import FileDateJob, FileFactualJob, FileFullJob from aligned.request.retrival_request import RetrivalRequest from aligned.retrival_job import RetrivalJob from aligned.s3.storage import FileStorage, HttpStorage from aligned.schemas.codable import Codable -from aligned.schemas.feature import EventTimestamp, FeatureType +from aligned.schemas.feature import EventTimestamp, FeatureType, Feature from aligned.storage import Storage from aligned.feature_source import WritableFeatureSource from aligned.schemas.date_formatter import DateFormatter @@ -71,19 +71,55 @@ async def as_repo_definition(self) -> RepoDefinition: return RepoDefinition.from_json(file) -async def data_file_freshness(reference: DataFileReference, column_name: str) -> datetime | None: +async def data_file_freshness( + reference: DataFileReference, column_name: str, formatter: DateFormatter | None = None +) -> datetime | None: try: + formatter = formatter or DateFormatter.unix_timestamp() file = await reference.to_lazy_polars() if isinstance(reference, ColumnFeatureMappable): source_column = reference.feature_identifier_for([column_name])[0] else: source_column = column_name - return file.select(source_column).max().collect()[0, source_column] + return 
file.select(formatter.decode_polars(source_column)).max().collect()[0, source_column] except UnableToFindFileException: return None +def fill_missing_in_request( + request: RetrivalRequest, df: pl.LazyFrame, feature_rename: dict[str, str] +) -> pl.LazyFrame: + existing_columns = df.columns + + missing_features = [ + feature + for feature in request.features + if feature_rename.get(feature.name, feature.name) not in existing_columns + ] + + if missing_features: + return fill_with_default(missing_features, df, feature_rename) + else: + return df + + +def fill_with_default( + features: list[Feature], df: pl.LazyFrame, feature_rename: dict[str, str] +) -> pl.LazyFrame: + + default_values = [ + (feature_rename.get(feature.name, feature.name), feature.default_value.python_value) + for feature in features + if feature.default_value is not None + ] + + if not default_values: + return df + + return df.with_columns([pl.lit(value).alias(feature_name) for feature_name, value in default_values]) + + def create_parent_dir(path: str) -> None: parents = [] @@ -105,8 +141,12 @@ def do_file_exist(path: str) -> bool: def delete_path(path: str) -> None: path_obj = Path(path) + + if not path_obj.exists(): + return + if path_obj.is_dir(): - path_obj.rmdir() + shutil.rmtree(path) else: Path(path).unlink() @@ -124,7 +164,7 @@ class CsvConfig(Codable): @dataclass class CsvFileSource( - BatchDataSource, ColumnFeatureMappable, DataFileReference, WritableFeatureSource, Deletable + CodableBatchDataSource, ColumnFeatureMappable, DataFileReference, WritableFeatureSource, Deletable ): """ A source pointing to a CSV file @@ -179,7 +219,7 @@ async def to_lazy_polars(self) -> pl.LazyFrame: buffer = await HttpStorage().read(self.path) io_buffer = BytesIO(buffer) io_buffer.seek(0) - return pl.read_csv(io_buffer, separator=self.csv_config.seperator, try_parse_dates=True).lazy() + return pl.read_csv(io_buffer, separator=self.csv_config.seperator).lazy() if not do_file_exist(self.path): raise UnableToFindFileException(self.path) @@ -197,9 +237,7 @@ async def to_lazy_polars(self) -> pl.LazyFrame: reverse_mapping = {v: k for k, v in self.mapping_keys.items()} schema = {reverse_mapping.get(name, name): dtype for name, dtype in schema.items()} - return pl.scan_csv( - self.path, dtypes=schema, separator=self.csv_config.seperator, try_parse_dates=True - ) + return pl.scan_csv(self.path, schema_overrides=schema, separator=self.csv_config.seperator) except OSError: raise UnableToFindFileException(self.path) @@ -234,6 +272,9 @@ async def insert(self, job: RetrivalJob, request: RetrivalRequest) -> None: if feature.dtype.is_datetime: data = data.with_columns(self.formatter.encode_polars(feature.name)) + if request.event_timestamp: + data = data.with_columns(self.formatter.encode_polars(request.event_timestamp.name)) + if self.mapping_keys: columns = self.feature_identifier_for(data.columns) data = data.rename(dict(zip(data.columns, columns))) @@ -253,6 +294,9 @@ async def overwrite(self, job: RetrivalJob, request: RetrivalRequest) -> None: if feature.dtype.is_datetime: data = data.with_columns(self.formatter.encode_polars(feature.name)) + if request.event_timestamp: + data = data.with_columns(self.formatter.encode_polars(request.event_timestamp.name)) + if self.mapping_keys: columns = self.feature_identifier_for(data.columns) data = data.rename(dict(zip(data.columns, columns))) @@ -273,34 +317,7 @@ async def write_polars(self, df: pl.LazyFrame) -> None: create_parent_dir(self.path) await self.write_pandas(df.collect().to_pandas()) - def 
std( - self, columns: set[str], time: TimespanSelector | None = None, limit: int | None = None - ) -> Enricher: - return LoadedStatEnricher( - stat='std', - columns=list(columns), - enricher=self.enricher().selector(time, limit), - mapping_keys=self.mapping_keys, - ) - - def mean( - self, columns: set[str], time: TimespanSelector | None = None, limit: int | None = None - ) -> Enricher: - return LoadedStatEnricher( - stat='mean', - columns=list(columns), - enricher=self.enricher().selector(time, limit), - mapping_keys=self.mapping_keys, - ) - - def enricher(self) -> CsvFileEnricher: - return CsvFileEnricher(file=self.path) - def all_data(self, request: RetrivalRequest, limit: int | None) -> RetrivalJob: - from aligned.schemas.constraints import Optional - - optional_constraint = Optional() - with_schema = CsvFileSource( path=self.path, mapping_keys=self.mapping_keys, @@ -309,8 +326,7 @@ def all_data(self, request: RetrivalRequest, limit: int | None) -> RetrivalJob: expected_schema={ feat.name: feat.dtype for feat in request.features.union(request.entities) - if not (feat.constraints and optional_constraint in feat.constraints) - and not feat.name.isdigit() + if (feat.default_value is None) and not feat.name.isdigit() }, ) return FileFullJob(with_schema, request, limit, date_formatter=self.formatter) @@ -327,7 +343,7 @@ def all_between_dates( ) @classmethod - def multi_source_features_for( + def multi_source_features_for( # type: ignore cls, facts: RetrivalJob, requests: list[tuple[CsvFileSource, RetrivalRequest]] ) -> RetrivalJob: sources = {source for source, _ in requests} @@ -364,7 +380,7 @@ async def feature_view_code(self, view_name: str) -> str: ) async def freshness(self, event_timestamp: EventTimestamp) -> datetime | None: - return await data_file_freshness(self, event_timestamp.name) + return await data_file_freshness(self, event_timestamp.name, self.formatter) @dataclass @@ -374,13 +390,12 @@ class ParquetConfig(Codable): """ engine: Literal['auto', 'pyarrow', 'fastparquet'] = field(default='auto') - compression: Literal['snappy', 'gzip', 'brotli', None] = field(default='snappy') - should_write_index: bool = field(default=False) + compression: Literal['snappy', 'gzip', 'brotli'] = field(default='snappy') @dataclass class PartitionedParquetFileSource( - BatchDataSource, ColumnFeatureMappable, DataFileReference, WritableFeatureSource, Deletable + CodableBatchDataSource, ColumnFeatureMappable, DataFileReference, WritableFeatureSource, Deletable ): """ A source pointing to a Parquet file @@ -418,11 +433,10 @@ async def to_pandas(self) -> pd.DataFrame: return (await self.to_lazy_polars()).collect().to_pandas() async def to_lazy_polars(self) -> pl.LazyFrame: - glob_path = f'{self.directory}/**/*.parquet' try: return pl.scan_parquet(glob_path, retries=3) - except OSError: + except (OSError, FileNotFoundError): raise UnableToFindFileException(self.directory) async def write_polars(self, df: pl.LazyFrame) -> None: @@ -451,7 +465,7 @@ def all_between_dates( ) @classmethod - def multi_source_features_for( + def multi_source_features_for( # type: ignore cls, facts: RetrivalJob, requests: list[tuple[ParquetFileSource, RetrivalRequest]] ) -> RetrivalJob: @@ -487,15 +501,54 @@ async def insert(self, job: RetrivalJob, request: RetrivalRequest) -> None: df = await job.to_lazy_polars() await self.write_polars(df) + async def upsert(self, job: RetrivalJob, request: RetrivalRequest) -> None: + import shutil + + upsert_on = sorted(request.entity_names) + + df = await 
job.select(request.all_returned_columns).to_polars() + unique_partitions = df.select(self.partition_keys).unique() + + filters: list[pl.Expr] = [] + for row in unique_partitions.iter_rows(named=True): + current: pl.Expr | None = None + + for key, value in row.items(): + if current is not None: + current = current & (pl.col(key) == value) + else: + current = pl.col(key) == value + + if current is not None: + filters.append(current) + + try: + existing_df = (await self.to_lazy_polars()).filter(*filters) + write_df = upsert_on_column(upsert_on, df.lazy(), existing_df).collect() + except (UnableToFindFileException, pl.ComputeError): + write_df = df.lazy() + + for row in unique_partitions.iter_rows(named=True): + dir = Path(self.directory) + for partition_key in self.partition_keys: + dir = dir / f"{partition_key}={row[partition_key]}" + + if dir.exists(): + shutil.rmtree(dir.as_posix()) + + await self.write_polars(write_df.lazy()) + async def overwrite(self, job: RetrivalJob, request: RetrivalRequest) -> None: import shutil - shutil.rmtree(self.directory) + if Path(self.directory).exists(): + shutil.rmtree(self.directory) + await self.insert(job, request) @dataclass -class ParquetFileSource(BatchDataSource, ColumnFeatureMappable, DataFileReference, Deletable): +class ParquetFileSource(CodableBatchDataSource, ColumnFeatureMappable, DataFileReference, Deletable): """ A source pointing to a Parquet file """ @@ -535,12 +588,7 @@ async def read_pandas(self) -> pd.DataFrame: async def write_pandas(self, df: pd.DataFrame) -> None: create_parent_dir(self.path) - df.to_parquet( - self.path, - engine=self.config.engine, - compression=self.config.compression, - index=self.config.should_write_index, - ) + df.to_parquet(self.path, engine=self.config.engine, compression=self.config.compression, index=False) async def to_lazy_polars(self) -> pl.LazyFrame: @@ -571,7 +619,7 @@ def all_between_dates( ) @classmethod - def multi_source_features_for( + def multi_source_features_for( # type: ignore cls, facts: RetrivalJob, requests: list[tuple[ParquetFileSource, RetrivalRequest]] ) -> RetrivalJob: @@ -615,7 +663,7 @@ class DeltaFileConfig(Codable): @dataclass class DeltaFileSource( - BatchDataSource, ColumnFeatureMappable, DataFileReference, WritableFeatureSource, Deletable + CodableBatchDataSource, ColumnFeatureMappable, DataFileReference, WritableFeatureSource, Deletable ): """ A source pointing to a Parquet file @@ -673,7 +721,7 @@ def all_between_dates( ) @classmethod - def multi_source_features_for( + def multi_source_features_for( # type: ignore cls, facts: RetrivalJob, requests: list[tuple[DeltaFileSource, RetrivalRequest]] ) -> RetrivalJob: @@ -756,7 +804,7 @@ def json_at(self, path: str) -> StorageFileReference: def csv_at( self, path: str, mapping_keys: dict[str, str] | None = None, csv_config: CsvConfig | None = None - ) -> BatchDataSource: + ) -> CodableBatchDataSource: ... def partitioned_parquet_at( @@ -766,17 +814,21 @@ def partitioned_parquet_at( mapping_keys: dict[str, str] | None = None, config: ParquetConfig | None = None, date_formatter: DateFormatter | None = None, - ) -> PartitionedParquetFileSource: + ) -> CodableBatchDataSource: ... def parquet_at( - self, path: str, mapping_keys: dict[str, str] | None = None, config: ParquetConfig | None = None - ) -> BatchDataSource: + self, + path: str, + mapping_keys: dict[str, str] | None = None, + config: ParquetConfig | None = None, + date_formatter: DateFormatter | None = None, + ) -> CodableBatchDataSource: ... 
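+ # These factory methods all return the CodableBatchDataSource base type, + # presumably so a produced source can be encoded into a compiled repo + # definition regardless of the concrete file format.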
def delta_at( self, path: str, mapping_keys: dict[str, str] | None = None, config: DeltaFileConfig | None = None - ) -> BatchDataSource: + ) -> CodableBatchDataSource: ... def sub_directory(self, path: str) -> Directory: @@ -809,10 +861,17 @@ def csv_at( ) def parquet_at( - self, path: str, mapping_keys: dict[str, str] | None = None, config: ParquetConfig | None = None + self, + path: str, + mapping_keys: dict[str, str] | None = None, + config: ParquetConfig | None = None, + date_formatter: DateFormatter | None = None, ) -> ParquetFileSource: return ParquetFileSource( - path=self.path_string(path), mapping_keys=mapping_keys or {}, config=config or ParquetConfig() + path=self.path_string(path), + mapping_keys=mapping_keys or {}, + config=config or ParquetConfig(), + date_formatter=date_formatter or DateFormatter.noop(), ) def partitioned_parquet_at( @@ -937,11 +996,15 @@ class LiteralReference(DataFileReference): file: pl.LazyFrame - def __init__(self, file: pl.LazyFrame | pd.DataFrame) -> None: - if isinstance(file, pd.DataFrame): + def __init__(self, file: pl.LazyFrame | pd.DataFrame | pl.DataFrame) -> None: + if isinstance(file, pl.DataFrame): + self.file = file.lazy() + elif isinstance(file, pl.LazyFrame): + self.file = file + elif isinstance(file, pd.DataFrame): self.file = pl.from_pandas(file).lazy() else: - self.file = file + raise ValueError(f"Unsupported type {type(file)}") def job_group_key(self) -> str: return str(uuid4()) diff --git a/aligned/sources/psql.py b/aligned/sources/psql.py index 5df6fd03..72f85bea 100644 --- a/aligned/sources/psql.py +++ b/aligned/sources/psql.py @@ -3,7 +3,7 @@ from dataclasses import dataclass from typing import TYPE_CHECKING, Any -from aligned.data_source.batch_data_source import BatchDataSource, ColumnFeatureMappable +from aligned.data_source.batch_data_source import CodableBatchDataSource, ColumnFeatureMappable from aligned.feature_source import WritableFeatureSource from aligned.request.retrival_request import RetrivalRequest from aligned.retrival_job import RetrivalJob @@ -13,7 +13,6 @@ from aligned.schemas.feature import FeatureType if TYPE_CHECKING: - from aligned.enricher import Enricher from aligned.schemas.feature import EventTimestamp @@ -45,11 +44,6 @@ def localhost(db: str, credentials: tuple[str, str] | None = None) -> PostgreSQL def table(self, table: str, mapping_keys: dict[str, str] | None = None) -> PostgreSQLDataSource: return PostgreSQLDataSource(config=self, table=table, mapping_keys=mapping_keys or {}) - def data_enricher(self, sql: str, values: dict | None = None) -> Enricher: - from aligned.enricher import SqlDatabaseEnricher - - return SqlDatabaseEnricher(self.env_var, sql, values) - def fetch(self, query: str) -> RetrivalJob: from aligned.psql.jobs import PostgreSqlJob @@ -57,7 +51,7 @@ def fetch(self, query: str) -> RetrivalJob: @dataclass -class PostgreSQLDataSource(BatchDataSource, ColumnFeatureMappable, WritableFeatureSource): +class PostgreSQLDataSource(CodableBatchDataSource, ColumnFeatureMappable, WritableFeatureSource): config: PostgreSQLConfig table: str @@ -141,12 +135,12 @@ async def schema(self) -> dict[str, FeatureType]: 'character varying': FeatureType.string(), 'text': FeatureType.string(), 'integer': FeatureType.int32(), - 'float': FeatureType.float(), + 'float': FeatureType.floating_point(), 'date': FeatureType.date(), - 'boolean': FeatureType.bool(), + 'boolean': FeatureType.boolean(), 'jsonb': FeatureType.json(), 'smallint': FeatureType.int16(), - 'numeric': FeatureType.float(), + 'numeric': 
FeatureType.floating_point(), } values = df.select(['column_name', 'data_type']).to_dicts() return {value['column_name']: psql_types[value['data_type']] for value in values} @@ -170,7 +164,7 @@ async def freshness(self, event_timestamp: EventTimestamp) -> datetime | None: async def insert(self, job: RetrivalJob, request: RetrivalRequest) -> None: data = await job.to_lazy_polars() data.select(request.all_returned_columns).collect().write_database( - self.table, connection=self.config.url, if_exists='append' + self.table, connection=self.config.url, if_table_exists='append' ) async def upsert(self, job: RetrivalJob, request: RetrivalRequest) -> None: diff --git a/aligned/sources/redis.py b/aligned/sources/redis.py index 727ef3da..fcb2e225 100644 --- a/aligned/sources/redis.py +++ b/aligned/sources/redis.py @@ -166,8 +166,10 @@ def all_for(self, request: FeatureRequest, limit: int | None = None) -> Retrival def features_for(self, facts: RetrivalJob, request: FeatureRequest) -> RetrivalJob: from aligned.redis.job import FactualRedisJob - needed_requests = [req for req in request.needed_requests if req.location.location != 'combined_view'] - combined = [req for req in request.needed_requests if req.location.location == 'combined_view'] + needed_requests = [ + req for req in request.needed_requests if req.location.location_type != 'combined_view' + ] + combined = [req for req in request.needed_requests if req.location.location_type == 'combined_view'] return FactualRedisJob(self.config, needed_requests, facts).combined_features(combined) async def insert(self, job: RetrivalJob, request: RetrivalRequest) -> None: @@ -196,7 +198,7 @@ async def insert(self, job: RetrivalJob, request: RetrivalRequest) -> None: expr = pl.col(feature.name).cast(pl.Utf8).alias(feature.name) - if feature.dtype == FeatureType.bool(): + if feature.dtype == FeatureType.boolean(): # Redis do not support bools expr = pl.col(feature.name).cast(pl.Int8, strict=False).cast(pl.Utf8).alias(feature.name) elif feature.dtype == FeatureType.datetime(): @@ -255,7 +257,7 @@ def make_redis_friendly(self, data: pl.LazyFrame, features: set[Feature]) -> pl. 
expr = pl.col(feature.name) - if feature.dtype == FeatureType.bool(): + if feature.dtype == FeatureType.boolean(): # Redis do not support bools expr = pl.col(feature.name).cast(pl.Int8, strict=False) elif feature.dtype == FeatureType.datetime(): diff --git a/aligned/sources/redshift.py b/aligned/sources/redshift.py index e1f06e1b..e45f8872 100644 --- a/aligned/sources/redshift.py +++ b/aligned/sources/redshift.py @@ -1,12 +1,10 @@ from __future__ import annotations from dataclasses import dataclass, field -from datetime import datetime, timedelta +from datetime import datetime from typing import Any, TYPE_CHECKING -from aligned import RedisConfig -from aligned.data_source.batch_data_source import BatchDataSource, ColumnFeatureMappable -from aligned.enricher import Enricher +from aligned.data_source.batch_data_source import CodableBatchDataSource, ColumnFeatureMappable from aligned.request.retrival_request import RetrivalRequest from aligned.retrival_job import RetrivalJob from aligned.schemas.codable import Codable @@ -64,19 +62,6 @@ def table( config=self, table=table, mapping_keys=mapping_keys or {}, list_references=list_references or {} ) - def data_enricher( - self, name: str, sql: str, redis: RedisConfig, values: dict | None = None, lock_timeout: int = 60 - ) -> Enricher: - from aligned.enricher import FileCacheEnricher, RedisLockEnricher, SqlDatabaseEnricher - - return FileCacheEnricher( - timedelta(days=1), - file_path=f'./cache/{name}.parquet', - enricher=RedisLockEnricher( - name, SqlDatabaseEnricher(self.url, sql, values), redis, timeout=lock_timeout - ), - ) - def with_schema(self, name: str) -> RedshiftSQLConfig: return RedshiftSQLConfig(env_var=self.env_var, schema=name) @@ -87,7 +72,7 @@ def fetch(self, query: str) -> RetrivalJob: @dataclass -class RedshiftSQLDataSource(BatchDataSource, ColumnFeatureMappable): +class RedshiftSQLDataSource(CodableBatchDataSource, ColumnFeatureMappable): config: RedshiftSQLConfig table: str diff --git a/aligned/sources/renamer.py b/aligned/sources/renamer.py new file mode 100644 index 00000000..1041fae9 --- /dev/null +++ b/aligned/sources/renamer.py @@ -0,0 +1,36 @@ +from __future__ import annotations + +import polars as pl +from aligned.lazy_imports import pandas as pd + + +class Renamer: + def rename_polars(self, df: pl.LazyFrame) -> pl.LazyFrame: + raise NotImplementedError(type(self)) + + def rename_pandas(self, df: pd.DataFrame) -> pd.DataFrame: + raise NotImplementedError(type(self)) + + +class NoopRenamer(Renamer): + def rename_polars(self, df: pl.LazyFrame) -> pl.LazyFrame: + return df + + def rename_pandas(self, df: pd.DataFrame) -> pd.DataFrame: + return df + + +def camel_to_snake_case(column: str) -> str: + return ''.join(['_' + char.lower() if char.isupper() else char for char in column]).lstrip('_') + + +class CamelToSnakeCase(Renamer): + """ + Renames the columns from camel case to snake case + """ + + def rename_polars(self, df: pl.LazyFrame) -> pl.LazyFrame: + return df.rename(camel_to_snake_case) + + def rename_pandas(self, df: pd.DataFrame) -> pd.DataFrame: + return df.rename(columns=camel_to_snake_case) diff --git a/aligned/sources/s3.py b/aligned/sources/s3.py index 7c7a50fa..832fb531 100644 --- a/aligned/sources/s3.py +++ b/aligned/sources/s3.py @@ -1,12 +1,13 @@ +from __future__ import annotations from dataclasses import dataclass from datetime import datetime from io import BytesIO -import pandas as pd import polars as pl from httpx import HTTPStatusError -from aligned.data_source.batch_data_source import BatchDataSource, ColumnFeatureMappable +from 
aligned.lazy_imports import pandas as pd +from aligned.data_source.batch_data_source import CodableBatchDataSource, ColumnFeatureMappable from aligned.exceptions import UnableToFindFileException from aligned.local.job import FileDateJob, FileFullJob from aligned.retrival_job import RetrivalRequest, RetrivalJob @@ -25,11 +26,15 @@ from aligned.storage import Storage try: - from aioaws.s3 import S3Config + from aioaws.s3 import S3Config # type: ignore except ModuleNotFoundError: + @dataclass class S3Config: # type: ignore[no-redef] - pass + aws_access_key: str + aws_secret_key: str + aws_region: str + aws_s3_bucket: str @dataclass @@ -128,7 +133,7 @@ def partitioned_parquet_at( def delta_at( self, path: str, mapping_keys: dict[str, str] | None = None, config: DeltaFileConfig | None = None - ) -> BatchDataSource: + ) -> CodableBatchDataSource: raise NotImplementedError(type(self)) def sub_directory(self, path: str) -> 'AwsS3Directory': @@ -165,7 +170,7 @@ async def write(self, content: bytes) -> None: @dataclass -class AwsS3CsvDataSource(BatchDataSource, DataFileReference, ColumnFeatureMappable): +class AwsS3CsvDataSource(CodableBatchDataSource, DataFileReference, ColumnFeatureMappable): config: AwsS3Config path: str @@ -225,7 +230,7 @@ def all_between_dates( @dataclass -class AwsS3ParquetDataSource(BatchDataSource, DataFileReference, ColumnFeatureMappable): +class AwsS3ParquetDataSource(CodableBatchDataSource, DataFileReference, ColumnFeatureMappable): config: AwsS3Config path: str diff --git a/aligned/sources/tests/test_lancedb.py b/aligned/sources/tests/test_lancedb.py index a6807822..7cd03e52 100644 --- a/aligned/sources/tests/test_lancedb.py +++ b/aligned/sources/tests/test_lancedb.py @@ -36,7 +36,6 @@ class MyEmbedding: embedding = Embedding(embedding_size=2) schema = MyEmbedding.compile().predictions_view.request('').pyarrow_schema() - conn = await config.connect() await conn.create_table(table, schema=schema, mode='overwrite') diff --git a/aligned/sources/tests/test_parquet.py b/aligned/sources/tests/test_parquet.py index 52890d62..c52ed1eb 100644 --- a/aligned/sources/tests/test_parquet.py +++ b/aligned/sources/tests/test_parquet.py @@ -4,7 +4,9 @@ from aligned import ContractStore, FileSource, feature_view, Int32 from aligned.feature_view.feature_view import FeatureView +from aligned.retrival_job import RetrivalJob from aligned.schemas.date_formatter import DateFormatter +from aligned.schemas.feature import Feature, FeatureLocation, FeatureType from conftest import DataTest @@ -80,6 +82,42 @@ async def test_partition_parquet(point_in_time_data_test: DataTest) -> None: assert ordered_columns.equals(expected), f'Expected: {expected}\nGot: {ordered_columns}' +@pytest.mark.asyncio +async def test_partition_parquet_upsert(): + from polars.testing import assert_frame_equal + from aligned import FileSource + from aligned.request.retrival_request import RetrivalRequest + + dir = FileSource.directory('test_data/temp') + source = dir.partitioned_parquet_at('partition_upsert', partition_keys=['a', 'b']) + + request = RetrivalRequest( + name='test', + location=FeatureLocation.feature_view('test'), + entities={Feature('e', FeatureType.int8())}, + features={ + Feature('a', FeatureType.int8()), + Feature('b', FeatureType.int8()), + Feature('x', FeatureType.int8()), + }, + derived_features=set(), + ) + initial_data = pl.DataFrame( + {'a': [1, 1, 1, 2, 2, 2], 'b': [1, 2, 3, 1, 2, 3], 'e': [1, 2, 3, 4, 5, 6], 'x': [1, 2, 3, 4, 5, 6]} + ) + new_data = pl.DataFrame({'a': [1, 1, 1, 1], 'b': [2, 2, 3, 
1], 'e': [1, 2, 3, 7], 'x': [7, 8, 9, 10]}) + expected = pl.concat([new_data, initial_data]).unique(['e'], keep='first') + + await source.delete() + await source.overwrite(RetrivalJob.from_polars_df(initial_data, [request]), request) + data = await source.to_polars() + assert_frame_equal(data, initial_data, check_column_order=False) + + await source.upsert(RetrivalJob.from_polars_df(new_data, [request]), request) + new = await source.to_polars() + assert_frame_equal(new.sort('e'), expected.sort('e'), check_column_order=False) + + @pytest.mark.asyncio async def test_parquet(point_in_time_data_test: DataTest) -> None: diff --git a/aligned/split_strategy.py b/aligned/split_strategy.py index 9c414730..d70310b3 100644 --- a/aligned/split_strategy.py +++ b/aligned/split_strategy.py @@ -1,77 +1,12 @@ -import math +from __future__ import annotations from typing import Generic, TypeVar import polars as pl -from pandas import DataFrame, Index, concat +from aligned.lazy_imports import pandas as pd DatasetType = TypeVar('DatasetType') -class TrainTestSet(Generic[DatasetType]): - - data: DatasetType - - entity_columns: set[str] - features: set[str] - target_columns: set[str] - - train_index: Index - test_index: Index - event_timestamp_column: str | None - - def __init__( - self, - data: DatasetType, - entity_columns: set[str], - features: set[str], - target_columns: set[str], - train_index: Index, - test_index: Index, - event_timestamp_column: str | None, - ): - if isinstance(data, pl.LazyFrame): - raise ValueError('The dataframe need to be a DataFrame, not a LazyFrame when using Polars.') - self.data = data - self.entity_columns = entity_columns - self.features = features - self.target_columns = target_columns - self.train_index = train_index - self.test_index = test_index - self.event_timestamp_column = event_timestamp_column - - @property - def sorted_features(self) -> list[str]: - return sorted(self.features) - - @property - def train(self) -> DatasetType: - if isinstance(self.data, pl.DataFrame): - return self.data[self.train_index.to_list(), :] - return self.data.iloc[self.train_index] # type: ignore - - @property - def train_input(self) -> DatasetType: - return self.train[self.sorted_features] # type: ignore - - @property - def train_target(self) -> DatasetType: - return self.train[list(self.target_columns)] # type: ignore - - @property - def test(self) -> DatasetType: - if isinstance(self.data, pl.DataFrame): - return self.data[self.test_index.to_list(), :] - return self.data.iloc[self.test_index] # type: ignore - - @property - def test_input(self) -> DatasetType: - return self.test[self.sorted_features] # type: ignore - - @property - def test_target(self) -> DatasetType: - return self.test[list(self.target_columns)] # type: ignore - - class SupervisedDataSet(Generic[DatasetType]): data: DatasetType @@ -125,9 +60,9 @@ class TrainTestValidateSet(Generic[DatasetType]): feature_columns: set[str] target_columns: set[str] - train_index: Index - test_index: Index - validate_index: Index + train_index: 'pd.Index' + test_index: 'pd.Index' + validate_index: 'pd.Index' event_timestamp_column: str | None def __init__( @@ -136,9 +71,9 @@ def __init__( entity_columns: set[str], features: set[str], target: set[str], - train_index: Index, - test_index: Index, - validate_index: Index, + train_index: 'pd.Index', + test_index: 'pd.Index', + validate_index: 'pd.Index', event_timestamp_column: str | None, ): self.data = data @@ -262,60 +197,3 @@ def __init__( self.develop_target = develop_target self.test_input = 
test_input
-        self.test_target = test_target
-
-
-class SplitStrategy:
-    def split_pandas(self, data: DataFrame, target_column: str) -> SplitDataSet[DataFrame]:
-        pass
-
-
-class StrategicSplitStrategy(SplitStrategy):
-
-    train_size_percentage: float
-    test_size_percentage: float
-
-    def __init__(self, train_size_percentage: float, test_size_percentage: float):
-        assert train_size_percentage + test_size_percentage <= 1
-        self.train_size_percentage = train_size_percentage
-        self.test_size_percentage = test_size_percentage
-
-    def split_pandas(self, data: DataFrame, target_column: str) -> SplitDataSet[DataFrame]:
-        train = DataFrame(columns=data.columns)
-        test = DataFrame(columns=data.columns)
-        develop = DataFrame(columns=data.columns)
-
-        target_data = data[target_column]
-
-        def split(data: DataFrame, start_ratio: float, end_ratio: float) -> DataFrame:
-            group_size = data.shape[0]
-            start_index = math.floor(group_size * start_ratio)
-            end_index = math.floor(group_size * end_ratio)
-            return data.iloc[start_index:end_index]
-
-        for target in target_data.unique():
-            sub_group = data.loc[target_data == target]
-
-            train = concat([train, split(sub_group, 0, self.train_size_percentage)], axis=0)
-            test = concat(
-                [
-                    test,
-                    split(
-                        sub_group,
-                        self.train_size_percentage,
-                        self.train_size_percentage + self.test_size_percentage,
-                    ),
-                ],
-                axis=0,
-            )
-            develop = concat(
-                [develop, split(sub_group, self.train_size_percentage + self.test_size_percentage, 1)], axis=0
-            )
-
-        return SplitDataSet(  # type: ignore
-            train_input=train.drop(columns=[target_column]),
-            train_target=train[target_column],
-            develop_input=develop.drop(columns=[target_column]),
-            develop_target=develop[target_column],
-            test_input=test.drop(columns=[target_column]),
-            test_target=test[target_column],
-        )
diff --git a/aligned/streams/interface.py b/aligned/streams/interface.py
index 3462d9d6..3b1cd2c8 100644
--- a/aligned/streams/interface.py
+++ b/aligned/streams/interface.py
@@ -1,5 +1,5 @@
 class ReadableStream:
-    async def read(self, max_records: int = None, max_wait: float = None) -> list[dict]:
+    async def read(self, max_records: int | None = None, max_wait: float | None = None) -> list[dict]:
         """Reads a stream of data
 
         ```python
diff --git a/aligned/streams/kafka.py b/aligned/streams/kafka.py
index 8e48750c..02f02834 100644
--- a/aligned/streams/kafka.py
+++ b/aligned/streams/kafka.py
@@ -15,7 +15,7 @@ class KafkaReadableStream(ReadableStream):
 
     client: KafkaConsumer
 
-    async def read(self, max_records: int = None, max_wait: float = None) -> list[dict]:
+    async def read(self, max_records: int | None = None, max_wait: float | None = None) -> list[dict]:
         values: list[dict] = []
         raw_values = self.client.poll(timeout_ms=1000)
diff --git a/aligned/streams/redis.py b/aligned/streams/redis.py
index 8788d5a6..fbd5bee9 100644
--- a/aligned/streams/redis.py
+++ b/aligned/streams/redis.py
@@ -8,8 +8,8 @@
 except ModuleNotFoundError:
 
     class Redis:  # type: ignore
-        async def xread(self, streams: dict[str, str], count: int, block: int) -> list:
-            pass
+        async def xread(self, streams: dict[str, str], count: int | None, block: int) -> list:
+            raise NotImplementedError()
 
         async def xadd(self, stream: str, record: dict) -> None:
             pass
@@ -24,10 +24,10 @@ class RedisStream(ReadableStream, SinakableStream):
     mappings: dict[str, str] = field(default_factory=dict)
     record_coder: RecordCoder = field(default_factory=lambda: PassthroughRecordCoder())
 
-    async def read(self, max_records: int = None, max_wait: float = None) -> list[dict]:
+    async def read(self, max_records: int | None = None, max_wait: float | None = None) -> list[dict]:
         stream_values = await self.client.xread(
-            streams={self.stream_name: self.read_timestamp}, count=max_records, block=max_wait or 1000
+            streams={self.stream_name: self.read_timestamp}, count=max_records, block=int(max_wait or 1000)
         )
 
         if not stream_values:
diff --git a/aligned/tests/test_cache_enricher.py b/aligned/tests/test_cache_enricher.py
deleted file mode 100644
index 0099c870..00000000
--- a/aligned/tests/test_cache_enricher.py
+++ /dev/null
@@ -1,22 +0,0 @@
-from datetime import timedelta
-
-import pytest
-
-from aligned import FileSource
-
-
-@pytest.mark.asyncio
-async def test_cache_enricher(mocker) -> None:  # type: ignore
-    cache_time = timedelta(hours=1)
-    source = FileSource.csv_at(path='test_data/data-with-datetime.csv', mapping_keys={}).enricher()
-    enricher = source.cache(ttl=cache_time, cache_key='cache/cached_data')
-
-    file = await enricher.as_df()
-    expected = await source.as_df()
-    assert file.equals(expected)
-
-    pandas_mock = mocker.patch('pandas.read_parquet', return_value=file.iloc[0:3])
-    new_file = await enricher.as_df()
-
-    assert file.iloc[0:3].equals(new_file)
-    pandas_mock.assert_called_once()
diff --git a/aligned/tests/test_date_timezone_converter.py b/aligned/tests/test_date_timezone_converter.py
index 87b26345..d910bd83 100644
--- a/aligned/tests/test_date_timezone_converter.py
+++ b/aligned/tests/test_date_timezone_converter.py
@@ -43,11 +43,13 @@ class TestFeatureNoneTimestamp:
         created_at = Timestamp(time_zone=None)
 
     data_utc = await TestFeatureUtc.query().all().to_polars()
+    assert data_utc['created_at'].dtype.time_zone == 'UTC'  # type: ignore
+
     data_none = await TestFeatureNone.query().all().to_polars()
+    assert data_none['created_at'].dtype.time_zone is None  # type: ignore
+
     data_utc_timestamp = await TestFeatureUtcTimestamp.query().all().to_polars()
-    data_none_timestamp = await TestFeatureNoneTimestamp.query().all().to_polars()
+    assert data_utc_timestamp['created_at'].dtype.time_zone == 'UTC'  # type: ignore
 
-    assert data_utc['created_at'].dtype.time_zone == 'UTC'
-    assert data_none['created_at'].dtype.time_zone is None
-    assert data_utc_timestamp['created_at'].dtype.time_zone == 'UTC'
-    assert data_none_timestamp['created_at'].dtype.time_zone is None
+    data_none_timestamp = await TestFeatureNoneTimestamp.query().all().to_polars()
+    assert data_none_timestamp['created_at'].dtype.time_zone is None  # type: ignore
diff --git a/aligned/tests/test_feature_store.py b/aligned/tests/test_feature_store.py
new file mode 100644
index 00000000..2a000202
--- /dev/null
+++ b/aligned/tests/test_feature_store.py
@@ -0,0 +1,17 @@
+from aligned import ContractStore
+from aligned.sources.local import ParquetFileSource
+from aligned.schemas.feature import FeatureLocation
+
+
+def test_update_source(titanic_feature_store: ContractStore) -> None:
+
+    sources = []
+
+    def update_parquet_source(source: ParquetFileSource, loc: FeatureLocation) -> None:
+        source.path = 'updated.parquet'
+        sources.append(loc)
+
+    titanic_feature_store.sources_of_type(ParquetFileSource, update_parquet_source)
+
+    assert sources
+    assert sources == [FeatureLocation.feature_view('titanic_parquet')]
diff --git a/aligned/tests/test_feature_view_wrapper.py b/aligned/tests/test_feature_view_wrapper.py
index 09d135f2..b4eb26f3 100644
--- a/aligned/tests/test_feature_view_wrapper.py
+++ b/aligned/tests/test_feature_view_wrapper.py
@@ -1,9 +1,22 @@
-# type: ignore
 import pytest
 
-from aligned import feature_view, String, Int32, FileSource
+from aligned import feature_view, String, Int32, FileSource, Float
+from aligned.compiler.feature_factory import EventTimestamp
 from aligned.schemas.feature import FeatureLocation
 
 
+@feature_view(name='test', source=FileSource.csv_at('test_data/test.csv'))
+class DefaultValueTest:
+
+    some_id = Int32().as_entity()
+
+    feature = Int32()
+
+    other_value = String().default_value('Hello')
+    optional_value = Int32().is_optional()
+
+    other_default = Float().default_value(0)
+
+
 @feature_view(name='test', source=FileSource.csv_at('some_file.csv'))
 class Test:
 
@@ -22,9 +35,25 @@ class TestDerived:
     contains_hello = feature.contains('Hello')
 
 
+@feature_view(name='test', source=FileSource.csv_at('some_file.csv'))
+class TestEventTimestamp:
+    some_id = Int32().as_entity()
+    feature = String()
+    loaded_at = EventTimestamp()
+    contains_hello = feature.contains('Hello')
+
+
+def test_event_timestamp() -> None:
+    view = TestEventTimestamp.compile()
+    assert len(view.entities) == 1
+    assert view.event_timestamp is not None
+    assert len(view.features) == 1
+    assert len(view.derived_features) == 1
+
+
 def test_feature_view_wrapper_feature_references() -> None:
 
-    NewTest = Test.filter('new_test', where=lambda view: view.feature == 'test')  # type: ignore
+    NewTest = Test.filter('new_test', lambda view: view.feature == 'test')  # type: ignore
 
     new_test = NewTest()
     test = Test()
@@ -57,3 +86,12 @@ async def test_feature_view_wrapper_from_data() -> None:
 
     # Returns two as the int can be casted to a str, but a str can not be casted to int
     assert len(test_invalid_result['some_id']) == 2
+
+
+@pytest.mark.asyncio
+async def test_fill_missing_features() -> None:
+    df = await DefaultValueTest.query().all().to_polars()
+
+    fill_columns = ['other_default', 'other_value', 'optional_value']
+    for col in fill_columns:
+        assert col in df.columns
diff --git a/aligned/tests/test_source_validation.py b/aligned/tests/test_source_validation.py
index e25abb9c..187030c3 100644
--- a/aligned/tests/test_source_validation.py
+++ b/aligned/tests/test_source_validation.py
@@ -1,21 +1,7 @@
 import pytest
 
-from aligned import ContractStore, FileSource
-from aligned.schemas.feature import FeatureType, FeatureLocation
-from aligned.source_validation import validate_sources_in
-
-
-@pytest.mark.asyncio
-async def test_source_validation(titanic_feature_store: ContractStore) -> None:
-
-    source = FileSource.parquet_at('test_data/titanic.parquet')
-
-    views = titanic_feature_store.views_with_config(source)
-
-    assert len(views) == 1
-    validation = await validate_sources_in(views)
-
-    assert {FeatureLocation.feature_view('titanic_parquet'): True} == validation
+from aligned import FileSource
+from aligned.schemas.feature import FeatureType
 
 
 @pytest.mark.asyncio
diff --git a/aligned/tests/test_statistic_enricher.py b/aligned/tests/test_statistic_enricher.py
deleted file mode 100644
index bc114062..00000000
--- a/aligned/tests/test_statistic_enricher.py
+++ /dev/null
@@ -1,46 +0,0 @@
-from datetime import timedelta
-
-import pandas as pd
-import pytest
-from freezegun import freeze_time
-
-from aligned.enricher import TimespanSelector
-from aligned.sources.local import CsvFileSource
-
-
-@pytest.mark.asyncio
-async def test_statistic_enricher(scan_with_datetime: CsvFileSource) -> None:
-    columns = {'fractal_dimension_worst', 'symmetry_worst'}
-    file = await scan_with_datetime.mean(columns=columns).as_df()
-    limit_file = await scan_with_datetime.mean(columns=columns, limit=3).as_df()
-
-    assert set(file.index) == columns
-    expected_values = {'symmetry_worst': 0.3654, 'fractal_dimension_worst': 0.0985}
-    expected_series = pd.Series([expected_values[col] for col in limit_file.index], index=limit_file.index)
-    pd.testing.assert_series_equal(limit_file, expected_series, atol=0.001)
-
-
-@pytest.mark.asyncio
-async def test_statistic_enricher_with_limit(scan_with_datetime: CsvFileSource) -> None:
-    columns = {'fractal_dimension_worst', 'symmetry_worst'}
-    limit_file = await scan_with_datetime.mean(columns=columns, limit=3).as_df()
-
-    assert set(limit_file.index) == columns
-    expected_values = {'symmetry_worst': 0.3654, 'fractal_dimension_worst': 0.0985}
-    expected_series = pd.Series([expected_values[col] for col in limit_file.index], index=limit_file.index)
-    pd.testing.assert_series_equal(limit_file, expected_series, atol=0.001)
-
-
-@freeze_time('2020-01-11')
-@pytest.mark.asyncio
-async def test_statistic_enricher_with_timespand(scan_with_datetime: CsvFileSource) -> None:
-
-    columns = {'fractal_dimension_worst', 'symmetry_worst'}
-    limit_file = await scan_with_datetime.mean(
-        columns=columns, time=TimespanSelector(timespand=timedelta(days=3), time_column='created_at')
-    ).as_df()
-
-    assert set(limit_file.index) == columns
-    expected_values = {'symmetry_worst': 0.398, 'fractal_dimension_worst': 0.14326}
-    expected_series = pd.Series([expected_values[col] for col in limit_file.index], index=limit_file.index)
-    pd.testing.assert_series_equal(limit_file, expected_series, atol=0.001)
diff --git a/aligned/tests/test_train_test_validate_set.py b/aligned/tests/test_train_test_validate_set.py
index f7d5c8c6..3b556a5f 100644
--- a/aligned/tests/test_train_test_validate_set.py
+++ b/aligned/tests/test_train_test_validate_set.py
@@ -1,26 +1,8 @@
 import pytest
 
 from aligned.feature_store import ContractStore
-from aligned.retrival_job import split
 from aligned.schemas.folder import DatasetMetadata
-from aligned.sources.local import CsvFileSource, FileSource
-
-
-@pytest.mark.asyncio
-async def test_split(scan_with_datetime: CsvFileSource) -> None:
-
-    data_set_size = 10
-    end_ratio = 0.8
-    result_size = data_set_size * end_ratio
-
-    dataset = await scan_with_datetime.enricher().as_df()
-    subset = dataset[:data_set_size]
-
-    split_set = split(subset, event_timestamp_column='created_at', start_ratio=0, end_ratio=end_ratio)
-    other_set = split(subset, event_timestamp_column='created_at', start_ratio=end_ratio, end_ratio=1)
-
-    assert split_set.shape[0] == result_size
-    assert other_set.shape[0] == (data_set_size - result_size)
+from aligned.sources.local import FileSource
 
 
 @pytest.mark.asyncio
@@ -34,27 +16,29 @@ async def test_train_test_validate_set(titanic_feature_store: ContractStore) ->
     test_size = int(round(dataset_size * (1 - train_fraction - validation_fraction)))
     validate_size = int(round(dataset_size * validation_fraction))
 
-    dataset = (
-        await titanic_feature_store.feature_view('titanic')
+    datasets = (
+        titanic_feature_store.feature_view('titanic')
         .all(limit=dataset_size)
-        .train_set(train_fraction, target_column='survived')
-        .validation_set(validation_fraction)
-        .to_pandas()
+        .train_test_validate(train_fraction, validation_fraction, target_column='survived')
     )
+    train = await datasets.train.to_pandas()
+    test = await datasets.test.to_pandas()
+    validate = await datasets.validate.to_pandas()
 
-    assert dataset.train.data.shape[0] == train_size
-    assert dataset.test.data.shape[0] == test_size
-    assert dataset.validate.data.shape[0] == validate_size
+    assert train.data.shape[0] == train_size
+    assert test.data.shape[0] == test_size
+    assert validate.data.shape[0] == validate_size
 
-    assert 'passenger_id' in dataset.data.columns
-    assert 'survived' in dataset.data.columns
+    assert 'passenger_id' in train.data.columns
+    assert 'survived' in train.data.columns
 
-    assert 'passenger_id' not in dataset.train_input.columns
-    assert 'survived' not in dataset.train_input.columns
+    assert 'passenger_id' not in train.input.columns
+    assert 'survived' not in train.input.columns
 
 
 @pytest.mark.asyncio
 async def test_train_test_validate_set_new(titanic_feature_store: ContractStore) -> None:
+    from pathlib import Path
 
     from aligned.schemas.folder import JsonDatasetStore
 
     dataset_size = 100
@@ -66,6 +50,17 @@ async def test_train_test_validate_set_new(titanic_feature_store: ContractStore)
     validate_size = int(round(dataset_size * validation_fraction))
 
     dataset_store = FileSource.json_at('test_data/temp/titanic-sets.json')
+    train_source = FileSource.csv_at('test_data/temp/titanic-train.csv')
+    test_source = FileSource.csv_at('test_data/temp/titanic-test.csv')
+    validate_source = FileSource.csv_at('test_data/temp/titanic-validate.csv')
+
+    delete_files = [dataset_store.path, train_source.path, test_source.path, validate_source.path]
+
+    for file in delete_files:
+        path = Path(file)
+        if path.exists():
+            path.unlink()
+
     dataset = await (
         titanic_feature_store.feature_view('titanic')
         .all(limit=dataset_size)
@@ -75,9 +70,9 @@ async def test_train_test_validate_set_new(titanic_feature_store: ContractStore)
         metadata=DatasetMetadata(
             id='titanic_test',
         ),
-        train_source=FileSource.csv_at('test_data/temp/titanic-train.csv'),
-        test_source=FileSource.csv_at('test_data/temp/titanic-test.csv'),
-        validate_source=FileSource.csv_at('test_data/temp/titanic-validate.csv'),
+        train_source=train_source,
+        test_source=test_source,
+        validate_source=validate_source,
         )
     )
diff --git a/aligned/tests/test_transformations.py b/aligned/tests/test_transformations.py
index 0bdf233e..ece0f1fc 100644
--- a/aligned/tests/test_transformations.py
+++ b/aligned/tests/test_transformations.py
@@ -46,11 +46,17 @@ class TestAgg:
         student_loan_due = Int32()
 
         credit_card_due_sum = credit_card_due.aggregate().over(days=1).sum()
-        student_loan_due_mean = student_loan_due.aggregate().over(days=1).mean()
+        student_loan_due_mean = student_loan_due.aggregate().over(days=1).mean().with_tag('mean')
 
     df = await TestAgg.query().all().to_pandas()  # type: ignore
     assert df.shape[0] == 6
 
+    all_features = TestAgg.compile().request_all.request_result.features
+    assert len(all_features) == 4
+
+    with_tag = [feature for feature in all_features if feature.tags]
+    assert len(with_tag) == 1
+
 
 @pytest.mark.asyncio
 async def test_aggregations_on_all_no_window() -> None:
diff --git a/aligned/validation/interface.py b/aligned/validation/interface.py
index e0d725b7..2b914bbb 100644
--- a/aligned/validation/interface.py
+++ b/aligned/validation/interface.py
@@ -1,6 +1,7 @@
-import pandas as pd
+from __future__ import annotations
 
 import polars as pl
 
+from aligned.lazy_imports import pandas as pd
 from aligned.schemas.feature import Feature
diff --git a/aligned/validation/pandera.py b/aligned/validation/pandera.py
index 87d54c1e..9ee5718f 100644
--- a/aligned/validation/pandera.py
+++ b/aligned/validation/pandera.py
@@ -1,10 +1,11 @@
+from __future__ import annotations
+
 import logging
 from typing import Callable
 
-import pandas as pd
 import polars as pl
 
-from pandera import Check, Column, DataFrameSchema  # type: ignore[attr-defined]
+from aligned.lazy_imports import pandas as pd, pandera as pa
 from aligned.schemas.constraints import Constraint, Optional
 from aligned.schemas.feature import Feature, FeatureType
 from aligned.validation.interface import Validator
@@ -14,21 +15,20 @@
 
 class PanderaValidator(Validator):
 
-    check_map: dict[str, Callable[[Constraint], Check]] = {
-        'lower_bound': lambda constraint: Check.greater_than(constraint.value),
-        'lower_bound_inc': lambda constraint: Check.greater_than_or_equal_to(constraint.value),
-        'upper_bound': lambda constraint: Check.less_than(constraint.value),
-        'upper_bound_inc': lambda constraint: Check.less_than_or_equal_to(constraint.value),
-        'in_domain': lambda domain: Check.isin(domain.values),
-        'min_length': lambda constraint: Check.str_length(min_value=constraint.value),
-        'max_length': lambda constraint: Check.str_length(max_value=constraint.value),
-        'regex': lambda constraint: Check.str_matches(constraint.value),
-        'ends_with': lambda constraint: Check.str_endswith(constraint.value),
-        'starts_with': lambda constraint: Check.str_startswith(constraint.value),
+    check_map: dict[str, Callable[[Constraint], pa.Check]] = {
+        'lower_bound': lambda constraint: pa.Check.greater_than(constraint.value),  # type: ignore
+        'lower_bound_inc': lambda constraint: pa.Check.greater_than_or_equal_to(constraint.value),  # type: ignore
+        'upper_bound': lambda constraint: pa.Check.less_than(constraint.value),  # type: ignore
+        'upper_bound_inc': lambda constraint: pa.Check.less_than_or_equal_to(constraint.value),  # type: ignore
+        'in_domain': lambda domain: pa.Check.isin(domain.values),  # type: ignore
+        'min_length': lambda constraint: pa.Check.str_length(min_value=constraint.value),  # type: ignore
+        'max_length': lambda constraint: pa.Check.str_length(max_value=constraint.value),  # type: ignore
+        'regex': lambda constraint: pa.Check.str_matches(constraint.value),  # type: ignore
+        'ends_with': lambda constraint: pa.Check.str_endswith(constraint.value),  # type: ignore
+        'starts_with': lambda constraint: pa.Check.str_startswith(constraint.value),  # type: ignore
     }
 
     datatype_check = {
-        # FeatureType.bool(),
         FeatureType.string(),
         FeatureType.uuid(),
         FeatureType.date(),
@@ -36,10 +36,10 @@ class PanderaValidator(Validator):
         FeatureType.int64(),
     }
 
-    def _column_for(self, feature: Feature) -> Column:
+    def _column_for(self, feature: Feature) -> pa.Column:
         if feature.constraints is None:
-            return Column(
+            return pa.Column(
                 feature.dtype.pandas_type if feature.dtype in self.datatype_check else None,
                 nullable=False,
                 coerce=True,
@@ -53,15 +53,15 @@ def _column_for(self, feature: Feature) -> Column:
             if constraint.name in self.check_map
         ]
 
-        return Column(
+        return pa.Column(
             dtype=feature.dtype.pandas_type if feature.dtype in self.datatype_check else None,
             checks=checks,
             nullable=is_nullable,
             required=not is_nullable,
         )
 
-    def _build_schema(self, features: list[Feature]) -> DataFrameSchema:
-        return DataFrameSchema(
+    def _build_schema(self, features: list[Feature]) -> pa.DataFrameSchema:
+        return pa.DataFrameSchema(
             columns={feature.name: self._column_for(feature) for feature in features}, drop_invalid_rows=True
         )
diff --git a/aligned/validation/tests/test_pandera_validator.py b/aligned/validation/tests/test_pandera_validator.py
index 0b10eebe..663baa61 100644
--- a/aligned/validation/tests/test_pandera_validator.py
+++ b/aligned/validation/tests/test_pandera_validator.py
@@ -4,6 +4,23 @@
 from aligned.validation.pandera import PanderaValidator
 
 
+@pytest.mark.asyncio
+async def test_validate_valid_feature_view_polars(titanic_feature_store: ContractStore) -> None:
+    original = await titanic_feature_store.feature_view('titanic').all(limit=5).to_pandas()
+    validated_df = await titanic_feature_store.feature_view('titanic').all(limit=5).drop_invalid().to_polars()
+
+    assert original.shape == validated_df.shape
+
+
+@pytest.mark.asyncio
+async def test_validate_invalid_feature_view_polars(titanic_feature_store: ContractStore) -> None:
+    validated_df = (
+        await titanic_feature_store.feature_view('titanic').all(limit=20).drop_invalid().to_polars()
+    )
+
+    assert validated_df.height == 16
+
+
 @pytest.mark.asyncio
 async def test_validate_valid_feature_view(titanic_feature_store: ContractStore) -> None:
     original = await titanic_feature_store.feature_view('titanic').all(limit=5).to_pandas()
diff --git a/conftest.py b/conftest.py
index 6c294a53..00a2091a 100644
--- a/conftest.py
+++ b/conftest.py
@@ -21,8 +21,8 @@
 from aligned.feature_view.feature_view import FeatureView, FeatureViewMetadata
 from aligned.compiler.model import model_contract, ModelContractWrapper
 from aligned.feature_store import ContractStore
-from aligned.feature_view.combined_view import CombinedFeatureView, CombinedFeatureViewMetadata
 from aligned.retrival_job import DerivedFeatureJob, RetrivalJob, RetrivalRequest
+from aligned.schemas.date_formatter import DateFormatter
 from aligned.schemas.derivied_feature import DerivedFeature
 from aligned.schemas.feature import Feature, FeatureLocation, FeatureReference, FeatureType
 from aligned.schemas.record_coders import JsonRecordCoder
@@ -46,10 +46,9 @@ def retrival_request_without_derived() -> RetrivalRequest:
 
 @pytest.fixture
 def retrival_job(retrival_request_without_derived: RetrivalRequest) -> RetrivalJob:
-    import pandas as pd
 
     return FileFullJob(
-        LiteralReference(pd.DataFrame({'id': [1, 2, 3, 4, 5], 'a': [3, 4, 2, 3, 4], 'b': [1, 1, 1, 2, 4]})),
+        LiteralReference(pl.DataFrame({'id': [1, 2, 3, 4, 5], 'a': [3, 4, 2, 3, 4], 'b': [1, 1, 1, 2, 4]})),
         request=retrival_request_without_derived,
     )
 
@@ -95,14 +94,12 @@ def retrival_request_with_derived() -> RetrivalRequest:
 def retrival_job_with_timestamp(retrival_request_with_derived: RetrivalRequest) -> RetrivalJob:
     from datetime import datetime, timedelta
 
-    import pandas as pd
-
     date = datetime(year=2022, month=1, day=1)
     one_day = timedelta(days=1)
 
     return DerivedFeatureJob(
         job=FileFullJob(
             LiteralReference(
-                pd.DataFrame(
+                pl.DataFrame(
                     {
                         'id': [1, 2, 3, 4, 5],
                         'c': [3, 4, 2, 3, 4],
@@ -220,7 +217,10 @@ def breast_scan_without_timestamp_feature_store(
 
 @pytest.fixture
 def scan_with_datetime() -> CsvFileSource:
-    return FileSource.csv_at(path='test_data/data-with-datetime.csv')
+    return FileSource.csv_at(
+        path='test_data/data-with-datetime.csv',
+        date_formatter=DateFormatter.string_format('%Y-%m-%d %H:%M:%S'),
+    )
 
 
 @pytest.fixture
@@ -233,7 +233,7 @@ class BreastDiagnoseFeatureView(FeatureView):
             source=scan_with_datetime,
         )
 
-        scan_id = Entity(dtype=Int32())
+        scan_id = Int32().as_entity()
 
         created_at = EventTimestamp()
 
@@ -545,35 +545,14 @@ def alot_of_transforation_feature_store(
     return feature_store
 
 
-@pytest.fixture
-def combined_view(
-    titanic_feature_view: FeatureView, breast_scan_feature_viewout_with_datetime: FeatureView
-) -> CombinedFeatureView:
-    class SomeCombinedView(CombinedFeatureView):
-
-        metadata = CombinedFeatureViewMetadata(
-            name='combined', description='Some features that depend on multiple view'
-        )
-
-        titanic = titanic_feature_view
-        cancer_scan = breast_scan_feature_viewout_with_datetime
-
-        some_feature = titanic.age + cancer_scan.radius_mean  # type: ignore
-        other_feature = titanic.sibsp + cancer_scan.radius_mean  # type: ignore
-
-    return SomeCombinedView()
-
-
 @pytest.fixture
 def combined_feature_store(
     titanic_feature_view: FeatureView,
     breast_scan_feature_viewout_with_datetime: FeatureView,
-    combined_view: CombinedFeatureView,
 ) -> ContractStore:
     feature_store = ContractStore.empty()
     feature_store.add_feature_view(titanic_feature_view)
    feature_store.add_feature_view(breast_scan_feature_viewout_with_datetime)
-    feature_store.add_combined_feature_view(combined_view)
 
     return feature_store
diff --git a/docker-compose.yaml b/docker-compose.yaml
index a37aed91..086930c7 100644
--- a/docker-compose.yaml
+++ b/docker-compose.yaml
@@ -25,4 +25,4 @@ services:
       - PSQL_DATABASE_TEST=postgresql://postgres:postgres@psql_app_db:5432/aligned-test
     volumes:
       - ./pyproject.toml:/opt/app/pyproject.toml
-      - ./aligned:/opt/app/aligned
+      - ./:/opt/app/
diff --git a/poetry.lock b/poetry.lock
index 57bf4efe..64f76b11 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -2,13 +2,13 @@
 
 [[package]]
 name = "adlfs"
-version = "2024.4.1"
+version = "2024.7.0"
 description = "Access Azure Datalake Gen1 with fsspec and dask"
 optional = true
 python-versions = ">=3.8"
 files = [
-    {file = "adlfs-2024.4.1-py3-none-any.whl", hash = "sha256:acea94612ddacaa34ea8c6babcc95b8da6982f930cdade7a86fbd17382403e16"},
-    {file = "adlfs-2024.4.1.tar.gz", hash = "sha256:75530a45447f358ae53c5c39c298b8d966dae684be84db899f63b94cd96fc000"},
+    {file = "adlfs-2024.7.0-py3-none-any.whl", hash = "sha256:2005c8e124fda3948f2a6abb2dbebb2c936d2d821acaca6afd61932edfa9bc07"},
+    {file = "adlfs-2024.7.0.tar.gz", hash = "sha256:106995b91f0eb5e775bcd5957d180d9a14faef3271a063b1f65c66fd5ab05ddf"},
 ]
 
 [package.dependencies]
@@ -42,101 +42,128 @@ pydantic = ">=1.7.4"
 
 [[package]]
 name = "aiofiles"
-version = "23.2.1"
+version = "24.1.0"
 description = "File support for asyncio."
 optional = true
-python-versions = ">=3.7"
+python-versions = ">=3.8"
 files = [
-    {file = "aiofiles-23.2.1-py3-none-any.whl", hash = "sha256:19297512c647d4b27a2cf7c34caa7e405c0d60b5560618a29a9fe027b18b0107"},
-    {file = "aiofiles-23.2.1.tar.gz", hash = "sha256:84ec2218d8419404abcb9f0c02df3f34c6e0a68ed41072acfb1cef5cbc29051a"},
+    {file = "aiofiles-24.1.0-py3-none-any.whl", hash = "sha256:b4ec55f4195e3eb5d7abd1bf7e061763e864dd4954231fb8539a0ef8bb8260e5"},
+    {file = "aiofiles-24.1.0.tar.gz", hash = "sha256:22a075c9e5a3810f0c2e48f3008c94d68c65d763b9b03857924c99e57355166c"},
 ]
 
 [[package]]
-name = "aiohttp"
-version = "3.9.5"
-description = "Async http client/server framework (asyncio)"
+name = "aiohappyeyeballs"
+version = "2.4.0"
+description = "Happy Eyeballs for asyncio"
 optional = true
 python-versions = ">=3.8"
 files = [
-    {file = "aiohttp-3.9.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:fcde4c397f673fdec23e6b05ebf8d4751314fa7c24f93334bf1f1364c1c69ac7"},
-    {file = "aiohttp-3.9.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5d6b3f1fabe465e819aed2c421a6743d8debbde79b6a8600739300630a01bf2c"},
-    {file = "aiohttp-3.9.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6ae79c1bc12c34082d92bf9422764f799aee4746fd7a392db46b7fd357d4a17a"},
-    {file = "aiohttp-3.9.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4d3ebb9e1316ec74277d19c5f482f98cc65a73ccd5430540d6d11682cd857430"},
-    {file = "aiohttp-3.9.5-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:84dabd95154f43a2ea80deffec9cb44d2e301e38a0c9d331cc4aa0166fe28ae3"},
-    {file = "aiohttp-3.9.5-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c8a02fbeca6f63cb1f0475c799679057fc9268b77075ab7cf3f1c600e81dd46b"},
-    {file = "aiohttp-3.9.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c26959ca7b75ff768e2776d8055bf9582a6267e24556bb7f7bd29e677932be72"},
-    {file = "aiohttp-3.9.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:714d4e5231fed4ba2762ed489b4aec07b2b9953cf4ee31e9871caac895a839c0"},
-    {file = "aiohttp-3.9.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e7a6a8354f1b62e15d48e04350f13e726fa08b62c3d7b8401c0a1314f02e3558"},
-    {file = "aiohttp-3.9.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:c413016880e03e69d166efb5a1a95d40f83d5a3a648d16486592c49ffb76d0db"},
-    {file = "aiohttp-3.9.5-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:ff84aeb864e0fac81f676be9f4685f0527b660f1efdc40dcede3c251ef1e867f"},
-    {file = "aiohttp-3.9.5-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:ad7f2919d7dac062f24d6f5fe95d401597fbb015a25771f85e692d043c9d7832"},
-    {file = "aiohttp-3.9.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:702e2c7c187c1a498a4e2b03155d52658fdd6fda882d3d7fbb891a5cf108bb10"},
-    {file = "aiohttp-3.9.5-cp310-cp310-win32.whl", hash = "sha256:67c3119f5ddc7261d47163ed86d760ddf0e625cd6246b4ed852e82159617b5fb"},
-    {file = "aiohttp-3.9.5-cp310-cp310-win_amd64.whl", hash = "sha256:471f0ef53ccedec9995287f02caf0c068732f026455f07db3f01a46e49d76bbb"},
-    {file = "aiohttp-3.9.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:e0ae53e33ee7476dd3d1132f932eeb39bf6125083820049d06edcdca4381f342"},
-    {file = "aiohttp-3.9.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c088c4d70d21f8ca5c0b8b5403fe84a7bc8e024161febdd4ef04575ef35d474d"},
-    {file = "aiohttp-3.9.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:639d0042b7670222f33b0028de6b4e2fad6451462ce7df2af8aee37dcac55424"},
-    {file = "aiohttp-3.9.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f26383adb94da5e7fb388d441bf09c61e5e35f455a3217bfd790c6b6bc64b2ee"},
-    {file = "aiohttp-3.9.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:66331d00fb28dc90aa606d9a54304af76b335ae204d1836f65797d6fe27f1ca2"},
-    {file = "aiohttp-3.9.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4ff550491f5492ab5ed3533e76b8567f4b37bd2995e780a1f46bca2024223233"},
-    {file = "aiohttp-3.9.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f22eb3a6c1080d862befa0a89c380b4dafce29dc6cd56083f630073d102eb595"},
-    {file = "aiohttp-3.9.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a81b1143d42b66ffc40a441379387076243ef7b51019204fd3ec36b9f69e77d6"},
-    {file = "aiohttp-3.9.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:f64fd07515dad67f24b6ea4a66ae2876c01031de91c93075b8093f07c0a2d93d"},
-    {file = "aiohttp-3.9.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:93e22add827447d2e26d67c9ac0161756007f152fdc5210277d00a85f6c92323"},
-    {file = "aiohttp-3.9.5-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:55b39c8684a46e56ef8c8d24faf02de4a2b2ac60d26cee93bc595651ff545de9"},
-    {file = "aiohttp-3.9.5-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4715a9b778f4293b9f8ae7a0a7cef9829f02ff8d6277a39d7f40565c737d3771"},
-    {file = "aiohttp-3.9.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:afc52b8d969eff14e069a710057d15ab9ac17cd4b6753042c407dcea0e40bf75"},
-    {file = "aiohttp-3.9.5-cp311-cp311-win32.whl", hash = "sha256:b3df71da99c98534be076196791adca8819761f0bf6e08e07fd7da25127150d6"},
-    {file = "aiohttp-3.9.5-cp311-cp311-win_amd64.whl", hash = "sha256:88e311d98cc0bf45b62fc46c66753a83445f5ab20038bcc1b8a1cc05666f428a"},
-    {file = "aiohttp-3.9.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:c7a4b7a6cf5b6eb11e109a9755fd4fda7d57395f8c575e166d363b9fc3ec4678"},
-    {file = "aiohttp-3.9.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:0a158704edf0abcac8ac371fbb54044f3270bdbc93e254a82b6c82be1ef08f3c"},
-    {file = "aiohttp-3.9.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:d153f652a687a8e95ad367a86a61e8d53d528b0530ef382ec5aaf533140ed00f"},
-    {file = "aiohttp-3.9.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:82a6a97d9771cb48ae16979c3a3a9a18b600a8505b1115cfe354dfb2054468b4"},
-    {file = "aiohttp-3.9.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:60cdbd56f4cad9f69c35eaac0fbbdf1f77b0ff9456cebd4902f3dd1cf096464c"},
-    {file = "aiohttp-3.9.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8676e8fd73141ded15ea586de0b7cda1542960a7b9ad89b2b06428e97125d4fa"},
-    {file = "aiohttp-3.9.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:da00da442a0e31f1c69d26d224e1efd3a1ca5bcbf210978a2ca7426dfcae9f58"},
-    {file = "aiohttp-3.9.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:18f634d540dd099c262e9f887c8bbacc959847cfe5da7a0e2e1cf3f14dbf2daf"},
-    {file = "aiohttp-3.9.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:320e8618eda64e19d11bdb3bd04ccc0a816c17eaecb7e4945d01deee2a22f95f"},
-    {file = "aiohttp-3.9.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:2faa61a904b83142747fc6a6d7ad8fccff898c849123030f8e75d5d967fd4a81"},
-    {file = "aiohttp-3.9.5-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:8c64a6dc3fe5db7b1b4d2b5cb84c4f677768bdc340611eca673afb7cf416ef5a"},
-    {file = "aiohttp-3.9.5-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:393c7aba2b55559ef7ab791c94b44f7482a07bf7640d17b341b79081f5e5cd1a"},
-    {file = "aiohttp-3.9.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:c671dc117c2c21a1ca10c116cfcd6e3e44da7fcde37bf83b2be485ab377b25da"},
-    {file = "aiohttp-3.9.5-cp312-cp312-win32.whl", hash = "sha256:5a7ee16aab26e76add4afc45e8f8206c95d1d75540f1039b84a03c3b3800dd59"},
-    {file = "aiohttp-3.9.5-cp312-cp312-win_amd64.whl", hash = "sha256:5ca51eadbd67045396bc92a4345d1790b7301c14d1848feaac1d6a6c9289e888"},
-    {file = "aiohttp-3.9.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:694d828b5c41255e54bc2dddb51a9f5150b4eefa9886e38b52605a05d96566e8"},
-    {file = "aiohttp-3.9.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0605cc2c0088fcaae79f01c913a38611ad09ba68ff482402d3410bf59039bfb8"},
-    {file = "aiohttp-3.9.5-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4558e5012ee03d2638c681e156461d37b7a113fe13970d438d95d10173d25f78"},
-    {file = "aiohttp-3.9.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9dbc053ac75ccc63dc3a3cc547b98c7258ec35a215a92bd9f983e0aac95d3d5b"},
-    {file = "aiohttp-3.9.5-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4109adee842b90671f1b689901b948f347325045c15f46b39797ae1bf17019de"},
-    {file = "aiohttp-3.9.5-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a6ea1a5b409a85477fd8e5ee6ad8f0e40bf2844c270955e09360418cfd09abac"},
-    {file = "aiohttp-3.9.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f3c2890ca8c59ee683fd09adf32321a40fe1cf164e3387799efb2acebf090c11"},
-    {file = "aiohttp-3.9.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3916c8692dbd9d55c523374a3b8213e628424d19116ac4308e434dbf6d95bbdd"},
-    {file = "aiohttp-3.9.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:8d1964eb7617907c792ca00b341b5ec3e01ae8c280825deadbbd678447b127e1"},
-    {file = "aiohttp-3.9.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:d5ab8e1f6bee051a4bf6195e38a5c13e5e161cb7bad83d8854524798bd9fcd6e"},
-    {file = "aiohttp-3.9.5-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:52c27110f3862a1afbcb2af4281fc9fdc40327fa286c4625dfee247c3ba90156"},
-    {file = "aiohttp-3.9.5-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:7f64cbd44443e80094309875d4f9c71d0401e966d191c3d469cde4642bc2e031"},
-    {file = "aiohttp-3.9.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8b4f72fbb66279624bfe83fd5eb6aea0022dad8eec62b71e7bf63ee1caadeafe"},
-    {file = "aiohttp-3.9.5-cp38-cp38-win32.whl", hash = "sha256:6380c039ec52866c06d69b5c7aad5478b24ed11696f0e72f6b807cfb261453da"},
-    {file = "aiohttp-3.9.5-cp38-cp38-win_amd64.whl", hash = "sha256:da22dab31d7180f8c3ac7c7635f3bcd53808f374f6aa333fe0b0b9e14b01f91a"},
-    {file = "aiohttp-3.9.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:1732102949ff6087589408d76cd6dea656b93c896b011ecafff418c9661dc4ed"},
-    {file = "aiohttp-3.9.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c6021d296318cb6f9414b48e6a439a7f5d1f665464da507e8ff640848ee2a58a"},
-    {file = "aiohttp-3.9.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:239f975589a944eeb1bad26b8b140a59a3a320067fb3cd10b75c3092405a1372"},
-    {file = "aiohttp-3.9.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3b7b30258348082826d274504fbc7c849959f1989d86c29bc355107accec6cfb"},
-    {file = "aiohttp-3.9.5-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cd2adf5c87ff6d8b277814a28a535b59e20bfea40a101db6b3bdca7e9926bc24"},
-    {file = "aiohttp-3.9.5-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e9a3d838441bebcf5cf442700e3963f58b5c33f015341f9ea86dcd7d503c07e2"},
-    {file = "aiohttp-3.9.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9e3a1ae66e3d0c17cf65c08968a5ee3180c5a95920ec2731f53343fac9bad106"},
-    {file = "aiohttp-3.9.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9c69e77370cce2d6df5d12b4e12bdcca60c47ba13d1cbbc8645dd005a20b738b"},
-    {file = "aiohttp-3.9.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0cbf56238f4bbf49dab8c2dc2e6b1b68502b1e88d335bea59b3f5b9f4c001475"},
-    {file = "aiohttp-3.9.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:d1469f228cd9ffddd396d9948b8c9cd8022b6d1bf1e40c6f25b0fb90b4f893ed"},
-    {file = "aiohttp-3.9.5-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:45731330e754f5811c314901cebdf19dd776a44b31927fa4b4dbecab9e457b0c"},
-    {file = "aiohttp-3.9.5-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:3fcb4046d2904378e3aeea1df51f697b0467f2aac55d232c87ba162709478c46"},
-    {file = "aiohttp-3.9.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8cf142aa6c1a751fcb364158fd710b8a9be874b81889c2bd13aa8893197455e2"},
-    {file = "aiohttp-3.9.5-cp39-cp39-win32.whl", hash = "sha256:7b179eea70833c8dee51ec42f3b4097bd6370892fa93f510f76762105568cf09"},
-    {file = "aiohttp-3.9.5-cp39-cp39-win_amd64.whl", hash = "sha256:38d80498e2e169bc61418ff36170e0aad0cd268da8b38a17c4cf29d254a8b3f1"},
-    {file = "aiohttp-3.9.5.tar.gz", hash = "sha256:edea7d15772ceeb29db4aff55e482d4bcfb6ae160ce144f2682de02f6d693551"},
+    {file = "aiohappyeyeballs-2.4.0-py3-none-any.whl", hash = "sha256:7ce92076e249169a13c2f49320d1967425eaf1f407522d707d59cac7628d62bd"},
+    {file = "aiohappyeyeballs-2.4.0.tar.gz", hash = "sha256:55a1714f084e63d49639800f95716da97a1f173d46a16dfcfda0016abb93b6b2"},
 ]
 
-[package.dependencies]
+[[package]]
+name = "aiohttp"
+version = "3.10.5"
+description = "Async http client/server framework (asyncio)"
+optional = true
+python-versions = ">=3.8"
+files = [
+    {file = "aiohttp-3.10.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:18a01eba2574fb9edd5f6e5fb25f66e6ce061da5dab5db75e13fe1558142e0a3"},
+    {file = "aiohttp-3.10.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:94fac7c6e77ccb1ca91e9eb4cb0ac0270b9fb9b289738654120ba8cebb1189c6"},
+    {file = "aiohttp-3.10.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2f1f1c75c395991ce9c94d3e4aa96e5c59c8356a15b1c9231e783865e2772699"},
+    {file = "aiohttp-3.10.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4f7acae3cf1a2a2361ec4c8e787eaaa86a94171d2417aae53c0cca6ca3118ff6"},
+    {file = "aiohttp-3.10.5-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:94c4381ffba9cc508b37d2e536b418d5ea9cfdc2848b9a7fea6aebad4ec6aac1"},
+    {file = "aiohttp-3.10.5-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c31ad0c0c507894e3eaa843415841995bf8de4d6b2d24c6e33099f4bc9fc0d4f"},
+    {file = "aiohttp-3.10.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0912b8a8fadeb32ff67a3ed44249448c20148397c1ed905d5dac185b4ca547bb"},
+    {file = "aiohttp-3.10.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0d93400c18596b7dc4794d48a63fb361b01a0d8eb39f28800dc900c8fbdaca91"},
+    {file = "aiohttp-3.10.5-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:d00f3c5e0d764a5c9aa5a62d99728c56d455310bcc288a79cab10157b3af426f"},
+    {file = "aiohttp-3.10.5-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:d742c36ed44f2798c8d3f4bc511f479b9ceef2b93f348671184139e7d708042c"},
+    {file = "aiohttp-3.10.5-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:814375093edae5f1cb31e3407997cf3eacefb9010f96df10d64829362ae2df69"},
+    {file = "aiohttp-3.10.5-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:8224f98be68a84b19f48e0bdc14224b5a71339aff3a27df69989fa47d01296f3"},
+    {file = "aiohttp-3.10.5-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:d9a487ef090aea982d748b1b0d74fe7c3950b109df967630a20584f9a99c0683"},
+    {file = "aiohttp-3.10.5-cp310-cp310-win32.whl", hash = "sha256:d9ef084e3dc690ad50137cc05831c52b6ca428096e6deb3c43e95827f531d5ef"},
+    {file = "aiohttp-3.10.5-cp310-cp310-win_amd64.whl", hash = "sha256:66bf9234e08fe561dccd62083bf67400bdbf1c67ba9efdc3dac03650e97c6088"},
+    {file = "aiohttp-3.10.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:8c6a4e5e40156d72a40241a25cc226051c0a8d816610097a8e8f517aeacd59a2"},
+    {file = "aiohttp-3.10.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:2c634a3207a5445be65536d38c13791904fda0748b9eabf908d3fe86a52941cf"},
+    {file = "aiohttp-3.10.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4aff049b5e629ef9b3e9e617fa6e2dfeda1bf87e01bcfecaf3949af9e210105e"},
+    {file = "aiohttp-3.10.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1942244f00baaacaa8155eca94dbd9e8cc7017deb69b75ef67c78e89fdad3c77"},
+    {file = "aiohttp-3.10.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e04a1f2a65ad2f93aa20f9ff9f1b672bf912413e5547f60749fa2ef8a644e061"},
+    {file = "aiohttp-3.10.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7f2bfc0032a00405d4af2ba27f3c429e851d04fad1e5ceee4080a1c570476697"},
+    {file = "aiohttp-3.10.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:424ae21498790e12eb759040bbb504e5e280cab64693d14775c54269fd1d2bb7"},
+    {file = "aiohttp-3.10.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:975218eee0e6d24eb336d0328c768ebc5d617609affaca5dbbd6dd1984f16ed0"},
+    {file = "aiohttp-3.10.5-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:4120d7fefa1e2d8fb6f650b11489710091788de554e2b6f8347c7a20ceb003f5"},
+    {file = "aiohttp-3.10.5-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:b90078989ef3fc45cf9221d3859acd1108af7560c52397ff4ace8ad7052a132e"},
+    {file = "aiohttp-3.10.5-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:ba5a8b74c2a8af7d862399cdedce1533642fa727def0b8c3e3e02fcb52dca1b1"},
+    {file = "aiohttp-3.10.5-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:02594361128f780eecc2a29939d9dfc870e17b45178a867bf61a11b2a4367277"},
+    {file = "aiohttp-3.10.5-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:8fb4fc029e135859f533025bc82047334e24b0d489e75513144f25408ecaf058"},
+    {file = "aiohttp-3.10.5-cp311-cp311-win32.whl", hash = "sha256:e1ca1ef5ba129718a8fc827b0867f6aa4e893c56eb00003b7367f8a733a9b072"},
+    {file = "aiohttp-3.10.5-cp311-cp311-win_amd64.whl", hash = "sha256:349ef8a73a7c5665cca65c88ab24abe75447e28aa3bc4c93ea5093474dfdf0ff"},
+    {file = "aiohttp-3.10.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:305be5ff2081fa1d283a76113b8df7a14c10d75602a38d9f012935df20731487"},
+    {file = "aiohttp-3.10.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:3a1c32a19ee6bbde02f1cb189e13a71b321256cc1d431196a9f824050b160d5a"},
+    {file = "aiohttp-3.10.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:61645818edd40cc6f455b851277a21bf420ce347baa0b86eaa41d51ef58ba23d"},
+    {file = "aiohttp-3.10.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6c225286f2b13bab5987425558baa5cbdb2bc925b2998038fa028245ef421e75"},
+    {file = "aiohttp-3.10.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8ba01ebc6175e1e6b7275c907a3a36be48a2d487549b656aa90c8a910d9f3178"},
+    {file = "aiohttp-3.10.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8eaf44ccbc4e35762683078b72bf293f476561d8b68ec8a64f98cf32811c323e"},
+    {file = "aiohttp-3.10.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b1c43eb1ab7cbf411b8e387dc169acb31f0ca0d8c09ba63f9eac67829585b44f"},
+    {file = "aiohttp-3.10.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:de7a5299827253023c55ea549444e058c0eb496931fa05d693b95140a947cb73"},
+    {file = "aiohttp-3.10.5-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:4790f0e15f00058f7599dab2b206d3049d7ac464dc2e5eae0e93fa18aee9e7bf"},
+    {file = "aiohttp-3.10.5-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:44b324a6b8376a23e6ba25d368726ee3bc281e6ab306db80b5819999c737d820"},
+    {file = "aiohttp-3.10.5-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:0d277cfb304118079e7044aad0b76685d30ecb86f83a0711fc5fb257ffe832ca"},
+    {file = "aiohttp-3.10.5-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:54d9ddea424cd19d3ff6128601a4a4d23d54a421f9b4c0fff740505813739a91"},
+    {file = "aiohttp-3.10.5-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:4f1c9866ccf48a6df2b06823e6ae80573529f2af3a0992ec4fe75b1a510df8a6"},
+    {file = "aiohttp-3.10.5-cp312-cp312-win32.whl", hash = "sha256:dc4826823121783dccc0871e3f405417ac116055bf184ac04c36f98b75aacd12"},
+    {file = "aiohttp-3.10.5-cp312-cp312-win_amd64.whl", hash = "sha256:22c0a23a3b3138a6bf76fc553789cb1a703836da86b0f306b6f0dc1617398abc"},
+    {file = "aiohttp-3.10.5-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:7f6b639c36734eaa80a6c152a238242bedcee9b953f23bb887e9102976343092"},
+    {file = "aiohttp-3.10.5-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f29930bc2921cef955ba39a3ff87d2c4398a0394ae217f41cb02d5c26c8b1b77"},
+    {file = "aiohttp-3.10.5-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f489a2c9e6455d87eabf907ac0b7d230a9786be43fbe884ad184ddf9e9c1e385"},
+    {file = "aiohttp-3.10.5-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:123dd5b16b75b2962d0fff566effb7a065e33cd4538c1692fb31c3bda2bfb972"},
+    {file = "aiohttp-3.10.5-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b98e698dc34966e5976e10bbca6d26d6724e6bdea853c7c10162a3235aba6e16"},
+    {file = "aiohttp-3.10.5-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c3b9162bab7e42f21243effc822652dc5bb5e8ff42a4eb62fe7782bcbcdfacf6"},
+    {file = "aiohttp-3.10.5-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1923a5c44061bffd5eebeef58cecf68096e35003907d8201a4d0d6f6e387ccaa"},
+    {file = "aiohttp-3.10.5-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d55f011da0a843c3d3df2c2cf4e537b8070a419f891c930245f05d329c4b0689"},
+    {file = "aiohttp-3.10.5-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:afe16a84498441d05e9189a15900640a2d2b5e76cf4efe8cbb088ab4f112ee57"},
+    {file = "aiohttp-3.10.5-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:f8112fb501b1e0567a1251a2fd0747baae60a4ab325a871e975b7bb67e59221f"},
+    {file = "aiohttp-3.10.5-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:1e72589da4c90337837fdfe2026ae1952c0f4a6e793adbbfbdd40efed7c63599"},
+    {file = "aiohttp-3.10.5-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:4d46c7b4173415d8e583045fbc4daa48b40e31b19ce595b8d92cf639396c15d5"},
+    {file = "aiohttp-3.10.5-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:33e6bc4bab477c772a541f76cd91e11ccb6d2efa2b8d7d7883591dfb523e5987"},
+    {file = "aiohttp-3.10.5-cp313-cp313-win32.whl", hash = "sha256:c58c6837a2c2a7cf3133983e64173aec11f9c2cd8e87ec2fdc16ce727bcf1a04"},
+    {file = "aiohttp-3.10.5-cp313-cp313-win_amd64.whl", hash = "sha256:38172a70005252b6893088c0f5e8a47d173df7cc2b2bd88650957eb84fcf5022"},
+    {file = "aiohttp-3.10.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:f6f18898ace4bcd2d41a122916475344a87f1dfdec626ecde9ee802a711bc569"},
+    {file = "aiohttp-3.10.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:5ede29d91a40ba22ac1b922ef510aab871652f6c88ef60b9dcdf773c6d32ad7a"},
+    {file = "aiohttp-3.10.5-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:673f988370f5954df96cc31fd99c7312a3af0a97f09e407399f61583f30da9bc"},
+    {file = "aiohttp-3.10.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:58718e181c56a3c02d25b09d4115eb02aafe1a732ce5714ab70326d9776457c3"},
+    {file = "aiohttp-3.10.5-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4b38b1570242fbab8d86a84128fb5b5234a2f70c2e32f3070143a6d94bc854cf"},
+    {file = "aiohttp-3.10.5-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:074d1bff0163e107e97bd48cad9f928fa5a3eb4b9d33366137ffce08a63e37fe"},
+    {file = "aiohttp-3.10.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fd31f176429cecbc1ba499d4aba31aaccfea488f418d60376b911269d3b883c5"},
+    {file = "aiohttp-3.10.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7384d0b87d4635ec38db9263e6a3f1eb609e2e06087f0aa7f63b76833737b471"},
+    {file = "aiohttp-3.10.5-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:8989f46f3d7ef79585e98fa991e6ded55d2f48ae56d2c9fa5e491a6e4effb589"},
+    {file = "aiohttp-3.10.5-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:c83f7a107abb89a227d6c454c613e7606c12a42b9a4ca9c5d7dad25d47c776ae"},
+    {file = "aiohttp-3.10.5-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:cde98f323d6bf161041e7627a5fd763f9fd829bcfcd089804a5fdce7bb6e1b7d"},
+    {file = "aiohttp-3.10.5-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:676f94c5480d8eefd97c0c7e3953315e4d8c2b71f3b49539beb2aa676c58272f"},
+    {file = "aiohttp-3.10.5-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:2d21ac12dc943c68135ff858c3a989f2194a709e6e10b4c8977d7fcd67dfd511"},
+    {file = "aiohttp-3.10.5-cp38-cp38-win32.whl", hash = "sha256:17e997105bd1a260850272bfb50e2a328e029c941c2708170d9d978d5a30ad9a"},
+    {file = "aiohttp-3.10.5-cp38-cp38-win_amd64.whl", hash = "sha256:1c19de68896747a2aa6257ae4cf6ef59d73917a36a35ee9d0a6f48cff0f94db8"},
+    {file = "aiohttp-3.10.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7e2fe37ac654032db1f3499fe56e77190282534810e2a8e833141a021faaab0e"},
+    {file = "aiohttp-3.10.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f5bf3ead3cb66ab990ee2561373b009db5bc0e857549b6c9ba84b20bc462e172"},
+    {file = "aiohttp-3.10.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1b2c16a919d936ca87a3c5f0e43af12a89a3ce7ccbce59a2d6784caba945b68b"},
+    {file = "aiohttp-3.10.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ad146dae5977c4dd435eb31373b3fe9b0b1bf26858c6fc452bf6af394067e10b"},
+    {file = "aiohttp-3.10.5-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8c5c6fa16412b35999320f5c9690c0f554392dc222c04e559217e0f9ae244b92"},
+    {file = "aiohttp-3.10.5-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:95c4dc6f61d610bc0ee1edc6f29d993f10febfe5b76bb470b486d90bbece6b22"},
+    {file = "aiohttp-3.10.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:da452c2c322e9ce0cfef392e469a26d63d42860f829026a63374fde6b5c5876f"},
+    {file = "aiohttp-3.10.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:898715cf566ec2869d5cb4d5fb4be408964704c46c96b4be267442d265390f32"},
+    {file = "aiohttp-3.10.5-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:391cc3a9c1527e424c6865e087897e766a917f15dddb360174a70467572ac6ce"},
+    {file = "aiohttp-3.10.5-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:380f926b51b92d02a34119d072f178d80bbda334d1a7e10fa22d467a66e494db"},
+    {file = "aiohttp-3.10.5-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:ce91db90dbf37bb6fa0997f26574107e1b9d5ff939315247b7e615baa8ec313b"},
+    {file = "aiohttp-3.10.5-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:9093a81e18c45227eebe4c16124ebf3e0d893830c6aca7cc310bfca8fe59d857"},
+    {file = "aiohttp-3.10.5-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:ee40b40aa753d844162dcc80d0fe256b87cba48ca0054f64e68000453caead11"},
+    {file = "aiohttp-3.10.5-cp39-cp39-win32.whl", hash = "sha256:03f2645adbe17f274444953bdea69f8327e9d278d961d85657cb0d06864814c1"},
+    {file = "aiohttp-3.10.5-cp39-cp39-win_amd64.whl", hash = "sha256:d17920f18e6ee090bdd3d0bfffd769d9f2cb4c8ffde3eb203777a3895c128862"},
+    {file = "aiohttp-3.10.5.tar.gz", hash = "sha256:f071854b47d39591ce9a17981c46790acb30518e2f83dfca8db2dfa091178691"},
+]
+
+[package.dependencies]
+aiohappyeyeballs = ">=2.3.0"
 aiosignal = ">=1.1.2"
 async-timeout = {version = ">=4.0,<5.0", markers = "python_version < \"3.11\""}
 attrs = ">=17.3.0"
@@ -145,7 +172,7 @@ multidict = ">=4.5,<7.0"
 yarl = ">=1.0,<2.0"
 
 [package.extras]
-speedups = ["Brotli", "aiodns", "brotlicffi"]
+speedups = ["Brotli", "aiodns (>=3.2.0)", "brotlicffi"]
 
 [[package]]
 name = "aiosignal"
@@ -163,13 +190,13 @@ frozenlist = ">=1.1.0"
 
 [[package]]
 name = "alembic"
-version = "1.13.1"
+version = "1.13.2"
 description = "A database migration tool for SQLAlchemy."
 optional = true
 python-versions = ">=3.8"
 files = [
-    {file = "alembic-1.13.1-py3-none-any.whl", hash = "sha256:2edcc97bed0bd3272611ce3a98d98279e9c209e7186e43e75bbb1b2bdfdbcc43"},
-    {file = "alembic-1.13.1.tar.gz", hash = "sha256:4932c8558bf68f2ee92b9bbcb8218671c627064d5b08939437af6d77dc05e595"},
+    {file = "alembic-1.13.2-py3-none-any.whl", hash = "sha256:6b8733129a6224a9a711e17c99b08462dbf7cc9670ba8f2e2ae9af860ceb1953"},
+    {file = "alembic-1.13.2.tar.gz", hash = "sha256:1ff0ae32975f4fd96028c39ed9bb3c867fe3af956bd7bb37343b54c9fe7445ef"},
 ]
 
 [package.dependencies]
@@ -331,22 +358,22 @@ test = ["flake8 (>=6.1,<7.0)", "uvloop (>=0.15.3)"]
 
 [[package]]
 name = "attrs"
-version = "23.2.0"
+version = "24.2.0"
 description = "Classes Without Boilerplate"
 optional = true
 python-versions = ">=3.7"
 files = [
-    {file = "attrs-23.2.0-py3-none-any.whl", hash = "sha256:99b87a485a5820b23b879f04c2305b44b951b502fd64be915879d77a7e8fc6f1"},
-    {file = "attrs-23.2.0.tar.gz", hash = "sha256:935dc3b529c262f6cf76e50877d35a4bd3c1de194fd41f47a2b7ae8f19971f30"},
+    {file = "attrs-24.2.0-py3-none-any.whl", hash = "sha256:81921eb96de3191c8258c199618104dd27ac608d9366f5e35d011eae1867ede2"},
+    {file = "attrs-24.2.0.tar.gz", hash = "sha256:5cfb1b9148b5b086569baec03f20d7b6bf3bcacc9a42bebf87ffaaca362f6346"},
 ]
 
 [package.extras]
-cov = ["attrs[tests]", "coverage[toml] (>=5.3)"]
-dev = ["attrs[tests]", "pre-commit"]
-docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier", "zope-interface"]
-tests = ["attrs[tests-no-zope]", "zope-interface"]
-tests-mypy = ["mypy (>=1.6)", "pytest-mypy-plugins"]
-tests-no-zope = ["attrs[tests-mypy]", "cloudpickle", "hypothesis", "pympler", "pytest (>=4.3.0)", "pytest-xdist[psutil]"]
+benchmark = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-codspeed", "pytest-mypy-plugins", "pytest-xdist[psutil]"]
+cov = ["cloudpickle", "coverage[toml] (>=5.3)", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"]
+dev = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pre-commit", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"]
+docs = ["cogapp", "furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier (<24.7)"]
+tests = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"]
+tests-mypy = ["mypy (>=1.11.1)", "pytest-mypy-plugins"]
 
 [[package]]
 name = "azure-core"
@@ -385,13 +412,13 @@ requests = ">=2.20.0"
 
 [[package]]
 name = "azure-identity"
-version = "1.16.1"
+version = "1.17.1"
 description = "Microsoft Azure Identity Library for Python"
 optional = true
 python-versions = ">=3.8"
 files = [
    {file = "azure-identity-1.16.1.tar.gz", hash = "sha256:6d93f04468f240d59246d8afde3091494a5040d4f141cad0f49fc0c399d0d91e"},
-    {file = "azure_identity-1.16.1-py3-none-any.whl", hash = "sha256:8fb07c25642cd4ac422559a8b50d3e77f73dcc2bbfaba419d06d6c9d7cff6726"},
+    {file = "azure-identity-1.17.1.tar.gz", hash = "sha256:32ecc67cc73f4bd0595e4f64b1ca65cd05186f4fe6f98ed2ae9f1aa32646efea"},
+    {file = "azure_identity-1.17.1-py3-none-any.whl", hash = "sha256:db8d59c183b680e763722bfe8ebc45930e6c57df510620985939f7f3191e0382"},
 ]
 
 [package.dependencies]
@@ -399,16 +426,17 @@ azure-core = ">=1.23.0"
 cryptography = ">=2.5"
 msal = ">=1.24.0"
 msal-extensions = ">=0.3.0"
+typing-extensions = ">=4.0.0"
 
 [[package]]
 name = "azure-storage-blob"
-version = "12.20.0"
+version = "12.22.0" description = "Microsoft Azure Blob Storage Client Library for Python" optional = true python-versions = ">=3.8" files = [ - {file = "azure-storage-blob-12.20.0.tar.gz", hash = "sha256:eeb91256e41d4b5b9bad6a87fd0a8ade07dd58aa52344e2c8d2746e27a017d3b"}, - {file = "azure_storage_blob-12.20.0-py3-none-any.whl", hash = "sha256:de6b3bf3a90e9341a6bcb96a2ebe981dffff993e9045818f6549afea827a52a9"}, + {file = "azure-storage-blob-12.22.0.tar.gz", hash = "sha256:b3804bb4fe8ab1c32771fa464053da772a682c2737b19da438a3f4e5e3b3736e"}, + {file = "azure_storage_blob-12.22.0-py3-none-any.whl", hash = "sha256:bb7d2d824ce3f11f14a27ee7d9281289f7e072ac8311c52e3652672455b7d5e8"}, ] [package.dependencies] @@ -479,85 +507,100 @@ files = [ [[package]] name = "cachetools" -version = "5.3.3" +version = "5.5.0" description = "Extensible memoizing collections and decorators" optional = true python-versions = ">=3.7" files = [ - {file = "cachetools-5.3.3-py3-none-any.whl", hash = "sha256:0abad1021d3f8325b2fc1d2e9c8b9c9d57b04c3932657a72465447332c24d945"}, - {file = "cachetools-5.3.3.tar.gz", hash = "sha256:ba29e2dfa0b8b556606f097407ed1aa62080ee108ab0dc5ec9d6a723a007d105"}, + {file = "cachetools-5.5.0-py3-none-any.whl", hash = "sha256:02134e8439cdc2ffb62023ce1debca2944c3f289d66bb17ead3ab3dede74b292"}, + {file = "cachetools-5.5.0.tar.gz", hash = "sha256:2cc24fb4cbe39633fb7badd9db9ca6295d766d9c2995f245725a46715d050f2a"}, ] [[package]] name = "certifi" -version = "2024.6.2" +version = "2024.8.30" description = "Python package for providing Mozilla's CA Bundle." optional = false python-versions = ">=3.6" files = [ - {file = "certifi-2024.6.2-py3-none-any.whl", hash = "sha256:ddc6c8ce995e6987e7faf5e3f1b02b302836a0e5d98ece18392cb1a36c72ad56"}, - {file = "certifi-2024.6.2.tar.gz", hash = "sha256:3cd43f1c6fa7dedc5899d69d3ad0398fd018ad1a17fba83ddaf78aa46c747516"}, + {file = "certifi-2024.8.30-py3-none-any.whl", hash = "sha256:922820b53db7a7257ffbda3f597266d435245903d80737e34f8a45ff3e3230d8"}, + {file = "certifi-2024.8.30.tar.gz", hash = "sha256:bec941d2aa8195e248a60b31ff9f0558284cf01a52591ceda73ea9afffd69fd9"}, ] [[package]] name = "cffi" -version = "1.16.0" +version = "1.17.1" description = "Foreign Function Interface for Python calling C code." 
 optional = false
 python-versions = ">=3.8"
 files = [
-    {file = "cffi-1.16.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6b3d6606d369fc1da4fd8c357d026317fbb9c9b75d36dc16e90e84c26854b088"},
-    {file = "cffi-1.16.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ac0f5edd2360eea2f1daa9e26a41db02dd4b0451b48f7c318e217ee092a213e9"},
-    {file = "cffi-1.16.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7e61e3e4fa664a8588aa25c883eab612a188c725755afff6289454d6362b9673"},
-    {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a72e8961a86d19bdb45851d8f1f08b041ea37d2bd8d4fd19903bc3083d80c896"},
-    {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5b50bf3f55561dac5438f8e70bfcdfd74543fd60df5fa5f62d94e5867deca684"},
-    {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7651c50c8c5ef7bdb41108b7b8c5a83013bfaa8a935590c5d74627c047a583c7"},
-    {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e4108df7fe9b707191e55f33efbcb2d81928e10cea45527879a4749cbe472614"},
-    {file = "cffi-1.16.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:32c68ef735dbe5857c810328cb2481e24722a59a2003018885514d4c09af9743"},
-    {file = "cffi-1.16.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:673739cb539f8cdaa07d92d02efa93c9ccf87e345b9a0b556e3ecc666718468d"},
-    {file = "cffi-1.16.0-cp310-cp310-win32.whl", hash = "sha256:9f90389693731ff1f659e55c7d1640e2ec43ff725cc61b04b2f9c6d8d017df6a"},
-    {file = "cffi-1.16.0-cp310-cp310-win_amd64.whl", hash = "sha256:e6024675e67af929088fda399b2094574609396b1decb609c55fa58b028a32a1"},
-    {file = "cffi-1.16.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b84834d0cf97e7d27dd5b7f3aca7b6e9263c56308ab9dc8aae9784abb774d404"},
-    {file = "cffi-1.16.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1b8ebc27c014c59692bb2664c7d13ce7a6e9a629be20e54e7271fa696ff2b417"},
-    {file = "cffi-1.16.0-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ee07e47c12890ef248766a6e55bd38ebfb2bb8edd4142d56db91b21ea68b7627"},
-    {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8a9d3ebe49f084ad71f9269834ceccbf398253c9fac910c4fd7053ff1386936"},
-    {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e70f54f1796669ef691ca07d046cd81a29cb4deb1e5f942003f401c0c4a2695d"},
-    {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5bf44d66cdf9e893637896c7faa22298baebcd18d1ddb6d2626a6e39793a1d56"},
-    {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7b78010e7b97fef4bee1e896df8a4bbb6712b7f05b7ef630f9d1da00f6444d2e"},
-    {file = "cffi-1.16.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c6a164aa47843fb1b01e941d385aab7215563bb8816d80ff3a363a9f8448a8dc"},
-    {file = "cffi-1.16.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e09f3ff613345df5e8c3667da1d918f9149bd623cd9070c983c013792a9a62eb"},
-    {file = "cffi-1.16.0-cp311-cp311-win32.whl", hash = "sha256:2c56b361916f390cd758a57f2e16233eb4f64bcbeee88a4881ea90fca14dc6ab"},
-    {file = "cffi-1.16.0-cp311-cp311-win_amd64.whl", hash = "sha256:db8e577c19c0fda0beb7e0d4e09e0ba74b1e4c092e0e40bfa12fe05b6f6d75ba"},
-    {file = "cffi-1.16.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:fa3a0128b152627161ce47201262d3140edb5a5c3da88d73a1b790a959126956"},
-    {file = "cffi-1.16.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:68e7c44931cc171c54ccb702482e9fc723192e88d25a0e133edd7aff8fcd1f6e"},
-    {file = "cffi-1.16.0-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:abd808f9c129ba2beda4cfc53bde801e5bcf9d6e0f22f095e45327c038bfe68e"},
-    {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:88e2b3c14bdb32e440be531ade29d3c50a1a59cd4e51b1dd8b0865c54ea5d2e2"},
-    {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fcc8eb6d5902bb1cf6dc4f187ee3ea80a1eba0a89aba40a5cb20a5087d961357"},
-    {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b7be2d771cdba2942e13215c4e340bfd76398e9227ad10402a8767ab1865d2e6"},
-    {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e715596e683d2ce000574bae5d07bd522c781a822866c20495e52520564f0969"},
-    {file = "cffi-1.16.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:2d92b25dbf6cae33f65005baf472d2c245c050b1ce709cc4588cdcdd5495b520"},
-    {file = "cffi-1.16.0-cp312-cp312-win32.whl", hash = "sha256:b2ca4e77f9f47c55c194982e10f058db063937845bb2b7a86c84a6cfe0aefa8b"},
-    {file = "cffi-1.16.0-cp312-cp312-win_amd64.whl", hash = "sha256:68678abf380b42ce21a5f2abde8efee05c114c2fdb2e9eef2efdb0257fba1235"},
-    {file = "cffi-1.16.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0c9ef6ff37e974b73c25eecc13952c55bceed9112be2d9d938ded8e856138bcc"},
-    {file = "cffi-1.16.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a09582f178759ee8128d9270cd1344154fd473bb77d94ce0aeb2a93ebf0feaf0"},
-    {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e760191dd42581e023a68b758769e2da259b5d52e3103c6060ddc02c9edb8d7b"},
-    {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:80876338e19c951fdfed6198e70bc88f1c9758b94578d5a7c4c91a87af3cf31c"},
-    {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a6a14b17d7e17fa0d207ac08642c8820f84f25ce17a442fd15e27ea18d67c59b"},
-    {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6602bc8dc6f3a9e02b6c22c4fc1e47aa50f8f8e6d3f78a5e16ac33ef5fefa324"},
-    {file = "cffi-1.16.0-cp38-cp38-win32.whl", hash = "sha256:131fd094d1065b19540c3d72594260f118b231090295d8c34e19a7bbcf2e860a"},
-    {file = "cffi-1.16.0-cp38-cp38-win_amd64.whl", hash = "sha256:31d13b0f99e0836b7ff893d37af07366ebc90b678b6664c955b54561fc36ef36"},
-    {file = "cffi-1.16.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:582215a0e9adbe0e379761260553ba11c58943e4bbe9c36430c4ca6ac74b15ed"},
-    {file = "cffi-1.16.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b29ebffcf550f9da55bec9e02ad430c992a87e5f512cd63388abb76f1036d8d2"},
-    {file = "cffi-1.16.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dc9b18bf40cc75f66f40a7379f6a9513244fe33c0e8aa72e2d56b0196a7ef872"},
-    {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9cb4a35b3642fc5c005a6755a5d17c6c8b6bcb6981baf81cea8bfbc8903e8ba8"},
-    {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b86851a328eedc692acf81fb05444bdf1891747c25af7529e39ddafaf68a4f3f"},
-    {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c0f31130ebc2d37cdd8e44605fb5fa7ad59049298b3f745c74fa74c62fbfcfc4"},
-    {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f8e709127c6c77446a8c0a8c8bf3c8ee706a06cd44b1e827c3e6a2ee6b8c098"},
-    {file = "cffi-1.16.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:748dcd1e3d3d7cd5443ef03ce8685043294ad6bd7c02a38d1bd367cfd968e000"},
-    {file = "cffi-1.16.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8895613bcc094d4a1b2dbe179d88d7fb4a15cee43c052e8885783fac397d91fe"},
-    {file = "cffi-1.16.0-cp39-cp39-win32.whl", hash = "sha256:ed86a35631f7bfbb28e108dd96773b9d5a6ce4811cf6ea468bb6a359b256b1e4"},
-    {file = "cffi-1.16.0-cp39-cp39-win_amd64.whl", hash = "sha256:3686dffb02459559c74dd3d81748269ffb0eb027c39a6fc99502de37d501faa8"},
-    {file = "cffi-1.16.0.tar.gz", hash = "sha256:bcb3ef43e58665bbda2fb198698fcae6776483e0c4a631aa5647806c25e02cc0"},
+    {file = "cffi-1.17.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:df8b1c11f177bc2313ec4b2d46baec87a5f3e71fc8b45dab2ee7cae86d9aba14"},
+    {file = "cffi-1.17.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8f2cdc858323644ab277e9bb925ad72ae0e67f69e804f4898c070998d50b1a67"},
+    {file = "cffi-1.17.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:edae79245293e15384b51f88b00613ba9f7198016a5948b5dddf4917d4d26382"},
+    {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:45398b671ac6d70e67da8e4224a065cec6a93541bb7aebe1b198a61b58c7b702"},
+    {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ad9413ccdeda48c5afdae7e4fa2192157e991ff761e7ab8fdd8926f40b160cc3"},
+    {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5da5719280082ac6bd9aa7becb3938dc9f9cbd57fac7d2871717b1feb0902ab6"},
+    {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2bb1a08b8008b281856e5971307cc386a8e9c5b625ac297e853d36da6efe9c17"},
+    {file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:045d61c734659cc045141be4bae381a41d89b741f795af1dd018bfb532fd0df8"},
+    {file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:6883e737d7d9e4899a8a695e00ec36bd4e5e4f18fabe0aca0efe0a4b44cdb13e"},
+    {file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:6b8b4a92e1c65048ff98cfe1f735ef8f1ceb72e3d5f0c25fdb12087a23da22be"},
+    {file = "cffi-1.17.1-cp310-cp310-win32.whl", hash = "sha256:c9c3d058ebabb74db66e431095118094d06abf53284d9c81f27300d0e0d8bc7c"},
+    {file = "cffi-1.17.1-cp310-cp310-win_amd64.whl", hash = "sha256:0f048dcf80db46f0098ccac01132761580d28e28bc0f78ae0d58048063317e15"},
+    {file = "cffi-1.17.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a45e3c6913c5b87b3ff120dcdc03f6131fa0065027d0ed7ee6190736a74cd401"},
+    {file = "cffi-1.17.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:30c5e0cb5ae493c04c8b42916e52ca38079f1b235c2f8ae5f4527b963c401caf"},
+    {file = "cffi-1.17.1-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f75c7ab1f9e4aca5414ed4d8e5c0e303a34f4421f8a0d47a4d019ceff0ab6af4"},
+    {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a1ed2dd2972641495a3ec98445e09766f077aee98a1c896dcb4ad0d303628e41"},
+    {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:46bf43160c1a35f7ec506d254e5c890f3c03648a4dbac12d624e4490a7046cd1"},
+    {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a24ed04c8ffd54b0729c07cee15a81d964e6fee0e3d4d342a27b020d22959dc6"},
+    {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:610faea79c43e44c71e1ec53a554553fa22321b65fae24889706c0a84d4ad86d"},
+    {file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:a9b15d491f3ad5d692e11f6b71f7857e7835eb677955c00cc0aefcd0669adaf6"},
+    {file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:de2ea4b5833625383e464549fec1bc395c1bdeeb5f25c4a3a82b5a8c756ec22f"},
+    {file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:fc48c783f9c87e60831201f2cce7f3b2e4846bf4d8728eabe54d60700b318a0b"},
+    {file = "cffi-1.17.1-cp311-cp311-win32.whl", hash = "sha256:85a950a4ac9c359340d5963966e3e0a94a676bd6245a4b55bc43949eee26a655"},
+    {file = "cffi-1.17.1-cp311-cp311-win_amd64.whl", hash = "sha256:caaf0640ef5f5517f49bc275eca1406b0ffa6aa184892812030f04c2abf589a0"},
+    {file = "cffi-1.17.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:805b4371bf7197c329fcb3ead37e710d1bca9da5d583f5073b799d5c5bd1eee4"},
+    {file = "cffi-1.17.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:733e99bc2df47476e3848417c5a4540522f234dfd4ef3ab7fafdf555b082ec0c"},
+    {file = "cffi-1.17.1-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1257bdabf294dceb59f5e70c64a3e2f462c30c7ad68092d01bbbfb1c16b1ba36"},
+    {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da95af8214998d77a98cc14e3a3bd00aa191526343078b530ceb0bd710fb48a5"},
+    {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d63afe322132c194cf832bfec0dc69a99fb9bb6bbd550f161a49e9e855cc78ff"},
+    {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f79fc4fc25f1c8698ff97788206bb3c2598949bfe0fef03d299eb1b5356ada99"},
+    {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b62ce867176a75d03a665bad002af8e6d54644fad99a3c70905c543130e39d93"},
+    {file = "cffi-1.17.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:386c8bf53c502fff58903061338ce4f4950cbdcb23e2902d86c0f722b786bbe3"},
+    {file = "cffi-1.17.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4ceb10419a9adf4460ea14cfd6bc43d08701f0835e979bf821052f1805850fe8"},
+    {file = "cffi-1.17.1-cp312-cp312-win32.whl", hash = "sha256:a08d7e755f8ed21095a310a693525137cfe756ce62d066e53f502a83dc550f65"},
+    {file = "cffi-1.17.1-cp312-cp312-win_amd64.whl", hash = "sha256:51392eae71afec0d0c8fb1a53b204dbb3bcabcb3c9b807eedf3e1e6ccf2de903"},
+    {file = "cffi-1.17.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f3a2b4222ce6b60e2e8b337bb9596923045681d71e5a082783484d845390938e"},
+    {file = "cffi-1.17.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:0984a4925a435b1da406122d4d7968dd861c1385afe3b45ba82b750f229811e2"},
+    {file = "cffi-1.17.1-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d01b12eeeb4427d3110de311e1774046ad344f5b1a7403101878976ecd7a10f3"},
+    {file = 
"cffi-1.17.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:706510fe141c86a69c8ddc029c7910003a17353970cff3b904ff0686a5927683"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:de55b766c7aa2e2a3092c51e0483d700341182f08e67c63630d5b6f200bb28e5"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c59d6e989d07460165cc5ad3c61f9fd8f1b4796eacbd81cee78957842b834af4"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd398dbc6773384a17fe0d3e7eeb8d1a21c2200473ee6806bb5e6a8e62bb73dd"}, + {file = "cffi-1.17.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:3edc8d958eb099c634dace3c7e16560ae474aa3803a5df240542b305d14e14ed"}, + {file = "cffi-1.17.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:72e72408cad3d5419375fc87d289076ee319835bdfa2caad331e377589aebba9"}, + {file = "cffi-1.17.1-cp313-cp313-win32.whl", hash = "sha256:e03eab0a8677fa80d646b5ddece1cbeaf556c313dcfac435ba11f107ba117b5d"}, + {file = "cffi-1.17.1-cp313-cp313-win_amd64.whl", hash = "sha256:f6a16c31041f09ead72d69f583767292f750d24913dadacf5756b966aacb3f1a"}, + {file = "cffi-1.17.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:636062ea65bd0195bc012fea9321aca499c0504409f413dc88af450b57ffd03b"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c7eac2ef9b63c79431bc4b25f1cd649d7f061a28808cbc6c47b534bd789ef964"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e221cf152cff04059d011ee126477f0d9588303eb57e88923578ace7baad17f9"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:31000ec67d4221a71bd3f67df918b1f88f676f1c3b535a7eb473255fdc0b83fc"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6f17be4345073b0a7b8ea599688f692ac3ef23ce28e5df79c04de519dbc4912c"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0e2b1fac190ae3ebfe37b979cc1ce69c81f4e4fe5746bb401dca63a9062cdaf1"}, + {file = "cffi-1.17.1-cp38-cp38-win32.whl", hash = "sha256:7596d6620d3fa590f677e9ee430df2958d2d6d6de2feeae5b20e82c00b76fbf8"}, + {file = "cffi-1.17.1-cp38-cp38-win_amd64.whl", hash = "sha256:78122be759c3f8a014ce010908ae03364d00a1f81ab5c7f4a7a5120607ea56e1"}, + {file = "cffi-1.17.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b2ab587605f4ba0bf81dc0cb08a41bd1c0a5906bd59243d56bad7668a6fc6c16"}, + {file = "cffi-1.17.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:28b16024becceed8c6dfbc75629e27788d8a3f9030691a1dbf9821a128b22c36"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1d599671f396c4723d016dbddb72fe8e0397082b0a77a4fab8028923bec050e8"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ca74b8dbe6e8e8263c0ffd60277de77dcee6c837a3d0881d8c1ead7268c9e576"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f7f5baafcc48261359e14bcd6d9bff6d4b28d9103847c9e136694cb0501aef87"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:98e3969bcff97cae1b2def8ba499ea3d6f31ddfdb7635374834cf89a1a08ecf0"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:cdf5ce3acdfd1661132f2a9c19cac174758dc2352bfe37d98aa7512c6b7178b3"}, + {file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9755e4345d1ec879e3849e62222a18c7174d65a6a92d5b346b1863912168b595"}, + {file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:f1e22e8c4419538cb197e4dd60acc919d7696e5ef98ee4da4e01d3f8cfa4cc5a"}, + {file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:c03e868a0b3bc35839ba98e74211ed2b05d2119be4e8a0f224fba9384f1fe02e"}, + {file = "cffi-1.17.1-cp39-cp39-win32.whl", hash = "sha256:e31ae45bc2e29f6b2abd0de1cc3b9d5205aa847cafaecb8af1476a609a2f6eb7"}, + {file = "cffi-1.17.1-cp39-cp39-win_amd64.whl", hash = "sha256:d016c76bdd850f3c626af19b0542c9677ba156e4ee4fccfdd7848803533ef662"}, + {file = "cffi-1.17.1.tar.gz", hash = "sha256:1c39c6016c32bc48dd54561950ebd6836e1670f2ae46128f67cf49e789c52824"}, ] [package.dependencies] @@ -725,106 +768,122 @@ files = [ [[package]] name = "contourpy" -version = "1.2.1" +version = "1.3.0" description = "Python library for calculating contours of 2D quadrilateral grids" optional = true python-versions = ">=3.9" files = [ - {file = "contourpy-1.2.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:bd7c23df857d488f418439686d3b10ae2fbf9bc256cd045b37a8c16575ea1040"}, - {file = "contourpy-1.2.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5b9eb0ca724a241683c9685a484da9d35c872fd42756574a7cfbf58af26677fd"}, - {file = "contourpy-1.2.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4c75507d0a55378240f781599c30e7776674dbaf883a46d1c90f37e563453480"}, - {file = "contourpy-1.2.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:11959f0ce4a6f7b76ec578576a0b61a28bdc0696194b6347ba3f1c53827178b9"}, - {file = "contourpy-1.2.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:eb3315a8a236ee19b6df481fc5f997436e8ade24a9f03dfdc6bd490fea20c6da"}, - {file = "contourpy-1.2.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:39f3ecaf76cd98e802f094e0d4fbc6dc9c45a8d0c4d185f0f6c2234e14e5f75b"}, - {file = "contourpy-1.2.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:94b34f32646ca0414237168d68a9157cb3889f06b096612afdd296003fdd32fd"}, - {file = "contourpy-1.2.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:457499c79fa84593f22454bbd27670227874cd2ff5d6c84e60575c8b50a69619"}, - {file = "contourpy-1.2.1-cp310-cp310-win32.whl", hash = "sha256:ac58bdee53cbeba2ecad824fa8159493f0bf3b8ea4e93feb06c9a465d6c87da8"}, - {file = "contourpy-1.2.1-cp310-cp310-win_amd64.whl", hash = "sha256:9cffe0f850e89d7c0012a1fb8730f75edd4320a0a731ed0c183904fe6ecfc3a9"}, - {file = "contourpy-1.2.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6022cecf8f44e36af10bd9118ca71f371078b4c168b6e0fab43d4a889985dbb5"}, - {file = "contourpy-1.2.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ef5adb9a3b1d0c645ff694f9bca7702ec2c70f4d734f9922ea34de02294fdf72"}, - {file = "contourpy-1.2.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6150ffa5c767bc6332df27157d95442c379b7dce3a38dff89c0f39b63275696f"}, - {file = "contourpy-1.2.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4c863140fafc615c14a4bf4efd0f4425c02230eb8ef02784c9a156461e62c965"}, - {file = "contourpy-1.2.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:00e5388f71c1a0610e6fe56b5c44ab7ba14165cdd6d695429c5cd94021e390b2"}, - {file = 
"contourpy-1.2.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d4492d82b3bc7fbb7e3610747b159869468079fe149ec5c4d771fa1f614a14df"}, - {file = "contourpy-1.2.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:49e70d111fee47284d9dd867c9bb9a7058a3c617274900780c43e38d90fe1205"}, - {file = "contourpy-1.2.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:b59c0ffceff8d4d3996a45f2bb6f4c207f94684a96bf3d9728dbb77428dd8cb8"}, - {file = "contourpy-1.2.1-cp311-cp311-win32.whl", hash = "sha256:7b4182299f251060996af5249c286bae9361fa8c6a9cda5efc29fe8bfd6062ec"}, - {file = "contourpy-1.2.1-cp311-cp311-win_amd64.whl", hash = "sha256:2855c8b0b55958265e8b5888d6a615ba02883b225f2227461aa9127c578a4922"}, - {file = "contourpy-1.2.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:62828cada4a2b850dbef89c81f5a33741898b305db244904de418cc957ff05dc"}, - {file = "contourpy-1.2.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:309be79c0a354afff9ff7da4aaed7c3257e77edf6c1b448a779329431ee79d7e"}, - {file = "contourpy-1.2.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2e785e0f2ef0d567099b9ff92cbfb958d71c2d5b9259981cd9bee81bd194c9a4"}, - {file = "contourpy-1.2.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1cac0a8f71a041aa587410424ad46dfa6a11f6149ceb219ce7dd48f6b02b87a7"}, - {file = "contourpy-1.2.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:af3f4485884750dddd9c25cb7e3915d83c2db92488b38ccb77dd594eac84c4a0"}, - {file = "contourpy-1.2.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9ce6889abac9a42afd07a562c2d6d4b2b7134f83f18571d859b25624a331c90b"}, - {file = "contourpy-1.2.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:a1eea9aecf761c661d096d39ed9026574de8adb2ae1c5bd7b33558af884fb2ce"}, - {file = "contourpy-1.2.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:187fa1d4c6acc06adb0fae5544c59898ad781409e61a926ac7e84b8f276dcef4"}, - {file = "contourpy-1.2.1-cp312-cp312-win32.whl", hash = "sha256:c2528d60e398c7c4c799d56f907664673a807635b857df18f7ae64d3e6ce2d9f"}, - {file = "contourpy-1.2.1-cp312-cp312-win_amd64.whl", hash = "sha256:1a07fc092a4088ee952ddae19a2b2a85757b923217b7eed584fdf25f53a6e7ce"}, - {file = "contourpy-1.2.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:bb6834cbd983b19f06908b45bfc2dad6ac9479ae04abe923a275b5f48f1a186b"}, - {file = "contourpy-1.2.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1d59e739ab0e3520e62a26c60707cc3ab0365d2f8fecea74bfe4de72dc56388f"}, - {file = "contourpy-1.2.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bd3db01f59fdcbce5b22afad19e390260d6d0222f35a1023d9adc5690a889364"}, - {file = "contourpy-1.2.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a12a813949e5066148712a0626895c26b2578874e4cc63160bb007e6df3436fe"}, - {file = "contourpy-1.2.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fe0ccca550bb8e5abc22f530ec0466136379c01321fd94f30a22231e8a48d985"}, - {file = "contourpy-1.2.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e1d59258c3c67c865435d8fbeb35f8c59b8bef3d6f46c1f29f6123556af28445"}, - {file = "contourpy-1.2.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:f32c38afb74bd98ce26de7cc74a67b40afb7b05aae7b42924ea990d51e4dac02"}, - {file = "contourpy-1.2.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:d31a63bc6e6d87f77d71e1abbd7387ab817a66733734883d1fc0021ed9bfa083"}, - {file = 
"contourpy-1.2.1-cp39-cp39-win32.whl", hash = "sha256:ddcb8581510311e13421b1f544403c16e901c4e8f09083c881fab2be80ee31ba"}, - {file = "contourpy-1.2.1-cp39-cp39-win_amd64.whl", hash = "sha256:10a37ae557aabf2509c79715cd20b62e4c7c28b8cd62dd7d99e5ed3ce28c3fd9"}, - {file = "contourpy-1.2.1-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:a31f94983fecbac95e58388210427d68cd30fe8a36927980fab9c20062645609"}, - {file = "contourpy-1.2.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ef2b055471c0eb466033760a521efb9d8a32b99ab907fc8358481a1dd29e3bd3"}, - {file = "contourpy-1.2.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:b33d2bc4f69caedcd0a275329eb2198f560b325605810895627be5d4b876bf7f"}, - {file = "contourpy-1.2.1.tar.gz", hash = "sha256:4d8908b3bee1c889e547867ca4cdc54e5ab6be6d3e078556814a22457f49423c"}, -] - -[package.dependencies] -numpy = ">=1.20" + {file = "contourpy-1.3.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:880ea32e5c774634f9fcd46504bf9f080a41ad855f4fef54f5380f5133d343c7"}, + {file = "contourpy-1.3.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:76c905ef940a4474a6289c71d53122a4f77766eef23c03cd57016ce19d0f7b42"}, + {file = "contourpy-1.3.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:92f8557cbb07415a4d6fa191f20fd9d2d9eb9c0b61d1b2f52a8926e43c6e9af7"}, + {file = "contourpy-1.3.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:36f965570cff02b874773c49bfe85562b47030805d7d8360748f3eca570f4cab"}, + {file = "contourpy-1.3.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cacd81e2d4b6f89c9f8a5b69b86490152ff39afc58a95af002a398273e5ce589"}, + {file = "contourpy-1.3.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:69375194457ad0fad3a839b9e29aa0b0ed53bb54db1bfb6c3ae43d111c31ce41"}, + {file = "contourpy-1.3.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:7a52040312b1a858b5e31ef28c2e865376a386c60c0e248370bbea2d3f3b760d"}, + {file = "contourpy-1.3.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:3faeb2998e4fcb256542e8a926d08da08977f7f5e62cf733f3c211c2a5586223"}, + {file = "contourpy-1.3.0-cp310-cp310-win32.whl", hash = "sha256:36e0cff201bcb17a0a8ecc7f454fe078437fa6bda730e695a92f2d9932bd507f"}, + {file = "contourpy-1.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:87ddffef1dbe5e669b5c2440b643d3fdd8622a348fe1983fad7a0f0ccb1cd67b"}, + {file = "contourpy-1.3.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:0fa4c02abe6c446ba70d96ece336e621efa4aecae43eaa9b030ae5fb92b309ad"}, + {file = "contourpy-1.3.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:834e0cfe17ba12f79963861e0f908556b2cedd52e1f75e6578801febcc6a9f49"}, + {file = "contourpy-1.3.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dbc4c3217eee163fa3984fd1567632b48d6dfd29216da3ded3d7b844a8014a66"}, + {file = "contourpy-1.3.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4865cd1d419e0c7a7bf6de1777b185eebdc51470800a9f42b9e9decf17762081"}, + {file = "contourpy-1.3.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:303c252947ab4b14c08afeb52375b26781ccd6a5ccd81abcdfc1fafd14cf93c1"}, + {file = "contourpy-1.3.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:637f674226be46f6ba372fd29d9523dd977a291f66ab2a74fbeb5530bb3f445d"}, + {file = "contourpy-1.3.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = 
"sha256:76a896b2f195b57db25d6b44e7e03f221d32fe318d03ede41f8b4d9ba1bff53c"}, + {file = "contourpy-1.3.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:e1fd23e9d01591bab45546c089ae89d926917a66dceb3abcf01f6105d927e2cb"}, + {file = "contourpy-1.3.0-cp311-cp311-win32.whl", hash = "sha256:d402880b84df3bec6eab53cd0cf802cae6a2ef9537e70cf75e91618a3801c20c"}, + {file = "contourpy-1.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:6cb6cc968059db9c62cb35fbf70248f40994dfcd7aa10444bbf8b3faeb7c2d67"}, + {file = "contourpy-1.3.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:570ef7cf892f0afbe5b2ee410c507ce12e15a5fa91017a0009f79f7d93a1268f"}, + {file = "contourpy-1.3.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:da84c537cb8b97d153e9fb208c221c45605f73147bd4cadd23bdae915042aad6"}, + {file = "contourpy-1.3.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0be4d8425bfa755e0fd76ee1e019636ccc7c29f77a7c86b4328a9eb6a26d0639"}, + {file = "contourpy-1.3.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9c0da700bf58f6e0b65312d0a5e695179a71d0163957fa381bb3c1f72972537c"}, + {file = "contourpy-1.3.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:eb8b141bb00fa977d9122636b16aa67d37fd40a3d8b52dd837e536d64b9a4d06"}, + {file = "contourpy-1.3.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3634b5385c6716c258d0419c46d05c8aa7dc8cb70326c9a4fb66b69ad2b52e09"}, + {file = "contourpy-1.3.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:0dce35502151b6bd35027ac39ba6e5a44be13a68f55735c3612c568cac3805fd"}, + {file = "contourpy-1.3.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:aea348f053c645100612b333adc5983d87be69acdc6d77d3169c090d3b01dc35"}, + {file = "contourpy-1.3.0-cp312-cp312-win32.whl", hash = "sha256:90f73a5116ad1ba7174341ef3ea5c3150ddf20b024b98fb0c3b29034752c8aeb"}, + {file = "contourpy-1.3.0-cp312-cp312-win_amd64.whl", hash = "sha256:b11b39aea6be6764f84360fce6c82211a9db32a7c7de8fa6dd5397cf1d079c3b"}, + {file = "contourpy-1.3.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:3e1c7fa44aaae40a2247e2e8e0627f4bea3dd257014764aa644f319a5f8600e3"}, + {file = "contourpy-1.3.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:364174c2a76057feef647c802652f00953b575723062560498dc7930fc9b1cb7"}, + {file = "contourpy-1.3.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:32b238b3b3b649e09ce9aaf51f0c261d38644bdfa35cbaf7b263457850957a84"}, + {file = "contourpy-1.3.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d51fca85f9f7ad0b65b4b9fe800406d0d77017d7270d31ec3fb1cc07358fdea0"}, + {file = "contourpy-1.3.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:732896af21716b29ab3e988d4ce14bc5133733b85956316fb0c56355f398099b"}, + {file = "contourpy-1.3.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d73f659398a0904e125280836ae6f88ba9b178b2fed6884f3b1f95b989d2c8da"}, + {file = "contourpy-1.3.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:c6c7c2408b7048082932cf4e641fa3b8ca848259212f51c8c59c45aa7ac18f14"}, + {file = "contourpy-1.3.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:f317576606de89da6b7e0861cf6061f6146ead3528acabff9236458a6ba467f8"}, + {file = "contourpy-1.3.0-cp313-cp313-win32.whl", hash = "sha256:31cd3a85dbdf1fc002280c65caa7e2b5f65e4a973fcdf70dd2fdcb9868069294"}, + {file = "contourpy-1.3.0-cp313-cp313-win_amd64.whl", hash = 
"sha256:4553c421929ec95fb07b3aaca0fae668b2eb5a5203d1217ca7c34c063c53d087"}, + {file = "contourpy-1.3.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:345af746d7766821d05d72cb8f3845dfd08dd137101a2cb9b24de277d716def8"}, + {file = "contourpy-1.3.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:3bb3808858a9dc68f6f03d319acd5f1b8a337e6cdda197f02f4b8ff67ad2057b"}, + {file = "contourpy-1.3.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:420d39daa61aab1221567b42eecb01112908b2cab7f1b4106a52caaec8d36973"}, + {file = "contourpy-1.3.0-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4d63ee447261e963af02642ffcb864e5a2ee4cbfd78080657a9880b8b1868e18"}, + {file = "contourpy-1.3.0-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:167d6c890815e1dac9536dca00828b445d5d0df4d6a8c6adb4a7ec3166812fa8"}, + {file = "contourpy-1.3.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:710a26b3dc80c0e4febf04555de66f5fd17e9cf7170a7b08000601a10570bda6"}, + {file = "contourpy-1.3.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:75ee7cb1a14c617f34a51d11fa7524173e56551646828353c4af859c56b766e2"}, + {file = "contourpy-1.3.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:33c92cdae89ec5135d036e7218e69b0bb2851206077251f04a6c4e0e21f03927"}, + {file = "contourpy-1.3.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a11077e395f67ffc2c44ec2418cfebed032cd6da3022a94fc227b6faf8e2acb8"}, + {file = "contourpy-1.3.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e8134301d7e204c88ed7ab50028ba06c683000040ede1d617298611f9dc6240c"}, + {file = "contourpy-1.3.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e12968fdfd5bb45ffdf6192a590bd8ddd3ba9e58360b29683c6bb71a7b41edca"}, + {file = "contourpy-1.3.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fd2a0fc506eccaaa7595b7e1418951f213cf8255be2600f1ea1b61e46a60c55f"}, + {file = "contourpy-1.3.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4cfb5c62ce023dfc410d6059c936dcf96442ba40814aefbfa575425a3a7f19dc"}, + {file = "contourpy-1.3.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:68a32389b06b82c2fdd68276148d7b9275b5f5cf13e5417e4252f6d1a34f72a2"}, + {file = "contourpy-1.3.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:94e848a6b83da10898cbf1311a815f770acc9b6a3f2d646f330d57eb4e87592e"}, + {file = "contourpy-1.3.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:d78ab28a03c854a873787a0a42254a0ccb3cb133c672f645c9f9c8f3ae9d0800"}, + {file = "contourpy-1.3.0-cp39-cp39-win32.whl", hash = "sha256:81cb5ed4952aae6014bc9d0421dec7c5835c9c8c31cdf51910b708f548cf58e5"}, + {file = "contourpy-1.3.0-cp39-cp39-win_amd64.whl", hash = "sha256:14e262f67bd7e6eb6880bc564dcda30b15e351a594657e55b7eec94b6ef72843"}, + {file = "contourpy-1.3.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:fe41b41505a5a33aeaed2a613dccaeaa74e0e3ead6dd6fd3a118fb471644fd6c"}, + {file = "contourpy-1.3.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eca7e17a65f72a5133bdbec9ecf22401c62bcf4821361ef7811faee695799779"}, + {file = "contourpy-1.3.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:1ec4dc6bf570f5b22ed0d7efba0dfa9c5b9e0431aeea7581aa217542d9e809a4"}, + {file = "contourpy-1.3.0-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:00ccd0dbaad6d804ab259820fa7cb0b8036bda0686ef844d24125d8287178ce0"}, + {file = 
"contourpy-1.3.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8ca947601224119117f7c19c9cdf6b3ab54c5726ef1d906aa4a69dfb6dd58102"}, + {file = "contourpy-1.3.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:c6ec93afeb848a0845a18989da3beca3eec2c0f852322efe21af1931147d12cb"}, + {file = "contourpy-1.3.0.tar.gz", hash = "sha256:7ffa0db17717a8ffb127efd0c95a4362d996b892c2904db72428d5b52e1938a4"}, +] + +[package.dependencies] +numpy = ">=1.23" [package.extras] bokeh = ["bokeh", "selenium"] docs = ["furo", "sphinx (>=7.2)", "sphinx-copybutton"] -mypy = ["contourpy[bokeh,docs]", "docutils-stubs", "mypy (==1.8.0)", "types-Pillow"] +mypy = ["contourpy[bokeh,docs]", "docutils-stubs", "mypy (==1.11.1)", "types-Pillow"] test = ["Pillow", "contourpy[test-no-images]", "matplotlib"] -test-no-images = ["pytest", "pytest-cov", "pytest-xdist", "wurlitzer"] +test-no-images = ["pytest", "pytest-cov", "pytest-rerunfailures", "pytest-xdist", "wurlitzer"] [[package]] name = "cryptography" -version = "42.0.8" +version = "43.0.1" description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." optional = false python-versions = ">=3.7" files = [ - {file = "cryptography-42.0.8-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:81d8a521705787afe7a18d5bfb47ea9d9cc068206270aad0b96a725022e18d2e"}, - {file = "cryptography-42.0.8-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:961e61cefdcb06e0c6d7e3a1b22ebe8b996eb2bf50614e89384be54c48c6b63d"}, - {file = "cryptography-42.0.8-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e3ec3672626e1b9e55afd0df6d774ff0e953452886e06e0f1eb7eb0c832e8902"}, - {file = "cryptography-42.0.8-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e599b53fd95357d92304510fb7bda8523ed1f79ca98dce2f43c115950aa78801"}, - {file = "cryptography-42.0.8-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:5226d5d21ab681f432a9c1cf8b658c0cb02533eece706b155e5fbd8a0cdd3949"}, - {file = "cryptography-42.0.8-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:6b7c4f03ce01afd3b76cf69a5455caa9cfa3de8c8f493e0d3ab7d20611c8dae9"}, - {file = "cryptography-42.0.8-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:2346b911eb349ab547076f47f2e035fc8ff2c02380a7cbbf8d87114fa0f1c583"}, - {file = "cryptography-42.0.8-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:ad803773e9df0b92e0a817d22fd8a3675493f690b96130a5e24f1b8fabbea9c7"}, - {file = "cryptography-42.0.8-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:2f66d9cd9147ee495a8374a45ca445819f8929a3efcd2e3df6428e46c3cbb10b"}, - {file = "cryptography-42.0.8-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:d45b940883a03e19e944456a558b67a41160e367a719833c53de6911cabba2b7"}, - {file = "cryptography-42.0.8-cp37-abi3-win32.whl", hash = "sha256:a0c5b2b0585b6af82d7e385f55a8bc568abff8923af147ee3c07bd8b42cda8b2"}, - {file = "cryptography-42.0.8-cp37-abi3-win_amd64.whl", hash = "sha256:57080dee41209e556a9a4ce60d229244f7a66ef52750f813bfbe18959770cfba"}, - {file = "cryptography-42.0.8-cp39-abi3-macosx_10_12_universal2.whl", hash = "sha256:dea567d1b0e8bc5764b9443858b673b734100c2871dc93163f58c46a97a83d28"}, - {file = "cryptography-42.0.8-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c4783183f7cb757b73b2ae9aed6599b96338eb957233c58ca8f49a49cc32fd5e"}, - {file = "cryptography-42.0.8-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:a0608251135d0e03111152e41f0cc2392d1e74e35703960d4190b2e0f4ca9c70"}, - {file = "cryptography-42.0.8-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:dc0fdf6787f37b1c6b08e6dfc892d9d068b5bdb671198c72072828b80bd5fe4c"}, - {file = "cryptography-42.0.8-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:9c0c1716c8447ee7dbf08d6db2e5c41c688544c61074b54fc4564196f55c25a7"}, - {file = "cryptography-42.0.8-cp39-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:fff12c88a672ab9c9c1cf7b0c80e3ad9e2ebd9d828d955c126be4fd3e5578c9e"}, - {file = "cryptography-42.0.8-cp39-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:cafb92b2bc622cd1aa6a1dce4b93307792633f4c5fe1f46c6b97cf67073ec961"}, - {file = "cryptography-42.0.8-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:31f721658a29331f895a5a54e7e82075554ccfb8b163a18719d342f5ffe5ecb1"}, - {file = "cryptography-42.0.8-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:b297f90c5723d04bcc8265fc2a0f86d4ea2e0f7ab4b6994459548d3a6b992a14"}, - {file = "cryptography-42.0.8-cp39-abi3-win32.whl", hash = "sha256:2f88d197e66c65be5e42cd72e5c18afbfae3f741742070e3019ac8f4ac57262c"}, - {file = "cryptography-42.0.8-cp39-abi3-win_amd64.whl", hash = "sha256:fa76fbb7596cc5839320000cdd5d0955313696d9511debab7ee7278fc8b5c84a"}, - {file = "cryptography-42.0.8-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:ba4f0a211697362e89ad822e667d8d340b4d8d55fae72cdd619389fb5912eefe"}, - {file = "cryptography-42.0.8-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:81884c4d096c272f00aeb1f11cf62ccd39763581645b0812e99a91505fa48e0c"}, - {file = "cryptography-42.0.8-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:c9bb2ae11bfbab395bdd072985abde58ea9860ed84e59dbc0463a5d0159f5b71"}, - {file = "cryptography-42.0.8-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:7016f837e15b0a1c119d27ecd89b3515f01f90a8615ed5e9427e30d9cdbfed3d"}, - {file = "cryptography-42.0.8-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5a94eccb2a81a309806027e1670a358b99b8fe8bfe9f8d329f27d72c094dde8c"}, - {file = "cryptography-42.0.8-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:dec9b018df185f08483f294cae6ccac29e7a6e0678996587363dc352dc65c842"}, - {file = "cryptography-42.0.8-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:343728aac38decfdeecf55ecab3264b015be68fc2816ca800db649607aeee648"}, - {file = "cryptography-42.0.8-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:013629ae70b40af70c9a7a5db40abe5d9054e6f4380e50ce769947b73bf3caad"}, - {file = "cryptography-42.0.8.tar.gz", hash = "sha256:8d09d05439ce7baa8e9e95b07ec5b6c886f548deb7e0f69ef25f64b3bce842f2"}, + {file = "cryptography-43.0.1-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:8385d98f6a3bf8bb2d65a73e17ed87a3ba84f6991c155691c51112075f9ffc5d"}, + {file = "cryptography-43.0.1-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:27e613d7077ac613e399270253259d9d53872aaf657471473ebfc9a52935c062"}, + {file = "cryptography-43.0.1-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:68aaecc4178e90719e95298515979814bda0cbada1256a4485414860bd7ab962"}, + {file = "cryptography-43.0.1-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:de41fd81a41e53267cb020bb3a7212861da53a7d39f863585d13ea11049cf277"}, + {file = "cryptography-43.0.1-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:f98bf604c82c416bc829e490c700ca1553eafdf2912a91e23a79d97d9801372a"}, + {file = "cryptography-43.0.1-cp37-abi3-musllinux_1_2_aarch64.whl", hash = 
"sha256:61ec41068b7b74268fa86e3e9e12b9f0c21fcf65434571dbb13d954bceb08042"}, + {file = "cryptography-43.0.1-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:014f58110f53237ace6a408b5beb6c427b64e084eb451ef25a28308270086494"}, + {file = "cryptography-43.0.1-cp37-abi3-win32.whl", hash = "sha256:2bd51274dcd59f09dd952afb696bf9c61a7a49dfc764c04dd33ef7a6b502a1e2"}, + {file = "cryptography-43.0.1-cp37-abi3-win_amd64.whl", hash = "sha256:666ae11966643886c2987b3b721899d250855718d6d9ce41b521252a17985f4d"}, + {file = "cryptography-43.0.1-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:ac119bb76b9faa00f48128b7f5679e1d8d437365c5d26f1c2c3f0da4ce1b553d"}, + {file = "cryptography-43.0.1-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1bbcce1a551e262dfbafb6e6252f1ae36a248e615ca44ba302df077a846a8806"}, + {file = "cryptography-43.0.1-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:58d4e9129985185a06d849aa6df265bdd5a74ca6e1b736a77959b498e0505b85"}, + {file = "cryptography-43.0.1-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:d03a475165f3134f773d1388aeb19c2d25ba88b6a9733c5c590b9ff7bbfa2e0c"}, + {file = "cryptography-43.0.1-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:511f4273808ab590912a93ddb4e3914dfd8a388fed883361b02dea3791f292e1"}, + {file = "cryptography-43.0.1-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:80eda8b3e173f0f247f711eef62be51b599b5d425c429b5d4ca6a05e9e856baa"}, + {file = "cryptography-43.0.1-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:38926c50cff6f533f8a2dae3d7f19541432610d114a70808f0926d5aaa7121e4"}, + {file = "cryptography-43.0.1-cp39-abi3-win32.whl", hash = "sha256:a575913fb06e05e6b4b814d7f7468c2c660e8bb16d8d5a1faf9b33ccc569dd47"}, + {file = "cryptography-43.0.1-cp39-abi3-win_amd64.whl", hash = "sha256:d75601ad10b059ec832e78823b348bfa1a59f6b8d545db3a24fd44362a1564cb"}, + {file = "cryptography-43.0.1-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:ea25acb556320250756e53f9e20a4177515f012c9eaea17eb7587a8c4d8ae034"}, + {file = "cryptography-43.0.1-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:c1332724be35d23a854994ff0b66530119500b6053d0bd3363265f7e5e77288d"}, + {file = "cryptography-43.0.1-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:fba1007b3ef89946dbbb515aeeb41e30203b004f0b4b00e5e16078b518563289"}, + {file = "cryptography-43.0.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:5b43d1ea6b378b54a1dc99dd8a2b5be47658fe9a7ce0a58ff0b55f4b43ef2b84"}, + {file = "cryptography-43.0.1-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:88cce104c36870d70c49c7c8fd22885875d950d9ee6ab54df2745f83ba0dc365"}, + {file = "cryptography-43.0.1-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:9d3cdb25fa98afdd3d0892d132b8d7139e2c087da1712041f6b762e4f807cc96"}, + {file = "cryptography-43.0.1-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:e710bf40870f4db63c3d7d929aa9e09e4e7ee219e703f949ec4073b4294f6172"}, + {file = "cryptography-43.0.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:7c05650fe8023c5ed0d46793d4b7d7e6cd9c04e68eabe5b0aeea836e37bdcec2"}, + {file = "cryptography-43.0.1.tar.gz", hash = "sha256:203e92a75716d8cfb491dc47c79e17d0d9207ccffcbcb35f598fbe463ae3444d"}, ] [package.dependencies] @@ -837,7 +896,7 @@ nox = ["nox"] pep8test = ["check-sdist", "click", "mypy", "ruff"] sdist = ["build"] ssh = ["bcrypt (>=3.1.5)"] -test = ["certifi", "pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-xdist"] +test = ["certifi", 
"cryptography-vectors (==43.0.1)", "pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-xdist"] test-randomorder = ["pytest-randomly"] [[package]] @@ -855,6 +914,25 @@ files = [ docs = ["ipython", "matplotlib", "numpydoc", "sphinx"] tests = ["pytest", "pytest-cov", "pytest-xdist"] +[[package]] +name = "databricks-sdk" +version = "0.32.1" +description = "Databricks SDK for Python (Beta)" +optional = true +python-versions = ">=3.7" +files = [ + {file = "databricks_sdk-0.32.1-py3-none-any.whl", hash = "sha256:b91efdd0c9d49db3ce47d1ee1cbe322cf24189d426da46d1f74e2bfd4e352361"}, + {file = "databricks_sdk-0.32.1.tar.gz", hash = "sha256:8af15b7f94b1ae609f91f4a6dac43f9ebdd786c1077050ffc5cd5ab5eda39d49"}, +] + +[package.dependencies] +google-auth = ">=2.0,<3.0" +requests = ">=2.28.1,<3" + +[package.extras] +dev = ["autoflake", "databricks-connect", "ipython", "ipywidgets", "isort", "pycodestyle", "pyfakefs", "pytest", "pytest-cov", "pytest-mock", "pytest-rerunfailures", "pytest-xdist", "requests-mock", "wheel", "yapf"] +notebook = ["ipython (>=8,<9)", "ipywidgets (>=8,<9)"] + [[package]] name = "decorator" version = "5.1.1" @@ -866,6 +944,30 @@ files = [ {file = "decorator-5.1.1.tar.gz", hash = "sha256:637996211036b6385ef91435e4fae22989472f9d571faba8927ba8253acbc330"}, ] +[[package]] +name = "deltalake" +version = "0.18.2" +description = "Native Delta Lake Python binding based on delta-rs with Pandas integration" +optional = true +python-versions = ">=3.8" +files = [ + {file = "deltalake-0.18.2-cp38-abi3-macosx_10_12_x86_64.whl", hash = "sha256:da5ac401d4dd571d5e9438f773cfe7b726475afb83d01cdded7ef51f6eb92653"}, + {file = "deltalake-0.18.2-cp38-abi3-macosx_11_0_arm64.whl", hash = "sha256:c788520fe22225e860bea777d991893ae47a4778467c9ce65fa35bc511554989"}, + {file = "deltalake-0.18.2-cp38-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:828026848c862d8d0edfb5b962da8398d38610ebee6c38d01e96ca45496db00d"}, + {file = "deltalake-0.18.2-cp38-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:07f3de64f61bcdc894b341554c9385fa9fcb1a33ad1c61ab28c1f00d107e9bed"}, + {file = "deltalake-0.18.2-cp38-abi3-win_amd64.whl", hash = "sha256:ee0594f82e8dad10ad933de9dd14f379d23a25c4744edd3224cdba9a8b5951eb"}, + {file = "deltalake-0.18.2.tar.gz", hash = "sha256:c6f9ad68736413a6d7c1526d60905cdf4aaeb59b8c95cc782661250917719aef"}, +] + +[package.dependencies] +pyarrow = ">=8" +pyarrow-hotfix = "*" + +[package.extras] +devel = ["azure-storage-blob (==12.20.0)", "mypy (>=1.8.0,<1.9.0)", "packaging (>=20)", "pytest", "pytest-benchmark", "pytest-cov", "pytest-mock", "pytest-timeout", "ruff (>=0.3.0,<0.4.0)", "sphinx (<=4.5)", "sphinx-rtd-theme", "toml", "wheel"] +pandas = ["pandas"] +pyspark = ["delta-spark", "numpy (==1.22.2)", "pyspark"] + [[package]] name = "deprecated" version = "1.2.14" @@ -934,26 +1036,15 @@ docs = ["myst-parser (==0.18.0)", "sphinx (==5.1.1)"] ssh = ["paramiko (>=2.4.3)"] websockets = ["websocket-client (>=1.3.0)"] -[[package]] -name = "entrypoints" -version = "0.4" -description = "Discover and load entry points from installed packages." 
-optional = true -python-versions = ">=3.6" -files = [ - {file = "entrypoints-0.4-py3-none-any.whl", hash = "sha256:f174b5ff827504fd3cd97cc3f8649f3693f51538c7e4bdf3ef002c8429d42f9f"}, - {file = "entrypoints-0.4.tar.gz", hash = "sha256:b706eddaa9218a19ebcd67b56818f05bb27589b1ca9e8d797b74affad4ccacd4"}, -] - [[package]] name = "exceptiongroup" -version = "1.2.1" +version = "1.2.2" description = "Backport of PEP 654 (exception groups)" optional = false python-versions = ">=3.7" files = [ - {file = "exceptiongroup-1.2.1-py3-none-any.whl", hash = "sha256:5258b9ed329c5bbdd31a309f53cbfb0b155341807f6ff7606a1e801a891b29ad"}, - {file = "exceptiongroup-1.2.1.tar.gz", hash = "sha256:a4785e48b045528f5bfe627b6ad554ff32def154f42372786903b7abcfe1aa16"}, + {file = "exceptiongroup-1.2.2-py3-none-any.whl", hash = "sha256:3111b9d131c238bec2f8f516e123e14ba243563fb135d3fe885990585aa7795b"}, + {file = "exceptiongroup-1.2.2.tar.gz", hash = "sha256:47c2edf7c6738fafb49fd34290706d1a1a2f4d1c6df275526b62cbb4aa5393cc"}, ] [package.extras] @@ -961,13 +1052,13 @@ test = ["pytest (>=6)"] [[package]] name = "fakeredis" -version = "2.23.2" +version = "2.24.1" description = "Python implementation of redis API, can be used for testing purposes." optional = false python-versions = "<4.0,>=3.7" files = [ - {file = "fakeredis-2.23.2-py3-none-any.whl", hash = "sha256:3721946b955930c065231befd24a9cdc68b339746e93848ef01a010d98e4eb4f"}, - {file = "fakeredis-2.23.2.tar.gz", hash = "sha256:d649c409abe46c63690b6c35d3c460e4ce64c69a52cea3f02daff2649378f878"}, + {file = "fakeredis-2.24.1-py3-none-any.whl", hash = "sha256:09d3049a29910f80c0ef5789c31bef3dbb9727bd43a67ee8598217f4efd12f35"}, + {file = "fakeredis-2.24.1.tar.gz", hash = "sha256:4a52ab0edad53543ac5e3a41d761f91012613ed583344da54ae6473e05b0f6d0"}, ] [package.dependencies] @@ -1025,53 +1116,53 @@ dotenv = ["python-dotenv"] [[package]] name = "fonttools" -version = "4.53.0" +version = "4.53.1" description = "Tools to manipulate font files" optional = true python-versions = ">=3.8" files = [ - {file = "fonttools-4.53.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:52a6e0a7a0bf611c19bc8ec8f7592bdae79c8296c70eb05917fd831354699b20"}, - {file = "fonttools-4.53.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:099634631b9dd271d4a835d2b2a9e042ccc94ecdf7e2dd9f7f34f7daf333358d"}, - {file = "fonttools-4.53.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e40013572bfb843d6794a3ce076c29ef4efd15937ab833f520117f8eccc84fd6"}, - {file = "fonttools-4.53.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:715b41c3e231f7334cbe79dfc698213dcb7211520ec7a3bc2ba20c8515e8a3b5"}, - {file = "fonttools-4.53.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:74ae2441731a05b44d5988d3ac2cf784d3ee0a535dbed257cbfff4be8bb49eb9"}, - {file = "fonttools-4.53.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:95db0c6581a54b47c30860d013977b8a14febc206c8b5ff562f9fe32738a8aca"}, - {file = "fonttools-4.53.0-cp310-cp310-win32.whl", hash = "sha256:9cd7a6beec6495d1dffb1033d50a3f82dfece23e9eb3c20cd3c2444d27514068"}, - {file = "fonttools-4.53.0-cp310-cp310-win_amd64.whl", hash = "sha256:daaef7390e632283051e3cf3e16aff2b68b247e99aea916f64e578c0449c9c68"}, - {file = "fonttools-4.53.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:a209d2e624ba492df4f3bfad5996d1f76f03069c6133c60cd04f9a9e715595ec"}, - {file = "fonttools-4.53.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4f520d9ac5b938e6494f58a25c77564beca7d0199ecf726e1bd3d56872c59749"}, - 
{file = "fonttools-4.53.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eceef49f457253000e6a2d0f7bd08ff4e9fe96ec4ffce2dbcb32e34d9c1b8161"}, - {file = "fonttools-4.53.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fa1f3e34373aa16045484b4d9d352d4c6b5f9f77ac77a178252ccbc851e8b2ee"}, - {file = "fonttools-4.53.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:28d072169fe8275fb1a0d35e3233f6df36a7e8474e56cb790a7258ad822b6fd6"}, - {file = "fonttools-4.53.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:4a2a6ba400d386e904fd05db81f73bee0008af37799a7586deaa4aef8cd5971e"}, - {file = "fonttools-4.53.0-cp311-cp311-win32.whl", hash = "sha256:bb7273789f69b565d88e97e9e1da602b4ee7ba733caf35a6c2affd4334d4f005"}, - {file = "fonttools-4.53.0-cp311-cp311-win_amd64.whl", hash = "sha256:9fe9096a60113e1d755e9e6bda15ef7e03391ee0554d22829aa506cdf946f796"}, - {file = "fonttools-4.53.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:d8f191a17369bd53a5557a5ee4bab91d5330ca3aefcdf17fab9a497b0e7cff7a"}, - {file = "fonttools-4.53.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:93156dd7f90ae0a1b0e8871032a07ef3178f553f0c70c386025a808f3a63b1f4"}, - {file = "fonttools-4.53.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bff98816cb144fb7b85e4b5ba3888a33b56ecef075b0e95b95bcd0a5fbf20f06"}, - {file = "fonttools-4.53.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:973d030180eca8255b1bce6ffc09ef38a05dcec0e8320cc9b7bcaa65346f341d"}, - {file = "fonttools-4.53.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:c4ee5a24e281fbd8261c6ab29faa7fd9a87a12e8c0eed485b705236c65999109"}, - {file = "fonttools-4.53.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:bd5bc124fae781a4422f61b98d1d7faa47985f663a64770b78f13d2c072410c2"}, - {file = "fonttools-4.53.0-cp312-cp312-win32.whl", hash = "sha256:a239afa1126b6a619130909c8404070e2b473dd2b7fc4aacacd2e763f8597fea"}, - {file = "fonttools-4.53.0-cp312-cp312-win_amd64.whl", hash = "sha256:45b4afb069039f0366a43a5d454bc54eea942bfb66b3fc3e9a2c07ef4d617380"}, - {file = "fonttools-4.53.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:93bc9e5aaa06ff928d751dc6be889ff3e7d2aa393ab873bc7f6396a99f6fbb12"}, - {file = "fonttools-4.53.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:2367d47816cc9783a28645bc1dac07f8ffc93e0f015e8c9fc674a5b76a6da6e4"}, - {file = "fonttools-4.53.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:907fa0b662dd8fc1d7c661b90782ce81afb510fc4b7aa6ae7304d6c094b27bce"}, - {file = "fonttools-4.53.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3e0ad3c6ea4bd6a289d958a1eb922767233f00982cf0fe42b177657c86c80a8f"}, - {file = "fonttools-4.53.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:73121a9b7ff93ada888aaee3985a88495489cc027894458cb1a736660bdfb206"}, - {file = "fonttools-4.53.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:ee595d7ba9bba130b2bec555a40aafa60c26ce68ed0cf509983e0f12d88674fd"}, - {file = "fonttools-4.53.0-cp38-cp38-win32.whl", hash = "sha256:fca66d9ff2ac89b03f5aa17e0b21a97c21f3491c46b583bb131eb32c7bab33af"}, - {file = "fonttools-4.53.0-cp38-cp38-win_amd64.whl", hash = "sha256:31f0e3147375002aae30696dd1dc596636abbd22fca09d2e730ecde0baad1d6b"}, - {file = "fonttools-4.53.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7d6166192dcd925c78a91d599b48960e0a46fe565391c79fe6de481ac44d20ac"}, - {file = 
"fonttools-4.53.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ef50ec31649fbc3acf6afd261ed89d09eb909b97cc289d80476166df8438524d"}, - {file = "fonttools-4.53.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7f193f060391a455920d61684a70017ef5284ccbe6023bb056e15e5ac3de11d1"}, - {file = "fonttools-4.53.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba9f09ff17f947392a855e3455a846f9855f6cf6bec33e9a427d3c1d254c712f"}, - {file = "fonttools-4.53.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:0c555e039d268445172b909b1b6bdcba42ada1cf4a60e367d68702e3f87e5f64"}, - {file = "fonttools-4.53.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:5a4788036201c908079e89ae3f5399b33bf45b9ea4514913f4dbbe4fac08efe0"}, - {file = "fonttools-4.53.0-cp39-cp39-win32.whl", hash = "sha256:d1a24f51a3305362b94681120c508758a88f207fa0a681c16b5a4172e9e6c7a9"}, - {file = "fonttools-4.53.0-cp39-cp39-win_amd64.whl", hash = "sha256:1e677bfb2b4bd0e5e99e0f7283e65e47a9814b0486cb64a41adf9ef110e078f2"}, - {file = "fonttools-4.53.0-py3-none-any.whl", hash = "sha256:6b4f04b1fbc01a3569d63359f2227c89ab294550de277fd09d8fca6185669fa4"}, - {file = "fonttools-4.53.0.tar.gz", hash = "sha256:c93ed66d32de1559b6fc348838c7572d5c0ac1e4a258e76763a5caddd8944002"}, + {file = "fonttools-4.53.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:0679a30b59d74b6242909945429dbddb08496935b82f91ea9bf6ad240ec23397"}, + {file = "fonttools-4.53.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e8bf06b94694251861ba7fdeea15c8ec0967f84c3d4143ae9daf42bbc7717fe3"}, + {file = "fonttools-4.53.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b96cd370a61f4d083c9c0053bf634279b094308d52fdc2dd9a22d8372fdd590d"}, + {file = "fonttools-4.53.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a1c7c5aa18dd3b17995898b4a9b5929d69ef6ae2af5b96d585ff4005033d82f0"}, + {file = "fonttools-4.53.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:e013aae589c1c12505da64a7d8d023e584987e51e62006e1bb30d72f26522c41"}, + {file = "fonttools-4.53.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:9efd176f874cb6402e607e4cc9b4a9cd584d82fc34a4b0c811970b32ba62501f"}, + {file = "fonttools-4.53.1-cp310-cp310-win32.whl", hash = "sha256:c8696544c964500aa9439efb6761947393b70b17ef4e82d73277413f291260a4"}, + {file = "fonttools-4.53.1-cp310-cp310-win_amd64.whl", hash = "sha256:8959a59de5af6d2bec27489e98ef25a397cfa1774b375d5787509c06659b3671"}, + {file = "fonttools-4.53.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:da33440b1413bad53a8674393c5d29ce64d8c1a15ef8a77c642ffd900d07bfe1"}, + {file = "fonttools-4.53.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5ff7e5e9bad94e3a70c5cd2fa27f20b9bb9385e10cddab567b85ce5d306ea923"}, + {file = "fonttools-4.53.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c6e7170d675d12eac12ad1a981d90f118c06cf680b42a2d74c6c931e54b50719"}, + {file = "fonttools-4.53.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bee32ea8765e859670c4447b0817514ca79054463b6b79784b08a8df3a4d78e3"}, + {file = "fonttools-4.53.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:6e08f572625a1ee682115223eabebc4c6a2035a6917eac6f60350aba297ccadb"}, + {file = "fonttools-4.53.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:b21952c092ffd827504de7e66b62aba26fdb5f9d1e435c52477e6486e9d128b2"}, + {file = "fonttools-4.53.1-cp311-cp311-win32.whl", hash = 
"sha256:9dfdae43b7996af46ff9da520998a32b105c7f098aeea06b2226b30e74fbba88"}, + {file = "fonttools-4.53.1-cp311-cp311-win_amd64.whl", hash = "sha256:d4d0096cb1ac7a77b3b41cd78c9b6bc4a400550e21dc7a92f2b5ab53ed74eb02"}, + {file = "fonttools-4.53.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:d92d3c2a1b39631a6131c2fa25b5406855f97969b068e7e08413325bc0afba58"}, + {file = "fonttools-4.53.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:3b3c8ebafbee8d9002bd8f1195d09ed2bd9ff134ddec37ee8f6a6375e6a4f0e8"}, + {file = "fonttools-4.53.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:32f029c095ad66c425b0ee85553d0dc326d45d7059dbc227330fc29b43e8ba60"}, + {file = "fonttools-4.53.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:10f5e6c3510b79ea27bb1ebfcc67048cde9ec67afa87c7dd7efa5c700491ac7f"}, + {file = "fonttools-4.53.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:f677ce218976496a587ab17140da141557beb91d2a5c1a14212c994093f2eae2"}, + {file = "fonttools-4.53.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:9e6ceba2a01b448e36754983d376064730690401da1dd104ddb543519470a15f"}, + {file = "fonttools-4.53.1-cp312-cp312-win32.whl", hash = "sha256:791b31ebbc05197d7aa096bbc7bd76d591f05905d2fd908bf103af4488e60670"}, + {file = "fonttools-4.53.1-cp312-cp312-win_amd64.whl", hash = "sha256:6ed170b5e17da0264b9f6fae86073be3db15fa1bd74061c8331022bca6d09bab"}, + {file = "fonttools-4.53.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:c818c058404eb2bba05e728d38049438afd649e3c409796723dfc17cd3f08749"}, + {file = "fonttools-4.53.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:651390c3b26b0c7d1f4407cad281ee7a5a85a31a110cbac5269de72a51551ba2"}, + {file = "fonttools-4.53.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e54f1bba2f655924c1138bbc7fa91abd61f45c68bd65ab5ed985942712864bbb"}, + {file = "fonttools-4.53.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c9cd19cf4fe0595ebdd1d4915882b9440c3a6d30b008f3cc7587c1da7b95be5f"}, + {file = "fonttools-4.53.1-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:2af40ae9cdcb204fc1d8f26b190aa16534fcd4f0df756268df674a270eab575d"}, + {file = "fonttools-4.53.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:35250099b0cfb32d799fb5d6c651220a642fe2e3c7d2560490e6f1d3f9ae9169"}, + {file = "fonttools-4.53.1-cp38-cp38-win32.whl", hash = "sha256:f08df60fbd8d289152079a65da4e66a447efc1d5d5a4d3f299cdd39e3b2e4a7d"}, + {file = "fonttools-4.53.1-cp38-cp38-win_amd64.whl", hash = "sha256:7b6b35e52ddc8fb0db562133894e6ef5b4e54e1283dff606fda3eed938c36fc8"}, + {file = "fonttools-4.53.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:75a157d8d26c06e64ace9df037ee93a4938a4606a38cb7ffaf6635e60e253b7a"}, + {file = "fonttools-4.53.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4824c198f714ab5559c5be10fd1adf876712aa7989882a4ec887bf1ef3e00e31"}, + {file = "fonttools-4.53.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:becc5d7cb89c7b7afa8321b6bb3dbee0eec2b57855c90b3e9bf5fb816671fa7c"}, + {file = "fonttools-4.53.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:84ec3fb43befb54be490147b4a922b5314e16372a643004f182babee9f9c3407"}, + {file = "fonttools-4.53.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:73379d3ffdeecb376640cd8ed03e9d2d0e568c9d1a4e9b16504a834ebadc2dfb"}, + {file = "fonttools-4.53.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = 
"sha256:02569e9a810f9d11f4ae82c391ebc6fb5730d95a0657d24d754ed7763fb2d122"}, + {file = "fonttools-4.53.1-cp39-cp39-win32.whl", hash = "sha256:aae7bd54187e8bf7fd69f8ab87b2885253d3575163ad4d669a262fe97f0136cb"}, + {file = "fonttools-4.53.1-cp39-cp39-win_amd64.whl", hash = "sha256:e5b708073ea3d684235648786f5f6153a48dc8762cdfe5563c57e80787c29fbb"}, + {file = "fonttools-4.53.1-py3-none-any.whl", hash = "sha256:f1f8758a2ad110bd6432203a344269f445a2907dc24ef6bccfd0ac4e14e0d71d"}, + {file = "fonttools-4.53.1.tar.gz", hash = "sha256:e128778a8e9bc11159ce5447f76766cefbd876f44bd79aff030287254e4752c4"}, ] [package.extras] @@ -1190,13 +1281,13 @@ files = [ [[package]] name = "fsspec" -version = "2024.6.0" +version = "2024.9.0" description = "File-system specification" optional = true python-versions = ">=3.8" files = [ - {file = "fsspec-2024.6.0-py3-none-any.whl", hash = "sha256:58d7122eb8a1a46f7f13453187bfea4972d66bf01618d37366521b1998034cee"}, - {file = "fsspec-2024.6.0.tar.gz", hash = "sha256:f579960a56e6d8038a9efc8f9c77279ec12e6299aa86b0769a7e9c46b94527c2"}, + {file = "fsspec-2024.9.0-py3-none-any.whl", hash = "sha256:a0947d552d8a6efa72cc2c730b12c41d043509156966cca4fb157b0f2a0c574b"}, + {file = "fsspec-2024.9.0.tar.gz", hash = "sha256:4b0afb90c2f21832df142f292649035d80b421f60a9e1c027802e5a0da2b04e8"}, ] [package.extras] @@ -1259,6 +1350,29 @@ gitdb = ">=4.0.1,<5" doc = ["sphinx (==4.3.2)", "sphinx-autodoc-typehints", "sphinx-rtd-theme", "sphinxcontrib-applehelp (>=1.0.2,<=1.0.4)", "sphinxcontrib-devhelp (==1.0.2)", "sphinxcontrib-htmlhelp (>=2.0.0,<=2.0.1)", "sphinxcontrib-qthelp (==1.0.3)", "sphinxcontrib-serializinghtml (==1.1.5)"] test = ["coverage[toml]", "ddt (>=1.1.1,!=1.4.3)", "mock", "mypy", "pre-commit", "pytest (>=7.3.1)", "pytest-cov", "pytest-instafail", "pytest-mock", "pytest-sugar", "typing-extensions"] +[[package]] +name = "google-auth" +version = "2.34.0" +description = "Google Authentication Library" +optional = true +python-versions = ">=3.7" +files = [ + {file = "google_auth-2.34.0-py2.py3-none-any.whl", hash = "sha256:72fd4733b80b6d777dcde515628a9eb4a577339437012874ea286bca7261ee65"}, + {file = "google_auth-2.34.0.tar.gz", hash = "sha256:8eb87396435c19b20d32abd2f984e31c191a15284af72eb922f10e5bde9c04cc"}, +] + +[package.dependencies] +cachetools = ">=2.0.0,<6.0" +pyasn1-modules = ">=0.2.1" +rsa = ">=3.1.4,<5" + +[package.extras] +aiohttp = ["aiohttp (>=3.6.2,<4.0.0.dev0)", "requests (>=2.20.0,<3.0.0.dev0)"] +enterprise-cert = ["cryptography", "pyopenssl"] +pyopenssl = ["cryptography (>=38.0.3)", "pyopenssl (>=20.0.0)"] +reauth = ["pyu2f (>=0.1.5)"] +requests = ["requests (>=2.20.0,<3.0.0.dev0)"] + [[package]] name = "graphene" version = "3.3" @@ -1281,13 +1395,13 @@ test = ["coveralls (>=3.3,<4)", "iso8601 (>=1,<2)", "mock (>=4,<5)", "pytest (>= [[package]] name = "graphql-core" -version = "3.2.3" +version = "3.2.4" description = "GraphQL implementation for Python, a port of GraphQL.js, the JavaScript reference implementation for GraphQL." 
optional = true -python-versions = ">=3.6,<4" +python-versions = "<4,>=3.6" files = [ - {file = "graphql-core-3.2.3.tar.gz", hash = "sha256:06d2aad0ac723e35b1cb47885d3e5c45e956a53bc1b209a9fc5369007fe46676"}, - {file = "graphql_core-3.2.3-py3-none-any.whl", hash = "sha256:5766780452bd5ec8ba133f8bf287dc92713e3868ddd83aee4faab9fc3e303dc3"}, + {file = "graphql-core-3.2.4.tar.gz", hash = "sha256:acbe2e800980d0e39b4685dd058c2f4042660b89ebca38af83020fd872ff1264"}, + {file = "graphql_core-3.2.4-py3-none-any.whl", hash = "sha256:1604f2042edc5f3114f49cac9d77e25863be51b23a54a61a23245cf32f6476f0"}, ] [[package]] @@ -1306,69 +1420,77 @@ graphql-core = ">=3.2,<3.3" [[package]] name = "greenlet" -version = "3.0.3" +version = "3.1.0" description = "Lightweight in-process concurrent programming" optional = false python-versions = ">=3.7" files = [ - {file = "greenlet-3.0.3-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:9da2bd29ed9e4f15955dd1595ad7bc9320308a3b766ef7f837e23ad4b4aac31a"}, - {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d353cadd6083fdb056bb46ed07e4340b0869c305c8ca54ef9da3421acbdf6881"}, - {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dca1e2f3ca00b84a396bc1bce13dd21f680f035314d2379c4160c98153b2059b"}, - {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3ed7fb269f15dc662787f4119ec300ad0702fa1b19d2135a37c2c4de6fadfd4a"}, - {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd4f49ae60e10adbc94b45c0b5e6a179acc1736cf7a90160b404076ee283cf83"}, - {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:73a411ef564e0e097dbe7e866bb2dda0f027e072b04da387282b02c308807405"}, - {file = "greenlet-3.0.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:7f362975f2d179f9e26928c5b517524e89dd48530a0202570d55ad6ca5d8a56f"}, - {file = "greenlet-3.0.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:649dde7de1a5eceb258f9cb00bdf50e978c9db1b996964cd80703614c86495eb"}, - {file = "greenlet-3.0.3-cp310-cp310-win_amd64.whl", hash = "sha256:68834da854554926fbedd38c76e60c4a2e3198c6fbed520b106a8986445caaf9"}, - {file = "greenlet-3.0.3-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:b1b5667cced97081bf57b8fa1d6bfca67814b0afd38208d52538316e9422fc61"}, - {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:52f59dd9c96ad2fc0d5724107444f76eb20aaccb675bf825df6435acb7703559"}, - {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:afaff6cf5200befd5cec055b07d1c0a5a06c040fe5ad148abcd11ba6ab9b114e"}, - {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fe754d231288e1e64323cfad462fcee8f0288654c10bdf4f603a39ed923bef33"}, - {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2797aa5aedac23af156bbb5a6aa2cd3427ada2972c828244eb7d1b9255846379"}, - {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b7f009caad047246ed379e1c4dbcb8b020f0a390667ea74d2387be2998f58a22"}, - {file = "greenlet-3.0.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:c5e1536de2aad7bf62e27baf79225d0d64360d4168cf2e6becb91baf1ed074f3"}, - {file = "greenlet-3.0.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = 
"sha256:894393ce10ceac937e56ec00bb71c4c2f8209ad516e96033e4b3b1de270e200d"}, - {file = "greenlet-3.0.3-cp311-cp311-win_amd64.whl", hash = "sha256:1ea188d4f49089fc6fb283845ab18a2518d279c7cd9da1065d7a84e991748728"}, - {file = "greenlet-3.0.3-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:70fb482fdf2c707765ab5f0b6655e9cfcf3780d8d87355a063547b41177599be"}, - {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d4d1ac74f5c0c0524e4a24335350edad7e5f03b9532da7ea4d3c54d527784f2e"}, - {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:149e94a2dd82d19838fe4b2259f1b6b9957d5ba1b25640d2380bea9c5df37676"}, - {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:15d79dd26056573940fcb8c7413d84118086f2ec1a8acdfa854631084393efcc"}, - {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:881b7db1ebff4ba09aaaeae6aa491daeb226c8150fc20e836ad00041bcb11230"}, - {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fcd2469d6a2cf298f198f0487e0a5b1a47a42ca0fa4dfd1b6862c999f018ebbf"}, - {file = "greenlet-3.0.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:1f672519db1796ca0d8753f9e78ec02355e862d0998193038c7073045899f305"}, - {file = "greenlet-3.0.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:2516a9957eed41dd8f1ec0c604f1cdc86758b587d964668b5b196a9db5bfcde6"}, - {file = "greenlet-3.0.3-cp312-cp312-win_amd64.whl", hash = "sha256:bba5387a6975598857d86de9eac14210a49d554a77eb8261cc68b7d082f78ce2"}, - {file = "greenlet-3.0.3-cp37-cp37m-macosx_11_0_universal2.whl", hash = "sha256:5b51e85cb5ceda94e79d019ed36b35386e8c37d22f07d6a751cb659b180d5274"}, - {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:daf3cb43b7cf2ba96d614252ce1684c1bccee6b2183a01328c98d36fcd7d5cb0"}, - {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:99bf650dc5d69546e076f413a87481ee1d2d09aaaaaca058c9251b6d8c14783f"}, - {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2dd6e660effd852586b6a8478a1d244b8dc90ab5b1321751d2ea15deb49ed414"}, - {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e3391d1e16e2a5a1507d83e4a8b100f4ee626e8eca43cf2cadb543de69827c4c"}, - {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e1f145462f1fa6e4a4ae3c0f782e580ce44d57c8f2c7aae1b6fa88c0b2efdb41"}, - {file = "greenlet-3.0.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:1a7191e42732df52cb5f39d3527217e7ab73cae2cb3694d241e18f53d84ea9a7"}, - {file = "greenlet-3.0.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:0448abc479fab28b00cb472d278828b3ccca164531daab4e970a0458786055d6"}, - {file = "greenlet-3.0.3-cp37-cp37m-win32.whl", hash = "sha256:b542be2440edc2d48547b5923c408cbe0fc94afb9f18741faa6ae970dbcb9b6d"}, - {file = "greenlet-3.0.3-cp37-cp37m-win_amd64.whl", hash = "sha256:01bc7ea167cf943b4c802068e178bbf70ae2e8c080467070d01bfa02f337ee67"}, - {file = "greenlet-3.0.3-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:1996cb9306c8595335bb157d133daf5cf9f693ef413e7673cb07e3e5871379ca"}, - {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3ddc0f794e6ad661e321caa8d2f0a55ce01213c74722587256fb6566049a8b04"}, - {file = 
"greenlet-3.0.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c9db1c18f0eaad2f804728c67d6c610778456e3e1cc4ab4bbd5eeb8e6053c6fc"}, - {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7170375bcc99f1a2fbd9c306f5be8764eaf3ac6b5cb968862cad4c7057756506"}, - {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6b66c9c1e7ccabad3a7d037b2bcb740122a7b17a53734b7d72a344ce39882a1b"}, - {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:098d86f528c855ead3479afe84b49242e174ed262456c342d70fc7f972bc13c4"}, - {file = "greenlet-3.0.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:81bb9c6d52e8321f09c3d165b2a78c680506d9af285bfccbad9fb7ad5a5da3e5"}, - {file = "greenlet-3.0.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:fd096eb7ffef17c456cfa587523c5f92321ae02427ff955bebe9e3c63bc9f0da"}, - {file = "greenlet-3.0.3-cp38-cp38-win32.whl", hash = "sha256:d46677c85c5ba00a9cb6f7a00b2bfa6f812192d2c9f7d9c4f6a55b60216712f3"}, - {file = "greenlet-3.0.3-cp38-cp38-win_amd64.whl", hash = "sha256:419b386f84949bf0e7c73e6032e3457b82a787c1ab4a0e43732898a761cc9dbf"}, - {file = "greenlet-3.0.3-cp39-cp39-macosx_11_0_universal2.whl", hash = "sha256:da70d4d51c8b306bb7a031d5cff6cc25ad253affe89b70352af5f1cb68e74b53"}, - {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:086152f8fbc5955df88382e8a75984e2bb1c892ad2e3c80a2508954e52295257"}, - {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d73a9fe764d77f87f8ec26a0c85144d6a951a6c438dfe50487df5595c6373eac"}, - {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b7dcbe92cc99f08c8dd11f930de4d99ef756c3591a5377d1d9cd7dd5e896da71"}, - {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1551a8195c0d4a68fac7a4325efac0d541b48def35feb49d803674ac32582f61"}, - {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:64d7675ad83578e3fc149b617a444fab8efdafc9385471f868eb5ff83e446b8b"}, - {file = "greenlet-3.0.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b37eef18ea55f2ffd8f00ff8fe7c8d3818abd3e25fb73fae2ca3b672e333a7a6"}, - {file = "greenlet-3.0.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:77457465d89b8263bca14759d7c1684df840b6811b2499838cc5b040a8b5b113"}, - {file = "greenlet-3.0.3-cp39-cp39-win32.whl", hash = "sha256:57e8974f23e47dac22b83436bdcf23080ade568ce77df33159e019d161ce1d1e"}, - {file = "greenlet-3.0.3-cp39-cp39-win_amd64.whl", hash = "sha256:c5ee858cfe08f34712f548c3c363e807e7186f03ad7a5039ebadb29e8c6be067"}, - {file = "greenlet-3.0.3.tar.gz", hash = "sha256:43374442353259554ce33599da8b692d5aa96f8976d567d4badf263371fbe491"}, + {file = "greenlet-3.1.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a814dc3100e8a046ff48faeaa909e80cdb358411a3d6dd5293158425c684eda8"}, + {file = "greenlet-3.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a771dc64fa44ebe58d65768d869fcfb9060169d203446c1d446e844b62bdfdca"}, + {file = "greenlet-3.1.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0e49a65d25d7350cca2da15aac31b6f67a43d867448babf997fe83c7505f57bc"}, + {file = "greenlet-3.1.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2cd8518eade968bc52262d8c46727cfc0826ff4d552cf0430b8d65aaf50bb91d"}, + 
{file = "greenlet-3.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:76dc19e660baea5c38e949455c1181bc018893f25372d10ffe24b3ed7341fb25"}, + {file = "greenlet-3.1.0-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c0a5b1c22c82831f56f2f7ad9bbe4948879762fe0d59833a4a71f16e5fa0f682"}, + {file = "greenlet-3.1.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:2651dfb006f391bcb240635079a68a261b227a10a08af6349cba834a2141efa1"}, + {file = "greenlet-3.1.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:3e7e6ef1737a819819b1163116ad4b48d06cfdd40352d813bb14436024fcda99"}, + {file = "greenlet-3.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:ffb08f2a1e59d38c7b8b9ac8083c9c8b9875f0955b1e9b9b9a965607a51f8e54"}, + {file = "greenlet-3.1.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:9730929375021ec90f6447bff4f7f5508faef1c02f399a1953870cdb78e0c345"}, + {file = "greenlet-3.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:713d450cf8e61854de9420fb7eea8ad228df4e27e7d4ed465de98c955d2b3fa6"}, + {file = "greenlet-3.1.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4c3446937be153718250fe421da548f973124189f18fe4575a0510b5c928f0cc"}, + {file = "greenlet-3.1.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1ddc7bcedeb47187be74208bc652d63d6b20cb24f4e596bd356092d8000da6d6"}, + {file = "greenlet-3.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:44151d7b81b9391ed759a2f2865bbe623ef00d648fed59363be2bbbd5154656f"}, + {file = "greenlet-3.1.0-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6cea1cca3be76c9483282dc7760ea1cc08a6ecec1f0b6ca0a94ea0d17432da19"}, + {file = "greenlet-3.1.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:619935a44f414274a2c08c9e74611965650b730eb4efe4b2270f91df5e4adf9a"}, + {file = "greenlet-3.1.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:221169d31cada333a0c7fd087b957c8f431c1dba202c3a58cf5a3583ed973e9b"}, + {file = "greenlet-3.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:01059afb9b178606b4b6e92c3e710ea1635597c3537e44da69f4531e111dd5e9"}, + {file = "greenlet-3.1.0-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:24fc216ec7c8be9becba8b64a98a78f9cd057fd2dc75ae952ca94ed8a893bf27"}, + {file = "greenlet-3.1.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3d07c28b85b350564bdff9f51c1c5007dfb2f389385d1bc23288de51134ca303"}, + {file = "greenlet-3.1.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:243a223c96a4246f8a30ea470c440fe9db1f5e444941ee3c3cd79df119b8eebf"}, + {file = "greenlet-3.1.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:26811df4dc81271033a7836bc20d12cd30938e6bd2e9437f56fa03da81b0f8fc"}, + {file = "greenlet-3.1.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c9d86401550b09a55410f32ceb5fe7efcd998bd2dad9e82521713cb148a4a15f"}, + {file = "greenlet-3.1.0-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:26d9c1c4f1748ccac0bae1dbb465fb1a795a75aba8af8ca871503019f4285e2a"}, + {file = "greenlet-3.1.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:cd468ec62257bb4544989402b19d795d2305eccb06cde5da0eb739b63dc04665"}, + {file = "greenlet-3.1.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:a53dfe8f82b715319e9953330fa5c8708b610d48b5c59f1316337302af5c0811"}, + {file = 
"greenlet-3.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:28fe80a3eb673b2d5cc3b12eea468a5e5f4603c26aa34d88bf61bba82ceb2f9b"}, + {file = "greenlet-3.1.0-cp313-cp313-macosx_11_0_universal2.whl", hash = "sha256:76b3e3976d2a452cba7aa9e453498ac72240d43030fdc6d538a72b87eaff52fd"}, + {file = "greenlet-3.1.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:655b21ffd37a96b1e78cc48bf254f5ea4b5b85efaf9e9e2a526b3c9309d660ca"}, + {file = "greenlet-3.1.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c6f4c2027689093775fd58ca2388d58789009116844432d920e9147f91acbe64"}, + {file = "greenlet-3.1.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:76e5064fd8e94c3f74d9fd69b02d99e3cdb8fc286ed49a1f10b256e59d0d3a0b"}, + {file = "greenlet-3.1.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6a4bf607f690f7987ab3291406e012cd8591a4f77aa54f29b890f9c331e84989"}, + {file = "greenlet-3.1.0-cp313-cp313-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:037d9ac99540ace9424cb9ea89f0accfaff4316f149520b4ae293eebc5bded17"}, + {file = "greenlet-3.1.0-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:90b5bbf05fe3d3ef697103850c2ce3374558f6fe40fd57c9fac1bf14903f50a5"}, + {file = "greenlet-3.1.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:726377bd60081172685c0ff46afbc600d064f01053190e4450857483c4d44484"}, + {file = "greenlet-3.1.0-cp313-cp313-win_amd64.whl", hash = "sha256:d46d5069e2eeda111d6f71970e341f4bd9aeeee92074e649ae263b834286ecc0"}, + {file = "greenlet-3.1.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:81eeec4403a7d7684b5812a8aaa626fa23b7d0848edb3a28d2eb3220daddcbd0"}, + {file = "greenlet-3.1.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4a3dae7492d16e85ea6045fd11cb8e782b63eac8c8d520c3a92c02ac4573b0a6"}, + {file = "greenlet-3.1.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4b5ea3664eed571779403858d7cd0a9b0ebf50d57d2cdeafc7748e09ef8cd81a"}, + {file = "greenlet-3.1.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a22f4e26400f7f48faef2d69c20dc055a1f3043d330923f9abe08ea0aecc44df"}, + {file = "greenlet-3.1.0-cp37-cp37m-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:13ff8c8e54a10472ce3b2a2da007f915175192f18e6495bad50486e87c7f6637"}, + {file = "greenlet-3.1.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:f9671e7282d8c6fcabc32c0fb8d7c0ea8894ae85cee89c9aadc2d7129e1a9954"}, + {file = "greenlet-3.1.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:184258372ae9e1e9bddce6f187967f2e08ecd16906557c4320e3ba88a93438c3"}, + {file = "greenlet-3.1.0-cp37-cp37m-win32.whl", hash = "sha256:a0409bc18a9f85321399c29baf93545152d74a49d92f2f55302f122007cfda00"}, + {file = "greenlet-3.1.0-cp37-cp37m-win_amd64.whl", hash = "sha256:9eb4a1d7399b9f3c7ac68ae6baa6be5f9195d1d08c9ddc45ad559aa6b556bce6"}, + {file = "greenlet-3.1.0-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:a8870983af660798dc1b529e1fd6f1cefd94e45135a32e58bd70edd694540f33"}, + {file = "greenlet-3.1.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cfcfb73aed40f550a57ea904629bdaf2e562c68fa1164fa4588e752af6efdc3f"}, + {file = "greenlet-3.1.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f9482c2ed414781c0af0b35d9d575226da6b728bd1a720668fa05837184965b7"}, + {file = "greenlet-3.1.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:d58ec349e0c2c0bc6669bf2cd4982d2f93bf067860d23a0ea1fe677b0f0b1e09"}, + {file = "greenlet-3.1.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd65695a8df1233309b701dec2539cc4b11e97d4fcc0f4185b4a12ce54db0491"}, + {file = "greenlet-3.1.0-cp38-cp38-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:665b21e95bc0fce5cab03b2e1d90ba9c66c510f1bb5fdc864f3a377d0f553f6b"}, + {file = "greenlet-3.1.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:d3c59a06c2c28a81a026ff11fbf012081ea34fb9b7052f2ed0366e14896f0a1d"}, + {file = "greenlet-3.1.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:5415b9494ff6240b09af06b91a375731febe0090218e2898d2b85f9b92abcda0"}, + {file = "greenlet-3.1.0-cp38-cp38-win32.whl", hash = "sha256:1544b8dd090b494c55e60c4ff46e238be44fdc472d2589e943c241e0169bcea2"}, + {file = "greenlet-3.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:7f346d24d74c00b6730440f5eb8ec3fe5774ca8d1c9574e8e57c8671bb51b910"}, + {file = "greenlet-3.1.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:db1b3ccb93488328c74e97ff888604a8b95ae4f35f4f56677ca57a4fc3a4220b"}, + {file = "greenlet-3.1.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:44cd313629ded43bb3b98737bba2f3e2c2c8679b55ea29ed73daea6b755fe8e7"}, + {file = "greenlet-3.1.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fad7a051e07f64e297e6e8399b4d6a3bdcad3d7297409e9a06ef8cbccff4f501"}, + {file = "greenlet-3.1.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c3967dcc1cd2ea61b08b0b276659242cbce5caca39e7cbc02408222fb9e6ff39"}, + {file = "greenlet-3.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d45b75b0f3fd8d99f62eb7908cfa6d727b7ed190737dec7fe46d993da550b81a"}, + {file = "greenlet-3.1.0-cp39-cp39-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2d004db911ed7b6218ec5c5bfe4cf70ae8aa2223dffbb5b3c69e342bb253cb28"}, + {file = "greenlet-3.1.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b9505a0c8579899057cbefd4ec34d865ab99852baf1ff33a9481eb3924e2da0b"}, + {file = "greenlet-3.1.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5fd6e94593f6f9714dbad1aaba734b5ec04593374fa6638df61592055868f8b8"}, + {file = "greenlet-3.1.0-cp39-cp39-win32.whl", hash = "sha256:d0dd943282231480aad5f50f89bdf26690c995e8ff555f26d8a5b9887b559bcc"}, + {file = "greenlet-3.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:ac0adfdb3a21dc2a24ed728b61e72440d297d0fd3a577389df566651fcd08f97"}, + {file = "greenlet-3.1.0.tar.gz", hash = "sha256:b395121e9bbe8d02a750886f108d540abe66075e61e22f7353d9acb0b81be0f0"}, ] [package.extras] @@ -1377,13 +1499,13 @@ test = ["objgraph", "psutil"] [[package]] name = "gunicorn" -version = "22.0.0" +version = "23.0.0" description = "WSGI HTTP Server for UNIX" optional = true python-versions = ">=3.7" files = [ - {file = "gunicorn-22.0.0-py3-none-any.whl", hash = "sha256:350679f91b24062c86e386e198a15438d53a7a8207235a78ba1b53df4c4378d9"}, - {file = "gunicorn-22.0.0.tar.gz", hash = "sha256:4a0b436239ff76fb33f11c07a16482c521a7e09c1ce3cc293c2330afe01bec63"}, + {file = "gunicorn-23.0.0-py3-none-any.whl", hash = "sha256:ec400d38950de4dfd418cff8328b2c8faed0edb0d517d3394e457c317908ca4d"}, + {file = "gunicorn-23.0.0.tar.gz", hash = "sha256:f014447a0101dc57e294f6c18ca6b40227a4c90e9bdb586042628030cba004ec"}, ] [package.dependencies] @@ -1430,13 +1552,13 @@ trio = ["trio (>=0.22.0,<0.26.0)"] [[package]] name = "httpx" -version = "0.27.0" +version = "0.27.2" description = "The next 
generation HTTP client." optional = false python-versions = ">=3.8" files = [ - {file = "httpx-0.27.0-py3-none-any.whl", hash = "sha256:71d5465162c13681bff01ad59b2cc68dd838ea1f10e51574bac27103f00c91a5"}, - {file = "httpx-0.27.0.tar.gz", hash = "sha256:a0cb88a46f32dc874e04ee956e4c2764aba2aa228f650b06788ba6bda2962ab5"}, + {file = "httpx-0.27.2-py3-none-any.whl", hash = "sha256:7bb2708e112d8fdd7829cd4243970f0c223274051cb35ee80c03301ee29a3df0"}, + {file = "httpx-0.27.2.tar.gz", hash = "sha256:f7c2be1d2f3c3c3160d441802406b206c2b76f5947b11115e6df10c6c65e66c2"}, ] [package.dependencies] @@ -1451,36 +1573,37 @@ brotli = ["brotli", "brotlicffi"] cli = ["click (==8.*)", "pygments (==2.*)", "rich (>=10,<14)"] http2 = ["h2 (>=3,<5)"] socks = ["socksio (==1.*)"] +zstd = ["zstandard (>=0.18.0)"] [[package]] name = "idna" -version = "3.7" +version = "3.8" description = "Internationalized Domain Names in Applications (IDNA)" optional = false -python-versions = ">=3.5" +python-versions = ">=3.6" files = [ - {file = "idna-3.7-py3-none-any.whl", hash = "sha256:82fee1fc78add43492d3a1898bfa6d8a904cc97d8427f683ed8e798d07761aa0"}, - {file = "idna-3.7.tar.gz", hash = "sha256:028ff3aadf0609c1fd278d8ea3089299412a7a8b9bd005dd08b9f8285bcb5cfc"}, + {file = "idna-3.8-py3-none-any.whl", hash = "sha256:050b4e5baadcd44d760cedbd2b8e639f2ff89bbc7a5730fcc662954303377aac"}, + {file = "idna-3.8.tar.gz", hash = "sha256:d838c2c0ed6fced7693d5e8ab8e734d5f8fda53a039c0164afb0b82e771e3603"}, ] [[package]] name = "importlib-metadata" -version = "7.1.0" +version = "8.4.0" description = "Read metadata from Python packages" optional = true python-versions = ">=3.8" files = [ - {file = "importlib_metadata-7.1.0-py3-none-any.whl", hash = "sha256:30962b96c0c223483ed6cc7280e7f0199feb01a0e40cfae4d4450fc6fab1f570"}, - {file = "importlib_metadata-7.1.0.tar.gz", hash = "sha256:b78938b926ee8d5f020fc4772d487045805a55ddbad2ecf21c6d60938dc7fcd2"}, + {file = "importlib_metadata-8.4.0-py3-none-any.whl", hash = "sha256:66f342cc6ac9818fc6ff340576acd24d65ba0b3efabb2b4ac08b598965a4a2f1"}, + {file = "importlib_metadata-8.4.0.tar.gz", hash = "sha256:9a547d3bc3608b025f93d403fdd1aae741c24fbb8314df4b155675742ce303c5"}, ] [package.dependencies] zipp = ">=0.5" [package.extras] -docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] perf = ["ipython"] -testing = ["flufl.flake8", "importlib-resources (>=1.3)", "jaraco.test (>=5.4)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy", "pytest-perf (>=0.9.2)", "pytest-ruff (>=0.2.1)"] +test = ["flufl.flake8", "importlib-resources (>=1.3)", "jaraco.test (>=5.4)", "packaging", "pyfakefs", "pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy", "pytest-perf (>=0.9.2)", "pytest-ruff (>=0.2.1)"] [[package]] name = "iniconfig" @@ -1536,7 +1659,7 @@ files = [ name = "jinja2" version = "3.1.4" description = "A very fast and expressive template engine." 
-optional = false +optional = true python-versions = ">=3.7" files = [ {file = "jinja2-3.1.4-py3-none-any.whl", hash = "sha256:bc5dd2abb727a5319567b7a813e6a2e7318c39f4f487cfe6c89c6f9c7d25197d"}, @@ -1576,115 +1699,125 @@ crc32c = ["crc32c"] [[package]] name = "kiwisolver" -version = "1.4.5" +version = "1.4.7" description = "A fast implementation of the Cassowary constraint solver" optional = true -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "kiwisolver-1.4.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:05703cf211d585109fcd72207a31bb170a0f22144d68298dc5e61b3c946518af"}, - {file = "kiwisolver-1.4.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:146d14bebb7f1dc4d5fbf74f8a6cb15ac42baadee8912eb84ac0b3b2a3dc6ac3"}, - {file = "kiwisolver-1.4.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6ef7afcd2d281494c0a9101d5c571970708ad911d028137cd558f02b851c08b4"}, - {file = "kiwisolver-1.4.5-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:9eaa8b117dc8337728e834b9c6e2611f10c79e38f65157c4c38e9400286f5cb1"}, - {file = "kiwisolver-1.4.5-cp310-cp310-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:ec20916e7b4cbfb1f12380e46486ec4bcbaa91a9c448b97023fde0d5bbf9e4ff"}, - {file = "kiwisolver-1.4.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:39b42c68602539407884cf70d6a480a469b93b81b7701378ba5e2328660c847a"}, - {file = "kiwisolver-1.4.5-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:aa12042de0171fad672b6c59df69106d20d5596e4f87b5e8f76df757a7c399aa"}, - {file = "kiwisolver-1.4.5-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2a40773c71d7ccdd3798f6489aaac9eee213d566850a9533f8d26332d626b82c"}, - {file = "kiwisolver-1.4.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:19df6e621f6d8b4b9c4d45f40a66839294ff2bb235e64d2178f7522d9170ac5b"}, - {file = "kiwisolver-1.4.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:83d78376d0d4fd884e2c114d0621624b73d2aba4e2788182d286309ebdeed770"}, - {file = "kiwisolver-1.4.5-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:e391b1f0a8a5a10ab3b9bb6afcfd74f2175f24f8975fb87ecae700d1503cdee0"}, - {file = "kiwisolver-1.4.5-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:852542f9481f4a62dbb5dd99e8ab7aedfeb8fb6342349a181d4036877410f525"}, - {file = "kiwisolver-1.4.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:59edc41b24031bc25108e210c0def6f6c2191210492a972d585a06ff246bb79b"}, - {file = "kiwisolver-1.4.5-cp310-cp310-win32.whl", hash = "sha256:a6aa6315319a052b4ee378aa171959c898a6183f15c1e541821c5c59beaa0238"}, - {file = "kiwisolver-1.4.5-cp310-cp310-win_amd64.whl", hash = "sha256:d0ef46024e6a3d79c01ff13801cb19d0cad7fd859b15037aec74315540acc276"}, - {file = "kiwisolver-1.4.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:11863aa14a51fd6ec28688d76f1735f8f69ab1fabf388851a595d0721af042f5"}, - {file = "kiwisolver-1.4.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:8ab3919a9997ab7ef2fbbed0cc99bb28d3c13e6d4b1ad36e97e482558a91be90"}, - {file = "kiwisolver-1.4.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:fcc700eadbbccbf6bc1bcb9dbe0786b4b1cb91ca0dcda336eef5c2beed37b797"}, - {file = "kiwisolver-1.4.5-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dfdd7c0b105af050eb3d64997809dc21da247cf44e63dc73ff0fd20b96be55a9"}, - {file = "kiwisolver-1.4.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:76c6a5964640638cdeaa0c359382e5703e9293030fe730018ca06bc2010c4437"}, - {file = "kiwisolver-1.4.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bbea0db94288e29afcc4c28afbf3a7ccaf2d7e027489c449cf7e8f83c6346eb9"}, - {file = "kiwisolver-1.4.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ceec1a6bc6cab1d6ff5d06592a91a692f90ec7505d6463a88a52cc0eb58545da"}, - {file = "kiwisolver-1.4.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:040c1aebeda72197ef477a906782b5ab0d387642e93bda547336b8957c61022e"}, - {file = "kiwisolver-1.4.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:f91de7223d4c7b793867797bacd1ee53bfe7359bd70d27b7b58a04efbb9436c8"}, - {file = "kiwisolver-1.4.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:faae4860798c31530dd184046a900e652c95513796ef51a12bc086710c2eec4d"}, - {file = "kiwisolver-1.4.5-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:b0157420efcb803e71d1b28e2c287518b8808b7cf1ab8af36718fd0a2c453eb0"}, - {file = "kiwisolver-1.4.5-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:06f54715b7737c2fecdbf140d1afb11a33d59508a47bf11bb38ecf21dc9ab79f"}, - {file = "kiwisolver-1.4.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:fdb7adb641a0d13bdcd4ef48e062363d8a9ad4a182ac7647ec88f695e719ae9f"}, - {file = "kiwisolver-1.4.5-cp311-cp311-win32.whl", hash = "sha256:bb86433b1cfe686da83ce32a9d3a8dd308e85c76b60896d58f082136f10bffac"}, - {file = "kiwisolver-1.4.5-cp311-cp311-win_amd64.whl", hash = "sha256:6c08e1312a9cf1074d17b17728d3dfce2a5125b2d791527f33ffbe805200a355"}, - {file = "kiwisolver-1.4.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:32d5cf40c4f7c7b3ca500f8985eb3fb3a7dfc023215e876f207956b5ea26632a"}, - {file = "kiwisolver-1.4.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:f846c260f483d1fd217fe5ed7c173fb109efa6b1fc8381c8b7552c5781756192"}, - {file = "kiwisolver-1.4.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:5ff5cf3571589b6d13bfbfd6bcd7a3f659e42f96b5fd1c4830c4cf21d4f5ef45"}, - {file = "kiwisolver-1.4.5-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7269d9e5f1084a653d575c7ec012ff57f0c042258bf5db0954bf551c158466e7"}, - {file = "kiwisolver-1.4.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da802a19d6e15dffe4b0c24b38b3af68e6c1a68e6e1d8f30148c83864f3881db"}, - {file = "kiwisolver-1.4.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3aba7311af82e335dd1e36ffff68aaca609ca6290c2cb6d821a39aa075d8e3ff"}, - {file = "kiwisolver-1.4.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:763773d53f07244148ccac5b084da5adb90bfaee39c197554f01b286cf869228"}, - {file = "kiwisolver-1.4.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2270953c0d8cdab5d422bee7d2007f043473f9d2999631c86a223c9db56cbd16"}, - {file = "kiwisolver-1.4.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d099e745a512f7e3bbe7249ca835f4d357c586d78d79ae8f1dcd4d8adeb9bda9"}, - {file = "kiwisolver-1.4.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:74db36e14a7d1ce0986fa104f7d5637aea5c82ca6326ed0ec5694280942d1162"}, - {file = "kiwisolver-1.4.5-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:7e5bab140c309cb3a6ce373a9e71eb7e4873c70c2dda01df6820474f9889d6d4"}, - {file = "kiwisolver-1.4.5-cp312-cp312-musllinux_1_1_s390x.whl", hash = 
"sha256:0f114aa76dc1b8f636d077979c0ac22e7cd8f3493abbab152f20eb8d3cda71f3"}, - {file = "kiwisolver-1.4.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:88a2df29d4724b9237fc0c6eaf2a1adae0cdc0b3e9f4d8e7dc54b16812d2d81a"}, - {file = "kiwisolver-1.4.5-cp312-cp312-win32.whl", hash = "sha256:72d40b33e834371fd330fb1472ca19d9b8327acb79a5821d4008391db8e29f20"}, - {file = "kiwisolver-1.4.5-cp312-cp312-win_amd64.whl", hash = "sha256:2c5674c4e74d939b9d91dda0fae10597ac7521768fec9e399c70a1f27e2ea2d9"}, - {file = "kiwisolver-1.4.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:3a2b053a0ab7a3960c98725cfb0bf5b48ba82f64ec95fe06f1d06c99b552e130"}, - {file = "kiwisolver-1.4.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3cd32d6c13807e5c66a7cbb79f90b553642f296ae4518a60d8d76243b0ad2898"}, - {file = "kiwisolver-1.4.5-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:59ec7b7c7e1a61061850d53aaf8e93db63dce0c936db1fda2658b70e4a1be709"}, - {file = "kiwisolver-1.4.5-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:da4cfb373035def307905d05041c1d06d8936452fe89d464743ae7fb8371078b"}, - {file = "kiwisolver-1.4.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2400873bccc260b6ae184b2b8a4fec0e4082d30648eadb7c3d9a13405d861e89"}, - {file = "kiwisolver-1.4.5-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:1b04139c4236a0f3aff534479b58f6f849a8b351e1314826c2d230849ed48985"}, - {file = "kiwisolver-1.4.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:4e66e81a5779b65ac21764c295087de82235597a2293d18d943f8e9e32746265"}, - {file = "kiwisolver-1.4.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:7931d8f1f67c4be9ba1dd9c451fb0eeca1a25b89e4d3f89e828fe12a519b782a"}, - {file = "kiwisolver-1.4.5-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:b3f7e75f3015df442238cca659f8baa5f42ce2a8582727981cbfa15fee0ee205"}, - {file = "kiwisolver-1.4.5-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:bbf1d63eef84b2e8c89011b7f2235b1e0bf7dacc11cac9431fc6468e99ac77fb"}, - {file = "kiwisolver-1.4.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:4c380469bd3f970ef677bf2bcba2b6b0b4d5c75e7a020fb863ef75084efad66f"}, - {file = "kiwisolver-1.4.5-cp37-cp37m-win32.whl", hash = "sha256:9408acf3270c4b6baad483865191e3e582b638b1654a007c62e3efe96f09a9a3"}, - {file = "kiwisolver-1.4.5-cp37-cp37m-win_amd64.whl", hash = "sha256:5b94529f9b2591b7af5f3e0e730a4e0a41ea174af35a4fd067775f9bdfeee01a"}, - {file = "kiwisolver-1.4.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:11c7de8f692fc99816e8ac50d1d1aef4f75126eefc33ac79aac02c099fd3db71"}, - {file = "kiwisolver-1.4.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:53abb58632235cd154176ced1ae8f0d29a6657aa1aa9decf50b899b755bc2b93"}, - {file = "kiwisolver-1.4.5-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:88b9f257ca61b838b6f8094a62418421f87ac2a1069f7e896c36a7d86b5d4c29"}, - {file = "kiwisolver-1.4.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3195782b26fc03aa9c6913d5bad5aeb864bdc372924c093b0f1cebad603dd712"}, - {file = "kiwisolver-1.4.5-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fc579bf0f502e54926519451b920e875f433aceb4624a3646b3252b5caa9e0b6"}, - {file = "kiwisolver-1.4.5-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5a580c91d686376f0f7c295357595c5a026e6cbc3d77b7c36e290201e7c11ecb"}, - {file = "kiwisolver-1.4.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = 
"sha256:cfe6ab8da05c01ba6fbea630377b5da2cd9bcbc6338510116b01c1bc939a2c18"}, - {file = "kiwisolver-1.4.5-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:d2e5a98f0ec99beb3c10e13b387f8db39106d53993f498b295f0c914328b1333"}, - {file = "kiwisolver-1.4.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:a51a263952b1429e429ff236d2f5a21c5125437861baeed77f5e1cc2d2c7c6da"}, - {file = "kiwisolver-1.4.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:3edd2fa14e68c9be82c5b16689e8d63d89fe927e56debd6e1dbce7a26a17f81b"}, - {file = "kiwisolver-1.4.5-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:74d1b44c6cfc897df648cc9fdaa09bc3e7679926e6f96df05775d4fb3946571c"}, - {file = "kiwisolver-1.4.5-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:76d9289ed3f7501012e05abb8358bbb129149dbd173f1f57a1bf1c22d19ab7cc"}, - {file = "kiwisolver-1.4.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:92dea1ffe3714fa8eb6a314d2b3c773208d865a0e0d35e713ec54eea08a66250"}, - {file = "kiwisolver-1.4.5-cp38-cp38-win32.whl", hash = "sha256:5c90ae8c8d32e472be041e76f9d2f2dbff4d0b0be8bd4041770eddb18cf49a4e"}, - {file = "kiwisolver-1.4.5-cp38-cp38-win_amd64.whl", hash = "sha256:c7940c1dc63eb37a67721b10d703247552416f719c4188c54e04334321351ced"}, - {file = "kiwisolver-1.4.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:9407b6a5f0d675e8a827ad8742e1d6b49d9c1a1da5d952a67d50ef5f4170b18d"}, - {file = "kiwisolver-1.4.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:15568384086b6df3c65353820a4473575dbad192e35010f622c6ce3eebd57af9"}, - {file = "kiwisolver-1.4.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:0dc9db8e79f0036e8173c466d21ef18e1befc02de8bf8aa8dc0813a6dc8a7046"}, - {file = "kiwisolver-1.4.5-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:cdc8a402aaee9a798b50d8b827d7ecf75edc5fb35ea0f91f213ff927c15f4ff0"}, - {file = "kiwisolver-1.4.5-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:6c3bd3cde54cafb87d74d8db50b909705c62b17c2099b8f2e25b461882e544ff"}, - {file = "kiwisolver-1.4.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:955e8513d07a283056b1396e9a57ceddbd272d9252c14f154d450d227606eb54"}, - {file = "kiwisolver-1.4.5-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:346f5343b9e3f00b8db8ba359350eb124b98c99efd0b408728ac6ebf38173958"}, - {file = "kiwisolver-1.4.5-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b9098e0049e88c6a24ff64545cdfc50807818ba6c1b739cae221bbbcbc58aad3"}, - {file = "kiwisolver-1.4.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:00bd361b903dc4bbf4eb165f24d1acbee754fce22ded24c3d56eec268658a5cf"}, - {file = "kiwisolver-1.4.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:7b8b454bac16428b22560d0a1cf0a09875339cab69df61d7805bf48919415901"}, - {file = "kiwisolver-1.4.5-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:f1d072c2eb0ad60d4c183f3fb44ac6f73fb7a8f16a2694a91f988275cbf352f9"}, - {file = "kiwisolver-1.4.5-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:31a82d498054cac9f6d0b53d02bb85811185bcb477d4b60144f915f3b3126342"}, - {file = "kiwisolver-1.4.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:6512cb89e334e4700febbffaaa52761b65b4f5a3cf33f960213d5656cea36a77"}, - {file = "kiwisolver-1.4.5-cp39-cp39-win32.whl", hash = "sha256:9db8ea4c388fdb0f780fe91346fd438657ea602d58348753d9fb265ce1bca67f"}, - {file = "kiwisolver-1.4.5-cp39-cp39-win_amd64.whl", hash = "sha256:59415f46a37f7f2efeec758353dd2eae1b07640d8ca0f0c42548ec4125492635"}, - 
{file = "kiwisolver-1.4.5-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:5c7b3b3a728dc6faf3fc372ef24f21d1e3cee2ac3e9596691d746e5a536de920"}, - {file = "kiwisolver-1.4.5-pp37-pypy37_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:620ced262a86244e2be10a676b646f29c34537d0d9cc8eb26c08f53d98013390"}, - {file = "kiwisolver-1.4.5-pp37-pypy37_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:378a214a1e3bbf5ac4a8708304318b4f890da88c9e6a07699c4ae7174c09a68d"}, - {file = "kiwisolver-1.4.5-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aaf7be1207676ac608a50cd08f102f6742dbfc70e8d60c4db1c6897f62f71523"}, - {file = "kiwisolver-1.4.5-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:ba55dce0a9b8ff59495ddd050a0225d58bd0983d09f87cfe2b6aec4f2c1234e4"}, - {file = "kiwisolver-1.4.5-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:fd32ea360bcbb92d28933fc05ed09bffcb1704ba3fc7942e81db0fd4f81a7892"}, - {file = "kiwisolver-1.4.5-pp38-pypy38_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:5e7139af55d1688f8b960ee9ad5adafc4ac17c1c473fe07133ac092310d76544"}, - {file = "kiwisolver-1.4.5-pp38-pypy38_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:dced8146011d2bc2e883f9bd68618b8247387f4bbec46d7392b3c3b032640126"}, - {file = "kiwisolver-1.4.5-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c9bf3325c47b11b2e51bca0824ea217c7cd84491d8ac4eefd1e409705ef092bd"}, - {file = "kiwisolver-1.4.5-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:5794cf59533bc3f1b1c821f7206a3617999db9fbefc345360aafe2e067514929"}, - {file = "kiwisolver-1.4.5-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:e368f200bbc2e4f905b8e71eb38b3c04333bddaa6a2464a6355487b02bb7fb09"}, - {file = "kiwisolver-1.4.5-pp39-pypy39_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e5d706eba36b4c4d5bc6c6377bb6568098765e990cfc21ee16d13963fab7b3e7"}, - {file = "kiwisolver-1.4.5-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:85267bd1aa8880a9c88a8cb71e18d3d64d2751a790e6ca6c27b8ccc724bcd5ad"}, - {file = "kiwisolver-1.4.5-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:210ef2c3a1f03272649aff1ef992df2e724748918c4bc2d5a90352849eb40bea"}, - {file = "kiwisolver-1.4.5-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:11d011a7574eb3b82bcc9c1a1d35c1d7075677fdd15de527d91b46bd35e935ee"}, - {file = "kiwisolver-1.4.5.tar.gz", hash = "sha256:e57e563a57fb22a142da34f38acc2fc1a5c864bc29ca1517a88abc963e60d6ec"}, + {file = "kiwisolver-1.4.7-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:8a9c83f75223d5e48b0bc9cb1bf2776cf01563e00ade8775ffe13b0b6e1af3a6"}, + {file = "kiwisolver-1.4.7-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:58370b1ffbd35407444d57057b57da5d6549d2d854fa30249771775c63b5fe17"}, + {file = "kiwisolver-1.4.7-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:aa0abdf853e09aff551db11fce173e2177d00786c688203f52c87ad7fcd91ef9"}, + {file = "kiwisolver-1.4.7-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:8d53103597a252fb3ab8b5845af04c7a26d5e7ea8122303dd7a021176a87e8b9"}, + {file = "kiwisolver-1.4.7-cp310-cp310-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:88f17c5ffa8e9462fb79f62746428dd57b46eb931698e42e990ad63103f35e6c"}, + {file = "kiwisolver-1.4.7-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:88a9ca9c710d598fd75ee5de59d5bda2684d9db36a9f50b6125eaea3969c2599"}, + {file = "kiwisolver-1.4.7-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f4d742cb7af1c28303a51b7a27aaee540e71bb8e24f68c736f6f2ffc82f2bf05"}, + {file = "kiwisolver-1.4.7-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e28c7fea2196bf4c2f8d46a0415c77a1c480cc0724722f23d7410ffe9842c407"}, + {file = "kiwisolver-1.4.7-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:e968b84db54f9d42046cf154e02911e39c0435c9801681e3fc9ce8a3c4130278"}, + {file = "kiwisolver-1.4.7-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:0c18ec74c0472de033e1bebb2911c3c310eef5649133dd0bedf2a169a1b269e5"}, + {file = "kiwisolver-1.4.7-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:8f0ea6da6d393d8b2e187e6a5e3fb81f5862010a40c3945e2c6d12ae45cfb2ad"}, + {file = "kiwisolver-1.4.7-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:f106407dda69ae456dd1227966bf445b157ccc80ba0dff3802bb63f30b74e895"}, + {file = "kiwisolver-1.4.7-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:84ec80df401cfee1457063732d90022f93951944b5b58975d34ab56bb150dfb3"}, + {file = "kiwisolver-1.4.7-cp310-cp310-win32.whl", hash = "sha256:71bb308552200fb2c195e35ef05de12f0c878c07fc91c270eb3d6e41698c3bcc"}, + {file = "kiwisolver-1.4.7-cp310-cp310-win_amd64.whl", hash = "sha256:44756f9fd339de0fb6ee4f8c1696cfd19b2422e0d70b4cefc1cc7f1f64045a8c"}, + {file = "kiwisolver-1.4.7-cp310-cp310-win_arm64.whl", hash = "sha256:78a42513018c41c2ffd262eb676442315cbfe3c44eed82385c2ed043bc63210a"}, + {file = "kiwisolver-1.4.7-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:d2b0e12a42fb4e72d509fc994713d099cbb15ebf1103545e8a45f14da2dfca54"}, + {file = "kiwisolver-1.4.7-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:2a8781ac3edc42ea4b90bc23e7d37b665d89423818e26eb6df90698aa2287c95"}, + {file = "kiwisolver-1.4.7-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:46707a10836894b559e04b0fd143e343945c97fd170d69a2d26d640b4e297935"}, + {file = "kiwisolver-1.4.7-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ef97b8df011141c9b0f6caf23b29379f87dd13183c978a30a3c546d2c47314cb"}, + {file = "kiwisolver-1.4.7-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3ab58c12a2cd0fc769089e6d38466c46d7f76aced0a1f54c77652446733d2d02"}, + {file = "kiwisolver-1.4.7-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:803b8e1459341c1bb56d1c5c010406d5edec8a0713a0945851290a7930679b51"}, + {file = "kiwisolver-1.4.7-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f9a9e8a507420fe35992ee9ecb302dab68550dedc0da9e2880dd88071c5fb052"}, + {file = "kiwisolver-1.4.7-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:18077b53dc3bb490e330669a99920c5e6a496889ae8c63b58fbc57c3d7f33a18"}, + {file = "kiwisolver-1.4.7-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:6af936f79086a89b3680a280c47ea90b4df7047b5bdf3aa5c524bbedddb9e545"}, + {file = "kiwisolver-1.4.7-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:3abc5b19d24af4b77d1598a585b8a719beb8569a71568b66f4ebe1fb0449460b"}, + {file = "kiwisolver-1.4.7-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:933d4de052939d90afbe6e9d5273ae05fb836cc86c15b686edd4b3560cc0ee36"}, + {file = "kiwisolver-1.4.7-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:65e720d2ab2b53f1f72fb5da5fb477455905ce2c88aaa671ff0a447c2c80e8e3"}, + {file = 
"kiwisolver-1.4.7-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:3bf1ed55088f214ba6427484c59553123fdd9b218a42bbc8c6496d6754b1e523"}, + {file = "kiwisolver-1.4.7-cp311-cp311-win32.whl", hash = "sha256:4c00336b9dd5ad96d0a558fd18a8b6f711b7449acce4c157e7343ba92dd0cf3d"}, + {file = "kiwisolver-1.4.7-cp311-cp311-win_amd64.whl", hash = "sha256:929e294c1ac1e9f615c62a4e4313ca1823ba37326c164ec720a803287c4c499b"}, + {file = "kiwisolver-1.4.7-cp311-cp311-win_arm64.whl", hash = "sha256:e33e8fbd440c917106b237ef1a2f1449dfbb9b6f6e1ce17c94cd6a1e0d438376"}, + {file = "kiwisolver-1.4.7-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:5360cc32706dab3931f738d3079652d20982511f7c0ac5711483e6eab08efff2"}, + {file = "kiwisolver-1.4.7-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:942216596dc64ddb25adb215c3c783215b23626f8d84e8eff8d6d45c3f29f75a"}, + {file = "kiwisolver-1.4.7-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:48b571ecd8bae15702e4f22d3ff6a0f13e54d3d00cd25216d5e7f658242065ee"}, + {file = "kiwisolver-1.4.7-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ad42ba922c67c5f219097b28fae965e10045ddf145d2928bfac2eb2e17673640"}, + {file = "kiwisolver-1.4.7-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:612a10bdae23404a72941a0fc8fa2660c6ea1217c4ce0dbcab8a8f6543ea9e7f"}, + {file = "kiwisolver-1.4.7-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9e838bba3a3bac0fe06d849d29772eb1afb9745a59710762e4ba3f4cb8424483"}, + {file = "kiwisolver-1.4.7-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:22f499f6157236c19f4bbbd472fa55b063db77a16cd74d49afe28992dff8c258"}, + {file = "kiwisolver-1.4.7-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:693902d433cf585133699972b6d7c42a8b9f8f826ebcaf0132ff55200afc599e"}, + {file = "kiwisolver-1.4.7-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:4e77f2126c3e0b0d055f44513ed349038ac180371ed9b52fe96a32aa071a5107"}, + {file = "kiwisolver-1.4.7-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:657a05857bda581c3656bfc3b20e353c232e9193eb167766ad2dc58b56504948"}, + {file = "kiwisolver-1.4.7-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:4bfa75a048c056a411f9705856abfc872558e33c055d80af6a380e3658766038"}, + {file = "kiwisolver-1.4.7-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:34ea1de54beef1c104422d210c47c7d2a4999bdecf42c7b5718fbe59a4cac383"}, + {file = "kiwisolver-1.4.7-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:90da3b5f694b85231cf93586dad5e90e2d71b9428f9aad96952c99055582f520"}, + {file = "kiwisolver-1.4.7-cp312-cp312-win32.whl", hash = "sha256:18e0cca3e008e17fe9b164b55735a325140a5a35faad8de92dd80265cd5eb80b"}, + {file = "kiwisolver-1.4.7-cp312-cp312-win_amd64.whl", hash = "sha256:58cb20602b18f86f83a5c87d3ee1c766a79c0d452f8def86d925e6c60fbf7bfb"}, + {file = "kiwisolver-1.4.7-cp312-cp312-win_arm64.whl", hash = "sha256:f5a8b53bdc0b3961f8b6125e198617c40aeed638b387913bf1ce78afb1b0be2a"}, + {file = "kiwisolver-1.4.7-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:2e6039dcbe79a8e0f044f1c39db1986a1b8071051efba3ee4d74f5b365f5226e"}, + {file = "kiwisolver-1.4.7-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:a1ecf0ac1c518487d9d23b1cd7139a6a65bc460cd101ab01f1be82ecf09794b6"}, + {file = "kiwisolver-1.4.7-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:7ab9ccab2b5bd5702ab0803676a580fffa2aa178c2badc5557a84cc943fcf750"}, + {file = 
"kiwisolver-1.4.7-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f816dd2277f8d63d79f9c8473a79fe54047bc0467754962840782c575522224d"}, + {file = "kiwisolver-1.4.7-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cf8bcc23ceb5a1b624572a1623b9f79d2c3b337c8c455405ef231933a10da379"}, + {file = "kiwisolver-1.4.7-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dea0bf229319828467d7fca8c7c189780aa9ff679c94539eed7532ebe33ed37c"}, + {file = "kiwisolver-1.4.7-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7c06a4c7cf15ec739ce0e5971b26c93638730090add60e183530d70848ebdd34"}, + {file = "kiwisolver-1.4.7-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:913983ad2deb14e66d83c28b632fd35ba2b825031f2fa4ca29675e665dfecbe1"}, + {file = "kiwisolver-1.4.7-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:5337ec7809bcd0f424c6b705ecf97941c46279cf5ed92311782c7c9c2026f07f"}, + {file = "kiwisolver-1.4.7-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:4c26ed10c4f6fa6ddb329a5120ba3b6db349ca192ae211e882970bfc9d91420b"}, + {file = "kiwisolver-1.4.7-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:c619b101e6de2222c1fcb0531e1b17bbffbe54294bfba43ea0d411d428618c27"}, + {file = "kiwisolver-1.4.7-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:073a36c8273647592ea332e816e75ef8da5c303236ec0167196793eb1e34657a"}, + {file = "kiwisolver-1.4.7-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:3ce6b2b0231bda412463e152fc18335ba32faf4e8c23a754ad50ffa70e4091ee"}, + {file = "kiwisolver-1.4.7-cp313-cp313-win32.whl", hash = "sha256:f4c9aee212bc89d4e13f58be11a56cc8036cabad119259d12ace14b34476fd07"}, + {file = "kiwisolver-1.4.7-cp313-cp313-win_amd64.whl", hash = "sha256:8a3ec5aa8e38fc4c8af308917ce12c536f1c88452ce554027e55b22cbbfbff76"}, + {file = "kiwisolver-1.4.7-cp313-cp313-win_arm64.whl", hash = "sha256:76c8094ac20ec259471ac53e774623eb62e6e1f56cd8690c67ce6ce4fcb05650"}, + {file = "kiwisolver-1.4.7-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:5d5abf8f8ec1f4e22882273c423e16cae834c36856cac348cfbfa68e01c40f3a"}, + {file = "kiwisolver-1.4.7-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:aeb3531b196ef6f11776c21674dba836aeea9d5bd1cf630f869e3d90b16cfade"}, + {file = "kiwisolver-1.4.7-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b7d755065e4e866a8086c9bdada157133ff466476a2ad7861828e17b6026e22c"}, + {file = "kiwisolver-1.4.7-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:08471d4d86cbaec61f86b217dd938a83d85e03785f51121e791a6e6689a3be95"}, + {file = "kiwisolver-1.4.7-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7bbfcb7165ce3d54a3dfbe731e470f65739c4c1f85bb1018ee912bae139e263b"}, + {file = "kiwisolver-1.4.7-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5d34eb8494bea691a1a450141ebb5385e4b69d38bb8403b5146ad279f4b30fa3"}, + {file = "kiwisolver-1.4.7-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:9242795d174daa40105c1d86aba618e8eab7bf96ba8c3ee614da8302a9f95503"}, + {file = "kiwisolver-1.4.7-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:a0f64a48bb81af7450e641e3fe0b0394d7381e342805479178b3d335d60ca7cf"}, + {file = "kiwisolver-1.4.7-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:8e045731a5416357638d1700927529e2b8ab304811671f665b225f8bf8d8f933"}, + {file = "kiwisolver-1.4.7-cp38-cp38-musllinux_1_2_i686.whl", hash = 
"sha256:4322872d5772cae7369f8351da1edf255a604ea7087fe295411397d0cfd9655e"}, + {file = "kiwisolver-1.4.7-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:e1631290ee9271dffe3062d2634c3ecac02c83890ada077d225e081aca8aab89"}, + {file = "kiwisolver-1.4.7-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:edcfc407e4eb17e037bca59be0e85a2031a2ac87e4fed26d3e9df88b4165f92d"}, + {file = "kiwisolver-1.4.7-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:4d05d81ecb47d11e7f8932bd8b61b720bf0b41199358f3f5e36d38e28f0532c5"}, + {file = "kiwisolver-1.4.7-cp38-cp38-win32.whl", hash = "sha256:b38ac83d5f04b15e515fd86f312479d950d05ce2368d5413d46c088dda7de90a"}, + {file = "kiwisolver-1.4.7-cp38-cp38-win_amd64.whl", hash = "sha256:d83db7cde68459fc803052a55ace60bea2bae361fc3b7a6d5da07e11954e4b09"}, + {file = "kiwisolver-1.4.7-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:3f9362ecfca44c863569d3d3c033dbe8ba452ff8eed6f6b5806382741a1334bd"}, + {file = "kiwisolver-1.4.7-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:e8df2eb9b2bac43ef8b082e06f750350fbbaf2887534a5be97f6cf07b19d9583"}, + {file = "kiwisolver-1.4.7-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f32d6edbc638cde7652bd690c3e728b25332acbadd7cad670cc4a02558d9c417"}, + {file = "kiwisolver-1.4.7-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:e2e6c39bd7b9372b0be21456caab138e8e69cc0fc1190a9dfa92bd45a1e6e904"}, + {file = "kiwisolver-1.4.7-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:dda56c24d869b1193fcc763f1284b9126550eaf84b88bbc7256e15028f19188a"}, + {file = "kiwisolver-1.4.7-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:79849239c39b5e1fd906556c474d9b0439ea6792b637511f3fe3a41158d89ca8"}, + {file = "kiwisolver-1.4.7-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5e3bc157fed2a4c02ec468de4ecd12a6e22818d4f09cde2c31ee3226ffbefab2"}, + {file = "kiwisolver-1.4.7-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3da53da805b71e41053dc670f9a820d1157aae77b6b944e08024d17bcd51ef88"}, + {file = "kiwisolver-1.4.7-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:8705f17dfeb43139a692298cb6637ee2e59c0194538153e83e9ee0c75c2eddde"}, + {file = "kiwisolver-1.4.7-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:82a5c2f4b87c26bb1a0ef3d16b5c4753434633b83d365cc0ddf2770c93829e3c"}, + {file = "kiwisolver-1.4.7-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:ce8be0466f4c0d585cdb6c1e2ed07232221df101a4c6f28821d2aa754ca2d9e2"}, + {file = "kiwisolver-1.4.7-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:409afdfe1e2e90e6ee7fc896f3df9a7fec8e793e58bfa0d052c8a82f99c37abb"}, + {file = "kiwisolver-1.4.7-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:5b9c3f4ee0b9a439d2415012bd1b1cc2df59e4d6a9939f4d669241d30b414327"}, + {file = "kiwisolver-1.4.7-cp39-cp39-win32.whl", hash = "sha256:a79ae34384df2b615eefca647a2873842ac3b596418032bef9a7283675962644"}, + {file = "kiwisolver-1.4.7-cp39-cp39-win_amd64.whl", hash = "sha256:cf0438b42121a66a3a667de17e779330fc0f20b0d97d59d2f2121e182b0505e4"}, + {file = "kiwisolver-1.4.7-cp39-cp39-win_arm64.whl", hash = "sha256:764202cc7e70f767dab49e8df52c7455e8de0df5d858fa801a11aa0d882ccf3f"}, + {file = "kiwisolver-1.4.7-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:94252291e3fe68001b1dd747b4c0b3be12582839b95ad4d1b641924d68fd4643"}, + {file = "kiwisolver-1.4.7-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:5b7dfa3b546da08a9f622bb6becdb14b3e24aaa30adba66749d38f3cc7ea9706"}, + {file = 
"kiwisolver-1.4.7-pp310-pypy310_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bd3de6481f4ed8b734da5df134cd5a6a64fe32124fe83dde1e5b5f29fe30b1e6"}, + {file = "kiwisolver-1.4.7-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a91b5f9f1205845d488c928e8570dcb62b893372f63b8b6e98b863ebd2368ff2"}, + {file = "kiwisolver-1.4.7-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:40fa14dbd66b8b8f470d5fc79c089a66185619d31645f9b0773b88b19f7223c4"}, + {file = "kiwisolver-1.4.7-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:eb542fe7933aa09d8d8f9d9097ef37532a7df6497819d16efe4359890a2f417a"}, + {file = "kiwisolver-1.4.7-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:bfa1acfa0c54932d5607e19a2c24646fb4c1ae2694437789129cf099789a3b00"}, + {file = "kiwisolver-1.4.7-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:eee3ea935c3d227d49b4eb85660ff631556841f6e567f0f7bda972df6c2c9935"}, + {file = "kiwisolver-1.4.7-pp38-pypy38_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:f3160309af4396e0ed04db259c3ccbfdc3621b5559b5453075e5de555e1f3a1b"}, + {file = "kiwisolver-1.4.7-pp38-pypy38_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:a17f6a29cf8935e587cc8a4dbfc8368c55edc645283db0ce9801016f83526c2d"}, + {file = "kiwisolver-1.4.7-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:10849fb2c1ecbfae45a693c070e0320a91b35dd4bcf58172c023b994283a124d"}, + {file = "kiwisolver-1.4.7-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:ac542bf38a8a4be2dc6b15248d36315ccc65f0743f7b1a76688ffb6b5129a5c2"}, + {file = "kiwisolver-1.4.7-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:8b01aac285f91ca889c800042c35ad3b239e704b150cfd3382adfc9dcc780e39"}, + {file = "kiwisolver-1.4.7-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:48be928f59a1f5c8207154f935334d374e79f2b5d212826307d072595ad76a2e"}, + {file = "kiwisolver-1.4.7-pp39-pypy39_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f37cfe618a117e50d8c240555331160d73d0411422b59b5ee217843d7b693608"}, + {file = "kiwisolver-1.4.7-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:599b5c873c63a1f6ed7eead644a8a380cfbdf5db91dcb6f85707aaab213b1674"}, + {file = "kiwisolver-1.4.7-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:801fa7802e5cfabe3ab0c81a34c323a319b097dfb5004be950482d882f3d7225"}, + {file = "kiwisolver-1.4.7-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:0c6c43471bc764fad4bc99c5c2d6d16a676b1abf844ca7c8702bdae92df01ee0"}, + {file = "kiwisolver-1.4.7.tar.gz", hash = "sha256:9893ff81bd7107f7b685d3017cc6583daadb4fc26e4a888350df530e41980a60"}, ] [[package]] @@ -1744,13 +1877,13 @@ testing = ["pytest"] [[package]] name = "markdown" -version = "3.6" +version = "3.7" description = "Python implementation of John Gruber's Markdown." 
optional = true python-versions = ">=3.8" files = [ - {file = "Markdown-3.6-py3-none-any.whl", hash = "sha256:48f276f4d8cfb8ce6527c8f79e2ee29708508bf4d40aa410fbc3b4ee832c850f"}, - {file = "Markdown-3.6.tar.gz", hash = "sha256:ed4f41f6daecbeeb96e576ce414c41d2d876daa9a16cb35fa8ed8c2ddfad0224"}, + {file = "Markdown-3.7-py3-none-any.whl", hash = "sha256:7eb6df5690b81a1d7942992c97fad2938e956e79df20cbc6186e9c3a77b1c803"}, + {file = "markdown-3.7.tar.gz", hash = "sha256:2ae2471477cfd02dbbf038d5d9bc226d40def84b4fe2986e49b59b6b472bbed2"}, ] [package.extras] @@ -1761,7 +1894,7 @@ testing = ["coverage", "pyyaml"] name = "markupsafe" version = "2.1.5" description = "Safely add untrusted strings to HTML/XML markup." -optional = false +optional = true python-versions = ">=3.7" files = [ {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a17a92de5231666cfbe003f0e4b9b3a7ae3afb1ec2845aadc2bacc93ff85febc"}, @@ -1828,13 +1961,13 @@ files = [ [[package]] name = "mashumaro" -version = "3.13" +version = "3.13.1" description = "Fast and well tested serialization library" optional = false python-versions = ">=3.8" files = [ - {file = "mashumaro-3.13-py3-none-any.whl", hash = "sha256:59457aebb90e85b8b195e5ccc2d46b608f2709bedb679f9d19a952f9bb1fb6ec"}, - {file = "mashumaro-3.13.tar.gz", hash = "sha256:636c31afe39d991efe4cad269fef0c8ba408d87581118784d2a47924c2073faa"}, + {file = "mashumaro-3.13.1-py3-none-any.whl", hash = "sha256:ad0a162b8f4ea232dadd2891d77ff20165b855b9d84610f36ac84462d4576aa0"}, + {file = "mashumaro-3.13.1.tar.gz", hash = "sha256:169f0290253b3e6077bcb39c14a9dd0791a3fdedd9e286e536ae561d4ff1975b"}, ] [package.dependencies] @@ -1848,40 +1981,51 @@ yaml = ["pyyaml (>=3.13)"] [[package]] name = "matplotlib" -version = "3.9.0" +version = "3.9.2" description = "Python plotting package" optional = true python-versions = ">=3.9" files = [ - {file = "matplotlib-3.9.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:2bcee1dffaf60fe7656183ac2190bd630842ff87b3153afb3e384d966b57fe56"}, - {file = "matplotlib-3.9.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:3f988bafb0fa39d1074ddd5bacd958c853e11def40800c5824556eb630f94d3b"}, - {file = "matplotlib-3.9.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fe428e191ea016bb278758c8ee82a8129c51d81d8c4bc0846c09e7e8e9057241"}, - {file = "matplotlib-3.9.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eaf3978060a106fab40c328778b148f590e27f6fa3cd15a19d6892575bce387d"}, - {file = "matplotlib-3.9.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:2e7f03e5cbbfacdd48c8ea394d365d91ee8f3cae7e6ec611409927b5ed997ee4"}, - {file = "matplotlib-3.9.0-cp310-cp310-win_amd64.whl", hash = "sha256:13beb4840317d45ffd4183a778685e215939be7b08616f431c7795276e067463"}, - {file = "matplotlib-3.9.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:063af8587fceeac13b0936c42a2b6c732c2ab1c98d38abc3337e430e1ff75e38"}, - {file = "matplotlib-3.9.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:9a2fa6d899e17ddca6d6526cf6e7ba677738bf2a6a9590d702c277204a7c6152"}, - {file = "matplotlib-3.9.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:550cdda3adbd596078cca7d13ed50b77879104e2e46392dcd7c75259d8f00e85"}, - {file = "matplotlib-3.9.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:76cce0f31b351e3551d1f3779420cf8f6ec0d4a8cf9c0237a3b549fd28eb4abb"}, - {file = "matplotlib-3.9.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = 
"sha256:c53aeb514ccbbcbab55a27f912d79ea30ab21ee0531ee2c09f13800efb272674"}, - {file = "matplotlib-3.9.0-cp311-cp311-win_amd64.whl", hash = "sha256:a5be985db2596d761cdf0c2eaf52396f26e6a64ab46bd8cd810c48972349d1be"}, - {file = "matplotlib-3.9.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:c79f3a585f1368da6049318bdf1f85568d8d04b2e89fc24b7e02cc9b62017382"}, - {file = "matplotlib-3.9.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:bdd1ecbe268eb3e7653e04f451635f0fb0f77f07fd070242b44c076c9106da84"}, - {file = "matplotlib-3.9.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d38e85a1a6d732f645f1403ce5e6727fd9418cd4574521d5803d3d94911038e5"}, - {file = "matplotlib-3.9.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0a490715b3b9984fa609116481b22178348c1a220a4499cda79132000a79b4db"}, - {file = "matplotlib-3.9.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8146ce83cbc5dc71c223a74a1996d446cd35cfb6a04b683e1446b7e6c73603b7"}, - {file = "matplotlib-3.9.0-cp312-cp312-win_amd64.whl", hash = "sha256:d91a4ffc587bacf5c4ce4ecfe4bcd23a4b675e76315f2866e588686cc97fccdf"}, - {file = "matplotlib-3.9.0-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:616fabf4981a3b3c5a15cd95eba359c8489c4e20e03717aea42866d8d0465956"}, - {file = "matplotlib-3.9.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:cd53c79fd02f1c1808d2cfc87dd3cf4dbc63c5244a58ee7944497107469c8d8a"}, - {file = "matplotlib-3.9.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:06a478f0d67636554fa78558cfbcd7b9dba85b51f5c3b5a0c9be49010cf5f321"}, - {file = "matplotlib-3.9.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:81c40af649d19c85f8073e25e5806926986806fa6d54be506fbf02aef47d5a89"}, - {file = "matplotlib-3.9.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:52146fc3bd7813cc784562cb93a15788be0b2875c4655e2cc6ea646bfa30344b"}, - {file = "matplotlib-3.9.0-cp39-cp39-win_amd64.whl", hash = "sha256:0fc51eaa5262553868461c083d9adadb11a6017315f3a757fc45ec6ec5f02888"}, - {file = "matplotlib-3.9.0-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:bd4f2831168afac55b881db82a7730992aa41c4f007f1913465fb182d6fb20c0"}, - {file = "matplotlib-3.9.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:290d304e59be2b33ef5c2d768d0237f5bd132986bdcc66f80bc9bcc300066a03"}, - {file = "matplotlib-3.9.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7ff2e239c26be4f24bfa45860c20ffccd118d270c5b5d081fa4ea409b5469fcd"}, - {file = "matplotlib-3.9.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:af4001b7cae70f7eaacfb063db605280058246de590fa7874f00f62259f2df7e"}, - {file = "matplotlib-3.9.0.tar.gz", hash = "sha256:e6d29ea6c19e34b30fb7d88b7081f869a03014f66fe06d62cc77d5a6ea88ed7a"}, + {file = "matplotlib-3.9.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:9d78bbc0cbc891ad55b4f39a48c22182e9bdaea7fc0e5dbd364f49f729ca1bbb"}, + {file = "matplotlib-3.9.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c375cc72229614632c87355366bdf2570c2dac01ac66b8ad048d2dabadf2d0d4"}, + {file = "matplotlib-3.9.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1d94ff717eb2bd0b58fe66380bd8b14ac35f48a98e7c6765117fe67fb7684e64"}, + {file = "matplotlib-3.9.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ab68d50c06938ef28681073327795c5db99bb4666214d2d5f880ed11aeaded66"}, + {file = "matplotlib-3.9.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = 
"sha256:65aacf95b62272d568044531e41de26285d54aec8cb859031f511f84bd8b495a"}, + {file = "matplotlib-3.9.2-cp310-cp310-win_amd64.whl", hash = "sha256:3fd595f34aa8a55b7fc8bf9ebea8aa665a84c82d275190a61118d33fbc82ccae"}, + {file = "matplotlib-3.9.2-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:d8dd059447824eec055e829258ab092b56bb0579fc3164fa09c64f3acd478772"}, + {file = "matplotlib-3.9.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:c797dac8bb9c7a3fd3382b16fe8f215b4cf0f22adccea36f1545a6d7be310b41"}, + {file = "matplotlib-3.9.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d719465db13267bcef19ea8954a971db03b9f48b4647e3860e4bc8e6ed86610f"}, + {file = "matplotlib-3.9.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8912ef7c2362f7193b5819d17dae8629b34a95c58603d781329712ada83f9447"}, + {file = "matplotlib-3.9.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:7741f26a58a240f43bee74965c4882b6c93df3e7eb3de160126d8c8f53a6ae6e"}, + {file = "matplotlib-3.9.2-cp311-cp311-win_amd64.whl", hash = "sha256:ae82a14dab96fbfad7965403c643cafe6515e386de723e498cf3eeb1e0b70cc7"}, + {file = "matplotlib-3.9.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:ac43031375a65c3196bee99f6001e7fa5bdfb00ddf43379d3c0609bdca042df9"}, + {file = "matplotlib-3.9.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:be0fc24a5e4531ae4d8e858a1a548c1fe33b176bb13eff7f9d0d38ce5112a27d"}, + {file = "matplotlib-3.9.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bf81de2926c2db243c9b2cbc3917619a0fc85796c6ba4e58f541df814bbf83c7"}, + {file = "matplotlib-3.9.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f6ee45bc4245533111ced13f1f2cace1e7f89d1c793390392a80c139d6cf0e6c"}, + {file = "matplotlib-3.9.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:306c8dfc73239f0e72ac50e5a9cf19cc4e8e331dd0c54f5e69ca8758550f1e1e"}, + {file = "matplotlib-3.9.2-cp312-cp312-win_amd64.whl", hash = "sha256:5413401594cfaff0052f9d8b1aafc6d305b4bd7c4331dccd18f561ff7e1d3bd3"}, + {file = "matplotlib-3.9.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:18128cc08f0d3cfff10b76baa2f296fc28c4607368a8402de61bb3f2eb33c7d9"}, + {file = "matplotlib-3.9.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:4876d7d40219e8ae8bb70f9263bcbe5714415acfdf781086601211335e24f8aa"}, + {file = "matplotlib-3.9.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6d9f07a80deab4bb0b82858a9e9ad53d1382fd122be8cde11080f4e7dfedb38b"}, + {file = "matplotlib-3.9.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f7c0410f181a531ec4e93bbc27692f2c71a15c2da16766f5ba9761e7ae518413"}, + {file = "matplotlib-3.9.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:909645cce2dc28b735674ce0931a4ac94e12f5b13f6bb0b5a5e65e7cea2c192b"}, + {file = "matplotlib-3.9.2-cp313-cp313-win_amd64.whl", hash = "sha256:f32c7410c7f246838a77d6d1eff0c0f87f3cb0e7c4247aebea71a6d5a68cab49"}, + {file = "matplotlib-3.9.2-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:37e51dd1c2db16ede9cfd7b5cabdfc818b2c6397c83f8b10e0e797501c963a03"}, + {file = "matplotlib-3.9.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:b82c5045cebcecd8496a4d694d43f9cc84aeeb49fe2133e036b207abe73f4d30"}, + {file = "matplotlib-3.9.2-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f053c40f94bc51bc03832a41b4f153d83f2062d88c72b5e79997072594e97e51"}, + {file = 
"matplotlib-3.9.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dbe196377a8248972f5cede786d4c5508ed5f5ca4a1e09b44bda889958b33f8c"}, + {file = "matplotlib-3.9.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:5816b1e1fe8c192cbc013f8f3e3368ac56fbecf02fb41b8f8559303f24c5015e"}, + {file = "matplotlib-3.9.2-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:cef2a73d06601437be399908cf13aee74e86932a5ccc6ccdf173408ebc5f6bb2"}, + {file = "matplotlib-3.9.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e0830e188029c14e891fadd99702fd90d317df294c3298aad682739c5533721a"}, + {file = "matplotlib-3.9.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:03ba9c1299c920964e8d3857ba27173b4dbb51ca4bab47ffc2c2ba0eb5e2cbc5"}, + {file = "matplotlib-3.9.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1cd93b91ab47a3616b4d3c42b52f8363b88ca021e340804c6ab2536344fad9ca"}, + {file = "matplotlib-3.9.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:6d1ce5ed2aefcdce11904fc5bbea7d9c21fff3d5f543841edf3dea84451a09ea"}, + {file = "matplotlib-3.9.2-cp39-cp39-win_amd64.whl", hash = "sha256:b2696efdc08648536efd4e1601b5fd491fd47f4db97a5fbfd175549a7365c1b2"}, + {file = "matplotlib-3.9.2-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:d52a3b618cb1cbb769ce2ee1dcdb333c3ab6e823944e9a2d36e37253815f9556"}, + {file = "matplotlib-3.9.2-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:039082812cacd6c6bec8e17a9c1e6baca230d4116d522e81e1f63a74d01d2e21"}, + {file = "matplotlib-3.9.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6758baae2ed64f2331d4fd19be38b7b4eae3ecec210049a26b6a4f3ae1c85dcc"}, + {file = "matplotlib-3.9.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:050598c2b29e0b9832cde72bcf97627bf00262adbc4a54e2b856426bb2ef0697"}, + {file = "matplotlib-3.9.2.tar.gz", hash = "sha256:96ab43906269ca64a6366934106fa01534454a69e471b7bf3d79083981aaab92"}, ] [package.dependencies] @@ -1900,48 +2044,34 @@ dev = ["meson-python (>=0.13.1)", "numpy (>=1.25)", "pybind11 (>=2.6)", "setupto [[package]] name = "mlflow" -version = "2.13.2" +version = "2.16.0" description = "MLflow is an open source platform for the complete machine learning lifecycle" optional = true python-versions = ">=3.8" files = [ - {file = "mlflow-2.13.2-py3-none-any.whl", hash = "sha256:728e130085e296780f02c0c57cf085aca39aa40f11fb8deeca99ac625ceaf4db"}, - {file = "mlflow-2.13.2.tar.gz", hash = "sha256:8f1cf42a24aee26e527a86ec1c5265119d17a97528e729d4a96e781d37d50a2d"}, + {file = "mlflow-2.16.0-py3-none-any.whl", hash = "sha256:9f27ef6ae7a82d7ecd67b6b4a4d50637a5e8160639115570fbc689758f9c0b54"}, + {file = "mlflow-2.16.0.tar.gz", hash = "sha256:82ea1a2e800f404f1586783b7636091c0a5754cf9ff45afeadf3a5e467f5168f"}, ] [package.dependencies] alembic = "<1.10.0 || >1.10.0,<2" -cachetools = ">=5.0.0,<6" -click = ">=7.0,<9" -cloudpickle = "<4" docker = ">=4.0.0,<8" -entrypoints = "<1" Flask = "<4" -gitpython = ">=3.1.9,<4" graphene = "<4" -gunicorn = {version = "<23", markers = "platform_system != \"Windows\""} -importlib-metadata = ">=3.7.0,<4.7.0 || >4.7.0,<8" +gunicorn = {version = "<24", markers = "platform_system != \"Windows\""} Jinja2 = [ {version = ">=2.11,<4", markers = "platform_system != \"Windows\""}, {version = ">=3.0,<4", markers = "platform_system == \"Windows\""}, ] markdown = ">=3.3,<4" matplotlib = "<4" -numpy = "<2" -opentelemetry-api = ">=1.0.0,<3" -opentelemetry-sdk = ">=1.0.0,<3" -packaging = "<25" +mlflow-skinny = "2.16.0" 
+numpy = "<3" pandas = "<3" -protobuf = ">=3.12.0,<5" -pyarrow = ">=4.0.0,<16" -pytz = "<2025" -pyyaml = ">=5.1,<7" -querystring-parser = "<2" -requests = ">=2.17.3,<3" +pyarrow = ">=4.0.0,<18" scikit-learn = "<2" scipy = "<2" sqlalchemy = ">=1.4.0,<3" -sqlparse = ">=0.4.0,<1" waitress = {version = "<4", markers = "platform_system == \"Windows\""} [package.extras] @@ -1951,156 +2081,194 @@ extras = ["azureml-core (>=1.2.0)", "boto3", "botocore", "google-cloud-storage ( gateway = ["aiohttp (<4)", "boto3 (>=1.28.56,<2)", "fastapi (<1)", "pydantic (>=1.0,<3)", "slowapi (>=0.1.9,<1)", "tiktoken (<1)", "uvicorn[standard] (<1)", "watchfiles (<1)"] genai = ["aiohttp (<4)", "boto3 (>=1.28.56,<2)", "fastapi (<1)", "pydantic (>=1.0,<3)", "slowapi (>=0.1.9,<1)", "tiktoken (<1)", "uvicorn[standard] (<1)", "watchfiles (<1)"] jfrog = ["mlflow-jfrog-plugin"] -langchain = ["langchain (>=0.1.4,<=0.2.1)"] +langchain = ["langchain (>=0.1.0,<=0.2.15)"] +sqlserver = ["mlflow-dbstore"] +xethub = ["mlflow-xethub"] + +[[package]] +name = "mlflow-skinny" +version = "2.16.0" +description = "MLflow is an open source platform for the complete machine learning lifecycle" +optional = true +python-versions = ">=3.8" +files = [ + {file = "mlflow_skinny-2.16.0-py3-none-any.whl", hash = "sha256:c55541f50efd0f6637377b10e8a654847a3fcd815b8680a95f02e0ca6bd7700c"}, + {file = "mlflow_skinny-2.16.0.tar.gz", hash = "sha256:9b823173063743783b4e7b6c52bdadcc7d9dab48eb883ac454c0d56609df6b2d"}, +] + +[package.dependencies] +cachetools = ">=5.0.0,<6" +click = ">=7.0,<9" +cloudpickle = "<4" +databricks-sdk = ">=0.20.0,<1" +gitpython = ">=3.1.9,<4" +importlib-metadata = ">=3.7.0,<4.7.0 || >4.7.0,<9" +opentelemetry-api = ">=1.9.0,<3" +opentelemetry-sdk = ">=1.9.0,<3" +packaging = "<25" +protobuf = ">=3.12.0,<6" +pyyaml = ">=5.1,<7" +requests = ">=2.17.3,<3" +sqlparse = ">=0.4.0,<1" + +[package.extras] +aliyun-oss = ["aliyunstoreplugin"] +databricks = ["azure-storage-file-datalake (>12)", "boto3 (>1)", "botocore", "google-cloud-storage (>=1.30.0)"] +extras = ["azureml-core (>=1.2.0)", "boto3", "botocore", "google-cloud-storage (>=1.30.0)", "kubernetes", "mlserver (>=1.2.0,!=1.3.1,<1.4.0)", "mlserver-mlflow (>=1.2.0,!=1.3.1,<1.4.0)", "prometheus-flask-exporter", "pyarrow", "pysftp", "requests-auth-aws-sigv4", "virtualenv"] +gateway = ["aiohttp (<4)", "boto3 (>=1.28.56,<2)", "fastapi (<1)", "pydantic (>=1.0,<3)", "slowapi (>=0.1.9,<1)", "tiktoken (<1)", "uvicorn[standard] (<1)", "watchfiles (<1)"] +genai = ["aiohttp (<4)", "boto3 (>=1.28.56,<2)", "fastapi (<1)", "pydantic (>=1.0,<3)", "slowapi (>=0.1.9,<1)", "tiktoken (<1)", "uvicorn[standard] (<1)", "watchfiles (<1)"] +jfrog = ["mlflow-jfrog-plugin"] +langchain = ["langchain (>=0.1.0,<=0.2.15)"] sqlserver = ["mlflow-dbstore"] xethub = ["mlflow-xethub"] [[package]] name = "msal" -version = "1.28.1" +version = "1.31.0" description = "The Microsoft Authentication Library (MSAL) for Python library enables your app to access the Microsoft Cloud by supporting authentication of users with Microsoft Azure Active Directory accounts (AAD) and Microsoft Accounts (MSA) using industry standard OAuth2 and OpenID Connect." 
optional = true python-versions = ">=3.7" files = [ - {file = "msal-1.28.1-py3-none-any.whl", hash = "sha256:563c2d70de77a2ca9786aab84cb4e133a38a6897e6676774edc23d610bfc9e7b"}, - {file = "msal-1.28.1.tar.gz", hash = "sha256:d72bbfe2d5c2f2555f4bc6205be4450ddfd12976610dd9a16a9ab0f05c68b64d"}, + {file = "msal-1.31.0-py3-none-any.whl", hash = "sha256:96bc37cff82ebe4b160d5fc0f1196f6ca8b50e274ecd0ec5bf69c438514086e7"}, + {file = "msal-1.31.0.tar.gz", hash = "sha256:2c4f189cf9cc8f00c80045f66d39b7c0f3ed45873fd3d1f2af9f22db2e12ff4b"}, ] [package.dependencies] -cryptography = ">=2.5,<45" +cryptography = ">=2.5,<46" PyJWT = {version = ">=1.0.0,<3", extras = ["crypto"]} requests = ">=2.0.0,<3" [package.extras] -broker = ["pymsalruntime (>=0.13.2,<0.17)"] +broker = ["pymsalruntime (>=0.14,<0.18)", "pymsalruntime (>=0.17,<0.18)"] [[package]] name = "msal-extensions" -version = "1.1.0" +version = "1.2.0" description = "Microsoft Authentication Library extensions (MSAL EX) provides a persistence API that can save your data on disk, encrypted on Windows, macOS and Linux. Concurrent data access will be coordinated by a file lock mechanism." optional = true python-versions = ">=3.7" files = [ - {file = "msal-extensions-1.1.0.tar.gz", hash = "sha256:6ab357867062db7b253d0bd2df6d411c7891a0ee7308d54d1e4317c1d1c54252"}, - {file = "msal_extensions-1.1.0-py3-none-any.whl", hash = "sha256:01be9711b4c0b1a151450068eeb2c4f0997df3bba085ac299de3a66f585e382f"}, + {file = "msal_extensions-1.2.0-py3-none-any.whl", hash = "sha256:cf5ba83a2113fa6dc011a254a72f1c223c88d7dfad74cc30617c4679a417704d"}, + {file = "msal_extensions-1.2.0.tar.gz", hash = "sha256:6f41b320bfd2933d631a215c91ca0dd3e67d84bd1a2f50ce917d5874ec646bef"}, ] [package.dependencies] -msal = ">=0.4.1,<2.0.0" -packaging = "*" -portalocker = [ - {version = ">=1.0,<3", markers = "platform_system != \"Windows\""}, - {version = ">=1.6,<3", markers = "platform_system == \"Windows\""}, -] +msal = ">=1.29,<2" +portalocker = ">=1.4,<3" [[package]] name = "multidict" -version = "6.0.5" +version = "6.1.0" description = "multidict implementation" optional = true -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "multidict-6.0.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:228b644ae063c10e7f324ab1ab6b548bdf6f8b47f3ec234fef1093bc2735e5f9"}, - {file = "multidict-6.0.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:896ebdcf62683551312c30e20614305f53125750803b614e9e6ce74a96232604"}, - {file = "multidict-6.0.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:411bf8515f3be9813d06004cac41ccf7d1cd46dfe233705933dd163b60e37600"}, - {file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1d147090048129ce3c453f0292e7697d333db95e52616b3793922945804a433c"}, - {file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:215ed703caf15f578dca76ee6f6b21b7603791ae090fbf1ef9d865571039ade5"}, - {file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7c6390cf87ff6234643428991b7359b5f59cc15155695deb4eda5c777d2b880f"}, - {file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21fd81c4ebdb4f214161be351eb5bcf385426bf023041da2fd9e60681f3cebae"}, - {file = "multidict-6.0.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3cc2ad10255f903656017363cd59436f2111443a76f996584d1077e43ee51182"}, - {file = 
"multidict-6.0.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:6939c95381e003f54cd4c5516740faba40cf5ad3eeff460c3ad1d3e0ea2549bf"}, - {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:220dd781e3f7af2c2c1053da9fa96d9cf3072ca58f057f4c5adaaa1cab8fc442"}, - {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:766c8f7511df26d9f11cd3a8be623e59cca73d44643abab3f8c8c07620524e4a"}, - {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:fe5d7785250541f7f5019ab9cba2c71169dc7d74d0f45253f8313f436458a4ef"}, - {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c1c1496e73051918fcd4f58ff2e0f2f3066d1c76a0c6aeffd9b45d53243702cc"}, - {file = "multidict-6.0.5-cp310-cp310-win32.whl", hash = "sha256:7afcdd1fc07befad18ec4523a782cde4e93e0a2bf71239894b8d61ee578c1319"}, - {file = "multidict-6.0.5-cp310-cp310-win_amd64.whl", hash = "sha256:99f60d34c048c5c2fabc766108c103612344c46e35d4ed9ae0673d33c8fb26e8"}, - {file = "multidict-6.0.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:f285e862d2f153a70586579c15c44656f888806ed0e5b56b64489afe4a2dbfba"}, - {file = "multidict-6.0.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:53689bb4e102200a4fafa9de9c7c3c212ab40a7ab2c8e474491914d2305f187e"}, - {file = "multidict-6.0.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:612d1156111ae11d14afaf3a0669ebf6c170dbb735e510a7438ffe2369a847fd"}, - {file = "multidict-6.0.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7be7047bd08accdb7487737631d25735c9a04327911de89ff1b26b81745bd4e3"}, - {file = "multidict-6.0.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:de170c7b4fe6859beb8926e84f7d7d6c693dfe8e27372ce3b76f01c46e489fcf"}, - {file = "multidict-6.0.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:04bde7a7b3de05732a4eb39c94574db1ec99abb56162d6c520ad26f83267de29"}, - {file = "multidict-6.0.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:85f67aed7bb647f93e7520633d8f51d3cbc6ab96957c71272b286b2f30dc70ed"}, - {file = "multidict-6.0.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:425bf820055005bfc8aa9a0b99ccb52cc2f4070153e34b701acc98d201693733"}, - {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d3eb1ceec286eba8220c26f3b0096cf189aea7057b6e7b7a2e60ed36b373b77f"}, - {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:7901c05ead4b3fb75113fb1dd33eb1253c6d3ee37ce93305acd9d38e0b5f21a4"}, - {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:e0e79d91e71b9867c73323a3444724d496c037e578a0e1755ae159ba14f4f3d1"}, - {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:29bfeb0dff5cb5fdab2023a7a9947b3b4af63e9c47cae2a10ad58394b517fddc"}, - {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e030047e85cbcedbfc073f71836d62dd5dadfbe7531cae27789ff66bc551bd5e"}, - {file = "multidict-6.0.5-cp311-cp311-win32.whl", hash = "sha256:2f4848aa3baa109e6ab81fe2006c77ed4d3cd1e0ac2c1fbddb7b1277c168788c"}, - {file = "multidict-6.0.5-cp311-cp311-win_amd64.whl", hash = "sha256:2faa5ae9376faba05f630d7e5e6be05be22913782b927b19d12b8145968a85ea"}, - {file = "multidict-6.0.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:51d035609b86722963404f711db441cf7134f1889107fb171a970c9701f92e1e"}, - {file = 
"multidict-6.0.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:cbebcd5bcaf1eaf302617c114aa67569dd3f090dd0ce8ba9e35e9985b41ac35b"}, - {file = "multidict-6.0.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2ffc42c922dbfddb4a4c3b438eb056828719f07608af27d163191cb3e3aa6cc5"}, - {file = "multidict-6.0.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ceb3b7e6a0135e092de86110c5a74e46bda4bd4fbfeeb3a3bcec79c0f861e450"}, - {file = "multidict-6.0.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:79660376075cfd4b2c80f295528aa6beb2058fd289f4c9252f986751a4cd0496"}, - {file = "multidict-6.0.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e4428b29611e989719874670fd152b6625500ad6c686d464e99f5aaeeaca175a"}, - {file = "multidict-6.0.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d84a5c3a5f7ce6db1f999fb9438f686bc2e09d38143f2d93d8406ed2dd6b9226"}, - {file = "multidict-6.0.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:76c0de87358b192de7ea9649beb392f107dcad9ad27276324c24c91774ca5271"}, - {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:79a6d2ba910adb2cbafc95dad936f8b9386e77c84c35bc0add315b856d7c3abb"}, - {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:92d16a3e275e38293623ebf639c471d3e03bb20b8ebb845237e0d3664914caef"}, - {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:fb616be3538599e797a2017cccca78e354c767165e8858ab5116813146041a24"}, - {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:14c2976aa9038c2629efa2c148022ed5eb4cb939e15ec7aace7ca932f48f9ba6"}, - {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:435a0984199d81ca178b9ae2c26ec3d49692d20ee29bc4c11a2a8d4514c67eda"}, - {file = "multidict-6.0.5-cp312-cp312-win32.whl", hash = "sha256:9fe7b0653ba3d9d65cbe7698cca585bf0f8c83dbbcc710db9c90f478e175f2d5"}, - {file = "multidict-6.0.5-cp312-cp312-win_amd64.whl", hash = "sha256:01265f5e40f5a17f8241d52656ed27192be03bfa8764d88e8220141d1e4b3556"}, - {file = "multidict-6.0.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:19fe01cea168585ba0f678cad6f58133db2aa14eccaf22f88e4a6dccadfad8b3"}, - {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6bf7a982604375a8d49b6cc1b781c1747f243d91b81035a9b43a2126c04766f5"}, - {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:107c0cdefe028703fb5dafe640a409cb146d44a6ae201e55b35a4af8e95457dd"}, - {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:403c0911cd5d5791605808b942c88a8155c2592e05332d2bf78f18697a5fa15e"}, - {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aeaf541ddbad8311a87dd695ed9642401131ea39ad7bc8cf3ef3967fd093b626"}, - {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e4972624066095e52b569e02b5ca97dbd7a7ddd4294bf4e7247d52635630dd83"}, - {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d946b0a9eb8aaa590df1fe082cee553ceab173e6cb5b03239716338629c50c7a"}, - {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:b55358304d7a73d7bdf5de62494aaf70bd33015831ffd98bc498b433dfe5b10c"}, - {file = 
"multidict-6.0.5-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:a3145cb08d8625b2d3fee1b2d596a8766352979c9bffe5d7833e0503d0f0b5e5"}, - {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:d65f25da8e248202bd47445cec78e0025c0fe7582b23ec69c3b27a640dd7a8e3"}, - {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:c9bf56195c6bbd293340ea82eafd0071cb3d450c703d2c93afb89f93b8386ccc"}, - {file = "multidict-6.0.5-cp37-cp37m-win32.whl", hash = "sha256:69db76c09796b313331bb7048229e3bee7928eb62bab5e071e9f7fcc4879caee"}, - {file = "multidict-6.0.5-cp37-cp37m-win_amd64.whl", hash = "sha256:fce28b3c8a81b6b36dfac9feb1de115bab619b3c13905b419ec71d03a3fc1423"}, - {file = "multidict-6.0.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:76f067f5121dcecf0d63a67f29080b26c43c71a98b10c701b0677e4a065fbd54"}, - {file = "multidict-6.0.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b82cc8ace10ab5bd93235dfaab2021c70637005e1ac787031f4d1da63d493c1d"}, - {file = "multidict-6.0.5-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:5cb241881eefd96b46f89b1a056187ea8e9ba14ab88ba632e68d7a2ecb7aadf7"}, - {file = "multidict-6.0.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e8e94e6912639a02ce173341ff62cc1201232ab86b8a8fcc05572741a5dc7d93"}, - {file = "multidict-6.0.5-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:09a892e4a9fb47331da06948690ae38eaa2426de97b4ccbfafbdcbe5c8f37ff8"}, - {file = "multidict-6.0.5-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:55205d03e8a598cfc688c71ca8ea5f66447164efff8869517f175ea632c7cb7b"}, - {file = "multidict-6.0.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:37b15024f864916b4951adb95d3a80c9431299080341ab9544ed148091b53f50"}, - {file = "multidict-6.0.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f2a1dee728b52b33eebff5072817176c172050d44d67befd681609b4746e1c2e"}, - {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:edd08e6f2f1a390bf137080507e44ccc086353c8e98c657e666c017718561b89"}, - {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:60d698e8179a42ec85172d12f50b1668254628425a6bd611aba022257cac1386"}, - {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:3d25f19500588cbc47dc19081d78131c32637c25804df8414463ec908631e453"}, - {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:4cc0ef8b962ac7a5e62b9e826bd0cd5040e7d401bc45a6835910ed699037a461"}, - {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:eca2e9d0cc5a889850e9bbd68e98314ada174ff6ccd1129500103df7a94a7a44"}, - {file = "multidict-6.0.5-cp38-cp38-win32.whl", hash = "sha256:4a6a4f196f08c58c59e0b8ef8ec441d12aee4125a7d4f4fef000ccb22f8d7241"}, - {file = "multidict-6.0.5-cp38-cp38-win_amd64.whl", hash = "sha256:0275e35209c27a3f7951e1ce7aaf93ce0d163b28948444bec61dd7badc6d3f8c"}, - {file = "multidict-6.0.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:e7be68734bd8c9a513f2b0cfd508802d6609da068f40dc57d4e3494cefc92929"}, - {file = "multidict-6.0.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:1d9ea7a7e779d7a3561aade7d596649fbecfa5c08a7674b11b423783217933f9"}, - {file = "multidict-6.0.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ea1456df2a27c73ce51120fa2f519f1bea2f4a03a917f4a43c8707cf4cbbae1a"}, - {file = "multidict-6.0.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:cf590b134eb70629e350691ecca88eac3e3b8b3c86992042fb82e3cb1830d5e1"}, - {file = "multidict-6.0.5-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5c0631926c4f58e9a5ccce555ad7747d9a9f8b10619621f22f9635f069f6233e"}, - {file = "multidict-6.0.5-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dce1c6912ab9ff5f179eaf6efe7365c1f425ed690b03341911bf4939ef2f3046"}, - {file = "multidict-6.0.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0868d64af83169e4d4152ec612637a543f7a336e4a307b119e98042e852ad9c"}, - {file = "multidict-6.0.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:141b43360bfd3bdd75f15ed811850763555a251e38b2405967f8e25fb43f7d40"}, - {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:7df704ca8cf4a073334e0427ae2345323613e4df18cc224f647f251e5e75a527"}, - {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:6214c5a5571802c33f80e6c84713b2c79e024995b9c5897f794b43e714daeec9"}, - {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:cd6c8fca38178e12c00418de737aef1261576bd1b6e8c6134d3e729a4e858b38"}, - {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:e02021f87a5b6932fa6ce916ca004c4d441509d33bbdbeca70d05dff5e9d2479"}, - {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ebd8d160f91a764652d3e51ce0d2956b38efe37c9231cd82cfc0bed2e40b581c"}, - {file = "multidict-6.0.5-cp39-cp39-win32.whl", hash = "sha256:04da1bb8c8dbadf2a18a452639771951c662c5ad03aefe4884775454be322c9b"}, - {file = "multidict-6.0.5-cp39-cp39-win_amd64.whl", hash = "sha256:d6f6d4f185481c9669b9447bf9d9cf3b95a0e9df9d169bbc17e363b7d5487755"}, - {file = "multidict-6.0.5-py3-none-any.whl", hash = "sha256:0d63c74e3d7ab26de115c49bffc92cc77ed23395303d496eae515d4204a625e7"}, - {file = "multidict-6.0.5.tar.gz", hash = "sha256:f7e301075edaf50500f0b341543c41194d8df3ae5caf4702f2095f3ca73dd8da"}, -] + {file = "multidict-6.1.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:3380252550e372e8511d49481bd836264c009adb826b23fefcc5dd3c69692f60"}, + {file = "multidict-6.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:99f826cbf970077383d7de805c0681799491cb939c25450b9b5b3ced03ca99f1"}, + {file = "multidict-6.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a114d03b938376557927ab23f1e950827c3b893ccb94b62fd95d430fd0e5cf53"}, + {file = "multidict-6.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b1c416351ee6271b2f49b56ad7f308072f6f44b37118d69c2cad94f3fa8a40d5"}, + {file = "multidict-6.1.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6b5d83030255983181005e6cfbac1617ce9746b219bc2aad52201ad121226581"}, + {file = "multidict-6.1.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3e97b5e938051226dc025ec80980c285b053ffb1e25a3db2a3aa3bc046bf7f56"}, + {file = "multidict-6.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d618649d4e70ac6efcbba75be98b26ef5078faad23592f9b51ca492953012429"}, + {file = "multidict-6.1.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:10524ebd769727ac77ef2278390fb0068d83f3acb7773792a5080f2b0abf7748"}, + {file = "multidict-6.1.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:ff3827aef427c89a25cc96ded1759271a93603aba9fb977a6d264648ebf989db"}, + {file = 
"multidict-6.1.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:06809f4f0f7ab7ea2cabf9caca7d79c22c0758b58a71f9d32943ae13c7ace056"}, + {file = "multidict-6.1.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:f179dee3b863ab1c59580ff60f9d99f632f34ccb38bf67a33ec6b3ecadd0fd76"}, + {file = "multidict-6.1.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:aaed8b0562be4a0876ee3b6946f6869b7bcdb571a5d1496683505944e268b160"}, + {file = "multidict-6.1.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:3c8b88a2ccf5493b6c8da9076fb151ba106960a2df90c2633f342f120751a9e7"}, + {file = "multidict-6.1.0-cp310-cp310-win32.whl", hash = "sha256:4a9cb68166a34117d6646c0023c7b759bf197bee5ad4272f420a0141d7eb03a0"}, + {file = "multidict-6.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:20b9b5fbe0b88d0bdef2012ef7dee867f874b72528cf1d08f1d59b0e3850129d"}, + {file = "multidict-6.1.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:3efe2c2cb5763f2f1b275ad2bf7a287d3f7ebbef35648a9726e3b69284a4f3d6"}, + {file = "multidict-6.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c7053d3b0353a8b9de430a4f4b4268ac9a4fb3481af37dfe49825bf45ca24156"}, + {file = "multidict-6.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:27e5fc84ccef8dfaabb09d82b7d179c7cf1a3fbc8a966f8274fcb4ab2eb4cadb"}, + {file = "multidict-6.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0e2b90b43e696f25c62656389d32236e049568b39320e2735d51f08fd362761b"}, + {file = "multidict-6.1.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d83a047959d38a7ff552ff94be767b7fd79b831ad1cd9920662db05fec24fe72"}, + {file = "multidict-6.1.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d1a9dd711d0877a1ece3d2e4fea11a8e75741ca21954c919406b44e7cf971304"}, + {file = "multidict-6.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ec2abea24d98246b94913b76a125e855eb5c434f7c46546046372fe60f666351"}, + {file = "multidict-6.1.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4867cafcbc6585e4b678876c489b9273b13e9fff9f6d6d66add5e15d11d926cb"}, + {file = "multidict-6.1.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:5b48204e8d955c47c55b72779802b219a39acc3ee3d0116d5080c388970b76e3"}, + {file = "multidict-6.1.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:d8fff389528cad1618fb4b26b95550327495462cd745d879a8c7c2115248e399"}, + {file = "multidict-6.1.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:a7a9541cd308eed5e30318430a9c74d2132e9a8cb46b901326272d780bf2d423"}, + {file = "multidict-6.1.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:da1758c76f50c39a2efd5e9859ce7d776317eb1dd34317c8152ac9251fc574a3"}, + {file = "multidict-6.1.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:c943a53e9186688b45b323602298ab727d8865d8c9ee0b17f8d62d14b56f0753"}, + {file = "multidict-6.1.0-cp311-cp311-win32.whl", hash = "sha256:90f8717cb649eea3504091e640a1b8568faad18bd4b9fcd692853a04475a4b80"}, + {file = "multidict-6.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:82176036e65644a6cc5bd619f65f6f19781e8ec2e5330f51aa9ada7504cc1926"}, + {file = "multidict-6.1.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:b04772ed465fa3cc947db808fa306d79b43e896beb677a56fb2347ca1a49c1fa"}, + {file = "multidict-6.1.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:6180c0ae073bddeb5a97a38c03f30c233e0a4d39cd86166251617d1bbd0af436"}, + {file = 
"multidict-6.1.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:071120490b47aa997cca00666923a83f02c7fbb44f71cf7f136df753f7fa8761"}, + {file = "multidict-6.1.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:50b3a2710631848991d0bf7de077502e8994c804bb805aeb2925a981de58ec2e"}, + {file = "multidict-6.1.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b58c621844d55e71c1b7f7c498ce5aa6985d743a1a59034c57a905b3f153c1ef"}, + {file = "multidict-6.1.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:55b6d90641869892caa9ca42ff913f7ff1c5ece06474fbd32fb2cf6834726c95"}, + {file = "multidict-6.1.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4b820514bfc0b98a30e3d85462084779900347e4d49267f747ff54060cc33925"}, + {file = "multidict-6.1.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:10a9b09aba0c5b48c53761b7c720aaaf7cf236d5fe394cd399c7ba662d5f9966"}, + {file = "multidict-6.1.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:1e16bf3e5fc9f44632affb159d30a437bfe286ce9e02754759be5536b169b305"}, + {file = "multidict-6.1.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:76f364861c3bfc98cbbcbd402d83454ed9e01a5224bb3a28bf70002a230f73e2"}, + {file = "multidict-6.1.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:820c661588bd01a0aa62a1283f20d2be4281b086f80dad9e955e690c75fb54a2"}, + {file = "multidict-6.1.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:0e5f362e895bc5b9e67fe6e4ded2492d8124bdf817827f33c5b46c2fe3ffaca6"}, + {file = "multidict-6.1.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:3ec660d19bbc671e3a6443325f07263be452c453ac9e512f5eb935e7d4ac28b3"}, + {file = "multidict-6.1.0-cp312-cp312-win32.whl", hash = "sha256:58130ecf8f7b8112cdb841486404f1282b9c86ccb30d3519faf301b2e5659133"}, + {file = "multidict-6.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:188215fc0aafb8e03341995e7c4797860181562380f81ed0a87ff455b70bf1f1"}, + {file = "multidict-6.1.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:d569388c381b24671589335a3be6e1d45546c2988c2ebe30fdcada8457a31008"}, + {file = "multidict-6.1.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:052e10d2d37810b99cc170b785945421141bf7bb7d2f8799d431e7db229c385f"}, + {file = "multidict-6.1.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f90c822a402cb865e396a504f9fc8173ef34212a342d92e362ca498cad308e28"}, + {file = "multidict-6.1.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b225d95519a5bf73860323e633a664b0d85ad3d5bede6d30d95b35d4dfe8805b"}, + {file = "multidict-6.1.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:23bfd518810af7de1116313ebd9092cb9aa629beb12f6ed631ad53356ed6b86c"}, + {file = "multidict-6.1.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5c09fcfdccdd0b57867577b719c69e347a436b86cd83747f179dbf0cc0d4c1f3"}, + {file = "multidict-6.1.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bf6bea52ec97e95560af5ae576bdac3aa3aae0b6758c6efa115236d9e07dae44"}, + {file = "multidict-6.1.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:57feec87371dbb3520da6192213c7d6fc892d5589a93db548331954de8248fd2"}, + {file = "multidict-6.1.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:0c3f390dc53279cbc8ba976e5f8035eab997829066756d811616b652b00a23a3"}, + {file = 
"multidict-6.1.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:59bfeae4b25ec05b34f1956eaa1cb38032282cd4dfabc5056d0a1ec4d696d3aa"}, + {file = "multidict-6.1.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:b2f59caeaf7632cc633b5cf6fc449372b83bbdf0da4ae04d5be36118e46cc0aa"}, + {file = "multidict-6.1.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:37bb93b2178e02b7b618893990941900fd25b6b9ac0fa49931a40aecdf083fe4"}, + {file = "multidict-6.1.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4e9f48f58c2c523d5a06faea47866cd35b32655c46b443f163d08c6d0ddb17d6"}, + {file = "multidict-6.1.0-cp313-cp313-win32.whl", hash = "sha256:3a37ffb35399029b45c6cc33640a92bef403c9fd388acce75cdc88f58bd19a81"}, + {file = "multidict-6.1.0-cp313-cp313-win_amd64.whl", hash = "sha256:e9aa71e15d9d9beaad2c6b9319edcdc0a49a43ef5c0a4c8265ca9ee7d6c67774"}, + {file = "multidict-6.1.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:db7457bac39421addd0c8449933ac32d8042aae84a14911a757ae6ca3eef1392"}, + {file = "multidict-6.1.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:d094ddec350a2fb899fec68d8353c78233debde9b7d8b4beeafa70825f1c281a"}, + {file = "multidict-6.1.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:5845c1fd4866bb5dd3125d89b90e57ed3138241540897de748cdf19de8a2fca2"}, + {file = "multidict-6.1.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9079dfc6a70abe341f521f78405b8949f96db48da98aeb43f9907f342f627cdc"}, + {file = "multidict-6.1.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3914f5aaa0f36d5d60e8ece6a308ee1c9784cd75ec8151062614657a114c4478"}, + {file = "multidict-6.1.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c08be4f460903e5a9d0f76818db3250f12e9c344e79314d1d570fc69d7f4eae4"}, + {file = "multidict-6.1.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d093be959277cb7dee84b801eb1af388b6ad3ca6a6b6bf1ed7585895789d027d"}, + {file = "multidict-6.1.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3702ea6872c5a2a4eeefa6ffd36b042e9773f05b1f37ae3ef7264b1163c2dcf6"}, + {file = "multidict-6.1.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:2090f6a85cafc5b2db085124d752757c9d251548cedabe9bd31afe6363e0aff2"}, + {file = "multidict-6.1.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:f67f217af4b1ff66c68a87318012de788dd95fcfeb24cc889011f4e1c7454dfd"}, + {file = "multidict-6.1.0-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:189f652a87e876098bbc67b4da1049afb5f5dfbaa310dd67c594b01c10388db6"}, + {file = "multidict-6.1.0-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:6bb5992037f7a9eff7991ebe4273ea7f51f1c1c511e6a2ce511d0e7bdb754492"}, + {file = "multidict-6.1.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:ac10f4c2b9e770c4e393876e35a7046879d195cd123b4f116d299d442b335bcd"}, + {file = "multidict-6.1.0-cp38-cp38-win32.whl", hash = "sha256:e27bbb6d14416713a8bd7aaa1313c0fc8d44ee48d74497a0ff4c3a1b6ccb5167"}, + {file = "multidict-6.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:22f3105d4fb15c8f57ff3959a58fcab6ce36814486500cd7485651230ad4d4ef"}, + {file = "multidict-6.1.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:4e18b656c5e844539d506a0a06432274d7bd52a7487e6828c63a63d69185626c"}, + {file = "multidict-6.1.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a185f876e69897a6f3325c3f19f26a297fa058c5e456bfcff8015e9a27e83ae1"}, + {file = "multidict-6.1.0-cp39-cp39-macosx_11_0_arm64.whl", hash = 
"sha256:ab7c4ceb38d91570a650dba194e1ca87c2b543488fe9309b4212694174fd539c"}, + {file = "multidict-6.1.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e617fb6b0b6953fffd762669610c1c4ffd05632c138d61ac7e14ad187870669c"}, + {file = "multidict-6.1.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:16e5f4bf4e603eb1fdd5d8180f1a25f30056f22e55ce51fb3d6ad4ab29f7d96f"}, + {file = "multidict-6.1.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f4c035da3f544b1882bac24115f3e2e8760f10a0107614fc9839fd232200b875"}, + {file = "multidict-6.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:957cf8e4b6e123a9eea554fa7ebc85674674b713551de587eb318a2df3e00255"}, + {file = "multidict-6.1.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:483a6aea59cb89904e1ceabd2b47368b5600fb7de78a6e4a2c2987b2d256cf30"}, + {file = "multidict-6.1.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:87701f25a2352e5bf7454caa64757642734da9f6b11384c1f9d1a8e699758057"}, + {file = "multidict-6.1.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:682b987361e5fd7a139ed565e30d81fd81e9629acc7d925a205366877d8c8657"}, + {file = "multidict-6.1.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:ce2186a7df133a9c895dea3331ddc5ddad42cdd0d1ea2f0a51e5d161e4762f28"}, + {file = "multidict-6.1.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:9f636b730f7e8cb19feb87094949ba54ee5357440b9658b2a32a5ce4bce53972"}, + {file = "multidict-6.1.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:73eae06aa53af2ea5270cc066dcaf02cc60d2994bbb2c4ef5764949257d10f43"}, + {file = "multidict-6.1.0-cp39-cp39-win32.whl", hash = "sha256:1ca0083e80e791cffc6efce7660ad24af66c8d4079d2a750b29001b53ff59ada"}, + {file = "multidict-6.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:aa466da5b15ccea564bdab9c89175c762bc12825f4659c11227f515cee76fa4a"}, + {file = "multidict-6.1.0-py3-none-any.whl", hash = "sha256:48e171e52d1c4d33888e529b999e5900356b9ae588c2f09a52dcefb158b27506"}, + {file = "multidict-6.1.0.tar.gz", hash = "sha256:22ae2ebf9b0c69d206c003e2f6a914ea33f0a932d4aa16f236afc049d9958f4a"}, +] + +[package.dependencies] +typing-extensions = {version = ">=4.1.0", markers = "python_version < \"3.11\""} [[package]] name = "multimethod" -version = "1.11.2" +version = "1.12" description = "Multiple argument dispatching." 
optional = true python-versions = ">=3.9" files = [ - {file = "multimethod-1.11.2-py3-none-any.whl", hash = "sha256:cb338f09395c0ee87d36c7691cdd794d13d8864358082cf1205f812edd5ce05a"}, - {file = "multimethod-1.11.2.tar.gz", hash = "sha256:7f2a4863967142e6db68632fef9cd79053c09670ba0c5f113301e245140bba5c"}, + {file = "multimethod-1.12-py3-none-any.whl", hash = "sha256:fd0c473c43558908d97cc06e4d68e8f69202f167db46f7b4e4058893e7dbdf60"}, + {file = "multimethod-1.12.tar.gz", hash = "sha256:8db8ef2a8d2a247e3570cc23317680892fdf903d84c8c1053667c8e8f7671a67"}, ] [[package]] @@ -2115,14 +2283,14 @@ files = [ ] [[package]] -name = "nest-asyncio" -version = "1.6.0" -description = "Patch asyncio to allow nested event loops" +name = "nodeenv" +version = "1.9.1" +description = "Node.js virtual environment builder" optional = false -python-versions = ">=3.5" +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" files = [ - {file = "nest_asyncio-1.6.0-py3-none-any.whl", hash = "sha256:87af6efd6b5e897c81050477ef65c62e2b2f35d51703cae01aff2905b1852e1c"}, - {file = "nest_asyncio-1.6.0.tar.gz", hash = "sha256:6f172d5449aca15afd6c646851f4e31e02c598d553a667e38cafa997cfec55fe"}, + {file = "nodeenv-1.9.1-py2.py3-none-any.whl", hash = "sha256:ba11c9782d29c27c70ffbdda2d7415098754709be8a7056d79a737cd901155c9"}, + {file = "nodeenv-1.9.1.tar.gz", hash = "sha256:6ec12890a2dab7946721edbfbcd91f3319c6ccc9aec47be7c7e6b7011ee6645f"}, ] [[package]] @@ -2186,48 +2354,49 @@ httpx = ">=0.27.0,<0.28.0" [[package]] name = "opentelemetry-api" -version = "1.25.0" +version = "1.27.0" description = "OpenTelemetry Python API" optional = true python-versions = ">=3.8" files = [ - {file = "opentelemetry_api-1.25.0-py3-none-any.whl", hash = "sha256:757fa1aa020a0f8fa139f8959e53dec2051cc26b832e76fa839a6d76ecefd737"}, - {file = "opentelemetry_api-1.25.0.tar.gz", hash = "sha256:77c4985f62f2614e42ce77ee4c9da5fa5f0bc1e1821085e9a47533a9323ae869"}, + {file = "opentelemetry_api-1.27.0-py3-none-any.whl", hash = "sha256:953d5871815e7c30c81b56d910c707588000fff7a3ca1c73e6531911d53065e7"}, + {file = "opentelemetry_api-1.27.0.tar.gz", hash = "sha256:ed673583eaa5f81b5ce5e86ef7cdaf622f88ef65f0b9aab40b843dcae5bef342"}, ] [package.dependencies] deprecated = ">=1.2.6" -importlib-metadata = ">=6.0,<=7.1" +importlib-metadata = ">=6.0,<=8.4.0" [[package]] name = "opentelemetry-sdk" -version = "1.25.0" +version = "1.27.0" description = "OpenTelemetry Python SDK" optional = true python-versions = ">=3.8" files = [ - {file = "opentelemetry_sdk-1.25.0-py3-none-any.whl", hash = "sha256:d97ff7ec4b351692e9d5a15af570c693b8715ad78b8aafbec5c7100fe966b4c9"}, - {file = "opentelemetry_sdk-1.25.0.tar.gz", hash = "sha256:ce7fc319c57707ef5bf8b74fb9f8ebdb8bfafbe11898410e0d2a761d08a98ec7"}, + {file = "opentelemetry_sdk-1.27.0-py3-none-any.whl", hash = "sha256:365f5e32f920faf0fd9e14fdfd92c086e317eaa5f860edba9cdc17a380d9197d"}, + {file = "opentelemetry_sdk-1.27.0.tar.gz", hash = "sha256:d525017dea0ccce9ba4e0245100ec46ecdc043f2d7b8315d56b19aff0904fa6f"}, ] [package.dependencies] -opentelemetry-api = "1.25.0" -opentelemetry-semantic-conventions = "0.46b0" +opentelemetry-api = "1.27.0" +opentelemetry-semantic-conventions = "0.48b0" typing-extensions = ">=3.7.4" [[package]] name = "opentelemetry-semantic-conventions" -version = "0.46b0" +version = "0.48b0" description = "OpenTelemetry Semantic Conventions" optional = true python-versions = ">=3.8" files = [ - {file = "opentelemetry_semantic_conventions-0.46b0-py3-none-any.whl", hash = 
"sha256:6daef4ef9fa51d51855d9f8e0ccd3a1bd59e0e545abe99ac6203804e36ab3e07"}, - {file = "opentelemetry_semantic_conventions-0.46b0.tar.gz", hash = "sha256:fbc982ecbb6a6e90869b15c1673be90bd18c8a56ff1cffc0864e38e2edffaefa"}, + {file = "opentelemetry_semantic_conventions-0.48b0-py3-none-any.whl", hash = "sha256:a0de9f45c413a8669788a38569c7e0a11ce6ce97861a628cca785deecdc32a1f"}, + {file = "opentelemetry_semantic_conventions-0.48b0.tar.gz", hash = "sha256:12d74983783b6878162208be57c9effcb89dc88691c64992d70bb89dc00daa1a"}, ] [package.dependencies] -opentelemetry-api = "1.25.0" +deprecated = ">=1.2.6" +opentelemetry-api = "1.27.0" [[package]] name = "overrides" @@ -2255,7 +2424,7 @@ files = [ name = "pandas" version = "2.2.2" description = "Powerful data structures for data analysis, time series, and statistics" -optional = false +optional = true python-versions = ">=3.9" files = [ {file = "pandas-2.2.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:90c6fca2acf139569e74e8781709dccb6fe25940488755716d1d354d6bc58bce"}, @@ -2449,19 +2618,19 @@ tests = ["check-manifest", "coverage", "defusedxml", "markdown2", "olefile", "pa [[package]] name = "platformdirs" -version = "4.2.2" +version = "4.3.2" description = "A small Python package for determining appropriate platform-specific dirs, e.g. a `user data dir`." optional = false python-versions = ">=3.8" files = [ - {file = "platformdirs-4.2.2-py3-none-any.whl", hash = "sha256:2d7a1657e36a80ea911db832a8a6ece5ee53d8de21edd5cc5879af6530b1bfee"}, - {file = "platformdirs-4.2.2.tar.gz", hash = "sha256:38b7b51f512eed9e84a22788b4bce1de17c0adb134d6becb09836e37d8654cd3"}, + {file = "platformdirs-4.3.2-py3-none-any.whl", hash = "sha256:eb1c8582560b34ed4ba105009a4badf7f6f85768b30126f351328507b2beb617"}, + {file = "platformdirs-4.3.2.tar.gz", hash = "sha256:9e5e27a08aa095dd127b9f2e764d74254f482fef22b0970773bfba79d091ab8c"}, ] [package.extras] -docs = ["furo (>=2023.9.10)", "proselint (>=0.13)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"] -test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)"] -type = ["mypy (>=1.8)"] +docs = ["furo (>=2024.8.6)", "proselint (>=0.14)", "sphinx (>=8.0.2)", "sphinx-autodoc-typehints (>=2.4)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=8.3.2)", "pytest-cov (>=5)", "pytest-mock (>=3.14)"] +type = ["mypy (>=1.11.2)"] [[package]] name = "pluggy" @@ -2522,13 +2691,13 @@ xlsxwriter = ["xlsxwriter"] [[package]] name = "portalocker" -version = "2.8.2" +version = "2.10.1" description = "Wraps the portalocker recipe for easy usage" optional = true python-versions = ">=3.8" files = [ - {file = "portalocker-2.8.2-py3-none-any.whl", hash = "sha256:cfb86acc09b9aa7c3b43594e19be1345b9d16af3feb08bf92f23d4dce513a28e"}, - {file = "portalocker-2.8.2.tar.gz", hash = "sha256:2b035aa7828e46c58e9b31390ee1f169b98e1066ab10b9a6a861fe7e25ee4f33"}, + {file = "portalocker-2.10.1-py3-none-any.whl", hash = "sha256:53a5984ebc86a025552264b459b46a2086e269b21823cb572f8f28ee759e45bf"}, + {file = "portalocker-2.10.1.tar.gz", hash = "sha256:ef1bf844e878ab08aee7e40184156e1151f228f103aa5c6bd0724cc330960f8f"}, ] [package.dependencies] @@ -2589,29 +2758,29 @@ prometheus-client = ">=0.8.0,<1.0.0" [[package]] name = "protobuf" -version = "4.25.3" +version = "5.28.1" description = "" optional = true python-versions = ">=3.8" files = [ - {file = "protobuf-4.25.3-cp310-abi3-win32.whl", hash = "sha256:d4198877797a83cbfe9bffa3803602bbe1625dc30d8a097365dbc762e5790faa"}, - {file = 
"protobuf-4.25.3-cp310-abi3-win_amd64.whl", hash = "sha256:209ba4cc916bab46f64e56b85b090607a676f66b473e6b762e6f1d9d591eb2e8"}, - {file = "protobuf-4.25.3-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:f1279ab38ecbfae7e456a108c5c0681e4956d5b1090027c1de0f934dfdb4b35c"}, - {file = "protobuf-4.25.3-cp37-abi3-manylinux2014_aarch64.whl", hash = "sha256:e7cb0ae90dd83727f0c0718634ed56837bfeeee29a5f82a7514c03ee1364c019"}, - {file = "protobuf-4.25.3-cp37-abi3-manylinux2014_x86_64.whl", hash = "sha256:7c8daa26095f82482307bc717364e7c13f4f1c99659be82890dcfc215194554d"}, - {file = "protobuf-4.25.3-cp38-cp38-win32.whl", hash = "sha256:f4f118245c4a087776e0a8408be33cf09f6c547442c00395fbfb116fac2f8ac2"}, - {file = "protobuf-4.25.3-cp38-cp38-win_amd64.whl", hash = "sha256:c053062984e61144385022e53678fbded7aea14ebb3e0305ae3592fb219ccfa4"}, - {file = "protobuf-4.25.3-cp39-cp39-win32.whl", hash = "sha256:19b270aeaa0099f16d3ca02628546b8baefe2955bbe23224aaf856134eccf1e4"}, - {file = "protobuf-4.25.3-cp39-cp39-win_amd64.whl", hash = "sha256:e3c97a1555fd6388f857770ff8b9703083de6bf1f9274a002a332d65fbb56c8c"}, - {file = "protobuf-4.25.3-py3-none-any.whl", hash = "sha256:f0700d54bcf45424477e46a9f0944155b46fb0639d69728739c0e47bab83f2b9"}, - {file = "protobuf-4.25.3.tar.gz", hash = "sha256:25b5d0b42fd000320bd7830b349e3b696435f3b329810427a6bcce6a5492cc5c"}, + {file = "protobuf-5.28.1-cp310-abi3-win32.whl", hash = "sha256:fc063acaf7a3d9ca13146fefb5b42ac94ab943ec6e978f543cd5637da2d57957"}, + {file = "protobuf-5.28.1-cp310-abi3-win_amd64.whl", hash = "sha256:4c7f5cb38c640919791c9f74ea80c5b82314c69a8409ea36f2599617d03989af"}, + {file = "protobuf-5.28.1-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:4304e4fceb823d91699e924a1fdf95cde0e066f3b1c28edb665bda762ecde10f"}, + {file = "protobuf-5.28.1-cp38-abi3-manylinux2014_aarch64.whl", hash = "sha256:0dfd86d2b5edf03d91ec2a7c15b4e950258150f14f9af5f51c17fa224ee1931f"}, + {file = "protobuf-5.28.1-cp38-abi3-manylinux2014_x86_64.whl", hash = "sha256:51f09caab818707ab91cf09cc5c156026599cf05a4520779ccbf53c1b352fb25"}, + {file = "protobuf-5.28.1-cp38-cp38-win32.whl", hash = "sha256:1b04bde117a10ff9d906841a89ec326686c48ececeb65690f15b8cabe7149495"}, + {file = "protobuf-5.28.1-cp38-cp38-win_amd64.whl", hash = "sha256:cabfe43044ee319ad6832b2fda332646f9ef1636b0130186a3ae0a52fc264bb4"}, + {file = "protobuf-5.28.1-cp39-cp39-win32.whl", hash = "sha256:4b4b9a0562a35773ff47a3df823177ab71a1f5eb1ff56d8f842b7432ecfd7fd2"}, + {file = "protobuf-5.28.1-cp39-cp39-win_amd64.whl", hash = "sha256:f24e5d70e6af8ee9672ff605d5503491635f63d5db2fffb6472be78ba62efd8f"}, + {file = "protobuf-5.28.1-py3-none-any.whl", hash = "sha256:c529535e5c0effcf417682563719e5d8ac8d2b93de07a56108b4c2d436d7a29a"}, + {file = "protobuf-5.28.1.tar.gz", hash = "sha256:42597e938f83bb7f3e4b35f03aa45208d49ae8d5bcb4bc10b9fc825e0ab5e423"}, ] [[package]] name = "psycopg2" version = "2.9.9" description = "psycopg2 - Python-PostgreSQL Database Adapter" -optional = false +optional = true python-versions = ">=3.7" files = [ {file = "psycopg2-2.9.9-cp310-cp310-win32.whl", hash = "sha256:38a8dcc6856f569068b47de286b472b7c473ac7977243593a288ebce0dc89516"}, @@ -2686,6 +2855,42 @@ files = [ [package.dependencies] numpy = ">=1.16.6,<2" +[[package]] +name = "pyarrow-hotfix" +version = "0.6" +description = "" +optional = true +python-versions = ">=3.5" +files = [ + {file = "pyarrow_hotfix-0.6-py3-none-any.whl", hash = "sha256:dcc9ae2d220dff0083be6a9aa8e0cdee5182ad358d4931fce825c545e5c89178"}, + {file = "pyarrow_hotfix-0.6.tar.gz", hash = 
"sha256:79d3e030f7ff890d408a100ac16d6f00b14d44a502d7897cd9fc3e3a534e9945"}, +] + +[[package]] +name = "pyasn1" +version = "0.6.1" +description = "Pure-Python implementation of ASN.1 types and DER/BER/CER codecs (X.208)" +optional = true +python-versions = ">=3.8" +files = [ + {file = "pyasn1-0.6.1-py3-none-any.whl", hash = "sha256:0d632f46f2ba09143da3a8afe9e33fb6f92fa2320ab7e886e2d0f7672af84629"}, + {file = "pyasn1-0.6.1.tar.gz", hash = "sha256:6f580d2bdd84365380830acf45550f2511469f673cb4a5ae3857a3170128b034"}, +] + +[[package]] +name = "pyasn1-modules" +version = "0.4.1" +description = "A collection of ASN.1-based protocols modules" +optional = true +python-versions = ">=3.8" +files = [ + {file = "pyasn1_modules-0.4.1-py3-none-any.whl", hash = "sha256:49bfa96b45a292b711e986f222502c1c9a5e1f4e568fc30e2574a6c7d07838fd"}, + {file = "pyasn1_modules-0.4.1.tar.gz", hash = "sha256:c28e2dbf9c06ad61c71a075c7e0f9fd0f1b0bb2d2ad4377f240d33ac2ab60a7c"}, +] + +[package.dependencies] +pyasn1 = ">=0.4.6,<0.7.0" + [[package]] name = "pycparser" version = "2.22" @@ -2699,109 +2904,123 @@ files = [ [[package]] name = "pydantic" -version = "2.7.3" +version = "2.9.1" description = "Data validation using Python type hints" optional = false python-versions = ">=3.8" files = [ - {file = "pydantic-2.7.3-py3-none-any.whl", hash = "sha256:ea91b002777bf643bb20dd717c028ec43216b24a6001a280f83877fd2655d0b4"}, - {file = "pydantic-2.7.3.tar.gz", hash = "sha256:c46c76a40bb1296728d7a8b99aa73dd70a48c3510111ff290034f860c99c419e"}, + {file = "pydantic-2.9.1-py3-none-any.whl", hash = "sha256:7aff4db5fdf3cf573d4b3c30926a510a10e19a0774d38fc4967f78beb6deb612"}, + {file = "pydantic-2.9.1.tar.gz", hash = "sha256:1363c7d975c7036df0db2b4a61f2e062fbc0aa5ab5f2772e0ffc7191a4f4bce2"}, ] [package.dependencies] -annotated-types = ">=0.4.0" -pydantic-core = "2.18.4" -typing-extensions = ">=4.6.1" +annotated-types = ">=0.6.0" +pydantic-core = "2.23.3" +typing-extensions = [ + {version = ">=4.6.1", markers = "python_version < \"3.13\""}, + {version = ">=4.12.2", markers = "python_version >= \"3.13\""}, +] [package.extras] email = ["email-validator (>=2.0.0)"] +timezone = ["tzdata"] [[package]] name = "pydantic-core" -version = "2.18.4" +version = "2.23.3" description = "Core functionality for Pydantic validation and serialization" optional = false python-versions = ">=3.8" files = [ - {file = "pydantic_core-2.18.4-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:f76d0ad001edd426b92233d45c746fd08f467d56100fd8f30e9ace4b005266e4"}, - {file = "pydantic_core-2.18.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:59ff3e89f4eaf14050c8022011862df275b552caef8082e37b542b066ce1ff26"}, - {file = "pydantic_core-2.18.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a55b5b16c839df1070bc113c1f7f94a0af4433fcfa1b41799ce7606e5c79ce0a"}, - {file = "pydantic_core-2.18.4-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:4d0dcc59664fcb8974b356fe0a18a672d6d7cf9f54746c05f43275fc48636851"}, - {file = "pydantic_core-2.18.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8951eee36c57cd128f779e641e21eb40bc5073eb28b2d23f33eb0ef14ffb3f5d"}, - {file = "pydantic_core-2.18.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4701b19f7e3a06ea655513f7938de6f108123bf7c86bbebb1196eb9bd35cf724"}, - {file = "pydantic_core-2.18.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:e00a3f196329e08e43d99b79b286d60ce46bed10f2280d25a1718399457e06be"}, - {file = "pydantic_core-2.18.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:97736815b9cc893b2b7f663628e63f436018b75f44854c8027040e05230eeddb"}, - {file = "pydantic_core-2.18.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:6891a2ae0e8692679c07728819b6e2b822fb30ca7445f67bbf6509b25a96332c"}, - {file = "pydantic_core-2.18.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:bc4ff9805858bd54d1a20efff925ccd89c9d2e7cf4986144b30802bf78091c3e"}, - {file = "pydantic_core-2.18.4-cp310-none-win32.whl", hash = "sha256:1b4de2e51bbcb61fdebd0ab86ef28062704f62c82bbf4addc4e37fa4b00b7cbc"}, - {file = "pydantic_core-2.18.4-cp310-none-win_amd64.whl", hash = "sha256:6a750aec7bf431517a9fd78cb93c97b9b0c496090fee84a47a0d23668976b4b0"}, - {file = "pydantic_core-2.18.4-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:942ba11e7dfb66dc70f9ae66b33452f51ac7bb90676da39a7345e99ffb55402d"}, - {file = "pydantic_core-2.18.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:b2ebef0e0b4454320274f5e83a41844c63438fdc874ea40a8b5b4ecb7693f1c4"}, - {file = "pydantic_core-2.18.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a642295cd0c8df1b86fc3dced1d067874c353a188dc8e0f744626d49e9aa51c4"}, - {file = "pydantic_core-2.18.4-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5f09baa656c904807e832cf9cce799c6460c450c4ad80803517032da0cd062e2"}, - {file = "pydantic_core-2.18.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:98906207f29bc2c459ff64fa007afd10a8c8ac080f7e4d5beff4c97086a3dabd"}, - {file = "pydantic_core-2.18.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:19894b95aacfa98e7cb093cd7881a0c76f55731efad31073db4521e2b6ff5b7d"}, - {file = "pydantic_core-2.18.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0fbbdc827fe5e42e4d196c746b890b3d72876bdbf160b0eafe9f0334525119c8"}, - {file = "pydantic_core-2.18.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f85d05aa0918283cf29a30b547b4df2fbb56b45b135f9e35b6807cb28bc47951"}, - {file = "pydantic_core-2.18.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e85637bc8fe81ddb73fda9e56bab24560bdddfa98aa64f87aaa4e4b6730c23d2"}, - {file = "pydantic_core-2.18.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:2f5966897e5461f818e136b8451d0551a2e77259eb0f73a837027b47dc95dab9"}, - {file = "pydantic_core-2.18.4-cp311-none-win32.whl", hash = "sha256:44c7486a4228413c317952e9d89598bcdfb06399735e49e0f8df643e1ccd0558"}, - {file = "pydantic_core-2.18.4-cp311-none-win_amd64.whl", hash = "sha256:8a7164fe2005d03c64fd3b85649891cd4953a8de53107940bf272500ba8a788b"}, - {file = "pydantic_core-2.18.4-cp311-none-win_arm64.whl", hash = "sha256:4e99bc050fe65c450344421017f98298a97cefc18c53bb2f7b3531eb39bc7805"}, - {file = "pydantic_core-2.18.4-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:6f5c4d41b2771c730ea1c34e458e781b18cc668d194958e0112455fff4e402b2"}, - {file = "pydantic_core-2.18.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2fdf2156aa3d017fddf8aea5adfba9f777db1d6022d392b682d2a8329e087cef"}, - {file = "pydantic_core-2.18.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4748321b5078216070b151d5271ef3e7cc905ab170bbfd27d5c83ee3ec436695"}, - {file = "pydantic_core-2.18.4-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = 
"sha256:847a35c4d58721c5dc3dba599878ebbdfd96784f3fb8bb2c356e123bdcd73f34"}, - {file = "pydantic_core-2.18.4-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3c40d4eaad41f78e3bbda31b89edc46a3f3dc6e171bf0ecf097ff7a0ffff7cb1"}, - {file = "pydantic_core-2.18.4-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:21a5e440dbe315ab9825fcd459b8814bb92b27c974cbc23c3e8baa2b76890077"}, - {file = "pydantic_core-2.18.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:01dd777215e2aa86dfd664daed5957704b769e726626393438f9c87690ce78c3"}, - {file = "pydantic_core-2.18.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:4b06beb3b3f1479d32befd1f3079cc47b34fa2da62457cdf6c963393340b56e9"}, - {file = "pydantic_core-2.18.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:564d7922e4b13a16b98772441879fcdcbe82ff50daa622d681dd682175ea918c"}, - {file = "pydantic_core-2.18.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:0eb2a4f660fcd8e2b1c90ad566db2b98d7f3f4717c64fe0a83e0adb39766d5b8"}, - {file = "pydantic_core-2.18.4-cp312-none-win32.whl", hash = "sha256:8b8bab4c97248095ae0c4455b5a1cd1cdd96e4e4769306ab19dda135ea4cdb07"}, - {file = "pydantic_core-2.18.4-cp312-none-win_amd64.whl", hash = "sha256:14601cdb733d741b8958224030e2bfe21a4a881fb3dd6fbb21f071cabd48fa0a"}, - {file = "pydantic_core-2.18.4-cp312-none-win_arm64.whl", hash = "sha256:c1322d7dd74713dcc157a2b7898a564ab091ca6c58302d5c7b4c07296e3fd00f"}, - {file = "pydantic_core-2.18.4-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:823be1deb01793da05ecb0484d6c9e20baebb39bd42b5d72636ae9cf8350dbd2"}, - {file = "pydantic_core-2.18.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:ebef0dd9bf9b812bf75bda96743f2a6c5734a02092ae7f721c048d156d5fabae"}, - {file = "pydantic_core-2.18.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ae1d6df168efb88d7d522664693607b80b4080be6750c913eefb77e34c12c71a"}, - {file = "pydantic_core-2.18.4-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f9899c94762343f2cc2fc64c13e7cae4c3cc65cdfc87dd810a31654c9b7358cc"}, - {file = "pydantic_core-2.18.4-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:99457f184ad90235cfe8461c4d70ab7dd2680e28821c29eca00252ba90308c78"}, - {file = "pydantic_core-2.18.4-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:18f469a3d2a2fdafe99296a87e8a4c37748b5080a26b806a707f25a902c040a8"}, - {file = "pydantic_core-2.18.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b7cdf28938ac6b8b49ae5e92f2735056a7ba99c9b110a474473fd71185c1af5d"}, - {file = "pydantic_core-2.18.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:938cb21650855054dc54dfd9120a851c974f95450f00683399006aa6e8abb057"}, - {file = "pydantic_core-2.18.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:44cd83ab6a51da80fb5adbd9560e26018e2ac7826f9626bc06ca3dc074cd198b"}, - {file = "pydantic_core-2.18.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:972658f4a72d02b8abfa2581d92d59f59897d2e9f7e708fdabe922f9087773af"}, - {file = "pydantic_core-2.18.4-cp38-none-win32.whl", hash = "sha256:1d886dc848e60cb7666f771e406acae54ab279b9f1e4143babc9c2258213daa2"}, - {file = "pydantic_core-2.18.4-cp38-none-win_amd64.whl", hash = "sha256:bb4462bd43c2460774914b8525f79b00f8f407c945d50881568f294c1d9b4443"}, - {file = "pydantic_core-2.18.4-cp39-cp39-macosx_10_12_x86_64.whl", hash = 
"sha256:44a688331d4a4e2129140a8118479443bd6f1905231138971372fcde37e43528"}, - {file = "pydantic_core-2.18.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a2fdd81edd64342c85ac7cf2753ccae0b79bf2dfa063785503cb85a7d3593223"}, - {file = "pydantic_core-2.18.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:86110d7e1907ab36691f80b33eb2da87d780f4739ae773e5fc83fb272f88825f"}, - {file = "pydantic_core-2.18.4-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:46387e38bd641b3ee5ce247563b60c5ca098da9c56c75c157a05eaa0933ed154"}, - {file = "pydantic_core-2.18.4-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:123c3cec203e3f5ac7b000bd82235f1a3eced8665b63d18be751f115588fea30"}, - {file = "pydantic_core-2.18.4-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dc1803ac5c32ec324c5261c7209e8f8ce88e83254c4e1aebdc8b0a39f9ddb443"}, - {file = "pydantic_core-2.18.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:53db086f9f6ab2b4061958d9c276d1dbe3690e8dd727d6abf2321d6cce37fa94"}, - {file = "pydantic_core-2.18.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:abc267fa9837245cc28ea6929f19fa335f3dc330a35d2e45509b6566dc18be23"}, - {file = "pydantic_core-2.18.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:a0d829524aaefdebccb869eed855e2d04c21d2d7479b6cada7ace5448416597b"}, - {file = "pydantic_core-2.18.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:509daade3b8649f80d4e5ff21aa5673e4ebe58590b25fe42fac5f0f52c6f034a"}, - {file = "pydantic_core-2.18.4-cp39-none-win32.whl", hash = "sha256:ca26a1e73c48cfc54c4a76ff78df3727b9d9f4ccc8dbee4ae3f73306a591676d"}, - {file = "pydantic_core-2.18.4-cp39-none-win_amd64.whl", hash = "sha256:c67598100338d5d985db1b3d21f3619ef392e185e71b8d52bceacc4a7771ea7e"}, - {file = "pydantic_core-2.18.4-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:574d92eac874f7f4db0ca653514d823a0d22e2354359d0759e3f6a406db5d55d"}, - {file = "pydantic_core-2.18.4-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:1f4d26ceb5eb9eed4af91bebeae4b06c3fb28966ca3a8fb765208cf6b51102ab"}, - {file = "pydantic_core-2.18.4-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:77450e6d20016ec41f43ca4a6c63e9fdde03f0ae3fe90e7c27bdbeaece8b1ed4"}, - {file = "pydantic_core-2.18.4-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d323a01da91851a4f17bf592faf46149c9169d68430b3146dcba2bb5e5719abc"}, - {file = "pydantic_core-2.18.4-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:43d447dd2ae072a0065389092a231283f62d960030ecd27565672bd40746c507"}, - {file = "pydantic_core-2.18.4-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:578e24f761f3b425834f297b9935e1ce2e30f51400964ce4801002435a1b41ef"}, - {file = "pydantic_core-2.18.4-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:81b5efb2f126454586d0f40c4d834010979cb80785173d1586df845a632e4e6d"}, - {file = "pydantic_core-2.18.4-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:ab86ce7c8f9bea87b9d12c7f0af71102acbf5ecbc66c17796cff45dae54ef9a5"}, - {file = "pydantic_core-2.18.4-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:90afc12421df2b1b4dcc975f814e21bc1754640d502a2fbcc6d41e77af5ec312"}, - {file = "pydantic_core-2.18.4-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:51991a89639a912c17bef4b45c87bd83593aee0437d8102556af4885811d59f5"}, - {file = 
"pydantic_core-2.18.4-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:293afe532740370aba8c060882f7d26cfd00c94cae32fd2e212a3a6e3b7bc15e"}, - {file = "pydantic_core-2.18.4-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b48ece5bde2e768197a2d0f6e925f9d7e3e826f0ad2271120f8144a9db18d5c8"}, - {file = "pydantic_core-2.18.4-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:eae237477a873ab46e8dd748e515c72c0c804fb380fbe6c85533c7de51f23a8f"}, - {file = "pydantic_core-2.18.4-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:834b5230b5dfc0c1ec37b2fda433b271cbbc0e507560b5d1588e2cc1148cf1ce"}, - {file = "pydantic_core-2.18.4-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:e858ac0a25074ba4bce653f9b5d0a85b7456eaddadc0ce82d3878c22489fa4ee"}, - {file = "pydantic_core-2.18.4-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:2fd41f6eff4c20778d717af1cc50eca52f5afe7805ee530a4fbd0bae284f16e9"}, - {file = "pydantic_core-2.18.4.tar.gz", hash = "sha256:ec3beeada09ff865c344ff3bc2f427f5e6c26401cc6113d77e372c3fdac73864"}, + {file = "pydantic_core-2.23.3-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:7f10a5d1b9281392f1bf507d16ac720e78285dfd635b05737c3911637601bae6"}, + {file = "pydantic_core-2.23.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:3c09a7885dd33ee8c65266e5aa7fb7e2f23d49d8043f089989726391dd7350c5"}, + {file = "pydantic_core-2.23.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6470b5a1ec4d1c2e9afe928c6cb37eb33381cab99292a708b8cb9aa89e62429b"}, + {file = "pydantic_core-2.23.3-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:9172d2088e27d9a185ea0a6c8cebe227a9139fd90295221d7d495944d2367700"}, + {file = "pydantic_core-2.23.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:86fc6c762ca7ac8fbbdff80d61b2c59fb6b7d144aa46e2d54d9e1b7b0e780e01"}, + {file = "pydantic_core-2.23.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f0cb80fd5c2df4898693aa841425ea1727b1b6d2167448253077d2a49003e0ed"}, + {file = "pydantic_core-2.23.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:03667cec5daf43ac4995cefa8aaf58f99de036204a37b889c24a80927b629cec"}, + {file = "pydantic_core-2.23.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:047531242f8e9c2db733599f1c612925de095e93c9cc0e599e96cf536aaf56ba"}, + {file = "pydantic_core-2.23.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:5499798317fff7f25dbef9347f4451b91ac2a4330c6669821c8202fd354c7bee"}, + {file = "pydantic_core-2.23.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:bbb5e45eab7624440516ee3722a3044b83fff4c0372efe183fd6ba678ff681fe"}, + {file = "pydantic_core-2.23.3-cp310-none-win32.whl", hash = "sha256:8b5b3ed73abb147704a6e9f556d8c5cb078f8c095be4588e669d315e0d11893b"}, + {file = "pydantic_core-2.23.3-cp310-none-win_amd64.whl", hash = "sha256:2b603cde285322758a0279995b5796d64b63060bfbe214b50a3ca23b5cee3e83"}, + {file = "pydantic_core-2.23.3-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:c889fd87e1f1bbeb877c2ee56b63bb297de4636661cc9bbfcf4b34e5e925bc27"}, + {file = "pydantic_core-2.23.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ea85bda3189fb27503af4c45273735bcde3dd31c1ab17d11f37b04877859ef45"}, + {file = "pydantic_core-2.23.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a7f7f72f721223f33d3dc98a791666ebc6a91fa023ce63733709f4894a7dc611"}, + {file = 
"pydantic_core-2.23.3-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2b2b55b0448e9da68f56b696f313949cda1039e8ec7b5d294285335b53104b61"}, + {file = "pydantic_core-2.23.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c24574c7e92e2c56379706b9a3f07c1e0c7f2f87a41b6ee86653100c4ce343e5"}, + {file = "pydantic_core-2.23.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f2b05e6ccbee333a8f4b8f4d7c244fdb7a979e90977ad9c51ea31261e2085ce0"}, + {file = "pydantic_core-2.23.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e2c409ce1c219c091e47cb03feb3c4ed8c2b8e004efc940da0166aaee8f9d6c8"}, + {file = "pydantic_core-2.23.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d965e8b325f443ed3196db890d85dfebbb09f7384486a77461347f4adb1fa7f8"}, + {file = "pydantic_core-2.23.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:f56af3a420fb1ffaf43ece3ea09c2d27c444e7c40dcb7c6e7cf57aae764f2b48"}, + {file = "pydantic_core-2.23.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:5b01a078dd4f9a52494370af21aa52964e0a96d4862ac64ff7cea06e0f12d2c5"}, + {file = "pydantic_core-2.23.3-cp311-none-win32.whl", hash = "sha256:560e32f0df04ac69b3dd818f71339983f6d1f70eb99d4d1f8e9705fb6c34a5c1"}, + {file = "pydantic_core-2.23.3-cp311-none-win_amd64.whl", hash = "sha256:c744fa100fdea0d000d8bcddee95213d2de2e95b9c12be083370b2072333a0fa"}, + {file = "pydantic_core-2.23.3-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:e0ec50663feedf64d21bad0809f5857bac1ce91deded203efc4a84b31b2e4305"}, + {file = "pydantic_core-2.23.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:db6e6afcb95edbe6b357786684b71008499836e91f2a4a1e55b840955b341dbb"}, + {file = "pydantic_core-2.23.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:98ccd69edcf49f0875d86942f4418a4e83eb3047f20eb897bffa62a5d419c8fa"}, + {file = "pydantic_core-2.23.3-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a678c1ac5c5ec5685af0133262103defb427114e62eafeda12f1357a12140162"}, + {file = "pydantic_core-2.23.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:01491d8b4d8db9f3391d93b0df60701e644ff0894352947f31fff3e52bd5c801"}, + {file = "pydantic_core-2.23.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fcf31facf2796a2d3b7fe338fe8640aa0166e4e55b4cb108dbfd1058049bf4cb"}, + {file = "pydantic_core-2.23.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7200fd561fb3be06827340da066df4311d0b6b8eb0c2116a110be5245dceb326"}, + {file = "pydantic_core-2.23.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:dc1636770a809dee2bd44dd74b89cc80eb41172bcad8af75dd0bc182c2666d4c"}, + {file = "pydantic_core-2.23.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:67a5def279309f2e23014b608c4150b0c2d323bd7bccd27ff07b001c12c2415c"}, + {file = "pydantic_core-2.23.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:748bdf985014c6dd3e1e4cc3db90f1c3ecc7246ff5a3cd4ddab20c768b2f1dab"}, + {file = "pydantic_core-2.23.3-cp312-none-win32.whl", hash = "sha256:255ec6dcb899c115f1e2a64bc9ebc24cc0e3ab097775755244f77360d1f3c06c"}, + {file = "pydantic_core-2.23.3-cp312-none-win_amd64.whl", hash = "sha256:40b8441be16c1e940abebed83cd006ddb9e3737a279e339dbd6d31578b802f7b"}, + {file = "pydantic_core-2.23.3-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:6daaf5b1ba1369a22c8b050b643250e3e5efc6a78366d323294aee54953a4d5f"}, + {file 
= "pydantic_core-2.23.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:d015e63b985a78a3d4ccffd3bdf22b7c20b3bbd4b8227809b3e8e75bc37f9cb2"}, + {file = "pydantic_core-2.23.3-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a3fc572d9b5b5cfe13f8e8a6e26271d5d13f80173724b738557a8c7f3a8a3791"}, + {file = "pydantic_core-2.23.3-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f6bd91345b5163ee7448bee201ed7dd601ca24f43f439109b0212e296eb5b423"}, + {file = "pydantic_core-2.23.3-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fc379c73fd66606628b866f661e8785088afe2adaba78e6bbe80796baf708a63"}, + {file = "pydantic_core-2.23.3-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fbdce4b47592f9e296e19ac31667daed8753c8367ebb34b9a9bd89dacaa299c9"}, + {file = "pydantic_core-2.23.3-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fc3cf31edf405a161a0adad83246568647c54404739b614b1ff43dad2b02e6d5"}, + {file = "pydantic_core-2.23.3-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:8e22b477bf90db71c156f89a55bfe4d25177b81fce4aa09294d9e805eec13855"}, + {file = "pydantic_core-2.23.3-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:0a0137ddf462575d9bce863c4c95bac3493ba8e22f8c28ca94634b4a1d3e2bb4"}, + {file = "pydantic_core-2.23.3-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:203171e48946c3164fe7691fc349c79241ff8f28306abd4cad5f4f75ed80bc8d"}, + {file = "pydantic_core-2.23.3-cp313-none-win32.whl", hash = "sha256:76bdab0de4acb3f119c2a4bff740e0c7dc2e6de7692774620f7452ce11ca76c8"}, + {file = "pydantic_core-2.23.3-cp313-none-win_amd64.whl", hash = "sha256:37ba321ac2a46100c578a92e9a6aa33afe9ec99ffa084424291d84e456f490c1"}, + {file = "pydantic_core-2.23.3-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:d063c6b9fed7d992bcbebfc9133f4c24b7a7f215d6b102f3e082b1117cddb72c"}, + {file = "pydantic_core-2.23.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:6cb968da9a0746a0cf521b2b5ef25fc5a0bee9b9a1a8214e0a1cfaea5be7e8a4"}, + {file = "pydantic_core-2.23.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:edbefe079a520c5984e30e1f1f29325054b59534729c25b874a16a5048028d16"}, + {file = "pydantic_core-2.23.3-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:cbaaf2ef20d282659093913da9d402108203f7cb5955020bd8d1ae5a2325d1c4"}, + {file = "pydantic_core-2.23.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fb539d7e5dc4aac345846f290cf504d2fd3c1be26ac4e8b5e4c2b688069ff4cf"}, + {file = "pydantic_core-2.23.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7e6f33503c5495059148cc486867e1d24ca35df5fc064686e631e314d959ad5b"}, + {file = "pydantic_core-2.23.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:04b07490bc2f6f2717b10c3969e1b830f5720b632f8ae2f3b8b1542394c47a8e"}, + {file = "pydantic_core-2.23.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:03795b9e8a5d7fda05f3873efc3f59105e2dcff14231680296b87b80bb327295"}, + {file = "pydantic_core-2.23.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:c483dab0f14b8d3f0df0c6c18d70b21b086f74c87ab03c59250dbf6d3c89baba"}, + {file = "pydantic_core-2.23.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8b2682038e255e94baf2c473dca914a7460069171ff5cdd4080be18ab8a7fd6e"}, + {file = "pydantic_core-2.23.3-cp38-none-win32.whl", hash = 
"sha256:f4a57db8966b3a1d1a350012839c6a0099f0898c56512dfade8a1fe5fb278710"}, + {file = "pydantic_core-2.23.3-cp38-none-win_amd64.whl", hash = "sha256:13dd45ba2561603681a2676ca56006d6dee94493f03d5cadc055d2055615c3ea"}, + {file = "pydantic_core-2.23.3-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:82da2f4703894134a9f000e24965df73cc103e31e8c31906cc1ee89fde72cbd8"}, + {file = "pydantic_core-2.23.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:dd9be0a42de08f4b58a3cc73a123f124f65c24698b95a54c1543065baca8cf0e"}, + {file = "pydantic_core-2.23.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:89b731f25c80830c76fdb13705c68fef6a2b6dc494402987c7ea9584fe189f5d"}, + {file = "pydantic_core-2.23.3-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c6de1ec30c4bb94f3a69c9f5f2182baeda5b809f806676675e9ef6b8dc936f28"}, + {file = "pydantic_core-2.23.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bb68b41c3fa64587412b104294b9cbb027509dc2f6958446c502638d481525ef"}, + {file = "pydantic_core-2.23.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1c3980f2843de5184656aab58698011b42763ccba11c4a8c35936c8dd6c7068c"}, + {file = "pydantic_core-2.23.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:94f85614f2cba13f62c3c6481716e4adeae48e1eaa7e8bac379b9d177d93947a"}, + {file = "pydantic_core-2.23.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:510b7fb0a86dc8f10a8bb43bd2f97beb63cffad1203071dc434dac26453955cd"}, + {file = "pydantic_core-2.23.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:1eba2f7ce3e30ee2170410e2171867ea73dbd692433b81a93758ab2de6c64835"}, + {file = "pydantic_core-2.23.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:4b259fd8409ab84b4041b7b3f24dcc41e4696f180b775961ca8142b5b21d0e70"}, + {file = "pydantic_core-2.23.3-cp39-none-win32.whl", hash = "sha256:40d9bd259538dba2f40963286009bf7caf18b5112b19d2b55b09c14dde6db6a7"}, + {file = "pydantic_core-2.23.3-cp39-none-win_amd64.whl", hash = "sha256:5a8cd3074a98ee70173a8633ad3c10e00dcb991ecec57263aacb4095c5efb958"}, + {file = "pydantic_core-2.23.3-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:f399e8657c67313476a121a6944311fab377085ca7f490648c9af97fc732732d"}, + {file = "pydantic_core-2.23.3-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:6b5547d098c76e1694ba85f05b595720d7c60d342f24d5aad32c3049131fa5c4"}, + {file = "pydantic_core-2.23.3-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0dda0290a6f608504882d9f7650975b4651ff91c85673341789a476b1159f211"}, + {file = "pydantic_core-2.23.3-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:65b6e5da855e9c55a0c67f4db8a492bf13d8d3316a59999cfbaf98cc6e401961"}, + {file = "pydantic_core-2.23.3-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:09e926397f392059ce0afdcac920df29d9c833256354d0c55f1584b0b70cf07e"}, + {file = "pydantic_core-2.23.3-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:87cfa0ed6b8c5bd6ae8b66de941cece179281239d482f363814d2b986b79cedc"}, + {file = "pydantic_core-2.23.3-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:e61328920154b6a44d98cabcb709f10e8b74276bc709c9a513a8c37a18786cc4"}, + {file = "pydantic_core-2.23.3-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:ce3317d155628301d649fe5e16a99528d5680af4ec7aa70b90b8dacd2d725c9b"}, + {file = "pydantic_core-2.23.3-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = 
"sha256:e89513f014c6be0d17b00a9a7c81b1c426f4eb9224b15433f3d98c1a071f8433"}, + {file = "pydantic_core-2.23.3-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:4f62c1c953d7ee375df5eb2e44ad50ce2f5aff931723b398b8bc6f0ac159791a"}, + {file = "pydantic_core-2.23.3-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2718443bc671c7ac331de4eef9b673063b10af32a0bb385019ad61dcf2cc8f6c"}, + {file = "pydantic_core-2.23.3-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a0d90e08b2727c5d01af1b5ef4121d2f0c99fbee692c762f4d9d0409c9da6541"}, + {file = "pydantic_core-2.23.3-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2b676583fc459c64146debea14ba3af54e540b61762dfc0613dc4e98c3f66eeb"}, + {file = "pydantic_core-2.23.3-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:50e4661f3337977740fdbfbae084ae5693e505ca2b3130a6d4eb0f2281dc43b8"}, + {file = "pydantic_core-2.23.3-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:68f4cf373f0de6abfe599a38307f4417c1c867ca381c03df27c873a9069cda25"}, + {file = "pydantic_core-2.23.3-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:59d52cf01854cb26c46958552a21acb10dd78a52aa34c86f284e66b209db8cab"}, + {file = "pydantic_core-2.23.3.tar.gz", hash = "sha256:3cb0f65d8b4121c1b015c60104a685feb929a29d7cf204387c7f2688c7974690"}, ] [package.dependencies] @@ -2809,13 +3028,13 @@ typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0" [[package]] name = "pyjwt" -version = "2.8.0" +version = "2.9.0" description = "JSON Web Token implementation in Python" optional = true -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "PyJWT-2.8.0-py3-none-any.whl", hash = "sha256:59127c392cc44c2da5bb3192169a91f429924e17aff6534d70fdc02ab3e04320"}, - {file = "PyJWT-2.8.0.tar.gz", hash = "sha256:57e28d156e3d5c10088e0c68abb90bfac3df82b40a71bd0daa20c65ccd5c23de"}, + {file = "PyJWT-2.9.0-py3-none-any.whl", hash = "sha256:3b02fb0f44517787776cf48f2ae25d8e14f300e6d7545a4315cee571a415e850"}, + {file = "pyjwt-2.9.0.tar.gz", hash = "sha256:7e1e5b56cc735432a7369cbfa0efe50fa113ebecdc04ae6922deba8b84582d0c"}, ] [package.dependencies] @@ -2823,8 +3042,8 @@ cryptography = {version = ">=3.4.0", optional = true, markers = "extra == \"cryp [package.extras] crypto = ["cryptography (>=3.4.0)"] -dev = ["coverage[toml] (==5.0.4)", "cryptography (>=3.4.0)", "pre-commit", "pytest (>=6.0.0,<7.0.0)", "sphinx (>=4.5.0,<5.0.0)", "sphinx-rtd-theme", "zope.interface"] -docs = ["sphinx (>=4.5.0,<5.0.0)", "sphinx-rtd-theme", "zope.interface"] +dev = ["coverage[toml] (==5.0.4)", "cryptography (>=3.4.0)", "pre-commit", "pytest (>=6.0.0,<7.0.0)", "sphinx", "sphinx-rtd-theme", "zope.interface"] +docs = ["sphinx", "sphinx-rtd-theme", "zope.interface"] tests = ["coverage[toml] (==5.0.4)", "pytest (>=6.0.0,<7.0.0)"] [[package]] @@ -2855,27 +3074,45 @@ torch = ["torch"] [[package]] name = "pyparsing" -version = "3.1.2" +version = "3.1.4" description = "pyparsing module - Classes and methods to define and execute parsing grammars" optional = true python-versions = ">=3.6.8" files = [ - {file = "pyparsing-3.1.2-py3-none-any.whl", hash = "sha256:f9db75911801ed778fe61bb643079ff86601aca99fcae6345aa67292038fb742"}, - {file = "pyparsing-3.1.2.tar.gz", hash = "sha256:a1bac0ce561155ecc3ed78ca94d3c9378656ad4c94c1270de543f621420f94ad"}, + {file = "pyparsing-3.1.4-py3-none-any.whl", hash = "sha256:a6a7ee4235a3f944aa1fa2249307708f893fe5717dc603503c6c7969c070fb7c"}, + {file = "pyparsing-3.1.4.tar.gz", hash = 
"sha256:f86ec8d1a83f11977c9a6ea7598e8c27fc5cddfa5b07ea2241edbbde1d7bc032"}, ] [package.extras] diagrams = ["jinja2", "railroad-diagrams"] +[[package]] +name = "pyright" +version = "1.1.380" +description = "Command line wrapper for pyright" +optional = false +python-versions = ">=3.7" +files = [ + {file = "pyright-1.1.380-py3-none-any.whl", hash = "sha256:a6404392053d8848bacc7aebcbd9d318bb46baf1a1a000359305481920f43879"}, + {file = "pyright-1.1.380.tar.gz", hash = "sha256:e6ceb1a5f7e9f03106e0aa1d6fbb4d97735a5e7ffb59f3de6b2db590baf935b2"}, +] + +[package.dependencies] +nodeenv = ">=1.6.0" + +[package.extras] +all = ["twine (>=3.4.1)"] +dev = ["twine (>=3.4.1)"] + [[package]] name = "pytest" -version = "8.2.2" +version = "8.3.3" description = "pytest: simple powerful testing with Python" optional = false python-versions = ">=3.8" files = [ - {file = "pytest-8.2.2-py3-none-any.whl", hash = "sha256:c434598117762e2bd304e526244f67bf66bbd7b5d6cf22138be51ff661980343"}, - {file = "pytest-8.2.2.tar.gz", hash = "sha256:de4bb8104e201939ccdc688b27a89a7be2079b22e2bd2b07f806b6ba71117977"}, + {file = "pytest-8.3.3-py3-none-any.whl", hash = "sha256:a6853c7375b2663155079443d2e45de913a911a11d669df02a50814944db57b2"}, + {file = "pytest-8.3.3.tar.gz", hash = "sha256:70b98107bd648308a7952b06e6ca9a50bc660be218d53c257cc1fc94fda10181"}, ] [package.dependencies] @@ -2883,7 +3120,7 @@ colorama = {version = "*", markers = "sys_platform == \"win32\""} exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""} iniconfig = "*" packaging = "*" -pluggy = ">=1.5,<2.0" +pluggy = ">=1.5,<2" tomli = {version = ">=1", markers = "python_version < \"3.11\""} [package.extras] @@ -2954,13 +3191,13 @@ cli = ["click (>=5.0)"] [[package]] name = "pytz" -version = "2024.1" +version = "2024.2" description = "World timezone definitions, modern and historical" -optional = false +optional = true python-versions = "*" files = [ - {file = "pytz-2024.1-py2.py3-none-any.whl", hash = "sha256:328171f4e3623139da4983451950b28e95ac706e13f3f2630a879749e7a8b319"}, - {file = "pytz-2024.1.tar.gz", hash = "sha256:2a29735ea9c18baf14b448846bde5a48030ed267578472d8955cd0e7443a9812"}, + {file = "pytz-2024.2-py2.py3-none-any.whl", hash = "sha256:31c7c1817eb7fae7ca4b8c7ee50c72f93aa2dd863de768e1ef4245d426aa0725"}, + {file = "pytz-2024.2.tar.gz", hash = "sha256:2aa355083c50a0f93fa581709deac0c9ad65cca8a9e9beac660adcbd493c798a"}, ] [[package]] @@ -2988,77 +3225,66 @@ files = [ [[package]] name = "pyyaml" -version = "6.0.1" +version = "6.0.2" description = "YAML parser and emitter for Python" optional = true -python-versions = ">=3.6" -files = [ - {file = "PyYAML-6.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d858aa552c999bc8a8d57426ed01e40bef403cd8ccdd0fc5f6f04a00414cac2a"}, - {file = "PyYAML-6.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fd66fc5d0da6d9815ba2cebeb4205f95818ff4b79c3ebe268e75d961704af52f"}, - {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, - {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, - {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, - {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = 
"sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"}, - {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, - {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, - {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, - {file = "PyYAML-6.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f003ed9ad21d6a4713f0a9b5a7a0a79e08dd0f221aff4525a2be4c346ee60aab"}, - {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, - {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, - {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, - {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"}, - {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, - {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, - {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, - {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, - {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, - {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, - {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, - {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"}, - {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, - {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, - {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, - {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afd7e57eddb1a54f0f1a974bc4391af8bcce0b444685d936840f125cf046d5bd"}, - {file = "PyYAML-6.0.1-cp36-cp36m-win32.whl", hash = "sha256:fca0e3a251908a499833aa292323f32437106001d436eca0e6e7833256674585"}, - {file = "PyYAML-6.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:f22ac1c3cac4dbc50079e965eba2c1058622631e526bd9afd45fedd49ba781fa"}, - {file = "PyYAML-6.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b1275ad35a5d18c62a7220633c913e1b42d44b46ee12554e5fd39c70a243d6a3"}, - {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash 
= "sha256:18aeb1bf9a78867dc38b259769503436b7c72f7a1f1f4c93ff9a17de54319b27"}, - {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:596106435fa6ad000c2991a98fa58eeb8656ef2325d7e158344fb33864ed87e3"}, - {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:baa90d3f661d43131ca170712d903e6295d1f7a0f595074f151c0aed377c9b9c"}, - {file = "PyYAML-6.0.1-cp37-cp37m-win32.whl", hash = "sha256:9046c58c4395dff28dd494285c82ba00b546adfc7ef001486fbf0324bc174fba"}, - {file = "PyYAML-6.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:4fb147e7a67ef577a588a0e2c17b6db51dda102c71de36f8549b6816a96e1867"}, - {file = "PyYAML-6.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1d4c7e777c441b20e32f52bd377e0c409713e8bb1386e1099c2415f26e479595"}, - {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, - {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, - {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, - {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"}, - {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, - {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, - {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, - {file = "PyYAML-6.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c8098ddcc2a85b61647b2590f825f3db38891662cfc2fc776415143f599bb859"}, - {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, - {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, - {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, - {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"}, - {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, - {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, - {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, -] - -[[package]] -name = "querystring-parser" -version = "1.2.4" -description = "QueryString parser for Python/Django that correctly handles nested dictionaries" -optional = true -python-versions = "*" +python-versions = ">=3.8" files = [ - {file = "querystring_parser-1.2.4-py2.py3-none-any.whl", hash = "sha256:d2fa90765eaf0de96c8b087872991a10238e89ba015ae59fedfed6bd61c242a0"}, - {file = "querystring_parser-1.2.4.tar.gz", hash = "sha256:644fce1cffe0530453b43a83a38094dbe422ccba8c9b2f2a1c00280e14ca8a62"}, + {file = 
"PyYAML-6.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0a9a2848a5b7feac301353437eb7d5957887edbf81d56e903999a75a3d743086"}, + {file = "PyYAML-6.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:29717114e51c84ddfba879543fb232a6ed60086602313ca38cce623c1d62cfbf"}, + {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8824b5a04a04a047e72eea5cec3bc266db09e35de6bdfe34c9436ac5ee27d237"}, + {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7c36280e6fb8385e520936c3cb3b8042851904eba0e58d277dca80a5cfed590b"}, + {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ec031d5d2feb36d1d1a24380e4db6d43695f3748343d99434e6f5f9156aaa2ed"}, + {file = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:936d68689298c36b53b29f23c6dbb74de12b4ac12ca6cfe0e047bedceea56180"}, + {file = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:23502f431948090f597378482b4812b0caae32c22213aecf3b55325e049a6c68"}, + {file = "PyYAML-6.0.2-cp310-cp310-win32.whl", hash = "sha256:2e99c6826ffa974fe6e27cdb5ed0021786b03fc98e5ee3c5bfe1fd5015f42b99"}, + {file = "PyYAML-6.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:a4d3091415f010369ae4ed1fc6b79def9416358877534caf6a0fdd2146c87a3e"}, + {file = "PyYAML-6.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cc1c1159b3d456576af7a3e4d1ba7e6924cb39de8f67111c735f6fc832082774"}, + {file = "PyYAML-6.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1e2120ef853f59c7419231f3bf4e7021f1b936f6ebd222406c3b60212205d2ee"}, + {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d225db5a45f21e78dd9358e58a98702a0302f2659a3c6cd320564b75b86f47c"}, + {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5ac9328ec4831237bec75defaf839f7d4564be1e6b25ac710bd1a96321cc8317"}, + {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ad2a3decf9aaba3d29c8f537ac4b243e36bef957511b4766cb0057d32b0be85"}, + {file = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ff3824dc5261f50c9b0dfb3be22b4567a6f938ccce4587b38952d85fd9e9afe4"}, + {file = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:797b4f722ffa07cc8d62053e4cff1486fa6dc094105d13fea7b1de7d8bf71c9e"}, + {file = "PyYAML-6.0.2-cp311-cp311-win32.whl", hash = "sha256:11d8f3dd2b9c1207dcaf2ee0bbbfd5991f571186ec9cc78427ba5bd32afae4b5"}, + {file = "PyYAML-6.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:e10ce637b18caea04431ce14fabcf5c64a1c61ec9c56b071a4b7ca131ca52d44"}, + {file = "PyYAML-6.0.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c70c95198c015b85feafc136515252a261a84561b7b1d51e3384e0655ddf25ab"}, + {file = "PyYAML-6.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ce826d6ef20b1bc864f0a68340c8b3287705cae2f8b4b1d932177dcc76721725"}, + {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f71ea527786de97d1a0cc0eacd1defc0985dcf6b3f17bb77dcfc8c34bec4dc5"}, + {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9b22676e8097e9e22e36d6b7bda33190d0d400f345f23d4065d48f4ca7ae0425"}, + {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80bab7bfc629882493af4aa31a4cfa43a4c57c83813253626916b8c7ada83476"}, + {file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = 
"sha256:0833f8694549e586547b576dcfaba4a6b55b9e96098b36cdc7ebefe667dfed48"}, + {file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8b9c7197f7cb2738065c481a0461e50ad02f18c78cd75775628afb4d7137fb3b"}, + {file = "PyYAML-6.0.2-cp312-cp312-win32.whl", hash = "sha256:ef6107725bd54b262d6dedcc2af448a266975032bc85ef0172c5f059da6325b4"}, + {file = "PyYAML-6.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:7e7401d0de89a9a855c839bc697c079a4af81cf878373abd7dc625847d25cbd8"}, + {file = "PyYAML-6.0.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:efdca5630322a10774e8e98e1af481aad470dd62c3170801852d752aa7a783ba"}, + {file = "PyYAML-6.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:50187695423ffe49e2deacb8cd10510bc361faac997de9efef88badc3bb9e2d1"}, + {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ffe8360bab4910ef1b9e87fb812d8bc0a308b0d0eef8c8f44e0254ab3b07133"}, + {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:17e311b6c678207928d649faa7cb0d7b4c26a0ba73d41e99c4fff6b6c3276484"}, + {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70b189594dbe54f75ab3a1acec5f1e3faa7e8cf2f1e08d9b561cb41b845f69d5"}, + {file = "PyYAML-6.0.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:41e4e3953a79407c794916fa277a82531dd93aad34e29c2a514c2c0c5fe971cc"}, + {file = "PyYAML-6.0.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:68ccc6023a3400877818152ad9a1033e3db8625d899c72eacb5a668902e4d652"}, + {file = "PyYAML-6.0.2-cp313-cp313-win32.whl", hash = "sha256:bc2fa7c6b47d6bc618dd7fb02ef6fdedb1090ec036abab80d4681424b84c1183"}, + {file = "PyYAML-6.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:8388ee1976c416731879ac16da0aff3f63b286ffdd57cdeb95f3f2e085687563"}, + {file = "PyYAML-6.0.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:24471b829b3bf607e04e88d79542a9d48bb037c2267d7927a874e6c205ca7e9a"}, + {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7fded462629cfa4b685c5416b949ebad6cec74af5e2d42905d41e257e0869f5"}, + {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d84a1718ee396f54f3a086ea0a66d8e552b2ab2017ef8b420e92edbc841c352d"}, + {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9056c1ecd25795207ad294bcf39f2db3d845767be0ea6e6a34d856f006006083"}, + {file = "PyYAML-6.0.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:82d09873e40955485746739bcb8b4586983670466c23382c19cffecbf1fd8706"}, + {file = "PyYAML-6.0.2-cp38-cp38-win32.whl", hash = "sha256:43fa96a3ca0d6b1812e01ced1044a003533c47f6ee8aca31724f78e93ccc089a"}, + {file = "PyYAML-6.0.2-cp38-cp38-win_amd64.whl", hash = "sha256:01179a4a8559ab5de078078f37e5c1a30d76bb88519906844fd7bdea1b7729ff"}, + {file = "PyYAML-6.0.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:688ba32a1cffef67fd2e9398a2efebaea461578b0923624778664cc1c914db5d"}, + {file = "PyYAML-6.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a8786accb172bd8afb8be14490a16625cbc387036876ab6ba70912730faf8e1f"}, + {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8e03406cac8513435335dbab54c0d385e4a49e4945d2909a581c83647ca0290"}, + {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f753120cb8181e736c57ef7636e83f31b9c0d1722c516f7e86cf15b7aa57ff12"}, + {file = 
"PyYAML-6.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3b1fdb9dc17f5a7677423d508ab4f243a726dea51fa5e70992e59a7411c89d19"}, + {file = "PyYAML-6.0.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0b69e4ce7a131fe56b7e4d770c67429700908fc0752af059838b1cfb41960e4e"}, + {file = "PyYAML-6.0.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a9f8c2e67970f13b16084e04f134610fd1d374bf477b17ec1599185cf611d725"}, + {file = "PyYAML-6.0.2-cp39-cp39-win32.whl", hash = "sha256:6395c297d42274772abc367baaa79683958044e5d3835486c16da75d2a694631"}, + {file = "PyYAML-6.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:39693e1f8320ae4f43943590b49779ffb98acb81f788220ea932a6b6c51004d8"}, + {file = "pyyaml-6.0.2.tar.gz", hash = "sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e"}, ] -[package.dependencies] -six = "*" - [[package]] name = "ratelimiter" version = "1.2.0.post0" @@ -3127,34 +3353,48 @@ files = [ decorator = ">=3.4.2" py = ">=1.4.26,<2.0.0" +[[package]] +name = "rsa" +version = "4.9" +description = "Pure-Python RSA implementation" +optional = true +python-versions = ">=3.6,<4" +files = [ + {file = "rsa-4.9-py3-none-any.whl", hash = "sha256:90260d9058e514786967344d0ef75fa8727eed8a7d2e43ce9f4bcf1b536174f7"}, + {file = "rsa-4.9.tar.gz", hash = "sha256:e38464a49c6c85d7f1351b0126661487a7e0a14a50f1675ec50eb34d4f20ef21"}, +] + +[package.dependencies] +pyasn1 = ">=0.1.3" + [[package]] name = "scikit-learn" -version = "1.5.0" +version = "1.5.2" description = "A set of python modules for machine learning and data mining" optional = true python-versions = ">=3.9" files = [ - {file = "scikit_learn-1.5.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:12e40ac48555e6b551f0a0a5743cc94cc5a765c9513fe708e01f0aa001da2801"}, - {file = "scikit_learn-1.5.0-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:f405c4dae288f5f6553b10c4ac9ea7754d5180ec11e296464adb5d6ac68b6ef5"}, - {file = "scikit_learn-1.5.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:df8ccabbf583315f13160a4bb06037bde99ea7d8211a69787a6b7c5d4ebb6fc3"}, - {file = "scikit_learn-1.5.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2c75ea812cd83b1385bbfa94ae971f0d80adb338a9523f6bbcb5e0b0381151d4"}, - {file = "scikit_learn-1.5.0-cp310-cp310-win_amd64.whl", hash = "sha256:a90c5da84829a0b9b4bf00daf62754b2be741e66b5946911f5bdfaa869fcedd6"}, - {file = "scikit_learn-1.5.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:2a65af2d8a6cce4e163a7951a4cfbfa7fceb2d5c013a4b593686c7f16445cf9d"}, - {file = "scikit_learn-1.5.0-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:4c0c56c3005f2ec1db3787aeaabefa96256580678cec783986836fc64f8ff622"}, - {file = "scikit_learn-1.5.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f77547165c00625551e5c250cefa3f03f2fc92c5e18668abd90bfc4be2e0bff"}, - {file = "scikit_learn-1.5.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:118a8d229a41158c9f90093e46b3737120a165181a1b58c03461447aa4657415"}, - {file = "scikit_learn-1.5.0-cp311-cp311-win_amd64.whl", hash = "sha256:a03b09f9f7f09ffe8c5efffe2e9de1196c696d811be6798ad5eddf323c6f4d40"}, - {file = "scikit_learn-1.5.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:460806030c666addee1f074788b3978329a5bfdc9b7d63e7aad3f6d45c67a210"}, - {file = "scikit_learn-1.5.0-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:1b94d6440603752b27842eda97f6395f570941857456c606eb1d638efdb38184"}, - {file = 
"scikit_learn-1.5.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d82c2e573f0f2f2f0be897e7a31fcf4e73869247738ab8c3ce7245549af58ab8"}, - {file = "scikit_learn-1.5.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a3a10e1d9e834e84d05e468ec501a356226338778769317ee0b84043c0d8fb06"}, - {file = "scikit_learn-1.5.0-cp312-cp312-win_amd64.whl", hash = "sha256:855fc5fa8ed9e4f08291203af3d3e5fbdc4737bd617a371559aaa2088166046e"}, - {file = "scikit_learn-1.5.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:40fb7d4a9a2db07e6e0cae4dc7bdbb8fada17043bac24104d8165e10e4cff1a2"}, - {file = "scikit_learn-1.5.0-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:47132440050b1c5beb95f8ba0b2402bbd9057ce96ec0ba86f2f445dd4f34df67"}, - {file = "scikit_learn-1.5.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:174beb56e3e881c90424e21f576fa69c4ffcf5174632a79ab4461c4c960315ac"}, - {file = "scikit_learn-1.5.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:261fe334ca48f09ed64b8fae13f9b46cc43ac5f580c4a605cbb0a517456c8f71"}, - {file = "scikit_learn-1.5.0-cp39-cp39-win_amd64.whl", hash = "sha256:057b991ac64b3e75c9c04b5f9395eaf19a6179244c089afdebaad98264bff37c"}, - {file = "scikit_learn-1.5.0.tar.gz", hash = "sha256:789e3db01c750ed6d496fa2db7d50637857b451e57bcae863bff707c1247bef7"}, + {file = "scikit_learn-1.5.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:299406827fb9a4f862626d0fe6c122f5f87f8910b86fe5daa4c32dcd742139b6"}, + {file = "scikit_learn-1.5.2-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:2d4cad1119c77930b235579ad0dc25e65c917e756fe80cab96aa3b9428bd3fb0"}, + {file = "scikit_learn-1.5.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8c412ccc2ad9bf3755915e3908e677b367ebc8d010acbb3f182814524f2e5540"}, + {file = "scikit_learn-1.5.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3a686885a4b3818d9e62904d91b57fa757fc2bed3e465c8b177be652f4dd37c8"}, + {file = "scikit_learn-1.5.2-cp310-cp310-win_amd64.whl", hash = "sha256:c15b1ca23d7c5f33cc2cb0a0d6aaacf893792271cddff0edbd6a40e8319bc113"}, + {file = "scikit_learn-1.5.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:03b6158efa3faaf1feea3faa884c840ebd61b6484167c711548fce208ea09445"}, + {file = "scikit_learn-1.5.2-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:1ff45e26928d3b4eb767a8f14a9a6efbf1cbff7c05d1fb0f95f211a89fd4f5de"}, + {file = "scikit_learn-1.5.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f763897fe92d0e903aa4847b0aec0e68cadfff77e8a0687cabd946c89d17e675"}, + {file = "scikit_learn-1.5.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f8b0ccd4a902836493e026c03256e8b206656f91fbcc4fde28c57a5b752561f1"}, + {file = "scikit_learn-1.5.2-cp311-cp311-win_amd64.whl", hash = "sha256:6c16d84a0d45e4894832b3c4d0bf73050939e21b99b01b6fd59cbb0cf39163b6"}, + {file = "scikit_learn-1.5.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:f932a02c3f4956dfb981391ab24bda1dbd90fe3d628e4b42caef3e041c67707a"}, + {file = "scikit_learn-1.5.2-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:3b923d119d65b7bd555c73be5423bf06c0105678ce7e1f558cb4b40b0a5502b1"}, + {file = "scikit_learn-1.5.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f60021ec1574e56632be2a36b946f8143bf4e5e6af4a06d85281adc22938e0dd"}, + {file = "scikit_learn-1.5.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:394397841449853c2290a32050382edaec3da89e35b3e03d6cc966aebc6a8ae6"}, + {file = "scikit_learn-1.5.2-cp312-cp312-win_amd64.whl", hash = "sha256:57cc1786cfd6bd118220a92ede80270132aa353647684efa385a74244a41e3b1"}, + {file = "scikit_learn-1.5.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:757c7d514ddb00ae249832fe87100d9c73c6ea91423802872d9e74970a0e40b9"}, + {file = "scikit_learn-1.5.2-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:52788f48b5d8bca5c0736c175fa6bdaab2ef00a8f536cda698db61bd89c551c1"}, + {file = "scikit_learn-1.5.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:643964678f4b5fbdc95cbf8aec638acc7aa70f5f79ee2cdad1eec3df4ba6ead8"}, + {file = "scikit_learn-1.5.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ca64b3089a6d9b9363cd3546f8978229dcbb737aceb2c12144ee3f70f95684b7"}, + {file = "scikit_learn-1.5.2-cp39-cp39-win_amd64.whl", hash = "sha256:3bed4909ba187aca80580fe2ef370d9180dcf18e621a27c4cf2ef10d279a7efe"}, + {file = "scikit_learn-1.5.2.tar.gz", hash = "sha256:b4237ed7b3fdd0a4882792e68ef2545d5baa50aca3bb45aa7df468138ad8f94d"}, ] [package.dependencies] @@ -3165,54 +3405,62 @@ threadpoolctl = ">=3.1.0" [package.extras] benchmark = ["matplotlib (>=3.3.4)", "memory_profiler (>=0.57.0)", "pandas (>=1.1.5)"] -build = ["cython (>=3.0.10)", "meson-python (>=0.15.0)", "numpy (>=1.19.5)", "scipy (>=1.6.0)"] -docs = ["Pillow (>=7.1.2)", "matplotlib (>=3.3.4)", "memory_profiler (>=0.57.0)", "numpydoc (>=1.2.0)", "pandas (>=1.1.5)", "plotly (>=5.14.0)", "polars (>=0.20.23)", "pooch (>=1.6.0)", "scikit-image (>=0.17.2)", "seaborn (>=0.9.0)", "sphinx (>=6.0.0)", "sphinx-copybutton (>=0.5.2)", "sphinx-gallery (>=0.15.0)", "sphinx-prompt (>=1.3.0)", "sphinxext-opengraph (>=0.4.2)"] +build = ["cython (>=3.0.10)", "meson-python (>=0.16.0)", "numpy (>=1.19.5)", "scipy (>=1.6.0)"] +docs = ["Pillow (>=7.1.2)", "matplotlib (>=3.3.4)", "memory_profiler (>=0.57.0)", "numpydoc (>=1.2.0)", "pandas (>=1.1.5)", "plotly (>=5.14.0)", "polars (>=0.20.30)", "pooch (>=1.6.0)", "pydata-sphinx-theme (>=0.15.3)", "scikit-image (>=0.17.2)", "seaborn (>=0.9.0)", "sphinx (>=7.3.7)", "sphinx-copybutton (>=0.5.2)", "sphinx-design (>=0.5.0)", "sphinx-design (>=0.6.0)", "sphinx-gallery (>=0.16.0)", "sphinx-prompt (>=1.4.0)", "sphinx-remove-toctrees (>=1.0.0.post1)", "sphinxcontrib-sass (>=0.3.4)", "sphinxext-opengraph (>=0.9.1)"] examples = ["matplotlib (>=3.3.4)", "pandas (>=1.1.5)", "plotly (>=5.14.0)", "pooch (>=1.6.0)", "scikit-image (>=0.17.2)", "seaborn (>=0.9.0)"] install = ["joblib (>=1.2.0)", "numpy (>=1.19.5)", "scipy (>=1.6.0)", "threadpoolctl (>=3.1.0)"] maintenance = ["conda-lock (==2.5.6)"] -tests = ["black (>=24.3.0)", "matplotlib (>=3.3.4)", "mypy (>=1.9)", "numpydoc (>=1.2.0)", "pandas (>=1.1.5)", "polars (>=0.20.23)", "pooch (>=1.6.0)", "pyamg (>=4.0.0)", "pyarrow (>=12.0.0)", "pytest (>=7.1.2)", "pytest-cov (>=2.9.0)", "ruff (>=0.2.1)", "scikit-image (>=0.17.2)"] +tests = ["black (>=24.3.0)", "matplotlib (>=3.3.4)", "mypy (>=1.9)", "numpydoc (>=1.2.0)", "pandas (>=1.1.5)", "polars (>=0.20.30)", "pooch (>=1.6.0)", "pyamg (>=4.0.0)", "pyarrow (>=12.0.0)", "pytest (>=7.1.2)", "pytest-cov (>=2.9.0)", "ruff (>=0.2.1)", "scikit-image (>=0.17.2)"] [[package]] name = "scipy" -version = "1.13.1" +version = "1.14.1" description = "Fundamental algorithms for scientific computing in Python" optional = true -python-versions = ">=3.9" -files = [ - {file = "scipy-1.13.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = 
"sha256:20335853b85e9a49ff7572ab453794298bcf0354d8068c5f6775a0eabf350aca"}, - {file = "scipy-1.13.1-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:d605e9c23906d1994f55ace80e0125c587f96c020037ea6aa98d01b4bd2e222f"}, - {file = "scipy-1.13.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cfa31f1def5c819b19ecc3a8b52d28ffdcc7ed52bb20c9a7589669dd3c250989"}, - {file = "scipy-1.13.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f26264b282b9da0952a024ae34710c2aff7d27480ee91a2e82b7b7073c24722f"}, - {file = "scipy-1.13.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:eccfa1906eacc02de42d70ef4aecea45415f5be17e72b61bafcfd329bdc52e94"}, - {file = "scipy-1.13.1-cp310-cp310-win_amd64.whl", hash = "sha256:2831f0dc9c5ea9edd6e51e6e769b655f08ec6db6e2e10f86ef39bd32eb11da54"}, - {file = "scipy-1.13.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:27e52b09c0d3a1d5b63e1105f24177e544a222b43611aaf5bc44d4a0979e32f9"}, - {file = "scipy-1.13.1-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:54f430b00f0133e2224c3ba42b805bfd0086fe488835effa33fa291561932326"}, - {file = "scipy-1.13.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e89369d27f9e7b0884ae559a3a956e77c02114cc60a6058b4e5011572eea9299"}, - {file = "scipy-1.13.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a78b4b3345f1b6f68a763c6e25c0c9a23a9fd0f39f5f3d200efe8feda560a5fa"}, - {file = "scipy-1.13.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:45484bee6d65633752c490404513b9ef02475b4284c4cfab0ef946def50b3f59"}, - {file = "scipy-1.13.1-cp311-cp311-win_amd64.whl", hash = "sha256:5713f62f781eebd8d597eb3f88b8bf9274e79eeabf63afb4a737abc6c84ad37b"}, - {file = "scipy-1.13.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:5d72782f39716b2b3509cd7c33cdc08c96f2f4d2b06d51e52fb45a19ca0c86a1"}, - {file = "scipy-1.13.1-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:017367484ce5498445aade74b1d5ab377acdc65e27095155e448c88497755a5d"}, - {file = "scipy-1.13.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:949ae67db5fa78a86e8fa644b9a6b07252f449dcf74247108c50e1d20d2b4627"}, - {file = "scipy-1.13.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:de3ade0e53bc1f21358aa74ff4830235d716211d7d077e340c7349bc3542e884"}, - {file = "scipy-1.13.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:2ac65fb503dad64218c228e2dc2d0a0193f7904747db43014645ae139c8fad16"}, - {file = "scipy-1.13.1-cp312-cp312-win_amd64.whl", hash = "sha256:cdd7dacfb95fea358916410ec61bbc20440f7860333aee6d882bb8046264e949"}, - {file = "scipy-1.13.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:436bbb42a94a8aeef855d755ce5a465479c721e9d684de76bf61a62e7c2b81d5"}, - {file = "scipy-1.13.1-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:8335549ebbca860c52bf3d02f80784e91a004b71b059e3eea9678ba994796a24"}, - {file = "scipy-1.13.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d533654b7d221a6a97304ab63c41c96473ff04459e404b83275b60aa8f4b7004"}, - {file = "scipy-1.13.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:637e98dcf185ba7f8e663e122ebf908c4702420477ae52a04f9908707456ba4d"}, - {file = "scipy-1.13.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a014c2b3697bde71724244f63de2476925596c24285c7a637364761f8710891c"}, - {file = "scipy-1.13.1-cp39-cp39-win_amd64.whl", hash = "sha256:392e4ec766654852c25ebad4f64e4e584cf19820b980bc04960bca0b0cd6eaa2"}, - {file = 
"scipy-1.13.1.tar.gz", hash = "sha256:095a87a0312b08dfd6a6155cbbd310a8c51800fc931b8c0b84003014b874ed3c"}, -] - -[package.dependencies] -numpy = ">=1.22.4,<2.3" - -[package.extras] -dev = ["cython-lint (>=0.12.2)", "doit (>=0.36.0)", "mypy", "pycodestyle", "pydevtool", "rich-click", "ruff", "types-psutil", "typing_extensions"] -doc = ["jupyterlite-pyodide-kernel", "jupyterlite-sphinx (>=0.12.0)", "jupytext", "matplotlib (>=3.5)", "myst-nb", "numpydoc", "pooch", "pydata-sphinx-theme (>=0.15.2)", "sphinx (>=5.0.0)", "sphinx-design (>=0.4.0)"] -test = ["array-api-strict", "asv", "gmpy2", "hypothesis (>=6.30)", "mpmath", "pooch", "pytest", "pytest-cov", "pytest-timeout", "pytest-xdist", "scikit-umfpack", "threadpoolctl"] +python-versions = ">=3.10" +files = [ + {file = "scipy-1.14.1-cp310-cp310-macosx_10_13_x86_64.whl", hash = "sha256:b28d2ca4add7ac16ae8bb6632a3c86e4b9e4d52d3e34267f6e1b0c1f8d87e389"}, + {file = "scipy-1.14.1-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:d0d2821003174de06b69e58cef2316a6622b60ee613121199cb2852a873f8cf3"}, + {file = "scipy-1.14.1-cp310-cp310-macosx_14_0_arm64.whl", hash = "sha256:8bddf15838ba768bb5f5083c1ea012d64c9a444e16192762bd858f1e126196d0"}, + {file = "scipy-1.14.1-cp310-cp310-macosx_14_0_x86_64.whl", hash = "sha256:97c5dddd5932bd2a1a31c927ba5e1463a53b87ca96b5c9bdf5dfd6096e27efc3"}, + {file = "scipy-1.14.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2ff0a7e01e422c15739ecd64432743cf7aae2b03f3084288f399affcefe5222d"}, + {file = "scipy-1.14.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8e32dced201274bf96899e6491d9ba3e9a5f6b336708656466ad0522d8528f69"}, + {file = "scipy-1.14.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:8426251ad1e4ad903a4514712d2fa8fdd5382c978010d1c6f5f37ef286a713ad"}, + {file = "scipy-1.14.1-cp310-cp310-win_amd64.whl", hash = "sha256:a49f6ed96f83966f576b33a44257d869756df6cf1ef4934f59dd58b25e0327e5"}, + {file = "scipy-1.14.1-cp311-cp311-macosx_10_13_x86_64.whl", hash = "sha256:2da0469a4ef0ecd3693761acbdc20f2fdeafb69e6819cc081308cc978153c675"}, + {file = "scipy-1.14.1-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:c0ee987efa6737242745f347835da2cc5bb9f1b42996a4d97d5c7ff7928cb6f2"}, + {file = "scipy-1.14.1-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:3a1b111fac6baec1c1d92f27e76511c9e7218f1695d61b59e05e0fe04dc59617"}, + {file = "scipy-1.14.1-cp311-cp311-macosx_14_0_x86_64.whl", hash = "sha256:8475230e55549ab3f207bff11ebfc91c805dc3463ef62eda3ccf593254524ce8"}, + {file = "scipy-1.14.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:278266012eb69f4a720827bdd2dc54b2271c97d84255b2faaa8f161a158c3b37"}, + {file = "scipy-1.14.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fef8c87f8abfb884dac04e97824b61299880c43f4ce675dd2cbeadd3c9b466d2"}, + {file = "scipy-1.14.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:b05d43735bb2f07d689f56f7b474788a13ed8adc484a85aa65c0fd931cf9ccd2"}, + {file = "scipy-1.14.1-cp311-cp311-win_amd64.whl", hash = "sha256:716e389b694c4bb564b4fc0c51bc84d381735e0d39d3f26ec1af2556ec6aad94"}, + {file = "scipy-1.14.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:631f07b3734d34aced009aaf6fedfd0eb3498a97e581c3b1e5f14a04164a456d"}, + {file = "scipy-1.14.1-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:af29a935803cc707ab2ed7791c44288a682f9c8107bc00f0eccc4f92c08d6e07"}, + {file = "scipy-1.14.1-cp312-cp312-macosx_14_0_arm64.whl", hash = 
"sha256:2843f2d527d9eebec9a43e6b406fb7266f3af25a751aa91d62ff416f54170bc5"}, + {file = "scipy-1.14.1-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:eb58ca0abd96911932f688528977858681a59d61a7ce908ffd355957f7025cfc"}, + {file = "scipy-1.14.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:30ac8812c1d2aab7131a79ba62933a2a76f582d5dbbc695192453dae67ad6310"}, + {file = "scipy-1.14.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f9ea80f2e65bdaa0b7627fb00cbeb2daf163caa015e59b7516395fe3bd1e066"}, + {file = "scipy-1.14.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:edaf02b82cd7639db00dbff629995ef185c8df4c3ffa71a5562a595765a06ce1"}, + {file = "scipy-1.14.1-cp312-cp312-win_amd64.whl", hash = "sha256:2ff38e22128e6c03ff73b6bb0f85f897d2362f8c052e3b8ad00532198fbdae3f"}, + {file = "scipy-1.14.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:1729560c906963fc8389f6aac023739ff3983e727b1a4d87696b7bf108316a79"}, + {file = "scipy-1.14.1-cp313-cp313-macosx_12_0_arm64.whl", hash = "sha256:4079b90df244709e675cdc8b93bfd8a395d59af40b72e339c2287c91860deb8e"}, + {file = "scipy-1.14.1-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:e0cf28db0f24a38b2a0ca33a85a54852586e43cf6fd876365c86e0657cfe7d73"}, + {file = "scipy-1.14.1-cp313-cp313-macosx_14_0_x86_64.whl", hash = "sha256:0c2f95de3b04e26f5f3ad5bb05e74ba7f68b837133a4492414b3afd79dfe540e"}, + {file = "scipy-1.14.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b99722ea48b7ea25e8e015e8341ae74624f72e5f21fc2abd45f3a93266de4c5d"}, + {file = "scipy-1.14.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5149e3fd2d686e42144a093b206aef01932a0059c2a33ddfa67f5f035bdfe13e"}, + {file = "scipy-1.14.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:e4f5a7c49323533f9103d4dacf4e4f07078f360743dec7f7596949149efeec06"}, + {file = "scipy-1.14.1-cp313-cp313-win_amd64.whl", hash = "sha256:baff393942b550823bfce952bb62270ee17504d02a1801d7fd0719534dfb9c84"}, + {file = "scipy-1.14.1.tar.gz", hash = "sha256:5a275584e726026a5699459aa72f828a610821006228e841b94275c4a7c08417"}, +] + +[package.dependencies] +numpy = ">=1.23.5,<2.3" + +[package.extras] +dev = ["cython-lint (>=0.12.2)", "doit (>=0.36.0)", "mypy (==1.10.0)", "pycodestyle", "pydevtool", "rich-click", "ruff (>=0.0.292)", "types-psutil", "typing_extensions"] +doc = ["jupyterlite-pyodide-kernel", "jupyterlite-sphinx (>=0.13.1)", "jupytext", "matplotlib (>=3.5)", "myst-nb", "numpydoc", "pooch", "pydata-sphinx-theme (>=0.15.2)", "sphinx (>=5.0.0,<=7.3.7)", "sphinx-design (>=0.4.0)"] +test = ["Cython", "array-api-strict (>=2.0)", "asv", "gmpy2", "hypothesis (>=6.30)", "meson", "mpmath", "ninja", "pooch", "pytest", "pytest-cov", "pytest-timeout", "pytest-xdist", "scikit-umfpack", "threadpoolctl"] [[package]] name = "six" @@ -3260,64 +3508,64 @@ files = [ [[package]] name = "sqlalchemy" -version = "2.0.30" +version = "2.0.34" description = "Database Abstraction Library" optional = false python-versions = ">=3.7" files = [ - {file = "SQLAlchemy-2.0.30-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:3b48154678e76445c7ded1896715ce05319f74b1e73cf82d4f8b59b46e9c0ddc"}, - {file = "SQLAlchemy-2.0.30-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2753743c2afd061bb95a61a51bbb6a1a11ac1c44292fad898f10c9839a7f75b2"}, - {file = "SQLAlchemy-2.0.30-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a7bfc726d167f425d4c16269a9a10fe8630ff6d14b683d588044dcef2d0f6be7"}, - {file = 
"SQLAlchemy-2.0.30-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c4f61ada6979223013d9ab83a3ed003ded6959eae37d0d685db2c147e9143797"}, - {file = "SQLAlchemy-2.0.30-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:3a365eda439b7a00732638f11072907c1bc8e351c7665e7e5da91b169af794af"}, - {file = "SQLAlchemy-2.0.30-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:bba002a9447b291548e8d66fd8c96a6a7ed4f2def0bb155f4f0a1309fd2735d5"}, - {file = "SQLAlchemy-2.0.30-cp310-cp310-win32.whl", hash = "sha256:0138c5c16be3600923fa2169532205d18891b28afa817cb49b50e08f62198bb8"}, - {file = "SQLAlchemy-2.0.30-cp310-cp310-win_amd64.whl", hash = "sha256:99650e9f4cf3ad0d409fed3eec4f071fadd032e9a5edc7270cd646a26446feeb"}, - {file = "SQLAlchemy-2.0.30-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:955991a09f0992c68a499791a753523f50f71a6885531568404fa0f231832aa0"}, - {file = "SQLAlchemy-2.0.30-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f69e4c756ee2686767eb80f94c0125c8b0a0b87ede03eacc5c8ae3b54b99dc46"}, - {file = "SQLAlchemy-2.0.30-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69c9db1ce00e59e8dd09d7bae852a9add716efdc070a3e2068377e6ff0d6fdaa"}, - {file = "SQLAlchemy-2.0.30-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a1429a4b0f709f19ff3b0cf13675b2b9bfa8a7e79990003207a011c0db880a13"}, - {file = "SQLAlchemy-2.0.30-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:efedba7e13aa9a6c8407c48facfdfa108a5a4128e35f4c68f20c3407e4376aa9"}, - {file = "SQLAlchemy-2.0.30-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:16863e2b132b761891d6c49f0a0f70030e0bcac4fd208117f6b7e053e68668d0"}, - {file = "SQLAlchemy-2.0.30-cp311-cp311-win32.whl", hash = "sha256:2ecabd9ccaa6e914e3dbb2aa46b76dede7eadc8cbf1b8083c94d936bcd5ffb49"}, - {file = "SQLAlchemy-2.0.30-cp311-cp311-win_amd64.whl", hash = "sha256:0b3f4c438e37d22b83e640f825ef0f37b95db9aa2d68203f2c9549375d0b2260"}, - {file = "SQLAlchemy-2.0.30-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:5a79d65395ac5e6b0c2890935bad892eabb911c4aa8e8015067ddb37eea3d56c"}, - {file = "SQLAlchemy-2.0.30-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:9a5baf9267b752390252889f0c802ea13b52dfee5e369527da229189b8bd592e"}, - {file = "SQLAlchemy-2.0.30-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3cb5a646930c5123f8461f6468901573f334c2c63c795b9af350063a736d0134"}, - {file = "SQLAlchemy-2.0.30-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:296230899df0b77dec4eb799bcea6fbe39a43707ce7bb166519c97b583cfcab3"}, - {file = "SQLAlchemy-2.0.30-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:c62d401223f468eb4da32627bffc0c78ed516b03bb8a34a58be54d618b74d472"}, - {file = "SQLAlchemy-2.0.30-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:3b69e934f0f2b677ec111b4d83f92dc1a3210a779f69bf905273192cf4ed433e"}, - {file = "SQLAlchemy-2.0.30-cp312-cp312-win32.whl", hash = "sha256:77d2edb1f54aff37e3318f611637171e8ec71472f1fdc7348b41dcb226f93d90"}, - {file = "SQLAlchemy-2.0.30-cp312-cp312-win_amd64.whl", hash = "sha256:b6c7ec2b1f4969fc19b65b7059ed00497e25f54069407a8701091beb69e591a5"}, - {file = "SQLAlchemy-2.0.30-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:5a8e3b0a7e09e94be7510d1661339d6b52daf202ed2f5b1f9f48ea34ee6f2d57"}, - {file = "SQLAlchemy-2.0.30-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b60203c63e8f984df92035610c5fb76d941254cf5d19751faab7d33b21e5ddc0"}, - {file = 
"SQLAlchemy-2.0.30-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f1dc3eabd8c0232ee8387fbe03e0a62220a6f089e278b1f0aaf5e2d6210741ad"}, - {file = "SQLAlchemy-2.0.30-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:40ad017c672c00b9b663fcfcd5f0864a0a97828e2ee7ab0c140dc84058d194cf"}, - {file = "SQLAlchemy-2.0.30-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:e42203d8d20dc704604862977b1470a122e4892791fe3ed165f041e4bf447a1b"}, - {file = "SQLAlchemy-2.0.30-cp37-cp37m-win32.whl", hash = "sha256:2a4f4da89c74435f2bc61878cd08f3646b699e7d2eba97144030d1be44e27584"}, - {file = "SQLAlchemy-2.0.30-cp37-cp37m-win_amd64.whl", hash = "sha256:b6bf767d14b77f6a18b6982cbbf29d71bede087edae495d11ab358280f304d8e"}, - {file = "SQLAlchemy-2.0.30-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:bc0c53579650a891f9b83fa3cecd4e00218e071d0ba00c4890f5be0c34887ed3"}, - {file = "SQLAlchemy-2.0.30-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:311710f9a2ee235f1403537b10c7687214bb1f2b9ebb52702c5aa4a77f0b3af7"}, - {file = "SQLAlchemy-2.0.30-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:408f8b0e2c04677e9c93f40eef3ab22f550fecb3011b187f66a096395ff3d9fd"}, - {file = "SQLAlchemy-2.0.30-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:37a4b4fb0dd4d2669070fb05b8b8824afd0af57587393015baee1cf9890242d9"}, - {file = "SQLAlchemy-2.0.30-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:a943d297126c9230719c27fcbbeab57ecd5d15b0bd6bfd26e91bfcfe64220621"}, - {file = "SQLAlchemy-2.0.30-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:0a089e218654e740a41388893e090d2e2c22c29028c9d1353feb38638820bbeb"}, - {file = "SQLAlchemy-2.0.30-cp38-cp38-win32.whl", hash = "sha256:fa561138a64f949f3e889eb9ab8c58e1504ab351d6cf55259dc4c248eaa19da6"}, - {file = "SQLAlchemy-2.0.30-cp38-cp38-win_amd64.whl", hash = "sha256:7d74336c65705b986d12a7e337ba27ab2b9d819993851b140efdf029248e818e"}, - {file = "SQLAlchemy-2.0.30-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:ae8c62fe2480dd61c532ccafdbce9b29dacc126fe8be0d9a927ca3e699b9491a"}, - {file = "SQLAlchemy-2.0.30-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:2383146973a15435e4717f94c7509982770e3e54974c71f76500a0136f22810b"}, - {file = "SQLAlchemy-2.0.30-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8409de825f2c3b62ab15788635ccaec0c881c3f12a8af2b12ae4910a0a9aeef6"}, - {file = "SQLAlchemy-2.0.30-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0094c5dc698a5f78d3d1539853e8ecec02516b62b8223c970c86d44e7a80f6c7"}, - {file = "SQLAlchemy-2.0.30-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:edc16a50f5e1b7a06a2dcc1f2205b0b961074c123ed17ebda726f376a5ab0953"}, - {file = "SQLAlchemy-2.0.30-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:f7703c2010355dd28f53deb644a05fc30f796bd8598b43f0ba678878780b6e4c"}, - {file = "SQLAlchemy-2.0.30-cp39-cp39-win32.whl", hash = "sha256:1f9a727312ff6ad5248a4367358e2cf7e625e98b1028b1d7ab7b806b7d757513"}, - {file = "SQLAlchemy-2.0.30-cp39-cp39-win_amd64.whl", hash = "sha256:a0ef36b28534f2a5771191be6edb44cc2673c7b2edf6deac6562400288664221"}, - {file = "SQLAlchemy-2.0.30-py3-none-any.whl", hash = "sha256:7108d569d3990c71e26a42f60474b4c02c8586c4681af5fd67e51a044fdea86a"}, - {file = "SQLAlchemy-2.0.30.tar.gz", hash = "sha256:2b1708916730f4830bc69d6f49d37f7698b5bd7530aca7f04f785f8849e95255"}, -] - -[package.dependencies] -greenlet = {version = "!=0.4.17", markers = "platform_machine == \"win32\" or platform_machine == \"WIN32\" or 
platform_machine == \"AMD64\" or platform_machine == \"amd64\" or platform_machine == \"x86_64\" or platform_machine == \"ppc64le\" or platform_machine == \"aarch64\""} + {file = "SQLAlchemy-2.0.34-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:95d0b2cf8791ab5fb9e3aa3d9a79a0d5d51f55b6357eecf532a120ba3b5524db"}, + {file = "SQLAlchemy-2.0.34-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:243f92596f4fd4c8bd30ab8e8dd5965afe226363d75cab2468f2c707f64cd83b"}, + {file = "SQLAlchemy-2.0.34-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9ea54f7300553af0a2a7235e9b85f4204e1fc21848f917a3213b0e0818de9a24"}, + {file = "SQLAlchemy-2.0.34-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:173f5f122d2e1bff8fbd9f7811b7942bead1f5e9f371cdf9e670b327e6703ebd"}, + {file = "SQLAlchemy-2.0.34-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:196958cde924a00488e3e83ff917be3b73cd4ed8352bbc0f2989333176d1c54d"}, + {file = "SQLAlchemy-2.0.34-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:bd90c221ed4e60ac9d476db967f436cfcecbd4ef744537c0f2d5291439848768"}, + {file = "SQLAlchemy-2.0.34-cp310-cp310-win32.whl", hash = "sha256:3166dfff2d16fe9be3241ee60ece6fcb01cf8e74dd7c5e0b64f8e19fab44911b"}, + {file = "SQLAlchemy-2.0.34-cp310-cp310-win_amd64.whl", hash = "sha256:6831a78bbd3c40f909b3e5233f87341f12d0b34a58f14115c9e94b4cdaf726d3"}, + {file = "SQLAlchemy-2.0.34-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c7db3db284a0edaebe87f8f6642c2b2c27ed85c3e70064b84d1c9e4ec06d5d84"}, + {file = "SQLAlchemy-2.0.34-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:430093fce0efc7941d911d34f75a70084f12f6ca5c15d19595c18753edb7c33b"}, + {file = "SQLAlchemy-2.0.34-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:79cb400c360c7c210097b147c16a9e4c14688a6402445ac848f296ade6283bbc"}, + {file = "SQLAlchemy-2.0.34-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fb1b30f31a36c7f3fee848391ff77eebdd3af5750bf95fbf9b8b5323edfdb4ec"}, + {file = "SQLAlchemy-2.0.34-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8fddde2368e777ea2a4891a3fb4341e910a056be0bb15303bf1b92f073b80c02"}, + {file = "SQLAlchemy-2.0.34-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:80bd73ea335203b125cf1d8e50fef06be709619eb6ab9e7b891ea34b5baa2287"}, + {file = "SQLAlchemy-2.0.34-cp311-cp311-win32.whl", hash = "sha256:6daeb8382d0df526372abd9cb795c992e18eed25ef2c43afe518c73f8cccb721"}, + {file = "SQLAlchemy-2.0.34-cp311-cp311-win_amd64.whl", hash = "sha256:5bc08e75ed11693ecb648b7a0a4ed80da6d10845e44be0c98c03f2f880b68ff4"}, + {file = "SQLAlchemy-2.0.34-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:53e68b091492c8ed2bd0141e00ad3089bcc6bf0e6ec4142ad6505b4afe64163e"}, + {file = "SQLAlchemy-2.0.34-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:bcd18441a49499bf5528deaa9dee1f5c01ca491fc2791b13604e8f972877f812"}, + {file = "SQLAlchemy-2.0.34-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:165bbe0b376541092bf49542bd9827b048357f4623486096fc9aaa6d4e7c59a2"}, + {file = "SQLAlchemy-2.0.34-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c3330415cd387d2b88600e8e26b510d0370db9b7eaf984354a43e19c40df2e2b"}, + {file = "SQLAlchemy-2.0.34-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:97b850f73f8abbffb66ccbab6e55a195a0eb655e5dc74624d15cff4bfb35bd74"}, + {file = "SQLAlchemy-2.0.34-cp312-cp312-musllinux_1_2_x86_64.whl", hash = 
"sha256:7cee4c6917857fd6121ed84f56d1dc78eb1d0e87f845ab5a568aba73e78adf83"}, + {file = "SQLAlchemy-2.0.34-cp312-cp312-win32.whl", hash = "sha256:fbb034f565ecbe6c530dff948239377ba859420d146d5f62f0271407ffb8c580"}, + {file = "SQLAlchemy-2.0.34-cp312-cp312-win_amd64.whl", hash = "sha256:707c8f44931a4facd4149b52b75b80544a8d824162602b8cd2fe788207307f9a"}, + {file = "SQLAlchemy-2.0.34-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:24af3dc43568f3780b7e1e57c49b41d98b2d940c1fd2e62d65d3928b6f95f021"}, + {file = "SQLAlchemy-2.0.34-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e60ed6ef0a35c6b76b7640fe452d0e47acc832ccbb8475de549a5cc5f90c2c06"}, + {file = "SQLAlchemy-2.0.34-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:413c85cd0177c23e32dee6898c67a5f49296640041d98fddb2c40888fe4daa2e"}, + {file = "SQLAlchemy-2.0.34-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:25691f4adfb9d5e796fd48bf1432272f95f4bbe5f89c475a788f31232ea6afba"}, + {file = "SQLAlchemy-2.0.34-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:526ce723265643dbc4c7efb54f56648cc30e7abe20f387d763364b3ce7506c82"}, + {file = "SQLAlchemy-2.0.34-cp37-cp37m-win32.whl", hash = "sha256:13be2cc683b76977a700948411a94c67ad8faf542fa7da2a4b167f2244781cf3"}, + {file = "SQLAlchemy-2.0.34-cp37-cp37m-win_amd64.whl", hash = "sha256:e54ef33ea80d464c3dcfe881eb00ad5921b60f8115ea1a30d781653edc2fd6a2"}, + {file = "SQLAlchemy-2.0.34-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:43f28005141165edd11fbbf1541c920bd29e167b8bbc1fb410d4fe2269c1667a"}, + {file = "SQLAlchemy-2.0.34-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b68094b165a9e930aedef90725a8fcfafe9ef95370cbb54abc0464062dbf808f"}, + {file = "SQLAlchemy-2.0.34-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6a1e03db964e9d32f112bae36f0cc1dcd1988d096cfd75d6a588a3c3def9ab2b"}, + {file = "SQLAlchemy-2.0.34-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:203d46bddeaa7982f9c3cc693e5bc93db476ab5de9d4b4640d5c99ff219bee8c"}, + {file = "SQLAlchemy-2.0.34-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:ae92bebca3b1e6bd203494e5ef919a60fb6dfe4d9a47ed2453211d3bd451b9f5"}, + {file = "SQLAlchemy-2.0.34-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:9661268415f450c95f72f0ac1217cc6f10256f860eed85c2ae32e75b60278ad8"}, + {file = "SQLAlchemy-2.0.34-cp38-cp38-win32.whl", hash = "sha256:895184dfef8708e15f7516bd930bda7e50ead069280d2ce09ba11781b630a434"}, + {file = "SQLAlchemy-2.0.34-cp38-cp38-win_amd64.whl", hash = "sha256:6e7cde3a2221aa89247944cafb1b26616380e30c63e37ed19ff0bba5e968688d"}, + {file = "SQLAlchemy-2.0.34-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:dbcdf987f3aceef9763b6d7b1fd3e4ee210ddd26cac421d78b3c206d07b2700b"}, + {file = "SQLAlchemy-2.0.34-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ce119fc4ce0d64124d37f66a6f2a584fddc3c5001755f8a49f1ca0a177ef9796"}, + {file = "SQLAlchemy-2.0.34-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a17d8fac6df9835d8e2b4c5523666e7051d0897a93756518a1fe101c7f47f2f0"}, + {file = "SQLAlchemy-2.0.34-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9ebc11c54c6ecdd07bb4efbfa1554538982f5432dfb8456958b6d46b9f834bb7"}, + {file = "SQLAlchemy-2.0.34-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:2e6965346fc1491a566e019a4a1d3dfc081ce7ac1a736536367ca305da6472a8"}, + {file = "SQLAlchemy-2.0.34-cp39-cp39-musllinux_1_2_x86_64.whl", hash = 
"sha256:220574e78ad986aea8e81ac68821e47ea9202b7e44f251b7ed8c66d9ae3f4278"}, + {file = "SQLAlchemy-2.0.34-cp39-cp39-win32.whl", hash = "sha256:b75b00083e7fe6621ce13cfce9d4469c4774e55e8e9d38c305b37f13cf1e874c"}, + {file = "SQLAlchemy-2.0.34-cp39-cp39-win_amd64.whl", hash = "sha256:c29d03e0adf3cc1a8c3ec62d176824972ae29b67a66cbb18daff3062acc6faa8"}, + {file = "SQLAlchemy-2.0.34-py3-none-any.whl", hash = "sha256:7286c353ee6475613d8beff83167374006c6b3e3f0e6491bfe8ca610eb1dec0f"}, + {file = "sqlalchemy-2.0.34.tar.gz", hash = "sha256:10d8f36990dd929690666679b0f42235c159a7051534adb135728ee52828dd22"}, +] + +[package.dependencies] +greenlet = {version = "!=0.4.17", markers = "python_version < \"3.13\" and (platform_machine == \"win32\" or platform_machine == \"WIN32\" or platform_machine == \"AMD64\" or platform_machine == \"amd64\" or platform_machine == \"x86_64\" or platform_machine == \"ppc64le\" or platform_machine == \"aarch64\")"} typing-extensions = ">=4.6.0" [package.extras] @@ -3362,13 +3610,13 @@ rs = ["sqlglotrs (==0.1.2)"] [[package]] name = "sqlparse" -version = "0.5.0" +version = "0.5.1" description = "A non-validating SQL parser." optional = true python-versions = ">=3.8" files = [ - {file = "sqlparse-0.5.0-py3-none-any.whl", hash = "sha256:c204494cd97479d0e39f28c93d46c0b2d5959c7b9ab904762ea6c7af211c8663"}, - {file = "sqlparse-0.5.0.tar.gz", hash = "sha256:714d0a4932c059d16189f58ef5411ec2287a4360f17cdd0edd2d09d4c5087c93"}, + {file = "sqlparse-0.5.1-py3-none-any.whl", hash = "sha256:773dcbf9a5ab44a090f3441e2180efe2560220203dc2f8c0b0fa141e18b505e4"}, + {file = "sqlparse-0.5.1.tar.gz", hash = "sha256:bb6b4df465655ef332548e24f08e205afc81b9ab86cb1c45657a7ff173a3a00e"}, ] [package.extras] @@ -3416,13 +3664,13 @@ files = [ [[package]] name = "tqdm" -version = "4.66.4" +version = "4.66.5" description = "Fast, Extensible Progress Meter" optional = true python-versions = ">=3.7" files = [ - {file = "tqdm-4.66.4-py3-none-any.whl", hash = "sha256:b75ca56b413b030bc3f00af51fd2c1a1a5eac6a0c1cca83cbb37a5c52abce644"}, - {file = "tqdm-4.66.4.tar.gz", hash = "sha256:e4d936c9de8727928f3be6079590e97d9abfe8d39a590be678eb5919ffc186bb"}, + {file = "tqdm-4.66.5-py3-none-any.whl", hash = "sha256:90279a3770753eafc9194a0364852159802111925aa30eb3f9d85b0e805ac7cd"}, + {file = "tqdm-4.66.5.tar.gz", hash = "sha256:e1020aef2e5096702d8a025ac7d16b1577279c9d63f8375b63083e9a5f0fcbad"}, ] [package.dependencies] @@ -3468,13 +3716,13 @@ types-setuptools = "*" [[package]] name = "types-pyopenssl" -version = "24.1.0.20240425" +version = "24.1.0.20240722" description = "Typing stubs for pyOpenSSL" optional = false python-versions = ">=3.8" files = [ - {file = "types-pyOpenSSL-24.1.0.20240425.tar.gz", hash = "sha256:0a7e82626c1983dc8dc59292bf20654a51c3c3881bcbb9b337c1da6e32f0204e"}, - {file = "types_pyOpenSSL-24.1.0.20240425-py3-none-any.whl", hash = "sha256:f51a156835555dd2a1f025621e8c4fbe7493470331afeef96884d1d29bf3a473"}, + {file = "types-pyOpenSSL-24.1.0.20240722.tar.gz", hash = "sha256:47913b4678a01d879f503a12044468221ed8576263c1540dcb0484ca21b08c39"}, + {file = "types_pyOpenSSL-24.1.0.20240722-py3-none-any.whl", hash = "sha256:6a7a5d2ec042537934cfb4c9d4deb0e16c4c6250b09358df1f083682fe6fda54"}, ] [package.dependencies] @@ -3483,13 +3731,13 @@ types-cffi = "*" [[package]] name = "types-redis" -version = "4.6.0.20240425" +version = "4.6.0.20240903" description = "Typing stubs for redis" optional = false python-versions = ">=3.8" files = [ - {file = "types-redis-4.6.0.20240425.tar.gz", hash = 
"sha256:9402a10ee931d241fdfcc04592ebf7a661d7bb92a8dea631279f0d8acbcf3a22"}, - {file = "types_redis-4.6.0.20240425-py3-none-any.whl", hash = "sha256:ac5bc19e8f5997b9e76ad5d9cf15d0392d9f28cf5fc7746ea4a64b989c45c6a8"}, + {file = "types-redis-4.6.0.20240903.tar.gz", hash = "sha256:4bab1a378dbf23c2c95c370dfdb89a8f033957c4fd1a53fee71b529c182fe008"}, + {file = "types_redis-4.6.0.20240903-py3-none-any.whl", hash = "sha256:0e7537e5c085fe96b7d468d5edae0cf667b4ba4b62c6e4a5dfc340bd3b868c23"}, ] [package.dependencies] @@ -3498,13 +3746,13 @@ types-pyOpenSSL = "*" [[package]] name = "types-setuptools" -version = "70.0.0.20240524" +version = "74.1.0.20240907" description = "Typing stubs for setuptools" optional = false python-versions = ">=3.8" files = [ - {file = "types-setuptools-70.0.0.20240524.tar.gz", hash = "sha256:e31fee7b9d15ef53980526579ac6089b3ae51a005a281acf97178e90ac71aff6"}, - {file = "types_setuptools-70.0.0.20240524-py3-none-any.whl", hash = "sha256:8f5379b9948682d72a9ab531fbe52932e84c4f38deda570255f9bae3edd766bc"}, + {file = "types-setuptools-74.1.0.20240907.tar.gz", hash = "sha256:0abdb082552ca966c1e5fc244e4853adc62971f6cd724fb1d8a3713b580e5a65"}, + {file = "types_setuptools-74.1.0.20240907-py3-none-any.whl", hash = "sha256:15b38c8e63ca34f42f6063ff4b1dd662ea20086166d5ad6a102e670a52574120"}, ] [[package]] @@ -3537,7 +3785,7 @@ typing-extensions = ">=3.7.4" name = "tzdata" version = "2024.1" description = "Provider of IANA time zone data" -optional = false +optional = true python-versions = ">=2" files = [ {file = "tzdata-2024.1-py2.py3-none-any.whl", hash = "sha256:9068bc196136463f5245e51efda838afa15aaeca9903f49050dfa2679db4d252"}, @@ -3546,13 +3794,13 @@ files = [ [[package]] name = "urllib3" -version = "2.2.1" +version = "2.2.2" description = "HTTP library with thread-safe connection pooling, file post, and more." optional = true python-versions = ">=3.8" files = [ - {file = "urllib3-2.2.1-py3-none-any.whl", hash = "sha256:450b20ec296a467077128bff42b73080516e71b56ff59a60a02bef2232c4fa9d"}, - {file = "urllib3-2.2.1.tar.gz", hash = "sha256:d0570876c61ab9e520d776c38acbbb5b05a776d3f9ff98a5c8fd5162a444cf19"}, + {file = "urllib3-2.2.2-py3-none-any.whl", hash = "sha256:a448b2f64d686155468037e1ace9f2d2199776e17f0a46610480d311f73e3472"}, + {file = "urllib3-2.2.2.tar.gz", hash = "sha256:dd505485549a7a552833da5e6063639d0d177c04f23bc3864e41e5dc5f612168"}, ] [package.extras] @@ -3597,13 +3845,13 @@ testing = ["coverage (>=5.0)", "pytest", "pytest-cov"] [[package]] name = "werkzeug" -version = "3.0.3" +version = "3.0.4" description = "The comprehensive WSGI web application library." 
optional = true python-versions = ">=3.8" files = [ - {file = "werkzeug-3.0.3-py3-none-any.whl", hash = "sha256:fc9645dc43e03e4d630d23143a04a7f947a9a3b5727cd535fdfe155a17cc48c8"}, - {file = "werkzeug-3.0.3.tar.gz", hash = "sha256:097e5bfda9f0aba8da6b8545146def481d06aa7d3266e7448e2cccf67dd8bd18"}, + {file = "werkzeug-3.0.4-py3-none-any.whl", hash = "sha256:02c9eb92b7d6c06f31a782811505d2157837cea66aaede3e217c7c27c039476c"}, + {file = "werkzeug-3.0.4.tar.gz", hash = "sha256:34f2371506b250df4d4f84bfe7b0921e4762525762bbd936614909fe25cd7306"}, ] [package.dependencies] @@ -3693,101 +3941,103 @@ files = [ [[package]] name = "yarl" -version = "1.9.4" +version = "1.11.1" description = "Yet another URL library" optional = true -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "yarl-1.9.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a8c1df72eb746f4136fe9a2e72b0c9dc1da1cbd23b5372f94b5820ff8ae30e0e"}, - {file = "yarl-1.9.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a3a6ed1d525bfb91b3fc9b690c5a21bb52de28c018530ad85093cc488bee2dd2"}, - {file = "yarl-1.9.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c38c9ddb6103ceae4e4498f9c08fac9b590c5c71b0370f98714768e22ac6fa66"}, - {file = "yarl-1.9.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d9e09c9d74f4566e905a0b8fa668c58109f7624db96a2171f21747abc7524234"}, - {file = "yarl-1.9.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b8477c1ee4bd47c57d49621a062121c3023609f7a13b8a46953eb6c9716ca392"}, - {file = "yarl-1.9.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d5ff2c858f5f6a42c2a8e751100f237c5e869cbde669a724f2062d4c4ef93551"}, - {file = "yarl-1.9.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:357495293086c5b6d34ca9616a43d329317feab7917518bc97a08f9e55648455"}, - {file = "yarl-1.9.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:54525ae423d7b7a8ee81ba189f131054defdb122cde31ff17477951464c1691c"}, - {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:801e9264d19643548651b9db361ce3287176671fb0117f96b5ac0ee1c3530d53"}, - {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e516dc8baf7b380e6c1c26792610230f37147bb754d6426462ab115a02944385"}, - {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:7d5aaac37d19b2904bb9dfe12cdb08c8443e7ba7d2852894ad448d4b8f442863"}, - {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:54beabb809ffcacbd9d28ac57b0db46e42a6e341a030293fb3185c409e626b8b"}, - {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:bac8d525a8dbc2a1507ec731d2867025d11ceadcb4dd421423a5d42c56818541"}, - {file = "yarl-1.9.4-cp310-cp310-win32.whl", hash = "sha256:7855426dfbddac81896b6e533ebefc0af2f132d4a47340cee6d22cac7190022d"}, - {file = "yarl-1.9.4-cp310-cp310-win_amd64.whl", hash = "sha256:848cd2a1df56ddbffeb375535fb62c9d1645dde33ca4d51341378b3f5954429b"}, - {file = "yarl-1.9.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:35a2b9396879ce32754bd457d31a51ff0a9d426fd9e0e3c33394bf4b9036b099"}, - {file = "yarl-1.9.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4c7d56b293cc071e82532f70adcbd8b61909eec973ae9d2d1f9b233f3d943f2c"}, - {file = "yarl-1.9.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d8a1c6c0be645c745a081c192e747c5de06e944a0d21245f4cf7c05e457c36e0"}, - {file = 
"yarl-1.9.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4b3c1ffe10069f655ea2d731808e76e0f452fc6c749bea04781daf18e6039525"}, - {file = "yarl-1.9.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:549d19c84c55d11687ddbd47eeb348a89df9cb30e1993f1b128f4685cd0ebbf8"}, - {file = "yarl-1.9.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a7409f968456111140c1c95301cadf071bd30a81cbd7ab829169fb9e3d72eae9"}, - {file = "yarl-1.9.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e23a6d84d9d1738dbc6e38167776107e63307dfc8ad108e580548d1f2c587f42"}, - {file = "yarl-1.9.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d8b889777de69897406c9fb0b76cdf2fd0f31267861ae7501d93003d55f54fbe"}, - {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:03caa9507d3d3c83bca08650678e25364e1843b484f19986a527630ca376ecce"}, - {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:4e9035df8d0880b2f1c7f5031f33f69e071dfe72ee9310cfc76f7b605958ceb9"}, - {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:c0ec0ed476f77db9fb29bca17f0a8fcc7bc97ad4c6c1d8959c507decb22e8572"}, - {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:ee04010f26d5102399bd17f8df8bc38dc7ccd7701dc77f4a68c5b8d733406958"}, - {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:49a180c2e0743d5d6e0b4d1a9e5f633c62eca3f8a86ba5dd3c471060e352ca98"}, - {file = "yarl-1.9.4-cp311-cp311-win32.whl", hash = "sha256:81eb57278deb6098a5b62e88ad8281b2ba09f2f1147c4767522353eaa6260b31"}, - {file = "yarl-1.9.4-cp311-cp311-win_amd64.whl", hash = "sha256:d1d2532b340b692880261c15aee4dc94dd22ca5d61b9db9a8a361953d36410b1"}, - {file = "yarl-1.9.4-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0d2454f0aef65ea81037759be5ca9947539667eecebca092733b2eb43c965a81"}, - {file = "yarl-1.9.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:44d8ffbb9c06e5a7f529f38f53eda23e50d1ed33c6c869e01481d3fafa6b8142"}, - {file = "yarl-1.9.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:aaaea1e536f98754a6e5c56091baa1b6ce2f2700cc4a00b0d49eca8dea471074"}, - {file = "yarl-1.9.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3777ce5536d17989c91696db1d459574e9a9bd37660ea7ee4d3344579bb6f129"}, - {file = "yarl-1.9.4-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9fc5fc1eeb029757349ad26bbc5880557389a03fa6ada41703db5e068881e5f2"}, - {file = "yarl-1.9.4-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ea65804b5dc88dacd4a40279af0cdadcfe74b3e5b4c897aa0d81cf86927fee78"}, - {file = "yarl-1.9.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa102d6d280a5455ad6a0f9e6d769989638718e938a6a0a2ff3f4a7ff8c62cc4"}, - {file = "yarl-1.9.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:09efe4615ada057ba2d30df871d2f668af661e971dfeedf0c159927d48bbeff0"}, - {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:008d3e808d03ef28542372d01057fd09168419cdc8f848efe2804f894ae03e51"}, - {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:6f5cb257bc2ec58f437da2b37a8cd48f666db96d47b8a3115c29f316313654ff"}, - {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = 
"sha256:992f18e0ea248ee03b5a6e8b3b4738850ae7dbb172cc41c966462801cbf62cf7"}, - {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:0e9d124c191d5b881060a9e5060627694c3bdd1fe24c5eecc8d5d7d0eb6faabc"}, - {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:3986b6f41ad22988e53d5778f91855dc0399b043fc8946d4f2e68af22ee9ff10"}, - {file = "yarl-1.9.4-cp312-cp312-win32.whl", hash = "sha256:4b21516d181cd77ebd06ce160ef8cc2a5e9ad35fb1c5930882baff5ac865eee7"}, - {file = "yarl-1.9.4-cp312-cp312-win_amd64.whl", hash = "sha256:a9bd00dc3bc395a662900f33f74feb3e757429e545d831eef5bb280252631984"}, - {file = "yarl-1.9.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:63b20738b5aac74e239622d2fe30df4fca4942a86e31bf47a81a0e94c14df94f"}, - {file = "yarl-1.9.4-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7d7f7de27b8944f1fee2c26a88b4dabc2409d2fea7a9ed3df79b67277644e17"}, - {file = "yarl-1.9.4-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c74018551e31269d56fab81a728f683667e7c28c04e807ba08f8c9e3bba32f14"}, - {file = "yarl-1.9.4-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ca06675212f94e7a610e85ca36948bb8fc023e458dd6c63ef71abfd482481aa5"}, - {file = "yarl-1.9.4-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5aef935237d60a51a62b86249839b51345f47564208c6ee615ed2a40878dccdd"}, - {file = "yarl-1.9.4-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2b134fd795e2322b7684155b7855cc99409d10b2e408056db2b93b51a52accc7"}, - {file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d25039a474c4c72a5ad4b52495056f843a7ff07b632c1b92ea9043a3d9950f6e"}, - {file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:f7d6b36dd2e029b6bcb8a13cf19664c7b8e19ab3a58e0fefbb5b8461447ed5ec"}, - {file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:957b4774373cf6f709359e5c8c4a0af9f6d7875db657adb0feaf8d6cb3c3964c"}, - {file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:d7eeb6d22331e2fd42fce928a81c697c9ee2d51400bd1a28803965883e13cead"}, - {file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:6a962e04b8f91f8c4e5917e518d17958e3bdee71fd1d8b88cdce74dd0ebbf434"}, - {file = "yarl-1.9.4-cp37-cp37m-win32.whl", hash = "sha256:f3bc6af6e2b8f92eced34ef6a96ffb248e863af20ef4fde9448cc8c9b858b749"}, - {file = "yarl-1.9.4-cp37-cp37m-win_amd64.whl", hash = "sha256:ad4d7a90a92e528aadf4965d685c17dacff3df282db1121136c382dc0b6014d2"}, - {file = "yarl-1.9.4-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:ec61d826d80fc293ed46c9dd26995921e3a82146feacd952ef0757236fc137be"}, - {file = "yarl-1.9.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:8be9e837ea9113676e5754b43b940b50cce76d9ed7d2461df1af39a8ee674d9f"}, - {file = "yarl-1.9.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:bef596fdaa8f26e3d66af846bbe77057237cb6e8efff8cd7cc8dff9a62278bbf"}, - {file = "yarl-1.9.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2d47552b6e52c3319fede1b60b3de120fe83bde9b7bddad11a69fb0af7db32f1"}, - {file = "yarl-1.9.4-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:84fc30f71689d7fc9168b92788abc977dc8cefa806909565fc2951d02f6b7d57"}, - {file = "yarl-1.9.4-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4aa9741085f635934f3a2583e16fcf62ba835719a8b2b28fb2917bb0537c1dfa"}, - {file = 
"yarl-1.9.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:206a55215e6d05dbc6c98ce598a59e6fbd0c493e2de4ea6cc2f4934d5a18d130"}, - {file = "yarl-1.9.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07574b007ee20e5c375a8fe4a0789fad26db905f9813be0f9fef5a68080de559"}, - {file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5a2e2433eb9344a163aced6a5f6c9222c0786e5a9e9cac2c89f0b28433f56e23"}, - {file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:6ad6d10ed9b67a382b45f29ea028f92d25bc0bc1daf6c5b801b90b5aa70fb9ec"}, - {file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:6fe79f998a4052d79e1c30eeb7d6c1c1056ad33300f682465e1b4e9b5a188b78"}, - {file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:a825ec844298c791fd28ed14ed1bffc56a98d15b8c58a20e0e08c1f5f2bea1be"}, - {file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8619d6915b3b0b34420cf9b2bb6d81ef59d984cb0fde7544e9ece32b4b3043c3"}, - {file = "yarl-1.9.4-cp38-cp38-win32.whl", hash = "sha256:686a0c2f85f83463272ddffd4deb5e591c98aac1897d65e92319f729c320eece"}, - {file = "yarl-1.9.4-cp38-cp38-win_amd64.whl", hash = "sha256:a00862fb23195b6b8322f7d781b0dc1d82cb3bcac346d1e38689370cc1cc398b"}, - {file = "yarl-1.9.4-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:604f31d97fa493083ea21bd9b92c419012531c4e17ea6da0f65cacdcf5d0bd27"}, - {file = "yarl-1.9.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8a854227cf581330ffa2c4824d96e52ee621dd571078a252c25e3a3b3d94a1b1"}, - {file = "yarl-1.9.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ba6f52cbc7809cd8d74604cce9c14868306ae4aa0282016b641c661f981a6e91"}, - {file = "yarl-1.9.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a6327976c7c2f4ee6816eff196e25385ccc02cb81427952414a64811037bbc8b"}, - {file = "yarl-1.9.4-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8397a3817d7dcdd14bb266283cd1d6fc7264a48c186b986f32e86d86d35fbac5"}, - {file = "yarl-1.9.4-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e0381b4ce23ff92f8170080c97678040fc5b08da85e9e292292aba67fdac6c34"}, - {file = "yarl-1.9.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:23d32a2594cb5d565d358a92e151315d1b2268bc10f4610d098f96b147370136"}, - {file = "yarl-1.9.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ddb2a5c08a4eaaba605340fdee8fc08e406c56617566d9643ad8bf6852778fc7"}, - {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:26a1dc6285e03f3cc9e839a2da83bcbf31dcb0d004c72d0730e755b33466c30e"}, - {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:18580f672e44ce1238b82f7fb87d727c4a131f3a9d33a5e0e82b793362bf18b4"}, - {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:29e0f83f37610f173eb7e7b5562dd71467993495e568e708d99e9d1944f561ec"}, - {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:1f23e4fe1e8794f74b6027d7cf19dc25f8b63af1483d91d595d4a07eca1fb26c"}, - {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:db8e58b9d79200c76956cefd14d5c90af54416ff5353c5bfd7cbe58818e26ef0"}, - {file = "yarl-1.9.4-cp39-cp39-win32.whl", hash = "sha256:c7224cab95645c7ab53791022ae77a4509472613e839dab722a72abe5a684575"}, - {file = "yarl-1.9.4-cp39-cp39-win_amd64.whl", hash = 
"sha256:824d6c50492add5da9374875ce72db7a0733b29c2394890aef23d533106e2b15"}, - {file = "yarl-1.9.4-py3-none-any.whl", hash = "sha256:928cecb0ef9d5a7946eb6ff58417ad2fe9375762382f1bf5c55e61645f2c43ad"}, - {file = "yarl-1.9.4.tar.gz", hash = "sha256:566db86717cf8080b99b58b083b773a908ae40f06681e87e589a976faf8246bf"}, + {file = "yarl-1.11.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:400cd42185f92de559d29eeb529e71d80dfbd2f45c36844914a4a34297ca6f00"}, + {file = "yarl-1.11.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:8258c86f47e080a258993eed877d579c71da7bda26af86ce6c2d2d072c11320d"}, + {file = "yarl-1.11.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2164cd9725092761fed26f299e3f276bb4b537ca58e6ff6b252eae9631b5c96e"}, + {file = "yarl-1.11.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a08ea567c16f140af8ddc7cb58e27e9138a1386e3e6e53982abaa6f2377b38cc"}, + {file = "yarl-1.11.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:768ecc550096b028754ea28bf90fde071c379c62c43afa574edc6f33ee5daaec"}, + {file = "yarl-1.11.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2909fa3a7d249ef64eeb2faa04b7957e34fefb6ec9966506312349ed8a7e77bf"}, + {file = "yarl-1.11.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:01a8697ec24f17c349c4f655763c4db70eebc56a5f82995e5e26e837c6eb0e49"}, + {file = "yarl-1.11.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e286580b6511aac7c3268a78cdb861ec739d3e5a2a53b4809faef6b49778eaff"}, + {file = "yarl-1.11.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:4179522dc0305c3fc9782549175c8e8849252fefeb077c92a73889ccbcd508ad"}, + {file = "yarl-1.11.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:27fcb271a41b746bd0e2a92182df507e1c204759f460ff784ca614e12dd85145"}, + {file = "yarl-1.11.1-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:f61db3b7e870914dbd9434b560075e0366771eecbe6d2b5561f5bc7485f39efd"}, + {file = "yarl-1.11.1-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:c92261eb2ad367629dc437536463dc934030c9e7caca861cc51990fe6c565f26"}, + {file = "yarl-1.11.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:d95b52fbef190ca87d8c42f49e314eace4fc52070f3dfa5f87a6594b0c1c6e46"}, + {file = "yarl-1.11.1-cp310-cp310-win32.whl", hash = "sha256:489fa8bde4f1244ad6c5f6d11bb33e09cf0d1d0367edb197619c3e3fc06f3d91"}, + {file = "yarl-1.11.1-cp310-cp310-win_amd64.whl", hash = "sha256:476e20c433b356e16e9a141449f25161e6b69984fb4cdbd7cd4bd54c17844998"}, + {file = "yarl-1.11.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:946eedc12895873891aaceb39bceb484b4977f70373e0122da483f6c38faaa68"}, + {file = "yarl-1.11.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:21a7c12321436b066c11ec19c7e3cb9aec18884fe0d5b25d03d756a9e654edfe"}, + {file = "yarl-1.11.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:c35f493b867912f6fda721a59cc7c4766d382040bdf1ddaeeaa7fa4d072f4675"}, + {file = "yarl-1.11.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:25861303e0be76b60fddc1250ec5986c42f0a5c0c50ff57cc30b1be199c00e63"}, + {file = "yarl-1.11.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e4b53f73077e839b3f89c992223f15b1d2ab314bdbdf502afdc7bb18e95eae27"}, + {file = "yarl-1.11.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:327c724b01b8641a1bf1ab3b232fb638706e50f76c0b5bf16051ab65c868fac5"}, + {file = 
"yarl-1.11.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4307d9a3417eea87715c9736d050c83e8c1904e9b7aada6ce61b46361b733d92"}, + {file = "yarl-1.11.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:48a28bed68ab8fb7e380775f0029a079f08a17799cb3387a65d14ace16c12e2b"}, + {file = "yarl-1.11.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:067b961853c8e62725ff2893226fef3d0da060656a9827f3f520fb1d19b2b68a"}, + {file = "yarl-1.11.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:8215f6f21394d1f46e222abeb06316e77ef328d628f593502d8fc2a9117bde83"}, + {file = "yarl-1.11.1-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:498442e3af2a860a663baa14fbf23fb04b0dd758039c0e7c8f91cb9279799bff"}, + {file = "yarl-1.11.1-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:69721b8effdb588cb055cc22f7c5105ca6fdaa5aeb3ea09021d517882c4a904c"}, + {file = "yarl-1.11.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:1e969fa4c1e0b1a391f3fcbcb9ec31e84440253325b534519be0d28f4b6b533e"}, + {file = "yarl-1.11.1-cp311-cp311-win32.whl", hash = "sha256:7d51324a04fc4b0e097ff8a153e9276c2593106a811704025bbc1d6916f45ca6"}, + {file = "yarl-1.11.1-cp311-cp311-win_amd64.whl", hash = "sha256:15061ce6584ece023457fb8b7a7a69ec40bf7114d781a8c4f5dcd68e28b5c53b"}, + {file = "yarl-1.11.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:a4264515f9117be204935cd230fb2a052dd3792789cc94c101c535d349b3dab0"}, + {file = "yarl-1.11.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:f41fa79114a1d2eddb5eea7b912d6160508f57440bd302ce96eaa384914cd265"}, + {file = "yarl-1.11.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:02da8759b47d964f9173c8675710720b468aa1c1693be0c9c64abb9d8d9a4867"}, + {file = "yarl-1.11.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9361628f28f48dcf8b2f528420d4d68102f593f9c2e592bfc842f5fb337e44fd"}, + {file = "yarl-1.11.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b91044952da03b6f95fdba398d7993dd983b64d3c31c358a4c89e3c19b6f7aef"}, + {file = "yarl-1.11.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:74db2ef03b442276d25951749a803ddb6e270d02dda1d1c556f6ae595a0d76a8"}, + {file = "yarl-1.11.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e975a2211952a8a083d1b9d9ba26472981ae338e720b419eb50535de3c02870"}, + {file = "yarl-1.11.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8aef97ba1dd2138112890ef848e17d8526fe80b21f743b4ee65947ea184f07a2"}, + {file = "yarl-1.11.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:a7915ea49b0c113641dc4d9338efa9bd66b6a9a485ffe75b9907e8573ca94b84"}, + {file = "yarl-1.11.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:504cf0d4c5e4579a51261d6091267f9fd997ef58558c4ffa7a3e1460bd2336fa"}, + {file = "yarl-1.11.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:3de5292f9f0ee285e6bd168b2a77b2a00d74cbcfa420ed078456d3023d2f6dff"}, + {file = "yarl-1.11.1-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:a34e1e30f1774fa35d37202bbeae62423e9a79d78d0874e5556a593479fdf239"}, + {file = "yarl-1.11.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:66b63c504d2ca43bf7221a1f72fbe981ff56ecb39004c70a94485d13e37ebf45"}, + {file = "yarl-1.11.1-cp312-cp312-win32.whl", hash = "sha256:a28b70c9e2213de425d9cba5ab2e7f7a1c8ca23a99c4b5159bf77b9c31251447"}, + {file = "yarl-1.11.1-cp312-cp312-win_amd64.whl", hash = 
"sha256:17b5a386d0d36fb828e2fb3ef08c8829c1ebf977eef88e5367d1c8c94b454639"}, + {file = "yarl-1.11.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:1fa2e7a406fbd45b61b4433e3aa254a2c3e14c4b3186f6e952d08a730807fa0c"}, + {file = "yarl-1.11.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:750f656832d7d3cb0c76be137ee79405cc17e792f31e0a01eee390e383b2936e"}, + {file = "yarl-1.11.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:0b8486f322d8f6a38539136a22c55f94d269addb24db5cb6f61adc61eabc9d93"}, + {file = "yarl-1.11.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3fce4da3703ee6048ad4138fe74619c50874afe98b1ad87b2698ef95bf92c96d"}, + {file = "yarl-1.11.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8ed653638ef669e0efc6fe2acb792275cb419bf9cb5c5049399f3556995f23c7"}, + {file = "yarl-1.11.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:18ac56c9dd70941ecad42b5a906820824ca72ff84ad6fa18db33c2537ae2e089"}, + {file = "yarl-1.11.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:688654f8507464745ab563b041d1fb7dab5d9912ca6b06e61d1c4708366832f5"}, + {file = "yarl-1.11.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4973eac1e2ff63cf187073cd4e1f1148dcd119314ab79b88e1b3fad74a18c9d5"}, + {file = "yarl-1.11.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:964a428132227edff96d6f3cf261573cb0f1a60c9a764ce28cda9525f18f7786"}, + {file = "yarl-1.11.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:6d23754b9939cbab02c63434776df1170e43b09c6a517585c7ce2b3d449b7318"}, + {file = "yarl-1.11.1-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:c2dc4250fe94d8cd864d66018f8344d4af50e3758e9d725e94fecfa27588ff82"}, + {file = "yarl-1.11.1-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:09696438cb43ea6f9492ef237761b043f9179f455f405279e609f2bc9100212a"}, + {file = "yarl-1.11.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:999bfee0a5b7385a0af5ffb606393509cfde70ecca4f01c36985be6d33e336da"}, + {file = "yarl-1.11.1-cp313-cp313-win32.whl", hash = "sha256:ce928c9c6409c79e10f39604a7e214b3cb69552952fbda8d836c052832e6a979"}, + {file = "yarl-1.11.1-cp313-cp313-win_amd64.whl", hash = "sha256:501c503eed2bb306638ccb60c174f856cc3246c861829ff40eaa80e2f0330367"}, + {file = "yarl-1.11.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:dae7bd0daeb33aa3e79e72877d3d51052e8b19c9025ecf0374f542ea8ec120e4"}, + {file = "yarl-1.11.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:3ff6b1617aa39279fe18a76c8d165469c48b159931d9b48239065767ee455b2b"}, + {file = "yarl-1.11.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:3257978c870728a52dcce8c2902bf01f6c53b65094b457bf87b2644ee6238ddc"}, + {file = "yarl-1.11.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0f351fa31234699d6084ff98283cb1e852270fe9e250a3b3bf7804eb493bd937"}, + {file = "yarl-1.11.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8aef1b64da41d18026632d99a06b3fefe1d08e85dd81d849fa7c96301ed22f1b"}, + {file = "yarl-1.11.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7175a87ab8f7fbde37160a15e58e138ba3b2b0e05492d7351314a250d61b1591"}, + {file = "yarl-1.11.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba444bdd4caa2a94456ef67a2f383710928820dd0117aae6650a4d17029fa25e"}, + {file = 
"yarl-1.11.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0ea9682124fc062e3d931c6911934a678cb28453f957ddccf51f568c2f2b5e05"}, + {file = "yarl-1.11.1-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:8418c053aeb236b20b0ab8fa6bacfc2feaaf7d4683dd96528610989c99723d5f"}, + {file = "yarl-1.11.1-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:61a5f2c14d0a1adfdd82258f756b23a550c13ba4c86c84106be4c111a3a4e413"}, + {file = "yarl-1.11.1-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:f3a6d90cab0bdf07df8f176eae3a07127daafcf7457b997b2bf46776da2c7eb7"}, + {file = "yarl-1.11.1-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:077da604852be488c9a05a524068cdae1e972b7dc02438161c32420fb4ec5e14"}, + {file = "yarl-1.11.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:15439f3c5c72686b6c3ff235279630d08936ace67d0fe5c8d5bbc3ef06f5a420"}, + {file = "yarl-1.11.1-cp38-cp38-win32.whl", hash = "sha256:238a21849dd7554cb4d25a14ffbfa0ef380bb7ba201f45b144a14454a72ffa5a"}, + {file = "yarl-1.11.1-cp38-cp38-win_amd64.whl", hash = "sha256:67459cf8cf31da0e2cbdb4b040507e535d25cfbb1604ca76396a3a66b8ba37a6"}, + {file = "yarl-1.11.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:884eab2ce97cbaf89f264372eae58388862c33c4f551c15680dd80f53c89a269"}, + {file = "yarl-1.11.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8a336eaa7ee7e87cdece3cedb395c9657d227bfceb6781295cf56abcd3386a26"}, + {file = "yarl-1.11.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:87f020d010ba80a247c4abc335fc13421037800ca20b42af5ae40e5fd75e7909"}, + {file = "yarl-1.11.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:637c7ddb585a62d4469f843dac221f23eec3cbad31693b23abbc2c366ad41ff4"}, + {file = "yarl-1.11.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:48dfd117ab93f0129084577a07287376cc69c08138694396f305636e229caa1a"}, + {file = "yarl-1.11.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:75e0ae31fb5ccab6eda09ba1494e87eb226dcbd2372dae96b87800e1dcc98804"}, + {file = "yarl-1.11.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f46f81501160c28d0c0b7333b4f7be8983dbbc161983b6fb814024d1b4952f79"}, + {file = "yarl-1.11.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:04293941646647b3bfb1719d1d11ff1028e9c30199509a844da3c0f5919dc520"}, + {file = "yarl-1.11.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:250e888fa62d73e721f3041e3a9abf427788a1934b426b45e1b92f62c1f68366"}, + {file = "yarl-1.11.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:e8f63904df26d1a66aabc141bfd258bf738b9bc7bc6bdef22713b4f5ef789a4c"}, + {file = "yarl-1.11.1-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:aac44097d838dda26526cffb63bdd8737a2dbdf5f2c68efb72ad83aec6673c7e"}, + {file = "yarl-1.11.1-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:267b24f891e74eccbdff42241c5fb4f974de2d6271dcc7d7e0c9ae1079a560d9"}, + {file = "yarl-1.11.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:6907daa4b9d7a688063ed098c472f96e8181733c525e03e866fb5db480a424df"}, + {file = "yarl-1.11.1-cp39-cp39-win32.whl", hash = "sha256:14438dfc5015661f75f85bc5adad0743678eefee266ff0c9a8e32969d5d69f74"}, + {file = "yarl-1.11.1-cp39-cp39-win_amd64.whl", hash = "sha256:94d0caaa912bfcdc702a4204cd5e2bb01eb917fc4f5ea2315aa23962549561b0"}, + {file = "yarl-1.11.1-py3-none-any.whl", hash = "sha256:72bf26f66456baa0584eff63e44545c9f0eaed9b73cb6601b647c91f14c11f38"}, + {file = 
"yarl-1.11.1.tar.gz", hash = "sha256:1bb2d9e212fb7449b8fb73bc461b51eaa17cc8430b4a87d87be7b25052d92f53"}, ] [package.dependencies] @@ -3796,29 +4046,35 @@ multidict = ">=4.0" [[package]] name = "zipp" -version = "3.19.2" +version = "3.20.1" description = "Backport of pathlib-compatible object wrapper for zip files" optional = true python-versions = ">=3.8" files = [ - {file = "zipp-3.19.2-py3-none-any.whl", hash = "sha256:f091755f667055f2d02b32c53771a7a6c8b47e1fdbc4b72a8b9072b3eef8015c"}, - {file = "zipp-3.19.2.tar.gz", hash = "sha256:bf1dcf6450f873a13e952a29504887c89e6de7506209e5b1bcc3460135d4de19"}, + {file = "zipp-3.20.1-py3-none-any.whl", hash = "sha256:9960cd8967c8f85a56f920d5d507274e74f9ff813a0ab8889a5b5be2daf44064"}, + {file = "zipp-3.20.1.tar.gz", hash = "sha256:c22b14cc4763c5a5b04134207736c107db42e9d3ef2d9779d465f5f1bcba572b"}, ] [package.extras] +check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)"] +cover = ["pytest-cov"] doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] -test = ["big-O", "importlib-resources", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more-itertools", "pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-ignore-flaky", "pytest-mypy", "pytest-ruff (>=0.2.1)"] +enabler = ["pytest-enabler (>=2.2)"] +test = ["big-O", "importlib-resources", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more-itertools", "pytest (>=6,!=8.1.*)", "pytest-ignore-flaky"] +type = ["pytest-mypy"] [extras] aws = ["aioaws", "connectorx"] azure = ["adlfs"] +deltalake = ["deltalake"] image = ["pillow"] kafka = ["kafka-python"] lancedb = ["lancedb"] mlflow = ["mlflow"] ollama = ["ollama"] +pandas = ["pandas"] pandera = ["pandera"] -psql = ["asyncpg", "connectorx"] +psql = ["asyncpg", "connectorx", "psycopg2"] redis = ["redis"] server = ["asgi-correlation-id", "fastapi", "prometheus-fastapi-instrumentator", "uvicorn"] sql = ["sqlglot"] @@ -3826,4 +4082,4 @@ sql = ["sqlglot"] [metadata] lock-version = "2.0" python-versions = "^3.10" -content-hash = "72624486cf79558084e0fef935eac02a1beb2de5a7ec110ee6fa65ed0ef02489" +content-hash = "27393c56c557fd6269c65bc698e0823f6077ce0109e84e48bdcfe349d9558149" diff --git a/pyproject.toml b/pyproject.toml index c9bd49cc..da145eed 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "aligned" -version = "0.0.99" +version = "0.0.100" description = "A data managment and lineage tool for ML applications." authors = ["Mats E. 
Mollestad "] license = "Apache-2.0" @@ -46,35 +46,36 @@ packages = [ python = "^3.10" python-dotenv = "^0.21.0" click = "^8.1.3" -pandas = "^2.0.0" -fastapi = { version = "^0.100.0", optional = true } -uvicorn = { version = "^0.17.6", optional = true } -redis = { version = "^4.3.1", optional = true } +httpx = "^0.27.0" mashumaro = "^3.0.1" dill = "^0.3.4" -aioaws = { version = "^0.12", optional = true } -Jinja2 = "^3.1.2" -nest-asyncio = "^1.5.5" pydantic = "^2.0.0" prometheus_client = "^0.16.0" +polars = { version = "^0.20.0", extras = ["pyarrow"] } + +pandas = { version = "^2.0.0", optional = true } +fastapi = { version = "^0.100.0", optional = true } +uvicorn = { version = "^0.17.6", optional = true } +redis = { version = "^4.3.1", optional = true } +aioaws = { version = "^0.12", optional = true } asgi-correlation-id = { version = "^3.0.0", optional = true } pandera = { version = "^0.17.0", optional = true} -polars = { version = "^0.20.0", extras = ["pyarrow"] } pillow = { version = "^9.4.0", optional = true } prometheus-fastapi-instrumentator = { version="^5.9.1", optional = true } kafka-python = { version= "^2.0.2", optional = true } connectorx = { version = "^0.3.2", optional = true } asyncpg = { version = "^0.29.0", optional = true } +psycopg2 = { version = "^2.9.9", optional = true } sqlglot = { version = "^22.5.0", optional = true } ollama = { version = "^0.1.8", optional = true } -httpx = "^0.27.0" mlflow = { version = "^2.11.3", optional = true } adlfs = { version = "^2024.4.1", optional = true } lancedb = { version = "^0.8.2", optional = true } +deltalake = { version = "^0.18.1", optional = true } [tool.poetry.extras] aws = ["aioaws", "connectorx"] -psql = ["connectorx", "asyncpg"] +psql = ["connectorx", "asyncpg", "psycopg2"] redis = ["redis"] server = ["asgi-correlation-id", "fastapi", "uvicorn", "prometheus-fastapi-instrumentator"] pandera = ["pandera"] @@ -84,7 +85,9 @@ ollama = ["ollama"] sql = ["sqlglot"] mlflow = ["mlflow"] azure = ["adlfs"] +deltalake = ["deltalake"] lancedb = ["lancedb"] +pandas = ["pandas"] [tool.poetry.group.dev.dependencies] types-redis = "^4.2.6" @@ -96,7 +99,7 @@ sqlalchemy = "^2.0.19" printf-log-formatter = "^0.3.0" isort = "^5.12.0" black = "^23.7.0" -psycopg2 = "^2.9.6" +pyright = "^1.1.375" [build-system] requires = ["poetry-core>=1.0.0"] @@ -109,6 +112,9 @@ aligned = 'aligned.cli:cli' line-length = 110 skip-string-normalization = true + [tool.isort] profile = "black" line_length = 110 + +[tool.ruff] diff --git a/test_data/credit_history.csv b/test_data/credit_history.csv index 7608cc4b..7b784fd9 100644 --- a/test_data/credit_history.csv +++ b/test_data/credit_history.csv @@ -1,7 +1,7 @@ -event_timestamp,credit_card_due,student_loan_due,dob_ssn,due_sum,bankruptcies -1587924064746575,8419,22328,19530219_5179,30747,0 -1587924064746575,2944,2515,19520816_8737,5459,0 -1587924064746575,833,33000,19860413_2537,33833,0 -1588010464746575,5936,48955,19530219_5179,54891,0 -1588010464746575,1575,9501,19520816_8737,11076,0 -1588010464746575,6263,35510,19860413_2537,41773,0 +bankruptcies,credit_card_due,dob_ssn,due_sum,event_timestamp,student_loan_due +0,8419,19530219_5179,30747,1587924064746575,22328 +0,2944,19520816_8737,5459,1587924064746575,2515 +0,833,19860413_2537,33833,1587924064746575,33000 +0,5936,19530219_5179,54891,1588010464746575,48955 +0,1575,19520816_8737,11076,1588010464746575,9501 +0,6263,19860413_2537,41773,1588010464746575,35510 diff --git a/test_data/credit_history_mater.parquet b/test_data/credit_history_mater.parquet index 
index 45c46f49..5d094613 100644
Binary files a/test_data/credit_history_mater.parquet and b/test_data/credit_history_mater.parquet differ
diff --git a/test_data/data/csv_iso.csv b/test_data/data/csv_iso.csv
index 571fc460..5ef02ca5 100644
--- a/test_data/data/csv_iso.csv
+++ b/test_data/data/csv_iso.csv
@@ -1,4 +1,4 @@
 id,other,et,timestamp
-1,foo,2024-06-11T19:07:59.791760+UTC,2024-06-11T19:07:59.791768+UTC
-2,bar,2024-06-10T19:07:59.791766+UTC,2024-06-12T19:07:59.791769+UTC
-3,baz,2024-06-09T19:07:59.791768+UTC,2024-06-13T19:07:59.791769+UTC
+1,foo,2024-09-11T20:39:20.867538UTC,2024-09-11T20:39:20.867546UTC
+2,bar,2024-09-10T20:39:20.867543UTC,2024-09-12T20:39:20.867546UTC
+3,baz,2024-09-09T20:39:20.867545UTC,2024-09-13T20:39:20.867547UTC
diff --git a/test_data/data/csv_unix.csv b/test_data/data/csv_unix.csv
index 4800ae7b..0fd9236c 100644
--- a/test_data/data/csv_unix.csv
+++ b/test_data/data/csv_unix.csv
@@ -1,4 +1,4 @@
 id,other,et,timestamp
-1,foo,1718132879791760,1718132879791768
-2,bar,1718046479791766,1718219279791769
-3,baz,1717960079791768,1718305679791769
+1,foo,1726087160867538,1726087160867546
+2,bar,1726000760867543,1726173560867546
+3,baz,1725914360867545,1726259960867547
diff --git a/test_data/data/parquet_iso.parquet b/test_data/data/parquet_iso.parquet
index d2eecd9a..f2a7dbb6 100644
Binary files a/test_data/data/parquet_iso.parquet and b/test_data/data/parquet_iso.parquet differ
diff --git a/test_data/data/parquet_unix.parquet b/test_data/data/parquet_unix.parquet
index 8df26b8f..b91a3f17 100644
Binary files a/test_data/data/parquet_unix.parquet and b/test_data/data/parquet_unix.parquet differ
diff --git a/test_data/loan.csv b/test_data/loan.csv
index 1c02073f..3033167a 100644
--- a/test_data/loan.csv
+++ b/test_data/loan.csv
@@ -1,7 +1,7 @@
-event_timestamp,loan_status,loan_id,personal_income,loan_amount
-1587924064746575,True,10000,59000,35000
-1587924064746575,False,10001,9600,1000
-1587924064746575,True,10002,9600,5500
-1588010464746575,True,10000,65500,35000
-1588010464746575,True,10001,54400,35000
-1588010464746575,True,10002,9900,2500
+event_timestamp,loan_amount,loan_id,loan_status,personal_income
+1587924064746575,35000,10000,True,59000
+1587924064746575,1000,10001,False,9600
+1587924064746575,5500,10002,True,9600
+1588010464746575,35000,10000,True,65500
+1588010464746575,35000,10001,True,54400
+1588010464746575,2500,10002,True,9900
diff --git a/test_data/test_model.parquet b/test_data/test_model.parquet
index 804e5805..5902b9a9 100644
Binary files a/test_data/test_model.parquet and b/test_data/test_model.parquet differ
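Note on the pyproject.toml hunk above: pandas moves from a required dependency to an opt-in extra (`pandas = { version = "^2.0.0", optional = true }` plus a matching `pandas = ["pandas"]` entry under `[tool.poetry.extras]`). A change like this implies a lazy-import guard at every pandas call site. The sketch below is a minimal, hypothetical version of such a guard; the function name `require_pandas` and the error message are illustrative assumptions, not the package's actual API.

```python
from types import ModuleType


def require_pandas() -> ModuleType:
    """Import pandas lazily, pointing users at the new extra when it is missing."""
    try:
        # Deferred import: only evaluated when a pandas-dependent code path runs.
        import pandas
    except ImportError as error:  # raised when the 'pandas' extra was not installed
        raise ImportError(
            "pandas is now an optional dependency; "
            "install it with: pip install 'aligned[pandas]'"
        ) from error
    return pandas
```

The same opt-in pattern applies to the other new extras declared in this diff: `deltalake` pulls in deltalake ^0.18.1, and the expanded `psql` extra now installs psycopg2 alongside connectorx and asyncpg.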