diff --git a/.github/workflows/docker_build.yml b/.github/workflows/docker_build.yml index d2864fbcd..ec990ed4b 100644 --- a/.github/workflows/docker_build.yml +++ b/.github/workflows/docker_build.yml @@ -8,6 +8,7 @@ on: push: tags: - 'v[0-9]+.[0-9]+.[0-9]+' + - 'v[0-9]+.[0-9]+.[0-9]+-[A-Za-z0-9]+' env: REGISTRY: ghcr.io diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index af83b5ee0..dbe438b37 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -96,7 +96,7 @@ jobs: run: | # ran only on certain files for now # add here when checked - poetry run black schematic/configuration.py schematic/exceptions.py schematic/help.py schematic/loader.py schematic/version.py --check + poetry run black schematic/configuration/*.py schematic/exceptions.py schematic/help.py schematic/loader.py schematic/version.py --check #---------------------------------------------- # type checking/enforcement @@ -105,9 +105,9 @@ jobs: run: | # ran only on certain files for now # add here when checked - poetry run mypy --install-types --non-interactive schematic/configuration.py + # poetry run mypy --install-types --non-interactive # add here when enforced - poetry run mypy --disallow-untyped-defs --install-types --non-interactive schematic/exceptions.py schematic/help.py schematic/loader.py schematic/version.py + poetry run mypy --disallow-untyped-defs --install-types --non-interactive schematic/configuration/*.py schematic/exceptions.py schematic/help.py schematic/loader.py schematic/version.py #---------------------------------------------- # linting @@ -116,7 +116,7 @@ jobs: run: | # ran only on certain files for now # add here when checked - poetry run pylint schematic/exceptions.py schematic/help.py schematic/loader.py schematic/version.py + poetry run pylint schematic/configuration/*.py schematic/exceptions.py schematic/help.py schematic/loader.py schematic/version.py #---------------------------------------------- # run test suite diff --git 
a/.gitignore b/.gitignore index f5b3beb50..fa0de2078 100644 --- a/.gitignore +++ b/.gitignore @@ -172,4 +172,11 @@ tests/data/mock_manifests/valid_test_manifest_censored.csv tests/data/mock_manifests/Rule_Combo_Manifest_censored.csv # Pickle file -tests/data/schema.gpickle \ No newline at end of file +tests/data/schema.gpickle + +# Created during testting +Example* +manifests/* + +# schematic config file +config.yml \ No newline at end of file diff --git a/CONTRIBUTION.md b/CONTRIBUTION.md index 52762c9a1..a9876d4df 100644 --- a/CONTRIBUTION.md +++ b/CONTRIBUTION.md @@ -8,7 +8,7 @@ Please note we have a [code of conduct](CODE_OF_CONDUCT.md), please follow it in ### Reporting bugs or feature requests -You can use the [`Issues`](https://github.com/Sage-Bionetworks/schematic/issues) tab to **create bug and feature requests**. Providing enough details to the developers to verify and troubleshoot your issue is paramount: +You can use [Sage Bionetwork's FAIR Data service desk](https://sagebionetworks.jira.com/servicedesk/customer/portal/5/group/8) to **create bug and feature requests**. Providing enough details to the developers to verify and troubleshoot your issue is paramount: - **Provide a clear and descriptive title as well as a concise summary** of the issue to identify the problem. - **Describe the exact steps which reproduce the problem** in as many details as possible. - **Describe the behavior you observed after following the steps** and point out what exactly is the problem with that behavior. diff --git a/README.md b/README.md index 2f2c00b4a..533885802 100644 --- a/README.md +++ b/README.md @@ -88,45 +88,69 @@ editor of your choice and edit the `username` and `authtoken` attribute under th Configure config.yml File -*Note*: Below is only a brief explanation of some attributes in `config.yml`. Please use the link [here](https://github.com/Sage-Bionetworks/schematic/blob/develop/config.yml) to get the latest version of `config.yml` in `develop` branch. 
- -Description of `config.yml` attributes - - definitions: - synapse_config: "~/path/to/.synapseConfig" - service_acct_creds: "~/path/to/service_account_creds.json" - - synapse: - master_fileview: "syn23643253" # fileview of project with datasets on Synapse - manifest_folder: "~/path/to/manifest_folder/" # manifests will be downloaded to this folder - manifest_basename: "filename" # base name of the manifest file in the project dataset, without extension - service_acct_creds: "syn25171627" # synapse ID of service_account_creds.json file - - manifest: - title: "example" # title of metadata manifest file - # to make all manifests enter only 'all manifests' - data_type: - - "Biospecimen" - - "Patient" - - model: - input: - location: "data/schema_org_schemas/example.jsonld" # path to JSON-LD data model - file_type: "local" # only type "local" is supported currently - style: # configuration of google sheet - google_manifest: - req_bg_color: - red: 0.9215 - green: 0.9725 - blue: 0.9803 - opt_bg_color: - red: 1.0 - green: 1.0 - blue: 0.9019 - master_template_id: '1LYS5qE4nV9jzcYw5sXwCza25slDfRA1CIg3cs-hCdpU' - strict_validation: true - -*Note*: Paths can be specified relative to the `config.yml` file or as absolute paths. +There are some defaults in schematic that can be configured. These fields are in ``config_example.yml``: + +```text + +# This is an example config for Schematic. +# All listed values are those that are the default if a config is not used. +# Save this as config.yml, this will be gitignored. +# Remove any fields in the config you don't want to change +# Change the values of any fields you do want to change + + +# This describes where assets such as manifests are stored +asset_store: + # This is when assets are stored in a synapse project + synapse: + # Synapse ID of the file view listing all project data assets. 
+ master_fileview_id: "syn23643253" + # Path to the synapse config file, either absolute or relative to this file + config: ".synapseConfig" + # Base name that manifest files will be saved as + manifest_basename: "synapse_storage_manifest" + +# This describes information about manifests as it relates to generation and validation +manifest: + # Location where manifests will saved to + manifest_folder: "manifests" + # Title or title prefix given to generated manifest(s) + title: "example" + # Data types of manifests to be generated or data type (singular) to validate manifest against + data_type: + - "Biospecimen" + - "Patient" + +# Describes the location of your schema +model: + # Location of your schema jsonld, it must be a path relative to this file or absolute + location: "tests/data/example.model.jsonld" + +# This section is for using google sheets with Schematic +google_sheets: + # The Synapse id of the Google service account credentials. + service_acct_creds_synapse_id: "syn25171627" + # Path to the synapse config file, either absolute or relative to this file + service_acct_creds: "schematic_service_account_creds.json" + # When doing google sheet validation (regex match) with the validation rules. + # true is alerting the user and not allowing entry of bad values. + # false is warning but allowing the entry on to the sheet. + strict_validation: true +``` + +If you want to change any of these copy ``config_example.yml`` to ``config.yml``, change any fields you want to, and remove any fields you don't. + +For example if you wanted to change the folder where manifests are downloaded your config should look like: + +```text + +manifest: + manifest_folder: "my_manifest_folder_path" +``` + +_Note_: `config.yml` is ignored by git. + +_Note_: Paths can be specified relative to the `config.yml` file or as absolute paths. 6. 
Login to Synapse by using the command line On the CLI in your virtual environment, run the following command: @@ -264,7 +288,7 @@ docker run -v %cd%:/schematic \ If you install external libraries by using `poetry add `, please make sure that you include `pyproject.toml` and `poetry.lock` file in your commit. ## Reporting bugs or feature requests -You can use the [`Issues`](https://github.com/Sage-Bionetworks/schematic/issues) tab to **create bug and feature requests**. Providing enough details to the developers to verify and troubleshoot your issue is paramount: +You can **create bug and feature requests** through [Sage Bionetwork's FAIR Data service desk](https://sagebionetworks.jira.com/servicedesk/customer/portal/5/group/8). Providing enough details to the developers to verify and troubleshoot your issue is paramount: - **Provide a clear and descriptive title as well as a concise summary** of the issue to identify the problem. - **Describe the exact steps which reproduce the problem** in as many details as possible. - **Describe the behavior you observed after following the steps** and point out what exactly is the problem with that behavior. 
diff --git a/certificate.conf b/certificate.conf index a6d515072..eb5bf94d9 100644 --- a/certificate.conf +++ b/certificate.conf @@ -4,7 +4,9 @@ server { include /etc/nginx/conf.d/self-signed.conf; include /etc/nginx/conf.d/ssl-params.conf; server_name 127.0.0.1; - + proxy_read_timeout 300; + proxy_connect_timeout 300; + proxy_send_timeout 300; location / { try_files $uri @app; } diff --git a/config.yml b/config.yml deleted file mode 100644 index b71839aeb..000000000 --- a/config.yml +++ /dev/null @@ -1,36 +0,0 @@ -# Do not change the 'definitions' section unless you know what you're doing -definitions: - synapse_config: ".synapseConfig" - service_acct_creds: "schematic_service_account_creds.json" - -synapse: - master_fileview: 'syn23643253' - manifest_folder: 'manifests' - manifest_basename: 'synapse_storage_manifest' - service_acct_creds: 'syn25171627' - -manifest: - # if making many manifests, just include name prefix - title: 'example' - # to make all manifests enter only 'all manifests' - data_type: - - 'Biospecimen' - - 'Patient' - -model: - input: - location: 'tests/data/example.model.jsonld' - file_type: 'local' - -style: - google_manifest: - req_bg_color: - red: 0.9215 - green: 0.9725 - blue: 0.9803 - opt_bg_color: - red: 1.0 - green: 1.0 - blue: 0.9019 - master_template_id: '1LYS5qE4nV9jzcYw5sXwCza25slDfRA1CIg3cs-hCdpU' - strict_validation: true diff --git a/config_example.yml b/config_example.yml new file mode 100644 index 000000000..245b8fefe --- /dev/null +++ b/config_example.yml @@ -0,0 +1,45 @@ +# This is an example config for Schematic. +# All listed values are those that are the default if a config is not used. +# Save this as config.yml, this will be gitignored. +# Remove any fields in the config you don't want to change +# If you remove all fields from a section, the entire section should be removed including the header. 
+# Change the values of any fields you do want to change + + +# This describes where assets such as manifests are stored +asset_store: + # This is when assets are stored in a synapse project + synapse: + # Synapse ID of the file view listing all project data assets. + master_fileview_id: "syn23643253" + # Path to the synapse config file, either absolute or relative to this file + config: ".synapseConfig" + # Base name that manifest files will be saved as + manifest_basename: "synapse_storage_manifest" + +# This describes information about manifests as it relates to generation and validation +manifest: + # Location where manifests will saved to + manifest_folder: "manifests" + # Title or title prefix given to generated manifest(s) + title: "example" + # Data types of manifests to be generated or data type (singular) to validate manifest against + data_type: + - "Biospecimen" + - "Patient" + +# Describes the location of your schema +model: + # Location of your schema jsonld, it must be a path relative to this file or absolute + location: "tests/data/example.model.jsonld" + +# This section is for using google sheets with Schematic +google_sheets: + # The Synapse id of the Google service account credentials. + service_acct_creds_synapse_id: "syn25171627" + # Path to the synapse config file, either absolute or relative to this file + service_acct_creds: "schematic_service_account_creds.json" + # When doing google sheet validation (regex match) with the validation rules. + # true is alerting the user and not allowing entry of bad values. + # false is warning but allowing the entry on to the sheet. 
+ strict_validation: true diff --git a/poetry.lock b/poetry.lock index b52c2d473..d1f5b59a4 100644 --- a/poetry.lock +++ b/poetry.lock @@ -27,7 +27,7 @@ dev = ["black", "docutils", "flake8", "ipython", "m2r", "mistune (<2.0.0)", "pyt [[package]] name = "anyio" -version = "3.7.0" +version = "3.7.1" description = "High level compatibility layer for multiple asynchronous event loop implementations" category = "main" optional = false @@ -39,7 +39,7 @@ idna = ">=2.8" sniffio = ">=1.1" [package.extras] -doc = ["Sphinx (>=6.1.0)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd-theme", "sphinxcontrib-jquery"] +doc = ["Sphinx", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd-theme (>=1.2.2)", "sphinxcontrib-jquery"] test = ["anyio[trio]", "coverage[toml] (>=4.5)", "hypothesis (>=4.0)", "mock (>=4)", "psutil (>=5.9)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "uvloop (>=0.17)"] trio = ["trio (<0.22)"] @@ -95,7 +95,7 @@ python-dateutil = ">=2.7.0" [[package]] name = "astroid" -version = "2.15.5" +version = "2.15.6" description = "An abstract syntax tree for Python with inference support." category = "dev" optional = false @@ -168,18 +168,19 @@ lxml = ["lxml"] [[package]] name = "black" -version = "22.12.0" +version = "23.7.0" description = "The uncompromising code formatter." category = "dev" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" [package.dependencies] click = ">=8.0.0" mypy-extensions = ">=0.4.3" +packaging = ">=22.0" pathspec = ">=0.9.0" platformdirs = ">=2" -tomli = {version = ">=1.1.0", markers = "python_full_version < \"3.11.0a7\""} +tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} typing-extensions = {version = ">=3.10.0.0", markers = "python_version < \"3.10\""} [package.extras] @@ -232,7 +233,7 @@ pycparser = "*" [[package]] name = "charset-normalizer" -version = "3.1.0" +version = "3.2.0" description = "The Real First Universal Charset Detector. 
Open, modern and actively maintained alternative to Chardet." category = "main" optional = false @@ -240,7 +241,7 @@ python-versions = ">=3.7.0" [[package]] name = "click" -version = "8.1.3" +version = "8.1.5" description = "Composable command line interface toolkit" category = "main" optional = false @@ -339,7 +340,7 @@ toml = ["tomli"] [[package]] name = "cryptography" -version = "41.0.0" +version = "41.0.2" description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." category = "main" optional = false @@ -415,6 +416,17 @@ wrapt = ">=1.10,<2" [package.extras] dev = ["PyTest", "PyTest-Cov", "bump2version (<1)", "sphinx (<2)", "tox"] +[[package]] +name = "deprecation" +version = "2.1.0" +description = "A library to handle automated deprecations" +category = "main" +optional = false +python-versions = "*" + +[package.dependencies] +packaging = "*" + [[package]] name = "dill" version = "0.3.6" @@ -452,7 +464,7 @@ python-versions = ">=3.6" [[package]] name = "exceptiongroup" -version = "1.1.1" +version = "1.1.2" description = "Backport of PEP 654 (exception groups)" category = "main" optional = false @@ -537,41 +549,41 @@ python-versions = ">=2.7, !=3.0, !=3.1, !=3.2, !=3.3, !=3.4, <4" [[package]] name = "google-api-core" -version = "2.11.0" +version = "2.11.1" description = "Google API client core library" category = "main" optional = false python-versions = ">=3.7" [package.dependencies] -google-auth = ">=2.14.1,<3.0dev" -googleapis-common-protos = ">=1.56.2,<2.0dev" -protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.0 || >4.21.0,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<5.0.0dev" -requests = ">=2.18.0,<3.0.0dev" +google-auth = ">=2.14.1,<3.0.dev0" +googleapis-common-protos = ">=1.56.2,<2.0.dev0" +protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.0 || >4.21.0,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || 
>4.21.4,<4.21.5 || >4.21.5,<5.0.0.dev0" +requests = ">=2.18.0,<3.0.0.dev0" [package.extras] -grpc = ["grpcio (>=1.33.2,<2.0dev)", "grpcio (>=1.49.1,<2.0dev)", "grpcio-status (>=1.33.2,<2.0dev)", "grpcio-status (>=1.49.1,<2.0dev)"] -grpcgcp = ["grpcio-gcp (>=0.2.2,<1.0dev)"] -grpcio-gcp = ["grpcio-gcp (>=0.2.2,<1.0dev)"] +grpc = ["grpcio (>=1.33.2,<2.0dev)", "grpcio (>=1.49.1,<2.0dev)", "grpcio-status (>=1.33.2,<2.0.dev0)", "grpcio-status (>=1.49.1,<2.0.dev0)"] +grpcgcp = ["grpcio-gcp (>=0.2.2,<1.0.dev0)"] +grpcio-gcp = ["grpcio-gcp (>=0.2.2,<1.0.dev0)"] [[package]] name = "google-api-python-client" -version = "2.88.0" +version = "2.93.0" description = "Google API Client Library for Python" category = "main" optional = false python-versions = ">=3.7" [package.dependencies] -google-api-core = ">=1.31.5,<2.0.0 || >2.3.0,<3.0.0dev" -google-auth = ">=1.19.0,<3.0.0dev" +google-api-core = ">=1.31.5,<2.0.0 || >2.3.0,<3.0.0.dev0" +google-auth = ">=1.19.0,<3.0.0.dev0" google-auth-httplib2 = ">=0.1.0" -httplib2 = ">=0.15.0,<1dev" +httplib2 = ">=0.15.0,<1.dev0" uritemplate = ">=3.0.1,<5" [[package]] name = "google-auth" -version = "2.19.0" +version = "2.22.0" description = "Google Authentication Library" category = "main" optional = false @@ -585,11 +597,11 @@ six = ">=1.9.0" urllib3 = "<2.0" [package.extras] -aiohttp = ["aiohttp (>=3.6.2,<4.0.0dev)", "requests (>=2.20.0,<3.0.0dev)"] +aiohttp = ["aiohttp (>=3.6.2,<4.0.0.dev0)", "requests (>=2.20.0,<3.0.0.dev0)"] enterprise-cert = ["cryptography (==36.0.2)", "pyopenssl (==22.0.0)"] pyopenssl = ["cryptography (>=38.0.3)", "pyopenssl (>=20.0.0)"] reauth = ["pyu2f (>=0.1.5)"] -requests = ["requests (>=2.20.0,<3.0.0dev)"] +requests = ["requests (>=2.20.0,<3.0.0.dev0)"] [[package]] name = "google-auth-httplib2" @@ -621,17 +633,17 @@ tool = ["click (>=6.0.0)"] [[package]] name = "googleapis-common-protos" -version = "1.59.0" +version = "1.59.1" description = "Common protobufs used in Google APIs" category = "main" optional = false 
python-versions = ">=3.7" [package.dependencies] -protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<5.0.0dev" +protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<5.0.0.dev0" [package.extras] -grpc = ["grpcio (>=1.44.0,<2.0.0dev)"] +grpc = ["grpcio (>=1.44.0,<2.0.0.dev0)"] [[package]] name = "graphviz" @@ -812,7 +824,7 @@ tests = ["pytest", "pytest-cov", "pytest-mock"] [[package]] name = "ipykernel" -version = "6.23.1" +version = "6.24.0" description = "IPython Kernel for Jupyter" category = "main" optional = false @@ -842,7 +854,7 @@ test = ["flaky", "ipyparallel", "pre-commit", "pytest (>=7.0)", "pytest-asyncio" [[package]] name = "ipython" -version = "8.13.2" +version = "8.14.0" description = "IPython: Productive Interactive Computing" category = "main" optional = false @@ -886,7 +898,7 @@ python-versions = "*" [[package]] name = "ipywidgets" -version = "8.0.6" +version = "8.0.7" description = "Jupyter interactive widgets" category = "main" optional = false @@ -990,40 +1002,42 @@ i18n = ["Babel (>=2.7)"] [[package]] name = "jsonpatch" -version = "1.32" +version = "1.33" description = "Apply JSON-Patches (RFC 6902)" category = "main" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*, !=3.6.*" [package.dependencies] jsonpointer = ">=1.9" [[package]] name = "jsonpointer" -version = "2.3" +version = "2.4" description = "Identify specific nodes in a JSON document (RFC 6901)" category = "main" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*, !=3.6.*" [[package]] name = "jsonschema" -version = "4.17.3" +version = "4.18.3" description = 
"An implementation of JSON Schema validation for Python" category = "main" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" [package.dependencies] -attrs = ">=17.4.0" +attrs = ">=22.2.0" fqdn = {version = "*", optional = true, markers = "extra == \"format-nongpl\""} idna = {version = "*", optional = true, markers = "extra == \"format-nongpl\""} isoduration = {version = "*", optional = true, markers = "extra == \"format-nongpl\""} jsonpointer = {version = ">1.13", optional = true, markers = "extra == \"format-nongpl\""} -pyrsistent = ">=0.14.0,<0.17.0 || >0.17.0,<0.17.1 || >0.17.1,<0.17.2 || >0.17.2" +jsonschema-specifications = ">=2023.03.6" +referencing = ">=0.28.4" rfc3339-validator = {version = "*", optional = true, markers = "extra == \"format-nongpl\""} rfc3986-validator = {version = ">0.1.0", optional = true, markers = "extra == \"format-nongpl\""} +rpds-py = ">=0.7.1" uri-template = {version = "*", optional = true, markers = "extra == \"format-nongpl\""} webcolors = {version = ">=1.11", optional = true, markers = "extra == \"format-nongpl\""} @@ -1031,9 +1045,20 @@ webcolors = {version = ">=1.11", optional = true, markers = "extra == \"format-n format = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3987", "uri-template", "webcolors (>=1.11)"] format-nongpl = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3986-validator (>0.1.0)", "uri-template", "webcolors (>=1.11)"] +[[package]] +name = "jsonschema-specifications" +version = "2023.6.1" +description = "The JSON Schema meta-schemas and vocabularies, exposed as a Registry" +category = "main" +optional = false +python-versions = ">=3.8" + +[package.dependencies] +referencing = ">=0.28.0" + [[package]] name = "jupyter-client" -version = "8.2.0" +version = "8.3.0" description = "Jupyter protocol implementation and client libraries" category = "main" optional = false @@ -1053,7 +1078,7 @@ test = ["coverage", "ipykernel 
(>=6.14)", "mypy", "paramiko", "pre-commit", "pyt [[package]] name = "jupyter-core" -version = "5.3.0" +version = "5.3.1" description = "Jupyter core package. A base package on which Jupyter projects rely." category = "main" optional = false @@ -1091,7 +1116,7 @@ test = ["click", "coverage", "pre-commit", "pytest (>=7.0)", "pytest-asyncio (>= [[package]] name = "jupyter-server" -version = "2.6.0" +version = "2.7.0" description = "The backend—i.e. core services, APIs, and REST endpoints—to Jupyter web applications." category = "main" optional = false @@ -1120,7 +1145,7 @@ websocket-client = "*" [package.extras] docs = ["ipykernel", "jinja2", "jupyter-client", "jupyter-server", "myst-parser", "nbformat", "prometheus-client", "pydata-sphinx-theme", "send2trash", "sphinx-autodoc-typehints", "sphinxcontrib-github-alt", "sphinxcontrib-openapi (>=0.8.0)", "sphinxcontrib-spelling", "sphinxemoji", "tornado", "typing-extensions"] -test = ["ipykernel", "pre-commit", "pytest (>=7.0)", "pytest-console-scripts", "pytest-jupyter[server] (>=0.4)", "pytest-timeout", "requests"] +test = ["flaky", "ipykernel", "pre-commit", "pytest (>=7.0)", "pytest-console-scripts", "pytest-jupyter[server] (>=0.4)", "pytest-timeout", "requests"] [[package]] name = "jupyter-server-terminals" @@ -1148,7 +1173,7 @@ python-versions = ">=3.7" [[package]] name = "jupyterlab-widgets" -version = "3.0.7" +version = "3.0.8" description = "Jupyter interactive widgets for JupyterLab" category = "main" optional = false @@ -1249,27 +1274,28 @@ python-versions = ">=3.6" [[package]] name = "mistune" -version = "2.0.5" -description = "A sane Markdown parser with useful plugins and renderers" +version = "3.0.1" +description = "A sane and fast Markdown parser with useful plugins and renderers" category = "main" optional = false -python-versions = "*" +python-versions = ">=3.7" [[package]] name = "mypy" -version = "0.982" +version = "1.4.1" description = "Optional static typing for Python" category = "dev" optional = 
false python-versions = ">=3.7" [package.dependencies] -mypy-extensions = ">=0.4.3" +mypy-extensions = ">=1.0.0" tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} -typing-extensions = ">=3.10" +typing-extensions = ">=4.1.0" [package.extras] dmypy = ["psutil (>=4.0)"] +install-types = ["pip"] python2 = ["typed-ast (>=1.4.0,<2)"] reports = ["lxml"] @@ -1334,7 +1360,7 @@ test = ["flaky", "ipykernel (>=6.19.3)", "ipython", "ipywidgets", "nbconvert (>= [[package]] name = "nbconvert" -version = "7.4.0" +version = "7.6.0" description = "Converting Jupyter Notebooks" category = "main" optional = false @@ -1342,21 +1368,21 @@ python-versions = ">=3.7" [package.dependencies] beautifulsoup4 = "*" -bleach = "*" +bleach = "!=5.0.0" defusedxml = "*" importlib-metadata = {version = ">=3.6", markers = "python_version < \"3.10\""} jinja2 = ">=3.0" jupyter-core = ">=4.7" jupyterlab-pygments = "*" markupsafe = ">=2.0" -mistune = ">=2.0.3,<3" +mistune = ">=2.0.3,<4" nbclient = ">=0.5.0" -nbformat = ">=5.1" +nbformat = ">=5.7" packaging = "*" pandocfilters = ">=1.4.1" pygments = ">=2.4.1" tinycss2 = "*" -traitlets = ">=5.0" +traitlets = ">=5.1" [package.extras] all = ["nbconvert[docs,qtpdf,serve,test,webpdf]"] @@ -1369,7 +1395,7 @@ webpdf = ["pyppeteer (>=1,<1.1)"] [[package]] name = "nbformat" -version = "5.9.0" +version = "5.9.1" description = "The Jupyter Notebook format" category = "main" optional = false @@ -1455,11 +1481,11 @@ test = ["pytest", "pytest-console-scripts", "pytest-jupyter", "pytest-tornasync" [[package]] name = "numpy" -version = "1.24.3" +version = "1.25.1" description = "Fundamental package for array computing in Python" category = "main" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" [[package]] name = "oauth2client" @@ -1617,23 +1643,23 @@ python-versions = "*" [[package]] name = "platformdirs" -version = "3.5.1" +version = "3.8.1" description = "A small Python package for determining appropriate platform-specific dirs, 
e.g. a \"user data dir\"." category = "main" optional = false python-versions = ">=3.7" [package.extras] -docs = ["furo (>=2023.3.27)", "proselint (>=0.13)", "sphinx (>=6.2.1)", "sphinx-autodoc-typehints (>=1.23,!=1.23.4)"] -test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.3.1)", "pytest-cov (>=4)", "pytest-mock (>=3.10)"] +docs = ["furo (>=2023.5.20)", "proselint (>=0.13)", "sphinx (>=7.0.1)", "sphinx-autodoc-typehints (>=1.23,!=1.23.4)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.3.1)", "pytest-cov (>=4.1)", "pytest-mock (>=3.10)"] [[package]] name = "pluggy" -version = "1.0.0" +version = "1.2.0" description = "plugin and hook calling mechanisms for python" category = "dev" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" [package.extras] dev = ["pre-commit", "tox"] @@ -1641,7 +1667,7 @@ testing = ["pytest", "pytest-benchmark"] [[package]] name = "prometheus-client" -version = "0.17.0" +version = "0.17.1" description = "Python client for the Prometheus monitoring system." 
category = "main" optional = false @@ -1652,7 +1678,7 @@ twisted = ["twisted"] [[package]] name = "prompt-toolkit" -version = "3.0.38" +version = "3.0.39" description = "Library for building powerful interactive command lines in Python" category = "main" optional = false @@ -1663,7 +1689,7 @@ wcwidth = "*" [[package]] name = "protobuf" -version = "4.23.2" +version = "4.23.4" description = "" category = "main" optional = false @@ -1744,7 +1770,7 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" [[package]] name = "pydantic" -version = "1.10.8" +version = "1.10.11" description = "Data validation and settings management using python type hints" category = "main" optional = false @@ -1831,7 +1857,7 @@ test = ["flaky", "pretend", "pytest (>=3.0.1)"] [[package]] name = "pyparsing" -version = "3.0.9" +version = "3.1.0" description = "pyparsing module - Classes and methods to define and execute parsing grammars" category = "main" optional = false @@ -1840,17 +1866,9 @@ python-versions = ">=3.6.8" [package.extras] diagrams = ["jinja2", "railroad-diagrams"] -[[package]] -name = "pyrsistent" -version = "0.19.3" -description = "Persistent/Functional/Immutable data structures" -category = "main" -optional = false -python-versions = ">=3.7" - [[package]] name = "pytest" -version = "7.3.1" +version = "7.4.0" description = "pytest: simple powerful testing with Python" category = "dev" optional = false @@ -1865,7 +1883,7 @@ pluggy = ">=0.12,<2.0" tomli = {version = ">=1.0.0", markers = "python_version < \"3.11\""} [package.extras] -testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "xmlschema"] +testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] [[package]] name = "pytest-cov" @@ -1884,7 +1902,7 @@ testing = ["fields", "hunter", "process-tests", "pytest-xdist", "six", "virtuale [[package]] name = 
"pytest-mock" -version = "3.10.0" +version = "3.11.1" description = "Thin-wrapper around the mock package for easier use with pytest" category = "dev" optional = false @@ -1944,11 +1962,11 @@ python-versions = "*" [[package]] name = "pywin32-ctypes" -version = "0.2.0" -description = "" +version = "0.2.2" +description = "A (partial) reimplementation of pywin32 using ctypes/cffi" category = "main" optional = false -python-versions = "*" +python-versions = ">=3.6" [[package]] name = "pywinpty" @@ -1995,9 +2013,21 @@ html = ["html5lib (>=1.0,<2.0)"] lxml = ["lxml (>=4.3.0,<5.0.0)"] networkx = ["networkx (>=2.0.0,<3.0.0)"] +[[package]] +name = "referencing" +version = "0.29.1" +description = "JSON Referencing + Python" +category = "main" +optional = false +python-versions = ">=3.8" + +[package.dependencies] +attrs = ">=22.2.0" +rpds-py = ">=0.7.0" + [[package]] name = "regex" -version = "2023.5.5" +version = "2023.6.3" description = "Alternative regular expression module, to replace re." category = "main" optional = false @@ -2055,6 +2085,14 @@ category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +[[package]] +name = "rpds-py" +version = "0.8.10" +description = "Python bindings to Rust's persistent data structures (rpds)" +category = "main" +optional = false +python-versions = ">=3.8" + [[package]] name = "rsa" version = "4.9" @@ -2091,13 +2129,14 @@ python-versions = ">=3.5" [[package]] name = "schematic-db" -version = "0.0.20" +version = "0.0.29" description = "" category = "main" optional = false python-versions = ">=3.9,<4.0" [package.dependencies] +deprecation = ">=2.1.0,<3.0.0" interrogate = ">=1.5.0,<2.0.0" networkx = ">=2.8.6,<3.0.0" pandas = ">=1.4.3,<2.0.0" @@ -2117,18 +2156,18 @@ synapse = ["synapseclient (>=2.7.0,<3.0.0)"] [[package]] name = "scipy" -version = "1.10.1" +version = "1.11.1" description = "Fundamental algorithms for scientific computing in Python" category = "main" optional = false 
-python-versions = "<3.12,>=3.8" +python-versions = "<3.13,>=3.9" [package.dependencies] -numpy = ">=1.19.5,<1.27.0" +numpy = ">=1.21.6,<1.28.0" [package.extras] -dev = ["click", "doit (>=0.36.0)", "flake8", "mypy", "pycodestyle", "pydevtool", "rich-click", "typing_extensions"] -doc = ["matplotlib (>2)", "numpydoc", "pydata-sphinx-theme (==0.9.0)", "sphinx (!=4.1.0)", "sphinx-design (>=0.2.0)"] +dev = ["click", "cython-lint (>=0.12.2)", "doit (>=0.36.0)", "mypy", "pycodestyle", "pydevtool", "rich-click", "ruff", "types-psutil", "typing_extensions"] +doc = ["jupytext", "matplotlib (>2)", "myst-nb", "numpydoc", "pooch", "pydata-sphinx-theme (==0.9.0)", "sphinx (!=4.1.0)", "sphinx-design (>=0.2.0)"] test = ["asv", "gmpy2", "mpmath", "pooch", "pytest", "pytest-cov", "pytest-timeout", "pytest-xdist", "scikit-umfpack", "threadpoolctl"] [[package]] @@ -2319,7 +2358,7 @@ test = ["pytest"] [[package]] name = "sqlalchemy" -version = "1.4.48" +version = "1.4.49" description = "Database Abstraction Library" category = "main" optional = false @@ -2403,7 +2442,7 @@ Jinja2 = ">=2.0" [[package]] name = "synapseclient" -version = "2.7.1" +version = "2.7.2" description = "A client for Synapse, a collaborative compute space that allows scientists to share and analyze data together." 
category = "main" optional = false @@ -2415,10 +2454,11 @@ importlib-metadata = "<5.0" keyring = ">=15,<23.5" "keyrings.alt" = {version = "3.1", markers = "sys_platform == \"linux\""} requests = ">=2.22.0,<3.0" +urllib3 = "<2" [package.extras] boto3 = ["boto3 (>=1.7.0,<2.0)"] -docs = ["sphinx (>=3.0,<4.0)", "sphinx-argparse (>=0.2,<0.3)"] +docs = ["sphinx (>=4.0,<5.0)", "sphinx-argparse (>=0.2,<0.3)"] pandas = ["pandas (>=0.25.0,<1.5)"] pysftp = ["pysftp (>=0.2.8,<0.3)"] tests = ["flake8 (>=3.7.0,<4.0)", "pytest (>=5.0.0,<7.0)", "pytest-mock (>=3.0,<4.0)", "pytest-xdist[psutil] (>=2.2,<3.0.0)"] @@ -2578,14 +2618,14 @@ devenv = ["black", "check-manifest", "flake8", "pyroma", "pytest (>=4.3)", "pyte [[package]] name = "uri-template" -version = "1.2.0" +version = "1.3.0" description = "RFC 6570 URI Template Processor" category = "main" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" [package.extras] -dev = ["flake8 (<4.0.0)", "flake8-annotations", "flake8-bugbear", "flake8-commas", "flake8-comprehensions", "flake8-continuation", "flake8-datetimez", "flake8-docstrings", "flake8-import-order", "flake8-literal", "flake8-noqa", "flake8-requirements", "flake8-type-annotations", "flake8-use-fstring", "mypy", "pep8-naming"] +dev = ["flake8", "flake8-annotations", "flake8-bandit", "flake8-bugbear", "flake8-commas", "flake8-comprehensions", "flake8-continuation", "flake8-datetimez", "flake8-docstrings", "flake8-import-order", "flake8-literal", "flake8-modern-annotations", "flake8-noqa", "flake8-pyproject", "flake8-requirements", "flake8-typechecking-import", "flake8-use-fstring", "mypy", "pep8-naming", "types-PyYAML"] [[package]] name = "uritemplate" @@ -2660,7 +2700,7 @@ python-versions = "*" [[package]] name = "websocket-client" -version = "1.5.2" +version = "1.6.1" description = "WebSocket client for Python with low level API options" category = "main" optional = false @@ -2684,7 +2724,7 @@ watchdog = ["watchdog"] [[package]] name = 
"widgetsnbextension" -version = "4.0.7" +version = "4.0.8" description = "Jupyter interactive widgets for Jupyter Notebook" category = "main" optional = false @@ -2700,20 +2740,20 @@ python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" [[package]] name = "zipp" -version = "3.15.0" +version = "3.16.1" description = "Backport of pathlib-compatible object wrapper for zip files" category = "main" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" [package.extras] -docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] -testing = ["big-O", "flake8 (<5)", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)"] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-ruff"] [metadata] lock-version = "1.1" python-versions = ">=3.9.0,<3.11" -content-hash = "b20d2b7dcf507cfe6397de135cbf25eca23c9c6241a5900199e59ae88dfc00d4" +content-hash = "ebae29c94e793b572346ce4ca38e9744e0cda913550dc0a3c05b76f8f4796715" [metadata.files] alabaster = [ @@ -2725,8 +2765,8 @@ altair = [ {file = "altair-4.2.0.tar.gz", hash = "sha256:d87d9372e63b48cd96b2a6415f0cf9457f50162ab79dc7a31cd7e024dd840026"}, ] anyio = [ - {file = "anyio-3.7.0-py3-none-any.whl", hash = "sha256:eddca883c4175f14df8aedce21054bfca3adb70ffe76a9f607aef9d7fa2ea7f0"}, - {file = "anyio-3.7.0.tar.gz", hash = "sha256:275d9973793619a5374e1c89a4f4ad3f4b0a5510a2b5b939444bee8f4c4d37ce"}, + {file = "anyio-3.7.1-py3-none-any.whl", hash = 
"sha256:91dee416e570e92c64041bd18b900d1d6fa78dff7048769ce5ac5ddad004fbb5"}, + {file = "anyio-3.7.1.tar.gz", hash = "sha256:44a3c9aba0f5defa43261a8b3efb97891f2bd7d804e0e1f56419befa1adfc780"}, ] appnope = [ {file = "appnope-0.1.3-py2.py3-none-any.whl", hash = "sha256:265a455292d0bd8a72453494fa24df5a11eb18373a60c7c0430889f22548605e"}, @@ -2764,8 +2804,8 @@ arrow = [ {file = "arrow-1.2.3.tar.gz", hash = "sha256:3934b30ca1b9f292376d9db15b19446088d12ec58629bc3f0da28fd55fb633a1"}, ] astroid = [ - {file = "astroid-2.15.5-py3-none-any.whl", hash = "sha256:078e5212f9885fa85fbb0cf0101978a336190aadea6e13305409d099f71b2324"}, - {file = "astroid-2.15.5.tar.gz", hash = "sha256:1039262575027b441137ab4a62a793a9b43defb42c32d5670f38686207cd780f"}, + {file = "astroid-2.15.6-py3-none-any.whl", hash = "sha256:389656ca57b6108f939cf5d2f9a2a825a3be50ba9d589670f393236e0a03b91c"}, + {file = "astroid-2.15.6.tar.gz", hash = "sha256:903f024859b7c7687d7a7f3a3f73b17301f8e42dfd9cc9df9d4418172d3e2dbd"}, ] asttokens = [ {file = "asttokens-2.2.1-py2.py3-none-any.whl", hash = "sha256:6b0ac9e93fb0335014d382b8fa9b3afa7df546984258005da0b9e7095b3deb1c"}, @@ -2788,18 +2828,28 @@ beautifulsoup4 = [ {file = "beautifulsoup4-4.12.2.tar.gz", hash = "sha256:492bbc69dca35d12daac71c4db1bfff0c876c00ef4a2ffacce226d4638eb72da"}, ] black = [ - {file = "black-22.12.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9eedd20838bd5d75b80c9f5487dbcb06836a43833a37846cf1d8c1cc01cef59d"}, - {file = "black-22.12.0-cp310-cp310-win_amd64.whl", hash = "sha256:159a46a4947f73387b4d83e87ea006dbb2337eab6c879620a3ba52699b1f4351"}, - {file = "black-22.12.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d30b212bffeb1e252b31dd269dfae69dd17e06d92b87ad26e23890f3efea366f"}, - {file = "black-22.12.0-cp311-cp311-win_amd64.whl", hash = "sha256:7412e75863aa5c5411886804678b7d083c7c28421210180d67dfd8cf1221e1f4"}, - {file = 
"black-22.12.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c116eed0efb9ff870ded8b62fe9f28dd61ef6e9ddd28d83d7d264a38417dcee2"}, - {file = "black-22.12.0-cp37-cp37m-win_amd64.whl", hash = "sha256:1f58cbe16dfe8c12b7434e50ff889fa479072096d79f0a7f25e4ab8e94cd8350"}, - {file = "black-22.12.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:77d86c9f3db9b1bf6761244bc0b3572a546f5fe37917a044e02f3166d5aafa7d"}, - {file = "black-22.12.0-cp38-cp38-win_amd64.whl", hash = "sha256:82d9fe8fee3401e02e79767016b4907820a7dc28d70d137eb397b92ef3cc5bfc"}, - {file = "black-22.12.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:101c69b23df9b44247bd88e1d7e90154336ac4992502d4197bdac35dd7ee3320"}, - {file = "black-22.12.0-cp39-cp39-win_amd64.whl", hash = "sha256:559c7a1ba9a006226f09e4916060982fd27334ae1998e7a38b3f33a37f7a2148"}, - {file = "black-22.12.0-py3-none-any.whl", hash = "sha256:436cc9167dd28040ad90d3b404aec22cedf24a6e4d7de221bec2730ec0c97bcf"}, - {file = "black-22.12.0.tar.gz", hash = "sha256:229351e5a18ca30f447bf724d007f890f97e13af070bb6ad4c0a441cd7596a2f"}, + {file = "black-23.7.0-cp310-cp310-macosx_10_16_arm64.whl", hash = "sha256:5c4bc552ab52f6c1c506ccae05681fab58c3f72d59ae6e6639e8885e94fe2587"}, + {file = "black-23.7.0-cp310-cp310-macosx_10_16_universal2.whl", hash = "sha256:552513d5cd5694590d7ef6f46e1767a4df9af168d449ff767b13b084c020e63f"}, + {file = "black-23.7.0-cp310-cp310-macosx_10_16_x86_64.whl", hash = "sha256:86cee259349b4448adb4ef9b204bb4467aae74a386bce85d56ba4f5dc0da27be"}, + {file = "black-23.7.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:501387a9edcb75d7ae8a4412bb8749900386eaef258f1aefab18adddea1936bc"}, + {file = "black-23.7.0-cp310-cp310-win_amd64.whl", hash = "sha256:fb074d8b213749fa1d077d630db0d5f8cc3b2ae63587ad4116e8a436e9bbe995"}, + {file = "black-23.7.0-cp311-cp311-macosx_10_16_arm64.whl", hash = 
"sha256:b5b0ee6d96b345a8b420100b7d71ebfdd19fab5e8301aff48ec270042cd40ac2"}, + {file = "black-23.7.0-cp311-cp311-macosx_10_16_universal2.whl", hash = "sha256:893695a76b140881531062d48476ebe4a48f5d1e9388177e175d76234ca247cd"}, + {file = "black-23.7.0-cp311-cp311-macosx_10_16_x86_64.whl", hash = "sha256:c333286dc3ddca6fdff74670b911cccedacb4ef0a60b34e491b8a67c833b343a"}, + {file = "black-23.7.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:831d8f54c3a8c8cf55f64d0422ee875eecac26f5f649fb6c1df65316b67c8926"}, + {file = "black-23.7.0-cp311-cp311-win_amd64.whl", hash = "sha256:7f3bf2dec7d541b4619b8ce526bda74a6b0bffc480a163fed32eb8b3c9aed8ad"}, + {file = "black-23.7.0-cp38-cp38-macosx_10_16_arm64.whl", hash = "sha256:f9062af71c59c004cd519e2fb8f5d25d39e46d3af011b41ab43b9c74e27e236f"}, + {file = "black-23.7.0-cp38-cp38-macosx_10_16_universal2.whl", hash = "sha256:01ede61aac8c154b55f35301fac3e730baf0c9cf8120f65a9cd61a81cfb4a0c3"}, + {file = "black-23.7.0-cp38-cp38-macosx_10_16_x86_64.whl", hash = "sha256:327a8c2550ddc573b51e2c352adb88143464bb9d92c10416feb86b0f5aee5ff6"}, + {file = "black-23.7.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6d1c6022b86f83b632d06f2b02774134def5d4d4f1dac8bef16d90cda18ba28a"}, + {file = "black-23.7.0-cp38-cp38-win_amd64.whl", hash = "sha256:27eb7a0c71604d5de083757fbdb245b1a4fae60e9596514c6ec497eb63f95320"}, + {file = "black-23.7.0-cp39-cp39-macosx_10_16_arm64.whl", hash = "sha256:8417dbd2f57b5701492cd46edcecc4f9208dc75529bcf76c514864e48da867d9"}, + {file = "black-23.7.0-cp39-cp39-macosx_10_16_universal2.whl", hash = "sha256:47e56d83aad53ca140da0af87678fb38e44fd6bc0af71eebab2d1f59b1acf1d3"}, + {file = "black-23.7.0-cp39-cp39-macosx_10_16_x86_64.whl", hash = "sha256:25cc308838fe71f7065df53aedd20327969d05671bac95b38fdf37ebe70ac087"}, + {file = "black-23.7.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:642496b675095d423f9b8448243336f8ec71c9d4d57ec17bf795b67f08132a91"}, + {file = "black-23.7.0-cp39-cp39-win_amd64.whl", hash = "sha256:ad0014efc7acf0bd745792bd0d8857413652979200ab924fbf239062adc12491"}, + {file = "black-23.7.0-py3-none-any.whl", hash = "sha256:9fd59d418c60c0348505f2ddf9609c1e1de8e7493eab96198fc89d9f865e7a96"}, + {file = "black-23.7.0.tar.gz", hash = "sha256:022a582720b0d9480ed82576c920a8c1dde97cc38ff11d8d8859b3bd6ca9eedb"}, ] bleach = [ {file = "bleach-6.0.0-py3-none-any.whl", hash = "sha256:33c16e3353dbd13028ab4799a0f89a83f113405c766e9c122df8a06f5b85b3f4"}, @@ -2880,85 +2930,85 @@ cffi = [ {file = "cffi-1.15.1.tar.gz", hash = "sha256:d400bfb9a37b1351253cb402671cea7e89bdecc294e8016a707f6d1d8ac934f9"}, ] charset-normalizer = [ - {file = "charset-normalizer-3.1.0.tar.gz", hash = "sha256:34e0a2f9c370eb95597aae63bf85eb5e96826d81e3dcf88b8886012906f509b5"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:e0ac8959c929593fee38da1c2b64ee9778733cdf03c482c9ff1d508b6b593b2b"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d7fc3fca01da18fbabe4625d64bb612b533533ed10045a2ac3dd194bfa656b60"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:04eefcee095f58eaabe6dc3cc2262f3bcd776d2c67005880894f447b3f2cb9c1"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:20064ead0717cf9a73a6d1e779b23d149b53daf971169289ed2ed43a71e8d3b0"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1435ae15108b1cb6fffbcea2af3d468683b7afed0169ad718451f8db5d1aff6f"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c84132a54c750fda57729d1e2599bb598f5fa0344085dbde5003ba429a4798c0"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", 
hash = "sha256:75f2568b4189dda1c567339b48cba4ac7384accb9c2a7ed655cd86b04055c795"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:11d3bcb7be35e7b1bba2c23beedac81ee893ac9871d0ba79effc7fc01167db6c"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:891cf9b48776b5c61c700b55a598621fdb7b1e301a550365571e9624f270c203"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:5f008525e02908b20e04707a4f704cd286d94718f48bb33edddc7d7b584dddc1"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:b06f0d3bf045158d2fb8837c5785fe9ff9b8c93358be64461a1089f5da983137"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:49919f8400b5e49e961f320c735388ee686a62327e773fa5b3ce6721f7e785ce"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:22908891a380d50738e1f978667536f6c6b526a2064156203d418f4856d6e86a"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-win32.whl", hash = "sha256:12d1a39aa6b8c6f6248bb54550efcc1c38ce0d8096a146638fd4738e42284448"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:65ed923f84a6844de5fd29726b888e58c62820e0769b76565480e1fdc3d062f8"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:9a3267620866c9d17b959a84dd0bd2d45719b817245e49371ead79ed4f710d19"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6734e606355834f13445b6adc38b53c0fd45f1a56a9ba06c2058f86893ae8017"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f8303414c7b03f794347ad062c0516cee0e15f7a612abd0ce1e25caf6ceb47df"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:aaf53a6cebad0eae578f062c7d462155eada9c172bd8c4d250b8c1d8eb7f916a"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3dc5b6a8ecfdc5748a7e429782598e4f17ef378e3e272eeb1340ea57c9109f41"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e1b25e3ad6c909f398df8921780d6a3d120d8c09466720226fc621605b6f92b1"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0ca564606d2caafb0abe6d1b5311c2649e8071eb241b2d64e75a0d0065107e62"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b82fab78e0b1329e183a65260581de4375f619167478dddab510c6c6fb04d9b6"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:bd7163182133c0c7701b25e604cf1611c0d87712e56e88e7ee5d72deab3e76b5"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:11d117e6c63e8f495412d37e7dc2e2fff09c34b2d09dbe2bee3c6229577818be"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:cf6511efa4801b9b38dc5546d7547d5b5c6ef4b081c60b23e4d941d0eba9cbeb"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:abc1185d79f47c0a7aaf7e2412a0eb2c03b724581139193d2d82b3ad8cbb00ac"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:cb7b2ab0188829593b9de646545175547a70d9a6e2b63bf2cd87a0a391599324"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-win32.whl", hash = "sha256:c36bcbc0d5174a80d6cccf43a0ecaca44e81d25be4b7f90f0ed7bcfbb5a00909"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:cca4def576f47a09a943666b8f829606bcb17e2bc2d5911a46c8f8da45f56755"}, - {file = "charset_normalizer-3.1.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = 
"sha256:0c95f12b74681e9ae127728f7e5409cbbef9cd914d5896ef238cc779b8152373"}, - {file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fca62a8301b605b954ad2e9c3666f9d97f63872aa4efcae5492baca2056b74ab"}, - {file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ac0aa6cd53ab9a31d397f8303f92c42f534693528fafbdb997c82bae6e477ad9"}, - {file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c3af8e0f07399d3176b179f2e2634c3ce9c1301379a6b8c9c9aeecd481da494f"}, - {file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3a5fc78f9e3f501a1614a98f7c54d3969f3ad9bba8ba3d9b438c3bc5d047dd28"}, - {file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:628c985afb2c7d27a4800bfb609e03985aaecb42f955049957814e0491d4006d"}, - {file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:74db0052d985cf37fa111828d0dd230776ac99c740e1a758ad99094be4f1803d"}, - {file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:1e8fcdd8f672a1c4fc8d0bd3a2b576b152d2a349782d1eb0f6b8e52e9954731d"}, - {file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:04afa6387e2b282cf78ff3dbce20f0cc071c12dc8f685bd40960cc68644cfea6"}, - {file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:dd5653e67b149503c68c4018bf07e42eeed6b4e956b24c00ccdf93ac79cdff84"}, - {file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:d2686f91611f9e17f4548dbf050e75b079bbc2a82be565832bc8ea9047b61c8c"}, - {file = "charset_normalizer-3.1.0-cp37-cp37m-win32.whl", hash = "sha256:4155b51ae05ed47199dc5b2a4e62abccb274cee6b01da5b895099b61b1982974"}, - {file = "charset_normalizer-3.1.0-cp37-cp37m-win_amd64.whl", hash = 
"sha256:322102cdf1ab682ecc7d9b1c5eed4ec59657a65e1c146a0da342b78f4112db23"}, - {file = "charset_normalizer-3.1.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:e633940f28c1e913615fd624fcdd72fdba807bf53ea6925d6a588e84e1151531"}, - {file = "charset_normalizer-3.1.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:3a06f32c9634a8705f4ca9946d667609f52cf130d5548881401f1eb2c39b1e2c"}, - {file = "charset_normalizer-3.1.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7381c66e0561c5757ffe616af869b916c8b4e42b367ab29fedc98481d1e74e14"}, - {file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3573d376454d956553c356df45bb824262c397c6e26ce43e8203c4c540ee0acb"}, - {file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e89df2958e5159b811af9ff0f92614dabf4ff617c03a4c1c6ff53bf1c399e0e1"}, - {file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:78cacd03e79d009d95635e7d6ff12c21eb89b894c354bd2b2ed0b4763373693b"}, - {file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:de5695a6f1d8340b12a5d6d4484290ee74d61e467c39ff03b39e30df62cf83a0"}, - {file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1c60b9c202d00052183c9be85e5eaf18a4ada0a47d188a83c8f5c5b23252f649"}, - {file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:f645caaf0008bacf349875a974220f1f1da349c5dbe7c4ec93048cdc785a3326"}, - {file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:ea9f9c6034ea2d93d9147818f17c2a0860d41b71c38b9ce4d55f21b6f9165a11"}, - {file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:80d1543d58bd3d6c271b66abf454d437a438dff01c3e62fdbcd68f2a11310d4b"}, - {file = 
"charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:73dc03a6a7e30b7edc5b01b601e53e7fc924b04e1835e8e407c12c037e81adbd"}, - {file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6f5c2e7bc8a4bf7c426599765b1bd33217ec84023033672c1e9a8b35eaeaaaf8"}, - {file = "charset_normalizer-3.1.0-cp38-cp38-win32.whl", hash = "sha256:12a2b561af122e3d94cdb97fe6fb2bb2b82cef0cdca131646fdb940a1eda04f0"}, - {file = "charset_normalizer-3.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:3160a0fd9754aab7d47f95a6b63ab355388d890163eb03b2d2b87ab0a30cfa59"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:38e812a197bf8e71a59fe55b757a84c1f946d0ac114acafaafaf21667a7e169e"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6baf0baf0d5d265fa7944feb9f7451cc316bfe30e8df1a61b1bb08577c554f31"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:8f25e17ab3039b05f762b0a55ae0b3632b2e073d9c8fc88e89aca31a6198e88f"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3747443b6a904001473370d7810aa19c3a180ccd52a7157aacc264a5ac79265e"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b116502087ce8a6b7a5f1814568ccbd0e9f6cfd99948aa59b0e241dc57cf739f"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d16fd5252f883eb074ca55cb622bc0bee49b979ae4e8639fff6ca3ff44f9f854"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21fa558996782fc226b529fdd2ed7866c2c6ec91cee82735c98a197fae39f706"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6f6c7a8a57e9405cad7485f4c9d3172ae486cfef1344b5ddd8e5239582d7355e"}, - {file = 
"charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:ac3775e3311661d4adace3697a52ac0bab17edd166087d493b52d4f4f553f9f0"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:10c93628d7497c81686e8e5e557aafa78f230cd9e77dd0c40032ef90c18f2230"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:6f4f4668e1831850ebcc2fd0b1cd11721947b6dc7c00bf1c6bd3c929ae14f2c7"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:0be65ccf618c1e7ac9b849c315cc2e8a8751d9cfdaa43027d4f6624bd587ab7e"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:53d0a3fa5f8af98a1e261de6a3943ca631c526635eb5817a87a59d9a57ebf48f"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-win32.whl", hash = "sha256:a04f86f41a8916fe45ac5024ec477f41f886b3c435da2d4e3d2709b22ab02af1"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:830d2948a5ec37c386d3170c483063798d7879037492540f10a475e3fd6f244b"}, - {file = "charset_normalizer-3.1.0-py3-none-any.whl", hash = "sha256:3d9098b479e78c85080c98e1e35ff40b4a31d8953102bb0fd7d1b6f8a2111a3d"}, + {file = "charset-normalizer-3.2.0.tar.gz", hash = "sha256:3bb3d25a8e6c0aedd251753a79ae98a093c7e7b471faa3aa9a93a81431987ace"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:0b87549028f680ca955556e3bd57013ab47474c3124dc069faa0b6545b6c9710"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:7c70087bfee18a42b4040bb9ec1ca15a08242cf5867c58726530bdf3945672ed"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a103b3a7069b62f5d4890ae1b8f0597618f628b286b03d4bc9195230b154bfa9"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:94aea8eff76ee6d1cdacb07dd2123a68283cb5569e0250feab1240058f53b623"}, + {file = 
"charset_normalizer-3.2.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:db901e2ac34c931d73054d9797383d0f8009991e723dab15109740a63e7f902a"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b0dac0ff919ba34d4df1b6131f59ce95b08b9065233446be7e459f95554c0dc8"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:193cbc708ea3aca45e7221ae58f0fd63f933753a9bfb498a3b474878f12caaad"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:09393e1b2a9461950b1c9a45d5fd251dc7c6f228acab64da1c9c0165d9c7765c"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:baacc6aee0b2ef6f3d308e197b5d7a81c0e70b06beae1f1fcacffdbd124fe0e3"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:bf420121d4c8dce6b889f0e8e4ec0ca34b7f40186203f06a946fa0276ba54029"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:c04a46716adde8d927adb9457bbe39cf473e1e2c2f5d0a16ceb837e5d841ad4f"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:aaf63899c94de41fe3cf934601b0f7ccb6b428c6e4eeb80da72c58eab077b19a"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:d62e51710986674142526ab9f78663ca2b0726066ae26b78b22e0f5e571238dd"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-win32.whl", hash = "sha256:04e57ab9fbf9607b77f7d057974694b4f6b142da9ed4a199859d9d4d5c63fe96"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-win_amd64.whl", hash = "sha256:48021783bdf96e3d6de03a6e39a1171ed5bd7e8bb93fc84cc649d11490f87cea"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:4957669ef390f0e6719db3613ab3a7631e68424604a7b448f079bee145da6e09"}, + {file = 
"charset_normalizer-3.2.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:46fb8c61d794b78ec7134a715a3e564aafc8f6b5e338417cb19fe9f57a5a9bf2"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f779d3ad205f108d14e99bb3859aa7dd8e9c68874617c72354d7ecaec2a054ac"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f25c229a6ba38a35ae6e25ca1264621cc25d4d38dca2942a7fce0b67a4efe918"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2efb1bd13885392adfda4614c33d3b68dee4921fd0ac1d3988f8cbb7d589e72a"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f30b48dd7fa1474554b0b0f3fdfdd4c13b5c737a3c6284d3cdc424ec0ffff3a"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:246de67b99b6851627d945db38147d1b209a899311b1305dd84916f2b88526c6"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9bd9b3b31adcb054116447ea22caa61a285d92e94d710aa5ec97992ff5eb7cf3"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:8c2f5e83493748286002f9369f3e6607c565a6a90425a3a1fef5ae32a36d749d"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:3170c9399da12c9dc66366e9d14da8bf7147e1e9d9ea566067bbce7bb74bd9c2"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:7a4826ad2bd6b07ca615c74ab91f32f6c96d08f6fcc3902ceeedaec8cdc3bcd6"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:3b1613dd5aee995ec6d4c69f00378bbd07614702a315a2cf6c1d21461fe17c23"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = 
"sha256:9e608aafdb55eb9f255034709e20d5a83b6d60c054df0802fa9c9883d0a937aa"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-win32.whl", hash = "sha256:f2a1d0fd4242bd8643ce6f98927cf9c04540af6efa92323e9d3124f57727bfc1"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-win_amd64.whl", hash = "sha256:681eb3d7e02e3c3655d1b16059fbfb605ac464c834a0c629048a30fad2b27489"}, + {file = "charset_normalizer-3.2.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c57921cda3a80d0f2b8aec7e25c8aa14479ea92b5b51b6876d975d925a2ea346"}, + {file = "charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:41b25eaa7d15909cf3ac4c96088c1f266a9a93ec44f87f1d13d4a0e86c81b982"}, + {file = "charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f058f6963fd82eb143c692cecdc89e075fa0828db2e5b291070485390b2f1c9c"}, + {file = "charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a7647ebdfb9682b7bb97e2a5e7cb6ae735b1c25008a70b906aecca294ee96cf4"}, + {file = "charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eef9df1eefada2c09a5e7a40991b9fc6ac6ef20b1372abd48d2794a316dc0449"}, + {file = "charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e03b8895a6990c9ab2cdcd0f2fe44088ca1c65ae592b8f795c3294af00a461c3"}, + {file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:ee4006268ed33370957f55bf2e6f4d263eaf4dc3cfc473d1d90baff6ed36ce4a"}, + {file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:c4983bf937209c57240cff65906b18bb35e64ae872da6a0db937d7b4af845dd7"}, + {file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:3bb7fda7260735efe66d5107fb7e6af6a7c04c7fce9b2514e04b7a74b06bf5dd"}, + {file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_s390x.whl", hash = 
"sha256:72814c01533f51d68702802d74f77ea026b5ec52793c791e2da806a3844a46c3"}, + {file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:70c610f6cbe4b9fce272c407dd9d07e33e6bf7b4aa1b7ffb6f6ded8e634e3592"}, + {file = "charset_normalizer-3.2.0-cp37-cp37m-win32.whl", hash = "sha256:a401b4598e5d3f4a9a811f3daf42ee2291790c7f9d74b18d75d6e21dda98a1a1"}, + {file = "charset_normalizer-3.2.0-cp37-cp37m-win_amd64.whl", hash = "sha256:c0b21078a4b56965e2b12f247467b234734491897e99c1d51cee628da9786959"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:95eb302ff792e12aba9a8b8f8474ab229a83c103d74a750ec0bd1c1eea32e669"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1a100c6d595a7f316f1b6f01d20815d916e75ff98c27a01ae817439ea7726329"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:6339d047dab2780cc6220f46306628e04d9750f02f983ddb37439ca47ced7149"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e4b749b9cc6ee664a3300bb3a273c1ca8068c46be705b6c31cf5d276f8628a94"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a38856a971c602f98472050165cea2cdc97709240373041b69030be15047691f"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f87f746ee241d30d6ed93969de31e5ffd09a2961a051e60ae6bddde9ec3583aa"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:89f1b185a01fe560bc8ae5f619e924407efca2191b56ce749ec84982fc59a32a"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e1c8a2f4c69e08e89632defbfabec2feb8a8d99edc9f89ce33c4b9e36ab63037"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = 
"sha256:2f4ac36d8e2b4cc1aa71df3dd84ff8efbe3bfb97ac41242fbcfc053c67434f46"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a386ebe437176aab38c041de1260cd3ea459c6ce5263594399880bbc398225b2"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:ccd16eb18a849fd8dcb23e23380e2f0a354e8daa0c984b8a732d9cfaba3a776d"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:e6a5bf2cba5ae1bb80b154ed68a3cfa2fa00fde979a7f50d6598d3e17d9ac20c"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:45de3f87179c1823e6d9e32156fb14c1927fcc9aba21433f088fdfb555b77c10"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-win32.whl", hash = "sha256:1000fba1057b92a65daec275aec30586c3de2401ccdcd41f8a5c1e2c87078706"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-win_amd64.whl", hash = "sha256:8b2c760cfc7042b27ebdb4a43a4453bd829a5742503599144d54a032c5dc7e9e"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:855eafa5d5a2034b4621c74925d89c5efef61418570e5ef9b37717d9c796419c"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:203f0c8871d5a7987be20c72442488a0b8cfd0f43b7973771640fc593f56321f"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e857a2232ba53ae940d3456f7533ce6ca98b81917d47adc3c7fd55dad8fab858"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5e86d77b090dbddbe78867a0275cb4df08ea195e660f1f7f13435a4649e954e5"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c4fb39a81950ec280984b3a44f5bd12819953dc5fa3a7e6fa7a80db5ee853952"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2dee8e57f052ef5353cf608e0b4c871aee320dd1b87d351c28764fc0ca55f9f4"}, + {file = 
"charset_normalizer-3.2.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8700f06d0ce6f128de3ccdbc1acaea1ee264d2caa9ca05daaf492fde7c2a7200"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1920d4ff15ce893210c1f0c0e9d19bfbecb7983c76b33f046c13a8ffbd570252"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:c1c76a1743432b4b60ab3358c937a3fe1341c828ae6194108a94c69028247f22"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:f7560358a6811e52e9c4d142d497f1a6e10103d3a6881f18d04dbce3729c0e2c"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:c8063cf17b19661471ecbdb3df1c84f24ad2e389e326ccaf89e3fb2484d8dd7e"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:cd6dbe0238f7743d0efe563ab46294f54f9bc8f4b9bcf57c3c666cc5bc9d1299"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:1249cbbf3d3b04902ff081ffbb33ce3377fa6e4c7356f759f3cd076cc138d020"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-win32.whl", hash = "sha256:6c409c0deba34f147f77efaa67b8e4bb83d2f11c8806405f76397ae5b8c0d1c9"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-win_amd64.whl", hash = "sha256:7095f6fbfaa55defb6b733cfeb14efaae7a29f0b59d8cf213be4e7ca0b857b80"}, + {file = "charset_normalizer-3.2.0-py3-none-any.whl", hash = "sha256:8e098148dd37b4ce3baca71fb394c81dc5d9c7728c95df695d2dca218edf40e6"}, ] click = [ - {file = "click-8.1.3-py3-none-any.whl", hash = "sha256:bb4d8133cb15a609f44e8213d9b391b0809795062913b383c62be0ee95b1db48"}, - {file = "click-8.1.3.tar.gz", hash = "sha256:7682dc8afb30297001674575ea00d1814d808d6a36af415a82bd481d37ba7b8e"}, + {file = "click-8.1.5-py3-none-any.whl", hash = "sha256:e576aa487d679441d7d30abb87e1b43d24fc53bffb8758443b1a9e1cee504548"}, + {file = "click-8.1.5.tar.gz", hash = 
"sha256:4be4b1af8d665c6d942909916d31a213a106800c47d0eeba73d34da3cbc11367"}, ] click-log = [ {file = "click-log-0.4.0.tar.gz", hash = "sha256:3970f8570ac54491237bcdb3d8ab5e3eef6c057df29f8c3d1151a51a9c23b975"}, @@ -3043,25 +3093,29 @@ coverage = [ {file = "coverage-7.2.7.tar.gz", hash = "sha256:924d94291ca674905fe9481f12294eb11f2d3d3fd1adb20314ba89e94f44ed59"}, ] cryptography = [ - {file = "cryptography-41.0.0-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:3c5ef25d060c80d6d9f7f9892e1d41bb1c79b78ce74805b8cb4aa373cb7d5ec8"}, - {file = "cryptography-41.0.0-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:8362565b3835ceacf4dc8f3b56471a2289cf51ac80946f9087e66dc283a810e0"}, - {file = "cryptography-41.0.0-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3680248309d340fda9611498a5319b0193a8dbdb73586a1acf8109d06f25b92d"}, - {file = "cryptography-41.0.0-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:84a165379cb9d411d58ed739e4af3396e544eac190805a54ba2e0322feb55c46"}, - {file = "cryptography-41.0.0-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:4ab14d567f7bbe7f1cdff1c53d5324ed4d3fc8bd17c481b395db224fb405c237"}, - {file = "cryptography-41.0.0-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:9f65e842cb02550fac96536edb1d17f24c0a338fd84eaf582be25926e993dde4"}, - {file = "cryptography-41.0.0-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:b7f2f5c525a642cecad24ee8670443ba27ac1fab81bba4cc24c7b6b41f2d0c75"}, - {file = "cryptography-41.0.0-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:7d92f0248d38faa411d17f4107fc0bce0c42cae0b0ba5415505df72d751bf62d"}, - {file = "cryptography-41.0.0-cp37-abi3-win32.whl", hash = "sha256:34d405ea69a8b34566ba3dfb0521379b210ea5d560fafedf9f800a9a94a41928"}, - {file = "cryptography-41.0.0-cp37-abi3-win_amd64.whl", hash = "sha256:344c6de9f8bda3c425b3a41b319522ba3208551b70c2ae00099c205f0d9fd3be"}, - {file = "cryptography-41.0.0-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = 
"sha256:88ff107f211ea696455ea8d911389f6d2b276aabf3231bf72c8853d22db755c5"}, - {file = "cryptography-41.0.0-pp38-pypy38_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:b846d59a8d5a9ba87e2c3d757ca019fa576793e8758174d3868aecb88d6fc8eb"}, - {file = "cryptography-41.0.0-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:f5d0bf9b252f30a31664b6f64432b4730bb7038339bd18b1fafe129cfc2be9be"}, - {file = "cryptography-41.0.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:5c1f7293c31ebc72163a9a0df246f890d65f66b4a40d9ec80081969ba8c78cc9"}, - {file = "cryptography-41.0.0-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:bf8fc66012ca857d62f6a347007e166ed59c0bc150cefa49f28376ebe7d992a2"}, - {file = "cryptography-41.0.0-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:a4fc68d1c5b951cfb72dfd54702afdbbf0fb7acdc9b7dc4301bbf2225a27714d"}, - {file = "cryptography-41.0.0-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:14754bcdae909d66ff24b7b5f166d69340ccc6cb15731670435efd5719294895"}, - {file = "cryptography-41.0.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:0ddaee209d1cf1f180f1efa338a68c4621154de0afaef92b89486f5f96047c55"}, - {file = "cryptography-41.0.0.tar.gz", hash = "sha256:6b71f64beeea341c9b4f963b48ee3b62d62d57ba93eb120e1196b31dc1025e78"}, + {file = "cryptography-41.0.2-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:01f1d9e537f9a15b037d5d9ee442b8c22e3ae11ce65ea1f3316a41c78756b711"}, + {file = "cryptography-41.0.2-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:079347de771f9282fbfe0e0236c716686950c19dee1b76240ab09ce1624d76d7"}, + {file = "cryptography-41.0.2-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:439c3cc4c0d42fa999b83ded80a9a1fb54d53c58d6e59234cfe97f241e6c781d"}, + {file = "cryptography-41.0.2-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f14ad275364c8b4e525d018f6716537ae7b6d369c094805cae45300847e0894f"}, + {file = 
"cryptography-41.0.2-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:84609ade00a6ec59a89729e87a503c6e36af98ddcd566d5f3be52e29ba993182"}, + {file = "cryptography-41.0.2-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:49c3222bb8f8e800aead2e376cbef687bc9e3cb9b58b29a261210456a7783d83"}, + {file = "cryptography-41.0.2-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:d73f419a56d74fef257955f51b18d046f3506270a5fd2ac5febbfa259d6c0fa5"}, + {file = "cryptography-41.0.2-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:2a034bf7d9ca894720f2ec1d8b7b5832d7e363571828037f9e0c4f18c1b58a58"}, + {file = "cryptography-41.0.2-cp37-abi3-win32.whl", hash = "sha256:d124682c7a23c9764e54ca9ab5b308b14b18eba02722b8659fb238546de83a76"}, + {file = "cryptography-41.0.2-cp37-abi3-win_amd64.whl", hash = "sha256:9c3fe6534d59d071ee82081ca3d71eed3210f76ebd0361798c74abc2bcf347d4"}, + {file = "cryptography-41.0.2-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:a719399b99377b218dac6cf547b6ec54e6ef20207b6165126a280b0ce97e0d2a"}, + {file = "cryptography-41.0.2-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:182be4171f9332b6741ee818ec27daff9fb00349f706629f5cbf417bd50e66fd"}, + {file = "cryptography-41.0.2-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:7a9a3bced53b7f09da251685224d6a260c3cb291768f54954e28f03ef14e3766"}, + {file = "cryptography-41.0.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:f0dc40e6f7aa37af01aba07277d3d64d5a03dc66d682097541ec4da03cc140ee"}, + {file = "cryptography-41.0.2-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:674b669d5daa64206c38e507808aae49904c988fa0a71c935e7006a3e1e83831"}, + {file = "cryptography-41.0.2-pp38-pypy38_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:7af244b012711a26196450d34f483357e42aeddb04128885d95a69bd8b14b69b"}, + {file = "cryptography-41.0.2-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:9b6d717393dbae53d4e52684ef4f022444fc1cce3c48c38cb74fca29e1f08eaa"}, + {file = 
"cryptography-41.0.2-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:192255f539d7a89f2102d07d7375b1e0a81f7478925b3bc2e0549ebf739dae0e"}, + {file = "cryptography-41.0.2-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:f772610fe364372de33d76edcd313636a25684edb94cee53fd790195f5989d14"}, + {file = "cryptography-41.0.2-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:b332cba64d99a70c1e0836902720887fb4529ea49ea7f5462cf6640e095e11d2"}, + {file = "cryptography-41.0.2-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:9a6673c1828db6270b76b22cc696f40cde9043eb90373da5c2f8f2158957f42f"}, + {file = "cryptography-41.0.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:342f3767e25876751e14f8459ad85e77e660537ca0a066e10e75df9c9e9099f0"}, + {file = "cryptography-41.0.2.tar.gz", hash = "sha256:7d230bf856164de164ecb615ccc14c7fc6de6906ddd5b491f3af90d3514c925c"}, ] dateparser = [ {file = "dateparser-1.1.8-py2.py3-none-any.whl", hash = "sha256:070b29b5bbf4b1ec2cd51c96ea040dc68a614de703910a91ad1abba18f9f379f"}, @@ -3099,6 +3153,10 @@ deprecated = [ {file = "Deprecated-1.2.14-py2.py3-none-any.whl", hash = "sha256:6fac8b097794a90302bdbb17b9b815e732d3c4720583ff1b198499d78470466c"}, {file = "Deprecated-1.2.14.tar.gz", hash = "sha256:e5323eb936458dccc2582dc6f9c322c852a775a27065ff2b0c4970b9d53d01b3"}, ] +deprecation = [ + {file = "deprecation-2.1.0-py2.py3-none-any.whl", hash = "sha256:a10811591210e1fb0e768a8c25517cabeabcba6f0bf96564f8ff45189f90b14a"}, + {file = "deprecation-2.1.0.tar.gz", hash = "sha256:72b3bde64e5d778694b0cf68178aed03d15e15477116add3fb773e581f9518ff"}, +] dill = [ {file = "dill-0.3.6-py3-none-any.whl", hash = "sha256:a07ffd2351b8c678dfc4a856a3005f8067aea51d6ba6c700796a4d9e280f39f0"}, {file = "dill-0.3.6.tar.gz", hash = "sha256:e5db55f3687856d8fbdab002ed78544e1c4559a130302693d839dfe8f93f2373"}, @@ -3116,8 +3174,8 @@ et-xmlfile = [ {file = "et_xmlfile-1.1.0.tar.gz", hash = "sha256:8eb9e2bc2f8c97e37a2dc85a09ecdcdec9d8a396530a6d5a33b30b9a92da0c5c"}, ] 
exceptiongroup = [ - {file = "exceptiongroup-1.1.1-py3-none-any.whl", hash = "sha256:232c37c63e4f682982c8b6459f33a8981039e5fb8756b2074364e5055c498c9e"}, - {file = "exceptiongroup-1.1.1.tar.gz", hash = "sha256:d484c3090ba2889ae2928419117447a14daf3c1231d5e30d0aae34f354f01785"}, + {file = "exceptiongroup-1.1.2-py3-none-any.whl", hash = "sha256:e346e69d186172ca7cf029c8c1d16235aa0e04035e5750b4b95039e65204328f"}, + {file = "exceptiongroup-1.1.2.tar.gz", hash = "sha256:12c3e887d6485d16943a309616de20ae5582633e0a2eda17f4e10fd61c1e8af5"}, ] executing = [ {file = "executing-1.2.0-py2.py3-none-any.whl", hash = "sha256:0314a69e37426e3608aada02473b4161d4caf5a4b244d1d0c48072b8fee7bacc"}, @@ -3144,16 +3202,16 @@ fqdn = [ {file = "fqdn-1.5.1.tar.gz", hash = "sha256:105ed3677e767fb5ca086a0c1f4bb66ebc3c100be518f0e0d755d9eae164d89f"}, ] google-api-core = [ - {file = "google-api-core-2.11.0.tar.gz", hash = "sha256:4b9bb5d5a380a0befa0573b302651b8a9a89262c1730e37bf423cec511804c22"}, - {file = "google_api_core-2.11.0-py3-none-any.whl", hash = "sha256:ce222e27b0de0d7bc63eb043b956996d6dccab14cc3b690aaea91c9cc99dc16e"}, + {file = "google-api-core-2.11.1.tar.gz", hash = "sha256:25d29e05a0058ed5f19c61c0a78b1b53adea4d9364b464d014fbda941f6d1c9a"}, + {file = "google_api_core-2.11.1-py3-none-any.whl", hash = "sha256:d92a5a92dc36dd4f4b9ee4e55528a90e432b059f93aee6ad857f9de8cc7ae94a"}, ] google-api-python-client = [ - {file = "google-api-python-client-2.88.0.tar.gz", hash = "sha256:37068453f79ea28e5394a8fe20a4ba620594e7f8541068bea2e844dacdcc9d33"}, - {file = "google_api_python_client-2.88.0-py2.py3-none-any.whl", hash = "sha256:d003008400a779524ea21b5a3ddc6fc59327d401fb8c37c466d413694c279cae"}, + {file = "google-api-python-client-2.93.0.tar.gz", hash = "sha256:62ee28e96031a10a1c341f226a75ac6a4f16bdb1d888dc8222b2cdca133d0031"}, + {file = "google_api_python_client-2.93.0-py2.py3-none-any.whl", hash = "sha256:f34abb671afd488bd19d30721ea20fb30d3796ddd825d6f91f26d8c718a9f07d"}, ] google-auth = [ - {file = 
"google-auth-2.19.0.tar.gz", hash = "sha256:f39d528077ac540793dd3c22a8706178f157642a67d874db25c640b7fead277e"}, - {file = "google_auth-2.19.0-py2.py3-none-any.whl", hash = "sha256:be617bfaf77774008e9d177573f782e109188c8a64ae6e744285df5cea3e7df6"}, + {file = "google-auth-2.22.0.tar.gz", hash = "sha256:164cba9af4e6e4e40c3a4f90a1a6c12ee56f14c0b4868d1ca91b32826ab334ce"}, + {file = "google_auth-2.22.0-py2.py3-none-any.whl", hash = "sha256:d61d1b40897407b574da67da1a833bdc10d5a11642566e506565d1b1a46ba873"}, ] google-auth-httplib2 = [ {file = "google-auth-httplib2-0.1.0.tar.gz", hash = "sha256:a07c39fd632becacd3f07718dfd6021bf396978f03ad3ce4321d060015cc30ac"}, @@ -3164,8 +3222,8 @@ google-auth-oauthlib = [ {file = "google_auth_oauthlib-0.8.0-py2.py3-none-any.whl", hash = "sha256:40cc612a13c3336d5433e94e2adb42a0c88f6feb6c55769e44500fc70043a576"}, ] googleapis-common-protos = [ - {file = "googleapis-common-protos-1.59.0.tar.gz", hash = "sha256:4168fcb568a826a52f23510412da405abd93f4d23ba544bb68d943b14ba3cb44"}, - {file = "googleapis_common_protos-1.59.0-py2.py3-none-any.whl", hash = "sha256:b287dc48449d1d41af0c69f4ea26242b5ae4c3d7249a38b0984c86a4caffff1f"}, + {file = "googleapis-common-protos-1.59.1.tar.gz", hash = "sha256:b35d530fe825fb4227857bc47ad84c33c809ac96f312e13182bdeaa2abe1178a"}, + {file = "googleapis_common_protos-1.59.1-py2.py3-none-any.whl", hash = "sha256:0cbedb6fb68f1c07e18eb4c48256320777707e7d0c55063ae56c15db3224a61e"}, ] graphviz = [ {file = "graphviz-0.20.1-py3-none-any.whl", hash = "sha256:587c58a223b51611c0cf461132da386edd896a029524ca61a1462b880bf97977"}, @@ -3266,20 +3324,20 @@ interrogate = [ {file = "interrogate-1.5.0.tar.gz", hash = "sha256:b6f325f0aa84ac3ac6779d8708264d366102226c5af7d69058cecffcff7a6d6c"}, ] ipykernel = [ - {file = "ipykernel-6.23.1-py3-none-any.whl", hash = "sha256:77aeffab056c21d16f1edccdc9e5ccbf7d96eb401bd6703610a21be8b068aadc"}, - {file = "ipykernel-6.23.1.tar.gz", hash = 
"sha256:1aba0ae8453e15e9bc6b24e497ef6840114afcdb832ae597f32137fa19d42a6f"}, + {file = "ipykernel-6.24.0-py3-none-any.whl", hash = "sha256:2f5fffc7ad8f1fd5aadb4e171ba9129d9668dbafa374732cf9511ada52d6547f"}, + {file = "ipykernel-6.24.0.tar.gz", hash = "sha256:29cea0a716b1176d002a61d0b0c851f34536495bc4ef7dd0222c88b41b816123"}, ] ipython = [ - {file = "ipython-8.13.2-py3-none-any.whl", hash = "sha256:ffca270240fbd21b06b2974e14a86494d6d29290184e788275f55e0b55914926"}, - {file = "ipython-8.13.2.tar.gz", hash = "sha256:7dff3fad32b97f6488e02f87b970f309d082f758d7b7fc252e3b19ee0e432dbb"}, + {file = "ipython-8.14.0-py3-none-any.whl", hash = "sha256:248aca623f5c99a6635bc3857677b7320b9b8039f99f070ee0d20a5ca5a8e6bf"}, + {file = "ipython-8.14.0.tar.gz", hash = "sha256:1d197b907b6ba441b692c48cf2a3a2de280dc0ac91a3405b39349a50272ca0a1"}, ] ipython-genutils = [ {file = "ipython_genutils-0.2.0-py2.py3-none-any.whl", hash = "sha256:72dd37233799e619666c9f639a9da83c34013a73e8bbc79a7a6348d93c61fab8"}, {file = "ipython_genutils-0.2.0.tar.gz", hash = "sha256:eb2e116e75ecef9d4d228fdc66af54269afa26ab4463042e33785b887c628ba8"}, ] ipywidgets = [ - {file = "ipywidgets-8.0.6-py3-none-any.whl", hash = "sha256:a60bf8d2528997e05ac83fd19ea2fbe65f2e79fbe1b2b35779bdfc46c2941dcc"}, - {file = "ipywidgets-8.0.6.tar.gz", hash = "sha256:de7d779f2045d60de9f6c25f653fdae2dba57898e6a1284494b3ba20b6893bb8"}, + {file = "ipywidgets-8.0.7-py3-none-any.whl", hash = "sha256:e0aed0c95a1e55b6a123f64305245578bdc09e52965a34941c2b6a578b8c64a0"}, + {file = "ipywidgets-8.0.7.tar.gz", hash = "sha256:50ace0a8886e9a0d68b980db82f94c25d55d21ff2340ed36f802dd9365e94acf"}, ] isodate = [ {file = "isodate-0.6.1-py2.py3-none-any.whl", hash = "sha256:0751eece944162659049d35f4f549ed815792b38793f07cf73381c1c87cbed96"}, @@ -3310,32 +3368,36 @@ jinja2 = [ {file = "Jinja2-3.1.2.tar.gz", hash = "sha256:31351a702a408a9e7595a8fc6150fc3f43bb6bf7e319770cbc0db9df9437e852"}, ] jsonpatch = [ - {file = "jsonpatch-1.32-py2.py3-none-any.whl", hash = 
"sha256:26ac385719ac9f54df8a2f0827bb8253aa3ea8ab7b3368457bcdb8c14595a397"}, - {file = "jsonpatch-1.32.tar.gz", hash = "sha256:b6ddfe6c3db30d81a96aaeceb6baf916094ffa23d7dd5fa2c13e13f8b6e600c2"}, + {file = "jsonpatch-1.33-py2.py3-none-any.whl", hash = "sha256:0ae28c0cd062bbd8b8ecc26d7d164fbbea9652a1a3693f3b956c1eae5145dade"}, + {file = "jsonpatch-1.33.tar.gz", hash = "sha256:9fcd4009c41e6d12348b4a0ff2563ba56a2923a7dfee731d004e212e1ee5030c"}, ] jsonpointer = [ - {file = "jsonpointer-2.3-py2.py3-none-any.whl", hash = "sha256:51801e558539b4e9cd268638c078c6c5746c9ac96bc38152d443400e4f3793e9"}, - {file = "jsonpointer-2.3.tar.gz", hash = "sha256:97cba51526c829282218feb99dab1b1e6bdf8efd1c43dc9d57be093c0d69c99a"}, + {file = "jsonpointer-2.4-py2.py3-none-any.whl", hash = "sha256:15d51bba20eea3165644553647711d150376234112651b4f1811022aecad7d7a"}, + {file = "jsonpointer-2.4.tar.gz", hash = "sha256:585cee82b70211fa9e6043b7bb89db6e1aa49524340dde8ad6b63206ea689d88"}, ] jsonschema = [ - {file = "jsonschema-4.17.3-py3-none-any.whl", hash = "sha256:a870ad254da1a8ca84b6a2905cac29d265f805acc57af304784962a2aa6508f6"}, - {file = "jsonschema-4.17.3.tar.gz", hash = "sha256:0f864437ab8b6076ba6707453ef8f98a6a0d512a80e93f8abdb676f737ecb60d"}, + {file = "jsonschema-4.18.3-py3-none-any.whl", hash = "sha256:aab78b34c2de001c6b692232f08c21a97b436fe18e0b817bf0511046924fceef"}, + {file = "jsonschema-4.18.3.tar.gz", hash = "sha256:64b7104d72efe856bea49ca4af37a14a9eba31b40bb7238179f3803130fd34d9"}, +] +jsonschema-specifications = [ + {file = "jsonschema_specifications-2023.6.1-py3-none-any.whl", hash = "sha256:3d2b82663aff01815f744bb5c7887e2121a63399b49b104a3c96145474d091d7"}, + {file = "jsonschema_specifications-2023.6.1.tar.gz", hash = "sha256:ca1c4dd059a9e7b34101cf5b3ab7ff1d18b139f35950d598d629837ef66e8f28"}, ] jupyter-client = [ - {file = "jupyter_client-8.2.0-py3-none-any.whl", hash = "sha256:b18219aa695d39e2ad570533e0d71fb7881d35a873051054a84ee2a17c4b7389"}, - {file = 
"jupyter_client-8.2.0.tar.gz", hash = "sha256:9fe233834edd0e6c0aa5f05ca2ab4bdea1842bfd2d8a932878212fc5301ddaf0"}, + {file = "jupyter_client-8.3.0-py3-none-any.whl", hash = "sha256:7441af0c0672edc5d28035e92ba5e32fadcfa8a4e608a434c228836a89df6158"}, + {file = "jupyter_client-8.3.0.tar.gz", hash = "sha256:3af69921fe99617be1670399a0b857ad67275eefcfa291e2c81a160b7b650f5f"}, ] jupyter-core = [ - {file = "jupyter_core-5.3.0-py3-none-any.whl", hash = "sha256:d4201af84559bc8c70cead287e1ab94aeef3c512848dde077b7684b54d67730d"}, - {file = "jupyter_core-5.3.0.tar.gz", hash = "sha256:6db75be0c83edbf1b7c9f91ec266a9a24ef945da630f3120e1a0046dc13713fc"}, + {file = "jupyter_core-5.3.1-py3-none-any.whl", hash = "sha256:ae9036db959a71ec1cac33081eeb040a79e681f08ab68b0883e9a676c7a90dce"}, + {file = "jupyter_core-5.3.1.tar.gz", hash = "sha256:5ba5c7938a7f97a6b0481463f7ff0dbac7c15ba48cf46fa4035ca6e838aa1aba"}, ] jupyter-events = [ {file = "jupyter_events-0.6.3-py3-none-any.whl", hash = "sha256:57a2749f87ba387cd1bfd9b22a0875b889237dbf2edc2121ebb22bde47036c17"}, {file = "jupyter_events-0.6.3.tar.gz", hash = "sha256:9a6e9995f75d1b7146b436ea24d696ce3a35bfa8bfe45e0c33c334c79464d0b3"}, ] jupyter-server = [ - {file = "jupyter_server-2.6.0-py3-none-any.whl", hash = "sha256:19525a1515b5999618a91b3e99ec9f6869aa8c5ba73e0b6279fcda918b54ba36"}, - {file = "jupyter_server-2.6.0.tar.gz", hash = "sha256:ae4af349f030ed08dd78cb7ac1a03a92d886000380c9ea6283f3c542a81f4b06"}, + {file = "jupyter_server-2.7.0-py3-none-any.whl", hash = "sha256:6a77912aff643e53fa14bdb2634884b52b784a4be77ce8e93f7283faed0f0849"}, + {file = "jupyter_server-2.7.0.tar.gz", hash = "sha256:36da0a266d31a41ac335a366c88933c17dfa5bb817a48f5c02c16d303bc9477f"}, ] jupyter-server-terminals = [ {file = "jupyter_server_terminals-0.4.4-py3-none-any.whl", hash = "sha256:75779164661cec02a8758a5311e18bb8eb70c4e86c6b699403100f1585a12a36"}, @@ -3346,8 +3408,8 @@ jupyterlab-pygments = [ {file = "jupyterlab_pygments-0.2.2.tar.gz", hash = 
"sha256:7405d7fde60819d905a9fa8ce89e4cd830e318cdad22a0030f7a901da705585d"}, ] jupyterlab-widgets = [ - {file = "jupyterlab_widgets-3.0.7-py3-none-any.whl", hash = "sha256:c73f8370338ec19f1bec47254752d6505b03601cbd5a67e6a0b184532f73a459"}, - {file = "jupyterlab_widgets-3.0.7.tar.gz", hash = "sha256:c3a50ed5bf528a0c7a869096503af54702f86dda1db469aee1c92dc0c01b43ca"}, + {file = "jupyterlab_widgets-3.0.8-py3-none-any.whl", hash = "sha256:4715912d6ceab839c9db35953c764b3214ebbc9161c809f6e0510168845dfdf5"}, + {file = "jupyterlab_widgets-3.0.8.tar.gz", hash = "sha256:d428ab97b8d87cc7c54cbf37644d6e0f0e662f23876e05fa460a73ec3257252a"}, ] keyring = [ {file = "keyring-23.4.1-py3-none-any.whl", hash = "sha256:17e49fb0d6883c2b4445359434dba95aad84aabb29bbff044ad0ed7100232eca"}, @@ -3454,34 +3516,36 @@ mccabe = [ {file = "mccabe-0.7.0.tar.gz", hash = "sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325"}, ] mistune = [ - {file = "mistune-2.0.5-py2.py3-none-any.whl", hash = "sha256:bad7f5d431886fcbaf5f758118ecff70d31f75231b34024a1341120340a65ce8"}, - {file = "mistune-2.0.5.tar.gz", hash = "sha256:0246113cb2492db875c6be56974a7c893333bf26cd92891c85f63151cee09d34"}, + {file = "mistune-3.0.1-py3-none-any.whl", hash = "sha256:b9b3e438efbb57c62b5beb5e134dab664800bdf1284a7ee09e8b12b13eb1aac6"}, + {file = "mistune-3.0.1.tar.gz", hash = "sha256:e912116c13aa0944f9dc530db38eb88f6a77087ab128f49f84a48f4c05ea163c"}, ] mypy = [ - {file = "mypy-0.982-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:5085e6f442003fa915aeb0a46d4da58128da69325d8213b4b35cc7054090aed5"}, - {file = "mypy-0.982-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:41fd1cf9bc0e1c19b9af13a6580ccb66c381a5ee2cf63ee5ebab747a4badeba3"}, - {file = "mypy-0.982-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f793e3dd95e166b66d50e7b63e69e58e88643d80a3dcc3bcd81368e0478b089c"}, - {file = "mypy-0.982-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:86ebe67adf4d021b28c3f547da6aa2cce660b57f0432617af2cca932d4d378a6"}, - {file = "mypy-0.982-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:175f292f649a3af7082fe36620369ffc4661a71005aa9f8297ea473df5772046"}, - {file = "mypy-0.982-cp310-cp310-win_amd64.whl", hash = "sha256:8ee8c2472e96beb1045e9081de8e92f295b89ac10c4109afdf3a23ad6e644f3e"}, - {file = "mypy-0.982-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:58f27ebafe726a8e5ccb58d896451dd9a662a511a3188ff6a8a6a919142ecc20"}, - {file = "mypy-0.982-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d6af646bd46f10d53834a8e8983e130e47d8ab2d4b7a97363e35b24e1d588947"}, - {file = "mypy-0.982-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:e7aeaa763c7ab86d5b66ff27f68493d672e44c8099af636d433a7f3fa5596d40"}, - {file = "mypy-0.982-cp37-cp37m-win_amd64.whl", hash = "sha256:724d36be56444f569c20a629d1d4ee0cb0ad666078d59bb84f8f887952511ca1"}, - {file = "mypy-0.982-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:14d53cdd4cf93765aa747a7399f0961a365bcddf7855d9cef6306fa41de01c24"}, - {file = "mypy-0.982-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:26ae64555d480ad4b32a267d10cab7aec92ff44de35a7cd95b2b7cb8e64ebe3e"}, - {file = "mypy-0.982-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:6389af3e204975d6658de4fb8ac16f58c14e1bacc6142fee86d1b5b26aa52bda"}, - {file = "mypy-0.982-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7b35ce03a289480d6544aac85fa3674f493f323d80ea7226410ed065cd46f206"}, - {file = "mypy-0.982-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:c6e564f035d25c99fd2b863e13049744d96bd1947e3d3d2f16f5828864506763"}, - {file = "mypy-0.982-cp38-cp38-win_amd64.whl", hash = "sha256:cebca7fd333f90b61b3ef7f217ff75ce2e287482206ef4a8b18f32b49927b1a2"}, - {file = "mypy-0.982-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:a705a93670c8b74769496280d2fe6cd59961506c64f329bb179970ff1d24f9f8"}, - {file = "mypy-0.982-cp39-cp39-macosx_10_9_x86_64.whl", hash 
= "sha256:75838c649290d83a2b83a88288c1eb60fe7a05b36d46cbea9d22efc790002146"}, - {file = "mypy-0.982-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:91781eff1f3f2607519c8b0e8518aad8498af1419e8442d5d0afb108059881fc"}, - {file = "mypy-0.982-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eaa97b9ddd1dd9901a22a879491dbb951b5dec75c3b90032e2baa7336777363b"}, - {file = "mypy-0.982-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a692a8e7d07abe5f4b2dd32d731812a0175626a90a223d4b58f10f458747dd8a"}, - {file = "mypy-0.982-cp39-cp39-win_amd64.whl", hash = "sha256:eb7a068e503be3543c4bd329c994103874fa543c1727ba5288393c21d912d795"}, - {file = "mypy-0.982-py3-none-any.whl", hash = "sha256:1021c241e8b6e1ca5a47e4d52601274ac078a89845cfde66c6d5f769819ffa1d"}, - {file = "mypy-0.982.tar.gz", hash = "sha256:85f7a343542dc8b1ed0a888cdd34dca56462654ef23aa673907305b260b3d746"}, + {file = "mypy-1.4.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:566e72b0cd6598503e48ea610e0052d1b8168e60a46e0bfd34b3acf2d57f96a8"}, + {file = "mypy-1.4.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ca637024ca67ab24a7fd6f65d280572c3794665eaf5edcc7e90a866544076878"}, + {file = "mypy-1.4.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0dde1d180cd84f0624c5dcaaa89c89775550a675aff96b5848de78fb11adabcd"}, + {file = "mypy-1.4.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8c4d8e89aa7de683e2056a581ce63c46a0c41e31bd2b6d34144e2c80f5ea53dc"}, + {file = "mypy-1.4.1-cp310-cp310-win_amd64.whl", hash = "sha256:bfdca17c36ae01a21274a3c387a63aa1aafe72bff976522886869ef131b937f1"}, + {file = "mypy-1.4.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:7549fbf655e5825d787bbc9ecf6028731973f78088fbca3a1f4145c39ef09462"}, + {file = "mypy-1.4.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:98324ec3ecf12296e6422939e54763faedbfcc502ea4a4c38502082711867258"}, + {file = "mypy-1.4.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:141dedfdbfe8a04142881ff30ce6e6653c9685b354876b12e4fe6c78598b45e2"}, + {file = "mypy-1.4.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:8207b7105829eca6f3d774f64a904190bb2231de91b8b186d21ffd98005f14a7"}, + {file = "mypy-1.4.1-cp311-cp311-win_amd64.whl", hash = "sha256:16f0db5b641ba159eff72cff08edc3875f2b62b2fa2bc24f68c1e7a4e8232d01"}, + {file = "mypy-1.4.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:470c969bb3f9a9efcedbadcd19a74ffb34a25f8e6b0e02dae7c0e71f8372f97b"}, + {file = "mypy-1.4.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e5952d2d18b79f7dc25e62e014fe5a23eb1a3d2bc66318df8988a01b1a037c5b"}, + {file = "mypy-1.4.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:190b6bab0302cec4e9e6767d3eb66085aef2a1cc98fe04936d8a42ed2ba77bb7"}, + {file = "mypy-1.4.1-cp37-cp37m-win_amd64.whl", hash = "sha256:9d40652cc4fe33871ad3338581dca3297ff5f2213d0df345bcfbde5162abf0c9"}, + {file = "mypy-1.4.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:01fd2e9f85622d981fd9063bfaef1aed6e336eaacca00892cd2d82801ab7c042"}, + {file = "mypy-1.4.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:2460a58faeea905aeb1b9b36f5065f2dc9a9c6e4c992a6499a2360c6c74ceca3"}, + {file = "mypy-1.4.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a2746d69a8196698146a3dbe29104f9eb6a2a4d8a27878d92169a6c0b74435b6"}, + {file = "mypy-1.4.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:ae704dcfaa180ff7c4cfbad23e74321a2b774f92ca77fd94ce1049175a21c97f"}, + {file = "mypy-1.4.1-cp38-cp38-win_amd64.whl", hash = "sha256:43d24f6437925ce50139a310a64b2ab048cb2d3694c84c71c3f2a1626d8101dc"}, + {file = "mypy-1.4.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c482e1246726616088532b5e964e39765b6d1520791348e6c9dc3af25b233828"}, + {file = "mypy-1.4.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:43b592511672017f5b1a483527fd2684347fdffc041c9ef53428c8dc530f79a3"}, + {file = 
"mypy-1.4.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:34a9239d5b3502c17f07fd7c0b2ae6b7dd7d7f6af35fbb5072c6208e76295816"}, + {file = "mypy-1.4.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5703097c4936bbb9e9bce41478c8d08edd2865e177dc4c52be759f81ee4dd26c"}, + {file = "mypy-1.4.1-cp39-cp39-win_amd64.whl", hash = "sha256:e02d700ec8d9b1859790c0475df4e4092c7bf3272a4fd2c9f33d87fac4427b8f"}, + {file = "mypy-1.4.1-py3-none-any.whl", hash = "sha256:45d32cec14e7b97af848bddd97d85ea4f0db4d5a149ed9676caa4eb2f7402bb4"}, + {file = "mypy-1.4.1.tar.gz", hash = "sha256:9bbcd9ab8ea1f2e1c8031c21445b511442cc45c89951e49bbf852cbb70755b1b"}, ] mypy-extensions = [ {file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"}, @@ -3496,12 +3560,12 @@ nbclient = [ {file = "nbclient-0.8.0.tar.gz", hash = "sha256:f9b179cd4b2d7bca965f900a2ebf0db4a12ebff2f36a711cb66861e4ae158e55"}, ] nbconvert = [ - {file = "nbconvert-7.4.0-py3-none-any.whl", hash = "sha256:af5064a9db524f9f12f4e8be7f0799524bd5b14c1adea37e34e83c95127cc818"}, - {file = "nbconvert-7.4.0.tar.gz", hash = "sha256:51b6c77b507b177b73f6729dba15676e42c4e92bcb00edc8cc982ee72e7d89d7"}, + {file = "nbconvert-7.6.0-py3-none-any.whl", hash = "sha256:5a445c6794b0791984bc5436608fe2c066cb43c83920c7bc91bde3b765e9a264"}, + {file = "nbconvert-7.6.0.tar.gz", hash = "sha256:24fcf27efdef2b51d7f090cc5ce5a9b178766a55be513c4ebab08c91899ab550"}, ] nbformat = [ - {file = "nbformat-5.9.0-py3-none-any.whl", hash = "sha256:8c8fa16d6d05062c26177754bfbfac22de644888e2ef69d27ad2a334cf2576e5"}, - {file = "nbformat-5.9.0.tar.gz", hash = "sha256:e98ebb6120c3efbafdee2a40af2a140cadee90bb06dd69a2a63d9551fcc7f976"}, + {file = "nbformat-5.9.1-py3-none-any.whl", hash = "sha256:b7968ebf4811178a4108ee837eae1442e3f054132100f0359219e9ed1ce3ca45"}, + {file = "nbformat-5.9.1.tar.gz", hash = "sha256:3a7f52d040639cbd8a3890218c8b0ffb93211588c57446c90095e32ba5881b5d"}, ] 
nest-asyncio = [ {file = "nest_asyncio-1.5.6-py3-none-any.whl", hash = "sha256:b9a953fb40dceaa587d109609098db21900182b16440652454a146cffb06e8b8"}, @@ -3520,34 +3584,31 @@ notebook-shim = [ {file = "notebook_shim-0.2.3.tar.gz", hash = "sha256:f69388ac283ae008cd506dda10d0288b09a017d822d5e8c7129a152cbd3ce7e9"}, ] numpy = [ - {file = "numpy-1.24.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:3c1104d3c036fb81ab923f507536daedc718d0ad5a8707c6061cdfd6d184e570"}, - {file = "numpy-1.24.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:202de8f38fc4a45a3eea4b63e2f376e5f2dc64ef0fa692838e31a808520efaf7"}, - {file = "numpy-1.24.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8535303847b89aa6b0f00aa1dc62867b5a32923e4d1681a35b5eef2d9591a463"}, - {file = "numpy-1.24.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2d926b52ba1367f9acb76b0df6ed21f0b16a1ad87c6720a1121674e5cf63e2b6"}, - {file = "numpy-1.24.3-cp310-cp310-win32.whl", hash = "sha256:f21c442fdd2805e91799fbe044a7b999b8571bb0ab0f7850d0cb9641a687092b"}, - {file = "numpy-1.24.3-cp310-cp310-win_amd64.whl", hash = "sha256:ab5f23af8c16022663a652d3b25dcdc272ac3f83c3af4c02eb8b824e6b3ab9d7"}, - {file = "numpy-1.24.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:9a7721ec204d3a237225db3e194c25268faf92e19338a35f3a224469cb6039a3"}, - {file = "numpy-1.24.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d6cc757de514c00b24ae8cf5c876af2a7c3df189028d68c0cb4eaa9cd5afc2bf"}, - {file = "numpy-1.24.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:76e3f4e85fc5d4fd311f6e9b794d0c00e7002ec122be271f2019d63376f1d385"}, - {file = "numpy-1.24.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a1d3c026f57ceaad42f8231305d4653d5f05dc6332a730ae5c0bea3513de0950"}, - {file = "numpy-1.24.3-cp311-cp311-win32.whl", hash = "sha256:c91c4afd8abc3908e00a44b2672718905b8611503f7ff87390cc0ac3423fb096"}, - {file = 
"numpy-1.24.3-cp311-cp311-win_amd64.whl", hash = "sha256:5342cf6aad47943286afa6f1609cad9b4266a05e7f2ec408e2cf7aea7ff69d80"}, - {file = "numpy-1.24.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:7776ea65423ca6a15255ba1872d82d207bd1e09f6d0894ee4a64678dd2204078"}, - {file = "numpy-1.24.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:ae8d0be48d1b6ed82588934aaaa179875e7dc4f3d84da18d7eae6eb3f06c242c"}, - {file = "numpy-1.24.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ecde0f8adef7dfdec993fd54b0f78183051b6580f606111a6d789cd14c61ea0c"}, - {file = "numpy-1.24.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4749e053a29364d3452c034827102ee100986903263e89884922ef01a0a6fd2f"}, - {file = "numpy-1.24.3-cp38-cp38-win32.whl", hash = "sha256:d933fabd8f6a319e8530d0de4fcc2e6a61917e0b0c271fded460032db42a0fe4"}, - {file = "numpy-1.24.3-cp38-cp38-win_amd64.whl", hash = "sha256:56e48aec79ae238f6e4395886b5eaed058abb7231fb3361ddd7bfdf4eed54289"}, - {file = "numpy-1.24.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:4719d5aefb5189f50887773699eaf94e7d1e02bf36c1a9d353d9f46703758ca4"}, - {file = "numpy-1.24.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:0ec87a7084caa559c36e0a2309e4ecb1baa03b687201d0a847c8b0ed476a7187"}, - {file = "numpy-1.24.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ea8282b9bcfe2b5e7d491d0bf7f3e2da29700cec05b49e64d6246923329f2b02"}, - {file = "numpy-1.24.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:210461d87fb02a84ef243cac5e814aad2b7f4be953b32cb53327bb49fd77fbb4"}, - {file = "numpy-1.24.3-cp39-cp39-win32.whl", hash = "sha256:784c6da1a07818491b0ffd63c6bbe5a33deaa0e25a20e1b3ea20cf0e43f8046c"}, - {file = "numpy-1.24.3-cp39-cp39-win_amd64.whl", hash = "sha256:d5036197ecae68d7f491fcdb4df90082b0d4960ca6599ba2659957aafced7c17"}, - {file = "numpy-1.24.3-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = 
"sha256:352ee00c7f8387b44d19f4cada524586f07379c0d49270f87233983bc5087ca0"}, - {file = "numpy-1.24.3-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1a7d6acc2e7524c9955e5c903160aa4ea083736fde7e91276b0e5d98e6332812"}, - {file = "numpy-1.24.3-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:35400e6a8d102fd07c71ed7dcadd9eb62ee9a6e84ec159bd48c28235bbb0f8e4"}, - {file = "numpy-1.24.3.tar.gz", hash = "sha256:ab344f1bf21f140adab8e47fdbc7c35a477dc01408791f8ba00d018dd0bc5155"}, + {file = "numpy-1.25.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:77d339465dff3eb33c701430bcb9c325b60354698340229e1dff97745e6b3efa"}, + {file = "numpy-1.25.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d736b75c3f2cb96843a5c7f8d8ccc414768d34b0a75f466c05f3a739b406f10b"}, + {file = "numpy-1.25.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4a90725800caeaa160732d6b31f3f843ebd45d6b5f3eec9e8cc287e30f2805bf"}, + {file = "numpy-1.25.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c6c9261d21e617c6dc5eacba35cb68ec36bb72adcff0dee63f8fbc899362588"}, + {file = "numpy-1.25.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:0def91f8af6ec4bb94c370e38c575855bf1d0be8a8fbfba42ef9c073faf2cf19"}, + {file = "numpy-1.25.1-cp310-cp310-win32.whl", hash = "sha256:fd67b306320dcadea700a8f79b9e671e607f8696e98ec255915c0c6d6b818503"}, + {file = "numpy-1.25.1-cp310-cp310-win_amd64.whl", hash = "sha256:c1516db588987450b85595586605742879e50dcce923e8973f79529651545b57"}, + {file = "numpy-1.25.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6b82655dd8efeea69dbf85d00fca40013d7f503212bc5259056244961268b66e"}, + {file = "numpy-1.25.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e8f6049c4878cb16960fbbfb22105e49d13d752d4d8371b55110941fb3b17800"}, + {file = "numpy-1.25.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:41a56b70e8139884eccb2f733c2f7378af06c82304959e174f8e7370af112e09"}, 
+ {file = "numpy-1.25.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d5154b1a25ec796b1aee12ac1b22f414f94752c5f94832f14d8d6c9ac40bcca6"}, + {file = "numpy-1.25.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:38eb6548bb91c421261b4805dc44def9ca1a6eef6444ce35ad1669c0f1a3fc5d"}, + {file = "numpy-1.25.1-cp311-cp311-win32.whl", hash = "sha256:791f409064d0a69dd20579345d852c59822c6aa087f23b07b1b4e28ff5880fcb"}, + {file = "numpy-1.25.1-cp311-cp311-win_amd64.whl", hash = "sha256:c40571fe966393b212689aa17e32ed905924120737194b5d5c1b20b9ed0fb171"}, + {file = "numpy-1.25.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3d7abcdd85aea3e6cdddb59af2350c7ab1ed764397f8eec97a038ad244d2d105"}, + {file = "numpy-1.25.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1a180429394f81c7933634ae49b37b472d343cccb5bb0c4a575ac8bbc433722f"}, + {file = "numpy-1.25.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d412c1697c3853c6fc3cb9751b4915859c7afe6a277c2bf00acf287d56c4e625"}, + {file = "numpy-1.25.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:20e1266411120a4f16fad8efa8e0454d21d00b8c7cee5b5ccad7565d95eb42dd"}, + {file = "numpy-1.25.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:f76aebc3358ade9eacf9bc2bb8ae589863a4f911611694103af05346637df1b7"}, + {file = "numpy-1.25.1-cp39-cp39-win32.whl", hash = "sha256:247d3ffdd7775bdf191f848be8d49100495114c82c2bd134e8d5d075fb386a1c"}, + {file = "numpy-1.25.1-cp39-cp39-win_amd64.whl", hash = "sha256:1d5d3c68e443c90b38fdf8ef40e60e2538a27548b39b12b73132456847f4b631"}, + {file = "numpy-1.25.1-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:35a9527c977b924042170a0887de727cd84ff179e478481404c5dc66b4170009"}, + {file = "numpy-1.25.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0d3fe3dd0506a28493d82dc3cf254be8cd0d26f4008a417385cbf1ae95b54004"}, + {file = "numpy-1.25.1-pp39-pypy39_pp73-win_amd64.whl", hash = 
"sha256:012097b5b0d00a11070e8f2e261128c44157a8689f7dedcf35576e525893f4fe"}, + {file = "numpy-1.25.1.tar.gz", hash = "sha256:9a3a9f3a61480cc086117b426a8bd86869c213fc4072e606f01c4e4b66eb92bf"}, ] oauth2client = [ {file = "oauth2client-4.1.3-py2.py3-none-any.whl", hash = "sha256:b8a81cc5d60e2d364f0b1b98f958dbd472887acaf1a5b05e21c28c31a2d6d3ac"}, @@ -3626,35 +3687,35 @@ pickleshare = [ {file = "pickleshare-0.7.5.tar.gz", hash = "sha256:87683d47965c1da65cdacaf31c8441d12b8044cdec9aca500cd78fc2c683afca"}, ] platformdirs = [ - {file = "platformdirs-3.5.1-py3-none-any.whl", hash = "sha256:e2378146f1964972c03c085bb5662ae80b2b8c06226c54b2ff4aa9483e8a13a5"}, - {file = "platformdirs-3.5.1.tar.gz", hash = "sha256:412dae91f52a6f84830f39a8078cecd0e866cb72294a5c66808e74d5e88d251f"}, + {file = "platformdirs-3.8.1-py3-none-any.whl", hash = "sha256:cec7b889196b9144d088e4c57d9ceef7374f6c39694ad1577a0aab50d27ea28c"}, + {file = "platformdirs-3.8.1.tar.gz", hash = "sha256:f87ca4fcff7d2b0f81c6a748a77973d7af0f4d526f98f308477c3c436c74d528"}, ] pluggy = [ - {file = "pluggy-1.0.0-py2.py3-none-any.whl", hash = "sha256:74134bbf457f031a36d68416e1509f34bd5ccc019f0bcc952c7b909d06b37bd3"}, - {file = "pluggy-1.0.0.tar.gz", hash = "sha256:4224373bacce55f955a878bf9cfa763c1e360858e330072059e10bad68531159"}, + {file = "pluggy-1.2.0-py3-none-any.whl", hash = "sha256:c2fd55a7d7a3863cba1a013e4e2414658b1d07b6bc57b3919e0c63c9abb99849"}, + {file = "pluggy-1.2.0.tar.gz", hash = "sha256:d12f0c4b579b15f5e054301bb226ee85eeeba08ffec228092f8defbaa3a4c4b3"}, ] prometheus-client = [ - {file = "prometheus_client-0.17.0-py3-none-any.whl", hash = "sha256:a77b708cf083f4d1a3fb3ce5c95b4afa32b9c521ae363354a4a910204ea095ce"}, - {file = "prometheus_client-0.17.0.tar.gz", hash = "sha256:9c3b26f1535945e85b8934fb374678d263137b78ef85f305b1156c7c881cd11b"}, + {file = "prometheus_client-0.17.1-py3-none-any.whl", hash = "sha256:e537f37160f6807b8202a6fc4764cdd19bac5480ddd3e0d463c3002b34462101"}, + {file = 
"prometheus_client-0.17.1.tar.gz", hash = "sha256:21e674f39831ae3f8acde238afd9a27a37d0d2fb5a28ea094f0ce25d2cbf2091"}, ] prompt-toolkit = [ - {file = "prompt_toolkit-3.0.38-py3-none-any.whl", hash = "sha256:45ea77a2f7c60418850331366c81cf6b5b9cf4c7fd34616f733c5427e6abbb1f"}, - {file = "prompt_toolkit-3.0.38.tar.gz", hash = "sha256:23ac5d50538a9a38c8bde05fecb47d0b403ecd0662857a86f886f798563d5b9b"}, + {file = "prompt_toolkit-3.0.39-py3-none-any.whl", hash = "sha256:9dffbe1d8acf91e3de75f3b544e4842382fc06c6babe903ac9acb74dc6e08d88"}, + {file = "prompt_toolkit-3.0.39.tar.gz", hash = "sha256:04505ade687dc26dc4284b1ad19a83be2f2afe83e7a828ace0c72f3a1df72aac"}, ] protobuf = [ - {file = "protobuf-4.23.2-cp310-abi3-win32.whl", hash = "sha256:384dd44cb4c43f2ccddd3645389a23ae61aeb8cfa15ca3a0f60e7c3ea09b28b3"}, - {file = "protobuf-4.23.2-cp310-abi3-win_amd64.whl", hash = "sha256:09310bce43353b46d73ba7e3bca78273b9bc50349509b9698e64d288c6372c2a"}, - {file = "protobuf-4.23.2-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:b2cfab63a230b39ae603834718db74ac11e52bccaaf19bf20f5cce1a84cf76df"}, - {file = "protobuf-4.23.2-cp37-abi3-manylinux2014_aarch64.whl", hash = "sha256:c52cfcbfba8eb791255edd675c1fe6056f723bf832fa67f0442218f8817c076e"}, - {file = "protobuf-4.23.2-cp37-abi3-manylinux2014_x86_64.whl", hash = "sha256:86df87016d290143c7ce3be3ad52d055714ebaebb57cc659c387e76cfacd81aa"}, - {file = "protobuf-4.23.2-cp37-cp37m-win32.whl", hash = "sha256:281342ea5eb631c86697e1e048cb7e73b8a4e85f3299a128c116f05f5c668f8f"}, - {file = "protobuf-4.23.2-cp37-cp37m-win_amd64.whl", hash = "sha256:ce744938406de1e64b91410f473736e815f28c3b71201302612a68bf01517fea"}, - {file = "protobuf-4.23.2-cp38-cp38-win32.whl", hash = "sha256:6c081863c379bb1741be8f8193e893511312b1d7329b4a75445d1ea9955be69e"}, - {file = "protobuf-4.23.2-cp38-cp38-win_amd64.whl", hash = "sha256:25e3370eda26469b58b602e29dff069cfaae8eaa0ef4550039cc5ef8dc004511"}, - {file = "protobuf-4.23.2-cp39-cp39-win32.whl", hash = 
"sha256:efabbbbac1ab519a514579ba9ec52f006c28ae19d97915951f69fa70da2c9e91"}, - {file = "protobuf-4.23.2-cp39-cp39-win_amd64.whl", hash = "sha256:54a533b971288af3b9926e53850c7eb186886c0c84e61daa8444385a4720297f"}, - {file = "protobuf-4.23.2-py3-none-any.whl", hash = "sha256:8da6070310d634c99c0db7df48f10da495cc283fd9e9234877f0cd182d43ab7f"}, - {file = "protobuf-4.23.2.tar.gz", hash = "sha256:20874e7ca4436f683b64ebdbee2129a5a2c301579a67d1a7dda2cdf62fb7f5f7"}, + {file = "protobuf-4.23.4-cp310-abi3-win32.whl", hash = "sha256:5fea3c64d41ea5ecf5697b83e41d09b9589e6f20b677ab3c48e5f242d9b7897b"}, + {file = "protobuf-4.23.4-cp310-abi3-win_amd64.whl", hash = "sha256:7b19b6266d92ca6a2a87effa88ecc4af73ebc5cfde194dc737cf8ef23a9a3b12"}, + {file = "protobuf-4.23.4-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:8547bf44fe8cec3c69e3042f5c4fb3e36eb2a7a013bb0a44c018fc1e427aafbd"}, + {file = "protobuf-4.23.4-cp37-abi3-manylinux2014_aarch64.whl", hash = "sha256:fee88269a090ada09ca63551bf2f573eb2424035bcf2cb1b121895b01a46594a"}, + {file = "protobuf-4.23.4-cp37-abi3-manylinux2014_x86_64.whl", hash = "sha256:effeac51ab79332d44fba74660d40ae79985901ac21bca408f8dc335a81aa597"}, + {file = "protobuf-4.23.4-cp37-cp37m-win32.whl", hash = "sha256:c3e0939433c40796ca4cfc0fac08af50b00eb66a40bbbc5dee711998fb0bbc1e"}, + {file = "protobuf-4.23.4-cp37-cp37m-win_amd64.whl", hash = "sha256:9053df6df8e5a76c84339ee4a9f5a2661ceee4a0dab019e8663c50ba324208b0"}, + {file = "protobuf-4.23.4-cp38-cp38-win32.whl", hash = "sha256:e1c915778d8ced71e26fcf43c0866d7499891bca14c4368448a82edc61fdbc70"}, + {file = "protobuf-4.23.4-cp38-cp38-win_amd64.whl", hash = "sha256:351cc90f7d10839c480aeb9b870a211e322bf05f6ab3f55fcb2f51331f80a7d2"}, + {file = "protobuf-4.23.4-cp39-cp39-win32.whl", hash = "sha256:6dd9b9940e3f17077e820b75851126615ee38643c2c5332aa7a359988820c720"}, + {file = "protobuf-4.23.4-cp39-cp39-win_amd64.whl", hash = "sha256:0a5759f5696895de8cc913f084e27fd4125e8fb0914bb729a17816a33819f474"}, + {file = 
"protobuf-4.23.4-py3-none-any.whl", hash = "sha256:e9d0be5bf34b275b9f87ba7407796556abeeba635455d036c7351f7c183ef8ff"}, + {file = "protobuf-4.23.4.tar.gz", hash = "sha256:ccd9430c0719dce806b93f89c91de7977304729e55377f872a92465d548329a9"}, ] psutil = [ {file = "psutil-5.9.5-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:be8929ce4313f9f8146caad4272f6abb8bf99fc6cf59344a3167ecd74f4f203f"}, @@ -3701,42 +3762,42 @@ pycparser = [ {file = "pycparser-2.21.tar.gz", hash = "sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206"}, ] pydantic = [ - {file = "pydantic-1.10.8-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:1243d28e9b05003a89d72e7915fdb26ffd1d39bdd39b00b7dbe4afae4b557f9d"}, - {file = "pydantic-1.10.8-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c0ab53b609c11dfc0c060d94335993cc2b95b2150e25583bec37a49b2d6c6c3f"}, - {file = "pydantic-1.10.8-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f9613fadad06b4f3bc5db2653ce2f22e0de84a7c6c293909b48f6ed37b83c61f"}, - {file = "pydantic-1.10.8-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:df7800cb1984d8f6e249351139667a8c50a379009271ee6236138a22a0c0f319"}, - {file = "pydantic-1.10.8-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:0c6fafa0965b539d7aab0a673a046466d23b86e4b0e8019d25fd53f4df62c277"}, - {file = "pydantic-1.10.8-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:e82d4566fcd527eae8b244fa952d99f2ca3172b7e97add0b43e2d97ee77f81ab"}, - {file = "pydantic-1.10.8-cp310-cp310-win_amd64.whl", hash = "sha256:ab523c31e22943713d80d8d342d23b6f6ac4b792a1e54064a8d0cf78fd64e800"}, - {file = "pydantic-1.10.8-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:666bdf6066bf6dbc107b30d034615d2627e2121506c555f73f90b54a463d1f33"}, - {file = "pydantic-1.10.8-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:35db5301b82e8661fa9c505c800d0990bc14e9f36f98932bb1d248c0ac5cada5"}, - {file = 
"pydantic-1.10.8-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f90c1e29f447557e9e26afb1c4dbf8768a10cc676e3781b6a577841ade126b85"}, - {file = "pydantic-1.10.8-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:93e766b4a8226e0708ef243e843105bf124e21331694367f95f4e3b4a92bbb3f"}, - {file = "pydantic-1.10.8-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:88f195f582851e8db960b4a94c3e3ad25692c1c1539e2552f3df7a9e972ef60e"}, - {file = "pydantic-1.10.8-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:34d327c81e68a1ecb52fe9c8d50c8a9b3e90d3c8ad991bfc8f953fb477d42fb4"}, - {file = "pydantic-1.10.8-cp311-cp311-win_amd64.whl", hash = "sha256:d532bf00f381bd6bc62cabc7d1372096b75a33bc197a312b03f5838b4fb84edd"}, - {file = "pydantic-1.10.8-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:7d5b8641c24886d764a74ec541d2fc2c7fb19f6da2a4001e6d580ba4a38f7878"}, - {file = "pydantic-1.10.8-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7b1f6cb446470b7ddf86c2e57cd119a24959af2b01e552f60705910663af09a4"}, - {file = "pydantic-1.10.8-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c33b60054b2136aef8cf190cd4c52a3daa20b2263917c49adad20eaf381e823b"}, - {file = "pydantic-1.10.8-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:1952526ba40b220b912cdc43c1c32bcf4a58e3f192fa313ee665916b26befb68"}, - {file = "pydantic-1.10.8-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:bb14388ec45a7a0dc429e87def6396f9e73c8c77818c927b6a60706603d5f2ea"}, - {file = "pydantic-1.10.8-cp37-cp37m-win_amd64.whl", hash = "sha256:16f8c3e33af1e9bb16c7a91fc7d5fa9fe27298e9f299cff6cb744d89d573d62c"}, - {file = "pydantic-1.10.8-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1ced8375969673929809d7f36ad322934c35de4af3b5e5b09ec967c21f9f7887"}, - {file = "pydantic-1.10.8-cp38-cp38-macosx_11_0_arm64.whl", hash = 
"sha256:93e6bcfccbd831894a6a434b0aeb1947f9e70b7468f274154d03d71fabb1d7c6"}, - {file = "pydantic-1.10.8-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:191ba419b605f897ede9892f6c56fb182f40a15d309ef0142212200a10af4c18"}, - {file = "pydantic-1.10.8-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:052d8654cb65174d6f9490cc9b9a200083a82cf5c3c5d3985db765757eb3b375"}, - {file = "pydantic-1.10.8-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:ceb6a23bf1ba4b837d0cfe378329ad3f351b5897c8d4914ce95b85fba96da5a1"}, - {file = "pydantic-1.10.8-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6f2e754d5566f050954727c77f094e01793bcb5725b663bf628fa6743a5a9108"}, - {file = "pydantic-1.10.8-cp38-cp38-win_amd64.whl", hash = "sha256:6a82d6cda82258efca32b40040228ecf43a548671cb174a1e81477195ed3ed56"}, - {file = "pydantic-1.10.8-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3e59417ba8a17265e632af99cc5f35ec309de5980c440c255ab1ca3ae96a3e0e"}, - {file = "pydantic-1.10.8-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:84d80219c3f8d4cad44575e18404099c76851bc924ce5ab1c4c8bb5e2a2227d0"}, - {file = "pydantic-1.10.8-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2e4148e635994d57d834be1182a44bdb07dd867fa3c2d1b37002000646cc5459"}, - {file = "pydantic-1.10.8-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:12f7b0bf8553e310e530e9f3a2f5734c68699f42218bf3568ef49cd9b0e44df4"}, - {file = "pydantic-1.10.8-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:42aa0c4b5c3025483240a25b09f3c09a189481ddda2ea3a831a9d25f444e03c1"}, - {file = "pydantic-1.10.8-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:17aef11cc1b997f9d574b91909fed40761e13fac438d72b81f902226a69dac01"}, - {file = "pydantic-1.10.8-cp39-cp39-win_amd64.whl", hash = "sha256:66a703d1983c675a6e0fed8953b0971c44dba48a929a2000a493c3772eb61a5a"}, - {file = 
"pydantic-1.10.8-py3-none-any.whl", hash = "sha256:7456eb22ed9aaa24ff3e7b4757da20d9e5ce2a81018c1b3ebd81a0b88a18f3b2"}, - {file = "pydantic-1.10.8.tar.gz", hash = "sha256:1410275520dfa70effadf4c21811d755e7ef9bb1f1d077a21958153a92c8d9ca"}, + {file = "pydantic-1.10.11-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ff44c5e89315b15ff1f7fdaf9853770b810936d6b01a7bcecaa227d2f8fe444f"}, + {file = "pydantic-1.10.11-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a6c098d4ab5e2d5b3984d3cb2527e2d6099d3de85630c8934efcfdc348a9760e"}, + {file = "pydantic-1.10.11-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:16928fdc9cb273c6af00d9d5045434c39afba5f42325fb990add2c241402d151"}, + {file = "pydantic-1.10.11-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0588788a9a85f3e5e9ebca14211a496409cb3deca5b6971ff37c556d581854e7"}, + {file = "pydantic-1.10.11-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e9baf78b31da2dc3d3f346ef18e58ec5f12f5aaa17ac517e2ffd026a92a87588"}, + {file = "pydantic-1.10.11-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:373c0840f5c2b5b1ccadd9286782852b901055998136287828731868027a724f"}, + {file = "pydantic-1.10.11-cp310-cp310-win_amd64.whl", hash = "sha256:c3339a46bbe6013ef7bdd2844679bfe500347ac5742cd4019a88312aa58a9847"}, + {file = "pydantic-1.10.11-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:08a6c32e1c3809fbc49debb96bf833164f3438b3696abf0fbeceb417d123e6eb"}, + {file = "pydantic-1.10.11-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a451ccab49971af043ec4e0d207cbc8cbe53dbf148ef9f19599024076fe9c25b"}, + {file = "pydantic-1.10.11-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5b02d24f7b2b365fed586ed73582c20f353a4c50e4be9ba2c57ab96f8091ddae"}, + {file = "pydantic-1.10.11-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:3f34739a89260dfa420aa3cbd069fbcc794b25bbe5c0a214f8fb29e363484b66"}, + {file = "pydantic-1.10.11-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:e297897eb4bebde985f72a46a7552a7556a3dd11e7f76acda0c1093e3dbcf216"}, + {file = "pydantic-1.10.11-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d185819a7a059550ecb85d5134e7d40f2565f3dd94cfd870132c5f91a89cf58c"}, + {file = "pydantic-1.10.11-cp311-cp311-win_amd64.whl", hash = "sha256:4400015f15c9b464c9db2d5d951b6a780102cfa5870f2c036d37c23b56f7fc1b"}, + {file = "pydantic-1.10.11-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:2417de68290434461a266271fc57274a138510dca19982336639484c73a07af6"}, + {file = "pydantic-1.10.11-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:331c031ba1554b974c98679bd0780d89670d6fd6f53f5d70b10bdc9addee1713"}, + {file = "pydantic-1.10.11-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8268a735a14c308923e8958363e3a3404f6834bb98c11f5ab43251a4e410170c"}, + {file = "pydantic-1.10.11-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:44e51ba599c3ef227e168424e220cd3e544288c57829520dc90ea9cb190c3248"}, + {file = "pydantic-1.10.11-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:d7781f1d13b19700b7949c5a639c764a077cbbdd4322ed505b449d3ca8edcb36"}, + {file = "pydantic-1.10.11-cp37-cp37m-win_amd64.whl", hash = "sha256:7522a7666157aa22b812ce14c827574ddccc94f361237ca6ea8bb0d5c38f1629"}, + {file = "pydantic-1.10.11-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:bc64eab9b19cd794a380179ac0e6752335e9555d214cfcb755820333c0784cb3"}, + {file = "pydantic-1.10.11-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:8dc77064471780262b6a68fe67e013298d130414d5aaf9b562c33987dbd2cf4f"}, + {file = "pydantic-1.10.11-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fe429898f2c9dd209bd0632a606bddc06f8bce081bbd03d1c775a45886e2c1cb"}, + {file = 
"pydantic-1.10.11-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:192c608ad002a748e4a0bed2ddbcd98f9b56df50a7c24d9a931a8c5dd053bd3d"}, + {file = "pydantic-1.10.11-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:ef55392ec4bb5721f4ded1096241e4b7151ba6d50a50a80a2526c854f42e6a2f"}, + {file = "pydantic-1.10.11-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:41e0bb6efe86281623abbeeb0be64eab740c865388ee934cd3e6a358784aca6e"}, + {file = "pydantic-1.10.11-cp38-cp38-win_amd64.whl", hash = "sha256:265a60da42f9f27e0b1014eab8acd3e53bd0bad5c5b4884e98a55f8f596b2c19"}, + {file = "pydantic-1.10.11-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:469adf96c8e2c2bbfa655fc7735a2a82f4c543d9fee97bd113a7fb509bf5e622"}, + {file = "pydantic-1.10.11-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e6cbfbd010b14c8a905a7b10f9fe090068d1744d46f9e0c021db28daeb8b6de1"}, + {file = "pydantic-1.10.11-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:abade85268cc92dff86d6effcd917893130f0ff516f3d637f50dadc22ae93999"}, + {file = "pydantic-1.10.11-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e9738b0f2e6c70f44ee0de53f2089d6002b10c33264abee07bdb5c7f03038303"}, + {file = "pydantic-1.10.11-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:787cf23e5a0cde753f2eabac1b2e73ae3844eb873fd1f5bdbff3048d8dbb7604"}, + {file = "pydantic-1.10.11-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:174899023337b9fc685ac8adaa7b047050616136ccd30e9070627c1aaab53a13"}, + {file = "pydantic-1.10.11-cp39-cp39-win_amd64.whl", hash = "sha256:1954f8778489a04b245a1e7b8b22a9d3ea8ef49337285693cf6959e4b757535e"}, + {file = "pydantic-1.10.11-py3-none-any.whl", hash = "sha256:008c5e266c8aada206d0627a011504e14268a62091450210eda7c07fabe6963e"}, + {file = "pydantic-1.10.11.tar.gz", hash = "sha256:f66d479cf7eb331372c470614be6511eae96f1f120344c25f3f9bb59fb1b5528"}, ] pyflakes = [ {file = 
"pyflakes-3.0.1-py2.py3-none-any.whl", hash = "sha256:ec55bf7fe21fff7f1ad2f7da62363d749e2a470500eab1b555334b67aa1ef8cf"}, @@ -3759,49 +3820,20 @@ pyopenssl = [ {file = "pyOpenSSL-23.2.0.tar.gz", hash = "sha256:276f931f55a452e7dea69c7173e984eb2a4407ce413c918aa34b55f82f9b8bac"}, ] pyparsing = [ - {file = "pyparsing-3.0.9-py3-none-any.whl", hash = "sha256:5026bae9a10eeaefb61dab2f09052b9f4307d44aee4eda64b309723d8d206bbc"}, - {file = "pyparsing-3.0.9.tar.gz", hash = "sha256:2b020ecf7d21b687f219b71ecad3631f644a47f01403fa1d1036b0c6416d70fb"}, -] -pyrsistent = [ - {file = "pyrsistent-0.19.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:20460ac0ea439a3e79caa1dbd560344b64ed75e85d8703943e0b66c2a6150e4a"}, - {file = "pyrsistent-0.19.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4c18264cb84b5e68e7085a43723f9e4c1fd1d935ab240ce02c0324a8e01ccb64"}, - {file = "pyrsistent-0.19.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4b774f9288dda8d425adb6544e5903f1fb6c273ab3128a355c6b972b7df39dcf"}, - {file = "pyrsistent-0.19.3-cp310-cp310-win32.whl", hash = "sha256:5a474fb80f5e0d6c9394d8db0fc19e90fa540b82ee52dba7d246a7791712f74a"}, - {file = "pyrsistent-0.19.3-cp310-cp310-win_amd64.whl", hash = "sha256:49c32f216c17148695ca0e02a5c521e28a4ee6c5089f97e34fe24163113722da"}, - {file = "pyrsistent-0.19.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:f0774bf48631f3a20471dd7c5989657b639fd2d285b861237ea9e82c36a415a9"}, - {file = "pyrsistent-0.19.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3ab2204234c0ecd8b9368dbd6a53e83c3d4f3cab10ecaf6d0e772f456c442393"}, - {file = "pyrsistent-0.19.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e42296a09e83028b3476f7073fcb69ffebac0e66dbbfd1bd847d61f74db30f19"}, - {file = "pyrsistent-0.19.3-cp311-cp311-win32.whl", hash = 
"sha256:64220c429e42a7150f4bfd280f6f4bb2850f95956bde93c6fda1b70507af6ef3"}, - {file = "pyrsistent-0.19.3-cp311-cp311-win_amd64.whl", hash = "sha256:016ad1afadf318eb7911baa24b049909f7f3bb2c5b1ed7b6a8f21db21ea3faa8"}, - {file = "pyrsistent-0.19.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c4db1bd596fefd66b296a3d5d943c94f4fac5bcd13e99bffe2ba6a759d959a28"}, - {file = "pyrsistent-0.19.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aeda827381f5e5d65cced3024126529ddc4289d944f75e090572c77ceb19adbf"}, - {file = "pyrsistent-0.19.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:42ac0b2f44607eb92ae88609eda931a4f0dfa03038c44c772e07f43e738bcac9"}, - {file = "pyrsistent-0.19.3-cp37-cp37m-win32.whl", hash = "sha256:e8f2b814a3dc6225964fa03d8582c6e0b6650d68a232df41e3cc1b66a5d2f8d1"}, - {file = "pyrsistent-0.19.3-cp37-cp37m-win_amd64.whl", hash = "sha256:c9bb60a40a0ab9aba40a59f68214eed5a29c6274c83b2cc206a359c4a89fa41b"}, - {file = "pyrsistent-0.19.3-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:a2471f3f8693101975b1ff85ffd19bb7ca7dd7c38f8a81701f67d6b4f97b87d8"}, - {file = "pyrsistent-0.19.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cc5d149f31706762c1f8bda2e8c4f8fead6e80312e3692619a75301d3dbb819a"}, - {file = "pyrsistent-0.19.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3311cb4237a341aa52ab8448c27e3a9931e2ee09561ad150ba94e4cfd3fc888c"}, - {file = "pyrsistent-0.19.3-cp38-cp38-win32.whl", hash = "sha256:f0e7c4b2f77593871e918be000b96c8107da48444d57005b6a6bc61fb4331b2c"}, - {file = "pyrsistent-0.19.3-cp38-cp38-win_amd64.whl", hash = "sha256:c147257a92374fde8498491f53ffa8f4822cd70c0d85037e09028e478cababb7"}, - {file = "pyrsistent-0.19.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:b735e538f74ec31378f5a1e3886a26d2ca6351106b4dfde376a26fc32a044edc"}, - {file = 
"pyrsistent-0.19.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:99abb85579e2165bd8522f0c0138864da97847875ecbd45f3e7e2af569bfc6f2"}, - {file = "pyrsistent-0.19.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3a8cb235fa6d3fd7aae6a4f1429bbb1fec1577d978098da1252f0489937786f3"}, - {file = "pyrsistent-0.19.3-cp39-cp39-win32.whl", hash = "sha256:c74bed51f9b41c48366a286395c67f4e894374306b197e62810e0fdaf2364da2"}, - {file = "pyrsistent-0.19.3-cp39-cp39-win_amd64.whl", hash = "sha256:878433581fc23e906d947a6814336eee031a00e6defba224234169ae3d3d6a98"}, - {file = "pyrsistent-0.19.3-py3-none-any.whl", hash = "sha256:ccf0d6bd208f8111179f0c26fdf84ed7c3891982f2edaeae7422575f47e66b64"}, - {file = "pyrsistent-0.19.3.tar.gz", hash = "sha256:1a2994773706bbb4995c31a97bc94f1418314923bd1048c6d964837040376440"}, + {file = "pyparsing-3.1.0-py3-none-any.whl", hash = "sha256:d554a96d1a7d3ddaf7183104485bc19fd80543ad6ac5bdb6426719d766fb06c1"}, + {file = "pyparsing-3.1.0.tar.gz", hash = "sha256:edb662d6fe322d6e990b1594b5feaeadf806803359e3d4d42f11e295e588f0ea"}, ] pytest = [ - {file = "pytest-7.3.1-py3-none-any.whl", hash = "sha256:3799fa815351fea3a5e96ac7e503a96fa51cc9942c3753cda7651b93c1cfa362"}, - {file = "pytest-7.3.1.tar.gz", hash = "sha256:434afafd78b1d78ed0addf160ad2b77a30d35d4bdf8af234fe621919d9ed15e3"}, + {file = "pytest-7.4.0-py3-none-any.whl", hash = "sha256:78bf16451a2eb8c7a2ea98e32dc119fd2aa758f1d5d66dbf0a59d69a3969df32"}, + {file = "pytest-7.4.0.tar.gz", hash = "sha256:b4bf8c45bd59934ed84001ad51e11b4ee40d40a1229d2c79f9c592b0a3f6bd8a"}, ] pytest-cov = [ {file = "pytest-cov-4.1.0.tar.gz", hash = "sha256:3904b13dfbfec47f003b8e77fd5b589cd11904a21ddf1ab38a64f204d6a10ef6"}, {file = "pytest_cov-4.1.0-py3-none-any.whl", hash = "sha256:6ba70b9e97e69fcc3fb45bfeab2d0a138fb65c4d0d6a41ef33983ad114be8c3a"}, ] pytest-mock = [ - {file = "pytest-mock-3.10.0.tar.gz", hash = 
"sha256:fbbdb085ef7c252a326fd8cdcac0aa3b1333d8811f131bdcc701002e1be7ed4f"}, - {file = "pytest_mock-3.10.0-py3-none-any.whl", hash = "sha256:f4c973eeae0282963eb293eb173ce91b091a79c1334455acfac9ddee8a1c784b"}, + {file = "pytest-mock-3.11.1.tar.gz", hash = "sha256:7f6b125602ac6d743e523ae0bfa71e1a697a2f5534064528c6ff84c2f7c2fc7f"}, + {file = "pytest_mock-3.11.1-py3-none-any.whl", hash = "sha256:21c279fff83d70763b05f8874cc9cfb3fcacd6d354247a976f9529d19f9acf39"}, ] python-dateutil = [ {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"}, @@ -3836,8 +3868,8 @@ pywin32 = [ {file = "pywin32-306-cp39-cp39-win_amd64.whl", hash = "sha256:39b61c15272833b5c329a2989999dcae836b1eed650252ab1b7bfbe1d59f30f4"}, ] pywin32-ctypes = [ - {file = "pywin32-ctypes-0.2.0.tar.gz", hash = "sha256:24ffc3b341d457d48e8922352130cf2644024a4ff09762a2261fd34c36ee5942"}, - {file = "pywin32_ctypes-0.2.0-py2.py3-none-any.whl", hash = "sha256:9dc2d991b3479cc2df15930958b674a48a227d5361d413827a4cfd0b5876fc98"}, + {file = "pywin32-ctypes-0.2.2.tar.gz", hash = "sha256:3426e063bdd5fd4df74a14fa3cf80a0b42845a87e1d1e81f6549f9daec593a60"}, + {file = "pywin32_ctypes-0.2.2-py3-none-any.whl", hash = "sha256:bf490a1a709baf35d688fe0ecf980ed4de11d2b3e37b51e5442587a75d9957e7"}, ] pywinpty = [ {file = "pywinpty-2.0.10-cp310-none-win_amd64.whl", hash = "sha256:4c7d06ad10f6e92bc850a467f26d98f4f30e73d2fe5926536308c6ae0566bc16"}, @@ -3972,95 +4004,99 @@ rdflib = [ {file = "rdflib-6.3.2-py3-none-any.whl", hash = "sha256:36b4e74a32aa1e4fa7b8719876fb192f19ecd45ff932ea5ebbd2e417a0247e63"}, {file = "rdflib-6.3.2.tar.gz", hash = "sha256:72af591ff704f4caacea7ecc0c5a9056b8553e0489dd4f35a9bc52dbd41522e0"}, ] +referencing = [ + {file = "referencing-0.29.1-py3-none-any.whl", hash = "sha256:d3c8f323ee1480095da44d55917cfb8278d73d6b4d5f677e3e40eb21314ac67f"}, + {file = "referencing-0.29.1.tar.gz", hash = 
"sha256:90cb53782d550ba28d2166ef3f55731f38397def8832baac5d45235f1995e35e"}, +] regex = [ - {file = "regex-2023.5.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:48c9ec56579d4ba1c88f42302194b8ae2350265cb60c64b7b9a88dcb7fbde309"}, - {file = "regex-2023.5.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:02f4541550459c08fdd6f97aa4e24c6f1932eec780d58a2faa2068253df7d6ff"}, - {file = "regex-2023.5.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:53e22e4460f0245b468ee645156a4f84d0fc35a12d9ba79bd7d79bdcd2f9629d"}, - {file = "regex-2023.5.5-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4b870b6f632fc74941cadc2a0f3064ed8409e6f8ee226cdfd2a85ae50473aa94"}, - {file = "regex-2023.5.5-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:171c52e320fe29260da550d81c6b99f6f8402450dc7777ef5ced2e848f3b6f8f"}, - {file = "regex-2023.5.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aad5524c2aedaf9aa14ef1bc9327f8abd915699dea457d339bebbe2f0d218f86"}, - {file = "regex-2023.5.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5a0f874ee8c0bc820e649c900243c6d1e6dc435b81da1492046716f14f1a2a96"}, - {file = "regex-2023.5.5-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:e645c757183ee0e13f0bbe56508598e2d9cd42b8abc6c0599d53b0d0b8dd1479"}, - {file = "regex-2023.5.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:a4c5da39bca4f7979eefcbb36efea04471cd68db2d38fcbb4ee2c6d440699833"}, - {file = "regex-2023.5.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:5e3f4468b8c6fd2fd33c218bbd0a1559e6a6fcf185af8bb0cc43f3b5bfb7d636"}, - {file = "regex-2023.5.5-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:59e4b729eae1a0919f9e4c0fc635fbcc9db59c74ad98d684f4877be3d2607dd6"}, - {file = "regex-2023.5.5-cp310-cp310-musllinux_1_1_s390x.whl", hash = 
"sha256:ba73a14e9c8f9ac409863543cde3290dba39098fc261f717dc337ea72d3ebad2"}, - {file = "regex-2023.5.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:0bbd5dcb19603ab8d2781fac60114fb89aee8494f4505ae7ad141a3314abb1f9"}, - {file = "regex-2023.5.5-cp310-cp310-win32.whl", hash = "sha256:40005cbd383438aecf715a7b47fe1e3dcbc889a36461ed416bdec07e0ef1db66"}, - {file = "regex-2023.5.5-cp310-cp310-win_amd64.whl", hash = "sha256:59597cd6315d3439ed4b074febe84a439c33928dd34396941b4d377692eca810"}, - {file = "regex-2023.5.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:8f08276466fedb9e36e5193a96cb944928301152879ec20c2d723d1031cd4ddd"}, - {file = "regex-2023.5.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:cd46f30e758629c3ee91713529cfbe107ac50d27110fdcc326a42ce2acf4dafc"}, - {file = "regex-2023.5.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f2910502f718828cecc8beff004917dcf577fc5f8f5dd40ffb1ea7612124547b"}, - {file = "regex-2023.5.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:445d6f4fc3bd9fc2bf0416164454f90acab8858cd5a041403d7a11e3356980e8"}, - {file = "regex-2023.5.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:18196c16a584619c7c1d843497c069955d7629ad4a3fdee240eb347f4a2c9dbe"}, - {file = "regex-2023.5.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:33d430a23b661629661f1fe8395be2004006bc792bb9fc7c53911d661b69dd7e"}, - {file = "regex-2023.5.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:72a28979cc667e5f82ef433db009184e7ac277844eea0f7f4d254b789517941d"}, - {file = "regex-2023.5.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:f764e4dfafa288e2eba21231f455d209f4709436baeebb05bdecfb5d8ddc3d35"}, - {file = "regex-2023.5.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:23d86ad2121b3c4fc78c58f95e19173790e22ac05996df69b84e12da5816cb17"}, - {file = 
"regex-2023.5.5-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:690a17db524ee6ac4a27efc5406530dd90e7a7a69d8360235323d0e5dafb8f5b"}, - {file = "regex-2023.5.5-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:1ecf3dcff71f0c0fe3e555201cbe749fa66aae8d18f80d2cc4de8e66df37390a"}, - {file = "regex-2023.5.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:811040d7f3dd9c55eb0d8b00b5dcb7fd9ae1761c454f444fd9f37fe5ec57143a"}, - {file = "regex-2023.5.5-cp311-cp311-win32.whl", hash = "sha256:c8c143a65ce3ca42e54d8e6fcaf465b6b672ed1c6c90022794a802fb93105d22"}, - {file = "regex-2023.5.5-cp311-cp311-win_amd64.whl", hash = "sha256:586a011f77f8a2da4b888774174cd266e69e917a67ba072c7fc0e91878178a80"}, - {file = "regex-2023.5.5-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:b6365703e8cf1644b82104cdd05270d1a9f043119a168d66c55684b1b557d008"}, - {file = "regex-2023.5.5-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a56c18f21ac98209da9c54ae3ebb3b6f6e772038681d6cb43b8d53da3b09ee81"}, - {file = "regex-2023.5.5-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b8b942d8b3ce765dbc3b1dad0a944712a89b5de290ce8f72681e22b3c55f3cc8"}, - {file = "regex-2023.5.5-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:844671c9c1150fcdac46d43198364034b961bd520f2c4fdaabfc7c7d7138a2dd"}, - {file = "regex-2023.5.5-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c2ce65bdeaf0a386bb3b533a28de3994e8e13b464ac15e1e67e4603dd88787fa"}, - {file = "regex-2023.5.5-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fee0016cc35a8a91e8cc9312ab26a6fe638d484131a7afa79e1ce6165328a135"}, - {file = "regex-2023.5.5-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:18f05d14f14a812fe9723f13afafefe6b74ca042d99f8884e62dbd34dcccf3e2"}, - {file = "regex-2023.5.5-cp36-cp36m-musllinux_1_1_aarch64.whl", 
hash = "sha256:941b3f1b2392f0bcd6abf1bc7a322787d6db4e7457be6d1ffd3a693426a755f2"}, - {file = "regex-2023.5.5-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:921473a93bcea4d00295799ab929522fc650e85c6b9f27ae1e6bb32a790ea7d3"}, - {file = "regex-2023.5.5-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:e2205a81f815b5bb17e46e74cc946c575b484e5f0acfcb805fb252d67e22938d"}, - {file = "regex-2023.5.5-cp36-cp36m-musllinux_1_1_s390x.whl", hash = "sha256:385992d5ecf1a93cb85adff2f73e0402dd9ac29b71b7006d342cc920816e6f32"}, - {file = "regex-2023.5.5-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:890a09cb0a62198bff92eda98b2b507305dd3abf974778bae3287f98b48907d3"}, - {file = "regex-2023.5.5-cp36-cp36m-win32.whl", hash = "sha256:821a88b878b6589c5068f4cc2cfeb2c64e343a196bc9d7ac68ea8c2a776acd46"}, - {file = "regex-2023.5.5-cp36-cp36m-win_amd64.whl", hash = "sha256:7918a1b83dd70dc04ab5ed24c78ae833ae8ea228cef84e08597c408286edc926"}, - {file = "regex-2023.5.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:338994d3d4ca4cf12f09822e025731a5bdd3a37aaa571fa52659e85ca793fb67"}, - {file = "regex-2023.5.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0a69cf0c00c4d4a929c6c7717fd918414cab0d6132a49a6d8fc3ded1988ed2ea"}, - {file = "regex-2023.5.5-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8f5e06df94fff8c4c85f98c6487f6636848e1dc85ce17ab7d1931df4a081f657"}, - {file = "regex-2023.5.5-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a8906669b03c63266b6a7693d1f487b02647beb12adea20f8840c1a087e2dfb5"}, - {file = "regex-2023.5.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9fda3e50abad8d0f48df621cf75adc73c63f7243cbe0e3b2171392b445401550"}, - {file = "regex-2023.5.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5ac2b7d341dc1bd102be849d6dd33b09701223a851105b2754339e390be0627a"}, - {file = 
"regex-2023.5.5-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:fb2b495dd94b02de8215625948132cc2ea360ae84fe6634cd19b6567709c8ae2"}, - {file = "regex-2023.5.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:aa7d032c1d84726aa9edeb6accf079b4caa87151ca9fabacef31fa028186c66d"}, - {file = "regex-2023.5.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:3d45864693351c15531f7e76f545ec35000d50848daa833cead96edae1665559"}, - {file = "regex-2023.5.5-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:21e90a288e6ba4bf44c25c6a946cb9b0f00b73044d74308b5e0afd190338297c"}, - {file = "regex-2023.5.5-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:10250a093741ec7bf74bcd2039e697f519b028518f605ff2aa7ac1e9c9f97423"}, - {file = "regex-2023.5.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:6b8d0c153f07a953636b9cdb3011b733cadd4178123ef728ccc4d5969e67f3c2"}, - {file = "regex-2023.5.5-cp37-cp37m-win32.whl", hash = "sha256:10374c84ee58c44575b667310d5bbfa89fb2e64e52349720a0182c0017512f6c"}, - {file = "regex-2023.5.5-cp37-cp37m-win_amd64.whl", hash = "sha256:9b320677521aabf666cdd6e99baee4fb5ac3996349c3b7f8e7c4eee1c00dfe3a"}, - {file = "regex-2023.5.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:afb1c70ec1e594a547f38ad6bf5e3d60304ce7539e677c1429eebab115bce56e"}, - {file = "regex-2023.5.5-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:cf123225945aa58b3057d0fba67e8061c62d14cc8a4202630f8057df70189051"}, - {file = "regex-2023.5.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a99757ad7fe5c8a2bb44829fc57ced11253e10f462233c1255fe03888e06bc19"}, - {file = "regex-2023.5.5-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a623564d810e7a953ff1357f7799c14bc9beeab699aacc8b7ab7822da1e952b8"}, - {file = "regex-2023.5.5-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ced02e3bd55e16e89c08bbc8128cff0884d96e7f7a5633d3dc366b6d95fcd1d6"}, - 
{file = "regex-2023.5.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d1cbe6b5be3b9b698d8cc4ee4dee7e017ad655e83361cd0ea8e653d65e469468"}, - {file = "regex-2023.5.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4a6e4b0e0531223f53bad07ddf733af490ba2b8367f62342b92b39b29f72735a"}, - {file = "regex-2023.5.5-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:2e9c4f778514a560a9c9aa8e5538bee759b55f6c1dcd35613ad72523fd9175b8"}, - {file = "regex-2023.5.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:256f7f4c6ba145f62f7a441a003c94b8b1af78cee2cccacfc1e835f93bc09426"}, - {file = "regex-2023.5.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:bd7b68fd2e79d59d86dcbc1ccd6e2ca09c505343445daaa4e07f43c8a9cc34da"}, - {file = "regex-2023.5.5-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4a5059bd585e9e9504ef9c07e4bc15b0a621ba20504388875d66b8b30a5c4d18"}, - {file = "regex-2023.5.5-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:6893544e06bae009916a5658ce7207e26ed17385149f35a3125f5259951f1bbe"}, - {file = "regex-2023.5.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:c64d5abe91a3dfe5ff250c6bb267ef00dbc01501518225b45a5f9def458f31fb"}, - {file = "regex-2023.5.5-cp38-cp38-win32.whl", hash = "sha256:7923470d6056a9590247ff729c05e8e0f06bbd4efa6569c916943cb2d9b68b91"}, - {file = "regex-2023.5.5-cp38-cp38-win_amd64.whl", hash = "sha256:4035d6945cb961c90c3e1c1ca2feb526175bcfed44dfb1cc77db4fdced060d3e"}, - {file = "regex-2023.5.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:50fd2d9b36938d4dcecbd684777dd12a407add4f9f934f235c66372e630772b0"}, - {file = "regex-2023.5.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:d19e57f888b00cd04fc38f5e18d0efbd91ccba2d45039453ab2236e6eec48d4d"}, - {file = "regex-2023.5.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:bd966475e963122ee0a7118ec9024388c602d12ac72860f6eea119a3928be053"}, - {file = "regex-2023.5.5-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:db09e6c18977a33fea26fe67b7a842f706c67cf8bda1450974d0ae0dd63570df"}, - {file = "regex-2023.5.5-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6164d4e2a82f9ebd7752a06bd6c504791bedc6418c0196cd0a23afb7f3e12b2d"}, - {file = "regex-2023.5.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:84397d3f750d153ebd7f958efaa92b45fea170200e2df5e0e1fd4d85b7e3f58a"}, - {file = "regex-2023.5.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9c3efee9bb53cbe7b285760c81f28ac80dc15fa48b5fe7e58b52752e642553f1"}, - {file = "regex-2023.5.5-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:144b5b017646b5a9392a5554a1e5db0000ae637be4971c9747566775fc96e1b2"}, - {file = "regex-2023.5.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:1189fbbb21e2c117fda5303653b61905aeeeea23de4a94d400b0487eb16d2d60"}, - {file = "regex-2023.5.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:f83fe9e10f9d0b6cf580564d4d23845b9d692e4c91bd8be57733958e4c602956"}, - {file = "regex-2023.5.5-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:72aa4746993a28c841e05889f3f1b1e5d14df8d3daa157d6001a34c98102b393"}, - {file = "regex-2023.5.5-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:de2f780c3242ea114dd01f84848655356af4dd561501896c751d7b885ea6d3a1"}, - {file = "regex-2023.5.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:290fd35219486dfbc00b0de72f455ecdd63e59b528991a6aec9fdfc0ce85672e"}, - {file = "regex-2023.5.5-cp39-cp39-win32.whl", hash = "sha256:732176f5427e72fa2325b05c58ad0b45af341c459910d766f814b0584ac1f9ac"}, - {file = "regex-2023.5.5-cp39-cp39-win_amd64.whl", hash = "sha256:1307aa4daa1cbb23823d8238e1f61292fd07e4e5d8d38a6efff00b67a7cdb764"}, - {file = 
"regex-2023.5.5.tar.gz", hash = "sha256:7d76a8a1fc9da08296462a18f16620ba73bcbf5909e42383b253ef34d9d5141e"}, + {file = "regex-2023.6.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:824bf3ac11001849aec3fa1d69abcb67aac3e150a933963fb12bda5151fe1bfd"}, + {file = "regex-2023.6.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:05ed27acdf4465c95826962528f9e8d41dbf9b1aa8531a387dee6ed215a3e9ef"}, + {file = "regex-2023.6.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0b49c764f88a79160fa64f9a7b425620e87c9f46095ef9c9920542ab2495c8bc"}, + {file = "regex-2023.6.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8e3f1316c2293e5469f8f09dc2d76efb6c3982d3da91ba95061a7e69489a14ef"}, + {file = "regex-2023.6.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:43e1dd9d12df9004246bacb79a0e5886b3b6071b32e41f83b0acbf293f820ee8"}, + {file = "regex-2023.6.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4959e8bcbfda5146477d21c3a8ad81b185cd252f3d0d6e4724a5ef11c012fb06"}, + {file = "regex-2023.6.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:af4dd387354dc83a3bff67127a124c21116feb0d2ef536805c454721c5d7993d"}, + {file = "regex-2023.6.3-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:2239d95d8e243658b8dbb36b12bd10c33ad6e6933a54d36ff053713f129aa536"}, + {file = "regex-2023.6.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:890e5a11c97cf0d0c550eb661b937a1e45431ffa79803b942a057c4fb12a2da2"}, + {file = "regex-2023.6.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:a8105e9af3b029f243ab11ad47c19b566482c150c754e4c717900a798806b222"}, + {file = "regex-2023.6.3-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:25be746a8ec7bc7b082783216de8e9473803706723b3f6bef34b3d0ed03d57e2"}, + {file = "regex-2023.6.3-cp310-cp310-musllinux_1_1_s390x.whl", 
hash = "sha256:3676f1dd082be28b1266c93f618ee07741b704ab7b68501a173ce7d8d0d0ca18"}, + {file = "regex-2023.6.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:10cb847aeb1728412c666ab2e2000ba6f174f25b2bdc7292e7dd71b16db07568"}, + {file = "regex-2023.6.3-cp310-cp310-win32.whl", hash = "sha256:dbbbfce33cd98f97f6bffb17801b0576e653f4fdb1d399b2ea89638bc8d08ae1"}, + {file = "regex-2023.6.3-cp310-cp310-win_amd64.whl", hash = "sha256:c5f8037000eb21e4823aa485149f2299eb589f8d1fe4b448036d230c3f4e68e0"}, + {file = "regex-2023.6.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c123f662be8ec5ab4ea72ea300359023a5d1df095b7ead76fedcd8babbedf969"}, + {file = "regex-2023.6.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:9edcbad1f8a407e450fbac88d89e04e0b99a08473f666a3f3de0fd292badb6aa"}, + {file = "regex-2023.6.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dcba6dae7de533c876255317c11f3abe4907ba7d9aa15d13e3d9710d4315ec0e"}, + {file = "regex-2023.6.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:29cdd471ebf9e0f2fb3cac165efedc3c58db841d83a518b082077e612d3ee5df"}, + {file = "regex-2023.6.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:12b74fbbf6cbbf9dbce20eb9b5879469e97aeeaa874145517563cca4029db65c"}, + {file = "regex-2023.6.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0c29ca1bd61b16b67be247be87390ef1d1ef702800f91fbd1991f5c4421ebae8"}, + {file = "regex-2023.6.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d77f09bc4b55d4bf7cc5eba785d87001d6757b7c9eec237fe2af57aba1a071d9"}, + {file = "regex-2023.6.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ea353ecb6ab5f7e7d2f4372b1e779796ebd7b37352d290096978fea83c4dba0c"}, + {file = "regex-2023.6.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:10590510780b7541969287512d1b43f19f965c2ece6c9b1c00fc367b29d8dce7"}, + {file = 
"regex-2023.6.3-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:e2fbd6236aae3b7f9d514312cdb58e6494ee1c76a9948adde6eba33eb1c4264f"}, + {file = "regex-2023.6.3-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:6b2675068c8b56f6bfd5a2bda55b8accbb96c02fd563704732fd1c95e2083461"}, + {file = "regex-2023.6.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:74419d2b50ecb98360cfaa2974da8689cb3b45b9deff0dcf489c0d333bcc1477"}, + {file = "regex-2023.6.3-cp311-cp311-win32.whl", hash = "sha256:fb5ec16523dc573a4b277663a2b5a364e2099902d3944c9419a40ebd56a118f9"}, + {file = "regex-2023.6.3-cp311-cp311-win_amd64.whl", hash = "sha256:09e4a1a6acc39294a36b7338819b10baceb227f7f7dbbea0506d419b5a1dd8af"}, + {file = "regex-2023.6.3-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:0654bca0cdf28a5956c83839162692725159f4cda8d63e0911a2c0dc76166525"}, + {file = "regex-2023.6.3-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:463b6a3ceb5ca952e66550a4532cef94c9a0c80dc156c4cc343041951aec1697"}, + {file = "regex-2023.6.3-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:87b2a5bb5e78ee0ad1de71c664d6eb536dc3947a46a69182a90f4410f5e3f7dd"}, + {file = "regex-2023.6.3-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6343c6928282c1f6a9db41f5fd551662310e8774c0e5ebccb767002fcf663ca9"}, + {file = "regex-2023.6.3-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b6192d5af2ccd2a38877bfef086d35e6659566a335b1492786ff254c168b1693"}, + {file = "regex-2023.6.3-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:74390d18c75054947e4194019077e243c06fbb62e541d8817a0fa822ea310c14"}, + {file = "regex-2023.6.3-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:742e19a90d9bb2f4a6cf2862b8b06dea5e09b96c9f2df1779e53432d7275331f"}, + {file = "regex-2023.6.3-cp36-cp36m-musllinux_1_1_aarch64.whl", 
hash = "sha256:8abbc5d54ea0ee80e37fef009e3cec5dafd722ed3c829126253d3e22f3846f1e"}, + {file = "regex-2023.6.3-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:c2b867c17a7a7ae44c43ebbeb1b5ff406b3e8d5b3e14662683e5e66e6cc868d3"}, + {file = "regex-2023.6.3-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:d831c2f8ff278179705ca59f7e8524069c1a989e716a1874d6d1aab6119d91d1"}, + {file = "regex-2023.6.3-cp36-cp36m-musllinux_1_1_s390x.whl", hash = "sha256:ee2d1a9a253b1729bb2de27d41f696ae893507c7db224436abe83ee25356f5c1"}, + {file = "regex-2023.6.3-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:61474f0b41fe1a80e8dfa70f70ea1e047387b7cd01c85ec88fa44f5d7561d787"}, + {file = "regex-2023.6.3-cp36-cp36m-win32.whl", hash = "sha256:0b71e63226e393b534105fcbdd8740410dc6b0854c2bfa39bbda6b0d40e59a54"}, + {file = "regex-2023.6.3-cp36-cp36m-win_amd64.whl", hash = "sha256:bbb02fd4462f37060122e5acacec78e49c0fbb303c30dd49c7f493cf21fc5b27"}, + {file = "regex-2023.6.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b862c2b9d5ae38a68b92e215b93f98d4c5e9454fa36aae4450f61dd33ff48487"}, + {file = "regex-2023.6.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:976d7a304b59ede34ca2921305b57356694f9e6879db323fd90a80f865d355a3"}, + {file = "regex-2023.6.3-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:83320a09188e0e6c39088355d423aa9d056ad57a0b6c6381b300ec1a04ec3d16"}, + {file = "regex-2023.6.3-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9427a399501818a7564f8c90eced1e9e20709ece36be701f394ada99890ea4b3"}, + {file = "regex-2023.6.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7178bbc1b2ec40eaca599d13c092079bf529679bf0371c602edaa555e10b41c3"}, + {file = "regex-2023.6.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:837328d14cde912af625d5f303ec29f7e28cdab588674897baafaf505341f2fc"}, + {file = 
"regex-2023.6.3-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:2d44dc13229905ae96dd2ae2dd7cebf824ee92bc52e8cf03dcead37d926da019"}, + {file = "regex-2023.6.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d54af539295392611e7efbe94e827311eb8b29668e2b3f4cadcfe6f46df9c777"}, + {file = "regex-2023.6.3-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:7117d10690c38a622e54c432dfbbd3cbd92f09401d622902c32f6d377e2300ee"}, + {file = "regex-2023.6.3-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:bb60b503ec8a6e4e3e03a681072fa3a5adcbfa5479fa2d898ae2b4a8e24c4591"}, + {file = "regex-2023.6.3-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:65ba8603753cec91c71de423a943ba506363b0e5c3fdb913ef8f9caa14b2c7e0"}, + {file = "regex-2023.6.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:271f0bdba3c70b58e6f500b205d10a36fb4b58bd06ac61381b68de66442efddb"}, + {file = "regex-2023.6.3-cp37-cp37m-win32.whl", hash = "sha256:9beb322958aaca059f34975b0df135181f2e5d7a13b84d3e0e45434749cb20f7"}, + {file = "regex-2023.6.3-cp37-cp37m-win_amd64.whl", hash = "sha256:fea75c3710d4f31389eed3c02f62d0b66a9da282521075061ce875eb5300cf23"}, + {file = "regex-2023.6.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:8f56fcb7ff7bf7404becdfc60b1e81a6d0561807051fd2f1860b0d0348156a07"}, + {file = "regex-2023.6.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:d2da3abc88711bce7557412310dfa50327d5769a31d1c894b58eb256459dc289"}, + {file = "regex-2023.6.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a99b50300df5add73d307cf66abea093304a07eb017bce94f01e795090dea87c"}, + {file = "regex-2023.6.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5708089ed5b40a7b2dc561e0c8baa9535b77771b64a8330b684823cfd5116036"}, + {file = "regex-2023.6.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:687ea9d78a4b1cf82f8479cab23678aff723108df3edeac098e5b2498879f4a7"}, + 
{file = "regex-2023.6.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4d3850beab9f527f06ccc94b446c864059c57651b3f911fddb8d9d3ec1d1b25d"}, + {file = "regex-2023.6.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e8915cc96abeb8983cea1df3c939e3c6e1ac778340c17732eb63bb96247b91d2"}, + {file = "regex-2023.6.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:841d6e0e5663d4c7b4c8099c9997be748677d46cbf43f9f471150e560791f7ff"}, + {file = "regex-2023.6.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:9edce5281f965cf135e19840f4d93d55b3835122aa76ccacfd389e880ba4cf82"}, + {file = "regex-2023.6.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:b956231ebdc45f5b7a2e1f90f66a12be9610ce775fe1b1d50414aac1e9206c06"}, + {file = "regex-2023.6.3-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:36efeba71c6539d23c4643be88295ce8c82c88bbd7c65e8a24081d2ca123da3f"}, + {file = "regex-2023.6.3-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:cf67ca618b4fd34aee78740bea954d7c69fdda419eb208c2c0c7060bb822d747"}, + {file = "regex-2023.6.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:b4598b1897837067a57b08147a68ac026c1e73b31ef6e36deeeb1fa60b2933c9"}, + {file = "regex-2023.6.3-cp38-cp38-win32.whl", hash = "sha256:f415f802fbcafed5dcc694c13b1292f07fe0befdb94aa8a52905bd115ff41e88"}, + {file = "regex-2023.6.3-cp38-cp38-win_amd64.whl", hash = "sha256:d4f03bb71d482f979bda92e1427f3ec9b220e62a7dd337af0aa6b47bf4498f72"}, + {file = "regex-2023.6.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:ccf91346b7bd20c790310c4147eee6ed495a54ddb6737162a36ce9dbef3e4751"}, + {file = "regex-2023.6.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b28f5024a3a041009eb4c333863d7894d191215b39576535c6734cd88b0fcb68"}, + {file = "regex-2023.6.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:e0bb18053dfcfed432cc3ac632b5e5e5c5b7e55fb3f8090e867bfd9b054dbcbf"}, + {file = "regex-2023.6.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9a5bfb3004f2144a084a16ce19ca56b8ac46e6fd0651f54269fc9e230edb5e4a"}, + {file = "regex-2023.6.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5c6b48d0fa50d8f4df3daf451be7f9689c2bde1a52b1225c5926e3f54b6a9ed1"}, + {file = "regex-2023.6.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:051da80e6eeb6e239e394ae60704d2b566aa6a7aed6f2890a7967307267a5dc6"}, + {file = "regex-2023.6.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a4c3b7fa4cdaa69268748665a1a6ff70c014d39bb69c50fda64b396c9116cf77"}, + {file = "regex-2023.6.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:457b6cce21bee41ac292d6753d5e94dcbc5c9e3e3a834da285b0bde7aa4a11e9"}, + {file = "regex-2023.6.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:aad51907d74fc183033ad796dd4c2e080d1adcc4fd3c0fd4fd499f30c03011cd"}, + {file = "regex-2023.6.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:0385e73da22363778ef2324950e08b689abdf0b108a7d8decb403ad7f5191938"}, + {file = "regex-2023.6.3-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:c6a57b742133830eec44d9b2290daf5cbe0a2f1d6acee1b3c7b1c7b2f3606df7"}, + {file = "regex-2023.6.3-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:3e5219bf9e75993d73ab3d25985c857c77e614525fac9ae02b1bebd92f7cecac"}, + {file = "regex-2023.6.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:e5087a3c59eef624a4591ef9eaa6e9a8d8a94c779dade95d27c0bc24650261cd"}, + {file = "regex-2023.6.3-cp39-cp39-win32.whl", hash = "sha256:20326216cc2afe69b6e98528160b225d72f85ab080cbdf0b11528cbbaba2248f"}, + {file = "regex-2023.6.3-cp39-cp39-win_amd64.whl", hash = "sha256:bdff5eab10e59cf26bc479f565e25ed71a7d041d1ded04ccf9aee1d9f208487a"}, + {file = 
"regex-2023.6.3.tar.gz", hash = "sha256:72d1a25bf36d2050ceb35b517afe13864865268dfb45910e2e17a84be6cbfeb0"}, ] requests = [ {file = "requests-2.31.0-py3-none-any.whl", hash = "sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f"}, @@ -4078,6 +4114,105 @@ rfc3986-validator = [ {file = "rfc3986_validator-0.1.1-py2.py3-none-any.whl", hash = "sha256:2f235c432ef459970b4306369336b9d5dbdda31b510ca1e327636e01f528bfa9"}, {file = "rfc3986_validator-0.1.1.tar.gz", hash = "sha256:3d44bde7921b3b9ec3ae4e3adca370438eccebc676456449b145d533b240d055"}, ] +rpds-py = [ + {file = "rpds_py-0.8.10-cp310-cp310-macosx_10_7_x86_64.whl", hash = "sha256:93d06cccae15b3836247319eee7b6f1fdcd6c10dabb4e6d350d27bd0bdca2711"}, + {file = "rpds_py-0.8.10-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:3816a890a6a9e9f1de250afa12ca71c9a7a62f2b715a29af6aaee3aea112c181"}, + {file = "rpds_py-0.8.10-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a7c6304b894546b5a6bdc0fe15761fa53fe87d28527a7142dae8de3c663853e1"}, + {file = "rpds_py-0.8.10-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ad3bfb44c8840fb4be719dc58e229f435e227fbfbe133dc33f34981ff622a8f8"}, + {file = "rpds_py-0.8.10-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:14f1c356712f66653b777ecd8819804781b23dbbac4eade4366b94944c9e78ad"}, + {file = "rpds_py-0.8.10-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:82bb361cae4d0a627006dadd69dc2f36b7ad5dc1367af9d02e296ec565248b5b"}, + {file = "rpds_py-0.8.10-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b2e3c4f2a8e3da47f850d7ea0d7d56720f0f091d66add889056098c4b2fd576c"}, + {file = "rpds_py-0.8.10-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:15a90d0ac11b4499171067ae40a220d1ca3cb685ec0acc356d8f3800e07e4cb8"}, + {file = "rpds_py-0.8.10-cp310-cp310-musllinux_1_2_aarch64.whl", hash = 
"sha256:70bb9c8004b97b4ef7ae56a2aa56dfaa74734a0987c78e7e85f00004ab9bf2d0"}, + {file = "rpds_py-0.8.10-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:d64f9f88d5203274a002b54442cafc9c7a1abff2a238f3e767b70aadf919b451"}, + {file = "rpds_py-0.8.10-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:ccbbd276642788c4376fbe8d4e6c50f0fb4972ce09ecb051509062915891cbf0"}, + {file = "rpds_py-0.8.10-cp310-none-win32.whl", hash = "sha256:fafc0049add8043ad07ab5382ee80d80ed7e3699847f26c9a5cf4d3714d96a84"}, + {file = "rpds_py-0.8.10-cp310-none-win_amd64.whl", hash = "sha256:915031002c86a5add7c6fd4beb601b2415e8a1c956590a5f91d825858e92fe6e"}, + {file = "rpds_py-0.8.10-cp311-cp311-macosx_10_7_x86_64.whl", hash = "sha256:84eb541a44f7a18f07a6bfc48b95240739e93defe1fdfb4f2a295f37837945d7"}, + {file = "rpds_py-0.8.10-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f59996d0550894affaad8743e97b9b9c98f638b221fac12909210ec3d9294786"}, + {file = "rpds_py-0.8.10-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f9adb5664b78fcfcd830000416c8cc69853ef43cb084d645b3f1f0296edd9bae"}, + {file = "rpds_py-0.8.10-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f96f3f98fbff7af29e9edf9a6584f3c1382e7788783d07ba3721790625caa43e"}, + {file = "rpds_py-0.8.10-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:376b8de737401050bd12810003d207e824380be58810c031f10ec563ff6aef3d"}, + {file = "rpds_py-0.8.10-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5d1c2bc319428d50b3e0fa6b673ab8cc7fa2755a92898db3a594cbc4eeb6d1f7"}, + {file = "rpds_py-0.8.10-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:73a1e48430f418f0ac3dfd87860e4cc0d33ad6c0f589099a298cb53724db1169"}, + {file = "rpds_py-0.8.10-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:134ec8f14ca7dbc6d9ae34dac632cdd60939fe3734b5d287a69683c037c51acb"}, + {file = 
"rpds_py-0.8.10-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:4b519bac7c09444dd85280fd60f28c6dde4389c88dddf4279ba9b630aca3bbbe"}, + {file = "rpds_py-0.8.10-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:9cd57981d9fab04fc74438d82460f057a2419974d69a96b06a440822d693b3c0"}, + {file = "rpds_py-0.8.10-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:69d089c026f6a8b9d64a06ff67dc3be196707b699d7f6ca930c25f00cf5e30d8"}, + {file = "rpds_py-0.8.10-cp311-none-win32.whl", hash = "sha256:220bdcad2d2936f674650d304e20ac480a3ce88a40fe56cd084b5780f1d104d9"}, + {file = "rpds_py-0.8.10-cp311-none-win_amd64.whl", hash = "sha256:6c6a0225b8501d881b32ebf3f5807a08ad3685b5eb5f0a6bfffd3a6e039b2055"}, + {file = "rpds_py-0.8.10-cp312-cp312-macosx_10_7_x86_64.whl", hash = "sha256:e3d0cd3dff0e7638a7b5390f3a53057c4e347f4ef122ee84ed93fc2fb7ea4aa2"}, + {file = "rpds_py-0.8.10-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:d77dff3a5aa5eedcc3da0ebd10ff8e4969bc9541aa3333a8d41715b429e99f47"}, + {file = "rpds_py-0.8.10-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:41c89a366eae49ad9e65ed443a8f94aee762931a1e3723749d72aeac80f5ef2f"}, + {file = "rpds_py-0.8.10-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3793c21494bad1373da517001d0849eea322e9a049a0e4789e50d8d1329df8e7"}, + {file = "rpds_py-0.8.10-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:805a5f3f05d186c5d50de2e26f765ba7896d0cc1ac5b14ffc36fae36df5d2f10"}, + {file = "rpds_py-0.8.10-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b01b39ad5411563031ea3977bbbc7324d82b088e802339e6296f082f78f6115c"}, + {file = "rpds_py-0.8.10-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f3f1e860be21f3e83011116a65e7310486300e08d9a3028e73e8d13bb6c77292"}, + {file = "rpds_py-0.8.10-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = 
"sha256:a13c8e56c46474cd5958d525ce6a9996727a83d9335684e41f5192c83deb6c58"}, + {file = "rpds_py-0.8.10-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:93d99f957a300d7a4ced41615c45aeb0343bb8f067c42b770b505de67a132346"}, + {file = "rpds_py-0.8.10-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:148b0b38d719c0760e31ce9285a9872972bdd7774969a4154f40c980e5beaca7"}, + {file = "rpds_py-0.8.10-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:3cc5e5b5514796f45f03a568981971b12a3570f3de2e76114f7dc18d4b60a3c4"}, + {file = "rpds_py-0.8.10-cp38-cp38-macosx_10_7_x86_64.whl", hash = "sha256:e8e24b210a4deb5a7744971f8f77393005bae7f873568e37dfd9effe808be7f7"}, + {file = "rpds_py-0.8.10-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b41941583adce4242af003d2a8337b066ba6148ca435f295f31ac6d9e4ea2722"}, + {file = "rpds_py-0.8.10-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3c490204e16bca4f835dba8467869fe7295cdeaa096e4c5a7af97f3454a97991"}, + {file = "rpds_py-0.8.10-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:1ee45cd1d84beed6cbebc839fd85c2e70a3a1325c8cfd16b62c96e2ffb565eca"}, + {file = "rpds_py-0.8.10-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4a8ca409f1252e1220bf09c57290b76cae2f14723746215a1e0506472ebd7bdf"}, + {file = "rpds_py-0.8.10-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:96b293c0498c70162effb13100624c5863797d99df75f2f647438bd10cbf73e4"}, + {file = "rpds_py-0.8.10-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b4627520a02fccbd324b33c7a83e5d7906ec746e1083a9ac93c41ac7d15548c7"}, + {file = "rpds_py-0.8.10-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e39d7ab0c18ac99955b36cd19f43926450baba21e3250f053e0704d6ffd76873"}, + {file = "rpds_py-0.8.10-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:ba9f1d1ebe4b63801977cec7401f2d41e888128ae40b5441270d43140efcad52"}, + {file = 
"rpds_py-0.8.10-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:802f42200d8caf7f25bbb2a6464cbd83e69d600151b7e3b49f49a47fa56b0a38"}, + {file = "rpds_py-0.8.10-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:d19db6ba816e7f59fc806c690918da80a7d186f00247048cd833acdab9b4847b"}, + {file = "rpds_py-0.8.10-cp38-none-win32.whl", hash = "sha256:7947e6e2c2ad68b1c12ee797d15e5f8d0db36331200b0346871492784083b0c6"}, + {file = "rpds_py-0.8.10-cp38-none-win_amd64.whl", hash = "sha256:fa326b3505d5784436d9433b7980171ab2375535d93dd63fbcd20af2b5ca1bb6"}, + {file = "rpds_py-0.8.10-cp39-cp39-macosx_10_7_x86_64.whl", hash = "sha256:7b38a9ac96eeb6613e7f312cd0014de64c3f07000e8bf0004ad6ec153bac46f8"}, + {file = "rpds_py-0.8.10-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c4d42e83ddbf3445e6514f0aff96dca511421ed0392d9977d3990d9f1ba6753c"}, + {file = "rpds_py-0.8.10-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1b21575031478609db6dbd1f0465e739fe0e7f424a8e7e87610a6c7f68b4eb16"}, + {file = "rpds_py-0.8.10-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:574868858a7ff6011192c023a5289158ed20e3f3b94b54f97210a773f2f22921"}, + {file = "rpds_py-0.8.10-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ae40f4a70a1f40939d66ecbaf8e7edc144fded190c4a45898a8cfe19d8fc85ea"}, + {file = "rpds_py-0.8.10-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:37f7ee4dc86db7af3bac6d2a2cedbecb8e57ce4ed081f6464510e537589f8b1e"}, + {file = "rpds_py-0.8.10-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:695f642a3a5dbd4ad2ffbbacf784716ecd87f1b7a460843b9ddf965ccaeafff4"}, + {file = "rpds_py-0.8.10-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f43ab4cb04bde6109eb2555528a64dfd8a265cc6a9920a67dcbde13ef53a46c8"}, + {file = "rpds_py-0.8.10-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:a11ab0d97be374efd04f640c04fe5c2d3dabc6dfb998954ea946ee3aec97056d"}, + {file = 
"rpds_py-0.8.10-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:92cf5b3ee60eef41f41e1a2cabca466846fb22f37fc580ffbcb934d1bcab225a"}, + {file = "rpds_py-0.8.10-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:ceaac0c603bf5ac2f505a78b2dcab78d3e6b706be6596c8364b64cc613d208d2"}, + {file = "rpds_py-0.8.10-cp39-none-win32.whl", hash = "sha256:dd4f16e57c12c0ae17606c53d1b57d8d1c8792efe3f065a37cb3341340599d49"}, + {file = "rpds_py-0.8.10-cp39-none-win_amd64.whl", hash = "sha256:c03a435d26c3999c2a8642cecad5d1c4d10c961817536af52035f6f4ee2f5dd0"}, + {file = "rpds_py-0.8.10-pp310-pypy310_pp73-macosx_10_7_x86_64.whl", hash = "sha256:0da53292edafecba5e1d8c1218f99babf2ed0bf1c791d83c0ab5c29b57223068"}, + {file = "rpds_py-0.8.10-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:7d20a8ed227683401cc508e7be58cba90cc97f784ea8b039c8cd01111e6043e0"}, + {file = "rpds_py-0.8.10-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:97cab733d303252f7c2f7052bf021a3469d764fc2b65e6dbef5af3cbf89d4892"}, + {file = "rpds_py-0.8.10-pp310-pypy310_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8c398fda6df361a30935ab4c4bccb7f7a3daef2964ca237f607c90e9f3fdf66f"}, + {file = "rpds_py-0.8.10-pp310-pypy310_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2eb4b08c45f8f8d8254cdbfacd3fc5d6b415d64487fb30d7380b0d0569837bf1"}, + {file = "rpds_py-0.8.10-pp310-pypy310_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e7dfb1cbb895810fa2b892b68153c17716c6abaa22c7dc2b2f6dcf3364932a1c"}, + {file = "rpds_py-0.8.10-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:89c92b74e8bf6f53a6f4995fd52f4bd510c12f103ee62c99e22bc9e05d45583c"}, + {file = "rpds_py-0.8.10-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e9c0683cb35a9b5881b41bc01d5568ffc667910d9dbc632a1fba4e7d59e98773"}, + {file = "rpds_py-0.8.10-pp310-pypy310_pp73-musllinux_1_2_aarch64.whl", hash = 
"sha256:0eeb2731708207d0fe2619afe6c4dc8cb9798f7de052da891de5f19c0006c315"}, + {file = "rpds_py-0.8.10-pp310-pypy310_pp73-musllinux_1_2_i686.whl", hash = "sha256:7495010b658ec5b52835f21d8c8b1a7e52e194c50f095d4223c0b96c3da704b1"}, + {file = "rpds_py-0.8.10-pp310-pypy310_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:c72ebc22e70e04126158c46ba56b85372bc4d54d00d296be060b0db1671638a4"}, + {file = "rpds_py-0.8.10-pp38-pypy38_pp73-macosx_10_7_x86_64.whl", hash = "sha256:2cd3045e7f6375dda64ed7db1c5136826facb0159ea982f77d9cf6125025bd34"}, + {file = "rpds_py-0.8.10-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:2418cf17d653d24ffb8b75e81f9f60b7ba1b009a23298a433a4720b2a0a17017"}, + {file = "rpds_py-0.8.10-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1a2edf8173ac0c7a19da21bc68818be1321998528b5e3f748d6ee90c0ba2a1fd"}, + {file = "rpds_py-0.8.10-pp38-pypy38_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:7f29b8c55fd3a2bc48e485e37c4e2df3317f43b5cc6c4b6631c33726f52ffbb3"}, + {file = "rpds_py-0.8.10-pp38-pypy38_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9a7d20c1cf8d7b3960c5072c265ec47b3f72a0c608a9a6ee0103189b4f28d531"}, + {file = "rpds_py-0.8.10-pp38-pypy38_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:521fc8861a86ae54359edf53a15a05fabc10593cea7b3357574132f8427a5e5a"}, + {file = "rpds_py-0.8.10-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d5c191713e98e7c28800233f039a32a42c1a4f9a001a8a0f2448b07391881036"}, + {file = "rpds_py-0.8.10-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:083df0fafe199371206111583c686c985dddaf95ab3ee8e7b24f1fda54515d09"}, + {file = "rpds_py-0.8.10-pp38-pypy38_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:ed41f3f49507936a6fe7003985ea2574daccfef999775525d79eb67344e23767"}, + {file = "rpds_py-0.8.10-pp38-pypy38_pp73-musllinux_1_2_i686.whl", hash = 
"sha256:2614c2732bf45de5c7f9e9e54e18bc78693fa2f635ae58d2895b7965e470378c"}, + {file = "rpds_py-0.8.10-pp38-pypy38_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:c60528671d9d467009a6ec284582179f6b88651e83367d0ab54cb739021cd7de"}, + {file = "rpds_py-0.8.10-pp39-pypy39_pp73-macosx_10_7_x86_64.whl", hash = "sha256:ee744fca8d1ea822480a2a4e7c5f2e1950745477143668f0b523769426060f29"}, + {file = "rpds_py-0.8.10-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:a38b9f526d0d6cbdaa37808c400e3d9f9473ac4ff64d33d9163fd05d243dbd9b"}, + {file = "rpds_py-0.8.10-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:60e0e86e870350e03b3e25f9b1dd2c6cc72d2b5f24e070249418320a6f9097b7"}, + {file = "rpds_py-0.8.10-pp39-pypy39_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f53f55a8852f0e49b0fc76f2412045d6ad9d5772251dea8f55ea45021616e7d5"}, + {file = "rpds_py-0.8.10-pp39-pypy39_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c493365d3fad241d52f096e4995475a60a80f4eba4d3ff89b713bc65c2ca9615"}, + {file = "rpds_py-0.8.10-pp39-pypy39_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:300eb606e6b94a7a26f11c8cc8ee59e295c6649bd927f91e1dbd37a4c89430b6"}, + {file = "rpds_py-0.8.10-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5a665f6f1a87614d1c3039baf44109094926dedf785e346d8b0a728e9cabd27a"}, + {file = "rpds_py-0.8.10-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:927d784648211447201d4c6f1babddb7971abad922b32257ab74de2f2750fad0"}, + {file = "rpds_py-0.8.10-pp39-pypy39_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:c200b30dd573afa83847bed7e3041aa36a8145221bf0cfdfaa62d974d720805c"}, + {file = "rpds_py-0.8.10-pp39-pypy39_pp73-musllinux_1_2_i686.whl", hash = "sha256:08166467258fd0240a1256fce272f689f2360227ee41c72aeea103e9e4f63d2b"}, + {file = "rpds_py-0.8.10-pp39-pypy39_pp73-musllinux_1_2_x86_64.whl", hash = 
"sha256:996cc95830de9bc22b183661d95559ec6b3cd900ad7bc9154c4cbf5be0c9b734"}, + {file = "rpds_py-0.8.10.tar.gz", hash = "sha256:13e643ce8ad502a0263397362fb887594b49cf84bf518d6038c16f235f2bcea4"}, +] rsa = [ {file = "rsa-4.9-py3-none-any.whl", hash = "sha256:90260d9058e514786967344d0ef75fa8727eed8a7d2e43ce9f4bcf1b536174f7"}, {file = "rsa-4.9.tar.gz", hash = "sha256:e38464a49c6c85d7f1351b0126661487a7e0a14a50f1675ec50eb34d4f20ef21"}, @@ -4122,31 +4257,29 @@ ruamel-yaml-clib = [ {file = "ruamel.yaml.clib-0.2.7.tar.gz", hash = "sha256:1f08fd5a2bea9c4180db71678e850b995d2a5f4537be0e94557668cf0f5f9497"}, ] schematic-db = [ - {file = "schematic_db-0.0.20-py3-none-any.whl", hash = "sha256:e1c5a3774156fe510c703df74fee5e7b5f38b721d870c9161dcc657b6fe18723"}, - {file = "schematic_db-0.0.20.tar.gz", hash = "sha256:577cdb32004b6ab5d383a3411e7c812410ae56d46d5a7065af57b488ffe5fe0a"}, + {file = "schematic_db-0.0.29-py3-none-any.whl", hash = "sha256:e43f1d7c06d877d47036c5a480ac8f22333daa967df67c4d8316091ff4ddc0a5"}, + {file = "schematic_db-0.0.29.tar.gz", hash = "sha256:77d338b34dd8f1e75b9df5b9b3f20de35087285079019d48d162de0d131f3ffb"}, ] scipy = [ - {file = "scipy-1.10.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e7354fd7527a4b0377ce55f286805b34e8c54b91be865bac273f527e1b839019"}, - {file = "scipy-1.10.1-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:4b3f429188c66603a1a5c549fb414e4d3bdc2a24792e061ffbd607d3d75fd84e"}, - {file = "scipy-1.10.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1553b5dcddd64ba9a0d95355e63fe6c3fc303a8fd77c7bc91e77d61363f7433f"}, - {file = "scipy-1.10.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4c0ff64b06b10e35215abce517252b375e580a6125fd5fdf6421b98efbefb2d2"}, - {file = "scipy-1.10.1-cp310-cp310-win_amd64.whl", hash = "sha256:fae8a7b898c42dffe3f7361c40d5952b6bf32d10c4569098d276b4c547905ee1"}, - {file = "scipy-1.10.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = 
"sha256:0f1564ea217e82c1bbe75ddf7285ba0709ecd503f048cb1236ae9995f64217bd"}, - {file = "scipy-1.10.1-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:d925fa1c81b772882aa55bcc10bf88324dadb66ff85d548c71515f6689c6dac5"}, - {file = "scipy-1.10.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aaea0a6be54462ec027de54fca511540980d1e9eea68b2d5c1dbfe084797be35"}, - {file = "scipy-1.10.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:15a35c4242ec5f292c3dd364a7c71a61be87a3d4ddcc693372813c0b73c9af1d"}, - {file = "scipy-1.10.1-cp311-cp311-win_amd64.whl", hash = "sha256:43b8e0bcb877faf0abfb613d51026cd5cc78918e9530e375727bf0625c82788f"}, - {file = "scipy-1.10.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:5678f88c68ea866ed9ebe3a989091088553ba12c6090244fdae3e467b1139c35"}, - {file = "scipy-1.10.1-cp38-cp38-macosx_12_0_arm64.whl", hash = "sha256:39becb03541f9e58243f4197584286e339029e8908c46f7221abeea4b749fa88"}, - {file = "scipy-1.10.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bce5869c8d68cf383ce240e44c1d9ae7c06078a9396df68ce88a1230f93a30c1"}, - {file = "scipy-1.10.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:07c3457ce0b3ad5124f98a86533106b643dd811dd61b548e78cf4c8786652f6f"}, - {file = "scipy-1.10.1-cp38-cp38-win_amd64.whl", hash = "sha256:049a8bbf0ad95277ffba9b3b7d23e5369cc39e66406d60422c8cfef40ccc8415"}, - {file = "scipy-1.10.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:cd9f1027ff30d90618914a64ca9b1a77a431159df0e2a195d8a9e8a04c78abf9"}, - {file = "scipy-1.10.1-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:79c8e5a6c6ffaf3a2262ef1be1e108a035cf4f05c14df56057b64acc5bebffb6"}, - {file = "scipy-1.10.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:51af417a000d2dbe1ec6c372dfe688e041a7084da4fdd350aeb139bd3fb55353"}, - {file = "scipy-1.10.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:1b4735d6c28aad3cdcf52117e0e91d6b39acd4272f3f5cd9907c24ee931ad601"}, - {file = "scipy-1.10.1-cp39-cp39-win_amd64.whl", hash = "sha256:7ff7f37b1bf4417baca958d254e8e2875d0cc23aaadbe65b3d5b3077b0eb23ea"}, - {file = "scipy-1.10.1.tar.gz", hash = "sha256:2cf9dfb80a7b4589ba4c40ce7588986d6d5cebc5457cad2c2880f6bc2d42f3a5"}, + {file = "scipy-1.11.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:aec8c62fbe52914f9cf28d846cf0401dd80ab80788bbab909434eb336ed07c04"}, + {file = "scipy-1.11.1-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:3b9963798df1d8a52db41a6fc0e6fa65b1c60e85d73da27ae8bb754de4792481"}, + {file = "scipy-1.11.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e8eb42db36526b130dfbc417609498a6192381abc1975b91e3eb238e0b41c1a"}, + {file = "scipy-1.11.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:366a6a937110d80dca4f63b3f5b00cc89d36f678b2d124a01067b154e692bab1"}, + {file = "scipy-1.11.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:08d957ca82d3535b3b9ba6c8ff355d78fe975271874e2af267cb5add5bd78625"}, + {file = "scipy-1.11.1-cp310-cp310-win_amd64.whl", hash = "sha256:e866514bc2d660608447b6ba95c8900d591f2865c07cca0aa4f7ff3c4ca70f30"}, + {file = "scipy-1.11.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ba94eeef3c9caa4cea7b402a35bb02a5714ee1ee77eb98aca1eed4543beb0f4c"}, + {file = "scipy-1.11.1-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:512fdc18c65f76dadaca139348e525646d440220d8d05f6d21965b8d4466bccd"}, + {file = "scipy-1.11.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cce154372f0ebe88556ed06d7b196e9c2e0c13080ecb58d0f35062dc7cc28b47"}, + {file = "scipy-1.11.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b4bb943010203465ac81efa392e4645265077b4d9e99b66cf3ed33ae12254173"}, + {file = "scipy-1.11.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = 
"sha256:249cfa465c379c9bb2c20123001e151ff5e29b351cbb7f9c91587260602c58d0"}, + {file = "scipy-1.11.1-cp311-cp311-win_amd64.whl", hash = "sha256:ffb28e3fa31b9c376d0fb1f74c1f13911c8c154a760312fbee87a21eb21efe31"}, + {file = "scipy-1.11.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:39154437654260a52871dfde852adf1b93b1d1bc5dc0ffa70068f16ec0be2624"}, + {file = "scipy-1.11.1-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:b588311875c58d1acd4ef17c983b9f1ab5391755a47c3d70b6bd503a45bfaf71"}, + {file = "scipy-1.11.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d51565560565a0307ed06fa0ec4c6f21ff094947d4844d6068ed04400c72d0c3"}, + {file = "scipy-1.11.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b41a0f322b4eb51b078cb3441e950ad661ede490c3aca66edef66f4b37ab1877"}, + {file = "scipy-1.11.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:396fae3f8c12ad14c5f3eb40499fd06a6fef8393a6baa352a652ecd51e74e029"}, + {file = "scipy-1.11.1-cp39-cp39-win_amd64.whl", hash = "sha256:be8c962a821957fdde8c4044efdab7a140c13294997a407eaee777acf63cbf0c"}, + {file = "scipy-1.11.1.tar.gz", hash = "sha256:fb5b492fa035334fd249f0973cc79ecad8b09c604b42a127a677b45a9a3d4289"}, ] secretstorage = [ {file = "SecretStorage-3.3.3-py3-none-any.whl", hash = "sha256:f356e6628222568e3af06f2eba8df495efa13b3b63081dafd4f7d9a7b7bc9f99"}, @@ -4209,47 +4342,44 @@ sphinxcontrib-serializinghtml = [ {file = "sphinxcontrib_serializinghtml-1.1.5-py2.py3-none-any.whl", hash = "sha256:352a9a00ae864471d3a7ead8d7d79f5fc0b57e8b3f95e9867eb9eb28999b92fd"}, ] sqlalchemy = [ - {file = "SQLAlchemy-1.4.48-cp27-cp27m-macosx_10_14_x86_64.whl", hash = "sha256:4bac3aa3c3d8bc7408097e6fe8bf983caa6e9491c5d2e2488cfcfd8106f13b6a"}, - {file = "SQLAlchemy-1.4.48-cp27-cp27m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:dbcae0e528d755f4522cad5842f0942e54b578d79f21a692c44d91352ea6d64e"}, - {file = "SQLAlchemy-1.4.48-cp27-cp27m-win32.whl", hash = 
"sha256:cbbe8b8bffb199b225d2fe3804421b7b43a0d49983f81dc654d0431d2f855543"}, - {file = "SQLAlchemy-1.4.48-cp27-cp27m-win_amd64.whl", hash = "sha256:627e04a5d54bd50628fc8734d5fc6df2a1aa5962f219c44aad50b00a6cdcf965"}, - {file = "SQLAlchemy-1.4.48-cp27-cp27mu-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:9af1db7a287ef86e0f5cd990b38da6bd9328de739d17e8864f1817710da2d217"}, - {file = "SQLAlchemy-1.4.48-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:ce7915eecc9c14a93b73f4e1c9d779ca43e955b43ddf1e21df154184f39748e5"}, - {file = "SQLAlchemy-1.4.48-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5381ddd09a99638f429f4cbe1b71b025bed318f6a7b23e11d65f3eed5e181c33"}, - {file = "SQLAlchemy-1.4.48-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:87609f6d4e81a941a17e61a4c19fee57f795e96f834c4f0a30cee725fc3f81d9"}, - {file = "SQLAlchemy-1.4.48-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fb0808ad34167f394fea21bd4587fc62f3bd81bba232a1e7fbdfa17e6cfa7cd7"}, - {file = "SQLAlchemy-1.4.48-cp310-cp310-win32.whl", hash = "sha256:d53cd8bc582da5c1c8c86b6acc4ef42e20985c57d0ebc906445989df566c5603"}, - {file = "SQLAlchemy-1.4.48-cp310-cp310-win_amd64.whl", hash = "sha256:4355e5915844afdc5cf22ec29fba1010166e35dd94a21305f49020022167556b"}, - {file = "SQLAlchemy-1.4.48-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:066c2b0413e8cb980e6d46bf9d35ca83be81c20af688fedaef01450b06e4aa5e"}, - {file = "SQLAlchemy-1.4.48-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c99bf13e07140601d111a7c6f1fc1519914dd4e5228315bbda255e08412f61a4"}, - {file = "SQLAlchemy-1.4.48-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2ee26276f12614d47cc07bc85490a70f559cba965fb178b1c45d46ffa8d73fda"}, - {file = 
"SQLAlchemy-1.4.48-cp311-cp311-win32.whl", hash = "sha256:49c312bcff4728bffc6fb5e5318b8020ed5c8b958a06800f91859fe9633ca20e"}, - {file = "SQLAlchemy-1.4.48-cp311-cp311-win_amd64.whl", hash = "sha256:cef2e2abc06eab187a533ec3e1067a71d7bbec69e582401afdf6d8cad4ba3515"}, - {file = "SQLAlchemy-1.4.48-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:3509159e050bd6d24189ec7af373359f07aed690db91909c131e5068176c5a5d"}, - {file = "SQLAlchemy-1.4.48-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2fc2ab4d9f6d9218a5caa4121bdcf1125303482a1cdcfcdbd8567be8518969c0"}, - {file = "SQLAlchemy-1.4.48-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:e1ddbbcef9bcedaa370c03771ebec7e39e3944782bef49e69430383c376a250b"}, - {file = "SQLAlchemy-1.4.48-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6f82d8efea1ca92b24f51d3aea1a82897ed2409868a0af04247c8c1e4fef5890"}, - {file = "SQLAlchemy-1.4.48-cp36-cp36m-win32.whl", hash = "sha256:e3e98d4907805b07743b583a99ecc58bf8807ecb6985576d82d5e8ae103b5272"}, - {file = "SQLAlchemy-1.4.48-cp36-cp36m-win_amd64.whl", hash = "sha256:25887b4f716e085a1c5162f130b852f84e18d2633942c8ca40dfb8519367c14f"}, - {file = "SQLAlchemy-1.4.48-cp37-cp37m-macosx_10_15_x86_64.whl", hash = "sha256:0817c181271b0ce5df1aa20949f0a9e2426830fed5ecdcc8db449618f12c2730"}, - {file = "SQLAlchemy-1.4.48-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fe1dd2562313dd9fe1778ed56739ad5d9aae10f9f43d9f4cf81d65b0c85168bb"}, - {file = "SQLAlchemy-1.4.48-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:68413aead943883b341b2b77acd7a7fe2377c34d82e64d1840860247cec7ff7c"}, - {file = "SQLAlchemy-1.4.48-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:fbde5642104ac6e95f96e8ad6d18d9382aa20672008cf26068fe36f3004491df"}, - {file = "SQLAlchemy-1.4.48-cp37-cp37m-win32.whl", hash = "sha256:11c6b1de720f816c22d6ad3bbfa2f026f89c7b78a5c4ffafb220e0183956a92a"}, - {file = "SQLAlchemy-1.4.48-cp37-cp37m-win_amd64.whl", hash = "sha256:eb5464ee8d4bb6549d368b578e9529d3c43265007193597ddca71c1bae6174e6"}, - {file = "SQLAlchemy-1.4.48-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:92e6133cf337c42bfee03ca08c62ba0f2d9695618c8abc14a564f47503157be9"}, - {file = "SQLAlchemy-1.4.48-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:44d29a3fc6d9c45962476b470a81983dd8add6ad26fdbfae6d463b509d5adcda"}, - {file = "SQLAlchemy-1.4.48-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:005e942b451cad5285015481ae4e557ff4154dde327840ba91b9ac379be3b6ce"}, - {file = "SQLAlchemy-1.4.48-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9c8cfe951ed074ba5e708ed29c45397a95c4143255b0d022c7c8331a75ae61f3"}, - {file = "SQLAlchemy-1.4.48-cp38-cp38-win32.whl", hash = "sha256:2b9af65cc58726129d8414fc1a1a650dcdd594ba12e9c97909f1f57d48e393d3"}, - {file = "SQLAlchemy-1.4.48-cp38-cp38-win_amd64.whl", hash = "sha256:2b562e9d1e59be7833edf28b0968f156683d57cabd2137d8121806f38a9d58f4"}, - {file = "SQLAlchemy-1.4.48-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:a1fc046756cf2a37d7277c93278566ddf8be135c6a58397b4c940abf837011f4"}, - {file = "SQLAlchemy-1.4.48-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9d9b55252d2ca42a09bcd10a697fa041e696def9dfab0b78c0aaea1485551a08"}, - {file = "SQLAlchemy-1.4.48-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:6dab89874e72a9ab5462997846d4c760cdb957958be27b03b49cf0de5e5c327c"}, - {file = 
"SQLAlchemy-1.4.48-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1fd8b5ee5a3acc4371f820934b36f8109ce604ee73cc668c724abb054cebcb6e"}, - {file = "SQLAlchemy-1.4.48-cp39-cp39-win32.whl", hash = "sha256:eee09350fd538e29cfe3a496ec6f148504d2da40dbf52adefb0d2f8e4d38ccc4"}, - {file = "SQLAlchemy-1.4.48-cp39-cp39-win_amd64.whl", hash = "sha256:7ad2b0f6520ed5038e795cc2852eb5c1f20fa6831d73301ced4aafbe3a10e1f6"}, - {file = "SQLAlchemy-1.4.48.tar.gz", hash = "sha256:b47bc287096d989a0838ce96f7d8e966914a24da877ed41a7531d44b55cdb8df"}, + {file = "SQLAlchemy-1.4.49-cp27-cp27m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:2e126cf98b7fd38f1e33c64484406b78e937b1a280e078ef558b95bf5b6895f6"}, + {file = "SQLAlchemy-1.4.49-cp27-cp27mu-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:03db81b89fe7ef3857b4a00b63dedd632d6183d4ea5a31c5d8a92e000a41fc71"}, + {file = "SQLAlchemy-1.4.49-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:95b9df9afd680b7a3b13b38adf6e3a38995da5e162cc7524ef08e3be4e5ed3e1"}, + {file = "SQLAlchemy-1.4.49-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a63e43bf3f668c11bb0444ce6e809c1227b8f067ca1068898f3008a273f52b09"}, + {file = "SQLAlchemy-1.4.49-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f835c050ebaa4e48b18403bed2c0fda986525896efd76c245bdd4db995e51a4c"}, + {file = "SQLAlchemy-1.4.49-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9c21b172dfb22e0db303ff6419451f0cac891d2e911bb9fbf8003d717f1bcf91"}, + {file = "SQLAlchemy-1.4.49-cp310-cp310-win32.whl", hash = "sha256:5fb1ebdfc8373b5a291485757bd6431de8d7ed42c27439f543c81f6c8febd729"}, + {file = "SQLAlchemy-1.4.49-cp310-cp310-win_amd64.whl", hash = "sha256:f8a65990c9c490f4651b5c02abccc9f113a7f56fa482031ac8cb88b70bc8ccaa"}, + {file = 
"SQLAlchemy-1.4.49-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:8923dfdf24d5aa8a3adb59723f54118dd4fe62cf59ed0d0d65d940579c1170a4"}, + {file = "SQLAlchemy-1.4.49-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a9ab2c507a7a439f13ca4499db6d3f50423d1d65dc9b5ed897e70941d9e135b0"}, + {file = "SQLAlchemy-1.4.49-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5debe7d49b8acf1f3035317e63d9ec8d5e4d904c6e75a2a9246a119f5f2fdf3d"}, + {file = "SQLAlchemy-1.4.49-cp311-cp311-win32.whl", hash = "sha256:82b08e82da3756765c2e75f327b9bf6b0f043c9c3925fb95fb51e1567fa4ee87"}, + {file = "SQLAlchemy-1.4.49-cp311-cp311-win_amd64.whl", hash = "sha256:171e04eeb5d1c0d96a544caf982621a1711d078dbc5c96f11d6469169bd003f1"}, + {file = "SQLAlchemy-1.4.49-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:36e58f8c4fe43984384e3fbe6341ac99b6b4e083de2fe838f0fdb91cebe9e9cb"}, + {file = "SQLAlchemy-1.4.49-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b31e67ff419013f99ad6f8fc73ee19ea31585e1e9fe773744c0f3ce58c039c30"}, + {file = "SQLAlchemy-1.4.49-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:c14b29d9e1529f99efd550cd04dbb6db6ba5d690abb96d52de2bff4ed518bc95"}, + {file = "SQLAlchemy-1.4.49-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c40f3470e084d31247aea228aa1c39bbc0904c2b9ccbf5d3cfa2ea2dac06f26d"}, + {file = "SQLAlchemy-1.4.49-cp36-cp36m-win32.whl", hash = "sha256:706bfa02157b97c136547c406f263e4c6274a7b061b3eb9742915dd774bbc264"}, + {file = "SQLAlchemy-1.4.49-cp36-cp36m-win_amd64.whl", hash = "sha256:a7f7b5c07ae5c0cfd24c2db86071fb2a3d947da7bd487e359cc91e67ac1c6d2e"}, + {file = "SQLAlchemy-1.4.49-cp37-cp37m-macosx_11_0_x86_64.whl", hash = "sha256:4afbbf5ef41ac18e02c8dc1f86c04b22b7a2125f2a030e25bbb4aff31abb224b"}, + {file = 
"SQLAlchemy-1.4.49-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:24e300c0c2147484a002b175f4e1361f102e82c345bf263242f0449672a4bccf"}, + {file = "SQLAlchemy-1.4.49-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:201de072b818f8ad55c80d18d1a788729cccf9be6d9dc3b9d8613b053cd4836d"}, + {file = "SQLAlchemy-1.4.49-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7653ed6817c710d0c95558232aba799307d14ae084cc9b1f4c389157ec50df5c"}, + {file = "SQLAlchemy-1.4.49-cp37-cp37m-win32.whl", hash = "sha256:647e0b309cb4512b1f1b78471fdaf72921b6fa6e750b9f891e09c6e2f0e5326f"}, + {file = "SQLAlchemy-1.4.49-cp37-cp37m-win_amd64.whl", hash = "sha256:ab73ed1a05ff539afc4a7f8cf371764cdf79768ecb7d2ec691e3ff89abbc541e"}, + {file = "SQLAlchemy-1.4.49-cp38-cp38-macosx_11_0_x86_64.whl", hash = "sha256:37ce517c011560d68f1ffb28af65d7e06f873f191eb3a73af5671e9c3fada08a"}, + {file = "SQLAlchemy-1.4.49-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a1878ce508edea4a879015ab5215546c444233881301e97ca16fe251e89f1c55"}, + {file = "SQLAlchemy-1.4.49-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:0e8e608983e6f85d0852ca61f97e521b62e67969e6e640fe6c6b575d4db68557"}, + {file = "SQLAlchemy-1.4.49-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ccf956da45290df6e809ea12c54c02ace7f8ff4d765d6d3dfb3655ee876ce58d"}, + {file = "SQLAlchemy-1.4.49-cp38-cp38-win32.whl", hash = "sha256:f167c8175ab908ce48bd6550679cc6ea20ae169379e73c7720a28f89e53aa532"}, + {file = "SQLAlchemy-1.4.49-cp38-cp38-win_amd64.whl", hash = "sha256:45806315aae81a0c202752558f0df52b42d11dd7ba0097bf71e253b4215f34f4"}, + {file = "SQLAlchemy-1.4.49-cp39-cp39-macosx_11_0_x86_64.whl", hash = 
"sha256:b6d0c4b15d65087738a6e22e0ff461b407533ff65a73b818089efc8eb2b3e1de"}, + {file = "SQLAlchemy-1.4.49-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a843e34abfd4c797018fd8d00ffffa99fd5184c421f190b6ca99def4087689bd"}, + {file = "SQLAlchemy-1.4.49-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:1c890421651b45a681181301b3497e4d57c0d01dc001e10438a40e9a9c25ee77"}, + {file = "SQLAlchemy-1.4.49-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d26f280b8f0a8f497bc10573849ad6dc62e671d2468826e5c748d04ed9e670d5"}, + {file = "SQLAlchemy-1.4.49-cp39-cp39-win32.whl", hash = "sha256:ec2268de67f73b43320383947e74700e95c6770d0c68c4e615e9897e46296294"}, + {file = "SQLAlchemy-1.4.49-cp39-cp39-win_amd64.whl", hash = "sha256:bbdf16372859b8ed3f4d05f925a984771cd2abd18bd187042f24be4886c2a15f"}, + {file = "SQLAlchemy-1.4.49.tar.gz", hash = "sha256:06ff25cbae30c396c4b7737464f2a7fc37a67b7da409993b182b024cec80aed9"}, ] sqlalchemy-utils = [ {file = "SQLAlchemy-Utils-0.38.3.tar.gz", hash = "sha256:9f9afba607a40455cf703adfa9846584bf26168a0c5a60a70063b70d65051f4d"}, @@ -4264,8 +4394,8 @@ swagger-ui-bundle = [ {file = "swagger_ui_bundle-0.0.9.tar.gz", hash = "sha256:b462aa1460261796ab78fd4663961a7f6f347ce01760f1303bbbdf630f11f516"}, ] synapseclient = [ - {file = "synapseclient-2.7.1-py3-none-any.whl", hash = "sha256:c15efaec148dda18faa5a1736846f427713ceaa656178d5e7044fcd87fa8aa05"}, - {file = "synapseclient-2.7.1.tar.gz", hash = "sha256:c6a7d5ff834c825390a0514f3f0020876ea4fb8c863889894b9a636458278d69"}, + {file = "synapseclient-2.7.2-py3-none-any.whl", hash = "sha256:dd8b1a1b4667d08311bb651469431f43fe2eeab83c0ef1fe5a03c2929aeb26cd"}, + {file = "synapseclient-2.7.2.tar.gz", hash = "sha256:dc5a61f9f495109a0c89aa7d42b641b6ff278280d7961fb450dd5015704fe15b"}, ] tabulate = [ {file = "tabulate-0.9.0-py3-none-any.whl", hash = 
"sha256:024ca478df22e9340661486f85298cff5f6dcdba14f3813e8830015b9ed1948f"}, @@ -4333,8 +4463,8 @@ tzlocal = [ {file = "tzlocal-5.0.1.tar.gz", hash = "sha256:46eb99ad4bdb71f3f72b7d24f4267753e240944ecfc16f25d2719ba89827a803"}, ] uri-template = [ - {file = "uri_template-1.2.0-py3-none-any.whl", hash = "sha256:f1699c77b73b925cf4937eae31ab282a86dc885c333f2e942513f08f691fc7db"}, - {file = "uri_template-1.2.0.tar.gz", hash = "sha256:934e4d09d108b70eb8a24410af8615294d09d279ce0e7cbcdaef1bd21f932b06"}, + {file = "uri-template-1.3.0.tar.gz", hash = "sha256:0e00f8eb65e18c7de20d595a14336e9f337ead580c70934141624b6d1ffdacc7"}, + {file = "uri_template-1.3.0-py3-none-any.whl", hash = "sha256:a44a133ea12d44a0c0f06d7d42a52d71282e77e2f937d8abd5655b8d56fc1363"}, ] uritemplate = [ {file = "uritemplate-4.1.1-py2.py3-none-any.whl", hash = "sha256:830c08b8d99bdd312ea4ead05994a38e8936266f84b9a7878232db50b044e02e"}, @@ -4363,16 +4493,16 @@ webencodings = [ {file = "webencodings-0.5.1.tar.gz", hash = "sha256:b36a1c245f2d304965eb4e0a82848379241dc04b865afcc4aab16748587e1923"}, ] websocket-client = [ - {file = "websocket-client-1.5.2.tar.gz", hash = "sha256:c7d67c13b928645f259d9b847ab5b57fd2d127213ca41ebd880de1f553b7c23b"}, - {file = "websocket_client-1.5.2-py3-none-any.whl", hash = "sha256:f8c64e28cd700e7ba1f04350d66422b6833b82a796b525a51e740b8cc8dab4b1"}, + {file = "websocket-client-1.6.1.tar.gz", hash = "sha256:c951af98631d24f8df89ab1019fc365f2227c0892f12fd150e935607c79dd0dd"}, + {file = "websocket_client-1.6.1-py3-none-any.whl", hash = "sha256:f1f9f2ad5291f0225a49efad77abf9e700b6fef553900623060dad6e26503b9d"}, ] werkzeug = [ {file = "Werkzeug-2.1.2-py3-none-any.whl", hash = "sha256:72a4b735692dd3135217911cbeaa1be5fa3f62bffb8745c5215420a03dc55255"}, {file = "Werkzeug-2.1.2.tar.gz", hash = "sha256:1ce08e8093ed67d638d63879fd1ba3735817f7a80de3674d293f5984f25fb6e6"}, ] widgetsnbextension = [ - {file = "widgetsnbextension-4.0.7-py3-none-any.whl", hash = 
"sha256:be3228a73bbab189a16be2d4a3cd89ecbd4e31948bfdc64edac17dcdee3cd99c"}, - {file = "widgetsnbextension-4.0.7.tar.gz", hash = "sha256:ea67c17a7cd4ae358f8f46c3b304c40698bc0423732e3f273321ee141232c8be"}, + {file = "widgetsnbextension-4.0.8-py3-none-any.whl", hash = "sha256:2e37f0ce9da11651056280c7efe96f2db052fe8fc269508e3724f5cbd6c93018"}, + {file = "widgetsnbextension-4.0.8.tar.gz", hash = "sha256:9ec291ba87c2dfad42c3d5b6f68713fa18be1acd7476569516b2431682315c17"}, ] wrapt = [ {file = "wrapt-1.15.0-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:ca1cccf838cd28d5a0883b342474c630ac48cac5df0ee6eacc9c7290f76b11c1"}, @@ -4452,6 +4582,6 @@ wrapt = [ {file = "wrapt-1.15.0.tar.gz", hash = "sha256:d06730c6aed78cee4126234cf2d071e01b44b915e725a6cb439a879ec9754a3a"}, ] zipp = [ - {file = "zipp-3.15.0-py3-none-any.whl", hash = "sha256:48904fc76a60e542af151aded95726c1a5c34ed43ab4134b597665c86d7ad556"}, - {file = "zipp-3.15.0.tar.gz", hash = "sha256:112929ad649da941c23de50f356a2b5570c954b65150642bccdd66bf194d224b"}, + {file = "zipp-3.16.1-py3-none-any.whl", hash = "sha256:0b37c326d826d5ca35f2b9685cd750292740774ef16190008b00a0227c256fe0"}, + {file = "zipp-3.16.1.tar.gz", hash = "sha256:857b158da2cbf427b376da1c24fd11faecbac5a4ac7523c3607f8a01f94c2ec0"}, ] diff --git a/pyproject.toml b/pyproject.toml index 3ae1b4cd5..cb6a9d194 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -70,7 +70,7 @@ Flask-Cors = "^3.0.10" pdoc = "^12.2.0" dateparser = "^1.1.4" pandarallel = "^1.6.4" -schematic-db = {version = "^0.0.20", extras = ["synapse"]} +schematic-db = {version = "^0.0.29", extras = ["synapse"]} pyopenssl = "^23.0.0" typing-extensions = "<4.6.0" @@ -80,8 +80,8 @@ pytest-cov = "^4.0.0" pytest-mock = "^3.5.1" flake8 = "^6.0.0" python-dotenv = "^0.21.0" -black = "^22.6.0" -mypy = "^0.982" +black = "^23.7.0" +mypy = "^1.4.1" pylint = "^2.16.1" [tool.poetry.group.aws] diff --git a/schematic/__init__.py b/schematic/__init__.py index cca9173b5..de46c4fe4 100644 --- a/schematic/__init__.py 
+++ b/schematic/__init__.py @@ -4,7 +4,7 @@ import click import click_log -from schematic.configuration import CONFIG +from schematic.configuration.configuration import CONFIG from schematic.loader import LOADER from schematic.utils.google_api_utils import download_creds_file from schematic.utils.cli_utils import query_dict @@ -35,11 +35,11 @@ def init(config): """Initialize authentication for schematic.""" try: logger.debug(f"Loading config file contents in '{config}'") - obj = CONFIG.load_config(config) - except ValueError as e: + CONFIG.load_config(config) + except ValueError as exc: logger.error("'--config' not provided or environment variable not set.") - logger.exception(e) + logger.exception(exc) sys.exit(1) - # download crdentials file based on selected mode of authentication + # download credentials file based on selected mode of authentication download_creds_file() diff --git a/schematic/configuration.py b/schematic/configuration.py deleted file mode 100644 index b6ff6da02..000000000 --- a/schematic/configuration.py +++ /dev/null @@ -1,132 +0,0 @@ -from typing import Optional -import os -import yaml - - -class Configuration(object): - def __init__(self): - # path to config.yml file - self.CONFIG_PATH = None - # entire configuration data - self.DATA = None - - def __getattribute__(self, name): - value = super().__getattribute__(name) - if value is None and "SCHEMATIC_CONFIG_CONTENT" in os.environ: - self.load_config_content_from_env() - value = super().__getattribute__(name) - elif value is None and "SCHEMATIC_CONFIG" in os.environ: - self.load_config_from_env() - value = super().__getattribute__(name) - elif ( - value is None - and "SCHEMATIC_CONFIG" not in os.environ - and "SCHEMATIC_CONFIG_CONTENT" not in os.environ - ): - raise AttributeError( - "The '%s' configuration field was accessed, but it hasn't been " - "set yet, presumably because the schematic.CONFIG.load_config() " - "method hasn't been run yet. 
Alternatively, you can re-run this " - "code with the 'SCHEMATIC_CONFIG' environment variable set to " - "the config.yml file, which will be automatically loaded." % name - ) - return value - - def __getitem__(self, key): - return self.DATA[key] - - def get(self, key, default): - try: - value = self[key] - except AttributeError or KeyError: - value = default - return value - - def load_config_content(self, str_yaml: str) -> Optional[dict]: - try: - config_data = yaml.safe_load(str_yaml) - except yaml.YAMLError as exc: - print(exc) - return None - return config_data - - @staticmethod - def load_yaml(file_path: str) -> Optional[dict]: - with open(file_path, "r") as stream: - try: - config_data = yaml.safe_load(stream) - except yaml.YAMLError as exc: - print(exc) - return None - return config_data - - def normalize_path(self, path): - - if self.CONFIG_PATH: - # Retrieve parent directory of the config to decode relative paths - parent_dir = os.path.dirname(self.CONFIG_PATH) - else: - # assume the parent dir would be the current work dir - parent_dir = os.getcwd() - - # Ensure absolute file paths - if not os.path.isabs(path): - path = os.path.join(parent_dir, path) - # And lastly, normalize file paths - return os.path.normpath(path) - - def load_config_from_env(self): - schematic_config = os.environ["SCHEMATIC_CONFIG"] - print( - "Loading config YAML file specified in 'SCHEMATIC_CONFIG' " - "environment variable: %s" % schematic_config - ) - return self.load_config(schematic_config) - - def load_config_content_from_env(self): - schematic_config_content = os.environ["SCHEMATIC_CONFIG_CONTENT"] - - print("Loading content of config file: %s" % schematic_config_content) - - config_content_yaml = self.load_config_content(schematic_config_content) - self.DATA = config_content_yaml - - return self.DATA - - def load_config(self, config_path=None, asset_view=None): - # If config_path is None, try loading from environment - if config_path is None and "SCHEMATIC_CONFIG" in 
os.environ: - return self.load_config_from_env() - # Otherwise, raise an error - elif config_path is None and "SCHEMATIC_CONFIG" not in os.environ: - raise ValueError( - "No configuration file provided to the `config_path` argument " - "in `load_config`()`, nor was one specified in the " - "'SCHEMATIC_CONFIG' environment variable. Quitting now..." - ) - # Load configuration YAML file - config_path = os.path.expanduser(config_path) - config_path = os.path.abspath(config_path) - self.DATA = self.load_yaml(config_path) - self.CONFIG_PATH = config_path - # handle user input (for API endpoints) - if asset_view: - self.DATA["synapse"]["master_fileview"] = asset_view - - # Return self.DATA as a side-effect - return self.DATA - - @property - def SERVICE_ACCT_CREDS(self): - self._SERVICE_ACCT_CREDS = self.DATA["definitions"]["service_acct_creds"] - self._SERVICE_ACCT_CREDS = self.normalize_path(self._SERVICE_ACCT_CREDS) - return self._SERVICE_ACCT_CREDS - - @property - def SYNAPSE_CONFIG_PATH(self): - self._SYNAPSE_CONFIG_PATH = self.DATA["definitions"]["synapse_config"] - self._SYNAPSE_CONFIG_PATH = self.normalize_path(self._SYNAPSE_CONFIG_PATH) - return self._SYNAPSE_CONFIG_PATH - - -CONFIG = Configuration() diff --git a/schematic/configuration/__init__.py b/schematic/configuration/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/schematic/configuration/configuration.py b/schematic/configuration/configuration.py new file mode 100644 index 000000000..c31007672 --- /dev/null +++ b/schematic/configuration/configuration.py @@ -0,0 +1,226 @@ +"""Configuration singleton for the Schematic Package""" + +from typing import Optional, Any +import os +import yaml +from schematic.utils.general import normalize_path +from .dataclasses import ( + SynapseConfig, + ManifestConfig, + ModelConfig, + GoogleSheetsConfig, +) + + +class ConfigNonAllowedFieldError(Exception): + """Raised when a user submitted config file contains non allowed fields""" + + def __init__( + 
self, message: str, fields: list[str], allowed_fields: list[str] + ) -> None: + """ + Args: + message (str): A message describing the error + fields (list[str]): The fields in the config + allowed_fields (list[str]): The allowed fields in the config + """ + self.message = message + self.fields = fields + self.allowed_fields = allowed_fields + super().__init__(self.message) + + def __str__(self) -> str: + """String representation""" + return ( + f"{self.message}; " + f"config contains fields: {self.fields}; " + f"allowed fields: {self.allowed_fields}" + ) + + +class Configuration: + """ + This class is used as a singleton by the rest of the package. + It is instantiated only once at the bottom of this file, and that + instance is imported by other modules + """ + + def __init__(self) -> None: + self.config_path: Optional[str] = None + self._parent_directory = os.getcwd() + self._synapse_config = SynapseConfig() + self._manifest_config = ManifestConfig() + self._model_config = ModelConfig() + self._google_sheets_config = GoogleSheetsConfig() + + def load_config(self, config_path: str) -> None: + """Loads a user created config file and overwrites any defaults listed in the file + + Args: + config_path (str): The path to the config file + + Raises: + ConfigNonAllowedFieldError: If there are non allowed fields in the config file + """ + allowed_config_fields = {"asset_store", "manifest", "model", "google_sheets"} + config_path = os.path.expanduser(config_path) + config_path = os.path.abspath(config_path) + self.config_path = config_path + + self._parent_directory = os.path.dirname(config_path) + + with open(config_path, "r", encoding="utf-8") as file: + config: dict[str, Any] = yaml.safe_load(file) + if not set(config.keys()).issubset(allowed_config_fields): + raise ConfigNonAllowedFieldError( + "Non allowed fields in top level of configuration file.", + list(config.keys()), + list(allowed_config_fields), + ) + + self._manifest_config = 
ManifestConfig(**config.get("manifest", {})) + self._model_config = ModelConfig(**config.get("model", {})) + self._google_sheets_config = GoogleSheetsConfig( + **config.get("google_sheets", {}) + ) + self._set_asset_store(config.get("asset_store", {})) + + def _set_asset_store(self, config: dict[str, Any]) -> None: + allowed_config_fields = {"synapse"} + if not config: + pass + if not set(config.keys()).issubset(allowed_config_fields): + raise ConfigNonAllowedFieldError( + "Non allowed fields in asset_store of configuration file.", + list(config.keys()), + list(allowed_config_fields), + ) + self._synapse_config = SynapseConfig(**config["synapse"]) + + @property + def synapse_configuration_path(self) -> str: + """ + Returns: + str: The path to the synapse configuration file + """ + return normalize_path(self._synapse_config.config, self._parent_directory) + + @property + def synapse_manifest_basename(self) -> str: + """ + Returns: + str: + """ + return self._synapse_config.manifest_basename + + @property + def synapse_master_fileview_id(self) -> str: + """ + Returns: + str: + """ + return self._synapse_config.master_fileview_id + + @synapse_master_fileview_id.setter + def synapse_master_fileview_id(self, synapse_id: str) -> None: + """Sets the Synapse master fileview ID + + Args: + synapse_id (str): The synapse id to set + """ + self._synapse_config.master_fileview_id = synapse_id + + @property + def manifest_folder(self) -> str: + """ + Returns: + str: Location where manifests will saved to + """ + return self._manifest_config.manifest_folder + + @property + def manifest_title(self) -> str: + """ + Returns: + str: Title or title prefix given to generated manifest(s) + """ + return self._manifest_config.title + + @property + def manifest_data_type(self) -> list[str]: + """ + Returns: + list[str]: Data types of manifests to be generated or data type (singular) to validate + manifest against + """ + return self._manifest_config.data_type + + @property + def 
model_location(self) -> str: + """ + Returns: + str: The path to the model.jsonld + """ + return self._model_config.location + + @property + def service_account_credentials_synapse_id(self) -> str: + """ + Returns: + str: The Synapse id of the Google service account credentials. + """ + return self._google_sheets_config.service_acct_creds_synapse_id + + @property + def service_account_credentials_path(self) -> str: + """ + Returns: + str: The path of the Google service account credentials. + """ + return normalize_path( + self._google_sheets_config.service_acct_creds, self._parent_directory + ) + + @property + def google_sheets_master_template_id(self) -> str: + """ + Returns: + str: The template id of the google sheet. + """ + return "1LYS5qE4nV9jzcYw5sXwCza25slDfRA1CIg3cs-hCdpU" + + @property + def google_sheets_strict_validation(self) -> bool: + """ + Returns: + bool: Weather or not to disallow bad values in the google sheet + """ + return self._google_sheets_config.strict_validation + + @property + def google_required_background_color(self) -> dict[str, float]: + """ + Returns: + dict[str, float]: Background color for google sheet + """ + return { + "red": 0.9215, + "green": 0.9725, + "blue": 0.9803, + } + + @property + def google_optional_background_color(self) -> dict[str, float]: + """ + Returns: + dict[str, float]: Background color for google sheet + """ + return { + "red": 1.0, + "green": 1.0, + "blue": 0.9019, + } + + +# This instantiates the singleton for the rest of the package +CONFIG = Configuration() diff --git a/schematic/configuration/dataclasses.py b/schematic/configuration/dataclasses.py new file mode 100644 index 000000000..7fbc7df57 --- /dev/null +++ b/schematic/configuration/dataclasses.py @@ -0,0 +1,170 @@ +"""Pydantic dataclasses""" + +import re +from dataclasses import field +from pydantic.dataclasses import dataclass +from pydantic import validator, ConfigDict, Extra + +# This turns on validation for value assignments after creation 
+pydantic_config = ConfigDict(validate_assignment=True, extra=Extra.forbid) + + +@dataclass(config=pydantic_config) +class SynapseConfig: + """ + config_basename: Path to the synapse config file, either absolute or relative to this file + manifest_basename: the name of downloaded manifest files + master_fileview_id: Synapse ID of the file view listing all project data assets. + """ + + config: str = ".synapseConfig" + manifest_basename: str = "synapse_storage_manifest" + master_fileview_id: str = "syn23643253" + + @validator("master_fileview_id") + @classmethod + def validate_synapse_id(cls, value: str) -> str: + """Check if string is a valid synapse id + + Args: + value (str): A string + + Raises: + ValueError: If the value isn't a valid Synapse id + + Returns: + (str): The input value + """ + if not re.search("^syn[0-9]+", value): + raise ValueError(f"{value} is not a valid Synapse id") + return value + + @validator("config", "manifest_basename") + @classmethod + def validate_string_is_not_empty(cls, value: str) -> str: + """Check if string is not empty(has at least one char) + + Args: + value (str): A string + + Raises: + ValueError: If the value is zero characters long + + Returns: + (str): The input value + """ + if not value: + raise ValueError(f"{value} is an empty string") + return value + + +@dataclass(config=pydantic_config) +class ManifestConfig: + """ + manifest_folder: name of the folder manifests will be saved to locally + title: Title or title prefix given to generated manifest(s) + data_type: Data types of manifests to be generated or data type (singular) to validate + manifest against + """ + + manifest_folder: str = "manifests" + title: str = "example" + data_type: list[str] = field(default_factory=lambda: ["Biospecimen", "Patient"]) + + @validator("title", "manifest_folder") + @classmethod + def validate_string_is_not_empty(cls, value: str) -> str: + """Check if string is not empty(has at least one char) + + Args: + value (str): A string + + 
Raises: + ValueError: If the value is zero characters long + + Returns: + (str): The input value + """ + if not value: + raise ValueError(f"{value} is an empty string") + return value + + +@dataclass(config=pydantic_config) +class ModelConfig: + """ + location: location of the schema jsonld + """ + + location: str = "tests/data/example.model.jsonld" + + @validator("location") + @classmethod + def validate_string_is_not_empty(cls, value: str) -> str: + """Check if string is not empty(has at least one char) + + Args: + value (str): A string + + Raises: + ValueError: If the value is zero characters long + + Returns: + (str): The input value + """ + if not value: + raise ValueError(f"{value} is an empty string") + return value + + +@dataclass(config=pydantic_config) +class GoogleSheetsConfig: + """ + master_template_id: The template id of the google sheet. + strict_validation: When doing google sheet validation (regex match) with the validation rules. + True is alerting the user and not allowing entry of bad values. + False is warning but allowing the entry on to the sheet. + service_acct_creds_synapse_id: The Synapse id of the Google service account credentials. 
+ service_acct_creds: Path to the Google service account credentials, + either absolute or relative to this file + """ + + service_acct_creds_synapse_id: str = "syn25171627" + service_acct_creds: str = "schematic_service_account_creds.json" + strict_validation: bool = True + + @validator("service_acct_creds") + @classmethod + def validate_string_is_not_empty(cls, value: str) -> str: + """Check if string is not empty(has at least one char) + + Args: + value (str): A string + + Raises: + ValueError: If the value is zero characters long + + Returns: + (str): The input value + """ + if not value: + raise ValueError(f"{value} is an empty string") + return value + + @validator("service_acct_creds_synapse_id") + @classmethod + def validate_synapse_id(cls, value: str) -> str: + """Check if string is a valid synapse id + + Args: + value (str): A string + + Raises: + ValueError: If the value isn't a valid Synapse id + + Returns: + (str): The input value + """ + if not re.search("^syn[0-9]+", value): + raise ValueError(f"{value} is not a valid Synapse id") + return value diff --git a/schematic/exceptions.py b/schematic/exceptions.py index 321cb4282..200ffe7bc 100644 --- a/schematic/exceptions.py +++ b/schematic/exceptions.py @@ -1,5 +1,5 @@ """Schematic Exceptions""" -from typing import Any, Sequence +from typing import Optional, Any, Sequence class MissingConfigValueError(Exception): @@ -13,7 +13,9 @@ class MissingConfigValueError(Exception): message. """ - def __init__(self, config_keys: Sequence[Any], message: str = None) -> None: + def __init__( + self, config_keys: Sequence[Any], message: Optional[str] = None + ) -> None: config_keys_str = " > ".join(config_keys) self.message = ( "The configuration value corresponding to the argument " @@ -41,7 +43,7 @@ class WrongEntityTypeError(Exception): message. 
""" - def __init__(self, syn_id: str, message: str = None) -> None: + def __init__(self, syn_id: str, message: Optional[str] = None) -> None: self.message = ( f"'{syn_id}'' is not a desired entity type" "Please ensure that you put in the right syn_id" @@ -69,7 +71,7 @@ class MissingConfigAndArgumentValueError(Exception): """ def __init__( - self, arg_name: str, config_keys: Sequence[Any], message: str = None + self, arg_name: str, config_keys: Sequence[Any], message: Optional[str] = None ) -> None: config_keys_str = " > ".join(config_keys) self.message = ( @@ -99,7 +101,7 @@ class AccessCredentialsError(Exception): message. """ - def __init__(self, project: str, message: str = None) -> None: + def __init__(self, project: str, message: Optional[str] = None) -> None: self.message = ( f"Your access to '{project}'' could not be resolved. " "Please check your credentials and try again." diff --git a/schematic/help.py b/schematic/help.py index 94fd06a08..c738df1bc 100644 --- a/schematic/help.py +++ b/schematic/help.py @@ -48,7 +48,7 @@ "json_schema": ( "Specify the path to the JSON Validation Schema for this argument. " "You can either explicitly pass the `.json` file here or provide it in the `config.yml` file " - "as a value for the `(model > input > validation_schema)` key." + "as a value for the `(model > location)` key." ), "alphabetize_valid_values": ( "Specify to alphabetize valid attribute values either ascending (a) or descending (d)." 
diff --git a/schematic/manifest/commands.py b/schematic/manifest/commands.py index 03edf648e..a75aa7216 100644 --- a/schematic/manifest/commands.py +++ b/schematic/manifest/commands.py @@ -1,26 +1,24 @@ import os import logging from pathlib import Path -import click -import click_log -import logging import sys from typing import List +import click +import click_log from schematic.manifest.generator import ManifestGenerator -from schematic.utils.cli_utils import fill_in_from_config, query_dict, parse_synIDs +from schematic.utils.cli_utils import log_value_from_config, query_dict, parse_synIDs from schematic.help import manifest_commands -from schematic import CONFIG from schematic.schemas.generator import SchemaGenerator -from schematic.utils.google_api_utils import export_manifest_csv, export_manifest_excel, export_manifest_drive_service +from schematic.utils.google_api_utils import export_manifest_csv from schematic.store.synapse import SynapseStorage +from schematic.configuration.configuration import CONFIG logger = logging.getLogger('schematic') click_log.basic_config(logger) CONTEXT_SETTINGS = dict(help_option_names=["--help", "-h"]) # help options - # invoke_without_command=True -> forces the application not to show aids before losing them with a --h @click.group(context_settings=CONTEXT_SETTINGS, invoke_without_command=True) @click_log.simple_verbosity_option(logger) @@ -37,7 +35,8 @@ def manifest(ctx, config): # use as `schematic manifest ...` """ try: logger.debug(f"Loading config file contents in '{config}'") - ctx.obj = CONFIG.load_config(config) + CONFIG.load_config(config) + ctx.obj = CONFIG except ValueError as e: logger.error("'--config' not provided or environment variable not set.") logger.exception(e) @@ -117,17 +116,18 @@ def get_manifest( """ Running CLI with manifest generation options. 
""" - # optional parameters that need to be passed to ManifestGenerator() - # can be read from config.yml as well - data_type = fill_in_from_config("data_type", data_type, ("manifest", "data_type")) - jsonld = fill_in_from_config("jsonld", jsonld, ("model", "input", "location")) - title = fill_in_from_config("title", title, ("manifest", "title"), allow_none=True) - json_schema = fill_in_from_config( - "json_schema", - json_schema, - ("model", "input", "validation_schema"), - allow_none=True, - ) + # Optional parameters that need to be passed to ManifestGenerator() + # If CLI parameters are None they are gotten from the CONFIG object and logged + if data_type is None: + data_type = CONFIG.manifest_data_type + log_value_from_config("data_type", data_type) + if jsonld is None: + jsonld = CONFIG.model_location + log_value_from_config("jsonld", jsonld) + if title is None: + title = CONFIG.manifest_title + log_value_from_config("title", title) + def create_single_manifest(data_type, output_csv=None, output_xlsx=None): # create object of type ManifestGenerator manifest_generator = ManifestGenerator( @@ -262,9 +262,10 @@ def migrate_manifests( """ Running CLI with manifest migration options. 
""" - jsonld = fill_in_from_config("jsonld", jsonld, ("model", "input", "location")) + if jsonld is None: + jsonld = CONFIG.model_location + log_value_from_config("jsonld", jsonld) - full_scope = project_scope + [archive_project] synStore = SynapseStorage(project_scope = full_scope) diff --git a/schematic/manifest/generator.py b/schematic/manifest/generator.py index a244bcd1e..eb68be3fb 100644 --- a/schematic/manifest/generator.py +++ b/schematic/manifest/generator.py @@ -9,7 +9,7 @@ from pathlib import Path import pygsheets as ps from tempfile import NamedTemporaryFile -from typing import Dict, List, Tuple, Union +from typing import Dict, List, Optional, Tuple, Union from schematic.schemas.generator import SchemaGenerator from schematic.utils.google_api_utils import ( @@ -23,7 +23,7 @@ # we shouldn't need to expose Synapse functionality explicitly from schematic.store.synapse import SynapseStorage -from schematic import CONFIG +from schematic.configuration.configuration import CONFIG from schematic.utils.google_api_utils import export_manifest_drive_service @@ -129,13 +129,9 @@ def _column_to_cond_format_eq_rule( col_letter = self._column_to_letter(column_idx) if not required: - bg_color = CONFIG["style"]["google_manifest"].get( - "opt_bg_color", {"red": 1.0, "green": 1.0, "blue": 0.9019,}, - ) + bg_color = CONFIG.google_optional_background_color else: - bg_color = CONFIG["style"]["google_manifest"].get( - "req_bg_color", {"red": 0.9215, "green": 0.9725, "blue": 0.9803,}, - ) + bg_color = CONFIG.google_required_background_color boolean_rule = { "condition": { @@ -174,22 +170,33 @@ def _gdrive_copy_file(self, origin_file_id, copy_title): .execute()["id"] ) - def _create_empty_manifest_spreadsheet(self, title): - if CONFIG["style"]["google_manifest"]["master_template_id"]: + def _create_empty_manifest_spreadsheet(self, title:str) -> str: + """ + Creates an empty google spreadsheet returning the id. 
+ If the configuration has a template id it will be used - # if provided with a template manifest google sheet, use it - spreadsheet_id = self._gdrive_copy_file( - CONFIG["style"]["google_manifest"]["master_template_id"], title - ) + Args: + title (str): The title of the spreadsheet + + Returns: + str: The id of the created spreadsheet + """ + template_id = CONFIG.google_sheets_master_template_id + + if template_id: + spreadsheet_id = self._gdrive_copy_file(template_id, title) else: spreadsheet_body = { - 'properties': { - 'title': title - }} + 'properties': { + 'title': title + } + } - # if no template, create an empty spreadsheet - spreadsheet_id = self.sheet_service.spreadsheets().create(body=spreadsheet_body, fields="spreadsheetId").execute().get("spreadsheetId") + spreadsheet_id = self.sheet_service.spreadsheets().create( + body=spreadsheet_body, + fields="spreadsheetId").execute().get("spreadsheetId" + ) return spreadsheet_id @@ -281,7 +288,7 @@ def _get_column_data_validation_values( spreadsheet_id, valid_values, column_id, - strict, + strict:Optional[bool], validation_type="ONE_OF_LIST", custom_ui=True, input_message="Choose one from dropdown", @@ -289,8 +296,7 @@ def _get_column_data_validation_values( # set validation strictness to config file default if None indicated. 
if strict == None: - strict = CONFIG["style"]["google_manifest"].get("strict_validation", True) - + strict = CONFIG.google_sheets_strict_validation #store valid values explicitly in workbook at the provided range to use as validation values if validation_type == "ONE_OF_RANGE": valid_values=self._store_valid_values_as_data_dictionary(column_id, valid_values, spreadsheet_id) @@ -700,7 +706,7 @@ def _request_regex_vr(self, gs_formula, i:int, text_color={"red": 1}): return requests_vr def _request_regex_match_vr_formatting(self, validation_rules: List[str], i: int, - spreadsheet_id: str, requests_body: dict, + spreadsheet_id: str, requests_body: dict, strict: Optional[bool], ): """ Purpose: @@ -740,7 +746,6 @@ def _request_regex_match_vr_formatting(self, validation_rules: List[str], i: int } ] ## Set validaiton strictness based on user specifications. - strict = None if split_rules[-1].lower() == "strict": strict = True @@ -895,14 +900,7 @@ def _request_notes_comments(self, i, req, json_schema): """ # check if attribute is required and set a corresponding color if req in json_schema["required"]: - bg_color = CONFIG["style"]["google_manifest"].get( - "req_bg_color", - { - "red": 0.9215, - "green": 0.9725, - "blue": 0.9803, - }, - ) + bg_color = CONFIG.google_required_background_color req_format_body = { "requests": [ @@ -1081,6 +1079,8 @@ def _create_requests_body( ordered_metadata_fields, json_schema, spreadsheet_id, + sheet_url, + strict: Optional[bool], ): """Create and store all formatting changes for the google sheet to execute at once. @@ -1095,6 +1095,8 @@ def _create_requests_body( representing the data model, including: '$schema', '$id', 'title', 'type', 'properties', 'required' spreadsheet_id: str, of the id for the google sheet + sheet_url (Will be deprecated): a boolean ; determine if a pandas dataframe or a google sheet url gets return + strict (Optional Bool): strictness with which to apply validation rules to google sheets. 
True, blocks incorrect entries, False, raises a warning Return: requests_body(dict): containing all the update requests to add to the gs @@ -1103,12 +1105,13 @@ def _create_requests_body( requests_body = {} requests_body["requests"] = [] for i, req in enumerate(ordered_metadata_fields[0]): - # Gather validation rules and valid values for attribute + # Gather validation rules and valid values for attribute. validation_rules = self.sg.get_node_validation_rules(req) - - if validation_rules: + + # Add regex match validaiton rule to Google Sheets. + if validation_rules and sheet_url: requests_body =self._request_regex_match_vr_formatting( - validation_rules, i, spreadsheet_id, requests_body + validation_rules, i, spreadsheet_id, requests_body, strict ) if req in json_schema["properties"].keys(): @@ -1164,7 +1167,7 @@ def _create_requests_body( requests_body["requests"].append(borders_formatting) return requests_body - def _create_empty_gs(self, required_metadata_fields, json_schema, spreadsheet_id): + def _create_empty_gs(self, required_metadata_fields, json_schema, spreadsheet_id, sheet_url, strict: Optional[bool]): """Generate requests to add columns and format the google sheet. Args: required_metadata_fields(dict): @@ -1175,6 +1178,8 @@ def _create_empty_gs(self, required_metadata_fields, json_schema, spreadsheet_id representing the data model, including: '$schema', '$id', 'title', 'type', 'properties', 'required' spreadsheet_id: str, of the id for the google sheet + sheet_url (str): google sheet url of template manifest + strict (Optional Bool): strictness with which to apply validation rules to google sheets. True, blocks incorrect entries, False, raises a warning Returns: manifest_url (str): url of the google sheet manifest. 
""" @@ -1194,6 +1199,8 @@ def _create_empty_gs(self, required_metadata_fields, json_schema, spreadsheet_id ordered_metadata_fields, json_schema, spreadsheet_id, + sheet_url, + strict, ) # Execute requests @@ -1236,25 +1243,28 @@ def _gather_all_fields(self, fields, json_schema): ) return required_metadata_fields - def get_empty_manifest(self, json_schema_filepath=None): + def get_empty_manifest(self, strict: Optional[bool], json_schema_filepath: str=None, sheet_url: Optional[bool]=None): """Create an empty manifest using specifications from the json schema. Args: + strict (bool): strictness with which to apply validation rules to google sheets. If true, blocks incorrect entries; if false, raises a warning json_schema_filepath (str): path to json schema file + sheet_url (Will be deprecated): a boolean ; determine if a pandas dataframe or a google sheet url gets return + strict (Optional Bool): strictness with which to apply validation rules to google sheets. True, blocks incorrect entries, False, raises a warning Returns: manifest_url (str): url of the google sheet manifest. TODO: Refactor to not be dependent on GS. 
""" spreadsheet_id = self._create_empty_manifest_spreadsheet(self.title) - json_schema = self._get_json_schema(json_schema_filepath) + json_schema = self._get_json_schema(json_schema_filepath=json_schema_filepath) required_metadata_fields = self._gather_all_fields( json_schema["properties"].keys(), json_schema ) manifest_url = self._create_empty_gs( - required_metadata_fields, json_schema, spreadsheet_id + required_metadata_fields, json_schema, spreadsheet_id, sheet_url=sheet_url, strict=strict, ) return manifest_url @@ -1298,6 +1308,8 @@ def set_dataframe_by_url( start_col = self._column_to_letter(len(manifest_df.columns) - num_out_of_schema_columns) # find start of out of schema columns end_col = self._column_to_letter(len(manifest_df.columns) + 1) # find end of out of schema columns wb.set_data_validation(start = start_col, end = end_col, condition_type = None) + + # set permissions so that anyone with the link can edit sh.share("", role="writer", type="anyone") @@ -1353,13 +1365,14 @@ def map_annotation_names_to_display_names( return annotations.rename(columns=label_map) def get_manifest_with_annotations( - self, annotations: pd.DataFrame + self, annotations: pd.DataFrame, sheet_url:bool=None, strict: Optional[bool]=None, ) -> Tuple[ps.Spreadsheet, pd.DataFrame]: """Generate manifest, optionally with annotations (if requested). Args: annotations (pd.DataFrame): Annotations table (can be empty). - + strict (Optional Bool): strictness with which to apply validation rules to google sheets. True, blocks incorrect entries, False, raises a warning + sheet_url (Will be deprecated): a boolean ; determine if a pandas dataframe or a google sheet url gets return Returns: Tuple[ps.Spreadsheet, pd.DataFrame]: Both the Google Sheet URL and the corresponding data frame is returned. 
@@ -1378,8 +1391,8 @@ def get_manifest_with_annotations( self.additional_metadata = annotations_dict # Generate empty manifest using `additional_metadata` - manifest_url = self.get_empty_manifest() - manifest_df = self.get_dataframe_by_url(manifest_url) + manifest_url = self.get_empty_manifest(sheet_url=sheet_url, strict=strict) + manifest_df = self.get_dataframe_by_url(manifest_url=manifest_url) # Annotations clashing with manifest attributes are skipped # during empty manifest generation. For more info, search @@ -1462,7 +1475,7 @@ def _handle_output_format_logic(self, output_format: str = None, output_path: st return output_file_path # Return google sheet if sheet_url flag is raised. - elif sheet_url: + elif sheet_url: manifest_sh = self.set_dataframe_by_url(manifest_url=empty_manifest_url, manifest_df=dataframe, out_of_schema_columns=out_of_schema_columns) return manifest_sh.url @@ -1471,14 +1484,14 @@ def _handle_output_format_logic(self, output_format: str = None, output_path: st return dataframe def get_manifest( - self, dataset_id: str = None, sheet_url: bool = None, json_schema: str = None, output_format: str = None, output_path: str = None, access_token: str = None + self, dataset_id: str = None, sheet_url: bool = None, json_schema: str = None, output_format: str = None, output_path: str = None, access_token: str = None, strict: Optional[bool]=None, ) -> Union[str, pd.DataFrame]: """Gets manifest for a given dataset on Synapse. TODO: move this function to class MetadatModel (after MetadataModel is refactored) Args: dataset_id: Synapse ID of the "dataset" entity on Synapse (for a given center/project). - sheet_url: Determines if googlesheet URL or pandas dataframe should be returned. + sheet_url (Will be deprecated): a boolean ; determine if a pandas dataframe or a google sheet url gets return output_format: Determines if Google sheet URL, pandas dataframe, or Excel spreadsheet gets returned. 
output_path: Determines the output path of the exported manifest access_token: Token in .synapseConfig. Since we could not pre-load access_token as an environment variable on AWS, we have to add this variable. @@ -1489,7 +1502,7 @@ def get_manifest( # Handle case when no dataset ID is provided if not dataset_id: - manifest_url = self.get_empty_manifest(json_schema_filepath=json_schema) + manifest_url = self.get_empty_manifest(json_schema_filepath=json_schema, strict=strict, sheet_url=sheet_url) # if output_form parameter is set to "excel", return an excel spreadsheet if output_format == "excel": @@ -1514,13 +1527,12 @@ def get_manifest( manifest_record = store.updateDatasetManifestFiles(self.sg, datasetId = dataset_id, store = False) # get URL of an empty manifest file created based on schema component - empty_manifest_url = self.get_empty_manifest() + empty_manifest_url = self.get_empty_manifest(strict=strict, sheet_url=sheet_url) # Populate empty template with existing manifest if manifest_record: # TODO: Update or remove the warning in self.__init__() if # you change the behavior here based on self.use_annotations - # Update df with existing manifest. 
Agnostic to output format updated_df, out_of_schema_columns = self._update_dataframe_with_existing_df(empty_manifest_url=empty_manifest_url, existing_df=manifest_record[1]) @@ -1545,7 +1557,7 @@ def get_manifest( # if there are no files with annotations just generate an empty manifest if annotations.empty: - manifest_url = self.get_empty_manifest() + manifest_url = self.get_empty_manifest(strict=strict) manifest_df = self.get_dataframe_by_url(manifest_url) else: # Subset columns if no interested in user-defined annotations and there are files present @@ -1553,8 +1565,8 @@ def get_manifest( annotations = annotations[["Filename", "eTag", "entityId"]] # Update `additional_metadata` and generate manifest - manifest_url, manifest_df = self.get_manifest_with_annotations(annotations) - + manifest_url, manifest_df = self.get_manifest_with_annotations(annotations, sheet_url=sheet_url, strict=strict) + # Update df with existing manifest. Agnostic to output format updated_df, out_of_schema_columns = self._update_dataframe_with_existing_df(empty_manifest_url=empty_manifest_url, existing_df=manifest_df) @@ -1563,10 +1575,35 @@ def get_manifest( output_path = output_path, sheet_url = sheet_url, empty_manifest_url=empty_manifest_url, - dataframe = manifest_df, + dataframe = updated_df, + out_of_schema_columns = out_of_schema_columns, ) return result + def _get_end_columns(self, current_schema_headers, existing_manifest_headers, out_of_schema_columns): + """ + Gather columns to be added to the end of the manifest, and ensure entityId is at the end. + Args: + current_schema_headers: list, columns in the current manifest schema + existing_manifest_headers: list, columns in the existing manifest + out_of_schema_columns: set, columns that are in the existing manifest, but not the current schema + Returns: + end_columns: list of columns to be added to the end of the manifest. 
+ """ + # Identify columns to add to the end of the manifest + end_columns = list(out_of_schema_columns) + + # Make sure want Ids are placed at end of manifest, in given order. + for id_name in ['Uuid', 'Id', 'entityId']: + if id_name in end_columns: + end_columns.remove(id_name) + end_columns.append(id_name) + + # Add entity_id to the end columns if it should be there but isn't + if 'entityId' in (current_schema_headers or existing_manfiest_headers) and 'entityId' not in end_columns: + end_columns.append('entityId') + return end_columns + def _update_dataframe_with_existing_df(self, empty_manifest_url: str, existing_df: pd.DataFrame) -> pd.DataFrame: """ Handle scenario when existing manifest does not match new manifest template due to changes in the data model: @@ -1583,14 +1620,14 @@ def _update_dataframe_with_existing_df(self, empty_manifest_url: str, existing_d """ # Get headers for the current schema and existing manifest df. - current_schema_headers = list(self.get_dataframe_by_url(empty_manifest_url).columns) + current_schema_headers = list(self.get_dataframe_by_url(manifest_url=empty_manifest_url).columns) existing_manfiest_headers = list(existing_df.columns) # Find columns that exist in the current schema, but are not in the manifest being downloaded. - new_columns = self._get_missing_columns(current_schema_headers, existing_manfiest_headers) + new_columns = self._get_missing_columns(current_schema_headers, existing_manifest_headers) # Find columns that exist in the manifest being downloaded, but not in the current schema. 
- out_of_schema_columns = self._get_missing_columns(existing_manfiest_headers, current_schema_headers) + out_of_schema_columns = self._get_missing_columns(existing_manfiest_headers, current_schema_headers) # clean empty columns if any are present (there should be none) # TODO: Remove this line once we start preventing empty column names @@ -1606,12 +1643,17 @@ def _update_dataframe_with_existing_d **dict(zip(new_columns, len(new_columns) * [""])) ) + end_columns = self._get_end_columns(current_schema_headers=current_schema_headers, + existing_manifest_headers=existing_manfiest_headers, + out_of_schema_columns=out_of_schema_columns) + # sort columns in the updated manifest: # match latest schema order # move obsolete columns at the end updated_df = updated_df[self.sort_manifest_fields(updated_df.columns)] - updated_df = updated_df[[c for c in updated_df if c not in out_of_schema_columns] + list(out_of_schema_columns)] + # move obsolete columns at the end with entityId at the very end + updated_df = updated_df[[c for c in updated_df if c not in end_columns] + list(end_columns)] return updated_df, out_of_schema_columns def _format_new_excel_column(self, worksheet, new_column_index: int, col: str): diff --git a/schematic/models/commands.py b/schematic/models/commands.py index aba15decb..eeeb7c809 100644 --- a/schematic/models/commands.py +++ b/schematic/models/commands.py @@ -11,10 +11,10 @@ from jsonschema import ValidationError from schematic.models.metadata import MetadataModel -from schematic.utils.cli_utils import get_from_config, fill_in_from_config, query_dict, parse_synIDs, parse_comma_str_to_list +from schematic.utils.cli_utils import log_value_from_config, query_dict, parse_synIDs, parse_comma_str_to_list from schematic.help import model_commands from schematic.exceptions import MissingConfigValueError -from schematic import CONFIG +from schematic.configuration.configuration import CONFIG logger = 
logging.getLogger('schematic') click_log.basic_config(logger) @@ -38,7 +38,8 @@ def model(ctx, config): # use as `schematic model ...` """ try: logger.debug(f"Loading config file contents in '{config}'") - ctx.obj = CONFIG.load_config(config) + CONFIG.load_config(config) + ctx.obj = CONFIG except ValueError as e: logger.error("'--config' not provided or environment variable not set.") logger.exception(e) @@ -110,12 +111,11 @@ def submit_manifest( Running CLI with manifest validation (optional) and submission options. """ - jsonld = get_from_config(CONFIG.DATA, ("model", "input", "location")) - - model_file_type = get_from_config(CONFIG.DATA, ("model", "input", "file_type")) + jsonld = CONFIG.model_location + log_value_from_config("jsonld", jsonld) metadata_model = MetadataModel( - inputMModelLocation=jsonld, inputMModelLocationType=model_file_type + inputMModelLocation=jsonld, inputMModelLocationType="local" ) @@ -181,9 +181,10 @@ def validate_manifest(ctx, manifest_path, data_type, json_schema, restrict_rules """ Running CLI for manifest validation. 
""" - if not data_type: - data_type = fill_in_from_config("data_type", data_type, ("manifest", "data_type")) - + if data_type is None: + data_type = CONFIG.manifest_data_type + log_value_from_config("data_type", data_type) + try: len(data_type) == 1 except: @@ -193,19 +194,13 @@ def validate_manifest(ctx, manifest_path, data_type, json_schema, restrict_rules data_type = data_type[0] - json_schema = fill_in_from_config( - "json_schema", - json_schema, - ("model", "input", "validation_schema"), - allow_none=True, - ) t_validate = perf_counter() - jsonld = get_from_config(CONFIG.DATA, ("model", "input", "location")) - model_file_type = get_from_config(CONFIG.DATA, ("model", "input", "file_type")) + jsonld = CONFIG.model_location + log_value_from_config("jsonld", jsonld) metadata_model = MetadataModel( - inputMModelLocation=jsonld, inputMModelLocationType=model_file_type + inputMModelLocation=jsonld, inputMModelLocationType="local" ) errors, warnings = metadata_model.validateModelManifest( diff --git a/schematic/schemas/generator.py b/schematic/schemas/generator.py index e1f4b436c..8cb392470 100644 --- a/schematic/schemas/generator.py +++ b/schematic/schemas/generator.py @@ -12,7 +12,6 @@ from schematic.utils.schema_utils import load_schema_into_networkx from schematic.utils.validate_utils import validate_schema, rule_in_rule_list -from schematic import CONFIG logger = logging.getLogger(__name__) @@ -689,31 +688,19 @@ def get_json_schema_requirements(self, source_node: str, schema_name: str) -> Di if not json_schema["allOf"]: del json_schema["allOf"] - # Check if config value is provided; otherwise, set to None - json_schema_log_file = query_dict( - CONFIG.DATA, ("model", "input", "log_location") - ) - # If no config value and SchemaGenerator was initialized with # a JSON-LD path, construct - if json_schema_log_file is None and self.jsonld_path is not None: + if self.jsonld_path is not None: prefix = self.jsonld_path_root prefix_root, prefix_ext = 
os.path.splitext(prefix) if prefix_ext == ".model": prefix = prefix_root json_schema_log_file = f"{prefix}.{source_node}.schema.json" - if json_schema_log_file is None: - logger.info( - "The JSON schema file can be inspected by setting the following " - "nested key in the configuration: (model > input > log_location)." - ) - else: - json_schema_dirname = os.path.dirname(json_schema_log_file) - if json_schema_dirname != '': - os.makedirs(json_schema_dirname, exist_ok=True) - with open(json_schema_log_file, "w") as js_f: - json.dump(json_schema, js_f, indent=2) + logger.info( + "The JSON schema file can be inspected by setting the following " + "nested key in the configuration: (model > input > log_location)." + ) logger.info(f"JSON schema file log stored as {json_schema_log_file}") diff --git a/schematic/schemas/validator.py b/schematic/schemas/validator.py index f88613a3f..301dcbfe3 100644 --- a/schematic/schemas/validator.py +++ b/schematic/schemas/validator.py @@ -13,8 +13,6 @@ validate_schema, ) -from schematic import CONFIG - class SchemaValidator: """Validate Schema against SchemaOrg standard diff --git a/schematic/store/synapse.py b/schematic/store/synapse.py index 1f77715b0..cdc93f434 100644 --- a/schematic/store/synapse.py +++ b/schematic/store/synapse.py @@ -40,7 +40,6 @@ import uuid -from schematic_db.synapse.synapse import SynapseConfig from schematic_db.rdb.synapse_database import SynapseDatabase @@ -52,7 +51,9 @@ from schematic.store.base import BaseStorage from schematic.exceptions import MissingConfigValueError, AccessCredentialsError -from schematic import CONFIG +from schematic.configuration.configuration import CONFIG + +from schematic.utils.general import profile logger = logging.getLogger("Synapse storage") @@ -69,35 +70,23 @@ def _download_manifest_to_folder(self) -> File: """ try downloading a manifest to local cache or a given folder manifest - Return: + Return: manifest_data: A Synapse file entity of the downloaded manifest """ - # TO DO: 
potentially deprecate the if else statement because "manifest_folder" key always exist in config (See issue FDS-349 in Jira) - # on AWS, to avoid overriding manifest, we download the manifest to a temporary folder if "SECRETS_MANAGER_SECRETS" in os.environ: temporary_manifest_storage = "/var/tmp/temp_manifest_download" if not os.path.exists(temporary_manifest_storage): os.mkdir("/var/tmp/temp_manifest_download") download_location = create_temp_folder(temporary_manifest_storage) - - elif CONFIG["synapse"]["manifest_folder"]: - download_location=CONFIG["synapse"]["manifest_folder"] - else: - download_location=None - - if not download_location: - manifest_data = self.syn.get( - self.manifest_id, - ) - # if download_location is provided and it is not an empty string - else: - manifest_data = self.syn.get( - self.manifest_id, - downloadLocation=download_location, - ifcollision="overwrite.local", - ) - return manifest_data + else: download_location=CONFIG.manifest_folder + + manifest_data = self.syn.get( + self.manifest_id, + downloadLocation=download_location, + ifcollision="overwrite.local", + ) + return manifest_data def _entity_type_checking(self) -> str: """ @@ -191,20 +180,8 @@ def __init__( self.syn = self.login(token, access_token) self.project_scope = project_scope - - - # check if "master_fileview" has been set - try: - self.storageFileview = CONFIG["synapse"]["master_fileview"] - except KeyError: - raise MissingConfigValueError(("synapse", "master_fileview")) - - # check if "manifest_basename" has been set - try: - self.manifest = CONFIG["synapse"]["manifest_basename"] - except KeyError: - raise MissingConfigValueError(("synapse", "manifest_basename")) - + self.storageFileview = CONFIG.synapse_master_fileview_id + self.manifest = CONFIG.synapse_manifest_basename self._query_fileview() def _purge_synapse_cache(self, root_dir: str = "/var/www/.synapseCache/", maximum_storage_allowed_cache_gb=7): @@ -239,8 +216,8 @@ def _purge_synapse_cache(self, root_dir: str = 
"/var/www/.synapseCache/", maximu def _query_fileview(self): self._purge_synapse_cache() try: - self.storageFileview = CONFIG["synapse"]["master_fileview"] - self.manifest = CONFIG["synapse"]["manifest_basename"] + self.storageFileview = CONFIG.synapse_master_fileview_id + self.manifest = CONFIG.synapse_manifest_basename if self.project_scope: self.storageFileviewTable = self.syn.tableQuery( f"SELECT * FROM {self.storageFileview} WHERE projectId IN {tuple(self.project_scope + [''])}" @@ -250,9 +227,6 @@ def _query_fileview(self): self.storageFileviewTable = self.syn.tableQuery( "SELECT * FROM " + self.storageFileview ).asDataFrame() - - except AttributeError: - raise AttributeError("storageFileview attribute has not been set.") except SynapseHTTPError: raise AccessCredentialsError(self.storageFileview) @@ -278,9 +252,8 @@ def login(token=None, access_token=None): raise ValueError("No access to resources. Please make sure that your token is correct") else: # login using synapse credentials provided by user in .synapseConfig (default) file - syn = synapseclient.Synapse(configPath=CONFIG.SYNAPSE_CONFIG_PATH) + syn = synapseclient.Synapse(configPath=CONFIG.synapse_configuration_path) syn.login(silent=True) - return syn def missing_entity_handler(method): @@ -296,7 +269,6 @@ def wrapper(*args, **kwargs): raise ex return wrapper - def getStorageFileviewTable(self): """ Returns the storageFileviewTable obtained during initialization. 
""" @@ -601,13 +573,7 @@ def updateDatasetManifestFiles(self, sg: SchemaGenerator, datasetId: str, store: # the columns Filename and entityId are assumed to be present in manifest schema # TODO: use idiomatic panda syntax if dataset_files: - new_files = {"Filename": [], "entityId": []} - - # find new files if any - for file_id, file_name in dataset_files: - if not file_id in manifest["entityId"].values: - new_files["Filename"].append(file_name) - new_files["entityId"].append(file_id) + new_files = self._get_file_entityIds(dataset_files=dataset_files, only_new_files=True, manifest=manifest) # update manifest so that it contain new files new_files = pd.DataFrame(new_files) @@ -627,6 +593,38 @@ def updateDatasetManifestFiles(self, sg: SchemaGenerator, datasetId: str, store: manifest = manifest.fillna("") return manifest_id, manifest + + def _get_file_entityIds(self, dataset_files: List, only_new_files: bool = False, manifest: pd.DataFrame = None): + """ + Get a dictionary of files in a dataset. Either files that are not in the current manifest or all files + + Args: + manifest: metadata manifest + dataset_file: List of all files in a dataset + only_new_files: boolean to control whether only new files are returned or all files in the dataset + Returns: + files: dictionary of file names and entityIDs, with scope as specified by `only_new_files` + """ + files = {"Filename": [], "entityId": []} + + if only_new_files: + if manifest is None: + raise UnboundLocalError( + "No manifest was passed in, a manifest is required when `only_new_files` is True." 
+ ) + + # find new files (that are not in the current manifest) if any + for file_id, file_name in dataset_files: + if not file_id in manifest["entityId"].values: + files["Filename"].append(file_name) + files["entityId"].append(file_id) + else: + # get all files + for file_id, file_name in dataset_files: + files["Filename"].append(file_name) + files["entityId"].append(file_id) + + return files def getProjectManifests(self, projectId: str) -> List[str]: """Gets all metadata manifest files across all datasets in a specified project. @@ -995,17 +993,31 @@ def buildDB(self, # Put table manifest onto synapse schema = Schema(name=table_name, columns=col_schema, parent=self.getDatasetProject(datasetId)) - + if table_name in table_info: + existingTableId = table_info[table_name] + else: + existingTableId = None + + + tableOps = TableOperations( + synStore = self, + tableToLoad = table_manifest, + tableName = table_name, + datasetId = datasetId, + existingTableId = existingTableId, + restrict = restrict, + ) + if not table_manipulation or table_name not in table_info.keys(): - manifest_table_id = TableOperations.createTable(self, tableToLoad=table_manifest, tableName=table_name, datasetId=datasetId, columnTypeDict=col_schema, specifySchema=True, restrict=restrict) + manifest_table_id = tableOps.createTable(columnTypeDict=col_schema, specifySchema=True,) elif table_name in table_info.keys() and table_info[table_name]: if table_manipulation.lower() == 'replace': - manifest_table_id = TableOperations.replaceTable(self, tableToLoad=table_manifest, tableName=table_name, existingTableId=table_info[table_name], specifySchema = True, datasetId = datasetId, columnTypeDict=col_schema, restrict=restrict) + manifest_table_id = tableOps.replaceTable(specifySchema = True, columnTypeDict=col_schema,) elif table_manipulation.lower() == 'upsert': - manifest_table_id = TableOperations.upsertTable(self, sg=sg, tableToLoad = table_manifest, tableName=table_name, 
existingTableId=table_info[table_name], datasetId=datasetId) + manifest_table_id = tableOps.upsertTable(sg=sg,) elif table_manipulation.lower() == 'update': - manifest_table_id = TableOperations.updateTable(self, tableToLoad=table_manifest, existingTableId=table_info[table_name], restrict=restrict) + manifest_table_id = tableOps.updateTable() @@ -1030,9 +1042,9 @@ def upload_manifest_file(self, manifest, metadataManifestPath, datasetId, restri # Differentiate "censored" and "uncensored" manifest if "censored" in file_name_full: - file_name_new = os.path.basename(CONFIG["synapse"]["manifest_basename"]) + "_" + component_name + "_censored" + '.' + file_extension + file_name_new = os.path.basename(CONFIG.synapse_manifest_basename) + "_" + component_name + "_censored" + '.' + file_extension else: - file_name_new = os.path.basename(CONFIG["synapse"]["manifest_basename"]) + "_" + component_name + '.' + file_extension + file_name_new = os.path.basename(CONFIG.synapse_manifest_basename) + "_" + component_name + '.' + file_extension manifestSynapseFile = File( metadataManifestPath, @@ -1047,7 +1059,7 @@ def upload_manifest_file(self, manifest, metadataManifestPath, datasetId, restri return manifest_synapse_file_id @missing_entity_handler - def format_row_annotations(self, se, sg, row, entityId, useSchemaLabel, hideBlanks): + def format_row_annotations(self, se, sg, row, entityId, hideBlanks): # prepare metadata for Synapse storage (resolve display name into a name that Synapse annotations support (e.g no spaces, parenthesis) # note: the removal of special characters, will apply only to annotation keys; we are not altering the manifest # this could create a divergence between manifest column and annotations. this should be ok for most use cases. 
@@ -1057,10 +1069,7 @@ def format_row_annotations(self, se, sg, row, entityId, useSchemaLabel, hideBlan for k, v in row.to_dict().items(): - if useSchemaLabel: - keySyn = se.get_class_label_from_display_name(str(k)).translate({ord(x): '' for x in blacklist_chars}) - else: - keySyn = str(k) + keySyn = se.get_class_label_from_display_name(str(k)).translate({ord(x): '' for x in blacklist_chars}) # Skip `Filename` and `ETag` columns when setting annotations if keySyn in ["Filename", "ETag", "eTag"]: @@ -1292,20 +1301,19 @@ def _generate_table_name(self, manifest): table_name = 'synapse_storage_manifest_table' return table_name, component_name - def _add_annotations(self, se, schemaGenerator, row, entityId, useSchemaLabel, hideBlanks): + def _add_annotations(self, se, schemaGenerator, row, entityId, hideBlanks): """Helper function to format and add annotations to entities in Synapse. Args: se: schemaExplorer object, schemaGenerator: schemaGenerator Object. row: current row of manifest being processed entityId (str): synapseId of entity to add annotations to - useSchemaLabel (bool): Flag to use schema label instead of display name hideBlanks: Boolean flag that does not upload annotation keys with blank values when true. Uploads Annotation keys with empty string values when false. Returns: Annotations are added to entities in Synapse, no return. """ # Format annotations for Synapse - annos = self.format_row_annotations(se, schemaGenerator, row, entityId, useSchemaLabel, hideBlanks) + annos = self.format_row_annotations(se, schemaGenerator, row, entityId, hideBlanks) if annos: # Store annotations for an entity folder @@ -1338,7 +1346,6 @@ def add_entities( manifest, manifest_record_type, datasetId, - useSchemaLabel, hideBlanks, manifest_synapse_table_id='' ): @@ -1349,13 +1356,24 @@ def add_entities( manifest (pd.DataFrame): loaded df containing user supplied data. manifest_record_type: valid values are 'entity', 'table' or 'both'. 
Specifies whether to create entity ids and folders for each row in a manifest, a Synapse table to house the entire manifest or do both. datasetId (str): synapse ID of folder containing the dataset - useSchemaLabel (bool): Default is True - use the schema label. If False, uses the display label from the schema. Attribute display names in the schema must not only include characters that are not accepted by Synapse. Annotation names may only contain: letters, numbers, '_' and '.'. hideBlanks (bool): Default is false -Boolean flag that does not upload annotation keys with blank values when true. Uploads Annotation keys with empty string values when false. manifest_synapse_table_id (str): Default is an empty string ''. Returns: manifest (pd.DataFrame): modified to add entitiyId as appropriate. ''' + + # Expected behavior is to annotate files if `Filename` is present regardless of `-mrt` setting + if 'filename' in [col.lower() for col in manifest.columns]: + # get current list of files and store as dataframe + dataset_files = self.getFilesInStorageDataset(datasetId) + files_and_entityIds = self._get_file_entityIds(dataset_files=dataset_files, only_new_files=False) + file_df = pd.DataFrame(files_and_entityIds) + + # Merge dataframes to add entityIds + manifest = manifest.merge(file_df, how = 'left', on='Filename', suffixes=['_x',None]).drop('entityId_x',axis=1) + + # Fill `entityId` for each row if missing and annotate entity as appropriate for idx, row in manifest.iterrows(): if not row["entityId"] and (manifest_record_type == 'file_and_entities' or manifest_record_type == 'table_file_and_entities'): @@ -1371,7 +1389,7 @@ def add_entities( # Adding annotations to connected files. 
if entityId: - self._add_annotations(se, schemaGenerator, row, entityId, useSchemaLabel, hideBlanks) + self._add_annotations(se, schemaGenerator, row, entityId, hideBlanks) return manifest def upload_manifest_as_table( @@ -1400,7 +1418,6 @@ def upload_manifest_as_table( component_name (str): Name of the component manifest that is currently being uploaded. restrict (bool): Flag for censored data. manifest_record_type (str): valid values are 'entity', 'table' or 'both'. Specifies whether to create entity ids and folders for each row in a manifest, a Synapse table to house the entire manifest or do both. - useSchemaLabel(bool): Default is True - use the schema label. If False, uses the display label from the schema. Attribute display names in the schema must not only include characters that are not accepted by Synapse. Annotation names may only contain: letters, numbers, '_' and '.'. hideBlanks (bool): Default is False -Boolean flag that does not upload annotation keys with blank values when true. Uploads Annotation keys with empty string values when false. table_malnipulation (str): Specify the way the manifest tables should be store as on Synapse when one with the same name already exists. Options are 'replace' and 'upsert'. 
Return: @@ -1416,7 +1433,7 @@ def upload_manifest_as_table( useSchemaLabel, table_manipulation) - manifest = self.add_entities(se, schemaGenerator, manifest, manifest_record_type, datasetId, useSchemaLabel, hideBlanks, manifest_synapse_table_id) + manifest = self.add_entities(se, schemaGenerator, manifest, manifest_record_type, datasetId, hideBlanks, manifest_synapse_table_id) # Load manifest to synapse as a CSV File manifest_synapse_file_id = self.upload_manifest_file(manifest, metadataManifestPath, datasetId, restrict, component_name = component_name) @@ -1449,7 +1466,6 @@ def upload_manifest_as_csv( datasetId, restrict, manifest_record_type, - useSchemaLabel, hideBlanks, component_name, with_entities = False,): @@ -1462,7 +1478,6 @@ def upload_manifest_as_csv( datasetId (str): synapse ID of folder containing the dataset restrict (bool): Flag for censored data. manifest_record_type: valid values are 'entity', 'table' or 'both'. Specifies whether to create entity ids and folders for each row in a manifest, a Synapse table to house the entire manifest or do both. - useSchemaLabel (bool): Default is True - use the schema label. If False, uses the display label from the schema. Attribute display names in the schema must not only include characters that are not accepted by Synapse. Annotation names may only contain: letters, numbers, '_' and '.'. hideBlanks (bool): Default is False -Boolean flag that does not upload annotation keys with blank values when true. Uploads Annotation keys with empty string values when false. table_malnipulation (str): Specify the way the manifest tables should be store as on Synapse when one with the same name already exists. Options are 'replace' and 'upsert'. with_entities (bool): Default is False - Flag to indicate whether to create entityIds and add annotations. @@ -1470,7 +1485,7 @@ def upload_manifest_as_csv( manifest_synapse_file_id (str): SynID of manifest csv uploaded to synapse. 
""" if with_entities: - manifest = self.add_entities(se, schemaGenerator, manifest, manifest_record_type, datasetId, useSchemaLabel, hideBlanks) + manifest = self.add_entities(se, schemaGenerator, manifest, manifest_record_type, datasetId, hideBlanks) # Load manifest to synapse as a CSV File manifest_synapse_file_id = self.upload_manifest_file(manifest, @@ -1525,7 +1540,7 @@ def upload_manifest_combo( useSchemaLabel=useSchemaLabel, table_manipulation=table_manipulation,) - manifest = self.add_entities(se, schemaGenerator, manifest, manifest_record_type, datasetId, useSchemaLabel, hideBlanks, manifest_synapse_table_id) + manifest = self.add_entities(se, schemaGenerator, manifest, manifest_record_type, datasetId, hideBlanks, manifest_synapse_table_id) # Load manifest to synapse as a CSV File manifest_synapse_file_id = self.upload_manifest_file(manifest, metadataManifestPath, datasetId, restrict, component_name) @@ -1599,7 +1614,6 @@ def associateMetadataWithFiles( metadataManifestPath, datasetId=datasetId, restrict=restrict_manifest, - useSchemaLabel=useSchemaLabel, hideBlanks=hideBlanks, manifest_record_type=manifest_record_type, component_name = component_name, @@ -1628,7 +1642,6 @@ def associateMetadataWithFiles( metadataManifestPath, datasetId=datasetId, restrict=restrict_manifest, - useSchemaLabel=useSchemaLabel, hideBlanks=hideBlanks, manifest_record_type=manifest_record_type, component_name = component_name, @@ -1928,38 +1941,58 @@ class TableOperations: Operations currently in development are: upsertTable: add metadata from a manifest to an existing table that contains metadata from another manifest """ + def __init__(self, + synStore: SynapseStorage, + tableToLoad: pd.DataFrame = None, + tableName: str = None, + datasetId: str = None, + existingTableId: str = None, + restrict: bool = False + ): + + """ + Class governing table operations (creation, replacement, upserts, updates) in schematic + + tableToLoad: manifest formatted appropriately for the table + 
tableName: name of the table to be uploaded + datasetId: synID of the dataset for the manifest + existingTableId: synId of the table currently exising on synapse (if there is one) + restrict: bool, whether or not the manifest contains sensitive data that will need additional access restrictions + + """ + self.synStore = synStore + self.tableToLoad = tableToLoad + self.tableName = tableName + self.datasetId = datasetId + self.existingTableId = existingTableId + self.restrict = restrict - def createTable(synStore: SynapseStorage, tableToLoad: pd.DataFrame = None, tableName: str = None, datasetId: str = None, columnTypeDict: dict = None, specifySchema: bool = True, restrict: bool = False): + def createTable(self, columnTypeDict: dict = None, specifySchema: bool = True,): """ Method to create a table from a metadata manifest and upload it to synapse Args: - tableToLoad: manifest formatted appropriately for the table - tableName: name of the table to be uploaded - datasetId: synID of the dataset for the manifest columnTypeDict: dictionary schema for table columns: type, size, etc - specifySchema: to specify a specific schema for the table format - restrict: bool, whether or not the manifest contains sensitive data that will need additional access restrictions - + specifySchema: to specify a specific schema for the table format Returns: table.schema.id: synID of the newly created table """ - datasetEntity = synStore.syn.get(datasetId, downloadFile = False) + datasetEntity = self.synStore.syn.get(self.datasetId, downloadFile = False) datasetName = datasetEntity.name - table_schema_by_cname = synStore._get_table_schema_by_cname(columnTypeDict) + table_schema_by_cname = self.synStore._get_table_schema_by_cname(columnTypeDict) - if not tableName: - tableName = datasetName + 'table' - datasetParentProject = synStore.getDatasetProject(datasetId) + if not self.tableName: + self.tableName = datasetName + 'table' + datasetParentProject = 
self.synStore.getDatasetProject(self.datasetId) if specifySchema: if columnTypeDict == {}: logger.error("Did not provide a columnTypeDict.") #create list of columns: cols = [] - for col in tableToLoad.columns: + for col in self.tableToLoad.columns: if col in table_schema_by_cname: col_type = table_schema_by_cname[col]['columnType'] max_size = table_schema_by_cname[col]['maximumSize'] if 'maximumSize' in table_schema_by_cname[col].keys() else 100 @@ -1975,62 +2008,56 @@ def createTable(synStore: SynapseStorage, tableToLoad: pd.DataFrame = None, tabl else: #TODO add warning that the given col was not found and it's max size is set to 100 cols.append(Column(name=col, columnType='STRING', maximumSize=100)) - schema = Schema(name=tableName, columns=cols, parent=datasetParentProject) - table = Table(schema, tableToLoad) - table = synStore.syn.store(table, isRestricted = restrict) + schema = Schema(name=self.tableName, columns=cols, parent=datasetParentProject) + table = Table(schema, self.tableToLoad) + table = self.synStore.syn.store(table, isRestricted = self.restrict) return table.schema.id else: # For just uploading the tables to synapse using default # column types. 
- table = build_table(tableName, datasetParentProject, tableToLoad) - table = synStore.syn.store(table, isRestricted = restrict) + table = build_table(self.tableName, datasetParentProject, self.tableToLoad) + table = self.synStore.syn.store(table, isRestricted = self.restrict) return table.schema.id - def replaceTable(synStore: SynapseStorage, tableToLoad: pd.DataFrame = None, tableName: str = None, existingTableId: str = None, specifySchema: bool = True, datasetId: str = None, columnTypeDict: dict = None, restrict: bool = False): + def replaceTable(self, specifySchema: bool = True, columnTypeDict: dict = None,): """ Method to replace an existing table on synapse with metadata from a new manifest Args: - tableToLoad: manifest formatted appropriately for the table - tableName: name of the table to be uploaded - existingTableId: synId of the existing table to be replaced specifySchema: to infer a schema for the table format - datasetId: synID of the dataset for the manifest - columnTypeDict: dictionary schema for table columns: type, size, etc - restrict: bool, whether or not the manifest contains sensitive data that will need additional access restrictions - + columnTypeDict: dictionary schema for table columns: type, size, etc Returns: existingTableId: synID of the already existing table that had its metadata replaced """ - datasetEntity = synStore.syn.get(datasetId, downloadFile = False) + datasetEntity = self.synStore.syn.get(self.datasetId, downloadFile = False) datasetName = datasetEntity.name - table_schema_by_cname = synStore._get_table_schema_by_cname(columnTypeDict) - existing_table, existing_results = synStore.get_synapse_table(existingTableId) + table_schema_by_cname = self.synStore._get_table_schema_by_cname(columnTypeDict) + existing_table, existing_results = self.synStore.get_synapse_table(self.existingTableId) # remove rows - synStore.syn.delete(existing_results) + self.synStore.syn.delete(existing_results) # wait for row deletion to finish on synapse 
before getting empty table sleep(10) # removes all current columns - current_table = synStore.syn.get(existingTableId) - current_columns = synStore.syn.getTableColumns(current_table) + current_table = self.synStore.syn.get(self.existingTableId) + current_columns = self.synStore.syn.getTableColumns(current_table) for col in current_columns: current_table.removeColumn(col) - if not tableName: - tableName = datasetName + 'table' + if not self.tableName: + self.tableName = datasetName + 'table' # Process columns according to manifest entries - table_schema_by_cname = synStore._get_table_schema_by_cname(columnTypeDict) - datasetParentProject = synStore.getDatasetProject(datasetId) + table_schema_by_cname = self.synStore._get_table_schema_by_cname(columnTypeDict) + datasetParentProject = self.synStore.getDatasetProject(self.datasetId) if specifySchema: if columnTypeDict == {}: logger.error("Did not provide a columnTypeDict.") #create list of columns: cols = [] - for col in tableToLoad.columns: + for col in self.tableToLoad.columns: if col in table_schema_by_cname: col_type = table_schema_by_cname[col]['columnType'] @@ -2052,76 +2079,64 @@ def replaceTable(synStore: SynapseStorage, tableToLoad: pd.DataFrame = None, tab # adds new columns to schema for col in cols: current_table.addColumn(col) - synStore.syn.store(current_table, isRestricted = restrict) + self.synStore.syn.store(current_table, isRestricted = self.restrict) # wait for synapse store to finish sleep(1) # build schema and table from columns and store with necessary restrictions - schema = Schema(name=tableName, columns=cols, parent=datasetParentProject) - schema.id = existingTableId - table = Table(schema, tableToLoad, etag = existing_results.etag) - table = synStore.syn.store(table, isRestricted = restrict) + schema = Schema(name=self.tableName, columns=cols, parent=datasetParentProject) + schema.id = self.existingTableId + table = Table(schema, self.tableToLoad, etag = existing_results.etag) + table = 
self.synStore.syn.store(table, isRestricted = self.restrict) else: logging.error("Must specify a schema for table replacements") # remove system metadata from manifest existing_table.drop(columns = ['ROW_ID', 'ROW_VERSION'], inplace = True) - return existingTableId + return self.existingTableId - def _get_schematic_db_creds(synStore: SynapseStorage): - username = None + def _get_auth_token(self,): authtoken = None - # Get access token from environment variable if available # Primarily useful for testing environments, with other possible usefulness for containers env_access_token = os.getenv("SYNAPSE_ACCESS_TOKEN") if env_access_token: authtoken = env_access_token - return username, authtoken + return authtoken # Get token from authorization header # Primarily useful for API endpoint functionality - if 'Authorization' in synStore.syn.default_headers: - authtoken = synStore.syn.default_headers['Authorization'].split('Bearer ')[-1] - return username, authtoken + if 'Authorization' in self.synStore.syn.default_headers: + authtoken = self.synStore.syn.default_headers['Authorization'].split('Bearer ')[-1] + return authtoken # retrive credentials from synapse object # Primarily useful for local users, could only be stored here when a .synapseConfig file is used, but including to be safe - synapse_object_creds = synStore.syn.credentials - if hasattr(synapse_object_creds, 'username'): - username = synapse_object_creds.username + synapse_object_creds = self.synStore.syn.credentials if hasattr(synapse_object_creds, '_token'): authtoken = synapse_object_creds.secret # Try getting creds from .synapseConfig file if it exists # Primarily useful for local users. 
Seems to correlate with credentials stored in synaspe object when logged in - if os.path.exists(CONFIG.SYNAPSE_CONFIG_PATH): - config = synStore.syn.getConfigFile(CONFIG.SYNAPSE_CONFIG_PATH) + if os.path.exists(CONFIG.synapse_configuration_path): + config = self.synStore.syn.getConfigFile(CONFIG.synapse_configuration_path) # check which credentials are provided in file - if config.has_option('authentication', 'username'): - username = config.get('authentication', 'username') if config.has_option('authentication', 'authtoken'): authtoken = config.get('authentication', 'authtoken') # raise error if required credentials are not found - # providing an authtoken without a username did not prohibit upsert functionality, - # but including username gathering for completeness for schematic_db - if not username and not authtoken: - raise NameError( - "Username and authtoken credentials could not be found in the environment, synapse object, or the .synapseConfig file" - ) if not authtoken: raise NameError( "authtoken credentials could not be found in the environment, synapse object, or the .synapseConfig file" ) - return username, authtoken + return authtoken - def upsertTable(synStore: SynapseStorage, sg: SchemaGenerator, tableToLoad: pd.DataFrame = None, tableName: str = None, existingTableId: str = None, datasetId: str = None): + def upsertTable(self, sg: SchemaGenerator,): """ Method to upsert rows from a new manifest into an existing table on synapse For upsert functionality to work, primary keys must follow the naming convention of _id @@ -2130,50 +2145,44 @@ def upsertTable(synStore: SynapseStorage, sg: SchemaGenerator, tableToLoad: pd.D Args: - tableToLoad: manifest formatted appropriately for the table - tableName: name of the table to be uploaded - existingTableId: synId of the existing table to be replaced - datasetId: synID of the dataset for the manifest - columnTypeDict: dictionary schema for table columns: type, size, etc + sg: SchemaGenerator instance - 
Returns: existingTableId: synID of the already existing table that had its metadata replaced """ - username, authtoken = TableOperations._get_schematic_db_creds(synStore) + authtoken = self._get_auth_token() - synConfig = SynapseConfig(username, authtoken, synStore.getDatasetProject(datasetId)) - synapseDB = SynapseDatabase(synConfig) + synapseDB = SynapseDatabase(auth_token=authtoken, project_id=self.synStore.getDatasetProject(self.datasetId)) try: # Try performing upsert - synapseDB.upsert_table_rows(table_name=tableName, data=tableToLoad) + synapseDB.upsert_table_rows(table_name=self.tableName, data=self.tableToLoad) except(SynapseHTTPError) as ex: # If error is raised because Table has old `Uuid` column and not new `Id` column, then handle and re-attempt upload if 'Id is not a valid column name or id' in str(ex): - TableOperations._update_table_uuid_column(synStore, existingTableId, sg) - synapseDB.upsert_table_rows(table_name=tableName, data=tableToLoad) + self._update_table_uuid_column(sg) + synapseDB.upsert_table_rows(table_name=self.tableName, data=self.tableToLoad) # Raise if other error else: raise ex - return existingTableId - - def _update_table_uuid_column(synStore: SynapseStorage, table_id: str, sg: SchemaGenerator,) -> None: + return self.existingTableId + + def _update_table_uuid_column(self, sg: SchemaGenerator,) -> None: """Removes the `Uuid` column when present, and relpaces with an `Id` column Used to enable backwards compatability for manifests using the old `Uuid` convention Args: - table_id (str): The Synapse id of the table to be upserted into, that needs columns updated + sg: SchemaGenerator instance Returns: None """ # Get the columns of the schema - schema = synStore.syn.get(table_id) - cols = synStore.syn.getTableColumns(schema) + schema = self.synStore.syn.get(self.existingTableId) + cols = self.synStore.syn.getTableColumns(schema) # Iterate through columns until `Uuid` column is found for col in cols: @@ -2188,70 +2197,49 @@ def 
_update_table_uuid_column(synStore: SynapseStorage, table_id: str, sg: Schem if uuid_col_in_schema: new_col = Column(columnType = "STRING", maximumSize = 64, name = "Id") schema.addColumn(new_col) - schema = synStore.syn.store(schema) + schema = self.synStore.syn.store(schema) # If there is not, then use the old `Uuid` column as a basis for the new `Id` column else: - # Create a new `Id` column based off of the old `Uuid` column, and store (column is empty) - new_col = deepcopy(col) - new_col['name'] = 'Id' - schema.addColumn(new_col) - schema = synStore.syn.store(schema) - - - # Recently stored column is empty, so populated with uuid values - TableOperations._populate_new_id_column(synStore, table_id, schema) - - # get the up-to-date table, remove old `Uuid` column, and store - sleep(1) - schema = synStore.syn.get(table_id) - schema.removeColumn(col) - schema = synStore.syn.store(schema) - - # Exit iteration; only concerned with `Uuid` column + + # Build ColumnModel that will be used for new column + id_column = Column(name='Id', columnType='STRING', maximumSize=64, defaultValue=None, maximumListLength=1) + new_col_response = self.synStore.syn.store(id_column) + + + # Define columnChange body + columnChangeDict = { + "concreteType": "org.sagebionetworks.repo.model.table.TableSchemaChangeRequest", + "entityId": self.existingTableId, + "changes": [ + { + "oldColumnId": col['id'], + "newColumnId": new_col_response['id'], + } + ] + } + + self.synStore.syn._async_table_update(table=self.existingTableId, changes=[columnChangeDict], wait=False) break return - def _populate_new_id_column(synStore: SynapseStorage, table_id: str, schema: Schema) -> None: - """Copies the uuid values that were present in the column named `Uuid` to the new column named `Id` - - Args: - table_id (str): The Synapse id of the table to be upserted into, that needs columns updated - schema (synapseclient.table.Schema): Schema of the table columns - - Returns: - None - """ - # Query the table for 
the old `Uuid` column and new `Id` column - results = synStore.syn.tableQuery(f"select Uuid,Id from {table_id}") - results_df = results.asDataFrame() - - # Copy uuid values to new column, and store in table - results_df = populate_df_col_with_another_col(results_df, 'Uuid', 'Id') - table = synStore.syn.store(Table(schema, results_df, etag=results.etag)) - return - - def updateTable(synStore: SynapseStorage, tableToLoad: pd.DataFrame = None, existingTableId: str = None, update_col: str = 'Id', restrict: bool = False): + def updateTable(self, update_col: str = 'Id',): """ Method to update an existing table with a new column Args: - tableToLoad: manifest formatted appropriately for the table, that contains the new column - existingTableId: synId of the existing table to be replaced - updateCol: column to index the old and new tables on - restrict: bool, whether or not the manifest contains sensitive data that will need additional access restrictions - + updateCol: column to index the old and new tables on Returns: existingTableId: synID of the already existing table that had its metadata replaced """ - existing_table, existing_results = synStore.get_synapse_table(existingTableId) + existing_table, existing_results = self.synStore.get_synapse_table(self.existingTableId) - tableToLoad = update_df(existing_table, tableToLoad, update_col) + self.tableToLoad = update_df(existing_table, self.tableToLoad, update_col) # store table with existing etag data and impose restrictions as appropriate - synStore.syn.store(Table(existingTableId, tableToLoad, etag = existing_results.etag), isRestricted = restrict) + self.synStore.syn.store(Table(self.existingTableId, self.tableToLoad, etag = existing_results.etag), isRestricted = self.restrict) - return existingTableId + return self.existingTableId class DatasetFileView: diff --git a/schematic/utils/__init__.py b/schematic/utils/__init__.py index 73e77e0d8..e69de29bb 100644 --- a/schematic/utils/__init__.py +++ 
b/schematic/utils/__init__.py @@ -1,27 +0,0 @@ -from schematic.utils.curie_utils import ( - expand_curie_to_uri, - expand_curies_in_schema, - extract_name_from_uri_or_curie, - uri2label, -) -from schematic.utils.df_utils import update_df -from schematic.utils.general import dict2list, find_duplicates, str2list, unlist -from schematic.utils.google_api_utils import ( - download_creds_file, - execute_google_api_requests, - export_manifest_csv, - export_manifest_excel, -) -from schematic.utils.io_utils import ( - export_json, - load_default, - load_json, - load_schemaorg, -) -from schematic.utils.schema_utils import load_schema_into_networkx -from schematic.utils.validate_utils import ( - validate_class_schema, - validate_property_schema, - validate_schema, -) -from schematic.utils.viz_utils import visualize diff --git a/schematic/utils/cli_utils.py b/schematic/utils/cli_utils.py index 8a1f27ad5..c68fe46f6 100644 --- a/schematic/utils/cli_utils.py +++ b/schematic/utils/cli_utils.py @@ -7,14 +7,11 @@ from functools import reduce import re -from schematic import CONFIG -from schematic.exceptions import ( - MissingConfigValueError, - MissingConfigAndArgumentValueError, -) - logger = logging.getLogger(__name__) +# We are using fstrings in logger methods +# pylint: disable=logging-fstring-interpolation + def query_dict(dictionary: Mapping[Any, Any], keys: Sequence[Any]) -> Union[Any, None]: """Access a nested value in a dictionary corresponding @@ -39,88 +36,17 @@ def extract(dictionary: Any, key: Any) -> Union[Any, None]: return reduce(extract, keys, dictionary) -def get_from_config( - dictionary: Mapping[Any, Any], keys: Sequence[Any] -) -> Union[Any, None]: - """Access a nested configuration value from a yaml - configuration file. +def log_value_from_config(arg_name: str, config_value: Any): + """Logs when getting a value from the config Args: - dictionary: A dictionary containing anything. - keys: A sequence of values corresponding to keys - in `dictionary`. 
- - Returns: - The nested value corresponding to the given series. - - Raises: - MissingConfigValueError: When configuration value not - found in config.yml file for given key. + arg_name (str): Name of the argument. Used for logging. + config_value (Any): The value in the config """ - # get configuration value from config file - config_value = query_dict(dictionary, keys) - - # if configuration value not present then raise Exception - if config_value is None: - raise MissingConfigValueError(keys) - - config_keys_str = " > ".join(keys) - logger.info( - f"The ({config_keys_str}) argument with value " - f"'{config_value}' is being read from the config file." + f"The {arg_name} argument is being taken from configuration file, i.e., {config_value}." ) - return config_value - - -def fill_in_from_config( - arg_name: str, arg_value: Any, config_keys: Sequence[Any], allow_none: bool = False -) -> Any: - """Fill in a missing value from a configuration object. - - Args: - arg_name: Name of the argument. Used for logging. - config_keys: List of keys used to access a nested - value in `config` corresponding to `arg_name`. - arg_value: Value of the argument provided at the - command line. - allow_none: Return None if argument value and - configuration value are both None (rather - than raising an error). - - Returns: - The argument value, either from the calling context - or the corresponding field in the configuration. - - Raises: - AssertionError: If both the argument value and the - configuration object are `None`. 
- """ - - # Avoid accessing config if argument value is provided - if arg_value is not None: - return arg_value - - # raise Exception if both, configuration value not present - # in config file and CLI argument value is missing - try: - config_value = get_from_config(CONFIG.DATA, config_keys) - except MissingConfigValueError: - if allow_none: - return None - raise MissingConfigAndArgumentValueError(arg_name, config_keys) - - # Make sure argument value and - config_keys_str = " > ".join(config_keys) - - logger.info( - f"The '--{arg_name}' argument is being taken from configuration " - f"file ({config_keys_str}), i.e., '{config_value}'." - ) - - return config_value - def parse_synIDs( ctx, param, synIDs, ) -> List[str]: diff --git a/schematic/utils/df_utils.py b/schematic/utils/df_utils.py index 93325ced9..789fb4881 100644 --- a/schematic/utils/df_utils.py +++ b/schematic/utils/df_utils.py @@ -60,6 +60,9 @@ def load_df(file_path, preserve_raw_input=True, data_model=False, **load_args): pandarallel.initialize(verbose = 1) ints = org_df.parallel_applymap(lambda x: np.int64(x) if str.isdigit(x) else False, na_action='ignore').fillna(False) + # Identify cells converted to intergers + ints_tf_df = ints.applymap(pd.api.types.is_integer) + # convert strings to numerical dtype (float) if possible, preserve non-numerical strings for col in org_df.columns: float_df[col]=pd.to_numeric(float_df[col], errors='coerce') @@ -68,9 +71,9 @@ def load_df(file_path, preserve_raw_input=True, data_model=False, **load_args): # Trim nans and empty rows and columns processed_df = trim_commas_df(float_df) - + # Store values that were converted to type int in the final dataframe - processed_df=processed_df.mask(ints != False, other = ints) + processed_df=processed_df.mask(ints_tf_df, other = ints) # log manifest load and processing time logger.debug(f"Load Elapsed time {perf_counter()-t_load_df}") diff --git a/schematic/utils/general.py b/schematic/utils/general.py index 6c5d27b73..59edf4243 
100644 --- a/schematic/utils/general.py +++ b/schematic/utils/general.py @@ -183,3 +183,19 @@ def wrapper(*args, **kwargs): return wrapper return inner + +def normalize_path(path: str, parent_folder: str) -> str: + """ + Normalizes a path. + If the path is relative, the parent_folder is added to make it an absolute path. + + Args: + path (str): The path to the file to normalize. + parent_folder (str): The folder the file is in. + + Returns: + str: The normalized path. + """ + if not os.path.isabs(path): + path = os.path.join(parent_folder, path) + return os.path.normpath(path) diff --git a/schematic/utils/google_api_utils.py b/schematic/utils/google_api_utils.py index 8844c3da0..ac3def72f 100644 --- a/schematic/utils/google_api_utils.py +++ b/schematic/utils/google_api_utils.py @@ -11,7 +11,7 @@ from google.auth.transport.requests import Request from google.oauth2 import service_account from google.oauth2.credentials import Credentials -from schematic import CONFIG +from schematic.configuration.configuration import CONFIG from schematic.store.synapse import SynapseStorage import pandas as pd @@ -24,30 +24,6 @@ "https://www.googleapis.com/auth/drive", ] - -# it will create 'token.pickle' based on credentials.json -def generate_token() -> Credentials: - creds = None - # The file token.pickle stores the user's access and refresh tokens, - # and is created automatically when the authorization flow completes for the first time. - if os.path.exists(CONFIG.TOKEN_PICKLE): - with open(CONFIG.TOKEN_PICKLE, "rb") as token: - creds = pickle.load(token) - - # If there are no (valid) credentials available, let the user log in. 
- if not creds or not creds.valid: - if creds and creds.expired and creds.refresh_token: - creds.refresh(Request()) - else: - flow = InstalledAppFlow.from_client_secrets_file(CONFIG.CREDS_PATH, SCOPES) - creds = flow.run_console() ### don't have to deal with ports - # Save the credentials for the next run - with open(CONFIG.TOKEN_PICKLE, "wb") as token: - pickle.dump(creds, token) - - return creds - - # TODO: replace by pygsheets calls? def build_credentials() -> Dict[str, Any]: creds = generate_token() @@ -76,7 +52,7 @@ def build_service_account_creds() -> Dict[str, Any]: credentials = service_account.Credentials.from_service_account_info(dict_creds, scopes=SCOPES) else: credentials = service_account.Credentials.from_service_account_file( - CONFIG.SERVICE_ACCT_CREDS, scopes=SCOPES + CONFIG.service_account_credentials_path, scopes=SCOPES ) # get a Google Sheet API service @@ -97,21 +73,21 @@ def download_creds_file() -> None: # if file path of service_account does not exist # and if an environment variable related to service account is not found # regenerate service_account credentials - if not os.path.exists(CONFIG.SERVICE_ACCT_CREDS) and "SERVICE_ACCOUNT_CREDS" not in os.environ: + if not os.path.exists(CONFIG.service_account_credentials_path) and "SERVICE_ACCOUNT_CREDS" not in os.environ: # synapse ID of the 'schematic_service_account_creds.json' file - API_CREDS = CONFIG["synapse"]["service_acct_creds"] + API_CREDS = CONFIG.service_account_credentials_synapse_id # Download in parent directory of SERVICE_ACCT_CREDS to # ensure same file system for os.rename() - creds_dir = os.path.dirname(CONFIG.SERVICE_ACCT_CREDS) + creds_dir = os.path.dirname(CONFIG.service_account_credentials_path) creds_file = syn.get(API_CREDS, downloadLocation=creds_dir) - os.rename(creds_file.path, CONFIG.SERVICE_ACCT_CREDS) + os.rename(creds_file.path, CONFIG.service_account_credentials_path) logger.info( "The credentials file has been downloaded " - f"to '{CONFIG.SERVICE_ACCT_CREDS}'" + 
f"to '{CONFIG.service_account_credentials_path}'" ) elif "SERVICE_ACCOUNT_CREDS" in os.environ: diff --git a/schematic/utils/io_utils.py b/schematic/utils/io_utils.py index d6e4d3fcc..016ea5dcd 100644 --- a/schematic/utils/io_utils.py +++ b/schematic/utils/io_utils.py @@ -2,7 +2,7 @@ import json import urllib.request -from schematic import CONFIG, LOADER +from schematic import LOADER def load_json(file_path): diff --git a/schematic/utils/validate_utils.py b/schematic/utils/validate_utils.py index 5264d73da..3e694bce8 100644 --- a/schematic/utils/validate_utils.py +++ b/schematic/utils/validate_utils.py @@ -3,7 +3,7 @@ from jsonschema import validate from re import compile, search, IGNORECASE from schematic.utils.io_utils import load_json -from schematic import CONFIG, LOADER +from schematic import LOADER from typing import List import numpy as np diff --git a/schematic/visualization/commands.py b/schematic/visualization/commands.py index 0930ba8ea..ad9670e2b 100644 --- a/schematic/visualization/commands.py +++ b/schematic/visualization/commands.py @@ -8,10 +8,10 @@ from schematic.visualization.attributes_explorer import AttributesExplorer from schematic.visualization.tangled_tree import TangledTree -from schematic.utils.cli_utils import get_from_config, fill_in_from_config, query_dict +from schematic.utils.cli_utils import log_value_from_config, query_dict from schematic.help import viz_commands from schematic.help import model_commands -from schematic import CONFIG +from schematic.configuration.configuration import CONFIG logger = logging.getLogger(__name__) click_log.basic_config(logger) @@ -35,7 +35,8 @@ def viz(ctx, config): # use as `schematic model ...` """ try: logger.debug(f"Loading config file contents in '{config}'") - ctx.obj = CONFIG.load_config(config) + CONFIG.load_config(config) + ctx.obj = CONFIG except ValueError as e: logger.error("'--config' not provided or environment variable not set.") logger.exception(e) @@ -52,7 +53,8 @@ def 
get_attributes(ctx): """ # Get JSONLD file path - path_to_jsonld = get_from_config(CONFIG.DATA, ("model", "input", "location")) + path_to_jsonld = CONFIG.model_location + log_value_from_config("jsonld", path_to_jsonld) # Run attributes explorer AttributesExplorer(path_to_jsonld).parse_attributes(save_file=True) return @@ -79,7 +81,8 @@ def get_tangled_tree_text(ctx, figure_type, text_format): """ Get text to be placed on the tangled tree visualization. """ # Get JSONLD file path - path_to_jsonld = get_from_config(CONFIG.DATA, ("model", "input", "location")) + path_to_jsonld = CONFIG.model_location + log_value_from_config("jsonld", path_to_jsonld) # Initialize TangledTree tangled_tree = TangledTree(path_to_jsonld, figure_type) @@ -104,7 +107,8 @@ def get_tangled_tree_component_layers(ctx, figure_type): ''' Get the components that belong in each layer of the tangled tree visualization. ''' # Get JSONLD file path - path_to_jsonld = get_from_config(CONFIG.DATA, ("model", "input", "location")) + path_to_jsonld = CONFIG.model_location + log_value_from_config("jsonld", path_to_jsonld) # Initialize Tangled Tree tangled_tree = TangledTree(path_to_jsonld, figure_type) diff --git a/schematic_api/api/openapi/api.yaml b/schematic_api/api/openapi/api.yaml index 66332f3b1..d6788c5aa 100644 --- a/schematic_api/api/openapi/api.yaml +++ b/schematic_api/api/openapi/api.yaml @@ -86,6 +86,13 @@ paths: enum: ["excel", "google_sheet", "dataframe (only if getting existing manifests)"] description: If "excel" gets selected, this approach would avoid sending metadata to Google sheet APIs; if "google_sheet" gets selected, this would return a Google sheet URL. This parameter could potentially override sheet_url parameter. required: false + - in: query + name: strict_validation + schema: + type: boolean + default: True + description: If using Google Sheets, can set the strictness of Google Sheets regex match validation. 
True (default) will block users from entering incorrect values, False will throw a warning to users. + required: false operationId: schematic_api.api.routes.get_manifest_route responses: "200": diff --git a/schematic_api/api/routes.py b/schematic_api/api/routes.py index 1246fae21..7fa17c268 100644 --- a/schematic_api/api/routes.py +++ b/schematic_api/api/routes.py @@ -18,7 +18,7 @@ import pandas as pd import json -from schematic import CONFIG +from schematic.configuration.configuration import CONFIG from schematic.visualization.attributes_explorer import AttributesExplorer from schematic.visualization.tangled_tree import TangledTree from schematic.manifest.generator import ManifestGenerator @@ -32,24 +32,13 @@ logger = logging.getLogger(__name__) logging.basicConfig(level=logging.DEBUG) -def config_handler(asset_view=None): - path_to_config = app.config["SCHEMATIC_CONFIG"] - - # if content of the config file is provided: - content_of_config = app.config["SCHEMATIC_CONFIG_CONTENT"] - - # if the environment variable exists - if content_of_config: - CONFIG.load_config_content_from_env() - +def config_handler(asset_view: str=None): # check if path to config is provided - if os.path.isfile(path_to_config): - CONFIG.load_config(path_to_config, asset_view = asset_view) - - else: - raise FileNotFoundError( - f"No configuration file was found at this path: {path_to_config}" - ) + path_to_config = app.config["SCHEMATIC_CONFIG"] + if path_to_config is not None and os.path.isfile(path_to_config): + CONFIG.load_config(path_to_config) + if asset_view is not None: + CONFIG.synapse_master_fileview_id = asset_view class JsonConverter: ''' @@ -207,7 +196,7 @@ def get_temp_jsonld(schema_url): return tmp_file.name # @before_request -def get_manifest_route(schema_url: str, use_annotations: bool, dataset_ids=None, asset_view = None, output_format=None, title=None, access_token=None): +def get_manifest_route(schema_url: str, use_annotations: bool, dataset_ids=None, asset_view = None, 
output_format=None, title=None, access_token=None, strict_validation:bool=True): """Get the immediate dependencies that are related to a given source node. Args: schema_url: link to data model in json ld format @@ -217,6 +206,7 @@ def get_manifest_route(schema_url: str, use_annotations: bool, dataset_ids=None, use_annotations: Whether to use existing annotations during manifest generation asset_view: ID of view listing all project data assets. For example, for Synapse this would be the Synapse ID of the fileview listing all data assets for a given project. access_token: Token + strict: bool, strictness with which to apply validation rules to google sheets. Returns: Googlesheet URL (if sheet_url is True), or pandas dataframe (if sheet_url is False). """ @@ -231,7 +221,7 @@ def get_manifest_route(schema_url: str, use_annotations: bool, dataset_ids=None, all_args = connexion.request.args args_dict = dict(all_args.lists()) data_type = args_dict['data_type'] - + # Gather all dataset_ids try: dataset_ids = args_dict['dataset_id'] @@ -262,7 +252,7 @@ def get_manifest_route(schema_url: str, use_annotations: bool, dataset_ids=None, ) - def create_single_manifest(data_type, title, dataset_id=None, output_format=None, access_token=None): + def create_single_manifest(data_type, title, dataset_id=None, output_format=None, access_token=None, strict=strict_validation): # create object of type ManifestGenerator manifest_generator = ManifestGenerator( path_to_json_ld=jsonld, @@ -278,7 +268,7 @@ def create_single_manifest(data_type, title, dataset_id=None, output_format=None output_format = "dataframe" result = manifest_generator.get_manifest( - dataset_id=dataset_id, sheet_url=True, output_format=output_format, access_token=access_token + dataset_id=dataset_id, sheet_url=True, output_format=output_format, access_token=access_token, strict=strict, ) # return an excel file if output_format is set to "excel" diff --git a/tests/conftest.py b/tests/conftest.py index 7af6eb9a5..f19b4a9dc 
100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -9,7 +9,7 @@ from dotenv import load_dotenv, find_dotenv from schematic.schemas.explorer import SchemaExplorer -from schematic.configuration import CONFIG +from schematic.configuration.configuration import CONFIG from schematic.utils.df_utils import load_df load_dotenv() @@ -27,8 +27,6 @@ TESTS_DIR = os.path.dirname(os.path.abspath(__file__)) DATA_DIR = os.path.join(TESTS_DIR, "data") -CONFIG_PATH = os.path.join(DATA_DIR, "test_config.yml") -CONFIG.load_config(CONFIG_PATH) @pytest.fixture(scope="session") def dataset_id(): @@ -92,7 +90,3 @@ def helpers(): @pytest.fixture(scope="session") def config(): yield CONFIG - -@pytest.fixture(scope="session") -def config_path(): - yield CONFIG_PATH diff --git a/tests/data/example.model.csv b/tests/data/example.model.csv index 1f6c9589c..f15db469c 100644 --- a/tests/data/example.model.csv +++ b/tests/data/example.model.csv @@ -1,43 +1,44 @@ -Attribute,Description,Valid Values,DependsOn,Properties,Required,Parent,DependsOn Component,Source,Validation Rules -Patient,,,"Patient ID, Sex, Year of Birth, Diagnosis, Component",,FALSE,DataType,,, -Patient ID,,,,,TRUE,DataProperty,,, -Sex,,"Female, Male, Other",,,TRUE,DataProperty,,, -Year of Birth,,,,,FALSE,DataProperty,,, -Diagnosis,,"Healthy, Cancer",,,TRUE,DataProperty,,, -Cancer,,,"Cancer Type, Family History",,FALSE,ValidValue,,, -Cancer Type,,"Breast, Colorectal, Lung, Prostate, Skin",,,TRUE,DataProperty,,, -Family History,,"Breast, Colorectal, Lung, Prostate, Skin",,,TRUE,DataProperty,,,list strict -Biospecimen,,,"Sample ID, Patient ID, Tissue Status, Component",,FALSE,DataType,Patient,, -Sample ID,,,,,TRUE,DataProperty,,, -Tissue Status,,"Healthy, Malignant",,,TRUE,DataProperty,,, -Bulk RNA-seq Assay,,,"Filename, Sample ID, File Format, Component",,FALSE,DataType,Biospecimen,, -Filename,,,,,TRUE,DataProperty,,, -File Format,,"FASTQ, BAM, CRAM, CSV/TSV",,,TRUE,DataProperty,,, -BAM,,,Genome Build,,FALSE,ValidValue,,, 
-CRAM,,,"Genome Build, Genome FASTA",,FALSE,ValidValue,,, -CSV/TSV,,,Genome Build,,FALSE,ValidValue,,, -Genome Build,,"GRCh37, GRCh38, GRCm38, GRCm39",,,TRUE,DataProperty,,, -Genome FASTA,,,,,TRUE,DataProperty,,, -MockComponent,,,"Component, Check List, Check Regex List, Check Regex Single, Check Regex Format, Check Num, Check Float, Check Int, Check String, Check URL,Check Match at Least, Check Match at Least values, Check Match Exactly, Check Match Exactly values, Check Recommended, Check Ages, Check Unique, Check Range, Check Date, Check NA",,FALSE,DataType,,, -Check List,,"ab, cd, ef, gh",,,TRUE,DataProperty,,,list strict -Check Regex List,,,,,TRUE,DataProperty,,,list strict::regex match [a-f] -Check Regex Single,,,,,TRUE,DataProperty,,,regex search [a-f] -Check Regex Format,,,,,TRUE,DataProperty,,,regex match [a-f] -Check Num,,,,,TRUE,DataProperty,,,num -Check Float,,,,,TRUE,DataProperty,,,float -Check Int,,,,,TRUE,DataProperty,,,int -Check String,,,,,TRUE,DataProperty,,,str -Check URL,,,,,TRUE,DataProperty,,,url -Check Match at Least,,,,,TRUE,DataProperty,,,matchAtLeastOne Patient.PatientID set -Check Match Exactly,,,,,TRUE,DataProperty,,,matchExactlyOne MockComponent.checkMatchExactly set -Check Match at Least values,,,,,TRUE,DataProperty,,,matchAtLeastOne MockComponent.checkMatchatLeastvalues value -Check Match Exactly values,,,,,TRUE,DataProperty,,,matchExactlyOne MockComponent.checkMatchExactlyvalues value -Check Recommended,,,,,FALSE,DataProperty,,,recommended -Check Ages,,,,,TRUE,DataProperty,,,protectAges -Check Unique,,,,,TRUE,DataProperty,,,unique error -Check Range,,,,,TRUE,DataProperty,,,inRange 50 100 error -Check Date,,,,,TRUE,DataProperty,,,date -Check NA,,,,,TRUE,DataProperty,,,int::IsNA -MockRDB,,,"Component, MockRDB_id, SourceManifest",,FALSE,DataType,,, -MockRDB_id,,,,,TRUE,DataProperty,,,int -SourceManifest,,,,,TRUE,DataProperty,,, +Attribute,Description,Valid Values,DependsOn,Properties,Required,Parent,DependsOn Component,Source,Validation 
Rules +Patient,,,"Patient ID, Sex, Year of Birth, Diagnosis, Component",,FALSE,DataType,,, +Patient ID,,,,,TRUE,DataProperty,,, +Sex,,"Female, Male, Other",,,TRUE,DataProperty,,, +Year of Birth,,,,,FALSE,DataProperty,,, +Diagnosis,,"Healthy, Cancer",,,TRUE,DataProperty,,, +Cancer,,,"Cancer Type, Family History",,FALSE,ValidValue,,, +Cancer Type,,"Breast, Colorectal, Lung, Prostate, Skin",,,TRUE,DataProperty,,, +Family History,,"Breast, Colorectal, Lung, Prostate, Skin",,,TRUE,DataProperty,,,list strict +Biospecimen,,,"Sample ID, Patient ID, Tissue Status, Component",,FALSE,DataType,Patient,, +Sample ID,,,,,TRUE,DataProperty,,, +Tissue Status,,"Healthy, Malignant",,,TRUE,DataProperty,,, +Bulk RNA-seq Assay,,,"Filename, Sample ID, File Format, Component",,FALSE,DataType,Biospecimen,, +Filename,,,,,TRUE,DataProperty,,, +File Format,,"FASTQ, BAM, CRAM, CSV/TSV",,,TRUE,DataProperty,,, +BAM,,,Genome Build,,FALSE,ValidValue,,, +CRAM,,,"Genome Build, Genome FASTA",,FALSE,ValidValue,,, +CSV/TSV,,,Genome Build,,FALSE,ValidValue,,, +Genome Build,,"GRCh37, GRCh38, GRCm38, GRCm39",,,TRUE,DataProperty,,, +Genome FASTA,,,,,TRUE,DataProperty,,, +MockComponent,,,"Component, Check List, Check Regex List, Check Regex Single, Check Regex Format, Check Regex Integer, Check Num, Check Float, Check Int, Check String, Check URL,Check Match at Least, Check Match at Least values, Check Match Exactly, Check Match Exactly values, Check Recommended, Check Ages, Check Unique, Check Range, Check Date, Check NA",,FALSE,DataType,,, +Check List,,"ab, cd, ef, gh",,,TRUE,DataProperty,,,list strict +Check Regex List,,,,,TRUE,DataProperty,,,list strict::regex match [a-f] +Check Regex Single,,,,,TRUE,DataProperty,,,regex search [a-f] +Check Regex Format,,,,,TRUE,DataProperty,,,regex match [a-f] +Check Regex Integer,,,,,TRUE,DataProperty,,,regex search ^\d+$ +Check Num,,,,,TRUE,DataProperty,,,num +Check Float,,,,,TRUE,DataProperty,,,float +Check Int,,,,,TRUE,DataProperty,,,int +Check 
String,,,,,TRUE,DataProperty,,,str +Check URL,,,,,TRUE,DataProperty,,,url +Check Match at Least,,,,,TRUE,DataProperty,,,matchAtLeastOne Patient.PatientID set +Check Match Exactly,,,,,TRUE,DataProperty,,,matchExactlyOne MockComponent.checkMatchExactly set +Check Match at Least values,,,,,TRUE,DataProperty,,,matchAtLeastOne MockComponent.checkMatchatLeastvalues value +Check Match Exactly values,,,,,TRUE,DataProperty,,,matchExactlyOne MockComponent.checkMatchExactlyvalues value +Check Recommended,,,,,FALSE,DataProperty,,,recommended +Check Ages,,,,,TRUE,DataProperty,,,protectAges +Check Unique,,,,,TRUE,DataProperty,,,unique error +Check Range,,,,,TRUE,DataProperty,,,inRange 50 100 error +Check Date,,,,,TRUE,DataProperty,,,date +Check NA,,,,,TRUE,DataProperty,,,int::IsNA +MockRDB,,,"Component, MockRDB_id, SourceManifest",,FALSE,DataType,,, +MockRDB_id,,,,,TRUE,DataProperty,,,int +SourceManifest,,,,,TRUE,DataProperty,,, \ No newline at end of file diff --git a/tests/data/example.model.jsonld b/tests/data/example.model.jsonld index 2d65b72bf..6f29cbf7b 100644 --- a/tests/data/example.model.jsonld +++ b/tests/data/example.model.jsonld @@ -2498,6 +2498,9 @@ { "@id": "bts:CheckRegexFormat" }, + { + "@id": "bts:CheckRegexInteger" + }, { "@id": "bts:CheckNum" }, @@ -2637,6 +2640,25 @@ "regex match [a-f]" ] }, + { + "@id": "bts:CheckRegexInteger", + "@type": "rdfs:Class", + "rdfs:comment": "TBD", + "rdfs:label": "CheckRegexInteger", + "rdfs:subClassOf": [ + { + "@id": "bts:DataProperty" + } + ], + "schema:isPartOf": { + "@id": "http://schema.biothings.io" + }, + "sms:displayName": "Check Regex Integer", + "sms:required": "sms:true", + "sms:validationRules": [ + "regex search ^\\d+$" + ] + }, { "@id": "bts:CheckNum", "@type": "rdfs:Class", diff --git a/tests/data/mock_manifests/Invalid_Test_Manifest.csv b/tests/data/mock_manifests/Invalid_Test_Manifest.csv index fcd84fa7f..ea0e7685c 100644 --- a/tests/data/mock_manifests/Invalid_Test_Manifest.csv +++ 
b/tests/data/mock_manifests/Invalid_Test_Manifest.csv @@ -1,4 +1,4 @@ -Component,Check List,Check Regex List,Check Regex Format,Check Regex Single,Check Num,Check Float,Check Int,Check String,Check URL,Check Match at Least,Check Match at Least values,Check Match Exactly,Check Match Exactly values,Check Recommended,Check Ages,Check Unique,Check Range,Check Date,Check NA -MockComponent,"ab,cd","ab,cd,ef",a,a,6,99.65,7,valid,https://www.google.com/,1738,1738,8085,98085,,6549,str1,70,32-984,7 -MockComponent,invalid list values,ab cd ef,m,q,c,99,5.63,94,http://googlef.com/,7163,51100,9965,71738,,32851,str1,30,notADate,9.5 -MockComponent,"ab,cd","ab,cd,ef",b,b,6.5,62.3,2,valid,https://github.com/Sage-Bionetworks/schematic,8085,8085,1738,210065,,6550,str1,90,84-43-094,Not Applicable +Component,Check List,Check Regex List,Check Regex Single,Check Regex Format,Check Regex Integer,Check Num,Check Float,Check Int,Check String,Check URL,Check Match at Least,Check Match at Least values,Check Match Exactly,Check Match Exactly values,Check Recommended,Check Ages,Check Unique,Check Range,Check Date,Check NA +MockComponent,"ab,cd","ab,cd,ef",a,a,5.4,6,99.65,7,valid,https://www.google.com/,1738,1738,8085,98085,,6549,str1,70,32-984,7 +MockComponent,invalid list values,ab cd ef,q,m,0,c,99,5.63,94,http://googlef.com/,7163,51100,9965,71738,,32851,str1,30,notADate,9.5 +MockComponent,"ab,cd","ab,cd,ef",b,b,683902,6.5,62.3,2,valid,https://github.com/Sage-Bionetworks/schematic,8085,8085,1738,210065,,6550,str1,90,84-43-094,Not Applicable \ No newline at end of file diff --git a/tests/data/mock_manifests/Valid_Test_Manifest.csv b/tests/data/mock_manifests/Valid_Test_Manifest.csv index a3d061026..c4b6fb01f 100644 --- a/tests/data/mock_manifests/Valid_Test_Manifest.csv +++ b/tests/data/mock_manifests/Valid_Test_Manifest.csv @@ -1,5 +1,5 @@ -Component,Check List,Check Regex List,Check Regex Single,Check Regex Format,Check Num,Check Float,Check Int,Check String,Check URL,Check Match at 
Least,Check Match at Least values,Check Match Exactly,Check Match Exactly values,Check Recommended,Check Ages,Check Unique,Check Range,Check Date,Check NA -MockComponent,"ab,cd","a,c,f",a,a,6,99.65,7,valid,https://www.google.com/,1738,1738,8085,8085,,6571,str1,75,10/21/2022,Not Applicable -MockComponent,"ab,cd","a,c,f",e,b,71,58.4,3,valid,https://www.google.com/,9965,9965,9965,9965,,6571,str2,80,October 21 2022,8 -MockComponent,"ab,cd","b,d,f",b,c,6.5,62.3,2,valid,https://www.google.com/,8085,8085,1738,1738,present,32849,str3,95,10/21/2022,Not Applicable -MockComponent,"ab,cd","b,d,f",b,c,6.5,62.3,2,valid,https://www.google.com/,79,79,7,7,,32849,str4,55,21/10/2022,695 +Component,Check List,Check Regex List,Check Regex Single,Check Regex Format,Check Regex Integer,Check Num,Check Float,Check Int,Check String,Check URL,Check Match at Least,Check Match at Least values,Check Match Exactly,Check Match Exactly values,Check Recommended,Check Ages,Check Unique,Check Range,Check Date,Check NA +MockComponent,"ab,cd","a,c,f",a,a,0,6,99.65,7,valid,https://www.google.com/,1738,1738,8085,8085,,6571,str1,75,10/21/2022,Not Applicable +MockComponent,"ab,cd","a,c,f",e,b,1234,71,58.4,3,valid,https://www.google.com/,9965,9965,9965,9965,,6571,str2,80,October 21 2022,8 +MockComponent,"ab,cd","b,d,f",b,c,683902,6.5,62.3,2,valid,https://www.google.com/,8085,8085,1738,1738,present,32849,str3,95,10/21/2022,Not Applicable +MockComponent,"ab,cd","b,d,f",b,c,0,6.5,62.3,2,valid,https://www.google.com/,79,79,7,7,,32849,str4,55,21/10/2022,695 \ No newline at end of file diff --git a/tests/data/mock_manifests/annotations_test_manifest.csv b/tests/data/mock_manifests/annotations_test_manifest.csv index 8cb700f55..ba15606bc 100644 --- a/tests/data/mock_manifests/annotations_test_manifest.csv +++ b/tests/data/mock_manifests/annotations_test_manifest.csv @@ -1,3 +1,3 @@ 
-Component,CheckList,CheckRegexList,CheckRegexSingle,CheckNum,CheckFloat,CheckInt,CheckString,CheckURL,CheckMatchatLeast,CheckMatchatLeastvalues,CheckMatchExactly,CheckMatchExactlyvalues,CheckRecommended,CheckAges,CheckUnique,Uuid,entityId +Component,CheckList,CheckRegexList,CheckRegexSingle,CheckNum,CheckFloat,CheckInt,CheckString,CheckURL,CheckMatchatLeast,CheckMatchatLeastvalues,CheckMatchExactly,CheckMatchExactlyvalues,CheckRecommended,CheckAges,CheckUnique,Id,entityId MockComponent,"valid,list,values","a,c,f",a,6,99.65,7,valid,https://www.google.com/,1985,4891,23487492,24323472834,,6571,str1,0f7812cc-8a0e-4f54-b8c4-e497cb7b34d0,syn35367245 MockComponent,"valid,list,values","a,c,f",a,6,99.65,8.52,valid,https://www.google.com/,1985,4891,23487492,24323472834,,6571,str1,da82f8e2-c7b0-428f-8f9d-677252ef5f68,syn35367246 diff --git a/tests/data/mock_manifests/test_BulkRNAseq.csv b/tests/data/mock_manifests/test_BulkRNAseq.csv new file mode 100644 index 000000000..facfa3f6a --- /dev/null +++ b/tests/data/mock_manifests/test_BulkRNAseq.csv @@ -0,0 +1,3 @@ +Filename,Sample ID,File Format,Component,Genome Build,Genome FASTA +TestRNA-seqDataset1/TestRNA-seq-dummy-dataset.rtf,ABCD,BAM,BulkRNA-seqAssay,GRCh38, +TestRNA-seqDataset1/TestRNA-seq-dummy-dataset2.rtf,EFGH,CRAM,BulkRNA-seqAssay,GRCm39, diff --git a/tests/data/test_config.yml b/tests/data/test_config.yml deleted file mode 100644 index 72c1dbf9c..000000000 --- a/tests/data/test_config.yml +++ /dev/null @@ -1,19 +0,0 @@ -definitions: - creds_path: "../../credentials.json" - token_pickle: "token.pickle" - synapse_config: "../../.synapseConfig" ### Note: this key is required for people who use Synapse token authentication approach. 
- service_acct_creds: "../../schematic_service_account_creds.json" ## Note: this key is required for google drive services - -synapse: - master_fileview: "syn23643253" - manifest_basename: "synapse_storage_manifest" - manifest_folder: 'manifests' - -model: - input: - location: 'example.model.jsonld' - file_type: 'local' - -style: - google_manifest: - master_template_id: '1LYS5qE4nV9jzcYw5sXwCza25slDfRA1CIg3cs-hCdpU' diff --git a/tests/data/test_configs/default_config.yml b/tests/data/test_configs/default_config.yml new file mode 100644 index 000000000..6775b569a --- /dev/null +++ b/tests/data/test_configs/default_config.yml @@ -0,0 +1,21 @@ +# This config has the same default values as schematic itself has +asset_store: + synapse: + config: ".synapseConfig" + manifest_basename: 'synapse_storage_manifest' + master_fileview_id: 'syn23643253' + +manifest: + manifest_folder: 'manifests' + title: 'example' + data_type: + - 'Biospecimen' + - 'Patient' + +model: + location: 'tests/data/example.model.jsonld' + +google_sheets: + service_acct_creds_synapse_id: 'syn25171627' + service_acct_creds: "schematic_service_account_creds.json" + strict_validation: true diff --git a/tests/data/test_configs/invalid_config1.yml b/tests/data/test_configs/invalid_config1.yml new file mode 100644 index 000000000..071047b05 --- /dev/null +++ b/tests/data/test_configs/invalid_config1.yml @@ -0,0 +1,5 @@ +# This is a invalid config, but has fields that a previous version included + +definitions: + synapse_config: ".synapseConfig" + service_acct_creds: "schematic_service_account_creds.json" diff --git a/tests/data/test_configs/invalid_config2.yml b/tests/data/test_configs/invalid_config2.yml new file mode 100644 index 000000000..e2f710b48 --- /dev/null +++ b/tests/data/test_configs/invalid_config2.yml @@ -0,0 +1,3 @@ +# This is a invalid config, but has fields in asset store that are not supported +asset_store: + invalid_field: "xxx" \ No newline at end of file diff --git 
a/tests/data/test_configs/invalid_config3.yml b/tests/data/test_configs/invalid_config3.yml new file mode 100644 index 000000000..7cb2bc0f8 --- /dev/null +++ b/tests/data/test_configs/invalid_config3.yml @@ -0,0 +1,5 @@ +# This is a invalid config, but has fields in the synapse section that are not supported +asset_store: + synapse: + invalid_field: "xxx" + diff --git a/tests/data/test_configs/valid_config.yml b/tests/data/test_configs/valid_config.yml new file mode 100644 index 000000000..3e340721c --- /dev/null +++ b/tests/data/test_configs/valid_config.yml @@ -0,0 +1,21 @@ +# This is a valid config, but all values are different from defaults + +asset_store: + synapse: + config: "file_name" + manifest_basename: "file_name" + master_fileview_id: "syn1" + +manifest: + manifest_folder: "folder_name" + title: "title" + data_type: + - "data_type" + +model: + location: "model.jsonld" + +google_sheets: + service_acct_creds_synapse_id: "syn1" + service_acct_creds: "creds.json" + strict_validation: false diff --git a/tests/test_api.py b/tests/test_api.py index 82fb783a8..ea597a9ce 100644 --- a/tests/test_api.py +++ b/tests/test_api.py @@ -12,10 +12,12 @@ import pandas as pd # third party library import import pytest +from schematic.configuration.configuration import Configuration from schematic.schemas.generator import \ SchemaGenerator # Local application/library specific imports. 
from schematic_api.api import create_app + logging.basicConfig(level=logging.INFO) logger = logging.getLogger(__name__) @@ -26,8 +28,8 @@ def app(): yield app @pytest.fixture(scope="class") -def client(app, config_path): - app.config['SCHEMATIC_CONFIG'] = config_path +def client(app): + app.config['SCHEMATIC_CONFIG'] = None with app.test_client() as client: yield client @@ -80,8 +82,8 @@ def get_MockComponent_attribute(): yield MockComponent_attribute @pytest.fixture(scope="class") -def syn_token(config): - synapse_config_path = config.SYNAPSE_CONFIG_PATH +def syn_token(config:Configuration): + synapse_config_path = config.synapse_configuration_path config_parser = configparser.ConfigParser() config_parser.read(synapse_config_path) # try using synapse access token @@ -403,11 +405,11 @@ def ifPandasDataframe(self, response_dt): def test_generate_existing_manifest(self, client, data_model_jsonld, data_type, output_format, caplog): # set dataset if data_type == "Patient": - dataset_id = ["syn42171373"] #Mock Patient Manifest folder on synapse + dataset_id = ["syn51730545"] #Mock Patient Manifest folder on synapse elif data_type == "Biospecimen": - dataset_id = ["syn42171508"] #Mock biospecimen manifest folder + dataset_id = ["syn51730547"] #Mock biospecimen manifest folder elif data_type == ["Biospecimen", "Patient"]: - dataset_id = ["syn42171508", "syn42171373"] + dataset_id = ["syn51730547", "syn51730545"] else: dataset_id = None #if "all manifests", dataset id is None @@ -588,7 +590,7 @@ def test_get_datatype_manifest(self, client, syn_token): @pytest.mark.parametrize("manifest_id, expected_component, expected_file_name", [("syn51078535", "BulkRNA-seqAssay", "synapse_storage_manifest.csv"), ("syn51156998", "Biospecimen", "synapse_storage_manifest_biospecimen.csv")]) @pytest.mark.parametrize("new_manifest_name",[None,"Example.csv"]) @pytest.mark.parametrize("as_json",[None,True,False]) - def test_manifest_download(self, config, client, syn_token, manifest_id, 
new_manifest_name, as_json, expected_component, expected_file_name): + def test_manifest_download(self, config: Configuration, client, syn_token, manifest_id, new_manifest_name, as_json, expected_component, expected_file_name): params = { "access_token": syn_token, "manifest_id": manifest_id, @@ -608,7 +610,7 @@ def test_manifest_download(self, config, client, syn_token, manifest_id, new_man assert response_dta[0]["Component"] == expected_component current_work_dir = os.getcwd() - folder_test_manifests = config["synapse"]["manifest_folder"] + folder_test_manifests = config.manifest_folder folder_dir = os.path.join(current_work_dir, folder_test_manifests) # if a manfiest gets renamed, get new manifest file path diff --git a/tests/test_cli.py b/tests/test_cli.py index d5f5e2e5c..f3cd19a59 100644 --- a/tests/test_cli.py +++ b/tests/test_cli.py @@ -6,8 +6,8 @@ # from schematic import init from schematic.schemas.commands import schema -from schematic.utils.google_api_utils import download_creds_file from schematic.manifest.commands import manifest +from schematic.configuration.configuration import Configuration @pytest.fixture def runner() -> CliRunner: @@ -38,7 +38,7 @@ def assert_expected_file(self, result, output_path): except: pass - def test_schema_convert_cli(self, runner, config_path, helpers): + def test_schema_convert_cli(self, runner, helpers): data_model_csv_path = helpers.get_data_path("example.model.csv") @@ -59,11 +59,12 @@ def test_schema_convert_cli(self, runner, config_path, helpers): # get manifest by default # by default this should download the manifest as a CSV file @pytest.mark.google_credentials_needed - def test_get_example_manifest_default(self, runner, helpers, config, data_model_jsonld): + def test_get_example_manifest_default(self, runner, helpers, config: Configuration, data_model_jsonld): output_path = helpers.get_data_path("example.Patient.manifest.csv") + config.load_config("config_example.yml") result = runner.invoke( - manifest, 
["--config", config.CONFIG_PATH, "get", "--data_type", "Patient", "--jsonld", data_model_jsonld] + manifest, ["--config", config.config_path, "get", "--data_type", "Patient", "--jsonld", data_model_jsonld] ) @@ -73,22 +74,24 @@ def test_get_example_manifest_default(self, runner, helpers, config, data_model_ # get manifest as a csv # use google drive to export @pytest.mark.google_credentials_needed - def test_get_example_manifest_csv(self, runner, helpers, config, data_model_jsonld): + def test_get_example_manifest_csv(self, runner, helpers, config: Configuration, data_model_jsonld): output_path = helpers.get_data_path("test.csv") + config.load_config("config_example.yml") result = runner.invoke( - manifest, ["--config", config.CONFIG_PATH, "get", "--data_type", "Patient", "--jsonld", data_model_jsonld, "--output_csv", output_path] + manifest, ["--config", config.config_path, "get", "--data_type", "Patient", "--jsonld", data_model_jsonld, "--output_csv", output_path] ) assert result.exit_code == 0 self.assert_expected_file(result, output_path) # get manifest as an excel spreadsheet @pytest.mark.google_credentials_needed - def test_get_example_manifest_excel(self, runner, helpers, config, data_model_jsonld): + def test_get_example_manifest_excel(self, runner, helpers, config: Configuration, data_model_jsonld): output_path = helpers.get_data_path("test.xlsx") + config.load_config("config_example.yml") result = runner.invoke( - manifest, ["--config", config.CONFIG_PATH, "get", "--data_type", "Patient", "--jsonld", data_model_jsonld, "--output_xlsx", output_path] + manifest, ["--config", config.config_path, "get", "--data_type", "Patient", "--jsonld", data_model_jsonld, "--output_xlsx", output_path] ) assert result.exit_code == 0 diff --git a/tests/test_configuration.py b/tests/test_configuration.py index e3349c8de..b2fd59a7a 100644 --- a/tests/test_configuration.py +++ b/tests/test_configuration.py @@ -1,32 +1,215 @@ -import logging +"""Testing for Configuration 
module""" + +import os import pytest +from pydantic import ValidationError + +from schematic.configuration.dataclasses import ( + SynapseConfig, + ManifestConfig, + ModelConfig, + GoogleSheetsConfig, +) +from schematic.configuration.configuration import Configuration, ConfigNonAllowedFieldError + + +class TestDataclasses: + """Testing for pydantic dataclasses""" + + def test_synapse_config(self) -> None: + """Testing for SynapseConfig""" + assert isinstance(SynapseConfig(), SynapseConfig) + assert isinstance( + SynapseConfig( + config="file_name", + manifest_basename="file_name", + master_fileview_id="syn1", + ), + SynapseConfig, + ) + + with pytest.raises(ValidationError): + SynapseConfig( + config=None, + manifest_basename="file_name", + master_fileview_id="syn1", + ) + + with pytest.raises(ValidationError): + SynapseConfig( + config="file_name", + manifest_basename="file_name", + master_fileview_id="syn", + ) + + with pytest.raises(ValidationError): + SynapseConfig( + config="", + manifest_basename="file_name", + master_fileview_id="syn", + ) -from schematic.configuration import Configuration + def test_manifest_config(self) -> None: + """Testing for ManifestConfig""" + assert isinstance(ManifestConfig(), ManifestConfig) + assert isinstance( + ManifestConfig(title="title", data_type=[]), + ManifestConfig, + ) + with pytest.raises(ValidationError): + ManifestConfig(title="title", data_type="type") + with pytest.raises(ValidationError): + ManifestConfig(title="", data_type="type") -logging.basicConfig(level=logging.DEBUG) -logger = logging.getLogger(__name__) + def test_model_config(self) -> None: + """Testing for ModelConfig""" + assert isinstance(ModelConfig(), ModelConfig) + assert isinstance( + ModelConfig(location="url"), + ModelConfig, + ) + with pytest.raises(ValidationError): + ModelConfig(location="") + + def test_google_sheets_config(self) -> None: + """Testing for ModelConfig""" + assert isinstance(GoogleSheetsConfig(), GoogleSheetsConfig) + assert 
isinstance( + GoogleSheetsConfig( + service_acct_creds="file_name", + service_acct_creds_synapse_id="syn1", + strict_validation=True, + ), + GoogleSheetsConfig, + ) + with pytest.raises(ValidationError): + GoogleSheetsConfig( + service_acct_creds="file_name", + service_acct_creds_synapse_id="syn1", + strict_validation="tru", + ) + with pytest.raises(ValidationError): + GoogleSheetsConfig( + service_acct_creds="", + service_acct_creds_synapse_id="syn1", + strict_validation=True, + ) + with pytest.raises(ValidationError): + GoogleSheetsConfig( + service_acct_creds="file_name", + service_acct_creds_synapse_id="syn", + strict_validation=True, + ) class TestConfiguration: - def test_load_yaml_valid(self, tmpdir): - mock_contents = """ - section: - key: value + """Testing Configuration class""" + + def test_init(self) -> None: + """Testing for Configuration.__init__""" + config = Configuration() + assert config.config_path is None + assert config.synapse_configuration_path != ".synapseConfig" + assert os.path.basename(config.synapse_configuration_path) == ".synapseConfig" + assert config.synapse_manifest_basename == "synapse_storage_manifest" + assert config.synapse_master_fileview_id == "syn23643253" + assert config.manifest_folder == "manifests" + assert config.manifest_title == "example" + assert config.manifest_data_type == ["Biospecimen", "Patient"] + assert config.model_location == "tests/data/example.model.jsonld" + assert config.service_account_credentials_synapse_id + assert ( + config.service_account_credentials_path + != "schematic_service_account_creds.json" + ) + assert ( + os.path.basename(config.service_account_credentials_path) + == "schematic_service_account_creds.json" + ) + assert config.google_sheets_master_template_id == ( + "1LYS5qE4nV9jzcYw5sXwCza25slDfRA1CIg3cs-hCdpU" + ) + assert config.google_sheets_strict_validation + assert config.google_required_background_color == { + "red": 0.9215, + "green": 0.9725, + "blue": 0.9803, + } + assert 
config.google_optional_background_color == { + "red": 1.0, + "green": 1.0, + "blue": 0.9019, + } + + def test_load_config1(self) -> None: + """Testing for Configuration.load_config where config file contains default values""" + config = Configuration() + + config.load_config("tests/data/test_configs/default_config.yml") + assert os.path.basename(config.config_path) == "default_config.yml" + assert config.synapse_configuration_path != ".synapseConfig" + assert os.path.basename(config.synapse_configuration_path) == ".synapseConfig" + assert config.synapse_manifest_basename == "synapse_storage_manifest" + assert config.synapse_master_fileview_id == "syn23643253" + assert config.manifest_folder == "manifests" + assert config.manifest_title == "example" + assert config.manifest_data_type == ["Biospecimen", "Patient"] + assert config.model_location == "tests/data/example.model.jsonld" + assert config.service_account_credentials_synapse_id + assert ( + config.service_account_credentials_path + != "schematic_service_account_creds.json" + ) + assert ( + os.path.basename(config.service_account_credentials_path) + == "schematic_service_account_creds.json" + ) + assert config.google_sheets_master_template_id == ( + "1LYS5qE4nV9jzcYw5sXwCza25slDfRA1CIg3cs-hCdpU" + ) + assert config.google_sheets_strict_validation + + def test_load_config2(self) -> None: + """ + Testing for Configuration.load_config where config file + contains values different from the default """ - mock_file = tmpdir.join("mock.yml") - mock_file.write(mock_contents) - mock_object = {"section": {"key": "value"}} + config = Configuration() - test_object = Configuration.load_yaml(str(mock_file)) - assert test_object == mock_object + config.load_config("tests/data/test_configs/valid_config.yml") + assert os.path.basename(config.config_path) == "valid_config.yml" + assert os.path.basename(config.synapse_configuration_path) == "file_name" + assert config.synapse_manifest_basename == "file_name" + assert 
config.synapse_master_fileview_id == "syn1" + assert config.manifest_folder == "folder_name" + assert config.manifest_title == "title" + assert config.manifest_data_type == ["data_type"] + assert config.model_location == "model.jsonld" + assert config.service_account_credentials_synapse_id + assert os.path.basename(config.service_account_credentials_path) == "creds.json" + assert config.google_sheets_master_template_id == ( + "1LYS5qE4nV9jzcYw5sXwCza25slDfRA1CIg3cs-hCdpU" + ) + assert not config.google_sheets_strict_validation - def test_load_yaml_invalid(self, tmpdir): - mock_contents = """ - section: - key: bad-value: + def test_load_config3(self) -> None: + """ + Testing for Configuration.load_config where config file + is not valid """ - mock_file = tmpdir.join("mock.yml") - mock_file.write(mock_contents) + config = Configuration() + with pytest.raises(ConfigNonAllowedFieldError): + config.load_config("tests/data/test_configs/invalid_config1.yml") + with pytest.raises(ConfigNonAllowedFieldError): + config.load_config("tests/data/test_configs/invalid_config2.yml") + with pytest.raises(TypeError): + config.load_config("tests/data/test_configs/invalid_config3.yml") - test_object = Configuration.load_yaml(str(mock_file)) - assert test_object is None + def test_set_synapse_master_fileview_id(self) -> None: + """Testing for Configuration synapse_master_fileview_id setter""" + config = Configuration() + assert config.synapse_master_fileview_id == "syn23643253" + config.synapse_master_fileview_id = "syn1" + assert config.synapse_master_fileview_id == "syn1" + with pytest.raises(ValidationError): + config.synapse_master_fileview_id = "syn" diff --git a/tests/test_manifest.py b/tests/test_manifest.py index d2fb7eda8..a145f6426 100644 --- a/tests/test_manifest.py +++ b/tests/test_manifest.py @@ -8,6 +8,7 @@ from unittest.mock import MagicMock from schematic.manifest.generator import ManifestGenerator from schematic.schemas.generator import SchemaGenerator +from 
schematic.configuration.configuration import Configuration from schematic.utils.google_api_utils import execute_google_api_requests @@ -193,43 +194,19 @@ def test_get_manifest_excel(self, helpers, sheet_url, output_format, dataset_id) os.remove(manifest) # test all the functions used under get_manifest - @pytest.mark.parametrize("master_template_id", [None, "mock_master_template_id"]) - def test_create_empty_manifest_spreadsheet(self, config, simple_manifest_generator, master_template_id): + def test_create_empty_manifest_spreadsheet(self, simple_manifest_generator): ''' - Create an empty manifest spreadsheet regardless if master_template_id is provided - Note: _create_empty_manifest_spreadsheet calls _gdrive_copy_file. If there's no template id provided in config, this function will create a new manifest + Create an empty manifest spreadsheet. + Note: _create_empty_manifest_spreadsheet calls _gdrive_copy_file. ''' generator = simple_manifest_generator - - mock_spreadsheet = MagicMock() - title="Example" - if master_template_id: - # mock _gdrive_copy_file function - config["style"]["google_manifest"]["master_template_id"] = master_template_id - with patch('schematic.manifest.generator.ManifestGenerator._gdrive_copy_file', return_value="mock google sheet id") as MockClass: - - spreadsheet_id = generator._create_empty_manifest_spreadsheet(title=title) - assert spreadsheet_id == "mock google sheet id" - - else: - config["style"]["google_manifest"]["master_template_id"] = "" - - mock_spreadsheet = Mock() - mock_execute = Mock() - - - # Chain the mocks together - mock_spreadsheet.create.return_value = mock_spreadsheet - mock_spreadsheet.execute.return_value = mock_execute - mock_execute.get.return_value = "mock id" - mock_create = Mock(return_value=mock_spreadsheet) - - with patch.object(generator.sheet_service, "spreadsheets", mock_create): + # mock _gdrive_copy_file function + with patch('schematic.manifest.generator.ManifestGenerator._gdrive_copy_file', 
return_value="mock google sheet id"): + spreadsheet_id = generator._create_empty_manifest_spreadsheet(title=title) + assert spreadsheet_id == "mock google sheet id" - spreadsheet_id = generator._create_empty_manifest_spreadsheet(title) - assert spreadsheet_id == "mock id" @pytest.mark.parametrize("schema_path_provided", [True, False]) def test_get_json_schema(self, simple_manifest_generator, helpers, schema_path_provided): diff --git a/tests/test_store.py b/tests/test_store.py index 626d1e23d..4005069b2 100644 --- a/tests/test_store.py +++ b/tests/test_store.py @@ -12,9 +12,10 @@ from schematic.models.metadata import MetadataModel from schematic.store.base import BaseStorage from schematic.store.synapse import SynapseStorage, DatasetFileView, ManifestDownload -from schematic.utils.cli_utils import get_from_config from schematic.schemas.generator import SchemaGenerator from synapseclient.core.exceptions import SynapseHTTPError +from synapseclient.entity import File +from schematic.configuration.configuration import Configuration logging.basicConfig(level=logging.DEBUG) logger = logging.getLogger(__name__) @@ -113,12 +114,39 @@ def test_getFileAnnotations(self, synapse_store): assert expected_dict == actual_dict - def test_annotation_submission(self, synapse_store, helpers, config): - manifest_path = "mock_manifests/annotations_test_manifest.csv" - + @pytest.mark.parametrize('only_new_files',[True, False]) + def test_get_file_entityIds(self, helpers, synapse_store, only_new_files): + manifest_path = "mock_manifests/test_BulkRNAseq.csv" + dataset_files = synapse_store.getFilesInStorageDataset('syn39241199') + + if only_new_files: + # Prepare manifest is getting Ids for new files only + manifest = helpers.get_data_frame(manifest_path) + entityIds = pd.DataFrame({'entityId': ['syn39242580', 'syn51900502']}) + manifest = manifest.join(entityIds) + + # get entityIds for new files + files_and_Ids = synapse_store._get_file_entityIds(dataset_files=dataset_files, 
only_new_files=only_new_files, manifest=manifest) + + # Assert that there are no new files + for value in files_and_Ids.values(): + assert value == [] + + else: + # get entityIds for all files + files_and_Ids = synapse_store._get_file_entityIds(dataset_files=dataset_files, only_new_files=only_new_files) + + # assert that the correct number of files were found + assert len(files_and_Ids['entityId']) == 2 + + @pytest.mark.parametrize('manifest_path, test_annotations, datasetId, manifest_record_type', + [ ("mock_manifests/annotations_test_manifest.csv", {'CheckInt': '7', 'CheckList': 'valid, list, values'}, 'syn34295552', 'file_and_entities'), + ("mock_manifests/test_BulkRNAseq.csv", {'FileFormat': 'BAM', 'GenomeBuild': 'GRCh38'}, 'syn39241199', 'table_and_file')], + ids = ['non file-based', + 'file-based']) + def test_annotation_submission(self, synapse_store, helpers, manifest_path, test_annotations, datasetId, manifest_record_type, config: Configuration): # Upload dataset annotations - inputModelLocaiton = helpers.get_data_path(get_from_config(config.DATA, ("model", "input", "location"))) - sg = SchemaGenerator(inputModelLocaiton) + sg = SchemaGenerator(config.model_location) try: for attempt in Retrying( @@ -130,8 +158,8 @@ def test_annotation_submission(self, synapse_store, helpers, config): manifest_id = synapse_store.associateMetadataWithFiles( schemaGenerator = sg, metadataManifestPath = helpers.get_data_path(manifest_path), - datasetId = 'syn34295552', - manifest_record_type = 'file_and_entities', + datasetId = datasetId, + manifest_record_type = manifest_record_type, useSchemaLabel = True, hideBlanks = True, restrict_manifest = False, @@ -140,17 +168,19 @@ def test_annotation_submission(self, synapse_store, helpers, config): pass # Retrive annotations - entity_id, entity_id_spare = helpers.get_data_frame(manifest_path)["entityId"][0:2] + entity_id = helpers.get_data_frame(manifest_path)["entityId"][0] annotations = synapse_store.getFileAnnotations(entity_id) 
# Check annotations of interest - assert annotations['CheckInt'] == '7' - assert annotations['CheckList'] == 'valid, list, values' - assert 'CheckRecommended' not in annotations.keys() - - - + for key in test_annotations.keys(): + assert key in annotations.keys() + assert annotations[key] == test_annotations[key] + if manifest_path.endswith('annotations_test_manifest.csv'): + assert 'CheckRecommended' not in annotations.keys() + elif manifest_path.endswith('test_BulkRNAseq.csv'): + entity = synapse_store.syn.get(entity_id) + assert type(entity) == File @pytest.mark.parametrize("force_batch", [True, False], ids=["batch", "non_batch"]) def test_getDatasetAnnotations(self, dataset_id, synapse_store, force_batch): @@ -218,7 +248,6 @@ def test_getDatasetManifest(self, synapse_store, downloadFile): # return manifest id assert manifest_data == "syn51204513" - class TestDatasetFileView: def test_init(self, dataset_id, dataset_fileview, synapse_store): @@ -291,7 +320,7 @@ def test_tidy_table(self, dataset_fileview_table_tidy): @pytest.mark.table_operations class TestTableOperations: - def test_createTable(self, helpers, synapse_store, config, projectId, datasetId): + def test_createTable(self, helpers, synapse_store, config: Configuration, projectId, datasetId): table_manipulation = None # Check if FollowUp table exists if so delete @@ -307,7 +336,7 @@ def test_createTable(self, helpers, synapse_store, config, projectId, datasetId) # associate metadata with files manifest_path = "mock_manifests/table_manifest.csv" - inputModelLocaiton = helpers.get_data_path(get_from_config(config.DATA, ("model", "input", "location"))) + inputModelLocaiton = helpers.get_data_path(os.path.basename(config.model_location)) sg = SchemaGenerator(inputModelLocaiton) # updating file view on synapse takes a long time @@ -328,7 +357,7 @@ def test_createTable(self, helpers, synapse_store, config, projectId, datasetId) # assert table exists assert table_name in existing_tables.keys() - def 
test_replaceTable(self, helpers, synapse_store, config, projectId, datasetId): + def test_replaceTable(self, helpers, synapse_store, config: Configuration, projectId, datasetId): table_manipulation = 'replace' table_name='followup_synapse_storage_manifest_table' @@ -346,7 +375,7 @@ def test_replaceTable(self, helpers, synapse_store, config, projectId, datasetId assert table_name not in synapse_store.get_table_info(projectId = projectId).keys() # associate org FollowUp metadata with files - inputModelLocaiton = helpers.get_data_path(get_from_config(config.DATA, ("model", "input", "location"))) + inputModelLocaiton = helpers.get_data_path(os.path.basename(config.model_location)) sg = SchemaGenerator(inputModelLocaiton) # updating file view on synapse takes a long time @@ -395,7 +424,7 @@ def test_replaceTable(self, helpers, synapse_store, config, projectId, datasetId # delete table synapse_store.syn.delete(tableId) - def test_upsertTable(self, helpers, synapse_store, config, projectId, datasetId): + def test_upsertTable(self, helpers, synapse_store, config:Configuration, projectId, datasetId): table_manipulation = "upsert" table_name="MockRDB_synapse_storage_manifest_table".lower() @@ -413,7 +442,7 @@ def test_upsertTable(self, helpers, synapse_store, config, projectId, datasetId) assert table_name not in synapse_store.get_table_info(projectId = projectId).keys() # associate org FollowUp metadata with files - inputModelLocaiton = helpers.get_data_path(get_from_config(config.DATA, ("model", "input", "location"))) + inputModelLocaiton = helpers.get_data_path(os.path.basename(config.model_location)) sg = SchemaGenerator(inputModelLocaiton) # updating file view on synapse takes a long time @@ -468,7 +497,6 @@ def test_upsertTable(self, helpers, synapse_store, config, projectId, datasetId) # delete table synapse_store.syn.delete(tableId) - class TestDownloadManifest: @pytest.mark.parametrize("datasetFileView", [{"id": ["syn51203973", "syn51203943"], "name": 
["synapse_storage_manifest.csv", "synapse_storage_manifest_censored.csv"]}, {"id": ["syn51203973"], "name": ["synapse_storage_manifest.csv"]}, {"id": ["syn51203943"], "name": ["synapse_storage_manifest_censored.csv"]}]) def test_get_manifest_id(self, synapse_store, datasetFileView): @@ -493,7 +521,7 @@ def test_get_manifest_id(self, synapse_store, datasetFileView): assert manifest_syn_id == censored_manifest_id @pytest.mark.parametrize("newManifestName",["", "Example"]) - def test_download_manifest(self, config, mock_manifest_download, newManifestName): + def test_download_manifest(self, mock_manifest_download, newManifestName): # test the download function by downloading a manifest manifest_data = mock_manifest_download.download_manifest(mock_manifest_download, newManifestName) assert os.path.exists(manifest_data['path']) diff --git a/tests/test_utils.py b/tests/test_utils.py index bc21cdbd2..98fa3b63a 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -97,41 +97,6 @@ def test_query_dict(self): assert test_result_valid == "foobar" assert test_result_invalid is None - def test_get_from_config(self): - - mock_dict = {"k1": {"k2": {"k3": "foobar"}}} - mock_keys_valid = ["k1", "k2", "k3"] - mock_keys_invalid = ["k1", "k2", "k4"] - - test_result_valid = cli_utils.get_from_config(mock_dict, mock_keys_valid) - - assert test_result_valid == "foobar" - - with pytest.raises(MissingConfigValueError): - cli_utils.get_from_config(mock_dict, mock_keys_invalid) - - def test_fill_in_from_config(self, mocker): - - jsonld = "/path/to/one" - jsonld_none = None - - mock_config = {"model": {"path": "/path/to/two"}} - mock_keys = ["model", "path"] - mock_keys_invalid = ["model", "file"] - - mocker.patch("schematic.CONFIG.DATA", mock_config) - - result1 = cli_utils.fill_in_from_config("jsonld", jsonld, mock_keys) - result2 = cli_utils.fill_in_from_config("jsonld", jsonld, mock_keys) - result3 = cli_utils.fill_in_from_config("jsonld_none", jsonld_none, mock_keys) - - assert 
result1 == "/path/to/one" - assert result2 == "/path/to/one" - assert result3 == "/path/to/two" - - with pytest.raises(MissingConfigAndArgumentValueError): - cli_utils.fill_in_from_config("jsonld_none", jsonld_none, mock_keys_invalid) - class FakeResponse: status: int diff --git a/tests/test_validation.py b/tests/test_validation.py index 1f9e68e69..22b64199b 100644 --- a/tests/test_validation.py +++ b/tests/test_validation.py @@ -128,7 +128,17 @@ def test_invalid_manifest(self,helpers,sg,metadataModel): module_to_call = 'search', invalid_entry = 'q', sg = sg, - )[0] in errors + )[0] in errors + + assert GenerateError.generate_regex_error( + val_rule = 'regex', + reg_expression = '^\d+$', + row_num = '2', + attribute_name = 'Check Regex Integer', + module_to_call = 'search', + invalid_entry = '5.4', + sg = sg, + )[0] in errors assert GenerateError.generate_url_error( val_rule = 'url', diff --git a/uwsgi-nginx-entrypoint.sh b/uwsgi-nginx-entrypoint.sh index 6c568dd31..0fa2e6188 100644 --- a/uwsgi-nginx-entrypoint.sh +++ b/uwsgi-nginx-entrypoint.sh @@ -19,6 +19,9 @@ else content_server=$content_server" listen ${USE_LISTEN_PORT} default_server;\n" content_server=$content_server" listen [::]:${USE_LISTEN_PORT} default_server;\n" content_server=$content_server' server_name 127.0.0.1;\n' + content_server=$content_server' proxy_read_timeout 300;\n' + content_server=$content_server' proxy_connect_timeout 300;\n' + content_server=$content_server' proxy_send_timeout 300;\n' content_server=$content_server' location / {\n' content_server=$content_server' try_files $uri @app;\n' content_server=$content_server' }\n'