Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

update environment/requirements file creation #1262

Merged
merged 5 commits into from
Dec 29, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion .github/workflows/build-publish-pypi.yml
Original file line number Diff line number Diff line change
Expand Up @@ -36,7 +36,7 @@ jobs:

- name: Build and check package
run: |
python -m pip install --user hatch twine validate-pyproject[all]
python -m pip install --user -r ./environments/requirements/requirements-packaging.txt
python -m validate_pyproject ./pyproject.toml
python -m hatch build --clean
python -m twine check --strict ./dist/*
Expand Down
2 changes: 1 addition & 1 deletion environments/docker/Dockerfile
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,7 @@ COPY --chmod=0755 ./README.md ./osmnx/

# install and configure everything in one RUN to keep image tidy
RUN conda update --yes -c conda-forge --strict-channel-priority -n base conda mamba && \
mamba install --update-all --force-reinstall --yes -c conda-forge --strict-channel-priority --file ./osmnx/requirements.txt && \
mamba install --update-all --force-reinstall --yes -c conda-forge --strict-channel-priority --file ./osmnx/requirements-all.txt && \
python -m pip install --no-cache-dir -e ./osmnx/ && \
python -m ipykernel install --sys-prefix --name ox --display-name "Python (ox)" && \
rm -f -r -v /opt/conda/share/jupyter/kernels/python3 && \
Expand Down
104 changes: 39 additions & 65 deletions environments/make-env-files.py
Original file line number Diff line number Diff line change
@@ -1,21 +1,21 @@
# noqa: INP001
"""Make conda env.yml and pip requirements.txt files from environments.json data."""
"""Make conda env.yaml and pip requirements.txt files from environments.json data."""

from __future__ import annotations

import argparse
import itertools
import json
from itertools import chain
from json import load as json_load
from pathlib import Path
from typing import Any

import tomllib
from packaging.requirements import Requirement
from tomllib import load as tomllib_load

# path to package's pyproject and the config json file
pyproject_path = "./pyproject.toml"
environments_config_path = "./environments/requirements/environments.json"
PYPROJECT_PATH = "./pyproject.toml"
ENVS_CONFIG_PATH = "./environments/requirements/environments.json"

# what channels to specify in conda env yml files
# what channels to specify in conda env yaml files
CHANNELS = ["conda-forge"]

HEADER = (
Expand All @@ -25,21 +25,9 @@
)


def extract_optional_deps() -> list[Requirement]:
"""
Extract a list of the optional dependencies/versions from pyproject.toml.

Returns
-------
optional_deps
"""
opts = pyproject["project"]["optional-dependencies"]
return list({Requirement(o) for o in itertools.chain.from_iterable(opts.values())})


def make_requirement(
requirement: Requirement,
force_pin: bool = False, # noqa: FBT001,FBT002
pin_exact: bool = False, # noqa: FBT001,FBT002
is_conda: bool = True, # noqa: FBT001,FBT002
) -> str:
"""
Expand All @@ -51,73 +39,68 @@ def make_requirement(
----------
requirement
A requirement object
force_pin
pin_exact
If True, pin requirement to version rather than using existing
specifier. Allows you to convert minimum versions to pinned versions.
is_conda
If True and if `force_pin` is True, format the requirement string to
If True and if `pin_exact` is True, format the requirement string to
end with ".*" for conda environment file pinning format compatibility.

Returns
-------
requirement_str
"""
specifiers = list(requirement.specifier)
if force_pin and len(specifiers) == 1:
if pin_exact and len(specifiers) == 1:
spec = f"{requirement.name}=={specifiers[0].version}"
if is_conda and not spec.endswith(".*"):
spec += ".*"
return spec
return str(requirement)


def make_file(env_name: str) -> None:
def make_file(env: dict[str, Any]) -> None:
"""
Write a conda environment yaml file or pip requirements.txt file.

Parameters
----------
env_name
An environment name among the keys of environments.json.
env
An environment configuration dictionary.

Returns
-------
None
"""
env = envs[env_name]
depends_on = []
output_path = Path(env["output_path"])

# it's a conda env file if it ends with ".yml", otherwise it's a pip
# requirements.txt file
is_conda = env["output_path"].endswith(".yml")
# it's conda env if it's a yaml file, otherwise it's pip requirements.txt
is_conda = output_path.suffix in {".yaml", ".yml"}

# determine which dependencies to add based on the configuration
depends_on = []
if env["needs_python"]:
python_dep = Requirement(f"python{pyproject['project']['requires-python']}")
depends_on.append(python_dep)
if is_conda:
depends_on.append(Requirement(f"python{pyproject['project']['requires-python']}"))
if env["needs_dependencies"]:
dependencies = [Requirement(d) for d in pyproject["project"]["dependencies"]]
depends_on.extend(dependencies)
if env["needs_optionals"]:
optionals = extract_optional_deps()
depends_on.extend(optionals)
depends_on.extend(Requirement(d) for d in pyproject["project"]["dependencies"])
optionals = pyproject["project"]["optional-dependencies"].values()
depends_on.extend({Requirement(o) for o in chain.from_iterable(optionals)})

# make the list of requirements
requirements = [
make_requirement(dep, force_pin=env["force_pin"], is_conda=is_conda) for dep in depends_on
]
requirements = [make_requirement(dep, env["pin_exact"], is_conda) for dep in depends_on]

# add any extra requirements if provided in the configuration
# inject any additional requirement files if specified by the config
if env["extras"] is not None:
for extras_filepath in env["extras"]:
with Path(extras_filepath).open() as f:
requirements += f.read().splitlines()

# convert the requirements to conda env yml or pip requirements.txt
# convert the requirements to conda env yaml or pip requirements text
requirements = sorted(requirements)
if not is_conda:
text = HEADER + "\n".join(requirements) + "\n"
else:
env_name = Path(output_path).stem
data = {"name": env_name, "channels": CHANNELS, "dependencies": requirements}
text = ""
for k, v in data.items():
Expand All @@ -127,29 +110,20 @@ def make_file(env_name: str) -> None:
text += k + ": " + v + "\n"
text = HEADER + text

# write the file to disk
with Path(env["output_path"]).open("w") as f:
# write the text to file on disk
with Path(output_path).open("w") as f:
f.writelines(text)

print(f"Wrote {len(requirements)} requirements to {env['output_path']!r}") # noqa: T201
print(f"Wrote {len(requirements)} requirements to {str(output_path)!r}") # noqa: T201


if __name__ == "__main__":
# load the pyproject.toml and the environments.json config files
with Path(pyproject_path).open("rb") as f:
pyproject = tomllib.load(f)
with Path(environments_config_path).open("rb") as f:
envs = json.load(f)

# parse any command-line arguments passed by the user
arg_parser = argparse.ArgumentParser()
arg_parser.add_argument("-n", dest="env_name", type=str)
args = arg_parser.parse_args()

if args.env_name is not None:
# if user passed -n command line argument, generate only that file
make_file(args.env_name)
else:
# otherwise, make all environment files
for env_name in envs:
make_file(env_name)
with Path(PYPROJECT_PATH).open("rb") as f:
pyproject = tomllib_load(f)
with Path(ENVS_CONFIG_PATH).open("rb") as f:
envs = json_load(f)

# make each environment/requirements file as configured
for env in envs:
make_file(env)
92 changes: 47 additions & 45 deletions environments/requirements/environments.json
Original file line number Diff line number Diff line change
@@ -1,45 +1,47 @@
{
"env-ci": {
"output_path": "./environments/tests/env-ci.yml",
"needs_python": true,
"needs_dependencies": true,
"needs_optionals": true,
"force_pin": false,
"extras": ["./environments/requirements/requirements-docs.txt",
"./environments/requirements/requirements-tests.txt"]
},
"env-test-minimum-deps": {
"output_path": "./environments/tests/env-test-minimum-deps.yml",
"needs_python": true,
"needs_dependencies": true,
"needs_optionals": true,
"force_pin": true,
"extras": ["./environments/requirements/requirements-tests.txt"]
},
"requirements-test-latest-deps": {
"output_path": "./environments/tests/requirements-test-latest-deps.txt",
"needs_python": false,
"needs_dependencies": true,
"needs_optionals": true,
"force_pin": false,
"extras": ["./environments/requirements/requirements-tests.txt"]
},
"requirements-rtd": {
"output_path": "./docs/requirements-rtd.txt",
"needs_python": false,
"needs_dependencies": false,
"needs_optionals": false,
"force_pin": false,
"extras": ["./environments/requirements/requirements-docs.txt"]
},
"requirements-all": {
"output_path": "./environments/requirements/requirements-all.txt",
"needs_python": false,
"needs_dependencies": true,
"needs_optionals": true,
"force_pin": false,
"extras": ["./environments/requirements/requirements-docs.txt",
"./environments/requirements/requirements-extras.txt",
"./environments/requirements/requirements-tests.txt"]
}
}
[
{
"output_path": "./environments/tests/env-ci.yml",
"needs_dependencies": true,
"pin_exact": false,
"extras": [
"./environments/requirements/requirements-docs.txt",
"./environments/requirements/requirements-packaging.txt",
"./environments/requirements/requirements-tests.txt"
]
},
{
"output_path": "./environments/tests/env-test-minimum-deps.yml",
"needs_dependencies": true,
"pin_exact": true,
"extras": [
"./environments/requirements/requirements-tests.txt"
]
},
{
"output_path": "./environments/tests/requirements-test-latest-deps.txt",
"needs_dependencies": true,
"pin_exact": false,
"extras": [
"./environments/requirements/requirements-tests.txt"
]
},
{
"output_path": "./docs/requirements-rtd.txt",
"needs_dependencies": false,
"pin_exact": false,
"extras": [
"./environments/requirements/requirements-docs.txt"
]
},
{
"output_path": "./environments/requirements/requirements-all.txt",
"needs_dependencies": true,
"pin_exact": false,
"extras": [
"./environments/requirements/requirements-docs.txt",
"./environments/requirements/requirements-extras.txt",
"./environments/requirements/requirements-packaging.txt",
"./environments/requirements/requirements-tests.txt"
]
}
]
1 change: 0 additions & 1 deletion environments/requirements/requirements-all.txt
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,6 @@
# /environments/requirements/ and the requirements in pyproject.toml.
bottleneck
cartopy
conda-smithy
folium
furo
geopandas>=1.0
Expand Down
1 change: 0 additions & 1 deletion environments/requirements/requirements-extras.txt
Original file line number Diff line number Diff line change
@@ -1,6 +1,5 @@
bottleneck
cartopy
conda-smithy
folium
jupyterlab
nbdime
Expand Down
3 changes: 3 additions & 0 deletions environments/requirements/requirements-packaging.txt
Original file line number Diff line number Diff line change
@@ -0,0 +1,3 @@
hatch
twine
validate-pyproject
3 changes: 0 additions & 3 deletions environments/requirements/requirements-tests.txt
Original file line number Diff line number Diff line change
@@ -1,9 +1,6 @@
hatch
lxml
pip
pre-commit
pytest
pytest-cov
twine
typeguard
validate-pyproject
3 changes: 0 additions & 3 deletions environments/tests/env-test-minimum-deps.yml
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,6 @@ channels:
- conda-forge
dependencies:
- geopandas==1.0.*
- hatch
- lxml
- matplotlib==3.5.*
- networkx==2.5.*
Expand All @@ -23,6 +22,4 @@ dependencies:
- scikit-learn==0.23.*
- scipy==1.5.*
- shapely==2.0.*
- twine
- typeguard
- validate-pyproject
3 changes: 0 additions & 3 deletions environments/tests/requirements-test-latest-deps.txt
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,6 @@
# /environments/make-env-files.py using the environment definition files in
# /environments/requirements/ and the requirements in pyproject.toml.
geopandas>=1.0
hatch
lxml
matplotlib>=3.5
networkx>=2.5
Expand All @@ -18,6 +17,4 @@ rio-vrt>=0.3
scikit-learn>=0.23
scipy>=1.5
shapely>=2.0
twine
typeguard
validate-pyproject
8 changes: 4 additions & 4 deletions environments/unix-create-env.sh
Original file line number Diff line number Diff line change
Expand Up @@ -9,10 +9,10 @@ conda activate base
conda env remove --yes -n $ENV || true
mamba create --yes -c conda-forge --strict-channel-priority -n $ENV --file ./requirements/requirements-all.txt
conda activate $ENV
python -m pip --python $ENV_PATH uninstall $PACKAGE --yes
python -m pip --python $ENV_PATH install -e ../.
python -m ipykernel install --prefix $ENV_PATH --name $ENV --display-name "Python ($ENV)"
python -m pip --python "$ENV_PATH" uninstall $PACKAGE --yes
python -m pip --python "$ENV_PATH" install -e ../.
python -m ipykernel install --prefix "$ENV_PATH" --name $ENV --display-name "Python ($ENV)"
conda list -n $ENV
python -m pip --python $ENV_PATH check
python -m pip --python "$ENV_PATH" check
jupyter kernelspec list
ipython -c "import $PACKAGE; print('$PACKAGE version', $PACKAGE.__version__)"
12 changes: 7 additions & 5 deletions tests/lint_test.sh
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,9 @@ set -euo pipefail
# delete temp files and folders
rm -r -f ./.coverage* ./.pytest_cache ./.temp ./dist ./docs/build ./*/__pycache__

# create all the configured environment/requirements files
python ./environments/make-env-files.py

# run the pre-commit hooks for linting/formatting
pre-commit run --all-files

Expand All @@ -12,13 +15,12 @@ python -m validate_pyproject ./pyproject.toml
python -m hatch build --clean
python -m twine check --strict ./dist/*

# build the docs and test that links are alive
python -m sphinx -E -W --keep-going -b html ./docs/source ./docs/build/html
python -m sphinx -E -W --keep-going -b linkcheck ./docs/source ./docs/build/linkcheck

# run the tests and report the test coverage
python -m pytest --verbose --maxfail=1 --typeguard-packages=osmnx --cov=osmnx --cov-report=term-missing:skip-covered

# build the docs and test that links are alive
python -m sphinx -q -a -E -W --keep-going -b html ./docs/source ./docs/build/html
python -m sphinx -q -a -E -W --keep-going -b linkcheck ./docs/source ./docs/build/linkcheck

# delete temp files and folders
sleep 1
rm -r -f ./.coverage* ./.pytest_cache ./.temp ./dist ./docs/build ./*/__pycache__
Loading