Commit d299a88
Merge pull request #1130 from gboeing/lint
Increase linting
gboeing authored Feb 11, 2024
2 parents 565589c + d1fb5f5 commit d299a88
Showing 26 changed files with 413 additions and 221 deletions.
2 changes: 1 addition & 1 deletion .github/workflows/ci.yml
@@ -50,7 +50,7 @@ jobs:
run: make -C ./docs html SPHINXOPTS="-W --keep-going"

- name: Test code
- run: pytest --maxfail=1 --typeguard-packages=osmnx --cov=./osmnx --cov-report=xml --cov-report=term-missing --verbose
+ run: pytest --maxfail=1 --typeguard-packages=osmnx --cov=./osmnx --cov-report=xml --verbose

- name: Upload coverage report
uses: codecov/codecov-action@v4
2 changes: 1 addition & 1 deletion .github/workflows/test-minimal.yml
@@ -50,4 +50,4 @@ jobs:
python -m sphinx -b linkcheck ./docs/source ./docs/build/linkcheck
- name: Test code
- run: pytest --maxfail=1 --typeguard-packages=osmnx --cov=./osmnx --cov-report=term-missing --verbose
+ run: pytest --maxfail=1 --typeguard-packages=osmnx --cov=./osmnx --cov-report=xml --verbose
1 change: 1 addition & 0 deletions .pre-commit-config.yaml
@@ -8,6 +8,7 @@ repos:
- id: check-builtin-literals
- id: check-case-conflict
- id: check-docstring-first
+ - id: check-executables-have-shebangs
- id: check-json
- id: check-merge-conflict
args: [--assume-in-merge]
1 change: 1 addition & 0 deletions docs/source/conf.py
@@ -1,3 +1,4 @@
+ # ruff: noqa: INP001
"""
Configuration file for the Sphinx documentation builder.
37 changes: 21 additions & 16 deletions osmnx/_http.py
@@ -23,16 +23,18 @@


def _save_to_cache(
- url: str, response_json: dict[str, Any] | list[dict[str, Any]], ok: bool
+ url: str,
+ response_json: dict[str, Any] | list[dict[str, Any]],
+ ok: bool,
) -> None:
"""
Save a HTTP response JSON object to a file in the cache folder.
- Function calculates the checksum of url to generate the cache file's name.
- If the request was sent to server via POST instead of GET, then URL should
- be a GET-style representation of request. Response is only saved to a
- cache file if settings.use_cache is True, response_json is not None, and
- ok is True.
+ This calculates the checksum of `url` to generate the cache file name. If
+ the request was sent to server via POST instead of GET, then `url` should
+ be a GET-style representation of the request. Response is only saved to a
+ cache file if `settings.use_cache` is True, `response_json` is not None,
+ and `ok` is True.
Users should always pass OrderedDicts instead of dicts of parameters into
request functions, so the parameters remain in the same order each time,
@@ -64,8 +66,8 @@ def _save_to_cache(

# hash the url to make the filename succinct but unique
# sha1 digest is 160 bits = 20 bytes = 40 hexadecimal characters
- filename = sha1(url.encode("utf-8")).hexdigest() + ".json"
- cache_filepath = cache_folder / filename
+ checksum = sha1(url.encode("utf-8")).hexdigest()  # noqa: S324
+ cache_filepath = cache_folder / f"{checksum}.json"

# dump to json, and save to file
cache_filepath.write_text(json.dumps(response_json), encoding="utf-8")
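A minimal standalone sketch of the cache-key scheme above (the URL and cache folder here are placeholders, not OSMnx settings): the URL is hashed with SHA-1, so identically ordered query parameters always map to the same 40-character filename, which is why the docstring asks callers to pass OrderedDicts.

    from hashlib import sha1
    from pathlib import Path

    url = "https://overpass-api.de/api/interpreter?data=..."
    # sha1 serves as a cache key here, not a security measure
    checksum = sha1(url.encode("utf-8")).hexdigest()  # 40 hex characters
    cache_filepath = Path("cache") / f"{checksum}.json"
    # saving and retrieving both derive the same path from the same URL
    exists = cache_filepath.is_file()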
@@ -87,19 +89,20 @@ def _url_in_cache(url: str) -> Path | None:
Returns
-------
- filepath
+ cache_filepath
Path to cached response for `url` if it exists, otherwise None.
"""
# hash the url to generate the cache filename
- filename = sha1(url.encode("utf-8")).hexdigest() + ".json"
- filepath = Path(settings.cache_folder) / filename
+ checksum = sha1(url.encode("utf-8")).hexdigest()  # noqa: S324
+ cache_filepath = Path(settings.cache_folder) / f"{checksum}.json"

# if this file exists in the cache, return its full path
- return filepath if filepath.is_file() else None
+ return cache_filepath if cache_filepath.is_file() else None


def _retrieve_from_cache(
- url: str, check_remark: bool = True
+ url: str,
+ check_remark: bool = True,
) -> dict[str, Any] | list[dict[str, Any]] | None:
"""
Retrieve a HTTP response JSON object from the cache if it exists.
@@ -123,7 +126,7 @@ def _retrieve_from_cache(
cache_filepath = _url_in_cache(url)
if cache_filepath is not None:
response_json: dict[str, Any] | list[dict[str, Any]] = json.loads(
- cache_filepath.read_text(encoding="utf-8")
+ cache_filepath.read_text(encoding="utf-8"),
)

# return None if check_remark is True and there is a server
@@ -146,7 +149,9 @@


def _get_http_headers(
- user_agent: str | None = None, referer: str | None = None, accept_language: str | None = None
+ user_agent: str | None = None,
+ referer: str | None = None,
+ accept_language: str | None = None,
) -> dict[str, str]:
"""
Update the default requests HTTP headers with OSMnx information.
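As a rough sketch of what a helper like this does (not OSMnx's exact implementation; the argument handling is assumed from the signature above), it merges any caller-supplied values into the requests library's default headers:

    from requests.utils import default_headers

    def get_http_headers(user_agent=None, referer=None, accept_language=None):
        # start from requests' defaults, then overlay any supplied values
        headers = dict(default_headers())
        if user_agent is not None:
            headers["User-Agent"] = user_agent
        if referer is not None:
            headers["referer"] = referer
        if accept_language is not None:
            headers["Accept-Language"] = accept_language
        return headers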
@@ -265,7 +270,7 @@ def _config_dns(url: str) -> None:
ip = _resolve_host_via_doh(hostname)

# mutate socket.getaddrinfo to map hostname -> IP address
- def _getaddrinfo(*args, **kwargs):  # type: ignore[no-untyped-def]
+ def _getaddrinfo(*args: Any, **kwargs: Any) -> Any:  # noqa: ANN401
if args[0] == hostname:
msg = f"Resolved {hostname!r} to {ip!r}"
utils.log(msg, level=lg.INFO)
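The newly annotated `_getaddrinfo` wrapper above monkeypatches the socket module so a pinned hostname resolves to the IP address obtained via DNS-over-HTTPS. A self-contained sketch of that pattern (hostname and IP are placeholder values):

    import socket

    hostname, ip = "example.com", "203.0.113.7"
    _original_getaddrinfo = socket.getaddrinfo

    def _getaddrinfo(*args, **kwargs):
        # intercept lookups for the pinned hostname; pass all others through
        if args[0] == hostname:
            return _original_getaddrinfo(ip, *args[1:], **kwargs)
        return _original_getaddrinfo(*args, **kwargs)

    socket.getaddrinfo = _getaddrinfo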
92 changes: 56 additions & 36 deletions osmnx/_osm_xml.py
@@ -10,7 +10,10 @@
from typing import Any
from typing import TextIO
from warnings import warn
- from xml.etree import ElementTree as ET
+ from xml.etree.ElementTree import Element
+ from xml.etree.ElementTree import ElementTree
+ from xml.etree.ElementTree import SubElement
+ from xml.etree.ElementTree import parse as etree_parse

import networkx as nx
import numpy as np
@@ -34,25 +37,25 @@ class _OSMContentHandler(xml.sax.handler.ContentHandler):
https://overpass-api.de
"""

- def __init__(self) -> None:
+ def __init__(self) -> None:  # noqa: ANN101
self._element: dict[str, Any] | None = None
self.object: dict[str, Any] = {"elements": []}

- def startElement(self, name: str, attrs: xml.sax.xmlreader.AttributesImpl) -> None:
+ def startElement(self, name: str, attrs: xml.sax.xmlreader.AttributesImpl) -> None:  # noqa: ANN101,N802
if name == "osm":
self.object.update({k: v for k, v in attrs.items() if k in {"version", "generator"}})

elif name in {"node", "way"}:
self._element = dict(type=name, tags={}, nodes=[], **attrs)
self._element.update({k: float(v) for k, v in attrs.items() if k in {"lat", "lon"}})
self._element.update(
- {k: int(v) for k, v in attrs.items() if k in {"id", "uid", "version", "changeset"}}
+ {k: int(v) for k, v in attrs.items() if k in {"id", "uid", "version", "changeset"}},
)

elif name == "relation":
self._element = dict(type=name, tags={}, members=[], **attrs)
self._element.update(
- {k: int(v) for k, v in attrs.items() if k in {"id", "uid", "version", "changeset"}}
+ {k: int(v) for k, v in attrs.items() if k in {"id", "uid", "version", "changeset"}},
)

elif name == "tag":
@@ -63,10 +66,10 @@ def startElement(self, name: str, attrs: xml.sax.xmlreader.AttributesImpl) -> None:

elif name == "member":
self._element["members"].append( # type: ignore[index]
- {k: (int(v) if k == "ref" else v) for k, v in attrs.items()}
+ {k: (int(v) if k == "ref" else v) for k, v in attrs.items()},
)

- def endElement(self, name: str) -> None:
+ def endElement(self, name: str) -> None:  # noqa: ANN101,N802
if name in {"node", "way", "relation"}:
self.object["elements"].append(self._element)

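`_OSMContentHandler` builds an Overpass-like JSON object from SAX events as the XML streams past. A toy illustration of the same event-driven pattern (not the OSMnx class itself):

    import xml.sax
    from xml.sax.handler import ContentHandler

    class TinyHandler(ContentHandler):
        def __init__(self):
            super().__init__()
            self.elements = []

        def startElement(self, name, attrs):
            # collect each node element's attributes as a plain dict
            if name == "node":
                self.elements.append(dict(attrs.items()))

    handler = TinyHandler()
    xml.sax.parseString(b'<osm><node id="1" lat="0.0" lon="0.0"/></osm>', handler)
    print(handler.elements)  # [{'id': '1', 'lat': '0.0', 'lon': '0.0'}]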
@@ -98,7 +101,7 @@ def _opener(filepath: Path, encoding: str) -> TextIO:

# warn if this XML file was generated by OSMnx itself
with _opener(Path(filepath), encoding) as f:
- root_attrs = ET.parse(f).getroot().attrib
+ root_attrs = etree_parse(f).getroot().attrib  # noqa: S314
if "generator" in root_attrs and "OSMnx" in root_attrs["generator"]:
msg = (
"The XML file you are loading appears to have been generated "
@@ -112,11 +115,11 @@ def _opener(filepath: Path, encoding: str) -> TextIO:
# parse the XML to Overpass-like JSON
with _opener(Path(filepath), encoding) as f:
handler = _OSMContentHandler()
- xml.sax.parse(f, handler)
+ xml.sax.parse(f, handler)  # noqa: S317
return handler.object


- def _save_graph_xml(
+ def _save_graph_xml(  # noqa: PLR0913
data: nx.MultiDiGraph | tuple[gpd.GeoDataFrame, gpd.GeoDataFrame],
filepath: str | Path | None,
node_tags: list[str],
@@ -187,7 +190,7 @@ def _save_graph_xml(

if isinstance(data, nx.MultiDiGraph):
gdf_nodes, gdf_edges = utils_graph.graph_to_gdfs(
- data, node_geometry=False, fill_edge_geometry=False
+ data,
+ node_geometry=False,
+ fill_edge_geometry=False,
)
elif isinstance(data, tuple):
gdf_nodes, gdf_edges = data
@@ -225,21 +230,29 @@
)

# initialize XML tree with an OSM root element then append nodes/edges
- root = ET.Element("osm", attrib={"version": api_version, "generator": f"OSMnx {__version__}"})
+ root = Element("osm", attrib={"version": api_version, "generator": f"OSMnx {__version__}"})
root = _append_nodes_xml_tree(root, gdf_nodes, node_attrs, node_tags)
root = _append_edges_xml_tree(
- root, gdf_edges, edge_attrs, edge_tags, edge_tag_aggs, merge_edges
+ root,
+ gdf_edges,
+ edge_attrs,
+ edge_tags,
+ edge_tag_aggs,
+ merge_edges,
)

# write to disk
- ET.ElementTree(root).write(filepath, encoding="utf-8", xml_declaration=True)
+ ElementTree(root).write(filepath, encoding="utf-8", xml_declaration=True)
msg = f"Saved graph as .osm file at {filepath!r}"
utils.log(msg, level=lg.INFO)


def _append_nodes_xml_tree(
- root: ET.Element, gdf_nodes: gpd.GeoDataFrame, node_attrs: list[str], node_tags: list[str]
- ) -> ET.Element:
+ root: Element,
+ gdf_nodes: gpd.GeoDataFrame,
+ node_attrs: list[str],
+ node_tags: list[str],
+ ) -> Element:
"""
Append nodes to an XML tree.
@@ -261,16 +274,19 @@
"""
for _, row in gdf_nodes.iterrows():
row_str = row.dropna().astype(str)
- node = ET.SubElement(root, "node", attrib=row_str[node_attrs].to_dict())
+ node = SubElement(root, "node", attrib=row_str[node_attrs].to_dict())

for tag in node_tags:
if tag in row_str:
- ET.SubElement(node, "tag", attrib={"k": tag, "v": row_str[tag]})
+ SubElement(node, "tag", attrib={"k": tag, "v": row_str[tag]})
return root


def _create_way_for_each_edge(
- root: ET.Element, gdf_edges: gpd.GeoDataFrame, edge_attrs: list[str], edge_tags: list[str]
+ root: Element,
+ gdf_edges: gpd.GeoDataFrame,
+ edge_attrs: list[str],
+ edge_tags: list[str],
) -> None:
"""
Append a new way to an empty XML tree graph for each edge in way.
@@ -298,16 +314,16 @@
"""
for _, row in gdf_edges.iterrows():
row_str = row.dropna().astype(str)
- edge = ET.SubElement(root, "way", attrib=row_str[edge_attrs].to_dict())
- ET.SubElement(edge, "nd", attrib={"ref": row_str["u"]})
- ET.SubElement(edge, "nd", attrib={"ref": row_str["v"]})
+ edge = SubElement(root, "way", attrib=row_str[edge_attrs].to_dict())
+ SubElement(edge, "nd", attrib={"ref": row_str["u"]})
+ SubElement(edge, "nd", attrib={"ref": row_str["v"]})
for tag in edge_tags:
if tag in row_str:
- ET.SubElement(edge, "tag", attrib={"k": tag, "v": row_str[tag]})
+ SubElement(edge, "tag", attrib={"k": tag, "v": row_str[tag]})


def _append_merged_edge_attrs(
- xml_edge: ET.Element,
+ xml_edge: Element,
sample_edge: dict[str, Any],
all_edges_df: pd.DataFrame,
edge_tags: list[str],
@@ -344,15 +360,15 @@ def _append_merged_edge_attrs(
if edge_tag_aggs is None:
for tag in edge_tags:
if tag in sample_edge:
- ET.SubElement(xml_edge, "tag", attrib={"k": tag, "v": sample_edge[tag]})
+ SubElement(xml_edge, "tag", attrib={"k": tag, "v": sample_edge[tag]})
else:
for tag in edge_tags:
if (tag in sample_edge) and (tag not in (t for t, agg in edge_tag_aggs)):
- ET.SubElement(xml_edge, "tag", attrib={"k": tag, "v": sample_edge[tag]})
+ SubElement(xml_edge, "tag", attrib={"k": tag, "v": sample_edge[tag]})

for tag, agg in edge_tag_aggs:
if tag in all_edges_df.columns:
- ET.SubElement(
+ SubElement(
xml_edge,
"tag",
attrib={
@@ -363,7 +379,9 @@


def _append_nodes_as_edge_attrs(
- xml_edge: ET.Element, sample_edge: dict[str, Any], all_edges_df: pd.DataFrame
+ xml_edge: Element,
+ sample_edge: dict[str, Any],
+ all_edges_df: pd.DataFrame,
) -> None:
"""
Extract list of ordered nodes and append as attributes of XML edge.
@@ -382,8 +400,8 @@
None
"""
if len(all_edges_df) == 1:
ET.SubElement(xml_edge, "nd", attrib={"ref": sample_edge["u"]})
ET.SubElement(xml_edge, "nd", attrib={"ref": sample_edge["v"]})
SubElement(xml_edge, "nd", attrib={"ref": sample_edge["u"]})
SubElement(xml_edge, "nd", attrib={"ref": sample_edge["v"]})
else:
# topological sort
all_edges_df = all_edges_df.reset_index()
@@ -392,19 +410,19 @@
except nx.NetworkXUnfeasible:
first_node = all_edges_df.iloc[0]["u"]
ordered_nodes = _get_unique_nodes_ordered_from_way(all_edges_df.iloc[1:])
- ordered_nodes = [first_node] + ordered_nodes
+ ordered_nodes = [first_node, *ordered_nodes]
for node in ordered_nodes:
ET.SubElement(xml_edge, "nd", attrib={"ref": str(node)})
SubElement(xml_edge, "nd", attrib={"ref": str(node)})


def _append_edges_xml_tree(
- root: ET.Element,
+ root: Element,
gdf_edges: gpd.GeoDataFrame,
edge_attrs: list[str],
edge_tags: list[str],
edge_tag_aggs: list[tuple[str, str]] | None,
merge_edges: bool,
- ) -> ET.Element:
+ ) -> Element:
"""
Append edges to an XML tree.
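The try/except in the hunk above orders a way's nodes by topologically sorting the directed graph of its (u, v) edges, falling back to a manual ordering when the edges contain a cycle. A small illustration with toy node IDs:

    import networkx as nx

    # edges of one way, as (u, v) node-ID pairs
    ordered = list(nx.topological_sort(nx.DiGraph([(10, 11), (11, 12), (12, 13)])))
    print(ordered)  # [10, 11, 12, 13]

    # a closed loop has no topological order, which triggers the fallback
    try:
        list(nx.topological_sort(nx.DiGraph([(1, 2), (2, 1)])))
    except nx.NetworkXUnfeasible:
        print("cycle detected: order the remaining edges manually")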
@@ -442,9 +460,11 @@
if merge_edges:
for _, all_way_edges in gdf_edges.groupby("id"):
first = all_way_edges.iloc[0].dropna().astype(str)
- edge = ET.SubElement(root, "way", attrib=first[edge_attrs].dropna().to_dict())
+ edge = SubElement(root, "way", attrib=first[edge_attrs].dropna().to_dict())
_append_nodes_as_edge_attrs(
- xml_edge=edge, sample_edge=first.to_dict(), all_edges_df=all_way_edges
+ xml_edge=edge,
+ sample_edge=first.to_dict(),
+ all_edges_df=all_way_edges,
)
_append_merged_edge_attrs(
xml_edge=edge,
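Throughout this module the change swaps the `ET.` alias for names imported directly from xml.etree.ElementTree. A minimal end-to-end sketch of that Element/SubElement/ElementTree building pattern (toy attribute values):

    from xml.etree.ElementTree import Element, ElementTree, SubElement

    root = Element("osm", attrib={"version": "0.6", "generator": "example"})
    node = SubElement(root, "node", attrib={"id": "1", "lat": "0.0", "lon": "0.0"})
    SubElement(node, "tag", attrib={"k": "highway", "v": "crossing"})
    ElementTree(root).write("example.osm", encoding="utf-8", xml_declaration=True)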
