Commit 5dbdd2e

Merge branch 'main' into dependabot/pip/wheel-0.38.1
KasiaKoz authored Aug 15, 2023
2 parents 5feb1af + 002550c commit 5dbdd2e
Showing 24 changed files with 923 additions and 105 deletions.
6 changes: 3 additions & 3 deletions .github/workflows/build_pipeline.yml
@@ -28,7 +28,7 @@ jobs:
export C_INCLUDE_PATH=/usr/include/gdal
sudo apt-get install ca-certificates
export CURL_CA_BUNDLE=/etc/ssl/certs/ca-certificates.crt
- pip install GDAL==3.0.2
+ pip install GDAL==3.4.1
pip install -e .
- name: Lint the source code
run: ./bash_scripts/lint-check.sh
@@ -67,7 +67,7 @@ jobs:
aws s3 cp app.zip "s3://$AWS_S3_CODE_BUCKET/$repo_slug.zip"
- name: Send build success notification
if: success()
- uses: rtCamp/action-slack-notify@v2.0.0
+ uses: rtCamp/action-slack-notify@v2.2.0
env:
SLACK_MESSAGE: ${{ github.repository }} build ${{ github.run_number }} launched by ${{ github.actor }} has succeeded
SLACK_TITLE: Build Success
@@ -77,7 +77,7 @@
SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK }}
- name: Send build failure notification
if: failure()
- uses: rtCamp/action-slack-notify@v2.0.0
+ uses: rtCamp/action-slack-notify@v2.2.0
env:
SLACK_COLOR: '#FF0000'
SLACK_MESSAGE: ${{ github.repository }} build ${{ github.run_number }} launched by ${{ github.actor }} has failed
6 changes: 3 additions & 3 deletions .github/workflows/daily-scheduled-ci.yml
@@ -35,7 +35,7 @@ jobs:
export C_INCLUDE_PATH=/usr/include/gdal
sudo apt-get install ca-certificates
export CURL_CA_BUNDLE=/etc/ssl/certs/ca-certificates.crt
- pip install GDAL==3.0.2
+ pip install GDAL==3.4.1
pip install -e .
- name: Run tests
@@ -45,7 +45,7 @@
- name: Send build success notification
if: success()
- uses: rtCamp/action-slack-notify@v2.0.0
+ uses: rtCamp/action-slack-notify@v2.2.0
env:
SLACK_MESSAGE: ${{ github.repository }} Daily scheduled CI Build ${{ github.run_number }} has succeeded
SLACK_TITLE: Daily Scheduled CI Build Success
@@ -56,7 +56,7 @@

- name: Send build failure notification
if: failure()
- uses: rtCamp/action-slack-notify@v2.0.0
+ uses: rtCamp/action-slack-notify@v2.2.0
env:
SLACK_COLOR: '#FF0000'
SLACK_LINK_NAMES: true
6 changes: 3 additions & 3 deletions Dockerfile
@@ -1,8 +1,8 @@
- FROM python:3.7-slim-stretch
+ FROM python:3.7.16-bullseye

RUN apt-get update && \
apt-get upgrade -y && \
- apt-get -y install gcc git libspatialindex-dev curl coinor-cbc && \
+ apt-get -y install gcc git libgdal-dev libgeos-dev libspatialindex-dev curl coinor-cbc && \
rm -rf /var/lib/apt/lists/*

RUN curl -sL https://deb.nodesource.com/setup_17.x | bash -
@@ -17,4 +17,4 @@ COPY . .
RUN pip3 install --no-cache-dir --compile -e . && pip cache purge
ENV PYTHONPATH=./scripts:${PYTHONPATH}

- ENTRYPOINT ["python3"]
\ No newline at end of file
+ ENTRYPOINT ["python3"]
2 changes: 1 addition & 1 deletion README.md
@@ -26,7 +26,7 @@

## Overview

- This package provides tools to represent and work with a multi-modal transport network with public transport (PT)
+ GeNet provides tools to represent and work with a multi-modal transport network with public transport (PT)
services. It is based on [MATSim's](https://www.matsim.org/) representation of such networks. The underlying
network available to PT services (roads, railways, but also ferry/flight connections) uses a `networkx.MultiDiGraph`
with additional methods for `'links'` which are unique in `genet.Network` (`networkx.MultiDiGraph` accepts multiple
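Note: the README excerpt above is cut off by the diff view; its point is that `networkx.MultiDiGraph` permits several parallel edges between the same pair of nodes, which is why `genet.Network` tracks links by unique id on top of the graph. A minimal pure-networkx sketch of the underlying behaviour (illustrative only, not the genet API):

import networkx as nx

g = nx.MultiDiGraph()
k0 = g.add_edge('A', 'B', length=10)  # add_edge returns the edge key: 0
k1 = g.add_edge('A', 'B', length=12)  # a second, parallel A->B edge: key 1
print(g.number_of_edges('A', 'B'))  # 2 -- both edges coexist under different keys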
1 change: 1 addition & 0 deletions genet/__init__.py
@@ -8,3 +8,4 @@
ServiceInitialisationError # noqa: F401
from genet.utils import graph_operations # noqa: F401
from genet.utils import google_directions # noqa: F401
+ from genet.utils import elevation # noqa: F401
2 changes: 1 addition & 1 deletion genet/max_stable_set.py
@@ -412,7 +412,7 @@ def generate_additional_links_modes(self, max_stable_set):
link_ids = {link_id for route_list in self.df_route_data['route'].values for link_id in route_list}
links = max_stable_set.network_spatial_tree.links.copy()
links = links.loc[links['link_id'].isin(link_ids), ['link_id', 'modes']]
- links['modes'] = links['modes'].apply(lambda x: x.__class__(set(x) | max_stable_set.service_modes))
+ links['modes'] = links['modes'].apply(lambda x: set(x) | max_stable_set.service_modes)
return links.set_index('link_id').T.to_dict()

def schedule_stops(self, max_stable_set):
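Note: the change above matters because the old lambda reconstructed the original container type of the modes value (a list stayed a list), while the new one always yields a plain `set`. A toy illustration with made-up values:

link_modes = ['car', 'bus']  # container type used to vary by link
service_modes = {'rail'}

old_style = link_modes.__class__(set(link_modes) | service_modes)  # stays a list
new_style = set(link_modes) | service_modes                        # always a set
print(type(old_style).__name__, type(new_style).__name__)  # list set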
124 changes: 103 additions & 21 deletions genet/schedule_elements.py
@@ -3,11 +3,12 @@
import json
import logging
import os
+ import math
import pkgutil
from abc import abstractmethod
from collections import defaultdict
from copy import deepcopy
- from datetime import datetime
+ from datetime import datetime, timedelta
from typing import Union, Dict, List, Set, Tuple

import dictdiffer
@@ -79,7 +80,7 @@ def _service_ids_in_graph(self, service_ids: List[str]):
return set(service_ids).issubset(set(self._graph.graph['services'].keys()))

def change_log(self):
- return self._graph.graph['change_log']
+ return change_log.ChangeLog(df=self._graph.graph['change_log'])

@abstractmethod
def _add_additional_attribute_to_graph(self, k, v):
@@ -647,7 +648,10 @@ def __eq__(self, other):
same_route_name = self.route_short_name == other.route_short_name
same_mode = self.mode.lower() == other.mode.lower()
same_stops = list(self.stops()) == list(other.stops())
- return same_route_name and same_mode and same_stops
+ same_trips = self.trips == other.trips
+ same_arrival_offsets = self.arrival_offsets == other.arrival_offsets
+ same_departure_offsets = self.departure_offsets == other.departure_offsets
+ return all([same_route_name, same_mode, same_stops, same_trips, same_arrival_offsets, same_departure_offsets])

def __repr__(self):
return "<{} instance at {}: with {} stops and {} trips>".format(
@@ -1926,16 +1930,16 @@ def add(self, other, overwrite=True):
other._graph.graph['services'], self._graph.graph['services'])
self._graph.graph['routes'] = dict_support.merge_complex_dictionaries(
other._graph.graph['routes'], self._graph.graph['routes'])
- route_to_service_map = {**self._graph.graph['route_to_service_map'],
-                         **other._graph.graph['route_to_service_map']}
- service_to_route_map = {**self._graph.graph['service_to_route_map'],
-                         **other._graph.graph['service_to_route_map']}
+ self._graph.graph['route_to_service_map'] = {**self._graph.graph['route_to_service_map'],
+                                              **other._graph.graph['route_to_service_map']}
+ self._graph.graph['service_to_route_map'] = {**self._graph.graph['service_to_route_map'],
+                                              **other._graph.graph['service_to_route_map']}
self.minimal_transfer_times = dict_support.merge_complex_dictionaries(
other.minimal_transfer_times, self.minimal_transfer_times)
# todo assuming separate schedules, with non conflicting ids, nodes and edges
+ _ = deepcopy(self._graph.graph)
self._graph.update(other._graph)
- self._graph.graph['route_to_service_map'] = route_to_service_map
- self._graph.graph['service_to_route_map'] = service_to_route_map
+ self._graph.graph = _

# merge change_log DataFrames
self._graph.graph['change_log'] = self.change_log().merge_logs(other.change_log())
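Note: the `deepcopy` dance above works around `networkx.Graph.update`, which copies the other graph's graph-level attributes over this one's and would clobber the freshly merged dictionaries. A standalone sketch of the pattern (illustrative variable names):

import networkx as nx
from copy import deepcopy

g = nx.DiGraph(tag='merged')    # graph-level attributes prepared before the merge
h = nx.DiGraph(tag='incoming')

snapshot = deepcopy(g.graph)  # keep the prepared attributes safe
g.update(h)                   # brings in h's nodes and edges, but also h.graph
g.graph = snapshot            # restore them after the structural merge
print(g.graph['tag'])  # merged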
@@ -2659,22 +2663,16 @@ def add_services(self, services: List[Service], force=False):
dict(g.nodes(data=True)), dict(self._graph.nodes(data=True)))
edges = dict_support.combine_edge_data_lists(
list(g.edges(data=True)), list(self._graph.edges(data=True)))
- graph_routes = dict_support.merge_complex_dictionaries(
-     g.graph['routes'], self._graph.graph['routes'])
- graph_services = dict_support.merge_complex_dictionaries(
-     g.graph['services'], self._graph.graph['services'])
- route_to_service_map = {**self._graph.graph['route_to_service_map'],
-                         **g.graph['route_to_service_map']}
- service_to_route_map = {**self._graph.graph['service_to_route_map'],
-                         **g.graph['service_to_route_map']}

+ route_ids_to_add = list(service.route_ids())
self._graph.add_nodes_from(nodes)
self._graph.add_edges_from(edges)
nx.set_node_attributes(self._graph, nodes)
- self._graph.graph['routes'] = graph_routes
- self._graph.graph['services'] = graph_services
- self._graph.graph['route_to_service_map'] = route_to_service_map
- self._graph.graph['service_to_route_map'] = service_to_route_map
+ for route_id in route_ids_to_add:
+     self._graph.graph['routes'][route_id] = g.graph['routes'][route_id]
+     self._graph.graph['route_to_service_map'][route_id] = g.graph['route_to_service_map'][route_id]
+ self._graph.graph['services'][service.id] = g.graph['services'][service.id]
+ self._graph.graph['service_to_route_map'][service.id] = g.graph['service_to_route_map'][service.id]

service_ids = [service.id for service in services]
service_data = [self._graph.graph['services'][sid] for sid in service_ids]
@@ -2951,6 +2949,90 @@ def remove_unused_stops(self):
if stops_to_remove:
self.remove_stops(stops_to_remove)

+ def has_trips_with_zero_headways(self):
+     """
+     Checks whether any trips have zero headways, i.e. are duplicates of other trips
+     :return: bool
+     """
+     trip_headways_df = self.trips_headways()
+     zero_headways = trip_headways_df[(trip_headways_df['headway_mins'] == 0)]
+     return not zero_headways.empty
+
+ def fix_trips_with_zero_headways(self):
+     """
+     Deletes trips that have zero headways and are thus deemed duplicates of other trips
+     :return:
+     """
+     trip_headways_df = self.trips_headways()
+     zero_headways = trip_headways_df[(trip_headways_df['headway_mins'] == 0)]
+
+     if not zero_headways.empty:
+         logging.info(f"Found {len(zero_headways)} trips with zero headways. "
+                      f"{len(set(zero_headways['route_id']))} out of {len(set(trip_headways_df['route_id']))} "
+                      f"routes and {len(set(zero_headways['service_id']))} out of "
+                      f"{len(set(trip_headways_df['service_id']))} services are affected. "
+                      "These trips will now be dropped, treating them as duplicates of the trips "
+                      "they share a zero headway with")
+         new_trips = trip_headways_df[trip_headways_df['headway_mins'] != 0].drop(['headway_mins', 'headway'], axis=1)
+         new_trips_for_affected_routes = new_trips.loc[new_trips['route_id'].isin(set(zero_headways['route_id'])), :]
+         self.set_trips_dataframe(new_trips_for_affected_routes.copy())
+         self.generate_vehicles(overwrite=True)
+         # sanity check after the fix
+         _trip_headways_df = self.trips_headways()
+         _zero_headways = _trip_headways_df[(_trip_headways_df['headway_mins'] == 0)]
+         logging.info(f"Checks after alterations result in {len(_zero_headways)} trips with zero headway")
+     else:
+         logging.info("No trips with zero headway found. Nothing to do.")
+
+ def has_infinite_speeds(self):
+     pt_speeds = self.speed_geodataframe()
+     pt_speeds_inf = pt_speeds[(pt_speeds['speed'] == math.inf)]
+     return not pt_speeds_inf.empty
+
+ def fix_infinite_speeds(self):
+     df_speeds = self.speed_geodataframe()
+     df_speeds_inf = df_speeds[(df_speeds['speed'] == math.inf)]
+     if not df_speeds_inf.empty:
+         affected_routes = set(df_speeds_inf['route_id'])
+         logging.info(f"Found {len(affected_routes)} routes with infinite speeds. "
+                      f"{len(set(df_speeds_inf['service_id']))} out of {len(set(df_speeds['service_id']))} "
+                      "services are affected. "
+                      "Their arrival and departure offsets will be recalculated from the average "
+                      "speed on the rest of the route")
+         new_route_attributes_dict = {}
+         for route_id in affected_routes:
+             df_route_speeds = df_speeds[df_speeds['route_id'] == route_id]
+             df_route_speeds['length'] = [1.3 * x.length for x in df_route_speeds['geometry']]
+
+             old_arrival_offsets = self.route(route_id).__dict__['arrival_offsets']
+             old_departure_offsets = self.route(route_id).__dict__['departure_offsets']
+             updated_arrival_offsets = ['00:00:00']
+             updated_departure_offsets = ['00:00:00']
+
+             avg_speed = df_route_speeds[df_route_speeds['speed'] != math.inf]['speed'].mean()
+             distances = df_route_speeds['length'].to_list()
+
+             for i in range(1, len(old_arrival_offsets)):
+                 # if the offset is the same as the previous one (i.e. a GTFS error),
+                 # OR if the previous offset got infilled with a value bigger than the current offset
+                 if datetime.strptime(updated_departure_offsets[i - 1], '%H:%M:%S') >= datetime.strptime(
+                         old_arrival_offsets[i], '%H:%M:%S'):
+                     time = round(distances[i - 1] / avg_speed, 0)
+                     previous_offset = datetime.strptime(updated_departure_offsets[i - 1], '%H:%M:%S')
+                     current_offset = previous_offset + timedelta(seconds=time)
+                     str_current_offset = datetime.strftime(current_offset, '%H:%M:%S')
+                     updated_arrival_offsets.append(str_current_offset)
+                     updated_departure_offsets.append(str_current_offset)
+                 else:
+                     updated_arrival_offsets.append(old_arrival_offsets[i])
+                     updated_departure_offsets.append(old_departure_offsets[i])
+             new_route_attributes_dict[route_id] = {
+                 'arrival_offsets': updated_arrival_offsets, 'departure_offsets': updated_departure_offsets}
+         self.apply_attributes_to_routes(new_attributes=new_route_attributes_dict)
+     else:
+         logging.info("No routes with infinite speeds were found. Nothing to do.")
+
def is_strongly_connected(self):
if nx.number_strongly_connected_components(self.graph()) == 1:
return True
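Note: taken together, the new methods form check-then-fix pairs on a `Schedule`. A hypothetical usage sketch (the `schedule` variable is assumed to be an existing genet Schedule instance):

if schedule.has_trips_with_zero_headways():
    schedule.fix_trips_with_zero_headways()  # drops duplicate trips and regenerates vehicles
if schedule.has_infinite_speeds():
    schedule.fix_infinite_speeds()  # re-derives offsets from the route's average finite speed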
21 changes: 13 additions & 8 deletions genet/utils/dict_support.py
@@ -1,7 +1,7 @@
import pandas as pd
from numpy import ndarray
from typing import Union

+ from copy import deepcopy
import genet.utils.graph_operations as graph_operations


@@ -82,24 +82,29 @@ def nest_at_leaf(d: dict, value):
def merge_complex_dictionaries(d1, d2):
"""
Merges two dictionaries where the values can be lists, sets or other dictionaries with the same behaviour.
- If values are not list, set or dict then d2 values prevail
+ If values are not list, set or dict then d2 values prevail.
+ If the values are lists, the two merge, retaining all elements of both lists and preserving their order;
+ the result is: d1_list + d2_list.
+ If the values are sets, the two combine with the OR operator.
+ If the values are dicts, the two merge using this method.
:param d1:
:param d2:
:return:
"""
+ d = deepcopy(d1)
clashing_keys = set(d1) & set(d2)
for key in clashing_keys:
if isinstance(d1[key], dict) and isinstance(d2[key], dict):
- d1[key] = merge_complex_dictionaries(d1[key], d2[key])
+ d[key] = merge_complex_dictionaries(d1[key], d2[key])
elif isinstance(d1[key], list) and isinstance(d2[key], list):
- d1[key] = list(set(d1[key]) | set(d2[key]))
+ d[key] = d1[key] + d2[key]
elif isinstance(d1[key], set) and isinstance(d2[key], set):
- d1[key] = d1[key] | d2[key]
+ d[key] = d1[key] | d2[key]
else:
- d1[key] = d2[key]
+ d[key] = d2[key]
for key in set(d2) - clashing_keys:
- d1[key] = d2[key]
- return d1
+ d[key] = d2[key]
+ return d


def combine_edge_data_lists(l1, l2):
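Note: a worked example of the merge rules documented above (illustrative values; lists concatenate, sets union, nested dicts recurse, scalars take d2's value):

from genet.utils.dict_support import merge_complex_dictionaries

d1 = {'a': [1, 2], 'b': {1, 2}, 'c': {'x': 1}, 'd': 'old'}
d2 = {'a': [2, 3], 'b': {3}, 'c': {'y': 2}, 'd': 'new'}
print(merge_complex_dictionaries(d1, d2))
# {'a': [1, 2, 2, 3], 'b': {1, 2, 3}, 'c': {'x': 1, 'y': 2}, 'd': 'new'}
# and d1 itself is no longer mutated in place, thanks to the new deepcopy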
3 changes: 2 additions & 1 deletion genet/utils/elevation.py
@@ -2,6 +2,7 @@
import numpy as np
from lxml import etree
import os
+ import logging


def get_elevation_image(elevation_tif):
@@ -72,7 +73,7 @@ def write_slope_xml(link_slope_dictionary, output_dir):
:param output_dir: directory where the XML file will be written to
"""
fname = os.path.join(output_dir, 'link_slopes.xml')
- print('Writing {}'.format(fname))
+ logging.info(f'Writing {fname}')

with open(fname, "wb") as f, etree.xmlfile(f, encoding='UTF-8') as xf:
xf.write_declaration(
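Note: unlike print, the logging.info call above only produces output if the host application has logging configured. A minimal sketch (the path shown is hypothetical, for illustration):

import logging

logging.basicConfig(level=logging.INFO, format='%(levelname)s %(message)s')
logging.info('Writing /tmp/link_slopes.xml')  # now visible on stderr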
2 changes: 1 addition & 1 deletion genet/utils/simplification.py
@@ -253,7 +253,7 @@ def simplify_graph(n, no_processes=1):
df_routes['route'] = df_routes['route'].apply(lambda x: update_link_ids(x, n.link_simplification_map))
n.schedule.apply_attributes_to_routes(df_routes.T.to_dict())
logging.info("Updated Network Routes")
- logging.info("Finished simplifying network")
\ No newline at end of file
+ logging.info("Finished simplifying network")


def update_link_ids(old_route, link_mapping):