Commit f0de04f

Merge branch 'main' into dependabot/pip/requests-2.31.0

KasiaKoz authored Aug 15, 2023
2 parents 5c32590 + 002550c commit f0de04f
Showing 16 changed files with 783 additions and 37 deletions.
4 changes: 2 additions & 2 deletions .github/workflows/build_pipeline.yml
@@ -67,7 +67,7 @@ jobs:
          aws s3 cp app.zip "s3://$AWS_S3_CODE_BUCKET/$repo_slug.zip"
      - name: Send build success notification
        if: success()
-       uses: rtCamp/action-slack-notify@v2.0.0
+       uses: rtCamp/action-slack-notify@v2.2.0
        env:
          SLACK_MESSAGE: ${{ github.repository }} build ${{ github.run_number }} launched by ${{ github.actor }} has succeeded
          SLACK_TITLE: Build Success
@@ -77,7 +77,7 @@ jobs:
          SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK }}
      - name: Send build failure notification
        if: failure()
-       uses: rtCamp/action-slack-notify@v2.0.0
+       uses: rtCamp/action-slack-notify@v2.2.0
        env:
          SLACK_COLOR: '#FF0000'
          SLACK_MESSAGE: ${{ github.repository }} build ${{ github.run_number }} launched by ${{ github.actor }} has failed
4 changes: 2 additions & 2 deletions .github/workflows/daily-scheduled-ci.yml
@@ -45,7 +45,7 @@ jobs:
      - name: Send build success notification
        if: success()
-       uses: rtCamp/action-slack-notify@v2.0.0
+       uses: rtCamp/action-slack-notify@v2.2.0
        env:
          SLACK_MESSAGE: ${{ github.repository }} Daily scheduled CI Build ${{ github.run_number }} has succeeded
          SLACK_TITLE: Daily Scheduled CI Build Success
@@ -56,7 +56,7 @@

      - name: Send build failure notification
        if: failure()
-       uses: rtCamp/action-slack-notify@v2.0.0
+       uses: rtCamp/action-slack-notify@v2.2.0
        env:
          SLACK_COLOR: '#FF0000'
          SLACK_LINK_NAMES: true
2 changes: 1 addition & 1 deletion README.md
@@ -26,7 +26,7 @@

## Overview

-This package provides tools to represent and work with a multi-modal transport network with public transport (PT)
+GeNet provides tools to represent and work with a multi-modal transport network with public transport (PT)
services. It is based on [MATSim's](https://www.matsim.org/) representation of such networks. The underlying
network available to PT services (roads, railways, but also ferry/flight connections) uses a `networkx.MultiDiGraph`
with additional methods for `'links'` which are unique in `genet.Network` (`networkx.MultiDiGraph` accepts multiple
edges between the same pair of nodes).
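
A minimal sketch of the behaviour described above (standard networkx; the unique link IDs are genet's own layer on top, and the attribute names are illustrative):

    import networkx as nx

    G = nx.MultiDiGraph()
    # a MultiDiGraph will happily hold two parallel edges between the same nodes
    G.add_edge('A', 'B', key=0, modes=['car'])
    G.add_edge('A', 'B', key=1, modes=['bus'])
    assert G.number_of_edges('A', 'B') == 2
    # genet.Network therefore addresses each parallel edge by a unique link id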
87 changes: 86 additions & 1 deletion genet/schedule_elements.py
@@ -3,11 +3,12 @@
import json
import logging
import os
+import math
import pkgutil
from abc import abstractmethod
from collections import defaultdict
from copy import deepcopy
-from datetime import datetime
+from datetime import datetime, timedelta
from typing import Union, Dict, List, Set, Tuple

import dictdiffer
@@ -2948,6 +2949,90 @@ def remove_unused_stops(self):
        if stops_to_remove:
            self.remove_stops(stops_to_remove)

    def has_trips_with_zero_headways(self):
        """
        Checks whether any trips in the schedule have zero headways, i.e. duplicate
        trips departing a route at the same time
        :return: bool, True if any zero-headway trips are present
        """
        trip_headways_df = self.trips_headways()
        zero_headways = trip_headways_df[(trip_headways_df['headway_mins'] == 0)]
        return not zero_headways.empty

    def fix_trips_with_zero_headways(self):
        """
        Deletes trips that have zero headways and are thus deemed duplicates
        :return: None
        """
        trip_headways_df = self.trips_headways()
        zero_headways = trip_headways_df[(trip_headways_df['headway_mins'] == 0)]

        if not zero_headways.empty:
            logging.info(f"Found {len(zero_headways)} trips with zero headways. "
                         f"{len(set(zero_headways['route_id']))} out of {len(set(trip_headways_df['route_id']))} "
                         f"routes and {len(set(zero_headways['service_id']))} out of "
                         f"{len(set(trip_headways_df['service_id']))} services are affected. "
                         "These will now be dropped, as they are assumed to be duplicates of other "
                         "trips (hence the zero headway between them)")
            new_trips = trip_headways_df[trip_headways_df['headway_mins'] != 0].drop(['headway_mins', 'headway'],
                                                                                     axis=1)
            new_trips_for_affected_routes = new_trips.loc[new_trips['route_id'].isin(set(zero_headways['route_id'])), :]
            self.set_trips_dataframe(new_trips_for_affected_routes.copy())
            self.generate_vehicles(overwrite=True)
            # verify the result of the fix
            _trip_headways_df = self.trips_headways()
            _zero_headways = _trip_headways_df[(_trip_headways_df['headway_mins'] == 0)]
            logging.info(f"Checks after alterations result in {len(_zero_headways)} trips with zero headway")
        else:
            logging.info("No trips with zero headway found. Nothing to do.")

    def has_infinite_speeds(self):
        pt_speeds = self.speed_geodataframe()
        pt_speeds_inf = pt_speeds[(pt_speeds['speed'] == math.inf)]
        return not pt_speeds_inf.empty

    def fix_infinite_speeds(self):
        df_speeds = self.speed_geodataframe()
        df_speeds_inf = df_speeds[(df_speeds['speed'] == math.inf)]
        if not df_speeds_inf.empty:
            affected_routes = set(df_speeds_inf['route_id'])
            logging.info(f"Found {len(affected_routes)} routes with infinite speeds. "
                         f"{len(set(df_speeds_inf['service_id']))} out of {len(set(df_speeds['service_id']))} "
                         "services are affected. "
                         "Their arrival and departure offsets will be recalculated using the average "
                         "speed found on the rest of the route")
            new_route_attributes_dict = {}
            for route_id in affected_routes:
                df_route_speeds = df_speeds[df_speeds['route_id'] == route_id].copy()
                # estimate stop-to-stop distances from the route geometry, inflated by a 1.3 factor
                df_route_speeds['length'] = [1.3 * x.length for x in df_route_speeds['geometry']]

                old_arrival_offsets = self.route(route_id).__dict__['arrival_offsets']
                old_departure_offsets = self.route(route_id).__dict__['departure_offsets']
                updated_arrival_offsets = ['00:00:00']
                updated_departure_offsets = ['00:00:00']

                avg_speed = df_route_speeds[df_route_speeds['speed'] != math.inf]['speed'].mean()
                distances = df_route_speeds['length'].to_list()

                for i in range(1, len(old_arrival_offsets)):
                    # if the offset is the same as the previous one (i.e. GTFS error),
                    # OR if the previous offset got infilled with a value bigger than the current offset
                    if datetime.strptime(updated_departure_offsets[i - 1], '%H:%M:%S') >= datetime.strptime(
                            old_arrival_offsets[i], '%H:%M:%S'):
                        time = round(distances[i - 1] / avg_speed, 0)
                        previous_offset = datetime.strptime(updated_departure_offsets[i - 1], '%H:%M:%S')
                        current_offset = previous_offset + timedelta(seconds=time)
                        str_current_offset = datetime.strftime(current_offset, '%H:%M:%S')
                        updated_arrival_offsets.append(str_current_offset)
                        updated_departure_offsets.append(str_current_offset)
                    else:
                        updated_arrival_offsets.append(old_arrival_offsets[i])
                        updated_departure_offsets.append(old_departure_offsets[i])
                new_route_attributes_dict[route_id] = {
                    'arrival_offsets': updated_arrival_offsets, 'departure_offsets': updated_departure_offsets}
            self.apply_attributes_to_routes(new_attributes=new_route_attributes_dict)
        else:
            logging.info("No routes with infinite speeds were found. Nothing to do.")

    def is_strongly_connected(self):
        if nx.number_strongly_connected_components(self.graph()) == 1:
            return True
2 changes: 1 addition & 1 deletion genet/utils/simplification.py
@@ -253,7 +253,7 @@ def simplify_graph(n, no_processes=1):
    df_routes['route'] = df_routes['route'].apply(lambda x: update_link_ids(x, n.link_simplification_map))
    n.schedule.apply_attributes_to_routes(df_routes.T.to_dict())
    logging.info("Updated Network Routes")
-    logging.info("Finished simplifying network")
+    logging.info("Finished simplifying network")


def update_link_ids(old_route, link_mapping):
9 changes: 6 additions & 3 deletions scripts/add_elevation_to_network.py
@@ -71,12 +71,15 @@
projection = args['projection']
elevation = args['elevation']
tif_null_value = args['null_value']
-output_dir = args['output_dir']
write_elevation_to_network = args['write_elevation_to_network']
write_slope_to_network = args['write_slope_to_network']
write_slope_to_object_attribute_file = args['write_slope_to_object_attribute_file']
save_dict_to_json = args['save_jsons']

+output_dir = args['output_dir']
+supporting_outputs = os.path.join(output_dir, 'supporting_outputs')
+ensure_dir(output_dir)
+ensure_dir(supporting_outputs)

logging.basicConfig(format='%(asctime)s - %(message)s', level=logging.WARNING)

@@ -114,7 +117,7 @@

gdf_nodes = n.to_geodataframe()['nodes']
gdf_nodes = gdf_nodes[['id', 'z', 'geometry']]
-save_geodataframe(gdf_nodes.to_crs('epsg:4326'), 'node_elevation', output_dir)
+save_geodataframe(gdf_nodes.to_crs('epsg:4326'), 'node_elevation', supporting_outputs)

logging.info('Creating slope dictionary for network links')
slope_dictionary = n.get_link_slope_dictionary(elevation_dict=elevation_dictionary)
@@ -138,7 +141,7 @@
df['slope'] = [x['slope'] for x in df['slope_tuple']]
df = df[['id', 'slope']]
gdf_links = pd.merge(gdf, df, on='id')
-save_geodataframe(gdf_links.to_crs('epsg:4326'), 'link_slope', output_dir)
+save_geodataframe(gdf_links.to_crs('epsg:4326'), 'link_slope', supporting_outputs)

if write_slope_to_object_attribute_file:
    genet.elevation.write_slope_xml(slope_dictionary, output_dir)
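
With this change, the supporting outputs are written to a subfolder rather than the top-level output directory, roughly (file names from the calls above; the geojson extension is an assumption):

    output_dir/
        supporting_outputs/
            node_elevation.geojson
            link_slope.geojson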
115 changes: 115 additions & 0 deletions scripts/auto_schedule_fixes.py
@@ -0,0 +1,115 @@
import argparse
import logging
import math

import geopandas as gpd

from genet import read_matsim
from genet.utils.persistence import ensure_dir
from genet.output.geojson import save_geodataframe


def write_scaled_vehicles(network, list_of_scales, output_dir):
    for i in list_of_scales:
        scale = float(i) / 100
        network.schedule.scale_vehicle_capacity(scale, scale, output_dir)
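
# for reference: an input of '1,10,25' becomes capacity scale factors
# 0.01, 0.10 and 0.25 via float(i) / 100 in write_scaled_vehicles above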


def generate_headway_geojson(n, gdf, output_dir, filename_suffix):
    headways = n.schedule.headway_stats()
    headways = headways.merge(gdf[['route_id', 'geometry']], how='left', on='route_id')
    save_geodataframe(gpd.GeoDataFrame(headways).to_crs('epsg:4326'), f'headway_stats_{filename_suffix}', output_dir)


def generate_speed_geojson(n, gdf, output_dir, filename_suffix):
    speeds = n.schedule.speed_geodataframe()
    # fill infinity with a large number so affected routes show up in visualisations
    speeds.loc[speeds['speed'] == math.inf, 'speed'] = 9999

    speeds = speeds.groupby(['service_id', 'route_id', 'route_name', 'mode']).max()['speed'].reset_index()
    speeds = speeds.merge(gdf[['route_id', 'geometry']], how='left', on='route_id')
    save_geodataframe(gpd.GeoDataFrame(speeds).to_crs('epsg:4326'), f'max_speeds_{filename_suffix}', output_dir)


if __name__ == '__main__':
    arg_parser = argparse.ArgumentParser(
        description='Check for and fix auto-detectable schedule issues: zero headways and infinite speeds'
    )

    arg_parser.add_argument('-n',
                            '--network',
                            help='Location of the network.xml file',
                            required=True)

    arg_parser.add_argument('-s',
                            '--schedule',
                            help='Location of the schedule.xml file',
                            required=False,
                            default=None)

    arg_parser.add_argument('-v',
                            '--vehicles',
                            help='Location of the vehicles.xml file',
                            required=False,
                            default=None)

    arg_parser.add_argument('-p',
                            '--projection',
                            help='The projection the network is in, e.g. "epsg:27700"',
                            required=True)

    arg_parser.add_argument('-vsc',
                            '--vehicle_scalings',
                            help='Comma-separated string of scales for vehicles, e.g. 1,10,25',
                            required=False,
                            default=None,
                            type=str)

    arg_parser.add_argument('-od',
                            '--output_dir',
                            help='Output directory for the fixed network',
                            required=True)

    args = vars(arg_parser.parse_args())
    network = args['network']
    schedule = args['schedule']
    vehicles = args['vehicles']
    projection = args['projection']
    output_dir = args['output_dir']
    scale_list = args['vehicle_scalings']
    ensure_dir(output_dir)

    logging.basicConfig(format='%(asctime)s - %(message)s', level=logging.WARNING)

    logging.info('Reading in network at {}'.format(network))
    n = read_matsim(
        path_to_network=network,
        epsg=projection,
        path_to_schedule=schedule,
        path_to_vehicles=vehicles
    )

    gdf = n.schedule_network_routes_geodataframe().to_crs('epsg:4326')

    logging.info("Checking for zero headways")
    if n.schedule.has_trips_with_zero_headways():
        generate_headway_geojson(n, gdf, output_dir, 'before')
        n.schedule.fix_trips_with_zero_headways()
        generate_headway_geojson(n, gdf, output_dir, 'after')
    else:
        logging.info("No trips with zero headways were found")

    logging.info("Checking for infinite speeds")
    if n.schedule.has_infinite_speeds():
        generate_speed_geojson(n, gdf, output_dir, 'before')
        n.schedule.fix_infinite_speeds()
        generate_speed_geojson(n, gdf, output_dir, 'after')
    else:
        logging.info("No routes with infinite speeds were found")

    logging.info(f'Saving network in {output_dir}')
    n.write_to_matsim(output_dir)
    if scale_list:
        logging.info('Generating scaled vehicles xml.')
        scale_list = scale_list.split(",")
        write_scaled_vehicles(n, scale_list, output_dir)
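
A hypothetical invocation of the new script, using the arguments defined above (paths are placeholders):

    python scripts/auto_schedule_fixes.py -n network.xml -s schedule.xml -v vehicles.xml \
        -p epsg:27700 -vsc 1,10,25 -od outputs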
15 changes: 6 additions & 9 deletions scripts/intermodal_access_egress_network.py
@@ -168,7 +168,8 @@ def threshold_reached(d):
        distance_threshold=distance_threshold
    )

-    # TODO There are multiple links to choose from, for the time being we are not precious about which link is selected.
+    # TODO There are multiple links to choose from, for the time being we are not precious about which link is
+    # selected.
    selected_links = closest_links.reset_index().groupby('index').first()
    if len(selected_links) != len(df_stops):
        logging.warning(f'Only {len(selected_links)} out of {len(df_stops)} stops found a link to snap to. '
@@ -193,12 +194,9 @@ def threshold_reached(d):
    accessible_tag = f'{snap_mode}Accessible'
    distance_catchment_tag = f'{snap_mode}_distance_catchment_tag'

-    selected_links[access_link_id_tag] = selected_links['link_id'].apply(
-        lambda x: {'name': access_link_id_tag, 'class': 'java.lang.String', 'text': x})
-    selected_links[accessible_tag] = selected_links.apply(
-        lambda x: {'name': accessible_tag, 'class': 'java.lang.String', 'text': 'true'}, axis=1)
-    selected_links[distance_catchment_tag] = selected_links['catchment'].apply(
-        lambda x: {'name': distance_catchment_tag, 'class': 'java.lang.String', 'text': str(x)})
+    selected_links[access_link_id_tag] = selected_links['link_id']
+    selected_links[accessible_tag] = 'true'
+    selected_links[distance_catchment_tag] = selected_links['catchment'].astype(str)
    new_stops_data = selected_links[[access_link_id_tag, accessible_tag, distance_catchment_tag]].T.to_dict()
    new_stops_data = {k: {'attributes': v} for k, v in new_stops_data.items()}
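
For reference, a sketch of the resulting shape (stop ids and values illustrative): each stop's plain values now sit under an 'attributes' key, e.g.

    {'stop_1': {'attributes': {'carAccessible': 'true', 'car_distance_catchment_tag': '10'}}}

rather than per-attribute {'name': ..., 'class': 'java.lang.String', 'text': ...} dictionaries as before.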

@@ -210,8 +208,7 @@

    # generate the data dictionaries for updating stops data
    accessible_tag = f'{tele_mode}Accessible'
-    df_stops[accessible_tag] = df_stops.apply(
-        lambda x: {'name': accessible_tag, 'class': 'java.lang.String', 'text': 'true'}, axis=1)
+    df_stops[accessible_tag] = 'true'
    new_stops_data = df_stops[[accessible_tag]].T.to_dict()
    new_stops_data = {k: {'attributes': v} for k, v in new_stops_data.items()}
