diff --git a/python/cfr/two_step_routing/README.md b/python/cfr/two_step_routing/README.md
index 6a9d11ed..41d99459 100644
--- a/python/cfr/two_step_routing/README.md
+++ b/python/cfr/two_step_routing/README.md
@@ -4,7 +4,7 @@ This directory contains a Python library that uses the Cloud Fleet Routing
 (CFR) API to optimize routes with two-step deliveries: under this model,
 shipments can be handled in two ways:
 - delivered directly: the vehicle handling the shipment arrives directly to the
-  final delivery addres.
+  final delivery address.
 - delivered through a parking location: when handling the shipment, the vehicle
   parks at a specified parking location, while the driver delivers the shipment
   by foot.
diff --git a/python/cfr/two_step_routing/two_step_routing.py b/python/cfr/two_step_routing/two_step_routing.py
index bd814320..0db5b642 100644
--- a/python/cfr/two_step_routing/two_step_routing.py
+++ b/python/cfr/two_step_routing/two_step_routing.py
@@ -53,6 +53,7 @@
 import re
 from typing import Any, TypeAlias, TypeVar, cast
 
+from .. import utils
 from ..json import cfr_json
 
 
@@ -81,7 +82,7 @@ class _ParkingGroupKey:
 
 
 @enum.unique
-class LocalModelGrouping(enum.Enum):
+class LocalModelGrouping(utils.EnumForArgparse):
   """Specifies how shipments are grouped in the local model.
 
   In the local model, the routes are computed for each group separately, i.e.
@@ -429,8 +430,9 @@ def make_local_request(self) -> cfr_json.OptimizeToursRequest:
       delivery = shipment["deliveries"][0]
       local_delivery = {
           "arrivalWaypoint": delivery["arrivalWaypoint"],
-          "duration": delivery["duration"],
       }
+      if (delivery_duration := delivery.get("duration")) is not None:
+        local_delivery["duration"] = delivery_duration
       # Preserve tags in the local shipment.
       tags = delivery.get("tags")
       if tags is not None:
@@ -441,7 +443,7 @@ def make_local_request(self) -> cfr_json.OptimizeToursRequest:
         local_delivery["timeWindows"] = time_windows
       local_shipment: cfr_json.Shipment = {
           "deliveries": [local_delivery],
-          "label": f"{shipment_index}: {shipment['label']}",
+          "label": f"{shipment_index}: {shipment.get('label', '')}",
           "allowedVehicleIndices": group_vehicle_indices,
       }
       # Copy load demands from the original shipment, if present.
diff --git a/python/cfr/two_step_routing/two_step_routing_main.py b/python/cfr/two_step_routing/two_step_routing_main.py
index 923fa12f..a1459862 100644
--- a/python/cfr/two_step_routing/two_step_routing_main.py
+++ b/python/cfr/two_step_routing/two_step_routing_main.py
@@ -27,7 +27,6 @@
 """
 
 import argparse
-from collections.abc import Mapping
 import dataclasses
 from http import client
 import json
@@ -55,6 +54,7 @@ class Flags:
     google_cloud_token: The value of the --token flag.
     reuse_existing: The value of the --reuse_existing flag. When a file with a
       response exists, load it instead of resolving the request.
+    num_refinements: The value of the --num_refinements flag.
     use_refinement: The value of the --use_refinement flag. When True, the
       planner uses a third solve to reoptimize local routes from the same
       parking if they are performed in a sequence (allowing the planner to merge
@@ -76,7 +76,7 @@ class Flags:
   google_cloud_project: str
   google_cloud_token: str
   reuse_existing: bool
-  use_refinement: bool
+  num_refinements: int
   local_grouping: two_step_routing.LocalModelGrouping
   travel_mode_in_merged_transitions: bool
   local_timeout: cfr_json.DurationString
@@ -112,11 +112,11 @@ def _parse_flags() -> Flags:
   parser.add_argument(
       "--token", required=True, help="The Google Cloud auth key."
) - parser.add_argument( + two_step_routing.LocalModelGrouping.add_as_argument( + parser, "--local_grouping", help="Controls the grouping mode in the local model.", - choices=tuple(two_step_routing.LocalModelGrouping.__members__), - default="PARKING_AND_TIME", + default=two_step_routing.LocalModelGrouping.PARKING_AND_TIME, ) parser.add_argument( "--travel_mode_in_merged_transitions", @@ -154,10 +154,18 @@ def _parse_flags() -> Flags: default="240s", ) parser.add_argument( - "--use_refinement", - help="Use the refinement models to clean up parking location visits.", - default=False, - action="store_true", + "--num_refinements", + help=( + "The number of refinement rounds applied to the solution. In each" + " refinement round, the solver first re-optimizes local routes when" + " there are two or more visits to the parking in a sequence, and then" + " updates the global solution to reflect and take advantage of the" + " potentially more optimized local routes. When 0, no refinement is" + " applied." + ), + default=0, + type=int, + action="store", ) parser.add_argument( "--reuse_existing", @@ -173,12 +181,12 @@ def _parse_flags() -> Flags: google_cloud_project=flags.project, google_cloud_token=flags.token, local_timeout=flags.local_timeout, - local_grouping=two_step_routing.LocalModelGrouping[flags.local_grouping], + local_grouping=flags.local_grouping, travel_mode_in_merged_transitions=flags.travel_mode_in_merged_transitions, global_timeout=flags.global_timeout, local_refinement_timeout=flags.local_refinement_timeout, global_refinement_timeout=flags.global_refinement_timeout, - use_refinement=flags.use_refinement, + num_refinements=flags.num_refinements, reuse_existing=flags.reuse_existing, ) @@ -313,16 +321,27 @@ def _run_two_step_planner() -> None: request_json, parking_locations, parking_for_shipment, options ) + refinement_index = None + timeout_suffix = f"{flags.local_timeout}.{flags.global_timeout}" + + def make_filename(stem, timeout_string=None): + if timeout_string is None: + timeout_string = timeout_suffix + parts = [base_filename] + if refinement_index is not None: + parts.append(f"refined_{refinement_index}") + parts.append(stem) + if timeout_string: + parts.append(timeout_string) + parts.append("json") + return ".".join(parts) + local_request = planner.make_local_request() local_request["searchMode"] = 2 - io_utils.write_json_to_file( - f"{base_filename}.local_request.json", local_request - ) + io_utils.write_json_to_file(make_filename("local_request", ""), local_request) logging.info("Solving local model") - local_response_filename = ( - f"{base_filename}.local_response.{flags.local_timeout}.json" - ) + local_response_filename = make_filename("local_response", flags.local_timeout) local_response = _optimize_tours_and_write_response( local_request, flags, @@ -335,24 +354,23 @@ def _run_two_step_planner() -> None: # global model. We will be injecting the solution from the base global model # into a refined global model, and for this to work correctly, we need to use # the same duration/distance matrices in both solves. 
- global_request_traffic_override = False if flags.use_refinement else None + global_request_traffic_override = False if flags.num_refinements > 0 else None global_request = planner.make_global_request( local_response, consider_road_traffic_override=global_request_traffic_override, ) global_request["searchMode"] = 2 io_utils.write_json_to_file( - f"{base_filename}.global_request.{flags.local_timeout}.json", + make_filename("global_request", flags.local_timeout), global_request, ) logging.info("Solving global model") - timeout_suffix = f"{flags.local_timeout}.{flags.global_timeout}" - global_response_filename = ( - f"{base_filename}.global_response.{timeout_suffix}.json" - ) global_response = _optimize_tours_and_write_response( - global_request, flags, flags.global_timeout, global_response_filename + global_request, + flags, + flags.global_timeout, + make_filename("global_response"), ) # NOTE(ondrasej): Create the merged request+response from the first two phases @@ -367,37 +385,31 @@ def _run_two_step_planner() -> None: ) logging.info("Writing merged request") - io_utils.write_json_to_file( - f"{base_filename}.merged_request.{timeout_suffix}.json", - merged_request, - ) + io_utils.write_json_to_file(make_filename("merged_request"), merged_request) logging.info("Writing merged response") - io_utils.write_json_to_file( - f"{base_filename}.merged_response.{timeout_suffix}.json", - merged_response, + io_utils.write_json_to_file(make_filename("merged_response"), merged_response) + + # Add the refinement timeouts to the file names produced by make_filename(). + timeout_suffix += ( + f".{flags.local_refinement_timeout}.{flags.global_refinement_timeout}" ) - if flags.use_refinement: + for refinement_index in range(1, flags.num_refinements + 1): + logging.info("Refinement round #%d", refinement_index) logging.info("Creating local refinement model") - local_refinement_request_filename = ( - f"{base_filename}.local_refinement_request.{timeout_suffix}.json" - ) local_refinement_request = planner.make_local_refinement_request( local_response, global_response ) io_utils.write_json_to_file( - local_refinement_request_filename, + make_filename("local_request"), local_refinement_request, ) logging.info("Solving local refinement model") - local_refinement_response_filename = ( - f"{base_filename}.local_refinement_response.{timeout_suffix}.json" - ) local_refinement_response = _optimize_tours_and_write_response( local_refinement_request, flags, flags.local_refinement_timeout, - local_refinement_response_filename, + make_filename("local_response"), ) logging.info("Integrating the refinement") @@ -412,28 +424,28 @@ def _run_two_step_planner() -> None: global_response, local_refinement_response, ) + if refinement_index != flags.num_refinements: + # Override the live traffic option for all but the last global request. 
+ integrated_global_request["considerRoadTraffic"] = False io_utils.write_json_to_file( - f"{base_filename}.integrated_local_request.{timeout_suffix}.json", + make_filename("integrated_local_request"), integrated_local_request, ) io_utils.write_json_to_file( - f"{base_filename}.integrated_local_response.{timeout_suffix}.json", + make_filename("integrated_local_response"), integrated_local_response, ) io_utils.write_json_to_file( - f"{base_filename}.integrated_global_request.{timeout_suffix}.json", + make_filename("integrated_global_request"), integrated_global_request, ) logging.info("Solving the integrated global model") - integrated_global_response_filename = ( - f"{base_filename}.integrated_global_response.{timeout_suffix}.json" - ) integrated_global_response = _optimize_tours_and_write_response( integrated_global_request, flags, flags.global_refinement_timeout, - integrated_global_response_filename, + make_filename("integrated_global_response"), ) logging.info("Merging the results") @@ -444,15 +456,20 @@ def _run_two_step_planner() -> None: logging.info("Writing merged integrated request") io_utils.write_json_to_file( - f"{base_filename}.merged_integrated_request.{timeout_suffix}.json", + make_filename("merged_integrated_request"), merged_request, ) logging.info("Writing merged integrated response") io_utils.write_json_to_file( - f"{base_filename}.merged_integrated_response.{timeout_suffix}.json", + make_filename("merged_integrated_response"), merged_response, ) + local_request = integrated_local_request + local_response = integrated_local_response + global_request = integrated_global_request + global_response = integrated_global_response + if __name__ == "__main__": logging.basicConfig( diff --git a/python/cfr/two_step_routing/two_step_routing_test.py b/python/cfr/two_step_routing/two_step_routing_test.py index 9aeb9a15..aaee81ce 100644 --- a/python/cfr/two_step_routing/two_step_routing_test.py +++ b/python/cfr/two_step_routing/two_step_routing_test.py @@ -3,7 +3,6 @@ # Use of this source code is governed by an MIT-style license that can be found # in the LICENSE file or at https://opensource.org/licenses/MIT. 
-from collections.abc import Sequence import copy import datetime from importlib import resources @@ -474,47 +473,39 @@ class GetLocalModelRouteStartTimeWindowsTest(unittest.TestCase): "globalEndTime": "2023-10-25T23:59:59Z", "shipments": [ { - "deliveries": [ - { - "timeWindows": [{ - "startTime": "2023-10-25T09:00:00Z", - "endTime": "2023-10-25T12:00:00Z", - }] - } - ], + "deliveries": [{ + "timeWindows": [{ + "startTime": "2023-10-25T09:00:00Z", + "endTime": "2023-10-25T12:00:00Z", + }] + }], "label": "S001", }, { - "deliveries": [ - { - "timeWindows": [{ - "startTime": "2023-10-25T09:00:00Z", - "endTime": "2023-10-25T12:00:00Z", - }] - } - ], + "deliveries": [{ + "timeWindows": [{ + "startTime": "2023-10-25T09:00:00Z", + "endTime": "2023-10-25T12:00:00Z", + }] + }], "label": "S002", }, { - "deliveries": [ - { - "timeWindows": [{ - "startTime": "2023-10-25T14:00:00Z", - "endTime": "2023-10-25T16:00:00Z", - }] - } - ], + "deliveries": [{ + "timeWindows": [{ + "startTime": "2023-10-25T14:00:00Z", + "endTime": "2023-10-25T16:00:00Z", + }] + }], "label": "S003", }, { - "deliveries": [ - { - "timeWindows": [{ - "startTime": "2023-10-25T12:00:00Z", - "endTime": "2023-10-25T15:00:00Z", - }] - } - ], + "deliveries": [{ + "timeWindows": [{ + "startTime": "2023-10-25T12:00:00Z", + "endTime": "2023-10-25T15:00:00Z", + }] + }], "label": "S004", }, { diff --git a/python/cfr/utils.py b/python/cfr/utils.py new file mode 100644 index 00000000..efbc5bed --- /dev/null +++ b/python/cfr/utils.py @@ -0,0 +1,107 @@ +# Copyright 2023 Google LLC. All Rights Reserved. +# +# Use of this source code is governed by an MIT-style license that can be found +# in the LICENSE file or at https://opensource.org/licenses/MIT. + +"""General helper functions and classes.""" + +import argparse +import enum +import os +from typing import Type, TypeVar + + +_EnumType = TypeVar("_EnumType", bound="EnumForArgparse") + + +class EnumForArgparse(enum.Enum): + """An argparse-friendly Enum class. + + Provides a `from_string` class method for argparse-friendly parsing, and + overrides `__str__` to produce expected formatting of argparse help strings. + + Typical use: + ``` + class MyEnum(EnumForArgparse): + A = 1 + B = 2 + C = 3 + + parser = argparse.ArgumentParser() + MyEnum.add_as_argument( + parser, "--my", help="My arg.", default=MyEnum.C + ) + ``` + """ + + @classmethod + def add_as_argument(cls, parser: argparse.ArgumentParser, *args, **kwargs): + """Adds an argument of the enum type to parser. + + Calls `parser.add_argument()` with the right arguments to parse a + command-line flag of the enum type. + + Args: + parser: An ArgumentParser to which the flag is added. + *args: Additional positional arguments are forwarded to + `parser.add_argument()`. + **kwargs: Additional keyword arguments are forwarded to + `parser.add_argument()`. + + Returns: + The return value of parser.add_argument(). + """ + return parser.add_argument( + *args, type=cls.from_string, choices=tuple(cls), **kwargs + ) + + @classmethod + def from_string(cls: Type[_EnumType], value: str) -> _EnumType: + """Converts a string value to a corresponding enum value. + + Args: + value: The string value. This must be the name of one of the enum values. + + Returns: + The enum value corresponding to `value`. + + Raises: + ArgumentTypeError: When `value` can't be converted to an enum value. The + exception has an argparse-friendly error message. 
+    """
+    try:
+      return cls[value]
+    except KeyError:
+      readable_possible_values = ", ".join(repr(value.name) for value in cls)
+      raise argparse.ArgumentTypeError(
+          f"invalid value: {value!r}, possible values are:"
+          f" {readable_possible_values}"
+      ) from None
+
+  def __str__(self):
+    """Converts the enum to a string.
+
+    Uses the `self.name` as the string conversion, so that the flags appear in
+    argparse messages the way the user should be entering them. Use `repr()` to
+    obtain the usual `Enum` value formatting.
+
+    Returns:
+      The string representation of the enum value.
+    """
+    return self.name
+
+
+def is_non_empty_file(filename: str) -> bool:
+  """Checks whether `filename` exists and is non-empty.
+
+  Args:
+    filename: The name of a file to check.
+
+  Returns:
+    True when the file exists and is non-empty; False, when it doesn't exist,
+    its size can't be determined, or its size is zero.
+  """
+  try:
+    return os.path.getsize(filename) > 0
+  except IOError:
+    return False
diff --git a/python/cfr/utils_test.py b/python/cfr/utils_test.py
new file mode 100644
index 00000000..0f4dd9bf
--- /dev/null
+++ b/python/cfr/utils_test.py
@@ -0,0 +1,89 @@
+# Copyright 2023 Google LLC. All Rights Reserved.
+#
+# Use of this source code is governed by an MIT-style license that can be found
+# in the LICENSE file or at https://opensource.org/licenses/MIT.
+
+import argparse
+from os import path
+import tempfile
+import unittest
+
+from . import utils
+
+
+class MyEnum(utils.EnumForArgparse):
+  A = 1
+  B = 2
+  C = 3
+
+
+class EnumForArgparseTest(unittest.TestCase):
+  """Tests for EnumForArgparse."""
+
+  def test_help(self):
+    parser = argparse.ArgumentParser(exit_on_error=False)
+    MyEnum.add_as_argument(parser, "--my")
+
+    help_string = parser.format_help()
+    self.assertIn("--my {A,B,C}", help_string)
+    self.assertNotIn("MyEnum.A", help_string)
+
+  def test_parse_flags(self):
+    parser = argparse.ArgumentParser(exit_on_error=False)
+    MyEnum.add_as_argument(parser, "--my")
+
+    args = parser.parse_args(())
+    self.assertIsNone(args.my)
+
+    args = parser.parse_args(("--my", "C"))
+    self.assertEqual(args.my, MyEnum.C)
+
+    with self.assertRaisesRegex(
+        argparse.ArgumentError,
+        "argument --my: invalid value: 'D', possible values are: 'A', 'B', 'C'",
+    ):
+      parser.parse_args(
+          ("--my", "D"),
+      )
+
+  def test_with_default_value(self):
+    parser = argparse.ArgumentParser(exit_on_error=False)
+    MyEnum.add_as_argument(parser, "--my", default=MyEnum.B)
+
+    args = parser.parse_args(())
+    self.assertEqual(args.my, MyEnum.B)
+
+    args = parser.parse_args(("--my", "A"))
+    self.assertEqual(args.my, MyEnum.A)
+
+  def test_repr(self):
+    self.assertEqual(repr(MyEnum.A), "<MyEnum.A: 1>")
+
+
+class IsNonEmptyFileTest(unittest.TestCase):
+  """Tests for is_non_empty_file."""
+
+  def test_file_does_not_exist(self):
+    with tempfile.TemporaryDirectory() as tmpdir:
+      filename = path.join(tmpdir, "does_not_exist.json")
+      self.assertFalse(utils.is_non_empty_file(filename))
+
+  def test_file_is_empty(self):
+    with tempfile.TemporaryDirectory() as tmpdir:
+      filename = path.join(tmpdir, "exists.txt")
+      with open(filename, "w"):
+        # Create the file, but do not write anything to it.
+ pass + self.assertTrue(path.exists(filename)) + self.assertFalse(utils.is_non_empty_file(filename)) + + def test_file_is_not_empty(self): + with tempfile.TemporaryDirectory() as tmpdir: + filename = path.join(tmpdir, "non_empty.txt") + with open(filename, "w") as f: + f.write("I contain something!") + self.assertTrue(utils.is_non_empty_file(filename)) + + +if __name__ == "__main__": + unittest.main()
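
For reference, the output file naming scheme introduced by `make_filename()` in `two_step_routing_main.py` can be previewed with a small standalone sketch. This is not part of the patch; the base filename and timeout strings below are hypothetical examples, and the helper simply mirrors the naming logic added to `_run_two_step_planner()`.

```python
# Standalone sketch of the make_filename() naming scheme (hypothetical values).
def make_filename(base_filename, stem, timeout_string, refinement_index=None):
  """Mirrors the naming logic used in _run_two_step_planner()."""
  parts = [base_filename]
  if refinement_index is not None:
    parts.append(f"refined_{refinement_index}")
  parts.append(stem)
  if timeout_string:
    parts.append(timeout_string)
  parts.append("json")
  return ".".join(parts)


if __name__ == "__main__":
  # Base solve, no timeout suffix: "scenario.local_request.json"
  print(make_filename("scenario", "local_request", ""))
  # Base solve with local and global timeouts: "scenario.global_response.240s.1200s.json"
  print(make_filename("scenario", "global_response", "240s.1200s"))
  # First refinement round, with the refinement timeouts appended to the suffix:
  # "scenario.refined_1.global_response.240s.1200s.240s.1200s.json"
  print(make_filename("scenario", "global_response", "240s.1200s.240s.1200s", 1))
```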