Commit
Allow multiple rounds of refinement in the two-step library main function.
ondrasej committed Dec 4, 2023
1 parent 6d17bf6 commit d30a374
Showing 6 changed files with 292 additions and 86 deletions.
2 changes: 1 addition & 1 deletion python/cfr/two_step_routing/README.md
@@ -4,7 +4,7 @@ This directory contains a Python library that uses the Cloud Fleet Routing (CFR)
API to optimize routes with two-step deliveries: under this model, shipments can
be handled in two ways:
- delivered directly: the vehicle handling the shipment arrives directly to the
final delivery addres.
final delivery address.
- delivered through a parking location: when handling the shipment, the vehicle
parks at a specified parking location, while the driver delivers the shipment
by foot.
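One way to picture the split: shipments handled through a parking location are keyed
by a parking tag, while directly delivered shipments are not associated with any
parking. As an illustration (the `parking_for_shipment` name appears later in this
commit; the tag value and exact format below are made-up assumptions, not the
library's documented API):

    parking_for_shipment = {
        0: "P_DOWNTOWN",  # shipment 0 is delivered through this parking location
        # Shipment 1 is absent from the mapping, so it is delivered directly.
    }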
8 changes: 5 additions & 3 deletions python/cfr/two_step_routing/two_step_routing.py
@@ -53,6 +53,7 @@
import re
from typing import Any, TypeAlias, TypeVar, cast

from .. import utils
from ..json import cfr_json


@@ -81,7 +82,7 @@ class _ParkingGroupKey:


@enum.unique
class LocalModelGrouping(enum.Enum):
class LocalModelGrouping(utils.EnumForArgparse):
"""Specifies how shipments are grouped in the local model.
In the local model, the routes are computed for each group separately, i.e.
@@ -429,8 +430,9 @@ def make_local_request(self) -> cfr_json.OptimizeToursRequest:
delivery = shipment["deliveries"][0]
local_delivery = {
"arrivalWaypoint": delivery["arrivalWaypoint"],
"duration": delivery["duration"],
}
if (delivery_duration := delivery.get("duration")) is not None:
local_delivery["duration"] = delivery_duration
# Preserve tags in the local shipment.
tags = delivery.get("tags")
if tags is not None:
@@ -441,7 +443,7 @@ def make_local_request(self) -> cfr_json.OptimizeToursRequest:
local_delivery["timeWindows"] = time_windows
local_shipment: cfr_json.Shipment = {
"deliveries": [local_delivery],
"label": f"{shipment_index}: {shipment['label']}",
"label": f"{shipment_index}: {shipment.get('label', '')}",
"allowedVehicleIndices": group_vehicle_indices,
}
# Copy load demands from the original shipment, if present.
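# Illustrative, self-contained sketch of the optional-field handling introduced
# in this hunk: "duration" and "label" (and, per the comment above, load
# demands) are copied only when present. The sample shipment below is made up;
# the field names follow the CFR JSON schema already used in this file.
shipment_index = 0
shipment = {
    "deliveries": [
        {"arrivalWaypoint": {"location": {"latLng": {"latitude": 48.97, "longitude": 14.48}}}}
    ],
    # Deliberately no "duration", "label", or "loadDemands".
}
delivery = shipment["deliveries"][0]
local_delivery = {"arrivalWaypoint": delivery["arrivalWaypoint"]}
if (delivery_duration := delivery.get("duration")) is not None:
    local_delivery["duration"] = delivery_duration
local_shipment = {
    "deliveries": [local_delivery],
    "label": f"{shipment_index}: {shipment.get('label', '')}",
}
if (load_demands := shipment.get("loadDemands")) is not None:
    local_shipment["loadDemands"] = load_demands
assert "duration" not in local_delivery  # no KeyError for a missing duration
assert local_shipment["label"] == "0: "  # no KeyError for a missing label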
115 changes: 66 additions & 49 deletions python/cfr/two_step_routing/two_step_routing_main.py
@@ -27,7 +27,6 @@
"""

import argparse
from collections.abc import Mapping
import dataclasses
from http import client
import json
@@ -55,6 +54,7 @@ class Flags:
google_cloud_token: The value of the --token flag.
reuse_existing: The value of the --reuse_existing flag. When a file with a
response exists, load it instead of resolving the request.
num_refinements: The value of the --num_refinements flag.
use_refinement: The value of the --use_refinement flag. When True, the
planner uses a third solve to reoptimize local routes from the same
parking if they are performed in a sequence (allowing the planner to merge
@@ -76,7 +76,7 @@
google_cloud_project: str
google_cloud_token: str
reuse_existing: bool
use_refinement: bool
num_refinements: int
local_grouping: two_step_routing.LocalModelGrouping
travel_mode_in_merged_transitions: bool
local_timeout: cfr_json.DurationString
@@ -112,11 +112,11 @@ def _parse_flags() -> Flags:
parser.add_argument(
"--token", required=True, help="The Google Cloud auth key."
)
parser.add_argument(
two_step_routing.LocalModelGrouping.add_as_argument(
parser,
"--local_grouping",
help="Controls the grouping mode in the local model.",
choices=tuple(two_step_routing.LocalModelGrouping.__members__),
default="PARKING_AND_TIME",
default=two_step_routing.LocalModelGrouping.PARKING_AND_TIME,
)
parser.add_argument(
"--travel_mode_in_merged_transitions",
@@ -154,10 +154,18 @@
default="240s",
)
parser.add_argument(
"--use_refinement",
help="Use the refinement models to clean up parking location visits.",
default=False,
action="store_true",
"--num_refinements",
help=(
"The number of refinement rounds applied to the solution. In each"
" refinement round, the solver first re-optimizes local routes when"
" there are two or more visits to the parking in a sequence, and then"
" updates the global solution to reflect and take advantage of the"
" potentially more optimized local routes. When 0, no refinement is"
" applied."
),
default=0,
type=int,
action="store",
)
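# Illustrative check (not part of this file) of how the new flag behaves:
# omitting it yields 0, i.e. no refinement rounds, matching the old
# --use_refinement default of False; --num_refinements=1 roughly corresponds
# to the old flag being set.
import argparse  # already imported at the top of this module

example_parser = argparse.ArgumentParser()
example_parser.add_argument("--num_refinements", default=0, type=int, action="store")
assert example_parser.parse_args([]).num_refinements == 0
assert example_parser.parse_args(["--num_refinements=3"]).num_refinements == 3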
parser.add_argument(
"--reuse_existing",
@@ -173,12 +181,12 @@
google_cloud_project=flags.project,
google_cloud_token=flags.token,
local_timeout=flags.local_timeout,
local_grouping=two_step_routing.LocalModelGrouping[flags.local_grouping],
local_grouping=flags.local_grouping,
travel_mode_in_merged_transitions=flags.travel_mode_in_merged_transitions,
global_timeout=flags.global_timeout,
local_refinement_timeout=flags.local_refinement_timeout,
global_refinement_timeout=flags.global_refinement_timeout,
use_refinement=flags.use_refinement,
num_refinements=flags.num_refinements,
reuse_existing=flags.reuse_existing,
)
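# A minimal sketch of a base class compatible with the
# LocalModelGrouping.add_as_argument() call used above. This is an assumption
# about what utils.EnumForArgparse provides, not the library's implementation.
import argparse
import enum


class EnumForArgparseSketch(enum.Enum):
    """Sketch only: lets an enum register itself as an argparse argument."""

    @classmethod
    def add_as_argument(cls, parser, flag, **kwargs):
        def parse_member(name):
            try:
                return cls[name]
            except KeyError:
                raise argparse.ArgumentTypeError(
                    f"invalid choice: {name!r} (expected one of"
                    f" {', '.join(cls.__members__)})"
                )

        # The parsed value is an enum member rather than a raw string, which is
        # why _parse_flags() above can pass flags.local_grouping through
        # unchanged instead of doing LocalModelGrouping[...] itself.
        parser.add_argument(
            flag,
            type=parse_member,
            metavar="|".join(cls.__members__),
            **kwargs,
        )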

@@ -313,16 +321,27 @@ def _run_two_step_planner() -> None:
request_json, parking_locations, parking_for_shipment, options
)

refinement_index = None
timeout_suffix = f"{flags.local_timeout}.{flags.global_timeout}"

def make_filename(stem, timeout_string=None):
if timeout_string is None:
timeout_string = timeout_suffix
parts = [base_filename]
if refinement_index is not None:
parts.append(f"refined_{refinement_index}")
parts.append(stem)
if timeout_string:
parts.append(timeout_string)
parts.append("json")
return ".".join(parts)

local_request = planner.make_local_request()
local_request["searchMode"] = 2
io_utils.write_json_to_file(
f"{base_filename}.local_request.json", local_request
)
io_utils.write_json_to_file(make_filename("local_request", ""), local_request)

logging.info("Solving local model")
local_response_filename = (
f"{base_filename}.local_response.{flags.local_timeout}.json"
)
local_response_filename = make_filename("local_response", flags.local_timeout)
local_response = _optimize_tours_and_write_response(
local_request,
flags,
@@ -335,24 +354,23 @@
# global model. We will be injecting the solution from the base global model
# into a refined global model, and for this to work correctly, we need to use
# the same duration/distance matrices in both solves.
global_request_traffic_override = False if flags.use_refinement else None
global_request_traffic_override = False if flags.num_refinements > 0 else None
global_request = planner.make_global_request(
local_response,
consider_road_traffic_override=global_request_traffic_override,
)
global_request["searchMode"] = 2
io_utils.write_json_to_file(
f"{base_filename}.global_request.{flags.local_timeout}.json",
make_filename("global_request", flags.local_timeout),
global_request,
)

logging.info("Solving global model")
timeout_suffix = f"{flags.local_timeout}.{flags.global_timeout}"
global_response_filename = (
f"{base_filename}.global_response.{timeout_suffix}.json"
)
global_response = _optimize_tours_and_write_response(
global_request, flags, flags.global_timeout, global_response_filename
global_request,
flags,
flags.global_timeout,
make_filename("global_response"),
)

# NOTE(ondrasej): Create the merged request+response from the first two phases
@@ -367,37 +385,31 @@
)

logging.info("Writing merged request")
io_utils.write_json_to_file(
f"{base_filename}.merged_request.{timeout_suffix}.json",
merged_request,
)
io_utils.write_json_to_file(make_filename("merged_request"), merged_request)
logging.info("Writing merged response")
io_utils.write_json_to_file(
f"{base_filename}.merged_response.{timeout_suffix}.json",
merged_response,
io_utils.write_json_to_file(make_filename("merged_response"), merged_response)

# Add the refinement timeouts to the file names produced by make_filename().
timeout_suffix += (
f".{flags.local_refinement_timeout}.{flags.global_refinement_timeout}"
)
if flags.use_refinement:
for refinement_index in range(1, flags.num_refinements + 1):
logging.info("Refinement round #%d", refinement_index)
logging.info("Creating local refinement model")
local_refinement_request_filename = (
f"{base_filename}.local_refinement_request.{timeout_suffix}.json"
)
local_refinement_request = planner.make_local_refinement_request(
local_response, global_response
)
io_utils.write_json_to_file(
local_refinement_request_filename,
make_filename("local_request"),
local_refinement_request,
)

logging.info("Solving local refinement model")
local_refinement_response_filename = (
f"{base_filename}.local_refinement_response.{timeout_suffix}.json"
)
local_refinement_response = _optimize_tours_and_write_response(
local_refinement_request,
flags,
flags.local_refinement_timeout,
local_refinement_response_filename,
make_filename("local_response"),
)

logging.info("Integrating the refinement")
@@ -412,28 +424,28 @@
global_response,
local_refinement_response,
)
if refinement_index != flags.num_refinements:
# Override the live traffic option for all but the last global request.
integrated_global_request["considerRoadTraffic"] = False
io_utils.write_json_to_file(
f"{base_filename}.integrated_local_request.{timeout_suffix}.json",
make_filename("integrated_local_request"),
integrated_local_request,
)
io_utils.write_json_to_file(
f"{base_filename}.integrated_local_response.{timeout_suffix}.json",
make_filename("integrated_local_response"),
integrated_local_response,
)
io_utils.write_json_to_file(
f"{base_filename}.integrated_global_request.{timeout_suffix}.json",
make_filename("integrated_global_request"),
integrated_global_request,
)

logging.info("Solving the integrated global model")
integrated_global_response_filename = (
f"{base_filename}.integrated_global_response.{timeout_suffix}.json"
)
integrated_global_response = _optimize_tours_and_write_response(
integrated_global_request,
flags,
flags.global_refinement_timeout,
integrated_global_response_filename,
make_filename("integrated_global_response"),
)

logging.info("Merging the results")
Expand All @@ -444,15 +456,20 @@ def _run_two_step_planner() -> None:

logging.info("Writing merged integrated request")
io_utils.write_json_to_file(
f"{base_filename}.merged_integrated_request.{timeout_suffix}.json",
make_filename("merged_integrated_request"),
merged_request,
)
logging.info("Writing merged integrated response")
io_utils.write_json_to_file(
f"{base_filename}.merged_integrated_response.{timeout_suffix}.json",
make_filename("merged_integrated_response"),
merged_response,
)

local_request = integrated_local_request
local_response = integrated_local_response
global_request = integrated_global_request
global_response = integrated_global_response
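# Illustrative summary (not part of the program): the sequence of CFR solves
# performed by the code above for a hypothetical --num_refinements=2 run.
solve_sequence = [
    "local model (local_timeout)",
    "global model (global_timeout)",
    # refinement round 1
    "local refinement model (local_refinement_timeout)",
    "integrated global model (global_refinement_timeout)",
    # refinement round 2
    "local refinement model (local_refinement_timeout)",
    "integrated global model (global_refinement_timeout)",
]
assert len(solve_sequence) == 2 + 2 * 2  # two base solves plus two per round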


if __name__ == "__main__":
logging.basicConfig(
57 changes: 24 additions & 33 deletions python/cfr/two_step_routing/two_step_routing_test.py
@@ -3,7 +3,6 @@
# Use of this source code is governed by an MIT-style license that can be found
# in the LICENSE file or at https://opensource.org/licenses/MIT.

from collections.abc import Sequence
import copy
import datetime
from importlib import resources
@@ -474,47 +473,39 @@ class GetLocalModelRouteStartTimeWindowsTest(unittest.TestCase):
"globalEndTime": "2023-10-25T23:59:59Z",
"shipments": [
{
"deliveries": [
{
"timeWindows": [{
"startTime": "2023-10-25T09:00:00Z",
"endTime": "2023-10-25T12:00:00Z",
}]
}
],
"deliveries": [{
"timeWindows": [{
"startTime": "2023-10-25T09:00:00Z",
"endTime": "2023-10-25T12:00:00Z",
}]
}],
"label": "S001",
},
{
"deliveries": [
{
"timeWindows": [{
"startTime": "2023-10-25T09:00:00Z",
"endTime": "2023-10-25T12:00:00Z",
}]
}
],
"deliveries": [{
"timeWindows": [{
"startTime": "2023-10-25T09:00:00Z",
"endTime": "2023-10-25T12:00:00Z",
}]
}],
"label": "S002",
},
{
"deliveries": [
{
"timeWindows": [{
"startTime": "2023-10-25T14:00:00Z",
"endTime": "2023-10-25T16:00:00Z",
}]
}
],
"deliveries": [{
"timeWindows": [{
"startTime": "2023-10-25T14:00:00Z",
"endTime": "2023-10-25T16:00:00Z",
}]
}],
"label": "S003",
},
{
"deliveries": [
{
"timeWindows": [{
"startTime": "2023-10-25T12:00:00Z",
"endTime": "2023-10-25T15:00:00Z",
}]
}
],
"deliveries": [{
"timeWindows": [{
"startTime": "2023-10-25T12:00:00Z",
"endTime": "2023-10-25T15:00:00Z",
}]
}],
"label": "S004",
},
{
