Clean up docstrings
mmorang committed Apr 3, 2022
1 parent 53ddfee commit 9e9df2a
Showing 7 changed files with 88 additions and 62 deletions.
16 changes: 13 additions & 3 deletions LargeNetworkAnalysisTools.pyt
@@ -591,7 +591,7 @@ def get_catalog_path(param):
param (arcpy.Parameter): Parameter from which to retrieve the catalog path.
Returns:
list(str): List of catalog paths to the data
str: Catalog path to the data
"""
param_value = param.value
if not param_value:
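
For illustration, a minimal sketch of how a helper like get_catalog_path might resolve a parameter value to a single catalog path. The dataSource fallback and the function name are assumptions for this sketch, not the repository's exact implementation.

import arcpy  # assumes an ArcGIS Pro Python environment

def get_catalog_path_sketch(param):
    """Return the catalog path for a parameter value, or an empty string if unset."""
    param_value = param.value
    if not param_value:
        return ""
    # Layer objects expose their source dataset; plain paths are used as they are.
    if hasattr(param_value, "dataSource"):
        return param_value.dataSource
    return str(param_value)
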
@@ -631,7 +631,12 @@ def get_catalog_path_multivalue(param):


def update_precalculate_parameter(param_network, param_precalculate):
"""Turn off and hide Precalculate Network Locations parameter if the network data source is a service."""
"""Turn off and hide Precalculate Network Locations parameter if the network data source is a service.
Args:
param_network (arcpy.Parameter): Parameter for the network data source
param_precalculate (arcpy.Parameter): Parameter for precalculate network locations
"""
if not param_network.hasBeenValidated and param_network.altered and param_network.valueAsText:
if helpers.is_nds_service(param_network.valueAsText):
param_precalculate.value = False
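
A minimal sketch of the enable/disable pattern the docstring describes, assuming the parameter is toggled through its enabled property; the else branch and the injected is_service callable are assumptions, since the hunk only shows the service case.

def update_precalculate_sketch(param_network, param_precalculate, is_service):
    """Turn off the precalculate parameter when the network data source is a service."""
    if not param_network.hasBeenValidated and param_network.altered and param_network.valueAsText:
        if is_service(param_network.valueAsText):
            param_precalculate.value = False
            param_precalculate.enabled = False  # grays out the checkbox in the tool dialog
        else:
            param_precalculate.enabled = True
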
@@ -641,7 +646,12 @@ def update_precalculate_parameter(param_network, param_precalculate):


def cap_max_processes_for_agol(param_network, param_max_processes):
"""If the network data source is arcgis.com, cap max processes."""
"""If the network data source is arcgis.com, cap max processes.
Args:
param_network (arcpy.Parameter): Parameter for the network data source
param_max_processes (arcpy.Parameter): Parameter for the max processes
"""
if param_network.altered and param_network.valueAsText and helpers.is_nds_service(param_network.valueAsText):
if param_max_processes.altered and param_max_processes.valueAsText:
if "arcgis.com" in param_network.valueAsText and param_max_processes.value > helpers.MAX_AGOL_PROCESSES:
18 changes: 18 additions & 0 deletions helpers.py
@@ -102,8 +102,14 @@ def update_agol_max_processes(max_processes):
def convert_time_units_str_to_enum(time_units):
"""Convert a string representation of time units to an arcpy.nax enum.
Args:
time_units (str): String representation of time units
Raises:
ValueError: If the string cannot be parsed as a valid arcpy.nax.TimeUnits enum value.
Returns:
arcpy.nax.TimeUnits: Time units enum for use in arcpy.nax solver objects
"""
if time_units.lower() == "minutes":
return arcpy.nax.TimeUnits.Minutes
@@ -122,8 +128,14 @@ def convert_time_units_str_to_enum(time_units):
def convert_distance_units_str_to_enum(distance_units):
"""Convert a string representation of distance units to an arcpy.nax.DistanceUnits enum.
Args:
distance_units (str): String representation of distance units
Raises:
ValueError: If the string cannot be parsed as a valid arcpy.nax.DistanceUnits enum value.
Returns:
arcpy.nax.DistanceUnits: Distance units enum for use in arcpy.nax solver objects
"""
if distance_units.lower() == "miles":
return arcpy.nax.DistanceUnits.Miles
@@ -154,8 +166,14 @@ class OutputFormat(enum.Enum):
def convert_output_format_str_to_enum(output_format):
"""Convert a string representation of the desired output format to an enum.
Args:
output_format (str): String representation of the output format
Raises:
ValueError: If the string cannot be parsed as a valid OutputFormat enum value.
Returns:
OutputFormat: Output format enum value
"""
if output_format.lower() == "feature class":
return OutputFormat.featureclass
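
The three converters above (time units, distance units, output format) share one pattern: map a lowercased string to an enum member and raise ValueError otherwise. A compact, hedged illustration of that pattern using a lookup dictionary; the functions in helpers.py use if/elif chains, as the hunks show.

import arcpy

_TIME_UNIT_LOOKUP = {
    "minutes": arcpy.nax.TimeUnits.Minutes,
    "seconds": arcpy.nax.TimeUnits.Seconds,
    "hours": arcpy.nax.TimeUnits.Hours,
    "days": arcpy.nax.TimeUnits.Days,
}

def convert_time_units_sketch(time_units):
    """Convert a string such as 'Minutes' to the matching arcpy.nax.TimeUnits member."""
    try:
        return _TIME_UNIT_LOOKUP[time_units.lower()]
    except KeyError:
        raise ValueError(f"Invalid time units: {time_units}") from None
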
2 changes: 2 additions & 0 deletions parallel_odcm.py
@@ -86,6 +86,7 @@ def __init__(self, **kwargs):
- distance_units
- cutoff
- num_destinations
- time_of_day
- scratch_folder
- barriers
"""
@@ -722,6 +723,7 @@ def __init__( # pylint: disable=too-many-locals, too-many-arguments
Defaults to None. When None, do not use a cutoff.
num_destinations (int, optional): The number of destinations to find for each origin. Defaults to None,
which means to find all destinations.
time_of_day (str): String representation of the start time for the analysis ("%Y%m%d %H:%M" format)
barriers (list(str), optional): List of catalog paths to point, line, and polygon barriers to use.
Defaults to None.
"""
67 changes: 36 additions & 31 deletions parallel_route_pairs.py
@@ -1,13 +1,13 @@
"""TODO: Compute a large Origin Destination (OD) cost matrix by chunking the
inputs and solving in parallel. Write outputs into a single combined
feature class, a collection of CSV files, or a collection of Apache
Arrow files.
"""Compute a large multi-route analysis by chunking the input origins
and their assigned destinations and solving in parallel. Write outputs to
a single combined feature class.
This is a sample script users can modify to fit their specific needs.
This script is intended to be called as a subprocess from the solve_large_rt.py script
so that it can launch parallel processes with concurrent.futures. It must be
called as a subprocess because the main script tool process, when running
This script is intended to be called as a subprocess from the
solve_large_route_pair_analysis.py script so that it can launch parallel
processes with concurrent.futures. It must be called as a subprocess
because the main script tool process, when running
within ArcGIS Pro, cannot launch parallel subprocesses on its own.
This script should not be called directly from the command line.
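
The docstring above describes the division of labor: the script tool launches this worker as a subprocess, and the worker fans chunks out with concurrent.futures. A schematic sketch of that fan-out, with placeholder chunking and solve logic rather than the repository's own.

from concurrent.futures import ProcessPoolExecutor, as_completed

def solve_chunk(oid_range):
    """Placeholder for solving one chunk of origins and their assigned destinations."""
    return {"oid_range": oid_range, "succeeded": True}

def solve_in_parallel(oid_ranges, max_processes=4):
    """Submit each chunk to a worker process and collect results as they finish."""
    results = []
    with ProcessPoolExecutor(max_workers=max_processes) as executor:
        futures = {executor.submit(solve_chunk, oid_range): oid_range for oid_range in oid_ranges}
        for future in as_completed(futures):
            results.append(future.result())
    return results

if __name__ == "__main__":
    print(solve_in_parallel([(1, 1000), (1001, 2000)]))
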
@@ -61,22 +61,23 @@


class Route: # pylint:disable = too-many-instance-attributes
"""Used for solving an Route problem in parallel for a designated chunk of the input datasets."""
"""Used for solving a Route problem in parallel for a designated chunk of the input datasets."""

def __init__(self, **kwargs):
"""Initialize the Route analysis for the given inputs.
Expected arguments: TODO
Expected arguments:
- origins
- origin_id_field
- assigned_dest_field
- destinations
- output_format
- output_od_location
- dest_id_field
- network_data_source
- travel_mode
- time_units
- distance_units
- cutoff
- num_destinations
- time_of_day
- reverse_direction
- scratch_folder
- barriers
"""
@@ -217,7 +218,7 @@ def initialize_rt_solver(self):
self.logger.debug(f"timeOfDay: {self.time_of_day}")

def _insert_stops(self):
"""Insert the origins and destinations as stops for the analysis."""
"""Insert the origins and destinations as stops for the Route analysis."""
# Make a layer for destinations for quicker access
helpers.run_gp_tool(
self.logger,
@@ -234,7 +235,7 @@ def _insert_stops(self):
origin_field_def += [self.origin_unique_id_field_name, origin_id_field.length]
dest_fields = arcpy.ListFields(self.input_destinations_layer)
location_fields = ["SourceID", "SourceOID", "PosAlong", "SideOfEdge"]
if not set(location_fields).issubset(set([f.name for f in dest_fields])):
if not set(location_fields).issubset({f.name for f in dest_fields}):
location_fields = [] # Do not use location fields for this analysis
dest_id_field = arcpy.ListFields(self.input_origins_layer, wild_card=self.dest_id_field)[0]
dest_field_def = [self.dest_unique_id_field_name, field_types[dest_id_field.type]]
@@ -292,7 +293,7 @@ def _insert_stops(self):
icur.insertRow(destination_row)

def solve(self, origins_criteria): # pylint: disable=too-many-locals, too-many-statements
"""Create and solve an Route analysis for the designated chunk of origins and their assigned destinations.
"""Create and solve a Route analysis for the designated chunk of origins and their assigned destinations.
Args:
origins_criteria (list): ObjectID range to select from the input origins
@@ -417,7 +418,7 @@ def setup_logger(self, logger_obj):


def solve_route(inputs, chunk):
"""Solve an Route analysis for the given inputs for the given chunk of ObjectIDs.
"""Solve a Route analysis for the given inputs for the given chunk of ObjectIDs.
Args:
inputs (dict): Dictionary of keyword inputs suitable for initializing the Route class
@@ -441,25 +442,29 @@ def __init__( # pylint: disable=too-many-locals, too-many-arguments
max_routes, max_processes, out_routes, reverse_direction, scratch_folder, time_of_day=None, barriers=None
):
"""Compute Routes between origins and their assigned destinations in parallel and combine results.
TODO
Compute Routes in parallel and combine and post-process the results.
Compute Routes in parallel and combine the results.
This class assumes that the inputs have already been pre-processed and validated.
Args:
origins (str): Catalog path to origins
destinations (str): Catalog path to destinations
network_data_source (str): Network data source catalog path or URL
travel_mode (str): String-based representation of a travel mode (name or JSON)
output_format (str): String representation of the output format
output_od_location (str): Catalog path to the output feature class or folder where the OD Lines output will
be stored.
max_origins (int): Maximum origins allowed in a chunk
max_destinations (int): Maximum destinations allowed in a chunk
max_processes (int): Maximum number of parallel processes allowed
origins (str, layer): Catalog path or layer for the input origins
origin_id_field (str): Unique ID field of the input origins
assigned_dest_field (str): Field in the input origins with the assigned destination ID
destinations (str, layer): Catalog path or layer for the input destinations
dest_id_field (str): Unique ID field of the input destinations
network_data_source (str, layer): Catalog path, layer, or URL for the input network dataset
travel_mode (str, travel mode): Travel mode object, name, or json string representation
time_units (str): String representation of time units
distance_units (str): String representation of distance units
barriers (list(str), optional): List of catalog paths to point, line, and polygon barriers to use.
Defaults to None.
max_routes (int): Maximum number of origin-destination pairs that can be in one chunk
max_processes (int): Maximum number of allowed parallel processes
out_routes (str): Catalog path to the output routes feature class
reverse_direction (bool, optional): Whether to reverse the direction of travel and calculate routes from
destination to origin instead of origin to destination. Defaults to False.
scratch_folder (str): Catalog path to the folder where intermediate outputs will be written.
time_of_day (str): String representation of the start time for the analysis ("%Y%m%d %H:%M" format)
barriers (list(str, layer), optional): List of catalog paths or layers for point, line, and polygon barriers
to use. Defaults to None.
"""
self.out_routes = out_routes
self.scratch_folder = scratch_folder
5 changes: 2 additions & 3 deletions rt_config.py
@@ -5,9 +5,8 @@
https://pro.arcgis.com/en/pro-app/latest/arcpy/network-analyst/route.htm
You can include any of them in the dictionary in this file, and the tool will
use them. However, travelMode, timeUnits, distanceUnits, defaultImpedanceCutoff,
and defaultDestinationCount will be ignored because they are specified in the
tool dialog. TODO
use them. However, travelMode, timeUnits, distanceUnits, and timeOfDay
will be ignored because they are specified in the tool dialog.
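
For example, an override dictionary of the kind rt_config.py holds might look like the following; the property names come from the arcpy.nax.Route documentation linked above, and the specific values are purely illustrative.

import arcpy

RT_PROPS_EXAMPLE = {
    # travelMode, timeUnits, distanceUnits, and timeOfDay would be ignored here
    # because the tool dialog supplies them.
    "routeShapeType": arcpy.nax.RouteShapeType.TrueShape,
    "returnDirections": False,
    "ignoreInvalidLocations": True,
}
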
Copyright 2022 Esri
Licensed under the Apache License, Version 2.0 (the "License");
1 change: 1 addition & 0 deletions solve_large_odcm.py
@@ -75,6 +75,7 @@ def __init__( # pylint: disable=too-many-locals, too-many-arguments
Defaults to None. When None, do not use a cutoff.
num_destinations (int, optional): The number of destinations to find for each origin. Defaults to None,
which means to find all destinations.
time_of_day (str): String representation of the start time for the analysis ("%Y%m%d %H:%M" format)
precalculate_network_locations (bool, optional): Whether to precalculate network location fields for all
inputs. Defaults to True. Should be false if the network_data_source is a service.
barriers (list(str, layer), optional): List of catalog paths or layers for point, line, and polygon barriers
41 changes: 16 additions & 25 deletions solve_large_route_pair_analysis.py
@@ -1,6 +1,6 @@
"""Compute a large analysis with origins preassigned to specific destinations
by chunking the inputs and solving in parallel. Write outputs into a single
combined feature class. TODO
combined feature class.
This is a sample script users can modify to fit their specific needs.
@@ -56,32 +56,26 @@ def __init__( # pylint: disable=too-many-locals, too-many-arguments
"""Initialize the RoutePairSolver class.
Args:
TODO
origins (str, layer): Catalog path or layer for the input origins
origin_id_field (str): Unique ID field of the input origins
assigned_dest_field (str): Field in the input origins with the assigned destination ID
destinations (str, layer): Catalog path or layer for the input destinations
dest_id_field (str): Unique ID field of the input destinations
network_data_source (str, layer): Catalog path, layer, or URL for the input network dataset
travel_mode (str, travel mode): Travel mode object, name, or json string representation
output_origins (str): Catalog path to the output Origins feature class
output_destinations (str): Catalog path to the output Destinations feature class
chunk_size (int): Maximum number of origins and destinations that can be in one chunk
max_processes (int): Maximum number of allowed parallel processes
time_units (str): String representation of time units
distance_units (str): String representation of distance units
output_format (str): String representation of the output format
output_od_lines (str, optional): Catalog path to the output OD Lines feature class. Required if
output_format is "Feature class".
output_data_folder (str, optional): Catalog path to the output folder where CSV or Arrow files will be
stored. Required if output_format is "CSV files" or "Apache Arrow files".
cutoff (float, optional): Impedance cutoff to limit the Route solve. Interpreted in the time_units
if the travel mode is time-based. Interpreted in the distance-units if the travel mode is distance-
based. Interpreted in the impedance units if the travel mode is neither time- nor distance-based.
Defaults to None. When None, do not use a cutoff.
num_destinations (int, optional): The number of destinations to find for each origin. Defaults to None,
which means to find all destinations.
precalculate_network_locations (bool, optional): Whether to precalculate network location fields for all
inputs. Defaults to True. Should be false if the network_data_source is a service.
chunk_size (int): Maximum number of origin-destination pairs that can be in one chunk
max_processes (int): Maximum number of allowed parallel processes
output_routes (str): Catalog path to the output routes feature class
time_of_day (str): String representation of the start time for the analysis ("%Y%m%d %H:%M" format)
barriers (list(str, layer), optional): List of catalog paths or layers for point, line, and polygon barriers
to use. Defaults to None.
precalculate_network_locations (bool, optional): Whether to precalculate network location fields for all
inputs. Defaults to True. Should be false if the network_data_source is a service.
sort_origins (bool, optional): Whether to sort the origins by assigned destination ID. Defaults to True.
reverse_direction (bool, optional): Whether to reverse the direction of travel and calculate routes from
destination to origin instead of origin to destination. Defaults to False.
"""
self.origins = origins
self.origin_id_field = origin_id_field
@@ -271,10 +265,7 @@ def _update_max_inputs_for_service(self):
f"Max OD pairs per chunk has been updated to {self.chunk_size} to accommodate service limits.")

def _sort_origins_by_assigned_destination(self):
"""Sort the origins by the assigned destination field.
Also adds a field called "OriginOID" to the input feature class to preserve the original OID values.
"""
"""Sort the origins by the assigned destination field."""
arcpy.AddMessage("Sorting origins by assigned destination...")

# Sort input features
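
A hedged sketch of one way to perform that sort with the Sort geoprocessing tool; the dataset paths and the AssignedDestID field name are placeholders, not the tool's actual values.

import arcpy

origins_fc = r"C:\data\outputs.gdb\output_origins"        # placeholder
sorted_origins_fc = r"C:\data\scratch.gdb\origins_sorted"  # placeholder

# Write a copy of the origins sorted by the assigned destination field.
arcpy.management.Sort(
    in_dataset=origins_fc,
    out_dataset=sorted_origins_fc,
    sort_field=[["AssignedDestID", "ASCENDING"]],
)
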
@@ -355,7 +346,7 @@ def _preprocess_inputs(self):
barrier_fc, self.network_data_source, self.travel_mode, RT_PROPS)

def _execute_solve(self):
"""Solve the multi-route analysis."""
"""Execute the solve in a subprocess."""
# Launch the parallel_route_pairs script as a subprocess so it can spawn parallel processes. We have to do this
# because a tool running in the Pro UI cannot call concurrent.futures without opening multiple instances of Pro.
cwd = os.path.dirname(os.path.abspath(__file__))
@@ -366,7 +357,7 @@ def _execute_solve(self):
"--origins-id-field", self.origin_id_field,
"--assigned-dest-field", self.assigned_dest_field,
"--destinations", self.output_destinations,
"--destinations-id-field", self.dest_id_field, ## TODO: If ID field is ObjectID, transfer it
"--destinations-id-field", self.dest_id_field,
"--network-data-source", self.network_data_source,
"--travel-mode", self.travel_mode,
"--time-units", self.time_units,