diff --git a/alembic/versions/02b8e7c7fbca_rename_driver_assignment_columns.py b/alembic/versions/02b8e7c7fbca_rename_driver_assignment_columns.py new file mode 100644 index 0000000..249b988 --- /dev/null +++ b/alembic/versions/02b8e7c7fbca_rename_driver_assignment_columns.py @@ -0,0 +1,50 @@ +"""Rename driver assignment columns + +Revision ID: 02b8e7c7fbca +Revises: 69859f4d0367 +Create Date: 2020-08-18 23:17:43.161501 + +""" +from alembic import op +import sqlalchemy as sa +from sqlalchemy.dialects import postgresql + +# revision identifiers, used by Alembic. +revision = '02b8e7c7fbca' +down_revision = '69859f4d0367' +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.add_column('driver_assignment', sa.Column('trip_dropoff_addresses', postgresql.ARRAY(sa.String()), nullable=True)) + op.add_column('driver_assignment', sa.Column('trip_estimated_dropoff_times', postgresql.ARRAY(sa.Interval()), nullable=True)) + op.add_column('driver_assignment', sa.Column('trip_estimated_pickup_times', postgresql.ARRAY(sa.Interval()), nullable=True)) + op.add_column('driver_assignment', sa.Column('trip_pickup_addresses', postgresql.ARRAY(sa.String()), nullable=True)) + op.add_column('driver_assignment', sa.Column('trip_scheduled_dropoff_times', postgresql.ARRAY(sa.Interval()), nullable=True)) + op.add_column('driver_assignment', sa.Column('trip_scheduled_pickup_times', postgresql.ARRAY(sa.Interval()), nullable=True)) + op.drop_column('driver_assignment', 'trip_est_pu') + op.drop_column('driver_assignment', 'trip_sch_pu') + op.drop_column('driver_assignment', 'trip_do') + op.drop_column('driver_assignment', 'trip_pu') + op.drop_column('driver_assignment', 'trip_sch_do') + op.drop_column('driver_assignment', 'trip_est_do') + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.add_column('driver_assignment', sa.Column('trip_est_do', postgresql.ARRAY(postgresql.INTERVAL()), autoincrement=False, nullable=True)) + op.add_column('driver_assignment', sa.Column('trip_sch_do', postgresql.ARRAY(postgresql.INTERVAL()), autoincrement=False, nullable=True)) + op.add_column('driver_assignment', sa.Column('trip_pu', postgresql.ARRAY(sa.VARCHAR()), autoincrement=False, nullable=True)) + op.add_column('driver_assignment', sa.Column('trip_do', postgresql.ARRAY(sa.VARCHAR()), autoincrement=False, nullable=True)) + op.add_column('driver_assignment', sa.Column('trip_sch_pu', postgresql.ARRAY(postgresql.INTERVAL()), autoincrement=False, nullable=True)) + op.add_column('driver_assignment', sa.Column('trip_est_pu', postgresql.ARRAY(postgresql.INTERVAL()), autoincrement=False, nullable=True)) + op.drop_column('driver_assignment', 'trip_scheduled_pickup_times') + op.drop_column('driver_assignment', 'trip_scheduled_dropoff_times') + op.drop_column('driver_assignment', 'trip_pickup_addresses') + op.drop_column('driver_assignment', 'trip_estimated_pickup_times') + op.drop_column('driver_assignment', 'trip_estimated_dropoff_times') + op.drop_column('driver_assignment', 'trip_dropoff_addresses') + # ### end Alembic commands ### diff --git a/avicena/__init__.py b/avicena/__init__.py index 7f93189..2ede8d5 100644 --- a/avicena/__init__.py +++ b/avicena/__init__.py @@ -1 +1 @@ -from . import util, parsers, optimizers, models, app \ No newline at end of file +from . 
import util, parsers, optimizers, models, app diff --git a/avicena/app/run.py b/avicena/app/run.py index 7a73815..71e711d 100644 --- a/avicena/app/run.py +++ b/avicena/app/run.py @@ -1,24 +1,24 @@ +import argparse import os import random +from datetime import datetime +from types import ModuleType from typing import Type, Union, Dict, List, Any -import yaml -import argparse +import yaml from pandas import DataFrame from sqlalchemy.orm import Session from avicena.models.Assignment import generate_visualization_from_df, load_assignment_from_df -from avicena.util.Database import create_db_session, save_and_commit_to_db, close_db_session from avicena.models.Driver import load_drivers_from_db, load_drivers_from_csv, prepare_drivers_for_optimizer, Driver from avicena.models.MergeAddress import load_merge_details_from_csv, load_merge_details_from_db, MergeAddress from avicena.models.RevenueRate import load_revenue_table_from_csv, load_revenue_table_from_db, RevenueRate from avicena.models.Trip import load_and_filter_valid_trips_from_df, Trip +from avicena.optimizers.GeneralOptimizer import GeneralOptimizer from avicena.parsers import LogistiCareParser, CSVParser from avicena.util.ConfigValidation import validate_app_config +from avicena.util.Database import create_db_session, save_and_commit_to_db, close_db_session from avicena.util.Exceptions import InvalidConfigException -from avicena.optimizers.GeneralOptimizer import GeneralOptimizer -from datetime import datetime - from avicena.util.ParserUtil import verify_and_save_parsed_trips_df_to_csv # Supported Parser and Optimizer types that will be passed into the Config file @@ -26,7 +26,7 @@ optimizers = {'GeneralOptimizer': GeneralOptimizer} -def _run_parser(trip_parser: Union[Type[LogistiCareParser], Type[CSVParser]], trips_file: str, +def _run_parser(trip_parser: Union[Type[ModuleType]], trips_file: str, revenue_table: Dict[str, List[RevenueRate]], merge_details: Dict[str, MergeAddress], assumed_speed: int, model_name: str, output_directory: str) -> List[Trip]: """ @@ -68,7 +68,7 @@ def _run_optimizer(trip_optimizer: Union[Type[GeneralOptimizer]], trips: List[Tr def _retrieve_database_inputs(db_session: Session) -> ( -Dict[str, List[RevenueRate]], Dict[str, MergeAddress], List[Driver]): + Dict[str, List[RevenueRate]], Dict[str, MergeAddress], List[Driver]): """ Retrieve the static inputs of the model from the database :param db_session: SQLAlchemy Database connection session @@ -83,7 +83,7 @@ def _retrieve_database_inputs(db_session: Session) -> ( def _retrieve_csv_inputs(app_config: Dict[str, Any]) -> ( -Dict[str, List[RevenueRate]], Dict[str, MergeAddress], List[Driver]): + Dict[str, List[RevenueRate]], Dict[str, MergeAddress], List[Driver]): """ Retrieve static inputs of the model from CSV files :param app_config: App Configuration @@ -155,7 +155,7 @@ def _retrieve_csv_inputs(app_config: Dict[str, Any]) -> ( else: revenue_table, merge_details, drivers_table = _retrieve_csv_inputs(app_config) trips = _run_parser(trip_parser, args.trips_file, revenue_table, merge_details, args.speed, - app_config['output_directory']) + args.name, app_config['output_directory']) drivers = prepare_drivers_for_optimizer(drivers_table, args.driver_ids, args.date) solution = _run_optimizer(trip_optimizer, trips, drivers, args.name, args.date, args.speed, optimizer_config, app_config['output_directory']) diff --git a/avicena/models/Assignment.py b/avicena/models/Assignment.py index 70d16c7..c6ddd1e 100644 --- a/avicena/models/Assignment.py +++ 
b/avicena/models/Assignment.py @@ -1,13 +1,12 @@ import random -from typing import Dict, Any, Union, Mapping, Tuple, List +from datetime import datetime, timedelta +from typing import Dict, Any, Tuple, List -import pandas as pd import numpy as np +import pandas as pd import plotly.graph_objects as go from pandas import DataFrame, Series from plotly.subplots import make_subplots - -from datetime import datetime, timedelta from sqlalchemy import Column, Integer, DateTime, String, Interval, Float from sqlalchemy.dialects.postgresql import ARRAY as Array from sqlalchemy.orm import relationship, Session @@ -90,7 +89,7 @@ def generate_visualization(self, visualization_file_name: str = 'visualized.html """ # Prepare Table Setup - titles = self.driver_names + titles = list(self.driver_names) titles.insert(0, "Map") titles.insert(1, "Driver Summary: " + self.name) subplots = [[{"type": "table"}]] * (len(self.driver_names) + 1) @@ -114,14 +113,15 @@ def generate_visualization(self, visualization_file_name: str = 'visualized.html for i, name in enumerate(self.driver_names): r = lambda: random.randint(0, 255) col = '#%02X%02X%02X' % (r(), r(), r()) + print(i, name, self.driver_names, self.driver_assignments) driver_assignment = self.driver_assignments[i] details = [driver_assignment.trip_ids, - driver_assignment.trip_pu, - driver_assignment.trip_do, - list(map(timedelta_to_hhmmss, driver_assignment.trip_est_pu)), - list(map(timedelta_to_hhmmss, driver_assignment.trip_sch_pu)), - list(map(timedelta_to_hhmmss, driver_assignment.trip_est_do)), - list(map(timedelta_to_hhmmss, driver_assignment.trip_sch_do)), + driver_assignment.trip_pickup_addresses, + driver_assignment.trip_dropoff_addresses, + list(map(timedelta_to_hhmmss, driver_assignment.trip_estimated_pickup_times)), + list(map(timedelta_to_hhmmss, driver_assignment.trip_scheduled_pickup_times)), + list(map(timedelta_to_hhmmss, driver_assignment.trip_estimated_dropoff_times)), + list(map(timedelta_to_hhmmss, driver_assignment.trip_scheduled_dropoff_times)), driver_assignment.trip_miles, driver_assignment.trip_los, driver_assignment.trip_rev] diff --git a/avicena/models/Driver.py b/avicena/models/Driver.py index 42464fa..fd9c736 100644 --- a/avicena/models/Driver.py +++ b/avicena/models/Driver.py @@ -1,4 +1,3 @@ -import datetime import random from typing import List, Iterable diff --git a/avicena/models/MergeAddress.py b/avicena/models/MergeAddress.py index 6156d86..b79fbd9 100644 --- a/avicena/models/MergeAddress.py +++ b/avicena/models/MergeAddress.py @@ -1,10 +1,8 @@ +from datetime import timedelta from typing import Dict import pandas as pd - from sqlalchemy import Column, Integer, String, Interval -from datetime import timedelta - from sqlalchemy.orm import Session from . 
import Base diff --git a/avicena/models/RevenueRate.py b/avicena/models/RevenueRate.py index 8cffbfa..e0f8ba3 100644 --- a/avicena/models/RevenueRate.py +++ b/avicena/models/RevenueRate.py @@ -45,7 +45,7 @@ def calculate_revenue(self, miles: float) -> float: :param miles: distance for which revenue is being calculated :return: revenue made for trip with given distance """ - if self.lower_mileage_bound <= miles <= self.upper_mileage_bound: + if not self.lower_mileage_bound <= miles <= self.upper_mileage_bound: raise InvalidRevenueRateMileageException( f"{miles} miles not within RevenueRate bounds [{self.lower_mileage_bound},{self.upper_mileage_bound}]") return self.base_rate + self.revenue_per_mile * miles diff --git a/avicena/models/__init__.py b/avicena/models/__init__.py index a948194..be46c30 100644 --- a/avicena/models/__init__.py +++ b/avicena/models/__init__.py @@ -4,10 +4,4 @@ metadata = MetaData() Base = declarative_base(metadata=metadata) -from .Driver import Driver -from .Trip import Trip -from .Location import Location -from .LocationPair import LocationPair -from .Assignment import Assignment -from .DriverAssignment import DriverAssignment -from .RevenueRate import RevenueRate +from . import Assignment, Driver, DriverAssignment, Location, LocationPair, MergeAddress, RevenueRate, Trip \ No newline at end of file diff --git a/avicena/optimizers/BaseOptimizer.py b/avicena/optimizers/BaseOptimizer.py index f2b66e7..dfebe01 100644 --- a/avicena/optimizers/BaseOptimizer.py +++ b/avicena/optimizers/BaseOptimizer.py @@ -3,7 +3,8 @@ from docplex.mp.model import Model from pandas import DataFrame -from avicena.models import Trip, Driver +from avicena.models.Trip import Trip +from avicena.models.Driver import Driver class BaseOptimizer: @@ -12,7 +13,9 @@ class BaseOptimizer: Every optimizer must extend from BaseOptimizer, ensure the initialization details are filled, and implement the "solve" method. """ - def __init__(self, trips: List[Trip], drivers: List[Driver], name: str, date: str, speed: int, config: Dict[str, Any]) -> None: + + def __init__(self, trips: List[Trip], drivers: List[Driver], name: str, date: str, speed: int, + config: Dict[str, Any]) -> None: """ Initialize the Model Base Optimizer. It also sets the configuration details used by all optimizers :param trips: List of valid Trip objects that were parsed and cleaned from the input file diff --git a/avicena/optimizers/GeneralOptimizer.py b/avicena/optimizers/GeneralOptimizer.py index 5d14e07..e2fb5cf 100644 --- a/avicena/optimizers/GeneralOptimizer.py +++ b/avicena/optimizers/GeneralOptimizer.py @@ -1,16 +1,13 @@ +from copy import copy from typing import List, Any, Dict, Iterable import pandas as pd - -from copy import copy from docloud.status import JobSolveStatus -from docplex.mp.conflict_refiner import ConflictRefiner, VarLbConstraintWrapper, VarUbConstraintWrapper -from docplex.mp.model import Model -from docplex.mp.relaxer import Relaxer from docplex.mp.utils import DOcplexException from pandas import DataFrame -from avicena.models import Trip, Location, Driver +from avicena.models.Trip import Trip, Location +from avicena.models.Driver import Driver from avicena.optimizers.BaseOptimizer import BaseOptimizer from avicena.optimizers.solver_util.cplex.Listeners import GapListener, TimeListener from avicena.util.Exceptions import InvalidTripException, SolutionNotFoundException @@ -24,6 +21,7 @@ class GeneralOptimizer(BaseOptimizer): The GeneralOptimizer uses CPLEX to solve the Patient Dispatch problem. 
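The pattern the hunks below keep reworking is one binary assignment variable per driver-trip pair plus continuous time and capacity variables linked to it, so a trip's service time and occupied capacity are forced to zero unless that driver actually takes the trip. A minimal, self-contained docplex sketch of that pattern, with hypothetical driver capacities and trip IDs standing in for the real Driver and Trip objects (the objective is only a placeholder, not the model's real penalty terms):

from docplex.mp.model import Model

# Hypothetical toy data standing in for Driver/Trip objects.
drivers = {"d1": 2.5, "d2": 1.5}   # driver id -> vehicle capacity
trips = ["t1", "t2"]

mdl = Model(name="assignment_sketch")
trip_vars, time_vars, capacity_vars = {}, {}, {}
for d, cap in drivers.items():
    for t in trips:
        # y_d_t = 1 if driver d performs trip t
        trip_vars[d, t] = mdl.binary_var(name=f"y_{d}_{t}")
        # fraction-of-day service time, forced to 0 unless the trip is assigned
        time_vars[d, t] = mdl.continuous_var(lb=0, ub=1, name=f"t_{d}_{t}")
        mdl.add_constraint(time_vars[d, t] - trip_vars[d, t] <= 0)
        # occupied capacity, bounded by the driver's capacity and the assignment
        capacity_vars[d, t] = mdl.continuous_var(lb=0, ub=cap, name=f"q_{d}_{t}")
        mdl.add_constraint(capacity_vars[d, t] - trip_vars[d, t] * cap <= 0)

# every trip is served by exactly one driver
for t in trips:
    mdl.add_constraint(mdl.sum(trip_vars[d, t] for d in drivers) == 1)

mdl.minimize(mdl.sum(time_vars.values()))  # placeholder objective
solution = mdl.solve()  # requires a local CPLEX installation
if solution:
    print({k: v.solution_value for k, v in trip_vars.items()})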
The exact formulation can be found at: """ + def __init__(self, trips: List[Trip], drivers: List[Driver], name: str, date: str, speed: int, config: Dict[str, Any]) -> None: """ @@ -37,7 +35,7 @@ def __init__(self, trips: List[Trip], drivers: List[Driver], name: str, date: st """ super().__init__(trips, drivers, name, date, speed, config) self.drivers = list() # List of all Drivers - self.primary_trips = dict() # Map Primary trip pair to trip object + self.primary_trips = dict() # Map Primary trip pair to trip object self.all_trips = dict() # Maps Trip-ID to Trip Object self.driver_nodes = set() # All Driver Nodes self.driver_starts = set() # Starting Nodes of Drivers @@ -52,8 +50,8 @@ def __init__(self, trips: List[Trip], drivers: List[Driver], name: str, date: st self.node_window_close = dict() # earliest arrival time to a node self.location_to_primary_trip_id_map = dict() # Map from starting location to ID of primary trip from that location self.merges = dict() # Map from merge trip to incoming primary trip - self.revenues = dict() # Map from start node to revenue of the trip - self.wheelchair_locations = set() # Set of locations where wheelchair trips start + self.revenues = dict() # Map from start node to revenue of the trip + self.wheelchair_locations = set() # Set of locations where wheelchair trips start # Decision Variable Structures self.trip_vars = dict() # Map from driver to map of trip to model variable @@ -106,10 +104,12 @@ def filter_driver_feasible_trips(self, driver: Driver, iter: Iterable[Trip]) -> :param iter: Iterable of Trips :return: Filter generator of feasible trips that a given driver can perform """ - return filter(lambda t: not ((t.lp.o in self.driver_nodes and t.lp.o.get_clean_address() != driver.get_clean_address()) or ( - t.lp.d in self.driver_nodes and t.lp.d.get_clean_address() != driver.get_clean_address())) + return filter(lambda t: not ( + (t.lp.o in self.driver_nodes and t.lp.o.get_clean_address() != driver.get_clean_address()) or ( + t.lp.d in self.driver_nodes and t.lp.d.get_clean_address() != driver.get_clean_address())) and t.required_level_of_service in driver.level_of_service - and not (abs(self.node_capacities[t.lp.o] + self.node_capacities[t.lp.d]) > driver.capacity), iter) + and not ( + abs(self.node_capacities[t.lp.o] + self.node_capacities[t.lp.d]) > driver.capacity), iter) def __prepare_trip_parameters(self) -> None: """ @@ -244,7 +244,8 @@ def __generate_variables(self) -> None: try: space = 0 if rS in self.wheelchair_locations: space = 1.5 - t = Trip(rS, rE, space, id, self.node_window_open[rS], self.node_window_close[rE], self.SPEED, False, 0.0) + t = Trip(rS, rE, space, id, self.node_window_open[rS], self.node_window_close[rE], self.SPEED, + False, 0.0) except InvalidTripException: # print(rS, rE, nodeDeps[rS], nodeArrs[rE]) continue @@ -270,7 +271,8 @@ def __generate_variables(self) -> None: self.trip_vars[d][t] = self.mdl.binary_var(name='y' + '_' + str(d.id) + '_' + str(t.id)) self.time_vars[d][t] = self.mdl.continuous_var(lb=0, ub=1, name='t' + '_' + str(d.id) + '_' + str(t.id)) self.mdl.add_constraint(self.time_vars[d][t] - self.trip_vars[d][t] <= 0) - self.capacity_vars[d][t] = self.mdl.continuous_var(lb=0, ub=d.capacity, name='q' + '_' + str(d.id) + '_' + str(t.id)) + self.capacity_vars[d][t] = self.mdl.continuous_var(lb=0, ub=d.capacity, + name='q' + '_' + str(d.id) + '_' + str(t.id)) self.mdl.add_constraint(self.capacity_vars[d][t] - self.trip_vars[d][t] * d.capacity <= 0) def __prepare_constraints(self) -> None: @@ -353,7 +355,8 
@@ def __prepare_constraints(self) -> None: """ for trp in self.all_trips: if isinstance(trp, str): - if (trp.endswith('A') and (trp[:-1] + 'B' in self.all_trips)) or (trp.endswith('B') and (trp[:-1] + 'C' in self.all_trips)): + if (trp.endswith('A') and (trp[:-1] + 'B' in self.all_trips)) or ( + trp.endswith('B') and (trp[:-1] + 'C' in self.all_trips)): main_origin = self.all_trips[trp].lp.o main_dest = self.all_trips[trp].lp.d if trp.endswith('A'): @@ -382,7 +385,8 @@ def __prepare_constraints(self) -> None: for otrip in self.filter_driver_feasible_trips(d2, self.outtrips[alt_dest]): alt_dest_incoming_time_var_sum += self.time_vars[d2][otrip] self.mdl.add_constraint(main_origin_outtrip_time_var_sum <= main_dest_intrip_time_var_sum) - self.mdl.add_constraint(main_dest_intrip_time_var_sum + main_dest_incoming_travel_time_sum <= alt_origin_outgoing_time_var_sum) + self.mdl.add_constraint( + main_dest_intrip_time_var_sum + main_dest_incoming_travel_time_sum <= alt_origin_outgoing_time_var_sum) self.mdl.add_constraint(alt_origin_outgoing_time_var_sum <= alt_dest_incoming_time_var_sum) print("Set primary trip precedence constraints") @@ -485,8 +489,10 @@ def __add_custom_constraints(self) -> None: """ for d in self.drivers: for mer in self.filter_driver_feasible_trips(d, self.merges): - self.mdl.add_constraint(ct =self.trip_vars[d][mer] == self.trip_vars[d][self.merges[mer]]) - self.obj += self.MERGE_PEN * (self.time_vars[d][mer] - (self.time_vars[d][self.merges[mer]] + self.merges[mer].lp.time * self.trip_vars[d][mer])) * (24) + self.mdl.add_constraint(ct=self.trip_vars[d][mer] == self.trip_vars[d][self.merges[mer]]) + self.obj += self.MERGE_PEN * (self.time_vars[d][mer] - ( + self.time_vars[d][self.merges[mer]] + self.merges[mer].lp.time * self.trip_vars[d][ + mer])) * (24) """ Equalizing Revenue Penalty """ @@ -494,7 +500,9 @@ def __add_custom_constraints(self) -> None: self.rev_min = self.mdl.continuous_var(0) for d in self.drivers: self.revenue_vars[d] = self.mdl.continuous_var(lb=0, name="Revenue" + str(d.id)) - self.mdl.add_constraint(self.revenue_vars[d] == sum(self.revenues[t.lp.o] * self.trip_vars[d][t] for t in self.filter_driver_feasible_trips(d, self.all_trips.values()))) + self.mdl.add_constraint(self.revenue_vars[d] == sum(self.revenues[t.lp.o] * self.trip_vars[d][t] for t in + self.filter_driver_feasible_trips(d, + self.all_trips.values()))) self.mdl.add_constraint(self.rev_max >= self.revenue_vars[d]) self.mdl.add_constraint(self.rev_min <= self.revenue_vars[d]) self.obj += self.REVENUE_PEN * (self.rev_max - self.rev_min) @@ -507,7 +515,10 @@ def __add_custom_constraints(self) -> None: for d in self.drivers: if 'W' not in d.level_of_service: continue self.wheelchair_vars[d] = self.mdl.continuous_var(lb=0, name="Wheelchairs" + str(d.id)) - self.mdl.add_constraint(self.wheelchair_vars[d] == sum(self.trip_vars[d][t] for t in filter(lambda x: x.required_level_of_service == 'W', self.filter_driver_feasible_trips(d, self.all_trips.values())))) + self.mdl.add_constraint(self.wheelchair_vars[d] == sum(self.trip_vars[d][t] for t in + filter(lambda x: x.required_level_of_service == 'W', + self.filter_driver_feasible_trips(d, + self.all_trips.values())))) self.mdl.add_constraint(self.max_wheelchair_trips >= self.wheelchair_vars[d]) self.mdl.add_constraint(self.min_wheelchair_trips <= self.wheelchair_vars[d]) self.obj += self.W_PEN * (self.max_wheelchair_trips - self.min_wheelchair_trips) @@ -532,7 +543,8 @@ def solve(self, solution_file: str, save_stages: bool = False) -> DataFrame: 
progress_listener = TimeListener(self.STAGE1_TIME) self.mdl.add_progress_listener(progress_listener) first_solve = self.mdl.solve() - if first_solve and (first_solve.solve_status == JobSolveStatus.FEASIBLE_SOLUTION or first_solve.solve_status == JobSolveStatus.OPTIMAL_SOLUTION): + if first_solve and ( + first_solve.solve_status == JobSolveStatus.FEASIBLE_SOLUTION or first_solve.solve_status == JobSolveStatus.OPTIMAL_SOLUTION): print("First solve status: " + str(self.mdl.get_solve_status())) print("First solve obj value: " + str(self.mdl.objective_value)) if save_stages: self.__save_solution(solution_file + '_stage1') @@ -591,7 +603,8 @@ def solve(self, solution_file: str, save_stages: bool = False) -> DataFrame: print("Total Number of trip miles by each driver: ", driver_miles) break if self.solution_df is None: - raise SolutionNotFoundException(f"General Optimizer failed to find solution for {self.mdl.name} after {self.MAX_RETRIES} tries") + raise SolutionNotFoundException( + f"General Optimizer failed to find solution for {self.mdl.name} after {self.MAX_RETRIES} tries") return self.solution_df @@ -613,6 +626,7 @@ def __save_solution(self, solution_file: str) -> None: Write solution in CSV format to the :param solution_file: Path to where solution will be saved """ + def assigned_trip_generator(): for d, driver_trips in self.trip_vars.items(): for t, var in driver_trips.items(): @@ -630,7 +644,9 @@ def debug_trip_generator(d): for d, t in sorted(debug_trip_generator(dr), key=lambda x: self.time_vars[x[0]][x[1]].solution_value): print(d.name, t.lp.o, t.lp.d, self.time_vars[d][t].solution_value, t.lp.time) - columns = ['trip_id','driver_id','driver_name','trip_date','trip_pickup_address','trip_pickup_time','est_pickup_time','trip_dropoff_address','trip_dropoff_time','est_dropoff_time','trip_los','est_miles','est_time','trip_rev'] + columns = ['trip_id', 'driver_id', 'driver_name', 'trip_date', 'trip_pickup_address', 'trip_pickup_time', + 'est_pickup_time', 'trip_dropoff_address', 'trip_dropoff_time', 'est_dropoff_time', 'trip_los', + 'est_miles', 'est_time', 'trip_rev'] data = [] for d, t in sorted(assigned_trip_generator(), key=lambda x: self.time_vars[x[0]][x[1]].solution_value): end_time = -1 @@ -640,18 +656,22 @@ def debug_trip_generator(d): end_time = self.time_vars[d][intrip].solution_value + intrip.lp.time if end_time < self.time_vars[d][t].solution_value + t.lp.time: print('Something wrong') - print(sum(self.trip_vars[d][intrip].solution_value for intrip in self.filter_driver_feasible_trips(d, self.intrips[rE]))) + print(sum(self.trip_vars[d][intrip].solution_value for intrip in + self.filter_driver_feasible_trips(d, self.intrips[rE]))) print(rE) print(t.lp.o, t.lp.d) print(intrip.lp.o, intrip.lp.d) - print(t.id, self.time_vars[d][t].solution_value, self.time_vars[d][intrip].solution_value, intrip.lp.time) + print(t.id, self.time_vars[d][t].solution_value, self.time_vars[d][intrip].solution_value, + intrip.lp.time) break if end_time < 0: print("Something wrong") required_end = self.all_trips[self.location_to_primary_trip_id_map[t.lp.o]].scheduled_dropoff ptrip = self.all_trips[self.location_to_primary_trip_id_map[t.lp.o]] - data.append([self.location_to_primary_trip_id_map[t.lp.o], d.id, d.name, self.date,t.lp.o.get_clean_address(), - t.scheduled_pickup, self.time_vars[d][t].solution_value, rE.get_clean_address(), required_end, end_time, - t.required_level_of_service, ptrip.lp.miles, ptrip.lp.time, self.revenues[t.lp.o]]) + data.append( + 
[self.location_to_primary_trip_id_map[t.lp.o], d.id, d.name, self.date, t.lp.o.get_clean_address(), + t.scheduled_pickup, self.time_vars[d][t].solution_value, rE.get_clean_address(), required_end, + end_time, + t.required_level_of_service, ptrip.lp.miles, ptrip.lp.time, self.revenues[t.lp.o]]) self.solution_df = pd.DataFrame(data, columns=columns) self.solution_df.to_csv(solution_file) diff --git a/avicena/optimizers/PDWTWOptimizer.py b/avicena/optimizers/PDWTWOptimizer.py index ab55bc0..f183858 100644 --- a/avicena/optimizers/PDWTWOptimizer.py +++ b/avicena/optimizers/PDWTWOptimizer.py @@ -1,15 +1,14 @@ -from copy import copy -from datetime import datetime, timedelta -import pandas as pd +import random +from datetime import timedelta + import numpy as np +import pandas as pd import plotly.graph_objects as go -from plotly.subplots import make_subplots - from docplex.mp.model import Model +from plotly.subplots import make_subplots -from experimental.Trip import Trip, locations, TripType +from experimental.Trip import Trip, TripType from experimental.listeners import TimeListener, GapListener -import random class PDWTWOptimizer: @@ -36,8 +35,8 @@ def __init__(self, trips, drivers, params): self.t = [] # time of traversing trip ij; length of A self.c = [] # cost of traversing trip ij; length of A self.r = [] # driver revenue for doing a trip; length of P - self.merges = [] # binary whether merge trip was satisfied - self.location_pair = set() # Set of tuples of pickup and dropoff pairs + self.merges = [] # binary whether merge trip was satisfied + self.location_pair = set() # Set of tuples of pickup and dropoff pairs self.homes = set() # set of home locations self.not_homes = set() # set of medical office locations self.inflow_trips = dict() # mapping between a location and list of trips ending at the location @@ -46,9 +45,9 @@ def __init__(self, trips, drivers, params): self.idxes = dict() # mapping between location and associated index self.tripdex = dict() # mapping between location_pair and index of trip in trip time/cost/binary var containers self.primaryTID = set() # set of IDs of primary trips - self.primaryOIDs = dict() # map from origin location to primary trip ID + self.primaryOIDs = dict() # map from origin location to primary trip ID self.opposingTrip = dict() # mapping between trip ID and trip - self.mergeDict = dict() # Map between origins of two merge trip locations + self.mergeDict = dict() # Map between origins of two merge trip locations # Constants self.TRIPS_TO_DO = params["TRIPS_TO_DO"] @@ -127,7 +126,7 @@ def __prepare_constraints(self): for intrip in self.inflow_trips[i]: # print((intrip.lp.o, intrip.lp.d)) in_total += self.x[self.tripdex[(intrip.lp.o, intrip.lp.d)]] - self.mdl.add_constraint(total == in_total, "Drivers Returning to Depot" ) + self.mdl.add_constraint(total == in_total, "Drivers Returning to Depot") else: self.mdl.add_constraint(total == 1, "Primary Location Exited " + i) """ @@ -137,7 +136,7 @@ def __prepare_constraints(self): for j, d in enumerate(self.PuD): if o != d: self.mdl.add_constraint(ct=self.B[j] >= self.B[i] + self.t[self.tripdex[(o, d)]] - self.BIGM * ( - 1 - self.x[self.tripdex[(o, d)]])) + 1 - self.x[self.tripdex[(o, d)]])) self.mdl.add_constraint( ct=self.Q[j] >= self.Q[i] + self.q[j] - self.BIGM * (1 - self.x[self.tripdex[(o, d)]])) """ @@ -163,7 +162,7 @@ def __prepare_constraints(self): for j, loc in enumerate(self.PuD): self.mdl.add_constraint(self.v[j] >= j * self.x[self.tripdex[(self.driverstart, loc)]]) 
self.mdl.add_constraint(self.v[j] <= j * self.x[self.tripdex[(self.driverstart, loc)]] - n * ( - self.x[self.tripdex[(self.driverstart, loc)]] - 1)) + self.x[self.tripdex[(self.driverstart, loc)]] - 1)) for i, o in enumerate(self.PuD): for j, d in enumerate(self.PuD): if o != d: @@ -182,9 +181,9 @@ def __prepare_custom_constraints(self): """ for i, loc1 in enumerate(self.P): for j, loc2 in enumerate(self.P): - if loc1 == loc2 or abs(self.e[i] - self.e[j+len(self.P)]) <= self.ROUTE_LIMIT: continue - z1 = self.mdl.binary_var(loc1+loc2+'z1') - z2 = self.mdl.binary_var(loc1+loc2+'z2') + if loc1 == loc2 or abs(self.e[i] - self.e[j + len(self.P)]) <= self.ROUTE_LIMIT: continue + z1 = self.mdl.binary_var(loc1 + loc2 + 'z1') + z2 = self.mdl.binary_var(loc1 + loc2 + 'z2') m = self.v[i] y = self.v[j] self.mdl.add_constraint(y - m <= -0.5 * z1 + n * z2) @@ -202,7 +201,7 @@ def __prepare_custom_constraints(self): m = self.v[self.idxes[loc1]] y = self.v[self.idxes[tr.lp.o]] self.mdl.add_constraint(y - m <= -0.005 * z1 + n * z2) - self.mdl.add_constraint(y - m >= -n*z1 + z2 * 0.005) + self.mdl.add_constraint(y - m >= -n * z1 + z2 * 0.005) self.mdl.add_constraint(x + z1 + z2 == 1) self.obj += self.MERGE_PEN * (1 - x) @@ -218,8 +217,8 @@ def __prepare_custom_constraints(self): a = k - 0.4 b = k + 0.4 delta = self.mdl.binary_var(self.primaryOIDs[loc2] + "-delta-" + str(k)) - self.mdl.add_constraint(x <= a + self.BIGM * delta + self.BIGM * var) - self.mdl.add_constraint(x >= b - self.BIGM * (1 - delta) - self.BIGM * var) + self.mdl.add_constraint(x <= a + self.BIGM * delta + self.BIGM * var) + self.mdl.add_constraint(x >= b - self.BIGM * (1 - delta) - self.BIGM * var) self.mdl.add_constraint(x >= a - self.BIGM * (1 - var)) self.mdl.add_constraint(x <= b + self.BIGM * (1 - var)) idx_vars.append(var) @@ -244,7 +243,6 @@ def __prepare_custom_constraints(self): self.mdl.add_constraint(unique_w_routes >= self.MIN_W_DRIVERS) self.obj += self.W_DRIVER_PEN * (unique_w_routes - self.MIN_W_DRIVERS) - """ Equalizing Revenue Penalty """ @@ -256,23 +254,21 @@ def __prepare_custom_constraints(self): tot = sum(self.loc_v_binary[loc_idx][k] * self.r[loc_idx] for loc_idx in self.loc_v_binary) # bin_tot = sum(self.loc_v_binary[loc_idx][k] for loc_idx in self.loc_v_binary) # self.mdl.add_equivalence(bin_var, bin_tot == 0) - sum_var = self.mdl.continuous_var(0,name=str(k) + "Revenue") + sum_var = self.mdl.continuous_var(0, name=str(k) + "Revenue") self.mdl.add_constraint(bin_var * self.BIGM >= tot) self.mdl.add_constraint(tot >= bin_var) self.mdl.add_constraint(self.rev_max >= tot) - self.mdl.add_constraint(tot + (1-bin_var)*self.BIGM >= self.rev_min) + self.mdl.add_constraint(tot + (1 - bin_var) * self.BIGM >= self.rev_min) self.mdl.add_constraint(sum_var == tot) self.revens.append(sum_var) self.mdl.add_constraint(self.rev_max >= self.rev_min) self.obj += self.REVENUE_PEN * (self.rev_max - self.rev_min) - """ Adjustable Speed Penalty """ # self.obj += self.SPEED_PENALTY * (1/self.MIN_SPEED - self.SPEED) - def __generate_trips(self): # self.SPEED = self.mdl.continuous_var(1/80, 1/self.MIN_SPEED, "Speed") # print(self.SPEED) @@ -330,9 +326,8 @@ def __generate_trips(self): t.write("Start,End,Time") c.write("Start,End,Cost") for pair, trp in self.trip_map.items(): - t.write(pair[0]+ "," + pair[1] + "," + str(trp.lp.time)) - c.write(pair[0]+ "," + pair[1] + "," + str(trp.lp.miles)) - + t.write(pair[0] + "," + pair[1] + "," + str(trp.lp.time)) + c.write(pair[0] + "," + pair[1] + "," + str(trp.lp.miles)) def __prepare_depot(self): 
self.N.append(self.drivers[self.DRIVER_IDX].address) @@ -397,7 +392,8 @@ def __prepare_trip_parameters(self): Dv.append(self.mdl.integer_var(lb=0, ub=len(self.trips) + 1, name='v_' + str( self.TRIPS_TO_DO + count))) # Varaible for undex of first location on route dropoff if trip.type == TripType.MERGE: - vars = (self.mdl.binary_var(name=trip.id), self.mdl.binary_var(name=trip.id + 'z1'), self.mdl.binary_var(name=trip.id + 'z2')) + vars = (self.mdl.binary_var(name=trip.id), self.mdl.binary_var(name=trip.id + 'z1'), + self.mdl.binary_var(name=trip.id + 'z2')) self.merges.append(vars) if 'B' in trip.id: self.mergeDict[self.merges[-1]] = (trip.lp.o, self.opposingTrip[trip.id[:-1] + 'A']) @@ -506,7 +502,8 @@ def names(id): def get_labels(trips): data = "
".join( - "0" * (10 - len(str(t.id))) + str(t.id) + " | " + str(timedelta(days=self.B[self.idxes[t.lp.o]].solution_value)).split('.')[0] + + "0" * (10 - len(str(t.id))) + str(t.id) + " | " + + str(timedelta(days=self.B[self.idxes[t.lp.o]].solution_value)).split('.')[0] + " | " + str(int(self.v[self.idxes[t.lp.o]].solution_value)) for t in trips ) return trips[0].lp.o[:-4] + "
<br>TripID, Time, DriverID<br>
" + data @@ -516,10 +513,10 @@ def get_labels(trips): titles = [names(i) for i in driver_ids] titles.insert(0, "Map") titles.insert(1, "Driver Summary") - subplots = [[{"type":"table"}]] * (len(driver_ids) + 1) - subplots.insert(0,[{"type": "scattermapbox"}]) - map_height = 600/ (600 + 400 * (len(driver_ids) + 1)) - heights = [(1-map_height-0.05)/((len(driver_ids)) + 1)] * (len(driver_ids) +1) + subplots = [[{"type": "table"}]] * (len(driver_ids) + 1) + subplots.insert(0, [{"type": "scattermapbox"}]) + map_height = 600 / (600 + 400 * (len(driver_ids) + 1)) + heights = [(1 - map_height - 0.05) / ((len(driver_ids)) + 1)] * (len(driver_ids) + 1) heights.insert(0, map_height) # heights = [0.25] fig = make_subplots( @@ -542,11 +539,13 @@ def get_labels(trips): x, y = zip(*points) filtered_trips = list(filter(lambda t: t.id in self.primaryOIDs.values(), trips)) details = [[str(t.id) for t in filtered_trips], - [t.lp.o[:-4] for t in filtered_trips], + [t.lp.o[:-4] for t in filtered_trips], [t.lp.d[:-4] for t in filtered_trips], - [str(timedelta(days=self.B[self.idxes[t.lp.o]].solution_value)).split('.')[0] for t in filtered_trips], + [str(timedelta(days=self.B[self.idxes[t.lp.o]].solution_value)).split('.')[0] for t in + filtered_trips], [str(timedelta(days=self.opposingTrip[t.id].start)).split('.')[0] for t in filtered_trips], - [str(timedelta(days=self.B[self.idxes[t.lp.o] + self.TRIPS_TO_DO].solution_value)).split('.')[0] for t in filtered_trips], + [str(timedelta(days=self.B[self.idxes[t.lp.o] + self.TRIPS_TO_DO].solution_value)).split('.')[0] + for t in filtered_trips], [str(timedelta(days=self.opposingTrip[t.id].end)).split('.')[0] for t in filtered_trips], [str(t.preset_m) for t in filtered_trips], [str(t.los) for t in filtered_trips], @@ -562,13 +561,15 @@ def get_labels(trips): size=8, color=col, ), - name= names(d_id), + name=names(d_id), - ),row=1, col=1) + ), row=1, col=1) fig.add_trace( go.Table( header=dict( - values=["TripID", "Pickup Address", "Dropoff Address", "Estimated Pickup Time", "Scheduled Pickup Time", "Estimated Dropoff Time", "Scheduled Dropoff Time", "Miles", "LOS", "Revenue"], + values=["TripID", "Pickup Address", "Dropoff Address", "Estimated Pickup Time", + "Scheduled Pickup Time", "Estimated Dropoff Time", "Scheduled Dropoff Time", "Miles", + "LOS", "Revenue"], font=dict(size=10), align="left" ), @@ -583,7 +584,8 @@ def get_labels(trips): for idx, point in enumerate(points): if point in locations: locations[point].append(trips[idx]) - locations[point] = list(sorted(locations[point], key=lambda x: self.B[self.idxes[x.lp.o]].solution_value)) + locations[point] = list( + sorted(locations[point], key=lambda x: self.B[self.idxes[x.lp.o]].solution_value)) else: locations[point] = [trips[idx]] @@ -606,7 +608,7 @@ def get_labels(trips): name="Locations", ), - row=1,col=1 + row=1, col=1 ) ids, times, miles, rev = zip(*(self.__get_driver_trips_times_miles_rev(id) for id in driver_ids)) fig.add_trace( @@ -623,14 +625,14 @@ def get_labels(trips): row=2, col=1, ) - fig.update_mapboxes(zoom = 10, center=go.layout.mapbox.Center( - lat=np.mean(all_y), - lon=np.mean(all_x)), style='open-street-map') + fig.update_mapboxes(zoom=10, center=go.layout.mapbox.Center( + lat=np.mean(all_y), + lon=np.mean(all_x)), style='open-street-map') fig.update_layout( title_text=self.mdl.name, showlegend=True, - height = (600 + 400 * (len(driver_ids) + 1)) + height=(600 + 400 * (len(driver_ids) + 1)) ) fig.write_html(vfile, auto_open=True) @@ -653,6 +655,7 @@ def filt(t): return False idx2 = 
self.idxes[t.lp.o] return int(round(self.v[idx2].solution_value)) == id + return filt def __sortTrips(self, t): @@ -666,7 +669,8 @@ def __get_driver_coords(self, id): depot = Trip(self.driverstart, self.driverstop, 'A', 'ID', None, 0, 1, prefix=False, suffix=True) prev = 0.0 for trip in sorted(filter(self.__filterTrips(id), self.trip_map.values()), key=self.__sortTrips): - t = Trip(trip.lp.o, trip.lp.d, 1 if trip.los == 'A' else 1.5, trip.id, None, trip.start, trip.end, rev=0.0, lp = trip.lp) + t = Trip(trip.lp.o, trip.lp.d, 1 if trip.los == 'A' else 1.5, trip.id, None, trip.start, trip.end, rev=0.0, + lp=trip.lp) # if t.lp.o != self.driverstart and self.Q[self.idxes[t.lp.o]].solution_value - prev > 0.1 and t.lp.d != self.driverstop: if t.lp.o in self.primaryOIDs: try: @@ -674,15 +678,16 @@ def __get_driver_coords(self, id): t.lp.d = self.opposingTrip[t.id].lp.d t.rev = self.r[self.idxes[t.lp.o]] except: - print("Failed to get primary origin ID", t.id, t.lp.o, t.lp.d,self.Q[self.idxes[t.lp.o]].solution_value, prev) + print("Failed to get primary origin ID", t.id, t.lp.o, t.lp.d, + self.Q[self.idxes[t.lp.o]].solution_value, prev) exit(1) prev = self.Q[self.idxes[t.lp.o]].solution_value yield (t.lp.c1[1], t.lp.c1[0]), t yield (depot.lp.c2[1], depot.lp.c2[0]), depot - def __get_driver_trips_times_miles_rev(self, id): - return ", ".join(map(lambda t: str(t.id),filter(self.__filterPrimaryTrips(id), self.trip_map.values()))), \ - str(timedelta(days=sum(t.lp.time for t in filter(self.__filterTrips(id), self.trip_map.values())))).split('.')[0], \ + return ", ".join(map(lambda t: str(t.id), filter(self.__filterPrimaryTrips(id), self.trip_map.values()))), \ + str(timedelta( + days=sum(t.lp.time for t in filter(self.__filterTrips(id), self.trip_map.values())))).split('.')[0], \ str(sum(t.lp.miles for t in filter(self.__filterTrips(id), self.trip_map.values()))), \ str(sum(t.rev for t in filter(self.__filterPrimaryTrips(id), self.trip_map.values()))) diff --git a/avicena/optimizers/solver_util/cplex/Listeners.py b/avicena/optimizers/solver_util/cplex/Listeners.py index 58de044..9480722 100644 --- a/avicena/optimizers/solver_util/cplex/Listeners.py +++ b/avicena/optimizers/solver_util/cplex/Listeners.py @@ -6,6 +6,7 @@ class TimeListener(ProgressListener): Sample CPLEX Listener found on IBM DoCPLEX Forums. This listener logs and tracks MIP Gap and the time passed in attempt to solve the problem. It aborts the solve if a certain amount of time has passed. """ + def __init__(self, time: int): """ Initalize Listener @@ -31,15 +32,17 @@ def notify_progress(self, data: ProgressData) -> None: elif data.time > self._time: self.abort() else: - #print('No incumbent yet') + # print('No incumbent yet') pass + class GapListener(ProgressListener): """ Sample CPLEX Listener found on IBM DoCPLEX Forums. This listener logs and tracks MIP Gap and the time passed in attempt to solve the problem. It aborts the solve if a certain MIP gap is reached or a certain amount of time has passed. 
""" + def __init__(self, time: int, gap: float) -> None: """ Initialize Listener @@ -65,5 +68,5 @@ def notify_progress(self, data: ProgressData) -> None: print('ABORTING') self.abort() else: - #print('No incumbent yet') - pass \ No newline at end of file + # print('No incumbent yet') + pass diff --git a/avicena/parsers/CSVParser.py b/avicena/parsers/CSVParser.py index 4305a46..0acdd6b 100644 --- a/avicena/parsers/CSVParser.py +++ b/avicena/parsers/CSVParser.py @@ -3,7 +3,8 @@ import pandas as pd from pandas import DataFrame -from avicena.models import RevenueRate, MergeAddress +from avicena.models.RevenueRate import RevenueRate +from avicena.models.MergeAddress import MergeAddress from avicena.util.ParserUtil import standardize_trip_df diff --git a/avicena/parsers/LogistiCareParser.py b/avicena/parsers/LogistiCareParser.py index 2d0634f..cf17519 100644 --- a/avicena/parsers/LogistiCareParser.py +++ b/avicena/parsers/LogistiCareParser.py @@ -1,16 +1,14 @@ +import re +from datetime import datetime from typing import List, Optional, Match, AnyStr, Dict +import pandas as pd from PyPDF2 import PdfFileReader from nltk.tokenize import word_tokenize -from datetime import datetime -import pandas as pd -import re -import os - from pandas import DataFrame -from avicena.models import MergeAddress, RevenueRate -from avicena.util.Geolocator import locations +from avicena.models.MergeAddress import MergeAddress +from avicena.models.RevenueRate import RevenueRate from avicena.util.ParserUtil import standardize_trip_df diff --git a/avicena/prepare_database.py b/avicena/prepare_database.py index be4b48b..3471a49 100644 --- a/avicena/prepare_database.py +++ b/avicena/prepare_database.py @@ -2,24 +2,29 @@ import yaml -from avicena.util.Database import close_db_session, create_db_session, save_to_db_session, commit_db_session from avicena.models.Driver import load_drivers_from_csv from avicena.models.MergeAddress import load_merge_details_from_csv from avicena.models.RevenueRate import load_revenue_table_from_csv from avicena.util.ConfigValidation import _validate_db_details +from avicena.util.Database import close_db_session, create_db_session, save_to_db_session, commit_db_session from avicena.util.Exceptions import InvalidConfigException if __name__ == "__main__": - parser = argparse.ArgumentParser(description='Populate Database with Base Information needed including Revenue Table, Merge Address Details, and Driver Details.') + parser = argparse.ArgumentParser( + description='Populate Database with Base Information needed including Revenue Table, Merge Address Details, ' + 'and Driver Details.') required_named = parser.add_argument_group('required arguments') - required_named.add_argument('-r', '--revenue-table-csv', action='store', type=str, dest='revenue_table_file', required=True, + required_named.add_argument('-r', '--revenue-table-csv', action='store', type=str, dest='revenue_table_file', + required=True, help='Path to revenue table CSV') - required_named.add_argument('-m', '--merge-details-csv', action='store', type=str, dest='merge_details_file', required=True, + required_named.add_argument('-m', '--merge-details-csv', action='store', type=str, dest='merge_details_file', + required=True, help='Path to merge details CSV') - required_named.add_argument('-d', '--driver-details-csv', action='store', type=str, dest='driver_details_file', required=True, + required_named.add_argument('-d', '--driver-details-csv', action='store', type=str, dest='driver_details_file', + required=True, help='Path to driver 
details CSV') args = parser.parse_args() @@ -45,4 +50,4 @@ save_to_db_session(db_session, driver) commit_db_session(db_session) - close_db_session(db_session) \ No newline at end of file + close_db_session(db_session) diff --git a/avicena/util/ConfigValidation.py b/avicena/util/ConfigValidation.py index 5082a1e..d66d2ab 100644 --- a/avicena/util/ConfigValidation.py +++ b/avicena/util/ConfigValidation.py @@ -13,7 +13,8 @@ def _validate_db_details(db_config: Dict[str, Any]) -> bool: if field not in db_config: raise InvalidConfigException(f"app_config.database missing required field {field}") if type(db_config[field]) != required_types[field]: - raise InvalidConfigException(f"app_config.database.{field} is expected to be {required_types[field]}, found {type(db_config[field])} instead") + raise InvalidConfigException( + f"app_config.database.{field} is expected to be {required_types[field]}, found {type(db_config[field])} instead") return db_config['enabled'] @@ -23,7 +24,7 @@ def validate_app_config(loaded_config: Dict[str, Any]) -> None: This validation raises Exceptions for name mismatches and type mismatches in the configuration file :param loaded_config: A dictionary loaded from the app_config.yaml """ - required_types = {'database':dict, 'geocoder_key':str, 'trips_parser':str, 'optimizer':str, 'seed':int} + required_types = {'database': dict, 'geocoder_key': str, 'trips_parser': str, 'optimizer': str, 'seed': int} for field in required_types: if field not in loaded_config: raise InvalidConfigException(f"app_config missing required field {field}") @@ -32,10 +33,11 @@ def validate_app_config(loaded_config: Dict[str, Any]) -> None: f"app_config.{field} is expected to be {required_types[field]}, found {type(loaded_config[field])} instead") db_enabled = _validate_db_details(loaded_config['database']) if not db_enabled: - non_db_required_fields = {'merge_address_table_path':str, 'revenue_table_path':str, 'driver_table_path':str, 'output_directory':str} + non_db_required_fields = {'merge_address_table_path': str, 'revenue_table_path': str, 'driver_table_path': str, + 'output_directory': str} for field in non_db_required_fields: if field not in loaded_config: raise InvalidConfigException(f"app_config missing required field {field}") if type(loaded_config[field]) != non_db_required_fields[field]: raise InvalidConfigException( - f"app_config.{field} is expected to be {non_db_required_fields[field]}, found {type(loaded_config[field])} instead") \ No newline at end of file + f"app_config.{field} is expected to be {non_db_required_fields[field]}, found {type(loaded_config[field])} instead") diff --git a/avicena/util/Database.py b/avicena/util/Database.py index eb43dcd..6dff867 100644 --- a/avicena/util/Database.py +++ b/avicena/util/Database.py @@ -1,7 +1,7 @@ from typing import Dict, Any -from sqlalchemy.orm import Session from sqlalchemy import create_engine +from sqlalchemy.orm import Session def create_db_session(db_config: Dict[str, Any]) -> Session: diff --git a/avicena/util/Geolocator.py b/avicena/util/Geolocator.py index ba24846..dd9b39b 100644 --- a/avicena/util/Geolocator.py +++ b/avicena/util/Geolocator.py @@ -34,4 +34,4 @@ def find_coord_lon_lat(addr: str, key: Optional[str] = None) -> (float, float): :param key: optional string for geocoder key :return: Longitude, Latitude of address """ - return tuple(reversed(find_coord_lat_lon(addr, key))) \ No newline at end of file + return tuple(reversed(find_coord_lat_lon(addr, key))) diff --git a/avicena/util/ParserUtil.py 
b/avicena/util/ParserUtil.py index 9390968..e149887 100644 --- a/avicena/util/ParserUtil.py +++ b/avicena/util/ParserUtil.py @@ -4,9 +4,8 @@ import pandas as pd from pandas import Series, DataFrame -from avicena.models import MergeAddress, RevenueRate -from avicena.models.MergeAddress import load_merge_details_from_db, load_merge_details_from_csv -from avicena.models.RevenueRate import load_revenue_table_from_db, load_revenue_table_from_csv +from avicena.models.MergeAddress import MergeAddress +from avicena.models.RevenueRate import RevenueRate from avicena.util.Exceptions import RevenueCalculationException, MissingTripDetailsException from avicena.util.Geolocator import find_coord_lat_lon from avicena.util.TimeWindows import get_time_window_by_hours_minutes, timedelta_to_fraction_of_day @@ -36,7 +35,8 @@ def convert_time(time: Union[float, str]) -> float: return timedelta_to_fraction_of_day(td) -def _adjust_pickup_dropoff_merge(pickup_time: float, id: str, pickup_address: str, dropoff_times: Series, ids: Series, merge_details: Dict[str, MergeAddress]) -> Series: +def _adjust_pickup_dropoff_merge(pickup_time: float, id: str, pickup_address: str, dropoff_times: Series, ids: Series, + merge_details: Dict[str, MergeAddress]) -> Series: """ Clean up the pickup and dropoff times for a given trip that was parsed from the inputs This function returns a a series with the udpated pickup_time, dropoff_time, and indication of whether it is a merge trip @@ -80,7 +80,7 @@ def _revenue_calculation(table: Dict[str, List[RevenueRate]], miles: float, los: def _get_trip_coordinates(df: DataFrame) -> None: """ Populate DataFrame with coordinates of pickup and dropoff addresses - :param df: Dataframe to update + :param df: DataFrame to update """ df[['trip_pickup_lat', 'trip_pickup_lon']] = df['trip_pickup_address'].apply( lambda x: pd.Series(find_coord_lat_lon(x))) @@ -119,7 +119,8 @@ def _standardize_time_format_trip_df(df: DataFrame) -> None: df['trip_dropoff_time'] = df['trip_dropoff_time'].apply(convert_time) -def standardize_trip_df(df: DataFrame, merge_details: Dict[str, MergeAddress], revenue_table: Dict[str, List[RevenueRate]]) -> None: +def standardize_trip_df(df: DataFrame, merge_details: Dict[str, MergeAddress], + revenue_table: Dict[str, List[RevenueRate]]) -> None: """ Apply time standardization, merge trip updates, missing time updates, revenue calculations, and coordinates to the trip DataFrame @@ -149,4 +150,4 @@ def verify_and_save_parsed_trips_df_to_csv(df: DataFrame, path_to_save: str) -> raise MissingTripDetailsException(f"Expected {column} to be in DataFrame") parsed_df = df[required_columns] - parsed_df.to_csv(path_to_save) \ No newline at end of file + parsed_df.to_csv(path_to_save) diff --git a/avicena/util/TimeWindows.py b/avicena/util/TimeWindows.py index 940b509..1c8d73d 100644 --- a/avicena/util/TimeWindows.py +++ b/avicena/util/TimeWindows.py @@ -50,4 +50,4 @@ def timedelta_to_fraction_of_day(td: timedelta) -> float: :param td: timedelta object :return: fraction of day passed by timedelta """ - return td.total_seconds() / (60 * 60 * 24) \ No newline at end of file + return td.total_seconds() / (60 * 60 * 24) diff --git a/avicena/util/VisualizationUtil.py b/avicena/util/VisualizationUtil.py index 6933733..de48a81 100644 --- a/avicena/util/VisualizationUtil.py +++ b/avicena/util/VisualizationUtil.py @@ -2,7 +2,7 @@ from pandas import Series -from avicena.models import Driver +from avicena.models.Driver import Driver def generate_html_label_for_addr(trips: Series, addr: str) -> str: 
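Both helpers in this module return plotly hover/table text, and plotly treats the HTML "<br>" tag as its line separator. A minimal sketch under that assumption, with simplified hypothetical arguments in place of the real pandas Series and Driver inputs:

def generate_html_label_for_driver_addr_sketch(driver_id: int, clean_address: str) -> str:
    # plotly renders "<br>" as a line break inside hover and table text
    return clean_address + "<br>Driver " + str(driver_id) + " Home"


def generate_html_label_for_addr_sketch(trip_rows, addr: str) -> str:
    # trip_rows: iterable of (trip_id, est_pickup_time, driver_id) tuples (hypothetical shape)
    data = "<br>".join(f"{tid} | {pickup} | {drv}" for tid, pickup, drv in trip_rows)
    return addr + "<br>TripID, Time, DriverID<br>" + data


print(generate_html_label_for_addr_sketch([(101, "08:15:00", 3)], "1631 E 2nd St"))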
@@ -26,5 +26,4 @@ def generate_html_label_for_driver_addr(d: Driver) -> str: :param d: Driver object :return: HTML formatted driver address """ - return d.address.get_clean_address() + "
Driver " + str(d.id) + " Home" - + return d.get_clean_address() + "
Driver " + str(d.id) + " Home" diff --git a/experimental/Assumptions.py b/experimental/Assumptions.py index 5f33d77..d95fd09 100644 --- a/experimental/Assumptions.py +++ b/experimental/Assumptions.py @@ -1,45 +1,44 @@ from .constants import FIFTEEN preprocess_assumptions = { - "UNKNOWN_TIME_BUFFER": FIFTEEN * 10, - "UNKNOWN_TIME_DROP": FIFTEEN * 8, - "MERGE_ADDRESSES": {"1631 E 2nd St", "1110 W Willia", "1801 E 51st St , Austin , TX", "1000 E 41st St"}, - "MERGE_ADDRESS_WINDOW": FIFTEEN + "UNKNOWN_TIME_BUFFER": FIFTEEN * 10, + "UNKNOWN_TIME_DROP": FIFTEEN * 8, + "MERGE_ADDRESSES": {"1631 E 2nd St", "1110 W Willia", "1801 E 51st St , Austin , TX", "1000 E 41st St"}, + "MERGE_ADDRESS_WINDOW": FIFTEEN } opt_params = { - "TRIPS_TO_DO": 53, # if greater than total number of trips, then ignored - "DRIVER_IDX": 1, # index of which driver's address is assumed to be debot + "TRIPS_TO_DO": 53, # if greater than total number of trips, then ignored + "DRIVER_IDX": 1, # index of which driver's address is assumed to be debot "NUM_DRIVERS": 4, "MIN_DRIVERS": 4, - "MAX_DRIVERS": 60, # unused + "MAX_DRIVERS": 60, # unused "DRIVER_PEN": 10000, "MAX_WHEELCHAIR_DRIVERS": 2, "MIN_WHEELCHAIR_DRIVERS": 2, "W_DRIVER_PEN": 3000, - "PICKUP_WINDOW": FIFTEEN/2, # general window unused + "PICKUP_WINDOW": FIFTEEN / 2, # general window unused "EARLY_PICKUP_WINDOW": FIFTEEN * 3, "LATE_PICKUP_WINDOW": FIFTEEN * 3, - "DROP_WINDOW": FIFTEEN * 2/3, #general window unused + "DROP_WINDOW": FIFTEEN * 2 / 3, # general window unused "EARLY_DROP_WINDOW": FIFTEEN * 4, - "LATE_DROP_WINDOW": FIFTEEN * 1/3, + "LATE_DROP_WINDOW": FIFTEEN * 1 / 3, - "DRIVER_CAP": 2.5, # W trips take 1.5 space and A trips take 1 space + "DRIVER_CAP": 2.5, # W trips take 1.5 space and A trips take 1 space "ROUTE_LIMIT": FIFTEEN * 60, # Limits on the length of time of the route "MERGE_PENALTY": 1000, # Penalty for not meeting AB or BC Trip Merge Requirements - "REVENUE_PENALTY": 250, # "MIN_DRIVING_SPEED": 40, # unused, not working # "MAX_DRIVING_SPEED": 60, # unused, not working # "SPEED_PENALTY" : 100, # Penalty is applied to inverse of speed unused, not working - "TIME_LIMIT": 900 * 4 * 8, # Overall Solver time limit in seconds + "TIME_LIMIT": 900 * 4 * 8, # Overall Solver time limit in seconds "MIP_GAP": 0.03, # Solver Gap Levels "MODEL_NAME": "PDWTW", @@ -51,16 +50,16 @@ } gen_opt_params = { - "TRIPS_TO_DO": 1000, # if greater than total number of trips, then ignored + "TRIPS_TO_DO": 1000, # if greater than total number of trips, then ignored "NUM_DRIVERS": 4, "EARLY_PICKUP_WINDOW": FIFTEEN * 3, "LATE_PICKUP_WINDOW": FIFTEEN, "EARLY_DROP_WINDOW": FIFTEEN * 4, - "LATE_DROP_WINDOW": FIFTEEN * 1/3, + "LATE_DROP_WINDOW": FIFTEEN * 1 / 3, - "DRIVER_CAP": 2.5, # W trips take 1.5 space and A trips take 1 space + "DRIVER_CAP": 2.5, # W trips take 1.5 space and A trips take 1 space "ROUTE_LIMIT": FIFTEEN * 60, # Limits on the length of time of the route "ROUTE_LIMIT_PENALTY": 500, # Limits on the length of time of the route @@ -68,14 +67,13 @@ "MERGE_PENALTY": 1000, # Penalty for not meeting AB or BC Trip Merge Requirements - - "REVENUE_PENALTY": 250, # Penalty for Revenue Difference - "WHEELCHAIR_PENALTY": 150, # Penalty for Wheelchair Trips Difference + "REVENUE_PENALTY": 250, # Penalty for Revenue Difference + "WHEELCHAIR_PENALTY": 150, # Penalty for Wheelchair Trips Difference "MODEL_NAME": "PDWTW", - "STAGE1_TIME": 60 * 10, # Stage 1 Time - "STAGE1_GAP": 0.05, # Stage 1 Target MIP Gap - "STAGE2_TIME": 60 * 10, # Stage 2 Time - "STAGE2_GAP": 0.05 # Stage 2 
Target MIP Gap -} \ No newline at end of file + "STAGE1_TIME": 60 * 10, # Stage 1 Time + "STAGE1_GAP": 0.05, # Stage 1 Target MIP Gap + "STAGE2_TIME": 60 * 10, # Stage 2 Time + "STAGE2_GAP": 0.05 # Stage 2 Target MIP Gap +} diff --git a/experimental/Driver.py b/experimental/Driver.py index d7edcf6..f8b92f2 100644 --- a/experimental/Driver.py +++ b/experimental/Driver.py @@ -1,10 +1,11 @@ class Driver: - def __init__(self, id, name, address, cap, los,ed): + def __init__(self, id, name, address, cap, los, ed): self.id = int(id) self.name = name self.address = address self.capacity = cap self.los = los self.ed = ed + def __repr__(self): - return str(self.id) \ No newline at end of file + return str(self.id) diff --git a/experimental/GeneralizedOptimizer.py b/experimental/GeneralizedOptimizer.py index 5d615fb..acaa0fe 100644 --- a/experimental/GeneralizedOptimizer.py +++ b/experimental/GeneralizedOptimizer.py @@ -1,30 +1,28 @@ import random from copy import copy from datetime import timedelta + +import numpy as np import pandas as pd import plotly.graph_objects as go -import numpy as np from docloud.status import JobSolveStatus -from docplex.mp.conflict_refiner import ConflictRefiner, VarLbConstraintWrapper, VarUbConstraintWrapper from docplex.mp.model import Model -from docplex.mp.relaxer import Relaxer from docplex.mp.utils import DOcplexException from plotly.subplots import make_subplots -from experimental.Trip import Trip, locations, InvalidTripException, TripType, Location -from experimental.constants import FIFTEEN +from experimental.Trip import Trip, InvalidTripException, TripType, Location from experimental.listeners import TimeListener, GapListener + class GeneralOptimizer: def filtered(self, d, iter): return filter(lambda t: not ((t.lp.o in self.driverNodes and t.lp.o[:4] != d.address[:4]) or ( - t.lp.d in self.driverNodes and t.lp.d[:4] != d.address[:4])) + t.lp.d in self.driverNodes and t.lp.d[:4] != d.address[:4])) and t.los in d.los # and and not (abs(self.nodeCaps[t.lp.o] + self.nodeCaps[t.lp.d]) > d.capacity), iter) - - def __init__(self, trips, drivers, params, name = None): + def __init__(self, trips, drivers, params, name=None): self.drivers_inp = drivers self.trips_inp = trips if not name: @@ -33,7 +31,7 @@ def __init__(self, trips, drivers, params, name = None): self.mdl = Model(name=name) self.drivers = list() # List of all Drivers - self.primary_trips = dict() # Map Primary trip pair to trip object + self.primary_trips = dict() # Map Primary trip pair to trip object self.all_trips = dict() # Maps Trip-ID to Trip Object self.driverNodes = set() # All Driver Nodes self.driverStart = set() # Starting Nodes of Driver @@ -48,9 +46,9 @@ def __init__(self, trips, drivers, params, name = None): self.nodeClose = dict() # earliest arrival time to a node self.primaryTID = dict() # Map from starting location to ID of primary trip from that location self.merges = dict() # Map from merge trip to incoming primary trip - self.revenues = dict() # Map from start node to revenue of the trip - self.badPairs = dict() # Map from Pickup location of trip pair to dropoff of incoming trip pair - self.wheelchairs = set() # Set of locations where wheelchair trips start + self.revenues = dict() # Map from start node to revenue of the trip + self.badPairs = dict() # Map from Pickup location of trip pair to dropoff of incoming trip pair + self.wheelchairs = set() # Set of locations where wheelchair trips start # Decision Variable Structures self.trips = dict() # Map from driver to map of trip to model 
variable @@ -93,8 +91,6 @@ def __init__(self, trips, drivers, params, name = None): self.__prepare_constraints() self.__prepare_objective() - - def __prepare_trip_parameters(self): count = 0 for index, trip in enumerate(self.trips_inp): @@ -221,12 +217,14 @@ def __generate_trips(self): for rS in self.requestNodes: for rE in self.requestNodes: if rS == rE or (rS in self.requestPair and self.requestPair[rS] == rE) or ( - rE in self.requestPair and self.requestPair[rE] == rS) or (rS in self.badPairs and self.badPairs[rS] == rE): + rE in self.requestPair and self.requestPair[rE] == rS) or ( + rS in self.badPairs and self.badPairs[rS] == rE): continue try: space = 0 if rS in self.wheelchairs: space = 1.5 - t = Trip(rS, rE, space, id, None, self.nodeOpen[rS], self.nodeClose[rE], 0.0, prefix=False, suffix=True) + t = Trip(rS, rE, space, id, None, self.nodeOpen[rS], self.nodeClose[rE], 0.0, prefix=False, + suffix=True) except InvalidTripException: # print(rS, rE, nodeDeps[rS], nodeArrs[rE]) continue @@ -252,17 +250,16 @@ def __generate_trips(self): self.trips[d][t] = self.mdl.binary_var(name='y' + '_' + str(d.id) + '_' + str(t.id)) self.times[d][t] = self.mdl.continuous_var(lb=0, ub=1, name='t' + '_' + str(d.id) + '_' + str(t.id)) self.mdl.add_constraint(self.times[d][t] - self.trips[d][t] <= 0) - self.caps[d][t] = self.mdl.continuous_var(lb=0, ub=d.capacity, name='q' + '_' + str(d.id) + '_' + str(t.id)) + self.caps[d][t] = self.mdl.continuous_var(lb=0, ub=d.capacity, + name='q' + '_' + str(d.id) + '_' + str(t.id)) self.mdl.add_constraint(self.caps[d][t] - self.trips[d][t] * d.capacity <= 0) - with open("time.csv", "w") as t, open("cost.csv", "w") as c: t.write("Start,End,Time") c.write("Start,End,Cost") for pair, trp in self.primary_trips.items(): - t.write(pair[0]+ "," + pair[1] + "," + str(trp.lp.time) + "\n") - c.write(pair[0]+ "," + pair[1] + "," + str(trp.lp.miles) + "\n") - + t.write(pair[0] + "," + pair[1] + "," + str(trp.lp.time) + "\n") + c.write(pair[0] + "," + pair[1] + "," + str(trp.lp.miles) + "\n") def __prepare_constraints(self): """ @@ -340,7 +337,8 @@ def __prepare_constraints(self): """ for trp in self.all_trips: if isinstance(trp, str): - if (trp.endswith('A') and (trp[:-1] + 'B' in self.all_trips)) or (trp.endswith('B') and (trp[:-1] + 'C' in self.all_trips)): + if (trp.endswith('A') and (trp[:-1] + 'B' in self.all_trips)) or ( + trp.endswith('B') and (trp[:-1] + 'C' in self.all_trips)): main_trip_start = self.all_trips[trp].lp.o main_trip_dest = self.all_trips[trp].lp.d alt_trip_start = None @@ -471,8 +469,9 @@ def __prepare_custom_constraints(self): """ for d in self.drivers: for mer in self.filtered(d, self.merges): - self.mdl.add_constraint(ct = self.trips[d][mer] == self.trips[d][self.merges[mer]]) - self.obj += self.MERGE_PEN * (self.times[d][mer] - (self.times[d][self.merges[mer]] + self.merges[mer].lp.time * self.trips[d][mer])) * (24) + self.mdl.add_constraint(ct=self.trips[d][mer] == self.trips[d][self.merges[mer]]) + self.obj += self.MERGE_PEN * (self.times[d][mer] - ( + self.times[d][self.merges[mer]] + self.merges[mer].lp.time * self.trips[d][mer])) * (24) """ Equalizing Revenue Penalty """ @@ -480,7 +479,8 @@ def __prepare_custom_constraints(self): self.rev_min = self.mdl.continuous_var(0) for d in self.drivers: self.revs[d] = self.mdl.continuous_var(lb=0, name="Revenue" + str(d.id)) - self.mdl.add_constraint(self.revs[d] == sum(self.revenues[t.lp.o] * self.trips[d][t] for t in self.filtered(d, self.all_trips.values()))) + self.mdl.add_constraint(self.revs[d] == 
sum( + self.revenues[t.lp.o] * self.trips[d][t] for t in self.filtered(d, self.all_trips.values()))) self.mdl.add_constraint(self.rev_max >= self.revs[d]) self.mdl.add_constraint(self.rev_min <= self.revs[d]) self.obj += self.REVENUE_PEN * (self.rev_max - self.rev_min) @@ -493,7 +493,8 @@ def __prepare_custom_constraints(self): for d in self.drivers: if 'W' not in d.los: continue self.ws[d] = self.mdl.continuous_var(lb=0, name="Wheelchairs" + str(d.id)) - self.mdl.add_constraint(self.ws[d] == sum(self.trips[d][t] for t in filter(lambda x: x.los == 'W', self.filtered(d, self.all_trips.values())))) + self.mdl.add_constraint(self.ws[d] == sum( + self.trips[d][t] for t in filter(lambda x: x.los == 'W', self.filtered(d, self.all_trips.values())))) self.mdl.add_constraint(self.w_max >= self.ws[d]) self.mdl.add_constraint(self.w_min <= self.ws[d]) self.obj += self.W_PEN * (self.w_max - self.w_min) @@ -519,13 +520,14 @@ def solve(self, solution_file): pL = TimeListener(self.STAGE1_TIME) self.mdl.add_progress_listener(pL) first_solve = self.mdl.solve() - if first_solve and (first_solve.solve_status == JobSolveStatus.FEASIBLE_SOLUTION or first_solve.solve_status == JobSolveStatus.OPTIMAL_SOLUTION): + if first_solve and ( + first_solve.solve_status == JobSolveStatus.FEASIBLE_SOLUTION or first_solve.solve_status == JobSolveStatus.OPTIMAL_SOLUTION): print("First solve status: " + str(self.mdl.get_solve_status())) print("First solve obj value: " + str(self.mdl.objective_value)) - driverMiles = self.__write_sol(solution_file+'stage1') + driverMiles = self.__write_sol(solution_file + 'stage1') print("Total Number of trip miles by each driver after stage 1: ") print(driverMiles) - self.visualize(solution_file+'stage1', 'stage1vis.html') + self.visualize(solution_file + 'stage1', 'stage1vis.html') else: print("Stage 1 Infeasible with ED") if not first_solve or first_solve.solve_status == JobSolveStatus.INFEASIBLE_SOLUTION: @@ -607,9 +609,11 @@ def generate_addr_label(trips, addr): def visualize(self, sfile, vfile='visualized.html', open_after=False): def names(id): return "Driver " + str(id) + " Route" + def get_labels(trips, addr): data = "
".join( - "0" * (10 - len(str(t['trip_id']))) + str(t['trip_id']) + " | " + str(timedelta(days=float(t['est_pickup_time']))).split('.')[0] + + "0" * (10 - len(str(t['trip_id']))) + str(t['trip_id']) + " | " + + str(timedelta(days=float(t['est_pickup_time']))).split('.')[0] + " | " + str(t['driver_id']) for t in trips ) return addr + "
<br>TripID, Time, DriverID<br>
" + data @@ -640,19 +644,23 @@ def get_labels(trips, addr): for i, d in enumerate(self.drivers): r = lambda: random.randint(0, 255) col = '#%02X%02X%02X' % (r(), r(), r()) - filtered_trips = sol_df[sol_df['driver_id']==d.id] + filtered_trips = sol_df[sol_df['driver_id'] == d.id] points, trips = self.__get_driver_coords(filtered_trips, d) x, y = zip(*points) details = [[str(t['trip_id']) for _, t in filtered_trips.iterrows()], - [t['trip_pickup_address'] for _,t in filtered_trips.iterrows()], - [t['trip_dropoff_address'] for _,t in filtered_trips.iterrows()], - [str(timedelta(days=float(t['est_pickup_time']))).split('.')[0] for _,t in filtered_trips.iterrows()], - [str(timedelta(days=float(t['trip_pickup_time']))).split('.')[0] for _,t in filtered_trips.iterrows()], - [str(timedelta(days=float(t['est_dropoff_time']))).split('.')[0] for _,t in filtered_trips.iterrows()], - [str(timedelta(days=float(t['trip_dropoff_time']))).split('.')[0] for _,t in filtered_trips.iterrows()], - [str(t['est_miles']) for _,t in filtered_trips.iterrows()], - [str(t['trip_los']) for _,t in filtered_trips.iterrows()], - [str(t['trip_rev']) for _,t in filtered_trips.iterrows()], + [t['trip_pickup_address'] for _, t in filtered_trips.iterrows()], + [t['trip_dropoff_address'] for _, t in filtered_trips.iterrows()], + [str(timedelta(days=float(t['est_pickup_time']))).split('.')[0] for _, t in + filtered_trips.iterrows()], + [str(timedelta(days=float(t['trip_pickup_time']))).split('.')[0] for _, t in + filtered_trips.iterrows()], + [str(timedelta(days=float(t['est_dropoff_time']))).split('.')[0] for _, t in + filtered_trips.iterrows()], + [str(timedelta(days=float(t['trip_dropoff_time']))).split('.')[0] for _, t in + filtered_trips.iterrows()], + [str(t['est_miles']) for _, t in filtered_trips.iterrows()], + [str(t['trip_los']) for _, t in filtered_trips.iterrows()], + [str(t['trip_rev']) for _, t in filtered_trips.iterrows()], ] all_x += x all_y += y @@ -713,7 +721,8 @@ def get_labels(trips, addr): ), row=1, col=1 ) - names, ids, times, ep, ld, miles, rev = zip(*(self.__get_driver_trips_times_miles_rev(sol_df, id) for id in driver_ids)) + names, ids, times, ep, ld, miles, rev = zip( + *(self.__get_driver_trips_times_miles_rev(sol_df, id) for id in driver_ids)) names = list(names) ids = list(ids) times = list(times) @@ -722,14 +731,14 @@ def get_labels(trips, addr): miles = list(miles) rev = list(rev) ids.append("Average") - times.append(sum(times)/len(times)) - ep.append(sum(ep)/len(ep)) - ld.append(sum(ld)/len(ld)) - miles.append(sum(miles)/len(miles)) - rev.append(sum(rev)/len(rev)) - times = list(map(lambda t: str(timedelta(days=t)).split('.')[0],times)) - ep = list(map(lambda t: str(timedelta(days=t)).split('.')[0],ep)) - ld = list(map(lambda t: str(timedelta(days=t)).split('.')[0],ld)) + times.append(sum(times) / len(times)) + ep.append(sum(ep) / len(ep)) + ld.append(sum(ld) / len(ld)) + miles.append(sum(miles) / len(miles)) + rev.append(sum(rev) / len(rev)) + times = list(map(lambda t: str(timedelta(days=t)).split('.')[0], times)) + ep = list(map(lambda t: str(timedelta(days=t)).split('.')[0], ep)) + ld = list(map(lambda t: str(timedelta(days=t)).split('.')[0], ld)) miles = list(map(str, miles)) rev = list(map(str, rev)) fig.add_trace( @@ -745,10 +754,10 @@ def get_labels(trips, addr): ), row=2, col=1, ) - fig.update_mapboxes(zoom=10,center=go.layout.mapbox.Center( - lat=np.mean(all_y), - lon=np.mean(all_x)), - style='open-street-map') + fig.update_mapboxes(zoom=10, center=go.layout.mapbox.Center( + 
lat=np.mean(all_y), + lon=np.mean(all_x)), + style='open-street-map') fig.update_layout( title_text=self.mdl.name, @@ -762,7 +771,9 @@ def __get_driver_coords(self, filtered_trips, driver): pairs.append((0.0, Location(driver.address[:-4]).rev_coord(), {})) for idx, t in filtered_trips.iterrows(): pairs.append((float(t['est_pickup_time']), Location(t['trip_pickup_address']).rev_coord(), t)) - pairs.append((float(t['est_dropoff_time']), Location(t['trip_dropoff_address']).rev_coord(), {'est_pickup_time': t['est_dropoff_time'], 'driver_id': driver.id, 'trip_id': 'INTER', 'trip_pickup_address': t['trip_dropoff_address']})) + pairs.append((float(t['est_dropoff_time']), Location(t['trip_dropoff_address']).rev_coord(), + {'est_pickup_time': t['est_dropoff_time'], 'driver_id': driver.id, 'trip_id': 'INTER', + 'trip_pickup_address': t['trip_dropoff_address']})) pairs.append((1.0, Location(driver.address[:-4]).rev_coord(), {})) _, coords, trips = zip(*sorted(pairs, key=lambda x: x[0])) @@ -805,11 +816,13 @@ def tripGen(): if t.lp.o not in self.requestStart or var.solution_value != 1: continue yield (d, t) + def tripGen_debug(d): for t, var in self.trips[d].items(): if var.solution_value != 1: continue yield (d, t) + for dr in self.drivers: for d, t in sorted(tripGen_debug(dr), key=lambda x: self.times[x[0]][x[1]].solution_value): print(d.name, t.lp.o, t.lp.d, self.times[d][t].solution_value, t.lp.time) @@ -827,11 +840,13 @@ def tripGen_debug(d): end_time = self.times[d][intrip].solution_value + intrip.lp.time if end_time < self.times[d][t].solution_value + t.lp.time: print('Something wrong') - print(sum(self.trips[d][intrip].solution_value for intrip in self.filtered(d, self.intrips[rE]))) + print(sum( + self.trips[d][intrip].solution_value for intrip in self.filtered(d, self.intrips[rE]))) print(rE) print(t.lp.o, t.lp.d) print(intrip.lp.o, intrip.lp.d) - print(t.id, self.times[d][t].solution_value, self.times[d][intrip].solution_value, intrip.lp.time) + print(t.id, self.times[d][t].solution_value, self.times[d][intrip].solution_value, + intrip.lp.time) break if end_time < 0: print("Something wrong") diff --git a/experimental/PDWTWOptimizer.py b/experimental/PDWTWOptimizer.py index ab55bc0..f183858 100644 --- a/experimental/PDWTWOptimizer.py +++ b/experimental/PDWTWOptimizer.py @@ -1,15 +1,14 @@ -from copy import copy -from datetime import datetime, timedelta -import pandas as pd +import random +from datetime import timedelta + import numpy as np +import pandas as pd import plotly.graph_objects as go -from plotly.subplots import make_subplots - from docplex.mp.model import Model +from plotly.subplots import make_subplots -from experimental.Trip import Trip, locations, TripType +from experimental.Trip import Trip, TripType from experimental.listeners import TimeListener, GapListener -import random class PDWTWOptimizer: @@ -36,8 +35,8 @@ def __init__(self, trips, drivers, params): self.t = [] # time of traversing trip ij; length of A self.c = [] # cost of traversing trip ij; length of A self.r = [] # driver revenue for doing a trip; length of P - self.merges = [] # binary whether merge trip was satisfied - self.location_pair = set() # Set of tuples of pickup and dropoff pairs + self.merges = [] # binary whether merge trip was satisfied + self.location_pair = set() # Set of tuples of pickup and dropoff pairs self.homes = set() # set of home locations self.not_homes = set() # set of medical office locations self.inflow_trips = dict() # mapping between a location and list of trips ending at the location 
@@ -46,9 +45,9 @@ def __init__(self, trips, drivers, params): self.idxes = dict() # mapping between location and associated index self.tripdex = dict() # mapping between location_pair and index of trip in trip time/cost/binary var containers self.primaryTID = set() # set of IDs of primary trips - self.primaryOIDs = dict() # map from origin location to primary trip ID + self.primaryOIDs = dict() # map from origin location to primary trip ID self.opposingTrip = dict() # mapping between trip ID and trip - self.mergeDict = dict() # Map between origins of two merge trip locations + self.mergeDict = dict() # Map between origins of two merge trip locations # Constants self.TRIPS_TO_DO = params["TRIPS_TO_DO"] @@ -127,7 +126,7 @@ def __prepare_constraints(self): for intrip in self.inflow_trips[i]: # print((intrip.lp.o, intrip.lp.d)) in_total += self.x[self.tripdex[(intrip.lp.o, intrip.lp.d)]] - self.mdl.add_constraint(total == in_total, "Drivers Returning to Depot" ) + self.mdl.add_constraint(total == in_total, "Drivers Returning to Depot") else: self.mdl.add_constraint(total == 1, "Primary Location Exited " + i) """ @@ -137,7 +136,7 @@ def __prepare_constraints(self): for j, d in enumerate(self.PuD): if o != d: self.mdl.add_constraint(ct=self.B[j] >= self.B[i] + self.t[self.tripdex[(o, d)]] - self.BIGM * ( - 1 - self.x[self.tripdex[(o, d)]])) + 1 - self.x[self.tripdex[(o, d)]])) self.mdl.add_constraint( ct=self.Q[j] >= self.Q[i] + self.q[j] - self.BIGM * (1 - self.x[self.tripdex[(o, d)]])) """ @@ -163,7 +162,7 @@ def __prepare_constraints(self): for j, loc in enumerate(self.PuD): self.mdl.add_constraint(self.v[j] >= j * self.x[self.tripdex[(self.driverstart, loc)]]) self.mdl.add_constraint(self.v[j] <= j * self.x[self.tripdex[(self.driverstart, loc)]] - n * ( - self.x[self.tripdex[(self.driverstart, loc)]] - 1)) + self.x[self.tripdex[(self.driverstart, loc)]] - 1)) for i, o in enumerate(self.PuD): for j, d in enumerate(self.PuD): if o != d: @@ -182,9 +181,9 @@ def __prepare_custom_constraints(self): """ for i, loc1 in enumerate(self.P): for j, loc2 in enumerate(self.P): - if loc1 == loc2 or abs(self.e[i] - self.e[j+len(self.P)]) <= self.ROUTE_LIMIT: continue - z1 = self.mdl.binary_var(loc1+loc2+'z1') - z2 = self.mdl.binary_var(loc1+loc2+'z2') + if loc1 == loc2 or abs(self.e[i] - self.e[j + len(self.P)]) <= self.ROUTE_LIMIT: continue + z1 = self.mdl.binary_var(loc1 + loc2 + 'z1') + z2 = self.mdl.binary_var(loc1 + loc2 + 'z2') m = self.v[i] y = self.v[j] self.mdl.add_constraint(y - m <= -0.5 * z1 + n * z2) @@ -202,7 +201,7 @@ def __prepare_custom_constraints(self): m = self.v[self.idxes[loc1]] y = self.v[self.idxes[tr.lp.o]] self.mdl.add_constraint(y - m <= -0.005 * z1 + n * z2) - self.mdl.add_constraint(y - m >= -n*z1 + z2 * 0.005) + self.mdl.add_constraint(y - m >= -n * z1 + z2 * 0.005) self.mdl.add_constraint(x + z1 + z2 == 1) self.obj += self.MERGE_PEN * (1 - x) @@ -218,8 +217,8 @@ def __prepare_custom_constraints(self): a = k - 0.4 b = k + 0.4 delta = self.mdl.binary_var(self.primaryOIDs[loc2] + "-delta-" + str(k)) - self.mdl.add_constraint(x <= a + self.BIGM * delta + self.BIGM * var) - self.mdl.add_constraint(x >= b - self.BIGM * (1 - delta) - self.BIGM * var) + self.mdl.add_constraint(x <= a + self.BIGM * delta + self.BIGM * var) + self.mdl.add_constraint(x >= b - self.BIGM * (1 - delta) - self.BIGM * var) self.mdl.add_constraint(x >= a - self.BIGM * (1 - var)) self.mdl.add_constraint(x <= b + self.BIGM * (1 - var)) idx_vars.append(var) @@ -244,7 +243,6 @@ def 
__prepare_custom_constraints(self): self.mdl.add_constraint(unique_w_routes >= self.MIN_W_DRIVERS) self.obj += self.W_DRIVER_PEN * (unique_w_routes - self.MIN_W_DRIVERS) - """ Equalizing Revenue Penalty """ @@ -256,23 +254,21 @@ def __prepare_custom_constraints(self): tot = sum(self.loc_v_binary[loc_idx][k] * self.r[loc_idx] for loc_idx in self.loc_v_binary) # bin_tot = sum(self.loc_v_binary[loc_idx][k] for loc_idx in self.loc_v_binary) # self.mdl.add_equivalence(bin_var, bin_tot == 0) - sum_var = self.mdl.continuous_var(0,name=str(k) + "Revenue") + sum_var = self.mdl.continuous_var(0, name=str(k) + "Revenue") self.mdl.add_constraint(bin_var * self.BIGM >= tot) self.mdl.add_constraint(tot >= bin_var) self.mdl.add_constraint(self.rev_max >= tot) - self.mdl.add_constraint(tot + (1-bin_var)*self.BIGM >= self.rev_min) + self.mdl.add_constraint(tot + (1 - bin_var) * self.BIGM >= self.rev_min) self.mdl.add_constraint(sum_var == tot) self.revens.append(sum_var) self.mdl.add_constraint(self.rev_max >= self.rev_min) self.obj += self.REVENUE_PEN * (self.rev_max - self.rev_min) - """ Adjustable Speed Penalty """ # self.obj += self.SPEED_PENALTY * (1/self.MIN_SPEED - self.SPEED) - def __generate_trips(self): # self.SPEED = self.mdl.continuous_var(1/80, 1/self.MIN_SPEED, "Speed") # print(self.SPEED) @@ -330,9 +326,8 @@ def __generate_trips(self): t.write("Start,End,Time") c.write("Start,End,Cost") for pair, trp in self.trip_map.items(): - t.write(pair[0]+ "," + pair[1] + "," + str(trp.lp.time)) - c.write(pair[0]+ "," + pair[1] + "," + str(trp.lp.miles)) - + t.write(pair[0] + "," + pair[1] + "," + str(trp.lp.time)) + c.write(pair[0] + "," + pair[1] + "," + str(trp.lp.miles)) def __prepare_depot(self): self.N.append(self.drivers[self.DRIVER_IDX].address) @@ -397,7 +392,8 @@ def __prepare_trip_parameters(self): Dv.append(self.mdl.integer_var(lb=0, ub=len(self.trips) + 1, name='v_' + str( self.TRIPS_TO_DO + count))) # Varaible for undex of first location on route dropoff if trip.type == TripType.MERGE: - vars = (self.mdl.binary_var(name=trip.id), self.mdl.binary_var(name=trip.id + 'z1'), self.mdl.binary_var(name=trip.id + 'z2')) + vars = (self.mdl.binary_var(name=trip.id), self.mdl.binary_var(name=trip.id + 'z1'), + self.mdl.binary_var(name=trip.id + 'z2')) self.merges.append(vars) if 'B' in trip.id: self.mergeDict[self.merges[-1]] = (trip.lp.o, self.opposingTrip[trip.id[:-1] + 'A']) @@ -506,7 +502,8 @@ def names(id): def get_labels(trips): data = "
".join( - "0" * (10 - len(str(t.id))) + str(t.id) + " | " + str(timedelta(days=self.B[self.idxes[t.lp.o]].solution_value)).split('.')[0] + + "0" * (10 - len(str(t.id))) + str(t.id) + " | " + + str(timedelta(days=self.B[self.idxes[t.lp.o]].solution_value)).split('.')[0] + " | " + str(int(self.v[self.idxes[t.lp.o]].solution_value)) for t in trips ) return trips[0].lp.o[:-4] + "
<br>TripID, Time, DriverID<br>
" + data @@ -516,10 +513,10 @@ def get_labels(trips): titles = [names(i) for i in driver_ids] titles.insert(0, "Map") titles.insert(1, "Driver Summary") - subplots = [[{"type":"table"}]] * (len(driver_ids) + 1) - subplots.insert(0,[{"type": "scattermapbox"}]) - map_height = 600/ (600 + 400 * (len(driver_ids) + 1)) - heights = [(1-map_height-0.05)/((len(driver_ids)) + 1)] * (len(driver_ids) +1) + subplots = [[{"type": "table"}]] * (len(driver_ids) + 1) + subplots.insert(0, [{"type": "scattermapbox"}]) + map_height = 600 / (600 + 400 * (len(driver_ids) + 1)) + heights = [(1 - map_height - 0.05) / ((len(driver_ids)) + 1)] * (len(driver_ids) + 1) heights.insert(0, map_height) # heights = [0.25] fig = make_subplots( @@ -542,11 +539,13 @@ def get_labels(trips): x, y = zip(*points) filtered_trips = list(filter(lambda t: t.id in self.primaryOIDs.values(), trips)) details = [[str(t.id) for t in filtered_trips], - [t.lp.o[:-4] for t in filtered_trips], + [t.lp.o[:-4] for t in filtered_trips], [t.lp.d[:-4] for t in filtered_trips], - [str(timedelta(days=self.B[self.idxes[t.lp.o]].solution_value)).split('.')[0] for t in filtered_trips], + [str(timedelta(days=self.B[self.idxes[t.lp.o]].solution_value)).split('.')[0] for t in + filtered_trips], [str(timedelta(days=self.opposingTrip[t.id].start)).split('.')[0] for t in filtered_trips], - [str(timedelta(days=self.B[self.idxes[t.lp.o] + self.TRIPS_TO_DO].solution_value)).split('.')[0] for t in filtered_trips], + [str(timedelta(days=self.B[self.idxes[t.lp.o] + self.TRIPS_TO_DO].solution_value)).split('.')[0] + for t in filtered_trips], [str(timedelta(days=self.opposingTrip[t.id].end)).split('.')[0] for t in filtered_trips], [str(t.preset_m) for t in filtered_trips], [str(t.los) for t in filtered_trips], @@ -562,13 +561,15 @@ def get_labels(trips): size=8, color=col, ), - name= names(d_id), + name=names(d_id), - ),row=1, col=1) + ), row=1, col=1) fig.add_trace( go.Table( header=dict( - values=["TripID", "Pickup Address", "Dropoff Address", "Estimated Pickup Time", "Scheduled Pickup Time", "Estimated Dropoff Time", "Scheduled Dropoff Time", "Miles", "LOS", "Revenue"], + values=["TripID", "Pickup Address", "Dropoff Address", "Estimated Pickup Time", + "Scheduled Pickup Time", "Estimated Dropoff Time", "Scheduled Dropoff Time", "Miles", + "LOS", "Revenue"], font=dict(size=10), align="left" ), @@ -583,7 +584,8 @@ def get_labels(trips): for idx, point in enumerate(points): if point in locations: locations[point].append(trips[idx]) - locations[point] = list(sorted(locations[point], key=lambda x: self.B[self.idxes[x.lp.o]].solution_value)) + locations[point] = list( + sorted(locations[point], key=lambda x: self.B[self.idxes[x.lp.o]].solution_value)) else: locations[point] = [trips[idx]] @@ -606,7 +608,7 @@ def get_labels(trips): name="Locations", ), - row=1,col=1 + row=1, col=1 ) ids, times, miles, rev = zip(*(self.__get_driver_trips_times_miles_rev(id) for id in driver_ids)) fig.add_trace( @@ -623,14 +625,14 @@ def get_labels(trips): row=2, col=1, ) - fig.update_mapboxes(zoom = 10, center=go.layout.mapbox.Center( - lat=np.mean(all_y), - lon=np.mean(all_x)), style='open-street-map') + fig.update_mapboxes(zoom=10, center=go.layout.mapbox.Center( + lat=np.mean(all_y), + lon=np.mean(all_x)), style='open-street-map') fig.update_layout( title_text=self.mdl.name, showlegend=True, - height = (600 + 400 * (len(driver_ids) + 1)) + height=(600 + 400 * (len(driver_ids) + 1)) ) fig.write_html(vfile, auto_open=True) @@ -653,6 +655,7 @@ def filt(t): return False idx2 = 
self.idxes[t.lp.o] return int(round(self.v[idx2].solution_value)) == id + return filt def __sortTrips(self, t): @@ -666,7 +669,8 @@ def __get_driver_coords(self, id): depot = Trip(self.driverstart, self.driverstop, 'A', 'ID', None, 0, 1, prefix=False, suffix=True) prev = 0.0 for trip in sorted(filter(self.__filterTrips(id), self.trip_map.values()), key=self.__sortTrips): - t = Trip(trip.lp.o, trip.lp.d, 1 if trip.los == 'A' else 1.5, trip.id, None, trip.start, trip.end, rev=0.0, lp = trip.lp) + t = Trip(trip.lp.o, trip.lp.d, 1 if trip.los == 'A' else 1.5, trip.id, None, trip.start, trip.end, rev=0.0, + lp=trip.lp) # if t.lp.o != self.driverstart and self.Q[self.idxes[t.lp.o]].solution_value - prev > 0.1 and t.lp.d != self.driverstop: if t.lp.o in self.primaryOIDs: try: @@ -674,15 +678,16 @@ def __get_driver_coords(self, id): t.lp.d = self.opposingTrip[t.id].lp.d t.rev = self.r[self.idxes[t.lp.o]] except: - print("Failed to get primary origin ID", t.id, t.lp.o, t.lp.d,self.Q[self.idxes[t.lp.o]].solution_value, prev) + print("Failed to get primary origin ID", t.id, t.lp.o, t.lp.d, + self.Q[self.idxes[t.lp.o]].solution_value, prev) exit(1) prev = self.Q[self.idxes[t.lp.o]].solution_value yield (t.lp.c1[1], t.lp.c1[0]), t yield (depot.lp.c2[1], depot.lp.c2[0]), depot - def __get_driver_trips_times_miles_rev(self, id): - return ", ".join(map(lambda t: str(t.id),filter(self.__filterPrimaryTrips(id), self.trip_map.values()))), \ - str(timedelta(days=sum(t.lp.time for t in filter(self.__filterTrips(id), self.trip_map.values())))).split('.')[0], \ + return ", ".join(map(lambda t: str(t.id), filter(self.__filterPrimaryTrips(id), self.trip_map.values()))), \ + str(timedelta( + days=sum(t.lp.time for t in filter(self.__filterTrips(id), self.trip_map.values())))).split('.')[0], \ str(sum(t.lp.miles for t in filter(self.__filterTrips(id), self.trip_map.values()))), \ str(sum(t.rev for t in filter(self.__filterPrimaryTrips(id), self.trip_map.values()))) diff --git a/experimental/Preprocessor.py b/experimental/Preprocessor.py index 2f03444..cf1658d 100644 --- a/experimental/Preprocessor.py +++ b/experimental/Preprocessor.py @@ -13,14 +13,13 @@ class TripPreprocess: @staticmethod def load_revenue_table(rev_table_file): rev_df = pd.read_csv(rev_table_file) - table = {'A':dict(), 'W':dict(), 'A-EP':dict(), 'W-EP':dict()} + table = {'A': dict(), 'W': dict(), 'A-EP': dict(), 'W-EP': dict()} for typ in table: details = rev_df[['Miles', typ]] for _, row in details.iterrows(): table[typ][row['Miles']] = float(row[typ]) return table - @staticmethod def calc_revenue(table, miles, los): rates = table[los] @@ -47,8 +46,12 @@ def prepare_and_load_trips(trips_file, revenue_table, assumptions, processed_fil names = {} for index, row in trip_df.iterrows(): if not row['trip_status'] == "CANCELED": - o = row['trip_pickup_address'].replace('No Gc', '').replace('*','').replace('Apt .','').replace('//','').replace('Bldg .','') + "P" + str(hash(row['trip_id']))[1:4] - d = row['trip_dropoff_address'].replace('No Gc', '').replace('*','').replace('Apt .','').replace('//','').replace('Bldg .','') + "D" + str(hash(row['trip_id']))[1:4] + o = row['trip_pickup_address'].replace('No Gc', '').replace('*', '').replace('Apt .', '').replace('//', + '').replace( + 'Bldg .', '') + "P" + str(hash(row['trip_id']))[1:4] + d = row['trip_dropoff_address'].replace('No Gc', '').replace('*', '').replace('Apt .', '').replace('//', + '').replace( + 'Bldg .', '') + "D" + str(hash(row['trip_id']))[1:4] temp_start = 
TripPreprocess.convert_time(str(row['trip_pickup_time'])) temp_end = TripPreprocess.convert_time(str(row['trip_dropoff_time'])) los = row['trip_los'] @@ -57,24 +60,31 @@ def prepare_and_load_trips(trips_file, revenue_table, assumptions, processed_fil start = min(temp_start, temp_end) end = max(temp_start, temp_end) # Uknown Time Assumption - if start == 0.0 or end == 0.0 or start > 1 - (1/24): + if start == 0.0 or end == 0.0 or start > 1 - (1 / 24): if id[-1] == 'B': - start = TripPreprocess.convert_time(str(trip_df.loc[trip_df['trip_id'] == id[:-1]+'A','trip_dropoff_time'].values[0])) + buffer + start = TripPreprocess.convert_time(str( + trip_df.loc[trip_df['trip_id'] == id[:-1] + 'A', 'trip_dropoff_time'].values[0])) + buffer elif id[-1] == 'C': - start = TripPreprocess.convert_time(str(trip_df.loc[trip_df['trip_id'] == id[:-1]+'B','trip_dropoff_time'].values[0])) + buffer + start = TripPreprocess.convert_time(str( + trip_df.loc[trip_df['trip_id'] == id[:-1] + 'B', 'trip_dropoff_time'].values[0])) + buffer else: print('A Trip with Unknown Time', id) exit(1) - end = min(1 - (1/24), start + end_buffer) + end = min(1 - (1 / 24), start + end_buffer) trip_df.at[index, 'trip_pickup_time'] = start trip_df.at[index, 'trip_dropoff_time'] = end # AB Merge Assumption - if "MERGE_ADDRESSES" in assumptions and (id[-1] == 'B' or id[-1] == 'C') and any(ad in row['trip_pickup_address'] for ad in assumptions['MERGE_ADDRESSES']): + if "MERGE_ADDRESSES" in assumptions and (id[-1] == 'B' or id[-1] == 'C') and any( + ad in row['trip_pickup_address'] for ad in assumptions['MERGE_ADDRESSES']): if id[-1] == 'B': - start = TripPreprocess.convert_time(str(trip_df.loc[trip_df['trip_id'] == id[:-1]+'A', 'trip_dropoff_time'].values[0])) + assumptions["MERGE_ADDRESS_WINDOW"] + start = TripPreprocess.convert_time( + str(trip_df.loc[trip_df['trip_id'] == id[:-1] + 'A', 'trip_dropoff_time'].values[0])) + \ + assumptions["MERGE_ADDRESS_WINDOW"] elif id[-1] == 'C': - start = TripPreprocess.convert_time(str(trip_df.loc[trip_df['trip_id'] == id[:-1]+'B', 'trip_dropoff_time'].values[0])) + assumptions["MERGE_ADDRESS_WINDOW"] + start = TripPreprocess.convert_time( + str(trip_df.loc[trip_df['trip_id'] == id[:-1] + 'B', 'trip_dropoff_time'].values[0])) + \ + assumptions["MERGE_ADDRESS_WINDOW"] else: print("Error processing merge Trip", id) print(o, d, id, start, end) @@ -90,7 +100,8 @@ def prepare_and_load_trips(trips_file, revenue_table, assumptions, processed_fil # Revenue Calculation rev = TripPreprocess.calc_revenue(revenue_table, int(row['trip_miles']), los) try: - t = Trip(o, d, cap, id, typ, start, end, rev, preset_miles=row['trip_miles'],prefix=False, suffix=True) + t = Trip(o, d, cap, id, typ, start, end, rev, preset_miles=row['trip_miles'], prefix=False, + suffix=True) trips.append(t) names[t] = row["customer_name"] except InvalidTripException as e: @@ -101,12 +112,14 @@ def prepare_and_load_trips(trips_file, revenue_table, assumptions, processed_fil trips = list(filter(lambda t: t.id not in ignore_ids, trips)) with open(processed_file_name, "w") as ct: for t in trips: - ct.write("trip_id,customer_name,trip_pickup_time,trip_pickup_address,trip_dropoff_time,trip_dropoff_address,trip_los," - "scheduled_miles, trip_miles,trip_rev,orig_lat,orig_long,dest_lat,dest_long,duration\n") - ct.write(",".join([t.id, '"' + " ".join(names[t].split(",")) + '"', str(t.start), - '"' + t.lp.o[:-4] + '"',str(t.end) ,'"' + t.lp.d[:-4] + '"', - t.los,str(t.preset_m), str(t.lp.miles), str(t.rev),str(t.lp.c1[0]), str(t.lp.c1[1]), - 
str(t.lp.c2[0]), str(t.lp.c2[1]), str(t.lp.time)]) + "\n") + ct.write( + "trip_id,customer_name,trip_pickup_time,trip_pickup_address,trip_dropoff_time,trip_dropoff_address,trip_los," + "scheduled_miles, trip_miles,trip_rev,orig_lat,orig_long,dest_lat,dest_long,duration\n") + ct.write(",".join([t.id, '"' + " ".join(names[t].split(",")) + '"', str(t.start), + '"' + t.lp.o[:-4] + '"', str(t.end), '"' + t.lp.d[:-4] + '"', + t.los, str(t.preset_m), str(t.lp.miles), str(t.rev), str(t.lp.c1[0]), + str(t.lp.c1[1]), + str(t.lp.c2[0]), str(t.lp.c2[1]), str(t.lp.time)]) + "\n") return trips @staticmethod @@ -121,13 +134,15 @@ def load_trips(processed_trips_file='calc_trips.csv', assumptions=None): cap = 1 if row['trip_los'] == 'A' else 1.5 id = row['trip_id'] rev = float(row['trip_rev']) - lp = LocationPair(o,d, (float(row['orig_lat']), float(row['orig_long'])), (float(row['dest_lat']), float(row['dest_long']))) + lp = LocationPair(o, d, (float(row['orig_lat']), float(row['orig_long'])), + (float(row['dest_lat']), float(row['dest_long']))) # AB Merge Assumption - if assumptions and "MERGE_ADDRESSES" in assumptions and (id[-1] == 'B' or id[-1] == 'C') and any( ad in o for ad in assumptions['MERGE_ADDRESSES']): + if assumptions and "MERGE_ADDRESSES" in assumptions and (id[-1] == 'B' or id[-1] == 'C') and any( + ad in o for ad in assumptions['MERGE_ADDRESSES']): typ = TripType.MERGE else: typ = None - trips.append(Trip(o, d, cap, id, typ, start, end, rev, preset_miles=int(row['scheduled_miles']),lp=lp)) + trips.append(Trip(o, d, cap, id, typ, start, end, rev, preset_miles=int(row['scheduled_miles']), lp=lp)) return trips @staticmethod @@ -164,4 +179,4 @@ def convert_time(time): segments.append(0) segments.append(0) x = datetime.timedelta(hours=segments[0], minutes=segments[1], seconds=segments[2]) - return x.total_seconds()/(60*60*24) + return x.total_seconds() / (60 * 60 * 24) diff --git a/experimental/Trip.py b/experimental/Trip.py index c85ac39..1ed40c3 100644 --- a/experimental/Trip.py +++ b/experimental/Trip.py @@ -1,31 +1,109 @@ from enum import Enum -import requests -from opencage.geocoder import OpenCageGeocode -from time import sleep + from haversine import haversine, Unit -from geopy.geocoders import Nominatim +from opencage.geocoder import OpenCageGeocode + from experimental.constants import * try: from locations import locations_cache - locations = locations_cache # cached dict mapping an address to its lat long, not in repository for privacy + + locations = locations_cache # cached dict mapping an address to its lat long, not in repository for privacy except: - locations = {'134 E 2nd Ave , Taylor , TX 76574': (30.5657537, -97.3937946), '16010 Park Valley Dr , Apt . 100 , Round Rock , TX 78681-3574': (30.512862, -97.711188), '9345 E Highway 290 , Apt . 12102 , Bldg . 12 , Austin , TX 78724-2463': (30.329392, -97.64733), '1801 E 51st St , Bldg . G , Austin , TX 78723-3434': (30.305428, -97.702329), 'x 85 Trinity St , Apt . 811 , Bldg . No // No Gc , Austin , TX 78701': (30.197644, -97.7491), '1631 E 2nd St , Bldg . C , Austin , TX 78702-4490': (30.259142, -97.72774), '400 E Cypress Creek Rd , Apt . 1103 , Bldg . No // No Gc , Cedar Park , TX 78613': (30.048154, -98.35584), '1631 E 2nd St , Bldg . A C & D , Austin , TX 78702-4490': (30.259142, -97.72774), '11020 Dessau Rd , Austin , TX 78754-2053': (30.4027877, -97.6388753), '2606 W Pecan St , Apt . 300 , Bldg . 
3 , Pflugerville , TX 78660-1917': (30.449825, -97.658057), '2806 Real St , Austin , TX 78722': (30.282913, -97.7119655), '1801 E 51st St , Austin , TX 78723-3434': (30.3010094, -97.6985202), '110 E Live Oak St , Austin , TX 78704': (30.2419797, -97.7514697), '2800 S I H 35 , Apt . 120 , Austin , TX 78704-5700': (30.218854, -97.750234), '1416 Mangrum St , Pflugerville , TX 78660': (30.444169, -97.659798), '2129 W Pecan St , Pflugerville , TX 78660': (30.445484, -97.649736), '508 E Howard Ln , Apt . LOT 319 , Austin , TX 78753-9704': (30.409607, -97.64855), '10000 Metric Blvd , Austin , TX 78758-5202': (30.3802058, -97.7165808), '7510 Lazy Creek Dr , Apt . B , Bldg . 5 , Austin , TX 78724-3300': (30.315474, -97.65692), '1304 Webberwood Way , Mobile Home , Elgin , TX 78621-5246': (30.223948, -97.487572), '6114 S First St , Austin , TX 78745-4008': (30.202407, -97.784597), '2213 Santa Maria St , Austin , TX 78702-4615': (30.2578565, -97.7187487), '1701 W Ben White Blvd , Austin , TX 78704-7667': (30.2268293, -97.7828497), '5401 Spring Meadow Rd , A , Duplex , Austin , TX 78744': (30.19618, -97.735356), '16701 N Heatherwilde B , 516 , 5 Ii , Pflugerville , TX 7866': (30.413328, -97.646238), '1801 E 51st St , 100 , G , Austin , TX 78723-3434': (30.305428, -97.702329), '12433 Dessau Rd , 602 , Unit 3152 , Austin , TX 78754- 0021': (30.396522, -97.642911), '1631 E 2nd St , A C & D , Austin , TX 78702-4490': (30.259142, -97.72774), '1636 E 3rd St , 103 , No , Austin , TX 78702': (30.260152, -97.727137), '1631 E 2nd St , A , Austin , TX 78702-4490': (30.258913, -97.727466), '9933 Milla Cir , House , Austin , TX 78748-3905': (30.1618354, -97.7976865), '1221 W Ben White Blvd , 200 , Austin , TX 78704-7192': (30.227302, -97.77835), '1214 Southport Dr , D , Austin , TX 78704': (30.232307, -97.7766741), '2800 S I H 35 , Austin , TX 78704-5700': (30.3557599, -97.6890565), '5301 W Duval Rd , 406 A , Code 5301 , Austin , TX 78727- 6618': (30.417285, -97.750034), '8010 N Interstate 35 , 121 , Austin , TX 78753': (30.388382, -97.672564), '1701 W Ben White Blvd , 180 , Austin , TX 78704-7667': (30.227818, -97.784649), '1601 Royal Crest Dr , 2160 , 8/ , Austin , TX 78741- 2848': (30.237574, -97.731207), '706 W Ben White Blvd , 100 , Austin , TX 78704-8124': (30.226898, -97.770735), '13838 The Lakes Blvd, Pflugerville, TX 78660': (30.4233445, -97.6656668), '1733 Arial Dr, Austin, TX': (30.3918023, -97.6491909), '16010 Park Valley Dr , 100 , Round Rock , TX 78681-3574': (30.512862, -97.711188), '9345 E Highway 290 , 12102 , 12 , Austin , TX 78724-2463': (30.278098, -97.685082), '1801 E 51st St , G , Austin , TX 78723-3434': (30.3010094, -97.6985202), '6301 Berkman Dr , 206 , No/ , Austin , TX 78723': (30.316869, -97.690829), '1500 Red River St , Austin , TX 78701': (30.276589, -97.7345157), '303 E Brenham St , B , Duplex , Manor , TX 78653': (30.337426, -97.556906), '2410 Round Rock Ave , 150 , Round Rock , TX 78681-4003': (30.509384, -97.712457), '5701 Tracy Lynn Ln , B , Duplex , Austin , TX 78721': (30.255844, -97.688515), '1010 W 40th St , Austin , TX 78756-4010': (30.308401, -97.741926), '1304 S Webberwood Way , 2 , Mobile Home , Elgin , TX 78621-5246': (30.223948, -97.487572), '5717 Balcones Dr , Austin , TX 78731-4203': (30.338, -97.756495), '2606 W Pecan St , 300 , 3 , Pflugerville , TX 78660-1917': (30.449825, -97.658057), 'xcell 1806 Harvey St , House , Austin , TX 78702-1663': (30.281568, -97.705418), 'xbus 8220 Cross Park Dr , 100 , Austin , TX 78754-5228': (30.335443, -97.669293), '2800 S I H 35 , 
120 , Austin , TX 78704-5700': (30.218854, -97.750234), '3226 W Slaughter Ln , 127 , Austin , TX 78748': (30.181096, -97.84445), '1631 E 2nd St , D , Austin , TX 78702-4490': (30.258913, -97.727466), '12221 N Mopac Exwy , Austin , TX 78758': (30.413874, -97.706466), '508 E Howard Ln , LOT 319 , Austin , TX 78753-9704': (30.4174122, -97.6512382), '12433 Dessau Rd , 2142 , B , Austin , TX 78754- 2183': (30.404676, -97.637745), '4681 College Park Dr , Round Rock , TX 78665': (30.5641494, -97.6568948), '1638 E 2nd St , 413 , Austin , TX 78702': (30.259224, -97.727542), '2911 Medical Arts St , 9 , Austin , TX 78705': (30.289351, -97.728493), '14610 Menifee St , House , Austin , TX 78725-4718': (30.233446, -97.588285), '4614 N I-35 , Austin , TX 78751': (30.304022, -97.714198), '6200 Loyola Ln , 424 , 4 Gc : 2007 , Austin , TX 78724- 3500': (30.197384, -97.748202), '5200 Davis Ln , 200 , B , Austin , TX 78749': (30.207621, -97.860453), '2724 Philomena St , 123 , No , Austin , TX 78723': (30.305107, -97.686631), '8913 Collinfield Dr , 1 , , Austin , TX 78758-6704': (30.360677, -97.70592), '18112 moreto loop, pflugerville, tx 78660': (29.873711, -97.680043), '12151 N ih 35, austin, tx 78753': (30.388382, -97.672564)} + locations = {'134 E 2nd Ave , Taylor , TX 76574': (30.5657537, -97.3937946), + '16010 Park Valley Dr , Apt . 100 , Round Rock , TX 78681-3574': (30.512862, -97.711188), + '9345 E Highway 290 , Apt . 12102 , Bldg . 12 , Austin , TX 78724-2463': (30.329392, -97.64733), + '1801 E 51st St , Bldg . G , Austin , TX 78723-3434': (30.305428, -97.702329), + 'x 85 Trinity St , Apt . 811 , Bldg . No // No Gc , Austin , TX 78701': (30.197644, -97.7491), + '1631 E 2nd St , Bldg . C , Austin , TX 78702-4490': (30.259142, -97.72774), + '400 E Cypress Creek Rd , Apt . 1103 , Bldg . No // No Gc , Cedar Park , TX 78613': ( + 30.048154, -98.35584), + '1631 E 2nd St , Bldg . A C & D , Austin , TX 78702-4490': (30.259142, -97.72774), + '11020 Dessau Rd , Austin , TX 78754-2053': (30.4027877, -97.6388753), + '2606 W Pecan St , Apt . 300 , Bldg . 3 , Pflugerville , TX 78660-1917': (30.449825, -97.658057), + '2806 Real St , Austin , TX 78722': (30.282913, -97.7119655), + '1801 E 51st St , Austin , TX 78723-3434': (30.3010094, -97.6985202), + '110 E Live Oak St , Austin , TX 78704': (30.2419797, -97.7514697), + '2800 S I H 35 , Apt . 120 , Austin , TX 78704-5700': (30.218854, -97.750234), + '1416 Mangrum St , Pflugerville , TX 78660': (30.444169, -97.659798), + '2129 W Pecan St , Pflugerville , TX 78660': (30.445484, -97.649736), + '508 E Howard Ln , Apt . LOT 319 , Austin , TX 78753-9704': (30.409607, -97.64855), + '10000 Metric Blvd , Austin , TX 78758-5202': (30.3802058, -97.7165808), + '7510 Lazy Creek Dr , Apt . B , Bldg . 
5 , Austin , TX 78724-3300': (30.315474, -97.65692), + '1304 Webberwood Way , Mobile Home , Elgin , TX 78621-5246': (30.223948, -97.487572), + '6114 S First St , Austin , TX 78745-4008': (30.202407, -97.784597), + '2213 Santa Maria St , Austin , TX 78702-4615': (30.2578565, -97.7187487), + '1701 W Ben White Blvd , Austin , TX 78704-7667': (30.2268293, -97.7828497), + '5401 Spring Meadow Rd , A , Duplex , Austin , TX 78744': (30.19618, -97.735356), + '16701 N Heatherwilde B , 516 , 5 Ii , Pflugerville , TX 7866': (30.413328, -97.646238), + '1801 E 51st St , 100 , G , Austin , TX 78723-3434': (30.305428, -97.702329), + '12433 Dessau Rd , 602 , Unit 3152 , Austin , TX 78754- 0021': (30.396522, -97.642911), + '1631 E 2nd St , A C & D , Austin , TX 78702-4490': (30.259142, -97.72774), + '1636 E 3rd St , 103 , No , Austin , TX 78702': (30.260152, -97.727137), + '1631 E 2nd St , A , Austin , TX 78702-4490': (30.258913, -97.727466), + '9933 Milla Cir , House , Austin , TX 78748-3905': (30.1618354, -97.7976865), + '1221 W Ben White Blvd , 200 , Austin , TX 78704-7192': (30.227302, -97.77835), + '1214 Southport Dr , D , Austin , TX 78704': (30.232307, -97.7766741), + '2800 S I H 35 , Austin , TX 78704-5700': (30.3557599, -97.6890565), + '5301 W Duval Rd , 406 A , Code 5301 , Austin , TX 78727- 6618': (30.417285, -97.750034), + '8010 N Interstate 35 , 121 , Austin , TX 78753': (30.388382, -97.672564), + '1701 W Ben White Blvd , 180 , Austin , TX 78704-7667': (30.227818, -97.784649), + '1601 Royal Crest Dr , 2160 , 8/ , Austin , TX 78741- 2848': (30.237574, -97.731207), + '706 W Ben White Blvd , 100 , Austin , TX 78704-8124': (30.226898, -97.770735), + '13838 The Lakes Blvd, Pflugerville, TX 78660': (30.4233445, -97.6656668), + '1733 Arial Dr, Austin, TX': (30.3918023, -97.6491909), + '16010 Park Valley Dr , 100 , Round Rock , TX 78681-3574': (30.512862, -97.711188), + '9345 E Highway 290 , 12102 , 12 , Austin , TX 78724-2463': (30.278098, -97.685082), + '1801 E 51st St , G , Austin , TX 78723-3434': (30.3010094, -97.6985202), + '6301 Berkman Dr , 206 , No/ , Austin , TX 78723': (30.316869, -97.690829), + '1500 Red River St , Austin , TX 78701': (30.276589, -97.7345157), + '303 E Brenham St , B , Duplex , Manor , TX 78653': (30.337426, -97.556906), + '2410 Round Rock Ave , 150 , Round Rock , TX 78681-4003': (30.509384, -97.712457), + '5701 Tracy Lynn Ln , B , Duplex , Austin , TX 78721': (30.255844, -97.688515), + '1010 W 40th St , Austin , TX 78756-4010': (30.308401, -97.741926), + '1304 S Webberwood Way , 2 , Mobile Home , Elgin , TX 78621-5246': (30.223948, -97.487572), + '5717 Balcones Dr , Austin , TX 78731-4203': (30.338, -97.756495), + '2606 W Pecan St , 300 , 3 , Pflugerville , TX 78660-1917': (30.449825, -97.658057), + 'xcell 1806 Harvey St , House , Austin , TX 78702-1663': (30.281568, -97.705418), + 'xbus 8220 Cross Park Dr , 100 , Austin , TX 78754-5228': (30.335443, -97.669293), + '2800 S I H 35 , 120 , Austin , TX 78704-5700': (30.218854, -97.750234), + '3226 W Slaughter Ln , 127 , Austin , TX 78748': (30.181096, -97.84445), + '1631 E 2nd St , D , Austin , TX 78702-4490': (30.258913, -97.727466), + '12221 N Mopac Exwy , Austin , TX 78758': (30.413874, -97.706466), + '508 E Howard Ln , LOT 319 , Austin , TX 78753-9704': (30.4174122, -97.6512382), + '12433 Dessau Rd , 2142 , B , Austin , TX 78754- 2183': (30.404676, -97.637745), + '4681 College Park Dr , Round Rock , TX 78665': (30.5641494, -97.6568948), + '1638 E 2nd St , 413 , Austin , TX 78702': (30.259224, -97.727542), + '2911 Medical 
Arts St , 9 , Austin , TX 78705': (30.289351, -97.728493), + '14610 Menifee St , House , Austin , TX 78725-4718': (30.233446, -97.588285), + '4614 N I-35 , Austin , TX 78751': (30.304022, -97.714198), + '6200 Loyola Ln , 424 , 4 Gc : 2007 , Austin , TX 78724- 3500': (30.197384, -97.748202), + '5200 Davis Ln , 200 , B , Austin , TX 78749': (30.207621, -97.860453), + '2724 Philomena St , 123 , No , Austin , TX 78723': (30.305107, -97.686631), + '8913 Collinfield Dr , 1 , , Austin , TX 78758-6704': (30.360677, -97.70592), + '18112 moreto loop, pflugerville, tx 78660': (29.873711, -97.680043), + '12151 N ih 35, austin, tx 78753': (30.388382, -97.672564)} pass + class TripType(Enum): - A = 1 # Destination is a home without passenger Must be before B for a location - B = 2 # Destination is a hospital with passenger Must be before C for a location - C = 3 # Destination is a hospital without a passenger Must be before D for a location - D = 4 # Destination is a home with a passenger - INTER_A = 5 # From driver home to any other location Must occur before any A trips - INTER_B = 6 # From any location to driver home Must occur after all D trips + A = 1 # Destination is a home without passenger Must be before B for a location + B = 2 # Destination is a hospital with passenger Must be before C for a location + C = 3 # Destination is a hospital without a passenger Must be before D for a location + D = 4 # Destination is a home with a passenger + INTER_A = 5 # From driver home to any other location Must occur before any A trips + INTER_B = 6 # From any location to driver home Must occur after all D trips MERGE = 7 + + class InvalidTripException(Exception): pass + + class Trip: - def __init__(self, o, d, space, id, type, start, end, rev= 0, preset_miles = 0, lp = None, prefix=False, suffix=False, prefixLen=3, suffixLen=4): + def __init__(self, o, d, space, id, type, start, end, rev=0, preset_miles=0, lp=None, prefix=False, suffix=False, + prefixLen=3, suffixLen=4): self.type = type self.id = id if lp: @@ -38,13 +116,17 @@ def __init__(self, o, d, space, id, type, start, end, rev= 0, preset_miles = 0, self.los = 'W' if space == 1.5 else 'A' self.rev = rev if self.lp.time > end - max(0, start - BUFFER): - raise InvalidTripException("Trip ID:" + str(id) + " start:" + str(start) + " end:" + str(end) + " trip length: " + str(self.lp.time)) + raise InvalidTripException( + "Trip ID:" + str(id) + " start:" + str(start) + " end:" + str(end) + " trip length: " + str( + self.lp.time)) self.preset_m = preset_miles + def __repr__(self): return self.lp.o + "->" + self.lp.d + class Location: - def __init__(self, addr, coord = None): + def __init__(self, addr, coord=None): self.addr = addr if coord is None and self.addr in locations: self.coord = locations[self.addr] @@ -69,6 +151,7 @@ def find_coord(self, addr): def rev_coord(self): return tuple(reversed(self.coord)) + class LocationPair: def __init__(self, l1, l2, c1=None, c2=None, prefix=False, suffix=False, plen=3, slen=4): self.o = l1 @@ -93,7 +176,7 @@ def __init__(self, l1, l2, c1=None, c2=None, prefix=False, suffix=False, plen=3, self.miles = haversine(self.c1, self.c2, Unit.MILES) speed = self.get_speed(self.miles) - self.time = (self.miles / speed) / 24 + FIFTEEN/15 + self.time = (self.miles / speed) / 24 + FIFTEEN / 15 if self.time > 1: print("Time Longer than a Day") print(self.o, self.c1) @@ -105,7 +188,7 @@ def getCoords(self, l1): return Location(l1).coord def get_speed(self, miles): - return SPEED[0] # Adjust speed if needed + return SPEED[0] # Adjust 
speed if needed # if miles < 30: # # print(50) # return 50 @@ -115,4 +198,3 @@ def get_speed(self, miles): # else: # # print(70) # return 70 - diff --git a/experimental/__init__.py b/experimental/__init__.py index 53a6ef4..65e0948 100644 --- a/experimental/__init__.py +++ b/experimental/__init__.py @@ -1,8 +1,7 @@ from .Assumptions import * -from .constants import * from .Driver import * from .GeneralizedOptimizer import GeneralOptimizer -from .listeners import TimeListener, ProgressListener from .PDWTWOptimizer import PDWTWOptimizer from .Preprocessor import TripPreprocess -from .Trip import Trip, TripType, InvalidTripException \ No newline at end of file +from .Trip import Trip, TripType, InvalidTripException +from .constants import * diff --git a/experimental/constants.py b/experimental/constants.py index e80bc19..28a753f 100644 --- a/experimental/constants.py +++ b/experimental/constants.py @@ -1,4 +1,4 @@ FIFTEEN = 0.01041666666 BUFFER = FIFTEEN * (4 / 3) SPEED = [40] -keys = {} \ No newline at end of file +keys = {} diff --git a/experimental/listeners.py b/experimental/listeners.py index e393df2..76a389d 100644 --- a/experimental/listeners.py +++ b/experimental/listeners.py @@ -5,6 +5,7 @@ class TimeListener(ProgressListener): """ Sample Listener found on IBM DoCPLEX Forums """ + def __init__(self, time): ProgressListener.__init__(self) self._time = time @@ -22,13 +23,15 @@ def notify_progress(self, data): elif data.time > self._time: self.abort() else: - #print('No incumbent yet') + # print('No incumbent yet') pass + class GapListener(ProgressListener): """ Sample Listener found on IBM DoCPLEX Forums """ + def __init__(self, time, gap): ProgressListener.__init__(self) self._time = time @@ -45,5 +48,5 @@ def notify_progress(self, data): print('ABORTING') self.abort() else: - #print('No incumbent yet') - pass \ No newline at end of file + # print('No incumbent yet') + pass diff --git a/experimental/run.py b/experimental/run.py index 8451181..a521e5b 100644 --- a/experimental/run.py +++ b/experimental/run.py @@ -1,33 +1,32 @@ +import argparse + +from experimental.Assumptions import preprocess_assumptions, gen_opt_params from experimental.GeneralizedOptimizer import GeneralOptimizer from experimental.Preprocessor import TripPreprocess as tp -from datetime import datetime -from experimental.Assumptions import preprocess_assumptions, gen_opt_params -from experimental.constants import keys, SPEED - -import argparse +from experimental.constants import keys if __name__ == "__main__": parser = argparse.ArgumentParser(description='Run the Generalized Optimizer with given files') rn = parser.add_argument_group('required named args') parser.add_argument('-s', '--speed', action='store', type=int, dest='s', default=60, - help='Speed in MPH to use for time calculations. Default is 60 MPH') + help='Speed in MPH to use for time calculations. 
Default is 60 MPH') parser.add_argument('-n', '--name', action='store', type=str, dest='n', default=None, - help='Name of the Model') + help='Name of the Model') rn.add_argument('-r', '--rev', action='store', type=str, dest='r', - help='Path to CSV with Revenue Table', required=True) - rn.add_argument('-t','--trips', action='store', type=str, dest='t', - help='Path to CSV Trips File', required=True) + help='Path to CSV with Revenue Table', required=True) + rn.add_argument('-t', '--trips', action='store', type=str, dest='t', + help='Path to CSV Trips File', required=True) rn.add_argument('-d', '--drivers', action='store', type=str, dest='d', - help='Path to CSV Driver Details File', required=True) + help='Path to CSV Driver Details File', required=True) rn.add_argument('-k', '--key', action='store', type=str, dest='k', - help='Path to File With OpenCage GeoCode API Key', required=True) + help='Path to File With OpenCage GeoCode API Key', required=True) rn.add_argument('-o', '--output', action='store', type=str, dest='o', - help='File To Store Assignment CSV', required=True) + help='File To Store Assignment CSV', required=True) rn.add_argument('-v', '--vis', action='store', type=str, dest='v', - help='File To Store Assignment HTML Visualization', required=True) + help='File To Store Assignment HTML Visualization', required=True) args = parser.parse_args() if not all([args.r, args.k, args.t, args.d, args.o, args.v]): @@ -46,4 +45,4 @@ optimizer = GeneralOptimizer(trips, drivers, gen_opt_params, args.n) outfile = args.o optimizer.solve(outfile) - optimizer.visualize(outfile, args.v, open_after=True) \ No newline at end of file + optimizer.visualize(outfile, args.v, open_after=True) diff --git a/experimental/testing.py b/experimental/testing.py index 22ed8c1..f9e2467 100644 --- a/experimental/testing.py +++ b/experimental/testing.py @@ -1,16 +1,15 @@ -from experimental.GeneralizedOptimizer import GeneralOptimizer -from experimental.PDWTWOptimizer import PDWTWOptimizer -from experimental.Preprocessor import TripPreprocess as tp from datetime import datetime + from experimental.Assumptions import preprocess_assumptions, opt_params +from experimental.GeneralizedOptimizer import GeneralOptimizer +from experimental.Preprocessor import TripPreprocess as tp from experimental.constants import keys keyFile = open('geocode.key') keys['geo_key'] = keyFile.readline().rstrip() - rev_table = tp.load_revenue_table('../Data/rev_table.csv') -trips = tp.prepare_and_load_trips('../Data/in_trips_010220.csv',rev_table, preprocess_assumptions) +trips = tp.prepare_and_load_trips('../Data/in_trips_010220.csv', rev_table, preprocess_assumptions) # trips = tp.load_trips('calc_trips.csv') drivers = tp.load_drivers('../Data/in_drivers.csv') # optimizer = PDWTWOptimizer(trips, drivers, opt_params) @@ -33,4 +32,4 @@ # optimizer2 = GeneralOptimizer(trips2, drivers, opt_params) # outfile = 'output/gen_final_output' + str(datetime.now()) + '.csv' # optimizer2.solve(outfile) -# optimizer2.visualize(outfile, 'vis-022620-genopt.html') \ No newline at end of file +# optimizer2.visualize(outfile, 'vis-022620-genopt.html') diff --git a/legacy/PDWTWImplementation.py b/legacy/PDWTWImplementation.py index c82e5bc..93515d6 100644 --- a/legacy/PDWTWImplementation.py +++ b/legacy/PDWTWImplementation.py @@ -1,11 +1,10 @@ -import pandas as pd -from docplex.mp.progress import ProgressListener -from docplex.mp.utils import DOcplexException +from datetime import datetime +import pandas as pd from Driver import Driver from Trip import Trip, 
TripType from docplex.mp.model import Model -from datetime import datetime +from docplex.mp.progress import ProgressListener print("Started", datetime.now()) # Read input data @@ -38,12 +37,15 @@ FIFTEEN = 0.01041666666 last_trip = None -P = [] # Pickup locations -D = [] # Dropoff locations +P = [] # Pickup locations +D = [] # Dropoff locations + + class Listener(ProgressListener): """ Sample Listener found on IBM DoCPLEX Forums """ + def __init__(self, time, gap): ProgressListener.__init__(self) self._time = time @@ -63,6 +65,7 @@ def notify_progress(self, data): # print('No incumbent yet') pass + """ The following varialbes have the prefixes signifying which location they relate to: P-Pickup @@ -73,28 +76,26 @@ def notify_progress(self, data): """ Decision Variables """ -PQ = [] # Capacity after node j is visited; Length of N -DQ = [] # Capacity after node j is visited; Length of N - -PB = [] # time that node j is visited; Length of N -DB = [] # time that node j is visited; Length of N - +PQ = [] # Capacity after node j is visited; Length of N +DQ = [] # Capacity after node j is visited; Length of N -Pv = [] # index of first node that is visited in the route; Length of N -Dv = [] # index of first node that is visited in the route; Length of N +PB = [] # time that node j is visited; Length of N +DB = [] # time that node j is visited; Length of N +Pv = [] # index of first node that is visited in the route; Length of N +Dv = [] # index of first node that is visited in the route; Length of N """ Parameters """ -Pe = [] # start window of node j; length of N -De = [] # start window of node j; length of N +Pe = [] # start window of node j; length of N +De = [] # start window of node j; length of N -Pl = [] # end window of node j; length of N -Dl = [] # end window of node j; length of N +Pl = [] # end window of node j; length of N +Dl = [] # end window of node j; length of N -Pq = [] # demand for each location j; length of N -Dq = [] # demand for each location j; length of N +Pq = [] # demand for each location j; length of N +Dq = [] # demand for each location j; length of N CAP = 2 BIGM = 100000 @@ -136,26 +137,30 @@ def notify_progress(self, data): start = last_trip.end + (1 / 24) idxes[o] = count idxes[d] = TRIPS_TO_DO + count - P.append(o) # Add to Pickups - D.append(d) # Add to Dropoffs - Pe.append(start - FIFTEEN/2) # Add to Pickups open window - De.append(start - FIFTEEN/2) # Add to Dropoffs open window - Pl.append(end + FIFTEEN/2) # Add to Pickups close window - Dl.append(end + FIFTEEN/2) # Add to Dropoffs close window - Pq.append(cap) # Add to Pickup capacity - Dq.append(-cap) # Add to dropoff capacity - - PQ.append(mdl.continuous_var(lb=0, name='Q_'+str(count))) #Varaible for capacity at location pickup - DQ.append(mdl.continuous_var(lb=0, name='Q_'+str(TRIPS_TO_DO + count))) #Varaible for capacity at location dropoff - - PB.append(mdl.continuous_var(lb=0, ub=1, name='B_' + str(count))) #Varaible for time at location pickup - DB.append(mdl.continuous_var(lb=0, ub=1, name='B_' + str(TRIPS_TO_DO + count))) #Varaible for time at location dropoff - - Pv.append(mdl.continuous_var(lb=0, name='v_' + str(count))) #Varaible for index of first location on route pickup - Dv.append(mdl.continuous_var(lb=0, name='v_' + str(TRIPS_TO_DO + count))) #Varaible for undex of first location on route dropoff - - location_pair.add((o,d)) - ar[(o,d)] = t + P.append(o) # Add to Pickups + D.append(d) # Add to Dropoffs + Pe.append(start - FIFTEEN / 2) # Add to Pickups open window + De.append(start - FIFTEEN / 2) 
# Add to Dropoffs open window + Pl.append(end + FIFTEEN / 2) # Add to Pickups close window + Dl.append(end + FIFTEEN / 2) # Add to Dropoffs close window + Pq.append(cap) # Add to Pickup capacity + Dq.append(-cap) # Add to dropoff capacity + + PQ.append(mdl.continuous_var(lb=0, name='Q_' + str(count))) # Varaible for capacity at location pickup + DQ.append( + mdl.continuous_var(lb=0, name='Q_' + str(TRIPS_TO_DO + count))) # Varaible for capacity at location dropoff + + PB.append(mdl.continuous_var(lb=0, ub=1, name='B_' + str(count))) # Varaible for time at location pickup + DB.append(mdl.continuous_var(lb=0, ub=1, + name='B_' + str(TRIPS_TO_DO + count))) # Varaible for time at location dropoff + + Pv.append( + mdl.continuous_var(lb=0, name='v_' + str(count))) # Varaible for index of first location on route pickup + Dv.append(mdl.continuous_var(lb=0, name='v_' + str( + TRIPS_TO_DO + count))) # Varaible for undex of first location on route dropoff + + location_pair.add((o, d)) + ar[(o, d)] = t if o not in outlfow_trips: outlfow_trips[o] = {t} else: @@ -169,7 +174,7 @@ def notify_progress(self, data): if count == TRIPS_TO_DO: break for index, row in driver_df.iterrows(): - if count < DRIVER_IDX: + if count < DRIVER_IDX: count += 1 continue cap = 1 if row['Vehicle_Type'] == 'A' else 1.5 @@ -177,8 +182,8 @@ def notify_progress(self, data): drivers.add(Driver(row['ID'], row['Name'], add, cap, row['Vehicle_Type'])) N.append(add) driverLocations.append(add) - driverstart = add # + "P" - driverstop = add #+ "D" + driverstart = add # + "P" + driverstop = add # + "D" break # Append all of the arrays together to make data structure @@ -197,9 +202,9 @@ def notify_progress(self, data): # print(len(v)) id = 1 -x = [] # binary whether trip ij is taken; length of A -t = [] # time of traversing trip ij; length of A -c = [] # cost of traversing trip ij; length of A +x = [] # binary whether trip ij is taken; length of A +t = [] # time of traversing trip ij; length of A +c = [] # cost of traversing trip ij; length of A for i, o in enumerate(N): for j, d in enumerate(N): if o != d: @@ -236,7 +241,7 @@ def notify_progress(self, data): x.append(mdl.binary_var(name='C:' + o + '->' + d)) else: # Shouldn't happen - print(o,d) + print(o, d) exit(1) tripdex[(o, d)] = len(x) - 1 t.append(trp.lp.time) @@ -268,7 +273,7 @@ def notify_progress(self, data): total += x[tripdex[(otrip.lp.o, otrip.lp.d)]] if i in driverLocations: print("here") - obj += 1000 * total + obj += 1000 * total mdl.add_constraint(total >= NUM_DRIVERS, "Drivers leaving Depot") else: mdl.add_constraint(total == 1, "Primary Location Exited " + i) @@ -280,8 +285,8 @@ def notify_progress(self, data): for i, o in enumerate(PuD): for j, d in enumerate(PuD): if o != d: - mdl.add_constraint(ct= B[j] >= B[i] + t[tripdex[(o,d)]] - BIGM*(1- x[tripdex[(o,d)]])) - mdl.add_constraint(ct= Q[j] >= Q[i] + q[j] - BIGM*(1- x[tripdex[(o,d)]])) + mdl.add_constraint(ct=B[j] >= B[i] + t[tripdex[(o, d)]] - BIGM * (1 - x[tripdex[(o, d)]])) + mdl.add_constraint(ct=Q[j] >= Q[i] + q[j] - BIGM * (1 - x[tripdex[(o, d)]])) """ Time Windows @@ -312,8 +317,8 @@ def notify_progress(self, data): for i, o in enumerate(PuD): for j, d in enumerate(PuD): if o != d: - mdl.add_constraint(v[j] >= v[i] + n * (x[tripdex[(o,d)]] - 1)) - mdl.add_constraint(v[j] <= v[i] + n * (1 - x[tripdex[(o,d)]])) + mdl.add_constraint(v[j] >= v[i] + n * (x[tripdex[(o, d)]] - 1)) + mdl.add_constraint(v[j] <= v[i] + n * (1 - x[tripdex[(o, d)]])) """ Temporary Validation @@ -336,7 +341,7 @@ def notify_progress(self, 
data): Objective """ # total = 0.0 -for i,yes in enumerate(x): +for i, yes in enumerate(x): obj += c[i] * yes # print('\n'.join(str(c) for c in mdl.iter_constraints())) @@ -369,7 +374,8 @@ def notify_progress(self, data): starters = list(var.solution_value for var in v) for s in starters: for var0, var1, var2, var3 in filter(lambda x: s == x[3].solution_value, zip(PuD, B, Q, v)): - print('"' + var0 + '"' + ';' + str(var1.solution_value) +';' + str(var2.solution_value) + ';'+ str(var3.solution_value)) + print('"' + var0 + '"' + ';' + str(var1.solution_value) + ';' + str(var2.solution_value) + ';' + str( + var3.solution_value)) # print(var1.get_name() + ": "+ str(var1.solution_value)) # print(var2.get_name() + ": "+ str(var2.solution_value)) # print(var3.get_name() + ": "+ str(var3.solution_value)) @@ -377,8 +383,8 @@ def notify_progress(self, data): for i, o in enumerate(N): for j, d in enumerate(N): if o != d: - var = x[tripdex[(o,d)]] - t = ar[(o,d)] + var = x[tripdex[(o, d)]] + t = ar[(o, d)] # print("'" + var.get_name() + "';" + str(var.solution_value) + ';' + str(t.start)+ ';' + str(t.end)+ ';' + str(t.lp.miles)) with open('output/pdwtw_final_output' + str(datetime.now()) + '.csv', 'w') as output: output.write( @@ -391,9 +397,10 @@ def notify_progress(self, data): if t.id not in primaryTID: continue arrival = idxes[o] - dep = arrival + TRIPS_TO_DO + dep = arrival + TRIPS_TO_DO print(arrival, dep) - output.write(str(t.id) + "," + str(round(v[arrival].solution_value)) + ",\"" + str(t.lp.o[:-4]) + "\"," + str(t.start) + "," + str(B[arrival].solution_value) + ",\"" + + output.write(str(t.id) + "," + str(round(v[arrival].solution_value)) + ",\"" + str( + t.lp.o[:-4]) + "\"," + str(t.start) + "," + str(B[arrival].solution_value) + ",\"" + str(t.lp.d[:-4]) + "\"," + str(t.end) + "," + str( B[dep].solution_value) + "," + str(t.los) + "," + str(t.lp.miles) + "," + str(t.lp.time) + "\n") diff --git a/legacy/iteration1.py b/legacy/iteration1.py index b1c402d..cf72563 100644 --- a/legacy/iteration1.py +++ b/legacy/iteration1.py @@ -1,11 +1,8 @@ import pandas as pd from Driver import Driver from Trip import Trip, TripType -import cplex -from docplex.cp.model import CpoModel from docplex.mp.model import Model - # Read input data trip_df = pd.read_csv("Trips.csv") driver_df = pd.read_csv("Drivers.csv") @@ -34,14 +31,14 @@ end = 1.0 id = row['trip_id'] type = TripType.A if 'A' in id else TripType.B - if type == TripType.B and start == 0: # Assuming that a B trip with no required pickup time will require pickup 1 hr after A trip dropoff - start = last_trip.end + (1/24) + if type == TripType.B and start == 0: # Assuming that a B trip with no required pickup time will require pickup 1 hr after A trip dropoff + start = last_trip.end + (1 / 24) cap = 1 if row['trip_los'] == 'A' else 1.5 locations.add(o) locations.add(d) t = Trip(o, d, cap, id, type, start, end) primary_trips.add(t) - location_pair.add((o,d)) + location_pair.add((o, d)) if o not in outlfow_trips: outlfow_trips[o] = {t} else: @@ -70,7 +67,7 @@ # trip_dict[(location, row['Address'])] = t for o in locations: for d in locations: - if o is not d and (o,d) not in location_pair: + if o is not d and (o, d) not in location_pair: t = Trip(o, d, 0, id, TripType.INTER_A, 0.0, 1.0) if o not in outlfow_trips: outlfow_trips[o] = {t} @@ -100,9 +97,9 @@ print("Trip details") for i, trip in enumerate(all_trips): if trip.type == TripType.A or trip.type == TripType.B: - print("Primary trip", i, "FROM",trip.lp.o, "TO" ,trip.lp.d, "pickup by", trip.start, "drop 
off by", trip.end ) + print("Primary trip", i, "FROM", trip.lp.o, "TO", trip.lp.d, "pickup by", trip.start, "drop off by", trip.end) else: - print("Secondary trip", i, "FROM",trip.lp.o, "TO" ,trip.lp.d, "pickup by", trip.start, "drop off by", trip.end ) + print("Secondary trip", i, "FROM", trip.lp.o, "TO", trip.lp.d, "pickup by", trip.start, "drop off by", trip.end) print("Locataion details") for i, loc in enumerate(locations): @@ -118,15 +115,15 @@ for i, driver in enumerate(drivers): for j, trip in enumerate(all_trips): - x.append(mdl.binary_var(name='y' +'_' + str(i) +'_' + str(j))) + x.append(mdl.binary_var(name='y' + '_' + str(i) + '_' + str(j))) for i, driver in enumerate(drivers): for j, trip in enumerate(all_trips): - x.append(mdl.continuous_var(lb=0, ub=1, name='t' +'_' + str(i) +'_' + str(j))) + x.append(mdl.continuous_var(lb=0, ub=1, name='t' + '_' + str(i) + '_' + str(j))) print("Number of variables: ", mdl.number_of_variables) -#Inflow = outflow for all locations +# Inflow = outflow for all locations for loc in locations: for i, d in enumerate(drivers): total = 0.0 @@ -134,8 +131,8 @@ total += x[i * len(all_trips) + indices[intrip]] for otrip in outlfow_trips[loc]: total -= x[i * len(all_trips) + indices[otrip]] - mdl.add_constraint(ct= total == 0 , ctname='flowinout' + '_' + str(hash(loc))[:5] + '_' + str(i)) -print("Number of constraints after flow in = flow out" , mdl.number_of_constraints) + mdl.add_constraint(ct=total == 0, ctname='flowinout' + '_' + str(hash(loc))[:5] + '_' + str(i)) +print("Number of constraints after flow in = flow out", mdl.number_of_constraints) # Inflow before outflow for all locations except driver home --- can't figure this out ---- for loc in locations: @@ -143,38 +140,40 @@ for intrip in inflow_trips[loc]: total = 0.0 for otrip in outlfow_trips[loc]: - total += (x[INT_VARS_OFFSET + i * len(all_trips) + indices[intrip]] + intrip.lp.time) - x[INT_VARS_OFFSET + i * len(all_trips) + indices[otrip]] - if loc in driverLocations: # leave home before coming back - mdl.add_constraint(ct= total >= 0, ctname='outb4in' + '_' + str(hash(loc))[:5] + '_' + str(i)) + total += (x[INT_VARS_OFFSET + i * len(all_trips) + indices[intrip]] + intrip.lp.time) - x[ + INT_VARS_OFFSET + i * len(all_trips) + indices[otrip]] + if loc in driverLocations: # leave home before coming back + mdl.add_constraint(ct=total >= 0, ctname='outb4in' + '_' + str(hash(loc))[:5] + '_' + str(i)) else: - mdl.add_constraint(ct= total <= 0 , ctname='inb4out' + '_' + str(hash(loc))[:5] + '_' + str(i)) -print("Number of constraints after flow in before flow out" , mdl.number_of_constraints) + mdl.add_constraint(ct=total <= 0, ctname='inb4out' + '_' + str(hash(loc))[:5] + '_' + str(i)) +print("Number of constraints after flow in before flow out", mdl.number_of_constraints) # Only one driver per trip for j, trip in enumerate(all_trips): total = 0 for i, driver in enumerate(drivers): total += x[i * len(all_trips) + j] if j < len(primary_trips): - mdl.add_constraint(ct= total == 1 , ctname='primaryTrip' +'_' + str(j)) + mdl.add_constraint(ct=total == 1, ctname='primaryTrip' + '_' + str(j)) else: - mdl.add_constraint(ct= total <=1 , ctname='secondaryTrip' +'_' + str(j)) -print("Number of constraints after required trips" ,mdl.number_of_constraints) + mdl.add_constraint(ct=total <= 1, ctname='secondaryTrip' + '_' + str(j)) +print("Number of constraints after required trips", mdl.number_of_constraints) -#Trips can't overlap for a driver +# Trips can't overlap for a driver for i, driver in 
enumerate(drivers): for j, trip in enumerate(all_trips): - for k, trip2 in enumerate(all_trips[j+1:]): + for k, trip2 in enumerate(all_trips[j + 1:]): l = k + j if trip.start + trip.lp.time >= trip2.start - 0.01041666666: - total = ((x[i * len(all_trips) + l] * x[INT_VARS_OFFSET + i * len(all_trips) + l] - x[i * len(all_trips) + j] * x[INT_VARS_OFFSET + i * len(all_trips) + j]) - + (x[i * len(all_trips) + j]* (x[INT_VARS_OFFSET + i * len(all_trips) + j] + trip.lp.time) - - x[i * len(all_trips) + l]* x[INT_VARS_OFFSET + i * len(all_trips) + l]) - trip.lp.time) - mdl.add_constraint(ct= total <= 0, ctname='tripConflict'+'_' + str(i) +'_' + str(j)+'_' + str(l)) + total = ((x[i * len(all_trips) + l] * x[INT_VARS_OFFSET + i * len(all_trips) + l] - x[ + i * len(all_trips) + j] * x[INT_VARS_OFFSET + i * len(all_trips) + j]) + + (x[i * len(all_trips) + j] * (x[INT_VARS_OFFSET + i * len(all_trips) + j] + trip.lp.time) - + x[i * len(all_trips) + l] * x[INT_VARS_OFFSET + i * len(all_trips) + l]) - trip.lp.time) + mdl.add_constraint(ct=total <= 0, ctname='tripConflict' + '_' + str(i) + '_' + str(j) + '_' + str(l)) else: break -print("Number of constraints after overlap constraints" ,mdl.number_of_constraints) +print("Number of constraints after overlap constraints", mdl.number_of_constraints) -#Trips can't overlap for a driver +# Trips can't overlap for a driver # for i, driver in enumerate(drivers): # for j, trip in enumerate(all_trips): # for k, trip2 in enumerate(all_trips): @@ -186,26 +185,26 @@ # Wheelchair constraint for i, driver in enumerate(drivers): for j, trip in enumerate(all_trips): - total = (x[i * len(all_trips) + j]*trip.space - driver.capacity) - mdl.add_constraint(ct=total <= 0, ctname='capacity'+'_' +str(i)+'_' +str(j)) -print("Number of constraints after wheelchair capacity" ,mdl.number_of_constraints) + total = (x[i * len(all_trips) + j] * trip.space - driver.capacity) + mdl.add_constraint(ct=total <= 0, ctname='capacity' + '_' + str(i) + '_' + str(j)) +print("Number of constraints after wheelchair capacity", mdl.number_of_constraints) # Pickup at most 15 mins before for primary trips for i, driver in enumerate(drivers): for j, trip in enumerate(all_trips): if trip in primary_trips: total = ((trip.start - 0.01041666666) - x[INT_VARS_OFFSET + i * len(all_trips) + j]) - mdl.add_constraint(ct= total <= 0,ctname='pickup' +'_' + str(i)+'_' + str(j)) -print("Number of constraints after pickup time constraint" ,mdl.number_of_constraints) + mdl.add_constraint(ct=total <= 0, ctname='pickup' + '_' + str(i) + '_' + str(j)) +print("Number of constraints after pickup time constraint", mdl.number_of_constraints) # Dropoff by the required time for primary trips for i, driver in enumerate(drivers): for j, trip in enumerate(all_trips): if trip in primary_trips: total = ((x[INT_VARS_OFFSET + i * len(all_trips) + j] + trip.lp.time) - trip.end) - mdl.add_constraint(ct= total <= 0,ctname='dropoff'+'_' +str(i)+'_' + str(j)) + mdl.add_constraint(ct=total <= 0, ctname='dropoff' + '_' + str(i) + '_' + str(j)) -print("Number of constraints after dropoff time constraint" ,mdl.number_of_constraints) +print("Number of constraints after dropoff time constraint", mdl.number_of_constraints) total = 0.0 for i, driver in enumerate(drivers): @@ -222,17 +221,20 @@ print("Obj value: " + str(mdl.objective_value)) for var in x: - print(var.get_name() + ": "+ str(var.solution_value)) + print(var.get_name() + ": " + str(var.solution_value)) with open("modeltrips.txt", "w+") as o: o.write("Trip_id, start, end, pickup, 
dropoff\n") for i, trip in enumerate(all_trips): - o.write(str(trip.id) + "," + str(trip.lp.o) + "," + str(trip.lp.d) + "," + str(trip.start) + "," + str(trip.end) +"\n") + o.write(str(trip.id) + "," + str(trip.lp.o) + "," + str(trip.lp.d) + "," + str(trip.start) + "," + str( + trip.end) + "\n") with open("modelsoln.txt", "w+") as o: o.write("Driver_id, Trip_id, Time\n") for i, driver in enumerate(drivers): for j, trip in enumerate(all_trips): if x[i * len(all_trips) + j].solution_value == 1: - o.write(driver.name + "," + str(trip.id) + "," + str(x[INT_VARS_OFFSET + i * len(all_trips) + j].solution_value) + "\n") - print("Driver ", driver.name, " goes from ", trip.lp.o, " to ", trip.lp.d, " at ", x[INT_VARS_OFFSET + i * len(all_trips) + j].solution_value ) + o.write(driver.name + "," + str(trip.id) + "," + str( + x[INT_VARS_OFFSET + i * len(all_trips) + j].solution_value) + "\n") + print("Driver ", driver.name, " goes from ", trip.lp.o, " to ", trip.lp.d, " at ", + x[INT_VARS_OFFSET + i * len(all_trips) + j].solution_value) diff --git a/legacy/iteration2.py b/legacy/iteration2.py index a7502fb..0843c8b 100644 --- a/legacy/iteration2.py +++ b/legacy/iteration2.py @@ -1,16 +1,17 @@ +from datetime import datetime + import pandas as pd from Driver import Driver from Trip import Trip, TripType from docplex.mp.model import Model -import time -from datetime import datetime + def f(driver): def filt(trip): return not (trip.lp.o in driverLocations and trip.lp.o != driver.address) or ( - trip.lp.d in driverLocations and trip.lp.d != driver.address) - return filt + trip.lp.d in driverLocations and trip.lp.d != driver.address) + return filt print("Started", datetime.now()) @@ -53,13 +54,13 @@ def filt(trip): else: type = TripType.D if type == TripType.D and start == 0: - start = last_trip.end + (1/24) + start = last_trip.end + (1 / 24) cap = 1 if row['trip_los'] == 'A' else 1.5 locations.add(o) locations.add(d) t = Trip(o, d, cap, id, type, start, end) primary_trips.add(t) - location_pair.add((o,d)) + location_pair.add((o, d)) if o not in outlfow_trips: outlfow_trips[o] = {t} else: @@ -82,7 +83,7 @@ def filt(trip): for o in locations: for d in locations: - if o != d and (o,d) not in location_pair: + if o != d and (o, d) not in location_pair: if o in driverLocations and d in driverLocations: continue if o in driverLocations: @@ -149,11 +150,12 @@ def filt(trip): print("Trip details") for i, trip in enumerate(all_trips): if trip.type == TripType.B or trip.type == TripType.D: - print("Primary trip", i, "FROM",trip.lp.o, "TO" ,trip.lp.d, "pickup by", trip.start, "drop off by", trip.end ) + print("Primary trip", i, "FROM", trip.lp.o, "TO", trip.lp.d, "pickup by", trip.start, "drop off by", trip.end) elif trip.type == TripType.INTER_A or trip.type == TripType.INTER_B: - print("Driver Home trip", i, "FROM",trip.lp.o, "TO" ,trip.lp.d, "pickup by", trip.start, "drop off by", trip.end ) + print("Driver Home trip", i, "FROM", trip.lp.o, "TO", trip.lp.d, "pickup by", trip.start, "drop off by", + trip.end) else: - print("Secondary trip", i, "FROM",trip.lp.o, "TO" ,trip.lp.d, "pickup by", trip.start, "drop off by", trip.end ) + print("Secondary trip", i, "FROM", trip.lp.o, "TO", trip.lp.d, "pickup by", trip.start, "drop off by", trip.end) print("Locataion details") for i, loc in enumerate(locations): @@ -170,12 +172,12 @@ def filt(trip): valid_trips = 0 for i, driver in enumerate(drivers): for j, trip in enumerate(filter(f(driver), all_trips)): - x.append(mdl.binary_var(name='y' +'_' + str(i) +'_' + str(j))) + 
x.append(mdl.binary_var(name='y' + '_' + str(i) + '_' + str(j))) valid_trips += 1 for i, driver in enumerate(drivers): for j, trip in enumerate(filter(f(driver), all_trips)): - x.append(mdl.continuous_var(lb=0, ub=1, name='t' +'_' + str(i) +'_' + str(j))) + x.append(mdl.continuous_var(lb=0, ub=1, name='t' + '_' + str(i) + '_' + str(j))) indices = {driver: {k: v for v, k in enumerate(filter(f(driver), all_trips))} for driver in drivers} for k, v in indices.items(): @@ -185,11 +187,11 @@ def filt(trip): print(x) valid_trips //= len(drivers) -INT_VARS_OFFSET = len(x)//2 +INT_VARS_OFFSET = len(x) // 2 print("Number of variables: ", mdl.number_of_variables) -#Inflow = outflow for all locations +# Inflow = outflow for all locations for i, d in enumerate(drivers): for loc in locations: total = 0.0 @@ -201,10 +203,11 @@ def filt(trip): if (otrip.lp.o in driverLocations and otrip.lp.o != d.address) or ( otrip.lp.d in driverLocations and otrip.lp.d != d.address): continue total -= x[i * len(all_trips) + indices[d][otrip]] - mdl.add_constraint(ct= total == 0 , ctname='flowinout' + '_' + str(loc)[:5] + '_' + str(i)) -print("Number of constraints after flow in = flow out" , mdl.number_of_constraints) + mdl.add_constraint(ct=total == 0, ctname='flowinout' + '_' + str(loc)[:5] + '_' + str(i)) +print("Number of constraints after flow in = flow out", mdl.number_of_constraints) -type_conflicts = {(TripType.INTER_A, TripType.B), (TripType.B, TripType.C), (TripType.C, TripType.D), (TripType.D, TripType.INTER_B), +type_conflicts = {(TripType.INTER_A, TripType.B), (TripType.B, TripType.C), (TripType.C, TripType.D), + (TripType.D, TripType.INTER_B), (TripType.A, TripType.B), } driver_type_conflicts = {(TripType.INTER_A, TripType.INTER_B)} # Inflow before outflow for all locations except driver home --- can't figure this out ---- @@ -226,7 +229,10 @@ def filt(trip): print("Intrip:", intrip.lp.o, intrip.lp.d, intrip.start, intrip.end, intrip.type) print("Otrip:", otrip.lp.o, otrip.lp.d, otrip.start, otrip.end, otrip.type) # mdl.add_if_then(if_ct=x[i * valid_trips + indices[d][intrip]] == x[i * valid_trips + indices[d][otrip]], then_ct=x[INT_VARS_OFFSET + i * valid_trips + indices[d][intrip]] + intrip.lp.time <= x[INT_VARS_OFFSET + i * valid_trips + indices[d][otrip]]) - mdl.add_constraint(ct=x[INT_VARS_OFFSET + i * valid_trips + indices[d][intrip]] + intrip.lp.time <= x[INT_VARS_OFFSET + i * valid_trips + indices[d][otrip]], ctname='tripord' + '_' + str(i) + '_' + str(intrip.id) + '_' + str(otrip.id)) + mdl.add_constraint( + ct=x[INT_VARS_OFFSET + i * valid_trips + indices[d][intrip]] + intrip.lp.time <= x[ + INT_VARS_OFFSET + i * valid_trips + indices[d][otrip]], + ctname='tripord' + '_' + str(i) + '_' + str(intrip.id) + '_' + str(otrip.id)) # tot = 0.0 # for i2, od in enumerate(drivers): # if od != d: @@ -243,7 +249,7 @@ def filt(trip): ctname='tripord' + '_' + str(i) + '_' + str(otrip.id) + '_' + str(intrip.id)) # mdl.add_constraint(ct=x[i * valid_trips + indices[otrip]] >= x[i * valid_trips + indices[intrip]], ctname='tripordbool' + '_' + str(i) + '_' + str(otrip.id) + '_' + str(intrip.id)) -print("Number of constraints after flow in before flow out" , mdl.number_of_constraints) +print("Number of constraints after flow in before flow out", mdl.number_of_constraints) # Only one driver per trip for j, trip in enumerate(all_trips[:len(primary_trips) + len(secondary_trips)]): if trip.type != TripType.INTER_B and trip.type != TripType.INTER_A: @@ -256,7 +262,7 @@ def filt(trip): mdl.add_constraint(ct=total == 1, 
ctname='primaryTrip' + '_' + str(j)) # else: # mdl.add_constraint(ct=total <= 1, ctname='secondaryTrip' + '_' + str(j)) -print("Number of constraints after primary/secondary trips" ,mdl.number_of_constraints) +print("Number of constraints after primary/secondary trips", mdl.number_of_constraints) for i, driver in enumerate(drivers): total_o = 0.0 @@ -271,11 +277,9 @@ def filt(trip): mdl.add_constraint(ct=total_o == 1, ctname='driverFromHome' + '_' + str(i)) mdl.add_constraint(ct=total_d == 1, ctname='driverToHome' + '_' + str(i)) -print("Number of constraints after driver home trips" ,mdl.number_of_constraints) - - +print("Number of constraints after driver home trips", mdl.number_of_constraints) -#Trips can't overlap for a driver +# Trips can't overlap for a driver # for i, driver in enumerate(drivers): # for j, trip in enumerate(all_trips): # for k, trip2 in enumerate(all_trips[j+1:]): @@ -289,7 +293,7 @@ def filt(trip): # break # print("Number of constraints after overlap constraints" ,mdl.number_of_constraints) -#Trips can't overlap for a driver +# Trips can't overlap for a driver # for i, driver in enumerate(drivers): # for j, trip in enumerate(all_trips): # for k, trip2 in enumerate(all_trips): @@ -302,31 +306,31 @@ def filt(trip): for i, driver in enumerate(drivers): for j, trip in enumerate(filter(f(driver), all_trips)): total = (x[i * valid_trips + j] * trip.space - driver.capacity) - mdl.add_constraint(ct=total <= 0, ctname='capacity'+'_' +str(i)+'_' +str(j)) -print("Number of constraints after wheelchair capacity" ,mdl.number_of_constraints) + mdl.add_constraint(ct=total <= 0, ctname='capacity' + '_' + str(i) + '_' + str(j)) +print("Number of constraints after wheelchair capacity", mdl.number_of_constraints) # Pickup at most 15 mins before for primary trips for i, driver in enumerate(drivers): for j, trip in enumerate(filter(f(driver), all_trips)): if trip in primary_trips: total = ((trip.start - 0.01041666666) - x[INT_VARS_OFFSET + i * valid_trips + j]) - mdl.add_constraint(ct= total <= 0,ctname='pickup' +'_' + str(i)+'_' + str(j)) -print("Number of constraints after pickup time constraint" ,mdl.number_of_constraints) + mdl.add_constraint(ct=total <= 0, ctname='pickup' + '_' + str(i) + '_' + str(j)) +print("Number of constraints after pickup time constraint", mdl.number_of_constraints) # Dropoff by the required time for primary trips for i, driver in enumerate(drivers): for j, trip in enumerate(filter(f(driver), all_trips)): if trip in primary_trips: total = ((x[INT_VARS_OFFSET + i * valid_trips + j] + trip.lp.time) - trip.end) - mdl.add_constraint(ct= total <= 0,ctname='dropoff'+'_' +str(i)+'_' + str(j)) + mdl.add_constraint(ct=total <= 0, ctname='dropoff' + '_' + str(i) + '_' + str(j)) -print("Number of constraints after dropoff time constraint" ,mdl.number_of_constraints) +print("Number of constraints after dropoff time constraint", mdl.number_of_constraints) total = 0.0 for i, driver in enumerate(drivers): for j, trip in enumerate(filter(f(driver), all_trips)): total += trip.lp.time * x[i * valid_trips + j] - for k, trip2 in enumerate(filter(f(driver) ,all_trips[j + 1:])): + for k, trip2 in enumerate(filter(f(driver), all_trips[j + 1:])): l = indices[driver][trip2] if trip.end >= trip2.start - 0.01041666666 and trip.end <= trip2.end: total += 100000 * (x[i * valid_trips + l] * x[i * valid_trips + j]) @@ -345,7 +349,7 @@ def filt(trip): try: for var in x: - print(var.get_name() + ": "+ str(var.solution_value)) + print(var.get_name() + ": " + str(var.solution_value)) except 
Exception as e: print(e) pass @@ -353,14 +357,17 @@ def filt(trip): with open("modeltrips.txt", "w+") as o: o.write("Trip_id, start, end, pickup, dropoff, time, type, miles\n") for trip in all_trips: - o.write(str(trip.id) + "," + str(trip.lp.o) + "," + str(trip.lp.d) + "," + str(trip.start) + "," + str(trip.end) + "," + str(trip.lp.time) + "," + str(trip.type) + "," + str(trip.lp.miles) + "\n") + o.write(str(trip.id) + "," + str(trip.lp.o) + "," + str(trip.lp.d) + "," + str(trip.start) + "," + str( + trip.end) + "," + str(trip.lp.time) + "," + str(trip.type) + "," + str(trip.lp.miles) + "\n") with open("modelsoln.txt", "w+") as o: o.write("Driver_id, Trip_id, Time, Trip_type\n") for i, driver in enumerate(drivers): for j, trip in enumerate(filter(f(driver), all_trips)): if x[i * valid_trips + j].solution_value == 1: - o.write(driver.name + "," + str(trip.id) + "," + str(x[INT_VARS_OFFSET + i * valid_trips + j].solution_value) + "," + str(trip.type) + "\n") - print("Driver ", driver.name, " goes from ", trip.lp.o, " to ", trip.lp.d, " at ", x[INT_VARS_OFFSET + i * valid_trips + j].solution_value) + o.write(driver.name + "," + str(trip.id) + "," + str( + x[INT_VARS_OFFSET + i * valid_trips + j].solution_value) + "," + str(trip.type) + "\n") + print("Driver ", driver.name, " goes from ", trip.lp.o, " to ", trip.lp.d, " at ", + x[INT_VARS_OFFSET + i * valid_trips + j].solution_value) print("Ended", datetime.now()) diff --git a/legacy/iteration3.py b/legacy/iteration3.py index 159eb62..84186f7 100644 --- a/legacy/iteration3.py +++ b/legacy/iteration3.py @@ -1,16 +1,17 @@ +from datetime import datetime + import pandas as pd from Driver import Driver from Trip import Trip, TripType from docplex.mp.model import Model -import time -from datetime import datetime + def f(driver): def filt(trip): return not (trip.lp.o in driverLocations and trip.lp.o != driver.address) or ( - trip.lp.d in driverLocations and trip.lp.d != driver.address) - return filt + trip.lp.d in driverLocations and trip.lp.d != driver.address) + return filt print("Started", datetime.now()) @@ -53,13 +54,13 @@ def filt(trip): else: type = TripType.D if type == TripType.D and start == 0: - start = last_trip.end + (1/24) + start = last_trip.end + (1 / 24) cap = 1 if row['trip_los'] == 'A' else 1.5 locations.add(o) locations.add(d) t = Trip(o, d, cap, id, type, start, end) primary_trips.add(t) - location_pair.add((o,d)) + location_pair.add((o, d)) if o not in outlfow_trips: outlfow_trips[o] = {t} else: @@ -82,7 +83,7 @@ def filt(trip): for o in locations: for d in locations: - if o != d and (o,d) not in location_pair: + if o != d and (o, d) not in location_pair: if o in driverLocations and d in driverLocations: continue if o in driverLocations: @@ -148,11 +149,12 @@ def filt(trip): print("Trip details") for i, trip in enumerate(all_trips): if trip.type == TripType.B or trip.type == TripType.D: - print("Primary trip", i, "FROM",trip.lp.o, "TO" ,trip.lp.d, "pickup by", trip.start, "drop off by", trip.end ) + print("Primary trip", i, "FROM", trip.lp.o, "TO", trip.lp.d, "pickup by", trip.start, "drop off by", trip.end) elif trip.type == TripType.INTER_A or trip.type == TripType.INTER_B: - print("Driver Home trip", i, "FROM",trip.lp.o, "TO" ,trip.lp.d, "pickup by", trip.start, "drop off by", trip.end ) + print("Driver Home trip", i, "FROM", trip.lp.o, "TO", trip.lp.d, "pickup by", trip.start, "drop off by", + trip.end) else: - print("Secondary trip", i, "FROM",trip.lp.o, "TO" ,trip.lp.d, "pickup by", trip.start, "drop off by", trip.end ) + 
print("Secondary trip", i, "FROM", trip.lp.o, "TO", trip.lp.d, "pickup by", trip.start, "drop off by", trip.end) print("Locataion details") for i, loc in enumerate(locations): @@ -169,12 +171,12 @@ def filt(trip): valid_trips = 0 for i, driver in enumerate(drivers): for j, trip in enumerate(filter(f(driver), all_trips)): - x.append(mdl.binary_var(name='y' +'_' + str(i) +'_' + str(j))) + x.append(mdl.binary_var(name='y' + '_' + str(i) + '_' + str(j))) valid_trips += 1 for i, driver in enumerate(drivers): for j, trip in enumerate(filter(f(driver), all_trips)): - x.append(mdl.continuous_var(lb=0, ub=1, name='t' +'_' + str(i) +'_' + str(j))) + x.append(mdl.continuous_var(lb=0, ub=1, name='t' + '_' + str(i) + '_' + str(j))) indices = {driver: {k: v for v, k in enumerate(filter(f(driver), all_trips))} for driver in drivers} for k, v in indices.items(): @@ -184,11 +186,11 @@ def filt(trip): print(x) valid_trips //= len(drivers) -INT_VARS_OFFSET = len(x)//2 +INT_VARS_OFFSET = len(x) // 2 print("Number of variables: ", mdl.number_of_variables) -#Inflow = outflow for all locations +# Inflow = outflow for all locations for i, d in enumerate(drivers): for loc in locations: if loc in driverLocations and loc != d.address: @@ -200,15 +202,17 @@ def filt(trip): for otrip in outlfow_trips[loc]: if (otrip.lp.d in driverLocations and otrip.lp.d != d.address): continue total -= x[i * valid_trips + indices[d][otrip]] - mdl.add_constraint(ct= total == 0 , ctname='flowinout' + '_' + str(loc)[:5] + '_' + str(i)) -print("Number of constraints after flow in = flow out" , mdl.number_of_constraints) + mdl.add_constraint(ct=total == 0, ctname='flowinout' + '_' + str(loc)[:5] + '_' + str(i)) +print("Number of constraints after flow in = flow out", mdl.number_of_constraints) -home_type_conflicts = {(TripType.INTER_A, TripType.B), (TripType.INTER_A, TripType.C), (TripType.INTER_A, TripType.INTER_B), +home_type_conflicts = {(TripType.INTER_A, TripType.B), (TripType.INTER_A, TripType.C), + (TripType.INTER_A, TripType.INTER_B), (TripType.A, TripType.B), (TripType.A, TripType.INTER_B), (TripType.D, TripType.C), (TripType.D, TripType.A), (TripType.D, TripType.INTER_B)} -not_homes_type_conflicts = {(TripType.INTER_A, TripType.A), (TripType.INTER_A, TripType.D), (TripType.INTER_A, TripType.INTER_B), - (TripType.C, TripType.D), (TripType.A, TripType.INTER_B), - (TripType.B, TripType.A), (TripType.B, TripType.C), (TripType.D, TripType.INTER_B)} +not_homes_type_conflicts = {(TripType.INTER_A, TripType.A), (TripType.INTER_A, TripType.D), + (TripType.INTER_A, TripType.INTER_B), + (TripType.C, TripType.D), (TripType.A, TripType.INTER_B), + (TripType.B, TripType.A), (TripType.B, TripType.C), (TripType.D, TripType.INTER_B)} driver_type_conflicts = {(TripType.INTER_A, TripType.INTER_B)} # Inflow before outflow for all locations except driver home --- can't figure this out ---- for i, d in enumerate(drivers): @@ -237,11 +241,11 @@ def filt(trip): else: print("Something not sure???????") if otrip.type == TripType.A: - mdl.add_if_then(if_ct=x[i * valid_trips + indices[d][otrip]] == 1, then_ct= Dsum>=1) + mdl.add_if_then(if_ct=x[i * valid_trips + indices[d][otrip]] == 1, then_ct=Dsum >= 1) if otrip.type == TripType.B: - mdl.add_if_then(if_ct=x[i * valid_trips + indices[d][otrip]] == 1, then_ct= Asum + IAsum >= 1) + mdl.add_if_then(if_ct=x[i * valid_trips + indices[d][otrip]] == 1, then_ct=Asum + IAsum >= 1) if otrip.type == TripType.C: - mdl.add_if_then(if_ct=x[i * valid_trips + indices[d][otrip]] == 1, then_ct= Dsum >= 1) + 
mdl.add_if_then(if_ct=x[i * valid_trips + indices[d][otrip]] == 1, then_ct=Dsum >= 1) if otrip.type == TripType.INTER_B: mdl.add_if_then(if_ct=x[i * valid_trips + indices[d][otrip]] == 1, then_ct=IAsum + Dsum >= 1) @@ -267,11 +271,11 @@ def filt(trip): else: print("Something not sure???????") if otrip.type == TripType.A: - mdl.add_if_then(if_ct=x[i * valid_trips + indices[d][otrip]] == 1, then_ct= Bsum>=1) + mdl.add_if_then(if_ct=x[i * valid_trips + indices[d][otrip]] == 1, then_ct=Bsum >= 1) if otrip.type == TripType.C: - mdl.add_if_then(if_ct=x[i * valid_trips + indices[d][otrip]] == 1, then_ct= Bsum >= 1) + mdl.add_if_then(if_ct=x[i * valid_trips + indices[d][otrip]] == 1, then_ct=Bsum >= 1) if otrip.type == TripType.D: - mdl.add_if_then(if_ct=x[i * valid_trips + indices[d][otrip]] == 1, then_ct= IAsum + Csum >= 1) + mdl.add_if_then(if_ct=x[i * valid_trips + indices[d][otrip]] == 1, then_ct=IAsum + Csum >= 1) if otrip.type == TripType.INTER_B: mdl.add_if_then(if_ct=x[i * valid_trips + indices[d][otrip]] == 1, then_ct=IAsum + Bsum >= 1) else: @@ -287,13 +291,16 @@ def filt(trip): # If C == 1, sum of Ds must be >= 1 for all C trips # If I_B == 1, sum of Inter_A and Ds must be >= 1 for all I_B trips - #If A == 1, sum of all Bs must be >= 1 - #If C == 1, sum of all Bs must be >= 1 + # If A == 1, sum of all Bs must be >= 1 + # If C == 1, sum of all Bs must be >= 1 # If D == 1, sum of Inter_As/Cs must be >= 1 # If I_B == 1, sum of Inter_A and Bs must be >= 1 print("Intrip:", intrip.lp.o, intrip.lp.d, intrip.start, intrip.end, intrip.type) print("Otrip:", otrip.lp.o, otrip.lp.d, otrip.start, otrip.end, otrip.type) - mdl.add_if_then(if_ct=x[i * valid_trips + indices[d][intrip]] + x[i * valid_trips + indices[d][otrip]] == 2, then_ct=x[INT_VARS_OFFSET + i * valid_trips + indices[d][intrip]] + intrip.lp.time <= x[INT_VARS_OFFSET + i * valid_trips + indices[d][otrip]]) + mdl.add_if_then( + if_ct=x[i * valid_trips + indices[d][intrip]] + x[i * valid_trips + indices[d][otrip]] == 2, + then_ct=x[INT_VARS_OFFSET + i * valid_trips + indices[d][intrip]] + intrip.lp.time <= x[ + INT_VARS_OFFSET + i * valid_trips + indices[d][otrip]]) # mdl.add_constraint(ct=(x[INT_VARS_OFFSET + i * valid_trips + indices[d][intrip]] + intrip.lp.time -x[INT_VARS_OFFSET + i * valid_trips + indices[d][otrip]]) <= 0, ctname='tripord' + '_' + str(i) + '_' + str(intrip.id) + '_' + str(otrip.id)) # tot = 0.0 # for i2, od in enumerate(drivers): @@ -316,8 +323,7 @@ def filt(trip): # mdl.add_constraint(ct=x[i * valid_trips + indices[d][otrip]] >= x[i * valid_trips + indices[d][intrip]], ctname='tripordbool' + '_' + str(i) + '_' + str(otrip.id) + '_' + str(intrip.id)) # mdl.add_constraint(ct=x[i * valid_trips + indices[d][intrip]]* (x[INT_VARS_OFFSET + i * valid_trips + indices[d][otrip]] + otrip.lp.time) <= x[i * valid_trips + indices[d][otrip]] * x[INT_VARS_OFFSET + i * valid_trips + indices[d][intrip]], ctname='tripord' + '_' + str(i) + '_' + str(intrip.id) + '_' + str(otrip.id)) - -print("Number of constraints after flow in before flow out" , mdl.number_of_constraints) +print("Number of constraints after flow in before flow out", mdl.number_of_constraints) # Only one driver per trip for j, trip in enumerate(all_trips[:len(primary_trips) + len(secondary_trips)]): if trip.type != TripType.INTER_B and trip.type != TripType.INTER_A: @@ -330,7 +336,7 @@ def filt(trip): mdl.add_constraint(ct=total == 1, ctname='primaryTrip' + '_' + str(j)) # else: # mdl.add_constraint(ct=total <= 1, ctname='secondaryTrip' + '_' + str(j)) -print("Number 
of constraints after primary/secondary trips" ,mdl.number_of_constraints) +print("Number of constraints after primary/secondary trips", mdl.number_of_constraints) for i, driver in enumerate(drivers): total_o = 0.0 @@ -345,37 +351,37 @@ def filt(trip): mdl.add_constraint(ct=total_o == 1, ctname='driverFromHome' + '_' + str(i)) mdl.add_constraint(ct=total_d == 1, ctname='driverToHome' + '_' + str(i)) -print("Number of constraints after driver home trips" ,mdl.number_of_constraints) +print("Number of constraints after driver home trips", mdl.number_of_constraints) # Wheelchair constraint for i, driver in enumerate(drivers): for j, trip in enumerate(filter(f(driver), all_trips)): total = (x[i * valid_trips + j] * trip.space - driver.capacity) - mdl.add_constraint(ct=total <= 0, ctname='capacity'+'_' +str(i)+'_' +str(j)) -print("Number of constraints after wheelchair capacity" ,mdl.number_of_constraints) + mdl.add_constraint(ct=total <= 0, ctname='capacity' + '_' + str(i) + '_' + str(j)) +print("Number of constraints after wheelchair capacity", mdl.number_of_constraints) # Pickup at most 15 mins before for primary trips for i, driver in enumerate(drivers): for j, trip in enumerate(filter(f(driver), all_trips)): if trip in primary_trips: total = ((trip.start - 0.01041666666) - x[INT_VARS_OFFSET + i * valid_trips + j]) - mdl.add_constraint(ct= total <= 0,ctname='pickup' +'_' + str(i)+'_' + str(j)) -print("Number of constraints after pickup time constraint" ,mdl.number_of_constraints) + mdl.add_constraint(ct=total <= 0, ctname='pickup' + '_' + str(i) + '_' + str(j)) +print("Number of constraints after pickup time constraint", mdl.number_of_constraints) # Dropoff by the required time for primary trips for i, driver in enumerate(drivers): for j, trip in enumerate(filter(f(driver), all_trips)): if trip in primary_trips: total = ((x[INT_VARS_OFFSET + i * valid_trips + j] + trip.lp.time) - trip.end) - mdl.add_constraint(ct= total <= 0,ctname='dropoff'+'_' +str(i)+'_' + str(j)) + mdl.add_constraint(ct=total <= 0, ctname='dropoff' + '_' + str(i) + '_' + str(j)) -print("Number of constraints after dropoff time constraint" ,mdl.number_of_constraints) +print("Number of constraints after dropoff time constraint", mdl.number_of_constraints) total = 0.0 for i, driver in enumerate(drivers): for j, trip in enumerate(filter(f(driver), all_trips)): total += trip.lp.time * x[i * valid_trips + j] - for k, trip2 in enumerate(filter(f(driver) ,all_trips[j + 1:])): + for k, trip2 in enumerate(filter(f(driver), all_trips[j + 1:])): l = indices[driver][trip2] # if trip.end >= trip2.start - 0.01041666666 and trip.end <= trip2.end: # total += 100000 * (x[i * valid_trips + l] * x[i * valid_trips + j]) @@ -394,7 +400,7 @@ def filt(trip): try: for var in x: - print(var.get_name() + ": "+ str(var.solution_value)) + print(var.get_name() + ": " + str(var.solution_value)) except Exception as e: print(e) pass @@ -402,14 +408,17 @@ def filt(trip): with open("modeltrips.txt", "w+") as o: o.write("Trip_id, start, end, pickup, dropoff, time, type, miles\n") for trip in all_trips: - o.write(str(trip.id) + "," + str(trip.lp.o) + "," + str(trip.lp.d) + "," + str(trip.start) + "," + str(trip.end) + "," + str(trip.lp.time) + "," + str(trip.type) + "," + str(trip.lp.miles) + "\n") + o.write(str(trip.id) + "," + str(trip.lp.o) + "," + str(trip.lp.d) + "," + str(trip.start) + "," + str( + trip.end) + "," + str(trip.lp.time) + "," + str(trip.type) + "," + str(trip.lp.miles) + "\n") with open("modelsoln.txt", "w+") as o: o.write("Driver_id, 
Trip_id, Time, Trip_type\n") for i, driver in enumerate(drivers): for j, trip in enumerate(filter(f(driver), all_trips)): if x[i * valid_trips + j].solution_value == 1: - o.write(driver.name + "," + str(trip.id) + "," + str(x[INT_VARS_OFFSET + i * valid_trips + j].solution_value) + "," + str(trip.type) + "\n") - print("Driver ", driver.name, " goes from ", trip.lp.o, " to ", trip.lp.d, " at ", x[INT_VARS_OFFSET + i * valid_trips + j].solution_value) + o.write(driver.name + "," + str(trip.id) + "," + str( + x[INT_VARS_OFFSET + i * valid_trips + j].solution_value) + "," + str(trip.type) + "\n") + print("Driver ", driver.name, " goes from ", trip.lp.o, " to ", trip.lp.d, " at ", + x[INT_VARS_OFFSET + i * valid_trips + j].solution_value) print("Ended", datetime.now()) diff --git a/legacy/iteration3final.py b/legacy/iteration3final.py index 5904dfc..dd1263a 100644 --- a/legacy/iteration3final.py +++ b/legacy/iteration3final.py @@ -1,15 +1,17 @@ +from datetime import datetime + import pandas as pd from Driver import Driver from Trip import Trip, TripType from docplex.mp.model import Model -from datetime import datetime + def f(driver): def filt(trip): return not (trip.lp.o in driverLocations and trip.lp.o != driver.address) or ( - trip.lp.d in driverLocations and trip.lp.d != driver.address) - return filt + trip.lp.d in driverLocations and trip.lp.d != driver.address) + return filt print("Started", datetime.now()) @@ -37,9 +39,9 @@ def filt(trip): NUM_DRIVERS = 2 FIFTEEN = 0.01041666666 THIRTY = FIFTEEN * 2 -TWENTY = FIFTEEN * (4/3) -TEN = FIFTEEN * (2/3) -FIVE = FIFTEEN * (1/3) +TWENTY = FIFTEEN * (4 / 3) +TEN = FIFTEEN * (2 / 3) +FIVE = FIFTEEN * (1 / 3) last_trip = None count = 0 @@ -59,13 +61,13 @@ def filt(trip): else: type = TripType.D if type == TripType.D and start == 0: - start = last_trip.end + (1/24) + start = last_trip.end + (1 / 24) cap = 1 if row['trip_los'] == 'A' else 1.5 locations.add(o) locations.add(d) t = Trip(o, d, cap, id, type, start, end) primary_trips.add(t) - location_pair.add((o,d)) + location_pair.add((o, d)) if o not in outlfow_trips: outlfow_trips[o] = {t} else: @@ -90,10 +92,9 @@ def filt(trip): if count == NUM_DRIVERS: break - for o in locations: for d in locations: - if o != d and (o,d) not in location_pair: + if o != d and (o, d) not in location_pair: if o in driverLocations and d in driverLocations: continue if o in driverLocations: @@ -159,11 +160,12 @@ def filt(trip): print("Trip details") for i, trip in enumerate(all_trips): if trip.type == TripType.B or trip.type == TripType.D: - print("Primary trip", i, "FROM",trip.lp.o, "TO" ,trip.lp.d, "pickup by", trip.start, "drop off by", trip.end ) + print("Primary trip", i, "FROM", trip.lp.o, "TO", trip.lp.d, "pickup by", trip.start, "drop off by", trip.end) elif trip.type == TripType.INTER_A or trip.type == TripType.INTER_B: - print("Driver Home trip", i, "FROM",trip.lp.o, "TO" ,trip.lp.d, "pickup by", trip.start, "drop off by", trip.end ) + print("Driver Home trip", i, "FROM", trip.lp.o, "TO", trip.lp.d, "pickup by", trip.start, "drop off by", + trip.end) else: - print("Secondary trip", i, "FROM",trip.lp.o, "TO" ,trip.lp.d, "pickup by", trip.start, "drop off by", trip.end ) + print("Secondary trip", i, "FROM", trip.lp.o, "TO", trip.lp.d, "pickup by", trip.start, "drop off by", trip.end) print("Locataion details") for i, loc in enumerate(locations): @@ -180,12 +182,12 @@ def filt(trip): valid_trips = 0 for i, driver in enumerate(drivers): for j, trip in enumerate(filter(f(driver), all_trips)): - 
x.append(mdl.binary_var(name='y' +'_' + str(i) +'_' + str(j))) + x.append(mdl.binary_var(name='y' + '_' + str(i) + '_' + str(j))) valid_trips += 1 for i, driver in enumerate(drivers): for j, trip in enumerate(filter(f(driver), all_trips)): - x.append(mdl.continuous_var(lb=0, ub=1, name='t' +'_' + str(i) +'_' + str(j))) + x.append(mdl.continuous_var(lb=0, ub=1, name='t' + '_' + str(i) + '_' + str(j))) indices = {driver: {k: v for v, k in enumerate(filter(f(driver), all_trips))} for driver in drivers} for k, v in indices.items(): @@ -195,11 +197,11 @@ def filt(trip): print(x) valid_trips //= len(drivers) -INT_VARS_OFFSET = len(x)//2 +INT_VARS_OFFSET = len(x) // 2 print("Number of variables: ", mdl.number_of_variables) -#Inflow = outflow for all locations +# Inflow = outflow for all locations for i, d in enumerate(drivers): for loc in locations: if loc in driverLocations and loc != d.address: @@ -211,15 +213,16 @@ def filt(trip): for otrip in outlfow_trips[loc]: if (otrip.lp.d in driverLocations and otrip.lp.d != d.address): continue total -= x[i * valid_trips + indices[d][otrip]] - mdl.add_constraint(ct= total == 0 , ctname='flowinout' + '_' + str(loc)[:5] + '_' + str(i)) -print("Number of constraints after flow in = flow out" , mdl.number_of_constraints) + mdl.add_constraint(ct=total == 0, ctname='flowinout' + '_' + str(loc)[:5] + '_' + str(i)) +print("Number of constraints after flow in = flow out", mdl.number_of_constraints) home_type_conflicts = {(TripType.INTER_A, TripType.B), (TripType.INTER_A, TripType.INTER_B), (TripType.A, TripType.B), (TripType.D, TripType.C), (TripType.D, TripType.A), (TripType.D, TripType.INTER_B)} -not_homes_type_conflicts = {(TripType.INTER_A, TripType.A), (TripType.INTER_A, TripType.D), (TripType.INTER_A, TripType.INTER_B), - (TripType.C, TripType.D), - (TripType.B, TripType.A), (TripType.B, TripType.C), (TripType.B, TripType.INTER_B)} +not_homes_type_conflicts = {(TripType.INTER_A, TripType.A), (TripType.INTER_A, TripType.D), + (TripType.INTER_A, TripType.INTER_B), + (TripType.C, TripType.D), + (TripType.B, TripType.A), (TripType.B, TripType.C), (TripType.B, TripType.INTER_B)} driver_type_conflicts = {(TripType.INTER_A, TripType.INTER_B)} # Inflow before outflow for all locations except driver home --- can't figure this out ---- for i, d in enumerate(drivers): @@ -248,11 +251,11 @@ def filt(trip): else: print("Something not sure???????") if otrip.type == TripType.A: - mdl.add_if_then(if_ct=x[i * valid_trips + indices[d][otrip]] == 1, then_ct= Dsum>=1) + mdl.add_if_then(if_ct=x[i * valid_trips + indices[d][otrip]] == 1, then_ct=Dsum >= 1) if otrip.type == TripType.B: - mdl.add_if_then(if_ct=x[i * valid_trips + indices[d][otrip]] == 1, then_ct= Asum + IAsum >= 1) + mdl.add_if_then(if_ct=x[i * valid_trips + indices[d][otrip]] == 1, then_ct=Asum + IAsum >= 1) if otrip.type == TripType.C: - mdl.add_if_then(if_ct=x[i * valid_trips + indices[d][otrip]] == 1, then_ct= Dsum >= 1) + mdl.add_if_then(if_ct=x[i * valid_trips + indices[d][otrip]] == 1, then_ct=Dsum >= 1) if otrip.type == TripType.INTER_B: mdl.add_if_then(if_ct=x[i * valid_trips + indices[d][otrip]] == 1, then_ct=IAsum + Dsum >= 1) @@ -278,11 +281,11 @@ def filt(trip): else: print("Something not sure???????") if otrip.type == TripType.A: - mdl.add_if_then(if_ct=x[i * valid_trips + indices[d][otrip]] == 1, then_ct= Bsum>=1) + mdl.add_if_then(if_ct=x[i * valid_trips + indices[d][otrip]] == 1, then_ct=Bsum >= 1) if otrip.type == TripType.C: - mdl.add_if_then(if_ct=x[i * valid_trips + indices[d][otrip]] == 1, 
then_ct= Bsum >= 1) + mdl.add_if_then(if_ct=x[i * valid_trips + indices[d][otrip]] == 1, then_ct=Bsum >= 1) if otrip.type == TripType.D: - mdl.add_if_then(if_ct=x[i * valid_trips + indices[d][otrip]] == 1, then_ct= IAsum + Csum +Bsum >= 1) + mdl.add_if_then(if_ct=x[i * valid_trips + indices[d][otrip]] == 1, then_ct=IAsum + Csum + Bsum >= 1) if otrip.type == TripType.INTER_B: mdl.add_if_then(if_ct=x[i * valid_trips + indices[d][otrip]] == 1, then_ct=IAsum + Bsum >= 1) else: @@ -299,17 +302,23 @@ def filt(trip): mdl.add_if_then(if_ct=x[i * valid_trips + indices[d][intrip]] + x[ i * valid_trips + indices[d][otrip]] == 2, then_ct=x[INT_VARS_OFFSET + i * valid_trips + indices[d][ - intrip]] + intrip.lp.time + 0 <= x[ + intrip]] + intrip.lp.time + 0 <= + x[ INT_VARS_OFFSET + i * valid_trips + indices[d][otrip]]) else: - mdl.add_if_then(if_ct=x[i * valid_trips + indices[d][intrip]] + x[i * valid_trips + indices[d][otrip]] == 2, then_ct=x[INT_VARS_OFFSET + i * valid_trips + indices[d][intrip]] + intrip.lp.time <= x[INT_VARS_OFFSET + i * valid_trips + indices[d][otrip]]) + mdl.add_if_then(if_ct=x[i * valid_trips + indices[d][intrip]] + x[ + i * valid_trips + indices[d][otrip]] == 2, then_ct=x[INT_VARS_OFFSET + i * valid_trips + + indices[d][ + intrip]] + intrip.lp.time <= x[ + INT_VARS_OFFSET + i * valid_trips + + indices[d][otrip]]) # mdl.add_constraint(ct=(x[INT_VARS_OFFSET + i * valid_trips + indices[d][intrip]] + intrip.lp.time -x[INT_VARS_OFFSET + i * valid_trips + indices[d][otrip]]) <= 0, ctname='tripord' + '_' + str(i) + '_' + str(intrip.id) + '_' + str(otrip.id)) # mdl.add_constraint(ct=x[i * valid_trips + indices[d][intrip]] >= x[i * valid_trips + indices[d][otrip]], ctname='tripordbool' + '_' + str(i) + '_' + str(intrip.id) + '_' + str(otrip.id)) # mdl.add_constraint(ct=x[i * valid_trips + indices[d][otrip]]* (x[INT_VARS_OFFSET + i * valid_trips + indices[d][intrip]] + intrip.lp.time) <= x[i * valid_trips + indices[d][intrip]] * x[INT_VARS_OFFSET + i * valid_trips + indices[d][otrip]], ctname='tripord' + '_' + str(i) + '_' + str(intrip.id) + '_' + str(otrip.id)) # mdl.add_if_then(if_ct=x[i * valid_trips + indices[d][intrip]] >= x[i * valid_trips + indices[d][otrip]], then_ct=x[INT_VARS_OFFSET + i * valid_trips + indices[d][intrip]] + intrip.lp.time <= x[INT_VARS_OFFSET + i * valid_trips + indices[d][otrip]]) -print("Number of constraints after flow in before flow out" , mdl.number_of_constraints) +print("Number of constraints after flow in before flow out", mdl.number_of_constraints) # Only one driver per trip for j, trip in enumerate(all_trips[:len(primary_trips) + len(secondary_trips)]): @@ -321,7 +330,7 @@ def filt(trip): total += x[i * valid_trips + j] if j < len(primary_trips): mdl.add_constraint(ct=total == 1, ctname='primaryTrip' + '_' + str(j)) -print("Number of constraints after primary/secondary trips" ,mdl.number_of_constraints) +print("Number of constraints after primary/secondary trips", mdl.number_of_constraints) for i, driver in enumerate(drivers): total_o = 0.0 @@ -336,31 +345,31 @@ def filt(trip): mdl.add_constraint(ct=total_o == 1, ctname='driverFromHome' + '_' + str(i)) mdl.add_constraint(ct=total_d == 1, ctname='driverToHome' + '_' + str(i)) -print("Number of constraints after driver home trips" ,mdl.number_of_constraints) +print("Number of constraints after driver home trips", mdl.number_of_constraints) # Wheelchair constraint for i, driver in enumerate(drivers): for j, trip in enumerate(filter(f(driver), all_trips)): total = (x[i * valid_trips + j] * trip.space - 
driver.capacity) - mdl.add_constraint(ct=total <= 0, ctname='capacity'+'_' +str(i)+'_' +str(j)) -print("Number of constraints after wheelchair capacity" ,mdl.number_of_constraints) + mdl.add_constraint(ct=total <= 0, ctname='capacity' + '_' + str(i) + '_' + str(j)) +print("Number of constraints after wheelchair capacity", mdl.number_of_constraints) # Pickup at most 15 mins before for primary trips for i, driver in enumerate(drivers): for j, trip in enumerate(filter(f(driver), all_trips)): if trip in primary_trips: total = ((trip.start - FIFTEEN) - x[INT_VARS_OFFSET + i * valid_trips + j]) - mdl.add_constraint(ct= total <= 0,ctname='pickup' +'_' + str(i)+'_' + str(j)) -print("Number of constraints after pickup time constraint" ,mdl.number_of_constraints) + mdl.add_constraint(ct=total <= 0, ctname='pickup' + '_' + str(i) + '_' + str(j)) +print("Number of constraints after pickup time constraint", mdl.number_of_constraints) # Dropoff by the required time for primary trips for i, driver in enumerate(drivers): for j, trip in enumerate(filter(f(driver), all_trips)): if trip in primary_trips: total = ((x[INT_VARS_OFFSET + i * valid_trips + j] + trip.lp.time) - trip.end) - mdl.add_constraint(ct= total <= 0,ctname='dropoff'+'_' +str(i)+'_' + str(j)) + mdl.add_constraint(ct=total <= 0, ctname='dropoff' + '_' + str(i) + '_' + str(j)) -print("Number of constraints after dropoff time constraint" ,mdl.number_of_constraints) +print("Number of constraints after dropoff time constraint", mdl.number_of_constraints) total = 0.0 for i, driver in enumerate(drivers): @@ -381,7 +390,7 @@ def filt(trip): try: for var in x: - print(var.get_name() + ": "+ str(var.solution_value)) + print(var.get_name() + ": " + str(var.solution_value)) except Exception as e: print(e) pass @@ -389,7 +398,8 @@ def filt(trip): with open("modeltrips.txt", "w+") as o: o.write("Trip_id, start, end, pickup, dropoff, time, type, miles\n") for trip in all_trips: - o.write(str(trip.id) + ',"' + str(trip.lp.o) + '","' + str(trip.lp.d) + '",' + str(trip.start) + "," + str(trip.end) + "," + str(trip.lp.time) + "," + str(trip.type) + "," + str(trip.lp.miles) + "\n") + o.write(str(trip.id) + ',"' + str(trip.lp.o) + '","' + str(trip.lp.d) + '",' + str(trip.start) + "," + str( + trip.end) + "," + str(trip.lp.time) + "," + str(trip.type) + "," + str(trip.lp.miles) + "\n") totalMiles = 0 count = 0 @@ -400,8 +410,11 @@ def filt(trip): if x[i * valid_trips + j].solution_value == 1: count += 1 totalMiles += trip.lp.miles - o.write(str(driver.id) + "," + str(trip.id) + "," + str(x[INT_VARS_OFFSET + i * valid_trips + j].solution_value) + "," + str(trip.lp.miles) + "," + str(trip.lp.time) + "," + str(trip.type) + ',"' + str(trip.lp.o) + '","' + str(trip.lp.d) + '"\n') - print("Driver ", driver.id, " goes from ", trip.lp.o, " to ", trip.lp.d, " at ", x[INT_VARS_OFFSET + i * valid_trips + j].solution_value) + o.write(str(driver.id) + "," + str(trip.id) + "," + str( + x[INT_VARS_OFFSET + i * valid_trips + j].solution_value) + "," + str(trip.lp.miles) + "," + str( + trip.lp.time) + "," + str(trip.type) + ',"' + str(trip.lp.o) + '","' + str(trip.lp.d) + '"\n') + print("Driver ", driver.id, " goes from ", trip.lp.o, " to ", trip.lp.d, " at ", + x[INT_VARS_OFFSET + i * valid_trips + j].solution_value) print("Number of trips", count) print("Total miles traveled", totalMiles) print("Ended", datetime.now()) diff --git a/legacy/iteration4.py b/legacy/iteration4.py index 193e8bb..a0bf41f 100644 --- a/legacy/iteration4.py +++ b/legacy/iteration4.py @@ -1,25 +1,25 @@ 
-import pandas as pd -from docloud.status import JobSolveStatus -from docplex.mp.basic import Priority -from docplex.mp.utils import DOcplexException -from constants import * +from datetime import datetime +import pandas as pd from Driver import Driver from Trip import Trip, TripType, InvalidTripException +from constants import * +from docloud.status import JobSolveStatus from docplex.mp.model import Model -from docplex.mp.progress import ProgressListener - -from datetime import datetime - +from docplex.mp.utils import DOcplexException from listeners import TimeListener, GapListener -NUM_TRIPS = 55 # float('inf') -NUM_DRIVERS = float('inf') -invalid_trips = {(-1, -1.5), (-1.5,-1), (1, 1.5), (1, -1.5)} +NUM_TRIPS = 55 # float('inf') +NUM_DRIVERS = float('inf') +invalid_trips = {(-1, -1.5), (-1.5, -1), (1, 1.5), (1, -1.5)} + def filtered(d, iter): - return filter(lambda t: not ((t.lp.o in driverNodes and t.lp.o[3:] != d.address) or (t.lp.d in driverNodes and t.lp.d[3:] != d.address)) and t.los in d.los - and not ( abs(nodeCaps[t.lp.o] + nodeCaps[t.lp.d]) > d.capacity ) , iter) + return filter(lambda t: not ((t.lp.o in driverNodes and t.lp.o[3:] != d.address) or ( + t.lp.d in driverNodes and t.lp.d[3:] != d.address)) and t.los in d.los + and not (abs(nodeCaps[t.lp.o] + nodeCaps[t.lp.d]) > d.capacity), iter) + + # ((abs(nodeCaps[t.lp.o]) <= d.capacity + nodeCaps[t.lp.d]) and (d.capacity >= nodeCaps[t.lp.o] + nodeCaps[t.lp.d])) @@ -32,31 +32,30 @@ def filtered(d, iter): mdl = Model(name="Patient Transport") # Input Data Structures -drivers = list() # List of all Drivers +drivers = list() # List of all Drivers primary_trips = set() -all_trips = dict() # Maps Trip-ID to Trip Object -driverNodes = set() # All Driver Nodes -driverStart = set() # Starting Nodes of Driver -driverEnd = set() # Ending Nodes of Driver - -requestNodes = set() # Nodes of request trips -requestStart = set() # Starting nodes of request trips -requestEnd = set() # Ending nodes of request trips -requestPair = dict() # Map from request start to request end -nodeCaps = dict() # Map from node to capacity delta -nodeDeps = dict() # Earliest departure time from a node -nodeArrs = dict() # earliest arrival time to a node -primaryTID = dict() # Map from starting location to ID of primary trip from that location +all_trips = dict() # Maps Trip-ID to Trip Object +driverNodes = set() # All Driver Nodes +driverStart = set() # Starting Nodes of Driver +driverEnd = set() # Ending Nodes of Driver + +requestNodes = set() # Nodes of request trips +requestStart = set() # Starting nodes of request trips +requestEnd = set() # Ending nodes of request trips +requestPair = dict() # Map from request start to request end +nodeCaps = dict() # Map from node to capacity delta +nodeDeps = dict() # Earliest departure time from a node +nodeArrs = dict() # earliest arrival time to a node +primaryTID = dict() # Map from starting location to ID of primary trip from that location # Decision Variable Structures -trips = dict() # Map from driver to map of trip to model variable -times = dict() # Map from driver to map of trip to model variable -caps = dict() # Map from driver to map of trip to model variable +trips = dict() # Map from driver to map of trip to model variable +times = dict() # Map from driver to map of trip to model variable +caps = dict() # Map from driver to map of trip to model variable # Additional Structures -intrips = dict() # Map from driver to Map from location to list of trips -outtrips = dict() # Map from driver to Map from location to 
list of trips - +intrips = dict() # Map from driver to Map from location to list of trips +outtrips = dict() # Map from driver to Map from location to list of trips # Preprocess input data @@ -114,10 +113,10 @@ def filtered(d, iter): nodeCaps[start] = cap nodeCaps[end] = -cap t = Trip(start, end, cap, id, type, pick, drop, prefix=True, prefixLen=4) - nodeArrs[start] = drop - t.lp.time # 0 + nodeArrs[start] = drop - t.lp.time # 0 nodeDeps[start] = pick # max(0, pick - BUFFER) nodeArrs[end] = drop - nodeDeps[end] = pick + t.lp.time # 0 # max(0, pick - BUFFER) + t.lp.time + nodeDeps[end] = pick + t.lp.time # 0 # max(0, pick - BUFFER) + t.lp.time # nodeArrs[start] = 1 # nodeDeps[start] = 0 # nodeArrs[end] = 1 @@ -138,7 +137,7 @@ def filtered(d, iter): break if len(requestNodes) != 2 * count: - print("Not enough nodes", len(requestNodes), count ) + print("Not enough nodes", len(requestNodes), count) exit(1) print("Number of Trips:", count) @@ -227,13 +226,13 @@ def filtered(d, iter): for t in filtered(d, all_trips.values()): # if (t.lp.o in driverNodes and t.lp.o[2:] != d.address) or (t.lp.d in driverNodes and t.lp.d[2:] != d.address): # continue - trips[d][t] = mdl.binary_var(name='y' +'_' + str(d.id) +'_' + str(t.id)) + trips[d][t] = mdl.binary_var(name='y' + '_' + str(d.id) + '_' + str(t.id)) # if d.los == 'A' and t.los != 'A': # mdl.add_constraint(ct=trips[d][t] == 0) - times[d][t] = mdl.continuous_var(lb=0, ub=1, name='t' +'_' + str(d.id) +'_' + str(t.id)) + times[d][t] = mdl.continuous_var(lb=0, ub=1, name='t' + '_' + str(d.id) + '_' + str(t.id)) mdl.add_constraint(times[d][t] - trips[d][t] <= 0) # mdl.add_equivalence(trips[d][t], times[d][t] > 0) - caps[d][t] = mdl.continuous_var(lb=0, ub=d.capacity, name='q' +'_' + str(d.id) +'_' + str(t.id)) + caps[d][t] = mdl.continuous_var(lb=0, ub=d.capacity, name='q' + '_' + str(d.id) + '_' + str(t.id)) mdl.add_constraint(caps[d][t] - trips[d][t] * d.capacity <= 0) # print(outtrips) @@ -294,9 +293,9 @@ def filtered(d, iter): totalin += trips[d][intrip] for otrip in filtered(d, outtrips[rN]): totalout -= trips[d][otrip] - mdl.add_constraint(ct= totalin <= 1, ctname='flowin' + '_' + str(rN)[:5]) - mdl.add_constraint(ct= totalout >= -1, ctname='flowout' + '_' + str(rN)[:5]) - mdl.add_constraint(ct= totalin + totalout == 0, ctname='flowinout' + '_' + str(rN)[:5]) + mdl.add_constraint(ct=totalin <= 1, ctname='flowin' + '_' + str(rN)[:5]) + mdl.add_constraint(ct=totalout >= -1, ctname='flowout' + '_' + str(rN)[:5]) + mdl.add_constraint(ct=totalin + totalout == 0, ctname='flowinout' + '_' + str(rN)[:5]) for d in drivers: for dS in driverStart: if dS[3:] != d.address: @@ -304,7 +303,7 @@ def filtered(d, iter): total = 0 for otrip in filtered(d, outtrips[dS]): total -= trips[d][otrip] - mdl.add_constraint(ct= total == -1, ctname='driverout' + '_' + str(d.id)) + mdl.add_constraint(ct=total == -1, ctname='driverout' + '_' + str(d.id)) for d in drivers: for dE in driverEnd: if dE[3:] != d.address: @@ -312,7 +311,7 @@ def filtered(d, iter): total = 0 for intrip in filtered(d, intrips[dE]): total += trips[d][intrip] - mdl.add_constraint(ct= total == 1, ctname='driverin' + '_' + str(d.id)) + mdl.add_constraint(ct=total == 1, ctname='driverin' + '_' + str(d.id)) print("Set flow conservation constraints") @@ -335,7 +334,7 @@ def filtered(d, iter): intripTimes += intrip.lp.time * trips[d][intrip] # intripEnds += intrip.end * trips[d][intrip] mdl.add_constraint(intripSum + intripTimes <= intripEnds) - # obj += dropOffPenalty * ((intripSum + intripTimes) - intripEnds) 
+ # obj += dropOffPenalty * ((intripSum + intripTimes) - intripEnds) print("Set arrival time constriants") # for d in drivers: @@ -357,7 +356,7 @@ def filtered(d, iter): # obj += pickupEarlyPenalty * (otripStarts - (otripSum + BUFFER)) # obj += pickupLatePenalty * (otripStarts - (otripSum - BUFFER)) mdl.add_constraint(otripSum + BUFFER >= otripStarts) - mdl.add_constraint(otripSum <= otripStarts + BUFFER) + mdl.add_constraint(otripSum <= otripStarts + BUFFER) print("Set departure time constraints") """ @@ -379,8 +378,8 @@ def filtered(d, iter): for d2 in drivers: for otrip in filtered(d2, outtrips[alt_trip_loc]): osum += times[d2][otrip] - # print(d.id, d2.id, repr(intrip), repr(otrip)) - # mdl.add_indicator(trips[d][intrip], times[d][intrip] + intrip.lp.time <= times[d2][otrip]) + # print(d.id, d2.id, repr(intrip), repr(otrip)) + # mdl.add_indicator(trips[d][intrip], times[d][intrip] + intrip.lp.time <= times[d2][otrip]) mdl.add_constraint(isum + itimeSum <= osum) print("Set primary trip precedence constraints") @@ -403,7 +402,7 @@ def filtered(d, iter): total += d.id * trips[d][intrip] for otrip in filtered(d, outtrips[loc]): total -= d.id * trips[d][otrip] - mdl.add_constraint(ct= total == 0) + mdl.add_constraint(ct=total == 0) for rS in requestStart: rE = requestPair[rS] @@ -413,7 +412,7 @@ def filtered(d, iter): total += d.id * trips[d][intrip] for otrip in filtered(d, outtrips[rS]): total -= d.id * trips[d][otrip] - mdl.add_constraint(ct= total == 0) + mdl.add_constraint(ct=total == 0) print("Set incoming driver is the same as outgoing driver constraints") """ @@ -460,7 +459,7 @@ def filtered(d, iter): print("Warm starting from single rider constraint solution") mdl.add_mip_start(first_solve) mdl.remove_progress_listener(pL) - pL = GapListener(3600*6, 0.01) + pL = GapListener(3600 * 6, 0.01) mdl.add_progress_listener(pL) mdl.solve() print("Final solve status: " + str(mdl.get_solve_status())) @@ -478,8 +477,10 @@ def filtered(d, iter): totalMiles += t.lp.miles print(str(d.id) + " : " + str(t) + " at time ", str(times[d][t].solution_value), "holding ", str(caps[d][t].solution_value), " out of total capacity ", d.capacity) - elif var.solution_value == 0 and (abs(times[d][t].solution_value) >= 0.1 or abs(caps[d][t].solution_value) >= 0.1): - print("Something Wrong, non ok trips with time/or cap not equal to 0", times[d][t].solution_value, caps[d][t].solution_value) + elif var.solution_value == 0 and ( + abs(times[d][t].solution_value) >= 0.1 or abs(caps[d][t].solution_value) >= 0.1): + print("Something Wrong, non ok trips with time/or cap not equal to 0", times[d][t].solution_value, + caps[d][t].solution_value) # for driver_trips in times.values(): # for t, var in driver_trips.items(): # print(var.get_name() + ": " + str(var.solution_value)) @@ -529,5 +530,3 @@ def tripGen(): print("Total Number of primary trip miles by each driver: ") print(driverMiles) print("Ended", datetime.now()) - -