From f7440b935add619838b5e0124bda4d1cf71f7673 Mon Sep 17 00:00:00 2001
From: Abel Soares Siqueira
Date: Wed, 4 Dec 2024 10:14:12 +0100
Subject: [PATCH] Create constraints with new TulipaConstraint object (#928)

* Create constraints with new TulipaConstraint object

Update highest resolution table computation to use union tables and
move highest_... out of dataframes

Create TulipaConstraint structure. Instead of incoming and outgoing
expressions, create a dictionary of expressions inside TulipaConstraint.
This is necessary at the moment because some additional expressions were
being created in addition to incoming_flow and outgoing_flow.

Create TulipaConstraint objects for all other constraints, including the
ones that are copies of variables.

Remove the dataframes variable completely.

Clean up add_..._constraints signatures and related places

* Remove file time-resolution

* Rename add_expressions_to_dataframe

* Better document the lowest resolution computation
---
 src/TulipaEnergyModel.jl                      |   1 -
 src/constraints/capacity.jl                   | 103 +++---
 src/constraints/consumer.jl                   |  18 +-
 src/constraints/conversion.jl                 |  17 +-
 src/constraints/create.jl                     | 174 +++++++++
 src/constraints/energy.jl                     |  18 +-
 src/constraints/hub.jl                        |  14 +-
 .../ramping-and-unit-commitment.jl            |  56 +--
 src/constraints/storage.jl                    |  24 +-
 src/constraints/transport.jl                  |  13 +-
 src/create-model.jl                           |  96 +----
 src/io.jl                                     |  10 +-
 src/model-preparation.jl                      | 343 ++++--------
 src/objective.jl                              |  10 +-
 src/solve-model.jl                            |  11 +-
 src/structures.jl                             |  34 +-
 src/time-resolution.jl                        | 261 -------------
 src/tmp.jl                                    | 265 ++++++--------
 src/variables/create.jl                       |   8 +-
 test/test-pipeline.jl                         |  13 +-
 test/test-time-resolution.jl                  |  43 ---
 21 files changed, 538 insertions(+), 994 deletions(-)
 create mode 100644 src/constraints/create.jl
 delete mode 100644 src/time-resolution.jl
 delete mode 100644 test/test-time-resolution.jl

diff --git a/src/TulipaEnergyModel.jl b/src/TulipaEnergyModel.jl
index 28ada1ae..ee66e418 100644
--- a/src/TulipaEnergyModel.jl
+++ b/src/TulipaEnergyModel.jl
@@ -28,7 +28,6 @@ const to = TimerOutput()
 
 # Definitions and auxiliary files
 include("utils.jl")
-include("time-resolution.jl")
 include("run-scenario.jl")
 include("model-parameters.jl")
 include("structures.jl")
diff --git a/src/constraints/capacity.jl b/src/constraints/capacity.jl
index a249b0e5..dc76fe0c 100644
--- a/src/constraints/capacity.jl
+++ b/src/constraints/capacity.jl
@@ -6,19 +6,7 @@ add_capacity_constraints!(model, graph,...)
Adds the capacity constraints for all asset types to the model """ -function add_capacity_constraints!( - model, - graph, - dataframes, - sets, - variables, - accumulated_initial_units, - accumulated_investment_units_using_simple_method, - accumulated_units, - accumulated_units_compact_method, - outgoing_flow_highest_out_resolution, - incoming_flow_highest_in_resolution, -) +function add_capacity_constraints!(model, variables, constraints, graph, sets) ## unpack from sets Acv = sets[:Acv] Ai = sets[:Ai] @@ -34,10 +22,21 @@ function add_capacity_constraints!( sets[:decommissionable_assets_using_compact_method] decommissionable_assets_using_simple_method = sets[:decommissionable_assets_using_simple_method] + ## unpack from model + accumulated_initial_units = model[:accumulated_initial_units] + accumulated_investment_units_using_simple_method = + model[:accumulated_investment_units_using_simple_method] + accumulated_units = model[:accumulated_units] + accumulated_units_compact_method = model[:accumulated_units_compact_method] + ## unpack from variables flows_indices = variables[:flow].indices flow = variables[:flow].container + ## unpack from constraints + incoming_flow_highest_in_resolution = constraints[:highest_in].expressions[:incoming] + outgoing_flow_highest_out_resolution = constraints[:highest_out].expressions[:outgoing] + ## Expressions used by capacity constraints # - Create capacity limit for outgoing flows assets_profile_times_capacity_out = @@ -52,7 +51,7 @@ function add_capacity_constraints!( row.year, v, ("availability", row.rep_period), - row.timesteps_block, + row.time_block_start:row.time_block_end, 1.0, ) * accumulated_units_compact_method[accumulated_set_using_compact_method_lookup[( @@ -72,13 +71,13 @@ function add_capacity_constraints!( row.year, row.year, ("availability", row.rep_period), - row.timesteps_block, + row.time_block_start:row.time_block_end, 1.0, ) * graph[row.asset].capacity * accumulated_units[accumulated_units_lookup[(row.asset, row.year)]] ) - end for row in eachrow(dataframes[:highest_out]) + end for row in eachrow(constraints[:highest_out].indices) ] # - Create accumulated investment limit for the use of binary storage method with investments @@ -100,14 +99,14 @@ function add_capacity_constraints!( row.year, row.year, ("availability", row.rep_period), - row.timesteps_block, + row.time_block_start:row.time_block_end, 1.0, ) * ( graph[row.asset].capacity * accumulated_initial_units[row.asset, row.year] + accumulated_investment_limit[row.year, row.asset] ) * - (1 - row.is_charging) + (1 - is_charging) ) else @expression( @@ -118,13 +117,16 @@ function add_capacity_constraints!( row.year, row.year, ("availability", row.rep_period), - row.timesteps_block, + row.time_block_start:row.time_block_end, 1.0, ) * (graph[row.asset].capacity * accumulated_initial_units[row.asset, row.year]) * - (1 - row.is_charging) + (1 - is_charging) ) - end for row in eachrow(dataframes[:highest_out]) + end for (row, is_charging) in zip( + eachrow(constraints[:highest_out].indices), + constraints[:highest_out].expressions[:is_charging], + ) ] assets_profile_times_capacity_out_with_binary_part2 = @@ -138,16 +140,19 @@ function add_capacity_constraints!( row.year, row.year, ("availability", row.rep_period), - row.timesteps_block, + row.time_block_start:row.time_block_end, 1.0, ) * ( graph[row.asset].capacity * ( - accumulated_initial_units[row.asset, row.year] * (1 - row.is_charging) + + accumulated_initial_units[row.asset, row.year] * (1 - is_charging) + 
accumulated_investment_units_using_simple_method[row.asset, row.year] ) ) ) - end for row in eachrow(dataframes[:highest_out]) + end for (row, is_charging) in zip( + eachrow(constraints[:highest_out].indices), + constraints[:highest_out].expressions[:is_charging], + ) ] # - Create capacity limit for incoming flows @@ -161,12 +166,12 @@ function add_capacity_constraints!( row.year, row.year, ("availability", row.rep_period), - row.timesteps_block, + row.time_block_start:row.time_block_end, 1.0, ) * graph[row.asset].capacity * accumulated_units[accumulated_units_lookup[(row.asset, row.year)]] - ) for row in eachrow(dataframes[:highest_in]) + ) for row in eachrow(constraints[:highest_in].indices) ] # - Create capacity limit for incoming flows with binary is_charging for storage assets @@ -181,14 +186,14 @@ function add_capacity_constraints!( row.year, row.year, ("availability", row.rep_period), - row.timesteps_block, + row.time_block_start:row.time_block_end, 1.0, ) * ( graph[row.asset].capacity * accumulated_initial_units[row.asset, row.year] + accumulated_investment_limit[row.year, row.asset] ) * - row.is_charging + is_charging ) else @expression( @@ -199,13 +204,16 @@ function add_capacity_constraints!( row.year, row.year, ("availability", row.rep_period), - row.timesteps_block, + row.time_block_start:row.time_block_end, 1.0, ) * (graph[row.asset].capacity * accumulated_initial_units[row.asset, row.year]) * - row.is_charging + is_charging ) - end for row in eachrow(dataframes[:highest_in]) + end for (row, is_charging) in zip( + eachrow(constraints[:highest_in].indices), + constraints[:highest_in].expressions[:is_charging], + ) ] assets_profile_times_capacity_in_with_binary_part2 = @@ -219,16 +227,19 @@ function add_capacity_constraints!( row.year, row.year, ("availability", row.rep_period), - row.timesteps_block, + row.time_block_start:row.time_block_end, 1.0, ) * ( graph[row.asset].capacity * ( - accumulated_initial_units[row.asset, row.year] * row.is_charging + + accumulated_initial_units[row.asset, row.year] * is_charging + accumulated_investment_units_using_simple_method[row.asset, row.year] ) ) ) - end for row in eachrow(dataframes[:highest_in]) + end for (row, is_charging) in zip( + eachrow(constraints[:highest_in].indices), + constraints[:highest_in].expressions[:is_charging], + ) ] ## Capacity limit constraints (using the highest resolution) @@ -238,8 +249,8 @@ function add_capacity_constraints!( model, outgoing_flow_highest_out_resolution[row.index] ≤ assets_profile_times_capacity_out[row.index], - base_name = "max_output_flows_limit[$(row.asset),$(row.year),$(row.rep_period),$(row.timesteps_block)]" - ) for row in eachrow(dataframes[:highest_out]) if + base_name = "max_output_flows_limit[$(row.asset),$(row.year),$(row.rep_period),$(row.time_block_start):$(row.time_block_end)]" + ) for row in eachrow(constraints[:highest_out].indices) if outgoing_flow_highest_out_resolution[row.index] != 0 ] @@ -249,8 +260,8 @@ function add_capacity_constraints!( model, incoming_flow_highest_in_resolution[row.index] ≤ assets_profile_times_capacity_in[row.index], - base_name = "max_input_flows_limit[$(row.asset),$(row.rep_period),$(row.timesteps_block)]" - ) for row in eachrow(dataframes[:highest_in]) if + base_name = "max_input_flows_limit[$(row.asset),$(row.rep_period),$(row.time_block_start):$(row.time_block_end)]" + ) for row in eachrow(constraints[:highest_in].indices) if incoming_flow_highest_in_resolution[row.index] != 0 ] @@ -261,8 +272,8 @@ function add_capacity_constraints!( model, 
outgoing_flow_highest_out_resolution[row.index] ≤ assets_profile_times_capacity_out_with_binary_part1[row.index], - base_name = "max_output_flows_limit_with_binary_part1[$(row.asset),$(row.rep_period),$(row.timesteps_block)]" - ) for row in eachrow(dataframes[:highest_out]) if + base_name = "max_output_flows_limit_with_binary_part1[$(row.asset),$(row.rep_period),$(row.time_block_start):$(row.time_block_end)]" + ) for row in eachrow(constraints[:highest_out].indices) if row.asset ∈ Asb && outgoing_flow_highest_out_resolution[row.index] != 0 ] @@ -271,8 +282,8 @@ function add_capacity_constraints!( model, outgoing_flow_highest_out_resolution[row.index] ≤ assets_profile_times_capacity_out_with_binary_part2[row.index], - base_name = "max_output_flows_limit_with_binary_part2[$(row.asset),$(row.rep_period),$(row.timesteps_block)]" - ) for row in eachrow(dataframes[:highest_out]) if row.asset ∈ Ai[row.year] && + base_name = "max_output_flows_limit_with_binary_part2[$(row.asset),$(row.rep_period),$(row.time_block_start):$(row.time_block_end)]" + ) for row in eachrow(constraints[:highest_out].indices) if row.asset ∈ Ai[row.year] && row.asset ∈ Asb && outgoing_flow_highest_out_resolution[row.index] != 0 ] @@ -283,8 +294,8 @@ function add_capacity_constraints!( model, incoming_flow_highest_in_resolution[row.index] ≤ assets_profile_times_capacity_in_with_binary_part1[row.index], - base_name = "max_input_flows_limit_with_binary_part1[$(row.asset),$(row.rep_period),$(row.timesteps_block)]" - ) for row in eachrow(dataframes[:highest_in]) if + base_name = "max_input_flows_limit_with_binary_part1[$(row.asset),$(row.rep_period),$(row.time_block_start):$(row.time_block_end)]" + ) for row in eachrow(constraints[:highest_in].indices) if row.asset ∈ Asb && incoming_flow_highest_in_resolution[row.index] != 0 ] model[:max_input_flows_limit_with_binary_part2] = [ @@ -292,8 +303,8 @@ function add_capacity_constraints!( model, incoming_flow_highest_in_resolution[row.index] ≤ assets_profile_times_capacity_in_with_binary_part2[row.index], - base_name = "max_input_flows_limit_with_binary_part2[$(row.asset),$(row.rep_period),$(row.timesteps_block)]" - ) for row in eachrow(dataframes[:highest_in]) if row.asset ∈ Ai[row.year] && + base_name = "max_input_flows_limit_with_binary_part2[$(row.asset),$(row.rep_period),$(row.time_block_start):$(row.time_block_end)]" + ) for row in eachrow(constraints[:highest_in].indices) if row.asset ∈ Ai[row.year] && row.asset ∈ Asb && incoming_flow_highest_in_resolution[row.index] != 0 ] diff --git a/src/constraints/consumer.jl b/src/constraints/consumer.jl index 62b64c40..da186088 100644 --- a/src/constraints/consumer.jl +++ b/src/constraints/consumer.jl @@ -12,17 +12,13 @@ add_consumer_constraints!(model, Adds the consumer asset constraints to the model. 
""" -function add_consumer_constraints!( - model, - graph, - dataframes, - sets, - incoming_flow_highest_in_out_resolution, - outgoing_flow_highest_in_out_resolution, -) +function add_consumer_constraints!(model, constraints, graph, sets) Ac = sets[:Ac] + incoming_flow_highest_in_out_resolution = constraints[:highest_in_out].expressions[:incoming] + outgoing_flow_highest_in_out_resolution = constraints[:highest_in_out].expressions[:outgoing] + # - Balance constraint (using the lowest temporal resolution) - df = filter(:asset => ∈(Ac), dataframes[:highest_in_out]; view = true) + df = filter(:asset => ∈(Ac), constraints[:highest_in_out].indices; view = true) model[:consumer_balance] = [ @constraint( model, @@ -34,11 +30,11 @@ function add_consumer_constraints!( row.year, row.year, ("demand", row.rep_period), - row.timesteps_block, + row.time_block_start:row.time_block_end, 1.0, ) * graph[row.asset].peak_demand[row.year] in graph[row.asset].consumer_balance_sense, - base_name = "consumer_balance[$(row.asset),$(row.year),$(row.rep_period),$(row.timesteps_block)]" + base_name = "consumer_balance[$(row.asset),$(row.year),$(row.rep_period),$(row.time_block_start):$(row.time_block_end)]" ) for row in eachrow(df) ] end diff --git a/src/constraints/conversion.jl b/src/constraints/conversion.jl index 12cf6d10..99e2d085 100644 --- a/src/constraints/conversion.jl +++ b/src/constraints/conversion.jl @@ -11,22 +11,17 @@ add_conversion_constraints!(model, Adds the conversion asset constraints to the model. """ -function add_conversion_constraints!( - model, - dataframes, - sets, - incoming_flow_lowest_resolution, - outgoing_flow_lowest_resolution, -) +function add_conversion_constraints!(model, constraints, sets) Acv = sets[:Acv] # - Balance constraint (using the lowest temporal resolution) - df = filter(:asset => ∈(Acv), dataframes[:lowest]; view = true) + df = filter(:asset => ∈(Acv), constraints[:lowest].indices; view = true) + incoming = constraints[:lowest].expressions[:incoming] + outgoing = constraints[:lowest].expressions[:outgoing] model[:conversion_balance] = [ @constraint( model, - incoming_flow_lowest_resolution[row.index] == - outgoing_flow_lowest_resolution[row.index], - base_name = "conversion_balance[$(row.asset),$(row.year),$(row.rep_period),$(row.timesteps_block)]" + incoming[row.index] == outgoing[row.index], + base_name = "conversion_balance[$(row.asset),$(row.year),$(row.rep_period),$(row.time_block_start):$(row.time_block_end)]" ) for row in eachrow(df) ] end diff --git a/src/constraints/create.jl b/src/constraints/create.jl new file mode 100644 index 00000000..56c15a28 --- /dev/null +++ b/src/constraints/create.jl @@ -0,0 +1,174 @@ +export compute_constraints_indices + +function compute_constraints_indices(connection) + constraints = Dict{Symbol,TulipaConstraint}() + + constraints[:lowest] = TulipaConstraint( + DuckDB.query( + connection, + "CREATE OR REPLACE TEMP SEQUENCE id START 1; + SELECT + nextval('id') AS index, + asset.asset, + t_low.year, + t_low.rep_period, + t_low.time_block_start, + t_low.time_block_end, + FROM t_lowest_all_flows AS t_low + LEFT JOIN asset + ON t_low.asset = asset.asset + WHERE + asset.type in ('conversion', 'producer') + ORDER BY + asset.asset, + t_low.year, + t_low.rep_period, + t_low.time_block_start + ", + ) |> DataFrame, + ) + + constraints[:highest_in_out] = TulipaConstraint( + DuckDB.query( + connection, + "CREATE OR REPLACE TEMP SEQUENCE id START 1; + SELECT + nextval('id') AS index, + t_high.* + FROM t_highest_all_flows AS t_high + LEFT JOIN 
asset + ON t_high.asset = asset.asset + LEFT JOIN asset_both + ON t_high.asset = asset_both.asset + AND t_high.year = asset_both.milestone_year + AND t_high.year = asset_both.commission_year + WHERE + asset_both.active = true + AND asset.type in ('hub', 'consumer')", + ) |> DataFrame, + ) + + constraints[:highest_in] = TulipaConstraint( + DuckDB.query( + connection, + "CREATE OR REPLACE TEMP SEQUENCE id START 1; + SELECT + nextval('id') AS index, + t_high.* + FROM t_highest_in_flows AS t_high + LEFT JOIN asset + ON t_high.asset = asset.asset + LEFT JOIN asset_both + ON t_high.asset = asset_both.asset + AND t_high.year = asset_both.milestone_year + AND t_high.year = asset_both.commission_year + WHERE + asset_both.active = true + AND asset.type in ('storage')", + ) |> DataFrame, + ) + + constraints[:highest_out] = TulipaConstraint( + DuckDB.query( + connection, + "CREATE OR REPLACE TEMP SEQUENCE id START 1; + SELECT + nextval('id') AS index, + t_high.* + FROM t_highest_out_flows AS t_high + LEFT JOIN asset + ON t_high.asset = asset.asset + LEFT JOIN asset_both + ON t_high.asset = asset_both.asset + AND t_high.year = asset_both.milestone_year + AND t_high.year = asset_both.commission_year + WHERE + asset_both.active = true + AND asset.type in ('producer', 'storage', 'conversion')", + ) |> DataFrame, + ) + + constraints[:units_on_and_outflows] = TulipaConstraint( + DuckDB.query( + connection, + "CREATE OR REPLACE TEMP SEQUENCE id START 1; + CREATE OR REPLACE TABLE units_on_and_outflows AS + SELECT + nextval('id') AS index, + t_high.* + FROM t_highest_assets_and_out_flows AS t_high + LEFT JOIN asset + ON t_high.asset = asset.asset + LEFT JOIN asset_both + ON t_high.asset = asset_both.asset + AND t_high.year = asset_both.milestone_year + AND t_high.year = asset_both.commission_year + WHERE + asset_both.active = true + AND asset.type in ('producer', 'conversion') + AND asset.unit_commitment = true; + SELECT * FROM units_on_and_outflows + ", + ) |> DataFrame, + ) + + constraints[:storage_level_intra_rp] = TulipaConstraint( + DuckDB.query( + connection, + "SELECT * FROM storage_level_intra_rp + ", + ) |> DataFrame, + ) + + constraints[:storage_level_inter_rp] = TulipaConstraint( + DuckDB.query( + connection, + "SELECT * FROM storage_level_inter_rp + ", + ) |> DataFrame, + ) + + constraints[:min_energy_inter_rp] = TulipaConstraint( + DuckDB.query( + connection, + "CREATE OR REPLACE TEMP SEQUENCE id START 1; + SELECT + nextval('id') AS index, + attr.asset, + attr.year, + attr.period_block_start, + attr.period_block_end, + FROM asset_timeframe_time_resolution AS attr + LEFT JOIN asset_milestone + ON attr.asset = asset_milestone.asset + AND attr.year = asset_milestone.milestone_year + WHERE + asset_milestone.min_energy_timeframe_partition IS NOT NULL + ", + ) |> DataFrame, + ) + + # a -> any(!ismissing, values(a.max_energy_timeframe_partition)), + + constraints[:max_energy_inter_rp] = TulipaConstraint( + DuckDB.query( + connection, + "CREATE OR REPLACE TEMP SEQUENCE id START 1; + SELECT + nextval('id') AS index, + attr.asset, + attr.year, + attr.period_block_start, + attr.period_block_end, + FROM asset_timeframe_time_resolution AS attr + LEFT JOIN asset_milestone + ON attr.asset = asset_milestone.asset + AND attr.year = asset_milestone.milestone_year + WHERE + asset_milestone.max_energy_timeframe_partition IS NOT NULL + ", + ) |> DataFrame, + ) + + return constraints +end diff --git a/src/constraints/energy.jl b/src/constraints/energy.jl index 5ec89212..39dbdee3 100644 --- a/src/constraints/energy.jl 
+++ b/src/constraints/energy.jl
@@ -6,7 +6,7 @@ function add_energy_constraints!(model, graph, dataframes)
 
 Adds the energy constraints for assets within the period blocks of the timeframe (inter-temporal) to the model.
 """
-function add_energy_constraints!(model, graph, dataframes)
+function add_energy_constraints!(model, constraints, graph)
 
     ## INTER-TEMPORAL CONSTRAINTS (between representative periods)
 
@@ -14,35 +14,35 @@ function add_energy_constraints!(model, graph, dataframes)
     model[:max_energy_inter_rp] = [
         @constraint(
             model,
-            dataframes[:max_energy_inter_rp].outgoing_flow[row.index] ≤
+            constraints[:max_energy_inter_rp].expressions[:outgoing][row.index] ≤
             profile_aggregation(
                 sum,
                 graph[row.asset].timeframe_profiles,
                 row.year,
                 row.year,
                 "max_energy",
-                row.periods_block,
+                row.period_block_start:row.period_block_end,
                 1.0,
             ) * (graph[row.asset].max_energy_timeframe_partition[row.year]),
-            base_name = "max_energy_inter_rp_limit[$(row.asset),$(row.year),$(row.periods_block)]"
-        ) for row in eachrow(dataframes[:max_energy_inter_rp])
+            base_name = "max_energy_inter_rp_limit[$(row.asset),$(row.year),$(row.period_block_start):$(row.period_block_end)]"
+        ) for row in eachrow(constraints[:max_energy_inter_rp].indices)
     ]
 
     # - Minimum outgoing energy within each period block
     model[:min_energy_inter_rp] = [
         @constraint(
             model,
-            dataframes[:min_energy_inter_rp].outgoing_flow[row.index] ≥
+            constraints[:min_energy_inter_rp].expressions[:outgoing][row.index] ≥
             profile_aggregation(
                 sum,
                 graph[row.asset].timeframe_profiles,
                 row.year,
                 row.year,
                 "min_energy",
-                row.periods_block,
+                row.period_block_start:row.period_block_end,
                 1.0,
             ) * (graph[row.asset].min_energy_timeframe_partition[row.year]),
-            base_name = "min_energy_inter_rp_limit[$(row.asset),$(row.year),$(row.periods_block)]"
-        ) for row in eachrow(dataframes[:min_energy_inter_rp])
+            base_name = "min_energy_inter_rp_limit[$(row.asset),$(row.year),$(row.period_block_start):$(row.period_block_end)]"
+        ) for row in eachrow(constraints[:min_energy_inter_rp].indices)
     ]
 end
diff --git a/src/constraints/hub.jl b/src/constraints/hub.jl
index caf84f8f..ee51e061 100644
--- a/src/constraints/hub.jl
+++ b/src/constraints/hub.jl
@@ -11,22 +11,18 @@ add_hub_constraints!(model,
 
 Adds the hub asset constraints to the model.
""" -function add_hub_constraints!( - model, - dataframes, - sets, - incoming_flow_highest_in_out_resolution, - outgoing_flow_highest_in_out_resolution, -) +function add_hub_constraints!(model, constraints, sets) Ah = sets[:Ah] + incoming_flow_highest_in_out_resolution = constraints[:highest_in_out].expressions[:incoming] + outgoing_flow_highest_in_out_resolution = constraints[:highest_in_out].expressions[:outgoing] # - Balance constraint (using the lowest temporal resolution) - df = filter(:asset => ∈(Ah), dataframes[:highest_in_out]; view = true) + df = filter(:asset => ∈(Ah), constraints[:highest_in_out].indices; view = true) model[:hub_balance] = [ @constraint( model, incoming_flow_highest_in_out_resolution[row.index] == outgoing_flow_highest_in_out_resolution[row.index], - base_name = "hub_balance[$(row.asset),$(row.year),$(row.rep_period),$(row.timesteps_block)]" + base_name = "hub_balance[$(row.asset),$(row.year),$(row.rep_period),$(row.time_block_start):$(row.time_block_end)]" ) for row in eachrow(df) ] end diff --git a/src/constraints/ramping-and-unit-commitment.jl b/src/constraints/ramping-and-unit-commitment.jl index 22310b42..7720c8cd 100644 --- a/src/constraints/ramping-and-unit-commitment.jl +++ b/src/constraints/ramping-and-unit-commitment.jl @@ -5,22 +5,19 @@ export add_ramping_and_unit_commitment_constraints! Adds the ramping constraints for producer and conversion assets where ramping = true in assets_data """ -function add_ramping_constraints!( - model, - variables, - graph, - df_units_on_and_outflows, - df_highest_out, - outgoing_flow_highest_out_resolution, - accumulated_units, - sets, -) +function add_ramping_constraints!(model, variables, constraints, graph, sets) # unpack from sets Ar = sets[:Ar] Auc = sets[:Auc] Auc_basic = sets[:Auc_basic] accumulated_units_lookup = sets[:accumulated_units_lookup] + ## unpack from model + accumulated_units = model[:accumulated_units] + + ## unpack from constraints + outgoing_flow_highest_out_resolution = constraints[:highest_out].expressions[:outgoing] + ## Expressions used by the ramping and unit commitment constraints # - Expression to have the product of the profile and the capacity paramters profile_times_capacity = [ @@ -32,10 +29,11 @@ function add_ramping_constraints!( row.year, row.year, ("availability", row.rep_period), - row.timesteps_block, + row.time_block_start:row.time_block_end, 1.0, ) * graph[row.asset].capacity - ) for row in eachrow(df_units_on_and_outflows) if is_active(graph, row.asset, row.year) + ) for row in eachrow(constraints[:units_on_and_outflows].indices) if + is_active(graph, row.asset, row.year) ] # - Flow that is above the minimum operating point of the asset @@ -43,11 +41,11 @@ function add_ramping_constraints!( model[:flow_above_min_operating_point] = [ @expression( model, - row.outgoing_flow - + constraints[:units_on_and_outflows].expressions[:outgoing][row.index] - profile_times_capacity[row.index] * graph[row.asset].min_operating_point * - row.units_on - ) for row in eachrow(df_units_on_and_outflows) + constraints[:units_on_and_outflows].expressions[:units_on][row.index] + ) for row in eachrow(constraints[:units_on_and_outflows].indices) ] ## Unit Commitment Constraints (basic implementation - more advanced will be added in 2025) @@ -66,8 +64,8 @@ function add_ramping_constraints!( @constraint( model, flow_above_min_operating_point[row.index] ≥ 0, - base_name = "min_output_flow_with_unit_commitment[$(row.asset),$(row.year),$(row.rep_period),$(row.timesteps_block)]" - ) for row in 
eachrow(df_units_on_and_outflows) + base_name = "min_output_flow_with_unit_commitment[$(row.asset),$(row.year),$(row.rep_period),$(row.time_block_start):$(row.time_block_end)]" + ) for row in eachrow(constraints[:units_on_and_outflows].indices) ] # - Maximum output flow above the minimum operating point @@ -77,18 +75,22 @@ function add_ramping_constraints!( flow_above_min_operating_point[row.index] ≤ (1 - graph[row.asset].min_operating_point) * profile_times_capacity[row.index] * - row.units_on, - base_name = "max_output_flow_with_basic_unit_commitment[$(row.asset),$(row.year),$(row.rep_period),$(row.timesteps_block)]" - ) for row in eachrow(df_units_on_and_outflows) if row.asset ∈ Auc_basic + constraints[:units_on_and_outflows].expressions[:units_on][row.index], + base_name = "max_output_flow_with_basic_unit_commitment[$(row.asset),$(row.year),$(row.rep_period),$(row.time_block_start):$(row.time_block_end)]" + ) for + row in eachrow(constraints[:units_on_and_outflows].indices) if row.asset ∈ Auc_basic ] ## Ramping Constraints with unit commitment # Note: We start ramping constraints from the second timesteps_block # We filter and group the dataframe per asset and representative period - df_grouped = DataFrames.groupby(df_units_on_and_outflows, [:asset, :year, :rep_period]) + df_grouped = DataFrames.groupby( + constraints[:units_on_and_outflows].indices, + [:asset, :year, :rep_period], + ) # get the units on column to get easier the index - 1, i.e., the previous one - units_on = df_units_on_and_outflows.units_on + units_on = constraints[:units_on_and_outflows].expressions[:units_on] #- Maximum ramp-up rate limit to the flow above the operating point when having unit commitment variables for ((a, y, rp), sub_df) in pairs(df_grouped) @@ -104,7 +106,7 @@ function add_ramping_constraints!( row.min_outgoing_flow_duration * profile_times_capacity[row.index] * units_on[row.index], - base_name = "max_ramp_up_with_unit_commitment[$a,$y,$rp,$(row.timesteps_block)]" + base_name = "max_ramp_up_with_unit_commitment[$a,$y,$rp,$(row.time_block_start):$(row.time_block_end)]" ) for (k, row) in enumerate(eachrow(sub_df)) if k > 1 ] end @@ -123,7 +125,7 @@ function add_ramping_constraints!( row.min_outgoing_flow_duration * profile_times_capacity[row.index] * units_on[row.index-1], - base_name = "max_ramp_down_with_unit_commitment[$a,$y,$rp,$(row.timesteps_block)]" + base_name = "max_ramp_down_with_unit_commitment[$a,$y,$rp,$(row.time_block_start):$(row.time_block_end)]" ) for (k, row) in enumerate(eachrow(sub_df)) if k > 1 ] end @@ -131,7 +133,7 @@ function add_ramping_constraints!( ## Ramping Constraints without unit commitment # Note: We start ramping constraints from the second timesteps_block # We filter and group the dataframe per asset and representative period that does not have the unit_commitment methods - df_grouped = DataFrames.groupby(df_highest_out, [:asset, :year, :rep_period]) + df_grouped = DataFrames.groupby(constraints[:highest_out].indices, [:asset, :year, :rep_period]) # get the expression from the capacity constraints for the highest_out assets_profile_times_capacity_out = model[:assets_profile_times_capacity_out] @@ -149,7 +151,7 @@ function add_ramping_constraints!( graph[row.asset].max_ramp_up * row.min_outgoing_flow_duration * assets_profile_times_capacity_out[row.index], - base_name = "max_ramp_up_without_unit_commitment[$a,$y,$rp,$(row.timesteps_block)]" + base_name = "max_ramp_up_without_unit_commitment[$a,$y,$rp,$(row.time_block_start):$(row.time_block_end)]" ) for (k, row) in 
enumerate(eachrow(sub_df)) if k > 1 && outgoing_flow_highest_out_resolution[row.index] != 0 ] @@ -168,7 +170,7 @@ function add_ramping_constraints!( -graph[row.asset].max_ramp_down * row.min_outgoing_flow_duration * assets_profile_times_capacity_out[row.index], - base_name = "max_ramp_down_without_unit_commitment[$a,$y,$rp,$(row.timesteps_block)]" + base_name = "max_ramp_down_without_unit_commitment[$a,$y,$rp,$(row.time_block_start):$(row.time_block_end)]" ) for (k, row) in enumerate(eachrow(sub_df)) if k > 1 && outgoing_flow_highest_out_resolution[row.index] != 0 ] diff --git a/src/constraints/storage.jl b/src/constraints/storage.jl index 09115b78..9900edc1 100644 --- a/src/constraints/storage.jl +++ b/src/constraints/storage.jl @@ -6,17 +6,7 @@ add_storage_constraints!(model, graph,...) Adds the storage asset constraints to the model. """ -function add_storage_constraints!( - model, - variables, - graph, - dataframes, - accumulated_energy_capacity, - incoming_flow_lowest_storage_resolution_intra_rp, - outgoing_flow_lowest_storage_resolution_intra_rp, - incoming_flow_storage_inter_rp_balance, - outgoing_flow_storage_inter_rp_balance, -) +function add_storage_constraints!(model, variables, constraints, graph) ## INTRA-TEMPORAL CONSTRAINTS (within a representative period) storage_level_intra_rp = variables[:storage_level_intra_rp] @@ -27,6 +17,16 @@ function add_storage_constraints!( df_storage_inter_rp_balance_grouped = DataFrames.groupby(storage_level_inter_rp.indices, [:asset, :year]) + accumulated_energy_capacity = model[:accumulated_energy_capacity] + incoming_flow_lowest_storage_resolution_intra_rp = + constraints[:storage_level_intra_rp].expressions[:incoming] + outgoing_flow_lowest_storage_resolution_intra_rp = + constraints[:storage_level_intra_rp].expressions[:outgoing] + incoming_flow_storage_inter_rp_balance = + constraints[:storage_level_inter_rp].expressions[:incoming] + outgoing_flow_storage_inter_rp_balance = + constraints[:storage_level_inter_rp].expressions[:outgoing] + # - Balance constraint (using the lowest temporal resolution) for ((a, y, rp), sub_df) in pairs(df_storage_intra_rp_balance_grouped) # This assumes an ordering of the time blocks, that is guaranteed inside @@ -136,7 +136,7 @@ function add_storage_constraints!( ) end ) + - row.inflows_profile_aggregation + + constraints[:storage_level_inter_rp].expressions[:inflows_profile_aggregation][row.index] + incoming_flow_storage_inter_rp_balance[row.index] - outgoing_flow_storage_inter_rp_balance[row.index], base_name = "storage_inter_rp_balance[$a,$(row.year),$(row.period_block_start):$(row.period_block_end)]" diff --git a/src/constraints/transport.jl b/src/constraints/transport.jl index 839caf32..3d2221d1 100644 --- a/src/constraints/transport.jl +++ b/src/constraints/transport.jl @@ -6,17 +6,14 @@ add_transport_constraints!(model, graph, df_flows, flow, Ft, flows_investment) Adds the transport flow constraints to the model. 
""" -function add_transport_constraints!( - model, - graph, - sets, - variables, - accumulated_flows_export_units, - accumulated_flows_import_units, -) +function add_transport_constraints!(model, variables, graph, sets) ## unpack from sets Ft = sets[:Ft] + ## unpack from model + accumulated_flows_export_units = model[:accumulated_flows_export_units] + accumulated_flows_import_units = model[:accumulated_flows_import_units] + ## unpack from variables flows_indices = variables[:flow].indices flow = variables[:flow].container diff --git a/src/create-model.jl b/src/create-model.jl index 5ae70ce7..f92c46e2 100644 --- a/src/create-model.jl +++ b/src/create-model.jl @@ -10,18 +10,18 @@ function create_model!(energy_problem; kwargs...) graph = energy_problem.graph representative_periods = energy_problem.representative_periods variables = energy_problem.variables + constraints = energy_problem.constraints timeframe = energy_problem.timeframe groups = energy_problem.groups model_parameters = energy_problem.model_parameters years = energy_problem.years - dataframes = energy_problem.dataframes sets = create_sets(graph, years) energy_problem.model = @timeit to "create_model" create_model( graph, sets, variables, + constraints, representative_periods, - dataframes, years, timeframe, groups, @@ -47,8 +47,8 @@ function create_model( graph, sets, variables, + constraints, representative_periods, - dataframes, years, timeframe, groups, @@ -61,11 +61,6 @@ function create_model( expression_workspace = Vector{JuMP.AffExpr}(undef, Tmax) - # Unpacking dataframes - @timeit to "unpacking dataframes" begin - df_units_on_and_outflows = dataframes[:units_on_and_outflows] - end - ## Model model = JuMP.Model() @@ -83,20 +78,9 @@ function create_model( ## Add expressions to dataframes # TODO: What will improve this? Variables (#884)?, Constraints? 
- ( - incoming_flow_lowest_resolution, - outgoing_flow_lowest_resolution, - incoming_flow_lowest_storage_resolution_intra_rp, - outgoing_flow_lowest_storage_resolution_intra_rp, - incoming_flow_highest_in_out_resolution, - outgoing_flow_highest_in_out_resolution, - incoming_flow_highest_in_resolution, - outgoing_flow_highest_out_resolution, - incoming_flow_storage_inter_rp_balance, - outgoing_flow_storage_inter_rp_balance, - ) = add_expressions_to_dataframe!( - dataframes, + add_expressions_to_constraints!( variables, + constraints, model, expression_workspace, representative_periods, @@ -106,93 +90,48 @@ function create_model( ## Expressions for multi-year investment create_multi_year_expressions!(model, graph, sets, variables) - accumulated_flows_export_units = model[:accumulated_flows_export_units] - accumulated_flows_import_units = model[:accumulated_flows_import_units] - accumulated_initial_units = model[:accumulated_initial_units] - accumulated_investment_units_using_simple_method = - model[:accumulated_investment_units_using_simple_method] - accumulated_units = model[:accumulated_units] - accumulated_units_compact_method = model[:accumulated_units_compact_method] - accumulated_units_simple_method = model[:accumulated_units_simple_method] ## Expressions for storage assets add_storage_expressions!(model, graph, sets, variables) - accumulated_energy_units_simple_method = model[:accumulated_energy_units_simple_method] - accumulated_energy_capacity = model[:accumulated_energy_capacity] ## Expressions for the objective function - add_objective!( - model, - variables, - graph, - dataframes, - representative_periods, - sets, - model_parameters, - ) + add_objective!(model, variables, graph, representative_periods, sets, model_parameters) # TODO: Pass sets instead of the explicit values ## Constraints @timeit to "add_capacity_constraints!" add_capacity_constraints!( model, + variables, + constraints, graph, - dataframes, sets, - variables, - accumulated_initial_units, - accumulated_investment_units_using_simple_method, - accumulated_units, - accumulated_units_compact_method, - outgoing_flow_highest_out_resolution, - incoming_flow_highest_in_resolution, ) - @timeit to "add_energy_constraints!" add_energy_constraints!(model, graph, dataframes) + @timeit to "add_energy_constraints!" add_energy_constraints!(model, constraints, graph) @timeit to "add_consumer_constraints!" add_consumer_constraints!( model, + constraints, graph, - dataframes, sets, - incoming_flow_highest_in_out_resolution, - outgoing_flow_highest_in_out_resolution, ) @timeit to "add_storage_constraints!" add_storage_constraints!( model, variables, + constraints, graph, - dataframes, - accumulated_energy_capacity, - incoming_flow_lowest_storage_resolution_intra_rp, - outgoing_flow_lowest_storage_resolution_intra_rp, - incoming_flow_storage_inter_rp_balance, - outgoing_flow_storage_inter_rp_balance, ) - @timeit to "add_hub_constraints!" add_hub_constraints!( - model, - dataframes, - sets, - incoming_flow_highest_in_out_resolution, - outgoing_flow_highest_in_out_resolution, - ) + @timeit to "add_hub_constraints!" add_hub_constraints!(model, constraints, sets) - @timeit to "add_conversion_constraints!" add_conversion_constraints!( - model, - dataframes, - sets, - incoming_flow_lowest_resolution, - outgoing_flow_lowest_resolution, - ) + @timeit to "add_conversion_constraints!" add_conversion_constraints!(model, constraints, sets) @timeit to "add_transport_constraints!" 
add_transport_constraints!( model, + variables, graph, sets, - variables, - accumulated_flows_export_units, - accumulated_flows_import_units, ) @timeit to "add_investment_constraints!" add_investment_constraints!(graph, sets, variables) @@ -207,15 +146,12 @@ function create_model( ) end - if !isempty(dataframes[:units_on_and_outflows]) + if !isempty(constraints[:units_on_and_outflows].indices) @timeit to "add_ramping_constraints!" add_ramping_constraints!( model, variables, + constraints, graph, - df_units_on_and_outflows, - dataframes[:highest_out], - outgoing_flow_highest_out_resolution, - accumulated_units, sets, ) end diff --git a/src/io.jl b/src/io.jl index 95cda782..06933922 100644 --- a/src/io.jl +++ b/src/io.jl @@ -234,6 +234,7 @@ function create_internal_structures(connection) tmp_create_partition_tables(connection) tmp_create_union_tables(connection) tmp_create_lowest_resolution_table(connection) + tmp_create_highest_resolution_table(connection) df = TulipaIO.get_table(connection, "asset_time_resolution") gdf = DataFrames.groupby(df, [:asset, :year, :rep_period]) @@ -378,12 +379,7 @@ function save_solution_to_file(output_folder, energy_problem::EnergyProblem) if !energy_problem.solved error("The energy_problem has not been solved yet.") end - save_solution_to_file( - output_folder, - energy_problem.graph, - energy_problem.dataframes, - energy_problem.solution, - ) + save_solution_to_file(output_folder, energy_problem.graph, energy_problem.solution) end """ @@ -408,7 +404,7 @@ The following files are created: proportional fraction of the value at the corresponding time block, i.e., if level[1:3] = 30, then level[1] = level[2] = level[3] = 10. """ -function save_solution_to_file(output_folder, graph, dataframes, solution) +function save_solution_to_file(output_folder, graph, solution) output_file = joinpath(output_folder, "assets-investments.csv") output_table = DataFrame(; asset = String[], diff --git a/src/model-preparation.jl b/src/model-preparation.jl index 72f5e1a3..eefe9dfe 100644 --- a/src/model-preparation.jl +++ b/src/model-preparation.jl @@ -1,147 +1,5 @@ # Tools to prepare data and structures to the model creation -export create_sets, construct_dataframes - -""" - dataframes = construct_dataframes( - connection, - graph, - representative_periods, - constraints_partitions,, IteratorSize - years, - ) - -Computes the data frames used to linearize the variables and constraints. These are used -internally in the model only. 
-""" -function construct_dataframes( - connection, - graph, - representative_periods, - constraints_partitions, - years_struct, -) - years = [year.id for year in years_struct if year.is_milestone] - A = MetaGraphsNext.labels(graph) |> collect - F = MetaGraphsNext.edge_labels(graph) |> collect - RP = Dict(year => 1:length(representative_periods[year]) for year in years) - - # Create subsets of assets - Ap = filter_graph(graph, A, "producer", :type) - Acv = filter_graph(graph, A, "conversion", :type) - Auc = Dict(year => (Ap ∪ Acv) ∩ filter_graph(graph, A, true, :unit_commitment, year) for year in years) - - # Output object - dataframes = Dict{Symbol,DataFrame}() - - # Create all the dataframes for the constraints considering the constraints_partitions - for (key, partitions) in constraints_partitions - if length(partitions) == 0 - # No data, but ensure schema is correct - dataframes[key] = DataFrame(; - asset = String[], - year = Int[], - rep_period = Int[], - timesteps_block = UnitRange{Int}[], - index = Int[], - ) - continue - end - - # This construction should ensure the ordering of the time blocks for groups of (a, rp) - df = DataFrame( - ( - ( - (asset = a, year = y, rep_period = rp, timesteps_block = timesteps_block) for - timesteps_block in partition - ) for ((a, y, rp), partition) in partitions - ) |> Iterators.flatten, - ) - df.index = 1:size(df, 1) - dataframes[key] = df - end - - tmp_create_constraints_indices(connection) - - # WIP: highest_in_out is not included in constraints_partition anymore - dataframes[:highest_in_out] = TulipaIO.get_table(connection, "cons_indices_highest_in_out") - dataframes[:highest_in_out].timesteps_block = map( - r -> r[1]:r[2], - zip( - dataframes[:highest_in_out].time_block_start, - dataframes[:highest_in_out].time_block_end, - ), - ) - dataframes[:highest_in_out].index = 1:size(dataframes[:highest_in_out], 1) - - # WIP: highest_in is not included in constraints_partition anymore - dataframes[:highest_in] = TulipaIO.get_table(connection, "cons_indices_highest_in") - dataframes[:highest_in].timesteps_block = map( - r -> r[1]:r[2], - zip(dataframes[:highest_in].time_block_start, dataframes[:highest_in].time_block_end), - ) - dataframes[:highest_in].index = 1:size(dataframes[:highest_in], 1) - - # WIP: highest_out is not included in constraints_partition anymore - dataframes[:highest_out] = TulipaIO.get_table(connection, "cons_indices_highest_out") - dataframes[:highest_out].timesteps_block = map( - r -> r[1]:r[2], - zip(dataframes[:highest_out].time_block_start, dataframes[:highest_out].time_block_end), - ) - dataframes[:highest_out].index = 1:size(dataframes[:highest_out], 1) - - # Dataframe to store the constraints for assets with maximum energy between (inter) representative periods - # Only for assets with max energy limit - dataframes[:max_energy_inter_rp] = _construct_inter_rp_dataframes( - A, - graph, - years, - a -> any(!ismissing, values(a.max_energy_timeframe_partition)), - ) - - # Dataframe to store the constraints for assets with minimum energy between (inter) representative periods - # Only for assets with min energy limit - dataframes[:min_energy_inter_rp] = _construct_inter_rp_dataframes( - A, - graph, - years, - a -> any(!ismissing, values(a.min_energy_timeframe_partition)), - ) - - return dataframes -end - -""" - df = _construct_inter_rp_dataframes(assets, graph, years, asset_filter) - -Constructs dataframes for inter representative period constraints. - -# Arguments -- `assets`: An array of assets. 
-- `graph`: The energy problem graph with the assets data. -- `asset_filter`: A function that filters assets based on certain criteria. - -# Returns -A dataframe containing the constructed dataframe for constraints. - -""" -function _construct_inter_rp_dataframes(assets, graph, years, asset_filter) - local_filter(a, y) = - is_active(graph, a, y) && haskey(graph[a].timeframe_partitions, y) && asset_filter(graph[a]) - - df = DataFrame( - ( - ( - (asset = a, year = y, periods_block = periods_block) for - periods_block in graph[a].timeframe_partitions[y] - ) for a in assets, y in years if local_filter(a, y) - ) |> Iterators.flatten, - ) - if size(df, 1) == 0 - df = DataFrame(; asset = String[], year = Int[], periods_block = PeriodsBlock[]) - end - df.index = 1:size(df, 1) - return df -end +export create_sets """ add_expression_terms_intra_rp_constraints!(df_cons, @@ -163,7 +21,7 @@ This strategy is based on the replies in this discourse thread: - https://discourse.julialang.org/t/help-improving-the-speed-of-a-dataframes-operation/107615/23 """ function add_expression_terms_intra_rp_constraints!( - df_cons, + cons::TulipaConstraint, flow::TulipaVariable, workspace, representative_periods, @@ -176,25 +34,28 @@ function add_expression_terms_intra_rp_constraints!( # Otherwise, just use the sum agg = multiply_by_duration ? v -> sum(v) : v -> sum(unique(v)) - grouped_cons = DataFrames.groupby(df_cons, [:year, :rep_period, :asset]) + grouped_cons = DataFrames.groupby(cons.indices, [:year, :rep_period, :asset]) # grouped_cons' asset will be matched with either to or from, depending on whether # we are filling incoming or outgoing flows cases = [ - (col_name = :incoming_flow, asset_match = :to, selected_assets = ["hub", "consumer"]), + (expr_key = :incoming, asset_match = :to, selected_assets = ["hub", "consumer"]), ( - col_name = :outgoing_flow, + expr_key = :outgoing, asset_match = :from, selected_assets = ["hub", "consumer", "producer"], ), ] + num_rows = size(cons.indices, 1) for case in cases - df_cons[!, case.col_name] .= JuMP.AffExpr(0.0) + cons.expressions[case.expr_key] = Vector{JuMP.AffExpr}(undef, num_rows) + cons.expressions[case.expr_key] .= JuMP.AffExpr(0.0) conditions_to_add_min_outgoing_flow_duration = - add_min_outgoing_flow_duration && case.col_name == :outgoing_flow + add_min_outgoing_flow_duration && case.expr_key == :outgoing if conditions_to_add_min_outgoing_flow_duration - df_cons[!, :min_outgoing_flow_duration] .= 1 + # TODO: What to do about this? 
+            cons.indices[!, :min_outgoing_flow_duration] .= 1
         end
         grouped_flows = DataFrames.groupby(flow.indices, [:year, :rep_period, case.asset_match])
         for ((year, rep_period, asset), sub_df) in pairs(grouped_cons)
@@ -217,7 +78,7 @@ function add_expression_terms_intra_rp_constraints!(
                     if graph[asset].type in case.selected_assets || use_highest_resolution
                         1.0
                     else
-                        if case.col_name == :incoming_flow
+                        if case.expr_key == :incoming
                             row.efficiency
                         else
                             # Divide by efficiency for outgoing flows
@@ -241,12 +102,8 @@ function add_expression_terms_intra_rp_constraints!(
             for row in eachrow(sub_df)
                 # TODO: This is a hack to handle constraint tables that still have timesteps_block
                 # In particular, storage_level_intra_rp
-                if haskey(row, :timesteps_block)
-                    row[case.col_name] = agg(@view workspace[row.timesteps_block])
-                else
-                    row[case.col_name] =
-                        agg(@view workspace[row.time_block_start:row.time_block_end])
-                end
+                cons.expressions[case.expr_key][row.index] =
+                    agg(@view workspace[row.time_block_start:row.time_block_end])
                 if conditions_to_add_min_outgoing_flow_duration
                     row[:min_outgoing_flow_duration] = outgoing_flow_durations
                 end
@@ -272,18 +129,18 @@ This strategy is based on the replies in this discourse thread:
 - https://discourse.julialang.org/t/help-improving-the-speed-of-a-dataframes-operation/107615/23
 """
 function add_expression_is_charging_terms_intra_rp_constraints!(
-    df_cons,
-    is_charging_indices,
-    is_charging_variables,
+    cons::TulipaConstraint,
+    is_charging::TulipaVariable,
     workspace,
 )
     # Aggregating function: We have to compute the proportion of each variable is_charging in the constraint timesteps_block.
     agg = Statistics.mean
 
-    grouped_cons = DataFrames.groupby(df_cons, [:year, :rep_period, :asset])
+    grouped_cons = DataFrames.groupby(cons.indices, [:year, :rep_period, :asset])
 
-    df_cons[!, :is_charging] .= JuMP.AffExpr(0.0)
-    grouped_is_charging = DataFrames.groupby(is_charging_indices, [:year, :rep_period, :asset])
+    cons.expressions[:is_charging] = Vector{JuMP.AffExpr}(undef, size(cons.indices, 1))
+    cons.expressions[:is_charging] .= JuMP.AffExpr(0.0)
+    grouped_is_charging = DataFrames.groupby(is_charging.indices, [:year, :rep_period, :asset])
     for ((year, rep_period, asset), sub_df) in pairs(grouped_cons)
         if !haskey(grouped_is_charging, (year, rep_period, asset))
             continue
@@ -295,13 +152,14 @@ function add_expression_is_charging_terms_intra_rp_constraints!(
         # Store the corresponding variables in the workspace
         for row in eachrow(grouped_is_charging[(year, rep_period, asset)])
             asset = row[:asset]
-            for t in row.timesteps_block
-                JuMP.add_to_expression!(workspace[t], is_charging_variables[row.index])
+            for t in row.time_block_start:row.time_block_end
+                JuMP.add_to_expression!(workspace[t], is_charging.container[row.index])
             end
         end
         # Apply the agg function to the corresponding variables from the workspace
         for row in eachrow(sub_df)
-            row[:is_charging] = agg(@view workspace[row.timesteps_block])
+            cons.expressions[:is_charging][row.index] =
+                agg(@view workspace[row.time_block_start:row.time_block_end])
         end
     end
 end
@@ -323,16 +181,17 @@ This strategy is based on the replies in this discourse thread:
 - https://discourse.julialang.org/t/help-improving-the-speed-of-a-dataframes-operation/107615/23
 """
 function add_expression_units_on_terms_intra_rp_constraints!(
-    df_cons,
+    cons::TulipaConstraint,
     units_on::TulipaVariable,
     workspace,
 )
     # Aggregating function: since the constraint is in the highest resolution we can aggregate with unique.
     agg = v -> sum(unique(v))
 
-    grouped_cons = DataFrames.groupby(df_cons, [:rep_period, :asset])
+    grouped_cons = DataFrames.groupby(cons.indices, [:rep_period, :asset])
 
-    df_cons[!, :units_on] .= JuMP.AffExpr(0.0)
+    cons.expressions[:units_on] = Vector{JuMP.AffExpr}(undef, size(cons.indices, 1))
+    cons.expressions[:units_on] .= JuMP.AffExpr(0.0)
     grouped_units_on = DataFrames.groupby(units_on.indices, [:rep_period, :asset])
     for ((rep_period, asset), sub_df) in pairs(grouped_cons)
         haskey(grouped_units_on, (rep_period, asset)) || continue
@@ -348,7 +207,8 @@ function add_expression_units_on_terms_intra_rp_constraints!(
         end
         # Apply the agg function to the corresponding variables from the workspace
         for row in eachrow(sub_df)
-            row[:units_on] = agg(@view workspace[row.timesteps_block])
+            cons.expressions[:units_on][row.index] =
+                agg(@view workspace[row.time_block_start:row.time_block_end])
         end
     end
 end
@@ -368,54 +228,58 @@ This function is only used internally in the model.
 
 """
 function add_expression_terms_inter_rp_constraints!(
-    df_inter,
+    cons::TulipaConstraint,
     flow::TulipaVariable,
-    df_map,
+    df_map, # TODO: Figure out how to handle this
     graph,
     representative_periods;
     is_storage_level = false,
 )
-    df_inter[!, :outgoing_flow] .= JuMP.AffExpr(0.0)
+    num_rows = size(cons.indices, 1)
+    cons.expressions[:outgoing] = Vector{JuMP.AffExpr}(undef, num_rows)
+    cons.expressions[:outgoing] .= JuMP.AffExpr(0.0)
 
     if is_storage_level
-        df_inter[!, :incoming_flow] .= JuMP.AffExpr(0.0)
-        df_inter[!, :inflows_profile_aggregation] .= JuMP.AffExpr(0.0)
+        cons.expressions[:incoming] = Vector{JuMP.AffExpr}(undef, num_rows)
+        cons.expressions[:incoming] .= JuMP.AffExpr(0.0)
+        cons.expressions[:inflows_profile_aggregation] = Vector{JuMP.AffExpr}(undef, num_rows)
+        cons.expressions[:inflows_profile_aggregation] .= JuMP.AffExpr(0.0)
     end
 
     # TODO: The interaction between year and timeframe is not clear yet, so this is probably wrong
     # At this moment, that relation is ignored (we don't even look at df_inter.year)
 
     # Incoming, outgoing flows, and profile aggregation
-    for row_inter in eachrow(df_inter)
+    for row_cons in eachrow(cons.indices)
         sub_df_map = filter(
-            :period => p -> row_inter.period_block_start <= p <= row_inter.period_block_end,
+            :period => p -> row_cons.period_block_start <= p <= row_cons.period_block_end,
             df_map;
             view = true,
         )
 
         for row_map in eachrow(sub_df_map)
-            # Skip inactive row_inter or undefined for that year
+            # Skip inactive row_cons or undefined for that year
             # TODO: This is apparently never happening
-            # if !get(graph[row_inter.asset].active, row_map.year, false)
+            # if !get(graph[row_cons.asset].active, row_map.year, false)
             #     continue
             # end
 
             sub_df_flows = filter(
                 [:from, :year, :rep_period] =>
                     (from, y, rp) ->
-                        (from, y, rp) == (row_inter.asset, row_map.year, row_map.rep_period),
+                        (from, y, rp) == (row_cons.asset, row_map.year, row_map.rep_period),
                 flow.indices;
                 view = true,
             )
             sub_df_flows.duration = sub_df_flows.time_block_end - sub_df_flows.time_block_start .+ 1
             if is_storage_level
-                row_inter.outgoing_flow +=
+                cons.expressions[:outgoing][row_cons.index] +=
                     LinearAlgebra.dot(
                         flow.container[sub_df_flows.index],
                         sub_df_flows.duration ./ sub_df_flows.efficiency,
                     ) * row_map.weight
             else
-                row_inter.outgoing_flow +=
+                cons.expressions[:outgoing][row_cons.index] +=
                     LinearAlgebra.dot(flow.container[sub_df_flows.index], sub_df_flows.duration) *
                     row_map.weight
             end
@@ -425,38 +289,39 @@ function add_expression_terms_inter_rp_constraints!(
 
             sub_df_flows = filter(
                 [:to, :year, :rep_period] =>
                     (to, y, rp) ->
-                        (to, y, rp) == (row_inter.asset, row_map.year, row_map.rep_period),
+                        (to, y, rp) == (row_cons.asset, row_map.year, row_map.rep_period),
                 flow.indices;
                 view = true,
             )
             sub_df_flows.duration = sub_df_flows.time_block_end - sub_df_flows.time_block_start .+ 1
-            row_inter.incoming_flow +=
+
+            cons.expressions[:incoming][row_cons.index] +=
                 LinearAlgebra.dot(
                     flow.container[sub_df_flows.index],
                     sub_df_flows.duration .* sub_df_flows.efficiency,
                 ) * row_map.weight
 
-            row_inter.inflows_profile_aggregation +=
+            cons.expressions[:inflows_profile_aggregation][row_cons.index] +=
                 profile_aggregation(
                     sum,
-                    graph[row_inter.asset].rep_periods_profiles,
+                    graph[row_cons.asset].rep_periods_profiles,
                     row_map.year,
                     row_map.year,
                     ("inflows", row_map.rep_period),
                     representative_periods[row_map.year][row_map.rep_period].timesteps,
                     0.0,
                 ) *
-                graph[row_inter.asset].storage_inflows[row_map.year] *
+                graph[row_cons.asset].storage_inflows[row_map.year] *
                 row_map.weight
         end
     end
 end
 
-function add_expressions_to_dataframe!(
-    dataframes,
+function add_expressions_to_constraints!(
     variables,
+    constraints,
     model,
     expression_workspace,
     representative_periods,
@@ -465,12 +330,9 @@ function add_expressions_to_dataframe!(
 )
     @timeit to "add_expression_terms_to_df" begin
         # Unpack variables
-        is_charging_indices = variables[:is_charging].indices
-        is_charging_variables = variables[:is_charging].container
-
         # Creating the incoming and outgoing flow expressions
         add_expression_terms_intra_rp_constraints!(
-            dataframes[:lowest],
+            constraints[:lowest],
             variables[:flow],
             expression_workspace,
             representative_periods,
             graph;
             use_highest_resolution = false,
             multiply_by_duration = true,
         )
-        # TODO: storage_level_intra_rp is serving as a constraints indices
-        # This should be fixed when:
-        # - the constraint is separate from the variable
-        # - the incoming and outgoing flows are stored outside the DF
         add_expression_terms_intra_rp_constraints!(
-            variables[:storage_level_intra_rp].indices,
+            constraints[:storage_level_intra_rp],
             variables[:flow],
             expression_workspace,
             representative_periods,
             graph;
             use_highest_resolution = false,
             multiply_by_duration = true,
         )
         add_expression_terms_intra_rp_constraints!(
-            dataframes[:highest_in_out],
+            constraints[:highest_in_out],
             variables[:flow],
             expression_workspace,
             representative_periods,
             graph;
             use_highest_resolution = true,
             multiply_by_duration = false,
         )
         add_expression_terms_intra_rp_constraints!(
-            dataframes[:highest_in],
+            constraints[:highest_in],
             variables[:flow],
             expression_workspace,
             representative_periods,
             graph;
             use_highest_resolution = true,
             multiply_by_duration = false,
         )
         add_expression_terms_intra_rp_constraints!(
-            dataframes[:highest_out],
+            constraints[:highest_out],
             variables[:flow],
             expression_workspace,
             representative_periods,
             graph;
             use_highest_resolution = true,
             multiply_by_duration = false,
             add_min_outgoing_flow_duration = true,
         )
-        if !isempty(dataframes[:units_on_and_outflows])
+        if !isempty(constraints[:units_on_and_outflows].indices)
             add_expression_terms_intra_rp_constraints!(
-                dataframes[:units_on_and_outflows],
+                constraints[:units_on_and_outflows],
                 variables[:flow],
                 expression_workspace,
                 representative_periods,
                 graph;
                 use_highest_resolution = true,
                 multiply_by_duration = false,
                 add_min_outgoing_flow_duration = true,
             )
         end
         add_expression_terms_inter_rp_constraints!(
-            variables[:storage_level_inter_rp].indices,
+            constraints[:storage_level_inter_rp],
variables[:flow], timeframe.map_periods_to_rp, graph, representative_periods; is_storage_level = true, ) - # TODO: This hack allows changing the function to use period_block_start and period_block_end - # This can be removed after the max_energy_inter_rp and min_energy_inter_rp are moved to TulipaVariable - for name in (:max_energy_inter_rp, :min_energy_inter_rp) - df = dataframes[name] - df.period_block_start = first.(df.periods_block) - df.period_block_end = last.(df.periods_block) - end add_expression_terms_inter_rp_constraints!( - dataframes[:max_energy_inter_rp], + constraints[:max_energy_inter_rp], variables[:flow], timeframe.map_periods_to_rp, graph, representative_periods, ) add_expression_terms_inter_rp_constraints!( - dataframes[:min_energy_inter_rp], + constraints[:min_energy_inter_rp], variables[:flow], timeframe.map_periods_to_rp, graph, representative_periods, ) add_expression_is_charging_terms_intra_rp_constraints!( - dataframes[:highest_in], - is_charging_indices, - is_charging_variables, + constraints[:highest_in], + variables[:is_charging], expression_workspace, ) add_expression_is_charging_terms_intra_rp_constraints!( - dataframes[:highest_out], - is_charging_indices, - is_charging_variables, + constraints[:highest_out], + variables[:is_charging], expression_workspace, ) - if !isempty(dataframes[:units_on_and_outflows]) + if !isempty(constraints[:units_on_and_outflows].indices) add_expression_units_on_terms_intra_rp_constraints!( - dataframes[:units_on_and_outflows], + constraints[:units_on_and_outflows], variables[:units_on], expression_workspace, ) end - - incoming_flow_lowest_resolution = - model[:incoming_flow_lowest_resolution] = dataframes[:lowest].incoming_flow - outgoing_flow_lowest_resolution = - model[:outgoing_flow_lowest_resolution] = dataframes[:lowest].outgoing_flow - incoming_flow_lowest_storage_resolution_intra_rp = - model[:incoming_flow_lowest_storage_resolution_intra_rp] = - variables[:storage_level_intra_rp].indices.incoming_flow - outgoing_flow_lowest_storage_resolution_intra_rp = - model[:outgoing_flow_lowest_storage_resolution_intra_rp] = - variables[:storage_level_intra_rp].indices.outgoing_flow - incoming_flow_highest_in_out_resolution = - model[:incoming_flow_highest_in_out_resolution] = - dataframes[:highest_in_out].incoming_flow - outgoing_flow_highest_in_out_resolution = - model[:outgoing_flow_highest_in_out_resolution] = - dataframes[:highest_in_out].outgoing_flow - incoming_flow_highest_in_resolution = - model[:incoming_flow_highest_in_resolution] = dataframes[:highest_in].incoming_flow - outgoing_flow_highest_out_resolution = - model[:outgoing_flow_highest_out_resolution] = dataframes[:highest_out].outgoing_flow - incoming_flow_storage_inter_rp_balance = - model[:incoming_flow_storage_inter_rp_balance] = - variables[:storage_level_inter_rp].indices.incoming_flow - outgoing_flow_storage_inter_rp_balance = - model[:outgoing_flow_storage_inter_rp_balance] = - variables[:storage_level_inter_rp].indices.outgoing_flow - # Below, we drop zero coefficients, but probably we don't have any - # (if the implementation is correct) - JuMP.drop_zeros!.(incoming_flow_lowest_resolution) - JuMP.drop_zeros!.(outgoing_flow_lowest_resolution) - JuMP.drop_zeros!.(incoming_flow_lowest_storage_resolution_intra_rp) - JuMP.drop_zeros!.(outgoing_flow_lowest_storage_resolution_intra_rp) - JuMP.drop_zeros!.(incoming_flow_highest_in_out_resolution) - JuMP.drop_zeros!.(outgoing_flow_highest_in_out_resolution) - JuMP.drop_zeros!.(incoming_flow_highest_in_resolution) - 
-        JuMP.drop_zeros!.(outgoing_flow_highest_out_resolution)
-        JuMP.drop_zeros!.(incoming_flow_storage_inter_rp_balance)
-        JuMP.drop_zeros!.(outgoing_flow_storage_inter_rp_balance)
     end
 
-    return (
-        incoming_flow_lowest_resolution,
-        outgoing_flow_lowest_resolution,
-        incoming_flow_lowest_storage_resolution_intra_rp,
-        outgoing_flow_lowest_storage_resolution_intra_rp,
-        incoming_flow_highest_in_out_resolution,
-        outgoing_flow_highest_in_out_resolution,
-        incoming_flow_highest_in_resolution,
-        outgoing_flow_highest_out_resolution,
-        incoming_flow_storage_inter_rp_balance,
-        outgoing_flow_storage_inter_rp_balance,
-    )
+    return
 end
 
 function create_sets(graph, years)
diff --git a/src/objective.jl b/src/objective.jl
index 4a7159b6..68378c1d 100644
--- a/src/objective.jl
+++ b/src/objective.jl
@@ -1,12 +1,4 @@
-function add_objective!(
-    model,
-    variables,
-    graph,
-    dataframes,
-    representative_periods,
-    sets,
-    model_parameters,
-)
+function add_objective!(model, variables, graph, representative_periods, sets, model_parameters)
     assets_investment = variables[:assets_investment].lookup
     accumulated_units_simple_method = model[:accumulated_units_simple_method]
     accumulated_units_compact_method = model[:accumulated_units_compact_method]
diff --git a/src/solve-model.jl b/src/solve-model.jl
index 26edfa70..01bc4dfd 100644
--- a/src/solve-model.jl
+++ b/src/solve-model.jl
@@ -17,13 +17,8 @@ function solve_model!(
         error("Model is not created, run create_model(energy_problem) first.")
     end
 
-    energy_problem.solution = solve_model!(
-        energy_problem.dataframes,
-        model,
-        energy_problem.variables,
-        optimizer;
-        parameters = parameters,
-    )
+    energy_problem.solution =
+        solve_model!(model, energy_problem.variables, optimizer; parameters = parameters)
     energy_problem.termination_status = JuMP.termination_status(model)
     if energy_problem.solution === nothing
         # Warning has been given at internal function
@@ -94,7 +89,7 @@
 The modifications made to `dataframes` are:
 
   - `df_storage_level_intra_rp.solution = solution.storage_level_intra_rp`
   - `df_storage_level_inter_rp.solution = solution.storage_level_inter_rp`
 """
-function solve_model!(dataframes, model, args...; kwargs...)
+function solve_model!(model, args...; kwargs...)
     solution = solve_model(model, args...; kwargs...)
     if isnothing(solution)
         return nothing
diff --git a/src/structures.jl b/src/structures.jl
index 5688f481..7066f67c 100644
--- a/src/structures.jl
+++ b/src/structures.jl
@@ -2,6 +2,7 @@ export GraphAssetData,
     GraphFlowData,
     EnergyProblem,
     TulipaVariable,
+    TulipaConstraint,
     RepresentativePeriod,
     PeriodsBlock,
     TimestepsBlock,
@@ -45,6 +46,15 @@ mutable struct TulipaVariable
     end
 end
 
+mutable struct TulipaConstraint
+    indices::DataFrame
+    expressions::Dict{Symbol,Vector{JuMP.AffExpr}}
+
+    function TulipaConstraint(indices)
+        return new(indices, Dict())
+    end
+end
+
 """
 Structure to hold the data of one representative period.
""" @@ -265,12 +275,11 @@ mutable struct EnergyProblem Nothing, # Default edge weight } variables::Dict{Symbol,TulipaVariable} + constraints::Dict{Symbol,TulipaConstraint} representative_periods::Dict{Int,Vector{RepresentativePeriod}} - constraints_partitions::Dict{Symbol,Dict{Tuple{String,Int,Int},Vector{TimestepsBlock}}} timeframe::Timeframe groups::Vector{Group} years::Vector{Year} - dataframes::Dict{Symbol,DataFrame} model_parameters::ModelParameters model::Union{JuMP.Model,Nothing} solution::Union{Solution,Nothing} @@ -292,35 +301,22 @@ mutable struct EnergyProblem graph, representative_periods, timeframe, groups, years = create_internal_structures(connection) end - elapsed_time_cons = @elapsed begin - constraints_partitions = - compute_constraints_partitions(graph, representative_periods, years) - end - - elapsed_time_construct_dataframes = @elapsed begin - dataframes = construct_dataframes( - connection, - graph, - representative_periods, - constraints_partitions, - years, - ) - end elapsed_time_vars = @elapsed begin variables = compute_variables_indices(connection) end + constraints = compute_constraints_indices(connection) + energy_problem = new( connection, graph, variables, + constraints, representative_periods, - constraints_partitions, timeframe, groups, years, - dataframes, ModelParameters(connection, model_parameters_file), nothing, nothing, @@ -329,8 +325,6 @@ mutable struct EnergyProblem JuMP.OPTIMIZE_NOT_CALLED, Dict( "creating internal structures" => elapsed_time_internal, - "computing constraints partitions" => elapsed_time_cons, - "creating dataframes" => elapsed_time_construct_dataframes, "creating variables indices" => elapsed_time_vars, ), ) diff --git a/src/time-resolution.jl b/src/time-resolution.jl deleted file mode 100644 index 7860a371..00000000 --- a/src/time-resolution.jl +++ /dev/null @@ -1,261 +0,0 @@ -export compute_rp_partition, compute_constraints_partitions - -using SparseArrays - -""" - cons_partitions = compute_constraints_partitions(graph, representative_periods) - -Computes the constraints partitions using the assets and flows partitions stored in the graph, -and the representative periods. - -The function computes the constraints partitions by iterating over the partition dictionary, -which specifies the partition strategy for each resolution (i.e., lowest or highest). -For each asset and representative period, it calls the `compute_rp_partition` function -to compute the partition based on the strategy. 
-""" -function compute_constraints_partitions(graph, representative_periods, years) - constraints_partitions = Dict{Symbol,Dict{Tuple{String,Int,Int},Vector{TimestepsBlock}}}() - _inflows(a, y, rp) = [ - graph[u, a].rep_periods_partitions[y][rp] for - u in MetaGraphsNext.inneighbor_labels(graph, a) if is_active(graph, (u, a), y) - ] - _outflows(a, y, rp) = [ - graph[a, v].rep_periods_partitions[y][rp] for - v in MetaGraphsNext.outneighbor_labels(graph, a) if is_active(graph, (a, v), y) - ] - - _allflows(a, y, rp) = [_inflows(a, y, rp); _outflows(a, y, rp)] - _assets(a, y, rp) = [graph[a].rep_periods_partitions[y][rp]] - _assets_and_outflows(a, y, rp) = [_assets(a, y, rp); _outflows(a, y, rp)] - _all(a, y, rp) = [_allflows(a, y, rp); _assets(a, y, rp)] - - partitions_cases = [ - ( - name = :lowest, - partitions = _allflows, - strategy = :lowest, - asset_filter = (a, y) -> graph[a].type in ["conversion", "producer"], - ), - # ( - # name = :storage_level_intra_rp, - # partitions = _all, - # strategy = :lowest, - # asset_filter = (a, y) -> graph[a].type == "storage" && !graph[a].is_seasonal, - # ), - # ( - # name = :lowest_in_out, - # partitions = _allflows, - # strategy = :lowest, - # asset_filter = (a, y) -> - # graph[a].type == "storage" && !ismissing(graph[a].use_binary_storage_method), - # ), - # ( # WIP: Testing removing this in favor of using table cons_indices_highest_in_out - # name = :highest_in_out, - # partitions = _allflows, - # strategy = :highest, - # asset_filter = (a, y) -> graph[a].type in ["hub", "consumer"], - # ), - # ( # WIP: Testing removing this in favor of using table cons_indices_highest_in - # name = :highest_in, - # partitions = _inflows, - # strategy = :highest, - # asset_filter = (a, y) -> graph[a].type in ["storage"], - # ), - # ( # WIP: Testing removing this in favor of using table cons_indices_highest_out - # name = :highest_out, - # partitions = _outflows, - # strategy = :highest, - # asset_filter = (a, y) -> graph[a].type in ["producer", "storage", "conversion"], - # ), - # ( - # name = :units_on, - # partitions = _assets, - # strategy = :highest, - # asset_filter = (a, y) -> - # graph[a].type in ["producer", "conversion"] && graph[a].unit_commitment, - # ), - ( - name = :units_on_and_outflows, - partitions = _assets_and_outflows, - strategy = :highest, - asset_filter = (a, y) -> - graph[a].type in ["producer", "conversion"] && graph[a].unit_commitment, - ), - ] - - RP = Dict(year => 1:length(representative_periods[year]) for year in getfield.(years, :id)) - - for (name, partitions, strategy, asset_filter) in partitions_cases - constraints_partitions[name] = OrderedDict( - (a, y, rp) => begin - P = partitions(a, y, rp) - if length(P) > 0 - compute_rp_partition(partitions(a, y, rp), strategy) - else - Vector{TimestepsBlock}[] - end - end for a in MetaGraphsNext.labels(graph), y in getfield.(years, :id) if - is_active(graph, a, y) && asset_filter(a, y) for rp in RP[y] - ) - end - - return constraints_partitions -end - -""" - rp_partition = compute_rp_partition(partitions, :lowest) - -Given the timesteps of various flows/assets in the `partitions` input, compute the representative period partitions. - -Each element of `partitions` is a partition with the following assumptions: - - - An element is of the form `V = [r₁, r₂, …, rₘ]`, where each `rᵢ` is a range `a:b`. - - `r₁` starts at 1. - - `rᵢ₊₁` starts at the end of `rᵢ` plus 1. - - `rₘ` ends at some value `N`, that is the same for all elements of `partitions`. 
-
-Notice that this implies that they form a disjoint partition of `1:N`.
-
-The output will also be a partition with the conditions above.
-
-## Strategies
-
-### :lowest
-
-If `strategy = :lowest` (default), then the output is constructed greedily,
-i.e., it selects the next largest breakpoint following the algorithm below:
-
- 0. Input: `Vᴵ₁, …, Vᴵₚ`, a list of time blocks. Each element of `Vᴵⱼ` is a range `r = r.start:r.end`. Output: `V`.
- 1. Compute the end of the representative period `N` (all `Vᴵⱼ` should have the same end)
- 2. Start with an empty `V = []`
- 3. Define the beginning of the range `s = 1`
- 4. Define an array with all the next breakpoints `B` such that `Bⱼ` is the first `r.end` such that `r.end ≥ s` for each `r ∈ Vᴵⱼ`.
- 5. The end of the range will be `e = max Bⱼ`.
- 6. Define `r = s:e` and add `r` to the end of `V`.
- 7. If `e = N`, then END
- 8. Otherwise, define `s = e + 1` and go to step 4.
-
-#### Examples
-
-```jldoctest
-partition1 = [1:4, 5:8, 9:12]
-partition2 = [1:3, 4:6, 7:9, 10:12]
-compute_rp_partition([partition1, partition2], :lowest)
-
-# output
-
-3-element Vector{UnitRange{Int64}}:
- 1:4
- 5:8
- 9:12
-```
-
-```jldoctest
-partition1 = [1:1, 2:3, 4:6, 7:10, 11:12]
-partition2 = [1:2, 3:4, 5:5, 6:7, 8:9, 10:12]
-compute_rp_partition([partition1, partition2], :lowest)
-
-# output
-
-5-element Vector{UnitRange{Int64}}:
- 1:2
- 3:4
- 5:6
- 7:10
- 11:12
-```
-
-### :highest
-
-If `strategy = :highest`, then the output includes all the breakpoints from the input.
-Another way of describing it is to select the minimum end-point instead of the maximum end-point in the `:lowest` strategy.
-
-#### Examples
-
-```jldoctest
-partition1 = [1:4, 5:8, 9:12]
-partition2 = [1:3, 4:6, 7:9, 10:12]
-compute_rp_partition([partition1, partition2], :highest)
-
-# output
-
-6-element Vector{UnitRange{Int64}}:
- 1:3
- 4:4
- 5:6
- 7:8
- 9:9
- 10:12
-```
-
-```jldoctest
-partition1 = [1:1, 2:3, 4:6, 7:10, 11:12]
-partition2 = [1:2, 3:4, 5:5, 6:7, 8:9, 10:12]
-compute_rp_partition([partition1, partition2], :highest)
-
-# output
-
-10-element Vector{UnitRange{Int64}}:
- 1:1
- 2:2
- 3:3
- 4:4
- 5:5
- 6:6
- 7:7
- 8:9
- 10:10
- 11:12
-```
-"""
-function compute_rp_partition(
-    partitions::AbstractVector{<:AbstractVector{<:UnitRange{<:Integer}}},
-    strategy,
-)
-    valid_strategies = [:highest, :lowest]
-    if !(strategy in valid_strategies)
-        error("`strategy` should be one of $valid_strategies. See docs for more info.")
-    end
-
-    # Get Vᴵ₁, the last range of it, the last element of the range
-    rp_end = partitions[1][end][end]
-    for partition in partitions
-        # Assumption: All start at 1 and end at N
-        @assert partition[1][1] == 1
-        @assert rp_end == partition[end][end]
-    end
-
-    rp_partition = UnitRange{Int}[] # List of ranges
-
-    block_start = 1
-    if strategy == :lowest
-        while block_start ≤ rp_end
-            # The next block end must be ≥ block start
-            block_end = block_start
-            for partition in partitions
-                # For this partition, find the first block that ends after block_start
-                for timesteps_block in partition
-                    tentative_end = timesteps_block[end]
-                    if tentative_end ≥ block_start
-                        if tentative_end > block_end # Better block
-                            block_end = tentative_end
-                        end
-                        break
-                    end
-                end
-            end
-            push!(rp_partition, block_start:block_end)
-            block_start = block_end + 1
-        end
-    elseif strategy == :highest
-        # We need all end points of each interval
-        end_points_per_array = map(partitions) do x # For each partition
-            last.(x) # Retrieve the last element of each interval
-        end
-        # Then we concatenate, remove duplicates, and sort.
-        end_points = vcat(end_points_per_array...) |> unique |> sort
-        for block_end in end_points
-            push!(rp_partition, block_start:block_end)
-            block_start = block_end + 1
-        end
-    end
-    return rp_partition
-end
diff --git a/src/tmp.jl b/src/tmp.jl
index a5922a50..787e93f8 100644
--- a/src/tmp.jl
+++ b/src/tmp.jl
@@ -240,15 +240,42 @@ function tmp_create_union_tables(connection)
     # These are equivalent to the partitions in time-resolution.jl
     # But computed in a more general context to be used by variables as well
 
+    # Incoming flows
+    DuckDB.execute(
+        connection,
+        "CREATE OR REPLACE TEMP TABLE t_union_in_flows AS
+        SELECT DISTINCT to_asset as asset, year, rep_period, time_block_start, time_block_end
+        FROM flow_time_resolution
+        ",
+    )
+
+    # Outgoing flows
+    DuckDB.execute(
+        connection,
+        "CREATE OR REPLACE TEMP TABLE t_union_out_flows AS
+        SELECT DISTINCT from_asset as asset, year, rep_period, time_block_start, time_block_end
+        FROM flow_time_resolution
+        ",
+    )
+
+    # Union of all assets and outgoing flows
+    DuckDB.execute(
+        connection,
+        "CREATE OR REPLACE TEMP TABLE t_union_assets_and_out_flows AS
+        SELECT DISTINCT asset, year, rep_period, time_block_start, time_block_end
+        FROM asset_time_resolution
+        UNION
+        FROM t_union_out_flows
+        ",
+    )
+
     # Union of all incoming and outgoing flows
     DuckDB.execute(
         connection,
         "CREATE OR REPLACE TEMP TABLE t_union_all_flows AS
-        SELECT from_asset as asset, year, rep_period, time_block_start, time_block_end
-        FROM flow_time_resolution
+        FROM t_union_in_flows
         UNION
-        SELECT to_asset as asset, year, rep_period, time_block_start, time_block_end
-        FROM flow_time_resolution
+        FROM t_union_out_flows
         ",
     )
 
@@ -256,10 +283,9 @@
     # Union of all assets and all flows
     DuckDB.execute(
         connection,
         "CREATE OR REPLACE TEMP TABLE t_union_all AS
-        SELECT asset, year, rep_period, time_block_start, time_block_end
+        SELECT DISTINCT asset, year, rep_period, time_block_start, time_block_end
         FROM asset_time_resolution
         UNION
-        SELECT asset, year, rep_period, time_block_start, time_block_end
         FROM t_union_all_flows
         ",
     )
@@ -276,11 +302,59 @@
 end
 
 function tmp_create_lowest_resolution_table(connection)
     # These are generic tables to be used to create some variables and constraints
+    # Following the lowest resolution merge strategy
 
     # The logic:
-    #   - t_union has groups in order, then s:e ordered by s increasing and e decreasing
-    #   - in a group (asset, year, rep_period) we can take the first range then
-    #     continue in the sequence selecting the next largest e
+    # - t_union is ordered by groups (asset, year, rep_period), so every new group starts the same way
+    # - Inside a group, time blocks (TBs) are ordered by time_block_start, so the
+    #   first row of every group starts at time_block_start = 1
+    # - The objective of the lowest resolution is to iteratively find the
+    #   smallest interval [s, e] that covers all blocks that (i) haven't been
+    #   covered yet; and (ii) start at s or earlier
+    # - We start with an interval [1, e], then use s = e + 1 for the next intervals
+    # - To cover all blocks, we use e as the largest time_block_end of the relevant blocks
+    # - Since the TBs are in order, we can simply loop over them until we find
+    #   the first one that starts later than s; that row begins a new block
+
+    # Example:
+    # Consider a group with two assets/flows with the following time resolutions:
+    #
+    # 1: 1:3 4:6 7:9 10:12
+    # 2: 1:2 3:7 8:8 9:11 12:12
+    #
+    # The expected lowest resolution is: 1:3 4:7 8:9 10:12
+    #
+    # This translates into the following rows (ordered by TBS, but not
+    # necessarily by TBE); ROW is a row number used only in this example
+    #
+    # ROW  TBS  TBE
+    #   1    1    3
+    #   2    1    2
+    #   3    3    7
+    #   4    4    6
+    #   5    7    9
+    #   6    8    8
+    #   7    9   11
+    #   8   10   12
+    #   9   12   12
+    #
+    # Here's how the algorithm behaves:
+    #
+    # - BEGINNING OF GROUP, set s = 1, e = 0
+    # - ROW 1, TB = [1, 3]: TBS = 1 ≤ 1 = s, so e = max(e, TBE) = max(0, 3) = 3
+    # - ROW 2, TB = [1, 2]: TBS = 1 ≤ 1 = s, so e = max(e, TBE) = max(3, 2) = 3
+    # - ROW 3, TB = [3, 7]: TBS = 3 > 1 = s, so the first block is created: [s, e] = [1, 3].
+    #   Start a new block with s = 3 + 1 = 4 and e = TBE = 7
+    # - ROW 4, TB = [4, 6]: TBS = 4 ≤ 4 = s, so e = max(e, TBE) = max(7, 6) = 7
+    # - ROW 5, TB = [7, 9]: TBS = 7 > 4 = s, so the second block is created: [s, e] = [4, 7].
+    #   Start a new block with s = 7 + 1 = 8 and e = TBE = 9
+    # - ROW 6, TB = [8, 8]: TBS = 8 ≤ 8 = s, so e = max(e, TBE) = max(9, 8) = 9
+    # - ROW 7, TB = [9, 11]: TBS = 9 > 8 = s, so the third block is created: [s, e] = [8, 9].
+    #   Start a new block with s = 9 + 1 = 10 and e = TBE = 11
+    # - ROW 8, TB = [10, 12]: TBS = 10 ≤ 10 = s, so e = max(e, TBE) = max(11, 12) = 12
+    # - ROW 9, TB = [12, 12]: TBS = 12 > 10 = s, so the fourth block is created: [s, e] = [10, 12].
+    #   Start a new block with s = 12 + 1 = 13 and e = TBE = 12
+    # - END OF GROUP: Is 1 ≤ s ≤ e? No, so this is not a valid block
 
     for union_table in ("t_union_all_flows", "t_union_all")
         table_name = replace(union_table, "t_union" => "t_lowest")
@@ -295,13 +369,14 @@
             )",
         )
         appender = DuckDB.Appender(connection, table_name)
+        # Dummy starting values
         s = 0
         e_candidate = 0
         current_group = ("", 0, 0)
         @timeit to "append $table_name rows" for row in DuckDB.query(
             connection,
             "SELECT * FROM $union_table
-            ORDER BY asset, year, rep_period, time_block_start, time_block_end DESC
+            ORDER BY asset, year, rep_period, time_block_start
             ",
         )
             if (row.asset, row.year, row.rep_period) != current_group
@@ -334,146 +409,40 @@
             end
         end
     end
 end
 
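For reference, the :lowest merge that the loop above performs can be sketched in standalone Julia for a single (asset, year, rep_period) group. This is only an illustration under assumptions: merge_lowest is a hypothetical helper taking (time_block_start, time_block_end) pairs already sorted by start, whereas the function above streams rows out of DuckDB through an Appender.

# Illustrative sketch of the :lowest merge strategy described above (not part of the patch)
function merge_lowest(rows::Vector{Tuple{Int,Int}})
    blocks = UnitRange{Int}[]
    s, e = 1, 0
    for (tbs, tbe) in rows
        if tbs > s && s <= e
            push!(blocks, s:e)  # close the block [s, e]
            s = e + 1           # the next block starts right after it
        end
        e = max(e, tbe)         # extend the candidate end
    end
    if s <= e
        push!(blocks, s:e)      # close the last block of the group
    end
    return blocks
end

# The example from the comment above:
merge_lowest([(1, 3), (1, 2), (3, 7), (4, 6), (7, 9), (8, 8), (9, 11), (10, 12), (12, 12)])
# returns [1:3, 4:7, 8:9, 10:12]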
-function tmp_create_constraints_indices(connection)
-    # Create a list of all (asset, year, rp) and also data used in filtering
-    DBInterface.execute(
-        connection,
-        "CREATE OR REPLACE TABLE t_cons_indices AS
-        SELECT DISTINCT
-            asset_both.asset as asset,
-            asset_both.milestone_year as year,
-            rep_periods_data.rep_period,
-            asset.type,
-            rep_periods_data.num_timesteps,
-            asset.unit_commitment,
-        FROM asset_both
-        LEFT JOIN asset
-            ON asset_both.asset=asset.asset
-        LEFT JOIN rep_periods_data
-            ON asset_both.milestone_year=rep_periods_data.year
-        WHERE asset_both.active=true
-        ORDER BY asset_both.milestone_year, rep_periods_data.rep_period
-        ",
-    )
-
-    # -- The previous attempt used
-    # The idea below is to find all unique time_block_start values because
-    # this uses strategy 'highest'. By ordering them, and making
-    # time_block_end[i] = time_block_start[i+1] - 1, we have all ranges.
-    # We use the `lead` function from SQL to get `time_block_start[i+1]`
-    # and row.num_timesteps is the maximum value for when i+1 > length
-    #
-    # The query below is trying to replace the following constraints_partitions example:
-    #= (
-        name = :highest_in_out,
-        partitions = _allflows,
-        strategy = :highest,
-        asset_filter = (a, y) -> graph[a].type in ["hub", "consumer"],
-    ),
-    =#
-    # The **highest** strategy is obtained simply by computing the union of all
-    # time_block_starts, since it consists of "all breakpoints".
-    # The time_block_end is computed a posteriori using the next time_block_start.
-    # The query below will use the WINDOW FUNCTION `lead` to compute the time
-    # block end.
-    # This query uses the assets, incoming flows and outgoing flows to compute partitions
-    # This will be useful when we have other `partitions` instead of `_allflows`
-    # SELECT asset, year, rep_period, time_block_start
-    # FROM asset_time_resolution
-    # UNION
-    DuckDB.execute(
-        connection,
-        "CREATE OR REPLACE TABLE cons_indices_highest_in_out AS
-        SELECT
-            main.asset,
-            main.year,
-            main.rep_period,
-            COALESCE(sub.time_block_start, 1) AS time_block_start,
-            lead(sub.time_block_start - 1, 1, main.num_timesteps)
-                OVER (PARTITION BY main.asset, main.year, main.rep_period ORDER BY time_block_start)
-                AS time_block_end,
-        FROM t_cons_indices AS main
-        LEFT JOIN (
-            SELECT to_asset as asset, year, rep_period, time_block_start
-            FROM flow_time_resolution
-            UNION
-            SELECT from_asset as asset, year, rep_period, time_block_start
-            FROM flow_time_resolution
-        ) AS sub
-            ON main.asset=sub.asset
-            AND main.year=sub.year
-            AND main.rep_period=sub.rep_period
-        WHERE main.type in ('hub', 'consumer')
-        ",
-    )
-
-    # This follows the same implementation as highest_in_out above, but using
-    # only the incoming flows.
-    #
-    # The query below is trying to replace the following constraints_partitions example:
-    #= (
-    #     name = :highest_in,
-    #     partitions = _inflows,
-    #     strategy = :highest,
-    #     asset_filter = (a, y) -> graph[a].type in ["storage"],
-    # ),
-    =#
-    DuckDB.execute(
-        connection,
-        "CREATE OR REPLACE TABLE cons_indices_highest_in AS
-        SELECT
-            main.asset,
-            main.year,
-            main.rep_period,
-            COALESCE(sub.time_block_start, 1) AS time_block_start,
-            lead(sub.time_block_start - 1, 1, main.num_timesteps)
-                OVER (PARTITION BY main.asset, main.year, main.rep_period ORDER BY time_block_start)
-                AS time_block_end,
-        FROM t_cons_indices AS main
-        LEFT JOIN (
-            SELECT to_asset as asset, year, rep_period, time_block_start
-            FROM flow_time_resolution
-        ) AS sub
-            ON main.asset=sub.asset
-            AND main.year=sub.year
-            AND main.rep_period=sub.rep_period
-        WHERE main.type in ('storage')
-        ",
-    )
+function tmp_create_highest_resolution_table(connection)
+    # These are generic tables to be used to create some variables and constraints
+    # Following the highest resolution merge strategy
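The :highest strategy that this new function expresses in SQL can be sketched in plain Julia as follows. merge_highest is an illustrative name, not part of the package: it takes the time_block_start values of one group plus the period length num_timesteps, mirroring what the lead window function below computes.

# Illustrative sketch of the :highest merge strategy (not part of the patch)
function merge_highest(starts::Vector{Int}, num_timesteps::Int)
    breakpoints = sort(unique(starts))
    blocks = UnitRange{Int}[]
    for (i, s) in enumerate(breakpoints)
        # Each block ends just before the next breakpoint;
        # the last block ends at num_timesteps (what `lead`'s default gives)
        e = i < length(breakpoints) ? breakpoints[i+1] - 1 : num_timesteps
        push!(blocks, s:e)
    end
    return blocks
end

# Breakpoints of the partitions 1:4/5:8/9:12 and 1:3/4:6/7:9/10:12:
merge_highest([1, 5, 9, 1, 4, 7, 10], 12)
# returns [1:3, 4:4, 5:6, 7:8, 9:9, 10:12]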
 
-    # This follows the same implementation as highest_in_out above, but using
-    # only the outgoing flows.
-    #
-    # The query below is trying to replace the following constraints_partitions example:
-    #= (
-    #     name = :highest_out,
-    #     partitions = _outflows,
-    #     strategy = :highest,
-    #     asset_filter = (a, y) -> graph[a].type in ["producer", "storage", "conversion"],
-    # ),
-    =#
-    DuckDB.execute(
-        connection,
-        "CREATE OR REPLACE TABLE cons_indices_highest_out AS
-        SELECT
-            main.asset,
-            main.year,
-            main.rep_period,
-            COALESCE(sub.time_block_start, 1) AS time_block_start,
-            lead(sub.time_block_start - 1, 1, main.num_timesteps)
-                OVER (PARTITION BY main.asset, main.year, main.rep_period ORDER BY time_block_start)
-                AS time_block_end,
-        FROM t_cons_indices AS main
-        LEFT JOIN (
-            SELECT from_asset as asset, year, rep_period, time_block_start
-            FROM flow_time_resolution
-        ) AS sub
-            ON main.asset=sub.asset
-            AND main.year=sub.year
-            AND main.rep_period=sub.rep_period
-        WHERE main.type in ('producer', 'storage', 'conversion')
-        ",
-    )
 
+    # The logic:
+    # - for each group (asset, year, rep_period) in t_union
+    # - keep all unique time_block_start values
+    # - create the corresponding time_block_end values
 
+    for union_table in
+        ("t_union_" * x for x in ("in_flows", "out_flows", "assets_and_out_flows", "all_flows"))
+        table_name = replace(union_table, "t_union" => "t_highest")
+        DuckDB.execute(
+            connection,
+            "CREATE OR REPLACE TABLE $table_name AS
+            SELECT
+                asset,
+                t_union.year,
+                t_union.rep_period,
+                time_block_start,
+                lead(time_block_start - 1, 1, rep_periods_data.num_timesteps)
+                    OVER (PARTITION BY asset, t_union.year, t_union.rep_period ORDER BY time_block_start)
+                    AS time_block_end
+            FROM (
+                SELECT DISTINCT asset, year, rep_period, time_block_start
+                FROM $union_table
+            ) AS t_union
+            LEFT JOIN rep_periods_data
+                ON t_union.year = rep_periods_data.year
+                AND t_union.rep_period = rep_periods_data.rep_period
+            ORDER BY asset, t_union.year, t_union.rep_period, time_block_start
+            ",
+        )
+    end
 end
 
 """
diff --git a/src/variables/create.jl b/src/variables/create.jl
index 4f4af6f7..ae690cc5 100644
--- a/src/variables/create.jl
+++ b/src/variables/create.jl
@@ -70,6 +70,7 @@ function compute_variables_indices(connection)
             DuckDB.query(
                 connection,
                 "CREATE OR REPLACE TEMP SEQUENCE id START 1;
+                CREATE OR REPLACE TABLE storage_level_intra_rp AS
                 SELECT
                     nextval('id') as index,
                     t_low.asset,
@@ -82,7 +83,8 @@
                     ON t_low.asset = asset.asset
                 WHERE
                     asset.type = 'storage'
-                    AND asset.is_seasonal = false
+                    AND asset.is_seasonal = false;
+                SELECT * FROM storage_level_intra_rp
                 ",
             ) |> DataFrame,
         )
@@ -91,6 +93,7 @@
             DuckDB.query(
                 connection,
                 "CREATE OR REPLACE TEMP SEQUENCE id START 1;
+                CREATE OR REPLACE TABLE storage_level_inter_rp AS
                 SELECT
                     nextval('id') as index,
                     asset.asset,
@@ -101,7 +104,8 @@
                 LEFT JOIN asset
                     ON attr.asset = asset.asset
                 WHERE
-                    asset.type = 'storage'
+                    asset.type = 'storage';
+                SELECT * FROM storage_level_inter_rp
                 ",
             ) |> DataFrame,
         )
diff --git a/test/test-pipeline.jl b/test/test-pipeline.jl
index fe7ceed0..3e8810d2 100644
--- a/test/test-pipeline.jl
+++ b/test/test-pipeline.jl
@@ -22,25 +22,18 @@ end
     # Internal data and structures pre-model
     graph, representative_periods, timeframe, groups, years =
        create_internal_structures(connection)
-    constraints_partitions = compute_constraints_partitions(graph, representative_periods, years)
-    dataframes = construct_dataframes(
-        connection,
-        graph,
-        representative_periods,
-        constraints_partitions,
-        years,
-    )
     model_parameters = ModelParameters(connection)
     sets = create_sets(graph, years)
     variables = compute_variables_indices(connection)
+    constraints = compute_constraints_indices(connection)
 
     # Create model
     model = create_model(
         graph,
         sets,
         variables,
+        constraints,
         representative_periods,
-        dataframes,
         years,
         timeframe,
         groups,
@@ -49,5 +42,5 @@
 
     # Solve model
     solution = solve_model(model, variables, HiGHS.Optimizer)
-    save_solution_to_file(mktempdir(), graph, dataframes, solution)
+    save_solution_to_file(mktempdir(), graph, solution)
 end
diff --git a/test/test-time-resolution.jl b/test/test-time-resolution.jl
deleted file mode 100644
index fc9633d4..00000000
--- a/test/test-time-resolution.jl
+++ /dev/null
@@ -1,43 +0,0 @@
-@testset "Time resolution" begin
-    @testset "compute_rp_partition" begin
-        # regular
-        partition1 = [1:4, 5:8, 9:12] # every 4 hours
-        partition2 = [1:3, 4:6, 7:9, 10:12] # every 3 hours
-        partition3 = [i:i for i in 1:12] # hourly
-
-        @testset "strategy greedy (default)" begin
-            @test compute_rp_partition([partition1, partition2], :lowest) == partition1
-            @test compute_rp_partition([partition1, partition2, partition3], :lowest) == partition1
-            @test compute_rp_partition([partition2, partition3], :lowest) == partition2
-        end
-
-        @testset "strategy all" begin
-            @test compute_rp_partition([partition1, partition2], :highest) ==
-                  [1:3, 4:4, 5:6, 7:8, 9:9, 10:12]
-            @test compute_rp_partition([partition1, partition2, partition3], :highest) == partition3
-            @test compute_rp_partition([partition2, partition3], :highest) == partition3
-        end
-
-        # Irregular
-        time_steps4 = [1:6, 7:9, 10:11, 12:12]
-        time_steps5 = [1:2, 3:4, 5:12]
-
-        @testset "strategy greedy (default)" begin
-            @test compute_rp_partition([partition1, time_steps4], :lowest) == [1:6, 7:9, 10:12]
-            @test compute_rp_partition([partition1, time_steps5], :lowest) == [1:4, 5:12]
-            @test compute_rp_partition([time_steps4, time_steps5], :lowest) == [1:6, 7:12]
-        end
-
-        @testset "strategy all" begin
-            @test compute_rp_partition([partition1, time_steps4], :highest) ==
-                  [1:4, 5:6, 7:8, 9:9, 10:11, 12:12]
-            @test compute_rp_partition([partition1, time_steps5], :highest) == [1:2, 3:4, 5:8, 9:12]
-            @test compute_rp_partition([time_steps4, time_steps5], :highest) ==
-                  [1:2, 3:4, 5:6, 7:9, 10:11, 12:12]
-        end
-
-        @testset "Bad strategy" begin
-            @test_throws ErrorException compute_rp_partition([partition1, partition2], :bad)
-        end
-    end
-end
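As a closing illustration, here is a minimal sketch of how the new TulipaConstraint container from src/structures.jl is meant to be used, assuming the struct is in scope. The indices values below are made up for this example; in the package they come from the DuckDB tables built in src/tmp.jl.

using DataFrames, JuMP

# Hypothetical indices table, shaped like the rows of t_highest_out_flows
indices = DataFrame(;
    index = [1, 2],
    asset = ["demo_asset", "demo_asset"],
    year = [2030, 2030],
    rep_period = [1, 1],
    time_block_start = [1, 7],
    time_block_end = [6, 12],
)

cons = TulipaConstraint(indices)  # the expressions dictionary starts empty

# Attach one affine expression per row of the indices table, as the
# add_expression_terms_*! functions do:
cons.expressions[:outgoing] = [JuMP.AffExpr(0.0) for _ in 1:size(cons.indices, 1)]

# Flow terms are then accumulated into the expression of each row, and the
# add_*_constraints! functions read them back, e.g. via
# constraints[:highest_out].expressions[:outgoing]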