From e3d52d444e0f3467680e216332e9b96256b0486e Mon Sep 17 00:00:00 2001
From: Diego Alejandro Tejada Arango <12887482+datejada@users.noreply.github.com>
Date: Tue, 28 May 2024 06:44:09 +0200
Subject: [PATCH] Create new variable and constraints for the
 `use_binary_storage_method` (#645)

* Update filter functions to create the sets
* Add new partition lowest_in_out
* Add new variable is_charging
* Add new variable is_charging to the dataframe
* Improve readability in the code
* Update Norse case data
* Add constraints and variables to the model
* Update agg function to add expression of is_charging
* Update capacity constraints depending if they are in Ai or not
* Add initial capacity to PHS in the Norse case study
* Change filter_assets function to have multiple dispatch
* Update Norse case study input data
* Update Norse case study input data for partitions
* Update MIP gap for the Norse Case
* Include changes from code review
---
 src/constraints/capacity.jl                    | 151 +++++++++++++++++-
 src/create-model.jl                            |  95 ++++++++++-
 src/time-resolution.jl                         |   7 +
 test/inputs/Norse/assets-data.csv              |   6 +-
 test/inputs/Norse/flows-data.csv               |   1 +
 .../Norse/flows-rep-periods-partitions.csv     |   2 +
 test/test-case-studies.jl                      |   6 +-
 7 files changed, 255 insertions(+), 13 deletions(-)

diff --git a/src/constraints/capacity.jl b/src/constraints/capacity.jl
index 1d4eb615..212436c6 100644
--- a/src/constraints/capacity.jl
+++ b/src/constraints/capacity.jl
@@ -7,6 +7,7 @@ add_capacity_constraints!(model,
     df_flows,
     flow,
     Ai,
+    Asb,
     assets_investment,
     outgoing_flow_highest_out_resolution,
     incoming_flow_highest_in_resolution
@@ -22,6 +23,7 @@ function add_capacity_constraints!(
     df_flows,
     flow,
     Ai,
+    Asb,
     assets_investment,
     outgoing_flow_highest_out_resolution,
     incoming_flow_highest_in_resolution,
@@ -59,6 +61,57 @@ function add_capacity_constraints!(
         end for row in eachrow(dataframes[:highest_out])
     ]
 
+    # - Create capacity limit for outgoing flows with binary is_charging for storage assets
+    assets_profile_times_capacity_out_with_binary_part1 =
+        model[:assets_profile_times_capacity_out_with_binary_part1] = [
+            if row.asset ∈ Ai && row.asset ∈ Asb
+                @expression(
+                    model,
+                    profile_aggregation(
+                        Statistics.mean,
+                        graph[row.asset].rep_periods_profiles,
+                        (:availability, row.rp),
+                        row.timesteps_block,
+                        1.0,
+                    ) *
+                    (graph[row.asset].initial_capacity + graph[row.asset].investment_limit) *
+                    (1 - row.is_charging)
+                )
+            else
+                @expression(
+                    model,
+                    profile_aggregation(
+                        Statistics.mean,
+                        graph[row.asset].rep_periods_profiles,
+                        (:availability, row.rp),
+                        row.timesteps_block,
+                        1.0,
+                    ) *
+                    (graph[row.asset].initial_capacity) *
+                    (1 - row.is_charging)
+                )
+            end for row in eachrow(dataframes[:highest_out])
+        ]
+
+    assets_profile_times_capacity_out_with_binary_part2 =
+        model[:assets_profile_times_capacity_out_with_binary_part2] = [
+            if row.asset ∈ Ai && row.asset ∈ Asb
+                @expression(
+                    model,
+                    profile_aggregation(
+                        Statistics.mean,
+                        graph[row.asset].rep_periods_profiles,
+                        (:availability, row.rp),
+                        row.timesteps_block,
+                        1.0,
+                    ) * (
+                        graph[row.asset].initial_capacity * (1 - row.is_charging) +
+                        graph[row.asset].capacity * assets_investment[row.asset]
+                    )
+                )
+            end for row in eachrow(dataframes[:highest_out])
+        ]
+
     # - Create capacity limit for incoming flows
     assets_profile_times_capacity_in =
         model[:assets_profile_times_capacity_in] = [
@@ -90,6 +143,57 @@ function add_capacity_constraints!(
         end for row in eachrow(dataframes[:highest_in])
     ]
 
+    # - Create capacity limit for incoming flows with binary is_charging for storage assets
+    assets_profile_times_capacity_in_with_binary_part1 =
+        model[:assets_profile_times_capacity_in_with_binary_part1] = [
+            if row.asset ∈ Ai && row.asset ∈ Asb
+                @expression(
+                    model,
+                    profile_aggregation(
+                        Statistics.mean,
+                        graph[row.asset].rep_periods_profiles,
+                        (:availability, row.rp),
+                        row.timesteps_block,
+                        1.0,
+                    ) *
+                    (graph[row.asset].initial_capacity + graph[row.asset].investment_limit) *
+                    row.is_charging
+                )
+            else
+                @expression(
+                    model,
+                    profile_aggregation(
+                        Statistics.mean,
+                        graph[row.asset].rep_periods_profiles,
+                        (:availability, row.rp),
+                        row.timesteps_block,
+                        1.0,
+                    ) *
+                    (graph[row.asset].initial_capacity) *
+                    row.is_charging
+                )
+            end for row in eachrow(dataframes[:highest_in])
+        ]
+
+    assets_profile_times_capacity_in_with_binary_part2 =
+        model[:assets_profile_times_capacity_in_with_binary_part2] = [
+            if row.asset ∈ Ai && row.asset ∈ Asb
+                @expression(
+                    model,
+                    profile_aggregation(
+                        Statistics.mean,
+                        graph[row.asset].rep_periods_profiles,
+                        (:availability, row.rp),
+                        row.timesteps_block,
+                        1.0,
+                    ) * (
+                        graph[row.asset].initial_capacity * row.is_charging +
+                        graph[row.asset].capacity * assets_investment[row.asset]
+                    )
+                )
+            end for row in eachrow(dataframes[:highest_in])
+        ]
+
     ## Capacity limit constraints (using the highest resolution)
     # - Maximum output flows limit
     model[:max_output_flows_limit] = [
@@ -99,7 +203,7 @@ function add_capacity_constraints!(
             assets_profile_times_capacity_out[row.index],
             base_name = "max_output_flows_limit[$(row.asset),$(row.rp),$(row.timesteps_block)]"
         ) for row in eachrow(dataframes[:highest_out]) if
-        outgoing_flow_highest_out_resolution[row.index] != 0
+        row.asset ∉ Asb && outgoing_flow_highest_out_resolution[row.index] != 0
     ]
 
     # - Maximum input flows limit
@@ -110,7 +214,50 @@ function add_capacity_constraints!(
             assets_profile_times_capacity_in[row.index],
             base_name = "max_input_flows_limit[$(row.asset),$(row.rp),$(row.timesteps_block)]"
         ) for row in eachrow(dataframes[:highest_in]) if
-        incoming_flow_highest_in_resolution[row.index] != 0
+        row.asset ∉ Asb && incoming_flow_highest_in_resolution[row.index] != 0
+    ]
+
+    ## Capacity limit constraints (using the highest resolution) for storage assets using binary to avoid charging and discharging at the same time
+    # - Maximum output flows limit with is_charging binary for storage assets
+    model[:max_output_flows_limit_with_binary_part1] = [
+        @constraint(
+            model,
+            outgoing_flow_highest_out_resolution[row.index] ≤
+            assets_profile_times_capacity_out_with_binary_part1[row.index],
+            base_name = "max_output_flows_limit_with_binary_part1[$(row.asset),$(row.rp),$(row.timesteps_block)]"
+        ) for row in eachrow(dataframes[:highest_out]) if
+        row.asset ∈ Asb && outgoing_flow_highest_out_resolution[row.index] != 0
+    ]
+
+    model[:max_output_flows_limit_with_binary_part2] = [
+        @constraint(
+            model,
+            outgoing_flow_highest_out_resolution[row.index] ≤
+            assets_profile_times_capacity_out_with_binary_part2[row.index],
+            base_name = "max_output_flows_limit_with_binary_part2[$(row.asset),$(row.rp),$(row.timesteps_block)]"
+        ) for row in eachrow(dataframes[:highest_out]) if row.asset ∈ Ai &&
+        row.asset ∈ Asb &&
+        outgoing_flow_highest_out_resolution[row.index] != 0
+    ]
+
+    # - Maximum input flows limit with is_charging binary for storage assets
+    model[:max_input_flows_limit_with_binary_part1] = [
+        @constraint(
+            model,
+            incoming_flow_highest_in_resolution[row.index] ≤
+            assets_profile_times_capacity_in_with_binary_part1[row.index],
+            base_name =
+                "max_input_flows_limit_with_binary_part1[$(row.asset),$(row.rp),$(row.timesteps_block)]"
+        ) for row in eachrow(dataframes[:highest_in]) if
+        row.asset ∈ Asb && incoming_flow_highest_in_resolution[row.index] != 0
+    ]
+    model[:max_input_flows_limit_with_binary_part2] = [
+        @constraint(
+            model,
+            incoming_flow_highest_in_resolution[row.index] ≤
+            assets_profile_times_capacity_in_with_binary_part2[row.index],
+            base_name = "max_input_flows_limit_with_binary_part2[$(row.asset),$(row.rp),$(row.timesteps_block)]"
+        ) for row in eachrow(dataframes[:highest_in]) if
+        row.asset ∈ Ai && row.asset ∈ Asb && incoming_flow_highest_in_resolution[row.index] != 0
     ]
 
     # - Lower limit for flows that are not transport assets
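The part1/part2 pattern above can be read in isolation: part1 bounds the flow by the largest capacity the asset could ever have (initial capacity plus investment limit) and is switched off by is_charging, while part2 bounds it by the capacity actually installed and multiplies is_charging only with constants, so the constraint stays linear in the investment variable. Below is a minimal JuMP sketch of the same idea for one storage asset and one timestep block; it is not part of the patch, and the numbers and variable names are illustrative only.

using JuMP

model = Model()

availability     = 0.95   # stands in for the profile_aggregation(...) mean
initial_capacity = 10.0   # capacity already installed
investment_limit = 50.0   # largest capacity that could still be added
unit_capacity    = 5.0    # capacity per invested unit

@variable(model, outgoing_flow >= 0)
@variable(model, 0 <= invested_units <= 10, Int)
@variable(model, is_charging, Bin)

# part1: cap by the largest possible capacity, switched off while charging
@constraint(model, part1,
    outgoing_flow <=
    availability * (initial_capacity + investment_limit) * (1 - is_charging)
)

# part2: cap by the installed capacity; the binary multiplies only constants,
# so no product of is_charging with invested_units appears
@constraint(model, part2,
    outgoing_flow <=
    availability * (initial_capacity * (1 - is_charging) + unit_capacity * invested_units)
)

Together the two constraints force the outgoing flow to zero whenever is_charging is 1, which is what the max_output_flows_limit_with_binary_* constraints above enforce for assets in Asb; the mirrored *_in_* constraints use is_charging instead of (1 - is_charging), so charging is only allowed when the binary is 1.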
diff --git a/src/create-model.jl b/src/create-model.jl
index 492371a3..36e42d66 100644
--- a/src/create-model.jl
+++ b/src/create-model.jl
@@ -167,6 +167,51 @@ function add_expression_terms_intra_rp_constraints!(
     end
 end
 
+"""
+add_expression_is_charging_terms_intra_rp_constraints!(df_cons,
+                                                       df_is_charging,
+                                                       workspace
+                                                       )
+
+Computes the is_charging expressions per row of df_cons for the constraints
+that are within (intra) the representative periods.
+
+This function is only used internally in the model.
+
+This strategy is based on the replies in this discourse thread:
+
+  - https://discourse.julialang.org/t/help-improving-the-speed-of-a-dataframes-operation/107615/23
+"""
+function add_expression_is_charging_terms_intra_rp_constraints!(df_cons, df_is_charging, workspace)
+    # Aggregating function: We have to compute the proportion of each is_charging variable in the constraint timesteps_block.
+    agg = Statistics.mean
+
+    grouped_cons = DataFrames.groupby(df_cons, [:rp, :asset])
+
+    df_cons[!, :is_charging] .= JuMP.AffExpr(0.0)
+    grouped_is_charging = DataFrames.groupby(df_is_charging, [:rp, :asset])
+    for ((rp, asset), sub_df) in pairs(grouped_cons)
+        if !haskey(grouped_is_charging, (rp, asset))
+            continue
+        end
+
+        for i in eachindex(workspace)
+            workspace[i] = JuMP.AffExpr(0.0)
+        end
+        # Store the corresponding variables in the workspace
+        for row in eachrow(grouped_is_charging[(rp, asset)])
+            asset = row[:asset]
+            for t in row.timesteps_block
+                JuMP.add_to_expression!(workspace[t], row.is_charging)
+            end
+        end
+        # Apply the agg function to the corresponding variables from the workspace
+        for row in eachrow(sub_df)
+            row[:is_charging] = agg(@view workspace[row.timesteps_block])
+        end
+    end
+end
+
 """
 add_expression_terms_inter_rp_constraints!(df_inter,
                                            df_flows,
@@ -282,9 +327,11 @@ function create_model(graph, representative_periods, dataframes, timeframe; writ
     ## Sets unpacking
     A = MetaGraphsNext.labels(graph) |> collect
     F = MetaGraphsNext.edge_labels(graph) |> collect
-    filter_assets(key, value) =
-        filter(a -> !ismissing(getfield(graph[a], key)) && getfield(graph[a], key) == value, A)
-    filter_flows(key, value) = filter(f -> getfield(graph[f...], key) == value, F)
+    filter_assets(key, values) =
+        filter(a -> !ismissing(getfield(graph[a], key)) && getfield(graph[a], key) == values, A)
+    filter_assets(key, values::Vector{Symbol}) =
+        filter(a -> !ismissing(getfield(graph[a], key)) && getfield(graph[a], key) in values, A)
+    filter_flows(key, values) = filter(f -> getfield(graph[f...], key) == values, F)
 
     Ac = filter_assets(:type, :consumer)
     Ap = filter_assets(:type, :producer)
@@ -297,8 +344,9 @@ function create_model(graph, representative_periods, dataframes, timeframe; writ
     Ai = filter_assets(:investable, true)
     Fi = filter_flows(:investable, true)
 
-    # Create subsets of assets by storage_method_energy
-    Ase = filter_assets(:storage_method_energy, true)
+    # Create subsets of storage assets
+    Ase = As ∩ filter_assets(:storage_method_energy, true)
+    Asb = As ∩ filter_assets(:use_binary_storage_method, [:binary, :relaxed_binary])
 
     # Maximum timestep
     Tmax = maximum(last(rp.timesteps) for rp in representative_periods)
@@ -306,6 +354,7 @@ function create_model(graph, representative_periods, dataframes, timeframe; writ
 
     # Unpacking dataframes
     df_flows = dataframes[:flows]
+    df_is_charging = dataframes[:lowest_in_out]
 
     df_storage_intra_rp_balance_grouped =
         DataFrames.groupby(dataframes[:lowest_storage_level_intra_rp], [:asset, :rp])
@@ -343,6 +392,17 @@ function create_model(graph, representative_periods, dataframes, timeframe; writ
             base_name = "storage_level_inter_rp[$(row.asset),$(row.periods_block)]"
         ) for row in eachrow(dataframes[:storage_level_inter_rp])
     ]
+    is_charging =
+        model[:is_charging] =
+            df_is_charging.is_charging = [
+                @variable(
+                    model,
+                    lower_bound = 0.0,
+                    upper_bound = 1.0,
+                    base_name = "is_charging[$(row.asset),$(row.rp),$(row.timesteps_block)]"
+                ) for row in eachrow(df_is_charging)
+            ]
+
     ### Integer Investment Variables
     for a in Ai
         if graph[a].investment_integer
@@ -362,6 +422,19 @@ function create_model(graph, representative_periods, dataframes, timeframe; writ
         end
     end
 
+    df_is_charging.use_binary_storage_method =
+        [graph[a].use_binary_storage_method for a in df_is_charging.asset]
+    sub_df_is_charging_binary = DataFrames.subset(
+        df_is_charging,
+        :asset => DataFrames.ByRow(in(Asb)),
+        :use_binary_storage_method => DataFrames.ByRow(==(:binary));
+        view = true,
+    )
+
+    for row in eachrow(sub_df_is_charging_binary)
+        JuMP.set_binary(is_charging[row.index])
+    end
+
     ## Expressions
     @expression(
         model,
@@ -426,6 +499,17 @@ function create_model(graph, representative_periods, dataframes, timeframe; writ
         graph,
         representative_periods,
     )
+    add_expression_is_charging_terms_intra_rp_constraints!(
+        dataframes[:highest_in],
+        df_is_charging,
+        expression_workspace,
+    )
+    add_expression_is_charging_terms_intra_rp_constraints!(
+        dataframes[:highest_out],
+        df_is_charging,
+        expression_workspace,
+    )
+
     incoming_flow_lowest_resolution =
         model[:incoming_flow_lowest_resolution] = dataframes[:lowest].incoming_flow
     outgoing_flow_lowest_resolution =
@@ -503,6 +587,7 @@ function create_model(graph, representative_periods, dataframes, timeframe; writ
         df_flows,
         flow,
         Ai,
+        Asb,
         assets_investment,
         outgoing_flow_highest_out_resolution,
         incoming_flow_highest_in_resolution,
diff --git a/src/time-resolution.jl b/src/time-resolution.jl
index 7309b7f5..b990bfe4 100644
--- a/src/time-resolution.jl
+++ b/src/time-resolution.jl
@@ -38,6 +38,13 @@ function compute_constraints_partitions(graph, representative_periods)
             strategy = :lowest,
             asset_filter = a -> graph[a].type == :storage && !graph[a].is_seasonal,
         ),
+        (
+            name = :lowest_in_out,
+            partitions = _allflows,
+            strategy = :lowest,
+            asset_filter = a ->
+                graph[a].type == :storage && !ismissing(graph[a].use_binary_storage_method),
+        ),
         (
             name = :highest_in_out,
             partitions = _allflows,
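The new add_expression_is_charging_terms_intra_rp_constraints! in create-model.jl above fills a timestep-indexed workspace with the is_charging variables (defined on the new lowest_in_out partition) and then takes the mean over each constraint's timesteps_block. A self-contained sketch of that aggregation follows; it is not part of the patch and uses made-up block sizes instead of the grouped DataFrames the real code works on.

using JuMP
using Statistics: mean

model = Model()

# two is_charging variables defined on timestep blocks 1:2 and 3:4
blocks = [1:2, 3:4]
@variable(model, 0 <= is_charging[1:2] <= 1)

# workspace[t] collects the variable that covers timestep t
workspace = [AffExpr(0.0) for _ in 1:4]
for (var, block) in zip(is_charging, blocks), t in block
    add_to_expression!(workspace[t], var)
end

# a constraint block on timesteps 1:3 then sees 2/3 of the first variable
# and 1/3 of the second one
is_charging_expr = mean(@view workspace[1:3])

Note that, as in the variable section of the diff above, the is_charging variables start out continuous with bounds [0, 1] and are only promoted to true binaries via JuMP.set_binary when an asset's use_binary_storage_method is :binary; :relaxed_binary keeps the linear relaxation.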
diff --git a/test/inputs/Norse/assets-data.csv b/test/inputs/Norse/assets-data.csv
index 7b535aa7..c1dab0a4 100644
--- a/test/inputs/Norse/assets-data.csv
+++ b/test/inputs/Norse/assets-data.csv
@@ -1,13 +1,13 @@
 ,{producer;consumer;storage;hub;conversion},{true;false},{true;false},{true;false},kEUR/MW/year,MW,MW,MW,MW,{empty;==;>=},{true;false},MWh/year,MWh,MWh,h,{true;false},kEUR/MWh/year,MWh,MWh,{true;false},{missing;binary;relaxed_binary}
 name,type,active,investable,investment_integer,investment_cost,investment_limit,capacity,initial_capacity,peak_demand,consumer_balance_sense,is_seasonal,storage_inflows,initial_storage_capacity,initial_storage_level,energy_to_power_ratio,storage_method_energy,investment_cost_storage_energy,investment_limit_storage_energy,capacity_storage_energy,investment_integer_storage_energy,use_binary_storage_method
-Asgard_Battery,storage,true,true,true,300,,10,0,0,,false,0,0,,100,true,30,1000,10,true,binary
+Asgard_Battery,storage,true,true,true,300,25000,100,725,0,,false,0,0,,100,true,30,1000,10,true,binary
 Asgard_Solar,producer,true,true,true,350,50000,100,0,0,,false,0,0,0,0,false,0,,0,false,
 Asgard_E_demand,consumer,true,false,false,0,0,0,0,65787.17792,,false,0,0,0,0,false,0,,0,false,
 Asgard_CCGT,conversion,true,true,true,650,,500,0,0,,false,0,0,0,0,false,0,,0,false,
 G_imports,producer,true,false,false,0,0,0,75000,0,,false,0,0,0,0,false,0,,0,false,
 Midgard_Wind,producer,true,true,true,1300,80000,3,0,0,,false,0,0,0,0,false,0,,0,false,
-Midgard_Hydro,storage,true,false,false,1600,0,0,250,0,,true,10000,50000,25000,0,false,0,,0,false,
-Midgard_PHS,storage,true,true,true,800,,200,0,0,,false,0,0,,1,true,500,1000,100,false,relaxed_binary
+Midgard_Hydro,storage,true,false,false,1600,0,0,250,0,,true,10000,50000,25000,0,false,0,,0,false,relaxed_binary
+Midgard_PHS,storage,true,true,true,800,5000,200,350,0,,false,0,0,,1,true,500,1000,100,false,
 Midgard_Nuclear_SMR,producer,true,true,false,6000,,150,1000,0,,false,0,0,0,0,false,0,,0,false,
 Midgard_E_imports,producer,true,false,false,0,0,0,500,0,,false,0,0,0,0,false,0,,0,false,
 Midgard_CCGT,conversion,true,true,true,650,,500,0,0,,false,0,0,0,0,false,0,,0,false,
diff --git a/test/inputs/Norse/flows-data.csv b/test/inputs/Norse/flows-data.csv
index 7e28f4de..33689677 100644
--- a/test/inputs/Norse/flows-data.csv
+++ b/test/inputs/Norse/flows-data.csv
@@ -16,6 +16,7 @@ electricity,Midgard_Nuclear_SMR,Midgard_E_demand,true,false,false,false,0.015,0,
 electricity,Midgard_E_imports,Midgard_E_demand,true,false,false,false,0.02,0,0,0,0,0,1
 electricity,Midgard_CCGT,Midgard_E_demand,true,false,false,false,0,0,0,0,0,0,0.5
 electricity,Midgard_Hydro,Midgard_E_demand,true,false,false,false,0,0,0,0,0,0,1
+electricity,Midgard_E_demand,Midgard_Hydro,true,false,false,false,0,0,0,0,0,0,0.7
 electricity,Midgard_PHS,Midgard_E_demand,true,false,false,false,0.004,0,0,0,0,0,0.85
 electricity,Midgard_E_demand,Midgard_PHS,true,false,false,false,0.002,0,0,0,0,0,0.85
 electricity,Valhalla_GT,Valhalla_E_balance,true,false,false,false,0,0,0,0,0,0,0.42
diff --git a/test/inputs/Norse/flows-rep-periods-partitions.csv b/test/inputs/Norse/flows-rep-periods-partitions.csv
index d4f173f5..a06c4edc 100644
--- a/test/inputs/Norse/flows-rep-periods-partitions.csv
+++ b/test/inputs/Norse/flows-rep-periods-partitions.csv
@@ -2,3 +2,5 @@ from_asset,to_asset,rep_period,specification,partition
 Asgard_Solar,Asgard_Battery,2,math,4x3+3x4
 Asgard_Solar,Asgard_E_demand,2,math,3x4+4x3
+Asgard_Solar,Asgard_Battery,1,math,42x2+28x3
+Asgard_Battery,Asgard_E_demand,1,math,28x3+42x2
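The two partition rows added above use the "math" specification. Reading each NxL term as N consecutive blocks of L timesteps (my reading of the format used throughout these input files), 42x2+28x3 gives 70 blocks covering 84 + 84 = 168 timesteps. A small, hypothetical helper that expands such a string, for illustration only and not a function of the package:

function expand_math_partition(spec::AbstractString)
    blocks = UnitRange{Int}[]
    start = 1
    for term in split(spec, '+')
        n, len = parse.(Int, split(term, 'x'))
        for _ in 1:n
            push!(blocks, start:(start + len - 1))
            start += len
        end
    end
    return blocks
end

expand_math_partition("42x2+28x3")   # 70 blocks, ending at timestep 168
expand_math_partition("4x3+3x4")     # 7 blocks, ending at timestep 24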
diff --git a/test/test-case-studies.jl b/test/test-case-studies.jl
index 23c5c5f6..1922d2d4 100644
--- a/test/test-case-studies.jl
+++ b/test/test-case-studies.jl
@@ -1,11 +1,11 @@
 @testset "Norse Case Study" begin
     dir = joinpath(INPUT_FOLDER, "Norse")
     parameters_dict = Dict(
-        HiGHS.Optimizer => Dict("mip_rel_gap" => 0.0, "output_flag" => false),
-        GLPK.Optimizer => Dict("mip_gap" => 0.0, "msg_lev" => 0, "presolve" => GLPK.GLP_ON),
+        HiGHS.Optimizer => Dict("mip_rel_gap" => 0.01, "output_flag" => false),
+        GLPK.Optimizer => Dict("mip_gap" => 0.01, "msg_lev" => 0, "presolve" => GLPK.GLP_ON),
     )
     if !Sys.isapple()
-        parameters_dict[Cbc.Optimizer] = Dict("ratioGap" => 0.0, "logLevel" => 0)
+        parameters_dict[Cbc.Optimizer] = Dict("ratioGap" => 0.01, "logLevel" => 0)
     end
     for (optimizer, parameteres) in parameters_dict
         energy_problem = run_scenario(dir; optimizer = optimizer, parameters = parameteres)
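The relaxed 1% gap above is passed straight to the solver through run_scenario, so the Norse case can be reproduced outside the test suite along these lines. This is a usage sketch only: it assumes the package module is TulipaEnergyModel with the exported run_scenario used in the test, and it spells out the input path instead of the test's INPUT_FOLDER constant.

using TulipaEnergyModel, HiGHS

dir = joinpath("test", "inputs", "Norse")
energy_problem = run_scenario(
    dir;
    optimizer = HiGHS.Optimizer,
    parameters = Dict("mip_rel_gap" => 0.01, "output_flag" => false),
)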