From 9da07752ad658f877c5908af5ebb0e56019d6108 Mon Sep 17 00:00:00 2001 From: Abel Soares Siqueira Date: Tue, 18 Feb 2025 09:07:35 +0100 Subject: [PATCH] Remove graph and other internal structures and general cleaning (#1023) --- Project.toml | 4 - benchmark/benchmarks.jl | 1 - docs/Project.toml | 1 - docs/src/60-structures.md | 16 +- src/TulipaEnergyModel.jl | 5 - src/constraints/capacity.jl | 2 +- src/constraints/consumer.jl | 8 +- src/constraints/conversion.jl | 1 - src/constraints/energy.jl | 5 +- src/constraints/hub.jl | 8 +- .../ramping-and-unit-commitment.jl | 9 +- src/constraints/storage.jl | 2 +- src/constraints/transport.jl | 2 +- src/create-model.jl | 23 +- src/economic-parameters.jl | 396 ------------------ src/expressions/storage.jl | 1 - src/io.jl | 320 +------------- src/model-preparation.jl | 81 ++-- src/structures.jl | 195 ++------- src/utils.jl | 149 +------ src/variables/create.jl | 4 +- src/variables/flows.jl | 2 +- src/variables/investments.jl | 3 +- src/variables/storage.jl | 59 ++- src/variables/unit-commitment.jl | 2 +- test/runtests.jl | 3 +- test/test-economic-parameters.jl | 108 ----- test/test-io.jl | 13 - test/test-pipeline.jl | 5 +- 29 files changed, 145 insertions(+), 1283 deletions(-) delete mode 100644 src/economic-parameters.jl delete mode 100644 test/test-economic-parameters.jl diff --git a/Project.toml b/Project.toml index 4c8f8ab7..0b547588 100644 --- a/Project.toml +++ b/Project.toml @@ -8,12 +8,10 @@ CSV = "336ed68f-0bac-5ca0-87d4-7b16caf5d00b" DataFrames = "a93c6f00-e57d-5684-b7b6-d8193f3e46c0" DuckDB = "d2f5444f-75bc-4fdf-ac35-56f514c445e1" DuckDB_jll = "2cbbab25-fc8b-58cf-88d4-687a02676033" -Graphs = "86223c79-3864-5bf0-83f7-82e725a168b6" HiGHS = "87dc4568-4c63-4d18-b0c0-bb2238e4078b" JuMP = "4076af6c-e467-56ae-b986-b466b2749572" LinearAlgebra = "37e2e46d-f89d-539d-b4ee-838fcccc9c8e" MathOptInterface = "b8f27783-ece8-5eb3-8dc8-9495eed66fee" -MetaGraphsNext = "fa8bd995-216d-47f1-8a91-f3b68fbeb377" OrderedCollections = 
"bac558e1-5e72-5ebc-8fee-abe8a469f55d" SparseArrays = "2f01184e-e22b-5df5-ae63-d93ebab69eaf" Statistics = "10745b16-79ce-11e8-11f9-7d13ad32a3b2" @@ -26,11 +24,9 @@ CSV = "0.10" DataFrames = "1" DuckDB = "0.10, ~1.0" # ~1.0 until they fix https://github.com/duckdb/duckdb/issues/13911 DuckDB_jll = "0.10, ~1.0" # DuckDB 1.0.0 still allows DuckDB_jll 1.1.0 -Graphs = "1.8" HiGHS = "1" JuMP = "1" MathOptInterface = "1" -MetaGraphsNext = "0.6, 0.7" OrderedCollections = "1" SparseArrays = "1" Statistics = "1" diff --git a/benchmark/benchmarks.jl b/benchmark/benchmarks.jl index 399dcc89..ef5ddcd4 100644 --- a/benchmark/benchmarks.jl +++ b/benchmark/benchmarks.jl @@ -1,6 +1,5 @@ using BenchmarkTools using TulipaEnergyModel -using MetaGraphsNext using TulipaIO using DuckDB diff --git a/docs/Project.toml b/docs/Project.toml index 935e29bf..2ab1d120 100644 --- a/docs/Project.toml +++ b/docs/Project.toml @@ -7,6 +7,5 @@ GLPK = "60bf3e95-4087-53dc-ae20-288a0d20c6a6" HiGHS = "87dc4568-4c63-4d18-b0c0-bb2238e4078b" JuMP = "4076af6c-e467-56ae-b986-b466b2749572" LiveServer = "16fef848-5104-11e9-1b77-fb7a48bbb589" -MetaGraphsNext = "fa8bd995-216d-47f1-8a91-f3b68fbeb377" TulipaEnergyModel = "5d7bd171-d18e-45a5-9111-f1f11ac5d04d" TulipaIO = "7b3808b7-0819-42d4-885c-978ba173db11" diff --git a/docs/src/60-structures.md b/docs/src/60-structures.md index 22bc3e60..b5a6c2d8 100644 --- a/docs/src/60-structures.md +++ b/docs/src/60-structures.md @@ -14,18 +14,16 @@ It hides the complexity behind the energy problem, making the usage more friendl ### Fields -- `db_connection`: A DuckDB connection to the input tables in the model -- `graph`: The Graph object that defines the geometry of the energy problem. +- `db_connection`: A DuckDB connection to the input tables in the model. +- `variables`: A dictionary of [TulipaVariable](@ref TulipaVariable)s containing the variables of the model. 
+- `expressions`: A dictionary of [TulipaExpression](@ref TulipaExpression)s containing the expressions of the model attached to tables. +- `constraints`: A dictionary of [TulipaConstraint](@ref TulipaConstraint)s containing the constraints of the model. +- `profiles`: Holds the profiles per `rep_period` or `over_clustered_year` in dictionary format. See [ProfileLookup](@ref). +- `model_parameters`: A [ModelParameters](@ref ModelParameters) structure to store all the parameters that are exclusive of the model. - `model`: A JuMP.Model object representing the optimization model. -- `objective_value`: The objective value of the solved problem (Float64). -- `variables`: A [TulipaVariable](@ref TulipaVariable) structure to store all the information related to the variables in the model. -- `constraints`: A [TulipaConstraint](@ref TulipaConstraint) structure to store all the information related to the constraints in the model. -- `representative_periods`: A vector of [Representative Periods](@ref representative-periods). - `solved`: A boolean indicating whether the `model` has been solved or not. +- `objective_value`: The objective value of the solved problem (Float64). - `termination_status`: The termination status of the optimization model. -- `timeframe`: A structure with the number of periods in the `representative_periods` and the mapping between the periods and their representatives. -- `model_parameters`: A [ModelParameters](@ref ModelParameters) structure to store all the parameters that are exclusive of the model. -- `years`: A vector with the information of all the milestone years. 
### Constructor diff --git a/src/TulipaEnergyModel.jl b/src/TulipaEnergyModel.jl index ee66e418..eaeb51bd 100644 --- a/src/TulipaEnergyModel.jl +++ b/src/TulipaEnergyModel.jl @@ -9,10 +9,6 @@ using DuckDB: DuckDB, DBInterface using TOML: TOML using TulipaIO: TulipaIO -## Graph -using Graphs: Graphs, SimpleDiGraph -using MetaGraphsNext: MetaGraphsNext, MetaGraph - ## Optimization using HiGHS: HiGHS using JuMP: JuMP, @constraint, @expression, @objective, @variable @@ -48,7 +44,6 @@ for folder_name in ["variables", "constraints", "expressions"] include(joinpath(folder_path, file)) end end -include("economic-parameters.jl") include("objective.jl") include("create-model.jl") diff --git a/src/constraints/capacity.jl b/src/constraints/capacity.jl index 1daf002d..0f2e5f93 100644 --- a/src/constraints/capacity.jl +++ b/src/constraints/capacity.jl @@ -1,7 +1,7 @@ export add_capacity_constraints! """ -add_capacity_constraints!(model, graph,...) + add_capacity_constraints!(connection, model, expressions, constraints, profiles) Adds the capacity constraints for all asset types to the model """ diff --git a/src/constraints/consumer.jl b/src/constraints/consumer.jl index 44d4767b..60ce52b8 100644 --- a/src/constraints/consumer.jl +++ b/src/constraints/consumer.jl @@ -1,13 +1,7 @@ export add_consumer_constraints! """ -add_consumer_constraints!(model, - graph, - dataframes, - Ac, - incoming_flow_highest_in_out_resolution, - outgoing_flow_highest_in_out_resolution, - ) + add_consumer_constraints!(connection, model, constraints, profiles) Adds the consumer asset constraints to the model. """ diff --git a/src/constraints/conversion.jl b/src/constraints/conversion.jl index 5bd297bd..636f2207 100644 --- a/src/constraints/conversion.jl +++ b/src/constraints/conversion.jl @@ -10,7 +10,6 @@ add_conversion_constraints!(model, Adds the conversion asset constraints to the model. 
""" - function add_conversion_constraints!(model, constraints) # - Balance constraint (using the lowest temporal resolution) let table_name = :balance_conversion, cons = constraints[table_name] diff --git a/src/constraints/energy.jl b/src/constraints/energy.jl index d1a0b646..c7fe7018 100644 --- a/src/constraints/energy.jl +++ b/src/constraints/energy.jl @@ -1,11 +1,10 @@ export add_energy_constraints! """ -function add_energy_constraints!(model, graph, dataframes) + add_energy_constraints!(connection, model, constraints, profiles) -Adds the energy constraints for assets withnin the period blocks of the timeframe (inter-temporal) to the model. +Adds the energy constraints for assets within the period blocks of the timeframe (inter-temporal) to the model. """ - function add_energy_constraints!(connection, model, constraints, profiles) ## INTER-TEMPORAL CONSTRAINTS (between representative periods) diff --git a/src/constraints/hub.jl b/src/constraints/hub.jl index ce3dfde3..0396b90f 100644 --- a/src/constraints/hub.jl +++ b/src/constraints/hub.jl @@ -1,16 +1,10 @@ export add_hub_constraints! """ -add_hub_constraints!(model, - dataframes, - Ah, - incoming_flow_highest_in_out_resolution, - outgoing_flow_highest_in_out_resolution, - ) + add_hub_constraints!(model, constraints) Adds the hub asset constraints to the model. """ - function add_hub_constraints!(model, constraints) # - Balance constraint (using the lowest temporal resolution) let table_name = :balance_hub, cons = constraints[:balance_hub] diff --git a/src/constraints/ramping-and-unit-commitment.jl b/src/constraints/ramping-and-unit-commitment.jl index da2c3a09..a6395e17 100644 --- a/src/constraints/ramping-and-unit-commitment.jl +++ b/src/constraints/ramping-and-unit-commitment.jl @@ -1,7 +1,14 @@ export add_ramping_and_unit_commitment_constraints! """ - add_ramping_and_unit_commitment_constraints!(model, graph, ...) 
+ add_ramping_and_unit_commitment_constraints!( + connection, + model, + variables, + expressions, + constraints, + profiles + ) Adds the ramping constraints for producer and conversion assets where ramping = true in assets_data """ diff --git a/src/constraints/storage.jl b/src/constraints/storage.jl index ae1c130e..85e46ac8 100644 --- a/src/constraints/storage.jl +++ b/src/constraints/storage.jl @@ -1,7 +1,7 @@ export add_storage_constraints! """ -add_storage_constraints!(model, graph,...) + add_storage_constraints!(connection, model, variables, expressions, constraints, profiles) Adds the storage asset constraints to the model. """ diff --git a/src/constraints/transport.jl b/src/constraints/transport.jl index a27493fb..f7892948 100644 --- a/src/constraints/transport.jl +++ b/src/constraints/transport.jl @@ -1,7 +1,7 @@ export add_transport_constraints! """ -add_transport_constraints!(model, graph, df_flows, flow, Ft, flows_investment) + add_transport_constraints!(connection, model, variables, expressions, constraints, profiles) Adds the transport flow constraints to the model. """ diff --git a/src/create-model.jl b/src/create-model.jl index 385e36fe..00e48a82 100644 --- a/src/create-model.jl +++ b/src/create-model.jl @@ -1,19 +1,18 @@ export create_model!, create_model """ - create_model!(energy_problem; verbose = false) + create_model!(energy_problem; kwargs...) Create the internal model of an [`TulipaEnergyModel.EnergyProblem`](@ref). +Any keyword argument will be passed to the underlying [`create_model`](@ref). """ function create_model!(energy_problem; kwargs...) energy_problem.model = @timeit to "create_model" create_model( energy_problem.db_connection, - energy_problem.graph, energy_problem.variables, energy_problem.expressions, energy_problem.constraints, energy_problem.profiles, - energy_problem.representative_periods, energy_problem.model_parameters; kwargs..., ) @@ -25,18 +24,25 @@
end """ - model = create_model(args...; write_lp_file = false, enable_names = true) + model = create_model( + connection, + variables, + expressions, + constraints, + profiles, + model_parameters; + write_lp_file = false, + enable_names = true + ) Create the energy model manually. We recommend using [`create_model!`](@ref) instead. """ function create_model( connection, - graph, variables, expressions, constraints, profiles, - representative_periods, model_parameters; write_lp_file = false, enable_names = true, @@ -58,17 +64,14 @@ function create_model( @timeit to "add_flow_variables!" add_flow_variables!(connection, model, variables) @timeit to "add_investment_variables!" add_investment_variables!(model, variables) @timeit to "add_unit_commitment_variables!" add_unit_commitment_variables!(model, variables) - @timeit to "add_storage_variables!" add_storage_variables!(model, graph, variables) + @timeit to "add_storage_variables!" add_storage_variables!(connection, model, variables) ## Add expressions to dataframes - # TODO: What will improve this? Variables (#884)?, Constraints? @timeit to "add_expressions_to_constraints!" add_expressions_to_constraints!( connection, variables, constraints, - model, expression_workspace, - profiles, ) ## Expressions for multi-year investment diff --git a/src/economic-parameters.jl b/src/economic-parameters.jl deleted file mode 100644 index 7b2aad81..00000000 --- a/src/economic-parameters.jl +++ /dev/null @@ -1,396 +0,0 @@ -export calculate_annualized_cost, calculate_salvage_value, calculate_weight_for_investment_discounts - -""" - calculate_annualized_cost(discount_rate, economic_lifetime, investment_cost, years, investable_assets) - -Calculates the annualized cost for each asset, both energy assets and transport assets, in each year using provided discount rates, economic lifetimes, and investment costs. 
- -# Arguments -- `discount_rate::Dict`: A dictionary where the key is an `asset` or a pair of assets `(asset1, asset2)` for transport assets, and the value is the discount rate. -- `economic_lifetime::Dict`: A dictionary where the key is an `asset` or a pair of assets `(asset1, asset2)` for transport assets, and the value is the economic lifetime. -- `investment_cost::Dict`: A dictionary where the key is a tuple `(year, asset)` or `(year, (asset1, asset2))` for transport assets, and the value is the investment cost. -- `years::Array`: An array of years to be considered. -- `investable_assets::Dict`: A dictionary where the key is a year, and the value is an array of assets that are relevant for that year. - -# Returns -- A `Dict` where the keys are tuples `(year, asset)` representing the year and the asset, and the values are the calculated annualized cost for each asset in each year. - -# Formula -The annualized cost for each asset in year is calculated using the formula: - - annualized_cost = discount_rate[asset] / ( - (1 + discount_rate[asset]) * - (1 - 1 / (1 + discount_rate[asset])^economic_lifetime[asset]) - ) * investment_cost[(year, asset)] - -# Example for energy assets - -```jldoctest -discount_rate = Dict("asset1" => 0.05, "asset2" => 0.07) - -economic_lifetime = Dict("asset1" => 10, "asset2" => 15) - -investment_cost = Dict((2021, "asset1") => 1000, (2021, "asset2") => 1500, - (2022, "asset1") => 1100, (2022, "asset2") => 1600) -years = [2021, 2022] - -investable_assets = Dict(2021 => ["asset1", "asset2"], - 2022 => ["asset1"]) - -costs = calculate_annualized_cost(discount_rate, economic_lifetime, investment_cost, years, investable_assets) - -# output - -Dict{Tuple{Int64, String}, Float64} with 3 entries: - (2021, "asset1") => 123.338 - (2021, "asset2") => 153.918 - (2022, "asset1") => 135.671 -``` - -# Example for transport assets - -```jldoctest -discount_rate = Dict(("asset1", "asset2") => 0.05, ("asset3", "asset4") => 0.07) - -economic_lifetime = 
Dict(("asset1", "asset2") => 10, ("asset3", "asset4") => 15) - -investment_cost = Dict((2021, ("asset1", "asset2")) => 1000, (2021, ("asset3", "asset4")) => 1500, - (2022, ("asset1", "asset2")) => 1100, (2022, ("asset3", "asset4")) => 1600) -years = [2021, 2022] - -investable_assets = Dict(2021 => [("asset1", "asset2"), ("asset3", "asset4")], - 2022 => [("asset1", "asset2")]) - -costs = calculate_annualized_cost(discount_rate, economic_lifetime, investment_cost, years, investable_assets) - -# output - -Dict{Tuple{Int64, Tuple{String, String}}, Float64} with 3 entries: - (2022, ("asset1", "asset2")) => 135.671 - (2021, ("asset3", "asset4")) => 153.918 - (2021, ("asset1", "asset2")) => 123.338 -``` -""" -function calculate_annualized_cost( - discount_rate, - economic_lifetime, - investment_cost, - years, - investable_assets, -) - annualized_cost = Dict( - (year, asset) => - discount_rate[asset] / ( - (1 + discount_rate[asset]) * - (1 - 1 / (1 + discount_rate[asset])^economic_lifetime[asset]) - ) * investment_cost[(year, asset)] for year in years for - asset in investable_assets[year] - ) - return annualized_cost -end - -""" - calculate_salvage_value(discount_rate, - economic_lifetime, - annualized_cost, - years, - investable_assets, - ) - -Calculates the salvage value for each asset, both energy assets and transport assets. - -# Arguments -- `discount_rate::Dict`: A dictionary where the key is an `asset` or a pair of assets `(asset1, asset2)` for transport assets, and the value is the discount rate. -- `economic_lifetime::Dict`: A dictionary where the key is an `asset` or a pair of assets `(asset1, asset2)` for transport assets, and the value is the economic lifetime. -- `annualized_cost::Dict`: A `Dict` where the keys are tuples `(year, asset)` representing the year and the asset, and the values are the annualized cost for each asset in each year. -- `years::Array`: An array of years to be considered. 
-- `investable_assets::Dict`: A dictionary where the key is a year, and the value is an array of assets that are relevant for that year. - -# Returns -- A `Dict` where the keys are tuples `(year, asset)` representing the year and the asset, and the values are the salvage value for each asset in each year. - -# Formula -The salvage value for each asset in year is calculated using the formula: - -salvage_value = - annualized_cost[(year, asset)] * sum( - 1 / (1 + discount_rate[asset])^(year_alias - year) for - year_alias in salvage_value_set[(year, asset)] - ) - -# Example for energy assets - -```jldoctest -discount_rate = Dict("asset1" => 0.05, "asset2" => 0.07) - -economic_lifetime = Dict("asset1" => 10, "asset2" => 15) - -annualized_cost = - Dict((2021, "asset1") => 123.338, (2021, "asset2") => 153.918, (2022, "asset1") => 135.671) - -years = [2021, 2022] - -investable_assets = Dict(2021 => ["asset1", "asset2"], 2022 => ["asset1"]) - -salvage_value = calculate_salvage_value( - discount_rate, - economic_lifetime, - annualized_cost, - years, - investable_assets, -) - -# output -Dict{Tuple{Int64, String}, Float64} with 3 entries: - (2021, "asset1") => 759.2 - (2021, "asset2") => 1202.24 - (2022, "asset1") => 964.325 -``` - -# Example for transport assets - -```jldoctest -discount_rate = Dict(("asset1", "asset2") => 0.05, ("asset3", "asset4") => 0.07) - -economic_lifetime = Dict(("asset1", "asset2") => 10, ("asset3", "asset4") => 15) - -annualized_cost = Dict( - (2022, ("asset1", "asset2")) => 135.671, - (2021, ("asset3", "asset4")) => 153.918, - (2021, ("asset1", "asset2")) => 123.338, -) - -years = [2021, 2022] - -investable_assets = - Dict(2021 => [("asset1", "asset2"), ("asset3", "asset4")], 2022 => [("asset1", "asset2")]) - -salvage_value = calculate_salvage_value( - discount_rate, - economic_lifetime, - annualized_cost, - years, - investable_assets, -) - -# output - -Dict{Tuple{Int64, Tuple{String, String}}, Float64} with 3 entries: - (2022, ("asset1", "asset2")) 
=> 964.325 - (2021, ("asset3", "asset4")) => 1202.24 - (2021, ("asset1", "asset2")) => 759.2 -``` -""" -function calculate_salvage_value( - discount_rate, - economic_lifetime, - annualized_cost, - years, - investable_assets, -) - # Create a dict of the years beyond the last milestone year - end_of_horizon = maximum(years) - salvage_value_set = Dict( - (year, asset) => end_of_horizon+1:year+economic_lifetime[asset]-1 for year in years for - asset in investable_assets[year] if year + economic_lifetime[asset] - 1 ≥ end_of_horizon + 1 - ) - - # Create a dict of salvage values - salvage_value = Dict( - (year, asset) => if (year, asset) in keys(salvage_value_set) - annualized_cost[(year, asset)] * sum( - 1 / (1 + discount_rate[asset])^(year_alias - year) for - year_alias in salvage_value_set[(year, asset)] - ) - else - 0 - end for year in years for asset in investable_assets[year] - ) - return salvage_value -end - -""" - calculate_weight_for_investment_discounts(social_rate, - discount_year, - salvage_value, - investment_cost, - years, - investable_assets, - ) - -Calculates the weight for investment discounts for each asset, both energy assets and transport assets. - -# Arguments -- `social_rate::Float64`: A value with the social discount rate. -- `discount_year::Int64`: A value with the discount year for all the investments. -- `salvage_value::Dict`: A dictionary where the key is an tuple `(year, asset)` or `(year, (asset1, asset2))` for transport assets, and the value is the salvage value. -- `investment_cost::Dict`: A dictionary where the key is an tuple `(year, asset)` or `(year, (asset1, asset2))` for transport assets, and the value is the investment cost. -- `years::Array`: An array of years to be considered. -- `investable_assets::Dict`: A dictionary where the key is a year, and the value is an array of assets that are relevant for that year. 
- -# Returns -- A `Dict` where the keys are tuples `(year, asset)` representing the year and the asset, and the values are the weights for investment discounts. - -# Formula -The weight for investment discounts for each asset in year is calculated using the formula: - -weight_for_investment_discounts = - 1 / (1 + social_rate)^(year - discount_year) * - (1 - salvage_value[(year, asset)] / investment_cost[(year, asset)]) - -# Example for energy assets - -```jldoctest -social_rate = 0.02 - -discount_year = 2000 - -salvage_value = Dict( - (2021, "asset1") => 759.1978422, - (2021, "asset2") => 1202.2339859, - (2022, "asset1") => 964.3285406, -) - -investment_cost = Dict( - (2021, "asset1") => 1000, - (2021, "asset2") => 1500, - (2022, "asset1") => 1100, - (2022, "asset2") => 1600, -) -years = [2021, 2022] - -investable_assets = Dict(2021 => ["asset1", "asset2"], 2022 => ["asset1"]) - -weights = calculate_weight_for_investment_discounts( - social_rate, - discount_year, - salvage_value, - investment_cost, - years, - investable_assets, -) - -# output - -Dict{Tuple{Int64, String}, Float64} with 3 entries: - (2021, "asset1") => 0.158875 - (2021, "asset2") => 0.130973 - (2022, "asset1") => 0.0797796 -``` - -# Example for transport assets - -```jldoctest -social_rate = 0.02 - -discount_year = 2000 - -salvage_value = Dict( - (2022, ("asset1", "asset2")) => 964.325, - (2021, ("asset3", "asset4")) => 1202.24, - (2021, ("asset1", "asset2")) => 759.2, -) - -investment_cost = Dict((2021, ("asset1", "asset2")) => 1000, (2021, ("asset3", "asset4")) => 1500, - (2022, ("asset1", "asset2")) => 1100, (2022, ("asset3", "asset4")) => 1600) -years = [2021, 2022] - -investable_assets = Dict(2021 => [("asset1", "asset2"), ("asset3", "asset4")], - 2022 => [("asset1", "asset2")]) - -weights = calculate_weight_for_investment_discounts( - social_rate, - discount_year, - salvage_value, - investment_cost, - years, - investable_assets, -) - -# output - -Dict{Tuple{Int64, Tuple{String, String}}, 
Float64} with 3 entries: - (2022, ("asset1", "asset2")) => 0.0797817 - (2021, ("asset3", "asset4")) => 0.13097 - (2021, ("asset1", "asset2")) => 0.158874 -``` -""" -function calculate_weight_for_investment_discounts( - social_rate, - discount_year, - salvage_value, - investment_cost, - years, - investable_assets, -) - weight_for_investment_discounts = Dict( - (year, asset) => - 1 / (1 + social_rate)^(year - discount_year) * - (1 - salvage_value[(year, asset)] / investment_cost[(year, asset)]) for - year in years for asset in investable_assets[year] - ) - return weight_for_investment_discounts -end - -""" - calculate_weight_for_investment_discounts(graph::MetaGraph, - years, - investable_assets, - assets, - model_parameters, - ) - -Calculates the weight for investment discounts for each asset, both energy assets and transport assets. -Internally calls [`calculate_annualized_cost`](@ref), [`calculate_salvage_value`](@ref), [`calculate_weight_for_investment_discounts`](@ref). - -# Arguments -- `graph::MetaGraph`: A graph -- `years::Array`: An array of years to be considered. -- `investable_assets::Dict`: A dictionary where the key is a year, and the value is an array of assets that are relevant for that year. -- `assets::Array`: An array of assets. -- `model_parameters::ModelParameters`: A model parameters structure. - -# Returns -- A `Dict` where the keys are tuples `(year, asset)` representing the year and the asset, and the values are the weights for investment discounts. 
-""" -function calculate_weight_for_investment_discounts( - graph::MetaGraph, - years, - investable_assets, - assets, - model_parameters, -) - - # Calculate the economic parameters - discount_rate = Dict(asset => get_graph_value_or_missing(graph, asset, :discount_rate) for asset in assets) - economic_lifetime = Dict(asset => get_graph_value_or_missing(graph, asset, :economic_lifetime) for asset in assets) - investment_cost = Dict((year, asset) => get_graph_value_or_missing(graph, asset, :investment_cost, year) for year in years for asset in investable_assets[year]) - - # Create a dict of the annualized cost for asset invested in year - annualized_cost = calculate_annualized_cost( - discount_rate, - economic_lifetime, - investment_cost, - years, - investable_assets, - ) - - # Create a dict of salvage values - salvage_value = calculate_salvage_value( - discount_rate, - economic_lifetime, - annualized_cost, - years, - investable_assets, - ) - - # Return a dict of weights for investment discounts - return calculate_weight_for_investment_discounts( - model_parameters.discount_rate, - model_parameters.discount_year, - salvage_value, - investment_cost, - years, - investable_assets, - ) -end diff --git a/src/expressions/storage.jl b/src/expressions/storage.jl index d1c62fe7..1747cb85 100644 --- a/src/expressions/storage.jl +++ b/src/expressions/storage.jl @@ -33,7 +33,6 @@ function add_storage_expressions!(connection, model, expressions) expr_avail = expressions[:available_energy_units].expressions[:energy] - # TODO: Reevaluate the available_energy_capacity definition let table_name = :available_energy_capacity, expr = expressions[table_name] indices = DuckDB.query(connection, "FROM expr_$table_name") attach_expression!( diff --git a/src/io.jl b/src/io.jl index 4cf84918..97bbdd52 100644 --- a/src/io.jl +++ b/src/io.jl @@ -1,26 +1,11 @@ -export create_internal_structures, export_solution_to_csv_files +export create_internal_structures!, export_solution_to_csv_files """ - 
graph, representative_periods, timeframe = create_internal_structures(connection) + create_internal_structures!(connection) -Return the `graph`, `representative_periods`, and `timeframe` structures given the input dataframes structure. - -The details of these structures are: - - - `graph`: a MetaGraph with the following information: - - + `labels(graph)`: All assets. - + `edge_labels(graph)`: All flows, in pair format `(u, v)`, where `u` and `v` are assets. - + `graph[a]`: A [`TulipaEnergyModel.GraphAssetData`](@ref) structure for asset `a`. - + `graph[u, v]`: A [`TulipaEnergyModel.GraphFlowData`](@ref) structure for flow `(u, v)`. - - - `representative_periods`: An array of - [`TulipaEnergyModel.RepresentativePeriod`](@ref) ordered by their IDs. - - - `timeframe`: Information of - [`TulipaEnergyModel.Timeframe`](@ref). +Creates internal tables. """ -function create_internal_structures(connection) +function create_internal_structures!(connection) # Create tables that are allowed to be missing tables_allowed_to_be_missing = [ @@ -35,266 +20,13 @@ function create_internal_structures(connection) _create_empty_unless_exists(connection, table) end - # Get the years struct ordered by year - years = [ - Year(row.year, row.length, row.is_milestone) for row in DBInterface.execute( - connection, - "SELECT * - FROM year_data - ORDER BY year", - ) - ] - - milestone_years = [year.id for year in years] - - # Calculate the weights from the "rep_periods_mapping" table in the connection - weights = Dict( - year => [ - row.weight for row in DBInterface.execute( - connection, - "SELECT rep_period, SUM(weight) AS weight - FROM rep_periods_mapping - WHERE year = $year - GROUP BY rep_period - ORDER BY rep_period", - ) - ] for year in milestone_years - ) - - representative_periods = Dict{Int,Vector{RepresentativePeriod}}( - year => [ - RepresentativePeriod(weights[year][row.rep_period], row.num_timesteps, row.resolution) for row in TulipaIO.get_table(Val(:raw), connection, 
"rep_periods_data") if - row.year == year - ] for year in milestone_years - ) - - # Calculate the total number of periods and then pipe into a Dataframe to get the first value of the df with the num_periods - num_periods, = DuckDB.query(connection, "SELECT MAX(period) AS period FROM rep_periods_mapping") - - timeframe = Timeframe(num_periods.period, TulipaIO.get_table(connection, "rep_periods_mapping")) - - _query_data_per_year(table_name, col, year_col; where_pairs...) = begin - # Make sure valid year columns are used - @assert year_col in ("milestone_year", "commission_year") - year_prefix = replace(year_col, "_year" => "") - # Make sure we are at the right table - @assert table_name in ("asset_$year_prefix", "flow_$year_prefix") - _q = "SELECT $year_col, $col FROM $table_name" - if length(where_pairs) > 0 - _q *= - " WHERE " * - join(("$k=$(TulipaIO.FmtSQL.fmt_quote(v))" for (k, v) in where_pairs), " AND ") - end - DuckDB.query(connection, _q) - end - - function _get_data_per_year(table_name, col; where_pairs...) - year_prefix = replace(table_name, "asset_" => "", "flow_" => "") - @assert year_prefix in ("milestone", "commission") - year_col = year_prefix * "_year" - @assert table_name in ("asset_$year_prefix", "flow_$year_prefix") - - result = _query_data_per_year(table_name, col, year_col; where_pairs...) - return Dict(row[Symbol(year_col)] => getproperty(row, Symbol(col)) for row in result) - end - - _query_data_per_both_years(table_name, col; where_pairs...) = begin - _q = "SELECT $col, milestone_year, commission_year FROM $table_name" - if length(where_pairs) > 0 - _q *= - " WHERE " * - join(("$k=$(TulipaIO.FmtSQL.fmt_quote(v))" for (k, v) in where_pairs), " AND ") - end - DuckDB.query(connection, _q) - end - - function _get_data_per_both_years(table_name, col; where_pairs...) - result = _query_data_per_both_years(table_name, col; where_pairs...) 
- T = result.types[1] # First column is the one with out query - result_dict = Dict{Int,Dict{Int,T}}() - for row in result - if !haskey(result_dict, row.milestone_year) - result_dict[row.milestone_year] = Dict{Int,T}() - end - result_dict[row.milestone_year][row.commission_year] = getproperty(row, Symbol(col)) - end - return result_dict - end - - asset_data = @timeit to "asset_data" [ - row.asset => begin - _where = (asset = row.asset,) - GraphAssetData( - # From asset table - row.type, - row.group, - row.capacity, - row.min_operating_point, - row.investment_method, - row.investment_integer, - row.technical_lifetime, - row.economic_lifetime, - row.discount_rate, - if ismissing(row.consumer_balance_sense) - MathOptInterface.EqualTo(0.0) - else - MathOptInterface.GreaterThan(0.0) - end, - row.capacity_storage_energy, - row.is_seasonal, - row.use_binary_storage_method, - row.unit_commitment, - row.unit_commitment_method, - row.unit_commitment_integer, - row.ramping, - row.storage_method_energy, - row.energy_to_power_ratio, - row.investment_integer_storage_energy, - row.max_ramp_up, - row.max_ramp_down, - - # From asset_milestone table - _get_data_per_year("asset_milestone", "investable"; _where...), - _get_data_per_year("asset_milestone", "peak_demand"; _where...), - _get_data_per_year("asset_milestone", "storage_inflows"; _where...), - _get_data_per_year("asset_milestone", "initial_storage_level"; _where...), - _get_data_per_year("asset_milestone", "min_energy_timeframe_partition"; _where...), - _get_data_per_year("asset_milestone", "max_energy_timeframe_partition"; _where...), - _get_data_per_year("asset_milestone", "units_on_cost"; _where...), - - # From asset_commission table - _get_data_per_year("asset_commission", "fixed_cost"; _where...), - _get_data_per_year("asset_commission", "investment_cost"; _where...), - _get_data_per_year("asset_commission", "investment_limit"; _where...), - _get_data_per_year("asset_commission", "fixed_cost_storage_energy"; _where...), 
- _get_data_per_year("asset_commission", "investment_cost_storage_energy"; _where...), - _get_data_per_year( - "asset_commission", - "investment_limit_storage_energy"; - _where..., - ), - - # From asset_both - _get_data_per_both_years("asset_both", "active"; _where...), - _get_data_per_both_years("asset_both", "decommissionable"; _where...), - _get_data_per_both_years("asset_both", "initial_units"; _where...), - _get_data_per_both_years("asset_both", "initial_storage_units"; _where...), - ) - end for row in TulipaIO.get_table(Val(:raw), connection, "asset") - ] - - flow_data = @timeit to "flow_data" [ - (row.from_asset, row.to_asset) => begin - _where = (from_asset = row.from_asset, to_asset = row.to_asset) - GraphFlowData( - # flow - row.carrier, - row.is_transport, - row.capacity, - row.technical_lifetime, - row.economic_lifetime, - row.discount_rate, - row.investment_integer, - - # flow_milestone - _get_data_per_year("flow_milestone", "investable"; _where...), - _get_data_per_year("flow_milestone", "variable_cost"; _where...), - - # flow_commission - _get_data_per_year("flow_commission", "fixed_cost"; _where...), - _get_data_per_year("flow_commission", "investment_cost"; _where...), - _get_data_per_year("flow_commission", "efficiency"; _where...), - _get_data_per_year("flow_commission", "investment_limit"; _where...), - - # flow_both - _get_data_per_both_years("flow_both", "active"; _where...), - _get_data_per_both_years("flow_both", "decommissionable"; _where...), - _get_data_per_both_years("flow_both", "initial_export_units"; _where...), - _get_data_per_both_years("flow_both", "initial_import_units"; _where...), - ) - end for row in TulipaIO.get_table(Val(:raw), connection, "flow") - ] - - num_assets = length(asset_data) # we only look at unique asset names - - name_to_id = Dict(value.first => idx for (idx, value) in enumerate(asset_data)) - - _graph = Graphs.DiGraph(num_assets) - for flow in flow_data - from_id, to_id = flow[1] - Graphs.add_edge!(_graph, 
name_to_id[from_id], name_to_id[to_id]) - end - - graph = MetaGraphsNext.MetaGraph(_graph, asset_data, flow_data, nothing, nothing, nothing) - # TODO: Move these function calls to the correct place @timeit to "tmp_create_partition_tables" tmp_create_partition_tables(connection) @timeit to "tmp_create_union_tables" tmp_create_union_tables(connection) @timeit to "tmp_create_lowest_resolution_table" tmp_create_lowest_resolution_table(connection) @timeit to "tmp_create_highest_resolution_table" tmp_create_highest_resolution_table(connection) - _df = - DuckDB.execute( - connection, - "SELECT asset, commission_year, profile_type, year, rep_period, value - FROM assets_profiles - JOIN profiles_rep_periods - ON assets_profiles.profile_name=profiles_rep_periods.profile_name", - ) |> DataFrame - - gp = DataFrames.groupby(_df, [:asset, :commission_year, :profile_type, :year, :rep_period]) - - for ((asset, commission_year, profile_type, year, rep_period), df) in pairs(gp) - profiles = graph[asset].rep_periods_profiles - if !haskey(profiles, year) - profiles[year] = Dict{Int,Dict{Tuple{Symbol,Int},Vector{Float64}}}() - end - if !haskey(profiles[year], commission_year) - profiles[year][commission_year] = Dict{Tuple{Symbol,Int},Vector{Float64}}() - end - profiles[year][commission_year][(profile_type, rep_period)] = df.value - end - - _df = TulipaIO.get_table(connection, "profiles_rep_periods") - for flow_profile_row in TulipaIO.get_table(Val(:raw), connection, "flows_profiles") - gp = DataFrames.groupby( - filter(:profile_name => ==(flow_profile_row.profile_name), _df; view = true), - [:rep_period, :year]; - ) - for ((rep_period, year), df) in pairs(gp) - profiles = - graph[flow_profile_row.from_asset, flow_profile_row.to_asset].rep_periods_profiles - if !haskey(profiles, year) - profiles[year] = Dict{Tuple{Symbol,Int},Vector{Float64}}() - end - profiles[year][(flow_profile_row.profile_type, rep_period)] = df.value - end - end - - _df = TulipaIO.get_table(connection, 
"profiles_timeframe") - for asset_profile_row in TulipaIO.get_table(Val(:raw), connection, "assets_timeframe_profiles") # row = asset, profile_type, profile_name - gp = DataFrames.groupby( - filter( # Filter - [:profile_name, :year] => - (name, year) -> - name == asset_profile_row.profile_name && - year == asset_profile_row.commission_year, - _df; - view = true, - ), - [:year], - ) - for ((year,), df) in pairs(gp) - profiles = graph[asset_profile_row.asset].timeframe_profiles - if !haskey(profiles, year) - profiles[year] = Dict{Int,Dict{String,Vector{Float64}}}() - profiles[year][year] = Dict{String,Vector{Float64}}() - end - profiles[year][year][asset_profile_row.profile_type] = df.value - end - end - - return graph, representative_periods, timeframe, years + return end function get_schema(tablename) @@ -369,45 +101,3 @@ function export_solution_to_csv_files(output_folder, connection, variables, cons return end - -""" - _check_initial_storage_level!(df) - -Determine the starting value for the initial storage level for interpolating the storage level. -If there is no initial storage level given, we will use the final storage level. -Otherwise, we use the given initial storage level. -""" -function _check_initial_storage_level!(df, graph) - initial_storage_level_dict = graph[unique(df.asset)[1]].initial_storage_level - for (_, initial_storage_level) in initial_storage_level_dict - if ismissing(initial_storage_level) - df[!, :processed_value] = [df.value[end]; df[1:end-1, :value]] - else - df[!, :processed_value] = [initial_storage_level; df[1:end-1, :value]] - end - end -end - -""" - _interpolate_storage_level!(df, time_column::Symbol) - -Transform the storage level dataframe from grouped timesteps or periods to incremental ones by interpolation. -The starting value is the value of the previous grouped timesteps or periods or the initial value. -The ending value is the value for the grouped timesteps or periods. 
-""" -function _interpolate_storage_level!(df, time_column) - return DataFrames.flatten( - DataFrames.transform( - df, - [time_column, :value, :processed_value] => - DataFrames.ByRow( - (period, value, start_value) -> begin - n = length(period) - interpolated_values = range(start_value; stop = value, length = n + 1) - (period, value, interpolated_values[2:end]) - end, - ) => [time_column, :value, :processed_value], - ), - [time_column, :processed_value], - ) -end diff --git a/src/model-preparation.jl b/src/model-preparation.jl index 2c9656db..ef2f3e86 100644 --- a/src/model-preparation.jl +++ b/src/model-preparation.jl @@ -2,15 +2,16 @@ export prepare_profiles_structure """ - add_expression_terms_rep_period_constraints!(df_cons, - df_flows, - workspace; - use_highest_resolution = true, - multiply_by_duration = true, - add_min_outgoing_flow_duration = false, - ) - -Computes the incoming and outgoing expressions per row of df_cons for the constraints + add_expression_terms_rep_period_constraints!( + connection, + cons, + flow; + use_highest_resolution = true, + multiply_by_duration = true, + add_min_outgoing_flow_duration = false, + ) + +Computes the incoming and outgoing expressions per row of `cons` for the constraints that are within (intra) the representative periods. This function is only used internally in the model. 
@@ -100,14 +101,6 @@ function add_expression_terms_rep_period_constraints!( conditions_to_add_min_outgoing_flow_duration = add_min_outgoing_flow_duration && case.expr_key == :outgoing if conditions_to_add_min_outgoing_flow_duration - # TODO: Evaluate what to do with this - # Originally, this was a column attach to the indices Assuming the - # indices will be DuckDB tables, that would be problematic, - # although possible However, that would be the only place that - # DuckDB tables are changed after creation - notice that - # constraints create new tables when a new column is necessary The - # current solution is to attach as a coefficient, a new field of - # TulipaConstraint created just for this purpose attach_coefficient!(cons, :min_outgoing_flow_duration, ones(num_rows)) end @@ -243,13 +236,13 @@ function add_expression_terms_rep_period_constraints!( end """ - add_expression_is_charging_terms_rep_period_constraints!(df_cons, - is_charging_indices, - is_charging_variables, - workspace - ) + add_expression_is_charging_terms_rep_period_constraints!( + cons, + is_charging, + workspace, + ) -Computes the `is_charging` expressions per row of `df_cons` for the constraints +Computes the `is_charging` expressions per row of `cons` for the constraints that are within (intra) the representative periods. This function is only used internally in the model. @@ -296,12 +289,12 @@ end """ add_expression_units_on_terms_rep_period_constraints!( - df_cons, - df_units_on, + cons, + units_on, workspace, ) -Computes the `units_on` expressions per row of `df_cons` for the constraints +Computes the `units_on` expressions per row of `cons` for the constraints that are within (intra) the representative periods. This function is only used internally in the model. 
@@ -344,12 +337,13 @@ function add_expression_units_on_terms_rep_period_constraints!( end """ - add_expression_terms_over_clustered_year_constraints!(df_inter, - df_flows, - df_map, - graph, - representative_periods, - ) + add_expression_terms_over_clustered_year_constraints!( + connection, + cons, + flow, + profiles; + is_storage_level = false, + ) Computes the incoming and outgoing expressions per row of df_inter for the constraints that are between (inter) the representative periods. @@ -360,8 +354,7 @@ This function is only used internally in the model. function add_expression_terms_over_clustered_year_constraints!( connection, cons::TulipaConstraint, - flow::TulipaVariable, - profiles; + flow::TulipaVariable; is_storage_level = false, ) num_rows = size(cons.indices, 1) @@ -377,8 +370,6 @@ function add_expression_terms_over_clustered_year_constraints!( cons.expressions[case.expr_key] .= JuMP.AffExpr(0.0) end - # TODO: The interaction between year and timeframe is not clear yet, so this is probably wrong - # At this moment, that relation is ignored (we don't even look at df_inter.year) grouped_cons_table_name = "t_grouped_$(cons.table_name)" if !_check_if_table_exists(connection, grouped_cons_table_name) DuckDB.query( @@ -468,9 +459,6 @@ function add_expression_terms_over_clustered_year_constraints!( empty!.(flows_per_period_workspace) for ( - rp, - storage_inflow, - num_timesteps, var_indices, var_time_block_start_vec, var_time_block_end_vec, @@ -478,9 +466,6 @@ function add_expression_terms_over_clustered_year_constraints!( var_periods, var_weights, ) in zip( - group_row.var_rep_periods, - group_row.storage_inflows, - group_row.num_timesteps, group_row.var_indices, group_row.var_time_block_start_vec, group_row.var_time_block_end_vec, @@ -597,14 +582,7 @@ function add_expression_terms_over_clustered_year_constraints!( return end -function add_expressions_to_constraints!( - connection, - variables, - constraints, - model, - expression_workspace, - profiles, -) 
+function add_expressions_to_constraints!(connection, variables, constraints, expression_workspace) # Unpack variables # Creating the incoming and outgoing flow expressions @timeit to "add_expression_terms_rep_period_constraints!" add_expression_terms_rep_period_constraints!( @@ -678,20 +656,17 @@ function add_expressions_to_constraints!( connection, constraints[:balance_storage_over_clustered_year], variables[:flow], - profiles, is_storage_level = true, ) @timeit to "add_expression_terms_over_clustered_year_constraints!" add_expression_terms_over_clustered_year_constraints!( connection, constraints[:max_energy_over_clustered_year], variables[:flow], - profiles, ) @timeit to "add_expression_terms_over_clustered_year_constraints!" add_expression_terms_over_clustered_year_constraints!( connection, constraints[:min_energy_over_clustered_year], variables[:flow], - profiles, ) @timeit to "add_expression_is_charging_terms_rep_period_constraints!" add_expression_is_charging_terms_rep_period_constraints!( constraints[:capacity_incoming], diff --git a/src/structures.jl b/src/structures.jl index 4fb2529e..c9579885 100644 --- a/src/structures.jl +++ b/src/structures.jl @@ -1,13 +1,13 @@ -export GraphAssetData, - GraphFlowData, - EnergyProblem, +export EnergyProblem, + ProfileLookup, TulipaVariable, TulipaConstraint, - RepresentativePeriod, + TulipaExpression, PeriodsBlock, TimestepsBlock, - Timeframe, - Year + attach_constraint!, + attach_expression!, + attach_coefficient! const TimestepsBlock = UnitRange{Int} const PeriodsBlock = UnitRange{Int} @@ -15,23 +15,6 @@ const PeriodsBlock = UnitRange{Int} const PeriodType = Symbol const PERIOD_TYPES = [:rep_periods, :timeframe] -""" -Structure to hold the data of the year. -""" -struct Year - id::Int - length::Int - is_milestone::Bool -end - -""" -Structure to hold the data of the timeframe. 
-""" -struct Timeframe - num_periods::Int64 - map_periods_to_rp::DataFrame -end - """ Structure to hold the JuMP variables for the TulipaEnergyModel """ @@ -79,6 +62,9 @@ mutable struct TulipaConstraint end end +""" +Structure to hold some JuMP expressions that are not attached to constraints but are attached to a table. +""" mutable struct TulipaExpression indices::DataFrame table_name::String @@ -209,123 +195,8 @@ function attach_coefficient!(cons::TulipaConstraint, name::Symbol, container) end """ -Structure to hold the data of one representative period. -""" -struct RepresentativePeriod - weight::Float64 - timesteps::TimestepsBlock - resolution::Float64 - - function RepresentativePeriod(weight, num_timesteps, resolution) - return new(weight, 1:num_timesteps, resolution) - end -end - -""" -Structure to hold the asset data in the graph. +Structure to hold the dictionaries of profiles. """ -mutable struct GraphAssetData - # asset - type::String - group::Union{Missing,String} - capacity::Float64 - min_operating_point::Union{Missing,Float64} - investment_method::String - investment_integer::Bool - technical_lifetime::Float64 - economic_lifetime::Float64 - discount_rate::Float64 - consumer_balance_sense::Union{MathOptInterface.EqualTo,MathOptInterface.GreaterThan} - capacity_storage_energy::Float64 - is_seasonal::Bool - use_binary_storage_method::Union{Missing,String} - unit_commitment::Bool - unit_commitment_method::Union{Missing,String} - unit_commitment_integer::Bool - ramping::Bool - storage_method_energy::Bool - energy_to_power_ratio::Float64 - investment_integer_storage_energy::Bool - max_ramp_up::Union{Missing,Float64} - max_ramp_down::Union{Missing,Float64} - - # asset_milestone - investable::Dict{Int,Bool} - peak_demand::Dict{Int,Float64} - storage_inflows::Dict{Int,Union{Missing,Float64}} - initial_storage_level::Dict{Int,Union{Missing,Float64}} - min_energy_timeframe_partition::Dict{Int,Union{Missing,Float64}} - 
max_energy_timeframe_partition::Dict{Int,Union{Missing,Float64}} - units_on_cost::Dict{Int,Union{Missing,Float64}} - - # asset_commission - fixed_cost::Dict{Int,Float64} - investment_cost::Dict{Int,Float64} - investment_limit::Dict{Int,Union{Missing,Float64}} - fixed_cost_storage_energy::Dict{Int,Float64} - investment_cost_storage_energy::Dict{Int,Float64} - investment_limit_storage_energy::Dict{Int,Union{Missing,Float64}} - - # asset_both - active::Dict{Int,Dict{Int,Bool}} - decommissionable::Dict{Int,Dict{Int,Bool}} - initial_units::Dict{Int,Dict{Int,Float64}} - initial_storage_units::Dict{Int,Dict{Int,Float64}} - - # profiles - timeframe_profiles::Dict{Int,Dict{Int,Dict{String,Vector{Float64}}}} - rep_periods_profiles::Dict{Int,Dict{Int,Dict{Tuple{String,Int},Vector{Float64}}}} - - # You don't need profiles to create the struct, so initiate it empty - function GraphAssetData(args...) - timeframe_profiles = Dict{Int,Dict{Int,Dict{String,Vector{Float64}}}}() - rep_periods_profiles = Dict{Int,Dict{Int,Dict{Tuple{String,Int},Vector{Float64}}}}() - return new(args..., timeframe_profiles, rep_periods_profiles) - end -end - -""" -Structure to hold the flow data in the graph. 
-""" -mutable struct GraphFlowData - # flow - carrier::String - is_transport::Bool - capacity::Float64 - technical_lifetime::Float64 - economic_lifetime::Float64 - discount_rate::Float64 - investment_integer::Bool - - # flow_milestone - investable::Dict{Int,Bool} - variable_cost::Dict{Int,Float64} - - # flow_commission - fixed_cost::Dict{Int,Float64} - investment_cost::Dict{Int,Float64} - efficiency::Dict{Int,Float64} - investment_limit::Dict{Int,Union{Missing,Float64}} - - # flow_both - active::Dict{Int,Dict{Int,Bool}} - decommissionable::Dict{Int,Dict{Int,Bool}} - initial_export_units::Dict{Int,Dict{Int,Float64}} - initial_import_units::Dict{Int,Dict{Int,Float64}} - - # profiles - timeframe_profiles::Dict{Int,Dict{String,Vector{Float64}}} - rep_periods_profiles::Dict{Int,Dict{Tuple{String,Int},Vector{Float64}}} -end - -function GraphFlowData(args...) - return GraphFlowData( - args..., - Dict{Int,Dict{String,Vector{Float64}}}(), - Dict{Int,Dict{Tuple{String,Int},Vector{Float64}}}(), - ) -end - mutable struct ProfileLookup # The integers here are Int32 because they are obtained directly from DuckDB # @@ -337,47 +208,38 @@ mutable struct ProfileLookup end """ + EnergyProblem + Structure to hold all parts of an energy problem. It is a wrapper around various other relevant structures. It hides the complexity behind the energy problem, making the usage more friendly, although more verbose. # Fields -- `db_connection`: A DuckDB connection to the input tables in the model -- `graph`: The Graph object that defines the geometry of the energy problem. + +- `db_connection`: A DuckDB connection to the input tables in the model. +- `variables`: A dictionary of [TulipaVariable](@ref TulipaVariable)s containing the variables of the model. +- `expressions`: A dictionary of [TulipaExpression](@ref TulipaExpression)s containing the expressions of the model attached to tables. 
+- `constraints`: A dictionary of [TulipaConstraint](@ref TulipaConstraint)s containing the constraints of the model. +- `profiles`: Holds the profiles per `rep_period` or `over_clustered_year` in dictionary format. See [ProfileLookup](@ref). +- `model_parameters`: A [ModelParameters](@ref ModelParameters) structure to store all the parameters that are exclusive of the model. - `model`: A JuMP.Model object representing the optimization model. -- `objective_value`: The objective value of the solved problem (Float64). -- `variables`: A [TulipaVariable](@ref TulipaVariable) structure to store all the information related to the variables in the model. -- `constraints`: A [TulipaConstraint](@ref TulipaConstraint) structure to store all the information related to the constraints in the model. -- `representative_periods`: A vector of [Representative Periods](@ref representative-periods). - `solved`: A boolean indicating whether the `model` has been solved or not. +- `objective_value`: The objective value of the solved problem (Float64). - `termination_status`: The termination status of the optimization model. -- `timeframe`: A structure with the number of periods in the `representative_periods` and the mapping between the periods and their representatives. -- `model_parameters`: A [ModelParameters](@ref ModelParameters) structure to store all the parameters that are exclusive of the model. -- `years`: A vector with the information of all the milestone years. # Constructor -- `EnergyProblem(connection)`: Constructs a new `EnergyProblem` object with the given connection. The `constraints_partitions` field is computed from the `representative_periods`, and the other fields are initialized with default values. + +- `EnergyProblem(connection)`: Constructs a new `EnergyProblem` object with the given connection. +The `constraints_partitions` field is computed from the `representative_periods`, and the other +fields are initialized with default values. 
See the [basic example tutorial](@ref basic-example) to see how these can be used. """ mutable struct EnergyProblem db_connection::DuckDB.DB - graph::MetaGraph{ - Int, - SimpleDiGraph{Int}, - String, - GraphAssetData, - GraphFlowData, - Nothing, # Internal data - Nothing, # Edge weight function - Nothing, # Default edge weight - } variables::Dict{Symbol,TulipaVariable} expressions::Dict{Symbol,TulipaExpression} constraints::Dict{Symbol,TulipaConstraint} profiles::ProfileLookup - representative_periods::Dict{Int,Vector{RepresentativePeriod}} - timeframe::Timeframe - years::Vector{Year} model_parameters::ModelParameters model::Union{JuMP.Model,Nothing} solved::Bool @@ -391,10 +253,7 @@ mutable struct EnergyProblem This will call relevant functions to generate all input that is required for the model creation. """ function EnergyProblem(connection; model_parameters_file = "") - model = JuMP.Model() - - graph, representative_periods, timeframe, years = - @timeit to "create_internal_structure" create_internal_structures(connection) + @timeit to "create_internal_structure" create_internal_structures!(connection) variables = @timeit to "compute_variables_indices" compute_variables_indices(connection) @@ -405,14 +264,10 @@ mutable struct EnergyProblem energy_problem = new( connection, - graph, variables, Dict(), constraints, profiles, - representative_periods, - timeframe, - years, ModelParameters(connection, model_parameters_file), nothing, false, diff --git a/src/utils.jl b/src/utils.jl index e30150d5..44eb44fa 100644 --- a/src/utils.jl +++ b/src/utils.jl @@ -8,122 +8,20 @@ function _check_if_table_exists(connection, table_name) return length(collect(existence_query)) > 0 end -# FIXME: Ugly hack applied """ - is_active(graph, a, y) - is_active(graph, (u, v), y) + _profile_aggregate(profiles, tuple_key, time_block, agg_functions, default_value) -Returns `graph[a].active[y][y]` or `graph[u, v].active[y][y]` or `false` if intermediary values are -missing. 
-""" -function is_active(graph, graph_key, y) - active_dict = _get_graph_asset_or_flow(graph, graph_key).active # graph[...].active - if !haskey(active_dict, y) - return false - else - return get(active_dict[y], y, false) - end -end - -""" - _get_graph_asset_or_flow(graph, a) - _get_graph_asset_or_flow(graph, (u, v)) - -Returns `graph[a]` or `graph[u, v]`. -""" -_get_graph_asset_or_flow(graph, a) = graph[a] -_get_graph_asset_or_flow(graph, f::Tuple) = graph[f...] - -""" - get_graph_value_or_missing(graph, graph_key, field_key) - get_graph_value_or_missing(graph, graph_key, field_key, year) - -Get `graph[graph_key].field_key` (or `graph[graph_key].field_key[year]`) or return `missing` if -any of the values do not exist. -We also check if `graph[graph_key].active[year]` is true if the `year` is passed and return -`missing` otherwise. -""" -function get_graph_value_or_missing(graph, graph_key, field_key) - g = _get_graph_asset_or_flow(graph, graph_key) - return getproperty(g, field_key) -end -function get_graph_value_or_missing(graph, graph_key, field_key, year) - if !is_active(graph, graph_key, year) - return missing - end - g = get_graph_value_or_missing(graph, graph_key, field_key) - return get(g, year, missing) -end - -""" - safe_comparison(graph, a, value, key) - safe_comparison(graph, a, value, key, year) - -Check if `graph[a].value` (or `graph[a].value[year]`) is equal to `value`. -This function assumes that if `graph[a].value` is a dictionary and `value` is not, then you made a mistake. -This makes it safer, because it will not silently return `false`. -It also checks for missing. -""" -function safe_comparison(graph, a, value1, args...) - value2 = get_graph_value_or_missing(graph, a, args...) 
- if ismissing(value1) || ismissing(value2) - return false - end - return cmp(value1, value2) == 0 # Will error is one is a container (vector, dict) and the other is not -end - -""" - safe_inclusion(graph, a, value, key) - safe_inclusion(graph, a, value, key, year) - -Check if `graph[a].value` (or `graph[a].value[year]`) is in `values`. -This correctly check that `missing in [missing]` returns `false`. -""" -function safe_inclusion(graph, a, values::Vector, args...) - value = get_graph_value_or_missing(graph, a, args...) - return coalesce(value in values, false) -end - -""" - filter_graph(graph, elements, value, key) - filter_graph(graph, elements, value, key, year) - -Helper function to filter elements (assets or flows) in the graph given a key (and possibly year) and value (or values). -In the safest case, this is equivalent to the filters - -```julia -filter_assets_whose_key_equal_to_value = a -> graph[a].key == value -filter_assets_whose_key_year_equal_to_value = a -> graph[a].key[year] in value -filter_flows_whose_key_equal_to_value = f -> graph[f...].key == value -filter_flows_whose_key_year_equal_to_value = f -> graph[f...].key[year] in value -``` -""" -filter_graph(graph, elements, value, args...) = - filter(e -> safe_comparison(graph, e, value, args...), elements) -filter_graph(graph, elements, values::Vector, args...) = - filter(e -> safe_inclusion(graph, e, values, args...), elements) - -""" - profile_aggregation(agg, profiles, key, block, default_value) - -Aggregates the `profiles[key]` over the `block` using the `agg` function. +Aggregates the `profiles[tuple_key]` over the `time_block` using the `agg_function` function. If the profile does not exist, uses `default_value` instead of **each** profile value. -`profiles` should be a dictionary of profiles, for instance `graph[a].profiles` or `graph[u, v].profiles`. 
-If `profiles[key]` exists, then this function computes the aggregation of `profiles[key]` -over the range `block` using the aggregator `agg`, i.e., `agg(profiles[key][block])`. -If `profiles[key]` does not exist, then this substitutes it with a vector of `default_value`s. -""" -function profile_aggregation(agg, profiles, year, commission_year, key, block, default_value) - if haskey(profiles, year) && - haskey(profiles[year], commission_year) && - haskey(profiles[year][commission_year], key) - return agg(profiles[year][commission_year][key][block]) - else - return agg(Iterators.repeated(default_value, length(block))) - end -end +`profiles` should be a dictionary of profiles, and `tuple_key` should be either +`(profile_name, year, rep_period)` for the profiles of representative periods +or `(profile_name, year)` for the profiles over clustered years. +If `profiles[tuple_key]` exists, then this function computes the aggregation of `V = profiles[tuple_key]` +over the range `time_block` using the aggregator `agg_function`, i.e., `agg_function(V[time_block])`. +If it does not exist, then `V[time_block]` is substituted by a vector of the corresponding size and `default_value`. +""" function _profile_aggregate(profiles, tuple_key::Tuple, time_block, agg_function, default_value) if any(ismissing, tuple_key) || !haskey(profiles, tuple_key) return agg_function(Iterators.repeated(default_value, length(time_block))) @@ -131,32 +29,3 @@ function _profile_aggregate(profiles, tuple_key::Tuple, time_block, agg_function profile_value = profiles[tuple_key] return agg_function(skipmissing(profile_value[time_block])) end - -""" - create_intervals(years) - -Create a dictionary of intervals for `years`. The interval is assigned to the its starting year. -The last interval is 1. 
-""" -function create_intervals_for_years(years) - intervals = Dict() - - # This assumes that `years` is ordered - for i in 1:length(years)-1 - intervals[years[i]] = years[i+1] - years[i] - end - - intervals[years[end]] = 1 - - return intervals -end - -""" - Δ = duration(block, rp, representative_periods) - -Computes the duration of the `block` and multiply by the resolution of the -representative period `rp`. -""" -function duration(timesteps_block, rp, representative_periods) - return length(timesteps_block) * representative_periods[rp].resolution -end diff --git a/src/variables/create.jl b/src/variables/create.jl index fac22823..26b8c57c 100644 --- a/src/variables/create.jl +++ b/src/variables/create.jl @@ -95,7 +95,7 @@ function _create_variables_tables(connection) t_low.year, t_low.rep_period, t_low.time_block_start, - t_low.time_block_end + t_low.time_block_end, FROM t_lowest_all AS t_low LEFT JOIN asset ON t_low.asset = asset.asset @@ -124,7 +124,7 @@ function _create_variables_tables(connection) attr.asset, attr.year, attr.period_block_start, - attr.period_block_end + attr.period_block_end, FROM asset_timeframe_time_resolution AS attr LEFT JOIN asset ON attr.asset = asset.asset diff --git a/src/variables/flows.jl b/src/variables/flows.jl index 2f0da95a..cd1bcf92 100644 --- a/src/variables/flows.jl +++ b/src/variables/flows.jl @@ -1,7 +1,7 @@ export add_flow_variables! """ - add_flow_variables!(model, variables) + add_flow_variables!(connection, model, variables) Adds flow variables to the optimization `model` based on data from the `variables`. The flow variables are created using the `@variable` macro for each row in the `:flows` dataframe. 
diff --git a/src/variables/investments.jl b/src/variables/investments.jl index 3ba056b3..91576ac1 100644 --- a/src/variables/investments.jl +++ b/src/variables/investments.jl @@ -30,8 +30,7 @@ end add_investment_variables!(model, variables) Adds investment, decommission, and energy-related variables to the optimization `model`, -and sets integer constraints on selected variables based on the `graph` data. - +and sets integer constraints on selected variables based on the input data. """ function add_investment_variables!(model, variables) for (name, keys_from_row, lower_bound_from_row, upper_bound_from_row, integer_from_row) in [ diff --git a/src/variables/storage.jl b/src/variables/storage.jl index c2849699..786a7025 100644 --- a/src/variables/storage.jl +++ b/src/variables/storage.jl @@ -1,13 +1,12 @@ export add_storage_variables! """ - add_storage_variables!(model, ...) + add_storage_variables!(connection, model, variables) Adds storage-related variables to the optimization `model`, including storage levels for both intra-representative periods and inter-representative periods, as well as charging state variables. The function also optionally sets binary constraints for certain charging variables based on storage methods. 
- """ -function add_storage_variables!(model, graph, variables) +function add_storage_variables!(connection, model, variables) storage_level_rep_period_indices = variables[:storage_level_rep_period].indices storage_level_over_clustered_year_indices = variables[:storage_level_over_clustered_year].indices @@ -40,29 +39,43 @@ function add_storage_variables!(model, graph, variables) ] ### Cycling conditions - df_storage_rep_period_balance_grouped = - DataFrames.groupby(storage_level_rep_period_indices, [:asset, :year, :rep_period]) - - df_storage_over_clustered_year_balance_grouped = - DataFrames.groupby(storage_level_over_clustered_year_indices, [:asset, :year]) - - for ((a, y, _), sub_df) in pairs(df_storage_rep_period_balance_grouped) - # Ordering is assumed - if !ismissing(graph[a].initial_storage_level[y]) - JuMP.set_lower_bound( - variables[:storage_level_rep_period].container[last(sub_df.index)], - graph[a].initial_storage_level[y], - ) + let var = variables[:storage_level_rep_period] + table_name = var.table_name + for row in DuckDB.query( + connection, + "SELECT + last(var.index) AS last_index, + var.asset, var.year, var.rep_period, + ANY_VALUE(asset_milestone.initial_storage_level) AS initial_storage_level, + FROM $table_name AS var + LEFT JOIN asset_milestone + ON var.asset = asset_milestone.asset + AND var.year = asset_milestone.milestone_year + WHERE asset_milestone.initial_storage_level IS NOT NULL + GROUP BY var.asset, var.year, var.rep_period + ", + ) + JuMP.set_lower_bound(var.container[row.last_index], row.initial_storage_level) end end - for ((a, y), sub_df) in pairs(df_storage_over_clustered_year_balance_grouped) - # Ordering is assumed - if !ismissing(graph[a].initial_storage_level[y]) - JuMP.set_lower_bound( - variables[:storage_level_over_clustered_year].container[last(sub_df.index)], - graph[a].initial_storage_level[y], - ) + let var = variables[:storage_level_over_clustered_year] + table_name = var.table_name + for row in DuckDB.query( + 
connection, + "SELECT + last(var.index) AS last_index, + var.asset, var.year, + ANY_VALUE(asset_milestone.initial_storage_level) AS initial_storage_level, + FROM $table_name AS var + LEFT JOIN asset_milestone + ON var.asset = asset_milestone.asset + AND var.year = asset_milestone.milestone_year + WHERE asset_milestone.initial_storage_level IS NOT NULL + GROUP BY var.asset, var.year + ", + ) + JuMP.set_lower_bound(var.container[row.last_index], row.initial_storage_level) end end diff --git a/src/variables/unit-commitment.jl b/src/variables/unit-commitment.jl index 449b77cb..a6daf731 100644 --- a/src/variables/unit-commitment.jl +++ b/src/variables/unit-commitment.jl @@ -1,7 +1,7 @@ export add_unit_commitment_variables! """ - add_unit_commitment_variables!(model, ...) + add_unit_commitment_variables!(model, variables) Adds unit commitment variables to the optimization `model` based on the `:units_on` indices. Additionally, variables are constrained to be integers based on the `unit_commitment_integer` property. 
diff --git a/test/runtests.jl b/test/runtests.jl index 38cd117d..837ae8ed 100644 --- a/test/runtests.jl +++ b/test/runtests.jl @@ -3,7 +3,6 @@ using Cbc: Cbc using DataFrames: DataFrames, DataFrame using DuckDB: DuckDB, DBInterface using GLPK: GLPK -using Graphs: Graphs using HiGHS: HiGHS using JuMP: JuMP using MathOptInterface: MathOptInterface @@ -49,5 +48,5 @@ end @testset "Ensuring data can be read and create the internal structures" begin connection = DBInterface.connect(DuckDB.DB) _read_csv_folder(connection, joinpath(@__DIR__, "../benchmark/EU/")) - TulipaEnergyModel.create_internal_structures(connection) + TulipaEnergyModel.create_internal_structures!(connection) end diff --git a/test/test-economic-parameters.jl b/test/test-economic-parameters.jl deleted file mode 100644 index 98d7561f..00000000 --- a/test/test-economic-parameters.jl +++ /dev/null @@ -1,108 +0,0 @@ -@testset "calculate_annualized_cost tests" begin - discount_rate = Dict("asset1" => 0.05, "asset2" => 0.07) - - economic_lifetime = Dict("asset1" => 10, "asset2" => 15) - - investment_cost = Dict( - (2021, "asset1") => 1000, - (2021, "asset2") => 1500, - (2022, "asset1") => 1100, - (2022, "asset2") => 1600, - ) - years = [2021, 2022] - - investable_assets = Dict(2021 => ["asset1", "asset2"], 2022 => ["asset1"]) - - expected_output = Dict( - (2021, "asset1") => 123.3376904, - (2021, "asset2") => 153.9176982, - (2022, "asset1") => 135.6714595, - ) - - result = TulipaEnergyModel.calculate_annualized_cost( - discount_rate, - economic_lifetime, - investment_cost, - years, - investable_assets, - ) - - for key in keys(expected_output) - @test result[key] ≈ expected_output[key] atol = 1e-6 - end -end - -@testset "calculate_salvage_value tests" begin - discount_rate = Dict("asset1" => 0.05, "asset2" => 0.07) - - economic_lifetime = Dict("asset1" => 10, "asset2" => 15) - - annualized_cost = Dict( - (2021, "asset1") => 123.3376904, - (2021, "asset2") => 153.9176982, - (2022, "asset1") => 135.6714595, - ) - 
- years = [2021, 2022] - - investable_assets = Dict(2021 => ["asset1", "asset2"], 2022 => ["asset1"]) - - expected_output = Dict( - (2021, "asset1") => 759.1978422, - (2021, "asset2") => 1202.2339859, - (2022, "asset1") => 964.3285406, - ) - - result = TulipaEnergyModel.calculate_salvage_value( - discount_rate, - economic_lifetime, - annualized_cost, - years, - investable_assets, - ) - - for key in keys(expected_output) - @test result[key] ≈ expected_output[key] atol = 1e-6 - end -end - -@testset "calculate_weight_for_investment_discounts tests" begin - social_rate = 0.02 - - discount_year = 2000 - - salvage_value = Dict( - (2021, "asset1") => 759.1978422, - (2021, "asset2") => 1202.2339859, - (2022, "asset1") => 964.3285406, - ) - - investment_cost = Dict( - (2021, "asset1") => 1000, - (2021, "asset2") => 1500, - (2022, "asset1") => 1100, - (2022, "asset2") => 1600, - ) - years = [2021, 2022] - - investable_assets = Dict(2021 => ["asset1", "asset2"], 2022 => ["asset1"]) - - expected_output = Dict( - (2021, "asset1") => 0.158875, - (2021, "asset2") => 0.130973, - (2022, "asset1") => 0.0797796, - ) - - result = TulipaEnergyModel.calculate_weight_for_investment_discounts( - social_rate, - discount_year, - salvage_value, - investment_cost, - years, - investable_assets, - ) - - for key in keys(expected_output) - @test result[key] ≈ expected_output[key] atol = 1e-6 - end -end diff --git a/test/test-io.jl b/test/test-io.jl index 8f5f5179..0568a1f6 100644 --- a/test/test-io.jl +++ b/test/test-io.jl @@ -52,16 +52,3 @@ end print(energy_problem) end end - -@testset "Graph structure" begin - @testset "Graph structure is correct" begin - connection = DBInterface.connect(DuckDB.DB) - _read_csv_folder(connection, joinpath(INPUT_FOLDER, "Tiny")) - graph, _, _ = TulipaEnergyModel.create_internal_structures(connection) - - @test Graphs.nv(graph) == 6 - @test Graphs.ne(graph) == 5 - @test collect(Graphs.edges(graph)) == - [Graphs.Edge(e) for e in [(1, 2), (3, 2), (4, 2), (5, 2), (6, 
2)]] - end -end diff --git a/test/test-pipeline.jl b/test/test-pipeline.jl index 34721b69..1a16afc7 100644 --- a/test/test-pipeline.jl +++ b/test/test-pipeline.jl @@ -21,8 +21,7 @@ end _read_csv_folder(connection, dir) # Internal data and structures pre-model - graph, representative_periods, timeframe, years = - TulipaEnergyModel.create_internal_structures(connection) + TulipaEnergyModel.create_internal_structures!(connection) model_parameters = TulipaEnergyModel.ModelParameters(connection) variables = TulipaEnergyModel.compute_variables_indices(connection) expressions = Dict() @@ -32,12 +31,10 @@ end # Create model model = TulipaEnergyModel.create_model( connection, - graph, variables, expressions, constraints, profiles, - representative_periods, model_parameters, )