diff --git a/CHANGELOG.md b/CHANGELOG.md index 909f8a14cd..73d8c5b919 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -7,6 +7,14 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 ## Unreleased +### Changed +- Automatic problem scaling is now available using the `MacroEnergySystemsScaling.jl` +package. The new `AutoScaling` setting has been introduced to replace the +`ParameterScale` setting and enable this feature. + +### Removed +- Removed the `ParameterScale` setting. + ## [0.4.2] - 2024-12-23 ### Added diff --git a/Project.toml b/Project.toml index 60f5a42e21..46fa2393e4 100644 --- a/Project.toml +++ b/Project.toml @@ -15,6 +15,7 @@ HiGHS = "87dc4568-4c63-4d18-b0c0-bb2238e4078b" JuMP = "4076af6c-e467-56ae-b986-b466b2749572" LinearAlgebra = "37e2e46d-f89d-539d-b4ee-838fcccc9c8e" Logging = "56ddb016-857b-54e1-b83d-db4d58db5568" +MacroEnergySystemsScaling = "e07ba8b4-499e-4f54-a885-af58260c741f" MathOptInterface = "b8f27783-ece8-5eb3-8dc8-9495eed66fee" PrecompileTools = "aea7be01-6a6a-4083-8856-8a6e6704d82a" Random = "9a3f8284-a2c9-5f02-9a11-845980a1fd5c" diff --git a/docs/src/Tutorials/Tutorial_1_configuring_settings.md b/docs/src/Tutorials/Tutorial_1_configuring_settings.md index c7d8c7f08c..cb5603dcfa 100644 --- a/docs/src/Tutorials/Tutorial_1_configuring_settings.md +++ b/docs/src/Tutorials/Tutorial_1_configuring_settings.md @@ -35,7 +35,6 @@ genx_settings_SNE = YAML.load(open("example_systems/1_three_zones/settings/genx_ Dict{Any, Any} with 19 entries: "NetworkExpansion" => 1 "ModelingToGenerateAlternativeIterations" => 3 - "ParameterScale" => 1 "EnergyShareRequirement" => 0 "PrintModel" => 0 "TimeDomainReduction" => 1 diff --git a/docs/src/Tutorials/Tutorial_3_K-means_time_domain_reduction.md b/docs/src/Tutorials/Tutorial_3_K-means_time_domain_reduction.md index 5d56315d4c..37507a98b0 100644 --- a/docs/src/Tutorials/Tutorial_3_K-means_time_domain_reduction.md +++ 
b/docs/src/Tutorials/Tutorial_3_K-means_time_domain_reduction.md @@ -97,7 +97,6 @@ genx_settings_TZ = YAML.load(open((joinpath(case,"settings/genx_settings.yml"))) Dict{Any, Any} with 19 entries: "NetworkExpansion" => 1 "ModelingToGenerateAlternativeIterations" => 3 - "ParameterScale" => 1 "EnergyShareRequirement" => 0 "PrintModel" => 0 "TimeDomainReduction" => 1 @@ -129,7 +128,6 @@ genx_settings_TZ ## Output settings ``` Dict{Any, Any} with 13 entries: "NetworkExpansion" => 1 - "ParameterScale" => 1 "EnergyShareRequirement" => 0 "TimeDomainReduction" => 0 "Trans_Loss_Segments" => 1 diff --git a/docs/src/Tutorials/Tutorial_4_model_generation.md b/docs/src/Tutorials/Tutorial_4_model_generation.md index 0d50bab766..96911a056c 100644 --- a/docs/src/Tutorials/Tutorial_4_model_generation.md +++ b/docs/src/Tutorials/Tutorial_4_model_generation.md @@ -193,7 +193,6 @@ setup = GenX.configure_settings(genx_settings,writeoutput_settings) # Combines g "CO2Cap" => 2 "WriteShadowPrices" => 1 "ModelingToGenerateAlternativeIterations" => 3 - "ParameterScale" => 1 "EnergyShareRequirement" => 1 "PrintModel" => 0 "TimeDomainReduction" => 1 diff --git a/docs/src/Tutorials/Tutorial_7_setup.md b/docs/src/Tutorials/Tutorial_7_setup.md index 53dfe6ea68..d0a9c0e28d 100644 --- a/docs/src/Tutorials/Tutorial_7_setup.md +++ b/docs/src/Tutorials/Tutorial_7_setup.md @@ -65,7 +65,6 @@ setup = GenX.configure_settings(genx_settings,writeoutput_settings) "CO2Cap" => 2 "WriteShadowPrices" => 1 "OperationalReserves" => 0 - "ParameterScale" => 1 "EnergyShareRequirement" => 0 "PrintModel" => 0 "TimeDomainReduction" => 1 diff --git a/docs/src/User_Guide/model_configuration.md b/docs/src/User_Guide/model_configuration.md index a1526b3eab..8fb4fe5dc7 100644 --- a/docs/src/User_Guide/model_configuration.md +++ b/docs/src/User_Guide/model_configuration.md @@ -45,7 +45,7 @@ The following tables summarize the model settings parameters and their default/p |**Parameter** | **Description**| | :------------ | 
:-----------| -|ParameterScale | Flag to turn on parameter scaling wherein demand, capacity and power variables defined in GW rather than MW. This flag aides in improving the computational performance of the model. | +|AutoScaling | Flag to turn on automatic scaling of the optimization problem's constraints. This feature usually aids in improving the computational performance of the model. It does not affect the units of the inputs or results. The scaling can be adjusted by using the additional settings detailed [here](https://macroenergy.github.io/MacroEnergySystemsScaling.jl/stable/scaling_settings/)| ||1 = Scaling is activated. | ||0 = Scaling is not activated. | |ObjScale| Parameter value to scale the objective function during optimization.| diff --git a/example_systems/1_three_zones/settings/genx_settings.yml b/example_systems/1_three_zones/settings/genx_settings.yml index d48032b1d1..1acd7a8838 100644 --- a/example_systems/1_three_zones/settings/genx_settings.yml +++ b/example_systems/1_three_zones/settings/genx_settings.yml @@ -6,8 +6,8 @@ CO2Cap: 2 # CO2 emissions cap; 0 = not active (no CO2 emission limit); 1 = mass- StorageLosses: 1 # Energy Share Requirement and CO2 constraints account for energy lost; 0 = not active (DO NOT account for energy lost); 1 = active systemwide (DO account for energy lost) MinCapReq: 1 # Activate minimum technology carveout constraints; 0 = not active; 1 = active MaxCapReq: 0 # Activate maximum technology carveout constraints; 0 = not active; 1 = active -ParameterScale: 1 # Turn on parameter scaling wherein demand, capacity and power variables are defined in GW rather than MW. 0 = not active; 1 = active systemwide WriteShadowPrices: 1 # Write shadow prices of LP or relaxed MILP; 0 = not active; 1 = active UCommit: 2 # Unit committment of thermal power plants; 0 = not active; 1 = active using integer clestering; 2 = active using linearized clustering TimeDomainReduction: 1 # Time domain reduce (i.e. 
cluster) inputs based on Demand_data.csv, Generators_variability.csv, and Fuels_data.csv; 0 = not active (use input data as provided); 0 = active (cluster input data, or use data that has already been clustered) -OutputFullTimeSeries: 1 \ No newline at end of file +OutputFullTimeSeries: 1 +AutoScaling: 1 \ No newline at end of file diff --git a/example_systems/2_three_zones_w_electrolyzer/settings/genx_settings.yml b/example_systems/2_three_zones_w_electrolyzer/settings/genx_settings.yml index 3c3f4d572e..b6d6ff6e09 100644 --- a/example_systems/2_three_zones_w_electrolyzer/settings/genx_settings.yml +++ b/example_systems/2_three_zones_w_electrolyzer/settings/genx_settings.yml @@ -2,7 +2,6 @@ Trans_Loss_Segments: 1 # Number of segments used in piecewise linear approximati UCommit: 2 # Unit committment of thermal power plants; 0 = not active; 1 = active using integer clestering; 2 = active using linearized clustering StorageLosses: 1 # Energy Share Requirement and CO2 constraints account for energy lost; 0 = not active (DO NOT account for energy lost); 1 = active systemwide (DO account for energy lost) HydrogenHourlyMatching: 1 # Hydrogen electrolyzer contribution to hourly supply matching required -ParameterScale: 1 # Turn on parameter scaling wherein demand, capacity and power variables are defined in GW rather than MW. 0 = not active; 1 = active systemwide TimeDomainReduction: 1 # Time domain reduce (i.e. 
cluster) inputs based on Demand_data.csv, Generators_variability.csv, and Fuels_data.csv; 0 = not active (use input data as provided); 0 = active (cluster input data, or use data that has already been clustered) WriteShadowPrices: 1 # Write shadow prices of LP or relaxed MILP; 0 = not active; 1 = active HydrogenMinimumProduction: 1 # Hydrogen production requirement; 0 = not active; 1 = active, meet regional level H2 production requirements diff --git a/example_systems/3_three_zones_w_co2_capture/settings/genx_settings.yml b/example_systems/3_three_zones_w_co2_capture/settings/genx_settings.yml index 27f3eef52f..72647ea817 100644 --- a/example_systems/3_three_zones_w_co2_capture/settings/genx_settings.yml +++ b/example_systems/3_three_zones_w_co2_capture/settings/genx_settings.yml @@ -6,7 +6,6 @@ CO2Cap: 2 # CO2 emissions cap; 0 = not active (no CO2 emission limit); 1 = mass- StorageLosses: 1 # Energy Share Requirement and CO2 constraints account for energy lost; 0 = not active (DO NOT account for energy lost); 1 = active systemwide (DO account for energy lost) MinCapReq: 1 # Activate minimum technology carveout constraints; 0 = not active; 1 = active MaxCapReq: 0 # Activate maximum technology carveout constraints; 0 = not active; 1 = active -ParameterScale: 1 # Turn on parameter scaling wherein demand, capacity and power variables are defined in GW rather than MW. 0 = not active; 1 = active systemwide WriteShadowPrices: 1 # Write shadow prices of LP or relaxed MILP; 0 = not active; 1 = active UCommit: 2 # Unit committment of thermal power plants; 0 = not active; 1 = active using integer clestering; 2 = active using linearized clustering TimeDomainReduction: 1 # Time domain reduce (i.e. 
cluster) inputs based on Demand_data.csv, Generators_variability.csv, and Fuels_data.csv; 0 = not active (use input data as provided); 0 = active (cluster input data, or use data that has already been clustered) diff --git a/example_systems/4_three_zones_w_policies_slack/settings/genx_settings.yml b/example_systems/4_three_zones_w_policies_slack/settings/genx_settings.yml index 2aeabd9a67..9febaf6f6e 100644 --- a/example_systems/4_three_zones_w_policies_slack/settings/genx_settings.yml +++ b/example_systems/4_three_zones_w_policies_slack/settings/genx_settings.yml @@ -6,7 +6,6 @@ CO2Cap: 2 # CO2 emissions cap; 0 = not active (no CO2 emission limit); 1 = mass- StorageLosses: 1 # Energy Share Requirement and CO2 constraints account for energy lost; 0 = not active (DO NOT account for energy lost); 1 = active systemwide (DO account for energy lost) MinCapReq: 1 # Activate minimum technology carveout constraints; 0 = not active; 1 = active MaxCapReq: 1 # Activate maximum technology carveout constraints; 0 = not active; 1 = active -ParameterScale: 1 # Turn on parameter scaling wherein demand, capacity and power variables are defined in GW rather than MW. 0 = not active; 1 = active systemwide WriteShadowPrices: 1 # Write shadow prices of LP or relaxed MILP; 0 = not active; 1 = active UCommit: 2 # Unit committment of thermal power plants; 0 = not active; 1 = active using integer clestering; 2 = active using linearized clustering TimeDomainReduction: 1 # Time domain reduce (i.e. 
cluster) inputs based on Demand_data.csv, Generators_variability.csv, and Fuels_data.csv; 0 = not active (use input data as provided); 0 = active (cluster input data, or use data that has already been clustered) diff --git a/example_systems/5_three_zones_w_piecewise_fuel/settings/genx_settings.yml b/example_systems/5_three_zones_w_piecewise_fuel/settings/genx_settings.yml index fdd18b2300..8139d00a3b 100644 --- a/example_systems/5_three_zones_w_piecewise_fuel/settings/genx_settings.yml +++ b/example_systems/5_three_zones_w_piecewise_fuel/settings/genx_settings.yml @@ -7,7 +7,6 @@ CO2Cap: 2 # CO2 emissions cap; 0 = not active (no CO2 emission limit); 1 = mass- StorageLosses: 1 # Energy Share Requirement and CO2 constraints account for energy lost; 0 = not active (DO NOT account for energy lost); 1 = active systemwide (DO account for energy lost) MinCapReq: 1 # Activate minimum technology carveout constraints; 0 = not active; 1 = active MaxCapReq: 0 # Activate maximum technology carveout constraints; 0 = not active; 1 = active -ParameterScale: 1 # Turn on parameter scaling wherein demand, capacity and power variables are defined in GW rather than MW. 0 = not active; 1 = active systemwide WriteShadowPrices: 1 # Write shadow prices of LP or relaxed MILP; 0 = not active; 1 = active UCommit: 2 # Unit committment of thermal power plants; 0 = not active; 1 = active using integer clestering; 2 = active using linearized clustering TimeDomainReduction: 1 # Time domain reduce (i.e. 
cluster) inputs based on Demand_data.csv, Generators_variability.csv, and Fuels_data.csv; 0 = not active (use input data as provided); 0 = active (cluster input data, or use data that has already been clustered) diff --git a/example_systems/6_three_zones_w_multistage/settings/genx_settings.yml b/example_systems/6_three_zones_w_multistage/settings/genx_settings.yml index e9599523c4..36e7fa1f91 100644 --- a/example_systems/6_three_zones_w_multistage/settings/genx_settings.yml +++ b/example_systems/6_three_zones_w_multistage/settings/genx_settings.yml @@ -6,7 +6,6 @@ CO2Cap: 2 # CO2 emissions cap; 0 = not active (no CO2 emission limit); 1 = mass- StorageLosses: 1 # Energy Share Requirement and CO2 constraints account for energy lost; 0 = not active (DO NOT account for energy lost); 1 = active systemwide (DO account for energy lost) MinCapReq: 0 # Activate minimum technology carveout constraints; 0 = not active; 1 = active MaxCapReq: 0 # Activate maximum technology carveout constraints; 0 = not active; 1 = active -ParameterScale: 1 # Turn on parameter scaling wherein demand, capacity and power variables are defined in GW rather than MW. 0 = not active; 1 = active systemwide UCommit: 2 # Unit committment of thermal power plants; 0 = not active; 1 = active using integer clestering; 2 = active using linearized clustering TimeDomainReduction: 1 # Time domain reduce (i.e. 
cluster) inputs based on Demand_data.csv, Generators_variability.csv, and Fuels_data.csv; 0 = not active (use input data as provided); 0 = active (cluster input data, or use data that has already been clustered) MultiStage: 1 # 0 = Single-stage GenX, 1 = Multi-stage GenX diff --git a/example_systems/7_three_zones_w_colocated_VRE_storage/settings/genx_settings.yml b/example_systems/7_three_zones_w_colocated_VRE_storage/settings/genx_settings.yml index a4039ad2e3..7230a35a07 100644 --- a/example_systems/7_three_zones_w_colocated_VRE_storage/settings/genx_settings.yml +++ b/example_systems/7_three_zones_w_colocated_VRE_storage/settings/genx_settings.yml @@ -1,4 +1,3 @@ -ParameterScale: 1 NetworkExpansion: 1 Trans_Loss_Segments: 1 UCommit: 2 diff --git a/example_systems/8_three_zones_w_colocated_VRE_storage_electrolyzers/settings/genx_settings.yml b/example_systems/8_three_zones_w_colocated_VRE_storage_electrolyzers/settings/genx_settings.yml index c53bd4b62b..717ef8f2a6 100644 --- a/example_systems/8_three_zones_w_colocated_VRE_storage_electrolyzers/settings/genx_settings.yml +++ b/example_systems/8_three_zones_w_colocated_VRE_storage_electrolyzers/settings/genx_settings.yml @@ -2,7 +2,6 @@ Trans_Loss_Segments: 1 # Number of segments used in piecewise linear approximati UCommit: 2 # Unit committment of thermal power plants; 0 = not active; 1 = active using integer clestering; 2 = active using linearized clustering StorageLosses: 1 # Energy Share Requirement and CO2 constraints account for energy lost; 0 = not active (DO NOT account for energy lost); 1 = active systemwide (DO account for energy lost) HydrogenHourlyMatching: 0 # Hydrogen electrolyzer hourly supply matching required -ParameterScale: 0 # Turn on parameter scaling wherein demand, capacity and power variables are defined in GW rather than MW. 0 = not active; 1 = active systemwide TimeDomainReduction: 0 # Time domain reduce (i.e. 
cluster) inputs based on Demand_data.csv, Generators_variability.csv, and Fuels_data.csv; 0 = not active (use input data as provided); 0 = active (cluster input data, or use data that has already been clustered) WriteShadowPrices: 1 # Write shadow prices of LP or relaxed MILP; 0 = not active; 1 = active HydrogenMinimumProduction: 1 # Hydrogen production requirement; 0 = not active; 1 = active, meet regional level H2 production requirements \ No newline at end of file diff --git a/example_systems/9_three_zones_w_retrofit/settings/genx_settings.yml b/example_systems/9_three_zones_w_retrofit/settings/genx_settings.yml index 96854d0222..772ff0663d 100644 --- a/example_systems/9_three_zones_w_retrofit/settings/genx_settings.yml +++ b/example_systems/9_three_zones_w_retrofit/settings/genx_settings.yml @@ -6,7 +6,6 @@ CO2Cap: 2 # CO2 emissions cap; 0 = not active (no CO2 emission limit); 1 = mass- StorageLosses: 1 # Energy Share Requirement and CO2 constraints account for energy lost; 0 = not active (DO NOT account for energy lost); 1 = active systemwide (DO account for energy lost) MinCapReq: 1 # Activate minimum technology carveout constraints; 0 = not active; 1 = active MaxCapReq: 0 # Activate maximum technology carveout constraints; 0 = not active; 1 = active -ParameterScale: 1 # Turn on parameter scaling wherein demand, capacity and power variables are defined in GW rather than MW. 0 = not active; 1 = active systemwide WriteShadowPrices: 1 # Write shadow prices of LP or relaxed MILP; 0 = not active; 1 = active UCommit: 2 # Unit committment of thermal power plants; 0 = not active; 1 = active using integer clestering; 2 = active using linearized clustering TimeDomainReduction: 1 # Time domain reduce (i.e. 
cluster) inputs based on Demand_data.csv, Generators_variability.csv, and Fuels_data.csv; 0 = not active (use input data as provided); 0 = active (cluster input data, or use data that has already been clustered) \ No newline at end of file diff --git a/precompile/case/settings/genx_settings.yml b/precompile/case/settings/genx_settings.yml index 2d6600608b..a72528fbe4 100644 --- a/precompile/case/settings/genx_settings.yml +++ b/precompile/case/settings/genx_settings.yml @@ -3,7 +3,6 @@ Trans_Loss_Segments: 1 CO2Cap: 2 StorageLosses: 1 MinCapReq: 1 -ParameterScale: 1 WriteShadowPrices: 1 UCommit: 2 OverwriteResults: 1 \ No newline at end of file diff --git a/src/GenX.jl b/src/GenX.jl index 3a5a399800..54a6f842c1 100644 --- a/src/GenX.jl +++ b/src/GenX.jl @@ -37,16 +37,10 @@ using RecursiveArrayTools using Statistics using HiGHS using Logging +using MacroEnergySystemsScaling using PrecompileTools: @compile_workload -# Global scaling factor used when ParameterScale is on to shift values from MW to GW -# DO NOT CHANGE THIS (Unless you do so very carefully) -# To translate MW to GW, divide by ModelScalingFactor -# To translate $ to $M, multiply by ModelScalingFactor^2 -# To translate $/MWh to $M/GWh, multiply by ModelScalingFactor -const ModelScalingFactor = 1e+3 - """ An abstract type that should be subtyped for users creating GenX resources. 
""" diff --git a/src/case_runners/case_runner.jl b/src/case_runners/case_runner.jl index afea227f29..905f4e211e 100644 --- a/src/case_runners/case_runner.jl +++ b/src/case_runners/case_runner.jl @@ -80,6 +80,11 @@ function run_genx_case_simple!(case::AbstractString, mysetup::Dict, optimizer::A println("Time elapsed for model building is") println(time_elapsed) + if mysetup["AutoScaling"] == 1 + println("Scaling Constraints") + scale_constraints!(EP, mysetup["ScalingSettings"]) + end + println("Solving Model") EP, solve_time = solve_model(EP, mysetup) myinputs["solve_time"] = solve_time # Store the model solve time in myinputs diff --git a/src/configure_settings/configure_settings.jl b/src/configure_settings/configure_settings.jl index 9df418e334..dce8e5ddb3 100644 --- a/src/configure_settings/configure_settings.jl +++ b/src/configure_settings/configure_settings.jl @@ -12,7 +12,6 @@ function default_settings() "VirtualChargeDischargeCost" => 1, # $/MWh "MinCapReq" => 0, "MaxCapReq" => 0, - "ParameterScale" => 0, "WriteShadowPrices" => 0, "UCommit" => 0, "TimeDomainReduction" => 0, @@ -37,7 +36,8 @@ function default_settings() "ResourcePoliciesFolder" => "policy_assignments", "SystemFolder" => "system", "PoliciesFolder" => "policies", - "ObjScale" => 1) + "ObjScale" => 1, + "AutoScaling" => 0,) end @doc raw""" @@ -61,6 +61,8 @@ function configure_settings(settings_path::String, output_settings_path::String) settings = default_settings() merge!(settings, model_settings) + settings["ScalingSettings"] = get_scaling_settings(settings) + output_settings = configure_writeoutput(output_settings_path, settings) settings["WriteOutputsSettingsDict"] = output_settings diff --git a/src/load_inputs/load_cap_reserve_margin.jl b/src/load_inputs/load_cap_reserve_margin.jl index 0a652bc78f..338c17aacc 100644 --- a/src/load_inputs/load_cap_reserve_margin.jl +++ b/src/load_inputs/load_cap_reserve_margin.jl @@ -4,13 +4,10 @@ Read input parameters related to planning reserve margin 
constraints """ function load_cap_reserve_margin!(setup::Dict, path::AbstractString, inputs::Dict) - scale_factor = setup["ParameterScale"] == 1 ? ModelScalingFactor : 1 - filename = "Capacity_reserve_margin_slack.csv" if isfile(joinpath(path, filename)) df = load_dataframe(joinpath(path, filename)) inputs["dfCapRes_slack"] = df - inputs["dfCapRes_slack"][!, :PriceCap] ./= scale_factor # Million $/GW if scaled, $/MW if not scaled end filename = "Capacity_reserve_margin.csv" diff --git a/src/load_inputs/load_co2_cap.jl b/src/load_inputs/load_co2_cap.jl index 08e6802a0a..2660ac0936 100644 --- a/src/load_inputs/load_co2_cap.jl +++ b/src/load_inputs/load_co2_cap.jl @@ -4,13 +4,10 @@ Read input parameters related to CO$_2$ emissions cap constraints """ function load_co2_cap!(setup::Dict, path::AbstractString, inputs::Dict) - scale_factor = setup["ParameterScale"] == 1 ? ModelScalingFactor : 1 - filename = "CO2_cap_slack.csv" if isfile(joinpath(path, filename)) df = load_dataframe(joinpath(path, filename)) inputs["dfCO2Cap_slack"] = df - inputs["dfCO2Cap_slack"][!, :PriceCap] ./= scale_factor # Million $/kton if scaled, $/ton if not scaled end filename = "CO2_cap.csv" @@ -23,17 +20,15 @@ function load_co2_cap!(setup::Dict, path::AbstractString, inputs::Dict) # Emission limits if setup["CO2Cap"] == 1 - # CO2 emissions cap in mass + # CO2 emissions cap in mass # note the default inputs is in million tons - # when scaled, the constraint unit is kton # when not scaled, the constraint unit is ton mat = extract_matrix_from_dataframe(df, "CO_2_Max_Mtons") - inputs["dfMaxCO2"] = mat * 1e6 / scale_factor + inputs["dfMaxCO2"] = mat * 1e6 elseif setup["CO2Cap"] == 2 || setup["CO2Cap"] == 3 # CO2 emissions rate applied per MWh mat = extract_matrix_from_dataframe(df, "CO_2_Max_tons_MWh") - # no scale_factor is needed since this is a ratio inputs["dfMaxCO2Rate"] = mat end diff --git a/src/load_inputs/load_demand_data.jl b/src/load_inputs/load_demand_data.jl index 
52c5bd7bf2..512275b700 100644 --- a/src/load_inputs/load_demand_data.jl +++ b/src/load_inputs/load_demand_data.jl @@ -71,14 +71,12 @@ function load_demand_data!(setup::Dict, path::AbstractString, inputs::Dict) inputs["START_SUBPERIODS"] = 1:hours_per_subperiod:T # set of indexes for all time periods that start a subperiod (e.g. sample day/week) inputs["INTERIOR_SUBPERIODS"] = setdiff(1:T, inputs["START_SUBPERIODS"]) # set of indexes for all time periods that do not start a subperiod - # Demand in MW for each zone - scale_factor = setup["ParameterScale"] == 1 ? ModelScalingFactor : 1 # Max value of non-served energy - inputs["Voll"] = as_vector(:Voll) / scale_factor # convert from $/MWh $ million/GWh (assuming objective is divided by 1000) + inputs["Voll"] = as_vector(:Voll) # Demand in MW inputs["pD"] = extract_matrix_from_dataframe(demand_in, DEMAND_COLUMN_PREFIX()[1:(end - 1)], - prefixseparator = 'z') / scale_factor + prefixseparator = 'z') # Cost of non-served energy/demand curtailment # Cost of each segment reported as a fraction of value of non-served energy - scaled implicitly diff --git a/src/load_inputs/load_energy_share_requirement.jl b/src/load_inputs/load_energy_share_requirement.jl index e3205196b7..0d1a9d5a9d 100644 --- a/src/load_inputs/load_energy_share_requirement.jl +++ b/src/load_inputs/load_energy_share_requirement.jl @@ -5,13 +5,10 @@ Read input parameters related to minimum energy share requirement constraints (e.g. renewable portfolio standard or clean electricity standard policies) """ function load_energy_share_requirement!(setup::Dict, path::AbstractString, inputs::Dict) - scale_factor = setup["ParameterScale"] == 1 ? 
ModelScalingFactor : 1 - filename = "Energy_share_requirement_slack.csv" if isfile(joinpath(path, filename)) df = load_dataframe(joinpath(path, filename)) inputs["dfESR_slack"] = df - inputs["dfESR_slack"][!, :PriceCap] ./= scale_factor # million $/GWh if scaled, $/MWh if not scaled end filename = "Energy_share_requirement.csv" diff --git a/src/load_inputs/load_fuels_data.jl b/src/load_inputs/load_fuels_data.jl index 61b0ff2f0f..2b1d89f11b 100644 --- a/src/load_inputs/load_fuels_data.jl +++ b/src/load_inputs/load_fuels_data.jl @@ -24,11 +24,9 @@ function load_fuels_data!(setup::Dict, path::AbstractString, inputs::Dict) fuel_costs = Dict{AbstractString, Array{Float64}}() fuel_CO2 = Dict{AbstractString, Float64}() - scale_factor = setup["ParameterScale"] == 1 ? ModelScalingFactor : 1 - for i in 1:length(fuels) - # fuel cost is in $/MMBTU w/o scaling, $/Billon BTU w/ scaling - fuel_costs[fuels[i]] = costs[:, i] / scale_factor + # fuel cost is in $/MMBTU + fuel_costs[fuels[i]] = costs[:, i] # No need to scale fuel_CO2, fuel_CO2 is ton/MMBTU or kton/Billion BTU fuel_CO2[fuels[i]] = CO2_content[i] end diff --git a/src/load_inputs/load_hydrogen_demand.jl b/src/load_inputs/load_hydrogen_demand.jl index d7b5fc6e47..d78be7e738 100644 --- a/src/load_inputs/load_hydrogen_demand.jl +++ b/src/load_inputs/load_hydrogen_demand.jl @@ -10,10 +10,8 @@ function load_hydrogen_demand!(setup::Dict, path::AbstractString, inputs::Dict) inputs["NumberOfH2DemandReqs"] = nrow(df) inputs["H2DemandReq"] = df[!, :Hydrogen_Demand_kt] - scale_factor = setup["ParameterScale"] == 1 ? 
ModelScalingFactor : 1 # Million $/kton if scaled, $/ton if not scaled - if "PriceCap" in names(df) - inputs["H2DemandPriceCap"] = df[!, :PriceCap] / scale_factor + inputs["H2DemandPriceCap"] = df[!, :PriceCap] end println(filename * " Successfully Read!") end diff --git a/src/load_inputs/load_inputs.jl b/src/load_inputs/load_inputs.jl index aa1ee28a0e..1ec862f5bb 100644 --- a/src/load_inputs/load_inputs.jl +++ b/src/load_inputs/load_inputs.jl @@ -81,9 +81,7 @@ function load_inputs(setup::Dict, path::AbstractString) end # Virtual charge discharge cost - scale_factor = setup["ParameterScale"] == 1 ? ModelScalingFactor : 1 - inputs["VirtualChargeDischargeCost"] = setup["VirtualChargeDischargeCost"] / - scale_factor + inputs["VirtualChargeDischargeCost"] = setup["VirtualChargeDischargeCost"] println("CSV Files Successfully Read In From $path") diff --git a/src/load_inputs/load_maximum_capacity_requirement.jl b/src/load_inputs/load_maximum_capacity_requirement.jl index 95b766be8e..d242a33da1 100644 --- a/src/load_inputs/load_maximum_capacity_requirement.jl +++ b/src/load_inputs/load_maximum_capacity_requirement.jl @@ -9,11 +9,8 @@ function load_maximum_capacity_requirement!(path::AbstractString, inputs::Dict, inputs["NumberOfMaxCapReqs"] = nrow(df) inputs["MaxCapReq"] = df[!, :Max_MW] - scale_factor = setup["ParameterScale"] == 1 ? 
ModelScalingFactor : 1 - - inputs["MaxCapReq"] /= scale_factor if "PriceCap" in names(df) - inputs["MaxCapPriceCap"] = df[!, :PriceCap] / scale_factor + inputs["MaxCapPriceCap"] = df[!, :PriceCap] end println(filename * " Successfully Read!") end diff --git a/src/load_inputs/load_minimum_capacity_requirement.jl b/src/load_inputs/load_minimum_capacity_requirement.jl index e0561b126d..1ab6fd6955 100644 --- a/src/load_inputs/load_minimum_capacity_requirement.jl +++ b/src/load_inputs/load_minimum_capacity_requirement.jl @@ -9,14 +9,8 @@ function load_minimum_capacity_requirement!(path::AbstractString, inputs::Dict, NumberOfMinCapReqs = length(df[!, :MinCapReqConstraint]) inputs["NumberOfMinCapReqs"] = NumberOfMinCapReqs inputs["MinCapReq"] = df[!, :Min_MW] - if setup["ParameterScale"] == 1 - inputs["MinCapReq"] /= ModelScalingFactor # Convert to GW - end if "PriceCap" in names(df) inputs["MinCapPriceCap"] = df[!, :PriceCap] - if setup["ParameterScale"] == 1 - inputs["MinCapPriceCap"] /= ModelScalingFactor # Convert to million $/GW - end end println(filename * " Successfully Read!") end diff --git a/src/load_inputs/load_multistage_data.jl b/src/load_inputs/load_multistage_data.jl index 2042ffa87d..5793ed2057 100644 --- a/src/load_inputs/load_multistage_data.jl +++ b/src/load_inputs/load_multistage_data.jl @@ -1,4 +1,4 @@ -function load_multistage_dataframe(filepath::AbstractString, scale_factor::Float64) +function load_multistage_dataframe(filepath::AbstractString) if !isfile(filepath) error("Multistage data file not found at $filepath") end @@ -6,7 +6,7 @@ function load_multistage_dataframe(filepath::AbstractString, scale_factor::Float multistage_in = load_dataframe(filepath) # rename columns lowercase for internal consistency rename!(multistage_in, lowercase.(names(multistage_in))) - scale_multistage_data!(multistage_in, scale_factor) + scale_multistage_data!(multistage_in) validate_multistage_data!(multistage_in) @@ -24,7 +24,7 @@ function 
validate_multistage_data!(multistage_df::DataFrame) end end -function scale_multistage_data!(multistage_in::DataFrame, scale_factor::Float64) +function scale_multistage_data!(multistage_in::DataFrame) columns_to_scale = [:min_retired_cap_mw, # to GW :min_retired_charge_cap_mw, # to GW :min_retired_energy_cap_mw, # to GW @@ -37,6 +37,6 @@ function scale_multistage_data!(multistage_in::DataFrame, scale_factor::Float64) :min_retired_cap_discharge_dc_mw, :min_retired_cap_discharge_ac_mw ] - scale_columns!(multistage_in, columns_to_scale, scale_factor) + scale_columns!(multistage_in, columns_to_scale) return nothing end diff --git a/src/load_inputs/load_network_data.jl b/src/load_inputs/load_network_data.jl index ac7f2b1c8c..3cd06da8e9 100644 --- a/src/load_inputs/load_network_data.jl +++ b/src/load_inputs/load_network_data.jl @@ -4,8 +4,6 @@ Function for reading input parameters related to the electricity transmission network """ function load_network_data!(setup::Dict, path::AbstractString, inputs_nw::Dict) - scale_factor = setup["ParameterScale"] == 1 ? 
ModelScalingFactor : 1 - filename = "Network.csv" network_var = load_dataframe(joinpath(path, filename)) @@ -23,7 +21,7 @@ function load_network_data!(setup::Dict, path::AbstractString, inputs_nw::Dict) inputs_nw["pNet_Map"] = load_network_map(network_var, Z, L) # Transmission capacity of the network (in MW) - inputs_nw["pTrans_Max"] = to_floats(:Line_Max_Flow_MW) / scale_factor # convert to GW + inputs_nw["pTrans_Max"] = to_floats(:Line_Max_Flow_MW) if setup["Trans_Loss_Segments"] == 1 # Line percentage Loss - valid for case when modeling losses as a fixed percent of absolute value of power flows @@ -50,8 +48,7 @@ function load_network_data!(setup::Dict, path::AbstractString, inputs_nw::Dict) inputs_nw["Line_Angle_Limit"] = to_floats(:Angle_Limit_Rad) # DC-OPF coefficient for each line (in MW when not scaled, in GW when scaled) # MW = (kV)^2/Ohms - inputs_nw["pDC_OPF_coeff"] = ((line_voltage_kV .^ 2) ./ line_reactance_Ohms) / - scale_factor + inputs_nw["pDC_OPF_coeff"] = ((line_voltage_kV .^ 2) ./ line_reactance_Ohms) end # Maximum possible flow after reinforcement for use in linear segments of piecewise approximation @@ -59,12 +56,11 @@ function load_network_data!(setup::Dict, path::AbstractString, inputs_nw::Dict) if setup["NetworkExpansion"] == 1 # Read between zone network reinforcement costs per peak MW of capacity added - inputs_nw["pC_Line_Reinforcement"] = to_floats(:Line_Reinforcement_Cost_per_MWyr) / - scale_factor # convert to million $/GW/yr with objective function in millions + inputs_nw["pC_Line_Reinforcement"] = to_floats(:Line_Reinforcement_Cost_per_MWyr) # Maximum reinforcement allowed in MW #NOTE: values <0 indicate no expansion possible inputs_nw["pMax_Line_Reinforcement"] = map(x -> max(0, x), - to_floats(:Line_Max_Reinforcement_MW)) / scale_factor # convert to GW + to_floats(:Line_Max_Reinforcement_MW)) inputs_nw["pTrans_Max_Possible"] += inputs_nw["pMax_Line_Reinforcement"] end @@ -77,8 +73,7 @@ function 
load_network_data!(setup::Dict, path::AbstractString, inputs_nw::Dict) end # Max Flow Possible on Each Line - inputs_nw["pLine_Max_Flow_Possible_MW"] = to_floats(:Line_Max_Flow_Possible_MW) / - scale_factor # Convert to GW + inputs_nw["pLine_Max_Flow_Possible_MW"] = to_floats(:Line_Max_Flow_Possible_MW) end # Transmission line (between zone) loss coefficient (resistance/voltage^2) @@ -88,7 +83,7 @@ function load_network_data!(setup::Dict, path::AbstractString, inputs_nw::Dict) elseif setup["Trans_Loss_Segments"] >= 2 # If zones are connected, loss coefficient is R/V^2 where R is resistance in Ohms and V is voltage in Volts inputs_nw["pTrans_Loss_Coef"] = (inputs_nw["Ohms"] / 10^6) ./ - (inputs_nw["kV"] / 10^3)^2 * scale_factor # 1/GW *** + (inputs_nw["kV"] / 10^3)^2 # 1/GW *** end ## Sets and indices for transmission losses and expansion diff --git a/src/load_inputs/load_operational_reserves.jl b/src/load_inputs/load_operational_reserves.jl index 6b6d67cb78..91cd836a6f 100644 --- a/src/load_inputs/load_operational_reserves.jl +++ b/src/load_inputs/load_operational_reserves.jl @@ -43,12 +43,9 @@ function load_operational_reserves!(setup::Dict, path::AbstractString, inputs::D # Spinning up reserve requirement as a percent of hourly wind and solar generation (which is summed across all zones) inputs["pRsv_Req_VRE"] = float(res_in[1, :Rsv_Req_Percent_VRE]) - scale_factor = setup["ParameterScale"] == 1 ? 
ModelScalingFactor : 1 - # Penalty for not meeting hourly spinning reserve requirement - inputs["pC_Rsv_Penalty"] = float(res_in[1, :Unmet_Rsv_Penalty_Dollar_per_MW]) / - scale_factor # convert to million $/GW with objective function in millions - inputs["pStatic_Contingency"] = float(res_in[1, :Static_Contingency_MW]) / scale_factor # convert to GW + inputs["pC_Rsv_Penalty"] = float(res_in[1, :Unmet_Rsv_Penalty_Dollar_per_MW]) + inputs["pStatic_Contingency"] = float(res_in[1, :Static_Contingency_MW]) if setup["UCommit"] >= 1 inputs["pDynamic_Contingency"] = convert(Int8, res_in[1, :Dynamic_Contingency]) diff --git a/src/load_inputs/load_resources_data.jl b/src/load_inputs/load_resources_data.jl index 7966038a0a..86188e45ae 100644 --- a/src/load_inputs/load_resources_data.jl +++ b/src/load_inputs/load_resources_data.jl @@ -72,7 +72,7 @@ function _get_summary_map() end """ - scale_resources_data!(resource_in::DataFrame, scale_factor::Float64) + scale_resources_data!(resource_in::DataFrame, scale_factor::Float64=1.0) Scales resources attributes in-place if necessary. Generally, these scalings converts energy and power units from MW to GW and \$/MW to \$M/GW. Both are done by dividing the values by 1000. See documentation for descriptions of each column being scaled. @@ -82,7 +82,11 @@ See documentation for descriptions of each column being scaled. - `scale_factor` (Float64): A scaling factor for energy and currency units. 
""" -function scale_resources_data!(resource_in::DataFrame, scale_factor::Float64) +function scale_resources_data!(resource_in::DataFrame, scale_factor::Float64=1.0) + if scale_factor == 1.0 + return nothing + end + columns_to_scale = [:existing_charge_cap_mw, # to GW :existing_cap_mwh, # to GWh :existing_cap_mw, # to GW @@ -115,7 +119,7 @@ function scale_resources_data!(resource_in::DataFrame, scale_factor::Float64) end """ - scale_vre_stor_data!(vre_stor_in::DataFrame, scale_factor::Float64) + scale_vre_stor_data!(vre_stor_in::DataFrame, scale_factor::Float64=1.0) Scales vre_stor attributes in-place if necessary. Generally, these scalings converts energy and power units from MW to GW and \$/MW to \$M/GW. Both are done by dividing the values by 1000. See documentation for descriptions of each column being scaled. @@ -125,7 +129,7 @@ See documentation for descriptions of each column being scaled. - `scale_factor` (Float64): A scaling factor for energy and currency units. """ -function scale_vre_stor_data!(vre_stor_in::DataFrame, scale_factor::Float64) +function scale_vre_stor_data!(vre_stor_in::DataFrame, scale_factor::Float64=1.0) columns_to_scale = [:existing_cap_inverter_mw, :existing_cap_solar_mw, :existing_cap_wind_mw, @@ -185,7 +189,7 @@ function scale_vre_stor_data!(vre_stor_in::DataFrame, scale_factor::Float64) end """ - scale_columns!(df::DataFrame, columns_to_scale::Vector{Symbol}, scale_factor::Float64) + scale_columns!(df::DataFrame, columns_to_scale::Vector{Symbol}, scale_factor::Float64=1.0) Scales in-place the columns in `columns_to_scale` of a dataframe `df` by a `scale_factor`. 
@@ -197,7 +201,10 @@ Scales in-place the columns in `columns_to_scale` of a dataframe `df` by a `scal """ function scale_columns!(df::DataFrame, columns_to_scale::Vector{Symbol}, - scale_factor::Float64) + scale_factor::Float64=1.0) + if scale_factor == 1.0 + return nothing + end for column in columns_to_scale if string(column) in names(df) df[!, column] /= scale_factor @@ -207,20 +214,20 @@ function scale_columns!(df::DataFrame, end """ - load_resource_df(path::AbstractString, scale_factor::Float64, resource_type::Type) + load_resource_df(path::AbstractString, resource_type::Type, scale_factor::Float64=1.0) Function to load and scale the dataframe of a given resource. # Arguments - `path::AbstractString`: Path to the resource dataframe. -- `scale_factor::Float64`: Scaling factor for the resource data. - `resource_type::Type`: GenX type of the resource. +- `scale_factor::Float64=1.0`: Scaling factor for the resource data. # Returns - `resource_in::DataFrame`: The loaded and scaled resource data. """ -function load_resource_df(path::AbstractString, scale_factor::Float64, resource_type::Type) +function load_resource_df(path::AbstractString, resource_type::Type, scale_factor::Float64=1.0) resource_in = load_dataframe(path) # rename columns lowercase for internal consistency rename!(resource_in, lowercase.(names(resource_in))) @@ -303,7 +310,7 @@ Construct the array of resources from multiple files of different types located # Arguments - `resource_folder::AbstractString`: The path to the folder containing the resource files. - `resources_info::NamedTuple`: A NamedTuple that maps a resource type to its filename and GenX type. -- `scale_factor::Float64`: A scaling factor to adjust the attributes of the resources (default: 1.0). +- `scale_factor::Float64=1.0`: A scaling factor to adjust the attributes of the resources (default: 1.0). # Returns - `Vector{<:AbstractResource}`: An array of GenX resources. 
@@ -314,7 +321,7 @@ Construct the array of resources from multiple files of different types located """ function create_resource_array(resource_folder::AbstractString, resources_info::NamedTuple, - scale_factor::Float64 = 1.0) + scale_factor::Float64=1.0) resource_id_offset = 0 resources = [] # loop over available types and load all resources in resource_folder @@ -322,7 +329,7 @@ function create_resource_array(resource_folder::AbstractString, df_path = joinpath(resource_folder, filename) # if file exists, load resources of a single resource_type if isfile(df_path) - resource_in = load_resource_df(df_path, scale_factor, resource_type) + resource_in = load_resource_df(df_path, resource_type, scale_factor) # compute indices for resources of a given type and add them to dataframe resources_indices = compute_resource_indices(resource_in, resource_id_offset) add_id_to_resource_df!(resource_in, resources_indices) @@ -590,13 +597,11 @@ Function that loads and scales resources data from folder specified in resources """ function create_resource_array(setup::Dict, resources_path::AbstractString) - scale_factor = setup["ParameterScale"] == 1 ? ModelScalingFactor : 1.0 - # get filename and GenX type for each type of resources available in GenX resources_info = _get_resource_info() # load each resource type, scale data and return array of resources - resources = create_resource_array(resources_path, resources_info, scale_factor) + resources = create_resource_array(resources_path, resources_info) # validate input before returning resources validate_resources(setup, resources) return resources @@ -800,7 +805,6 @@ Reads module dataframes, loops over files and adds columns as new attributes to function add_modules_to_resources!(resources::Vector{<:AbstractResource}, setup::Dict, resources_path::AbstractString) - scale_factor = setup["ParameterScale"] == 1 ? 
ModelScalingFactor : 1.0 modules = Vector{DataFrame}() @@ -808,7 +812,7 @@ function add_modules_to_resources!(resources::Vector{<:AbstractResource}, # Add multistage if multistage is activated if setup["MultiStage"] == 1 filename = joinpath(resources_path, "Resource_multistage_data.csv") - multistage_in = load_multistage_dataframe(filename, scale_factor) + multistage_in = load_multistage_dataframe(filename) push!(modules, multistage_in) @info "Multistage data successfully read." end @@ -944,9 +948,6 @@ function process_piecewisefuelusage!(setup::Dict, sort!(slope_cols, by = x -> parse(Int, split(string(x), "_")[end])) slope_df = DataFrame(heat_rate_mat, Symbol.(slope_cols)) PWFU_data = hcat(slope_df, intercept_df) - # no need to scale sclope, but intercept should be scaled when parameterscale is on (MMBTU -> billion BTU) - scale_factor = setup["ParameterScale"] == 1 ? ModelScalingFactor : 1 - PWFU_data[!, intercept_cols] ./= scale_factor inputs["slope_cols"] = slope_cols inputs["intercept_cols"] = intercept_cols diff --git a/src/model/resources/fusion/fusion.jl b/src/model/resources/fusion/fusion.jl index 2b62f1d20f..c4e2fb3f80 100644 --- a/src/model/resources/fusion/fusion.jl +++ b/src/model/resources/fusion/fusion.jl @@ -483,11 +483,11 @@ end function thermal_fusion_annual_parasitic_power( EP::Model, inputs::Dict, setup::Dict)::Vector{Float64} gen = inputs["RESOURCES"] - scale_factor = setup["ParameterScale"] == 1 ? 
ModelScalingFactor : 1 + FUSION = ids_with(gen, fusion) resource_component = resource_name.(gen[FUSION]) expr = fusion_annual_parasitic_power.(Ref(EP), Ref(inputs), resource_component) - return scale_factor * value.(expr) + return value.(expr) end diff --git a/src/model/resources/hydrogen/electrolyzer.jl b/src/model/resources/hydrogen/electrolyzer.jl index bd132321bf..aaab9835c3 100644 --- a/src/model/resources/hydrogen/electrolyzer.jl +++ b/src/model/resources/hydrogen/electrolyzer.jl @@ -172,14 +172,13 @@ function electrolyzer!(EP::Model, inputs::Dict, setup::Dict) ### Objective Function ### # Subtract hydrogen revenue from objective function - scale_factor = setup["ParameterScale"] == 1 ? 10^6 : 1 # If ParameterScale==1, costs are in millions of $ @expression(EP, eHydrogenValue[y in ELECTROLYZERS, t in 1:T], omega[t] * EP[:vUSE][y, t] / hydrogen_mwh_per_tonne(gen[y]) * - hydrogen_price_per_tonne(gen[y])/scale_factor) + hydrogen_price_per_tonne(gen[y])) if !isempty(VS_ELEC) @expression(EP, eHydrogenValue_vs[y in VS_ELEC, t in 1:T], omega[t] * EP[:vP_ELEC][y, t] / hydrogen_mwh_per_tonne_elec(gen[y]) * - hydrogen_price_per_tonne_elec(gen[y])/scale_factor) + hydrogen_price_per_tonne_elec(gen[y])) end @expression(EP, eTotalHydrogenValueT[t in 1:T], if !isempty(VS_ELEC) diff --git a/src/model/resources/maintenance.jl b/src/model/resources/maintenance.jl index 1a9bda5f04..4f6f1469ea 100644 --- a/src/model/resources/maintenance.jl +++ b/src/model/resources/maintenance.jl @@ -108,7 +108,7 @@ function maintenance_formulation!(EP::Model, maint_begin_cadence::Int, maint_dur::Int, maint_freq_years::Int, - cap::Float64, + cap::Union{Float64, Int64}, vcommit::Symbol, ecap::Symbol, integer_operational_unit_commitment::Bool) diff --git a/src/time_domain_reduction/time_domain_reduction.jl b/src/time_domain_reduction/time_domain_reduction.jl index 36aa7ecb6d..7d08ac5602 100644 --- a/src/time_domain_reduction/time_domain_reduction.jl +++ 
b/src/time_domain_reduction/time_domain_reduction.jl @@ -683,16 +683,10 @@ function cluster_inputs(inpath, PMap_Outfile = joinpath(TimeDomainReductionFolder, "Period_map.csv") YAML_Outfile = joinpath(TimeDomainReductionFolder, "time_domain_reduction_settings.yml") - # Define a local version of the setup so that you can modify the mysetup["ParameterScale"] value to be zero in case it is 1 - mysetup_local = copy(mysetup) - # If ParameterScale =1 then make it zero, since clustered inputs will be scaled prior to generating model - mysetup_local["ParameterScale"] = 0 # Performing cluster and report outputs in user-provided units - # Define another local version of setup such that Multi-Stage Non-Concatentation TDR can iteratively read in the raw data mysetup_MS = copy(mysetup) mysetup_MS["TimeDomainReduction"] = 0 mysetup_MS["DoNotReadPeriodMap"] = 1 - mysetup_MS["ParameterScale"] = 0 if MultiStage == 1 model_dict = Dict() @@ -749,7 +743,7 @@ function cluster_inputs(inpath, if v println("Not MultiStage") end - myinputs = load_inputs(mysetup_local, inpath) + myinputs = load_inputs(mysetup, inpath) RESOURCE_ZONES = myinputs["RESOURCE_ZONES"] RESOURCES = myinputs["RESOURCE_NAMES"] ZONES = myinputs["R_ZONES"] diff --git a/src/write_outputs/capacity_reserve_margin/write_capacity_value.jl b/src/write_outputs/capacity_reserve_margin/write_capacity_value.jl index b1aa431bb6..50ef9e7ae0 100644 --- a/src/write_outputs/capacity_reserve_margin/write_capacity_value.jl +++ b/src/write_outputs/capacity_reserve_margin/write_capacity_value.jl @@ -29,11 +29,10 @@ function write_capacity_value(path::AbstractString, inputs::Dict, setup::Dict, E MUST_RUN = inputs["MUST_RUN"] VRE_STOR = inputs["VRE_STOR"] - scale_factor = setup["ParameterScale"] == 1 ? 
ModelScalingFactor : 1 eTotalCap = value.(EP[:eTotalCap]) minimum_plant_size = 1 # MW - large_plants = findall(>=(minimum_plant_size), eTotalCap * scale_factor) + large_plants = findall(>=(minimum_plant_size), eTotalCap ) THERM_ALL_EX = intersect(THERM_ALL, large_plants) VRE_EX = intersect(VRE, large_plants) @@ -165,6 +164,5 @@ function capacity_reserve_margin_price(EP::Model, setup::Dict, capres_zone::Int)::Vector{Float64} ω = inputs["omega"] - scale_factor = setup["ParameterScale"] == 1 ? ModelScalingFactor : 1 - return dual.(EP[:cCapacityResMargin][capres_zone, :]) ./ ω * scale_factor + return dual.(EP[:cCapacityResMargin][capres_zone, :]) ./ ω end diff --git a/src/write_outputs/capacity_reserve_margin/write_reserve_margin.jl b/src/write_outputs/capacity_reserve_margin/write_reserve_margin.jl index 1eeca0ef0e..107ae72a2d 100644 --- a/src/write_outputs/capacity_reserve_margin/write_reserve_margin.jl +++ b/src/write_outputs/capacity_reserve_margin/write_reserve_margin.jl @@ -1,8 +1,5 @@ function write_reserve_margin(path::AbstractString, setup::Dict, EP::Model) temp_ResMar = dual.(EP[:cCapacityResMargin]) - if setup["ParameterScale"] == 1 - temp_ResMar = temp_ResMar * ModelScalingFactor # Convert from MillionUS$/GWh to US$/MWh - end dfResMar = DataFrame(temp_ResMar, :auto) CSV.write(joinpath(path, "ReserveMargin.csv"), dfResMar) return nothing diff --git a/src/write_outputs/capacity_reserve_margin/write_reserve_margin_revenue.jl b/src/write_outputs/capacity_reserve_margin/write_reserve_margin_revenue.jl index 418b14eeab..32b5930a6c 100644 --- a/src/write_outputs/capacity_reserve_margin/write_reserve_margin_revenue.jl +++ b/src/write_outputs/capacity_reserve_margin/write_reserve_margin_revenue.jl @@ -12,8 +12,7 @@ function write_reserve_margin_revenue(path::AbstractString, inputs::Dict, setup::Dict, EP::Model) - scale_factor = setup["ParameterScale"] == 1 ? 
ModelScalingFactor : 1 - + gen = inputs["RESOURCES"] regions = region.(gen) clusters = cluster.(gen) @@ -86,7 +85,7 @@ function write_reserve_margin_revenue(path::AbstractString, tempresrev[AC_CHARGE] .-= derating_factor.(gen[AC_CHARGE], tag = i) .* ((value.(EP[:vCAPRES_AC_CHARGE][AC_CHARGE, :]).data) * weighted_price) end - tempresrev *= scale_factor + annual_sum .+= tempresrev dfResRevenue = hcat(dfResRevenue, DataFrame([tempresrev], [Symbol("CapRes_$i")])) end diff --git a/src/write_outputs/capacity_reserve_margin/write_reserve_margin_slack.jl b/src/write_outputs/capacity_reserve_margin/write_reserve_margin_slack.jl index f6d71e4fb4..9ab0d126c1 100644 --- a/src/write_outputs/capacity_reserve_margin/write_reserve_margin_slack.jl +++ b/src/write_outputs/capacity_reserve_margin/write_reserve_margin_slack.jl @@ -8,18 +8,10 @@ function write_reserve_margin_slack(path::AbstractString, AnnualSum = value.(EP[:eCapResSlack_Year]), Penalty = value.(EP[:eCCapResSlack])) - if setup["ParameterScale"] == 1 - dfResMar_slack.AnnualSum .*= ModelScalingFactor # Convert GW to MW - dfResMar_slack.Penalty .*= ModelScalingFactor^2 # Convert Million $ to $ - end - if setup["WriteOutputs"] == "annual" CSV.write(joinpath(path, "ReserveMargin_prices_and_penalties.csv"), dfResMar_slack) else # setup["WriteOutputs"] == "full" temp_ResMar_slack = value.(EP[:vCapResSlack]) - if setup["ParameterScale"] == 1 - temp_ResMar_slack .*= ModelScalingFactor # Convert GW to MW - end dfResMar_slack = hcat(dfResMar_slack, DataFrame(temp_ResMar_slack, [Symbol("t$t") for t in 1:T])) CSV.write(joinpath(path, "ReserveMargin_prices_and_penalties.csv"), diff --git a/src/write_outputs/capacity_reserve_margin/write_reserve_margin_w.jl b/src/write_outputs/capacity_reserve_margin/write_reserve_margin_w.jl index 74b4efa7fe..5299ce0723 100644 --- a/src/write_outputs/capacity_reserve_margin/write_reserve_margin_w.jl +++ b/src/write_outputs/capacity_reserve_margin/write_reserve_margin_w.jl @@ -3,9 +3,6 @@ function 
write_reserve_margin_w(path::AbstractString, inputs::Dict, setup::Dict, #dfResMar dataframe with weights included for calculations dfResMar_w = DataFrame(Constraint = [Symbol("t$t") for t in 1:T]) temp_ResMar_w = transpose(dual.(EP[:cCapacityResMargin])) ./ inputs["omega"] - if setup["ParameterScale"] == 1 - temp_ResMar_w = temp_ResMar_w * ModelScalingFactor # Convert from MillionUS$/GWh to US$/MWh - end dfResMar_w = hcat(dfResMar_w, DataFrame(temp_ResMar_w, :auto)) auxNew_Names_res = [Symbol("Constraint"); [Symbol("CapRes_$i") for i in 1:inputs["NCapacityReserveMargin"]]] diff --git a/src/write_outputs/capacity_reserve_margin/write_virtual_discharge.jl b/src/write_outputs/capacity_reserve_margin/write_virtual_discharge.jl index 1aa52623de..fe9be7150d 100644 --- a/src/write_outputs/capacity_reserve_margin/write_virtual_discharge.jl +++ b/src/write_outputs/capacity_reserve_margin/write_virtual_discharge.jl @@ -9,12 +9,10 @@ function write_virtual_discharge(path::AbstractString, inputs::Dict, setup::Dict T = inputs["T"] # Number of time steps (hours) STOR_ALL = inputs["STOR_ALL"] - scale_factor = setup["ParameterScale"] == 1 ? 
ModelScalingFactor : 1 - resources = inputs["RESOURCE_NAMES"][STOR_ALL] zones = inputs["R_ZONES"][STOR_ALL] virtual_discharge = (value.(EP[:vCAPRES_discharge][STOR_ALL, :].data) - - value.(EP[:vCAPRES_charge][STOR_ALL, :].data)) * scale_factor + value.(EP[:vCAPRES_charge][STOR_ALL, :].data)) dfVirtualDischarge = DataFrame(Resource = resources, Zone = zones) dfVirtualDischarge.AnnualSum .= virtual_discharge * inputs["omega"] diff --git a/src/write_outputs/co2_cap/write_co2_cap.jl b/src/write_outputs/co2_cap/write_co2_cap.jl index 19cba87d71..8b486a9775 100644 --- a/src/write_outputs/co2_cap/write_co2_cap.jl +++ b/src/write_outputs/co2_cap/write_co2_cap.jl @@ -8,16 +8,9 @@ function write_co2_cap(path::AbstractString, inputs::Dict, setup::Dict, EP::Mode dfCO2Price = DataFrame( CO2_Cap = [Symbol("CO2_Cap_$cap") for cap in 1:inputs["NCO2Cap"]], CO2_Price = (-1) * (dual.(EP[:cCO2Emissions_systemwide]))) - if setup["ParameterScale"] == 1 - dfCO2Price.CO2_Price .*= ModelScalingFactor # Convert Million$/kton to $/ton - end if haskey(inputs, "dfCO2Cap_slack") dfCO2Price[!, :CO2_Mass_Slack] = convert(Array{Float64}, value.(EP[:vCO2Cap_slack])) dfCO2Price[!, :CO2_Penalty] = convert(Array{Float64}, value.(EP[:eCCO2Cap_slack])) - if setup["ParameterScale"] == 1 - dfCO2Price.CO2_Mass_Slack .*= ModelScalingFactor # Convert ktons to tons - dfCO2Price.CO2_Penalty .*= ModelScalingFactor^2 # Convert Million$ to $ - end end CSV.write(joinpath(path, "CO2_prices_and_penalties.csv"), dfCO2Price) diff --git a/src/write_outputs/energy_share_requirement/write_esr_prices.jl b/src/write_outputs/energy_share_requirement/write_esr_prices.jl index e9cccc46ae..0fe94e566a 100644 --- a/src/write_outputs/energy_share_requirement/write_esr_prices.jl +++ b/src/write_outputs/energy_share_requirement/write_esr_prices.jl @@ -1,16 +1,9 @@ function write_esr_prices(path::AbstractString, inputs::Dict, setup::Dict, EP::Model) dfESR = DataFrame(ESR_Price = convert(Array{Float64}, dual.(EP[:cESRShare]))) - if 
setup["ParameterScale"] == 1 - dfESR[!, :ESR_Price] = dfESR[!, :ESR_Price] * ModelScalingFactor # Converting MillionUS$/GWh to US$/MWh - end if haskey(inputs, "dfESR_slack") dfESR[!, :ESR_AnnualSlack] = convert(Array{Float64}, value.(EP[:vESR_slack])) dfESR[!, :ESR_AnnualPenalty] = convert(Array{Float64}, value.(EP[:eCESRSlack])) - if setup["ParameterScale"] == 1 - dfESR[!, :ESR_AnnualSlack] *= ModelScalingFactor # Converting GWh to MWh - dfESR[!, :ESR_AnnualPenalty] *= (ModelScalingFactor^2) # Converting MillionUSD to USD - end end CSV.write(joinpath(path, "ESR_prices_and_penalties.csv"), dfESR) return dfESR diff --git a/src/write_outputs/hydrogen/write_hourly_matching_prices.jl b/src/write_outputs/hydrogen/write_hourly_matching_prices.jl index 92fe82c0b6..6dda03f2da 100644 --- a/src/write_outputs/hydrogen/write_hourly_matching_prices.jl +++ b/src/write_outputs/hydrogen/write_hourly_matching_prices.jl @@ -4,12 +4,11 @@ function write_hourly_matching_prices(path::AbstractString, EP::Model) T = inputs["T"] # Number of time steps (hours) Z = inputs["Z"] # Number of zones - scale_factor = setup["ParameterScale"] == 1 ? 
ModelScalingFactor : 1 - + ## Extract dual variables of constraints dfHourlyMatchPrices = DataFrame(Zone = 1:Z) # The unit is $/MWh # Dividing dual variable for each hour with corresponding hourly weight to retrieve marginal cost of the constraint - price = dual.(EP[:cHourlyMatching]) ./ inputs["omega"] * scale_factor + price = dual.(EP[:cHourlyMatching]) ./ inputs["omega"] dfHourlyMatchPrices = hcat(dfHourlyMatchPrices, DataFrame(transpose(price), :auto)) diff --git a/src/write_outputs/hydrogen/write_hydrogen_prices.jl b/src/write_outputs/hydrogen/write_hydrogen_prices.jl index 6701931967..b552d60121 100644 --- a/src/write_outputs/hydrogen/write_hydrogen_prices.jl +++ b/src/write_outputs/hydrogen/write_hydrogen_prices.jl @@ -1,11 +1,9 @@ function write_hydrogen_prices(path::AbstractString, inputs::Dict, setup::Dict, EP::Model) - scale_factor = setup["ParameterScale"] == 1 ? ModelScalingFactor^2 : 1 # If ParameterScale==1, costs are in millions of $ - NumberOfH2DemandReqs = inputs["NumberOfH2DemandReqs"] dfHydrogenPrice = DataFrame( H2_Demand = [Symbol("H2_Demand_$h2demand") for h2demand in 1:NumberOfH2DemandReqs], Hydrogen_Price_Per_Tonne = convert( - Array{Float64}, dual.(EP[:cZoneH2DemandReq]) * scale_factor)) + Array{Float64}, dual.(EP[:cZoneH2DemandReq]) )) CSV.write(joinpath(path, "hydrogen_prices.csv"), dfHydrogenPrice) return nothing diff --git a/src/write_outputs/long_duration_storage/write_opwrap_lds_dstor.jl b/src/write_outputs/long_duration_storage/write_opwrap_lds_dstor.jl index 875d8e6f86..a318fa3e6b 100644 --- a/src/write_outputs/long_duration_storage/write_opwrap_lds_dstor.jl +++ b/src/write_outputs/long_duration_storage/write_opwrap_lds_dstor.jl @@ -19,9 +19,6 @@ function write_opwrap_lds_dstor(path::AbstractString, inputs::Dict, setup::Dict, end end end - if setup["ParameterScale"] == 1 - dsoc *= ModelScalingFactor - end dfdStorage = hcat(dfdStorage, DataFrame(dsoc, :auto)) auxNew_Names = [Symbol("Resource"); Symbol("Zone"); [Symbol("w$t") for t 
in 1:W]] diff --git a/src/write_outputs/long_duration_storage/write_opwrap_lds_stor_init.jl b/src/write_outputs/long_duration_storage/write_opwrap_lds_stor_init.jl index e0a731d177..6bc831f971 100644 --- a/src/write_outputs/long_duration_storage/write_opwrap_lds_stor_init.jl +++ b/src/write_outputs/long_duration_storage/write_opwrap_lds_stor_init.jl @@ -22,9 +22,6 @@ function write_opwrap_lds_stor_init(path::AbstractString, end end end - if setup["ParameterScale"] == 1 - socw *= ModelScalingFactor - end dfStorageInit = hcat(dfStorageInit, DataFrame(socw, :auto)) auxNew_Names = [Symbol("Resource"); Symbol("Zone"); [Symbol("n$t") for t in 1:NPeriods]] @@ -45,10 +42,6 @@ function write_opwrap_lds_stor_init(path::AbstractString, e_total_cap = value.(EP[:eTotalCap]) v_charge = value.(EP[:vCHARGE]) v_P = value.(EP[:vP]) - if setup["ParameterScale"] == 1 - v_charge *= ModelScalingFactor - v_P *= ModelScalingFactor - end if !isempty(stor_hydro_long_duration) v_spill = value.(EP[:vSPILL]) end diff --git a/src/write_outputs/min_max_capacity_requirement/write_maximum_capacity_requirement.jl b/src/write_outputs/min_max_capacity_requirement/write_maximum_capacity_requirement.jl index 8d1b3450ee..fda5d3c9d9 100644 --- a/src/write_outputs/min_max_capacity_requirement/write_maximum_capacity_requirement.jl +++ b/src/write_outputs/min_max_capacity_requirement/write_maximum_capacity_requirement.jl @@ -8,15 +8,9 @@ function write_maximum_capacity_requirement(path::AbstractString, for maxcap in 1:NumberOfMaxCapReqs], Price = -dual.(EP[:cZoneMaxCapReq])) - scale_factor = setup["ParameterScale"] == 1 ? 
ModelScalingFactor : 1 - - dfMaxCapPrice.Price *= scale_factor - if haskey(inputs, "MaxCapPriceCap") dfMaxCapPrice[!, :Slack] = convert(Array{Float64}, value.(EP[:vMaxCap_slack])) dfMaxCapPrice[!, :Penalty] = convert(Array{Float64}, value.(EP[:eCMaxCap_slack])) - dfMaxCapPrice.Slack *= scale_factor # Convert GW to MW - dfMaxCapPrice.Penalty *= scale_factor^2 # Convert Million $ to $ end CSV.write(joinpath(path, "MaxCapReq_prices_and_penalties.csv"), dfMaxCapPrice) end diff --git a/src/write_outputs/min_max_capacity_requirement/write_minimum_capacity_requirement.jl b/src/write_outputs/min_max_capacity_requirement/write_minimum_capacity_requirement.jl index bae7d17ee9..d1210ce9aa 100644 --- a/src/write_outputs/min_max_capacity_requirement/write_minimum_capacity_requirement.jl +++ b/src/write_outputs/min_max_capacity_requirement/write_minimum_capacity_requirement.jl @@ -8,15 +8,9 @@ function write_minimum_capacity_requirement(path::AbstractString, for mincap in 1:NumberOfMinCapReqs], Price = dual.(EP[:cZoneMinCapReq])) - scale_factor = setup["ParameterScale"] == 1 ? 
ModelScalingFactor : 1 - - dfMinCapPrice.Price *= scale_factor # Convert Million $/GW to $/MW - if haskey(inputs, "MinCapPriceCap") dfMinCapPrice[!, :Slack] = convert(Array{Float64}, value.(EP[:vMinCap_slack])) dfMinCapPrice[!, :Penalty] = convert(Array{Float64}, value.(EP[:eCMinCap_slack])) - dfMinCapPrice.Slack *= scale_factor # Convert GW to MW - dfMinCapPrice.Penalty *= scale_factor^2 # Convert Million $ to $ end CSV.write(joinpath(path, "MinCapReq_prices_and_penalties.csv"), dfMinCapPrice) end diff --git a/src/write_outputs/reserves/write_operating_reserve_price_revenue.jl b/src/write_outputs/reserves/write_operating_reserve_price_revenue.jl index 79ab9b8cbe..8c87aaf689 100644 --- a/src/write_outputs/reserves/write_operating_reserve_price_revenue.jl +++ b/src/write_outputs/reserves/write_operating_reserve_price_revenue.jl @@ -11,8 +11,7 @@ function write_operating_reserve_regulation_revenue(path::AbstractString, inputs::Dict, setup::Dict, EP::Model) - scale_factor = setup["ParameterScale"] == 1 ? ModelScalingFactor : 1 - + gen = inputs["RESOURCES"] RSV = inputs["RSV"] REG = inputs["REG"] @@ -39,9 +38,6 @@ function write_operating_reserve_regulation_revenue(path::AbstractString, rsvrevenue = value.(EP[:vRSV][RSV, :].data) .* transpose(weighted_rsv_price) regrevenue = value.(EP[:vREG][REG, :].data) .* transpose(weighted_reg_price) - rsvrevenue *= scale_factor - regrevenue *= scale_factor - dfOpRsvRevenue.AnnualSum .= rsvrevenue * inputs["omega"] dfOpRegRevenue.AnnualSum .= regrevenue * inputs["omega"] @@ -63,8 +59,7 @@ This is equal to the dual variable of the regulation requirement constraint. function operating_regulation_price(EP::Model, inputs::Dict, setup::Dict)::Vector{Float64} ω = inputs["omega"] - scale_factor = setup["ParameterScale"] == 1 ? ModelScalingFactor : 1 - return dual.(EP[:cReg]) ./ ω * scale_factor + return dual.(EP[:cReg]) ./ ω end @doc raw""" @@ -80,6 +75,6 @@ This is equal to the dual variable of the reserve requirement constraint. 
function operating_reserve_price(EP::Model, inputs::Dict, setup::Dict)::Vector{Float64} ω = inputs["omega"] - scale_factor = setup["ParameterScale"] == 1 ? ModelScalingFactor : 1 - return dual.(EP[:cRsvReq]) ./ ω * scale_factor + + return dual.(EP[:cRsvReq]) ./ ω end diff --git a/src/write_outputs/reserves/write_reg.jl b/src/write_outputs/reserves/write_reg.jl index 7d7ca1efd6..86afbc0a5a 100644 --- a/src/write_outputs/reserves/write_reg.jl +++ b/src/write_outputs/reserves/write_reg.jl @@ -1,11 +1,10 @@ function write_reg(path::AbstractString, inputs::Dict, setup::Dict, EP::Model) REG = inputs["REG"] - scale_factor = setup["ParameterScale"] == 1 ? ModelScalingFactor : 1 - + resources = inputs["RESOURCE_NAMES"][REG] zones = inputs["R_ZONES"][REG] # Regulation contributions for each resource in each time step - reg = value.(EP[:vREG][REG, :].data) * scale_factor + reg = value.(EP[:vREG][REG, :].data) dfReg = DataFrame(Resource = resources, Zone = zones) dfReg.AnnualSum = reg * inputs["omega"] diff --git a/src/write_outputs/reserves/write_rsv.jl b/src/write_outputs/reserves/write_rsv.jl index ba38ccb727..d91c353416 100644 --- a/src/write_outputs/reserves/write_rsv.jl +++ b/src/write_outputs/reserves/write_rsv.jl @@ -1,11 +1,10 @@ function write_rsv(path::AbstractString, inputs::Dict, setup::Dict, EP::Model) T = inputs["T"] # Number of time steps (hours) RSV = inputs["RSV"] - scale_factor = setup["ParameterScale"] == 1 ? 
ModelScalingFactor : 1 - + resources = inputs["RESOURCE_NAMES"][RSV] zones = inputs["R_ZONES"][RSV] - rsv = value.(EP[:vRSV][RSV, :].data) * scale_factor + rsv = value.(EP[:vRSV][RSV, :].data) dfRsv = DataFrame(Resource = resources, Zone = zones) @@ -14,7 +13,7 @@ function write_rsv(path::AbstractString, inputs::Dict, setup::Dict, EP::Model) if setup["WriteOutputs"] == "annual" write_annual(joinpath(path, "reg_dn.csv"), dfRsv) else # setup["WriteOutputs"] == "full" - unmet_vec = value.(EP[:vUNMET_RSV]) * scale_factor + unmet_vec = value.(EP[:vUNMET_RSV]) total_unmet = sum(unmet_vec) dfRsv = hcat(dfRsv, DataFrame(rsv, :auto)) auxNew_Names = [Symbol("Resource"); diff --git a/src/write_outputs/transmission/write_nw_expansion.jl b/src/write_outputs/transmission/write_nw_expansion.jl index f89e1bfe1f..6ec1ceeb61 100644 --- a/src/write_outputs/transmission/write_nw_expansion.jl +++ b/src/write_outputs/transmission/write_nw_expansion.jl @@ -14,10 +14,5 @@ function write_nw_expansion(path::AbstractString, inputs::Dict, setup::Dict, EP: Cost_Trans_Capacity = convert(Array{Float64}, transcap .* inputs["pC_Line_Reinforcement"])) - if setup["ParameterScale"] == 1 - dfTransCap.New_Trans_Capacity *= ModelScalingFactor # GW to MW - dfTransCap.Cost_Trans_Capacity *= ModelScalingFactor^2 # MUSD to USD - end - CSV.write(joinpath(path, "network_expansion.csv"), dfTransCap) end diff --git a/src/write_outputs/transmission/write_transmission_flows.jl b/src/write_outputs/transmission/write_transmission_flows.jl index 5290d71afe..e13179f6e9 100644 --- a/src/write_outputs/transmission/write_transmission_flows.jl +++ b/src/write_outputs/transmission/write_transmission_flows.jl @@ -8,9 +8,6 @@ function write_transmission_flows(path::AbstractString, # Power flows on transmission lines at each time step dfFlow = DataFrame(Line = 1:L) flow = value.(EP[:vFLOW]) - if setup["ParameterScale"] == 1 - flow *= ModelScalingFactor - end filepath = joinpath(path, "flow.csv") if setup["WriteOutputs"] == 
"annual" diff --git a/src/write_outputs/transmission/write_transmission_losses.jl b/src/write_outputs/transmission/write_transmission_losses.jl index a76bca1180..d5ef53809b 100644 --- a/src/write_outputs/transmission/write_transmission_losses.jl +++ b/src/write_outputs/transmission/write_transmission_losses.jl @@ -9,9 +9,6 @@ function write_transmission_losses(path::AbstractString, dfTLosses = DataFrame(Line = 1:L) tlosses = zeros(L, T) tlosses[LOSS_LINES, :] = value.(EP[:vTLOSS][LOSS_LINES, :]) - if setup["ParameterScale"] == 1 - tlosses[LOSS_LINES, :] *= ModelScalingFactor - end dfTLosses.AnnualSum = tlosses * inputs["omega"] diff --git a/src/write_outputs/write_capacity.jl b/src/write_outputs/write_capacity.jl index 99e4797ecc..b00b5c76c2 100755 --- a/src/write_outputs/write_capacity.jl +++ b/src/write_outputs/write_capacity.jl @@ -96,22 +96,6 @@ function write_capacity(path::AbstractString, inputs::Dict, setup::Dict, EP::Mod RetChargeCap = retcapcharge[:], NewChargeCap = capcharge[:], EndChargeCap = existingcapcharge[:] - retcapcharge[:] + capcharge[:]) - if setup["ParameterScale"] == 1 - dfCap.StartCap = dfCap.StartCap * ModelScalingFactor - dfCap.RetCap = dfCap.RetCap * ModelScalingFactor - dfCap.RetroCap = dfCap.RetroCap * ModelScalingFactor - dfCap.NewCap = dfCap.NewCap * ModelScalingFactor - dfCap.EndCap = dfCap.EndCap * ModelScalingFactor - dfCap.CapacityConstraintDual = dfCap.CapacityConstraintDual * ModelScalingFactor - dfCap.StartEnergyCap = dfCap.StartEnergyCap * ModelScalingFactor - dfCap.RetEnergyCap = dfCap.RetEnergyCap * ModelScalingFactor - dfCap.NewEnergyCap = dfCap.NewEnergyCap * ModelScalingFactor - dfCap.EndEnergyCap = dfCap.EndEnergyCap * ModelScalingFactor - dfCap.StartChargeCap = dfCap.StartChargeCap * ModelScalingFactor - dfCap.RetChargeCap = dfCap.RetChargeCap * ModelScalingFactor - dfCap.NewChargeCap = dfCap.NewChargeCap * ModelScalingFactor - dfCap.EndChargeCap = dfCap.EndChargeCap * ModelScalingFactor - end total = DataFrame(Resource 
= "Total", Zone = "n/a", Retrofit_Id = "n/a", StartCap = sum(dfCap[!, :StartCap]), RetCap = sum(dfCap[!, :RetCap]), NewCap = sum(dfCap[!, :NewCap]), EndCap = sum(dfCap[!, :EndCap]), diff --git a/src/write_outputs/write_capacityfactor.jl b/src/write_outputs/write_capacityfactor.jl index d7e5f13c6b..c8637d6108 100644 --- a/src/write_outputs/write_capacityfactor.jl +++ b/src/write_outputs/write_capacityfactor.jl @@ -21,14 +21,14 @@ function write_capacityfactor(path::AbstractString, inputs::Dict, setup::Dict, E AnnualSum = zeros(G), Capacity = zeros(G), CapacityFactor = zeros(G)) - scale_factor = setup["ParameterScale"] == 1 ? ModelScalingFactor : 1 - df.AnnualSum .= value.(EP[:vP]) * weight * scale_factor - df.Capacity .= value.(EP[:eTotalCap]) * scale_factor + + df.AnnualSum .= value.(EP[:vP]) * weight + df.Capacity .= value.(EP[:eTotalCap]) # The .data only works on DenseAxisArray variables or expressions # In contrast vP and eTotalCap are whole vectors / matrices - energy_sum(sym, set) = value.(EP[sym][set, :]).data * weight * scale_factor - capacity(sym, set) = value.(EP[sym][set]).data * scale_factor + energy_sum(sym, set) = value.(EP[sym][set, :]).data * weight + capacity(sym, set) = value.(EP[sym][set]).data if !isempty(VRE_STOR) VS_SOLAR = inputs["VS_SOLAR"] @@ -94,7 +94,6 @@ function write_fusion_net_capacity_factor(path::AbstractString, inputs::Dict, se G_fusion = length(gen_fusion) ω = inputs["omega"] - scale_factor = setup["ParameterScale"] == 1 ? 
ModelScalingFactor : 1 df = DataFrame(Resource = resource_names, Zone = zone_id.(gen_fusion), @@ -105,10 +104,10 @@ function write_fusion_net_capacity_factor(path::AbstractString, inputs::Dict, se reactor = FusionReactorData.(gen_fusion) avg_power_factor = average_net_power_factor.(reactor) - gross_power = value.(EP[:vP][FUSION, :]) * ω * scale_factor + gross_power = value.(EP[:vP][FUSION, :]) * ω parasitic_power = thermal_fusion_annual_parasitic_power(EP, inputs, setup) df.NetOutput .= gross_power - parasitic_power - df.NetCapacity .= value.(EP[:eTotalCap][FUSION]) * scale_factor .* avg_power_factor + df.NetCapacity .= value.(EP[:eTotalCap][FUSION]) .* avg_power_factor # We only calcualte the resulted capacity factor with total capacity > 1MW and total generation > 1MWh enough_power = findall(x -> x >= 1, df.NetOutput) diff --git a/src/write_outputs/write_charge.jl b/src/write_outputs/write_charge.jl index 89d95fbb2e..496479224e 100644 --- a/src/write_outputs/write_charge.jl +++ b/src/write_outputs/write_charge.jl @@ -17,8 +17,7 @@ function write_charge(path::AbstractString, inputs::Dict, setup::Dict, EP::Model FUSION = ids_with(gen, :fusion) weight = inputs["omega"] - scale_factor = setup["ParameterScale"] == 1 ? 
ModelScalingFactor : 1 - + charge = Matrix[] charge_ids = Vector{Int}[] if !isempty(STOR_ALL) @@ -45,8 +44,6 @@ function write_charge(path::AbstractString, inputs::Dict, setup::Dict, EP::Model charge = reduce(vcat, charge, init = zeros(0, T)) charge_ids = reduce(vcat, charge_ids, init = Int[]) - charge *= scale_factor - df = DataFrame(Resource = resources[charge_ids], Zone = zones[charge_ids]) df.AnnualSum = charge * weight diff --git a/src/write_outputs/write_charging_cost.jl b/src/write_outputs/write_charging_cost.jl index cabc4db135..a850b0b6ab 100644 --- a/src/write_outputs/write_charging_cost.jl +++ b/src/write_outputs/write_charging_cost.jl @@ -16,8 +16,7 @@ function write_charging_cost(path::AbstractString, inputs::Dict, setup::Dict, EP FUSION = ids_with(gen, :fusion) weight = inputs["omega"] - scale_factor = setup["ParameterScale"] == 1 ? ModelScalingFactor : 1 - + price = locational_marginal_price(EP, inputs, setup) chargecost = zeros(G, T) @@ -41,8 +40,6 @@ function write_charging_cost(path::AbstractString, inputs::Dict, setup::Dict, EP _, mat = prepare_fusion_parasitic_power(EP, inputs) chargecost[FUSION, :] = mat end - chargecost *= scale_factor - dfChargingcost = DataFrame(Region = regions, Resource = resources, Zone = zones, diff --git a/src/write_outputs/write_co2.jl b/src/write_outputs/write_co2.jl index 5a2860d197..c5fa1d3a1a 100644 --- a/src/write_outputs/write_co2.jl +++ b/src/write_outputs/write_co2.jl @@ -21,10 +21,8 @@ function write_co2_emissions_plant(path::AbstractString, G = inputs["G"] # Number of resources (generators, storage, DR, and DERs) weight = inputs["omega"] - scale_factor = setup["ParameterScale"] == 1 ? 
ModelScalingFactor : 1 - + emissions_plant = value.(EP[:eEmissionsByPlant]) - emissions_plant *= scale_factor df = DataFrame(Resource = resources, Zone = zones, @@ -43,14 +41,12 @@ function write_co2_capture_plant(path::AbstractString, inputs::Dict, setup::Dict zones = zone_id.(gen[CCS]) weight = inputs["omega"] - scale_factor = setup["ParameterScale"] == 1 ? ModelScalingFactor : 1 df = DataFrame(Resource = resources, Zone = zones, AnnualSum = zeros(length(CCS))) if !isempty(CCS) emissions_captured_plant = value.(EP[:eEmissionsCaptureByPlant]).data - emissions_captured_plant *= scale_factor df.AnnualSum .= emissions_captured_plant * weight diff --git a/src/write_outputs/write_costs.jl b/src/write_outputs/write_costs.jl index 9193865911..e3b228e0b8 100644 --- a/src/write_outputs/write_costs.jl +++ b/src/write_outputs/write_costs.jl @@ -95,10 +95,6 @@ function write_costs(path::AbstractString, inputs::Dict, setup::Dict, EP::Model) dfCost[!, Symbol("Total")] = total_cost - if setup["ParameterScale"] == 1 - dfCost.Total *= ModelScalingFactor^2 - end - if setup["UCommit"] >= 1 dfCost[6, 2] = value(EP[:eTotalCStart]) + value(EP[:eTotalCFuelStart]) end @@ -136,22 +132,13 @@ function write_costs(path::AbstractString, inputs::Dict, setup::Dict, EP::Model) end if !isempty(VRE_STOR) - dfCost[!, 2][11] = value(EP[:eTotalCGrid]) * - (setup["ParameterScale"] == 1 ? 
ModelScalingFactor^2 : 1) + dfCost[!, 2][11] = value(EP[:eTotalCGrid]) end if any(co2_capture_fraction.(gen) .!= 0) dfCost[10, 2] += value(EP[:eTotaleCCO2Sequestration]) end - if setup["ParameterScale"] == 1 - dfCost[6, 2] *= ModelScalingFactor^2 - dfCost[7, 2] *= ModelScalingFactor^2 - dfCost[8, 2] *= ModelScalingFactor^2 - dfCost[9, 2] *= ModelScalingFactor^2 - dfCost[10, 2] *= ModelScalingFactor^2 - end - for z in 1:Z tempCTotal = 0.0 tempCFix = 0.0 @@ -297,16 +284,6 @@ function write_costs(path::AbstractString, inputs::Dict, setup::Dict, EP::Model) tempCTotal += tempCCO2 end - if setup["ParameterScale"] == 1 - tempCTotal *= ModelScalingFactor^2 - tempCFix *= ModelScalingFactor^2 - tempCVar *= ModelScalingFactor^2 - tempCFuel *= ModelScalingFactor^2 - tempCNSE *= ModelScalingFactor^2 - tempCStart *= ModelScalingFactor^2 - tempHydrogenValue *= ModelScalingFactor^2 - tempCCO2 *= ModelScalingFactor^2 - end temp_cost_list = [ tempCTotal, tempCFix, diff --git a/src/write_outputs/write_curtailment.jl b/src/write_outputs/write_curtailment.jl index 4d41289750..414d6596c4 100644 --- a/src/write_outputs/write_curtailment.jl +++ b/src/write_outputs/write_curtailment.jl @@ -15,8 +15,7 @@ function write_curtailment(path::AbstractString, inputs::Dict, setup::Dict, EP:: VRE_STOR = inputs["VRE_STOR"] weight = inputs["omega"] - scale_factor = setup["ParameterScale"] == 1 ? 
ModelScalingFactor : 1 - + curtailment = zeros(G, T) curtailment[VRE, :] = (value.(EP[:eTotalCap][VRE]) .* inputs["pP_Max"][VRE, :] .- value.(EP[:vP][VRE, :])) @@ -48,8 +47,6 @@ function write_curtailment(path::AbstractString, inputs::Dict, setup::Dict, EP:: end end - curtailment *= scale_factor - df = DataFrame(Resource = resources, Zone = zones, AnnualSum = zeros(G)) diff --git a/src/write_outputs/write_emissions.jl b/src/write_outputs/write_emissions.jl index 2e0c011f68..1b85848e1b 100644 --- a/src/write_outputs/write_emissions.jl +++ b/src/write_outputs/write_emissions.jl @@ -8,8 +8,6 @@ function write_emissions(path::AbstractString, inputs::Dict, setup::Dict, EP::Mo T = inputs["T"] # Number of time steps (hours) Z = inputs["Z"] # Number of zones - scale_factor = setup["ParameterScale"] == 1 ? ModelScalingFactor : 1 - if (setup["WriteShadowPrices"] == 1 || setup["UCommit"] == 0 || (setup["UCommit"] == 2 && (setup["OperationalReserves"] == 0 || (setup["OperationalReserves"] > 0 && inputs["pDynamic_Contingency"] == 0)))) # fully linear model @@ -23,8 +21,6 @@ function write_emissions(path::AbstractString, inputs::Dict, setup::Dict, EP::Mo for z in findall(x -> x == 1, inputs["dfCO2CapZones"][:, cap]) tempCO2Price[z, cap] = (-1) * dual.(EP[:cCO2Emissions_systemwide])[cap] - # when scaled, The objective function is in unit of Million US$/kton, thus k$/ton, to get $/ton, multiply 1000 - tempCO2Price[z, cap] *= scale_factor end end end @@ -41,8 +37,7 @@ function write_emissions(path::AbstractString, inputs::Dict, setup::Dict, EP::Mo emissions_by_zone = value.(EP[:eEmissionsByZone]) for i in 1:Z - dfEmissions[i, :AnnualSum] = sum(inputs["omega"] .* emissions_by_zone[i, :]) * - scale_factor + dfEmissions[i, :AnnualSum] = sum(inputs["omega"] .* emissions_by_zone[i, :]) end if setup["WriteOutputs"] == "annual" @@ -58,7 +53,7 @@ function write_emissions(path::AbstractString, inputs::Dict, setup::Dict, EP::Mo CSV.write(joinpath(path, "emissions.csv"), dfEmissions) else# 
setup["WriteOutputs"] == "full" dfEmissions = hcat(dfEmissions, - DataFrame(emissions_by_zone * scale_factor, :auto)) + DataFrame(emissions_by_zone , :auto)) if setup["CO2Cap"] >= 1 auxNew_Names = [Symbol("Zone"); [Symbol("CO2_Price_$cap") for cap in 1:inputs["NCO2Cap"]]; @@ -99,8 +94,7 @@ function write_emissions(path::AbstractString, inputs::Dict, setup::Dict, EP::Mo dfEmissions = hcat(DataFrame(Zone = 1:Z), DataFrame(AnnualSum = Array{Float64}(undef, Z))) for i in 1:Z - dfEmissions[i, :AnnualSum] = sum(inputs["omega"] .* emissions_by_zone[i, :]) * - scale_factor + dfEmissions[i, :AnnualSum] = sum(inputs["omega"] .* emissions_by_zone[i, :]) end if setup["WriteOutputs"] == "annual" @@ -109,7 +103,7 @@ function write_emissions(path::AbstractString, inputs::Dict, setup::Dict, EP::Mo CSV.write(joinpath(path, "emissions.csv"), dfEmissions) else# setup["WriteOutputs"] == "full" dfEmissions = hcat(dfEmissions, - DataFrame(emissions_by_zone * scale_factor, :auto)) + DataFrame(emissions_by_zone, :auto)) auxNew_Names = [Symbol("Zone"); Symbol("AnnualSum"); [Symbol("t$t") for t in 1:T]] diff --git a/src/write_outputs/write_energy_revenue.jl b/src/write_outputs/write_energy_revenue.jl index 3e0834bd1e..60f502b246 100644 --- a/src/write_outputs/write_energy_revenue.jl +++ b/src/write_outputs/write_energy_revenue.jl @@ -26,9 +26,6 @@ function write_energy_revenue(path::AbstractString, inputs::Dict, setup::Dict, E energyrevenue[FLEX, :] = value.(EP[:vCHARGE_FLEX][FLEX, :]).data .* transpose(price)[zone_id.(gen[FLEX]), :] end - if setup["ParameterScale"] == 1 - energyrevenue *= ModelScalingFactor - end dfEnergyRevenue.AnnualSum .= energyrevenue * inputs["omega"] write_simple_csv(joinpath(path, "EnergyRevenue.csv"), dfEnergyRevenue) return dfEnergyRevenue diff --git a/src/write_outputs/write_fuel_consumption.jl b/src/write_outputs/write_fuel_consumption.jl index 8385e3e5ce..662fdb9481 100644 --- a/src/write_outputs/write_fuel_consumption.jl +++ 
b/src/write_outputs/write_fuel_consumption.jl @@ -46,12 +46,6 @@ function write_fuel_consumption_plant(path::AbstractString, tempannualsum_fuel_heat_multi_total[findfirst(x -> x == g, HAS_FUEL)] = value.(EP[:ePlantFuelConsumptionYear_multi][g,i]) tempannualsum_fuel_cost_multi[findfirst(x -> x == g, HAS_FUEL)] = value.(EP[:ePlantCFuelOut_multi][g,i]) + value.(EP[:ePlantCFuelOut_multi_start][g,i]) end - if setup["ParameterScale"] == 1 - tempannualsum_fuel_heat_multi_generation *= ModelScalingFactor - tempannualsum_fuel_heat_multi_start *= ModelScalingFactor - tempannualsum_fuel_heat_multi_total *= ModelScalingFactor - tempannualsum_fuel_cost_multi *= ModelScalingFactor^2 - end dfPlantFuel[!, fuel_cols_num[i]] = fuel_cols.(gen[HAS_FUEL], tag = i) dfPlantFuel[!, Symbol(string(fuel_cols_num[i], "_AnnualSum_Fuel_HeatInput_Generation_MMBtu"))] = tempannualsum_fuel_heat_multi_generation @@ -61,9 +55,6 @@ function write_fuel_consumption_plant(path::AbstractString, end end - if setup["ParameterScale"] == 1 - tempannualsum *= ModelScalingFactor^2 # - end dfPlantFuel.AnnualSumCosts .+= tempannualsum CSV.write(joinpath(path, "Fuel_cost_plant.csv"), dfPlantFuel) end @@ -78,9 +69,6 @@ function write_fuel_consumption_ts(path::AbstractString, # Fuel consumption by each resource per time step, unit is MMBTU dfPlantFuel_TS = DataFrame(Resource = inputs["RESOURCE_NAMES"][HAS_FUEL]) tempts = value.(EP[:ePlantFuel_generation] + EP[:ePlantFuel_start])[HAS_FUEL, :] - if setup["ParameterScale"] == 1 - tempts *= ModelScalingFactor # kMMBTU to MMBTU - end dfPlantFuel_TS = hcat(dfPlantFuel_TS, DataFrame(tempts, [Symbol("t$t") for t in 1:T])) CSV.write(joinpath(path, "FuelConsumption_plant_MMBTU.csv"), @@ -103,9 +91,6 @@ function write_fuel_consumption_tot(path::AbstractString, dfFuel = DataFrame(Fuel = fuel_types, AnnualSum = zeros(fuel_number)) tempannualsum = value.(EP[:eFuelConsumptionYear]) - if setup["ParameterScale"] == 1 - tempannualsum *= ModelScalingFactor # billion MMBTU to MMBTU - 
end dfFuel.AnnualSum .+= tempannualsum CSV.write(joinpath(path, "FuelConsumption_total_MMBTU.csv"), dfFuel) end diff --git a/src/write_outputs/write_net_revenue.jl b/src/write_outputs/write_net_revenue.jl index 2311c8d1a4..216b45401f 100644 --- a/src/write_outputs/write_net_revenue.jl +++ b/src/write_outputs/write_net_revenue.jl @@ -75,11 +75,6 @@ function write_net_revenue(path::AbstractString, dfVreStor[1:VRE_STOR_LENGTH, :NewCapWind] end end - if setup["ParameterScale"] == 1 - dfNetRevenue.Inv_cost_MWh *= ModelScalingFactor # converting Million US$ to US$ - dfNetRevenue.Inv_cost_MW *= ModelScalingFactor # converting Million US$ to US$ - dfNetRevenue.Inv_cost_charge_MW *= ModelScalingFactor # converting Million US$ to US$ - end # Add operations and maintenance cost to the dataframe dfNetRevenue.Fixed_OM_cost_MW = fixed_om_cost_per_mwyr.(gen) .* dfCap[1:G, :EndCap] @@ -121,18 +116,9 @@ function write_net_revenue(path::AbstractString, (value.(EP[:vP_AC_DISCHARGE][AC_DISCHARGE,:]).data * inputs["omega"]) end end - if setup["ParameterScale"] == 1 - dfNetRevenue.Fixed_OM_cost_MW *= ModelScalingFactor # converting Million US$ to US$ - dfNetRevenue.Fixed_OM_cost_MWh *= ModelScalingFactor # converting Million US$ to US$ - dfNetRevenue.Fixed_OM_cost_charge_MW *= ModelScalingFactor # converting Million US$ to US$ - dfNetRevenue.Var_OM_cost_out *= ModelScalingFactor # converting Million US$ to US$ - end # Add fuel cost to the dataframe dfNetRevenue.Fuel_cost = sum(value.(EP[:ePlantCFuelOut]), dims = 2) - if setup["ParameterScale"] == 1 - dfNetRevenue.Fuel_cost *= ModelScalingFactor^2 # converting Million US$ to US$ - end # Add storage cost to the dataframe dfNetRevenue.Var_OM_cost_in = zeros(nrow(dfNetRevenue)) @@ -154,9 +140,6 @@ function write_net_revenue(path::AbstractString, end end - if setup["ParameterScale"] == 1 - dfNetRevenue.Var_OM_cost_in *= ModelScalingFactor^2 # converting Million US$ to US$ - end # Add start-up cost to the dataframe dfNetRevenue.StartCost = 
zeros(nrow(dfNetRevenue)) if setup["UCommit"] >= 1 && !isempty(COMMIT) @@ -164,9 +147,6 @@ function write_net_revenue(path::AbstractString, start_fuel_costs = vec(value.(EP[:ePlantCFuelStart][COMMIT])) dfNetRevenue.StartCost[COMMIT] .= start_costs + start_fuel_costs end - if setup["ParameterScale"] == 1 - dfNetRevenue.StartCost *= ModelScalingFactor^2 # converting Million US$ to US$ - end # Add charge cost to the dataframe dfNetRevenue.Charge_cost = zeros(nrow(dfNetRevenue)) if has_duals(EP) @@ -179,9 +159,6 @@ function write_net_revenue(path::AbstractString, dfNetRevenue.CO2SequestrationCost = zeros(G) dfNetRevenue[CCS, :CO2SequestrationCost] = value.(EP[:ePlantCCO2Sequestration]).data end - if setup["ParameterScale"] == 1 - dfNetRevenue.CO2SequestrationCost *= ModelScalingFactor^2 # converting Million US$ to US$ - end # Add energy and subsidy revenue to the dataframe dfNetRevenue.EnergyRevenue = zeros(nrow(dfNetRevenue)) @@ -236,9 +213,6 @@ function write_net_revenue(path::AbstractString, dfNetRevenue.EmissionsCost[Y] += -co2_cap_dual * temp_vec end end - if setup["ParameterScale"] == 1 - dfNetRevenue.EmissionsCost *= ModelScalingFactor^2 # converting Million US$ to US$ - end end # Add regional technology subsidy revenue to the dataframe diff --git a/src/write_outputs/write_nse.jl b/src/write_outputs/write_nse.jl index 9d1c73e835..8838c75277 100644 --- a/src/write_outputs/write_nse.jl +++ b/src/write_outputs/write_nse.jl @@ -12,9 +12,8 @@ function write_nse(path::AbstractString, inputs::Dict, setup::Dict, EP::Model) Zone = repeat(1:Z, inner = SEG), AnnualSum = zeros(SEG * Z)) nse = zeros(SEG * Z, T) - scale_factor = setup["ParameterScale"] == 1 ? 
ModelScalingFactor : 1 for z in 1:Z - nse[((z - 1) * SEG + 1):(z * SEG), :] = value.(EP[:vNSE])[:, :, z] * scale_factor + nse[((z - 1) * SEG + 1):(z * SEG), :] = value.(EP[:vNSE])[:, :, z] end dfNse.AnnualSum .= nse * inputs["omega"] diff --git a/src/write_outputs/write_power.jl b/src/write_outputs/write_power.jl index 995a2d941b..6b6cff045e 100644 --- a/src/write_outputs/write_power.jl +++ b/src/write_outputs/write_power.jl @@ -12,11 +12,9 @@ function write_power(path::AbstractString, inputs::Dict, setup::Dict, EP::Model) T = inputs["T"] # Number of time steps (hours) weight = inputs["omega"] - scale_factor = setup["ParameterScale"] == 1 ? ModelScalingFactor : 1 - + # Power injected by each resource in each time step power = value.(EP[:vP]) - power *= scale_factor df = DataFrame(Resource = resources, Zone = zones, diff --git a/src/write_outputs/write_power_balance.jl b/src/write_outputs/write_power_balance.jl index 72bfee22d1..61d2cc59af 100644 --- a/src/write_outputs/write_power_balance.jl +++ b/src/write_outputs/write_power_balance.jl @@ -97,9 +97,6 @@ function write_power_balance(path::AbstractString, inputs::Dict, setup::Dict, EP EP, inputs, z) end end - if setup["ParameterScale"] == 1 - powerbalance *= ModelScalingFactor - end dfPowerBalance.AnnualSum .= powerbalance * inputs["omega"] if setup["WriteOutputs"] == "annual" diff --git a/src/write_outputs/write_price.jl b/src/write_outputs/write_price.jl index 79afb5b428..10e658d474 100644 --- a/src/write_outputs/write_price.jl +++ b/src/write_outputs/write_price.jl @@ -43,6 +43,5 @@ be calculated only if `WriteShadowPrices` is activated. """ function locational_marginal_price(EP::Model, inputs::Dict, setup::Dict)::Matrix{Float64} ω = inputs["omega"] - scale_factor = setup["ParameterScale"] == 1 ? 
ModelScalingFactor : 1 - return dual.(EP[:cPowerBalance]) ./ ω * scale_factor + return dual.(EP[:cPowerBalance]) ./ ω end diff --git a/src/write_outputs/write_reliability.jl b/src/write_outputs/write_reliability.jl index afb3a3284c..36c83a8d71 100644 --- a/src/write_outputs/write_reliability.jl +++ b/src/write_outputs/write_reliability.jl @@ -10,9 +10,8 @@ function write_reliability(path::AbstractString, inputs::Dict, setup::Dict, EP:: # reliability: Dual variable of maximum NSE constraint = shadow value of reliability constraint dfReliability = DataFrame(Zone = 1:Z) # Dividing dual variable for each hour with corresponding hourly weight to retrieve marginal cost of generation - scale_factor = setup["ParameterScale"] == 1 ? ModelScalingFactor : 1 dfReliability = hcat(dfReliability, - DataFrame(transpose(dual.(EP[:cMaxNSE]) ./ inputs["omega"] * scale_factor), :auto)) + DataFrame(transpose(dual.(EP[:cMaxNSE]) ./ inputs["omega"]), :auto)) auxNew_Names = [Symbol("Zone"); [Symbol("t$t") for t in 1:T]] rename!(dfReliability, auxNew_Names) diff --git a/src/write_outputs/write_storage.jl b/src/write_outputs/write_storage.jl index 4e64c0f9bd..53ac2c5460 100644 --- a/src/write_outputs/write_storage.jl +++ b/src/write_outputs/write_storage.jl @@ -17,8 +17,7 @@ function write_storage(path::AbstractString, inputs::Dict, setup::Dict, EP::Mode VS_STOR = !isempty(VRE_STOR) ? inputs["VS_STOR"] : [] weight = inputs["omega"] - scale_factor = setup["ParameterScale"] == 1 ? 
ModelScalingFactor : 1 - + stored = Matrix[] if !isempty(STOR_ALL) push!(stored, value.(EP[:vS])) @@ -33,7 +32,6 @@ function write_storage(path::AbstractString, inputs::Dict, setup::Dict, EP::Mode push!(stored, value.(EP[:vS_VRE_STOR])) end stored = reduce(vcat, stored, init = zeros(0, T)) - stored *= scale_factor stored_ids = convert(Vector{Int}, vcat(STOR_ALL, HYDRO_RES, FLEX, VS_STOR)) df = DataFrame(Resource = resources[stored_ids], diff --git a/src/write_outputs/write_storagedual.jl b/src/write_outputs/write_storagedual.jl index 90eae94ff2..1af5fed23a 100644 --- a/src/write_outputs/write_storagedual.jl +++ b/src/write_outputs/write_storagedual.jl @@ -68,10 +68,6 @@ function write_storagedual(path::AbstractString, inputs::Dict, setup::Dict, EP:: end end - if setup["ParameterScale"] == 1 - dual_values *= ModelScalingFactor - end - dfStorageDual = hcat(dfStorageDual, DataFrame(dual_values, :auto)) rename!(dfStorageDual, [Symbol("Resource"); Symbol("Zone"); [Symbol("t$t") for t in 1:T]]) diff --git a/src/write_outputs/write_subsidy_revenue.jl b/src/write_outputs/write_subsidy_revenue.jl index 3262ec94d7..97cc980452 100644 --- a/src/write_outputs/write_subsidy_revenue.jl +++ b/src/write_outputs/write_subsidy_revenue.jl @@ -93,11 +93,6 @@ function write_subsidy_revenue(path::AbstractString, inputs::Dict, setup::Dict, end end - if setup["ParameterScale"] == 1 - dfSubRevenue.SubsidyRevenue *= ModelScalingFactor^2 #convert from Million US$ to US$ - dfRegSubRevenue.SubsidyRevenue *= ModelScalingFactor^2 #convert from Million US$ to US$ - end - CSV.write(joinpath(path, "SubsidyRevenue.csv"), dfSubRevenue) CSV.write(joinpath(path, "RegSubsidyRevenue.csv"), dfRegSubRevenue) return dfSubRevenue, dfRegSubRevenue diff --git a/src/write_outputs/write_vre_stor.jl b/src/write_outputs/write_vre_stor.jl index a50687c957..f6ca989076 100644 --- a/src/write_outputs/write_vre_stor.jl +++ b/src/write_outputs/write_vre_stor.jl @@ -247,52 +247,6 @@ function 
write_vre_stor_capacity(path::AbstractString, inputs::Dict, setup::Dict EndDischargeACCap = existingcapdischargeac[:] - retcapdischargeac[:] + capdischargeac[:]) - if setup["ParameterScale"] == 1 - columns_to_scale = [ - :StartCapSolar, - :RetCapSolar, - :NewCapSolar, - :EndCapSolar, - :StartCapWind, - :RetCapWind, - :NewCapWind, - :EndCapWind, - :StartCapElec, - :RetCapElec, - :NewCapElec, - :EndCapElec, - :StartCapDC, - :RetCapDC, - :NewCapDC, - :EndCapDC, - :StartCapGrid, - :RetCapGrid, - :NewCapGrid, - :EndCapGrid, - :StartEnergyCap, - :RetEnergyCap, - :NewEnergyCap, - :EndEnergyCap, - :StartChargeACCap, - :RetChargeACCap, - :NewChargeACCap, - :EndChargeACCap, - :StartChargeDCCap, - :RetChargeDCCap, - :NewChargeDCCap, - :EndChargeDCCap, - :StartDischargeDCCap, - :RetDischargeDCCap, - :NewDischargeDCCap, - :EndDischargeDCCap, - :StartDischargeACCap, - :RetDischargeACCap, - :NewDischargeACCap, - :EndDischargeACCap - ] - dfCap[!, columns_to_scale] .*= ModelScalingFactor - end - total = DataFrame(Resource = "Total", Zone = "n/a", Resource_Type = "Total", Cluster = "n/a", StartCapSolar = sum(dfCap[!, :StartCapSolar]), @@ -354,8 +308,7 @@ function write_vre_stor_charge(path::AbstractString, inputs::Dict, setup::Dict, AnnualSum = Array{Union{Missing, Float32}}(undef, size(DC_CHARGE)[1])) charge_dc = zeros(size(DC_CHARGE)[1], T) charge_dc = value.(EP[:vP_DC_CHARGE]).data ./ - etainverter.(gen[DC_CHARGE]) * - (setup["ParameterScale"] == 1 ? ModelScalingFactor : 1) + etainverter.(gen[DC_CHARGE]) dfCharge_DC.AnnualSum .= charge_dc * inputs["omega"] filepath = joinpath(path, "vre_stor_dc_charge.csv") @@ -372,8 +325,7 @@ function write_vre_stor_charge(path::AbstractString, inputs::Dict, setup::Dict, Zone = inputs["ZONES_AC_CHARGE"], AnnualSum = Array{Union{Missing, Float32}}(undef, size(AC_CHARGE)[1])) charge_ac = zeros(size(AC_CHARGE)[1], T) - charge_ac = value.(EP[:vP_AC_CHARGE]).data * - (setup["ParameterScale"] == 1 ? 
ModelScalingFactor : 1) + charge_ac = value.(EP[:vP_AC_CHARGE]).data dfCharge_AC.AnnualSum .= charge_ac * inputs["omega"] filepath = joinpath(path, "vre_stor_ac_charge.csv") @@ -411,9 +363,6 @@ function write_vre_stor_discharge(path::AbstractString, AnnualSum = Array{Union{Missing, Float32}}(undef, size(DC_DISCHARGE)[1])) power_vre_stor = value.(EP[:vP_DC_DISCHARGE]).data .* etainverter.(gen[DC_DISCHARGE]) - if setup["ParameterScale"] == 1 - power_vre_stor *= ModelScalingFactor - end dfDischarge_DC.AnnualSum .= power_vre_stor * inputs["omega"] filepath = joinpath(path, "vre_stor_dc_discharge.csv") @@ -430,9 +379,6 @@ function write_vre_stor_discharge(path::AbstractString, Zone = inputs["ZONES_AC_DISCHARGE"], AnnualSum = Array{Union{Missing, Float32}}(undef, size(AC_DISCHARGE)[1])) power_vre_stor = value.(EP[:vP_AC_DISCHARGE]).data - if setup["ParameterScale"] == 1 - power_vre_stor *= ModelScalingFactor - end dfDischarge_AC.AnnualSum .= power_vre_stor * inputs["omega"] filepath = joinpath(path, "vre_stor_ac_discharge.csv") @@ -449,9 +395,6 @@ function write_vre_stor_discharge(path::AbstractString, Zone = inputs["ZONES_WIND"], AnnualSum = Array{Union{Missing, Float32}}(undef, size(WIND)[1])) vre_vre_stor = value.(EP[:vP_WIND]).data - if setup["ParameterScale"] == 1 - vre_vre_stor *= ModelScalingFactor - end dfVP_VRE_STOR.AnnualSum .= vre_vre_stor * inputs["omega"] filepath = joinpath(path, "vre_stor_wind_power.csv") @@ -468,9 +411,6 @@ function write_vre_stor_discharge(path::AbstractString, Zone = inputs["ZONES_ELEC"], AnnualSum = Array{Union{Missing, Float32}}(undef, size(ELEC)[1])) elec_vre_stor = value.(EP[:vP_ELEC]).data - if setup["ParameterScale"] == 1 - elec_vre_stor *= ModelScalingFactor - end dfVP_VRE_STOR.AnnualSum .= elec_vre_stor * inputs["omega"] filepath = joinpath(path, "vre_stor_elec_power_consumption.csv") @@ -485,11 +425,7 @@ function write_vre_stor_discharge(path::AbstractString, dfVP_VRE_STOR = DataFrame(Resource = inputs["RESOURCE_NAMES_SOLAR"], 
Zone = inputs["ZONES_SOLAR"], AnnualSum = Array{Union{Missing, Float32}}(undef, size(SOLAR)[1])) - vre_vre_stor = value.(EP[:vP_SOLAR]).data .* - etainverter.(gen[SOLAR]) - if setup["ParameterScale"] == 1 - vre_vre_stor *= ModelScalingFactor - end + vre_vre_stor = value.(EP[:vP_SOLAR]).data .* etainverter.(gen[SOLAR]) dfVP_VRE_STOR.AnnualSum .= vre_vre_stor * inputs["omega"] filepath = joinpath(path, "vre_stor_solar_power.csv") diff --git a/test/load_resources/test_gen_non_colocated/settings/genx_settings.yml b/test/load_resources/test_gen_non_colocated/settings/genx_settings.yml index 4c1c974d23..958c5e2c40 100644 --- a/test/load_resources/test_gen_non_colocated/settings/genx_settings.yml +++ b/test/load_resources/test_gen_non_colocated/settings/genx_settings.yml @@ -1,5 +1,4 @@ NetworkExpansion: 1 -ParameterScale: 1 EnergyShareRequirement: 1 Trans_Loss_Segments: 1 CapacityReserveMargin: 1 diff --git a/test/load_resources/test_gen_vre_stor/settings/genx_settings.yml b/test/load_resources/test_gen_vre_stor/settings/genx_settings.yml index 4c1c974d23..958c5e2c40 100644 --- a/test/load_resources/test_gen_vre_stor/settings/genx_settings.yml +++ b/test/load_resources/test_gen_vre_stor/settings/genx_settings.yml @@ -1,5 +1,4 @@ NetworkExpansion: 1 -ParameterScale: 1 EnergyShareRequirement: 1 Trans_Loss_Segments: 1 CapacityReserveMargin: 1 diff --git a/test/test_VRE_storage.jl b/test/test_VRE_storage.jl index ddf8a604ca..72bfd11eec 100644 --- a/test/test_VRE_storage.jl +++ b/test/test_VRE_storage.jl @@ -24,9 +24,9 @@ function test_case(test_path, obj_true, genx_setup) end # Test cases (format: (test_path, obj_true)) -test_cases = [("VRE_storage/solar_wind", 92376.060123), - ("VRE_storage/solar", 106798.88706), - ("VRE_storage/wind", 92376.275543)] +test_cases = [("VRE_storage/solar_wind", 9.23762755432138e10), + ("VRE_storage/solar", 1.0679888706409656e11), + ("VRE_storage/wind", 9.23762755432139e10)] # Define test setup genx_setup = Dict("NetworkExpansion" => 1, @@ 
-37,8 +37,7 @@ genx_setup = Dict("NetworkExpansion" => 1, "CO2Cap" => 1, "StorageLosses" => 1, "VirtualChargeDischargeCost" => 1, - "ParameterScale" => 1) - +) # Run test cases for (test_path, obj_true) in test_cases test_case(test_path, obj_true, genx_setup) diff --git a/test/test_electrolyzer.jl b/test/test_electrolyzer.jl index 58685beb81..31191fd688 100644 --- a/test/test_electrolyzer.jl +++ b/test/test_electrolyzer.jl @@ -4,14 +4,13 @@ using Test include(joinpath(@__DIR__, "utilities.jl")) -obj_true = 34275.8599 +obj_true = 3.427501887037163e10 test_path = "electrolyzer" # Define test inputs genx_setup = Dict("Trans_Loss_Segments" => 1, "UCommit" => 2, "StorageLosses" => 1, - "ParameterScale" => 1, "HourlyMatching" => 1, "HydrogenHourlyMatching" => 1, "HydrogenMinimumProduction" => 1 diff --git a/test/test_fusion.jl b/test/test_fusion.jl index 105b6b703a..92c4c51c85 100644 --- a/test/test_fusion.jl +++ b/test/test_fusion.jl @@ -4,12 +4,12 @@ using Test include(joinpath(@__DIR__, "utilities.jl")) -obj_true = 300.5962608 # see fusion_pulse_every_hour/README.md +obj_true = 3.005962608299133e8 # see fusion_pulse_every_hour/README.md test_path = "fusion_pulse_every_hour" # Define test inputs -genx_setup = Dict("UCommit" => 2, - "ParameterScale" => 1, +genx_setup = Dict( + "UCommit" => 2, ) settings = GenX.default_settings() merge!(settings, genx_setup) diff --git a/test/test_load_resource_data.jl b/test/test_load_resource_data.jl index 00d979b197..f19347d230 100644 --- a/test/test_load_resource_data.jl +++ b/test/test_load_resource_data.jl @@ -34,9 +34,8 @@ function prepare_inputs_true(test_path::AbstractString, gen_filename = in_filenames.gen_filename inputs_filename = in_filenames.inputs_filename dfGen = GenX.load_dataframe(joinpath(test_path, gen_filename)) - scale_factor = setup["ParameterScale"] == 1 ? 
GenX.ModelScalingFactor : 1.0 GenX.rename!(dfGen, lowercase.(names(dfGen))) - GenX.scale_resources_data!(dfGen, scale_factor) + GenX.scale_resources_data!(dfGen) dfGen[!, :r_id] = 1:size(dfGen, 1) inputs_true = load(joinpath(test_path, inputs_filename)) return dfGen, inputs_true @@ -243,7 +242,7 @@ function test_resource_specific_attributes(gen, dfGen, inputs) end function test_load_resources_data() - setup = Dict("ParameterScale" => 0, + setup = Dict( "OperationalReserves" => 1, "UCommit" => 2, "MultiStage" => 1) @@ -293,7 +292,7 @@ function test_load_resources_data() end function test_load_VRE_STOR_data() - setup = Dict("ParameterScale" => 0, + setup = Dict( "OperationalReserves" => 1, "UCommit" => 2, "MultiStage" => 0) @@ -308,8 +307,7 @@ function test_load_VRE_STOR_data() dfVRE_STOR = GenX.load_dataframe(joinpath(test_path, "Vre_and_stor_data.csv")) dfVRE_STOR = GenX.rename!(dfVRE_STOR, lowercase.(names(dfVRE_STOR))) - scale_factor = settings["ParameterScale"] == 1 ? GenX.ModelScalingFactor : 1.0 - GenX.scale_vre_stor_data!(dfVRE_STOR, scale_factor) + GenX.scale_vre_stor_data!(dfVRE_STOR) resources_path = joinpath(test_path, settings["ResourcesFolder"]) gen = GenX.create_resource_array(settings, resources_path) diff --git a/test/test_multifuels.jl b/test/test_multifuels.jl index ff1a0efdac..3d3c7ae45d 100644 --- a/test/test_multifuels.jl +++ b/test/test_multifuels.jl @@ -4,7 +4,7 @@ using Test include(joinpath(@__DIR__, "utilities.jl")) -obj_true = 5494.7919354 +obj_true = 5.4950570610364895e9 test_path = "multi_fuels" # Define test inputs @@ -14,7 +14,6 @@ genx_setup = Dict("Trans_Loss_Segments" => 1, "StorageLosses" => 1, "MinCapReq" => 1, "MaxCapReq" => 1, - "ParameterScale" => 1, "WriteShadowPrices" => 1, "UCommit" => 2) diff --git a/test/test_multistage.jl b/test/test_multistage.jl index eba9201a1e..ef500f503f 100644 --- a/test/test_multistage.jl +++ b/test/test_multistage.jl @@ -4,7 +4,7 @@ using Test include(joinpath(@__DIR__, "utilities.jl")) -obj_true 
= [79734.80032, 41630.03494, 27855.20631] +obj_true = [7.973480032e10, 4.163003494e10, 2.785520631e10] test_path = joinpath(@__DIR__, "multi_stage") # Define test inputs @@ -19,7 +19,6 @@ genx_setup = Dict("Trans_Loss_Segments" => 1, "OperationalReserves" => 1, "CO2Cap" => 2, "StorageLosses" => 1, - "ParameterScale" => 1, "UCommit" => 2, "MultiStage" => 1, "MultiStageSettingsDict" => multistage_setup) @@ -102,14 +101,13 @@ function test_update_cumulative_min_ret!() # Merge the genx_setup with the default settings settings = GenX.default_settings() - for ParameterScale in [0, 1] - genx_setup["ParameterScale"] = ParameterScale + for AutoScale in [0, 1] + genx_setup["AutoScaling"] = AutoScale merge!(settings, genx_setup) inputs_dict = Dict() true_min_retirements = Dict() - scale_factor = settings["ParameterScale"] == 1 ? GenX.ModelScalingFactor : 1.0 redirect_stdout(devnull) do warnerror_logger = ConsoleLogger(stderr, Logging.Warn) with_logger(warnerror_logger) do @@ -123,7 +121,7 @@ function test_update_cumulative_min_ret!() DataFrame) rename!(true_min_retirements[t], lowercase.(names(true_min_retirements[t]))) - GenX.scale_multistage_data!(true_min_retirements[t], scale_factor) + GenX.scale_multistage_data!(true_min_retirements[t]) inputs_dict[t] = Dict() inputs_dict[t]["Z"] = 1 diff --git a/test/test_piecewisefuel.jl b/test/test_piecewisefuel.jl index db52aaf0da..77ce4e24d2 100644 --- a/test/test_piecewisefuel.jl +++ b/test/test_piecewisefuel.jl @@ -3,14 +3,14 @@ module TestPiecewiseFuelCO2 using Test include(joinpath(@__DIR__, "utilities.jl")) -obj_true = 2341.82308 +obj_true = 2.3418230753008084e9 test_path = "piecewise_fuel" # Define test inputs -genx_setup = Dict("UCommit" => 2, +genx_setup = Dict( + "UCommit" => 2, "CO2Cap" => 1, - "ParameterScale" => 1) - +) # Run the case and get the objective value and tolerance EP, _, _ = redirect_stdout(devnull) do run_genx_case_testing(test_path, genx_setup) diff --git a/test/test_retrofit.jl b/test/test_retrofit.jl 
index 54ae82ad5a..807eb4640d 100644 --- a/test/test_retrofit.jl +++ b/test/test_retrofit.jl @@ -4,15 +4,15 @@ using Test include(joinpath(@__DIR__, "utilities.jl")) -obj_true = 3179.6244 +obj_true = 3.1796243889649606e9 test_path = "retrofit" # Define test inputs -genx_setup = Dict("CO2Cap" => 2, +genx_setup = Dict( + "CO2Cap" => 2, "StorageLosses" => 1, "MinCapReq" => 1, "MaxCapReq" => 1, - "ParameterScale" => 1, "UCommit" => 2, "EnergyShareRequirement" => 1, "CapacityReserveMargin" => 1, diff --git a/test/test_threezones.jl b/test/test_threezones.jl index 5d608e0f96..bc093fc7e6 100644 --- a/test/test_threezones.jl +++ b/test/test_threezones.jl @@ -4,7 +4,7 @@ using Test include(joinpath(@__DIR__, "utilities.jl")) -obj_true = 6960.20855 +obj_true = 6.960208549907064e9 test_path = "three_zones" # Define test inputs @@ -13,7 +13,6 @@ genx_setup = Dict("NetworkExpansion" => 1, "CO2Cap" => 2, "StorageLosses" => 1, "MinCapReq" => 1, - "ParameterScale" => 1, "UCommit" => 2) # Run the case and get the objective value and tolerance