From 5f7c0883970845b37f9aee2147394a5c2c152bf8 Mon Sep 17 00:00:00 2001 From: biegelk Date: Wed, 31 Dec 2025 13:40:41 -0600 Subject: [PATCH 01/17] converting all instances of 'lamda' to 'lambda' --- settings.yml | 4 ++-- src/ABCEfunctions.jl | 8 ++++---- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/settings.yml b/settings.yml index 30fb382d..0d155130 100644 --- a/settings.yml +++ b/settings.yml @@ -128,8 +128,8 @@ agent_opt: cap_decrease_threshold: 1.0 cap_decrease_margin: -0.08 cap_maintain_margin: -0.03 - profit_lamda: 1.0 # Note: only the ratio between the lamdas matters - credit_rating_lamda: 1.0 + profit_lambda: 1.0 # Note: only the ratio between the lambdas matters + credit_rating_lambda: 1.0 fin_metric_horizon: 4 fcf_debt_floor: 0.16 # weighted sum limit re_debt_floor: 0.115 # weighted sum limit diff --git a/src/ABCEfunctions.jl b/src/ABCEfunctions.jl index e122f9d9..61694c04 100644 --- a/src/ABCEfunctions.jl +++ b/src/ABCEfunctions.jl @@ -1904,15 +1904,15 @@ function set_up_model( # Set relative valuation-vs-credit metrics priority, depending on current # credit grade - profit_lamda = settings["agent_opt"]["profit_lamda"] / 1e9 * cr_adj - credit_rating_lamda = settings["agent_opt"]["credit_rating_lamda"] / cr_adj + profit_lambda = settings["agent_opt"]["profit_lambda"] / 1e9 * cr_adj + credit_rating_lambda = settings["agent_opt"]["credit_rating_lambda"] / cr_adj @objective( m, Max, ( - profit_lamda * (transpose(u) * PA_summaries[!, :NPV]) + - credit_rating_lamda / (0.1 + 0.2 + 0.1) * ( + profit_lambda * (transpose(u) * PA_summaries[!, :NPV]) + + credit_rating_lambda / (0.1 + 0.2 + 0.1) * ( 0.1 * sum(agent_fs[i, :FCF] / 1e9 + sum(u .* marg_FCF[:, i]) + (agent_fs[i, :interest_payment] / 1e9 + sum(u .* marg_int[:, i])) for i=1:fin_metric_horizon) + 0.2 * sum((agent_fs[i, :FCF] / 1e9 + sum(u .* marg_FCF[:, i])) - (agent_fs[i, :remaining_debt_principal] / 1e9 + sum(u .* marg_debt[:, i])) for i=1:fin_metric_horizon) + 0.1 * sum((agent_fs[i, 
:retained_earnings] / 1e9 + sum(u .* marg_RE[:, i])) - (agent_fs[i, :remaining_debt_principal] / 1e9 + sum(u .* marg_debt[:, i])) for i=1:fin_metric_horizon) From f4898822f5bbef8fbe595cd07a88d66f71fdaf8c Mon Sep 17 00:00:00 2001 From: biegelk Date: Wed, 31 Dec 2025 13:43:40 -0600 Subject: [PATCH 02/17] removing dispatch_results from detailed dispatch results to save; LER fully encompasses d_r --- src/dispatch.jl | 10 ---------- 1 file changed, 10 deletions(-) diff --git a/src/dispatch.jl b/src/dispatch.jl index 059adbf1..12818dc1 100644 --- a/src/dispatch.jl +++ b/src/dispatch.jl @@ -1650,16 +1650,6 @@ function postprocess_results( ), sLER, ) - - dispres_filename = string("dispatch_results__", id, "basepd_", current_pd, ".csv") - CSV.write( - joinpath( - settings["file_paths"]["output_logging_dir"], - settings["simulation"]["scenario_name"], - dispres_filename, - ), - dispatch_results, - ) end From a7b0e10ee84be1cadb650539dacbefd7b7992ef2 Mon Sep 17 00:00:00 2001 From: biegelk Date: Thu, 1 Jan 2026 11:16:28 -0600 Subject: [PATCH 03/17] adding function to save annual dispatch hourly results to the database --- src/dispatch.jl | 60 +++++++++++++++++++++++++++++++++++++++++++++++-- 1 file changed, 58 insertions(+), 2 deletions(-) diff --git a/src/dispatch.jl b/src/dispatch.jl index 12818dc1..7d9ee164 100644 --- a/src/dispatch.jl +++ b/src/dispatch.jl @@ -1659,9 +1659,64 @@ end function finalize_annual_dispatch_results(db, current_pd, long_econ_results, dispatch_results) #save_annual_dispatch_summary(db, current_pd, long_econ_results) - #save_annual_dispatch_hourly_results(db, current_pd, long_econ_results) + save_annual_dispatch_hourly_results(db, current_pd, long_econ_results) save_annual_dispatch_unit_summary(db, current_pd, dispatch_results) - #save_annual_dispatch_hourly_unit_results(db, current_pd, long_econ_results) + save_annual_dispatch_hourly_unit_results(db, current_pd, long_econ_results) +end + + +function save_annual_dispatch_hourly_results(db, current_pd, 
long_econ_results) + # Get columns of interest from LER + results = deepcopy( + long_econ_results[:, [:y, :d, :h, :unit_type, :lambda, :reg_rmp, :spin_rmp, :nspin_rmp]] + ) + + # Remove duplicates and delete unit_type column + long_econ_results = filter( + :unit_type => ut -> ut == "wind", + long_econ_results, + ) + + long_econ_results = select(long_econ_results, Not([:unit_type])) + + # Rename the columns to match the DB standard + rename!( + results, + :y => :period, + :d => :day, + :h => :hour, + :reg_rmp => :reg_price, + :spin_rmp => :spin_price, + :nspin_rmp => :nspin_price, + ) + + stmt = DBInterface.prepare( + db, + """INSERT INTO annual_dispatch_hourly_results VALUES (:period, :day, :hour, :demand, :reg_demand, :spin_demand, :nspin_demand, :lambda, :reg_price, :spin_price, :nspin_price, :ENS, :RNS, :SNS, :NSNS)""" + ) + + zeroes = results[!, :period] .* 0 + + DBInterface.executemany( + stmt, + (period = results[!, :period], + day = results[!, :day], + hour = results[!, :hour], + demand = zeroes, + reg_demand = zeroes, + spin_demand = zeroes, + nspin_demand = zeroes, + lambda = results[!, :lambda], + reg_price = results[!, :reg_price], + spin_price = results[!, :spin_price], + nspin_price = results[!, :nspin_price], + ENS = zeroes, + RNS = zeroes, + SNS = zeroes, + NSNS = zeroes, + ) + ) + end @@ -1692,6 +1747,7 @@ function save_annual_dispatch_unit_summary(db, current_pd, dispatch_results) end + function save_annual_dispatch_hourly_unit_results(db, current_pd, long_econ_results) # Get columns of interest results = deepcopy(long_econ_results[:, [:y, :d, :h, :unit_type, :gen, :reg, :spin, :nspin]]) From afc0233c2458f5298ef5561a868f01690eca1a93 Mon Sep 17 00:00:00 2001 From: biegelk Date: Thu, 8 Jan 2026 12:31:21 -0600 Subject: [PATCH 04/17] removing demand columns from the annual dispatch results table --- src/seed_creator.py | 4 ---- 1 file changed, 4 deletions(-) diff --git a/src/seed_creator.py b/src/seed_creator.py index c12c59c3..097b033b 100644 --- 
a/src/seed_creator.py +++ b/src/seed_creator.py @@ -278,10 +278,6 @@ ("period", "integer"), ("day", "integer"), ("hour", "integer"), - ("demand", "real"), - ("reg_demand", "real"), - ("spin_demand", "real"), - ("nspin_demand", "real"), ("lambda", "real"), ("reg_price", "real"), ("spin_price", "real"), From 009f96ea864f1a81060c1c8e3d0302cf5aac0762 Mon Sep 17 00:00:00 2001 From: biegelk Date: Thu, 8 Jan 2026 12:32:23 -0600 Subject: [PATCH 05/17] reinstating save_annual_dispatch_hourly_results() in dispatch.jl, including capturing and returning XNS results --- src/agent_choice.jl | 2 +- src/annual_dispatch.jl | 4 ++-- src/dispatch.jl | 46 ++++++++++++++++++++++++------------------ 3 files changed, 29 insertions(+), 23 deletions(-) diff --git a/src/agent_choice.jl b/src/agent_choice.jl index ea937b9c..59b2dc79 100755 --- a/src/agent_choice.jl +++ b/src/agent_choice.jl @@ -236,7 +236,7 @@ function run_agent_choice() # Use the agent's internal dispatch forecast generator to project dispatch # results in the system over the forecast horizon @info "Simulating future market dispatch..." - long_econ_results, dispatch_results = Dispatch.execute_dispatch_economic_projection( + long_econ_results, dispatch_results, not_served = Dispatch.execute_dispatch_economic_projection( CLI_args, db, settings, diff --git a/src/annual_dispatch.jl b/src/annual_dispatch.jl index 1fdd56ea..7b01888c 100644 --- a/src/annual_dispatch.jl +++ b/src/annual_dispatch.jl @@ -144,7 +144,7 @@ function run_true_annual_dispatch() @info "Running the year-long dispatch simulation..." 
# Run the year's UC/ED problem - long_econ_results, dispatch_results = Dispatch.execute_dispatch_economic_projection( + long_econ_results, dispatch_results, not_served = Dispatch.execute_dispatch_economic_projection( CL_args, db, settings, @@ -161,7 +161,7 @@ function run_true_annual_dispatch() end # Adjust formatting and save to the database - Dispatch.finalize_annual_dispatch_results(db, CL_args["current_pd"], long_econ_results, dispatch_results) + Dispatch.finalize_annual_dispatch_results(db, CL_args["current_pd"], long_econ_results, dispatch_results, not_served) @info "Done!" end diff --git a/src/dispatch.jl b/src/dispatch.jl index 7d9ee164..415d1288 100644 --- a/src/dispatch.jl +++ b/src/dispatch.jl @@ -37,7 +37,7 @@ function execute_dispatch_economic_projection( " years...", ) - all_repdays, all_grc_results, all_price_results = handle_annual_dispatch( + all_repdays, all_grc_results, all_price_results, not_served = handle_annual_dispatch( db, settings, CLI_args, @@ -70,7 +70,7 @@ function execute_dispatch_economic_projection( fc_pd, ) - return long_econ_results, dispatch_results + return long_econ_results, dispatch_results, not_served end @@ -168,6 +168,7 @@ function handle_annual_dispatch( all_grc_results = set_up_grc_results_df() all_prices = set_up_prices_df() + not_served = nothing if run_mode == "forecast" num_years = settings["dispatch"]["num_dispatch_years"] @@ -230,13 +231,15 @@ function handle_annual_dispatch( CLI_args["current_pd"], results[:summary_statistics] ) + + not_served = results[:not_served] end @debug "DISPATCH SIMULATION: YEAR $y COMPLETE." 
end - return all_repdays, all_grc_results, all_prices + return all_repdays, all_grc_results, all_prices, not_served end @@ -1004,7 +1007,6 @@ function run_annual_dispatch( # Set up default return values if the dispatch year is infeasible new_grc_results = nothing new_prices = nothing - summary_statistics = nothing # Save the generation and commitment results from the integral problem new_grc_results = assemble_grc_results(y, gen_qty, r, sr, nsr, c, su, sd, portfolio_specs) @@ -1148,14 +1150,24 @@ function run_annual_dispatch( end if run_mode == "current" + # Calculate summary statistics for the real dispatch year summary_statistics = calculate_summary_statistics(new_grc_results, new_prices, ens, rns, sns, nsns) + + # Collate and return X-not-served results + not_served = select(deepcopy(new_prices), [:y, :d, :h]) + not_served[!, :ENS] = vec(reshape(ens, (:, 1))) + not_served[!, :RNS] = vec(reshape(rns, (:, 1))) + not_served[!, :SNS] = vec(reshape(sns, (:, 1))) + not_served[!, :NSNS] = vec(reshape(nsns, (:, 1))) else summary_statistics = nothing + not_served = nothing end results = Dict( :new_grc_results => new_grc_results, :new_prices => new_prices, + :not_served => not_served, :summary_statistics => summary_statistics ) @@ -1657,27 +1669,27 @@ function postprocess_results( end -function finalize_annual_dispatch_results(db, current_pd, long_econ_results, dispatch_results) +function finalize_annual_dispatch_results(db, current_pd, long_econ_results, dispatch_results, not_served) #save_annual_dispatch_summary(db, current_pd, long_econ_results) - save_annual_dispatch_hourly_results(db, current_pd, long_econ_results) + save_annual_dispatch_hourly_results(db, current_pd, long_econ_results, not_served) save_annual_dispatch_unit_summary(db, current_pd, dispatch_results) save_annual_dispatch_hourly_unit_results(db, current_pd, long_econ_results) end -function save_annual_dispatch_hourly_results(db, current_pd, long_econ_results) +function 
save_annual_dispatch_hourly_results(db, current_pd, long_econ_results, not_served) # Get columns of interest from LER results = deepcopy( long_econ_results[:, [:y, :d, :h, :unit_type, :lambda, :reg_rmp, :spin_rmp, :nspin_rmp]] ) # Remove duplicates and delete unit_type column - long_econ_results = filter( + results = filter( :unit_type => ut -> ut == "wind", long_econ_results, ) - long_econ_results = select(long_econ_results, Not([:unit_type])) + results = select(results, Not([:unit_type])) # Rename the columns to match the DB standard rename!( @@ -1692,28 +1704,22 @@ function save_annual_dispatch_hourly_results(db, current_pd, long_econ_results) stmt = DBInterface.prepare( db, - """INSERT INTO annual_dispatch_hourly_results VALUES (:period, :day, :hour, :demand, :reg_demand, :spin_demand, :nspin_demand, :lambda, :reg_price, :spin_price, :nspin_price, :ENS, :RNS, :SNS, :NSNS)""" + """INSERT INTO annual_dispatch_hourly_results VALUES (:period, :day, :hour, :lambda, :reg_price, :spin_price, :nspin_price, :ENS, :RNS, :SNS, :NSNS)""" ) - zeroes = results[!, :period] .* 0 - DBInterface.executemany( stmt, (period = results[!, :period], day = results[!, :day], hour = results[!, :hour], - demand = zeroes, - reg_demand = zeroes, - spin_demand = zeroes, - nspin_demand = zeroes, lambda = results[!, :lambda], reg_price = results[!, :reg_price], spin_price = results[!, :spin_price], nspin_price = results[!, :nspin_price], - ENS = zeroes, - RNS = zeroes, - SNS = zeroes, - NSNS = zeroes, + ENS = not_served[!, :ENS], + RNS = not_served[!, :RNS], + SNS = not_served[!, :SNS], + NSNS = not_served[!, :NSNS], ) ) From 8e722d627bbe477d071ae216e230e13181c7ec03 Mon Sep 17 00:00:00 2001 From: biegelk Date: Sun, 11 Jan 2026 11:02:39 -0600 Subject: [PATCH 06/17] retrieve a list of existent unit types to filter on --- src/dispatch.jl | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/src/dispatch.jl b/src/dispatch.jl index 415d1288..5710c480 100644 --- a/src/dispatch.jl +++ 
b/src/dispatch.jl @@ -1684,8 +1684,9 @@ function save_annual_dispatch_hourly_results(db, current_pd, long_econ_results, ) # Remove duplicates and delete unit_type column + filter_type = unique(results[!, :unit_type])[1] results = filter( - :unit_type => ut -> ut == "wind", + :unit_type => ut -> ut == filter_type, long_econ_results, ) From c2ea0a4e46261c002b8fd9bd01a695c05d573c11 Mon Sep 17 00:00:00 2001 From: biegelk Date: Tue, 20 Jan 2026 14:55:31 -0600 Subject: [PATCH 07/17] reorganizing and cleaning out some cruft --- src/postprocessing.py | 210 ++++++++++++++++++++---------------------- 1 file changed, 102 insertions(+), 108 deletions(-) diff --git a/src/postprocessing.py b/src/postprocessing.py index 10d1f882..194e5547 100644 --- a/src/postprocessing.py +++ b/src/postprocessing.py @@ -43,43 +43,57 @@ "PUN units (higher VOM)": "#bbbbbb" } +#================= +# Setup functions +#================= +def get_cli_args(): + parser = argparse.ArgumentParser() + parser.add_argument( + "--dir", + type=str, + required=True + ) -def write_raw_db_to_excel(settings, db, tag=None): - # Get the names of all database tables - db_tables = pd.read_sql_query( - "SELECT name FROM sqlite_master WHERE type='table';", db + parser.add_argument( + "--default", + "-d", + action="store_true", ) - if settings is not None: - # Set up the path to the ultimate outputs directory - out_file = Path( - Path.cwd() - / "outputs" - / settings["simulation"]["scenario_name"] - / f"{settings['simulation']['scenario_name']}__{settings['file_paths']['output_file']}" - ) - else: - out_file = Path(os.getenv("ABCE_DIR")) / "imgs" / tag / f"{tag}_outputs.xlsx" + args = parser.parse_args() + args.no_plots = False + return args - # Write each table to a tab in the excel sheet - with pd.ExcelWriter(out_file) as writer: - for i in range(len(db_tables)): - # Get table name - table = db_tables.loc[i, "name"] - # Get table data - table_data = pd.read_sql_query(f"SELECT * FROM {table}", db) +def get_db(args): + 
db_file = Path(args.dir) / "abce_db.db" + db = sqlite3.connect(db_file) - # Write table data to excel tab - try: - table_data.to_excel(writer, sheet_name=table, engine="openpyxl") - except: - logging.info(f"Unable to save table {table} to excel, likely due to excessive length.") + return db_file, db + + +#====================== +# Processing functions +#====================== +def postprocess_results(args, db, settings): + logging.info("Postprocessing results...") + # Save the raw database as an Excel format for easier viewing and manual + # postprocessing/debugging + write_raw_db_to_excel(settings, db) + + # Get a list of all agent ids + agent_list = get_agent_list(db) -############################################################################### -# Functions for postprocessing and plotting -############################################################################### + # Get an subset of unit_specs columns + unit_specs = get_unit_specs(db) + + # Plot portfolio evolution for all agents, plus the overall system + if not args.no_plots: + for agent_id in agent_list + [None]: + plot_portfolios(db, settings, unit_specs, agent_id) + + logging.info("Postprocessing complete.") def get_agent_list(db): @@ -100,6 +114,23 @@ def get_unit_specs(db): return unit_specs +def plot_portfolios(db, settings, unit_specs, agent_id): + if agent_id == None: + msg = "total system portfolio" + else: + msg = f"agent {agent_id}'s portfolio" + + logging.debug(f"Procesing data for {msg}...") + portfolio_profile = get_portfolio_profile( + db, agent_id, unit_specs, settings, + ) + + portfolio_profile = organize_portfolio_profile(portfolio_profile) + + plot_portfolio_profile(settings, agent_id, portfolio_profile) + logging.debug(f"Plot for {msg} saved.") + + def get_portfolio_profile(db, agent_id, unit_specs, settings): # Retrieve a long dataframe showing the number of installed units # by type by year @@ -221,19 +252,21 @@ def organize_portfolio_profile(portfolio_profile): return 
portfolio_profile -def plot_portfolio_profile(settings, agent_id, portfolio, tag=None, descriptor=None): - # Set up figure titles and filenames - if settings is not None: - tag = settings["simulation"]["scenario_name"] - descriptor = " ".join(tag.split("_")) + +#==================== +# Plotting functions +#==================== +def plot_portfolio_profile(settings, agent_id, portfolio): + sname = settings["simulation"]["scenario_name"] + readable_name = " ".join(sname.split("_")) # Set up figure-specific strings according to the agent_id specified if agent_id == None: - title = f"Total system portfolio evolution\n{descriptor}" - filename = f"{tag}_total_system_portfolio_evolution.png" + title = f"Total system portfolio evolution\n{readable_name}" + filename = f"{sname}_total_system_portfolio_evolution.png" else: - title = f"Agent {agent_id} portfolio evolution\n{descriptor}" - filename = f"{tag}_agent_{agent_id}_portfolio_evolution.png" + title = f"Agent {agent_id} portfolio evolution\n{readable_name}" + filename = f"{sname}_agent_{agent_id}_portfolio_evolution.png" # Remove empty data columns for column in list(portfolio.columns): @@ -275,94 +308,55 @@ def plot_portfolio_profile(settings, agent_id, portfolio, tag=None, descriptor=N Path("outputs", settings["simulation"]["scenario_name"], filename) ) else: - target_dir = Path(os.getenv("ABCE_DIR")) / "imgs" / tag + target_dir = Path(os.getenv("ABCE_DIR")) / "imgs" / sname if not target_dir.is_dir(): os.makedirs(target_dir) fig.get_figure().savefig(target_dir / filename) -def plot_portfolios(db, settings, unit_specs, agent_id, tag, descriptor): - if agent_id == None: - msg = "total system portfolio" - else: - msg = f"agent {agent_id}'s portfolio" - - logging.debug(f"Procesing data for {msg}...") - portfolio_profile = get_portfolio_profile( - db, agent_id, unit_specs, settings, +#================ +# Save functions +#================ +def write_raw_db_to_excel(settings, db): + # Get the names of all database tables + 
db_tables = pd.read_sql_query( + "SELECT name FROM sqlite_master WHERE type='table';", + db, ) - portfolio_profile = organize_portfolio_profile(portfolio_profile) - - plot_portfolio_profile(settings, agent_id, portfolio_profile, tag, descriptor) - logging.debug(f"Plot for {msg} saved.") - - -def postprocess_results(args, abce_model, settings=None, tag=None, descriptor=None): - logging.info("Postprocessing results...") - - # Save the raw database as an Excel format for easier viewing and manual - # postprocessing/debugging -# if settings is not None: - write_raw_db_to_excel(settings, abce_model.db, tag) - - # Get a list of all agent ids - agent_list = get_agent_list(abce_model.db) - - # Get an subset of unit_specs columns - unit_specs = get_unit_specs(abce_model.db) - - # Plot portfolio evolution for all agents, plus the overall system - if not args.no_plots: - for agent_id in agent_list + [None]: - plot_portfolios(abce_model.db, settings, unit_specs, agent_id, tag, descriptor) - - logging.info("Postprocessing complete.") + sname = settings["simulation"]["scenario_name"] - -def cli_args(): - parser = argparse.ArgumentParser() - parser.add_argument( - "--dir", - type=str, - required=True + # Set up the path to the ultimate outputs directory + out_file = Path( + Path.cwd() + / "outputs" + / sname + / f"{sname}__{settings['file_paths']['output_file']}" ) - parser.add_argument( - "--default", - "-d", - action="store_true", - ) + # Write each table to a tab in the excel sheet + with pd.ExcelWriter(out_file) as writer: + for i in range(len(db_tables)): + # Get table name + table = db_tables.loc[i, "name"] - args = parser.parse_args() - args.no_plots = False - return args + # Get table data + table_data = pd.read_sql_query(f"SELECT * FROM {table}", db) + # Write table data to excel tab + try: + table_data.to_excel(writer, sheet_name=table, engine="openpyxl") + except: + logging.info(f"Unable to save table {table} to excel, likely due to excessive length.") -class 
Model(object): - def __init__(self, db): - self.db = db if __name__ == "__main__": - args = cli_args() + args = get_cli_args() # Open database and create the fake model object - db_file = Path(args.dir) / "abce_db.db" - print(db_file) - db = sqlite3.connect(db_file) - m = Model(db) - - # If not default, ask the user for the scenario name to use in - # figure titles - descriptor = None - if not args.default: - descriptor = input("Provide a scenario descriptor to use in the figure titles:\n") - - # Separate out the immediate parent directory name as a tag for filenames - # This is the same as the scenario name in a live run of ABCE - tag = db_file.parts[-2] + db_file, db = get_db(args) - postprocess_results(args, m, settings=None, tag=tag, descriptor=descriptor) + postprocess_results(args, db, settings) From d90f36cc0925a7858a54bd22b840920993c5db0a Mon Sep 17 00:00:00 2001 From: biegelk Date: Tue, 20 Jan 2026 14:55:47 -0600 Subject: [PATCH 08/17] updating ppx arguments --- run.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/run.py b/run.py index a07a3063..8a9cca22 100644 --- a/run.py +++ b/run.py @@ -195,7 +195,7 @@ def run_model(): for i in range(settings["simulation"]["num_steps"]): abce_model.step(demo=args.demo) - ppx.postprocess_results(args, abce_model, settings) + ppx.postprocess_results(args, abce_model.db, settings) class ABCEFormatter(logging.Formatter): From 868119a7809a7d4dbb625179b6baf205906cda36 Mon Sep 17 00:00:00 2001 From: biegelk Date: Tue, 20 Jan 2026 14:56:08 -0600 Subject: [PATCH 09/17] clearing out commented-out code --- utils/ppx.py | 14 -------------- 1 file changed, 14 deletions(-) diff --git a/utils/ppx.py b/utils/ppx.py index 2d8bd108..9df2ac28 100644 --- a/utils/ppx.py +++ b/utils/ppx.py @@ -255,20 +255,6 @@ def write_to_excel(everything): with xlsxwriter as writer: for key in everything.keys(): everything[key].to_excel(writer, sheet_name = names[key]) -# everything["wa_lambda"].to_excel(writer, sheet_name = "Wtd Avg 
Lambda") -# everything["ENS"].to_excel(writer, sheet_name = "Energy Not Served") -# everything["conditions"].to_excel(writer, sheet_name = "Decision conditions") -# everything["xtr_pivot"].to_excel(writer, sheet_name = "New xtr pivot") -# everything["xtr_exec_pivot"].to_excel(writer, sheet_name = "New xtr L0 pivot") -# everything["ret_pivot"].to_excel(writer, sheet_name = "Retirement pivot") -# everything["ret_exec_pivot"].to_excel(writer, sheet_name = "Retirements executed") -# everything["201_revenue"].to_excel(writer, sheet_name = "201 revenue") -# everything["201_FCF"].to_excel(writer, sheet_name = "201 FCF") -# everything["201_moodys_score"].to_excel(writer, sheet_name = "201 score") -# everything["202_revenue"].to_excel(writer, sheet_name = "202 revenue") -# everything["202_FCF"].to_excel(writer, sheet_name = "202 FCF") -# everything["202_moodys_score"].to_excel(writer, sheet_name = "202 score") - def run(): From 60877070792e6df500bf6e8d2df80f9d51d497f0 Mon Sep 17 00:00:00 2001 From: biegelk Date: Sun, 25 Jan 2026 16:08:46 -0600 Subject: [PATCH 10/17] remove empty index column from database->excel write; tidy up formatting --- src/postprocessing.py | 27 +++++++++++++++++++++------ 1 file changed, 21 insertions(+), 6 deletions(-) diff --git a/src/postprocessing.py b/src/postprocessing.py index 194e5547..9bf71313 100644 --- a/src/postprocessing.py +++ b/src/postprocessing.py @@ -51,7 +51,7 @@ def get_cli_args(): parser.add_argument( "--dir", type=str, - required=True + required=True, ) parser.add_argument( @@ -192,7 +192,10 @@ def get_portfolio_profile(db, agent_id, unit_specs, settings): portfolios["num_units"] * portfolios["capacity"] ) portfolios = pd.pivot_table( - portfolios, values="total_capacity", index="year", columns=["unit_type"] + portfolios, + values="total_capacity", + index="year", + columns=["unit_type"], ) return portfolios @@ -300,7 +303,12 @@ def plot_portfolio_profile(settings, agent_id, portfolio): # Add the legend handles, labels = 
ax.get_legend_handles_labels() - plt.legend(handles[::-1], labels[::-1], loc="center left", bbox_to_anchor=(1.0, 0.5)) + plt.legend( + handles[::-1], + labels[::-1], + loc="center left", + bbox_to_anchor=(1.0, 0.5), + ) # Save the figure if settings is not None: @@ -345,10 +353,17 @@ def write_raw_db_to_excel(settings, db): # Write table data to excel tab try: - table_data.to_excel(writer, sheet_name=table, engine="openpyxl") + table_data.to_excel( + writer, + sheet_name=table, + engine="openpyxl", + index=False, + ) except: - logging.info(f"Unable to save table {table} to excel, likely due to excessive length.") - + logging.info( + f"Unable to save table {table} to excel. " + + "This may be due to excessive length or another problem." + ) if __name__ == "__main__": From d5f67cd1d7fef1407cfa60fdf63553bf16e2c0e8 Mon Sep 17 00:00:00 2001 From: biegelk Date: Sun, 25 Jan 2026 16:14:30 -0600 Subject: [PATCH 11/17] fixing whitespace --- src/postprocessing.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/postprocessing.py b/src/postprocessing.py index 9bf71313..3c6754c1 100644 --- a/src/postprocessing.py +++ b/src/postprocessing.py @@ -307,7 +307,7 @@ def plot_portfolio_profile(settings, agent_id, portfolio): handles[::-1], labels[::-1], loc="center left", - bbox_to_anchor=(1.0, 0.5), + bbox_to_anchor=(1.0, 0.5), ) # Save the figure From b8c37a7e9feb61f5a9a7c64ddcd74571f3fa0f34 Mon Sep 17 00:00:00 2001 From: biegelk Date: Sun, 15 Mar 2026 10:02:51 -0500 Subject: [PATCH 12/17] adding DB tables to store relaxed integrality decision results, constraint status, and objective values --- src/seed_creator.py | 30 ++++++++++++++++++++++++++++++ 1 file changed, 30 insertions(+) diff --git a/src/seed_creator.py b/src/seed_creator.py index 097b033b..23c89fd9 100644 --- a/src/seed_creator.py +++ b/src/seed_creator.py @@ -263,6 +263,36 @@ ("units_to_execute", "integer"), ("mode", "text"), ], + + "agent_decisions_relax": [ + ("agent_id", "integer", "PRIMARY 
KEY"), + ("base_pd", "integer"), + ("unit_type", "text"), + ("project_type", "text"), + ("lag", "integer"), + ("ret_pd", "integer"), + ("NPV", "real"), + ("allowed", "text"), + ("units_to_execute", "real"), + ("mode", "text"), + ], + + "constraint_status": [ + ("agent_id", "integer"), + ("pd", "integer"), + ("name", "text"), + ("equation", "text"), + ("primal_value", "real"), + ("dual_value", "real"), + ], + + "objective_values": [ + ("agent_id", "integer", "PRIMARY KEY"), + ("pd", "integer"), + ("integral_problem", "real"), + ("relaxed_problem", "real"), + ], + "annual_dispatch_summary": [ ("period", "integer", "PRIMARY KEY"), ("wa_lambda", "real"), From f9bba1418ce8ebe67e4cf122de2090f70a05f2c8 Mon Sep 17 00:00:00 2001 From: biegelk Date: Sun, 15 Mar 2026 11:37:08 -0500 Subject: [PATCH 13/17] name all constraints and save constraint data to DB --- src/ABCEfunctions.jl | 97 +++++++++++++++++++++++++++++++------------- 1 file changed, 69 insertions(+), 28 deletions(-) diff --git a/src/ABCEfunctions.jl b/src/ABCEfunctions.jl index 61694c04..5350134c 100644 --- a/src/ABCEfunctions.jl +++ b/src/ABCEfunctions.jl @@ -17,7 +17,7 @@ module ABCEfunctions using ArgParse, CPLEX, - Requires, SQLite, DataFrames, CSV, JuMP, GLPK, Cbc, Logging, Tables, HiGHS, Statistics + Requires, SQLite, DataFrames, CSV, JuMP, GLPK, Cbc, Logging, Tables, HiGHS, Statistics, MathOptInterface include("./dispatch.jl") using .Dispatch @@ -1351,10 +1351,13 @@ function add_constraint_shortage_protection( transform!(agent_pf, [:num_units, :capacity, :capacity_factor] => ((num, cap, cf) -> num .* cap .* cf) => :total_derated_cap) agent_year_derated_cap = sum(agent_pf[!, :total_derated_cap]) + cname = string("shortage_protection_y", i) + @constraint( m, transpose(m[:u] .* PA_summaries[:, :current]) * marg_derated_cap[:, i] >= - agent_year_derated_cap * margin + agent_year_derated_cap * margin, + base_name = cname, ) end end @@ -1474,6 +1477,7 @@ function add_constraint_FM_floors( ) for 
i=1:settings["agent_opt"]["fin_metric_horizon"] ) / settings["agent_opt"]["fin_metric_horizon"] >= 0, + base_name = "FCDR_RCDR", ) # Limit the average ICR value over the horizon to its floor @@ -1483,7 +1487,8 @@ function add_constraint_FM_floors( agent_fs[i, :FCF] / 1e9 + sum(m[:u] .* marg_FCF[:, i]) + (1 - ICR_solo_floor) * (-1) * (agent_fs[i, :interest_payment] / 1e9 + sum(m[:u] .* marg_int[:, i])) for i = 1:settings["agent_opt"]["fin_metric_horizon"] - ) >= 0 + ) >= 0, + base_name = "ICR", ) return m @@ -1502,11 +1507,13 @@ function add_constraint_max_new_capacity( for i = 1:size(PA_summaries)[1] if PA_summaries[i, :project_type] == "new_xtr" + cname = string(PA_summaries[i, :unit_type], "_new_xtr_L", PA_summaries[i, :lag]) @constraint( m, m[:u][i] .* exp_PA_summaries[i, :capacity] .<= convert(Int64, exp_PA_summaries[i, :allowed]) .* - max_type_newcap + max_type_newcap, + base_name = cname, ) end end @@ -1530,21 +1537,27 @@ function add_constraint_max_retirements( if PA_summaries[i, :project_type] == "retirement" unit_type = PA_summaries[i, :unit_type] ret_pd = PA_summaries[i, :ret_pd] + asset_count = filter( [:unit_type, :retirement_pd] => (x, y) -> x == unit_type && y == ret_pd, asset_counts, - )[ - 1, - :count, - ] + )[1, :count] + max_retirement = ( convert(Int64, PA_summaries[i, :allowed]) .* min( asset_count, max_type_rets, ) ) - @constraint(m, m[:u][i] .<= max_retirement) + + cname = string(unit_type, "_retlimit_", ret_pd, "-", PA_summaries[i, :lag]) + + @constraint( + m, + m[:u][i] .<= max_retirement, + base_name = cname, + ) end end @@ -1597,7 +1610,8 @@ function add_constraint_retireable_asset_limit( @constraint( m, sum(ret_summation_matrix[i, :] .* m[:u]) <= - retireable_asset_counts[i, :count] + retireable_asset_counts[i, :count], + base_name = "retlimit_exist_$i" ) end @@ -1737,12 +1751,14 @@ function add_constraint_retirement_scheduling_limit( for i = 1:max_horizon @constraint( m, - sum(transpose(m[:u]) * type_PA_rets[:, i]) <= type_ops[i, 
:num_units] + sum(transpose(m[:u]) * type_PA_rets[:, i]) <= type_ops[i, :num_units], + base_name = string(unit_type, "_retlimit_ops_y", i), ) @constraint( m, - sum(transpose(m[:u]) * type_PA_rets[:, i]) <= settings["agent_opt"]["max_type_rets_per_pd"] + sum(transpose(m[:u]) * type_PA_rets[:, i]) <= settings["agent_opt"]["max_type_rets_per_pd"], + base_name = string(unit_type, "_retlimit_global_y", i), ) # The following set of three constraints are collectively @@ -1754,17 +1770,20 @@ function add_constraint_retirement_scheduling_limit( @constraint( m, - m[:z][z_count, i] >= type_rets[i, :num_units] + m[:z][z_count, i] >= type_rets[i, :num_units], + base_name = string("retlimit_z1_", unit_type, i), ) @constraint( m, - m[:z][z_count, i] >= sum(transpose(m[:u]) * type_PA_rets[:, i]) + m[:z][z_count, i] >= sum(transpose(m[:u]) * type_PA_rets[:, i]), + base_name = string("retlimit_z2_", unit_type, i), ) @constraint( m, - sum(m[:z][z_count, j] for j=1:i) <= type_ops[1, :num_units] + sum(m[:z][z_count, j] for j=1:i) <= type_ops[1, :num_units], + base_name = string("retlimit_z3_", unit_type, i), ) end @@ -1926,25 +1945,39 @@ function set_up_model( end -function solve_model(m, verbosity, threshold=1e-8) - optimize!(m) +function save_constraint_data(db, m, agent_id, pd) + cons_data = DataFrame( + agent_id = Int64[], + pd = Int64[], + name = String[], + equation = String[], + primal_value = Float64[], + dual_value = Float64[], + ) + + for (F, S) in list_of_constraint_types(m) + for con in all_constraints(m, F, S) + if (F != VariableRef) + # Get separate name and equation info + con_eq = JuMP.constraint_string(MIME("text/plain"), con) + if occursin(JuMP.name(con), con_eq) + lname = JuMP.name(con) * " : " + con_eq = replace(con_eq, lname => "") + end - if verbosity >= 3 - if string(termination_status.(m)) == "OPTIMAL" - for (F, S) in list_of_constraint_types(m) - for con in all_constraints(m, F, S) - println(con) - println(JuMP.value(con)) - end + con_data = [agent_id, pd, 
JuMP.name(con), con_eq, JuMP.value(con), JuMP.dual(con)] + DBInterface.execute( + db, + "INSERT INTO constraint_status VALUES (?, ?, ?, ?, ?, ?)", + con_data, + ) end end end - return m end - ### Postprocessing function finalize_results_dataframe(m, mode, PA_summaries) # Check solve status of model @@ -2719,16 +2752,24 @@ function save_agent_fs!(fs, agent_id, db, mode) end -function save_agent_decisions(db, current_pd, agent_id, decision_df) +function save_agent_decisions(db, current_pd, agent_id, decision_df; mode="integral") decision_df[!, :agent_id] .= agent_id decision_df[!, :base_pd] .= current_pd + + if mode == "integral" + table = "agent_decisions" + else + table = "agent_decisions_relax" + end + cols_to_ignore = [:uid, :current] select!(decision_df, [:agent_id, :base_pd], Not(vcat([:agent_id], cols_to_ignore))) + for row in Tuple.(eachrow(decision_df)) DBInterface.execute( db, string( - "INSERT INTO agent_decisions ", + "INSERT INTO $table ", "VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)", ), row, From e9705d8e37eb160637a69afdbdfe74c307318284 Mon Sep 17 00:00:00 2001 From: biegelk Date: Sun, 15 Mar 2026 11:38:41 -0500 Subject: [PATCH 14/17] removing generic process_results function; retrieve model results (including constraint and objective data) from the integral model, then solve the relaxed-integrality model to get constraint dual values; save that data to DB --- src/agent_choice.jl | 92 +++++++++++++++++++++++++++++++++------------ 1 file changed, 67 insertions(+), 25 deletions(-) diff --git a/src/agent_choice.jl b/src/agent_choice.jl index 59b2dc79..34d49af0 100755 --- a/src/agent_choice.jl +++ b/src/agent_choice.jl @@ -124,26 +124,6 @@ function compute_last_year_results(db, settings, CLI_args, agent_params) end -function process_results(settings, CLI_args, final_model, final_mode, db, PA_uids, unit_specs) - # Ensure model results data is valid and of correct type - all_results = ABCEfunctions.finalize_results_dataframe(final_model, final_mode, PA_uids) - - # 
Display the results - ABCEfunctions.display_agent_choice_results(CLI_args, final_model, all_results) - - # Save newly-selected project alternatives happening in the current period - # to the database - ABCEfunctions.postprocess_agent_decisions( - settings, - all_results, - unit_specs, - db, - CLI_args["current_pd"], - CLI_args["agent_id"], - ) -end - - function save_intermediate_outputs(settings, CLI_args, adj_system_portfolios, long_econ_results) # Save all system portfolio forecasts to the cnerg groupspace pfs = deepcopy(adj_system_portfolios[CLI_args["current_pd"]]) @@ -163,6 +143,34 @@ function save_intermediate_outputs(settings, CLI_args, adj_system_portfolios, lo end +function get_model_results(settings, model, realized_mode, PA_uids; mode="integral") + # Check solve status of model + status = string(termination_status.(model)) + + if status == "OPTIMAL" + # If the model solved to optimality: + if mode == "integral" + # Convert the results to type Int64 + unit_qty = Int64.(round.(value.(model[:u]))) + else + unit_qty = value.(model[:u]) + end + obj_val = objective_value(model) + else + # If the model did not solve to optimality for any reason + # (infeasibility or error): + # The agent does nothing. Return a vector of all zeroes instead. + unit_qty = zeros(Int64, size(PA_uids)[1]) + obj_val = nothing + end + + decision_df = hcat(PA_uids, DataFrame(units_to_execute = unit_qty)) + decision_df[!, :mode] .= realized_mode + + return decision_df, obj_val +end + + function run_agent_choice() @info "Setting up data..." @@ -302,12 +310,12 @@ function run_agent_choice() # Solve the model @info "Solving optimization problem..." 
-    m = ABCEfunctions.solve_model(m, CLI_args["verbosity"])
+    optimize!(m)
     status = string(termination_status.(m))
 
     if status == "OPTIMAL"
         final_model = m
-        final_mode = "normal"
+        realized_mode = "normal"
     else
         m_ret = ABCEfunctions.set_up_model(
             settings,
@@ -327,15 +335,49 @@
             mode="ret_only"
         )
 
-        m_ret = ABCEfunctions.solve_model(m_ret, CLI_args["verbosity"])
+        optimize!(m_ret)
 
         final_model = m_ret
-        final_mode = "ret_only"
+        realized_mode = "ret_only"
     end
 
     # Process the model outputs
     @debug "Postprocessing model results..."
-    process_results(settings, CLI_args, final_model, final_mode, db, PA_uids, unit_specs)
+    decision_df, obj_val = get_model_results(settings, final_model, realized_mode, PA_uids; mode="integral")
+    ABCEfunctions.display_agent_choice_results(CLI_args, final_model, decision_df)
+
+    # Re-solve the model with integrality relaxed
+    undo_relax = relax_integrality(final_model)
+    optimize!(final_model)
+    relax_decision_df, relax_obj_val = get_model_results(settings, final_model, realized_mode, PA_uids; mode="relax")
+
+    # Save the constraint data (including dual values) for the relaxed model
+    ABCEfunctions.save_constraint_data(db, final_model, CLI_args["agent_id"], CLI_args["current_pd"])
+
+    # Display the relaxed-integrality model results
+    ABCEfunctions.display_agent_choice_results(CLI_args, final_model, relax_decision_df)
+
+    # Save newly-selected project alternatives happening in the current period
+    # to the database
+    ABCEfunctions.postprocess_agent_decisions(
+        settings,
+        decision_df,
+        unit_specs,
+        db,
+        CLI_args["current_pd"],
+        CLI_args["agent_id"],
+    )
+
+    # Save relaxed problem results to DB
+    ABCEfunctions.save_agent_decisions(db, CLI_args["current_pd"], CLI_args["agent_id"], relax_decision_df; mode="relax")
+
+    # Save both problems' final objective values to DB
+    DBInterface.execute(
+        db,
+        "INSERT INTO objective_values VALUES (?, ?, ?, ?)",
+        (CLI_args["agent_id"], CLI_args["current_pd"], obj_val, relax_obj_val),
+    )
+
end From 4d9b4844a25eea23b6755f9fe4a226fcd1543dac Mon Sep 17 00:00:00 2001 From: biegelk Date: Sun, 15 Mar 2026 11:40:00 -0500 Subject: [PATCH 15/17] git ignoring utils/ dir --- .gitignore | 1 + 1 file changed, 1 insertion(+) diff --git a/.gitignore b/.gitignore index c98bec7a..17931abb 100644 --- a/.gitignore +++ b/.gitignore @@ -4,6 +4,7 @@ **/outputs/ **/scenario_reduction_tmp/ **/tmp/ +**/utils/ .github/workflows/inactive inputs/backup_calcs test/norun From 3ee0a3139b6cb6d20516453c32d0ab8701662d69 Mon Sep 17 00:00:00 2001 From: biegelk Date: Sun, 15 Mar 2026 11:41:58 -0500 Subject: [PATCH 16/17] adding canonical 2a1b input files to git tracking --- inputs/1a_S1aFS.yml | 33 +++++ inputs/2a1b/2a1b.yml | 175 +++++++++++++++++++++++ inputs/2a1b/demand_data_baseline_70y.csv | 71 +++++++++ inputs/2a1b/settings.yml | 151 +++++++++++++++++++ 4 files changed, 430 insertions(+) create mode 100644 inputs/1a_S1aFS.yml create mode 100644 inputs/2a1b/2a1b.yml create mode 100644 inputs/2a1b/demand_data_baseline_70y.csv create mode 100644 inputs/2a1b/settings.yml diff --git a/inputs/1a_S1aFS.yml b/inputs/1a_S1aFS.yml new file mode 100644 index 00000000..61401539 --- /dev/null +++ b/inputs/1a_S1aFS.yml @@ -0,0 +1,33 @@ +# This input file serves as a base for the Simplified 1-agent Full Spectrum test +# +# System composition: +# - 1 agent, which owns an equal number of all possible generation asset types +# - with 8 assets per type (excl. 
C2N), 15600 MW unforced capacity (12400 net of wind and solar) +# +# This file is intended to be used with the following other assumptions/files: +# - ts_US_data timeseries files (all quantities constant, no AS) +# - 10k starting peak demand + + +# Standard Light & Power +201: + debt_fraction: 0.5 + cost_of_debt: 0.06 + cost_of_equity: 0.1 + starting_debt: 3000000000 # $3 B + starting_PPE: 250000000 # $250 M + starting_RE: 200000000 # $200 M + k: 0.95 + starting_portfolio: + coal: 8 + conventional_nuclear: 8 + ngcc: 8 + ngct: 8 + PUN_unit_high: 8 + PUN_unit_low: 8 + solar: 8 + solar_old: 8 + wind: 8 + wind_old: 8 + advanced_nuclear: 8 + diff --git a/inputs/2a1b/2a1b.yml b/inputs/2a1b/2a1b.yml new file mode 100644 index 00000000..247a30eb --- /dev/null +++ b/inputs/2a1b/2a1b.yml @@ -0,0 +1,175 @@ +# This input file serves as a base for two-agent test scenarios. +# The two agents have identical portfolios and parameters. +# The system portfolio has been adjusted slightly from that of the standard +# ABCE "2019 ERCOT" facsimile to be evenly divisible by two. 
+ + +# Standard Light & Power +201: + debt_fraction: 0.5 + cost_of_debt: 0.06 + cost_of_equity: 0.1 + starting_debt: 3000000000 # $3 B + starting_PPE: 250000000 # $250 M + starting_RE: 200000000 # $200 M + starting_portfolio: + coal: 8 + conventional_nuclear: 4 + ngcc: 57 + ngct: 44 + PUN_unit_high: 22 + PUN_unit_low: 10 + solar: 10 + solar_old: 12 + wind: 19 + wind_old: 52 + scheduled_retirements: + # Format: + # unit_type: + # retirement_pd: num_units + coal: + 4: 1 + 5: 1 + 8: 3 + 9: 1 + 15: 2 + conventional_nuclear: + 22: 2 + 27: 2 + ngcc: + 3: 2 + 5: 7 + 8: 4 + 10: 13 + 13: 4 + 17: 8 + 19: 6 + 22: 13 + ngct: + 2: 2 + 5: 5 + 7: 6 + 11: 13 + 14: 2 + 19: 10 + 24: 6 + PUN_unit_high: + 50: 22 + PUN_unit_low: + 50: 10 + solar: + 15: 3 + 20: 5 + 27: 2 + solar_old: + 2: 2 + 5: 4 + 8: 4 + 10: 2 + wind: + 12: 6 + 15: 3 + 24: 10 + wind_old: + 4: 6 + 6: 7 + 8: 9 + 10: 3 + 14: 8 + 16: 13 + 18: 6 + + + +# Springfield Electric +202: + debt_fraction: 0.5 + cost_of_debt: 0.06 + cost_of_equity: 0.1 + starting_debt: 3000000000 # $3 B + starting_PPE: 250000000 # $250 M + starting_RE: 200000000 # $200 M + starting_portfolio: + coal: 8 + conventional_nuclear: 4 + ngcc: 57 + ngct: 44 + PUN_unit_high: 22 + PUN_unit_low: 10 + solar: 10 + solar_old: 12 + wind: 19 + wind_old: 52 + scheduled_retirements: + # Format: + # unit_type: + # retirement_pd: num_units + coal: + 4: 1 + 5: 1 + 8: 3 + 9: 1 + 15: 2 + conventional_nuclear: + 22: 2 + 27: 2 + ngcc: + 3: 2 + 5: 7 + 8: 4 + 10: 13 + 13: 4 + 17: 8 + 19: 6 + 22: 13 + ngct: + 2: 2 + 5: 5 + 7: 6 + 11: 13 + 14: 2 + 19: 10 + 24: 6 + PUN_unit_high: + 50: 22 + PUN_unit_low: + 50: 10 + solar: + 15: 3 + 20: 5 + 27: 2 + solar_old: + 2: 2 + 5: 4 + 8: 4 + 10: 2 + wind: + 12: 6 + 15: 3 + 24: 10 + wind_old: + 4: 6 + 6: 7 + 8: 9 + 10: 3 + 14: 8 + 16: 13 + 18: 6 + +# Balance of System +208: + starting_portfolio: + coal: 16 + ngcc: 114 + ngct: 88 + conventional_nuclear: 8 + wind: 38 + wind_old: 104 + solar: 20 + solar_old: 24 + PUN_unit_high: 44 + 
PUN_unit_low: 20 + inactive: True + balance_of_system: True + expansion_strategy: proportional_expansion + diff --git a/inputs/2a1b/demand_data_baseline_70y.csv b/inputs/2a1b/demand_data_baseline_70y.csv new file mode 100644 index 00000000..086a94fa --- /dev/null +++ b/inputs/2a1b/demand_data_baseline_70y.csv @@ -0,0 +1,71 @@ +demand +1 +1.012 +1.024 +1.036 +1.049 +1.061 +1.074 +1.087 +1.1 +1.113 +1.127 +1.133 +1.14 +1.147 +1.153 +1.161 +1.168 +1.175 +1.181 +1.189 +1.195 +1.203 +1.21 +1.218 +1.225 +1.232 +1.239 +1.247 +1.254 +1.262 +1.269 +1.277677419 +1.286354839 +1.295032258 +1.303709677 +1.312387097 +1.321064516 +1.329741935 +1.338419355 +1.347096774 +1.355774194 +1.364451613 +1.373129032 +1.381806452 +1.390483871 +1.39916129 +1.40783871 +1.416516129 +1.425193548 +1.433870968 +1.442548387 +1.451225806 +1.459903226 +1.468580645 +1.477258065 +1.485935484 +1.494612903 +1.503290323 +1.511967742 +1.520645161 +1.529322581 +1.538 +1.546677419 +1.555354839 +1.564032258 +1.572709677 +1.581387097 +1.590064516 +1.598741935 +1.607419355 diff --git a/inputs/2a1b/settings.yml b/inputs/2a1b/settings.yml new file mode 100644 index 00000000..7956df6d --- /dev/null +++ b/inputs/2a1b/settings.yml @@ -0,0 +1,151 @@ +simulation: + scenario_name: "2a_10p_2a1b_baseline" + solver: "CPLEX" + num_steps: 10 + file_logging_level: 1 # 0: no additional csvs saved; 1: all additional csvs saved + # caution! 
enabling file logging with 365 repdays can require 200GB+ of storage + C2N_assumption: baseline + +scenario: + peak_demand: 78000 + policies: + CTAX: + enabled: False + qty: 0 # $/t CO2 + PTC_all_techs: + enabled: True + eligibility: + unit_type: + - wind + - solar + start_year: -5 # earliest eligible year for construction start (absolute, inclusive) + end_year: 9 # latest eligible year for construction start (absolute, inclusive) + duration: 10 # number of in-service years for which eligible units receive the credit + qty: 27.5 # $/MWh + PTC_existing_nuclear: + enabled: True + eligibility: + unit_type: + - conventional_nuclear + start_year: -100 + end_year: 100 + duration: 50 + qty: 10 + ITC: + enabled: True + eligibility: + unit_type: + - advanced_nuclear + - PWR_C2N0_single + - PWR_C2N1_single + - HTGR_C2N0_single + - HTGR_C2N2_single + - SFR_C2N0_single + - SFR_C2N3_single + start_year: 0 # earliest eligible year for construction start (absolute, inclusive) + end_year: 9 # latest eligible year for construction start (absolute, inclusive) + qty: 0.4 # Specify as a fraction (between 0 and 1) + + allowed_xtr_types: + - wind + - solar + - ngcc + - ngct + - advanced_nuclear + - PWR_C2N0_single + - PWR_C2N1_single + - HTGR_C2N0_single + - HTGR_C2N2_single + - SFR_C2N0_single + - SFR_C2N3_single + + +####################################################### +# Advanced settings +####################################################### + +# Various constants: should never be updated! 
+constants: + first_asset_id: 2001 + vis_lvl: 45 # sets the logging level for bare visual elements + large_epsilon: 1.0 + time_before_start: -1 + distant_time: 9999 + big_number: 999999 + hours_per_year: 8760 + MW2kW: 1000 # converts MW to kW + +# File paths and filenames +file_paths: + ABCE_sysimage_file: "abceSysimage.so" + db_file: "abce_db.db" + demand_data_file: "demand_data_baseline_70y.csv" +# agent_specifications_file: "2a_identical.yml" + agent_specifications_file: "2a1b.yml" + output_file: "outputs.xlsx" +# output_logging_dir: "/groupspace/cnerg/users/kebiegel/abce_outputs" + output_logging_dir: "/filespace/k/kebiegel/abce/outputs" + timeseries_data_dir: "ts_data" + unit_specs_data_file: "unit_specs.yml" + logo: "abce.txt" + +# Modeled grid system settings which are unlikely to change frequently +system: + price_cap: 9000 + AS_price_cap: 1000 + tax_rate: 0.21 + planning_reserve_margin: 0.03 # 1 + PRM = total target capacity + peak_initial_reserves: 0.0 + tax_credits_discount: 0.1 + +# Settings for demand projections +demand: + total_forecast_horizon: 10 # Number of periods in the complete forecast horizon + demand_visibility_horizon: 5 + demand_projection_mode: exp_termrate # flat, exp_fitted, or exp_termrate + demand_projection_window: 5 # Total number of periods used to project demand + historical_demand_growth_rate: 0.01 + terminal_demand_growth_rate: 0.01 # Exponential growth rate of demand + +# Settings for the agents' internal dispatch simulator and handling of +# dispatch data +dispatch: + num_dispatch_years: 10 # Num. 
of years to explicitly simulate dispatch
+  downselection: exact
+  num_repdays: 365
+  annual_dispatch_subperiod: 30 # Number of days per annual dispatch subperiod
+  ASNS_penalty_ratio: 0.08 # Relative to an ENS penalty value of 1
+  rns_subpenalty: 4
+  sns_subpenalty: 5
+  nsns_subpenalty: 2
+  gamma_reg: .8
+  gamma_spin: .8
+  gamma_nspin: 0.2
+  hist_wt: 0.4 # Weighting of historical versus projected data
+  hist_decay: 0.5 # Decay factor for each historical data year
+
+# Settings for agent behavior optimization
+agent_opt:
+  num_future_periods_considered: 2 # Number of periods for which to consider future projects
+  max_type_rets_per_pd: 5
+  max_type_newcap_per_pd: 5000
+  shortage_protection_period: 8
+  cap_decrease_threshold: 1.0
+  cap_decrease_margin: -0.08
+  cap_maintain_margin: -0.03
+  profit_lambda: 1.0 # Note: only the ratio between the lambdas matters
+  credit_rating_lambda: 1.0
+  fin_metric_horizon: 4
+  rcdr_target_delta: 0.005
+  icr_floor: 3.5 # weighted sum limit
+  fcf_debt_floor: 0.16 # weighted sum limit
+  re_debt_floor: 0.115 # weighted sum limit
+  icr_solo_floor: 0.0 # individual
+  competitor_efficiency_assumption: 0.95 # k < 1.0 (sensitive parameter!)
+
+financing:
+  default_debt_term: 30
+  depreciation_horizon: 20
+  starting_instrument_id: 1000
+
+

From e5378923c474d9240aa22ddf4e2571e32900d963 Mon Sep 17 00:00:00 2001
From: biegelk
Date: Sun, 15 Mar 2026 11:42:20 -0500
Subject: [PATCH 17/17] ignoring perturbation and test files in the inputs/
 dir

---
 .gitignore | 3 +++
 1 file changed, 3 insertions(+)

diff --git a/.gitignore b/.gitignore
index 17931abb..739d0b0b 100644
--- a/.gitignore
+++ b/.gitignore
@@ -29,3 +29,6 @@ precompile.jl
 *cplex*
 **/*repDays*.csv
 *logfile*
+core
+**/inputs/*test*
+**/inputs/*_p.*