Skip to content

Commit e7d35e7

Browse files
committed
Merge remote-tracking branch 'origin/main' into other-commodities
2 parents 4c25f5c + ff47a80 commit e7d35e7

10 files changed

Lines changed: 106 additions & 98 deletions

File tree

src/asset.rs

Lines changed: 5 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -264,20 +264,15 @@ impl AssetPool {
264264
self.iter().filter(|asset| asset.region_id == *region_id)
265265
}
266266

267-
/// Iterate over only the active assets in a given region that produce or consume a given
268-
/// commodity
267+
/// Iterate over the active assets in a given region that produce/consume a commodity with the
268+
/// associated process flow
269269
pub fn iter_for_region_and_commodity<'a>(
270270
&'a self,
271271
region_id: &'a RegionID,
272272
commodity_id: &'a CommodityID,
273-
) -> impl Iterator<Item = &'a AssetRef> {
274-
self.iter_for_region(region_id).filter(|asset| {
275-
asset.process.contains_commodity_flow(
276-
commodity_id,
277-
&asset.region_id,
278-
asset.commission_year,
279-
)
280-
})
273+
) -> impl Iterator<Item = (&'a AssetRef, &'a ProcessFlow)> {
274+
self.iter_for_region(region_id)
275+
.filter_map(|asset| Some((asset, asset.get_flow(commodity_id)?)))
281276
}
282277

283278
/// Replace the active pool with new and/or already commissioned assets

src/input/commodity/demand.rs

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -187,7 +187,7 @@ fn compute_demand_maps(
187187
) -> HashMap<CommodityID, DemandMap> {
188188
let mut map = HashMap::new();
189189
for ((commodity_id, region_id, year), (level, annual_demand)) in demand.iter() {
190-
for ts_selection in time_slice_info.iter_selections_for_level(*level) {
190+
for ts_selection in time_slice_info.iter_selections_at_level(*level) {
191191
let slice_key = (
192192
commodity_id.clone(),
193193
region_id.clone(),

src/input/commodity/demand_slicing.rs

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -128,7 +128,7 @@ fn validate_demand_slices(
128128
) -> Result<()> {
129129
for (commodity, region_id) in iproduct!(svd_commodities.values(), region_ids) {
130130
time_slice_info
131-
.iter_selections_for_level(commodity.time_slice_level)
131+
.iter_selections_at_level(commodity.time_slice_level)
132132
.map(|ts_selection| {
133133
demand_slices
134134
.get(&(

src/input/process.rs

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -170,7 +170,7 @@ fn validate_commodities(
170170
}
171171
CommodityType::ServiceDemand => {
172172
for ts_selection in
173-
time_slice_info.iter_selections_for_level(commodity.time_slice_level)
173+
time_slice_info.iter_selections_at_level(commodity.time_slice_level)
174174
{
175175
validate_svd_commodity(
176176
time_slice_info,

src/output.rs

Lines changed: 18 additions & 29 deletions
Original file line numberDiff line numberDiff line change
@@ -32,8 +32,8 @@ const ASSETS_FILE_NAME: &str = "assets.csv";
3232
/// The output file name for commodity balance duals
3333
const COMMODITY_BALANCE_DUALS_FILE_NAME: &str = "debug_commodity_balance_duals.csv";
3434

35-
/// The output file name for capacity duals
36-
const CAPACITY_DUALS_FILE_NAME: &str = "debug_capacity_duals.csv";
35+
/// The output file name for activity duals
36+
const ACTIVITY_DUALS_FILE_NAME: &str = "debug_activity_duals.csv";
3737

3838
/// Get the model name from the specified directory path
3939
pub fn get_output_dir(model_dir: &Path) -> Result<PathBuf> {
@@ -111,9 +111,9 @@ struct CommodityPriceRow {
111111
price: f64,
112112
}
113113

114-
/// Represents the capacity duals data in a row of the capacity duals CSV file
114+
/// Represents the activity duals data in a row of the activity duals CSV file
115115
#[derive(Serialize, Deserialize, Debug, PartialEq)]
116-
struct CapacityDualsRow {
116+
struct ActivityDualsRow {
117117
milestone_year: u32,
118118
asset_id: AssetID,
119119
time_slice: TimeSliceID,
@@ -130,21 +130,10 @@ struct CommodityBalanceDualsRow {
130130
value: f64,
131131
}
132132

133-
/// Represents the fixed asset duals data in a row of the fixed asset duals CSV file
134-
#[derive(Serialize, Deserialize, Debug, PartialEq)]
135-
struct FixedAssetDualsRow {
136-
pac: CommodityID,
137-
pac_flow: f64,
138-
commodity_id: CommodityID,
139-
commodity_flow: f64,
140-
time_slice: TimeSliceID,
141-
value: f64,
142-
}
143-
144133
/// For writing extra debug information about the model
145134
struct DebugDataWriter {
146135
commodity_balance_duals_writer: csv::Writer<File>,
147-
capacity_duals_writer: csv::Writer<File>,
136+
activity_duals_writer: csv::Writer<File>,
148137
}
149138

150139
impl DebugDataWriter {
@@ -161,33 +150,33 @@ impl DebugDataWriter {
161150

162151
Ok(Self {
163152
commodity_balance_duals_writer: new_writer(COMMODITY_BALANCE_DUALS_FILE_NAME)?,
164-
capacity_duals_writer: new_writer(CAPACITY_DUALS_FILE_NAME)?,
153+
activity_duals_writer: new_writer(ACTIVITY_DUALS_FILE_NAME)?,
165154
})
166155
}
167156

168157
/// Write all debug info to output files
169158
fn write_debug_info(&mut self, milestone_year: u32, solution: &Solution) -> Result<()> {
170-
self.write_capacity_duals(milestone_year, solution.iter_capacity_duals())?;
159+
self.write_activity_duals(milestone_year, solution.iter_activity_duals())?;
171160
self.write_commodity_balance_duals(
172161
milestone_year,
173162
solution.iter_commodity_balance_duals(),
174163
)?;
175164
Ok(())
176165
}
177166

178-
/// Write capacity duals to file
179-
fn write_capacity_duals<'a, I>(&mut self, milestone_year: u32, iter: I) -> Result<()>
167+
/// Write activity duals to file
168+
fn write_activity_duals<'a, I>(&mut self, milestone_year: u32, iter: I) -> Result<()>
180169
where
181170
I: Iterator<Item = (&'a AssetRef, &'a TimeSliceID, f64)>,
182171
{
183172
for (asset, time_slice, value) in iter {
184-
let row = CapacityDualsRow {
173+
let row = ActivityDualsRow {
185174
milestone_year,
186175
asset_id: asset.id.unwrap(),
187176
time_slice: time_slice.clone(),
188177
value,
189178
};
190-
self.capacity_duals_writer.serialize(row)?;
179+
self.activity_duals_writer.serialize(row)?;
191180
}
192181

193182
Ok(())
@@ -215,7 +204,7 @@ impl DebugDataWriter {
215204
/// Flush the underlying streams
216205
fn flush(&mut self) -> Result<()> {
217206
self.commodity_balance_duals_writer.flush()?;
218-
self.capacity_duals_writer.flush()?;
207+
self.activity_duals_writer.flush()?;
219208

220209
Ok(())
221210
}
@@ -468,30 +457,30 @@ mod tests {
468457
}
469458

470459
#[rstest]
471-
fn test_write_capacity_duals(assets: AssetPool, time_slice: TimeSliceID) {
460+
fn test_write_activity_duals(assets: AssetPool, time_slice: TimeSliceID) {
472461
let milestone_year = 2020;
473462
let value = 0.5;
474463
let dir = tempdir().unwrap();
475464
let asset = assets.iter().next().unwrap();
476465

477-
// Write capacity dual
466+
// Write activity dual
478467
{
479468
let mut writer = DebugDataWriter::create(dir.path()).unwrap();
480469
writer
481-
.write_capacity_duals(milestone_year, iter::once((asset, &time_slice, value)))
470+
.write_activity_duals(milestone_year, iter::once((asset, &time_slice, value)))
482471
.unwrap();
483472
writer.flush().unwrap();
484473
}
485474

486475
// Read back and compare
487-
let expected = CapacityDualsRow {
476+
let expected = ActivityDualsRow {
488477
milestone_year,
489478
asset_id: asset.id.unwrap(),
490479
time_slice,
491480
value,
492481
};
493-
let records: Vec<CapacityDualsRow> =
494-
csv::Reader::from_path(dir.path().join(CAPACITY_DUALS_FILE_NAME))
482+
let records: Vec<ActivityDualsRow> =
483+
csv::Reader::from_path(dir.path().join(ACTIVITY_DUALS_FILE_NAME))
495484
.unwrap()
496485
.into_deserialize()
497486
.try_collect()

src/process.rs

Lines changed: 0 additions & 15 deletions
Original file line numberDiff line numberDiff line change
@@ -48,21 +48,6 @@ pub struct Process {
4848
pub regions: HashSet<RegionID>,
4949
}
5050

51-
impl Process {
52-
/// Whether the process contains a flow for a given commodity
53-
pub fn contains_commodity_flow(
54-
&self,
55-
commodity_id: &CommodityID,
56-
region_id: &RegionID,
57-
year: u32,
58-
) -> bool {
59-
self.flows
60-
.get(&(region_id.clone(), year))
61-
.unwrap() // all regions and years are covered
62-
.contains_key(commodity_id)
63-
}
64-
}
65-
6651
/// Represents a maximum annual commodity coeff for a given process
6752
#[derive(PartialEq, Debug, Clone)]
6853
pub struct ProcessFlow {

src/simulation/optimisation.rs

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -94,10 +94,10 @@ impl Solution<'_> {
9494
})
9595
}
9696

97-
/// Keys and dual values for capacity constraints.
98-
pub fn iter_capacity_duals(&self) -> impl Iterator<Item = (&AssetRef, &TimeSliceID, f64)> {
97+
/// Keys and dual values for activity constraints.
98+
pub fn iter_activity_duals(&self) -> impl Iterator<Item = (&AssetRef, &TimeSliceID, f64)> {
9999
self.constraint_keys
100-
.capacity_keys
100+
.activity_keys
101101
.zip_duals(self.solution.dual_rows())
102102
.map(|((asset, time_slice), dual)| (asset, time_slice, dual))
103103
}
Lines changed: 75 additions & 36 deletions
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,7 @@
11
//! Code for adding constraints to the dispatch optimisation problem.
22
use super::VariableMap;
33
use crate::asset::{AssetPool, AssetRef};
4-
use crate::commodity::CommodityID;
4+
use crate::commodity::{CommodityID, CommodityType};
55
use crate::model::Model;
66
use crate::region::RegionID;
77
use crate::time_slice::{TimeSliceID, TimeSliceInfo, TimeSliceSelection};
@@ -28,34 +28,33 @@ impl<T> KeysWithOffset<T> {
2828
/// Indicates the commodity ID and time slice selection covered by each commodity balance constraint
2929
pub type CommodityBalanceKeys = KeysWithOffset<(CommodityID, RegionID, TimeSliceSelection)>;
3030

31-
/// Indicates the asset ID and time slice covered by each capacity constraint
32-
pub type CapacityKeys = KeysWithOffset<(AssetRef, TimeSliceID)>;
31+
/// Indicates the asset ID and time slice covered by each activity constraint
32+
pub type ActivityKeys = KeysWithOffset<(AssetRef, TimeSliceID)>;
3333

3434
/// The keys for different constraints
3535
pub struct ConstraintKeys {
3636
/// Keys for commodity balance constraints
3737
pub commodity_balance_keys: CommodityBalanceKeys,
38-
/// Keys for capacity constraints
39-
pub capacity_keys: CapacityKeys,
38+
/// Keys for activity constraints
39+
pub activity_keys: ActivityKeys,
4040
}
4141

4242
/// Add asset-level constraints
4343
///
4444
/// Note: the ordering of constraints is important, as the dual values of the constraints must later
4545
/// be retrieved to calculate commodity prices.
4646
///
47-
/// # Arguments:
47+
/// # Arguments
4848
///
4949
/// * `problem` - The optimisation problem
5050
/// * `variables` - The variables in the problem
5151
/// * `model` - The model
5252
/// * `assets` - The asset pool
5353
/// * `year` - Current milestone year
5454
///
55-
/// # Returns:
55+
/// # Returns
5656
///
57-
/// * A vector of keys for commodity balance constraints
58-
/// * A vector of keys for capacity constraints
57+
/// Keys for the different constraints.
5958
pub fn add_asset_constraints(
6059
problem: &mut Problem,
6160
variables: &VariableMap,
@@ -67,12 +66,12 @@ pub fn add_asset_constraints(
6766
add_commodity_balance_constraints(problem, variables, model, assets, year);
6867

6968
let capacity_keys =
70-
add_asset_capacity_constraints(problem, variables, assets, &model.time_slice_info);
69+
add_activity_constraints(problem, variables, &model.time_slice_info, assets);
7170

7271
// Return constraint keys
7372
ConstraintKeys {
7473
commodity_balance_keys,
75-
capacity_keys,
74+
activity_keys: capacity_keys,
7675
}
7776
}
7877

@@ -85,44 +84,84 @@ pub fn add_asset_constraints(
8584
/// [1]: https://energysystemsmodellinglab.github.io/MUSE_2.0/dispatch_optimisation.html#commodity-balance-constraints
8685
fn add_commodity_balance_constraints(
8786
problem: &mut Problem,
88-
_variables: &VariableMap,
89-
_model: &Model,
90-
_assets: &AssetPool,
91-
_year: u32,
87+
variables: &VariableMap,
88+
model: &Model,
89+
assets: &AssetPool,
90+
year: u32,
9291
) -> CommodityBalanceKeys {
9392
// Row offset in problem. This line **must** come before we add more constraints.
9493
let offset = problem.num_rows();
9594

96-
let keys = Vec::new();
97-
98-
// **TODO:** Add commodity balance constraints:
99-
// https://github.com/EnergySystemsModellingLab/MUSE_2.0/issues/577
95+
let mut keys = Vec::new();
96+
let mut terms = Vec::new();
97+
for (commodity_id, commodity) in model.commodities.iter() {
98+
if !matches!(
99+
commodity.kind,
100+
CommodityType::SupplyEqualsDemand | CommodityType::ServiceDemand
101+
) {
102+
continue;
103+
}
104+
105+
for region_id in model.iter_regions() {
106+
for ts_selection in model
107+
.time_slice_info
108+
.iter_selections_at_level(commodity.time_slice_level)
109+
{
110+
for (asset, flow) in assets.iter_for_region_and_commodity(region_id, commodity_id) {
111+
// If the commodity has a time slice level of season/annual, the constraint will
112+
// cover multiple time slices
113+
for (time_slice, _) in ts_selection.iter(&model.time_slice_info) {
114+
let var = variables.get(asset, time_slice);
115+
terms.push((var, flow.coeff));
116+
}
117+
}
118+
119+
// Add constraint. For SED commodities, the RHS is zero and for SVD commodities it
120+
// is the exogenous demand supplied by the user.
121+
let rhs = if commodity.kind == CommodityType::ServiceDemand {
122+
*commodity
123+
.demand
124+
.get(&(region_id.clone(), year, ts_selection.clone()))
125+
.unwrap()
126+
} else {
127+
0.0
128+
};
129+
problem.add_row(rhs..=rhs, terms.drain(..));
130+
keys.push((
131+
commodity_id.clone(),
132+
region_id.clone(),
133+
ts_selection.clone(),
134+
))
135+
}
136+
}
137+
}
100138

101139
CommodityBalanceKeys { offset, keys }
102140
}
103141

104-
/// Add asset-level capacity and availability constraints.
142+
/// Add constraints on the activity of different assets.
105143
///
106-
/// For every asset at every time slice, the sum of the commodity flows for assets must not exceed
107-
/// the capacity limits, which are a product of the annual capacity, time slice length and process
108-
/// availability.
109-
///
110-
/// See description in [the dispatch optimisation documentation][1].
111-
///
112-
/// [1]: https://energysystemsmodellinglab.github.io/MUSE_2.0/dispatch_optimisation.html#asset-level-capacity-and-availability-constraints
113-
fn add_asset_capacity_constraints(
144+
/// This ensures that assets do not exceed their specified capacity and availability for each time
145+
/// slice.
146+
fn add_activity_constraints(
114147
problem: &mut Problem,
115-
_variables: &VariableMap,
116-
_assets: &AssetPool,
117-
_time_slice_info: &TimeSliceInfo,
118-
) -> CapacityKeys {
148+
variables: &VariableMap,
149+
time_slice_info: &TimeSliceInfo,
150+
assets: &AssetPool,
151+
) -> ActivityKeys {
119152
// Row offset in problem. This line **must** come before we add more constraints.
120153
let offset = problem.num_rows();
121154

122-
let keys = Vec::new();
155+
let mut keys = Vec::new();
156+
for asset in assets.iter() {
157+
for time_slice in time_slice_info.iter_ids() {
158+
let var = variables.get(asset, time_slice);
159+
let limits = asset.get_activity_limits(time_slice);
123160

124-
// **TODO:** Add capacity/availability constraints:
125-
// https://github.com/EnergySystemsModellingLab/MUSE_2.0/issues/579
161+
problem.add_row(limits, [(var, 1.0)]);
162+
keys.push((asset.clone(), time_slice.clone()))
163+
}
164+
}
126165

127-
CapacityKeys { offset, keys }
166+
ActivityKeys { offset, keys }
128167
}

0 commit comments

Comments (0)