Skip to content

Commit c8b2743

Browse files
authored
Merge pull request #615 from EnergySystemsModellingLab/asset_id_outputs
Use `AssetID` in output files
2 parents 542baff + 61e4a5d commit c8b2743

2 files changed

Lines changed: 28 additions & 20 deletions

File tree

src/asset.rs

Lines changed: 3 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -6,19 +6,20 @@ use crate::region::RegionID;
66
use crate::time_slice::TimeSliceID;
77
use anyhow::{ensure, Context, Result};
88
use indexmap::IndexMap;
9+
use serde::{Deserialize, Serialize};
910
use std::hash::{Hash, Hasher};
1011
use std::ops::{Deref, RangeInclusive};
1112
use std::rc::Rc;
1213

1314
/// A unique identifier for an asset
14-
#[derive(Clone, Copy, Debug, Eq, Hash, Ord, PartialEq, PartialOrd)]
15+
#[derive(Clone, Copy, Debug, Eq, Hash, Ord, PartialEq, PartialOrd, Deserialize, Serialize)]
1516
pub struct AssetID(u32);
1617

1718
/// An asset controlled by an agent.
1819
#[derive(Clone, Debug, PartialEq)]
1920
pub struct Asset {
2021
/// A unique identifier for the asset
21-
id: Option<AssetID>,
22+
pub id: Option<AssetID>,
2223
/// A unique identifier for the agent
2324
pub agent_id: AgentID,
2425
/// The [`Process`] that this asset corresponds to

src/output.rs

Lines changed: 25 additions & 18 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,6 @@
11
//! The module responsible for writing output data to disk.
22
use crate::agent::AgentID;
3-
use crate::asset::{Asset, AssetRef};
3+
use crate::asset::{Asset, AssetID, AssetRef};
44
use crate::commodity::CommodityID;
55
use crate::process::ProcessID;
66
use crate::region::RegionID;
@@ -66,13 +66,11 @@ pub fn create_output_directory(output_dir: &Path) -> Result<()> {
6666
Ok(())
6767
}
6868

69-
/// Used to represent assets in assets output CSV file and other output files.
70-
///
71-
/// NB: It may be better to represent assets in these other files with IDs instead, see
72-
/// [#581](https://github.com/EnergySystemsModellingLab/MUSE_2.0/issues/581).
69+
/// Represents a row in the assets output CSV file.
7370
#[derive(Serialize, Deserialize, Debug, PartialEq)]
7471
struct AssetRow {
7572
milestone_year: u32,
73+
asset_id: AssetID,
7674
process_id: ProcessID,
7775
region_id: RegionID,
7876
agent_id: AgentID,
@@ -84,6 +82,7 @@ impl AssetRow {
8482
fn new(milestone_year: u32, asset: &Asset) -> Self {
8583
Self {
8684
milestone_year,
85+
asset_id: asset.id.unwrap(),
8786
process_id: asset.process.id.clone(),
8887
region_id: asset.region_id.clone(),
8988
agent_id: asset.agent_id.clone(),
@@ -93,10 +92,10 @@ impl AssetRow {
9392
}
9493

9594
/// Represents the flow-related data in a row of the commodity flows CSV file.
96-
///
97-
/// This will be written along with an [`AssetRow`] containing asset-related info.
9895
#[derive(Serialize, Deserialize, Debug, PartialEq)]
9996
struct CommodityFlowRow {
97+
milestone_year: u32,
98+
asset_id: AssetID,
10099
commodity_id: CommodityID,
101100
time_slice: TimeSliceID,
102101
flow: f64,
@@ -112,11 +111,11 @@ struct CommodityPriceRow {
112111
price: f64,
113112
}
114113

115-
/// Represents the capacity duals data in a row of the capacity duals CSV file.
116-
///
117-
/// This will be written along with an [`AssetRow`] containing asset-related info.
114+
/// Represents the capacity duals data in a row of the capacity duals CSV file.
118115
#[derive(Serialize, Deserialize, Debug, PartialEq)]
119116
struct CapacityDualsRow {
117+
milestone_year: u32,
118+
asset_id: AssetID,
120119
time_slice: TimeSliceID,
121120
value: f64,
122121
}
@@ -182,13 +181,13 @@ impl DebugDataWriter {
182181
I: Iterator<Item = (&'a AssetRef, &'a TimeSliceID, f64)>,
183182
{
184183
for (asset, time_slice, value) in iter {
185-
let asset_row = AssetRow::new(milestone_year, asset);
186-
let dual_row = CapacityDualsRow {
184+
let row = CapacityDualsRow {
185+
milestone_year,
186+
asset_id: asset.id.unwrap(),
187187
time_slice: time_slice.clone(),
188188
value,
189189
};
190-
self.capacity_duals_writer
191-
.serialize((asset_row, dual_row))?;
190+
self.capacity_duals_writer.serialize(row)?;
192191
}
193192

194193
Ok(())
@@ -277,13 +276,14 @@ impl DataWriter {
277276
/// Write commodity flows to a CSV file
278277
pub fn write_flows(&mut self, milestone_year: u32, flow_map: &FlowMap) -> Result<()> {
279278
for ((asset, commodity_id, time_slice), flow) in flow_map {
280-
let asset_row = AssetRow::new(milestone_year, asset);
281-
let flow_row = CommodityFlowRow {
279+
let row = CommodityFlowRow {
280+
milestone_year,
281+
asset_id: asset.id.unwrap(),
282282
commodity_id: commodity_id.clone(),
283283
time_slice: time_slice.clone(),
284284
flow: *flow,
285285
};
286-
self.flows_writer.serialize((asset_row, flow_row))?;
286+
self.flows_writer.serialize(row)?;
287287
}
288288

289289
Ok(())
@@ -380,6 +380,8 @@ mod tests {
380380

381381
// Read back and compare
382382
let expected = CommodityFlowRow {
383+
milestone_year,
384+
asset_id: asset.id.unwrap(),
383385
commodity_id,
384386
time_slice,
385387
flow: 42.0,
@@ -482,7 +484,12 @@ mod tests {
482484
}
483485

484486
// Read back and compare
485-
let expected = CapacityDualsRow { time_slice, value };
487+
let expected = CapacityDualsRow {
488+
milestone_year,
489+
asset_id: asset.id.unwrap(),
490+
time_slice,
491+
value,
492+
};
486493
let records: Vec<CapacityDualsRow> =
487494
csv::Reader::from_path(dir.path().join(CAPACITY_DUALS_FILE_NAME))
488495
.unwrap()

0 commit comments

Comments (0)