Skip to content

Commit 0b89a2b

Browse files
authored
ENH: Add lo-de-rates data product (IMAP-Science-Operations-Center#2566)
1 parent a0554e2 commit 0b89a2b

3 files changed

Lines changed: 315 additions & 0 deletions

File tree

imap_processing/cli.py

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1019,6 +1019,8 @@ def do_processing(
10191019
elif self.data_level == "l1b":
10201020
data_dict = {}
10211021
science_files = dependencies.get_file_paths(source="lo", data_type="l1a")
1022+
science_files += dependencies.get_file_paths(source="lo", data_type="l1b")
1023+
10221024
ancillary_files = dependencies.get_file_paths(
10231025
source="lo", data_type="ancillary"
10241026
)

imap_processing/lo/l1b/lo_l1b.py

Lines changed: 192 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -85,6 +85,11 @@ def lo_l1b(
8585
ds = l1b_histrates(sci_dependencies, anc_dependencies, attr_mgr_l1b)
8686
datasets_to_return.append(ds)
8787

88+
if descriptor == "derates":
89+
logger.info("\nProcessing IMAP-Lo L1B DE Rates...")
90+
ds = calculate_de_rates(sci_dependencies, anc_dependencies, attr_mgr_l1b)
91+
datasets_to_return.append(ds)
92+
8893
return datasets_to_return
8994

9095

@@ -1617,6 +1622,193 @@ def calculate_histogram_rates(
16171622
return l1b_histrates
16181623

16191624

1625+
def calculate_de_rates(
    sci_dependencies: dict,
    anc_dependencies: list,
    attr_mgr_l1b: ImapCdfAttributes,
) -> xr.Dataset:
    """
    Calculate direct event rates histograms.

    The histograms are per ASC (28 spins), so we need to
    regroup the individual DEs from the l1b_de dataset into
    their associated ASC and then bin them by ESA / spin bin.

    Parameters
    ----------
    sci_dependencies : dict
        The science dependencies for the derates product.
    anc_dependencies : list
        List of ancillary file paths.
    attr_mgr_l1b : ImapCdfAttributes
        Attribute manager used to get the L1B derates dataset attributes.

    Returns
    -------
    l1b_derates : xr.Dataset
        Dataset containing DE rates histograms.
    """
    l1b_de = sci_dependencies["imap_lo_l1b_de"]
    l1a_spin = sci_dependencies["imap_lo_l1a_spin"]
    l1b_nhk = sci_dependencies["imap_lo_l1b_nhk"]

    # Set the asc_start for each DE by removing the average spin cycle
    # which is a function of esa_step (see set_spin_cycle function).
    # spin_cycle is an average over esa steps and spins per asc, so finding
    # the "average" spin that an esa step occurred at.
    # NOTE: l1b_de["esa_step"] values are 1-based (1..7) here.
    asc_start = l1b_de["spin_cycle"] - (7 + (l1b_de["esa_step"] - 1) * 2)

    # Get unique ASC values and create a mapping from asc_start to index
    unique_asc, unique_idx, asc_idx = np.unique(
        asc_start.values, return_index=True, return_inverse=True
    )
    num_asc = len(unique_asc)

    # Pre-extract arrays for faster access (avoid repeated xarray indexing)
    esa_step_idx = l1b_de["esa_step"].values - 1  # Convert to 0-based index
    # Convert spin_bin from 0.1 degree bins to 6 degree bins for coarse histograms
    spin_bin = l1b_de["spin_bin"].values // 60
    species = l1b_de["species"].values
    coincidence_type = l1b_de["coincidence_type"].values

    if len(anc_dependencies) == 0:
        logger.warning("No ancillary dependencies provided, using linear stepping.")
        energy_step_mapping = np.arange(7)
    else:
        # An array mapping esa step index to esa level for resweeping
        energy_step_mapping = _get_esa_level_indices(asc_start, anc_dependencies)

    # exposure time shape: (num_asc, num_esa_steps)
    exposure_time = np.zeros((num_asc, 7), dtype=float)
    # exposure_time_6deg = 4 * avg_spin_per_asc / 60
    # 4 sweeps per ASC (28 / 7) in 60 bins
    asc_avg_spin_durations = 4 * l1b_de["avg_spin_durations"].data[unique_idx] / 60
    np.add.at(
        exposure_time,
        (slice(None), energy_step_mapping),
        asc_avg_spin_durations[:, np.newaxis],
    )

    # Create output arrays: (num_asc, num_esa_steps, num_6deg_spin_bins)
    output_shape = (num_asc, 7, 60)
    h_counts = np.zeros(output_shape)
    o_counts = np.zeros(output_shape)
    triple_counts = np.zeros(output_shape)
    double_counts = np.zeros(output_shape)

    # Species masks
    h_mask = species == "H"
    o_mask = species == "O"

    # Coincidence type masks
    triple_types = ["111111", "111100", "111000"]
    double_types = [
        "110100",
        "110000",
        "101101",
        "101100",
        "101000",
        "100100",
        "100101",
        "100000",
        "011100",
        "011000",
        "010100",
        "010101",
        "010000",
        "001100",
        "001101",
        "001000",
    ]
    triple_mask = np.isin(coincidence_type, triple_types)
    double_mask = np.isin(coincidence_type, double_types)

    # Vectorized histogramming using np.add.at with full index arrays
    np.add.at(h_counts, (asc_idx[h_mask], esa_step_idx[h_mask], spin_bin[h_mask]), 1)
    np.add.at(o_counts, (asc_idx[o_mask], esa_step_idx[o_mask], spin_bin[o_mask]), 1)
    np.add.at(
        triple_counts,
        (asc_idx[triple_mask], esa_step_idx[triple_mask], spin_bin[triple_mask]),
        1,
    )
    np.add.at(
        double_counts,
        (asc_idx[double_mask], esa_step_idx[double_mask], spin_bin[double_mask]),
        1,
    )

    ds = xr.Dataset(
        coords={
            # ASC start time in TTJ2000ns.
            # NOTE(review): assumes l1a_spin carries exactly one epoch per
            # ASC present in the DE data — confirm upstream guarantees this.
            "epoch": l1a_spin["epoch"],
            # 0-based ESA step index (DE esa_step values are 1..7).
            "esa_step": np.arange(7),
            "spin_bin": np.arange(60),
        },
    )
    ds["h_counts"] = xr.DataArray(
        h_counts,
        dims=["epoch", "esa_step", "spin_bin"],
    )
    ds["o_counts"] = xr.DataArray(
        o_counts,
        dims=["epoch", "esa_step", "spin_bin"],
    )
    ds["triple_counts"] = xr.DataArray(
        triple_counts,
        dims=["epoch", "esa_step", "spin_bin"],
    )
    ds["double_counts"] = xr.DataArray(
        double_counts,
        dims=["epoch", "esa_step", "spin_bin"],
    )
    ds["exposure_time"] = xr.DataArray(
        exposure_time,
        dims=["epoch", "esa_step"],
    )
    ds["h_rates"] = ds["h_counts"] / ds["exposure_time"]
    ds["o_rates"] = ds["o_counts"] / ds["exposure_time"]
    ds["triple_rates"] = ds["triple_counts"] / ds["exposure_time"]
    ds["double_rates"] = ds["double_counts"] / ds["exposure_time"]

    # Reconstruct the per-ESA-step spin cycle for each ASC, shape (N, 7).
    # This inverts the asc_start formula above. BUGFIX: the esa_step
    # coordinate is 0-based while the DE esa_step values are 1-based, so the
    # offset is esa_step * 2 — the previous (esa_step - 1) * 2 was off by
    # one ESA step (2 spins).
    unique_asc = xr.DataArray(unique_asc, dims=["epoch"])
    ds["spin_cycle"] = unique_asc + 7 + ds["esa_step"] * 2

    # TODO: Add badtimes
    ds["badtime"] = xr.zeros_like(ds["epoch"], dtype=int)

    # A single pivot angle for the product, taken nearest to the first epoch.
    pivot_angle = _get_nearest_pivot_angle(ds["epoch"].values[0], l1b_nhk)
    ds["pivot_angle"] = xr.DataArray([pivot_angle], dims=["pivot_angle"])

    pointing_start_met, pointing_end_met = get_pointing_times(
        ttj2000ns_to_met(ds["epoch"].values[0].item())
    )
    ds = set_esa_mode(pointing_start_met, pointing_end_met, anc_dependencies, ds)

    ds.attrs = attr_mgr_l1b.get_global_attributes("imap_lo_l1b_derates")
    ds["epoch"].attrs = attr_mgr_l1b.get_variable_attributes("epoch")

    return ds
1792+
1793+
def _get_nearest_pivot_angle(epoch: int, ds_nhk: xr.Dataset) -> float:
    """
    Get the nearest pivot angle for the given epoch from the NHK dataset.

    Parameters
    ----------
    epoch : int
        The epoch in TTJ2000ns format.
    ds_nhk : xr.Dataset
        The NHK dataset containing pivot angle information.

    Returns
    -------
    pivot_angle : float
        The nearest pivot angle for the given epoch.
    """
    # Nearest-neighbor lookup along the epoch coordinate of the pivot
    # counter, reduced to a Python scalar.
    nearest = ds_nhk["pcc_cumulative_cnt_pri"].sel(epoch=epoch, method="nearest")
    return nearest.item()
1810+
1811+
16201812
def _get_esa_level_indices(epochs: np.ndarray, anc_dependencies: list) -> np.ndarray:
16211813
"""
16221814
Get the ESA level indices (reswept indices) for the given epochs.

imap_processing/tests/lo/test_lo_l1b.py

Lines changed: 121 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -9,6 +9,7 @@
99
from imap_processing.cdf.imap_cdf_manager import ImapCdfAttributes
1010
from imap_processing.cdf.utils import load_cdf
1111
from imap_processing.lo.l1b.lo_l1b import (
12+
calculate_de_rates,
1213
calculate_histogram_rates,
1314
calculate_tof1_for_golden_triples,
1415
convert_start_end_acq_times,
@@ -1275,3 +1276,123 @@ def test_set_spin_cycle_from_spin_data_insufficient_spins():
12751276

12761277
# Verify spin_cycle shape matches filtered data
12771278
assert result["spin_cycle"].shape == (1, 7)
1279+
1280+
1281+
@patch(
    "imap_processing.lo.l1b.lo_l1b.get_pointing_times",
    return_value=(473389199, 473472001),
)
@patch(
    "imap_processing.lo.l1b.lo_l1b._get_esa_level_indices",
    return_value=np.arange(7),
)
def test_calculate_de_rates(
    mock_get_esa_level_indices, mock_get_pointing_times, attr_mgr_l1b, anc_dependencies
):
    """Test the calculate_de_rates function."""
    # MET times from the test sweep table (2025-01-01); one ASC is 28 spins
    # of 15 seconds each.
    met_start = 473389200
    asc_duration = 15 * 28
    asc_epochs = met_to_ttj2000ns([met_start, met_start + asc_duration])

    # TTJ2000ns epochs for each individual direct event.
    event_epochs = met_to_ttj2000ns(
        [
            met_start + 10,
            met_start + 20,
            met_start + 30,
            met_start + asc_duration + 10,
            met_start + asc_duration + 20,
        ]
    )

    # Minimal L1B DE dataset containing five direct events.
    de_dataset = xr.Dataset(
        {
            "spin_cycle": ("epoch", [7, 9, 11, 35, 37]),
            "esa_step": ("epoch", [1, 2, 3, 1, 2]),
            "spin_bin": ("epoch", [0, 120, 240, 60, 180]),
            "species": ("epoch", ["H", "O", "H", "H", "O"]),
            "coincidence_type": (
                "epoch",
                ["111111", "110100", "111000", "101000", "100100"],
            ),
            "avg_spin_durations": ("epoch", [15.0, 15.0, 15.0, 15.0, 15.0]),
        },
        coords={"epoch": event_epochs},
    )

    # Minimal L1A spin dataset with one record per ASC.
    spin_dataset = xr.Dataset(
        {
            "shcoarse": ("epoch", [met_start, met_start + asc_duration]),
            "num_completed": ("epoch", [28, 28]),
            "acq_start_sec": ("epoch", [met_start, met_start + asc_duration]),
            "acq_start_subsec": ("epoch", [0, 0]),
            "acq_end_sec": (
                "epoch",
                [met_start + asc_duration, met_start + 2 * asc_duration],
            ),
            "acq_end_subsec": ("epoch", [0, 0]),
        },
        coords={"epoch": asc_epochs},
    )

    # NHK dataset supplying the pivot angle counter.
    nhk_dataset = xr.Dataset(
        {"pcc_cumulative_cnt_pri": ("epoch", [45.0])},
        coords={"epoch": asc_epochs[:1]},
    )

    sci_dependencies = {
        "imap_lo_l1b_de": de_dataset,
        "imap_lo_l1a_spin": spin_dataset,
        "imap_lo_l1b_nhk": nhk_dataset,
    }

    result = calculate_de_rates(sci_dependencies, anc_dependencies, attr_mgr_l1b)

    assert result.attrs["Logical_source"] == "imap_lo_l1b_derates"
    for coord in ("epoch", "esa_step", "spin_bin"):
        assert coord in result.coords

    # Every expected data variable must be present in the product.
    expected_vars = [
        "h_counts",
        "o_counts",
        "triple_counts",
        "double_counts",
        "h_rates",
        "o_rates",
        "triple_rates",
        "double_rates",
        "exposure_time",
        "spin_cycle",
        "esa_mode",
    ]
    for var in expected_vars:
        assert var in result.data_vars

    # Shapes: (num_asc, num_esa_steps, num_spin_bins)
    assert result["h_counts"].shape == (2, 7, 60)
    assert result["o_counts"].shape == (2, 7, 60)
    assert result["exposure_time"].shape == (2, 7)

    # Spot-check counts against the hand-built events:
    # ASC 0 holds events at esa_step 1, 2, 3; ASC 1 at esa_step 1, 2.
    # H events are at indices 0, 2, 3; O events at indices 1, 4.
    # ASC 0, esa_step 1, spin_bin 0 (0 // 60)
    assert result["h_counts"][0, 0, 0] == 1
    # ASC 0, esa_step 3, spin_bin 4 (240 // 60)
    assert result["h_counts"][0, 2, 4] == 1
    # ASC 0, esa_step 2, spin_bin 2 (120 // 60)
    assert result["o_counts"][0, 1, 2] == 1

    # The pivot angle from the NHK dataset is carried through.
    assert result["pivot_angle"].values[0] == 45.0

    # The lo_l1b() entry point with descriptor="derates" produces the
    # same product.
    output_datasets = lo_l1b(sci_dependencies, anc_dependencies, descriptor="derates")
    assert len(output_datasets) == 1
    assert output_datasets[0].attrs["Logical_source"] == "imap_lo_l1b_derates"

0 commit comments

Comments
 (0)