Commit 882be30: "comment fixes"
1 parent 7a0c7e4

2 files changed: 35 additions & 64 deletions

imap_processing/lo/l1b/lo_l1b.py (35 additions & 59 deletions)
@@ -191,6 +191,7 @@

 # -------------------------------------------------------------------
 DE_CLOCK_TICK_S = 4.096e-3  # seconds per DE clock tick
+NUM_ESA_STEPS = 7


 def lo_l1b(
@@ -2545,32 +2546,34 @@ def l1b_bgrates_and_goodtimes(
     interval_nom = 420 * cycle_count  # seconds
     exposure = interval_nom * 0.5  # 50% duty cycle

-    h_intensity = np.sum(l1b_histrates["h_counts"][:, 0:7, 20:50], axis=(1, 2))
-    o_intensity = np.sum(l1b_histrates["o_counts"][:, 0:7, 20:50], axis=(1, 2))
-    epochs_ttj2000 = l1b_histrates["epoch"][:]
+    h_intensity = np.sum(
+        l1b_histrates["h_counts"][:, 0:NUM_ESA_STEPS, 20:50], axis=(1, 2)
+    )
+    o_intensity = np.sum(
+        l1b_histrates["o_counts"][:, 0:NUM_ESA_STEPS, 20:50], axis=(1, 2)
+    )

     # Use proper SPICE-based time conversion with current kernels
     # Note: The reference script adds +9 seconds because they use an
     # "older time kernel (pre 2012)"
     # We use current SPICE kernels, so we should NOT add that offset
-    shcoarse = ttj2000ns_to_met(epochs_ttj2000)
-    # Convert to plain numpy array for easier indexing
-    if hasattr(shcoarse, "values"):
-        shcoarse = shcoarse.values
-    shcoarse = np.asarray(shcoarse, dtype=np.float64)
-    # shcoarse = epochs_ttj2000 / 1e9  # Convert from ns to s MET
+    met = ttj2000ns_to_met(l1b_histrates["epoch"].values)

     max_row_count = np.shape(h_intensity)[0]
     epochs = l1b_histrates["epoch"].values
     epochs = xr.DataArray(epochs, dims=["epoch"])
     goodtimes = xr.DataArray(np.zeros((max_row_count, 2), dtype=np.int64))
-    h_background_rate = xr.DataArray(np.zeros((max_row_count, 7), dtype=np.float32))
+    h_background_rate = xr.DataArray(
+        np.zeros((max_row_count, NUM_ESA_STEPS), dtype=np.float32)
+    )
     h_background_rate_variance = xr.DataArray(
-        np.zeros((max_row_count, 7), dtype=np.float32)
+        np.zeros((max_row_count, NUM_ESA_STEPS), dtype=np.float32)
     )
-    o_background_rate = xr.DataArray(np.zeros((max_row_count, 7), dtype=np.float32))
+    o_background_rate = xr.DataArray(
+        np.zeros((max_row_count, NUM_ESA_STEPS), dtype=np.float32)
+    )
     o_background_rate_variance = xr.DataArray(
-        np.zeros((max_row_count, 7), dtype=np.float32)
+        np.zeros((max_row_count, NUM_ESA_STEPS), dtype=np.float32)
     )

     # Walk through the histrate data in chunks of cycle_count (10)
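For context, a minimal sketch of what the rewritten summation does, using synthetic data. The array layout (epoch, ESA step, spin bin) and the meaning of the 20:50 slice are assumptions inferred from the surrounding code, not confirmed by this diff:

import numpy as np

NUM_ESA_STEPS = 7

# Hypothetical histogram-rate counts: 5 epochs x 7 ESA steps x 60 bins.
rng = np.random.default_rng(0)
h_counts = rng.poisson(2.0, size=(5, NUM_ESA_STEPS, 60))

# Collapse the ESA steps and the selected bin range into one count per
# epoch, exactly as h_intensity/o_intensity are built above.
h_intensity = np.sum(h_counts[:, 0:NUM_ESA_STEPS, 20:50], axis=(1, 2))
print(h_intensity.shape)  # (5,)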
@@ -2589,13 +2592,13 @@ def l1b_bgrates_and_goodtimes(
     for index in range(0, max_row_count, cycle_count):
         # Calculate the interval for this chunk
        if (index + cycle_count - 1) < max_row_count:
-            interval = shcoarse[index + cycle_count - 1] - shcoarse[index]
+            interval = met[index + cycle_count - 1] - met[index]
         else:
             interval = interval_nom

         logger.debug(
-            f"\n Index {index}: shcoarse[{index}]="
-            f"{shcoarse[index] if index < max_row_count else 'N/A'}, "
+            f"\n Index {index}: met[{index}]="
+            f"{met[index] if index < max_row_count else 'N/A'}, "
             f"interval={interval}, begin={begin}"
         )

@@ -2607,7 +2610,7 @@
         )
         # If we were tracking a goodtime interval, close it before the gap
         if begin > 0.0:
-            end = shcoarse[index - 1]
+            end = met[index - 1]
             logger.debug(f" Closing interval before gap: {begin} -> {end}")

             h_bg_rate = sum_h_bg_counts / sum_h_bg_exposure
@@ -2635,42 +2638,15 @@
                 f" STORED interval {row_count} (large interval): "
                 f"{int(begin - 620)} -> {int(end + 320)} (raw: {begin} -> {end})"
             )
-            h_background_rate[row_count, :] = [
-                h_bg_rate,
-                h_bg_rate,
-                h_bg_rate,
-                h_bg_rate,
-                h_bg_rate,
-                h_bg_rate,
-                h_bg_rate,
-            ]
-            h_background_rate_variance[row_count, :] = [
-                h_bg_rate_variance,
-                h_bg_rate_variance,
-                h_bg_rate_variance,
-                h_bg_rate_variance,
-                h_bg_rate_variance,
-                h_bg_rate_variance,
-                h_bg_rate_variance,
-            ]
-            o_background_rate[row_count, :] = [
-                o_bg_rate,
-                o_bg_rate,
-                o_bg_rate,
-                o_bg_rate,
-                o_bg_rate,
-                o_bg_rate,
-                o_bg_rate,
-            ]
-            o_background_rate_variance[row_count, :] = [
-                o_bg_rate_variance,
-                o_bg_rate_variance,
-                o_bg_rate_variance,
-                o_bg_rate_variance,
-                o_bg_rate_variance,
-                o_bg_rate_variance,
-                o_bg_rate_variance,
-            ]
+            h_background_rate[row_count, :] = np.full(NUM_ESA_STEPS, h_bg_rate)
+            h_background_rate_variance[row_count, :] = (
+                np.full(
+                    NUM_ESA_STEPS, h_bg_rate_variance
+                ))
+            o_background_rate[row_count, :] = np.full(NUM_ESA_STEPS, o_bg_rate)
+            o_background_rate_variance[row_count, :] = np.full(
+                NUM_ESA_STEPS, o_bg_rate_variance
+            )

             row_count += 1
             begin = 0.0
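The np.full replacement above is behavior-preserving: filling a length-7 row with one scalar yields exactly the old hand-written list. A quick standalone check (values are illustrative):

import numpy as np

NUM_ESA_STEPS = 7
h_bg_rate = 0.125

# New style vs. the old seven-element literal list: identical contents.
assert np.array_equal(
    np.full(NUM_ESA_STEPS, h_bg_rate),
    np.array([h_bg_rate] * NUM_ESA_STEPS),
)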
@@ -2682,14 +2658,14 @@ def l1b_bgrates_and_goodtimes(
         # Check for time gap from previous chunk
         delta_time = 0.0
         if index > 0:
-            delta_time = shcoarse[index] - (shcoarse[index - 1] + 420)
+            delta_time = met[index] - (met[index - 1] + 420)
             logger.debug(
                 f" Delta time from previous: {delta_time} (max: {delay_max})"
             )

         # If there's a gap and we have an active interval, close it
         if (delta_time > delay_max) & (begin > 0.0):
-            end = shcoarse[index - 1]
+            end = met[index - 1]
             logger.debug(f" Closing interval due to time gap: {begin} -> {end}")

             h_bg_rate = sum_h_bg_counts / sum_h_bg_exposure
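The rate and its uncertainty computed when an interval closes follow simple Poisson counting statistics: rate = N / exposure, and (judging by the formula in the diff) the quantity stored as a "variance" is really the one-sigma rate uncertainty sqrt(N) / exposure. A worked sketch with assumed numbers:

import numpy as np

# Assumed values: 900 counts accumulated over three 10-cycle chunks,
# each with 2100 s of exposure (420 s cadence x 10 cycles x 50% duty).
sum_h_bg_counts = 900.0
sum_h_bg_exposure = 3 * 2100.0

h_bg_rate = sum_h_bg_counts / sum_h_bg_exposure                    # ~0.1429 counts/s
h_bg_rate_variance = np.sqrt(sum_h_bg_counts) / sum_h_bg_exposure  # ~0.00476 counts/s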
@@ -2771,7 +2747,7 @@ def l1b_bgrates_and_goodtimes(
         # If rate is below threshold, accumulate for background
         if antiram_h_rate < h_bg_rate_nom:
             if begin == 0.0:
-                begin = shcoarse[index]
+                begin = met[index]
                 logger.debug(f" Starting new interval at {begin}")

             sum_h_bg_counts = sum_h_bg_counts + antiram_h_counts
@@ -2781,7 +2757,7 @@ def l1b_bgrates_and_goodtimes(
         # If rate exceeds threshold, close the interval if one is active
         if antiram_h_rate >= h_bg_rate_nom:
             if begin > 0.0:
-                end = shcoarse[index - 1]
+                end = met[index - 1]
                 logger.debug(
                     f" Closing interval due to rate threshold: {begin} -> {end}"
                 )
@@ -2854,7 +2830,7 @@ def l1b_bgrates_and_goodtimes(

     # Handle the final interval if one is still open
     if (end == 0.0) & (begin > 0.0):
-        end = shcoarse[max_row_count - 1]
+        end = met[max_row_count - 1]
     if end > begin:
         h_bg_rate = sum_h_bg_counts / sum_h_bg_exposure
         h_bg_rate_variance = np.sqrt(sum_h_bg_counts) / sum_h_bg_exposure
@@ -2994,7 +2970,7 @@ def l1b_bgrates_and_goodtimes(
     # For now, set all ESA flags to 1 (good) since we don't have
     # an algorithm for this yet
     l1b_backgrounds_and_goodtimes_ds["esa_goodtime_flags"] = xr.DataArray(
-        data=np.zeros((row_count, 7), dtype=int) + 1,
+        data=np.zeros((row_count, NUM_ESA_STEPS), dtype=int) + 1,
         name="E-step",
         dims=["met", "esa_step"],
         # attrs=attr_mgr_l1b.get_variable_attributes("esa_goodtime_flags"),
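Taken together, the begin/end bookkeeping these renames touch implements a simple interval tracker: open an interval while the anti-ram rate stays below threshold, close it on a threshold crossing or a time gap. A condensed, hypothetical sketch of that pattern follows; find_goodtimes, its parameters, and the sample data are illustrative, not the module's actual API:

import numpy as np

def find_goodtimes(met, rates, threshold, delay_max=60.0, cadence=420.0):
    # Sketch only. begin == 0.0 doubles as the "no interval open"
    # sentinel, mirroring the code above (MET values assumed positive).
    intervals = []
    begin = 0.0
    for i, rate in enumerate(rates):
        gap = i > 0 and (met[i] - (met[i - 1] + cadence)) > delay_max
        if (rate >= threshold or gap) and begin > 0.0:
            intervals.append((begin, met[i - 1]))  # close on crossing/gap
            begin = 0.0
        if rate < threshold and begin == 0.0:
            begin = met[i]  # open a new goodtime interval
    if begin > 0.0:
        intervals.append((begin, met[-1]))  # close any still-open interval
    return intervals

met = 1000.0 + np.arange(8) * 420.0
rates = np.array([0.1, 0.1, 5.0, 0.1, 0.1, 0.1, 5.0, 0.1])
print(find_goodtimes(met, rates, threshold=1.0))
# [(1000.0, 1420.0), (2260.0, 3100.0), (3940.0, 3940.0)]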

imap_processing/tests/lo/test_lo_l1b.py (0 additions & 5 deletions)
@@ -2877,11 +2877,6 @@ def test_l1b_bgrates_and_goodtimes_large_interval_with_active_tracking(attr_mgr_
         met_spacing,
     )

-    # But make the interval within this chunk too large by spacing them far apart
-    # Actually, the interval is calculated as:
-    # shcoarse[index + cycle_count - 1] - shcoarse[index]
-    # So we need the last epoch of the chunk to be far from the first
-    # Let's adjust: keep first 9 epochs close, but make the 10th epoch very far
     met_times_gap_chunk_adjusted = met_times_gap_chunk.copy()
     met_times_gap_chunk_adjusted[-1] = met_times_gap_chunk[0] + large_gap

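For reference, the adjusted MET array makes the chunk's internal span, met[index + cycle_count - 1] - met[index], far exceed the nominal 4200 s, which is what drives the large-interval branch under test. A small standalone illustration; the values here are assumed, not the test's actual constants:

import numpy as np

cycle_count = 10
met_spacing = 420.0
large_gap = 1.0e6  # assumed; the real test defines its own value

met_times = 1000.0 + np.arange(cycle_count) * met_spacing
met_times[-1] = met_times[0] + large_gap  # push the 10th epoch far out

interval = met_times[cycle_count - 1] - met_times[0]
print(interval)  # 1000000.0, far larger than the nominal 4200.0 s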