|
| 1 | +"""MAGIC-specific `Detector` class(es).""" |
| 2 | + |
| 3 | +from typing import Dict, Callable, Any |
| 4 | +import torch |
| 5 | +import os |
| 6 | + |
| 7 | +from graphnet.models.detector.detector import Detector |
| 8 | +from graphnet.constants import MAGIC_GEOMETRY_TABLE_DIR |
| 9 | + |
| 10 | + |
class MAGIC(Detector):
    """`Detector` class for the MAGIC telescopes."""

    geometry_table_path = os.path.join(
        MAGIC_GEOMETRY_TABLE_DIR, "magic.parquet"
    )

    # By default, treat the telescope ID as a spatial-like z-coordinate
    xyz = ["x_cam", "y_cam", "tel_id"]

    def __init__(self, use_charge_epsilon: bool = True, **kwargs: Any) -> None:
        """Construct detector for the MAGIC telescopes.

        Args:
            use_charge_epsilon: Whether to add a small epsilon value to charge
                before taking log10 to avoid log(0). Defaults to True (uses
                1e-6). Set to False if data is guaranteed to have zero-padding.
            **kwargs: Forwarded unchanged to the `Detector` base class.
        """
        super().__init__(**kwargs)
        # 0.0 keeps log10 exact for strictly positive charges; 1e-6 guards
        # against -inf when a pixel's charge is exactly zero.
        self._charge_epsilon = 1e-6 if use_charge_epsilon else 0.0

    def feature_map(self) -> Dict[str, Callable]:
        """Map standardization functions to each dimension.

        Note: tel_id can take the integer values 0 or 1, where:
            - 0 corresponds to MAGIC-I
            - 1 corresponds to MAGIC-II
        """
        feature_map = {
            "x_cam": self._xy,
            "y_cam": self._xy,
            "tel_id": self._identity,
            "time": self._time,
            "charge": self._charge,
            "telescope_phi": self._identity,
            "telescope_theta": self._identity,
        }
        return feature_map

    def _xy(self, x: torch.Tensor) -> torch.Tensor:
        """Scale camera coordinates by a fixed factor.

        NOTE(review): 28.5 presumably reflects the camera's spatial extent
        (units unclear from here) -- confirm against the geometry table.
        """
        return x / 28.5

    def _time(self, x: torch.Tensor) -> torch.Tensor:
        """Scale time based on the average time of arrival.

        Min-max scales with a fixed window of [-30, 60]; values outside that
        window map outside [0, 1] rather than being clipped.
        """
        t_min = -30
        t_max = 60
        return (x - t_min) / (t_max - t_min)

    def _charge(self, x: torch.Tensor) -> torch.Tensor:
        """Return log10 of charge, adding a small epsilon to avoid log(0)."""
        return torch.log10(x + self._charge_epsilon)
0 commit comments