Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 4 additions & 0 deletions .gitignore
Original file line number Diff line number Diff line change
@@ -1,3 +1,7 @@
# Active log configuration symlink (auto-created at startup)
python/pifinder_logconf.json
python/logconf_*.json

# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
Expand Down
2 changes: 1 addition & 1 deletion python/PiFinder/catalog_base.py
Original file line number Diff line number Diff line change
Expand Up @@ -171,7 +171,7 @@ def assign_virtual_object_ids(catalog, low_id: int) -> int:
class TimerMixin:
"""Provides timer functionality via composition"""

def __init__(self):
def __init__(self) -> None:
self.timer: Optional[threading.Timer] = None
self.is_running: bool = False
self.time_delay_seconds: Union[int, Callable[[], int]] = (
Expand Down
9 changes: 7 additions & 2 deletions python/PiFinder/catalog_imports/database.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,11 +4,16 @@
This module provides centralized access to database objects for all catalog loaders.
"""

from typing import Optional

from PiFinder.db.objects_db import ObjectsDatabase
from PiFinder.db.observations_db import ObservationsDatabase

from .catalog_import_utils import init_databases

# Global database objects shared across all catalog loaders
objects_db = None
observations_db = None
objects_db: Optional[ObjectsDatabase] = None
observations_db: Optional[ObservationsDatabase] = None


def init_shared_database():
Expand Down
5 changes: 4 additions & 1 deletion python/PiFinder/catalog_imports/harris_loader.py
Original file line number Diff line number Diff line change
Expand Up @@ -381,10 +381,13 @@ def create_cluster_object(entry: npt.NDArray, seq: int) -> Dict[str, Any]:
return result


def load_harris():
def load_harris() -> None:
logging.info("Loading Harris Globular Cluster catalog")
catalog: str = "Har"
obj_type: str = "Gb" # Globular Cluster

if objects_db is None:
raise RuntimeError("Database not initialized. Call init_shared_database() first.")
conn, _ = objects_db.get_conn_cursor()

# Enable bulk mode to prevent commits during insert operations
Expand Down
5 changes: 5 additions & 0 deletions python/PiFinder/main.py
Original file line number Diff line number Diff line change
Expand Up @@ -870,6 +870,11 @@ def main(
if __name__ == "__main__":
import sys

# Ensure the active log config symlink exists, defaulting to logconf_default.json
_logconf_link = Path("pifinder_logconf.json")
if not _logconf_link.exists():
_logconf_link.symlink_to("logconf_default.json")

debug_no_file_logs = "--debug-no-file-logs" in sys.argv
if debug_no_file_logs:
os.environ["PIFINDER_DEBUG_NO_FILE_LOGS"] = "1"
Expand Down
77 changes: 77 additions & 0 deletions python/PiFinder/server.py
Original file line number Diff line number Diff line change
Expand Up @@ -844,6 +844,83 @@ def get_component_levels():
logging.error(f"Error reading log configuration: {e}")
return {"status": "error", "message": str(e)}

@app.route("/logs/configs")
@auth_required
def list_log_configs():
    """Return all available logconf_*.json files with display names.

    Response shape: {"configs": [{"file", "name", "active"}, ...]},
    where "active" marks the config the pifinder_logconf.json symlink
    currently resolves to.
    """
    import glob

    link = "pifinder_logconf.json"
    # Resolve the active symlink target once; None when no link exists yet.
    active_target = os.path.realpath(link) if os.path.exists(link) else None

    def describe(cfg_path):
        # "logconf_foo_bar.json" -> display name "Foo Bar"
        stem = cfg_path[len("logconf_") : -len(".json")]
        return {
            "file": cfg_path,
            "name": stem.replace("_", " ").title(),
            "active": os.path.realpath(cfg_path) == active_target,
        }

    return {
        "configs": [describe(p) for p in sorted(glob.glob("logconf_*.json"))]
    }

@app.route("/logs/switch_config", method="post")
@auth_required
def switch_log_config():
    """Atomically repoint pifinder_logconf.json to the chosen config, then restart.

    The new symlink is created under a temporary name and swapped in with
    os.replace() so there is never a moment without a valid config link.
    """
    logconf_file = request.forms.get("logconf_file", "").strip()
    # Accept only bare logconf_*.json names.
    if (
        not logconf_file
        or not logconf_file.startswith("logconf_")
        or not logconf_file.endswith(".json")
    ):
        return {"status": "error", "message": "Invalid log config file name"}
    if not os.path.exists(logconf_file):
        return {
            "status": "error",
            "message": f"Log config file not found: {logconf_file}",
        }
    try:
        link = "pifinder_logconf.json"
        tmp = link + ".tmp"
        # Remove any stale temp link left behind by a previously failed
        # switch; os.symlink() would otherwise raise FileExistsError and
        # this endpoint would stay broken until the stray link is deleted
        # by hand. lexists() is used so a dangling symlink is also caught.
        if os.path.lexists(tmp):
            os.remove(tmp)
        os.symlink(logconf_file, tmp)
        os.replace(tmp, link)  # atomic swap on POSIX
        logger.info("Switched log config to %s", logconf_file)
    except Exception as e:
        logger.error("Failed to switch log config: %s", e)
        return {"status": "error", "message": str(e)}
    return template("restart_pifinder")

@app.route("/logs/upload_config", method="post")
@auth_required
def upload_log_config():
    """Upload a new logconf_*.json file.

    Expects a multipart form field named "config_file". The uploaded file
    must be named logconf_<name>.json and must not already exist on disk.
    """
    upload = request.files.get("config_file")
    if not upload:
        return {"status": "error", "message": "No file provided"}
    # NOTE(review): presumably bottle's FileUpload.filename sanitizes path
    # separators out of the client-supplied name — verify, since the name
    # is used directly as a filesystem path below.
    filename = upload.filename
    if not filename.startswith("logconf_") or not filename.endswith(".json"):
        return {
            "status": "error",
            "message": "File must be named logconf_<name>.json",
        }
    if os.path.exists(filename):
        # Fix: interpolate the actual filename; the message previously
        # contained no placeholder, so the user never saw which file collided.
        return {
            "status": "error",
            "message": f"File already exists: {filename}",
        }
    try:
        # overwrite=False guards the check-then-save race as well.
        upload.save(filename, overwrite=False)
        logger.info("Uploaded log config: %s", filename)
        return {"status": "ok", "file": filename}
    except Exception as e:
        logger.error("Failed to save uploaded log config: %s", e)
        return {"status": "error", "message": str(e)}

@app.route("/logs/download")
@auth_required
def download_logs():
Expand Down
3 changes: 2 additions & 1 deletion python/PiFinder/sqm/save_sweep_metadata.py
Original file line number Diff line number Diff line change
Expand Up @@ -80,7 +80,8 @@ def save_sweep_metadata(
# Noise floor estimation details (from NoiseFloorEstimator)
if noise_floor_details is not None:
metadata["noise_floor_estimator"] = {
k: v for k, v in noise_floor_details.items()
k: v
for k, v in noise_floor_details.items()
if k != "request_zero_sec_sample" # Exclude internal flags
}
if camera_type is not None:
Expand Down
17 changes: 10 additions & 7 deletions python/PiFinder/sqm/sqm.py
Original file line number Diff line number Diff line change
Expand Up @@ -65,7 +65,9 @@ def _load_calibration(self) -> bool:
)

if not calibration_file.exists():
logger.debug(f"No calibration file found at {calibration_file}, using defaults")
logger.debug(
f"No calibration file found at {calibration_file}, using defaults"
)
return False

try:
Expand Down Expand Up @@ -193,7 +195,9 @@ def _measure_star_flux_with_local_background(

# Check for saturation in aperture
aperture_pixels = image_patch[aperture_mask]
max_aperture_pixel = np.max(aperture_pixels) if len(aperture_pixels) > 0 else 0
max_aperture_pixel = (
np.max(aperture_pixels) if len(aperture_pixels) > 0 else 0
)

if max_aperture_pixel >= saturation_threshold:
# Mark saturated star with flux=-1 to be excluded from mzero calculation
Expand All @@ -202,7 +206,6 @@ def _measure_star_flux_with_local_background(
n_saturated += 1
continue


# Total flux in aperture (includes background)
total_flux = np.sum(aperture_pixels)

Expand Down Expand Up @@ -263,9 +266,7 @@ def _calculate_mzero(
# Flux-weighted mean: brighter stars contribute more
valid_mzeros_arr = np.array(valid_mzeros)
valid_fluxes_arr = np.array(valid_fluxes)
weighted_mzero = float(
np.average(valid_mzeros_arr, weights=valid_fluxes_arr)
)
weighted_mzero = float(np.average(valid_mzeros_arr, weights=valid_fluxes_arr))

return weighted_mzero, mzeros

Expand Down Expand Up @@ -540,7 +541,9 @@ def calculate(
# Following ASTAP: zenith is reference point where extinction = 0
# Only ADDITIONAL extinction below zenith is added: 0.28 * (airmass - 1)
# This allows comparing measurements at different altitudes
extinction_for_altitude = self._atmospheric_extinction(altitude_deg) # 0.28*(airmass-1)
extinction_for_altitude = self._atmospheric_extinction(
altitude_deg
) # 0.28*(airmass-1)

# Main SQM value: no extinction correction (raw measurement)
sqm_final = sqm_uncorrected
Expand Down
2 changes: 1 addition & 1 deletion python/PiFinder/state.py
Original file line number Diff line number Diff line change
Expand Up @@ -238,7 +238,7 @@ def from_json(cls, json_str):


class SharedStateObj:
def __init__(self):
def __init__(self) -> None:
self.__power_state = 1 # 0 = sleep state, 1 = awake state
# self.__solve_state
# None = No solve attempted yet
Expand Down
2 changes: 1 addition & 1 deletion python/PiFinder/ui/radec_entry.py
Original file line number Diff line number Diff line change
Expand Up @@ -586,7 +586,7 @@ class LayoutConfig:
class UIRADecEntry(UIModule):
__title__ = _("RA/DEC Entry")

def __init__(self, *args, **kwargs):
def __init__(self, *args, **kwargs) -> None:
super().__init__(*args, **kwargs)

self.callback = self.item_definition.get("callback")
Expand Down
4 changes: 3 additions & 1 deletion python/PiFinder/ui/sqm.py
Original file line number Diff line number Diff line change
Expand Up @@ -262,7 +262,9 @@ def _is_calibrated(self) -> bool:
camera_type = self.shared_state.camera_type()
camera_type_processed = f"{camera_type}_processed"
calibration_file = (
Path.home() / "PiFinder_data" / f"sqm_calibration_{camera_type_processed}.json"
Path.home()
/ "PiFinder_data"
/ f"sqm_calibration_{camera_type_processed}.json"
)
return calibration_file.exists()

Expand Down
6 changes: 4 additions & 2 deletions python/PiFinder/ui/sqm_calibration.py
Original file line number Diff line number Diff line change
Expand Up @@ -52,7 +52,7 @@ class UISQMCalibration(UIModule):
__title__ = "SQM CAL"
__help_name__ = ""

def __init__(self, *args, **kwargs):
def __init__(self, *args, **kwargs) -> None:
super().__init__(*args, **kwargs)

# Wizard state machine
Expand Down Expand Up @@ -701,7 +701,9 @@ def _analyze_calibration(self):
# 2. Compute read noise using temporal variance (not spatial)
# Spatial std includes fixed pattern noise (PRNU), which is wrong.
# Temporal variance at each pixel measures true read noise.
temporal_variance = np.var(bias_stack, axis=0) # variance across frames per pixel
temporal_variance = np.var(
bias_stack, axis=0
) # variance across frames per pixel
self.read_noise = float(np.sqrt(np.mean(temporal_variance)))

# 3. Compute dark current rate
Expand Down
16 changes: 11 additions & 5 deletions python/PiFinder/ui/sqm_sweep.py
Original file line number Diff line number Diff line change
Expand Up @@ -288,7 +288,8 @@ def _add_detailed_metadata(self):
# Find the sweep directory
captures_dir = Path(utils.data_dir) / "captures"
sweep_dirs = [
d for d in captures_dir.glob("sweep_*")
d
for d in captures_dir.glob("sweep_*")
if d.stat().st_ctime >= (self.start_time - 1)
]
if not sweep_dirs:
Expand All @@ -313,7 +314,8 @@ def _add_detailed_metadata(self):
"pifinder_value": sqm_state.value,
"reference_value": self.reference_sqm,
"difference": (self.reference_sqm - sqm_state.value)
if self.reference_sqm and sqm_state.value else None,
if self.reference_sqm and sqm_state.value
else None,
"source": sqm_state.source,
}

Expand All @@ -327,7 +329,8 @@ def _add_detailed_metadata(self):
if image_metadata:
metadata["image"] = {
"exposure_us": image_metadata.get("exposure_time"),
"exposure_sec": image_metadata.get("exposure_time", 0) / 1_000_000.0,
"exposure_sec": image_metadata.get("exposure_time", 0)
/ 1_000_000.0,
"gain": image_metadata.get("gain"),
"imu_delta": image_metadata.get("imu_delta"),
}
Expand All @@ -348,8 +351,11 @@ def _add_detailed_metadata(self):
# Add NoiseFloorEstimator output
camera_type = self.shared_state.camera_type()
camera_type_processed = f"{camera_type}_processed"
exposure_sec = (image_metadata.get("exposure_time", 500000) / 1_000_000.0
if image_metadata else 0.5)
exposure_sec = (
image_metadata.get("exposure_time", 500000) / 1_000_000.0
if image_metadata
else 0.5
)

if self.camera_image is not None:
image_array = np.array(self.camera_image.convert("L"))
Expand Down
11 changes: 5 additions & 6 deletions python/PiFinder/ui/textentry.py
Original file line number Diff line number Diff line change
@@ -1,11 +1,14 @@
from PIL import Image, ImageDraw
from PiFinder.composite_object import CompositeObject
from PiFinder.ui.base import UIModule
from PiFinder.db.objects_db import ObjectsDatabase
from PiFinder.ui.object_list import UIObjectList
from PiFinder.ui.ui_utils import format_number
import time
import threading
from typing import Any, TYPE_CHECKING
import logging


if TYPE_CHECKING:

Expand Down Expand Up @@ -80,7 +83,7 @@ def __iter__(self):


class UITextEntry(UIModule):
def __init__(self, *args, **kwargs):
def __init__(self, *args, **kwargs) -> None:
super().__init__(*args, **kwargs)

# Get mode from item_definition
Expand All @@ -105,7 +108,7 @@ def __init__(self, *args, **kwargs):
self.KEYPRESS_TIMEOUT = 1
self.last_key_press_time = 0
self.char_index = 0
self.search_results = []
self.search_results: list[CompositeObject] = []
self.search_results_len_str = "0"
self.show_keypad = True
self.keys = KeyPad()
Expand Down Expand Up @@ -231,8 +234,6 @@ def update_search_results(self):
Debounced async search - waits 250ms after last keystroke before searching.
Only updates search results in search mode.
"""
import logging

logger = logging.getLogger("TextEntry")

if self.text_entry_mode:
Expand Down Expand Up @@ -271,8 +272,6 @@ def _perform_search(self, search_text, search_version):
Perform the actual search in background thread.
Only updates results if this search version is still current.
"""
import logging

logger = logging.getLogger("TextEntry")

try:
Expand Down
Loading
Loading