From 35dea3aea03d660c73eb8467e58b5e030e2db813 Mon Sep 17 00:00:00 2001 From: Jens Scheidtmann Date: Tue, 31 Mar 2026 08:37:20 +0200 Subject: [PATCH 1/7] webserver: Upload and choose log configuration --- .gitignore | 4 + python/PiFinder/main.py | 5 ++ python/PiFinder/server.py | 58 ++++++++++++++ python/logconf_debug.json | 151 +++++++++++++++++++++++++++++++++++ python/pifinder_logconf.json | 1 - python/views/logs.tpl | 143 +++++++++++++-------------------- 6 files changed, 273 insertions(+), 89 deletions(-) create mode 100644 python/logconf_debug.json delete mode 120000 python/pifinder_logconf.json diff --git a/.gitignore b/.gitignore index 8c26256e2..72089c9df 100644 --- a/.gitignore +++ b/.gitignore @@ -1,3 +1,7 @@ +# Active log configuration symlink (auto-created at startup) +python/pifinder_logconf.json +python/logconf_*.json + # Byte-compiled / optimized / DLL files __pycache__/ *.py[cod] diff --git a/python/PiFinder/main.py b/python/PiFinder/main.py index 119c9b77f..073a894f4 100644 --- a/python/PiFinder/main.py +++ b/python/PiFinder/main.py @@ -870,6 +870,11 @@ def main( if __name__ == "__main__": import sys + # Ensure the active log config symlink exists, defaulting to logconf_default.json + _logconf_link = Path("pifinder_logconf.json") + if not _logconf_link.exists(): + _logconf_link.symlink_to("logconf_default.json") + debug_no_file_logs = "--debug-no-file-logs" in sys.argv if debug_no_file_logs: os.environ["PIFINDER_DEBUG_NO_FILE_LOGS"] = "1" diff --git a/python/PiFinder/server.py b/python/PiFinder/server.py index 7816f4e11..d3ea7cfab 100644 --- a/python/PiFinder/server.py +++ b/python/PiFinder/server.py @@ -844,6 +844,64 @@ def get_component_levels(): logging.error(f"Error reading log configuration: {e}") return {"status": "error", "message": str(e)} + @app.route("/logs/configs") + @auth_required + def list_log_configs(): + """Return all available logconf_*.json files with display names.""" + import glob + + configs = [] + active = 
os.path.realpath("pifinder_logconf.json") if os.path.exists("pifinder_logconf.json") else None + for path in sorted(glob.glob("logconf_*.json")): + stem = path[len("logconf_"):-len(".json")] + display = stem.replace("_", " ").title() + configs.append({ + "file": path, + "name": display, + "active": os.path.realpath(path) == active, + }) + return {"configs": configs} + + @app.route("/logs/switch_config", method="post") + @auth_required + def switch_log_config(): + """Atomically repoint pifinder_logconf.json to the chosen config, then restart.""" + logconf_file = request.forms.get("logconf_file", "").strip() + if not logconf_file or not logconf_file.startswith("logconf_") or not logconf_file.endswith(".json"): + return {"status": "error", "message": "Invalid log config file name"} + if not os.path.exists(logconf_file): + return {"status": "error", "message": f"Log config file not found: {logconf_file}"} + try: + link = "pifinder_logconf.json" + tmp = link + ".tmp" + os.symlink(logconf_file, tmp) + os.replace(tmp, link) + logger.info("Switched log config to %s", logconf_file) + except Exception as e: + logger.error("Failed to switch log config: %s", e) + return {"status": "error", "message": str(e)} + return template("restart_pifinder") + + @app.route("/logs/upload_config", method="post") + @auth_required + def upload_log_config(): + """Upload a new logconf_*.json file.""" + upload = request.files.get("config_file") + if not upload: + return {"status": "error", "message": "No file provided"} + filename = upload.filename + if not filename.startswith("logconf_") or not filename.endswith(".json"): + return {"status": "error", "message": "File must be named logconf_.json"} + if os.path.exists(filename): + return {"status": "error", "message": f"File already exists: {filename}"} + try: + upload.save(filename, overwrite=False) + logger.info("Uploaded log config: %s", filename) + return {"status": "ok", "file": filename} + except Exception as e: + logger.error("Failed to 
save uploaded log config: %s", e) + return {"status": "error", "message": str(e)} + @app.route("/logs/download") @auth_required def download_logs(): diff --git a/python/logconf_debug.json b/python/logconf_debug.json new file mode 100644 index 000000000..bb47960c2 --- /dev/null +++ b/python/logconf_debug.json @@ -0,0 +1,151 @@ +{ + // Note that the JSON5 library that we use for configuration supports comments in JSON files. + "version": 1, + "disable_existing_loggers": false, // THIS MUST BE FALSE for logging to work + "formatters": { + "default": { + "format": "%(asctime)s %(processName)s-%(name)s:%(levelname)s:%(message)s" + } + }, + "handlers": { + "console": { + "class": "logging.StreamHandler", + "level": "DEBUG", + "formatter": "default", + "stream": "ext://sys.stdout" + }, + // Used for GRPC, avoids a "No handlers can be found" warning. + "null": { + "class": "logging.NullHandler", + "level": "ERROR" + } + }, + "loggers": { + + ///////////////////////////////////////////////////////////////// + ////// root logger + // + "": { + "level": "DEBUG", + "handlers": ["console"] + // The file handler is added automatically by code + }, + + ///////////////////////////////////////////////////////////////// + ////// State shared between Subsystems + // + "SharedState": { + "level": "DEBUG" + }, + + ///////////////////////////////////////////////////////////////// + ////// User Interface + // + "UI": { + "level": "DEBUG" + }, + + ///////////////////////////////////////////////////////////////// + ////// Camera Subsystem + // + "Camera": { + "level": "DEBUG" + }, + + ///////////////////////////////////////////////////////////////// + ////// Platesolver Subsystem + // + "Solver": { + "level": "DEBUG" + }, + + ///////////////////////////////////////////////////////////////// + ////// GPS Subsystem + // + "GPS": { + "level": "DEBUG" + }, + "GPS.parser": { + "level": "DEBUG" + }, + + ///////////////////////////////////////////////////////////////// + ////// Catalog Subsystem 
+ // + "Catalog": { + "level": "DEBUG" + }, + + ///////////////////////////////////////////////////////////////// + ////// Database + // + "Database": { + "level": "DEBUG" + }, + + ///////////////////////////////////////////////////////////////// + ////// IMU Subsystem + // + "IMU": { + "level": "DEBUG" + }, + + ///////////////////////////////////////////////////////////////// + ////// Keyboard Subsystem + // + "Keyboard": { + "level": "DEBUG" + }, + + ///////////////////////////////////////////////////////////////// + ////// Observations Subsystem + // + "Observation": { + "level": "DEBUG" + }, + + ///////////////////////////////////////////////////////////////// + ////// Web Server Subsystem + // + "Server": { + "level": "DEBUG" + }, + + ///////////////////////////////////////////////////////////////// + ////// Pos Server Subsystem (SkySafari LX200 Interface) + // + "PosServer": { + "level": "DEBUG" + }, + + ///////////////////////////////////////////////////////////////// + ////// Utils Libraries + // + "SysUtils": { + "level": "DEBUG" + }, + "Utils": { + "level": "DEBUG" + }, + + ///////////////////////////////////////////////////////////////// + ////// Third party Libraries + // + // Keep these suppressed to avoid noise. + // + "PIL.PngImagePlugin": { + "level": "WARNING" + }, + "tetra3.Tetra3": { + "level": "WARNING" + }, + "picamera2.picamera2": { + "level": "WARNING" + }, + "grpc": { + "level": "ERROR", + "propagate": false, + "handlers": ["null"] + }, + } +} diff --git a/python/pifinder_logconf.json b/python/pifinder_logconf.json deleted file mode 120000 index cb17ea4b3..000000000 --- a/python/pifinder_logconf.json +++ /dev/null @@ -1 +0,0 @@ -logconf_default.json \ No newline at end of file diff --git a/python/views/logs.tpl b/python/views/logs.tpl index d52b0b648..64a997752 100644 --- a/python/views/logs.tpl +++ b/python/views/logs.tpl @@ -122,20 +122,12 @@ - - - +
@@ -325,94 +317,69 @@ document.getElementById('copyButton').addEventListener('click', function() { }); }); -// Log level management -function updateComponentLevels() { - fetch('/logs/components') +// Log configuration management +function loadLogConfigs() { + fetch('/logs/configs') .then(response => response.json()) .then(data => { - const componentSelect = document.getElementById('componentSelect'); - componentSelect.innerHTML = ''; - - // Sort components alphabetically - const sortedComponents = Object.entries(data.components).sort(([a], [b]) => a.localeCompare(b)); - - sortedComponents.forEach(([component, levels]) => { + const configSelect = document.getElementById('configSelect'); + configSelect.innerHTML = ''; + data.configs.forEach(cfg => { const option = document.createElement('option'); - option.value = component; - option.textContent = component; - componentSelect.appendChild(option); + option.value = cfg.file; + option.textContent = cfg.name; + if (cfg.active) option.selected = true; + configSelect.appendChild(option); }); }) - .catch(error => console.error('Error fetching component levels:', error)); + .catch(error => console.error('Error fetching log configs:', error)); } -// Handle global level change -document.getElementById('globalLevel').addEventListener('change', function(e) { - const newLevel = e.target.value; - fetch('/logs/level', { - method: 'POST', - headers: { - 'Content-Type': 'application/x-www-form-urlencoded', - }, - body: `level=${encodeURIComponent(newLevel)}` - }) - .then(response => response.json()) - .then(result => { - if (result.status === 'success') { - console.log(`Changed global log level to ${newLevel}`); - } else { - console.error('Failed to update global log level:', result.message); - } - }) - .catch(error => console.error('Error updating global log level:', error)); +document.getElementById('configSelect').addEventListener('change', function(e) { + const configFile = e.target.value; + if (!configFile) return; + if 
(!confirm(`Switch log configuration to "${e.target.options[e.target.selectedIndex].text}" and restart PiFinder?`)) { + loadLogConfigs(); // reset selection + return; + } + // Use a real form POST so the browser navigates to the restart page HTML + const form = document.createElement('form'); + form.method = 'POST'; + form.action = '/logs/switch_config'; + const input = document.createElement('input'); + input.type = 'hidden'; + input.name = 'logconf_file'; + input.value = configFile; + form.appendChild(input); + document.body.appendChild(form); + form.submit(); }); -// Handle component selection -document.getElementById('componentSelect').addEventListener('change', function(e) { - const component = e.target.value; - if (!component) { - document.getElementById('componentLevel').style.display = 'none'; +document.getElementById('uploadLogConfInput').addEventListener('change', function(e) { + const file = e.target.files[0]; + if (!file) return; + if (!file.name.startsWith('logconf_') || !file.name.endsWith('.json')) { + alert('File must be named logconf_.json'); + e.target.value = ''; return; } - - // Show level select and set current level - const levelSelect = document.getElementById('componentLevel'); - levelSelect.style.display = 'block'; - - // Get current level for selected component - fetch('/logs/components') + const formData = new FormData(); + formData.append('config_file', file); + fetch('/logs/upload_config', { method: 'POST', body: formData }) .then(response => response.json()) - .then(data => { - const currentLevel = data.components[component].current_level; - levelSelect.value = currentLevel; - }); -}); - -// Handle component level change -document.getElementById('componentLevel').addEventListener('change', function(e) { - const component = document.getElementById('componentSelect').value; - const newLevel = e.target.value; - - fetch('/logs/component_level', { - method: 'POST', - headers: { - 'Content-Type': 'application/x-www-form-urlencoded', - }, - 
body: `component=${encodeURIComponent(component)}&level=${encodeURIComponent(newLevel)}` - }) - .then(response => response.json()) - .then(result => { - if (result.status === 'success') { - console.log(`Changed ${component} log level to ${newLevel}`); - } else { - console.error('Failed to update log level:', result.message); - } - }) - .catch(error => console.error('Error updating log level:', error)); + .then(result => { + if (result.status === 'ok') { + loadLogConfigs(); + } else { + alert('Upload failed: ' + result.message); + } + }) + .catch(error => console.error('Error uploading log config:', error)); + e.target.value = ''; }); -// Initial load of components -updateComponentLevels(); +// Initial load of configs +loadLogConfigs(); // Set up button event listeners document.getElementById('pauseButton').addEventListener('click', function() { From 908560a2c9baff0bbce8acdcf459a81aba90cc29 Mon Sep 17 00:00:00 2001 From: Jens Scheidtmann Date: Tue, 7 Apr 2026 18:53:46 +0200 Subject: [PATCH 2/7] ruff formatting --- python/PiFinder/server.py | 41 ++++++++++++++++------ python/PiFinder/sqm/save_sweep_metadata.py | 3 +- python/PiFinder/sqm/sqm.py | 17 +++++---- python/PiFinder/ui/sqm.py | 4 ++- python/PiFinder/ui/sqm_calibration.py | 4 ++- python/PiFinder/ui/sqm_sweep.py | 16 ++++++--- python/tests/test_sqm.py | 4 ++- 7 files changed, 62 insertions(+), 27 deletions(-) diff --git a/python/PiFinder/server.py b/python/PiFinder/server.py index a13b2151a..bae7bc6b3 100644 --- a/python/PiFinder/server.py +++ b/python/PiFinder/server.py @@ -851,15 +851,21 @@ def list_log_configs(): import glob configs = [] - active = os.path.realpath("pifinder_logconf.json") if os.path.exists("pifinder_logconf.json") else None + active = ( + os.path.realpath("pifinder_logconf.json") + if os.path.exists("pifinder_logconf.json") + else None + ) for path in sorted(glob.glob("logconf_*.json")): - stem = path[len("logconf_"):-len(".json")] + stem = path[len("logconf_") : -len(".json")] display = 
stem.replace("_", " ").title() - configs.append({ - "file": path, - "name": display, - "active": os.path.realpath(path) == active, - }) + configs.append( + { + "file": path, + "name": display, + "active": os.path.realpath(path) == active, + } + ) return {"configs": configs} @app.route("/logs/switch_config", method="post") @@ -867,10 +873,17 @@ def list_log_configs(): def switch_log_config(): """Atomically repoint pifinder_logconf.json to the chosen config, then restart.""" logconf_file = request.forms.get("logconf_file", "").strip() - if not logconf_file or not logconf_file.startswith("logconf_") or not logconf_file.endswith(".json"): + if ( + not logconf_file + or not logconf_file.startswith("logconf_") + or not logconf_file.endswith(".json") + ): return {"status": "error", "message": "Invalid log config file name"} if not os.path.exists(logconf_file): - return {"status": "error", "message": f"Log config file not found: {logconf_file}"} + return { + "status": "error", + "message": f"Log config file not found: {logconf_file}", + } try: link = "pifinder_logconf.json" tmp = link + ".tmp" @@ -891,9 +904,15 @@ def upload_log_config(): return {"status": "error", "message": "No file provided"} filename = upload.filename if not filename.startswith("logconf_") or not filename.endswith(".json"): - return {"status": "error", "message": "File must be named logconf_.json"} + return { + "status": "error", + "message": "File must be named logconf_.json", + } if os.path.exists(filename): - return {"status": "error", "message": f"File already exists: {filename}"} + return { + "status": "error", + "message": f"File already exists: {filename}", + } try: upload.save(filename, overwrite=False) logger.info("Uploaded log config: %s", filename) diff --git a/python/PiFinder/sqm/save_sweep_metadata.py b/python/PiFinder/sqm/save_sweep_metadata.py index 518434a97..293131716 100644 --- a/python/PiFinder/sqm/save_sweep_metadata.py +++ b/python/PiFinder/sqm/save_sweep_metadata.py @@ -80,7 +80,8 
@@ def save_sweep_metadata( # Noise floor estimation details (from NoiseFloorEstimator) if noise_floor_details is not None: metadata["noise_floor_estimator"] = { - k: v for k, v in noise_floor_details.items() + k: v + for k, v in noise_floor_details.items() if k != "request_zero_sec_sample" # Exclude internal flags } if camera_type is not None: diff --git a/python/PiFinder/sqm/sqm.py b/python/PiFinder/sqm/sqm.py index db6186ab0..d5426a020 100644 --- a/python/PiFinder/sqm/sqm.py +++ b/python/PiFinder/sqm/sqm.py @@ -65,7 +65,9 @@ def _load_calibration(self) -> bool: ) if not calibration_file.exists(): - logger.debug(f"No calibration file found at {calibration_file}, using defaults") + logger.debug( + f"No calibration file found at {calibration_file}, using defaults" + ) return False try: @@ -193,7 +195,9 @@ def _measure_star_flux_with_local_background( # Check for saturation in aperture aperture_pixels = image_patch[aperture_mask] - max_aperture_pixel = np.max(aperture_pixels) if len(aperture_pixels) > 0 else 0 + max_aperture_pixel = ( + np.max(aperture_pixels) if len(aperture_pixels) > 0 else 0 + ) if max_aperture_pixel >= saturation_threshold: # Mark saturated star with flux=-1 to be excluded from mzero calculation @@ -202,7 +206,6 @@ def _measure_star_flux_with_local_background( n_saturated += 1 continue - # Total flux in aperture (includes background) total_flux = np.sum(aperture_pixels) @@ -263,9 +266,7 @@ def _calculate_mzero( # Flux-weighted mean: brighter stars contribute more valid_mzeros_arr = np.array(valid_mzeros) valid_fluxes_arr = np.array(valid_fluxes) - weighted_mzero = float( - np.average(valid_mzeros_arr, weights=valid_fluxes_arr) - ) + weighted_mzero = float(np.average(valid_mzeros_arr, weights=valid_fluxes_arr)) return weighted_mzero, mzeros @@ -540,7 +541,9 @@ def calculate( # Following ASTAP: zenith is reference point where extinction = 0 # Only ADDITIONAL extinction below zenith is added: 0.28 * (airmass - 1) # This allows comparing 
measurements at different altitudes - extinction_for_altitude = self._atmospheric_extinction(altitude_deg) # 0.28*(airmass-1) + extinction_for_altitude = self._atmospheric_extinction( + altitude_deg + ) # 0.28*(airmass-1) # Main SQM value: no extinction correction (raw measurement) sqm_final = sqm_uncorrected diff --git a/python/PiFinder/ui/sqm.py b/python/PiFinder/ui/sqm.py index 592e755fd..235b7a6c4 100644 --- a/python/PiFinder/ui/sqm.py +++ b/python/PiFinder/ui/sqm.py @@ -262,7 +262,9 @@ def _is_calibrated(self) -> bool: camera_type = self.shared_state.camera_type() camera_type_processed = f"{camera_type}_processed" calibration_file = ( - Path.home() / "PiFinder_data" / f"sqm_calibration_{camera_type_processed}.json" + Path.home() + / "PiFinder_data" + / f"sqm_calibration_{camera_type_processed}.json" ) return calibration_file.exists() diff --git a/python/PiFinder/ui/sqm_calibration.py b/python/PiFinder/ui/sqm_calibration.py index 450b54d98..b0eb61803 100644 --- a/python/PiFinder/ui/sqm_calibration.py +++ b/python/PiFinder/ui/sqm_calibration.py @@ -701,7 +701,9 @@ def _analyze_calibration(self): # 2. Compute read noise using temporal variance (not spatial) # Spatial std includes fixed pattern noise (PRNU), which is wrong. # Temporal variance at each pixel measures true read noise. - temporal_variance = np.var(bias_stack, axis=0) # variance across frames per pixel + temporal_variance = np.var( + bias_stack, axis=0 + ) # variance across frames per pixel self.read_noise = float(np.sqrt(np.mean(temporal_variance))) # 3. 
Compute dark current rate diff --git a/python/PiFinder/ui/sqm_sweep.py b/python/PiFinder/ui/sqm_sweep.py index 56e8310b2..277e47b7b 100644 --- a/python/PiFinder/ui/sqm_sweep.py +++ b/python/PiFinder/ui/sqm_sweep.py @@ -288,7 +288,8 @@ def _add_detailed_metadata(self): # Find the sweep directory captures_dir = Path(utils.data_dir) / "captures" sweep_dirs = [ - d for d in captures_dir.glob("sweep_*") + d + for d in captures_dir.glob("sweep_*") if d.stat().st_ctime >= (self.start_time - 1) ] if not sweep_dirs: @@ -313,7 +314,8 @@ def _add_detailed_metadata(self): "pifinder_value": sqm_state.value, "reference_value": self.reference_sqm, "difference": (self.reference_sqm - sqm_state.value) - if self.reference_sqm and sqm_state.value else None, + if self.reference_sqm and sqm_state.value + else None, "source": sqm_state.source, } @@ -327,7 +329,8 @@ def _add_detailed_metadata(self): if image_metadata: metadata["image"] = { "exposure_us": image_metadata.get("exposure_time"), - "exposure_sec": image_metadata.get("exposure_time", 0) / 1_000_000.0, + "exposure_sec": image_metadata.get("exposure_time", 0) + / 1_000_000.0, "gain": image_metadata.get("gain"), "imu_delta": image_metadata.get("imu_delta"), } @@ -348,8 +351,11 @@ def _add_detailed_metadata(self): # Add NoiseFloorEstimator output camera_type = self.shared_state.camera_type() camera_type_processed = f"{camera_type}_processed" - exposure_sec = (image_metadata.get("exposure_time", 500000) / 1_000_000.0 - if image_metadata else 0.5) + exposure_sec = ( + image_metadata.get("exposure_time", 500000) / 1_000_000.0 + if image_metadata + else 0.5 + ) if self.camera_image is not None: image_array = np.array(self.camera_image.convert("L")) diff --git a/python/tests/test_sqm.py b/python/tests/test_sqm.py index a8a9ac58b..7b9f610e9 100644 --- a/python/tests/test_sqm.py +++ b/python/tests/test_sqm.py @@ -256,7 +256,9 @@ def test_calculate_extinction_applied(self): # Check extinction values (ASTAP convention: 0 at zenith) # 
Pickering airmass at 30° ≈ 1.995, so extinction ≈ 0.28 * 0.995 ≈ 0.279 - assert details_zenith["extinction_for_altitude"] == pytest.approx(0.0, abs=0.001) + assert details_zenith["extinction_for_altitude"] == pytest.approx( + 0.0, abs=0.001 + ) expected_ext_30 = 0.28 * (sqm._pickering_airmass(30.0) - 1) assert details_30deg["extinction_for_altitude"] == pytest.approx( expected_ext_30, abs=0.001 From 5dce9498d833f4147f0409de38e9ed820f018ff4 Mon Sep 17 00:00:00 2001 From: Jens Scheidtmann Date: Mon, 13 Apr 2026 08:05:02 +0200 Subject: [PATCH 3/7] ruff formatting --- python/PiFinder/calc_utils.py | 1 - 1 file changed, 1 deletion(-) diff --git a/python/PiFinder/calc_utils.py b/python/PiFinder/calc_utils.py index 112271a3d..d834f38d0 100644 --- a/python/PiFinder/calc_utils.py +++ b/python/PiFinder/calc_utils.py @@ -140,7 +140,6 @@ def b1950_to_j2000(ra_hours, dec_deg): """ return epoch_to_epoch(B1950, J2000, ra_hours, dec_deg) - def aim_degrees(shared_state, mount_type, screen_direction, target): """ Returns degrees in either From f18de0e5481b1e612fa47811ec96f9f7e3a299b8 Mon Sep 17 00:00:00 2001 From: Jens Scheidtmann Date: Mon, 13 Apr 2026 08:12:18 +0200 Subject: [PATCH 4/7] Avoid mypy notes that bodies of untyped functions are not checked. 
--- python/PiFinder/calc_utils.py | 1 + python/PiFinder/catalog_base.py | 2 +- python/PiFinder/catalog_imports/harris_loader.py | 2 +- python/PiFinder/state.py | 2 +- python/PiFinder/ui/radec_entry.py | 2 +- python/PiFinder/ui/sqm_calibration.py | 2 +- python/PiFinder/ui/textentry.py | 2 +- 7 files changed, 7 insertions(+), 6 deletions(-) diff --git a/python/PiFinder/calc_utils.py b/python/PiFinder/calc_utils.py index d834f38d0..112271a3d 100644 --- a/python/PiFinder/calc_utils.py +++ b/python/PiFinder/calc_utils.py @@ -140,6 +140,7 @@ def b1950_to_j2000(ra_hours, dec_deg): """ return epoch_to_epoch(B1950, J2000, ra_hours, dec_deg) + def aim_degrees(shared_state, mount_type, screen_direction, target): """ Returns degrees in either diff --git a/python/PiFinder/catalog_base.py b/python/PiFinder/catalog_base.py index 6f07f3f47..12ad7d2ea 100644 --- a/python/PiFinder/catalog_base.py +++ b/python/PiFinder/catalog_base.py @@ -171,7 +171,7 @@ def assign_virtual_object_ids(catalog, low_id: int) -> int: class TimerMixin: """Provides timer functionality via composition""" - def __init__(self): + def __init__(self) -> None: self.timer: Optional[threading.Timer] = None self.is_running: bool = False self.time_delay_seconds: Union[int, Callable[[], int]] = ( diff --git a/python/PiFinder/catalog_imports/harris_loader.py b/python/PiFinder/catalog_imports/harris_loader.py index e3635a3cd..e9e7cd69a 100644 --- a/python/PiFinder/catalog_imports/harris_loader.py +++ b/python/PiFinder/catalog_imports/harris_loader.py @@ -381,7 +381,7 @@ def create_cluster_object(entry: npt.NDArray, seq: int) -> Dict[str, Any]: return result -def load_harris(): +def load_harris() -> None: logging.info("Loading Harris Globular Cluster catalog") catalog: str = "Har" obj_type: str = "Gb" # Globular Cluster diff --git a/python/PiFinder/state.py b/python/PiFinder/state.py index e96b1825b..8f886b31e 100644 --- a/python/PiFinder/state.py +++ b/python/PiFinder/state.py @@ -238,7 +238,7 @@ def from_json(cls, 
json_str): class SharedStateObj: - def __init__(self): + def __init__(self) -> None: self.__power_state = 1 # 0 = sleep state, 1 = awake state # self.__solve_state # None = No solve attempted yet diff --git a/python/PiFinder/ui/radec_entry.py b/python/PiFinder/ui/radec_entry.py index 34b8db27c..5f77be72d 100644 --- a/python/PiFinder/ui/radec_entry.py +++ b/python/PiFinder/ui/radec_entry.py @@ -586,7 +586,7 @@ class LayoutConfig: class UIRADecEntry(UIModule): __title__ = _("RA/DEC Entry") - def __init__(self, *args, **kwargs): + def __init__(self, *args, **kwargs) -> None: super().__init__(*args, **kwargs) self.callback = self.item_definition.get("callback") diff --git a/python/PiFinder/ui/sqm_calibration.py b/python/PiFinder/ui/sqm_calibration.py index b0eb61803..02bc8a513 100644 --- a/python/PiFinder/ui/sqm_calibration.py +++ b/python/PiFinder/ui/sqm_calibration.py @@ -52,7 +52,7 @@ class UISQMCalibration(UIModule): __title__ = "SQM CAL" __help_name__ = "" - def __init__(self, *args, **kwargs): + def __init__(self, *args, **kwargs) -> None: super().__init__(*args, **kwargs) # Wizard state machine diff --git a/python/PiFinder/ui/textentry.py b/python/PiFinder/ui/textentry.py index 0e41d81b2..7faae1f85 100644 --- a/python/PiFinder/ui/textentry.py +++ b/python/PiFinder/ui/textentry.py @@ -80,7 +80,7 @@ def __iter__(self): class UITextEntry(UIModule): - def __init__(self, *args, **kwargs): + def __init__(self, *args, **kwargs) -> None: super().__init__(*args, **kwargs) # Get mode from item_definition From 1e90d927efd317bda5aeae2d09f022751a284b75 Mon Sep 17 00:00:00 2001 From: Jens Scheidtmann Date: Mon, 13 Apr 2026 08:21:40 +0200 Subject: [PATCH 5/7] type annotate objects_db and observations_db --- python/PiFinder/catalog_imports/database.py | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/python/PiFinder/catalog_imports/database.py b/python/PiFinder/catalog_imports/database.py index fd8419ce3..91b88cf2c 100644 --- 
a/python/PiFinder/catalog_imports/database.py +++ b/python/PiFinder/catalog_imports/database.py @@ -4,11 +4,16 @@ This module provides centralized access to database objects for all catalog loaders. """ +from typing import Optional + +from PiFinder.db.objects_db import ObjectsDatabase +from PiFinder.db.observations_db import ObservationsDatabase + from .catalog_import_utils import init_databases # Global database objects shared across all catalog loaders -objects_db = None -observations_db = None +objects_db: Optional[ObjectsDatabase] = None +observations_db: Optional[ObservationsDatabase] = None def init_shared_database(): From 08b7615ac6859974fb5becb74d95fb013225f5d4 Mon Sep 17 00:00:00 2001 From: Jens Scheidtmann Date: Mon, 13 Apr 2026 08:22:07 +0200 Subject: [PATCH 6/7] Fix type errors --- python/PiFinder/catalog_imports/harris_loader.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/python/PiFinder/catalog_imports/harris_loader.py b/python/PiFinder/catalog_imports/harris_loader.py index e9e7cd69a..3edd7fe3b 100644 --- a/python/PiFinder/catalog_imports/harris_loader.py +++ b/python/PiFinder/catalog_imports/harris_loader.py @@ -385,6 +385,9 @@ def load_harris() -> None: logging.info("Loading Harris Globular Cluster catalog") catalog: str = "Har" obj_type: str = "Gb" # Globular Cluster + + if objects_db is None: + raise RuntimeError("Database not initialized. 
Call init_shared_database() first.") conn, _ = objects_db.get_conn_cursor() # Enable bulk mode to prevent commits during insert operations From f8801318e7d928d5e9229b0453e435984e71cd2b Mon Sep 17 00:00:00 2001 From: Jens Scheidtmann Date: Mon, 13 Apr 2026 08:26:48 +0200 Subject: [PATCH 7/7] Fix mypy errors --- python/PiFinder/ui/textentry.py | 9 ++++----- 1 file changed, 4 insertions(+), 5 deletions(-) diff --git a/python/PiFinder/ui/textentry.py b/python/PiFinder/ui/textentry.py index 7faae1f85..e65738747 100644 --- a/python/PiFinder/ui/textentry.py +++ b/python/PiFinder/ui/textentry.py @@ -1,4 +1,5 @@ from PIL import Image, ImageDraw +from PiFinder.composite_object import CompositeObject from PiFinder.ui.base import UIModule from PiFinder.db.objects_db import ObjectsDatabase from PiFinder.ui.object_list import UIObjectList @@ -6,6 +7,8 @@ import time import threading from typing import Any, TYPE_CHECKING +import logging + if TYPE_CHECKING: @@ -105,7 +108,7 @@ def __init__(self, *args, **kwargs) -> None: self.KEYPRESS_TIMEOUT = 1 self.last_key_press_time = 0 self.char_index = 0 - self.search_results = [] + self.search_results: list[CompositeObject] = [] self.search_results_len_str = "0" self.show_keypad = True self.keys = KeyPad() @@ -231,8 +234,6 @@ def update_search_results(self): Debounced async search - waits 250ms after last keystroke before searching. Only updates search results in search mode. """ - import logging - logger = logging.getLogger("TextEntry") if self.text_entry_mode: @@ -271,8 +272,6 @@ def _perform_search(self, search_text, search_version): Perform the actual search in background thread. Only updates results if this search version is still current. """ - import logging - logger = logging.getLogger("TextEntry") try: