Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
46 changes: 31 additions & 15 deletions python/PiFinder/main.py
Original file line number Diff line number Diff line change
Expand Up @@ -868,25 +868,36 @@ def main(


if __name__ == "__main__":
import sys

debug_no_file_logs = "--debug-no-file-logs" in sys.argv
if debug_no_file_logs:
os.environ["PIFINDER_DEBUG_NO_FILE_LOGS"] = "1"

print("Bootstrap logging configuration ...")
logging.basicConfig(format="%(asctime)s BASIC %(name)s: %(levelname)s %(message)s")
rlogger = logging.getLogger()
rlogger.setLevel(logging.INFO)
log_path = utils.data_dir / "pifinder.log"
try:
log_helper = MultiprocLogging(
Path("pifinder_logconf.json"),
log_path,
)
rlogger.setLevel(logging.DEBUG if debug_no_file_logs else logging.INFO)

if debug_no_file_logs:
log_helper = MultiprocLogging(Path("pifinder_logconf.json"), console_only=True)
MultiprocLogging.configurer(log_helper.get_queue())
except FileNotFoundError:
rlogger.warning(
"Cannot find log configuration file, proceeding with basic configuration."
)
rlogger.warning("Logs will not be stored on disk, unless you use --log")
logging.getLogger("PIL.PngImagePlugin").setLevel(logging.WARNING)
logging.getLogger("tetra3.Tetra3").setLevel(logging.WARNING)
logging.getLogger("picamera2.picamera2").setLevel(logging.WARNING)
else:
log_path = utils.data_dir / "pifinder.log"
try:
log_helper = MultiprocLogging(
Path("pifinder_logconf.json"),
log_path,
)
MultiprocLogging.configurer(log_helper.get_queue())
except FileNotFoundError:
rlogger.warning(
"Cannot find log configuration file, proceeding with basic configuration."
)
rlogger.warning("Logs will not be stored on disk, unless you use --log")
logging.getLogger("PIL.PngImagePlugin").setLevel(logging.WARNING)
logging.getLogger("tetra3.Tetra3").setLevel(logging.WARNING)
logging.getLogger("picamera2.picamera2").setLevel(logging.WARNING)

rlogger.info("Starting PiFinder ...")
parser = argparse.ArgumentParser(description="eFinder")
Expand Down Expand Up @@ -948,6 +959,11 @@ def main(
"-x", "--verbose", help="Set logging to debug mode", action="store_true"
)
parser.add_argument("-l", "--log", help="Log to file", action="store_true")
parser.add_argument(
"--debug-no-file-logs",
help="Debug: log everything at DEBUG level to console only, bypassing log configuration and file output",
action="store_true",
)
parser.add_argument(
"--lang",
help="Force user interface language (iso2 code). Changes configuration",
Expand Down
34 changes: 25 additions & 9 deletions python/PiFinder/multiproclogging.py
Original file line number Diff line number Diff line change
Expand Up @@ -67,14 +67,18 @@ def __init__(
log_conf: Optional[Path] = None,
out_file: Optional[Path] = None,
formatter: str = "%(asctime)s %(processName)s-%(name)s:%(levelname)s:%(message)s",
console_only: bool = False,
):
self._queues: List[Queue] = []
self._log_conf_file = log_conf
self._log_output_file = out_file
self._formatter = formatter
self._console_only = console_only
self._proc: Optional[Process] = None

self.apply_config()
if console_only:
logging.getLogger().setLevel(logging.DEBUG)

def apply_config(self):
if self._log_conf_file is not None:
Expand All @@ -94,7 +98,7 @@ def start(self, initial_queue: Optional[Queue] = None):
self._proc = Process(
target=self._run_sink,
args=(
self._log_output_file,
None if self._console_only else self._log_output_file,
self._queues,
),
)
Expand All @@ -109,7 +113,7 @@ def join(self):
self._queues[0].put(None)
self._proc.join()

def _run_sink(self, output: Path, queues: List[Queue]):
def _run_sink(self, output: Optional[Path], queues: List[Queue]):
"""
This is the process that consumes every log message (sink)

Expand All @@ -124,15 +128,22 @@ def _run_sink(self, output: Path, queues: List[Queue]):
for hdlr in hdlrs:
rLogger.removeHandler(hdlr)

# Set maxBytes to 50MB (50 * 1024 * 1024 bytes) and keep 5 backup files
h = logging.handlers.RotatingFileHandler(
output, maxBytes=50 * 1024 * 1024, backupCount=5, encoding="utf-8"
)
if output is None:
import sys

h: logging.Handler = logging.StreamHandler(sys.stderr)
rLogger.setLevel(logging.DEBUG)
rLogger.warning("Starting logging process (console only, no file output)")
else:
# Set maxBytes to 50MB (50 * 1024 * 1024 bytes) and keep 5 backup files
h = logging.handlers.RotatingFileHandler(
output, maxBytes=50 * 1024 * 1024, backupCount=5, encoding="utf-8"
)
rLogger.warning("Starting logging process")
rLogger.warning("Logging to %s", output)
f = logging.Formatter(self._formatter)
h.setFormatter(f)
rLogger.addHandler(h)
rLogger.warning("Starting logging process")
rLogger.warning("Logging to %s", output)

# import logging_tree
# logging_tree.printout()
Expand Down Expand Up @@ -172,6 +183,8 @@ def configurer(queue: Queue):
This method needs to be called once in each process, so that log records get forwarded to the single process writing
log messages.
"""
import os

assert queue is not None, "You passed a None to configurer! You cannot do that"
assert isinstance(
queue, multiprocessing.queues.Queue
Expand All @@ -182,8 +195,11 @@ def configurer(queue: Queue):
config = json5.load(logconf)
logging.config.dictConfig(config)

h = logging.handlers.QueueHandler(queue)
root = logging.getLogger()
if os.environ.get("PIFINDER_DEBUG_NO_FILE_LOGS"):
root.setLevel(logging.DEBUG)

h = logging.handlers.QueueHandler(queue)
root.addHandler(h)

def set_log_conf_file(self, config: Path) -> None:
Expand Down
4 changes: 2 additions & 2 deletions python/PiFinder/server.py
Original file line number Diff line number Diff line change
Expand Up @@ -802,8 +802,8 @@ def stream_logs():
else:
return {"logs": [], "position": position}
except FileNotFoundError:
logger.error(f"Log file not found: {log_file}")
return {"logs": [], "position": 0}
logger.warning(f"Log file not found: {log_file}")
return {"logs": [], "position": 0, "file_not_found": True}

except Exception as e:
logger.error(f"Error streaming logs: {e}")
Expand Down
38 changes: 32 additions & 6 deletions python/views/logs.tpl
Original file line number Diff line number Diff line change
Expand Up @@ -164,30 +164,54 @@ const LINE_HEIGHT = 20;
let updateInterval;
let lastLine = '';

// Backoff state for when the log file is not yet available.
// This page intentionally does NOT fetch the home route, so iwgetid is never triggered here.
const MIN_POLL_INTERVAL = 1000;
const MAX_POLL_INTERVAL = 10000;
let currentPollInterval = MIN_POLL_INTERVAL;

// (Re)arm the log-polling timer at the current backoff interval.
// Any previously armed timer is cancelled first so only one poller runs.
function scheduleFetch() {
    if (updateInterval) clearInterval(updateInterval); // clearInterval(undefined) is a no-op; guard is cosmetic
    updateInterval = setInterval(fetchLogs, currentPollInterval);
}

function fetchLogs() {
if (isPaused) return;

fetch(`/logs/stream?position=${currentPosition}`)
.then(response => response.json())
.then(data => {
if (data.file_not_found) {
// Back off exponentially up to MAX_POLL_INTERVAL
currentPollInterval = Math.min(currentPollInterval * 2, MAX_POLL_INTERVAL);
scheduleFetch();
return;
}

// Reset backoff on a successful response
if (currentPollInterval !== MIN_POLL_INTERVAL) {
currentPollInterval = MIN_POLL_INTERVAL;
scheduleFetch();
}

if (!data.logs || data.logs.length === 0) return;

currentPosition = data.position;
const logContent = document.getElementById('logContent');

// Add new logs to buffer, skipping duplicates
data.logs.forEach(line => {
if (line !== lastLine) {
logBuffer.push(line);
lastLine = line;
}
});

// Trim buffer if it exceeds size
if (logBuffer.length > BUFFER_SIZE) {
logBuffer = logBuffer.slice(-BUFFER_SIZE);
}

// Update display
updateLogDisplay();
})
Expand Down Expand Up @@ -239,8 +263,10 @@ function restartFromEnd() {
currentPosition = 0;
logBuffer = [];
isPaused = false;
currentPollInterval = MIN_POLL_INTERVAL;
document.getElementById('pauseButton').textContent = 'Pause';
fetchLogs();
scheduleFetch();
}

// Start fetching logs when page loads
Expand All @@ -251,7 +277,7 @@ document.addEventListener('DOMContentLoaded', () => {

// Start log fetching
fetchLogs();
updateInterval = setInterval(fetchLogs, 1000);
scheduleFetch();

// Hide loading message after first logs appear
const observer = new MutationObserver((mutations) => {
Expand Down
Loading