Skip to content

Commit 40dfe3c

Browse files
epicgdogevanugarte
andauthored
track print jobs with lpstat + sqlite (#138)
* new sqlite thingy * gitignore * mount db + other stuff * dockercompose * added default file path * no default * syntax error fix * fix errors * fix errors again * fix errors again * updating completed jobs * mocking * gitignore * dynamic job ids * Refactor print job creation and add ID iterator * removed thread that polls lpstat * removed thread that polls lpstat * DEBUG = False to pass unit tests * printer returns None when error * throw error for returncode != 0 * please work * create_print_job returns different things for different things * log exception & remove sqlite_helpers from gerard.py * minor edits * minor changes part 2 * tests work * rename set * moved updating jobs to sqlite_helpers * sqlite unit tests * update unit tests * removed timestamp check in unit tests * better unit testing * bug fixes on unit tests * v1 of lpstat unit tests * changed names of update_completed -> update_jobs * update_jobs * acknowledged, not created * i forgot to call the test * trying with sqlite tests * bug fixes * trying with sqlite tests * bug fixes * logging + more fixes * working lpstat tests + bug fixes in sqlite_helpers * sqlite_test bug * simplify unit tests * renamed sqlite_test to test_sqlite * unit test for gerard * changed yaml file * collector import bug * removed useless test * update imports for test_lpstat * fixes * sqlite test fixes * lpstat test fixes * format, remove rendudant lines in gerard and lpstat_helpers * less noise in logs, mount files as volume * add curl example, remove shell=True * set -x in what.sh, log bytes written * less diff * tests should pass * remove ghostscript, logging.exception --------- Co-authored-by: evan <evanuxd@gmail.com>
1 parent 153dae6 commit 40dfe3c

16 files changed

Lines changed: 738 additions & 79 deletions

.github/workflows/unit-tests.yml

Lines changed: 4 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -26,4 +26,7 @@ jobs:
2626
2727
- name: Run tests
2828
run: |
29-
python printer/test_server.py
29+
python printer/test/test_gerard.py
30+
python printer/test/test_lpstat.py
31+
python printer/test/test_server.py
32+
python printer/test/test_sqlite.py

.gitignore

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -16,3 +16,6 @@ config.json
1616
venv/
1717

1818
tmp/
19+
20+
*.db
21+
.python-version

README.md

Lines changed: 17 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -53,3 +53,20 @@ sce link q
5353
### its go time
5454
- just run `docker-compose up --build -d`
5555
- the logs of the server can be observed with `docker logs sce-printer --tail 300 -f`
56+
57+
### example curl command to print a blank pdf
58+
```sh
59+
# get the full path of your folder
60+
pwd
61+
62+
# download a blank pdf
63+
curl --output blank.pdf https://mag.wcoomd.org/uploads/2018/05/blank.pdf
64+
65+
# curl the server
66+
curl -X POST \
67+
-H "Content-Type: multipart/form-data" \
68+
-F "file=@/full/path/to/blank.pdf" \
69+
-F "copies=2" \
70+
-F "sides=one-sided" \
71+
http://localhost:9000/print
72+
```

docker-compose.yml

Lines changed: 6 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -12,12 +12,18 @@ services:
1212
- ~/.ssh/id_ed25519-tunnel:/app/ssh_key
1313
- ~/.ssh/known_hosts:/app/known_hosts
1414
- "/etc/cups/ppd/:/etc/cups/ppd"
15+
- ./config:/app/config
16+
- ./printer:/app/printer
17+
- quasar_data:/tmp/
1518
tty: true
1619

1720
# we attach the print container to an external docker
1821
# network called "poweredge". we do this so a prometheus
1922
# container can pull metrics from the server over HTTP
2023
# and relay the metrics to a Grafana dashboard.
24+
volumes:
25+
quasar_data:
26+
2127
networks:
2228
default:
2329
external:

printer/Dockerfile

Lines changed: 1 addition & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -3,6 +3,7 @@
33
FROM drpsychick/airprint-bridge:jammy
44

55
WORKDIR /app
6+
67
RUN apt-get update
78

89
RUN apt install -y python3 python3-pip python3-venv jq ssh
@@ -17,12 +18,6 @@ COPY ./printer/requirements.txt /app/printer/requirements.txt
1718

1819
RUN /opt/venv/bin/pip install -r /app/printer/requirements.txt
1920

20-
COPY ./config/config.json /app/config/config.json
21-
22-
COPY ./printer/what.sh /app/printer/what.sh
23-
24-
COPY ./printer/*.py /app/printer/
25-
2621
EXPOSE 9000
2722

2823
# The below command runs the bash script that sets up the connection to the
Lines changed: 16 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -2,11 +2,10 @@
22
import enum
33
import logging
44
import json
5-
import asyncio
65

76
from pysnmp.hlapi import *
87

9-
from metrics import MetricsHandler
8+
from modules.metrics import MetricsHandler
109

1110
metrics_handler = MetricsHandler.instance()
1211

@@ -46,15 +45,22 @@ def fetch_ips_from_config(config_file_path):
4645
raise Exception("No printers defined in config file")
4746

4847
ip_list = []
49-
for printer in printer_configs:
50-
if isinstance(printer_configs[printer], dict):
51-
ip = printer_configs[printer]["IP"]
52-
logging.info(f"Adding printer {printer} with IP {ip}")
53-
ip_list.append(ip)
48+
for printer_name in printer_configs:
49+
resolved_config = printer_configs.get(printer_name, {})
50+
if not resolved_config.get("ENABLED"):
51+
logging.info(f"{printer_name} is not enabled, skipping")
52+
continue
53+
ip = resolved_config.get("IP")
54+
if not ip:
55+
logging.info(f"{printer_name} config {resolved_config} did not have an ip, skipping")
56+
continue
57+
58+
logging.info(f"found printer {printer_name} with IP {ip}")
59+
ip_list.append(ip)
5460
return ip_list
5561

56-
except Exception as e:
57-
logging.error(f"error opening config file: {e}")
62+
except Exception:
63+
logging.exception(f"error opening config file")
5864

5965

6066
def scrape_snmp(ip_list, sleep_duration_minutes=5):
@@ -83,7 +89,7 @@ def get_snmp_data(ip):
8389
metrics_handler.device_unreachable.set(1)
8490
continue
8591
if errorStatus:
86-
logging.error(
92+
logging.debug(
8793
f"Error status from {ip} for metric {oid.metric_value}: {errorStatus.prettyPrint()}"
8894
)
8995
# SNMP OIDs related to errors often dissappear when

printer/modules/gerard.py

Lines changed: 84 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,84 @@
1+
"""this file is for parsing lp command output.
2+
epicgdog made these files so instead of calling it lp_helpers.py its gerard.py
3+
"""
4+
5+
import logging
6+
import shlex
7+
import subprocess
8+
9+
10+
LP_COMMAND = """
11+
lp \
12+
-n {num_copies} {maybe_page_range} \
13+
-o sides={sides} \
14+
-o media=na_letter_8.5x11in \
15+
-d {printer_name} \
16+
{file_path}
17+
"""
18+
19+
logging.basicConfig(
20+
# in mondo we trust
21+
format="%(asctime)s.%(msecs)03dZ %(levelname)s:%(name)s:%(message)s",
22+
datefmt="%Y-%m-%dT%H:%M:%S",
23+
level=logging.INFO,
24+
)
25+
26+
27+
class IDIterator:
    """Monotonically increasing integer IDs, starting at 0.

    Implements the full iterator protocol (`__iter__` + `__next__`) so an
    instance works with `next()` and in `for` loops alike — note the
    sequence is infinite, so only bounded iteration makes sense.
    """

    def __init__(self):
        # next ID to hand out
        self._current = 0

    def __iter__(self):
        # iterator protocol: an iterator returns itself
        return self

    def __next__(self):
        # named next_id to avoid shadowing the `id` builtin
        next_id = self._current
        self._current += 1
        return next_id
35+
36+
37+
print_job_suffix = IDIterator()
38+
39+
40+
def create_print_job(
    num_copies,
    maybe_page_range,
    sides,
    printer_name,
    file_path,
    is_development_mode=False,
):
    """Submit a print job via the `lp` CLI and return its job id.

    Args:
        num_copies: number of copies, passed to `lp -n`.
        maybe_page_range: "" or a pre-built page-range option fragment.
        sides: lp sides option, e.g. "one-sided".
        printer_name: CUPS destination name, passed to `lp -d`.
        file_path: path of the file to print.
        is_development_mode: when True, nothing is executed and a fake
            job id built from `print_job_suffix` is returned.

    Returns:
        The job id string on success, None when `lp` exits nonzero, or
        "" when the job id could not be parsed out of lp's stdout.

    Raises:
        subprocess.TimeoutExpired: if `lp` does not finish within 3s
            (same propagation behavior as the previous Popen.wait call).
    """
    command = LP_COMMAND.format(
        num_copies=num_copies,
        maybe_page_range=maybe_page_range,
        sides=sides,
        printer_name=printer_name,
        file_path=file_path,
    )

    if is_development_mode:
        logging.warning(
            f"server is in development mode, command would've been `{command}`"
        )
        job_id = f"HP_LaserJet_p2015dn_Right-{next(print_job_suffix)}"
        return job_id

    args_list = shlex.split(command.strip())
    logging.info(f"running command {command}")
    # subprocess.run drains stdout/stderr while waiting; the previous
    # Popen + wait(timeout=3) + PIPE combination could deadlock if the
    # child filled a pipe buffer before exiting.
    print_job = subprocess.run(
        args_list,
        capture_output=True,
        text=True,
        timeout=3,
    )
    if print_job.returncode != 0:
        logging.error(
            f"print job returned nonzero code {print_job.returncode} stderr: {print_job.stderr} stdout: {print_job.stdout}"
        )
        return None
    try:
        lp_command_output = print_job.stdout
        logging.info(f"lp command stdout was {lp_command_output}")
        # lp prints e.g. "request id is HP_LaserJet_p2015dn_Right-52 (1 file(s))";
        # token index 3 is the job id
        print_id = lp_command_output.split(" ")[3]
        return print_id
    except Exception:
        logging.exception("unable to parse print job from stdout")
        return ""

printer/modules/lpstat_helpers.py

Lines changed: 73 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,73 @@
1+
import logging
2+
import subprocess
3+
import time
4+
5+
from modules import sqlite_helpers
6+
7+
LPSTAT_CMD = "lpstat -o HP_LaserJet_p2015dn_Right"
8+
SLEEP_TIME = 2
9+
10+
jobs_seen_last = set()
11+
12+
logging.basicConfig(
13+
# in mondo we trust
14+
format="%(asctime)s.%(msecs)03dZ %(levelname)s:%(name)s:%(message)s",
15+
datefmt="%Y-%m-%dT%H:%M:%S",
16+
level=logging.INFO,
17+
)
18+
19+
20+
def query_lpstat():
    """Yield the job ids of jobs currently queued on the printer.

    Runs `lpstat -o <printer>`. Each stdout line looks like
        HP_LaserJet_p2015dn_Right-52  root  5120  Sat May 31 18:19:38 2025
    and the first whitespace-delimited token is the job id. Yields
    nothing when lpstat exits nonzero or reports an empty queue.

    Note: this is a generator, so the subprocess only runs once the
    result is iterated (callers do `set(query_lpstat())`).
    """
    # pass an argv list instead of shell=True — the command is fixed,
    # so there is no need to involve a shell at all
    p = subprocess.Popen(
        LPSTAT_CMD.split(),
        stdout=subprocess.PIPE,
        stderr=subprocess.PIPE,
        text=True,
    )
    p.wait()

    if p.returncode != 0:
        logging.error(
            f"{LPSTAT_CMD} returned nonzero code {p.returncode} with stderr {p.stderr.read()}"
        )
        return

    output = p.stdout.read().strip()
    logging.debug(f"{LPSTAT_CMD} stdout: {output}")
    if not output:
        return

    for job in output.split("\n"):
        try:
            yield job.strip().split(" ")[0]
        except Exception:
            # narrowed from a bare except; keep the best-effort bail-out
            logging.exception(f"unable to parse job id from line {job}")
            return
52+
53+
54+
def poll_lpstat(sqlite_file):
    """Forever mirror the lpstat queue state into the sqlite logs table.

    Every SLEEP_TIME seconds: jobs that were queued on the previous poll
    but are gone now get marked 'completed'; everything currently queued
    gets marked 'acknowledged'. Never returns.
    """
    global jobs_seen_last
    while True:
        try:
            queued_now = set(query_lpstat())
            # anything seen last time that is no longer queued finished
            finished = jobs_seen_last - queued_now

            sqlite_helpers.mark_jobs_completed(sqlite_file, list(finished))
            sqlite_helpers.mark_jobs_acknowledged(sqlite_file, list(queued_now))

            jobs_seen_last.clear()
            jobs_seen_last.update(queued_now)
        except Exception:
            logging.exception("what happened to query_lpstat?")
        time.sleep(SLEEP_TIME)

printer/modules/sqlite_helpers.py

Lines changed: 74 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,74 @@
1+
from datetime import datetime
2+
import logging
3+
import sqlite3
4+
import datetime
5+
6+
logging.basicConfig(
7+
# in mondo we trust
8+
format="%(asctime)s.%(msecs)03dZ %(levelname)s:%(name)s:%(message)s",
9+
datefmt="%Y-%m-%dT%H:%M:%S",
10+
level=logging.INFO,
11+
)
12+
13+
14+
def maybe_create_table(sqlite_file: str) -> bool:
    """Create the `logs` table in the given sqlite file if it is missing.

    The table tracks print jobs: a (date, job_id) primary key plus a
    status that moves created -> acknowledged -> completed.

    Returns:
        True when the table exists (created now or already present),
        False when creation failed (the error is logged).
    """
    create_table_query = """
        CREATE TABLE IF NOT EXISTS logs (
            date DATETIME DEFAULT CURRENT_TIMESTAMP,
            job_id TEXT NOT NULL,
            status TEXT CHECK (status IN ('created', 'acknowledged', 'completed')) NOT NULL DEFAULT 'created',
            PRIMARY KEY (date, job_id)
        )
    """

    db = sqlite3.connect(sqlite_file)
    try:
        db.execute(create_table_query)
        db.commit()
        return True
    except Exception:
        logging.exception("Unable to create printer table")
        return False
    finally:
        # the original leaked the connection on every path
        db.close()
34+
35+
36+
def insert_print_job(sqlite_file: str, job_id: str):
    """Record a new print job in the `logs` table (status defaults to 'created').

    Args:
        sqlite_file: path to the sqlite database.
        job_id: the job id to insert.

    Returns:
        The local datetime at which the row was inserted, or None when
        the (date, job_id) pair already exists (IntegrityError) or on
        any other error (which is logged).
    """
    try:
        db = sqlite3.connect(sqlite_file, timeout=10.0)
        try:
            timestamp = datetime.datetime.now()
            # the original rebound `db` to a second, leaked connection
            # here and committed on that one; commit on the connection
            # that actually ran the INSERT
            db.execute("INSERT INTO logs (job_id) VALUES (?)", (job_id,))
            db.commit()
            return timestamp
        finally:
            db.close()
    except sqlite3.IntegrityError:
        return None
    except Exception:
        logging.exception("Inserting print job had an error")
        return None
51+
52+
53+
def mark_jobs_with_status(sqlite_file, jobs, status):
    """Set `status` on every `logs` row whose job_id appears in `jobs`.

    Args:
        sqlite_file: path to the sqlite database.
        jobs: iterable of job id strings; an empty batch is a no-op
            (the database is not even opened).
        status: one of 'created', 'acknowledged', 'completed'.
    """
    job_ids = [(job_id,) for job_id in jobs]
    if not job_ids:
        # nothing to do — skip connecting entirely
        return
    logging.info(f"marking {job_ids} as {status} in sqlite")

    db = sqlite3.connect(sqlite_file)
    try:
        # bind `status` as a parameter instead of interpolating it into
        # the SQL string (avoids injection / quoting bugs)
        db.executemany(
            "UPDATE logs SET status = ? WHERE job_id = ?",
            [(status, job_id) for (job_id,) in job_ids],
        )
        db.commit()
    finally:
        # the original never closed this connection
        db.close()
67+
68+
69+
def mark_jobs_acknowledged(sqlite_file, jobs):
    """Mark every job id in `jobs` as 'acknowledged' in the logs table."""
    mark_jobs_with_status(sqlite_file, jobs, status="acknowledged")
71+
72+
73+
def mark_jobs_completed(sqlite_file, jobs):
    """Mark every job id in `jobs` as 'completed' in the logs table."""
    mark_jobs_with_status(sqlite_file, jobs, status="completed")

0 commit comments

Comments
 (0)