Skip to content

Commit e6ed79c

Browse files
RMANOV and claude committed
feat(ui): full bridge export with progress bar + QProgressBar hotfix
- Bridge sync now exports entities, relations AND tasks to shared.json
- Pull-before-push: git pull --rebase before exporting; imports new remote entities automatically
- Progress bar in status bar shows sync steps (5% → 100%)
- QProgressBar import fix (was removed by ruff F401 race condition)
- TaskDB._ensure_table() now auto-migrates existing DBs to v0.5.0 schema
- Suppress console windows on Windows (CREATE_NO_WINDOW)

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
1 parent 258d638 commit e6ed79c

2 files changed

Lines changed: 203 additions & 29 deletions

File tree

pyproject.toml

Lines changed: 1 addition & 1 deletion
Original file line number · Diff line number · Diff line change
@@ -1,6 +1,6 @@
11
[project]
22
name = "sqlite-memory-mcp"
3-
version = "0.5.0"
3+
version = "0.5.1"
44
description = "SQLite-backed MCP Memory Server with WAL concurrent safety, FTS5 search, session tracking, cross-machine bridge sync, and task management"
55
requires-python = ">=3.10"
66
dependencies = ["fastmcp>=2.0.0"]

task_tray.py

Lines changed: 202 additions & 28 deletions
Original file line number · Diff line number · Diff line change
@@ -48,7 +48,7 @@ def __init__(self, db_path=None):
4848
self._ensure_table()
4949

5050
def _ensure_table(self):
51-
"""Create tasks table if it doesn't exist (for test DBs)."""
51+
"""Create tasks table if missing; migrate existing table to v0.5.0 schema."""
5252
self._conn.execute("""
5353
CREATE TABLE IF NOT EXISTS tasks (
5454
id TEXT PRIMARY KEY,
@@ -69,6 +69,21 @@ def _ensure_table(self):
6969
updated_at TEXT
7070
)
7171
""")
72+
# Migrate existing DBs: add columns that v0.5.0 requires
73+
existing = {
74+
r[1] for r in self._conn.execute("PRAGMA table_info('tasks')").fetchall()
75+
}
76+
for col, sql in [
77+
("type", "ALTER TABLE tasks ADD COLUMN type TEXT NOT NULL DEFAULT 'task'"),
78+
("assignee", "ALTER TABLE tasks ADD COLUMN assignee TEXT DEFAULT NULL"),
79+
("shared_by", "ALTER TABLE tasks ADD COLUMN shared_by TEXT DEFAULT NULL"),
80+
(
81+
"description",
82+
"ALTER TABLE tasks ADD COLUMN description TEXT DEFAULT NULL",
83+
),
84+
]:
85+
if col not in existing:
86+
self._conn.execute(sql)
7287
self._conn.commit()
7388

7489
def close(self):
@@ -231,6 +246,7 @@ def delete_task(self, task_id):
231246
QFormLayout,
232247
QComboBox,
233248
QDialogButtonBox,
249+
QProgressBar,
234250
)
235251
from PyQt6.QtGui import QIcon, QAction, QPixmap, QPainter, QColor, QFont
236252
from PyQt6.QtCore import QEvent, QSettings, Qt, QTimer, QPoint, pyqtSignal
@@ -878,6 +894,7 @@ class FullWindow(QMainWindow):
878894
"""Full task manager window with tabs, search, sort, and suggested view."""
879895

880896
_bridge_done = pyqtSignal(str)
897+
_bridge_progress = pyqtSignal(int, str) # (percent, step_label)
881898

882899
# Sort modes cycle: priority → due → created → priority ...
883900
_SORT_MODES = ("priority", "due", "created")
@@ -982,8 +999,17 @@ def __init__(self, db, parent=None):
982999
self.status = QStatusBar()
9831000
self.setStatusBar(self.status)
9841001

985-
# Bridge sync signal (thread-safe → main thread)
986-
self._bridge_done.connect(lambda msg: self.status.showMessage(msg, 5000))
1002+
# Bridge sync progress bar (hidden by default)
1003+
self._sync_bar = QProgressBar()
1004+
self._sync_bar.setFixedWidth(220)
1005+
self._sync_bar.setTextVisible(True)
1006+
self._sync_bar.setFormat("%v% %s")
1007+
self._sync_bar.hide()
1008+
self.status.addPermanentWidget(self._sync_bar)
1009+
1010+
# Bridge sync signals (thread-safe → main thread)
1011+
self._bridge_progress.connect(self._on_sync_progress)
1012+
self._bridge_done.connect(self._on_sync_done)
9871013

9881014
# Auto-refresh every 30s
9891015
self._refresh_timer = QTimer(self)
@@ -1009,46 +1035,194 @@ def _refresh_and_sync(self):
10091035
self.refresh()
10101036
self._sync_bridge()
10111037

1038+
# Suppress console windows on Windows
1039+
_SP_FLAGS = subprocess.CREATE_NO_WINDOW if os.name == "nt" else 0
1040+
1041+
def _on_sync_progress(self, pct, label):
1042+
self._sync_bar.setValue(pct)
1043+
self._sync_bar.setFormat(f"{pct}% {label}")
1044+
self._sync_bar.show()
1045+
1046+
def _on_sync_done(self, msg):
1047+
self._sync_bar.setValue(100)
1048+
self._sync_bar.setFormat(f"100% {msg}")
1049+
QTimer.singleShot(3000, self._sync_bar.hide)
1050+
self.status.showMessage(msg, 5000)
1051+
10121052
def _sync_bridge(self):
1013-
"""Git add/commit/push the memory bridge in a background thread."""
1053+
"""Export full memory (entities+relations+tasks) → shared.json, then git push."""
10141054
if not os.path.isdir(self._BRIDGE_DIR):
10151055
self.status.showMessage("Bridge dir not found", 3000)
10161056
return
10171057

1018-
self.status.showMessage("Syncing bridge to GitHub...")
1019-
1020-
def _do_sync():
1058+
def _run():
10211059
try:
1022-
subprocess.run(
1023-
["git", "add", "-A"],
1060+
git_kw = dict(
10241061
cwd=self._BRIDGE_DIR,
10251062
capture_output=True,
1026-
timeout=10,
1063+
text=True,
1064+
creationflags=self._SP_FLAGS,
10271065
)
1066+
1067+
# 0. Pull remote changes + import new entities
1068+
self._bridge_progress.emit(5, "git pull...")
1069+
subprocess.run(["git", "pull", "--rebase"], timeout=30, **git_kw)
1070+
shared_path = Path(self._BRIDGE_DIR) / "shared.json"
1071+
if shared_path.exists():
1072+
try:
1073+
remote_data = json.loads(
1074+
shared_path.read_text(encoding="utf-8")
1075+
)
1076+
self._import_remote_entities(remote_data.get("entities", []))
1077+
except (json.JSONDecodeError, OSError):
1078+
pass
1079+
1080+
# 1. Export entities + observations
1081+
self._bridge_progress.emit(15, "Exporting entities...")
1082+
conn = self.db._conn
1083+
ent_rows = conn.execute(
1084+
"SELECT id, name, entity_type, project, created_at, updated_at "
1085+
"FROM entities WHERE project LIKE 'shared%' ORDER BY name"
1086+
).fetchall()
1087+
entities_out, entity_ids = [], set()
1088+
for e in ent_rows:
1089+
entity_ids.add(e["id"])
1090+
obs = conn.execute(
1091+
"SELECT content, created_at FROM observations "
1092+
"WHERE entity_id = ? ORDER BY id",
1093+
(e["id"],),
1094+
).fetchall()
1095+
entities_out.append(
1096+
{
1097+
"name": e["name"],
1098+
"entityType": e["entity_type"],
1099+
"project": e["project"],
1100+
"observations": [
1101+
{"content": o["content"], "createdAt": o["created_at"]}
1102+
for o in obs
1103+
],
1104+
"createdAt": e["created_at"],
1105+
"updatedAt": e["updated_at"],
1106+
}
1107+
)
1108+
1109+
# 2. Export relations between shared entities
1110+
self._bridge_progress.emit(25, "Exporting relations...")
1111+
relations_out = []
1112+
if entity_ids:
1113+
ph = ",".join("?" * len(entity_ids))
1114+
ids = list(entity_ids)
1115+
rel_rows = conn.execute(
1116+
f"SELECT ef.name AS from_name, et.name AS to_name, "
1117+
f"r.relation_type, r.created_at FROM relations r "
1118+
f"JOIN entities ef ON r.from_id = ef.id "
1119+
f"JOIN entities et ON r.to_id = et.id "
1120+
f"WHERE r.from_id IN ({ph}) AND r.to_id IN ({ph})",
1121+
ids + ids,
1122+
).fetchall()
1123+
relations_out = [
1124+
{
1125+
"from": r["from_name"],
1126+
"to": r["to_name"],
1127+
"relationType": r["relation_type"],
1128+
"createdAt": r["created_at"],
1129+
}
1130+
for r in rel_rows
1131+
]
1132+
1133+
# 3. Export all non-archived tasks
1134+
self._bridge_progress.emit(40, "Exporting tasks...")
1135+
task_rows = conn.execute(
1136+
"SELECT id, title, description, status, priority, section, "
1137+
"due_date, project, parent_id, notes, recurring, type, "
1138+
"assignee, shared_by, created_at, updated_at "
1139+
"FROM tasks WHERE status != 'archived' ORDER BY created_at"
1140+
).fetchall()
1141+
1142+
# 4. Build payload (preserve extra keys from remote)
1143+
payload = {
1144+
"version": 2,
1145+
"pushed_at": now_iso(),
1146+
"machine_id": socket.gethostname(),
1147+
"entities": entities_out,
1148+
"relations": relations_out,
1149+
"tasks": [dict(r) for r in task_rows],
1150+
}
1151+
if shared_path.exists():
1152+
try:
1153+
existing = json.loads(shared_path.read_text(encoding="utf-8"))
1154+
known = {
1155+
"version",
1156+
"pushed_at",
1157+
"machine_id",
1158+
"entities",
1159+
"relations",
1160+
"tasks",
1161+
"shared_tasks",
1162+
}
1163+
for k, v in existing.items():
1164+
if k not in known and isinstance(v, list):
1165+
payload[k] = v
1166+
except (json.JSONDecodeError, OSError):
1167+
pass
1168+
1169+
self._bridge_progress.emit(55, "Writing shared.json...")
1170+
shared_path.write_text(
1171+
json.dumps(payload, indent=2, ensure_ascii=False), encoding="utf-8"
1172+
)
1173+
1174+
# 5. Git add + commit + push
1175+
self._bridge_progress.emit(65, "git add...")
1176+
subprocess.run(["git", "add", "shared.json"], timeout=10, **git_kw)
1177+
1178+
self._bridge_progress.emit(80, "git commit...")
1179+
n_ent = len(entities_out)
1180+
n_tasks = len(payload["tasks"])
1181+
msg = f"bridge: push {n_ent} entities, {n_tasks} tasks from {socket.gethostname()}"
10281182
result = subprocess.run(
1029-
["git", "commit", "-m", "bridge: sync from task tray"],
1030-
cwd=self._BRIDGE_DIR,
1031-
capture_output=True,
1032-
timeout=10,
1183+
["git", "commit", "-m", msg], timeout=10, **git_kw
10331184
)
1034-
if result.returncode == 0:
1035-
subprocess.run(
1036-
["git", "push"],
1037-
cwd=self._BRIDGE_DIR,
1038-
capture_output=True,
1039-
timeout=30,
1040-
)
1041-
return "Bridge synced to GitHub"
1042-
return "Bridge: nothing to sync"
1043-
except Exception as exc:
1044-
return f"Bridge sync error: {exc}"
1185+
if result.returncode != 0:
1186+
self._bridge_done.emit("Nothing to sync")
1187+
return
10451188

1046-
def _run():
1047-
msg = _do_sync()
1048-
self._bridge_done.emit(msg)
1189+
self._bridge_progress.emit(90, "git push...")
1190+
subprocess.run(["git", "push"], timeout=30, **git_kw)
1191+
self._bridge_done.emit(f"Synced: {n_ent} entities, {n_tasks} tasks")
1192+
except Exception as exc:
1193+
self._bridge_done.emit(f"Sync error: {exc}")
10491194

10501195
threading.Thread(target=_run, daemon=True).start()
10511196

1197+
def _import_remote_entities(self, remote_entities):
1198+
"""Import entities from remote shared.json that don't exist locally."""
1199+
conn = self.db._conn
1200+
for e in remote_entities:
1201+
existing = conn.execute(
1202+
"SELECT id FROM entities WHERE name = ?", (e["name"],)
1203+
).fetchone()
1204+
if existing:
1205+
continue
1206+
now = now_iso()
1207+
eid = conn.execute(
1208+
"INSERT INTO entities (name, entity_type, project, created_at, updated_at) "
1209+
"VALUES (?, ?, ?, ?, ?)",
1210+
(
1211+
e["name"],
1212+
e["entityType"],
1213+
e.get("project") or "shared:bridge",
1214+
now,
1215+
now,
1216+
),
1217+
).lastrowid
1218+
for o in e.get("observations", []):
1219+
conn.execute(
1220+
"INSERT INTO observations (entity_id, content, created_at) "
1221+
"VALUES (?, ?, ?)",
1222+
(eid, o["content"], o.get("createdAt", now)),
1223+
)
1224+
conn.commit()
1225+
10521226
def _sort_tasks(self, tasks):
10531227
"""Sort tasks by current sort mode."""
10541228
mode = self._sort_mode

0 commit comments

Comments (0)