Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
15 changes: 15 additions & 0 deletions src/lerim/adapters/claude.py
Original file line number Diff line number Diff line change
Expand Up @@ -234,6 +234,21 @@ def iter_sessions(
entries = load_jsonl_dict_lines(path)
if not entries:
continue

# Skip subagent/sidechain transcripts — their content flows back to
# the parent session via tool results, so extracting from both would
# double-count. Also skip tiny sessions (< 6 conversation turns) which
# are typically eval judge calls or trivial interactions.
is_sidechain = any(e.get("isSidechain") for e in entries[:5])
if is_sidechain:
continue
conv_turns = sum(
1 for e in entries
if e.get("type") in ("user", "assistant")
)
if conv_turns < 6:
continue

started_at: datetime | None = None
repo_name: str | None = None
cwd: str | None = None
Expand Down
14 changes: 7 additions & 7 deletions src/lerim/app/cli.py
Original file line number Diff line number Diff line change
Expand Up @@ -1445,20 +1445,20 @@ def build_parser() -> argparse.ArgumentParser:
memory_reset = memory_sub.add_parser(
"reset",
formatter_class=_F,
help="DESTRUCTIVE: wipe memory, workspace, and index data",
help="DESTRUCTIVE: wipe memory, workspace, cache, and index data",
description=(
"Irreversibly delete memory/, workspace/, and index/ under the selected\n"
"scope, then recreate canonical empty folders.\n\n"
"Irreversibly delete memory/, workspace/, index/, and cache/ under the\n"
"selected scope, then recreate canonical empty folders.\n\n"
"Scopes:\n"
" project -- reset <repo>/.lerim/ only\n"
" global -- reset ~/.lerim/ only (includes sessions DB)\n"
" global -- reset ~/.lerim/ only (includes sessions DB + cache)\n"
" both -- reset both project and global roots (default)\n\n"
"The sessions DB lives in global index/, so --scope project alone\n"
"does NOT reset the session queue. Use 'global' or 'both' for a full wipe.\n\n"
"The sessions DB lives in global index/, and compacted session traces\n"
"live in global cache/. Use 'global' or 'both' for a full wipe.\n\n"
"Examples:\n"
" lerim memory reset --yes # wipe everything\n"
" lerim memory reset --scope project --yes # project data only\n"
" lerim memory reset --yes && lerim sync --max-sessions 5 # fresh start"
" lerim memory reset --yes && lerim up --build # fresh start"
),
)
memory_reset.add_argument(
Expand Down
41 changes: 38 additions & 3 deletions src/lerim/app/daemon.py
Original file line number Diff line number Diff line change
Expand Up @@ -340,44 +340,75 @@ def _process_one_job(job: dict[str, Any]) -> dict[str, Any]:
)
return {"status": "failed"}
counts = result.get("counts") or {}
# Extract memory actions with full metadata for activity feed
memory_actions: list[dict] = []
for path in result.get("written_memory_paths") or []:
p = Path(path)
fname = p.stem if path else ""
title = fname[9:].replace("-", " ") if len(fname) > 9 and fname[8] == "-" else fname.replace("-", " ")
primitive = "decision" if "/decisions/" in str(path) else "learning"
ma: dict = {
"action": "add",
"title": title,
"primitive": primitive,
"session_run_id": rid,
}
# Read frontmatter for full metadata
if p.exists():
try:
import frontmatter
post = frontmatter.load(str(p))
ma["title"] = post.metadata.get("title", title)
ma["body"] = post.content.strip()
ma["confidence"] = float(post.metadata.get("confidence", 0))
ma["tags"] = post.metadata.get("tags", [])
ma["source_speaker"] = post.metadata.get("source_speaker", "")
ma["durability"] = post.metadata.get("durability", "")
ma["kind"] = post.metadata.get("kind", "")
except Exception:
pass
memory_actions.append(ma)
complete_session_job(rid)
return {
"status": "extracted",
"learnings_new": int(counts.get("add") or 0),
"learnings_updated": int(counts.get("update") or 0),
"memory_actions": memory_actions,
"cost_usd": float(result.get("cost_usd") or 0),
}


def _process_claimed_jobs(
    claimed: list[dict[str, Any]],
) -> tuple[int, int, int, int, int, list[dict[str, Any]], float]:
    """Process claimed jobs sequentially in chronological order.

    Jobs are already sorted oldest-first by ``claim_session_jobs``.
    Sequential processing ensures that later sessions can correctly
    update or supersede memories created by earlier ones.

    Args:
        claimed: Job dicts claimed from the session queue, oldest first.

    Returns:
        Tuple of (extracted, failed, skipped, learnings_new,
        learnings_updated, memory_actions, cost_usd). ``memory_actions``
        aggregates the per-job action dicts in job order; values are
        heterogeneous (e.g. float confidence, list tags), hence
        ``dict[str, Any]``.
    """
    extracted = 0
    failed = 0
    skipped = 0
    learnings_new = 0
    learnings_updated = 0
    memory_actions: list[dict[str, Any]] = []
    cost_usd = 0.0
    for job in claimed:
        result = _process_one_job(job)
        status = result["status"]
        if status == "extracted":
            extracted += 1
            # Defaults guard against jobs that omit optional result keys.
            learnings_new += result.get("learnings_new", 0)
            learnings_updated += result.get("learnings_updated", 0)
            memory_actions.extend(result.get("memory_actions", []))
            cost_usd += result.get("cost_usd", 0.0)
        elif status == "failed":
            failed += 1
        elif status == "skipped":
            skipped += 1
    return extracted, failed, skipped, learnings_new, learnings_updated, memory_actions, cost_usd


def run_sync_once(
Expand Down Expand Up @@ -495,6 +526,7 @@ def run_sync_once(
failed = 0
learnings_new = 0
learnings_updated = 0
all_memory_actions: list[dict[str, str]] = []
cost_usd = 0.0
projects: set[str] = set()
claim_limit = max(max_sessions, 1)
Expand Down Expand Up @@ -526,13 +558,15 @@ def run_sync_once(
batch_skipped,
batch_new,
batch_updated,
batch_actions,
batch_cost,
) = _process_claimed_jobs(claimed)
extracted += batch_extracted
failed += batch_failed
skipped += batch_skipped
learnings_new += batch_new
learnings_updated += batch_updated
all_memory_actions.extend(batch_actions)
cost_usd += batch_cost
total_processed += len(claimed)

Expand Down Expand Up @@ -568,6 +602,7 @@ def run_sync_once(
failed_sessions=failed,
learnings_new=learnings_new,
learnings_updated=learnings_updated,
memory_actions=all_memory_actions,
run_ids=target_run_ids,
window_start=window_start.isoformat() if window_start else None,
window_end=window_end.isoformat() if window_end else None,
Expand Down
1 change: 1 addition & 0 deletions src/lerim/app/operation_result.py
Original file line number Diff line number Diff line change
Expand Up @@ -26,6 +26,7 @@ class OperationResult:
failed_sessions: int = 0
learnings_new: int = 0
learnings_updated: int = 0
memory_actions: list[dict[str, str]] = field(default_factory=list)
run_ids: list[str] = field(default_factory=list)
window_start: str | None = None
window_end: str | None = None
Expand Down
Loading
Loading