Skip to content

Commit e6b98a1

Browse files
authored
fix(cli): propagate cloud workspace routing and incremental sync (#712)
Signed-off-by: phernandez <paul@basicmachines.co>
1 parent 733c4f7 commit e6b98a1

12 files changed

Lines changed: 669 additions & 108 deletions

File tree

.github/workflows/test.yml

Lines changed: 34 additions & 9 deletions
Original file line number · Diff line number · Diff line change
@@ -52,7 +52,6 @@ jobs:
5252
test-sqlite-unit:
5353
name: Test SQLite Unit (${{ matrix.os }}, Python ${{ matrix.python-version }})
5454
timeout-minutes: 30
55-
needs: [static-checks]
5655
strategy:
5756
fail-fast: false
5857
matrix:
@@ -99,7 +98,6 @@ jobs:
9998
test-sqlite-integration:
10099
name: Test SQLite Integration (${{ matrix.os }}, Python ${{ matrix.python-version }})
101100
timeout-minutes: 45
102-
needs: [static-checks]
103101
strategy:
104102
fail-fast: false
105103
matrix:
@@ -146,7 +144,7 @@ jobs:
146144
test-postgres-unit:
147145
name: Test Postgres Unit (Python ${{ matrix.python-version }})
148146
timeout-minutes: 30
149-
needs: [static-checks]
147+
if: github.event_name != 'pull_request' || matrix.python-version == '3.12'
150148
strategy:
151149
fail-fast: false
152150
matrix:
@@ -155,8 +153,22 @@ jobs:
155153
- python-version: "3.13"
156154
- python-version: "3.14"
157155
runs-on: ubuntu-latest
158-
159-
# Note: No services section needed - testcontainers handles Postgres in Docker
156+
services:
157+
postgres:
158+
image: pgvector/pgvector:pg16
159+
env:
160+
POSTGRES_USER: basic_memory_user
161+
POSTGRES_PASSWORD: dev_password
162+
POSTGRES_DB: basic_memory_test
163+
ports:
164+
- 5432:5432
165+
options: >-
166+
--health-cmd "pg_isready -U basic_memory_user -d basic_memory_test"
167+
--health-interval 10s
168+
--health-timeout 5s
169+
--health-retries 5
170+
env:
171+
BASIC_MEMORY_TEST_POSTGRES_URL: postgresql://basic_memory_user:dev_password@127.0.0.1:5432/basic_memory_test
160172

161173
steps:
162174
- uses: actions/checkout@v4
@@ -190,7 +202,7 @@ jobs:
190202
test-postgres-integration:
191203
name: Test Postgres Integration (Python ${{ matrix.python-version }})
192204
timeout-minutes: 45
193-
needs: [static-checks]
205+
if: github.event_name != 'pull_request' || matrix.python-version == '3.12'
194206
strategy:
195207
fail-fast: false
196208
matrix:
@@ -199,8 +211,22 @@ jobs:
199211
- python-version: "3.13"
200212
- python-version: "3.14"
201213
runs-on: ubuntu-latest
202-
203-
# Note: No services section needed - testcontainers handles Postgres in Docker
214+
services:
215+
postgres:
216+
image: pgvector/pgvector:pg16
217+
env:
218+
POSTGRES_USER: basic_memory_user
219+
POSTGRES_PASSWORD: dev_password
220+
POSTGRES_DB: basic_memory_test
221+
ports:
222+
- 5432:5432
223+
options: >-
224+
--health-cmd "pg_isready -U basic_memory_user -d basic_memory_test"
225+
--health-interval 10s
226+
--health-timeout 5s
227+
--health-retries 5
228+
env:
229+
BASIC_MEMORY_TEST_POSTGRES_URL: postgresql://basic_memory_user:dev_password@127.0.0.1:5432/basic_memory_test
204230

205231
steps:
206232
- uses: actions/checkout@v4
@@ -234,7 +260,6 @@ jobs:
234260
test-semantic:
235261
name: Test Semantic (Python 3.12)
236262
timeout-minutes: 45
237-
needs: [static-checks]
238263
runs-on: ubuntu-latest
239264

240265
steps:

AGENTS.md

Lines changed: 4 additions & 0 deletions
Original file line number · Diff line number · Diff line change
@@ -442,5 +442,9 @@ With GitHub integration, the development workflow includes:
442442
3. **Branch management** - Claude can create feature branches for implementations
443443
4. **Documentation maintenance** - Claude can keep documentation updated as the code evolves
444444
5. **Code Commits**: ALWAYS sign off commits with `git commit -s`
445+
6. **Pull Request Titles**: PR titles must follow the semantic format enforced by `.github/workflows/pr-title.yml`: `type(scope): summary`
446+
- Allowed types: `feat`, `fix`, `chore`, `docs`, `style`, `refactor`, `perf`, `test`, `build`, `ci`
447+
- Allowed scopes: `core`, `cli`, `api`, `mcp`, `sync`, `ui`, `deps`, `installer`
448+
- Example: `fix(cli): propagate cloud workspace routing`
445449

446450
This level of integration represents a new paradigm in AI-human collaboration, where the AI assistant becomes a full-fledged team member rather than just a tool for generating code snippets.

src/basic_memory/cli/commands/cloud/cloud_utils.py

Lines changed: 46 additions & 9 deletions
Original file line number · Diff line number · Diff line change
@@ -2,6 +2,7 @@
22

33
from basic_memory.cli.commands.cloud.api_client import make_api_request
44
from basic_memory.config import ConfigManager
5+
from basic_memory.mcp.async_client import resolve_configured_workspace
56
from basic_memory.schemas.cloud import (
67
CloudProjectList,
78
CloudProjectCreateRequest,
@@ -16,8 +17,25 @@ class CloudUtilsError(Exception):
1617
pass
1718

1819

20+
def _workspace_headers(
21+
*,
22+
project_name: str | None = None,
23+
workspace: str | None = None,
24+
) -> dict[str, str]:
25+
"""Build optional workspace headers using the CLI config resolution chain."""
26+
resolved_workspace = resolve_configured_workspace(
27+
project_name=project_name,
28+
workspace=workspace,
29+
)
30+
if resolved_workspace is None:
31+
return {}
32+
return {"X-Workspace-ID": resolved_workspace}
33+
34+
1935
async def fetch_cloud_projects(
2036
*,
37+
project_name: str | None = None,
38+
workspace: str | None = None,
2139
api_request=make_api_request,
2240
) -> CloudProjectList:
2341
"""Fetch list of projects from cloud API.
@@ -30,7 +48,11 @@ async def fetch_cloud_projects(
3048
config = config_manager.config
3149
host_url = config.cloud_host.rstrip("/")
3250

33-
response = await api_request(method="GET", url=f"{host_url}/proxy/v2/projects/")
51+
response = await api_request(
52+
method="GET",
53+
url=f"{host_url}/proxy/v2/projects/",
54+
headers=_workspace_headers(project_name=project_name, workspace=workspace),
55+
)
3456

3557
return CloudProjectList.model_validate(response.json())
3658
except Exception as e:
@@ -40,12 +62,14 @@ async def fetch_cloud_projects(
4062
async def create_cloud_project(
4163
project_name: str,
4264
*,
65+
workspace: str | None = None,
4366
api_request=make_api_request,
4467
) -> CloudProjectCreateResponse:
4568
"""Create a new project on cloud.
4669
4770
Args:
4871
project_name: Name of project to create
72+
workspace: Optional workspace override for tenant-scoped project creation
4973
5074
Returns:
5175
CloudProjectCreateResponse with project details from API
@@ -67,7 +91,10 @@ async def create_cloud_project(
6791
response = await api_request(
6892
method="POST",
6993
url=f"{host_url}/proxy/v2/projects/",
70-
headers={"Content-Type": "application/json"},
94+
headers={
95+
"Content-Type": "application/json",
96+
**_workspace_headers(project_name=project_name, workspace=workspace),
97+
},
7198
json_data=project_data.model_dump(),
7299
)
73100

@@ -91,18 +118,28 @@ async def sync_project(project_name: str, force_full: bool = False) -> None:
91118
raise CloudUtilsError(f"Failed to sync project '{project_name}': {e}") from e
92119

93120

94-
async def project_exists(project_name: str, *, api_request=make_api_request) -> bool:
121+
async def project_exists(
122+
project_name: str,
123+
*,
124+
workspace: str | None = None,
125+
api_request=make_api_request,
126+
) -> bool:
95127
"""Check if a project exists on cloud.
96128
97129
Args:
98130
project_name: Name of project to check
131+
workspace: Optional workspace override for tenant-scoped project lookup
99132
100133
Returns:
101134
True if project exists, False otherwise
135+
136+
Raises:
137+
CloudUtilsError: If the project list cannot be fetched from cloud
102138
"""
103-
try:
104-
projects = await fetch_cloud_projects(api_request=api_request)
105-
project_names = {p.name for p in projects.projects}
106-
return project_name in project_names
107-
except Exception:
108-
return False
139+
projects = await fetch_cloud_projects(
140+
project_name=project_name,
141+
workspace=workspace,
142+
api_request=api_request,
143+
)
144+
project_names = {p.name for p in projects.projects}
145+
return project_name in project_names

src/basic_memory/cli/commands/cloud/project_sync.py

Lines changed: 10 additions & 7 deletions
Original file line number · Diff line number · Diff line change
@@ -54,7 +54,7 @@ def _require_cloud_credentials(config) -> None:
5454

5555
async def _get_cloud_project(name: str) -> ProjectItem | None:
5656
"""Fetch a project by name from the cloud API."""
57-
async with get_client() as client:
57+
async with get_client(project_name=name) as client:
5858
projects_list = await ProjectClient(client).list_projects()
5959
for proj in projects_list.projects:
6060
if generate_permalink(proj.name) == generate_permalink(name):
@@ -129,9 +129,9 @@ def sync_project_command(
129129
if not dry_run:
130130

131131
async def _trigger_db_sync():
132-
async with get_client() as client:
132+
async with get_client(project_name=name) as client:
133133
return await ProjectClient(client).sync(
134-
project_data.external_id, force_full=True
134+
project_data.external_id, force_full=False
135135
)
136136

137137
try:
@@ -195,7 +195,10 @@ def bisync_project_command(
195195
# Update config — sync_entry is guaranteed non-None because
196196
# _get_sync_project validated local_sync_path (which comes from sync_entry)
197197
sync_entry = config.projects.get(name)
198-
assert sync_entry is not None
198+
if sync_entry is None:
199+
raise RuntimeError(
200+
f"Sync entry for project '{name}' unexpectedly missing after validation"
201+
)
199202
sync_entry.last_sync = datetime.now()
200203
sync_entry.bisync_initialized = True
201204
ConfigManager().save_config(config)
@@ -204,9 +207,9 @@ def bisync_project_command(
204207
if not dry_run:
205208

206209
async def _trigger_db_sync():
207-
async with get_client() as client:
210+
async with get_client(project_name=name) as client:
208211
return await ProjectClient(client).sync(
209-
project_data.external_id, force_full=True
212+
project_data.external_id, force_full=False
210213
)
211214

212215
try:
@@ -320,7 +323,7 @@ def setup_project_sync(
320323

321324
async def _verify_project_exists():
322325
"""Verify the project exists on cloud by listing all projects."""
323-
async with get_client() as client:
326+
async with get_client(project_name=name) as client:
324327
projects_list = await ProjectClient(client).list_projects()
325328
project_names = [p.name for p in projects_list.projects]
326329
if name not in project_names:

src/basic_memory/cli/commands/cloud/upload_command.py

Lines changed: 24 additions & 6 deletions
Original file line number · Diff line number · Diff line change
@@ -1,5 +1,6 @@
11
"""Upload CLI commands for basic-memory projects."""
22

3+
from functools import partial
34
from pathlib import Path
45

56
import typer
@@ -8,12 +9,16 @@
89
from basic_memory.cli.app import cloud_app
910
from basic_memory.cli.commands.command_utils import run_with_cleanup
1011
from basic_memory.cli.commands.cloud.cloud_utils import (
12+
CloudUtilsError,
1113
create_cloud_project,
1214
project_exists,
1315
sync_project,
1416
)
1517
from basic_memory.cli.commands.cloud.upload import upload_path
16-
from basic_memory.mcp.async_client import get_cloud_control_plane_client
18+
from basic_memory.mcp.async_client import (
19+
get_cloud_control_plane_client,
20+
resolve_configured_workspace,
21+
)
1722

1823
console = Console()
1924

@@ -73,12 +78,20 @@ def upload(
7378
"""
7479

7580
async def _upload():
81+
resolved_workspace = resolve_configured_workspace(project_name=project)
82+
83+
try:
84+
project_already_exists = await project_exists(project, workspace=resolved_workspace)
85+
except CloudUtilsError as e:
86+
console.print(f"[red]Failed to check cloud project '{project}': {e}[/red]")
87+
raise typer.Exit(1)
88+
7689
# Check if project exists
77-
if not await project_exists(project):
90+
if not project_already_exists:
7891
if create_project:
7992
console.print(f"[blue]Creating cloud project '{project}'...[/blue]")
8093
try:
81-
await create_cloud_project(project)
94+
await create_cloud_project(project, workspace=resolved_workspace)
8295
console.print(f"[green]Created project '{project}'[/green]")
8396
except Exception as e:
8497
console.print(f"[red]Failed to create project: {e}[/red]")
@@ -106,7 +119,10 @@ async def _upload():
106119
verbose=verbose,
107120
use_gitignore=not no_gitignore,
108121
dry_run=dry_run,
109-
client_cm_factory=get_cloud_control_plane_client,
122+
client_cm_factory=partial(
123+
get_cloud_control_plane_client,
124+
workspace=resolved_workspace,
125+
),
110126
)
111127
if not success:
112128
console.print("[red]Upload failed[/red]")
@@ -117,8 +133,10 @@ async def _upload():
117133
else:
118134
console.print(f"[green]Successfully uploaded to '{project}'[/green]")
119135

120-
# Sync project if requested (skip on dry run)
121-
# Force full scan after bisync to ensure database is up-to-date with synced files
136+
# Sync project if requested (skip on dry run).
137+
# Trigger: upload adds new files the watcher has not observed locally.
138+
# Why: force_full ensures those freshly uploaded files are indexed immediately.
139+
# Outcome: upload keeps its eager reindex while sync/bisync stay incremental.
122140
if sync and not dry_run:
123141
console.print(f"[blue]Syncing project '{project}'...[/blue]")
124142
try:

0 commit comments

Comments (0)