Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 0 additions & 2 deletions docker-compose.smoke.yml
Original file line number Diff line number Diff line change
Expand Up @@ -4,8 +4,6 @@
# Usage: docker compose -f docker-compose.yml -f docker-compose.smoke.yml up --build
services:
app:
ports:
- "${APP_PORT:-3060}:3050"
environment:
- EMBEDDING_PROVIDER=transformers
- EMBEDDING_MODEL=Xenova/all-MiniLM-L6-v2
Expand Down
5 changes: 3 additions & 2 deletions docker-compose.yml
Original file line number Diff line number Diff line change
@@ -1,12 +1,13 @@
services:
postgres:
image: pgvector/pgvector:pg17
restart: unless-stopped
environment:
POSTGRES_USER: supermem
POSTGRES_PASSWORD: supermem
POSTGRES_DB: supermem
ports:
- "5433:5432"
- "${POSTGRES_PORT:-5433}:5432"
volumes:
- pgdata:/var/lib/postgresql/data
healthcheck:
Expand All @@ -19,7 +20,7 @@ services:
build: .
restart: unless-stopped
ports:
- "3050:3050"
- "${APP_PORT:-3050}:3050"
environment:
DATABASE_URL: postgresql://supermem:supermem@postgres:5432/supermem
PORT: "3050"
Expand Down
61 changes: 47 additions & 14 deletions scripts/docker-smoke-test.sh
Original file line number Diff line number Diff line change
Expand Up @@ -23,8 +23,9 @@ set -euo pipefail

SCRIPT_DIR="$(cd "$(dirname "$0")" && pwd)"
PROJECT_DIR="$(cd "$SCRIPT_DIR/.." && pwd)"
COMPOSE_PROJECT="supermem-smoke-test"
APP_PORT=3060 # Use non-default port to avoid conflicts with dev server
COMPOSE_PROJECT="${COMPOSE_PROJECT:-}"
APP_PORT="${APP_PORT:-}"
POSTGRES_PORT="${POSTGRES_PORT:-}"
HEALTH_TIMEOUT=90
HEALTH_INTERVAL=2

Expand All @@ -42,6 +43,33 @@ log() { echo -e "${GREEN}[smoke]${NC} $*"; }
warn() { echo -e "${YELLOW}[smoke]${NC} $*"; }
fail() { echo -e "${RED}[FAIL]${NC} $*"; }

# True (exit 0) when some process already holds a TCP LISTEN socket on $1.
# -n/-P disable host/port name resolution so the probe stays fast; output is
# discarded because callers only inspect the exit status.
port_in_use() {
local candidate="$1"
lsof -n -P -iTCP:"$candidate" -sTCP:LISTEN >/dev/null 2>&1
}

# Echo the first TCP port >= $1 with no active listener, probing upward one
# port at a time via the sibling port_in_use helper.
find_free_port() {
local candidate="$1"
while port_in_use "$candidate"; do
candidate=$((candidate + 1))
done
echo "$candidate"
}

# Decide which host port to publish.
#   $1 — explicitly requested port ("" when the caller set nothing)
#   $2 — default to start probing from when nothing was requested
# An explicit request is honoured exactly: if that port is taken we abort
# instead of silently moving elsewhere, so the caller's expectation holds.
resolve_port() {
local wanted="$1" default="$2"
if [[ -z "$wanted" ]]; then
find_free_port "$default"
return
fi
if port_in_use "$wanted"; then
fail "Requested port is already in use: $wanted"
exit 1
fi
echo "$wanted"
}

assert_ok() {
local name="$1"
total=$((total + 1))
Expand All @@ -62,7 +90,7 @@ cleanup() {
trap cleanup EXIT

# --- Pre-flight checks ---
for cmd in docker curl jq; do
for cmd in docker curl jq lsof; do
if ! command -v "$cmd" &>/dev/null; then
fail "Required command not found: $cmd"
exit 1
Expand All @@ -76,11 +104,17 @@ fi

cd "$PROJECT_DIR"

APP_PORT="$(resolve_port "$APP_PORT" 3060)"
POSTGRES_PORT="$(resolve_port "$POSTGRES_PORT" 5444)"
if [[ -z "$COMPOSE_PROJECT" ]]; then
COMPOSE_PROJECT="supermem-smoke-test-${APP_PORT}-${POSTGRES_PORT}"
fi

# --- Build + Start ---
log "Starting compose stack (project=$COMPOSE_PROJECT, port=$APP_PORT)..."
log "Starting compose stack (project=$COMPOSE_PROJECT, app_port=$APP_PORT, postgres_port=$POSTGRES_PORT)..."

# Override the app port to avoid conflicts
export APP_PORT
# Override published ports to avoid conflicts with local dev stacks
export APP_PORT POSTGRES_PORT

if [[ "${SKIP_BUILD:-}" == "1" ]]; then
docker compose -p "$COMPOSE_PROJECT" \
Expand Down Expand Up @@ -154,10 +188,9 @@ fi
# --- Test 4: Database connectivity (via stats endpoint) ---
log "Test: database connectivity"
stats_status=$(curl -sf -o /dev/null -w '%{http_code}' \
-X POST "$BASE/memories/stats" \
-H "Content-Type: application/json" \
-d '{"user_id":"smoke-test-user"}')
assert_ok "POST /memories/stats returns 200 (DB connected)" \
-G "$BASE/memories/stats" \
--data-urlencode "user_id=smoke-test-user")
assert_ok "GET /memories/stats returns 200 (DB connected)" \
'[ "$stats_status" = "200" ]'

# --- Test 5: Quick ingest endpoint (no LLM required — embedding-only dedup) ---
Expand All @@ -171,7 +204,7 @@ ingest_response=$(curl -sf -w '\n%{http_code}' \
"source_site": "docker-smoke-test"
}')
ingest_status=$(echo "$ingest_response" | tail -1)
ingest_body=$(echo "$ingest_response" | head -n -1)
ingest_body=$(echo "$ingest_response" | sed '$d')
assert_ok "POST /memories/ingest/quick returns 200" \
'[ "$ingest_status" = "200" ]'
assert_ok "Ingest stored at least 1 memory" \
Expand All @@ -188,7 +221,7 @@ search_response=$(curl -sf -w '\n%{http_code}' \
"source_site": "docker-smoke-test"
}')
search_status=$(echo "$search_response" | tail -1)
search_body=$(echo "$search_response" | head -n -1)
search_body=$(echo "$search_response" | sed '$d')
assert_ok "POST /memories/search returns 200" \
'[ "$search_status" = "200" ]'
assert_ok "Search returns at least 1 result" \
Expand All @@ -205,10 +238,10 @@ assert_ok "POST /memories/reset-source returns 200" \

# --- Test 8: Input validation ---
log "Test: input validation"
bad_ingest_status=$(curl -sf -o /dev/null -w '%{http_code}' \
bad_ingest_status=$(curl -s -o /dev/null -w '%{http_code}' \
-X POST "$BASE/memories/ingest" \
-H "Content-Type: application/json" \
-d '{"user_id":"x"}' 2>/dev/null || echo "400")
-d '{"user_id":"x"}')
assert_ok "Missing required fields returns 400" \
'[ "$bad_ingest_status" = "400" ]'

Expand Down
144 changes: 144 additions & 0 deletions src/__tests__/route-validation.test.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,144 @@
/**
* Route-level validation tests for memory API endpoints.
* Tests UUID validation on param/query inputs and filter behavior
* on the list endpoint. Requires DATABASE_URL in .env.test.
*/

import { describe, it, expect, beforeAll, afterAll, vi } from 'vitest';

// Mock embedText to avoid hitting the real embedding provider in CI where
// OPENAI_API_KEY is a placeholder. Returns a deterministic zero vector
// matching the configured embedding dimensions.
// NOTE: vitest hoists vi.mock calls above the module imports below, so the
// router/service modules imported later resolve this mocked embedding module.
vi.mock('../services/embedding.js', async (importOriginal) => {
const actual = await importOriginal<typeof import('../services/embedding.js')>();
return {
...actual,
// Deterministic zero vector sized to the configured dimensions, so no
// network call to a real embedding provider ever happens in these tests.
embedText: vi.fn(async () => {
const { config: cfg } = await import('../config.js');
return new Array(cfg.embeddingDimensions).fill(0);
}),
};
});

import { pool } from '../db/pool.js';
import { config } from '../config.js';
import { MemoryRepository } from '../db/memory-repository.js';
import { ClaimRepository } from '../db/claim-repository.js';
import { MemoryService } from '../services/memory-service.js';
import { createMemoryRouter } from '../routes/memories.js';
import express from 'express';
import { readFileSync } from 'node:fs';
import { resolve, dirname } from 'node:path';
import { fileURLToPath } from 'node:url';

// Resolve this test file's directory (ESM modules have no __dirname builtin).
const __dirname = dirname(fileURLToPath(import.meta.url));
const TEST_USER = 'route-validation-test-user';
// Well-formed UUID that is never inserted — used to exercise 404 paths.
const VALID_UUID = '00000000-0000-0000-0000-000000000001';
const INVALID_UUID = 'not-a-uuid';

// Shared server state; assigned once the app starts listening in beforeAll.
let server: ReturnType<typeof app.listen>;
let baseUrl: string;
const app = express();
app.use(express.json());

beforeAll(async () => {
// Apply the schema with the embedding-dimension placeholder substituted so
// the vector column width matches the configured (mocked) embedding size.
const raw = readFileSync(resolve(__dirname, '../db/schema.sql'), 'utf-8');
const sql = raw.replace(/\{\{EMBEDDING_DIMENSIONS\}\}/g, String(config.embeddingDimensions));
await pool.query(sql);

// Wire the real repository/service stack onto the local express app.
const repo = new MemoryRepository(pool);
const claimRepo = new ClaimRepository(pool);
const service = new MemoryService(repo, claimRepo);
app.use('/memories', createMemoryRouter(service));

// Listen on an ephemeral port (0) so parallel test runs never collide.
await new Promise<void>((resolve) => {
server = app.listen(0, () => {
const addr = server.address();
// server.address() returns an AddressInfo object for TCP listeners.
const port = typeof addr === 'object' && addr ? addr.port : 0;
baseUrl = `http://localhost:${port}`;
resolve();
});
});
});

afterAll(async () => {
  // Tear down in reverse order: stop the HTTP server first, then drain the
  // pg connection pool.
  await new Promise<void>((done) => server.close(() => done()));
  await pool.end();
});

// The GET route must reject malformed ids up front and fall through to a
// plain 404 for ids that are valid UUIDs but match no row.
describe('GET /memories/:id — UUID validation', () => {
  it('returns 400 for an invalid UUID', async () => {
    const response = await fetch(`${baseUrl}/memories/${INVALID_UUID}?user_id=${TEST_USER}`);
    expect(response.status).toBe(400);
    const payload = await response.json();
    expect(payload.error).toMatch(/valid UUID/);
  });

  it('returns 404 for a valid but non-existent UUID', async () => {
    const response = await fetch(`${baseUrl}/memories/${VALID_UUID}?user_id=${TEST_USER}`);
    expect(response.status).toBe(404);
  });
});

// DELETE must reject a malformed id before touching the database.
describe('DELETE /memories/:id — UUID validation', () => {
it('returns 400 for an invalid UUID', async () => {
const res = await fetch(`${baseUrl}/memories/${INVALID_UUID}?user_id=${TEST_USER}`, {
method: 'DELETE',
});
expect(res.status).toBe(400);
const body = await res.json();
expect(body.error).toMatch(/valid UUID/);
});
});

// skip_extraction bypasses fact extraction and stores the conversation
// verbatim as a single memory (the mocked embedText keeps this offline).
describe('POST /memories/ingest/quick — skip_extraction (storeVerbatim)', () => {
it('stores a single memory without extraction when skip_extraction is true', async () => {
const res = await fetch(`${baseUrl}/memories/ingest/quick`, {
method: 'POST',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify({
user_id: TEST_USER,
conversation: 'Verbatim content that should not be extracted into facts.',
source_site: 'verbatim-test',
source_url: 'https://example.com/verbatim',
skip_extraction: true,
}),
});
expect(res.status).toBe(200);
const body = await res.json();
// Exactly one stored memory proves extraction was skipped.
expect(body.memoriesStored).toBe(1);
expect(body.memoryIds).toHaveLength(1);
});
});

// NOTE(review): this only asserts the response shape — it does not verify
// that memories from OTHER source_sites are excluded. Consider seeding two
// sites and asserting the filter actually narrows results.
describe('GET /memories/list — source_site filter', () => {
it('returns memories filtered by source_site', async () => {
const res = await fetch(
`${baseUrl}/memories/list?user_id=${TEST_USER}&source_site=test-site`,
);
expect(res.status).toBe(200);
const body = await res.json();
expect(body).toHaveProperty('memories');
expect(body).toHaveProperty('count');
});
});

// episode_id is validated as a UUID before being used in the SQL filter.
describe('GET /memories/list — episode_id filter', () => {
it('returns 400 for an invalid episode_id', async () => {
const res = await fetch(
`${baseUrl}/memories/list?user_id=${TEST_USER}&episode_id=${INVALID_UUID}`,
);
expect(res.status).toBe(400);
const body = await res.json();
expect(body.error).toMatch(/valid UUID/);
});

it('accepts a valid episode_id UUID', async () => {
const res = await fetch(
`${baseUrl}/memories/list?user_id=${TEST_USER}&episode_id=${VALID_UUID}`,
);
// A valid-but-unknown episode id is not an error; it just matches no rows.
expect(res.status).toBe(200);
const body = await res.json();
expect(body).toHaveProperty('memories');
});
});
4 changes: 2 additions & 2 deletions src/db/memory-repository.ts
Original file line number Diff line number Diff line change
Expand Up @@ -161,8 +161,8 @@ export class MemoryRepository {
return getMemoryWithClient(client, id, userId, true);
}

async listMemories(userId: string, limit: number = 20, offset: number = 0) {
return listMemories(this.pool, userId, limit, offset);
async listMemories(userId: string, limit: number = 20, offset: number = 0, sourceSite?: string, episodeId?: string) {
return listMemories(this.pool, userId, limit, offset, sourceSite, episodeId);
}

async listMemoriesInWorkspace(workspaceId: string, limit: number = 20, offset: number = 0) {
Expand Down
16 changes: 13 additions & 3 deletions src/db/repository-read.ts
Original file line number Diff line number Diff line change
Expand Up @@ -55,13 +55,23 @@ export async function getMemoryWithClient(
return result.rows[0] ? normalizeMemoryRow(result.rows[0]) : null;
}

export async function listMemories(pool: pg.Pool, userId: string, limit: number, offset: number): Promise<MemoryRow[]> {
export async function listMemories(pool: pg.Pool, userId: string, limit: number, offset: number, sourceSite?: string, episodeId?: string): Promise<MemoryRow[]> {
const params: unknown[] = [userId, limit, offset];
let extraClauses = '';
if (sourceSite) {
params.push(sourceSite);
extraClauses += ` AND source_site = $${params.length}`;
}
if (episodeId) {
params.push(episodeId);
extraClauses += ` AND episode_id = $${params.length}`;
}
const result = await pool.query(
`SELECT * FROM memories
WHERE user_id = $1 AND deleted_at IS NULL AND expired_at IS NULL AND status = 'active'
AND workspace_id IS NULL
AND workspace_id IS NULL${extraClauses}
ORDER BY created_at DESC LIMIT $2 OFFSET $3`,
[userId, limit, offset],
params,
);
return result.rows.map(normalizeMemoryRow);
}
Expand Down
Loading
Loading