Skip to content

Commit b4a6e7b

Browse files
lanmower and claude
authored and committed
refactor: extract asset-serving functions from server.js to lib/asset-server.js
Moves generateETag, warmAssetCache, serveFile, createChunkBatcher to lib/asset-server.js; server.js reduced by ~166L; htmlState object replaces _htmlCache/_htmlCacheEtag vars. Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
1 parent 2999c36 commit b4a6e7b

2 files changed

Lines changed: 149 additions & 4 deletions

File tree

lib/asset-server.js

Lines changed: 145 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,145 @@
1+
import fs from 'fs';
2+
import path from 'path';
3+
import zlib from 'zlib';
4+
import { LRUCache } from 'lru-cache';
5+
6+
// Extension → Content-Type map for static assets; serveFile falls back to
// application/octet-stream for extensions not listed here.
const MIME_TYPES = { '.html': 'text/html; charset=utf-8', '.js': 'application/javascript; charset=utf-8', '.css': 'text/css; charset=utf-8', '.json': 'application/json', '.png': 'image/png', '.jpg': 'image/jpeg', '.svg': 'image/svg+xml' };

// LRU cache of static asset bodies keyed by ETag; each entry is
// { raw: Buffer, gz: Buffer|null } (gz is null for small files).
export const _assetCache = new LRUCache({ max: 200 });
// Gzipped rendered HTML page plus the ETag it was built from (replaces the old
// _htmlCache/_htmlCacheEtag module vars); cleared by the watch-mode file watcher.
export const htmlState = { cache: null, etag: null };
10+
11+
/**
 * Builds a weak-content ETag from a file's stat record.
 * Encodes mtime (ms) and byte size in base-36 and wraps the pair in the
 * double quotes HTTP requires, e.g. `"9ix-iu"`.
 * @param {fs.Stats} stats - Result of fs.stat/statSync for the file.
 * @returns {string} Quoted ETag string.
 */
export function generateETag(stats) {
  const mtimePart = stats.mtimeMs.toString(36);
  const sizePart = stats.size.toString(36);
  return `"${mtimePart}-${sizePart}"`;
}
14+
15+
/**
 * Pre-populates the asset LRU cache so first requests avoid disk reads.
 * Scans the js/css/lib/vendor subdirectories of staticDir plus the root
 * app.js/theme.js files, caching each file's raw bytes (and a gzipped copy
 * for larger files) keyed by its ETag. Best-effort: per-file errors are
 * swallowed so a bad entry never aborts the warmup.
 * @param {string} staticDir - Root directory of static assets.
 */
export function warmAssetCache(staticDir) {
  let warmed = 0;

  // Shared read-maybe-gzip-and-cache step (previously duplicated verbatim in
  // both loops below). Returns true only when a new entry was added.
  const cacheFile = (filePath) => {
    const stats = fs.statSync(filePath);
    if (!stats.isFile()) return false;
    const etag = generateETag(stats);
    if (_assetCache.has(etag)) return false;
    const raw = fs.readFileSync(filePath);
    // Files under 860 bytes are stored uncompressed — presumably below the
    // size where gzip pays off on the wire; TODO confirm threshold rationale.
    _assetCache.set(etag, raw.length < 860 ? { raw, gz: null } : { raw, gz: zlib.gzipSync(raw, { level: 6 }) });
    return true;
  };

  for (const dir of ['js', 'css', 'lib', 'vendor']) {
    const full = path.join(staticDir, dir);
    if (!fs.existsSync(full)) continue;
    for (const file of fs.readdirSync(full)) {
      try {
        if (cacheFile(path.join(full, file))) warmed++;
      } catch (_) {} // best-effort: skip unreadable entries
    }
  }

  for (const file of ['app.js', 'theme.js']) {
    try {
      if (cacheFile(path.join(staticDir, file))) warmed++;
    } catch (_) {} // missing root files are fine
  }

  if (warmed > 0) console.log(`[CACHE] Pre-warmed ${warmed} static assets`);
}
48+
49+
/**
 * Serves a static file over HTTP with ETag/gzip caching.
 * Non-HTML assets go through the ETag-keyed LRU cache (_assetCache), honoring
 * If-None-Match with 304s. HTML gets the base-URL/version/token rewrite
 * applied, plus a hot-reload client in watch mode, and the gzipped result is
 * cached in htmlState for subsequent requests.
 * @param {string} filePath - Absolute path of the file to serve.
 * @param {http.ServerResponse} res
 * @param {http.IncomingMessage} req
 * @param {object} deps - compressAndSend, acceptsEncoding, watch flag,
 *   BASE_URL prefix, and PKG_VERSION injected by server.js.
 */
export function serveFile(filePath, res, req, { compressAndSend, acceptsEncoding, watch, BASE_URL, PKG_VERSION }) {
  const ext = path.extname(filePath).toLowerCase();
  const contentType = MIME_TYPES[ext] || 'application/octet-stream';

  if (ext !== '.html') {
    // Static asset path: cache raw + gzipped bodies keyed by ETag.
    fs.stat(filePath, (err, stats) => {
      if (err) { res.writeHead(500); res.end('Server error'); return; }
      const etag = generateETag(stats);
      if (req && req.headers['if-none-match'] === etag) { res.writeHead(304); res.end(); return; }
      // no-cache (not no-store): clients may cache but must revalidate via ETag.
      const cacheControl = 'public, no-cache';
      const sendCached = (cached) => {
        if (acceptsEncoding(req, 'gzip') && cached.gz) {
          res.writeHead(200, { 'Content-Type': contentType, 'Content-Encoding': 'gzip', 'Content-Length': cached.gz.length, 'ETag': etag, 'Cache-Control': cacheControl });
          res.end(cached.gz);
        } else {
          res.writeHead(200, { 'Content-Type': contentType, 'Content-Length': cached.raw.length, 'ETag': etag, 'Cache-Control': cacheControl });
          res.end(cached.raw);
        }
      };
      const cached = _assetCache.get(etag);
      if (cached) { sendCached(cached); return; }
      fs.readFile(filePath, (err2, raw) => {
        if (err2) { res.writeHead(500); res.end('Server error'); return; }
        // Small bodies skip compression entirely (gz stays null).
        if (raw.length < 860) { const entry = { raw, gz: null }; _assetCache.set(etag, entry); sendCached(entry); return; }
        const gz = zlib.gzipSync(raw, { level: 6 });
        const entry = { raw, gz };
        _assetCache.set(etag, entry);
        sendCached(entry);
      });
    });
    return;
  }

  // HTML path: rewrite the page for the configured base URL, then cache the
  // gzipped result keyed by the source file's ETag.
  fs.stat(filePath, (err, stats) => {
    if (err) { res.writeHead(500); res.end('Server error'); return; }
    const etag = generateETag(stats);
    // FIX: the cached fast path must only fire for clients that accept gzip.
    // htmlState.cache holds gzip-encoded bytes; previously any client hitting
    // a warm cache received Content-Encoding: gzip regardless of its
    // Accept-Encoding header and could not decode the body.
    if (!watch && htmlState.cache && htmlState.etag === etag && acceptsEncoding(req, 'gzip')) {
      res.writeHead(200, { 'Content-Type': contentType, 'Cache-Control': 'no-store', 'Content-Encoding': 'gzip', 'Content-Length': htmlState.cache.length });
      res.end(htmlState.cache);
      return;
    }
    fs.readFile(filePath, (err2, data) => {
      if (err2) { res.writeHead(500); res.end('Server error'); return; }
      let content = data.toString();
      // Inject the WS auth token only when a password is configured; escape
      // single quotes so the token cannot break out of the JS string literal.
      const wsToken = process.env.PASSWORD ? `window.__WS_TOKEN='${process.env.PASSWORD.replace(/'/g, "\\'")}';` : '';
      const baseTag = `<script>window.__BASE_URL='${BASE_URL}';window.__SERVER_VERSION='${PKG_VERSION}';${wsToken}</script>`;
      content = content.replace('<head>', `<head>\n  <base href="${BASE_URL}/">\n  ` + baseTag);
      // Rewrite asset references to live under the configured base URL.
      content = content.replace(/(href|src)="vendor\//g, `$1="${BASE_URL}/vendor/`);
      content = content.replace(/(src)="\/gm\/js\//g, `$1="${BASE_URL}/js/`);
      if (watch) {
        // Watch mode: append a hot-reload client that reloads on server signal.
        content += `\n<script>(function(){const ws=new WebSocket((location.protocol==='https:'?'wss://':'ws://')+location.host+'${BASE_URL}/hot-reload');ws.onmessage=e=>{if(JSON.parse(e.data).type==='reload')location.reload()};})();</script>`;
      }
      compressAndSend(req, res, 200, contentType, content);
      // Populate the cache only outside watch mode, and only with gzip output
      // (matching the accept check on the read side above).
      if (!watch && acceptsEncoding(req, 'gzip')) {
        htmlState.cache = zlib.gzipSync(Buffer.from(content), { level: 6 });
        htmlState.etag = etag;
      }
    });
  });
}
109+
110+
/**
 * Creates a write batcher for conversation chunks. Chunks accumulate in
 * memory and are flushed either when the batch reaches 10 entries or after a
 * 50 ms idle timer, whichever comes first. Flushes use a single DB
 * transaction when queries._db is available, falling back to per-chunk
 * writes (with per-chunk error logging) otherwise.
 * @param {object} queries - DB accessor exposing createChunk and optional _db.
 * @param {Function} debugLog - Logger for non-fatal write failures.
 * @returns {{add: Function, drain: Function}} Batch interface.
 */
export function createChunkBatcher(queries, debugLog) {
  const MAX_BATCH = 10;
  const FLUSH_DELAY_MS = 50;
  const queue = [];
  let flushTimer = null;

  // Cancel any pending delayed flush.
  const cancelTimer = () => {
    if (flushTimer) {
      clearTimeout(flushTimer);
      flushTimer = null;
    }
  };

  // Write one queued chunk through the queries layer.
  const writeOne = (chunk) =>
    queries.createChunk(chunk.sessionId, chunk.conversationId, chunk.sequence, chunk.type, chunk.data);

  // Persist everything currently queued. On transaction failure, retry each
  // chunk individually so one bad row doesn't lose the whole batch.
  function flush() {
    if (queue.length === 0) return;
    const batch = queue.splice(0);
    try {
      if (queries._db) {
        queries._db.transaction(() => {
          for (const chunk of batch) writeOne(chunk);
        })();
      } else {
        for (const chunk of batch) {
          try {
            writeOne(chunk);
          } catch (e) {
            debugLog(`[chunk] ${e.message}`);
          }
        }
      }
    } catch (err) {
      debugLog(`[chunk-batch] Batch write failed: ${err.message}`);
      for (const chunk of batch) {
        try {
          writeOne(chunk);
        } catch (_) {} // already reported at batch level
      }
    }
  }

  // Queue one chunk; flush immediately at MAX_BATCH, else arm the idle timer.
  function add(sessionId, conversationId, sequence, blockType, blockData) {
    queue.push({ sessionId, conversationId, sequence, type: blockType, data: blockData });
    if (queue.length >= MAX_BATCH) {
      cancelTimer();
      flush();
    } else if (flushTimer === null) {
      flushTimer = setTimeout(() => {
        flushTimer = null;
        flush();
      }, FLUSH_DELAY_MS);
    }
  }

  // Force out any queued chunks and stop the pending timer.
  function drain() {
    cancelTimer();
    flush();
  }

  return { add, drain };
}

server.js

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -1706,7 +1706,7 @@ async function processMessageWithStreaming(conversationId, messageId, sessionId,
17061706
execMachine.send(conversationId, { type: 'START', sessionId });
17071707
queries.setIsStreaming(conversationId, true);
17081708
queries.updateSession(sessionId, { status: 'active' });
1709-
const batcher = createChunkBatcher();
1709+
const batcher = createChunkBatcher(queries, debugLog);
17101710

17111711
try {
17121712
debugLog(`[stream] Starting: conversationId=${conversationId}, sessionId=${sessionId}`);
@@ -2757,8 +2757,8 @@ if (watch) {
27572757
fs.watchFile(fp, { interval: 100 }, (curr, prev) => {
27582758
if (curr.mtime > prev.mtime) {
27592759
_assetCache.clear();
2760-
_htmlCache = null;
2761-
_htmlCacheEtag = null;
2760+
htmlState.cache = null;
2761+
htmlState.etag = null;
27622762
hotReloadClients.forEach(c => { if (c.readyState === 1) c.send(JSON.stringify({ type: 'reload' })); });
27632763
}
27642764
});
@@ -2821,7 +2821,7 @@ function onServerReady() {
28212821
}
28222822

28232823
recoverStaleSessions();
2824-
warmAssetCache();
2824+
warmAssetCache(staticDir);
28252825

28262826
// Run DB cleanup on startup and every 6 hours
28272827
try { queries.cleanup(); console.log('[cleanup] Initial DB cleanup complete'); } catch (e) { console.error('[cleanup] Error:', e.message); }

0 commit comments

Comments
 (0)