Skip to content

Commit ff1c43f

Browse files
committed
feat: strip the `/v1` suffix from configured LM Studio base URLs (the code now appends `/v1` automatically) and add build-optimization profiles and feature groups
1 parent adf871f commit ff1c43f

13 files changed

Lines changed: 221 additions & 91 deletions

File tree

CHANGELOG.md

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -60,8 +60,8 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
6060
- **Availability checking** via `/models` endpoint before initialization
6161
- **Configuration options**:
6262
- Environment variables: `CODEGRAPH_LMSTUDIO_MODEL`, `CODEGRAPH_LMSTUDIO_URL`, `CODEGRAPH_LMSTUDIO_TIMEOUT`, `CODEGRAPH_MAX_CHUNK_TOKENS`
63-
- Config file: `embedding.provider = "lmstudio"`, `embedding.lmstudio_url = "http://localhost:1234/v1"`
64-
- Default URL: `http://localhost:1234/v1`
63+
- Config file: `embedding.provider = "lmstudio"`, `embedding.lmstudio_url = "http://localhost:1234"`
64+
- Default URL: `http://localhost:1234` (the `/v1` path is appended automatically)
6565
- **Feature flag**: `lmstudio` (requires reqwest)
6666
- **Performance characteristics**: 50 texts/sec throughput, 500ms typical latency, high memory usage (running full model)
6767
- **Integration**: Seamless integration with `EmbeddingGenerator` factory, automatic provider selection based on config

Cargo.lock

Lines changed: 0 additions & 60 deletions
Some generated files are not rendered by default. Learn more about customizing how changed files appear on GitHub.

Cargo.toml

Lines changed: 71 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -29,6 +29,65 @@ authors = ["CodeGraph Team"]
2929
license = "MIT OR Apache-2.0"
3030
repository = "https://github.com/codegraph/embedding-system"
3131

32+
# Feature groups for common development scenarios
33+
[workspace.metadata.cargo-features]
34+
# Minimal build for MCP server development (fastest build times)
35+
mcp-minimal = [
36+
"ai-enhanced",
37+
"embeddings-ollama",
38+
"codegraph-ai/anthropic"
39+
]
40+
41+
# MCP development with LM Studio support
42+
mcp-lmstudio = [
43+
"ai-enhanced",
44+
"embeddings-ollama",
45+
"embeddings-lmstudio",
46+
"codegraph-ai/anthropic",
47+
"codegraph-ai/openai-compatible"
48+
]
49+
50+
# Full local stack (no cloud dependencies)
51+
local-stack = [
52+
"daemon",
53+
"ai-enhanced",
54+
"embeddings-ollama",
55+
"embeddings-lmstudio",
56+
"embeddings-local",
57+
"codegraph-ai/openai-compatible",
58+
"autoagents-experimental"
59+
]
60+
61+
# Cloud-enabled build
62+
cloud-enabled = [
63+
"daemon",
64+
"ai-enhanced",
65+
"cloud",
66+
"embeddings-jina",
67+
"codegraph-ai/anthropic",
68+
"codegraph-ai/openai-llm",
69+
"server-http",
70+
"autoagents-experimental"
71+
]
72+
73+
# Everything (development/testing)
74+
full = [
75+
"daemon",
76+
"ai-enhanced",
77+
"cloud",
78+
"embeddings",
79+
"embeddings-local",
80+
"embeddings-openai",
81+
"embeddings-ollama",
82+
"embeddings-jina",
83+
"embeddings-lmstudio",
84+
"codegraph-ai/anthropic",
85+
"codegraph-ai/openai-llm",
86+
"codegraph-ai/openai-compatible",
87+
"server-http",
88+
"autoagents-experimental"
89+
]
90+
3291
[workspace.dependencies]
3392
fastrand = "2.0"
3493
governor = { version = "0.10.1" }
@@ -223,14 +282,25 @@ inherits = "release"
223282
debug = true
224283

225284
[profile.dev]
226-
opt-level = 1
285+
opt-level = 0 # Faster compilation (0 = no optimization)
227286
debug = true
228287
incremental = true
288+
codegen-units = 256 # Parallel codegen for faster builds
229289

230290
[profile.test]
231291
opt-level = 1
232292
debug = true
233293

294+
# Super-fast dev profile for rapid iteration
295+
[profile.fast-dev]
296+
inherits = "dev"
297+
opt-level = 0
298+
incremental = true
299+
codegen-units = 256
300+
debug = 1 # Line numbers only, not full debug info
301+
lto = false
302+
panic = "unwind"
303+
234304
# Size-optimized release profile for smallest possible binaries
235305
[profile.release-size]
236306
inherits = "release"

README.md

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -36,7 +36,7 @@ export CODEGRAPH_EMBEDDING_DIMENSION=1024 # 384, 768, 1024, 1536,
3636
```bash
3737
export CODEGRAPH_EMBEDDING_PROVIDER=lmstudio
3838
export CODEGRAPH_LMSTUDIO_MODEL=jina-embeddings-v3 # or jina-embeddings-v4, qwen3-embedding-0.6b, nomic-embed-text-v1.5, etc.
39-
export CODEGRAPH_LMSTUDIO_URL=http://localhost:1234/v1 # Default LM Studio endpoint
39+
export CODEGRAPH_LMSTUDIO_URL=http://localhost:1234 # Default LM Studio endpoint (the /v1 path is appended automatically)
4040
export CODEGRAPH_EMBEDDING_DIMENSION=1024 # Auto-detected for 20+ models, or set manually
4141
```
4242

@@ -310,7 +310,7 @@ Create `~/.codegraph/config.toml`:
310310
[embedding]
311311
provider = "lmstudio"
312312
model = "jinaai/jina-embeddings-v4"
313-
lmstudio_url = "http://localhost:1234/v1"
313+
lmstudio_url = "http://localhost:1234"
314314
dimension = 2048
315315

316316
[llm]
@@ -324,7 +324,7 @@ lmstudio_url = "http://localhost:1234"
324324
```bash
325325
export CODEGRAPH_EMBEDDING_PROVIDER=lmstudio
326326
export CODEGRAPH_LMSTUDIO_MODEL=jinaai/jina-embeddings-v4
327-
export CODEGRAPH_LMSTUDIO_URL=http://localhost:1234/v1
327+
export CODEGRAPH_LMSTUDIO_URL=http://localhost:1234
328328
export CODEGRAPH_EMBEDDING_DIMENSION=2048
329329
```
330330

crates/codegraph-cache/Cargo.toml

Lines changed: 0 additions & 15 deletions
Original file line numberDiff line numberDiff line change
@@ -8,31 +8,16 @@ description = "AI-powered caching system for CodeGraph vector operations"
88

99
[dependencies]
1010
codegraph-core = { workspace = true }
11-
ndarray = { workspace = true }
1211
serde = { workspace = true }
1312
serde_json = { workspace = true }
14-
bincode = { workspace = true }
1513
tracing = { workspace = true }
1614
async-trait = { workspace = true }
1715
tokio = { workspace = true }
1816
parking_lot = { workspace = true }
19-
backtrace = { workspace = true }
2017
dashmap = { workspace = true }
21-
rayon = { workspace = true }
2218
crossbeam-channel = { workspace = true }
23-
chrono = { workspace = true }
24-
futures = { workspace = true }
2519
sha2 = { workspace = true }
26-
sysinfo = { workspace = true }
27-
num_cpus = { workspace = true }
28-
once_cell = { workspace = true }
29-
axum = { workspace = true, features = ["ws"] }
30-
tower-http = { workspace = true, features = ["cors", "fs"] }
31-
http = { workspace = true }
32-
tower = { workspace = true }
33-
tokio-tungstenite = { workspace = true }
3420
fastrand = { workspace = true }
35-
futures-util = "0.3"
3621

3722
[dev-dependencies]
3823
tokio-test = { workspace = true }

crates/codegraph-core/src/config_manager.rs

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -357,7 +357,7 @@ fn default_embedding_provider() -> String {
357357
"auto".to_string()
358358
}
359359
fn default_lmstudio_url() -> String {
360-
"http://localhost:1234/v1".to_string()
360+
"http://localhost:1234".to_string()
361361
}
362362
fn default_ollama_url() -> String {
363363
"http://localhost:11434".to_string()

crates/codegraph-mcp/Cargo.toml

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -30,7 +30,6 @@ sha2 = { workspace = true }
3030
toml = { workspace = true }
3131
rmcp = { workspace = true }
3232
schemars = { workspace = true }
33-
once_cell = { workspace = true }
3433
lru = { workspace = true }
3534
atty = { workspace = true }
3635

crates/codegraph-napi/Cargo.toml

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -22,7 +22,6 @@ napi-derive = "2.16"
2222
tokio = { workspace = true }
2323
serde_json = { workspace = true }
2424
uuid = { workspace = true }
25-
chrono = { workspace = true }
2625
tracing = { workspace = true }
2726

2827
# Local crates

crates/codegraph-vector/src/lmstudio_embedding_provider.rs

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -36,7 +36,7 @@ impl Default for LmStudioEmbeddingConfig {
3636
fn default() -> Self {
3737
Self {
3838
model: "jinaai/jina-embeddings-v3".to_string(),
39-
api_base: "http://localhost:1234/v1".to_string(),
39+
api_base: "http://localhost:1234".to_string(),
4040
timeout: Duration::from_secs(60),
4141
batch_size: 32,
4242
max_retries: 3,
@@ -186,7 +186,7 @@ impl LmStudioEmbeddingProvider {
186186

187187
/// Call LM Studio embeddings endpoint
188188
async fn call_embed_endpoint(&self, texts: &[String]) -> Result<Vec<Vec<f32>>> {
189-
let url = format!("{}/embeddings", self.config.api_base);
189+
let url = format!("{}/v1/embeddings", self.config.api_base.trim_end_matches('/'));
190190

191191
let request = EmbeddingRequest {
192192
input: texts.to_vec(),

crates/codegraph-vector/src/lmstudio_reranker.rs

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -15,7 +15,7 @@ impl Default for LmStudioRerankerConfig {
1515
fn default() -> Self {
1616
Self {
1717
base_url: std::env::var("LMSTUDIO_URL")
18-
.unwrap_or_else(|_| "http://localhost:1234/v1".to_string()),
18+
.unwrap_or_else(|_| "http://localhost:1234".to_string()),
1919
api_key: std::env::var("LMSTUDIO_API_KEY").unwrap_or_else(|_| "lm-studio".to_string()),
2020
model: std::env::var("LMSTUDIO_RERANK_MODEL")
2121
.unwrap_or_else(|_| "text-embedding-3-small".to_string()),
@@ -89,7 +89,7 @@ impl LmStudioReranker {
8989
input: inputs.clone(),
9090
};
9191

92-
let url = format!("{}/embeddings", self.config.base_url.trim_end_matches('/'));
92+
let url = format!("{}/v1/embeddings", self.config.base_url.trim_end_matches('/'));
9393
let response = self
9494
.client
9595
.post(&url)

0 commit comments

Comments
 (0)