Skip to content

Commit 2f92ad6

Browse files
resolved all the issues
1 parent 8698fde commit 2f92ad6

4 files changed

Lines changed: 93 additions & 0 deletions

File tree

cmd/commit-msg/main.go

Lines changed: 15 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -11,8 +11,10 @@ import (
1111
"github.com/dfanso/commit-msg/internal/gemini"
1212
"github.com/dfanso/commit-msg/internal/git"
1313
"github.com/dfanso/commit-msg/internal/grok"
14+
"github.com/dfanso/commit-msg/internal/ollama"
1415
"github.com/dfanso/commit-msg/internal/stats"
1516
"github.com/dfanso/commit-msg/pkg/types"
17+
"github.com/joho/godotenv"
1618
"github.com/pterm/pterm"
1719
)
1820

@@ -48,6 +50,9 @@ func main() {
4850
if apiKey == "" {
4951
log.Fatalf("CLAUDE_API_KEY is not set")
5052
}
53+
case "ollama":
54+
// No API key required to run a local LLM
55+
apiKey = ""
5156
default:
5257
log.Fatalf("Invalid COMMIT_LLM value: %s", commitLLM)
5358
}
@@ -124,6 +129,16 @@ func main() {
124129
commitMsg, err = chatgpt.GenerateCommitMessage(config, changes, apiKey)
125130
case "claude":
126131
commitMsg, err = claude.GenerateCommitMessage(config, changes, apiKey)
132+
case "ollama":
133+
url := os.Getenv("OLLAMA_URL")
134+
if url == "" {
135+
url = "http://localhost:11434/api/generate"
136+
}
137+
model := os.Getenv("OLLAMA_MODEL")
138+
if model == "" {
139+
model = "llama3:latest"
140+
}
141+
commitMsg, err = ollama.GenerateCommitMessage(config, changes, url, model)
127142
default:
128143
commitMsg, err = grok.GenerateCommitMessage(config, changes, apiKey)
129144
}

go.mod

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -31,6 +31,7 @@ require (
3131
github.com/googleapis/enterprise-certificate-proxy v0.3.4 // indirect
3232
github.com/googleapis/gax-go/v2 v2.14.1 // indirect
3333
github.com/gookit/color v1.5.4 // indirect
34+
github.com/joho/godotenv v1.5.1 // indirect
3435
github.com/lithammer/fuzzysearch v1.1.8 // indirect
3536
github.com/mattn/go-runewidth v0.0.16 // indirect
3637
github.com/rivo/uniseg v0.4.7 // indirect

go.sum

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -61,6 +61,8 @@ github.com/gookit/color v1.4.2/go.mod h1:fqRyamkC1W8uxl+lxCQxOT09l/vYfZ+QeiX3rKQ
6161
github.com/gookit/color v1.5.0/go.mod h1:43aQb+Zerm/BWh2GnrgOQm7ffz7tvQXEKV6BFMl7wAo=
6262
github.com/gookit/color v1.5.4 h1:FZmqs7XOyGgCAxmWyPslpiok1k05wmY3SJTytgvYFs0=
6363
github.com/gookit/color v1.5.4/go.mod h1:pZJOeOS8DM43rXbp4AZo1n9zCU2qjpcRko0b6/QJi9w=
64+
github.com/joho/godotenv v1.5.1 h1:7eLL/+HRGLY0ldzfGMeQkb7vMd0as4CfYvUVzLqw0N0=
65+
github.com/joho/godotenv v1.5.1/go.mod h1:f4LDr5Voq0i2e/R5DDNOoa2zzDfwtkZa6DnEwAbqwq4=
6466
github.com/klauspost/cpuid/v2 v2.0.9/go.mod h1:FInQzS24/EEf25PyTYn52gqo7WaD8xa0213Md/qVLRg=
6567
github.com/klauspost/cpuid/v2 v2.0.10/go.mod h1:g2LTdtYhdyuGPqyWyv7qRAmj1WBqxuObKfj5c0PQa7c=
6668
github.com/klauspost/cpuid/v2 v2.0.12/go.mod h1:g2LTdtYhdyuGPqyWyv7qRAmj1WBqxuObKfj5c0PQa7c=

internal/ollama/ollama.go

Lines changed: 75 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,75 @@
1+
package ollama
2+
3+
import (
	"bytes"
	"encoding/json"
	"errors"
	"fmt"
	"io"
	"net/http"
	"time"

	"github.com/dfanso/commit-msg/pkg/types"
)
13+
14+
type OllamaRequest struct {
15+
Model string `json:"model"`
16+
Prompt string `json:"prompt"`
17+
}
18+
19+
type OllamaResponse struct {
20+
Response string `json:"response"`
21+
Done bool `json:"done"`
22+
}
23+
24+
func GenerateCommitMessage(config *types.Config, changes string, url string, model string) (string, error) {
25+
// Use llama3:latest as the default model
26+
if model == "" {
27+
model = "llama3:latest"
28+
}
29+
30+
// Preparing the prompt
31+
prompt := fmt.Sprintf("%s\n\n%s", types.CommitPrompt, changes)
32+
33+
// Generating the request body - add stream: false for non-streaming response
34+
reqBody := map[string]interface{}{
35+
"model": model,
36+
"prompt": prompt,
37+
"stream": false,
38+
}
39+
40+
// Generating the body
41+
body, err := json.Marshal(reqBody)
42+
if err != nil {
43+
return "", fmt.Errorf("failed to marshal request: %v", err)
44+
}
45+
46+
resp, err := http.Post(url, "application/json", bytes.NewBuffer(body))
47+
if err != nil {
48+
return "", fmt.Errorf("failed to send request to Ollama: %v", err)
49+
}
50+
defer resp.Body.Close()
51+
52+
// Read the full response body for better error handling
53+
responseBody, err := io.ReadAll(resp.Body)
54+
if err != nil {
55+
return "", fmt.Errorf("failed to read response body: %v", err)
56+
}
57+
58+
// Check HTTP status
59+
if resp.StatusCode != http.StatusOK {
60+
return "", fmt.Errorf("Ollama API returned status %d: %s", resp.StatusCode, string(responseBody))
61+
}
62+
63+
// Since we set stream: false, we get a single response object
64+
var response OllamaResponse
65+
if err := json.Unmarshal(responseBody, &response); err != nil {
66+
return "", fmt.Errorf("failed to decode response: %v", err)
67+
}
68+
69+
// Check if we got any response
70+
if response.Response == "" {
71+
return "", fmt.Errorf("received empty response from Ollama")
72+
}
73+
74+
return response.Response, nil
75+
}

0 commit comments

Comments
 (0)