Skip to content

Commit 7f38168

Browse files
authored
Merge pull request #96 from aneeshsunganahalli/feat/dry-run-mode
Feature: Dry-Run Added
2 parents 7dd4621 + b2112a5 commit 7f38168

3 files changed

Lines changed: 144 additions & 13 deletions

File tree

README.md

Lines changed: 22 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -29,6 +29,7 @@ Looking to contribute? Check out:
2929

3030
**AI-Powered Commit Messages** - Automatically generate meaningful commit messages
3131
🔄 **Multiple LLM Support** - Choose between Google Gemini, Grok, Claude, ChatGPT, or Ollama (local)
32+
🧪 **Dry Run Mode** - Preview prompts without making API calls
3233
📝 **Context-Aware** - Analyzes staged and unstaged changes
3334
📋 **Auto-Copy to Clipboard** - Generated messages are automatically copied for instant use
3435
🎛️ **Interactive Review Flow** - Accept, regenerate with new styles, or open the message in your editor before committing
@@ -112,6 +113,27 @@ Or if running from source:
112113
go run cmd/commit-msg/main.go .
113114
```
114115

116+
### Preview Mode (Dry Run)
117+
118+
Preview what would be sent to the LLM without making an API call:
119+
120+
```bash
121+
commit . --dry-run
122+
```
123+
124+
This displays:
125+
- The LLM provider that would be used
126+
- The exact prompt that would be sent
127+
- File statistics and change summary
128+
- Estimated token count
129+
- All without consuming API credits or sharing data
130+
131+
Perfect for:
132+
- 🐛 **Debugging** - See exactly what prompt is being sent
133+
- 💰 **Cost Control** - Review before consuming API credits
134+
- 🔒 **Privacy** - Verify what data would be shared with external APIs
135+
- 🧪 **Development** - Test prompt changes without API calls
136+
115137
### Setup LLM and API Key
116138

117139
```bash

cmd/cli/createMsg.go

Lines changed: 113 additions & 12 deletions
Original file line numberDiff line numberDiff line change
@@ -26,7 +26,8 @@ import (
2626

2727
// CreateCommitMsg launches the interactive flow for reviewing, regenerating,
2828
// editing, and accepting AI-generated commit messages in the current repo.
29-
func CreateCommitMsg() {
29+
// If dryRun is true, it displays the prompt without making an API call.
30+
func CreateCommitMsg(dryRun bool) {
3031
// Validate COMMIT_LLM and required API keys
3132
useLLM, err := store.DefaultLLMKey()
3233
if err != nil {
@@ -94,6 +95,13 @@ func CreateCommitMsg() {
9495
return
9596
}
9697

98+
// Handle dry-run mode: display what would be sent to LLM without making API call
99+
if dryRun {
100+
pterm.Println()
101+
displayDryRunInfo(commitLLM, config, changes, apiKey)
102+
return
103+
}
104+
97105
pterm.Println()
98106
spinnerGenerating, err := pterm.DefaultSpinner.
99107
WithSequence("⠋", "⠙", "⠹", "⠸", "⠼", "⠴", "⠦", "⠧", "⠇", "⠏").
@@ -226,6 +234,22 @@ var (
226234
}
227235
errSelectionCancelled = errors.New("selection cancelled")
228236
)
237+
// resolveOllamaConfig returns the URL and model for Ollama, using environment variables as fallbacks
238+
func resolveOllamaConfig(apiKey string) (url, model string) {
239+
url = apiKey
240+
if strings.TrimSpace(url) == "" {
241+
url = os.Getenv("OLLAMA_URL")
242+
if url == "" {
243+
url = "http://localhost:11434/api/generate"
244+
}
245+
}
246+
model = os.Getenv("OLLAMA_MODEL")
247+
if model == "" {
248+
model = "llama3.1"
249+
}
250+
return url, model
251+
}
252+
229253

230254
func generateMessage(provider types.LLMProvider, config *types.Config, changes string, apiKey string, opts *types.GenerationOptions) (string, error) {
231255
switch provider {
@@ -238,17 +262,7 @@ func generateMessage(provider types.LLMProvider, config *types.Config, changes s
238262
case types.ProviderGroq:
239263
return groq.GenerateCommitMessage(config, changes, apiKey, opts)
240264
case types.ProviderOllama:
241-
url := apiKey
242-
if strings.TrimSpace(url) == "" {
243-
url = os.Getenv("OLLAMA_URL")
244-
if url == "" {
245-
url = "http://localhost:11434/api/generate"
246-
}
247-
}
248-
model := os.Getenv("OLLAMA_MODEL")
249-
if model == "" {
250-
model = "llama3.1"
251-
}
265+
url, model := resolveOllamaConfig(apiKey)
252266
return ollama.GenerateCommitMessage(config, changes, url, model, opts)
253267
default:
254268
return grok.GenerateCommitMessage(config, changes, apiKey, opts)
@@ -425,3 +439,90 @@ func displayProviderError(provider types.LLMProvider, err error) {
425439
pterm.Error.Printf("LLM API error: %v\n", err)
426440
}
427441
}
442+
443+
// displayDryRunInfo shows what would be sent to the LLM without making an API call
444+
func displayDryRunInfo(provider types.LLMProvider, config *types.Config, changes string, apiKey string) {
445+
pterm.DefaultHeader.WithFullWidth().
446+
WithBackgroundStyle(pterm.NewStyle(pterm.BgBlue)).
447+
WithTextStyle(pterm.NewStyle(pterm.FgWhite, pterm.Bold)).
448+
Println("DRY RUN MODE - Preview Only")
449+
450+
pterm.Println()
451+
pterm.Info.Println("This is a dry-run. No API call will be made to the LLM provider.")
452+
pterm.Println()
453+
454+
// Display provider information
455+
pterm.DefaultSection.Println("LLM Provider Configuration")
456+
providerInfo := [][]string{
457+
{"Provider", provider.String()},
458+
}
459+
460+
// Add provider-specific info
461+
switch provider {
462+
case types.ProviderOllama:
463+
url, model := resolveOllamaConfig(apiKey)
464+
providerInfo = append(providerInfo, []string{"Ollama URL", url})
465+
providerInfo = append(providerInfo, []string{"Model", model})
466+
case types.ProviderGrok:
467+
providerInfo = append(providerInfo, []string{"API Endpoint", config.GrokAPI})
468+
providerInfo = append(providerInfo, []string{"API Key", maskAPIKey(apiKey)})
469+
default:
470+
providerInfo = append(providerInfo, []string{"API Key", maskAPIKey(apiKey)})
471+
}
472+
473+
pterm.DefaultTable.WithHasHeader(false).WithData(providerInfo).Render()
474+
475+
pterm.Println()
476+
477+
// Build and display the prompt
478+
opts := &types.GenerationOptions{Attempt: 1}
479+
prompt := types.BuildCommitPrompt(changes, opts)
480+
481+
pterm.DefaultSection.Println("Prompt That Would Be Sent")
482+
pterm.Println()
483+
484+
// Display prompt in a box
485+
promptBox := pterm.DefaultBox.
486+
WithTitle("Full LLM Prompt").
487+
WithTitleTopCenter().
488+
WithBoxStyle(pterm.NewStyle(pterm.FgCyan))
489+
promptBox.Println(prompt)
490+
491+
pterm.Println()
492+
493+
// Display changes statistics
494+
pterm.DefaultSection.Println("Changes Summary")
495+
linesCount := len(strings.Split(changes, "\n"))
496+
charsCount := len(changes)
497+
498+
statsData := [][]string{
499+
{"Total Lines", fmt.Sprintf("%d", linesCount)},
500+
{"Total Characters", fmt.Sprintf("%d", charsCount)},
501+
{"Prompt Size (approx)", fmt.Sprintf("%d tokens", estimateTokens(prompt))},
502+
}
503+
pterm.DefaultTable.WithHasHeader(false).WithData(statsData).Render()
504+
505+
pterm.Println()
506+
pterm.Success.Println("Dry-run complete. To generate actual commit message, run without --dry-run flag.")
507+
}
508+
509+
// maskAPIKey masks the API key for display purposes.
// Empty keys render as "[NOT SET]", HTTP(S) URLs (Ollama endpoints) pass
// through untouched, very short keys are fully starred, and anything
// longer keeps only its first and last four characters visible.
func maskAPIKey(apiKey string) string {
	switch {
	case apiKey == "":
		return "[NOT SET]"
	case strings.HasPrefix(apiKey, "http://"), strings.HasPrefix(apiKey, "https://"):
		// URLs carry no secret material, so show them as-is.
		return apiKey
	case len(apiKey) <= 8:
		// Too short to partially reveal without leaking most of it.
		return strings.Repeat("*", len(apiKey))
	}
	hidden := strings.Repeat("*", len(apiKey)-8)
	return apiKey[:4] + hidden + apiKey[len(apiKey)-4:]
}
524+
525+
// estimateTokens provides a rough estimate of token count using the
// common heuristic of one token per four characters.
func estimateTokens(text string) int {
	const charsPerToken = 4
	return len(text) / charsPerToken
}

cmd/cli/root.go

Lines changed: 9 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -53,7 +53,11 @@ var creatCommitMsg = &cobra.Command{
5353
Use: ".",
5454
Short: "Create Commit Message",
5555
RunE: func(cmd *cobra.Command, args []string) error {
56-
CreateCommitMsg()
56+
dryRun, err := cmd.Flags().GetBool("dry-run")
57+
if err != nil {
58+
return err
59+
}
60+
CreateCommitMsg(dryRun)
5761
return nil
5862
},
5963
}
@@ -68,6 +72,10 @@ func init() {
6872
// Cobra also supports local flags, which will only run
6973
// when this action is called directly.
7074
rootCmd.Flags().BoolP("toggle", "t", false, "Help message for toggle")
75+
76+
// Add --dry-run flag to the commit command
77+
creatCommitMsg.Flags().Bool("dry-run", false, "Preview the prompt that would be sent to the LLM without making an API call")
78+
7179
rootCmd.AddCommand(creatCommitMsg)
7280
rootCmd.AddCommand(llmCmd)
7381
llmCmd.AddCommand(llmSetupCmd)

0 commit comments

Comments
 (0)