@@ -26,7 +26,8 @@ import (
2626
2727// CreateCommitMsg launches the interactive flow for reviewing, regenerating,
2828// editing, and accepting AI-generated commit messages in the current repo.
29- func CreateCommitMsg () {
29+ // If dryRun is true, it displays the prompt without making an API call.
30+ func CreateCommitMsg (dryRun bool ) {
3031 // Validate COMMIT_LLM and required API keys
3132 useLLM , err := store .DefaultLLMKey ()
3233 if err != nil {
@@ -94,6 +95,13 @@ func CreateCommitMsg() {
9495 return
9596 }
9697
98+ // Handle dry-run mode: display what would be sent to LLM without making API call
99+ if dryRun {
100+ pterm .Println ()
101+ displayDryRunInfo (commitLLM , config , changes , apiKey )
102+ return
103+ }
104+
97105 pterm .Println ()
98106 spinnerGenerating , err := pterm .DefaultSpinner .
99107 WithSequence ("⠋" , "⠙" , "⠹" , "⠸" , "⠼" , "⠴" , "⠦" , "⠧" , "⠇" , "⠏" ).
@@ -226,6 +234,22 @@ var (
226234 }
227235 errSelectionCancelled = errors .New ("selection cancelled" )
228236)
// resolveOllamaConfig returns the URL and model to use for Ollama.
//
// The stored "API key" slot holds the Ollama endpoint URL; if it is blank,
// the OLLAMA_URL environment variable is consulted, then a localhost
// default. The model comes from OLLAMA_MODEL, defaulting to "llama3.1".
//
// All values are trimmed of surrounding whitespace: a URL with stray
// spaces would otherwise produce a broken HTTP request, and a
// whitespace-only env var should count as unset (consistent with the
// TrimSpace check already applied to the stored key).
func resolveOllamaConfig(apiKey string) (url, model string) {
	url = strings.TrimSpace(apiKey)
	if url == "" {
		url = strings.TrimSpace(os.Getenv("OLLAMA_URL"))
	}
	if url == "" {
		url = "http://localhost:11434/api/generate"
	}
	model = strings.TrimSpace(os.Getenv("OLLAMA_MODEL"))
	if model == "" {
		model = "llama3.1"
	}
	return url, model
}
252+
229253
230254func generateMessage (provider types.LLMProvider , config * types.Config , changes string , apiKey string , opts * types.GenerationOptions ) (string , error ) {
231255 switch provider {
@@ -238,17 +262,7 @@ func generateMessage(provider types.LLMProvider, config *types.Config, changes s
238262 case types .ProviderGroq :
239263 return groq .GenerateCommitMessage (config , changes , apiKey , opts )
240264 case types .ProviderOllama :
241- url := apiKey
242- if strings .TrimSpace (url ) == "" {
243- url = os .Getenv ("OLLAMA_URL" )
244- if url == "" {
245- url = "http://localhost:11434/api/generate"
246- }
247- }
248- model := os .Getenv ("OLLAMA_MODEL" )
249- if model == "" {
250- model = "llama3.1"
251- }
265+ url , model := resolveOllamaConfig (apiKey )
252266 return ollama .GenerateCommitMessage (config , changes , url , model , opts )
253267 default :
254268 return grok .GenerateCommitMessage (config , changes , apiKey , opts )
@@ -425,3 +439,90 @@ func displayProviderError(provider types.LLMProvider, err error) {
425439 pterm .Error .Printf ("LLM API error: %v\n " , err )
426440 }
427441}
442+
443+ // displayDryRunInfo shows what would be sent to the LLM without making an API call
444+ func displayDryRunInfo (provider types.LLMProvider , config * types.Config , changes string , apiKey string ) {
445+ pterm .DefaultHeader .WithFullWidth ().
446+ WithBackgroundStyle (pterm .NewStyle (pterm .BgBlue )).
447+ WithTextStyle (pterm .NewStyle (pterm .FgWhite , pterm .Bold )).
448+ Println ("DRY RUN MODE - Preview Only" )
449+
450+ pterm .Println ()
451+ pterm .Info .Println ("This is a dry-run. No API call will be made to the LLM provider." )
452+ pterm .Println ()
453+
454+ // Display provider information
455+ pterm .DefaultSection .Println ("LLM Provider Configuration" )
456+ providerInfo := [][]string {
457+ {"Provider" , provider .String ()},
458+ }
459+
460+ // Add provider-specific info
461+ switch provider {
462+ case types .ProviderOllama :
463+ url , model := resolveOllamaConfig (apiKey )
464+ providerInfo = append (providerInfo , []string {"Ollama URL" , url })
465+ providerInfo = append (providerInfo , []string {"Model" , model })
466+ case types .ProviderGrok :
467+ providerInfo = append (providerInfo , []string {"API Endpoint" , config .GrokAPI })
468+ providerInfo = append (providerInfo , []string {"API Key" , maskAPIKey (apiKey )})
469+ default :
470+ providerInfo = append (providerInfo , []string {"API Key" , maskAPIKey (apiKey )})
471+ }
472+
473+ pterm .DefaultTable .WithHasHeader (false ).WithData (providerInfo ).Render ()
474+
475+ pterm .Println ()
476+
477+ // Build and display the prompt
478+ opts := & types.GenerationOptions {Attempt : 1 }
479+ prompt := types .BuildCommitPrompt (changes , opts )
480+
481+ pterm .DefaultSection .Println ("Prompt That Would Be Sent" )
482+ pterm .Println ()
483+
484+ // Display prompt in a box
485+ promptBox := pterm .DefaultBox .
486+ WithTitle ("Full LLM Prompt" ).
487+ WithTitleTopCenter ().
488+ WithBoxStyle (pterm .NewStyle (pterm .FgCyan ))
489+ promptBox .Println (prompt )
490+
491+ pterm .Println ()
492+
493+ // Display changes statistics
494+ pterm .DefaultSection .Println ("Changes Summary" )
495+ linesCount := len (strings .Split (changes , "\n " ))
496+ charsCount := len (changes )
497+
498+ statsData := [][]string {
499+ {"Total Lines" , fmt .Sprintf ("%d" , linesCount )},
500+ {"Total Characters" , fmt .Sprintf ("%d" , charsCount )},
501+ {"Prompt Size (approx)" , fmt .Sprintf ("%d tokens" , estimateTokens (prompt ))},
502+ }
503+ pterm .DefaultTable .WithHasHeader (false ).WithData (statsData ).Render ()
504+
505+ pterm .Println ()
506+ pterm .Success .Println ("Dry-run complete. To generate actual commit message, run without --dry-run flag." )
507+ }
508+
// maskAPIKey masks the API key for display purposes
func maskAPIKey(apiKey string) string {
	switch {
	case apiKey == "":
		return "[NOT SET]"
	case strings.HasPrefix(apiKey, "http://"), strings.HasPrefix(apiKey, "https://"):
		// Ollama stores an endpoint URL in the key slot; URLs are not secrets.
		return apiKey
	case len(apiKey) <= 8:
		// Too short to reveal any part of it safely.
		return strings.Repeat("*", len(apiKey))
	}
	// Keep the first and last 4 characters visible, star out the middle.
	head, tail := apiKey[:4], apiKey[len(apiKey)-4:]
	return head + strings.Repeat("*", len(apiKey)-8) + tail
}
524+
// estimateTokens provides a rough estimate of token count (1 token ≈ 4 characters)
func estimateTokens(text string) int {
	// Heuristic only: integer division, so short texts round down to zero.
	const charsPerToken = 4
	return len(text) / charsPerToken
}
0 commit comments