// google_simple.rs
use rullm_core::providers::google::{
Content, GenerateContentRequest, GenerationConfig, GoogleClient, Part,
};

/// Helper to extract and concatenate the text parts of a response.
fn extract_text(response: &rullm_core::providers::google::GenerateContentResponse) -> String {
    response
        .candidates
        .iter()
        .flat_map(|candidate| &candidate.content.parts)
        .filter_map(|part| match part {
            Part::Text { text } => Some(text.as_str()),
            _ => None,
        })
        .collect::<Vec<_>>()
        .join("")
}

#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
    // 1. Basic configuration using from_env
    let client = GoogleClient::from_env()?;
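    // Assumption: from_env reads the API key from an environment variable
    // (likely GOOGLE_API_KEY); check rullm_core's docs for the exact name.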

    // 2. Simple chat completion
    let request = GenerateContentRequest::new(vec![Content::user("What is 2 + 2?")])
        .with_system("You are a helpful assistant.".to_string())
        .with_generation_config(GenerationConfig {
            temperature: Some(0.7),
            max_output_tokens: Some(1024),
            stop_sequences: None,
            top_p: None,
            top_k: None,
            response_mime_type: None,
            response_schema: None,
        });

    let response = client.generate_content("gemini-1.5-flash", request).await?;
    println!("🤖 Assistant: {}", extract_text(&response));

    if let Some(usage) = &response.usage_metadata {
        println!("📊 Tokens used: {}", usage.total_token_count);
    }
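    // The usage_metadata above mirrors the Gemini API's usageMetadata; besides
    // total_token_count it may expose per-stage counts (an assumption; see the
    // crate's response types).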

    // 3. Multi-message conversation
    let conversation_request = GenerateContentRequest::new(vec![
        Content::user("What is 5 * 7?"),
        Content::model("5 * 7 = 35"),
        Content::user("What about 6 * 8?"),
    ])
    .with_system("You are a helpful math tutor.".to_string())
    .with_generation_config(GenerationConfig {
        max_output_tokens: Some(100),
        stop_sequences: None,
        temperature: None,
        top_p: None,
        top_k: None,
        response_mime_type: None,
        response_schema: None,
    });
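    // Earlier assistant turns are passed back as Content::model entries, so the
    // model receives the full conversation history in one stateless request.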

    let conversation_response = client
        .generate_content("gemini-1.5-pro", conversation_request)
        .await?;
    println!("\n💬 Conversation:");
    println!("Assistant: {}", extract_text(&conversation_response));

    // 4. Comparing the same prompt across different models
    let models = ["gemini-1.5-flash", "gemini-1.5-pro", "gemini-2.0-flash-exp"];
    let question = "Explain async/await in one sentence.";

    for model in &models {
        let request = GenerateContentRequest::new(vec![Content::user(question)])
            .with_generation_config(GenerationConfig {
                temperature: Some(0.5),
                max_output_tokens: Some(50),
                stop_sequences: None,
                top_p: None,
                top_k: None,
                response_mime_type: None,
                response_schema: None,
            });

        match client.generate_content(model, request).await {
            Ok(response) => {
                println!("\n🔬 {model} says:");
                println!("{}", extract_text(&response));
            }
            Err(e) => {
                println!("❌ Error with {model}: {e}");
            }
        }
    }
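    // Matching on the result per model keeps the comparison going even if one
    // model (e.g. the experimental one) is unavailable.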

    // 5. Advanced parameters with Google-specific features
    let creative_request =
        GenerateContentRequest::new(vec![Content::user("Write a haiku about programming.")])
            .with_system("You are a creative writer.".to_string())
            .with_generation_config(GenerationConfig {
                temperature: Some(1.0), // Higher temperature for more varied, creative output
                top_p: Some(0.9),       // Nucleus sampling: keep the top 90% of probability mass
                max_output_tokens: Some(200),
                stop_sequences: None,
                top_k: None,
                response_mime_type: None,
                response_schema: None,
            });

    let creative_response = client
        .generate_content("gemini-1.5-pro", creative_request)
        .await?;
    println!("\n🎨 Creative Response:");
    println!("{}", extract_text(&creative_response));

    if let Some(candidate) = creative_response.candidates.first() {
        if let Some(reason) = &candidate.finish_reason {
            println!("Finish reason: {reason:?}");
        }
    }

    // 6. Display safety ratings if available (Google AI specific)
    if let Some(candidate) = creative_response.candidates.first() {
        if let Some(safety_ratings) = &candidate.safety_ratings {
            println!("🛡️ Safety ratings: {} checks", safety_ratings.len());
        }
    }

    // 7. Health check
    match client.health_check().await {
        Ok(_) => println!("\n✅ Google AI is healthy"),
        Err(e) => println!("\n❌ Health check failed: {e}"),
    }

    Ok(())
}
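
// To run this example, something along these lines should work (assuming the
// standard Cargo examples layout and a GOOGLE_API_KEY variable; both are
// assumptions, not confirmed here):
//
//     GOOGLE_API_KEY=... cargo run --example google_simple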