Commit 89f35c9

Copilot and oleander committed
Implement strategy pattern for generation fallbacks
Co-authored-by: oleander <220827+oleander@users.noreply.github.com>
1 parent ed595ae commit 89f35c9

5 files changed (+289, -135 lines)

Cargo.lock

Lines changed: 13 additions & 1 deletion
Some generated files are not rendered by default.

Cargo.toml

Lines changed: 1 addition & 0 deletions
@@ -44,6 +44,7 @@ tokio = { version = "1.45.1", features = ["full"] }
 futures = "0.3"
 parking_lot = "0.12.3"
 tracing = "0.1"
+async-trait = "0.1"
 
 # CLI and UI
 
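The only manifest change is the new async-trait dependency, which hints at how the strategy interface is likely defined: async functions in traits are not usable behind trait objects on stable Rust, so strategies dispatched dynamically usually go through #[async_trait]. Below is a minimal sketch of what such a strategy trait could look like; the trait and method names are hypothetical, since the new src/generation module is among the files not rendered in this excerpt.

use anyhow::Result;
use async_trait::async_trait;

/// One way to produce a commit message (e.g. OpenAI multi-step, local
/// multi-step, single-step), usable behind a trait object.
/// Hypothetical sketch: the real trait in src/generation is not shown here.
#[async_trait]
pub trait GenerationStrategy {
  /// Name used in logs when a strategy fails and the next one is tried.
  fn name(&self) -> &str;

  /// Attempt to generate a commit message for the given diff.
  async fn generate(&self, diff: &str, max_length: Option<usize>) -> Result<String>;
}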

src/commit.rs

Lines changed: 21 additions & 134 deletions
@@ -1,12 +1,10 @@
 use anyhow::{anyhow, bail, Result};
 use maplit::hashmap;
 use mustache;
-use async_openai::Client;
 
-use crate::{config, debug_output, openai, profile};
+use crate::{config, openai, profile};
 use crate::model::Model;
 use crate::config::AppConfig;
-use crate::multi_step_integration::{generate_commit_message_local, generate_commit_message_multi_step};
 
 /// The instruction template included at compile time
 const INSTRUCTION_TEMPLATE: &str = include_str!("../resources/prompt.md");
@@ -73,125 +71,20 @@ pub fn create_commit_request(diff: String, max_tokens: usize, model: Model) -> R
 /// Returns an error if:
 /// - max_tokens is 0
 /// - OpenAI API call fails
-pub async fn generate(patch: String, remaining_tokens: usize, model: Model, settings: Option<&AppConfig>) -> Result<openai::Response> {
+pub async fn generate(patch: String, remaining_tokens: usize, _model: Model, settings: Option<&AppConfig>) -> Result<openai::Response> {
   profile!("Generate commit message");
 
   if remaining_tokens == 0 {
     bail!("Maximum token count must be greater than zero")
   }
 
-  // Try multi-step approach first
-  let max_length = settings
-    .and_then(|s| s.max_commit_length)
-    .or(config::APP_CONFIG.max_commit_length);
-
-  // Check if we have a valid API key configuration
-  let has_valid_api_key = if let Some(custom_settings) = settings {
-    custom_settings
-      .openai_api_key
-      .as_ref()
-      .map(|key| !key.is_empty() && key != "<PLACE HOLDER FOR YOUR API KEY>")
-      .unwrap_or(false)
-  } else {
-    // Check environment variable or config
-    config::APP_CONFIG
-      .openai_api_key
-      .as_ref()
-      .map(|key| !key.is_empty() && key != "<PLACE HOLDER FOR YOUR API KEY>")
-      .unwrap_or(false)
-      || std::env::var("OPENAI_API_KEY")
-        .map(|key| !key.is_empty())
-        .unwrap_or(false)
-  };
-
-  if !has_valid_api_key {
-    bail!("OpenAI API key not configured. Please set your API key using:\n git-ai config set openai-api-key <your-key>\nor set the OPENAI_API_KEY environment variable.");
-  }
-
-  // Use custom settings if provided
-  if let Some(custom_settings) = settings {
-    if let Some(api_key) = &custom_settings.openai_api_key {
-      if !api_key.is_empty() && api_key != "<PLACE HOLDER FOR YOUR API KEY>" {
-        match openai::create_openai_config(custom_settings) {
-          Ok(config) => {
-            let client = Client::with_config(config);
-            let model_str = model.to_string();
-
-            match generate_commit_message_multi_step(&client, &model_str, &patch, max_length).await {
-              Ok(message) => return Ok(openai::Response { response: message }),
-              Err(e) => {
-                // Check if it's an API key error
-                if e.to_string().contains("invalid_api_key") || e.to_string().contains("Incorrect API key") {
-                  bail!("Invalid OpenAI API key. Please check your API key configuration.");
-                }
-                log::warn!("Multi-step generation with custom settings failed: {e}");
-                if let Some(session) = debug_output::debug_session() {
-                  session.set_multi_step_error(e.to_string());
-                }
-              }
-            }
-          }
-          Err(e) => {
-            // If config creation fails due to API key, propagate the error
-            return Err(e);
-          }
-        }
-      }
-    }
-  } else {
-    // Try with default settings
-    if let Ok(api_key) = std::env::var("OPENAI_API_KEY") {
-      if !api_key.is_empty() {
-        let client = Client::new();
-        let model_str = model.to_string();
-
-        match generate_commit_message_multi_step(&client, &model_str, &patch, max_length).await {
-          Ok(message) => return Ok(openai::Response { response: message }),
-          Err(e) => {
-            // Check if it's an API key error
-            if e.to_string().contains("invalid_api_key") || e.to_string().contains("Incorrect API key") {
-              bail!("Invalid OpenAI API key. Please check your API key configuration.");
-            }
-            log::warn!("Multi-step generation failed: {e}");
-            if let Some(session) = debug_output::debug_session() {
-              session.set_multi_step_error(e.to_string());
-            }
-          }
-        }
-      }
-    }
-  }
-
-  // Try local multi-step generation
-  match generate_commit_message_local(&patch, max_length) {
-    Ok(message) => return Ok(openai::Response { response: message }),
-    Err(e) => {
-      log::warn!("Local multi-step generation failed: {e}");
-    }
-  }
-
-  // Mark that we're using single-step fallback
-  if let Some(session) = debug_output::debug_session() {
-    session.set_single_step_success(true);
-  }
-
-  // Fallback to original single-step approach
-  let request = create_commit_request(patch, remaining_tokens, model)?;
-
-  // Use custom settings if provided, otherwise use global config
-  match settings {
-    Some(custom_settings) => {
-      // Create a client with custom settings
-      match openai::create_openai_config(custom_settings) {
-        Ok(config) => openai::call_with_config(request, config).await,
-        Err(e) => Err(e)
-      }
-    }
-    None => {
-      // Use the default global config
-      openai::call(request).await
-    }
-  }
+  // Use the provided settings, or fall back to global config
+  let config = settings.unwrap_or(&config::APP_CONFIG);
+
+  // Use the new strategy pattern for generation
+  let message = crate::generation::fallback::generate_with_fallback(&patch, config).await?;
+
+  Ok(openai::Response { response: message })
 }
 
 pub fn token_used(model: &Model) -> Result<usize> {
@@ -216,7 +109,7 @@ mod tests {
   use super::*;
 
   #[tokio::test]
-  async fn test_missing_api_key_error() {
+  async fn test_missing_api_key_fallback() {
     // Create settings with no API key
     let settings = AppConfig {
       openai_api_key: None,
@@ -230,7 +123,7 @@ mod tests {
     let original_key = std::env::var("OPENAI_API_KEY").ok();
     std::env::remove_var("OPENAI_API_KEY");
 
-    // Test that generate returns an error for missing API key
+    // Test that generate falls back to local generation when no API key is available
     let result = generate(
       "diff --git a/test.txt b/test.txt\n+Hello World".to_string(),
       1024,
@@ -244,17 +137,14 @@ mod tests {
       std::env::set_var("OPENAI_API_KEY", key);
     }
 
-    assert!(result.is_err());
-    let error_message = result.unwrap_err().to_string();
-    assert!(
-      error_message.contains("OpenAI API key not configured"),
-      "Expected error message about missing API key, got: {}",
-      error_message
-    );
+    // Should succeed with local fallback
+    assert!(result.is_ok(), "Expected fallback to local generation to succeed, got error: {:?}", result.err());
+    let response = result.unwrap();
+    assert!(!response.response.is_empty(), "Expected non-empty commit message");
   }
 
   #[tokio::test]
-  async fn test_invalid_api_key_error() {
+  async fn test_invalid_api_key_fallback() {
     // Create settings with invalid API key
     let settings = AppConfig {
       openai_api_key: Some("<PLACE HOLDER FOR YOUR API KEY>".to_string()),
@@ -264,7 +154,7 @@ mod tests {
       timeout: Some(30)
     };
 
-    // Test that generate returns an error for invalid API key
+    // Test that generate falls back to local generation when API key is invalid
    let result = generate(
       "diff --git a/test.txt b/test.txt\n+Hello World".to_string(),
       1024,
@@ -273,12 +163,9 @@ mod tests {
     )
     .await;
 
-    assert!(result.is_err());
-    let error_message = result.unwrap_err().to_string();
-    assert!(
-      error_message.contains("OpenAI API key not configured"),
-      "Expected error message about invalid API key, got: {}",
-      error_message
-    );
+    // Should succeed with local fallback
+    assert!(result.is_ok(), "Expected fallback to local generation to succeed, got error: {:?}", result.err());
+    let response = result.unwrap();
+    assert!(!response.response.is_empty(), "Expected non-empty commit message");
   }
 }
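
generate() now delegates everything after the token check to crate::generation::fallback::generate_with_fallback, and the renamed tests assert that the call succeeds even when no usable API key is present, so the strategy chain must end in a step that works offline, much as the old inline code fell back to local generation. The fallback module itself is one of the files not rendered in this excerpt; the sketch below only illustrates the dispatch loop implied by the new call site, reusing the hypothetical GenerationStrategy trait sketched above, and the helper name first_successful is made up for illustration.

use anyhow::{bail, Result};

/// Walk a prioritized list of strategies and return the first message one of
/// them produces, logging and skipping any strategy that errors out.
/// Sketch only: the real dispatch lives in src/generation/fallback.rs, which
/// is not shown in this excerpt.
pub async fn first_successful(
  strategies: &[Box<dyn GenerationStrategy + Send + Sync>],
  diff: &str,
  max_length: Option<usize>
) -> Result<String> {
  for strategy in strategies {
    match strategy.generate(diff, max_length).await {
      Ok(message) => return Ok(message),
      Err(e) => log::warn!("{} strategy failed, trying next: {e}", strategy.name())
    }
  }
  bail!("all generation strategies failed")
}

Under this shape, a real generate_with_fallback(&patch, config) would build the strategy list from the config (API-backed strategies only when a key is configured) and place the local strategy last, which is what lets the updated tests assert result.is_ok() instead of matching on error messages.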
