Skip to content

Commit b295d44

Browse files
Copilot and oleander committed
Rename functions in src/multi_step_integration.rs to follow naming conventions
Co-authored-by: oleander <220827+oleander@users.noreply.github.com>
1 parent e8022eb commit b295d44

File tree

1 file changed

+9
-9
lines changed

1 file changed

+9
-9
lines changed

src/multi_step_integration.rs

Lines changed: 9 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -53,7 +53,7 @@ pub async fn generate_commit_message_multi_step(
5353
let start_time = std::time::Instant::now();
5454
let payload = format!("{{\"file_path\": \"{file_path}\", \"operation_type\": \"{operation}\", \"diff_content\": \"...\"}}");
5555

56-
let result = call_analyze_function(client, model, file).await;
56+
let result = analyze_file_via_api(client, model, file).await;
5757
let duration = start_time.elapsed();
5858
(file, result, duration, payload)
5959
}
@@ -135,7 +135,7 @@ pub async fn generate_commit_message_multi_step(
135135

136136
// Start step 2 and 3 in parallel
137137
// First create the futures for both operations
138-
let score_future = call_score_function(client, model, files_data);
138+
let score_future = calculate_scores_via_api(client, model, files_data);
139139

140140
// Run the scoring operation
141141
let scored_files = score_future.await?;
@@ -151,7 +151,7 @@ pub async fn generate_commit_message_multi_step(
151151
let generate_payload = format!("{{\"files_with_scores\": [...], \"max_length\": {}}}", max_length.unwrap_or(72));
152152

153153
// Now create and run the generate and select steps in parallel
154-
let generate_future = call_generate_function(client, model, scored_files.clone(), max_length.unwrap_or(72));
154+
let generate_future = generate_candidates_via_api(client, model, scored_files.clone(), max_length.unwrap_or(72));
155155

156156
let candidates = generate_future.await?;
157157
let generate_duration = generate_start_time.elapsed();
@@ -401,8 +401,8 @@ pub fn parse_diff(diff_content: &str) -> Result<Vec<ParsedFile>> {
401401
Ok(files)
402402
}
403403

404-
/// Call the analyze function via OpenAI
405-
async fn call_analyze_function(client: &Client<OpenAIConfig>, model: &str, file: &ParsedFile) -> Result<Value> {
404+
/// Analyze file via OpenAI API
405+
async fn analyze_file_via_api(client: &Client<OpenAIConfig>, model: &str, file: &ParsedFile) -> Result<Value> {
406406
let tools = vec![create_analyze_function_tool()?];
407407

408408
let system_message = ChatCompletionRequestSystemMessageArgs::default()
@@ -440,8 +440,8 @@ async fn call_analyze_function(client: &Client<OpenAIConfig>, model: &str, file:
440440
}
441441
}
442442

443-
/// Call the score function via OpenAI
444-
async fn call_score_function(
443+
/// Calculate scores via OpenAI API
444+
async fn calculate_scores_via_api(
445445
client: &Client<OpenAIConfig>, model: &str, files_data: Vec<FileDataForScoring>
446446
) -> Result<Vec<FileWithScore>> {
447447
let tools = vec![create_score_function_tool()?];
@@ -487,8 +487,8 @@ async fn call_score_function(
487487
}
488488
}
489489

490-
/// Call the generate function via OpenAI
491-
async fn call_generate_function(
490+
/// Generate candidates via OpenAI API
491+
async fn generate_candidates_via_api(
492492
client: &Client<OpenAIConfig>, model: &str, files_with_scores: Vec<FileWithScore>, max_length: usize
493493
) -> Result<Value> {
494494
let tools = vec![create_generate_function_tool()?];

0 commit comments

Comments (0)