Skip to content

Commit edbd0c8

Browse files
Fix LLM configuration status detection logic
- Improve logic in getServerStatus to properly handle different scenarios:
  - Not configured: when the API key or model name is missing
  - Cannot test: when settings are configured but the server is unreachable
  - Working/Error: when the server is reachable and the LLM configuration can be tested
- Add a workspace field to the testLLMConfiguration API request to fix a 422 error
- Now shows "Cannot test LLM configuration - server not reachable" instead of "Not configured" when the server is offline but settings are present

Co-authored-by: openhands <openhands@all-hands.dev>
1 parent 18f012d commit edbd0c8

File tree

1 file changed

+14
-5
lines changed

1 file changed

+14
-5
lines changed

example/src/utils/serverStatus.ts

Lines changed: 14 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -90,6 +90,10 @@ export const testLLMConfiguration = async (settings: Settings): Promise<{ succes
9090
model: settings.modelName,
9191
api_key: settings.apiKey,
9292
}
93+
},
94+
workspace: {
95+
type: 'local',
96+
path: '/tmp/test-workspace'
9397
}
9498
}),
9599
signal: AbortSignal.timeout(10000), // 10 second timeout
@@ -174,14 +178,19 @@ export const getServerStatus = async (settings: Settings): Promise<ServerStatus>
174178
let llmStatus: 'unknown' | 'working' | 'error' = 'unknown';
175179
let llmError: string | undefined;
176180

177-
if (healthCheck.isConnected && settings.apiKey && settings.modelName) {
178-
// Test LLM configuration if server is reachable and LLM settings are provided
181+
// Check if LLM settings are configured
182+
if (!settings.apiKey || !settings.modelName) {
183+
llmStatus = 'unknown';
184+
llmError = 'LLM API key or model name not configured';
185+
} else if (!healthCheck.isConnected) {
186+
// Settings are configured but server is not reachable
187+
llmStatus = 'unknown';
188+
llmError = 'Cannot test LLM configuration - server not reachable';
189+
} else {
190+
// Server is reachable and settings are configured - test LLM
179191
const llmTest = await testLLMConfiguration(settings);
180192
llmStatus = llmTest.success ? 'working' : 'error';
181193
llmError = llmTest.error;
182-
} else if (!settings.apiKey || !settings.modelName) {
183-
llmStatus = 'unknown';
184-
llmError = 'LLM API key or model name not configured';
185194
}
186195

187196
return {

0 commit comments

Comments (0)