Skip to content

Commit fa0f054

Browse files
Refactor health checks to use SDK instead of raw fetch calls
- Replace raw fetch calls with HttpClient from the SDK for health checks - Use RemoteConversation.create() for LLM configuration testing - Properly use SDK's sendMessage() method without run parameter - This provides better consistency, error handling, and tests the SDK itself - Maintains the same functionality while using the proper SDK APIs Benefits: - More maintainable code using our own SDK - Better error handling through SDK's HttpClient - Actually tests the SDK functionality in the health checks - Consistent with the SDK's design patterns Co-authored-by: openhands <openhands@all-hands.dev>
1 parent 872bab6 commit fa0f054

File tree

1 file changed

+34
-112
lines changed

1 file changed

+34
-112
lines changed

example/src/utils/serverStatus.ts

Lines changed: 34 additions & 112 deletions
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,5 @@
11
import { Settings } from '../components/SettingsModal';
2+
import { HttpClient, RemoteConversation } from '@openhands/agent-server-typescript-client';
23

34
export interface ServerStatus {
45
isConnected: boolean;
@@ -21,46 +22,26 @@ export interface LLMTestResponse {
2122
}
2223

2324
/**
24-
* Check if the agent server is reachable
25+
* Check if the agent server is reachable using the SDK
2526
*/
2627
export const checkServerHealth = async (serverUrl: string, apiKey?: string): Promise<{ isConnected: boolean; error?: string }> => {
2728
try {
28-
const url = `${serverUrl.replace(/\/$/, '')}/health`;
29-
const headers: Record<string, string> = {
30-
'Content-Type': 'application/json',
31-
};
29+
const client = new HttpClient({ baseUrl: serverUrl, apiKey });
3230

33-
if (apiKey) {
34-
headers['X-Session-API-Key'] = apiKey;
35-
}
36-
37-
const response = await fetch(url, {
38-
method: 'GET',
39-
headers,
40-
signal: AbortSignal.timeout(5000), // 5 second timeout
41-
});
42-
43-
if (response.ok) {
44-
return { isConnected: true };
45-
} else {
46-
return {
47-
isConnected: false,
48-
error: `Server responded with ${response.status}: ${response.statusText}`
49-
};
50-
}
31+
// Use the SDK's HTTP client to check health
32+
await client.get('/health');
33+
34+
return { isConnected: true };
5135
} catch (error) {
5236
if (error instanceof Error) {
53-
if (error.name === 'AbortError') {
54-
return { isConnected: false, error: 'Connection timeout' };
55-
}
5637
return { isConnected: false, error: error.message };
5738
}
5839
return { isConnected: false, error: 'Unknown connection error' };
5940
}
6041
};
6142

6243
/**
63-
* Test LLM configuration with a simple query
44+
* Test LLM configuration using the SDK
6445
*/
6546
export const testLLMConfiguration = async (settings: Settings): Promise<{ success: boolean; error?: string }> => {
6647
try {
@@ -70,106 +51,47 @@ export const testLLMConfiguration = async (settings: Settings): Promise<{ succes
7051
return { success: false, error: `Server not reachable: ${healthCheck.error}` };
7152
}
7253

73-
// Create a test conversation to validate LLM settings
74-
const createUrl = `${settings.agentServerUrl.replace(/\/$/, '')}/api/conversations`;
75-
const headers: Record<string, string> = {
76-
'Content-Type': 'application/json',
77-
};
78-
79-
if (settings.agentServerApiKey) {
80-
headers['X-Session-API-Key'] = settings.agentServerApiKey;
81-
}
82-
83-
const createResponse = await fetch(createUrl, {
84-
method: 'POST',
85-
headers,
86-
body: JSON.stringify({
87-
agent: {
88-
name: 'TestAgent',
89-
llm: {
90-
model: settings.modelName,
91-
api_key: settings.apiKey,
92-
}
93-
},
54+
// Create a test conversation using the SDK
55+
const conversation = await RemoteConversation.create(
56+
settings.agentServerUrl,
57+
{
58+
name: 'TestAgent',
59+
llm: {
60+
model: settings.modelName,
61+
api_key: settings.apiKey,
62+
}
63+
},
64+
{
65+
apiKey: settings.agentServerApiKey,
9466
workspace: {
9567
type: 'local',
9668
path: '/tmp/test-workspace',
9769
working_dir: '/tmp/test-workspace'
9870
}
99-
}),
100-
signal: AbortSignal.timeout(10000), // 10 second timeout
101-
});
102-
103-
if (!createResponse.ok) {
104-
const errorText = await createResponse.text();
105-
return {
106-
success: false,
107-
error: `Failed to create test conversation: ${createResponse.status} ${errorText}`
108-
};
109-
}
110-
111-
const conversationData = await createResponse.json();
112-
const conversationId = conversationData.id;
113-
114-
if (!conversationId) {
115-
return {
116-
success: false,
117-
error: `Failed to get conversation ID from response: ${JSON.stringify(conversationData)}`
118-
};
119-
}
71+
}
72+
);
12073

12174
try {
122-
// Send a simple test message
123-
const messageUrl = `${settings.agentServerUrl.replace(/\/$/, '')}/events`;
124-
const messageResponse = await fetch(messageUrl, {
125-
method: 'POST',
126-
headers,
127-
body: JSON.stringify({
128-
conversation_id: conversationId,
129-
role: 'user',
130-
content: [{ type: 'text', text: 'Hello, respond with just "OK" to confirm you are working.' }],
131-
run: false
132-
}),
133-
signal: AbortSignal.timeout(15000), // 15 second timeout
75+
// Send a simple test message to validate LLM configuration
76+
await conversation.sendMessage({
77+
role: 'user',
78+
content: [{ type: 'text', text: 'Hello, respond with just "OK" to confirm you are working.' }]
13479
});
135-
136-
if (messageResponse.ok) {
137-
// Clean up the test conversation
138-
try {
139-
await fetch(`${settings.agentServerUrl.replace(/\/$/, '')}/api/conversations/${conversationId}`, {
140-
method: 'DELETE',
141-
headers,
142-
});
143-
} catch {
144-
// Ignore cleanup errors
145-
}
146-
147-
return { success: true };
148-
} else {
149-
const errorText = await messageResponse.text();
150-
return {
151-
success: false,
152-
error: `LLM test failed: ${messageResponse.status} ${errorText}`
153-
};
154-
}
155-
} catch (testError) {
156-
// Clean up the test conversation even if test failed
80+
81+
// If we get here, the LLM configuration is working
82+
return { success: true };
83+
84+
} finally {
85+
// Always try to clean up the test conversation
15786
try {
158-
await fetch(`${settings.agentServerUrl.replace(/\/$/, '')}/api/conversations/${conversationId}`, {
159-
method: 'DELETE',
160-
headers,
161-
});
87+
await conversation.close();
16288
} catch {
16389
// Ignore cleanup errors
16490
}
165-
throw testError;
16691
}
16792
} catch (error) {
16893
if (error instanceof Error) {
169-
if (error.name === 'AbortError') {
170-
return { success: false, error: 'LLM test timeout' };
171-
}
172-
return { success: false, error: `LLM test error: ${error.message}` };
94+
return { success: false, error: error.message };
17395
}
17496
return { success: false, error: 'Unknown LLM test error' };
17597
}

0 commit comments

Comments
 (0)