
Commit 6d48ecf

🤖 style: format ollama test file
1 parent 6f8976b commit 6d48ecf
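
The diff below is purely a re-wrap of the test declarations: the description, callback, and per-test timeout, previously spread across separate argument lines, are collapsed so the name and callback share the opening line and the timeout follows the closing brace. A minimal sketch of the pattern applied throughout the file, using the same global test() helper the file already relies on (the test name and body here are placeholders; the 45000 ms timeout matches the first test in the diff):

// Before: each argument of test() on its own line, timeout last
test(
  "placeholder test name",
  async () => {
    /* ... */
  },
  45000
);

// After: name and callback inline, timeout after the closing brace
test("placeholder test name", async () => {
  /* ... */
}, 45000);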

File tree

1 file changed: +140, -156 lines


tests/ipcMain/ollama.test.ts

Lines changed: 140 additions & 156 deletions
@@ -26,162 +26,146 @@ describeIntegration("IpcMain Ollama integration tests", () => {
     await loadTokenizerModules();
   }, 30000); // 30s timeout for tokenizer loading
 
-  test(
-    "should successfully send message to Ollama and receive response",
-    async () => {
-      // Setup test environment
-      const { env, workspaceId, cleanup } = await setupWorkspace("ollama");
-      try {
-        // Send a simple message to verify basic connectivity
-        const result = await sendMessageWithModel(
-          env.mockIpcRenderer,
-          workspaceId,
-          "Say 'hello' and nothing else",
-          "ollama",
-          "gpt-oss:20b"
-        );
-
-        // Verify the IPC call succeeded
-        expect(result.success).toBe(true);
-
-        // Collect and verify stream events
-        const collector = createEventCollector(env.sentEvents, workspaceId);
-        const streamEnd = await collector.waitForEvent("stream-end", 30000);
-
-        expect(streamEnd).toBeDefined();
-        assertStreamSuccess(collector);
-
-        // Verify we received deltas
-        const deltas = collector.getDeltas();
-        expect(deltas.length).toBeGreaterThan(0);
-
-        // Verify the response contains expected content
-        const text = extractTextFromEvents(deltas).toLowerCase();
-        expect(text).toMatch(/hello/i);
-      } finally {
-        await cleanup();
-      }
-    },
-    45000 // Ollama can be slower than cloud APIs, especially first run
-  );
-
-  test(
-    "should successfully call tools with Ollama",
-    async () => {
-      const { env, workspaceId, cleanup } = await setupWorkspace("ollama");
-      try {
-        // Ask for current time which should trigger bash tool
-        const result = await sendMessageWithModel(
-          env.mockIpcRenderer,
-          workspaceId,
-          "What is the current date and time? Use the bash tool to find out.",
-          "ollama",
-          "gpt-oss:20b"
-        );
-
-        expect(result.success).toBe(true);
-
-        // Wait for stream to complete
-        const collector = createEventCollector(env.sentEvents, workspaceId);
-        await collector.waitForEvent("stream-end", 60000);
-
-        assertStreamSuccess(collector);
-
-        // Verify bash tool was called via events
-        const events = collector.getEvents();
-        const toolCallStarts = events.filter((e: any) => e.type === "tool-call-start");
-        expect(toolCallStarts.length).toBeGreaterThan(0);
-
-        const bashCall = toolCallStarts.find((e: any) => e.toolName === "bash");
-        expect(bashCall).toBeDefined();
-
-        // Verify we got a text response with date/time info
-        const deltas = collector.getDeltas();
-        const responseText = extractTextFromEvents(deltas).toLowerCase();
-
-        // Should mention time or date in response
-        expect(responseText).toMatch(/time|date|am|pm|2024|2025/i);
-      } finally {
-        await cleanup();
-      }
-    },
-    90000 // Tool calling can take longer
-  );
-
-  test(
-    "should handle file operations with Ollama",
-    async () => {
-      const { env, workspaceId, cleanup } = await setupWorkspace("ollama");
-      try {
-        // Ask to read a file that should exist
-        const result = await sendMessageWithModel(
-          env.mockIpcRenderer,
-          workspaceId,
-          "Read the README.md file and tell me what the first heading says.",
-          "ollama",
-          "gpt-oss:20b"
-        );
-
-        expect(result.success).toBe(true);
-
-        // Wait for stream to complete
-        const collector = createEventCollector(env.sentEvents, workspaceId);
-        await collector.waitForEvent("stream-end", 60000);
-
-        assertStreamSuccess(collector);
-
-        // Verify file_read tool was called via events
-        const events = collector.getEvents();
-        const toolCallStarts = events.filter((e: any) => e.type === "tool-call-start");
-        expect(toolCallStarts.length).toBeGreaterThan(0);
-
-        const fileReadCall = toolCallStarts.find((e: any) => e.toolName === "file_read");
-        expect(fileReadCall).toBeDefined();
-
-        // Verify response mentions README content (cmux heading or similar)
-        const deltas = collector.getDeltas();
-        const responseText = extractTextFromEvents(deltas).toLowerCase();
-
-        expect(responseText).toMatch(/cmux|readme|heading/i);
-      } finally {
-        await cleanup();
-      }
-    },
-    90000 // File operations with reasoning
-  );
-
-  test(
-    "should handle errors gracefully when Ollama is not running",
-    async () => {
-      const { env, workspaceId, cleanup } = await setupWorkspace("ollama");
-      try {
-        // Override baseUrl to point to non-existent server
-        const result = await sendMessageWithModel(
-          env.mockIpcRenderer,
-          workspaceId,
-          "This should fail",
-          "ollama",
-          "gpt-oss:20b",
-          {
-            providerOptions: {
-              ollama: {},
-            },
-          }
-        );
-
-        // If Ollama is running, test will pass
-        // If not running, we should get an error
-        if (!result.success) {
-          expect(result.error).toBeDefined();
-        } else {
-          // If it succeeds, that's fine - Ollama is running
-          const collector = createEventCollector(env.sentEvents, workspaceId);
-          await collector.waitForEvent("stream-end", 30000);
+  test("should successfully send message to Ollama and receive response", async () => {
+    // Setup test environment
+    const { env, workspaceId, cleanup } = await setupWorkspace("ollama");
+    try {
+      // Send a simple message to verify basic connectivity
+      const result = await sendMessageWithModel(
+        env.mockIpcRenderer,
+        workspaceId,
+        "Say 'hello' and nothing else",
+        "ollama",
+        "gpt-oss:20b"
+      );
+
+      // Verify the IPC call succeeded
+      expect(result.success).toBe(true);
+
+      // Collect and verify stream events
+      const collector = createEventCollector(env.sentEvents, workspaceId);
+      const streamEnd = await collector.waitForEvent("stream-end", 30000);
+
+      expect(streamEnd).toBeDefined();
+      assertStreamSuccess(collector);
+
+      // Verify we received deltas
+      const deltas = collector.getDeltas();
+      expect(deltas.length).toBeGreaterThan(0);
+
+      // Verify the response contains expected content
+      const text = extractTextFromEvents(deltas).toLowerCase();
+      expect(text).toMatch(/hello/i);
+    } finally {
+      await cleanup();
+    }
+  }, 45000); // Ollama can be slower than cloud APIs, especially first run
+
+  test("should successfully call tools with Ollama", async () => {
+    const { env, workspaceId, cleanup } = await setupWorkspace("ollama");
+    try {
+      // Ask for current time which should trigger bash tool
+      const result = await sendMessageWithModel(
+        env.mockIpcRenderer,
+        workspaceId,
+        "What is the current date and time? Use the bash tool to find out.",
+        "ollama",
+        "gpt-oss:20b"
+      );
+
+      expect(result.success).toBe(true);
+
+      // Wait for stream to complete
+      const collector = createEventCollector(env.sentEvents, workspaceId);
+      await collector.waitForEvent("stream-end", 60000);
+
+      assertStreamSuccess(collector);
+
+      // Verify bash tool was called via events
+      const events = collector.getEvents();
+      const toolCallStarts = events.filter((e: any) => e.type === "tool-call-start");
+      expect(toolCallStarts.length).toBeGreaterThan(0);
+
+      const bashCall = toolCallStarts.find((e: any) => e.toolName === "bash");
+      expect(bashCall).toBeDefined();
+
+      // Verify we got a text response with date/time info
+      const deltas = collector.getDeltas();
+      const responseText = extractTextFromEvents(deltas).toLowerCase();
+
+      // Should mention time or date in response
+      expect(responseText).toMatch(/time|date|am|pm|2024|2025/i);
+    } finally {
+      await cleanup();
+    }
+  }, 90000); // Tool calling can take longer
+
+  test("should handle file operations with Ollama", async () => {
+    const { env, workspaceId, cleanup } = await setupWorkspace("ollama");
+    try {
+      // Ask to read a file that should exist
+      const result = await sendMessageWithModel(
+        env.mockIpcRenderer,
+        workspaceId,
+        "Read the README.md file and tell me what the first heading says.",
+        "ollama",
+        "gpt-oss:20b"
+      );
+
+      expect(result.success).toBe(true);
+
+      // Wait for stream to complete
+      const collector = createEventCollector(env.sentEvents, workspaceId);
+      await collector.waitForEvent("stream-end", 60000);
+
+      assertStreamSuccess(collector);
+
+      // Verify file_read tool was called via events
+      const events = collector.getEvents();
+      const toolCallStarts = events.filter((e: any) => e.type === "tool-call-start");
+      expect(toolCallStarts.length).toBeGreaterThan(0);
+
+      const fileReadCall = toolCallStarts.find((e: any) => e.toolName === "file_read");
+      expect(fileReadCall).toBeDefined();
+
+      // Verify response mentions README content (cmux heading or similar)
+      const deltas = collector.getDeltas();
+      const responseText = extractTextFromEvents(deltas).toLowerCase();
+
+      expect(responseText).toMatch(/cmux|readme|heading/i);
+    } finally {
+      await cleanup();
+    }
+  }, 90000); // File operations with reasoning
+
+  test("should handle errors gracefully when Ollama is not running", async () => {
+    const { env, workspaceId, cleanup } = await setupWorkspace("ollama");
+    try {
+      // Override baseUrl to point to non-existent server
+      const result = await sendMessageWithModel(
+        env.mockIpcRenderer,
+        workspaceId,
+        "This should fail",
+        "ollama",
+        "gpt-oss:20b",
+        {
+          providerOptions: {
+            ollama: {},
+          },
         }
-      } finally {
-        await cleanup();
+      );
+
+      // If Ollama is running, test will pass
+      // If not running, we should get an error
+      if (!result.success) {
+        expect(result.error).toBeDefined();
+      } else {
+        // If it succeeds, that's fine - Ollama is running
+        const collector = createEventCollector(env.sentEvents, workspaceId);
+        await collector.waitForEvent("stream-end", 30000);
       }
-    },
-    45000
-  );
+    } finally {
+      await cleanup();
+    }
+  }, 45000);
 });
