 
use PHPUnit\Framework\TestCase;
use Symfony\AI\Platform\Bridge\Ollama\Ollama;
+use Symfony\AI\Platform\Bridge\Ollama\OllamaMessageChunk;
use Symfony\AI\Platform\Bridge\Ollama\OllamaResultConverter;
use Symfony\AI\Platform\Exception\RuntimeException;
use Symfony\AI\Platform\Model;
use Symfony\AI\Platform\Result\InMemoryRawResult;
use Symfony\AI\Platform\Result\RawHttpResult;
+use Symfony\AI\Platform\Result\StreamResult;
use Symfony\AI\Platform\Result\TextResult;
use Symfony\AI\Platform\Result\ToolCallResult;
use Symfony\Contracts\HttpClient\ResponseInterface;
@@ -160,4 +162,74 @@ public function testItConvertsAResponseToAVectorResult() |
        $this->assertSame([0.3, 0.4, 0.4], $convertedContent[0]->getData());
        $this->assertSame([0.0, 0.0, 0.2], $convertedContent[1]->getData());
    }
+
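+    // Verifies that converting with the 'stream' option returns a StreamResult whose iterator yields OllamaMessageChunk objects, exposing per-chunk content, done state and the raw payload.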
+    public function testConvertStreamingResponse()
+    {
+        $converter = new OllamaResultConverter();
+        $rawResult = new InMemoryRawResult(dataStream: $this->generateConvertStreamingStream());
+
+        $result = $converter->convert($rawResult, options: ['stream' => true]);
+
+        $this->assertInstanceOf(StreamResult::class, $result);
+
+        $chunks = $result->getContent();
+        $this->assertInstanceOf(OllamaMessageChunk::class, $chunks->current());
+        $this->assertSame('Hello', $chunks->current()->getContent());
+        $this->assertFalse($chunks->current()->isDone());
+        $this->assertSame('deepseek-r1:latest', $chunks->current()->raw['model']);
+        $this->assertArrayNotHasKey('total_duration', $chunks->current()->raw);
+        $chunks->next();
+        $this->assertInstanceOf(OllamaMessageChunk::class, $chunks->current());
+        $this->assertSame(' world!', $chunks->current()->getContent());
+        $this->assertTrue($chunks->current()->isDone());
+        $this->assertArrayHasKey('total_duration', $chunks->current()->raw);
+    }
+
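+    // Verifies that "thinking" deltas are exposed through getThinking() while getContent() stays empty, and that getThinking() returns null once the answer chunks arrive.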
+    public function testConvertThinkingStreamingResponse()
+    {
+        $converter = new OllamaResultConverter();
+        $rawResult = new InMemoryRawResult(dataStream: $this->generateConvertThinkingStreamingStream());
+
+        $result = $converter->convert($rawResult, options: ['stream' => true]);
+
+        $this->assertInstanceOf(StreamResult::class, $result);
+
+        $chunks = $result->getContent();
+        $this->assertInstanceOf(OllamaMessageChunk::class, $chunks->current());
+        $this->assertSame('', $chunks->current()->getContent());
+        $this->assertSame('Thinking', $chunks->current()->getThinking());
+        $this->assertFalse($chunks->current()->isDone());
+        $this->assertSame('deepseek-r1:latest', $chunks->current()->raw['model']);
+        $this->assertArrayNotHasKey('total_duration', $chunks->current()->raw);
+        $chunks->next();
+        $this->assertSame('', $chunks->current()->getContent());
+        $this->assertSame(' hard', $chunks->current()->getThinking());
+        $this->assertFalse($chunks->current()->isDone());
+        $chunks->next();
+        $this->assertSame('Hello', $chunks->current()->getContent());
+        $this->assertNull($chunks->current()->getThinking());
+        $this->assertFalse($chunks->current()->isDone());
+        $chunks->next();
+        $this->assertInstanceOf(OllamaMessageChunk::class, $chunks->current());
+        $this->assertSame(' world!', $chunks->current()->getContent());
+        $this->assertNull($chunks->current()->getThinking());
+        $this->assertTrue($chunks->current()->isDone());
+        $this->assertArrayHasKey('total_duration', $chunks->current()->raw);
+    }
+
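+    // Fixture: a two-chunk chat stream; only the final chunk carries done=true plus the timing/eval metadata.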
+    private function generateConvertStreamingStream(): iterable
+    {
+        yield ['model' => 'deepseek-r1:latest', 'created_at' => '2025-10-29T17:15:49.631700779Z', 'message' => ['role' => 'assistant', 'content' => 'Hello'], 'done' => false];
+        yield ['model' => 'deepseek-r1:latest', 'created_at' => '2025-10-29T17:15:49.905924913Z', 'message' => ['role' => 'assistant', 'content' => ' world!'], 'done' => true,
+            'done_reason' => 'stop', 'total_duration' => 100, 'load_duration' => 10, 'prompt_eval_count' => 42, 'prompt_eval_duration' => 30, 'eval_count' => 17, 'eval_duration' => 60];
+    }
+
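+    // Fixture: a stream whose first two chunks carry only 'thinking' deltas with empty content, followed by the answer chunks.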
+    private function generateConvertThinkingStreamingStream(): iterable
+    {
+        yield ['model' => 'deepseek-r1:latest', 'created_at' => '2025-10-29T17:15:49.631700779Z', 'message' => ['role' => 'assistant', 'content' => '', 'thinking' => 'Thinking'], 'done' => false];
+        yield ['model' => 'deepseek-r1:latest', 'created_at' => '2025-10-29T17:15:49.905924913Z', 'message' => ['role' => 'assistant', 'content' => '', 'thinking' => ' hard'], 'done' => false];
+        yield ['model' => 'deepseek-r1:latest', 'created_at' => '2025-10-29T17:15:50.14497475Z', 'message' => ['role' => 'assistant', 'content' => 'Hello'], 'done' => false];
+        yield ['model' => 'deepseek-r1:latest', 'created_at' => '2025-10-29T17:15:50.367912083Z', 'message' => ['role' => 'assistant', 'content' => ' world!'], 'done' => true,
+            'done_reason' => 'stop', 'total_duration' => 100, 'load_duration' => 10, 'prompt_eval_count' => 42, 'prompt_eval_duration' => 30, 'eval_count' => 17, 'eval_duration' => 60];
+    }
}