diff --git a/lib/ruby_llm/providers/mistral.rb b/lib/ruby_llm/providers/mistral.rb
index 18ddc266c..b67ef2656 100644
--- a/lib/ruby_llm/providers/mistral.rb
+++ b/lib/ruby_llm/providers/mistral.rb
@@ -7,6 +7,7 @@ class Mistral < OpenAI
       include Mistral::Chat
       include Mistral::Models
       include Mistral::Embeddings
+      include Mistral::Streaming
 
       def api_base
         'https://api.mistral.ai/v1'
diff --git a/lib/ruby_llm/providers/mistral/streaming.rb b/lib/ruby_llm/providers/mistral/streaming.rb
new file mode 100644
index 000000000..3d2dd289a
--- /dev/null
+++ b/lib/ruby_llm/providers/mistral/streaming.rb
@@ -0,0 +1,32 @@
+# frozen_string_literal: true
+
+module RubyLLM
+  module Providers
+    class Mistral
+      # Streaming methods of the Mistral API integration
+      module Streaming
+        module_function
+
+        def build_chunk(data)
+          Chunk.new(
+            role: :assistant,
+            model_id: data['model'],
+            content: extract_content(data),
+            tool_calls: parse_tool_calls(data.dig('choices', 0, 'delta', 'tool_calls'), parse_arguments: false),
+            input_tokens: data.dig('usage', 'prompt_tokens'),
+            output_tokens: data.dig('usage', 'completion_tokens')
+          )
+        end
+
+        # Reasoning ("thinking") deltas arrive as structured Arrays/Hashes in
+        # choices.0.delta.content; they carry no plain chunk text, so return ''.
+        def extract_content(data)
+          content = data.dig('choices', 0, 'delta', 'content')
+          return '' if content.is_a?(Array) || content.is_a?(Hash)
+
+          content.to_s
+        end
+      end
+    end
+  end
+end
diff --git a/spec/ruby_llm/providers/mistral/streaming_spec.rb b/spec/ruby_llm/providers/mistral/streaming_spec.rb
new file mode 100644
index 000000000..497a7d853
--- /dev/null
+++ b/spec/ruby_llm/providers/mistral/streaming_spec.rb
@@ -0,0 +1,36 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe RubyLLM::Providers::Mistral::Streaming do
+  let(:test_obj) do
+    Object.new.extend(described_class).tap do |obj|
+      allow(obj).to receive(:parse_tool_calls).and_return([]) # ignore tool calls
+    end
+  end
+
+  it 'correctly processes content on receiving reasoning messages (an array in choices.delta.content)' do
+    data = {
+      'choices' => [{
+        'index' => 0,
+        'delta' => {
+          'content' => [{
+            'type' => 'thinking',
+            'thinking' => [{ 'type' => 'text', 'text' => 'Okay' }]
+          }]
+        }
+      }]
+    }
+    expect(test_obj.send(:build_chunk, data).content).to eq('')
+  end
+
+  it 'correctly processes content on receiving normal messages' do
+    data = {
+      'choices' => [{
+        'index' => 0,
+        'delta' => { 'content' => 'thecontent' }
+      }]
+    }
+    expect(test_obj.send(:build_chunk, data).content).to eq('thecontent')
+  end
+end