@@ -92,9 +92,8 @@ def to_llm
       @chat.add_message(msg.to_llm)
     end

-    # Only set up the end_message callback by default
-    # The new_message callback will be handled differently for streaming
-    @chat.on_end_message { |msg| persist_message_completion(msg) }
+    @chat.on_new_message { persist_new_message }
+         .on_end_message { |msg| persist_message_completion(msg) }
   end

   def with_instructions(instructions, replace: false)
@@ -154,28 +153,8 @@ def ask(message, with: nil, &)

   alias say ask

-  def complete(*args, **kwargs, &)
-    @message = nil
-    @streaming = block_given?
-    first_chunk_received = false
-
-    if @streaming
-      # For streaming, handle message creation on first chunk
-      to_llm.complete(*args, **kwargs) do |chunk|
-        # Create assistant message on first content chunk
-        unless first_chunk_received
-          first_chunk_received = true
-          persist_new_message
-        end
-
-        # Pass the chunk to the user's block
-        yield chunk
-      end
-    else
-      # For non-streaming, maintain original behavior
-      to_llm.on_new_message { persist_new_message }
-      to_llm.complete(*args, **kwargs, &)
-    end
+  def complete(...)
+    to_llm.complete(...)
   rescue RubyLLM::Error => e
     if @message&.persisted? && @message.content.blank?
       RubyLLM.logger.debug "RubyLLM: API call failed, destroying message: #{@message.id}"
@@ -193,9 +172,6 @@ def persist_new_message
   def persist_message_completion(message)
     return unless message

-    # If we're streaming and never created a message (no content chunks), create it now
-    persist_new_message if @streaming && !@message&.persisted?
-
    tool_call_id = find_tool_call_id(message.tool_call_id) if message.tool_call_id

    transaction do
0 commit comments