@@ -402,26 +402,29 @@ You can describe and pass in functions and the model will intelligently choose t
 ```ruby
 
 def get_current_weather(location:, unit: "fahrenheit")
-  # use a weather api to fetch weather
+  # Here you could use a weather api to fetch the weather.
+  "The weather in #{location} is nice 🌞 #{unit}"
 end
 
+messages = [
+  {
+    "role": "user",
+    "content": "What is the weather like in San Francisco?",
+  },
+]
+
 response =
   client.chat(
     parameters: {
       model: "gpt-4o",
-      messages: [
-        {
-          "role": "user",
-          "content": "What is the weather like in San Francisco?",
-        },
-      ],
+      messages: messages, # Defined above because we'll use it again
       tools: [
         {
          type: "function",
          function: {
            name: "get_current_weather",
            description: "Get the current weather in a given location",
-            parameters: {
+            parameters: { # Format: https://json-schema.org/understanding-json-schema
              type: :object,
              properties: {
                location: {
@@ -438,31 +441,52 @@ response =
           },
         }
       ],
-      tool_choice: {
-        type: "function",
-        function: {
-          name: "get_current_weather"
-        }
-      }
+      tool_choice: "required" # Optional, defaults to "auto"
+      # Can also be "none" or a specific function; see the docs
     },
   )
 
 message = response.dig("choices", 0, "message")
 
 if message["role"] == "assistant" && message["tool_calls"]
-  function_name = message.dig("tool_calls", 0, "function", "name")
-  args =
-    JSON.parse(
-      message.dig("tool_calls", 0, "function", "arguments"),
+  # For a subsequent message with the role "tool", OpenAI requires the preceding
+  # assistant message to include its tool_calls, so append it to the conversation once.
+  messages << message
+
+  message["tool_calls"].each do |tool_call|
+    tool_call_id = tool_call.dig("id")
+    function_name = tool_call.dig("function", "name")
+    function_args = JSON.parse(
+      tool_call.dig("function", "arguments"),
       { symbolize_names: true },
     )
+    function_response = case function_name
+                        when "get_current_weather"
+                          get_current_weather(**function_args) # => "The weather in San Francisco is nice 🌞 fahrenheit"
+                        else
+                          # decide how to handle an unexpected function name
+                        end
 
-  case function_name
-  when "get_current_weather"
-    get_current_weather(**args)
+    messages << {
+      tool_call_id: tool_call_id,
+      role: "tool",
+      name: function_name,
+      content: function_response
+    } # Extend the conversation with the results of the functions
   end
+
+  second_response = client.chat(
+    parameters: {
+      model: "gpt-4o",
+      messages: messages
+    })
+
+  puts second_response.dig("choices", 0, "message", "content")
+
+  # At this point, the model has decided to call functions, you've called the functions
+  # and provided the responses back, and the model has considered this and responded.
 end
-# => "The weather is nice 🌞"
+# => "It looks like the weather is nice and sunny in San Francisco! If you're planning to go out, it should be a pleasant day."
 ```
 
 ### Completions