-
-
Notifications
You must be signed in to change notification settings - Fork 3.1k
fix(api): SSE streaming format to comply with specification #7182
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
Changes from all commits
c60c6c2
3e4917f
17c5826
0f2a126
deefcb0
b38426f
9c0c1ae
e6825c6
da887e7
1bdc171
22333d3
File filter
Filter by extension
Conversations
Jump to
Diff view
Diff view
There are no files selected for viewing
| Original file line number | Diff line number | Diff line change | ||||
|---|---|---|---|---|---|---|
|
|
@@ -2,7 +2,6 @@ | |||||
|
|
||||||
| import ( | ||||||
| "bufio" | ||||||
| "bytes" | ||||||
| "context" | ||||||
| "encoding/json" | ||||||
| "fmt" | ||||||
|
|
@@ -91,7 +90,7 @@ | |||||
| ID: id, | ||||||
| Created: created, | ||||||
| Model: req.Model, // we have to return what the user sent here, due to OpenAI spec. | ||||||
| Choices: []schema.Choice{{Delta: &schema.Message{Role: "assistant", Content: &textContentToReturn}}}, | ||||||
| Choices: []schema.Choice{{Delta: &schema.Message{Role: "assistant"}, Index: 0, FinishReason: nil}}, | ||||||
| Object: "chat.completion.chunk", | ||||||
| } | ||||||
| responses <- initialMessage | ||||||
|
|
@@ -111,7 +110,7 @@ | |||||
| ID: id, | ||||||
| Created: created, | ||||||
| Model: req.Model, // we have to return what the user sent here, due to OpenAI spec. | ||||||
| Choices: []schema.Choice{{Delta: &schema.Message{Content: &s}, Index: 0}}, | ||||||
| Choices: []schema.Choice{{Delta: &schema.Message{Content: &s}, Index: 0, FinishReason: nil}}, | ||||||
| Object: "chat.completion.chunk", | ||||||
| Usage: usage, | ||||||
| } | ||||||
|
|
@@ -145,7 +144,7 @@ | |||||
| ID: id, | ||||||
| Created: created, | ||||||
| Model: req.Model, // we have to return what the user sent here, due to OpenAI spec. | ||||||
| Choices: []schema.Choice{{Delta: &schema.Message{Role: "assistant", Content: &textContentToReturn}}}, | ||||||
| Choices: []schema.Choice{{Delta: &schema.Message{Role: "assistant"}, Index: 0, FinishReason: nil}}, | ||||||
| Object: "chat.completion.chunk", | ||||||
| } | ||||||
| responses <- initialMessage | ||||||
|
|
@@ -169,7 +168,7 @@ | |||||
| ID: id, | ||||||
| Created: created, | ||||||
| Model: req.Model, // we have to return what the user sent here, due to OpenAI spec. | ||||||
| Choices: []schema.Choice{{Delta: &schema.Message{Content: &result}, Index: 0}}, | ||||||
| Choices: []schema.Choice{{Delta: &schema.Message{Content: &result}, Index: 0, FinishReason: nil}}, | ||||||
| Object: "chat.completion.chunk", | ||||||
| Usage: usage, | ||||||
| } | ||||||
|
|
@@ -197,7 +196,10 @@ | |||||
| }, | ||||||
| }, | ||||||
| }, | ||||||
| }}}, | ||||||
| }, | ||||||
| Index: 0, | ||||||
| FinishReason: nil, | ||||||
| }}, | ||||||
| Object: "chat.completion.chunk", | ||||||
| } | ||||||
| responses <- initialMessage | ||||||
|
|
@@ -220,7 +222,10 @@ | |||||
| }, | ||||||
| }, | ||||||
| }, | ||||||
| }}}, | ||||||
| }, | ||||||
| Index: 0, | ||||||
| FinishReason: nil, | ||||||
| }}, | ||||||
| Object: "chat.completion.chunk", | ||||||
| } | ||||||
| } | ||||||
|
|
@@ -427,11 +432,14 @@ | |||||
| if len(ev.Choices[0].Delta.ToolCalls) > 0 { | ||||||
| toolsCalled = true | ||||||
| } | ||||||
| var buf bytes.Buffer | ||||||
| enc := json.NewEncoder(&buf) | ||||||
| enc.Encode(ev) | ||||||
| log.Debug().Msgf("Sending chunk: %s", buf.String()) | ||||||
| _, err := fmt.Fprintf(w, "data: %v\n", buf.String()) | ||||||
| respData, err := json.Marshal(ev) | ||||||
| if err != nil { | ||||||
| log.Debug().Msgf("Failed to marshal response: %v", err) | ||||||
| input.Cancel() | ||||||
| continue | ||||||
| } | ||||||
| log.Debug().Msgf("Sending chunk: %s", string(respData)) | ||||||
| _, err = fmt.Fprintf(w, "data: %s\n\n", string(respData)) | ||||||
| if err != nil { | ||||||
| log.Debug().Msgf("Sending chunk failed: %v", err) | ||||||
| input.Cancel() | ||||||
|
|
@@ -443,34 +451,40 @@ | |||||
| } | ||||||
| log.Error().Msgf("Stream ended with error: %v", err) | ||||||
|
|
||||||
| stopReason := FinishReasonStop | ||||||
| resp := &schema.OpenAIResponse{ | ||||||
| ID: id, | ||||||
| Created: created, | ||||||
| Model: input.Model, // we have to return what the user sent here, due to OpenAI spec. | ||||||
| Choices: []schema.Choice{ | ||||||
| { | ||||||
| FinishReason: "stop", | ||||||
| FinishReason: &stopReason, | ||||||
| Index: 0, | ||||||
| Delta: &schema.Message{Content: "Internal error: " + err.Error()}, | ||||||
| }}, | ||||||
| Object: "chat.completion.chunk", | ||||||
| Usage: *usage, | ||||||
| } | ||||||
| respData, _ := json.Marshal(resp) | ||||||
|
|
||||||
| w.WriteString(fmt.Sprintf("data: %s\n\n", respData)) | ||||||
| respData, marshalErr := json.Marshal(resp) | ||||||
| if marshalErr != nil { | ||||||
| log.Error().Msgf("Failed to marshal error response: %v", marshalErr) | ||||||
| // Send a simple error message as fallback | ||||||
| w.WriteString("data: {\"error\":\"Internal error\"}\n\n") | ||||||
| } else { | ||||||
| w.WriteString(fmt.Sprintf("data: %s\n\n", respData)) | ||||||
Check warning — Code scanning / gosec: Errors unhandled (the return value of this call is not checked)
|
||||||
| } | ||||||
| w.WriteString("data: [DONE]\n\n") | ||||||
| w.Flush() | ||||||
|
|
||||||
| return | ||||||
| } | ||||||
| } | ||||||
|
|
||||||
| finishReason := "stop" | ||||||
| finishReason := FinishReasonStop | ||||||
| if toolsCalled && len(input.Tools) > 0 { | ||||||
| finishReason = "tool_calls" | ||||||
| finishReason = FinishReasonToolCalls | ||||||
| } else if toolsCalled { | ||||||
| finishReason = "function_call" | ||||||
| finishReason = FinishReasonFunctionCall | ||||||
| } | ||||||
|
|
||||||
| resp := &schema.OpenAIResponse{ | ||||||
|
|
@@ -479,9 +493,9 @@ | |||||
| Model: input.Model, // we have to return what the user sent here, due to OpenAI spec. | ||||||
| Choices: []schema.Choice{ | ||||||
| { | ||||||
| FinishReason: finishReason, | ||||||
| FinishReason: &finishReason, | ||||||
| Index: 0, | ||||||
| Delta: &schema.Message{Content: &textContentToReturn}, | ||||||
| Delta: &schema.Message{}, | ||||||
|
||||||
| Delta: &schema.Message{}, | |
| Delta: &schema.Message{Role: "assistant"}, |
| Original file line number | Diff line number | Diff line change |
|---|---|---|
| @@ -0,0 +1,8 @@ | ||
| package openai | ||
|
|
||
| // Finish reason constants for OpenAI API responses. | ||
| // These are the finish_reason values ("stop", "tool_calls", | ||
| // "function_call") that the OpenAI Chat Completions API spec | ||
| // defines for terminal streaming chunks; the SSE handlers set | ||
| // one of these on the final chat.completion.chunk choice. | ||
| const ( | ||
| FinishReasonStop = "stop" | ||
| FinishReasonToolCalls = "tool_calls" | ||
| FinishReasonFunctionCall = "function_call" | ||
| ) |
| Original file line number | Diff line number | Diff line change |
|---|---|---|
| @@ -1,5 +1,3 @@ | ||
| module github.com/McShelby/hugo-theme-relearn.git | ||
|
|
||
| go 1.19 | ||
|
|
||
| require github.com/gohugoio/hugo-mod-bootstrap-scss/v5 v5.20300.20200 // indirect |
| Original file line number | Diff line number | Diff line change |
|---|---|---|
| @@ -1,4 +0,0 @@ | ||
| github.com/gohugoio/hugo-mod-bootstrap-scss/v5 v5.20300.20200 h1:SmpwwN3DNzJWbV+IT8gaFu07ENUFpCvKou5BHYUKuVs= | ||
| github.com/gohugoio/hugo-mod-bootstrap-scss/v5 v5.20300.20200/go.mod h1:kx8MBj9T7SFR8ZClWvKZPmmUxBaltkoXvnWlZZcSnYA= | ||
| github.com/gohugoio/hugo-mod-jslibs-dist/popperjs/v2 v2.21100.20000/go.mod h1:mFberT6ZtcchrsDtfvJM7aAH2bDKLdOnruUHl0hlapI= | ||
| github.com/twbs/bootstrap v5.3.2+incompatible/go.mod h1:fZTSrkpSf0/HkL0IIJzvVspTt1r9zuf7XlZau8kpcY0= | ||
Check warning — Code scanning / gosec: Errors unhandled