Skip to content

Commit 287c37c

Browse files
Making all properties of the Chunk object optional
1 parent 15cd189 commit 287c37c

File tree

6 files changed

+25
-25
lines changed

6 files changed

+25
-25
lines changed

Examples/SwiftOpenAIExample/SwiftOpenAIExample/ChatDemo/ChatProvider.swift

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -48,7 +48,7 @@ import SwiftOpenAI
4848
do {
4949
let stream = try await service.startStreamedChat(parameters: parameters)
5050
for try await result in stream {
51-
let content = result.choices.first?.delta.content ?? ""
51+
let content = result.choices?.first?.delta?.content ?? ""
5252
self.message += content
5353
}
5454
} catch APIError.responseUnsuccessful(let description, let statusCode) {

Examples/SwiftOpenAIExample/SwiftOpenAIExample/ChatFunctionsCall/Stream/ChatFunctionsCallStreamProvider.swift

Lines changed: 5 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -100,12 +100,12 @@ struct FunctionCallStreamedResponse {
100100
let stream = try await service.startStreamedChat(parameters: parameters)
101101
for try await result in stream {
102102
// Extract the first choice from the stream results, if none exist, exit the loop.
103-
if let choice = result.choices.first {
103+
if let choice = result.choices?.first {
104104
/// Because we are using the stream API we need to wait to populate
105105
/// the needed values that comes from the streamed API to construct a valid tool call response.
106106
/// This is not needed if the stream is set to false in the API completion request.
107107
/// # Step 2: check if the model wanted to call a function
108-
if let toolCalls = choice.delta.toolCalls {
108+
if let toolCalls = choice.delta?.toolCalls {
109109

110110
/// # Step 3: Define the available functions to be called
111111
availableFunctions = [.createImage: generateImage(arguments:)]
@@ -114,7 +114,7 @@ struct FunctionCallStreamedResponse {
114114
}
115115

116116
/// The streamed content to display
117-
if let newContent = choice.delta.content {
117+
if let newContent = choice.delta?.content {
118118
await updateLastAssistantMessage(.init(
119119
content: .content(.init(text: newContent)),
120120
origin: .received(.gpt)))
@@ -213,10 +213,10 @@ struct FunctionCallStreamedResponse {
213213
let stream = try await service.startStreamedChat(parameters: paramsForChat)
214214
for try await result in stream {
215215
// Extract the first choice from the stream results, if none exist, exit the loop.
216-
guard let choice = result.choices.first else { return }
216+
guard let choice = result.choices?.first else { return }
217217

218218
/// The streamed content to display
219-
if let newContent = choice.delta.content {
219+
if let newContent = choice.delta?.content {
220220
await updateLastAssistantMessage(.init(content: .content(.init(text: newContent)), origin: .received(.gpt)))
221221
}
222222
}

Examples/SwiftOpenAIExample/SwiftOpenAIExample/ChatStreamFluidConversationDemo/ChatFluidConversationProvider.swift

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -57,7 +57,7 @@ import SwiftOpenAI
5757
let stream = try await service.startStreamedChat(parameters: localParameters)
5858
for try await result in stream {
5959
// Extract the first choice from the stream results, if none exist, exit the loop.
60-
guard let choice = result.choices.first else { return }
60+
guard let choice = result.choices?.first else { return }
6161

6262
// Store initial `role` and `functionCall` data from the first `choice.delta` for UI display.
6363
// This information is essential for maintaining context in the conversation and for updating
@@ -73,7 +73,7 @@ import SwiftOpenAI
7373
// Assign the content received in the current message to the newDelta.
7474
newDelta.content = temporalReceivedMessageContent
7575
// Update the UI with the latest assistant message and the corresponding delta.
76-
updateLastAssistantMessage(content: choice.delta.content ?? "", delta: newDelta)
76+
updateLastAssistantMessage(content: choice.delta?.content ?? "", delta: newDelta)
7777

7878
// Evaluate the `finishReason` to determine if the conversation has reached a logical end.
7979
// If so, package the accumulated data into a new message parameter that will be used

Examples/SwiftOpenAIExample/SwiftOpenAIExample/ChatStructuredOutputs/ChatStructuredOutputProvider.swift

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -47,10 +47,10 @@ final class ChatStructuredOutputProvider {
4747
do {
4848
let stream = try await service.startStreamedChat(parameters: parameters)
4949
for try await result in stream {
50-
let firstChoiceDelta = result.choices.first?.delta
50+
let firstChoiceDelta = result.choices?.first?.delta
5151
let content = firstChoiceDelta?.refusal ?? firstChoiceDelta?.content ?? ""
5252
self.message += content
53-
if result.choices.first?.finishReason != nil {
53+
if result.choices?.first?.finishReason != nil {
5454
self.message = self.message.asJsonFormatted()
5555
}
5656
}

Examples/SwiftOpenAIExample/SwiftOpenAIExample/Vision/ChatVisionProvider.swift

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -48,7 +48,7 @@ import SwiftOpenAI
4848
let stream = try await service.startStreamedChat(parameters: parameters)
4949
for try await result in stream {
5050
// Extract the first choice from the stream results, if none exist, exit the loop.
51-
guard let choice = result.choices.first else { return }
51+
guard let choice = result.choices?.first else { return }
5252

5353
// Store initial `role` and `functionCall` data from the first `choice.delta` for UI display.
5454
// This information is essential for maintaining context in the conversation and for updating
@@ -64,7 +64,7 @@ import SwiftOpenAI
6464
// Assign the content received in the current message to the newDelta.
6565
newDelta.content = temporalReceivedMessageContent
6666
// Update the UI with the latest assistant message and the corresponding delta.
67-
await updateLastAssistantMessage(content: choice.delta.content ?? "", delta: newDelta)
67+
await updateLastAssistantMessage(content: choice.delta?.content ?? "", delta: newDelta)
6868
}
6969
} catch {
7070
// If an error occurs, update the UI to display the error message.

Sources/OpenAI/Public/ResponseModels/Chat/ChatCompletionChunkObject.swift

Lines changed: 13 additions & 13 deletions
Original file line numberDiff line numberDiff line change
@@ -13,29 +13,29 @@ public struct ChatCompletionChunkObject: Decodable {
1313
/// A unique identifier for the chat completion chunk.
1414
public let id: String?
1515
/// A list of chat completion choices. Can be more than one if n is greater than 1.
16-
public let choices: [ChatChoice]
16+
public let choices: [ChatChoice]?
1717
/// The Unix timestamp (in seconds) of when the chat completion chunk was created.
18-
public let created: Int
18+
public let created: Int?
1919
/// The model to generate the completion.
20-
public let model: String
20+
public let model: String?
2121
/// The service tier used for processing the request. This field is only included if the service_tier parameter is specified in the request.
2222
public let serviceTier: String?
2323
/// This fingerprint represents the backend configuration that the model runs with.
2424
/// Can be used in conjunction with the seed request parameter to understand when backend changes have been made that might impact determinism.
2525
public let systemFingerprint: String?
2626
/// The object type, which is always chat.completion.chunk.
27-
public let object: String
27+
public let object: String?
2828
/// An optional field that will only be present when you set stream_options: {"include_usage": true} in your request. When present, it contains a null value except for the last chunk which contains the token usage statistics for the entire request.
2929
public let usage: ChatUsage?
3030

3131
public struct ChatChoice: Decodable {
3232

3333
/// A chat completion delta generated by streamed model responses.
34-
public let delta: Delta
34+
public let delta: Delta?
3535
/// The reason the model stopped generating tokens. This will be stop if the model hit a natural stop point or a provided stop sequence, length if the maximum number of tokens specified in the request was reached, content_filter if content was omitted due to a flag from our content filters, tool_calls if the model called a tool, or function_call (deprecated) if the model called a function.
3636
public let finishReason: IntOrStringValue?
3737
/// The index of the choice in the list of choices.
38-
public let index: Int
38+
public let index: Int?
3939
/// Provided by the Vision API.
4040
public let finishDetails: FinishDetails?
4141
/// Log probability information for the choice.
@@ -69,18 +69,18 @@ public struct ChatCompletionChunkObject: Decodable {
6969

7070
public struct LogProb: Decodable {
7171
/// A list of message content tokens with log probability information.
72-
let content: [TokenDetail]
72+
let content: [TokenDetail]?
7373
}
7474

7575
public struct TokenDetail: Decodable {
7676
/// The token.
77-
let token: String
77+
let token: String?
7878
/// The log probability of this token.
79-
let logprob: Double
79+
let logprob: Double?
8080
/// A list of integers representing the UTF-8 bytes representation of the token. Useful in instances where characters are represented by multiple tokens and their byte representations must be combined to generate the correct text representation. Can be null if there is no bytes representation for the token.
8181
let bytes: [Int]?
8282
/// List of the most likely tokens and their log probability, at this token position. In rare cases, there may be fewer than the number of requested top_logprobs returned.
83-
let topLogprobs: [TopLogProb]
83+
let topLogprobs: [TopLogProb]?
8484

8585
enum CodingKeys: String, CodingKey {
8686
case token, logprob, bytes
@@ -89,17 +89,17 @@ public struct ChatCompletionChunkObject: Decodable {
8989

9090
struct TopLogProb: Decodable {
9191
/// The token.
92-
let token: String
92+
let token: String?
9393
/// The log probability of this token.
94-
let logprob: Double
94+
let logprob: Double?
9595
/// A list of integers representing the UTF-8 bytes representation of the token. Useful in instances where characters are represented by multiple tokens and their byte representations must be combined to generate the correct text representation. Can be null if there is no bytes representation for the token.
9696
let bytes: [Int]?
9797
}
9898
}
9999

100100
/// Provided by the Vision API.
101101
public struct FinishDetails: Decodable {
102-
let type: String
102+
let type: String?
103103
}
104104

105105
enum CodingKeys: String, CodingKey {

0 commit comments

Comments
 (0)