Skip to content

Commit

Permalink
Merge pull request #269 from LimChihi/feat_stream_usage
Browse files Browse the repository at this point in the history
  • Loading branch information
nezhyborets authored Feb 22, 2025
2 parents ea3e55d + 2908c54 commit 0e8b8b1
Show file tree
Hide file tree
Showing 3 changed files with 76 additions and 1 deletion.
25 changes: 24 additions & 1 deletion Sources/OpenAI/Public/Models/ChatQuery.swift
Original file line number Diff line number Diff line change
Expand Up @@ -74,6 +74,8 @@ public struct ChatQuery: Equatable, Codable, Streamable {
/// If set, partial message deltas will be sent, like in ChatGPT. Tokens will be sent as data-only server-sent events as they become available, with the stream terminated by a data: [DONE] message.
/// https://developer.mozilla.org/en-US/docs/Web/API/Server-sent_events/Using_server-sent_events#Event_stream_format
public var stream: Bool
/// Options for streaming response. Only set this when you set stream: true.
public var streamOptions: Self.StreamOptions?

public init(
messages: [Self.ChatCompletionMessageParam],
Expand All @@ -95,7 +97,8 @@ public struct ChatQuery: Equatable, Codable, Streamable {
topLogprobs: Int? = nil,
topP: Double? = nil,
user: String? = nil,
stream: Bool = false
stream: Bool = false,
streamOptions: StreamOptions? = nil
) {
self.messages = messages
self.model = model
Expand All @@ -117,6 +120,7 @@ public struct ChatQuery: Equatable, Codable, Streamable {
self.topP = topP
self.user = user
self.stream = stream
self.streamOptions = streamOptions
}

public enum ChatCompletionMessageParam: Codable, Equatable {
Expand Down Expand Up @@ -1149,6 +1153,24 @@ public struct ChatQuery: Equatable, Codable, Streamable {
case function
}
}

public struct StreamOptions: Codable, Equatable {

    /// Property names map onto the OpenAI snake_case wire format.
    public enum CodingKeys: String, CodingKey {
        case includeUsage = "include_usage"
    }

    /// When `true`, one extra chunk is streamed just before the `data: [DONE]`
    /// message; its `usage` field carries token statistics for the entire request
    /// while its `choices` array is empty. Every other chunk still includes a
    /// `usage` field, but with a `null` value.
    public let includeUsage: Bool

    /// Creates stream options.
    /// - Parameter includeUsage: Requests the final usage-reporting chunk.
    public init(includeUsage: Bool) {
        self.includeUsage = includeUsage
    }
}

public enum CodingKeys: String, CodingKey {
case messages
Expand All @@ -1171,6 +1193,7 @@ public struct ChatQuery: Equatable, Codable, Streamable {
case topP = "top_p"
case user
case stream
case streamOptions = "stream_options"
}
}

Expand Down
3 changes: 3 additions & 0 deletions Sources/OpenAI/Public/Models/ChatStreamResult.swift
Original file line number Diff line number Diff line change
Expand Up @@ -130,6 +130,8 @@ public struct ChatStreamResult: Codable, Equatable {
public let choices: [Choice]
/// This fingerprint represents the backend configuration that the model runs with. Can be used in conjunction with the `seed` request parameter to understand when backend changes have been made that might impact determinism.
public let systemFingerprint: String?
/// Usage statistics for the completion request.
public let usage: ChatResult.CompletionUsage?

public enum CodingKeys: String, CodingKey {
case id
Expand All @@ -138,5 +140,6 @@ public struct ChatStreamResult: Codable, Equatable {
case model
case choices
case systemFingerprint = "system_fingerprint"
case usage
}
}
49 changes: 49 additions & 0 deletions Tests/OpenAITests/OpenAITestsDecoder.swift
Original file line number Diff line number Diff line change
Expand Up @@ -156,6 +156,55 @@ class OpenAITestsDecoder: XCTestCase {

XCTAssertEqual(chatQueryAsDict, expectedValueAsDict)
}

/// Verifies that `ChatQuery` encodes `stream_options.include_usage` when
/// `streamOptions` is set alongside `stream: true`.
func testChatQueryWithStreamOptions() async throws {
    // XCTUnwrap instead of `!`: a nil message fails the test with a clear
    // diagnostic rather than crashing the whole test process.
    let message = try XCTUnwrap(ChatQuery.ChatCompletionMessageParam(role: .user, content: "Who are you?"))
    let chatQuery = ChatQuery(
        messages: [message],
        model: .gpt4,
        stream: true,
        streamOptions: .init(includeUsage: true)
    )
    let expectedValue = """
    {
        "model": "gpt-4",
        "messages": [
            {
                "role": "user",
                "content": "Who are you?"
            }
        ],
        "stream": true,
        "stream_options": {
            "include_usage" : true
        }
    }
    """

    // Compare as dictionaries so key order and whitespace don't matter.
    let chatQueryAsDict = try jsonDataAsNSDictionary(JSONEncoder().encode(chatQuery))
    let expectedValueAsDict = try jsonDataAsNSDictionary(expectedValue.data(using: .utf8)!)

    XCTAssertEqual(chatQueryAsDict, expectedValueAsDict)
}

/// Verifies that `ChatQuery` omits the `stream_options` key entirely when
/// `streamOptions` is left as its default `nil`.
func testChatQueryWithoutStreamOptions() async throws {
    // XCTUnwrap instead of `!`: a nil message fails the test with a clear
    // diagnostic rather than crashing the whole test process.
    let message = try XCTUnwrap(ChatQuery.ChatCompletionMessageParam(role: .user, content: "Who are you?"))
    let chatQuery = ChatQuery(
        messages: [message],
        model: .gpt4,
        stream: true
    )
    let expectedValue = """
    {
        "model": "gpt-4",
        "messages": [
            {
                "role": "user",
                "content": "Who are you?"
            }
        ],
        "stream": true
    }
    """

    // Compare as dictionaries so key order and whitespace don't matter.
    let chatQueryAsDict = try jsonDataAsNSDictionary(JSONEncoder().encode(chatQuery))
    let expectedValueAsDict = try jsonDataAsNSDictionary(expectedValue.data(using: .utf8)!)

    XCTAssertEqual(chatQueryAsDict, expectedValueAsDict)
}

func testChatQueryWithFunctionCall() async throws {
let chatQuery = ChatQuery(
Expand Down

0 comments on commit 0e8b8b1

Please sign in to comment.