Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 2 additions & 0 deletions lib/openai/helpers/streaming/chat_completion_stream.rb
Original file line number Diff line number Diff line change
Expand Up @@ -8,6 +8,8 @@ class ChatCompletionStream

def initialize(raw_stream:, response_format: nil, input_tools: nil)
@raw_stream = raw_stream
@headers = raw_stream.headers
@status = raw_stream.status
@state = ChatCompletionStreamState.new(
response_format: response_format,
input_tools: input_tools
Expand Down
2 changes: 2 additions & 0 deletions lib/openai/helpers/streaming/response_stream.rb
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,8 @@ def initialize(raw_stream:, text_format: nil, starting_after: nil)
@text_format = text_format
@starting_after = starting_after
@raw_stream = raw_stream
@headers = raw_stream.headers
@status = raw_stream.status
@iterator = iterator
@state = ResponseStreamState.new(
text_format: text_format
Expand Down
16 changes: 15 additions & 1 deletion lib/openai/internal/transport/base_client.rb
Original file line number Diff line number Diff line change
Expand Up @@ -510,10 +510,24 @@ def request(req)
page.new(client: self, req: req, headers: headers, page_data: decoded)
else
unwrapped = OpenAI::Internal::Util.dig(decoded, unwrap)
OpenAI::Internal::Type::Converter.coerce(model, unwrapped)
attach_response_metadata(OpenAI::Internal::Type::Converter.coerce(model, unwrapped), headers)
end
end

# @api private
#
# Attaches response metadata (the HTTP headers) to a decoded result when
# the result type opts in via `__set_response_headers`; any other value
# is passed through untouched.
#
# @param result [Object]
# @param headers [Hash{String=>String}]
#
# @return [Object]
private def attach_response_metadata(result, headers)
  # `private def` limits visibility to this method only. A bare `private`
  # section marker here would also make every method defined after this
  # point in the class private, changing the visibility of unrelated
  # methods further down the file.
  return result unless result.respond_to?(:__set_response_headers)

  result.__set_response_headers(headers)
end

# @api private
#
# @return [String]
Expand Down
14 changes: 14 additions & 0 deletions lib/openai/internal/type/base_model.rb
Original file line number Diff line number Diff line change
Expand Up @@ -8,6 +8,11 @@ class BaseModel
extend OpenAI::Internal::Type::Converter
extend OpenAI::Internal::Util::SorbetRuntimeSupport

# @api public
#
# @return [Hash{String=>String}, nil]
attr_reader :response_headers

class << self
# @api private
#
Expand Down Expand Up @@ -486,6 +491,15 @@ def initialize(data = {})
end
end

# @api private
#
# Records the HTTP headers of the originating response on this instance
# and returns the receiver so the call can be chained.
#
# @param headers [Hash{String=>String}]
# @return [self]
def __set_response_headers(headers)
  tap { @response_headers = headers }
end

class << self
# @api private
#
Expand Down
15 changes: 15 additions & 0 deletions lib/openai/internal/type/base_page.rb
Original file line number Diff line number Diff line change
Expand Up @@ -9,6 +9,11 @@ module Type
#
# This module provides a base implementation for paginated responses in the SDK.
module BasePage
# @api public
#
# @return [Hash{String=>String}, nil]
attr_reader :response_headers

# rubocop:disable Lint/UnusedMethodArgument

# @api public
Expand Down Expand Up @@ -45,9 +50,19 @@ def initialize(client:, req:, headers:, page_data:)
@client = client
@req = req
@model = req.fetch(:model)
@response_headers = headers
super()
end

# @api private
#
# Stores the response's HTTP headers on the page object; returns the
# receiver to allow chaining.
#
# @param headers [Hash{String=>String}]
# @return [self]
def __set_response_headers(header_hash)
  instance_variable_set(:@response_headers, header_hash)
  self
end

# rubocop:enable Lint/UnusedMethodArgument
end
end
Expand Down
5 changes: 5 additions & 0 deletions lib/openai/internal/type/base_stream.rb
Original file line number Diff line number Diff line change
Expand Up @@ -19,6 +19,11 @@ module BaseStream
# @return [Hash{String=>String}]
attr_reader :headers

# @api public
#
# Exposes the stream's HTTP headers under the same name used by
# non-streaming responses and pages.
#
# @return [Hash{String=>String}]
def response_headers
  @headers
end

# @api public
#
# @return [void]
Expand Down
8 changes: 8 additions & 0 deletions rbi/openai/internal/type/base_model.rbi
Original file line number Diff line number Diff line change
Expand Up @@ -219,6 +219,10 @@ module OpenAI
def [](key)
end

sig { returns(T.nilable(T::Hash[String, String])) }
attr_reader :response_headers
end

# Returns a Hash of the data underlying this object. O(1)
#
# Keys are Symbols and values are the raw values from the response. The return
Expand Down Expand Up @@ -278,6 +282,10 @@ module OpenAI
def self.new(data = {})
end

# @api private
sig { params(headers: T::Hash[String, String]).returns(T.self_type) }
def __set_response_headers(headers)
end

class << self
# @api private
sig { params(depth: Integer).returns(String) }
Expand Down
8 changes: 8 additions & 0 deletions rbi/openai/internal/type/base_page.rbi
Original file line number Diff line number Diff line change
Expand Up @@ -25,6 +25,10 @@ module OpenAI
def to_enum
end

sig { returns(T.nilable(T::Hash[String, String])) }
attr_reader :response_headers
end

# @api private
sig do
params(
Expand All @@ -36,6 +40,10 @@ module OpenAI
end
def initialize(client:, req:, headers:, page_data:)
end

# @api private
sig { params(headers: T::Hash[String, String]).returns(T.self_type) }
def __set_response_headers(headers)
end
end
end
end
Expand Down
4 changes: 4 additions & 0 deletions rbi/openai/internal/type/base_stream.rbi
Original file line number Diff line number Diff line change
Expand Up @@ -18,6 +18,10 @@ module OpenAI
sig { returns(T::Hash[String, String]) }
attr_reader :headers

# @api public
sig { returns(T::Hash[String, String]) }
def response_headers
end

sig { void }
def close
end
Expand Down
4 changes: 4 additions & 0 deletions sig/openai/internal/type/base_model.rbs
Original file line number Diff line number Diff line change
Expand Up @@ -77,6 +77,8 @@ module OpenAI

def []: (Symbol key) -> top?

def response_headers: -> ::Hash[String, String]?

def to_h: -> ::Hash[Symbol, top]

alias to_hash to_h
Expand All @@ -91,6 +93,8 @@ module OpenAI

def initialize: (?::Hash[Symbol, top] | instance data) -> void

def __set_response_headers: (::Hash[String, String] headers) -> self

def self.inspect: (?depth: Integer) -> String

def to_s: -> String
Expand Down
4 changes: 4 additions & 0 deletions sig/openai/internal/type/base_page.rbs
Original file line number Diff line number Diff line change
Expand Up @@ -12,12 +12,16 @@ module OpenAI

alias enum_for to_enum

def response_headers: -> ::Hash[String, String]?

def initialize: (
client: OpenAI::Internal::Transport::BaseClient,
req: OpenAI::Internal::Transport::BaseClient::request_components,
headers: ::Hash[String, String],
page_data: top
) -> void

def __set_response_headers: (::Hash[String, String] headers) -> self
end
end
end
Expand Down
2 changes: 2 additions & 0 deletions sig/openai/internal/type/base_stream.rbs
Original file line number Diff line number Diff line change
Expand Up @@ -8,6 +8,8 @@ module OpenAI

attr_reader headers: ::Hash[String, String]

def response_headers: -> ::Hash[String, String]

def close: -> void

private def iterator: -> Enumerable[Elem]
Expand Down
31 changes: 31 additions & 0 deletions test/openai/client_test.rb
Original file line number Diff line number Diff line change
Expand Up @@ -326,4 +326,35 @@ def test_default_headers
headers.each { refute_empty(_1) }
end
end

# Verifies that a plain (non-streaming) model response carries the HTTP
# headers of the underlying request via #response_headers.
def test_non_streaming_response_exposes_openai_response_metadata
  stubbed_headers = {"x-request-id" => "req_123", "openai-processing-ms" => "45.6"}
  stub_request(:get, "http://localhost/models/gpt-4o-mini").to_return_json(
    status: 200,
    headers: stubbed_headers,
    body: {id: "gpt-4o-mini", object: "model", created: 1_700_000_000, owned_by: "openai"}
  )
  client = OpenAI::Client.new(base_url: "http://localhost", api_key: "My API Key")

  model = client.models.retrieve("gpt-4o-mini")

  stubbed_headers.each do |name, value|
    assert_equal(value, model.response_headers[name])
  end
end

# Verifies that paginated (list) responses also expose the HTTP headers
# of the underlying request via #response_headers.
def test_page_response_exposes_openai_response_metadata
  stubbed_headers = {"x-request-id" => "req_page", "openai-processing-ms" => "12"}
  stub_request(:get, "http://localhost/models").to_return_json(
    status: 200,
    headers: stubbed_headers,
    body: {
      object: "list",
      data: [{id: "gpt-4o-mini", object: "model", created: 1_700_000_000, owned_by: "openai"}]
    }
  )
  client = OpenAI::Client.new(base_url: "http://localhost", api_key: "My API Key")

  page = client.models.list

  stubbed_headers.each do |name, value|
    assert_equal(value, page.response_headers[name])
  end
end
end
24 changes: 24 additions & 0 deletions test/openai/resources/chat/completions/streaming_test.rb
Original file line number Diff line number Diff line change
Expand Up @@ -110,6 +110,30 @@ def test_get_final_completion
assert_equal(12, completion.usage.total_tokens) if completion.usage
end

# Verifies that a chat completion stream surfaces the HTTP response
# headers through #response_headers before any events are consumed.
def test_stream_exposes_openai_response_metadata
  expected_body = hash_including(
    messages: [{content: "Hello", role: "user"}],
    model: "gpt-4o-mini",
    stream: true
  )
  stub_request(:post, "http://localhost/chat/completions")
    .with(body: expected_body)
    .to_return(
      status: 200,
      headers: {
        "Content-Type" => "text/event-stream",
        "x-request-id" => "req_chat_stream",
        "openai-processing-ms" => "34"
      },
      body: completion_with_usage_sse_response
    )

  stream = @client.chat.completions.stream(**basic_params)

  assert_equal("req_chat_stream", stream.response_headers["x-request-id"])
  assert_equal("34", stream.response_headers["openai-processing-ms"])
end

def test_get_output_text
stub_streaming_response(basic_text_sse_response)

Expand Down
25 changes: 25 additions & 0 deletions test/openai/resources/responses/streaming_test.rb
Original file line number Diff line number Diff line change
Expand Up @@ -100,6 +100,31 @@ def test_get_final_response
end
end

# Verifies that a Responses API stream surfaces the HTTP response
# headers through #response_headers before any events are consumed.
def test_stream_exposes_openai_response_metadata
  expected_body = hash_including(
    instructions: "You are a helpful assistant",
    messages: [{content: "Hello", role: "user"}],
    model: "gpt-4",
    stream: true
  )
  stub_request(:post, "http://localhost/responses")
    .with(body: expected_body)
    .to_return(
      status: 200,
      headers: {
        "Content-Type" => "text/event-stream",
        "x-request-id" => "req_stream",
        "openai-processing-ms" => "78.9"
      },
      body: basic_text_sse_response
    )

  stream = @client.responses.stream(**basic_params)

  assert_equal("req_stream", stream.response_headers["x-request-id"])
  assert_equal("78.9", stream.response_headers["openai-processing-ms"])
end

def test_get_output_text
stub_streaming_response(basic_text_sse_response)

Expand Down