diff --git a/lib/openai/helpers/streaming/chat_completion_stream.rb b/lib/openai/helpers/streaming/chat_completion_stream.rb
index 69df1b0ca..b201c4c25 100644
--- a/lib/openai/helpers/streaming/chat_completion_stream.rb
+++ b/lib/openai/helpers/streaming/chat_completion_stream.rb
@@ -8,6 +8,8 @@ class ChatCompletionStream
 
         def initialize(raw_stream:, response_format: nil, input_tools: nil)
           @raw_stream = raw_stream
+          @headers = raw_stream.headers
+          @status = raw_stream.status
           @state = ChatCompletionStreamState.new(
             response_format: response_format,
             input_tools: input_tools
diff --git a/lib/openai/helpers/streaming/response_stream.rb b/lib/openai/helpers/streaming/response_stream.rb
index c26d6a718..474173775 100644
--- a/lib/openai/helpers/streaming/response_stream.rb
+++ b/lib/openai/helpers/streaming/response_stream.rb
@@ -10,6 +10,8 @@ def initialize(raw_stream:, text_format: nil, starting_after: nil)
           @text_format = text_format
           @starting_after = starting_after
           @raw_stream = raw_stream
+          @headers = raw_stream.headers
+          @status = raw_stream.status
           @iterator = iterator
           @state = ResponseStreamState.new(
             text_format: text_format
diff --git a/lib/openai/internal/transport/base_client.rb b/lib/openai/internal/transport/base_client.rb
index 01772d3f1..794f4d29f 100644
--- a/lib/openai/internal/transport/base_client.rb
+++ b/lib/openai/internal/transport/base_client.rb
@@ -510,10 +510,22 @@ def request(req)
             page.new(client: self, req: req, headers: headers, page_data: decoded)
           else
             unwrapped = OpenAI::Internal::Util.dig(decoded, unwrap)
-            OpenAI::Internal::Type::Converter.coerce(model, unwrapped)
+            attach_response_metadata(OpenAI::Internal::Type::Converter.coerce(model, unwrapped), headers)
           end
       end
 
+      # @api private
+      #
+      # Copies the HTTP response headers onto a coerced model instance so callers
+      # can read request metadata (e.g. x-request-id). No-op for objects that do
+      # not support it.
+      #
+      # @param result [Object]
+      # @param headers [Hash{String=>String}]
+      #
+      # @return [Object]
+      private def attach_response_metadata(result, headers)
+        return result unless result.respond_to?(:__set_response_headers)
+
+        result.__set_response_headers(headers)
+      end
+
       # @api private
       #
       # @return [String]
diff --git a/lib/openai/internal/type/base_model.rb b/lib/openai/internal/type/base_model.rb
index d1e25f0c7..0c5b9c5a9 100644
--- a/lib/openai/internal/type/base_model.rb
+++ b/lib/openai/internal/type/base_model.rb
@@ -8,6 +8,11 @@ class BaseModel
         extend OpenAI::Internal::Type::Converter
         extend OpenAI::Internal::Util::SorbetRuntimeSupport
 
+        # @api public
+        #
+        # @return [Hash{String=>String}, nil]
+        attr_reader :response_headers
+
         class << self
           # @api private
           #
@@ -486,6 +491,15 @@ def initialize(data = {})
           end
         end
 
+        # @api private
+        #
+        # @param headers [Hash{String=>String}]
+        # @return [self]
+        def __set_response_headers(headers)
+          @response_headers = headers
+          self
+        end
+
        class << self
          # @api private
          #
diff --git a/lib/openai/internal/type/base_page.rb b/lib/openai/internal/type/base_page.rb
index 402b1f117..5e8417d39 100644
--- a/lib/openai/internal/type/base_page.rb
+++ b/lib/openai/internal/type/base_page.rb
@@ -9,6 +9,11 @@ module Type
       #
       # This module provides a base implementation for paginated responses in the SDK.
       module BasePage
+        # @api public
+        #
+        # @return [Hash{String=>String}, nil]
+        attr_reader :response_headers
+
         # rubocop:disable Lint/UnusedMethodArgument
 
         # @api public
@@ -45,9 +50,19 @@ def initialize(client:, req:, headers:, page_data:)
           @client = client
           @req = req
           @model = req.fetch(:model)
+          @response_headers = headers
           super()
         end
 
+        # @api private
+        #
+        # @param headers [Hash{String=>String}]
+        # @return [self]
+        def __set_response_headers(headers)
+          @response_headers = headers
+          self
+        end
+
         # rubocop:enable Lint/UnusedMethodArgument
       end
     end
diff --git a/lib/openai/internal/type/base_stream.rb b/lib/openai/internal/type/base_stream.rb
index 38951a780..928cf2d17 100644
--- a/lib/openai/internal/type/base_stream.rb
+++ b/lib/openai/internal/type/base_stream.rb
@@ -19,6 +19,11 @@ module BaseStream
         # @return [Hash{String=>String}]
         attr_reader :headers
 
+        # @api public
+        #
+        # @return [Hash{String=>String}]
+        def response_headers = @headers
+
         # @api public
         #
         # @return [void]
diff --git a/rbi/openai/internal/type/base_model.rbi b/rbi/openai/internal/type/base_model.rbi
index 21cb6dfb6..2787daf87 100644
--- a/rbi/openai/internal/type/base_model.rbi
+++ b/rbi/openai/internal/type/base_model.rbi
@@ -219,6 +219,9 @@ module OpenAI
         def [](key)
         end
 
+        sig { returns(T.nilable(T::Hash[String, String])) }
+        attr_reader :response_headers
+
         # Returns a Hash of the data underlying this object. O(1)
         #
         # Keys are Symbols and values are the raw values from the response. The return
@@ -278,6 +281,10 @@ module OpenAI
         def self.new(data = {})
         end
 
+        sig { params(headers: T::Hash[String, String]).returns(T.self_type) }
+        def __set_response_headers(headers)
+        end
+
         class << self
           # @api private
           sig { params(depth: Integer).returns(String) }
diff --git a/rbi/openai/internal/type/base_page.rbi b/rbi/openai/internal/type/base_page.rbi
index 19a344a09..0756dac39 100644
--- a/rbi/openai/internal/type/base_page.rbi
+++ b/rbi/openai/internal/type/base_page.rbi
@@ -25,6 +25,9 @@ module OpenAI
         def to_enum
         end
 
+        sig { returns(T.nilable(T::Hash[String, String])) }
+        attr_reader :response_headers
+
         # @api private
         sig do
           params(
@@ -36,6 +39,10 @@ module OpenAI
         end
         def initialize(client:, req:, headers:, page_data:)
         end
+
+        sig { params(headers: T::Hash[String, String]).returns(T.self_type) }
+        def __set_response_headers(headers)
+        end
       end
     end
   end
diff --git a/rbi/openai/internal/type/base_stream.rbi b/rbi/openai/internal/type/base_stream.rbi
index 9225515d8..4c435a1cc 100644
--- a/rbi/openai/internal/type/base_stream.rbi
+++ b/rbi/openai/internal/type/base_stream.rbi
@@ -18,6 +18,10 @@ module OpenAI
       sig { returns(T::Hash[String, String]) }
       attr_reader :headers
 
+      sig { returns(T::Hash[String, String]) }
+      def response_headers
+      end
+
       sig { void }
       def close
       end
diff --git a/sig/openai/internal/type/base_model.rbs b/sig/openai/internal/type/base_model.rbs
index f9e57a2e0..22a7ee123 100644
--- a/sig/openai/internal/type/base_model.rbs
+++ b/sig/openai/internal/type/base_model.rbs
@@ -77,6 +77,8 @@ module OpenAI
 
         def []: (Symbol key) -> top?
 
+        def response_headers: -> ::Hash[String, String]?
+
         def to_h: -> ::Hash[Symbol, top]
 
         alias to_hash to_h
@@ -91,6 +93,8 @@ module OpenAI
 
         def initialize: (?::Hash[Symbol, top] | instance data) -> void
 
+        def __set_response_headers: (::Hash[String, String] headers) -> self
+
         def self.inspect: (?depth: Integer) -> String
 
         def to_s: -> String
diff --git a/sig/openai/internal/type/base_page.rbs b/sig/openai/internal/type/base_page.rbs
index b04062a6d..99379af63 100644
--- a/sig/openai/internal/type/base_page.rbs
+++ b/sig/openai/internal/type/base_page.rbs
@@ -12,12 +12,16 @@ module OpenAI
 
         alias enum_for to_enum
 
+        def response_headers: -> ::Hash[String, String]?
+
         def initialize: (
           client: OpenAI::Internal::Transport::BaseClient,
           req: OpenAI::Internal::Transport::BaseClient::request_components,
           headers: ::Hash[String, String],
           page_data: top
         ) -> void
+
+        def __set_response_headers: (::Hash[String, String] headers) -> self
       end
     end
   end
diff --git a/sig/openai/internal/type/base_stream.rbs b/sig/openai/internal/type/base_stream.rbs
index 031b7982c..ea4a9dab3 100644
--- a/sig/openai/internal/type/base_stream.rbs
+++ b/sig/openai/internal/type/base_stream.rbs
@@ -8,6 +8,8 @@ module OpenAI
 
         attr_reader headers: ::Hash[String, String]
 
+        def response_headers: -> ::Hash[String, String]
+
         def close: -> void
 
         private def iterator: -> Enumerable[Elem]
diff --git a/test/openai/client_test.rb b/test/openai/client_test.rb
index 6a8d41eaf..711a471b6 100644
--- a/test/openai/client_test.rb
+++ b/test/openai/client_test.rb
@@ -326,4 +326,35 @@ def test_default_headers
       headers.each { refute_empty(_1) }
     end
   end
+
+  def test_non_streaming_response_exposes_openai_response_metadata
+    stub_request(:get, "http://localhost/models/gpt-4o-mini").to_return_json(
+      status: 200,
+      headers: {"x-request-id" => "req_123", "openai-processing-ms" => "45.6"},
+      body: {id: "gpt-4o-mini", object: "model", created: 1_700_000_000, owned_by: "openai"}
+    )
+
+    openai = OpenAI::Client.new(base_url: "http://localhost", api_key: "My API Key")
+    response = openai.models.retrieve("gpt-4o-mini")
+
+    assert_equal("req_123", response.response_headers["x-request-id"])
+    assert_equal("45.6", response.response_headers["openai-processing-ms"])
+  end
+
+  def test_page_response_exposes_openai_response_metadata
+    stub_request(:get, "http://localhost/models").to_return_json(
+      status: 200,
+      headers: {"x-request-id" => "req_page", "openai-processing-ms" => "12"},
+      body: {
+        object: "list",
+        data: [{id: "gpt-4o-mini", object: "model", created: 1_700_000_000, owned_by: "openai"}]
+      }
+    )
+
+    openai = OpenAI::Client.new(base_url: "http://localhost", api_key: "My API Key")
+    response = openai.models.list
+
+    assert_equal("req_page", response.response_headers["x-request-id"])
+    assert_equal("12", response.response_headers["openai-processing-ms"])
+  end
 end
diff --git a/test/openai/resources/chat/completions/streaming_test.rb b/test/openai/resources/chat/completions/streaming_test.rb
index 7355b6c8e..afb9b50be 100644
--- a/test/openai/resources/chat/completions/streaming_test.rb
+++ b/test/openai/resources/chat/completions/streaming_test.rb
@@ -110,6 +110,30 @@ def test_get_final_completion
     assert_equal(12, completion.usage.total_tokens) if completion.usage
   end
 
+  def test_stream_exposes_openai_response_metadata
+    stub_request(:post, "http://localhost/chat/completions")
+      .with(
+        body: hash_including(
+          messages: [{content: "Hello", role: "user"}],
+          model: "gpt-4o-mini",
+          stream: true
+        )
+      )
+      .to_return(
+        status: 200,
+        headers: {
+          "Content-Type" => "text/event-stream",
+          "x-request-id" => "req_chat_stream",
+          "openai-processing-ms" => "34"
+        },
+        body: completion_with_usage_sse_response
+      )
+
+    stream = @client.chat.completions.stream(**basic_params)
+    assert_equal("req_chat_stream", stream.response_headers["x-request-id"])
+    assert_equal("34", stream.response_headers["openai-processing-ms"])
+  end
+
   def test_get_output_text
     stub_streaming_response(basic_text_sse_response)
 
diff --git a/test/openai/resources/responses/streaming_test.rb b/test/openai/resources/responses/streaming_test.rb
index 5bd6708f3..0500ecdda 100644
--- a/test/openai/resources/responses/streaming_test.rb
+++ b/test/openai/resources/responses/streaming_test.rb
@@ -100,6 +100,31 @@ def test_get_final_response
     end
   end
 
+  def test_stream_exposes_openai_response_metadata
+    stub_request(:post, "http://localhost/responses")
+      .with(
+        body: hash_including(
+          instructions: "You are a helpful assistant",
+          messages: [{content: "Hello", role: "user"}],
+          model: "gpt-4",
+          stream: true
+        )
+      )
+      .to_return(
+        status: 200,
+        headers: {
+          "Content-Type" => "text/event-stream",
+          "x-request-id" => "req_stream",
+          "openai-processing-ms" => "78.9"
+        },
+        body: basic_text_sse_response
+      )
+
+    stream = @client.responses.stream(**basic_params)
+    assert_equal("req_stream", stream.response_headers["x-request-id"])
+    assert_equal("78.9", stream.response_headers["openai-processing-ms"])
+  end
+
   def test_get_output_text
     stub_streaming_response(basic_text_sse_response)
 