Responses API streaming does not receive any response if Reasoning is given when creating Response. #414
Overview

When calling ResponsesEndpoint.CreateModelResponseAsync with a Reasoning provided, no response is received.

To Reproduce

Use this MonoBehaviour to see the bug happening:

using OpenAI;
using OpenAI.Responses;
using System.Collections.Generic;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using UnityEngine;
using Utilities.WebRequestRest.Interfaces;
public class StreamingBug : MonoBehaviour {
    private OpenAI.Models.Model aiModel = OpenAI.Models.Model.GPT4o;
    private OpenAIClient api = null;
    private List<IResponseItem> conversation = new List<IResponseItem>();
    private List<Tool> tools = new List<Tool>();

    private void AddMessage(Role role, string message) {
        conversation.Add(new Message(role, message));
    }

    private async Task RequestResponse(CancellationToken cancellationToken) {
        Reasoning reasoning = new Reasoning(ReasoningEffort.Minimal, OpenAI.ReasoningSummary.Concise);
        CreateResponseRequest request = new CreateResponseRequest(conversation, aiModel, tools: tools, reasoning: reasoning);
        // CreateResponseRequest request = new CreateResponseRequest(conversation, aiModel, tools: tools);
        Response response = await api.ResponsesEndpoint.CreateModelResponseAsync(request, StreamCallback, cancellationToken);
        var responseItem = response.Output.LastOrDefault();
    }

    private OpenAIAuthentication GetOpenAIAuthentication() {
        // For now just hardcode this, but eventually we want to load it from a file or environment variable.
        const string apiKey = "MY_API_KEY";
        const string orgKey = "MY_ORG_KEY";
        const string projKey = "MY_PROJ_KEY";
        return new OpenAIAuthentication(apiKey, orgKey, projKey);
    }

    private async Task StreamCallback(string @event, IServerSentEvent sseEvent) {
        switch (sseEvent) {
            case Message messageItem:
                UnityEngine.Debug.Log("Message Item Received");
                UnityEngine.Debug.Log(messageItem.ToString());
                conversation.Add(messageItem);
                break;
            case FunctionToolCall functionToolCall:
                UnityEngine.Debug.Log("FunctionToolCall Received");
                conversation.Add(functionToolCall);
                var output = await functionToolCall.InvokeFunctionAsync();
                conversation.Add(output);
                await api.ResponsesEndpoint.CreateModelResponseAsync(new CreateResponseRequest(conversation, aiModel, tools: tools, toolChoice: "none"), StreamCallback);
                break;
            case OpenAI.Responses.ReasoningSummary reasoningSummary:
                UnityEngine.Debug.Log("ReasoningSummary Received");
                UnityEngine.Debug.Log(reasoningSummary.Text);
                break;
            case OpenAI.Responses.Response response:
                break;
            case Utilities.WebRequestRest.ServerSentEvent serverSentEvent:
                break;
            case OpenAI.Responses.TextContent textContent:
                break;
            default:
                // Trying to find out what kind of events I can possibly receive.
                UnityEngine.Debug.Log("Unknown " + sseEvent.GetType() + " Received");
                break;
        }
    }

    private void Awake() {
        api = new OpenAIClient(GetOpenAIAuthentication()) { EnableDebug = false };
    }

    private void Start() {
        AddMessage(OpenAI.Role.System, "You are a helpful assistant.");
        AddMessage(OpenAI.Role.User, "Why is the sky blue?");
        // Nothing is ever printed to the console.
        _ = RequestResponse(destroyCancellationToken);
    }
}
Answered by TypeDefinition, Oct 21, 2025:
Have you tried the sample behavior? I think the way you're throwing away the async calls doesn't help your case. Are there any error logs at all?
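For reference, a minimal sketch (not from the thread) of awaiting the request instead of discarding the task with `_ =` in Start, so that any exception thrown inside CreateModelResponseAsync actually reaches the Unity console. It reuses the names from the repro above (RequestResponse, destroyCancellationToken) and drops into the same StreamingBug class; the logging is only illustrative.

```csharp
// Await the request instead of discarding the task with `_ =`, so any
// exception thrown inside CreateModelResponseAsync reaches the console.
private async void Start() {
    AddMessage(OpenAI.Role.System, "You are a helpful assistant.");
    AddMessage(OpenAI.Role.User, "Why is the sky blue?");

    try {
        await RequestResponse(destroyCancellationToken);
    }
    catch (System.OperationCanceledException) {
        // Expected if the component is destroyed mid-request; nothing to report.
    }
    catch (System.Exception e) {
        // With `_ = RequestResponse(...)` this exception would be lost silently.
        UnityEngine.Debug.LogException(e);
    }
}
```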
I wrapped it in a try/catch, and it looks like reasoning effort is not supported by GPT-4o.
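A sketch of that try/catch, based on the repro's RequestResponse method. The exact error text is not quoted in the thread, so the comments only paraphrase the reported outcome; the workaround noted in the comments (dropping the `reasoning` argument, as in the commented-out request in the repro, or picking a reasoning-capable model) is an assumption drawn from that report.

```csharp
private async Task RequestResponse(CancellationToken cancellationToken) {
    Reasoning reasoning = new Reasoning(ReasoningEffort.Minimal, OpenAI.ReasoningSummary.Concise);
    CreateResponseRequest request = new CreateResponseRequest(conversation, aiModel, tools: tools, reasoning: reasoning);
    // Workaround suggested by the outcome above: with aiModel = GPT4o, either
    // drop the `reasoning` argument (as in the commented-out request in the
    // repro) or select a model that supports reasoning effort.

    try {
        Response response = await api.ResponsesEndpoint.CreateModelResponseAsync(request, StreamCallback, cancellationToken);
        var responseItem = response.Output.LastOrDefault();
        UnityEngine.Debug.Log("Last output item: " + responseItem?.GetType());
    }
    catch (System.Exception e) {
        // With GPT-4o and reasoning set, this surfaces an error indicating that
        // reasoning effort is not supported by the selected model, instead of
        // the request appearing to do nothing.
        UnityEngine.Debug.LogException(e);
    }
}
```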