1 parent ce9f210 commit 5c21283
src/llama_stack/providers/inline/agents/meta_reference/responses/streaming.py
@@ -250,7 +250,7 @@ async def create_response(self) -> AsyncIterator[OpenAIResponseObjectStream]:
         logger.debug(f"calling openai_chat_completion with tools: {self.ctx.chat_tools}")

         logprobs = (
-            True if self.include and ResponseItemInclude.message_output_text_logprobs in self.include else False
+            True if self.include and ResponseItemInclude.message_output_text_logprobs in self.include else None
         )

         params = OpenAIChatCompletionRequestWithExtraBody(
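For context, a minimal sketch of the behavioral difference: returning None rather than False lets the downstream chat-completion request leave the logprobs field unset instead of sending an explicit logprobs=False. The helper name resolve_logprobs and the literal include value below are illustrative assumptions, not part of the diff.

# Minimal sketch (assumptions: helper name and include literal are illustrative;
# treating None as "leave the field unset" reflects typical optional-field handling).
from typing import Optional

def resolve_logprobs(include: Optional[list[str]]) -> Optional[bool]:
    """Return True only when output-text logprobs were explicitly requested;
    otherwise return None so the downstream request can omit the field
    entirely instead of carrying an explicit logprobs=False."""
    requested = "message.output_text.logprobs"  # hypothetical include value
    return True if include and requested in include else None

# Usage:
assert resolve_logprobs(["message.output_text.logprobs"]) is True
assert resolve_logprobs([]) is None
assert resolve_logprobs(None) is None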