Skip to content

Commit 7c177ad

Browse files
giulio-leone and Copilot committed
fix: tolerate null response content joins
Co-authored-by: Copilot <223556219+Copilot@users.noreply.github.com>
1 parent f30e486 commit 7c177ad

File tree

4 files changed

+59
-3
lines changed

4 files changed

+59
-3
lines changed

src/llama_stack/providers/inline/responses/builtin/responses/types.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -58,7 +58,7 @@ class ChatCompletionResult:
5858
"""Result of processing streaming chat completion chunks."""
5959

6060
response_id: str
61-
content: list[str]
61+
content: list[str | None]
6262
tool_calls: dict[int, OpenAIChatCompletionToolCall]
6363
created: int
6464
model: str
@@ -72,7 +72,7 @@ class ChatCompletionResult:
7272
@property
7373
def content_text(self) -> str:
7474
"""Get joined content as string."""
75-
return "".join(self.content)
75+
return "".join(content for content in self.content if content is not None)
7676

7777
@property
7878
def has_tool_calls(self) -> bool:

src/llama_stack/providers/utils/inference/prompt_adapter.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -33,7 +33,7 @@ def _process(c) -> str:
3333
if isinstance(c, str):
3434
return c
3535
elif isinstance(c, TextContentItem) or isinstance(c, OpenAIChatCompletionContentPartTextParam):
36-
return c.text
36+
return c.text or ""
3737
elif isinstance(c, ImageContentItem) or isinstance(c, OpenAIChatCompletionContentPartImageParam):
3838
return "<image>"
3939
elif isinstance(c, OpenAIFile):
Lines changed: 31 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,31 @@
1+
# Copyright (c) Meta Platforms, Inc. and affiliates.
2+
# All rights reserved.
3+
#
4+
# This source code is licensed under the terms described in the LICENSE file in
5+
# the root directory of this source tree.
6+
7+
"""Regression tests for None-safe response content joins."""
8+
9+
from llama_stack.providers.inline.responses.builtin.responses.types import ChatCompletionResult
10+
11+
12+
def _build_result(content: list[str | None]) -> ChatCompletionResult:
    """Build a minimal ChatCompletionResult carrying the given *content* list.

    All other fields are fixed placeholder values; only ``content`` varies
    across the regression tests below.
    """
    fixture_fields = {
        "response_id": "resp_123",
        "content": content,
        "tool_calls": {},
        "created": 0,
        "model": "test-model",
        "finish_reason": "stop",
        "message_item_id": "msg_123",
        "tool_call_item_ids": {},
        "content_part_emitted": False,
    }
    return ChatCompletionResult(**fixture_fields)
24+
25+
26+
def test_content_text_skips_none_entries():
    """None chunks must be dropped when joining streamed content."""
    result = _build_result([None, "tool result"])
    assert result.content_text == "tool result"
28+
29+
30+
def test_content_text_returns_empty_string_for_none_only_content():
    """A content list holding only None joins to the empty string."""
    result = _build_result([None])
    assert result.content_text == ""
Lines changed: 25 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,25 @@
1+
# Copyright (c) Meta Platforms, Inc. and affiliates.
2+
# All rights reserved.
3+
#
4+
# This source code is licensed under the terms described in the LICENSE file in
5+
# the root directory of this source tree.
6+
7+
"""Regression tests for None-safe prompt content joins."""
8+
9+
from llama_stack.providers.utils.inference.prompt_adapter import interleaved_content_as_str
10+
from llama_stack_api.common.content_types import TextContentItem
11+
12+
13+
class TestInterleavedContentAsStrNoneSafety:
    """Regression tests: interleaved_content_as_str must tolerate None content."""

    def test_none_content_returns_empty_string(self):
        # A wholly-None content argument should degrade to "".
        result = interleaved_content_as_str(None)
        assert result == ""

    def test_text_content_item_with_none_text_returns_empty_string(self):
        # model_construct bypasses validation so text=None can be forced in.
        none_item = TextContentItem.model_construct(text=None)
        assert interleaved_content_as_str([none_item]) == ""

    def test_text_content_item_with_valid_text_is_preserved(self):
        text_item = TextContentItem(text="hello world")
        assert interleaved_content_as_str([text_item]) == "hello world"

0 commit comments

Comments
 (0)