Skip to content

Commit d04e964

Browse files
committed
Fix: prevent instructions from being inserted directly before a function call.
1 parent d767fcc commit d04e964

File tree

2 files changed

+124
-1
lines changed

2 files changed

+124
-1
lines changed

src/google/adk/flows/llm_flows/contents.py

Lines changed: 24 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -695,7 +695,7 @@ async def _add_instructions_to_user_content(
695695

696696
if llm_request.contents:
697697
for i in range(len(llm_request.contents) - 1, -1, -1):
698-
if llm_request.contents[i].role != 'user':
698+
if _is_valid_instruction_position(llm_request, i):
699699
insert_index = i + 1
700700
break
701701
elif i == 0:
@@ -708,3 +708,26 @@ async def _add_instructions_to_user_content(
708708

709709
# Insert all instruction contents at the proper position using efficient slicing
710710
llm_request.contents[insert_index:insert_index] = instruction_contents
711+
712+
def _is_valid_instruction_position(llm_request: LlmRequest, index: int) -> bool:
  """Checks whether instructions may be inserted after ``contents[index]``.

  The caller walks ``llm_request.contents`` backwards and inserts the
  instruction contents at ``index + 1`` for the first valid position.
  A position is invalid when ``contents[index]`` is:
  - user content, or
  - content carrying a function call part (inserting right after it would
    separate the call from its function response).

  Args:
    llm_request: The LLM request containing contents.
    index: The index of the content to check.

  Returns:
    True if this is a valid position to insert after, False otherwise.
  """
  content = llm_request.contents[index]
  if content.role == 'user':
    return False
  # ``parts`` may be None on a genai Content; guard before iterating so an
  # empty content does not raise TypeError.
  return not any(part.function_call for part in content.parts or [])

tests/unittests/flows/llm_flows/test_contents.py

Lines changed: 100 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -427,3 +427,103 @@ async def test_events_with_empty_content_are_skipped():
427427
types.UserContent("Hello"),
428428
types.UserContent("How are you?"),
429429
]
430+
431+
432+
@pytest.mark.asyncio
async def test_add_instructions_skips_function_call_position():
  """Test that instructions are not inserted before function_call content."""
  agent = Agent(model="gemini-2.5-flash", name="test_agent")
  invocation_context = await testing_utils.create_invocation_context(
      agent=agent
  )

  # Instruction contents to be inserted into the request.
  instruction_contents = [
      types.Content(parts=[types.Part(text="System instruction")], role="user")
  ]

  # A paired tool call and tool response sitting in the middle of the history.
  call_part = types.Part(
      function_call=types.FunctionCall(
          id="fc_123", name="test_tool", args={"param": "value"}
      )
  )
  response_part = types.Part(
      function_response=types.FunctionResponse(
          id="fc_123", name="test_tool", response={"result": "success"}
      )
  )

  llm_request = LlmRequest(model="gemini-2.5-flash")
  llm_request.contents = [
      types.UserContent("First user message"),
      types.ModelContent("Model response"),
      types.ModelContent([call_part]),
      types.Content(parts=[response_part], role="user"),
      types.UserContent("Final user message"),
  ]

  await contents._add_instructions_to_user_content(
      invocation_context, llm_request, instruction_contents
  )

  # Walking backwards, the first valid position is the plain model response
  # at index 1 (no function_call, not user), so insertion happens at index 2
  # rather than splitting the call/response pair.
  assert len(llm_request.contents) == 6
  assert llm_request.contents[0] == types.UserContent("First user message")
  assert llm_request.contents[1] == types.ModelContent("Model response")
  assert llm_request.contents[2].parts[0].text == "System instruction"
  assert llm_request.contents[3].parts[0].function_call is not None
  assert llm_request.contents[4].parts[0].function_response is not None
  assert llm_request.contents[5] == types.UserContent("Final user message")
489+
490+
491+
@pytest.mark.asyncio
async def test_add_instructions_skips_leading_user_content():
  """Test that instructions are not inserted before leading user content."""
  agent = Agent(model="gemini-2.5-flash", name="test_agent")
  invocation_context = await testing_utils.create_invocation_context(
      agent=agent
  )

  # Instruction contents to be inserted into the request.
  instruction_contents = [
      types.Content(parts=[types.Part(text="System instruction")], role="user")
  ]

  # History: two leading user turns, one model turn, two trailing user turns.
  llm_request = LlmRequest(model="gemini-2.5-flash")
  llm_request.contents = [
      types.UserContent(msg)
      for msg in ("First user message", "Second user message")
  ]
  llm_request.contents.append(types.ModelContent("Model response"))
  llm_request.contents.extend(
      types.UserContent(msg)
      for msg in ("Third user message", "Fourth user message")
  )

  await contents._add_instructions_to_user_content(
      invocation_context, llm_request, instruction_contents
  )

  # Scanning backwards, the model response at index 2 is the first valid
  # position, so the instruction lands at index 3 — not at the beginning.
  assert len(llm_request.contents) == 6
  assert llm_request.contents[0] == types.UserContent("First user message")
  assert llm_request.contents[1] == types.UserContent("Second user message")
  assert llm_request.contents[2] == types.ModelContent("Model response")
  assert llm_request.contents[3].parts[0].text == "System instruction"
  assert llm_request.contents[4] == types.UserContent("Third user message")
  assert llm_request.contents[5] == types.UserContent("Fourth user message")

0 commit comments

Comments
 (0)