Skip to content

Commit f2bed14

Browse files
seanzhougoogle authored and copybara-github committed
chore: Adjust the LLM Request logging
1. Function declarations are not necessarily in the first tool. 2. Log the config. PiperOrigin-RevId: 816547534
1 parent 3021266 commit f2bed14

File tree

2 files changed

+168
-4
lines changed

2 files changed

+168
-4
lines changed

src/google/adk/models/google_llm.py

Lines changed: 36 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -351,10 +351,19 @@ def _build_function_declaration_log(
351351

352352

353353
def _build_request_log(req: LlmRequest) -> str:
354-
function_decls: list[types.FunctionDeclaration] = cast(
355-
list[types.FunctionDeclaration],
356-
req.config.tools[0].function_declarations if req.config.tools else [],
357-
)
354+
# Find which tool contains function_declarations
355+
function_decls: list[types.FunctionDeclaration] = []
356+
function_decl_tool_index: Optional[int] = None
357+
358+
if req.config.tools:
359+
for idx, tool in enumerate(req.config.tools):
360+
if tool.function_declarations:
361+
function_decls = cast(
362+
list[types.FunctionDeclaration], tool.function_declarations
363+
)
364+
function_decl_tool_index = idx
365+
break
366+
358367
function_logs = (
359368
[
360369
_build_function_declaration_log(func_decl)
@@ -375,12 +384,35 @@ def _build_request_log(req: LlmRequest) -> str:
375384
for content in req.contents
376385
]
377386

387+
# Build exclusion dict for config logging
388+
tools_exclusion = (
389+
{function_decl_tool_index: {'function_declarations'}}
390+
if function_decl_tool_index is not None
391+
else True
392+
)
393+
394+
try:
395+
config_log = str(
396+
req.config.model_dump(
397+
exclude_none=True,
398+
exclude={
399+
'system_instruction': True,
400+
'tools': tools_exclusion if req.config.tools else True,
401+
},
402+
)
403+
)
404+
except Exception:
405+
config_log = repr(req.config)
406+
378407
return f"""
379408
LLM Request:
380409
-----------------------------------------------------------
381410
System Instruction:
382411
{req.config.system_instruction}
383412
-----------------------------------------------------------
413+
Config:
414+
{config_log}
415+
-----------------------------------------------------------
384416
Contents:
385417
{_NEW_LINE.join(contents_logs)}
386418
-----------------------------------------------------------

tests/unittests/models/test_google_llm.py

Lines changed: 132 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -24,6 +24,7 @@
2424
from google.adk.models.gemini_llm_connection import GeminiLlmConnection
2525
from google.adk.models.google_llm import _AGENT_ENGINE_TELEMETRY_ENV_VARIABLE_NAME
2626
from google.adk.models.google_llm import _AGENT_ENGINE_TELEMETRY_TAG
27+
from google.adk.models.google_llm import _build_request_log
2728
from google.adk.models.google_llm import Gemini
2829
from google.adk.models.llm_request import LlmRequest
2930
from google.adk.models.llm_response import LlmResponse
@@ -1726,3 +1727,134 @@ async def mock_coro():
17261727
# Verify cache metadata is preserved
17271728
assert second_arg.cache_name == cache_metadata.cache_name
17281729
assert second_arg.invocations_used == cache_metadata.invocations_used
1730+
1731+
1732+
def test_build_request_log_with_config_multiple_tool_types():
  """Checks the request log when one tool mixes several tool kinds.

  The Config section must surface temperature, max_output_tokens, and the
  non-function tool entries; function declarations must appear only in the
  Functions section, and system_instruction must be excluded from Config.
  """
  declaration = types.FunctionDeclaration(
      name="test_function",
      description="A test function",
      parameters={"type": "object", "properties": {}},
  )
  combined_tool = types.Tool(
      function_declarations=[declaration],
      google_search=types.GoogleSearch(),
      code_execution=types.ToolCodeExecution(),
  )
  request = LlmRequest(
      model="gemini-1.5-flash",
      contents=[Content(role="user", parts=[Part.from_text(text="Hello")])],
      config=types.GenerateContentConfig(
          temperature=0.7,
          max_output_tokens=500,
          system_instruction="You are a helpful assistant",
          tools=[combined_tool],
      ),
  )

  rendered = _build_request_log(request)

  # The Config section exists and carries the scalar generation settings
  # (rendered as a Python dict, hence the single-quoted keys).
  assert "Config:" in rendered
  assert "'temperature': 0.7" in rendered
  assert "'max_output_tokens': 500" in rendered

  # Non-function tool kinds survive into the logged config.
  assert "'google_search'" in rendered
  assert "'code_execution'" in rendered

  # Function declarations belong only to the Functions section, so the text
  # preceding it must not mention them.
  before_functions = rendered.split("Functions:")[0]
  assert "'function_declarations'" not in before_functions
  assert "Functions:" in rendered
  assert "test_function" in rendered

  # system_instruction has its own section and is stripped from Config.
  assert (
      "'system_instruction'"
      not in rendered.split("Contents:")[0].split("Config:")[1]
  )
1784+
1785+
1786+
def test_build_request_log_function_declarations_in_second_tool():
  """Checks handling of function_declarations living in a non-first tool.

  The declarations must still be promoted to the Functions section and
  excluded from the logged config, while the other tool kinds from both
  tools remain visible in Config.
  """
  declaration = types.FunctionDeclaration(
      name="my_function",
      description="A test function",
      parameters={"type": "object", "properties": {}},
  )

  # Only the second tool carries function_declarations.
  search_only_tool = types.Tool(google_search=types.GoogleSearch())
  func_tool = types.Tool(
      function_declarations=[declaration],
      code_execution=types.ToolCodeExecution(),
  )

  request = LlmRequest(
      model="gemini-1.5-flash",
      contents=[Content(role="user", parts=[Part.from_text(text="Hello")])],
      config=types.GenerateContentConfig(
          temperature=0.5,
          system_instruction="You are a helpful assistant",
          tools=[search_only_tool, func_tool],
      ),
  )

  rendered = _build_request_log(request)

  # The declaration surfaces in the Functions section.
  assert "Functions:" in rendered
  assert "my_function" in rendered

  # Nothing before the Functions section mentions function_declarations.
  before_functions = rendered.split("Functions:")[0]
  assert "'function_declarations'" not in before_functions

  # Both tools' other capabilities are still logged.
  assert "'google_search'" in rendered
  assert "'code_execution'" in rendered

  # Inspect the Config section proper: each tool kind appears exactly once
  # and the declarations are absent.
  config_section = rendered.split("Config:")[1].split("---")[0]
  assert config_section.count("'google_search'") == 1
  assert config_section.count("'code_execution'") == 1
  assert "'function_declarations'" not in config_section
1834+
1835+
1836+
def test_build_request_log_fallback_to_repr_on_all_failures(monkeypatch):
  """Verifies _build_request_log degrades to repr() when model_dump raises."""
  request = LlmRequest(
      model="gemini-1.5-flash",
      contents=[Content(role="user", parts=[Part.from_text(text="Hello")])],
      config=types.GenerateContentConfig(
          temperature=0.7,
          system_instruction="You are a helpful assistant",
      ),
  )

  def _always_fail(*args, **kwargs):
    raise Exception("dump failed")

  # Patch at the class level so every dump attempt on the config fails.
  monkeypatch.setattr(types.GenerateContentConfig, "model_dump", _always_fail)

  rendered = _build_request_log(request)

  # The log still renders; the config falls back to its repr(), which
  # includes the class name.
  assert "Config:" in rendered
  assert "GenerateContentConfig" in rendered

0 commit comments

Comments
 (0)