Commit 60aa09f

Fix structured output in SDK and add samples and tests (#43868)
1 parent 08161f6 commit 60aa09f
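
In short: this commit removes the empty `ResponseFormatJsonSchemaSchema` model and changes `ResponseTextFormatConfigurationJsonSchema.schema` to a plain `dict[str, Any]`, so a JSON Schema (for example, one produced by pydantic's `model_json_schema()`) can be passed directly when configuring an agent's structured output. A minimal sketch of the resulting usage, condensed from the samples added in this commit (the model deployment name is a placeholder):

from pydantic import BaseModel
from azure.ai.projects.models import (
    PromptAgentDefinition,
    PromptAgentDefinitionText,
    ResponseTextFormatConfigurationJsonSchema,
)

class CalendarEvent(BaseModel):
    model_config = {"extra": "forbid"}  # emits "additionalProperties": false in the generated JSON schema
    name: str
    date: str
    participants: list[str]

definition = PromptAgentDefinition(
    model="<model-deployment-name>",  # placeholder
    text=PromptAgentDefinitionText(
        format=ResponseTextFormatConfigurationJsonSchema(
            name="CalendarEvent",
            schema=CalendarEvent.model_json_schema(),  # now a plain dict, no wrapper model
        )
    ),
    instructions="Extract calendar event information from the user message.",
)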

File tree

7 files changed (+361, -18 lines)

sdk/ai/azure-ai-projects/apiview-properties.json

Lines changed: 0 additions & 1 deletion
@@ -254,7 +254,6 @@
     "azure.ai.projects.models.ResponseFileSearchCallCompletedEvent": "OpenAI.ResponseFileSearchCallCompletedEvent",
     "azure.ai.projects.models.ResponseFileSearchCallInProgressEvent": "OpenAI.ResponseFileSearchCallInProgressEvent",
     "azure.ai.projects.models.ResponseFileSearchCallSearchingEvent": "OpenAI.ResponseFileSearchCallSearchingEvent",
-    "azure.ai.projects.models.ResponseFormatJsonSchemaSchema": "OpenAI.ResponseFormatJsonSchemaSchema",
     "azure.ai.projects.models.ResponseFunctionCallArgumentsDeltaEvent": "OpenAI.ResponseFunctionCallArgumentsDeltaEvent",
     "azure.ai.projects.models.ResponseFunctionCallArgumentsDoneEvent": "OpenAI.ResponseFunctionCallArgumentsDoneEvent",
     "azure.ai.projects.models.ResponseImageGenCallCompletedEvent": "OpenAI.ResponseImageGenCallCompletedEvent",

sdk/ai/azure-ai-projects/azure/ai/projects/models/__init__.py

Lines changed: 0 additions & 2 deletions
@@ -260,7 +260,6 @@
     ResponseFileSearchCallCompletedEvent,
     ResponseFileSearchCallInProgressEvent,
     ResponseFileSearchCallSearchingEvent,
-    ResponseFormatJsonSchemaSchema,
     ResponseFunctionCallArgumentsDeltaEvent,
     ResponseFunctionCallArgumentsDoneEvent,
     ResponseImageGenCallCompletedEvent,
@@ -660,7 +659,6 @@
     "ResponseFileSearchCallCompletedEvent",
     "ResponseFileSearchCallInProgressEvent",
     "ResponseFileSearchCallSearchingEvent",
-    "ResponseFormatJsonSchemaSchema",
     "ResponseFunctionCallArgumentsDeltaEvent",
     "ResponseFunctionCallArgumentsDoneEvent",
     "ResponseImageGenCallCompletedEvent",

sdk/ai/azure-ai-projects/azure/ai/projects/models/_models.py

Lines changed: 6 additions & 13 deletions
@@ -5512,7 +5512,9 @@ class FileSearchTool(Tool, discriminator="file_search"):
         visibility=["read", "create", "update", "delete", "query"]
     )
     """Ranking options for search."""
-    filters: Optional[Union["_models.ComparisonFilter", "_models.CompoundFilter"]] = rest_field(visibility=["read", "create", "update", "delete", "query"])
+    filters: Optional[Union["_models.ComparisonFilter", "_models.CompoundFilter"]] = rest_field(
+        visibility=["read", "create", "update", "delete", "query"]
+    )
     """A filter to apply. Is either a ComparisonFilter type or a CompoundFilter type."""

     @overload
@@ -11390,13 +11392,6 @@ def __init__(self, *args: Any, **kwargs: Any) -> None:
         self.type = ResponseStreamEventType.RESPONSE_FILE_SEARCH_CALL_SEARCHING  # type: ignore


-class ResponseFormatJsonSchemaSchema(_Model):
-    """The schema for the response format, described as a JSON Schema object.
-    Learn how to build JSON schemas `here <https://json-schema.org/>`_.
-
-    """
-
-
 class ResponseFunctionCallArgumentsDeltaEvent(
     ResponseStreamEvent, discriminator="response.function_call_arguments.delta"
 ):
@@ -13379,7 +13374,7 @@ class ResponseTextFormatConfigurationJsonSchema(
         underscores and dashes, with a maximum length of 64. Required.
     :vartype name: str
     :ivar schema: Required.
-    :vartype schema: ~azure.ai.projects.models.ResponseFormatJsonSchemaSchema
+    :vartype schema: dict[str, any]
     :ivar strict: Whether to enable strict schema adherence when generating the output.
         If set to true, the model will always follow the exact schema defined
         in the ``schema`` field. Only a subset of JSON Schema is supported when
@@ -13396,9 +13391,7 @@
     name: str = rest_field(visibility=["read", "create", "update", "delete", "query"])
     """The name of the response format. Must be a-z, A-Z, 0-9, or contain
     underscores and dashes, with a maximum length of 64. Required."""
-    schema: "_models.ResponseFormatJsonSchemaSchema" = rest_field(
-        visibility=["read", "create", "update", "delete", "query"]
-    )
+    schema: dict[str, Any] = rest_field(visibility=["read", "create", "update", "delete", "query"])
     """Required."""
     strict: Optional[bool] = rest_field(visibility=["read", "create", "update", "delete", "query"])
     """Whether to enable strict schema adherence when generating the output.
@@ -13412,7 +13405,7 @@ def __init__(
         self,
         *,
         name: str,
-        schema: "_models.ResponseFormatJsonSchemaSchema",
+        schema: dict[str, Any],
         description: Optional[str] = None,
         strict: Optional[bool] = None,
     ) -> None: ...
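
Net effect of the model change above: the `schema` field now accepts any JSON Schema expressed as a plain dictionary, and `strict` can be set alongside it. A small sketch (the schema contents here are illustrative, not taken from the commit):

from typing import Any
from azure.ai.projects.models import ResponseTextFormatConfigurationJsonSchema

# Hand-written JSON Schema; no ResponseFormatJsonSchemaSchema wrapper is needed anymore.
weather_schema: dict[str, Any] = {
    "type": "object",
    "properties": {
        "city": {"type": "string"},
        "temperature_c": {"type": "number"},
    },
    "required": ["city", "temperature_c"],
    "additionalProperties": False,
}

text_format = ResponseTextFormatConfigurationJsonSchema(
    name="WeatherReport",
    schema=weather_schema,
    strict=True,  # per the docstring: the model must follow the schema exactly
)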
sample_agent_structured_output.py

Lines changed: 103 additions & 0 deletions
@@ -0,0 +1,103 @@
# pylint: disable=line-too-long,useless-suppression
# ------------------------------------
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT License.
# ------------------------------------

"""
DESCRIPTION:
    This sample demonstrates how to run basic Prompt Agent operations
    using the synchronous AIProjectClient, while defining a desired
    JSON schema for the response ("structured output").

    The Responses and Conversations calls in this sample are made using
    the OpenAI client from the `openai` package. See https://platform.openai.com/docs/api-reference
    for more information.

    This sample is inspired by the OpenAI example here:
    https://platform.openai.com/docs/guides/structured-outputs/supported-schemas

USAGE:
    python sample_agent_structured_output.py

    Before running the sample:

    pip install "azure-ai-projects>=2.0.0b1" openai azure-identity python-dotenv pydantic

    Set these environment variables with your own values:
    1) AZURE_AI_PROJECT_ENDPOINT - The Azure AI Project endpoint, as found in the Overview
       page of your Azure AI Foundry portal.
    2) AZURE_AI_MODEL_DEPLOYMENT_NAME - The deployment name of the AI model, as found under the "Name" column in
       the "Models + endpoints" tab in your Azure AI Foundry project.
"""

import os
from dotenv import load_dotenv
from azure.identity import DefaultAzureCredential
from azure.ai.projects import AIProjectClient
from azure.ai.projects.models import (
    PromptAgentDefinition,
    PromptAgentDefinitionText,
    ResponseTextFormatConfigurationJsonSchema,
)
from pydantic import BaseModel, Field

load_dotenv()


class CalendarEvent(BaseModel):
    model_config = {"extra": "forbid"}
    name: str
    date: str = Field(description="Date in YYYY-MM-DD format")
    participants: list[str]


project_client = AIProjectClient(
    endpoint=os.environ["AZURE_AI_PROJECT_ENDPOINT"],
    credential=DefaultAzureCredential(),
)

with project_client:

    openai_client = project_client.get_openai_client()

    agent = project_client.agents.create_version(
        agent_name="MyAgent",
        definition=PromptAgentDefinition(
            model=os.environ["AZURE_AI_MODEL_DEPLOYMENT_NAME"],
            text=PromptAgentDefinitionText(
                format=ResponseTextFormatConfigurationJsonSchema(
                    name="CalendarEvent", schema=CalendarEvent.model_json_schema()
                )
            ),
            instructions="""
            You are a helpful assistant that extracts calendar event information from the input user messages,
            and returns it in the desired structured output format.
            """,
        ),
    )
    print(f"Agent created (id: {agent.id}, name: {agent.name}, version: {agent.version})")

    conversation = openai_client.conversations.create(
        items=[
            {
                "type": "message",
                "role": "user",
                "content": "Alice and Bob are going to a science fair this Friday, November 7, 2025.",
            }
        ],
    )
    print(f"Created conversation with initial user message (id: {conversation.id})")

    response = openai_client.responses.create(
        conversation=conversation.id,
        extra_body={"agent": {"name": agent.name, "type": "agent_reference"}},
        input="",
    )
    print(f"Response output: {response.output_text}")

    openai_client.conversations.delete(conversation_id=conversation.id)
    print("Conversation deleted")

    project_client.agents.delete_version(agent_name=agent.name, agent_version=agent.version)
    print("Agent deleted")
sample_agent_structured_output_async.py

Lines changed: 114 additions & 0 deletions
@@ -0,0 +1,114 @@
# pylint: disable=line-too-long,useless-suppression
# ------------------------------------
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT License.
# ------------------------------------

"""
DESCRIPTION:
    This sample demonstrates how to run basic Prompt Agent operations
    using the asynchronous AIProjectClient, while defining a desired
    JSON schema for the response ("structured output").

    The Responses and Conversations calls in this sample are made using
    the OpenAI client from the `openai` package. See https://platform.openai.com/docs/api-reference
    for more information.

    This sample is inspired by the OpenAI example here:
    https://platform.openai.com/docs/guides/structured-outputs/supported-schemas

USAGE:
    python sample_agent_structured_output_async.py

    Before running the sample:

    pip install "azure-ai-projects>=2.0.0b1" openai azure-identity aiohttp python-dotenv pydantic

    Set these environment variables with your own values:
    1) AZURE_AI_PROJECT_ENDPOINT - The Azure AI Project endpoint, as found in the Overview
       page of your Azure AI Foundry portal.
    2) AZURE_AI_MODEL_DEPLOYMENT_NAME - The deployment name of the AI model, as found under the "Name" column in
       the "Models + endpoints" tab in your Azure AI Foundry project.
"""

import asyncio
import os
from dotenv import load_dotenv
from azure.identity.aio import DefaultAzureCredential
from azure.ai.projects.aio import AIProjectClient
from azure.ai.projects.models import (
    PromptAgentDefinition,
    PromptAgentDefinitionText,
    ResponseTextFormatConfigurationJsonSchema,
)
from pydantic import BaseModel, Field

load_dotenv()


class CalendarEvent(BaseModel):
    model_config = {"extra": "forbid"}
    name: str
    date: str = Field(description="Date in YYYY-MM-DD format")
    participants: list[str]


async def main() -> None:

    credential = DefaultAzureCredential()

    async with credential:

        project_client = AIProjectClient(
            endpoint=os.environ["AZURE_AI_PROJECT_ENDPOINT"],
            credential=credential,
        )

        async with project_client:

            openai_client = await project_client.get_openai_client()

            agent = await project_client.agents.create_version(
                agent_name="MyAgent",
                definition=PromptAgentDefinition(
                    model=os.environ["AZURE_AI_MODEL_DEPLOYMENT_NAME"],
                    text=PromptAgentDefinitionText(
                        format=ResponseTextFormatConfigurationJsonSchema(
                            name="CalendarEvent", schema=CalendarEvent.model_json_schema()
                        )
                    ),
                    instructions="""
                    You are a helpful assistant that extracts calendar event information from the input user messages,
                    and returns it in the desired structured output format.
                    """,
                ),
            )
            print(f"Agent created (id: {agent.id}, name: {agent.name}, version: {agent.version})")

            conversation = await openai_client.conversations.create(
                items=[
                    {
                        "type": "message",
                        "role": "user",
                        "content": "Alice and Bob are going to a science fair this Friday, November 7, 2025.",
                    }
                ],
            )
            print(f"Created conversation with initial user message (id: {conversation.id})")

            response = await openai_client.responses.create(
                conversation=conversation.id,
                extra_body={"agent": {"name": agent.name, "type": "agent_reference"}},
                input="",
            )
            print(f"Response output: {response.output_text}")

            await openai_client.conversations.delete(conversation_id=conversation.id)
            print("Conversation deleted")

            await project_client.agents.delete_version(agent_name=agent.name, agent_version=agent.version)
            print("Agent deleted")


if __name__ == "__main__":
    asyncio.run(main())

sdk/ai/azure-ai-projects/tests/agents/test_agent_responses_crud.py

Lines changed: 68 additions & 1 deletion
@@ -5,10 +5,15 @@
 # ------------------------------------
 # cSpell:disable

+from pydantic import BaseModel, Field
 import pytest
 from test_base import TestBase, servicePreparer
 from devtools_testutils import is_live_and_not_recording
-from azure.ai.projects.models import PromptAgentDefinition
+from azure.ai.projects.models import (
+    PromptAgentDefinition,
+    ResponseTextFormatConfigurationJsonSchema,
+    PromptAgentDefinitionText,
+)


 class TestAgentResponsesCrud(TestBase):
@@ -150,3 +155,65 @@ def test_agent_responses_crud(self, **kwargs):

         project_client.agents.delete_version(agent_name=agent.name, agent_version=agent.version)
         print("Agent deleted")
+
+    # To run this test:
+    # pytest tests\agents\test_agent_responses_crud.py::TestAgentResponsesCrud::test_agent_responses_with_structured_output -s
+    @servicePreparer()
+    @pytest.mark.skipif(
+        condition=(not is_live_and_not_recording()),
+        reason="Skipped because we cannot record network calls with OpenAI client",
+    )
+    def test_agent_responses_with_structured_output(self, **kwargs):
+        model = self.test_agents_params["model_deployment_name"]
+
+        # Setup
+        project_client = self.create_client(operation_group="agents", **kwargs)
+        openai_client = project_client.get_openai_client()
+
+        class CalendarEvent(BaseModel):
+            model_config = {"extra": "forbid"}
+            name: str
+            date: str = Field(description="Date in YYYY-MM-DD format")
+            participants: list[str]
+
+        agent = project_client.agents.create_version(
+            agent_name="MyAgent",
+            definition=PromptAgentDefinition(
+                model=model,
+                text=PromptAgentDefinitionText(
+                    format=ResponseTextFormatConfigurationJsonSchema(
+                        name="CalendarEvent", schema=CalendarEvent.model_json_schema()
+                    )
+                ),
+                instructions="""
+                You are a helpful assistant that extracts calendar event information from the input user messages,
+                and returns it in the desired structured output format.
+                """,
+            ),
+        )
+        print(f"Agent created (id: {agent.id}, name: {agent.name}, version: {agent.version})")
+
+        conversation = openai_client.conversations.create(
+            items=[
+                {
+                    "type": "message",
+                    "role": "user",
+                    "content": "Alice and Bob are going to a science fair this Friday, November 7, 2025.",
+                }
+            ]
+        )
+        print(f"Created conversation with initial user message (id: {conversation.id})")
+
+        response = openai_client.responses.create(
+            conversation=conversation.id,
+            extra_body={"agent": {"name": agent.name, "type": "agent_reference"}},
+            input="",  # TODO: Remove 'input' once service is fixed
+        )
+        print(f"Response id: {response.id}, output text: {response.output_text}")
+        assert response.output_text == '{"name":"Science Fair","date":"2025-11-07","participants":["Alice","Bob"]}'
+
+        openai_client.conversations.delete(conversation_id=conversation.id)
+        print("Conversation deleted")
+
+        project_client.agents.delete_version(agent_name=agent.name, agent_version=agent.version)
+        print("Agent deleted")
