Skip to content

Commit 63bef03

Browse files
author
Solveit
committed
Add structured output
1 parent d558beb commit 63bef03

File tree

3 files changed

+89
-13
lines changed

3 files changed

+89
-13
lines changed

00_core.ipynb

Lines changed: 76 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -73,6 +73,7 @@
7373
"from anthropic.types import (Usage, TextBlock, ServerToolUseBlock,\n",
7474
" WebSearchToolResultBlock, Message, ToolUseBlock,\n",
7575
" ThinkingBlock, ServerToolUsage)\n",
76+
"from anthropic.types.beta import (BetaMessage)\n",
7677
"from anthropic.resources import messages\n",
7778
"\n",
7879
"import toolslm\n",
@@ -533,7 +534,7 @@
533534
"This is the first exported public function or class we're creating (the previous export was of a variable). In the rendered version of the notebook for these you'll see 4 things, in this order (unless the symbol starts with a single `_`, which indicates it's *private*):\n",
534535
"\n",
535536
"- The signature (with the symbol name as a heading, with a horizontal rule above)\n",
536-
"- A table of paramater docs (if provided)\n",
537+
"- A table of parameter docs (if provided)\n",
537538
"- The doc string (in italics).\n",
538539
"- The source code (in a collapsible \"Exported source\" block)\n",
539540
"\n",
@@ -646,7 +647,7 @@
646647
"source": [
647648
"#| exports\n",
648649
"@patch\n",
649-
"def _repr_markdown_(self:(Message)):\n",
650+
"def _repr_markdown_(self:(Message,BetaMessage)):\n",
650651
" det = '\\n- '.join(f'{k}: `{v}`' for k,v in self.model_dump().items())\n",
651652
" cts = re.sub(r'\\$', '$', contents(self)) # escape `$` for jupyter latex\n",
652653
" return f\"\"\"{cts}\n",
@@ -1145,7 +1146,8 @@
11451146
" self.model,self.use = model,usage()\n",
11461147
" self.text_only = model in text_only_models\n",
11471148
" self.log = [] if log else None\n",
1148-
" self.c = (cli or Anthropic(default_headers={'anthropic-beta': 'prompt-caching-2024-07-31'}))\n",
1149+
" betas = ['prompt-caching-2024-07-31', 'structured-outputs-2025-11-13']\n",
1150+
" self.c = (cli or Anthropic(default_headers={'anthropic-beta': ','.join(betas)}))\n",
11491151
" self.cache = cache"
11501152
]
11511153
},
@@ -1376,9 +1378,10 @@
13761378
"#| exports\n",
13771379
"@patch\n",
13781380
"def _precall(self:Client, msgs, prefill, sp, temp, maxtok, maxthinktok, stream,\n",
1379-
" stop, tools, tool_choice, kwargs):\n",
1381+
" stop, tools, tool_choice, output_format, kwargs):\n",
13801382
" if tools: kwargs['tools'] = [get_schema(o) if callable(o) else o for o in listify(tools)]\n",
13811383
" if tool_choice: kwargs['tool_choice'] = mk_tool_choice(tool_choice)\n",
1384+
" if output_format: kwargs['output_format'] = output_format\n",
13821385
" if maxthinktok: \n",
13831386
" kwargs['thinking'] = {'type':'enabled', 'budget_tokens':maxthinktok} \n",
13841387
" temp,prefill = 1,''\n",
@@ -1414,12 +1417,13 @@
14141417
" stop=None, # Stop sequence\n",
14151418
" tools:Optional[list]=None, # List of tools to make available to Claude\n",
14161419
" tool_choice:Optional[dict]=None, # Optionally force use of some tool\n",
1420+
" output_format:Optional[dict]=None, # Optionally force output to conform with a JSON schema\n",
14171421
" cb=None, # Callback to pass result to when complete\n",
14181422
" **kwargs):\n",
14191423
" \"Make a call to Claude.\"\n",
14201424
" msgs,kwargs = self._precall(msgs, prefill, sp, temp, maxtok, maxthinktok, stream,\n",
1421-
" stop, tools, tool_choice, kwargs)\n",
1422-
" m = self.c.messages\n",
1425+
" stop, tools, tool_choice, output_format, kwargs)\n",
1426+
" m = self.c.beta.messages if output_format else self.c.messages\n",
14231427
" f = m.stream if stream else m.create\n",
14241428
" res = f(model=self.model, messages=msgs, **kwargs)\n",
14251429
" def _cb(v):\n",
@@ -1846,6 +1850,72 @@
18461850
"print(c.stop_reason, c.stop_sequence)"
18471851
]
18481852
},
1853+
{
1854+
"cell_type": "markdown",
1855+
"id": "17fa5c5f",
1856+
"metadata": {},
1857+
"source": [
1858+
"We can force the next message to be [structured output](https://docs.claude.com/en/docs/build-with-claude/structured-outputs) based on a JSON schema we provide."
1859+
]
1860+
},
1861+
{
1862+
"cell_type": "code",
1863+
"execution_count": null,
1864+
"id": "feb75ba4",
1865+
"metadata": {},
1866+
"outputs": [
1867+
{
1868+
"data": {
1869+
"text/markdown": [
1870+
"{\"first_name\":\"Johno\",\"last_name\":\"Ohjohn\"}\n",
1871+
"\n",
1872+
"<details>\n",
1873+
"\n",
1874+
"- id: `msg_019Jy7ge2pTDFih4Zt5kCUvS`\n",
1875+
"- container: `None`\n",
1876+
"- content: `[{'citations': None, 'text': '{\"first_name\":\"Johno\",\"last_name\":\"Ohjohn\"}', 'type': 'text'}]`\n",
1877+
"- context_management: `None`\n",
1878+
"- model: `claude-sonnet-4-5-20250929`\n",
1879+
"- role: `assistant`\n",
1880+
"- stop_reason: `end_turn`\n",
1881+
"- stop_sequence: `None`\n",
1882+
"- type: `message`\n",
1883+
"- usage: `{'cache_creation': {'ephemeral_1h_input_tokens': 0, 'ephemeral_5m_input_tokens': 0}, 'cache_creation_input_tokens': 0, 'cache_read_input_tokens': 0, 'input_tokens': 193, 'output_tokens': 21, 'server_tool_use': None, 'service_tier': 'standard'}`\n",
1884+
"\n",
1885+
"</details>"
1886+
],
1887+
"text/plain": [
1888+
"BetaMessage(id='msg_019Jy7ge2pTDFih4Zt5kCUvS', container=None, content=[BetaTextBlock(citations=None, text='{\"first_name\":\"Johno\",\"last_name\":\"Ohjohn\"}', type='text')], context_management=None, model='claude-sonnet-4-5-20250929', role='assistant', stop_reason='end_turn', stop_sequence=None, type='message', usage=BetaUsage(cache_creation=BetaCacheCreation(ephemeral_1h_input_tokens=0, ephemeral_5m_input_tokens=0), cache_creation_input_tokens=0, cache_read_input_tokens=0, input_tokens=193, output_tokens=21, server_tool_use=None, service_tier='standard'))"
1889+
]
1890+
},
1891+
"execution_count": null,
1892+
"metadata": {},
1893+
"output_type": "execute_result"
1894+
}
1895+
],
1896+
"source": [
1897+
"of = {\n",
1898+
" \"type\": \"json_schema\",\n",
1899+
" \"schema\": {\n",
1900+
" \"type\": \"object\",\n",
1901+
" \"properties\": {\n",
1902+
" \"first_name\": {\"type\": \"string\"},\n",
1903+
" \"last_name\": {\"type\": \"string\"},\n",
1904+
" },\n",
1905+
" \"required\": [\"first_name\", \"last_name\"],\n",
1906+
" \"additionalProperties\": False\n",
1907+
" }\n",
1908+
"}\n",
1909+
"\n",
1910+
"c(\"The first name is Johno, the last name is Ohjohn\", output_format=of)"
1911+
]
1912+
},
1913+
{
1914+
"cell_type": "markdown",
1915+
"id": "e0edc13f",
1916+
"metadata": {},
1917+
"source": []
1918+
},
18491919
{
18501920
"cell_type": "markdown",
18511921
"id": "fbdc1914",

claudette/_modidx.py

Lines changed: 4 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -16,7 +16,10 @@
1616
'claudette.asink._astream': ('async.html#_astream', 'claudette/asink.py'),
1717
'claudette.asink.mk_funcres_async': ('async.html#mk_funcres_async', 'claudette/asink.py'),
1818
'claudette.asink.mk_toolres_async': ('async.html#mk_toolres_async', 'claudette/asink.py')},
19-
'claudette.core': { 'claudette.core.Chat': ('core.html#chat', 'claudette/core.py'),
19+
'claudette.core': { 'claudette.core.(Message, BetaMessage)._repr_markdown_': ( 'core.html#(message, '
20+
'betamessage)._repr_markdown_',
21+
'claudette/core.py'),
22+
'claudette.core.Chat': ('core.html#chat', 'claudette/core.py'),
2023
'claudette.core.Chat.__call__': ('core.html#chat.__call__', 'claudette/core.py'),
2124
'claudette.core.Chat.__init__': ('core.html#chat.__init__', 'claudette/core.py'),
2225
'claudette.core.Chat._append_pr': ('core.html#chat._append_pr', 'claudette/core.py'),
@@ -33,7 +36,6 @@
3336
'claudette.core.Client._repr_markdown_': ('core.html#client._repr_markdown_', 'claudette/core.py'),
3437
'claudette.core.Client.cost': ('core.html#client.cost', 'claudette/core.py'),
3538
'claudette.core.Client.structured': ('core.html#client.structured', 'claudette/core.py'),
36-
'claudette.core.Message._repr_markdown_': ('core.html#message._repr_markdown_', 'claudette/core.py'),
3739
'claudette.core.ServerToolUsage.__add__': ('core.html#servertoolusage.__add__', 'claudette/core.py'),
3840
'claudette.core.ToolResult': ('core.html#toolresult', 'claudette/core.py'),
3941
'claudette.core.ToolResult.__init__': ('core.html#toolresult.__init__', 'claudette/core.py'),

claudette/core.py

Lines changed: 9 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -19,6 +19,7 @@
1919
from anthropic.types import (Usage, TextBlock, ServerToolUseBlock,
2020
WebSearchToolResultBlock, Message, ToolUseBlock,
2121
ThinkingBlock, ServerToolUsage)
22+
from anthropic.types.beta import (BetaMessage)
2223
from anthropic.resources import messages
2324

2425
import toolslm
@@ -120,7 +121,7 @@ def find_block(r:abc.Mapping, # The message to look in
120121

121122
# %% ../00_core.ipynb
122123
@patch
123-
def _repr_markdown_(self:(Message)):
124+
def _repr_markdown_(self:(Message,BetaMessage)):
124125
det = '\n- '.join(f'{k}: `{v}`' for k,v in self.model_dump().items())
125126
cts = re.sub(r'\$', '&#36;', contents(self)) # escape `$` for jupyter latex
126127
return f"""{cts}
@@ -187,7 +188,8 @@ def __init__(self, model, cli=None, log=False, cache=False):
187188
self.model,self.use = model,usage()
188189
self.text_only = model in text_only_models
189190
self.log = [] if log else None
190-
self.c = (cli or Anthropic(default_headers={'anthropic-beta': 'prompt-caching-2024-07-31'}))
191+
betas = ['prompt-caching-2024-07-31', 'structured-outputs-2025-11-13']
192+
self.c = (cli or Anthropic(default_headers={'anthropic-beta': ','.join(betas)}))
191193
self.cache = cache
192194

193195
# %% ../00_core.ipynb
@@ -239,9 +241,10 @@ def mk_tool_choice(choose:Union[str,bool,None])->dict:
239241
# %% ../00_core.ipynb
240242
@patch
241243
def _precall(self:Client, msgs, prefill, sp, temp, maxtok, maxthinktok, stream,
242-
stop, tools, tool_choice, kwargs):
244+
stop, tools, tool_choice, output_format, kwargs):
243245
if tools: kwargs['tools'] = [get_schema(o) if callable(o) else o for o in listify(tools)]
244246
if tool_choice: kwargs['tool_choice'] = mk_tool_choice(tool_choice)
247+
if output_format: kwargs['output_format'] = output_format
245248
if maxthinktok:
246249
kwargs['thinking'] = {'type':'enabled', 'budget_tokens':maxthinktok}
247250
temp,prefill = 1,''
@@ -269,12 +272,13 @@ def __call__(self:Client,
269272
stop=None, # Stop sequence
270273
tools:Optional[list]=None, # List of tools to make available to Claude
271274
tool_choice:Optional[dict]=None, # Optionally force use of some tool
275+
output_format:Optional[dict]=None, # Optionally force output to conform with a JSON schema
272276
cb=None, # Callback to pass result to when complete
273277
**kwargs):
274278
"Make a call to Claude."
275279
msgs,kwargs = self._precall(msgs, prefill, sp, temp, maxtok, maxthinktok, stream,
276-
stop, tools, tool_choice, kwargs)
277-
m = self.c.messages
280+
stop, tools, tool_choice, output_format, kwargs)
281+
m = self.c.beta.messages if output_format else self.c.messages
278282
f = m.stream if stream else m.create
279283
res = f(model=self.model, messages=msgs, **kwargs)
280284
def _cb(v):

0 commit comments

Comments (0)