82 changes: 76 additions & 6 deletions 00_core.ipynb
@@ -73,6 +73,7 @@
"from anthropic.types import (Usage, TextBlock, ServerToolUseBlock,\n",
" WebSearchToolResultBlock, Message, ToolUseBlock,\n",
" ThinkingBlock, ServerToolUsage)\n",
"from anthropic.types.beta import (BetaMessage)\n",
"from anthropic.resources import messages\n",
"\n",
"import toolslm\n",
@@ -533,7 +534,7 @@
"This is the first exported public function or class we're creating (the previous export was of a variable). In the rendered version of the notebook for these you'll see 4 things, in this order (unless the symbol starts with a single `_`, which indicates it's *private*):\n",
"\n",
"- The signature (with the symbol name as a heading, with a horizontal rule above)\n",
"- A table of paramater docs (if provided)\n",
"- A table of parameter docs (if provided)\n",
"- The doc string (in italics).\n",
"- The source code (in a collapsible \"Exported source\" block)\n",
"\n",
@@ -646,7 +647,7 @@
"source": [
"#| exports\n",
"@patch\n",
"def _repr_markdown_(self:(Message)):\n",
"def _repr_markdown_(self:(Message,BetaMessage)):\n",
" det = '\\n- '.join(f'{k}: `{v}`' for k,v in self.model_dump().items())\n",
" cts = re.sub(r'\\$', '$', contents(self)) # escape `$` for jupyter latex\n",
" return f\"\"\"{cts}\n",
@@ -1145,7 +1146,8 @@
" self.model,self.use = model,usage()\n",
" self.text_only = model in text_only_models\n",
" self.log = [] if log else None\n",
" self.c = (cli or Anthropic(default_headers={'anthropic-beta': 'prompt-caching-2024-07-31'}))\n",
" betas = ['prompt-caching-2024-07-31', 'structured-outputs-2025-11-13']\n",
" self.c = (cli or Anthropic(default_headers={'anthropic-beta': ','.join(betas)}))\n",
" self.cache = cache"
]
},
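Both beta features are opted into by comma-joining their flags in a single `anthropic-beta` header. This only happens for the default client the class builds itself; if you pass your own `cli`, you'd need to set the header yourself. A minimal sketch (not part of this diff; `models[1]` is just an illustrative choice):

```python
from anthropic import Anthropic
from claudette import Client, models

# Sketch only: a caller-supplied client doesn't get the beta header added for it,
# so opt into the same betas explicitly when constructing it.
betas = ['prompt-caching-2024-07-31', 'structured-outputs-2025-11-13']
cli = Anthropic(default_headers={'anthropic-beta': ','.join(betas)})
c = Client(models[1], cli=cli)
```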
@@ -1376,9 +1378,10 @@
"#| exports\n",
"@patch\n",
"def _precall(self:Client, msgs, prefill, sp, temp, maxtok, maxthinktok, stream,\n",
" stop, tools, tool_choice, kwargs):\n",
" stop, tools, tool_choice, output_format, kwargs):\n",
" if tools: kwargs['tools'] = [get_schema(o) if callable(o) else o for o in listify(tools)]\n",
" if tool_choice: kwargs['tool_choice'] = mk_tool_choice(tool_choice)\n",
" if output_format: kwargs['output_format'] = output_format\n",
" if maxthinktok: \n",
" kwargs['thinking'] = {'type':'enabled', 'budget_tokens':maxthinktok} \n",
" temp,prefill = 1,''\n",
@@ -1414,12 +1417,13 @@
" stop=None, # Stop sequence\n",
" tools:Optional[list]=None, # List of tools to make available to Claude\n",
" tool_choice:Optional[dict]=None, # Optionally force use of some tool\n",
" output_format:Optional[dict]=None, # Optionally force output to conform with a JSON schema\n",
" cb=None, # Callback to pass result to when complete\n",
" **kwargs):\n",
" \"Make a call to Claude.\"\n",
" msgs,kwargs = self._precall(msgs, prefill, sp, temp, maxtok, maxthinktok, stream,\n",
" stop, tools, tool_choice, kwargs)\n",
" m = self.c.messages\n",
" stop, tools, tool_choice, output_format, kwargs)\n",
" m = self.c.beta.messages if output_format else self.c.messages\n",
" f = m.stream if stream else m.create\n",
" res = f(model=self.model, messages=msgs, **kwargs)\n",
" def _cb(v):\n",
@@ -1846,6 +1850,72 @@
"print(c.stop_reason, c.stop_sequence)"
]
},
{
"cell_type": "markdown",
"id": "17fa5c5f",
"metadata": {},
"source": [
"We can force the next message to be [structured output](https://docs.claude.com/en/docs/build-with-claude/structured-outputs) based on a JSON schema we provide."
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "feb75ba4",
"metadata": {},
"outputs": [
{
"data": {
"text/markdown": [
"{\"first_name\":\"Johno\",\"last_name\":\"Ohjohn\"}\n",
"\n",
"<details>\n",
"\n",
"- id: `msg_019Jy7ge2pTDFih4Zt5kCUvS`\n",
"- container: `None`\n",
"- content: `[{'citations': None, 'text': '{\"first_name\":\"Johno\",\"last_name\":\"Ohjohn\"}', 'type': 'text'}]`\n",
"- context_management: `None`\n",
"- model: `claude-sonnet-4-5-20250929`\n",
"- role: `assistant`\n",
"- stop_reason: `end_turn`\n",
"- stop_sequence: `None`\n",
"- type: `message`\n",
"- usage: `{'cache_creation': {'ephemeral_1h_input_tokens': 0, 'ephemeral_5m_input_tokens': 0}, 'cache_creation_input_tokens': 0, 'cache_read_input_tokens': 0, 'input_tokens': 193, 'output_tokens': 21, 'server_tool_use': None, 'service_tier': 'standard'}`\n",
"\n",
"</details>"
],
"text/plain": [
"BetaMessage(id='msg_019Jy7ge2pTDFih4Zt5kCUvS', container=None, content=[BetaTextBlock(citations=None, text='{\"first_name\":\"Johno\",\"last_name\":\"Ohjohn\"}', type='text')], context_management=None, model='claude-sonnet-4-5-20250929', role='assistant', stop_reason='end_turn', stop_sequence=None, type='message', usage=BetaUsage(cache_creation=BetaCacheCreation(ephemeral_1h_input_tokens=0, ephemeral_5m_input_tokens=0), cache_creation_input_tokens=0, cache_read_input_tokens=0, input_tokens=193, output_tokens=21, server_tool_use=None, service_tier='standard'))"
]
},
"execution_count": null,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"of = {\n",
" \"type\": \"json_schema\",\n",
" \"schema\": {\n",
" \"type\": \"object\",\n",
" \"properties\": {\n",
" \"first_name\": {\"type\": \"string\"},\n",
" \"last_name\": {\"type\": \"string\"},\n",
" },\n",
" \"required\": [\"first_name\", \"last_name\"],\n",
" \"additionalProperties\": False\n",
" }\n",
"}\n",
"\n",
"c(\"The first name is Johno, the last name is Ohjohn\", output_format=of)"
]
},
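The structured reply arrives as JSON text in the message's text block, so a natural follow-up is to parse it into a dict. A minimal sketch (assuming `contents` extracts the reply text from a `BetaMessage` the same way it does from a `Message`):

```python
import json

r = c("The first name is Johno, the last name is Ohjohn", output_format=of)
person = json.loads(contents(r))  # contents(r) -> '{"first_name":"Johno","last_name":"Ohjohn"}'
person['first_name']              # 'Johno'
```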
{
"cell_type": "markdown",
"id": "e0edc13f",
"metadata": {},
"source": []
},
{
"cell_type": "markdown",
"id": "fbdc1914",
6 changes: 4 additions & 2 deletions claudette/_modidx.py
@@ -16,7 +16,10 @@
'claudette.asink._astream': ('async.html#_astream', 'claudette/asink.py'),
'claudette.asink.mk_funcres_async': ('async.html#mk_funcres_async', 'claudette/asink.py'),
'claudette.asink.mk_toolres_async': ('async.html#mk_toolres_async', 'claudette/asink.py')},
'claudette.core': { 'claudette.core.Chat': ('core.html#chat', 'claudette/core.py'),
'claudette.core': { 'claudette.core.(Message, BetaMessage)._repr_markdown_': ( 'core.html#(message, '
'betamessage)._repr_markdown_',
'claudette/core.py'),
'claudette.core.Chat': ('core.html#chat', 'claudette/core.py'),
'claudette.core.Chat.__call__': ('core.html#chat.__call__', 'claudette/core.py'),
'claudette.core.Chat.__init__': ('core.html#chat.__init__', 'claudette/core.py'),
'claudette.core.Chat._append_pr': ('core.html#chat._append_pr', 'claudette/core.py'),
@@ -33,7 +36,6 @@
'claudette.core.Client._repr_markdown_': ('core.html#client._repr_markdown_', 'claudette/core.py'),
'claudette.core.Client.cost': ('core.html#client.cost', 'claudette/core.py'),
'claudette.core.Client.structured': ('core.html#client.structured', 'claudette/core.py'),
'claudette.core.Message._repr_markdown_': ('core.html#message._repr_markdown_', 'claudette/core.py'),
'claudette.core.ServerToolUsage.__add__': ('core.html#servertoolusage.__add__', 'claudette/core.py'),
'claudette.core.ToolResult': ('core.html#toolresult', 'claudette/core.py'),
'claudette.core.ToolResult.__init__': ('core.html#toolresult.__init__', 'claudette/core.py'),
14 changes: 9 additions & 5 deletions claudette/core.py
@@ -19,6 +19,7 @@
from anthropic.types import (Usage, TextBlock, ServerToolUseBlock,
WebSearchToolResultBlock, Message, ToolUseBlock,
ThinkingBlock, ServerToolUsage)
from anthropic.types.beta import (BetaMessage)
from anthropic.resources import messages

import toolslm
@@ -120,7 +121,7 @@ def find_block(r:abc.Mapping, # The message to look in

# %% ../00_core.ipynb
@patch
def _repr_markdown_(self:(Message)):
def _repr_markdown_(self:(Message,BetaMessage)):
det = '\n- '.join(f'{k}: `{v}`' for k,v in self.model_dump().items())
cts = re.sub(r'\$', '&#36;', contents(self)) # escape `$` for jupyter latex
return f"""{cts}
@@ -187,7 +188,8 @@ def __init__(self, model, cli=None, log=False, cache=False):
self.model,self.use = model,usage()
self.text_only = model in text_only_models
self.log = [] if log else None
self.c = (cli or Anthropic(default_headers={'anthropic-beta': 'prompt-caching-2024-07-31'}))
betas = ['prompt-caching-2024-07-31', 'structured-outputs-2025-11-13']
self.c = (cli or Anthropic(default_headers={'anthropic-beta': ','.join(betas)}))
self.cache = cache

# %% ../00_core.ipynb
Expand Down Expand Up @@ -239,9 +241,10 @@ def mk_tool_choice(choose:Union[str,bool,None])->dict:
# %% ../00_core.ipynb
@patch
def _precall(self:Client, msgs, prefill, sp, temp, maxtok, maxthinktok, stream,
stop, tools, tool_choice, kwargs):
stop, tools, tool_choice, output_format, kwargs):
if tools: kwargs['tools'] = [get_schema(o) if callable(o) else o for o in listify(tools)]
if tool_choice: kwargs['tool_choice'] = mk_tool_choice(tool_choice)
if output_format: kwargs['output_format'] = output_format
if maxthinktok:
kwargs['thinking'] = {'type':'enabled', 'budget_tokens':maxthinktok}
temp,prefill = 1,''
@@ -269,12 +272,13 @@ def __call__(self:Client,
stop=None, # Stop sequence
tools:Optional[list]=None, # List of tools to make available to Claude
tool_choice:Optional[dict]=None, # Optionally force use of some tool
output_format:Optional[dict]=None, # Optionally force output to conform with a JSON schema
cb=None, # Callback to pass result to when complete
**kwargs):
"Make a call to Claude."
msgs,kwargs = self._precall(msgs, prefill, sp, temp, maxtok, maxthinktok, stream,
stop, tools, tool_choice, kwargs)
m = self.c.messages
stop, tools, tool_choice, output_format, kwargs)
m = self.c.beta.messages if output_format else self.c.messages
f = m.stream if stream else m.create
res = f(model=self.model, messages=msgs, **kwargs)
def _cb(v):