Skip to content

Commit 8368b2f

Browse files
authored
chore: use latest chat model names (#1203)
- use latest chat model in examples to highlight support
- drop prefixed `openai:` and `anthropic:` where possible due to dynamic routing
- standardize on full model name instead of aliases (for easier future updates)
1 parent b8cb4b6 commit 8368b2f

File tree

78 files changed

+456
-419
lines changed

Some content is hidden

Large commits have some content hidden by default. Use the search box below for content that may be hidden.

78 files changed

+456
-419
lines changed

src/langsmith/define-target-function.mdx

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -87,7 +87,7 @@ const target = async(inputs) => {
8787
```python Python (LangChain)
8888
from langchain.chat_models import init_chat_model
8989

90-
model = init_chat_model("openai:gpt-4o-mini")
90+
model = init_chat_model("gpt-4o-mini")
9191

9292
def target(inputs: dict) -> dict:
9393
# This assumes your dataset has inputs with a `messages` key

src/langsmith/evaluate-complex-agent.mdx

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -322,7 +322,7 @@ def lookup_album( ...
322322
def lookup_artist( ...
323323

324324
# Agent model
325-
qa_llm = init_chat_model("claude-3-5-sonnet-latest")
325+
qa_llm = init_chat_model("claude-sonnet-4-5-20250929")
326326
# The prebuilt ReACT agent only expects State to have a 'messages' key, so the
327327
# state we defined for the refund agent can also be passed to our lookup agent.
328328
qa_graph = create_agent(qa_llm, tools=[lookup_track, lookup_artist, lookup_album])
@@ -1260,7 +1260,7 @@ def lookup_artist(
12601260

12611261

12621262
# Agent model
1263-
qa_llm = init_chat_model("claude-3-5-sonnet-latest")
1263+
qa_llm = init_chat_model("claude-sonnet-4-5-20250929")
12641264
# The prebuilt ReACT agent only expects State to have a 'messages' key, so the
12651265
# state we defined for the refund agent can also be passed to our lookup agent.
12661266
qa_graph = create_agent(qa_llm, [lookup_track, lookup_artist, lookup_album])

src/langsmith/evaluate-graph.mdx

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -42,7 +42,7 @@ def search(query: str) -> str:
4242

4343
tools = [search]
4444
tool_node = ToolNode(tools)
45-
model = init_chat_model("claude-3-5-sonnet-latest").bind_tools(tools)
45+
model = init_chat_model("claude-sonnet-4-5-20250929").bind_tools(tools)
4646

4747
# Define the function that determines whether to continue or not
4848
def should_continue(state: State) -> Literal["tools", END]:
@@ -282,7 +282,7 @@ def search(query: str) -> str:
282282

283283
tools = [search]
284284
tool_node = ToolNode(tools)
285-
model = init_chat_model("claude-3-5-sonnet-latest").bind_tools(tools)
285+
model = init_chat_model("claude-sonnet-4-5-20250929").bind_tools(tools)
286286

287287
# Define the function that determines whether to continue or not
288288
def should_continue(state: State) -> Literal["tools", END]:

src/langsmith/generative-ui-react.mdx

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -367,7 +367,7 @@ Then you can push updates to the UI component by calling `ui.push()` / `push_ui_
367367

368368

369369
async def writer_node(state: AgentState):
370-
model = ChatAnthropic(model="claude-3-5-sonnet-latest")
370+
model = ChatAnthropic(model="claude-sonnet-4-5-20250929")
371371
message: AIMessage = await model.bind_tools(
372372
tools=[CreateTextDocument],
373373
tool_choice={"type": "tool", "name": "CreateTextDocument"},
@@ -432,7 +432,7 @@ Then you can push updates to the UI component by calling `ui.push()` / `push_ui_
432432
): Promise<typeof AgentState.Update> {
433433
const ui = typedUi<typeof ComponentMap>(config);
434434

435-
const model = new ChatAnthropic({ model: "claude-3-5-sonnet-latest" });
435+
const model = new ChatAnthropic({ model: "claude-sonnet-4-5-20250929" });
436436
const message = await model
437437
.bindTools(
438438
[

src/langsmith/human-in-the-loop-time-travel.mdx

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -27,7 +27,7 @@ To time travel using the LangGraph Server API (via the LangGraph SDK):
2727
joke: NotRequired[str]
2828

2929
model = init_chat_model(
30-
"anthropic:claude-sonnet-4-5",
30+
"claude-sonnet-4-5-20250929",
3131
temperature=0,
3232
)
3333

src/langsmith/log-llm-trace.mdx

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -397,7 +397,7 @@ def chat_model(inputs: dict) -> dict:
397397
When using a custom model, it is recommended to also provide the following `metadata` fields to identify the model when viewing traces and when filtering.
398398

399399
* `ls_provider`: The provider of the model, eg "openai", "anthropic", etc.
400-
* `ls_model_name`: The name of the model, eg "gpt-4o-mini", "claude-3-opus-20240307", etc.
400+
* `ls_model_name`: The name of the model, eg "gpt-4o-mini", "claude-3-opus-20240229", etc.
401401

402402
<CodeGroup>
403403

src/langsmith/manage-prompts-programmatically.mdx

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -349,7 +349,7 @@ const { messages, system } = convertPromptToAnthropic(formattedPrompt);
349349

350350
const anthropicClient = new Anthropic();
351351
const anthropicResponse = await anthropicClient.messages.create({
352-
model: "claude-3-haiku-20240307",
352+
model: "claude-haiku-4-5-20251001",
353353
system,
354354
messages,
355355
max_tokens: 1024,

src/langsmith/observability-studio.mdx

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -78,8 +78,8 @@ class Configuration(BaseModel):
7878

7979
model: Annotated[
8080
Literal[
81-
"anthropic/claude-sonnet-4-5",
82-
"anthropic/claude-3-5-haiku-latest",
81+
"anthropic/claude-sonnet-4-5-20250929",
82+
"anthropic/claude-haiku-4-5-20251001",
8383
"openai/o1",
8484
"openai/gpt-4o-mini",
8585
"openai/o1-mini",

src/langsmith/server-mcp.mdx

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -154,7 +154,7 @@ Use an MCP-compliant client to connect to the LangGraph server. The following ex
154154
tools = await load_mcp_tools(session)
155155

156156
# Create and run a react agent with the tools
157-
agent = create_agent("openai:gpt-4.1", tools)
157+
agent = create_agent("gpt-4.1", tools)
158158

159159
# Invoke the agent with a message
160160
agent_response = await agent.ainvoke({"messages": "What can the finance agent do for me?"})

src/langsmith/streaming.mdx

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -649,7 +649,7 @@ The streamed output from [`messages-tuple` mode](#supported-stream-modes) is a t
649649
topic: str
650650
joke: str = ""
651651

652-
model = init_chat_model(model="openai:gpt-4o-mini")
652+
model = init_chat_model(model="gpt-4o-mini")
653653

654654
def call_model(state: MyState):
655655
"""Call the LLM to generate a joke about a topic"""

0 commit comments

Comments (0)