- In the browser, set `corsProxy` if needed

For more examples, see the examples directory and provider-specific docs.

---

## AWS Bedrock Provider

The `@ax-llm/ax-ai-aws-bedrock` package provides production-ready AWS Bedrock integration supporting Claude, GPT OSS, and Titan Embed models.

### Installation

```bash
npm install @ax-llm/ax @ax-llm/ax-ai-aws-bedrock
```

### Quick Start

```typescript
import { AxAIBedrock, AxAIBedrockModel } from "@ax-llm/ax-ai-aws-bedrock";
import { ax } from "@ax-llm/ax";

const ai = new AxAIBedrock({
  region: "us-east-2",
  config: { model: AxAIBedrockModel.ClaudeSonnet4 },
});

const generator = ax("question:string -> answer:string");
const result = await generator.forward(ai, {
  question: "What is AWS Bedrock?",
});

console.log(result.answer);
```

### Configuration

```typescript
const ai = new AxAIBedrock({
  region: "us-east-2", // Primary AWS region
  fallbackRegions: ["us-west-2", "us-east-1"], // Fallback regions for Claude
  gptRegion: "us-west-2", // Primary region for GPT models
  gptFallbackRegions: ["us-east-1"], // Fallback regions for GPT
  config: {
    model: AxAIBedrockModel.ClaudeSonnet4,
    maxTokens: 4096,
    temperature: 0.7,
    topP: 0.9,
  },
});
```

### Supported Models

**Claude Models:**

- `AxAIBedrockModel.ClaudeSonnet4` - Claude Sonnet 4
- `AxAIBedrockModel.ClaudeOpus4` - Claude Opus 4
- `AxAIBedrockModel.Claude35Sonnet` - Claude 3.5 Sonnet
- `AxAIBedrockModel.Claude35Haiku` - Claude 3.5 Haiku
- `AxAIBedrockModel.Claude3Opus` - Claude 3 Opus

**GPT Models:**

- `AxAIBedrockModel.Gpt41106` - GPT-4 1106 Preview
- `AxAIBedrockModel.Gpt4Mini` - GPT-4o Mini

**Embedding Models:**

- `AxAIBedrockEmbedModel.TitanEmbedV2` - Titan Embed V2

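A minimal embedding sketch, assuming the Bedrock provider exposes the same `embed({ texts })` call as the other ax providers and accepts the embedding model via `config.embedModel`:

```typescript
import { AxAIBedrock, AxAIBedrockEmbedModel } from "@ax-llm/ax-ai-aws-bedrock";

const embedAI = new AxAIBedrock({
  region: "us-east-1",
  config: { embedModel: AxAIBedrockEmbedModel.TitanEmbedV2 },
});

// Assumption: embed() mirrors the shared provider signature used elsewhere in ax.
const { embeddings } = await embedAI.embed({
  texts: ["What is AWS Bedrock?"],
});

console.log(embeddings[0].length); // dimensionality of the Titan V2 vector
```
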
### Regional Failover

The provider automatically handles regional failover for high availability. If the primary region fails, it retries the request in the configured fallback regions.

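Concretely, with the settings below a Claude request is attempted in `us-east-2` first and, on failure, retried in `us-west-2` and then `us-east-1`. This is a sketch restating the configuration section; that retries follow the array order is an assumption:

```typescript
import { AxAIBedrock, AxAIBedrockModel } from "@ax-llm/ax-ai-aws-bedrock";
import { ax } from "@ax-llm/ax";

const ai = new AxAIBedrock({
  region: "us-east-2", // tried first
  fallbackRegions: ["us-west-2", "us-east-1"], // assumed to be tried in order
  config: { model: AxAIBedrockModel.ClaudeSonnet4 },
});

// No failover handling is needed at the call site; retries happen internally.
const generator = ax("question:string -> answer:string");
const result = await generator.forward(ai, { question: "What is AWS Bedrock?" });
```
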
### AWS Authentication

Uses the AWS SDK's default credential chain:

- Environment variables (`AWS_ACCESS_KEY_ID`, `AWS_SECRET_ACCESS_KEY`)
- AWS credentials file (`~/.aws/credentials`)
- IAM roles (EC2/Lambda)

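To check which credentials the chain resolves to before making model calls, one option is the AWS SDK's own credential-providers package; a small sketch, assuming `@aws-sdk/credential-providers` is installed (the Bedrock provider itself needs no explicit keys):

```typescript
import { fromNodeProviderChain } from "@aws-sdk/credential-providers";

// Resolve credentials through the same default chain the provider relies on.
const credentials = await fromNodeProviderChain()();
console.log("Resolved access key:", credentials.accessKeyId);
```
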
---

## Vercel AI SDK Integration

The `@ax-llm/ax-ai-sdk-provider` package provides seamless integration with the Vercel AI SDK v5.

### Installation

```bash
npm install @ax-llm/ax @ax-llm/ax-ai-sdk-provider ai
```

### Basic Usage

```typescript
import { ai } from "@ax-llm/ax";
import { AxAIProvider } from "@ax-llm/ax-ai-sdk-provider";
import { generateText, streamText } from "ai";

// Create Ax AI instance
const axAI = ai({
  name: "openai",
  apiKey: process.env.OPENAI_APIKEY!,
});

// Create AI SDK v5 compatible provider
const model = new AxAIProvider(axAI);

// Use with AI SDK functions
const result = await generateText({
  model,
  messages: [{ role: "user", content: "Hello!" }],
});

console.log(result.text);
```

### Streaming with React Server Components

```tsx
import { ai } from "@ax-llm/ax";
import { AxAIProvider } from "@ax-llm/ax-ai-sdk-provider";
import { streamUI } from "@ai-sdk/rsc"; // in AI SDK v5, RSC helpers live in @ai-sdk/rsc

const axAI = ai({
  name: "openai",
  apiKey: process.env.OPENAI_APIKEY!,
});

const model = new AxAIProvider(axAI);

const result = await streamUI({
  model,
  messages: [{ role: "user", content: "Tell me a story" }],
  text: ({ content }) => <p>{content}</p>,
});
```

### Agent Provider

Use Ax agents with the AI SDK:

```tsx
import { ai, agent } from "@ax-llm/ax";
import { AxAgentProvider } from "@ax-llm/ax-ai-sdk-provider";

const llm = ai({ name: "openai", apiKey: process.env.OPENAI_APIKEY! });

const myAgent = agent("userInput:string -> response:string", {
  name: "helper",
  description: "A helpful assistant",
  ai: llm,
});

const agentProvider = new AxAgentProvider({
  agent: myAgent,
  updateState: (msgs) => {
    /* handle state updates */
  },
  generate: (result) => <div>{result.response}</div>,
});
```

### Features

- AI SDK v5 `LanguageModelV2` compatible
- Full tool/function calling support
- Streaming with lifecycle events (see the sketch below)
- Multi-modal inputs (text, images, files)
- Full TypeScript support

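For instance, streaming works through the standard `streamText` helper from the `ai` package; a minimal sketch (the prompt text is illustrative):

```typescript
import { ai } from "@ax-llm/ax";
import { AxAIProvider } from "@ax-llm/ax-ai-sdk-provider";
import { streamText } from "ai";

const axAI = ai({ name: "openai", apiKey: process.env.OPENAI_APIKEY! });
const model = new AxAIProvider(axAI);

// streamText returns immediately; consume the token stream as it arrives.
const { textStream } = streamText({
  model,
  messages: [{ role: "user", content: "Write a haiku about TypeScript." }],
});

for await (const chunk of textStream) {
  process.stdout.write(chunk);
}
```
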
---

## Ax Tools Package

The `@ax-llm/ax-tools` package provides additional tools for Ax, including MCP (Model Context Protocol) support and a JavaScript interpreter.

### Installation

```bash
npm install @ax-llm/ax @ax-llm/ax-tools
```

### MCP Stdio Transport

Connect to MCP servers via stdio:

```typescript
import { AxMCPClient } from "@ax-llm/ax";
import { axCreateMCPStdioTransport } from "@ax-llm/ax-tools";

// Create transport for an MCP server (the filesystem reference server,
// which takes its allowed root directories as arguments)
const transport = axCreateMCPStdioTransport({
  command: "npx",
  args: ["-y", "@modelcontextprotocol/server-filesystem", process.env.HOME!],
  env: { HOME: process.env.HOME },
});

// Use with AxMCPClient
const client = new AxMCPClient(transport);
await client.init();

const tools = await client.getTools();
console.log("Available tools:", tools.map((t) => t.name));
```

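Continuing from the `client` created above, the discovered tools can be handed to a generator; a sketch, assuming the client instance is accepted directly in the `functions` option like other function tools:

```typescript
import { ai, ax } from "@ax-llm/ax";

const llm = ai({ name: "openai", apiKey: process.env.OPENAI_APIKEY! });

// Assumption: the MCP client can be passed wherever function tools are expected.
const fileAgent = ax("task:string -> summary:string", {
  functions: [client],
});

const res = await fileAgent.forward(llm, {
  task: "List the files in the home directory and summarize them",
});
console.log(res.summary);
```
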
### JavaScript Interpreter

A sandboxed JavaScript interpreter that can be used as a function tool:

```typescript
import { ai, ax } from "@ax-llm/ax";
import {
  AxJSInterpreter,
  AxJSInterpreterPermission,
} from "@ax-llm/ax-tools";

// Create interpreter with specific permissions
const interpreter = new AxJSInterpreter({
  permissions: [
    AxJSInterpreterPermission.CRYPTO,
    AxJSInterpreterPermission.OS,
  ],
});

// Use as a function tool
const llm = ai({ name: "openai", apiKey: process.env.OPENAI_APIKEY! });

const codeRunner = ax("task:string -> result:string", {
  functions: [interpreter.toFunction()],
});

const result = await codeRunner.forward(llm, {
  task: "Calculate the factorial of 10",
});
```

### Permissions

Control what the interpreter can access:

| Permission | Description                      |
| ---------- | -------------------------------- |
| `FS`       | File system access (`node:fs`)   |
| `NET`      | Network access (`http`, `https`) |
| `OS`       | OS information (`node:os`)       |
| `CRYPTO`   | Cryptographic functions          |
| `PROCESS`  | Process information              |

```typescript
import { AxJSInterpreter, AxJSInterpreterPermission } from "@ax-llm/ax-tools";

const interpreter = new AxJSInterpreter({
  permissions: [
    AxJSInterpreterPermission.FS,
    AxJSInterpreterPermission.NET,
  ],
});
```