@@ -48,12 +48,12 @@ A Python equivalent of this utility is available [here](https://pypi.org/project
 ```json5
 {
   "llm": {
-    "provider": "openai", "model": "gpt-5-mini",
-    // "provider": "anthropic", "model": "claude-3-5-haiku-latest",
-    // "provider": "google_genai", "model": "gemini-2.5-flash",
-    // "provider": "xai", "model": "grok-3-mini",
-    // "provider": "cerebras", "model": "gpt-oss-120b",
-    // "provider": "groq", "model": "openai/gpt-oss-20b",
+    "provider": "openai", "model": "gpt-5-mini"
+    // "provider": "anthropic", "model": "claude-3-5-haiku-latest"
+    // "provider": "google_genai", "model": "gemini-2.5-flash"
+    // "provider": "xai", "model": "grok-3-mini"
+    // "provider": "cerebras", "model": "gpt-oss-120b"
+    // "provider": "groq", "model": "openai/gpt-oss-20b"
   },
 
   "mcp_servers": {
@@ -103,7 +103,8 @@ See [README_DEV.md](https://github.com/hideya/mcp-client-langchain-ts/blob/main/
 See [this page](https://github.com/hideya/langchain-mcp-tools-ts/blob/main/README.md#llm-provider-schema-compatibility)
 for details.
 If you want to disable the schema transformations, add `"schema_transformations": false,` to the top level of the config file.
-- **Command & URL servers**: Support for both local and remote MCP servers
+- **Command & URL servers**: Support for both local and remote MCP servers.
+  Use `mcp-remote` to connect to remote servers with OAuth (see the end of the configuration example below).
 - **Real-time logging**: Live stdio MCP server logs with customizable log directory
 - **Interactive testing**: Example queries for the convenience of repeated testing
 
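The `"schema_transformations": false` switch mentioned in the hunk above goes at the top level of `llm_mcp_config.json5`. A minimal sketch of the placement (the `llm` keys are copied from the example below; everything else in the full config stays unchanged):

```json5
{
  // Opt out of the automatic schema transformations (they are on by default)
  "schema_transformations": false,

  "llm": {
    "provider": "openai", "model": "gpt-5-mini",
  },

  // ... "example_queries", "mcp_servers", etc. follow as in the full example
}
```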
@@ -172,44 +173,28 @@ Create a `llm_mcp_config.json5` file:
 ```json5
 {
   "llm": {
-    "provider": "openai",
-    "model": "gpt-4.1-nano",
-    // model: "gpt-5-mini",
+    "provider": "openai", "model": "gpt-5-mini",
+    // "provider": "anthropic", "model": "claude-3-5-haiku-latest",
+    // "provider": "google_genai", "model": "gemini-2.5-flash",
+    // "provider": "xai", "model": "grok-3-mini",
+    // "provider": "cerebras", "model": "gpt-oss-120b",
+    // "provider": "groq", "model": "openai/gpt-oss-20b",
   },
-
-  // "llm": {
-  //   "provider": "anthropic",
-  //   "model": "claude-3-5-haiku-latest",
-  //   // "model": "claude-sonnet-4-0",
-  // },
-
-  // "llm": {
-  //   "provider": "google_genai",
-  //   "model": "gemini-2.5-flash",
-  //   // "model": "gemini-2.5-pro",
-  // },
-
-  // "llm": {
-  //   "provider": "xai",
-  //   "model": "grok-3-mini",
-  //   // "model": "grok-4",
-  // },
-
-  // "llm": {
-  //   "provider": "cerebras",
-  //   "model": "gpt-oss-120b",
-  // },
-
-  // "llm": {
-  //   "provider": "groq",
-  //   "model": "openai/gpt-oss-20b",
-  //   // "model": "openai/gpt-oss-120b",
-  // },
+
+  // To disable the automatic schema transformations, uncomment the following line.
+  // See this page for details about the schema transformations:
+  // https://github.com/hideya/langchain-mcp-tools-ts/blob/main/README.md#llm-provider-schema-compatibility
+  //
+  // "schema_transformations": false,
 
   "example_queries": [
     "Tell me how LLMs work in a few sentences",
     "Are there any weather alerts in California?",
     "Read the news headlines on bbc.com",
+    // "Tell me about my GitHub profile",
+    // "What's the news from Tokyo today?",
+    // "Open the webpage at bbc.com",
+    // "Tell me about my Notion account",
   ],
 
   "mcp_servers": {