Skip to content

Commit aa2a953

Browse files
author
hideya
committed
Update to ver 0.3.7
1 parent 6006292 commit aa2a953

File tree

7 files changed

+111
-122
lines changed

7 files changed

+111
-122
lines changed

CHANGELOG.md

Lines changed: 7 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -6,6 +6,13 @@ The format is based on [Keep a Changelog](http://keepachangelog.com/)
66
and this project adheres to [Semantic Versioning](http://semver.org/).
77

88

9+
## [0.3.7] - 2025-08-27
10+
11+
### Changed
12+
- Remove extra `console.log()`s from load-config.ts
13+
- Upgrade dependencies
14+
15+
916
## [0.3.6] - 2025-08-24
1017

1118
### Added

README.md

Lines changed: 24 additions & 39 deletions
Original file line numberDiff line numberDiff line change
@@ -48,12 +48,12 @@ A Python equivalent of this utility is available [here](https://pypi.org/project
4848
```json5
4949
{
5050
"llm": {
51-
"provider": "openai", "model": "gpt-5-mini",
52-
// "provider": "anthropic", "model": "claude-3-5-haiku-latest",
53-
// "provider": "google_genai", "model": "gemini-2.5-flash",
54-
// "provider": "xai", "model": "grok-3-mini",
55-
// "provider": "cerebras", "model": "gpt-oss-120b",
56-
// "provider": "groq", "model": "openai/gpt-oss-20b",
51+
"provider": "openai", "model": "gpt-5-mini"
52+
// "provider": "anthropic", "model": "claude-3-5-haiku-latest"
53+
// "provider": "google_genai", "model": "gemini-2.5-flash"
54+
// "provider": "xai", "model": "grok-3-mini"
55+
// "provider": "cerebras", "model": "gpt-oss-120b"
56+
// "provider": "groq", "model": "openai/gpt-oss-20b"
5757
},
5858

5959
"mcp_servers": {
@@ -103,7 +103,8 @@ See [README_DEV.md](https://github.com/hideya/mcp-client-langchain-ts/blob/main/
103103
See [this page](https://github.com/hideya/langchain-mcp-tools-ts/blob/main/README.md#llm-provider-schema-compatibility)
104104
for details.
105105
If you want to disable the schema transformations, add `"schema_transformations": false,` to the top level of the config file.
106-
- **Command & URL servers**: Support for both local and remote MCP servers
106+
- **Command & URL servers**: Support for both local and remote MCP servers.
107+
Use `mcp-remote` to connect to remote servers with OAuth (see the end of the configuration example below).
107108
- **Real-time logging**: Live stdio MCP server logs with customizable log directory
108109
- **Interactive testing**: Example queries for the convenience of repeated testing
109110

@@ -172,44 +173,28 @@ Create a `llm_mcp_config.json5` file:
172173
```json5
173174
{
174175
"llm": {
175-
"provider": "openai",
176-
"model": "gpt-4.1-nano",
177-
// model: "gpt-5-mini",
176+
"provider": "openai", "model": "gpt-5-mini",
177+
// "provider": "anthropic", "model": "claude-3-5-haiku-latest",
178+
// "provider": "google_genai", "model": "gemini-2.5-flash",
179+
// "provider": "xai", "model": "grok-3-mini",
180+
// "provider": "cerebras", "model": "gpt-oss-120b",
181+
// "provider": "groq", "model": "openai/gpt-oss-20b",
178182
},
179-
180-
// "llm": {
181-
// "provider": "anthropic",
182-
// "model": "claude-3-5-haiku-latest",
183-
// // "model": "claude-sonnet-4-0",
184-
// },
185-
186-
// "llm": {
187-
// "provider": "google_genai",
188-
// "model": "gemini-2.5-flash",
189-
// // "model": "gemini-2.5-pro",
190-
// },
191-
192-
// "llm": {
193-
// "provider": "xai",
194-
// "model": "grok-3-mini",
195-
// // "model": "grok-4",
196-
// },
197-
198-
// "llm": {
199-
// "provider": "cerebras",
200-
// "model": "gpt-oss-120b",
201-
// },
202-
203-
// "llm": {
204-
// "provider": "groq",
205-
// "model": "openai/gpt-oss-20b",
206-
// // "model": "openai/gpt-oss-120b",
207-
// },
183+
184+
// To disable the automatic schema transformations, uncomment the following line.
185+
// See this for details about the schema transformations:
186+
// https://github.com/hideya/langchain-mcp-tools-ts/blob/main/README.md#llm-provider-schema-compatibility
187+
//
188+
// "schema_transformations": false,
208189

209190
"example_queries": [
210191
"Tell me how LLMs work in a few sentences",
211192
"Are there any weather alerts in California?",
212193
"Read the news headlines on bbc.com",
194+
// "Tell me about my GitHub profile",
195+
// "What's the news from Tokyo today?",
196+
// "Open the webpage at bbc.com",
197+
// "Tell me about my Notion account",
213198
],
214199

215200
"mcp_servers": {

llm_mcp_config.json5

Lines changed: 6 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -58,12 +58,12 @@
5858
// },
5959

6060
"example_queries": [
61-
"Read the news headlines on bbc.com",
62-
"Read and briefly summarize the LICENSE file",
63-
"Are there any weather alerts in California?",
61+
"Summarize the beginning of the news headlines on BBC.com",
62+
// "Read and briefly summarize the LICENSE file",
63+
// "Are there any weather alerts in California?",
6464
// "What's the news from Tokyo today?",
6565
// "Open the webpage at bbc.com",
66-
// "Tell me how many github repositories I have?"
66+
// "Tell me about my GitHub profile",
6767
// "Tell me about my Notion account",
6868
],
6969

@@ -116,7 +116,7 @@
116116
// "github": {
117117
// // To avoid auto protocol fallback, specify the protocol explicitly when using authentication
118118
// "type": "http", // or `transport: "http",`
119-
// "url": "https://api.githubcopilot.com/mcp/",
119+
// "url": "https://api.githubcopilot.com/mcp",
120120
// "headers": {
121121
// "Authorization": "Bearer ${GITHUB_PERSONAL_ACCESS_TOKEN}"
122122
// }
@@ -148,7 +148,7 @@
148148
// }
149149
// },
150150

151-
// Run Notion remote MCP server via mcp-remote
151+
// // Run Notion remote MCP server via mcp-remote
152152
// "notionMCP": {
153153
// "command": "npx",
154154
// "args": ["-y", "mcp-remote", "https://mcp.notion.com/mcp"],

0 commit comments

Comments
 (0)