Commit fb3ad8d

Pass a fake extension id through LM API (microsoft#262936)
1 parent fcc6d27 · commit fb3ad8d

3 files changed: 5 additions & 5 deletions
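Each call site makes the same one-line change: the requesting extension identifier passed to ILanguageModelsService.sendChatRequest switches from the Copilot Chat id to a fake 'core' id (per the in-code TODO, this is expected to become undefined once the API is updated). A minimal sketch of the resulting call shape, where the import paths, the askModel helper, and its parameter types are illustrative assumptions rather than part of this commit:

// Sketch only: module paths are indicative, not exact.
import { CancellationToken } from 'vs/base/common/cancellation';
import { ExtensionIdentifier } from 'vs/platform/extensions/common/extensions';
import { ChatMessageRole, ILanguageModelsService } from 'vs/workbench/contrib/chat/common/languageModels';

// Hypothetical helper showing the pattern used at each call site after this commit:
// the request is attributed to a fake 'core' extension id instead of 'github.copilot-chat'.
async function askModel(languageModelsService: ILanguageModelsService, modelId: string, prompt: string, token: CancellationToken) {
	return languageModelsService.sendChatRequest(
		modelId,
		new ExtensionIdentifier('core'),
		[{ role: ChatMessageRole.User, content: [{ type: 'text', value: prompt }] }],
		{},
		token
	);
}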

src/vs/workbench/contrib/mcp/common/mcpSamplingService.ts

Lines changed: 1 addition & 1 deletion
@@ -74,7 +74,7 @@ export class McpSamplingService extends Disposable implements IMcpSamplingServic
 
 const model = await this._getMatchingModel(opts);
 // todo@connor4312: nullExtensionDescription.identifier -> undefined with API update
-const response = await this._languageModelsService.sendChatRequest(model, new ExtensionIdentifier('Github.copilot-chat'), messages, {}, token);
+const response = await this._languageModelsService.sendChatRequest(model, new ExtensionIdentifier('core'), messages, {}, token);
 
 let responseText = '';

src/vs/workbench/contrib/terminalContrib/chatAgentTools/browser/bufferOutputPolling.ts

Lines changed: 1 addition & 1 deletion
@@ -233,7 +233,7 @@ export async function assessOutputForErrors(buffer: string, token: CancellationT
 return 'No models available';
 }
 
-const response = await languageModelsService.sendChatRequest(models[0], new ExtensionIdentifier('github.copilot-chat'), [{ role: ChatMessageRole.User, content: [{ type: 'text', value: `Evaluate this terminal output to determine if there were errors or if the command ran successfully: ${buffer}.` }] }], {}, token);
+const response = await languageModelsService.sendChatRequest(models[0], new ExtensionIdentifier('core'), [{ role: ChatMessageRole.User, content: [{ type: 'text', value: `Evaluate this terminal output to determine if there were errors or if the command ran successfully: ${buffer}.` }] }], {}, token);
 
 let responseText = '';

src/vs/workbench/contrib/terminalContrib/chatAgentTools/browser/tools/monitoring/outputMonitor.ts

Lines changed: 3 additions & 3 deletions
@@ -247,7 +247,7 @@ export class OutputMonitor extends Disposable implements IOutputMonitor {
 return 'No models available';
 }
 
-const response = await this._languageModelsService.sendChatRequest(models[0], new ExtensionIdentifier('github.copilot-chat'), [{ role: ChatMessageRole.User, content: [{ type: 'text', value: `Evaluate this terminal output to determine if there were errors or if the command ran successfully: ${buffer}.` }] }], {}, token);
+const response = await this._languageModelsService.sendChatRequest(models[0], new ExtensionIdentifier('core'), [{ role: ChatMessageRole.User, content: [{ type: 'text', value: `Evaluate this terminal output to determine if there were errors or if the command ran successfully: ${buffer}.` }] }], {}, token);
 
 try {
 const responseFromStream = getTextResponseFromStream(response);
@@ -291,7 +291,7 @@ export class OutputMonitor extends Disposable implements IOutputMonitor {
 Now, analyze this output:
 ${lastFiveLines}
 `;
-const response = await this._languageModelsService.sendChatRequest(models[0], new ExtensionIdentifier('github.copilot-chat'), [
+const response = await this._languageModelsService.sendChatRequest(models[0], new ExtensionIdentifier('core'), [
 { role: ChatMessageRole.User, content: [{ type: 'text', value: promptText }] }
 ], {}, token);
 
@@ -328,7 +328,7 @@ export class OutputMonitor extends Disposable implements IOutputMonitor {
 const sanitizedPrompt = confirmationPrompt.prompt;
 const sanitizedOptions = confirmationPrompt.options.map(opt => opt);
 const promptText = `Given the following confirmation prompt and options from a terminal output, which option should be selected to proceed safely and correctly?\nPrompt: "${sanitizedPrompt}"\nOptions: ${JSON.stringify(sanitizedOptions)}\nRespond with only the option string.`;
-const response = await this._languageModelsService.sendChatRequest(models[0], new ExtensionIdentifier('github.copilot-chat'), [
+const response = await this._languageModelsService.sendChatRequest(models[0], new ExtensionIdentifier('core'), [
 { role: ChatMessageRole.User, content: [{ type: 'text', value: promptText }] }
 ], {}, token);

0 commit comments
