
Commit cc56462

pipperci-bot authored and committed
initial endpoint
1 parent e7df1dd commit cc56462

File tree

7 files changed: +90 -4 lines changed

apps/remix-ide/src/app/plugins/remixAIPlugin.tsx

Lines changed: 5 additions & 1 deletion
@@ -1,7 +1,7 @@
 import * as packageJson from '../../../../../package.json'
 import { Plugin } from '@remixproject/engine';
 import { trackMatomoEvent } from '@remix-api'
-import { IModel, RemoteInferencer, IRemoteModel, IParams, GenerationParams, AssistantParams, CodeExplainAgent, SecurityAgent, CompletionParams, OllamaInferencer, isOllamaAvailable, getBestAvailableModel } from '@remix/remix-ai-core';
+import { IModel, RemoteInferencer, IRemoteModel, IParams, GenerationParams, AssistantParams, CodeExplainAgent, SecurityAgent, CompletionParams, OllamaInferencer, isOllamaAvailable, getBestAvailableModel, resetOllamaHostOnSettingsChange } from '@remix/remix-ai-core';
 import { CodeCompletionAgent, ContractAgent, workspaceAgent, IContextType, mcpDefaultServersConfig } from '@remix/remix-ai-core';
 import { MCPInferencer } from '@remix/remix-ai-core';
 import { IMCPServer, IMCPConnectionStatus } from '@remix/remix-ai-core';
@@ -63,6 +63,10 @@ export class RemixAIPlugin extends Plugin {
   }

   onActivation(): void {
+    // Expose Ollama reset function globally for settings integration
+    if (typeof window !== 'undefined') {
+      (window as any).resetOllamaHostOnSettingsChange = resetOllamaHostOnSettingsChange;
+    }

     if (this.isOnDesktop) {
       this.useRemoteInferencer = true

apps/remix-ide/src/app/tabs/locales/en/settings.json

Lines changed: 4 additions & 1 deletion
@@ -69,5 +69,8 @@
   "settings.mcpServerConfigurationDescription": "Connect to Model Context Protocol servers for enhanced AI context",
   "settings.enableMCPEnhancement": "Enable MCP Integration",
   "settings.enableMCPEnhancementDescription": "Manage your MCP server connections",
-  "settings.aiPrivacyPolicyDescription": "Understand how RemixAI processes your data."
+  "settings.aiPrivacyPolicyDescription": "Understand how RemixAI processes your data.",
+  "settings.ollamaConfig": "Ollama Configuration",
+  "settings.ollamaConfigDescription": "Configure Ollama endpoint for local AI model integration",
+  "settings.ollama-endpoint": "ENDPOINT URL"
 }

libs/remix-ai-core/src/index.ts

Lines changed: 2 additions & 2 deletions
@@ -9,7 +9,7 @@ import { RemoteInferencer } from './inferencers/remote/remoteInference'
 import { OllamaInferencer } from './inferencers/local/ollamaInferencer'
 import { MCPInferencer } from './inferencers/mcp/mcpInferencer'
 import { RemixMCPServer, createRemixMCPServer } from './remix-mcp-server'
-import { isOllamaAvailable, getBestAvailableModel, listModels, discoverOllamaHost } from './inferencers/local/ollama'
+import { isOllamaAvailable, getBestAvailableModel, listModels, discoverOllamaHost, resetOllamaHostOnSettingsChange } from './inferencers/local/ollama'
 import { FIMModelManager, FIMModelConfig, FIM_MODEL_CONFIGS } from './inferencers/local/fimModelConfig'
 import { ChatHistory } from './prompts/chat'
 import { downloadLatestReleaseExecutable } from './helpers/inferenceServerReleases'
@@ -18,7 +18,7 @@ import { mcpDefaultServersConfig } from './config/mcpDefaultServers'
 export {
   IModel, IModelResponse, ChatCommandParser,
   ModelType, DefaultModels, ICompletions, IParams, IRemoteModel, buildChatPrompt,
-  RemoteInferencer, OllamaInferencer, MCPInferencer, RemixMCPServer, isOllamaAvailable, getBestAvailableModel, listModels, discoverOllamaHost,
+  RemoteInferencer, OllamaInferencer, MCPInferencer, RemixMCPServer, isOllamaAvailable, getBestAvailableModel, listModels, discoverOllamaHost, resetOllamaHostOnSettingsChange,
   FIMModelManager, FIMModelConfig, FIM_MODEL_CONFIGS, createRemixMCPServer,
   InsertionParams, CompletionParams, GenerationParams, AssistantParams,
   ChatEntry, AIRequestType, ChatHistory, downloadLatestReleaseExecutable,

libs/remix-ai-core/src/inferencers/local/ollama.ts

Lines changed: 39 additions & 0 deletions
@@ -1,4 +1,5 @@
 import axios from 'axios';
+import { Registry } from '@remix-project/remix-lib';

 // Helper function to track events using MatomoManager instance
 function trackMatomoEvent(category: string, action: string, name?: string) {
@@ -14,15 +15,47 @@ function trackMatomoEvent(category: string, action: string, name?: string) {
 // default Ollama ports to check (11434 is the legacy/standard port)
 const OLLAMA_PORTS = [11434, 11435, 11436];
 const OLLAMA_BASE_HOST = 'http://localhost';
+const DEFAULT_OLLAMA_HOST = 'http://localhost:11434';

 let discoveredOllamaHost: string | null = null;

+function getConfiguredOllamaEndpoint(): string | null {
+  try {
+    const config = Registry.getInstance().get('config').api;
+    const configuredEndpoint = config.get('settings/ollama-endpoint');
+    if (configuredEndpoint && configuredEndpoint !== DEFAULT_OLLAMA_HOST) {
+      _paq.push(['trackEvent', 'ai', 'remixAI', 'ollama_using_configured_endpoint', configuredEndpoint]);
+      return configuredEndpoint;
+    }
+  } catch (error) {
+    _paq.push(['trackEvent', 'ai', 'remixAI', 'ollama_config_access_failed', error.message || 'unknown']);
+  }
+  return null;
+}
+
 export async function discoverOllamaHost(): Promise<string | null> {
   if (discoveredOllamaHost) {
     trackMatomoEvent('ai', 'remixAI', `ollama_host_cache_hit:${discoveredOllamaHost}`);
     return discoveredOllamaHost;
   }

+  // First, try to use the configured endpoint from settings
+  const configuredEndpoint = getConfiguredOllamaEndpoint();
+  if (configuredEndpoint) {
+    try {
+      const res = await axios.get(`${configuredEndpoint}/api/tags`, { timeout: 2000 });
+      if (res.status === 200) {
+        discoveredOllamaHost = configuredEndpoint;
+        _paq.push(['trackEvent', 'ai', 'remixAI', 'ollama_configured_endpoint_success', configuredEndpoint]);
+        return configuredEndpoint;
+      }
+    } catch (error) {
+      _paq.push(['trackEvent', 'ai', 'remixAI', 'ollama_configured_endpoint_failed', `${configuredEndpoint}:${error.message || 'unknown'}`]);
+      // Fall back to discovery if configured endpoint fails
+    }
+  }
+
+  // Fall back to port discovery if no configured endpoint or it failed
   for (const port of OLLAMA_PORTS) {
     const host = `${OLLAMA_BASE_HOST}:${port}`;
     trackMatomoEvent('ai', 'remixAI', `ollama_port_check:${port}`);
@@ -75,6 +108,12 @@ export function resetOllamaHost(): void {
   discoveredOllamaHost = null;
 }

+export function resetOllamaHostOnSettingsChange(): void {
+  // This function should be called when Ollama settings are updated
+  resetOllamaHost();
+  _paq.push(['trackEvent', 'ai', 'remixAI', 'ollama_reset_on_settings_change']);
+}
+
 export async function pullModel(modelName: string): Promise<void> {
   // in case the user wants to pull a model from registry
   trackMatomoEvent('ai', 'remixAI', `ollama_pull_model_start:${modelName}`);
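The new export pairs with discoverOllamaHost: clearing the cached host forces the next discovery pass to re-read 'settings/ollama-endpoint' before falling back to port scanning. A minimal usage sketch, not part of this commit; the handler name onOllamaEndpointSaved is hypothetical, while the imported helpers and their signatures come from the diff above.

// Hypothetical caller, illustrating the intended flow only.
import { discoverOllamaHost, resetOllamaHostOnSettingsChange } from '@remix/remix-ai-core';

async function onOllamaEndpointSaved(): Promise<void> {
  // Drop the cached host so discovery re-reads the configured endpoint.
  resetOllamaHostOnSettingsChange();

  // Re-run discovery: configured endpoint first, then the default local ports.
  const host = await discoverOllamaHost();
  if (host) {
    console.log(`Ollama reachable at ${host}`);
  } else {
    console.warn('No Ollama instance found at the configured endpoint or default ports');
  }
}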

libs/remix-ui/settings/src/lib/remix-ui-settings.tsx

Lines changed: 10 additions & 0 deletions
@@ -128,6 +128,16 @@ const settingsSections: SettingsSection[] = [
       action: 'link',
       link: 'https://remix-ide.readthedocs.io/en/latest/ai.html'
     }
+  },
+  {
+    name: 'ollama-config',
+    label: 'settings.ollamaConfig',
+    description: 'settings.ollamaConfigDescription',
+    type: 'toggle',
+    toggleUIOptions: [{
+      name: 'ollama-endpoint',
+      type: 'text'
+    }]
   }]
 },
 ...(mcpEnabled ? [{

libs/remix-ui/settings/src/lib/settingsReducer.ts

Lines changed: 28 additions & 0 deletions
@@ -18,12 +18,14 @@ const sindriAccessToken = config.get('settings/sindri-access-token') || ''
 const etherscanAccessToken = config.get('settings/etherscan-access-token') || ''
 const mcpServersEnable = config.get('settings/mcp/servers/enable') || false
 const mcpServerManagement = config.get('settings/mcp-server-management') || false
+const ollamaEndpoint = config.get('settings/ollama-endpoint') || 'http://localhost:11434'

 let githubConfig = config.get('settings/github-config') || false
 let ipfsConfig = config.get('settings/ipfs-config') || false
 let swarmConfig = config.get('settings/swarm-config') || false
 let sindriConfig = config.get('settings/sindri-config') || false
 let etherscanConfig = config.get('settings/etherscan-config') || false
+let ollamaConfig = config.get('settings/ollama-config') || false
 let generateContractMetadata = config.get('settings/generate-contract-metadata')
 let autoCompletion = config.get('settings/auto-completion')
 let showGas = config.get('settings/show-gas')
@@ -50,6 +52,10 @@ if (!etherscanConfig && etherscanAccessToken) {
   config.set('settings/etherscan-config', true)
   etherscanConfig = true
 }
+if (!ollamaConfig && ollamaEndpoint !== 'http://localhost:11434') {
+  config.set('settings/ollama-config', true)
+  ollamaConfig = true
+}
 if (typeof generateContractMetadata !== 'boolean') {
   config.set('settings/generate-contract-metadata', true)
   generateContractMetadata = true
@@ -188,12 +194,21 @@ export const initialState: SettingsState = {
     value: '',
     isLoading: false
   },
+<<<<<<< HEAD
   'mcp/servers/enable': {
     value: mcpServersEnable,
     isLoading: false
   },
   'mcp-server-management': {
     value: mcpServerManagement,
+=======
+  'ollama-config': {
+    value: ollamaConfig,
+    isLoading: false
+  },
+  'ollama-endpoint': {
+    value: ollamaEndpoint,
+>>>>>>> 0571313a4a (initial endpoint)
     isLoading: false
   },
   toaster: {
@@ -206,6 +221,19 @@ export const settingReducer = (state: SettingsState, action: SettingsActions): S
   switch (action.type) {
     case 'SET_VALUE':
       config.set('settings/' + action.payload.name, action.payload.value)
+
+      // Reset Ollama host cache when endpoint is changed
+      if (action.payload.name === 'ollama-endpoint') {
+        try {
+          // Check if the resetOllamaHostOnSettingsChange function is available globally
+          if (typeof window !== 'undefined' && (window as any).resetOllamaHostOnSettingsChange) {
+            (window as any).resetOllamaHostOnSettingsChange();
+          }
+        } catch (error) {
+          // Ignore errors - Ollama functionality is optional
+        }
+      }
+
       return { ...state, [action.payload.name]: { ...state[action.payload.name], value: action.payload.value, isLoading: false } }
     case 'SET_LOADING':
       return { ...state, [action.payload.name]: { ...state[action.payload.name], isLoading: true } }

libs/remix-ui/settings/src/types/index.ts

Lines changed: 2 additions & 0 deletions
@@ -115,6 +115,8 @@ export interface SettingsState {
   'ai-privacy-policy': ConfigState,
   'mcp/servers/enable': ConfigState,
   'mcp-server-management': ConfigState,
+  'ollama-config': ConfigState,
+  'ollama-endpoint': ConfigState,
   toaster: ConfigState
 }
 export interface SettingsActionPayloadTypes {

0 commit comments
