diff --git a/.gitignore b/.gitignore index fff83ce6d98..83c04a65772 100644 --- a/.gitignore +++ b/.gitignore @@ -79,12 +79,8 @@ apps/remixdesktop/log_input_signals_new.txt logs apps/remix-ide-e2e/src/extensions/chrome/metamask apps/remix-ide-e2e/tmp/ -apps/remix-ide-e2e/tmp/ # IDE - Cursor -<<<<<<< HEAD -.cursor/ -======= .cursor/ PR_MESSAGE.md ->>>>>>> master +apps/remix-ide-e2e/tmp/ diff --git a/apps/remix-ide/src/app/plugins/remixAIPlugin.tsx b/apps/remix-ide/src/app/plugins/remixAIPlugin.tsx index c4b1e67f5f0..c03cc584b73 100644 --- a/apps/remix-ide/src/app/plugins/remixAIPlugin.tsx +++ b/apps/remix-ide/src/app/plugins/remixAIPlugin.tsx @@ -1,7 +1,7 @@ import * as packageJson from '../../../../../package.json' import { Plugin } from '@remixproject/engine'; import { trackMatomoEvent } from '@remix-api' -import { IModel, RemoteInferencer, IRemoteModel, IParams, GenerationParams, AssistantParams, CodeExplainAgent, SecurityAgent, CompletionParams, OllamaInferencer, isOllamaAvailable, getBestAvailableModel } from '@remix/remix-ai-core'; +import { IModel, RemoteInferencer, IRemoteModel, IParams, GenerationParams, AssistantParams, CodeExplainAgent, SecurityAgent, CompletionParams, OllamaInferencer, isOllamaAvailable, getBestAvailableModel, resetOllamaHostOnSettingsChange } from '@remix/remix-ai-core'; import { CodeCompletionAgent, ContractAgent, workspaceAgent, IContextType, mcpDefaultServersConfig } from '@remix/remix-ai-core'; import { MCPInferencer } from '@remix/remix-ai-core'; import { IMCPServer, IMCPConnectionStatus } from '@remix/remix-ai-core'; @@ -63,6 +63,8 @@ export class RemixAIPlugin extends Plugin { } onActivation(): void { + // Reset the cached Ollama host so any endpoint configured in settings is re-discovered on activation + resetOllamaHostOnSettingsChange(); if (this.isOnDesktop) { this.useRemoteInferencer = true diff --git a/apps/remix-ide/src/app/tabs/locales/en/settings.json b/apps/remix-ide/src/app/tabs/locales/en/settings.json index 5ac3129d765..8a7ddd83c57 100644 ---
a/apps/remix-ide/src/app/tabs/locales/en/settings.json +++ b/apps/remix-ide/src/app/tabs/locales/en/settings.json @@ -69,5 +69,8 @@ "settings.mcpServerConfigurationDescription": "Connect to Model Context Protocol servers for enhanced AI context", "settings.enableMCPEnhancement": "Enable MCP Integration", "settings.enableMCPEnhancementDescription": "Manage your MCP server connections", - "settings.aiPrivacyPolicyDescription": "Understand how RemixAI processes your data." + "settings.aiPrivacyPolicyDescription": "Understand how RemixAI processes your data.", + "settings.ollamaConfig": "Ollama URL Configuration", + "settings.ollamaConfigDescription": "Configure Ollama endpoint for local AI model integration", + "settings.ollama-endpoint": "ENDPOINT URL" } diff --git a/libs/remix-ai-core/src/index.ts b/libs/remix-ai-core/src/index.ts index 62ca148ef0a..ba298f68728 100644 --- a/libs/remix-ai-core/src/index.ts +++ b/libs/remix-ai-core/src/index.ts @@ -9,7 +9,7 @@ import { RemoteInferencer } from './inferencers/remote/remoteInference' import { OllamaInferencer } from './inferencers/local/ollamaInferencer' import { MCPInferencer } from './inferencers/mcp/mcpInferencer' import { RemixMCPServer, createRemixMCPServer } from './remix-mcp-server' -import { isOllamaAvailable, getBestAvailableModel, listModels, discoverOllamaHost } from './inferencers/local/ollama' +import { isOllamaAvailable, getBestAvailableModel, listModels, discoverOllamaHost, resetOllamaHostOnSettingsChange } from './inferencers/local/ollama' import { FIMModelManager, FIMModelConfig, FIM_MODEL_CONFIGS } from './inferencers/local/fimModelConfig' import { ChatHistory } from './prompts/chat' import { downloadLatestReleaseExecutable } from './helpers/inferenceServerReleases' @@ -18,7 +18,7 @@ import { mcpDefaultServersConfig } from './config/mcpDefaultServers' export { IModel, IModelResponse, ChatCommandParser, ModelType, DefaultModels, ICompletions, IParams, IRemoteModel, buildChatPrompt, - RemoteInferencer, 
OllamaInferencer, MCPInferencer, RemixMCPServer, isOllamaAvailable, getBestAvailableModel, listModels, discoverOllamaHost, + RemoteInferencer, OllamaInferencer, MCPInferencer, RemixMCPServer, isOllamaAvailable, getBestAvailableModel, listModels, discoverOllamaHost, resetOllamaHostOnSettingsChange, FIMModelManager, FIMModelConfig, FIM_MODEL_CONFIGS, createRemixMCPServer, InsertionParams, CompletionParams, GenerationParams, AssistantParams, ChatEntry, AIRequestType, ChatHistory, downloadLatestReleaseExecutable, diff --git a/libs/remix-ai-core/src/inferencers/local/ollama.ts b/libs/remix-ai-core/src/inferencers/local/ollama.ts index 0702710aacb..64de03f1312 100644 --- a/libs/remix-ai-core/src/inferencers/local/ollama.ts +++ b/libs/remix-ai-core/src/inferencers/local/ollama.ts @@ -1,60 +1,89 @@ import axios from 'axios'; - -// Helper function to track events using MatomoManager instance -function trackMatomoEvent(category: string, action: string, name?: string) { - try { - if (typeof window !== 'undefined' && (window as any)._matomoManagerInstance) { - (window as any)._matomoManagerInstance.trackEvent(category, action, name) - } - } catch (error) { - // Silent fail for tracking - } -} +import { Registry } from '@remix-project/remix-lib'; +import { trackMatomoEvent } from '@remix-api' // default Ollama ports to check (11434 is the legacy/standard port) const OLLAMA_PORTS = [11434, 11435, 11436]; const OLLAMA_BASE_HOST = 'http://localhost'; +const DEFAULT_OLLAMA_HOST = 'http://localhost:11434'; let discoveredOllamaHost: string | null = null; +function getConfiguredOllamaEndpoint(): string | null { + const filemanager = Registry.getInstance().get('filemanager').api; + try { + const config = Registry.getInstance().get('config').api + const configuredEndpoint = config.get('settings/ollama-endpoint'); + if (configuredEndpoint && configuredEndpoint !== DEFAULT_OLLAMA_HOST) { + trackMatomoEvent(filemanager, { category: 'ai', action: 'remixAI', name: 
'ollama_using_configured_endpoint', value: configuredEndpoint }); + return configuredEndpoint; + } + } catch (error) { + trackMatomoEvent(filemanager, { category: 'ai', action: 'remixAI', name: 'ollama_config_access_failed', value: error.message || 'unknown' }); + } + return null; +} + export async function discoverOllamaHost(): Promise { + const filemanager = Registry.getInstance().get('filemanager').api; if (discoveredOllamaHost) { - trackMatomoEvent('ai', 'remixAI', `ollama_host_cache_hit:${discoveredOllamaHost}`); + trackMatomoEvent(filemanager, { category: 'ai', action: 'remixAI', name: `ollama_host_cache_hit:${discoveredOllamaHost}` }) return discoveredOllamaHost; } + // First, try to use the configured endpoint from settings + const configuredEndpoint = getConfiguredOllamaEndpoint(); + if (configuredEndpoint) { + try { + const res = await axios.get(`${configuredEndpoint}/api/tags`, { timeout: 2000 }); + if (res.status === 200) { + discoveredOllamaHost = configuredEndpoint; + trackMatomoEvent(filemanager, { category: 'ai', action: 'remixAI', name: 'ollama_configured_endpoint_success', value: configuredEndpoint }); + return configuredEndpoint; + } + return null; + } catch (error) { + trackMatomoEvent(filemanager, { category: 'ai', action: 'remixAI', name: 'ollama_configured_endpoint_failed', value: `${configuredEndpoint}:${error.message || 'unknown'}` }); + // An explicitly configured endpoint that is unreachable is treated as Ollama unavailable; port discovery is intentionally not attempted + return null; + } + } + + // Fall back to port discovery if no configured endpoint for (const port of OLLAMA_PORTS) { const host = `${OLLAMA_BASE_HOST}:${port}`; - trackMatomoEvent('ai', 'remixAI', `ollama_port_check:${port}`); + trackMatomoEvent(filemanager, { category: 'ai', action: 'remixAI', name: `ollama_port_check:${port}` }); try { const res = await axios.get(`${host}/api/tags`, { timeout: 2000 }); if (res.status === 200) { discoveredOllamaHost = host; - trackMatomoEvent('ai', 'remixAI', `ollama_host_discovered_success:${host}`); +
trackMatomoEvent(filemanager, { category: 'ai', action: 'remixAI', name: `ollama_host_discovered_success:${host}` }); return host; } } catch (error) { - trackMatomoEvent('ai', 'remixAI', `ollama_port_connection_failed:${port}:${error.message || 'unknown'}`); + trackMatomoEvent(filemanager, { category: 'ai', action: 'remixAI', name: `ollama_port_connection_failed:${port}:${error.message || 'unknown'}` }); continue; // next port } } - trackMatomoEvent('ai', 'remixAI', 'ollama_host_discovery_failed:no_ports_available'); + trackMatomoEvent(filemanager, { category: 'ai', action: 'remixAI', name: 'ollama_host_discovery_failed:no_ports_available' }); return null; } export async function isOllamaAvailable(): Promise { - trackMatomoEvent('ai', 'remixAI', 'ollama_availability_check:checking'); + const filemanager = Registry.getInstance().get('filemanager').api; + trackMatomoEvent(filemanager, { category: 'ai', action: 'remixAI', name: 'ollama_availability_check:checking' }); const host = await discoverOllamaHost(); const isAvailable = host !== null; - trackMatomoEvent('ai', 'remixAI', `ollama_availability_result:available:${isAvailable}`); + trackMatomoEvent(filemanager, { category: 'ai', action: 'remixAI', name: `ollama_availability_result:available:${isAvailable}` }); return isAvailable; } export async function listModels(): Promise { - trackMatomoEvent('ai', 'remixAI', 'ollama_list_models_start:fetching'); + const filemanager = Registry.getInstance().get('filemanager').api; + trackMatomoEvent(filemanager, { category: 'ai', action: 'remixAI', name: 'ollama_list_models_start:fetching' }); const host = await discoverOllamaHost(); if (!host) { - trackMatomoEvent('ai', 'remixAI', 'ollama_list_models_failed:no_host'); + trackMatomoEvent(filemanager, { category: 'ai', action: 'remixAI', name: 'ollama_list_models_failed:no_host' }); throw new Error('Ollama is not available'); } @@ -71,16 +100,25 @@ export function getOllamaHost(): string | null { } export function 
resetOllamaHost(): void { - trackMatomoEvent('ai', 'remixAI', `ollama_reset_host:${discoveredOllamaHost || 'null'}`); + const fileManager = Registry.getInstance().get('filemanager').api; + trackMatomoEvent(fileManager, { category: 'ai', action: 'remixAI', name: `ollama_reset_host:${discoveredOllamaHost || 'null'}` }); discoveredOllamaHost = null; } +export function resetOllamaHostOnSettingsChange(): void { + const fileManager = Registry.getInstance().get('filemanager').api; + // This function should be called when Ollama settings are updated + resetOllamaHost(); + trackMatomoEvent(fileManager, { category: 'ai', action: 'remixAI', name: 'ollama_reset_on_settings_change' }); +} + export async function pullModel(modelName: string): Promise { + const filemanager = Registry.getInstance().get('filemanager').api; // in case the user wants to pull a model from registry - trackMatomoEvent('ai', 'remixAI', `ollama_pull_model_start:${modelName}`); + trackMatomoEvent(filemanager, { category: 'ai', action: 'remixAI', name: `ollama_pull_model_start:${modelName}` }); const host = await discoverOllamaHost(); if (!host) { - trackMatomoEvent('ai', 'remixAI', `ollama_pull_model_failed:${modelName}|no_host`); + trackMatomoEvent(filemanager, { category: 'ai', action: 'remixAI', name: `ollama_pull_model_failed:${modelName}|no_host` }); throw new Error('Ollama is not available'); } @@ -88,9 +126,9 @@ export async function pullModel(modelName: string): Promise { const startTime = Date.now(); await axios.post(`${host}/api/pull`, { name: modelName }); const duration = Date.now() - startTime; - trackMatomoEvent('ai', 'remixAI', `ollama_pull_model_success:${modelName}|duration:${duration}ms`); + trackMatomoEvent(filemanager, { category: 'ai', action: 'remixAI', name: `ollama_pull_model_success:${modelName}|duration:${duration}ms` }); } catch (error) { - trackMatomoEvent('ai', 'remixAI', `ollama_pull_model_error:${modelName}|${error.message || 'unknown'}`); + trackMatomoEvent(filemanager, { 
category: 'ai', action: 'remixAI', name: `ollama_pull_model_error:${modelName}|${error.message || 'unknown'}` }); console.error('Error pulling model:', error); throw new Error(`Failed to pull model: ${modelName}`); } @@ -106,7 +144,8 @@ export async function validateModel(modelName: string): Promise { } export async function getBestAvailableModel(): Promise { - trackMatomoEvent('ai', 'remixAI', 'ollama_get_best'); + const filemanager = Registry.getInstance().get('filemanager').api; + trackMatomoEvent(filemanager, { category: 'ai', action: 'remixAI', name: 'ollama_get_best' }); try { const models = await listModels(); if (models.length === 0) return null; @@ -125,7 +164,7 @@ export async function getBestAvailableModel(): Promise { // TODO get model stats and get best model return models[0]; } catch (error) { - trackMatomoEvent('ai', 'remixAI', `ollama_get_best_model_error:${error.message || 'unknown'}`); + trackMatomoEvent(filemanager, { category: 'ai', action: 'remixAI', name: `ollama_get_best_model_error:${error.message || 'unknown'}` }); console.error('Error getting best available model:', error); return null; } diff --git a/libs/remix-ui/remix-ai-assistant/src/components/remix-ui-remix-ai-assistant.tsx b/libs/remix-ui/remix-ai-assistant/src/components/remix-ui-remix-ai-assistant.tsx index ab795655f4e..c76d3a2e45f 100644 --- a/libs/remix-ui/remix-ai-assistant/src/components/remix-ui-remix-ai-assistant.tsx +++ b/libs/remix-ui/remix-ai-assistant/src/components/remix-ui-remix-ai-assistant.tsx @@ -645,7 +645,7 @@ export const RemixUiRemixAiAssistant = React.forwardRef< setMessages(prev => [...prev, { id: crypto.randomUUID(), role: 'assistant', - content: '**Ollama is not available.**\n\nTo use Ollama with Remix IDE:\n\n1. **Install Ollama**: Visit [ollama.ai](https://ollama.ai) to download\n2. **Start Ollama**: Run `ollama serve` in your terminal\n3. **Install a model**: Run `ollama pull codestral:latest`\n4. 
**Configure CORS**: Set `OLLAMA_ORIGINS=https://remix.ethereum.org`\n\nSee the [Ollama Setup Guide](https://github.com/ethereum/remix-project/blob/master/OLLAMA_SETUP.md) for detailed instructions.\n\n*Switching back to previous model for now.*', + content: '**Ollama is not available.**\n\nTo use Ollama with Remix IDE:\n\n1. **Install Ollama**: Visit [ollama.ai](https://ollama.ai) to download\n2. **Start Ollama**: Run `ollama serve` in your terminal\n3. **Install a model**: Run `ollama pull codestral:latest`\n4. **Configure CORS**: e.g. `OLLAMA_ORIGINS=https://remix.ethereum.org ollama serve`\n\nSee the [Ollama Setup Guide](https://github.com/ethereum/remix-project/blob/master/OLLAMA_SETUP.md) for detailed instructions.\n\n*Switching back to previous model for now.*', timestamp: Date.now(), sentiment: 'none' }]) @@ -662,7 +662,7 @@ export const RemixUiRemixAiAssistant = React.forwardRef< setMessages(prev => [...prev, { id: crypto.randomUUID(), role: 'assistant', - content: `**Failed to connect to Ollama.**\n\nError: ${error.message || 'Unknown error'}\n\nPlease ensure:\n- Ollama is running (\`ollama serve\`)\n- CORS is configured for Remix IDE\n- At least one model is installed\n\nSee the [Ollama Setup Guide](https://github.com/ethereum/remix-project/blob/master/OLLAMA_SETUP.md) for help.\n\n*Switching back to previous model.*`, + content: `**Failed to connect to Ollama.**\n\nError: ${error.message || 'Unknown error'}\n\nPlease ensure:\n- Ollama is running (\`ollama serve\`)\n- The Ollama CORS setting is configured for Remix IDE.
e.g \`OLLAMA_ORIGINS=https://remix.ethereum.org ollama serve\` Please see [Ollama Setup Guide](https://github.com/ethereum/remix-project/blob/master/OLLAMA_SETUP.md) for detailed instructions.\n- At least one model is installed\n\nSee the [Ollama Setup Guide](https://github.com/ethereum/remix-project/blob/master/OLLAMA_SETUP.md) for help.\n\n*Switching back to previous model.*`, timestamp: Date.now(), sentiment: 'none' }]) diff --git a/libs/remix-ui/settings/src/lib/remix-ui-settings.tsx b/libs/remix-ui/settings/src/lib/remix-ui-settings.tsx index 31f4bf090ab..cd03c623d40 100644 --- a/libs/remix-ui/settings/src/lib/remix-ui-settings.tsx +++ b/libs/remix-ui/settings/src/lib/remix-ui-settings.tsx @@ -128,6 +128,16 @@ const settingsSections: SettingsSection[] = [ action: 'link', link: 'https://remix-ide.readthedocs.io/en/latest/ai.html' } + }, + { + name: 'ollama-config', + label: 'settings.ollamaConfig', + description: 'settings.ollamaConfigDescription', + type: 'toggle', + toggleUIOptions: [{ + name: 'ollama-endpoint', + type: 'text' + }] }] }, ...(mcpEnabled ? [{ diff --git a/libs/remix-ui/settings/src/lib/settingsReducer.ts b/libs/remix-ui/settings/src/lib/settingsReducer.ts index 714d91bbb8b..a983bd91b2c 100644 --- a/libs/remix-ui/settings/src/lib/settingsReducer.ts +++ b/libs/remix-ui/settings/src/lib/settingsReducer.ts @@ -1,6 +1,6 @@ import { Registry } from '@remix-project/remix-lib' import { SettingsActions, SettingsState } from '../types' - +import { resetOllamaHostOnSettingsChange } from '@remix/remix-ai-core'; const config = Registry.getInstance().get('config').api const settingsConfig = Registry.getInstance().get('settingsConfig').api const defaultTheme = config.get('settings/theme') ? 
settingsConfig.themes.find((theme) => theme.name.toLowerCase() === config.get('settings/theme').toLowerCase()) : settingsConfig.themes[0] @@ -18,12 +18,14 @@ const sindriAccessToken = config.get('settings/sindri-access-token') || '' const etherscanAccessToken = config.get('settings/etherscan-access-token') || '' const mcpServersEnable = config.get('settings/mcp/servers/enable') || false const mcpServerManagement = config.get('settings/mcp-server-management') || false +const ollamaEndpoint = config.get('settings/ollama-endpoint') || 'http://localhost:11434' let githubConfig = config.get('settings/github-config') || false let ipfsConfig = config.get('settings/ipfs-config') || false let swarmConfig = config.get('settings/swarm-config') || false let sindriConfig = config.get('settings/sindri-config') || false let etherscanConfig = config.get('settings/etherscan-config') || false +let ollamaConfig = config.get('settings/ollama-config') || false let generateContractMetadata = config.get('settings/generate-contract-metadata') let autoCompletion = config.get('settings/auto-completion') let showGas = config.get('settings/show-gas') @@ -50,6 +52,10 @@ if (!etherscanConfig && etherscanAccessToken) { config.set('settings/etherscan-config', true) etherscanConfig = true } +if (!ollamaConfig && ollamaEndpoint !== 'http://localhost:11434') { + config.set('settings/ollama-config', true) + ollamaConfig = true +} if (typeof generateContractMetadata !== 'boolean') { config.set('settings/generate-contract-metadata', true) generateContractMetadata = true @@ -196,6 +202,14 @@ export const initialState: SettingsState = { value: mcpServerManagement, isLoading: false }, + 'ollama-config': { + value: ollamaConfig, + isLoading: false + }, + 'ollama-endpoint': { + value: ollamaEndpoint, + isLoading: false + }, toaster: { value: '', isLoading: false @@ -206,6 +220,15 @@ export const settingReducer = (state: SettingsState, action: SettingsActions): S switch (action.type) { case 'SET_VALUE': 
config.set('settings/' + action.payload.name, action.payload.value) + // Reset Ollama host cache when endpoint is changed + if (action.payload.name === 'ollama-endpoint') { + try { + resetOllamaHostOnSettingsChange(); + } catch (error) { + // Ignore errors - Ollama functionality is optional + } + } + return { ...state, [action.payload.name]: { ...state[action.payload.name], value: action.payload.value, isLoading: false } } case 'SET_LOADING': return { ...state, [action.payload.name]: { ...state[action.payload.name], isLoading: true } } diff --git a/libs/remix-ui/settings/src/types/index.ts b/libs/remix-ui/settings/src/types/index.ts index 4473c994f6e..18e3f066e12 100644 --- a/libs/remix-ui/settings/src/types/index.ts +++ b/libs/remix-ui/settings/src/types/index.ts @@ -115,6 +115,8 @@ export interface SettingsState { 'ai-privacy-policy': ConfigState, 'mcp/servers/enable': ConfigState, 'mcp-server-management': ConfigState, + 'ollama-config': ConfigState, + 'ollama-endpoint': ConfigState, toaster: ConfigState } export interface SettingsActionPayloadTypes {