diff --git a/.env.example b/.env.example index 6527a59..af9e56b 100644 --- a/.env.example +++ b/.env.example @@ -15,10 +15,13 @@ REDIS_URI=redis://localhost:6379 AUTH_CACHE_KEY_SECRET=replace-with-strong-random-secret AUTH_CACHE_TTL_SECONDS=180 -# Anthropic Claude API (required) -ANTHROPIC_API_KEY=sk-ant-your-key-here -ANTHROPIC_MODEL=claude-sonnet-4-20250514 -ANTHROPIC_SUMMARY_MODEL=claude-haiku-4-5-20251001 +# AI Provider - OpenRouter (required) +AI_API_KEY=sk-or-your-key-here +AI_MODEL=anthropic/claude-sonnet-4-5-20250929 +AI_SUMMARY_MODEL=anthropic/claude-haiku-4-5-20251001 +AI_BASE_URL=https://openrouter.ai/api/v1 +AI_APP_NAME=vultisig-agent +AI_APP_URL=https://vultisig.com # Conversation context window CONTEXT_WINDOW_SIZE=20 diff --git a/README.md b/README.md index e6c855d..17fb573 100644 --- a/README.md +++ b/README.md @@ -1,6 +1,6 @@ # agent-backend -AI Chat Agent backend service for Vultisig mobile apps. This service handles natural language conversations using Anthropic Claude and coordinates with existing Vultisig plugins (app-recurring, feeplugin) via the verifier. +AI Chat Agent backend service for Vultisig mobile apps. This service handles natural language conversations using LLMs via OpenRouter and coordinates with existing Vultisig plugins (app-recurring, feeplugin) via the verifier. ## Prerequisites @@ -19,8 +19,12 @@ AI Chat Agent backend service for Vultisig mobile apps. 
This service handles nat | `REDIS_URI` | Yes | - | Redis connection URI | | `AUTH_CACHE_KEY_SECRET` | Yes | - | HMAC secret for auth cache key derivation | | `AUTH_CACHE_TTL_SECONDS` | No | `180` | Auth cache TTL (seconds) | -| `ANTHROPIC_API_KEY` | Yes | - | Anthropic Claude API key | -| `ANTHROPIC_MODEL` | No | `claude-sonnet-4-20250514` | Claude model to use | +| `AI_API_KEY` | Yes | - | OpenRouter API key | +| `AI_MODEL` | No | `anthropic/claude-sonnet-4-5-20250929` | Model to use (OpenRouter format) | +| `AI_SUMMARY_MODEL` | No | `anthropic/claude-haiku-4-5-20251001` | Model for conversation summarization | +| `AI_BASE_URL` | No | `https://openrouter.ai/api/v1` | AI provider base URL | +| `AI_APP_NAME` | No | `vultisig-agent` | App name sent to OpenRouter | +| `AI_APP_URL` | No | - | App URL sent to OpenRouter | | `VERIFIER_URL` | Yes | - | Verifier service base URL | | `LOG_FORMAT` | No | `json` | Log format (`json` or `text`) | @@ -32,7 +36,7 @@ AI Chat Agent backend service for Vultisig mobile apps. This service handles nat export DATABASE_DSN="postgres://user:pass@localhost:5432/agent?sslmode=disable" export REDIS_URI="redis://localhost:6379" export AUTH_CACHE_KEY_SECRET="replace-with-strong-random-secret" -export ANTHROPIC_API_KEY="sk-ant-..." +export AI_API_KEY="sk-or-v1-..." export VERIFIER_URL="http://localhost:8080" ``` @@ -63,7 +67,7 @@ Run with Docker: docker run -p 8080:8080 \ -e DATABASE_DSN="postgres://..." \ -e REDIS_URI="redis://..." \ - -e ANTHROPIC_API_KEY="sk-ant-..." \ + -e AI_API_KEY="sk-or-v1-..." 
\ -e VERIFIER_URL="http://verifier:8080" \ agent-backend:latest ``` @@ -117,7 +121,7 @@ internal/ service/ # Business logic layer storage/postgres/ # PostgreSQL repositories + migrations cache/redis/ # Redis caching - ai/anthropic/ # Anthropic Claude integration + ai/ # AI client (OpenRouter-compatible) config/ # Configuration loading types/ # Shared types ``` diff --git a/cmd/server/main.go b/cmd/server/main.go index 27dafb0..e05fced 100644 --- a/cmd/server/main.go +++ b/cmd/server/main.go @@ -13,7 +13,7 @@ import ( "github.com/labstack/echo/v4/middleware" "github.com/sirupsen/logrus" - "github.com/vultisig/agent-backend/internal/ai/anthropic" + "github.com/vultisig/agent-backend/internal/ai" "github.com/vultisig/agent-backend/internal/api" "github.com/vultisig/agent-backend/internal/cache/redis" "github.com/vultisig/agent-backend/internal/config" @@ -58,8 +58,8 @@ func main() { } defer redisClient.Close() - // Initialize Anthropic client - anthropicClient := anthropic.NewClient(cfg.Anthropic.APIKey, cfg.Anthropic.Model) + // Initialize AI client + aiClient := ai.NewClient(cfg.AI.APIKey, cfg.AI.Model, cfg.AI.BaseURL, cfg.AI.AppName, cfg.AI.AppURL) // Initialize plugin service (skills fetched dynamically on demand) pluginService := plugin.NewService(cfg.Verifier.URL, redisClient, logger) @@ -80,7 +80,7 @@ func main() { } // Initialize agent service - agentService := agent.NewAgentService(anthropicClient, msgRepo, convRepo, memRepo, redisClient, verifierClient, pluginService, swapTxBuilder, logger, cfg.Anthropic.SummaryModel, cfg.Context) + agentService := agent.NewAgentService(aiClient, msgRepo, convRepo, memRepo, redisClient, verifierClient, pluginService, swapTxBuilder, logger, cfg.AI.SummaryModel, cfg.Context) // Initialize API server server := api.NewServer( diff --git a/deploy/01_server.yaml b/deploy/01_server.yaml index 9d2ed34..0be68f4 100644 --- a/deploy/01_server.yaml +++ b/deploy/01_server.yaml @@ -31,17 +31,27 @@ spec: value: "80" - name: LOG_FORMAT 
value: "json" - # --- Anthropic config (from ConfigMap) --- - - name: ANTHROPIC_MODEL + # --- AI config (from ConfigMap) --- + - name: AI_MODEL valueFrom: configMapKeyRef: name: agent - key: anthropic-model - - name: ANTHROPIC_SUMMARY_MODEL + key: ai-model + - name: AI_SUMMARY_MODEL valueFrom: configMapKeyRef: name: agent - key: anthropic-summary-model + key: ai-summary-model + - name: AI_BASE_URL + valueFrom: + configMapKeyRef: + name: agent + key: ai-base-url + - name: AI_APP_NAME + valueFrom: + configMapKeyRef: + name: agent + key: ai-app-name # --- Context window config (from ConfigMap) --- - name: CONTEXT_WINDOW_SIZE valueFrom: @@ -75,10 +85,10 @@ spec: secretKeyRef: name: redis key: uri - - name: ANTHROPIC_API_KEY + - name: AI_API_KEY valueFrom: secretKeyRef: - name: anthropic + name: ai-provider key: api-key - name: AUTH_CACHE_KEY_SECRET valueFrom: diff --git a/deploy/dev/01_agent.yaml b/deploy/dev/01_agent.yaml index 93dbfd9..86e76cd 100644 --- a/deploy/dev/01_agent.yaml +++ b/deploy/dev/01_agent.yaml @@ -3,8 +3,10 @@ kind: ConfigMap metadata: name: agent data: - anthropic-model: "claude-sonnet-4-20250514" - anthropic-summary-model: "claude-haiku-4-5-20251001" + ai-model: "anthropic/claude-sonnet-4-5-20250929" + ai-summary-model: "anthropic/claude-haiku-4-5-20251001" + ai-base-url: "https://openrouter.ai/api/v1" + ai-app-name: "vultisig-agent" context-window-size: "20" context-summarize-trigger: "30" context-summary-max-tokens: "512" diff --git a/deploy/prod/01_agent.yaml b/deploy/prod/01_agent.yaml index 93dbfd9..86e76cd 100644 --- a/deploy/prod/01_agent.yaml +++ b/deploy/prod/01_agent.yaml @@ -3,8 +3,10 @@ kind: ConfigMap metadata: name: agent data: - anthropic-model: "claude-sonnet-4-20250514" - anthropic-summary-model: "claude-haiku-4-5-20251001" + ai-model: "anthropic/claude-sonnet-4-5-20250929" + ai-summary-model: "anthropic/claude-haiku-4-5-20251001" + ai-base-url: "https://openrouter.ai/api/v1" + ai-app-name: "vultisig-agent" context-window-size: 
"20" context-summarize-trigger: "30" context-summary-max-tokens: "512" diff --git a/internal/ai/anthropic/client.go b/internal/ai/client.go similarity index 91% rename from internal/ai/anthropic/client.go rename to internal/ai/client.go index dc38a8e..2471318 100644 --- a/internal/ai/anthropic/client.go +++ b/internal/ai/client.go @@ -1,4 +1,4 @@ -package anthropic +package ai import ( "bufio" @@ -13,19 +13,19 @@ import ( ) const ( - defaultBaseURL = "https://api.anthropic.com/v1" defaultMaxTokens = 4096 - apiVersion = "2023-06-01" maxRetries = 3 baseRetryDelay = 1 * time.Second ) -// Client is an Anthropic Claude API client. +// Client is an OpenRouter-compatible AI API client. type Client struct { apiKey string model string httpClient *http.Client baseURL string + appName string + appURL string } // Message represents a simple conversation message with string content. @@ -55,14 +55,14 @@ type ToolResultBlock struct { IsError bool `json:"is_error,omitempty"` } -// Tool represents a tool that Claude can use. +// Tool represents a tool that the model can use. type Tool struct { Name string `json:"name"` Description string `json:"description"` InputSchema any `json:"input_schema"` } -// ToolChoice specifies how Claude should use tools. +// ToolChoice specifies how the model should use tools. type ToolChoice struct { Type string `json:"type"` // "auto", "any", or "tool" Name string `json:"name,omitempty"` // Required when type is "tool" @@ -106,7 +106,7 @@ type Usage struct { OutputTokens int `json:"output_tokens"` } -// APIError represents an error from the Anthropic API. +// APIError represents an error from the AI API. type APIError struct { Type string `json:"type"` Message string `json:"message"` @@ -114,22 +114,24 @@ type APIError struct { } func (e *APIError) Error() string { - return fmt.Sprintf("anthropic: %s: %s", e.Type, e.Message) + return fmt.Sprintf("ai: %s: %s", e.Type, e.Message) } -// NewClient creates a new Anthropic client. 
-func NewClient(apiKey, model string) *Client { +// NewClient creates a new AI client. +func NewClient(apiKey, model, baseURL, appName, appURL string) *Client { return &Client{ apiKey: apiKey, model: model, - baseURL: defaultBaseURL, + baseURL: baseURL, + appName: appName, + appURL: appURL, httpClient: &http.Client{ Timeout: 90 * time.Second, }, } } -// SendMessage sends a message to Claude and returns the response. +// SendMessage sends a message to the model and returns the response. func (c *Client) SendMessage(ctx context.Context, req *Request) (*Response, error) { if req.Model == "" { req.Model = c.model @@ -175,8 +177,13 @@ func (c *Client) doRequest(ctx context.Context, body []byte) (*Response, error) } httpReq.Header.Set("Content-Type", "application/json") - httpReq.Header.Set("x-api-key", c.apiKey) - httpReq.Header.Set("anthropic-version", apiVersion) + httpReq.Header.Set("Authorization", "Bearer "+c.apiKey) + if c.appName != "" { + httpReq.Header.Set("X-Title", c.appName) + } + if c.appURL != "" { + httpReq.Header.Set("HTTP-Referer", c.appURL) + } resp, err := c.httpClient.Do(httpReq) if err != nil { @@ -273,8 +280,13 @@ func (c *Client) SendMessageStream(ctx context.Context, req *Request, callback S } httpReq.Header.Set("Content-Type", "application/json") - httpReq.Header.Set("x-api-key", c.apiKey) - httpReq.Header.Set("anthropic-version", apiVersion) + httpReq.Header.Set("Authorization", "Bearer "+c.apiKey) + if c.appName != "" { + httpReq.Header.Set("X-Title", c.appName) + } + if c.appURL != "" { + httpReq.Header.Set("HTTP-Referer", c.appURL) + } resp, err := c.httpClient.Do(httpReq) if err != nil { diff --git a/internal/ai/anthropic/stream_parser.go b/internal/ai/stream_parser.go similarity index 99% rename from internal/ai/anthropic/stream_parser.go rename to internal/ai/stream_parser.go index 70be135..0f104ef 100644 --- a/internal/ai/anthropic/stream_parser.go +++ b/internal/ai/stream_parser.go @@ -1,4 +1,4 @@ -package anthropic +package ai import ( 
"strings" diff --git a/internal/ai/anthropic/stream_parser_test.go b/internal/ai/stream_parser_test.go similarity index 99% rename from internal/ai/anthropic/stream_parser_test.go rename to internal/ai/stream_parser_test.go index ad2ca70..66bf76b 100644 --- a/internal/ai/anthropic/stream_parser_test.go +++ b/internal/ai/stream_parser_test.go @@ -1,4 +1,4 @@ -package anthropic +package ai import ( "testing" diff --git a/internal/config/config.go b/internal/config/config.go index d9a9729..f8c65bf 100644 --- a/internal/config/config.go +++ b/internal/config/config.go @@ -11,7 +11,7 @@ type Config struct { Database DatabaseConfig Redis RedisConfig AuthCache AuthCacheConfig - Anthropic AnthropicConfig + AI AIConfig Context ContextConfig Verifier VerifierConfig MCP MCPConfig @@ -39,11 +39,14 @@ type AuthCacheConfig struct { TTLSeconds int `envconfig:"AUTH_CACHE_TTL_SECONDS" default:"180"` } -// AnthropicConfig holds Anthropic Claude API configuration. -type AnthropicConfig struct { - APIKey string `envconfig:"ANTHROPIC_API_KEY" required:"true"` - Model string `envconfig:"ANTHROPIC_MODEL" default:"claude-sonnet-4-20250514"` - SummaryModel string `envconfig:"ANTHROPIC_SUMMARY_MODEL" default:"claude-haiku-4-5-20251001"` +// AIConfig holds AI provider configuration (OpenRouter-compatible). +type AIConfig struct { + APIKey string `envconfig:"AI_API_KEY" required:"true"` + Model string `envconfig:"AI_MODEL" default:"anthropic/claude-sonnet-4-5-20250929"` + SummaryModel string `envconfig:"AI_SUMMARY_MODEL" default:"anthropic/claude-haiku-4-5-20251001"` + BaseURL string `envconfig:"AI_BASE_URL" default:"https://openrouter.ai/api/v1"` + AppName string `envconfig:"AI_APP_NAME" default:"vultisig-agent"` + AppURL string `envconfig:"AI_APP_URL" default:""` } // TODO: Add WhisperConfig for OpenAI Whisper voice transcription support. 
diff --git a/internal/service/agent/agent.go b/internal/service/agent/agent.go index 6465e94..82b7533 100644 --- a/internal/service/agent/agent.go +++ b/internal/service/agent/agent.go @@ -11,7 +11,7 @@ import ( "github.com/google/uuid" "github.com/sirupsen/logrus" - "github.com/vultisig/agent-backend/internal/ai/anthropic" + "github.com/vultisig/agent-backend/internal/ai" "github.com/vultisig/agent-backend/internal/cache/redis" "github.com/vultisig/agent-backend/internal/config" "github.com/vultisig/agent-backend/internal/service/verifier" @@ -58,7 +58,7 @@ type SwapTxBuildResponse struct { } type AgentService struct { - anthropic *anthropic.Client + ai *ai.Client msgRepo *postgres.MessageRepository convRepo *postgres.ConversationRepository memRepo *postgres.MemoryRepository @@ -80,7 +80,7 @@ type conversationWindow struct { } func NewAgentService( - anthropicClient *anthropic.Client, + aiClient *ai.Client, msgRepo *postgres.MessageRepository, convRepo *postgres.ConversationRepository, memRepo *postgres.MemoryRepository, @@ -93,7 +93,7 @@ func NewAgentService( ctxCfg config.ContextConfig, ) *AgentService { return &AgentService{ - anthropic: anthropicClient, + ai: aiClient, msgRepo: msgRepo, convRepo: convRepo, memRepo: memRepo, @@ -146,8 +146,8 @@ func (s *AgentService) ProcessMessage(ctx context.Context, convID uuid.UUID, pub window.summary, ) - messages := anthropicMessagesFromWindow(window) - messages = append(messages, anthropic.Message{ + messages := aiMessagesFromWindow(window) + messages = append(messages, ai.Message{ Role: "user", Content: userContent, }) @@ -158,25 +158,26 @@ func (s *AgentService) ProcessMessage(ctx context.Context, convID uuid.UUID, pub var toolResp *ToolResponse var textContent string - for range maxLoopIterations { - anthropicReq := &anthropic.Request{ + for i := 0; i < maxLoopIterations; i++ { + aiReq := &ai.Request{ + Model: req.Model, System: systemPrompt, Messages: messages, Tools: tools, - ToolChoice: &anthropic.ToolChoice{ + 
ToolChoice: &ai.ToolChoice{ Type: "auto", }, } - resp, err := s.anthropic.SendMessage(ctx, anthropicReq) + resp, err := s.ai.SendMessage(ctx, aiReq) if err != nil { - return nil, fmt.Errorf("call anthropic: %w", err) + return nil, fmt.Errorf("call ai (iteration %d): %w", i, err) } s.persistMemoryUpdate(ctx, req.PublicKey, s.extractMemoryUpdate(resp)) var assistantText string - var toolCalls []anthropic.ContentBlock + var toolCalls []ai.ContentBlock for _, block := range resp.Content { switch block.Type { case "text": @@ -191,14 +192,14 @@ func (s *AgentService) ProcessMessage(ctx context.Context, convID uuid.UUID, pub break } - var toolResults []anthropic.ToolResultBlock + var toolResults []ai.ToolResultBlock for _, tc := range toolCalls { if tc.Name == "respond_to_user" { var tr ToolResponse if err := json.Unmarshal(tc.Input, &tr); err == nil { toolResp = &tr } - toolResults = append(toolResults, anthropic.ToolResultBlock{ + toolResults = append(toolResults, ai.ToolResultBlock{ Type: "tool_result", ToolUseID: tc.ID, Content: `{"ok": true}`, @@ -210,18 +211,24 @@ func (s *AgentService) ProcessMessage(ctx context.Context, convID uuid.UUID, pub if err != nil { result = jsonError(err.Error()) } - toolResults = append(toolResults, anthropic.ToolResultBlock{ + + s.logger.WithFields(logrus.Fields{ + "tool": tc.Name, + "tool_id": tc.ID, + }).Debug("tool executed") + + toolResults = append(toolResults, ai.ToolResultBlock{ Type: "tool_result", ToolUseID: tc.ID, Content: result, }) } - messages = append(messages, anthropic.AssistantMessage{ + messages = append(messages, ai.AssistantMessage{ Role: "assistant", Content: resp.Content, }) - messages = append(messages, anthropic.ToolResultMessage{ + messages = append(messages, ai.ToolResultMessage{ Role: "user", Content: toolResults, }) @@ -285,8 +292,8 @@ func (s *AgentService) ProcessMessageStream(ctx context.Context, convID uuid.UUI window.summary, ) - messages := anthropicMessagesFromWindow(window) - messages = 
append(messages, anthropic.Message{ + messages := aiMessagesFromWindow(window) + messages = append(messages, ai.Message{ Role: "user", Content: userContent, }) @@ -298,18 +305,19 @@ func (s *AgentService) ProcessMessageStream(ctx context.Context, convID uuid.UUI var textContent string for range maxLoopIterations { - anthropicReq := &anthropic.Request{ + aiReq := &ai.Request{ + Model: req.Model, System: systemPrompt, Messages: messages, Tools: tools, - ToolChoice: &anthropic.ToolChoice{ + ToolChoice: &ai.ToolChoice{ Type: "auto", }, } - extractor := anthropic.NewResponseFieldExtractor() - callback := func(ev anthropic.StreamEvent) { - if ev.Type != anthropic.StreamEventContentBlockDelta { + extractor := ai.NewResponseFieldExtractor() + callback := func(ev ai.StreamEvent) { + if ev.Type != ai.StreamEventContentBlockDelta { return } var delta struct { @@ -335,7 +343,7 @@ func (s *AgentService) ProcessMessageStream(ctx context.Context, convID uuid.UUI } } - resp, err := s.anthropic.SendMessageStream(ctx, anthropicReq, callback) + resp, err := s.ai.SendMessageStream(ctx, aiReq, callback) if err != nil { sendErr(fmt.Sprintf("AI error: %v", err)) return @@ -344,7 +352,7 @@ func (s *AgentService) ProcessMessageStream(ctx context.Context, convID uuid.UUI s.persistMemoryUpdate(ctx, req.PublicKey, s.extractMemoryUpdate(resp)) var assistantText string - var toolCalls []anthropic.ContentBlock + var toolCalls []ai.ContentBlock for _, block := range resp.Content { switch block.Type { case "text": @@ -359,14 +367,14 @@ func (s *AgentService) ProcessMessageStream(ctx context.Context, convID uuid.UUI break } - var toolResults []anthropic.ToolResultBlock + var toolResults []ai.ToolResultBlock for _, tc := range toolCalls { if tc.Name == "respond_to_user" { var tr ToolResponse if err := json.Unmarshal(tc.Input, &tr); err == nil { toolResp = &tr } - toolResults = append(toolResults, anthropic.ToolResultBlock{ + toolResults = append(toolResults, ai.ToolResultBlock{ Type: "tool_result", 
	 ToolUseID: tc.ID, Content: `{"ok": true}`, @@ -378,18 +386,18 @@ func (s *AgentService) ProcessMessageStream(ctx context.Context, convID uuid.UUI if err != nil { result = jsonError(err.Error()) } - toolResults = append(toolResults, anthropic.ToolResultBlock{ + toolResults = append(toolResults, ai.ToolResultBlock{ Type: "tool_result", ToolUseID: tc.ID, Content: result, }) } - messages = append(messages, anthropic.AssistantMessage{ + messages = append(messages, ai.AssistantMessage{ Role: "assistant", Content: resp.Content, }) - messages = append(messages, anthropic.ToolResultMessage{ + messages = append(messages, ai.ToolResultMessage{ Role: "user", Content: toolResults, }) @@ -1043,17 +1051,18 @@ func (s *AgentService) summarizeOldMessages(ctx context.Context, convID uuid.UUI } prompt += "\n\n## Messages to Summarize\n\n" + oldContent - aiReq := &anthropic.Request{ + // Call the configured summary model for summarization + req := &ai.Request{ Model: s.summaryModel, MaxTokens: s.summaryMaxTokens, Messages: []any{ - anthropic.Message{Role: "user", Content: prompt}, + ai.Message{Role: "user", Content: prompt}, }, } - resp, err := s.anthropic.SendMessage(ctx, aiReq) + resp, err := s.ai.SendMessage(ctx, req) if err != nil { - return fmt.Errorf("call anthropic: %w", err) + return fmt.Errorf("call ai: %w", err) } var summaryText string @@ -1065,7 +1074,7 @@ func (s *AgentService) summarizeOldMessages(ctx context.Context, convID uuid.UUI } if summaryText == "" { - return fmt.Errorf("empty response from anthropic") + return fmt.Errorf("empty response from ai") } summaryUpTo := oldMsgs[len(oldMsgs)-1].CreatedAt @@ -1081,13 +1090,15 @@ func (s *AgentService) summarizeOldMessages(ctx context.Context, convID uuid.UUI return nil } -func anthropicMessagesFromWindow(window *conversationWindow) []any { +// aiMessagesFromWindow converts conversation window messages to AI message format, +// skipping system messages.
+func aiMessagesFromWindow(window *conversationWindow) []any { msgs := make([]any, 0, len(window.messages)) for _, msg := range window.messages { if msg.Role == types.RoleSystem { continue } - msgs = append(msgs, anthropic.Message{ + msgs = append(msgs, ai.Message{ Role: string(msg.Role), Content: msg.Content, }) diff --git a/internal/service/agent/memory.go b/internal/service/agent/memory.go index ee3d2d0..53adda0 100644 --- a/internal/service/agent/memory.go +++ b/internal/service/agent/memory.go @@ -6,7 +6,7 @@ import ( "github.com/sirupsen/logrus" - "github.com/vultisig/agent-backend/internal/ai/anthropic" + "github.com/vultisig/agent-backend/internal/ai" ) const maxMemoryBytes = 4000 @@ -61,7 +61,7 @@ func (s *AgentService) persistMemoryUpdate(ctx context.Context, publicKey string } } -func (s *AgentService) extractMemoryUpdate(resp *anthropic.Response) *updateMemoryInput { +func (s *AgentService) extractMemoryUpdate(resp *ai.Response) *updateMemoryInput { for _, block := range resp.Content { if block.Type == "tool_use" && block.Name == "update_memory" { var mu updateMemoryInput @@ -73,10 +73,11 @@ func (s *AgentService) extractMemoryUpdate(resp *anthropic.Response) *updateMemo return nil } -func (s *AgentService) memoryTools() []anthropic.Tool { +// memoryTools returns the update_memory tool if memRepo is configured, for appending to ability tool lists. +func (s *AgentService) memoryTools() []ai.Tool { if s.memRepo == nil { return nil } - return []anthropic.Tool{UpdateMemoryTool} + return []ai.Tool{UpdateMemoryTool} } diff --git a/internal/service/agent/prompt.go b/internal/service/agent/prompt.go index d69a53f..b5c694b 100644 --- a/internal/service/agent/prompt.go +++ b/internal/service/agent/prompt.go @@ -4,7 +4,7 @@ import ( "fmt" "strings" - "github.com/vultisig/agent-backend/internal/ai/anthropic" + "github.com/vultisig/agent-backend/internal/ai" ) // ActionsTable is the shared actions reference used by both the system prompt and starters. 
@@ -243,7 +243,7 @@ For vault-to-vault sends ("send to my other vault"): If any required param (coin, address, amount) is missing, ask the user for it — do NOT call build_send_tx until all params are known.` // RespondToUserTool is the tool definition for responding to users. -var RespondToUserTool = anthropic.Tool{ +var RespondToUserTool = ai.Tool{ Name: "respond_to_user", Description: "Respond to the user with detected intent, optional suggestions for plugin-based automation, and optional actions for the app to execute.", InputSchema: map[string]any{ @@ -331,7 +331,7 @@ var RespondToUserTool = anthropic.Tool{ } // ConfirmActionTool is the tool definition for confirming action results. -var ConfirmActionTool = anthropic.Tool{ +var ConfirmActionTool = ai.Tool{ Name: "confirm_action", Description: "Generate a confirmation message for a completed action (success or failure).", InputSchema: map[string]any{ @@ -354,7 +354,7 @@ var ConfirmActionTool = anthropic.Tool{ } // BuildPolicyTool is the tool definition for building policy configurations. -var BuildPolicyTool = anthropic.Tool{ +var BuildPolicyTool = ai.Tool{ Name: "build_policy", Description: "Build a policy configuration based on the user's conversation and the plugin's schema.", InputSchema: map[string]any{ @@ -513,7 +513,7 @@ func BuildFullPrompt(msgCtx *MessageContext, plugins []PluginSkill) string { } // UpdateMemoryTool is the tool definition for updating the user's memory document. -var UpdateMemoryTool = anthropic.Tool{ +var UpdateMemoryTool = ai.Tool{ Name: "update_memory", Description: "Update your persistent memory about this user. Send the COMPLETE " + "updated memory document (markdown). This replaces the entire document. 
" + diff --git a/internal/service/agent/starters.go b/internal/service/agent/starters.go index 7fde21d..bbd252a 100644 --- a/internal/service/agent/starters.go +++ b/internal/service/agent/starters.go @@ -7,7 +7,7 @@ import ( "strings" "time" - "github.com/vultisig/agent-backend/internal/ai/anthropic" + "github.com/vultisig/agent-backend/internal/ai" ) const ( @@ -67,15 +67,15 @@ func (s *AgentService) GenerateStarters(ctx context.Context, req *GetStartersReq aiCtx, cancel := context.WithTimeout(ctx, startersTimeout) defer cancel() - aiReq := &anthropic.Request{ + aiReq := &ai.Request{ Model: s.summaryModel, MaxTokens: 1024, Messages: []any{ - anthropic.Message{Role: "user", Content: sb.String()}, + ai.Message{Role: "user", Content: sb.String()}, }, } - resp, err := s.anthropic.SendMessage(aiCtx, aiReq) + resp, err := s.ai.SendMessage(aiCtx, aiReq) if err != nil { s.logger.WithError(err).Warn("failed to generate starters") return empty diff --git a/internal/service/agent/tools.go b/internal/service/agent/tools.go index 202be01..ef864a3 100644 --- a/internal/service/agent/tools.go +++ b/internal/service/agent/tools.go @@ -1,9 +1,9 @@ package agent -import "github.com/vultisig/agent-backend/internal/ai/anthropic" +import "github.com/vultisig/agent-backend/internal/ai" // CheckPluginInstalledTool checks if a plugin is installed for the user's vault. -var CheckPluginInstalledTool = anthropic.Tool{ +var CheckPluginInstalledTool = ai.Tool{ Name: "check_plugin_installed", Description: "Check if a specific plugin is installed for the user's vault. " + "Call this when the user wants to use a plugin's features (e.g., create a DCA policy). " + @@ -21,7 +21,7 @@ var CheckPluginInstalledTool = anthropic.Tool{ } // GetRecipeSchemaTool fetches the configuration schema and examples for a plugin. -var GetRecipeSchemaTool = anthropic.Tool{ +var GetRecipeSchemaTool = ai.Tool{ Name: "get_recipe_schema", Description: "Fetch the configuration schema and examples for a plugin. 
" + "Use this to understand what fields a plugin requires before building a configuration. " + @@ -39,7 +39,7 @@ var GetRecipeSchemaTool = anthropic.Tool{ } // SuggestPolicyTool validates a configuration and gets policy rules from the verifier. -var SuggestPolicyTool = anthropic.Tool{ +var SuggestPolicyTool = ai.Tool{ Name: "suggest_policy", Description: "Validate a configuration and get policy rules from the verifier. " + "Call this ONLY after you have a complete configuration from get_recipe_schema. " + @@ -63,7 +63,7 @@ var SuggestPolicyTool = anthropic.Tool{ } // CreateSuggestionTool stores a suggestion card for the frontend to display. -var CreateSuggestionTool = anthropic.Tool{ +var CreateSuggestionTool = ai.Tool{ Name: "create_suggestion", Description: "Create a suggestion card for the user to select. " + "Use this when you want to offer the user an action option (e.g., 'Weekly DCA into ETH'). " + @@ -90,7 +90,7 @@ var CreateSuggestionTool = anthropic.Tool{ } // CheckBillingStatusTool checks if the user's billing app is set up. -var CheckBillingStatusTool = anthropic.Tool{ +var CheckBillingStatusTool = ai.Tool{ Name: "check_billing_status", Description: "Check if the user has the billing app installed or has an active free trial. " + "Most plugins require the billing app (vultisig-fees-feee) to be installed after the 7-day free trial expires. " + @@ -102,8 +102,9 @@ var CheckBillingStatusTool = anthropic.Tool{ }, } -func agentTools() []anthropic.Tool { - return []anthropic.Tool{ +// agentTools returns all granular tools for the decision loop. 
+func agentTools() []ai.Tool { + return []ai.Tool{ RespondToUserTool, CheckPluginInstalledTool, CheckBillingStatusTool, diff --git a/internal/service/agent/types.go b/internal/service/agent/types.go index 1de4b52..0b004ca 100644 --- a/internal/service/agent/types.go +++ b/internal/service/agent/types.go @@ -10,6 +10,7 @@ import ( type SendMessageRequest struct { PublicKey string `json:"public_key"` Content string `json:"content"` + Model string `json:"model,omitempty"` Context *MessageContext `json:"context,omitempty"` SelectedSuggestionID *string `json:"selected_suggestion_id,omitempty"` ActionResult *ActionResult `json:"action_result,omitempty"`