
MCP-Adapters

Project Overview

mcp-adapters is a library that converts MCP (Model Context Protocol) resources into tools compatible with langchaingo, helping developers seamlessly integrate MCP servers into LangChain Go agents. The library supports multiple transport protocols (SSE, stdio, etc.) and provides a clean API.

Core Features

  • Convert MCP tools to LangChain Go tool interfaces
  • Support for SSE and stdio transport protocols
  • Built-in concurrency-safe session management
  • Seamless integration with the LangChain Go ecosystem
  • Support for multi-server configuration and load balancing
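
The typical flow is: configure one entry per MCP server, call GetTools to convert everything the servers expose into langchaingo tools, then hand those tools to an agent. Below is a minimal sketch of that flow, using only the API shown in the full examples further down (the server URLs are placeholders):

package main

import (
	"context"
	"log"

	mcp_adapters "github.com/luxun9527/mcp-adapters"
)

func main() {
	// One configuration entry per MCP server; both use the SSE transport here.
	client := mcp_adapters.NewMultiServerMCPClient(map[string]mcp_adapters.McpClientConf{
		"getWeather": mcp_adapters.SSEClientConfig{URL: "http://localhost:7878/sse"},
		"getTime":    mcp_adapters.SSEClientConfig{URL: "http://localhost:7877/sse"},
	})

	// Convert every tool exposed by the configured servers into LangChain Go tools.
	tools, err := client.GetTools(context.Background())
	if err != nil {
		log.Fatalf("GetTools failed: %v", err)
	}
	log.Printf("loaded %d MCP tools", len(tools))
}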

Quick Start

Installation

go get github.com/luxun9527/mcp-adapters@latest

Usage Examples

SSE Protocol Example

ReAct Mode

package sse

import (
	"context"
	mcp_adapters "github.com/luxun9527/mcp-adapters"
	"github.com/tmc/langchaingo/agents"
	"github.com/tmc/langchaingo/chains"
	"github.com/tmc/langchaingo/llms"
	"github.com/tmc/langchaingo/llms/openai"
	"log"
	"net/http"
	"net/http/httputil"
	"os"
	"testing"
)

var token = os.Getenv("token")

type loggingRoundTripper struct{}

func (lrt loggingRoundTripper) Do(req *http.Request) (*http.Response, error) {
	// Print request details
	requestDump, _ := httputil.DumpRequestOut(req, true)
	log.Printf("HTTP Request:\n%s\n", string(requestDump))
	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		return nil, err
	}
	// Print response details
	responseDump, _ := httputil.DumpResponse(resp, true)
	log.Printf("HTTP Response:\n%s\n", string(responseDump))
	return resp, nil
}


// Example for MultiServerMCPClient. Run sse_server.go to start the example servers first.
func TestMutliClient(t *testing.T) {
	mcpClient := mcp_adapters.NewMultiServerMCPClient(map[string]mcp_adapters.McpClientConf{
		"getWeather": mcp_adapters.SSEClientConfig{URL: "http://localhost:7878/sse"},
		"getTime":    mcp_adapters.SSEClientConfig{URL: "http://localhost:7877/sse"},
	})
	llm, err := openai.New(
		openai.WithModel("qwen-turbo"),
		openai.WithBaseURL("https://dashscope.aliyuncs.com/compatible-mode/v1"),
		// Use your token
		openai.WithToken(token),
		openai.WithHTTPClient(loggingRoundTripper{}),
	)
	if err != nil {
		log.Fatalf("openai.New failed: %v", err)
	}
	// Get all tools
	tools, err := mcpClient.GetTools(context.Background())
	if err != nil {
		log.Fatalf("mcpClient.GetTools failed: %v", err)
	}
    
	agent := agents.NewOneShotAgent(llm,
		tools,
		agents.WithMaxIterations(1),
	)
    
	executor := agents.NewExecutor(agent)
	question := "What's the weather like in Daxing District, Beijing , What time is it now?"
	answer, err := chains.Run(context.Background(), executor, question)
	if err != nil {
		log.Fatalf("chains.Run failed: %v", err)
	}
	log.Printf("answer: %v", answer)
	// Output: The current time is 2025-06-23 22:59:39, and the weather in Daxing District, Beijing is partly cloudy with beautiful moonlight tonight.
}
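
The test above expects two MCP SSE servers (getWeather on port 7878 and getTime on port 7877) to already be running; they are started from sse_server.go in the example/ directory. For orientation, a minimal server of that kind could look like the sketch below. It assumes the servers are built with github.com/mark3labs/mcp-go, which this README does not state, so treat the package and option names as illustrative:

package main

import (
	"context"

	"github.com/mark3labs/mcp-go/mcp"
	"github.com/mark3labs/mcp-go/server"
)

func main() {
	// An MCP server exposing a single getWeather tool.
	s := server.NewMCPServer("weather-server", "0.0.1")

	tool := mcp.NewTool("getWeather",
		mcp.WithDescription("Get the current weather for a city"),
		mcp.WithString("city", mcp.Required(), mcp.Description("City name")),
	)
	s.AddTool(tool, func(ctx context.Context, req mcp.CallToolRequest) (*mcp.CallToolResult, error) {
		// A real handler would read the "city" argument and query a weather API.
		return mcp.NewToolResultText("partly cloudy"), nil
	})

	// Serve the MCP server over SSE; the client connects to http://localhost:7878/sse.
	if err := server.NewSSEServer(s).Start(":7878"); err != nil {
		panic(err)
	}
}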

OpenAI Function Calling Mode

langchaingo evolves slowly; as of the latest release, v0.1.13, the default OpenAI function calling still has several issues:

1. The tool definition sent in the request is problematic: the tool's input schema is reduced to a single generic __arg1 string parameter, as in the request below. In actual testing, the Python version does not have this issue.

{
  "model": "qwen-turbo",
  "messages": [
    {
      "role": "system",
      "content": "You are a helpful AI assistant."
    },
    {
      "role": "user",
      "content": "1+1=?"
    }
  ],
  "temperature": 0,
  "tools": [
    {
      "type": "function",
      "function": {
        "name": "calculator",
        "description": "Useful for getting the result of a math expression. \n\tThe input to this tool should be a valid mathematical expression that could be executed by a starlark evaluator.",
        "parameters": {
          "properties": {
            "__arg1": {
              "title": "__arg1",
              "type": "string"
            }
          },
          "required": [
            "__arg1"
          ],
          "type": "object"
        }
      }
    }
  ]
}

2. After a function call is triggered, the tool result is still sent back as a message with the function role:

{
  "model": "qwen-turbo",
  "messages": [
    {
      "role": "system",
      "content": "You are a helpful AI assistant."
    },
    {
      "role": "user",
      "content": "1+1=?"
    },
    {
      "role": "function",
      "content": "2"
    }
  ],
  "temperature": 0,
  "tools": [
    {
      "type": "function",
      "function": {
        "name": "calculator",
        "description": "Useful for getting the result of a math expression. \n\tThe input to this tool should be a valid mathematical expression that could be executed by a starlark evaluator.",
        "parameters": {
          "properties": {
            "__arg1": {
              "title": "__arg1",
              "type": "string"
            }
          },
          "required": [
            "__arg1"
          ],
          "type": "object"
        }
      }
    }
  ]
}

Solution: create a custom agent that wraps the built-in OpenAI functions agent and overrides its Plan method.

package sse

import (
	"context"
	mcp_adapters "github.com/luxun9527/mcp-adapters"
	"github.com/tmc/langchaingo/agents"
	"github.com/tmc/langchaingo/chains"
	"github.com/tmc/langchaingo/llms"
	"github.com/tmc/langchaingo/llms/openai"
	"log"
	"net/http"
	"net/http/httputil"
	"os"
	"testing"
)

var token = os.Getenv("token")

type loggingRoundTripper struct{}

func (lrt loggingRoundTripper) Do(req *http.Request) (*http.Response, error) {
	// Print request details
	requestDump, _ := httputil.DumpRequestOut(req, true)
	log.Printf("HTTP Request:\n%s\n", string(requestDump))
	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		return nil, err
	}
	// Print response details
	responseDump, _ := httputil.DumpResponse(resp, true)
	log.Printf("HTTP Response:\n%s\n", string(responseDump))
	return resp, nil
}

// Run sse_server.go to start the example servers first.
func TestMutliClientOpenaiFunction(t *testing.T) {
    mcpClient := mcp_adapters.NewMultiServerMCPClient(map[string]mcp_adapters.McpClientConf{
       "getWeather": mcp_adapters.SSEClientConfig{URL: "http://localhost:7878/sse"},
       "getTime":    mcp_adapters.SSEClientConfig{URL: "http://localhost:7877/sse"},
    })
    llm, err := openai.New(
       openai.WithModel("qwen-turbo"),
       openai.WithBaseURL("https://dashscope.aliyuncs.com/compatible-mode/v1"),
       // Use your token
       openai.WithToken(token),
       openai.WithHTTPClient(loggingRoundTripper{}),
    )
    if err != nil {
       log.Fatalf("openai.New failed: %v", err)
    }
    agentTools, err := mcpClient.GetTools(context.Background(), mcp_adapters.WithLangChainOptions(mcp_adapters.WithToolType(mcp_adapters.OpenaiFunctionCallToolType)))
    if err != nil {
       log.Fatalf("mcpClient.GetTools failed: %v", err)
    }

    agent := agents.NewOpenAIFunctionsAgent(llm,
       agentTools,
       agents.WithMaxIterations(1),
    )
    // Custom agent, override method
    openaiAgent := &mcp_adapters.OpenAIFunctionsAgent{OpenAIFunctionsAgent: agent}
    executor := agents.NewExecutor(openaiAgent)
    question := "What's the weather like in Daxing District, Beijing , What time is it now?"
    answer, err := chains.Run(context.Background(), executor, question)
    if err != nil {
       log.Fatalf("chains.Run failed: %v", err)
    }
    log.Printf("answer: %v", answer)
    /*
       Output: The weather in Daxing District, Beijing is partly cloudy today, and the moonlight is beautiful tonight.
               As for the current time, I don't have access to real-time data. Please check your device or a clock for the current time. 
               Let me know if there's anything else I can assist you with!
    */
}
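
The mcp_adapters.OpenAIFunctionsAgent used above relies on plain Go struct embedding: it embeds *agents.OpenAIFunctionsAgent and replaces its Plan method with one that builds the request correctly. Structurally, the pattern looks like the sketch below; the body here only delegates, while the library's real Plan contains the actual fix:

package sse

import (
	"context"

	"github.com/tmc/langchaingo/agents"
	"github.com/tmc/langchaingo/schema"
)

// planOverrideAgent illustrates the embedding-and-override pattern only;
// mcp_adapters.OpenAIFunctionsAgent carries the real corrected Plan.
type planOverrideAgent struct {
	*agents.OpenAIFunctionsAgent
}

func (a *planOverrideAgent) Plan(
	ctx context.Context,
	intermediateSteps []schema.AgentStep,
	inputs map[string]string,
) ([]schema.AgentAction, *schema.AgentFinish, error) {
	// A corrected Plan would rebuild the chat messages here, e.g. send tool
	// results back with the proper role and keep the original tool schemas,
	// before calling the model. This sketch simply delegates.
	return a.OpenAIFunctionsAgent.Plan(ctx, intermediateSteps, inputs)
}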

Project Structure

  • client.go: MCP client implementation
  • session.go: Session management
  • tools.go: LangChain tool conversion utilities
  • example/: Example code for SSE and STDIO protocols

License

MIT
