Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
3 changes: 3 additions & 0 deletions .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -25,3 +25,6 @@
*.json
*.log
*.csv
# pycache
__pycache__/
*.pyc
5 changes: 5 additions & 0 deletions common/ais/__init__.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,5 @@
from common.ais.chatgpt import ChatGPT
from common.ais.claude import Claude
from common.ais.minimax import MiniMax

# Public re-exports: the AI client wrappers provided by this package.
__all__ = ["ChatGPT", "Claude", "MiniMax"]
194 changes: 194 additions & 0 deletions common/ais/minimax.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,194 @@
# -*- coding: utf-8 -*-
"""
@file: agentfy/common/ais/minimax.py
@desc: MiniMax API client wrapper for chat via OpenAI-compatible interface
@auth: Callmeiks
"""
import traceback
from typing import Dict, Any, Optional
from decimal import Decimal

from openai import AsyncOpenAI, OpenAIError
from config import settings
from common.exceptions.exceptions import MiniMaxAPIError
from common.utils.logging import setup_logger

# Set up logger
logger = setup_logger(__name__)

# OpenAI-compatible endpoint for MiniMax chat completions.
MINIMAX_BASE_URL = "https://api.minimax.io/v1"


class MiniMax:
    """MiniMax API client wrapper using the OpenAI-compatible interface.

    A key is taken from the constructor or from settings.minimax_api_key;
    without one the instance is still constructable, but `chat` raises
    MiniMaxAPIError.
    """

    # Model pricing configuration (USD per token). Decimal keeps the
    # per-token rates exact; costs become float only in the returned dict.
    PRICING = {
        "MiniMax-M2.7": {
            "input": Decimal("0.80") / 1000000,
            "output": Decimal("4.00") / 1000000,
        },
        "MiniMax-M2.7-highspeed": {
            "input": Decimal("0.20") / 1000000,
            "output": Decimal("1.00") / 1000000,
        },
        "MiniMax-M2.5": {
            "input": Decimal("0.80") / 1000000,
            "output": Decimal("4.00") / 1000000,
        },
        "MiniMax-M2.5-highspeed": {
            "input": Decimal("0.20") / 1000000,
            "output": Decimal("1.00") / 1000000,
        },
    }

    # Supported model names (order follows PRICING insertion order).
    SUPPORTED_MODELS = list(PRICING.keys())

    def __init__(self, minimax_api_key: Optional[str] = None):
        """
        Initialize MiniMax client.

        Args:
            minimax_api_key: MiniMax API key; falls back to
                settings.minimax_api_key (MINIMAX_API_KEY) when omitted.
        """
        self.minimax_key = minimax_api_key or settings.minimax_api_key

        if not self.minimax_key:
            # Degrade gracefully: construction succeeds without a key, but
            # chat() will raise until a valid key is supplied.
            logger.warning("No MiniMax API key provided, MiniMax functionality will be unavailable")
            self.minimax_client = None
        else:
            self.minimax_client = AsyncOpenAI(
                api_key=self.minimax_key,
                base_url=MINIMAX_BASE_URL,
                timeout=60,
            )

    def _clamp_temperature(self, temperature: float) -> float:
        """
        Clamp temperature to the valid MiniMax range (0.0, 1.0].

        MiniMax requires temperature > 0 and <= 1; values at or below zero
        map to 0.01, values above one map to 1.0.
        """
        if temperature <= 0.0:
            temperature = 0.01
        elif temperature > 1.0:
            temperature = 1.0
        return temperature

    def _normalize_model(self, model: str) -> str:
        """Return the canonical model name, defaulting to MiniMax-M2.7."""
        if model in self.PRICING:
            return model
        logger.warning(f"Unknown MiniMax model '{model}', falling back to MiniMax-M2.7")
        return "MiniMax-M2.7"

    async def calculate_chat_cost(self, model: str, prompt_tokens: int, completion_tokens: int) -> Dict[str, Any]:
        """
        Calculate MiniMax API usage cost.

        Args:
            model: Model name (normalized; unknown names price as MiniMax-M2.7).
            prompt_tokens: Number of input tokens.
            completion_tokens: Number of output tokens.

        Returns:
            dict with input_cost, output_cost, total_cost (floats, USD).
        """
        model_key = self._normalize_model(model)
        pricing = self.PRICING[model_key]  # single lookup for both rates
        input_cost = prompt_tokens * pricing["input"]
        output_cost = completion_tokens * pricing["output"]
        total_cost = input_cost + output_cost

        return {
            "input_cost": float(input_cost),
            "output_cost": float(output_cost),
            "total_cost": float(total_cost),
        }

    async def chat(
        self,
        system_prompt: str,
        user_prompt: str,
        model: str = "MiniMax-M2.7",
        temperature: float = 0.7,
        max_tokens: int = 10000,
        timeout: int = 60,
    ) -> Dict[str, Any]:
        """
        Call MiniMax chat API via the OpenAI-compatible interface (async).

        Args:
            system_prompt: System prompt.
            user_prompt: User prompt.
            model: Model name (default: MiniMax-M2.7, 204K context).
            temperature: Sampling temperature; clamped to (0.0, 1.0].
            max_tokens: Maximum output tokens.
            timeout: Request timeout in seconds.

        Returns:
            dict with model, temperature, max_tokens, response, cost.

        Raises:
            MiniMaxAPIError: When the client is unconfigured or the call fails.
        """
        if not self.minimax_client:
            raise MiniMaxAPIError(
                "MiniMax client not initialized, chat functionality unavailable",
                {"details": "Please provide a valid MINIMAX_API_KEY"},
            )

        model = self._normalize_model(model)
        temperature = self._clamp_temperature(temperature)

        try:
            chat_completion = await self.minimax_client.chat.completions.create(
                model=model,
                messages=[
                    {"role": "system", "content": system_prompt},
                    {"role": "user", "content": user_prompt},
                ],
                temperature=temperature,
                max_tokens=max_tokens,
                timeout=timeout,
            )

            cost = await self.calculate_chat_cost(
                model,
                chat_completion.usage.prompt_tokens,
                chat_completion.usage.completion_tokens,
            )

            result = {
                "model": model,
                "temperature": temperature,
                "max_tokens": max_tokens,
                "response": chat_completion.model_dump(),
                "cost": cost,
            }

            logger.info(
                f"MiniMax response: model={model}, "
                f"completion={chat_completion.usage.completion_tokens}/{chat_completion.usage.total_tokens} "
                f"input_cost=${cost['input_cost']:.6f}, output_cost=${cost['output_cost']:.6f}, "
                f"total_cost=${cost['total_cost']:.6f}"
            )

            return result

        except OpenAIError as e:
            # FIX: the context dict was previously passed as a positional
            # logging argument; with no %-placeholders in the message, stdlib
            # logging silently drops it. Embed the context in the message.
            logger.error(
                f"MiniMax API error: {e} "
                f"(model={model}, temperature={temperature}, max_tokens={max_tokens})"
            )
            raise MiniMaxAPIError(
                "Error calling MiniMax API",
                {"details": str(e)},
            ) from e
        except Exception as e:
            # logger.exception records the traceback through the logging
            # config instead of dumping it to stderr via traceback.print_exc.
            logger.exception(f"Unexpected error calling MiniMax: {e}")
            raise MiniMaxAPIError(
                "Unexpected error calling MiniMax",
                {"details": str(e)},
            ) from e
4 changes: 4 additions & 0 deletions common/exceptions/exceptions.py
Original file line number Diff line number Diff line change
Expand Up @@ -72,6 +72,10 @@ class ClaudeAPIError(ReasoningException):
"""Raised when Claude API call fails."""
pass

class MiniMaxAPIError(ReasoningException):
    """Raised when a MiniMax API call fails.

    NOTE(review): callers pass a message plus a details dict as a second
    positional argument — confirm ReasoningException's __init__ accepts it.
    """
    pass

# Action Module Exceptions
class ActionException(SocialMediaAgentException):
"""Base exception for action module."""
Expand Down
1 change: 1 addition & 0 deletions config.py
Original file line number Diff line number Diff line change
Expand Up @@ -27,6 +27,7 @@ class Settings(BaseSettings):
# External Service API Keys
openai_api_key: Optional[str] = Field(None, env="OPENAI_API_KEY")
anthropic_api_key: Optional[str] = Field(None, env="ANTHROPIC_API_KEY")
minimax_api_key: Optional[str] = Field(None, env="MINIMAX_API_KEY")
tikhub_api_key: Optional[str] = Field(None, env="TIKHUB_API_KEY")
lemonfox_api_key: Optional[str] = Field(None, env="LEMONFOX_API_KEY")
elevenlabs_api_key: Optional[str] = Field(None, env="ELEVENLABS_API_KEY")
Expand Down
6 changes: 6 additions & 0 deletions tests/conftest.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,6 @@
"""Pytest configuration for Agentfy tests."""
import sys
import os

# Allow imports from the project root
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
57 changes: 57 additions & 0 deletions tests/test_minimax_integration.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,57 @@
# -*- coding: utf-8 -*-
"""
Integration tests for common/ais/minimax.py

These tests verify the integration with the MiniMax API using the
MINIMAX_API_KEY environment variable. They are skipped automatically
when the key is not present.
"""
import os
import pytest


# Skip the whole module unless a MiniMax key is configured in the environment.
# (Truthiness check on purpose: an empty-string key also skips.)
_API_KEY = os.getenv("MINIMAX_API_KEY")
pytestmark = pytest.mark.skipif(
    not _API_KEY,
    reason="MINIMAX_API_KEY not set — skipping integration tests",
)


@pytest.fixture
def minimax():
    """Provide a fresh MiniMax client; the API key comes from the environment."""
    from common.ais.minimax import MiniMax
    client = MiniMax()
    return client


class TestMiniMaxIntegration:
    """End-to-end checks against the live MiniMax chat endpoint."""

    @pytest.mark.asyncio
    async def test_chat_returns_non_empty_response(self, minimax):
        """The default model should produce at least one non-empty choice."""
        outcome = await minimax.chat(
            system_prompt="You are a helpful assistant.",
            user_prompt="Say hello in one word.",
            model="MiniMax-M2.7",
            max_tokens=20,
        )
        choices = outcome["response"].get("choices", [])
        assert choices
        text = choices[0]["message"]["content"]
        assert isinstance(text, str)
        assert text

    @pytest.mark.asyncio
    async def test_chat_highspeed_model(self, minimax):
        """The highspeed model name should round-trip into the result dict."""
        outcome = await minimax.chat(
            system_prompt="You are a helpful assistant.",
            user_prompt="Reply with the single word 'OK'.",
            model="MiniMax-M2.7-highspeed",
            max_tokens=10,
        )
        assert outcome["model"] == "MiniMax-M2.7-highspeed"

    @pytest.mark.asyncio
    async def test_chat_cost_is_positive(self, minimax):
        """A successful call should report a strictly positive total cost."""
        outcome = await minimax.chat(
            system_prompt="Answer briefly.",
            user_prompt="What is 2+2?",
            model="MiniMax-M2.7",
            max_tokens=20,
        )
        assert outcome["cost"]["total_cost"] > 0
Loading