Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
144 changes: 144 additions & 0 deletions lib/active_agent/generation_provider/x_ai_provider.rb
Original file line number Diff line number Diff line change
@@ -0,0 +1,144 @@
# frozen_string_literal: true

begin
gem "ruby-openai", ">= 8.1.0"
require "openai"
rescue LoadError
raise LoadError, "The 'ruby-openai >= 8.1.0' gem is required for XAIProvider. Please add it to your Gemfile and run `bundle install`."
end

require "active_agent/action_prompt/action"
require_relative "base"
require_relative "response"
require_relative "stream_processing"
require_relative "message_formatting"
require_relative "tool_management"

module ActiveAgent
  module GenerationProvider
    # XAI (Grok) Generation Provider.
    #
    # xAI exposes a chat-completions endpoint that is wire-compatible with the
    # OpenAI API, so this provider reuses the `ruby-openai` client pointed at
    # xAI's host.
    class XAIProvider < Base
      include StreamProcessing
      include MessageFormatting
      include ToolManagement

      # Default API endpoint; override per-config with the "host" key.
      XAI_API_HOST = "https://api.x.ai"

      # @param config [Hash] provider configuration (string keys)
      # @raise [ArgumentError] when no API key can be resolved from config or ENV
      def initialize(config)
        super
        # Support both api_key and access_token for backwards compatibility,
        # falling back to either recognized environment variable.
        @access_token = config["api_key"] || config["access_token"] || ENV["XAI_API_KEY"] || ENV["GROK_API_KEY"]

        unless @access_token
          raise ArgumentError, "XAI API key is required. Set it in config as 'api_key', 'access_token', or via XAI_API_KEY/GROK_API_KEY environment variable."
        end

        # xAI uses an OpenAI-compatible client with a custom endpoint.
        # Guard the Rails constant so the provider also works outside a Rails
        # process (the unguarded call raised NameError there).
        @client = OpenAI::Client.new(
          access_token: @access_token,
          uri_base: config["host"] || XAI_API_HOST,
          log_errors: defined?(Rails) ? Rails.env.development? : false
        )

        # Default to grok-2-latest but allow configuration.
        @model_name = config["model"] || "grok-2-latest"
      end

      # Runs a chat generation for the given prompt.
      # @param prompt [ActiveAgent::ActionPrompt::Prompt]
      # @return [ActiveAgent::GenerationProvider::Response]
      def generate(prompt)
        @prompt = prompt

        with_error_handling do
          chat_prompt(parameters: prompt_parameters)
        end
      end

      # xAI doesn't currently provide embedding models.
      # @raise [NotImplementedError] always
      def embed(prompt)
        raise NotImplementedError, "xAI does not currently support embeddings. Use a different provider for embedding tasks."
      end

      protected

      # Override from StreamProcessing — consumes OpenAI-format SSE chunks.
      # Appends streamed delta content to +message+ and notifies +agent_stream+;
      # when a tool-call delta arrives together with a role, a fresh message is
      # started and memoized into @response. Finalizes on finish_reason.
      def process_stream_chunk(chunk, message, agent_stream)
        new_content = chunk.dig("choices", 0, "delta", "content")
        # present? already covers nil, so no separate nil-check is needed.
        if new_content.present?
          message.generation_id = chunk.dig("id")
          message.content += new_content
          agent_stream&.call(message, new_content, false, prompt.action_name)
        elsif chunk.dig("choices", 0, "delta", "tool_calls") && chunk.dig("choices", 0, "delta", "role")
          message = handle_message(chunk.dig("choices", 0, "delta"))
          prompt.messages << message
          @response = ActiveAgent::GenerationProvider::Response.new(
            prompt:,
            message:,
            raw_response: chunk,
            raw_request: @streaming_request_params
          )
        end

        finalize_stream(message, agent_stream) if chunk.dig("choices", 0, "finish_reason")
      end

      # Override from MessageFormatting — OpenAI-style image_url payload
      # (useful if xAI adds vision support).
      def format_image_content(message)
        [ {
          type: "image_url",
          image_url: { url: message.content }
        } ]
      end

      private

      # Override from ParameterBuilder — xAI currently follows OpenAI's
      # parameter format closely, so nothing provider-specific is added yet.
      def build_provider_parameters
        {}
      end

      # Builds the Response object from a (non-streaming) chat completion.
      # For streaming requests the response was already assembled chunk by
      # chunk, so the memoized @response is returned as-is.
      def chat_response(response, request_params = nil)
        return @response if prompt.options[:stream]

        message_json = response.dig("choices", 0, "message")
        # Some responses omit a per-message id; fall back to the top-level id.
        message_json["id"] = response.dig("id") if message_json["id"].blank?
        message = handle_message(message_json)

        update_context(prompt: prompt, message: message, response: response)

        @response = ActiveAgent::GenerationProvider::Response.new(
          prompt: prompt,
          message: message,
          raw_response: response,
          raw_request: request_params
        )
      end

      # Converts a raw message hash (OpenAI format) into an ActionPrompt::Message.
      def handle_message(message_json)
        ActiveAgent::ActionPrompt::Message.new(
          generation_id: message_json["id"],
          content: message_json["content"],
          role: message_json["role"].intern,
          action_requested: message_json["finish_reason"] == "tool_calls",
          raw_actions: message_json["tool_calls"] || [],
          requested_actions: handle_actions(message_json["tool_calls"]),
          content_type: prompt.output_schema.present? ? "application/json" : "text/plain"
        )
      end

      # Issues the chat request, wiring up the streaming callback when
      # streaming is requested via prompt options or provider config.
      def chat_prompt(parameters: prompt_parameters)
        if prompt.options[:stream] || config["stream"]
          parameters[:stream] = provider_stream
          @streaming_request_params = parameters
        end
        chat_response(@client.chat(parameters: parameters), parameters)
      end
    end
  end
end
227 changes: 227 additions & 0 deletions test/generation_provider/x_ai_provider_test.rb
Original file line number Diff line number Diff line change
@@ -0,0 +1,227 @@
require "test_helper"
require "active_agent/generation_provider/x_ai_provider"

# Test for xAI Provider gem loading and configuration
# Tests for xAI provider gem loading, configuration resolution, and defaults.
#
# Several tests mutate process-global state (ENV variables, ActiveAgent's
# cached @config). All such mutations are restored in `ensure` blocks so a
# failing assertion cannot leak state into subsequent tests.
class XAIProviderTest < ActiveAgentTestCase
  # Test the gem load rescue block
  test "gem load rescue block provides correct error message" do
    # Since we can't easily simulate the gem not being available without complex mocking,
    # we'll test that the error message is correct by creating a minimal reproduction
    expected_message = "The 'ruby-openai >= 8.1.0' gem is required for XAIProvider. Please add it to your Gemfile and run `bundle install`."

    # Verify the rescue block pattern exists in the source code
    provider_file_path = File.join(Rails.root, "../../lib/active_agent/generation_provider/x_ai_provider.rb")
    provider_source = File.read(provider_file_path)

    assert_includes provider_source, "begin"
    assert_includes provider_source, 'gem "ruby-openai"'
    assert_includes provider_source, 'require "openai"'
    assert_includes provider_source, "rescue LoadError"
    assert_includes provider_source, expected_message

    # Reproduce the rescue pattern directly (no eval needed) to confirm the
    # raised error carries the expected message.
    error = assert_raises(LoadError) do
      begin
        gem "nonexistent-openai-gem"
        require "nonexistent-openai-gem"
      rescue LoadError
        raise LoadError, expected_message
      end
    end

    assert_equal expected_message, error.message
  end

  test "loads successfully when ruby-openai gem is available" do
    # This test ensures the provider loads correctly when the gem is present
    # Since the gem is already loaded in our test environment, this should work

    # Verify the class exists and can be instantiated with valid config
    assert defined?(ActiveAgent::GenerationProvider::XAIProvider)

    config = {
      "service" => "XAI",
      "api_key" => "test-key",
      "model" => "grok-2-latest"
    }

    assert_nothing_raised do
      ActiveAgent::GenerationProvider::XAIProvider.new(config)
    end
  end

  # Test configuration loading and presence
  test "raises error when xAI API key is missing" do
    config = {
      "service" => "XAI",
      "model" => "grok-2-latest"
      # Missing api_key
    }

    error = assert_raises(ArgumentError) do
      ActiveAgent::GenerationProvider::XAIProvider.new(config)
    end

    assert_includes error.message, "XAI API key is required"
  end

  test "loads configuration from active_agent.yml when present" do
    mock_config = {
      "test" => {
        "xai" => {
          "service" => "XAI",
          "api_key" => "test-api-key",
          "model" => "grok-2-latest",
          "temperature" => 0.7
        }
      }
    }

    # Capture globals so they can be restored even if an assertion fails.
    original_config = ActiveAgent.instance_variable_get(:@config)
    rails_env = ENV["RAILS_ENV"]
    begin
      ActiveAgent.instance_variable_set(:@config, mock_config)
      ENV["RAILS_ENV"] = "test"

      config = ApplicationAgent.configuration(:xai)

      assert_equal "XAI", config.config["service"]
      assert_equal "test-api-key", config.config["api_key"]
      assert_equal "grok-2-latest", config.config["model"]
      assert_equal 0.7, config.config["temperature"]
    ensure
      ENV["RAILS_ENV"] = rails_env
      ActiveAgent.instance_variable_set(:@config, original_config)
    end
  end

  test "loads configuration from environment-specific section" do
    mock_config = {
      "development" => {
        "xai" => {
          "service" => "XAI",
          "api_key" => "dev-api-key",
          "model" => "grok-2-latest"
        }
      },
      "test" => {
        "xai" => {
          "service" => "XAI",
          "api_key" => "test-api-key",
          "model" => "grok-2-latest"
        }
      }
    }

    original_config = ActiveAgent.instance_variable_get(:@config)
    original_env = ENV["RAILS_ENV"]
    begin
      ActiveAgent.instance_variable_set(:@config, mock_config)

      # Development configuration
      ENV["RAILS_ENV"] = "development"
      config = ApplicationAgent.configuration(:xai)
      assert_equal "dev-api-key", config.config["api_key"]

      # Test configuration
      ENV["RAILS_ENV"] = "test"
      config = ApplicationAgent.configuration(:xai)
      assert_equal "test-api-key", config.config["api_key"]
    ensure
      ENV["RAILS_ENV"] = original_env
      ActiveAgent.instance_variable_set(:@config, original_config)
    end
  end

  test "xAI provider initialization with API key from environment variable" do
    original_xai_key = ENV["XAI_API_KEY"]
    original_grok_key = ENV["GROK_API_KEY"]
    begin
      config = {
        "service" => "XAI",
        "model" => "grok-2-latest"
      }

      # XAI_API_KEY takes effect when GROK_API_KEY is absent
      ENV["XAI_API_KEY"] = "env-xai-key"
      ENV["GROK_API_KEY"] = nil

      provider = ActiveAgent::GenerationProvider::XAIProvider.new(config)
      assert_equal "env-xai-key", provider.instance_variable_get(:@access_token)

      # GROK_API_KEY is used as a fallback
      ENV["XAI_API_KEY"] = nil
      ENV["GROK_API_KEY"] = "env-grok-key"

      provider = ActiveAgent::GenerationProvider::XAIProvider.new(config)
      assert_equal "env-grok-key", provider.instance_variable_get(:@access_token)
    ensure
      ENV["XAI_API_KEY"] = original_xai_key
      ENV["GROK_API_KEY"] = original_grok_key
    end
  end

  test "xAI provider initialization with custom host" do
    config = {
      "service" => "XAI",
      "api_key" => "test-key",
      "model" => "grok-2-latest",
      "host" => "https://custom-xai-host.com"
    }

    provider = ActiveAgent::GenerationProvider::XAIProvider.new(config)
    client = provider.instance_variable_get(:@client)

    # The OpenAI client should be configured with the custom host
    assert_not_nil client
  end

  test "xAI provider defaults to grok-2-latest model" do
    config = {
      "service" => "XAI",
      "api_key" => "test-key"
      # Model not specified
    }

    provider = ActiveAgent::GenerationProvider::XAIProvider.new(config)
    assert_equal "grok-2-latest", provider.instance_variable_get(:@model_name)
  end

  test "xAI provider uses configured model" do
    config = {
      "service" => "XAI",
      "api_key" => "test-key",
      "model" => "grok-1"
    }

    provider = ActiveAgent::GenerationProvider::XAIProvider.new(config)
    assert_equal "grok-1", provider.instance_variable_get(:@model_name)
  end

  test "xAI provider defaults to correct API host" do
    config = {
      "service" => "XAI",
      "api_key" => "test-key"
    }

    # Instantiation must succeed, and the constant pins the default endpoint.
    ActiveAgent::GenerationProvider::XAIProvider.new(config)
    assert_equal "https://api.x.ai", ActiveAgent::GenerationProvider::XAIProvider::XAI_API_HOST
  end

  test "embed method raises NotImplementedError for xAI" do
    config = {
      "service" => "XAI",
      "api_key" => "test-key"
    }

    provider = ActiveAgent::GenerationProvider::XAIProvider.new(config)
    mock_prompt = Minitest::Mock.new

    error = assert_raises(NotImplementedError) do
      provider.embed(mock_prompt)
    end

    assert_includes error.message, "xAI does not currently support embeddings"
  end
end