Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -30,3 +30,4 @@ logs/
# g3 artifacts
requirements.md
todo.g3.md
config.toml
27 changes: 27 additions & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,27 @@
# Changelog

All notable changes to this project will be documented in this file.

The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).

## [Unreleased]

### Added
- **OpenRouter Support**: Implemented full support for OpenRouter as an LLM provider.
- Added `OpenRouterProvider` implementation in `crates/g3-providers`.
- Added configuration structures `OpenRouterConfig` and `ProviderPreferencesConfig` in `crates/g3-config`.
- Added integration tests in `crates/tests/openrouter_integration_tests.rs`.
- Updated `g3-cli` to accept `openrouter` as a valid provider type in command line arguments.
- Updated `g3-core` to register and handle OpenRouter providers.
- Added example configuration in `config.example.toml`.
- **Configuration**: Added `config.toml` to `.gitignore`.

### Changed
- **g3-core**: Updated `provider_max_tokens` and `resolve_max_tokens` to correctly handle OpenRouter configuration and context window sizes (defaulting to 128k if not specified, but respecting config).
- **g3-cli**: Updated provider validation logic to support `openrouter` prefix (e.g., `openrouter.grok`).

### Fixed
- **g3-providers**: Fixed unused variable warnings in `anthropic.rs` by renaming `cache_config` to `_cache_config`.
- **g3-planner**: Fixed unused function warning in `llm.rs` by renaming `print_status_line` to `_print_status_line`.
- **g3-providers**: Fixed typo in `openrouter.rs` (`pub mode` -> `pub mod`).
21 changes: 21 additions & 0 deletions config.example.toml
Original file line number Diff line number Diff line change
Expand Up @@ -46,6 +46,27 @@ use_oauth = true
# max_tokens = 4096
# temperature = 0.1

# Named OpenRouter configurations
# OpenRouter provides access to 200+ AI models through a unified API
[providers.openrouter.default]
api_key = "${OPENROUTER_API_KEY}"
model = "anthropic/claude-3.5-sonnet"
max_tokens = 4096
temperature = 0.7
# http_referer = "https://yourapp.com" # Optional: Your app URL for analytics
# x_title = "Your App Name" # Optional: Your app name for analytics
# provider_order = ["Anthropic"] # Optional: Preferred provider routing
# allow_fallbacks = true # Optional: Allow fallback to other providers

# Multiple OpenAI-compatible providers can be configured
# [providers.openai_compatible.groq]
# api_key = "your-groq-api-key"
# model = "llama-3.3-70b-versatile"
# base_url = "https://api.groq.com/openai/v1"
# max_tokens = 4096
# temperature = 0.1


# Multiple OpenAI-compatible providers can be configured
# [providers.openai_compatible.openrouter]
# api_key = "your-openrouter-api-key"
Expand Down
62 changes: 37 additions & 25 deletions crates/g3-cli/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -185,34 +185,45 @@ fn extract_coach_feedback_from_logs(
if let Some(prev_content) = prev_msg.get("content") {
if let Some(prev_content_str) = prev_content.as_str() {
// Check if the previous assistant message contains a final_output tool call
if prev_content_str.contains("\"tool\": \"final_output\"") {
// This is a final_output tool result
let feedback = if content_str.starts_with("Tool result: ") {
// If the previous assistant message explicitly indicates a final_output tool
// then treat it as verified. Otherwise, accept the Tool result as a
// fallback (with a warning) to avoid losing coach feedback when logs
// don't include an exact final_output marker.
let feedback = if prev_content_str.contains("\"tool\": \"final_output\"") {
if content_str.starts_with("Tool result: ") {
content_str.strip_prefix("Tool result: ")
.unwrap_or(content_str)
.to_string()
} else {
content_str.to_string()
};

output.print(&format!(
"Coach feedback extracted: {} characters (from {} total)",
feedback.len(),
content_str.len()
));
output.print(&format!("Coach feedback:\n{}", feedback));

output.print(&format!(
"✅ Extracted coach feedback from session: {} (verified final_output tool)",
session_id
));
return Ok(feedback);
}
} else {
// Unverified fallback: accept the tool result but warn
output.print(&format!(
"⚠️ Skipping tool result at index {} - not a final_output tool call",
"⚠️ Tool result at index {} not verified as final_output; accepting as fallback",
i
));
}
if content_str.starts_with("Tool result: ") {
content_str.strip_prefix("Tool result: ")
.unwrap_or(content_str)
.to_string()
} else {
content_str.to_string()
}
};

output.print(&format!(
"Coach feedback extracted: {} characters (from {} total)",
feedback.len(),
content_str.len()
));
output.print(&format!("Coach feedback:\n{}", feedback));

output.print(&format!(
"✅ Extracted coach feedback from session: {}",
session_id
));
return Ok(feedback);
}
}
}
Expand All @@ -233,8 +244,8 @@ fn extract_coach_feedback_from_logs(
}
}

// If we couldn't extract from logs, panic with detailed error
panic!(
// If we couldn't extract from logs, return an error (avoid panicking)
return Err(anyhow::anyhow!(
"CRITICAL: Could not extract coach feedback from session: {}\n\
Log file path: {:?}\n\
Log file exists: {}\n\
Expand All @@ -244,7 +255,7 @@ fn extract_coach_feedback_from_logs(
log_file_path,
log_file_path.exists(),
coach_result.response.len()
);
));
}

use clap::Parser;
Expand Down Expand Up @@ -323,7 +334,7 @@ pub struct Cli {
#[arg(long)]
pub machine: bool,

/// Override the configured provider (anthropic, databricks, embedded, openai)
/// Override the configured provider (anthropic, databricks, embedded, openai, openrouter)
#[arg(long, value_name = "PROVIDER")]
pub provider: Option<String>,

Expand Down Expand Up @@ -533,8 +544,9 @@ pub async fn run() -> Result<()> {

// Validate provider if specified
if let Some(ref provider) = cli.provider {
let valid_providers = ["anthropic", "databricks", "embedded", "openai"];
if !valid_providers.contains(&provider.as_str()) {
let valid_providers = ["anthropic", "databricks", "embedded", "openai", "openrouter"];
let provider_type = provider.split('.').next().unwrap_or(provider);
if !valid_providers.contains(&provider_type) {
return Err(anyhow::anyhow!(
"Invalid provider '{}'. Valid options: {:?}",
provider,
Expand Down
35 changes: 34 additions & 1 deletion crates/g3-config/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -47,6 +47,10 @@ pub struct ProvidersConfig {
/// Multiple named OpenAI-compatible providers (e.g., openrouter, groq, etc.)
#[serde(default)]
pub openai_compatible: HashMap<String, OpenAIConfig>,

/// Named OpenRouter provider configs
#[serde(default)]
pub openrouter: HashMap<String, OpenRouterConfig>,
}

#[derive(Debug, Clone, Serialize, Deserialize)]
Expand All @@ -58,6 +62,25 @@ pub struct OpenAIConfig {
pub temperature: Option<f32>,
}

/// Configuration for one named OpenRouter provider entry, deserialized
/// from a `[providers.openrouter.<name>]` table in the config file.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct OpenRouterConfig {
    /// OpenRouter API key (the example config interpolates `${OPENROUTER_API_KEY}`).
    pub api_key: String,
    /// Model identifier, e.g. `"anthropic/claude-3.5-sonnet"`.
    pub model: String,
    /// Optional base URL — presumably overrides the default OpenRouter endpoint; TODO confirm against the provider impl.
    pub base_url: Option<String>,
    /// Optional cap on generated tokens per response.
    pub max_tokens: Option<u32>,
    /// Optional sampling temperature.
    pub temperature: Option<f32>,
    /// Optional provider-routing preferences forwarded to OpenRouter.
    pub provider_preferences: Option<ProviderPreferencesConfig>,
    /// Optional `HTTP-Referer` header value (app URL, used by OpenRouter for analytics).
    pub http_referer: Option<String>,
    /// Optional `X-Title` header value (app name shown in OpenRouter analytics).
    pub x_title: Option<String>,
}

/// Provider-routing preferences for OpenRouter, converted to
/// `g3_providers::ProviderPreferences` at provider registration time.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ProviderPreferencesConfig {
    /// Preferred upstream provider ordering, e.g. `["Anthropic"]`.
    pub order: Option<Vec<String>>,
    /// Whether OpenRouter may fall back to other providers.
    pub allow_fallbacks: Option<bool>,
    /// NOTE(review): presumably requires routed providers to support all request parameters — confirm against the OpenRouter API docs.
    pub require_parameters: Option<bool>,
}

#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct AnthropicConfig {
pub api_key: String,
Expand Down Expand Up @@ -198,6 +221,7 @@ impl Default for Config {
databricks: databricks_configs,
embedded: HashMap::new(),
openai_compatible: HashMap::new(),
openrouter: HashMap::new(),
},
agent: AgentConfig {
max_context_length: None,
Expand Down Expand Up @@ -414,11 +438,20 @@ impl Config {
);
}
}
"openrouter" => {
if !self.providers.openrouter.contains_key(config_name) {
anyhow::bail!(
"Provider config 'openrouter.{}' not found. Available: {:?}",
config_name,
self.providers.openrouter.keys().collect::<Vec<_>>()
);
}
}
_ => {
// Check openai_compatible providers
if !self.providers.openai_compatible.contains_key(provider_type) {
anyhow::bail!(
"Unknown provider type '{}'. Valid types: anthropic, openai, databricks, embedded, or openai_compatible names",
"Unknown provider type '{}'. Valid types: anthropic, openai, databricks, embedded, openrouter, or openai_compatible names",
provider_type
);
}
Expand Down
56 changes: 54 additions & 2 deletions crates/g3-core/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -1316,6 +1316,39 @@ impl<W: UiWriter> Agent<W> {
}
}

// Register OpenRouter providers from HashMap
for (name, openrouter_config) in &config.providers.openrouter {
if should_register("openrouter", name) {
let mut openrouter_provider = g3_providers::OpenRouterProvider::new_with_name(
format!("openrouter.{}", name),
openrouter_config.api_key.clone(),
Some(openrouter_config.model.clone()),
openrouter_config.max_tokens,
openrouter_config.temperature,
)?;

if let Some(prefs) = &openrouter_config.provider_preferences {
// Convert config prefs to provider prefs
let provider_prefs = g3_providers::ProviderPreferences {
order: prefs.order.clone(),
allow_fallbacks: prefs.allow_fallbacks,
require_parameters: prefs.require_parameters,
};
openrouter_provider = openrouter_provider.with_provider_preferences(provider_prefs);
}

if let Some(referer) = &openrouter_config.http_referer {
openrouter_provider = openrouter_provider.with_http_referer(referer.clone());
}

if let Some(title) = &openrouter_config.x_title {
openrouter_provider = openrouter_provider.with_x_title(title.clone());
}

providers.register(openrouter_provider);
}
}

// Register Anthropic providers from HashMap
for (name, anthropic_config) in &config.providers.anthropic {
if should_register("anthropic", name) {
Expand Down Expand Up @@ -1543,7 +1576,11 @@ impl<W: UiWriter> Agent<W> {
"openai" => config.providers.openai.get(config_name)?.max_tokens,
"databricks" => config.providers.databricks.get(config_name)?.max_tokens,
"embedded" => config.providers.embedded.get(config_name)?.max_tokens,
_ => None,
"openrouter" => config.providers.openrouter.get(config_name)?.max_tokens,
_ => {
// Check openai_compatible
config.providers.openai_compatible.get(provider_type)?.max_tokens
}
}
}

Expand All @@ -1563,7 +1600,11 @@ impl<W: UiWriter> Agent<W> {
"openai" => config.providers.openai.get(config_name)?.temperature,
"databricks" => config.providers.databricks.get(config_name)?.temperature,
"embedded" => config.providers.embedded.get(config_name)?.temperature,
_ => None,
"openrouter" => config.providers.openrouter.get(config_name)?.temperature,
_ => {
// Check openai_compatible
config.providers.openai_compatible.get(provider_type)?.temperature
}
}
}

Expand Down Expand Up @@ -1946,6 +1987,17 @@ impl<W: UiWriter> Agent<W> {
16384 // Conservative default for other Databricks models
}
}
"openrouter" => {
if let Some(max_tokens) = Self::provider_max_tokens(config, provider_name) {
warnings.push(format!(
"Context length falling back to max_tokens ({}) for provider={}",
max_tokens, provider_name
));
max_tokens
} else {
128000 // Default for OpenRouter
}
}
_ => config.agent.fallback_default_max_tokens as u32,
};

Expand Down
2 changes: 1 addition & 1 deletion crates/g3-planner/src/llm.rs
Original file line number Diff line number Diff line change
Expand Up @@ -206,7 +206,7 @@ impl PlannerUiWriter {
}

/// Clear the current line and print a status message
fn print_status_line(&self, message: &str) {
fn _print_status_line(&self, message: &str) {
// Print status message without overwriting previous content
// Use println to ensure each status is on its own line
println!("{:.80}", message);
Expand Down
10 changes: 5 additions & 5 deletions crates/g3-providers/src/anthropic.rs
Original file line number Diff line number Diff line change
Expand Up @@ -125,7 +125,7 @@ pub struct AnthropicProvider {
model: String,
max_tokens: u32,
temperature: f32,
cache_config: Option<String>,
_cache_config: Option<String>,
enable_1m_context: bool,
thinking_budget_tokens: Option<u32>,
}
Expand All @@ -136,7 +136,7 @@ impl AnthropicProvider {
model: Option<String>,
max_tokens: Option<u32>,
temperature: Option<f32>,
cache_config: Option<String>,
_cache_config: Option<String>,
enable_1m_context: Option<bool>,
thinking_budget_tokens: Option<u32>,
) -> Result<Self> {
Expand All @@ -156,7 +156,7 @@ impl AnthropicProvider {
model,
max_tokens: max_tokens.unwrap_or(4096),
temperature: temperature.unwrap_or(0.1),
cache_config,
_cache_config,
enable_1m_context: enable_1m_context.unwrap_or(false),
thinking_budget_tokens,
})
Expand All @@ -169,7 +169,7 @@ impl AnthropicProvider {
model: Option<String>,
max_tokens: Option<u32>,
temperature: Option<f32>,
cache_config: Option<String>,
_cache_config: Option<String>,
enable_1m_context: Option<bool>,
thinking_budget_tokens: Option<u32>,
) -> Result<Self> {
Expand All @@ -189,7 +189,7 @@ impl AnthropicProvider {
model,
max_tokens: max_tokens.unwrap_or(4096),
temperature: temperature.unwrap_or(0.1),
cache_config,
_cache_config,
enable_1m_context: enable_1m_context.unwrap_or(false),
thinking_budget_tokens,
})
Expand Down
2 changes: 2 additions & 0 deletions crates/g3-providers/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -144,11 +144,13 @@ pub mod databricks;
pub mod embedded;
pub mod oauth;
pub mod openai;
pub mod openrouter;

pub use anthropic::AnthropicProvider;
pub use databricks::DatabricksProvider;
pub use embedded::EmbeddedProvider;
pub use openai::OpenAIProvider;
pub use openrouter::{OpenRouterProvider, ProviderPreferences};

impl Message {
/// Generate a unique message ID in format HHMMSS-XXX
Expand Down
Loading