Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
9 changes: 9 additions & 0 deletions Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -27,6 +27,11 @@ serde_yaml = "0.9"
# Storage
rusqlite = { version = "0.31", features = ["bundled"] }

# PostgreSQL backend (enabled with --features postgres)
postgres = { version = "0.19", optional = true }
r2d2 = { version = "0.8", optional = true }
r2d2_postgres = { version = "0.18", optional = true }

# IDs and time
uuid = { version = "1", features = ["v4", "serde"] }
chrono = { version = "0.4", features = ["serde"] }
Expand Down Expand Up @@ -76,6 +81,10 @@ crossterm = "0.28"
tempfile = "3"
tower = { version = "0.4", features = ["util"] }

[features]
default = []
postgres = ["dep:postgres", "dep:r2d2", "dep:r2d2_postgres"]

[profile.release]
strip = true
opt-level = 3
Expand Down
12 changes: 6 additions & 6 deletions src/api/handlers.rs
Original file line number Diff line number Diff line change
Expand Up @@ -13,7 +13,7 @@ use std::sync::Arc;
use uuid::Uuid;

use crate::module::Registry;
use crate::storage::{EvidenceQuery, SqliteStore, Store};
use crate::storage::{EvidenceQuery, Store};

// ---------------------------------------------------------------------------
// Application state
Expand All @@ -22,7 +22,7 @@ use crate::storage::{EvidenceQuery, SqliteStore, Store};
/// Shared state injected into every axum handler.
#[derive(Clone)]
pub struct AppState {
pub store: Arc<SqliteStore>,
pub store: Arc<dyn Store>,
pub registry: Arc<Registry>,
/// When `Some`, all requests must carry `Authorization: Bearer <token>`.
pub auth_token: Option<String>,
Expand Down Expand Up @@ -341,7 +341,7 @@ mod tests {
use uuid::Uuid;

use crate::modules::{register_all_observers, register_all_testers};
use crate::storage::SqliteStore;
use crate::storage::{SqliteStore, Store};

fn make_state() -> AppState {
let dir = std::env::temp_dir();
Expand All @@ -350,7 +350,7 @@ mod tests {
.to_str()
.unwrap()
.to_string();
let store = Arc::new(SqliteStore::open(&path).unwrap());
let store: Arc<dyn Store> = Arc::new(SqliteStore::open(&path).unwrap());
let registry = Arc::new(Registry::new());
register_all_observers(&registry);
register_all_testers(&registry);
Expand Down Expand Up @@ -524,7 +524,7 @@ mod tests {
.unwrap()
.to_string();
let state = AppState {
store: Arc::new(SqliteStore::open(&path).unwrap()),
store: Arc::new(SqliteStore::open(&path).unwrap()) as Arc<dyn Store>,
registry: Arc::new(Registry::new()),
auth_token: Some("secret-token".to_string()),
};
Expand All @@ -541,7 +541,7 @@ mod tests {
.unwrap()
.to_string();
let state = AppState {
store: Arc::new(SqliteStore::open(&path).unwrap()),
store: Arc::new(SqliteStore::open(&path).unwrap()) as Arc<dyn Store>,
registry: Arc::new(Registry::new()),
auth_token: Some("my-token".to_string()),
};
Expand Down
4 changes: 2 additions & 2 deletions src/api/server.rs
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@ use tokio::net::TcpListener;
use super::handlers::{router, AppState};
use crate::module::Registry;
use crate::modules::{register_all_observers, register_all_testers};
use crate::storage::SqliteStore;
use crate::storage::{open_store, Store};

/// Start the OCEAN REST API server.
///
Expand All @@ -16,7 +16,7 @@ use crate::storage::SqliteStore;
/// The CLI's `cmd_serve` creates a `tokio::runtime::Runtime` and calls
/// `block_on(serve(...))`.
pub async fn serve(port: u16, auth_token: Option<String>, db_path: String) -> Result<()> {
let store = Arc::new(SqliteStore::open(&db_path)?);
let store: Arc<dyn Store> = Arc::from(open_store(&db_path)?);

let registry = Arc::new(Registry::new());
register_all_observers(&registry);
Expand Down
6 changes: 3 additions & 3 deletions src/cli/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -13,7 +13,7 @@ use ocean::{
module::{AutoAuthorizer, EnvironmentScope, Executor, Registry, TestConfig},
modules::{register_all_observers, register_all_testers},
scheduler::Schedule,
storage::{EvidenceQuery, SqliteStore, Store},
storage::{open_store, EvidenceQuery, Store},
};

use output::{print_evaluation_table, print_output, EvaluationResult, ModuleRunResult, OutputFormat};
Expand Down Expand Up @@ -419,13 +419,13 @@ fn resolve_db_path(db: &str) -> String {
format!("{home}/.ocean/evidence.db")
}

fn open_store(db: &str) -> Result<SqliteStore> {
fn open_store(db: &str) -> Result<Box<dyn Store>> {
let path = resolve_db_path(db);
if let Some(parent) = std::path::Path::new(&path).parent() {
std::fs::create_dir_all(parent)
.with_context(|| format!("create database directory: {parent:?}"))?;
}
SqliteStore::open(&path)
crate::storage::open_store(&path)
}

fn build_registry() -> Arc<Registry> {
Expand Down
22 changes: 22 additions & 0 deletions src/storage/mod.rs
Original file line number Diff line number Diff line change
@@ -1,7 +1,10 @@
// Storage — Store trait + SqliteStore implementation.
pub mod sqlite;
pub mod postgres;

pub use sqlite::SqliteStore;
#[cfg(feature = "postgres")]
pub use postgres::PostgresStore;

use anyhow::Result;
use chrono::{DateTime, Utc};
Expand Down Expand Up @@ -56,6 +59,25 @@ pub trait Store: Send + Sync {
fn close(&self) -> Result<()>;
}

/// Open the configured storage backend.
///
/// Backend selection:
/// - With the `postgres` feature compiled in AND a non-empty
///   `OCEAN_POSTGRES_URL` environment variable, a [`PostgresStore`] is
///   connected to that URL. Pool size comes from
///   `OCEAN_POSTGRES_POOL_SIZE` (defaults to 10 when unset/unparsable).
/// - In every other case (feature off, variable unset, or empty value),
///   a [`SqliteStore`] is opened at `sqlite_path`.
pub fn open_store(sqlite_path: &str) -> Result<Box<dyn Store>> {
    // The whole Postgres branch is compiled out unless the feature is on,
    // so a default build carries no postgres dependency.
    #[cfg(feature = "postgres")]
    {
        match std::env::var("OCEAN_POSTGRES_URL") {
            Ok(url) if !url.is_empty() => {
                let pool_size = std::env::var("OCEAN_POSTGRES_POOL_SIZE")
                    .ok()
                    .and_then(|raw| raw.parse::<u32>().ok())
                    .unwrap_or(10);
                return Ok(Box::new(PostgresStore::connect(&url, pool_size)?));
            }
            // Unset or empty URL: fall through to the SQLite default below.
            _ => {}
        }
    }
    Ok(Box::new(SqliteStore::open(sqlite_path)?))
}

#[cfg(test)]
mod tests {
use super::*;
Expand Down
Loading