Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
43 changes: 41 additions & 2 deletions crates/cli/src/bundle_cmd.rs
Original file line number Diff line number Diff line change
Expand Up @@ -21,8 +21,7 @@ pub async fn export_bundle(
};
writeln!(f, "{}", serde_json::to_string(&header)?)?;

// v0: export primary memory items + expertise items.
// Future: include focus.
// Export primary memory items + expertise items + daily entries + daily summaries.
// Export ALL items (including narrative) for bundle completeness.
let items = db.all_memory_items_with_filter(true).await?;

Expand Down Expand Up @@ -51,6 +50,28 @@ pub async fn export_bundle(
}
}

// Export daily entries (all, including invalidated — preserve full history).
let daily_entries = db.all_daily_entries().await?;
for entry in daily_entries {
let rec = BundleRecord::DailyEntry {
logical_key: entry.id.clone(),
item: entry,
};
writeln!(f, "{}", serde_json::to_string(&rec)?)?;
wrote += 1;
}

// Export daily summaries (the hand-authored summary field lives here).
let daily_summaries = db.all_daily_summaries().await?;
for summary in daily_summaries {
let rec = BundleRecord::DailySummary {
logical_key: summary.date.clone(),
item: summary,
};
writeln!(f, "{}", serde_json::to_string(&rec)?)?;
wrote += 1;
}

Ok(wrote)
}

Expand Down Expand Up @@ -96,6 +117,24 @@ pub async fn import_bundle(db: &MemoryDb, in_path: &Path) -> Result<u64> {
db.expertise_put_with_dedupe(&item, Some(&key)).await?;
imported += 1;
}
BundleRecord::DailyEntry {
logical_key: _,
item,
} => {
// Idempotent by entry id (INSERT OR IGNORE).
db.upsert_daily_entry(&item).await?;
imported += 1;
}
BundleRecord::DailySummary {
logical_key: _,
item,
} => {
// Idempotent by date (INSERT OR IGNORE).
// Entries drive summary content via refresh_daily_summary;
// this restores the hand-authored `summary` field.
db.upsert_daily_summary(&item).await?;
imported += 1;
}
}
}

Expand Down
8 changes: 8 additions & 0 deletions crates/cli/src/main.rs
Original file line number Diff line number Diff line change
Expand Up @@ -3876,6 +3876,9 @@ fn main() -> anyhow::Result<()> {
let id = db
.insert_daily_entry(&date, context.as_deref(), &content)
.await?;
// TODO(#120): call db.link_daily_memory(&date, memory_id) here once
// content-similarity lookup (finding related memory_items for this entry)
// is implemented. The DB method is scaffolded; the lookup is not yet built.
let entries = db.get_daily_entries(&date, false).await?;
let daily = db.get_daily_summary(&date).await?;
print_ok(json_mode, serde_json::json!({
Expand Down Expand Up @@ -4633,6 +4636,11 @@ fn validate_day_args(
"--date cannot be used with --list".to_string(),
));
}
if date.is_some() && invalidate.is_some() {
return Err(CliError::InvalidInput(
"--date cannot be combined with --invalidate; invalidation looks up the entry by ID only".to_string(),
));
}

Ok(())
}
Expand Down
12 changes: 12 additions & 0 deletions crates/core/src/bundle.rs
Original file line number Diff line number Diff line change
Expand Up @@ -26,4 +26,16 @@ pub enum BundleRecord {
expertise: String,
item: crate::types::ExpertiseItem,
},

#[serde(rename = "wagl.bundle.daily_entry")]
DailyEntry {
logical_key: String,
item: crate::types::DailyEntry,
},

#[serde(rename = "wagl.bundle.daily_summary")]
DailySummary {
logical_key: String,
item: crate::types::DailySummary,
},
}
116 changes: 108 additions & 8 deletions crates/db/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -495,6 +495,7 @@ impl MemoryDb {
invalidate_reason TEXT,\
created_at INTEGER NOT NULL\
);\
CREATE INDEX IF NOT EXISTS idx_daily_entries_date ON daily_entries (date);\
CREATE TABLE IF NOT EXISTS daily_memories (\
date TEXT,\
memory_id TEXT,\
Expand Down Expand Up @@ -811,11 +812,21 @@ impl MemoryDb {
}
}

// Preserve provenance from existing row if incoming does not supply it.
// This prevents idempotent writes from silently clearing previously captured provenance.
let environment = item.environment.clone().or(existing.environment.clone());
let origin_domain = item
.origin_domain
.clone()
.or(existing.origin_domain.clone());

let merged = MemoryItem {
id: existing.id,
created_at: existing.created_at,
tags,
files,
environment,
origin_domain,
..item.clone()
};

Expand Down Expand Up @@ -979,16 +990,14 @@ impl MemoryDb {
.conn
.query(
&format!(
"SELECT id, mem_type, content, environment, origin_domain, tags, created_at, \
salience, primary_emotion, secondary_emotions, d_score, i_score, ev, files, actionable \
FROM memory_items \
"SELECT {} FROM memory_items \
WHERE created_at >= ?1 \
AND d_score IS NOT NULL \
AND ABS(d_score) >= ?2 \
AND {} \
ORDER BY ABS(d_score) DESC, created_at DESC \
LIMIT ?3",
actionable_filter
MEMORY_ITEM_SELECT, actionable_filter
),
(cutoff.as_str(), threshold_f64, limit_i64),
)
Expand Down Expand Up @@ -1022,9 +1031,7 @@ impl MemoryDb {
.conn
.query(
&format!(
"SELECT id, mem_type, content, environment, origin_domain, tags, created_at, \
salience, primary_emotion, secondary_emotions, d_score, i_score, ev, files, actionable \
FROM memory_items \
"SELECT {} FROM memory_items \
WHERE {} AND (
LOWER(mem_type) LIKE '%task%' \
OR LOWER(mem_type) LIKE '%open_loop%' \
Expand All @@ -1039,7 +1046,7 @@ impl MemoryDb {
)
ORDER BY created_at DESC \
LIMIT ?1",
actionable_filter
MEMORY_ITEM_SELECT, actionable_filter
),
[limit_i64],
)
Expand Down Expand Up @@ -1725,6 +1732,99 @@ impl MemoryDb {
Ok(())
}

/// Return all daily entries (including invalidated) across all dates, for bundle export.
///
/// Rows come back ordered by date, then creation time, so the exported
/// bundle replays history in chronological order.
pub async fn all_daily_entries(&self) -> anyhow::Result<Vec<DailyEntry>> {
    let mut result_rows = self
        .conn
        .query(
            "SELECT id, date, context, content, invalidated, invalidate_reason, created_at FROM daily_entries ORDER BY date ASC, created_at ASC",
            (),
        )
        .await
        .context("all daily entries")?;

    let mut entries = Vec::new();
    while let Some(r) = result_rows.next().await? {
        // Column 4 holds the `invalidated` flag as an integer; map it to bool.
        let invalidated_flag: i64 = r.get(4)?;
        let entry = DailyEntry {
            id: r.get(0)?,
            date: r.get(1)?,
            context: r.get(2)?,
            content: r.get(3)?,
            invalidated: invalidated_flag != 0,
            invalidate_reason: r.get(5)?,
            created_at: r.get(6)?,
        };
        entries.push(entry);
    }
    Ok(entries)
}

/// Return all daily summaries across all dates, for bundle export.
///
/// Each row of the `daily` table maps directly onto a `DailySummary`;
/// results are ordered by date so export output is deterministic.
pub async fn all_daily_summaries(&self) -> anyhow::Result<Vec<DailySummary>> {
    let mut result_rows = self
        .conn
        .query(
            "SELECT date, content, summary, updated_at FROM daily ORDER BY date ASC",
            (),
        )
        .await
        .context("all daily summaries")?;

    let mut summaries = Vec::new();
    while let Some(r) = result_rows.next().await? {
        let item = DailySummary {
            date: r.get(0)?,
            content: r.get(1)?,
            summary: r.get(2)?,
            updated_at: r.get(3)?,
        };
        summaries.push(item);
    }
    Ok(summaries)
}

/// Upsert a daily entry during bundle import (idempotent by id).
pub async fn upsert_daily_entry(&self, entry: &DailyEntry) -> anyhow::Result<()> {
let invalidated_i: i64 = if entry.invalidated { 1 } else { 0 };
self.conn
.execute(
"INSERT OR IGNORE INTO daily_entries (id, date, context, content, invalidated, invalidate_reason, created_at) VALUES (?1, ?2, ?3, ?4, ?5, ?6, ?7)",
Copy link
Copy Markdown

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

P1 Badge Upsert existing daily entries during bundle import

upsert_daily_entry uses INSERT OR IGNORE, so importing a newer bundle snapshot does not apply later changes to an existing entry (notably invalidated/invalidate_reason). This breaks incremental restore/sync workflows: an entry invalidated in the source DB remains active in the target DB after re-import, and refresh_daily_summary then rebuilds daily.content from that stale active row.

Useful? React with 👍 / 👎.

params![
entry.id.clone(),
entry.date.clone(),
entry.context.clone(),
entry.content.clone(),
invalidated_i,
entry.invalidate_reason.clone(),
entry.created_at
],
)
.await
.context("upsert daily entry")?;
// Refresh the summary so it stays consistent with the entries.
self.refresh_daily_summary(&entry.date).await?;
Ok(())
}

/// Upsert a daily summary during bundle import (idempotent by date).
/// Only restores the hand-written `summary` field; the `content` field is
/// regenerated by `refresh_daily_summary` when entries are imported, so we
/// only fill it in here when there are no entries to drive the refresh.
pub async fn upsert_daily_summary(&self, summary: &DailySummary) -> anyhow::Result<()> {
    // On conflict only `summary` and `updated_at` are overwritten; `content`
    // from the insert clause applies solely when the row is brand new.
    let sql = "INSERT INTO daily (date, content, summary, updated_at) VALUES (?1, ?2, ?3, ?4) ON CONFLICT(date) DO UPDATE SET summary = excluded.summary, updated_at = excluded.updated_at";
    let args = params![
        summary.date.clone(),
        summary.content.clone(),
        summary.summary.clone(),
        summary.updated_at
    ];
    self.conn
        .execute(sql, args)
        .await
        .context("upsert daily summary")?;
    Ok(())
}

pub async fn insert_daily_entry(
&self,
date: &str,
Expand Down
1 change: 1 addition & 0 deletions crates/db/src/migrate.rs
Original file line number Diff line number Diff line change
Expand Up @@ -226,6 +226,7 @@ async fn migrate_v1_to_v2_inner(conn: &Connection) -> anyhow::Result<()> {
invalidate_reason TEXT,\
created_at INTEGER NOT NULL\
);\
CREATE INDEX IF NOT EXISTS idx_daily_entries_date ON daily_entries (date);\
CREATE TABLE IF NOT EXISTS daily_memories (\
date TEXT,\
memory_id TEXT,\
Expand Down
Loading