diff --git a/skills/openclaw-native/dag-recall/SKILL.md b/skills/openclaw-native/dag-recall/SKILL.md new file mode 100644 index 0000000..4b38a94 --- /dev/null +++ b/skills/openclaw-native/dag-recall/SKILL.md @@ -0,0 +1,137 @@ +--- +name: dag-recall +version: "1.0" +category: openclaw-native +description: Walks the memory DAG to recall detailed context on demand — query, expand, and assemble cited answers from hierarchical summaries without re-reading raw transcripts. +stateful: true +--- + +# DAG Recall + +## What it does + +When the agent needs to recall something from past sessions, reading raw transcripts is expensive and often exceeds context limits. DAG Recall walks the hierarchical summary DAG built by memory-dag-compactor — starting from high-level (d2/d3) nodes, expanding into detailed (d0/d1) children — and assembles a focused, cited answer. + +Inspired by [lossless-claw](https://github.com/Martian-Engineering/lossless-claw)'s sub-agent recall pattern, where a lightweight agent fetches and expands nodes on demand rather than loading entire conversation histories. + +## When to invoke + +- When the agent asks "what did we decide about X?" or "how did we implement Y?" 
+- When context about a past session is needed but the transcript isn't loaded
+- When searching MEMORY.md returns only high-level summaries that need expansion
+- Before starting work that depends on decisions or patterns from earlier sessions
+
+## How to use
+
+```bash
+python3 recall.py --query "how did we handle auth migration"   # Walk DAG + assemble answer
+python3 recall.py --query "deploy process" --depth 2           # Limit expansion depth
+python3 recall.py --query "API keys" --top 5                   # Return top 5 matching nodes
+python3 recall.py --expand s-d1-003                            # Expand a specific node
+python3 recall.py --trace s-d0-012                             # Show full ancestor chain
+python3 recall.py --recent --hours 48                          # Recall from recent nodes only
+python3 recall.py --status                                     # Last recall summary
+python3 recall.py --format json                                # Machine-readable output
+```
+
+## Recall algorithm
+
+1. **Search** — FTS5 query across all DAG node summaries
+2. **Rank** — Score by relevance × recency × depth (lower depth = more detail = higher score for recall)
+3. **Expand** — For each top-N match, walk to children (lower depth = more detail)
+4. **Assemble** — Combine expanded content into a coherent answer with node citations
+5. 
**Cache** — Store the assembled answer for fast re-retrieval + +### Expansion strategy + +``` +Query: "auth migration" + ↓ +d3 node: "Infrastructure & Auth overhaul Q1" (score: 0.72) + → expand d2: "Auth migration week of Feb 10" (score: 0.89) + → expand d1: "Migrated JWT signing from HS256 to RS256" (score: 0.95) + → expand d0: [raw operational detail — returned as-is] +``` + +Expansion stops when: +- Target depth reached (default: expand to d0) +- Token budget exhausted (default: 4000 tokens) +- No children exist (leaf node) + +## DAG structure expected + +Reads from `~/.openclaw/lcm-dag/` (same directory as memory-dag-compactor): + +``` +~/.openclaw/lcm-dag/ +├── index.json # Node metadata: id, depth, summary, children, created_at +├── nodes/ +│ ├── s-d0-001.md # Leaf node (operational detail) +│ ├── s-d1-001.md # Condensed summary +│ ├── s-d2-001.md # Arc summary +│ └── s-d3-001.md # Durable summary +└── fts.db # FTS5 index over node summaries +``` + +## Procedure + +**Step 1 — Query the DAG** + +```bash +python3 recall.py --query "how did we handle the database migration" +``` + +Searches the FTS5 index, ranks results, expands top matches, and assembles a cited answer: + +``` +Recall: "how did we handle the database migration" — 3 sources + + We migrated the database schema using Alembic with a blue-green + deployment strategy. The key decisions were: + + 1. Zero-downtime migration using shadow tables [s-d1-003] + 2. Rollback script tested against staging first [s-d0-012] + 3. Data backfill ran as async job over 2 hours [s-d0-015] + + Sources: + [s-d1-003] "Database migration — shadow table approach" (Feb 12) + [s-d0-012] "Alembic rollback script for users table" (Feb 12) + [s-d0-015] "Async backfill job for legacy records" (Feb 13) +``` + +**Step 2 — Expand a specific node** + +```bash +python3 recall.py --expand s-d1-003 +``` + +Shows the full content of a node and lists its children for further expansion. 
+
+**Step 3 — Trace lineage**
+
+```bash
+python3 recall.py --trace s-d0-012
+```
+
+Shows the full ancestor chain from leaf to root, revealing how detail connects to high-level themes.
+
+## Integration with other skills
+
+- **memory-dag-compactor**: Produces the DAG that this skill reads — must be run first
+- **session-persistence**: Alternative data source — recall can fall back to SQLite search when DAG nodes are insufficient
+- **context-assembly-scorer**: Recall results feed into context assembly scoring
+- **memory-integrity-checker**: Ensures DAG is structurally sound before recall walks it
+
+## State
+
+Recall history and cache stored in `~/.openclaw/skill-state/dag-recall/state.yaml`.
+
+Fields: `last_query`, `last_query_at`, `cache_size`, `total_recalls`, `avg_sources_per_recall`, `recall_history`.
+
+## Notes
+
+- Uses Python's built-in `sqlite3` and `json` modules — no external dependencies
+- FTS5 used for search when available; falls back to substring matching
+- Token budget prevents runaway expansion on large DAGs
+- Cache is LRU with configurable max size (default: 50 entries)
+- If DAG doesn't exist yet, prints a helpful message pointing to memory-dag-compactor
diff --git a/skills/openclaw-native/dag-recall/STATE_SCHEMA.yaml b/skills/openclaw-native/dag-recall/STATE_SCHEMA.yaml
new file mode 100644
index 0000000..ecd3dd6
--- /dev/null
+++ b/skills/openclaw-native/dag-recall/STATE_SCHEMA.yaml
@@ -0,0 +1,26 @@
+version: "1.0"
+description: Recall query history, cache stats, and expansion tracking.
+fields: + last_query: + type: string + description: Most recent recall query text + last_query_at: + type: datetime + cache_size: + type: integer + description: Number of cached recall results + total_recalls: + type: integer + description: Lifetime recall count + avg_sources_per_recall: + type: number + description: Average number of DAG nodes cited per recall + recall_history: + type: list + description: Rolling log of recent recalls (last 20) + items: + query: { type: string } + recalled_at: { type: datetime } + sources_used: { type: integer } + tokens_assembled: { type: integer } + cache_hit: { type: boolean } diff --git a/skills/openclaw-native/dag-recall/example-state.yaml b/skills/openclaw-native/dag-recall/example-state.yaml new file mode 100644 index 0000000..1787893 --- /dev/null +++ b/skills/openclaw-native/dag-recall/example-state.yaml @@ -0,0 +1,62 @@ +# Example runtime state for dag-recall +last_query: "how did we handle the auth migration" +last_query_at: "2026-03-16T10:32:15.000000" +cache_size: 12 +total_recalls: 47 +avg_sources_per_recall: 3.2 +recall_history: + - query: "how did we handle the auth migration" + recalled_at: "2026-03-16T10:32:15.000000" + sources_used: 4 + tokens_assembled: 1820 + cache_hit: false + - query: "deploy process" + recalled_at: "2026-03-16T09:15:03.000000" + sources_used: 3 + tokens_assembled: 1240 + cache_hit: false + - query: "deploy process" + recalled_at: "2026-03-16T09:45:22.000000" + sources_used: 3 + tokens_assembled: 1240 + cache_hit: true +# ── Walkthrough ────────────────────────────────────────────────────────────── +# python3 recall.py --query "how did we handle the auth migration" +# +# Recall: "how did we handle the auth migration" — 4 sources +# +# [s-d1-003] (summary) Migrated JWT signing from HS256 to RS256 +# with key rotation plan... +# [s-d0-012] (detail) Created migration script for auth_keys table... +# [s-d0-015] (detail) Updated environment variables for RS256 public... 
+# [s-d0-018] (detail) Added rollback procedure in deploy/auth-rollback.sh... +# +# Sources: +# [s-d1-003] "Migrated JWT signing from HS256 to RS2..." (2026-02-10) +# [s-d0-012] "Created migration script for auth_keys..." (2026-02-10) +# [s-d0-015] "Updated environment variables for RS25..." (2026-02-11) +# [s-d0-018] "Added rollback procedure in deploy/aut..." (2026-02-11) +# +# python3 recall.py --trace s-d0-012 +# +# Trace: s-d0-012 → root (3 nodes) +# +# s-d0-012 (d0 — detail) +# Created migration script for auth_keys table +# Created: 2026-02-10 +# └── s-d1-003 (d1 — summary) +# JWT signing migration HS256 → RS256 +# Created: 2026-02-10 +# └── s-d2-001 (d2 — arc) +# Auth & Infrastructure overhaul Feb 2026 +# Created: 2026-02-14 +# +# python3 recall.py --status +# +# DAG Recall Status +# ────────────────────────────────────────────────── +# Last query: how did we handle the auth migration +# Last query at: 2026-03-16T10:32:15.000000 +# Total recalls: 47 +# Cache size: 12 / 50 +# DAG nodes: 24 diff --git a/skills/openclaw-native/dag-recall/recall.py b/skills/openclaw-native/dag-recall/recall.py new file mode 100755 index 0000000..2c32956 --- /dev/null +++ b/skills/openclaw-native/dag-recall/recall.py @@ -0,0 +1,599 @@ +#!/usr/bin/env python3 +"""DAG Recall — walk the memory DAG to recall detailed context on demand. + +Query, expand, and assemble cited answers from hierarchical summaries. 
+ +Usage: + python3 recall.py --query "auth migration" # Search + expand + assemble + python3 recall.py --query "deploy" --depth 2 # Limit expansion depth + python3 recall.py --query "API" --top 5 # Top 5 matches + python3 recall.py --expand s-d1-003 # Expand a specific node + python3 recall.py --trace s-d0-012 # Ancestor chain to root + python3 recall.py --recent --hours 48 # Recent nodes only + python3 recall.py --status # Last recall summary + python3 recall.py --format json # Machine-readable output +""" + +import argparse +import json +import os +import sqlite3 +import sys +from collections import OrderedDict +from datetime import datetime, timedelta, timezone +from pathlib import Path + +# ── Paths ──────────────────────────────────────────────────────────────────── + +OPENCLAW_DIR = Path.home() / ".openclaw" +DAG_DIR = OPENCLAW_DIR / "lcm-dag" +INDEX_PATH = DAG_DIR / "index.json" +NODES_DIR = DAG_DIR / "nodes" +FTS_DB_PATH = DAG_DIR / "fts.db" +STATE_DIR = OPENCLAW_DIR / "skill-state" / "dag-recall" +STATE_PATH = STATE_DIR / "state.yaml" + +DEFAULT_TOKEN_BUDGET = 4000 +DEFAULT_TOP_N = 3 +DEFAULT_MAX_DEPTH = 0 # expand all the way to d0 +DEFAULT_CACHE_SIZE = 50 +CHARS_PER_TOKEN = 4 # rough estimate + + +# ── Index loading ──────────────────────────────────────────────────────────── + +def load_index(): + """Load the DAG index (node metadata).""" + if not INDEX_PATH.exists(): + return None + with open(INDEX_PATH) as f: + return json.load(f) + + +def load_node_content(node_id): + """Read the full content of a DAG node file.""" + node_path = NODES_DIR / f"{node_id}.md" + if not node_path.exists(): + return None + return node_path.read_text() + + +def estimate_tokens(text): + """Rough token estimate.""" + return len(text) // CHARS_PER_TOKEN + + +# ── FTS5 search ────────────────────────────────────────────────────────────── + +def search_fts(query, limit=20): + """Search DAG node summaries using FTS5.""" + if not FTS_DB_PATH.exists(): + return 
search_fallback(query, limit) + try: + conn = sqlite3.connect(str(FTS_DB_PATH)) + cur = conn.cursor() + # Check if FTS table exists + cur.execute("SELECT name FROM sqlite_master WHERE type='table' AND name='nodes_fts'") + if not cur.fetchone(): + conn.close() + return search_fallback(query, limit) + cur.execute( + "SELECT node_id, snippet(nodes_fts, 1, '>>>', '<<<', '...', 40), rank " + "FROM nodes_fts WHERE nodes_fts MATCH ? ORDER BY rank LIMIT ?", + (query, limit), + ) + results = [ + {"node_id": row[0], "snippet": row[1], "score": -row[2]} + for row in cur.fetchall() + ] + conn.close() + return results + except Exception: + return search_fallback(query, limit) + + +def search_fallback(query, limit=20): + """Fallback: substring search across index summaries.""" + index = load_index() + if not index or "nodes" not in index: + return [] + terms = query.lower().split() + results = [] + for node in index["nodes"]: + summary = node.get("summary", "").lower() + score = sum(1 for t in terms if t in summary) + if score > 0: + results.append({ + "node_id": node["id"], + "snippet": node.get("summary", "")[:120], + "score": score, + }) + results.sort(key=lambda x: x["score"], reverse=True) + return results[:limit] + + +# ── Scoring ────────────────────────────────────────────────────────────────── + +def score_results(results, index): + """Re-score results factoring in depth and recency.""" + if not index or "nodes" not in index: + return results + + node_map = {n["id"]: n for n in index["nodes"]} + now = datetime.now(timezone.utc) + + for r in results: + meta = node_map.get(r["node_id"], {}) + depth = meta.get("depth", 0) + + # Deeper nodes (more detail) get bonus for recall + depth_bonus = max(0, 3 - depth) * 0.3 # d0=0.9, d1=0.6, d2=0.3, d3=0 + + # Recency bonus + recency_bonus = 0 + created = meta.get("created_at") + if created: + try: + ct = datetime.fromisoformat(created.replace("Z", "+00:00")) + days_old = (now - ct).days + recency_bonus = max(0, 1.0 - days_old / 90) 
# linear decay over 90 days + except Exception: + pass + + r["final_score"] = r["score"] + depth_bonus + recency_bonus + r["depth"] = depth + + results.sort(key=lambda x: x["final_score"], reverse=True) + return results + + +# ── Expansion ──────────────────────────────────────────────────────────────── + +def expand_node(node_id, index, target_depth=0, token_budget=DEFAULT_TOKEN_BUDGET): + """Expand a node by walking to its children, collecting content.""" + if not index or "nodes" not in index: + return [] + + node_map = {n["id"]: n for n in index["nodes"]} + collected = [] + tokens_used = 0 + + def walk(nid, budget_remaining): + nonlocal tokens_used + if budget_remaining <= 0: + return + + meta = node_map.get(nid) + if not meta: + return + + content = load_node_content(nid) + if not content: + content = meta.get("summary", "") + + t = estimate_tokens(content) + if t > budget_remaining: + # Truncate to fit budget + char_limit = budget_remaining * CHARS_PER_TOKEN + content = content[:char_limit] + "..." + t = budget_remaining + + collected.append({ + "node_id": nid, + "depth": meta.get("depth", 0), + "content": content, + "tokens": t, + "created_at": meta.get("created_at", ""), + }) + tokens_used += t + + # Expand children if above target depth + current_depth = meta.get("depth", 0) + if current_depth > target_depth: + children = meta.get("children", []) + for child_id in children: + if tokens_used >= token_budget: + break + walk(child_id, token_budget - tokens_used) + + walk(node_id, token_budget) + return collected + + +# ── Assembly ───────────────────────────────────────────────────────────────── + +def assemble_answer(query, expanded_nodes): + """Assemble expanded node content into a cited answer.""" + if not expanded_nodes: + return "No relevant information found in the memory DAG." 
+ + lines = [] + sources = [] + for node in expanded_nodes: + nid = node["node_id"] + content = node["content"].strip() + depth = node["depth"] + created = node.get("created_at", "unknown")[:10] + depth_label = {0: "detail", 1: "summary", 2: "arc", 3: "durable"}.get(depth, f"d{depth}") + + lines.append(f" [{nid}] ({depth_label}) {content[:200]}") + sources.append(f" [{nid}] \"{content[:60]}...\" ({created})") + + answer = f"Recall: \"{query}\" — {len(expanded_nodes)} sources\n\n" + answer += "\n".join(lines) + answer += "\n\n Sources:\n" + answer += "\n".join(sources) + return answer + + +# ── Trace ──────────────────────────────────────────────────────────────────── + +def trace_ancestors(node_id, index): + """Walk from a node up to its ancestors (parents).""" + if not index or "nodes" not in index: + return [] + + # Build reverse parent map + parent_map = {} + for node in index["nodes"]: + for child_id in node.get("children", []): + parent_map[child_id] = node["id"] + + chain = [] + current = node_id + visited = set() + while current and current not in visited: + visited.add(current) + node_map = {n["id"]: n for n in index["nodes"]} + meta = node_map.get(current) + if not meta: + break + content = load_node_content(current) + chain.append({ + "node_id": current, + "depth": meta.get("depth", 0), + "summary": meta.get("summary", ""), + "content_preview": (content or "")[:200], + "created_at": meta.get("created_at", ""), + }) + current = parent_map.get(current) + + return chain + + +# ── Cache (LRU) ────────────────────────────────────────────────────────────── + +class RecallCache: + """Simple LRU cache for recall results.""" + + def __init__(self, max_size=DEFAULT_CACHE_SIZE): + self.max_size = max_size + self.cache = OrderedDict() + self._load() + + def _cache_path(self): + return STATE_DIR / "cache.json" + + def _load(self): + p = self._cache_path() + if p.exists(): + try: + data = json.loads(p.read_text()) + for k, v in data.items(): + self.cache[k] = v + 
except Exception: + pass + + def _save(self): + STATE_DIR.mkdir(parents=True, exist_ok=True) + # Keep only max_size entries + while len(self.cache) > self.max_size: + self.cache.popitem(last=False) + self._cache_path().write_text(json.dumps(dict(self.cache), indent=2)) + + def get(self, query): + key = query.lower().strip() + if key in self.cache: + self.cache.move_to_end(key) + return self.cache[key] + return None + + def put(self, query, result): + key = query.lower().strip() + self.cache[key] = result + self.cache.move_to_end(key) + self._save() + + def size(self): + return len(self.cache) + + +# ── State management ───────────────────────────────────────────────────────── + +def load_state(): + if STATE_PATH.exists(): + import re + state = {} + text = STATE_PATH.read_text() + for line in text.splitlines(): + line = line.strip() + if line.startswith("#") or not line: + continue + m = re.match(r'^(\w[\w_]*):\s*(.*)', line) + if m: + state[m.group(1)] = m.group(2).strip().strip('"') + return state + return {} + + +def save_state(state): + STATE_DIR.mkdir(parents=True, exist_ok=True) + lines = [] + for k, v in state.items(): + if isinstance(v, list): + lines.append(f"{k}:") + for item in v: + if isinstance(item, dict): + lines.append(f" - {json.dumps(item)}") + else: + lines.append(f" - {item}") + else: + lines.append(f"{k}: \"{v}\"" if isinstance(v, str) else f"{k}: {v}") + STATE_PATH.write_text("\n".join(lines) + "\n") + + +def update_state_after_recall(query, sources_used, tokens_assembled, cache_hit): + state = load_state() + now = datetime.now(timezone.utc).strftime("%Y-%m-%dT%H:%M:%S.%f") + state["last_query"] = query + state["last_query_at"] = now + total = int(state.get("total_recalls", 0)) + 1 + state["total_recalls"] = str(total) + cache = RecallCache() + state["cache_size"] = str(cache.size()) + save_state(state) + + +# ── Commands ───────────────────────────────────────────────────────────────── + +def cmd_query(args): + """Search DAG, expand matches, 
assemble cited answer.""" + index = load_index() + if index is None: + print("DAG index not found at", INDEX_PATH) + print("Run memory-dag-compactor first: python3 compact.py --compact") + return 1 + + cache = RecallCache() + + # Check cache + if not args.no_cache: + cached = cache.get(args.query) + if cached: + print(cached["answer"]) + update_state_after_recall(args.query, cached["sources"], cached["tokens"], True) + return 0 + + # Search + results = search_fts(args.query, limit=args.top * 3) + if not results: + print(f"No results found for: \"{args.query}\"") + return 0 + + # Score + results = score_results(results, index) + top_results = results[:args.top] + + # Expand + all_expanded = [] + budget = args.token_budget + for r in top_results: + expanded = expand_node( + r["node_id"], index, + target_depth=args.depth, + token_budget=budget, + ) + for e in expanded: + budget -= e["tokens"] + all_expanded.extend(expanded) + if budget <= 0: + break + + # Deduplicate + seen = set() + unique = [] + for e in all_expanded: + if e["node_id"] not in seen: + seen.add(e["node_id"]) + unique.append(e) + + # Assemble + answer = assemble_answer(args.query, unique) + + if args.format == "json": + out = { + "query": args.query, + "sources": len(unique), + "tokens_assembled": sum(e["tokens"] for e in unique), + "nodes": unique, + } + print(json.dumps(out, indent=2)) + else: + print(answer) + + # Cache + state + total_tokens = sum(e["tokens"] for e in unique) + cache.put(args.query, {"answer": answer, "sources": len(unique), "tokens": total_tokens}) + update_state_after_recall(args.query, len(unique), total_tokens, False) + return 0 + + +def cmd_expand(args): + """Expand a specific node, showing content and children.""" + index = load_index() + if index is None: + print("DAG index not found. 
Run memory-dag-compactor first.") + return 1 + + node_map = {n["id"]: n for n in index.get("nodes", [])} + meta = node_map.get(args.expand) + if not meta: + print(f"Node not found: {args.expand}") + return 1 + + content = load_node_content(args.expand) + children = meta.get("children", []) + depth = meta.get("depth", 0) + + if args.format == "json": + print(json.dumps({ + "node_id": args.expand, + "depth": depth, + "summary": meta.get("summary", ""), + "content": content, + "children": children, + "created_at": meta.get("created_at", ""), + }, indent=2)) + else: + depth_label = {0: "detail", 1: "summary", 2: "arc", 3: "durable"}.get(depth, f"d{depth}") + print(f"Node: {args.expand} (depth {depth} — {depth_label})") + print(f"Created: {meta.get('created_at', 'unknown')}") + print(f"Summary: {meta.get('summary', 'n/a')}") + print(f"Children: {len(children)}") + if children: + for c in children: + cmeta = node_map.get(c, {}) + print(f" → {c} ({cmeta.get('summary', '')[:60]})") + print() + print("Content:") + print(content or "(empty)") + return 0 + + +def cmd_trace(args): + """Show full ancestor chain from node to root.""" + index = load_index() + if index is None: + print("DAG index not found. 
Run memory-dag-compactor first.") + return 1 + + chain = trace_ancestors(args.trace, index) + if not chain: + print(f"Node not found: {args.trace}") + return 1 + + if args.format == "json": + print(json.dumps(chain, indent=2)) + else: + print(f"Trace: {args.trace} → root ({len(chain)} nodes)") + print() + for i, node in enumerate(chain): + indent = " " * i + depth_label = {0: "detail", 1: "summary", 2: "arc", 3: "durable"}.get( + node["depth"], f"d{node['depth']}" + ) + print(f"{indent}{'└── ' if i > 0 else ''}{node['node_id']} (d{node['depth']} — {depth_label})") + print(f"{indent} {node['summary'][:80]}") + if node["created_at"]: + print(f"{indent} Created: {node['created_at'][:10]}") + return 0 + + +def cmd_recent(args): + """Show nodes created within the specified time window.""" + index = load_index() + if index is None: + print("DAG index not found. Run memory-dag-compactor first.") + return 1 + + cutoff = datetime.now(timezone.utc) - timedelta(hours=args.hours) + recent = [] + for node in index.get("nodes", []): + created = node.get("created_at") + if created: + try: + ct = datetime.fromisoformat(created.replace("Z", "+00:00")) + if ct >= cutoff: + recent.append(node) + except Exception: + pass + + recent.sort(key=lambda n: n.get("created_at", ""), reverse=True) + + if args.format == "json": + print(json.dumps(recent, indent=2)) + else: + print(f"Recent nodes (last {args.hours}h): {len(recent)}") + print() + for node in recent: + depth_label = {0: "detail", 1: "summary", 2: "arc", 3: "durable"}.get( + node.get("depth", 0), "?" 
+ ) + print(f" {node['id']} d{node.get('depth', '?')} ({depth_label}) {node.get('created_at', '')[:16]}") + print(f" {node.get('summary', '')[:80]}") + return 0 + + +def cmd_status(args): + """Print last recall summary.""" + state = load_state() + cache = RecallCache() + + if args.format == "json": + state["cache_size"] = cache.size() + print(json.dumps(state, indent=2)) + else: + print("DAG Recall Status") + print("─" * 50) + print(f" Last query: {state.get('last_query', 'none')}") + print(f" Last query at: {state.get('last_query_at', 'never')}") + print(f" Total recalls: {state.get('total_recalls', 0)}") + print(f" Cache size: {cache.size()} / {DEFAULT_CACHE_SIZE}") + # Check if DAG exists + if INDEX_PATH.exists(): + index = load_index() + n = len(index.get("nodes", [])) if index else 0 + print(f" DAG nodes: {n}") + else: + print(f" DAG: not found ({DAG_DIR})") + return 0 + + +# ── Main ───────────────────────────────────────────────────────────────────── + +def main(): + parser = argparse.ArgumentParser(description="DAG Recall — walk the memory DAG to recall context") + parser.add_argument("--query", type=str, help="Search query to recall information about") + parser.add_argument("--expand", type=str, help="Expand a specific node by ID") + parser.add_argument("--trace", type=str, help="Trace ancestor chain for a node") + parser.add_argument("--recent", action="store_true", help="Show recent nodes") + parser.add_argument("--status", action="store_true", help="Show recall status") + parser.add_argument("--depth", type=int, default=DEFAULT_MAX_DEPTH, + help=f"Target expansion depth (default: {DEFAULT_MAX_DEPTH} = expand to leaf)") + parser.add_argument("--top", type=int, default=DEFAULT_TOP_N, + help=f"Number of top results to expand (default: {DEFAULT_TOP_N})") + parser.add_argument("--token-budget", type=int, default=DEFAULT_TOKEN_BUDGET, + help=f"Max tokens to assemble (default: {DEFAULT_TOKEN_BUDGET})") + parser.add_argument("--hours", type=int, default=24, 
help="Hours window for --recent") + parser.add_argument("--no-cache", action="store_true", help="Skip cache lookup") + parser.add_argument("--format", choices=["text", "json"], default="text") + + args = parser.parse_args() + + if args.query: + return cmd_query(args) + elif args.expand: + return cmd_expand(args) + elif args.trace: + return cmd_trace(args) + elif args.recent: + return cmd_recent(args) + elif args.status: + return cmd_status(args) + else: + parser.print_help() + return 1 + + +if __name__ == "__main__": + sys.exit(main())