From 660ffed8a026a6a33ec522b5c095189504997bbb Mon Sep 17 00:00:00 2001 From: Alex Rockwell Date: Sat, 7 Mar 2026 01:15:51 -0500 Subject: [PATCH 1/8] feat(models): introduce Document dataclass, replace TypedDict cast zoo (Issue #ARCH-24) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Introduce a proper Document domain object (@dataclass) that replaces the scattered TypedDict projections (DocumentRow, DocumentListItem, RecentDocumentItem, DeletedDocumentItem, ChildDocumentItem, SupersedeCandidate) with a single type constructed via factory methods. Phase 1 — Document dataclass (emdx/models/document.py): - @dataclass(slots=True) with all 15 document fields - from_row() / from_partial_row() factories with datetime parsing - __getitem__ / .get() / keys() / items() dict-compat layer so all 158 existing bracket-access sites keep working - to_dict() with datetime→ISO serialization for JSON output - 31 unit tests covering construction, parsing, compat, serialization Phase 1b — SearchHit dataclass (emdx/models/search.py): - Wraps Document + snippet + rank for search results - Same dict-compat interface with field fallthrough Phase 2 — Wire into database layer: - database/documents.py: all functions return Document instead of TypedDict casts. Removed _parse_doc_datetimes() (absorbed into Document.from_row()) - database/search.py: returns list[SearchHit] instead of list[SearchResult] - database/__init__.py: SQLiteDatabase methods updated to return Document / SearchHit - commands/context.py: switched from DocumentRow to Document 25 cast() calls eliminated. 2068 tests passing, zero breakage. 
Co-Authored-By: Claude Opus 4.6 --- emdx/commands/context.py | 9 +- emdx/database/__init__.py | 29 ++-- emdx/database/documents.py | 114 +++--------- emdx/database/search.py | 19 +- emdx/models/document.py | 160 +++++++++++++++++ emdx/models/search.py | 92 ++++++++++ tests/test_document_model.py | 325 +++++++++++++++++++++++++++++++++++ 7 files changed, 626 insertions(+), 122 deletions(-) create mode 100644 emdx/models/document.py create mode 100644 emdx/models/search.py create mode 100644 tests/test_document_model.py diff --git a/emdx/commands/context.py b/emdx/commands/context.py index 2d211211..782f7ad7 100644 --- a/emdx/commands/context.py +++ b/emdx/commands/context.py @@ -10,14 +10,15 @@ import json from dataclasses import dataclass, field -from typing import TYPE_CHECKING, cast +from typing import TYPE_CHECKING import typer from rich.console import Console from ..database import db from ..database.document_links import get_links_for_document -from ..database.types import DocumentLinkDetail, DocumentRow +from ..database.types import DocumentLinkDetail +from ..models.document import Document if TYPE_CHECKING: from ..services.hybrid_search import HybridSearchResult @@ -89,7 +90,7 @@ def compute_link_score( # ── Document fetching (no access tracking) ─────────────────────────── -def _fetch_document(doc_id: int) -> DocumentRow | None: +def _fetch_document(doc_id: int) -> Document | None: """Fetch a document by ID without updating access tracking.""" with db.get_connection() as conn: cursor = conn.execute( @@ -98,7 +99,7 @@ def _fetch_document(doc_id: int) -> DocumentRow | None: ) row = cursor.fetchone() if row: - return cast(DocumentRow, dict(row)) + return Document.from_row(row) return None diff --git a/emdx/database/__init__.py b/emdx/database/__init__.py index b67df90b..7872d9f2 100644 --- a/emdx/database/__init__.py +++ b/emdx/database/__init__.py @@ -16,6 +16,8 @@ from pathlib import Path from typing import Any, Union, cast +from ..models.document import 
Document +from ..models.search import SearchHit from .connection import DatabaseConnection, db_connection from .documents import ( delete_document, @@ -32,11 +34,6 @@ from .search import search_documents from .types import ( DatabaseStats, - DeletedDocumentItem, - DocumentListItem, - DocumentRow, - RecentDocumentItem, - SearchResult, ) @@ -136,7 +133,7 @@ def save_document( return doc_id - def get_document(self, identifier: Union[str, int]) -> DocumentRow | None: + def get_document(self, identifier: Union[str, int]) -> Document | None: """Get a document by ID or title.""" if not self._uses_custom_path: return get_document(identifier) @@ -167,9 +164,9 @@ def get_document(self, identifier: Union[str, int]) -> DocumentRow | None: ) conn.commit() row = cursor.fetchone() - return cast(DocumentRow, dict(row)) if row else None + return Document.from_row(row) if row else None - def list_documents(self, project: str | None = None, limit: int = 50) -> list[DocumentListItem]: + def list_documents(self, project: str | None = None, limit: int = 50) -> list[Document]: """List documents with optional filters.""" if not self._uses_custom_path: return list_documents(project, limit) @@ -189,7 +186,7 @@ def list_documents(self, project: str | None = None, limit: int = 50) -> list[Do f"FROM documents WHERE {where_clause} ORDER BY id DESC LIMIT ?", params, ) - return [cast(DocumentListItem, dict(row)) for row in cursor.fetchall()] + return [Document.from_partial_row(row) for row in cursor.fetchall()] def update_document(self, doc_id: int, title: str, content: str) -> bool: """Update a document.""" @@ -240,7 +237,7 @@ def delete_document(self, identifier: Union[str, int], hard_delete: bool = False conn.commit() return bool(cursor.rowcount > 0) - def get_recent_documents(self, limit: int = 10) -> list[RecentDocumentItem]: + def get_recent_documents(self, limit: int = 10) -> list[Document]: """Get recently accessed documents.""" if not self._uses_custom_path: return 
get_recent_documents(limit) @@ -252,7 +249,7 @@ def get_recent_documents(self, limit: int = 10) -> list[RecentDocumentItem]: "FROM documents WHERE is_deleted = FALSE ORDER BY accessed_at DESC LIMIT ?", (limit,), ) - return [cast(RecentDocumentItem, dict(row)) for row in cursor.fetchall()] + return [Document.from_partial_row(row) for row in cursor.fetchall()] def get_stats(self, project: str | None = None) -> DatabaseStats: """Get database statistics.""" @@ -280,7 +277,7 @@ def list_deleted_documents( self, days: int | None = None, limit: int = 50, - ) -> list[DeletedDocumentItem]: + ) -> list[Document]: """List soft-deleted documents.""" if not self._uses_custom_path: return list_deleted_documents(days, limit) @@ -300,7 +297,7 @@ def list_deleted_documents( "WHERE is_deleted = TRUE ORDER BY deleted_at DESC LIMIT ?", (limit,), ) - return [cast(DeletedDocumentItem, dict(row)) for row in cursor.fetchall()] + return [Document.from_partial_row(row) for row in cursor.fetchall()] def restore_document(self, identifier: Union[str, int]) -> bool: """Restore a soft-deleted document.""" @@ -353,7 +350,7 @@ def search_documents( created_before: str | None = None, modified_after: str | None = None, modified_before: str | None = None, - ) -> list[SearchResult]: + ) -> list[SearchHit]: """Search documents using FTS.""" if not self._uses_custom_path: return search_documents( @@ -384,7 +381,7 @@ def search_documents( f"FROM documents d WHERE {where_clause} ORDER BY d.id DESC LIMIT ?", params, ) - return [cast(SearchResult, dict(row)) for row in cursor.fetchall()] + return [SearchHit.from_row(row) for row in cursor.fetchall()] conditions = ["d.is_deleted = FALSE"] params = [] @@ -405,7 +402,7 @@ def search_documents( f"WHERE fts.documents_fts MATCH ? 
AND {where_clause} ORDER BY rank LIMIT ?", [safe_query] + params + [limit], ) - return [cast(SearchResult, dict(row)) for row in cursor.fetchall()] + return [SearchHit.from_row(row) for row in cursor.fetchall()] # Create global instance for backward compatibility diff --git a/emdx/database/documents.py b/emdx/database/documents.py index 612fd902..ce5835a1 100644 --- a/emdx/database/documents.py +++ b/emdx/database/documents.py @@ -5,42 +5,18 @@ from __future__ import annotations import logging -from typing import Any, Union, cast +from typing import Union, cast -from ..utils.datetime_utils import parse_datetime +from ..models.document import Document from .connection import db_connection from .types import ( - ChildDocumentItem, DatabaseStats, - DeletedDocumentItem, - DocumentListItem, - DocumentRow, MostViewedDoc, - RecentDocumentItem, - SupersedeCandidate, ) logger = logging.getLogger(__name__) -def _parse_doc_datetimes( - doc: dict[str, Any], - fields: list[str] | None = None, -) -> dict[str, Any]: - """Parse datetime string fields in a document dictionary, in place. - - Returns the same dict for call-chaining convenience. Callers should - ``cast()`` to the concrete TypedDict *before* calling this so that - their local variable already carries the right type. 
- """ - if fields is None: - fields = ["created_at", "updated_at", "accessed_at", "deleted_at"] - for field in fields: - if field in doc and isinstance(doc[field], str): - doc[field] = parse_datetime(doc[field]) - return doc - - def save_document( title: str, content: str, @@ -92,7 +68,7 @@ def save_document( return doc_id -def get_document(identifier: Union[str, int]) -> DocumentRow | None: +def get_document(identifier: Union[str, int]) -> Document | None: """Get a document by ID or title""" with db_connection.get_connection() as conn: # Convert to string for consistent handling @@ -138,9 +114,7 @@ def get_document(identifier: Union[str, int]) -> DocumentRow | None: row = cursor.fetchone() if row: - raw = dict(row) - _parse_doc_datetimes(raw) - return cast(DocumentRow, raw) + return Document.from_row(row) return None @@ -150,7 +124,7 @@ def list_documents( parent_id: int | None = None, offset: int = 0, doc_type: str | None = "user", -) -> list[DocumentListItem]: +) -> list[Document]: """List documents with optional project and hierarchy filters. Args: @@ -164,7 +138,7 @@ def list_documents( doc_type: Filter by document type. 'user' (default), 'wiki', or None for all types. Returns: - List of document dictionaries + List of Document objects Raises: ValueError: If limit or offset is negative @@ -212,13 +186,7 @@ def list_documents( params, ) - # Convert rows and parse datetime strings - docs: list[DocumentListItem] = [] - for row in cursor.fetchall(): - raw = dict(row) - _parse_doc_datetimes(raw, ["created_at", "accessed_at", "archived_at"]) - docs.append(cast(DocumentListItem, raw)) - return docs + return [Document.from_partial_row(row) for row in cursor.fetchall()] def count_documents( @@ -437,7 +405,7 @@ def delete_document(identifier: Union[str, int], hard_delete: bool = False) -> b def get_recent_documents( limit: int = 10, doc_type: str | None = "user", -) -> list[RecentDocumentItem]: +) -> list[Document]: """Get recently accessed documents. 
Args: @@ -466,13 +434,7 @@ def get_recent_documents( params, ) - # Convert rows and parse datetime strings - docs: list[RecentDocumentItem] = [] - for row in cursor.fetchall(): - raw = dict(row) - _parse_doc_datetimes(raw) - docs.append(cast(RecentDocumentItem, raw)) - return docs + return [Document.from_partial_row(row) for row in cursor.fetchall()] def get_stats(project: str | None = None) -> DatabaseStats: @@ -545,7 +507,7 @@ def get_stats(project: str | None = None) -> DatabaseStats: return stats -def list_deleted_documents(days: int | None = None, limit: int = 50) -> list[DeletedDocumentItem]: +def list_deleted_documents(days: int | None = None, limit: int = 50) -> list[Document]: """List soft-deleted documents""" with db_connection.get_connection() as conn: if days: @@ -572,13 +534,7 @@ def list_deleted_documents(days: int | None = None, limit: int = 50) -> list[Del (limit,), ) - # Convert rows and parse datetime strings - docs: list[DeletedDocumentItem] = [] - for row in cursor.fetchall(): - raw = dict(row) - _parse_doc_datetimes(raw) - docs.append(cast(DeletedDocumentItem, raw)) - return docs + return [Document.from_partial_row(row) for row in cursor.fetchall()] def restore_document(identifier: Union[str, int]) -> bool: @@ -661,7 +617,7 @@ def find_supersede_candidate( title_threshold: float = 0.85, content: str | None = None, content_threshold: float = 0.5, -) -> SupersedeCandidate | None: +) -> Document | None: """Find a document that should be superseded by a new document with the given title. Uses title normalization and optional content similarity to find the best candidate. 
@@ -710,10 +666,10 @@ def find_supersede_candidate( """, ) - candidates = [] + candidates: list[tuple[Document, float, str]] = [] for row in cursor.fetchall(): - doc = dict(row) - normalized_existing = normalize_title(doc["title"]) + doc = Document.from_partial_row(row) + normalized_existing = normalize_title(doc.title) # Exact normalized match - always a candidate if normalized_existing == normalized_new: @@ -721,7 +677,7 @@ def find_supersede_candidate( continue # Fuzzy title match - needs content check - sim = title_similarity(title, doc["title"]) + sim = title_similarity(title, doc.title) if sim >= title_threshold: candidates.append((doc, sim, "fuzzy")) @@ -731,19 +687,16 @@ def find_supersede_candidate( # For exact matches, return the most recent one exact_matches = [c for c in candidates if c[2] == "exact"] if exact_matches: - raw = exact_matches[0][0] - _parse_doc_datetimes(raw) - return cast(SupersedeCandidate, raw) + return exact_matches[0][0] # For fuzzy matches, we need content similarity check if content and candidates: from ..services.similarity import compute_content_similarity for doc, _title_sim, _match_type in candidates: - content_sim = compute_content_similarity(content, doc["content"]) + content_sim = compute_content_similarity(content, doc.content) if content_sim >= content_threshold: - _parse_doc_datetimes(doc) - return cast(SupersedeCandidate, doc) + return doc return None @@ -772,7 +725,7 @@ def set_parent(doc_id: int, parent_id: int, relationship: str = "supersedes") -> return cursor.rowcount > 0 -def get_children(doc_id: int) -> list[ChildDocumentItem]: +def get_children(doc_id: int) -> list[Document]: """Get all child documents of a parent. 
Args: @@ -792,18 +745,13 @@ def get_children(doc_id: int) -> list[ChildDocumentItem]: (doc_id,), ) - docs: list[ChildDocumentItem] = [] - for row in cursor.fetchall(): - raw = dict(row) - _parse_doc_datetimes(raw) - docs.append(cast(ChildDocumentItem, raw)) - return docs + return [Document.from_partial_row(row) for row in cursor.fetchall()] def list_recent_documents( limit: int = 100, days: int = 7, -) -> list[DocumentRow]: +) -> list[Document]: """Get recent direct-save documents. Args: @@ -811,7 +759,7 @@ def list_recent_documents( days: Only include documents from the last N days Returns: - List of document dicts + List of Document objects """ from datetime import datetime, timedelta @@ -828,15 +776,10 @@ def list_recent_documents( """ cursor = conn.execute(query, (cutoff.isoformat(), limit)) - docs: list[DocumentRow] = [] - for row in cursor.fetchall(): - raw = dict(row) - _parse_doc_datetimes(raw) - docs.append(cast(DocumentRow, raw)) - return docs + return [Document.from_row(row) for row in cursor.fetchall()] -def get_docs_in_window(hours: int, limit: int = 100) -> list[DocumentListItem]: +def get_docs_in_window(hours: int, limit: int = 100) -> list[Document]: """Get documents created within a time window. 
Args: @@ -859,9 +802,4 @@ def get_docs_in_window(hours: int, limit: int = 100) -> list[DocumentListItem]: """, (f"-{hours}", limit), ) - docs: list[DocumentListItem] = [] - for row in cursor.fetchall(): - raw = dict(row) - _parse_doc_datetimes(raw, ["created_at", "accessed_at", "archived_at"]) - docs.append(cast(DocumentListItem, raw)) - return docs + return [Document.from_partial_row(row) for row in cursor.fetchall()] diff --git a/emdx/database/search.py b/emdx/database/search.py index e50dec26..d2442357 100644 --- a/emdx/database/search.py +++ b/emdx/database/search.py @@ -2,11 +2,10 @@ Search functionality for emdx documents using FTS5 """ -from typing import Any, cast +from __future__ import annotations -from ..utils.datetime_utils import parse_datetime +from ..models.search import SearchHit from .connection import db_connection -from .types import SearchResult def escape_fts5_query(query: str) -> str: @@ -48,7 +47,7 @@ def search_documents( modified_after: str | None = None, modified_before: str | None = None, doc_type: str | None = "user", -) -> list[SearchResult]: +) -> list[SearchHit]: """Search documents using FTS5 Args: @@ -59,7 +58,7 @@ def search_documents( doc_type: Filter by document type. 'user' (default), 'wiki', or None for all types. 
Returns: - List of document dictionaries with search results including snippets and ranking + List of SearchHit objects with document data, snippets, and ranking """ with db_connection.get_connection() as conn: # Build dynamic query with date filters @@ -130,12 +129,4 @@ def search_documents( cursor = conn.execute(base_query, params) - # Convert rows and parse datetime strings - docs: list[SearchResult] = [] - for row in cursor.fetchall(): - raw: dict[str, Any] = dict(row) - for field in ["created_at", "updated_at", "last_accessed"]: - if field in raw and isinstance(raw[field], str): - raw[field] = parse_datetime(raw[field]) - docs.append(cast(SearchResult, raw)) - return docs + return [SearchHit.from_row(row) for row in cursor.fetchall()] diff --git a/emdx/models/document.py b/emdx/models/document.py new file mode 100644 index 00000000..2605e5c9 --- /dev/null +++ b/emdx/models/document.py @@ -0,0 +1,160 @@ +"""Document domain model for emdx. + +Single source of truth for the Document type. Replaces the scattered +TypedDict projections (DocumentRow, DocumentListItem, RecentDocumentItem, +etc.) with a proper dataclass that supports: + +- Factory construction from sqlite3.Row with datetime parsing +- Backward-compatible bracket access (doc["title"]) for incremental migration +- Serialization to dict for JSON output +""" + +from __future__ import annotations + +import sqlite3 +from collections.abc import Iterator +from dataclasses import asdict, dataclass, fields +from datetime import datetime +from typing import Any + +from ..utils.datetime_utils import parse_datetime + +# Fields that store datetime values and should be parsed from SQLite strings. +_DATETIME_FIELDS: frozenset[str] = frozenset( + {"created_at", "updated_at", "accessed_at", "deleted_at", "archived_at"} +) + + +@dataclass(slots=True) +class Document: + """Core document domain object. + + Constructed via ``Document.from_row()`` at the database boundary. 
+ Supports ``doc["field"]`` and ``doc.get("field")`` for backward + compatibility with code that previously used TypedDict dicts. + """ + + id: int + title: str + content: str = "" + project: str | None = None + created_at: datetime | None = None + updated_at: datetime | None = None + accessed_at: datetime | None = None + access_count: int = 0 + deleted_at: datetime | None = None + is_deleted: bool = False + parent_id: int | None = None + relationship: str | None = None + archived_at: datetime | None = None + stage: str | None = None + doc_type: str = "user" + + # ── Dict-compatibility layer ────────────────────────────────────── + + def __getitem__(self, key: str) -> Any: + """Allow ``doc["title"]`` access for backward compatibility.""" + try: + return getattr(self, key) + except AttributeError: + raise KeyError(key) from None + + def get(self, key: str, default: Any = None) -> Any: + """Allow ``doc.get("title", "Untitled")`` for backward compatibility.""" + return getattr(self, key, default) + + def __contains__(self, key: object) -> bool: + """Allow ``"title" in doc`` checks.""" + if not isinstance(key, str): + return False + return key in self._field_names() + + def keys(self) -> list[str]: + """Return field names, for code that iterates dict keys.""" + return list(self._field_names()) + + def items(self) -> Iterator[tuple[str, Any]]: + """Yield (field_name, value) pairs, for dict-like iteration.""" + for name in self._field_names(): + yield name, getattr(self, name) + + def values(self) -> Iterator[Any]: + """Yield field values, for dict-like iteration.""" + for name in self._field_names(): + yield getattr(self, name) + + @classmethod + def _field_names(cls) -> frozenset[str]: + """Cached set of field names for this dataclass.""" + # Use the class-level cache if available. 
+ cache_attr = "_cached_field_names" + cached: frozenset[str] | None = cls.__dict__.get(cache_attr) + if cached is not None: + return cached + names = frozenset(f.name for f in fields(cls)) + # slots=True means we can't set arbitrary class attrs, so we + # store on the class __dict__ via type.__setattr__. + type.__setattr__(cls, cache_attr, names) + return names + + # ── Factory methods ─────────────────────────────────────────────── + + @classmethod + def from_row(cls, row: sqlite3.Row | dict[str, Any]) -> Document: + """Construct a Document from a full database row. + + Parses datetime string fields into ``datetime`` objects using + the centralized ``parse_datetime`` utility. Unknown columns in + the row are silently ignored (safe for SELECT * with extra cols). + + The ``is_deleted`` field is normalized to ``bool`` (SQLite stores + it as 0/1 integer). + """ + if isinstance(row, sqlite3.Row): + raw = dict(row) + else: + raw = dict(row) # defensive copy + + return cls._from_dict(raw) + + @classmethod + def from_partial_row(cls, row: sqlite3.Row | dict[str, Any]) -> Document: + """Construct a Document from a partial SELECT. + + Missing fields get their dataclass defaults (empty string for + content, None for optional fields, 0 for counts, etc.). + Functionally identical to ``from_row`` — both tolerate missing + columns — but the separate name signals intent to callers. 
+ """ + return cls.from_row(row) + + @classmethod + def _from_dict(cls, raw: dict[str, Any]) -> Document: + """Internal: build a Document from a raw dict, parsing datetimes.""" + known = cls._field_names() + kwargs: dict[str, Any] = {} + for key, value in raw.items(): + if key not in known: + continue + if key in _DATETIME_FIELDS and isinstance(value, str): + kwargs[key] = parse_datetime(value) + elif key == "is_deleted": + # SQLite stores boolean as 0/1 int + kwargs[key] = bool(value) + else: + kwargs[key] = value + return cls(**kwargs) + + # ── Serialization ───────────────────────────────────────────────── + + def to_dict(self) -> dict[str, Any]: + """Convert to a plain dict for JSON serialization. + + Datetime fields are formatted as ISO 8601 strings. + """ + result = asdict(self) + for key in _DATETIME_FIELDS: + val = result.get(key) + if isinstance(val, datetime): + result[key] = val.isoformat() + return result diff --git a/emdx/models/search.py b/emdx/models/search.py new file mode 100644 index 00000000..58e28b00 --- /dev/null +++ b/emdx/models/search.py @@ -0,0 +1,92 @@ +"""Search result domain model for emdx. + +Wraps a Document with search-specific metadata (snippet, rank). +Supports the same dict-compat interface as Document. +""" + +from __future__ import annotations + +import sqlite3 +from collections.abc import Iterator +from dataclasses import dataclass +from typing import Any + +from .document import Document + + +@dataclass(slots=True) +class SearchHit: + """A search result: a Document plus search metadata. + + Supports ``hit["title"]`` bracket access for backward compatibility + with code that consumed SearchResult TypedDicts. 
+ """ + + doc: Document + snippet: str | None = None + rank: float = 0.0 + + # ── Dict-compatibility layer ────────────────────────────────────── + + def __getitem__(self, key: str) -> Any: + """Access document fields or search metadata via bracket notation.""" + if key == "snippet": + return self.snippet + if key == "rank": + return self.rank + return self.doc[key] + + def get(self, key: str, default: Any = None) -> Any: + """Dict-compat .get() that checks search fields then document fields.""" + if key == "snippet": + return self.snippet + if key == "rank": + return self.rank + return self.doc.get(key, default) + + def __contains__(self, key: object) -> bool: + if key in ("snippet", "rank"): + return True + return key in self.doc + + def keys(self) -> list[str]: + return self.doc.keys() + ["snippet", "rank"] + + def items(self) -> Iterator[tuple[str, Any]]: + yield from self.doc.items() + yield "snippet", self.snippet + yield "rank", self.rank + + def values(self) -> Iterator[Any]: + yield from self.doc.values() + yield self.snippet + yield self.rank + + # ── Factory ─────────────────────────────────────────────────────── + + @classmethod + def from_row(cls, row: sqlite3.Row | dict[str, Any]) -> SearchHit: + """Construct from a search query row. + + Expects document fields plus ``snippet`` and ``rank`` columns. 
+ """ + if isinstance(row, sqlite3.Row): + raw = dict(row) + else: + raw = dict(row) + + snippet = raw.pop("snippet", None) + rank_val = raw.pop("rank", 0.0) + rank = float(rank_val) if rank_val is not None else 0.0 + + doc = Document.from_row(raw) + return cls(doc=doc, snippet=snippet, rank=rank) + + # ── Serialization ───────────────────────────────────────────────── + + def to_dict(self) -> dict[str, Any]: + """Convert to plain dict for JSON serialization.""" + result = self.doc.to_dict() + result["snippet"] = self.snippet + result["rank"] = self.rank + return result diff --git a/tests/test_document_model.py b/tests/test_document_model.py new file mode 100644 index 00000000..96ce9f53 --- /dev/null +++ b/tests/test_document_model.py @@ -0,0 +1,325 @@ +"""Tests for the Document dataclass domain model.""" + +from __future__ import annotations + +from datetime import datetime, timezone + +import pytest + +from emdx.models.document import _DATETIME_FIELDS, Document +from emdx.models.search import SearchHit + +# ── Construction ────────────────────────────────────────────────────── + + +class TestDocumentConstruction: + def test_minimal_construction(self) -> None: + doc = Document(id=1, title="Hello") + assert doc.id == 1 + assert doc.title == "Hello" + assert doc.content == "" + assert doc.project is None + assert doc.access_count == 0 + assert doc.is_deleted is False + assert doc.doc_type == "user" + + def test_full_construction(self) -> None: + now = datetime.now() + doc = Document( + id=42, + title="Full doc", + content="body", + project="emdx", + created_at=now, + updated_at=now, + accessed_at=now, + access_count=5, + deleted_at=None, + is_deleted=False, + parent_id=10, + relationship="supersedes", + archived_at=None, + stage="draft", + doc_type="wiki", + ) + assert doc.id == 42 + assert doc.project == "emdx" + assert doc.parent_id == 10 + assert doc.stage == "draft" + assert doc.doc_type == "wiki" + + +# ── from_row / from_partial_row 
# ── from_row / from_partial_row ─────────────────────────────────────


class TestFromRow:
    def test_from_dict_full_row(self) -> None:
        raw = {
            "id": 1,
            "title": "Test",
            "content": "body",
            "project": "proj",
            "created_at": "2025-01-15 10:30:00",
            "updated_at": "2025-01-16T12:00:00",
            "accessed_at": None,
            "access_count": 3,
            "deleted_at": None,
            "is_deleted": 0,
            "parent_id": None,
            "relationship": None,
            "archived_at": None,
            "stage": None,
            "doc_type": "user",
        }
        doc = Document.from_row(raw)
        assert doc.id == 1
        assert doc.title == "Test"
        assert isinstance(doc.created_at, datetime)
        assert doc.created_at.year == 2025
        assert doc.created_at.month == 1
        assert doc.created_at.day == 15
        assert isinstance(doc.updated_at, datetime)
        assert doc.is_deleted is False

    def test_from_dict_parses_sqlite_datetime(self) -> None:
        raw = {"id": 1, "title": "T", "created_at": "2025-03-07 14:30:00"}
        doc = Document.from_row(raw)
        assert isinstance(doc.created_at, datetime)
        assert doc.created_at.hour == 14

    def test_from_dict_parses_iso_datetime(self) -> None:
        raw = {"id": 1, "title": "T", "created_at": "2025-03-07T14:30:00Z"}
        doc = Document.from_row(raw)
        assert isinstance(doc.created_at, datetime)
        assert doc.created_at.tzinfo == timezone.utc

    def test_from_dict_is_deleted_bool_coercion(self) -> None:
        doc_false = Document.from_row({"id": 1, "title": "T", "is_deleted": 0})
        assert doc_false.is_deleted is False

        doc_true = Document.from_row({"id": 1, "title": "T", "is_deleted": 1})
        assert doc_true.is_deleted is True

    def test_from_dict_ignores_unknown_columns(self) -> None:
        raw = {"id": 1, "title": "T", "some_extra_column": "ignored"}
        doc = Document.from_row(raw)
        assert doc.id == 1
        assert doc.title == "T"

    def test_from_partial_row_fills_defaults(self) -> None:
        raw = {"id": 1, "title": "T"}
        doc = Document.from_partial_row(raw)
        assert doc.content == ""
        assert doc.project is None
        assert doc.access_count == 0
        assert doc.is_deleted is False
        assert doc.doc_type == "user"

    def test_from_row_defensive_copy(self) -> None:
        """from_row should not mutate the input dict."""
        raw = {"id": 1, "title": "T", "created_at": "2025-01-01 00:00:00"}
        original_val = raw["created_at"]
        Document.from_row(raw)
        assert raw["created_at"] == original_val

    def test_from_dict_datetime_already_parsed(self) -> None:
        """If a datetime field is already a datetime object, pass through."""
        now = datetime.now()
        raw = {"id": 1, "title": "T", "created_at": now}
        doc = Document.from_row(raw)
        assert doc.created_at is now


# ── Dict compatibility ──────────────────────────────────────────────


class TestDictCompat:
    @pytest.fixture()
    def doc(self) -> Document:
        return Document(
            id=42,
            title="My Doc",
            content="body",
            project="emdx",
            access_count=5,
        )

    def test_getitem(self, doc: Document) -> None:
        assert doc["id"] == 42
        assert doc["title"] == "My Doc"
        assert doc["content"] == "body"
        assert doc["project"] == "emdx"

    def test_getitem_raises_keyerror(self, doc: Document) -> None:
        with pytest.raises(KeyError, match="nonexistent"):
            doc["nonexistent"]

    def test_get_with_default(self, doc: Document) -> None:
        assert doc.get("title") == "My Doc"
        assert doc.get("nonexistent") is None
        assert doc.get("nonexistent", "fallback") == "fallback"

    def test_contains(self, doc: Document) -> None:
        assert "title" in doc
        assert "id" in doc
        assert "nonexistent" not in doc
        assert 42 not in doc  # type: ignore[operator] # non-string

    def test_keys(self, doc: Document) -> None:
        k = doc.keys()
        assert "id" in k
        assert "title" in k
        assert "content" in k
        assert "doc_type" in k
        assert len(k) == 15  # all fields

    def test_items(self, doc: Document) -> None:
        pairs = dict(doc.items())
        assert pairs["id"] == 42
        assert pairs["title"] == "My Doc"
        assert pairs["project"] == "emdx"
        assert pairs["access_count"] == 5

    def test_values(self, doc: Document) -> None:
        vals = list(doc.values())
        assert 42 in vals
        assert "My Doc" in vals


# ── Serialization ───────────────────────────────────────────────────


class TestSerialization:
    def test_to_dict_basic(self) -> None:
        doc = Document(id=1, title="T", content="body", project="p")
        d = doc.to_dict()
        assert isinstance(d, dict)
        assert d["id"] == 1
        assert d["title"] == "T"
        assert d["project"] == "p"

    def test_to_dict_serializes_datetimes(self) -> None:
        now = datetime(2025, 3, 7, 14, 30, 0)
        doc = Document(id=1, title="T", created_at=now, updated_at=now)
        d = doc.to_dict()
        assert d["created_at"] == "2025-03-07T14:30:00"
        assert d["updated_at"] == "2025-03-07T14:30:00"

    def test_to_dict_none_datetimes(self) -> None:
        doc = Document(id=1, title="T")
        d = doc.to_dict()
        assert d["created_at"] is None
        assert d["updated_at"] is None

    def test_roundtrip_dict(self) -> None:
        """from_row(to_dict()) should produce an equivalent Document."""
        now = datetime(2025, 3, 7, 14, 30, 0)
        original = Document(
            id=1,
            title="Roundtrip",
            content="body",
            project="p",
            created_at=now,
            access_count=5,
            doc_type="wiki",
        )
        rebuilt = Document.from_row(original.to_dict())
        assert rebuilt.id == original.id
        assert rebuilt.title == original.title
        assert rebuilt.created_at == original.created_at
        assert rebuilt.access_count == original.access_count
        assert rebuilt.doc_type == original.doc_type


# ── SearchHit ───────────────────────────────────────────────────────


class TestSearchHit:
    def test_from_row(self) -> None:
        raw = {
            "id": 1,
            "title": "Found",
            "project": "p",
            "created_at": "2025-01-01 00:00:00",
            "updated_at": None,
            "snippet": "...match...",
            "rank": -2.5,
            "doc_type": "user",
        }
        hit = SearchHit.from_row(raw)
        assert hit.doc.id == 1
        assert hit.doc.title == "Found"
        assert hit.snippet == "...match..."
        assert hit.rank == -2.5

    def test_bracket_access_document_fields(self) -> None:
        doc = Document(id=1, title="T")
        hit = SearchHit(doc=doc, snippet="snip", rank=-1.0)
        assert hit["id"] == 1
        assert hit["title"] == "T"
        assert hit["snippet"] == "snip"
        assert hit["rank"] == -1.0

    def test_get_fallthrough(self) -> None:
        doc = Document(id=1, title="T", project="p")
        hit = SearchHit(doc=doc)
        assert hit.get("project") == "p"
        assert hit.get("snippet") is None
        assert hit.get("nonexistent", "fb") == "fb"

    def test_contains(self) -> None:
        doc = Document(id=1, title="T")
        hit = SearchHit(doc=doc)
        assert "title" in hit
        assert "snippet" in hit
        assert "rank" in hit
        assert "nonexistent" not in hit

    def test_keys_includes_search_fields(self) -> None:
        doc = Document(id=1, title="T")
        hit = SearchHit(doc=doc)
        k = hit.keys()
        assert "snippet" in k
        assert "rank" in k
        assert "id" in k

    def test_to_dict(self) -> None:
        doc = Document(id=1, title="T", project="p")
        hit = SearchHit(doc=doc, snippet="snip", rank=-1.5)
        d = hit.to_dict()
        assert d["id"] == 1
        assert d["title"] == "T"
        assert d["snippet"] == "snip"
        assert d["rank"] == -1.5

    def test_from_row_null_rank(self) -> None:
        raw = {"id": 1, "title": "T", "snippet": None, "rank": None}
        hit = SearchHit.from_row(raw)
        assert hit.rank == 0.0
        assert hit.snippet is None


# ── Edge cases ──────────────────────────────────────────────────────


class TestEdgeCases:
    def test_datetime_fields_constant(self) -> None:
        """Verify _DATETIME_FIELDS covers the expected fields."""
        assert _DATETIME_FIELDS == {
            "created_at",
            "updated_at",
            "accessed_at",
            "deleted_at",
            "archived_at",
        }

    def test_field_names_cached(self) -> None:
        """_field_names should be cached after first call."""
        names1 = Document._field_names()
        names2 = Document._field_names()
        assert names1 is names2

    def test_slots_prevent_arbitrary_attrs(self) -> None:
        # NOTE(review): the original patch is truncated mid-statement here;
        # body reconstructed from the test name and slots=True semantics —
        # confirm against the upstream commit.
        doc = Document(id=1, title="T")
        with pytest.raises(AttributeError):
            doc.nonexistent = "x"  # type: ignore[attr-defined]
title="T") + with pytest.raises(AttributeError): + doc.arbitrary_thing = "nope" # type: ignore[attr-defined] From 750df1d3197e6c297157ec6f28e30a52e7aa8fe7 Mon Sep 17 00:00:00 2001 From: Alex Rockwell Date: Sat, 7 Mar 2026 01:34:01 -0500 Subject: [PATCH 2/8] refactor: migrate consumers from doc["field"] to doc.field (Issue #ARCH-24) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Phase 3 of the data model refactor — migrate bracket access to attribute access across 10 consumer files and 9 test files. Converted files: - commands/core.py (43 sites — display_save_result, view, edit, delete, _find_all, _find_recent, _view_review, _print_view_header_*) - commands/context.py, history.py, tags.py, tasks.py, wiki.py, gist.py, briefing.py, trash.py - ui/activity/activity_data.py Updated test mocks to return Document objects instead of raw dicts: - test_commands_core.py, test_commands_tags.py, test_commands_trash.py, test_gist.py, test_v028_regressions.py, test_activity_doc_type.py, test_activity_view.py, test_task_commands.py, test_view_review.py Co-Authored-By: Claude Opus 4.6 --- emdx/commands/briefing.py | 2 +- emdx/commands/context.py | 12 +- emdx/commands/core.py | 152 +++++++++--------- emdx/commands/gist.py | 14 +- emdx/commands/history.py | 6 +- emdx/commands/tags.py | 8 +- emdx/commands/tasks.py | 8 +- emdx/commands/trash.py | 14 +- emdx/commands/wiki.py | 13 +- emdx/ui/activity/activity_data.py | 4 +- tests/test_activity_doc_type.py | 44 +++--- tests/test_activity_view.py | 37 +++-- tests/test_commands_core.py | 245 +++++++++++++++++------------- tests/test_commands_tags.py | 11 +- tests/test_commands_trash.py | 49 +++--- tests/test_gist.py | 71 +++++---- tests/test_task_commands.py | 3 +- tests/test_v028_regressions.py | 21 ++- tests/test_view_review.py | 19 ++- 19 files changed, 401 insertions(+), 332 deletions(-) diff --git a/emdx/commands/briefing.py b/emdx/commands/briefing.py index 1b445357..780a20ce 100644 --- 
a/emdx/commands/briefing.py +++ b/emdx/commands/briefing.py @@ -450,7 +450,7 @@ def _briefing_save(hours: int, model: str | None) -> None: if docs: doc_lines = ["## Documents Created"] - doc_lines.extend(f"- #{d['id']}: {d['title']}" for d in docs[:15]) + doc_lines.extend(f"- #{d.id}: {d.title}" for d in docs[:15]) sections.append("\n".join(doc_lines)) sections.append( diff --git a/emdx/commands/context.py b/emdx/commands/context.py index 782f7ad7..bcb470aa 100644 --- a/emdx/commands/context.py +++ b/emdx/commands/context.py @@ -124,9 +124,9 @@ def traverse_graph( continue scored = ScoredDocument( doc_id=sid, - title=doc["title"], - content=doc["content"], - tokens=estimate_tokens(doc["content"]), + title=doc.title, + content=doc.content, + tokens=estimate_tokens(doc.content), hops=0, score=1.0, path=[sid], @@ -161,9 +161,9 @@ def traverse_graph( reason = f"{depth}-hop {method} from #{source_id}" visited[target_id] = ScoredDocument( doc_id=target_id, - title=doc["title"], - content=doc["content"], - tokens=estimate_tokens(doc["content"]), + title=doc.title, + content=doc.content, + tokens=estimate_tokens(doc.content), hops=depth, score=hop_score, path=source.path + [target_id], diff --git a/emdx/commands/core.py b/emdx/commands/core.py index f937c9c4..d748ccb8 100644 --- a/emdx/commands/core.py +++ b/emdx/commands/core.py @@ -9,7 +9,6 @@ import os import subprocess import tempfile -from collections.abc import Mapping from dataclasses import dataclass from datetime import datetime from pathlib import Path @@ -26,6 +25,7 @@ find_supersede_candidate, set_parent, ) +from emdx.models.document import Document from emdx.models.documents import ( delete_document, get_document, @@ -170,12 +170,12 @@ def display_save_result( doc_id: int, metadata: DocumentMetadata, applied_tags: list[str], - supersede_target: Mapping[str, Any] | None = None, + supersede_target: Document | None = None, ) -> None: """Display save result to user""" console.print(f"[green]✅ Saved as 
#{doc_id}:[/green] [cyan]{metadata.title}[/cyan]") if supersede_target: - console.print(f" [dim]↳ Superseded #{supersede_target['id']}[/dim]") + console.print(f" [dim]↳ Superseded #{supersede_target.id}[/dim]") if metadata.project: console.print(f" [dim]Project:[/dim] {metadata.project}") if applied_tags: @@ -255,7 +255,7 @@ def save( # Step 5.5: If superseding, link the old doc as a child of the new doc if supersede_target: - set_parent(supersede_target["id"], doc_id, relationship="supersedes") + set_parent(supersede_target.id, doc_id, relationship="supersedes") # Step 6: Apply tags applied_tags = apply_tags(doc_id, tags) @@ -831,13 +831,13 @@ def _find_list_all( table.add_column("Views", justify="right", style="blue") for doc in docs: - created = doc["created_at"].strftime("%Y-%m-%d") if doc["created_at"] else "" + created = doc.created_at.strftime("%Y-%m-%d") if doc.created_at else "" table.add_row( - str(doc["id"]), - truncate_title(doc["title"]), - doc["project"] or "None", + str(doc.id), + truncate_title(doc.title), + doc.project or "None", created, - str(doc["access_count"]), + str(doc.access_count), ) console.print(table) @@ -856,7 +856,7 @@ def _find_recent( docs = get_recent_documents(limit=limit, doc_type=doc_type) if project: - docs = [d for d in docs if d.get("project") == project] + docs = [d for d in docs if d.project == project] if not docs: console.print("[yellow]No recently accessed documents found[/yellow]") @@ -885,14 +885,14 @@ def _find_recent( for doc in docs: accessed_str = "Never" - if doc["accessed_at"]: - accessed_str = doc["accessed_at"].strftime("%Y-%m-%d %H:%M") + if doc.accessed_at: + accessed_str = doc.accessed_at.strftime("%Y-%m-%d %H:%M") table.add_row( - str(doc["id"]), - truncate_title(doc["title"]), - doc["project"] or "None", + str(doc.id), + truncate_title(doc.title), + doc.project or "None", accessed_str, - str(doc["access_count"]), + str(doc.access_count), ) console.print(table) @@ -1631,24 +1631,24 @@ def view( # Record 
view event (non-critical, best-effort) from emdx.models.events import record_event - record_event("view", doc_id=doc["id"]) + record_event("view", doc_id=doc.id) - doc_tags = get_document_tags(doc["id"]) + doc_tags = get_document_tags(doc.id) # Fetch linked documents try: from emdx.database.document_links import get_links_for_document - doc_links = get_links_for_document(doc["id"]) + doc_links = get_links_for_document(doc.id) except Exception: doc_links = [] # JSON output if json_output: - content = doc["content"] + content = doc.content linked_docs = [] for link in doc_links: - if link["source_doc_id"] == doc["id"]: + if link["source_doc_id"] == doc.id: linked_docs.append( { "id": link["target_doc_id"], @@ -1667,15 +1667,15 @@ def view( } ) output = { - "id": doc["id"], - "title": doc["title"], + "id": doc.id, + "title": doc.title, "content": content, - "project": doc["project"], - "created_at": str(doc.get("created_at") or ""), - "updated_at": str(doc.get("updated_at") or ""), - "accessed_at": str(doc.get("accessed_at") or ""), - "access_count": doc["access_count"], - "parent_id": doc.get("parent_id"), + "project": doc.project, + "created_at": str(doc.created_at or ""), + "updated_at": str(doc.updated_at or ""), + "accessed_at": str(doc.accessed_at or ""), + "access_count": doc.access_count, + "parent_id": doc.parent_id, "tags": doc_tags, "linked_docs": linked_docs, "word_count": len(content.split()), @@ -1688,10 +1688,10 @@ def view( # Handle --links: show detailed link information if links: if not doc_links: - console.print(f"[yellow]No links found for document #{doc['id']}[/yellow]") + console.print(f"[yellow]No links found for document #{doc.id}[/yellow]") return - console.print(f"[bold]Links for #{doc['id']} '{doc['title']}':[/bold]\n") + console.print(f"[bold]Links for #{doc.id} '{doc.title}':[/bold]\n") from rich.table import Table as LinksTable table = LinksTable() @@ -1701,7 +1701,7 @@ def view( table.add_column("Method", style="dim", width=8) for link 
in doc_links: - if link["source_doc_id"] == doc["id"]: + if link["source_doc_id"] == doc.id: other_id = link["target_doc_id"] other_title = link["target_title"] else: @@ -1720,17 +1720,17 @@ def _render_output() -> None: else: _print_view_header_plain(doc, doc_tags) if doc_links: - _print_related_docs(doc["id"], doc_links, rich_mode) + _print_related_docs(doc.id, doc_links, rich_mode) print() if raw: - print(doc["content"]) + print(doc.content) elif rich_mode: from emdx.ui.markdown_config import MarkdownConfig - console.print(MarkdownConfig.create_markdown(doc["content"])) + console.print(MarkdownConfig.create_markdown(doc.content)) else: - print(doc["content"]) + print(doc.content) if no_pager: _render_output() @@ -1749,7 +1749,7 @@ def _render_output() -> None: raise typer.Exit(1) from e -def _view_review(doc: Mapping[str, Any]) -> None: +def _view_review(doc: Document) -> None: """Run an adversarial review of a document using an LLM. Finds similar documents via embeddings (if available) and prompts @@ -1767,9 +1767,9 @@ def _view_review(doc: Mapping[str, Any]) -> None: ) raise typer.Exit(1) - doc_id: int = doc["id"] - title: str = doc["title"] - content: str = doc["content"] + doc_id: int = doc.id + title: str = doc.title + content: str = doc.content console.print(f"[dim]Reviewing #{doc_id} '{escape(title)}'...[/dim]") @@ -1851,17 +1851,17 @@ def _view_review(doc: Mapping[str, Any]) -> None: console.print(f"\n[dim]Similar docs referenced: {id_list}[/dim]") -def _print_view_header_plain(doc: Mapping[str, Any], doc_tags: list[str]) -> None: +def _print_view_header_plain(doc: Document, doc_tags: list[str]) -> None: """Print a plain text header for machine-friendly output.""" - print(f"#{doc['id']} {doc['title']}") + print(f"#{doc.id} {doc.title}") meta = [] - if doc.get("project"): - meta.append(f"Project: {doc['project']}") - created = str(doc.get("created_at") or "")[:16] + if doc.project: + meta.append(f"Project: {doc.project}") + created = str(doc.created_at or 
"")[:16] if created: meta.append(f"Created: {created}") - updated = str(doc.get("updated_at") or "")[:16] + updated = str(doc.updated_at or "")[:16] if updated and updated != created: meta.append(f"Updated: {updated}") if doc_tags: @@ -1871,26 +1871,26 @@ def _print_view_header_plain(doc: Mapping[str, Any], doc_tags: list[str]) -> Non print("---") -def _print_view_header_rich(doc: Mapping[str, Any], doc_tags: list[str]) -> None: +def _print_view_header_rich(doc: Document, doc_tags: list[str]) -> None: """Print a rich panel header for document view, matching the TUI.""" - content = doc.get("content", "") + content = doc.content word_count = len(content.split()) char_count = len(content) line_count = content.count("\n") + 1 if content else 0 lines = [] - lines.append(f"[bold cyan]#{doc['id']}[/bold cyan] [bold]{doc['title']}[/bold]") + lines.append(f"[bold cyan]#{doc.id}[/bold cyan] [bold]{doc.title}[/bold]") lines.append("") - if doc.get("project"): - lines.append(f" [dim]Project:[/dim] {doc['project']}") + if doc.project: + lines.append(f" [dim]Project:[/dim] {doc.project}") if doc_tags: lines.append(f" [dim]Tags:[/dim] {format_tags(doc_tags)}") - created = str(doc.get("created_at") or "")[:16] - updated = str(doc.get("updated_at") or "")[:16] - accessed = str(doc.get("accessed_at") or "")[:16] + created = str(doc.created_at or "")[:16] + updated = str(doc.updated_at or "")[:16] + accessed = str(doc.accessed_at or "")[:16] if created: lines.append(f" [dim]Created:[/dim] {created}") @@ -1900,14 +1900,14 @@ def _print_view_header_rich(doc: Mapping[str, Any], doc_tags: list[str]) -> None lines.append(f" [dim]Accessed:[/dim] {accessed}") lines.append( - f" [dim]Views:[/dim] {doc.get('access_count', 0)} " + f" [dim]Views:[/dim] {doc.access_count} " f"[dim]Words:[/dim] {word_count} " f"[dim]Lines:[/dim] {line_count} " f"[dim]Chars:[/dim] {char_count}" ) - if doc.get("parent_id"): - lines.append(f" [dim]Parent:[/dim] #{doc['parent_id']}") + if doc.parent_id: + 
lines.append(f" [dim]Parent:[/dim] #{doc.parent_id}") panel = Panel( "\n".join(lines), @@ -1976,10 +1976,10 @@ def edit( # Quick title update without editing content if title: - success = update_document(doc["id"], title, doc["content"]) + success = update_document(doc.id, title, doc.content) if success: console.print( - f"[green]✅ Updated title of #{doc['id']} to:[/green] [cyan]{title}[/cyan]" + f"[green]✅ Updated title of #{doc.id} to:[/green] [cyan]{title}[/cyan]" ) # Flag wiki articles sourced from this doc as stale try: @@ -1987,7 +1987,7 @@ def edit( check_doc_staleness, ) - check_doc_staleness(doc["id"]) + check_doc_staleness(doc.id) except Exception: pass # Wiki tables may not exist; non-critical else: @@ -2002,9 +2002,9 @@ def edit( # Create temporary file with current content with tempfile.NamedTemporaryFile(mode="w", suffix=".md", delete=False) as tmp_file: # Write header comment - tmp_file.write(f"# Editing: {doc['title']} (ID: {doc['id']})\n") - tmp_file.write(f"# Project: {doc['project'] or 'None'}\n") - tmp_file.write(f"# Created: {str(doc['created_at'] or '')[:16]}\n") + tmp_file.write(f"# Editing: {doc.title} (ID: {doc.id})\n") + tmp_file.write(f"# Project: {doc.project or 'None'}\n") + tmp_file.write(f"# Created: {str(doc.created_at or '')[:16]}\n") tmp_file.write("# Lines starting with '#' will be removed\n") tmp_file.write("#\n") tmp_file.write("# First line (after comments) will be used as the title\n") @@ -2012,8 +2012,8 @@ def edit( tmp_file.write("#\n") # Write title and content - tmp_file.write(f"{doc['title']}\n\n") - tmp_file.write(doc["content"]) + tmp_file.write(f"{doc.title}\n\n") + tmp_file.write(doc.content) tmp_file_path = tmp_file.name try: @@ -2054,17 +2054,17 @@ def edit( new_content = "".join(lines[content_start:]).strip() # Check if anything changed - if new_title == doc["title"] and new_content == doc["content"].strip(): + if new_title == doc.title and new_content == doc.content.strip(): console.print("[yellow]No changes 
made[/yellow]") return # Update document - success = update_document(doc["id"], new_title, new_content) + success = update_document(doc.id, new_title, new_content) if success: - console.print(f"[green]✅ Updated #{doc['id']}:[/green] [cyan]{new_title}[/cyan]") - if new_title != doc["title"]: - console.print(f" [dim]Title changed from:[/dim] {doc['title']}") + console.print(f"[green]✅ Updated #{doc.id}:[/green] [cyan]{new_title}[/cyan]") + if new_title != doc.title: + console.print(f" [dim]Title changed from:[/dim] {doc.title}") console.print(" [dim]Content updated[/dim]") # Flag wiki articles sourced from this doc as stale @@ -2073,7 +2073,7 @@ def edit( check_doc_staleness, ) - check_doc_staleness(doc["id"]) + check_doc_staleness(doc.id) except Exception: pass # Wiki tables may not exist; non-critical else: @@ -2137,10 +2137,10 @@ def delete( for doc in docs_to_delete: table.add_row( - str(doc["id"]), - doc["title"][:50] + "..." if len(doc["title"]) > 50 else doc["title"], - doc["project"] or "[dim]None[/dim]", - str(doc["created_at"] or "")[:10], + str(doc.id), + doc.title[:50] + "..." 
if len(doc.title) > 50 else doc.title, + doc.project or "[dim]None[/dim]", + str(doc.created_at or "")[:10], "[red]PERMANENT[/red]" if hard else "[yellow]Soft delete[/yellow]", ) @@ -2174,7 +2174,7 @@ def delete( failed = [] for doc in docs_to_delete: - success = delete_document(str(doc["id"]), hard_delete=hard) + success = delete_document(str(doc.id), hard_delete=hard) if success: deleted_count += 1 else: diff --git a/emdx/commands/gist.py b/emdx/commands/gist.py index f9c01a8f..11173b6a 100644 --- a/emdx/commands/gist.py +++ b/emdx/commands/gist.py @@ -203,14 +203,14 @@ def create( raise typer.Exit(1) # Prepare gist content - filename = sanitize_filename(doc["title"]) - content = doc["content"] + filename = sanitize_filename(doc.title) + content = doc.content # Use document title and metadata in description if not provided if not description: - description = f"{doc['title']} - emdx knowledge base" - if doc.get("project"): - description += f" (Project: {doc['project']})" + description = f"{doc.title} - emdx knowledge base" + if doc.project: + description += f" (Project: {doc.project})" # Create or update gist if update: @@ -229,7 +229,7 @@ def create( SET updated_at = CURRENT_TIMESTAMP WHERE gist_id = ? AND document_id = ? """, - (update, doc["id"]), + (update, doc.id), ) conn.commit() else: @@ -253,7 +253,7 @@ def create( INSERT INTO gists (document_id, gist_id, gist_url, is_public) VALUES (?, ?, ?, ?) 
""", - (doc["id"], gist_id, gist_url, public), + (doc.id, gist_id, gist_url, public), ) conn.commit() diff --git a/emdx/commands/history.py b/emdx/commands/history.py index fdb4d51a..ff0e517a 100644 --- a/emdx/commands/history.py +++ b/emdx/commands/history.py @@ -65,7 +65,7 @@ def history( json.dumps( { "doc_id": doc_id, - "title": doc["title"], + "title": doc.title, "versions": versions, }, indent=2, @@ -73,7 +73,7 @@ def history( ) return - table = Table(title=f"Version history for #{doc_id}: {doc['title']}") + table = Table(title=f"Version history for #{doc_id}: {doc.title}") table.add_column("Ver", justify="right", style="cyan") table.add_column("Date", style="dim") table.add_column("Delta", justify="right") @@ -167,7 +167,7 @@ def diff( old_version: int = row[0] old_content: str = row[2] - current_content: str = doc["content"] + current_content: str = doc.content old_lines = old_content.splitlines(keepends=True) new_lines = current_content.splitlines(keepends=True) diff --git a/emdx/commands/tags.py b/emdx/commands/tags.py index 624863e4..6cb5478e 100644 --- a/emdx/commands/tags.py +++ b/emdx/commands/tags.py @@ -62,7 +62,7 @@ def _add_tags_impl( suggestions = tagger.suggest_tags(doc_id) if suggestions: - console.print(f"\n[bold]Tag suggestions for #{doc_id}: {doc['title']}[/bold]\n") + console.print(f"\n[bold]Tag suggestions for #{doc_id}: {doc.title}[/bold]\n") table = Table(show_header=True, header_style="bold cyan") table.add_column("Tag", style="cyan") @@ -91,10 +91,10 @@ def _add_tags_impl( if not tags: current_tags = get_document_tags(doc_id) if current_tags: - console.print(f"\n[bold]Tags for #{doc_id}: {doc['title']}[/bold]") + console.print(f"\n[bold]Tags for #{doc_id}: {doc.title}[/bold]") console.print(f"[cyan]{format_tags(current_tags)}[/cyan]") else: - console.print(f"[yellow]No tags for #{doc_id}: {doc['title']}[/yellow]") + console.print(f"[yellow]No tags for #{doc_id}: {doc.title}[/yellow]") return # Add tags manually @@ -375,7 +375,7 @@ def 
batch( doc = get_document(str(doc_id)) if not doc: continue - title = truncate_title(doc["title"]) + title = truncate_title(doc.title) console.print(f" [dim]#{doc_id}[/dim] {title}") for tag_name, conf in tag_list: diff --git a/emdx/commands/tasks.py b/emdx/commands/tasks.py index 68480ec4..5a4dcc09 100644 --- a/emdx/commands/tasks.py +++ b/emdx/commands/tasks.py @@ -417,13 +417,13 @@ def view( if source_id: source_doc = get_document(source_id) if source_doc: - console.print(f" [dim]Source:[/dim] #{source_id} [cyan]{source_doc['title']}[/cyan]") + console.print(f" [dim]Source:[/dim] #{source_id} [cyan]{source_doc.title}[/cyan]") else: console.print(f" [dim]Source:[/dim] #{source_id} [dim](deleted)[/dim]") if output_id: output_doc = get_document(output_id) if output_doc: - console.print(f" [dim]Output:[/dim] #{output_id} [cyan]{output_doc['title']}[/cyan]") + console.print(f" [dim]Output:[/dim] #{output_id} [cyan]{output_doc.title}[/cyan]") else: console.print(f" [dim]Output:[/dim] #{output_id} [dim](deleted)[/dim]") @@ -676,7 +676,7 @@ def _assemble_brief( related_docs.append( { "id": source_id, - "title": source_doc["title"] if source_doc else "(deleted)", + "title": source_doc.title if source_doc else "(deleted)", "relation": "source", } ) @@ -691,7 +691,7 @@ def _assemble_brief( related_docs.append( { "id": output_id, - "title": output_doc["title"] if output_doc else "(deleted)", + "title": output_doc.title if output_doc else "(deleted)", "relation": "output", } ) diff --git a/emdx/commands/trash.py b/emdx/commands/trash.py index 540ae895..c192d46b 100644 --- a/emdx/commands/trash.py +++ b/emdx/commands/trash.py @@ -69,11 +69,11 @@ def _list_trash(days: int | None = None, limit: int = 50) -> None: for doc in deleted_docs: table.add_row( - str(doc["id"]), - doc["title"][:50] + "..." 
if len(doc["title"]) > 50 else doc["title"], - doc["project"] or "[dim]None[/dim]", - doc["deleted_at"].strftime("%Y-%m-%d %H:%M") if doc["deleted_at"] else "Unknown", - str(doc["access_count"]), + str(doc.id), + doc.title[:50] + "..." if len(doc.title) > 50 else doc.title, + doc.project or "[dim]None[/dim]", + doc.deleted_at.strftime("%Y-%m-%d %H:%M") if doc.deleted_at else "Unknown", + str(doc.access_count), ) console.print(table) @@ -110,7 +110,7 @@ def restore( restored_count = 0 for doc in deleted_docs: - if restore_document(str(doc["id"])): + if restore_document(str(doc.id)): restored_count += 1 console.print(f"\n[green]✅ Restored {restored_count} document(s)[/green]") @@ -157,7 +157,7 @@ def purge( cutoff = datetime.now() - timedelta(days=older_than) docs_to_purge = [ - d for d in deleted_docs if d["deleted_at"] is not None and d["deleted_at"] < cutoff + d for d in deleted_docs if d.deleted_at is not None and d.deleted_at < cutoff ] count = len(docs_to_purge) else: diff --git a/emdx/commands/wiki.py b/emdx/commands/wiki.py index f0d33f25..a7d03f49 100644 --- a/emdx/commands/wiki.py +++ b/emdx/commands/wiki.py @@ -184,9 +184,9 @@ def wiki_view( json.dumps( { "topic_id": topic_id, - "doc_id": doc["id"], - "title": doc["title"], - "content": doc["content"], + "doc_id": doc.id, + "title": doc.title, + "content": doc.content, }, indent=2, ) @@ -194,16 +194,15 @@ def wiki_view( return if raw: - print(doc["content"]) + print(doc.content) return from rich.markdown import Markdown console.print( - f"\n[bold cyan]Wiki: {doc['title']}[/bold cyan]" - f" [dim](topic {topic_id}, doc #{doc['id']})[/dim]\n" + f"\n[bold cyan]Wiki: {doc.title}[/bold cyan] [dim](topic {topic_id}, doc #{doc.id})[/dim]\n" ) - console.print(Markdown(doc["content"])) + console.print(Markdown(doc.content)) @wiki_app.command(name="search") diff --git a/emdx/ui/activity/activity_data.py b/emdx/ui/activity/activity_data.py index a24ea76c..4093e9a0 100644 --- a/emdx/ui/activity/activity_data.py +++ 
b/emdx/ui/activity/activity_data.py @@ -67,7 +67,7 @@ async def _load_documents(self, doc_type_filter: str = "all") -> list[ActivityIt from emdx.models.tags import get_document_tags for doc in docs: - doc_id = doc["id"] + doc_id = doc.id doc_tags[doc_id] = get_document_tags(doc_id) except ImportError: pass @@ -76,7 +76,7 @@ async def _load_documents(self, doc_type_filter: str = "all") -> list[ActivityIt for doc in docs: try: - doc_id = doc["id"] + doc_id = doc.id created = doc.get("created_at") title = doc.get("title", "") doc_type = doc.get("doc_type", "user") or "user" diff --git a/tests/test_activity_doc_type.py b/tests/test_activity_doc_type.py index 3e0acee7..13414ac8 100644 --- a/tests/test_activity_doc_type.py +++ b/tests/test_activity_doc_type.py @@ -4,12 +4,12 @@ from collections.abc import Generator from datetime import datetime -from typing import Any from unittest.mock import MagicMock, patch import pytest from textual.app import App, ComposeResult +from emdx.models.document import Document from emdx.ui.activity.activity_data import ActivityDataLoader from emdx.ui.activity.activity_items import DocumentItem from emdx.ui.activity.activity_table import ActivityTable @@ -25,25 +25,27 @@ def make_doc_row( title: str = "Test doc", doc_type: str = "user", created_at: str = "2025-01-20T12:00:00", -) -> dict[str, Any]: - """Create a fake document row matching DocumentRow shape.""" - return { - "id": id, - "title": title, - "content": "some content", - "project": None, - "created_at": created_at, - "updated_at": None, - "accessed_at": None, - "access_count": 1, - "deleted_at": None, - "is_deleted": 0, - "parent_id": None, - "relationship": None, - "archived_at": None, - "stage": None, - "doc_type": doc_type, - } +) -> Document: + """Create a fake Document matching the Document dataclass shape.""" + return Document.from_row( + { + "id": id, + "title": title, + "content": "some content", + "project": None, + "created_at": created_at, + "updated_at": None, + 
"accessed_at": None, + "access_count": 1, + "deleted_at": None, + "is_deleted": 0, + "parent_id": None, + "relationship": None, + "archived_at": None, + "stage": None, + "doc_type": doc_type, + } + ) # --------------------------------------------------------------------------- @@ -150,7 +152,7 @@ async def test_filter_all_shows_both(self) -> None: async def test_doc_type_defaults_to_user(self) -> None: """Documents without doc_type field default to 'user'.""" docs = [make_doc_row(id=1, title="Old doc")] - docs[0]["doc_type"] = None # Simulate missing doc_type + docs[0].doc_type = None # Simulate missing doc_type # type: ignore[assignment] with ( patch(f"{_DATA_LOADER_BASE}.doc_svc") as mock_svc, patch(f"{_DATA_LOADER_BASE}.HAS_DOCS", True), diff --git a/tests/test_activity_view.py b/tests/test_activity_view.py index 62500b54..8b58da41 100644 --- a/tests/test_activity_view.py +++ b/tests/test_activity_view.py @@ -10,6 +10,7 @@ from textual.app import App, ComposeResult from textual.widgets import RichLog, Static +from emdx.models.document import Document from emdx.ui.activity.activity_items import DocumentItem from emdx.ui.activity.activity_view import ActivityView @@ -143,14 +144,16 @@ async def test_document_preview_still_works( """Document preview rendering still works after streaming removal.""" doc_item = make_doc_item(item_id=100, title="My Document") mock_activity_deps["loader"].load_all.return_value = [doc_item] - mock_activity_deps["doc_db"].get_document.return_value = { - "id": 100, - "title": "My Document", - "content": "# My Document\n\nHello world", - "project": "test", - "created_at": "2025-01-01T12:00:00", - "access_count": 5, - } + mock_activity_deps["doc_db"].get_document.return_value = Document.from_row( + { + "id": 100, + "title": "My Document", + "content": "# My Document\n\nHello world", + "project": "test", + "created_at": "2025-01-01T12:00:00", + "access_count": 5, + } + ) app = ActivityTestApp() async with app.run_test() as pilot: @@ -164,14 
+167,16 @@ async def test_copy_mode_still_works(self, mock_activity_deps: dict[str, MagicMo """Copy mode toggle still works after streaming removal.""" doc_item = make_doc_item(item_id=100, title="My Document") mock_activity_deps["loader"].load_all.return_value = [doc_item] - mock_activity_deps["doc_db"].get_document.return_value = { - "id": 100, - "title": "My Document", - "content": "# My Document\n\nHello world", - "project": "test", - "created_at": "2025-01-01T12:00:00", - "access_count": 1, - } + mock_activity_deps["doc_db"].get_document.return_value = Document.from_row( + { + "id": 100, + "title": "My Document", + "content": "# My Document\n\nHello world", + "project": "test", + "created_at": "2025-01-01T12:00:00", + "access_count": 1, + } + ) app = ActivityTestApp() async with app.run_test() as pilot: diff --git a/tests/test_commands_core.py b/tests/test_commands_core.py index 63ddca1a..c2e8a258 100644 --- a/tests/test_commands_core.py +++ b/tests/test_commands_core.py @@ -8,6 +8,7 @@ from typer.testing import CliRunner from emdx.commands.core import InputContent, app, generate_title, get_input_content +from emdx.models.document import Document runner = CliRunner() @@ -291,14 +292,16 @@ class TestViewCommand: @patch("emdx.commands.core.get_document") def test_view_by_id(self, mock_get_doc, mock_get_tags): """View a document by numeric ID.""" - mock_get_doc.return_value = { - "id": 1, - "title": "My Doc", - "content": "Hello world", - "project": "test", - "created_at": datetime(2024, 1, 1), - "access_count": 5, - } + mock_get_doc.return_value = Document.from_row( + { + "id": 1, + "title": "My Doc", + "content": "Hello world", + "project": "test", + "created_at": datetime(2024, 1, 1), + "access_count": 5, + } + ) mock_get_tags.return_value = ["python"] result = runner.invoke(app, ["view", "1", "--no-pager"]) @@ -318,14 +321,16 @@ def test_view_not_found(self, mock_get_doc): @patch("emdx.commands.core.get_document") def test_view_raw(self, mock_get_doc, 
mock_get_tags): """View with --raw shows raw content.""" - mock_get_doc.return_value = { - "id": 1, - "title": "Raw Doc", - "content": "# Raw markdown", - "project": None, - "created_at": datetime(2024, 1, 1), - "access_count": 0, - } + mock_get_doc.return_value = Document.from_row( + { + "id": 1, + "title": "Raw Doc", + "content": "# Raw markdown", + "project": None, + "created_at": datetime(2024, 1, 1), + "access_count": 0, + } + ) mock_get_tags.return_value = [] result = runner.invoke(app, ["view", "1", "--raw", "--no-pager"]) @@ -336,14 +341,16 @@ def test_view_raw(self, mock_get_doc, mock_get_tags): @patch("emdx.commands.core.get_document") def test_view_no_header(self, mock_get_doc, mock_get_tags): """View with --no-header hides header.""" - mock_get_doc.return_value = { - "id": 1, - "title": "No Header", - "content": "Just content", - "project": None, - "created_at": datetime(2024, 1, 1), - "access_count": 0, - } + mock_get_doc.return_value = Document.from_row( + { + "id": 1, + "title": "No Header", + "content": "Just content", + "project": None, + "created_at": datetime(2024, 1, 1), + "access_count": 0, + } + ) mock_get_tags.return_value = [] result = runner.invoke(app, ["view", "1", "--no-header", "--no-pager"]) @@ -356,16 +363,18 @@ def test_view_no_header(self, mock_get_doc, mock_get_tags): @patch("emdx.commands.core.get_document") def test_view_json(self, mock_get_doc, mock_get_tags): """View with --json outputs valid JSON.""" - mock_get_doc.return_value = { - "id": 1, - "title": "JSON Doc", - "content": "Hello world", - "project": "test", - "created_at": datetime(2024, 1, 1), - "updated_at": datetime(2024, 1, 2), - "accessed_at": datetime(2024, 1, 3), - "access_count": 5, - } + mock_get_doc.return_value = Document.from_row( + { + "id": 1, + "title": "JSON Doc", + "content": "Hello world", + "project": "test", + "created_at": datetime(2024, 1, 1), + "updated_at": datetime(2024, 1, 2), + "accessed_at": datetime(2024, 1, 3), + "access_count": 5, + } + ) 
mock_get_tags.return_value = ["python"] result = runner.invoke(app, ["view", "1", "--json"]) @@ -394,13 +403,15 @@ class TestEditCommand: @patch("emdx.commands.core.get_document") def test_edit_title_only(self, mock_get_doc, mock_update): """Edit with --title updates title without opening editor.""" - mock_get_doc.return_value = { - "id": 1, - "title": "Old Title", - "content": "content", - "project": "p", - "created_at": datetime(2024, 1, 1), - } + mock_get_doc.return_value = Document.from_row( + { + "id": 1, + "title": "Old Title", + "content": "content", + "project": "p", + "created_at": datetime(2024, 1, 1), + } + ) mock_update.return_value = True result = runner.invoke(app, ["edit", "1", "--title", "New Title"]) @@ -421,13 +432,15 @@ def test_edit_doc_not_found(self, mock_get_doc): @patch("emdx.commands.core.get_document") def test_edit_title_failure(self, mock_get_doc, mock_update): """Edit that fails to update shows error.""" - mock_get_doc.return_value = { - "id": 1, - "title": "Title", - "content": "c", - "project": None, - "created_at": datetime(2024, 1, 1), - } + mock_get_doc.return_value = Document.from_row( + { + "id": 1, + "title": "Title", + "content": "c", + "project": None, + "created_at": datetime(2024, 1, 1), + } + ) mock_update.return_value = False result = runner.invoke(app, ["edit", "1", "--title", "New"]) @@ -450,13 +463,15 @@ class TestDeleteCommand: @patch("emdx.commands.core.get_document") def test_delete_soft(self, mock_get_doc, mock_delete): """Soft delete with --force skips confirmation.""" - mock_get_doc.return_value = { - "id": 1, - "title": "To Delete", - "project": "p", - "created_at": datetime(2024, 1, 1), - "access_count": 0, - } + mock_get_doc.return_value = Document.from_row( + { + "id": 1, + "title": "To Delete", + "project": "p", + "created_at": datetime(2024, 1, 1), + "access_count": 0, + } + ) mock_delete.return_value = True result = runner.invoke(app, ["delete", "1", "--force"]) @@ -469,13 +484,15 @@ def 
test_delete_soft(self, mock_get_doc, mock_delete): @patch("emdx.commands.core.get_document") def test_delete_hard_force(self, mock_get_doc, mock_delete): """Hard delete with --force --hard.""" - mock_get_doc.return_value = { - "id": 1, - "title": "Perm Delete", - "project": None, - "created_at": datetime(2024, 1, 1), - "access_count": 0, - } + mock_get_doc.return_value = Document.from_row( + { + "id": 1, + "title": "Perm Delete", + "project": None, + "created_at": datetime(2024, 1, 1), + "access_count": 0, + } + ) mock_delete.return_value = True result = runner.invoke(app, ["delete", "1", "--force", "--hard"]) @@ -496,13 +513,15 @@ def test_delete_not_found(self, mock_get_doc): @patch("emdx.commands.core.get_document") def test_delete_dry_run(self, mock_get_doc): """--dry-run shows what would be deleted without deleting.""" - mock_get_doc.return_value = { - "id": 1, - "title": "Dry Run Doc", - "project": "p", - "created_at": datetime(2024, 1, 1), - "access_count": 0, - } + mock_get_doc.return_value = Document.from_row( + { + "id": 1, + "title": "Dry Run Doc", + "project": "p", + "created_at": datetime(2024, 1, 1), + "access_count": 0, + } + ) result = runner.invoke(app, ["delete", "1", "--dry-run"]) assert result.exit_code == 0 @@ -515,20 +534,24 @@ def test_delete_multiple(self, mock_get_doc, mock_delete): def side_effect(identifier): docs = { - "1": { - "id": 1, - "title": "Doc 1", - "project": None, - "created_at": datetime(2024, 1, 1), - "access_count": 0, - }, # noqa: E501 - "2": { - "id": 2, - "title": "Doc 2", - "project": None, - "created_at": datetime(2024, 1, 2), - "access_count": 0, - }, # noqa: E501 + "1": Document.from_row( + { + "id": 1, + "title": "Doc 1", + "project": None, + "created_at": datetime(2024, 1, 1), + "access_count": 0, + } + ), + "2": Document.from_row( + { + "id": 2, + "title": "Doc 2", + "project": None, + "created_at": datetime(2024, 1, 2), + "access_count": 0, + } + ), } return docs.get(identifier) @@ -549,13 +572,15 @@ def 
test_delete_missing_id(self): @patch("emdx.commands.core.is_non_interactive", return_value=True) def test_delete_auto_confirms_when_non_tty(self, mock_isatty, mock_get_doc, mock_delete): """Delete skips confirmation when stdin is not a TTY (agent mode).""" - mock_get_doc.return_value = { - "id": 1, - "title": "Agent Delete", - "project": "p", - "created_at": datetime(2024, 1, 1), - "access_count": 0, - } + mock_get_doc.return_value = Document.from_row( + { + "id": 1, + "title": "Agent Delete", + "project": "p", + "created_at": datetime(2024, 1, 1), + "access_count": 0, + } + ) mock_delete.return_value = True # No --force flag, but should still proceed without prompting @@ -570,13 +595,15 @@ def test_delete_auto_confirms_when_non_tty(self, mock_isatty, mock_get_doc, mock @patch("emdx.commands.core.is_non_interactive", return_value=True) def test_delete_hard_auto_confirms_when_non_tty(self, mock_isatty, mock_get_doc, mock_delete): """Hard delete skips confirmation when stdin is not a TTY (agent mode).""" - mock_get_doc.return_value = { - "id": 1, - "title": "Agent Hard Delete", - "project": None, - "created_at": datetime(2024, 1, 1), - "access_count": 0, - } + mock_get_doc.return_value = Document.from_row( + { + "id": 1, + "title": "Agent Hard Delete", + "project": None, + "created_at": datetime(2024, 1, 1), + "access_count": 0, + } + ) mock_delete.return_value = True # No --force flag, --hard, should still proceed without prompting @@ -610,13 +637,15 @@ def test_trash_empty(self, mock_list_deleted): def test_trash_with_items(self, mock_list_deleted): """Trash with items shows table.""" mock_list_deleted.return_value = [ - { - "id": 1, - "title": "Deleted Doc", - "project": "proj", - "deleted_at": datetime(2024, 6, 1, 10, 0), - "access_count": 3, - } + Document.from_row( + { + "id": 1, + "title": "Deleted Doc", + "project": "proj", + "deleted_at": datetime(2024, 6, 1, 10, 0), + "access_count": 3, + } + ) ] result = runner.invoke(main_app, ["trash"]) @@ -668,8 +697,8 
@@ def test_restore_no_args(self): def test_restore_all(self, mock_restore, mock_list_deleted): """Restore --all restores all deleted documents.""" mock_list_deleted.return_value = [ - {"id": 1, "title": "D1"}, - {"id": 2, "title": "D2"}, + Document.from_row({"id": 1, "title": "D1"}), + Document.from_row({"id": 2, "title": "D2"}), ] mock_restore.return_value = True @@ -699,8 +728,8 @@ def test_purge_empty_trash(self, mock_list_deleted): def test_purge_with_force(self, mock_list_deleted, mock_purge): """Purge --force skips confirmation.""" mock_list_deleted.return_value = [ - {"id": 1, "title": "D", "deleted_at": datetime(2024, 1, 1)} - ] # noqa: E501 + Document.from_row({"id": 1, "title": "D", "deleted_at": datetime(2024, 1, 1)}) + ] mock_purge.return_value = 1 result = runner.invoke(main_app, ["trash", "purge", "--force"]) diff --git a/tests/test_commands_tags.py b/tests/test_commands_tags.py index 2dd7ea6f..60c0e431 100644 --- a/tests/test_commands_tags.py +++ b/tests/test_commands_tags.py @@ -10,6 +10,7 @@ from typer.testing import CliRunner from emdx.commands.tags import app +from emdx.models.document import Document runner = CliRunner() @@ -30,7 +31,7 @@ class TestTagAddCommand: @patch("emdx.commands.tags.get_document") def test_add_tags_explicit(self, mock_get_doc, mock_add_tags, mock_get_tags): """Add tags via explicit 'add' subcommand.""" - mock_get_doc.return_value = {"id": 1, "title": "Doc"} + mock_get_doc.return_value = Document.from_row({"id": 1, "title": "Doc"}) mock_add_tags.return_value = ["python", "testing"] mock_get_tags.return_value = ["python", "testing"] @@ -73,7 +74,7 @@ def test_tag_shorthand_rewrite(self): @patch("emdx.commands.tags.get_document") def test_tag_show_current(self, mock_get_doc, mock_get_tags): """Tag with no tag arguments shows current tags.""" - mock_get_doc.return_value = {"id": 1, "title": "Doc"} + mock_get_doc.return_value = Document.from_row({"id": 1, "title": "Doc"}) mock_get_tags.return_value = ["python"] result = 
runner.invoke(app, ["add", "1"]) @@ -95,7 +96,7 @@ def test_tag_doc_not_found(self, mock_get_doc): @patch("emdx.commands.tags.get_document") def test_tag_already_exists(self, mock_get_doc, mock_add_tags, mock_get_tags): """Adding a tag that already exists shows message.""" - mock_get_doc.return_value = {"id": 1, "title": "Doc"} + mock_get_doc.return_value = Document.from_row({"id": 1, "title": "Doc"}) mock_add_tags.return_value = [] # no new tags added mock_get_tags.return_value = ["python"] @@ -115,7 +116,7 @@ class TestTagRemoveCommand: @patch("emdx.commands.tags.get_document") def test_remove(self, mock_get_doc, mock_remove_tags, mock_get_tags): """Remove tags from a document.""" - mock_get_doc.return_value = {"id": 1, "title": "Doc"} + mock_get_doc.return_value = Document.from_row({"id": 1, "title": "Doc"}) mock_remove_tags.return_value = ["python"] mock_get_tags.return_value = [] @@ -128,7 +129,7 @@ def test_remove(self, mock_get_doc, mock_remove_tags, mock_get_tags): @patch("emdx.commands.tags.get_document") def test_remove_not_on_doc(self, mock_get_doc, mock_remove_tags, mock_get_tags): """Removing a tag that doesn't exist shows message.""" - mock_get_doc.return_value = {"id": 1, "title": "Doc"} + mock_get_doc.return_value = Document.from_row({"id": 1, "title": "Doc"}) mock_remove_tags.return_value = [] mock_get_tags.return_value = [] diff --git a/tests/test_commands_trash.py b/tests/test_commands_trash.py index f9737ff9..5dfd6347 100644 --- a/tests/test_commands_trash.py +++ b/tests/test_commands_trash.py @@ -10,6 +10,7 @@ from typer.testing import CliRunner from emdx.commands.trash import app +from emdx.models.document import Document runner = CliRunner() @@ -39,13 +40,15 @@ def test_list_empty_trash_with_days(self, mock_list): @patch("emdx.commands.trash.list_deleted_documents") def test_list_shows_documents(self, mock_list): mock_list.return_value = [ - { - "id": 42, - "title": "Test Document", - "project": "test-project", - "deleted_at": datetime(2026, 
1, 15, 10, 30), - "access_count": 5, - } + Document.from_row( + { + "id": 42, + "title": "Test Document", + "project": "test-project", + "deleted_at": datetime(2026, 1, 15, 10, 30), + "access_count": 5, + } + ) ] result = runner.invoke(app, ["list"]) assert result.exit_code == 0 @@ -57,13 +60,15 @@ def test_list_shows_documents(self, mock_list): @patch("emdx.commands.trash.list_deleted_documents") def test_list_truncates_long_titles(self, mock_list): mock_list.return_value = [ - { - "id": 1, - "title": "A" * 60, - "project": None, - "deleted_at": datetime(2026, 1, 15), - "access_count": 0, - } + Document.from_row( + { + "id": 1, + "title": "A" * 60, + "project": None, + "deleted_at": datetime(2026, 1, 15), + "access_count": 0, + } + ) ] result = runner.invoke(app, ["list"]) assert result.exit_code == 0 @@ -134,7 +139,9 @@ def test_purge_empty_trash(self, mock_list): @patch("emdx.commands.trash.list_deleted_documents") @patch("emdx.commands.trash.purge_deleted_documents") def test_purge_with_force(self, mock_purge, mock_list): - mock_list.return_value = [{"id": 1, "deleted_at": datetime(2026, 1, 1)}] + mock_list.return_value = [ + Document.from_row({"id": 1, "title": "", "deleted_at": datetime(2026, 1, 1)}) + ] mock_purge.return_value = 1 result = runner.invoke(app, ["purge", "--force"]) assert result.exit_code == 0 @@ -143,7 +150,9 @@ def test_purge_with_force(self, mock_purge, mock_list): @patch("emdx.commands.trash.list_deleted_documents") @patch("emdx.commands.trash.is_non_interactive", return_value=False) def test_purge_cancelled(self, mock_interactive, mock_list): - mock_list.return_value = [{"id": 1, "deleted_at": datetime(2026, 1, 1)}] + mock_list.return_value = [ + Document.from_row({"id": 1, "title": "", "deleted_at": datetime(2026, 1, 1)}) + ] result = runner.invoke(app, ["purge"], input="n\n") assert result.exit_code == 0 assert "cancelled" in _out(result).lower() @@ -155,7 +164,9 @@ def test_purge_auto_confirms_non_interactive( self, mock_ni: Any, 
mock_list: Any, mock_purge: Any ) -> None: """Purge skips confirmation when stdin is not a TTY (agent mode).""" - mock_list.return_value = [{"id": 1, "deleted_at": datetime(2026, 1, 1)}] + mock_list.return_value = [ + Document.from_row({"id": 1, "title": "", "deleted_at": datetime(2026, 1, 1)}) + ] mock_purge.return_value = 1 result = runner.invoke(app, ["purge"]) assert result.exit_code == 0 @@ -174,8 +185,8 @@ def test_restore_all_auto_confirms_non_interactive( ) -> None: """Restore --all skips confirmation when stdin is not a TTY.""" mock_list.return_value = [ - {"id": 1, "title": "Doc 1", "deleted_at": datetime(2026, 1, 1)}, - {"id": 2, "title": "Doc 2", "deleted_at": datetime(2026, 1, 2)}, + Document.from_row({"id": 1, "title": "Doc 1", "deleted_at": datetime(2026, 1, 1)}), + Document.from_row({"id": 2, "title": "Doc 2", "deleted_at": datetime(2026, 1, 2)}), ] mock_restore.return_value = True result = runner.invoke(app, ["restore", "--all"]) diff --git a/tests/test_gist.py b/tests/test_gist.py index 4b7d5290..4a274423 100644 --- a/tests/test_gist.py +++ b/tests/test_gist.py @@ -10,6 +10,7 @@ from emdx.commands.gist import sanitize_filename from emdx.main import app as main_app +from emdx.models.document import Document runner = CliRunner() @@ -111,12 +112,14 @@ def test_gist_document_not_found(self, mock_get_doc: Any) -> None: @patch("emdx.commands.gist.get_document") def test_gist_no_auth(self, mock_get_doc: Any, mock_auth: Any) -> None: """Gist without GitHub auth shows authentication error.""" - mock_get_doc.return_value = { - "id": 1, - "title": "Test Doc", - "content": "Hello world", - "project": "test", - } + mock_get_doc.return_value = Document.from_row( + { + "id": 1, + "title": "Test Doc", + "content": "Hello world", + "project": "test", + } + ) mock_auth.return_value = None result = runner.invoke(main_app, ["gist", "1"]) @@ -132,12 +135,14 @@ def test_gist_create_success( self, mock_get_doc: Any, mock_auth: Any, mock_create: Any, mock_db: Any ) -> 
None: """Successful gist creation shows URL.""" - mock_get_doc.return_value = { - "id": 1, - "title": "Test Doc", - "content": "Hello world", - "project": "test", - } + mock_get_doc.return_value = Document.from_row( + { + "id": 1, + "title": "Test Doc", + "content": "Hello world", + "project": "test", + } + ) mock_auth.return_value = "ghp_test_token" mock_create.return_value = { "id": "abc123", @@ -158,12 +163,14 @@ def test_gist_create_success( @patch("emdx.commands.gist.get_document") def test_gist_create_failure(self, mock_get_doc: Any, mock_auth: Any, mock_create: Any) -> None: """Failed gist creation shows error.""" - mock_get_doc.return_value = { - "id": 1, - "title": "Test Doc", - "content": "Hello world", - "project": None, - } + mock_get_doc.return_value = Document.from_row( + { + "id": 1, + "title": "Test Doc", + "content": "Hello world", + "project": None, + } + ) mock_auth.return_value = "ghp_test_token" mock_create.return_value = None @@ -187,12 +194,14 @@ def test_gist_update_success( self, mock_get_doc: Any, mock_auth: Any, mock_update: Any, mock_db: Any ) -> None: """Updating an existing gist succeeds.""" - mock_get_doc.return_value = { - "id": 1, - "title": "Test Doc", - "content": "Updated content", - "project": None, - } + mock_get_doc.return_value = Document.from_row( + { + "id": 1, + "title": "Test Doc", + "content": "Updated content", + "project": None, + } + ) mock_auth.return_value = "ghp_test_token" mock_update.return_value = True # Mock the database connection so UPDATE gists doesn't fail @@ -210,12 +219,14 @@ def test_gist_update_success( @patch("emdx.commands.gist.get_document") def test_gist_update_failure(self, mock_get_doc: Any, mock_auth: Any, mock_update: Any) -> None: """Failed gist update shows error.""" - mock_get_doc.return_value = { - "id": 1, - "title": "Test Doc", - "content": "Content", - "project": None, - } + mock_get_doc.return_value = Document.from_row( + { + "id": 1, + "title": "Test Doc", + "content": "Content", + 
"project": None, + } + ) mock_auth.return_value = "ghp_test_token" mock_update.return_value = False diff --git a/tests/test_task_commands.py b/tests/test_task_commands.py index a4d83a0e..f29b5fc2 100644 --- a/tests/test_task_commands.py +++ b/tests/test_task_commands.py @@ -10,6 +10,7 @@ from typer.testing import CliRunner from emdx.commands.tasks import app +from emdx.models.document import Document runner = CliRunner() @@ -735,7 +736,7 @@ def test_view_shows_epic_label(self, mock_tasks, mock_get_doc): mock_tasks.get_dependencies.return_value = [] mock_tasks.get_dependents.return_value = [] mock_tasks.get_task_log.return_value = [] - mock_get_doc.return_value = {"id": 99, "title": "Security audit report"} + mock_get_doc.return_value = Document.from_row({"id": 99, "title": "Security audit report"}) result = runner.invoke(app, ["view", "10"]) out = _out(result) diff --git a/tests/test_v028_regressions.py b/tests/test_v028_regressions.py index dc679147..b194865d 100644 --- a/tests/test_v028_regressions.py +++ b/tests/test_v028_regressions.py @@ -13,6 +13,7 @@ from typer.testing import CliRunner from emdx.main import app +from emdx.models.document import Document runner = CliRunner() @@ -238,7 +239,9 @@ def test_history_json_valid( mock_db: MagicMock, ) -> None: """history --json should produce parseable JSON with version info.""" - mock_get_doc.return_value = {"id": 42, "title": "Test Document", "content": "body"} + mock_get_doc.return_value = Document.from_row( + {"id": 42, "title": "Test Document", "content": "body"} + ) # Simulate version rows from the database mock_conn = MagicMock() @@ -267,7 +270,9 @@ def test_history_json_no_versions( mock_db: MagicMock, ) -> None: """history --json with no versions should output a JSON error message.""" - mock_get_doc.return_value = {"id": 42, "title": "Test", "content": "body"} + mock_get_doc.return_value = Document.from_row( + {"id": 42, "title": "Test", "content": "body"} + ) mock_conn = MagicMock() 
mock_db.get_connection.return_value.__enter__ = MagicMock(return_value=mock_conn) @@ -346,11 +351,13 @@ def test_wiki_view_returns_content( mock_row.__getitem__ = MagicMock(return_value=99) mock_conn.execute.return_value.fetchone.return_value = mock_row - mock_get_doc.return_value = { - "id": 99, - "title": "Wiki Article Title", - "content": "# Article\n\nSome content here.", - } + mock_get_doc.return_value = Document.from_row( + { + "id": 99, + "title": "Wiki Article Title", + "content": "# Article\n\nSome content here.", + } + ) result = runner.invoke(app, ["wiki", "view", "5", "--raw"]) assert result.exit_code == 0 diff --git a/tests/test_view_review.py b/tests/test_view_review.py index 5a942969..7f4140e2 100644 --- a/tests/test_view_review.py +++ b/tests/test_view_review.py @@ -9,6 +9,7 @@ from typer.testing import CliRunner from emdx.commands.core import app +from emdx.models.document import Document runner = CliRunner() @@ -18,14 +19,16 @@ def _out(result) -> str: # type: ignore[no-untyped-def] return re.sub(r"\x1b\[[0-9;]*m", "", result.stdout) -_DOC = { - "id": 42, - "title": "Architecture Plan", - "content": "We currently use REST. Today the API handles 1000 rps.", - "project": "acme", - "created_at": datetime(2024, 6, 1), - "access_count": 3, -} +_DOC = Document.from_row( + { + "id": 42, + "title": "Architecture Plan", + "content": "We currently use REST. 
Today the API handles 1000 rps.", + "project": "acme", + "created_at": datetime(2024, 6, 1), + "access_count": 3, + } +) class TestViewReviewFlag: From d120bcf458d5729c1bcb85f403eeb3753e8cc63e Mon Sep 17 00:00:00 2001 From: Alex Rockwell Date: Sat, 7 Mar 2026 01:37:06 -0500 Subject: [PATCH 3/8] refactor: remove 7 obsolete document TypedDicts from database/types.py (Issue #ARCH-24) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Phase 4 — remove DocumentRow, DocumentListItem, RecentDocumentItem, DeletedDocumentItem, ChildDocumentItem, SupersedeCandidate, and SearchResult from database/types.py. All replaced by the Document dataclass and SearchHit in emdx/models/. Remaining types in database/types.py are non-document structures: MostViewedDoc, DatabaseStats, DocumentLinkDetail, WikiArticleTimingDict, StandingQueryRow, StandingQueryMatch. Co-Authored-By: Claude Opus 4.6 --- emdx/database/types.py | 128 ++++++++--------------------------------- 1 file changed, 24 insertions(+), 104 deletions(-) diff --git a/emdx/database/types.py b/emdx/database/types.py index 33de0dc8..9a654a65 100644 --- a/emdx/database/types.py +++ b/emdx/database/types.py @@ -1,113 +1,40 @@ -"""TypedDict definitions for the database layer.""" +"""TypedDict definitions for the database layer. + +Document-related types have been replaced by the Document dataclass +in emdx.models.document. Remaining types here are for non-document +database structures (stats, links, wiki, standing queries). +""" from __future__ import annotations from datetime import datetime from typing import TypedDict -# ── Document types ───────────────────────────────────────────────────── - - -class DocumentRow(TypedDict): - """Full document row from the documents table. - - Datetime fields are parsed from SQLite strings to datetime objects - by ``_parse_doc_datetimes`` before being returned to callers. 
- """ - - id: int - title: str - content: str - project: str | None - created_at: datetime | None - updated_at: datetime | None - accessed_at: datetime | None - access_count: int - deleted_at: datetime | None - is_deleted: int - parent_id: int | None - relationship: str | None - archived_at: datetime | None - stage: str | None - doc_type: str - - -class DocumentListItem(TypedDict): - """Document item returned by list_documents.""" - - id: int - title: str - project: str | None - created_at: datetime | None - access_count: int - parent_id: int | None - relationship: str | None - archived_at: datetime | None - accessed_at: datetime | None - - -class RecentDocumentItem(TypedDict): - """Document item returned by get_recent_documents.""" - - id: int - title: str - project: str | None - accessed_at: datetime | None - access_count: int +# ── Stats types ─────────────────────────────────────────────────────── -class DeletedDocumentItem(TypedDict): - """Document item returned by list_deleted_documents.""" +class MostViewedDoc(TypedDict): + """Most viewed document summary.""" id: int title: str - project: str | None - deleted_at: datetime | None access_count: int -class ChildDocumentItem(TypedDict): - """Document item returned by get_children.""" - - id: int - title: str - project: str | None - created_at: datetime | None - parent_id: int | None - relationship: str | None - archived_at: datetime | None - - -class SupersedeCandidate(TypedDict): - """Candidate document for supersede matching.""" - - id: int - title: str - content: str - project: str | None - created_at: datetime | None - parent_id: int | None - - -class SearchResult(TypedDict, total=False): - """Search result from FTS5 queries.""" - - id: int - title: str - project: str | None - created_at: datetime | None - updated_at: datetime | None - snippet: str | None - rank: float - doc_type: str +class DatabaseStats(TypedDict, total=False): + """Statistics returned by get_stats.""" + total_documents: int + 
total_projects: int + total_views: int + avg_views: float + newest_doc: str | None + last_accessed: str | None + table_size: str + most_viewed: MostViewedDoc -class MostViewedDoc(TypedDict): - """Most viewed document summary.""" - id: int - title: str - access_count: int +# ── Document link types ─────────────────────────────────────────────── class DocumentLinkDetail(TypedDict): @@ -123,6 +50,9 @@ class DocumentLinkDetail(TypedDict): link_type: str +# ── Wiki types ──────────────────────────────────────────────────────── + + class WikiArticleTimingDict(TypedDict): """Step-level timing (milliseconds) for wiki article generation. @@ -138,17 +68,7 @@ class WikiArticleTimingDict(TypedDict): save_ms: float -class DatabaseStats(TypedDict, total=False): - """Statistics returned by get_stats.""" - - total_documents: int - total_projects: int - total_views: int - avg_views: float - newest_doc: str | None - last_accessed: str | None - table_size: str - most_viewed: MostViewedDoc +# ── Standing query types ────────────────────────────────────────────── class StandingQueryRow(TypedDict): From a99f493bed58a9ef4ebce0ec582e985b735ea4f3 Mon Sep 17 00:00:00 2001 From: Alex Rockwell Date: Sat, 7 Mar 2026 01:46:50 -0500 Subject: [PATCH 4/8] feat(models): Task dataclass replaces TaskDict/EpicTaskDict cast zoo (Issue #ARCH-24) - Create emdx/models/task.py with Task and TaskLogEntry dataclasses - Factory methods from_row()/from_partial_row() with datetime parsing - Dict-compat layer (__getitem__, .get()) for incremental migration - Wire into models/tasks.py: all functions return Task/TaskLogEntry - Migrate consumers: commands/tasks.py, ui/task_view.py type annotations - Fix mypy errors: switch 3 bracket accesses to attribute access - Remove TaskDict, EpicTaskDict, EpicViewDict, TaskLogEntryDict from types.py - Update test factories to use Task.from_row() Co-Authored-By: Claude Opus 4.6 --- emdx/commands/tasks.py | 31 ++--- emdx/models/task.py | 260 +++++++++++++++++++++++++++++++++++++ 
emdx/models/tasks.py | 53 ++++---- emdx/models/types.py | 37 ------ emdx/ui/task_view.py | 80 ++++++------ tests/test_task_browser.py | 106 ++++++++------- 6 files changed, 397 insertions(+), 170 deletions(-) create mode 100644 emdx/models/task.py diff --git a/emdx/commands/tasks.py b/emdx/commands/tasks.py index 5a4dcc09..458c3bcf 100644 --- a/emdx/commands/tasks.py +++ b/emdx/commands/tasks.py @@ -13,7 +13,8 @@ from emdx.commands.categories import app as categories_app from emdx.commands.epics import app as epics_app from emdx.models import tasks -from emdx.models.types import TaskDict, TaskRef +from emdx.models.task import Task +from emdx.models.types import TaskRef from emdx.utils.lazy_group import make_alias_group from emdx.utils.output import console, is_non_interactive, print_json @@ -58,7 +59,7 @@ def _blocker_summary(task_id: int) -> str: return f"{names}{extra}" -def _display_id(task: TaskDict) -> str: +def _display_id(task: Task) -> str: """Return KEY-N display ID if available, otherwise #id.""" if task.get("epic_key") and task.get("epic_seq"): return f"{task['epic_key']}-{task['epic_seq']}" @@ -229,7 +230,7 @@ def ready( ready_tasks = tasks.get_ready_tasks() if json_output: - print_json(ready_tasks) + print_json([t.to_dict() if hasattr(t, "to_dict") else t for t in ready_tasks]) return if not ready_tasks: @@ -596,7 +597,7 @@ def brief( def _assemble_brief( - task: TaskDict, + task: Task, task_id: int, log_limit: int, ) -> dict[str, object]: @@ -681,11 +682,7 @@ def _assemble_brief( } ) - output_id: int | None = None - try: - output_id = dict(task).get("output_doc_id") # type: ignore[assignment] - except Exception: - pass + output_id: int | None = task.get("output_doc_id") if output_id: output_doc = get_document(output_id) related_docs.append( @@ -898,7 +895,7 @@ def list_cmd( ) if json_output: - print_json(task_list) + print_json([t.to_dict() if hasattr(t, "to_dict") else t for t in task_list]) return if not task_list: @@ -922,7 +919,7 @@ def list_cmd( 
console.print(table) -def _task_label(task: TaskDict) -> str: +def _task_label(task: Task) -> str: """Format task label: DEBT-13 if epic, else #id.""" epic_key = task.get("epic_key") epic_seq = task.get("epic_seq") @@ -931,9 +928,9 @@ def _task_label(task: TaskDict) -> str: return f"#{task['id']}" -def _display_title(task: TaskDict) -> str: +def _display_title(task: Task) -> str: """Strip redundant KEY-N: prefix from title since the ID column has it.""" - title = task["title"] + title: str = task["title"] epic_key = task.get("epic_key") epic_seq = task.get("epic_seq") if epic_key and epic_seq: @@ -1112,7 +1109,7 @@ def dep_list( if json_output: - def _dep_summary(d: TaskDict) -> dict[str, str | int]: + def _dep_summary(d: Task) -> dict[str, str | int]: return { "id": d["id"], "display_id": _display_id(d), @@ -1176,7 +1173,7 @@ def chain( if json_output: - def _task_summary(t: TaskDict) -> dict[str, str | int]: + def _task_summary(t: Task) -> dict[str, str | int]: return { "id": t["id"], "display_id": _display_id(t), @@ -1214,11 +1211,11 @@ def _task_summary(t: TaskDict) -> dict[str, str | int]: console.print("\n[yellow]No dependencies in either direction[/yellow]") -def _walk_deps(task_id: int, direction: str) -> list[TaskDict]: +def _walk_deps(task_id: int, direction: str) -> list[Task]: """BFS walk of dependency graph. Returns tasks in traversal order.""" visited: set[int] = set() queue = [task_id] - result: list[TaskDict] = [] + result: list[Task] = [] while queue: current = queue.pop(0) diff --git a/emdx/models/task.py b/emdx/models/task.py new file mode 100644 index 00000000..096fdfd8 --- /dev/null +++ b/emdx/models/task.py @@ -0,0 +1,260 @@ +"""Task domain model for emdx. + +Single source of truth for the Task type. 
Replaces the scattered +TypedDict projections (TaskDict, EpicTaskDict, EpicViewDict, +TaskLogEntryDict) with proper dataclasses that support: + +- Factory construction from sqlite3.Row with datetime parsing +- Backward-compatible bracket access (task["title"]) for incremental migration +- Serialization to dict for JSON output +""" + +from __future__ import annotations + +import sqlite3 +from collections.abc import Iterator +from dataclasses import asdict, dataclass, field, fields +from datetime import datetime +from typing import Any + +from ..utils.datetime_utils import parse_datetime + +# Fields that store datetime values and should be parsed from SQLite strings. +_TASK_DATETIME_FIELDS: frozenset[str] = frozenset({"created_at", "updated_at", "completed_at"}) + + +@dataclass(slots=True) +class Task: + """Core task domain object. + + Constructed via ``Task.from_row()`` at the database boundary. + Supports ``task["field"]`` and ``task.get("field")`` for backward + compatibility with code that previously used TypedDict dicts. 
+ """ + + id: int + title: str + description: str | None = None + status: str = "open" + priority: int = 5 + gameplan_id: int | None = None + project: str | None = None + current_step: str | None = None + created_at: datetime | None = None + updated_at: datetime | None = None + completed_at: datetime | None = None + type: str = "single" + source_doc_id: int | None = None + output_doc_id: int | None = None + parent_task_id: int | None = None + epic_key: str | None = None + epic_seq: int | None = None + + # Epic-specific fields (populated by epic queries, default to 0) + child_count: int = 0 + children_open: int = 0 + children_done: int = 0 + + # Children list (populated by get_epic_view, default empty) + children: list[Task] = field(default_factory=list) + + # ── Dict-compatibility layer ────────────────────────────────────── + + def __getitem__(self, key: str) -> Any: + """Allow ``task["title"]`` access for backward compatibility.""" + try: + return getattr(self, key) + except AttributeError: + raise KeyError(key) from None + + def get(self, key: str, default: Any = None) -> Any: + """Allow ``task.get("title", "Untitled")`` for backward compatibility.""" + return getattr(self, key, default) + + def __contains__(self, key: object) -> bool: + """Allow ``"title" in task`` checks.""" + if not isinstance(key, str): + return False + return key in self._field_names() + + def keys(self) -> list[str]: + """Return field names, for code that iterates dict keys.""" + return list(self._field_names()) + + def items(self) -> Iterator[tuple[str, Any]]: + """Yield (field_name, value) pairs, for dict-like iteration.""" + for name in self._field_names(): + yield name, getattr(self, name) + + def values(self) -> Iterator[Any]: + """Yield field values, for dict-like iteration.""" + for name in self._field_names(): + yield getattr(self, name) + + @classmethod + def _field_names(cls) -> frozenset[str]: + """Cached set of field names for this dataclass.""" + cache_attr = 
"_cached_field_names" + cached: frozenset[str] | None = cls.__dict__.get(cache_attr) + if cached is not None: + return cached + names = frozenset(f.name for f in fields(cls)) + type.__setattr__(cls, cache_attr, names) + return names + + # ── Factory methods ─────────────────────────────────────────────── + + @classmethod + def from_row(cls, row: sqlite3.Row | dict[str, Any]) -> Task: + """Construct a Task from a full database row. + + Parses datetime string fields into ``datetime`` objects using + the centralized ``parse_datetime`` utility. Unknown columns in + the row are silently ignored (safe for SELECT * with extra cols). + """ + if isinstance(row, sqlite3.Row): + raw = dict(row) + else: + raw = dict(row) # defensive copy + + return cls._from_dict(raw) + + @classmethod + def from_partial_row(cls, row: sqlite3.Row | dict[str, Any]) -> Task: + """Construct a Task from a partial SELECT. + + Missing fields get their dataclass defaults. Functionally + identical to ``from_row`` — both tolerate missing columns — + but the separate name signals intent to callers. + """ + return cls.from_row(row) + + @classmethod + def _from_dict(cls, raw: dict[str, Any]) -> Task: + """Internal: build a Task from a raw dict, parsing datetimes.""" + known = cls._field_names() + kwargs: dict[str, Any] = {} + for key, value in raw.items(): + if key not in known: + continue + if key in _TASK_DATETIME_FIELDS and isinstance(value, str): + kwargs[key] = parse_datetime(value) + elif key == "children" and isinstance(value, list): + # Recursively convert child dicts to Task objects + kwargs[key] = [ + cls.from_row(c) if isinstance(c, (dict, sqlite3.Row)) else c for c in value + ] + else: + kwargs[key] = value + return cls(**kwargs) + + # ── Serialization ───────────────────────────────────────────────── + + def to_dict(self) -> dict[str, Any]: + """Convert to a plain dict for JSON serialization. + + Datetime fields are formatted as ISO 8601 strings. + Children are recursively serialized. 
+ """ + result = asdict(self) + for key in _TASK_DATETIME_FIELDS: + val = result.get(key) + if isinstance(val, datetime): + result[key] = val.isoformat() + # Recursively serialize children + if result.get("children"): + result["children"] = [c.to_dict() if isinstance(c, Task) else c for c in self.children] + return result + + +# Fields that store datetime values for TaskLogEntry +_LOG_DATETIME_FIELDS: frozenset[str] = frozenset({"created_at"}) + + +@dataclass(slots=True) +class TaskLogEntry: + """A single entry in a task's work log.""" + + id: int + task_id: int + message: str + created_at: datetime | None = None + + # ── Dict-compatibility layer ────────────────────────────────────── + + def __getitem__(self, key: str) -> Any: + """Allow ``entry["message"]`` access for backward compatibility.""" + try: + return getattr(self, key) + except AttributeError: + raise KeyError(key) from None + + def get(self, key: str, default: Any = None) -> Any: + """Allow ``entry.get("created_at")`` for backward compatibility.""" + return getattr(self, key, default) + + def __contains__(self, key: object) -> bool: + """Allow ``"message" in entry`` checks.""" + if not isinstance(key, str): + return False + return key in self._field_names() + + def keys(self) -> list[str]: + """Return field names, for code that iterates dict keys.""" + return list(self._field_names()) + + def items(self) -> Iterator[tuple[str, Any]]: + """Yield (field_name, value) pairs, for dict-like iteration.""" + for name in self._field_names(): + yield name, getattr(self, name) + + def values(self) -> Iterator[Any]: + """Yield field values, for dict-like iteration.""" + for name in self._field_names(): + yield getattr(self, name) + + @classmethod + def _field_names(cls) -> frozenset[str]: + """Cached set of field names for this dataclass.""" + cache_attr = "_cached_field_names" + cached: frozenset[str] | None = cls.__dict__.get(cache_attr) + if cached is not None: + return cached + names = frozenset(f.name for f 
in fields(cls)) + type.__setattr__(cls, cache_attr, names) + return names + + # ── Factory methods ─────────────────────────────────────────────── + + @classmethod + def from_row(cls, row: sqlite3.Row | dict[str, Any]) -> TaskLogEntry: + """Construct a TaskLogEntry from a database row.""" + if isinstance(row, sqlite3.Row): + raw = dict(row) + else: + raw = dict(row) # defensive copy + + known = cls._field_names() + kwargs: dict[str, Any] = {} + for key, value in raw.items(): + if key not in known: + continue + if key in _LOG_DATETIME_FIELDS and isinstance(value, str): + kwargs[key] = parse_datetime(value) + else: + kwargs[key] = value + return cls(**kwargs) + + @classmethod + def from_partial_row(cls, row: sqlite3.Row | dict[str, Any]) -> TaskLogEntry: + """Alias for from_row — both tolerate missing columns.""" + return cls.from_row(row) + + def to_dict(self) -> dict[str, Any]: + """Convert to a plain dict for JSON serialization.""" + result = asdict(self) + for key in _LOG_DATETIME_FIELDS: + val = result.get(key) + if isinstance(val, datetime): + result[key] = val.isoformat() + return result diff --git a/emdx/models/tasks.py b/emdx/models/tasks.py index ab696d65..f45b1217 100644 --- a/emdx/models/tasks.py +++ b/emdx/models/tasks.py @@ -2,7 +2,7 @@ import re import sqlite3 -from typing import Any, cast +from typing import Any from emdx.config.constants import ( DEFAULT_BROWSE_LIMIT, @@ -10,13 +10,8 @@ DEFAULT_TASK_PRIORITY, ) from emdx.database import db -from emdx.models.types import ( - EpicTaskDict, - EpicViewDict, - TaskDict, - TaskLogEntryDict, - TaskRef, -) +from emdx.models.task import Task, TaskLogEntry +from emdx.models.types import TaskRef # Valid status values STATUSES = ("open", "active", "blocked", "done", "failed", "wontdo", "duplicate") @@ -165,12 +160,12 @@ def delete_epic(epic_id: int, force: bool = False) -> dict[str, int]: return {"children_unlinked": children_unlinked} -def get_task(task_id: int) -> TaskDict | None: +def get_task(task_id: int) 
-> Task | None: """Get task by ID.""" with db.get_connection() as conn: cursor = conn.execute("SELECT * FROM tasks WHERE id = ?", (task_id,)) row = cursor.fetchone() - return cast(TaskDict, dict(row)) if row else None + return Task.from_row(row) if row else None _PREFIXED_ID_RE = re.compile(r"^([A-Za-z]+)-(\d+)$") @@ -231,7 +226,7 @@ def list_tasks( epic_key: str | None = None, parent_task_id: int | None = None, since: str | None = None, -) -> list[TaskDict]: +) -> list[Task]: """List tasks with filters. Args: @@ -286,7 +281,7 @@ def list_tasks( """, params, ) - return [cast(TaskDict, dict(row)) for row in cursor.fetchall()] + return [Task.from_row(row) for row in cursor.fetchall()] # Allowed columns for task updates (prevents SQL injection via column names) @@ -363,7 +358,7 @@ def delete_task(task_id: int) -> bool: return cursor.rowcount > 0 -def get_dependencies(task_id: int) -> list[TaskDict]: +def get_dependencies(task_id: int) -> list[Task]: """Get tasks this task depends on.""" with db.get_connection() as conn: cursor = conn.execute( @@ -374,10 +369,10 @@ def get_dependencies(task_id: int) -> list[TaskDict]: """, (task_id,), ) - return [cast(TaskDict, dict(row)) for row in cursor.fetchall()] + return [Task.from_row(row) for row in cursor.fetchall()] -def get_dependents(task_id: int) -> list[TaskDict]: +def get_dependents(task_id: int) -> list[Task]: """Get tasks that depend on this task (tasks this one blocks).""" with db.get_connection() as conn: cursor = conn.execute( @@ -388,13 +383,13 @@ def get_dependents(task_id: int) -> list[TaskDict]: """, (task_id,), ) - return [cast(TaskDict, dict(row)) for row in cursor.fetchall()] + return [Task.from_row(row) for row in cursor.fetchall()] def get_ready_tasks( gameplan_id: int | None = None, epic_key: str | None = None, -) -> list[TaskDict]: +) -> list[Task]: """Get tasks ready to work (open + all deps done). 
Args: @@ -425,7 +420,7 @@ def get_ready_tasks( """, params, ) - return [cast(TaskDict, dict(row)) for row in cursor.fetchall()] + return [Task.from_row(row) for row in cursor.fetchall()] def add_dependency(task_id: int, depends_on: int) -> bool: @@ -491,7 +486,7 @@ def log_progress(task_id: int, message: str) -> int: return cursor.lastrowid -def get_task_log(task_id: int, limit: int = DEFAULT_RECENT_LIMIT) -> list[TaskLogEntryDict]: +def get_task_log(task_id: int, limit: int = DEFAULT_RECENT_LIMIT) -> list[TaskLogEntry]: """Get task log entries.""" with db.get_connection() as conn: cursor = conn.execute( @@ -501,10 +496,10 @@ def get_task_log(task_id: int, limit: int = DEFAULT_RECENT_LIMIT) -> list[TaskLo """, (task_id, limit), ) - return [cast(TaskLogEntryDict, dict(row)) for row in cursor.fetchall()] + return [TaskLogEntry.from_row(row) for row in cursor.fetchall()] -def get_children(parent_task_id: int) -> list[TaskDict]: +def get_children(parent_task_id: int) -> list[Task]: """Get child tasks ordered by id.""" with db.get_connection() as conn: cursor = conn.execute( @@ -515,13 +510,13 @@ def get_children(parent_task_id: int) -> list[TaskDict]: """, (parent_task_id,), ) - return [cast(TaskDict, dict(row)) for row in cursor.fetchall()] + return [Task.from_row(row) for row in cursor.fetchall()] def list_epics( category_key: str | None = None, status: list[str] | None = None, -) -> list[EpicTaskDict]: +) -> list[Task]: """List epic tasks with child counts.""" conditions = ["t.type = 'epic'"] params = [] @@ -550,10 +545,10 @@ def list_epics( """, params, ) - return [cast(EpicTaskDict, dict(row)) for row in cursor.fetchall()] + return [Task.from_row(row) for row in cursor.fetchall()] -def get_epic_view(epic_id: int) -> EpicViewDict | None: +def get_epic_view(epic_id: int) -> Task | None: """Get epic task + its children.""" with db.get_connection() as conn: cursor = conn.execute( @@ -574,9 +569,9 @@ def get_epic_view(epic_id: int) -> EpicViewDict | None: """, 
(epic_id,), ) - raw["children"] = [cast(TaskDict, dict(row)) for row in child_cursor.fetchall()] + raw["children"] = [dict(row) for row in child_cursor.fetchall()] - return cast(EpicViewDict, raw) + return Task.from_row(raw) def attach_to_epic(task_ids: list[int], epic_id: int) -> int: @@ -634,7 +629,7 @@ def attach_to_epic(task_ids: list[int], epic_id: int) -> int: return attached -def get_tasks_in_window(hours: int) -> list[TaskDict]: +def get_tasks_in_window(hours: int) -> list[Task]: """Get tasks updated within a time window. Args: @@ -652,4 +647,4 @@ def get_tasks_in_window(hours: int) -> list[TaskDict]: """, (f"-{hours}",), ) - return [cast(TaskDict, dict(row)) for row in cursor.fetchall()] + return [Task.from_row(row) for row in cursor.fetchall()] diff --git a/emdx/models/types.py b/emdx/models/types.py index 057e5f63..52c7f152 100644 --- a/emdx/models/types.py +++ b/emdx/models/types.py @@ -12,43 +12,6 @@ TaskRef: TypeAlias = str -class TaskDict(TypedDict): - id: int - title: str - description: str | None - status: str - priority: int - gameplan_id: int | None - project: str | None - current_step: str | None - created_at: str | None - updated_at: str | None - completed_at: str | None - type: str - source_doc_id: int | None - output_doc_id: int | None - parent_task_id: int | None - epic_key: str | None - epic_seq: int | None - - -class EpicTaskDict(TaskDict): - child_count: int - children_open: int - children_done: int - - -class EpicViewDict(TaskDict): - children: list[TaskDict] - - -class TaskLogEntryDict(TypedDict): - id: int - task_id: int - message: str - created_at: str | None - - class CategoryDict(TypedDict): key: str name: str diff --git a/emdx/ui/task_view.py b/emdx/ui/task_view.py index c8952187..b3a66269 100644 --- a/emdx/ui/task_view.py +++ b/emdx/ui/task_view.py @@ -20,6 +20,7 @@ from textual.widget import Widget from textual.widgets import DataTable, Input, RichLog, Static +from emdx.models.task import Task, TaskLogEntry from emdx.models.tasks 
import ( get_dependencies, get_dependents, @@ -29,7 +30,6 @@ list_tasks, update_task, ) -from emdx.models.types import EpicTaskDict, TaskDict, TaskLogEntryDict from emdx.ui.link_helpers import extract_urls as _extract_urls from emdx.ui.link_helpers import linkify_text as _linkify_text @@ -74,12 +74,14 @@ DONE_FOLD_PREFIX = "done-fold:" -def _format_time_ago(dt_str: str | None) -> str: - """Format a datetime string as relative time, or absolute date if > 7 days.""" +def _format_time_ago(dt_str: str | datetime | None) -> str: + """Format a datetime string (or datetime) as relative time, or absolute date if > 7 days.""" if not dt_str: return "" try: - if "T" in dt_str: + if isinstance(dt_str, datetime): + dt = dt_str + elif "T" in dt_str: dt = datetime.fromisoformat(dt_str.replace("Z", "+00:00")) else: dt = datetime.fromisoformat(dt_str) @@ -111,8 +113,8 @@ def _format_time_ago(dt_str: str | None) -> str: return "" -def _format_time_short(dt_str: str | None) -> str: - """Format a datetime string as a compact relative time (no 'ago').""" +def _format_time_short(dt_str: str | datetime | None) -> str: + """Format a datetime string (or datetime) as a compact relative time (no 'ago').""" if not dt_str: return "" result = _format_time_ago(dt_str) @@ -146,18 +148,16 @@ def _strip_epic_prefix(title: str, epic_key: str | None, epic_seq: int | None) - return title -def _task_badge(task: TaskDict) -> str: +def _task_badge(task: Task) -> str: """Return the KEY-N badge for a task, or empty string if unavailable.""" - epic_key = task.get("epic_key") - epic_seq = task.get("epic_seq") - if epic_key and epic_seq: - return f"{epic_key}-{epic_seq}" - if epic_key: - return epic_key + if task.epic_key and task.epic_seq: + return f"{task.epic_key}-{task.epic_seq}" + if task.epic_key: + return task.epic_key return "" -def _task_label(task: TaskDict) -> str: +def _task_label(task: Task) -> str: """Build a plain text label for tests and fallback display.""" icon = 
STATUS_ICONS.get(task["status"], "?") title = task["title"] @@ -309,10 +309,10 @@ class TaskView(Widget): def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) - self._tasks: list[TaskDict] = [] - self._tasks_by_status: dict[str, list[TaskDict]] = defaultdict(list) - self._row_key_to_task: dict[str, TaskDict] = {} - self._epics: dict[int, EpicTaskDict] = {} # keyed by epic task ID + self._tasks: list[Task] = [] + self._tasks_by_status: dict[str, list[Task]] = defaultdict(list) + self._row_key_to_task: dict[str, Task] = {} + self._epics: dict[int, Task] = {} # keyed by epic task ID self._filter_text: str = "" self._debounce_timer: Timer | None = None self._status_filter: set[str] | None = None # None = show all @@ -324,7 +324,7 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: self._done_folds_expanded: set[int] = set() # Epic IDs with done-fold open self._zoomed: bool = False self._sidebar_visible: bool = True - self._current_task: TaskDict | None = None + self._current_task: Task | None = None def compose(self) -> ComposeResult: yield Static("Loading tasks...", id="task-status-bar") @@ -482,7 +482,7 @@ async def _load_tasks(self, *, restore_row: int | None = None) -> None: self._render_task_table(restore_row=restore_row) self._update_status_bar() - def _row_key_for_task(self, task: TaskDict) -> str: + def _row_key_for_task(self, task: Task) -> str: """Generate a stable row key for a task.""" return f"task:{task['id']}" @@ -533,7 +533,7 @@ def _render_task_table(self, *, restore_row: int | None = None) -> None: def _render_task_row( self, table: "DataTable[str | Text]", - task: TaskDict, + task: Task, indent: bool = False, tree_prefix: str = "", ) -> None: @@ -634,10 +634,10 @@ def _render_groups_by_status(self, table: "DataTable[str | Text]") -> None: # together with tree connectors (cross-group siblings). # Tasks with no parent render normally. 
epic_ids_in_group = {t["id"] for t in tasks if t.get("type") == "epic"} - children_by_parent: dict[int, list[TaskDict]] = defaultdict(list) - cross_group_by_parent: dict[int, list[TaskDict]] = defaultdict(list) - true_orphans: list[TaskDict] = [] - epics_in_order: list[TaskDict] = [] + children_by_parent: dict[int, list[Task]] = defaultdict(list) + cross_group_by_parent: dict[int, list[Task]] = defaultdict(list) + true_orphans: list[Task] = [] + epics_in_order: list[Task] = [] for task in tasks: parent_id = task.get("parent_task_id") @@ -703,7 +703,7 @@ def _render_groups_by_epic(self, table: "DataTable[str | Text]") -> None: # Group children by parent_task_id. # First pass: collect all tasks and find which IDs are parents. - all_loaded: dict[int, TaskDict] = {} + all_loaded: dict[int, Task] = {} referenced_parents: set[int] = set() for status in STATUS_ORDER: for task in self._tasks_by_status.get(status, []): @@ -714,10 +714,10 @@ def _render_groups_by_epic(self, table: "DataTable[str | Text]") -> None: # Second pass: separate parents from children. 
parent_types = {"epic", "group"} - children_by_parent: dict[int | None, list[TaskDict]] = defaultdict( + children_by_parent: dict[int | None, list[Task]] = defaultdict( list, ) - epic_task_by_id: dict[int, TaskDict] = {} + epic_task_by_id: dict[int, Task] = {} for task in all_loaded.values(): is_parent = task.get("type") in parent_types or task["id"] in referenced_parents if is_parent: @@ -818,7 +818,7 @@ def _sort_key(pid: int | None) -> tuple[int, str]: # Epic not in loaded tasks — use _epics data epic_data = self._epics.get(pid) if epic_data: - epic = epic_data # EpicTaskDict extends TaskDict + epic = epic_data if epic: self._render_task_row(table, epic) else: @@ -846,8 +846,8 @@ def _sort_key(pid: int | None) -> tuple[int, str]: continue # Split children into active and done groups - active_kids: list[TaskDict] = [] - done_kids: list[TaskDict] = [] + active_kids: list[Task] = [] + done_kids: list[Task] = [] for task in kids: normalized = STATUS_ALIASES.get(task["status"], task["status"]) if normalized in finished: @@ -1032,7 +1032,7 @@ def _update_status_bar(self) -> None: # Filter logic # ------------------------------------------------------------------ - def _task_matches_filter(self, task: TaskDict, query: str) -> bool: + def _task_matches_filter(self, task: Task, query: str) -> bool: """Check if a task matches the filter query (case-insensitive substring).""" q = query.lower() fields = [ @@ -1042,7 +1042,7 @@ def _task_matches_filter(self, task: TaskDict, query: str) -> bool: ] return any(q in f.lower() for f in fields) - def _task_passes_filters(self, task: TaskDict) -> bool: + def _task_passes_filters(self, task: Task) -> bool: """Check if a task passes text, status, and epic filters.""" if self._status_filter and task["status"] not in self._status_filter: return False @@ -1178,7 +1178,7 @@ def do_filter() -> None: self._debounce_timer = self.set_timer(0.2, do_filter) - def _get_selected_task(self) -> TaskDict | None: + def _get_selected_task(self) -> 
Task | None: """Get the currently highlighted task.""" table = self.query_one("#task-table", DataTable) try: @@ -1295,7 +1295,7 @@ def _write_markdown_guttered( prefixed.overflow = "fold" detail_log.write(prefixed) - def _render_task_detail(self, task: TaskDict) -> None: + def _render_task_detail(self, task: Task) -> None: """Render full task detail — routes metadata to sidebar or inline.""" self._current_task = task @@ -1342,7 +1342,7 @@ def _render_task_detail(self, task: TaskDict) -> None: self._render_task_content(detail_log, task) - def _render_task_metadata(self, target: RichLog, task: TaskDict) -> None: + def _render_task_metadata(self, target: RichLog, task: Task) -> None: """Write task metadata (status, deps, blocks) to a RichLog target. Works for both the sidebar (30 cols) and the detail pane (full width). @@ -1406,7 +1406,7 @@ def _render_task_metadata(self, target: RichLog, task: TaskDict) -> None: except Exception as e: logger.debug(f"Error loading dependents: {e}") - def _render_task_content(self, target: RichLog, task: TaskDict) -> None: + def _render_task_content(self, target: RichLog, task: Task) -> None: """Write task content (description, error, work log) to a RichLog target.""" content_w = self._detail_content_width(target) @@ -1419,7 +1419,7 @@ def _render_task_content(self, target: RichLog, task: TaskDict) -> None: # Work log try: - log_entries: list[TaskLogEntryDict] = get_task_log(task["id"], limit=20) + log_entries: list[TaskLogEntry] = get_task_log(task["id"], limit=20) if log_entries: target.write("") target.write("[bold]Work Log:[/bold]") @@ -1428,7 +1428,7 @@ def _render_task_content(self, target: RichLog, task: TaskDict) -> None: last = len(log_entries) - 1 for i, entry in enumerate(log_entries): raw_ts = entry.get("created_at") - time_str = _format_time_ago(str(raw_ts) if raw_ts is not None else None) + time_str = _format_time_ago(raw_ts) ts_part = f" {time_str}" if time_str else "" target.write(f" [bold cyan]●[/bold cyan] 
[dim]{ts_part}[/dim]") self._write_markdown_guttered( @@ -1445,7 +1445,7 @@ def _render_task_content(self, target: RichLog, task: TaskDict) -> None: except Exception as e: logger.debug(f"Error loading task log: {e}") - def _render_epic_detail(self, task: TaskDict) -> None: + def _render_epic_detail(self, task: Task) -> None: """Render epic detail with child task listing in the right pane.""" detail_log = self.query_one("#task-detail-log", RichLog) header = self.query_one("#task-detail-header", Static) diff --git a/tests/test_task_browser.py b/tests/test_task_browser.py index 49168cbb..f1bc81bb 100644 --- a/tests/test_task_browser.py +++ b/tests/test_task_browser.py @@ -10,7 +10,7 @@ from textual.app import App, ComposeResult from textual.widgets import DataTable, Input, RichLog, Static -from emdx.models.types import EpicTaskDict, TaskDict, TaskLogEntryDict +from emdx.models.task import Task, TaskLogEntry from emdx.ui.link_helpers import extract_urls as _extract_urls from emdx.ui.link_helpers import linkify_text as _linkify_text from emdx.ui.task_view import TaskView, _format_time_ago, _task_label @@ -34,26 +34,27 @@ def make_task( type: str = "manual", epic_seq: int | None = None, **kwargs: object, -) -> TaskDict: - base: TaskDict = { - "id": id, - "title": title, - "status": status, - "priority": priority, - "description": description, - "epic_key": epic_key, - "created_at": created_at, - "updated_at": updated_at, - "completed_at": completed_at, - "gameplan_id": None, - "project": None, - "current_step": None, - "type": type, - "source_doc_id": None, - "parent_task_id": parent_task_id, - "epic_seq": epic_seq, - } - return base +) -> Task: + return Task.from_row( + { + "id": id, + "title": title, + "status": status, + "priority": priority, + "description": description, + "epic_key": epic_key, + "created_at": created_at, + "updated_at": updated_at, + "completed_at": completed_at, + "gameplan_id": None, + "project": None, + "current_step": None, + "type": type, + 
"source_doc_id": None, + "parent_task_id": parent_task_id, + "epic_seq": epic_seq, + } + ) def make_epic( @@ -65,22 +66,30 @@ def make_epic( children_open: int = 3, epic_seq: int = 1, **kwargs: object, -) -> EpicTaskDict: - base = make_task( - id=id, - title=f"Epic: {epic_key}", - status=status, - epic_key=epic_key, - type="epic", - epic_seq=epic_seq, +) -> Task: + return Task.from_row( + { + "id": id, + "title": f"Epic: {epic_key}", + "status": status, + "epic_key": epic_key, + "type": "epic", + "epic_seq": epic_seq, + "priority": 5, + "description": None, + "created_at": "2025-01-01T12:00:00", + "updated_at": None, + "completed_at": None, + "gameplan_id": None, + "project": None, + "current_step": None, + "source_doc_id": None, + "parent_task_id": None, + "child_count": child_count, + "children_done": children_done, + "children_open": children_open, + } ) - epic: EpicTaskDict = { - **base, # type: ignore[typeddict-item] - "child_count": child_count, - "children_done": children_done, - "children_open": children_open, - } - return epic def make_log_entry( @@ -88,13 +97,15 @@ def make_log_entry( task_id: int = 1, message: str = "Did something", created_at: str | None = "2025-01-01T12:00:00", -) -> TaskLogEntryDict: - return { - "id": id, - "task_id": task_id, - "message": message, - "created_at": created_at, - } +) -> TaskLogEntry: + return TaskLogEntry.from_row( + { + "id": id, + "task_id": task_id, + "message": message, + "created_at": created_at, + } + ) # --------------------------------------------------------------------------- @@ -170,8 +181,8 @@ def _make_list_tasks_side_effect( def _side_effect( status: list[str] | None = None, **kwargs: object, - ) -> list[TaskDict]: - all_tasks: list[TaskDict] = mock.return_value + ) -> list[Task]: + all_tasks: list[Task] = mock.return_value if status is not None: return [t for t in all_tasks if t["status"] in status] return list(all_tasks) @@ -1804,8 +1815,9 @@ async def test_same_group_children_still_cluster_under_epic( 
self, mock_task_data: MockDict ) -> None: """Children whose epic IS in the same status group cluster normally.""" - epic_task = make_task(id=100, title="Auth Epic", status="open", epic_key="AUTH") - epic_task["type"] = "epic" + epic_task = make_task( + id=100, title="Auth Epic", status="open", epic_key="AUTH", type="epic" + ) mock_task_data["list_tasks"].return_value = [ epic_task, make_task(id=1, title="Child A", status="open", epic_key="AUTH", parent_task_id=100), From 5d47fa908ed2b52563780ceeeb55b36efa9c1b35 Mon Sep 17 00:00:00 2001 From: Alex Rockwell Date: Sat, 7 Mar 2026 02:01:56 -0500 Subject: [PATCH 5/8] fix: revert premature attribute access in task commands/UI (dict compat needed) Tests mock get_task() with raw dicts, so attribute access like task.epic_key breaks. Restore bracket access via dict-compat layer until test mocks are migrated to Task objects. Co-Authored-By: Claude Opus 4.6 --- emdx/commands/tasks.py | 117 +++++++++++++++++++++-------------------- emdx/ui/task_view.py | 10 ++-- 2 files changed, 66 insertions(+), 61 deletions(-) diff --git a/emdx/commands/tasks.py b/emdx/commands/tasks.py index 4e35f2ea..458c3bcf 100644 --- a/emdx/commands/tasks.py +++ b/emdx/commands/tasks.py @@ -51,7 +51,7 @@ def _blocker_summary(task_id: int) -> str: deps = tasks.get_dependencies(task_id) if not deps: return "" - open_deps = [d for d in deps if d.status not in ("done", "closed", "wontdo", "duplicate")] + open_deps = [d for d in deps if d["status"] not in ("done", "closed", "wontdo", "duplicate")] if not open_deps: return "" names = ", ".join(_display_id(d) for d in open_deps[:3]) @@ -61,9 +61,9 @@ def _blocker_summary(task_id: int) -> str: def _display_id(task: Task) -> str: """Return KEY-N display ID if available, otherwise #id.""" - if task.epic_key and task.epic_seq: - return f"{task.epic_key}-{task.epic_seq}" - return f"#{task.id}" + if task.get("epic_key") and task.get("epic_seq"): + return f"{task['epic_key']}-{task['epic_seq']}" + return 
f"#{task['id']}" def _resolve_id( @@ -123,8 +123,8 @@ def add( raise typer.Exit(1) parent_task_id = epic_id # Inherit epic_key from the parent epic if not explicitly set - if not epic_key and parent_task.epic_key: - epic_key = parent_task.epic_key + if not epic_key and parent_task.get("epic_key"): + epic_key = parent_task["epic_key"] depends_on = after if after else None @@ -186,8 +186,8 @@ def plan( raise typer.Exit(1) epic_key = cat.upper() if cat else None - if not epic_key and parent_task.epic_key: - epic_key = parent_task.epic_key + if not epic_key and parent_task.get("epic_key"): + epic_key = parent_task["epic_key"] created: list[dict[str, str | int]] = [] prev_id: int | None = None @@ -284,14 +284,14 @@ def done( if json_output: result: dict[str, str | int | None] = { "id": task_id, - "title": task.title, + "title": task["title"], "status": "done", } if output_doc is not None: result["output_doc_id"] = output_doc print_json(result) else: - msg = f"[green]✓ Done:[/green] {_display_id(task)} {task.title}" + msg = f"[green]✓ Done:[/green] {_display_id(task)} {task['title']}" if output_doc is not None: msg += f" [dim](output #{output_doc})[/dim]" console.print(msg) @@ -328,9 +328,9 @@ def wontdo( tasks.log_progress(task_id, f"Won't do: {note}") if json_output: - print_json({"id": task_id, "title": task.title, "status": "wontdo"}) + print_json({"id": task_id, "title": task["title"], "status": "wontdo"}) else: - console.print(f"[dim]⊘ Won't do:[/dim] {_display_id(task)} {task.title}") + console.print(f"[dim]⊘ Won't do:[/dim] {_display_id(task)} {task['title']}") @app.command() @@ -364,9 +364,9 @@ def duplicate( tasks.log_progress(task_id, f"Duplicate: {note}") if json_output: - print_json({"id": task_id, "title": task.title, "status": "duplicate"}) + print_json({"id": task_id, "title": task["title"], "status": "duplicate"}) else: - console.print(f"[dim]◆ Duplicate:[/dim] {_display_id(task)} {task.title}") + console.print(f"[dim]◆ Duplicate:[/dim] 
{_display_id(task)} {task['title']}") @app.command() @@ -388,30 +388,31 @@ def view( console.print(f"[red]Task {task_id_str} not found[/red]") raise typer.Exit(1) - icon = ICONS.get(task.status, "?") + icon = ICONS.get(task["status"], "?") display = _display_id(task) - console.print(f"\n[bold]{icon} {display}: {task.title}[/bold]") + console.print(f"\n[bold]{icon} {display}: {task['title']}[/bold]") # Metadata line - meta = [f"Status: {task.status}"] - if task.epic_key: - meta.append(f"Category: {task.epic_key}") - if task.parent_task_id: - parent = tasks.get_task(task.parent_task_id) - epic_label = _display_id(parent) if parent else (task.epic_key or "?") + meta = [f"Status: {task['status']}"] + if task.get("epic_key"): + meta.append(f"Category: {task['epic_key']}") + parent_task_id: int | None = task.get("parent_task_id") + if parent_task_id: + parent = tasks.get_task(parent_task_id) + epic_label = _display_id(parent) if parent else task.get("epic_key", "?") meta.append(f"Epic: {epic_label}") - if task.priority and task.priority != 3: - meta.append(f"Priority: {task.priority}") + if task.get("priority") and task["priority"] != 3: + meta.append(f"Priority: {task['priority']}") console.print(f"[dim]{' | '.join(meta)}[/dim]") - if task.created_at: - console.print(f"[dim]Created: {task.created_at}[/dim]") + if task.get("created_at"): + console.print(f"[dim]Created: {task['created_at']}[/dim]") # Linked documents from emdx.models.documents import get_document - source_id = task.source_doc_id - output_id = task.output_doc_id + source_id = task.get("source_doc_id") + output_id = task.get("output_doc_id") if source_id or output_id: console.print() if source_id: @@ -428,7 +429,7 @@ def view( console.print(f" [dim]Output:[/dim] #{output_id} [dim](deleted)[/dim]") # Description - desc = task.description or "" + desc = task.get("description") or "" if desc: console.print() from emdx.ui.markdown_config import MarkdownConfig @@ -441,23 +442,23 @@ def view( if deps: 
console.print("\n[bold]Blocked by:[/bold]") for d in deps: - dep_icon = ICONS.get(d.status, "?") - console.print(f" {dep_icon} {_display_id(d)} {d.title}") + dep_icon = ICONS.get(d["status"], "?") + console.print(f" {dep_icon} {_display_id(d)} {d['title']}") dependents = tasks.get_dependents(task_id) if dependents: console.print("\n[bold]Blocks:[/bold]") for d in dependents: - dep_icon = ICONS.get(d.status, "?") - console.print(f" {dep_icon} {_display_id(d)} {d.title}") + dep_icon = ICONS.get(d["status"], "?") + console.print(f" {dep_icon} {_display_id(d)} {d['title']}") # Work log log = tasks.get_task_log(task_id, limit=5) if log: console.print("\n[bold]Work log:[/bold]") for entry in log: - ts = entry.created_at or "" - console.print(f" [dim]{ts}[/dim] {entry.message}") + ts = entry.get("created_at", "") + console.print(f" [dim]{ts}[/dim] {entry['message']}") @app.command() @@ -484,7 +485,7 @@ def active( if note: tasks.log_progress(task_id, note) - console.print(f"[blue]● Active:[/blue] {_display_id(task)} {task.title}") + console.print(f"[blue]● Active:[/blue] {_display_id(task)} {task['title']}") @app.command() @@ -518,10 +519,10 @@ def log( console.print(f"[yellow]No log entries for {_display_id(task)}[/yellow]") return - console.print(f"\n[bold]Log for {_display_id(task)}: {task.title}[/bold]") + console.print(f"\n[bold]Log for {_display_id(task)}: {task['title']}[/bold]") for entry in entries: - ts = entry.created_at or "" - console.print(f" [dim]{ts}[/dim] {entry.message}") + ts = entry.get("created_at", "") + console.print(f" [dim]{ts}[/dim] {entry['message']}") @app.command() @@ -609,19 +610,19 @@ def _assemble_brief( "id": task_id, "display_id": display, "title": _display_title(task), - "status": task.status, - "priority": task.priority, - "category": task.epic_key, - "description": task.description or "", + "status": task["status"], + "priority": task.get("priority", 3), + "category": task.get("epic_key"), + "description": task.get("description") or 
"", } # Epic info - parent_id = task.parent_task_id + parent_id = task.get("parent_task_id") if parent_id: parent = tasks.get_task(parent_id) data["epic"] = { "id": parent_id, - "title": parent.title if parent else "(deleted)", + "title": parent["title"] if parent else "(deleted)", "display_id": _display_id(parent) if parent else f"#{parent_id}", } @@ -629,10 +630,10 @@ def _assemble_brief( deps = tasks.get_dependencies(task_id) data["dependencies"] = [ { - "id": d.id, + "id": d["id"], "display_id": _display_id(d), "title": _display_title(d), - "status": d.status, + "status": d["status"], } for d in deps ] @@ -641,10 +642,10 @@ def _assemble_brief( dependents = tasks.get_dependents(task_id) data["dependents"] = [ { - "id": d.id, + "id": d["id"], "display_id": _display_id(d), "title": _display_title(d), - "status": d.status, + "status": d["status"], } for d in dependents ] @@ -653,22 +654,24 @@ def _assemble_brief( children = tasks.get_children(task_id) data["subtasks"] = [ { - "id": c.id, + "id": c["id"], "display_id": _display_id(c), "title": _display_title(c), - "status": c.status, + "status": c["status"], } for c in children ] # Task log log_entries = tasks.get_task_log(task_id, limit=log_limit) - data["log"] = [{"created_at": e.created_at or "", "message": e.message} for e in log_entries] + data["log"] = [ + {"created_at": e.get("created_at", ""), "message": e["message"]} for e in log_entries + ] # Related documents related_docs: list[dict[str, object]] = [] - source_id = task.source_doc_id + source_id = task.get("source_doc_id") if source_id: source_doc = get_document(source_id) related_docs.append( @@ -679,7 +682,7 @@ def _assemble_brief( } ) - output_id: int | None = task.output_doc_id + output_id: int | None = task.get("output_doc_id") if output_id: output_doc = get_document(output_id) related_docs.append( @@ -693,9 +696,9 @@ def _assemble_brief( data["related_documents"] = related_docs # Key files extracted from description and log - all_text = 
task.description or "" + all_text = task.get("description") or "" for entry in log_entries: - all_text += "\n" + entry.message + all_text += "\n" + entry["message"] data["key_files"] = _extract_file_paths(all_text) return data @@ -826,7 +829,7 @@ def blocked( if reason: tasks.log_progress(task_id, f"Blocked: {reason}") - msg = f"[yellow]⊘ Blocked:[/yellow] {_display_id(task)} {task.title}" + msg = f"[yellow]⊘ Blocked:[/yellow] {_display_id(task)} {task['title']}" if reason: msg += f"\n [dim]{reason}[/dim]" console.print(msg) diff --git a/emdx/ui/task_view.py b/emdx/ui/task_view.py index b8b9fcf3..9181007f 100644 --- a/emdx/ui/task_view.py +++ b/emdx/ui/task_view.py @@ -154,10 +154,12 @@ def _strip_epic_prefix(title: str, epic_key: str | None, epic_seq: int | None) - def _task_badge(task: Task) -> str: """Return the KEY-N badge for a task, or empty string if unavailable.""" - if task.epic_key and task.epic_seq: - return f"{task.epic_key}-{task.epic_seq}" - if task.epic_key: - return task.epic_key + epic_key = task.get("epic_key") + epic_seq = task.get("epic_seq") + if epic_key and epic_seq: + return f"{epic_key}-{epic_seq}" + if epic_key: + return str(epic_key) return "" From 9a5a54f977fd51da5a46d5008825534fc3196ec8 Mon Sep 17 00:00:00 2001 From: Alex Rockwell Date: Sat, 7 Mar 2026 02:05:30 -0500 Subject: [PATCH 6/8] test: migrate task test mocks from raw dicts to Task.from_row() (Issue #ARCH-24) Script-converted 102 mock return values from raw dicts to Task.from_row()/TaskLogEntry.from_row() so attribute access works on mock return values. Enables removing dict-compat layer later. 
Co-Authored-By: Claude Opus 4.6 --- tests/test_task_commands.py | 473 ++++++++++++++++++++++-------------- 1 file changed, 289 insertions(+), 184 deletions(-) diff --git a/tests/test_task_commands.py b/tests/test_task_commands.py index f29b5fc2..1992a7ab 100644 --- a/tests/test_task_commands.py +++ b/tests/test_task_commands.py @@ -11,6 +11,7 @@ from emdx.commands.tasks import app from emdx.models.document import Document +from emdx.models.task import Task, TaskLogEntry runner = CliRunner() @@ -26,12 +27,14 @@ class TestTaskAdd: @patch("emdx.commands.tasks.tasks") def test_add_simple_task(self, mock_tasks): mock_tasks.create_task.return_value = 1 - mock_tasks.get_task.return_value = { - "id": 1, - "title": "Fix the auth bug", - "epic_key": None, - "epic_seq": None, - } + mock_tasks.get_task.return_value = Task.from_row( + { + "id": 1, + "title": "Fix the auth bug", + "epic_key": None, + "epic_seq": None, + } + ) result = runner.invoke(app, ["add", "Fix the auth bug"]) assert result.exit_code == 0 out = _out(result) @@ -59,12 +62,14 @@ def test_add_whitespace_title_rejected(self): @patch("emdx.commands.tasks.tasks") def test_add_task_shows_epic_key(self, mock_tasks): mock_tasks.create_task.return_value = 10 - mock_tasks.get_task.return_value = { - "id": 10, - "title": "FEAT-3: Add auth", - "epic_key": "FEAT", - "epic_seq": 3, - } + mock_tasks.get_task.return_value = Task.from_row( + { + "id": 10, + "title": "FEAT-3: Add auth", + "epic_key": "FEAT", + "epic_seq": 3, + } + ) result = runner.invoke(app, ["add", "Add auth", "--cat", "FEAT"]) assert result.exit_code == 0 out = _out(result) @@ -74,12 +79,14 @@ def test_add_task_shows_epic_key(self, mock_tasks): @patch("emdx.commands.tasks.tasks") def test_add_task_with_doc_id(self, mock_tasks): mock_tasks.create_task.return_value = 2 - mock_tasks.get_task.return_value = { - "id": 2, - "title": "Implement this", - "epic_key": None, - "epic_seq": None, - } + mock_tasks.get_task.return_value = Task.from_row( + { + "id": 2, + 
"title": "Implement this", + "epic_key": None, + "epic_seq": None, + } + ) result = runner.invoke(app, ["add", "Implement this", "--doc", "42"]) assert result.exit_code == 0 out = _out(result) @@ -98,12 +105,14 @@ def test_add_task_with_doc_id(self, mock_tasks): @patch("emdx.commands.tasks.tasks") def test_add_task_with_doc_id_short_flag(self, mock_tasks): mock_tasks.create_task.return_value = 3 - mock_tasks.get_task.return_value = { - "id": 3, - "title": "Another task", - "epic_key": None, - "epic_seq": None, - } + mock_tasks.get_task.return_value = Task.from_row( + { + "id": 3, + "title": "Another task", + "epic_key": None, + "epic_seq": None, + } + ) result = runner.invoke(app, ["add", "Another task", "-d", "99"]) assert result.exit_code == 0 out = _out(result) @@ -121,12 +130,14 @@ def test_add_task_with_doc_id_short_flag(self, mock_tasks): @patch("emdx.commands.tasks.tasks") def test_add_task_with_description(self, mock_tasks): mock_tasks.create_task.return_value = 4 - mock_tasks.get_task.return_value = { - "id": 4, - "title": "Refactor tests", - "epic_key": None, - "epic_seq": None, - } + mock_tasks.get_task.return_value = Task.from_row( + { + "id": 4, + "title": "Refactor tests", + "epic_key": None, + "epic_seq": None, + } + ) result = runner.invoke( app, ["add", "Refactor tests", "--description", "Split into unit and integration"] ) @@ -146,12 +157,14 @@ def test_add_task_with_description(self, mock_tasks): @patch("emdx.commands.tasks.tasks") def test_add_task_with_description_short_flag(self, mock_tasks): mock_tasks.create_task.return_value = 5 - mock_tasks.get_task.return_value = { - "id": 5, - "title": "Task", - "epic_key": None, - "epic_seq": None, - } + mock_tasks.get_task.return_value = Task.from_row( + { + "id": 5, + "title": "Task", + "epic_key": None, + "epic_seq": None, + } + ) result = runner.invoke(app, ["add", "Task", "-D", "Details here"]) assert result.exit_code == 0 mock_tasks.create_task.assert_called_once_with( @@ -166,12 +179,14 @@ def 
test_add_task_with_description_short_flag(self, mock_tasks): @patch("emdx.commands.tasks.tasks") def test_add_task_with_all_options(self, mock_tasks): mock_tasks.create_task.return_value = 6 - mock_tasks.get_task.return_value = { - "id": 6, - "title": "Full task", - "epic_key": None, - "epic_seq": None, - } + mock_tasks.get_task.return_value = Task.from_row( + { + "id": 6, + "title": "Full task", + "epic_key": None, + "epic_seq": None, + } + ) result = runner.invoke(app, ["add", "Full task", "-d", "10", "-D", "Full description"]) assert result.exit_code == 0 out = _out(result) @@ -213,8 +228,8 @@ def test_ready_no_tasks(self, mock_tasks): @patch("emdx.commands.tasks.tasks") def test_ready_shows_tasks(self, mock_tasks): mock_tasks.get_ready_tasks.return_value = [ - {"id": 1, "title": "First task", "epic_key": None, "epic_seq": None}, - {"id": 2, "title": "Second task", "epic_key": "SEC", "epic_seq": 1}, + Task.from_row({"id": 1, "title": "First task", "epic_key": None, "epic_seq": None}), + Task.from_row({"id": 2, "title": "Second task", "epic_key": "SEC", "epic_seq": 1}), ] result = runner.invoke(app, ["ready"]) assert result.exit_code == 0 @@ -228,7 +243,7 @@ def test_ready_shows_tasks(self, mock_tasks): @patch("emdx.commands.tasks.tasks") def test_ready_shows_epic_label(self, mock_tasks): mock_tasks.get_ready_tasks.return_value = [ - {"id": 1, "title": "QW-3: Task", "epic_key": "QW", "epic_seq": 3}, + Task.from_row({"id": 1, "title": "QW-3: Task", "epic_key": "QW", "epic_seq": 3}), ] result = runner.invoke(app, ["ready"]) out = _out(result) @@ -243,7 +258,7 @@ class TestTaskDone: @patch("emdx.commands.tasks.tasks") def test_done_marks_task_complete(self, mock_tasks): mock_tasks.resolve_task_id.return_value = 1 - mock_tasks.get_task.return_value = {"id": 1, "title": "Test task"} + mock_tasks.get_task.return_value = Task.from_row({"id": 1, "title": "Test task"}) mock_tasks.update_task.return_value = True result = runner.invoke(app, ["done", "1"]) assert 
result.exit_code == 0 @@ -256,7 +271,7 @@ def test_done_marks_task_complete(self, mock_tasks): @patch("emdx.commands.tasks.tasks") def test_done_with_note(self, mock_tasks): mock_tasks.resolve_task_id.return_value = 2 - mock_tasks.get_task.return_value = {"id": 2, "title": "Bug fix"} + mock_tasks.get_task.return_value = Task.from_row({"id": 2, "title": "Bug fix"}) mock_tasks.update_task.return_value = True result = runner.invoke(app, ["done", "2", "--note", "Fixed in PR #123"]) assert result.exit_code == 0 @@ -268,7 +283,7 @@ def test_done_with_note(self, mock_tasks): @patch("emdx.commands.tasks.tasks") def test_done_with_note_short_flag(self, mock_tasks): mock_tasks.resolve_task_id.return_value = 3 - mock_tasks.get_task.return_value = {"id": 3, "title": "Feature"} + mock_tasks.get_task.return_value = Task.from_row({"id": 3, "title": "Feature"}) mock_tasks.update_task.return_value = True result = runner.invoke(app, ["done", "3", "-n", "Completed"]) assert result.exit_code == 0 @@ -293,7 +308,7 @@ class TestTaskDuplicate: @patch("emdx.commands.tasks.tasks") def test_duplicate_marks_status(self, mock_tasks: Any) -> None: mock_tasks.resolve_task_id.return_value = 1 - mock_tasks.get_task.return_value = {"id": 1, "title": "Test task"} + mock_tasks.get_task.return_value = Task.from_row({"id": 1, "title": "Test task"}) mock_tasks.update_task.return_value = True result = runner.invoke(app, ["duplicate", "1"]) assert result.exit_code == 0 @@ -306,7 +321,7 @@ def test_duplicate_marks_status(self, mock_tasks: Any) -> None: @patch("emdx.commands.tasks.tasks") def test_duplicate_with_reason(self, mock_tasks: Any) -> None: mock_tasks.resolve_task_id.return_value = 2 - mock_tasks.get_task.return_value = {"id": 2, "title": "Bug fix"} + mock_tasks.get_task.return_value = Task.from_row({"id": 2, "title": "Bug fix"}) mock_tasks.update_task.return_value = True result = runner.invoke(app, ["duplicate", "2", "--note", "Duplicate of #55"]) assert result.exit_code == 0 @@ -322,7 +337,7 @@ 
def test_duplicate_sets_completed_at(self, mock_tasks: Any) -> None: The model layer sets completed_at when status is 'duplicate'. """ mock_tasks.resolve_task_id.return_value = 3 - mock_tasks.get_task.return_value = {"id": 3, "title": "Feature"} + mock_tasks.get_task.return_value = Task.from_row({"id": 3, "title": "Feature"}) mock_tasks.update_task.return_value = True result = runner.invoke(app, ["duplicate", "3"]) assert result.exit_code == 0 @@ -342,21 +357,33 @@ def test_list_empty(self, mock_tasks): @patch("emdx.commands.tasks.tasks") def test_list_shows_tasks(self, mock_tasks): mock_tasks.list_tasks.return_value = [ - {"id": 1, "title": "Open task", "status": "open", "epic_key": None, "epic_seq": None}, - { - "id": 2, - "title": "Active task", - "status": "active", - "epic_key": None, - "epic_seq": None, - }, - { - "id": 3, - "title": "Blocked task", - "status": "blocked", - "epic_key": None, - "epic_seq": None, - }, + Task.from_row( + { + "id": 1, + "title": "Open task", + "status": "open", + "epic_key": None, + "epic_seq": None, + } + ), + Task.from_row( + { + "id": 2, + "title": "Active task", + "status": "active", + "epic_key": None, + "epic_seq": None, + } + ), + Task.from_row( + { + "id": 3, + "title": "Blocked task", + "status": "blocked", + "epic_key": None, + "epic_seq": None, + } + ), ] mock_tasks.get_dependencies.return_value = [] result = runner.invoke(app, ["list"]) @@ -370,7 +397,9 @@ def test_list_shows_tasks(self, mock_tasks): @patch("emdx.commands.tasks.tasks") def test_list_shows_status_text(self, mock_tasks): mock_tasks.list_tasks.return_value = [ - {"id": 1, "title": "Task", "status": "active", "epic_key": None, "epic_seq": None}, + Task.from_row( + {"id": 1, "title": "Task", "status": "active", "epic_key": None, "epic_seq": None} + ), ] result = runner.invoke(app, ["list"]) out = _out(result) @@ -379,13 +408,15 @@ def test_list_shows_status_text(self, mock_tasks): @patch("emdx.commands.tasks.tasks") def 
test_list_shows_epic_label_and_strips_prefix(self, mock_tasks): mock_tasks.list_tasks.return_value = [ - { - "id": 1, - "title": "SEC-1: Harden auth", - "status": "open", - "epic_key": "SEC", - "epic_seq": 1, - }, + Task.from_row( + { + "id": 1, + "title": "SEC-1: Harden auth", + "status": "open", + "epic_key": "SEC", + "epic_seq": 1, + } + ), ] result = runner.invoke(app, ["list"]) out = _out(result) @@ -500,10 +531,18 @@ def test_list_with_limit_short_flag(self, mock_tasks): @patch("emdx.commands.tasks.tasks") def test_list_displays_status_as_text(self, mock_tasks): mock_tasks.list_tasks.return_value = [ - {"id": 1, "title": "Open", "status": "open", "epic_key": None, "epic_seq": None}, - {"id": 2, "title": "Active", "status": "active", "epic_key": None, "epic_seq": None}, - {"id": 3, "title": "Done", "status": "done", "epic_key": None, "epic_seq": None}, - {"id": 4, "title": "Failed", "status": "failed", "epic_key": None, "epic_seq": None}, + Task.from_row( + {"id": 1, "title": "Open", "status": "open", "epic_key": None, "epic_seq": None} + ), + Task.from_row( + {"id": 2, "title": "Active", "status": "active", "epic_key": None, "epic_seq": None} + ), + Task.from_row( + {"id": 3, "title": "Done", "status": "done", "epic_key": None, "epic_seq": None} + ), + Task.from_row( + {"id": 4, "title": "Failed", "status": "failed", "epic_key": None, "epic_seq": None} + ), ] result = runner.invoke(app, ["list"]) assert result.exit_code == 0 @@ -517,7 +556,9 @@ def test_list_displays_status_as_text(self, mock_tasks): def test_list_does_not_truncate_title(self, mock_tasks): long_title = "This is a very long task title that exceeds fifty characters by quite a bit" mock_tasks.list_tasks.return_value = [ - {"id": 1, "title": long_title, "status": "open", "epic_key": None, "epic_seq": None}, + Task.from_row( + {"id": 1, "title": long_title, "status": "open", "epic_key": None, "epic_seq": None} + ), ] result = runner.invoke(app, ["list"]) out = _out(result) @@ -607,7 +648,7 @@ 
class TestTaskDelete: @patch("emdx.commands.tasks.tasks") def test_delete_with_force(self, mock_tasks): mock_tasks.resolve_task_id.return_value = 1 - mock_tasks.get_task.return_value = {"id": 1, "title": "Task to delete"} + mock_tasks.get_task.return_value = Task.from_row({"id": 1, "title": "Task to delete"}) mock_tasks.delete_task.return_value = True result = runner.invoke(app, ["delete", "1", "--force"]) assert result.exit_code == 0 @@ -618,7 +659,7 @@ def test_delete_with_force(self, mock_tasks): @patch("emdx.commands.tasks.tasks") def test_delete_with_force_short_flag(self, mock_tasks): mock_tasks.resolve_task_id.return_value = 2 - mock_tasks.get_task.return_value = {"id": 2, "title": "Another task"} + mock_tasks.get_task.return_value = Task.from_row({"id": 2, "title": "Another task"}) mock_tasks.delete_task.return_value = True result = runner.invoke(app, ["delete", "2", "-f"]) assert result.exit_code == 0 @@ -638,7 +679,7 @@ def test_delete_task_not_found(self, mock_tasks): @patch("emdx.commands.tasks.tasks") def test_delete_with_confirmation(self, mock_tasks, mock_interactive): mock_tasks.resolve_task_id.return_value = 3 - mock_tasks.get_task.return_value = {"id": 3, "title": "Confirm delete"} + mock_tasks.get_task.return_value = Task.from_row({"id": 3, "title": "Confirm delete"}) mock_tasks.delete_task.return_value = True result = runner.invoke(app, ["delete", "3"], input="y\n") assert result.exit_code == 0 @@ -649,7 +690,7 @@ def test_delete_with_confirmation(self, mock_tasks, mock_interactive): @patch("emdx.commands.tasks.tasks") def test_delete_cancelled(self, mock_tasks, mock_interactive): mock_tasks.resolve_task_id.return_value = 4 - mock_tasks.get_task.return_value = {"id": 4, "title": "Cancel delete"} + mock_tasks.get_task.return_value = Task.from_row({"id": 4, "title": "Cancel delete"}) result = runner.invoke(app, ["delete", "4"], input="n\n") assert result.exit_code == 0 out = _out(result) @@ -665,7 +706,7 @@ def test_delete_requires_task_id(self): 
def test_delete_auto_confirms_non_interactive(self, mock_tasks: Any, mock_ni: Any) -> None: """Delete skips confirmation when stdin is not a TTY (agent mode).""" mock_tasks.resolve_task_id.return_value = 5 - mock_tasks.get_task.return_value = {"id": 5, "title": "Agent delete"} + mock_tasks.get_task.return_value = Task.from_row({"id": 5, "title": "Agent delete"}) mock_tasks.delete_task.return_value = True result = runner.invoke(app, ["delete", "5"]) assert result.exit_code == 0 @@ -679,18 +720,20 @@ class TestTaskView: @patch("emdx.commands.tasks.tasks") def test_view_shows_basic_info(self, mock_tasks): mock_tasks.resolve_task_id.return_value = 42 - mock_tasks.get_task.return_value = { - "id": 42, - "title": "Fix auth bug", - "status": "open", - "description": "The auth middleware has a race condition", - "epic_key": None, - "epic_seq": None, - "parent_task_id": None, - "source_doc_id": None, - "priority": 3, - "created_at": "2026-01-15", - } + mock_tasks.get_task.return_value = Task.from_row( + { + "id": 42, + "title": "Fix auth bug", + "status": "open", + "description": "The auth middleware has a race condition", + "epic_key": None, + "epic_seq": None, + "parent_task_id": None, + "source_doc_id": None, + "priority": 3, + "created_at": "2026-01-15", + } + ) mock_tasks.get_dependencies.return_value = [] mock_tasks.get_dependents.return_value = [] mock_tasks.get_task_log.return_value = [] @@ -751,23 +794,25 @@ def test_view_shows_epic_label(self, mock_tasks, mock_get_doc): @patch("emdx.commands.tasks.tasks") def test_view_shows_dependencies(self, mock_tasks): mock_tasks.resolve_task_id.return_value = 5 - mock_tasks.get_task.return_value = { - "id": 5, - "title": "Task with deps", - "status": "blocked", - "description": "", - "epic_key": None, - "epic_seq": None, - "parent_task_id": None, - "source_doc_id": None, - "priority": 3, - "created_at": "2026-01-15", - } + mock_tasks.get_task.return_value = Task.from_row( + { + "id": 5, + "title": "Task with deps", + 
"status": "blocked", + "description": "", + "epic_key": None, + "epic_seq": None, + "parent_task_id": None, + "source_doc_id": None, + "priority": 3, + "created_at": "2026-01-15", + } + ) mock_tasks.get_dependencies.return_value = [ - {"id": 3, "title": "Blocker task", "status": "active"}, + Task.from_row({"id": 3, "title": "Blocker task", "status": "active"}), ] mock_tasks.get_dependents.return_value = [ - {"id": 8, "title": "Waiting task", "status": "open"}, + Task.from_row({"id": 8, "title": "Waiting task", "status": "open"}), ] mock_tasks.get_task_log.return_value = [] @@ -783,23 +828,39 @@ def test_view_shows_dependencies(self, mock_tasks): @patch("emdx.commands.tasks.tasks") def test_view_shows_work_log(self, mock_tasks): mock_tasks.resolve_task_id.return_value = 7 - mock_tasks.get_task.return_value = { - "id": 7, - "title": "Some task", - "status": "active", - "description": "", - "epic_key": None, - "epic_seq": None, - "parent_task_id": None, - "source_doc_id": None, - "priority": 3, - "created_at": "2026-01-15", - } + mock_tasks.get_task.return_value = Task.from_row( + { + "id": 7, + "title": "Some task", + "status": "active", + "description": "", + "epic_key": None, + "epic_seq": None, + "parent_task_id": None, + "source_doc_id": None, + "priority": 3, + "created_at": "2026-01-15", + } + ) mock_tasks.get_dependencies.return_value = [] mock_tasks.get_dependents.return_value = [] mock_tasks.get_task_log.return_value = [ - {"message": "Started investigation", "created_at": "2026-01-15 10:00"}, - {"message": "Found root cause", "created_at": "2026-01-15 11:00"}, + TaskLogEntry.from_row( + { + "id": 1, + "task_id": 7, + "message": "Started investigation", + "created_at": "2026-01-15 10:00", + } + ), + TaskLogEntry.from_row( + { + "id": 2, + "task_id": 7, + "message": "Found root cause", + "created_at": "2026-01-15 11:00", + } + ), ] result = runner.invoke(app, ["view", "7"]) @@ -827,7 +888,7 @@ class TestTaskActive: @patch("emdx.commands.tasks.tasks") def 
test_active_marks_task(self, mock_tasks): mock_tasks.resolve_task_id.return_value = 1 - mock_tasks.get_task.return_value = {"id": 1, "title": "Test task"} + mock_tasks.get_task.return_value = Task.from_row({"id": 1, "title": "Test task"}) mock_tasks.update_task.return_value = True result = runner.invoke(app, ["active", "1"]) assert result.exit_code == 0 @@ -840,7 +901,7 @@ def test_active_marks_task(self, mock_tasks): @patch("emdx.commands.tasks.tasks") def test_active_with_note(self, mock_tasks): mock_tasks.resolve_task_id.return_value = 2 - mock_tasks.get_task.return_value = {"id": 2, "title": "Auth fix"} + mock_tasks.get_task.return_value = Task.from_row({"id": 2, "title": "Auth fix"}) mock_tasks.update_task.return_value = True result = runner.invoke(app, ["active", "2", "--note", "Starting work"]) assert result.exit_code == 0 @@ -850,7 +911,7 @@ def test_active_with_note(self, mock_tasks): @patch("emdx.commands.tasks.tasks") def test_active_with_note_short_flag(self, mock_tasks): mock_tasks.resolve_task_id.return_value = 3 - mock_tasks.get_task.return_value = {"id": 3, "title": "Feature"} + mock_tasks.get_task.return_value = Task.from_row({"id": 3, "title": "Feature"}) mock_tasks.update_task.return_value = True result = runner.invoke(app, ["active", "3", "-n", "On it"]) assert result.exit_code == 0 @@ -875,7 +936,7 @@ class TestTaskLog: @patch("emdx.commands.tasks.tasks") def test_log_add_message(self, mock_tasks): mock_tasks.resolve_task_id.return_value = 1 - mock_tasks.get_task.return_value = {"id": 1, "title": "Test task"} + mock_tasks.get_task.return_value = Task.from_row({"id": 1, "title": "Test task"}) mock_tasks.log_progress.return_value = 1 result = runner.invoke(app, ["log", "1", "Found the root cause"]) assert result.exit_code == 0 @@ -888,10 +949,24 @@ def test_log_add_message(self, mock_tasks): @patch("emdx.commands.tasks.tasks") def test_log_view_entries(self, mock_tasks): mock_tasks.resolve_task_id.return_value = 5 - 
mock_tasks.get_task.return_value = {"id": 5, "title": "Bug fix"} + mock_tasks.get_task.return_value = Task.from_row({"id": 5, "title": "Bug fix"}) mock_tasks.get_task_log.return_value = [ - {"message": "Started debugging", "created_at": "2026-01-15 10:00"}, - {"message": "Identified issue in middleware", "created_at": "2026-01-15 11:00"}, + TaskLogEntry.from_row( + { + "id": 1, + "task_id": 5, + "message": "Started debugging", + "created_at": "2026-01-15 10:00", + } + ), + TaskLogEntry.from_row( + { + "id": 2, + "task_id": 5, + "message": "Identified issue in middleware", + "created_at": "2026-01-15 11:00", + } + ), ] result = runner.invoke(app, ["log", "5"]) assert result.exit_code == 0 @@ -904,7 +979,7 @@ def test_log_view_entries(self, mock_tasks): @patch("emdx.commands.tasks.tasks") def test_log_view_empty(self, mock_tasks): mock_tasks.resolve_task_id.return_value = 3 - mock_tasks.get_task.return_value = {"id": 3, "title": "Clean task"} + mock_tasks.get_task.return_value = Task.from_row({"id": 3, "title": "Clean task"}) mock_tasks.get_task_log.return_value = [] result = runner.invoke(app, ["log", "3"]) assert result.exit_code == 0 @@ -930,7 +1005,7 @@ class TestTaskNote: @patch("emdx.commands.tasks.tasks") def test_note_logs_message(self, mock_tasks): mock_tasks.resolve_task_id.return_value = 1 - mock_tasks.get_task.return_value = {"id": 1, "title": "Test task"} + mock_tasks.get_task.return_value = Task.from_row({"id": 1, "title": "Test task"}) mock_tasks.log_progress.return_value = 1 result = runner.invoke(app, ["note", "1", "Tried approach X"]) assert result.exit_code == 0 @@ -963,7 +1038,7 @@ class TestTaskBlocked: @patch("emdx.commands.tasks.tasks") def test_blocked_marks_task(self, mock_tasks): mock_tasks.resolve_task_id.return_value = 1 - mock_tasks.get_task.return_value = {"id": 1, "title": "Test task"} + mock_tasks.get_task.return_value = Task.from_row({"id": 1, "title": "Test task"}) mock_tasks.update_task.return_value = True result = 
runner.invoke(app, ["blocked", "1"]) assert result.exit_code == 0 @@ -977,7 +1052,7 @@ def test_blocked_marks_task(self, mock_tasks): @patch("emdx.commands.tasks.tasks") def test_blocked_with_reason(self, mock_tasks): mock_tasks.resolve_task_id.return_value = 2 - mock_tasks.get_task.return_value = {"id": 2, "title": "Auth fix"} + mock_tasks.get_task.return_value = Task.from_row({"id": 2, "title": "Auth fix"}) mock_tasks.update_task.return_value = True result = runner.invoke(app, ["blocked", "2", "--reason", "Waiting on API key"]) assert result.exit_code == 0 @@ -991,7 +1066,7 @@ def test_blocked_with_reason(self, mock_tasks): @patch("emdx.commands.tasks.tasks") def test_blocked_with_reason_short_flag(self, mock_tasks): mock_tasks.resolve_task_id.return_value = 3 - mock_tasks.get_task.return_value = {"id": 3, "title": "Feature"} + mock_tasks.get_task.return_value = Task.from_row({"id": 3, "title": "Feature"}) mock_tasks.update_task.return_value = True result = runner.invoke(app, ["blocked", "3", "-r", "Needs review"]) assert result.exit_code == 0 @@ -1016,7 +1091,9 @@ class TestTaskPriority: @patch("emdx.commands.tasks.tasks") def test_priority_show_current(self, mock_tasks): mock_tasks.resolve_task_id.return_value = 42 - mock_tasks.get_task.return_value = {"id": 42, "title": "Fix auth", "priority": 2} + mock_tasks.get_task.return_value = Task.from_row( + {"id": 42, "title": "Fix auth", "priority": 2} + ) result = runner.invoke(app, ["priority", "42"]) assert result.exit_code == 0 out = _out(result) @@ -1027,7 +1104,9 @@ def test_priority_show_current(self, mock_tasks): @patch("emdx.commands.tasks.tasks") def test_priority_show_default(self, mock_tasks): mock_tasks.resolve_task_id.return_value = 10 - mock_tasks.get_task.return_value = {"id": 10, "title": "Some task", "priority": 3} + mock_tasks.get_task.return_value = Task.from_row( + {"id": 10, "title": "Some task", "priority": 3} + ) result = runner.invoke(app, ["priority", "10"]) assert result.exit_code == 0 out 
= _out(result) @@ -1036,7 +1115,9 @@ def test_priority_show_default(self, mock_tasks): @patch("emdx.commands.tasks.tasks") def test_priority_set_value(self, mock_tasks): mock_tasks.resolve_task_id.return_value = 42 - mock_tasks.get_task.return_value = {"id": 42, "title": "Fix auth", "priority": 3} + mock_tasks.get_task.return_value = Task.from_row( + {"id": 42, "title": "Fix auth", "priority": 3} + ) mock_tasks.update_task.return_value = True result = runner.invoke(app, ["priority", "42", "1"]) assert result.exit_code == 0 @@ -1048,7 +1129,9 @@ def test_priority_set_value(self, mock_tasks): @patch("emdx.commands.tasks.tasks") def test_priority_set_value_5(self, mock_tasks): mock_tasks.resolve_task_id.return_value = 5 - mock_tasks.get_task.return_value = {"id": 5, "title": "Low task", "priority": 3} + mock_tasks.get_task.return_value = Task.from_row( + {"id": 5, "title": "Low task", "priority": 3} + ) mock_tasks.update_task.return_value = True result = runner.invoke(app, ["priority", "5", "5"]) assert result.exit_code == 0 @@ -1057,7 +1140,9 @@ def test_priority_set_value_5(self, mock_tasks): @patch("emdx.commands.tasks.tasks") def test_priority_invalid_value_too_high(self, mock_tasks): mock_tasks.resolve_task_id.return_value = 42 - mock_tasks.get_task.return_value = {"id": 42, "title": "Fix auth", "priority": 3} + mock_tasks.get_task.return_value = Task.from_row( + {"id": 42, "title": "Fix auth", "priority": 3} + ) result = runner.invoke(app, ["priority", "42", "6"]) assert result.exit_code == 1 assert "between 1 and 5" in _out(result) @@ -1065,7 +1150,9 @@ def test_priority_invalid_value_too_high(self, mock_tasks): @patch("emdx.commands.tasks.tasks") def test_priority_invalid_value_too_low(self, mock_tasks): mock_tasks.resolve_task_id.return_value = 42 - mock_tasks.get_task.return_value = {"id": 42, "title": "Fix auth", "priority": 3} + mock_tasks.get_task.return_value = Task.from_row( + {"id": 42, "title": "Fix auth", "priority": 3} + ) result = 
runner.invoke(app, ["priority", "42", "0"]) assert result.exit_code == 1 assert "between 1 and 5" in _out(result) @@ -1081,7 +1168,9 @@ def test_priority_task_not_found(self, mock_tasks): @patch("emdx.commands.tasks.tasks") def test_priority_with_prefixed_id(self, mock_tasks): mock_tasks.resolve_task_id.return_value = 78 - mock_tasks.get_task.return_value = {"id": 78, "title": "FEAT-5: Feature", "priority": 3} + mock_tasks.get_task.return_value = Task.from_row( + {"id": 78, "title": "FEAT-5: Feature", "priority": 3} + ) mock_tasks.update_task.return_value = True result = runner.invoke(app, ["priority", "FEAT-5", "2"]) assert result.exit_code == 0 @@ -1093,7 +1182,9 @@ def test_priority_show_json(self, mock_tasks): import json mock_tasks.resolve_task_id.return_value = 42 - mock_tasks.get_task.return_value = {"id": 42, "title": "Fix auth", "priority": 2} + mock_tasks.get_task.return_value = Task.from_row( + {"id": 42, "title": "Fix auth", "priority": 2} + ) result = runner.invoke(app, ["priority", "42", "--json"]) assert result.exit_code == 0 data = json.loads(result.stdout) @@ -1105,7 +1196,9 @@ def test_priority_set_json(self, mock_tasks): import json mock_tasks.resolve_task_id.return_value = 42 - mock_tasks.get_task.return_value = {"id": 42, "title": "Fix auth", "priority": 3} + mock_tasks.get_task.return_value = Task.from_row( + {"id": 42, "title": "Fix auth", "priority": 3} + ) mock_tasks.update_task.return_value = True result = runner.invoke(app, ["priority", "42", "1", "--json"]) assert result.exit_code == 0 @@ -1253,12 +1346,12 @@ class TestTaskDepList: @patch("emdx.commands.tasks.tasks") def test_dep_list_shows_both_directions(self, mock_tasks): mock_tasks.resolve_task_id.return_value = 5 - mock_tasks.get_task.return_value = {"id": 5, "title": "Middle task"} + mock_tasks.get_task.return_value = Task.from_row({"id": 5, "title": "Middle task"}) mock_tasks.get_dependencies.return_value = [ - {"id": 3, "title": "Blocker", "status": "active"}, + 
Task.from_row({"id": 3, "title": "Blocker", "status": "active"}), ] mock_tasks.get_dependents.return_value = [ - {"id": 8, "title": "Downstream", "status": "open"}, + Task.from_row({"id": 8, "title": "Downstream", "status": "open"}), ] result = runner.invoke(app, ["dep", "list", "5"]) assert result.exit_code == 0 @@ -1273,7 +1366,7 @@ def test_dep_list_shows_both_directions(self, mock_tasks): @patch("emdx.commands.tasks.tasks") def test_dep_list_no_deps(self, mock_tasks): mock_tasks.resolve_task_id.return_value = 1 - mock_tasks.get_task.return_value = {"id": 1, "title": "Solo task"} + mock_tasks.get_task.return_value = Task.from_row({"id": 1, "title": "Solo task"}) mock_tasks.get_dependencies.return_value = [] mock_tasks.get_dependents.return_value = [] result = runner.invoke(app, ["dep", "list", "1"]) @@ -1291,9 +1384,9 @@ def test_dep_list_not_found(self, mock_tasks): @patch("emdx.commands.tasks.tasks") def test_dep_list_json(self, mock_tasks): mock_tasks.resolve_task_id.return_value = 5 - mock_tasks.get_task.return_value = {"id": 5, "title": "Task"} + mock_tasks.get_task.return_value = Task.from_row({"id": 5, "title": "Task"}) mock_tasks.get_dependencies.return_value = [ - {"id": 3, "title": "Dep", "status": "done"}, + Task.from_row({"id": 3, "title": "Dep", "status": "done"}), ] mock_tasks.get_dependents.return_value = [] result = runner.invoke(app, ["dep", "list", "5", "--json"]) @@ -1313,11 +1406,13 @@ class TestTaskChain: @patch("emdx.commands.tasks.tasks") def test_chain_shows_upstream_and_downstream(self, mock_tasks): mock_tasks.resolve_task_id.return_value = 5 - mock_tasks.get_task.return_value = { - "id": 5, - "title": "Middle task", - "status": "open", - } + mock_tasks.get_task.return_value = Task.from_row( + { + "id": 5, + "title": "Middle task", + "status": "open", + } + ) # Walk up: task 5 depends on 3 mock_tasks.get_dependencies.side_effect = lambda tid: ( [{"id": 3, "title": "First", "status": "done"}] if tid == 5 else [] @@ -1341,11 +1436,13 @@ 
def test_chain_shows_upstream_and_downstream(self, mock_tasks): @patch("emdx.commands.tasks.tasks") def test_chain_no_deps(self, mock_tasks): mock_tasks.resolve_task_id.return_value = 1 - mock_tasks.get_task.return_value = { - "id": 1, - "title": "Solo task", - "status": "open", - } + mock_tasks.get_task.return_value = Task.from_row( + { + "id": 1, + "title": "Solo task", + "status": "open", + } + ) mock_tasks.get_dependencies.return_value = [] mock_tasks.get_dependents.return_value = [] result = runner.invoke(app, ["chain", "1"]) @@ -1364,11 +1461,13 @@ def test_chain_not_found(self, mock_tasks): @patch("emdx.commands.tasks.tasks") def test_chain_json(self, mock_tasks): mock_tasks.resolve_task_id.return_value = 5 - mock_tasks.get_task.return_value = { - "id": 5, - "title": "Middle", - "status": "open", - } + mock_tasks.get_task.return_value = Task.from_row( + { + "id": 5, + "title": "Middle", + "status": "open", + } + ) mock_tasks.get_dependencies.side_effect = lambda tid: ( [{"id": 3, "title": "Up", "status": "done"}] if tid == 5 else [] ) @@ -1393,18 +1492,20 @@ class TestPrefixedTaskId: @patch("emdx.commands.tasks.tasks") def test_view_with_prefixed_id(self, mock_tasks): mock_tasks.resolve_task_id.return_value = 78 - mock_tasks.get_task.return_value = { - "id": 78, - "title": "TOOL-12: Build widget", - "status": "open", - "description": "", - "epic_key": "TOOL", - "epic_seq": 12, - "parent_task_id": None, - "source_doc_id": None, - "priority": 3, - "created_at": "2026-01-15", - } + mock_tasks.get_task.return_value = Task.from_row( + { + "id": 78, + "title": "TOOL-12: Build widget", + "status": "open", + "description": "", + "epic_key": "TOOL", + "epic_seq": 12, + "parent_task_id": None, + "source_doc_id": None, + "priority": 3, + "created_at": "2026-01-15", + } + ) mock_tasks.get_dependencies.return_value = [] mock_tasks.get_dependents.return_value = [] mock_tasks.get_task_log.return_value = [] @@ -1419,7 +1520,9 @@ def test_view_with_prefixed_id(self, 
mock_tasks): @patch("emdx.commands.tasks.tasks") def test_done_with_prefixed_id(self, mock_tasks): mock_tasks.resolve_task_id.return_value = 78 - mock_tasks.get_task.return_value = {"id": 78, "title": "TOOL-12: Build widget"} + mock_tasks.get_task.return_value = Task.from_row( + {"id": 78, "title": "TOOL-12: Build widget"} + ) mock_tasks.update_task.return_value = True result = runner.invoke(app, ["done", "TOOL-12"]) assert result.exit_code == 0 @@ -1429,7 +1532,7 @@ def test_done_with_prefixed_id(self, mock_tasks): @patch("emdx.commands.tasks.tasks") def test_active_with_prefixed_id(self, mock_tasks): mock_tasks.resolve_task_id.return_value = 50 - mock_tasks.get_task.return_value = {"id": 50, "title": "SEC-3: Fix XSS"} + mock_tasks.get_task.return_value = Task.from_row({"id": 50, "title": "SEC-3: Fix XSS"}) mock_tasks.update_task.return_value = True result = runner.invoke(app, ["active", "SEC-3"]) assert result.exit_code == 0 @@ -1439,7 +1542,7 @@ def test_active_with_prefixed_id(self, mock_tasks): @patch("emdx.commands.tasks.tasks") def test_blocked_with_prefixed_id(self, mock_tasks): mock_tasks.resolve_task_id.return_value = 33 - mock_tasks.get_task.return_value = {"id": 33, "title": "DEBT-5: Refactor"} + mock_tasks.get_task.return_value = Task.from_row({"id": 33, "title": "DEBT-5: Refactor"}) mock_tasks.update_task.return_value = True result = runner.invoke(app, ["blocked", "DEBT-5"]) assert result.exit_code == 0 @@ -1449,7 +1552,7 @@ def test_blocked_with_prefixed_id(self, mock_tasks): @patch("emdx.commands.tasks.tasks") def test_note_with_prefixed_id(self, mock_tasks): mock_tasks.resolve_task_id.return_value = 10 - mock_tasks.get_task.return_value = {"id": 10, "title": "FEAT-1: Add auth"} + mock_tasks.get_task.return_value = Task.from_row({"id": 10, "title": "FEAT-1: Add auth"}) mock_tasks.log_progress.return_value = 1 result = runner.invoke(app, ["note", "FEAT-1", "Working on it"]) assert result.exit_code == 0 @@ -1459,7 +1562,7 @@ def 
test_note_with_prefixed_id(self, mock_tasks): @patch("emdx.commands.tasks.tasks") def test_log_with_prefixed_id(self, mock_tasks): mock_tasks.resolve_task_id.return_value = 20 - mock_tasks.get_task.return_value = {"id": 20, "title": "FIX-7: Memory leak"} + mock_tasks.get_task.return_value = Task.from_row({"id": 20, "title": "FIX-7: Memory leak"}) mock_tasks.get_task_log.return_value = [] result = runner.invoke(app, ["log", "FIX-7"]) assert result.exit_code == 0 @@ -1468,7 +1571,9 @@ def test_log_with_prefixed_id(self, mock_tasks): @patch("emdx.commands.tasks.tasks") def test_delete_with_prefixed_id(self, mock_tasks): mock_tasks.resolve_task_id.return_value = 42 - mock_tasks.get_task.return_value = {"id": 42, "title": "ARCH-2: Split module"} + mock_tasks.get_task.return_value = Task.from_row( + {"id": 42, "title": "ARCH-2: Split module"} + ) mock_tasks.delete_task.return_value = True result = runner.invoke(app, ["delete", "ARCH-2", "--force"]) assert result.exit_code == 0 From c57ff6da28ee84c48d25d0058223075614dd907b Mon Sep 17 00:00:00 2001 From: Alex Rockwell Date: Sat, 7 Mar 2026 02:33:06 -0500 Subject: [PATCH 7/8] refactor: purge dict-compat layer, add Category dataclass (Issue #ARCH-24) Remove __getitem__/.get()/keys()/items()/values()/__contains__ from Document, SearchHit, Task, and TaskLogEntry dataclasses. Convert all ~220 bracket access sites across 26 files to attribute access. Add Category dataclass replacing CategoryDict/CategoryWithStatsDict TypedDicts. Wire into categories.py, commands, and tests. Net -217 lines removed. All 2076 tests pass. 
Co-Authored-By: Claude Opus 4.6 --- emdx/commands/briefing.py | 12 +- emdx/commands/categories.py | 12 +- emdx/commands/core.py | 16 +- emdx/commands/epics.py | 48 +++--- emdx/commands/tasks.py | 182 ++++++++++++----------- emdx/commands/wiki.py | 12 +- emdx/models/categories.py | 16 +- emdx/models/category.py | 106 ++++++++++++++ emdx/models/document.py | 55 +------ emdx/models/search.py | 43 +----- emdx/models/task.py | 76 +--------- emdx/models/types.py | 14 -- emdx/services/hybrid_search.py | 30 ++-- emdx/ui/activity/activity_data.py | 20 +-- emdx/ui/activity/activity_items.py | 4 +- emdx/ui/activity/activity_view.py | 12 +- emdx/ui/modals.py | 2 +- emdx/ui/task_browser.py | 2 +- emdx/ui/task_view.py | 226 ++++++++++++++--------------- tests/test_categories.py | 56 +++---- tests/test_document_model.py | 99 ++++--------- tests/test_documents.py | 84 +++++------ tests/test_epics.py | 84 +++++------ tests/test_search.py | 78 +++++----- tests/test_sqlite_database.py | 10 +- tests/test_task_browser.py | 4 +- tests/test_task_commands.py | 118 ++++++++------- 27 files changed, 655 insertions(+), 766 deletions(-) create mode 100644 emdx/models/category.py diff --git a/emdx/commands/briefing.py b/emdx/commands/briefing.py index 780a20ce..542f38c2 100644 --- a/emdx/commands/briefing.py +++ b/emdx/commands/briefing.py @@ -432,19 +432,19 @@ def _briefing_save(hours: int, model: str | None) -> None: ) if tasks: - done = [t for t in tasks if t["status"] == "done"] - active_tasks = [t for t in tasks if t["status"] == "active"] - blocked = [t for t in tasks if t["status"] == "blocked"] + done = [t for t in tasks if t.status == "done"] + active_tasks = [t for t in tasks if t.status == "active"] + blocked = [t for t in tasks if t.status == "blocked"] task_lines = [] if done: task_lines.append("## Completed Tasks") - task_lines.extend(f"- {t['title']}" for t in done[:10]) + task_lines.extend(f"- {t.title}" for t in done[:10]) if active_tasks: task_lines.append("## In-Progress 
Tasks") - task_lines.extend(f"- {t['title']}" for t in active_tasks[:5]) + task_lines.extend(f"- {t.title}" for t in active_tasks[:5]) if blocked: task_lines.append("## Blocked Tasks") - task_lines.extend(f"- {t['title']}" for t in blocked[:5]) + task_lines.extend(f"- {t.title}" for t in blocked[:5]) if task_lines: sections.append("\n".join(task_lines)) diff --git a/emdx/commands/categories.py b/emdx/commands/categories.py index 3ec9e097..8dadf3fa 100644 --- a/emdx/commands/categories.py +++ b/emdx/commands/categories.py @@ -59,12 +59,12 @@ def list_cmd() -> None: for c in cats: table.add_row( - c["key"], - c["name"], - str(c["open_count"]), - str(c["done_count"]), - str(c["epic_count"]), - str(c["total_count"]), + c.key, + c.name, + str(c.open_count), + str(c.done_count), + str(c.epic_count), + str(c.total_count), ) console.print(table) diff --git a/emdx/commands/core.py b/emdx/commands/core.py index 7a49a47e..e2294622 100644 --- a/emdx/commands/core.py +++ b/emdx/commands/core.py @@ -649,11 +649,7 @@ def _find_list_all( if json_output: json_docs = [] for doc in docs: - d: dict[str, Any] = dict(doc) - if d["created_at"]: - d["created_at"] = d["created_at"].isoformat() - if d.get("accessed_at"): - d["accessed_at"] = d["accessed_at"].isoformat() + d = doc.to_dict() json_docs.append(d) print(json.dumps(json_docs, indent=2)) return @@ -705,11 +701,7 @@ def _find_recent( if json_output: json_docs = [] for doc in docs: - d: dict[str, Any] = dict(doc) - if d.get("created_at"): - d["created_at"] = d["created_at"].isoformat() - if d.get("accessed_at"): - d["accessed_at"] = d["accessed_at"].isoformat() + d = doc.to_dict() json_docs.append(d) print(json.dumps(json_docs, indent=2)) return @@ -884,7 +876,7 @@ def _find_keyword_search( # Combine: only show documents that match both criteria results: list[dict[str, Any]] = [ - dict(doc) for doc in search_results if doc.id in tag_doc_ids + doc.to_dict() for doc in search_results if doc.id in tag_doc_ids ][:limit] if not results: 
@@ -1675,7 +1667,7 @@ def delete( if failed: console.print(f"\n[red]Failed to delete {len(failed)} document(s):[/red]") for doc in failed: - console.print(f" [dim]• #{doc['id']}: {doc['title']}[/dim]") + console.print(f" [dim]• #{doc.id}: {doc.title}[/dim]") except typer.Abort: console.print("[yellow]Deletion cancelled[/yellow]") diff --git a/emdx/commands/epics.py b/emdx/commands/epics.py index 3148744e..70e54957 100644 --- a/emdx/commands/epics.py +++ b/emdx/commands/epics.py @@ -46,7 +46,7 @@ def create( try: epic_id = tasks.create_epic(name, cat, description or "") epic = tasks.get_task(epic_id) - seq = epic["epic_seq"] if epic else None + seq = epic.epic_seq if epic else None key_label = f"{cat.upper()}-{seq}" if seq else f"#{epic_id}" id_suffix = f" (#{epic_id})" if seq else "" console.print(f"[green]Created epic {key_label}{id_suffix}: {name}[/green]") @@ -83,16 +83,16 @@ def list_cmd( table.add_column("Total", justify="right", width=6) for e in epics: - epic_key = e.get("epic_key") or "" - epic_seq = e.get("epic_seq") - key_label = f"{epic_key}-{epic_seq}" if epic_key and epic_seq else str(e["id"]) + epic_key = e.epic_key or "" + epic_seq = e.epic_seq + key_label = f"{epic_key}-{epic_seq}" if epic_key and epic_seq else str(e.id) table.add_row( key_label, - e["title"][:40], - e["status"], - str(e["children_open"]), - str(e["children_done"]), - str(e["child_count"]), + e.title[:40], + e.status, + str(e.children_open), + str(e.children_done), + str(e.child_count), ) console.print(table) @@ -114,23 +114,21 @@ def view( console.print(f"[red]Epic #{epic_id} not found[/red]") raise typer.Exit(1) - cat_label = f" ({epic['epic_key']})" if epic.get("epic_key") else "" - console.print( - f"\n[bold]Epic #{epic['id']}: {epic['title']}{cat_label}[/bold] — {epic['status']}" - ) - if epic.get("description"): - console.print(f"[dim]{epic['description']}[/dim]") - console.print(f"[dim]Created: {epic.get('created_at', 'unknown')}[/dim]\n") + cat_label = f" ({epic.epic_key})" 
if epic.epic_key else "" + console.print(f"\n[bold]Epic #{epic.id}: {epic.title}{cat_label}[/bold] — {epic.status}") + if epic.description: + console.print(f"[dim]{epic.description}[/dim]") + console.print(f"[dim]Created: {epic.created_at or 'unknown'}[/dim]\n") - children = epic.get("children", []) + children = epic.children if children: console.print("[bold]Tasks:[/bold]") done_count = 0 for c in children: - icon = ICONS.get(c["status"], "?") - seq_label = f"{c['epic_key']}-{c['epic_seq']}" if c.get("epic_seq") else f"#{c['id']}" - console.print(f" {icon} {seq_label} {c['title']}") - if c["status"] == "done": + icon = ICONS.get(c.status, "?") + seq_label = f"{c.epic_key}-{c.epic_seq}" if c.epic_seq else f"#{c.id}" + console.print(f" {icon} {seq_label} {c.title}") + if c.status == "done": done_count += 1 console.print(f"\n[dim]Progress: {done_count}/{len(children)} done[/dim]") else: @@ -181,7 +179,7 @@ def done( raise typer.Exit(1) tasks.update_task(epic_id, status="done") - console.print(f"[green]✓ Done:[/green] Epic #{epic_id} {epic['title']}") + console.print(f"[green]✓ Done:[/green] Epic #{epic_id} {epic.title}") @app.command() @@ -201,7 +199,7 @@ def active( raise typer.Exit(1) tasks.update_task(epic_id, status="active") - console.print(f"[green]● Active:[/green] Epic #{epic_id} {epic['title']}") + console.print(f"[green]● Active:[/green] Epic #{epic_id} {epic.title}") TASK_ID_HELP = "Task ID (e.g. 
42 or TOOL-12)" @@ -239,7 +237,7 @@ def attach( epic_task = tasks.get_task(epic_id) epic_label = f"#{epic_id}" - if epic_task and epic_task.get("epic_key") and epic_task.get("epic_seq"): - epic_label = f"{epic_task['epic_key']}-{epic_task['epic_seq']}" + if epic_task and epic_task.epic_key and epic_task.epic_seq: + epic_label = f"{epic_task.epic_key}-{epic_task.epic_seq}" console.print(f"[green]✅ Attached {count} task(s) to epic {epic_label}[/green]") diff --git a/emdx/commands/tasks.py b/emdx/commands/tasks.py index 458c3bcf..e86be097 100644 --- a/emdx/commands/tasks.py +++ b/emdx/commands/tasks.py @@ -51,7 +51,7 @@ def _blocker_summary(task_id: int) -> str: deps = tasks.get_dependencies(task_id) if not deps: return "" - open_deps = [d for d in deps if d["status"] not in ("done", "closed", "wontdo", "duplicate")] + open_deps = [d for d in deps if d.status not in ("done", "closed", "wontdo", "duplicate")] if not open_deps: return "" names = ", ".join(_display_id(d) for d in open_deps[:3]) @@ -61,9 +61,9 @@ def _blocker_summary(task_id: int) -> str: def _display_id(task: Task) -> str: """Return KEY-N display ID if available, otherwise #id.""" - if task.get("epic_key") and task.get("epic_seq"): - return f"{task['epic_key']}-{task['epic_seq']}" - return f"#{task['id']}" + if task.epic_key and task.epic_seq: + return f"{task.epic_key}-{task.epic_seq}" + return f"#{task.id}" def _resolve_id( @@ -123,8 +123,8 @@ def add( raise typer.Exit(1) parent_task_id = epic_id # Inherit epic_key from the parent epic if not explicitly set - if not epic_key and parent_task.get("epic_key"): - epic_key = parent_task["epic_key"] + if not epic_key and parent_task.epic_key: + epic_key = parent_task.epic_key depends_on = after if after else None @@ -186,8 +186,8 @@ def plan( raise typer.Exit(1) epic_key = cat.upper() if cat else None - if not epic_key and parent_task.get("epic_key"): - epic_key = parent_task["epic_key"] + if not epic_key and parent_task.epic_key: + epic_key = 
parent_task.epic_key created: list[dict[str, str | int]] = [] prev_id: int | None = None @@ -284,14 +284,14 @@ def done( if json_output: result: dict[str, str | int | None] = { "id": task_id, - "title": task["title"], + "title": task.title, "status": "done", } if output_doc is not None: result["output_doc_id"] = output_doc print_json(result) else: - msg = f"[green]✓ Done:[/green] {_display_id(task)} {task['title']}" + msg = f"[green]✓ Done:[/green] {_display_id(task)} {task.title}" if output_doc is not None: msg += f" [dim](output #{output_doc})[/dim]" console.print(msg) @@ -328,9 +328,9 @@ def wontdo( tasks.log_progress(task_id, f"Won't do: {note}") if json_output: - print_json({"id": task_id, "title": task["title"], "status": "wontdo"}) + print_json({"id": task_id, "title": task.title, "status": "wontdo"}) else: - console.print(f"[dim]⊘ Won't do:[/dim] {_display_id(task)} {task['title']}") + console.print(f"[dim]⊘ Won't do:[/dim] {_display_id(task)} {task.title}") @app.command() @@ -364,9 +364,9 @@ def duplicate( tasks.log_progress(task_id, f"Duplicate: {note}") if json_output: - print_json({"id": task_id, "title": task["title"], "status": "duplicate"}) + print_json({"id": task_id, "title": task.title, "status": "duplicate"}) else: - console.print(f"[dim]◆ Duplicate:[/dim] {_display_id(task)} {task['title']}") + console.print(f"[dim]◆ Duplicate:[/dim] {_display_id(task)} {task.title}") @app.command() @@ -388,31 +388,31 @@ def view( console.print(f"[red]Task {task_id_str} not found[/red]") raise typer.Exit(1) - icon = ICONS.get(task["status"], "?") + icon = ICONS.get(task.status, "?") display = _display_id(task) - console.print(f"\n[bold]{icon} {display}: {task['title']}[/bold]") + console.print(f"\n[bold]{icon} {display}: {task.title}[/bold]") # Metadata line - meta = [f"Status: {task['status']}"] - if task.get("epic_key"): - meta.append(f"Category: {task['epic_key']}") - parent_task_id: int | None = task.get("parent_task_id") + meta = [f"Status: {task.status}"] 
+ if task.epic_key: + meta.append(f"Category: {task.epic_key}") + parent_task_id: int | None = task.parent_task_id if parent_task_id: parent = tasks.get_task(parent_task_id) - epic_label = _display_id(parent) if parent else task.get("epic_key", "?") + epic_label = _display_id(parent) if parent else (task.epic_key or "?") meta.append(f"Epic: {epic_label}") - if task.get("priority") and task["priority"] != 3: - meta.append(f"Priority: {task['priority']}") + if task.priority and task.priority != 3: + meta.append(f"Priority: {task.priority}") console.print(f"[dim]{' | '.join(meta)}[/dim]") - if task.get("created_at"): - console.print(f"[dim]Created: {task['created_at']}[/dim]") + if task.created_at: + console.print(f"[dim]Created: {task.created_at}[/dim]") # Linked documents from emdx.models.documents import get_document - source_id = task.get("source_doc_id") - output_id = task.get("output_doc_id") + source_id = task.source_doc_id + output_id = task.output_doc_id if source_id or output_id: console.print() if source_id: @@ -429,7 +429,7 @@ def view( console.print(f" [dim]Output:[/dim] #{output_id} [dim](deleted)[/dim]") # Description - desc = task.get("description") or "" + desc = task.description or "" if desc: console.print() from emdx.ui.markdown_config import MarkdownConfig @@ -442,23 +442,23 @@ def view( if deps: console.print("\n[bold]Blocked by:[/bold]") for d in deps: - dep_icon = ICONS.get(d["status"], "?") - console.print(f" {dep_icon} {_display_id(d)} {d['title']}") + dep_icon = ICONS.get(d.status, "?") + console.print(f" {dep_icon} {_display_id(d)} {d.title}") dependents = tasks.get_dependents(task_id) if dependents: console.print("\n[bold]Blocks:[/bold]") for d in dependents: - dep_icon = ICONS.get(d["status"], "?") - console.print(f" {dep_icon} {_display_id(d)} {d['title']}") + dep_icon = ICONS.get(d.status, "?") + console.print(f" {dep_icon} {_display_id(d)} {d.title}") # Work log log = tasks.get_task_log(task_id, limit=5) if log: 
console.print("\n[bold]Work log:[/bold]") for entry in log: - ts = entry.get("created_at", "") - console.print(f" [dim]{ts}[/dim] {entry['message']}") + ts = entry.created_at or "" + console.print(f" [dim]{ts}[/dim] {entry.message}") @app.command() @@ -485,7 +485,7 @@ def active( if note: tasks.log_progress(task_id, note) - console.print(f"[blue]● Active:[/blue] {_display_id(task)} {task['title']}") + console.print(f"[blue]● Active:[/blue] {_display_id(task)} {task.title}") @app.command() @@ -519,10 +519,10 @@ def log( console.print(f"[yellow]No log entries for {_display_id(task)}[/yellow]") return - console.print(f"\n[bold]Log for {_display_id(task)}: {task['title']}[/bold]") + console.print(f"\n[bold]Log for {_display_id(task)}: {task.title}[/bold]") for entry in entries: - ts = entry.get("created_at", "") - console.print(f" [dim]{ts}[/dim] {entry['message']}") + ts = entry.created_at or "" + console.print(f" [dim]{ts}[/dim] {entry.message}") @app.command() @@ -610,19 +610,19 @@ def _assemble_brief( "id": task_id, "display_id": display, "title": _display_title(task), - "status": task["status"], - "priority": task.get("priority", 3), - "category": task.get("epic_key"), - "description": task.get("description") or "", + "status": task.status, + "priority": task.priority, + "category": task.epic_key, + "description": task.description or "", } # Epic info - parent_id = task.get("parent_task_id") + parent_id = task.parent_task_id if parent_id: parent = tasks.get_task(parent_id) data["epic"] = { "id": parent_id, - "title": parent["title"] if parent else "(deleted)", + "title": parent.title if parent else "(deleted)", "display_id": _display_id(parent) if parent else f"#{parent_id}", } @@ -630,10 +630,10 @@ def _assemble_brief( deps = tasks.get_dependencies(task_id) data["dependencies"] = [ { - "id": d["id"], + "id": d.id, "display_id": _display_id(d), "title": _display_title(d), - "status": d["status"], + "status": d.status, } for d in deps ] @@ -642,10 +642,10 @@ def 
_assemble_brief( dependents = tasks.get_dependents(task_id) data["dependents"] = [ { - "id": d["id"], + "id": d.id, "display_id": _display_id(d), "title": _display_title(d), - "status": d["status"], + "status": d.status, } for d in dependents ] @@ -654,24 +654,22 @@ def _assemble_brief( children = tasks.get_children(task_id) data["subtasks"] = [ { - "id": c["id"], + "id": c.id, "display_id": _display_id(c), "title": _display_title(c), - "status": c["status"], + "status": c.status, } for c in children ] # Task log log_entries = tasks.get_task_log(task_id, limit=log_limit) - data["log"] = [ - {"created_at": e.get("created_at", ""), "message": e["message"]} for e in log_entries - ] + data["log"] = [{"created_at": e.created_at or "", "message": e.message} for e in log_entries] # Related documents related_docs: list[dict[str, object]] = [] - source_id = task.get("source_doc_id") + source_id = task.source_doc_id if source_id: source_doc = get_document(source_id) related_docs.append( @@ -682,7 +680,7 @@ def _assemble_brief( } ) - output_id: int | None = task.get("output_doc_id") + output_id: int | None = task.output_doc_id if output_id: output_doc = get_document(output_id) related_docs.append( @@ -696,9 +694,9 @@ def _assemble_brief( data["related_documents"] = related_docs # Key files extracted from description and log - all_text = task.get("description") or "" + all_text = task.description or "" for entry in log_entries: - all_text += "\n" + entry["message"] + all_text += "\n" + entry.message data["key_files"] = _extract_file_paths(all_text) return data @@ -829,7 +827,7 @@ def blocked( if reason: tasks.log_progress(task_id, f"Blocked: {reason}") - msg = f"[yellow]⊘ Blocked:[/yellow] {_display_id(task)} {task['title']}" + msg = f"[yellow]⊘ Blocked:[/yellow] {_display_id(task)} {task.title}" if reason: msg += f"\n [dim]{reason}[/dim]" console.print(msg) @@ -908,31 +906,31 @@ def list_cmd( table.add_column("Title") for t in task_list: - style = 
STATUS_STYLE.get(t["status"], "default") + style = STATUS_STYLE.get(t.status, "default") title = _display_title(t) - if t["status"] == "blocked": - blocker = _blocker_summary(t["id"]) + if t.status == "blocked": + blocker = _blocker_summary(t.id) if blocker: title += f" (blocked by {blocker})" - table.add_row(_task_label(t), Text(t["status"], style=style), title) + table.add_row(_task_label(t), Text(t.status, style=style), title) console.print(table) def _task_label(task: Task) -> str: """Format task label: DEBT-13 if epic, else #id.""" - epic_key = task.get("epic_key") - epic_seq = task.get("epic_seq") + epic_key = task.epic_key + epic_seq = task.epic_seq if epic_key and epic_seq: return f"{epic_key}-{epic_seq}" - return f"#{task['id']}" + return f"#{task.id}" def _display_title(task: Task) -> str: """Strip redundant KEY-N: prefix from title since the ID column has it.""" - title: str = task["title"] - epic_key = task.get("epic_key") - epic_seq = task.get("epic_seq") + title: str = task.title + epic_key = task.epic_key + epic_seq = task.epic_seq if epic_key and epic_seq: prefix = f"{epic_key}-{epic_seq}: " if title.startswith(prefix): @@ -967,11 +965,11 @@ def priority( raise typer.Exit(1) if value is None: - current = task.get("priority", 3) + current = task.priority if json_output: - print_json({"id": task_id, "title": task["title"], "priority": current}) + print_json({"id": task_id, "title": task.title, "priority": current}) else: - console.print(f"{_display_id(task)} {task['title']}: priority {current}") + console.print(f"{_display_id(task)} {task.title}: priority {current}") return if value < 1 or value > 5: @@ -983,7 +981,7 @@ def priority( tasks.update_task(task_id, priority=value) if json_output: - print_json({"id": task_id, "title": task["title"], "priority": value}) + print_json({"id": task_id, "title": task.title, "priority": value}) else: console.print(f"[green]✅ {_display_id(task)}[/green] priority set to {value}") @@ -1007,7 +1005,7 @@ def delete( 
raise typer.Exit(1) if not force and not is_non_interactive(): - console.print(f"Delete task {_display_id(task)}: {task['title']}?") + console.print(f"Delete task {_display_id(task)}: {task.title}?") confirm = typer.confirm("Are you sure?") if not confirm: console.print("[yellow]Cancelled[/yellow]") @@ -1111,10 +1109,10 @@ def dep_list( def _dep_summary(d: Task) -> dict[str, str | int]: return { - "id": d["id"], + "id": d.id, "display_id": _display_id(d), - "title": d["title"], - "status": d["status"], + "title": d.title, + "status": d.status, } print_json( @@ -1134,14 +1132,14 @@ def _dep_summary(d: Task) -> dict[str, str | int]: if deps: console.print(f"[bold]{display} depends on:[/bold]") for d in deps: - icon = ICONS.get(d["status"], "?") - console.print(f" {icon} {_display_id(d)} {d['title']}") + icon = ICONS.get(d.status, "?") + console.print(f" {icon} {_display_id(d)} {d.title}") if dependents: console.print(f"[bold]{display} blocks:[/bold]") for d in dependents: - icon = ICONS.get(d["status"], "?") - console.print(f" {icon} {_display_id(d)} {d['title']}") + icon = ICONS.get(d.status, "?") + console.print(f" {icon} {_display_id(d)} {d.title}") @app.command() @@ -1175,10 +1173,10 @@ def chain( def _task_summary(t: Task) -> dict[str, str | int]: return { - "id": t["id"], + "id": t.id, "display_id": _display_id(t), - "title": t["title"], - "status": t["status"], + "title": t.title, + "status": t.status, } print_json( @@ -1190,22 +1188,22 @@ def _task_summary(t: Task) -> dict[str, str | int]: ) return - icon = ICONS.get(task["status"], "?") - console.print(f"\n[bold]Chain for {display}: {task['title']}[/bold]") + icon = ICONS.get(task.status, "?") + console.print(f"\n[bold]Chain for {display}: {task.title}[/bold]") if upstream: console.print("\n[bold]Upstream (must finish first):[/bold]") for t in upstream: - t_icon = ICONS.get(t["status"], "?") - console.print(f" {t_icon} {_display_id(t)} {t['title']}") + t_icon = ICONS.get(t.status, "?") + console.print(f" 
{t_icon} {_display_id(t)} {t.title}") - console.print(f"\n [bold cyan]{icon} {display} {task['title']}[/bold cyan] ← you are here") + console.print(f"\n [bold cyan]{icon} {display} {task.title}[/bold cyan] ← you are here") if downstream: console.print("\n[bold]Downstream (waiting on this):[/bold]") for t in downstream: - t_icon = ICONS.get(t["status"], "?") - console.print(f" {t_icon} {_display_id(t)} {t['title']}") + t_icon = ICONS.get(t.status, "?") + console.print(f" {t_icon} {_display_id(t)} {t.title}") if not upstream and not downstream: console.print("\n[yellow]No dependencies in either direction[/yellow]") @@ -1229,8 +1227,8 @@ def _walk_deps(task_id: int, direction: str) -> list[Task]: neighbors = tasks.get_dependents(current) for n in neighbors: - if n["id"] not in visited: + if n.id not in visited: result.append(n) - queue.append(n["id"]) + queue.append(n.id) return result diff --git a/emdx/commands/wiki.py b/emdx/commands/wiki.py index a7d03f49..9f57d9b1 100644 --- a/emdx/commands/wiki.py +++ b/emdx/commands/wiki.py @@ -230,9 +230,9 @@ def wiki_search( if json_output: output = [ { - "id": r["id"], - "title": r["title"], - "snippet": (r.get("snippet") or "").replace("", "").replace("", ""), + "id": r.id, + "title": r.title, + "snippet": (r.snippet or "").replace("", "").replace("", ""), } for r in results ] @@ -244,11 +244,9 @@ def wiki_search( ) for i, r in enumerate(results, 1): console.print( - f"[bold cyan]#{r['id']}[/bold cyan] " - f"[bold]{r['title']}[/bold] " - f"[magenta]\\[wiki][/magenta]" + f"[bold cyan]#{r.id}[/bold cyan] [bold]{r.title}[/bold] [magenta]\\[wiki][/magenta]" ) - raw_snippet = r.get("snippet") + raw_snippet = r.snippet if snippets and raw_snippet: snippet = raw_snippet.replace("", "[bold yellow]").replace("", "[/bold yellow]") console.print(f"[dim]...{snippet}...[/dim]") diff --git a/emdx/models/categories.py b/emdx/models/categories.py index f382091c..58d345c3 100644 --- a/emdx/models/categories.py +++ b/emdx/models/categories.py 
@@ -1,10 +1,10 @@ """Category operations for task epic numbering.""" import re -from typing import cast from emdx.database import db -from emdx.models.types import CategoryDict, CategoryRenameResultDict, CategoryWithStatsDict +from emdx.models.category import Category +from emdx.models.types import CategoryRenameResultDict def create_category(key: str, name: str, description: str = "") -> str: @@ -25,16 +25,16 @@ def create_category(key: str, name: str, description: str = "") -> str: return key -def get_category(key: str) -> CategoryDict | None: +def get_category(key: str) -> Category | None: """Get category by key.""" key = key.upper() with db.get_connection() as conn: cursor = conn.execute("SELECT * FROM categories WHERE key = ?", (key,)) row = cursor.fetchone() - return cast(CategoryDict, dict(row)) if row else None + return Category.from_row(row) if row else None -def list_categories() -> list[CategoryWithStatsDict]: +def list_categories() -> list[Category]: """List categories with task count breakdowns.""" with db.get_connection() as conn: cursor = conn.execute(""" @@ -52,7 +52,7 @@ def list_categories() -> list[CategoryWithStatsDict]: GROUP BY c.key ORDER BY c.key """) - return [cast(CategoryWithStatsDict, dict(row)) for row in cursor.fetchall()] + return [Category.from_row(row) for row in cursor.fetchall()] def ensure_category(key: str) -> str: @@ -228,8 +228,8 @@ def rename_category( # Create target category if it doesn't exist new_cat = get_category(new_key) if not new_cat: - cat_name = name or old_cat["name"] - create_category(new_key, cat_name, old_cat["description"]) + cat_name = name or old_cat.name + create_category(new_key, cat_name, old_cat.description) elif name: # Update name if explicitly provided with db.get_connection() as conn: diff --git a/emdx/models/category.py b/emdx/models/category.py new file mode 100644 index 00000000..41c900c0 --- /dev/null +++ b/emdx/models/category.py @@ -0,0 +1,106 @@ +"""Category domain model for emdx. 
+ +Single source of truth for the Category type. Replaces the scattered +TypedDict projections (CategoryDict, CategoryWithStatsDict) with a +proper dataclass that supports: + +- Factory construction from sqlite3.Row with datetime parsing +- Serialization to dict for JSON output +""" + +from __future__ import annotations + +import sqlite3 +from dataclasses import asdict, dataclass, fields +from datetime import datetime +from typing import Any + +from ..utils.datetime_utils import parse_datetime + +# Fields that store datetime values and should be parsed from SQLite strings. +_CATEGORY_DATETIME_FIELDS: frozenset[str] = frozenset({"created_at"}) + + +@dataclass(slots=True) +class Category: + """Core category domain object. + + Constructed via ``Category.from_row()`` at the database boundary. + """ + + key: str + name: str + description: str = "" + created_at: datetime | None = None + + # Stats fields (populated by list_categories query, default to 0) + open_count: int = 0 + done_count: int = 0 + epic_count: int = 0 + total_count: int = 0 + + # ── Factory methods ─────────────────────────────────────────────── + + @classmethod + def from_row(cls, row: sqlite3.Row | dict[str, Any]) -> Category: + """Construct a Category from a full database row. + + Parses datetime string fields into ``datetime`` objects using + the centralized ``parse_datetime`` utility. Unknown columns in + the row are silently ignored (safe for SELECT * with extra cols). + """ + if isinstance(row, sqlite3.Row): + raw = dict(row) + else: + raw = dict(row) # defensive copy + + return cls._from_dict(raw) + + @classmethod + def from_partial_row(cls, row: sqlite3.Row | dict[str, Any]) -> Category: + """Construct a Category from a partial SELECT. + + Missing fields get their dataclass defaults. Functionally + identical to ``from_row`` — both tolerate missing columns — + but the separate name signals intent to callers. 
+ """ + return cls.from_row(row) + + @classmethod + def _from_dict(cls, raw: dict[str, Any]) -> Category: + """Internal: build a Category from a raw dict, parsing datetimes.""" + known = cls._field_names() + kwargs: dict[str, Any] = {} + for key, value in raw.items(): + if key not in known: + continue + if key in _CATEGORY_DATETIME_FIELDS and isinstance(value, str): + kwargs[key] = parse_datetime(value) + else: + kwargs[key] = value + return cls(**kwargs) + + @classmethod + def _field_names(cls) -> frozenset[str]: + """Cached set of field names for this dataclass.""" + cache_attr = "_cached_field_names" + cached: frozenset[str] | None = cls.__dict__.get(cache_attr) + if cached is not None: + return cached + names = frozenset(f.name for f in fields(cls)) + type.__setattr__(cls, cache_attr, names) + return names + + # ── Serialization ───────────────────────────────────────────────── + + def to_dict(self) -> dict[str, Any]: + """Convert to a plain dict for JSON serialization. + + Datetime fields are formatted as ISO 8601 strings. + """ + result = asdict(self) + for key in _CATEGORY_DATETIME_FIELDS: + val = result.get(key) + if isinstance(val, datetime): + result[key] = val.isoformat() + return result diff --git a/emdx/models/document.py b/emdx/models/document.py index 2605e5c9..b79f1f0b 100644 --- a/emdx/models/document.py +++ b/emdx/models/document.py @@ -5,14 +5,13 @@ etc.) with a proper dataclass that supports: - Factory construction from sqlite3.Row with datetime parsing -- Backward-compatible bracket access (doc["title"]) for incremental migration +- Attribute access (doc.title, doc.id) - Serialization to dict for JSON output """ from __future__ import annotations import sqlite3 -from collections.abc import Iterator from dataclasses import asdict, dataclass, fields from datetime import datetime from typing import Any @@ -30,8 +29,7 @@ class Document: """Core document domain object. Constructed via ``Document.from_row()`` at the database boundary. 
- Supports ``doc["field"]`` and ``doc.get("field")`` for backward - compatibility with code that previously used TypedDict dicts. + Access fields via attributes: ``doc.title``, ``doc.id``. """ id: int @@ -50,53 +48,6 @@ class Document: stage: str | None = None doc_type: str = "user" - # ── Dict-compatibility layer ────────────────────────────────────── - - def __getitem__(self, key: str) -> Any: - """Allow ``doc["title"]`` access for backward compatibility.""" - try: - return getattr(self, key) - except AttributeError: - raise KeyError(key) from None - - def get(self, key: str, default: Any = None) -> Any: - """Allow ``doc.get("title", "Untitled")`` for backward compatibility.""" - return getattr(self, key, default) - - def __contains__(self, key: object) -> bool: - """Allow ``"title" in doc`` checks.""" - if not isinstance(key, str): - return False - return key in self._field_names() - - def keys(self) -> list[str]: - """Return field names, for code that iterates dict keys.""" - return list(self._field_names()) - - def items(self) -> Iterator[tuple[str, Any]]: - """Yield (field_name, value) pairs, for dict-like iteration.""" - for name in self._field_names(): - yield name, getattr(self, name) - - def values(self) -> Iterator[Any]: - """Yield field values, for dict-like iteration.""" - for name in self._field_names(): - yield getattr(self, name) - - @classmethod - def _field_names(cls) -> frozenset[str]: - """Cached set of field names for this dataclass.""" - # Use the class-level cache if available. - cache_attr = "_cached_field_names" - cached: frozenset[str] | None = cls.__dict__.get(cache_attr) - if cached is not None: - return cached - names = frozenset(f.name for f in fields(cls)) - # slots=True means we can't set arbitrary class attrs, so we - # store on the class __dict__ via type.__setattr__. 
- type.__setattr__(cls, cache_attr, names) - return names - # ── Factory methods ─────────────────────────────────────────────── @classmethod @@ -131,7 +82,7 @@ def from_partial_row(cls, row: sqlite3.Row | dict[str, Any]) -> Document: @classmethod def _from_dict(cls, raw: dict[str, Any]) -> Document: """Internal: build a Document from a raw dict, parsing datetimes.""" - known = cls._field_names() + known = frozenset(f.name for f in fields(cls)) kwargs: dict[str, Any] = {} for key, value in raw.items(): if key not in known: diff --git a/emdx/models/search.py b/emdx/models/search.py index 7edc9af0..d7fd6cee 100644 --- a/emdx/models/search.py +++ b/emdx/models/search.py @@ -1,13 +1,12 @@ """Search result domain model for emdx. Wraps a Document with search-specific metadata (snippet, rank). -Supports the same dict-compat interface as Document. +Forwards attribute access to the inner Document via __getattr__. """ from __future__ import annotations import sqlite3 -from collections.abc import Iterator from dataclasses import dataclass from typing import Any @@ -18,8 +17,8 @@ class SearchHit: """A search result: a Document plus search metadata. - Supports ``hit["title"]`` bracket access for backward compatibility - with code that consumed SearchResult TypedDicts. + Access document fields via attributes: ``hit.title``, ``hit.id``. + Attribute access is forwarded to the inner Document via __getattr__. 
""" doc: Document @@ -37,42 +36,6 @@ def __getattr__(self, name: str) -> Any: f"'{type(self).__name__}' object has no attribute '{name}'" ) from None - # ── Dict-compatibility layer ────────────────────────────────────── - - def __getitem__(self, key: str) -> Any: - """Access document fields or search metadata via bracket notation.""" - if key == "snippet": - return self.snippet - if key == "rank": - return self.rank - return self.doc[key] - - def get(self, key: str, default: Any = None) -> Any: - """Dict-compat .get() that checks search fields then document fields.""" - if key == "snippet": - return self.snippet - if key == "rank": - return self.rank - return self.doc.get(key, default) - - def __contains__(self, key: object) -> bool: - if key in ("snippet", "rank"): - return True - return key in self.doc - - def keys(self) -> list[str]: - return self.doc.keys() + ["snippet", "rank"] - - def items(self) -> Iterator[tuple[str, Any]]: - yield from self.doc.items() - yield "snippet", self.snippet - yield "rank", self.rank - - def values(self) -> Iterator[Any]: - yield from self.doc.values() - yield self.snippet - yield self.rank - # ── Factory ─────────────────────────────────────────────────────── @classmethod diff --git a/emdx/models/task.py b/emdx/models/task.py index 096fdfd8..80b37b96 100644 --- a/emdx/models/task.py +++ b/emdx/models/task.py @@ -5,14 +5,13 @@ TaskLogEntryDict) with proper dataclasses that support: - Factory construction from sqlite3.Row with datetime parsing -- Backward-compatible bracket access (task["title"]) for incremental migration +- Attribute access (task.title, task.status, etc.) - Serialization to dict for JSON output """ from __future__ import annotations import sqlite3 -from collections.abc import Iterator from dataclasses import asdict, dataclass, field, fields from datetime import datetime from typing import Any @@ -28,8 +27,7 @@ class Task: """Core task domain object. Constructed via ``Task.from_row()`` at the database boundary. 
- Supports ``task["field"]`` and ``task.get("field")`` for backward - compatibility with code that previously used TypedDict dicts. + All fields are accessed as attributes (``task.title``, ``task.status``). """ id: int @@ -58,42 +56,11 @@ class Task: # Children list (populated by get_epic_view, default empty) children: list[Task] = field(default_factory=list) - # ── Dict-compatibility layer ────────────────────────────────────── - - def __getitem__(self, key: str) -> Any: - """Allow ``task["title"]`` access for backward compatibility.""" - try: - return getattr(self, key) - except AttributeError: - raise KeyError(key) from None - - def get(self, key: str, default: Any = None) -> Any: - """Allow ``task.get("title", "Untitled")`` for backward compatibility.""" - return getattr(self, key, default) - - def __contains__(self, key: object) -> bool: - """Allow ``"title" in task`` checks.""" - if not isinstance(key, str): - return False - return key in self._field_names() - - def keys(self) -> list[str]: - """Return field names, for code that iterates dict keys.""" - return list(self._field_names()) - - def items(self) -> Iterator[tuple[str, Any]]: - """Yield (field_name, value) pairs, for dict-like iteration.""" - for name in self._field_names(): - yield name, getattr(self, name) - - def values(self) -> Iterator[Any]: - """Yield field values, for dict-like iteration.""" - for name in self._field_names(): - yield getattr(self, name) + # ── Internal helpers ──────────────────────────────────────────────── @classmethod def _field_names(cls) -> frozenset[str]: - """Cached set of field names for this dataclass.""" + """Cached set of dataclass field names.""" cache_attr = "_cached_field_names" cached: frozenset[str] | None = cls.__dict__.get(cache_attr) if cached is not None: @@ -180,42 +147,11 @@ class TaskLogEntry: message: str created_at: datetime | None = None - # ── Dict-compatibility layer ────────────────────────────────────── - - def __getitem__(self, key: str) -> Any: 
- """Allow ``entry["message"]`` access for backward compatibility.""" - try: - return getattr(self, key) - except AttributeError: - raise KeyError(key) from None - - def get(self, key: str, default: Any = None) -> Any: - """Allow ``entry.get("created_at")`` for backward compatibility.""" - return getattr(self, key, default) - - def __contains__(self, key: object) -> bool: - """Allow ``"message" in entry`` checks.""" - if not isinstance(key, str): - return False - return key in self._field_names() - - def keys(self) -> list[str]: - """Return field names, for code that iterates dict keys.""" - return list(self._field_names()) - - def items(self) -> Iterator[tuple[str, Any]]: - """Yield (field_name, value) pairs, for dict-like iteration.""" - for name in self._field_names(): - yield name, getattr(self, name) - - def values(self) -> Iterator[Any]: - """Yield field values, for dict-like iteration.""" - for name in self._field_names(): - yield getattr(self, name) + # ── Internal helpers ──────────────────────────────────────────────── @classmethod def _field_names(cls) -> frozenset[str]: - """Cached set of field names for this dataclass.""" + """Cached set of dataclass field names.""" cache_attr = "_cached_field_names" cached: frozenset[str] | None = cls.__dict__.get(cache_attr) if cached is not None: diff --git a/emdx/models/types.py b/emdx/models/types.py index 52c7f152..e363313e 100644 --- a/emdx/models/types.py +++ b/emdx/models/types.py @@ -12,20 +12,6 @@ TaskRef: TypeAlias = str -class CategoryDict(TypedDict): - key: str - name: str - description: str - created_at: str | None - - -class CategoryWithStatsDict(CategoryDict): - open_count: int - done_count: int - epic_count: int - total_count: int - - class CategoryRenameResultDict(TypedDict): tasks_moved: int epics_moved: int diff --git a/emdx/services/hybrid_search.py b/emdx/services/hybrid_search.py index 40ccc120..c794d958 100644 --- a/emdx/services/hybrid_search.py +++ b/emdx/services/hybrid_search.py @@ -470,25 
+470,21 @@ def _search_fts_parsed(self, query: SearchQuery) -> list[HybridSearchResult]: results = [] for doc in docs: - raw_rank = doc.get("rank", 0) + raw_rank = doc.rank score = max(0.0, min(1.0, 1.0 + (raw_rank / 20.0))) if raw_rank else 0.5 results.append( HybridSearchResult( - doc_id=doc["id"], - title=doc["title"], - project=doc.get("project"), + doc_id=doc.id, + title=doc.title, + project=doc.project, score=score, keyword_score=score, semantic_score=0.0, source="fts", - snippet=(doc.get("snippet") or "")[:200], - created_at=parse_datetime(doc.get("created_at")) - if doc.get("created_at") - else None, - updated_at=parse_datetime(doc.get("updated_at")) - if doc.get("updated_at") - else None, + snippet=(doc.snippet or "")[:200], + created_at=parse_datetime(doc.created_at) if doc.created_at else None, + updated_at=parse_datetime(doc.updated_at) if doc.updated_at else None, ) ) @@ -759,18 +755,18 @@ def _search_keyword( results = [] for doc in docs: - score = normalize_fts5_score(doc.get("rank", 0)) + score = normalize_fts5_score(doc.rank) results.append( HybridSearchResult( - doc_id=doc["id"], - title=doc["title"], - project=doc.get("project"), + doc_id=doc.id, + title=doc.title, + project=doc.project, score=score, keyword_score=score, semantic_score=0.0, source="keyword", - snippet=(doc.get("snippet") or "")[:200], - doc_type=doc.get("doc_type", "user"), + snippet=(doc.snippet or "")[:200], + doc_type=doc.doc_type, ) ) diff --git a/emdx/ui/activity/activity_data.py b/emdx/ui/activity/activity_data.py index 4093e9a0..8ac2a148 100644 --- a/emdx/ui/activity/activity_data.py +++ b/emdx/ui/activity/activity_data.py @@ -77,16 +77,16 @@ async def _load_documents(self, doc_type_filter: str = "all") -> list[ActivityIt for doc in docs: try: doc_id = doc.id - created = doc.get("created_at") - title = doc.get("title", "") - doc_type = doc.get("doc_type", "user") or "user" + created = doc.created_at + title = doc.title + doc_type = doc.doc_type or "user" # Apply doc_type 
filter if doc_type_filter != "all" and doc_type != doc_type_filter: continue # Compute word count from content - content = doc.get("content", "") + content = doc.content word_count = len(content.split()) if content else 0 item = DocumentItem( @@ -96,20 +96,20 @@ async def _load_documents(self, doc_type_filter: str = "all") -> list[ActivityIt timestamp=parse_datetime(created) or datetime.now(), doc_id=doc_id, doc_type=doc_type, - project=doc.get("project", "") or "", + project=doc.project or "", tags=doc_tags.get(doc_id), - access_count=doc.get("access_count", 0) or 0, + access_count=doc.access_count or 0, word_count=word_count, - updated_at=parse_datetime(doc.get("updated_at")), - accessed_at=parse_datetime(doc.get("accessed_at")), - parent_id=doc.get("parent_id"), + updated_at=parse_datetime(doc.updated_at), + accessed_at=parse_datetime(doc.accessed_at), + parent_id=doc.parent_id, ) items.append(item) except Exception as e: logger.error( - f"Error loading document {doc.get('id', '?')}: {e}", + f"Error loading document {doc.id}: {e}", exc_info=True, ) diff --git a/emdx/ui/activity/activity_items.py b/emdx/ui/activity/activity_items.py index 32a4056e..9f6d6d43 100644 --- a/emdx/ui/activity/activity_items.py +++ b/emdx/ui/activity/activity_items.py @@ -77,8 +77,8 @@ async def get_preview_content(self, doc_db: Any) -> tuple[str, str]: doc = doc_db.get_document(self.doc_id) if doc: - content = doc.get("content", "") - title = doc.get("title", "Untitled") + content = doc.content + title = doc.title or "Untitled" content_stripped = content.lstrip() if not (content_stripped.startswith(f"# {title}") or content_stripped.startswith("# ")): diff --git a/emdx/ui/activity/activity_view.py b/emdx/ui/activity/activity_view.py index d6c8d7a1..dd7769db 100644 --- a/emdx/ui/activity/activity_view.py +++ b/emdx/ui/activity/activity_view.py @@ -611,8 +611,8 @@ def show_markdown() -> None: try: doc = doc_db.get_document(item.doc_id) if doc: - content = doc.get("content", "") - title = 
doc.get("title", "Untitled") + content = doc.content + title = doc.title or "Untitled" self._render_markdown_preview(content, title, metadata_preamble=preamble) show_markdown() header.update(f"📄 #{item.doc_id}") @@ -904,8 +904,8 @@ async def action_create_gist(self) -> None: self._show_notification("Document not found", is_error=True) return - title = doc.get("title", "Untitled") - content = doc.get("content", "") + title = doc.title or "Untitled" + content = doc.content from emdx.utils.git import get_git_project @@ -948,8 +948,8 @@ async def select_document_by_id(self, doc_id: int) -> bool: if HAS_DOCS and doc_db: doc = doc_db.get_document(doc_id) if doc: - content = doc.get("content", "") - title = doc.get("title", "Untitled") + content = doc.content + title = doc.title or "Untitled" self._render_markdown_preview(content, title) header = self.query_one("#preview-header", Static) header.update(f"📄 #{doc_id}") diff --git a/emdx/ui/modals.py b/emdx/ui/modals.py index e214618a..49a3385a 100644 --- a/emdx/ui/modals.py +++ b/emdx/ui/modals.py @@ -361,7 +361,7 @@ async def on_mount(self) -> None: from .markdown_config import render_markdown_to_richlog result = get_document(self.doc_id) - self._doc_data = dict(result) if result else None + self._doc_data = result.to_dict() if result else None if not self._doc_data: self.query_one("#preview-title", Static).update( f"Document #{self.doc_id} not found" diff --git a/emdx/ui/task_browser.py b/emdx/ui/task_browser.py index 4100b2b0..64dc2982 100644 --- a/emdx/ui/task_browser.py +++ b/emdx/ui/task_browser.py @@ -116,7 +116,7 @@ def _update_footer_context(self) -> None: elif self.task_view: # Check if this is an epic/group parent task task = self.task_view._row_key_to_task.get(row_key) - if task and task.get("type") in ("epic", "group"): + if task and task.type in ("epic", "group"): bar.update(_EPIC_HEADER_FOOTER) else: bar.update(_TASK_FOOTER) diff --git a/emdx/ui/task_view.py b/emdx/ui/task_view.py index 9181007f..6d213395 
100644 --- a/emdx/ui/task_view.py +++ b/emdx/ui/task_view.py @@ -154,8 +154,8 @@ def _strip_epic_prefix(title: str, epic_key: str | None, epic_seq: int | None) - def _task_badge(task: Task) -> str: """Return the KEY-N badge for a task, or empty string if unavailable.""" - epic_key = task.get("epic_key") - epic_seq = task.get("epic_seq") + epic_key = task.epic_key + epic_seq = task.epic_seq if epic_key and epic_seq: return f"{epic_key}-{epic_seq}" if epic_key: @@ -165,9 +165,9 @@ def _task_badge(task: Task) -> str: def _task_label(task: Task) -> str: """Build a plain text label for tests and fallback display.""" - icon = STATUS_ICONS.get(task["status"], "?") - title = task["title"] - title = _strip_epic_prefix(title, task.get("epic_key"), task.get("epic_seq")) + icon = STATUS_ICONS.get(task.status, "?") + title = task.title + title = _strip_epic_prefix(title, task.epic_key, task.epic_seq) if len(title) > 50: title = title[:47] + "..." return f"{icon} {title}" @@ -469,7 +469,7 @@ def _sync_title_width(self) -> None: def _compute_fingerprint(self, tasks: list[Task]) -> str: """Fast fingerprint of task data to detect changes.""" # id:status:updated_at for each task, sorted by id - parts = sorted(f"{t['id']}:{t['status']}:{t.get('updated_at', '')}" for t in tasks) + parts = sorted(f"{t.id}:{t.status}:{t.updated_at or ''}" for t in tasks) return "|".join(parts) async def _load_tasks(self, *, restore_row: int | None = None) -> None: @@ -492,7 +492,7 @@ async def _load_tasks(self, *, restore_row: int | None = None) -> None: # Load epics for reference try: epics = list_epics() - self._epics = {e["id"]: e for e in epics} + self._epics = {e.id: e for e in epics} except Exception as e: logger.error(f"Failed to load epics: {e}") self._epics = {} @@ -509,7 +509,7 @@ async def _load_tasks(self, *, restore_row: int | None = None) -> None: for task in self._tasks: if not self._task_passes_filters(task): continue - status = STATUS_ALIASES.get(task["status"], task["status"]) + status 
= STATUS_ALIASES.get(task.status, task.status) self._tasks_by_status[status].append(task) self._render_task_table(restore_row=restore_row) @@ -521,7 +521,7 @@ def _ensure_done_tasks_loaded(self, epic_id: int) -> None: The initial load caps done tasks at 200. When a user expands a fold, fetch all done children for that epic and merge any missing ones. """ - loaded_ids = {t["id"] for t in self._tasks} + loaded_ids = {t.id for t in self._tasks} try: epic_done = list_tasks( status=["done", "wontdo", "duplicate"], @@ -531,7 +531,7 @@ def _ensure_done_tasks_loaded(self, epic_id: int) -> None: except Exception as e: logger.error(f"Failed to lazy-load done tasks for epic {epic_id}: {e}") return - new_tasks = [t for t in epic_done if t["id"] not in loaded_ids] + new_tasks = [t for t in epic_done if t.id not in loaded_ids] if new_tasks: self._tasks.extend(new_tasks) # Invalidate fingerprint so next auto-refresh doesn't discard them @@ -539,7 +539,7 @@ def _ensure_done_tasks_loaded(self, epic_id: int) -> None: def _row_key_for_task(self, task: Task) -> str: """Generate a stable row key for a task.""" - return f"task:{task['id']}" + return f"task:{task.id}" def _render_task_table(self, *, restore_row: int | None = None) -> None: """Render the grouped task list into the DataTable. 
@@ -602,17 +602,17 @@ def _render_task_row( """ row_key = self._row_key_for_task(task) self._row_key_to_task[row_key] = task - is_parent = task.get("type") in {"epic", "group"} - color = STATUS_COLORS.get(task["status"], "") - icon = "📋" if is_parent else STATUS_ICONS.get(task["status"], "?") + is_parent = task.type in {"epic", "group"} + color = STATUS_COLORS.get(task.status, "") + icon = "📋" if is_parent else STATUS_ICONS.get(task.status, "?") title = _strip_epic_prefix( - task["title"], - task.get("epic_key"), - task.get("epic_seq"), + task.title, + task.epic_key, + task.epic_seq, ) # Epic badge: parents and children show "KEY-N" colored by status - epic_key = task.get("epic_key") - epic_seq = task.get("epic_seq") + epic_key = task.epic_key + epic_seq = task.epic_seq badge_color = color or "cyan" bold_badge = f"bold {badge_color}" if badge_color else "bold" if is_parent and epic_key and epic_seq: @@ -632,12 +632,12 @@ def _render_task_row( prefix = " " if indent else "" # Show inline progress for parent tasks (epics/groups) - age_text = _format_time_short(task.get("created_at")) + age_text = _format_time_short(task.created_at) if is_parent: - epic_data = self._epics.get(task["id"]) + epic_data = self._epics.get(task.id) if epic_data: - done = epic_data.get("children_done", 0) - total = epic_data.get("child_count", 0) + done = epic_data.children_done + total = epic_data.child_count age_text = f"{done}/{total}" # Build icon cell with tree connector or indent @@ -688,15 +688,15 @@ def _render_groups_by_status(self, table: "DataTable[str | Text]") -> None: # Children whose epic is in another status group are clustered # together with tree connectors (cross-group siblings). # Tasks with no parent render normally. 
- epic_ids_in_group = {t["id"] for t in tasks if t.get("type") == "epic"} + epic_ids_in_group = {t.id for t in tasks if t.type == "epic"} children_by_parent: dict[int, list[Task]] = defaultdict(list) cross_group_by_parent: dict[int, list[Task]] = defaultdict(list) true_orphans: list[Task] = [] epics_in_order: list[Task] = [] for task in tasks: - parent_id = task.get("parent_task_id") - if task.get("type") == "epic": + parent_id = task.parent_task_id + if task.type == "epic": epics_in_order.append(task) elif parent_id and parent_id in epic_ids_in_group: children_by_parent[parent_id].append(task) @@ -709,7 +709,7 @@ def _render_groups_by_status(self, table: "DataTable[str | Text]") -> None: # Render epics with their children for epic_task in epics_in_order: self._render_task_row(table, epic_task) - children = children_by_parent.get(epic_task["id"], []) + children = children_by_parent.get(epic_task.id, []) for i, child in enumerate(children): is_last = i == len(children) - 1 connector = "└─" if is_last else "├─" @@ -719,9 +719,9 @@ def _render_groups_by_status(self, table: "DataTable[str | Text]") -> None: for parent_id, children in cross_group_by_parent.items(): epic_data = self._epics.get(parent_id) if epic_data: - ek = epic_data.get("epic_key", "") - done = epic_data.get("children_done", 0) - total = epic_data.get("child_count", 0) + ek = epic_data.epic_key or "" + done = epic_data.children_done + total = epic_data.child_count ref_text = f"{ek} ({done}/{total} done)" else: ref_text = f"(parent {parent_id})" @@ -760,8 +760,8 @@ def _render_groups_by_epic(self, table: "DataTable[str | Text]") -> None: referenced_parents: set[int] = set() for status in STATUS_ORDER: for task in self._tasks_by_status.get(status, []): - all_loaded[task["id"]] = task - pid = task.get("parent_task_id") + all_loaded[task.id] = task + pid = task.parent_task_id if pid is not None: referenced_parents.add(pid) @@ -772,11 +772,11 @@ def _render_groups_by_epic(self, table: "DataTable[str | 
Text]") -> None: ) epic_task_by_id: dict[int, Task] = {} for task in all_loaded.values(): - is_parent = task.get("type") in parent_types or task["id"] in referenced_parents + is_parent = task.type in parent_types or task.id in referenced_parents if is_parent: - epic_task_by_id[task["id"]] = task + epic_task_by_id[task.id] = task else: - parent = task.get("parent_task_id") + parent = task.parent_task_id children_by_parent[parent].append(task) # Build ordered list of parent IDs: epics with children first, @@ -802,7 +802,7 @@ def _max_child_timestamp(pid: int | None) -> str: if not kids: return "" raw = max( - (k.get("updated_at") or k.get("created_at") or "" for k in kids), + (k.updated_at or k.created_at or "" for k in kids), default="", ) return str(raw) if raw else "" @@ -812,18 +812,18 @@ def _sort_key(pid: int | None) -> tuple[int, int, str, str]: return (2, 0, "", "") epic = epic_task_by_id.get(pid) or self._epics.get(pid) kids = children_by_parent.get(pid, []) - has_open = any(k["status"] not in finished for k in kids) - epic_status = epic.get("status", "done") if epic else "done" + has_open = any(k.status not in finished for k in kids) + epic_status = epic.status if epic else "done" # Bucket: 0=active, 1=done, 2=ungrouped bucket = 0 if (has_open or epic_status not in finished) else 1 # Epics with active/in-progress children float to top - has_active = any(k["status"] == "active" for k in kids) + has_active = any(k.status == "active" for k in kids) active_rank = 0 if has_active else 1 # Most recent child activity (invert for DESC sort) max_ts = _max_child_timestamp(pid) inv_ts = "".join(chr(0xFFFF - ord(c)) for c in max_ts) if max_ts else "\uffff" # Epic creation as tiebreaker (DESC) - raw_created = epic.get("created_at", "") if epic else "" + raw_created = (epic.created_at or "") if epic else "" epic_created = str(raw_created) if raw_created else "" inv_created = ( "".join(chr(0xFFFF - ord(c)) for c in epic_created) if epic_created else "\uffff" @@ -840,14 
+840,14 @@ def _sort_key(pid: int | None) -> tuple[int, int, str, str]: done_parents: list[int] = [] for pid in parent_ids: kids = children_by_parent.get(pid, []) - has_open = any(k["status"] not in finished for k in kids) + has_open = any(k.status not in finished for k in kids) if has_open or pid is None: active_parents.append(pid) else: # pid cannot be None here (handled by `pid is None` above) assert pid is not None epic = epic_task_by_id.get(pid) or self._epics.get(pid) - epic_status = epic.get("status", "done") if epic else "done" + epic_status = epic.status if epic else "done" if epic_status not in finished: active_parents.append(pid) else: @@ -866,8 +866,8 @@ def _sort_key(pid: int | None) -> tuple[int, int, str, str]: k for k in kids if STATUS_ALIASES.get( - k["status"], - k["status"], + k.status, + k.status, ) in set(visible_statuses) ] @@ -922,7 +922,7 @@ def _sort_key(pid: int | None) -> tuple[int, int, str, str]: active_kids: list[Task] = [] done_kids: list[Task] = [] for task in kids: - normalized = STATUS_ALIASES.get(task["status"], task["status"]) + normalized = STATUS_ALIASES.get(task.status, task.status) if normalized in finished: done_kids.append(task) else: @@ -933,16 +933,16 @@ def _sort_key(pid: int | None) -> tuple[int, int, str, str]: active_kids.sort( key=lambda t: ( status_rank.get( - STATUS_ALIASES.get(t["status"], t["status"]), + STATUS_ALIASES.get(t.status, t.status), len(STATUS_ORDER), ), - t.get("created_at") or "", + t.created_at or "", ), ) # Sort done kids by completed_at descending (most recent first) done_kids.sort( - key=lambda t: t.get("completed_at") or t.get("updated_at") or "", + key=lambda t: t.completed_at or t.updated_at or "", reverse=True, ) @@ -967,13 +967,11 @@ def _sort_key(pid: int | None) -> tuple[int, int, str, str]: else: # Epic children: collapsible done-fold arrow = "▾" if done_fold_open else "▸" - latest_ts = ( - done_kids[0].get("completed_at") or done_kids[0].get("updated_at") or "" - ) + latest_ts = 
done_kids[0].completed_at or done_kids[0].updated_at or "" recency = _format_time_ago(latest_ts) # Use accurate DB count from epic data when available epic_info = self._epics.get(pid) if pid is not None else None - epic_done_count = epic_info["children_done"] if epic_info else len(done_kids) + epic_done_count = epic_info.children_done if epic_info else len(done_kids) if recency: fold_label = f"{epic_done_count} completed (latest: {recency}) {arrow}" else: @@ -1040,10 +1038,10 @@ def _sort_key(pid: int | None) -> tuple[int, int, str, str]: # Collapsed: show fold summary with recency hint # Use accurate DB count from epic data when available epic_info = self._epics.get(pid) - epic_done_count = epic_info["children_done"] if epic_info else len(kids) + epic_done_count = epic_info.children_done if epic_info else len(kids) if epic_done_count > 0: if kids: - latest = kids[0].get("completed_at") or kids[0].get("updated_at") or "" + latest = kids[0].completed_at or kids[0].updated_at or "" recency = _format_time_ago(latest) else: recency = "" @@ -1153,17 +1151,17 @@ def _task_matches_filter(self, task: Task, query: str) -> bool: """Check if a task matches the filter query (case-insensitive substring).""" q = query.lower() fields = [ - task.get("title") or "", - task.get("epic_key") or "", - task.get("description") or "", + task.title or "", + task.epic_key or "", + task.description or "", ] return any(q in f.lower() for f in fields) def _task_passes_filters(self, task: Task) -> bool: """Check if a task passes text, status, and epic filters.""" - if task["status"] in self._hidden_statuses: + if task.status in self._hidden_statuses: return False - if self._epic_filter and task.get("epic_key") != self._epic_filter: + if self._epic_filter and task.epic_key != self._epic_filter: return False if self._filter_text and not self._task_matches_filter(task, self._filter_text): return False @@ -1175,7 +1173,7 @@ def _apply_filter(self) -> None: for task in self._tasks: if not 
self._task_passes_filters(task): continue - status = STATUS_ALIASES.get(task["status"], task["status"]) + status = STATUS_ALIASES.get(task.status, task.status) self._tasks_by_status[status].append(task) self._render_task_table() self._update_status_bar() @@ -1283,7 +1281,7 @@ def action_clear_all_filters(self) -> None: def action_filter_epic(self) -> None: """Toggle epic filter to the current task's epic.""" task = self._get_selected_task() - epic_key = task.get("epic_key") if task else None + epic_key = task.epic_key if task else None if epic_key and self._epic_filter != epic_key: self._epic_filter = epic_key else: @@ -1448,7 +1446,7 @@ def _render_task_detail(self, task: Task) -> None: self._current_task = task # Epic tasks get a specialized view with child task listing - if task.get("type") == "epic": + if task.type == "epic": self._render_epic_detail(task) return @@ -1461,13 +1459,13 @@ def _render_task_detail(self, task: Task) -> None: detail_log.clear() - icon = STATUS_ICONS.get(task["status"], "?") + icon = STATUS_ICONS.get(task.status, "?") badge = _task_badge(task) header_label = f"{icon} {badge}" if badge else f"{icon} Task" header.update(header_label) # Title (strip KEY-N prefix since badge already shows it) - title = _strip_epic_prefix(task["title"], task.get("epic_key"), task.get("epic_seq")) + title = _strip_epic_prefix(task.title, task.epic_key, task.epic_seq) detail_log.write(f"[bold]{title}[/bold]") detail_log.write("") @@ -1497,60 +1495,60 @@ def _render_task_metadata(self, target: RichLog, task: Task) -> None: """ # Status / Priority / Epic meta_parts: list[str] = [] - meta_parts.append(f"Status: [bold]{task['status']}[/bold]") - pri = task.get("priority", 3) + meta_parts.append(f"Status: [bold]{task.status}[/bold]") + pri = task.priority if pri <= 1: meta_parts.append(f"Priority: [bold red]{pri} !!![/bold red]") elif pri <= 2: meta_parts.append(f"Priority: [yellow]{pri} !![/yellow]") else: meta_parts.append(f"Priority: {pri}") - if 
task.get("epic_key"): - parent_id = task.get("parent_task_id") + if task.epic_key: + parent_id = task.parent_task_id epic = self._epics.get(parent_id) if parent_id else None if epic: - done = epic.get("children_done", 0) - total = epic.get("child_count", 0) - meta_parts.append(f"Epic: [cyan]{task['epic_key']}[/cyan] ({done}/{total} done)") + done = epic.children_done + total = epic.child_count + meta_parts.append(f"Epic: [cyan]{task.epic_key}[/cyan] ({done}/{total} done)") else: - meta_parts.append(f"Epic: [cyan]{task['epic_key']}[/cyan]") + meta_parts.append(f"Epic: [cyan]{task.epic_key}[/cyan]") target.write(" ".join(meta_parts)) # Timestamps time_parts: list[str] = [] - if task.get("created_at"): - time_parts.append(f"Created {_format_time_ago(task['created_at'])}") - if task.get("updated_at"): - time_parts.append(f"Updated {_format_time_ago(task['updated_at'])}") - if task.get("completed_at"): - time_parts.append(f"Completed {_format_time_ago(task['completed_at'])}") + if task.created_at: + time_parts.append(f"Created {_format_time_ago(task.created_at)}") + if task.updated_at: + time_parts.append(f"Updated {_format_time_ago(task.updated_at)}") + if task.completed_at: + time_parts.append(f"Completed {_format_time_ago(task.completed_at)}") if time_parts: target.write(f"[dim]{' · '.join(time_parts)}[/dim]") # Dependencies try: - deps = get_dependencies(task["id"]) + deps = get_dependencies(task.id) if deps: target.write("") target.write("[bold]Depends on:[/bold]") for dep in deps: - dep_icon = STATUS_ICONS.get(dep["status"], "?") + dep_icon = STATUS_ICONS.get(dep.status, "?") dep_badge = _task_badge(dep) dep_label = f"{dep_badge} " if dep_badge else "" - target.write(f" {dep_icon} {dep_label}{dep['title'][:60]} [{dep['status']}]") + target.write(f" {dep_icon} {dep_label}{dep.title[:60]} [{dep.status}]") except Exception as e: logger.debug(f"Error loading dependencies: {e}") try: - dependents = get_dependents(task["id"]) + dependents = get_dependents(task.id) if 
dependents: target.write("") target.write("[bold]Blocks:[/bold]") for dep in dependents: - dep_icon = STATUS_ICONS.get(dep["status"], "?") + dep_icon = STATUS_ICONS.get(dep.status, "?") dep_badge = _task_badge(dep) dep_label = f"{dep_badge} " if dep_badge else "" - target.write(f" {dep_icon} {dep_label}{dep['title'][:60]} [{dep['status']}]") + target.write(f" {dep_icon} {dep_label}{dep.title[:60]} [{dep.status}]") except Exception as e: logger.debug(f"Error loading dependents: {e}") @@ -1559,7 +1557,7 @@ def _render_task_content(self, target: RichLog, task: Task) -> None: content_w = self._detail_content_width(target) # Description - desc = task.get("description") or "" + desc = task.description or "" if desc: target.write("") target.write("[bold]Description:[/bold]") @@ -1567,7 +1565,7 @@ def _render_task_content(self, target: RichLog, task: Task) -> None: # Work log try: - log_entries: list[TaskLogEntry] = get_task_log(task["id"], limit=20) + log_entries: list[TaskLogEntry] = get_task_log(task.id, limit=20) if log_entries: target.write("") target.write("[bold]Work Log:[/bold]") @@ -1575,13 +1573,13 @@ def _render_task_content(self, target: RichLog, task: Task) -> None: gutter_width = 4 last = len(log_entries) - 1 for i, entry in enumerate(log_entries): - raw_ts = entry.get("created_at") + raw_ts = entry.created_at time_str = _format_time_ago(raw_ts) ts_part = f" {time_str}" if time_str else "" target.write(f" [bold cyan]●[/bold cyan] [dim]{ts_part}[/dim]") self._write_markdown_guttered( target, - entry["message"], + entry.message, content_w, gutter=gutter, gutter_width=gutter_width, @@ -1599,23 +1597,23 @@ def _render_epic_detail(self, task: Task) -> None: header = self.query_one("#task-detail-header", Static) detail_log.clear() - icon = STATUS_ICONS.get(task["status"], "?") + icon = STATUS_ICONS.get(task.status, "?") badge = _task_badge(task) epic_header = f"{icon} {badge}" if badge else f"{icon} Epic" header.update(epic_header) # Title (strip KEY-N prefix since 
badge already shows it) - title = _strip_epic_prefix(task["title"], task.get("epic_key"), task.get("epic_seq")) + title = _strip_epic_prefix(task.title, task.epic_key, task.epic_seq) detail_log.write(f"[bold]{title}[/bold]") detail_log.write("") # Progress summary from cached epic data - epic_key = task.get("epic_key") - epic_data = self._epics.get(task["id"]) + epic_key = task.epic_key + epic_data = self._epics.get(task.id) if epic_data: - done = epic_data.get("children_done", 0) - total = epic_data.get("child_count", 0) - open_count = epic_data.get("children_open", 0) + done = epic_data.children_done + total = epic_data.child_count + open_count = epic_data.children_open pct = int(done / total * 100) if total > 0 else 0 bar_len = 20 filled = int(bar_len * done / total) if total > 0 else 0 @@ -1623,11 +1621,11 @@ def _render_epic_detail(self, task: Task) -> None: detail_log.write(f"[bold]Progress:[/bold] {bar} {pct}%") detail_log.write(f" [green]{done} done[/green] · {open_count} open · {total} total") else: - detail_log.write(f"Status: [bold]{task['status']}[/bold]") + detail_log.write(f"Status: [bold]{task.status}[/bold]") # Description content_w = self._detail_content_width(detail_log) - epic_desc = task.get("description") or "" + epic_desc = task.description or "" if epic_desc: detail_log.write("") detail_log.write("[bold]Description:[/bold]") @@ -1637,17 +1635,15 @@ def _render_epic_detail(self, task: Task) -> None: # Load and display child tasks try: - epic_view = get_epic_view(task["id"]) - if epic_view and epic_view.get("children"): + epic_view = get_epic_view(task.id) + if epic_view and epic_view.children: detail_log.write("") detail_log.write("[bold]Tasks:[/bold]") - for child in epic_view["children"]: - c_icon = STATUS_ICONS.get(child["status"], "?") - c_color = STATUS_COLORS.get(child["status"], "") - c_title = _strip_epic_prefix( - child["title"], child.get("epic_key"), child.get("epic_seq") - )[:55] - seq = child.get("epic_seq") + for child in 
epic_view.children: + c_icon = STATUS_ICONS.get(child.status, "?") + c_color = STATUS_COLORS.get(child.status, "") + c_title = _strip_epic_prefix(child.title, child.epic_key, child.epic_seq)[:55] + seq = child.epic_seq prefix = f"{epic_key}-{seq}" if epic_key and seq else "" if c_color: detail_log.write( @@ -1662,10 +1658,10 @@ def _render_epic_detail(self, task: Task) -> None: # Timestamps time_parts = [] - if task.get("created_at"): - time_parts.append(f"Created {_format_time_ago(task['created_at'])}") - if task.get("updated_at"): - time_parts.append(f"Updated {_format_time_ago(task['updated_at'])}") + if task.created_at: + time_parts.append(f"Created {_format_time_ago(task.created_at)}") + if task.updated_at: + time_parts.append(f"Updated {_format_time_ago(task.updated_at)}") if time_parts: detail_log.write("") detail_log.write(f"[dim]{' · '.join(time_parts)}[/dim]") @@ -1784,7 +1780,7 @@ def _toggle_collapse(self) -> None: task = self._get_selected_task() if not task: return - tid = task["id"] + tid = task.id # Toggle: if currently shown as collapsed, expand; otherwise collapse if tid in self._collapsed: self._collapsed.discard(tid) @@ -1809,7 +1805,7 @@ async def _set_task_status(self, new_status: str) -> None: task = self._get_selected_task() if not task: return - if task["status"] == new_status: + if task.status == new_status: return # Save row index — we want to stay at this position @@ -1817,9 +1813,9 @@ async def _set_task_status(self, new_status: str) -> None: saved_row = table.cursor_row try: - update_task(task["id"], status=new_status) + update_task(task.id, status=new_status) badge = _task_badge(task) - label = badge if badge else task["title"][:30] + label = badge if badge else task.title[:30] self.notify(f"{label} → {new_status}", timeout=2) await self._load_tasks(restore_row=saved_row) except Exception as e: @@ -1861,7 +1857,7 @@ def _open_task_urls(self) -> None: return urls: list[str] = [] for field in ("description", "error"): - val = 
task.get(field) + val = getattr(task, field, None) if isinstance(val, str) and val: urls.extend(_extract_urls(val)) if not urls: diff --git a/tests/test_categories.py b/tests/test_categories.py index 556852cf..20cb70bc 100644 --- a/tests/test_categories.py +++ b/tests/test_categories.py @@ -24,15 +24,15 @@ def test_create_category(self): assert key == "TEST" cat = categories.get_category("TEST") assert cat is not None - assert cat["name"] == "Test Category" - assert cat["description"] == "A test" + assert cat.name == "Test Category" + assert cat.description == "A test" def test_create_category_uppercase(self): key = categories.create_category("low", "Lowercase Input") assert key == "LOW" cat = categories.get_category("low") assert cat is not None - assert cat["key"] == "LOW" + assert cat.key == "LOW" def test_create_category_validation_too_short(self): with pytest.raises(ValueError, match="2-8 uppercase letters"): @@ -59,7 +59,7 @@ def test_ensure_category_creates(self): assert key == "NEWCAT" cat = categories.get_category("NEWCAT") assert cat is not None - assert cat["name"] == "NEWCAT" # auto-name is the key + assert cat.name == "NEWCAT" # auto-name is the key def test_ensure_category_idempotent(self): categories.ensure_category("IDEM") @@ -78,11 +78,11 @@ def test_list_categories_with_counts(self): tasks.create_task("LCNT-2: Second task", epic_key="LCNT", status="done") cats = categories.list_categories() - lcnt = next((c for c in cats if c["key"] == "LCNT"), None) + lcnt = next((c for c in cats if c.key == "LCNT"), None) assert lcnt is not None - assert lcnt["open_count"] >= 1 - assert lcnt["done_count"] >= 1 - assert lcnt["total_count"] >= 2 + assert lcnt.open_count >= 1 + assert lcnt.done_count >= 1 + assert lcnt.total_count >= 2 class TestDeleteCategory: @@ -103,8 +103,8 @@ def test_delete_category_with_done_tasks(self): # Task still exists but epic_key is cleared task = tasks.get_task(t1) assert task is not None - assert task["epic_key"] is None - assert 
task["epic_seq"] is None + assert task.epic_key is None + assert task.epic_seq is None def test_delete_category_refuses_open_tasks(self): categories.create_category("DREF", "Has Open Tasks") @@ -119,7 +119,7 @@ def test_delete_category_force_with_open_tasks(self): assert result["tasks_cleared"] == 1 assert categories.get_category("DFRC") is None task = tasks.get_task(t1) - assert task["epic_key"] is None + assert task.epic_key is None def test_delete_category_clears_epics(self): categories.create_category("DEPC", "With Epics") @@ -129,7 +129,7 @@ def test_delete_category_clears_epics(self): assert result["epics_cleared"] == 1 epic = tasks.get_task(epic_id) assert epic is not None - assert epic["epic_key"] is None + assert epic.epic_key is None def test_delete_category_not_found(self): with pytest.raises(ValueError, match="not found"): @@ -156,12 +156,12 @@ def test_adopt_backfills(self): # Verify tasks now have epic_key/epic_seq task1 = tasks.get_task(t1) - assert task1["epic_key"] == "ADPT" - assert task1["epic_seq"] == 1 + assert task1.epic_key == "ADPT" + assert task1.epic_seq == 1 task2 = tasks.get_task(t2) - assert task2["epic_key"] == "ADPT" - assert task2["epic_seq"] == 2 + assert task2.epic_key == "ADPT" + assert task2.epic_seq == 2 def test_adopt_skips_already_adopted(self): # Create a task that already has epic_key @@ -175,7 +175,7 @@ def test_adopt_with_name(self): categories.ensure_category("ANME") categories.adopt_category("ANME", name="Adopted Name") cat = categories.get_category("ANME") - assert cat["name"] == "Adopted Name" + assert cat.name == "Adopted Name" class TestRenameCategory: @@ -195,16 +195,16 @@ def test_rename_simple(self): assert categories.get_category("RNOLD") is None new_cat = categories.get_category("RNNEW") assert new_cat is not None - assert new_cat["name"] == "Old Name" # inherited + assert new_cat.name == "Old Name" # inherited # Tasks moved and retitled task1 = tasks.get_task(t1) - assert task1["epic_key"] == "RNNEW" - assert 
"RNNEW-" in task1["title"] + assert task1.epic_key == "RNNEW" + assert "RNNEW-" in task1.title task2 = tasks.get_task(t2) - assert task2["epic_key"] == "RNNEW" - assert "RNNEW-" in task2["title"] + assert task2.epic_key == "RNNEW" + assert "RNNEW-" in task2.title def test_rename_merge_renumbers(self): """Merge into existing category — renumbers to avoid seq conflicts.""" @@ -218,13 +218,13 @@ def test_rename_merge_renumbers(self): assert result["tasks_moved"] == 1 task_old = tasks.get_task(t_old) - assert task_old["epic_key"] == "MRNEW" - assert task_old["epic_seq"] == 2 # after existing seq 1 + assert task_old.epic_key == "MRNEW" + assert task_old.epic_seq == 2 # after existing seq 1 # Original target task unchanged task_new = tasks.get_task(t_new) - assert task_new["epic_key"] == "MRNEW" - assert task_new["epic_seq"] == 1 + assert task_new.epic_key == "MRNEW" + assert task_new.epic_seq == 1 def test_rename_with_name_override(self): """--name overrides the target category name.""" @@ -233,7 +233,7 @@ def test_rename_with_name_override(self): categories.rename_category("NMOLD", "NMNEW", name="Better Name") cat = categories.get_category("NMNEW") - assert cat["name"] == "Better Name" + assert cat.name == "Better Name" def test_rename_moves_epics(self): """Epics are moved along with regular tasks.""" @@ -246,7 +246,7 @@ def test_rename_moves_epics(self): assert result["tasks_moved"] == 1 epic = tasks.get_task(epic_id) - assert epic["epic_key"] == "EPNEW" + assert epic.epic_key == "EPNEW" def test_rename_same_key_raises(self): categories.create_category("SAME", "Same") diff --git a/tests/test_document_model.py b/tests/test_document_model.py index 96ce9f53..8e2b2cc9 100644 --- a/tests/test_document_model.py +++ b/tests/test_document_model.py @@ -130,10 +130,10 @@ def test_from_dict_datetime_already_parsed(self) -> None: assert doc.created_at is now -# ── Dict compatibility ──────────────────────────────────────────────── +# ── Attribute access 
────────────────────────────────────────────────── -class TestDictCompat: +class TestAttributeAccess: @pytest.fixture() def doc(self) -> Document: return Document( @@ -144,46 +144,22 @@ def doc(self) -> Document: access_count=5, ) - def test_getitem(self, doc: Document) -> None: - assert doc["id"] == 42 - assert doc["title"] == "My Doc" - assert doc["content"] == "body" - assert doc["project"] == "emdx" - - def test_getitem_raises_keyerror(self, doc: Document) -> None: - with pytest.raises(KeyError, match="nonexistent"): - doc["nonexistent"] - - def test_get_with_default(self, doc: Document) -> None: - assert doc.get("title") == "My Doc" - assert doc.get("nonexistent") is None - assert doc.get("nonexistent", "fallback") == "fallback" - - def test_contains(self, doc: Document) -> None: - assert "title" in doc - assert "id" in doc - assert "nonexistent" not in doc - assert 42 not in doc # type: ignore[operator] # non-string - - def test_keys(self, doc: Document) -> None: - k = doc.keys() - assert "id" in k - assert "title" in k - assert "content" in k - assert "doc_type" in k - assert len(k) == 15 # all fields - - def test_items(self, doc: Document) -> None: - pairs = dict(doc.items()) - assert pairs["id"] == 42 - assert pairs["title"] == "My Doc" - assert pairs["project"] == "emdx" - assert pairs["access_count"] == 5 - - def test_values(self, doc: Document) -> None: - vals = list(doc.values()) - assert 42 in vals - assert "My Doc" in vals + def test_attribute_access(self, doc: Document) -> None: + assert doc.id == 42 + assert doc.title == "My Doc" + assert doc.content == "body" + assert doc.project == "emdx" + + def test_attribute_error_for_nonexistent(self, doc: Document) -> None: + with pytest.raises(AttributeError): + doc.nonexistent # type: ignore[attr-defined] # noqa: B018 + + def test_to_dict_preserves_all_fields(self, doc: Document) -> None: + d = doc.to_dict() + assert d["id"] == 42 + assert d["title"] == "My Doc" + assert d["project"] == "emdx" + assert 
d["access_count"] == 5 # ── Serialization ───────────────────────────────────────────────────── @@ -252,36 +228,25 @@ def test_from_row(self) -> None: assert hit.snippet == "...match..." assert hit.rank == -2.5 - def test_bracket_access_document_fields(self) -> None: + def test_attribute_access_document_fields(self) -> None: doc = Document(id=1, title="T") hit = SearchHit(doc=doc, snippet="snip", rank=-1.0) - assert hit["id"] == 1 - assert hit["title"] == "T" - assert hit["snippet"] == "snip" - assert hit["rank"] == -1.0 + assert hit.id == 1 + assert hit.title == "T" + assert hit.snippet == "snip" + assert hit.rank == -1.0 - def test_get_fallthrough(self) -> None: + def test_getattr_fallthrough(self) -> None: doc = Document(id=1, title="T", project="p") hit = SearchHit(doc=doc) - assert hit.get("project") == "p" - assert hit.get("snippet") is None - assert hit.get("nonexistent", "fb") == "fb" - - def test_contains(self) -> None: - doc = Document(id=1, title="T") - hit = SearchHit(doc=doc) - assert "title" in hit - assert "snippet" in hit - assert "rank" in hit - assert "nonexistent" not in hit + assert hit.project == "p" + assert hit.snippet is None - def test_keys_includes_search_fields(self) -> None: + def test_getattr_error_for_nonexistent(self) -> None: doc = Document(id=1, title="T") hit = SearchHit(doc=doc) - k = hit.keys() - assert "snippet" in k - assert "rank" in k - assert "id" in k + with pytest.raises(AttributeError): + hit.nonexistent # type: ignore[attr-defined] # noqa: B018 def test_to_dict(self) -> None: doc = Document(id=1, title="T", project="p") @@ -313,12 +278,6 @@ def test_datetime_fields_constant(self) -> None: "archived_at", } - def test_field_names_cached(self) -> None: - """_field_names should be cached after first call.""" - names1 = Document._field_names() - names2 = Document._field_names() - assert names1 is names2 - def test_slots_prevent_arbitrary_attrs(self) -> None: doc = Document(id=1, title="T") with pytest.raises(AttributeError): 
diff --git a/tests/test_documents.py b/tests/test_documents.py index d7f4cde4..6343faac 100644 --- a/tests/test_documents.py +++ b/tests/test_documents.py @@ -48,7 +48,7 @@ def test_save_with_project(self): doc_id = save_document("Proj Doc", "Content", project="my-project") doc = get_document(doc_id) - assert doc["project"] == "my-project" + assert doc.project == "my-project" def test_save_with_parent_id(self): from emdx.database.documents import get_document, save_document @@ -56,7 +56,7 @@ def test_save_with_parent_id(self): parent_id = save_document("Parent", "Parent content") child_id = save_document("Child", "Child content", parent_id=parent_id) child = get_document(child_id) - assert child["parent_id"] == parent_id + assert child.parent_id == parent_id def test_save_with_tags(self): from emdx.database.connection import db_connection @@ -81,8 +81,8 @@ def test_save_minimal(self): doc_id = save_document("Title Only", "") doc = get_document(doc_id) assert doc is not None - assert doc["title"] == "Title Only" - assert doc["content"] == "" + assert doc.title == "Title Only" + assert doc.content == "" def test_save_special_characters_in_title(self): from emdx.database.documents import get_document, save_document @@ -90,7 +90,7 @@ def test_save_special_characters_in_title(self): title = 'Test\'s "special" & more: 日本語' doc_id = save_document(title, "Content") doc = get_document(doc_id) - assert doc["title"] == title + assert doc.title == title def test_save_special_characters_in_content(self): from emdx.database.documents import get_document, save_document @@ -98,7 +98,7 @@ def test_save_special_characters_in_content(self): content = "Line 1\nLine 2\n\tTabbed\n```python\nprint('hello')\n```" doc_id = save_document("Code Doc", content) doc = get_document(doc_id) - assert doc["content"] == content + assert doc.content == content def test_save_very_long_content(self): from emdx.database.documents import get_document, save_document @@ -106,7 +106,7 @@ def 
test_save_very_long_content(self): long_content = "x" * 100_000 doc_id = save_document("Long Doc", long_content) doc = get_document(doc_id) - assert len(doc["content"]) == 100_000 + assert len(doc.content) == 100_000 class TestGetDocument: @@ -118,9 +118,9 @@ def test_get_by_id(self): doc_id = save_document("Test Doc", "Content here") doc = get_document(doc_id) assert doc is not None - assert doc["id"] == doc_id - assert doc["title"] == "Test Doc" - assert doc["content"] == "Content here" + assert doc.id == doc_id + assert doc.title == "Test Doc" + assert doc.content == "Content here" def test_get_by_string_id(self): from emdx.database.documents import get_document, save_document @@ -128,7 +128,7 @@ def test_get_by_string_id(self): doc_id = save_document("String ID", "Content") doc = get_document(str(doc_id)) assert doc is not None - assert doc["id"] == doc_id + assert doc.id == doc_id def test_get_by_title(self): from emdx.database.documents import get_document, save_document @@ -136,7 +136,7 @@ def test_get_by_title(self): save_document("Unique Title XYZ", "Some content") doc = get_document("Unique Title XYZ") assert doc is not None - assert doc["title"] == "Unique Title XYZ" + assert doc.title == "Unique Title XYZ" def test_get_by_title_case_insensitive(self): from emdx.database.documents import get_document, save_document @@ -144,7 +144,7 @@ def test_get_by_title_case_insensitive(self): save_document("Case Test Doc", "Content") doc = get_document("case test doc") assert doc is not None - assert doc["title"] == "Case Test Doc" + assert doc.title == "Case Test Doc" def test_get_nonexistent_returns_none(self): from emdx.database.documents import get_document @@ -164,10 +164,10 @@ def test_get_increments_access_count(self): doc_id = save_document("Access Test", "Content") # First access doc1 = get_document(doc_id) - count1 = doc1["access_count"] + count1 = doc1.access_count # Second access doc2 = get_document(doc_id) - count2 = doc2["access_count"] + count2 = 
doc2.access_count assert count2 == count1 + 1 def test_get_updates_accessed_at(self): @@ -178,7 +178,7 @@ def test_get_updates_accessed_at(self): time.sleep(0.001) # Minimal delay to ensure different timestamps doc2 = get_document(doc_id) # accessed_at should be updated (or at least not earlier) - assert doc2["accessed_at"] >= doc1["accessed_at"] + assert doc2.accessed_at >= doc1.accessed_at def test_get_returns_all_fields(self): from emdx.database.documents import get_document, save_document @@ -198,7 +198,7 @@ def test_get_returns_all_fields(self): "is_deleted", "deleted_at", ]: - assert field in doc, f"Missing field: {field}" + assert hasattr(doc, field), f"Missing field: {field}" class TestUpdateDocument: @@ -212,8 +212,8 @@ def test_update_title_and_content(self): assert result is True doc = get_document(doc_id) - assert doc["title"] == "Updated" - assert doc["content"] == "Updated content" + assert doc.title == "Updated" + assert doc.content == "Updated content" def test_update_nonexistent_returns_false(self): from emdx.database.documents import update_document @@ -229,7 +229,7 @@ def test_update_sets_updated_at(self): time.sleep(0.001) # Minimal delay to ensure different timestamps update_document(doc_id, "Timestamp Test v2", "New Content") doc_after = get_document(doc_id) - assert doc_after["updated_at"] >= doc_before["updated_at"] + assert doc_after.updated_at >= doc_before.updated_at class TestDeleteDocument: @@ -333,7 +333,7 @@ def test_restore_by_id(self): assert result is True doc = get_document(doc_id) assert doc is not None - assert doc["title"] == "Restorable" + assert doc.title == "Restorable" def test_restore_by_title(self): from emdx.database.documents import ( @@ -361,8 +361,8 @@ def test_restore_clears_deleted_at(self): delete_document(doc_id) restore_document(doc_id) doc = get_document(doc_id) - assert doc["deleted_at"] is None - assert doc["is_deleted"] == 0 + assert doc.deleted_at is None + assert doc.is_deleted is False def 
test_restore_non_deleted_returns_false(self): from emdx.database.documents import restore_document, save_document @@ -393,7 +393,7 @@ def test_list_returns_top_level_by_default(self): save_document("Child", "Content", parent_id=parent) docs = list_documents() - titles = [d["title"] for d in docs] + titles = [d.title for d in docs] assert "Top 1" in titles assert "Top 2" in titles assert "Parent" in titles @@ -407,7 +407,7 @@ def test_list_with_parent_id_minus_one_returns_all(self): save_document("Top", "Content") docs = list_documents(parent_id=-1) - titles = [d["title"] for d in docs] + titles = [d.title for d in docs] assert "Parent" in titles assert "Child" in titles assert "Top" in titles @@ -421,7 +421,7 @@ def test_list_children_of_parent(self): save_document("Unrelated", "Content") docs = list_documents(parent_id=parent) - titles = [d["title"] for d in docs] + titles = [d.title for d in docs] assert len(docs) == 2 assert "Child A" in titles assert "Child B" in titles @@ -434,7 +434,7 @@ def test_list_filter_by_project(self): docs = list_documents(project="alpha", parent_id=-1) assert len(docs) == 1 - assert docs[0]["title"] == "Proj A" + assert docs[0].title == "Proj A" def test_list_excludes_deleted(self): from emdx.database.documents import delete_document, list_documents, save_document @@ -444,7 +444,7 @@ def test_list_excludes_deleted(self): delete_document(doc_id) docs = list_documents(parent_id=-1) - titles = [d["title"] for d in docs] + titles = [d.title for d in docs] assert "Visible" in titles assert "Deleted" not in titles @@ -475,7 +475,7 @@ def test_list_ordered_by_id_desc(self): save_document("Third", "Content") docs = list_documents(parent_id=-1) - ids = [d["id"] for d in docs] + ids = [d.id for d in docs] assert ids == sorted(ids, reverse=True) @@ -522,7 +522,7 @@ def test_recent_returns_most_recent_first(self): docs = get_recent_documents(limit=10) # id1 was accessed most recently - assert docs[0]["id"] == id1 + assert docs[0].id == id1 def 
test_recent_respects_limit(self): from emdx.database.documents import get_recent_documents, save_document @@ -540,7 +540,7 @@ def test_recent_excludes_deleted(self): delete_document(doc_id) docs = get_recent_documents() - titles = [d["title"] for d in docs] + titles = [d.title for d in docs] assert "Deleted Recent" not in titles assert "Visible Recent" in titles @@ -555,7 +555,7 @@ def test_lists_soft_deleted(self): delete_document(doc_id) deleted = list_deleted_documents() assert len(deleted) == 1 - assert deleted[0]["title"] == "Trashed" + assert deleted[0].title == "Trashed" def test_excludes_non_deleted(self): from emdx.database.documents import list_deleted_documents, save_document @@ -747,8 +747,8 @@ def test_set_parent_basic(self): assert result is True doc = get_document(child) - assert doc["parent_id"] == parent - assert doc["relationship"] == "supersedes" + assert doc.parent_id == parent + assert doc.relationship == "supersedes" def test_set_parent_custom_relationship(self): from emdx.database.documents import get_document, save_document, set_parent @@ -758,7 +758,7 @@ def test_set_parent_custom_relationship(self): set_parent(child, parent, relationship="exploration") doc = get_document(child) - assert doc["relationship"] == "exploration" + assert doc.relationship == "exploration" def test_set_parent_nonexistent_child(self): from emdx.database.documents import save_document, set_parent @@ -780,7 +780,7 @@ def test_get_children_basic(self): children = get_children(parent) assert len(children) == 2 - titles = {c["title"] for c in children} + titles = {c.title for c in children} assert titles == {"Child 1", "Child 2"} @@ -862,21 +862,21 @@ def test_save_empty_title(self): doc_id = save_document("", "Some content") doc = get_document(doc_id) - assert doc["title"] == "" + assert doc.title == "" def test_save_empty_content(self): from emdx.database.documents import get_document, save_document doc_id = save_document("Empty Content", "") doc = 
get_document(doc_id) - assert doc["content"] == "" + assert doc.content == "" def test_save_none_project(self): from emdx.database.documents import get_document, save_document doc_id = save_document("No Project", "Content", project=None) doc = get_document(doc_id) - assert doc["project"] is None + assert doc.project is None def test_multiple_docs_same_title(self): from emdx.database.documents import get_document, save_document @@ -895,7 +895,7 @@ def test_unicode_content(self): content = "Unicode: \u2603 \U0001f600 \u00e9\u00e8\u00ea \u4e16\u754c \ud55c\uad6d\uc5b4" doc_id = save_document("Unicode Test", content) doc = get_document(doc_id) - assert doc["content"] == content + assert doc.content == content def test_newlines_in_title(self): from emdx.database.documents import get_document, save_document @@ -903,7 +903,7 @@ def test_newlines_in_title(self): title = "Line1\nLine2\nLine3" doc_id = save_document(title, "Content") doc = get_document(doc_id) - assert doc["title"] == title + assert doc.title == title def test_sql_injection_in_title(self): from emdx.database.documents import get_document, save_document @@ -911,7 +911,7 @@ def test_sql_injection_in_title(self): title = "'; DROP TABLE documents; --" doc_id = save_document(title, "Content") doc = get_document(doc_id) - assert doc["title"] == title + assert doc.title == title def test_rapid_saves(self): from emdx.database.documents import count_documents, save_document diff --git a/tests/test_epics.py b/tests/test_epics.py index 70812a12..61574992 100644 --- a/tests/test_epics.py +++ b/tests/test_epics.py @@ -26,9 +26,9 @@ def test_create_epic(self) -> None: epic_id = tasks.create_epic("Test Epic", "TEPC") epic = tasks.get_task(epic_id) assert epic is not None - assert epic["type"] == "epic" - assert epic["epic_key"] == "TEPC" - assert epic["epic_seq"] == 1 # epics get KEY-N like regular tasks + assert epic.type == "epic" + assert epic.epic_key == "TEPC" + assert epic.epic_seq == 1 # epics get KEY-N like regular 
tasks def test_create_epic_auto_creates_category(self) -> None: tasks.create_epic("Auto Cat Epic", "ACAT") @@ -58,10 +58,10 @@ def test_create_task_in_epic(self) -> None: ) task = tasks.get_task(task_id) assert task is not None - assert task["parent_task_id"] == epic_id - assert task["epic_key"] == "TPIC" - assert task["epic_seq"] == 2 # epic took seq 1 - assert task["title"].startswith("TPIC-2: ") + assert task.parent_task_id == epic_id + assert task.epic_key == "TPIC" + assert task.epic_seq == 2 # epic took seq 1 + assert task.title.startswith("TPIC-2: ") def test_epic_auto_numbers(self) -> None: categories.ensure_category("ENUM") @@ -72,13 +72,13 @@ def test_epic_auto_numbers(self) -> None: task1, task2, task3 = tasks.get_task(t1), tasks.get_task(t2), tasks.get_task(t3) assert task1 is not None and task2 is not None and task3 is not None - assert task1["epic_seq"] == 1 - assert task2["epic_seq"] == 2 - assert task3["epic_seq"] == 3 + assert task1.epic_seq == 1 + assert task2.epic_seq == 2 + assert task3.epic_seq == 3 - assert task1["title"] == "ENUM-1: First" - assert task2["title"] == "ENUM-2: Second" - assert task3["title"] == "ENUM-3: Third" + assert task1.title == "ENUM-1: First" + assert task2.title == "ENUM-2: Second" + assert task3.title == "ENUM-3: Third" def test_numbering_spans_epics(self) -> None: """Numbering is category-scoped, so it continues across epics.""" @@ -93,10 +93,10 @@ def test_numbering_spans_epics(self) -> None: epic_b_task = tasks.get_task(epic_b) task5 = tasks.get_task(t5) assert epic_a_task is not None and epic_b_task is not None and task5 is not None - assert epic_a_task["epic_seq"] == 1 - assert epic_b_task["epic_seq"] == 4 - assert task5["epic_seq"] == 5 # continues from epic_b's seq 4 - assert task5["title"] == "SPAN-5: Task in B" + assert epic_a_task.epic_seq == 1 + assert epic_b_task.epic_seq == 4 + assert task5.epic_seq == 5 # continues from epic_b's seq 4 + assert task5.title == "SPAN-5: Task in B" class TestEpicDone: @@ 
-107,7 +107,7 @@ def test_epic_done(self) -> None: tasks.update_task(epic_id, status="done") epic = tasks.get_task(epic_id) assert epic is not None - assert epic["status"] == "done" + assert epic.status == "done" class TestDeleteEpic: @@ -127,7 +127,7 @@ def test_delete_epic_with_done_children(self) -> None: # Child still exists but parent_task_id is cleared child = tasks.get_task(t1) assert child is not None - assert child["parent_task_id"] is None + assert child.parent_task_id is None def test_delete_epic_refuses_open_children(self) -> None: epic_id = tasks.create_epic("Open Children Epic", "DOCH") @@ -143,7 +143,7 @@ def test_delete_epic_force_with_open_children(self) -> None: assert tasks.get_task(epic_id) is None child = tasks.get_task(t1) assert child is not None - assert child["parent_task_id"] is None + assert child.parent_task_id is None def test_delete_epic_not_found(self) -> None: with pytest.raises(ValueError, match="not found"): @@ -163,7 +163,7 @@ def test_list_epics_by_category(self) -> None: tasks.create_epic("Other Epic", "OTHR") result = tasks.list_epics(category_key="FILT") - keys = [e["epic_key"] for e in result] + keys = [e.epic_key for e in result] assert "FILT" in keys assert "OTHR" not in keys @@ -175,8 +175,8 @@ def test_list_epics_by_status(self) -> None: open_epics = tasks.list_epics(status=["open"]) done_epics = tasks.list_epics(status=["done"]) - open_ids = [e["id"] for e in open_epics] - done_ids = [e["id"] for e in done_epics] + open_ids = [e.id for e in open_epics] + done_ids = [e.id for e in done_epics] assert e1 in open_ids assert e2 in done_ids @@ -191,8 +191,8 @@ def test_epic_view_with_children(self) -> None: view = tasks.get_epic_view(epic_id) assert view is not None - assert view["title"] == "View Epic" - assert len(view["children"]) == 2 + assert view.title == "View Epic" + assert len(view.children) == 2 def test_epic_view_not_found(self) -> None: result = tasks.get_epic_view(999999) @@ -236,7 +236,7 @@ def 
test_done_command(self) -> None: assert "Done" in _out(result) epic = tasks.get_task(epic_id) assert epic is not None - assert epic["status"] == "done" + assert epic.status == "done" def test_active_command(self) -> None: epic_id = tasks.create_epic("Active CLI Epic", "EACL") @@ -245,7 +245,7 @@ def test_active_command(self) -> None: assert "Active" in _out(result) epic = tasks.get_task(epic_id) assert epic is not None - assert epic["status"] == "active" + assert epic.status == "active" def test_view_not_found(self) -> None: result = runner.invoke(epics_app, ["view", "999999"]) @@ -334,7 +334,7 @@ def test_view_with_category_key(self) -> None: epic_id = tasks.create_epic("Key View Epic", "KVIW") epic = tasks.get_task(epic_id) assert epic is not None - key = f"KVIW-{epic['epic_seq']}" + key = f"KVIW-{epic.epic_seq}" result = runner.invoke(epics_app, ["view", key]) assert result.exit_code == 0 assert "Key View Epic" in _out(result) @@ -343,7 +343,7 @@ def test_done_with_category_key(self) -> None: epic_id = tasks.create_epic("Key Done Epic", "KDNE") epic = tasks.get_task(epic_id) assert epic is not None - key = f"KDNE-{epic['epic_seq']}" + key = f"KDNE-{epic.epic_seq}" result = runner.invoke(epics_app, ["done", key]) assert result.exit_code == 0 assert "Done" in _out(result) @@ -352,7 +352,7 @@ def test_active_with_category_key(self) -> None: epic_id = tasks.create_epic("Key Active Epic", "KACT") epic = tasks.get_task(epic_id) assert epic is not None - key = f"KACT-{epic['epic_seq']}" + key = f"KACT-{epic.epic_seq}" result = runner.invoke(epics_app, ["active", key]) assert result.exit_code == 0 assert "Active" in _out(result) @@ -362,7 +362,7 @@ def test_delete_with_category_key(self) -> None: tasks.update_task(epic_id, status="done") epic = tasks.get_task(epic_id) assert epic is not None - key = f"KDEL-{epic['epic_seq']}" + key = f"KDEL-{epic.epic_seq}" result = runner.invoke(epics_app, ["delete", key]) assert result.exit_code == 0 assert "Deleted" in _out(result) 
@@ -375,7 +375,7 @@ def test_add_with_epic_key(self) -> None: epic_id = tasks.create_epic("Key Add Epic", "KADD") epic = tasks.get_task(epic_id) assert epic is not None - key = f"KADD-{epic['epic_seq']}" + key = f"KADD-{epic.epic_seq}" result = runner.invoke(tasks_app, ["add", "Keyed task", "--epic", key]) assert result.exit_code == 0 @@ -388,7 +388,7 @@ def test_list_with_epic_key(self) -> None: tasks.create_task("Key list child", parent_task_id=epic_id, epic_key="KLST") epic = tasks.get_task(epic_id) assert epic is not None - key = f"KLST-{epic['epic_seq']}" + key = f"KLST-{epic.epic_seq}" result = runner.invoke(tasks_app, ["list", "--epic", key]) assert result.exit_code == 0 @@ -407,12 +407,12 @@ def test_attach_model(self) -> None: task1 = tasks.get_task(t1) task2 = tasks.get_task(t2) assert task1 is not None and task2 is not None - assert task1["parent_task_id"] == epic_id - assert task2["parent_task_id"] == epic_id - assert task1["epic_key"] == "ATCH" - assert task2["epic_key"] == "ATCH" - assert task1["epic_seq"] is not None - assert task2["epic_seq"] is not None + assert task1.parent_task_id == epic_id + assert task2.parent_task_id == epic_id + assert task1.epic_key == "ATCH" + assert task2.epic_key == "ATCH" + assert task1.epic_seq is not None + assert task2.epic_seq is not None def test_attach_skips_already_attached(self) -> None: epic_id = tasks.create_epic("Skip Epic", "SKIP") @@ -443,10 +443,10 @@ def test_attach_cli_with_keys(self) -> None: t1 = tasks.create_task("Key orphan", epic_key="KCLA") task = tasks.get_task(t1) assert task is not None - key = f"KCLA-{task['epic_seq']}" + key = f"KCLA-{task.epic_seq}" epic = tasks.get_task(epic_id) assert epic is not None - epic_key = f"KCLA-{epic['epic_seq']}" + epic_key = f"KCLA-{epic.epic_seq}" result = runner.invoke( epics_app, ["attach", key, "--epic", epic_key], @@ -462,7 +462,7 @@ def test_bare_int_falls_back_to_unique_epic_seq(self) -> None: task_id = tasks.create_task("Fallback test", epic_key="FLLB") 
task = tasks.get_task(task_id) assert task is not None - seq = task["epic_seq"] + seq = task.epic_seq # Only works if no task with DB id == seq exists. # We can test this by using the category-key approach as verification. from emdx.models.tasks import resolve_task_id diff --git a/tests/test_search.py b/tests/test_search.py index cf8244a0..b0f67585 100644 --- a/tests/test_search.py +++ b/tests/test_search.py @@ -159,7 +159,7 @@ def test_basic_search_single_term(self, mock_db_connection): results = search_documents("Python") assert len(results) == 2 - titles = [r["title"] for r in results] + titles = [r.title for r in results] assert "Python Guide" in titles assert "Testing with Pytest" in titles @@ -205,9 +205,9 @@ def test_results_are_ranked(self, mock_db_connection): results = search_documents("Python") assert len(results) == 2 - # All results should have a rank field + # All results should have a rank attribute for result in results: - assert "rank" in result + assert hasattr(result, "rank") def test_multiple_occurrences_ranked_higher(self, mock_db_connection): """Test that documents with more term occurrences rank higher.""" @@ -221,7 +221,7 @@ def test_multiple_occurrences_ranked_higher(self, mock_db_connection): results = search_documents("Python") assert len(results) == 2 # FTS5 rank is negative (lower is better), so first result should have lower rank - assert results[0]["rank"] <= results[1]["rank"] + assert results[0].rank <= results[1].rank class TestSearchPagination: @@ -274,12 +274,12 @@ def test_filter_by_project(self, mock_db_connection): results = search_documents("Python", project="project1") assert len(results) == 2 for result in results: - assert result["project"] == "project1" + assert result.project == "project1" # Filter by project2 results = search_documents("Python", project="project2") assert len(results) == 1 - assert results[0]["project"] == "project2" + assert results[0].project == "project2" def test_filter_nonexistent_project(self, 
mock_db_connection): """Test filtering by a project that doesn't exist.""" @@ -307,7 +307,7 @@ def test_created_after_filter(self, mock_db_connection): results = search_documents("Python", created_after=filter_date) assert len(results) == 1 - assert results[0]["title"] == "Recent Python Doc" + assert results[0].title == "Recent Python Doc" def test_created_before_filter(self, mock_db_connection): """Test filtering by created_before date.""" @@ -322,7 +322,7 @@ def test_created_before_filter(self, mock_db_connection): results = search_documents("Python", created_before=filter_date) assert len(results) == 1 - assert results[0]["title"] == "Old Python Doc" + assert results[0].title == "Old Python Doc" def test_modified_after_filter(self, mock_db_connection): """Test filtering by modified_after date.""" @@ -337,7 +337,7 @@ def test_modified_after_filter(self, mock_db_connection): results = search_documents("Python", modified_after=filter_date) assert len(results) == 1 - assert results[0]["title"] == "Recent Python Doc" + assert results[0].title == "Recent Python Doc" def test_modified_before_filter(self, mock_db_connection): """Test filtering by modified_before date.""" @@ -352,7 +352,7 @@ def test_modified_before_filter(self, mock_db_connection): results = search_documents("Python", modified_before=filter_date) assert len(results) == 1 - assert results[0]["title"] == "Old Python Doc" + assert results[0].title == "Old Python Doc" def test_combined_date_filters(self, mock_db_connection): """Test combining multiple date filters.""" @@ -372,7 +372,7 @@ def test_combined_date_filters(self, mock_db_connection): results = search_documents("Python", created_after=after_date, created_before=before_date) assert len(results) == 1 - assert results[0]["title"] == "Middle Doc" + assert results[0].title == "Middle Doc" class TestWildcardSearch: @@ -400,7 +400,7 @@ def test_wildcard_with_project_filter(self, mock_db_connection): results = search_documents("*", project="project1") 
assert len(results) == 2 for result in results: - assert result["project"] == "project1" + assert result.project == "project1" def test_wildcard_with_date_filters(self, mock_db_connection): """Test wildcard query with date filters.""" @@ -415,7 +415,7 @@ def test_wildcard_with_date_filters(self, mock_db_connection): results = search_documents("*", created_after=filter_date) assert len(results) == 1 - assert results[0]["title"] == "Recent Doc" + assert results[0].title == "Recent Doc" def test_wildcard_ordered_by_id_desc(self, mock_db_connection): """Test that wildcard results are ordered by ID descending.""" @@ -428,9 +428,9 @@ def test_wildcard_ordered_by_id_desc(self, mock_db_connection): results = search_documents("*") assert len(results) == 3 # Results should be ordered by ID descending (most recent first) - assert results[0]["id"] == id3 - assert results[1]["id"] == id2 - assert results[2]["id"] == id1 + assert results[0].id == id3 + assert results[1].id == id2 + assert results[2].id == id1 def test_wildcard_respects_limit(self, mock_db_connection): """Test that wildcard query respects limit parameter.""" @@ -456,7 +456,7 @@ def test_soft_deleted_excluded_from_search(self, mock_db_connection): results = search_documents("Python") assert len(results) == 1 - assert results[0]["title"] == "Active Python Doc" + assert results[0].title == "Active Python Doc" def test_soft_deleted_excluded_from_wildcard(self, mock_db_connection): """Test that soft-deleted documents are excluded from wildcard search.""" @@ -468,7 +468,7 @@ def test_soft_deleted_excluded_from_wildcard(self, mock_db_connection): results = search_documents("*") assert len(results) == 1 - assert results[0]["title"] == "Active Doc" + assert results[0].title == "Active Doc" class TestSearchResultFields: @@ -484,13 +484,13 @@ def test_search_returns_expected_fields(self, mock_db_connection): assert len(results) == 1 result = results[0] - assert "id" in result - assert "title" in result - assert "project" in 
result - assert "created_at" in result - assert "updated_at" in result - assert "snippet" in result - assert "rank" in result + assert hasattr(result, "id") + assert hasattr(result, "title") + assert hasattr(result, "project") + assert hasattr(result, "created_at") + assert hasattr(result, "updated_at") + assert hasattr(result, "snippet") + assert hasattr(result, "rank") def test_search_snippet_contains_match_context(self, mock_db_connection): """Test that search snippet contains context around the match.""" @@ -503,7 +503,7 @@ def test_search_snippet_contains_match_context(self, mock_db_connection): results = search_documents("Python") assert len(results) == 1 # Snippet should be present and contain some context - assert results[0]["snippet"] is not None + assert results[0].snippet is not None def test_wildcard_has_null_snippet(self, mock_db_connection): """Test that wildcard search returns NULL snippets.""" @@ -513,7 +513,7 @@ def test_wildcard_has_null_snippet(self, mock_db_connection): results = search_documents("*") assert len(results) == 1 - assert results[0]["snippet"] is None + assert results[0].snippet is None def test_datetime_fields_are_parsed(self, mock_db_connection): """Test that datetime fields are properly parsed.""" @@ -525,8 +525,8 @@ def test_datetime_fields_are_parsed(self, mock_db_connection): assert len(results) == 1 result = results[0] - assert isinstance(result["created_at"], datetime) - assert isinstance(result["updated_at"], datetime) + assert isinstance(result.created_at, datetime) + assert isinstance(result.updated_at, datetime) class TestSearchEdgeCases: @@ -546,7 +546,7 @@ def test_search_with_special_fts_characters(self, mock_db_connection): # Search for the full term "Programming" which should definitely match results = search_documents("Programming") assert len(results) == 1 - assert results[0]["title"] == "C++ Programming" + assert results[0].title == "C++ Programming" def test_search_with_phrase(self, mock_db_connection): """Test 
phrase search with FTS5.""" @@ -558,7 +558,7 @@ def test_search_with_phrase(self, mock_db_connection): # FTS5 phrase search with quotes - should match first doc with exact phrase results = search_documents('"Python web"') assert len(results) == 1 - assert results[0]["title"] == "Python Programming" + assert results[0].title == "Python Programming" def test_search_with_or_literal(self, mock_db_connection): """Test that 'OR' is treated as a literal word, not FTS operator. @@ -577,7 +577,7 @@ def test_search_with_or_literal(self, mock_db_connection): # Only the doc with literal "OR" in it will match all three terms results = search_documents("OR") assert len(results) == 1 - assert "Word OR Logic" in results[0]["title"] + assert "Word OR Logic" in results[0].title def test_stemming_with_porter(self, mock_db_connection): """Test that Porter stemmer works (running -> run).""" @@ -588,7 +588,7 @@ def test_stemming_with_porter(self, mock_db_connection): # Porter stemmer should match "run" to "running" results = search_documents("run") assert len(results) == 1 - assert results[0]["title"] == "Running Tips" + assert results[0].title == "Running Tips" class TestEscapeFts5Query: @@ -642,7 +642,7 @@ def test_search_hyphenated_term(self, mock_db_connection): # This should NOT fail with "no such column: driven" error results = search_documents("event-driven") assert len(results) == 1 - assert "Event-Driven" in results[0]["title"] + assert "Event-Driven" in results[0].title def test_search_multiple_hyphenated_terms(self, mock_db_connection): """Test searching for multiple hyphenated terms.""" @@ -691,7 +691,7 @@ def test_default_returns_only_user_docs(self, mock_db_connection): results = search_documents("Python") assert len(results) == 1 - assert results[0]["title"] == "User Doc" + assert results[0].title == "User Doc" def test_wiki_filter_returns_only_wiki_docs(self, mock_db_connection): """Test that doc_type='wiki' returns only wiki docs.""" @@ -702,7 +702,7 @@ def 
test_wiki_filter_returns_only_wiki_docs(self, mock_db_connection): results = search_documents("Python", doc_type="wiki") assert len(results) == 1 - assert results[0]["title"] == "Wiki Article" + assert results[0].title == "Wiki Article" def test_none_doc_type_returns_all(self, mock_db_connection): """Test that doc_type=None returns all document types.""" @@ -725,12 +725,12 @@ def test_wildcard_with_doc_type_filter(self, mock_db_connection): # Default: only user results = search_documents("*") assert len(results) == 1 - assert results[0]["title"] == "User Doc" + assert results[0].title == "User Doc" # Wiki only results = search_documents("*", doc_type="wiki") assert len(results) == 1 - assert results[0]["title"] == "Wiki Doc" + assert results[0].title == "Wiki Doc" # All types results = search_documents("*", doc_type=None) @@ -746,4 +746,4 @@ def test_doc_type_with_project_filter(self, mock_db_connection): results = search_documents("Python", project="project1", doc_type="user") assert len(results) == 1 - assert results[0]["title"] == "User P1" + assert results[0].title == "User P1" diff --git a/tests/test_sqlite_database.py b/tests/test_sqlite_database.py index 5ef0a3c3..4a27ae96 100644 --- a/tests/test_sqlite_database.py +++ b/tests/test_sqlite_database.py @@ -85,9 +85,9 @@ def test_save_and_get_document(self): doc = db.get_document(doc_id) assert doc is not None - assert doc["title"] == "Test Document" - assert doc["content"] == "Test content" - assert doc["project"] == "test-project" + assert doc.title == "Test Document" + assert doc.content == "Test content" + assert doc.project == "test-project" def test_save_document_with_tags(self): """Test saving document with tags.""" @@ -108,7 +108,7 @@ def test_save_document_with_tags(self): # Verify document exists doc = db.get_document(doc_id) assert doc is not None - assert doc["title"] == "Tagged Document" + assert doc.title == "Tagged Document" def test_search_documents_basic(self): """Test basic document search.""" 
@@ -124,7 +124,7 @@ def test_search_documents_basic(self): # Search for Python results = db.search_documents("Python") assert len(results) >= 1 - assert any("Python" in doc["title"] for doc in results) + assert any("Python" in doc.title for doc in results) def test_search_documents_empty_query(self): """Test search with empty query.""" diff --git a/tests/test_task_browser.py b/tests/test_task_browser.py index 4ee6795a..cb93358d 100644 --- a/tests/test_task_browser.py +++ b/tests/test_task_browser.py @@ -184,7 +184,7 @@ def _side_effect( ) -> list[Task]: all_tasks: list[Task] = mock.return_value if status is not None: - return [t for t in all_tasks if t["status"] in status] + return [t for t in all_tasks if t.status in status] return list(all_tasks) return _side_effect @@ -210,7 +210,7 @@ def mock_task_data() -> Generator[MockDict, None, None]: def _count_side_effect() -> dict[str, int]: counts: dict[str, int] = {} for t in m_list.return_value: - s = t["status"] + s = t.status counts[s] = counts.get(s, 0) + 1 return counts diff --git a/tests/test_task_commands.py b/tests/test_task_commands.py index 1992a7ab..519db235 100644 --- a/tests/test_task_commands.py +++ b/tests/test_task_commands.py @@ -749,31 +749,35 @@ def test_view_shows_basic_info(self, mock_tasks): @patch("emdx.models.documents.get_document") @patch("emdx.commands.tasks.tasks") def test_view_shows_epic_label(self, mock_tasks, mock_get_doc): - task_data = { - "id": 10, - "title": "SEC-1: Harden auth", - "status": "active", - "description": "", - "epic_key": "SEC", - "epic_seq": 1, - "parent_task_id": 500, - "source_doc_id": 99, - "output_doc_id": None, - "priority": 1, - "created_at": "2026-01-15", - } - parent_data = { - "id": 500, - "title": "Security epic", - "status": "open", - "description": "", - "epic_key": "SEC", - "epic_seq": 49, - "parent_task_id": None, - "source_doc_id": None, - "priority": 3, - "created_at": "2026-01-01", - } + task_data = Task.from_row( + { + "id": 10, + "title": "SEC-1: 
Harden auth", + "status": "active", + "description": "", + "epic_key": "SEC", + "epic_seq": 1, + "parent_task_id": 500, + "source_doc_id": 99, + "output_doc_id": None, + "priority": 1, + "created_at": "2026-01-15", + } + ) + parent_data = Task.from_row( + { + "id": 500, + "title": "Security epic", + "status": "open", + "description": "", + "epic_key": "SEC", + "epic_seq": 49, + "parent_task_id": None, + "source_doc_id": None, + "priority": 3, + "created_at": "2026-01-01", + } + ) mock_tasks.resolve_task_id.return_value = 10 mock_tasks.get_task.side_effect = lambda tid: task_data if tid == 10 else parent_data mock_tasks.get_dependencies.return_value = [] @@ -1217,8 +1221,8 @@ class TestTaskAddWithAfter: @patch("emdx.commands.tasks.tasks") def test_add_with_single_after(self, mock_tasks): mock_tasks.create_task.return_value = 10 - task_10 = {"id": 10, "title": "Deploy", "epic_key": None, "epic_seq": None} - task_5 = {"id": 5, "title": "Build", "epic_key": None, "epic_seq": None} + task_10 = Task.from_row({"id": 10, "title": "Deploy", "epic_key": None, "epic_seq": None}) + task_5 = Task.from_row({"id": 5, "title": "Build", "epic_key": None, "epic_seq": None}) mock_tasks.get_task.side_effect = lambda tid: task_10 if tid == 10 else task_5 result = runner.invoke(app, ["add", "Deploy", "--after", "5"]) assert result.exit_code == 0 @@ -1238,9 +1242,9 @@ def test_add_with_single_after(self, mock_tasks): def test_add_with_multiple_after(self, mock_tasks): mock_tasks.create_task.return_value = 20 tasks_by_id = { - 20: {"id": 20, "title": "Release", "epic_key": None, "epic_seq": None}, - 10: {"id": 10, "title": "Build", "epic_key": None, "epic_seq": None}, - 11: {"id": 11, "title": "Test", "epic_key": None, "epic_seq": None}, + 20: Task.from_row({"id": 20, "title": "Release", "epic_key": None, "epic_seq": None}), + 10: Task.from_row({"id": 10, "title": "Build", "epic_key": None, "epic_seq": None}), + 11: Task.from_row({"id": 11, "title": "Test", "epic_key": None, "epic_seq": 
None}), } mock_tasks.get_task.side_effect = lambda tid: tasks_by_id.get(tid) result = runner.invoke(app, ["add", "Release", "--after", "10", "--after", "11"]) @@ -1266,8 +1270,8 @@ class TestTaskDepAdd: def test_dep_add_success(self, mock_tasks): mock_tasks.resolve_task_id.side_effect = lambda x: int(x) tasks_by_id = { - 5: {"id": 5, "title": "Task A", "epic_key": None, "epic_seq": None}, - 3: {"id": 3, "title": "Task B", "epic_key": None, "epic_seq": None}, + 5: Task.from_row({"id": 5, "title": "Task A", "epic_key": None, "epic_seq": None}), + 3: Task.from_row({"id": 3, "title": "Task B", "epic_key": None, "epic_seq": None}), } mock_tasks.get_task.side_effect = lambda tid: tasks_by_id.get(tid) mock_tasks.add_dependency.return_value = True @@ -1281,12 +1285,14 @@ def test_dep_add_success(self, mock_tasks): @patch("emdx.commands.tasks.tasks") def test_dep_add_cycle(self, mock_tasks): mock_tasks.resolve_task_id.side_effect = lambda x: int(x) - mock_tasks.get_task.side_effect = lambda tid: { - "id": tid, - "title": "Task", - "epic_key": None, - "epic_seq": None, - } + mock_tasks.get_task.side_effect = lambda tid: Task.from_row( + { + "id": tid, + "title": "Task", + "epic_key": None, + "epic_seq": None, + } + ) mock_tasks.add_dependency.return_value = False result = runner.invoke(app, ["dep", "add", "5", "3"]) assert result.exit_code == 1 @@ -1311,12 +1317,14 @@ class TestTaskDepRm: @patch("emdx.commands.tasks.tasks") def test_dep_rm_success(self, mock_tasks): mock_tasks.resolve_task_id.side_effect = lambda x: int(x) - mock_tasks.get_task.side_effect = lambda tid: { - "id": tid, - "title": "Task", - "epic_key": None, - "epic_seq": None, - } + mock_tasks.get_task.side_effect = lambda tid: Task.from_row( + { + "id": tid, + "title": "Task", + "epic_key": None, + "epic_seq": None, + } + ) mock_tasks.remove_dependency.return_value = True result = runner.invoke(app, ["dep", "rm", "5", "3"]) assert result.exit_code == 0 @@ -1328,12 +1336,14 @@ def test_dep_rm_success(self, 
mock_tasks): @patch("emdx.commands.tasks.tasks") def test_dep_rm_not_found(self, mock_tasks): mock_tasks.resolve_task_id.side_effect = lambda x: int(x) - mock_tasks.get_task.side_effect = lambda tid: { - "id": tid, - "title": "Task", - "epic_key": None, - "epic_seq": None, - } + mock_tasks.get_task.side_effect = lambda tid: Task.from_row( + { + "id": tid, + "title": "Task", + "epic_key": None, + "epic_seq": None, + } + ) mock_tasks.remove_dependency.return_value = False result = runner.invoke(app, ["dep", "rm", "5", "3"]) assert result.exit_code == 0 @@ -1415,11 +1425,11 @@ def test_chain_shows_upstream_and_downstream(self, mock_tasks): ) # Walk up: task 5 depends on 3 mock_tasks.get_dependencies.side_effect = lambda tid: ( - [{"id": 3, "title": "First", "status": "done"}] if tid == 5 else [] + [Task.from_row({"id": 3, "title": "First", "status": "done"})] if tid == 5 else [] ) # Walk down: task 8 depends on 5 mock_tasks.get_dependents.side_effect = lambda tid: ( - [{"id": 8, "title": "Last", "status": "open"}] if tid == 5 else [] + [Task.from_row({"id": 8, "title": "Last", "status": "open"})] if tid == 5 else [] ) result = runner.invoke(app, ["chain", "5"]) assert result.exit_code == 0 @@ -1469,10 +1479,10 @@ def test_chain_json(self, mock_tasks): } ) mock_tasks.get_dependencies.side_effect = lambda tid: ( - [{"id": 3, "title": "Up", "status": "done"}] if tid == 5 else [] + [Task.from_row({"id": 3, "title": "Up", "status": "done"})] if tid == 5 else [] ) mock_tasks.get_dependents.side_effect = lambda tid: ( - [{"id": 8, "title": "Down", "status": "open"}] if tid == 5 else [] + [Task.from_row({"id": 8, "title": "Down", "status": "open"})] if tid == 5 else [] ) result = runner.invoke(app, ["chain", "5", "--json"]) assert result.exit_code == 0 From 5101167665ef3cd52d7ae020721a6817e6f13ae3 Mon Sep 17 00:00:00 2001 From: Alex Rockwell Date: Sat, 7 Mar 2026 02:47:19 -0500 Subject: [PATCH 8/8] fix: convert remaining dict(obj) calls to .to_dict(), fix test mocks (Issue 
#ARCH-24) serve.py used dict(doc)/dict(task) which fails without dict-compat. core.py had one remaining dict(r) on search results. Test mocks in test_serve.py, test_commands_core.py, and test_v028_regressions.py still returned raw dicts instead of SearchHit/Document/Task objects. Co-Authored-By: Claude Opus 4.6 --- emdx/commands/core.py | 2 +- emdx/commands/serve.py | 12 ++--- tests/test_commands_core.py | 51 ++++++++++-------- tests/test_serve.py | 27 ++++++---- tests/test_v028_regressions.py | 97 +++++++++++++++++++--------------- 5 files changed, 107 insertions(+), 82 deletions(-) diff --git a/emdx/commands/core.py b/emdx/commands/core.py index e2294622..dc9c4770 100644 --- a/emdx/commands/core.py +++ b/emdx/commands/core.py @@ -910,7 +910,7 @@ def _find_keyword_search( effective_query = search_query if search_query else "*" results = [ - dict(r) + r.to_dict() for r in search_documents( effective_query, project=project, diff --git a/emdx/commands/serve.py b/emdx/commands/serve.py index 160aba2b..c9dbee75 100644 --- a/emdx/commands/serve.py +++ b/emdx/commands/serve.py @@ -55,14 +55,14 @@ def _serialize(obj: Any) -> Any: def _find_recent(params: dict[str, Any]) -> list[dict[str, Any]]: limit = params.get("limit", 20) rows = get_recent_documents(limit=limit) - return [dict(r) for r in rows] + return [r.to_dict() for r in rows] def _find_search(params: dict[str, Any]) -> list[dict[str, Any]]: query = params["query"] limit = params.get("limit", 10) rows = search_documents(query, limit=limit) - return [dict(r) for r in rows] + return [r.to_dict() for r in rows] def _find_by_tags(params: dict[str, Any]) -> list[dict[str, Any]]: @@ -80,7 +80,7 @@ def _view_document(params: dict[str, Any]) -> dict[str, Any] | None: row = get_document(doc_id) if row is None: return None - result = dict(row) + result = row.to_dict() # Get tags for the document from emdx.models.tags import get_document_tags @@ -127,14 +127,14 @@ def _task_list(params: dict[str, Any]) -> list[dict[str, 
Any]]: limit = params.get("limit", 200) status_list = [status] if status else None rows = list_tasks(status=status_list, epic_key=epic_key, limit=limit) - return [dict(r) for r in rows] + return [r.to_dict() for r in rows] def _task_log(params: dict[str, Any]) -> list[dict[str, Any]]: task_id = params["id"] limit = params.get("limit", 50) rows = get_task_log(task_id, limit=limit) - return [dict(r) for r in rows] + return [r.to_dict() for r in rows] def _task_update(params: dict[str, Any]) -> dict[str, Any]: @@ -155,7 +155,7 @@ def _task_log_progress(params: dict[str, Any]) -> dict[str, Any]: def _status(params: dict[str, Any]) -> dict[str, Any]: tasks = list_tasks(limit=20) return { - "tasks": [dict(t) for t in tasks], + "tasks": [t.to_dict() for t in tasks], } diff --git a/tests/test_commands_core.py b/tests/test_commands_core.py index c2e8a258..4e72322b 100644 --- a/tests/test_commands_core.py +++ b/tests/test_commands_core.py @@ -9,6 +9,7 @@ from emdx.commands.core import InputContent, app, generate_title, get_input_content from emdx.models.document import Document +from emdx.models.search import SearchHit runner = CliRunner() @@ -190,13 +191,15 @@ class TestFindCommand: def test_find_basic(self, mock_search, mock_get_tags): """Basic search returns results.""" mock_search.return_value = [ - { - "id": 1, - "title": "Found Doc", - "project": "proj", - "created_at": datetime(2024, 1, 1), - "access_count": 3, - } + SearchHit.from_row( + { + "id": 1, + "title": "Found Doc", + "project": "proj", + "created_at": datetime(2024, 1, 1), + "access_count": 3, + } + ) ] mock_get_tags.return_value = {1: ["python"]} @@ -226,13 +229,15 @@ def test_find_no_results(self, mock_search, mock_get_tags): def test_find_ids_only(self, mock_search, mock_get_tags): """--ids-only outputs just IDs.""" mock_search.return_value = [ - { - "id": 42, - "title": "Doc", - "project": None, - "created_at": datetime(2024, 1, 1), - "access_count": 0, - } + SearchHit.from_row( + { + "id": 42, + 
"title": "Doc", + "project": None, + "created_at": datetime(2024, 1, 1), + "access_count": 0, + } + ) ] mock_get_tags.return_value = {} @@ -246,14 +251,16 @@ def test_find_ids_only(self, mock_search, mock_get_tags): def test_find_json_output(self, mock_search, mock_get_tags): """--json outputs JSON array.""" mock_search.return_value = [ - { - "id": 1, - "title": "JSON Doc", - "project": "p", - "created_at": datetime(2024, 1, 1), - "updated_at": datetime(2024, 1, 2), - "access_count": 0, - } + SearchHit.from_row( + { + "id": 1, + "title": "JSON Doc", + "project": "p", + "created_at": datetime(2024, 1, 1), + "updated_at": datetime(2024, 1, 2), + "access_count": 0, + } + ) ] mock_get_tags.return_value = {1: []} diff --git a/tests/test_serve.py b/tests/test_serve.py index 5803d4a2..5e743e86 100644 --- a/tests/test_serve.py +++ b/tests/test_serve.py @@ -9,6 +9,9 @@ import pytest from emdx.commands.serve import _handle_request, _serialize +from emdx.models.document import Document +from emdx.models.search import SearchHit +from emdx.models.task import Task # --------------------------------------------------------------------------- @@ -50,8 +53,8 @@ def test_unknown_method(self) -> None: def test_find_recent(self, mock_recent: Any) -> None: """find.recent returns recent documents.""" mock_recent.return_value = [ - {"id": 1, "title": "Doc 1", "created_at": "2026-01-15"}, - {"id": 2, "title": "Doc 2", "created_at": "2026-01-14"}, + Document.from_row({"id": 1, "title": "Doc 1", "created_at": "2026-01-15"}), + Document.from_row({"id": 2, "title": "Doc 2", "created_at": "2026-01-14"}), ] request: dict[str, Any] = { @@ -68,7 +71,7 @@ def test_find_recent(self, mock_recent: Any) -> None: def test_find_search(self, mock_search: Any) -> None: """find.search returns search results.""" mock_search.return_value = [ - {"id": 1, "title": "Found Doc"}, + SearchHit.from_row({"id": 1, "title": "Found Doc"}), ] request: dict[str, Any] = { @@ -102,12 +105,14 @@ def test_find_by_tags(self, 
mock_tags: Any) -> None: @patch("emdx.commands.serve.get_document") def test_view_document(self, mock_get_doc: Any, mock_get_tags: Any, mock_links: Any) -> None: """view returns document with tags and links.""" - mock_get_doc.return_value = { - "id": 42, - "title": "My Document", - "content": "Hello world", - "project": "test", - } + mock_get_doc.return_value = Document.from_row( + { + "id": 42, + "title": "My Document", + "content": "Hello world", + "project": "test", + } + ) mock_get_tags.return_value = ["python"] mock_links.return_value = [] @@ -172,7 +177,7 @@ def test_tag_list(self, mock_tags: Any) -> None: def test_task_list(self, mock_tasks: Any) -> None: """task.list returns tasks.""" mock_tasks.return_value = [ - {"id": 1, "title": "Task 1", "status": "open"}, + Task.from_row({"id": 1, "title": "Task 1", "status": "open"}), ] request: dict[str, Any] = { @@ -215,7 +220,7 @@ def test_task_log_progress(self, mock_log: Any) -> None: def test_status_method(self, mock_tasks: Any) -> None: """status returns current task status.""" mock_tasks.return_value = [ - {"id": 1, "title": "Active Task", "status": "active"}, + Task.from_row({"id": 1, "title": "Active Task", "status": "active"}), ] request: dict[str, Any] = { diff --git a/tests/test_v028_regressions.py b/tests/test_v028_regressions.py index b194865d..fb5eb334 100644 --- a/tests/test_v028_regressions.py +++ b/tests/test_v028_regressions.py @@ -14,6 +14,7 @@ from emdx.main import app from emdx.models.document import Document +from emdx.models.search import SearchHit runner = CliRunner() @@ -126,27 +127,33 @@ def test_no_tags_excludes_matching_docs( """--no-tags should remove docs that have any of the excluded tags.""" # Simulate 3 search results with all required fields mock_search.return_value = [ - { - "id": 1, - "title": "Keep me", - "project": "p", - "content": "c", - "created_at": "2026-01-15", - }, - { - "id": 2, - "title": "Exclude me", - "project": "p", - "content": "c", - "created_at": "2026-01-15", - 
}, - { - "id": 3, - "title": "Also keep", - "project": "p", - "content": "c", - "created_at": "2026-01-15", - }, + SearchHit.from_row( + { + "id": 1, + "title": "Keep me", + "project": "p", + "content": "c", + "created_at": "2026-01-15", + } + ), + SearchHit.from_row( + { + "id": 2, + "title": "Exclude me", + "project": "p", + "content": "c", + "created_at": "2026-01-15", + } + ), + SearchHit.from_row( + { + "id": 3, + "title": "Also keep", + "project": "p", + "content": "c", + "created_at": "2026-01-15", + } + ), ] # Doc 2 has the excluded tag "draft" mock_tags_map.return_value = { @@ -177,27 +184,33 @@ def test_no_tags_multiple_excluded( ) -> None: """--no-tags with comma-separated tags excludes docs with ANY of them.""" mock_search.return_value = [ - { - "id": 1, - "title": "Keep me", - "project": "p", - "content": "c", - "created_at": "2026-01-15", - }, - { - "id": 2, - "title": "Has draft", - "project": "p", - "content": "c", - "created_at": "2026-01-15", - }, - { - "id": 3, - "title": "Has wip", - "project": "p", - "content": "c", - "created_at": "2026-01-15", - }, + SearchHit.from_row( + { + "id": 1, + "title": "Keep me", + "project": "p", + "content": "c", + "created_at": "2026-01-15", + } + ), + SearchHit.from_row( + { + "id": 2, + "title": "Has draft", + "project": "p", + "content": "c", + "created_at": "2026-01-15", + } + ), + SearchHit.from_row( + { + "id": 3, + "title": "Has wip", + "project": "p", + "content": "c", + "created_at": "2026-01-15", + } + ), ] mock_tags_map.return_value = { 1: ["notes"],