diff --git a/tests/README.md b/tests/README.md
index 85b5758..8d31d04 100644
--- a/tests/README.md
+++ b/tests/README.md
@@ -1,4 +1,18 @@
-# Test Setup
+# Tests
+
+## Running Tests
+
+There is a pdm script, `pdm run test`, that runs the tests with code coverage. Any additional parameters are passed through to pytest.
+
+The `-k` parameter filters tests by keyword expression. Some useful examples:
+```
+pdm run test -k "Integration"
+pdm run test --pg-host 172.17.0.3 -k "postgres and not Integration"
+pdm run test -k "drop_tables and not postgres"
+pdm run test -k "duckdb"
+```
+
+## Test Setup
 
 Duckdb tests "just work". Postgres tests will be skipped unless you set them up first.
diff --git a/tests/conftest.py b/tests/conftest.py
index b9ccef2..38a7e5d 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -1,5 +1,10 @@
+import contextlib
+from collections.abc import Callable
+
+import psycopg
 import pytest
 from httpx_folio.auth import FolioParams
+from psycopg import sql
 
 
 def pytest_addoption(parser: pytest.Parser) -> None:
@@ -24,3 +29,23 @@ def folio_params(pytestconfig: pytest.Config) -> tuple[bool, FolioParams]:
             pytestconfig.getoption("folio_password") or "admin",
         ),
     )
+
+
+@pytest.fixture(scope="session")
+def pg_dsn(pytestconfig: pytest.Config) -> None | Callable[[str], str]:
+    host = pytestconfig.getoption("pg_host")
+    if host is None:
+        return None
+
+    def setup(db: str) -> str:
+        base_dsn = f"host={host} user=ldlite password=ldlite"
+        with contextlib.closing(psycopg.connect(base_dsn)) as base_conn:
+            base_conn.autocommit = True
+            with base_conn.cursor() as curr:
+                curr.execute(
+                    sql.SQL("CREATE DATABASE {db};").format(db=sql.Identifier(db)),
+                )
+
+        return base_dsn + f" dbname={db}"
+
+    return setup
diff --git a/tests/test_cases/to_csv_samples/basic.csv b/tests/export_csv_samples/basic.csv
similarity index 100%
rename from tests/test_cases/to_csv_samples/basic.csv
rename to tests/export_csv_samples/basic.csv
diff --git a/tests/test_cases/to_csv_samples/datatypes.csv b/tests/export_csv_samples/datatypes.csv
similarity index 100%
rename from tests/test_cases/to_csv_samples/datatypes.csv
rename to tests/export_csv_samples/datatypes.csv
diff --git a/tests/test_cases/to_csv_samples/escaped_chars.csv b/tests/export_csv_samples/escaped_chars.csv
similarity index 100%
rename from tests/test_cases/to_csv_samples/escaped_chars.csv
rename to tests/export_csv_samples/escaped_chars.csv
diff --git a/tests/test_cases/to_csv_samples/sorting.csv b/tests/export_csv_samples/sorting.csv
similarity index 100%
rename from tests/test_cases/to_csv_samples/sorting.csv
rename to tests/export_csv_samples/sorting.csv
diff --git a/tests/mock_response_case.py b/tests/mock_response_case.py
new file mode 100644
index 0000000..353be9e
--- /dev/null
+++ b/tests/mock_response_case.py
@@ -0,0 +1,76 @@
+import json
+from dataclasses import dataclass
+from functools import cached_property
+from typing import TYPE_CHECKING
+from unittest.mock import MagicMock
+from uuid import uuid4
+
+if TYPE_CHECKING:
+    import ldlite
+
+
+@dataclass(frozen=True)
+class Call:
+    prefix: str
+    returns: "ldlite._jsonx.Json | list[ldlite._jsonx.Json]"
+
+    # duplicate of LDLite.query default params
+    query: str | dict[str, str] | None = None
+    json_depth: int = 3
+    limit: int | None = None
+    keep_raw: bool = True
+
+    @property
+    def returns_list(self) -> list["ldlite._jsonx.Json"]:
+        if isinstance(self.returns, list):
+            return self.returns
+
+        return [self.returns]
+
+
+@dataclass(frozen=True)
+class MockedResponseTestCase:
+    calls: Call | list[Call]
+
+    
@property + def calls_list(self) -> list[Call]: + if isinstance(self.calls, list): + return self.calls + + return [self.calls] + + @cached_property + def db(self) -> str: + db = "db" + str(uuid4()).split("-")[0] + print(db) # noqa: T201 + return db + + def patch_request_get( + self, + ld: "ldlite.LDLite", + httpx_post_mock: MagicMock, + client_get_mock: MagicMock, + ) -> None: + # leave tqdm out of it + ld.quiet(enable=True) + + httpx_post_mock.return_value.cookies.__getitem__.return_value = "token" + + side_effects = [] + for call in self.calls_list: + key = next(iter(call.returns_list[0].keys())) + total_mock = MagicMock() + total_mock.text = f'{{"{key}": [{{"id": ""}}], "totalRecords": 100000}}' + + value_mocks = [] + for v in call.returns_list: + value_mock = MagicMock() + value_mock.text = json.dumps(v) + value_mocks.append(value_mock) + + end_mock = MagicMock() + end_mock.text = f'{{"{key}": [] }}' + + side_effects.extend([total_mock, *value_mocks, end_mock]) + + client_get_mock.side_effect = side_effects diff --git a/tests/test___init__.py b/tests/test___init__.py index e880ebf..e363ed4 100644 --- a/tests/test___init__.py +++ b/tests/test___init__.py @@ -7,78 +7,6 @@ from pytest_cases import parametrize_with_cases -def test_ok_legacy(folio_params: tuple[bool, FolioParams]) -> None: - from ldlite import LDLite as uut - - ld = uut() - ld.connect_folio(*astuple(folio_params[1])) - ld.connect_db() - ld.query(table="g", path="/groups", query="cql.allRecords=1 sortby id") - ld.select(table="g__t") - - -def test_ok_limit(folio_params: tuple[bool, FolioParams]) -> None: - from ldlite import LDLite as uut - - ld = uut() - db = ld.connect_db() - - ld.connect_folio(*astuple(folio_params[1])) - ld.page_size = 2 - ld.query(table="g", path="/groups", query="cql.allRecords=1 sortby id", limit=5) - - db.execute("SELECT COUNT(DISTINCT COLUMNS(*)) FROM g__t;") - actual = cast("tuple[int]", db.fetchone())[0] - assert actual == 5 - - -def test_ok_trailing_slash(folio_params: tuple[bool, FolioParams]) -> None: - if folio_params[0]: - pytest.skip("Specify an okapi environment with --folio-base-url to run") - - from ldlite import LDLite as uut - - ld = uut() - params = astuple(folio_params[1]) - ld.connect_folio(*[params[0] + "/", *params[1:]]) - ld.connect_db() - ld.query(table="g", path="/groups") - ld.select(table="g__t") - - -def test_ok(folio_params: tuple[bool, FolioParams]) -> None: - from ldlite import LDLite as uut - - ld = uut() - ld.connect_folio(*astuple(folio_params[1])) - ld.connect_db() - ld.query(table="g", path="/groups") - ld.select(table="g__t") - - -def test_no_connect_folio() -> None: - from ldlite import LDLite as uut - - ld = uut() - ld.connect_db() - with pytest.raises(RuntimeError): - ld.query(table="g", path="/groups") - - -def test_no_connect_db() -> None: - from ldlite import LDLite as uut - - ld = uut() - ld.connect_folio( - url="https://folio-etesting-snapshot-kong.ci.folio.org", - tenant="diku", - user="diku_admin", - password="admin", - ) - with pytest.raises(RuntimeError): - ld.query(table="g", path="/groups") - - @dataclass(frozen=True) class FolioConnectionCase: expected: type[Exception] @@ -116,14 +44,82 @@ def case_password(self) -> FolioConnectionCase: ) -@parametrize_with_cases("tc", cases=FolioConnectionCases) -def test_bad_folio_connection( - folio_params: tuple[bool, FolioParams], - tc: FolioConnectionCase, -) -> None: - from ldlite import LDLite as uut +class TestIntegration: + def test_ok_legacy(self, folio_params: tuple[bool, FolioParams]) -> None: + from 
ldlite import LDLite as uut + + ld = uut() + ld.connect_folio(*astuple(folio_params[1])) + ld.connect_db() + ld.query(table="g", path="/groups", query="cql.allRecords=1 sortby id") + ld.select(table="g__t") + + def test_ok_limit(self, folio_params: tuple[bool, FolioParams]) -> None: + from ldlite import LDLite as uut + + ld = uut() + db = ld.connect_db() + + ld.connect_folio(*astuple(folio_params[1])) + ld.page_size = 2 + ld.query(table="g", path="/groups", query="cql.allRecords=1 sortby id", limit=5) + + db.execute("SELECT COUNT(DISTINCT COLUMNS(*)) FROM g__t;") + actual = cast("tuple[int]", db.fetchone())[0] + assert actual == 5 + + def test_ok_trailing_slash(self, folio_params: tuple[bool, FolioParams]) -> None: + if folio_params[0]: + pytest.skip("Specify an okapi environment with --folio-base-url to run") + + from ldlite import LDLite as uut - ld = uut() - params = astuple(folio_params[1]) - with pytest.raises(tc.expected): - ld.connect_folio(*[*params[: tc.index], tc.value, *params[tc.index + 1 :]]) + ld = uut() + params = astuple(folio_params[1]) + ld.connect_folio(*[params[0] + "/", *params[1:]]) + ld.connect_db() + ld.query(table="g", path="/groups") + ld.select(table="g__t") + + def test_ok(self, folio_params: tuple[bool, FolioParams]) -> None: + from ldlite import LDLite as uut + + ld = uut() + ld.connect_folio(*astuple(folio_params[1])) + ld.connect_db() + ld.query(table="g", path="/groups") + ld.select(table="g__t") + + def test_no_connect_folio(self) -> None: + from ldlite import LDLite as uut + + ld = uut() + ld.connect_db() + with pytest.raises(RuntimeError): + ld.query(table="g", path="/groups") + + def test_no_connect_db(self) -> None: + from ldlite import LDLite as uut + + ld = uut() + ld.connect_folio( + url="https://folio-etesting-snapshot-kong.ci.folio.org", + tenant="diku", + user="diku_admin", + password="admin", + ) + with pytest.raises(RuntimeError): + ld.query(table="g", path="/groups") + + @parametrize_with_cases("tc", cases=FolioConnectionCases) + def test_bad_folio_connection( + self, + folio_params: tuple[bool, FolioParams], + tc: FolioConnectionCase, + ) -> None: + from ldlite import LDLite as uut + + ld = uut() + params = astuple(folio_params[1]) + with pytest.raises(tc.expected): + ld.connect_folio(*[*params[: tc.index], tc.value, *params[tc.index + 1 :]]) diff --git a/tests/test_cases/base.py b/tests/test_cases/base.py deleted file mode 100644 index 2977256..0000000 --- a/tests/test_cases/base.py +++ /dev/null @@ -1,59 +0,0 @@ -import json -from dataclasses import dataclass -from functools import cached_property -from typing import TYPE_CHECKING, Any, cast -from unittest.mock import MagicMock -from uuid import uuid4 - -if TYPE_CHECKING: - import ldlite - - -@dataclass(frozen=True) -class EndToEndTestCase: - values: dict[str, list[dict[str, Any]] | list[list[dict[str, Any]]]] - - @cached_property - def db(self) -> str: - db = "db" + str(uuid4()).split("-")[0] - print(db) # noqa: T201 - return db - - def patch_request_get( - self, - ld: "ldlite.LDLite", - httpx_post_mock: MagicMock, - client_get_mock: MagicMock, - ) -> None: - # iteration hack - ld.page_size = 1 - # leave tqdm out of it - ld.quiet(enable=True) - - httpx_post_mock.return_value.cookies.__getitem__.return_value = "token" - - side_effects = [] - for vsource in self.values.values(): - list_values = ( - [cast("list[dict[str, Any]]", vsource)] - if isinstance(vsource[0], dict) - else cast("list[list[dict[str, Any]]]", vsource) - ) - - key = next(iter(list_values[0][0].keys())) - total_mock = 
MagicMock() - total_mock.text = f'{{"{key}": [{{"id": ""}}], "totalRecords": 100000}}' - - for values in list_values: - value_mocks = [] - for v in values: - value_mock = MagicMock() - value_mock.text = json.dumps(v) - value_mocks.append(value_mock) - - end_mock = MagicMock() - end_mock.text = f'{{"{key}": [] }}' - - side_effects.extend([total_mock, *value_mocks, end_mock]) - - client_get_mock.side_effect = side_effects diff --git a/tests/test_cases/drop_tables_cases.py b/tests/test_cases/drop_tables_cases.py deleted file mode 100644 index e67c3b0..0000000 --- a/tests/test_cases/drop_tables_cases.py +++ /dev/null @@ -1,62 +0,0 @@ -from dataclasses import dataclass - -from pytest_cases import parametrize - -from .base import EndToEndTestCase - - -@dataclass(frozen=True) -class DropTablesCase(EndToEndTestCase): - drop: str - expected_tables: list[str] - keep_raw: bool - - -class DropTablesCases: - @parametrize(keep_raw=[True, False]) - def case_one_table(self, keep_raw: bool) -> DropTablesCase: - return DropTablesCase( - drop="prefix", - values={"prefix": [{"purchaseOrders": [{"id": "1"}]}]}, - expected_tables=[], - keep_raw=keep_raw, - ) - - @parametrize(keep_raw=[True, False]) - def case_two_tables(self, keep_raw: bool) -> DropTablesCase: - return DropTablesCase( - drop="prefix", - values={ - "prefix": [ - { - "purchaseOrders": [ - { - "id": "1", - "subObjects": [{"id": "2"}, {"id": "3"}], - }, - ], - }, - ], - }, - expected_tables=[], - keep_raw=keep_raw, - ) - - @parametrize(keep_raw=[True, False]) - def case_separate_table(self, keep_raw: bool) -> DropTablesCase: - expected_tables = [ - "notdropped__t", - "notdropped__tcatalog", - ] - if keep_raw: - expected_tables = ["notdropped", *expected_tables] - - return DropTablesCase( - drop="prefix", - values={ - "prefix": [{"purchaseOrders": [{"id": "1"}]}], - "notdropped": [{"purchaseOrders": [{"id": "1"}]}], - }, - expected_tables=expected_tables, - keep_raw=keep_raw, - ) diff --git a/tests/test_cases/load_history_cases.py b/tests/test_cases/load_history_cases.py deleted file mode 100644 index ed1727b..0000000 --- a/tests/test_cases/load_history_cases.py +++ /dev/null @@ -1,98 +0,0 @@ -from dataclasses import dataclass - -from pytest_cases import parametrize - -from .base import EndToEndTestCase - - -@dataclass(frozen=True) -class LoadHistoryCase(EndToEndTestCase): - queries: dict[str, list[str | None | dict[str, str]]] - expected_loads: dict[str, tuple[str | None, int]] - - -class LoadHistoryTestCases: - @parametrize(query=[None, "poline.id=*A"]) - def case_one_load(self, query: str | None) -> LoadHistoryCase: - return LoadHistoryCase( - values={ - "prefix": [ - { - "purchaseOrders": [ - { - "id": "b096504a-3d54-4664-9bf5-1b872466fd66", - "value": "value", - }, - { - "id": "b096504a-9999-4664-9bf5-1b872466fd66", - "value": "value-2", - }, - ], - }, - ], - }, - queries={"prefix": [query]}, - expected_loads={ - "prefix": (query, 2), - }, - ) - - def case_schema_load(self) -> LoadHistoryCase: - return LoadHistoryCase( - values={ - "schema.prefix": [ - { - "purchaseOrders": [ - { - "id": "b096504a-3d54-4664-9bf5-1b872466fd66", - "value": "value", - }, - { - "id": "b096504a-9999-4664-9bf5-1b872466fd66", - "value": "value-2", - }, - ], - }, - ], - }, - queries={"schema.prefix": [None]}, - expected_loads={ - "schema.prefix": (None, 2), - }, - ) - - def case_two_loads(self) -> LoadHistoryCase: - return LoadHistoryCase( - values={ - "prefix": [ - [ - { - "purchaseOrders": [ - { - "id": "b096504a-3d54-4664-9bf5-1b872466fd66", - "value": "value", - }, 
- ], - }, - ], - [ - { - "purchaseOrders": [ - { - "id": "b096504a-3d54-4664-9bf5-1b872466fd66", - "value": "value", - }, - { - "id": "b096504a-9999-4664-9bf5-1b872466fd66", - "value": "value-2", - }, - ], - }, - ], - ], - }, - queries={"prefix": [None, "a query"]}, - expected_loads={ - "prefix": ("a query", 2), - }, - ) diff --git a/tests/test_cases/query_cases.py b/tests/test_cases/query_cases.py deleted file mode 100644 index 9e3725e..0000000 --- a/tests/test_cases/query_cases.py +++ /dev/null @@ -1,605 +0,0 @@ -import json -from dataclasses import dataclass -from typing import Any - -from pytest_cases import parametrize - -from .base import EndToEndTestCase - - -@dataclass(frozen=True) -class QueryCase(EndToEndTestCase): - json_depth: int - expected_tables: list[str] - expected_values: dict[str, tuple[list[str], list[tuple[Any, ...]]]] - expected_indexes: list[tuple[str, str]] | None = None - keep_raw: bool = True - - -class QueryTestCases: - @parametrize(json_depth=range(1, 2)) - def case_one_table(self, json_depth: int) -> QueryCase: - return QueryCase( - json_depth=json_depth, - values={ - "prefix": [ - { - "purchaseOrders": [ - { - "id": "b096504a-3d54-4664-9bf5-1b872466fd66", - "value": "value", - }, - ], - }, - ], - }, - expected_tables=["prefix", "prefix__t", "prefix__tcatalog"], - expected_values={ - "prefix__t": ( - ["id", "value"], - [("b096504a-3d54-4664-9bf5-1b872466fd66", "value")], - ), - "prefix__tcatalog": (["table_name"], [("prefix__t",)]), - }, - expected_indexes=[ - ("prefix", "__id"), - ("prefix__t", "__id"), - ("prefix__t", "id"), - ], - ) - - @parametrize(json_depth=range(2, 3)) - def case_two_tables(self, json_depth: int) -> QueryCase: - return QueryCase( - json_depth=json_depth, - values={ - "prefix": [ - { - "purchaseOrders": [ - { - "id": "b096504a-3d54-4664-9bf5-1b872466fd66", - "value": "value", - "subObjects": [ - { - "id": "2b94c631-fca9-4892-a730-03ee529ffe2a", - "value": "sub-value-1", - }, - { - "id": "f5bda109-a719-4f72-b797-b9c22f45e4e1", - "value": "sub-value-2", - }, - ], - }, - ], - }, - ], - }, - expected_tables=[ - "prefix", - "prefix__t", - "prefix__t__sub_objects", - "prefix__tcatalog", - ], - expected_values={ - "prefix__t": ( - ["id", "value"], - [("b096504a-3d54-4664-9bf5-1b872466fd66", "value")], - ), - "prefix__t__sub_objects": ( - ["id", "sub_objects__id", "sub_objects__value"], - [ - ( - "b096504a-3d54-4664-9bf5-1b872466fd66", - "2b94c631-fca9-4892-a730-03ee529ffe2a", - "sub-value-1", - ), - ( - "b096504a-3d54-4664-9bf5-1b872466fd66", - "f5bda109-a719-4f72-b797-b9c22f45e4e1", - "sub-value-2", - ), - ], - ), - "prefix__tcatalog": ( - ["table_name"], - [("prefix__t",), ("prefix__t__sub_objects",)], - ), - }, - expected_indexes=[ - ("prefix", "__id"), - ("prefix__t", "__id"), - ("prefix__t", "id"), - ("prefix__t__sub_objects", "__id"), - ("prefix__t__sub_objects", "id"), - ("prefix__t__sub_objects", "sub_objects__o"), - ("prefix__t__sub_objects", "sub_objects__id"), - ], - ) - - @parametrize(json_depth=range(1)) - def case_table_no_expansion(self, json_depth: int) -> QueryCase: - return QueryCase( - json_depth=json_depth, - values={ - "prefix": [ - { - "purchaseOrders": [ - { - "id": "b096504a-3d54-4664-9bf5-1b872466fd66", - "value": "value", - "subObjects": [ - { - "id": "2b94c631-fca9-4892-a730-03ee529ffe2a", - "value": "sub-value", - }, - ], - }, - ], - }, - ], - }, - expected_tables=["prefix"], - expected_values={}, - ) - - def case_table_underexpansion(self) -> QueryCase: - return QueryCase( - json_depth=2, - values={ - "prefix": [ - { 
- "purchaseOrders": [ - { - "id": "b096504a-3d54-4664-9bf5-1b872466fd66", - "subObjects": [ - { - "id": "2b94c631-fca9-4892-a730-03ee529ffe2a", - "value": "sub-value", - "subSubObjects": [ - { - "id": ( - "2b94c631-fca9-4892-a730-03ee529ffe2a" - ), - "value": "sub-sub-value", - }, - ], - }, - ], - }, - ], - }, - ], - }, - expected_tables=[ - "prefix", - "prefix__t", - "prefix__t__sub_objects", - "prefix__tcatalog", - ], - expected_values={ - "prefix__t__sub_objects": ( - [ - "id", - "sub_objects__id", - "sub_objects__value", - ], - [ - ( - "b096504a-3d54-4664-9bf5-1b872466fd66", - "2b94c631-fca9-4892-a730-03ee529ffe2a", - "sub-value", - ), - ], - ), - "prefix__tcatalog": ( - ["table_name"], - [("prefix__t",), ("prefix__t__sub_objects",)], - ), - }, - ) - - @parametrize(json_depth=range(3, 4)) - def case_three_tables(self, json_depth: int) -> QueryCase: - return QueryCase( - json_depth=json_depth, - values={ - "prefix": [ - { - "purchaseOrders": [ - { - "id": "b096504a-3d54-4664-9bf5-1b872466fd66", - "value": "value", - "subObjects": [ - { - "id": "2b94c631-fca9-4892-a730-03ee529ffe2a", - "value": "sub-value", - "subSubObjects": [ - { - "id": ( - "2b94c631-fca9-4892-a730-03ee529ffe2a" - ), - "value": "sub-sub-value", - }, - ], - }, - ], - }, - ], - }, - ], - }, - expected_tables=[ - "prefix", - "prefix__t", - "prefix__t__sub_objects", - "prefix__t__sub_objects__sub_sub_objects", - "prefix__tcatalog", - ], - expected_values={ - "prefix__t__sub_objects__sub_sub_objects": ( - [ - "id", - "sub_objects__id", - "sub_objects__sub_sub_objects__id", - "sub_objects__sub_sub_objects__value", - ], - [ - ( - "b096504a-3d54-4664-9bf5-1b872466fd66", - "2b94c631-fca9-4892-a730-03ee529ffe2a", - "2b94c631-fca9-4892-a730-03ee529ffe2a", - "sub-sub-value", - ), - ], - ), - "prefix__tcatalog": ( - ["table_name"], - [ - ("prefix__t",), - ("prefix__t__sub_objects",), - ("prefix__t__sub_objects__sub_sub_objects",), - ], - ), - }, - expected_indexes=[ - ("prefix", "__id"), - ("prefix__t", "__id"), - ("prefix__t", "id"), - ("prefix__t__sub_objects", "__id"), - ("prefix__t__sub_objects", "id"), - ("prefix__t__sub_objects", "sub_objects__o"), - ("prefix__t__sub_objects", "sub_objects__id"), - ("prefix__t__sub_objects__sub_sub_objects", "__id"), - ("prefix__t__sub_objects__sub_sub_objects", "id"), - ("prefix__t__sub_objects__sub_sub_objects", "sub_objects__o"), - ("prefix__t__sub_objects__sub_sub_objects", "sub_objects__id"), - ( - "prefix__t__sub_objects__sub_sub_objects", - "sub_objects__sub_sub_objects__o", - ), - ( - "prefix__t__sub_objects__sub_sub_objects", - "sub_objects__sub_sub_objects__id", - ), - ], - ) - - def case_nested_object(self) -> QueryCase: - return QueryCase( - json_depth=2, - values={ - "prefix": [ - { - "purchaseOrders": [ - { - "id": "b096504a-3d54-4664-9bf5-1b872466fd66", - "value": "value", - "subObject": { - "id": "2b94c631-fca9-4892-a730-03ee529ffe2a", - "value": "sub-value", - }, - }, - ], - }, - ], - }, - expected_tables=["prefix", "prefix__t", "prefix__tcatalog"], - expected_values={ - "prefix__t": ( - ["id", "value", "sub_object__id", "sub_object__value"], - [ - ( - "b096504a-3d54-4664-9bf5-1b872466fd66", - "value", - "2b94c631-fca9-4892-a730-03ee529ffe2a", - "sub-value", - ), - ], - ), - "prefix__tcatalog": ( - ["table_name"], - [("prefix__t",)], - ), - }, - expected_indexes=[ - ("prefix", "__id"), - ("prefix__t", "__id"), - ("prefix__t", "id"), - ("prefix__t", "sub_object__id"), - ], - ) - - def case_doubly_nested_object(self) -> QueryCase: - return QueryCase( - json_depth=3, - values={ 
- "prefix": [ - { - "purchaseOrders": [ - { - "id": "b096504a-3d54-4664-9bf5-1b872466fd66", - "value": "value", - "subObject": { - "id": "2b94c631-fca9-4892-a730-03ee529ffe2a", - "value": "sub-value", - "subSubObject": { - "id": "2b94c631-fca9-4892-a730-03ee529ffe2a", - "value": "sub-sub-value", - }, - }, - }, - ], - }, - ], - }, - expected_tables=["prefix", "prefix__t", "prefix__tcatalog"], - expected_values={ - "prefix__t": ( - [ - "id", - "value", - "sub_object__id", - "sub_object__sub_sub_object__id", - "sub_object__sub_sub_object__value", - ], - [ - ( - "b096504a-3d54-4664-9bf5-1b872466fd66", - "value", - "2b94c631-fca9-4892-a730-03ee529ffe2a", - "2b94c631-fca9-4892-a730-03ee529ffe2a", - "sub-sub-value", - ), - ], - ), - "prefix__tcatalog": ( - ["table_name"], - [("prefix__t",)], - ), - }, - expected_indexes=[ - ("prefix", "__id"), - ("prefix__t", "__id"), - ("prefix__t", "id"), - ("prefix__t", "sub_object__id"), - ("prefix__t", "sub_object__sub_sub_object__id"), - ], - ) - - def case_nested_object_underexpansion(self) -> QueryCase: - return QueryCase( - json_depth=1, - values={ - "prefix": [ - { - "purchaseOrders": [ - { - "id": "b096504a-3d54-4664-9bf5-1b872466fd66", - "value": "value", - "subObject": { - "id": "2b94c631-fca9-4892-a730-03ee529ffe2a", - "value": "sub-value", - }, - }, - ], - }, - ], - }, - expected_tables=["prefix", "prefix__t", "prefix__tcatalog"], - expected_values={ - "prefix__t": ( - ["id", "value", "sub_object"], - [ - ( - "b096504a-3d54-4664-9bf5-1b872466fd66", - "value", - json.dumps( - { - "id": "2b94c631-fca9-4892-a730-03ee529ffe2a", - "value": "sub-value", - }, - indent=4, - ), - ), - ], - ), - "prefix__tcatalog": ( - ["table_name"], - [("prefix__t",)], - ), - }, - ) - - def case_id_generation(self) -> QueryCase: - return QueryCase( - json_depth=4, - values={ - "prefix": [ - { - "purchaseOrders": [ - { - "id": "b096504a-3d54-4664-9bf5-1b872466fd66", - "subObjects": [ - { - "id": "2b94c631-fca9-4892-a730-03ee529ffe2a", - "subSubObjects": [ - { - "id": ( - "2b94c631-fca9-4892-a730-03ee529ffe2a" - ), - }, - { - "id": ( - "8516a913-8bf7-55a4-ab71-417aba9171c9" - ), - }, - ], - }, - { - "id": "b5d8cdc4-9441-487c-90cf-0c7ec97728eb", - "subSubObjects": [ - { - "id": ( - "13a24cc8-a15c-4158-abbd-4abf25c8815a" - ), - }, - { - "id": ( - "37344879-09ce-4cd8-976f-bf1a57c0cfa6" - ), - }, - ], - }, - ], - }, - ], - }, - ], - }, - expected_tables=[ - "prefix", - "prefix__t", - "prefix__t__sub_objects", - "prefix__t__sub_objects__sub_sub_objects", - "prefix__tcatalog", - ], - expected_values={ - "prefix__t__sub_objects": ( - ["__id", "id", "sub_objects__o", "sub_objects__id"], - [ - ( - "1", - "b096504a-3d54-4664-9bf5-1b872466fd66", - "1", - "2b94c631-fca9-4892-a730-03ee529ffe2a", - ), - ( - "2", - "b096504a-3d54-4664-9bf5-1b872466fd66", - "2", - "b5d8cdc4-9441-487c-90cf-0c7ec97728eb", - ), - ], - ), - "prefix__t__sub_objects__sub_sub_objects": ( - ["__id", "sub_objects__o", "sub_objects__sub_sub_objects__o"], - [ - ("1", "1", "1"), - ("2", "1", "2"), - ("3", "2", "1"), - ("4", "2", "2"), - ], - ), - }, - ) - - def case_indexing_id_like(self) -> QueryCase: - return QueryCase( - json_depth=4, - values={ - "prefix": [ - { - "purchaseOrders": [ - { - "id": "b096504a-3d54-4664-9bf5-1b872466fd66", - "otherId": "b096504a-3d54-4664-9bf5-1b872466fd66", - "anIdButWithADifferentEnding": ( - "b096504a-3d54-4664-9bf5-1b872466fd66" - ), - }, - ], - }, - ], - }, - expected_tables=[ - "prefix", - "prefix__t", - "prefix__tcatalog", - ], - expected_values={}, - expected_indexes=[ - 
("prefix", "__id"), - ("prefix__t", "__id"), - ("prefix__t", "id"), - ("prefix__t", "other_id"), - ("prefix__t", "an_id_but_with_a_different_ending"), - ], - ) - - @parametrize(json_depth=range(1, 2)) - def case_drop_raw(self, json_depth: int) -> QueryCase: - return QueryCase( - json_depth=json_depth, - values={ - "prefix": [ - { - "purchaseOrders": [ - { - "id": "b096504a-3d54-4664-9bf5-1b872466fd66", - "value": "value", - }, - ], - }, - ], - }, - expected_tables=["prefix__t", "prefix__tcatalog"], - expected_values={ - "prefix__t": ( - ["id", "value"], - [("b096504a-3d54-4664-9bf5-1b872466fd66", "value")], - ), - "prefix__tcatalog": (["table_name"], [("prefix__t",)]), - }, - expected_indexes=[ - ("prefix__t", "__id"), - ("prefix__t", "id"), - ], - keep_raw=False, - ) - - # this case should be testing the FolioClient class - # but it isn't setup to mock the data properly right now - def case_null_records(self) -> QueryCase: - return QueryCase( - json_depth=1, - values={ - "prefix": [ - { - "purchaseOrders": [ - { - "id": "b096504a-3d54-4664-9bf5-1b872466fd66", - "value": "value", - }, - None, - ], - }, - ], - }, - expected_tables=["prefix", "prefix__t", "prefix__tcatalog"], - expected_values={}, - expected_indexes=[ - ("prefix", "__id"), - ("prefix__t", "__id"), - ("prefix__t", "id"), - ], - ) diff --git a/tests/test_cases/to_csv_cases.py b/tests/test_cases/to_csv_cases.py deleted file mode 100644 index baf2d9c..0000000 --- a/tests/test_cases/to_csv_cases.py +++ /dev/null @@ -1,100 +0,0 @@ -from dataclasses import dataclass -from pathlib import Path - -from .base import EndToEndTestCase - -_SAMPLE_PATH = Path() / "tests" / "test_cases" / "to_csv_samples" - - -@dataclass(frozen=True) -class ToCsvCase(EndToEndTestCase): - expected_csvs: list[tuple[str, Path]] - - -class ToCsvCases: - def case_basic(self) -> ToCsvCase: - return ToCsvCase( - values={"prefix": [{"purchaseOrders": [{"id": "id", "val": "value"}]}]}, - expected_csvs=[("prefix__t", _SAMPLE_PATH / "basic.csv")], - ) - - def case_datatypes(self) -> ToCsvCase: - return ToCsvCase( - values={ - "prefix": [ - { - "purchaseOrders": [ - { - "id": "id", - "string": "string", - "integer": 1, - "numeric": 1.1, - "boolean": True, - "uuid": "6a31a12a-9570-405c-af20-6abf2992859c", - }, - ], - }, - ], - }, - expected_csvs=[("prefix__t", _SAMPLE_PATH / "datatypes.csv")], - ) - - def case_escaped_chars(self) -> ToCsvCase: - return ToCsvCase( - values={ - "prefix": [ - { - "purchaseOrders": [ - { - "id": "id", - "comma": "Double, double toil and trouble", - "doubleQuote": 'Cry "Havoc!" a horse', - "newLine": """To be -or not -to be""", - "singleQuote": "Cry 'Havoc!' 
a horse", - }, - { - "id": "id", - "comma": "Z", - "doubleQuote": "Z", - "newLine": "Z", - "singleQuote": "Z", - }, - ], - }, - ], - }, - expected_csvs=[("prefix__t", _SAMPLE_PATH / "escaped_chars.csv")], - ) - - def case_sorting(self) -> ToCsvCase: - return ToCsvCase( - values={ - "prefix": [ - { - "purchaseOrders": [ - { - "id": "id", - "C": "YY", - "B": "XX", - "A": "ZZ", - }, - { - "id": "id", - "C": "Y", - "B": "XX", - "A": "ZZ", - }, - { - "id": "id", - "C": "Y", - "B": "X", - "A": "Z", - }, - ], - }, - ], - }, - expected_csvs=[("prefix__t", _SAMPLE_PATH / "sorting.csv")], - ) diff --git a/tests/test_drop_tables.py b/tests/test_drop_tables.py new file mode 100644 index 0000000..a0d1d42 --- /dev/null +++ b/tests/test_drop_tables.py @@ -0,0 +1,172 @@ +from collections.abc import Callable +from contextlib import closing +from dataclasses import dataclass +from typing import TYPE_CHECKING, cast +from unittest import mock +from unittest.mock import MagicMock + +import duckdb +import psycopg +import pytest +from pytest_cases import parametrize, parametrize_with_cases + +from .mock_response_case import Call, MockedResponseTestCase + +if TYPE_CHECKING: + from _typeshed import dbapi + + import ldlite + + +@dataclass(frozen=True) +class DropTablesTC(MockedResponseTestCase): + drop: str + expected_tables: list[str] + + +@parametrize(keep_raw=[True, False]) +def case_one_table(keep_raw: bool) -> DropTablesTC: + return DropTablesTC( + Call( + "prefix", + returns={"purchaseOrders": [{"id": "1"}]}, + keep_raw=keep_raw, + ), + drop="prefix", + expected_tables=[], + ) + + +@parametrize(keep_raw=[True, False]) +def case_two_tables(keep_raw: bool) -> DropTablesTC: + return DropTablesTC( + Call( + "prefix", + returns={ + "purchaseOrders": [ + { + "id": "1", + "subObjects": [{"id": "2"}, {"id": "3"}], + }, + ], + }, + keep_raw=keep_raw, + ), + drop="prefix", + expected_tables=[], + ) + + +@parametrize(keep_raw=[True, False]) +def case_separate_table(keep_raw: bool) -> DropTablesTC: + expected_tables = [ + "notdropped__t", + "notdropped__tcatalog", + ] + if keep_raw: + expected_tables = ["notdropped", *expected_tables] + + return DropTablesTC( + [ + Call( + "prefix", + returns={"purchaseOrders": [{"id": "1"}]}, + keep_raw=keep_raw, + ), + Call( + "notdropped", + returns={"purchaseOrders": [{"id": "1"}]}, + keep_raw=keep_raw, + ), + ], + drop="prefix", + expected_tables=expected_tables, + ) + + +def _arrange( + client_get_mock: MagicMock, + httpx_post_mock: MagicMock, + tc: DropTablesTC, +) -> "ldlite.LDLite": + from ldlite import LDLite + + uut = LDLite() + tc.patch_request_get(uut, httpx_post_mock, client_get_mock) + uut.connect_folio("https://doesnt.matter", "", "", "") + return uut + + +def _act(uut: "ldlite.LDLite", tc: DropTablesTC) -> None: + uut.drop_tables(tc.drop) + for call in tc.calls_list: + uut.query(table=call.prefix, path="/patched", keep_raw=call.keep_raw) + uut.drop_tables(tc.drop) + + +def _assert( + conn: "dbapi.DBAPIConnection", + res_schema: str, # TODO: have schema be part of tc + tc: DropTablesTC, +) -> None: + with closing(conn.cursor()) as cur: + cur.execute( + """ + SELECT table_name + FROM information_schema.tables + WHERE table_schema=$1 + """, + (res_schema,), + ) + assert sorted([r[0] for r in cur.fetchall()]) == sorted(tc.expected_tables) + + cur.execute('SELECT COUNT(*) FROM "ldlite_system"."load_history"') + assert (ud := cur.fetchone()) is not None + assert ud[0] == len(tc.calls_list) - 1 + cur.execute( + 'SELECT COUNT(*) FROM "ldlite_system"."load_history" ' + 'WHERE 
"table_name" = $1', + (tc.drop,), + ) + assert (d := cur.fetchone()) is not None + assert d[0] == 0 + + +@mock.patch("httpx_folio.auth.httpx.post") +@mock.patch("httpx_folio.factories.httpx.Client.get") +@parametrize_with_cases("tc", cases=".") +def test_duckdb( + client_get_mock: MagicMock, + httpx_post_mock: MagicMock, + tc: DropTablesTC, +) -> None: + uut = _arrange(client_get_mock, httpx_post_mock, tc) + dsn = f":memory:{tc.db}" + uut.connect_db(dsn) + + _act(uut, tc) + + with duckdb.connect(dsn) as conn: + _assert(cast("dbapi.DBAPIConnection", conn), "main", tc) + + +@mock.patch("httpx_folio.auth.httpx.post") +@mock.patch("httpx_folio.factories.httpx.Client.get") +@parametrize_with_cases("tc", cases=".") +def test_postgres( + client_get_mock: MagicMock, + httpx_post_mock: MagicMock, + pg_dsn: None | Callable[[str], str], + tc: DropTablesTC, +) -> None: + if pg_dsn is None: + pytest.skip("Specify the pg host using --pg-host to run") + + uut = _arrange(client_get_mock, httpx_post_mock, tc) + dsn = pg_dsn(tc.db) + uut.connect_db_postgresql(dsn) + + _act(uut, tc) + + with psycopg.connect(dsn, cursor_factory=psycopg.RawCursor) as conn: + _assert(cast("dbapi.DBAPIConnection", conn), "public", tc) diff --git a/tests/test_duckdb.py b/tests/test_duckdb.py deleted file mode 100644 index 1c3f8ea..0000000 --- a/tests/test_duckdb.py +++ /dev/null @@ -1,178 +0,0 @@ -from difflib import unified_diff -from pathlib import Path -from typing import Any, cast -from unittest import mock -from unittest.mock import MagicMock - -import duckdb -import pytest -from pytest_cases import parametrize_with_cases - -from tests.test_cases import drop_tables_cases as dtc -from tests.test_cases import load_history_cases as lhc -from tests.test_cases import query_cases as qc -from tests.test_cases import to_csv_cases as csvc - - -@mock.patch("httpx_folio.auth.httpx.post") -@mock.patch("httpx_folio.factories.httpx.Client.get") -@parametrize_with_cases("tc", cases=dtc.DropTablesCases) -def test_drop_tables( - client_get_mock: MagicMock, - httpx_post_mock: MagicMock, - tc: dtc.DropTablesCase, -) -> None: - from ldlite import LDLite as uut - - ld = uut() - tc.patch_request_get(ld, httpx_post_mock, client_get_mock) - dsn = f":memory:{tc.db}" - ld.connect_folio("https://doesnt.matter", "", "", "") - ld.connect_db(dsn) - ld.drop_tables(tc.drop) - - for prefix in tc.values: - ld.query(table=prefix, path="/patched", keep_raw=tc.keep_raw) - ld.drop_tables(tc.drop) - - with duckdb.connect(dsn) as res: - res.execute( - """ - SELECT table_name - FROM information_schema.tables - WHERE table_schema='main' - """, - ) - assert sorted([r[0] for r in res.fetchall()]) == sorted(tc.expected_tables) - - res.execute('SELECT COUNT(*) FROM "ldlite_system"."load_history"') - assert (ud := res.fetchone()) is not None - assert ud[0] == len(tc.values) - 1 - res.execute( - 'SELECT COUNT(*) FROM "ldlite_system"."load_history" ' - 'WHERE "table_name" = $1', - (tc.drop,), - ) - assert (d := res.fetchone()) is not None - assert d[0] == 0 - - -@mock.patch("httpx_folio.auth.httpx.post") -@mock.patch("httpx_folio.factories.httpx.Client.get") -@parametrize_with_cases("tc", cases=qc.QueryTestCases) -def test_query( - client_get_mock: MagicMock, - httpx_post_mock: MagicMock, - tc: qc.QueryCase, -) -> None: - from ldlite import LDLite as uut - - ld = uut() - tc.patch_request_get(ld, httpx_post_mock, client_get_mock) - dsn = f":memory:{tc.db}" - ld.connect_folio("https://doesnt.matter", "", "", "") - ld.connect_db(dsn) - - for prefix in tc.values: - ld.query( - 
table=prefix, - path="/patched", - json_depth=tc.json_depth, - keep_raw=tc.keep_raw, - ) - - with duckdb.connect(dsn) as res: - res.execute( - """ - SELECT table_name - FROM information_schema.tables - WHERE table_schema='main' - """, - ) - assert sorted([r[0] for r in res.fetchall()]) == sorted(tc.expected_tables) - - for table, (cols, values) in tc.expected_values.items(): - with duckdb.connect(dsn) as res: - res.execute(f"SELECT {'::text,'.join(cols)}::text FROM {table};") - for v in values: - assert res.fetchone() == v - - assert res.fetchone() is None - - -@mock.patch("httpx_folio.auth.httpx.post") -@mock.patch("httpx_folio.factories.httpx.Client.get") -@parametrize_with_cases("tc", cases=csvc.ToCsvCases) -def test_to_csv( - client_get_mock: MagicMock, - httpx_post_mock: MagicMock, - tc: csvc.ToCsvCase, - tmpdir: str, -) -> None: - from ldlite import LDLite as uut - - ld = uut() - tc.patch_request_get(ld, httpx_post_mock, client_get_mock) - dsn = f":memory:{tc.db}" - ld.connect_folio("https://doesnt.matter", "", "", "") - ld.connect_db(dsn) - - for prefix in tc.values: - ld.query(table=prefix, path="/patched") - - for table, expected in tc.expected_csvs: - actual = (Path(tmpdir) / table).with_suffix(".csv") - - ld.export_csv(str(actual), table) - - with expected.open("r") as f: - expected_lines = f.readlines() - with actual.open("r") as f: - actual_lines = f.readlines() - - diff = list(unified_diff(expected_lines, actual_lines)) - if len(diff) > 0: - pytest.fail("".join(diff)) - - -@mock.patch("httpx_folio.auth.httpx.post") -@mock.patch("httpx_folio.factories.httpx.Client.get") -@parametrize_with_cases("tc", cases=lhc.LoadHistoryTestCases) -def test_history( - client_get_mock: MagicMock, - httpx_post_mock: MagicMock, - tc: lhc.LoadHistoryCase, -) -> None: - from ldlite import LDLite as uut - - ld = uut() - tc.patch_request_get(ld, httpx_post_mock, client_get_mock) - dsn = f":memory:{tc.db}" - ld.connect_folio("https://doesnt.matter", "", "", "") - ld.connect_db(dsn) - - for prefix, calls in cast( - "dict[str, list[list[dict[str, Any]]]]", - tc.values, - ).items(): - for i in range(len(calls)): - ld.query( - table=prefix, - path="/patched", - query=tc.queries[prefix][i], - ) - - with duckdb.connect(dsn) as res: - res.execute('SELECT COUNT(*) FROM "ldlite_system"."load_history"') - assert (ud := res.fetchone()) is not None - assert ud[0] == len(tc.expected_loads) - - for tn, (q, t) in tc.expected_loads.items(): - res.execute( - 'SELECT * FROM "ldlite_system"."load_history" WHERE "table_name" = $1', - (tn,), - ) - assert (d := res.fetchone()) is not None - assert d[1] == q - assert d[7] == t - assert d[6] > d[5] > d[4] > d[3] > d[2] diff --git a/tests/test_endtoend.py b/tests/test_endtoend.py index d8aada2..6768101 100644 --- a/tests/test_endtoend.py +++ b/tests/test_endtoend.py @@ -1,82 +1,166 @@ -from dataclasses import astuple -from typing import cast +from collections.abc import Callable +from contextlib import closing +from dataclasses import astuple, dataclass +from typing import TYPE_CHECKING, cast +from uuid import uuid4 import pytest from httpx_folio.factories import FolioParams, default_client_factory from httpx_folio.query import QueryParams, QueryType -from pytest_cases import parametrize - - -@parametrize( - tc=[ - # no id column - (True, "/finance/ledger-rollovers-logs", None), - # finicky about sorting - (True, "/notes", "title=Key Permissions"), - # id descending - (False, "/invoice/invoices", "vendorId==e0* sortBy id desc"), - # non id sort - (False, "/groups", 
"cql.allRecords=1 sortBy group desc"), - ], -) -def test_endtoend( - folio_params: tuple[bool, FolioParams], - tc: tuple[bool, str, QueryType], -) -> None: - (non_snapshot_data, endpoint, query) = tc - if non_snapshot_data and folio_params[0]: - pytest.skip( - "Specify an environment having data with --folio-base-url to run", - ) - - from ldlite import LDLite as uut - - ld = uut() - db = ld.connect_db() - - ld.page_size = 3 - ld.connect_folio(*astuple(folio_params[1])) - ld.query(table="test", path=endpoint, query=query) # type:ignore[arg-type] - - with default_client_factory(folio_params[1])() as client: - res = client.get( - endpoint, - params=QueryParams(query).stats(), - ) - res.raise_for_status() - - expected = res.json()["totalRecords"] - assert expected > 3 - - db.execute("SELECT COUNT(DISTINCT COLUMNS(*)) FROM test__t;") - actual = cast("tuple[int]", db.fetchone())[0] - - assert actual == expected - - -@parametrize( - srs=[ - "/source-storage/records", - "/source-storage/stream/records", - "/source-storage/source-records", - "/source-storage/stream/source-records", - ], -) -def test_endtoend_srs(folio_params: tuple[bool, FolioParams], srs: str) -> None: - from ldlite import LDLite as uut - - ld = uut() - db = ld.connect_db() - - ld.connect_folio(*astuple(folio_params[1])) - ld.query(table="test", path=srs, limit=10) - - db.execute("SELECT COUNT(DISTINCT COLUMNS(*)) FROM test__t;") - actual = cast("tuple[int]", db.fetchone())[0] - - # snapshot a variable number of records - assert actual >= 1 - if folio_params[0]: - assert actual <= 10 - else: - assert actual == 10 +from pytest_cases import parametrize, parametrize_with_cases + +if TYPE_CHECKING: + import ldlite + + +@dataclass(frozen=True) +class NonSrsCase: + snapshot_ok: bool + path: str + query: str | dict[str, str] | None + + +class NonSrsCases: + def case_no_id_col(self) -> NonSrsCase: + return NonSrsCase(False, "/finance/ledger-rollovers-logs", None) + + def case_finicky_sorting(self) -> NonSrsCase: + return NonSrsCase(False, "/notes", "title=Key Permissions") + + def case_id_descending(self) -> NonSrsCase: + return NonSrsCase(True, "/invoice/invoices", "vendorId==e0* sortBy id desc") + + def case_non_id_sort(self) -> NonSrsCase: + return NonSrsCase(True, "/groups", "cql.allRecords=1 sortBy group desc") + + +SrsCases = [ + "/source-storage/records", + "/source-storage/stream/records", + "/source-storage/source-records", + "/source-storage/stream/source-records", +] + + +class TestIntegration: + def _arrange( + self, + folio_params: tuple[bool, FolioParams], + snapshot_ok: bool = True, + ) -> "ldlite.LDLite": + if not snapshot_ok and folio_params[0]: + pytest.skip( + "Specify an environment having data with --folio-base-url to run", + ) + + from ldlite import LDLite + + uut = LDLite() + uut.page_size = 3 + uut.connect_folio(*astuple(folio_params[1])) + return uut + + def _nonsrs_assert( + self, + uut: "ldlite.LDLite", + folio_params: tuple[bool, FolioParams], + tc: NonSrsCase, + ) -> None: + with default_client_factory(folio_params[1])() as client: + res = client.get( + tc.path, + params=QueryParams(cast("QueryType", tc.query)).stats(), + ) + res.raise_for_status() + + expected = res.json()["totalRecords"] + assert expected > 3 + + if uut.db is None: + pytest.fail("No active database connection.") + + with closing(uut.db.cursor()) as cur: + cur.execute("SELECT COUNT(*) FROM (SELECT DISTINCT * FROM test__t) t;") + actual = cast("tuple[int]", cur.fetchone())[0] + + assert actual == expected + + @parametrize_with_cases("tc", 
cases=NonSrsCases) + def test_nonsrs_duckdb( + self, + folio_params: tuple[bool, FolioParams], + tc: NonSrsCase, + ) -> None: + uut = self._arrange(folio_params, tc.snapshot_ok) + uut.connect_db() + + uut.query(table="test", path=tc.path, query=tc.query) + self._nonsrs_assert(uut, folio_params, tc) + + @parametrize_with_cases("tc", cases=NonSrsCases) + def test_nonsrs_postgres( + self, + folio_params: tuple[bool, FolioParams], + pg_dsn: None | Callable[[str], str], + tc: NonSrsCase, + ) -> None: + if pg_dsn is None: + pytest.skip("Specify the pg host using --pg-host to run") + + uut = self._arrange(folio_params, tc.snapshot_ok) + db = "db" + str(uuid4()).split("-")[0] + print(db) # noqa: T201 + dsn = pg_dsn(db) + uut.connect_db_postgresql(dsn) + + uut.query(table="test", path=tc.path, query=tc.query) + + self._nonsrs_assert(uut, folio_params, tc) + + def _srs_assert(self, uut: "ldlite.LDLite", is_snapshot: bool) -> None: + if uut.db is None: + pytest.fail("No active database connection.") + + with closing(uut.db.cursor()) as cur: + cur.execute("SELECT COUNT(*) FROM (SELECT DISTINCT * FROM test__t) t;") + actual = cast("tuple[int]", cur.fetchone())[0] + + # snapshot has a variable number of records + assert actual >= 1 + if is_snapshot: + assert actual <= 10 + else: + assert actual == 10 + + @parametrize(path=SrsCases) + def test_srs_duckdb( + self, + folio_params: tuple[bool, FolioParams], + path: str, + ) -> None: + uut = self._arrange(folio_params) + uut.connect_db() + + uut.query(table="test", path=path, limit=10) + + self._srs_assert(uut, folio_params[0]) + + @parametrize(path=SrsCases) + def test_srs_postgres( + self, + folio_params: tuple[bool, FolioParams], + pg_dsn: None | Callable[[str], str], + path: str, + ) -> None: + if pg_dsn is None: + pytest.skip("Specify the pg host using --pg-host to run") + + uut = self._arrange(folio_params) + db = "db" + str(uuid4()).split("-")[0] + print(db) # noqa: T201 + dsn = pg_dsn(db) + uut.connect_db_postgresql(dsn) + + uut.query(table="test", path=path, limit=10) + + self._srs_assert(uut, folio_params[0]) diff --git a/tests/test_export_csv.py b/tests/test_export_csv.py new file mode 100644 index 0000000..55aaae2 --- /dev/null +++ b/tests/test_export_csv.py @@ -0,0 +1,171 @@ +from collections.abc import Callable +from dataclasses import dataclass +from difflib import unified_diff +from pathlib import Path +from typing import TYPE_CHECKING +from unittest import mock +from unittest.mock import MagicMock + +import pytest +from pytest_cases import parametrize_with_cases + +from .mock_response_case import Call, MockedResponseTestCase + +if TYPE_CHECKING: + import ldlite + +_SAMPLE_PATH = Path() / "tests" / "export_csv_samples" + + +@dataclass(frozen=True) +class ExportCsvTC(MockedResponseTestCase): + expected_csvs: list[tuple[str, Path]] + + +def case_basic() -> ExportCsvTC: + return ExportCsvTC( + Call("prefix", returns={"purchaseOrders": [{"id": "id", "val": "value"}]}), + expected_csvs=[("prefix__t", _SAMPLE_PATH / "basic.csv")], + ) + + +def case_datatypes() -> ExportCsvTC: + return ExportCsvTC( + Call( + "prefix", + returns={ + "purchaseOrders": [ + { + "id": "id", + "string": "string", + "integer": 1, + "numeric": 1.1, + "boolean": True, + "uuid": "6a31a12a-9570-405c-af20-6abf2992859c", + }, + ], + }, + ), + expected_csvs=[("prefix__t", _SAMPLE_PATH / "datatypes.csv")], + ) + + +def case_escaped_chars() -> ExportCsvTC: + return ExportCsvTC( + Call( + "prefix", + returns={ + "purchaseOrders": [ + { + "id": "id", + "comma": "Double, double toil 
and trouble", + "doubleQuote": 'Cry "Havoc!" a horse', + "newLine": """To be +or not +to be""", + "singleQuote": "Cry 'Havoc!' a horse", + }, + { + "id": "id", + "comma": "Z", + "doubleQuote": "Z", + "newLine": "Z", + "singleQuote": "Z", + }, + ], + }, + ), + expected_csvs=[("prefix__t", _SAMPLE_PATH / "escaped_chars.csv")], + ) + + +def case_sorting() -> ExportCsvTC: + return ExportCsvTC( + Call( + "prefix", + returns={ + "purchaseOrders": [ + {"id": "id", "C": "YY", "B": "XX", "A": "ZZ"}, + {"id": "id", "C": "Y", "B": "XX", "A": "ZZ"}, + {"id": "id", "C": "Y", "B": "X", "A": "Z"}, + ], + }, + ), + expected_csvs=[("prefix__t", _SAMPLE_PATH / "sorting.csv")], + ) + + +def _arrange( + client_get_mock: MagicMock, + httpx_post_mock: MagicMock, + tc: ExportCsvTC, +) -> "ldlite.LDLite": + from ldlite import LDLite + + uut = LDLite() + tc.patch_request_get(uut, httpx_post_mock, client_get_mock) + uut.connect_folio("https://doesnt.matter", "", "", "") + return uut + + +def _act(uut: "ldlite.LDLite", tc: ExportCsvTC) -> None: + for call in tc.calls_list: + uut.query(table=call.prefix, path="/patched") + + +def _assert( + uut: "ldlite.LDLite", + tc: ExportCsvTC, + tmpdir: str, +) -> None: + for table, expected in tc.expected_csvs: + actual = (Path(tmpdir) / table).with_suffix(".csv") + + uut.export_csv(str(actual), table) + + with expected.open("r") as f: + expected_lines = f.readlines() + with actual.open("r") as f: + actual_lines = f.readlines() + + diff = list(unified_diff(expected_lines, actual_lines)) + if len(diff) > 0: + pytest.fail("".join(diff)) + + +@mock.patch("httpx_folio.auth.httpx.post") +@mock.patch("httpx_folio.factories.httpx.Client.get") +@parametrize_with_cases("tc", cases=".") +def test_duckdb( + client_get_mock: MagicMock, + httpx_post_mock: MagicMock, + tc: ExportCsvTC, + tmpdir: str, +) -> None: + uut = _arrange(client_get_mock, httpx_post_mock, tc) + dsn = f":memory:{tc.db}" + uut.connect_db(dsn) + + _act(uut, tc) + _assert(uut, tc, tmpdir) + + +@mock.patch("httpx_folio.auth.httpx.post") +@mock.patch("httpx_folio.factories.httpx.Client.get") +@parametrize_with_cases("tc", cases=".") +def test_postgres( + client_get_mock: MagicMock, + httpx_post_mock: MagicMock, + pg_dsn: None | Callable[[str], str], + tc: ExportCsvTC, + tmpdir: str, +) -> None: + if pg_dsn is None: + pytest.skip("Specify the pg host using --pg-host to run") + + uut = _arrange(client_get_mock, httpx_post_mock, tc) + dsn = pg_dsn(tc.db) + uut.connect_db_postgresql(dsn) + + _act(uut, tc) + _assert(uut, tc, tmpdir) diff --git a/tests/test_load_history.py b/tests/test_load_history.py new file mode 100644 index 0000000..1db5f34 --- /dev/null +++ b/tests/test_load_history.py @@ -0,0 +1,179 @@ +from collections.abc import Callable +from contextlib import closing +from dataclasses import dataclass +from typing import TYPE_CHECKING, cast +from unittest import mock +from unittest.mock import MagicMock + +import duckdb +import psycopg +import pytest +from pytest_cases import parametrize, parametrize_with_cases + +from .mock_response_case import Call, MockedResponseTestCase + +if TYPE_CHECKING: + from _typeshed import dbapi + + import ldlite + + +@dataclass(frozen=True) +class LoadHistoryTC(MockedResponseTestCase): + expected_loads: dict[str, tuple[str | None, int]] + + +@parametrize(query=[None, "poline.id=*A"]) +def case_one_load(query: str | None) -> LoadHistoryTC: + return LoadHistoryTC( + Call( + "prefix", + query=query, + returns={ + "purchaseOrders": [ + { + "id": "b096504a-3d54-4664-9bf5-1b872466fd66", + "value": 
"value", + }, + { + "id": "b096504a-9999-4664-9bf5-1b872466fd66", + "value": "value-2", + }, + ], + }, + ), + expected_loads={"prefix": (query, 2)}, + ) + + +def case_schema_load() -> LoadHistoryTC: + return LoadHistoryTC( + Call( + "schema.prefix", + returns={ + "purchaseOrders": [ + { + "id": "b096504a-3d54-4664-9bf5-1b872466fd66", + "value": "value", + }, + { + "id": "b096504a-9999-4664-9bf5-1b872466fd66", + "value": "value-2", + }, + ], + }, + ), + expected_loads={"schema.prefix": (None, 2)}, + ) + + +def case_two_loads() -> LoadHistoryTC: + return LoadHistoryTC( + [ + Call( + "prefix", + returns={ + "purchaseOrders": [ + { + "id": "b096504a-3d54-4664-9bf5-1b872466fd66", + "value": "value", + }, + ], + }, + ), + Call( + "prefix", + query="a query", + returns={ + "purchaseOrders": [ + { + "id": "b096504a-3d54-4664-9bf5-1b872466fd66", + "value": "value", + }, + { + "id": "b096504a-9999-4664-9bf5-1b872466fd66", + "value": "value-2", + }, + ], + }, + ), + ], + expected_loads={"prefix": ("a query", 2)}, + ) + + +def _arrange( + client_get_mock: MagicMock, + httpx_post_mock: MagicMock, + tc: LoadHistoryTC, +) -> "ldlite.LDLite": + from ldlite import LDLite + + uut = LDLite() + tc.patch_request_get(uut, httpx_post_mock, client_get_mock) + uut.connect_folio("https://doesnt.matter", "", "", "") + return uut + + +def _act(uut: "ldlite.LDLite", tc: LoadHistoryTC) -> None: + for call in tc.calls_list: + uut.query(table=call.prefix, path="/patched", query=call.query) + + +def _assert( + conn: "dbapi.DBAPIConnection", + tc: LoadHistoryTC, +) -> None: + with closing(conn.cursor()) as cur: + cur.execute('SELECT COUNT(*) FROM "ldlite_system"."load_history"') + assert (ud := cur.fetchone()) is not None + assert ud[0] == len(tc.expected_loads) + + for tn, (q, t) in tc.expected_loads.items(): + cur.execute( + 'SELECT * FROM "ldlite_system"."load_history" WHERE "table_name" = $1', + (tn,), + ) + assert (d := cur.fetchone()) is not None + assert d[1] == q + assert d[7] == t + assert d[6] > d[5] > d[4] > d[3] > d[2] + + +@mock.patch("httpx_folio.auth.httpx.post") +@mock.patch("httpx_folio.factories.httpx.Client.get") +@parametrize_with_cases("tc", cases=".") +def test_duckdb( + client_get_mock: MagicMock, + httpx_post_mock: MagicMock, + tc: LoadHistoryTC, +) -> None: + uut = _arrange(client_get_mock, httpx_post_mock, tc) + dsn = f":memory:{tc.db}" + uut.connect_db(dsn) + + _act(uut, tc) + with duckdb.connect(dsn) as conn: + _assert(cast("dbapi.DBAPIConnection", conn), tc) + + +@mock.patch("httpx_folio.auth.httpx.post") +@mock.patch("httpx_folio.factories.httpx.Client.get") +@parametrize_with_cases("tc", cases=".") +def test_postgres( + client_get_mock: MagicMock, + httpx_post_mock: MagicMock, + pg_dsn: None | Callable[[str], str], + tc: LoadHistoryTC, +) -> None: + if pg_dsn is None: + pytest.skip("Specify the pg host using --pg-host to run") + + uut = _arrange(client_get_mock, httpx_post_mock, tc) + dsn = pg_dsn(tc.db) + uut.connect_db_postgresql(dsn) + + _act(uut, tc) + + with psycopg.connect(dsn, cursor_factory=psycopg.RawCursor) as conn: + _assert(cast("dbapi.DBAPIConnection", conn), tc) diff --git a/tests/test_postgres.py b/tests/test_postgres.py deleted file mode 100644 index 1489869..0000000 --- a/tests/test_postgres.py +++ /dev/null @@ -1,240 +0,0 @@ -import contextlib -from collections.abc import Callable -from difflib import unified_diff -from pathlib import Path -from typing import Any, cast -from unittest import mock -from unittest.mock import MagicMock - -import psycopg -import pytest -from 
psycopg import sql -from pytest_cases import parametrize_with_cases - -from tests.test_cases import drop_tables_cases as dtc -from tests.test_cases import load_history_cases as lhc -from tests.test_cases import query_cases as qc -from tests.test_cases import to_csv_cases as csvc - - -@pytest.fixture(scope="session") -def pg_dsn(pytestconfig: pytest.Config) -> None | Callable[[str], str]: - host = pytestconfig.getoption("pg_host") - if host is None: - return None - - def setup(db: str) -> str: - base_dsn = f"host={host} user=ldlite password=ldlite" - with contextlib.closing(psycopg.connect(base_dsn)) as base_conn: - base_conn.autocommit = True - with base_conn.cursor() as curr: - curr.execute( - sql.SQL("CREATE DATABASE {db};").format(db=sql.Identifier(db)), - ) - - return base_dsn + f" dbname={db}" - - return setup - - -@mock.patch("httpx_folio.auth.httpx.post") -@mock.patch("httpx_folio.factories.httpx.Client.get") -@parametrize_with_cases("tc", cases=dtc.DropTablesCases) -def test_drop_tables( - client_get_mock: MagicMock, - httpx_post_mock: MagicMock, - pg_dsn: None | Callable[[str], str], - tc: dtc.DropTablesCase, -) -> None: - if pg_dsn is None: - pytest.skip("Specify the pg host using --pg-host to run") - - from ldlite import LDLite as uut - - ld = uut() - tc.patch_request_get(ld, httpx_post_mock, client_get_mock) - dsn = pg_dsn(tc.db) - ld.connect_folio("https://doesnt.matter", "", "", "") - ld.connect_db_postgresql(dsn) - ld.drop_tables(tc.drop) - - for prefix in tc.values: - ld.query(table=prefix, path="/patched", keep_raw=tc.keep_raw) - ld.drop_tables(tc.drop) - - with psycopg.connect(dsn) as conn, conn.cursor() as res: - res.execute( - """ - SELECT table_name - FROM information_schema.tables - WHERE table_schema='public' - """, - ) - assert sorted([r[0] for r in res.fetchall()]) == sorted(tc.expected_tables) - - res.execute('SELECT COUNT(*) FROM "ldlite_system"."load_history"') - assert (ud := res.fetchone()) is not None - assert ud[0] == len(tc.values) - 1 - res.execute( - 'SELECT COUNT(*) FROM "ldlite_system"."load_history"' - 'WHERE "table_name" = %s', - (tc.drop,), - ) - assert (d := res.fetchone()) is not None - assert d[0] == 0 - - -@mock.patch("httpx_folio.auth.httpx.post") -@mock.patch("httpx_folio.factories.httpx.Client.get") -@parametrize_with_cases("tc", cases=qc.QueryTestCases) -def test_query( - client_get_mock: MagicMock, - httpx_post_mock: MagicMock, - pg_dsn: None | Callable[[str], str], - tc: qc.QueryCase, -) -> None: - if pg_dsn is None: - pytest.skip("Specify the pg host using --pg-host to run") - - from ldlite import LDLite as uut - - ld = uut() - tc.patch_request_get(ld, httpx_post_mock, client_get_mock) - dsn = pg_dsn(tc.db) - ld.connect_folio("https://doesnt.matter", "", "", "") - ld.connect_db_postgresql(dsn) - - for prefix in tc.values: - ld.query( - table=prefix, - path="/patched", - json_depth=tc.json_depth, - keep_raw=tc.keep_raw, - ) - - with psycopg.connect(dsn) as conn: - with conn.cursor() as res: - res.execute( - """ - SELECT table_name - FROM information_schema.tables - WHERE table_schema='public' - """, - ) - assert sorted([r[0] for r in res.fetchall()]) == sorted(tc.expected_tables) - - for table, (cols, values) in tc.expected_values.items(): - with conn.cursor() as res: - res.execute( - sql.SQL("SELECT {cols}::text FROM {table};").format( - cols=sql.SQL("::text, ").join( - [sql.Identifier(c) for c in cols], - ), - table=sql.Identifier(table), - ), - ) - for v in values: - assert res.fetchone() == v - - assert res.fetchone() is None - - if 
tc.expected_indexes is not None: - with conn.cursor() as res: - res.execute( - "SELECT COUNT(*) FROM pg_indexes WHERE schemaname = 'public';", - ) - assert cast("tuple[int]", res.fetchone())[0] == len(tc.expected_indexes) - - for t, c in tc.expected_indexes: - # this requires specific formatting to match the postgres strings - res.execute(f""" -SELECT COUNT(*) FROM pg_indexes -WHERE indexdef LIKE 'CREATE INDEX % ON public.{t} USING btree ({c})'; - """) - assert cast("tuple[int]", res.fetchone())[0] == 1, f"{t}, {c}" - - -@mock.patch("httpx_folio.auth.httpx.post") -@mock.patch("httpx_folio.factories.httpx.Client.get") -@parametrize_with_cases("tc", cases=csvc.ToCsvCases) -def test_to_csv( - client_get_mock: MagicMock, - httpx_post_mock: MagicMock, - pg_dsn: None | Callable[[str], str], - tc: csvc.ToCsvCase, - tmpdir: str, -) -> None: - if pg_dsn is None: - pytest.skip("Specify the pg host using --pg-host to run") - - from ldlite import LDLite as uut - - ld = uut() - tc.patch_request_get(ld, httpx_post_mock, client_get_mock) - dsn = pg_dsn(tc.db) - ld.connect_folio("https://doesnt.matter", "", "", "") - ld.connect_db_postgresql(dsn) - - for prefix in tc.values: - ld.query(table=prefix, path="/patched") - - for table, expected in tc.expected_csvs: - actual = (Path(tmpdir) / table).with_suffix(".csv") - - ld.export_csv(str(actual), table) - - with expected.open("r") as f: - expected_lines = f.readlines() - with actual.open("r") as f: - actual_lines = f.readlines() - - diff = list(unified_diff(expected_lines, actual_lines)) - if len(diff) > 0: - pytest.fail("".join(diff)) - - -@mock.patch("httpx_folio.auth.httpx.post") -@mock.patch("httpx_folio.factories.httpx.Client.get") -@parametrize_with_cases("tc", cases=lhc.LoadHistoryTestCases) -def test_history( - client_get_mock: MagicMock, - httpx_post_mock: MagicMock, - pg_dsn: None | Callable[[str], str], - tc: lhc.LoadHistoryCase, -) -> None: - if pg_dsn is None: - pytest.skip("Specify the pg host using --pg-host to run") - - from ldlite import LDLite as uut - - ld = uut() - tc.patch_request_get(ld, httpx_post_mock, client_get_mock) - dsn = pg_dsn(tc.db) - ld.connect_folio("https://doesnt.matter", "", "", "") - ld.connect_db_postgresql(dsn) - - for prefix, calls in cast( - "dict[str, list[list[dict[str, Any]]]]", - tc.values, - ).items(): - for i in range(len(calls)): - ld.query( - table=prefix, - path="/patched", - query=tc.queries[prefix][i], - ) - - with psycopg.connect(dsn) as conn, conn.cursor() as res: - res.execute('SELECT COUNT(*) FROM "ldlite_system"."load_history"') - assert (ud := res.fetchone()) is not None - assert ud[0] == len(tc.expected_loads) - - for tn, (q, t) in tc.expected_loads.items(): - res.execute( - 'SELECT * FROM "ldlite_system"."load_history" WHERE "table_name" = %s', - (tn,), - ) - assert (d := res.fetchone()) is not None - assert d[1] == q - assert d[7] == t - assert d[6] > d[5] > d[4] > d[3] > d[2] diff --git a/tests/test_query.py b/tests/test_query.py new file mode 100644 index 0000000..fbc8ceb --- /dev/null +++ b/tests/test_query.py @@ -0,0 +1,698 @@ +import json +from collections.abc import Callable +from contextlib import closing +from dataclasses import dataclass +from typing import TYPE_CHECKING, Any, cast +from unittest import mock +from unittest.mock import MagicMock + +import duckdb +import psycopg +import pytest +from psycopg import sql +from pytest_cases import parametrize, parametrize_with_cases + +from .mock_response_case import Call, MockedResponseTestCase + +if TYPE_CHECKING: + from _typeshed import 
dbapi + + import ldlite + + +@dataclass(frozen=True) +class QueryTC(MockedResponseTestCase): + expected_tables: list[str] + expected_values: dict[str, tuple[list[str], list[tuple[Any, ...]]]] + expected_indexes: list[tuple[str, str]] | None = None + + +@parametrize(json_depth=range(1, 2)) +def case_one_table(json_depth: int) -> QueryTC: + return QueryTC( + Call( + "prefix", + json_depth=json_depth, + returns={ + "purchaseOrders": [ + { + "id": "b096504a-3d54-4664-9bf5-1b872466fd66", + "value": "value", + }, + ], + }, + ), + expected_tables=["prefix", "prefix__t", "prefix__tcatalog"], + expected_values={ + "prefix__t": ( + ["id", "value"], + [("b096504a-3d54-4664-9bf5-1b872466fd66", "value")], + ), + "prefix__tcatalog": (["table_name"], [("prefix__t",)]), + }, + expected_indexes=[ + ("prefix", "__id"), + ("prefix__t", "__id"), + ("prefix__t", "id"), + ], + ) + + +@parametrize(json_depth=range(2, 3)) +def case_two_tables(json_depth: int) -> QueryTC: + return QueryTC( + Call( + "prefix", + json_depth=json_depth, + returns={ + "purchaseOrders": [ + { + "id": "b096504a-3d54-4664-9bf5-1b872466fd66", + "value": "value", + "subObjects": [ + { + "id": "2b94c631-fca9-4892-a730-03ee529ffe2a", + "value": "sub-value-1", + }, + { + "id": "f5bda109-a719-4f72-b797-b9c22f45e4e1", + "value": "sub-value-2", + }, + ], + }, + ], + }, + ), + expected_tables=[ + "prefix", + "prefix__t", + "prefix__t__sub_objects", + "prefix__tcatalog", + ], + expected_values={ + "prefix__t": ( + ["id", "value"], + [("b096504a-3d54-4664-9bf5-1b872466fd66", "value")], + ), + "prefix__t__sub_objects": ( + ["id", "sub_objects__id", "sub_objects__value"], + [ + ( + "b096504a-3d54-4664-9bf5-1b872466fd66", + "2b94c631-fca9-4892-a730-03ee529ffe2a", + "sub-value-1", + ), + ( + "b096504a-3d54-4664-9bf5-1b872466fd66", + "f5bda109-a719-4f72-b797-b9c22f45e4e1", + "sub-value-2", + ), + ], + ), + "prefix__tcatalog": ( + ["table_name"], + [("prefix__t",), ("prefix__t__sub_objects",)], + ), + }, + expected_indexes=[ + ("prefix", "__id"), + ("prefix__t", "__id"), + ("prefix__t", "id"), + ("prefix__t__sub_objects", "__id"), + ("prefix__t__sub_objects", "id"), + ("prefix__t__sub_objects", "sub_objects__o"), + ("prefix__t__sub_objects", "sub_objects__id"), + ], + ) + + +@parametrize(json_depth=range(1)) +def case_table_no_expansion(json_depth: int) -> QueryTC: + return QueryTC( + Call( + "prefix", + json_depth=json_depth, + returns={ + "purchaseOrders": [ + { + "id": "b096504a-3d54-4664-9bf5-1b872466fd66", + "value": "value", + "subObjects": [ + { + "id": "2b94c631-fca9-4892-a730-03ee529ffe2a", + "value": "sub-value", + }, + ], + }, + ], + }, + ), + expected_tables=["prefix"], + expected_values={}, + ) + + +def case_table_underexpansion() -> QueryTC: + return QueryTC( + Call( + "prefix", + json_depth=2, + returns={ + "purchaseOrders": [ + { + "id": "b096504a-3d54-4664-9bf5-1b872466fd66", + "subObjects": [ + { + "id": "2b94c631-fca9-4892-a730-03ee529ffe2a", + "value": "sub-value", + "subSubObjects": [ + { + "id": ("2b94c631-fca9-4892-a730-03ee529ffe2a"), + "value": "sub-sub-value", + }, + ], + }, + ], + }, + ], + }, + ), + expected_tables=[ + "prefix", + "prefix__t", + "prefix__t__sub_objects", + "prefix__tcatalog", + ], + expected_values={ + "prefix__t__sub_objects": ( + [ + "id", + "sub_objects__id", + "sub_objects__value", + ], + [ + ( + "b096504a-3d54-4664-9bf5-1b872466fd66", + "2b94c631-fca9-4892-a730-03ee529ffe2a", + "sub-value", + ), + ], + ), + "prefix__tcatalog": ( + ["table_name"], + [("prefix__t",), ("prefix__t__sub_objects",)], + ), + }, 
+ ) + + +@parametrize(json_depth=range(3, 4)) +def case_three_tables(json_depth: int) -> QueryTC: + return QueryTC( + Call( + "prefix", + json_depth=json_depth, + returns={ + "purchaseOrders": [ + { + "id": "b096504a-3d54-4664-9bf5-1b872466fd66", + "value": "value", + "subObjects": [ + { + "id": "2b94c631-fca9-4892-a730-03ee529ffe2a", + "value": "sub-value", + "subSubObjects": [ + { + "id": ("2b94c631-fca9-4892-a730-03ee529ffe2a"), + "value": "sub-sub-value", + }, + ], + }, + ], + }, + ], + }, + ), + expected_tables=[ + "prefix", + "prefix__t", + "prefix__t__sub_objects", + "prefix__t__sub_objects__sub_sub_objects", + "prefix__tcatalog", + ], + expected_values={ + "prefix__t__sub_objects__sub_sub_objects": ( + [ + "id", + "sub_objects__id", + "sub_objects__sub_sub_objects__id", + "sub_objects__sub_sub_objects__value", + ], + [ + ( + "b096504a-3d54-4664-9bf5-1b872466fd66", + "2b94c631-fca9-4892-a730-03ee529ffe2a", + "2b94c631-fca9-4892-a730-03ee529ffe2a", + "sub-sub-value", + ), + ], + ), + "prefix__tcatalog": ( + ["table_name"], + [ + ("prefix__t",), + ("prefix__t__sub_objects",), + ("prefix__t__sub_objects__sub_sub_objects",), + ], + ), + }, + expected_indexes=[ + ("prefix", "__id"), + ("prefix__t", "__id"), + ("prefix__t", "id"), + ("prefix__t__sub_objects", "__id"), + ("prefix__t__sub_objects", "id"), + ("prefix__t__sub_objects", "sub_objects__o"), + ("prefix__t__sub_objects", "sub_objects__id"), + ("prefix__t__sub_objects__sub_sub_objects", "__id"), + ("prefix__t__sub_objects__sub_sub_objects", "id"), + ("prefix__t__sub_objects__sub_sub_objects", "sub_objects__o"), + ("prefix__t__sub_objects__sub_sub_objects", "sub_objects__id"), + ( + "prefix__t__sub_objects__sub_sub_objects", + "sub_objects__sub_sub_objects__o", + ), + ( + "prefix__t__sub_objects__sub_sub_objects", + "sub_objects__sub_sub_objects__id", + ), + ], + ) + + +def case_nested_object() -> QueryTC: + return QueryTC( + Call( + "prefix", + json_depth=2, + returns={ + "purchaseOrders": [ + { + "id": "b096504a-3d54-4664-9bf5-1b872466fd66", + "value": "value", + "subObject": { + "id": "2b94c631-fca9-4892-a730-03ee529ffe2a", + "value": "sub-value", + }, + }, + ], + }, + ), + expected_tables=["prefix", "prefix__t", "prefix__tcatalog"], + expected_values={ + "prefix__t": ( + ["id", "value", "sub_object__id", "sub_object__value"], + [ + ( + "b096504a-3d54-4664-9bf5-1b872466fd66", + "value", + "2b94c631-fca9-4892-a730-03ee529ffe2a", + "sub-value", + ), + ], + ), + "prefix__tcatalog": ( + ["table_name"], + [("prefix__t",)], + ), + }, + expected_indexes=[ + ("prefix", "__id"), + ("prefix__t", "__id"), + ("prefix__t", "id"), + ("prefix__t", "sub_object__id"), + ], + ) + + +def case_doubly_nested_object() -> QueryTC: + return QueryTC( + Call( + "prefix", + json_depth=3, + returns={ + "purchaseOrders": [ + { + "id": "b096504a-3d54-4664-9bf5-1b872466fd66", + "value": "value", + "subObject": { + "id": "2b94c631-fca9-4892-a730-03ee529ffe2a", + "value": "sub-value", + "subSubObject": { + "id": "2b94c631-fca9-4892-a730-03ee529ffe2a", + "value": "sub-sub-value", + }, + }, + }, + ], + }, + ), + expected_tables=["prefix", "prefix__t", "prefix__tcatalog"], + expected_values={ + "prefix__t": ( + [ + "id", + "value", + "sub_object__id", + "sub_object__sub_sub_object__id", + "sub_object__sub_sub_object__value", + ], + [ + ( + "b096504a-3d54-4664-9bf5-1b872466fd66", + "value", + "2b94c631-fca9-4892-a730-03ee529ffe2a", + "2b94c631-fca9-4892-a730-03ee529ffe2a", + "sub-sub-value", + ), + ], + ), + "prefix__tcatalog": ( + ["table_name"], + 
[("prefix__t",)], + ), + }, + expected_indexes=[ + ("prefix", "__id"), + ("prefix__t", "__id"), + ("prefix__t", "id"), + ("prefix__t", "sub_object__id"), + ("prefix__t", "sub_object__sub_sub_object__id"), + ], + ) + + +def case_nested_object_underexpansion() -> QueryTC: + return QueryTC( + Call( + "prefix", + json_depth=1, + returns={ + "purchaseOrders": [ + { + "id": "b096504a-3d54-4664-9bf5-1b872466fd66", + "value": "value", + "subObject": { + "id": "2b94c631-fca9-4892-a730-03ee529ffe2a", + "value": "sub-value", + }, + }, + ], + }, + ), + expected_tables=["prefix", "prefix__t", "prefix__tcatalog"], + expected_values={ + "prefix__t": ( + ["id", "value", "sub_object"], + [ + ( + "b096504a-3d54-4664-9bf5-1b872466fd66", + "value", + json.dumps( + { + "id": "2b94c631-fca9-4892-a730-03ee529ffe2a", + "value": "sub-value", + }, + indent=4, + ), + ), + ], + ), + "prefix__tcatalog": ( + ["table_name"], + [("prefix__t",)], + ), + }, + ) + + +def case_id_generation() -> QueryTC: + return QueryTC( + Call( + "prefix", + json_depth=4, + returns={ + "purchaseOrders": [ + { + "id": "b096504a-3d54-4664-9bf5-1b872466fd66", + "subObjects": [ + { + "id": "2b94c631-fca9-4892-a730-03ee529ffe2a", + "subSubObjects": [ + { + "id": ("2b94c631-fca9-4892-a730-03ee529ffe2a"), + }, + { + "id": ("8516a913-8bf7-55a4-ab71-417aba9171c9"), + }, + ], + }, + { + "id": "b5d8cdc4-9441-487c-90cf-0c7ec97728eb", + "subSubObjects": [ + { + "id": ("13a24cc8-a15c-4158-abbd-4abf25c8815a"), + }, + { + "id": ("37344879-09ce-4cd8-976f-bf1a57c0cfa6"), + }, + ], + }, + ], + }, + ], + }, + ), + expected_tables=[ + "prefix", + "prefix__t", + "prefix__t__sub_objects", + "prefix__t__sub_objects__sub_sub_objects", + "prefix__tcatalog", + ], + expected_values={ + "prefix__t__sub_objects": ( + ["__id", "id", "sub_objects__o", "sub_objects__id"], + [ + ( + "1", + "b096504a-3d54-4664-9bf5-1b872466fd66", + "1", + "2b94c631-fca9-4892-a730-03ee529ffe2a", + ), + ( + "2", + "b096504a-3d54-4664-9bf5-1b872466fd66", + "2", + "b5d8cdc4-9441-487c-90cf-0c7ec97728eb", + ), + ], + ), + "prefix__t__sub_objects__sub_sub_objects": ( + ["__id", "sub_objects__o", "sub_objects__sub_sub_objects__o"], + [ + ("1", "1", "1"), + ("2", "1", "2"), + ("3", "2", "1"), + ("4", "2", "2"), + ], + ), + }, + ) + + +def case_indexing_id_like() -> QueryTC: + return QueryTC( + Call( + "prefix", + json_depth=4, + returns={ + "purchaseOrders": [ + { + "id": "b096504a-3d54-4664-9bf5-1b872466fd66", + "otherId": "b096504a-3d54-4664-9bf5-1b872466fd66", + "anIdButWithADifferentEnding": ( + "b096504a-3d54-4664-9bf5-1b872466fd66" + ), + }, + ], + }, + ), + expected_tables=[ + "prefix", + "prefix__t", + "prefix__tcatalog", + ], + expected_values={}, + expected_indexes=[ + ("prefix", "__id"), + ("prefix__t", "__id"), + ("prefix__t", "id"), + ("prefix__t", "other_id"), + ("prefix__t", "an_id_but_with_a_different_ending"), + ], + ) + + +@parametrize(json_depth=range(1, 2)) +def case_drop_raw(json_depth: int) -> QueryTC: + return QueryTC( + Call( + "prefix", + json_depth=json_depth, + keep_raw=False, + returns={ + "purchaseOrders": [ + { + "id": "b096504a-3d54-4664-9bf5-1b872466fd66", + "value": "value", + }, + ], + }, + ), + expected_tables=["prefix__t", "prefix__tcatalog"], + expected_values={ + "prefix__t": ( + ["id", "value"], + [("b096504a-3d54-4664-9bf5-1b872466fd66", "value")], + ), + "prefix__tcatalog": (["table_name"], [("prefix__t",)]), + }, + expected_indexes=[ + ("prefix__t", "__id"), + ("prefix__t", "id"), + ], + ) + + +# this case should be testing the FolioClient class +# but it 
isn't set up to mock the data properly right now
+def case_null_records() -> QueryTC:
+    return QueryTC(
+        Call(
+            "prefix",
+            json_depth=1,
+            returns={
+                "purchaseOrders": [
+                    {
+                        "id": "b096504a-3d54-4664-9bf5-1b872466fd66",
+                        "value": "value",
+                    },
+                    None,
+                ],
+            },
+        ),
+        expected_tables=["prefix", "prefix__t", "prefix__tcatalog"],
+        expected_values={},
+        expected_indexes=[
+            ("prefix", "__id"),
+            ("prefix__t", "__id"),
+            ("prefix__t", "id"),
+        ],
+    )
+
+
+def _arrange(
+    client_get_mock: MagicMock,
+    httpx_post_mock: MagicMock,
+    tc: QueryTC,
+) -> "ldlite.LDLite":
+    from ldlite import LDLite
+
+    uut = LDLite()
+    tc.patch_request_get(uut, httpx_post_mock, client_get_mock)
+    uut.connect_folio("https://doesnt.matter", "", "", "")
+    return uut
+
+
+def _act(uut: "ldlite.LDLite", tc: QueryTC) -> None:
+    for call in tc.calls_list:
+        uut.query(
+            table=call.prefix,
+            path="/patched",
+            json_depth=call.json_depth,
+            keep_raw=call.keep_raw,
+        )
+
+
+def _assert(
+    conn: "dbapi.DBAPIConnection",
+    res_schema: str,  # TODO: have schema be part of tc
+    tc: QueryTC,
+) -> None:
+    with closing(conn.cursor()) as cur:
+        cur.execute(
+            """
+            SELECT table_name
+            FROM information_schema.tables
+            WHERE table_schema=$1
+            """,
+            (res_schema,),
+        )
+        assert sorted([r[0] for r in cur.fetchall()]) == sorted(tc.expected_tables)
+
+        for table, (cols, values) in tc.expected_values.items():
+            cur.execute(
+                sql.SQL("SELECT {cols}::text FROM {table};")
+                .format(
+                    cols=sql.SQL("::text, ").join(
+                        [sql.Identifier(c) for c in cols],
+                    ),
+                    table=sql.Identifier(table),
+                )
+                .as_string(),
+            )
+            for v in values:
+                assert cur.fetchone() == v
+
+            assert cur.fetchone() is None
+
+
+@mock.patch("httpx_folio.auth.httpx.post")
+@mock.patch("httpx_folio.factories.httpx.Client.get")
+@parametrize_with_cases("tc", cases=".")
+def test_duckdb(
+    client_get_mock: MagicMock,
+    httpx_post_mock: MagicMock,
+    tc: QueryTC,
+) -> None:
+    uut = _arrange(client_get_mock, httpx_post_mock, tc)
+    dsn = f":memory:{tc.db}"
+    uut.connect_db(dsn)
+
+    _act(uut, tc)
+
+    with duckdb.connect(dsn) as conn:
+        _assert(cast("dbapi.DBAPIConnection", conn), "main", tc)
+
+
+@mock.patch("httpx_folio.auth.httpx.post")
+@mock.patch("httpx_folio.factories.httpx.Client.get")
+@parametrize_with_cases("tc", cases=".")
+def test_postgres(
+    client_get_mock: MagicMock,
+    httpx_post_mock: MagicMock,
+    pg_dsn: None | Callable[[str], str],
+    tc: QueryTC,
+) -> None:
+    if pg_dsn is None:
+        pytest.skip("Specify the pg host using --pg-host to run")
+
+    uut = _arrange(client_get_mock, httpx_post_mock, tc)
+    dsn = pg_dsn(tc.db)
+    uut.connect_db_postgresql(dsn)
+
+    _act(uut, tc)
+
+    with psycopg.connect(dsn, cursor_factory=psycopg.RawCursor) as conn:
+        _assert(cast("dbapi.DBAPIConnection", conn), "public", tc)
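
Note on the shared `_assert` helper: it can reuse one query string for both backends because both accept PostgreSQL-style numbered placeholders. DuckDB's Python API supports `$1` positional parameters natively, and psycopg's `RawCursor` (psycopg >= 3.2) passes `$1` through to the server unchanged. A minimal sketch of the DuckDB side (the bound value is illustrative):
```
# Both drivers bind the same "$1" placeholder; only the driver differs.
#   duckdb:  conn.execute("SELECT $1", (val,))           # native $1 support
#   psycopg: psycopg.RawCursor sends "$1" to the server unchanged
import duckdb

print(duckdb.connect().execute("SELECT $1", ("main",)).fetchone())  # ('main',)
```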
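
The column list in `_assert` uses a small `psycopg.sql` composition trick: the `"::text, "` join separator casts every column except the last, and the trailing `::text` in the template casts the last one. A standalone sketch of what it renders (table and column names are illustrative):
```
from psycopg import sql

# For cols = ["id", "value"], the separator supplies the cast for "id" and
# the template suffix supplies it for "value".
q = sql.SQL("SELECT {cols}::text FROM {table};").format(
    cols=sql.SQL("::text, ").join([sql.Identifier(c) for c in ["id", "value"]]),
    table=sql.Identifier("prefix__t"),
)
# q.as_string() (psycopg >= 3.2; older versions need a connection as context)
# renders: SELECT "id"::text, "value"::text FROM "prefix__t";
```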
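
Finally, a sketch of how a new scenario plugs into this harness. Everything below is hypothetical — the case name, payload, and expectations are not part of this change — but any module-level `case_*` function returning a `QueryTC` is collected by `@parametrize_with_cases("tc", cases=".")` and runs against both `test_duckdb` and `test_postgres` automatically:
```
# Hypothetical case for tests/test_query.py; the expectations are assumptions
# and would need to be adjusted to the observed ldlite behavior.
def case_empty_page() -> QueryTC:
    return QueryTC(
        Call(
            "prefix",
            json_depth=1,
            returns={"purchaseOrders": []},  # a page with zero records
        ),
        # assumption: the raw and transformed tables are still created
        expected_tables=["prefix", "prefix__t", "prefix__tcatalog"],
        expected_values={},
    )
```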