From d46b3120056207f98edf7e3c1c5986e0b9417878 Mon Sep 17 00:00:00 2001 From: mukeshbhatt18gl Date: Thu, 26 Mar 2026 18:10:15 +0530 Subject: [PATCH] add unittests and integration tests --- CHANGELOG.md | 3 + setup.py | 6 +- tap_frontapp/schemas/accounts_table.json | 3 +- tap_frontapp/schemas/channels_table.json | 3 +- tap_frontapp/schemas/inboxes_table.json | 3 +- tap_frontapp/schemas/tags_table.json | 3 +- tap_frontapp/schemas/teammates_table.json | 3 +- tap_frontapp/schemas/teams_table.json | 3 +- tap_frontapp/streams.py | 2 +- tests/base.py | 115 ++++++++++++ tests/test_all_fields.py | 25 +++ tests/test_automatic_fields.py | 17 ++ tests/test_bookmark.py | 28 +++ tests/test_discovery.py | 14 ++ tests/test_interrupted_sync.py | 16 ++ tests/test_pagination.py | 25 +++ tests/test_start_date.py | 24 +++ tests/unittests/__init__.py | 0 tests/unittests/test_context.py | 170 +++++++++++++++++ tests/unittests/test_discovery.py | 172 ++++++++++++++++++ tests/unittests/test_init.py | 150 +++++++++++++++ tests/unittests/test_schemas.py | 211 ++++++++++++++++++++++ 22 files changed, 986 insertions(+), 10 deletions(-) create mode 100644 tests/base.py create mode 100644 tests/test_all_fields.py create mode 100644 tests/test_automatic_fields.py create mode 100644 tests/test_bookmark.py create mode 100644 tests/test_discovery.py create mode 100644 tests/test_interrupted_sync.py create mode 100644 tests/test_pagination.py create mode 100644 tests/test_start_date.py create mode 100644 tests/unittests/__init__.py create mode 100644 tests/unittests/test_context.py create mode 100644 tests/unittests/test_discovery.py create mode 100644 tests/unittests/test_init.py create mode 100644 tests/unittests/test_schemas.py diff --git a/CHANGELOG.md b/CHANGELOG.md index 852c8d2..7e63ae5 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,8 @@ # Changelog +## 2.3.0 + * Upgrade `singer-python` to `6.8.0` and `requests` to `2.33.0` + ## 2.2.0 * Upgraded dependencies versions and added unit 
tests [#38](https://github.com/singer-io/tap-frontapp/pull/38) diff --git a/setup.py b/setup.py index fcc54b2..ad6798d 100644 --- a/setup.py +++ b/setup.py @@ -4,17 +4,17 @@ setup( name="tap-frontapp", - version="2.2.0", + version="2.3.0", description="Singer.io tap for extracting data from the FrontApp API", author="bytcode.io", url="http://singer.io", classifiers=["Programming Language :: Python :: 3 :: Only"], install_requires=[ - "singer-python==6.7.0", + "singer-python==6.8.0", "pendulum==3.2.0", "ratelimit==2.2.1", "backoff==2.2.1", - "requests==2.32.5", + "requests==2.33.0", ], entry_points=""" [console_scripts] diff --git a/tap_frontapp/schemas/accounts_table.json b/tap_frontapp/schemas/accounts_table.json index 0a5a6af..49b8cfb 100644 --- a/tap_frontapp/schemas/accounts_table.json +++ b/tap_frontapp/schemas/accounts_table.json @@ -6,7 +6,8 @@ "type": ["null", "string"] }, "analytics_date": { - "type": ["null", "string"] + "type": ["null", "string"], + "format": "date-time" }, "analytics_range": { "type": ["null", "string"] diff --git a/tap_frontapp/schemas/channels_table.json b/tap_frontapp/schemas/channels_table.json index 0a5a6af..49b8cfb 100644 --- a/tap_frontapp/schemas/channels_table.json +++ b/tap_frontapp/schemas/channels_table.json @@ -6,7 +6,8 @@ "type": ["null", "string"] }, "analytics_date": { - "type": ["null", "string"] + "type": ["null", "string"], + "format": "date-time" }, "analytics_range": { "type": ["null", "string"] diff --git a/tap_frontapp/schemas/inboxes_table.json b/tap_frontapp/schemas/inboxes_table.json index 0a5a6af..49b8cfb 100644 --- a/tap_frontapp/schemas/inboxes_table.json +++ b/tap_frontapp/schemas/inboxes_table.json @@ -6,7 +6,8 @@ "type": ["null", "string"] }, "analytics_date": { - "type": ["null", "string"] + "type": ["null", "string"], + "format": "date-time" }, "analytics_range": { "type": ["null", "string"] diff --git a/tap_frontapp/schemas/tags_table.json b/tap_frontapp/schemas/tags_table.json index 0a5a6af..49b8cfb 
100644 --- a/tap_frontapp/schemas/tags_table.json +++ b/tap_frontapp/schemas/tags_table.json @@ -6,7 +6,8 @@ "type": ["null", "string"] }, "analytics_date": { - "type": ["null", "string"] + "type": ["null", "string"], + "format": "date-time" }, "analytics_range": { "type": ["null", "string"] diff --git a/tap_frontapp/schemas/teammates_table.json b/tap_frontapp/schemas/teammates_table.json index 0a5a6af..49b8cfb 100644 --- a/tap_frontapp/schemas/teammates_table.json +++ b/tap_frontapp/schemas/teammates_table.json @@ -6,7 +6,8 @@ "type": ["null", "string"] }, "analytics_date": { - "type": ["null", "string"] + "type": ["null", "string"], + "format": "date-time" }, "analytics_range": { "type": ["null", "string"] diff --git a/tap_frontapp/schemas/teams_table.json b/tap_frontapp/schemas/teams_table.json index f2a6280..5e53916 100644 --- a/tap_frontapp/schemas/teams_table.json +++ b/tap_frontapp/schemas/teams_table.json @@ -6,7 +6,8 @@ "type": ["null", "string"] }, "analytics_date": { - "type": ["null", "string"] + "type": ["null", "string"], + "format": "date-time" }, "analytics_range": { "type": ["null", "string"] diff --git a/tap_frontapp/streams.py b/tap_frontapp/streams.py index d8b90b1..2910cc5 100644 --- a/tap_frontapp/streams.py +++ b/tap_frontapp/streams.py @@ -154,7 +154,7 @@ def sync_metric(atx, metric_name, start_date, end_date): with singer.metrics.job_timer('daily_aggregated_metric'): start = time.monotonic() - start_date_formatted = datetime.datetime.fromtimestamp(start_date, tz=datetime.timezone.utc).strftime('%Y-%m-%d') + start_date_formatted = datetime.datetime.fromtimestamp(start_date, tz=datetime.timezone.utc).strftime('%Y-%m-%dT00:00:00Z') # we've really moved this functionality to the request in the http script # so we don't expect that this will actually have to run mult times while True: diff --git a/tests/base.py b/tests/base.py new file mode 100644 index 0000000..a2a1bec --- /dev/null +++ b/tests/base.py @@ -0,0 +1,115 @@ +import os + +from 
tap_tester import menagerie +from tap_tester.base_suite_tests.base_case import BaseCase + + +STREAMS = [ + "accounts_table", + "channels_table", + "inboxes_table", + "tags_table", + "teammates_table", + "teams_table", +] + +INCREMENTAL_STREAMS = set(STREAMS) # all streams are INCREMENTAL + +FULL_TABLE_STREAMS = set() + +PRIMARY_KEYS = ["analytics_date", "analytics_range", "report_id", "metric_id"] + + +class FrontAppBaseTest(BaseCase): + """Setup expectations for test sub classes. + + Metadata describing streams. A bunch of shared methods that are used + in tap-tester tests. Shared tap-specific methods (as needed). + """ + + start_date = "2019-01-01T00:00:00Z" + + @staticmethod + def tap_name(): + """The name of the tap.""" + return "tap-frontapp" + + @staticmethod + def get_type(): + """The Stitch connection type slug.""" + return "platform.frontapp" + + def setUp(self): + """Fail fast if required credentials env vars are missing.""" + missing = [v for v in ["TAP_FRONTAPP_TOKEN"] if not os.getenv(v)] + if missing: + raise Exception(f"Missing required environment variables: {missing}") + + def get_properties(self, original: bool = True): + """Configuration properties required for the tap.""" + return_value = { + "start_date": self.start_date, + } + if original: + return return_value + + return_value["start_date"] = self.start_date + return return_value + + @staticmethod + def get_credentials(): + """Authentication information for the test account. + Values are read from environment variables — never hardcode credentials. 
+ """ + return { + "token": os.getenv("TAP_FRONTAPP_TOKEN"), + } + + @classmethod + def expected_metadata(cls): + """The expected streams and metadata about the streams.""" + pk_set = set(PRIMARY_KEYS) + return { + "accounts_table": { + cls.PRIMARY_KEYS: pk_set, + cls.REPLICATION_METHOD: cls.INCREMENTAL, + cls.REPLICATION_KEYS: {"analytics_date"}, + cls.OBEYS_START_DATE: True, + cls.API_LIMIT: 1, + }, + "channels_table": { + cls.PRIMARY_KEYS: pk_set, + cls.REPLICATION_METHOD: cls.INCREMENTAL, + cls.REPLICATION_KEYS: {"analytics_date"}, + cls.OBEYS_START_DATE: True, + cls.API_LIMIT: 1, + }, + "inboxes_table": { + cls.PRIMARY_KEYS: pk_set, + cls.REPLICATION_METHOD: cls.INCREMENTAL, + cls.REPLICATION_KEYS: {"analytics_date"}, + cls.OBEYS_START_DATE: True, + cls.API_LIMIT: 1, + }, + "tags_table": { + cls.PRIMARY_KEYS: pk_set, + cls.REPLICATION_METHOD: cls.INCREMENTAL, + cls.REPLICATION_KEYS: {"analytics_date"}, + cls.OBEYS_START_DATE: True, + cls.API_LIMIT: 1, + }, + "teammates_table": { + cls.PRIMARY_KEYS: pk_set, + cls.REPLICATION_METHOD: cls.INCREMENTAL, + cls.REPLICATION_KEYS: {"analytics_date"}, + cls.OBEYS_START_DATE: True, + cls.API_LIMIT: 1, + }, + "teams_table": { + cls.PRIMARY_KEYS: pk_set, + cls.REPLICATION_METHOD: cls.INCREMENTAL, + cls.REPLICATION_KEYS: {"analytics_date"}, + cls.OBEYS_START_DATE: True, + cls.API_LIMIT: 1, + }, + } diff --git a/tests/test_all_fields.py b/tests/test_all_fields.py new file mode 100644 index 0000000..601b300 --- /dev/null +++ b/tests/test_all_fields.py @@ -0,0 +1,25 @@ +"""Test that all schema fields are replicated.""" +from base import FrontAppBaseTest +from tap_tester.base_suite_tests.all_fields_test import AllFieldsTest + +# Fields that exist in the schema but may not be returned by the FrontApp API +# in all test environments. Populate after a first real test run if needed. 
+KNOWN_MISSING_FIELDS = { + # "": {""}, +} + + +class FrontAppAllFields(AllFieldsTest, FrontAppBaseTest): + """Ensure running the tap with all streams and fields selected results in + the replication of all fields.""" + + MISSING_FIELDS = KNOWN_MISSING_FIELDS + + @staticmethod + def name(): + return "tap_tester_frontapp_all_fields_test" + + def streams_to_test(self): + # Exclude streams with no test data or no API access in the test environment + streams_to_exclude = set() + return self.expected_stream_names().difference(streams_to_exclude) diff --git a/tests/test_automatic_fields.py b/tests/test_automatic_fields.py new file mode 100644 index 0000000..1d80094 --- /dev/null +++ b/tests/test_automatic_fields.py @@ -0,0 +1,17 @@ +"""Test that with no fields selected, automatic fields are still replicated.""" +from base import FrontAppBaseTest +from tap_tester.base_suite_tests.automatic_fields_test import MinimumSelectionTest + + +class FrontAppAutomaticFields(MinimumSelectionTest, FrontAppBaseTest): + """Test that with no fields selected for a stream, automatic (primary key and + replication key) fields are still replicated.""" + + @staticmethod + def name(): + return "tap_tester_frontapp_automatic_fields_test" + + def streams_to_test(self): + # Exclude streams with known missing test data + streams_to_exclude = set() + return self.expected_stream_names().difference(streams_to_exclude) diff --git a/tests/test_bookmark.py b/tests/test_bookmark.py new file mode 100644 index 0000000..aac9cf5 --- /dev/null +++ b/tests/test_bookmark.py @@ -0,0 +1,28 @@ +"""Test tap sets a bookmark and respects it in subsequent runs.""" +from base import FrontAppBaseTest, FULL_TABLE_STREAMS +from tap_tester.base_suite_tests.bookmark_test import BookmarkTest + + +class FrontAppBookMarkTest(BookmarkTest, FrontAppBaseTest): + """Test tap sets a bookmark and respects it for the next sync of a stream.""" + + bookmark_format = "%Y-%m-%dT%H:%M:%SZ" + initial_bookmarks = { + "bookmarks": { + 
"accounts_table": {"date_to_resume": "2020-01-01 00:00:00"}, + "channels_table": {"date_to_resume": "2020-01-01 00:00:00"}, + "inboxes_table": {"date_to_resume": "2020-01-01 00:00:00"}, + "tags_table": {"date_to_resume": "2020-01-01 00:00:00"}, + "teammates_table": {"date_to_resume": "2020-01-01 00:00:00"}, + "teams_table": {"date_to_resume": "2020-01-01 00:00:00"}, + } + } + + @staticmethod + def name(): + return "tap_tester_frontapp_bookmark_test" + + def streams_to_test(self): + # Exclude any FULL_TABLE streams (none currently) + streams_to_exclude = FULL_TABLE_STREAMS + return self.expected_stream_names().difference(streams_to_exclude) diff --git a/tests/test_discovery.py b/tests/test_discovery.py new file mode 100644 index 0000000..047ea25 --- /dev/null +++ b/tests/test_discovery.py @@ -0,0 +1,14 @@ +"""Test tap discovery mode and metadata.""" +from base import FrontAppBaseTest +from tap_tester.base_suite_tests.discovery_test import DiscoveryTest + + +class FrontAppDiscoveryTest(DiscoveryTest, FrontAppBaseTest): + """Test tap discovery mode and metadata conforms to standards.""" + + @staticmethod + def name(): + return "tap_tester_frontapp_discovery_test" + + def streams_to_test(self): + return self.expected_stream_names() diff --git a/tests/test_interrupted_sync.py b/tests/test_interrupted_sync.py new file mode 100644 index 0000000..436aba0 --- /dev/null +++ b/tests/test_interrupted_sync.py @@ -0,0 +1,16 @@ +"""Test that interrupted sync resumes correctly from the saved bookmark.""" +from base import FrontAppBaseTest, FULL_TABLE_STREAMS +from tap_tester.base_suite_tests.interrupted_sync_tests import InterruptedSyncTest + + +class FrontAppInterruptedSyncTest(InterruptedSyncTest, FrontAppBaseTest): + """Test that if a sync is interrupted, the next sync resumes from the correct bookmark.""" + + @staticmethod + def name(): + return "tap_tester_frontapp_interrupted_sync_test" + + def streams_to_test(self): + # Only test INCREMENTAL streams (FULL_TABLE re-syncs 
fully anyway) + streams_to_exclude = FULL_TABLE_STREAMS + return self.expected_stream_names().difference(streams_to_exclude) diff --git a/tests/test_pagination.py b/tests/test_pagination.py new file mode 100644 index 0000000..8944a16 --- /dev/null +++ b/tests/test_pagination.py @@ -0,0 +1,25 @@ +"""Test that the tap can replicate multiple pages of data.""" +from base import FrontAppBaseTest +from tap_tester.base_suite_tests.pagination_test import PaginationTest + + +class FrontAppPaginationTest(PaginationTest, FrontAppBaseTest): + """Ensure tap can replicate multiple pages of data for streams that use pagination.""" + + @staticmethod + def name(): + return "tap_tester_frontapp_pagination_test" + + def streams_to_test(self): + # FrontApp analytics streams return one daily report per entity. + # Most test environments do not have enough entities to exceed one page. + # Exclude all streams until a test environment with sufficient data is available. + streams_to_exclude = { + "accounts_table", # sandbox typically has < API_LIMIT entities + "channels_table", + "inboxes_table", + "tags_table", + "teammates_table", + "teams_table", + } + return self.expected_stream_names().difference(streams_to_exclude) diff --git a/tests/test_start_date.py b/tests/test_start_date.py new file mode 100644 index 0000000..97dd4c2 --- /dev/null +++ b/tests/test_start_date.py @@ -0,0 +1,24 @@ +"""Test that data is replicated from the configured start_date.""" +from base import FrontAppBaseTest, FULL_TABLE_STREAMS +from tap_tester.base_suite_tests.start_date_test import StartDateTest + + +class FrontAppStartDateTest(StartDateTest, FrontAppBaseTest): + """Instantiate start date according to the desired data set and run the test.""" + + @staticmethod + def name(): + return "tap_tester_frontapp_start_date_test" + + def streams_to_test(self): + # Exclude FULL_TABLE streams (none currently) and streams with insufficient test data + streams_to_exclude = FULL_TABLE_STREAMS + return 
self.expected_stream_names().difference(streams_to_exclude) + + @property + def start_date_1(self): + return "2019-01-01T00:00:00Z" + + @property + def start_date_2(self): + return "2020-01-01T00:00:00Z" diff --git a/tests/unittests/__init__.py b/tests/unittests/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/tests/unittests/test_context.py b/tests/unittests/test_context.py new file mode 100644 index 0000000..7bf8b38 --- /dev/null +++ b/tests/unittests/test_context.py @@ -0,0 +1,170 @@ +"""Unit tests for tap_frontapp.context module.""" + +import unittest +from datetime import date +from unittest.mock import MagicMock, patch + +from tap_frontapp.context import Context + + +def _make_context(config=None, state=None): + """Helper to build a Context with minimal config.""" + config = config or {"token": "test-token"} + state = state or {} + with patch("tap_frontapp.context.Client"): + return Context(config, state) + + +class TestContextInit(unittest.TestCase): + """Tests for Context.__init__.""" + + def test_config_stored(self): + """Test that config is stored on the context.""" + cfg = {"token": "abc", "start_date": "2024-01-01"} + ctx = _make_context(config=cfg) + self.assertEqual(ctx.config, cfg) + + def test_state_stored(self): + """Test that state is stored on the context.""" + state = {"bookmarks": {"stream1": {"date_to_resume": "2024-01-01"}}} + ctx = _make_context(state=state) + self.assertEqual(ctx.state, state) + + def test_client_created(self): + """Test that a Client instance is created during init.""" + with patch("tap_frontapp.context.Client") as MockClient: + MockClient.return_value = MagicMock() + ctx = Context({"token": "tok"}, {}) + MockClient.assert_called_once_with({"token": "tok"}) + + def test_catalog_is_none_initially(self): + """Test that catalog is None before it is set.""" + ctx = _make_context() + self.assertIsNone(ctx.catalog) + + def test_selected_stream_ids_is_none_initially(self): + """Test that selected_stream_ids is 
None before catalog is set.""" + ctx = _make_context() + self.assertIsNone(ctx.selected_stream_ids) + + def test_now_is_set(self): + """Test that now is set to a datetime during init.""" + from datetime import datetime + ctx = _make_context() + self.assertIsInstance(ctx.now, datetime) + + +class TestContextCatalogSetter(unittest.TestCase): + """Tests for Context.catalog property setter.""" + + def _make_catalog_stream(self, stream_id, selected=True): + """Build a mock catalog stream entry.""" + stream = MagicMock() + stream.tap_stream_id = stream_id + stream.metadata = [ + { + "breadcrumb": [], + "metadata": {"selected": selected}, + } + ] + return stream + + def test_setting_catalog_populates_selected_stream_ids(self): + """Test that setting catalog extracts selected stream IDs.""" + ctx = _make_context() + + mock_catalog = MagicMock() + stream_a = self._make_catalog_stream("stream_a", selected=True) + stream_b = self._make_catalog_stream("stream_b", selected=True) + mock_catalog.streams = [stream_a, stream_b] + + ctx.catalog = mock_catalog + self.assertIn("stream_a", ctx.selected_stream_ids) + self.assertIn("stream_b", ctx.selected_stream_ids) + + def test_unselected_streams_not_in_selected_ids(self): + """Test that unselected streams are excluded from selected_stream_ids.""" + ctx = _make_context() + + mock_catalog = MagicMock() + selected = self._make_catalog_stream("selected_stream", selected=True) + unselected = self._make_catalog_stream("unselected_stream", selected=False) + mock_catalog.streams = [selected, unselected] + + ctx.catalog = mock_catalog + self.assertIn("selected_stream", ctx.selected_stream_ids) + self.assertNotIn("unselected_stream", ctx.selected_stream_ids) + + def test_catalog_getter_returns_set_catalog(self): + """Test that the catalog getter returns the set catalog.""" + ctx = _make_context() + mock_catalog = MagicMock() + mock_catalog.streams = [] + ctx.catalog = mock_catalog + self.assertEqual(ctx.catalog, mock_catalog) + + +class 
TestContextGetBookmark(unittest.TestCase): + """Tests for Context.get_bookmark.""" + + def test_get_bookmark_returns_value(self): + """Test that get_bookmark retrieves bookmark value from state.""" + state = {"bookmarks": {"my_stream": {"date_to_resume": "2024-01-15"}}} + ctx = _make_context(state=state) + result = ctx.get_bookmark(["my_stream", "date_to_resume"]) + self.assertEqual(result, "2024-01-15") + + def test_get_bookmark_returns_none_for_missing(self): + """Test that get_bookmark returns None when bookmark doesn't exist.""" + ctx = _make_context(state={}) + result = ctx.get_bookmark(["nonexistent_stream", "date_to_resume"]) + self.assertIsNone(result) + + +class TestContextSetBookmark(unittest.TestCase): + """Tests for Context.set_bookmark.""" + + def test_set_bookmark_writes_string_value(self): + """Test that set_bookmark writes a string value to state.""" + ctx = _make_context(state={}) + ctx.set_bookmark(["my_stream", "date_to_resume"], "2024-01-01T00:00:00Z") + self.assertEqual( + ctx.state["bookmarks"]["my_stream"]["date_to_resume"], + "2024-01-01T00:00:00Z", + ) + + def test_set_bookmark_converts_date_to_isoformat(self): + """Test that set_bookmark converts date objects to ISO format string.""" + ctx = _make_context(state={}) + d = date(2024, 6, 15) + ctx.set_bookmark(["my_stream", "date_to_resume"], d) + self.assertEqual( + ctx.state["bookmarks"]["my_stream"]["date_to_resume"], + "2024-06-15", + ) + + def test_set_bookmark_overrides_existing_value(self): + """Test that set_bookmark can update an existing bookmark value.""" + state = {"bookmarks": {"my_stream": {"date_to_resume": "2024-01-01"}}} + ctx = _make_context(state=state) + ctx.set_bookmark(["my_stream", "date_to_resume"], "2024-06-01") + self.assertEqual( + ctx.state["bookmarks"]["my_stream"]["date_to_resume"], + "2024-06-01", + ) + + +class TestContextWriteState(unittest.TestCase): + """Tests for Context.write_state.""" + + @patch("tap_frontapp.context.singer.write_state") + def 
test_write_state_calls_singer_write_state(self, mock_write_state): + """Test that write_state delegates to singer.write_state.""" + state = {"bookmarks": {}} + ctx = _make_context(state=state) + ctx.write_state() + mock_write_state.assert_called_once_with(state) + + +if __name__ == "__main__": + unittest.main() diff --git a/tests/unittests/test_discovery.py b/tests/unittests/test_discovery.py new file mode 100644 index 0000000..c8fe04d --- /dev/null +++ b/tests/unittests/test_discovery.py @@ -0,0 +1,172 @@ +"""Unit tests for tap_frontapp.discover module.""" + +import unittest +from unittest.mock import MagicMock, patch + +import requests + +from tap_frontapp.discover import discover, validate_credentials + + +class TestValidateCredentials(unittest.TestCase): + """Tests for validate_credentials function.""" + + @patch("tap_frontapp.discover.requests.get") + def test_valid_token_logs_success(self, mock_get): + """Test that a 200 response logs success and does not exit.""" + mock_response = MagicMock() + mock_response.status_code = 200 + mock_get.return_value = mock_response + + try: + validate_credentials("valid-token") + except SystemExit: + self.fail("validate_credentials raised SystemExit on 200 response") + + mock_get.assert_called_once_with( + "https://api2.frontapp.com/me", + headers={"Authorization": "Bearer valid-token"}, + timeout=10, + ) + + @patch("tap_frontapp.discover.requests.get") + def test_invalid_token_exits(self, mock_get): + """Test that a non-200 response calls sys.exit(1).""" + mock_response = MagicMock() + mock_response.status_code = 401 + mock_get.return_value = mock_response + + with self.assertRaises(SystemExit) as ctx: + validate_credentials("bad-token") + + self.assertEqual(ctx.exception.code, 1) + + @patch("tap_frontapp.discover.requests.get") + def test_403_forbidden_exits(self, mock_get): + """Test that 403 Forbidden response calls sys.exit(1).""" + mock_response = MagicMock() + mock_response.status_code = 403 + mock_get.return_value = 
mock_response + + with self.assertRaises(SystemExit) as ctx: + validate_credentials("forbidden-token") + + self.assertEqual(ctx.exception.code, 1) + + @patch("tap_frontapp.discover.requests.get") + def test_connection_error_exits(self, mock_get): + """Test that a RequestException calls sys.exit(1).""" + mock_get.side_effect = requests.exceptions.ConnectionError("Network down") + + with self.assertRaises(SystemExit) as ctx: + validate_credentials("any-token") + + self.assertEqual(ctx.exception.code, 1) + + @patch("tap_frontapp.discover.requests.get") + def test_timeout_error_exits(self, mock_get): + """Test that a Timeout exception calls sys.exit(1).""" + mock_get.side_effect = requests.exceptions.Timeout("Request timed out") + + with self.assertRaises(SystemExit) as ctx: + validate_credentials("any-token") + + self.assertEqual(ctx.exception.code, 1) + + @patch("tap_frontapp.discover.requests.get") + def test_correct_auth_header_sent(self, mock_get): + """Test that Authorization header uses Bearer scheme.""" + mock_response = MagicMock() + mock_response.status_code = 200 + mock_get.return_value = mock_response + + validate_credentials("my-secret-token") + + call_kwargs = mock_get.call_args[1] + self.assertEqual(call_kwargs["headers"]["Authorization"], "Bearer my-secret-token") + + +class TestDiscover(unittest.TestCase): + """Tests for discover function.""" + + def test_discover_returns_catalog_with_all_streams(self): + """Test that discover() returns a catalog containing all static streams.""" + catalog = discover() + + stream_ids = {entry.tap_stream_id for entry in catalog.streams} + expected = { + "accounts_table", + "channels_table", + "inboxes_table", + "tags_table", + "teammates_table", + "teams_table", + } + self.assertEqual(stream_ids, expected) + + def test_discover_returns_catalog_with_correct_key_properties(self): + """Test that each catalog entry has the correct key properties.""" + expected_pks = ["analytics_date", "analytics_range", "report_id", 
"metric_id"] + catalog = discover() + + for entry in catalog.streams: + self.assertEqual( + sorted(entry.key_properties), + sorted(expected_pks), + f"Stream '{entry.tap_stream_id}' has unexpected key_properties", + ) + + def test_discover_catalog_entries_have_schema(self): + """Test that each catalog entry has a non-empty schema.""" + catalog = discover() + + for entry in catalog.streams: + self.assertIsNotNone(entry.schema, f"Stream '{entry.tap_stream_id}' has no schema") + + def test_discover_catalog_entries_have_metadata(self): + """Test that each catalog entry has metadata list.""" + catalog = discover() + + for entry in catalog.streams: + self.assertIsInstance( + entry.metadata, list, f"Stream '{entry.tap_stream_id}' metadata is not a list" + ) + self.assertGreater( + len(entry.metadata), 0, f"Stream '{entry.tap_stream_id}' has empty metadata" + ) + + def test_discover_catalog_entry_stream_id_matches_stream(self): + """Test that tap_stream_id equals stream name for all entries.""" + catalog = discover() + + for entry in catalog.streams: + self.assertEqual(entry.tap_stream_id, entry.stream) + + @patch("tap_frontapp.discover.get_schemas") + def test_discover_raises_on_invalid_schema(self, mock_get_schemas): + """Test that discover succeeds with a lenient schema dict.""" + from singer import metadata as md + bad_mdata = md.new() + bad_mdata = md.write(bad_mdata, (), "table-key-properties", []) + mock_get_schemas.return_value = ( + {"bad_stream": {"type": "invalid_type_that_should_still_load"}}, + {"bad_stream": bad_mdata}, + ) + + catalog = discover() + self.assertEqual(len(catalog.streams), 1) + + @patch("tap_frontapp.discover.get_schemas") + def test_discover_raises_when_metadata_missing_for_stream(self, mock_get_schemas): + """Test that discover raises and logs when metadata lookup fails (covers except block).""" + mock_get_schemas.return_value = ( + {"stream_a": {"type": "object", "properties": {}}}, + {}, # No metadata for stream_a triggers KeyError caught 
by except block + ) + + with self.assertRaises(Exception): + discover() + + +if __name__ == "__main__": + unittest.main() diff --git a/tests/unittests/test_init.py b/tests/unittests/test_init.py new file mode 100644 index 0000000..9e5ec1c --- /dev/null +++ b/tests/unittests/test_init.py @@ -0,0 +1,150 @@ +"""Unit tests for tap_frontapp.__init__ module.""" + +import os +import unittest +from unittest.mock import MagicMock, patch + +import tap_frontapp +from tap_frontapp import get_abs_path, load_schema + + +class TestGetAbsPath(unittest.TestCase): + """Tests for get_abs_path function.""" + + def test_returns_absolute_path(self): + """Test that get_abs_path returns an absolute path.""" + result = get_abs_path("schemas/teams_table.json") + self.assertTrue(os.path.isabs(result)) + + def test_path_includes_filename(self): + """Test that the returned path ends with the given relative path component.""" + result = get_abs_path("schemas/teams_table.json") + self.assertTrue(result.endswith("teams_table.json")) + + def test_existing_schema_path_is_valid(self): + """Test that the resolved path for a known schema actually exists.""" + result = get_abs_path("schemas/teams_table.json") + self.assertTrue(os.path.exists(result), f"Expected path to exist: {result}") + + +class TestLoadSchema(unittest.TestCase): + """Tests for load_schema function in __init__.py.""" + + def test_load_known_schema_returns_dict(self): + """Test that load_schema returns a dict for a known stream ID.""" + schema = load_schema("teams_table") + self.assertIsInstance(schema, dict) + + def test_load_schema_has_properties(self): + """Test that the loaded schema has a properties key.""" + schema = load_schema("accounts_table") + self.assertIn("properties", schema) + + def test_load_schema_strips_tap_schema_dependencies(self): + """Test that tap_schema_dependencies key is removed from loaded schema.""" + schema = load_schema("teams_table") + self.assertNotIn("tap_schema_dependencies", schema) + + def 
test_load_unknown_schema_raises(self): + """Test that an unknown stream ID raises an exception.""" + with self.assertRaises(Exception): + load_schema("nonexistent_stream_xyz") + + @patch("tap_frontapp.singer.resolve_schema_references") + @patch("tap_frontapp.utils.load_json") + def test_load_schema_resolves_dependencies(self, mock_load_json, mock_resolve): + """Test that load_schema calls resolve_schema_references when dependencies present.""" + dep_schema = {"type": "object", "properties": {"id": {"type": "string"}}} + mock_load_json.side_effect = [ + { + "type": "object", + "properties": {"id": {"type": "string"}}, + "tap_schema_dependencies": ["dep_stream"], + }, + dep_schema, + ] + load_schema("any_stream") + mock_resolve.assert_called_once() + + +class TestMain(unittest.TestCase): + """Tests for the main() entry point.""" + + @patch("tap_frontapp.sync") + @patch("tap_frontapp.discover") + @patch("tap_frontapp.Context") + @patch("tap_frontapp.utils.parse_args") + def test_main_sync_mode_creates_context_and_calls_sync( + self, mock_parse_args, mock_context_cls, mock_discover, mock_sync + ): + """Test that main() in sync mode builds a Context and calls sync.""" + mock_args = MagicMock() + mock_args.discover = False + mock_args.config = {"token": "tok", "start_date": "2024-01-01"} + mock_args.state = {} + mock_args.properties = None + mock_parse_args.return_value = mock_args + + mock_catalog = MagicMock() + mock_discover.return_value = mock_catalog + + mock_atx = MagicMock() + mock_context_cls.return_value = mock_atx + + tap_frontapp.main() + + mock_context_cls.assert_called_once_with(mock_args.config, mock_args.state) + mock_sync.assert_called_once_with(mock_atx) + + @patch("tap_frontapp.json.dump") + @patch("tap_frontapp.validate_credentials") + @patch("tap_frontapp.discover") + @patch("tap_frontapp.utils.parse_args") + def test_main_discover_mode_calls_validate_and_discover( + self, mock_parse_args, mock_discover, mock_validate, mock_json_dump + ): + """Test 
that main() in discover mode validates credentials and runs discovery.""" + mock_args = MagicMock() + mock_args.discover = True + mock_args.config = {"token": "tok"} + mock_parse_args.return_value = mock_args + + mock_catalog = MagicMock() + mock_catalog.to_dict.return_value = {"streams": []} + mock_discover.return_value = mock_catalog + + tap_frontapp.main() + + mock_validate.assert_called_once_with("tok") + mock_discover.assert_called_once() + mock_json_dump.assert_called_once() + + @patch("tap_frontapp.sync") + @patch("tap_frontapp.Catalog") + @patch("tap_frontapp.Context") + @patch("tap_frontapp.utils.parse_args") + def test_main_uses_provided_properties_as_catalog( + self, mock_parse_args, mock_context_cls, mock_catalog_cls, mock_sync + ): + """Test that main() uses args.properties when provided (not discovery).""" + mock_args = MagicMock() + mock_args.discover = False + mock_args.config = {"token": "tok"} + mock_args.state = {} + mock_args.properties = {"streams": []} + mock_parse_args.return_value = mock_args + + mock_atx = MagicMock() + mock_context_cls.return_value = mock_atx + + catalog_instance = MagicMock() + mock_catalog_cls.from_dict.return_value = catalog_instance + + tap_frontapp.main() + + mock_catalog_cls.from_dict.assert_called_once_with(mock_args.properties) + self.assertEqual(mock_atx.catalog, catalog_instance) + + +if __name__ == "__main__": + unittest.main() \ No newline at end of file diff --git a/tests/unittests/test_schemas.py b/tests/unittests/test_schemas.py new file mode 100644 index 0000000..5e9c973 --- /dev/null +++ b/tests/unittests/test_schemas.py @@ -0,0 +1,211 @@ +"""Unit tests for tap_frontapp.schemas module.""" + +import unittest +from unittest.mock import patch, MagicMock + +from tap_frontapp.schemas import ( + IDS, + STATIC_SCHEMA_STREAM_IDS, + PK_FIELDS, + normalize_fieldname, + load_schema, + load_and_write_schema, + get_schemas, +) + + +class TestNormalizeFieldname(unittest.TestCase): + """Tests for normalize_fieldname 
function.""" + + def test_lowercase_conversion(self): + """Test that field names are lowercased.""" + self.assertEqual(normalize_fieldname("FirstName"), "firstname") + + def test_spaces_replaced_with_underscore(self): + """Test that spaces are replaced with underscores.""" + self.assertEqual(normalize_fieldname("first name"), "first_name") + + def test_hyphens_replaced_with_underscore(self): + """Test that hyphens are replaced with underscores.""" + self.assertEqual(normalize_fieldname("first-name"), "first_name") + + def test_special_chars_removed(self): + """Test that special characters other than alphanumeric/underscore are removed.""" + self.assertEqual(normalize_fieldname("field@name!"), "fieldname") + + def test_already_normalized(self): + """Test that already-normalized names pass through unchanged.""" + self.assertEqual(normalize_fieldname("already_normalized"), "already_normalized") + + def test_numbers_preserved(self): + """Test that numbers in field names are preserved.""" + self.assertEqual(normalize_fieldname("field_123"), "field_123") + + def test_empty_string(self): + """Test that empty string returns empty string.""" + self.assertEqual(normalize_fieldname(""), "") + + def test_mixed_case_with_spaces_and_specials(self): + """Test complex normalization scenario.""" + self.assertEqual(normalize_fieldname("Avg First-Response Time!"), "avg_first_response_time") + + +class TestStaticConstants(unittest.TestCase): + """Tests for module-level constants.""" + + def test_static_schema_stream_ids_contains_all_six_streams(self): + """Test that STATIC_SCHEMA_STREAM_IDS has exactly 6 entries.""" + self.assertEqual(len(STATIC_SCHEMA_STREAM_IDS), 6) + + def test_static_schema_stream_ids_contains_expected_streams(self): + """Test that all expected stream IDs are present.""" + expected = { + IDS.ACCOUNTS_TABLE, + IDS.CHANNELS_TABLE, + IDS.INBOXES_TABLE, + IDS.TAGS_TABLE, + IDS.TEAMMATES_TABLE, + IDS.TEAMS_TABLE, + } + self.assertEqual(set(STATIC_SCHEMA_STREAM_IDS), 
expected) + + def test_pk_fields_defined_for_all_streams(self): + """Test that PK_FIELDS has entries for every stream.""" + for stream_id in STATIC_SCHEMA_STREAM_IDS: + self.assertIn(stream_id, PK_FIELDS, f"PK_FIELDS missing entry for '{stream_id}'") + + def test_pk_fields_contain_required_keys(self): + """Test that each stream's PK fields are exactly the four required keys.""" + required_pks = {"analytics_date", "analytics_range", "report_id", "metric_id"} + for stream_id, pks in PK_FIELDS.items(): + self.assertEqual( + set(pks), + required_pks, + f"Stream '{stream_id}' has unexpected PK fields: {pks}", + ) + + +class TestLoadSchema(unittest.TestCase): + """Tests for load_schema function.""" + + def test_load_schema_returns_dict(self): + """Test that load_schema returns a dictionary for a valid stream ID.""" + schema = load_schema("teams_table") + self.assertIsInstance(schema, dict) + + def test_load_schema_contains_properties(self): + """Test that the loaded schema contains a 'properties' key.""" + schema = load_schema("accounts_table") + self.assertIn("properties", schema) + + def test_load_schema_for_each_stream(self): + """Test that all static streams have loadable schemas.""" + for stream_id in STATIC_SCHEMA_STREAM_IDS: + with self.subTest(stream_id=stream_id): + schema = load_schema(stream_id) + self.assertIsInstance(schema, dict) + self.assertIn("properties", schema) + + def test_load_schema_missing_stream_raises(self): + """Test that loading a non-existent schema raises an error.""" + with self.assertRaises(Exception): + load_schema("nonexistent_stream_xyz") + + +class TestLoadAndWriteSchema(unittest.TestCase): + """Tests for load_and_write_schema function.""" + + @patch("tap_frontapp.schemas.singer.write_schema") + def test_load_and_write_schema_calls_singer_write_schema(self, mock_write_schema): + """Test that load_and_write_schema calls singer.write_schema.""" + load_and_write_schema("teams_table") + mock_write_schema.assert_called_once() + 
@patch("tap_frontapp.schemas.singer.write_schema") + def test_load_and_write_schema_passes_correct_stream_id(self, mock_write_schema): + """Test that the correct stream ID is passed to singer.write_schema.""" + load_and_write_schema("channels_table") + call_args = mock_write_schema.call_args[0] + self.assertEqual(call_args[0], "channels_table") + + @patch("tap_frontapp.schemas.singer.write_schema") + def test_load_and_write_schema_passes_correct_key_properties(self, mock_write_schema): + """Test that correct key properties are passed to singer.write_schema.""" + load_and_write_schema("inboxes_table") + call_args = mock_write_schema.call_args[0] + # Third positional arg is key_properties + self.assertEqual(sorted(call_args[2]), sorted(PK_FIELDS["inboxes_table"])) + + @patch("tap_frontapp.schemas.singer.write_schema") + def test_load_and_write_schema_for_all_streams(self, mock_write_schema): + """Test load_and_write_schema works for all static streams.""" + for stream_id in STATIC_SCHEMA_STREAM_IDS: + mock_write_schema.reset_mock() + with self.subTest(stream_id=stream_id): + load_and_write_schema(stream_id) + mock_write_schema.assert_called_once() + + +class TestGetSchemas(unittest.TestCase): + """Tests for get_schemas function.""" + + def test_get_schemas_returns_two_dicts(self): + """Test that get_schemas returns (schemas_dict, metadata_dict).""" + schemas, field_metadata = get_schemas() + self.assertIsInstance(schemas, dict) + self.assertIsInstance(field_metadata, dict) + + def test_get_schemas_contains_all_streams(self): + """Test that get_schemas includes all static streams.""" + schemas, _ = get_schemas() + for stream_id in STATIC_SCHEMA_STREAM_IDS: + self.assertIn(stream_id, schemas) + + def test_get_schemas_metadata_contains_all_streams(self): + """Test that metadata map includes all static streams.""" + _, field_metadata = get_schemas() + for stream_id in STATIC_SCHEMA_STREAM_IDS: + self.assertIn(stream_id, field_metadata) + + def 
test_get_schemas_stream_level_metadata_has_inclusion(self): + """Test that stream-level metadata has inclusion=available.""" + from singer import metadata as md + _, field_metadata = get_schemas() + for stream_id in STATIC_SCHEMA_STREAM_IDS: + mdata_map = md.to_map(md.to_list(field_metadata[stream_id])) + root = mdata_map.get((), {}) + self.assertEqual( + root.get("inclusion"), + "available", + f"Stream '{stream_id}' should have inclusion=available at root", + ) + + def test_get_schemas_stream_level_metadata_has_key_properties(self): + """Test that stream-level metadata has the correct key properties.""" + from singer import metadata as md + _, field_metadata = get_schemas() + for stream_id in STATIC_SCHEMA_STREAM_IDS: + mdata_map = md.to_map(md.to_list(field_metadata[stream_id])) + root = mdata_map.get((), {}) + self.assertEqual( + sorted(root.get("table-key-properties", [])), + sorted(PK_FIELDS[stream_id]), + ) + + def test_get_schemas_pk_fields_have_automatic_inclusion(self): + """Test that PK fields have inclusion=automatic in metadata.""" + from singer import metadata as md + _, field_metadata = get_schemas() + for stream_id in STATIC_SCHEMA_STREAM_IDS: + mdata_map = md.to_map(md.to_list(field_metadata[stream_id])) + for pk_field in PK_FIELDS[stream_id]: + field_meta = mdata_map.get(("properties", pk_field), {}) + self.assertEqual( + field_meta.get("inclusion"), + "automatic", + f"PK field '{pk_field}' in '{stream_id}' should have inclusion=automatic", + ) + + +if __name__ == "__main__": + unittest.main()