Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
3 changes: 3 additions & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
@@ -1,5 +1,8 @@
# Changelog

## 2.3.0
* Upgraded `singer-python` to `6.8.0` and `requests` to `2.33.0`

## 2.2.0
* Upgraded dependencies versions and added unit tests [#38](https://github.com/singer-io/tap-frontapp/pull/38)

Expand Down
6 changes: 3 additions & 3 deletions setup.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,17 +4,17 @@

setup(
name="tap-frontapp",
version="2.2.0",
version="2.3.0",
description="Singer.io tap for extracting data from the FrontApp API",
author="bytcode.io",
url="http://singer.io",
classifiers=["Programming Language :: Python :: 3 :: Only"],
install_requires=[
"singer-python==6.7.0",
"singer-python==6.8.0",
"pendulum==3.2.0",
"ratelimit==2.2.1",
"backoff==2.2.1",
"requests==2.32.5",
"requests==2.33.0",
],
entry_points="""
[console_scripts]
Expand Down
3 changes: 2 additions & 1 deletion tap_frontapp/schemas/accounts_table.json
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,8 @@
"type": ["null", "string"]
},
"analytics_date": {
"type": ["null", "string"]
"type": ["null", "string"],
"format": "date-time"
},
"analytics_range": {
"type": ["null", "string"]
Expand Down
3 changes: 2 additions & 1 deletion tap_frontapp/schemas/channels_table.json
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,8 @@
"type": ["null", "string"]
},
"analytics_date": {
"type": ["null", "string"]
"type": ["null", "string"],
"format": "date-time"
},
"analytics_range": {
"type": ["null", "string"]
Expand Down
3 changes: 2 additions & 1 deletion tap_frontapp/schemas/inboxes_table.json
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,8 @@
"type": ["null", "string"]
},
"analytics_date": {
"type": ["null", "string"]
"type": ["null", "string"],
"format": "date-time"
},
"analytics_range": {
"type": ["null", "string"]
Expand Down
3 changes: 2 additions & 1 deletion tap_frontapp/schemas/tags_table.json
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,8 @@
"type": ["null", "string"]
},
"analytics_date": {
"type": ["null", "string"]
"type": ["null", "string"],
"format": "date-time"
},
"analytics_range": {
"type": ["null", "string"]
Expand Down
3 changes: 2 additions & 1 deletion tap_frontapp/schemas/teammates_table.json
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,8 @@
"type": ["null", "string"]
},
"analytics_date": {
"type": ["null", "string"]
"type": ["null", "string"],
"format": "date-time"
},
"analytics_range": {
"type": ["null", "string"]
Expand Down
3 changes: 2 additions & 1 deletion tap_frontapp/schemas/teams_table.json
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,8 @@
"type": ["null", "string"]
},
"analytics_date": {
"type": ["null", "string"]
"type": ["null", "string"],
"format": "date-time"
},
"analytics_range": {
"type": ["null", "string"]
Expand Down
2 changes: 1 addition & 1 deletion tap_frontapp/streams.py
Original file line number Diff line number Diff line change
Expand Up @@ -154,7 +154,7 @@ def sync_metric(atx, metric_name, start_date, end_date):

with singer.metrics.job_timer('daily_aggregated_metric'):
start = time.monotonic()
start_date_formatted = datetime.datetime.fromtimestamp(start_date, tz=datetime.timezone.utc).strftime('%Y-%m-%d')
start_date_formatted = datetime.datetime.fromtimestamp(start_date, tz=datetime.timezone.utc).strftime('%Y-%m-%dT00:00:00Z')
# we've really moved this functionality to the request in the http script
# so we don't expect that this will actually have to run mult times
while True:
Expand Down
115 changes: 115 additions & 0 deletions tests/base.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,115 @@
import os

from tap_tester import menagerie
from tap_tester.base_suite_tests.base_case import BaseCase


# Every entity for which FrontApp exposes a daily analytics table stream.
_STREAM_ENTITIES = ("accounts", "channels", "inboxes", "tags", "teammates", "teams")

# All stream names the tap can replicate; each is "<entity>_table".
STREAMS = [f"{entity}_table" for entity in _STREAM_ETITIES] if False else [
    f"{entity}_table" for entity in _STREAM_ENTITIES
]

# Replication-method buckets: every stream replicates incrementally, so the
# FULL_TABLE bucket is currently empty.
INCREMENTAL_STREAMS = set(STREAMS)
FULL_TABLE_STREAMS = set()

# Composite primary key shared by all analytics table streams.
PRIMARY_KEYS = ["analytics_date", "analytics_range", "report_id", "metric_id"]


class FrontAppBaseTest(BaseCase):
    """Setup expectations for test sub classes.

    Metadata describing streams. A bunch of shared methods that are used
    in tap-tester tests. Shared tap-specific methods (as needed).
    """

    # Default configured start date for all test runs.
    start_date = "2019-01-01T00:00:00Z"

    @staticmethod
    def tap_name():
        """The name of the tap."""
        return "tap-frontapp"

    @staticmethod
    def get_type():
        """The Stitch connection type slug."""
        return "platform.frontapp"

    def setUp(self):
        """Fail fast if required credentials env vars are missing."""
        missing = [v for v in ["TAP_FRONTAPP_TOKEN"] if not os.getenv(v)]
        if missing:
            raise Exception(f"Missing required environment variables: {missing}")

    def get_properties(self, original: bool = True):
        """Configuration properties required for the tap.

        Both the original and non-original variants currently use the same
        start_date, so the ``original`` flag has no effect on the result
        (the previous implementation reassigned the identical value in the
        non-original branch).
        """
        return {"start_date": self.start_date}

    @staticmethod
    def get_credentials():
        """Authentication information for the test account.

        Values are read from environment variables — never hardcode credentials.
        """
        return {
            "token": os.getenv("TAP_FRONTAPP_TOKEN"),
        }

    @classmethod
    def expected_metadata(cls):
        """The expected streams and metadata about the streams.

        Every stream shares the same composite primary key, replication
        method, replication key, start-date behavior, and API limit, so the
        per-stream entries are built once from STREAMS instead of repeating
        the same literal dict six times.
        """
        pk_set = set(PRIMARY_KEYS)
        common_metadata = {
            cls.PRIMARY_KEYS: pk_set,
            cls.REPLICATION_METHOD: cls.INCREMENTAL,
            cls.REPLICATION_KEYS: {"analytics_date"},
            cls.OBEYS_START_DATE: True,
            cls.API_LIMIT: 1,
        }
        # Each stream gets its own (shallow) copy so callers may mutate one
        # stream's entry without affecting the others.
        return {stream: dict(common_metadata) for stream in STREAMS}
25 changes: 25 additions & 0 deletions tests/test_all_fields.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,25 @@
"""Test that all schema fields are replicated."""
from base import FrontAppBaseTest
from tap_tester.base_suite_tests.all_fields_test import AllFieldsTest

# Fields that exist in the schema but may not be returned by the FrontApp API
# in all test environments. Populate after a first real test run if needed.
KNOWN_MISSING_FIELDS: dict = {
    # e.g. "<stream_name>": {"<field_name>"},
}


class FrontAppAllFields(AllFieldsTest, FrontAppBaseTest):
    """Verify that selecting every stream with every field results in the
    replication of all fields."""

    MISSING_FIELDS = KNOWN_MISSING_FIELDS

    @staticmethod
    def name():
        return "tap_tester_frontapp_all_fields_test"

    def streams_to_test(self):
        # No streams are currently excluded; add names here for streams with
        # no test data or no API access in the test environment.
        excluded = set()
        return self.expected_stream_names() - excluded
17 changes: 17 additions & 0 deletions tests/test_automatic_fields.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,17 @@
"""Test that with no fields selected, automatic fields are still replicated."""
from base import FrontAppBaseTest
from tap_tester.base_suite_tests.automatic_fields_test import MinimumSelectionTest


class FrontAppAutomaticFields(MinimumSelectionTest, FrontAppBaseTest):
    """With no fields selected for a stream, the automatic fields (primary
    key and replication key) must still be replicated."""

    @staticmethod
    def name():
        return "tap_tester_frontapp_automatic_fields_test"

    def streams_to_test(self):
        # No streams currently lack test data, so nothing is excluded.
        excluded_streams = set()
        return self.expected_stream_names() - excluded_streams
28 changes: 28 additions & 0 deletions tests/test_bookmark.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,28 @@
"""Test tap sets a bookmark and respects it in subsequent runs."""
from base import FrontAppBaseTest, FULL_TABLE_STREAMS
from tap_tester.base_suite_tests.bookmark_test import BookmarkTest


class FrontAppBookMarkTest(BookmarkTest, FrontAppBaseTest):
    """Test tap sets a bookmark and respects it for the next sync of a stream."""

    # NOTE(review): the tap persists per-stream "date_to_resume" state values
    # in the space-separated "YYYY-MM-DD HH:MM:SS" form (singer's
    # to_datetime_string()), so bookmark_format must match that — the previous
    # ISO-8601 "%Y-%m-%dT%H:%M:%SZ" pattern did not parse the values below.
    bookmark_format = "%Y-%m-%d %H:%M:%S"

    # Seed every incremental stream with the same resume point; built from one
    # tuple of stream names rather than six repeated literal entries.
    initial_bookmarks = {
        "bookmarks": {
            stream: {"date_to_resume": "2020-01-01 00:00:00"}
            for stream in (
                "accounts_table",
                "channels_table",
                "inboxes_table",
                "tags_table",
                "teammates_table",
                "teams_table",
            )
        }
    }

    @staticmethod
    def name():
        return "tap_tester_frontapp_bookmark_test"

    def streams_to_test(self):
        # Exclude any FULL_TABLE streams (none currently)
        streams_to_exclude = FULL_TABLE_STREAMS
        return self.expected_stream_names().difference(streams_to_exclude)
14 changes: 14 additions & 0 deletions tests/test_discovery.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,14 @@
"""Test tap discovery mode and metadata."""
from base import FrontAppBaseTest
from tap_tester.base_suite_tests.discovery_test import DiscoveryTest


class FrontAppDiscoveryTest(DiscoveryTest, FrontAppBaseTest):
    """Verify discovery mode output and stream metadata conform to standards."""

    @staticmethod
    def name():
        return "tap_tester_frontapp_discovery_test"

    def streams_to_test(self):
        # Discovery is validated for every stream the tap exposes.
        return self.expected_stream_names()
16 changes: 16 additions & 0 deletions tests/test_interrupted_sync.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,16 @@
"""Test that interrupted sync resumes correctly from the saved bookmark."""
from base import FrontAppBaseTest, FULL_TABLE_STREAMS
from tap_tester.base_suite_tests.interrupted_sync_tests import InterruptedSyncTest


class FrontAppInterruptedSyncTest(InterruptedSyncTest, FrontAppBaseTest):
    """If a sync is interrupted, the next sync must resume from the correct
    bookmark."""

    @staticmethod
    def name():
        return "tap_tester_frontapp_interrupted_sync_test"

    def streams_to_test(self):
        # FULL_TABLE streams re-sync from scratch anyway, so only the
        # INCREMENTAL streams are meaningful here (FULL_TABLE_STREAMS is
        # currently empty).
        return self.expected_stream_names() - FULL_TABLE_STREAMS
25 changes: 25 additions & 0 deletions tests/test_pagination.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,25 @@
"""Test that the tap can replicate multiple pages of data."""
from base import FrontAppBaseTest
from tap_tester.base_suite_tests.pagination_test import PaginationTest


class FrontAppPaginationTest(PaginationTest, FrontAppBaseTest):
    """Ensure tap can replicate multiple pages of data for streams that use
    pagination."""

    @staticmethod
    def name():
        return "tap_tester_frontapp_pagination_test"

    def streams_to_test(self):
        # FrontApp analytics streams return one daily report per entity, and
        # most test environments do not hold enough entities to exceed one
        # page (sandbox typically has < API_LIMIT entities). Every stream is
        # excluded until an environment with sufficient data is available.
        excluded = {
            f"{entity}_table"
            for entity in (
                "accounts",
                "channels",
                "inboxes",
                "tags",
                "teammates",
                "teams",
            )
        }
        return self.expected_stream_names() - excluded
24 changes: 24 additions & 0 deletions tests/test_start_date.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,24 @@
"""Test that data is replicated from the configured start_date."""
from base import FrontAppBaseTest, FULL_TABLE_STREAMS
from tap_tester.base_suite_tests.start_date_test import StartDateTest


class FrontAppStartDateTest(StartDateTest, FrontAppBaseTest):
    """Run the start-date test against two different configured start dates."""

    @staticmethod
    def name():
        return "tap_tester_frontapp_start_date_test"

    def streams_to_test(self):
        # Exclude FULL_TABLE streams (none at present); every INCREMENTAL
        # stream is tested against both start dates.
        return self.expected_stream_names() - FULL_TABLE_STREAMS

    @property
    def start_date_1(self):
        # Earlier of the two configured start dates.
        return "2019-01-01T00:00:00Z"

    @property
    def start_date_2(self):
        # Later start date, one year after start_date_1.
        return "2020-01-01T00:00:00Z"
Empty file added tests/unittests/__init__.py
Empty file.
Loading
Loading