Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Tdl 20357 add missing tap tester tests #53

Open
wants to merge 25 commits into
base: crest-master
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from 24 commits
Commits
Show all changes
25 commits
Select commit Hold shift + click to select a range
25aabda
initial commit
NevilParikh14 Aug 24, 2022
3beaf78
Updated test cases
NevilParikh14 Aug 25, 2022
4d98589
added contacts stream in base.py
NevilParikh14 Aug 25, 2022
55e9e53
updated bookmark test cases
NevilParikh14 Aug 25, 2022
378ad79
Removed unused line
NevilParikh14 Aug 26, 2022
2286d1f
updated bookmark with filter param test case
NevilParikh14 Aug 26, 2022
e1aafd9
updated all fields test case
NevilParikh14 Aug 26, 2022
ba6395b
Updated to LOGGER from print
NevilParikh14 Aug 29, 2022
010217d
Updated assertion message
NevilParikh14 Aug 29, 2022
f2313b8
added back commented print statement
NevilParikh14 Aug 29, 2022
5fc93e0
resolved review comments
NevilParikh14 Aug 29, 2022
3ecf64b
Updated pagination test case
NevilParikh14 Aug 29, 2022
5c3ccd5
uncommented LOGGERS
NevilParikh14 Aug 29, 2022
0c1ba1f
resolved review comments
NevilParikh14 Aug 31, 2022
f8202ef
removed unused imports
NevilParikh14 Sep 2, 2022
76be7cc
resolved review comments
NevilParikh14 Sep 5, 2022
6dc4ee8
resolved review comments
NevilParikh14 Sep 8, 2022
73afe0c
updated param name of expected_streams
NevilParikh14 Sep 12, 2022
35e3d16
removed unused import
NevilParikh14 Sep 15, 2022
422ec12
added tap-tester test for parent-child syncing
NevilParikh14 Sep 16, 2022
957e4b9
updated parent-child sync file
NevilParikh14 Sep 19, 2022
33180fb
updated pagination test case
NevilParikh14 Sep 19, 2022
2d777e5
updated bookmark with filter param test
NevilParikh14 Sep 20, 2022
013772e
resolved review comments
NevilParikh14 Sep 23, 2022
a215fb0
resolved review comments
NevilParikh14 Sep 27, 2022
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
245 changes: 165 additions & 80 deletions tests/base.py

Large diffs are not rendered by default.

103 changes: 103 additions & 0 deletions tests/test_freshdesk_all_fields.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,103 @@
from tap_tester import runner, connections, menagerie

from base import FreshdeskBaseTest

# Fields that cannot be generated through the Freshdesk UI (on the trial
# account used by these tests), so they are excluded from the
# expected-fields assertions in the all-fields test below.
KNOWN_MISSING_FIELDS = {
    'tickets': {
        'facebook_id',
        'description',
        'description_text',
        'twitter_id',
        'name',
        'phone',
        'email'
    },
    'groups': {
        'auto_ticket_assign',
        'agent_ids'
    },
    'agents': {
        'group_ids',
        'role_ids'
    },
    'contacts': {
        'view_all_tickets',
        'other_companies',
        'other_emails',
        'tags',
        'avatar'
    }
}


class TestFreshdeskAllFields(FreshdeskBaseTest):
    """Test that with all fields selected for a stream automatic and available fields are replicated"""

    @staticmethod
    def name():
        """Return the unique tap-tester test name."""
        return "tap_tester_freshdesk_all_fields"

    def test_run(self):
        """
        • Verify no unexpected streams were replicated
        • Verify that more than just the automatic fields are replicated for each stream.
        • Verify all fields for each stream are replicated
        """

        expected_streams = self.expected_streams(only_trial_account_streams=True)

        # Instantiate connection
        conn_id = connections.ensure_connection(self)

        # Run check mode
        found_catalogs = self.run_and_verify_check_mode(conn_id)

        # Table and field selection: select every field of every expected stream
        test_catalogs_all_fields = [catalog for catalog in found_catalogs
                                    if catalog.get('stream_name') in expected_streams]
        self.perform_and_verify_table_and_field_selection(
            conn_id, test_catalogs_all_fields, select_all_fields=True,
        )

        # Grab metadata after performing table-and-field selection to set expectations
        # used for asserting all fields are replicated
        stream_to_all_catalog_fields = dict()
        for catalog in test_catalogs_all_fields:
            stream_id, stream_name = catalog['stream_id'], catalog['stream_name']
            catalog_entry = menagerie.get_annotated_schema(conn_id, stream_id)
            # Field-level metadata entries have a non-empty breadcrumb of the
            # form ['properties', <field_name>]
            fields_from_field_level_md = [md_entry['breadcrumb'][1]
                                          for md_entry in catalog_entry['metadata']
                                          if md_entry['breadcrumb'] != []]
            stream_to_all_catalog_fields[stream_name] = set(fields_from_field_level_md)

        # Run initial sync (the returned record counts are not needed by the
        # assertions below, so the result is deliberately not bound)
        self.run_and_verify_sync(conn_id)
        synced_records = runner.get_records_from_target_output()

        # Verify no unexpected streams were replicated
        synced_stream_names = set(synced_records.keys())
        self.assertSetEqual(expected_streams, synced_stream_names)

        for stream in expected_streams:
            with self.subTest(stream=stream):
                # Expected values (default to an empty set so a missing stream
                # fails the assertions below rather than raising TypeError)
                expected_automatic_fields = self.expected_automatic_fields().get(stream, set())

                # Get all expected keys
                expected_all_keys = stream_to_all_catalog_fields[stream]

                # Collect actual values; use .get defaults so an absent stream
                # surfaces as an assertion failure, not an uncaught exception
                messages = synced_records.get(stream, {})
                actual_all_keys = set()
                for message in messages.get('messages', []):
                    if message['action'] == 'upsert':
                        actual_all_keys.update(message['data'].keys())

                # Drop fields that cannot be generated on the test account
                expected_all_keys = expected_all_keys - KNOWN_MISSING_FIELDS.get(stream, set())

                # Verify all fields for a stream were replicated
                self.assertGreater(len(expected_all_keys), len(expected_automatic_fields))
                self.assertTrue(expected_automatic_fields.issubset(expected_all_keys),
                                msg=f'{expected_automatic_fields - expected_all_keys} is not in "expected_all_keys"')
                self.assertSetEqual(expected_all_keys, actual_all_keys)
71 changes: 71 additions & 0 deletions tests/test_freshdesk_automatic_fields.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,71 @@
from tap_tester import runner, connections

from base import FreshdeskBaseTest


class TestFreshdeskAutomaticFields(FreshdeskBaseTest):
    """Test that with no fields selected for a stream automatic fields are still replicated"""

    @staticmethod
    def name():
        """Return the unique tap-tester test name."""
        return "tap_tester_freshdesk_automatic_fields"

    def test_run(self):
        """
        • Verify we can deselect all fields except when inclusion=automatic, which is handled by base.py methods
        • Verify that only the automatic fields are sent to the target.
        • Verify that all replicated records have unique primary key values.
        """

        expected_streams = self.expected_streams(only_trial_account_streams=True)

        # Instantiate connection
        conn_id = connections.ensure_connection(self)

        # Run check mode
        found_catalogs = self.run_and_verify_check_mode(conn_id)

        # Table and field selection: select the expected streams but no
        # non-automatic fields (select_all_fields=False)
        test_catalogs_automatic_fields = [catalog for catalog in found_catalogs
                                          if catalog.get('stream_name') in expected_streams]

        self.perform_and_verify_table_and_field_selection(
            conn_id, test_catalogs_automatic_fields, select_all_fields=False,
        )

        # Run initial sync
        record_count_by_stream = self.run_and_verify_sync(conn_id)
        synced_records = runner.get_records_from_target_output()

        for stream in expected_streams:
            with self.subTest(stream=stream):

                # Expected values (default to an empty set so a missing stream
                # fails the assertions below rather than raising TypeError)
                expected_primary_keys = self.expected_primary_keys()[stream]
                expected_keys = self.expected_automatic_fields().get(stream, set())

                # Collect actual values; default to an empty message list so an
                # absent stream trips the record-count assertion instead of
                # raising an uncaught exception mid-loop
                data = synced_records.get(stream, {})
                messages = data.get('messages', [])
                record_messages_keys = [set(row.get('data').keys())
                                        for row in messages]
                primary_keys_list = [
                    tuple(message.get('data').get(expected_pk)
                          for expected_pk in expected_primary_keys)
                    for message in messages
                    if message.get('action') == 'upsert']
                unique_primary_keys_list = set(primary_keys_list)

                # Verify that you get some records for each stream
                # (message fixed: the check is "at least one record", not a
                # max-limit check as the old text claimed)
                self.assertGreater(
                    record_count_by_stream.get(stream, -1), 0,
                    msg="No records were replicated for the {} stream".format(stream))

                # Verify that only the automatic fields are sent to the target
                for actual_keys in record_messages_keys:
                    self.assertSetEqual(expected_keys, actual_keys)

                # Verify that all replicated records have unique primary key values.
                self.assertEqual(
                    len(primary_keys_list),
                    len(unique_primary_keys_list),
                    msg="Replicated record does not have unique primary key values.")
Loading