Qa/test setup (#44)
* test setup for base suite

* Freshdesk check job test creation

* Created config.yml file

* Added add_ssh_keys entry to config.yml

* run tests again

Co-authored-by: kspeer <[email protected]>
Co-authored-by: btowles <[email protected]>
3 people authored Jan 31, 2022
1 parent bb87d40 commit 3bb1a92
Showing 3 changed files with 318 additions and 0 deletions.
54 changes: 54 additions & 0 deletions .circleci/config.yml
@@ -0,0 +1,54 @@
version: 2.1
orbs:
  # NOTE: the orb version was obscured by the page's email mangling; 3.4.2 is assumed
  slack: circleci/slack@3.4.2

jobs:
  build:
    docker:
      - image: 218546966473.dkr.ecr.us-east-1.amazonaws.com/circle-ci:stitch-tap-tester
    steps:
      - checkout
      - run:
          name: 'Setup Virtual Env'
          command: |
            python3 -m venv /usr/local/share/virtualenvs/tap-freshdesk
            source /usr/local/share/virtualenvs/tap-freshdesk/bin/activate
            pip install -U pip setuptools
            pip install .[dev]
      - add_ssh_keys
      - run:
          name: 'JSON Validator'
          command: |
            source /usr/local/share/virtualenvs/tap-tester/bin/activate
            stitch-validate-json tap_freshdesk/schemas/*.json
      - run:
          name: 'Integration Tests'
          command: |
            aws s3 cp s3://com-stitchdata-dev-deployment-assets/environments/tap-tester/tap_tester_sandbox dev_env.sh
            source dev_env.sh
            source /usr/local/share/virtualenvs/tap-tester/bin/activate
            run-test --tap=tap-freshdesk tests
      - slack/notify-on-failure:
          only_for_branches: master
workflows:
  version: 2
  commit:
    jobs:
      - build:
          context:
            - circleci-user
            - tap-tester-user
  build_daily:
    triggers:
      - schedule:
          cron: "0 0 * * *"
          filters:
            branches:
              only:
                - master
    jobs:
      - build:
          context:
            - circleci-user
            - tap-tester-user
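
# A rough local approximation of the 'Integration Tests' step above, assuming
# the tap-tester virtualenv and dev_env.sh credentials are available outside CI:
#   source dev_env.sh
#   source /usr/local/share/virtualenvs/tap-tester/bin/activate
#   run-test --tap=tap-freshdesk tests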

235 changes: 235 additions & 0 deletions tests/base.py
@@ -0,0 +1,235 @@
import os
import unittest
from datetime import datetime as dt
from datetime import timedelta

import tap_tester.menagerie as menagerie
import tap_tester.connections as connections
import tap_tester.runner as runner


class FreshdeskBaseTest(unittest.TestCase):

    REPLICATION_KEYS = "valid-replication-keys"
    PRIMARY_KEYS = "table-key-properties"
    FOREIGN_KEYS = "table-foreign-key-properties"
    REPLICATION_METHOD = "forced-replication-method"
    INCREMENTAL = "INCREMENTAL"
    FULL = "FULL_TABLE"

    START_DATE_FORMAT = "%Y-%m-%dT00:00:00Z"  # %H:%M:%SZ

    # TODO applies? Defined so expected_page_limits below does not raise AttributeError.
    EXPECTED_PAGE_SIZE = "expected-page-size"
    OBEYS_START_DATE = "obey-start-date"
    # PARENT_STREAM = "parent-stream"  # TODO applies?

    #######################################
    #  Tap Configurable Metadata Methods  #
    #######################################

    def setUp(self):
        missing_envs = [x for x in [
            'TAP_FRESHDESK_API_KEY',
            'TAP_FRESHDESK_SUBDOMAIN',
        ] if os.getenv(x) is None]
        if missing_envs:
            raise Exception("Missing environment variables: {}".format(missing_envs))

    @staticmethod
    def get_type():
        return "platform.freshdesk"

    @staticmethod
    def tap_name():
        return "tap-freshdesk"

    def get_properties(self):
        start_date = dt.today() - timedelta(days=5*365)
        start_date_with_fmt = dt.strftime(start_date, self.START_DATE_FORMAT)

        return {'start_date': start_date_with_fmt}

    def get_credentials(self):
        return {
            'api_key': os.getenv('TAP_FRESHDESK_API_KEY'),
            'domain': os.getenv('TAP_FRESHDESK_SUBDOMAIN'),
        }

    def required_environment_variables(self):
        return set(['TAP_FRESHDESK_API_KEY',
                    'TAP_FRESHDESK_SUBDOMAIN'])
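
    # Example shell setup before running these tests (hypothetical placeholder values):
    #   export TAP_FRESHDESK_API_KEY=<your-api-key>
    #   export TAP_FRESHDESK_SUBDOMAIN=<your-subdomain>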

    def expected_metadata(self):  # TODO LEFT OFF HERE, also need env vars
        """The expected streams and metadata about the streams"""
        return {
            "agents": {
                self.PRIMARY_KEYS: {"id"},
                self.REPLICATION_METHOD: self.INCREMENTAL,
                self.REPLICATION_KEYS: {"updated_at"},
                # self.EXPECTED_PAGE_SIZE: 25  # TODO check values
            },
            "companies": {
                self.PRIMARY_KEYS: {"id"},
                self.REPLICATION_METHOD: self.INCREMENTAL,
                self.REPLICATION_KEYS: {"updated_at"},
                # self.EXPECTED_PAGE_SIZE: 25  # TODO check values
            },
            "conversations": {
                self.PRIMARY_KEYS: {"id"},
                self.REPLICATION_METHOD: self.INCREMENTAL,
                self.REPLICATION_KEYS: {"updated_at"},
                # self.EXPECTED_PAGE_SIZE: 25  # TODO check values
            },
            "groups": {
                self.PRIMARY_KEYS: {"id"},
                self.REPLICATION_METHOD: self.INCREMENTAL,
                self.REPLICATION_KEYS: {"updated_at"},
                # self.EXPECTED_PAGE_SIZE: 25  # TODO check values
            },
            "roles": {
                self.PRIMARY_KEYS: {"id"},
                self.REPLICATION_METHOD: self.INCREMENTAL,
                self.REPLICATION_KEYS: {"updated_at"},
                # self.EXPECTED_PAGE_SIZE: 25  # TODO check values
            },
            "satisfaction_ratings": {
                self.PRIMARY_KEYS: {"id"},
                self.REPLICATION_METHOD: self.INCREMENTAL,
                self.REPLICATION_KEYS: {"updated_at"},
                # self.EXPECTED_PAGE_SIZE: 25  # TODO check values
            },
            "tickets": {
                self.PRIMARY_KEYS: {"id"},
                self.REPLICATION_METHOD: self.INCREMENTAL,
                self.REPLICATION_KEYS: {"updated_at"},
                # self.EXPECTED_PAGE_SIZE: 25  # TODO check values
            },
            "time_entries": {
                self.PRIMARY_KEYS: {"id"},
                self.REPLICATION_METHOD: self.INCREMENTAL,
                self.REPLICATION_KEYS: {"updated_at"},
                # self.EXPECTED_PAGE_SIZE: 25  # TODO check values
            },
        }

    #############################
    #  Common Metadata Methods  #
    #############################

    def expected_primary_keys(self):
        """
        Return a dictionary with key of table name
        and value as a set of primary key fields.
        """
        return {table: properties.get(self.PRIMARY_KEYS, set())
                for table, properties
                in self.expected_metadata().items()}

    def expected_automatic_fields(self):
        """
        Return a dictionary with key of table name and value as the set of
        automatic fields (primary keys and replication keys) for that stream.
        """
        pks = self.expected_primary_keys()
        rks = self.expected_replication_keys()

        return {stream: rks.get(stream, set()) | pks.get(stream, set())
                for stream in self.expected_streams()}
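
    # For example, given the metadata above, every stream currently yields the
    # same automatic fields:
    #   self.expected_automatic_fields()["tickets"]  ->  {"id", "updated_at"}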

    def expected_replication_method(self):
        """Return a dictionary with key of table name and value of replication method."""
        return {table: properties.get(self.REPLICATION_METHOD, None)
                for table, properties
                in self.expected_metadata().items()}

    def expected_streams(self):
        """A set of expected stream names"""
        return set(self.expected_metadata().keys())

    def expected_replication_keys(self):
        """
        Return a dictionary with key of table name
        and value as a set of replication key fields.
        """
        return {table: properties.get(self.REPLICATION_KEYS, set())
                for table, properties
                in self.expected_metadata().items()}

    def expected_page_limits(self):
        """Return a dictionary with key of table name and value of expected page size."""
        return {table: properties.get(self.EXPECTED_PAGE_SIZE, set())
                for table, properties
                in self.expected_metadata().items()}


    ##########################
    #   Common Test Actions  #
    ##########################

    def create_connection_and_run_check(self, original_properties: bool = True):
        """Create a new connection with the test name"""
        # Create the connection
        conn_id = connections.ensure_connection(self, original_properties)

        # Run a check job using orchestrator (discovery)
        check_job_name = runner.run_check_mode(self, conn_id)

        # Assert that the check job succeeded
        exit_status = menagerie.get_exit_status(conn_id, check_job_name)
        menagerie.verify_check_exit_status(self, exit_status, check_job_name)
        return conn_id
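
    # Typical usage at the top of a test (sketch):
    #   conn_id = self.create_connection_and_run_check()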

    def run_and_verify_check_mode(self, conn_id):
        """
        Run the tap in check mode and verify it succeeds.
        This should be run prior to field selection and initial sync.
        For Freshdesk, check mode does not run discovery, so verify that
        menagerie reports an empty catalog.
        """
        # Run in check mode
        check_job_name = runner.run_check_mode(self, conn_id)

        # Verify check exit codes
        exit_status = menagerie.get_exit_status(conn_id, check_job_name)
        menagerie.verify_check_exit_status(self, exit_status, check_job_name)

        found_catalogs = menagerie.get_catalogs(conn_id)
        self.assertEqual(
            len(found_catalogs), 0,
            msg="expected 0 length catalog for check job, conn_id: {}".format(conn_id)
        )
        print("Verified len(found_catalogs) = 0 for job with conn_id: {}".format(conn_id))

    def run_and_verify_sync(self, conn_id):
        """
        Run a sync job and make sure it exited properly.
        Return a dictionary with keys of streams synced
        and values of records synced for each stream.
        """
        # Run a sync job using orchestrator
        sync_job_name = runner.run_sync_mode(self, conn_id)

        # Verify tap and target exit codes
        exit_status = menagerie.get_exit_status(conn_id, sync_job_name)
        menagerie.verify_sync_exit_status(self, exit_status, sync_job_name)

        # Verify actual rows were synced
        sync_record_count = runner.examine_target_output_file(self,
                                                              conn_id,
                                                              self.expected_streams(),
                                                              self.expected_primary_keys())
        total_row_count = sum(sync_record_count.values())
        self.assertGreater(total_row_count, 0,
                           msg="failed to replicate any data: {}".format(sync_record_count))
        print("total replicated row count: {}".format(total_row_count))

        return sync_record_count


    def timedelta_formatted(self, dtime, days=0, str_format="%Y-%m-%dT00:00:00Z"):
        """Parse a formatted date string, shift it by `days`, and re-format it."""
        date_stripped = dt.strptime(dtime, str_format)
        return_date = date_stripped + timedelta(days=days)

        return dt.strftime(return_date, str_format)
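
    # Example:
    #   self.timedelta_formatted("2022-01-31T00:00:00Z", days=-5)
    #   -> "2022-01-26T00:00:00Z"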

    ################################
    #   Tap Specific Test Actions  #
    ################################
29 changes: 29 additions & 0 deletions tests/test_freshdesk_check.py
@@ -0,0 +1,29 @@
"""Test tap check mode and metadata/annotated-schema."""
import re

from tap_tester import menagerie, connections, runner

from base import FreshdeskBaseTest


class CheckTest(FreshdeskBaseTest):
"""Test tap check mode and metadata/annotated-schema conforms to standards."""

@staticmethod
def name():
return "tt_freshdesk_check"

def test_run(self):
"""
Freshdesk check test (does not run discovery).
Verify that check does NOT create a discovery catalog, schema, metadata, etc.
• Verify check job does not populate found_catalogs
• Verify no critical errors are thrown for check job
"""
streams_to_test = self.expected_streams()

conn_id = connections.ensure_connection(self)

# Run and verify the check, see base.py for details
self.run_and_verify_check_mode(conn_id)
