Miro code #136

Closed · wants to merge 27 commits into from
Commits (27)
994fcfd
TDL-14058: Update SDK and api version (#105)
savan-chovatiya Feb 10, 2022
f649825
Merge branch 'master' into crest-master
prijendev Feb 10, 2022
c92fbb9
Added tax_type and metadata in subcriptions_items
prijendev Feb 10, 2022
8811e85
Merged master into crest-master
namrata270998 Feb 18, 2022
f199cb7
Initial commit for rule map
prijendev Feb 24, 2022
309402f
Added standard_stream name in discover mode
prijendev Feb 25, 2022
e493966
Added detail comments
prijendev Feb 25, 2022
f144545
Resolved pylint error
prijendev Feb 25, 2022
ada7fa5
Added rulemap for event based records
prijendev Feb 28, 2022
ebf2f94
Resolved pylint errors
prijendev Feb 28, 2022
95fe8c0
Resolved pylint error
prijendev Feb 28, 2022
60af174
Resolved rule map error for array type
prijendev Mar 1, 2022
1be415f
Resolved list field missing issue
prijendev Mar 1, 2022
f061617
added the proration_details due to cci failure
namrata270998 Mar 2, 2022
9e407c1
Resolved ref issue.
prijendev Mar 7, 2022
5e9dc87
Added detail comment.
prijendev Mar 8, 2022
38a1aa6
Resolved conflict issue
prijendev Mar 8, 2022
a87d236
TDL-17880 Add missing test cases (#131)
namrata270998 Mar 11, 2022
922f568
Tdl 13149 sync payment intent (#127)
prijendev Mar 11, 2022
59a81a2
Tdl 13711 upgrade json schema (#124)
namrata270998 Mar 11, 2022
6f6f06e
TDL 15120 add event type field in schemas (#123)
prijendev Mar 11, 2022
0e19967
updated charge.json
namrata270998 Mar 11, 2022
15a2955
Resolved pylint error
prijendev Mar 16, 2022
d33160a
Merge remote-tracking branch 'origin/crest-master' into TDL-17934-poc…
prijendev Mar 16, 2022
c5e5868
Skipped card.mandate of charge stream in all_fields test case
prijendev Mar 16, 2022
2435471
Updated rulemap implementation with handling of None value
prijendev Mar 16, 2022
77b3025
Resolved all_fields test case error for payment_intents stream
prijendev Mar 16, 2022
.circleci/config.yml: 9 changes (5 additions, 4 deletions)

@@ -71,7 +71,8 @@ jobs:
          name: 'Unit Tests'
          command: |
            source /usr/local/share/virtualenvs/tap-stripe/bin/activate
-           nosetests tests/unittests
+           nosetests --with-coverage --cover-erase --cover-package=tap_stripe --cover-html-dir=htmlcov tests/unittests
+           coverage html
  run_integration_test:
    parameters:
      file:
@@ -86,12 +87,12 @@
          # easier, emit an xUnit report and let Circle tell you what
          # failed.
          name: 'Integration Testing'
-         no_output_timeout: 30m
+         no_output_timeout: 45m
          command: |
            source /usr/local/share/virtualenvs/tap-stripe/bin/activate
            source /usr/local/share/virtualenvs/tap-tester/bin/activate
            source /usr/local/share/virtualenvs/dev_env.sh
-           pip install 'stripe==2.42.0'
+           pip install 'stripe==2.64.0'
            run-test --tap=${CIRCLE_PROJECT_REPONAME} tests/test_<< parameters.file >>.py
      - slack/notify-on-failure:
          only_for_branches: master
@@ -214,7 +215,7 @@ workflows:
            - tier-1-tap-user
          requires:
            - 'Testing bookmarks'
-  build_daily:
+  build_daily:
    <<: *commit_jobs
    triggers:
      - schedule:
setup.py: 5 changes (3 additions, 2 deletions)

@@ -11,12 +11,13 @@
      py_modules=["tap_stripe"],
      install_requires=[
          "singer-python==5.5.1",
-         "stripe==2.10.1",
+         "stripe==2.64.0",
      ],
      extras_require={
          'test': [
              'pylint==2.7.2',
-             'nose==1.3.7'
+             'nose==1.3.7',
+             'coverage'
          ],
          'dev': [
              'ipdb',
tap_stripe/__init__.py: 257 changes (214 additions, 43 deletions)

Large diffs are not rendered by default.

tap_stripe/rule_map.py: 229 changes (229 additions, 0 deletions; new file)

@@ -0,0 +1,229 @@
import re
import singer

# These are standard keys defined in the JSON Schema spec.
# We will not apply rules on these STANDARD_KEYS.
STANDARD_KEYS = [
    'selected',
    'inclusion',
    'description',
    'minimum',
    'maximum',
    'exclusiveMinimum',
    'exclusiveMaximum',
    'multipleOf',
    'maxLength',
    'minLength',
    'format',
    'type',
    'additionalProperties',
    'anyOf',
    'patternProperties',
]


LOGGER = singer.get_logger()

class RuleMap:
    GetStdFieldsFromApiFields = {}


    def fill_rule_map_object_by_catalog(self, stream_name, stream_metadata):
        """
        Read the original-name of each field from the catalog metadata and add
        it to the `GetStdFieldsFromApiFields` dict.
        param1: stream_name: users
        param2: stream_metadata
            {
                "breadcrumb": [
                    "properties",
                    "user_name"
                ],
                "metadata": {
                    "original-name": "UserName"
                }
            }

        After iterating over all of the metadata,
        GetStdFieldsFromApiFields['users'] = {('properties', 'UserName'): 'user_name'}
        """
        self.GetStdFieldsFromApiFields[stream_name] = {}

        for key, value in stream_metadata.items():
            api_name = value.get('original-name')
            if api_name and key:
                self.GetStdFieldsFromApiFields[stream_name][key[:-1] + (api_name,)] = key[-1:][0]

    def apply_ruleset_on_schema(self, schema, schema_copy, stream_name, parent=()):
        """
        Apply the defined rule set on the schema and return it.
        """
        temp_dict = {}
        roll_back_dict = {}

        if schema and isinstance(schema, dict) and schema.get('properties'):
            # Iterate through each item of the schema.
            for key in schema['properties'].keys():
                breadcrumb = parent + ('properties', key)

                self.apply_ruleset_on_schema(schema.get('properties', {}).get(key, {}), schema_copy.get('properties', {}).get(key, {}), stream_name, breadcrumb)

                # Skip keys that are listed in STANDARD_KEYS
                if key not in STANDARD_KEYS:

                    # Apply the rules to the field name
                    standard_key = self.apply_rules_to_original_field(key)

                    # Check whether the field name changed after applying the rules, and whether the
                    # same standard name is already present at the same level.
                    if key != standard_key and standard_key not in schema['properties'].keys():
                        if standard_key not in temp_dict:

                            # Add the standard field name to GetStdFieldsFromApiFields, keyed by the breadcrumb tuple.
                            # Example:
                            #   GetStdFieldsFromApiFields['users'][('properties', 'user_name')] = 'UserName'
                            self.GetStdFieldsFromApiFields[stream_name][parent +
                                ('properties', standard_key)] = key

                            # Add the key to temp_dict with standard_key as its value, so the schema can be updated
                            # after the whole schema has been iterated, because we cannot update the schema while
                            # iterating over it.
                            temp_dict[key] = standard_key
                        else:
                            # Log a warning for the field-name conflict found at the same level, and add its standard
                            # name to roll_back_dict because the standard field name must be rolled back to the
                            # original field name.
                            LOGGER.warning('Conflict found for field : %s', breadcrumb)
                            roll_back_dict[standard_key] = True

        elif schema.get('anyOf'):
            # Iterate through each possible datatype of the field
            # Example:
            #   'sources': {
            #       'anyOf': [
            #           {
            #               'type': ['null', 'array'],
            #               'items': {
            #                   'type': ['null', 'object'],
            #                   'properties': {}
            #               }
            #           },
            #           {
            #               'type': ['null', 'object'],
            #               'properties': {}
            #           }
            #       ]
            #   }
            for index, schema_field in enumerate(schema.get('anyOf')):
                self.apply_ruleset_on_schema(schema_field, schema_copy.get('anyOf')[index], stream_name, parent)
        elif schema and isinstance(schema, dict) and schema.get('items'):
            breadcrumb = parent + ('items',)
            self.apply_ruleset_on_schema(schema['items'], schema_copy['items'], stream_name, breadcrumb)

        for key, new_key in temp_dict.items():
            if roll_back_dict.get(new_key):
                breadcrumb = parent + ('properties', new_key)
                # Remove the key with the standard name from GetStdFieldsFromApiFields for which the
                # conflict was found.
                del self.GetStdFieldsFromApiFields[stream_name][breadcrumb]
                LOGGER.warning('Conflict found for field : %s', parent + ("properties", key))
            else:
                # Replace the original field name with its standard name in the schema
                try:
                    schema_copy['properties'][new_key] = schema_copy['properties'].pop(key)
                except KeyError:
                    pass

        return schema_copy

    def apply_rule_set_on_stream_name(self, stream_name):
        """
        Apply the defined rule set on the stream name and return it.
        """
        standard_stream_name = self.apply_rules_to_original_field(stream_name)

        if stream_name != standard_stream_name:
            self.GetStdFieldsFromApiFields[stream_name]['stream_name'] = stream_name
            return standard_stream_name

        # Return the original stream name if it was not changed.
        return stream_name

    @classmethod
    def apply_rules_to_original_field(cls, key):
        """
        Apply the defined rules to a field name.
        - Split alphanumeric strings containing capital letters followed by lowercase letters into
          multiple words and join them with underscores.
          - However, two or more adjacent capital letters are considered part of one word.
          - Example:
              anotherName -> another_name
              ANOTHERName -> anothername
        - Split alphanumeric strings containing letters, numbers, and special characters into multiple
          words and join them with underscores.
          - Example:
              MyName123 -> my_name_123
        - Convert any character that is not a letter, digit, or underscore to an underscore.
          A space is treated as such a character as well.
          - Example:
              A0a_*A -> a_0_a_a
        - Collapse multiple consecutive underscores into a single underscore.
          - Example:
              add____*LPlO -> add_lpl_o
        - Convert all upper-case letters to lower-case.
        """

        # Split alphanumeric strings containing capital letters followed by lowercase letters into
        # multiple words joined with underscores. The result includes an empty string at the end.
        standard_key = re.findall('[A-Z]*[^A-Z]*', key)
        standard_key = '_'.join(standard_key)

        # Remove the trailing underscore produced by the empty match at the end
        standard_key = standard_key[:-1]

        # Split alphanumeric strings containing letters, numbers, and special characters into multiple
        # words and join them with underscores.
        standard_key = re.findall(r'[A-Za-z_]+|\d+|\W+', standard_key)
        standard_key = '_'.join(standard_key)

        # Replace all special characters with underscores
        standard_key = re.sub(r'[\W]', '_', standard_key)

        # Prepend an underscore if the first character is a digit
        if standard_key[0].isdigit():
            standard_key = f'_{standard_key}'

        # Collapse repeated underscores into a single underscore
        standard_key = re.sub(r'[_]+', '_', standard_key)

        # Convert all upper-case letters to lower-case.
        return standard_key.lower()

    def apply_ruleset_on_api_response(self, response, stream_name, parent=()):
        """
        Apply the defined rule set on the API response and return it.
        """
        temp_dict = {}
        if isinstance(response, dict):
            for key, value in response.items():
                if isinstance(value, list) and value:
                    breadcrumb = parent + ('properties', key, 'items')
                    # Iterate through each item of the list
                    for val in value:
                        self.apply_ruleset_on_api_response(val, stream_name, breadcrumb)
                elif isinstance(value, dict):
                    breadcrumb = parent + ('properties', key)
                    self.apply_ruleset_on_api_response(value, stream_name, breadcrumb)
                else:
                    breadcrumb = parent + ('properties', key)

                if breadcrumb in self.GetStdFieldsFromApiFields[stream_name]:
                    # The field is present in the rule map and needs to be renamed to its standard name
                    temp_dict[key] = self.GetStdFieldsFromApiFields[stream_name][breadcrumb]

        # Update the keys in the record
        for key, new_key in temp_dict.items():
            # Replace the original field name with its standard name in the response
            response[new_key] = response.pop(key)

        return response
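
As a quick orientation for reviewers, here is a minimal usage sketch of the class above. It is not part of the PR: the 'users' stream, the 'UserName' field, and the sample record are made up, and manually seeding GetStdFieldsFromApiFields stands in for the real wiring in tap_stripe/__init__.py, whose diff is not rendered on this page. The expected values follow directly from the rules documented in apply_rules_to_original_field.

# Usage sketch (illustrative, not part of this PR): exercise RuleMap on a
# hypothetical 'users' stream. The real calls live in tap_stripe/__init__.py.
import copy

from tap_stripe.rule_map import RuleMap

rule_map = RuleMap()

# Field-name rules on their own (examples taken from the docstring above).
assert RuleMap.apply_rules_to_original_field('anotherName') == 'another_name'
assert RuleMap.apply_rules_to_original_field('MyName123') == 'my_name_123'
assert RuleMap.apply_rules_to_original_field('add____*LPlO') == 'add_lpl_o'

# Discover mode: rename schema properties and remember the original spellings.
stream_name = 'users'  # hypothetical stream name
rule_map.GetStdFieldsFromApiFields[stream_name] = {}
schema = {
    'type': ['null', 'object'],
    'properties': {
        'UserName': {'type': ['null', 'string']},
        'id': {'type': ['null', 'string']},
    },
}
standard_schema = rule_map.apply_ruleset_on_schema(schema, copy.deepcopy(schema), stream_name)
assert 'user_name' in standard_schema['properties']
assert rule_map.GetStdFieldsFromApiFields[stream_name][('properties', 'user_name')] == 'UserName'

# Sync mode: the map rebuilt from catalog metadata is keyed by the API spelling,
# so fields in an API record can be renamed to their standard names.
rule_map.GetStdFieldsFromApiFields[stream_name] = {('properties', 'UserName'): 'user_name'}
record = {'UserName': 'alice', 'id': 'usr_123'}
assert rule_map.apply_ruleset_on_api_response(record, stream_name) == {'user_name': 'alice', 'id': 'usr_123'}

Note the two directions of the map: apply_ruleset_on_schema keys it by the standard breadcrumb (used during discovery to write original-name metadata), while fill_rule_map_object_by_catalog rebuilds it keyed by the API spelling so records can be renamed during sync; the sketch seeds that second form directly before calling apply_ruleset_on_api_response.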