This repository has been archived by the owner on Sep 23, 2024. It is now read-only.

Commit

[AP-XXXX] Make AWS key optional and obtain it secondarily from env vars (#57)
ms32035 authored and koszti committed Jan 27, 2020
1 parent e6e4ef6 commit 9e1c55d
Showing 4 changed files with 28 additions and 11 deletions.
8 changes: 4 additions & 4 deletions README.md
@@ -65,6 +65,7 @@ encryption=(MASTER_KEY='{client_side_encryption_master_key}');

The `encryption` option is optional and used for client side encryption. If you want client side encryption enabled you'll need
to define the same master key in the target `config.json`. Further details below in the Configuration settings section.
+ Instead of `credentials` you can also use `storage_integration`.

2. A named file format. This will be used by the MERGE/COPY commands to parse the CSV files correctly from S3:

@@ -83,8 +84,6 @@ Running the target connector requires a `config.json` file. Example with the
"user": "my_user",
"password": "password",
"warehouse": "my_virtual_warehouse",
"aws_access_key_id": "secret",
"aws_secret_access_key": "secret",
"s3_bucket": "bucket_name",
"stage": "snowflake_external_stage_object_name",
"file_format": "snowflake_file_format_object_name",
@@ -101,8 +100,9 @@ Full list of options in `config.json`:
| user | String | Yes | Snowflake User |
| password | String | Yes | Snowflake Password |
| warehouse | String | Yes | Snowflake virtual warehouse name |
- | aws_access_key_id | String | Yes | S3 Access Key Id |
- | aws_secret_access_key | String | Yes | S3 Secret Access Key |
+ | aws_access_key_id | String | No | S3 Access Key Id. If not provided, AWS_ACCESS_KEY_ID environment variable or IAM role will be used |
+ | aws_secret_access_key | String | No | S3 Secret Access Key. If not provided, AWS_SECRET_ACCESS_KEY environment variable or IAM role will be used |
+ | aws_session_token | String | No | AWS Session token. If not provided, AWS_SESSION_TOKEN environment variable will be used |
| s3_bucket | String | Yes | S3 Bucket name |
| s3_key_prefix | String | | (Default: None) A static prefix before the generated S3 key names. Using prefixes you can upload files into specific directories in the S3 bucket. |
| stage | String | Yes | Named external stage name created at pre-requirements section. Has to be a fully qualified name including the schema name |
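
The fallback described in the `aws_*` rows above mirrors boto3's normal credential resolution: a key missing from `config.json` comes back as `None`, and passing `None` to `boto3.client()` is the same as not passing the argument at all, so boto3 continues through the `AWS_*` environment variables, shared credential files and finally an attached IAM role. A minimal illustrative sketch of that behaviour (placeholder values, not the target's own code):

```python
import boto3

# Hypothetical trimmed-down config: the aws_* keys are simply left out.
connection_config = {
    "s3_bucket": "bucket_name",
    "stage": "snowflake_external_stage_object_name",
    "file_format": "snowflake_file_format_object_name",
}

# dict.get() returns None for the missing keys; boto3 then falls back to
# AWS_ACCESS_KEY_ID / AWS_SECRET_ACCESS_KEY / AWS_SESSION_TOKEN in the
# environment, shared credential files, or an IAM role. The AWS region is
# likewise expected to come from the environment or AWS config.
s3 = boto3.client(
    "s3",
    aws_access_key_id=connection_config.get("aws_access_key_id"),
    aws_secret_access_key=connection_config.get("aws_secret_access_key"),
    aws_session_token=connection_config.get("aws_session_token"),
)
```

This is the same pattern the `db_sync.py` change below adopts.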
2 changes: 1 addition & 1 deletion setup.py
@@ -6,7 +6,7 @@
    long_description = f.read()

setup(name="pipelinewise-target-snowflake",
-     version="1.3.0",
+     version="1.4.0",
      description="Singer.io target for loading data to Snowflake - PipelineWise compatible",
      long_description=long_description,
      long_description_content_type='text/markdown',
8 changes: 3 additions & 5 deletions target_snowflake/db_sync.py
@@ -26,8 +26,6 @@ def validate_config(config):
        'user',
        'password',
        'warehouse',
-       'aws_access_key_id',
-       'aws_secret_access_key',
        's3_bucket',
        'stage',
        'file_format'
@@ -289,11 +287,11 @@ def __init__(self, connection_config, stream_schema_message=None, information_sc

        self.s3 = boto3.client(
            's3',
-           aws_access_key_id=self.connection_config['aws_access_key_id'],
-           aws_secret_access_key=self.connection_config['aws_secret_access_key']
+           aws_access_key_id=self.connection_config.get('aws_access_key_id'),
+           aws_secret_access_key=self.connection_config.get('aws_secret_access_key'),
+           aws_session_token=self.connection_config.get('aws_session_token')
        )


    def open_connection(self):
        return snowflake.connector.connect(
            user=self.connection_config['user'],
21 changes: 20 additions & 1 deletion tests/integration/test_target_snowflake.py
@@ -895,4 +895,23 @@ def test_loading_tables_with_custom_temp_dir(self):
        self.config['temp_dir'] = ('~/.pipelinewise/tmp')
        self.persist_lines_with_cache(tap_lines)

-       self.assert_three_streams_are_into_snowflake()
+       self.assert_three_streams_are_into_snowflake()
+
+   def test_using_aws_environment_variables(self):
+       """Test loading data with aws in the environment rather than explicitly provided access keys"""
+       tap_lines = test_utils.get_test_tap_lines("messages-with-three-streams.json")
+
+       try:
+           os.environ["AWS_ACCESS_KEY_ID"] = os.environ.get(
+               "TARGET_SNOWFLAKE_AWS_ACCESS_KEY"
+           )
+           os.environ["AWS_SECRET_ACCESS_KEY"] = os.environ.get(
+               "TARGET_SNOWFLAKE_AWS_SECRET_ACCESS_KEY"
+           )
+           self.config["aws_access_key_id"] = None
+           self.config["aws_secret_access_key"] = None
+
+           target_snowflake.persist_lines(self.config, tap_lines)
+       finally:
+           del os.environ["AWS_ACCESS_KEY_ID"]
+           del os.environ["AWS_SECRET_ACCESS_KEY"]
