Skip to content

Commit

Permalink
feat: add github action for running tests (#15)
Browse files Browse the repository at this point in the history
* feat: add placeholder github action for running tests

* add missing copyright for script

* add helper scripts for testing

* add ability to specify stack names for integ stacks

* fix nag suppression errors from stack name change

* add support for multiple deployments and update tests

- Add UserPoolName parameter to CognitoAuth construct
- Use stack-specific log group for EventBusWatcher in control plane and
  core app plane
- Improve credentials generation in sbt-aws.sh script
- Update test-sbt.sh to use stack-specific DynamoDB table name
- Remove unnecessary cdk-nag suppressions

* add parameter checking for script

* rename test file and update response check for deleting a non-existent tenant

* update test-sbt-aws to return error code based on tests

* Add check for test result output from Step Functions execution

* fix sfn output parsing and comment out test for deleting non-existent tenant

* update json parse logic and remove commented test. (test will be added later)

* execute run-tests nightly on a schedule
  • Loading branch information
suhussai authored Mar 27, 2024
1 parent a0380ff commit 6e7e5b7
Show file tree
Hide file tree
Showing 15 changed files with 876 additions and 115 deletions.
1 change: 1 addition & 0 deletions .gitattributes

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

25 changes: 25 additions & 0 deletions .github/workflows/run-tests.yml

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

1 change: 1 addition & 0 deletions .gitignore

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

1 change: 1 addition & 0 deletions .projen/files.json

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

2 changes: 2 additions & 0 deletions .projenrc.ts
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,7 @@
import { awscdk, javascript } from 'projen';
import { GithubCredentials } from 'projen/lib/github';
import { NpmAccess } from 'projen/lib/javascript';
import { runTestsWorkflow } from './projenrc/run-tests-workflow';

const GITHUB_USER: string = 'awslabs';
const PUBLICATION_NAMESPACE: string = 'cdklabs';
Expand Down Expand Up @@ -142,4 +143,5 @@ project.eslint?.addRules({
],
});

runTestsWorkflow(project);
project.synth();
48 changes: 48 additions & 0 deletions projenrc/run-tests-workflow.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,48 @@
// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
// SPDX-License-Identifier: Apache-2.0

import { AwsCdkConstructLibrary } from 'projen/lib/awscdk';
import { JobPermission } from 'projen/lib/github/workflows-model';

/**
 * Registers a scheduled GitHub Actions workflow named `run-tests` on the
 * project. The workflow assumes an AWS IAM role via OIDC, checks out the
 * repository, and runs the integration-test script nightly.
 *
 * No-op when the project has no GitHub component configured.
 */
export function runTestsWorkflow(project: AwsCdkConstructLibrary) {
  const workflow = project.github?.addWorkflow('run-tests');
  if (!workflow) {
    return;
  }

  // 06:00 UTC — midnight Mountain Time (UTC-6) — every day.
  workflow.on({
    schedule: [{ cron: '0 6 * * *' }],
  });

  const steps = [
    {
      name: 'configure aws credentials',
      uses: 'aws-actions/configure-aws-credentials@v4',
      with: {
        'role-to-assume': '${{ secrets.IAM_ROLE_GITHUB }}',
        'aws-region': '${{ secrets.AWS_REGION }}',
      },
    },
    {
      name: 'checkout source',
      uses: 'actions/checkout@v4',
    },
    {
      name: 'run tests',
      run: 'bash -e scripts/github-actions-run-tests-script.sh',
      env: {
        STEP_FUNCTION_ARN: '${{ secrets.STEP_FUNCTION_ARN }}',
        LOG_GROUP_NAME: '${{ secrets.LOG_GROUP_NAME }}',
      },
    },
  ];

  workflow.addJobs({
    'run-tests': {
      runsOn: ['ubuntu-22.04'],
      // id-token: write is required for the OIDC credential exchange above.
      permissions: {
        idToken: JobPermission.WRITE,
        contents: JobPermission.READ,
      },
      steps,
    },
  });
}
Original file line number Diff line number Diff line change
Expand Up @@ -11,13 +11,14 @@
cognito = boto3.client('cognito-idp')
region = os.environ['AWS_REGION']


class CognitoIdentityProviderManagement():
def delete_control_plane_idp(self, userPoolId):
def delete_control_plane_idp(self, userPoolId):
response = cognito.describe_user_pool(
UserPoolId=userPoolId
)
domain = response['UserPool']['Domain']

cognito.delete_user_pool_domain(
UserPoolId=userPoolId,
Domain=domain
Expand All @@ -29,12 +30,13 @@ def create_control_plane_idp(self, event):
idp_response['idp'] = {}
user_details = {}
control_plane_callback_url = event['ControlPlaneCallbackURL']
user_pool_name = event['UserPoolName']
user_details['email'] = event['SystemAdminEmail']
user_details['userRole'] = event['SystemAdminRoleName']
user_details['userName'] = 'admin'

user_pool_response = self.__create_user_pool(
'SaaSControlPlaneUserPool', control_plane_callback_url)
user_pool_name, control_plane_callback_url)
logger.info(user_pool_response)
user_pool_id = user_pool_response['UserPool']['Id']

Expand All @@ -45,7 +47,8 @@ def create_control_plane_idp(self, event):
self.__create_user_pool_domain(user_pool_id, user_pool_domain)
tenant_user_group_response = user_management_util.create_user_group(user_pool_id, user_details['userRole'])
user_management_util.create_user(user_pool_id, user_details)
user_management_util.add_user_to_group(user_pool_id, user_details['userName'], tenant_user_group_response['Group']['GroupName'])
user_management_util.add_user_to_group(
user_pool_id, user_details['userName'], tenant_user_group_response['Group']['GroupName'])

idp_response['idp']['name'] = 'Cognito'
idp_response['idp']['userPoolId'] = user_pool_id
Expand Down
5 changes: 4 additions & 1 deletion resources/functions/auth-custom-resource/index.py
Original file line number Diff line number Diff line change
Expand Up @@ -25,6 +25,7 @@ def do_action(event, _):
idp_input['ControlPlaneCallbackURL'] = event['ResourceProperties']['ControlPlaneCallbackURL']
idp_input['SystemAdminRoleName'] = event['ResourceProperties']['SystemAdminRoleName']
idp_input['SystemAdminEmail'] = event['ResourceProperties']['SystemAdminEmail']
idp_input['UserPoolName'] = event['ResourceProperties']['UserPoolName']

idpDetails = idp_mgmt_service.create_control_plane_idp(idp_input)
response = json.dumps(idpDetails)
Expand All @@ -40,13 +41,15 @@ def do_action(event, _):
except Exception as e:
raise e


@helper.delete
def do_delete(event, _):
try:
try:
userPoolId = event['PhysicalResourceId']
idp_mgmt_service.delete_control_plane_idp(userPoolId)
except Exception as e:
raise e


def handler(event, context):
helper(event, context)
77 changes: 77 additions & 0 deletions scripts/github-actions-run-tests-script.sh
Original file line number Diff line number Diff line change
@@ -0,0 +1,77 @@
#!/bin/bash
# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
# SPDX-License-Identifier: Apache-2.0

# Starts a Step Functions execution, tails its CloudWatch logs until the
# execution finishes, then exits 0 only when the execution SUCCEEDED and the
# tests it ran reported success (testResult == "0" in the execution output).
#
# Required env variables
# - $STEP_FUNCTION_ARN - ARN of the Step Function to trigger
# - $LOG_GROUP_NAME - Name of the CloudWatch Log Group to tail logs from

# Check if required environment variables are set
if [ -z "$STEP_FUNCTION_ARN" ]; then
    echo "Error: STEP_FUNCTION_ARN is not set"
    exit 1
fi

if [ -z "$LOG_GROUP_NAME" ]; then
    echo "Error: LOG_GROUP_NAME is not set"
    exit 1
fi

# Get the current timestamp in UTC format (start of the first log window)
TIMESTAMP=$(date -u +"%Y-%m-%dT%H:%M:%SZ")

# Start the Step Functions execution
# The --query option extracts the executionArn from the response and assigns it to EXECUTION_ARN
EXECUTION_ARN=$(aws stepfunctions start-execution \
    --state-machine-arn "$STEP_FUNCTION_ARN" \
    --query 'executionArn' \
    --output text)

# Fail fast if the execution could not be started; otherwise the polling
# loop below would spin on describe-execution errors with an empty ARN.
if [ -z "$EXECUTION_ARN" ] || [ "$EXECUTION_ARN" == "None" ]; then
    echo "Error: failed to start Step Functions execution for $STEP_FUNCTION_ARN"
    exit 1
fi

# Get the execution name from the execution ARN; it prefixes the log streams
EXECUTION_NAME=$(aws stepfunctions describe-execution \
    --execution-arn "$EXECUTION_ARN" \
    --query 'name' \
    --output text)

# Loop until the Step Function execution is complete
while true; do
    # Get the current status of the Step Function execution
    STATUS=$(aws stepfunctions describe-execution \
        --execution-arn "$EXECUTION_ARN" \
        --query 'status' \
        --output text)

    # Record the start of the next log window BEFORE tailing, so that events
    # emitted while `aws logs tail` is running are not skipped (a small
    # overlap — possible duplicate lines — is preferred over losing logs).
    NEXT_TIMESTAMP=$(date -u +"%Y-%m-%dT%H:%M:%SZ")

    # Tail the logs for the current execution from the specified log group
    # The --log-stream-name-prefix option filters logs by the execution name
    # The --format short option prints logs in a compact format
    # The --since option specifies the start time for the log stream
    aws logs tail "$LOG_GROUP_NAME" --log-stream-name-prefix "$EXECUTION_NAME" --format short --since "$TIMESTAMP"

    # Advance the log window for the next iteration
    TIMESTAMP="$NEXT_TIMESTAMP"

    # If the execution is still running, wait for 5 seconds before checking again
    if [ "$STATUS" == "RUNNING" ]; then
        sleep 5
    else
        # Exit the loop once the execution has reached a terminal state
        break
    fi
done

# Get the final status and test result of the Step Function execution
FINAL_STATUS=$(aws stepfunctions describe-execution \
    --execution-arn "$EXECUTION_ARN" \
    --query 'status' \
    --output text)

# The execution output may itself be a JSON-encoded string; unwrap it with
# jq's try/catch before extracting the testResult field.
TEST_RESULT=$(aws stepfunctions describe-execution \
    --execution-arn "$EXECUTION_ARN" \
    --query 'output' | jq -rc '. as $my_json | try (fromjson) catch $my_json | .testResult')

# Exit with a success (0) or failure (1) code based on the final status and test result
if [ "$FINAL_STATUS" == "SUCCEEDED" ] && [ "$TEST_RESULT" == "0" ]; then
    exit 0
else
    exit 1
fi
70 changes: 70 additions & 0 deletions scripts/parse-password.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,70 @@
# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
# SPDX-License-Identifier: Apache-2.0

import argparse
import boto3
import re
import time
from botocore.exceptions import ClientError

# Parse command line arguments.
# NOTE: the previous `--debug` option used `type=bool`, which is a classic
# argparse footgun: bool() of any non-empty string is True, so even
# `--debug False` enabled debug mode.  Parse the value explicitly instead,
# and also accept a bare `--debug` flag (nargs='?' with const=True).
def _parse_bool(value):
    """Interpret common true/false spellings; anything else is an error."""
    if isinstance(value, bool):
        return value
    lowered = value.strip().lower()
    if lowered in ('true', '1', 'yes', 'y'):
        return True
    if lowered in ('false', '0', 'no', 'n'):
        return False
    raise argparse.ArgumentTypeError(f'invalid boolean value: {value!r}')


parser = argparse.ArgumentParser(description='Wait for an email message and extract password')
parser.add_argument('email', type=str, help='The email address to search for')
parser.add_argument('--bucket', type=str, required=True, help='The S3 bucket name')
parser.add_argument('--prefix', type=str, default='emails/', help='The prefix for email files in the S3 bucket')
parser.add_argument('--max_attempts', type=int, default=30, help='The maximum number of attempts to check for the email')
parser.add_argument('--debug', type=_parse_bool, nargs='?', const=True, default=False,
                    help='Set debug mode for more verbose logs.')
args = parser.parse_args()

# Create an S3 client
s3 = boto3.client('s3')

def check_for_email():
    """Scan objects under the configured S3 prefix for an email addressed to
    ``args.email`` and extract the temporary admin password from its body.

    Returns the password string when found, otherwise None.  S3 client
    errors are printed and treated as "not found yet" so the caller can
    retry.
    """
    try:
        listing = s3.list_objects_v2(Bucket=args.bucket, Prefix=args.prefix)
        for entry in listing.get('Contents', []):
            raw = s3.get_object(Bucket=args.bucket, Key=entry['Key'])['Body'].read()
            try:
                body = raw.decode('utf-8')
            except UnicodeDecodeError:
                # Skip binary / non-UTF-8 objects stored under the prefix.
                if args.debug:
                    print(f"Unable to decode file with name: {entry['Key']}")
                continue

            # Only consider messages addressed to the requested recipient.
            if args.email not in body:
                continue
            if args.debug:
                print(body)

            # Look for the temporary-password sentence in the message body.
            match = re.search(r'with username admin and temporary password (.+)', body)
            if match:
                found = match.group(1)
                if args.debug:
                    print(f"Found password '{found}' for {args.email} in {entry['Key']}")
                return found

    except ClientError as e:
        print(f"Error: {e}")

    return None

# Poll for the email until the password is found or the attempt budget
# (args.max_attempts, with a 10-second pause between attempts) is exhausted.
password = None
for _attempt in range(args.max_attempts):
    password = check_for_email()
    if password:
        break
    time.sleep(10)

# Emit the password on success, or a diagnostic message on timeout.
if password:
    print(password.strip())
else:
    print(f"Maximum attempts ({args.max_attempts}) reached: Email message not found.")
Loading

0 comments on commit 6e7e5b7

Please sign in to comment.