Updates 2024-07-10 - Terraform deploy prep
CHRISCARLON committed Jul 10, 2024
1 parent 3a25d4b commit cccba39
Showing 7 changed files with 128 additions and 7 deletions.
4 changes: 2 additions & 2 deletions .github/workflows/lambda_zipper.yml
@@ -31,7 +31,7 @@ jobs:
cd ./herding_cats_pipelines/lambda_jobs
pip install -r requirements.txt -t ./package
cp *.py ./package/
-cd ./package && zip -r ../lambda_jobs.zip .
+cd ./package && zip -r ../lambda_herding_cats_jobs.zip .
- name: Configure AWS credentials
uses: aws-actions/configure-aws-credentials@…
with:
@@ -41,4 +41,4 @@
- name: Upload to S3
run: |
cd ./herding_cats_pipelines/lambda_jobs
-aws s3 cp lambda_jobs.zip s3://${{ secrets.S3_BUCKET_NAME }}/lambda_jobs.zip
+aws s3 cp lambda_herding_cats_jobs.zip s3://${{ secrets.S3_BUCKET_NAME }}/lambda_herding_cats_jobs.zip
2 changes: 1 addition & 1 deletion .gitignore
@@ -171,9 +171,9 @@ creds.py
/dbt/target/
/dbt/dbt_packages/
dev.duckdb
variables.tf
terraform.tfvars
.terraform.lock.hcl
.terraform
LICENSE.txt
-terraform-provider-aws_v5.50.0_x5
+terraform/.terraform/providers/registry.terraform.io/hashicorp/aws/5.50.0/darwin_arm64
File renamed without changes.
@@ -1,5 +1,7 @@
import json
import boto3
+import botocore
+import botocore.exceptions
import requests

from loguru import logger
@@ -43,7 +45,7 @@ def lambda_handler(event, context) -> json:
try:
secret_name = get_param("herding_cats_param")
secret = get_secret(secret_name)
-bucket_name = secret[secret_name]
+bucket_name = secret["herding_cats_raw_data_bucket"]

url = "https://data.london.gov.uk/api/action/package_search"
response = requests.get(url)
@@ -62,7 +64,7 @@ def lambda_handler(event, context) -> json:
Body=json.dumps(data),
ContentType='application/json'
)
print(f"Data Successfully Dumped to S3://{bucket_name}/{file_name}")
logger.success(f"Data Successfully Dumped")

return {
'statusCode': 200,
@@ -74,7 +76,7 @@
'statusCode': 500,
'body': json.dumps({'error': f'Data fetch error: {str(e)}'})
}
-except boto3.exceptions.BotoError as e:
+except botocore.exceptions.ClientError as e:
print(f"An error occurred while dumping to S3: {str(e)}")
return {
'statusCode': 500,
100 changes: 100 additions & 0 deletions herding_cats_pipelines/terraform/lambda-module/main.tf
@@ -0,0 +1,100 @@
provider "aws" {
region = var.aws_region
}

# LAMBDA FUNCTION
resource "aws_lambda_function" "herding-cats" {
function_name = var.function_name
role = aws_iam_role.lambda_role.arn
handler = "main.lambda_handler"
memory_size = 2048
reserved_concurrent_executions = 1
timeout = 15
runtime = "python3.11"
s3_bucket = var.code_bucket_name
s3_key = "lambda_herding_cats_jobs.zip"
source_code_hash = data.aws_s3_object.lambda_code.etag
}

# IAM role for Lambda
resource "aws_iam_role" "lambda_role" {
name = "${var.function_name}-role"
assume_role_policy = jsonencode({
Version = "2012-10-17"
Statement = [{
Action = "sts:AssumeRole"
Effect = "Allow"
Principal = {
Service = "lambda.amazonaws.com"
}
}]
})
}

# Lambda basic execution policy
resource "aws_iam_role_policy_attachment" "lambda_basic_execution" {
policy_arn = "arn:aws:iam::aws:policy/service-role/AWSLambdaBasicExecutionRole"
role = aws_iam_role.lambda_role.name
}

# S3 read policy for code bucket
resource "aws_iam_policy" "s3_code_access_policy" {
name = "${var.function_name}-s3-code-access-policy"
path = "/"
description = "IAM policy for S3 read access to code bucket from Lambda"
policy = jsonencode({
Version = "2012-10-17"
Statement = [{
Effect = "Allow"
Action = [
"s3:GetObject",
"s3:ListBucket"
]
Resource = [
"arn:aws:s3:::${var.code_bucket_name}",
"arn:aws:s3:::${var.code_bucket_name}/*"
]
}]
})
}

# S3 read/write policy for the data bucket
resource "aws_iam_policy" "s3_data_access_policy" {
name = "${var.function_name}-s3-data-access-policy"
path = "/"
description = "IAM policy for S3 read and write access to the data bucket from Lambda"
policy = jsonencode({
Version = "2012-10-17"
Statement = [{
Effect = "Allow"
Action = [
"s3:GetObject",
"s3:PutObject",
"s3:DeleteObject",
"s3:ListBucket"
]
Resource = [
"arn:aws:s3:::${var.data_bucket_name}",
"arn:aws:s3:::${var.data_bucket_name}/*"
]
}]
})
}

# Attach S3 code access policy to Lambda role
resource "aws_iam_role_policy_attachment" "lambda_s3_code_access" {
policy_arn = aws_iam_policy.s3_code_access_policy.arn
role = aws_iam_role.lambda_role.name
}

# Attach S3 data access policy to Lambda role
resource "aws_iam_role_policy_attachment" "lambda_s3_data_access" {
policy_arn = aws_iam_policy.s3_data_access_policy.arn
role = aws_iam_role.lambda_role.name
}


data "aws_s3_object" "lambda_code" {
bucket = var.code_bucket_name
key = "lambda_herding_cats_jobs.zip"
}
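
For orientation, a root configuration could consume this new module roughly as follows. This is a hedged sketch, not part of the commit: the source path, module label, and literal values are assumptions, not values taken from the repository.

# Hypothetical root-module invocation of the lambda-module above (not in this commit).
module "herding_cats_lambda" {
  source           = "./lambda-module"
  aws_region       = "eu-west-2"                  # placeholder region
  function_name    = "herding-cats-ingest"        # placeholder function name
  code_bucket_name = "example-lambda-code-bucket" # bucket holding lambda_herding_cats_jobs.zip
  data_bucket_name = "example-raw-data-bucket"    # bucket the handler writes JSON to
}

Because main.tf pins s3_key to lambda_herding_cats_jobs.zip, code_bucket_name would need to be the same bucket the lambda_zipper.yml workflow uploads the zip to.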
19 changes: 19 additions & 0 deletions herding_cats_pipelines/terraform/lambda-module/variables.tf
@@ -0,0 +1,19 @@
variable "aws_region" {
description = "The AWS region to deploy to"
type = string
}

variable "function_name" {
description = "The name of the Lambda function"
type = string
}

variable "code_bucket_name" {
description = "Name of the existing S3 bucket containing Lambda function code"
type = string
}

variable "data_bucket_name" {
description = "Name of the S3 bucket for Lambda function data operations"
type = string
}
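
Alternatively, since main.tf declares its own provider block, the lambda-module directory could be applied directly as a root configuration, with these variables supplied from a local terraform.tfvars, which the .gitignore hunk above keeps out of the repository. A hypothetical example, with placeholder values only:

# Hypothetical terraform.tfvars (placeholders; the real file is gitignored)
aws_region       = "eu-west-2"
function_name    = "herding-cats-ingest"
code_bucket_name = "example-lambda-code-bucket"
data_bucket_name = "example-raw-data-bucket"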
2 changes: 1 addition & 1 deletion makefile
@@ -11,7 +11,7 @@ git-add:

git-commit:
@read -p "Please enter an additional commit message: " msg; \
git commit -m "updates $(DATE) - $$msg"
git commit -m "Updates $(DATE) - $$msg"

git-push:
git push
