Skip to content

Commit

Permalink
Add needed variables for padre
Browse files Browse the repository at this point in the history
  • Loading branch information
dbarrous-navteca committed Jun 26, 2024
1 parent 4d1c7a1 commit 0faa63f
Show file tree
Hide file tree
Showing 8 changed files with 161 additions and 28 deletions.
66 changes: 66 additions & 0 deletions terraform/buildspec.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,66 @@
# CodeBuild buildspec for linting, building and pushing the artifacts-lambda
# Docker image. buildspec v0.2 runs all commands of a build in the same shell
# instance, so variables and sourced environments persist between commands.
version: 0.2

phases:
  pre_build:
    commands:
      - echo Installing CI/CD Dependencies...
      - python -m venv venv
      # Source the venv directly in the build shell. The previous
      # `echo "source venv/bin/activate" > activate.sh && bash activate.sh`
      # activated the venv in a child shell that exited immediately, so the
      # pip installs below ran against the system interpreter instead.
      - . venv/bin/activate
      - pip3 install --upgrade pip setuptools wheel
      - pip3 install -r requirements.txt
      - echo ________________________________

      - echo Linting with Black...
      - black --check --diff lambda_function/
      - echo ________________________________

      - echo Linting with Flake...
      - flake8 --count --max-line-length 100 lambda_function/
      - echo ________________________________

  build:
    commands:
      - REGION=us-east-1
      # Resolve the account id once, before the registry login, and reuse it
      # everywhere. The login previously hard-coded 351967858401 while the
      # repository URIs used $ACCOUNT_ID, which breaks in any other account.
      - ACCOUNT_ID=$(aws sts get-caller-identity --query 'Account' --output text)
      - echo Login to Private ECR $REGION
      - aws ecr get-login-password --region $REGION | docker login --username AWS --password-stdin $ACCOUNT_ID.dkr.ecr.$REGION.amazonaws.com
      - echo ________________________________

      # Select target repo, environment and version tag:
      #   tag push            -> production repo, tag name as version
      #   CDK_ENVIRONMENT set -> production repo, timestamp version
      #   otherwise           -> development repo, timestamp version
      - |
        if git describe --tags --exact-match > /dev/null 2>&1; then
          echo "This is a tag push event"
          ECR_REPO="$ACCOUNT_ID.dkr.ecr.$REGION.amazonaws.com/sdc_aws_artifacts_lambda"
          CDK_ENVIRONMENT=PRODUCTION
          VERSION=$(git describe --tags --exact-match)
        elif [[ "${CDK_ENVIRONMENT}" == "PRODUCTION" ]]; then
          echo "This is a production environment"
          ECR_REPO="$ACCOUNT_ID.dkr.ecr.$REGION.amazonaws.com/sdc_aws_artifacts_lambda"
          CDK_ENVIRONMENT=PRODUCTION
          VERSION=$(date -u +"%Y%m%d%H%M%S")
        else
          echo "This is a development environment"
          ECR_REPO="$ACCOUNT_ID.dkr.ecr.$REGION.amazonaws.com/dev-sdc_aws_artifacts_lambda"
          CDK_ENVIRONMENT=DEVELOPMENT
          VERSION=$(date -u +"%Y%m%d%H%M%S")
        fi
      - echo ________________________________
      - echo Build Docker Image
      - docker build --build-arg PUBLIC_ECR_REPO=$PUBLIC_ECR_REPO -t $ECR_REPO:latest lambda_function/.

      - echo Tagging Docker Image...
      - docker tag $ECR_REPO:latest $ECR_REPO:$VERSION

      - echo Pushing the Docker image with Tags...
      - docker push $ECR_REPO:latest
      - docker push $ECR_REPO:$VERSION
      - echo ________________________________

      - echo Updating Deployment
      - echo ________________________________
      # - aws codebuild start-build --project-name arn:aws:codebuild:us-east-2:351967858401:project/build_sdc_aws_pipeline_architecture --environment-variables-override name=CDK_ENVIRONMENT,value=$CDK_ENVIRONMENT,type=PLAINTEXT

  post_build:
    commands:
      - echo Build Successful - Lambda Successfully Built and Pushed to ECR
File renamed without changes.
9 changes: 6 additions & 3 deletions terraform/main.tf
Original file line number Diff line number Diff line change
Expand Up @@ -31,23 +31,26 @@ data "aws_caller_identity" "current" {}

// Locals for SDC Pipeline
locals {
is_production = terraform.workspace == "prod"
workspace_prefix = split("-", terraform.workspace)[0]

is_production = local.workspace_prefix == "prod"

environment_short_name = {
default = "dev-"
dev = "dev-"
prod = ""
}[terraform.workspace]
}[local.workspace_prefix]

environment_full_name = {
default = "Development"
dev = "Development"
prod = "Production"
}[terraform.workspace]
}[local.workspace_prefix]

standard_tags = {
"Environment" = local.environment_full_name
"Purpose" = "SWSOC Pipeline"
"Project" = var.mission_name
}

data_levels = slice(var.valid_data_levels, 0, length(var.valid_data_levels))
Expand Down
46 changes: 46 additions & 0 deletions terraform/padre.tfvars
Original file line number Diff line number Diff line change
@@ -0,0 +1,46 @@
// Variable values for the SDC AWS Pipeline — PADRE mission deployment.
// Keys here must match the variable declarations in the module's variables file.

# AWS Deployment Region
# Region that all pipeline resources will be deployed to
deployment_region = "us-east-1"

# Mission Name
# Used to dynamically name/prefix mission resources (buckets, roles, DBs)
mission_name = "padre"

# Instrument Names Used in the Mission
# One instrument bucket is created per entry in this list
instrument_names = ["meddea", "sharp"]

# Valid Data Levels
# Ordered list of data-product levels recognized by the pipeline
valid_data_levels = ["l0", "l1", "ql"]

# Timestream Database and Table Names for Logs
# Created to store pipeline/S3 log records
timestream_database_name = "padre_sdc_aws_logs"
timestream_s3_logs_table_name = "padre_sdc_aws_s3_bucket_log_table"

# S3 Incoming Bucket Name
# Landing bucket for files entering the pipeline
incoming_bucket_name = "padre-swsoc-incoming"

# Sorting Lambda ECR Repository Name
# Private ECR repository holding the sorting lambda image
sorting_function_private_ecr_name = "padre_sdc_aws_sorting_lambda"

# Artifacts Lambda ECR Repository Name
# Private ECR repository holding the artifacts lambda image
artifacts_function_private_ecr_name = "padre_sdc_aws_artifacts_lambda"

# S3 Server Access Logs Bucket
# Destination bucket for S3 server-access logs
s3_server_access_logs_bucket_name = "padre-swsoc-s3-server-access-logs"

# Processing Lambda ECR Repository Name
# Private ECR repository holding the processing lambda image
processing_function_private_ecr_name = "padre_sdc_aws_processing_lambda"

# Docker Base ECR Repository Name
# Public ECR repository holding the docker lambda base image
docker_base_public_ecr_name = "padre-swsoc-docker-lambda-base"
15 changes: 10 additions & 5 deletions terraform/sdc_aws_artifacts_lambda_function.tf
Original file line number Diff line number Diff line change
@@ -1,12 +1,12 @@
// Resources for Processing Artifacts Lambda function, RDS DB for CDFTracker, triggers and the necessary IAM permissions
# // Resources for Processing Artifacts Lambda function, RDS DB for CDFTracker, triggers and the necessary IAM permissions


//////////////////////////////////////////
// S3 Processing Artifacts Lambda Function
//////////////////////////////////////////

resource "aws_lambda_function" "aws_sdc_artifacts_lambda_function" {
function_name = "${local.environment_short_name}aws_sdc_artifacts_lambda_function"
function_name = "${local.environment_short_name}${var.artifacts_function_private_ecr_name}_function"
role = aws_iam_role.artifacts_lambda_exec.arn
memory_size = 128
timeout = 900
Expand All @@ -23,6 +23,8 @@ resource "aws_lambda_function" "aws_sdc_artifacts_lambda_function" {
RDS_DATABASE = aws_db_instance.rds_instance.db_name
SDC_AWS_SLACK_TOKEN = var.slack_token
SDC_AWS_SLACK_CHANNEL = var.slack_channel
SWXSOC_MISSION = var.mission_name
SWXSOC_INCOMING_BUCKET = var.incoming_bucket_name
}
}
ephemeral_storage {
Expand All @@ -41,6 +43,9 @@ resource "aws_lambda_function" "aws_sdc_artifacts_lambda_function" {
]
}

tags = local.standard_tags


}


Expand All @@ -51,7 +56,7 @@ resource "aws_lambda_function" "aws_sdc_artifacts_lambda_function" {
# Create Lambda permissions for each prefix
resource "aws_lambda_permission" "af_allow_instrument_buckets" {
for_each = toset(local.instrument_bucket_names) # Convert to a set to ensure unique permissions
statement_id = "PF${local.environment_full_name}AllowExecutionFromS3Bucket-${each.key}"
statement_id = "PF${local.environment_full_name}${upper(var.mission_name)}AllowExecutionFromS3Bucket-${each.key}"
action = "lambda:InvokeFunction"
function_name = aws_lambda_function.aws_sdc_artifacts_lambda_function.function_name
principal = "s3.amazonaws.com"
Expand All @@ -61,7 +66,7 @@ resource "aws_lambda_permission" "af_allow_instrument_buckets" {
# Create Lambda permissions to be invoked by topic
resource "aws_lambda_permission" "af_allow_sns_topic" {
for_each = toset(local.instrument_bucket_names) # Convert to a set to ensure unique permissions
statement_id = "PF${local.environment_full_name}AllowExecutionFromSNSTopic-${each.key}"
statement_id = "PF${local.environment_full_name}${upper(var.mission_name)}AllowExecutionFromSNSTopic-${each.key}"
action = "lambda:InvokeFunction"
function_name = aws_lambda_function.aws_sdc_artifacts_lambda_function.function_name
principal = "sns.amazonaws.com"
Expand All @@ -87,7 +92,7 @@ resource "aws_sns_topic_subscription" "af_sns_topic_subscription" {

// Create an IAM role for the Lambda function
resource "aws_iam_role" "artifacts_lambda_exec" {
name = "${local.environment_short_name}artifacts_lambda_exec_role"
name = "${local.environment_short_name}${upper(var.mission_name)}_artifacts_lambda_exec_role"

assume_role_policy = jsonencode({
Version = "2012-10-17",
Expand Down
10 changes: 5 additions & 5 deletions terraform/sdc_aws_pipeline_infrastructure.tf
Original file line number Diff line number Diff line change
Expand Up @@ -230,7 +230,7 @@ resource "aws_ecrpublic_repository" "docker_base_public_ecr" {

// Timestream Access Policy
resource "aws_iam_policy" "timestream_policy" {
name = "${local.environment_full_name}TimestreamAccessPolicy"
name = "${local.environment_full_name}${upper(var.mission_name)}TimestreamAccessPolicy"
description = "Provides access to Timestream"

policy = jsonencode({
Expand All @@ -252,7 +252,7 @@ resource "aws_iam_policy" "timestream_policy" {

// Logs Access Policy
resource "aws_iam_policy" "logs_access_policy" {
name = "${local.environment_full_name}LogsAccessPolicy"
name = "${local.environment_full_name}${upper(var.mission_name)}LogsAccessPolicy"
description = "Provides access to CloudWatch Logs"

policy = jsonencode({
Expand All @@ -274,7 +274,7 @@ resource "aws_iam_policy" "logs_access_policy" {

// S3 Bucket Access Policy
resource "aws_iam_policy" "s3_bucket_access_policy" {
name = "${local.environment_full_name}S3BucketAccessPolicy"
name = "${local.environment_full_name}${upper(var.mission_name)}S3BucketAccessPolicy"
description = "Provides access to specific S3 buckets"

policy = jsonencode({
Expand Down Expand Up @@ -305,7 +305,7 @@ resource "aws_iam_policy" "s3_bucket_access_policy" {

// Define a policy that grants Lambda permission to access the secret
resource "aws_iam_policy" "lambda_secrets_manager_policy" {
name_prefix = "${local.environment_short_name}lambda_secrets_manager_policy_"
name_prefix = "${local.environment_short_name}_${var.mission_name}_lambda_secrets_manager_policy_"

// Define the permissions for accessing the secret
policy = jsonencode({
Expand All @@ -324,7 +324,7 @@ resource "aws_iam_policy" "lambda_secrets_manager_policy" {

// Define a policy that grants Lambda permission to access the KMS key
resource "aws_iam_policy" "lambda_kms_policy" {
name_prefix = "${local.environment_short_name}lambda_kms_policy_"
name_prefix = "${local.environment_short_name}${var.mission_name}_lambda_kms_policy_"

// Define the permissions for accessing the KMS key
policy = jsonencode({
Expand Down
31 changes: 21 additions & 10 deletions terraform/sdc_aws_processing_lambda_function.tf
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,7 @@
///////////////////////////////////////

resource "aws_lambda_function" "aws_sdc_processing_lambda_function" {
function_name = "${local.environment_short_name}aws_sdc_processing_lambda_function"
function_name = "${local.environment_short_name}${var.processing_function_private_ecr_name}_function"
role = aws_iam_role.processing_lambda_exec.arn
memory_size = 128
timeout = 900
Expand All @@ -16,7 +16,12 @@ resource "aws_lambda_function" "aws_sdc_processing_lambda_function" {

environment {
variables = {
LAMBDA_ENVIRONMENT = upper(local.environment_full_name)
LAMBDA_ENVIRONMENT = upper(local.environment_full_name)
SPACEPY = "/tmp"
SUNPY_CONFIGDIR = "/tmp"
SUNPY_DOWNLOADDIR = "/tmp"
SWXSOC_MISSION = var.mission_name
SWXSOC_INCOMING_BUCKET = var.incoming_bucket_name
}
}
ephemeral_storage {
Expand All @@ -27,6 +32,7 @@ resource "aws_lambda_function" "aws_sdc_processing_lambda_function" {
mode = "PassThrough"
}

tags = local.standard_tags
}


Expand All @@ -36,8 +42,13 @@ resource "aws_lambda_function" "aws_sdc_processing_lambda_function" {

// Generate a random password
resource "random_password" "password" {
length = 16
special = true
length = 32
special = false

# Ignore changes to the special variable
lifecycle {
ignore_changes = [length, special]
}
}

// KMS key used by Secrets Manager for RDS
Expand All @@ -53,7 +64,7 @@ resource "aws_kms_key" "default" {
// Create a secret in Secrets Manager
resource "aws_secretsmanager_secret" "rds_secret" {
kms_key_id = aws_kms_key.default.key_id
name = "${local.environment_short_name}rds-credentials"
name = "${local.environment_short_name}${var.mission_name}-rds-credentials"
description = "RDS Credentials"
recovery_window_in_days = 0

Expand All @@ -78,9 +89,9 @@ resource "aws_db_instance" "rds_instance" {
allocated_storage = 30
storage_type = "gp2"
engine = "postgres"
engine_version = "14.7"
engine_version = "14.10"
instance_class = "db.t3.micro"
db_name = local.is_production ? "hermes_db" : "dev_hermes_db"
db_name = local.is_production ? "${var.mission_name}_db" : "dev_${var.mission_name}_db"

username = "cdftracker_user"
password = random_password.password.result
Expand All @@ -105,7 +116,7 @@ resource "aws_db_instance" "rds_instance" {
# Create Lambda permissions for each prefix
resource "aws_lambda_permission" "pf_allow_instrument_buckets" {
for_each = toset(local.instrument_bucket_names) # Convert to a set to ensure unique permissions
statement_id = "PF${local.environment_full_name}AllowExecutionFromS3Bucket-${each.key}"
statement_id = "PF${local.environment_full_name}${upper(var.mission_name)}AllowExecutionFromS3Bucket-${each.key}"
action = "lambda:InvokeFunction"
function_name = aws_lambda_function.aws_sdc_processing_lambda_function.function_name
principal = "s3.amazonaws.com"
Expand All @@ -115,7 +126,7 @@ resource "aws_lambda_permission" "pf_allow_instrument_buckets" {
# Create Lambda permissions to be invoked by topic
resource "aws_lambda_permission" "pf_allow_sns_topic" {
for_each = toset(local.instrument_bucket_names) # Convert to a set to ensure unique permissions
statement_id = "PF${local.environment_full_name}AllowExecutionFromSNSTopic-${each.key}"
statement_id = "PF${local.environment_full_name}${upper(var.mission_name)}AllowExecutionFromSNSTopic-${each.key}"
action = "lambda:InvokeFunction"
function_name = aws_lambda_function.aws_sdc_processing_lambda_function.function_name
principal = "sns.amazonaws.com"
Expand Down Expand Up @@ -159,7 +170,7 @@ resource "aws_sns_topic_subscription" "pf_sns_topic_subscription" {

// Create an IAM role for the Lambda function
resource "aws_iam_role" "processing_lambda_exec" {
name = "${local.environment_short_name}processing_lambda_exec_role"
name = "${local.environment_short_name}${var.mission_name}_processing_lambda_exec_role"

assume_role_policy = jsonencode({
Version = "2012-10-17",
Expand Down
Loading

0 comments on commit 0faa63f

Please sign in to comment.