From 9801089f100abd1c85b9fa7cbb217da7f105cb92 Mon Sep 17 00:00:00 2001 From: Nhat-Thanh Nguyen Date: Tue, 28 Jan 2025 09:38:16 +1300 Subject: [PATCH 1/2] [Workshop] Local Terraform Module Development and Testing --- modules/terraform-aws-cicd/.gitignore | 29 ++ modules/terraform-aws-cicd/README.md | 3 + modules/terraform-aws-cicd/backend.tf | 63 ++++ modules/terraform-aws-cicd/codebuild.tf | 43 +++ modules/terraform-aws-cicd/codecommit.tf | 16 + modules/terraform-aws-cicd/codepipeline.tf | 51 +++ modules/terraform-aws-cicd/data.tf | 7 + modules/terraform-aws-cicd/eventbridge.tf | 115 +++++++ modules/terraform-aws-cicd/iam.tf | 197 +++++++++++ modules/terraform-aws-cicd/outputs.tf | 12 + modules/terraform-aws-cicd/s3.tf | 32 ++ modules/terraform-aws-cicd/tests/README.md | 3 + .../terraform-aws-cicd/tests/main.tftest.hcl | 311 ++++++++++++++++++ modules/terraform-aws-cicd/variables.tf | 188 +++++++++++ 14 files changed, 1070 insertions(+) create mode 100644 modules/terraform-aws-cicd/.gitignore create mode 100644 modules/terraform-aws-cicd/README.md create mode 100644 modules/terraform-aws-cicd/backend.tf create mode 100644 modules/terraform-aws-cicd/codebuild.tf create mode 100644 modules/terraform-aws-cicd/codecommit.tf create mode 100644 modules/terraform-aws-cicd/codepipeline.tf create mode 100644 modules/terraform-aws-cicd/data.tf create mode 100644 modules/terraform-aws-cicd/eventbridge.tf create mode 100644 modules/terraform-aws-cicd/iam.tf create mode 100644 modules/terraform-aws-cicd/outputs.tf create mode 100644 modules/terraform-aws-cicd/s3.tf create mode 100644 modules/terraform-aws-cicd/tests/README.md create mode 100644 modules/terraform-aws-cicd/tests/main.tftest.hcl create mode 100644 modules/terraform-aws-cicd/variables.tf diff --git a/modules/terraform-aws-cicd/.gitignore b/modules/terraform-aws-cicd/.gitignore new file mode 100644 index 0000000..397af32 --- /dev/null +++ b/modules/terraform-aws-cicd/.gitignore @@ -0,0 +1,29 @@ +# Local .terraform 
directories +**/.terraform/* + +# Terraform lockfile +.terraform.lock.hcl + +# .tfstate files +*.tfstate +*.tfstate.* + +# Crash log files +crash.log + +# Exclude all .tfvars files, which are likely to contain sensitive data, such as +# passwords, private keys, and other secrets. These should not be part of version +# control as they are data points which are potentially sensitive and subject +# to change depending on the environment. +*.tfvars + +# Ignore override files as they are usually used to override resources locally and so +# are not checked in +override.tf +override.tf.json +*_override.tf +*_override.tf.json + +# Ignore CLI configuration files +.terraformrc +terraform.rc diff --git a/modules/terraform-aws-cicd/README.md b/modules/terraform-aws-cicd/README.md new file mode 100644 index 0000000..ba77f98 --- /dev/null +++ b/modules/terraform-aws-cicd/README.md @@ -0,0 +1,3 @@ +# Terraform CI/CD and Testing on AWS with the new Terraform Test Framework + + diff --git a/modules/terraform-aws-cicd/backend.tf b/modules/terraform-aws-cicd/backend.tf new file mode 100644 index 0000000..90e8bab --- /dev/null +++ b/modules/terraform-aws-cicd/backend.tf @@ -0,0 +1,63 @@ +## Dynamically create resources for S3 Remote Backend (Amazon S3 and DynamoDB) +resource "random_string" "tf_remote_state_s3_buckets" { + for_each = var.tf_remote_state_resource_configs == null ? {} : var.tf_remote_state_resource_configs + length = 4 + special = false + upper = false +} + +resource "aws_s3_bucket" "tf_remote_state_s3_buckets" { + for_each = var.tf_remote_state_resource_configs == null ?
{} : var.tf_remote_state_resource_configs + bucket = "${each.value.prefix}-tf-state-${random_string.tf_remote_state_s3_buckets[each.key].result}" + force_destroy = true + + # - Challenge: resolve Checkov issues - + #checkov:skip=CKV2_AWS_62: "Ensure S3 buckets should have event notifications enabled" + #checkov:skip=CKV2_AWS_61: "Ensure that an S3 bucket has a lifecycle configuration" + #checkov:skip=CKV_AWS_144: "Ensure that S3 bucket has cross-region replication enabled" + #checkov:skip=CKV_AWS_18: "Ensure the S3 bucket has access logging enabled" + #checkov:skip=CKV_AWS_145: "Ensure that S3 buckets are encrypted with KMS by default" + # +} + +resource "aws_s3_bucket_versioning" "tf_remote_state_s3_buckets" { + for_each = var.tf_remote_state_resource_configs == null ? {} : var.tf_remote_state_resource_configs + bucket = aws_s3_bucket.tf_remote_state_s3_buckets[each.key].id + versioning_configuration { + status = "Enabled" + } +} + +resource "aws_s3_bucket_public_access_block" "tf_remote_state_s3_buckets_pabs" { + for_each = var.tf_remote_state_resource_configs == null ? {} : var.tf_remote_state_resource_configs + bucket = aws_s3_bucket.tf_remote_state_s3_buckets[each.key].id + + block_public_acls = var.s3_public_access_block + block_public_policy = var.s3_public_access_block + ignore_public_acls = var.s3_public_access_block + restrict_public_buckets = var.s3_public_access_block +} + +# Terraform State Locking +resource "random_string" "tf_remote_state_lock_tables" { + for_each = var.tf_remote_state_resource_configs == null ? {} : var.tf_remote_state_resource_configs + length = 4 + special = false + upper = false +} +resource "aws_dynamodb_table" "tf_remote_state_lock_tables" { + for_each = var.tf_remote_state_resource_configs == null ? 
{} : var.tf_remote_state_resource_configs + name = "${each.value.prefix}-tf-state-lock-${random_string.tf_remote_state_lock_tables[each.key].result}" + billing_mode = each.value.ddb_billing_mode + hash_key = each.value.ddb_hash_key + + attribute { + name = each.value.ddb_hash_key + type = "S" + } + + # - Challenge: resolve Checkov issues - + #checkov:skip=CKV_AWS_28: "Ensure DynamoDB point in time recovery (backup) is enabled" + #checkov:skip=CKV_AWS_119: "Ensure DynamoDB Tables are encrypted using a KMS Customer Managed CMK" + #checkov:skip=CKV2_AWS_16: "Ensure that Auto Scaling is enabled on your DynamoDB tables" +} diff --git a/modules/terraform-aws-cicd/codebuild.tf b/modules/terraform-aws-cicd/codebuild.tf new file mode 100644 index 0000000..4a9a4f8 --- /dev/null +++ b/modules/terraform-aws-cicd/codebuild.tf @@ -0,0 +1,43 @@ +## Dynamically create AWS CodeBuild Projects +resource "aws_codebuild_project" "codebuild" { + + for_each = var.codebuild_projects == null ? {} : var.codebuild_projects + + name = each.value.name + description = each.value.description + build_timeout = each.value.build_timeout + service_role = var.codebuild_service_role_arn != null ? var.codebuild_service_role_arn : aws_iam_role.codebuild_service_role[0].arn + + environment { + compute_type = each.value.env_compute_type + image = each.value.env_image + type = each.value.env_type + } + + source { + type = each.value.source_type + location = each.value.source_location + git_clone_depth = each.value.source_clone_depth + buildspec = each.value.path_to_build_spec != null ? 
file("${each.value.path_to_build_spec}") : each.value.build_spec + } + + source_version = each.value.source_version + + artifacts { + type = "NO_ARTIFACTS" + } + + tags = merge( + { + "Name" = "${each.value.name}" + }, + var.tags, + ) + + depends_on = [ + aws_codecommit_repository.codecommit + ] + + # - Challenge: resolve Checkov issues - + #checkov:skip=CKV_AWS_314: "Ensure CodeBuild project environments have a logging configuration" +} diff --git a/modules/terraform-aws-cicd/codecommit.tf b/modules/terraform-aws-cicd/codecommit.tf new file mode 100644 index 0000000..1f2e5f1 --- /dev/null +++ b/modules/terraform-aws-cicd/codecommit.tf @@ -0,0 +1,16 @@ +## Dynamically create AWS CodeCommit Repos +resource "aws_codecommit_repository" "codecommit" { + for_each = var.codecommit_repos == null ? {} : var.codecommit_repos + repository_name = each.value.repository_name + description = each.value.description + default_branch = each.value.default_branch + tags = merge( + { + "Name" = "${each.value.repository_name}" + }, + var.tags, + ) + # - Challenge: resolve Checkov issues - + #checkov:skip=CKV2_AWS_37: "Ensure CodeCommit associates an approval rule" + +} diff --git a/modules/terraform-aws-cicd/codepipeline.tf b/modules/terraform-aws-cicd/codepipeline.tf new file mode 100644 index 0000000..2e59f73 --- /dev/null +++ b/modules/terraform-aws-cicd/codepipeline.tf @@ -0,0 +1,51 @@ +## Dynamically create AWS CodePipeline pipelines +resource "aws_codepipeline" "codepipeline" { + for_each = var.codepipeline_pipelines == null ? {} : var.codepipeline_pipelines + + name = each.value.name + pipeline_type = each.value.pipeline_type + role_arn = var.codepipeline_service_role_arn != null ? var.codepipeline_service_role_arn : aws_iam_role.codepipeline_service_role[0].arn + + artifact_store { + location = each.value.existing_s3_bucket_name != null ? 
each.value.existing_s3_bucket_name : aws_s3_bucket.codepipeline_artifacts_buckets[each.key].id + type = "S3" + } + + + dynamic "stage" { + for_each = [for s in each.value.stages : { + name = s.name + action = s.action + } if(lookup(s, "enabled", true))] + + content { + name = stage.value.name + dynamic "action" { + for_each = stage.value.action + content { + name = action.value["name"] + owner = action.value["owner"] + version = action.value["version"] + category = action.value["category"] + provider = action.value["provider"] + input_artifacts = lookup(action.value, "input_artifacts", []) + output_artifacts = lookup(action.value, "output_artifacts", []) + configuration = lookup(action.value, "configuration", {}) + role_arn = lookup(action.value, "role_arn", null) + run_order = lookup(action.value, "run_order", null) + region = lookup(action.value, "region", data.aws_region.current.name) + } + } + } + } + + tags = merge( + { + "Name" = "${each.value.name}" + }, + var.tags, + ) + + ## - Challenge: resolve Checkov issues - + #checkov:skip=CKV_AWS_219: "Ensure Code Pipeline Artifact store is using a KMS CMK" +} diff --git a/modules/terraform-aws-cicd/data.tf b/modules/terraform-aws-cicd/data.tf new file mode 100644 index 0000000..269359d --- /dev/null +++ b/modules/terraform-aws-cicd/data.tf @@ -0,0 +1,7 @@ +## Create Data Source to fetch current AWS Region +## Current AWS region +data "aws_region" "current" {} + +## Current AWS Caller Identity (IAM info) +data "aws_caller_identity" "current" {} + diff --git a/modules/terraform-aws-cicd/eventbridge.tf b/modules/terraform-aws-cicd/eventbridge.tf new file mode 100644 index 0000000..d603f8e --- /dev/null +++ b/modules/terraform-aws-cicd/eventbridge.tf @@ -0,0 +1,115 @@ +## Create resources for EventBridge +## Create Custom Event Bus +resource "random_string" "tf_workshop_event_bus" { + length = 4 + special = false + upper = false +} +resource "aws_cloudwatch_event_bus" "tf_workshop_event_bus" { + name = 
"${var.project_prefix}-event_bus-${random_string.tf_workshop_event_bus.result}" + tags = merge( + { + "Name" = "${var.project_prefix}-event_bus-${random_string.tf_workshop_event_bus.result}" + }, + var.tags, + ) +} +## Create Rule to forward CodeCommit events from Default Event Bus to TF Workshop Event Bus +resource "aws_cloudwatch_event_rule" "default_event_bus_to_tf_workshop_event_bus" { + for_each = var.codepipeline_pipelines + name = "${each.value.name}-default_event_bus_to_${var.project_prefix}-event_bus" + ## An event_bus_name is intentionally not defined so that the default Event Bus will be used. + description = "Send all defined events (CodeCommit) from default event bus to TF Workshop event bus." + role_arn = aws_iam_role.eventbridge_invoke_tf_workshop_event_bus[0].arn + force_destroy = var.eventbridge_rules_enable_force_destroy + event_pattern = jsonencode({ + source = ["aws.codecommit"], + detail-type = [ + "CodeCommit Repository State Change" + ], + resources = [ + "arn:aws:codecommit:${data.aws_region.current.name}:${data.aws_caller_identity.current.account_id}:${each.value.name}", + "arn:aws:codecommit:${data.aws_region.current.name}:${data.aws_caller_identity.current.account_id}:${each.value.name}", + "arn:aws:codecommit:${data.aws_region.current.name}:${data.aws_caller_identity.current.account_id}:${each.value.name}", + ] + detail = { + event = [ + "referenceCreated", + "referenceUpdated" + ] + referenceType = [ + "branch" + ] + referenceName = [ + "main" + ] + } + }) + + tags = merge( + var.tags, + { + "Name" = "${var.project_prefix}-default_event_bus_to_${var.project_prefix}-event_bus" + }, + ) +} +## Create Event Bus Target to send defined events from Default Event Bus to TF Workshop Event Bus +resource "aws_cloudwatch_event_target" "default_event_bus_to_tf_workshop_event_bus" { + for_each = var.codepipeline_pipelines + rule = aws_cloudwatch_event_rule.default_event_bus_to_tf_workshop_event_bus[each.key].name + force_destroy = 
var.eventbridge_rules_enable_force_destroy + target_id = aws_cloudwatch_event_bus.tf_workshop_event_bus.name + arn = aws_cloudwatch_event_bus.tf_workshop_event_bus.arn + role_arn = aws_iam_role.eventbridge_invoke_tf_workshop_event_bus[0].arn +} + +## Invoke CodePipeline +## Create rule to invoke CodePipelines when object is uploaded to the respective S3 Bucket +resource "aws_cloudwatch_event_rule" "invoke_codepipeline" { + for_each = var.codepipeline_pipelines + name = "invoke${each.value.name}-codepipeline" + event_bus_name = aws_cloudwatch_event_bus.tf_workshop_event_bus.name + description = "Invoke CodePipeline when object is uploaded to the respective S3 Bucket." + role_arn = aws_iam_role.eventbridge_invoke_codepipeline.arn + force_destroy = var.eventbridge_rules_enable_force_destroy + event_pattern = jsonencode({ + source = ["aws.codecommit"], + detail-type = [ + "CodeCommit Repository State Change" + ], + resources = [ + "arn:aws:codecommit:${data.aws_region.current.name}:${data.aws_caller_identity.current.account_id}:${each.value.name}", + "arn:aws:codecommit:${data.aws_region.current.name}:${data.aws_caller_identity.current.account_id}:${each.value.name}", + "arn:aws:codecommit:${data.aws_region.current.name}:${data.aws_caller_identity.current.account_id}:${each.value.name}", + ] + detail = { + event = [ + "referenceCreated", + "referenceUpdated" + ] + referenceType = [ + "branch" + ] + referenceName = [ + "main" + ] + } + }) + + tags = merge( + var.tags, + { + "Name" = each.value.name + }, + ) +} +## Create Event Bus Target to invoke CodePipeline when defined events are received on TF Workshop Event Bus +resource "aws_cloudwatch_event_target" "module_validation_codepipeline" { + for_each = var.codepipeline_pipelines + force_destroy = var.eventbridge_rules_enable_force_destroy + rule = aws_cloudwatch_event_rule.invoke_codepipeline[each.key].name + target_id = aws_codepipeline.codepipeline[each.key].name + arn = aws_codepipeline.codepipeline[each.key].arn +
role_arn = aws_iam_role.eventbridge_invoke_codepipeline.arn + event_bus_name = aws_cloudwatch_event_bus.tf_workshop_event_bus.name +} diff --git a/modules/terraform-aws-cicd/iam.tf b/modules/terraform-aws-cicd/iam.tf new file mode 100644 index 0000000..069a0eb --- /dev/null +++ b/modules/terraform-aws-cicd/iam.tf @@ -0,0 +1,197 @@ +## Create resources for IAM +resource "random_string" "random_string" { + length = 4 + special = false + upper = false +} + + +## - Trust Relationships - +## EventBridge +data "aws_iam_policy_document" "eventbridge_trust_relationship" { + statement { + effect = "Allow" + actions = ["sts:AssumeRole"] + principals { + type = "Service" + identifiers = ["events.amazonaws.com"] + } + } +} +## CodeBuild +data "aws_iam_policy_document" "codebuild_trust_relationship" { + statement { + effect = "Allow" + actions = ["sts:AssumeRole"] + principals { + type = "Service" + identifiers = ["codebuild.amazonaws.com"] + } + } +} +## CodePipeline +data "aws_iam_policy_document" "codepipeline_trust_relationship" { + statement { + effect = "Allow" + actions = ["sts:AssumeRole"] + principals { + type = "Service" + identifiers = ["codepipeline.amazonaws.com"] + } + } +} + +## - Policies - +## Eventbridge - Default to TF Workshop Event Bus +data "aws_iam_policy_document" "eventbridge_invoke_tf_workshop_event_bus_policy" { + statement { + effect = "Allow" + actions = [ + "events:PutEvents", + ] + resources = [ + aws_cloudwatch_event_bus.tf_workshop_event_bus.arn, + ] + } +} +resource "aws_iam_policy" "eventbridge_invoke_tf_workshop_event_bus_policy" { + count = var.create_cloudwatch_service_role ? 1 : 0 + name = "${var.project_prefix}-cloudwatch-service-role-policy-${random_string.random_string.result}" + description = "Policy allowing events on the Default Event Bus to invoke the TF Workshop Event Bus." 
+ policy = data.aws_iam_policy_document.eventbridge_invoke_tf_workshop_event_bus_policy.json +} + +## Eventbridge - Invoke CodePipeline +data "aws_iam_policy_document" "eventbridge_invoke_codepipeline_policy" { + statement { + effect = "Allow" + actions = [ + "codepipeline:StartPipelineExecution", + ] + resources = [ + "*" + ] + } + + ## - Challenge: resolve Checkov issues - + #checkov:skip=CKV_AWS_356: "Ensure no IAM policies documents allow "*" as a statement's resource for restrictable actions"" + #checkov:skip=CKV_AWS_111: "Ensure IAM policies does not allow write access without constraints" +} +resource "aws_iam_policy" "eventbridge_invoke_codepipeline_policy" { + name = "${var.project_prefix}-eventbridge-invoke-codepipeline-${random_string.random_string.result}" + description = "Policy that allows EventBridge to invoke the any CodePipelines." + policy = data.aws_iam_policy_document.eventbridge_invoke_codepipeline_policy.json + + tags = merge( + var.tags, + { + Name = "${var.project_prefix}-eventbridge-invoke-codepipeline" + }, + ) +} + +## CodeBuild +data "aws_iam_policy_document" "codebuild_policy" { + count = var.create_codebuild_service_role ? 1 : 0 + statement { + effect = "Allow" + actions = ["s3:*"] + resources = [ + "*", + ] + } + + ## - Challenge: resolve Checkov issues - + #checkov:skip=CKV_AWS_356: "Ensure no IAM policies documents allow "*" as a statement's resource for restrictable actions"" + #checkov:skip=CKV_AWS_111: "Ensure IAM policies does not allow write access without constraints" + #checkov:skip=CKV_AWS_108: "Ensure IAM policies does not allow data exfiltration" + #checkov:skip=CKV_AWS_109: "Ensure IAM policies does not allow permissions management / resource exposure without constraints" +} +resource "aws_iam_policy" "codebuild_policy" { + count = var.create_codebuild_service_role ? 
1 : 0 + name = "${var.project_prefix}-codebuild-service-role-policy${random_string.random_string.result}" + description = "Policy granting AWS CodePipeling restricted access to _____" + policy = data.aws_iam_policy_document.codebuild_policy[0].json +} +## CodePipeline +data "aws_iam_policy_document" "codepipeline_policy" { + statement { + effect = "Allow" + actions = [ + "s3:GetObject", + "s3:GetObjectVersion", + "s3:GetBucketVersioning", + "s3:PutObjectAcl", + "s3:PutObject", + ] + resources = ["*"] + } + + ## - Challenge: resolve Checkov issues - + #checkov:skip=CKV_AWS_356: "Ensure no IAM policies documents allow "*" as a statement's resource for restrictable actions" + #checkov:skip=CKV_AWS_111: "Ensure IAM policies does not allow write access without constraints" + #checkov:skip=CKV_AWS_108: "Ensure IAM policies does not allow data exfiltration" + #checkov:skip=CKV_AWS_109: "Ensure IAM policies does not allow permissions management / resource exposure without constraints" +} +resource "aws_iam_policy" "codepipeline_policy" { + count = var.create_codepipeline_service_role ? 1 : 0 + name = "${var.project_prefix}-codepipeline-service-role-policy-${random_string.random_string.result}" + description = "Policy granting AWS CodePipeline access to Amazon S3." + policy = data.aws_iam_policy_document.codepipeline_policy.json +} + + + +## - IAM Roles - +## EventBridge +resource "aws_iam_role" "eventbridge_invoke_tf_workshop_event_bus" { + count = var.create_cloudwatch_service_role ? 
1 : 0 + name = "${var.project_prefix}-eventbridge-invoke-tf-workshop-event-bus-${random_string.random_string.result}" + assume_role_policy = data.aws_iam_policy_document.eventbridge_trust_relationship.json + managed_policy_arns = [ + "arn:aws:iam::aws:policy/AdministratorAccess", + ] + + ## - Challenge: resolve Checkov issues - + #checkov:skip=CKV_AWS_274: "Disallow IAM roles, users, and groups from using the AWS AdministratorAccess policy" +} + +resource "aws_iam_role" "eventbridge_invoke_codepipeline" { + name = "${var.project_prefix}-eventbridge-invoke-codepipeline-${random_string.random_string.result}" + assume_role_policy = data.aws_iam_policy_document.eventbridge_trust_relationship.json + force_detach_policies = var.enable_force_detach_policies + managed_policy_arns = [ + aws_iam_policy.eventbridge_invoke_codepipeline_policy.arn, + ] + tags = merge( + var.tags, + { + Name = "${var.project_prefix}-eventbridge-invoke-codepipeline" + }, + ) +} + +## CodeBuild +resource "aws_iam_role" "codebuild_service_role" { + count = var.create_codebuild_service_role ? 1 : 0 + name = "${var.project_prefix}-codebuild-service-role-${random_string.random_string.result}" + assume_role_policy = data.aws_iam_policy_document.codebuild_trust_relationship.json + managed_policy_arns = [ + "arn:aws:iam::aws:policy/AdministratorAccess", + ] + + ## - Challenge: resolve Checkov issues - + #checkov:skip=CKV_AWS_274: "Disallow IAM roles, users, and groups from using the AWS AdministratorAccess policy" +} +## CodePipeline +resource "aws_iam_role" "codepipeline_service_role" { + count = var.create_codepipeline_service_role ? 
1 : 0 + name = "${var.project_prefix}-codepipeline-service-role-${random_string.random_string.result}" + assume_role_policy = data.aws_iam_policy_document.codepipeline_trust_relationship.json + managed_policy_arns = [ + "arn:aws:iam::aws:policy/AdministratorAccess", + ] + + ## - Challenge: resolve Checkov issues - + #checkov:skip=CKV_AWS_274: "Disallow IAM roles, users, and groups from using the AWS AdministratorAccess policy" +} diff --git a/modules/terraform-aws-cicd/outputs.tf b/modules/terraform-aws-cicd/outputs.tf new file mode 100644 index 0000000..19c823a --- /dev/null +++ b/modules/terraform-aws-cicd/outputs.tf @@ -0,0 +1,12 @@ +## Create output values for the module +output "tf_state_s3_buckets_names" { + value = tomap({ + for k, bucket in aws_s3_bucket.tf_remote_state_s3_buckets : k => bucket.id + }) +} + +output "tf_state_ddb_table_names" { + value = tomap({ + for k, table in aws_dynamodb_table.tf_remote_state_lock_tables : k => table.id + }) +} diff --git a/modules/terraform-aws-cicd/s3.tf b/modules/terraform-aws-cicd/s3.tf new file mode 100644 index 0000000..5f91262 --- /dev/null +++ b/modules/terraform-aws-cicd/s3.tf @@ -0,0 +1,32 @@ +## Create resources for S3 +resource "random_string" "codepipeline_artifacts_s3_buckets" { + for_each = var.codepipeline_pipelines == null ? {} : var.codepipeline_pipelines + length = 4 + special = false + upper = false +} + +resource "aws_s3_bucket" "codepipeline_artifacts_buckets" { + for_each = var.codepipeline_pipelines == null ? 
{} : var.codepipeline_pipelines + bucket = "pipeline-artifacts-${each.value.name}-${random_string.codepipeline_artifacts_s3_buckets[each.key].result}" + force_destroy = true + + ## - Challenge: resolve Checkov issues - + #checkov:skip=CKV2_AWS_62: "Ensure S3 buckets should have event notifications enabled" + #checkov:skip=CKV2_AWS_61: "Ensure that an S3 bucket has a lifecycle configuration" + #checkov:skip=CKV_AWS_144: "Ensure that S3 bucket has cross-region replication enabled" + #checkov:skip=CKV_AWS_18: "Ensure the S3 bucket has access logging enabled" + #checkov:skip=CKV_AWS_21: "Ensure all data stored in the S3 bucket have versioning enabled" + #checkov:skip=CKV_AWS_145: "Ensure that S3 buckets are encrypted with KMS by default" + +} + +resource "aws_s3_bucket_public_access_block" "codepipeline_bucket_pabs" { + for_each = var.codepipeline_pipelines == null ? {} : var.codepipeline_pipelines + bucket = aws_s3_bucket.codepipeline_artifacts_buckets[each.key].id + + block_public_acls = var.s3_public_access_block + block_public_policy = var.s3_public_access_block + ignore_public_acls = var.s3_public_access_block + restrict_public_buckets = var.s3_public_access_block +} diff --git a/modules/terraform-aws-cicd/tests/README.md b/modules/terraform-aws-cicd/tests/README.md new file mode 100644 index 0000000..c90587b --- /dev/null +++ b/modules/terraform-aws-cicd/tests/README.md @@ -0,0 +1,3 @@ +# Tests + +This is the name of the directory that Terraform expects to contain test files. Terraform test files follow the naming convention `.tftest.hcl` or `.tftest.json` (e.g. `main.tftest.hcl`). diff --git a/modules/terraform-aws-cicd/tests/main.tftest.hcl b/modules/terraform-aws-cicd/tests/main.tftest.hcl new file mode 100644 index 0000000..ba8e9b3 --- /dev/null +++ b/modules/terraform-aws-cicd/tests/main.tftest.hcl @@ -0,0 +1,311 @@ +## Make sure to run `terraform init` in this directory before running `terraform test`. Also, ensure you use constant values (e.g.
string, number, bool, etc.) within your tests where at all possible or you may encounter errors. + +## Configure the AWS Provider +provider "aws" { + region = "us-east-1" +} + +## Global Testing Variables - Define variables to be used in all tests here. You can overwrite these varibles by definig an additional variables block within the 'run' block for your tests +variables { + + ## - Test CodeBuild Projects - + codebuild_projects = { + ## Test Module 1 + tf_test_test_module_1 : { + name = "TerraformTest-test-module-1" + description = "Test Module 1 - Terraform Test" + build_spec = <<-EOF + ## Terraform Test + version: 0.1 + phases: + pre_build: + commands: + - terraform init + - terraform validate + + build: + commands: + - terraform test + EOF + }, + chevkov_test_module_1 : { + name = "Checkov-test-module-1" + description = "Test Module 1 - Checkov" + build_spec = <<-EOF + ## Checkov + version: 0.1 + phases: + pre_build: + commands: + - echo pre_build starting + + build: + commands: + - build starting + - checkov -s -d ./ > checkov.result.txt + + EOF + }, + } + + ## - Test CodePipeline pipelines - + codepipeline_pipelines = { + ## Module Validation Pipelines + ## Terraform Module Validation Pipeline for 'test-module-1' Terraform Module + tf_module_validation_test_module_1 : { + name = "tf-module-validation-test-module-1" + tags = { + "Description" = "Test Module 1.", + } + + stages = [ + ## Clone from S3, store contents in artifacts S3 Bucket + { + name = "Source" + action = [ + { + name = "PullFromS3" + category = "Source" + owner = "AWS" + provider = "S3" + version = "1" + configuration = { + S3Bucket = "git-module-aws-tf-cicd-" + S3ObjectKey = "archive.zip" + PollForSourceChanges = false + } + input_artifacts = [] + ## Store the output of this stage as 'source_output_artifacts' in connected the Artifacts S3 Bucket + output_artifacts = ["source_output_artifacts"] + run_order = 1 + }, + ] + }, + + ## Run Terraform Test Framework + { + name = "Build_TF_Test" + 
action = [ + { + name = "TerraformTest" + category = "Build" + owner = "AWS" + provider = "CodeBuild" + version = "1" + configuration = { + ## Reference existing CodeBuild Project + ProjectName = "TerraformTest-test-module-1" + } + ## Use the 'source_output_artifacts' contents from the Artifacts S3 Bucket + input_artifacts = ["source_output_artifacts"] + ## Store the output of this stage as 'build_tf_test_output_artifacts' in the connected Artifacts S3 Bucket + output_artifacts = ["build_tf_test_output_artifacts"] + + run_order = 1 + }, + ] + }, + + ## Run Checkov + { + name = "Build_Checkov" + action = [ + { + name = "Checkov" + category = "Build" + owner = "AWS" + provider = "CodeBuild" + version = "1" + configuration = { + ## Reference existing CodeBuild Project + ProjectName = "Checkov-test-module-1" + } + ## Use the 'source_output_artifacts' contents from the Artifacts S3 Bucket + input_artifacts = ["source_output_artifacts"] + ## Store the output of this stage as 'build_checkov_output_artifacts' in the connected Artifacts S3 Bucket + output_artifacts = ["build_checkov_output_artifacts"] + + run_order = 1 + }, + ] + }, + ] + }, + } + + ## Test Remote State Resources + tf_remote_state_resource_configs = { + ## Custom Terraform Module Repo + test_tf_remote_state_config_1 : { + prefix = "test-tf-remote-state-config-1" + }, + } +} + +## - Unit Tests - +run "input_validation" { + command = plan + + ## Intentional invalid values to test functionality. These variables overwrite the above variables (helpful for testing). 
+ variables { + ## Intentional project_prefix that is longer than max of 40 characters (overwrite of above global variable) + project_prefix = "this_is_a_project_prefix_and_it_is_over_40_characters_and_will_cause_a_failure" + + ## CodeBuild - Intentional project name that is longer than max of 40 characters + codebuild_projects = { + ## Test Module 1 + tf_test_test_module_1 : { + name = "this_is_a_project_name_and_it_is_longer_than_40_characters" + description = "Test Module 1 - Terraform Test" + build_spec = <<-EOF + ## Terraform Test + version: 0.1 + phases: + pre_build: + commands: + - terraform init + - terraform validate + + build: + commands: + - terraform test + EOF + }, + } + + ## CodePipeline - Intentional pipeline name that is longer than max of 40 characters + codepipeline_pipelines = { + + ## Terraform Module Validation Pipeline for 'test-module-1' Terraform Module + tf_module_validation_test_module_1 : { + name = "this_is_a_pipeline_name_and_it_is_longer_than_40_characters_and_will_fail" + tags = { + "Description" = "Test Module 1.", + + } + + stages = [ + ## Clone from S3, store contents in artifacts S3 Bucket + { + name = "Source" + action = [ + { + name = "PullFromS3" + category = "Source" + owner = "AWS" + provider = "S3" + version = "1" + configuration = { + S3Bucket = "git-module-aws-tf-cicd-" + S3ObjectKey = "archive.zip" + PollForSourceChanges = false + } + input_artifacts = [] + ## Store the output of this stage as 'source_output_artifacts' in connected the Artifacts S3 Bucket + output_artifacts = ["source_output_artifacts"] + run_order = 1 + }, + ] + }, + + ## Run Terraform Test Framework + { + name = "Build_TF_Test" + action = [ + { + name = "TerraformTest" + category = "Build" + owner = "AWS" + provider = "CodeBuild" + version = "1" + configuration = { + ## Reference existing CodeBuild Project + ProjectName = "TerraformTest-test-module-1" + } + ## Use the 'source_output_artifacts' contents from the Artifacts S3 Bucket + input_artifacts = 
["source_output_artifacts"] + ## Store the output of this stage as 'build_tf_test_output_artifacts' in the connected Artifacts S3 Bucket + output_artifacts = ["build_tf_test_output_artifacts"] + + run_order = 1 + }, + ] + }, + + ## Run Checkov + { + name = "Build_Checkov" + action = [ + { + name = "Checkov" + category = "Build" + owner = "AWS" + provider = "CodeBuild" + version = "1" + configuration = { + ## Reference existing CodeBuild Project + ProjectName = "Checkov-test-module-1" + } + ## Use the 'source_output_artifacts' contents from the Artifacts S3 Bucket + input_artifacts = ["source_output_artifacts"] + ## Store the output of this stage as 'build_checkov_output_artifacts' in the connected Artifacts S3 Bucket + output_artifacts = ["build_checkov_output_artifacts"] + + run_order = 1 + }, + ] + }, + ] + }, + } + + ## Terraform Remote State Resources - Intentional prefix that is longer than max of 40 characters + tf_remote_state_resource_configs = { + ## Custom Terraform Module Repo + test_tf_remote_state_config_1 : { + prefix = "this_is_a_prefix_name_and_it_is_longer_than_40_characters_and_will_fail" + }, + } + } + + ## Check for intentional failure of variables defined above. We expect these to fail since we intentionally provided values that do not conform to the validation rules defined in the module's variable.tf file. 
+ expect_failures = [ + var.project_prefix, + var.codebuild_projects, + var.codepipeline_pipelines, + var.tf_remote_state_resource_configs, + ] +} + +## - End-to-end Tests - +run "e2e_test" { + command = apply + + ## Using global variables defined above since additional variables block is not defined here + + ## Assertions + + ## CodeBuild - Ensure projects have correct names after creation + assert { + condition = aws_codebuild_project.codebuild["tf_test_test_module_1"].name == "TerraformTest-test-module-1" + error_message = "The CodeBuild Project name (${aws_codebuild_project.codebuild["tf_test_test_module_1"].name}) didn't match the expected value (TerraformTest-test-module-1)." + } + + ## CodePipeline - Ensure pipelines have correct names after creation + assert { + condition = aws_codepipeline.codepipeline["tf_module_validation_test_module_1"].name == "tf-module-validation-test-module-1" + error_message = "The CodePipeline pipeline name (${aws_codepipeline.codepipeline["tf_module_validation_test_module_1"].name}) didn't match the expected value (tf-module-validation-test-module-1)." + } + + ## S3 Remote State - Ensure S3 Remote State buckets have correct names after creation + assert { + condition = startswith(aws_s3_bucket.tf_remote_state_s3_buckets["test_tf_remote_state_config_1"].id, "test-tf-remote-state-config-1") + error_message = "The S3 Remote State Bucket name (${aws_s3_bucket.tf_remote_state_s3_buckets["test_tf_remote_state_config_1"].id}) did not start with the expected value (test-tf-remote-state-config-1)." 
+ } + + ## DynamoDB Terraform State Lock Table - Ensure DynamoDB Terraform State Lock Tables have correct names after creation + assert { + condition = startswith(aws_dynamodb_table.tf_remote_state_lock_tables["test_tf_remote_state_config_1"].id, "test-tf-remote-state-config-1") + error_message = "The DynamoDB Terraform State Lock table name (${aws_dynamodb_table.tf_remote_state_lock_tables["test_tf_remote_state_config_1"].id}) did not start with the expected value (test-tf-remote-state-config-1)." + } +} diff --git a/modules/terraform-aws-cicd/variables.tf b/modules/terraform-aws-cicd/variables.tf new file mode 100644 index 0000000..231575c --- /dev/null +++ b/modules/terraform-aws-cicd/variables.tf @@ -0,0 +1,188 @@ +## Create Module Input Variables +## - Conditional Logic Variables - +variable "create_codepipeline_artifacts_bucket" { + type = bool + default = true + description = "Conditional creation of CodePipeline artifacts S3 Bucket." + +} +variable "create_codepipeline_service_role" { + type = bool + default = true + description = "Conditional creation of CodePipeline IAM Role." + +} +variable "create_cloudwatch_service_role" { + type = bool + default = true + description = "Conditional creation of Cloudwatch IAM Role." + +} +variable "create_codebuild_service_role" { + type = bool + default = true + description = "Conditional creation of CodeBuild IAM Role." + +} +variable "create_s3_remote_backend" { + type = bool + default = false + description = "Conditional creation of S3 bucket to store Terraform state file." 
+ 
+} 
+ 
+## - Codecommit - 
+variable "codecommit_repos" { 
+  type = map(object({ 
+    repository_name = string 
+    description     = optional(string, null) 
+    default_branch  = optional(string, "main") 
+    tags            = optional(map(any), { "ContentType" = "Terraform Module" }) 
+  })) 
+  description = "Collection of AWS CodeCommit Repositories you wish to create" 
+  default     = {} 
+ 
+  validation { 
+    condition     = alltrue([for repo in values(var.codecommit_repos) : length(repo.repository_name) > 1 && length(repo.repository_name) <= 100]) 
+    error_message = "The name of one of the defined CodeCommit Repositories is too long. Repository names can be a maximum of 100 characters, as the names are used by other resources throughout this module. This can cause deployment failures for AWS resources with smaller character limits for naming. Please ensure all repository names are 100 characters or less, and try again." 
+  } 
+} 
+ 
+## - CodeBuild - 
+variable "codebuild_projects" { 
+  type = map(object({ 
+ 
+    name          = string 
+    description   = optional(string, null) 
+    build_timeout = optional(number, 60) 
+ 
+    env_compute_type = optional(string, "BUILD_GENERAL1_SMALL") 
+    env_image        = optional(string, "hashicorp/terraform:latest") 
+    env_type         = optional(string, "LINUX_CONTAINER") 
+ 
+    source_version     = optional(string, "main") 
+    source_type        = optional(string, "NO_SOURCE") 
+    source_location    = optional(string, null) 
+    source_clone_depth = optional(number, 1) 
+    path_to_build_spec = optional(string, null) 
+    build_spec         = optional(string, null) 
+ 
+    tags = optional(map(any), { "ContentType" = "Terraform Module" }) 
+ 
+  })) 
+  description = "Collection of AWS CodeBuild Projects you wish to create" 
+  default     = {} 
+ 
+  validation { 
+    condition     = alltrue([for project in values(var.codebuild_projects) : length(project.name) > 3 && length(project.name) <= 40]) 
+    error_message = "The name of one of the defined CodeBuild Projects is too long.
Project names can be a maxmium of 40 characters, as the names are used by other resources throughout this module. This can cause deployment failures for AWS resources with smaller character limits for naming. Please ensure all project names are 40 characters or less, and try again." + } +} +variable "codebuild_service_role_arn" { + type = string + default = null + description = "The ARN of the IAM Role you wish to use with CodeBuild." + +} + +## - CodePipeline - +variable "codepipeline_pipelines" { + type = map(object({ + + name = string + pipeline_type = optional(string, "V2") + stages = list(any) + existing_s3_bucket_name = optional(string, null) + event_pattern = optional(string, null) + + + tags = optional(map(any), { "Description" = "Pipeline" }) + + })) + description = "Collection of AWS CodePipeline Pipelines you wish to create" + default = {} + + validation { + condition = alltrue([for pipeline in values(var.codepipeline_pipelines) : length(pipeline.name) > 3 && length(pipeline.name) <= 40]) + error_message = "The name of one of the defined CodePipeline pipelines is too long. Pipeline names can be a maxmium of 40 characters, as the names are used by other resources throughout this module. This can cause deployment failures for AWS resources with smaller character limits for naming. Please ensure all pipeline names are 40 characters or less, and try again." + } +} +variable "codepipeline_service_role_arn" { + type = string + default = null + description = "The ARN of the IAM Role you wish to use with CodePipeline." + +} + + +## - S3 - +variable "existing_s3_bucket_name" { + type = string + default = null + description = "The name of the existing S3 bucket you wish to store the CodePipeline artifacts in." + +} +variable "s3_public_access_block" { + type = bool + default = true + description = "Conditional enabling of S3 Public Access Block." 
+ +} + +## - EventBridge - +variable "eventbridge_rules_enable_force_destroy" { + description = "Enable force destroy on all EventBridge rules. This allows the destruction of all events in the rule." + type = bool + default = true +} + +## - IAM - +variable "enable_force_detach_policies" { + description = "Enable force detaching any policies from IAM roles." + type = bool + default = true +} + + +## Terraform Remote State Resources +## - CodeCommit - +variable "tf_remote_state_resource_configs" { + type = map(object({ + prefix = optional(string, "my-prefix") + ddb_billing_mode = optional(string, "PAY_PER_REQUEST") + ddb_hash_key = optional(string, "LockID") + })) + description = "Configurations for Terraform State Resources" + default = {} + + validation { + condition = alltrue([for config in values(var.tf_remote_state_resource_configs) : length(config.prefix) > 3 && length(config.prefix) <= 40]) + error_message = "The prefix of one of the defined Terraform Remote State Resource Configs is too long. A prefix can be a maxmium of 40 characters, as the names are used by other resources throughout this module. This can cause deployment failures for AWS resources with smaller character limits for naming. Please ensure all prefixes are 40 characters or less, and try again." + } + + validation { + condition = alltrue([for config in values(var.tf_remote_state_resource_configs) : config.ddb_billing_mode == "PAY_PER_REQUEST" || config.ddb_billing_mode == "PROVISIONED"]) + error_message = "The DynamoDB Billing Mode ('ddb_billing_mode') of one of the defined Terraform Remote State Resource Configs is not an accepted value. Valid values for DynamoDB Billing Mode are 'PAY_PER_REQUEST' or 'PROVISIONED'. Please ensure the 'ddb_billing_mode' is set to one of these values and try again." 
+ } +} + +## - Project Prefix - +variable "project_prefix" { + type = string + default = "tf-workshop" + description = "The prefix for the current project" + + validation { + condition = length(var.project_prefix) > 1 && length(var.project_prefix) <= 40 + error_message = "The defined 'project_prefix' has too many characters (${length(var.project_prefix)}). This can cause deployment failures for AWS resources with smaller character limits. Please reduce the character count and try again." + } +} + +## - Tags - +variable "tags" { + type = map(any) + description = "Tags to apply to resources." + default = { + "IAC_PROVIDER" = "Terraform" + } +} From 9bf7dd32efcd00570c8242cb7b332d68a80aa904 Mon Sep 17 00:00:00 2001 From: Nhat-Thanh Nguyen Date: Tue, 28 Jan 2025 09:58:13 +1300 Subject: [PATCH 2/2] [Workshop] Deployment - Terraform CI/CD Pipelines --- .../terraform-aws-cicd/tests/main.tftest.hcl | 4 +- terraform-aws-cicd-pipelines/.gitignore | 29 ++ terraform-aws-cicd-pipelines/README.md | 3 + .../buildspec/checkov-buildspec.yml | 15 + .../buildspec/tf-apply-buildspec.yml | 11 + .../buildspec/tf-plan-buildspec.yml | 12 + .../buildspec/tf-test-buildspec.yml | 11 + terraform-aws-cicd-pipelines/locals.tf | 38 ++ terraform-aws-cicd-pipelines/main.tf | 349 ++++++++++++++++++ terraform-aws-cicd-pipelines/outputs.tf | 16 + terraform-aws-cicd-pipelines/provider.tf | 38 ++ terraform-aws-cicd-pipelines/variables.tf | 8 + 12 files changed, 532 insertions(+), 2 deletions(-) create mode 100644 terraform-aws-cicd-pipelines/.gitignore create mode 100644 terraform-aws-cicd-pipelines/README.md create mode 100644 terraform-aws-cicd-pipelines/buildspec/checkov-buildspec.yml create mode 100644 terraform-aws-cicd-pipelines/buildspec/tf-apply-buildspec.yml create mode 100644 terraform-aws-cicd-pipelines/buildspec/tf-plan-buildspec.yml create mode 100644 terraform-aws-cicd-pipelines/buildspec/tf-test-buildspec.yml create mode 100644 terraform-aws-cicd-pipelines/locals.tf create mode 100644 
terraform-aws-cicd-pipelines/main.tf create mode 100644 terraform-aws-cicd-pipelines/outputs.tf create mode 100644 terraform-aws-cicd-pipelines/provider.tf create mode 100644 terraform-aws-cicd-pipelines/variables.tf diff --git a/modules/terraform-aws-cicd/tests/main.tftest.hcl b/modules/terraform-aws-cicd/tests/main.tftest.hcl index ba8e9b3..aee59f1 100644 --- a/modules/terraform-aws-cicd/tests/main.tftest.hcl +++ b/modules/terraform-aws-cicd/tests/main.tftest.hcl @@ -70,7 +70,7 @@ variables { provider = "S3" version = "1" configuration = { - S3Bucket = "git-module-aws-tf-cicd-" + S3Bucket = "git-terraform-aws-cicd-" S3ObjectKey = "archive.zip" PollForSourceChanges = false } @@ -196,7 +196,7 @@ run "input_validation" { provider = "S3" version = "1" configuration = { - S3Bucket = "git-module-aws-tf-cicd-" + S3Bucket = "git-terraform-aws-cicd-" S3ObjectKey = "archive.zip" PollForSourceChanges = false } diff --git a/terraform-aws-cicd-pipelines/.gitignore b/terraform-aws-cicd-pipelines/.gitignore new file mode 100644 index 0000000..b58bd82 --- /dev/null +++ b/terraform-aws-cicd-pipelines/.gitignore @@ -0,0 +1,29 @@ +## Local .terraform directories +**/.terraform/* + +## Terraform lockfile +.terraform.lock.hcl + +## .tfstate files +*.tfstate +*.tfstate.* + +## Crash log files +crash.log + +## Exclude all .tfvars files, which are likely to contain sentitive data, such as +## password, private keys, and other secrets. These should not be part of version +## control as they are data points which are potentially sensitive and subject +## to change depending on the environment. 
+*.tfvars + +## Ignore override files as they are usually used to override resources locally and so +## are not checked in +override.tf +override.tf.json +*_override.tf +*_override.tf.json + +## Ignore CLI configuration files +.terraformrc +terraform.rc diff --git a/terraform-aws-cicd-pipelines/README.md b/terraform-aws-cicd-pipelines/README.md new file mode 100644 index 0000000..158abcc --- /dev/null +++ b/terraform-aws-cicd-pipelines/README.md @@ -0,0 +1,3 @@ +# AWS DevOps Core + +This repository contains the core DevOps infrastructure which currently includes the Pipelines used for Terraform module validation and deployment, as well as the CodeCommit repositories the configurations are stored in. diff --git a/terraform-aws-cicd-pipelines/buildspec/checkov-buildspec.yml b/terraform-aws-cicd-pipelines/buildspec/checkov-buildspec.yml new file mode 100644 index 0000000..bc79930 --- /dev/null +++ b/terraform-aws-cicd-pipelines/buildspec/checkov-buildspec.yml @@ -0,0 +1,15 @@ +## Checkov +version: 0.1 +phases: + pre_build: + commands: + - echo pre_build starting + + build: + commands: + - echo build starting + - echo starting checkov + - ls + - checkov -d . + - echo saving checkov output + - checkov -s -d ./ > checkov.result.txt diff --git a/terraform-aws-cicd-pipelines/buildspec/tf-apply-buildspec.yml b/terraform-aws-cicd-pipelines/buildspec/tf-apply-buildspec.yml new file mode 100644 index 0000000..8f56dd9 --- /dev/null +++ b/terraform-aws-cicd-pipelines/buildspec/tf-apply-buildspec.yml @@ -0,0 +1,11 @@ +## Terraform Apply +version: 0.1 +phases: + pre_build: + commands: + - terraform init + - terraform validate + + build: + commands: + - terraform apply -auto-approve diff --git a/terraform-aws-cicd-pipelines/buildspec/tf-plan-buildspec.yml b/terraform-aws-cicd-pipelines/buildspec/tf-plan-buildspec.yml new file mode 100644 index 0000000..9e8fae9 --- /dev/null +++ b/terraform-aws-cicd-pipelines/buildspec/tf-plan-buildspec.yml @@ -0,0 +1,12 @@ +## Currently unused. 
You can use this buildspec to create an execution plan and require manual approval step before the pipeline continues to the "apply" phase + +# version: 0.1 +# phases: +# pre_build: +# commands: +# - terraform init +# - terraform validate + +# build: +# commands: +# - terraform plan diff --git a/terraform-aws-cicd-pipelines/buildspec/tf-test-buildspec.yml b/terraform-aws-cicd-pipelines/buildspec/tf-test-buildspec.yml new file mode 100644 index 0000000..4c427a0 --- /dev/null +++ b/terraform-aws-cicd-pipelines/buildspec/tf-test-buildspec.yml @@ -0,0 +1,11 @@ +## Terraform Test +version: 0.1 +phases: + pre_build: + commands: + - terraform init + - terraform validate + + build: + commands: + - terraform test diff --git a/terraform-aws-cicd-pipelines/locals.tf b/terraform-aws-cicd-pipelines/locals.tf new file mode 100644 index 0000000..48121fd --- /dev/null +++ b/terraform-aws-cicd-pipelines/locals.tf @@ -0,0 +1,38 @@ +## Locals +locals { + # -- CodeCommit -- + # CodeCommit Repository Names + module_aws_tf_cicd_repository_name = "terraform-aws-cicd" + terraform_aws_cicd_pipelines_repository_name = "terraform-aws-cicd-pipelines" + example_production_workload_repository_name = "example-prod-workload" + + + # -- CodeBuild -- + # - CodeBuild Project Names - + # 'terraform-aws-cicd' Build Projects + tf_test_module_aws_tf_cicd_codebuild_project_name = "TerraformTest-terraform-aws-cicd" + chevkov_module_aws_tf_cicd_codebuild_project_name = "Checkov-terraform-aws-cicd" + # 'terraform-aws-cicd-pipelines' Build Projects + tf_test_terraform_aws_cicd_pipelines_codebuild_project_name = "TerraformTest-terraform-aws-cicd-pipelines" + chevkov_terraform_aws_cicd_pipelines_codebuild_project_name = "Checkov-terraform-aws-cicd-pipelines" + # 'example-production-workload' Build Projects + tf_test_example_production_workload_codebuild_project_name = "TerraformTest-example-prod-workload" + chevkov_example_production_workload_codebuild_project_name = "Checkov-example-prod-workload" + 
tf_apply_example_production_workload_codebuild_project_name = "TFApply-example-prod-workload" + + + # - CodeBuild buildspec paths - + tf_test_path_to_buildspec = "./buildspec/tf-test-buildspec.yml" # Terraform Test Framework (Test Functionality) + checkov_path_to_buildspec = "./buildspec/checkov-buildspec.yml" # Checkov (Test Security) + tf_apply_path_to_buildspec = "./buildspec/tf-apply-buildspec.yml" # TF Apply (Provision Resources) + + + # -- CodePipeline -- + # - CodePipeline Pipeline Names - + tf_module_validation_module_aws_tf_cicd_codepipeline_pipeline_name = "terraform-aws-cicd" + tf_deployment_example_production_workload_codepipeline_pipeline_name = "example-prod-workload" + + # Images + checkov_image = "bridgecrew/checkov" +} + diff --git a/terraform-aws-cicd-pipelines/main.tf b/terraform-aws-cicd-pipelines/main.tf new file mode 100644 index 0000000..1390ed4 --- /dev/null +++ b/terraform-aws-cicd-pipelines/main.tf @@ -0,0 +1,349 @@ +## Core Terraform Module configuration +resource "aws_sns_topic" "manual_approval_sns_topic" { + name = "manual-approval-sns-topic" +} + +resource "aws_sns_topic_subscription" "manual_approval_sns_subscription" { + topic_arn = aws_sns_topic.manual_approval_sns_topic.arn + protocol = "email" + endpoint = "nnthanh101@gmail.com" # Replace with your email address +} + +module "terraform-aws-cicd" { + source = "../modules/terraform-aws-cicd" + + # - Create S3 Remote State Resources - + tf_remote_state_resource_configs = { + # Custom Terraform Module Repo + terraform_aws_cicd_pipelines : { + prefix = "terraform-aws-cicd-pipelines" + }, + example_production_workload : { + prefix = "example-prod-workload" + }, + } + + # - Create CodeCommit Repos - + codecommit_repos = { + # Custom Terraform Module Repo + module_aws_tf_cicd : { + + repository_name = local.module_aws_tf_cicd_repository_name + description = "The repo containing the configuration for the 'terraform-aws-cicd' Terraform Module." 
+ default_branch = "main" + tags = { + "ContentType" = "Terraform Module", + "PrimaryOwner" = "Nhat-Thanh Nguyen", + "PrimaryOwnerTitle" = "Solutions Architect", + } + }, + + # DevOps Core Infrastructure Repo + terraform_aws_cicd_pipelines : { + + repository_name = local.terraform_aws_cicd_pipelines_repository_name + description = "The repo containing the configuration for the core DevOps infrastructure." + default_branch = "main" + tags = { + "ContentType" = "AWS Infrastructure", + "Scope" = "DevOps Services", + "PrimaryOwner" = "Nhat-Thanh Nguyen", + "PrimaryOwnerTitle" = "Solutions Architect", + } + }, + + # Example Production Workload Repo + example_production_workload : { + + repository_name = local.example_production_workload_repository_name + description = "The repo containing the configuration for the core example production workload." + default_branch = "main" + tags = { + "ContentType" = "AWS Infrastructure", + "Scope" = "Example Production Environment", + "PrimaryOwner" = "Nhat-Thanh Nguyen", + "PrimaryOwnerTitle" = "Solutions Architect", + } + }, + } + + # - Create CodeBuild Projects - + codebuild_projects = { + # Terraform Module 'module'aws-tf-cicd' + tf_test_module_aws_tf_cicd : { + name = local.tf_test_module_aws_tf_cicd_codebuild_project_name + description = "CodeBuild Project that uses the Terraform Test Framework to test the functionality of the 'terraform-aws-cicd' Terraform Module." + + path_to_build_spec = local.tf_test_path_to_buildspec + }, + chevkov_module_aws_tf_cicd : { + name = local.chevkov_module_aws_tf_cicd_codebuild_project_name + description = "CodeBuild Project that uses Checkov to test the security of the 'terraform-aws-cicd' Terraform Module." 
+        env_image   = local.checkov_image 
+ 
+        path_to_build_spec = local.checkov_path_to_buildspec 
+      }, 
+ 
+      # DevOps Core Infrastructure 'terraform-aws-cicd-pipelines' 
+      tf_test_terraform_aws_cicd_pipelines : { 
+        name        = local.tf_test_terraform_aws_cicd_pipelines_codebuild_project_name 
+        description = "CodeBuild Project that uses the Terraform Test Framework to test the functionality of the DevOps Core Infrastructure." 
+ 
+        path_to_build_spec = local.tf_test_path_to_buildspec 
+      }, 
+      chevkov_terraform_aws_cicd_pipelines : { 
+        name        = local.chevkov_terraform_aws_cicd_pipelines_codebuild_project_name 
+        description = "CodeBuild Project that uses Checkov to test the security of the DevOps Core Infrastructure." 
+        env_image   = local.checkov_image 
+ 
+        path_to_build_spec = local.checkov_path_to_buildspec 
+      }, 
+ 
+      # Example Production Workload 'example-production-workload' 
+      tf_test_example_production_workload : { 
+        name        = local.tf_test_example_production_workload_codebuild_project_name 
+        description = "CodeBuild Project that uses the Terraform Test Framework to test the functionality of the Example Production Workload." 
+ 
+        path_to_build_spec = local.tf_test_path_to_buildspec 
+      }, 
+      chevkov_example_production_workload : { 
+        name        = local.chevkov_example_production_workload_codebuild_project_name 
+        description = "CodeBuild Project that uses Checkov to test the security of the Example Production Workload." 
+        env_image   = local.checkov_image 
+ 
+        path_to_build_spec = local.checkov_path_to_buildspec 
+      }, 
+      tf_apply_example_production_workload : { 
+        name        = local.tf_apply_example_production_workload_codebuild_project_name 
+        description = "CodeBuild Project that uses Terraform Apply to deploy the Example Production Workload."
+ + path_to_build_spec = local.tf_apply_path_to_buildspec + }, + } + + codepipeline_pipelines = { + + # Terraform Module Validation Pipeline for 'terraform-aws-cicd' Terraform Module + module_aws_tf_cicd : { + name = local.tf_module_validation_module_aws_tf_cicd_codepipeline_pipeline_name + + tags = { + "Description" = "Pipeline that validates functionality and security of the terraform-aws-cicd Terraform Module.", + "Usage" = "Terraform Module Validation", + "PrimaryOwner" = "Nhat-Thanh Nguyen", + "PrimaryOwnerTitle" = "Solutions Architect", + } + + stages = [ + # Clone from CodeCommit, store contents in artifacts S3 Bucket + { + name = "Source" + action = [ + { + name = "PullFromCodeCommit" + category = "Source" + owner = "AWS" + provider = "CodeCommit" + version = "1" + configuration = { + BranchName = "main" + RepositoryName = local.module_aws_tf_cicd_repository_name + PollForSourceChanges = false + } + input_artifacts = [] + # Store the output of this stage as 'source_output_artifacts' in connected the Artifacts S3 Bucket + output_artifacts = ["source_output_artifacts"] + run_order = 1 + }, + ] + }, + + # Run Terraform Test Framework + { + name = "Build_TF_Test" + action = [ + { + name = "TerraformTest" + category = "Build" + owner = "AWS" + provider = "CodeBuild" + version = "1" + configuration = { + # Reference existing CodeBuild Project + ProjectName = local.tf_test_module_aws_tf_cicd_codebuild_project_name + } + # Use the 'source_output_artifacts' contents from the Artifacts S3 Bucket + input_artifacts = ["source_output_artifacts"] + # Store the output of this stage as 'build_tf_test_output_artifacts' in the connected Artifacts S3 Bucket + output_artifacts = ["build_tf_test_output_artifacts"] + + run_order = 2 + }, + ] + }, + + # Run Checkov + { + name = "Build_Checkov" + action = [ + { + name = "Checkov" + category = "Build" + owner = "AWS" + provider = "CodeBuild" + version = "1" + configuration = { + # Reference existing CodeBuild Project + ProjectName 
= local.chevkov_module_aws_tf_cicd_codebuild_project_name + } + # Use the 'source_output_artifacts' contents from the Artifacts S3 Bucket + input_artifacts = ["source_output_artifacts"] + # Store the output of this stage as 'build_checkov_output_artifacts' in the connected Artifacts S3 Bucket + output_artifacts = ["build_checkov_output_artifacts"] + + run_order = 3 + }, + ] + }, + ] + + }, + + + # Terraform Deployment Pipeline for 'example-production workload' + example_production_workload : { + + name = local.tf_deployment_example_production_workload_codepipeline_pipeline_name + tags = { + "Description" = "Pipeline that validates functionality/security and deploys the Example Production Workload.", + "Usage" = "Example Production Workload", + "PrimaryOwner" = "Nhat-Thanh Nguyen", + "PrimaryOwnerTitle" = "Solutions Architect", + } + + stages = [ + # Clone from CodeCommit, store contents in artifacts S3 Bucket + { + name = "Source" + action = [ + { + name = "PullFromCodeCommit" + category = "Source" + owner = "AWS" + provider = "CodeCommit" + version = "1" + configuration = { + BranchName = "main" + RepositoryName = local.example_production_workload_repository_name + PollForSourceChanges = false + } + input_artifacts = [] + # Store the output of this stage as 'source_output_artifacts' in connected the Artifacts S3 Bucket + output_artifacts = ["source_output_artifacts"] + run_order = 1 + }, + ] + }, + + # Run Terraform Test Framework + { + name = "Build_TF_Test" + action = [ + { + name = "TerraformTest" + category = "Build" + owner = "AWS" + provider = "CodeBuild" + version = "1" + configuration = { + # Reference existing CodeBuild Project + ProjectName = local.tf_test_example_production_workload_codebuild_project_name + } + # Use the 'source_output_artifacts' contents from the Artifacts S3 Bucket + input_artifacts = ["source_output_artifacts"] + # Store the output of this stage as 'build_tf_test_output_artifacts' in the connected Artifacts S3 Bucket + 
output_artifacts = ["build_tf_test_output_artifacts"] + + run_order = 2 + }, + ] + }, + + # Run Checkov + { + name = "Build_Checkov" + action = [ + { + name = "Checkov" + category = "Build" + owner = "AWS" + provider = "CodeBuild" + version = "1" + configuration = { + # Reference existing CodeBuild Project + ProjectName = local.chevkov_example_production_workload_codebuild_project_name + } + # Use the 'source_output_artifacts' contents from the Artifacts S3 Bucket + input_artifacts = ["source_output_artifacts"] + # Store the output of this stage as 'build_checkov_output_artifacts' in the connected Artifacts S3 Bucket + output_artifacts = ["build_checkov_output_artifacts"] + + run_order = 3 + }, + ] + }, + + # Add Manual Approval + { + name = "Manual_Approval" + action = [ + { + name = "ManualApprovalAction" + category = "Approval" + owner = "AWS" + provider = "Manual" + version = "1" + configuration = { + CustomData = "Please approve this deployment." + NotificationArn = aws_sns_topic.manual_approval_sns_topic.arn + } + + input_artifacts = [] + output_artifacts = [] + + run_order = 4 + }, + ] + }, + + + # Apply Terraform + { + name = "Apply" + action = [ + { + name = "TerraformApply" + category = "Build" + owner = "AWS" + provider = "CodeBuild" + version = "1" + configuration = { + # Reference existing CodeBuild Project + ProjectName = local.tf_apply_example_production_workload_codebuild_project_name + } + # Use the 'source_output_artifacts' contents from the Artifacts S3 Bucket + input_artifacts = ["source_output_artifacts"] + # Store the output of this stage as 'build_checkov_output_artifacts' in the connected Artifacts S3 Bucket + output_artifacts = ["build_tf_apply_output_artifacts"] + + run_order = 5 + }, + ] + }, + + ] + + }, + } +} + diff --git a/terraform-aws-cicd-pipelines/outputs.tf b/terraform-aws-cicd-pipelines/outputs.tf new file mode 100644 index 0000000..88cf8aa --- /dev/null +++ b/terraform-aws-cicd-pipelines/outputs.tf @@ -0,0 +1,16 @@ +## Outputs 
+# AWS DevOps Core 
+output "terraform_aws_cicd_pipelines_s3_bucket_name" { 
+  value = module.terraform-aws-cicd.tf_state_s3_buckets_names["terraform_aws_cicd_pipelines"] 
+} 
+output "terraform_aws_cicd_pipelines_ddb_table_name" { 
+  value = module.terraform-aws-cicd.tf_state_ddb_table_names["terraform_aws_cicd_pipelines"] 
+} 
+ 
+# Example Production Workload 
+output "example_production_workload_s3_bucket_name" { 
+  value = module.terraform-aws-cicd.tf_state_s3_buckets_names["example_production_workload"] 
+} 
+output "example_production_workload_ddb_table_name" { 
+  value = module.terraform-aws-cicd.tf_state_ddb_table_names["example_production_workload"] 
+} 
diff --git a/terraform-aws-cicd-pipelines/provider.tf b/terraform-aws-cicd-pipelines/provider.tf 
new file mode 100644 
index 0000000..3ecfbc8 
--- /dev/null 
+++ b/terraform-aws-cicd-pipelines/provider.tf 
@@ -0,0 +1,38 @@ 
+## Provider configuration 
+ 
+terraform { 
+  required_providers { 
+    aws = { 
+      source  = "hashicorp/aws" 
+      version = "~> 5.0" 
+    } 
+  } 
+  # Instructions: Add S3 Remote Backend Configuration 
+ 
+  # Instructions: After first running `terraform apply`, uncomment the block below, fill in the desired values, and re-run 'terraform apply' to configure your S3 Remote Backend. 
+  # IMPORTANT! - Ensure the resources you are referencing (S3 Bucket and DynamoDB table) already exist in the AWS account and region you are currently in or it will fail.
+ + # backend "s3" { + # bucket = "add-your-bucket-name-here" + # key = "state/terraform.tfstate" + # region = "us-east-1" + # encrypt = true + # dynamodb_table = "add-your-dynamodb-table-name-here" + # } + + + +} + + +# Configure the AWS Provider +provider "aws" { + region = var.aws_region + + + default_tags { + tags = { + Management = "Terraform" + } + } +} diff --git a/terraform-aws-cicd-pipelines/variables.tf b/terraform-aws-cicd-pipelines/variables.tf new file mode 100644 index 0000000..0195a89 --- /dev/null +++ b/terraform-aws-cicd-pipelines/variables.tf @@ -0,0 +1,8 @@ +## Variables + +variable "aws_region" { + type = string + description = "The AWS region you wish to deploy your resources to." + default = "us-east-1" + +}