Serverless Architecture with Lambda

First, create a Terraform module for a Lambda function and its pipeline using CodeBuild:

modules/lambda-pipeline/main.tf

# Execution role the Lambda function assumes at runtime.
# jsonencode() is preferred over a raw heredoc: Terraform validates the
# structure and normalizes formatting, avoiding hand-written JSON errors.
# see the docs: https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/iam_role
resource "aws_iam_role" "iam_for_lambda" {
  name = "iam_for_lambda_${var.function_name}"

  # Trust policy: only the Lambda service may assume this role.
  assume_role_policy = jsonencode({
    Version = "2012-10-17"
    Statement = [
      {
        Action = "sts:AssumeRole"
        Effect = "Allow"
        Sid    = ""
        Principal = {
          Service = "lambda.amazonaws.com"
        }
      }
    ]
  })
}

locals {
  # Wrap env_vars in a single-element list so the dynamic "environment"
  # block below is rendered exactly once, or not at all when env_vars is null.
  environment_map = var.env_vars == null ? [] : [var.env_vars]
  # Fixed object name of the deployment package under the S3 prefix;
  # the buildspec uploads to the same key via the ARTIFACT_KEY env var.
  artifact_key = "artifact.zip"
}

# The Lambda function itself. Its code is loaded from the artifact that
# CodeBuild uploads to s3://<s3_bucket>/<s3_key_prefix>/artifact.zip.
# see the docs: https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/lambda_function
resource "aws_lambda_function" "lambda_func" {
  s3_bucket     = var.s3_bucket
  s3_key        = "${var.s3_key_prefix}/${local.artifact_key}"
  function_name = var.function_name
  role          = aws_iam_role.iam_for_lambda.arn
  handler       = var.handler

  # Render an environment block only when env_vars was provided:
  # local.environment_map is [] (no block) or a one-element list (one block).
  dynamic "environment" {
    for_each = local.environment_map
    content {
      variables = environment.value
    }
  }

  # see all available runtimes here: https://docs.aws.amazon.com/lambda/latest/dg/API_CreateFunction.html#SSS-CreateFunction-request-Runtime
  runtime = var.runtime
}

# IAM policy that lets the function write its logs to CloudWatch.
# (The original description also claimed SQS receive permissions, but the
# policy never granted any SQS action — the text now matches the statements.)
resource "aws_iam_policy" "lambda_logging" {
  name        = "lambda_logging_${var.function_name}"
  description = "IAM policy for writing logs from a lambda"

  policy = jsonencode({
    Version = "2012-10-17"
    Statement = [
      {
        Action = [
          "logs:CreateLogGroup",
          "logs:CreateLogStream",
          "logs:PutLogEvents"
        ]
        # NOTE(review): could be scoped down to this function's own
        # log group ARN instead of all log groups.
        Resource = "arn:aws:logs:*:*:*"
        Effect   = "Allow"
      }
    ]
  })
}

# Attach the logging policy to the function's execution role.
# see the docs: https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/iam_role_policy_attachment
resource "aws_iam_role_policy_attachment" "lambda_logs" {
  role       = aws_iam_role.iam_for_lambda.name
  policy_arn = aws_iam_policy.lambda_logging.arn
}

########## CodeBuild #########

# CodeBuild project that builds the deployment package and deploys it —
# the repo's buildspec performs the S3 upload and update-function-code.
resource "aws_codebuild_project" "project" {
  name          = "build-${var.function_name}"
  build_timeout = "20" # minutes
  service_role  = aws_iam_role.cbuild_role.arn

  # No pipeline-managed artifacts: the buildspec pushes the zip to S3 itself.
  artifacts {
    type = "NO_ARTIFACTS"
  }

  environment {
    compute_type = "BUILD_GENERAL1_SMALL"
    # NOTE(review): standard:6.0 is an older build image; newer "standard"
    # images exist — confirm the required runtimes before bumping.
    image        = "aws/codebuild/standard:6.0"
    type         = "LINUX_CONTAINER"

    # The four variables below are consumed by the repo's buildspec.yaml.
    environment_variable {
      name  = "BUCKET_NAME"
      value = var.s3_bucket
    }

    environment_variable {
      name  = "KEY_PREFIX"
      value = var.s3_key_prefix
    }

    environment_variable {
      name  = "ARTIFACT_KEY"
      value = local.artifact_key
    }

    environment_variable {
      name  = "FUNC_NAME"
      value = var.function_name
    }
  }

  # Send build logs to a per-function CloudWatch log group.
  logs_config {
    cloudwatch_logs {
      group_name = "build-${var.function_name}"
    }
  }

  # Source is the GitHub repository holding the function code + buildspec.
  source {
    type            = "GITHUB"
    location        = var.repo_address
    git_clone_depth = 1
  }

  # Branch built when no webhook/override specifies another ref.
  source_version = var.branch_name
}

# Role assumed by the CodeBuild service while running builds.
# jsonencode() is used instead of a heredoc so Terraform validates the
# policy structure (consistent with the Lambda role above).
resource "aws_iam_role" "cbuild_role" {
  name = "role-build-${var.function_name}"

  # Trust policy: only the CodeBuild service may assume this role.
  assume_role_policy = jsonencode({
    Version = "2012-10-17"
    Statement = [
      {
        Effect = "Allow"
        Action = "sts:AssumeRole"
        Principal = {
          Service = "codebuild.amazonaws.com"
        }
      }
    ]
  })
}

# Inline policy for the CodeBuild role: write build logs, upload/read the
# artifact in S3, and update the Lambda function's code.
# The original granted "s3:*" on the bucket; the build only needs to put
# and get the artifact object (and list the bucket), so this is narrowed
# to least privilege.
resource "aws_iam_role_policy" "this" {
  role = aws_iam_role.cbuild_role.name

  policy = jsonencode({
    Version = "2012-10-17"
    Statement = [
      {
        Effect = "Allow"
        Action = [
          "logs:CreateLogGroup",
          "logs:CreateLogStream",
          "logs:PutLogEvents"
        ]
        # NOTE(review): could be scoped to the "build-<function>" log group.
        Resource = ["*"]
      },
      {
        Effect = "Allow"
        Action = [
          "s3:PutObject",
          "s3:GetObject"
        ]
        Resource = ["${var.s3_bucket_arn}/*"]
      },
      {
        Effect   = "Allow"
        Action   = ["s3:ListBucket"]
        Resource = [var.s3_bucket_arn]
      },
      {
        Effect   = "Allow"
        Action   = ["lambda:UpdateFunctionCode"]
        Resource = [aws_lambda_function.lambda_func.arn]
      }
    ]
  })
}

# Webhook so the project detects pushes to the repo and starts a build.
# NOTE: apply everything above this resource first, connect the CodeBuild
# project's source to your GitHub account (OAuth / personal access token),
# and only then create this resource.
resource "aws_codebuild_webhook" "this" {
  project_name = aws_codebuild_project.project.name
  build_type   = "BUILD"

  filter_group {
    # Trigger only on push events...
    filter {
      type    = "EVENT"
      pattern = "PUSH"
    }

    # ...and only for the configured branch. HEAD_REF patterns are regular
    # expressions matched against the full ref (e.g. "refs/heads/main"),
    # so the pattern is anchored; a bare branch name would also match
    # refs such as "refs/heads/main-hotfix".
    filter {
      type    = "HEAD_REF"
      pattern = "^refs/heads/${var.branch_name}$"
    }
  }
}

modules/lambda-pipeline/variables.tf

# Name of the Lambda function; also used to derive the role, policy,
# and CodeBuild project names.
variable "function_name" {
  type = string
}

# Name of the S3 bucket that stores the deployment artifact.
variable "s3_bucket" {
  type = string
}

# ARN of the same bucket; used in the CodeBuild role's S3 permissions.
variable "s3_bucket_arn" {
  type = string
}

# Key prefix ("folder") inside the bucket for this function's artifact;
# the full object key becomes "<s3_key_prefix>/artifact.zip".
# The previous default of "artifact.zip" was a copy-paste of the artifact
# file name and produced the confusing key "artifact.zip/artifact.zip".
variable "s3_key_prefix" {
  type    = string
  default = "artifacts"
}

# Lambda handler entry point; "main.handler" fits a Python function
# defined as handler() in main.py.
variable "handler" {
  type    = string
  default = "main.handler"
}

# Lambda runtime identifier.
# python3.8 reached end of support and AWS no longer allows creating new
# functions on it, so the default is a currently supported Python runtime.
variable "runtime" {
  type    = string
  default = "python3.12"
}

# Optional environment variables for the function; null means the
# function gets no environment block at all.
# NOTE(review): Lambda environment values must be strings — map(string)
# would be stricter; confirm callers before tightening the type.
variable "env_vars" {
  type    = map(any)
  default = null
}

# Clone URL of the GitHub repository CodeBuild pulls from.
variable "repo_address" {
  type    = string
}

# Branch that triggers builds (used by the webhook filter and as the
# CodeBuild source_version).
variable "branch_name" {
  type    = string
  default = "main"
}

Then in the root module:

main.tf

# Bucket for the deployment artifacts. No "bucket" argument is set, so
# AWS generates a unique name; it is referenced via .bucket / .arn below.
resource "aws_s3_bucket" "name" {
}

# Python function + its build pipeline; relies on the module's default
# runtime, handler, and branch. Replace <REPO> with your repo's clone URL.
module "python_lambda" {
    source = "./modules/lambda-pipeline"
    function_name = "python-test"
    s3_bucket = aws_s3_bucket.name.bucket
    s3_bucket_arn = aws_s3_bucket.name.arn
    s3_key_prefix = "python-test"
    repo_address = "<REPO>"
}

# Go function + pipeline; overrides the runtime and handler (a Go Lambda's
# handler is the compiled binary's name).
# NOTE(review): the "go1.x" runtime is deprecated by AWS — new functions
# should use "provided.al2023" with the binary named "bootstrap", which
# also requires changing the Go buildspec; confirm before deploying.
module "go_lambda" {
    source = "./modules/lambda-pipeline"
    function_name = "go-test"
    s3_bucket = aws_s3_bucket.name.bucket
    s3_bucket_arn = aws_s3_bucket.name.arn
    s3_key_prefix = "go-test"
    repo_address = "<REPO>"
    runtime = "go1.x"
    handler = "main"
}

CodeBuild Configs

For a Python Lambda function without any dependencies:

buildspec.yaml

version: 0.2
phases:
  install:
    runtime-versions:
      # Use the newest Python available on the CodeBuild image.
      python: latest
    commands:
      - echo "installing..."

  build:
    commands:
      - echo "building..."

  post_build:
    commands:
      # BUCKET_NAME, KEY_PREFIX, ARTIFACT_KEY and FUNC_NAME are injected
      # by the CodeBuild project's environment_variable blocks.
      - echo "updating function $FUNC_NAME"
      # Package every .py file at the repo root into the artifact zip.
      - zip $ARTIFACT_KEY *.py
      # Upload the artifact, then point the function at the new object.
      - aws s3 cp $ARTIFACT_KEY s3://$BUCKET_NAME/$KEY_PREFIX/$ARTIFACT_KEY
      - aws lambda update-function-code --function-name $FUNC_NAME --s3-bucket $BUCKET_NAME --s3-key $KEY_PREFIX/$ARTIFACT_KEY

And for a Go Lambda function with or without dependencies:

buildspec.yaml

version: 0.2
phases:
  install:
    runtime-versions:
      # Use the newest Go toolchain available on the CodeBuild image.
      golang: latest
    commands:
      - echo "installing..."

  build:
    commands:
      - echo "building..."
      # Cross-compile for the Lambda execution environment (linux/amd64).
      - export GOOS=linux
      - export GOARCH=amd64
      - go build -o main .

  post_build:
    commands:
      # BUCKET_NAME, KEY_PREFIX, ARTIFACT_KEY and FUNC_NAME are injected
      # by the CodeBuild project's environment_variable blocks.
      - echo "updating function $FUNC_NAME"
      # Package the compiled binary; its name must match the "handler".
      - zip $ARTIFACT_KEY main
      # Upload the artifact, then point the function at the new object.
      - aws s3 cp $ARTIFACT_KEY s3://$BUCKET_NAME/$KEY_PREFIX/$ARTIFACT_KEY
      - aws lambda update-function-code --function-name $FUNC_NAME --s3-bucket $BUCKET_NAME --s3-key $KEY_PREFIX/$ARTIFACT_KEY