From c202ee0cd2348c1d736ec08730e836370b4ea260 Mon Sep 17 00:00:00 2001 From: aknysh Date: Wed, 29 Apr 2020 18:17:39 -0400 Subject: [PATCH 1/6] Convert to TF 0.12 --- .travis.yml | 34 ------------- LICENSE | 2 +- main.tf | 139 ++++++++++++++++++++++++--------------------------- outputs.tf | 4 +- variables.tf | 41 ++++++++------- versions.tf | 9 ++++ 6 files changed, 101 insertions(+), 128 deletions(-) delete mode 100644 .travis.yml create mode 100644 versions.tf diff --git a/.travis.yml b/.travis.yml deleted file mode 100644 index f8f27b7..0000000 --- a/.travis.yml +++ /dev/null @@ -1,34 +0,0 @@ -addons: - apt: - packages: - - git - - make - - curl - -env: - global: - - TERRAFORM_MODULE_NAME="$(basename $TRAVIS_REPO_SLUG)" - -install: - - make init - -script: - - make terraform/install - - make terraform/get-plugins - - make terraform/get-modules - - make terraform/lint - - make terraform/validate - - make build - -deploy: - # Deploy artifacts to S3 - - provider: s3 - region: "us-west-2" - access_key_id: $AWS_ACCESS_KEY_ID - secret_access_key: $AWS_SECRET_ACCESS_KEY - bucket: artifacts.prod.cloudposse.org - skip_cleanup: true - upload-dir: $TERRAFORM_MODULE_NAME/$TRAVIS_COMMIT - local-dir: artifacts - on: - all_branches: true diff --git a/LICENSE b/LICENSE index 808ecf3..c844c70 100644 --- a/LICENSE +++ b/LICENSE @@ -186,7 +186,7 @@ same "printed page" as the copyright notice for easier identification within third-party archives. - Copyright 2017-2018 Cloud Posse, LLC + Copyright 2017-2020 Cloud Posse, LLC Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. diff --git a/main.tf b/main.tf index 483ec4a..a92ba0d 100644 --- a/main.tf +++ b/main.tf @@ -2,17 +2,11 @@ * ## Module: cloudposse/terraform-aws-lambda-elasticsearch-cleanup * * This module creates a scheduled Lambda function which will delete old -* Elasticsearch indexes using SigV4Auth authentication. The lambda +* Elasticsearch indexes using SigV4Auth authentication. The lambda * function can optionally send output to an SNS topic if the topic ARN * is given */ -# Terraform -#-------------------------------------------------------------- -terraform { - required_version = ">= 0.10.2" -} - # Data #-------------------------------------------------------------- data "aws_iam_policy_document" "assume_role" { @@ -32,7 +26,7 @@ data "aws_iam_policy_document" "es_logs" { actions = [ "logs:CreateLogGroup", "logs:CreateLogStream", - "logs:PutLogEvents", + "logs:PutLogEvents" ] effect = "Allow" @@ -48,7 +42,7 @@ data "aws_iam_policy_document" "es_logs" { "es:ESHttpHead", "es:ESHttpDelete", "es:Describe*", - "es:List*", + "es:List*" ] effect = "Allow" @@ -62,169 +56,168 @@ data "aws_iam_policy_document" "es_logs" { data "aws_iam_policy_document" "sns" { statement { actions = [ - "sns:Publish", + "sns:Publish" ] effect = "Allow" resources = [ - "${var.sns_arn}", + var.sns_arn ] } } data "aws_iam_policy_document" "default" { - source_json = "${data.aws_iam_policy_document.es_logs.json}" - override_json = "${length(var.sns_arn) > 0 ? data.aws_iam_policy_document.sns.json : "{}"}" + source_json = data.aws_iam_policy_document.es_logs.json + override_json = length(var.sns_arn) > 0 ? 
data.aws_iam_policy_document.sns.json : "{}" } # Modules #-------------------------------------------------------------- module "label" { source = "git::https://github.com/cloudposse/terraform-terraform-label.git?ref=tags/0.2.1" - namespace = "${var.namespace}" - name = "${var.name}" - stage = "${var.stage}" - delimiter = "${var.delimiter}" - attributes = "${compact(concat(var.attributes, list("elasticsearch", "cleanup")))}" - tags = "${var.tags}" - enabled = "true" + namespace = var.namespace + name = var.name + stage = var.stage + delimiter = var.delimiter + attributes = compact(concat(var.attributes, ["elasticsearch", "cleanup"])) + tags = var.tags } module "artifact" { source = "git::https://github.com/cloudposse/terraform-external-module-artifact.git?ref=tags/0.1.1" filename = "lambda.zip" module_name = "terraform-aws-lambda-elasticsearch-cleanup" - module_path = "${substr(path.module, length(path.cwd) + 1, -1)}" + module_path = substr(path.module, length(path.cwd) + 1, -1) } # Locals #-------------------------------------------------------------- locals { - function_name = "${module.label.id}" + function_name = module.label.id } # Resources #-------------------------------------------------------------- resource "aws_lambda_function" "default" { - count = "${var.enabled == "true" ? 1 : 0}" - filename = "${module.artifact.file}" - function_name = "${local.function_name}" - description = "${local.function_name}" - timeout = "${var.timeout}" + count = var.enabled ? 1 : 0 + filename = module.artifact.file + function_name = local.function_name + description = local.function_name + timeout = var.timeout runtime = "python${var.python_version}" - role = "${aws_iam_role.default.arn}" + role = join("", aws_iam_role.default.*.arn) handler = "es-cleanup.lambda_handler" - source_code_hash = "${module.artifact.base64sha256}" - tags = "${module.label.tags}" + source_code_hash = module.artifact.base64sha256 + tags = module.label.tags environment { variables = { - es_endpoint = "${var.es_endpoint}" - index = "${var.index}" - delete_after = "${var.delete_after}" - index_format = "${var.index_format}" - sns_arn = "${var.sns_arn}" + es_endpoint = var.es_endpoint + index = var.index + delete_after = var.delete_after + index_format = var.index_format + sns_arn = var.sns_arn } } vpc_config { - subnet_ids = ["${var.subnet_ids}"] - security_group_ids = ["${aws_security_group.default.id}"] + subnet_ids = var.subnet_ids + security_group_ids = [join("", aws_security_group.default.*.id)] } } resource "aws_security_group" "default" { - count = "${var.enabled == "true" ? 1 : 0}" - name = "${local.function_name}" - description = "${local.function_name}" - vpc_id = "${var.vpc_id}" - tags = "${module.label.tags}" + count = var.enabled ? 1 : 0 + name = local.function_name + description = local.function_name + vpc_id = var.vpc_id + tags = module.label.tags } resource "aws_security_group_rule" "udp_dns_egress_from_lambda" { - count = "${var.enabled == "true" ? 1 : 0}" + count = var.enabled ? 1 : 0 description = "Allow outbound UDP traffic from Lambda Elasticsearch cleanup to DNS" type = "egress" from_port = 53 to_port = 53 protocol = "udp" cidr_blocks = ["0.0.0.0/0"] - security_group_id = "${aws_security_group.default.id}" + security_group_id = join("", aws_security_group.default.*.id) } resource "aws_security_group_rule" "tcp_dns_egress_from_lambda" { - count = "${var.enabled == "true" ? 1 : 0}" + count = var.enabled ? 
1 : 0 description = "Allow outbound TCP traffic from Lambda Elasticsearch cleanup to DNS" type = "egress" from_port = 53 to_port = 53 protocol = "tcp" cidr_blocks = ["0.0.0.0/0"] - security_group_id = "${aws_security_group.default.id}" + security_group_id = join("", aws_security_group.default.*.id) } resource "aws_security_group_rule" "egress_from_lambda_to_es_cluster" { - count = "${var.enabled == "true" ? 1 : 0}" + count = var.enabled ? 1 : 0 description = "Allow outbound traffic from Lambda Elasticsearch cleanup SG to Elasticsearch SG" type = "egress" from_port = 443 to_port = 443 protocol = "tcp" - source_security_group_id = "${var.es_security_group_id}" - security_group_id = "${aws_security_group.default.id}" + source_security_group_id = var.es_security_group_id + security_group_id = join("", aws_security_group.default.*.id) } resource "aws_security_group_rule" "ingress_to_es_cluster_from_lambda" { - count = "${var.enabled == "true" ? 1 : 0}" + count = var.enabled ? 1 : 0 description = "Allow inbound traffic to Elasticsearch domain from Lambda Elasticsearch cleanup SG" type = "ingress" from_port = 443 to_port = 443 protocol = "tcp" - source_security_group_id = "${aws_security_group.default.id}" - security_group_id = "${var.es_security_group_id}" + source_security_group_id = join("", aws_security_group.default.*.id) + security_group_id = var.es_security_group_id } resource "aws_iam_role" "default" { - count = "${var.enabled == "true" ? 1 : 0}" - name = "${local.function_name}" - assume_role_policy = "${data.aws_iam_policy_document.assume_role.json}" - tags = "${module.label.tags}" + count = var.enabled ? 1 : 0 + name = local.function_name + assume_role_policy = data.aws_iam_policy_document.assume_role.json + tags = module.label.tags } resource "aws_iam_role_policy" "default" { - count = "${var.enabled == "true" ? 1 : 0}" - name = "${local.function_name}" - role = "${aws_iam_role.default.name}" - policy = "${data.aws_iam_policy_document.default.json}" + count = var.enabled ? 1 : 0 + name = local.function_name + role = join("", aws_iam_role.default.*.name) + policy = data.aws_iam_policy_document.default.json } resource "aws_iam_role_policy_attachment" "default" { - count = "${var.enabled == "true" ? 1 : 0}" - role = "${aws_iam_role.default.name}" + count = var.enabled ? 1 : 0 + role = join("", aws_iam_role.default.*.name) policy_arn = "arn:aws:iam::aws:policy/service-role/AWSLambdaVPCAccessExecutionRole" } resource "aws_cloudwatch_event_rule" "default" { - count = "${var.enabled == "true" ? 1 : 0}" - name = "${local.function_name}" - description = "${local.function_name}" - schedule_expression = "${var.schedule}" + count = var.enabled ? 1 : 0 + name = local.function_name + description = local.function_name + schedule_expression = var.schedule } resource "aws_lambda_permission" "default" { - count = "${var.enabled == "true" ? 1 : 0}" + count = var.enabled ? 1 : 0 statement_id = "AllowExecutionFromCloudWatch" action = "lambda:InvokeFunction" - function_name = "${aws_lambda_function.default.arn}" + function_name = join("", aws_lambda_function.default.*.arn) principal = "events.amazonaws.com" - source_arn = "${aws_cloudwatch_event_rule.default.arn}" + source_arn = join("", aws_cloudwatch_event_rule.default.*.arn) } resource "aws_cloudwatch_event_target" "default" { - count = "${var.enabled == "true" ? 1 : 0}" - target_id = "${local.function_name}" - rule = "${aws_cloudwatch_event_rule.default.name}" - arn = "${aws_lambda_function.default.arn}" + count = var.enabled ? 
1 : 0 + target_id = local.function_name + rule = join("", aws_cloudwatch_event_rule.default.*.name) + arn = join("", aws_lambda_function.default.*.arn) } diff --git a/outputs.tf b/outputs.tf index 2e9bdb0..acbe8f8 100644 --- a/outputs.tf +++ b/outputs.tf @@ -1,4 +1,4 @@ output "security_group_id" { - value = "${join(",", aws_security_group.default.*.id)}" - description = "Security Group ID of the Lambda " + value = join("", aws_security_group.default.*.id) + description = "Security Group ID of the Lambda" } diff --git a/variables.tf b/variables.tf index 840beb5..395521d 100644 --- a/variables.tf +++ b/variables.tf @@ -1,103 +1,108 @@ variable "enabled" { - type = "string" - default = "true" + type = bool + default = true description = "This module will not create any resources unless enabled is set to \"true\"" } variable "es_endpoint" { - type = "string" + type = string description = "The Elasticsearch endpoint for the Lambda function to connect to" } variable "es_domain_arn" { - type = "string" + type = string description = "The Elasticsearch domain ARN" } variable "es_security_group_id" { - type = "string" + type = string description = "The Elasticsearch cluster security group ID" } variable "schedule" { - type = "string" + type = string default = "cron(0 3 * * ? *)" description = "CloudWatch Events rule schedule using cron or rate expression" } variable "subnet_ids" { - type = "list" - description = "Subnet ids" + type = list(string) + description = "Subnet IDs" } variable "sns_arn" { - type = "string" + type = string default = "" description = "SNS ARN to publish alerts" } variable "index" { - type = "string" + type = string default = "all" description = "Index/indices to process. Use a comma-separated list. Specify `all` to match every index except for `.kibana` or `.kibana_1`" } variable "delete_after" { + type = number default = 15 description = "Number of days to preserve" } variable "namespace" { - type = "string" + type = string description = "Namespace, which could be your organization name, e.g. 'eg' or 'cp'" + default = "" } variable "stage" { - type = "string" + type = string description = "Stage, e.g. 'prod', 'staging', 'dev', or 'test'" + default = "" } variable "name" { - type = "string" + type = string default = "app" description = "Solution name, e.g. 'app' or 'cluster'" } variable "delimiter" { - type = "string" + type = string default = "-" description = "Delimiter to be used between `namespace`, `stage`, `name` and `attributes`" } variable "attributes" { - type = "list" + type = list(string) default = [] description = "Additional attributes (e.g. `1`)" } variable "tags" { - type = "map" + type = map(string) default = {} description = "Additional tags (e.g. 
`map('BusinessUnit','XYZ')`" } variable "index_format" { + type = string default = "%Y.%m.%d" description = "Combined with 'index' variable and is used to evaluate the index age" } variable "python_version" { - type = "string" + type = string default = "2.7" description = "The Python version to use" } variable "timeout" { + type = number default = 300 description = "Timeout for Lambda function in seconds" } variable "vpc_id" { - type = "string" + type = string description = "The VPC ID for the Lambda function" } diff --git a/versions.tf b/versions.tf new file mode 100644 index 0000000..484c1d6 --- /dev/null +++ b/versions.tf @@ -0,0 +1,9 @@ +terraform { + required_version = "~> 0.12.0" + + required_providers { + aws = "~> 2.0" + template = "~> 2.0" + null = "~> 2.0" + } +} From 4377630673cfb16145df9fd906b9a995cc93af58 Mon Sep 17 00:00:00 2001 From: aknysh Date: Wed, 29 Apr 2020 22:17:22 -0400 Subject: [PATCH 2/6] Add example and tests --- .dockerignore | 18 ++ .editorconfig | 24 +++ .github/CODEOWNERS | 4 + .github/ISSUE_TEMPLATE/bug_report.md | 37 ++++ .github/ISSUE_TEMPLATE/config.yml | 18 ++ .github/ISSUE_TEMPLATE/feature_request.md | 36 ++++ .github/ISSUE_TEMPLATE/question.md | 0 .github/PULL_REQUEST_TEMPLATE.md | 13 ++ .github/workflows/slash-command-dispatch.yml | 20 ++ .gitignore | 3 - README.md | 190 ++++++++++++------- README.yaml | 35 ++-- codefresh/test.yml | 74 ++++++++ docs/terraform.md | 20 +- example/main.tf | 66 ------- example/variables.tf | 43 ----- examples/complete/fixtures.us-east-2.tfvars | 31 +++ examples/complete/main.tf | 60 ++++++ examples/complete/outputs.tf | 79 ++++++++ examples/complete/variables.tf | 80 ++++++++ main.tf | 4 +- outputs.tf | 12 +- test/.gitignore | 1 + test/Makefile | 43 +++++ test/Makefile.alpine | 5 + test/src/.gitignore | 2 + test/src/Gopkg.lock | 92 +++++++++ test/src/Gopkg.toml | 7 + test/src/Makefile | 50 +++++ test/src/examples_complete_test.go | 58 ++++++ variables.tf | 82 ++++---- 31 files changed, 965 insertions(+), 242 deletions(-) create mode 100644 .dockerignore create mode 100644 .editorconfig create mode 100644 .github/CODEOWNERS create mode 100644 .github/ISSUE_TEMPLATE/bug_report.md create mode 100644 .github/ISSUE_TEMPLATE/config.yml create mode 100644 .github/ISSUE_TEMPLATE/feature_request.md create mode 100644 .github/ISSUE_TEMPLATE/question.md create mode 100644 .github/PULL_REQUEST_TEMPLATE.md create mode 100644 .github/workflows/slash-command-dispatch.yml create mode 100644 codefresh/test.yml delete mode 100644 example/main.tf delete mode 100644 example/variables.tf create mode 100644 examples/complete/fixtures.us-east-2.tfvars create mode 100644 examples/complete/main.tf create mode 100644 examples/complete/outputs.tf create mode 100644 examples/complete/variables.tf create mode 100644 test/.gitignore create mode 100644 test/Makefile create mode 100644 test/Makefile.alpine create mode 100644 test/src/.gitignore create mode 100644 test/src/Gopkg.lock create mode 100644 test/src/Gopkg.toml create mode 100644 test/src/Makefile create mode 100644 test/src/examples_complete_test.go diff --git a/.dockerignore b/.dockerignore new file mode 100644 index 0000000..cdc8107 --- /dev/null +++ b/.dockerignore @@ -0,0 +1,18 @@ +**/.terraform +.git +.gitignore +.editorconfig + +# Compiled files +*.tfstate +*.tfstate.backup +.terraform.tfstate.lock.info + +# Module directory +.terraform/ +.idea +*.iml + +# Build Harness +.build-harness +build-harness/ diff --git a/.editorconfig b/.editorconfig new file mode 100644 index 0000000..b37aa4c 
--- /dev/null +++ b/.editorconfig @@ -0,0 +1,24 @@ +# top-most EditorConfig file +root = true + +# Unix-style newlines with a newline ending every file +[*] +end_of_line = lf +insert_final_newline = true + +# Override for Makefile +[{Makefile, makefile, GNUmakefile}] +indent_style = tab +indent_size = 4 + +[Makefile.*] +indent_style = tab +indent_size = 4 + +[shell] +indent_style = tab +indent_size = 4 + +[*.sh] +indent_style = tab +indent_size = 4 diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS new file mode 100644 index 0000000..41c1baa --- /dev/null +++ b/.github/CODEOWNERS @@ -0,0 +1,4 @@ +# Use this file to define individuals or teams that are responsible for code in a repository. +# Read more: + +* @cloudposse/engineering \ No newline at end of file diff --git a/.github/ISSUE_TEMPLATE/bug_report.md b/.github/ISSUE_TEMPLATE/bug_report.md new file mode 100644 index 0000000..f3df96b --- /dev/null +++ b/.github/ISSUE_TEMPLATE/bug_report.md @@ -0,0 +1,37 @@ +--- +name: Bug report +about: Create a report to help us improve +title: '' +labels: 'bug' +assignees: '' + +--- + +Found a bug? Maybe our [Slack Community](https://slack.cloudposse.com) can help. + +[![Slack Community](https://slack.cloudposse.com/badge.svg)](https://slack.cloudposse.com) + +## Describe the Bug +A clear and concise description of what the bug is. + +## Expected Behavior +A clear and concise description of what you expected to happen. + +## Steps to Reproduce +Steps to reproduce the behavior: +1. Go to '...' +2. Run '....' +3. Enter '....' +4. See error + +## Screenshots +If applicable, add screenshots or logs to help explain your problem. + +## Environment (please complete the following information): + +Anything that will help us triage the bug will help. Here are some ideas: + - OS: [e.g. Linux, OSX, WSL, etc] + - Version [e.g. 10.15] + +## Additional Context +Add any other context about the problem here. \ No newline at end of file diff --git a/.github/ISSUE_TEMPLATE/config.yml b/.github/ISSUE_TEMPLATE/config.yml new file mode 100644 index 0000000..76ae6d6 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/config.yml @@ -0,0 +1,18 @@ +blank_issues_enabled: false + +contact_links: + + - name: Community Slack Team + url: https://cloudposse.com/slack/ + about: |- + Please ask and answer questions here. + + - name: Office Hours + url: https://cloudposse.com/office-hours/ + about: |- + Join us every Wednesday for FREE Office Hours (lunch & learn). + + - name: DevOps Accelerator Program + url: https://cloudposse.com/accelerate/ + about: |- + Own your infrastructure in record time. We build it. You drive it. diff --git a/.github/ISSUE_TEMPLATE/feature_request.md b/.github/ISSUE_TEMPLATE/feature_request.md new file mode 100644 index 0000000..ecc9eb6 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/feature_request.md @@ -0,0 +1,36 @@ +--- +name: Feature Request +about: Suggest an idea for this project +title: '' +labels: 'feature request' +assignees: '' + +--- + +Have a question? Please checkout our [Slack Community](https://slack.cloudposse.com) in the `#geodesic` channel or visit our [Slack Archive](https://archive.sweetops.com/geodesic/). + +[![Slack Community](https://slack.cloudposse.com/badge.svg)](https://slack.cloudposse.com) + +## Describe the Feature + +A clear and concise description of what the bug is. + +## Expected Behavior + +A clear and concise description of what you expected to happen. + +## Use Case + +Is your feature request related to a problem/challenge you are trying to solve? 
Please provide some additional context of why this feature or capability will be valuable. + +## Describe Ideal Solution + +A clear and concise description of what you want to happen. If you don't know, that's okay. + +## Alternatives Considered + +Explain what alternative solutions or features you've considered. + +## Additional Context + +Add any other context or screenshots about the feature request here. \ No newline at end of file diff --git a/.github/ISSUE_TEMPLATE/question.md b/.github/ISSUE_TEMPLATE/question.md new file mode 100644 index 0000000..e69de29 diff --git a/.github/PULL_REQUEST_TEMPLATE.md b/.github/PULL_REQUEST_TEMPLATE.md new file mode 100644 index 0000000..4b8f32d --- /dev/null +++ b/.github/PULL_REQUEST_TEMPLATE.md @@ -0,0 +1,13 @@ +## what +* Describe high-level what changed as a result of these commits (i.e. in plain-english, what do these changes mean?) +* Use bullet points to be concise and to the point. + +## why +* Provide the justifications for the changes (e.g. business case). +* Describe why these changes were made (e.g. why do these commits fix the problem?) +* Use bullet points to be concise and to the point. + +## references +* Link to any supporting github issues or helpful documentation to add some context (e.g. stackoverflow). +* Use `closes #123`, if this PR closes a GitHub issue `#123` + diff --git a/.github/workflows/slash-command-dispatch.yml b/.github/workflows/slash-command-dispatch.yml new file mode 100644 index 0000000..2b30f33 --- /dev/null +++ b/.github/workflows/slash-command-dispatch.yml @@ -0,0 +1,20 @@ +name: Slash Command Dispatch +on: + issue_comment: + types: [created] + +jobs: + slashCommandDispatch: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v2 + + - name: Slash Command Dispatch + uses: cloudposse/actions/github/slash-command-dispatch@0.15.0 + with: + token: ${{ secrets.PUBLIC_REPO_ACCESS_TOKEN }} + reaction-token: ${{ secrets.GITHUB_TOKEN }} + repository: cloudposse/actions + commands: rebuild-readme, terraform-fmt + permission: none + issue-type: pull-request diff --git a/.gitignore b/.gitignore index 93b8e3a..983fb11 100644 --- a/.gitignore +++ b/.gitignore @@ -5,9 +5,6 @@ *.tfstate *.tfstate.* -# .tfvars files -*.tfvars - **/.idea **/*.iml diff --git a/README.md b/README.md index 6285019..a92f006 100644 --- a/README.md +++ b/README.md @@ -1,17 +1,56 @@ - + [![README Header][readme_header_img]][readme_header_link] [![Cloud Posse][logo]](https://cpco.io/homepage) # terraform-aws-lambda-elasticsearch-cleanup - [![Build Status](https://travis-ci.org/cloudposse/terraform-aws-lambda-elasticsearch-cleanup.svg?branch=master)](https://travis-ci.org/cloudposse/terraform-aws-lambda-elasticsearch-cleanup) [![Latest Release](https://img.shields.io/github/release/cloudposse/terraform-aws-lambda-elasticsearch-cleanup.svg)](https://github.com/cloudposse/terraform-aws-lambda-elasticsearch-cleanup/releases/latest) [![Slack Community](https://slack.cloudposse.com/badge.svg)](https://slack.cloudposse.com) + [![Codefresh Build Status](https://g.codefresh.io/api/badges/pipeline/cloudposse/terraform-modules%2Fterraform-aws-lambda-elasticsearch-cleanup?type=cf-1)](https://g.codefresh.io/public/accounts/cloudposse/pipelines/5eaa33ff1dc82bb99faa3905) [![Latest Release](https://img.shields.io/github/release/cloudposse/terraform-aws-lambda-elasticsearch-cleanup.svg)](https://github.com/cloudposse/terraform-aws-lambda-elasticsearch-cleanup/releases/latest) [![Slack 
Community](https://slack.cloudposse.com/badge.svg)](https://slack.cloudposse.com) -Terraform module to provision a scheduled Lambda function which will +Terraform module to provision a scheduled Lambda function which will delete old Elasticsearch indexes using [SigV4Auth](https://docs.aws.amazon.com/AmazonS3/latest/API/sig-v4-authenticating-requests.html) authentication. The -lambda function can optionally send output to an SNS topic if the -topic ARN is given. This module was largely inspired by +lambda function can optionally send output to an SNS topic if the +topic ARN is given. This module was largely inspired by [aws-lambda-es-cleanup](https://github.com/cloudreach/aws-lambda-es-cleanup) @@ -53,14 +92,19 @@ We literally have [*hundreds of terraform modules*][terraform_modules] that are Instead pin to the release tag (e.g. `?ref=tags/x.y.z`) of one of our [latest releases](https://github.com/cloudposse/terraform-aws-lambda-elasticsearch-cleanup/releases). + +For a complete example, see [examples/complete](examples/complete). + +For automated tests of the complete example using [bats](https://github.com/bats-core/bats-core) and [Terratest](https://github.com/gruntwork-io/terratest) (which tests and deploys the example on AWS), see [test](test). + ```hcl module "elasticsearch_cleanup" { - source = "../" - es_endpoint = "${module.elasticsearch.domain_endpoint}" - es_domain_arn = "${module.elasticsearch.domain_arn}" - es_security_group_id = "${module.elasticsearch.security_group_id}" - vpc_id = "${module.vpc.vpc_id}" - namespace = "example" + source = "https://github.com/cloudposse/terraform-aws-lambda-elasticsearch-cleanup.git?ref=master" + es_endpoint = module.elasticsearch.domain_endpoint + es_domain_arn = module.elasticsearch.domain_arn + es_security_group_id = module.elasticsearch.security_group_id + vpc_id = module.vpc.vpc_id + namespace = "eg" stage = "dev" schedule = "rate(5 minutes)" } @@ -87,31 +131,33 @@ Available targets: | Name | Description | Type | Default | Required | |------|-------------|:----:|:-----:|:-----:| -| attributes | Additional attributes (e.g. `1`) | list | `` | no | -| delete_after | Number of days to preserve | string | `15` | no | +| attributes | Additional attributes (e.g. `1`) | list(string) | `` | no | +| delete_after | Number of days to preserve | number | `15` | no | | delimiter | Delimiter to be used between `namespace`, `stage`, `name` and `attributes` | string | `-` | no | -| enabled | This module will not create any resources unless enabled is set to "true" | string | `true` | no | +| enabled | This module will not create any resources unless enabled is set to "true" | bool | `true` | no | | es_domain_arn | The Elasticsearch domain ARN | string | - | yes | | es_endpoint | The Elasticsearch endpoint for the Lambda function to connect to | string | - | yes | | es_security_group_id | The Elasticsearch cluster security group ID | string | - | yes | | index | Index/indices to process. Use a comma-separated list. Specify `all` to match every index except for `.kibana` or `.kibana_1` | string | `all` | no | | index_format | Combined with 'index' variable and is used to evaluate the index age | string | `%Y.%m.%d` | no | | name | Solution name, e.g. 'app' or 'cluster' | string | `app` | no | -| namespace | Namespace, which could be your organization name, e.g. 'eg' or 'cp' | string | - | yes | +| namespace | Namespace, which could be your organization name, e.g. 
'eg' or 'cp' | string | `` | no | | python_version | The Python version to use | string | `2.7` | no | | schedule | CloudWatch Events rule schedule using cron or rate expression | string | `cron(0 3 * * ? *)` | no | | sns_arn | SNS ARN to publish alerts | string | `` | no | -| stage | Stage, e.g. 'prod', 'staging', 'dev', or 'test' | string | - | yes | -| subnet_ids | Subnet ids | list | - | yes | -| tags | Additional tags (e.g. `map('BusinessUnit','XYZ')` | map | `` | no | -| timeout | Timeout for Lambda function in seconds | string | `300` | no | +| stage | Stage, e.g. 'prod', 'staging', 'dev', or 'test' | string | `` | no | +| subnet_ids | Subnet IDs | list(string) | - | yes | +| tags | Additional tags (e.g. `map('BusinessUnit','XYZ')` | map(string) | `` | no | +| timeout | Timeout for Lambda function in seconds | number | `300` | no | | vpc_id | The VPC ID for the Lambda function | string | - | yes | ## Outputs | Name | Description | |------|-------------| -| security_group_id | Security Group ID of the Lambda | +| lambda_function_arn | ARN of the Lambda Function | +| lambda_function_source_code_size | The size in bytes of the function .zip file | +| security_group_id | Security Group ID of the Lambda Function | @@ -135,42 +181,51 @@ Check out these related projects. ## Help -**Got a question?** +**Got a question?** We got answers. File a GitHub [issue](https://github.com/cloudposse/terraform-aws-lambda-elasticsearch-cleanup/issues), send us an [email][email] or join our [Slack Community][slack]. [![README Commercial Support][readme_commercial_support_img]][readme_commercial_support_link] -## Commercial Support +## DevOps Accelerator for Startups -Work directly with our team of DevOps experts via email, slack, and video conferencing. -We provide [*commercial support*][commercial_support] for all of our [Open Source][github] projects. As a *Dedicated Support* customer, you have access to our team of subject matter experts at a fraction of the cost of a full-time engineer. +We are a [**DevOps Accelerator**][commercial_support]. We'll help you build your cloud infrastructure from the ground up so you can own it. Then we'll show you how to operate it and stick around for as long as you need us. -[![E-Mail](https://img.shields.io/badge/email-hello@cloudposse.com-blue.svg)][email] +[![Learn More](https://img.shields.io/badge/learn%20more-success.svg?style=for-the-badge)][commercial_support] -- **Questions.** We'll use a Shared Slack channel between your team and ours. -- **Troubleshooting.** We'll help you triage why things aren't working. -- **Code Reviews.** We'll review your Pull Requests and provide constructive feedback. -- **Bug Fixes.** We'll rapidly work to fix any bugs in our projects. -- **Build New Terraform Modules.** We'll [develop original modules][module_development] to provision infrastructure. -- **Cloud Architecture.** We'll assist with your cloud strategy and design. -- **Implementation.** We'll provide hands-on support to implement our reference architectures. +Work directly with our team of DevOps experts via email, slack, and video conferencing. +We deliver 10x the value for a fraction of the cost of a full-time engineer. Our track record is not even funny. If you want things done right and you need it done FAST, then we're your best bet. - -## Terraform Module Development - -Are you interested in custom Terraform module development? Submit your inquiry using [our form][module_development] today and we'll get back to you ASAP. 
- +- **Reference Architecture.** You'll get everything you need from the ground up built using 100% infrastructure as code. +- **Release Engineering.** You'll have end-to-end CI/CD with unlimited staging environments. +- **Site Reliability Engineering.** You'll have total visibility into your apps and microservices. +- **Security Baseline.** You'll have built-in governance with accountability and audit logs for all changes. +- **GitOps.** You'll be able to operate your infrastructure via Pull Requests. +- **Training.** You'll receive hands-on training so your team can operate what we build. +- **Questions.** You'll have a direct line of communication between our teams via a Shared Slack channel. +- **Troubleshooting.** You'll get help to triage when things aren't working. +- **Code Reviews.** You'll receive constructive feedback on Pull Requests. +- **Bug Fixes.** We'll rapidly work with you to fix any bugs in our projects. ## Slack Community Join our [Open Source Community][slack] on Slack. It's **FREE** for everyone! Our "SweetOps" community is where you get to talk with others who share a similar vision for how to rollout and manage infrastructure. This is the best place to talk shop, ask questions, solicit feedback, and work together as a community to build totally *sweet* infrastructure. +## Discourse Forums + +Participate in our [Discourse Forums][discourse]. Here you'll find answers to commonly asked questions. Most questions will be related to the enormous number of projects we support on our GitHub. Come here to collaborate on answers, find solutions, and get ideas about the products and services we value. It only takes a minute to get started! Just sign in with SSO using your GitHub account. + ## Newsletter -Signup for [our newsletter][newsletter] that covers everything on our technology radar. Receive updates on what we're up to on GitHub as well as awesome new projects we discover. +Sign up for [our newsletter][newsletter] that covers everything on our technology radar. Receive updates on what we're up to on GitHub as well as awesome new projects we discover. + +## Office Hours + +[Join us every Wednesday via Zoom][office_hours] for our weekly "Lunch & Learn" sessions. It's **FREE** for everyone! + +[![zoom](https://img.cloudposse.com/fit-in/200x200/https://cloudposse.com/wp-content/uploads/2019/08/Powered-by-Zoom.png")][office_hours] ## Contributing @@ -195,7 +250,7 @@ In general, PRs are welcome. We follow the typical "fork-and-pull" Git workflow. ## Copyright -Copyright © 2017-2019 [Cloud Posse, LLC](https://cpco.io/copyright) +Copyright © 2017-2020 [Cloud Posse, LLC](https://cpco.io/copyright) @@ -250,39 +305,44 @@ Check out [our other projects][github], [follow us on twitter][twitter], [apply ### Contributors -| [![Josh Myers][joshmyers_avatar]][joshmyers_homepage]
[Josh Myers][joshmyers_homepage] |
-|---|
+| [![Josh Myers][joshmyers_avatar]][joshmyers_homepage]<br/>[Josh Myers][joshmyers_homepage] | [![Erik Osterman][osterman_avatar]][osterman_homepage]<br/>[Erik Osterman][osterman_homepage] | [![Andriy Knysh][aknysh_avatar]][aknysh_homepage]<br/>[Andriy Knysh][aknysh_homepage] | [![Igor Rodionov][goruha_avatar]][goruha_homepage]<br/>
[Igor Rodionov][goruha_homepage] | +|---|---|---|---| [joshmyers_homepage]: https://github.com/joshmyers - [joshmyers_avatar]: https://github.com/joshmyers.png?size=150 - - + [joshmyers_avatar]: https://img.cloudposse.com/150x150/https://github.com/joshmyers.png + [osterman_homepage]: https://github.com/osterman + [osterman_avatar]: https://img.cloudposse.com/150x150/https://github.com/osterman.png + [aknysh_homepage]: https://github.com/aknysh + [aknysh_avatar]: https://img.cloudposse.com/150x150/https://github.com/aknysh.png + [goruha_homepage]: https://github.com/goruha + [goruha_avatar]: https://img.cloudposse.com/150x150/https://github.com/goruha.png [![README Footer][readme_footer_img]][readme_footer_link] [![Beacon][beacon]][website] [logo]: https://cloudposse.com/logo-300x69.svg - [docs]: https://cpco.io/docs - [website]: https://cpco.io/homepage - [github]: https://cpco.io/github - [jobs]: https://cpco.io/jobs - [hire]: https://cpco.io/hire - [slack]: https://cpco.io/slack - [linkedin]: https://cpco.io/linkedin - [twitter]: https://cpco.io/twitter - [testimonial]: https://cpco.io/leave-testimonial - [newsletter]: https://cpco.io/newsletter - [email]: https://cpco.io/email - [commercial_support]: https://cpco.io/commercial-support - [we_love_open_source]: https://cpco.io/we-love-open-source - [module_development]: https://cpco.io/module-development - [terraform_modules]: https://cpco.io/terraform-modules - [readme_header_img]: https://cloudposse.com/readme/header/img?repo=cloudposse/terraform-aws-lambda-elasticsearch-cleanup - [readme_header_link]: https://cloudposse.com/readme/header/link?repo=cloudposse/terraform-aws-lambda-elasticsearch-cleanup - [readme_footer_img]: https://cloudposse.com/readme/footer/img?repo=cloudposse/terraform-aws-lambda-elasticsearch-cleanup - [readme_footer_link]: https://cloudposse.com/readme/footer/link?repo=cloudposse/terraform-aws-lambda-elasticsearch-cleanup - [readme_commercial_support_img]: https://cloudposse.com/readme/commercial-support/img?repo=cloudposse/terraform-aws-lambda-elasticsearch-cleanup - [readme_commercial_support_link]: https://cloudposse.com/readme/commercial-support/link?repo=cloudposse/terraform-aws-lambda-elasticsearch-cleanup + [docs]: https://cpco.io/docs?utm_source=github&utm_medium=readme&utm_campaign=cloudposse/terraform-aws-lambda-elasticsearch-cleanup&utm_content=docs + [website]: https://cpco.io/homepage?utm_source=github&utm_medium=readme&utm_campaign=cloudposse/terraform-aws-lambda-elasticsearch-cleanup&utm_content=website + [github]: https://cpco.io/github?utm_source=github&utm_medium=readme&utm_campaign=cloudposse/terraform-aws-lambda-elasticsearch-cleanup&utm_content=github + [jobs]: https://cpco.io/jobs?utm_source=github&utm_medium=readme&utm_campaign=cloudposse/terraform-aws-lambda-elasticsearch-cleanup&utm_content=jobs + [hire]: https://cpco.io/hire?utm_source=github&utm_medium=readme&utm_campaign=cloudposse/terraform-aws-lambda-elasticsearch-cleanup&utm_content=hire + [slack]: https://cpco.io/slack?utm_source=github&utm_medium=readme&utm_campaign=cloudposse/terraform-aws-lambda-elasticsearch-cleanup&utm_content=slack + [linkedin]: https://cpco.io/linkedin?utm_source=github&utm_medium=readme&utm_campaign=cloudposse/terraform-aws-lambda-elasticsearch-cleanup&utm_content=linkedin + [twitter]: https://cpco.io/twitter?utm_source=github&utm_medium=readme&utm_campaign=cloudposse/terraform-aws-lambda-elasticsearch-cleanup&utm_content=twitter + [testimonial]: 
https://cpco.io/leave-testimonial?utm_source=github&utm_medium=readme&utm_campaign=cloudposse/terraform-aws-lambda-elasticsearch-cleanup&utm_content=testimonial + [office_hours]: https://cloudposse.com/office-hours?utm_source=github&utm_medium=readme&utm_campaign=cloudposse/terraform-aws-lambda-elasticsearch-cleanup&utm_content=office_hours + [newsletter]: https://cpco.io/newsletter?utm_source=github&utm_medium=readme&utm_campaign=cloudposse/terraform-aws-lambda-elasticsearch-cleanup&utm_content=newsletter + [discourse]: https://ask.sweetops.com/?utm_source=github&utm_medium=readme&utm_campaign=cloudposse/terraform-aws-lambda-elasticsearch-cleanup&utm_content=discourse + [email]: https://cpco.io/email?utm_source=github&utm_medium=readme&utm_campaign=cloudposse/terraform-aws-lambda-elasticsearch-cleanup&utm_content=email + [commercial_support]: https://cpco.io/commercial-support?utm_source=github&utm_medium=readme&utm_campaign=cloudposse/terraform-aws-lambda-elasticsearch-cleanup&utm_content=commercial_support + [we_love_open_source]: https://cpco.io/we-love-open-source?utm_source=github&utm_medium=readme&utm_campaign=cloudposse/terraform-aws-lambda-elasticsearch-cleanup&utm_content=we_love_open_source + [terraform_modules]: https://cpco.io/terraform-modules?utm_source=github&utm_medium=readme&utm_campaign=cloudposse/terraform-aws-lambda-elasticsearch-cleanup&utm_content=terraform_modules + [readme_header_img]: https://cloudposse.com/readme/header/img + [readme_header_link]: https://cloudposse.com/readme/header/link?utm_source=github&utm_medium=readme&utm_campaign=cloudposse/terraform-aws-lambda-elasticsearch-cleanup&utm_content=readme_header_link + [readme_footer_img]: https://cloudposse.com/readme/footer/img + [readme_footer_link]: https://cloudposse.com/readme/footer/link?utm_source=github&utm_medium=readme&utm_campaign=cloudposse/terraform-aws-lambda-elasticsearch-cleanup&utm_content=readme_footer_link + [readme_commercial_support_img]: https://cloudposse.com/readme/commercial-support/img + [readme_commercial_support_link]: https://cloudposse.com/readme/commercial-support/link?utm_source=github&utm_medium=readme&utm_campaign=cloudposse/terraform-aws-lambda-elasticsearch-cleanup&utm_content=readme_commercial_support_link [share_twitter]: https://twitter.com/intent/tweet/?text=terraform-aws-lambda-elasticsearch-cleanup&url=https://github.com/cloudposse/terraform-aws-lambda-elasticsearch-cleanup [share_linkedin]: https://www.linkedin.com/shareArticle?mini=true&title=terraform-aws-lambda-elasticsearch-cleanup&url=https://github.com/cloudposse/terraform-aws-lambda-elasticsearch-cleanup [share_reddit]: https://reddit.com/submit/?url=https://github.com/cloudposse/terraform-aws-lambda-elasticsearch-cleanup diff --git a/README.yaml b/README.yaml index 84334ac..2e4bb7e 100644 --- a/README.yaml +++ b/README.yaml @@ -18,9 +18,9 @@ github_repo: cloudposse/terraform-aws-lambda-elasticsearch-cleanup # Badges to display badges: - - name: "Build Status" - image: "https://travis-ci.org/cloudposse/terraform-aws-lambda-elasticsearch-cleanup.svg?branch=master" - url: "https://travis-ci.org/cloudposse/terraform-aws-lambda-elasticsearch-cleanup" + - name: "Codefresh Build Status" + image: "https://g.codefresh.io/api/badges/pipeline/cloudposse/terraform-modules%2Fterraform-aws-lambda-elasticsearch-cleanup?type=cf-1" + url: "https://g.codefresh.io/public/accounts/cloudposse/pipelines/5eaa33ff1dc82bb99faa3905" - name: "Latest Release" image: 
"https://img.shields.io/github/release/cloudposse/terraform-aws-lambda-elasticsearch-cleanup.svg" url: "https://github.com/cloudposse/terraform-aws-lambda-elasticsearch-cleanup/releases/latest" @@ -41,22 +41,27 @@ related: # Short description of this project description: |- - Terraform module to provision a scheduled Lambda function which will + Terraform module to provision a scheduled Lambda function which will delete old Elasticsearch indexes using [SigV4Auth](https://docs.aws.amazon.com/AmazonS3/latest/API/sig-v4-authenticating-requests.html) authentication. The - lambda function can optionally send output to an SNS topic if the - topic ARN is given. This module was largely inspired by + lambda function can optionally send output to an SNS topic if the + topic ARN is given. This module was largely inspired by [aws-lambda-es-cleanup](https://github.com/cloudreach/aws-lambda-es-cleanup) # How to use this project usage: |- + + For a complete example, see [examples/complete](examples/complete). + + For automated tests of the complete example using [bats](https://github.com/bats-core/bats-core) and [Terratest](https://github.com/gruntwork-io/terratest) (which tests and deploys the example on AWS), see [test](test). + ```hcl module "elasticsearch_cleanup" { - source = "../" - es_endpoint = "${module.elasticsearch.domain_endpoint}" - es_domain_arn = "${module.elasticsearch.domain_arn}" - es_security_group_id = "${module.elasticsearch.security_group_id}" - vpc_id = "${module.vpc.vpc_id}" - namespace = "example" + source = "https://github.com/cloudposse/terraform-aws-lambda-elasticsearch-cleanup.git?ref=master" + es_endpoint = module.elasticsearch.domain_endpoint + es_domain_arn = module.elasticsearch.domain_arn + es_security_group_id = module.elasticsearch.security_group_id + vpc_id = module.vpc.vpc_id + namespace = "eg" stage = "dev" schedule = "rate(5 minutes)" } @@ -70,3 +75,9 @@ include: contributors: - name: "Josh Myers" github: "joshmyers" + - name: "Erik Osterman" + github: "osterman" + - name: "Andriy Knysh" + github: "aknysh" + - name: "Igor Rodionov" + github: "goruha" diff --git a/codefresh/test.yml b/codefresh/test.yml new file mode 100644 index 0000000..0c93b81 --- /dev/null +++ b/codefresh/test.yml @@ -0,0 +1,74 @@ +version: '1.0' + +stages: + - Prepare + - Test + +steps: + wait: + title: Wait + stage: Prepare + image: codefresh/cli:latest + commands: + - codefresh get builds --pipeline=${{CF_REPO_NAME}} --status running --limit 1000 -o json | jq --arg id ${{CF_BUILD_ID}} -ser 'flatten|.[-1].id==$id' + retry: + maxAttempts: 10 + delay: 20 + exponentialFactor: 1.1 + + main_clone: + title: "Clone repository" + type: git-clone + stage: Prepare + description: "Initialize" + repo: ${{CF_REPO_OWNER}}/${{CF_REPO_NAME}} + git: CF-default + revision: ${{CF_REVISION}} + + clean_init: + title: Prepare build-harness and test-harness + image: ${{TEST_IMAGE}} + stage: Prepare + commands: + - cf_export PATH="/usr/local/terraform/0.12/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin" + - make init + - git -C build-harness checkout master + - make -C test/ clean init TEST_HARNESS_BRANCH=master + - make -C test/src clean init + - find . -type d -name '.terraform' | xargs rm -rf + - find . 
-type f -name 'terraform.tfstate*' -exec rm -f {} \; + + test: + type: "parallel" + title: "Run tests" + description: "Run all tests in parallel" + stage: Test + steps: + test_readme_lint: + title: "Test README.md updated" + stage: "Test" + image: ${{TEST_IMAGE}} + description: Test "readme/lint" + commands: + - make readme/lint + + test_module: + title: Test module with bats + image: ${{TEST_IMAGE}} + stage: Test + commands: + - make -C test/ module + + test_examples_complete: + title: Test "examples/complete" with bats + image: ${{TEST_IMAGE}} + stage: Test + commands: + - make -C test/ examples/complete + + test_examples_complete_terratest: + title: Test "examples/complete" with terratest + image: ${{TEST_IMAGE}} + stage: Test + commands: + - make -C test/src diff --git a/docs/terraform.md b/docs/terraform.md index 2bf2f9f..378e3e3 100644 --- a/docs/terraform.md +++ b/docs/terraform.md @@ -2,29 +2,31 @@ | Name | Description | Type | Default | Required | |------|-------------|:----:|:-----:|:-----:| -| attributes | Additional attributes (e.g. `1`) | list | `` | no | -| delete_after | Number of days to preserve | string | `15` | no | +| attributes | Additional attributes (e.g. `1`) | list(string) | `` | no | +| delete_after | Number of days to preserve | number | `15` | no | | delimiter | Delimiter to be used between `namespace`, `stage`, `name` and `attributes` | string | `-` | no | -| enabled | This module will not create any resources unless enabled is set to "true" | string | `true` | no | +| enabled | This module will not create any resources unless enabled is set to "true" | bool | `true` | no | | es_domain_arn | The Elasticsearch domain ARN | string | - | yes | | es_endpoint | The Elasticsearch endpoint for the Lambda function to connect to | string | - | yes | | es_security_group_id | The Elasticsearch cluster security group ID | string | - | yes | | index | Index/indices to process. Use a comma-separated list. Specify `all` to match every index except for `.kibana` or `.kibana_1` | string | `all` | no | | index_format | Combined with 'index' variable and is used to evaluate the index age | string | `%Y.%m.%d` | no | | name | Solution name, e.g. 'app' or 'cluster' | string | `app` | no | -| namespace | Namespace, which could be your organization name, e.g. 'eg' or 'cp' | string | - | yes | +| namespace | Namespace, which could be your organization name, e.g. 'eg' or 'cp' | string | `` | no | | python_version | The Python version to use | string | `2.7` | no | | schedule | CloudWatch Events rule schedule using cron or rate expression | string | `cron(0 3 * * ? *)` | no | | sns_arn | SNS ARN to publish alerts | string | `` | no | -| stage | Stage, e.g. 'prod', 'staging', 'dev', or 'test' | string | - | yes | -| subnet_ids | Subnet ids | list | - | yes | -| tags | Additional tags (e.g. `map('BusinessUnit','XYZ')` | map | `` | no | -| timeout | Timeout for Lambda function in seconds | string | `300` | no | +| stage | Stage, e.g. 'prod', 'staging', 'dev', or 'test' | string | `` | no | +| subnet_ids | Subnet IDs | list(string) | - | yes | +| tags | Additional tags (e.g. 
`map('BusinessUnit','XYZ')` | map(string) | `` | no | +| timeout | Timeout for Lambda function in seconds | number | `300` | no | | vpc_id | The VPC ID for the Lambda function | string | - | yes | ## Outputs | Name | Description | |------|-------------| -| security_group_id | Security Group ID of the Lambda | +| lambda_function_arn | ARN of the Lambda Function | +| lambda_function_source_code_size | The size in bytes of the function .zip file | +| security_group_id | Security Group ID of the Lambda Function | diff --git a/example/main.tf b/example/main.tf deleted file mode 100644 index 9632bfd..0000000 --- a/example/main.tf +++ /dev/null @@ -1,66 +0,0 @@ -module "label" { - source = "git::https://github.com/cloudposse/terraform-terraform-label.git?ref=tags/0.2.1" - namespace = "${var.namespace}" - name = "${var.name}" - stage = "${var.stage}" - delimiter = "${var.delimiter}" - attributes = "${compact(concat(var.attributes, list("elasticsearch", "cleanup")))}" - tags = "${var.tags}" - enabled = "true" -} - -module "vpc" { - source = "git::https://github.com/cloudposse/terraform-aws-vpc.git?ref=master" - name = "${var.name}" - namespace = "${var.namespace}" - stage = "${var.stage}" - tags = "${module.label.tags}" -} - -module "subnets" { - source = "git::https://github.com/cloudposse/terraform-aws-dynamic-subnets.git?ref=master" - name = "${var.name}" - namespace = "${var.namespace}" - stage = "${var.stage}" - region = "us-west-2" - vpc_id = "${module.vpc.vpc_id}" - igw_id = "${module.vpc.igw_id}" - cidr_block = "10.0.0.0/16" - availability_zones = ["us-west-2a", "us-west-2b"] - tags = "${module.label.tags}" -} - -module "elasticsearch" { - source = "git::https://github.com/cloudposse/terraform-aws-elasticsearch.git?ref=master" - name = "${var.name}" - namespace = "${var.namespace}" - stage = "${var.stage}" - dns_zone_id = "Z3SO0TKDDQ0RGG" - security_groups = [] - vpc_id = "${module.vpc.vpc_id}" - subnet_ids = ["${module.subnets.public_subnet_ids}"] - zone_awareness_enabled = "true" - elasticsearch_version = "6.3" - instance_type = "t2.small.elasticsearch" - instance_count = 4 - kibana_subdomain_name = "kibana-es" - encrypt_at_rest_enabled = "false" - ebs_volume_size = 10 - iam_actions = ["es:*"] - iam_role_arns = ["*"] - create_iam_service_linked_role = "false" - tags = "${module.label.tags}" -} - -module "elasticsearch_cleanup" { - source = "../" - es_endpoint = "${module.elasticsearch.domain_endpoint}" - es_domain_arn = "${module.elasticsearch.domain_arn}" - es_security_group_id = "${module.elasticsearch.security_group_id}" - subnet_ids = ["${module.subnets.public_subnet_ids}"] - vpc_id = "${module.vpc.vpc_id}" - namespace = "${var.namespace}" - stage = "${var.stage}" - schedule = "${var.schedule}" - tags = "${module.label.tags}" -} diff --git a/example/variables.tf b/example/variables.tf deleted file mode 100644 index 0c2fd47..0000000 --- a/example/variables.tf +++ /dev/null @@ -1,43 +0,0 @@ -variable "schedule" { - default = "rate(5 minutes)" -} - -variable "namespace" { - type = "string" - description = "Namespace, which could be your organization name, e.g. 'eg' or 'cp'" -} - -variable "stage" { - type = "string" - description = "Stage, e.g. 'prod', 'staging', 'dev', or 'test'" -} - -variable "environment" { - type = "string" - default = "" - description = "Environment, e.g. 'testing', 'UAT'" -} - -variable "name" { - type = "string" - default = "app" - description = "Solution name, e.g. 
'app' or 'cluster'" -} - -variable "delimiter" { - type = "string" - default = "-" - description = "Delimiter to be used between `name`, `namespace`, `stage`, etc." -} - -variable "attributes" { - type = "list" - default = [] - description = "Additional attributes (e.g. `1`)" -} - -variable "tags" { - type = "map" - default = {} - description = "Additional tags (e.g. `map('BusinessUnit`,`XYZ`)" -} diff --git a/examples/complete/fixtures.us-east-2.tfvars b/examples/complete/fixtures.us-east-2.tfvars new file mode 100644 index 0000000..3f55308 --- /dev/null +++ b/examples/complete/fixtures.us-east-2.tfvars @@ -0,0 +1,31 @@ +region = "us-east-2" + +namespace = "eg" + +stage = "test" + +name = "es-cleanup" + +availability_zones = ["us-east-1a", "us-east-1b"] + +instance_type = "t2.small.elasticsearch" + +elasticsearch_version = "7.4" + +instance_count = 2 + +zone_awareness_enabled = true + +encrypt_at_rest_enabled = false + +dedicated_master_enabled = false + +kibana_subdomain_name = "kibana-es-cleanup" + +ebs_volume_size = 10 + +create_iam_service_linked_role = false + +dns_zone_id = "Z3SO0TKDDQ0RGG" + +schedule = "rate(5 minutes)" diff --git a/examples/complete/main.tf b/examples/complete/main.tf new file mode 100644 index 0000000..2a437c1 --- /dev/null +++ b/examples/complete/main.tf @@ -0,0 +1,60 @@ +provider "aws" { + region = var.region +} + +module "vpc" { + source = "git::https://github.com/cloudposse/terraform-aws-vpc.git?ref=tags/0.10.0" + namespace = var.namespace + stage = var.stage + name = var.name + cidr_block = "172.16.0.0/16" +} + +module "subnets" { + source = "git::https://github.com/cloudposse/terraform-aws-dynamic-subnets.git?ref=tags/0.19.0" + availability_zones = var.availability_zones + namespace = var.namespace + stage = var.stage + name = var.name + vpc_id = module.vpc.vpc_id + igw_id = module.vpc.igw_id + cidr_block = module.vpc.vpc_cidr_block + nat_gateway_enabled = true + nat_instance_enabled = false +} + +module "elasticsearch" { + source = "git::https://github.com/cloudposse/terraform-aws-elasticsearch.git?ref=tags/0.12.0" + namespace = var.namespace + stage = var.stage + name = var.name + security_groups = [module.vpc.vpc_default_security_group_id] + vpc_id = module.vpc.vpc_id + subnet_ids = module.subnets.private_subnet_ids + zone_awareness_enabled = var.zone_awareness_enabled + elasticsearch_version = var.elasticsearch_version + instance_type = var.instance_type + instance_count = var.instance_count + encrypt_at_rest_enabled = var.encrypt_at_rest_enabled + dedicated_master_enabled = var.dedicated_master_enabled + create_iam_service_linked_role = var.create_iam_service_linked_role + kibana_subdomain_name = var.kibana_subdomain_name + ebs_volume_size = var.ebs_volume_size + dns_zone_id = var.dns_zone_id + + advanced_options = { + "rest.action.multi.allow_explicit_index" = "true" + } +} + +module "elasticsearch_cleanup" { + source = "../.." 
+ es_endpoint = module.elasticsearch.domain_endpoint + es_domain_arn = module.elasticsearch.domain_arn + es_security_group_id = module.elasticsearch.security_group_id + subnet_ids = module.subnets.public_subnet_ids + vpc_id = module.vpc.vpc_id + namespace = var.namespace + stage = var.stage + schedule = var.schedule +} diff --git a/examples/complete/outputs.tf b/examples/complete/outputs.tf new file mode 100644 index 0000000..8cf23f9 --- /dev/null +++ b/examples/complete/outputs.tf @@ -0,0 +1,79 @@ +output "public_subnet_cidrs" { + value = module.subnets.public_subnet_cidrs + description = "Public subnet CIDRs" +} + +output "private_subnet_cidrs" { + value = module.subnets.private_subnet_cidrs + description = "Private subnet CIDRs" +} + +output "vpc_cidr" { + value = module.vpc.vpc_cidr_block + description = "VPC CIDR" +} + +output "security_group_id" { + value = module.elasticsearch.security_group_id + description = "Security Group ID to control access to the Elasticsearch domain" +} + +output "domain_name" { + value = module.elasticsearch.domain_name + description = "Name of the Elasticsearch domain" +} + +output "domain_arn" { + value = module.elasticsearch.domain_arn + description = "ARN of the Elasticsearch domain" +} + +output "domain_id" { + value = module.elasticsearch.domain_id + description = "Unique identifier for the Elasticsearch domain" +} + +output "domain_endpoint" { + value = module.elasticsearch.domain_endpoint + description = "Domain-specific endpoint used to submit index, search, and data upload requests" +} + +output "kibana_endpoint" { + value = module.elasticsearch.kibana_endpoint + description = "Domain-specific endpoint for Kibana without https scheme" +} + +output "domain_hostname" { + value = module.elasticsearch.domain_hostname + description = "Elasticsearch domain hostname to submit index, search, and data upload requests" +} + +output "kibana_hostname" { + value = module.elasticsearch.kibana_hostname + description = "Kibana hostname" +} + +output "elasticsearch_user_iam_role_name" { + value = module.elasticsearch.elasticsearch_user_iam_role_name + description = "The name of the IAM role to allow access to Elasticsearch cluster" +} + +output "elasticsearch_user_iam_role_arn" { + value = module.elasticsearch.elasticsearch_user_iam_role_arn + description = "The ARN of the IAM role to allow access to Elasticsearch cluster" +} + +output "lambda_security_group_id" { + value = module.elasticsearch_cleanup.security_group_id + description = "Security Group ID of the Lambda Function" +} + +output "lambda_function_arn" { + value = module.elasticsearch_cleanup.lambda_function_arn + description = "ARN of the Lambda Function" +} + +output "lambda_function_source_code_size" { + value = module.elasticsearch_cleanup.lambda_function_source_code_size + description = "The size in bytes of the function .zip file" +} diff --git a/examples/complete/variables.tf b/examples/complete/variables.tf new file mode 100644 index 0000000..90efd44 --- /dev/null +++ b/examples/complete/variables.tf @@ -0,0 +1,80 @@ +variable "region" { + type = string + description = "AWS region" +} + +variable "namespace" { + type = string + description = "Namespace (e.g. `eg` or `cp`)" +} + +variable "stage" { + type = string + description = "Stage (e.g. `prod`, `dev`, `staging`, `infra`)" +} + +variable "name" { + type = string + description = "Name (e.g. 
`app` or `cluster`)" +} + +variable "availability_zones" { + type = list(string) + description = "List of availability zones" +} + +variable "instance_type" { + type = string + description = "The type of the instance" +} + +variable "elasticsearch_version" { + type = string + description = "Version of Elasticsearch to deploy (_e.g._ `7.1`, `6.8`, `6.7`, `6.5`, `6.4`, `6.3`, `6.2`, `6.0`, `5.6`, `5.5`, `5.3`, `5.1`, `2.3`, `1.5`" +} + +variable "instance_count" { + type = number + description = "Number of data nodes in the cluster" +} + +variable "zone_awareness_enabled" { + type = bool + description = "Enable zone awareness for Elasticsearch cluster" +} + +variable "encrypt_at_rest_enabled" { + type = bool + description = "Whether to enable encryption at rest" +} + +variable "dedicated_master_enabled" { + type = bool + description = "Indicates whether dedicated master nodes are enabled for the cluster" +} + +variable "kibana_subdomain_name" { + type = string + description = "The name of the subdomain for Kibana in the DNS zone (_e.g._ `kibana`, `ui`, `ui-es`, `search-ui`, `kibana.elasticsearch`)" +} + +variable "create_iam_service_linked_role" { + type = bool + description = "Whether to create `AWSServiceRoleForAmazonElasticsearchService` service-linked role. Set it to `false` if you already have an ElasticSearch cluster created in the AWS account and AWSServiceRoleForAmazonElasticsearchService already exists. See https://github.com/terraform-providers/terraform-provider-aws/issues/5218 for more info" +} + +variable "ebs_volume_size" { + type = number + description = "EBS volumes for data storage in GB" +} + +variable "dns_zone_id" { + type = string + description = "Route53 DNS Zone ID to add hostname records for Elasticsearch domain and Kibana" +} + +variable "schedule" { + type = string + default = "cron(0 3 * * ? 
*)" + description = "Schedule using cron or rate expression" +} diff --git a/main.tf b/main.tf index a92ba0d..9ee22af 100644 --- a/main.tf +++ b/main.tf @@ -75,7 +75,7 @@ data "aws_iam_policy_document" "default" { # Modules #-------------------------------------------------------------- module "label" { - source = "git::https://github.com/cloudposse/terraform-terraform-label.git?ref=tags/0.2.1" + source = "git::https://github.com/cloudposse/terraform-null-label.git?ref=tags/0.16.0" namespace = var.namespace name = var.name stage = var.stage @@ -85,7 +85,7 @@ module "label" { } module "artifact" { - source = "git::https://github.com/cloudposse/terraform-external-module-artifact.git?ref=tags/0.1.1" + source = "git::https://github.com/cloudposse/terraform-external-module-artifact.git?ref=tags/0.2.0" filename = "lambda.zip" module_name = "terraform-aws-lambda-elasticsearch-cleanup" module_path = substr(path.module, length(path.cwd) + 1, -1) diff --git a/outputs.tf b/outputs.tf index acbe8f8..6efcf85 100644 --- a/outputs.tf +++ b/outputs.tf @@ -1,4 +1,14 @@ output "security_group_id" { value = join("", aws_security_group.default.*.id) - description = "Security Group ID of the Lambda" + description = "Security Group ID of the Lambda Function" +} + +output "lambda_function_arn" { + value = join("", aws_lambda_function.default.*.arn) + description = "ARN of the Lambda Function" +} + +output "lambda_function_source_code_size" { + value = join("", aws_lambda_function.default.*.source_code_size) + description = "The size in bytes of the function .zip file" } diff --git a/test/.gitignore b/test/.gitignore new file mode 100644 index 0000000..442804a --- /dev/null +++ b/test/.gitignore @@ -0,0 +1 @@ +.test-harness diff --git a/test/Makefile b/test/Makefile new file mode 100644 index 0000000..17b2fe7 --- /dev/null +++ b/test/Makefile @@ -0,0 +1,43 @@ +TEST_HARNESS ?= https://github.com/cloudposse/test-harness.git +TEST_HARNESS_BRANCH ?= master +TEST_HARNESS_PATH = $(realpath .test-harness) +BATS_ARGS ?= --tap +BATS_LOG ?= test.log + +# Define a macro to run the tests +define RUN_TESTS +@echo "Running tests in $(1)" +@cd $(1) && bats $(BATS_ARGS) $(addsuffix .bats,$(addprefix $(TEST_HARNESS_PATH)/test/terraform/,$(TESTS))) +endef + +default: all + +-include Makefile.* + +## Provision the test-harnesss +.test-harness: + [ -d $@ ] || git clone --depth=1 -b $(TEST_HARNESS_BRANCH) $(TEST_HARNESS) $@ + +## Initialize the tests +init: .test-harness + +## Install all dependencies (OS specific) +deps:: + @exit 0 + +## Clean up the test harness +clean: + [ "$(TEST_HARNESS_PATH)" == "/" ] || rm -rf $(TEST_HARNESS_PATH) + +## Run all tests +all: module examples/complete + +## Run basic sanity checks against the module itself +module: export TESTS ?= installed lint get-modules module-pinning get-plugins provider-pinning validate terraform-docs input-descriptions output-descriptions +module: deps + $(call RUN_TESTS, ../) + +## Run tests against example +examples/complete: export TESTS ?= installed lint get-modules get-plugins validate +examples/complete: deps + $(call RUN_TESTS, ../$@) diff --git a/test/Makefile.alpine b/test/Makefile.alpine new file mode 100644 index 0000000..7925b18 --- /dev/null +++ b/test/Makefile.alpine @@ -0,0 +1,5 @@ +ifneq (,$(wildcard /sbin/apk)) +## Install all dependencies for alpine +deps:: init + @apk add --update terraform-docs@cloudposse json2hcl@cloudposse +endif diff --git a/test/src/.gitignore b/test/src/.gitignore new file mode 100644 index 0000000..31b0219 --- /dev/null +++ 
b/test/src/.gitignore @@ -0,0 +1,2 @@ +.gopath +vendor/ diff --git a/test/src/Gopkg.lock b/test/src/Gopkg.lock new file mode 100644 index 0000000..87bb6bd --- /dev/null +++ b/test/src/Gopkg.lock @@ -0,0 +1,92 @@ +# This file is autogenerated, do not edit; changes may be undone by the next 'dep ensure'. + + +[[projects]] + digest = "1:ffe9824d294da03b391f44e1ae8281281b4afc1bdaa9588c9097785e3af10cec" + name = "github.com/davecgh/go-spew" + packages = ["spew"] + pruneopts = "UT" + revision = "8991bc29aa16c548c550c7ff78260e27b9ab7c73" + version = "v1.1.1" + +[[projects]] + digest = "1:75d6042fc66aebc974cc49b0c6c7cc3b9adb5f8130fbfa0dbec0820d990afa25" + name = "github.com/gruntwork-io/terratest" + packages = [ + "modules/collections", + "modules/customerrors", + "modules/files", + "modules/logger", + "modules/retry", + "modules/shell", + "modules/ssh", + "modules/terraform", + ] + pruneopts = "UT" + revision = "892abb2c35878d0808101bbfe6559e931dc2d354" + version = "v0.16.0" + +[[projects]] + digest = "1:0028cb19b2e4c3112225cd871870f2d9cf49b9b4276531f03438a88e94be86fe" + name = "github.com/pmezard/go-difflib" + packages = ["difflib"] + pruneopts = "UT" + revision = "792786c7400a136282c1664665ae0a8db921c6c2" + version = "v1.0.0" + +[[projects]] + digest = "1:5da8ce674952566deae4dbc23d07c85caafc6cfa815b0b3e03e41979cedb8750" + name = "github.com/stretchr/testify" + packages = [ + "assert", + "require", + ] + pruneopts = "UT" + revision = "ffdc059bfe9ce6a4e144ba849dbedead332c6053" + version = "v1.3.0" + +[[projects]] + branch = "master" + digest = "1:831470c2758c8b733941144f2803a0ccad0632c5a767415b777ebd296b5f463e" + name = "golang.org/x/crypto" + packages = [ + "curve25519", + "ed25519", + "ed25519/internal/edwards25519", + "internal/chacha20", + "internal/subtle", + "poly1305", + "ssh", + "ssh/agent", + ] + pruneopts = "UT" + revision = "22d7a77e9e5f409e934ed268692e56707cd169e5" + +[[projects]] + branch = "master" + digest = "1:76ee51c3f468493aff39dbacc401e8831fbb765104cbf613b89bef01cf4bad70" + name = "golang.org/x/net" + packages = ["context"] + pruneopts = "UT" + revision = "f3200d17e092c607f615320ecaad13d87ad9a2b3" + +[[projects]] + branch = "master" + digest = "1:181f3fd33e620b958b5ab77da177cf775cdcccd7db82963607875fbd09ae995e" + name = "golang.org/x/sys" + packages = [ + "cpu", + "unix", + ] + pruneopts = "UT" + revision = "9cd6430ef91e39e1a0ec0470cf1321a33ef1b887" + +[solve-meta] + analyzer-name = "dep" + analyzer-version = 1 + input-imports = [ + "github.com/gruntwork-io/terratest/modules/terraform", + "github.com/stretchr/testify/assert", + ] + solver-name = "gps-cdcl" + solver-version = 1 diff --git a/test/src/Gopkg.toml b/test/src/Gopkg.toml new file mode 100644 index 0000000..995bac5 --- /dev/null +++ b/test/src/Gopkg.toml @@ -0,0 +1,7 @@ +[[constraint]] + name = "github.com/stretchr/testify" + version = "1.2.2" + +[prune] + go-tests = true + unused-packages = true diff --git a/test/src/Makefile b/test/src/Makefile new file mode 100644 index 0000000..6d5339c --- /dev/null +++ b/test/src/Makefile @@ -0,0 +1,50 @@ +PACKAGE = terraform-aws-lambda-elasticsearch-cleanup +GOEXE ?= /usr/bin/go +GOPATH = $(CURDIR)/.gopath +GOBIN = $(GOPATH)/bin +BASE = $(GOPATH)/src/$(PACKAGE) +PATH := $(PATH):$(GOBIN) + +export TF_DATA_DIR ?= $(CURDIR)/.terraform +export TF_CLI_ARGS_init ?= -get-plugins=true +export GOPATH + +.PHONY: all +## Default target +all: test + +ifneq (,$(wildcard /sbin/apk)) +## Install go, if not installed +$(GOEXE): + apk add --update go +endif + +ifeq ($(shell uname -s),Linux) +## 
Install all `dep`, if not installed +$(GOBIN)/dep: + @mkdir -p $(GOBIN) + @curl https://raw.githubusercontent.com/golang/dep/master/install.sh | sh +endif + +## Prepare the GOPATH +$(BASE): $(GOEXE) + @mkdir -p $(dir $@) + @ln -sf $(CURDIR) $@ + +## Download vendor dependencies to vendor/ +$(BASE)/vendor: $(BASE) $(GOBIN)/dep + cd $(BASE) && dep ensure + +.PHONY : init +## Initialize tests +init: $(BASE)/vendor + +.PHONY : test +## Run tests +test: init + cd $(BASE) && go test -v -timeout 60m -run TestExamplesComplete + +.PHONY : clean +## Clean up files +clean: + rm -rf .gopath/ vendor/ $(TF_DATA_DIR) diff --git a/test/src/examples_complete_test.go b/test/src/examples_complete_test.go new file mode 100644 index 0000000..8b3bb73 --- /dev/null +++ b/test/src/examples_complete_test.go @@ -0,0 +1,58 @@ +package test + +import ( + "testing" + + "github.com/gruntwork-io/terratest/modules/terraform" + "github.com/stretchr/testify/assert" +) + +// Test the Terraform module in examples/complete using Terratest. +func TestExamplesComplete(t *testing.T) { + t.Parallel() + + terraformOptions := &terraform.Options{ + // The path to where our Terraform code is located + TerraformDir: "../../examples/complete", + Upgrade: true, + // Variables to pass to our Terraform code using -var-file options + VarFiles: []string{"fixtures.us-east-2.tfvars"}, + } + + // At the end of the test, run `terraform destroy` to clean up any resources that were created + defer terraform.Destroy(t, terraformOptions) + + // This will run `terraform init` and `terraform apply` and fail the test if there are any errors + terraform.InitAndApply(t, terraformOptions) + + // Run `terraform output` to get the value of an output variable + vpcCidr := terraform.Output(t, terraformOptions, "vpc_cidr") + // Verify we're getting back the outputs we expect + assert.Equal(t, "172.16.0.0/16", vpcCidr) + + // Run `terraform output` to get the value of an output variable + privateSubnetCidrs := terraform.OutputList(t, terraformOptions, "private_subnet_cidrs") + // Verify we're getting back the outputs we expect + assert.Equal(t, []string{"172.16.0.0/18", "172.16.64.0/18"}, privateSubnetCidrs) + + // Run `terraform output` to get the value of an output variable + publicSubnetCidrs := terraform.OutputList(t, terraformOptions, "public_subnet_cidrs") + // Verify we're getting back the outputs we expect + assert.Equal(t, []string{"172.16.128.0/18", "172.16.192.0/18"}, publicSubnetCidrs) + + // Run `terraform output` to get the value of an output variable + domainHostname := terraform.Output(t, terraformOptions, "domain_hostname") + // Verify we're getting back the outputs we expect + assert.Equal(t, "es-cleanup.testing.cloudposse.co", domainHostname) + + // Run `terraform output` to get the value of an output variable + kibanaHostname := terraform.Output(t, terraformOptions, "kibana_hostname") + // Verify we're getting back the outputs we expect + assert.Equal(t, "kibana-es-cleanup.testing.cloudposse.co", kibanaHostname) + + // Run `terraform output` to get the value of an output variable + domainEndpoint := terraform.Output(t, terraformOptions, "domain_endpoint") + // Verify we're getting back the outputs we expect + assert.Contains(t, domainEndpoint, "vpc-eg-test-es-cleanup") + assert.Contains(t, domainEndpoint, "us-east-2.es.amazonaws.com") +} diff --git a/variables.tf b/variables.tf index 395521d..0254851 100644 --- a/variables.tf +++ b/variables.tf @@ -1,3 +1,39 @@ +variable "namespace" { + type = string + description = "Namespace, which could 
be your organization name, e.g. 'eg' or 'cp'" + default = "" +} + +variable "stage" { + type = string + description = "Stage, e.g. 'prod', 'staging', 'dev', or 'test'" + default = "" +} + +variable "name" { + type = string + default = "app" + description = "Solution name, e.g. 'app' or 'cluster'" +} + +variable "delimiter" { + type = string + default = "-" + description = "Delimiter to be used between `namespace`, `stage`, `name` and `attributes`" +} + +variable "attributes" { + type = list(string) + default = [] + description = "Additional attributes (e.g. `1`)" +} + +variable "tags" { + type = map(string) + default = {} + description = "Additional tags (e.g. `map('BusinessUnit','XYZ')`" +} + variable "enabled" { type = bool default = true @@ -25,6 +61,11 @@ variable "schedule" { description = "CloudWatch Events rule schedule using cron or rate expression" } +variable "vpc_id" { + type = string + description = "The VPC ID for the Lambda function" +} + variable "subnet_ids" { type = list(string) description = "Subnet IDs" @@ -48,42 +89,6 @@ variable "delete_after" { description = "Number of days to preserve" } -variable "namespace" { - type = string - description = "Namespace, which could be your organization name, e.g. 'eg' or 'cp'" - default = "" -} - -variable "stage" { - type = string - description = "Stage, e.g. 'prod', 'staging', 'dev', or 'test'" - default = "" -} - -variable "name" { - type = string - default = "app" - description = "Solution name, e.g. 'app' or 'cluster'" -} - -variable "delimiter" { - type = string - default = "-" - description = "Delimiter to be used between `namespace`, `stage`, `name` and `attributes`" -} - -variable "attributes" { - type = list(string) - default = [] - description = "Additional attributes (e.g. `1`)" -} - -variable "tags" { - type = map(string) - default = {} - description = "Additional tags (e.g. 
`map('BusinessUnit','XYZ')`" -} - variable "index_format" { type = string default = "%Y.%m.%d" @@ -101,8 +106,3 @@ variable "timeout" { default = 300 description = "Timeout for Lambda function in seconds" } - -variable "vpc_id" { - type = string - description = "The VPC ID for the Lambda function" -} From 289ae0697036ac4d1c348b2ef21c760c6cffbe34 Mon Sep 17 00:00:00 2001 From: aknysh Date: Wed, 29 Apr 2020 22:43:18 -0400 Subject: [PATCH 3/6] Add example and tests --- main.tf | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/main.tf b/main.tf index 9ee22af..6921193 100644 --- a/main.tf +++ b/main.tf @@ -88,7 +88,7 @@ module "artifact" { source = "git::https://github.com/cloudposse/terraform-external-module-artifact.git?ref=tags/0.2.0" filename = "lambda.zip" module_name = "terraform-aws-lambda-elasticsearch-cleanup" - module_path = substr(path.module, length(path.cwd) + 1, -1) + module_path = path.module } # Locals From ad8880314b145189ee74fbe3ab8e16971d9d6a4c Mon Sep 17 00:00:00 2001 From: aknysh Date: Wed, 29 Apr 2020 23:08:50 -0400 Subject: [PATCH 4/6] Add example and tests --- README.md | 1 + docs/terraform.md | 1 + examples/complete/fixtures.us-east-2.tfvars | 2 ++ examples/complete/main.tf | 1 + examples/complete/variables.tf | 6 ++++++ main.tf | 1 + variables.tf | 6 ++++++ 7 files changed, 18 insertions(+) diff --git a/README.md b/README.md index a92f006..f285c4f 100644 --- a/README.md +++ b/README.md @@ -131,6 +131,7 @@ Available targets: | Name | Description | Type | Default | Required | |------|-------------|:----:|:-----:|:-----:| +| artifact_url | URL template for the remote artifact | string | `https://artifacts.cloudposse.com/$$${module_name}/$$${git_ref}/$$${filename}` | no | | attributes | Additional attributes (e.g. `1`) | list(string) | `` | no | | delete_after | Number of days to preserve | number | `15` | no | | delimiter | Delimiter to be used between `namespace`, `stage`, `name` and `attributes` | string | `-` | no | diff --git a/docs/terraform.md b/docs/terraform.md index 378e3e3..9c481a0 100644 --- a/docs/terraform.md +++ b/docs/terraform.md @@ -2,6 +2,7 @@ | Name | Description | Type | Default | Required | |------|-------------|:----:|:-----:|:-----:| +| artifact_url | URL template for the remote artifact | string | `https://artifacts.cloudposse.com/$$${module_name}/$$${git_ref}/$$${filename}` | no | | attributes | Additional attributes (e.g. 
`1`) | list(string) | `` | no | | delete_after | Number of days to preserve | number | `15` | no | | delimiter | Delimiter to be used between `namespace`, `stage`, `name` and `attributes` | string | `-` | no | diff --git a/examples/complete/fixtures.us-east-2.tfvars b/examples/complete/fixtures.us-east-2.tfvars index 3f55308..64ca779 100644 --- a/examples/complete/fixtures.us-east-2.tfvars +++ b/examples/complete/fixtures.us-east-2.tfvars @@ -29,3 +29,5 @@ create_iam_service_linked_role = false dns_zone_id = "Z3SO0TKDDQ0RGG" schedule = "rate(5 minutes)" + +artifact_url = "https://artifacts.cloudposse.com/terraform-external-module-artifact/example/test.zip" diff --git a/examples/complete/main.tf b/examples/complete/main.tf index 2a437c1..c21b745 100644 --- a/examples/complete/main.tf +++ b/examples/complete/main.tf @@ -57,4 +57,5 @@ module "elasticsearch_cleanup" { namespace = var.namespace stage = var.stage schedule = var.schedule + artifact_url = var.artifact_url } diff --git a/examples/complete/variables.tf b/examples/complete/variables.tf index 90efd44..a1effba 100644 --- a/examples/complete/variables.tf +++ b/examples/complete/variables.tf @@ -78,3 +78,9 @@ variable "schedule" { default = "cron(0 3 * * ? *)" description = "Schedule using cron or rate expression" } + +variable "artifact_url" { + type = string + description = "URL template for the remote artifact" + default = "https://artifacts.cloudposse.com/$$${module_name}/$$${git_ref}/$$${filename}" +} diff --git a/main.tf b/main.tf index 6921193..ac65f5c 100644 --- a/main.tf +++ b/main.tf @@ -89,6 +89,7 @@ module "artifact" { filename = "lambda.zip" module_name = "terraform-aws-lambda-elasticsearch-cleanup" module_path = path.module + url = var.artifact_url } # Locals diff --git a/variables.tf b/variables.tf index 0254851..6c9acba 100644 --- a/variables.tf +++ b/variables.tf @@ -106,3 +106,9 @@ variable "timeout" { default = 300 description = "Timeout for Lambda function in seconds" } + +variable "artifact_url" { + type = string + description = "URL template for the remote artifact" + default = "https://artifacts.cloudposse.com/$$${module_name}/$$${git_ref}/$$${filename}" +} From 6adb7a73e722f69bf2ca50572945e79acd2d7ef4 Mon Sep 17 00:00:00 2001 From: aknysh Date: Wed, 29 Apr 2020 23:12:37 -0400 Subject: [PATCH 5/6] Add example and tests --- examples/complete/fixtures.us-east-2.tfvars | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/examples/complete/fixtures.us-east-2.tfvars b/examples/complete/fixtures.us-east-2.tfvars index 64ca779..a348265 100644 --- a/examples/complete/fixtures.us-east-2.tfvars +++ b/examples/complete/fixtures.us-east-2.tfvars @@ -6,7 +6,7 @@ stage = "test" name = "es-cleanup" -availability_zones = ["us-east-1a", "us-east-1b"] +availability_zones = ["us-east-2a", "us-east-2b"] instance_type = "t2.small.elasticsearch" From 724ba10bb15dacd5e52913989a3bd4397504ad8a Mon Sep 17 00:00:00 2001 From: aknysh Date: Thu, 30 Apr 2020 00:28:47 -0400 Subject: [PATCH 6/6] Add example and tests --- examples/complete/main.tf | 2 +- test/src/Makefile | 2 +- test/src/examples_complete_test.go | 5 +++++ 3 files changed, 7 insertions(+), 2 deletions(-) diff --git a/examples/complete/main.tf b/examples/complete/main.tf index c21b745..ca845d1 100644 --- a/examples/complete/main.tf +++ b/examples/complete/main.tf @@ -52,7 +52,7 @@ module "elasticsearch_cleanup" { es_endpoint = module.elasticsearch.domain_endpoint es_domain_arn = module.elasticsearch.domain_arn es_security_group_id = 
module.elasticsearch.security_group_id - subnet_ids = module.subnets.public_subnet_ids + subnet_ids = module.subnets.private_subnet_ids vpc_id = module.vpc.vpc_id namespace = var.namespace stage = var.stage diff --git a/test/src/Makefile b/test/src/Makefile index 6d5339c..25cab8f 100644 --- a/test/src/Makefile +++ b/test/src/Makefile @@ -42,7 +42,7 @@ init: $(BASE)/vendor .PHONY : test ## Run tests test: init - cd $(BASE) && go test -v -timeout 60m -run TestExamplesComplete + cd $(BASE) && go test -v -timeout 120m -run TestExamplesComplete .PHONY : clean ## Clean up files diff --git a/test/src/examples_complete_test.go b/test/src/examples_complete_test.go index 8b3bb73..4f60f55 100644 --- a/test/src/examples_complete_test.go +++ b/test/src/examples_complete_test.go @@ -55,4 +55,9 @@ func TestExamplesComplete(t *testing.T) { // Verify we're getting back the outputs we expect assert.Contains(t, domainEndpoint, "vpc-eg-test-es-cleanup") assert.Contains(t, domainEndpoint, "us-east-2.es.amazonaws.com") + + // Run `terraform output` to get the value of an output variable + lambdaFunctionArn := terraform.Output(t, terraformOptions, "lambda_function_arn") + // Verify we're getting back the outputs we expect + assert.Contains(t, lambdaFunctionArn, "function:eg-test-app-elasticsearch-cleanup") }
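
Usage sketch after this series (illustrative only; the `source` ref is a placeholder, and `module.elasticsearch`, `module.subnets` and `module.vpc` are assumed to be defined by the caller, as in examples/complete/main.tf). It shows the plain TF 0.12 expression syntax the module now expects and the new optional `artifact_url` input for overriding where the prebuilt lambda.zip is fetched from:

    module "elasticsearch_cleanup" {
      source = "git::https://github.com/cloudposse/terraform-aws-lambda-elasticsearch-cleanup.git?ref=<release-tag>"

      namespace            = "eg"
      stage                = "test"
      schedule             = "rate(5 minutes)"
      es_endpoint          = module.elasticsearch.domain_endpoint
      es_domain_arn        = module.elasticsearch.domain_arn
      es_security_group_id = module.elasticsearch.security_group_id
      subnet_ids           = module.subnets.private_subnet_ids
      vpc_id               = module.vpc.vpc_id

      # Optional: override the remote artifact location; when omitted, the
      # default URL template added in this series is used.
      artifact_url = "https://artifacts.cloudposse.com/terraform-external-module-artifact/example/test.zip"
    }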