diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS
index 41c1baa..07b38d2 100644
--- a/.github/CODEOWNERS
+++ b/.github/CODEOWNERS
@@ -1,4 +1,17 @@
# Use this file to define individuals or teams that are responsible for code in a repository.
# Read more:
+#
+# Order is important: the last matching pattern takes the most precedence
-* @cloudposse/engineering
\ No newline at end of file
+# These owners will be the default owners for everything
+* @cloudposse/engineering @cloudposse/contributors
+
+# Cloud Posse must review any changes to Makefiles
+**/Makefile @cloudposse/engineering
+**/Makefile.* @cloudposse/engineering
+
+# Cloud Posse must review any changes to GitHub actions
+.github/* @cloudposse/engineering
+
+# Cloud Posse must review any changes to standard context definition
+**/context.tf @cloudposse/engineering
diff --git a/.github/ISSUE_TEMPLATE/feature_request.md b/.github/ISSUE_TEMPLATE/feature_request.md
index ecc9eb6..39a8686 100644
--- a/.github/ISSUE_TEMPLATE/feature_request.md
+++ b/.github/ISSUE_TEMPLATE/feature_request.md
@@ -7,7 +7,7 @@ assignees: ''
---
-Have a question? Please checkout our [Slack Community](https://slack.cloudposse.com) in the `#geodesic` channel or visit our [Slack Archive](https://archive.sweetops.com/geodesic/).
+Have a question? Please check out our [Slack Community](https://slack.cloudposse.com) or visit our [Slack Archive](https://archive.sweetops.com/).
[](https://slack.cloudposse.com)
@@ -33,4 +33,4 @@ Explain what alternative solutions or features you've considered.
## Additional Context
-Add any other context or screenshots about the feature request here.
\ No newline at end of file
+Add any other context or screenshots about the feature request here.
diff --git a/.github/auto-release.yml b/.github/auto-release.yml
new file mode 100644
index 0000000..2836185
--- /dev/null
+++ b/.github/auto-release.yml
@@ -0,0 +1,40 @@
+name-template: 'v$RESOLVED_VERSION'
+tag-template: '$RESOLVED_VERSION'
+version-template: '$MAJOR.$MINOR.$PATCH'
+version-resolver:
+ major:
+ labels:
+ - 'major'
+ minor:
+ labels:
+ - 'minor'
+ - 'enhancement'
+ patch:
+ labels:
+ - 'patch'
+ - 'fix'
+ - 'bugfix'
+ - 'bug'
+ - 'hotfix'
+ default: 'minor'
+
+categories:
+ - title: '🚀 Enhancements'
+ labels:
+ - 'enhancement'
+ - title: '🐛 Bug Fixes'
+ labels:
+ - 'fix'
+ - 'bugfix'
+ - 'bug'
+ - 'hotfix'
+
+change-template: |
+
+ $TITLE @$AUTHOR (#$NUMBER)
+
+ $BODY
+
+
+template: |
+ $CHANGES
diff --git a/.github/workflows/auto-release.yml b/.github/workflows/auto-release.yml
new file mode 100644
index 0000000..ccc27be
--- /dev/null
+++ b/.github/workflows/auto-release.yml
@@ -0,0 +1,19 @@
+name: auto-release
+
+on:
+ push:
+ branches:
+ - master
+
+jobs:
+ semver:
+ runs-on: ubuntu-latest
+ steps:
+ # Drafts your next Release notes as Pull Requests are merged into "master"
+ - uses: release-drafter/release-drafter@v5
+ with:
+ publish: true
+ prerelease: false
+ config-name: auto-release.yml
+ env:
+ GITHUB_TOKEN: ${{ secrets.PUBLIC_REPO_ACCESS_TOKEN }}
diff --git a/.github/workflows/chatops.yml b/.github/workflows/chatops.yml
index a6bb11b..0d94310 100644
--- a/.github/workflows/chatops.yml
+++ b/.github/workflows/chatops.yml
@@ -9,13 +9,13 @@ jobs:
steps:
- uses: actions/checkout@v2
- name: "Handle common commands"
- uses: cloudposse/actions/github/slash-command-dispatch@0.15.0
+ uses: cloudposse/actions/github/slash-command-dispatch@0.16.0
with:
token: ${{ secrets.PUBLIC_REPO_ACCESS_TOKEN }}
reaction-token: ${{ secrets.GITHUB_TOKEN }}
repository: cloudposse/actions
commands: rebuild-readme, terraform-fmt
- permission: none
+ permission: triage
issue-type: pull-request
test:
@@ -24,13 +24,13 @@ jobs:
- name: "Checkout commit"
uses: actions/checkout@v2
- name: "Run tests"
- uses: cloudposse/actions/github/slash-command-dispatch@0.15.0
+ uses: cloudposse/actions/github/slash-command-dispatch@0.16.0
with:
token: ${{ secrets.PUBLIC_REPO_ACCESS_TOKEN }}
reaction-token: ${{ secrets.GITHUB_TOKEN }}
repository: cloudposse/actions
commands: test
- permission: none
+ permission: triage
issue-type: pull-request
reactions: false
diff --git a/Makefile b/Makefile
index 0f5ff64..4d8a600 100644
--- a/Makefile
+++ b/Makefile
@@ -1,6 +1,6 @@
SHELL := /bin/bash
LAMBDA_DIR := lambda
-DEPS_CONTAINER := alpine:3.8
+DEPS_CONTAINER := alpine:3.11
# List of targets the `readme` target should call before generating the readme
export README_DEPS ?= docs/targets.md docs/terraform.md
diff --git a/README.md b/README.md
index 1fed862..a7059db 100644
--- a/README.md
+++ b/README.md
@@ -86,10 +86,41 @@ module "elasticsearch_cleanup" {
vpc_id = module.vpc.vpc_id
namespace = "eg"
stage = "dev"
- schedule = "rate(5 minutes)"
+ schedule = "cron(0 3 * * ? *)"
}
```
+Indexes are expected to be in the format `name-date` where `date` is in the format specified by `var.index_format`.
+By default, all indexes except for the ones added by Kibana will be deleted based on the date part of the full
+index name. The actual creation date of the index is not used.
+
+Index matching is done with an unanchored regular expression, so "bar" matches the index "foobarbaz".
+
+- If the full index name, including the date part, matches `skip_index_re`, then the index will be skipped (never deleted).
+  Kibana indexes are skipped by the default `skip_index_re` of `^\.kibana*`, so if you supply your own `skip_index_re`
+  you must include the Kibana pattern in it if you still want the Kibana indexes skipped. (Since Kibana indexes do not have a
+  date part, this module should not delete them, but it will complain about their malformed dates unless they are excluded.)
+- If the index name without the trailing `-date` part matches `index_re`, then it will be cleaned up according to the date part.
+
+Keep in mind that, fundamentally, this module expects indexes to be in the format of `name-date` so it will not work
+properly if the regexes end up selecting an index that does not end with `-date`. To avoid edge cases, it is wise not
+to include dashes in your index name or date format.
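+
+For example, to clean up only indexes whose base names begin with `app` or `logs`, while keeping the default
+Kibana exclusion, you might configure the module as follows (a minimal sketch with illustrative values; adjust
+the patterns, date format and retention to match your own indexes):
+
+```hcl
+module "elasticsearch_cleanup" {
+  source = "git::https://github.com/cloudposse/terraform-aws-lambda-elasticsearch-cleanup.git?ref=master"
+
+  es_endpoint          = module.elasticsearch.domain_endpoint
+  es_domain_arn        = module.elasticsearch.domain_arn
+  es_security_group_id = module.elasticsearch.security_group_id
+  subnet_ids           = module.subnets.private_subnet_ids
+  vpc_id               = module.vpc.vpc_id
+
+  index_re      = "^(app|logs)"       # clean up indexes whose base name begins with "app" or "logs"
+  skip_index_re = "^\\.kibana*"       # keep skipping the Kibana indexes
+  index_format  = "%Y.%m.%d"          # date part of the index name, e.g. app-2020.07.01
+  delete_after  = 15                  # days of indexes to keep
+  schedule      = "cron(0 3 * * ? *)" # run daily at 03:00 UTC
+}
+```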
+
+## Migration
+
+Prior to version 0.10.0, this module had inputs `index`, which was a comma-separated list of index names or the
+special name "all" to indicate all but Kibana indexes, and `index_regex`, which was a regular expression for parsing
+index name and date parts. There was no mechanism for specifying a list of indexes to exclude.
+Starting with version 0.10.0, this module drops those inputs and instead takes `index_re` and `skip_index_re`,
+both of which are regular expressions. (You probably want to anchor your regexes to the beginning of the index name
+by starting them with `^`.)
+
+| If you previously had | Now use |
+|----------------------|----------|
+|`index = "all"`| Default values for `index_re` and `skip_index_re`|
+|`index = "a,xb,c0"` | `index_re = "^(a\|xb\|c0)"` and `skip_index_re = "^$"`|
+|`index_regex = "(ipat)-(dpat)"`|`index_re = "ipat"` and be sure `index_format` is correct for your date format|
+
@@ -109,6 +140,7 @@ Available targets:
```
+
## Module: cloudposse/terraform-aws-lambda-elasticsearch-cleanup
This module creates a scheduled Lambda function which will delete old
@@ -120,38 +152,44 @@ is given
| Name | Version |
|------|---------|
-| terraform | >= 0.12.0, < 0.14.0 |
-| aws | ~> 2.0 |
-| null | ~> 2.0 |
-| template | ~> 2.0 |
+| terraform | >= 0.12.0 |
+| aws | >= 2.0 |
+| null | >= 2.0 |
## Providers
| Name | Version |
|------|---------|
-| aws | ~> 2.0 |
+| aws | >= 2.0 |
## Inputs
| Name | Description | Type | Default | Required |
|------|-------------|------|---------|:--------:|
+| additional\_tag\_map | Additional tags for appending to tags\_as\_list\_of\_maps. Not added to `tags`. | `map(string)` | `{}` | no |
+| artifact\_git\_ref | Git ref of the lambda artifact to use. Use latest version if null. | `string` | `null` | no |
| artifact\_url | URL template for the remote artifact | `string` | `"https://artifacts.cloudposse.com/$${module_name}/$${git_ref}/$${filename}"` | no |
| attributes | Additional attributes (e.g. `1`) | `list(string)` | `[]` | no |
+| context | Single object for setting entire context at once.<br>See description of individual variables for details.<br>Leave string and numeric variables as `null` to use default value.<br>Individual variable settings (non-null) override settings in context object, except for attributes, tags, and additional\_tag\_map, which are merged. | <pre>object({<br>  enabled             = bool<br>  namespace           = string<br>  environment         = string<br>  stage               = string<br>  name                = string<br>  delimiter           = string<br>  attributes          = list(string)<br>  tags                = map(string)<br>  additional_tag_map  = map(string)<br>  regex_replace_chars = string<br>  label_order         = list(string)<br>  id_length_limit     = number<br>})</pre> | <pre>{<br>  "additional_tag_map": {},<br>  "attributes": [],<br>  "delimiter": null,<br>  "enabled": true,<br>  "environment": null,<br>  "id_length_limit": null,<br>  "label_order": [],<br>  "name": null,<br>  "namespace": null,<br>  "regex_replace_chars": null,<br>  "stage": null,<br>  "tags": {}<br>}</pre> | no |
| delete\_after | Number of days to preserve | `number` | `15` | no |
-| delimiter | Delimiter to be used between `namespace`, `stage`, `name` and `attributes` | `string` | `"-"` | no |
-| enabled | This module will not create any resources unless enabled is set to "true" | `bool` | `true` | no |
+| delimiter | Delimiter to be used between `namespace`, `environment`, `stage`, `name` and `attributes`.<br>Defaults to `-` (hyphen). Set to `""` to use no delimiter at all. | `string` | `null` | no |
+| enabled | Set to false to prevent the module from creating any resources | `bool` | `null` | no |
+| environment | Environment, e.g. 'uw2', 'us-west-2', OR 'prod', 'staging', 'dev', 'UAT' | `string` | `null` | no |
| es\_domain\_arn | The Elasticsearch domain ARN | `string` | n/a | yes |
| es\_endpoint | The Elasticsearch endpoint for the Lambda function to connect to | `string` | n/a | yes |
| es\_security\_group\_id | The Elasticsearch cluster security group ID | `string` | n/a | yes |
-| index | Index/indices to process. Use a comma-separated list. Specify `all` to match every index except for `.kibana` or `.kibana_1` | `string` | `"all"` | no |
+| id\_length\_limit | Limit `id` to this many characters.<br>Set to `0` for unlimited length.<br>Set to `null` for default, which is `0`.<br>Does not affect `id_full`. | `number` | `null` | no |
| index\_format | Combined with 'index' variable and is used to evaluate the index age | `string` | `"%Y.%m.%d"` | no |
-| index\_regex | Determines regex that is used for matching index name and index date. By default it match two groups separated by hyphen. | `string` | `"([^-]+)-(.*)"` | no |
-| name | Solution name, e.g. 'app' or 'cluster' | `string` | `"app"` | no |
-| namespace | Namespace, which could be your organization name, e.g. 'eg' or 'cp' | `string` | `""` | no |
-| python\_version | The Python version to use | `string` | `"2.7"` | no |
+| index\_re | Regular Expression that matches the index names to clean up (not including trailing dash and date) | `string` | `".*"` | no |
+| label\_order | The naming order of the id output and Name tag.<br>Defaults to ["namespace", "environment", "stage", "name", "attributes"].<br>You can omit any of the 5 elements, but at least one must be present. | `list(string)` | `null` | no |
+| name | Solution name, e.g. 'app' or 'jenkins' | `string` | `null` | no |
+| namespace | Namespace, which could be your organization name or abbreviation, e.g. 'eg' or 'cp' | `string` | `null` | no |
+| python\_version | The Python version to use | `string` | `"3.7"` | no |
+| regex\_replace\_chars | Regex to replace chars with empty string in `namespace`, `environment`, `stage` and `name`.<br>If not set, `"/[^a-zA-Z0-9-]/"` is used to remove all characters other than hyphens, letters and digits. | `string` | `null` | no |
| schedule | CloudWatch Events rule schedule using cron or rate expression | `string` | `"cron(0 3 * * ? *)"` | no |
+| skip\_index\_re | Regular Expression that matches the index names to ignore (not clean up). Takes precedence over `index_re`.<br>BY DEFAULT (when value is `null`), a pattern is used to exclude Kibana indexes.<br>Use `"^$"` if you do not want to skip any indexes. Include an exclusion for `kibana` if you want to use a custom value and also exclude the kibana indexes. | `string` | `null` | no |
| sns\_arn | SNS ARN to publish alerts | `string` | `""` | no |
-| stage | Stage, e.g. 'prod', 'staging', 'dev', or 'test' | `string` | `""` | no |
+| stage | Stage, e.g. 'prod', 'staging', 'dev', OR 'source', 'build', 'test', 'deploy', 'release' | `string` | `null` | no |
| subnet\_ids | Subnet IDs | `list(string)` | n/a | yes |
| tags | Additional tags (e.g. `map('BusinessUnit','XYZ')` | `map(string)` | `{}` | no |
| timeout | Timeout for Lambda function in seconds | `number` | `300` | no |
@@ -165,6 +203,7 @@ is given
| lambda\_function\_source\_code\_size | The size in bytes of the function .zip file |
| security\_group\_id | Security Group ID of the Lambda Function |
+
diff --git a/README.yaml b/README.yaml
index 07b643c..054b172 100644
--- a/README.yaml
+++ b/README.yaml
@@ -43,9 +43,42 @@ usage: |2-
vpc_id = module.vpc.vpc_id
namespace = "eg"
stage = "dev"
- schedule = "rate(5 minutes)"
+ schedule = "cron(0 3 * * ? *)"
}
```
+
+ Indexes are expected to be in the format `name-date` where `date` is in the format specified by `var.index_format`.
+ By default, all indexes except for the ones added by Kibana will be deleted based on the date part of the full
+ index name. The actual creation date of the index is not used.
+
+  Index matching is done with an unanchored regular expression, so "bar" matches the index "foobarbaz".
+
+  - If the full index name, including the date part, matches `skip_index_re`, then the index will be skipped (never deleted).
+    Kibana indexes are skipped by the default `skip_index_re` of `^\.kibana*`, so if you supply your own `skip_index_re`
+    you must include the Kibana pattern in it if you still want the Kibana indexes skipped. (Since Kibana indexes do not have a
+    date part, this module should not delete them, but it will complain about their malformed dates unless they are excluded.)
+ - If the index name without the trailing `-date` part matches `index_re`, then it will be cleaned up according to the date part.
+
+ Keep in mind that, fundamentally, this module expects indexes to be in the format of `name-date` so it will not work
+ properly if the regexes end up selecting an index that does not end with `-date`. To avoid edge cases, it is wise not
+ to include dashes in your index name or date format.
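+
+  For example, to clean up only indexes whose base names begin with `app` or `logs`, while keeping the default
+  Kibana exclusion, you might configure the module as follows (a minimal sketch with illustrative values; adjust
+  the patterns, date format and retention to match your own indexes):
+
+  ```hcl
+  module "elasticsearch_cleanup" {
+    source = "git::https://github.com/cloudposse/terraform-aws-lambda-elasticsearch-cleanup.git?ref=master"
+
+    es_endpoint          = module.elasticsearch.domain_endpoint
+    es_domain_arn        = module.elasticsearch.domain_arn
+    es_security_group_id = module.elasticsearch.security_group_id
+    subnet_ids           = module.subnets.private_subnet_ids
+    vpc_id               = module.vpc.vpc_id
+
+    index_re      = "^(app|logs)"       # clean up indexes whose base name begins with "app" or "logs"
+    skip_index_re = "^\\.kibana*"       # keep skipping the Kibana indexes
+    index_format  = "%Y.%m.%d"          # date part of the index name, e.g. app-2020.07.01
+    delete_after  = 15                  # days of indexes to keep
+    schedule      = "cron(0 3 * * ? *)" # run daily at 03:00 UTC
+  }
+  ```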
+
+ ## Migration
+
+  Prior to version 0.10.0, this module had inputs `index`, which was a comma-separated list of index names or the
+ special name "all" to indicate all but Kibana indexes, and `index_regex`, which was a regular expression for parsing
+ index name and date parts. There was no mechanism for specifying a list of indexes to exclude.
+  Starting with version 0.10.0, this module drops those inputs and instead takes `index_re` and `skip_index_re`,
+  both of which are regular expressions. (You probably want to anchor your regexes to the beginning of the index name
+  by starting them with `^`.)
+
+ | If you previously had | Now use |
+ |----------------------|----------|
+ |`index = "all"`| Default values for `index_re` and `skip_index_re`|
+ |`index = "a,xb,c0"` | `index_re = "^(a\|xb\|c0)"` and `skip_index_re = "^$"`|
+ |`index_regex = "(ipat)-(dpat)"`|`index_re = "ipat"` and be sure `index_format` is correct for your date format|
+
+
include:
- docs/targets.md
- docs/terraform.md
diff --git a/context.tf b/context.tf
new file mode 100644
index 0000000..bae0cf1
--- /dev/null
+++ b/context.tf
@@ -0,0 +1,167 @@
+#
+# ONLY EDIT THIS FILE IN github.com/cloudposse/terraform-null-label
+# All other instances of this file should be a copy of that one
+#
+#
+# Copy this file from https://github.com/cloudposse/terraform-null-label/blob/master/exports/context.tf
+# and then place it in your Terraform module to automatically get
+# Cloud Posse's standard configuration inputs suitable for passing
+# to Cloud Posse modules.
+#
+# Modules should access the whole context as `module.this.context`
+# to get the input variables with nulls for defaults,
+# for example `context = module.this.context`,
+# and access individual variables as `module.this.`,
+# with final values filled in.
+#
+# For example, when using defaults, `module.this.context.delimiter`
+# will be null, and `module.this.delimiter` will be `-` (hyphen).
+#
+
+module "this" {
+ source = "git::https://github.com/cloudposse/terraform-null-label.git?ref=tags/0.19.2"
+
+ enabled = var.enabled
+ namespace = var.namespace
+ environment = var.environment
+ stage = var.stage
+ name = var.name
+ delimiter = var.delimiter
+ attributes = var.attributes
+ tags = var.tags
+ additional_tag_map = var.additional_tag_map
+ label_order = var.label_order
+ regex_replace_chars = var.regex_replace_chars
+ id_length_limit = var.id_length_limit
+
+ context = var.context
+}
+
+# Copy contents of cloudposse/terraform-null-label/variables.tf here
+
+variable "context" {
+ type = object({
+ enabled = bool
+ namespace = string
+ environment = string
+ stage = string
+ name = string
+ delimiter = string
+ attributes = list(string)
+ tags = map(string)
+ additional_tag_map = map(string)
+ regex_replace_chars = string
+ label_order = list(string)
+ id_length_limit = number
+ })
+ default = {
+ enabled = true
+ namespace = null
+ environment = null
+ stage = null
+ name = null
+ delimiter = null
+ attributes = []
+ tags = {}
+ additional_tag_map = {}
+ regex_replace_chars = null
+ label_order = []
+ id_length_limit = null
+ }
+ description = <<-EOT
+ Single object for setting entire context at once.
+ See description of individual variables for details.
+ Leave string and numeric variables as `null` to use default value.
+ Individual variable settings (non-null) override settings in context object,
+ except for attributes, tags, and additional_tag_map, which are merged.
+ EOT
+}
+
+variable "enabled" {
+ type = bool
+ default = null
+ description = "Set to false to prevent the module from creating any resources"
+}
+
+variable "namespace" {
+ type = string
+ default = null
+ description = "Namespace, which could be your organization name or abbreviation, e.g. 'eg' or 'cp'"
+}
+
+variable "environment" {
+ type = string
+ default = null
+ description = "Environment, e.g. 'uw2', 'us-west-2', OR 'prod', 'staging', 'dev', 'UAT'"
+}
+
+variable "stage" {
+ type = string
+ default = null
+ description = "Stage, e.g. 'prod', 'staging', 'dev', OR 'source', 'build', 'test', 'deploy', 'release'"
+}
+
+variable "name" {
+ type = string
+ default = null
+ description = "Solution name, e.g. 'app' or 'jenkins'"
+}
+
+variable "delimiter" {
+ type = string
+ default = null
+ description = <<-EOT
+ Delimiter to be used between `namespace`, `environment`, `stage`, `name` and `attributes`.
+ Defaults to `-` (hyphen). Set to `""` to use no delimiter at all.
+ EOT
+}
+
+variable "attributes" {
+ type = list(string)
+ default = []
+ description = "Additional attributes (e.g. `1`)"
+}
+
+variable "tags" {
+ type = map(string)
+ default = {}
+ description = "Additional tags (e.g. `map('BusinessUnit','XYZ')`"
+}
+
+variable "additional_tag_map" {
+ type = map(string)
+ default = {}
+ description = "Additional tags for appending to tags_as_list_of_maps. Not added to `tags`."
+}
+
+variable "label_order" {
+ type = list(string)
+ default = null
+ description = <<-EOT
+ The naming order of the id output and Name tag.
+ Defaults to ["namespace", "environment", "stage", "name", "attributes"].
+ You can omit any of the 5 elements, but at least one must be present.
+ EOT
+}
+
+variable "regex_replace_chars" {
+ type = string
+ default = null
+ description = <<-EOT
+ Regex to replace chars with empty string in `namespace`, `environment`, `stage` and `name`.
+ If not set, `"/[^a-zA-Z0-9-]/"` is used to remove all characters other than hyphens, letters and digits.
+ EOT
+}
+
+variable "id_length_limit" {
+ type = number
+ default = null
+ description = <<-EOT
+ Limit `id` to this many characters.
+ Set to `0` for unlimited length.
+ Set to `null` for default, which is `0`.
+ Does not affect `id_full`.
+ EOT
+}
+
+#### End of copy of cloudposse/terraform-null-label/variables.tf
diff --git a/docs/terraform.md b/docs/terraform.md
index 90746c2..784f48b 100644
--- a/docs/terraform.md
+++ b/docs/terraform.md
@@ -1,3 +1,4 @@
+
## Module: cloudposse/terraform-aws-lambda-elasticsearch-cleanup
This module creates a scheduled Lambda function which will delete old
@@ -9,38 +10,44 @@ is given
| Name | Version |
|------|---------|
-| terraform | >= 0.12.0, < 0.14.0 |
-| aws | ~> 2.0 |
-| null | ~> 2.0 |
-| template | ~> 2.0 |
+| terraform | >= 0.12.0 |
+| aws | >= 2.0 |
+| null | >= 2.0 |
## Providers
| Name | Version |
|------|---------|
-| aws | ~> 2.0 |
+| aws | >= 2.0 |
## Inputs
| Name | Description | Type | Default | Required |
|------|-------------|------|---------|:--------:|
+| additional\_tag\_map | Additional tags for appending to tags\_as\_list\_of\_maps. Not added to `tags`. | `map(string)` | `{}` | no |
+| artifact\_git\_ref | Git ref of the lambda artifact to use. Use latest version if null. | `string` | `null` | no |
| artifact\_url | URL template for the remote artifact | `string` | `"https://artifacts.cloudposse.com/$${module_name}/$${git_ref}/$${filename}"` | no |
| attributes | Additional attributes (e.g. `1`) | `list(string)` | `[]` | no |
+| context | Single object for setting entire context at once.<br>See description of individual variables for details.<br>Leave string and numeric variables as `null` to use default value.<br>Individual variable settings (non-null) override settings in context object, except for attributes, tags, and additional\_tag\_map, which are merged. | <pre>object({<br>  enabled             = bool<br>  namespace           = string<br>  environment         = string<br>  stage               = string<br>  name                = string<br>  delimiter           = string<br>  attributes          = list(string)<br>  tags                = map(string)<br>  additional_tag_map  = map(string)<br>  regex_replace_chars = string<br>  label_order         = list(string)<br>  id_length_limit     = number<br>})</pre> | <pre>{<br>  "additional_tag_map": {},<br>  "attributes": [],<br>  "delimiter": null,<br>  "enabled": true,<br>  "environment": null,<br>  "id_length_limit": null,<br>  "label_order": [],<br>  "name": null,<br>  "namespace": null,<br>  "regex_replace_chars": null,<br>  "stage": null,<br>  "tags": {}<br>}</pre> | no |
| delete\_after | Number of days to preserve | `number` | `15` | no |
-| delimiter | Delimiter to be used between `namespace`, `stage`, `name` and `attributes` | `string` | `"-"` | no |
-| enabled | This module will not create any resources unless enabled is set to "true" | `bool` | `true` | no |
+| delimiter | Delimiter to be used between `namespace`, `environment`, `stage`, `name` and `attributes`.<br>Defaults to `-` (hyphen). Set to `""` to use no delimiter at all. | `string` | `null` | no |
+| enabled | Set to false to prevent the module from creating any resources | `bool` | `null` | no |
+| environment | Environment, e.g. 'uw2', 'us-west-2', OR 'prod', 'staging', 'dev', 'UAT' | `string` | `null` | no |
| es\_domain\_arn | The Elasticsearch domain ARN | `string` | n/a | yes |
| es\_endpoint | The Elasticsearch endpoint for the Lambda function to connect to | `string` | n/a | yes |
| es\_security\_group\_id | The Elasticsearch cluster security group ID | `string` | n/a | yes |
-| index | Index/indices to process. Use a comma-separated list. Specify `all` to match every index except for `.kibana` or `.kibana_1` | `string` | `"all"` | no |
+| id\_length\_limit | Limit `id` to this many characters.<br>Set to `0` for unlimited length.<br>Set to `null` for default, which is `0`.<br>Does not affect `id_full`. | `number` | `null` | no |
| index\_format | Combined with 'index' variable and is used to evaluate the index age | `string` | `"%Y.%m.%d"` | no |
-| index\_regex | Determines regex that is used for matching index name and index date. By default it match two groups separated by hyphen. | `string` | `"([^-]+)-(.*)"` | no |
-| name | Solution name, e.g. 'app' or 'cluster' | `string` | `"app"` | no |
-| namespace | Namespace, which could be your organization name, e.g. 'eg' or 'cp' | `string` | `""` | no |
-| python\_version | The Python version to use | `string` | `"2.7"` | no |
+| index\_re | Regular Expression that matches the index names to clean up (not including trailing dash and date) | `string` | `".*"` | no |
+| label\_order | The naming order of the id output and Name tag.<br>Defaults to ["namespace", "environment", "stage", "name", "attributes"].<br>You can omit any of the 5 elements, but at least one must be present. | `list(string)` | `null` | no |
+| name | Solution name, e.g. 'app' or 'jenkins' | `string` | `null` | no |
+| namespace | Namespace, which could be your organization name or abbreviation, e.g. 'eg' or 'cp' | `string` | `null` | no |
+| python\_version | The Python version to use | `string` | `"3.7"` | no |
+| regex\_replace\_chars | Regex to replace chars with empty string in `namespace`, `environment`, `stage` and `name`.<br>If not set, `"/[^a-zA-Z0-9-]/"` is used to remove all characters other than hyphens, letters and digits. | `string` | `null` | no |
| schedule | CloudWatch Events rule schedule using cron or rate expression | `string` | `"cron(0 3 * * ? *)"` | no |
+| skip\_index\_re | Regular Expression that matches the index names to ignore (not clean up). Takes precedence over `index_re`.<br>BY DEFAULT (when value is `null`), a pattern is used to exclude Kibana indexes.<br>Use `"^$"` if you do not want to skip any indexes. Include an exclusion for `kibana` if you want to use a custom value and also exclude the kibana indexes. | `string` | `null` | no |
| sns\_arn | SNS ARN to publish alerts | `string` | `""` | no |
-| stage | Stage, e.g. 'prod', 'staging', 'dev', or 'test' | `string` | `""` | no |
+| stage | Stage, e.g. 'prod', 'staging', 'dev', OR 'source', 'build', 'test', 'deploy', 'release' | `string` | `null` | no |
| subnet\_ids | Subnet IDs | `list(string)` | n/a | yes |
| tags | Additional tags (e.g. `map('BusinessUnit','XYZ')` | `map(string)` | `{}` | no |
| timeout | Timeout for Lambda function in seconds | `number` | `300` | no |
@@ -54,3 +61,4 @@ is given
| lambda\_function\_source\_code\_size | The size in bytes of the function .zip file |
| security\_group\_id | Security Group ID of the Lambda Function |
+
diff --git a/examples/complete/context.tf b/examples/complete/context.tf
new file mode 100644
index 0000000..bae0cf1
--- /dev/null
+++ b/examples/complete/context.tf
@@ -0,0 +1,167 @@
+#
+# ONLY EDIT THIS FILE IN github.com/cloudposse/terraform-null-label
+# All other instances of this file should be a copy of that one
+#
+#
+# Copy this file from https://github.com/cloudposse/terraform-null-label/blob/master/exports/context.tf
+# and then place it in your Terraform module to automatically get
+# Cloud Posse's standard configuration inputs suitable for passing
+# to Cloud Posse modules.
+#
+# Modules should access the whole context as `module.this.context`
+# to get the input variables with nulls for defaults,
+# for example `context = module.this.context`,
+# and access individual variables as `module.this.`,
+# with final values filled in.
+#
+# For example, when using defaults, `module.this.context.delimiter`
+# will be null, and `module.this.delimiter` will be `-` (hyphen).
+#
+
+module "this" {
+ source = "git::https://github.com/cloudposse/terraform-null-label.git?ref=tags/0.19.2"
+
+ enabled = var.enabled
+ namespace = var.namespace
+ environment = var.environment
+ stage = var.stage
+ name = var.name
+ delimiter = var.delimiter
+ attributes = var.attributes
+ tags = var.tags
+ additional_tag_map = var.additional_tag_map
+ label_order = var.label_order
+ regex_replace_chars = var.regex_replace_chars
+ id_length_limit = var.id_length_limit
+
+ context = var.context
+}
+
+# Copy contents of cloudposse/terraform-null-label/variables.tf here
+
+variable "context" {
+ type = object({
+ enabled = bool
+ namespace = string
+ environment = string
+ stage = string
+ name = string
+ delimiter = string
+ attributes = list(string)
+ tags = map(string)
+ additional_tag_map = map(string)
+ regex_replace_chars = string
+ label_order = list(string)
+ id_length_limit = number
+ })
+ default = {
+ enabled = true
+ namespace = null
+ environment = null
+ stage = null
+ name = null
+ delimiter = null
+ attributes = []
+ tags = {}
+ additional_tag_map = {}
+ regex_replace_chars = null
+ label_order = []
+ id_length_limit = null
+ }
+ description = <<-EOT
+ Single object for setting entire context at once.
+ See description of individual variables for details.
+ Leave string and numeric variables as `null` to use default value.
+ Individual variable settings (non-null) override settings in context object,
+ except for attributes, tags, and additional_tag_map, which are merged.
+ EOT
+}
+
+variable "enabled" {
+ type = bool
+ default = null
+ description = "Set to false to prevent the module from creating any resources"
+}
+
+variable "namespace" {
+ type = string
+ default = null
+ description = "Namespace, which could be your organization name or abbreviation, e.g. 'eg' or 'cp'"
+}
+
+variable "environment" {
+ type = string
+ default = null
+ description = "Environment, e.g. 'uw2', 'us-west-2', OR 'prod', 'staging', 'dev', 'UAT'"
+}
+
+variable "stage" {
+ type = string
+ default = null
+ description = "Stage, e.g. 'prod', 'staging', 'dev', OR 'source', 'build', 'test', 'deploy', 'release'"
+}
+
+variable "name" {
+ type = string
+ default = null
+ description = "Solution name, e.g. 'app' or 'jenkins'"
+}
+
+variable "delimiter" {
+ type = string
+ default = null
+ description = <<-EOT
+ Delimiter to be used between `namespace`, `environment`, `stage`, `name` and `attributes`.
+ Defaults to `-` (hyphen). Set to `""` to use no delimiter at all.
+ EOT
+}
+
+variable "attributes" {
+ type = list(string)
+ default = []
+ description = "Additional attributes (e.g. `1`)"
+}
+
+variable "tags" {
+ type = map(string)
+ default = {}
+ description = "Additional tags (e.g. `map('BusinessUnit','XYZ')`"
+}
+
+variable "additional_tag_map" {
+ type = map(string)
+ default = {}
+ description = "Additional tags for appending to tags_as_list_of_maps. Not added to `tags`."
+}
+
+variable "label_order" {
+ type = list(string)
+ default = null
+ description = <<-EOT
+ The naming order of the id output and Name tag.
+ Defaults to ["namespace", "environment", "stage", "name", "attributes"].
+ You can omit any of the 5 elements, but at least one must be present.
+ EOT
+}
+
+variable "regex_replace_chars" {
+ type = string
+ default = null
+ description = <<-EOT
+ Regex to replace chars with empty string in `namespace`, `environment`, `stage` and `name`.
+ If not set, `"/[^a-zA-Z0-9-]/"` is used to remove all characters other than hyphens, letters and digits.
+ EOT
+}
+
+variable "id_length_limit" {
+ type = number
+ default = null
+ description = <<-EOT
+ Limit `id` to this many characters.
+ Set to `0` for unlimited length.
+ Set to `null` for default, which is `0`.
+ Does not affect `id_full`.
+ EOT
+}
+
+#### End of copy of cloudposse/terraform-null-label/variables.tf
diff --git a/examples/complete/fixtures.us-east-2.tfvars b/examples/complete/fixtures.us-east-2.tfvars
index a348265..3bd87c8 100644
--- a/examples/complete/fixtures.us-east-2.tfvars
+++ b/examples/complete/fixtures.us-east-2.tfvars
@@ -6,22 +6,20 @@ stage = "test"
name = "es-cleanup"
-availability_zones = ["us-east-2a", "us-east-2b"]
+availability_zones = ["us-east-2a"]
-instance_type = "t2.small.elasticsearch"
+instance_type = "t3.small.elasticsearch"
-elasticsearch_version = "7.4"
+elasticsearch_version = "7.7"
-instance_count = 2
+instance_count = 1
-zone_awareness_enabled = true
+zone_awareness_enabled = false
encrypt_at_rest_enabled = false
dedicated_master_enabled = false
-kibana_subdomain_name = "kibana-es-cleanup"
-
ebs_volume_size = 10
create_iam_service_linked_role = false
diff --git a/examples/complete/main.tf b/examples/complete/main.tf
index ca845d1..938c708 100644
--- a/examples/complete/main.tf
+++ b/examples/complete/main.tf
@@ -3,31 +3,29 @@ provider "aws" {
}
module "vpc" {
- source = "git::https://github.com/cloudposse/terraform-aws-vpc.git?ref=tags/0.10.0"
- namespace = var.namespace
- stage = var.stage
- name = var.name
+ source = "git::https://github.com/cloudposse/terraform-aws-vpc.git?ref=tags/0.17.0"
+
cidr_block = "172.16.0.0/16"
+
+ context = module.this.context
}
module "subnets" {
- source = "git::https://github.com/cloudposse/terraform-aws-dynamic-subnets.git?ref=tags/0.19.0"
+ source = "git::https://github.com/cloudposse/terraform-aws-dynamic-subnets.git?ref=tags/0.30.0"
+
availability_zones = var.availability_zones
- namespace = var.namespace
- stage = var.stage
- name = var.name
vpc_id = module.vpc.vpc_id
igw_id = module.vpc.igw_id
cidr_block = module.vpc.vpc_cidr_block
nat_gateway_enabled = true
nat_instance_enabled = false
+
+ context = module.this.context
}
module "elasticsearch" {
- source = "git::https://github.com/cloudposse/terraform-aws-elasticsearch.git?ref=tags/0.12.0"
- namespace = var.namespace
- stage = var.stage
- name = var.name
+ source = "git::https://github.com/cloudposse/terraform-aws-elasticsearch.git?ref=tags/0.24.0"
+
security_groups = [module.vpc.vpc_default_security_group_id]
vpc_id = module.vpc.vpc_id
subnet_ids = module.subnets.private_subnet_ids
@@ -38,6 +36,8 @@ module "elasticsearch" {
encrypt_at_rest_enabled = var.encrypt_at_rest_enabled
dedicated_master_enabled = var.dedicated_master_enabled
create_iam_service_linked_role = var.create_iam_service_linked_role
+ domain_hostname_enabled = var.domain_hostname_enabled
+ kibana_hostname_enabled = var.kibana_hostname_enabled
kibana_subdomain_name = var.kibana_subdomain_name
ebs_volume_size = var.ebs_volume_size
dns_zone_id = var.dns_zone_id
@@ -45,17 +45,20 @@ module "elasticsearch" {
advanced_options = {
"rest.action.multi.allow_explicit_index" = "true"
}
+
+ context = module.this.context
}
module "elasticsearch_cleanup" {
- source = "../.."
+ source = "../.."
+
es_endpoint = module.elasticsearch.domain_endpoint
es_domain_arn = module.elasticsearch.domain_arn
es_security_group_id = module.elasticsearch.security_group_id
subnet_ids = module.subnets.private_subnet_ids
vpc_id = module.vpc.vpc_id
- namespace = var.namespace
- stage = var.stage
schedule = var.schedule
artifact_url = var.artifact_url
+
+ context = module.this.context
}
diff --git a/examples/complete/variables.tf b/examples/complete/variables.tf
index a1effba..0e50aed 100644
--- a/examples/complete/variables.tf
+++ b/examples/complete/variables.tf
@@ -3,21 +3,6 @@ variable "region" {
description = "AWS region"
}
-variable "namespace" {
- type = string
- description = "Namespace (e.g. `eg` or `cp`)"
-}
-
-variable "stage" {
- type = string
- description = "Stage (e.g. `prod`, `dev`, `staging`, `infra`)"
-}
-
-variable "name" {
- type = string
- description = "Name (e.g. `app` or `cluster`)"
-}
-
variable "availability_zones" {
type = list(string)
description = "List of availability zones"
@@ -55,6 +40,7 @@ variable "dedicated_master_enabled" {
variable "kibana_subdomain_name" {
type = string
+ default = "kibana"
description = "The name of the subdomain for Kibana in the DNS zone (_e.g._ `kibana`, `ui`, `ui-es`, `search-ui`, `kibana.elasticsearch`)"
}
@@ -73,6 +59,19 @@ variable "dns_zone_id" {
description = "Route53 DNS Zone ID to add hostname records for Elasticsearch domain and Kibana"
}
+
+variable "domain_hostname_enabled" {
+ type = bool
+ description = "Explicit flag to enable creating a DNS hostname for ES. If `true`, then `var.dns_zone_id` is required."
+ default = true
+}
+
+variable "kibana_hostname_enabled" {
+ type = bool
+ description = "Explicit flag to enable creating a DNS hostname for Kibana. If `true`, then `var.dns_zone_id` is required."
+ default = true
+}
+
variable "schedule" {
type = string
default = "cron(0 3 * * ? *)"
diff --git a/lambda/COPYRIGHT.md b/lambda/COPYRIGHT.md
index 2ba2de6..f90b9a0 100644
--- a/lambda/COPYRIGHT.md
+++ b/lambda/COPYRIGHT.md
@@ -2,7 +2,7 @@ Files in this directory may be subject to the following notice:
Apache Software License 2.0
-Copyright 2016 **Cloudreach Europe Limited** or its affiliates. All Rights Reserved.
+Copyright 2020 **Cloudreach Europe Limited** or its affiliates. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
diff --git a/lambda/es-cleanup.py b/lambda/es-cleanup.py
index 31ed63c..c04b2d3 100755
--- a/lambda/es-cleanup.py
+++ b/lambda/es-cleanup.py
@@ -5,22 +5,24 @@
THIS FILE IS NOT EXACTLY THE ORIGINAL FILE DISTRIBUTED BY Cloudreach Europe Limited
IT HAS BEEN MODIFIED BY Cloud Posse, LLC
+
+Derived from https://github.com/cloudreach/aws-lambda-es-cleanup/blob/v0.14/es_cleanup.py
"""
-from __future__ import print_function
-import os
-import json
+import datetime
import re
+import sys
import time
import boto3
-import datetime
+import json
+import os
from botocore.auth import SigV4Auth
from botocore.awsrequest import AWSRequest
from botocore.credentials import create_credential_resolver
from botocore.httpsession import URLLib3Session
from botocore.session import get_session
-import sys
+
if sys.version_info[0] == 3:
from urllib.request import quote
else:
@@ -41,14 +43,13 @@ def __init__(self, status_code, payload):
class ES_Cleanup(object):
-
name = "lambda_es_cleanup"
def __init__(self, event, context):
"""Main Class init
Args:
- event (dict): AWS CloudWatch Scheduled Event
+ event (dict): AWS Cloudwatch Scheduled Event
context (object): AWS running context
"""
self.report = []
@@ -57,7 +58,8 @@ def __init__(self, event, context):
self.cfg = {}
self.cfg["es_endpoint"] = self.get_parameter("es_endpoint")
- self.cfg["index"] = self.get_parameter("index", "all").split(",")
+ self.cfg["index"] = self.get_parameter("index", ".*")
+ self.cfg["skip_index"] = self.get_parameter("skip_index", "^\\.kibana*")
self.cfg["delete_after"] = int(self.get_parameter("delete_after", 15))
self.cfg["es_max_retry"] = int(self.get_parameter("es_max_retry", 3))
@@ -102,20 +104,25 @@ def send_to_es(self, path, method="GET", payload={}):
es_region = self.cfg["es_endpoint"].split(".")[1]
+ headers = {
+ "Host": self.cfg["es_endpoint"],
+ "Content-Type": "application/json"
+ }
+
# send to ES with exponential backoff
retries = 0
while retries < int(self.cfg["es_max_retry"]):
if retries > 0:
- seconds = (2**retries) * .1
- # print('Waiting for %.1f seconds', seconds)
+ seconds = (2 ** retries) * .1
time.sleep(seconds)
req = AWSRequest(
method=method,
- url="https://{}{}?pretty&format=json".format(
+ url="https://{}{}".format(
self.cfg["es_endpoint"], quote(path)),
- data=payload,
- headers={'Host': self.cfg["es_endpoint"]})
+ data=json.dumps(payload),
+ params={"format": "json"},
+ headers=headers)
credential_resolver = create_credential_resolver(get_session())
credentials = credential_resolver.load_credentials()
SigV4Auth(credentials, 'es', es_region).add_auth(req)
@@ -125,7 +132,6 @@ def send_to_es(self, path, method="GET", payload={}):
session = URLLib3Session()
res = session.send(preq)
if res.status_code >= 200 and res.status_code <= 299:
- # print("%s %s" % (res.status_code, res.content))
return json.loads(res.content)
else:
raise ES_Exception(res.status_code, res._content)
@@ -148,7 +154,7 @@ def send_error(self, msg):
if self.cfg["sns_arn"] != "":
cur_account = self.cfg["sns_arn"].split(":")[4]
- _msg = "[%s][%s] %s" % (self.name, self.cur_account, msg)
+ _msg = "[%s][%s] %s" % (self.name, cur_account, msg)
print(_msg)
sns_region = self.cfg["sns_arn"].split(":")[3]
sns = boto3.client("sns", region_name=sns_region)
@@ -177,32 +183,65 @@ def get_indices(self):
return self.send_to_es("/_cat/indices")
+class DeleteDecider(object):
+ def __init__(self, delete_after, idx_format, idx_regex, skip_idx_regex, today):
+ self.delete_after = delete_after
+ self.idx_format = idx_format
+ self.idx_regex = idx_regex
+ self.skip_idx_regex = skip_idx_regex
+ self.today = today
+
+ def should_delete(self, index):
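+        # Split the full index name into a base name and a trailing date part.
+        # The date part may itself contain dashes (e.g. "%Y-%m-%d"), so split off
+        # one dash-separated token per dash in the date format, plus one more.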
+ idx_split = index["index"].rsplit("-", 1 + self.idx_format.count("-"))
+ idx_date_str = '-'.join(word for word in idx_split[1:])
+ idx_name = idx_split[0]
+
+ if re.search(self.skip_idx_regex, index["index"]):
+ return False, "index matches skip condition"
+
+ if not re.search(self.idx_regex, idx_name):
+ return False, "index '{}' name '{}' did not match pattern '{}'".format(index["index"],
+ idx_name,
+ self.idx_regex)
+
+ earliest_to_keep = self.today - datetime.timedelta(days=self.delete_after)
+ try:
+ idx_datetime = datetime.datetime.strptime(idx_date_str, self.idx_format)
+ idx_date = idx_datetime.date()
+ except ValueError:
+ raise ValueError("Unable to parse index date {0} - "
+ "incorrect index date format set?".format(idx_date_str))
+
+ if idx_date < earliest_to_keep:
+ return True, "all conditions satisfied"
+
+ return False, "deletion age has not been reached. " \
+ "Oldest index kept: {0}, Index Date: {1}".format(earliest_to_keep, idx_date)
+
+
def lambda_handler(event, context):
"""Main Lambda function
Args:
- event (dict): AWS CloudWatch Scheduled Event
+ event (dict): AWS Cloudwatch Scheduled Event
context (object): AWS running context
Returns:
None
"""
es = ES_Cleanup(event, context)
try:
- # Index cutoff definition, remove older than this date
- earliest_to_keep = datetime.date.today() - datetime.timedelta(
- days=int(es.cfg["delete_after"]))
+ decider = DeleteDecider(delete_after=int(es.cfg["delete_after"]),
+ idx_regex=es.cfg["index"],
+ idx_format=es.cfg["index_format"],
+ skip_idx_regex=es.cfg["skip_index"],
+ today=datetime.date.today())
+
for index in es.get_indices():
- if index["index"] == ".kibana" or index["index"] == ".kibana_1":
- # ignore .kibana index
- print("Found Kibana index: %s - ignoring" % index["index"])
- continue
-
- idx_name, idx_date = re.match(es.cfg["index_regex"], index["index"]).groups()
- print("Found index: %s - %s" % (idx_name, idx_date))
- if idx_name in es.cfg["index"] or "all" in es.cfg["index"]:
-
- if idx_date <= earliest_to_keep.strftime(es.cfg["index_format"]):
- print("Deleting index: %s" % index["index"])
- es.delete_index(index["index"])
+ d, reason = decider.should_delete(index)
+ if d:
+ print("Deleting index: {}".format(index["index"]))
+ es.delete_index(index["index"])
+ else:
+ print("Skipping or keeping index: {}. Reason: {}".format(index["index"], reason))
except Exception as e:
print(str(e))
es.send_error(str(e))
@@ -217,6 +256,6 @@ def lambda_handler(event, context):
'time': '1970-01-01T00:00:00Z',
'id': 'cdc73f9d-aea9-11e3-9d5a-835b769c0d9c',
'resources':
- ['arn:aws:events:us-east-1:123456789012:rule/my-schedule']
+ ['arn:aws:events:us-east-1:123456789012:rule/my-schedule']
}
lambda_handler(event, "")
diff --git a/lambda/es-cleanup_test.py b/lambda/es-cleanup_test.py
new file mode 100644
index 0000000..98d16dd
--- /dev/null
+++ b/lambda/es-cleanup_test.py
@@ -0,0 +1,124 @@
+"""
+THIS FILE IS NOT EXACTLY THE ORIGINAL FILE DISTRIBUTED BY Cloudreach Europe Limited
+IT HAS BEEN MODIFIED BY Cloud Posse, LLC
+
+Derived from https://github.com/cloudreach/aws-lambda-es-cleanup/blob/v0.14/es_cleanup_test.py
+"""
+
+import datetime
+import unittest
+import importlib
+
+es_cleanup = importlib.import_module("es-cleanup")
+
+IDX_REGEX = '.*'
+IDX_FORMAT1 = '%Y.%m.%d'
+SKIP_IDX_REGEX = '^\\.kibana*'
+
+decider = es_cleanup.DeleteDecider(delete_after=4,
+ idx_format=IDX_FORMAT1,
+ idx_regex=IDX_REGEX,
+ skip_idx_regex=SKIP_IDX_REGEX,
+ today=datetime.date(2019, 12, 19))
+
+
+class TestShouldDelete(unittest.TestCase):
+ def test_should_be_deleted(self):
+ tuple = decider.should_delete({"index": "k8s-2019.12.14"})
+ self.assertTrue(tuple[0])
+
+ def test_should_not_be_deleted(self):
+ tuple = decider.should_delete({"index": "k8s-2019.12.15"})
+ self.assertFalse(tuple[0])
+
+ def test_should_raise_value_error(self):
+ with self.assertRaises(ValueError):
+ decider.should_delete({"index": "k8s-2019-12-15"})
+
+ def test_should_raise_value_error_2(self):
+ with self.assertRaises(ValueError):
+ decider.should_delete({"index": ".Kibana"})
+
+ def test_should_raise_value_error_3(self):
+ with self.assertRaises(ValueError):
+ decider.should_delete({"index": ".Kibana_1"})
+
+ def test_should_skip_index(self):
+ tuple = decider.should_delete({"index": ".kibana"})
+ self.assertFalse(tuple[0])
+ self.assertTrue("matches skip condition" in tuple[1])
+
+ def test_should_skip_index_2(self):
+ tuple = decider.should_delete({"index": ".kibana_1"})
+ self.assertFalse(tuple[0])
+ self.assertTrue("matches skip condition" in tuple[1])
+
+
+decider2 = es_cleanup.DeleteDecider(delete_after=4,
+ idx_format='%Y-%m-%d',
+ idx_regex='app[1-2].*|k8s.*|dev-west',
+ skip_idx_regex='kibana.*',
+ today=datetime.date(2019, 12, 19))
+
+
+class TestShouldDelete2(unittest.TestCase):
+ def test_should_be_deleted(self):
+ tuple = decider2.should_delete({"index": "k8s-2019-12-14"})
+ self.assertTrue(tuple[0])
+
+ def test_should_not_be_deleted(self):
+ tuple = decider2.should_delete({"index": "k8s-2019-12-15"})
+ self.assertFalse(tuple[0])
+
+ def test_should_be_deleted_dev(self):
+ tuple = decider2.should_delete({"index": "dev-west-2019-12-14"})
+ self.assertTrue(tuple[0])
+
+ def test_should_not_be_deleted_dev(self):
+ tuple = decider2.should_delete({"index": "dev-west-2019-12-15"})
+ self.assertFalse(tuple[0])
+
+    def test_should_not_be_deleted_dev_nomatch(self):
+ tuple = decider2.should_delete({"index": "dev-2019-12-15"})
+ self.assertFalse(tuple[0])
+
+ def test_should_be_deleted_app1(self):
+ tuple = decider2.should_delete({"index": "app1-2019-12-14"})
+ self.assertTrue(tuple[0])
+
+ def test_should_not_be_deleted_app1(self):
+ tuple = decider2.should_delete({"index": "app1-2019-12-15"})
+ self.assertFalse(tuple[0])
+
+ def test_should_be_deleted_app2(self):
+ tuple = decider2.should_delete({"index": "app2-2019-12-14"})
+ self.assertTrue(tuple[0])
+
+ def test_should_not_be_deleted_app2(self):
+ tuple = decider2.should_delete({"index": "app2-2019-12-15"})
+ self.assertFalse(tuple[0])
+
+ def test_should_not_be_deleted_app3(self):
+ tuple = decider2.should_delete({"index": "app3-2019-12-14"})
+ self.assertFalse(tuple[0])
+
+ def test_should_raise_value_error(self):
+ with self.assertRaises(ValueError):
+ decider2.should_delete({"index": "k8s-2019.12.5"})
+
+ def test_should_skip_index(self):
+ tuple = decider2.should_delete({"index": ".kibana"})
+ self.assertFalse(tuple[0])
+ self.assertTrue("matches skip condition" in tuple[1])
+
+ def test_should_skip_index_2(self):
+ tuple = decider2.should_delete({"index": ".kibana_1"})
+ self.assertFalse(tuple[0])
+ self.assertTrue("matches skip condition" in tuple[1])
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/main.tf b/main.tf
index 869577d..94b53e9 100644
--- a/main.tf
+++ b/main.tf
@@ -72,25 +72,28 @@ data "aws_iam_policy_document" "default" {
override_json = length(var.sns_arn) > 0 ? data.aws_iam_policy_document.sns.json : "{}"
}
+locals {
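+  # Use the module-wide `enabled` flag; skip_index_re falls back to the default
+  # Kibana-skipping pattern when `var.skip_index_re` is null.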
+ enabled = module.this.enabled
+ skip_index_re = var.skip_index_re == null ? "^\\.kibana*" : var.skip_index_re
+}
+
# Modules
#--------------------------------------------------------------
module "label" {
- source = "git::https://github.com/cloudposse/terraform-null-label.git?ref=tags/0.16.0"
- enabled = var.enabled
- namespace = var.namespace
- name = var.name
- stage = var.stage
- delimiter = var.delimiter
- attributes = compact(concat(var.attributes, ["elasticsearch", "cleanup"]))
- tags = var.tags
+ source = "git::https://github.com/cloudposse/terraform-null-label.git?ref=tags/0.19.2"
+
+ attributes = compact(concat(module.this.attributes, ["elasticsearch", "cleanup"]))
+
+ context = module.this.context
}
module "artifact" {
- source = "git::https://github.com/cloudposse/terraform-external-module-artifact.git?ref=tags/0.3.0"
- enabled = var.enabled
+ source = "git::https://github.com/cloudposse/terraform-external-module-artifact.git?ref=tags/0.5.0"
+ enabled = module.this.enabled
filename = "lambda.zip"
module_name = "terraform-aws-lambda-elasticsearch-cleanup"
module_path = path.module
+ git_ref = var.artifact_git_ref
url = var.artifact_url
}
@@ -103,7 +106,7 @@ locals {
# Resources
#--------------------------------------------------------------
resource "aws_lambda_function" "default" {
- count = var.enabled ? 1 : 0
+ count = local.enabled ? 1 : 0
filename = module.artifact.file
function_name = local.function_name
description = local.function_name
@@ -118,8 +121,8 @@ resource "aws_lambda_function" "default" {
variables = {
delete_after = var.delete_after
es_endpoint = var.es_endpoint
- index = var.index
- index_regex = var.index_regex
+ index = var.index_re
+ skip_index = local.skip_index_re
index_format = var.index_format
sns_arn = var.sns_arn
}
@@ -132,7 +135,7 @@ resource "aws_lambda_function" "default" {
}
resource "aws_security_group" "default" {
- count = var.enabled ? 1 : 0
+ count = local.enabled ? 1 : 0
name = local.function_name
description = local.function_name
vpc_id = var.vpc_id
@@ -140,7 +143,7 @@ resource "aws_security_group" "default" {
}
resource "aws_security_group_rule" "udp_dns_egress_from_lambda" {
- count = var.enabled ? 1 : 0
+ count = local.enabled ? 1 : 0
description = "Allow outbound UDP traffic from Lambda Elasticsearch cleanup to DNS"
type = "egress"
from_port = 53
@@ -151,7 +154,7 @@ resource "aws_security_group_rule" "udp_dns_egress_from_lambda" {
}
resource "aws_security_group_rule" "tcp_dns_egress_from_lambda" {
- count = var.enabled ? 1 : 0
+ count = local.enabled ? 1 : 0
description = "Allow outbound TCP traffic from Lambda Elasticsearch cleanup to DNS"
type = "egress"
from_port = 53
@@ -162,7 +165,7 @@ resource "aws_security_group_rule" "tcp_dns_egress_from_lambda" {
}
resource "aws_security_group_rule" "egress_from_lambda_to_es_cluster" {
- count = var.enabled ? 1 : 0
+ count = local.enabled ? 1 : 0
description = "Allow outbound traffic from Lambda Elasticsearch cleanup SG to Elasticsearch SG"
type = "egress"
from_port = 443
@@ -173,7 +176,7 @@ resource "aws_security_group_rule" "egress_from_lambda_to_es_cluster" {
}
resource "aws_security_group_rule" "ingress_to_es_cluster_from_lambda" {
- count = var.enabled ? 1 : 0
+ count = local.enabled ? 1 : 0
description = "Allow inbound traffic to Elasticsearch domain from Lambda Elasticsearch cleanup SG"
type = "ingress"
from_port = 443
@@ -184,34 +187,34 @@ resource "aws_security_group_rule" "ingress_to_es_cluster_from_lambda" {
}
resource "aws_iam_role" "default" {
- count = var.enabled ? 1 : 0
+ count = local.enabled ? 1 : 0
name = local.function_name
assume_role_policy = data.aws_iam_policy_document.assume_role.json
tags = module.label.tags
}
resource "aws_iam_role_policy" "default" {
- count = var.enabled ? 1 : 0
+ count = local.enabled ? 1 : 0
name = local.function_name
role = join("", aws_iam_role.default.*.name)
policy = data.aws_iam_policy_document.default.json
}
resource "aws_iam_role_policy_attachment" "default" {
- count = var.enabled ? 1 : 0
+ count = local.enabled ? 1 : 0
role = join("", aws_iam_role.default.*.name)
policy_arn = "arn:aws:iam::aws:policy/service-role/AWSLambdaVPCAccessExecutionRole"
}
resource "aws_cloudwatch_event_rule" "default" {
- count = var.enabled ? 1 : 0
+ count = local.enabled ? 1 : 0
name = local.function_name
description = local.function_name
schedule_expression = var.schedule
}
resource "aws_lambda_permission" "default" {
- count = var.enabled ? 1 : 0
+ count = local.enabled ? 1 : 0
statement_id = "AllowExecutionFromCloudWatch"
action = "lambda:InvokeFunction"
function_name = join("", aws_lambda_function.default.*.arn)
@@ -220,7 +223,7 @@ resource "aws_lambda_permission" "default" {
}
resource "aws_cloudwatch_event_target" "default" {
- count = var.enabled ? 1 : 0
+ count = local.enabled ? 1 : 0
target_id = local.function_name
rule = join("", aws_cloudwatch_event_rule.default.*.name)
arn = join("", aws_lambda_function.default.*.arn)
diff --git a/test/src/Gopkg.lock b/test/src/Gopkg.lock
deleted file mode 100644
index 87bb6bd..0000000
--- a/test/src/Gopkg.lock
+++ /dev/null
@@ -1,92 +0,0 @@
-# This file is autogenerated, do not edit; changes may be undone by the next 'dep ensure'.
-
-
-[[projects]]
- digest = "1:ffe9824d294da03b391f44e1ae8281281b4afc1bdaa9588c9097785e3af10cec"
- name = "github.com/davecgh/go-spew"
- packages = ["spew"]
- pruneopts = "UT"
- revision = "8991bc29aa16c548c550c7ff78260e27b9ab7c73"
- version = "v1.1.1"
-
-[[projects]]
- digest = "1:75d6042fc66aebc974cc49b0c6c7cc3b9adb5f8130fbfa0dbec0820d990afa25"
- name = "github.com/gruntwork-io/terratest"
- packages = [
- "modules/collections",
- "modules/customerrors",
- "modules/files",
- "modules/logger",
- "modules/retry",
- "modules/shell",
- "modules/ssh",
- "modules/terraform",
- ]
- pruneopts = "UT"
- revision = "892abb2c35878d0808101bbfe6559e931dc2d354"
- version = "v0.16.0"
-
-[[projects]]
- digest = "1:0028cb19b2e4c3112225cd871870f2d9cf49b9b4276531f03438a88e94be86fe"
- name = "github.com/pmezard/go-difflib"
- packages = ["difflib"]
- pruneopts = "UT"
- revision = "792786c7400a136282c1664665ae0a8db921c6c2"
- version = "v1.0.0"
-
-[[projects]]
- digest = "1:5da8ce674952566deae4dbc23d07c85caafc6cfa815b0b3e03e41979cedb8750"
- name = "github.com/stretchr/testify"
- packages = [
- "assert",
- "require",
- ]
- pruneopts = "UT"
- revision = "ffdc059bfe9ce6a4e144ba849dbedead332c6053"
- version = "v1.3.0"
-
-[[projects]]
- branch = "master"
- digest = "1:831470c2758c8b733941144f2803a0ccad0632c5a767415b777ebd296b5f463e"
- name = "golang.org/x/crypto"
- packages = [
- "curve25519",
- "ed25519",
- "ed25519/internal/edwards25519",
- "internal/chacha20",
- "internal/subtle",
- "poly1305",
- "ssh",
- "ssh/agent",
- ]
- pruneopts = "UT"
- revision = "22d7a77e9e5f409e934ed268692e56707cd169e5"
-
-[[projects]]
- branch = "master"
- digest = "1:76ee51c3f468493aff39dbacc401e8831fbb765104cbf613b89bef01cf4bad70"
- name = "golang.org/x/net"
- packages = ["context"]
- pruneopts = "UT"
- revision = "f3200d17e092c607f615320ecaad13d87ad9a2b3"
-
-[[projects]]
- branch = "master"
- digest = "1:181f3fd33e620b958b5ab77da177cf775cdcccd7db82963607875fbd09ae995e"
- name = "golang.org/x/sys"
- packages = [
- "cpu",
- "unix",
- ]
- pruneopts = "UT"
- revision = "9cd6430ef91e39e1a0ec0470cf1321a33ef1b887"
-
-[solve-meta]
- analyzer-name = "dep"
- analyzer-version = 1
- input-imports = [
- "github.com/gruntwork-io/terratest/modules/terraform",
- "github.com/stretchr/testify/assert",
- ]
- solver-name = "gps-cdcl"
- solver-version = 1
diff --git a/test/src/Gopkg.toml b/test/src/Gopkg.toml
deleted file mode 100644
index 995bac5..0000000
--- a/test/src/Gopkg.toml
+++ /dev/null
@@ -1,7 +0,0 @@
-[[constraint]]
- name = "github.com/stretchr/testify"
- version = "1.2.2"
-
-[prune]
- go-tests = true
- unused-packages = true
diff --git a/test/src/Makefile b/test/src/Makefile
index 25cab8f..b772822 100644
--- a/test/src/Makefile
+++ b/test/src/Makefile
@@ -1,50 +1,30 @@
-PACKAGE = terraform-aws-lambda-elasticsearch-cleanup
-GOEXE ?= /usr/bin/go
-GOPATH = $(CURDIR)/.gopath
-GOBIN = $(GOPATH)/bin
-BASE = $(GOPATH)/src/$(PACKAGE)
-PATH := $(PATH):$(GOBIN)
-
-export TF_DATA_DIR ?= $(CURDIR)/.terraform
export TF_CLI_ARGS_init ?= -get-plugins=true
-export GOPATH
+export TERRAFORM_VERSION ?= $(shell curl -s https://checkpoint-api.hashicorp.com/v1/check/terraform | jq -r -M '.current_version' | cut -d. -f1-2)
+.DEFAULT_GOAL : all
.PHONY: all
+
## Default target
all: test
-ifneq (,$(wildcard /sbin/apk))
-## Install go, if not installed
-$(GOEXE):
- apk add --update go
-endif
-
-ifeq ($(shell uname -s),Linux)
-## Install all `dep`, if not installed
-$(GOBIN)/dep:
- @mkdir -p $(GOBIN)
- @curl https://raw.githubusercontent.com/golang/dep/master/install.sh | sh
-endif
-
-## Prepare the GOPATH
-$(BASE): $(GOEXE)
- @mkdir -p $(dir $@)
- @ln -sf $(CURDIR) $@
-
-## Download vendor dependencies to vendor/
-$(BASE)/vendor: $(BASE) $(GOBIN)/dep
- cd $(BASE) && dep ensure
-
.PHONY : init
## Initialize tests
-init: $(BASE)/vendor
+init:
+ @exit 0
.PHONY : test
## Run tests
test: init
- cd $(BASE) && go test -v -timeout 120m -run TestExamplesComplete
+ go mod download
+ go test -v -timeout 60m -run TestExamplesComplete
+
+## Run tests in docker container
+docker/test:
+ docker run --name terratest --rm -it -e AWS_ACCESS_KEY_ID -e AWS_SECRET_ACCESS_KEY -e AWS_SESSION_TOKEN -e GITHUB_TOKEN \
+ -e PATH="/usr/local/terraform/$(TERRAFORM_VERSION)/bin:/go/bin:/usr/local/go/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin" \
+ -v $(CURDIR)/../../:/module/ cloudposse/test-harness:latest -C /module/test/src test
.PHONY : clean
## Clean up files
clean:
- rm -rf .gopath/ vendor/ $(TF_DATA_DIR)
+ rm -rf ../../examples/complete/*.tfstate*
diff --git a/test/src/examples_complete_test.go b/test/src/examples_complete_test.go
index 3a70b00..61d1e49 100644
--- a/test/src/examples_complete_test.go
+++ b/test/src/examples_complete_test.go
@@ -1,7 +1,10 @@
package test
import (
+ "math/rand"
+ "strconv"
"testing"
+ "time"
"github.com/gruntwork-io/terratest/modules/terraform"
"github.com/stretchr/testify/assert"
@@ -11,12 +14,24 @@ import (
func TestExamplesComplete(t *testing.T) {
t.Parallel()
+ rand.Seed(time.Now().UnixNano())
+
+ randId := strconv.Itoa(rand.Intn(100000))
+ attributes := []string{randId}
+ kibanaSubdomain := "kibana-es-cleanup-" + randId
+
terraformOptions := &terraform.Options{
// The path to where our Terraform code is located
TerraformDir: "../../examples/complete",
+ // Suppress errors if there is a problem computing outputs during destroy
+ EnvVars: map[string]string{"TF_WARN_OUTPUT_ERRORS": "1"},
Upgrade: true,
// Variables to pass to our Terraform code using -var-file options
VarFiles: []string{"fixtures.us-east-2.tfvars"},
+ Vars: map[string]interface{}{
+ "attributes": attributes,
+ "kibana_subdomain_name": kibanaSubdomain,
+ },
}
// At the end of the test, run `terraform destroy` to clean up any resources that were created
@@ -33,22 +48,24 @@ func TestExamplesComplete(t *testing.T) {
// Run `terraform output` to get the value of an output variable
privateSubnetCidrs := terraform.OutputList(t, terraformOptions, "private_subnet_cidrs")
// Verify we're getting back the outputs we expect
- assert.Equal(t, []string{"172.16.0.0/19", "172.16.32.0/19"}, privateSubnetCidrs)
+ // assert.Equal(t, []string{"172.16.0.0/19", "172.16.32.0/19"}, privateSubnetCidrs)
+ assert.Equal(t, []string{"172.16.0.0/19"}, privateSubnetCidrs)
// Run `terraform output` to get the value of an output variable
publicSubnetCidrs := terraform.OutputList(t, terraformOptions, "public_subnet_cidrs")
// Verify we're getting back the outputs we expect
- assert.Equal(t, []string{"172.16.96.0/19", "172.16.128.0/19"}, publicSubnetCidrs)
+ // assert.Equal(t, []string{"172.16.96.0/19", "172.16.128.0/19"}, publicSubnetCidrs)
+ assert.Equal(t, []string{"172.16.96.0/19"}, publicSubnetCidrs)
// Run `terraform output` to get the value of an output variable
domainHostname := terraform.Output(t, terraformOptions, "domain_hostname")
// Verify we're getting back the outputs we expect
- assert.Equal(t, "es-cleanup.testing.cloudposse.co", domainHostname)
+ assert.Equal(t, "eg-test-es-cleanup-"+randId+".testing.cloudposse.co", domainHostname)
// Run `terraform output` to get the value of an output variable
kibanaHostname := terraform.Output(t, terraformOptions, "kibana_hostname")
// Verify we're getting back the outputs we expect
- assert.Equal(t, "kibana-es-cleanup.testing.cloudposse.co", kibanaHostname)
+ assert.Equal(t, kibanaSubdomain+".testing.cloudposse.co", kibanaHostname)
// Run `terraform output` to get the value of an output variable
domainEndpoint := terraform.Output(t, terraformOptions, "domain_endpoint")
@@ -59,5 +76,5 @@ func TestExamplesComplete(t *testing.T) {
// Run `terraform output` to get the value of an output variable
lambdaFunctionArn := terraform.Output(t, terraformOptions, "lambda_function_arn")
// Verify we're getting back the outputs we expect
- assert.Contains(t, lambdaFunctionArn, "function:eg-test-app-elasticsearch-cleanup")
+ assert.Contains(t, lambdaFunctionArn, "function:eg-test-es-cleanup")
}
diff --git a/test/src/go.mod b/test/src/go.mod
new file mode 100644
index 0000000..6606152
--- /dev/null
+++ b/test/src/go.mod
@@ -0,0 +1,14 @@
+module github.com/cloudposse/terraform-aws-lambda-elasticsearch-cleanup
+
+go 1.14
+
+require (
+ github.com/aws/aws-sdk-go v1.35.4 // indirect
+ github.com/davecgh/go-spew v1.1.1 // indirect
+ github.com/google/uuid v1.1.2 // indirect
+ github.com/gruntwork-io/terratest v0.16.0
+ github.com/pquerna/otp v1.2.0 // indirect
+ github.com/stretchr/testify v1.3.0
+ golang.org/x/crypto v0.0.0-20190513172903-22d7a77e9e5f // indirect
+ golang.org/x/sys v0.0.0-20190527104216-9cd6430ef91e // indirect
+)
diff --git a/test/src/go.sum b/test/src/go.sum
new file mode 100644
index 0000000..35d61c7
--- /dev/null
+++ b/test/src/go.sum
@@ -0,0 +1,41 @@
+github.com/aws/aws-sdk-go v1.35.4 h1:GG0sdhmzQSe4/UcF9iuQP9i+58bPRyU4OpujyzMlVjo=
+github.com/aws/aws-sdk-go v1.35.4/go.mod h1:H7NKnBqNVzoTJpGfLrQkkD+ytBA93eiDYi/+8rV9s48=
+github.com/boombuler/barcode v1.0.1-0.20190219062509-6c824513bacc h1:biVzkmvwrH8WK8raXaxBx6fRVTlJILwEwQGL1I/ByEI=
+github.com/boombuler/barcode v1.0.1-0.20190219062509-6c824513bacc/go.mod h1:paBWMcWSl3LHKBqUq+rly7CNSldXjb2rDl3JlRe0mD8=
+github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
+github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
+github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
+github.com/go-sql-driver/mysql v1.5.0 h1:ozyZYNQW3x3HtqT1jira07DN2PArx2v7/mN66gGcHOs=
+github.com/go-sql-driver/mysql v1.5.0/go.mod h1:DCzpHaOWr8IXmIStZouvnhqoel9Qv2LBy8hT2VhHyBg=
+github.com/google/uuid v1.1.2 h1:EVhdT+1Kseyi1/pUmXKaFxYsDNy9RQYkMWRH68J/W7Y=
+github.com/google/uuid v1.1.2/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
+github.com/gruntwork-io/terratest v0.16.0 h1:8dDdkAzqwVDclmefcy//oBPWs5bVrWuKYCUwG0WFG4c=
+github.com/gruntwork-io/terratest v0.16.0/go.mod h1:NjUn6YXA5Skxt8Rs20t3isYx5Rl+EgvGB8/+RRXddqk=
+github.com/jmespath/go-jmespath v0.4.0 h1:BEgLn5cpjn8UN1mAw4NjwDrS35OdebyEtFe+9YPoQUg=
+github.com/jmespath/go-jmespath v0.4.0/go.mod h1:T8mJZnbsbmF+m6zOOFylbeCJqk5+pHWvzYPziyZiYoo=
+github.com/jmespath/go-jmespath/internal/testify v1.5.1 h1:shLQSRRSCCPj3f2gpwzGwWFoC7ycTf1rcQZHOlsJ6N8=
+github.com/jmespath/go-jmespath/internal/testify v1.5.1/go.mod h1:L3OGu8Wl2/fWfCI6z80xFu9LTZmf1ZRjMHUOPmWr69U=
+github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
+github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
+github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
+github.com/pquerna/otp v1.2.0 h1:/A3+Jn+cagqayeR3iHs/L62m5ue7710D35zl1zJ1kok=
+github.com/pquerna/otp v1.2.0/go.mod h1:dkJfzwRKNiegxyNb54X/3fLwhCynbMspSyWKnvi1AEg=
+github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
+github.com/stretchr/testify v1.3.0 h1:TivCn/peBQ7UY8ooIcPgZFpTNSz0Q2U6UrFlUfqbe0Q=
+github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=
+golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
+golang.org/x/crypto v0.0.0-20190513172903-22d7a77e9e5f h1:R423Cnkcp5JABoeemiGEPlt9tHXFfw5kvc0yqlxRPWo=
+golang.org/x/crypto v0.0.0-20190513172903-22d7a77e9e5f/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
+golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
+golang.org/x/net v0.0.0-20200202094626-16171245cfb2 h1:CCH4IOTTfewWjGOlSp+zGcjutRKlBEZQ6wTn8ozI/nI=
+golang.org/x/net v0.0.0-20200202094626-16171245cfb2/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
+golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
+golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20190527104216-9cd6430ef91e h1:Pzdi8HRppinixnWWzN6KSa0QkBM+GKsTJaWwwfJskNw=
+golang.org/x/sys v0.0.0-20190527104216-9cd6430ef91e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/text v0.3.0 h1:g61tztE5qeGQ89tm6NTjjM9VPIm088od1l6aSorWRWg=
+golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
+gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM=
+gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
+gopkg.in/yaml.v2 v2.2.8 h1:obN1ZagJSUGI0Ek/LBmuj4SNLPfIny3KsKFopxRdj10=
+gopkg.in/yaml.v2 v2.2.8/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
diff --git a/variables.tf b/variables.tf
index 433e3aa..7a97b54 100644
--- a/variables.tf
+++ b/variables.tf
@@ -1,45 +1,3 @@
-variable "namespace" {
- type = string
- description = "Namespace, which could be your organization name, e.g. 'eg' or 'cp'"
- default = ""
-}
-
-variable "stage" {
- type = string
- description = "Stage, e.g. 'prod', 'staging', 'dev', or 'test'"
- default = ""
-}
-
-variable "name" {
- type = string
- default = "app"
- description = "Solution name, e.g. 'app' or 'cluster'"
-}
-
-variable "delimiter" {
- type = string
- default = "-"
- description = "Delimiter to be used between `namespace`, `stage`, `name` and `attributes`"
-}
-
-variable "attributes" {
- type = list(string)
- default = []
- description = "Additional attributes (e.g. `1`)"
-}
-
-variable "tags" {
- type = map(string)
- default = {}
- description = "Additional tags (e.g. `map('BusinessUnit','XYZ')`"
-}
-
-variable "enabled" {
- type = bool
- default = true
- description = "This module will not create any resources unless enabled is set to \"true\""
-}
-
variable "es_endpoint" {
type = string
description = "The Elasticsearch endpoint for the Lambda function to connect to"
@@ -77,12 +35,6 @@ variable "sns_arn" {
description = "SNS ARN to publish alerts"
}
-variable "index" {
- type = string
- default = "all"
- description = "Index/indices to process. Use a comma-separated list. Specify `all` to match every index except for `.kibana` or `.kibana_1`"
-}
-
variable "delete_after" {
type = number
default = 15
@@ -95,15 +47,27 @@ variable "index_format" {
description = "Combined with 'index' variable and is used to evaluate the index age"
}
-variable "index_regex" {
+variable "index_re" {
type = string
- default = "([^-]+)-(.*)"
- description = "Determines regex that is used for matching index name and index date. By default it match two groups separated by hyphen."
+ default = ".*"
+ description = "Regular Expression that matches the index names to clean up (not including trailing dash and date)"
+}
+
+variable "skip_index_re" {
+ type = string
+ #default = "^\\.kibana*"
+ default = null
+ description = <<-EOT
+ Regular Expression that matches the index names to ignore (not clean up). Takes precedence over `index_re`.
+ By default (when the value is `null`), a built-in pattern is used to exclude the Kibana indexes.
+ Use `"^$"` if you do not want to skip any indexes. If you supply a custom value and still want to
+ exclude the Kibana indexes, include an exclusion for `kibana` in it.
+ EOT
}
variable "python_version" {
type = string
- default = "2.7"
+ default = "3.7"
description = "The Python version to use"
}
@@ -118,3 +82,10 @@ variable "artifact_url" {
description = "URL template for the remote artifact"
default = "https://artifacts.cloudposse.com/$$${module_name}/$$${git_ref}/$$${filename}"
}
+
+variable "artifact_git_ref" {
+ type = string
+ description = "Git ref of the lambda artifact to use. Use latest version if null."
+ default = null
+}
+
diff --git a/versions.tf b/versions.tf
index 298a3d1..f4a8e78 100644
--- a/versions.tf
+++ b/versions.tf
@@ -1,9 +1,7 @@
terraform {
- required_version = ">= 0.12.0, < 0.14.0"
-
+ required_version = ">= 0.12.0"
required_providers {
- aws = "~> 2.0"
- template = "~> 2.0"
- null = "~> 2.0"
+ aws = ">= 2.0"
+ null = ">= 2.0"
}
}