add Data Lifecycle Policy for MongoDB volumes

Signed-off-by: Jakub Sokołowski <jakub@status.im>
Jakub Sokołowski 2019-10-04 14:55:12 +02:00
parent 0f355679c7
commit 83d0c7434d
6 changed files with 101 additions and 8 deletions

@@ -46,9 +46,8 @@ ansible-playbook ansible/prod.yml
 # TODO
 * [#4](https://github.com/dap-ps/infra-dapps/issues/4) - [prod] Geographically spread hosts
-* [#10](https://github.com/dap-ps/infra-dapps/issues/10) - [prod] Periodic EC2 Snapshots
 * [#11](https://github.com/dap-ps/infra-dapps/issues/11) - [prod] MongoDB Web UI
 * [#13](https://github.com/dap-ps/infra-dapps/issues/13) - [prod] Stress test infrastructure
 # Links

backup.tf (new file)

@@ -0,0 +1,85 @@
/* Prod EBS Backups -----------------------------*/

/* IAM role the DLM service assumes when managing snapshots. */
resource "aws_iam_role" "prod_snapshots" {
  name               = "dap-ps-prod-snapshots-role"
  assume_role_policy = <<EOF
{
  "Version": "2012-10-17",
  "Statement": [
    {
      "Action": "sts:AssumeRole",
      "Principal": {
        "Service": "dlm.amazonaws.com"
      },
      "Effect": "Allow",
      "Sid": ""
    }
  ]
}
EOF
}

/* Permissions DLM needs to create, tag, and expire snapshots. */
resource "aws_iam_role_policy" "prod_snapshots" {
  name   = "dap-ps-prod-snapshots-policy"
  role   = aws_iam_role.prod_snapshots.id
  policy = <<EOF
{
  "Version": "2012-10-17",
  "Statement": [
    {
      "Effect": "Allow",
      "Action": [
        "ec2:CreateSnapshot",
        "ec2:DeleteSnapshot",
        "ec2:DescribeVolumes",
        "ec2:DescribeSnapshots"
      ],
      "Resource": "*"
    },
    {
      "Effect": "Allow",
      "Action": [
        "ec2:CreateTags"
      ],
      "Resource": "arn:aws:ec2:*::snapshot/*"
    }
  ]
}
EOF
}

/* Daily snapshots of every volume tagged Fleet=db.prod, retained for 7 days. */
resource "aws_dlm_lifecycle_policy" "prod_snapshots" {
  description        = "dap-ps prod DB DLM lifecycle policy"
  execution_role_arn = aws_iam_role.prod_snapshots.arn
  state              = "ENABLED"

  policy_details {
    resource_types = ["VOLUME"]

    schedule {
      name = "one week of daily snapshots"

      create_rule {
        interval      = 24
        interval_unit = "HOURS"
        times         = ["23:45"]
      }

      retain_rule {
        count = 7
      }

      tags_to_add = {
        Source = "DLM lifecycle policy"
      }

      copy_tags = true
    }

    target_tags = {
      Fleet = "db.prod"
    }
  }
}
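
DLM schedules run on UTC, so this policy snapshots every volume tagged Fleet=db.prod nightly around 23:45 UTC and keeps the last seven snapshots. A minimal sketch of how a restore or audit step could look up the newest snapshot by that tag; the data source and output below are illustrative, not part of this commit:

/* Illustrative only: fetch the most recent snapshot created
   for the db.prod fleet (e.g. to restore a MongoDB volume). */
data "aws_ebs_snapshot" "latest_db_prod" {
  most_recent = true

  filter {
    name   = "tag:Fleet"
    values = ["db.prod"]
  }
}

output "latest_db_prod_snapshot_id" {
  value = data.aws_ebs_snapshot.latest_db_prod.id
}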

@@ -5,9 +5,6 @@
 /* SES S3 Bucket --------------------------------*/
 
-data "aws_caller_identity" "current" {
-}
-
 resource "aws_s3_bucket" "ses-forwarder-emails" {
   bucket = "ses-forwarder-emails"
   acl    = "private"

@@ -63,3 +63,5 @@ resource "aws_key_pair" "admin" {
   key_name   = "admin-key"
   public_key = file("files/admin.pub")
 }
+
+data "aws_caller_identity" "current" {}

@@ -60,8 +60,14 @@ resource "aws_instance" "main" {
   security_groups = [aws_security_group.main.name]
 
   tags = {
-    Name = "node-${format("%02d", count.index+1)}.${local.host_suffix}"
-    Fqdn = "node-${format("%02d", count.index+1)}.${local.host_full_suffix}"
+    Name  = "node-${format("%02d", count.index+1)}.${local.host_suffix}"
+    Fqdn  = "node-${format("%02d", count.index+1)}.${local.host_full_suffix}"
+    Fleet = "${var.env}.${var.stage}"
   }
 
+  /* for snapshots through lifecycle policy */
+  volume_tags = {
+    Fleet = "${var.env}.${var.stage}"
+  }
+
   /* bootstrapping access for later Ansible use */
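
The DLM policy selects volumes purely by tag, so these volume_tags are what actually enroll the instances' disks: for the MongoDB hosts, Fleet must render to db.prod to match the policy's target_tags. A sketch of the assumed inputs; the defaults are illustrative, and the real values come from wherever this module is instantiated:

/* Illustrative defaults only: the DB fleet presumably passes
   env = "db" and stage = "prod", yielding Fleet = "db.prod". */
variable "env" {
  default = "db"
}

variable "stage" {
  default = "prod"
}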

@@ -14,3 +14,7 @@ output "hostnames" {
 output "hosts" {
   value = zipmap(local.hostnames, local.public_ips)
 }
+
+output "instances" {
+  value = aws_instance.main
+}
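
Exporting the whole aws_instance.main list makes every instance attribute available to other workspaces. A minimal consumer sketch, assuming the state lives in S3; the bucket, key, and region below are hypothetical:

/* Illustrative only: read this workspace's outputs from another one. */
data "terraform_remote_state" "dapps" {
  backend = "s3"

  config = {
    bucket = "dap-ps-tf-state" # hypothetical
    key    = "infra-dapps.tfstate"
    region = "us-east-1"
  }
}

output "first_node_public_ip" {
  value = data.terraform_remote_state.dapps.outputs.instances[0].public_ip
}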