Initial commit

This commit is contained in:
Anton Iakimov 2023-10-05 14:08:18 +02:00 committed by GitHub
commit 634ffca5a4
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
20 changed files with 1127 additions and 0 deletions

9
.gitignore vendored Normal file
View File

@ -0,0 +1,9 @@
.terraform/
.terraform.lock.hcl
*.tfstate
*.tfstate.d/
*.tfstate.backup
*.retry
ansible/files/*
__pycache__

48
Makefile Normal file
View File

@ -0,0 +1,48 @@
# Set up the local environment: Ansible roles, the Terraform Ansible
# provisioner plugin, Consul TLS secrets, and Terraform initialization.
# `:=` so `uname -s` runs once at parse time instead of on every expansion.
OS := $(strip $(shell uname -s))
ifeq ($(OS),Darwin)
ARCH = darwin_amd64
PROVISIONER_SHA1 = bd688a503f526beedaf6ef5d2dba1128051573b6
else
ARCH = linux_amd64
PROVISIONER_SHA1 = da9cdf019d8f860a6e417257d81b1b21aceba7b7
endif
TF_PLUGINS_DIR = $(HOME)/.terraform.d/plugins
PROVISIONER_NAME = terraform-provisioner-ansible
PROVISIONER_VERSION = v2.5.0
PROVISIONER_ARCHIVE = $(PROVISIONER_NAME)-$(subst _,-,$(ARCH))_$(PROVISIONER_VERSION)
PROVISIONER_URL = https://github.com/radekg/terraform-provisioner-ansible/releases/download/$(PROVISIONER_VERSION)/$(PROVISIONER_ARCHIVE)
PROVISIONER_PATH = $(TF_PLUGINS_DIR)/$(ARCH)/$(PROVISIONER_NAME)_$(PROVISIONER_VERSION)

# Command targets, not files — without .PHONY a file named e.g. "secrets"
# would silently disable its target.
.PHONY: all requirements-install requirements-check requirements \
        install-provisioner secrets init-terraform cleanup

all: requirements install-provisioner secrets init-terraform
	@echo "Success!"

# Install Ansible roles pinned in ansible/requirements.yml.
requirements-install:
	ansible-galaxy install --keep-scm-meta --ignore-errors --force -r ansible/requirements.yml

# Verify installed role versions match requirements.yml.
requirements-check:
	ansible/versioncheck.py

requirements: requirements-install requirements-check

# Download the provisioner plugin binary into the Terraform plugins dir.
# FIX: the last recipe line previously ended with '; \', which continued the
# recipe into the next rule header and broke the Makefile. Also chain with
# '&&' so a failed download aborts instead of chmod-ing a missing file.
$(PROVISIONER_PATH):
	@mkdir -p $(TF_PLUGINS_DIR)/$(ARCH) && \
	wget -q $(PROVISIONER_URL) -O $@ && \
	chmod +x $@

# Verify the downloaded binary; on checksum mismatch delete it AND fail the
# target (previously the mismatch was swallowed and make reported success).
install-provisioner: $(PROVISIONER_PATH)
	@echo "$(PROVISIONER_SHA1) $(PROVISIONER_PATH)" | shasum -c \
	|| { rm -v $(PROVISIONER_PATH); exit 1; }

# Fetch Consul client TLS material used by the Terraform Consul backend.
secrets:
	pass services/consul/ca-crt > ansible/files/consul-ca.crt
	pass services/consul/client-crt > ansible/files/consul-client.crt
	pass services/consul/client-key > ansible/files/consul-client.key

init-terraform:
	terraform init -upgrade=true

# FIX: previously removed $(TF_PLUGINS_DIR)/$(ARCHIVE) where ARCHIVE was
# undefined — it expanded empty, so 'rm -r' targeted the plugins dir itself.
cleanup:
	rm -r $(TF_PLUGINS_DIR)/$(ARCH)

23
README.md Normal file
View File

@ -0,0 +1,23 @@
# Description
### :warning: Replace with actual description!
>This repo defines a standard template for new Status infrastructure repositories.
>
>Key things to change:
>
>- Update `README.md`
>- Terraform
> - Change `path` in `main.tf` to match new fleet
> - Add necessary providers in `providers.tf`
> - Add necessary secrets in `secrets.tf`
> - Adjust or remove `workspaces.tf`
> - Adjust `versions.tf`
>- Ansible
> - Extend `ansible/group_vars/all.yml`
> - Or add a dedicated `group_vars` file
> - Create the `ansible/main.yml` playbook
# Repo Usage
For how to use this repo read the [Infra Repo Usage](https://github.com/status-im/infra-docs/blob/master/docs/general/infra_repo_usage.md) doc.

24
ansible.cfg Normal file
View File

@ -0,0 +1,24 @@
[defaults]
forks = 30
timeout = 30
inventory = ./ansible/terraform.py
lookup_plugins = ./ansible/lookup_plugins
module_name = shell
host_key_checking = false
# human-readable stdout/stderr results display
stdout_callback = debug
# Avoid default Python warnings
interpreter_python = auto
# https://github.com/ansible/ansible/issues/56930
force_valid_group_names = ignore
[privilege_escalation]
become = true
become_user = root
[ssh_connection]
# this should speed up execution but might cause issues with sudo
pipelining = True
control_path = /tmp/ansible-ssh-%%h-%%p-%%r
# necessary for cloning private git repos
ssh_args = -o ControlMaster=auto -o ControlPersist=60s -o ForwardAgent=yes -o ConnectTimeout=360

62
ansible/README.md Normal file
View File

@ -0,0 +1,62 @@
# Description
Herein lie all ansible related files __except__ for `ansible.cfg` at the root of the repo for easier usage without having to `cd` here.
# Usage
Simply run the play related to the specific type of configuration you want to deploy:
```sh
> ls -1 ansible/*.yml
ansible/bootstrap.yml
ansible/upgrade.yml
ansible/main.yml
```
```sh
ansible-playbook ansible/main.yml
```
# Bootstrap
All hosts are bootstrapped right after provisioning using these roles:
* [status-im/infra-role-bootstrap-linux](https://github.com/status-im/infra-role-bootstrap-linux)
* [status-im/infra-role-bootstrap-macos](https://github.com/status-im/infra-role-bootstrap-macos)
* [status-im/infra-role-bootstrap-windows](https://github.com/status-im/infra-role-bootstrap-windows)
If you want to re-run any bootstrap step you can do it like so:
```sh
ansible-playbook ansible/bootstrap.yml -t role::bootstrap:hostname
```
In this case only the `hostname` set of tasks will be executed due to the `role::bootstrap:hostname` tag.
# Inventory
The inventory we use is created by Terraform via the [`terraform-provider-ansible`](https://github.com/nbering/terraform-provider-ansible) which generates the necessary data structures in the [Consul Key/Value store](https://www.consul.io/docs/dynamic-app-config/kv) that is later used by the [`terraform.py`](./terraform.py) script to provide hosts and their variables to Ansible.
Some ways to view existing hosts:
```sh
ansible localhost -m debug -a 'var=groups'
ansible all -o -m debug -a 'var=ansible_host' | columns -t
```
# Variables
Ansible variables can be provided to Ansible using the `--extra-vars`/`-e` flag. An example of such a flag is
```yaml
compose_state: 'present'
compose_recreate: 'smart'
compose_restart: false
```
These are used in every role that starts docker containers. You can use them to change the behaviour of roles.
For example to re-create all metric related containers use:
```sh
ansible-playbook ansible/main.yml -e compose_recreate=always
```
# Secrets
Secrets are stored and provided in two ways:
* [password-store](https://www.passwordstore.org/) through the [`passwordstore` lookup plugin](https://docs.ansible.com/ansible/latest/collections/community/general/passwordstore_lookup.html)
* [BitWarden](https://bitwarden.com/) through the [`./lookup_plugins/bitwarden.py`](./lookup_plugins/bitwarden.py) plugin.

33
ansible/bootstrap.yml Normal file
View File

@ -0,0 +1,33 @@
---
#
# This playbook configures most basic things about the hosts:
#
# - Admin and Additional users
# - Disables root login
# - Installs Common packages
# - Starts Consul client agent
#
# This is run on every newly provisioned host.
#
# Runs a local role-version check before touching any host, so a stale
# role checkout fails fast instead of half-configuring the fleet.
- name: Verify Ansible versions
  hosts: all
  tags: always
  become: false
  run_once: true
  gather_facts: false
  tasks:
    - local_action: command ./versioncheck.py
      changed_when: false  # read-only check; never reports a change

# Uses the raw module path of the role since Python may not exist on the
# host yet — hence no fact gathering.
- name: Bootstrap Python support for Ansible
  gather_facts: False
  hosts: all
  serial: '{{ serial|default(1) }}'  # one host at a time unless overridden
  roles:
    - infra-role-bootstrap-linux/raw

- name: Bootstrap admin users and Consul
  hosts: all
  serial: '{{ serial|default(1) }}'
  roles:
    - infra-role-bootstrap-linux

3
ansible/files/README.md Normal file
View File

@ -0,0 +1,3 @@
# Description
This directory is excluded from committing in `.gitignore` and is the destination for Consul certificates created by the `make secrets` target.

View File

@ -0,0 +1,10 @@
---
# Secrets below are resolved at runtime by the bitwarden lookup plugin
# (ansible/lookup_plugins/bitwarden.py); requires a valid BW_SESSION.
# Root password
bootstrap__root_pass: '{{lookup("bitwarden", "root-pass")}}'
# Consul
bootstrap__consul_encryption_key: '{{lookup("bitwarden", "consul/cluster", field="encryption-key")}}'
# NOTE(review): "bootstarp" looks like a typo of "bootstrap" — confirm whether
# the bootstrap role actually consumes this misspelled name before renaming.
bootstarp__consul_agent_acl_token: '{{lookup("bitwarden", "consul/acl-tokens", field="agent-default")}}'
# SSHGuard
bootstrap__sshguard_whitelist_extra: ['{{lookup("bitwarden", "sshguard/whitelist", field="jakubgs-home")}}']
# Wireguard
wireguard_consul_acl_token: '{{lookup("bitwarden", "consul/acl-tokens", field="wireguard")}}'

View File

@ -0,0 +1,24 @@
# Description
The files here are Ansible inventory files generated by [`ansible/terraform.py`](/ansible/terraform.py).
Their purpose is an emergency inventory backup in case of failure or unavailability of Consul.
# Usage
To use simply provide the file for the given stage using the `-i` argument.
For example, if you want to run Ansible on mailservers of `eth.test` fleet while Consul is unavailable do:
```bash
ansible-playbook ansible/main.yml -i ansible/inventory/beta -l mail
```
# Terraform Backup
In addition to the Ansible inventory files the [`ansible/terraform.py`](/ansible/terraform.py) script also stores the last seen Terraform state under `.terraform/terraform.tfstate.backup`.
So if ever Consul KV store is lost, it can be recovered from this copy.
# Details
For more details on how Ansible and Terraform interact read [this article](https://github.com/status-im/infra-docs/blob/master/articles/ansible_terraform.md).

View File

@ -0,0 +1,224 @@
#!/usr/bin/env python
# (c) 2018, Matt Stofko <matt@mjslabs.com>
# GNU General Public License v3.0+ (see LICENSE or
# https://www.gnu.org/licenses/gpl-3.0.txt)
#
# This plugin can be run directly by specifying the field followed by a list of
# entries, e.g. bitwarden.py password google.com wufoo.com
#
# CHANGES:
# - Dropped custom_field argument
# - Started checking sources in order
# - Refactored Bitwarden class, added get_item()
# - Split LookupModule.run into two functions
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import json
import os
import sys
from subprocess import Popen, PIPE, STDOUT, check_output
from ansible.errors import AnsibleError
from ansible.plugins.lookup import LookupBase
try:
from __main__ import display
except ImportError:
from ansible.utils.display import Display
display = Display()
DOCUMENTATION = """
lookup: bitwarden
author:
- Matt Stofko <matt@mjslabs.com>
requirements:
- bw (command line utility)
- BW_SESSION environment var (from `bw login` or `bw unlock`)
short_description: look up data from a bitwarden vault
description:
- use the bw command line utility to grab one or more items stored in a
bitwarden vault
options:
_terms:
description: name of item that contains the field to fetch
required: true
field:
description: field to return from bitwarden
default: 'password'
sync:
description: If True, call `bw sync` before lookup
"""
EXAMPLES = """
- name: get 'username' from Bitwarden entry 'Google'
debug:
msg: "{{ lookup('bitwarden', 'Google', field='username') }}"
"""
RETURN = """
_raw:
description:
- Items from Bitwarden vault
"""
class Bitwarden(object):
    """Thin wrapper around the `bw` Bitwarden CLI.

    Every operation shells out to the `bw` executable; when a session token
    has been set it is passed via the BW_SESSION environment variable.
    """

    def __init__(self, path):
        # Path (or bare command name) of the `bw` executable.
        self._cli_path = path
        self._bw_session = ""
        # Fail fast if the CLI is not installed / not on PATH.
        try:
            check_output([self._cli_path, "--version"])
        except OSError:
            raise AnsibleError("Command not found: {0}".format(self._cli_path))

    @property
    def session(self):
        # Session token exported to `bw` as BW_SESSION ("" means unset).
        return self._bw_session

    @session.setter
    def session(self, value):
        self._bw_session = value

    @property
    def cli_path(self):
        return self._cli_path

    @property
    def logged_in(self):
        # Parse Bitwarden status to check if logged in
        if self.status() == 'unlocked':
            return True
        else:
            return False

    def _run(self, args):
        """Run `bw` with the given args and return its stripped stdout.

        Known `bw` error messages are translated into AnsibleError; an
        unrecognized failure is printed and None is returned.
        """
        my_env = os.environ.copy()
        if self.session != "":
            my_env["BW_SESSION"] = self.session
        # stderr is merged into stdout so error text can be matched below.
        p = Popen([self.cli_path] + args, stdin=PIPE,
                  stdout=PIPE, stderr=STDOUT, env=my_env)
        out, _ = p.communicate()
        out = out.decode()
        rc = p.wait()
        if rc != 0:
            display.debug("Received error when running '{0} {1}': {2}"
                          .format(self.cli_path, args, out))
            if out.startswith("Vault is locked."):
                raise AnsibleError("Error accessing Bitwarden vault. "
                                   "Run 'bw unlock' to unlock the vault.")
            elif out.startswith("You are not logged in."):
                raise AnsibleError("Error accessing Bitwarden vault. "
                                   "Run 'bw login' to login.")
            elif out.startswith("Failed to decrypt."):
                raise AnsibleError("Error accessing Bitwarden vault. "
                                   "Make sure BW_SESSION is set properly.")
            elif out.startswith("More than one result was found."):
                raise AnsibleError("More than one object found with this name.")
            elif out.startswith("Not found."):
                raise AnsibleError("Error accessing Bitwarden vault. "
                                   "Specified item not found: {}".format(args[-1]))
            else:
                print("Unknown failure in 'bw' command: \n%s" % out)
                return None
        return out.strip()

    def sync(self):
        # Pull the latest vault contents from the server.
        self._run(['sync'])

    def status(self):
        # `bw status` prints JSON containing a "status" key.
        try:
            data = json.loads(self._run(['status']))
        except json.decoder.JSONDecodeError as e:
            raise AnsibleError("Error decoding Bitwarden status: %s" % e)
        return data['status']

    def get_entry(self, key, field):
        # `bw get <field> <key>` — field is e.g. password/username/item.
        return self._run(["get", field, key])

    def get_item(self, key):
        # Full item object parsed from JSON.
        return json.loads(self.get_entry(key, 'item'))

    def get_notes(self, key):
        return self.get_item(key).get('notes')

    def get_custom_field(self, key, field):
        """Return the value of a custom field on the item, or None."""
        rval = self.get_entry(key, 'item')
        data = json.loads(rval)
        if 'fields' not in data:
            return None
        matching = [x for x in data['fields'] if x['name'] == field]
        if len(matching) == 0:
            return None
        return matching[0]['value']

    def get_itemid(self, key):
        return self.get_item(key).get('id')

    def get_attachments(self, key, itemid):
        # NOTE: `key` here is the attachment file name, not the item name.
        return self._run(['get', 'attachment', key, '--itemid={}'.format(itemid), '--raw'])
class LookupModule(LookupBase):
    def run(self, terms, variables=None, **kwargs):
        """Look up one or more Bitwarden items.

        :param terms: item names to look up
        :param kwargs: supported keys: path (bw binary), field, sync,
                       session, file (attachment file name)
        :returns: list with one resolved value per term
        :raises AnsibleError: when not logged in or a term resolves to None
        """
        self.bw = Bitwarden(path=kwargs.get('path', 'bw'))
        # FIX: apply an explicitly passed session *before* the login check.
        # Previously the session was only set afterwards, so lookups with
        # session=... failed whenever BW_SESSION was not already exported.
        if kwargs.get('session'):
            self.bw.session = kwargs.get('session')
        if not self.bw.logged_in:
            raise AnsibleError("Not logged into Bitwarden: please run "
                               "'bw login', or 'bw unlock' and set the "
                               "BW_SESSION environment variable first")
        if kwargs.get('sync'):
            self.bw.sync()
        values = []
        for term in terms:
            rval = self.lookup(term, kwargs)
            if rval is None:
                raise AnsibleError("No matching term, field or attachment found!")
            values.append(rval)
        return values

    def lookup(self, term, kwargs):
        """Resolve one term: attachment, notes, custom field, then entry."""
        if 'file' in kwargs:
            # Try attachments first
            itemid = self.bw.get_itemid(term)
            if itemid is None:
                raise AnsibleError("No such object, wrong name")
            return self.bw.get_attachments(kwargs['file'], itemid)
        # By default check password
        field = kwargs.get('field', 'password')
        # Special field which contains notes.
        if field == 'notes':
            return self.bw.get_notes(term)
        # Try custom fields second
        val = self.bw.get_custom_field(term, field)
        if val:
            return val
        # If not found check default bw entries
        return self.bw.get_entry(term, field)
def main():
    # Ad-hoc CLI entry point: bitwarden.py <field> <name> [name ...]
    if len(sys.argv) < 3:
        print("Usage: %s <field> <name> [name name ...]" % os.path.basename(__file__))
        return -1
    # NOTE(review): file='origin.crt' forces the attachment lookup path for
    # every CLI invocation — looks like leftover debugging; confirm intended.
    print(LookupModule().run(sys.argv[2:], variables=None, field=sys.argv[1], file='origin.crt'))
    return 0


if __name__ == "__main__":
    sys.exit(main())

30
ansible/requirements.yml Normal file
View File

@ -0,0 +1,30 @@
---
- name: infra-role-bootstrap-linux
src: git@github.com:status-im/infra-role-bootstrap-linux.git
version: 50eda0808cceaaad2a5c5cdb4493935f2e3a637d
scm: git
- name: infra-role-wireguard
src: git@github.com:status-im/infra-role-wireguard.git
version: b711bbabd2dc3d9ce8b1c3a6e5bc785901db9d09
scm: git
- name: open-ports
src: git@github.com:status-im/infra-role-open-ports.git
version: 24dc30dbdf85e6758cb6924074b2f7a0f4541524
scm: git
- name: swap-file
src: git@github.com:status-im/infra-role-swap-file.git
version: 3fb0fb8d313ab388df1b38d516e2ff88b72a2cf7
scm: git
- name: consul-service
src: git@github.com:status-im/infra-role-consul-service.git
version: 2b3d4e53856d6cc91ae5c5a342fd12f2bb96aa88
scm: git
- name: systemd-timer
src: git@github.com:status-im/infra-role-systemd-timer.git
version: c6bbc3d1b4b0ba603d82fa06cd17297d12523182
scm: git

453
ansible/terraform.py Executable file
View File

@ -0,0 +1,453 @@
#!/usr/bin/env python
# source: https://github.com/nbering/terraform-inventory
'''
Terraform Inventory Script
==========================
This inventory script generates dynamic inventory by reading Terraform state
contents. Servers and groups are defined inside the Terraform state using special
resources defined by the Terraform Provider for Ansible.
Configuration
=============
State is fetched using the "terraform state pull" subcommand. The behaviour of
this action can be configured using some environment variables.
Environment Variables:
......................
ANSIBLE_TF_BIN
Override the path to the Terraform command executable. This is useful if
you have multiple copies or versions installed and need to specify a
specific binary. The inventory script runs the `terraform state pull`
command to fetch the Terraform state, so that remote state will be
    fetched seamlessly regardless of the backend configuration.
ANSIBLE_TF_DIR
Set the working directory for the `terraform` command when the scripts
shells out to it. This is useful if you keep your terraform and ansible
configuration in separate directories. Defaults to using the current
working directory.
ANSIBLE_TF_WS_NAME
Sets the workspace for the `terraform` command when the scripts shells
out to it, defaults to `default` workspace - if you don't use workspaces
this is the one you'll be using.
'''
import sys
import json
import os
import re
import traceback
from subprocess import Popen, PIPE
TERRAFORM_DIR = os.environ.get('ANSIBLE_TF_DIR', os.getcwd())
TERRAFORM_ENV = os.path.join(TERRAFORM_DIR, '.terraform/environment')
TERRAFORM_PATH = os.environ.get('ANSIBLE_TF_BIN', 'terraform')
TERRAFORM_BPK = os.path.join(TERRAFORM_DIR, '.terraform/terraform.tfstate.backup')
def _tf_env():
# way to figure out currenly used TF workspace
try:
with open(TERRAFORM_ENV) as f:
return f.read()
except:
return 'default'
TERRAFORM_WS_NAME = os.environ.get('ANSIBLE_TF_WS_NAME', _tf_env())
ANSIBLE_BKP = os.path.join(TERRAFORM_DIR, 'ansible/inventory', _tf_env())
class TerraformState(object):
    '''
    Wraps pulled Terraform state content and exposes iteration over the
    individual resources it contains, across both state formats.
    '''

    def __init__(self, state_json):
        self.state_json = state_json
        # Pre-0.12 state nests everything under a "modules" key; the
        # 0.12+ format keeps a flat top-level "resources" list instead.
        self.flat_attrs = "modules" in state_json

    def resources(self):
        '''Yield every resource in the state as a TerraformResource.'''
        if self.flat_attrs:
            for module in self.state_json["modules"]:
                for resource in module["resources"].values():
                    yield TerraformResource(resource, flat_attrs=True)
        else:
            for resource in self.state_json["resources"]:
                for instance in resource["instances"]:
                    yield TerraformResource(instance, resource_type=resource["type"])
class TerraformResource(object):
    '''
    Wraps a single resource entry from Terraform state and reads attribute
    values from both the old flat (pre-0.12) attribute encoding and the
    modern nested one.
    '''
    DEFAULT_PRIORITIES = {
        'ansible_host': 50,
        'ansible_group': 50,
        'ansible_host_var': 60,
        'ansible_group_var': 60
    }

    def __init__(self, source_json, flat_attrs=False, resource_type=None):
        self.flat_attrs = flat_attrs
        self._type = resource_type
        self._priority = None
        self.source_json = source_json

    def is_ansible(self):
        '''True when this resource comes from the ansible TF provider.'''
        return self.type().startswith("ansible_")

    def priority(self):
        '''Merge priority: explicit variable_priority wins over defaults.'''
        if self._priority is None:
            explicit = self.read_int_attr("variable_priority")
            if explicit is not None:
                self._priority = explicit
            else:
                # Unknown types fall back to priority 0.
                self._priority = TerraformResource.DEFAULT_PRIORITIES.get(
                    self.type(), 0)
        return self._priority

    def type(self):
        '''Terraform resource type identifier.'''
        return self._type if self._type else self.source_json["type"]

    def read_dict_attr(self, key):
        '''
        Read a map attribute. In flat mode the map is spread over several
        "key.subkey" entries; the "key.%" count entry is skipped.
        '''
        attrs = self._raw_attributes()
        if not self.flat_attrs:
            return attrs.get(key, {})
        result = {}
        for name, value in attrs.items():
            match = re.match(r"^" + key + r"\.(.*)", name)
            if match and match.group(1) != "%":
                result[match.group(1)] = value
        return result

    def read_list_attr(self, key):
        '''
        Read a list attribute. In flat mode the list is spread over "key.N"
        entries, with the element count stored under "key.#".
        '''
        attrs = self._raw_attributes()
        if not self.flat_attrs:
            return attrs.get(key, None)
        # A missing count entry means the attribute is absent -> empty list.
        length = int(attrs.get(key + ".#", 0))
        if length < 1:
            return []
        return [attrs["{}.{}".format(key, i)] for i in range(length)]

    def read_int_attr(self, key):
        '''Read an attribute converted to int; None passes through.'''
        val = self.read_attr(key)
        return int(val) if val is not None else None

    def read_attr(self, key):
        '''Read a raw attribute value from the underlying state content.'''
        return self._raw_attributes().get(key, None)

    def _raw_attributes(self):
        # Flat (pre-0.12) state nests attributes under "primary".
        if self.flat_attrs:
            return self.source_json["primary"]["attributes"]
        return self.source_json["attributes"]
class AnsibleInventory(object):
    '''
    AnsibleInventory handles conversion from Terraform resource content to
    Ansible inventory entities, and building of the final inventory json.
    '''

    def __init__(self):
        # groupname -> AnsibleGroup
        self.groups = {}
        # hostname -> AnsibleHost
        self.hosts = {}
        self.inner_json = {}

    def add_host_resource(self, resource):
        '''Upsert type action for host resources.'''
        hostname = resource.read_attr("inventory_hostname")
        if hostname in self.hosts:
            host = self.hosts[hostname]
            host.add_source(resource)
        else:
            host = AnsibleHost(hostname, source=resource)
            self.hosts[hostname] = host

    def add_group_resource(self, resource):
        '''Upsert type action for group resources.'''
        groupname = resource.read_attr("inventory_group_name")
        if groupname in self.groups:
            group = self.groups[groupname]
            group.add_source(resource)
        else:
            group = AnsibleGroup(groupname, source=resource)
            self.groups[groupname] = group

    def update_groups(self, groupname, children=None, hosts=None, group_vars=None):
        '''Upsert type action for group resources'''
        if groupname in self.groups:
            group = self.groups[groupname]
            group.update(children=children, hosts=hosts, group_vars=group_vars)
        else:
            group = AnsibleGroup(groupname)
            group.update(children, hosts, group_vars)
            self.groups[groupname] = group

    def add_resource(self, resource):
        '''
        Process a Terraform resource, passing to the correct handler function
        by type.
        '''
        if resource.type().startswith("ansible_host"):
            self.add_host_resource(resource)
        elif resource.type().startswith("ansible_group"):
            self.add_group_resource(resource)

    def to_dict(self):
        '''
        Generate the file Ansible inventory structure to be serialized into JSON
        for consumption by Ansible proper.
        '''
        out = {
            "_meta": {
                "hostvars": {}
            }
        }
        # Build hosts first so their group memberships get registered
        # before the groups themselves are serialized.
        for hostname, host in self.hosts.items():
            host.build()
            for group in host.groups:
                self.update_groups(group, hosts=[host.hostname])
            out["_meta"]["hostvars"][hostname] = host.get_vars()
        for groupname, group in self.groups.items():
            group.build()
            out[groupname] = group.to_dict()
        return out
class AnsibleHost(object):
    '''
    A single Ansible inventory host, aggregated from one or more Terraform
    resources that reference the same hostname.
    '''

    def __init__(self, hostname, source=None):
        self.hostname = hostname
        self.sources = []
        # Every host implicitly belongs to the "all" group.
        self.groups = set(["all"])
        self.host_vars = {}
        if source:
            self.add_source(source)

    def update(self, groups=None, host_vars=None):
        '''Merge extra groups and host variables into this host.'''
        if host_vars:
            self.host_vars.update(host_vars)
        if groups:
            self.groups.update(groups)

    def add_source(self, source):
        '''Register another Terraform resource describing this host.'''
        self.sources.append(source)

    def build(self):
        '''Merge all sources in priority order and freeze group ordering.'''
        self.sources.sort(key=lambda source: source.priority())
        for source in self.sources:
            kind = source.type()
            if kind == "ansible_host":
                self.update(groups=source.read_list_attr("groups"),
                            host_vars=source.read_dict_attr("vars"))
            elif kind == "ansible_host_var":
                single = {source.read_attr("key"): source.read_attr("value")}
                self.update(host_vars=single)
        self.groups = sorted(self.groups)

    def get_vars(self):
        '''Return a copy of the host's variable dictionary.'''
        return dict(self.host_vars)
class AnsibleGroup(object):
    '''
    A single Ansible inventory group, aggregated from one or more Terraform
    resources that reference the same group name.
    '''

    def __init__(self, groupname, source=None):
        self.groupname = groupname
        self.sources = []
        self.hosts = set()
        self.children = set()
        self.group_vars = {}
        if source:
            self.add_source(source)

    def update(self, children=None, hosts=None, group_vars=None):
        '''Merge extra children, hosts, and group variables.'''
        if hosts:
            self.hosts.update(hosts)
        if children:
            self.children.update(children)
        if group_vars:
            self.group_vars.update(group_vars)

    def add_source(self, source):
        '''Register another Terraform resource describing this group.'''
        self.sources.append(source)

    def build(self):
        '''Merge all sources in priority order and freeze ordering.'''
        self.sources.sort(key=lambda source: source.priority())
        for source in self.sources:
            kind = source.type()
            if kind == "ansible_group":
                self.update(children=source.read_list_attr("children"),
                            group_vars=source.read_dict_attr("vars"))
            elif kind == "ansible_group_var":
                single = {source.read_attr("key"): source.read_attr("value")}
                self.update(group_vars=single)
        self.hosts = sorted(self.hosts)
        self.children = sorted(self.children)

    def to_dict(self):
        '''Structure used for this group in the final inventory JSON.'''
        return {
            "children": list(self.children),
            "hosts": list(self.hosts),
            "vars": dict(self.group_vars)
        }
def _execute_shell():
    '''Select the configured workspace and pull Terraform state as JSON.

    Exits the whole process with status 1 when either terraform command
    writes anything to stderr.
    '''
    tf_workspace = [TERRAFORM_PATH, 'workspace', 'select', TERRAFORM_WS_NAME]
    proc_ws = Popen(tf_workspace, cwd=TERRAFORM_DIR, stdout=PIPE,
                    stderr=PIPE, universal_newlines=True)
    _, err_ws = proc_ws.communicate()
    # NOTE(review): any stderr output (even a warning) aborts here — the
    # process return code is never consulted; confirm this is intended.
    if err_ws != '':
        sys.stderr.write(str(err_ws)+'\n')
        sys.exit(1)
    else:
        tf_command = [TERRAFORM_PATH, 'state', 'pull']
        proc_tf_cmd = Popen(tf_command, cwd=TERRAFORM_DIR,
                            stdout=PIPE, stderr=PIPE, universal_newlines=True)
        out_cmd, err_cmd = proc_tf_cmd.communicate()
        if err_cmd != '':
            sys.stderr.write(str(err_cmd)+'\n')
            sys.exit(1)
        else:
            return json.loads(out_cmd)
def _backup_tf(tfstate):
    # Creates a raw state backup in case we lose Consul
    with open(TERRAFORM_BPK, 'w') as f:
        f.write(json.dumps(tfstate.state_json))
def _backup_ansible(inventory):
    # Creates a state backup in Ansible INI inventory format
    text = '# NOTE: This file is generated by terraform.py\n'
    text += '# For emergency use when Consul fails\n'
    text += '[all]\n'
    # NOTE(review): assumes every host defines ansible_host, env, stage,
    # data_center, region and dns_entry vars — a missing key raises KeyError.
    for hostname, host in sorted(inventory.hosts.items()):
        text += (
            '{0} hostname={0} ansible_host={1} '
        ).format(hostname, host.host_vars['ansible_host']) + (
            'env={env} stage={stage} data_center={data_center} '+
            'region={region} dns_entry={dns_entry}\n'
        ).format(**host.host_vars)
    text += '\n'
    for name, hosts in sorted(inventory.groups.items()):
        # Skip pseudo-groups that are not real inventory sections.
        if name in ['_meta', 'all']:
            continue
        text += '[{}]\n'.format(name)
        for hostname in sorted(hosts.hosts):
            text += '{}\n'.format(hostname)
        text += '\n'
    with open(ANSIBLE_BKP, 'w') as f:
        f.write(text)
def _main():
    '''Pull state, build the inventory, print it, and write both backups.'''
    try:
        tfstate = TerraformState(_execute_shell())
        inventory = AnsibleInventory()
        for resource in tfstate.resources():
            if resource.is_ansible():
                inventory.add_resource(resource)
        # Ansible consumes the JSON printed on stdout.
        sys.stdout.write(json.dumps(inventory.to_dict(), indent=2))
        # backup raw TF state
        _backup_tf(tfstate)
        # backup ansible inventory
        _backup_ansible(inventory)
    except Exception:
        # Exit non-zero so Ansible never treats partial output as a
        # valid inventory.
        traceback.print_exc(file=sys.stderr)
        sys.exit(1)


if __name__ == '__main__':
    _main()

18
ansible/upgrade.yml Normal file
View File

@ -0,0 +1,18 @@
---
- name: Update and upgrade apt packages
gather_facts: false
hosts: all
serial: 1
tasks:
- name: Upgrade packages
apt:
upgrade: true
update_cache: true
cache_valid_time: 86400 # One day
register: packages
- reboot:
when: packages.changed
- name: Remove unused packages
apt:
autoremove: true
autoclean: true

71
ansible/versioncheck.py Executable file
View File

@ -0,0 +1,71 @@
#!/usr/bin/env python3
# WARNING: If importing this fails set PYTHONPATH.
import yaml
import ansible
import subprocess
from os import path, environ
from packaging import version
SCRIPT_DIR = path.dirname(path.realpath(__file__))
# Where Ansible looks for installed roles.
ANSIBLE_ROLES_PATH = path.join(environ['HOME'], '.ansible/roles')
class Role:
    """An Ansible role pinned to a specific version in requirements.yml."""

    def __init__(self, name, version):
        self.name = name
        self.version = version

    @property
    def path(self):
        """Filesystem location where ansible-galaxy installs this role."""
        return path.join(ANSIBLE_ROLES_PATH, self.name)

    def exists(self):
        """Whether the role directory is present locally."""
        return path.isdir(self.path)

    def local_version(self):
        """Commit SHA currently checked out in the installed role repo."""
        result = subprocess.run(
            ['git', 'rev-parse', 'HEAD'],
            capture_output=True,
            cwd=self.path
        )
        result.check_returncode()
        return str(result.stdout.strip(), 'utf-8')
# Verify Ansible version is 2.8 or newer.
if version.parse(ansible.__version__) < version.parse("2.8"):
    print('Your Ansible version is lower than 2.8. Upgrade it.')
    exit(1)

# Read Ansible requirements file listing the pinned role versions.
with open(path.join(SCRIPT_DIR, 'requirements.yml'), 'r') as f:
    requirements = yaml.load(f, Loader=yaml.FullLoader)

# Check if each Ansible role is installed and has correct version.
errors = 0
for req in requirements:
    role = Role(req['name'], req.get('version'))
    if not role.exists():
        print('%25s - MISSING!' % role.name)
        errors += 1
        continue
    # For now we allow not specifying versions for everything.
    if role.version is None:
        print('%25s - No version!' % role.name)
        continue
    local_version = role.local_version()
    if role.version != local_version:
        print('%25s - MISMATCH: %s != %s' %
              (role.name, role.version[:8], local_version[:8]))
        errors += 1
        continue
    print('%25s - VALID' % role.name)

# Any issue with any role should cause failure.
if errors > 0:
    exit(1)

31
main.tf Normal file
View File

@ -0,0 +1,31 @@
/* DATA -----------------------------------------*/
terraform {
  backend "consul" {
    address = "https://consul.statusim.net:8400"
    /* Lock to avoid syncing issues */
    lock = true
    /* KV store has a limit of 512KB */
    gzip = true
    /* WARNING This needs to be changed for every repo. */
    path = "terraform/codex/"
    /* Client TLS material generated into ansible/files/ by `make secrets`. */
    ca_file = "ansible/files/consul-ca.crt"
    cert_file = "ansible/files/consul-client.crt"
    key_file = "ansible/files/consul-client.key"
  }
}

/* CF Zones ------------------------------------*/
/* CloudFlare Zone IDs required for records */
data "cloudflare_zones" "active" {
  filter { status = "active" }
}

/* For easier access to zone ID by domain name */
locals {
  zones = {
    for zone in data.cloudflare_zones.active.zones :
    zone.name => zone.id
  }
}

8
providers.tf Normal file
View File

@ -0,0 +1,8 @@
/* Credentials are resolved from password-store via the data sources
 * declared in secrets.tf. */
provider "cloudflare" {
  email = data.pass_password.cloudflare_email.password
  api_key = data.pass_password.cloudflare_token.password
  account_id = data.pass_password.cloudflare_account.password
}
# Uses PASSWORD_STORE_DIR environment variable
provider "pass" {}

14
secrets.tf Normal file
View File

@ -0,0 +1,14 @@
/* All entries below are read from password-store by the pass provider. */
/* Token for interacting with Cloudflare API. */
data "pass_password" "cloudflare_token" {
  path = "cloud/Cloudflare/token"
}

/* Email address of Cloudflare account. */
data "pass_password" "cloudflare_email" {
  path = "cloud/Cloudflare/email"
}

/* ID of CloudFlare Account. */
data "pass_password" "cloudflare_account" {
  path = "cloud/Cloudflare/account"
}

5
variables.tf Normal file
View File

@ -0,0 +1,5 @@
/* Base domain under which DNS records for this fleet are managed. */
variable "domain" {
  description = "DNS Domain to update"
  type = string
  default = "statusim.net"
}

13
versions.tf Normal file
View File

@ -0,0 +1,13 @@
terraform {
  /* Keep in sync with the Terraform version available on admin hosts. */
  required_version = "~> 1.2.0"
  required_providers {
    cloudflare = {
      source = "cloudflare/cloudflare"
      version = " = 3.26.0"
    }
    pass = {
      source = "camptocamp/pass"
      version = " = 2.0.0"
    }
  }
}

24
workspaces.tf Normal file
View File

@ -0,0 +1,24 @@
/**
 * This is a hacky way of binding specific variable
 * values to different Terraform workspaces.
 *
 * Details:
 * https://github.com/hashicorp/terraform/issues/15966
 */
locals {
  env = {
    defaults = {
      /* Default settings for all fleets/workspaces. */
    }
    test = {
      /* Settings specific to the test fleet/workspace. */
    }
  }
}

/* Makes fleet settings available under local.ws. */
locals {
  /* NOTE: selecting a workspace with no entry in local.env fails here. */
  ws = merge(local.env["defaults"], local.env[terraform.workspace])
}