add Ansible boilerplate

Signed-off-by: Jakub Sokołowski <jakub@status.im>
This commit is contained in:
Jakub Sokołowski 2022-02-14 11:34:55 +01:00
parent e0bc305351
commit ff39f24a88
No known key found for this signature in database
GPG Key ID: 09AA5403E54D9931
9 changed files with 884 additions and 0 deletions

25
ansible.cfg Normal file
View File

@ -0,0 +1,25 @@
[defaults]
forks = 30
timeout = 30
inventory = ./ansible/terraform.py
lookup_plugins = ./ansible/lookup_plugins
remote_user = admin
module_name = shell
host_key_checking = false
# human-readable stdout/stderr results display
stdout_callback = debug
# Avoid default Python warnings
interpreter_python = auto
# https://github.com/ansible/ansible/issues/56930
force_valid_group_names = ignore
[privilege_escalation]
become = true
become_user = root
[ssh_connection]
# this should speed up execution but might cause issues with sudo
pipelining = True
control_path = /tmp/ansible-ssh-%%h-%%p-%%r
# necessary for cloning private git repos
ssh_args = -o ControlMaster=auto -o ControlPersist=60s -o ForwardAgent=yes -o ConnectTimeout=360

33
ansible/bootstrap.yml Normal file
View File

@ -0,0 +1,33 @@
---
#
# This playbook configures most basic things about the hosts:
#
# - Admin and Additional users
# - Disables root login
# - Installs Common packages
# - Starts Consul client agent
#
# This is run on every newly provisioned host.
#
- name: Verify Ansible versions
  hosts: all
  tags: always
  become: false
  run_once: true
  gather_facts: false
  tasks:
    # Fails early when local Ansible or installed role versions do not
    # match requirements.yml (see ./versioncheck.py).
    - local_action: command ./versioncheck.py
      changed_when: false

- name: Bootstrap Python support for Ansible
  # presumably runs before Python is available on the target, hence the
  # raw role and no fact gathering -- confirm against the role contents
  gather_facts: False
  hosts: all
  serial: '{{ serial|default(1) }}'
  roles:
    - infra-role-bootstrap-linux/raw

- name: Bootstrap admin users and Consul
  hosts: all
  serial: '{{ serial|default(1) }}'
  roles:
    - infra-role-bootstrap-linux

View File

@ -0,0 +1,8 @@
---
# Secrets are resolved at runtime through the custom Bitwarden lookup
# plugin (see ansible/lookup_plugins/bitwarden.py); a valid BW_SESSION
# is required when running these playbooks.

# Root password
bootstrap__root_pass: '{{lookup("bitwarden", "root-pass")}}'
# Consul
bootstrap__consul_encryption_key: '{{lookup("bitwarden", "consul", field="encryption-key")}}'
# NOTE(review): 'bootstarp__' looks like a typo for 'bootstrap__'; if the
# role expects the latter prefix this value is never picked up -- confirm
# against the role's defaults before renaming.
bootstarp__consul_agent_acl_token: '{{lookup("bitwarden", "consul", field="agent-acl-token")}}'
# Wireguard
wireguard_consul_acl_token: '{{lookup("bitwarden", "consul", field="wireguard-acl-token")}}'

View File

@ -0,0 +1,24 @@
# Description
The files here are Ansible inventory files generated by [`ansible/terraform.py`](/ansible/terraform.py).
Their purpose is an emergency inventory backup in case of failure or unavailability of Consul.
# Usage
To use simply provide the file for the given stage using the `-i` argument.
For example, if you want to run Ansible on mailservers of `eth.test` fleet while Consul is unavailable do:
```bash
ansible-playbook ansible/main.yml -i ansible/inventory/beta -l mail
```
# Terraform Backup
In addition to the Ansible inventory files, the [`ansible/terraform.py`](/ansible/terraform.py) script also stores the last seen Terraform state under `.terraform/terraform.tfstate.backup`.
So if ever Consul KV store is lost, it can be recovered from this copy.
# Details
For more details on how Ansible and Terraform interact read [this article](https://github.com/status-im/infra-docs/blob/master/articles/ansible_terraform.md).

View File

@ -0,0 +1,224 @@
#!/usr/bin/env python
# (c) 2018, Matt Stofko <matt@mjslabs.com>
# GNU General Public License v3.0+ (see LICENSE or
# https://www.gnu.org/licenses/gpl-3.0.txt)
#
# This plugin can be run directly by specifying the field followed by a list of
# entries, e.g. bitwarden.py password google.com wufoo.com
#
# CHANGES:
# - Dropped custom_field argument
# - Started checking sources in order
# - Refactored Bitwarden class, added get_item()
# - Split LookupModule.run into two functions
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import json
import os
import sys
from subprocess import Popen, PIPE, STDOUT, check_output
from ansible.errors import AnsibleError
from ansible.plugins.lookup import LookupBase
try:
from __main__ import display
except ImportError:
from ansible.utils.display import Display
display = Display()
DOCUMENTATION = """
lookup: bitwarden
author:
- Matt Stofko <matt@mjslabs.com>
requirements:
- bw (command line utility)
- BW_SESSION environment var (from `bw login` or `bw unlock`)
short_description: look up data from a bitwarden vault
description:
- use the bw command line utility to grab one or more items stored in a
bitwarden vault
options:
_terms:
description: name of item that contains the field to fetch
required: true
field:
description: field to return from bitwarden
default: 'password'
sync:
description: If True, call `bw sync` before lookup
"""
EXAMPLES = """
- name: get 'username' from Bitwarden entry 'Google'
debug:
msg: "{{ lookup('bitwarden', 'Google', field='username') }}"
"""
RETURN = """
_raw:
description:
- Items from Bitwarden vault
"""
class Bitwarden(object):
    """Minimal wrapper around the Bitwarden `bw` command line client.

    Every query shells out to `bw`; when an explicit session token is
    set it is injected into the child process as BW_SESSION.
    """

    def __init__(self, path):
        # Path to the `bw` executable; probed immediately so a missing
        # binary fails fast with a clear AnsibleError.
        self._cli_path = path
        # Empty string means "no explicit session": the ambient
        # BW_SESSION environment variable (if any) is used instead.
        self._bw_session = ""
        try:
            check_output([self._cli_path, "--version"])
        except OSError:
            raise AnsibleError("Command not found: {0}".format(self._cli_path))

    @property
    def session(self):
        # Explicit session token, set via the lookup's `session` kwarg.
        return self._bw_session

    @session.setter
    def session(self, value):
        self._bw_session = value

    @property
    def cli_path(self):
        return self._cli_path

    @property
    def logged_in(self):
        # Parse Bitwarden status to check if logged in
        if self.status() == 'unlocked':
            return True
        else:
            return False

    def _run(self, args):
        """Run `bw <args>` and return its stripped stdout.

        Known `bw` error messages are mapped to AnsibleError with an
        actionable hint; any other failure is printed and None is
        returned (callers must handle a None result).
        """
        my_env = os.environ.copy()
        if self.session != "":
            my_env["BW_SESSION"] = self.session
        # stderr is merged into stdout so the error text can be matched
        # against the known message prefixes below.
        p = Popen([self.cli_path] + args, stdin=PIPE,
                  stdout=PIPE, stderr=STDOUT, env=my_env)
        out, _ = p.communicate()
        out = out.decode()
        rc = p.wait()
        if rc != 0:
            display.debug("Received error when running '{0} {1}': {2}"
                          .format(self.cli_path, args, out))
            if out.startswith("Vault is locked."):
                raise AnsibleError("Error accessing Bitwarden vault. "
                                   "Run 'bw unlock' to unlock the vault.")
            elif out.startswith("You are not logged in."):
                raise AnsibleError("Error accessing Bitwarden vault. "
                                   "Run 'bw login' to login.")
            elif out.startswith("Failed to decrypt."):
                raise AnsibleError("Error accessing Bitwarden vault. "
                                   "Make sure BW_SESSION is set properly.")
            elif out.startswith("More than one result was found."):
                raise AnsibleError("More than one object found with this name.")
            elif out.startswith("Not found."):
                raise AnsibleError("Error accessing Bitwarden vault. "
                                   "Specified item not found: {}".format(args[-1]))
            else:
                # NOTE(review): unknown failures do not raise -- they
                # print and return None.
                print("Unknown failure in 'bw' command: \n%s" % out)
                return None
        return out.strip()

    def sync(self):
        # Refresh the local vault cache from the server.
        self._run(['sync'])

    def status(self):
        """Return the vault status string as reported by `bw status`."""
        try:
            data = json.loads(self._run(['status']))
        except json.decoder.JSONDecodeError as e:
            raise AnsibleError("Error decoding Bitwarden status: %s" % e)
        return data['status']

    def get_entry(self, key, field):
        # `bw get <field> <key>` -- field is e.g. password/username/item.
        return self._run(["get", field, key])

    def get_item(self, key):
        """Return the full vault item parsed from JSON."""
        return json.loads(self.get_entry(key, 'item'))

    def get_notes(self, key):
        # None when the item has no notes.
        return self.get_item(key).get('notes')

    def get_custom_field(self, key, field):
        """Return the value of a custom field on the item, or None."""
        rval = self.get_entry(key, 'item')
        data = json.loads(rval)
        if 'fields' not in data:
            return None
        matching = [x for x in data['fields'] if x['name'] == field]
        if len(matching) == 0:
            return None
        return matching[0]['value']

    def get_itemid(self, key):
        # The item's unique id, needed for attachment retrieval.
        return self.get_item(key).get('id')

    def get_attachments(self, key, itemid):
        # `key` is the attachment file name here (callers pass the
        # lookup's `file` kwarg as this argument).
        return self._run(['get', 'attachment', key, '--itemid={}'.format(itemid), '--raw'])
class LookupModule(LookupBase):
    """Ansible lookup plugin backed by the Bitwarden `bw` CLI."""

    def run(self, terms, variables=None, **kwargs):
        """Resolve each term to a field, custom field, note, or attachment.

        Raises AnsibleError when the vault is locked/not logged in, or
        when any term produces no result.
        """
        self.bw = Bitwarden(path=kwargs.get('path', 'bw'))
        # FIX: apply a caller-supplied session token BEFORE the login
        # probe and any sync. Previously it was assigned after the
        # `logged_in` check, so passing session=... could never satisfy
        # that check (`bw status` ran without the token).
        if kwargs.get('session'):
            self.bw.session = kwargs.get('session')
        if not self.bw.logged_in:
            raise AnsibleError("Not logged into Bitwarden: please run "
                               "'bw login', or 'bw unlock' and set the "
                               "BW_SESSION environment variable first")
        if kwargs.get('sync'):
            self.bw.sync()
        values = []
        for term in terms:
            rval = self.lookup(term, kwargs)
            if rval is None:
                raise AnsibleError("No matching term, field or attachment found!")
            values.append(rval)
        return values

    def lookup(self, term, kwargs):
        """Resolve one term, checking sources in order:
        attachment (when 'file' given), notes, custom fields, then the
        built-in entry fields (default 'password')."""
        if 'file' in kwargs:
            # Try attachments first
            itemid = self.bw.get_itemid(term)
            if itemid is None:
                raise AnsibleError("No such object, wrong name")
            return self.bw.get_attachments(kwargs['file'], itemid)
        # By default check password
        field = kwargs.get('field', 'password')
        # Special field which contains notes.
        if field == 'notes':
            return self.bw.get_notes(term)
        # Try custom fields second
        val = self.bw.get_custom_field(term, field)
        if val:
            return val
        # If not found check default bw entries
        return self.bw.get_entry(term, field)
def main():
    """CLI entry point: `bitwarden.py <field> <name> [name ...]`."""
    args = sys.argv
    if len(args) < 3:
        prog = os.path.basename(__file__)
        print("Usage: %s <field> <name> [name name ...]" % prog)
        return -1
    # NOTE(review): file='origin.crt' forces the attachment code path on
    # every CLI invocation, so the <field> argument is effectively
    # ignored -- looks like leftover debugging; confirm before relying
    # on the CLI mode.
    module = LookupModule()
    result = module.run(args[2:], variables=None, field=args[1], file='origin.crt')
    print(result)
    return 0


if __name__ == "__main__":
    sys.exit(main())

30
ansible/requirements.yml Normal file
View File

@ -0,0 +1,30 @@
---
# Third-party Ansible roles used by the playbooks in this repository.
# Versions pin exact commit SHAs; versioncheck.py compares these pins
# against the locally installed role checkouts before every run.
- name: open-ports
  src: git@github.com:status-im/infra-role-open-ports.git
  version: 24dc30dbdf85e6758cb6924074b2f7a0f4541524
  scm: git
- name: swap-file
  src: git@github.com:status-im/infra-role-swap-file.git
  version: 7b63fb7b5f0c525aa191e1a410fd79f7eab8d11a
  scm: git
- name: infra-role-bootstrap-linux
  src: git@github.com:status-im/infra-role-bootstrap-linux.git
  version: 7d2646cad4046e20b02628f4efc1e2b87deef773
  scm: git
- name: infra-role-wireguard
  src: git@github.com:status-im/infra-role-wireguard.git
  version: 2aa2627c9f01d5c08f43237a09dd3a7635592205
  scm: git
- name: consul-service
  src: git@github.com:status-im/infra-role-consul-service.git
  version: 61a248a697d37c98b575be942bb6e86deb512380
  scm: git
- name: certbot
  src: git@github.com:status-im/infra-role-certbot.git
  version: 4b93ca81043f843d573c43b42f50e7620e81590d
  scm: git

453
ansible/terraform.py Executable file
View File

@ -0,0 +1,453 @@
#!/usr/bin/env python
# source: https://github.com/nbering/terraform-inventory
'''
Terraform Inventory Script
==========================
This inventory script generates dynamic inventory by reading Terraform state
contents. Servers and groups are defined inside the Terraform state using special
resources defined by the Terraform Provider for Ansible.
Configuration
=============
State is fetched using the "terraform state pull" subcommand. The behaviour of
this action can be configured using some environment variables.
Environment Variables:
......................
ANSIBLE_TF_BIN
Override the path to the Terraform command executable. This is useful if
you have multiple copies or versions installed and need to specify a
specific binary. The inventory script runs the `terraform state pull`
command to fetch the Terraform state, so that remote state will be
fetched seamlessly regardless of the backend configuration.
ANSIBLE_TF_DIR
Set the working directory for the `terraform` command when the scripts
shells out to it. This is useful if you keep your terraform and ansible
configuration in separate directories. Defaults to using the current
working directory.
ANSIBLE_TF_WS_NAME
Sets the workspace for the `terraform` command when the scripts shells
out to it, defaults to `default` workspace - if you don't use workspaces
this is the one you'll be using.
'''
import sys
import json
import os
import re
import traceback
from subprocess import Popen, PIPE
TERRAFORM_DIR = os.environ.get('ANSIBLE_TF_DIR', os.getcwd())
TERRAFORM_ENV = os.path.join(TERRAFORM_DIR, '.terraform/environment')
TERRAFORM_PATH = os.environ.get('ANSIBLE_TF_BIN', 'terraform')
TERRAFORM_BPK = os.path.join(TERRAFORM_DIR, '.terraform/terraform.tfstate.backup')
def _tf_env():
# way to figure out currenly used TF workspace
try:
with open(TERRAFORM_ENV) as f:
return f.read()
except:
return 'default'
TERRAFORM_WS_NAME = os.environ.get('ANSIBLE_TF_WS_NAME', _tf_env())
ANSIBLE_BKP = os.path.join(TERRAFORM_DIR, 'ansible/inventory', _tf_env())
class TerraformState(object):
    '''
    Wrapper over raw Terraform state JSON that yields its resources.

    Detects whether the state uses the pre-0.12 layout (top-level
    "modules" key, flattened attributes) or the 0.12+ layout
    ("resources" entries each carrying "instances").
    '''

    def __init__(self, state_json):
        self.state_json = state_json
        # Pre-0.12 states have a "modules" key and store attrs flat.
        self.flat_attrs = "modules" in state_json

    def resources(self):
        '''Yield every resource in the state as a TerraformResource.'''
        if self.flat_attrs:
            for module in self.state_json["modules"]:
                for res in module["resources"].values():
                    yield TerraformResource(res, flat_attrs=True)
        else:
            for res in self.state_json["resources"]:
                for inst in res["instances"]:
                    yield TerraformResource(inst, resource_type=res["type"])
class TerraformResource(object):
    '''
    TerraformResource wraps individual resource content and provides helper
    methods for reading older-style dictionary and list values from
    attributes defined as a single-level map.
    '''

    # Merge priority per ansible_* resource type; the *_var resources get
    # a higher value so explicit variables win over values embedded in
    # host/group resources.
    DEFAULT_PRIORITIES = {
        'ansible_host': 50,
        'ansible_group': 50,
        'ansible_host_var': 60,
        'ansible_group_var': 60
    }

    def __init__(self, source_json, flat_attrs=False, resource_type=None):
        self.flat_attrs = flat_attrs
        self._type = resource_type
        self._priority = None  # computed lazily by priority()
        self.source_json = source_json

    def is_ansible(self):
        '''Check if the resource is provided by the ansible provider.'''
        return self.type().startswith("ansible_")

    def priority(self):
        '''Get the merge priority of the resource (cached).'''
        if self._priority is not None:
            return self._priority
        priority = 0
        # FIX: read the attribute once instead of twice.
        explicit = self.read_int_attr("variable_priority")
        if explicit is not None:
            priority = explicit
        elif self.type() in TerraformResource.DEFAULT_PRIORITIES:
            priority = TerraformResource.DEFAULT_PRIORITIES[self.type()]
        self._priority = priority
        return self._priority

    def type(self):
        '''Returns the Terraform resource type identifier.'''
        if self._type:
            return self._type
        return self.source_json["type"]

    def read_dict_attr(self, key):
        '''
        Read a dictionary attribute from the resource, handling old-style
        Terraform state where maps are stored as multiple keys in the
        resource's attributes.
        '''
        attrs = self._raw_attributes()
        if self.flat_attrs:
            out = {}
            # FIX: escape the key -- a key containing regex
            # metacharacters (e.g. '.') previously matched unrelated
            # attribute names.
            pattern = re.compile(r"^" + re.escape(key) + r"\.(.*)")
            for k in attrs.keys():
                match = pattern.match(k)
                # "%" is the Terraform map length marker, not an entry.
                if not match or match.group(1) == "%":
                    continue
                out[match.group(1)] = attrs[k]
            return out
        return attrs.get(key, {})

    def read_list_attr(self, key):
        '''
        Read a list attribute from the resource, handling old-style
        Terraform state where lists are stored as multiple keys in the
        resource's attributes.
        '''
        attrs = self._raw_attributes()
        if self.flat_attrs:
            out = []
            # "<key>.#" stores the list length in old-style state.
            length_key = key + ".#"
            if length_key not in attrs.keys():
                return []
            length = int(attrs[length_key])
            if length < 1:
                return []
            for i in range(0, length):
                out.append(attrs["{}.{}".format(key, i)])
            return out
        return attrs.get(key, None)

    def read_int_attr(self, key):
        '''
        Read an attribute from state and convert it to type Int.
        Returns None when the attribute is absent.
        '''
        val = self.read_attr(key)
        if val is not None:
            val = int(val)
        return val

    def read_attr(self, key):
        '''
        Read an attribute from the underlying state content.
        '''
        return self._raw_attributes().get(key, None)

    def _raw_attributes(self):
        # Pre-0.12 nests attributes under "primary"; 0.12+ is flat.
        if self.flat_attrs:
            return self.source_json["primary"]["attributes"]
        return self.source_json["attributes"]
class AnsibleInventory(object):
    '''
    Converts Terraform resource content into Ansible inventory entities
    and assembles the final inventory dictionary.
    '''

    def __init__(self):
        self.groups = {}
        self.hosts = {}
        self.inner_json = {}

    def add_host_resource(self, resource):
        '''Upsert type action for host resources.'''
        hostname = resource.read_attr("inventory_hostname")
        existing = self.hosts.get(hostname)
        if existing is None:
            self.hosts[hostname] = AnsibleHost(hostname, source=resource)
        else:
            existing.add_source(resource)

    def add_group_resource(self, resource):
        '''Upsert type action for group resources.'''
        groupname = resource.read_attr("inventory_group_name")
        existing = self.groups.get(groupname)
        if existing is None:
            self.groups[groupname] = AnsibleGroup(groupname, source=resource)
        else:
            existing.add_source(resource)

    def update_groups(self, groupname, children=None, hosts=None, group_vars=None):
        '''Upsert type action for group entries by name.'''
        group = self.groups.get(groupname)
        if group is None:
            group = AnsibleGroup(groupname)
            self.groups[groupname] = group
        group.update(children=children, hosts=hosts, group_vars=group_vars)

    def add_resource(self, resource):
        '''Dispatch a Terraform resource to the handler for its type.'''
        rtype = resource.type()
        # Note: ansible_host_var also matches the "ansible_host" prefix,
        # and ansible_group_var the "ansible_group" prefix -- on purpose.
        if rtype.startswith("ansible_host"):
            self.add_host_resource(resource)
        elif rtype.startswith("ansible_group"):
            self.add_group_resource(resource)

    def to_dict(self):
        '''Build the inventory structure Ansible expects, as a dict.'''
        out = {"_meta": {"hostvars": {}}}
        hostvars = out["_meta"]["hostvars"]
        for hostname, host in self.hosts.items():
            host.build()
            # Register the host with every group it belongs to.
            for groupname in host.groups:
                self.update_groups(groupname, hosts=[host.hostname])
            hostvars[hostname] = host.get_vars()
        for groupname, group in self.groups.items():
            group.build()
            out[groupname] = group.to_dict()
        return out
class AnsibleHost(object):
    '''
    A single host entry in the Ansible inventory.
    '''

    def __init__(self, hostname, source=None):
        self.sources = []
        self.hostname = hostname
        self.groups = set(["all"])  # every host belongs to "all"
        self.host_vars = {}
        if source:
            self.add_source(source)

    def update(self, groups=None, host_vars=None):
        '''Merge additional groups and host variables into this host.'''
        if host_vars:
            self.host_vars.update(host_vars)
        if groups:
            self.groups.update(groups)

    def add_source(self, source):
        '''Register a Terraform resource contributing to this host.'''
        self.sources.append(source)

    def build(self):
        '''Fold all registered sources into groups/vars, lowest priority first.'''
        self.sources.sort(key=lambda src: src.priority())
        for src in self.sources:
            kind = src.type()
            if kind == "ansible_host":
                self.update(groups=src.read_list_attr("groups"),
                            host_vars=src.read_dict_attr("vars"))
            elif kind == "ansible_host_var":
                self.update(host_vars={src.read_attr("key"): src.read_attr("value")})
        # Freeze the group set into a stable, sorted list.
        self.groups = sorted(self.groups)

    def get_vars(self):
        '''Return a copy of the host's variable dictionary.'''
        return dict(self.host_vars)
class AnsibleGroup(object):
    '''
    A single group entry in the Ansible inventory.
    '''

    def __init__(self, groupname, source=None):
        self.groupname = groupname
        self.sources = []
        self.hosts = set()
        self.children = set()
        self.group_vars = {}
        if source:
            self.add_source(source)

    def update(self, children=None, hosts=None, group_vars=None):
        '''Merge additional children, hosts, or group variables.'''
        if hosts:
            self.hosts.update(hosts)
        if children:
            self.children.update(children)
        if group_vars:
            self.group_vars.update(group_vars)

    def add_source(self, source):
        '''Register a Terraform resource contributing to this group.'''
        self.sources.append(source)

    def build(self):
        '''Fold all registered sources into this group, lowest priority first.'''
        self.sources.sort(key=lambda src: src.priority())
        for src in self.sources:
            kind = src.type()
            if kind == "ansible_group":
                self.update(children=src.read_list_attr("children"),
                            group_vars=src.read_dict_attr("vars"))
            elif kind == "ansible_group_var":
                self.update(group_vars={src.read_attr("key"): src.read_attr("value")})
        # Freeze the sets into stable, sorted lists.
        self.hosts = sorted(self.hosts)
        self.children = sorted(self.children)

    def to_dict(self):
        '''Return the JSON-ready structure for this group.'''
        return {
            "children": list(self.children),
            "hosts": list(self.hosts),
            "vars": dict(self.group_vars)
        }
def _execute_shell():
    """Select the Terraform workspace and pull the current state.

    Returns the parsed state JSON; exits the process with status 1 on
    any Terraform failure.
    """
    tf_workspace = [TERRAFORM_PATH, 'workspace', 'select', TERRAFORM_WS_NAME]
    proc_ws = Popen(tf_workspace, cwd=TERRAFORM_DIR, stdout=PIPE,
                    stderr=PIPE, universal_newlines=True)
    _, err_ws = proc_ws.communicate()
    # FIX: judge success by exit code, not by "anything on stderr" --
    # terraform can write warnings to stderr while still succeeding,
    # which previously aborted the inventory run.
    if proc_ws.returncode != 0:
        sys.stderr.write(str(err_ws) + '\n')
        sys.exit(1)
    tf_command = [TERRAFORM_PATH, 'state', 'pull']
    proc_tf_cmd = Popen(tf_command, cwd=TERRAFORM_DIR,
                        stdout=PIPE, stderr=PIPE, universal_newlines=True)
    out_cmd, err_cmd = proc_tf_cmd.communicate()
    if proc_tf_cmd.returncode != 0:
        sys.stderr.write(str(err_cmd) + '\n')
        sys.exit(1)
    return json.loads(out_cmd)
def _backup_tf(tfstate):
# Crates a state backup in case we lose Consul
with open(TERRAFORM_BPK, 'w') as f:
f.write(json.dumps(tfstate.state_json))
def _backup_ansible(inventory):
# Crates a state backup in Ansible inventory format
text = '# NOTE: This file is generated by terraform.py\n'
text += '# For emergency use when Consul fails\n'
text += '[all]\n'
for hostname, host in sorted(inventory.hosts.items()):
text += (
'{0} hostname={0} ansible_host={1} '
).format(hostname, host.host_vars['ansible_host']) + (
'env={env} stage={stage} data_center={data_center} '+
'region={region} dns_entry={dns_entry}\n'
).format(**host.host_vars)
text += '\n'
for name, hosts in sorted(inventory.groups.items()):
if name in ['_meta', 'all']:
continue
text += '[{}]\n'.format(name)
for hostname in sorted(hosts.hosts):
text += '{}\n'.format(hostname)
text += '\n'
with open(ANSIBLE_BKP, 'w') as f:
f.write(text)
def _main():
    """Pull Terraform state, print the inventory JSON, write backups."""
    try:
        state = TerraformState(_execute_shell())
        inventory = AnsibleInventory()
        for resource in state.resources():
            if resource.is_ansible():
                inventory.add_resource(resource)
        sys.stdout.write(json.dumps(inventory.to_dict(), indent=2))
        # Keep emergency copies: the raw TF state and an INI inventory.
        _backup_tf(state)
        _backup_ansible(inventory)
    except Exception:
        # Any failure must surface on stderr and exit non-zero so
        # Ansible does not consume a partial inventory.
        traceback.print_exc(file=sys.stderr)
        sys.exit(1)


if __name__ == '__main__':
    _main()

16
ansible/upgrade.yml Normal file
View File

@ -0,0 +1,16 @@
# Rolling apt upgrade across all hosts, one host at a time (serial: 1).
- name: Update and upgrade apt packages
  gather_facts: false
  hosts: all
  serial: 1
  tasks:
    - name: Upgrade packages
      apt:
        upgrade: true
        update_cache: true
        cache_valid_time: 86400
    - name: Remove unused packages
      apt:
        autoremove: true
      register: packages
    # NOTE(review): only the autoremove task is registered, so the reboot
    # fires on autoremove changes, not when the upgrade itself changed
    # packages -- confirm this is intended.
    - reboot:
      when: packages.changed

71
ansible/versioncheck.py Executable file
View File

@ -0,0 +1,71 @@
#!/usr/bin/env python3
# WARNING: If importing this fails set PYTHONPATH.
import yaml
import ansible
import subprocess
from os import path, environ
from packaging import version
SCRIPT_DIR = path.dirname(path.realpath(__file__))
# Where Ansible looks for installed roles.
ANSIBLE_ROLES_PATH = path.join(environ['HOME'], '.ansible/roles')
class Role:
    """An Ansible role requirement and its locally installed checkout."""

    def __init__(self, name, version):
        self.name = name
        self.version = version

    @property
    def path(self):
        # Directory where ansible-galaxy installs this role.
        return path.join(ANSIBLE_ROLES_PATH, self.name)

    def exists(self):
        """True when the role's directory is present locally."""
        return path.isdir(self.path)

    def local_version(self):
        """Return the commit SHA of the installed role's git checkout."""
        result = subprocess.run(
            ['git', 'rev-parse', 'HEAD'],
            capture_output=True,
            cwd=self.path
        )
        result.check_returncode()
        return str(result.stdout.strip(), 'utf-8')
# Verify Ansible version is 2.8 or newer.
if version.parse(ansible.__version__) < version.parse("2.8"):
    print('Your Ansible version is lower than 2.8. Upgrade it.')
    exit(1)

# Read Ansible requirements file (the role pins checked below).
with open(path.join(SCRIPT_DIR, 'requirements.yml'), 'r') as f:
    requirements = yaml.load(f, Loader=yaml.FullLoader)

# Check if each Ansible role is installed and has correct version.
# The pin from requirements.yml is compared as a full string against the
# role checkout's HEAD SHA; [:8] below is display truncation only.
errors = 0
for req in requirements:
    role = Role(req['name'], req.get('version'))
    if not role.exists():
        print('%25s - MISSING!' % role.name)
        errors += 1
        continue
    # For now we allow not specifying versions for everything.
    if role.version is None:
        print('%25s - No version!' % role.name)
        continue
    local_version = role.local_version()
    if role.version != local_version:
        print('%25s - MISMATCH: %s != %s' %
              (role.name, role.version[:8], local_version[:8]))
        errors += 1
        continue
    print('%25s - VALID' % role.name)

# Any issue with any role should cause failure.
if errors > 0:
    exit(1)