---
# CloudFlare Origin certificates
origin_certs:
  - domain: 'status.im'
    crt: '{{lookup("bitwarden", "Cloudflare/status.im", file="origin.crt")}}'
    key: '{{lookup("bitwarden", "Cloudflare/status.im", file="origin.key")}}'
    default: true

# Syncing can use a lot of memory
swap_file_path: '/main.swap'
swap_file_size_mb: 2048

# ElasticSearch Cluster
es_service_name: 'elasticsearch'
es_service_path: '/docker/{{ es_service_name }}'
es_cluster_name: 'nimbus-logs-search'
es_docker_network_name: '{{ es_service_name }}'
es_master_nodes: |
  {{ ansible_play_hosts
     | map('extract', hostvars) | list
     | json_query(
       '[].{ name: hostname, addr: ansible_local.wireguard.vpn_ip, port: to_string(es_node_port) }'
     ) }}
es_image: 'elasticsearch:7.14.2'
es_api_port: 9200
es_node_port: 9300
es_exp_cont_port: 9114
# Since Logstash stores 1 index per day this is fine.
# See: https://www.elastic.co/blog/how-many-shards-should-i-have-in-my-elasticsearch-cluster
es_number_of_shards: 3
# Since Nimbus logs are low-value we don't need replicas.
es_number_of_replicas: 0
# JVM memory settings
es_jvm_mem_ratio: 0.8
es_jvm_mem: '{{ (ansible_memtotal_mb * es_jvm_mem_ratio) | round(0, "floor") }}'
es_jvm_min_heap: '{{ es_jvm_mem | int }}m'
es_jvm_max_heap: '{{ es_jvm_mem | int }}m'

# ElasticSearch HQ
es_hq_cont_enabled: true
es_hq_cont_port: 5000

# OAuth access
oauth_service_name: '{{ es_hq_cont_name }}-oauth'
oauth_service_path: '{{ es_service_path }}/oauth'
oauth_domain: 'nimbus-es.infra.status.im'
oauth_cont_networks: ['{{ es_docker_network_name }}']
oauth_upstream_addr: 'hq'
oauth_upstream_port: '{{ es_hq_cont_port }}'
oauth_local_port: 4180
oauth_provider: 'github'
oauth_id: '{{ lookup("bitwarden", "nimbus/elastic-hq/oauth", field="client-id") }}'
oauth_secret: '{{ lookup("bitwarden", "nimbus/elastic-hq/oauth", field="secret") }}'
oauth_cookie_secret: '{{ lookup("bitwarden", "nimbus/elastic-hq/oauth", field="cookie-secret") }}'

# Open Ports
open_ports_default_comment: 'ElasticSearch'
open_ports_default_chain: 'VPN'
open_ports_list:
  - { port: 80,                       chain: 'SERVICES' }
  - { port: 443,                      chain: 'SERVICES' }
  - { port: '{{ es_api_port }}',      ipset: 'logs.nimbus' }
  - { port: '{{ es_api_port }}',      ipset: 'dash.nimbus' }
  - { port: '{{ es_api_port }}',      ipset: 'log-aggr.hq' }
  - { port: '{{ es_node_port }}',     ipset: 'logs.nimbus' }
  - { port: '{{ es_node_port }}',     ipset: 'dash.nimbus' }
  - { port: '{{ es_exp_cont_port }}', ipset: 'metrics.hq' }

# Proxy for ES HQ
nginx_sites:
  elasticsearch_hq_http:
    - listen 80
    - server_name {{ oauth_domain }}
    - return 302 https://$server_name$request_uri
  elasticsearch_hq_https:
    - listen 443 ssl
    - server_name {{ oauth_domain }}
    - ssl_certificate     /certs/status.im/origin.crt
    - ssl_certificate_key /certs/status.im/origin.key
    - |
      location / {
        proxy_set_header Host $host;
        proxy_pass http://127.0.0.1:{{ oauth_local_port }}/;
      }
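
# For reference, a rough sketch of what es_master_nodes above is expected to
# render to: the JMESPath query projects each play host into an object with its
# hostname, WireGuard VPN IP, and transport port. Hostnames and IPs below are
# made-up examples, not real inventory values:
#
#   [{'name': 'node-01.nimbus-logs.example', 'addr': '10.10.0.11', 'port': '9300'},
#    {'name': 'node-02.nimbus-logs.example', 'addr': '10.10.0.12', 'port': '9300'},
#    {'name': 'node-03.nimbus-logs.example', 'addr': '10.10.0.13', 'port': '9300'}]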
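#
# JVM heap sizing example (host size is hypothetical): with
# ansible_memtotal_mb = 4096 and es_jvm_mem_ratio = 0.8,
# es_jvm_mem = floor(4096 * 0.8) = 3276, so both heap settings render as '3276m'.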
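#
# A hedged sketch of the nginx config the nginx_sites entries are expected to
# produce, assuming the nginx role turns each list item into a directive inside
# its own server block (oauth_local_port = 4180):
#
#   server {
#     listen 80;
#     server_name nimbus-es.infra.status.im;
#     return 302 https://$server_name$request_uri;
#   }
#   server {
#     listen 443 ssl;
#     server_name nimbus-es.infra.status.im;
#     ssl_certificate     /certs/status.im/origin.crt;
#     ssl_certificate_key /certs/status.im/origin.key;
#     location / {
#       proxy_set_header Host $host;
#       proxy_pass http://127.0.0.1:4180/;
#     }
#   }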