rename the shards.test fleet to status.prod

https://github.com/status-im/infra-shards/issues/33

Signed-off-by: Jakub Sokołowski <jakub@status.im>
Jakub Sokołowski 2024-07-24 12:04:37 +02:00
parent 55b31f42f5
commit 7df38c149d
8 changed files with 86 additions and 88 deletions


@@ -1,6 +1,6 @@
 # Description
-This repo defines infrastructure for running [nim-waku](github.com/status-im/nim-waku) nodes shards.
+This repo defines infrastructure for running [nim-waku](github.com/status-im/nim-waku) nodes.
 There are 3 kinds of nodes:
@@ -12,8 +12,8 @@ There are 3 kinds of nodes:
 DNS `TXT` ENRTree records exist to discover available fleets:
 ```
-enrtree://AMOJVZX4V6EXP7NTJPMAYJYST2QP6AJXYW76IU6VGJS7UVSNDYZG4@boot.test.shards.nodes.status.im
-enrtree://AMOJVZX4V6EXP7NTJPMAYJYST2QP6AJXYW76IU6VGJS7UVSNDYZG4@store.test.shards.nodes.status.im
+enrtree://AMOJVZX4V6EXP7NTJPMAYJYST2QP6AJXYW76IU6VGJS7UVSNDYZG4@boot.test.status.nodes.status.im
+enrtree://AMOJVZX4V6EXP7NTJPMAYJYST2QP6AJXYW76IU6VGJS7UVSNDYZG4@store.prod.status.nodes.status.im
 ```
 ```
 enrtree://AI4W5N5IFEUIHF5LESUAOSMV6TKWF2MB6GU2YK7PU4TYUGUNOCEPW@boot.staging.status.nodes.status.im
@@ -24,8 +24,8 @@ enrtree://AI4W5N5IFEUIHF5LESUAOSMV6TKWF2MB6GU2YK7PU4TYUGUNOCEPW@store.staging.st
 Jenkins CI builds can be used to build and deploy new Docker images:
-* `shards.test` - https://ci.infra.status.im/job/nim-waku/job/deploy-shards-test/
-* `shards.staging` - https://ci.infra.status.im/job/nim-waku/job/deploy-shards-staging/
+* `status.staging` - https://ci.infra.status.im/job/nim-waku/job/deploy-status-staging/
+* `status.prod` - https://ci.infra.status.im/job/nim-waku/job/deploy-status-test/
 # Repo Usage

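The README hunks above swap the ENRTree discovery hostnames from `*.shards.nodes.status.im` to `*.status.nodes.status.im`. A minimal sketch for sanity-checking that one of the renamed `TXT` records resolves; it assumes the `community.general` collection (backed by `dnspython`) is installed on the controller and is not part of this commit:

```yaml
# Hypothetical check, not in this commit: resolve one of the renamed
# ENRTree TXT records to confirm DNS is in place after the rename.
- name: Verify fleet discovery DNS records
  hosts: localhost
  gather_facts: false
  tasks:
    - name: Resolve the renamed boot fleet ENRTree TXT record
      ansible.builtin.debug:
        msg: "{{ lookup('community.general.dig', 'boot.test.status.nodes.status.im', qtype='TXT') }}"
```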

@@ -6,7 +6,7 @@ bootstrap__active_extra_users:
 # Hourly rotation to avoid disk space issue
 bootstrap__logrotate_frequency: 'hourly'
-# Tag dependent on fleet: test
+# Tag dependent on fleet
 nim_waku_cont_tag: 'deploy-{{ env }}-{{ stage }}'
 nim_waku_cont_name: 'nim-waku-boot'
 nim_waku_cont_vol: '/docker/{{ nim_waku_cont_name }}'
@@ -48,7 +48,7 @@ nim_waku_store_message_retention_policy: 'time:2592000' # 30 days
 # DNS Discovery
 nim_waku_dns_disc_enabled: true
 nim_waku_dns_disc_url_map:
-  test: 'enrtree://AMOJVZX4V6EXP7NTJPMAYJYST2QP6AJXYW76IU6VGJS7UVSNDYZG4@boot.test.shards.nodes.status.im'
+  prod: 'enrtree://AMOJVZX4V6EXP7NTJPMAYJYST2QP6AJXYW76IU6VGJS7UVSNDYZG4@boot.test.status.nodes.status.im'
   staging: 'enrtree://AI4W5N5IFEUIHF5LESUAOSMV6TKWF2MB6GU2YK7PU4TYUGUNOCEPW@boot.staging.status.nodes.status.im'
 nim_waku_dns_disc_url: '{{ nim_waku_dns_disc_url_map[stage] }}'
@@ -73,15 +73,14 @@ certbot_containers_to_stop: ['{{ nim_waku_cont_name }}']
 certbot_certs: '{{ certbot_certs_map[stage] }}'
 # FIXME: Remove once ENR records are updated without the domain.
 certbot_certs_map:
-  test:
+  prod:
   - domains:
     - '{{ nim_waku_websocket_domain }}'
-    - '{{ nim_waku_websocket_domain | replace("status."+stage, "shards."+stage) }}' # Legacy Fleet Name
-    - '{{ nim_waku_websocket_domain | replace("status.im", "statusim.net") }}' # Legacy Domain
+    - '{{ nim_waku_websocket_domain | replace("status."+stage, "shards.test") }}' # Legacy Fleet Name
   staging:
   - domains:
     - '{{ nim_waku_websocket_domain }}'
-    - '{{ nim_waku_websocket_domain | replace("status."+stage, "shards."+stage) }}' # Legacy Fleet Name
+    - '{{ nim_waku_websocket_domain | replace("status."+stage, "shards.staging") }}' # Legacy Fleet Name
 # Open LibP2P Ports
 open_ports_default_comment: '{{ nim_waku_cont_name }}'

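The `certbot_certs_map` change above keeps the old `shards.test` hostname on the certificate as a legacy SAN while the primary domain moves to `status.prod`. A sketch of what the `replace()` filter renders to for `stage: prod`; the websocket domain value below is an illustrative assumption, not taken from the repo:

```yaml
# Hypothetical debug play, not in this commit: shows the primary and
# legacy certificate domains produced by the replace() filter above.
- name: Render primary and legacy certificate domains
  hosts: localhost
  gather_facts: false
  vars:
    stage: 'prod'
    nim_waku_websocket_domain: 'boot-01.do-ams3.status.prod.status.im'  # assumed example value
  tasks:
    - name: Show the resulting SAN list
      ansible.builtin.debug:
        msg:
          - "{{ nim_waku_websocket_domain }}"                                              # status.prod name
          - "{{ nim_waku_websocket_domain | replace('status.' + stage, 'shards.test') }}"  # legacy shards.test name
```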

@@ -11,7 +11,7 @@ postgres_ha_replica_enabled: false
 postgres_ha_replica_allowed_addresses: []
 postgres_ha_admin_user: 'postgres'
-postgres_ha_admin_pass: '{{lookup("bitwarden", "fleets/shards/"+stage+"/db/admin")}}'
+postgres_ha_admin_pass: '{{lookup("bitwarden", "fleets/status/"+stage+"/db/admin")}}'
 # Disable backups since we have multiple DCs
 postgres_ha_backup: false
@@ -20,7 +20,7 @@ postgres_ha_backup_enabled: false
 postgres_ha_databases:
   - name: 'nim-waku'
     user: 'nim-waku'
-    pass: '{{lookup("bitwarden", "fleets/shards/"+stage+"/db/nim-waku")}}'
+    pass: '{{lookup("bitwarden", "fleets/status/"+stage+"/db/nim-waku")}}'
 # Avoid exceeding volume size with WAL log.
 postgres_ha_alter_system_settings:

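The two `pass` changes above move the Bitwarden lookups from the `fleets/shards/...` hierarchy to `fleets/status/...`, and the `stage` component changes from `test` to `prod`, so the database secrets presumably had to be created under the new paths before this could deploy. A small illustrative comparison (not part of the commit):

```yaml
# Hypothetical play, not in this commit: spells out the old and new
# Bitwarden secret paths implied by the lookup() changes above.
- name: Compare old and new Bitwarden secret paths
  hosts: localhost
  gather_facts: false
  vars:
    stage: 'prod'
    old_admin_path: 'fleets/shards/test/db/admin'         # env=shards, stage=test (before)
    new_admin_path: "fleets/status/{{ stage }}/db/admin"  # env=status, stage=prod (after)
  tasks:
    - name: Show the secret path migration
      ansible.builtin.debug:
        msg: "{{ old_admin_path }} -> {{ new_admin_path }}"
```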

@@ -6,7 +6,7 @@ bootstrap__active_extra_users:
 # Hourly rotation to avoid disk space issue
 bootstrap__logrotate_frequency: 'hourly'
-# Tag dependent on fleet: test
+# Tag dependent on fleet
 nim_waku_cont_tag: 'deploy-{{ env }}-{{ stage }}'
 nim_waku_cont_name: 'nim-waku-store'
 nim_waku_cont_vol: '/docker/{{ nim_waku_cont_name }}'
@@ -48,14 +48,14 @@ nim_waku_store_message_db_user: 'nim-waku'
 nim_waku_store_message_db_pass: '{{lookup("bitwarden", "fleets/status/"+stage+"/db/nim-waku")}}'
 nim_waku_store_message_db_url: 'postgres://{{ nim_waku_store_message_db_user}}:{{ nim_waku_store_message_db_pass }}@store-db-01.{{ ansible_domain }}.wg:5432/{{nim_waku_store_message_db_name}}'
 nim_waku_store_message_retention_policy_map:
-  test: 'size:250GB'
+  prod: 'size:250GB'
   staging: 'size:75GB'
 nim_waku_store_message_retention_policy: '{{ nim_waku_store_message_retention_policy_map[stage] }}'
 # DNS Discovery
 nim_waku_dns_disc_enabled: true
 nim_waku_dns_disc_url_map:
-  test: 'enrtree://AMOJVZX4V6EXP7NTJPMAYJYST2QP6AJXYW76IU6VGJS7UVSNDYZG4@boot.test.shards.nodes.status.im'
+  prod: 'enrtree://AMOJVZX4V6EXP7NTJPMAYJYST2QP6AJXYW76IU6VGJS7UVSNDYZG4@boot.test.status.nodes.status.im'
   staging: 'enrtree://AI4W5N5IFEUIHF5LESUAOSMV6TKWF2MB6GU2YK7PU4TYUGUNOCEPW@boot.staging.status.nodes.status.im'
 nim_waku_dns_disc_url: '{{ nim_waku_dns_disc_url_map[stage] }}'
@@ -82,15 +82,14 @@ certbot_containers_to_stop: ['{{ nim_waku_cont_name }}']
 certbot_certs: '{{ certbot_certs_map[stage] }}'
 # FIXME: Remove once ENR records are updated without the domain.
 certbot_certs_map:
-  test:
+  prod:
   - domains:
     - '{{ nim_waku_websocket_domain }}'
-    - '{{ nim_waku_websocket_domain | replace("status."+stage, "shards."+stage) }}' # Legacy Fleet Name
-    - '{{ nim_waku_websocket_domain | replace("status.im", "statusim.net") }}' # Legacy Domain
+    - '{{ nim_waku_websocket_domain | replace("status."+stage, "shards.test") }}' # Legacy Fleet Name
   staging:
   - domains:
     - '{{ nim_waku_websocket_domain }}'
-    - '{{ nim_waku_websocket_domain | replace("status."+stage, "shards."+stage) }}' # Legacy Fleet Name
+    - '{{ nim_waku_websocket_domain | replace("status."+stage, "shards.staging") }}' # Legacy Fleet Name
 # Open LibP2P Ports
 open_ports_default_comment: '{{ nim_waku_cont_name }}'

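The store hunk above also shows the templated Postgres URL, which addresses the database host over what appears to be the WireGuard mesh (the `.wg` suffix on `ansible_domain`). A sketch of what it renders to on a prod store node; the password and domain values are placeholders (the real password comes from the Bitwarden lookup, and `ansible_domain` is normally a gathered fact):

```yaml
# Hypothetical render, not in this commit: evaluates the db_url template
# from the group_vars above with placeholder values.
- name: Render the nim-waku store database URL
  hosts: localhost
  gather_facts: false
  vars:
    nim_waku_store_message_db_name: 'nim-waku'
    nim_waku_store_message_db_user: 'nim-waku'
    nim_waku_store_message_db_pass: 'example-password'   # placeholder; real value is a Bitwarden lookup
    ansible_domain: 'do-ams3.status.prod'                # placeholder; normally a gathered fact
  tasks:
    - name: Show the rendered connection string
      ansible.builtin.debug:
        msg: "postgres://{{ nim_waku_store_message_db_user }}:{{ nim_waku_store_message_db_pass }}@store-db-01.{{ ansible_domain }}.wg:5432/{{ nim_waku_store_message_db_name }}"
```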
ansible/inventory/prod (new file, 66 lines)

@@ -0,0 +1,66 @@
+# NOTE: This file is generated by terraform.py
+# For emergency use when Consul fails
+
+[all]
+boot-01.ac-cn-hongkong-c.status.prod hostname=boot-01.ac-cn-hongkong-c.status.prod ansible_host=8.218.23.76 env=status stage=prod data_center=ac-cn-hongkong-c region=cn-hongkong-c dns_entry=boot-01.ac-cn-hongkong-c.status.prod.status.im
+boot-01.do-ams3.status.prod hostname=boot-01.do-ams3.status.prod ansible_host=167.99.19.47 env=status stage=prod data_center=do-ams3 region=ams3 dns_entry=boot-01.do-ams3.status.prod.status.im
+boot-01.gc-us-central1-a.status.prod hostname=boot-01.gc-us-central1-a.status.prod ansible_host=34.135.13.87 env=status stage=prod data_center=gc-us-central1-a region=us-central1-a dns_entry=boot-01.gc-us-central1-a.status.prod.status.im
+store-01.ac-cn-hongkong-c.status.prod hostname=store-01.ac-cn-hongkong-c.status.prod ansible_host=8.218.74.73 env=status stage=prod data_center=ac-cn-hongkong-c region=cn-hongkong-c dns_entry=store-01.ac-cn-hongkong-c.status.prod.status.im
+store-01.do-ams3.status.prod hostname=store-01.do-ams3.status.prod ansible_host=159.223.242.94 env=status stage=prod data_center=do-ams3 region=ams3 dns_entry=store-01.do-ams3.status.prod.status.im
+store-01.gc-us-central1-a.status.prod hostname=store-01.gc-us-central1-a.status.prod ansible_host=34.170.192.39 env=status stage=prod data_center=gc-us-central1-a region=us-central1-a dns_entry=store-01.gc-us-central1-a.status.prod.status.im
+store-02.ac-cn-hongkong-c.status.prod hostname=store-02.ac-cn-hongkong-c.status.prod ansible_host=8.218.121.232 env=status stage=prod data_center=ac-cn-hongkong-c region=cn-hongkong-c dns_entry=store-02.ac-cn-hongkong-c.status.prod.status.im
+store-02.do-ams3.status.prod hostname=store-02.do-ams3.status.prod ansible_host=159.223.242.154 env=status stage=prod data_center=do-ams3 region=ams3 dns_entry=store-02.do-ams3.status.prod.status.im
+store-02.gc-us-central1-a.status.prod hostname=store-02.gc-us-central1-a.status.prod ansible_host=34.170.154.2 env=status stage=prod data_center=gc-us-central1-a region=us-central1-a dns_entry=store-02.gc-us-central1-a.status.prod.status.im
+store-db-01.ac-cn-hongkong-c.status.prod hostname=store-db-01.ac-cn-hongkong-c.status.prod ansible_host=47.243.139.240 env=status stage=prod data_center=ac-cn-hongkong-c region=cn-hongkong-c dns_entry=store-db-01.ac-cn-hongkong-c.status.prod.status.im
+store-db-01.do-ams3.status.prod hostname=store-db-01.do-ams3.status.prod ansible_host=161.35.247.243 env=status stage=prod data_center=do-ams3 region=ams3 dns_entry=store-db-01.do-ams3.status.prod.status.im
+store-db-01.gc-us-central1-a.status.prod hostname=store-db-01.gc-us-central1-a.status.prod ansible_host=34.71.57.210 env=status stage=prod data_center=gc-us-central1-a region=us-central1-a dns_entry=store-db-01.gc-us-central1-a.status.prod.status.im
+
+[ac-cn-hongkong-c]
+boot-01.ac-cn-hongkong-c.status.prod
+store-01.ac-cn-hongkong-c.status.prod
+store-02.ac-cn-hongkong-c.status.prod
+store-db-01.ac-cn-hongkong-c.status.prod
+
+[boot]
+boot-01.ac-cn-hongkong-c.status.prod
+boot-01.do-ams3.status.prod
+boot-01.gc-us-central1-a.status.prod
+
+[do-ams3]
+boot-01.do-ams3.status.prod
+store-01.do-ams3.status.prod
+store-02.do-ams3.status.prod
+store-db-01.do-ams3.status.prod
+
+[gc-us-central1-a]
+boot-01.gc-us-central1-a.status.prod
+store-01.gc-us-central1-a.status.prod
+store-02.gc-us-central1-a.status.prod
+store-db-01.gc-us-central1-a.status.prod
+
+[status.prod]
+boot-01.ac-cn-hongkong-c.status.prod
+boot-01.do-ams3.status.prod
+boot-01.gc-us-central1-a.status.prod
+store-01.ac-cn-hongkong-c.status.prod
+store-01.do-ams3.status.prod
+store-01.gc-us-central1-a.status.prod
+store-02.ac-cn-hongkong-c.status.prod
+store-02.do-ams3.status.prod
+store-02.gc-us-central1-a.status.prod
+store-db-01.ac-cn-hongkong-c.status.prod
+store-db-01.do-ams3.status.prod
+store-db-01.gc-us-central1-a.status.prod
+
+[store]
+store-01.ac-cn-hongkong-c.status.prod
+store-01.do-ams3.status.prod
+store-01.gc-us-central1-a.status.prod
+store-02.ac-cn-hongkong-c.status.prod
+store-02.do-ams3.status.prod
+store-02.gc-us-central1-a.status.prod
+
+[store-db]
+store-db-01.ac-cn-hongkong-c.status.prod
+store-db-01.do-ams3.status.prod
+store-db-01.gc-us-central1-a.status.prod

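The generated inventory's header notes it exists "for emergency use when Consul fails". A minimal sketch of that path: a throwaway playbook (the `ping.yml` name is hypothetical) run against the static file, targeting the new `[status.prod]` group:

```yaml
# Hypothetical emergency check, not in this commit. Run with:
#   ansible-playbook -i ansible/inventory/prod ping.yml
- name: Reach the status.prod fleet without Consul
  hosts: status.prod
  gather_facts: false
  tasks:
    - name: Check connectivity to every node in the fleet
      ansible.builtin.ping:
```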

@@ -1,66 +0,0 @@
-# NOTE: This file is generated by terraform.py
-# For emergency use when Consul fails
-
-[all]
-boot-01.ac-cn-hongkong-c.shards.test hostname=boot-01.ac-cn-hongkong-c.shards.test ansible_host=8.218.23.76 env=shards stage=test data_center=ac-cn-hongkong-c region=cn-hongkong-c dns_entry=boot-01.ac-cn-hongkong-c.shards.test.status.im
-boot-01.do-ams3.shards.test hostname=boot-01.do-ams3.shards.test ansible_host=167.99.19.47 env=shards stage=test data_center=do-ams3 region=ams3 dns_entry=boot-01.do-ams3.shards.test.status.im
-boot-01.gc-us-central1-a.shards.test hostname=boot-01.gc-us-central1-a.shards.test ansible_host=34.135.13.87 env=shards stage=test data_center=gc-us-central1-a region=us-central1-a dns_entry=boot-01.gc-us-central1-a.shards.test.status.im
-store-01.ac-cn-hongkong-c.shards.test hostname=store-01.ac-cn-hongkong-c.shards.test ansible_host=8.218.74.73 env=shards stage=test data_center=ac-cn-hongkong-c region=cn-hongkong-c dns_entry=store-01.ac-cn-hongkong-c.shards.test.status.im
-store-01.do-ams3.shards.test hostname=store-01.do-ams3.shards.test ansible_host=159.223.242.94 env=shards stage=test data_center=do-ams3 region=ams3 dns_entry=store-01.do-ams3.shards.test.status.im
-store-01.gc-us-central1-a.shards.test hostname=store-01.gc-us-central1-a.shards.test ansible_host=34.170.192.39 env=shards stage=test data_center=gc-us-central1-a region=us-central1-a dns_entry=store-01.gc-us-central1-a.shards.test.status.im
-store-02.ac-cn-hongkong-c.shards.test hostname=store-02.ac-cn-hongkong-c.shards.test ansible_host=8.218.121.232 env=shards stage=test data_center=ac-cn-hongkong-c region=cn-hongkong-c dns_entry=store-02.ac-cn-hongkong-c.shards.test.status.im
-store-02.do-ams3.shards.test hostname=store-02.do-ams3.shards.test ansible_host=159.223.242.154 env=shards stage=test data_center=do-ams3 region=ams3 dns_entry=store-02.do-ams3.shards.test.status.im
-store-02.gc-us-central1-a.shards.test hostname=store-02.gc-us-central1-a.shards.test ansible_host=34.170.154.2 env=shards stage=test data_center=gc-us-central1-a region=us-central1-a dns_entry=store-02.gc-us-central1-a.shards.test.status.im
-store-db-01.ac-cn-hongkong-c.shards.test hostname=store-db-01.ac-cn-hongkong-c.shards.test ansible_host=47.243.139.240 env=shards stage=test data_center=ac-cn-hongkong-c region=cn-hongkong-c dns_entry=store-db-01.ac-cn-hongkong-c.shards.test.status.im
-store-db-01.do-ams3.shards.test hostname=store-db-01.do-ams3.shards.test ansible_host=161.35.247.243 env=shards stage=test data_center=do-ams3 region=ams3 dns_entry=store-db-01.do-ams3.shards.test.status.im
-store-db-01.gc-us-central1-a.shards.test hostname=store-db-01.gc-us-central1-a.shards.test ansible_host=34.71.57.210 env=shards stage=test data_center=gc-us-central1-a region=us-central1-a dns_entry=store-db-01.gc-us-central1-a.shards.test.status.im
-
-[ac-cn-hongkong-c]
-boot-01.ac-cn-hongkong-c.shards.test
-store-01.ac-cn-hongkong-c.shards.test
-store-02.ac-cn-hongkong-c.shards.test
-store-db-01.ac-cn-hongkong-c.shards.test
-
-[boot]
-boot-01.ac-cn-hongkong-c.shards.test
-boot-01.do-ams3.shards.test
-boot-01.gc-us-central1-a.shards.test
-
-[do-ams3]
-boot-01.do-ams3.shards.test
-store-01.do-ams3.shards.test
-store-02.do-ams3.shards.test
-store-db-01.do-ams3.shards.test
-
-[gc-us-central1-a]
-boot-01.gc-us-central1-a.shards.test
-store-01.gc-us-central1-a.shards.test
-store-02.gc-us-central1-a.shards.test
-store-db-01.gc-us-central1-a.shards.test
-
-[shards.test]
-boot-01.ac-cn-hongkong-c.shards.test
-boot-01.do-ams3.shards.test
-boot-01.gc-us-central1-a.shards.test
-store-01.ac-cn-hongkong-c.shards.test
-store-01.do-ams3.shards.test
-store-01.gc-us-central1-a.shards.test
-store-02.ac-cn-hongkong-c.shards.test
-store-02.do-ams3.shards.test
-store-02.gc-us-central1-a.shards.test
-store-db-01.ac-cn-hongkong-c.shards.test
-store-db-01.do-ams3.shards.test
-store-db-01.gc-us-central1-a.shards.test
-
-[store]
-store-01.ac-cn-hongkong-c.shards.test
-store-01.do-ams3.shards.test
-store-01.gc-us-central1-a.shards.test
-store-02.ac-cn-hongkong-c.shards.test
-store-02.do-ams3.shards.test
-store-02.gc-us-central1-a.shards.test
-
-[store-db]
-store-db-01.ac-cn-hongkong-c.shards.test
-store-db-01.do-ams3.shards.test
-store-db-01.gc-us-central1-a.shards.test


@@ -18,7 +18,7 @@
 - name: infra-role-nim-waku
   src: git@github.com:status-im/infra-role-nim-waku.git
-  version: 0948fc8aa6c5738ca963d7faac2b203645d22afd
+  version: fe929f98309104ea610a80a2d2fac5a8fb330a21
 - name: infra-role-certbot
   src: git@github.com:status-im/infra-role-certbot.git


@@ -26,7 +26,7 @@ locals {
   }
   /* Settings specific to the test fleet/workspace. */
-  test = {
+  prod = {
     db_do_type = "s-2vcpu-4gb"
     db_ac_type = "ecs.c6.large"
     db_gc_type = "c2d-highcpu-2"