From 7df38c149d34c2d332ce54059a842885a3213980 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jakub=20Soko=C5=82owski?= Date: Wed, 24 Jul 2024 12:04:37 +0200 Subject: [PATCH] rename the shards.test fleet to status.prod MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit https://github.com/status-im/infra-shards/issues/33 Signed-off-by: Jakub SokoĊ‚owski --- README.md | 10 ++--- ansible/group_vars/boot.yml | 11 +++--- ansible/group_vars/store-db.yml | 4 +- ansible/group_vars/store.yml | 13 +++---- ansible/inventory/prod | 66 +++++++++++++++++++++++++++++++++ ansible/inventory/test | 66 --------------------------------- ansible/requirements.yml | 2 +- workspaces.tf | 2 +- 8 files changed, 86 insertions(+), 88 deletions(-) create mode 100644 ansible/inventory/prod delete mode 100644 ansible/inventory/test diff --git a/README.md b/README.md index f1d977b..252c923 100644 --- a/README.md +++ b/README.md @@ -1,6 +1,6 @@ # Description -This repo defines infrastructure for running [nim-waku](github.com/status-im/nim-waku) nodes shards. +This repo defines infrastructure for running [nim-waku](github.com/status-im/nim-waku) nodes. 
There are 3 kinds of nodes: @@ -12,8 +12,8 @@ There are 3 kinds of nodes: DNS `TXT` ENRTree records exist to discover available fleets: ``` -enrtree://AMOJVZX4V6EXP7NTJPMAYJYST2QP6AJXYW76IU6VGJS7UVSNDYZG4@boot.test.shards.nodes.status.im -enrtree://AMOJVZX4V6EXP7NTJPMAYJYST2QP6AJXYW76IU6VGJS7UVSNDYZG4@store.test.shards.nodes.status.im +enrtree://AMOJVZX4V6EXP7NTJPMAYJYST2QP6AJXYW76IU6VGJS7UVSNDYZG4@boot.prod.status.nodes.status.im +enrtree://AMOJVZX4V6EXP7NTJPMAYJYST2QP6AJXYW76IU6VGJS7UVSNDYZG4@store.prod.status.nodes.status.im ``` ``` enrtree://AI4W5N5IFEUIHF5LESUAOSMV6TKWF2MB6GU2YK7PU4TYUGUNOCEPW@boot.staging.status.nodes.status.im @@ -24,8 +24,8 @@ enrtree://AI4W5N5IFEUIHF5LESUAOSMV6TKWF2MB6GU2YK7PU4TYUGUNOCEPW@store.staging.st Jenkins CI builds can be used to build and deploy new Docker images: -* `shards.test` - https://ci.infra.status.im/job/nim-waku/job/deploy-shards-test/ -* `shards.staging` - https://ci.infra.status.im/job/nim-waku/job/deploy-shards-staging/ +* `status.staging` - https://ci.infra.status.im/job/nim-waku/job/deploy-status-staging/ +* `status.prod` - https://ci.infra.status.im/job/nim-waku/job/deploy-status-prod/ # Repo Usage diff --git a/ansible/group_vars/boot.yml b/ansible/group_vars/boot.yml index 9c6bf26..77c0422 100644 --- a/ansible/group_vars/boot.yml +++ b/ansible/group_vars/boot.yml @@ -6,7 +6,7 @@ bootstrap__active_extra_users: # Hourly rotation to avoid disk space issue bootstrap__logrotate_frequency: 'hourly' -# Tag dependent on fleet: test +# Tag dependent on fleet nim_waku_cont_tag: 'deploy-{{ env }}-{{ stage }}' nim_waku_cont_name: 'nim-waku-boot' nim_waku_cont_vol: '/docker/{{ nim_waku_cont_name }}' @@ -48,7 +48,7 @@ nim_waku_store_message_retention_policy: 'time:2592000' # 30 days # DNS Discovery nim_waku_dns_disc_enabled: true nim_waku_dns_disc_url_map: - test: 'enrtree://AMOJVZX4V6EXP7NTJPMAYJYST2QP6AJXYW76IU6VGJS7UVSNDYZG4@boot.test.shards.nodes.status.im' + prod: 
'enrtree://AMOJVZX4V6EXP7NTJPMAYJYST2QP6AJXYW76IU6VGJS7UVSNDYZG4@boot.prod.status.nodes.status.im' staging: 'enrtree://AI4W5N5IFEUIHF5LESUAOSMV6TKWF2MB6GU2YK7PU4TYUGUNOCEPW@boot.staging.status.nodes.status.im' nim_waku_dns_disc_url: '{{ nim_waku_dns_disc_url_map[stage] }}' @@ -73,15 +73,14 @@ certbot_containers_to_stop: ['{{ nim_waku_cont_name }}'] certbot_certs: '{{ certbot_certs_map[stage] }}' # FIXME: Remove once ENR records are updated without the domain. certbot_certs_map: - test: + prod: - domains: - '{{ nim_waku_websocket_domain }}' - - '{{ nim_waku_websocket_domain | replace("status."+stage, "shards."+stage) }}' # Legacy Fleet Name - - '{{ nim_waku_websocket_domain | replace("status.im", "statusim.net") }}' # Legacy Domain + - '{{ nim_waku_websocket_domain | replace("status."+stage, "shards.test") }}' # Legacy Fleet Name staging: - domains: - '{{ nim_waku_websocket_domain }}' - - '{{ nim_waku_websocket_domain | replace("status."+stage, "shards."+stage) }}' # Legacy Fleet Name + - '{{ nim_waku_websocket_domain | replace("status."+stage, "shards.staging") }}' # Legacy Fleet Name # Open LibP2P Ports open_ports_default_comment: '{{ nim_waku_cont_name }}' diff --git a/ansible/group_vars/store-db.yml b/ansible/group_vars/store-db.yml index 247409d..63210e1 100644 --- a/ansible/group_vars/store-db.yml +++ b/ansible/group_vars/store-db.yml @@ -11,7 +11,7 @@ postgres_ha_replica_enabled: false postgres_ha_replica_allowed_addresses: [] postgres_ha_admin_user: 'postgres' -postgres_ha_admin_pass: '{{lookup("bitwarden", "fleets/shards/"+stage+"/db/admin")}}' +postgres_ha_admin_pass: '{{lookup("bitwarden", "fleets/status/"+stage+"/db/admin")}}' # Disable backups since we have multiple DCs postgres_ha_backup: false @@ -20,7 +20,7 @@ postgres_ha_backup_enabled: false postgres_ha_databases: - name: 'nim-waku' user: 'nim-waku' - pass: '{{lookup("bitwarden", "fleets/shards/"+stage+"/db/nim-waku")}}' + pass: '{{lookup("bitwarden", "fleets/status/"+stage+"/db/nim-waku")}}' # 
Avoid exceeding volume size with WAL log. postgres_ha_alter_system_settings: diff --git a/ansible/group_vars/store.yml b/ansible/group_vars/store.yml index d358c9c..c0ad415 100644 --- a/ansible/group_vars/store.yml +++ b/ansible/group_vars/store.yml @@ -6,7 +6,7 @@ bootstrap__active_extra_users: # Hourly rotation to avoid disk space issue bootstrap__logrotate_frequency: 'hourly' -# Tag dependent on fleet: test +# Tag dependent on fleet nim_waku_cont_tag: 'deploy-{{ env }}-{{ stage }}' nim_waku_cont_name: 'nim-waku-store' nim_waku_cont_vol: '/docker/{{ nim_waku_cont_name }}' @@ -48,14 +48,14 @@ nim_waku_store_message_db_user: 'nim-waku' nim_waku_store_message_db_pass: '{{lookup("bitwarden", "fleets/status/"+stage+"/db/nim-waku")}}' nim_waku_store_message_db_url: 'postgres://{{ nim_waku_store_message_db_user}}:{{ nim_waku_store_message_db_pass }}@store-db-01.{{ ansible_domain }}.wg:5432/{{nim_waku_store_message_db_name}}' nim_waku_store_message_retention_policy_map: - test: 'size:250GB' + prod: 'size:250GB' staging: 'size:75GB' nim_waku_store_message_retention_policy: '{{ nim_waku_store_message_retention_policy_map[stage] }}' # DNS Discovery nim_waku_dns_disc_enabled: true nim_waku_dns_disc_url_map: - test: 'enrtree://AMOJVZX4V6EXP7NTJPMAYJYST2QP6AJXYW76IU6VGJS7UVSNDYZG4@boot.test.shards.nodes.status.im' + prod: 'enrtree://AMOJVZX4V6EXP7NTJPMAYJYST2QP6AJXYW76IU6VGJS7UVSNDYZG4@boot.prod.status.nodes.status.im' staging: 'enrtree://AI4W5N5IFEUIHF5LESUAOSMV6TKWF2MB6GU2YK7PU4TYUGUNOCEPW@boot.staging.status.nodes.status.im' nim_waku_dns_disc_url: '{{ nim_waku_dns_disc_url_map[stage] }}' @@ -82,15 +82,14 @@ certbot_containers_to_stop: ['{{ nim_waku_cont_name }}'] certbot_certs: '{{ certbot_certs_map[stage] }}' # FIXME: Remove once ENR records are updated without the domain. 
certbot_certs_map: - test: + prod: - domains: - '{{ nim_waku_websocket_domain }}' - - '{{ nim_waku_websocket_domain | replace("status."+stage, "shards."+stage) }}' # Legacy Fleet Name - - '{{ nim_waku_websocket_domain | replace("status.im", "statusim.net") }}' # Legacy Domain + - '{{ nim_waku_websocket_domain | replace("status."+stage, "shards.test") }}' # Legacy Fleet Name staging: - domains: - '{{ nim_waku_websocket_domain }}' - - '{{ nim_waku_websocket_domain | replace("status."+stage, "shards."+stage) }}' # Legacy Fleet Name + - '{{ nim_waku_websocket_domain | replace("status."+stage, "shards.staging") }}' # Legacy Fleet Name # Open LibP2P Ports open_ports_default_comment: '{{ nim_waku_cont_name }}' diff --git a/ansible/inventory/prod b/ansible/inventory/prod new file mode 100644 index 0000000..78b2a11 --- /dev/null +++ b/ansible/inventory/prod @@ -0,0 +1,66 @@ +# NOTE: This file is generated by terraform.py +# For emergency use when Consul fails +[all] +boot-01.ac-cn-hongkong-c.status.prod hostname=boot-01.ac-cn-hongkong-c.status.prod ansible_host=8.218.23.76 env=status stage=prod data_center=ac-cn-hongkong-c region=cn-hongkong-c dns_entry=boot-01.ac-cn-hongkong-c.status.prod.status.im +boot-01.do-ams3.status.prod hostname=boot-01.do-ams3.status.prod ansible_host=167.99.19.47 env=status stage=prod data_center=do-ams3 region=ams3 dns_entry=boot-01.do-ams3.status.prod.status.im +boot-01.gc-us-central1-a.status.prod hostname=boot-01.gc-us-central1-a.status.prod ansible_host=34.135.13.87 env=status stage=prod data_center=gc-us-central1-a region=us-central1-a dns_entry=boot-01.gc-us-central1-a.status.prod.status.im +store-01.ac-cn-hongkong-c.status.prod hostname=store-01.ac-cn-hongkong-c.status.prod ansible_host=8.218.74.73 env=status stage=prod data_center=ac-cn-hongkong-c region=cn-hongkong-c dns_entry=store-01.ac-cn-hongkong-c.status.prod.status.im +store-01.do-ams3.status.prod hostname=store-01.do-ams3.status.prod ansible_host=159.223.242.94 env=status 
stage=prod data_center=do-ams3 region=ams3 dns_entry=store-01.do-ams3.status.prod.status.im +store-01.gc-us-central1-a.status.prod hostname=store-01.gc-us-central1-a.status.prod ansible_host=34.170.192.39 env=status stage=prod data_center=gc-us-central1-a region=us-central1-a dns_entry=store-01.gc-us-central1-a.status.prod.status.im +store-02.ac-cn-hongkong-c.status.prod hostname=store-02.ac-cn-hongkong-c.status.prod ansible_host=8.218.121.232 env=status stage=prod data_center=ac-cn-hongkong-c region=cn-hongkong-c dns_entry=store-02.ac-cn-hongkong-c.status.prod.status.im +store-02.do-ams3.status.prod hostname=store-02.do-ams3.status.prod ansible_host=159.223.242.154 env=status stage=prod data_center=do-ams3 region=ams3 dns_entry=store-02.do-ams3.status.prod.status.im +store-02.gc-us-central1-a.status.prod hostname=store-02.gc-us-central1-a.status.prod ansible_host=34.170.154.2 env=status stage=prod data_center=gc-us-central1-a region=us-central1-a dns_entry=store-02.gc-us-central1-a.status.prod.status.im +store-db-01.ac-cn-hongkong-c.status.prod hostname=store-db-01.ac-cn-hongkong-c.status.prod ansible_host=47.243.139.240 env=status stage=prod data_center=ac-cn-hongkong-c region=cn-hongkong-c dns_entry=store-db-01.ac-cn-hongkong-c.status.prod.status.im +store-db-01.do-ams3.status.prod hostname=store-db-01.do-ams3.status.prod ansible_host=161.35.247.243 env=status stage=prod data_center=do-ams3 region=ams3 dns_entry=store-db-01.do-ams3.status.prod.status.im +store-db-01.gc-us-central1-a.status.prod hostname=store-db-01.gc-us-central1-a.status.prod ansible_host=34.71.57.210 env=status stage=prod data_center=gc-us-central1-a region=us-central1-a dns_entry=store-db-01.gc-us-central1-a.status.prod.status.im + +[ac-cn-hongkong-c] +boot-01.ac-cn-hongkong-c.status.prod +store-01.ac-cn-hongkong-c.status.prod +store-02.ac-cn-hongkong-c.status.prod +store-db-01.ac-cn-hongkong-c.status.prod + +[boot] +boot-01.ac-cn-hongkong-c.status.prod +boot-01.do-ams3.status.prod 
+boot-01.gc-us-central1-a.status.prod + +[do-ams3] +boot-01.do-ams3.status.prod +store-01.do-ams3.status.prod +store-02.do-ams3.status.prod +store-db-01.do-ams3.status.prod + +[gc-us-central1-a] +boot-01.gc-us-central1-a.status.prod +store-01.gc-us-central1-a.status.prod +store-02.gc-us-central1-a.status.prod +store-db-01.gc-us-central1-a.status.prod + +[status.prod] +boot-01.ac-cn-hongkong-c.status.prod +boot-01.do-ams3.status.prod +boot-01.gc-us-central1-a.status.prod +store-01.ac-cn-hongkong-c.status.prod +store-01.do-ams3.status.prod +store-01.gc-us-central1-a.status.prod +store-02.ac-cn-hongkong-c.status.prod +store-02.do-ams3.status.prod +store-02.gc-us-central1-a.status.prod +store-db-01.ac-cn-hongkong-c.status.prod +store-db-01.do-ams3.status.prod +store-db-01.gc-us-central1-a.status.prod + +[store] +store-01.ac-cn-hongkong-c.status.prod +store-01.do-ams3.status.prod +store-01.gc-us-central1-a.status.prod +store-02.ac-cn-hongkong-c.status.prod +store-02.do-ams3.status.prod +store-02.gc-us-central1-a.status.prod + +[store-db] +store-db-01.ac-cn-hongkong-c.status.prod +store-db-01.do-ams3.status.prod +store-db-01.gc-us-central1-a.status.prod + diff --git a/ansible/inventory/test b/ansible/inventory/test deleted file mode 100644 index 895b53c..0000000 --- a/ansible/inventory/test +++ /dev/null @@ -1,66 +0,0 @@ -# NOTE: This file is generated by terraform.py -# For emergency use when Consul fails -[all] -boot-01.ac-cn-hongkong-c.shards.test hostname=boot-01.ac-cn-hongkong-c.shards.test ansible_host=8.218.23.76 env=shards stage=test data_center=ac-cn-hongkong-c region=cn-hongkong-c dns_entry=boot-01.ac-cn-hongkong-c.shards.test.status.im -boot-01.do-ams3.shards.test hostname=boot-01.do-ams3.shards.test ansible_host=167.99.19.47 env=shards stage=test data_center=do-ams3 region=ams3 dns_entry=boot-01.do-ams3.shards.test.status.im -boot-01.gc-us-central1-a.shards.test hostname=boot-01.gc-us-central1-a.shards.test ansible_host=34.135.13.87 env=shards stage=test 
data_center=gc-us-central1-a region=us-central1-a dns_entry=boot-01.gc-us-central1-a.shards.test.status.im -store-01.ac-cn-hongkong-c.shards.test hostname=store-01.ac-cn-hongkong-c.shards.test ansible_host=8.218.74.73 env=shards stage=test data_center=ac-cn-hongkong-c region=cn-hongkong-c dns_entry=store-01.ac-cn-hongkong-c.shards.test.status.im -store-01.do-ams3.shards.test hostname=store-01.do-ams3.shards.test ansible_host=159.223.242.94 env=shards stage=test data_center=do-ams3 region=ams3 dns_entry=store-01.do-ams3.shards.test.status.im -store-01.gc-us-central1-a.shards.test hostname=store-01.gc-us-central1-a.shards.test ansible_host=34.170.192.39 env=shards stage=test data_center=gc-us-central1-a region=us-central1-a dns_entry=store-01.gc-us-central1-a.shards.test.status.im -store-02.ac-cn-hongkong-c.shards.test hostname=store-02.ac-cn-hongkong-c.shards.test ansible_host=8.218.121.232 env=shards stage=test data_center=ac-cn-hongkong-c region=cn-hongkong-c dns_entry=store-02.ac-cn-hongkong-c.shards.test.status.im -store-02.do-ams3.shards.test hostname=store-02.do-ams3.shards.test ansible_host=159.223.242.154 env=shards stage=test data_center=do-ams3 region=ams3 dns_entry=store-02.do-ams3.shards.test.status.im -store-02.gc-us-central1-a.shards.test hostname=store-02.gc-us-central1-a.shards.test ansible_host=34.170.154.2 env=shards stage=test data_center=gc-us-central1-a region=us-central1-a dns_entry=store-02.gc-us-central1-a.shards.test.status.im -store-db-01.ac-cn-hongkong-c.shards.test hostname=store-db-01.ac-cn-hongkong-c.shards.test ansible_host=47.243.139.240 env=shards stage=test data_center=ac-cn-hongkong-c region=cn-hongkong-c dns_entry=store-db-01.ac-cn-hongkong-c.shards.test.status.im -store-db-01.do-ams3.shards.test hostname=store-db-01.do-ams3.shards.test ansible_host=161.35.247.243 env=shards stage=test data_center=do-ams3 region=ams3 dns_entry=store-db-01.do-ams3.shards.test.status.im -store-db-01.gc-us-central1-a.shards.test 
hostname=store-db-01.gc-us-central1-a.shards.test ansible_host=34.71.57.210 env=shards stage=test data_center=gc-us-central1-a region=us-central1-a dns_entry=store-db-01.gc-us-central1-a.shards.test.status.im - -[ac-cn-hongkong-c] -boot-01.ac-cn-hongkong-c.shards.test -store-01.ac-cn-hongkong-c.shards.test -store-02.ac-cn-hongkong-c.shards.test -store-db-01.ac-cn-hongkong-c.shards.test - -[boot] -boot-01.ac-cn-hongkong-c.shards.test -boot-01.do-ams3.shards.test -boot-01.gc-us-central1-a.shards.test - -[do-ams3] -boot-01.do-ams3.shards.test -store-01.do-ams3.shards.test -store-02.do-ams3.shards.test -store-db-01.do-ams3.shards.test - -[gc-us-central1-a] -boot-01.gc-us-central1-a.shards.test -store-01.gc-us-central1-a.shards.test -store-02.gc-us-central1-a.shards.test -store-db-01.gc-us-central1-a.shards.test - -[shards.test] -boot-01.ac-cn-hongkong-c.shards.test -boot-01.do-ams3.shards.test -boot-01.gc-us-central1-a.shards.test -store-01.ac-cn-hongkong-c.shards.test -store-01.do-ams3.shards.test -store-01.gc-us-central1-a.shards.test -store-02.ac-cn-hongkong-c.shards.test -store-02.do-ams3.shards.test -store-02.gc-us-central1-a.shards.test -store-db-01.ac-cn-hongkong-c.shards.test -store-db-01.do-ams3.shards.test -store-db-01.gc-us-central1-a.shards.test - -[store] -store-01.ac-cn-hongkong-c.shards.test -store-01.do-ams3.shards.test -store-01.gc-us-central1-a.shards.test -store-02.ac-cn-hongkong-c.shards.test -store-02.do-ams3.shards.test -store-02.gc-us-central1-a.shards.test - -[store-db] -store-db-01.ac-cn-hongkong-c.shards.test -store-db-01.do-ams3.shards.test -store-db-01.gc-us-central1-a.shards.test - diff --git a/ansible/requirements.yml b/ansible/requirements.yml index d1a584e..75045ae 100644 --- a/ansible/requirements.yml +++ b/ansible/requirements.yml @@ -18,7 +18,7 @@ - name: infra-role-nim-waku src: git@github.com:status-im/infra-role-nim-waku.git - version: 0948fc8aa6c5738ca963d7faac2b203645d22afd + version: fe929f98309104ea610a80a2d2fac5a8fb330a21 - 
name: infra-role-certbot src: git@github.com:status-im/infra-role-certbot.git diff --git a/workspaces.tf b/workspaces.tf index 9eccae6..a1ab8a9 100644 --- a/workspaces.tf +++ b/workspaces.tf @@ -26,7 +26,7 @@ locals { } /* Settings specific to the test fleet/workspace. */ - test = { + prod = { db_do_type = "s-2vcpu-4gb" db_ac_type = "ecs.c6.large" db_gc_type = "c2d-highcpu-2"