rename shards fleet to status fleet

While also retaining the old domain names.

Signed-off-by: Jakub Sokołowski <jakub@status.im>
This commit is contained in:
Jakub Sokołowski 2024-07-03 21:47:54 +02:00
parent b1da421448
commit 040b9d4949
No known key found for this signature in database
GPG Key ID: FE65CD384D5BF7B4
7 changed files with 65 additions and 61 deletions

View File

@ -16,8 +16,8 @@ enrtree://AMOJVZX4V6EXP7NTJPMAYJYST2QP6AJXYW76IU6VGJS7UVSNDYZG4@boot.test.shards
enrtree://AMOJVZX4V6EXP7NTJPMAYJYST2QP6AJXYW76IU6VGJS7UVSNDYZG4@store.test.shards.nodes.status.im
```
```
enrtree://AI4W5N5IFEUIHF5LESUAOSMV6TKWF2MB6GU2YK7PU4TYUGUNOCEPW@boot.staging.shards.nodes.status.im
enrtree://AI4W5N5IFEUIHF5LESUAOSMV6TKWF2MB6GU2YK7PU4TYUGUNOCEPW@store.staging.shards.nodes.status.im
enrtree://AI4W5N5IFEUIHF5LESUAOSMV6TKWF2MB6GU2YK7PU4TYUGUNOCEPW@boot.staging.status.nodes.status.im
enrtree://AI4W5N5IFEUIHF5LESUAOSMV6TKWF2MB6GU2YK7PU4TYUGUNOCEPW@store.staging.status.nodes.status.im
```
# Continuous Integration

View File

@ -15,7 +15,7 @@ nim_waku_log_level: 'debug'
nim_waku_protocols_enabled: ['relay', 'filter', 'lightpush', 'peer-exchange']
nim_waku_disc_v5_enabled: true
nim_waku_dns4_domain_name: '{{ dns_entry }}'
nim_waku_node_key: '{{lookup("bitwarden", "fleets/shards/"+stage+"/nodekeys", field=hostname)}}'
nim_waku_node_key: '{{lookup("bitwarden", "fleets/status/"+stage+"/nodekeys", field=hostname)}}'
# Topic configuration
nim_waku_cluster_id: 16
@ -49,7 +49,7 @@ nim_waku_store_message_retention_policy: 'time:2592000' # 30 days
nim_waku_dns_disc_enabled: true
nim_waku_dns_disc_url_map:
test: 'enrtree://AMOJVZX4V6EXP7NTJPMAYJYST2QP6AJXYW76IU6VGJS7UVSNDYZG4@boot.test.shards.nodes.status.im'
staging: 'enrtree://AI4W5N5IFEUIHF5LESUAOSMV6TKWF2MB6GU2YK7PU4TYUGUNOCEPW@boot.staging.shards.nodes.status.im'
staging: 'enrtree://AI4W5N5IFEUIHF5LESUAOSMV6TKWF2MB6GU2YK7PU4TYUGUNOCEPW@boot.staging.status.nodes.status.im'
nim_waku_dns_disc_url: '{{ nim_waku_dns_disc_url_map[stage] }}'
# Websockets
@ -76,10 +76,12 @@ certbot_certs_map:
test:
- domains:
- '{{ nim_waku_websocket_domain }}'
- '{{ nim_waku_websocket_domain | replace("status."+stage, "shards."+stage) }}' # Legacy Fleet Name
- '{{ nim_waku_websocket_domain | replace("status.im", "statusim.net") }}' # Legacy Domain
staging:
- domains:
- '{{ nim_waku_websocket_domain }}'
- '{{ nim_waku_websocket_domain | replace("status."+stage, "shards."+stage) }}' # Legacy Fleet Name
# Open LibP2P Ports
open_ports_default_comment: '{{ nim_waku_cont_name }}'

View File

@ -3,7 +3,7 @@
bootstrap__active_extra_users:
- { name: ivan, uid: 8000, groups: ['docker', 'dockremap'], key: 'ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIJBdm8y1PfWjT1pioaWJSZ2ETrUySb+dS/ifDg+VIpLY ivansete@status.im' }
# Hourly rotation to avoid disk space issue
# Hourly rotation to avoid disk space issue
bootstrap__logrotate_frequency: 'hourly'
# Tag dependent on fleet: test
@ -15,7 +15,7 @@ nim_waku_log_level: 'debug'
nim_waku_protocols_enabled: ['relay', 'store']
nim_waku_disc_v5_enabled: true
nim_waku_dns4_domain_name: '{{ dns_entry }}'
nim_waku_node_key: '{{lookup("bitwarden", "fleets/shards/"+stage+"/nodekeys", field=hostname)}}'
nim_waku_node_key: '{{lookup("bitwarden", "fleets/status/"+stage+"/nodekeys", field=hostname)}}'
# Topic configuration
nim_waku_cluster_id: 16
@ -45,15 +45,15 @@ nim_waku_ip_colocation_limit: 100
# Store
nim_waku_store_message_db_name: 'nim-waku'
nim_waku_store_message_db_user: 'nim-waku'
nim_waku_store_message_db_pass: '{{lookup("bitwarden", "fleets/shards/"+stage+"/db/nim-waku")}}'
nim_waku_store_message_db_url: 'postgres://{{ nim_waku_store_message_db_user}}:{{ nim_waku_store_message_db_pass}}@store-db-01.{{ ansible_domain }}.wg:5432/{{nim_waku_store_message_db_name}}'
nim_waku_store_message_db_pass: '{{lookup("bitwarden", "fleets/status/"+stage+"/db/nim-waku")}}'
nim_waku_store_message_db_url: 'postgres://{{ nim_waku_store_message_db_user}}:{{ nim_waku_store_message_db_pass }}@store-db-01.{{ ansible_domain }}.wg:5432/{{nim_waku_store_message_db_name}}'
nim_waku_store_message_retention_policy: 'time:2592000' # 30 days
# DNS Discovery
nim_waku_dns_disc_enabled: true
nim_waku_dns_disc_url_map:
test: 'enrtree://AMOJVZX4V6EXP7NTJPMAYJYST2QP6AJXYW76IU6VGJS7UVSNDYZG4@boot.test.shards.nodes.status.im'
staging: 'enrtree://AI4W5N5IFEUIHF5LESUAOSMV6TKWF2MB6GU2YK7PU4TYUGUNOCEPW@boot.staging.shards.nodes.status.im'
staging: 'enrtree://AI4W5N5IFEUIHF5LESUAOSMV6TKWF2MB6GU2YK7PU4TYUGUNOCEPW@boot.staging.status.nodes.status.im'
nim_waku_dns_disc_url: '{{ nim_waku_dns_disc_url_map[stage] }}'
# Websockets
@ -82,10 +82,12 @@ certbot_certs_map:
test:
- domains:
- '{{ nim_waku_websocket_domain }}'
- '{{ nim_waku_websocket_domain | replace("status."+stage, "shards."+stage) }}' # Legacy Fleet Name
- '{{ nim_waku_websocket_domain | replace("status.im", "statusim.net") }}' # Legacy Domain
staging:
- domains:
- '{{ nim_waku_websocket_domain }}'
- '{{ nim_waku_websocket_domain | replace("status."+stage, "shards."+stage) }}' # Legacy Fleet Name
# Open LibP2P Ports
open_ports_default_comment: '{{ nim_waku_cont_name }}'

View File

@ -1,66 +1,66 @@
# NOTE: This file is generated by terraform.py
# For emergency use when Consul fails
[all]
boot-01.ac-cn-hongkong-c.shards.staging hostname=boot-01.ac-cn-hongkong-c.shards.staging ansible_host=47.76.168.186 env=shards stage=staging data_center=ac-cn-hongkong-c region=cn-hongkong-c dns_entry=boot-01.ac-cn-hongkong-c.shards.staging.status.im
boot-01.do-ams3.shards.staging hostname=boot-01.do-ams3.shards.staging ansible_host=143.198.250.233 env=shards stage=staging data_center=do-ams3 region=ams3 dns_entry=boot-01.do-ams3.shards.staging.status.im
boot-01.gc-us-central1-a.shards.staging hostname=boot-01.gc-us-central1-a.shards.staging ansible_host=104.197.5.96 env=shards stage=staging data_center=gc-us-central1-a region=us-central1-a dns_entry=boot-01.gc-us-central1-a.shards.staging.status.im
store-01.ac-cn-hongkong-c.shards.staging hostname=store-01.ac-cn-hongkong-c.shards.staging ansible_host=8.218.206.134 env=shards stage=staging data_center=ac-cn-hongkong-c region=cn-hongkong-c dns_entry=store-01.ac-cn-hongkong-c.shards.staging.status.im
store-01.do-ams3.shards.staging hostname=store-01.do-ams3.shards.staging ansible_host=24.144.78.119 env=shards stage=staging data_center=do-ams3 region=ams3 dns_entry=store-01.do-ams3.shards.staging.status.im
store-01.gc-us-central1-a.shards.staging hostname=store-01.gc-us-central1-a.shards.staging ansible_host=35.224.231.209 env=shards stage=staging data_center=gc-us-central1-a region=us-central1-a dns_entry=store-01.gc-us-central1-a.shards.staging.status.im
store-02.ac-cn-hongkong-c.shards.staging hostname=store-02.ac-cn-hongkong-c.shards.staging ansible_host=47.76.178.164 env=shards stage=staging data_center=ac-cn-hongkong-c region=cn-hongkong-c dns_entry=store-02.ac-cn-hongkong-c.shards.staging.status.im
store-02.do-ams3.shards.staging hostname=store-02.do-ams3.shards.staging ansible_host=24.144.78.120 env=shards stage=staging data_center=do-ams3 region=ams3 dns_entry=store-02.do-ams3.shards.staging.status.im
store-02.gc-us-central1-a.shards.staging hostname=store-02.gc-us-central1-a.shards.staging ansible_host=34.72.140.183 env=shards stage=staging data_center=gc-us-central1-a region=us-central1-a dns_entry=store-02.gc-us-central1-a.shards.staging.status.im
store-db-01.ac-cn-hongkong-c.shards.staging hostname=store-db-01.ac-cn-hongkong-c.shards.staging ansible_host=47.76.183.131 env=shards stage=staging data_center=ac-cn-hongkong-c region=cn-hongkong-c dns_entry=store-db-01.ac-cn-hongkong-c.shards.staging.status.im
store-db-01.do-ams3.shards.staging hostname=store-db-01.do-ams3.shards.staging ansible_host=24.144.78.121 env=shards stage=staging data_center=do-ams3 region=ams3 dns_entry=store-db-01.do-ams3.shards.staging.status.im
store-db-01.gc-us-central1-a.shards.staging hostname=store-db-01.gc-us-central1-a.shards.staging ansible_host=34.173.29.3 env=shards stage=staging data_center=gc-us-central1-a region=us-central1-a dns_entry=store-db-01.gc-us-central1-a.shards.staging.status.im
boot-01.ac-cn-hongkong-c.status.staging hostname=boot-01.ac-cn-hongkong-c.status.staging ansible_host=47.76.168.186 env=status stage=staging data_center=ac-cn-hongkong-c region=cn-hongkong-c dns_entry=boot-01.ac-cn-hongkong-c.status.staging.status.im
boot-01.do-ams3.status.staging hostname=boot-01.do-ams3.status.staging ansible_host=143.198.250.233 env=status stage=staging data_center=do-ams3 region=ams3 dns_entry=boot-01.do-ams3.status.staging.status.im
boot-01.gc-us-central1-a.status.staging hostname=boot-01.gc-us-central1-a.status.staging ansible_host=104.197.5.96 env=status stage=staging data_center=gc-us-central1-a region=us-central1-a dns_entry=boot-01.gc-us-central1-a.status.staging.status.im
store-01.ac-cn-hongkong-c.status.staging hostname=store-01.ac-cn-hongkong-c.status.staging ansible_host=8.218.206.134 env=status stage=staging data_center=ac-cn-hongkong-c region=cn-hongkong-c dns_entry=store-01.ac-cn-hongkong-c.status.staging.status.im
store-01.do-ams3.status.staging hostname=store-01.do-ams3.status.staging ansible_host=24.144.78.119 env=status stage=staging data_center=do-ams3 region=ams3 dns_entry=store-01.do-ams3.status.staging.status.im
store-01.gc-us-central1-a.status.staging hostname=store-01.gc-us-central1-a.status.staging ansible_host=35.224.231.209 env=status stage=staging data_center=gc-us-central1-a region=us-central1-a dns_entry=store-01.gc-us-central1-a.status.staging.status.im
store-02.ac-cn-hongkong-c.status.staging hostname=store-02.ac-cn-hongkong-c.status.staging ansible_host=47.76.178.164 env=status stage=staging data_center=ac-cn-hongkong-c region=cn-hongkong-c dns_entry=store-02.ac-cn-hongkong-c.status.staging.status.im
store-02.do-ams3.status.staging hostname=store-02.do-ams3.status.staging ansible_host=24.144.78.120 env=status stage=staging data_center=do-ams3 region=ams3 dns_entry=store-02.do-ams3.status.staging.status.im
store-02.gc-us-central1-a.status.staging hostname=store-02.gc-us-central1-a.status.staging ansible_host=34.72.140.183 env=status stage=staging data_center=gc-us-central1-a region=us-central1-a dns_entry=store-02.gc-us-central1-a.status.staging.status.im
store-db-01.ac-cn-hongkong-c.status.staging hostname=store-db-01.ac-cn-hongkong-c.status.staging ansible_host=47.76.183.131 env=status stage=staging data_center=ac-cn-hongkong-c region=cn-hongkong-c dns_entry=store-db-01.ac-cn-hongkong-c.status.staging.status.im
store-db-01.do-ams3.status.staging hostname=store-db-01.do-ams3.status.staging ansible_host=24.144.78.121 env=status stage=staging data_center=do-ams3 region=ams3 dns_entry=store-db-01.do-ams3.status.staging.status.im
store-db-01.gc-us-central1-a.status.staging hostname=store-db-01.gc-us-central1-a.status.staging ansible_host=34.173.29.3 env=status stage=staging data_center=gc-us-central1-a region=us-central1-a dns_entry=store-db-01.gc-us-central1-a.status.staging.status.im
[ac-cn-hongkong-c]
boot-01.ac-cn-hongkong-c.shards.staging
store-01.ac-cn-hongkong-c.shards.staging
store-02.ac-cn-hongkong-c.shards.staging
store-db-01.ac-cn-hongkong-c.shards.staging
boot-01.ac-cn-hongkong-c.status.staging
store-01.ac-cn-hongkong-c.status.staging
store-02.ac-cn-hongkong-c.status.staging
store-db-01.ac-cn-hongkong-c.status.staging
[boot]
boot-01.ac-cn-hongkong-c.shards.staging
boot-01.do-ams3.shards.staging
boot-01.gc-us-central1-a.shards.staging
boot-01.ac-cn-hongkong-c.status.staging
boot-01.do-ams3.status.staging
boot-01.gc-us-central1-a.status.staging
[do-ams3]
boot-01.do-ams3.shards.staging
store-01.do-ams3.shards.staging
store-02.do-ams3.shards.staging
store-db-01.do-ams3.shards.staging
boot-01.do-ams3.status.staging
store-01.do-ams3.status.staging
store-02.do-ams3.status.staging
store-db-01.do-ams3.status.staging
[gc-us-central1-a]
boot-01.gc-us-central1-a.shards.staging
store-01.gc-us-central1-a.shards.staging
store-02.gc-us-central1-a.shards.staging
store-db-01.gc-us-central1-a.shards.staging
boot-01.gc-us-central1-a.status.staging
store-01.gc-us-central1-a.status.staging
store-02.gc-us-central1-a.status.staging
store-db-01.gc-us-central1-a.status.staging
[shards.staging]
boot-01.ac-cn-hongkong-c.shards.staging
boot-01.do-ams3.shards.staging
boot-01.gc-us-central1-a.shards.staging
store-01.ac-cn-hongkong-c.shards.staging
store-01.do-ams3.shards.staging
store-01.gc-us-central1-a.shards.staging
store-02.ac-cn-hongkong-c.shards.staging
store-02.do-ams3.shards.staging
store-02.gc-us-central1-a.shards.staging
store-db-01.ac-cn-hongkong-c.shards.staging
store-db-01.do-ams3.shards.staging
store-db-01.gc-us-central1-a.shards.staging
[status.staging]
boot-01.ac-cn-hongkong-c.status.staging
boot-01.do-ams3.status.staging
boot-01.gc-us-central1-a.status.staging
store-01.ac-cn-hongkong-c.status.staging
store-01.do-ams3.status.staging
store-01.gc-us-central1-a.status.staging
store-02.ac-cn-hongkong-c.status.staging
store-02.do-ams3.status.staging
store-02.gc-us-central1-a.status.staging
store-db-01.ac-cn-hongkong-c.status.staging
store-db-01.do-ams3.status.staging
store-db-01.gc-us-central1-a.status.staging
[store]
store-01.ac-cn-hongkong-c.shards.staging
store-01.do-ams3.shards.staging
store-01.gc-us-central1-a.shards.staging
store-02.ac-cn-hongkong-c.shards.staging
store-02.do-ams3.shards.staging
store-02.gc-us-central1-a.shards.staging
store-01.ac-cn-hongkong-c.status.staging
store-01.do-ams3.status.staging
store-01.gc-us-central1-a.status.staging
store-02.ac-cn-hongkong-c.status.staging
store-02.do-ams3.status.staging
store-02.gc-us-central1-a.status.staging
[store-db]
store-db-01.ac-cn-hongkong-c.shards.staging
store-db-01.do-ams3.shards.staging
store-db-01.gc-us-central1-a.shards.staging
store-db-01.ac-cn-hongkong-c.status.staging
store-db-01.do-ams3.status.staging
store-db-01.gc-us-central1-a.status.staging

View File

@ -4,7 +4,7 @@ module "boot" {
/* node type */
name = "boot"
group = "boot"
env = "shards"
env = "status"
stage = terraform.workspace
/* scaling */

View File

@ -4,7 +4,7 @@ module "store" {
/* node type */
name = "store"
group = "store"
env = "shards"
env = "status"
stage = terraform.workspace
/* scaling */

View File

@ -4,7 +4,7 @@ module "store-db" {
/* node type */
name = "store-db"
group = "store-db"
env = "shards"
env = "status"
stage = terraform.workspace
/* scaling */