rename shards fleet to status fleet

While also retaining the old domain names.

Signed-off-by: Jakub Sokołowski <jakub@status.im>

parent b1da421448
commit 040b9d4949

@@ -16,8 +16,8 @@ enrtree://AMOJVZX4V6EXP7NTJPMAYJYST2QP6AJXYW76IU6VGJS7UVSNDYZG4@boot.test.shards
 enrtree://AMOJVZX4V6EXP7NTJPMAYJYST2QP6AJXYW76IU6VGJS7UVSNDYZG4@store.test.shards.nodes.status.im
 ```
 ```
-enrtree://AI4W5N5IFEUIHF5LESUAOSMV6TKWF2MB6GU2YK7PU4TYUGUNOCEPW@boot.staging.shards.nodes.status.im
-enrtree://AI4W5N5IFEUIHF5LESUAOSMV6TKWF2MB6GU2YK7PU4TYUGUNOCEPW@store.staging.shards.nodes.status.im
+enrtree://AI4W5N5IFEUIHF5LESUAOSMV6TKWF2MB6GU2YK7PU4TYUGUNOCEPW@boot.staging.status.nodes.status.im
+enrtree://AI4W5N5IFEUIHF5LESUAOSMV6TKWF2MB6GU2YK7PU4TYUGUNOCEPW@store.staging.status.nodes.status.im
 ```

 # Continious Integration

@@ -15,7 +15,7 @@ nim_waku_log_level: 'debug'
 nim_waku_protocols_enabled: ['relay', 'filter', 'lightpush', 'peer-exchange']
 nim_waku_disc_v5_enabled: true
 nim_waku_dns4_domain_name: '{{ dns_entry }}'
-nim_waku_node_key: '{{lookup("bitwarden", "fleets/shards/"+stage+"/nodekeys", field=hostname)}}'
+nim_waku_node_key: '{{lookup("bitwarden", "fleets/status/"+stage+"/nodekeys", field=hostname)}}'

 # Topic configuration
 nim_waku_cluster_id: 16

@@ -49,7 +49,7 @@ nim_waku_store_message_retention_policy: 'time:2592000' # 30 days
 nim_waku_dns_disc_enabled: true
 nim_waku_dns_disc_url_map:
   test: 'enrtree://AMOJVZX4V6EXP7NTJPMAYJYST2QP6AJXYW76IU6VGJS7UVSNDYZG4@boot.test.shards.nodes.status.im'
-  staging: 'enrtree://AI4W5N5IFEUIHF5LESUAOSMV6TKWF2MB6GU2YK7PU4TYUGUNOCEPW@boot.staging.shards.nodes.status.im'
+  staging: 'enrtree://AI4W5N5IFEUIHF5LESUAOSMV6TKWF2MB6GU2YK7PU4TYUGUNOCEPW@boot.staging.status.nodes.status.im'
 nim_waku_dns_disc_url: '{{ nim_waku_dns_disc_url_map[stage] }}'

 # Websockets

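As context for the hunk above: the active DNS discovery enrtree URL is picked per stage by indexing `nim_waku_dns_disc_url_map` with the `stage` variable, and only the staging entry changes fleet name. A minimal sketch of that lookup outside Ansible, using plain Jinja2 with the URLs copied from the diff (illustrative only, not part of the role):

```python
from jinja2 import Template  # pip install jinja2

# enrtree URLs copied from the diff; only the staging entry changes fleet name.
dns_disc_url_map = {
    "test": "enrtree://AMOJVZX4V6EXP7NTJPMAYJYST2QP6AJXYW76IU6VGJS7UVSNDYZG4@boot.test.shards.nodes.status.im",
    "staging": "enrtree://AI4W5N5IFEUIHF5LESUAOSMV6TKWF2MB6GU2YK7PU4TYUGUNOCEPW@boot.staging.status.nodes.status.im",
}

# Same expression as nim_waku_dns_disc_url in the role defaults above.
url = Template("{{ nim_waku_dns_disc_url_map[stage] }}").render(
    nim_waku_dns_disc_url_map=dns_disc_url_map,
    stage="staging",
)
print(url)  # enrtree://...@boot.staging.status.nodes.status.im
```
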
@@ -76,10 +76,12 @@ certbot_certs_map:
   test:
     - domains:
       - '{{ nim_waku_websocket_domain }}'
+      - '{{ nim_waku_websocket_domain | replace("status."+stage, "shards."+stage) }}' # Legacy Fleet Name
       - '{{ nim_waku_websocket_domain | replace("status.im", "statusim.net") }}' # Legacy Domain
   staging:
     - domains:
       - '{{ nim_waku_websocket_domain }}'
+      - '{{ nim_waku_websocket_domain | replace("status."+stage, "shards."+stage) }}' # Legacy Fleet Name

 # Open LibP2P Ports
 open_ports_default_comment: '{{ nim_waku_cont_name }}'

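The two added `# Legacy Fleet Name` entries are what keep the old `shards.<stage>` hostnames on the certificates after the rename, alongside the existing `statusim.net` legacy-domain entry. A sketch of how those `replace` filters expand, assuming a hypothetical value for `nim_waku_websocket_domain` (the real value comes from the role and is not shown in this diff):

```python
from jinja2 import Template  # pip install jinja2

# Hypothetical websocket domain for illustration only; the actual variable is defined elsewhere.
ctx = {
    "nim_waku_websocket_domain": "boot-01.do-ams3.status.staging.status.im",
    "stage": "staging",
}

expressions = [
    '{{ nim_waku_websocket_domain }}',
    '{{ nim_waku_websocket_domain | replace("status."+stage, "shards."+stage) }}',  # Legacy Fleet Name
    '{{ nim_waku_websocket_domain | replace("status.im", "statusim.net") }}',       # Legacy Domain
]
for expr in expressions:
    print(Template(expr).render(**ctx))
# boot-01.do-ams3.status.staging.status.im
# boot-01.do-ams3.shards.staging.status.im
# boot-01.do-ams3.status.staging.statusim.net
```
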
@@ -3,7 +3,7 @@
 bootstrap__active_extra_users:
   - { name: ivan, uid: 8000, groups: ['docker', 'dockremap'], key: 'ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIJBdm8y1PfWjT1pioaWJSZ2ETrUySb+dS/ifDg+VIpLY ivansete@status.im' }

 # Hourly rotation to avoid disk space issue
 bootstrap__logrotate_frequency: 'hourly'

 # Tag dependent on fleet: test

@@ -15,7 +15,7 @@ nim_waku_log_level: 'debug'
 nim_waku_protocols_enabled: ['relay', 'store']
 nim_waku_disc_v5_enabled: true
 nim_waku_dns4_domain_name: '{{ dns_entry }}'
-nim_waku_node_key: '{{lookup("bitwarden", "fleets/shards/"+stage+"/nodekeys", field=hostname)}}'
+nim_waku_node_key: '{{lookup("bitwarden", "fleets/status/"+stage+"/nodekeys", field=hostname)}}'

 # Topic configuration
 nim_waku_cluster_id: 16

@@ -45,15 +45,15 @@ nim_waku_ip_colocation_limit: 100
 # Store
 nim_waku_store_message_db_name: 'nim-waku'
 nim_waku_store_message_db_user: 'nim-waku'
-nim_waku_store_message_db_pass: '{{lookup("bitwarden", "fleets/shards/"+stage+"/db/nim-waku")}}'
-nim_waku_store_message_db_url: 'postgres://{{ nim_waku_store_message_db_user}}:{{ nim_waku_store_message_db_pass}}@store-db-01.{{ ansible_domain }}.wg:5432/{{nim_waku_store_message_db_name}}'
+nim_waku_store_message_db_pass: '{{lookup("bitwarden", "fleets/status/"+stage+"/db/nim-waku")}}'
+nim_waku_store_message_db_url: 'postgres://{{ nim_waku_store_message_db_user}}:{{ nim_waku_store_message_db_pass }}@store-db-01.{{ ansible_domain }}.wg:5432/{{nim_waku_store_message_db_name}}'
 nim_waku_store_message_retention_policy: 'time:2592000' # 30 days

 # DNS Discovery
 nim_waku_dns_disc_enabled: true
 nim_waku_dns_disc_url_map:
   test: 'enrtree://AMOJVZX4V6EXP7NTJPMAYJYST2QP6AJXYW76IU6VGJS7UVSNDYZG4@boot.test.shards.nodes.status.im'
-  staging: 'enrtree://AI4W5N5IFEUIHF5LESUAOSMV6TKWF2MB6GU2YK7PU4TYUGUNOCEPW@boot.staging.shards.nodes.status.im'
+  staging: 'enrtree://AI4W5N5IFEUIHF5LESUAOSMV6TKWF2MB6GU2YK7PU4TYUGUNOCEPW@boot.staging.status.nodes.status.im'
 nim_waku_dns_disc_url: '{{ nim_waku_dns_disc_url_map[stage] }}'

 # Websockets

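Two things change in the store block above: the Bitwarden path moves from `fleets/shards/...` to `fleets/status/...`, and the spacing before `}}` in the password reference is tidied, which is cosmetic since whitespace inside Jinja2 delimiters does not affect the rendered value. A sketch of how the connection URL template expands, with placeholder credentials standing in for the Bitwarden lookup and a hypothetical `ansible_domain`:

```python
from jinja2 import Template  # pip install jinja2

db_url_tmpl = (
    "postgres://{{ nim_waku_store_message_db_user}}:{{ nim_waku_store_message_db_pass }}"
    "@store-db-01.{{ ansible_domain }}.wg:5432/{{nim_waku_store_message_db_name}}"
)

# Placeholder values; the real password comes from lookup("bitwarden", "fleets/status/"+stage+"/db/nim-waku").
print(Template(db_url_tmpl).render(
    nim_waku_store_message_db_user="nim-waku",
    nim_waku_store_message_db_pass="example-password",
    nim_waku_store_message_db_name="nim-waku",
    ansible_domain="do-ams3.status.staging",  # hypothetical, derived from the host's domain
))
# postgres://nim-waku:example-password@store-db-01.do-ams3.status.staging.wg:5432/nim-waku
```
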
@@ -82,10 +82,12 @@ certbot_certs_map:
   test:
     - domains:
       - '{{ nim_waku_websocket_domain }}'
+      - '{{ nim_waku_websocket_domain | replace("status."+stage, "shards."+stage) }}' # Legacy Fleet Name
       - '{{ nim_waku_websocket_domain | replace("status.im", "statusim.net") }}' # Legacy Domain
   staging:
     - domains:
       - '{{ nim_waku_websocket_domain }}'
+      - '{{ nim_waku_websocket_domain | replace("status."+stage, "shards."+stage) }}' # Legacy Fleet Name

 # Open LibP2P Ports
 open_ports_default_comment: '{{ nim_waku_cont_name }}'

@@ -1,66 +1,66 @@
 # NOTE: This file is generated by terraform.py
 # For emergency use when Consul fails
 [all]
-boot-01.ac-cn-hongkong-c.shards.staging hostname=boot-01.ac-cn-hongkong-c.shards.staging ansible_host=47.76.168.186 env=shards stage=staging data_center=ac-cn-hongkong-c region=cn-hongkong-c dns_entry=boot-01.ac-cn-hongkong-c.shards.staging.status.im
-boot-01.do-ams3.shards.staging hostname=boot-01.do-ams3.shards.staging ansible_host=143.198.250.233 env=shards stage=staging data_center=do-ams3 region=ams3 dns_entry=boot-01.do-ams3.shards.staging.status.im
-boot-01.gc-us-central1-a.shards.staging hostname=boot-01.gc-us-central1-a.shards.staging ansible_host=104.197.5.96 env=shards stage=staging data_center=gc-us-central1-a region=us-central1-a dns_entry=boot-01.gc-us-central1-a.shards.staging.status.im
-store-01.ac-cn-hongkong-c.shards.staging hostname=store-01.ac-cn-hongkong-c.shards.staging ansible_host=8.218.206.134 env=shards stage=staging data_center=ac-cn-hongkong-c region=cn-hongkong-c dns_entry=store-01.ac-cn-hongkong-c.shards.staging.status.im
-store-01.do-ams3.shards.staging hostname=store-01.do-ams3.shards.staging ansible_host=24.144.78.119 env=shards stage=staging data_center=do-ams3 region=ams3 dns_entry=store-01.do-ams3.shards.staging.status.im
-store-01.gc-us-central1-a.shards.staging hostname=store-01.gc-us-central1-a.shards.staging ansible_host=35.224.231.209 env=shards stage=staging data_center=gc-us-central1-a region=us-central1-a dns_entry=store-01.gc-us-central1-a.shards.staging.status.im
-store-02.ac-cn-hongkong-c.shards.staging hostname=store-02.ac-cn-hongkong-c.shards.staging ansible_host=47.76.178.164 env=shards stage=staging data_center=ac-cn-hongkong-c region=cn-hongkong-c dns_entry=store-02.ac-cn-hongkong-c.shards.staging.status.im
-store-02.do-ams3.shards.staging hostname=store-02.do-ams3.shards.staging ansible_host=24.144.78.120 env=shards stage=staging data_center=do-ams3 region=ams3 dns_entry=store-02.do-ams3.shards.staging.status.im
-store-02.gc-us-central1-a.shards.staging hostname=store-02.gc-us-central1-a.shards.staging ansible_host=34.72.140.183 env=shards stage=staging data_center=gc-us-central1-a region=us-central1-a dns_entry=store-02.gc-us-central1-a.shards.staging.status.im
-store-db-01.ac-cn-hongkong-c.shards.staging hostname=store-db-01.ac-cn-hongkong-c.shards.staging ansible_host=47.76.183.131 env=shards stage=staging data_center=ac-cn-hongkong-c region=cn-hongkong-c dns_entry=store-db-01.ac-cn-hongkong-c.shards.staging.status.im
-store-db-01.do-ams3.shards.staging hostname=store-db-01.do-ams3.shards.staging ansible_host=24.144.78.121 env=shards stage=staging data_center=do-ams3 region=ams3 dns_entry=store-db-01.do-ams3.shards.staging.status.im
-store-db-01.gc-us-central1-a.shards.staging hostname=store-db-01.gc-us-central1-a.shards.staging ansible_host=34.173.29.3 env=shards stage=staging data_center=gc-us-central1-a region=us-central1-a dns_entry=store-db-01.gc-us-central1-a.shards.staging.status.im
+boot-01.ac-cn-hongkong-c.status.staging hostname=boot-01.ac-cn-hongkong-c.status.staging ansible_host=47.76.168.186 env=status stage=staging data_center=ac-cn-hongkong-c region=cn-hongkong-c dns_entry=boot-01.ac-cn-hongkong-c.status.staging.status.im
+boot-01.do-ams3.status.staging hostname=boot-01.do-ams3.status.staging ansible_host=143.198.250.233 env=status stage=staging data_center=do-ams3 region=ams3 dns_entry=boot-01.do-ams3.status.staging.status.im
+boot-01.gc-us-central1-a.status.staging hostname=boot-01.gc-us-central1-a.status.staging ansible_host=104.197.5.96 env=status stage=staging data_center=gc-us-central1-a region=us-central1-a dns_entry=boot-01.gc-us-central1-a.status.staging.status.im
+store-01.ac-cn-hongkong-c.status.staging hostname=store-01.ac-cn-hongkong-c.status.staging ansible_host=8.218.206.134 env=status stage=staging data_center=ac-cn-hongkong-c region=cn-hongkong-c dns_entry=store-01.ac-cn-hongkong-c.status.staging.status.im
+store-01.do-ams3.status.staging hostname=store-01.do-ams3.status.staging ansible_host=24.144.78.119 env=status stage=staging data_center=do-ams3 region=ams3 dns_entry=store-01.do-ams3.status.staging.status.im
+store-01.gc-us-central1-a.status.staging hostname=store-01.gc-us-central1-a.status.staging ansible_host=35.224.231.209 env=status stage=staging data_center=gc-us-central1-a region=us-central1-a dns_entry=store-01.gc-us-central1-a.status.staging.status.im
+store-02.ac-cn-hongkong-c.status.staging hostname=store-02.ac-cn-hongkong-c.status.staging ansible_host=47.76.178.164 env=status stage=staging data_center=ac-cn-hongkong-c region=cn-hongkong-c dns_entry=store-02.ac-cn-hongkong-c.status.staging.status.im
+store-02.do-ams3.status.staging hostname=store-02.do-ams3.status.staging ansible_host=24.144.78.120 env=status stage=staging data_center=do-ams3 region=ams3 dns_entry=store-02.do-ams3.status.staging.status.im
+store-02.gc-us-central1-a.status.staging hostname=store-02.gc-us-central1-a.status.staging ansible_host=34.72.140.183 env=status stage=staging data_center=gc-us-central1-a region=us-central1-a dns_entry=store-02.gc-us-central1-a.status.staging.status.im
+store-db-01.ac-cn-hongkong-c.status.staging hostname=store-db-01.ac-cn-hongkong-c.status.staging ansible_host=47.76.183.131 env=status stage=staging data_center=ac-cn-hongkong-c region=cn-hongkong-c dns_entry=store-db-01.ac-cn-hongkong-c.status.staging.status.im
+store-db-01.do-ams3.status.staging hostname=store-db-01.do-ams3.status.staging ansible_host=24.144.78.121 env=status stage=staging data_center=do-ams3 region=ams3 dns_entry=store-db-01.do-ams3.status.staging.status.im
+store-db-01.gc-us-central1-a.status.staging hostname=store-db-01.gc-us-central1-a.status.staging ansible_host=34.173.29.3 env=status stage=staging data_center=gc-us-central1-a region=us-central1-a dns_entry=store-db-01.gc-us-central1-a.status.staging.status.im

 [ac-cn-hongkong-c]
-boot-01.ac-cn-hongkong-c.shards.staging
-store-01.ac-cn-hongkong-c.shards.staging
-store-02.ac-cn-hongkong-c.shards.staging
-store-db-01.ac-cn-hongkong-c.shards.staging
+boot-01.ac-cn-hongkong-c.status.staging
+store-01.ac-cn-hongkong-c.status.staging
+store-02.ac-cn-hongkong-c.status.staging
+store-db-01.ac-cn-hongkong-c.status.staging

 [boot]
-boot-01.ac-cn-hongkong-c.shards.staging
-boot-01.do-ams3.shards.staging
-boot-01.gc-us-central1-a.shards.staging
+boot-01.ac-cn-hongkong-c.status.staging
+boot-01.do-ams3.status.staging
+boot-01.gc-us-central1-a.status.staging

 [do-ams3]
-boot-01.do-ams3.shards.staging
-store-01.do-ams3.shards.staging
-store-02.do-ams3.shards.staging
-store-db-01.do-ams3.shards.staging
+boot-01.do-ams3.status.staging
+store-01.do-ams3.status.staging
+store-02.do-ams3.status.staging
+store-db-01.do-ams3.status.staging

 [gc-us-central1-a]
-boot-01.gc-us-central1-a.shards.staging
-store-01.gc-us-central1-a.shards.staging
-store-02.gc-us-central1-a.shards.staging
-store-db-01.gc-us-central1-a.shards.staging
+boot-01.gc-us-central1-a.status.staging
+store-01.gc-us-central1-a.status.staging
+store-02.gc-us-central1-a.status.staging
+store-db-01.gc-us-central1-a.status.staging

-[shards.staging]
-boot-01.ac-cn-hongkong-c.shards.staging
-boot-01.do-ams3.shards.staging
-boot-01.gc-us-central1-a.shards.staging
-store-01.ac-cn-hongkong-c.shards.staging
-store-01.do-ams3.shards.staging
-store-01.gc-us-central1-a.shards.staging
-store-02.ac-cn-hongkong-c.shards.staging
-store-02.do-ams3.shards.staging
-store-02.gc-us-central1-a.shards.staging
-store-db-01.ac-cn-hongkong-c.shards.staging
-store-db-01.do-ams3.shards.staging
-store-db-01.gc-us-central1-a.shards.staging
+[status.staging]
+boot-01.ac-cn-hongkong-c.status.staging
+boot-01.do-ams3.status.staging
+boot-01.gc-us-central1-a.status.staging
+store-01.ac-cn-hongkong-c.status.staging
+store-01.do-ams3.status.staging
+store-01.gc-us-central1-a.status.staging
+store-02.ac-cn-hongkong-c.status.staging
+store-02.do-ams3.status.staging
+store-02.gc-us-central1-a.status.staging
+store-db-01.ac-cn-hongkong-c.status.staging
+store-db-01.do-ams3.status.staging
+store-db-01.gc-us-central1-a.status.staging

 [store]
-store-01.ac-cn-hongkong-c.shards.staging
-store-01.do-ams3.shards.staging
-store-01.gc-us-central1-a.shards.staging
-store-02.ac-cn-hongkong-c.shards.staging
-store-02.do-ams3.shards.staging
-store-02.gc-us-central1-a.shards.staging
+store-01.ac-cn-hongkong-c.status.staging
+store-01.do-ams3.status.staging
+store-01.gc-us-central1-a.status.staging
+store-02.ac-cn-hongkong-c.status.staging
+store-02.do-ams3.status.staging
+store-02.gc-us-central1-a.status.staging

 [store-db]
-store-db-01.ac-cn-hongkong-c.shards.staging
-store-db-01.do-ams3.shards.staging
-store-db-01.gc-us-central1-a.shards.staging
+store-db-01.ac-cn-hongkong-c.status.staging
+store-db-01.do-ams3.status.staging
+store-db-01.gc-us-central1-a.status.staging

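The inventory above is generated by terraform.py, so the rename shows up mechanically in every hostname, group name, `env` value, and `dns_entry`. A rough sketch of the naming pattern visible in those lines (a hypothetical helper for illustration, not the actual generator):

```python
# Hypothetical reconstruction of the naming pattern seen in the generated inventory.
def host_entry(role: str, index: int, data_center: str,
               env: str = "status", stage: str = "staging") -> str:
    name = f"{role}-{index:02d}.{data_center}.{env}.{stage}"
    dns_entry = f"{name}.status.im"
    return (f"{name} hostname={name} env={env} stage={stage} "
            f"data_center={data_center} dns_entry={dns_entry}")

print(host_entry("boot", 1, "do-ams3"))
# boot-01.do-ams3.status.staging hostname=boot-01.do-ams3.status.staging env=status stage=staging data_center=do-ams3 dns_entry=boot-01.do-ams3.status.staging.status.im
```
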
@@ -4,7 +4,7 @@ module "boot" {
   /* node type */
   name = "boot"
   group = "boot"
-  env = "shards"
+  env = "status"
   stage = terraform.workspace

   /* scaling */

@@ -4,7 +4,7 @@ module "store" {
   /* node type */
   name = "store"
   group = "store"
-  env = "shards"
+  env = "status"
   stage = terraform.workspace

   /* scaling */

@@ -4,7 +4,7 @@ module "store-db" {
   /* node type */
   name = "store-db"
   group = "store-db"
-  env = "shards"
+  env = "status"
   stage = terraform.workspace

   /* scaling */