[35mDEBUG [0m tests.conftest:conftest.py:51 Running fixture setup: test_id
[35mDEBUG [0m tests.conftest:conftest.py:57 Running test: test_filter_3_senders_multiple_msg_1_receiver with id: 2025-12-14_04-15-13__bf0e8d90-b670-45a0-8136-bd589ac1e9f1
[35mDEBUG [0m src.steps.common:common.py:19 Running fixture setup: common_setup
[35mDEBUG [0m src.steps.filter:filter.py:28 Running fixture setup: filter_setup
[35mDEBUG [0m src.steps.light_push:light_push.py:28 Running fixture setup: light_push_setup
[35mDEBUG [0m src.node.docker_mananger:docker_mananger.py:19 Docker client initialized with image wakuorg/nwaku:latest
[35mDEBUG [0m src.node.waku_node:waku_node.py:86 WakuNode instance initialized with log path ./log/docker/node1_2025-12-14_04-15-13__bf0e8d90-b670-45a0-8136-bd589ac1e9f1__wakuorg_nwaku:latest.log
[35mDEBUG [0m src.node.docker_mananger:docker_mananger.py:19 Docker client initialized with image wakuorg/nwaku:latest
[35mDEBUG [0m src.node.waku_node:waku_node.py:86 WakuNode instance initialized with log path ./log/docker/node2_2025-12-14_04-15-13__bf0e8d90-b670-45a0-8136-bd589ac1e9f1__wakuorg_nwaku:latest.log
[35mDEBUG [0m src.node.docker_mananger:docker_mananger.py:19 Docker client initialized with image wakuorg/nwaku:latest
[35mDEBUG [0m src.node.waku_node:waku_node.py:86 WakuNode instance initialized with log path ./log/docker/node3_2025-12-14_04-15-13__bf0e8d90-b670-45a0-8136-bd589ac1e9f1__wakuorg_nwaku:latest.log
[35mDEBUG [0m src.steps.relay:relay.py:28 Running fixture setup: relay_setup
[35mDEBUG [0m src.steps.store:store.py:31 Running fixture setup: store_setup
[35mDEBUG [0m src.node.docker_mananger:docker_mananger.py:19 Docker client initialized with image wakuorg/nwaku:latest
[35mDEBUG [0m src.node.waku_node:waku_node.py:86 WakuNode instance initialized with log path ./log/docker/node4_2025-12-14_04-15-13__bf0e8d90-b670-45a0-8136-bd589ac1e9f1__wakuorg_nwaku:latest.log
[35mDEBUG [0m src.node.docker_mananger:docker_mananger.py:19 Docker client initialized with image wakuorg/nwaku:latest
[35mDEBUG [0m src.node.waku_node:waku_node.py:86 WakuNode instance initialized with log path ./log/docker/node5_2025-12-14_04-15-13__bf0e8d90-b670-45a0-8136-bd589ac1e9f1__wakuorg_nwaku:latest.log
[35mDEBUG [0m tests.e2e.test_e2e:test_e2e.py:231 Start 5 nodes
[35mDEBUG [0m src.node.waku_node:waku_node.py:90 Starting Node...
[35mDEBUG [0m src.node.docker_mananger:docker_mananger.py:22 Attempting to create or retrieve network waku
[35mDEBUG [0m src.node.docker_mananger:docker_mananger.py:25 Network waku already exists
[35mDEBUG [0m src.node.docker_mananger:docker_mananger.py:108 Generated random external IP 172.18.125.11
[35mDEBUG [0m src.node.docker_mananger:docker_mananger.py:101 Generated ports ['21072', '21073', '21074', '21075', '21076']
[35mDEBUG [0m src.node.waku_node:waku_node.py:439 RLN credentials were not set
[32mINFO [0m src.node.waku_node:waku_node.py:176 RLN credentials not set or credential store not available, starting without RLN
[35mDEBUG [0m src.node.waku_node:waku_node.py:178 Using volumes []
[35mDEBUG [0m src.node.docker_mananger:docker_mananger.py:49 docker run -i -t -p 21072:21072 -p 21073:21073 -p 21074:21074 -p 21075:21075 -p 21076:21076 wakuorg/nwaku:latest --listen-address=0.0.0.0 --rest=true --rest-admin=true --websocket-support=true --log-level=TRACE --rest-relay-cache-capacity=100 --websocket-port=21074 --rest-port=21072 --tcp-port=21073 --discv5-udp-port=21075 --rest-address=0.0.0.0 --nat=extip:172.18.125.11 --peer-exchange=true --discv5-discovery=true --cluster-id=3 --nodekey=e4ea4fe5af1bec4ef0cbcaa5fe86d4ce00468c4bf14fbce878875e38abfcb3e8 --shard=0 --metrics-server=true --metrics-server-address=0.0.0.0 --metrics-server-port=21076 --metrics-logging=true --relay=true --store=false
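Note: the docker run line above is the complete container invocation for node1. The "Docker client initialized" lines suggest the harness drives Docker from Python, so an equivalent launch through the Docker SDK for Python would look roughly like the sketch below, trimmed to a few representative flags (hypothetical, not the harness's actual code):

    import docker  # Docker SDK for Python (docker-py), assumed available

    client = docker.from_env()
    container = client.containers.run(
        "wakuorg/nwaku:latest",
        command=[
            "--listen-address=0.0.0.0",
            "--rest=true",
            "--rest-port=21072",
            "--tcp-port=21073",
            "--cluster-id=3",
            "--shard=0",
            "--relay=true",
        ],
        ports={"21072/tcp": 21072, "21073/tcp": 21073},
        detach=True,
    )
    print(container.short_id)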
[35mDEBUG [0m src.node.docker_mananger:docker_mananger.py:55 docker network connect --ip 172.18.125.11 waku 4e8b2f69e4234b5054868d7ccebaa34d34aa42e0bd084951629e3a0aa17b633f
[31m[1mERROR [0m src.node.docker_mananger:docker_mananger.py:89 Max retries reached for container f325cbd1910b. Exiting log stream.
[35mDEBUG [0m src.node.docker_mananger:docker_mananger.py:58 Container started with ID 4e8b2f69e423. Setting up logs at ./log/docker/node1_2025-12-14_04-15-13__bf0e8d90-b670-45a0-8136-bd589ac1e9f1__wakuorg_nwaku:latest.log
[35mDEBUG [0m src.node.waku_node:waku_node.py:190 Started container from image wakuorg/nwaku:latest. REST: 21072
[35mDEBUG [0m src.libs.common:common.py:47 Sleeping for 1 seconds
[31m[1mERROR [0m src.node.docker_mananger:docker_mananger.py:89 Max retries reached for container 378b5e496450. Exiting log stream.
[32mINFO [0m src.node.api_clients.base_client:base_client.py:37 curl -v -X GET "http://127.0.0.1:21072/health" -H "Content-Type: application/json" -d 'None'
[32mINFO [0m src.node.api_clients.base_client:base_client.py:22 Response status code: 200. Response content: b'{"nodeHealth":"READY","protocolsHealth":[{"Relay":"NOT_READY","desc":"No connected peers"},{"Rln Relay":"NOT_MOUNTED"},{"Lightpush":"NOT_MOUNTED"},{"Legacy Lightpush":"NOT_MOUNTED"},{"Filter":"NOT_MOUNTED"},{"Store":"NOT_MOUNTED"},{"Legacy Store":"NOT_MOUNTED"},{"Peer Exchange":"READY"},{"Rendezvous":"NOT_READY","desc":"No Rendezvous peers are available yet"},{"Mix":"NOT_MOUNTED"},{"Lightpush Client":"NOT_READY","desc":"No Lightpush service peer available yet"},{"Legacy Lightpush Client":"NOT_READY","desc":"No Lightpush service peer available yet"},{"Store Client":"NOT_READY","desc":"No Store service peer available yet, neither Store service set up for the node"},{"Legacy Store Client":"NOT_READY","desc":"No Legacy Store service peers are available yet, neither Store service set up for the node"},{"Filter Client":"NOT_READY","desc":"No Filter service peer available yet"}]}'
[32mINFO [0m src.node.waku_node:waku_node.py:287 Node protocols are initialized !!
[32mINFO [0m src.node.api_clients.base_client:base_client.py:37 curl -v -X GET "http://127.0.0.1:21072/debug/v1/info" -H "Content-Type: application/json" -d 'None'
[32mINFO [0m src.node.api_clients.base_client:base_client.py:22 Response status code: 200. Response content: b'{"listenAddresses":["/ip4/172.18.125.11/tcp/21073/p2p/16Uiu2HAmRkJt3iZBaKBqPcjaVGyXM8Kmah9kbPTuvx8LCLNFB5Hb","/ip4/172.18.125.11/tcp/21074/ws/p2p/16Uiu2HAmRkJt3iZBaKBqPcjaVGyXM8Kmah9kbPTuvx8LCLNFB5Hb"],"enrUri":"enr:-L24QOXA9jHR3NVW0TaMb65Sf29wLx0xbMIXMPE3GX2V8qjNNOMIk4suvzTyKTDnxNU3995Jv6QBHne4NEN-0X9BqJYCgmlkgnY0gmlwhKwSfQuKbXVsdGlhZGRyc5YACASsEn0LBlJRAAoErBJ9CwZSUt0DgnJzhQADAQAAiXNlY3AyNTZrMaEDwoTJT7HLteCCjZ55CWRCuL4wW9uD2H2kEqd7lgFu0iKDdGNwglJRg3VkcIJSU4V3YWt1MgE"}'
[32mINFO [0m src.node.waku_node:waku_node.py:292 REST service is ready !!
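Note: the readiness wait logged above comes down to polling the node's REST /health endpoint until nodeHealth reports READY. A minimal sketch of that pattern, assuming the requests library and the REST port 21072 shown above (hypothetical helper, not the harness's actual code):

    import time
    import requests

    def wait_until_ready(rest_port, timeout=30):
        # Poll GET /health until the node reports an overall READY state.
        deadline = time.time() + timeout
        while time.time() < deadline:
            try:
                resp = requests.get(f"http://127.0.0.1:{rest_port}/health", timeout=5)
                if resp.ok and resp.json().get("nodeHealth") == "READY":
                    return resp.json()
            except requests.RequestException:
                pass  # REST endpoint not accepting connections yet
            time.sleep(1)
        raise TimeoutError(f"node on REST port {rest_port} never reported READY")

    wait_until_ready(21072)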
[35mDEBUG [0m src.node.waku_node:waku_node.py:90 Starting Node...
[35mDEBUG [0m src.node.docker_mananger:docker_mananger.py:22 Attempting to create or retrieve network waku
[35mDEBUG [0m src.node.docker_mananger:docker_mananger.py:25 Network waku already exists
[35mDEBUG [0m src.node.docker_mananger:docker_mananger.py:108 Generated random external IP 172.18.37.185
[35mDEBUG [0m src.node.docker_mananger:docker_mananger.py:101 Generated ports ['21255', '21256', '21257', '21258', '21259']
[35mDEBUG [0m src.node.waku_node:waku_node.py:439 RLN credentials were not set
[32mINFO [0m src.node.waku_node:waku_node.py:176 RLN credentials not set or credential store not available, starting without RLN
[35mDEBUG [0m src.node.waku_node:waku_node.py:178 Using volumes []
[35mDEBUG [0m src.node.docker_mananger:docker_mananger.py:49 docker run -i -t -p 21255:21255 -p 21256:21256 -p 21257:21257 -p 21258:21258 -p 21259:21259 wakuorg/nwaku:latest --listen-address=0.0.0.0 --rest=true --rest-admin=true --websocket-support=true --log-level=TRACE --rest-relay-cache-capacity=100 --websocket-port=21257 --rest-port=21255 --tcp-port=21256 --discv5-udp-port=21258 --rest-address=0.0.0.0 --nat=extip:172.18.37.185 --peer-exchange=true --discv5-discovery=true --cluster-id=3 --nodekey=2c1a78e7cdeea5dd493b0c29f0fb81beb82ebde8e8e4b0aebdf9d65717c81ed3 --shard=0 --metrics-server=true --metrics-server-address=0.0.0.0 --metrics-server-port=21259 --metrics-logging=true --relay=true --store=false --discv5-bootstrap-node=enr:-L24QOXA9jHR3NVW0TaMb65Sf29wLx0xbMIXMPE3GX2V8qjNNOMIk4suvzTyKTDnxNU3995Jv6QBHne4NEN-0X9BqJYCgmlkgnY0gmlwhKwSfQuKbXVsdGlhZGRyc5YACASsEn0LBlJRAAoErBJ9CwZSUt0DgnJzhQADAQAAiXNlY3AyNTZrMaEDwoTJT7HLteCCjZ55CWRCuL4wW9uD2H2kEqd7lgFu0iKDdGNwglJRg3VkcIJSU4V3YWt1MgE
[35mDEBUG [0m src.node.docker_mananger:docker_mananger.py:55 docker network connect --ip 172.18.37.185 waku 13b3b0c3cbc65ee8db703a89d22afa58371dd6e425ac100b6a7553abfb52d425
[35mDEBUG [0m src.node.docker_mananger:docker_mananger.py:58 Container started with ID 13b3b0c3cbc6. Setting up logs at ./log/docker/node2_2025-12-14_04-15-13__bf0e8d90-b670-45a0-8136-bd589ac1e9f1__wakuorg_nwaku:latest.log
[35mDEBUG [0m src.node.waku_node:waku_node.py:190 Started container from image wakuorg/nwaku:latest. REST: 21255
[35mDEBUG [0m src.libs.common:common.py:47 Sleeping for 1 seconds
[32mINFO [0m src.node.api_clients.base_client:base_client.py:37 curl -v -X GET "http://127.0.0.1:21255/health" -H "Content-Type: application/json" -d 'None'
[32mINFO [0m src.node.api_clients.base_client:base_client.py:22 Response status code: 200. Response content: b'{"nodeHealth":"READY","protocolsHealth":[{"Relay":"NOT_READY","desc":"No connected peers"},{"Rln Relay":"NOT_MOUNTED"},{"Lightpush":"NOT_MOUNTED"},{"Legacy Lightpush":"NOT_MOUNTED"},{"Filter":"NOT_MOUNTED"},{"Store":"NOT_MOUNTED"},{"Legacy Store":"NOT_MOUNTED"},{"Peer Exchange":"READY"},{"Rendezvous":"NOT_READY","desc":"No Rendezvous peers are available yet"},{"Mix":"NOT_MOUNTED"},{"Lightpush Client":"NOT_READY","desc":"No Lightpush service peer available yet"},{"Legacy Lightpush Client":"NOT_READY","desc":"No Lightpush service peer available yet"},{"Store Client":"NOT_READY","desc":"No Store service peer available yet, neither Store service set up for the node"},{"Legacy Store Client":"NOT_READY","desc":"No Legacy Store service peers are available yet, neither Store service set up for the node"},{"Filter Client":"NOT_READY","desc":"No Filter service peer available yet"}]}'
[32mINFO [0m src.node.waku_node:waku_node.py:287 Node protocols are initialized !!
[32mINFO [0m src.node.api_clients.base_client:base_client.py:37 curl -v -X GET "http://127.0.0.1:21255/debug/v1/info" -H "Content-Type: application/json" -d 'None'
[32mINFO [0m src.node.api_clients.base_client:base_client.py:22 Response status code: 200. Response content: b'{"listenAddresses":["/ip4/172.18.37.185/tcp/21256/p2p/16Uiu2HAmDWedctGP5pXhXBDaWBy844cqtEtRhjKgVQfGGw5zk6V8","/ip4/172.18.37.185/tcp/21257/ws/p2p/16Uiu2HAmDWedctGP5pXhXBDaWBy844cqtEtRhjKgVQfGGw5zk6V8"],"enrUri":"enr:-L24QHC7YR8wdlQE6W7OHpBYXzcYOJI6c9ZhOUgkDeTxMk1BQXoOE3pRjqM56p3xvP5BNP4M1sZdu3clozljBMwxtGsCgmlkgnY0gmlwhKwSJbmKbXVsdGlhZGRyc5YACASsEiW5BlMIAAoErBIluQZTCd0DgnJzhQADAQAAiXNlY3AyNTZrMaEDDLi-plwyGUyGgItR4L2j9NWpCXRjDkWcdAntj3uAkduDdGNwglMIg3VkcIJTCoV3YWt1MgE"}'
[32mINFO [0m src.node.waku_node:waku_node.py:292 REST service is ready !!
[35mDEBUG [0m src.node.waku_node:waku_node.py:90 Starting Node...
[35mDEBUG [0m src.node.docker_mananger:docker_mananger.py:22 Attempting to create or retrieve network waku
[35mDEBUG [0m src.node.docker_mananger:docker_mananger.py:25 Network waku already exists
[35mDEBUG [0m src.node.docker_mananger:docker_mananger.py:108 Generated random external IP 172.18.151.118
[35mDEBUG [0m src.node.docker_mananger:docker_mananger.py:101 Generated ports ['24018', '24019', '24020', '24021', '24022']
[35mDEBUG [0m src.node.waku_node:waku_node.py:439 RLN credentials were not set
[32mINFO [0m src.node.waku_node:waku_node.py:176 RLN credentials not set or credential store not available, starting without RLN
[35mDEBUG [0m src.node.waku_node:waku_node.py:178 Using volumes []
[35mDEBUG [0m src.node.docker_mananger:docker_mananger.py:49 docker run -i -t -p 24018:24018 -p 24019:24019 -p 24020:24020 -p 24021:24021 -p 24022:24022 wakuorg/nwaku:latest --listen-address=0.0.0.0 --rest=true --rest-admin=true --websocket-support=true --log-level=TRACE --rest-relay-cache-capacity=100 --websocket-port=24020 --rest-port=24018 --tcp-port=24019 --discv5-udp-port=24021 --rest-address=0.0.0.0 --nat=extip:172.18.151.118 --peer-exchange=true --discv5-discovery=true --cluster-id=3 --nodekey=849088bb08b2943fff6a2a5d2c2d2f48cebe505c52dfadffacf8add66bfe17d7 --shard=0 --metrics-server=true --metrics-server-address=0.0.0.0 --metrics-server-port=24022 --metrics-logging=true --relay=true --store=false --filter=true --discv5-bootstrap-node=enr:-L24QHC7YR8wdlQE6W7OHpBYXzcYOJI6c9ZhOUgkDeTxMk1BQXoOE3pRjqM56p3xvP5BNP4M1sZdu3clozljBMwxtGsCgmlkgnY0gmlwhKwSJbmKbXVsdGlhZGRyc5YACASsEiW5BlMIAAoErBIluQZTCd0DgnJzhQADAQAAiXNlY3AyNTZrMaEDDLi-plwyGUyGgItR4L2j9NWpCXRjDkWcdAntj3uAkduDdGNwglMIg3VkcIJTCoV3YWt1MgE
[35mDEBUG [0m src.node.docker_mananger:docker_mananger.py:55 docker network connect --ip 172.18.151.118 waku d8205dfc79546e10ae0d94e602cab38df3f44cdfae607f5f1bb21100539e8501
[35mDEBUG [0m src.node.docker_mananger:docker_mananger.py:58 Container started with ID d8205dfc7954. Setting up logs at ./log/docker/node3_2025-12-14_04-15-13__bf0e8d90-b670-45a0-8136-bd589ac1e9f1__wakuorg_nwaku:latest.log
[35mDEBUG [0m src.node.waku_node:waku_node.py:190 Started container from image wakuorg/nwaku:latest. REST: 24018
[35mDEBUG [0m src.libs.common:common.py:47 Sleeping for 1 seconds
[32mINFO [0m src.node.api_clients.base_client:base_client.py:37 curl -v -X GET "http://127.0.0.1:24018/health" -H "Content-Type: application/json" -d 'None'
[32mINFO [0m src.node.api_clients.base_client:base_client.py:22 Response status code: 200. Response content: b'{"nodeHealth":"READY","protocolsHealth":[{"Relay":"NOT_READY","desc":"No connected peers"},{"Rln Relay":"NOT_MOUNTED"},{"Lightpush":"NOT_MOUNTED"},{"Legacy Lightpush":"NOT_MOUNTED"},{"Filter":"NOT_READY","desc":"Relay is not ready, filter will not be able to sort out messages"},{"Store":"NOT_MOUNTED"},{"Legacy Store":"NOT_MOUNTED"},{"Peer Exchange":"READY"},{"Rendezvous":"NOT_READY","desc":"No Rendezvous peers are available yet"},{"Mix":"NOT_MOUNTED"},{"Lightpush Client":"NOT_READY","desc":"No Lightpush service peer available yet"},{"Legacy Lightpush Client":"NOT_READY","desc":"No Lightpush service peer available yet"},{"Store Client":"NOT_READY","desc":"No Store service peer available yet, neither Store service set up for the node"},{"Legacy Store Client":"NOT_READY","desc":"No Legacy Store service peers are available yet, neither Store service set up for the node"},{"Filter Client":"NOT_READY","desc":"No Filter service peer available yet"}]}'
[32mINFO [0m src.node.waku_node:waku_node.py:287 Node protocols are initialized !!
[32mINFO [0m src.node.api_clients.base_client:base_client.py:37 curl -v -X GET "http://127.0.0.1:24018/debug/v1/info" -H "Content-Type: application/json" -d 'None'
[32mINFO [0m src.node.api_clients.base_client:base_client.py:22 Response status code: 200. Response content: b'{"listenAddresses":["/ip4/172.18.151.118/tcp/24019/p2p/16Uiu2HAmEjV7dikB4Kms3SnjDMBzsWXyxd1yPq8mtXQdHmoLmrqJ","/ip4/172.18.151.118/tcp/24020/ws/p2p/16Uiu2HAmEjV7dikB4Kms3SnjDMBzsWXyxd1yPq8mtXQdHmoLmrqJ"],"enrUri":"enr:-L24QHwGTPTQ4_piYE0nv5TEr3z8Vj5I0DIsj1Qgv31xuPJiD7hWxukCLrMH8zCzGUg9zz3F0ROpY3WOA7Jhc5m2g4ACgmlkgnY0gmlwhKwSl3aKbXVsdGlhZGRyc5YACASsEpd2Bl3TAAoErBKXdgZd1N0DgnJzhQADAQAAiXNlY3AyNTZrMaEDHt44pUms_7mB0DwAKhl_kK9qyCK2BAZaKYrIf8mALaWDdGNwgl3Tg3VkcIJd1YV3YWt1MgU"}'
[32mINFO [0m src.node.waku_node:waku_node.py:292 REST service is ready !!
[35mDEBUG [0m src.node.waku_node:waku_node.py:90 Starting Node...
[35mDEBUG [0m src.node.docker_mananger:docker_mananger.py:22 Attempting to create or retrieve network waku
[35mDEBUG [0m src.node.docker_mananger:docker_mananger.py:25 Network waku already exists
[35mDEBUG [0m src.node.docker_mananger:docker_mananger.py:108 Generated random external IP 172.18.224.204
[35mDEBUG [0m src.node.docker_mananger:docker_mananger.py:101 Generated ports ['1649', '1650', '1651', '1652', '1653']
[35mDEBUG [0m src.node.waku_node:waku_node.py:439 RLN credentials were not set
[32mINFO [0m src.node.waku_node:waku_node.py:176 RLN credentials not set or credential store not available, starting without RLN
[35mDEBUG [0m src.node.waku_node:waku_node.py:178 Using volumes []
[35mDEBUG [0m src.node.docker_mananger:docker_mananger.py:49 docker run -i -t -p 1649:1649 -p 1650:1650 -p 1651:1651 -p 1652:1652 -p 1653:1653 wakuorg/nwaku:latest --listen-address=0.0.0.0 --rest=true --rest-admin=true --websocket-support=true --log-level=TRACE --rest-relay-cache-capacity=100 --websocket-port=1651 --rest-port=1649 --tcp-port=1650 --discv5-udp-port=1652 --rest-address=0.0.0.0 --nat=extip:172.18.224.204 --peer-exchange=true --discv5-discovery=true --cluster-id=3 --nodekey=e4e5b047c944f3b137ea45661acacabcd392ae2afd0db350b13a6a2d8e0b9836 --shard=0 --metrics-server=true --metrics-server-address=0.0.0.0 --metrics-server-port=1653 --metrics-logging=true --relay=true --filter=true --store=false --discv5-bootstrap-node=enr:-L24QHwGTPTQ4_piYE0nv5TEr3z8Vj5I0DIsj1Qgv31xuPJiD7hWxukCLrMH8zCzGUg9zz3F0ROpY3WOA7Jhc5m2g4ACgmlkgnY0gmlwhKwSl3aKbXVsdGlhZGRyc5YACASsEpd2Bl3TAAoErBKXdgZd1N0DgnJzhQADAQAAiXNlY3AyNTZrMaEDHt44pUms_7mB0DwAKhl_kK9qyCK2BAZaKYrIf8mALaWDdGNwgl3Tg3VkcIJd1YV3YWt1MgU
[35mDEBUG [0m src.node.docker_mananger:docker_mananger.py:55 docker network connect --ip 172.18.224.204 waku 1a18110b8a5625e562f4f6f5c6e39975374bf9546b35a3c1540bfb09dff68728
[35mDEBUG [0m src.node.docker_mananger:docker_mananger.py:58 Container started with ID 1a18110b8a56. Setting up logs at ./log/docker/node4_2025-12-14_04-15-13__bf0e8d90-b670-45a0-8136-bd589ac1e9f1__wakuorg_nwaku:latest.log
[35mDEBUG [0m src.node.waku_node:waku_node.py:190 Started container from image wakuorg/nwaku:latest. REST: 1649
[35mDEBUG [0m src.libs.common:common.py:47 Sleeping for 1 seconds
[32mINFO [0m src.node.api_clients.base_client:base_client.py:37 curl -v -X GET "http://127.0.0.1:1649/health" -H "Content-Type: application/json" -d 'None'
[32mINFO [0m src.node.api_clients.base_client:base_client.py:22 Response status code: 200. Response content: b'{"nodeHealth":"READY","protocolsHealth":[{"Relay":"NOT_READY","desc":"No connected peers"},{"Rln Relay":"NOT_MOUNTED"},{"Lightpush":"NOT_MOUNTED"},{"Legacy Lightpush":"NOT_MOUNTED"},{"Filter":"NOT_READY","desc":"Relay is not ready, filter will not be able to sort out messages"},{"Store":"NOT_MOUNTED"},{"Legacy Store":"NOT_MOUNTED"},{"Peer Exchange":"READY"},{"Rendezvous":"NOT_READY","desc":"No Rendezvous peers are available yet"},{"Mix":"NOT_MOUNTED"},{"Lightpush Client":"NOT_READY","desc":"No Lightpush service peer available yet"},{"Legacy Lightpush Client":"NOT_READY","desc":"No Lightpush service peer available yet"},{"Store Client":"NOT_READY","desc":"No Store service peer available yet, neither Store service set up for the node"},{"Legacy Store Client":"NOT_READY","desc":"No Legacy Store service peers are available yet, neither Store service set up for the node"},{"Filter Client":"READY"}]}'
[32mINFO [0m src.node.waku_node:waku_node.py:287 Node protocols are initialized !!
[32mINFO [0m src.node.api_clients.base_client:base_client.py:37 curl -v -X GET "http://127.0.0.1:1649/debug/v1/info" -H "Content-Type: application/json" -d 'None'
[32mINFO [0m src.node.api_clients.base_client:base_client.py:22 Response status code: 200. Response content: b'{"listenAddresses":["/ip4/172.18.224.204/tcp/1650/p2p/16Uiu2HAmV434U4ctJ5LR7XpFuL1FB9u5MYmLBNMH8BF2YCBU8ud9","/ip4/172.18.224.204/tcp/1651/ws/p2p/16Uiu2HAmV434U4ctJ5LR7XpFuL1FB9u5MYmLBNMH8BF2YCBU8ud9"],"enrUri":"enr:-L24QMC144pCARaffeaaAYNXlphxPV_Zrt0-d-kc3W6gvg9MUnkmNp8Op_XchrSDhi1ZROKbb8StZb7Bz3dQMQXl3-MCgmlkgnY0gmlwhKwS4MyKbXVsdGlhZGRyc5YACASsEuDMBgZyAAoErBLgzAYGc90DgnJzhQADAQAAiXNlY3AyNTZrMaED86Jp4Pi-RWfhlbzWj_fvBz9vvkVjD5o5IrYwjvrCusiDdGNwggZyg3VkcIIGdIV3YWt1MgU"}'
[32mINFO [0m src.node.waku_node:waku_node.py:292 REST service is ready !!
[35mDEBUG [0m src.node.waku_node:waku_node.py:90 Starting Node...
[35mDEBUG [0m src.node.docker_mananger:docker_mananger.py:22 Attempting to create or retrieve network waku
[35mDEBUG [0m src.node.docker_mananger:docker_mananger.py:25 Network waku already exists
[35mDEBUG [0m src.node.docker_mananger:docker_mananger.py:108 Generated random external IP 172.18.94.11
[35mDEBUG [0m src.node.docker_mananger:docker_mananger.py:101 Generated ports ['24199', '24200', '24201', '24202', '24203']
[35mDEBUG [0m src.node.waku_node:waku_node.py:439 RLN credentials were not set
[32mINFO [0m src.node.waku_node:waku_node.py:176 RLN credentials not set or credential store not available, starting without RLN
[35mDEBUG [0m src.node.waku_node:waku_node.py:178 Using volumes []
[35mDEBUG [0m src.node.docker_mananger:docker_mananger.py:49 docker run -i -t -p 24199:24199 -p 24200:24200 -p 24201:24201 -p 24202:24202 -p 24203:24203 wakuorg/nwaku:latest --listen-address=0.0.0.0 --rest=true --rest-admin=true --websocket-support=true --log-level=TRACE --rest-relay-cache-capacity=100 --websocket-port=24201 --rest-port=24199 --tcp-port=24200 --discv5-udp-port=24202 --rest-address=0.0.0.0 --nat=extip:172.18.94.11 --peer-exchange=true --discv5-discovery=true --cluster-id=3 --nodekey=3fea10ff7cada0abca6e1a054374c3bf181b3adcddfdc03f76a0e2660cb55231 --shard=0 --metrics-server=true --metrics-server-address=0.0.0.0 --metrics-server-port=24203 --metrics-logging=true --relay=false --filternode=/ip4/172.18.224.204/tcp/1650/p2p/16Uiu2HAmV434U4ctJ5LR7XpFuL1FB9u5MYmLBNMH8BF2YCBU8ud9 --store=false --discv5-bootstrap-node=enr:-L24QHwGTPTQ4_piYE0nv5TEr3z8Vj5I0DIsj1Qgv31xuPJiD7hWxukCLrMH8zCzGUg9zz3F0ROpY3WOA7Jhc5m2g4ACgmlkgnY0gmlwhKwSl3aKbXVsdGlhZGRyc5YACASsEpd2Bl3TAAoErBKXdgZd1N0DgnJzhQADAQAAiXNlY3AyNTZrMaEDHt44pUms_7mB0DwAKhl_kK9qyCK2BAZaKYrIf8mALaWDdGNwgl3Tg3VkcIJd1YV3YWt1MgU
[35mDEBUG [0m src.node.docker_mananger:docker_mananger.py:55 docker network connect --ip 172.18.94.11 waku cfc190b72b8a635afa80d1bc3c66b3204ad5713fb74a361b314d48ba61377538
[35mDEBUG [0m src.node.docker_mananger:docker_mananger.py:58 Container started with ID cfc190b72b8a. Setting up logs at ./log/docker/node5_2025-12-14_04-15-13__bf0e8d90-b670-45a0-8136-bd589ac1e9f1__wakuorg_nwaku:latest.log
[35mDEBUG [0m src.node.waku_node:waku_node.py:190 Started container from image wakuorg/nwaku:latest. REST: 24199
[35mDEBUG [0m src.libs.common:common.py:47 Sleeping for 1 seconds
[32mINFO [0m src.node.api_clients.base_client:base_client.py:37 curl -v -X GET "http://127.0.0.1:24199/health" -H "Content-Type: application/json" -d 'None'
[32mINFO [0m src.node.api_clients.base_client:base_client.py:22 Response status code: 200. Response content: b'{"nodeHealth":"READY","protocolsHealth":[{"Relay":"NOT_MOUNTED"},{"Rln Relay":"NOT_MOUNTED"},{"Lightpush":"NOT_MOUNTED"},{"Legacy Lightpush":"NOT_MOUNTED"},{"Filter":"NOT_MOUNTED"},{"Store":"NOT_MOUNTED"},{"Legacy Store":"NOT_MOUNTED"},{"Peer Exchange":"READY"},{"Rendezvous":"NOT_READY","desc":"No Rendezvous peers are available yet"},{"Mix":"NOT_MOUNTED"},{"Lightpush Client":"NOT_READY","desc":"No Lightpush service peer available yet"},{"Legacy Lightpush Client":"NOT_READY","desc":"No Lightpush service peer available yet"},{"Store Client":"NOT_READY","desc":"No Store service peer available yet, neither Store service set up for the node"},{"Legacy Store Client":"NOT_READY","desc":"No Legacy Store service peers are available yet, neither Store service set up for the node"},{"Filter Client":"READY"}]}'
[32mINFO [0m src.node.waku_node:waku_node.py:287 Node protocols are initialized !!
[32mINFO [0m src.node.api_clients.base_client:base_client.py:37 curl -v -X GET "http://127.0.0.1:24199/debug/v1/info" -H "Content-Type: application/json" -d 'None'
[32mINFO [0m src.node.api_clients.base_client:base_client.py:22 Response status code: 200. Response content: b'{"listenAddresses":["/ip4/172.18.94.11/tcp/24200/p2p/16Uiu2HAmQBkPaGMdUsBsGfEGhtxNWQsnRht6F8b9AwyLgibdsbyU","/ip4/172.18.94.11/tcp/24201/ws/p2p/16Uiu2HAmQBkPaGMdUsBsGfEGhtxNWQsnRht6F8b9AwyLgibdsbyU"],"enrUri":"enr:-L24QHzforPSmCMRyewF1T0ZGHp-zr3yfXXFhYzGXn-ETxf2HtxfYZcYQ4CZN9cD64mtH5YFTLE7TWLMUb_tR2F8MpkCgmlkgnY0gmlwhKwSXguKbXVsdGlhZGRyc5YACASsEl4LBl6IAAoErBJeCwZeid0DgnJzhQADAQAAiXNlY3AyNTZrMaEDq1HGpRFp2zAqyW7iuTDcKdRNpVsaWPen-rPEsYh1WaODdGNwgl6Ig3VkcIJeioV3YWt1MgA"}'
[32mINFO [0m src.node.waku_node:waku_node.py:292 REST service is ready !!
[35mDEBUG [0m tests.e2e.test_e2e:test_e2e.py:238 Subscribe nodes to relay pubsub topic /waku/2/rs/3/1
[32mINFO [0m src.node.api_clients.base_client:base_client.py:37 curl -v -X POST "http://127.0.0.1:21072/relay/v1/subscriptions" -H "Content-Type: application/json" -d '["/waku/2/rs/3/1"]'
[32mINFO [0m src.node.api_clients.base_client:base_client.py:22 Response status code: 200. Response content: b'OK'
[32mINFO [0m src.node.api_clients.base_client:base_client.py:37 curl -v -X POST "http://127.0.0.1:21255/relay/v1/subscriptions" -H "Content-Type: application/json" -d '["/waku/2/rs/3/1"]'
[32mINFO [0m src.node.api_clients.base_client:base_client.py:22 Response status code: 200. Response content: b'OK'
[32mINFO [0m src.node.api_clients.base_client:base_client.py:37 curl -v -X POST "http://127.0.0.1:24018/relay/v1/subscriptions" -H "Content-Type: application/json" -d '["/waku/2/rs/3/1"]'
[32mINFO [0m src.node.api_clients.base_client:base_client.py:22 Response status code: 200. Response content: b'OK'
[32mINFO [0m src.node.api_clients.base_client:base_client.py:37 curl -v -X POST "http://127.0.0.1:1649/relay/v1/subscriptions" -H "Content-Type: application/json" -d '["/waku/2/rs/3/1"]'
[32mINFO [0m src.node.api_clients.base_client:base_client.py:22 Response status code: 200. Response content: b'OK'
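Note: the four subscription calls above each POST a JSON array of pubsub topics to /relay/v1/subscriptions on the relay-capable nodes. A rough Python equivalent of those curl commands, assuming the requests library (hypothetical helper, not the harness's actual code):

    import requests

    def subscribe_relay(rest_port, pubsub_topic="/waku/2/rs/3/1"):
        # The relay REST API expects a JSON array of pubsub topics.
        resp = requests.post(
            f"http://127.0.0.1:{rest_port}/relay/v1/subscriptions",
            json=[pubsub_topic],
            timeout=10,
        )
        resp.raise_for_status()
        return resp.text  # the nodes above answer with plain 'OK'

    for port in (21072, 21255, 24018, 1649):  # REST ports of node1-node4 above
        subscribe_relay(port)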
[35mDEBUG [0m tests.e2e.test_e2e:test_e2e.py:243 Node5 makes filter request pubsubtopic /waku/2/rs/3/1 and content topic /test/1/waku-filter/proto
[32mINFO [0m src.node.api_clients.base_client:base_client.py:37 curl -v -X POST "http://127.0.0.1:24199/filter/v2/subscriptions" -H "Content-Type: application/json" -d '{"requestId": "1", "contentFilters": ["/test/1/waku-filter/proto"], "pubsubTopic": "/waku/2/rs/3/1"}'
[32mINFO [0m src.node.api_clients.base_client:base_client.py:22 Response status code: 200. Response content: b'{"requestId":"1","statusDesc":"OK"}'
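Note: the filter subscription above is a single POST to /filter/v2/subscriptions on node5, carrying a request id, the content filters and the pubsub topic. A sketch of that request, assuming the requests library (hypothetical helper):

    import requests

    def filter_subscribe(rest_port, request_id, content_topics, pubsub_topic):
        # Body shape mirrors the request logged above for node5 (REST port 24199).
        resp = requests.post(
            f"http://127.0.0.1:{rest_port}/filter/v2/subscriptions",
            json={
                "requestId": request_id,
                "contentFilters": content_topics,
                "pubsubTopic": pubsub_topic,
            },
            timeout=10,
        )
        resp.raise_for_status()
        return resp.json()  # e.g. {"requestId": "1", "statusDesc": "OK"}

    filter_subscribe(24199, "1", ["/test/1/waku-filter/proto"], "/waku/2/rs/3/1")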
[32mINFO [0m src.node.api_clients.base_client:base_client.py:37 curl -v -X GET "http://127.0.0.1:21072/admin/v1/peers" -H "Content-Type: application/json" -d 'None'
[32mINFO [0m src.node.api_clients.base_client:base_client.py:22 Response status code: 200. Response content: b'[{"multiaddr":"/ip4/172.18.37.185/tcp/47332/p2p/16Uiu2HAmDWedctGP5pXhXBDaWBy844cqtEtRhjKgVQfGGw5zk6V8","protocols":["/ipfs/id/1.0.0","/libp2p/autonat/1.0.0","/libp2p/circuit/relay/0.2.0/hop","/vac/waku/metadata/1.0.0","/vac/waku/relay/2.0.0","/vac/waku/rendezvous/1.0.0","/ipfs/ping/1.0.0","/vac/waku/filter-push/2.0.0-beta1","/vac/waku/peer-exchange/2.0.0-alpha1"],"shards":[0],"connected":"Connected","agent":"nwaku-v0.36.0-114-g7d1c6a","origin":"Discv5"}]'
[32mINFO [0m src.node.api_clients.base_client:base_client.py:37 curl -v -X GET "http://127.0.0.1:21255/admin/v1/peers" -H "Content-Type: application/json" -d 'None'
[32mINFO [0m src.node.api_clients.base_client:base_client.py:22 Response status code: 200. Response content: b'[{"multiaddr":"/ip4/172.18.125.11/tcp/21073/p2p/16Uiu2HAmRkJt3iZBaKBqPcjaVGyXM8Kmah9kbPTuvx8LCLNFB5Hb","protocols":["/ipfs/id/1.0.0","/libp2p/autonat/1.0.0","/libp2p/circuit/relay/0.2.0/hop","/vac/waku/metadata/1.0.0","/vac/waku/relay/2.0.0","/vac/waku/rendezvous/1.0.0","/ipfs/ping/1.0.0","/vac/waku/filter-push/2.0.0-beta1","/vac/waku/peer-exchange/2.0.0-alpha1"],"shards":[0],"connected":"Connected","agent":"nwaku-v0.36.0-114-g7d1c6a","origin":"Discv5"},{"multiaddr":"/ip4/172.18.151.118/tcp/39952/p2p/16Uiu2HAmEjV7dikB4Kms3SnjDMBzsWXyxd1yPq8mtXQdHmoLmrqJ","protocols":["/ipfs/id/1.0.0","/libp2p/autonat/1.0.0","/libp2p/circuit/relay/0.2.0/hop","/vac/waku/metadata/1.0.0","/vac/waku/relay/2.0.0","/vac/waku/rendezvous/1.0.0","/ipfs/ping/1.0.0","/vac/waku/filter-subscribe/2.0.0-beta1","/vac/waku/filter-push/2.0.0-beta1","/vac/waku/peer-exchange/2.0.0-alpha1"],"shards":[0],"connected":"Connected","agent":"nwaku-v0.36.0-114-g7d1c6a","origin":"UnknownOrigin"}]'
[32mINFO [0m src.node.api_clients.base_client:base_client.py:37 curl -v -X GET "http://127.0.0.1:24018/admin/v1/peers" -H "Content-Type: application/json" -d 'None'
[32mINFO [0m src.node.api_clients.base_client:base_client.py:22 Response status code: 200. Response content: b'[{"multiaddr":"/ip4/172.18.224.204/tcp/46640/p2p/16Uiu2HAmV434U4ctJ5LR7XpFuL1FB9u5MYmLBNMH8BF2YCBU8ud9","protocols":["/ipfs/id/1.0.0","/libp2p/autonat/1.0.0","/libp2p/circuit/relay/0.2.0/hop","/vac/waku/metadata/1.0.0","/vac/waku/relay/2.0.0","/vac/waku/rendezvous/1.0.0","/ipfs/ping/1.0.0","/vac/waku/filter-subscribe/2.0.0-beta1","/vac/waku/filter-push/2.0.0-beta1","/vac/waku/peer-exchange/2.0.0-alpha1"],"shards":[0],"connected":"Connected","agent":"nwaku-v0.36.0-114-g7d1c6a","origin":"UnknownOrigin"},{"multiaddr":"/ip4/172.18.37.185/tcp/21256/p2p/16Uiu2HAmDWedctGP5pXhXBDaWBy844cqtEtRhjKgVQfGGw5zk6V8","protocols":["/ipfs/id/1.0.0","/libp2p/autonat/1.0.0","/libp2p/circuit/relay/0.2.0/hop","/vac/waku/metadata/1.0.0","/vac/waku/relay/2.0.0","/vac/waku/rendezvous/1.0.0","/ipfs/ping/1.0.0","/vac/waku/filter-push/2.0.0-beta1","/vac/waku/peer-exchange/2.0.0-alpha1"],"shards":[0],"connected":"Connected","agent":"nwaku-v0.36.0-114-g7d1c6a","origin":"Discv5"}]'
[32mINFO [0m src.node.api_clients.base_client:base_client.py:37 curl -v -X GET "http://127.0.0.1:1649/admin/v1/peers" -H "Content-Type: application/json" -d 'None'
[32mINFO [0m src.node.api_clients.base_client:base_client.py:22 Response status code: 200. Response content: b'[{"multiaddr":"/ip4/172.18.151.118/tcp/24019/p2p/16Uiu2HAmEjV7dikB4Kms3SnjDMBzsWXyxd1yPq8mtXQdHmoLmrqJ","protocols":["/ipfs/id/1.0.0","/libp2p/autonat/1.0.0","/libp2p/circuit/relay/0.2.0/hop","/vac/waku/metadata/1.0.0","/vac/waku/relay/2.0.0","/vac/waku/rendezvous/1.0.0","/ipfs/ping/1.0.0","/vac/waku/filter-subscribe/2.0.0-beta1","/vac/waku/filter-push/2.0.0-beta1","/vac/waku/peer-exchange/2.0.0-alpha1"],"shards":[0],"connected":"Connected","agent":"nwaku-v0.36.0-114-g7d1c6a","origin":"Discv5"},{"multiaddr":"/ip4/172.18.94.11/tcp/60736/p2p/16Uiu2HAmQBkPaGMdUsBsGfEGhtxNWQsnRht6F8b9AwyLgibdsbyU","protocols":["/ipfs/id/1.0.0","/libp2p/autonat/1.0.0","/libp2p/circuit/relay/0.2.0/hop","/vac/waku/metadata/1.0.0","/vac/waku/rendezvous/1.0.0","/ipfs/ping/1.0.0","/vac/waku/filter-push/2.0.0-beta1","/vac/waku/peer-exchange/2.0.0-alpha1"],"shards":[],"connected":"Connected","agent":"nwaku-v0.36.0-114-g7d1c6a","origin":"UnknownOrigin"}]'
[32mINFO [0m src.node.api_clients.base_client:base_client.py:37 curl -v -X GET "http://127.0.0.1:24199/admin/v1/peers" -H "Content-Type: application/json" -d 'None'
[32mINFO [0m src.node.api_clients.base_client:base_client.py:22 Response status code: 200. Response content: b'[{"multiaddr":"/ip4/172.18.224.204/tcp/1650/p2p/16Uiu2HAmV434U4ctJ5LR7XpFuL1FB9u5MYmLBNMH8BF2YCBU8ud9","protocols":["/ipfs/id/1.0.0","/libp2p/autonat/1.0.0","/libp2p/circuit/relay/0.2.0/hop","/vac/waku/metadata/1.0.0","/vac/waku/relay/2.0.0","/vac/waku/rendezvous/1.0.0","/ipfs/ping/1.0.0","/vac/waku/filter-subscribe/2.0.0-beta1","/vac/waku/filter-push/2.0.0-beta1","/vac/waku/peer-exchange/2.0.0-alpha1"],"shards":[0,1],"connected":"Connected","agent":"nwaku-v0.36.0-114-g7d1c6a","origin":"UnknownOrigin"},{"multiaddr":"/ip4/172.18.151.118/tcp/24019/p2p/16Uiu2HAmEjV7dikB4Kms3SnjDMBzsWXyxd1yPq8mtXQdHmoLmrqJ","protocols":["/vac/waku/relay/2.0.0","/vac/waku/filter-subscribe/2.0.0-beta1"],"shards":[0],"connected":"NotConnected","agent":"","origin":"Discv5"}]'
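Note: the /admin/v1/peers responses above list every known peer with its protocols and a connected/not-connected flag. A small sketch for extracting the connected peers, assuming the requests library (hypothetical helper):

    import requests

    def connected_peers(rest_port):
        # GET /admin/v1/peers returns a JSON list of peer records; keep those
        # whose "connected" field reads Connected, as in the responses above.
        peers = requests.get(
            f"http://127.0.0.1:{rest_port}/admin/v1/peers", timeout=10
        ).json()
        return [p for p in peers if p.get("connected") == "Connected"]

    print(len(connected_peers(21255)))  # node2 above reports two connected peers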
[35mDEBUG [0m src.libs.common:common.py:47 Sleeping for 60 seconds
[35mDEBUG [0m tests.e2e.test_e2e:test_e2e.py:248 3 Nodes publish 12 message
[35mDEBUG [0m src.steps.store:store.py:132 Relaying message
[32mINFO [0m src.node.api_clients.base_client:base_client.py:37 curl -v -X POST "http://127.0.0.1:21072/relay/v1/messages/%2Fwaku%2F2%2Frs%2F3%2F1" -H "Content-Type: application/json" -d '{"payload": "RmlsdGVyIHdvcmtzISE=", "contentTopic": "/test/1/waku-filter/proto", "timestamp": '$(date +%s%N)'}'
[32mINFO [0m src.node.api_clients.base_client:base_client.py:22 Response status code: 200. Response content: b'OK'
[35mDEBUG [0m src.libs.common:common.py:47 Sleeping for 0.2 seconds
[35mDEBUG [0m src.libs.common:common.py:47 Sleeping for 2 seconds
[35mDEBUG [0m src.steps.store:store.py:132 Relaying message
[32mINFO [0m src.node.api_clients.base_client:base_client.py:37 curl -v -X POST "http://127.0.0.1:21072/relay/v1/messages/%2Fwaku%2F2%2Frs%2F3%2F1" -H "Content-Type: application/json" -d '{"payload": "RmlsdGVyIHdvcmtzISE=", "contentTopic": "/test/1/waku-filter/proto", "timestamp": '$(date +%s%N)'}'
[32mINFO [0m src.node.api_clients.base_client:base_client.py:22 Response status code: 200. Response content: b'OK'
[35mDEBUG [0m src.libs.common:common.py:47 Sleeping for 0.2 seconds
[35mDEBUG [0m src.libs.common:common.py:47 Sleeping for 2 seconds
[35mDEBUG [0m src.steps.store:store.py:132 Relaying message
[32mINFO [0m src.node.api_clients.base_client:base_client.py:37 curl -v -X POST "http://127.0.0.1:21072/relay/v1/messages/%2Fwaku%2F2%2Frs%2F3%2F1" -H "Content-Type: application/json" -d '{"payload": "RmlsdGVyIHdvcmtzISE=", "contentTopic": "/test/1/waku-filter/proto", "timestamp": '$(date +%s%N)'}'
[32mINFO [0m src.node.api_clients.base_client:base_client.py:22 Response status code: 200. Response content: b'OK'
[35mDEBUG [0m src.libs.common:common.py:47 Sleeping for 0.2 seconds
[35mDEBUG [0m src.libs.common:common.py:47 Sleeping for 2 seconds
[35mDEBUG [0m src.steps.store:store.py:132 Relaying message
[32mINFO [0m src.node.api_clients.base_client:base_client.py:37 curl -v -X POST "http://127.0.0.1:21072/relay/v1/messages/%2Fwaku%2F2%2Frs%2F3%2F1" -H "Content-Type: application/json" -d '{"payload": "RmlsdGVyIHdvcmtzISE=", "contentTopic": "/test/1/waku-filter/proto", "timestamp": '$(date +%s%N)'}'
[32mINFO [0m src.node.api_clients.base_client:base_client.py:22 Response status code: 200. Response content: b'OK'
[35mDEBUG [0m src.libs.common:common.py:47 Sleeping for 0.2 seconds
[35mDEBUG [0m src.libs.common:common.py:47 Sleeping for 2 seconds
[35mDEBUG [0m src.steps.store:store.py:132 Relaying message
[32mINFO [0m src.node.api_clients.base_client:base_client.py:37 curl -v -X POST "http://127.0.0.1:21255/relay/v1/messages/%2Fwaku%2F2%2Frs%2F3%2F1" -H "Content-Type: application/json" -d '{"payload": "RmlsdGVyIHdvcmtzISE=", "contentTopic": "/test/1/waku-filter/proto", "timestamp": '$(date +%s%N)'}'
[32mINFO [0m src.node.api_clients.base_client:base_client.py:22 Response status code: 200. Response content: b'OK'
[35mDEBUG [0m src.libs.common:common.py:47 Sleeping for 0.2 seconds
[35mDEBUG [0m src.libs.common:common.py:47 Sleeping for 2 seconds
[35mDEBUG [0m src.steps.store:store.py:132 Relaying message
[32mINFO [0m src.node.api_clients.base_client:base_client.py:37 curl -v -X POST "http://127.0.0.1:21255/relay/v1/messages/%2Fwaku%2F2%2Frs%2F3%2F1" -H "Content-Type: application/json" -d '{"payload": "RmlsdGVyIHdvcmtzISE=", "contentTopic": "/test/1/waku-filter/proto", "timestamp": '$(date +%s%N)'}'
[32mINFO [0m src.node.api_clients.base_client:base_client.py:22 Response status code: 200. Response content: b'OK'
[35mDEBUG [0m src.libs.common:common.py:47 Sleeping for 0.2 seconds
[35mDEBUG [0m src.libs.common:common.py:47 Sleeping for 2 seconds
[35mDEBUG [0m src.steps.store:store.py:132 Relaying message
[32mINFO [0m src.node.api_clients.base_client:base_client.py:37 curl -v -X POST "http://127.0.0.1:21255/relay/v1/messages/%2Fwaku%2F2%2Frs%2F3%2F1" -H "Content-Type: application/json" -d '{"payload": "RmlsdGVyIHdvcmtzISE=", "contentTopic": "/test/1/waku-filter/proto", "timestamp": '$(date +%s%N)'}'
[32mINFO [0m src.node.api_clients.base_client:base_client.py:22 Response status code: 200. Response content: b'OK'
[35mDEBUG [0m src.libs.common:common.py:47 Sleeping for 0.2 seconds
[35mDEBUG [0m src.libs.common:common.py:47 Sleeping for 2 seconds
[35mDEBUG [0m src.steps.store:store.py:132 Relaying message
[32mINFO [0m src.node.api_clients.base_client:base_client.py:37 curl -v -X POST "http://127.0.0.1:21255/relay/v1/messages/%2Fwaku%2F2%2Frs%2F3%2F1" -H "Content-Type: application/json" -d '{"payload": "RmlsdGVyIHdvcmtzISE=", "contentTopic": "/test/1/waku-filter/proto", "timestamp": '$(date +%s%N)'}'
[32mINFO [0m src.node.api_clients.base_client:base_client.py:22 Response status code: 200. Response content: b'OK'
[35mDEBUG [0m src.libs.common:common.py:47 Sleeping for 0.2 seconds
[35mDEBUG [0m src.libs.common:common.py:47 Sleeping for 2 seconds
[35mDEBUG [0m src.steps.store:store.py:132 Relaying message
[32mINFO [0m src.node.api_clients.base_client:base_client.py:37 curl -v -X POST "http://127.0.0.1:24018/relay/v1/messages/%2Fwaku%2F2%2Frs%2F3%2F1" -H "Content-Type: application/json" -d '{"payload": "RmlsdGVyIHdvcmtzISE=", "contentTopic": "/test/1/waku-filter/proto", "timestamp": '$(date +%s%N)'}'
[32mINFO [0m src.node.api_clients.base_client:base_client.py:22 Response status code: 200. Response content: b'OK'
[35mDEBUG [0m src.libs.common:common.py:47 Sleeping for 0.2 seconds
[35mDEBUG [0m src.libs.common:common.py:47 Sleeping for 2 seconds
[35mDEBUG [0m src.steps.store:store.py:132 Relaying message
[32mINFO [0m src.node.api_clients.base_client:base_client.py:37 curl -v -X POST "http://127.0.0.1:24018/relay/v1/messages/%2Fwaku%2F2%2Frs%2F3%2F1" -H "Content-Type: application/json" -d '{"payload": "RmlsdGVyIHdvcmtzISE=", "contentTopic": "/test/1/waku-filter/proto", "timestamp": '$(date +%s%N)'}'
[32mINFO [0m src.node.api_clients.base_client:base_client.py:22 Response status code: 200. Response content: b'OK'
[35mDEBUG [0m src.libs.common:common.py:47 Sleeping for 0.2 seconds
[35mDEBUG [0m src.libs.common:common.py:47 Sleeping for 2 seconds
[35mDEBUG [0m src.steps.store:store.py:132 Relaying message
[32mINFO [0m src.node.api_clients.base_client:base_client.py:37 curl -v -X POST "http://127.0.0.1:24018/relay/v1/messages/%2Fwaku%2F2%2Frs%2F3%2F1" -H "Content-Type: application/json" -d '{"payload": "RmlsdGVyIHdvcmtzISE=", "contentTopic": "/test/1/waku-filter/proto", "timestamp": '$(date +%s%N)'}'
[32mINFO [0m src.node.api_clients.base_client:base_client.py:22 Response status code: 200. Response content: b'OK'
[35mDEBUG [0m src.libs.common:common.py:47 Sleeping for 0.2 seconds
[35mDEBUG [0m src.libs.common:common.py:47 Sleeping for 2 seconds
[35mDEBUG [0m src.steps.store:store.py:132 Relaying message
[32mINFO [0m src.node.api_clients.base_client:base_client.py:37 curl -v -X POST "http://127.0.0.1:24018/relay/v1/messages/%2Fwaku%2F2%2Frs%2F3%2F1" -H "Content-Type: application/json" -d '{"payload": "RmlsdGVyIHdvcmtzISE=", "contentTopic": "/test/1/waku-filter/proto", "timestamp": '$(date +%s%N)'}'
[32mINFO [0m src.node.api_clients.base_client:base_client.py:22 Response status code: 200. Response content: b'OK'
[35mDEBUG [0m src.libs.common:common.py:47 Sleeping for 0.2 seconds
[35mDEBUG [0m src.libs.common:common.py:47 Sleeping for 2 seconds
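Note: each of the twelve publishes above POSTs a relay message to /relay/v1/messages/<url-encoded pubsub topic>; the payload 'RmlsdGVyIHdvcmtzISE=' is simply base64 for 'Filter works!!' and the timestamp is expressed in nanoseconds. A rough equivalent of one publish, assuming the requests library (hypothetical helper):

    import base64
    import time
    import urllib.parse
    import requests

    def publish_via_relay(rest_port, pubsub_topic, content_topic, text):
        # Payload is base64-encoded and the timestamp is in nanoseconds,
        # matching the request bodies of the curl calls above.
        message = {
            "payload": base64.b64encode(text.encode()).decode(),
            "contentTopic": content_topic,
            "timestamp": time.time_ns(),
        }
        topic = urllib.parse.quote(pubsub_topic, safe="")
        resp = requests.post(
            f"http://127.0.0.1:{rest_port}/relay/v1/messages/{topic}",
            json=message,
            timeout=10,
        )
        resp.raise_for_status()

    publish_via_relay(21072, "/waku/2/rs/3/1", "/test/1/waku-filter/proto", "Filter works!!")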
[35mDEBUG [0m tests.e2e.test_e2e:test_e2e.py:254 Node5 requests messages of subscribed filter topic /waku/2/rs/3/1
[32mINFO [0m src.node.api_clients.base_client:base_client.py:37 curl -v -X GET "http://127.0.0.1:24199/filter/v2/messages/%2Ftest%2F1%2Fwaku-filter%2Fproto" -H "Content-Type: application/json" -d 'None'
[32mINFO [0m src.node.api_clients.base_client:base_client.py:22 Response status code: 200. Response content: b'[{"payload":"RmlsdGVyIHdvcmtzISE=","contentTopic":"/test/1/waku-filter/proto","version":0,"timestamp":1765685780015087179,"ephemeral":false},{"payload":"RmlsdGVyIHdvcmtzISE=","contentTopic":"/test/1/waku-filter/proto","version":0,"timestamp":1765685782225376640,"ephemeral":false},{"payload":"RmlsdGVyIHdvcmtzISE=","contentTopic":"/test/1/waku-filter/proto","version":0,"timestamp":1765685784441389918,"ephemeral":false},{"payload":"RmlsdGVyIHdvcmtzISE=","contentTopic":"/test/1/waku-filter/proto","version":0,"timestamp":1765685786656435345,"ephemeral":false},{"payload":"RmlsdGVyIHdvcmtzISE=","contentTopic":"/test/1/waku-filter/proto","version":0,"timestamp":1765685788869118568,"ephemeral":false},{"payload":"RmlsdGVyIHdvcmtzISE=","contentTopic":"/test/1/waku-filter/proto","version":0,"timestamp":1765685791085229791,"ephemeral":false},{"payload":"RmlsdGVyIHdvcmtzISE=","contentTopic":"/test/1/waku-filter/proto","version":0,"timestamp":1765685793295327006,"ephemeral":false},{"payload":"RmlsdGVyIHdvcmtzISE=","contentTopic":"/test/1/waku-filter/proto","version":0,"timestamp":1765685795504675179,"ephemeral":false},{"payload":"RmlsdGVyIHdvcmtzISE=","contentTopic":"/test/1/waku-filter/proto","version":0,"timestamp":1765685797715494045,"ephemeral":false},{"payload":"RmlsdGVyIHdvcmtzISE=","contentTopic":"/test/1/waku-filter/proto","version":0,"timestamp":1765685799929712447,"ephemeral":false},{"payload":"RmlsdGVyIHdvcmtzISE=","contentTopic":"/test/1/waku-filter/proto","version":0,"timestamp":1765685802137802581,"ephemeral":false},{"payload":"RmlsdGVyIHdvcmtzISE=","contentTopic":"/test/1/waku-filter/proto","version":0,"timestamp":1765685804349658115,"ephemeral":false}]'
[35mDEBUG [0m tests.e2e.test_e2e:test_e2e.py:256 Response for node 5 is [{'payload': 'RmlsdGVyIHdvcmtzISE=', 'contentTopic': '/test/1/waku-filter/proto', 'version': 0, 'timestamp': 1765685780015087179, 'ephemeral': False}, {'payload': 'RmlsdGVyIHdvcmtzISE=', 'contentTopic': '/test/1/waku-filter/proto', 'version': 0, 'timestamp': 1765685782225376640, 'ephemeral': False}, {'payload': 'RmlsdGVyIHdvcmtzISE=', 'contentTopic': '/test/1/waku-filter/proto', 'version': 0, 'timestamp': 1765685784441389918, 'ephemeral': False}, {'payload': 'RmlsdGVyIHdvcmtzISE=', 'contentTopic': '/test/1/waku-filter/proto', 'version': 0, 'timestamp': 1765685786656435345, 'ephemeral': False}, {'payload': 'RmlsdGVyIHdvcmtzISE=', 'contentTopic': '/test/1/waku-filter/proto', 'version': 0, 'timestamp': 1765685788869118568, 'ephemeral': False}, {'payload': 'RmlsdGVyIHdvcmtzISE=', 'contentTopic': '/test/1/waku-filter/proto', 'version': 0, 'timestamp': 1765685791085229791, 'ephemeral': False}, {'payload': 'RmlsdGVyIHdvcmtzISE=', 'contentTopic': '/test/1/waku-filter/proto', 'version': 0, 'timestamp': 1765685793295327006, 'ephemeral': False}, {'payload': 'RmlsdGVyIHdvcmtzISE=', 'contentTopic': '/test/1/waku-filter/proto', 'version': 0, 'timestamp': 1765685795504675179, 'ephemeral': False}, {'payload': 'RmlsdGVyIHdvcmtzISE=', 'contentTopic': '/test/1/waku-filter/proto', 'version': 0, 'timestamp': 1765685797715494045, 'ephemeral': False}, {'payload': 'RmlsdGVyIHdvcmtzISE=', 'contentTopic': '/test/1/waku-filter/proto', 'version': 0, 'timestamp': 1765685799929712447, 'ephemeral': False}, {'payload': 'RmlsdGVyIHdvcmtzISE=', 'contentTopic': '/test/1/waku-filter/proto', 'version': 0, 'timestamp': 1765685802137802581, 'ephemeral': False}, {'payload': 'RmlsdGVyIHdvcmtzISE=', 'contentTopic': '/test/1/waku-filter/proto', 'version': 0, 'timestamp': 1765685804349658115, 'ephemeral': False}]
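Note: node5 drains its filter cache with a GET to /filter/v2/messages/<url-encoded content topic>; the twelve entries returned above correspond to the 3 senders x 4 messages published earlier. A sketch of retrieving and checking them, assuming the requests library (hypothetical helper):

    import base64
    import urllib.parse
    import requests

    def fetch_filter_messages(rest_port, content_topic):
        # Messages pushed to the filter client are buffered per content topic
        # and returned as a JSON list, as in the response logged above.
        topic = urllib.parse.quote(content_topic, safe="")
        resp = requests.get(
            f"http://127.0.0.1:{rest_port}/filter/v2/messages/{topic}", timeout=10
        )
        resp.raise_for_status()
        return resp.json()

    messages = fetch_filter_messages(24199, "/test/1/waku-filter/proto")
    assert len(messages) == 12  # 3 senders x 4 messages each
    assert all(base64.b64decode(m["payload"]) == b"Filter works!!" for m in messages)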
[35mDEBUG [0m tests.conftest:conftest.py:59 Running fixture teardown: test_setup
[35mDEBUG [0m tests.conftest:conftest.py:83 Running fixture teardown: close_open_nodes
[35mDEBUG [0m src.node.waku_node:waku_node.py:234 Stopping container with id 4e8b2f69e423
[35mDEBUG [0m src.node.waku_node:waku_node.py:241 Container stopped.
[35mDEBUG [0m src.node.waku_node:waku_node.py:234 Stopping container with id 13b3b0c3cbc6
[35mDEBUG [0m src.node.waku_node:waku_node.py:241 Container stopped.
[35mDEBUG [0m src.node.waku_node:waku_node.py:234 Stopping container with id d8205dfc7954
[31m[1mERROR [0m src.node.docker_mananger:docker_mananger.py:89 Max retries reached for container 4e8b2f69e423. Exiting log stream.
[35mDEBUG [0m src.node.waku_node:waku_node.py:241 Container stopped.
[35mDEBUG [0m src.node.waku_node:waku_node.py:234 Stopping container with id 1a18110b8a56
[31m[1mERROR [0m src.node.docker_mananger:docker_mananger.py:89 Max retries reached for container 13b3b0c3cbc6. Exiting log stream.
[31m[1mERROR [0m src.node.docker_mananger:docker_mananger.py:89 Max retries reached for container d8205dfc7954. Exiting log stream.
[35mDEBUG [0m src.node.waku_node:waku_node.py:241 Container stopped.
[35mDEBUG [0m src.node.waku_node:waku_node.py:234 Stopping container with id cfc190b72b8a
[35mDEBUG [0m src.node.waku_node:waku_node.py:241 Container stopped.
[35mDEBUG [0m tests.conftest:conftest.py:98 Running fixture teardown: check_waku_log_errors
[35mDEBUG [0m src.node.docker_mananger:docker_mananger.py:144 No errors found in the waku logs.
[35mDEBUG [0m src.node.docker_mananger:docker_mananger.py:144 No errors found in the waku logs.
[35mDEBUG [0m src.node.docker_mananger:docker_mananger.py:144 No errors found in the waku logs.
[35mDEBUG [0m src.node.docker_mananger:docker_mananger.py:144 No errors found in the waku logs.
[35mDEBUG [0m src.node.docker_mananger:docker_mananger.py:144 No errors found in the waku logs.