Mirror of https://github.com/logos-messaging/logos-messaging-interop-tests.git (synced 2026-01-04 06:53:07 +00:00)
DEBUG   tests.conftest:conftest.py:51 Running fixture setup: test_id
DEBUG   tests.conftest:conftest.py:57 Running test: test_filter_node_not_connected_directly_to_relaying_node with id: 2025-12-13_04-06-19__5014c4dc-e278-4d36-8308-14d3dc289dbd
DEBUG   src.steps.common:common.py:19 Running fixture setup: common_setup
DEBUG   src.steps.filter:filter.py:28 Running fixture setup: filter_setup
DEBUG   src.steps.light_push:light_push.py:28 Running fixture setup: light_push_setup
DEBUG   src.node.docker_mananger:docker_mananger.py:19 Docker client initialized with image wakuorg/nwaku:latest
DEBUG   src.node.waku_node:waku_node.py:86 WakuNode instance initialized with log path ./log/docker/node1_2025-12-13_04-06-19__5014c4dc-e278-4d36-8308-14d3dc289dbd__wakuorg_nwaku:latest.log
DEBUG   src.node.docker_mananger:docker_mananger.py:19 Docker client initialized with image wakuorg/nwaku:latest
DEBUG   src.node.waku_node:waku_node.py:86 WakuNode instance initialized with log path ./log/docker/node2_2025-12-13_04-06-19__5014c4dc-e278-4d36-8308-14d3dc289dbd__wakuorg_nwaku:latest.log
DEBUG   src.node.docker_mananger:docker_mananger.py:19 Docker client initialized with image wakuorg/nwaku:latest
DEBUG   src.node.waku_node:waku_node.py:86 WakuNode instance initialized with log path ./log/docker/node3_2025-12-13_04-06-19__5014c4dc-e278-4d36-8308-14d3dc289dbd__wakuorg_nwaku:latest.log
DEBUG   src.steps.relay:relay.py:28 Running fixture setup: relay_setup
DEBUG   src.steps.store:store.py:31 Running fixture setup: store_setup
DEBUG   src.node.waku_node:waku_node.py:90 Starting Node...
DEBUG   src.node.docker_mananger:docker_mananger.py:22 Attempting to create or retrieve network waku
DEBUG   src.node.docker_mananger:docker_mananger.py:33 Network waku created
DEBUG   src.node.docker_mananger:docker_mananger.py:108 Generated random external IP 172.18.126.227
DEBUG   src.node.docker_mananger:docker_mananger.py:101 Generated ports ['29947', '29948', '29949', '29950', '29951']
DEBUG   src.node.waku_node:waku_node.py:439 RLN credentials were not set
INFO    src.node.waku_node:waku_node.py:176 RLN credentials not set or credential store not available, starting without RLN
DEBUG   src.node.waku_node:waku_node.py:178 Using volumes []
DEBUG   src.node.docker_mananger:docker_mananger.py:49 docker run -i -t -p 29947:29947 -p 29948:29948 -p 29949:29949 -p 29950:29950 -p 29951:29951 wakuorg/nwaku:latest --listen-address=0.0.0.0 --rest=true --rest-admin=true --websocket-support=true --log-level=TRACE --rest-relay-cache-capacity=100 --websocket-port=29949 --rest-port=29947 --tcp-port=29948 --discv5-udp-port=29950 --rest-address=0.0.0.0 --nat=extip:172.18.126.227 --peer-exchange=true --discv5-discovery=true --cluster-id=3 --nodekey=98efcc7cee7ce6c19fde5bf1dbdac68cbaeb08f01cbedfd0feb80cba5db1d7d8 --shard=0 --metrics-server=true --metrics-server-address=0.0.0.0 --metrics-server-port=29951 --metrics-logging=true --filter=true --relay=true
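
NOTE: The docker run line above is the CLI equivalent of what docker_mananger issues through the Docker SDK ("Docker client initialized" above). Below is a minimal sketch of launching a comparable nwaku container with docker-py; the image, ports, network name and flags are taken from this log, while the function name and structure are illustrative and not the repo's actual implementation.

    # Hedged sketch, not the repo's docker_mananger code: start an nwaku container
    # equivalent to the "docker run" line above and attach it to the "waku" network.
    import docker

    def start_nwaku_node(ports, ext_ip, nodekey, extra_flags=()):
        rest, tcp, ws, discv5_udp, metrics = ports   # port order used in this log
        client = docker.from_env()                   # "Docker client initialized"
        flags = [
            "--listen-address=0.0.0.0",
            "--rest=true", "--rest-admin=true", "--rest-address=0.0.0.0",
            f"--rest-port={rest}", f"--tcp-port={tcp}",
            "--websocket-support=true", f"--websocket-port={ws}",
            "--discv5-discovery=true", f"--discv5-udp-port={discv5_udp}",
            f"--nat=extip:{ext_ip}",
            "--peer-exchange=true", "--cluster-id=3", "--shard=0",
            f"--nodekey={nodekey}",
            "--metrics-server=true", f"--metrics-server-port={metrics}",
            "--log-level=TRACE",
            *extra_flags,                            # e.g. --relay / --filter options
        ]
        container = client.containers.run(
            "wakuorg/nwaku:latest",
            command=flags,
            detach=True,
            ports={f"{p}/tcp": p for p in ports},    # publish each port 1:1
        )
        # equivalent of the "docker network connect --ip ..." line that follows
        client.networks.get("waku").connect(container, ipv4_address=ext_ip)
        return container
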
DEBUG   src.node.docker_mananger:docker_mananger.py:55 docker network connect --ip 172.18.126.227 waku c4f9449084f1806ed2a5bb5d3cc811c0ccd47bc63f74569446ead21c282d9e79
DEBUG   src.node.docker_mananger:docker_mananger.py:58 Container started with ID c4f9449084f1. Setting up logs at ./log/docker/node1_2025-12-13_04-06-19__5014c4dc-e278-4d36-8308-14d3dc289dbd__wakuorg_nwaku:latest.log
DEBUG   src.node.waku_node:waku_node.py:190 Started container from image wakuorg/nwaku:latest. REST: 29947
DEBUG   src.libs.common:common.py:47 Sleeping for 1 seconds
INFO    src.node.api_clients.base_client:base_client.py:37 curl -v -X GET "http://127.0.0.1:29947/health" -H "Content-Type: application/json" -d 'None'
INFO    src.node.api_clients.base_client:base_client.py:22 Response status code: 200. Response content: b'{"nodeHealth":"READY","protocolsHealth":[{"Relay":"NOT_READY","desc":"No connected peers"},{"Rln Relay":"NOT_MOUNTED"},{"Lightpush":"NOT_MOUNTED"},{"Legacy Lightpush":"NOT_MOUNTED"},{"Filter":"NOT_READY","desc":"Relay is not ready, filter will not be able to sort out messages"},{"Store":"NOT_MOUNTED"},{"Legacy Store":"NOT_MOUNTED"},{"Peer Exchange":"READY"},{"Rendezvous":"NOT_READY","desc":"No Rendezvous peers are available yet"},{"Mix":"NOT_MOUNTED"},{"Lightpush Client":"NOT_READY","desc":"No Lightpush service peer available yet"},{"Legacy Lightpush Client":"NOT_READY","desc":"No Lightpush service peer available yet"},{"Store Client":"NOT_READY","desc":"No Store service peer available yet, neither Store service set up for the node"},{"Legacy Store Client":"NOT_READY","desc":"No Legacy Store service peers are available yet, neither Store service set up for the node"},{"Filter Client":"NOT_READY","desc":"No Filter service peer available yet"}]}'
INFO    src.node.waku_node:waku_node.py:287 Node protocols are initialized !!
INFO    src.node.api_clients.base_client:base_client.py:37 curl -v -X GET "http://127.0.0.1:29947/debug/v1/info" -H "Content-Type: application/json" -d 'None'
INFO    src.node.api_clients.base_client:base_client.py:22 Response status code: 200. Response content: b'{"listenAddresses":["/ip4/172.18.126.227/tcp/29948/p2p/16Uiu2HAkydu4P6ijccHGP8v8zxjGkZmGafemP3rRv3JCNKwdbiyC","/ip4/172.18.126.227/tcp/29949/ws/p2p/16Uiu2HAkydu4P6ijccHGP8v8zxjGkZmGafemP3rRv3JCNKwdbiyC"],"enrUri":"enr:-L24QAzaByHCLHRQ1t0u55fVHV2ko5-ksRc9zyaEhRc53FwoL8kBtL47bULETX1yCGyGR6DYgeXHWhBLOWHMdizphDoCgmlkgnY0gmlwhKwSfuOKbXVsdGlhZGRyc5YACASsEn7jBnT8AAoErBJ-4wZ0_d0DgnJzhQADAQAAiXNlY3AyNTZrMaECPpCBidstYn1d2v5ieWboGhqBD6UDgnRT_UhAgfe_6M-DdGNwgnT8g3VkcIJ0_oV3YWt1MgU"}'
INFO    src.node.waku_node:waku_node.py:292 REST service is ready !!
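
NOTE: The two GET requests above are node1's readiness loop: poll /health until nodeHealth reports READY, then read /debug/v1/info for the node's listen addresses and ENR. A hedged sketch with requests follows; the endpoint paths and response fields come from the log, while the helper name, retry count and timeouts are assumptions.

    # Hedged sketch of the readiness check above, using the requests library.
    import time
    import requests

    def wait_until_ready(rest_port, attempts=10, delay=1.0):
        base = f"http://127.0.0.1:{rest_port}"
        for _ in range(attempts):
            try:
                health = requests.get(f"{base}/health", timeout=5).json()
                if health.get("nodeHealth") == "READY":
                    info = requests.get(f"{base}/debug/v1/info", timeout=5).json()
                    return info["enrUri"], info["listenAddresses"]
            except requests.RequestException:
                pass  # node may still be starting; retry after a short sleep
            time.sleep(delay)
        raise TimeoutError(f"node on REST port {rest_port} never became READY")

For node1 this returns the enrUri that later appears on node2's command line.
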
DEBUG   src.node.waku_node:waku_node.py:90 Starting Node...
DEBUG   src.node.docker_mananger:docker_mananger.py:22 Attempting to create or retrieve network waku
DEBUG   src.node.docker_mananger:docker_mananger.py:25 Network waku already exists
DEBUG   src.node.docker_mananger:docker_mananger.py:108 Generated random external IP 172.18.120.213
DEBUG   src.node.docker_mananger:docker_mananger.py:101 Generated ports ['62516', '62517', '62518', '62519', '62520']
DEBUG   src.node.waku_node:waku_node.py:439 RLN credentials were not set
INFO    src.node.waku_node:waku_node.py:176 RLN credentials not set or credential store not available, starting without RLN
DEBUG   src.node.waku_node:waku_node.py:178 Using volumes []
DEBUG   src.node.docker_mananger:docker_mananger.py:49 docker run -i -t -p 62516:62516 -p 62517:62517 -p 62518:62518 -p 62519:62519 -p 62520:62520 wakuorg/nwaku:latest --listen-address=0.0.0.0 --rest=true --rest-admin=true --websocket-support=true --log-level=TRACE --rest-relay-cache-capacity=100 --websocket-port=62518 --rest-port=62516 --tcp-port=62517 --discv5-udp-port=62519 --rest-address=0.0.0.0 --nat=extip:172.18.120.213 --peer-exchange=true --discv5-discovery=true --cluster-id=3 --nodekey=e33f0968444aaaeaeb7f9643bdfdbbd3dfbdcbccafefcc7469cffd8e5e9c40ac --shard=0 --metrics-server=true --metrics-server-address=0.0.0.0 --metrics-server-port=62520 --metrics-logging=true --filter=true --relay=true --discv5-bootstrap-node=enr:-L24QAzaByHCLHRQ1t0u55fVHV2ko5-ksRc9zyaEhRc53FwoL8kBtL47bULETX1yCGyGR6DYgeXHWhBLOWHMdizphDoCgmlkgnY0gmlwhKwSfuOKbXVsdGlhZGRyc5YACASsEn7jBnT8AAoErBJ-4wZ0_d0DgnJzhQADAQAAiXNlY3AyNTZrMaECPpCBidstYn1d2v5ieWboGhqBD6UDgnRT_UhAgfe_6M-DdGNwgnT8g3VkcIJ0_oV3YWt1MgU
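
NOTE: Node2's command line above differs from node1's mainly by the --discv5-bootstrap-node flag, which carries node1's enrUri as returned by /debug/v1/info. A short sketch of how such a flag could be assembled (the helper name is hypothetical, not the repo's code):

    # Assumed helper: build the --discv5-bootstrap-node flag from another node's
    # /debug/v1/info response.
    import requests

    def bootstrap_flag_from(rest_port):
        info = requests.get(f"http://127.0.0.1:{rest_port}/debug/v1/info", timeout=5).json()
        return f"--discv5-bootstrap-node={info['enrUri']}"

    # e.g. flags.append(bootstrap_flag_from(29947)) before starting node2
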
DEBUG   src.node.docker_mananger:docker_mananger.py:55 docker network connect --ip 172.18.120.213 waku d1b49cc152da3508d679f4c32b1d370db7d99a5d86659d4a3a73a9683f68c23f
DEBUG   src.node.docker_mananger:docker_mananger.py:58 Container started with ID d1b49cc152da. Setting up logs at ./log/docker/node2_2025-12-13_04-06-19__5014c4dc-e278-4d36-8308-14d3dc289dbd__wakuorg_nwaku:latest.log
DEBUG   src.node.waku_node:waku_node.py:190 Started container from image wakuorg/nwaku:latest. REST: 62516
DEBUG   src.libs.common:common.py:47 Sleeping for 1 seconds
INFO    src.node.api_clients.base_client:base_client.py:37 curl -v -X GET "http://127.0.0.1:62516/health" -H "Content-Type: application/json" -d 'None'
INFO    src.node.api_clients.base_client:base_client.py:22 Response status code: 200. Response content: b'{"nodeHealth":"READY","protocolsHealth":[{"Relay":"NOT_READY","desc":"No connected peers"},{"Rln Relay":"NOT_MOUNTED"},{"Lightpush":"NOT_MOUNTED"},{"Legacy Lightpush":"NOT_MOUNTED"},{"Filter":"NOT_READY","desc":"Relay is not ready, filter will not be able to sort out messages"},{"Store":"NOT_MOUNTED"},{"Legacy Store":"NOT_MOUNTED"},{"Peer Exchange":"READY"},{"Rendezvous":"NOT_READY","desc":"No Rendezvous peers are available yet"},{"Mix":"NOT_MOUNTED"},{"Lightpush Client":"NOT_READY","desc":"No Lightpush service peer available yet"},{"Legacy Lightpush Client":"NOT_READY","desc":"No Lightpush service peer available yet"},{"Store Client":"NOT_READY","desc":"No Store service peer available yet, neither Store service set up for the node"},{"Legacy Store Client":"NOT_READY","desc":"No Legacy Store service peers are available yet, neither Store service set up for the node"},{"Filter Client":"READY"}]}'
INFO    src.node.waku_node:waku_node.py:287 Node protocols are initialized !!
INFO    src.node.api_clients.base_client:base_client.py:37 curl -v -X GET "http://127.0.0.1:62516/debug/v1/info" -H "Content-Type: application/json" -d 'None'
INFO    src.node.api_clients.base_client:base_client.py:22 Response status code: 200. Response content: b'{"listenAddresses":["/ip4/172.18.120.213/tcp/62517/p2p/16Uiu2HAkwVZGsHfp872UZRHJmfsYRJ5vmVxWvsTF8H43N4apykgK","/ip4/172.18.120.213/tcp/62518/ws/p2p/16Uiu2HAkwVZGsHfp872UZRHJmfsYRJ5vmVxWvsTF8H43N4apykgK"],"enrUri":"enr:-L24QBxQA2Svk28mRLC8LKxC_RLalXMqvNrz6oFMlGin-aHjKUU2uL0ZceajuB-DFNO_NiJsgUPkKgxonY7BO-pbJOkCgmlkgnY0gmlwhKwSeNWKbXVsdGlhZGRyc5YACASsEnjVBvQ1AAoErBJ41Qb0Nt0DgnJzhQADAQAAiXNlY3AyNTZrMaECHrYdclUf4srjgUcTk7AjiWCQMV7rOS4_4xkzuwJHPYSDdGNwgvQ1g3VkcIL0N4V3YWt1MgU"}'
INFO    src.node.waku_node:waku_node.py:292 REST service is ready !!
DEBUG   src.node.waku_node:waku_node.py:90 Starting Node...
DEBUG   src.node.docker_mananger:docker_mananger.py:22 Attempting to create or retrieve network waku
DEBUG   src.node.docker_mananger:docker_mananger.py:25 Network waku already exists
DEBUG   src.node.docker_mananger:docker_mananger.py:108 Generated random external IP 172.18.179.37
DEBUG   src.node.docker_mananger:docker_mananger.py:101 Generated ports ['1906', '1907', '1908', '1909', '1910']
DEBUG   src.node.waku_node:waku_node.py:439 RLN credentials were not set
INFO    src.node.waku_node:waku_node.py:176 RLN credentials not set or credential store not available, starting without RLN
DEBUG   src.node.waku_node:waku_node.py:178 Using volumes []
DEBUG   src.node.docker_mananger:docker_mananger.py:49 docker run -i -t -p 1906:1906 -p 1907:1907 -p 1908:1908 -p 1909:1909 -p 1910:1910 wakuorg/nwaku:latest --listen-address=0.0.0.0 --rest=true --rest-admin=true --websocket-support=true --log-level=TRACE --rest-relay-cache-capacity=100 --websocket-port=1908 --rest-port=1906 --tcp-port=1907 --discv5-udp-port=1909 --rest-address=0.0.0.0 --nat=extip:172.18.179.37 --peer-exchange=true --discv5-discovery=true --cluster-id=3 --nodekey=aa8699102113da8afce2cee3da9a9c3a3cfdb8d509bfd90e8fafacaf1b47cf1b --shard=0 --metrics-server=true --metrics-server-address=0.0.0.0 --metrics-server-port=1910 --metrics-logging=true --relay=false --filternode=/ip4/172.18.120.213/tcp/62517/p2p/16Uiu2HAkwVZGsHfp872UZRHJmfsYRJ5vmVxWvsTF8H43N4apykgK --discv5-bootstrap-node=enr:-L24QBxQA2Svk28mRLC8LKxC_RLalXMqvNrz6oFMlGin-aHjKUU2uL0ZceajuB-DFNO_NiJsgUPkKgxonY7BO-pbJOkCgmlkgnY0gmlwhKwSeNWKbXVsdGlhZGRyc5YACASsEnjVBvQ1AAoErBJ41Qb0Nt0DgnJzhQADAQAAiXNlY3AyNTZrMaECHrYdclUf4srjgUcTk7AjiWCQMV7rOS4_4xkzuwJHPYSDdGNwgvQ1g3VkcIL0N4V3YWt1MgU
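
NOTE: Node3's command line above is what this test is about: relay is disabled and --filternode points at node2's multiaddr, so node3 can only receive messages through node2's filter service and never connects directly to the publishing relay node. An illustrative sketch of just those distinguishing flags (variable names are made up):

    # Illustrative only: the flags that turn node3 into a pure filter client.
    node2_maddr = "/ip4/172.18.120.213/tcp/62517/p2p/16Uiu2HAkwVZGsHfp872UZRHJmfsYRJ5vmVxWvsTF8H43N4apykgK"
    filter_client_flags = [
        "--relay=false",                 # node3 does not join the relay mesh
        f"--filternode={node2_maddr}",   # subscribe through node2's filter service
    ]
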
DEBUG   src.node.docker_mananger:docker_mananger.py:55 docker network connect --ip 172.18.179.37 waku bae63a65685cb48b51ca2a33fc8ae5dc5ec3e0c19e2df6b119137618d98ad241
DEBUG   src.node.docker_mananger:docker_mananger.py:58 Container started with ID bae63a65685c. Setting up logs at ./log/docker/node3_2025-12-13_04-06-19__5014c4dc-e278-4d36-8308-14d3dc289dbd__wakuorg_nwaku:latest.log
DEBUG   src.node.waku_node:waku_node.py:190 Started container from image wakuorg/nwaku:latest. REST: 1906
DEBUG   src.libs.common:common.py:47 Sleeping for 1 seconds
INFO    src.node.api_clients.base_client:base_client.py:37 curl -v -X GET "http://127.0.0.1:1906/health" -H "Content-Type: application/json" -d 'None'
INFO    src.node.api_clients.base_client:base_client.py:22 Response status code: 200. Response content: b'{"nodeHealth":"READY","protocolsHealth":[{"Relay":"NOT_MOUNTED"},{"Rln Relay":"NOT_MOUNTED"},{"Lightpush":"NOT_MOUNTED"},{"Legacy Lightpush":"NOT_MOUNTED"},{"Filter":"NOT_MOUNTED"},{"Store":"NOT_MOUNTED"},{"Legacy Store":"NOT_MOUNTED"},{"Peer Exchange":"READY"},{"Rendezvous":"NOT_READY","desc":"No Rendezvous peers are available yet"},{"Mix":"NOT_MOUNTED"},{"Lightpush Client":"NOT_READY","desc":"No Lightpush service peer available yet"},{"Legacy Lightpush Client":"NOT_READY","desc":"No Lightpush service peer available yet"},{"Store Client":"NOT_READY","desc":"No Store service peer available yet, neither Store service set up for the node"},{"Legacy Store Client":"NOT_READY","desc":"No Legacy Store service peers are available yet, neither Store service set up for the node"},{"Filter Client":"READY"}]}'
INFO    src.node.waku_node:waku_node.py:287 Node protocols are initialized !!
INFO    src.node.api_clients.base_client:base_client.py:37 curl -v -X GET "http://127.0.0.1:1906/debug/v1/info" -H "Content-Type: application/json" -d 'None'
INFO    src.node.api_clients.base_client:base_client.py:22 Response status code: 200. Response content: b'{"listenAddresses":["/ip4/172.18.179.37/tcp/1907/p2p/16Uiu2HAmHXCr9bFjSS8kMKCs8TpdNsNXFzszvtEdVhLnoxdVPVvn","/ip4/172.18.179.37/tcp/1908/ws/p2p/16Uiu2HAmHXCr9bFjSS8kMKCs8TpdNsNXFzszvtEdVhLnoxdVPVvn"],"enrUri":"enr:-L24QEdNF5y2KGCUK8BS0zxb8J7CpxI0Vrpu7hUYeHbhF9xGAmsXXWrWdA5dHdySKxsipUJSWctX_PXN1kyOpgFWQRICgmlkgnY0gmlwhKwSsyWKbXVsdGlhZGRyc5YACASsErMlBgdzAAoErBKzJQYHdN0DgnJzhQADAQAAiXNlY3AyNTZrMaEDSEvrUR3fFUb0yXWIlo9N_kVSFcM03nzwDC5Ku6P7SY-DdGNwggdzg3VkcIIHdYV3YWt1MgA"}'
INFO    src.node.waku_node:waku_node.py:292 REST service is ready !!
INFO    src.node.api_clients.base_client:base_client.py:37 curl -v -X POST "http://127.0.0.1:29947/relay/v1/subscriptions" -H "Content-Type: application/json" -d '["/waku/2/rs/3/1"]'
INFO    src.node.api_clients.base_client:base_client.py:22 Response status code: 200. Response content: b'OK'
INFO    src.node.api_clients.base_client:base_client.py:37 curl -v -X POST "http://127.0.0.1:62516/relay/v1/subscriptions" -H "Content-Type: application/json" -d '["/waku/2/rs/3/1"]'
INFO    src.node.api_clients.base_client:base_client.py:22 Response status code: 200. Response content: b'OK'
INFO    src.node.api_clients.base_client:base_client.py:37 curl -v -X POST "http://127.0.0.1:1906/filter/v2/subscriptions" -H "Content-Type: application/json" -d '{"requestId": "1", "contentFilters": ["/test/1/waku-filter/proto"], "pubsubTopic": "/waku/2/rs/3/1"}'
INFO    src.node.api_clients.base_client:base_client.py:22 Response status code: 200. Response content: b'{"requestId":"1","statusDesc":"OK"}'
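
NOTE: The three POSTs above set up the routing for the test: node1 and node2 subscribe their relay to pubsub topic /waku/2/rs/3/1, and node3 registers a filter subscription for content topic /test/1/waku-filter/proto through node2. A hedged requests sketch; URLs, topics and request bodies come from the log, helper names are illustrative.

    # Hedged sketch of the three subscription calls above.
    import requests

    PUBSUB_TOPIC = "/waku/2/rs/3/1"
    CONTENT_TOPIC = "/test/1/waku-filter/proto"

    def relay_subscribe(rest_port):
        # node1 (29947) and node2 (62516) join the shard's pubsub topic
        r = requests.post(f"http://127.0.0.1:{rest_port}/relay/v1/subscriptions",
                          json=[PUBSUB_TOPIC], timeout=5)
        r.raise_for_status()

    def filter_subscribe(rest_port, request_id="1"):
        # node3 (1906) subscribes to the content topic via node2's filter service
        r = requests.post(f"http://127.0.0.1:{rest_port}/filter/v2/subscriptions",
                          json={"requestId": request_id,
                                "contentFilters": [CONTENT_TOPIC],
                                "pubsubTopic": PUBSUB_TOPIC},
                          timeout=5)
        r.raise_for_status()
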
INFO    src.node.api_clients.base_client:base_client.py:37 curl -v -X GET "http://127.0.0.1:29947/admin/v1/peers" -H "Content-Type: application/json" -d 'None'
INFO    src.node.api_clients.base_client:base_client.py:22 Response status code: 200. Response content: b'[{"multiaddr":"/ip4/172.18.120.213/tcp/45486/p2p/16Uiu2HAkwVZGsHfp872UZRHJmfsYRJ5vmVxWvsTF8H43N4apykgK","protocols":["/ipfs/id/1.0.0","/libp2p/autonat/1.0.0","/libp2p/circuit/relay/0.2.0/hop","/vac/waku/metadata/1.0.0","/vac/waku/relay/2.0.0","/vac/waku/rendezvous/1.0.0","/ipfs/ping/1.0.0","/vac/waku/filter-subscribe/2.0.0-beta1","/vac/waku/filter-push/2.0.0-beta1","/vac/waku/peer-exchange/2.0.0-alpha1"],"shards":[0],"connected":"Connected","agent":"nwaku-v0.36.0-114-g7d1c6a","origin":"UnknownOrigin"}]'
INFO    src.node.api_clients.base_client:base_client.py:37 curl -v -X GET "http://127.0.0.1:62516/admin/v1/peers" -H "Content-Type: application/json" -d 'None'
INFO    src.node.api_clients.base_client:base_client.py:22 Response status code: 200. Response content: b'[{"multiaddr":"/ip4/172.18.179.37/tcp/44632/p2p/16Uiu2HAmHXCr9bFjSS8kMKCs8TpdNsNXFzszvtEdVhLnoxdVPVvn","protocols":["/ipfs/id/1.0.0","/libp2p/autonat/1.0.0","/libp2p/circuit/relay/0.2.0/hop","/vac/waku/metadata/1.0.0","/vac/waku/rendezvous/1.0.0","/ipfs/ping/1.0.0","/vac/waku/filter-push/2.0.0-beta1","/vac/waku/peer-exchange/2.0.0-alpha1"],"shards":[],"connected":"Connected","agent":"nwaku-v0.36.0-114-g7d1c6a","origin":"UnknownOrigin"},{"multiaddr":"/ip4/172.18.126.227/tcp/29948/p2p/16Uiu2HAkydu4P6ijccHGP8v8zxjGkZmGafemP3rRv3JCNKwdbiyC","protocols":["/ipfs/id/1.0.0","/libp2p/autonat/1.0.0","/libp2p/circuit/relay/0.2.0/hop","/vac/waku/metadata/1.0.0","/vac/waku/relay/2.0.0","/vac/waku/rendezvous/1.0.0","/ipfs/ping/1.0.0","/vac/waku/filter-subscribe/2.0.0-beta1","/vac/waku/filter-push/2.0.0-beta1","/vac/waku/peer-exchange/2.0.0-alpha1"],"shards":[0],"connected":"Connected","agent":"nwaku-v0.36.0-114-g7d1c6a","origin":"Discv5"}]'
INFO    src.node.api_clients.base_client:base_client.py:37 curl -v -X GET "http://127.0.0.1:1906/admin/v1/peers" -H "Content-Type: application/json" -d 'None'
INFO    src.node.api_clients.base_client:base_client.py:22 Response status code: 200. Response content: b'[{"multiaddr":"/ip4/172.18.120.213/tcp/62517/p2p/16Uiu2HAkwVZGsHfp872UZRHJmfsYRJ5vmVxWvsTF8H43N4apykgK","protocols":["/ipfs/id/1.0.0","/libp2p/autonat/1.0.0","/libp2p/circuit/relay/0.2.0/hop","/vac/waku/metadata/1.0.0","/vac/waku/relay/2.0.0","/vac/waku/rendezvous/1.0.0","/ipfs/ping/1.0.0","/vac/waku/filter-subscribe/2.0.0-beta1","/vac/waku/filter-push/2.0.0-beta1","/vac/waku/peer-exchange/2.0.0-alpha1"],"shards":[0,1],"connected":"Connected","agent":"nwaku-v0.36.0-114-g7d1c6a","origin":"Discv5"}]'
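
NOTE: The three /admin/v1/peers responses above confirm the topology under test: node1 is connected only to node2, node2 is connected to both node1 and node3, and node3 is connected only to node2, so the filter node has no direct link to the relaying node that will publish. A sketch of that assertion (the peer ID is copied from the log; the helper name is assumed):

    # Sketch of the topology check implied by the peer listings above.
    import requests

    def connected_peer_ids(rest_port):
        peers = requests.get(f"http://127.0.0.1:{rest_port}/admin/v1/peers", timeout=5).json()
        return {p["multiaddr"].rsplit("/p2p/", 1)[-1] for p in peers}

    node1_peer_id = "16Uiu2HAkydu4P6ijccHGP8v8zxjGkZmGafemP3rRv3JCNKwdbiyC"
    assert node1_peer_id not in connected_peer_ids(1906), \
        "filter node must not peer directly with the relaying node"
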
DEBUG   src.libs.common:common.py:47 Sleeping for 30 seconds
INFO    src.node.api_clients.base_client:base_client.py:37 curl -v -X POST "http://127.0.0.1:29947/relay/v1/messages/%2Fwaku%2F2%2Frs%2F3%2F1" -H "Content-Type: application/json" -d '{"payload": "RmlsdGVyIHdvcmtzISE=", "contentTopic": "/test/1/waku-filter/proto", "timestamp": '$(date +%s%N)'}'
INFO    src.node.api_clients.base_client:base_client.py:22 Response status code: 200. Response content: b'OK'
DEBUG   src.libs.common:common.py:47 Sleeping for 1 seconds
DEBUG   src.steps.filter:filter.py:96 Checking that peer NODE_2:wakuorg/nwaku:latest can find the published message
INFO    src.node.api_clients.base_client:base_client.py:37 curl -v -X GET "http://127.0.0.1:1906/filter/v2/messages/%2Ftest%2F1%2Fwaku-filter%2Fproto" -H "Content-Type: application/json" -d 'None'
INFO    src.node.api_clients.base_client:base_client.py:22 Response status code: 200. Response content: b'[{"payload":"RmlsdGVyIHdvcmtzISE=","contentTopic":"/test/1/waku-filter/proto","version":0,"timestamp":1765598816116675764,"ephemeral":false}]'
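
NOTE: The final step publishes on node1 and reads back on node3: the payload RmlsdGVyIHdvcmtzISE= is base64 for "Filter works!!", posted on pubsub topic /waku/2/rs/3/1 via node1's relay API, and after a short wait it appears in node3's /filter/v2/messages for the content topic. A hedged sketch of the same round trip (ports, topics and payload come from the log; the waiting strategy is illustrative):

    # Hedged sketch of the publish/receive round trip above.
    import base64
    import time
    import urllib.parse
    import requests

    payload = base64.b64encode(b"Filter works!!").decode()   # "RmlsdGVyIHdvcmtzISE="
    pubsub = urllib.parse.quote("/waku/2/rs/3/1", safe="")
    content = urllib.parse.quote("/test/1/waku-filter/proto", safe="")

    # node1 publishes the message on the relay
    requests.post(
        f"http://127.0.0.1:29947/relay/v1/messages/{pubsub}",
        json={"payload": payload,
              "contentTopic": "/test/1/waku-filter/proto",
              "timestamp": time.time_ns()},
        timeout=5,
    ).raise_for_status()

    time.sleep(1)  # let the message travel node1 -> node2 -> node3 (filter push)

    # node3 reads it back from the filter message cache
    msgs = requests.get(f"http://127.0.0.1:1906/filter/v2/messages/{content}", timeout=5).json()
    assert any(m["payload"] == payload for m in msgs)
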
DEBUG   tests.conftest:conftest.py:59 Running fixture teardown: test_setup
DEBUG   tests.conftest:conftest.py:83 Running fixture teardown: close_open_nodes
DEBUG   src.node.waku_node:waku_node.py:234 Stopping container with id c4f9449084f1
DEBUG   src.node.waku_node:waku_node.py:241 Container stopped.
DEBUG   src.node.waku_node:waku_node.py:234 Stopping container with id d1b49cc152da
DEBUG   src.node.waku_node:waku_node.py:241 Container stopped.
DEBUG   src.node.waku_node:waku_node.py:234 Stopping container with id bae63a65685c
ERROR   src.node.docker_mananger:docker_mananger.py:89 Max retries reached for container c4f9449084f1. Exiting log stream.
DEBUG   src.node.waku_node:waku_node.py:241 Container stopped.
DEBUG   tests.conftest:conftest.py:98 Running fixture teardown: check_waku_log_errors
DEBUG   src.node.docker_mananger:docker_mananger.py:144 No errors found in the waku logs.
DEBUG   src.node.docker_mananger:docker_mananger.py:144 No errors found in the waku logs.
DEBUG   src.node.docker_mananger:docker_mananger.py:144 No errors found in the waku logs.