Mirror of https://github.com/logos-messaging/logos-messaging-interop-tests.git (synced 2026-01-05 23:43:07 +00:00)
DEBUG    tests.conftest:conftest.py:51 Running fixture setup: test_id
DEBUG    tests.conftest:conftest.py:57 Running test: test_invalid_hash_param with id: 2025-12-18_04-20-05__edd7c489-a96b-468e-8de9-3ff269e02ccc
DEBUG    src.steps.common:common.py:19 Running fixture setup: common_setup
DEBUG    src.steps.store:store.py:31 Running fixture setup: store_setup
DEBUG    src.steps.store:store.py:39 Running fixture setup: node_setup
DEBUG    src.node.docker_mananger:docker_mananger.py:19 Docker client initialized with image wakuorg/nwaku:latest
DEBUG    src.node.waku_node:waku_node.py:86 WakuNode instance initialized with log path ./log/docker/publishing_node1_2025-12-18_04-20-05__edd7c489-a96b-468e-8de9-3ff269e02ccc__wakuorg_nwaku:latest.log
DEBUG    src.node.waku_node:waku_node.py:90 Starting Node...
DEBUG    src.node.docker_mananger:docker_mananger.py:22 Attempting to create or retrieve network waku
DEBUG    src.node.docker_mananger:docker_mananger.py:25 Network waku already exists
DEBUG    src.node.docker_mananger:docker_mananger.py:108 Generated random external IP 172.18.180.187
DEBUG    src.node.docker_mananger:docker_mananger.py:101 Generated ports ['10742', '10743', '10744', '10745', '10746']
DEBUG    src.node.waku_node:waku_node.py:439 RLN credentials were not set
INFO     src.node.waku_node:waku_node.py:176 RLN credentials not set or credential store not available, starting without RLN
DEBUG    src.node.waku_node:waku_node.py:178 Using volumes []
DEBUG    src.node.docker_mananger:docker_mananger.py:49 docker run -i -t -p 10742:10742 -p 10743:10743 -p 10744:10744 -p 10745:10745 -p 10746:10746 wakuorg/nwaku:latest --listen-address=0.0.0.0 --rest=true --rest-admin=true --websocket-support=true --log-level=TRACE --rest-relay-cache-capacity=100 --websocket-port=10744 --rest-port=10742 --tcp-port=10743 --discv5-udp-port=10745 --rest-address=0.0.0.0 --nat=extip:172.18.180.187 --peer-exchange=true --discv5-discovery=true --cluster-id=3 --nodekey=9dedef7b90af04c0a4c0eb6fbfb3baae2dbfee6dcf223f1bb9be93a825e2e609 --shard=0 --metrics-server=true --metrics-server-address=0.0.0.0 --metrics-server-port=10746 --metrics-logging=true --store=true --relay=true
DEBUG    src.node.docker_mananger:docker_mananger.py:55 docker network connect --ip 172.18.180.187 waku c55c877a24382d8d75e5a17f01d13764e940c49ce526ca25205ef515412b393f
DEBUG    src.node.docker_mananger:docker_mananger.py:58 Container started with ID c55c877a2438. Setting up logs at ./log/docker/publishing_node1_2025-12-18_04-20-05__edd7c489-a96b-468e-8de9-3ff269e02ccc__wakuorg_nwaku:latest.log
DEBUG    src.node.waku_node:waku_node.py:190 Started container from image wakuorg/nwaku:latest. REST: 10742
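The docker run line above maps directly onto the Docker SDK for Python if you want to start a comparable publishing node outside the harness. The following is a minimal sketch only: the image tag, ports, and flags are copied from this log, but the helper name and the trimmed flag list are illustrative assumptions, not the harness's actual docker_mananger code.

    import docker

    def start_nwaku_node(rest_port=10742, tcp_port=10743, ext_ip="172.18.180.187"):
        # Sketch: run an nwaku container with the core flags logged above
        # (REST + relay + store on cluster 3, shard 0).
        client = docker.from_env()
        flags = [
            "--listen-address=0.0.0.0",
            "--rest=true",
            f"--rest-port={rest_port}",
            f"--tcp-port={tcp_port}",
            "--rest-address=0.0.0.0",
            f"--nat=extip:{ext_ip}",
            "--cluster-id=3",
            "--shard=0",
            "--store=true",
            "--relay=true",
        ]
        return client.containers.run(
            "wakuorg/nwaku:latest",
            command=flags,
            ports={f"{rest_port}/tcp": rest_port, f"{tcp_port}/tcp": tcp_port},
            detach=True,
        )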
DEBUG    src.libs.common:common.py:47 Sleeping for 1 seconds
ERROR    src.node.docker_mananger:docker_mananger.py:89 Max retries reached for container 8432d75c3ef7. Exiting log stream.
ERROR    src.node.docker_mananger:docker_mananger.py:89 Max retries reached for container 21c832358be5. Exiting log stream.
INFO     src.node.api_clients.base_client:base_client.py:37 curl -v -X GET "http://127.0.0.1:10742/health" -H "Content-Type: application/json" -d 'None'
INFO     src.node.api_clients.base_client:base_client.py:22 Response status code: 200. Response content: b'{"nodeHealth":"READY","protocolsHealth":[{"Relay":"NOT_READY","desc":"No connected peers"},{"Rln Relay":"NOT_MOUNTED"},{"Lightpush":"NOT_MOUNTED"},{"Legacy Lightpush":"NOT_MOUNTED"},{"Filter":"NOT_MOUNTED"},{"Store":"READY"},{"Legacy Store":"NOT_MOUNTED"},{"Peer Exchange":"READY"},{"Rendezvous":"NOT_READY","desc":"No Rendezvous peers are available yet"},{"Mix":"NOT_MOUNTED"},{"Lightpush Client":"NOT_READY","desc":"No Lightpush service peer available yet"},{"Legacy Lightpush Client":"NOT_READY","desc":"No Lightpush service peer available yet"},{"Store Client":"READY"},{"Legacy Store Client":"NOT_READY","desc":"No Legacy Store service peers are available yet, neither Store service set up for the node"},{"Filter Client":"NOT_READY","desc":"No Filter service peer available yet"}]}'
INFO     src.node.waku_node:waku_node.py:287 Node protocols are initialized !!
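The /health response above is what the harness waits for before proceeding: nodeHealth must be READY, even though individual protocols such as Relay can still report NOT_READY until peers connect. A minimal polling sketch against that endpoint, assuming the requests library and the REST port from this run (the harness itself issues curl through its base_client, so this is illustrative only):

    import time
    import requests

    def wait_until_ready(rest_port=10742, timeout=30):
        # Poll the nwaku REST /health endpoint until the node reports READY.
        url = f"http://127.0.0.1:{rest_port}/health"
        deadline = time.time() + timeout
        while time.time() < deadline:
            try:
                health = requests.get(url, timeout=2).json()
                if health.get("nodeHealth") == "READY":
                    return health
            except requests.RequestException:
                pass  # node may not be listening yet
            time.sleep(1)
        raise TimeoutError(f"{url} did not report READY within {timeout}s")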
INFO     src.node.api_clients.base_client:base_client.py:37 curl -v -X GET "http://127.0.0.1:10742/debug/v1/info" -H "Content-Type: application/json" -d 'None'
INFO     src.node.api_clients.base_client:base_client.py:22 Response status code: 200. Response content: b'{"listenAddresses":["/ip4/172.18.180.187/tcp/10743/p2p/16Uiu2HAky3DZoEgTprSgFswSDU98TpMrL4wbKCyaUCaytx3nmdD7","/ip4/172.18.180.187/tcp/10744/ws/p2p/16Uiu2HAky3DZoEgTprSgFswSDU98TpMrL4wbKCyaUCaytx3nmdD7"],"enrUri":"enr:-L24QK9acU8FVaXHsRItoNf_RrTCIRMDEQb8SbSwRUBABKHLX8dVWtyQ_sUfXo5H6L5piEFPsK_R4jMNZIRLRMeIucgCgmlkgnY0gmlwhKwStLuKbXVsdGlhZGRyc5YACASsErS7Bin3AAoErBK0uwYp-N0DgnJzhQADAQAAiXNlY3AyNTZrMaECNa4alBEB1EfuM6Eb-FN1zZk64ShGoa0HH0FvZ8ZR5v6DdGNwgin3g3VkcIIp-YV3YWt1MgM"}'
INFO     src.node.waku_node:waku_node.py:292 REST service is ready !!
DEBUG    src.node.docker_mananger:docker_mananger.py:19 Docker client initialized with image wakuorg/nwaku:latest
DEBUG    src.node.waku_node:waku_node.py:86 WakuNode instance initialized with log path ./log/docker/store_node1_2025-12-18_04-20-05__edd7c489-a96b-468e-8de9-3ff269e02ccc__wakuorg_nwaku:latest.log
DEBUG    src.node.waku_node:waku_node.py:90 Starting Node...
DEBUG    src.node.docker_mananger:docker_mananger.py:22 Attempting to create or retrieve network waku
DEBUG    src.node.docker_mananger:docker_mananger.py:25 Network waku already exists
DEBUG    src.node.docker_mananger:docker_mananger.py:108 Generated random external IP 172.18.122.181
DEBUG    src.node.docker_mananger:docker_mananger.py:101 Generated ports ['13556', '13557', '13558', '13559', '13560']
DEBUG    src.node.waku_node:waku_node.py:439 RLN credentials were not set
INFO     src.node.waku_node:waku_node.py:176 RLN credentials not set or credential store not available, starting without RLN
DEBUG    src.node.waku_node:waku_node.py:178 Using volumes []
DEBUG    src.node.docker_mananger:docker_mananger.py:49 docker run -i -t -p 13556:13556 -p 13557:13557 -p 13558:13558 -p 13559:13559 -p 13560:13560 wakuorg/nwaku:latest --listen-address=0.0.0.0 --rest=true --rest-admin=true --websocket-support=true --log-level=TRACE --rest-relay-cache-capacity=100 --websocket-port=13558 --rest-port=13556 --tcp-port=13557 --discv5-udp-port=13559 --rest-address=0.0.0.0 --nat=extip:172.18.122.181 --peer-exchange=true --discv5-discovery=true --cluster-id=3 --nodekey=2e2bb8abc05ad81aae2e7ee826f697e53648d2a0a0c7fccceaaa0bb6ca5ccd7b --shard=0 --metrics-server=true --metrics-server-address=0.0.0.0 --metrics-server-port=13560 --metrics-logging=true --discv5-bootstrap-node=enr:-L24QK9acU8FVaXHsRItoNf_RrTCIRMDEQb8SbSwRUBABKHLX8dVWtyQ_sUfXo5H6L5piEFPsK_R4jMNZIRLRMeIucgCgmlkgnY0gmlwhKwStLuKbXVsdGlhZGRyc5YACASsErS7Bin3AAoErBK0uwYp-N0DgnJzhQADAQAAiXNlY3AyNTZrMaECNa4alBEB1EfuM6Eb-FN1zZk64ShGoa0HH0FvZ8ZR5v6DdGNwgin3g3VkcIIp-YV3YWt1MgM --storenode=/ip4/172.18.180.187/tcp/10743/p2p/16Uiu2HAky3DZoEgTprSgFswSDU98TpMrL4wbKCyaUCaytx3nmdD7 --store=true --relay=true
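Note how the second (store) node is wired to the first: --discv5-bootstrap-node is set to the enrUri returned by node 1's /debug/v1/info call earlier, and --storenode is node 1's plain TCP multiaddr from the same response. A hedged sketch of deriving those two flags from the debug info endpoint; the helper name is illustrative, not the harness's own:

    import requests

    def bootstrap_flags(publisher_rest_port=10742):
        # Read node 1's ENR and multiaddrs, then build the flags node 2 was
        # started with in the log above.
        info = requests.get(
            f"http://127.0.0.1:{publisher_rest_port}/debug/v1/info", timeout=5
        ).json()
        enr = info["enrUri"]
        # Pick the plain TCP multiaddr (the /ws/ entry is the websocket listener).
        tcp_addr = next(a for a in info["listenAddresses"] if "/ws/" not in a)
        return [
            f"--discv5-bootstrap-node={enr}",
            f"--storenode={tcp_addr}",
        ]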
DEBUG    src.node.docker_mananger:docker_mananger.py:55 docker network connect --ip 172.18.122.181 waku a5007fc07e25098cc5df77c7ca077e1be68f214d7ca2ee09adfd1bc9eeace29a
DEBUG    src.node.docker_mananger:docker_mananger.py:58 Container started with ID a5007fc07e25. Setting up logs at ./log/docker/store_node1_2025-12-18_04-20-05__edd7c489-a96b-468e-8de9-3ff269e02ccc__wakuorg_nwaku:latest.log
DEBUG    src.node.waku_node:waku_node.py:190 Started container from image wakuorg/nwaku:latest. REST: 13556
DEBUG    src.libs.common:common.py:47 Sleeping for 1 seconds
INFO     src.node.api_clients.base_client:base_client.py:37 curl -v -X GET "http://127.0.0.1:13556/health" -H "Content-Type: application/json" -d 'None'
INFO     src.node.api_clients.base_client:base_client.py:22 Response status code: 200. Response content: b'{"nodeHealth":"READY","protocolsHealth":[{"Relay":"READY"},{"Rln Relay":"NOT_MOUNTED"},{"Lightpush":"NOT_MOUNTED"},{"Legacy Lightpush":"NOT_MOUNTED"},{"Filter":"NOT_MOUNTED"},{"Store":"READY"},{"Legacy Store":"NOT_MOUNTED"},{"Peer Exchange":"READY"},{"Rendezvous":"NOT_READY","desc":"No Rendezvous peers are available yet"},{"Mix":"NOT_MOUNTED"},{"Lightpush Client":"NOT_READY","desc":"No Lightpush service peer available yet"},{"Legacy Lightpush Client":"NOT_READY","desc":"No Lightpush service peer available yet"},{"Store Client":"READY"},{"Legacy Store Client":"READY"},{"Filter Client":"NOT_READY","desc":"No Filter service peer available yet"}]}'
INFO     src.node.waku_node:waku_node.py:287 Node protocols are initialized !!
INFO     src.node.api_clients.base_client:base_client.py:37 curl -v -X GET "http://127.0.0.1:13556/debug/v1/info" -H "Content-Type: application/json" -d 'None'
INFO     src.node.api_clients.base_client:base_client.py:22 Response status code: 200. Response content: b'{"listenAddresses":["/ip4/172.18.122.181/tcp/13557/p2p/16Uiu2HAkxTfZqsWu1X4dScCcaiXeLXxsUEk1DrdsfqjsJZv8BUNk","/ip4/172.18.122.181/tcp/13558/ws/p2p/16Uiu2HAkxTfZqsWu1X4dScCcaiXeLXxsUEk1DrdsfqjsJZv8BUNk"],"enrUri":"enr:-L24QBNKYfr39AW7uEX5DGe98ByfTy3cC-93wN2MHfeFWv3PcWEEnjfZAHDC2du-S9oSzXIr4nwUXW9xHJfKxyx6Q08CgmlkgnY0gmlwhKwSerWKbXVsdGlhZGRyc5YACASsEnq1BjT1AAoErBJ6tQY09t0DgnJzhQADAQAAiXNlY3AyNTZrMaECLRXBZ_gGudPZkjy-MJ02hz7Htbs7t_dgoSXz_lluI1mDdGNwgjT1g3VkcII094V3YWt1MgM"}'
INFO     src.node.waku_node:waku_node.py:292 REST service is ready !!
INFO     src.node.api_clients.base_client:base_client.py:37 curl -v -X POST "http://127.0.0.1:13556/admin/v1/peers" -H "Content-Type: application/json" -d '["/ip4/172.18.180.187/tcp/10743/p2p/16Uiu2HAky3DZoEgTprSgFswSDU98TpMrL4wbKCyaUCaytx3nmdD7"]'
INFO     src.node.api_clients.base_client:base_client.py:22 Response status code: 200. Response content: b'OK'
INFO     src.node.api_clients.base_client:base_client.py:37 curl -v -X POST "http://127.0.0.1:10742/relay/v1/subscriptions" -H "Content-Type: application/json" -d '["/waku/2/rs/3/0"]'
INFO     src.node.api_clients.base_client:base_client.py:22 Response status code: 200. Response content: b'OK'
INFO     src.node.api_clients.base_client:base_client.py:37 curl -v -X POST "http://127.0.0.1:13556/relay/v1/subscriptions" -H "Content-Type: application/json" -d '["/waku/2/rs/3/0"]'
INFO     src.node.api_clients.base_client:base_client.py:22 Response status code: 200. Response content: b'OK'
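At this point the two nodes are explicitly peered (POST /admin/v1/peers on the store node with the publisher's multiaddr) and both REST APIs are subscribed to the pubsub topic /waku/2/rs/3/0. A compact sketch of those calls, assuming requests and the ports from this run; the harness issues the equivalent curl commands shown above:

    import requests

    PUBSUB_TOPIC = "/waku/2/rs/3/0"

    def connect_and_subscribe(store_rest=13556, publisher_rest=10742, publisher_maddr=None):
        # Add the publishing node as an admin peer of the store node, then
        # subscribe both nodes' relay APIs to the shared pubsub topic.
        if publisher_maddr is not None:
            requests.post(
                f"http://127.0.0.1:{store_rest}/admin/v1/peers",
                json=[publisher_maddr],
                timeout=5,
            ).raise_for_status()
        for port in (publisher_rest, store_rest):
            requests.post(
                f"http://127.0.0.1:{port}/relay/v1/subscriptions",
                json=[PUBSUB_TOPIC],
                timeout=5,
            ).raise_for_status()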
DEBUG    src.steps.store:store.py:132 Relaying message
INFO     src.node.api_clients.base_client:base_client.py:37 curl -v -X POST "http://127.0.0.1:10742/relay/v1/messages/%2Fwaku%2F2%2Frs%2F3%2F0" -H "Content-Type: application/json" -d '{"payload": "TWVzc2FnZV8w", "contentTopic": "/myapp/1/latest/proto", "timestamp": '$(date +%s%N)'}'
INFO     src.node.api_clients.base_client:base_client.py:22 Response status code: 200. Response content: b'OK'
DEBUG    src.libs.common:common.py:47 Sleeping for 0.2 seconds
DEBUG    src.steps.store:store.py:132 Relaying message
INFO     src.node.api_clients.base_client:base_client.py:37 curl -v -X POST "http://127.0.0.1:10742/relay/v1/messages/%2Fwaku%2F2%2Frs%2F3%2F0" -H "Content-Type: application/json" -d '{"payload": "TWVzc2FnZV8x", "contentTopic": "/myapp/1/latest/proto", "timestamp": '$(date +%s%N)'}'
INFO     src.node.api_clients.base_client:base_client.py:22 Response status code: 200. Response content: b'OK'
DEBUG    src.libs.common:common.py:47 Sleeping for 0.2 seconds
DEBUG    src.steps.store:store.py:132 Relaying message
INFO     src.node.api_clients.base_client:base_client.py:37 curl -v -X POST "http://127.0.0.1:10742/relay/v1/messages/%2Fwaku%2F2%2Frs%2F3%2F0" -H "Content-Type: application/json" -d '{"payload": "TWVzc2FnZV8y", "contentTopic": "/myapp/1/latest/proto", "timestamp": '$(date +%s%N)'}'
INFO     src.node.api_clients.base_client:base_client.py:22 Response status code: 200. Response content: b'OK'
DEBUG    src.libs.common:common.py:47 Sleeping for 0.2 seconds
DEBUG    src.steps.store:store.py:132 Relaying message
INFO     src.node.api_clients.base_client:base_client.py:37 curl -v -X POST "http://127.0.0.1:10742/relay/v1/messages/%2Fwaku%2F2%2Frs%2F3%2F0" -H "Content-Type: application/json" -d '{"payload": "TWVzc2FnZV8z", "contentTopic": "/myapp/1/latest/proto", "timestamp": '$(date +%s%N)'}'
INFO     src.node.api_clients.base_client:base_client.py:22 Response status code: 200. Response content: b'OK'
DEBUG    src.libs.common:common.py:47 Sleeping for 0.2 seconds
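The four payloads above are just base64-encoded strings: "TWVzc2FnZV8w" decodes to "Message_0", and the following three decode to Message_1 through Message_3. Each is published to the URL-encoded pubsub topic with a nanosecond timestamp. A minimal publishing loop under those assumptions (illustrative, not the harness's own step code):

    import base64
    import time
    import urllib.parse

    import requests

    PUBSUB_TOPIC = "/waku/2/rs/3/0"
    CONTENT_TOPIC = "/myapp/1/latest/proto"

    def publish_messages(rest_port=10742, count=4):
        # Publish Message_0..Message_{count-1} over relay, mirroring the log above.
        topic = urllib.parse.quote(PUBSUB_TOPIC, safe="")
        url = f"http://127.0.0.1:{rest_port}/relay/v1/messages/{topic}"
        for i in range(count):
            message = {
                "payload": base64.b64encode(f"Message_{i}".encode()).decode(),
                "contentTopic": CONTENT_TOPIC,
                "timestamp": time.time_ns(),
            }
            requests.post(url, json=message, timeout=5).raise_for_status()
            time.sleep(0.2)  # small gap so the store orders messages predictably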
INFO     src.node.api_clients.base_client:base_client.py:37 curl -v -X GET "http://127.0.0.1:10742/store/v3/messages?pubsubTopic=%2Fwaku%2F2%2Frs%2F3%2F0&hashes=0xe90ebac265ad9c061b2d70a68ab0e29795e82702651576ce29366d2c28df1e60&ascending=true" -H "Content-Type: application/json" -d 'None'
INFO     src.node.api_clients.base_client:base_client.py:22 Response status code: 200. Response content: b'{"requestId":"","statusCode":200,"statusDesc":"OK","messages":[{"messageHash":"0xe90ebac265ad9c061b2d70a68ab0e29795e82702651576ce29366d2c28df1e60"}]}'
INFO     src.node.api_clients.base_client:base_client.py:37 curl -v -X GET "http://127.0.0.1:10742/store/v3/messages?pubsubTopic=%2Fwaku%2F2%2Frs%2F3%2F0&ascending=true&hash=0xe90ebac265ad9c061b2d70a68ab0e29795e82702651576ce29366d2c28df1e60" -H "Content-Type: application/json" -d 'None'
INFO     src.node.api_clients.base_client:base_client.py:22 Response status code: 200. Response content: b'{"requestId":"","statusCode":200,"statusDesc":"OK","messages":[{"messageHash":"0x8c8c9925f68b187c305824f28092dc58eef97dcb4c5c394abc06bb3aefa944e1"},{"messageHash":"0xa8f165327d55226bc955e4f7a2286ee1f91ea0880c5f2fe257ce1e41fa68d8f9"},{"messageHash":"0xe90ebac265ad9c061b2d70a68ab0e29795e82702651576ce29366d2c28df1e60"},{"messageHash":"0x17170eed30179bfe6a7a145dc4c9b2f8f14a4c1b73c48372f428745cf068efca"}]}'
INFO     src.node.api_clients.base_client:base_client.py:37 curl -v -X GET "http://127.0.0.1:13556/store/v3/messages?pubsubTopic=%2Fwaku%2F2%2Frs%2F3%2F0&hashes=0xe90ebac265ad9c061b2d70a68ab0e29795e82702651576ce29366d2c28df1e60&ascending=true" -H "Content-Type: application/json" -d 'None'
INFO     src.node.api_clients.base_client:base_client.py:22 Response status code: 200. Response content: b'{"requestId":"","statusCode":200,"statusDesc":"OK","messages":[{"messageHash":"0xe90ebac265ad9c061b2d70a68ab0e29795e82702651576ce29366d2c28df1e60"}]}'
INFO     src.node.api_clients.base_client:base_client.py:37 curl -v -X GET "http://127.0.0.1:13556/store/v3/messages?pubsubTopic=%2Fwaku%2F2%2Frs%2F3%2F0&ascending=true&hash=0xe90ebac265ad9c061b2d70a68ab0e29795e82702651576ce29366d2c28df1e60" -H "Content-Type: application/json" -d 'None'
INFO     src.node.api_clients.base_client:base_client.py:22 Response status code: 200. Response content: b'{"requestId":"","statusCode":200,"statusDesc":"OK","messages":[{"messageHash":"0x8c8c9925f68b187c305824f28092dc58eef97dcb4c5c394abc06bb3aefa944e1"},{"messageHash":"0xa8f165327d55226bc955e4f7a2286ee1f91ea0880c5f2fe257ce1e41fa68d8f9"},{"messageHash":"0xe90ebac265ad9c061b2d70a68ab0e29795e82702651576ce29366d2c28df1e60"},{"messageHash":"0x17170eed30179bfe6a7a145dc4c9b2f8f14a4c1b73c48372f428745cf068efca"}]}'
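These four queries are the substance of test_invalid_hash_param: the Store v3 API recognises the hashes query parameter, so filtering on a valid hash returns exactly that message, while the misspelled hash parameter appears to be ignored and the query falls back to returning every stored message (all four hashes above), on both the publishing node and the store node. A sketch of the comparison, assuming requests and the hash from this run:

    import requests

    MESSAGE_HASH = "0xe90ebac265ad9c061b2d70a68ab0e29795e82702651576ce29366d2c28df1e60"

    def compare_hash_params(rest_port=10742, message_hash=MESSAGE_HASH):
        base = f"http://127.0.0.1:{rest_port}/store/v3/messages"
        common = {"pubsubTopic": "/waku/2/rs/3/0", "ascending": "true"}

        # Valid parameter name: only the matching message comes back.
        valid = requests.get(base, params={**common, "hashes": message_hash}, timeout=5).json()

        # Unknown parameter name: ignored by the node, so the unfiltered store is returned.
        invalid = requests.get(base, params={**common, "hash": message_hash}, timeout=5).json()

        assert len(valid["messages"]) == 1
        assert len(invalid["messages"]) > 1  # all stored messages, as in the log above
        return valid, invalid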
DEBUG    tests.conftest:conftest.py:59 Running fixture teardown: test_setup
DEBUG    tests.conftest:conftest.py:83 Running fixture teardown: close_open_nodes
DEBUG    src.node.waku_node:waku_node.py:234 Stopping container with id c55c877a2438
DEBUG    src.node.waku_node:waku_node.py:241 Container stopped.
DEBUG    src.node.waku_node:waku_node.py:234 Stopping container with id a5007fc07e25
DEBUG    src.node.waku_node:waku_node.py:241 Container stopped.
DEBUG    tests.conftest:conftest.py:98 Running fixture teardown: check_waku_log_errors
DEBUG    src.node.docker_mananger:docker_mananger.py:144 No errors found in the waku logs.
DEBUG    src.node.docker_mananger:docker_mananger.py:144 No errors found in the waku logs.