DEBUG    tests.conftest:conftest.py:51 Running fixture setup: test_id
DEBUG    tests.conftest:conftest.py:57 Running test: test_store_not_include_data with id: 2025-12-08_12-11-14__691c0d9f-e5f8-4355-903e-ed48d1a3ae6c
DEBUG    src.steps.common:common.py:19 Running fixture setup: common_setup
DEBUG    src.steps.store:store.py:31 Running fixture setup: store_setup
DEBUG    src.steps.store:store.py:39 Running fixture setup: node_setup
DEBUG    src.node.docker_mananger:docker_mananger.py:19 Docker client initialized with image wakuorg/nwaku:latest
DEBUG    src.node.waku_node:waku_node.py:86 WakuNode instance initialized with log path ./log/docker/publishing_node1_2025-12-08_12-11-14__691c0d9f-e5f8-4355-903e-ed48d1a3ae6c__wakuorg_nwaku:latest.log
DEBUG    src.node.waku_node:waku_node.py:90 Starting Node...
DEBUG    src.node.docker_mananger:docker_mananger.py:22 Attempting to create or retrieve network waku
DEBUG    src.node.docker_mananger:docker_mananger.py:25 Network waku already exists
DEBUG    src.node.docker_mananger:docker_mananger.py:108 Generated random external IP 172.18.133.200
DEBUG    src.node.docker_mananger:docker_mananger.py:101 Generated ports ['13748', '13749', '13750', '13751', '13752']
DEBUG    src.node.waku_node:waku_node.py:439 RLN credentials were not set
INFO     src.node.waku_node:waku_node.py:176 RLN credentials not set or credential store not available, starting without RLN
DEBUG    src.node.waku_node:waku_node.py:178 Using volumes []
DEBUG    src.node.docker_mananger:docker_mananger.py:49 docker run -i -t -p 13748:13748 -p 13749:13749 -p 13750:13750 -p 13751:13751 -p 13752:13752 wakuorg/nwaku:latest --listen-address=0.0.0.0 --rest=true --rest-admin=true --websocket-support=true --log-level=TRACE --rest-relay-cache-capacity=100 --websocket-port=13750 --rest-port=13748 --tcp-port=13749 --discv5-udp-port=13751 --rest-address=0.0.0.0 --nat=extip:172.18.133.200 --peer-exchange=true --discv5-discovery=true --cluster-id=3 --nodekey=aef35c6e12a7d1fc3fa5c994bb9470e6b6843dee9e74d1b547daf31c185d46b5 --shard=0 --metrics-server=true --metrics-server-address=0.0.0.0 --metrics-server-port=13752 --metrics-logging=true --store=true --relay=true
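
The docker run line above is assembled by the harness from the generated ports, the random external IP and a default nwaku flag set. A minimal sketch of an equivalent launch with the docker Python SDK; the helper and the trimmed flag list are illustrative, not the harness's actual code:

import docker

# Illustrative only: launch an nwaku container mirroring the command in the log above.
client = docker.from_env()
ports = ["13748", "13749", "13750", "13751", "13752"]
flags = [
    "--listen-address=0.0.0.0",
    "--rest=true",
    "--rest-port=13748",
    "--tcp-port=13749",
    "--websocket-port=13750",
    "--discv5-udp-port=13751",
    "--nat=extip:172.18.133.200",
    "--cluster-id=3",
    "--shard=0",
    "--store=true",
    "--relay=true",
]
container = client.containers.run(
    "wakuorg/nwaku:latest",
    command=flags,
    ports={f"{p}/tcp": int(p) for p in ports},  # publish each port 1:1, as -p N:N does
    detach=True,
    tty=True,         # -t
    stdin_open=True,  # -i
)
print(container.short_id)
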
DEBUG    src.node.docker_mananger:docker_mananger.py:55 docker network connect --ip 172.18.133.200 waku e23e888276b91f16eee692cdcbf82f1ef596e9e7d7a477262683dc5b75819b8a
DEBUG    src.node.docker_mananger:docker_mananger.py:58 Container started with ID e23e888276b9. Setting up logs at ./log/docker/publishing_node1_2025-12-08_12-11-14__691c0d9f-e5f8-4355-903e-ed48d1a3ae6c__wakuorg_nwaku:latest.log
DEBUG    src.node.waku_node:waku_node.py:190 Started container from image wakuorg/nwaku:latest. REST: 13748
DEBUG    src.libs.common:common.py:47 Sleeping for 1 seconds
ERROR    src.node.docker_mananger:docker_mananger.py:89 Max retries reached for container 5ee4bb4b2ff6. Exiting log stream.
ERROR    src.node.docker_mananger:docker_mananger.py:89 Max retries reached for container 622acec604f1. Exiting log stream.
INFO     src.node.api_clients.base_client:base_client.py:37 curl -v -X GET "http://127.0.0.1:13748/health" -H "Content-Type: application/json" -d 'None'
INFO     src.node.api_clients.base_client:base_client.py:22 Response status code: 200. Response content: b'{"nodeHealth":"READY","protocolsHealth":[{"Relay":"NOT_READY","desc":"No connected peers"},{"Rln Relay":"NOT_MOUNTED"},{"Lightpush":"NOT_MOUNTED"},{"Legacy Lightpush":"NOT_MOUNTED"},{"Filter":"NOT_MOUNTED"},{"Store":"READY"},{"Legacy Store":"NOT_MOUNTED"},{"Peer Exchange":"READY"},{"Rendezvous":"NOT_READY","desc":"No Rendezvous peers are available yet"},{"Mix":"NOT_MOUNTED"},{"Lightpush Client":"NOT_READY","desc":"No Lightpush service peer available yet"},{"Legacy Lightpush Client":"NOT_READY","desc":"No Lightpush service peer available yet"},{"Store Client":"READY"},{"Legacy Store Client":"NOT_READY","desc":"No Legacy Store service peers are available yet, neither Store service set up for the node"},{"Filter Client":"NOT_READY","desc":"No Filter service peer available yet"}]}'
INFO     src.node.waku_node:waku_node.py:287 Node protocols are initialized !!
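
The readiness check above is a plain poll of the REST /health endpoint until nodeHealth reports READY. A minimal sketch of such a poll with requests; the timeout and retry interval are assumptions, not the harness's actual values:

import time
import requests

def wait_until_ready(rest_port: int, timeout: float = 30.0) -> dict:
    """Poll /health until the node reports READY or the timeout expires."""
    deadline = time.time() + timeout
    while time.time() < deadline:
        resp = requests.get(f"http://127.0.0.1:{rest_port}/health", timeout=5)
        if resp.ok and resp.json().get("nodeHealth") == "READY":
            return resp.json()
        time.sleep(1)
    raise TimeoutError(f"node on REST port {rest_port} never became READY")

health = wait_until_ready(13748)
print(health["nodeHealth"])
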
INFO     src.node.api_clients.base_client:base_client.py:37 curl -v -X GET "http://127.0.0.1:13748/debug/v1/info" -H "Content-Type: application/json" -d 'None'
INFO     src.node.api_clients.base_client:base_client.py:22 Response status code: 200. Response content: b'{"listenAddresses":["/ip4/172.18.133.200/tcp/13749/p2p/16Uiu2HAkwQghzYRNfWSv6PjsUMzmERTEo492TfZxuxas6gouZGKR","/ip4/172.18.133.200/tcp/13750/ws/p2p/16Uiu2HAkwQghzYRNfWSv6PjsUMzmERTEo492TfZxuxas6gouZGKR"],"enrUri":"enr:-L24QA9fy4yJoD8PZb23yFhnszTQ_KYlo7s2gsF3hqgHJaTJOkpbNzv1Pj0lkv1uVl4jRffb7jM4nyXlHqtW-kx_tHMCgmlkgnY0gmlwhKwShciKbXVsdGlhZGRyc5YACASsEoXIBjW1AAoErBKFyAY1tt0DgnJzhQADAQAAiXNlY3AyNTZrMaECHXadn5RojN6WYWNJ0j72qEgY21G8Aqowl5hn1d1leIiDdGNwgjW1g3VkcII1t4V3YWt1MgM"}'
INFO     src.node.waku_node:waku_node.py:292 REST service is ready !!
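
The /debug/v1/info response is what links the two nodes: the store node started below receives this node's ENR as its discv5 bootstrap entry and the plain TCP multiaddr as --storenode. A sketch of how those two flags could be derived from the response above (the flag assembly is illustrative):

import requests

info = requests.get("http://127.0.0.1:13748/debug/v1/info", timeout=5).json()

# The ENR feeds discv5 bootstrap; the non-websocket multiaddr feeds --storenode.
enr = info["enrUri"]
tcp_addr = next(a for a in info["listenAddresses"] if "/ws/" not in a)

extra_flags = [f"--discv5-bootstrap-node={enr}", f"--storenode={tcp_addr}"]
print(extra_flags)
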
DEBUG    src.node.docker_mananger:docker_mananger.py:19 Docker client initialized with image wakuorg/nwaku:latest
DEBUG    src.node.waku_node:waku_node.py:86 WakuNode instance initialized with log path ./log/docker/store_node1_2025-12-08_12-11-14__691c0d9f-e5f8-4355-903e-ed48d1a3ae6c__wakuorg_nwaku:latest.log
DEBUG    src.node.waku_node:waku_node.py:90 Starting Node...
DEBUG    src.node.docker_mananger:docker_mananger.py:22 Attempting to create or retrieve network waku
DEBUG    src.node.docker_mananger:docker_mananger.py:25 Network waku already exists
DEBUG    src.node.docker_mananger:docker_mananger.py:108 Generated random external IP 172.18.212.169
DEBUG    src.node.docker_mananger:docker_mananger.py:101 Generated ports ['21899', '21900', '21901', '21902', '21903']
DEBUG    src.node.waku_node:waku_node.py:439 RLN credentials were not set
INFO     src.node.waku_node:waku_node.py:176 RLN credentials not set or credential store not available, starting without RLN
DEBUG    src.node.waku_node:waku_node.py:178 Using volumes []
DEBUG    src.node.docker_mananger:docker_mananger.py:49 docker run -i -t -p 21899:21899 -p 21900:21900 -p 21901:21901 -p 21902:21902 -p 21903:21903 wakuorg/nwaku:latest --listen-address=0.0.0.0 --rest=true --rest-admin=true --websocket-support=true --log-level=TRACE --rest-relay-cache-capacity=100 --websocket-port=21901 --rest-port=21899 --tcp-port=21900 --discv5-udp-port=21902 --rest-address=0.0.0.0 --nat=extip:172.18.212.169 --peer-exchange=true --discv5-discovery=true --cluster-id=3 --nodekey=cdcc2429ea48dffedbacca55336a2fab0c9d8f62ec36a836ac2ca9940c04af1b --shard=0 --metrics-server=true --metrics-server-address=0.0.0.0 --metrics-server-port=21903 --metrics-logging=true --discv5-bootstrap-node=enr:-L24QA9fy4yJoD8PZb23yFhnszTQ_KYlo7s2gsF3hqgHJaTJOkpbNzv1Pj0lkv1uVl4jRffb7jM4nyXlHqtW-kx_tHMCgmlkgnY0gmlwhKwShciKbXVsdGlhZGRyc5YACASsEoXIBjW1AAoErBKFyAY1tt0DgnJzhQADAQAAiXNlY3AyNTZrMaECHXadn5RojN6WYWNJ0j72qEgY21G8Aqowl5hn1d1leIiDdGNwgjW1g3VkcII1t4V3YWt1MgM --storenode=/ip4/172.18.133.200/tcp/13749/p2p/16Uiu2HAkwQghzYRNfWSv6PjsUMzmERTEo492TfZxuxas6gouZGKR --store=true --relay=true
DEBUG    src.node.docker_mananger:docker_mananger.py:55 docker network connect --ip 172.18.212.169 waku cfbb53ff8c81d3d1f5cf169b05f00464bb69ead724d41caa9c0b6e54a84218a0
DEBUG    src.node.docker_mananger:docker_mananger.py:58 Container started with ID cfbb53ff8c81. Setting up logs at ./log/docker/store_node1_2025-12-08_12-11-14__691c0d9f-e5f8-4355-903e-ed48d1a3ae6c__wakuorg_nwaku:latest.log
DEBUG    src.node.waku_node:waku_node.py:190 Started container from image wakuorg/nwaku:latest. REST: 21899
DEBUG    src.libs.common:common.py:47 Sleeping for 1 seconds
INFO     src.node.api_clients.base_client:base_client.py:37 curl -v -X GET "http://127.0.0.1:21899/health" -H "Content-Type: application/json" -d 'None'
INFO     src.node.api_clients.base_client:base_client.py:22 Response status code: 200. Response content: b'{"nodeHealth":"READY","protocolsHealth":[{"Relay":"READY"},{"Rln Relay":"NOT_MOUNTED"},{"Lightpush":"NOT_MOUNTED"},{"Legacy Lightpush":"NOT_MOUNTED"},{"Filter":"NOT_MOUNTED"},{"Store":"READY"},{"Legacy Store":"NOT_MOUNTED"},{"Peer Exchange":"READY"},{"Rendezvous":"NOT_READY","desc":"No Rendezvous peers are available yet"},{"Mix":"NOT_MOUNTED"},{"Lightpush Client":"NOT_READY","desc":"No Lightpush service peer available yet"},{"Legacy Lightpush Client":"NOT_READY","desc":"No Lightpush service peer available yet"},{"Store Client":"READY"},{"Legacy Store Client":"READY"},{"Filter Client":"NOT_READY","desc":"No Filter service peer available yet"}]}'
INFO     src.node.waku_node:waku_node.py:287 Node protocols are initialized !!
INFO     src.node.api_clients.base_client:base_client.py:37 curl -v -X GET "http://127.0.0.1:21899/debug/v1/info" -H "Content-Type: application/json" -d 'None'
INFO     src.node.api_clients.base_client:base_client.py:22 Response status code: 200. Response content: b'{"listenAddresses":["/ip4/172.18.212.169/tcp/21900/p2p/16Uiu2HAkxjzpM4z7SMi2XShm61FoZaCTiMLNNLostrV46nx51o1Y","/ip4/172.18.212.169/tcp/21901/ws/p2p/16Uiu2HAkxjzpM4z7SMi2XShm61FoZaCTiMLNNLostrV46nx51o1Y"],"enrUri":"enr:-L24QK80W0JjbOufW5Utt_G9c5yHCsvjWl_UUWTKX1vHqGbvGYO5OAnhhcoXi6dggWNtcudUfiYJHMMc-gHSQWU5t5kCgmlkgnY0gmlwhKwS1KmKbXVsdGlhZGRyc5YACASsEtSpBlWMAAoErBLUqQZVjd0DgnJzhQADAQAAiXNlY3AyNTZrMaECMUTQhkRDcAp30bFWENf7l-sWuxaVuuDfdo3Ny__QWveDdGNwglWMg3VkcIJVjoV3YWt1MgM"}'
INFO     src.node.waku_node:waku_node.py:292 REST service is ready !!
INFO     src.node.api_clients.base_client:base_client.py:37 curl -v -X POST "http://127.0.0.1:21899/admin/v1/peers" -H "Content-Type: application/json" -d '["/ip4/172.18.133.200/tcp/13749/p2p/16Uiu2HAkwQghzYRNfWSv6PjsUMzmERTEo492TfZxuxas6gouZGKR"]'
INFO     src.node.api_clients.base_client:base_client.py:22 Response status code: 200. Response content: b'OK'
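
Besides discv5 discovery, the store node is also wired directly to the publishing node through the admin REST API; the call above posts the peer's multiaddr. The same call sketched in Python (multiaddr copied from the log):

import requests

peer = "/ip4/172.18.133.200/tcp/13749/p2p/16Uiu2HAkwQghzYRNfWSv6PjsUMzmERTEo492TfZxuxas6gouZGKR"
resp = requests.post(
    "http://127.0.0.1:21899/admin/v1/peers",
    json=[peer],  # the endpoint takes a JSON array of multiaddrs, as the curl -d body shows
    timeout=5,
)
resp.raise_for_status()  # the log shows a 200 / b'OK' response
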
INFO     src.node.api_clients.base_client:base_client.py:37 curl -v -X POST "http://127.0.0.1:13748/relay/v1/subscriptions" -H "Content-Type: application/json" -d '["/waku/2/rs/3/0"]'
INFO     src.node.api_clients.base_client:base_client.py:22 Response status code: 200. Response content: b'OK'
INFO     src.node.api_clients.base_client:base_client.py:37 curl -v -X POST "http://127.0.0.1:21899/relay/v1/subscriptions" -H "Content-Type: application/json" -d '["/waku/2/rs/3/0"]'
INFO     src.node.api_clients.base_client:base_client.py:22 Response status code: 200. Response content: b'OK'
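
Both nodes then subscribe their relay to the same static-shard pubsub topic, /waku/2/rs/3/0 (cluster 3, shard 0, matching --cluster-id=3 --shard=0). A sketch of the two subscription calls:

import requests

pubsub_topic = "/waku/2/rs/3/0"
for rest_port in (13748, 21899):  # publishing node, then store node
    requests.post(
        f"http://127.0.0.1:{rest_port}/relay/v1/subscriptions",
        json=[pubsub_topic],
        timeout=5,
    ).raise_for_status()
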
DEBUG    src.steps.store:store.py:132 Relaying message
INFO     src.node.api_clients.base_client:base_client.py:37 curl -v -X POST "http://127.0.0.1:13748/relay/v1/messages/%2Fwaku%2F2%2Frs%2F3%2F0" -H "Content-Type: application/json" -d '{"payload": "U3RvcmUgd29ya3MhIQ==", "contentTopic": "/myapp/1/latest/proto", "timestamp": '$(date +%s%N)'}'
INFO     src.node.api_clients.base_client:base_client.py:22 Response status code: 200. Response content: b'OK'
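
The published payload is base64 ("U3RvcmUgd29ya3MhIQ==" decodes to "Store works!!") and the timestamp is Unix time in nanoseconds, as in $(date +%s%N). A sketch of the same publish from Python, URL-encoding the pubsub topic into the path the way the curl line does:

import base64
import time
import urllib.parse

import requests

pubsub_topic = "/waku/2/rs/3/0"
message = {
    "payload": base64.b64encode(b"Store works!!").decode(),  # "U3RvcmUgd29ya3MhIQ=="
    "contentTopic": "/myapp/1/latest/proto",
    "timestamp": time.time_ns(),  # nanosecond Unix timestamp
}
requests.post(
    f"http://127.0.0.1:13748/relay/v1/messages/{urllib.parse.quote(pubsub_topic, safe='')}",
    json=message,
    timeout=5,
).raise_for_status()
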
DEBUG    src.libs.common:common.py:47 Sleeping for 0.2 seconds
INFO     src.node.api_clients.base_client:base_client.py:37 curl -v -X GET "http://127.0.0.1:21899/store/v3/messages?includeData=false&pubsubTopic=%2Fwaku%2F2%2Frs%2F3%2F0&ascending=true" -H "Content-Type: application/json" -d 'None'
INFO     src.node.api_clients.base_client:base_client.py:22 Response status code: 200. Response content: b'{"requestId":"","statusCode":200,"statusDesc":"OK","messages":[{"messageHash":"0xf9e12de76ac478c54a0a6734e9b4c8738ffc3ff8b5d31253fa803d18b4d88469"}]}'
DEBUG    tests.store.test_api_flags:test_api_flags.py:68 Message restored with hash only is [{'messageHash': '0xf9e12de76ac478c54a0a6734e9b4c8738ffc3ff8b5d31253fa803d18b4d88469'}]
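
This is the behaviour under test: with includeData=false the store v3 query returns only message hashes, no payload or content topic. A sketch of the query and check (the assertion style is illustrative, not the test's actual code):

import requests

resp = requests.get(
    "http://127.0.0.1:21899/store/v3/messages",
    params={
        "includeData": "false",        # hashes only
        "pubsubTopic": "/waku/2/rs/3/0",
        "ascending": "true",
    },
    timeout=5,
)
messages = resp.json()["messages"]
assert len(messages) == 1
assert set(messages[0]) == {"messageHash"}  # no message body when includeData=false
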
DEBUG    tests.conftest:conftest.py:59 Running fixture teardown: test_setup
DEBUG    tests.conftest:conftest.py:83 Running fixture teardown: close_open_nodes
DEBUG    src.node.waku_node:waku_node.py:234 Stopping container with id e23e888276b9
DEBUG    src.node.waku_node:waku_node.py:241 Container stopped.
DEBUG    src.node.waku_node:waku_node.py:234 Stopping container with id cfbb53ff8c81
DEBUG    src.node.waku_node:waku_node.py:241 Container stopped.
DEBUG    tests.conftest:conftest.py:98 Running fixture teardown: check_waku_log_errors
DEBUG    src.node.docker_mananger:docker_mananger.py:144 No errors found in the waku logs.
DEBUG    src.node.docker_mananger:docker_mananger.py:144 No errors found in the waku logs.
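
The closing fixture scans the captured container logs for error markers before declaring the run clean. A minimal sketch of such a scan; the path glob and keyword list are assumptions, not the harness's actual configuration:

from pathlib import Path

def scan_for_errors(log_path: Path, keywords=("ERR ", "panic")) -> list[str]:
    """Return log lines containing any of the given error markers."""
    text = log_path.read_text(errors="replace")
    return [line for line in text.splitlines() if any(k in line for k in keywords)]

for log in Path("./log/docker").glob("*.log"):
    assert not scan_for_errors(log), f"errors found in {log}"
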