DEBUG  tests.conftest:conftest.py:51 Running fixture setup: test_id
DEBUG  tests.conftest:conftest.py:57 Running test: test_time_filter_negative_start_time with id: 2025-12-14_04-33-51__6ec77ea7-5e35-45c8-8994-b3ec4ac25759
DEBUG  src.steps.common:common.py:19 Running fixture setup: common_setup
DEBUG  src.steps.store:store.py:31 Running fixture setup: store_setup
DEBUG  src.steps.store:store.py:39 Running fixture setup: node_setup
DEBUG  src.node.docker_mananger:docker_mananger.py:19 Docker client initialized with image wakuorg/nwaku:latest
DEBUG  src.node.waku_node:waku_node.py:86 WakuNode instance initialized with log path ./log/docker/publishing_node1_2025-12-14_04-33-51__6ec77ea7-5e35-45c8-8994-b3ec4ac25759__wakuorg_nwaku:latest.log
DEBUG  src.node.waku_node:waku_node.py:90 Starting Node...
DEBUG  src.node.docker_mananger:docker_mananger.py:22 Attempting to create or retrieve network waku
DEBUG  src.node.docker_mananger:docker_mananger.py:25 Network waku already exists
DEBUG  src.node.docker_mananger:docker_mananger.py:108 Generated random external IP 172.18.64.248
DEBUG  src.node.docker_mananger:docker_mananger.py:101 Generated ports ['60930', '60931', '60932', '60933', '60934']
DEBUG  src.node.waku_node:waku_node.py:439 RLN credentials were not set
INFO  src.node.waku_node:waku_node.py:176 RLN credentials not set or credential store not available, starting without RLN
DEBUG  src.node.waku_node:waku_node.py:178 Using volumes []
DEBUG  src.node.docker_mananger:docker_mananger.py:49 docker run -i -t -p 60930:60930 -p 60931:60931 -p 60932:60932 -p 60933:60933 -p 60934:60934 wakuorg/nwaku:latest --listen-address=0.0.0.0 --rest=true --rest-admin=true --websocket-support=true --log-level=TRACE --rest-relay-cache-capacity=100 --websocket-port=60932 --rest-port=60930 --tcp-port=60931 --discv5-udp-port=60933 --rest-address=0.0.0.0 --nat=extip:172.18.64.248 --peer-exchange=true --discv5-discovery=true --cluster-id=3 --nodekey=2d0765d51ced4d9b8bf19ad630a4ddbd5cdba6a26ab6ae0b0fba7faca6ae71cf --shard=0 --metrics-server=true --metrics-server-address=0.0.0.0 --metrics-server-port=60934 --metrics-logging=true --store=true --relay=true
DEBUG  src.node.docker_mananger:docker_mananger.py:55 docker network connect --ip 172.18.64.248 waku 4026ad89eb01f3b8d3f6d2a2dda3e99e26bb482fb05d22b93f742b464911c29f
DEBUG  src.node.docker_mananger:docker_mananger.py:58 Container started with ID 4026ad89eb01. Setting up logs at ./log/docker/publishing_node1_2025-12-14_04-33-51__6ec77ea7-5e35-45c8-8994-b3ec4ac25759__wakuorg_nwaku:latest.log
DEBUG  src.node.waku_node:waku_node.py:190 Started container from image wakuorg/nwaku:latest. REST: 60930
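
The `docker run` above maps five host ports 1:1 into the container and passes the whole nwaku configuration as CLI flags. A minimal sketch of launching a comparable container with the docker Python SDK, using the image, ports and a shortened flag list taken from the log (the variable names are illustrative, not the framework's own):

import docker

client = docker.from_env()

image = "wakuorg/nwaku:latest"
# Host port == container port, matching the "-p 60930:60930 ..." mappings in the log.
ports = {f"{p}/tcp": p for p in (60930, 60931, 60932, 60933, 60934)}
flags = [
    "--listen-address=0.0.0.0",
    "--rest=true",
    "--rest-port=60930",
    "--tcp-port=60931",
    "--websocket-port=60932",
    "--discv5-udp-port=60933",
    "--metrics-server-port=60934",
    "--nat=extip:172.18.64.248",
    "--cluster-id=3",
    "--shard=0",
    "--store=true",
    "--relay=true",
]

# Start detached so the test process can keep polling the REST API while the node runs.
container = client.containers.run(image, command=flags, ports=ports, detach=True)

# Attach the container to the pre-existing "waku" bridge network with the generated external IP.
network = client.networks.get("waku")
network.connect(container, ipv4_address="172.18.64.248")
print(container.short_id)
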
DEBUG  src.libs.common:common.py:47 Sleeping for 1 seconds
ERROR  src.node.docker_mananger:docker_mananger.py:89 Max retries reached for container b2614aeacd08. Exiting log stream.
ERROR  src.node.docker_mananger:docker_mananger.py:89 Max retries reached for container ce56afb780ad. Exiting log stream.
INFO  src.node.api_clients.base_client:base_client.py:37 curl -v -X GET "http://127.0.0.1:60930/health" -H "Content-Type: application/json" -d 'None'
INFO  src.node.api_clients.base_client:base_client.py:22 Response status code: 200. Response content: b'{"nodeHealth":"READY","protocolsHealth":[{"Relay":"NOT_READY","desc":"No connected peers"},{"Rln Relay":"NOT_MOUNTED"},{"Lightpush":"NOT_MOUNTED"},{"Legacy Lightpush":"NOT_MOUNTED"},{"Filter":"NOT_MOUNTED"},{"Store":"READY"},{"Legacy Store":"NOT_MOUNTED"},{"Peer Exchange":"READY"},{"Rendezvous":"NOT_READY","desc":"No Rendezvous peers are available yet"},{"Mix":"NOT_MOUNTED"},{"Lightpush Client":"NOT_READY","desc":"No Lightpush service peer available yet"},{"Legacy Lightpush Client":"NOT_READY","desc":"No Lightpush service peer available yet"},{"Store Client":"READY"},{"Legacy Store Client":"NOT_READY","desc":"No Legacy Store service peers are available yet, neither Store service set up for the node"},{"Filter Client":"NOT_READY","desc":"No Filter service peer available yet"}]}'
INFO  src.node.waku_node:waku_node.py:287 Node protocols are initialized !!
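
The /health response above reports "nodeHealth":"READY" while individual protocols (e.g. Relay) are still NOT_READY because no peer is connected yet. A small sketch, assuming the requests library, of polling the endpoint until the node reports ready (the helper name and timeout are illustrative):

import time
import requests

def wait_until_ready(rest_port, timeout=30):
    """Poll /health until the node reports READY or the timeout expires."""
    deadline = time.time() + timeout
    while time.time() < deadline:
        try:
            health = requests.get(f"http://127.0.0.1:{rest_port}/health", timeout=2).json()
            if health.get("nodeHealth") == "READY":
                return health
        except requests.RequestException:
            pass  # the REST server may not be up yet right after container start
        time.sleep(0.5)
    raise TimeoutError(f"node on port {rest_port} never became READY")

health = wait_until_ready(60930)
# health["protocolsHealth"] lists per-protocol state, e.g. Relay stays NOT_READY until a peer connects.
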
INFO  src.node.api_clients.base_client:base_client.py:37 curl -v -X GET "http://127.0.0.1:60930/debug/v1/info" -H "Content-Type: application/json" -d 'None'
INFO  src.node.api_clients.base_client:base_client.py:22 Response status code: 200. Response content: b'{"listenAddresses":["/ip4/172.18.64.248/tcp/60931/p2p/16Uiu2HAmR9Ra41YeUSTCEYZTs2HoWLivn4uvcaHCrjT1upqHE5Cr","/ip4/172.18.64.248/tcp/60932/ws/p2p/16Uiu2HAmR9Ra41YeUSTCEYZTs2HoWLivn4uvcaHCrjT1upqHE5Cr"],"enrUri":"enr:-L24QI8N_POEAbEf28Cf6iYH6ukuNpYp5yQ--QBdJY-SrKKGfBAt_bB8VpP3PS_-jS8X1a6HrS0rvFiy45YCA0OnQXcCgmlkgnY0gmlwhKwSQPiKbXVsdGlhZGRyc5YACASsEkD4Bu4DAAoErBJA-AbuBN0DgnJzhQADAQAAiXNlY3AyNTZrMaEDuZUFrPS0hG1GMNaehEGfW1iFNVVrBIhkh6O5vC5x1geDdGNwgu4Dg3VkcILuBYV3YWt1MgM"}'
INFO  src.node.waku_node:waku_node.py:292 REST service is ready !!
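
The /debug/v1/info response carries the node's listen multiaddresses and its ENR; the second node started below reuses them as --discv5-bootstrap-node and --storenode. A hedged sketch of extracting both values with requests:

import requests

info = requests.get("http://127.0.0.1:60930/debug/v1/info", timeout=5).json()

enr_uri = info["enrUri"]  # value for --discv5-bootstrap-node
# Pick the plain TCP multiaddr; the "/ws/" entry is the websocket transport.
tcp_multiaddr = next(a for a in info["listenAddresses"] if "/ws/" not in a)
# tcp_multiaddr is used for --storenode, e.g. /ip4/172.18.64.248/tcp/60931/p2p/16Uiu2HAm...
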
DEBUG  src.node.docker_mananger:docker_mananger.py:19 Docker client initialized with image wakuorg/nwaku:latest
DEBUG  src.node.waku_node:waku_node.py:86 WakuNode instance initialized with log path ./log/docker/store_node1_2025-12-14_04-33-51__6ec77ea7-5e35-45c8-8994-b3ec4ac25759__wakuorg_nwaku:latest.log
DEBUG  src.node.waku_node:waku_node.py:90 Starting Node...
DEBUG  src.node.docker_mananger:docker_mananger.py:22 Attempting to create or retrieve network waku
DEBUG  src.node.docker_mananger:docker_mananger.py:25 Network waku already exists
DEBUG  src.node.docker_mananger:docker_mananger.py:108 Generated random external IP 172.18.16.49
DEBUG  src.node.docker_mananger:docker_mananger.py:101 Generated ports ['25317', '25318', '25319', '25320', '25321']
DEBUG  src.node.waku_node:waku_node.py:439 RLN credentials were not set
INFO  src.node.waku_node:waku_node.py:176 RLN credentials not set or credential store not available, starting without RLN
DEBUG  src.node.waku_node:waku_node.py:178 Using volumes []
DEBUG  src.node.docker_mananger:docker_mananger.py:49 docker run -i -t -p 25317:25317 -p 25318:25318 -p 25319:25319 -p 25320:25320 -p 25321:25321 wakuorg/nwaku:latest --listen-address=0.0.0.0 --rest=true --rest-admin=true --websocket-support=true --log-level=TRACE --rest-relay-cache-capacity=100 --websocket-port=25319 --rest-port=25317 --tcp-port=25318 --discv5-udp-port=25320 --rest-address=0.0.0.0 --nat=extip:172.18.16.49 --peer-exchange=true --discv5-discovery=true --cluster-id=3 --nodekey=d2ba20eff52ac9bc22b91bcb1fad10ce3af4cd5b27b990d6c16646e3fcccd00d --shard=0 --metrics-server=true --metrics-server-address=0.0.0.0 --metrics-server-port=25321 --metrics-logging=true --discv5-bootstrap-node=enr:-L24QI8N_POEAbEf28Cf6iYH6ukuNpYp5yQ--QBdJY-SrKKGfBAt_bB8VpP3PS_-jS8X1a6HrS0rvFiy45YCA0OnQXcCgmlkgnY0gmlwhKwSQPiKbXVsdGlhZGRyc5YACASsEkD4Bu4DAAoErBJA-AbuBN0DgnJzhQADAQAAiXNlY3AyNTZrMaEDuZUFrPS0hG1GMNaehEGfW1iFNVVrBIhkh6O5vC5x1geDdGNwgu4Dg3VkcILuBYV3YWt1MgM --storenode=/ip4/172.18.64.248/tcp/60931/p2p/16Uiu2HAmR9Ra41YeUSTCEYZTs2HoWLivn4uvcaHCrjT1upqHE5Cr --store=true --relay=true
DEBUG  src.node.docker_mananger:docker_mananger.py:55 docker network connect --ip 172.18.16.49 waku 8ef10e313f7f476d87045fbf9a4faaf86ebcb83f1e9a87bf4488d2d0f7683869
DEBUG  src.node.docker_mananger:docker_mananger.py:58 Container started with ID 8ef10e313f7f. Setting up logs at ./log/docker/store_node1_2025-12-14_04-33-51__6ec77ea7-5e35-45c8-8994-b3ec4ac25759__wakuorg_nwaku:latest.log
DEBUG  src.node.waku_node:waku_node.py:190 Started container from image wakuorg/nwaku:latest. REST: 25317
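
The store node's `docker run` differs from the publishing node's mainly in the two bootstrap flags built from the first node's /debug/v1/info response. A hypothetical helper (not part of the framework) showing how those flags could be appended:

def store_node_flags(base_flags, publisher_enr, publisher_tcp_multiaddr):
    """Extend the base nwaku flag list with bootstrap info from the publishing node."""
    return base_flags + [
        f"--discv5-bootstrap-node={publisher_enr}",   # discv5 discovery seed
        f"--storenode={publisher_tcp_multiaddr}",     # peer the store client queries
    ]

# Example: store_node_flags(flags, enr_uri, tcp_multiaddr) with the values read earlier.
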
DEBUG  src.libs.common:common.py:47 Sleeping for 1 seconds
INFO  src.node.api_clients.base_client:base_client.py:37 curl -v -X GET "http://127.0.0.1:25317/health" -H "Content-Type: application/json" -d 'None'
INFO  src.node.api_clients.base_client:base_client.py:22 Response status code: 200. Response content: b'{"nodeHealth":"READY","protocolsHealth":[{"Relay":"READY"},{"Rln Relay":"NOT_MOUNTED"},{"Lightpush":"NOT_MOUNTED"},{"Legacy Lightpush":"NOT_MOUNTED"},{"Filter":"NOT_MOUNTED"},{"Store":"READY"},{"Legacy Store":"NOT_MOUNTED"},{"Peer Exchange":"READY"},{"Rendezvous":"NOT_READY","desc":"No Rendezvous peers are available yet"},{"Mix":"NOT_MOUNTED"},{"Lightpush Client":"NOT_READY","desc":"No Lightpush service peer available yet"},{"Legacy Lightpush Client":"NOT_READY","desc":"No Lightpush service peer available yet"},{"Store Client":"READY"},{"Legacy Store Client":"READY"},{"Filter Client":"NOT_READY","desc":"No Filter service peer available yet"}]}'
INFO  src.node.waku_node:waku_node.py:287 Node protocols are initialized !!
INFO  src.node.api_clients.base_client:base_client.py:37 curl -v -X GET "http://127.0.0.1:25317/debug/v1/info" -H "Content-Type: application/json" -d 'None'
INFO  src.node.api_clients.base_client:base_client.py:22 Response status code: 200. Response content: b'{"listenAddresses":["/ip4/172.18.16.49/tcp/25318/p2p/16Uiu2HAmMoTWpmgzuPDgvpara9wE3xWafmEL3XM5E2vMuFJ71hks","/ip4/172.18.16.49/tcp/25319/ws/p2p/16Uiu2HAmMoTWpmgzuPDgvpara9wE3xWafmEL3XM5E2vMuFJ71hks"],"enrUri":"enr:-L24QPN5uG8I75hPNNNj9w50OgFIUjTXDLG9JRi-n0AGoFcIFRh6dDxufojceHB8wgQ3fckANQdT1FfKIShMezawBXMCgmlkgnY0gmlwhKwSEDGKbXVsdGlhZGRyc5YACASsEhAxBmLmAAoErBIQMQZi590DgnJzhQADAQAAiXNlY3AyNTZrMaEDh-SLLsaIWBObg5m_4AajFY8jKZoLFIadUjOY8fhWMRCDdGNwgmLmg3VkcIJi6IV3YWt1MgM"}'
INFO  src.node.waku_node:waku_node.py:292 REST service is ready !!
INFO  src.node.api_clients.base_client:base_client.py:37 curl -v -X POST "http://127.0.0.1:25317/admin/v1/peers" -H "Content-Type: application/json" -d '["/ip4/172.18.64.248/tcp/60931/p2p/16Uiu2HAmR9Ra41YeUSTCEYZTs2HoWLivn4uvcaHCrjT1upqHE5Cr"]'
INFO  src.node.api_clients.base_client:base_client.py:22 Response status code: 200. Response content: b'OK'
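
Besides discv5 discovery, the store node is connected to the publishing node explicitly via /admin/v1/peers, which takes a JSON array of multiaddrs. A minimal requests-based sketch of that call:

import requests

peer = "/ip4/172.18.64.248/tcp/60931/p2p/16Uiu2HAmR9Ra41YeUSTCEYZTs2HoWLivn4uvcaHCrjT1upqHE5Cr"
resp = requests.post(
    "http://127.0.0.1:25317/admin/v1/peers",
    json=[peer],  # array of peer multiaddrs to dial
    timeout=5,
)
resp.raise_for_status()  # the log shows a plain 200 "OK" body
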
INFO  src.node.api_clients.base_client:base_client.py:37 curl -v -X POST "http://127.0.0.1:60930/relay/v1/subscriptions" -H "Content-Type: application/json" -d '["/waku/2/rs/3/0"]'
INFO  src.node.api_clients.base_client:base_client.py:22 Response status code: 200. Response content: b'OK'
INFO  src.node.api_clients.base_client:base_client.py:37 curl -v -X POST "http://127.0.0.1:25317/relay/v1/subscriptions" -H "Content-Type: application/json" -d '["/waku/2/rs/3/0"]'
INFO  src.node.api_clients.base_client:base_client.py:22 Response status code: 200. Response content: b'OK'
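
Both nodes then subscribe to the same pubsub topic /waku/2/rs/3/0 (static sharding: cluster 3, shard 0) so relayed messages reach the store. A sketch of the two subscription calls, assuming requests:

import requests

PUBSUB_TOPIC = "/waku/2/rs/3/0"

for rest_port in (60930, 25317):  # publishing node, store node
    requests.post(
        f"http://127.0.0.1:{rest_port}/relay/v1/subscriptions",
        json=[PUBSUB_TOPIC],
        timeout=5,
    ).raise_for_status()
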
DEBUG  src.steps.store:store.py:132 Relaying message
INFO  src.node.api_clients.base_client:base_client.py:37 curl -v -X POST "http://127.0.0.1:60930/relay/v1/messages/%2Fwaku%2F2%2Frs%2F3%2F0" -H "Content-Type: application/json" -d '{"payload": "U3RvcmUgd29ya3MhIQ==", "contentTopic": "/myapp/1/latest/proto", "timestamp": '$(date +%s%N)'}'
INFO  src.node.api_clients.base_client:base_client.py:22 Response status code: 200. Response content: b'OK'
DEBUG  src.libs.common:common.py:47 Sleeping for 0.2 seconds
DEBUG  src.steps.store:store.py:132 Relaying message
INFO  src.node.api_clients.base_client:base_client.py:37 curl -v -X POST "http://127.0.0.1:60930/relay/v1/messages/%2Fwaku%2F2%2Frs%2F3%2F0" -H "Content-Type: application/json" -d '{"payload": "U3RvcmUgd29ya3MhIQ==", "contentTopic": "/myapp/1/latest/proto", "timestamp": '$(date +%s%N)'}'
INFO  src.node.api_clients.base_client:base_client.py:22 Response status code: 200. Response content: b'OK'
DEBUG  src.libs.common:common.py:47 Sleeping for 0.2 seconds
DEBUG  src.steps.store:store.py:132 Relaying message
INFO  src.node.api_clients.base_client:base_client.py:37 curl -v -X POST "http://127.0.0.1:60930/relay/v1/messages/%2Fwaku%2F2%2Frs%2F3%2F0" -H "Content-Type: application/json" -d '{"payload": "U3RvcmUgd29ya3MhIQ==", "contentTopic": "/myapp/1/latest/proto", "timestamp": '$(date +%s%N)'}'
INFO  src.node.api_clients.base_client:base_client.py:22 Response status code: 200. Response content: b'OK'
DEBUG  src.libs.common:common.py:47 Sleeping for 0.2 seconds
DEBUG  src.steps.store:store.py:132 Relaying message
INFO  src.node.api_clients.base_client:base_client.py:37 curl -v -X POST "http://127.0.0.1:60930/relay/v1/messages/%2Fwaku%2F2%2Frs%2F3%2F0" -H "Content-Type: application/json" -d '{"payload": "U3RvcmUgd29ya3MhIQ==", "contentTopic": "/myapp/1/latest/proto", "timestamp": '$(date +%s%N)'}'
INFO  src.node.api_clients.base_client:base_client.py:22 Response status code: 200. Response content: b'OK'
DEBUG  src.libs.common:common.py:47 Sleeping for 0.2 seconds
DEBUG  src.steps.store:store.py:132 Relaying message
INFO  src.node.api_clients.base_client:base_client.py:37 curl -v -X POST "http://127.0.0.1:60930/relay/v1/messages/%2Fwaku%2F2%2Frs%2F3%2F0" -H "Content-Type: application/json" -d '{"payload": "U3RvcmUgd29ya3MhIQ==", "contentTopic": "/myapp/1/latest/proto", "timestamp": '$(date +%s%N)'}'
INFO  src.node.api_clients.base_client:base_client.py:22 Response status code: 200. Response content: b'OK'
DEBUG  src.libs.common:common.py:47 Sleeping for 0.2 seconds
DEBUG  src.steps.store:store.py:132 Relaying message
INFO  src.node.api_clients.base_client:base_client.py:37 curl -v -X POST "http://127.0.0.1:60930/relay/v1/messages/%2Fwaku%2F2%2Frs%2F3%2F0" -H "Content-Type: application/json" -d '{"payload": "U3RvcmUgd29ya3MhIQ==", "contentTopic": "/myapp/1/latest/proto", "timestamp": '$(date +%s%N)'}'
INFO  src.node.api_clients.base_client:base_client.py:22 Response status code: 200. Response content: b'OK'
DEBUG  src.libs.common:common.py:47 Sleeping for 0.2 seconds
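
The six identical relay publishes above URL-encode the pubsub topic into the path and attach a fresh nanosecond timestamp to each message. A sketch of that loop, assuming requests (the payload decodes to "Store works!!"):

import time
from urllib.parse import quote

import requests

PUBSUB_TOPIC = "/waku/2/rs/3/0"
encoded_topic = quote(PUBSUB_TOPIC, safe="")  # %2Fwaku%2F2%2Frs%2F3%2F0, as in the log

for _ in range(6):
    message = {
        "payload": "U3RvcmUgd29ya3MhIQ==",      # base64 for "Store works!!"
        "contentTopic": "/myapp/1/latest/proto",
        "timestamp": time.time_ns(),            # nanosecond timestamp, like $(date +%s%N)
    }
    requests.post(
        f"http://127.0.0.1:60930/relay/v1/messages/{encoded_topic}",
        json=message,
        timeout=5,
    ).raise_for_status()
    time.sleep(0.2)  # short gap so each message gets a distinct timestamp
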
DEBUG  tests.store.test_time_filter:test_time_filter.py:131 inquiring stored messages with start time -10000
INFO  src.node.api_clients.base_client:base_client.py:37 curl -v -X GET "http://127.0.0.1:60930/store/v3/messages?includeData=True&pubsubTopic=%2Fwaku%2F2%2Frs%2F3%2F0&startTime=-10000&pageSize=20&ascending=true" -H "Content-Type: application/json" -d 'None'
INFO  src.node.api_clients.base_client:base_client.py:22 Response status code: 200. Response content: b'{"requestId":"","statusCode":200,"statusDesc":"OK","messages":[{"messageHash":"0xf6bed1331769cb727d96a60c21d57a1fc1d5d9a76b9f3fa95baa67462b3257ad","message":{"payload":"U3RvcmUgd29ya3MhIQ==","contentTopic":"/myapp/1/latest/proto","version":0,"timestamp":1765686831455055104,"ephemeral":false},"pubsubTopic":"/waku/2/rs/3/0"},{"messageHash":"0x2a00631710c1bc6094a01c754e73444d0c444a1cbfde11803895f04a06961f95","message":{"payload":"U3RvcmUgd29ya3MhIQ==","contentTopic":"/myapp/1/latest/proto","version":0,"timestamp":1765686833455062016,"ephemeral":false},"pubsubTopic":"/waku/2/rs/3/0"},{"messageHash":"0x67715958adf50b4de4af9a13a827cf5c7c1478620ff0359ca825bddab367c272","message":{"payload":"U3RvcmUgd29ya3MhIQ==","contentTopic":"/myapp/1/latest/proto","version":0,"timestamp":1765686834355063808,"ephemeral":false},"pubsubTopic":"/waku/2/rs/3/0"},{"messageHash":"0xa19ad9805693df661e78069f1eefd347116995d762fa3249684e610334cf96f9","message":{"payload":"U3RvcmUgd29ya3MhIQ==","contentTopic":"/myapp/1/latest/proto","version":0,"timestamp":1765686834555066112,"ephemeral":false},"pubsubTopic":"/waku/2/rs/3/0"},{"messageHash":"0xcf054fc81c4fe9499fa252c2707892b856b520f277002f176eb154ea992c5a22","message":{"payload":"U3RvcmUgd29ya3MhIQ==","contentTopic":"/myapp/1/latest/proto","version":0,"timestamp":1765686836455068160,"ephemeral":false},"pubsubTopic":"/waku/2/rs/3/0"},{"messageHash":"0x3872c1b05fcc58fb310e70967a9528157fc6acd1051b7e2a39c413090eb39b65","message":{"payload":"U3RvcmUgd29ya3MhIQ==","contentTopic":"/myapp/1/latest/proto","version":0,"timestamp":1765686844455069184,"ephemeral":false},"pubsubTopic":"/waku/2/rs/3/0"}]}'
DEBUG  tests.store.test_time_filter:test_time_filter.py:134 number of messages stored for start time = -10000 is 6
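
A negative startTime of -10000 lies before every stored timestamp, so the query should behave like an unfiltered one and return all six relayed messages. A sketch of the store query against the publishing node, assuming requests (which URL-encodes the query parameters as shown in the curl above):

import requests

params = {
    "includeData": "true",
    "pubsubTopic": "/waku/2/rs/3/0",
    "startTime": -10000,   # negative value: earlier than any stored message
    "pageSize": 20,
    "ascending": "true",
}
resp = requests.get("http://127.0.0.1:60930/store/v3/messages", params=params, timeout=5)
messages = resp.json()["messages"]
assert len(messages) == 6  # a start time in the past filters nothing out
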
INFO  src.node.api_clients.base_client:base_client.py:37 curl -v -X GET "http://127.0.0.1:25317/store/v3/messages?includeData=True&pubsubTopic=%2Fwaku%2F2%2Frs%2F3%2F0&startTime=-10000&pageSize=20&ascending=true" -H "Content-Type: application/json" -d 'None'
INFO  src.node.api_clients.base_client:base_client.py:22 Response status code: 200. Response content: b'{"requestId":"","statusCode":200,"statusDesc":"OK","messages":[{"messageHash":"0xf6bed1331769cb727d96a60c21d57a1fc1d5d9a76b9f3fa95baa67462b3257ad","message":{"payload":"U3RvcmUgd29ya3MhIQ==","contentTopic":"/myapp/1/latest/proto","version":0,"timestamp":1765686831455055104,"ephemeral":false},"pubsubTopic":"/waku/2/rs/3/0"},{"messageHash":"0x2a00631710c1bc6094a01c754e73444d0c444a1cbfde11803895f04a06961f95","message":{"payload":"U3RvcmUgd29ya3MhIQ==","contentTopic":"/myapp/1/latest/proto","version":0,"timestamp":1765686833455062016,"ephemeral":false},"pubsubTopic":"/waku/2/rs/3/0"},{"messageHash":"0x67715958adf50b4de4af9a13a827cf5c7c1478620ff0359ca825bddab367c272","message":{"payload":"U3RvcmUgd29ya3MhIQ==","contentTopic":"/myapp/1/latest/proto","version":0,"timestamp":1765686834355063808,"ephemeral":false},"pubsubTopic":"/waku/2/rs/3/0"},{"messageHash":"0xa19ad9805693df661e78069f1eefd347116995d762fa3249684e610334cf96f9","message":{"payload":"U3RvcmUgd29ya3MhIQ==","contentTopic":"/myapp/1/latest/proto","version":0,"timestamp":1765686834555066112,"ephemeral":false},"pubsubTopic":"/waku/2/rs/3/0"},{"messageHash":"0xcf054fc81c4fe9499fa252c2707892b856b520f277002f176eb154ea992c5a22","message":{"payload":"U3RvcmUgd29ya3MhIQ==","contentTopic":"/myapp/1/latest/proto","version":0,"timestamp":1765686836455068160,"ephemeral":false},"pubsubTopic":"/waku/2/rs/3/0"},{"messageHash":"0x3872c1b05fcc58fb310e70967a9528157fc6acd1051b7e2a39c413090eb39b65","message":{"payload":"U3RvcmUgd29ya3MhIQ==","contentTopic":"/myapp/1/latest/proto","version":0,"timestamp":1765686844455069184,"ephemeral":false},"pubsubTopic":"/waku/2/rs/3/0"}]}'
DEBUG  tests.store.test_time_filter:test_time_filter.py:134 number of messages stored for start time = -10000 is 6
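
The same query against the store node (REST port 25317) returns the same six messages it obtained from the publishing node via the store protocol. A sketch, under the assumption that the test compares the two nodes' results, of checking both return identical message hashes (the helper name is illustrative):

import requests

def stored_hashes(rest_port):
    """Return the message hashes a node serves for the negative-start-time query."""
    resp = requests.get(
        f"http://127.0.0.1:{rest_port}/store/v3/messages",
        params={"includeData": "true", "pubsubTopic": "/waku/2/rs/3/0",
                "startTime": -10000, "pageSize": 20, "ascending": "true"},
        timeout=5,
    )
    return [m["messageHash"] for m in resp.json()["messages"]]

# Publishing node (60930) and store node (25317) should report the same six messages.
assert stored_hashes(60930) == stored_hashes(25317)
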
DEBUG  tests.conftest:conftest.py:59 Running fixture teardown: test_setup
DEBUG  tests.conftest:conftest.py:83 Running fixture teardown: close_open_nodes
DEBUG  src.node.waku_node:waku_node.py:234 Stopping container with id 4026ad89eb01
DEBUG  src.node.waku_node:waku_node.py:241 Container stopped.
DEBUG  src.node.waku_node:waku_node.py:234 Stopping container with id 8ef10e313f7f
DEBUG  src.node.waku_node:waku_node.py:241 Container stopped.
DEBUG  tests.conftest:conftest.py:98 Running fixture teardown: check_waku_log_errors
DEBUG  src.node.docker_mananger:docker_mananger.py:144 No errors found in the waku logs.
DEBUG  src.node.docker_mananger:docker_mananger.py:144 No errors found in the waku logs.