mirror of https://github.com/logos-messaging/logos-messaging-interop-tests.git (synced 2026-01-04 06:53:07 +00:00)
DEBUG    tests.conftest:conftest.py:51 Running fixture setup: test_id
DEBUG    tests.conftest:conftest.py:57 Running test: test_invalid_pagination_cursor_param with id: 2025-12-29_04-43-13__fea1bf86-acd2-4bbe-b710-d9332970a454
DEBUG    src.steps.common:common.py:19 Running fixture setup: common_setup
DEBUG    src.steps.store:store.py:31 Running fixture setup: store_setup
DEBUG    src.steps.store:store.py:39 Running fixture setup: node_setup
DEBUG    src.node.docker_mananger:docker_mananger.py:19 Docker client initialized with image wakuorg/nwaku:latest
DEBUG    src.node.waku_node:waku_node.py:86 WakuNode instance initialized with log path ./log/docker/publishing_node1_2025-12-29_04-43-13__fea1bf86-acd2-4bbe-b710-d9332970a454__wakuorg_nwaku:latest.log
DEBUG    src.node.waku_node:waku_node.py:90 Starting Node...
DEBUG    src.node.docker_mananger:docker_mananger.py:22 Attempting to create or retrieve network waku
DEBUG    src.node.docker_mananger:docker_mananger.py:25 Network waku already exists
DEBUG    src.node.docker_mananger:docker_mananger.py:108 Generated random external IP 172.18.237.19
DEBUG    src.node.docker_mananger:docker_mananger.py:101 Generated ports ['23275', '23276', '23277', '23278', '23279']
DEBUG    src.node.waku_node:waku_node.py:439 RLN credentials were not set
INFO     src.node.waku_node:waku_node.py:176 RLN credentials not set or credential store not available, starting without RLN
DEBUG    src.node.waku_node:waku_node.py:178 Using volumes []
DEBUG    src.node.docker_mananger:docker_mananger.py:49 docker run -i -t -p 23275:23275 -p 23276:23276 -p 23277:23277 -p 23278:23278 -p 23279:23279 wakuorg/nwaku:latest --listen-address=0.0.0.0 --rest=true --rest-admin=true --websocket-support=true --log-level=TRACE --rest-relay-cache-capacity=100 --websocket-port=23277 --rest-port=23275 --tcp-port=23276 --discv5-udp-port=23278 --rest-address=0.0.0.0 --nat=extip:172.18.237.19 --peer-exchange=true --discv5-discovery=true --cluster-id=3 --nodekey=13d4ea49a6a5c6611d85dbff59e50b45c08c0d07ccc1be22e2e475bfa83b881d --shard=0 --metrics-server=true --metrics-server-address=0.0.0.0 --metrics-server-port=23279 --metrics-logging=true --store=true --relay=true
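
Note: the docker run above is the command the harness assembles for the publishing node. Below is a minimal sketch of launching an equivalent container with the Python Docker SDK (docker-py, assumed available); the image and flags are taken from the logged command, while the SDK usage itself is an illustrative reconstruction, not the harness's own docker_mananger code.

    import docker  # docker-py SDK, assumed installed in the environment

    client = docker.from_env()
    container = client.containers.run(
        "wakuorg/nwaku:latest",
        command=[
            "--listen-address=0.0.0.0", "--rest=true", "--rest-admin=true",
            "--rest-address=0.0.0.0", "--rest-port=23275", "--tcp-port=23276",
            "--websocket-support=true", "--websocket-port=23277",
            "--discv5-discovery=true", "--discv5-udp-port=23278",
            "--nat=extip:172.18.237.19", "--peer-exchange=true",
            "--cluster-id=3", "--shard=0", "--store=true", "--relay=true",
        ],
        # publish the same container ports on the same host ports as the -p flags above
        ports={f"{p}/tcp": p for p in range(23275, 23280)},
        detach=True,
        tty=True,         # -t
        stdin_open=True,  # -i
    )
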
DEBUG    src.node.docker_mananger:docker_mananger.py:55 docker network connect --ip 172.18.237.19 waku 9f4de6396bd8272b851309a91c09b23cd6b131961f201a8a553346e390a47f25
DEBUG    src.node.docker_mananger:docker_mananger.py:58 Container started with ID 9f4de6396bd8. Setting up logs at ./log/docker/publishing_node1_2025-12-29_04-43-13__fea1bf86-acd2-4bbe-b710-d9332970a454__wakuorg_nwaku:latest.log
DEBUG    src.node.waku_node:waku_node.py:190 Started container from image wakuorg/nwaku:latest. REST: 23275
DEBUG    src.libs.common:common.py:47 Sleeping for 1 seconds
ERROR    src.node.docker_mananger:docker_mananger.py:89 Max retries reached for container 75bd02dd3ff8. Exiting log stream.
ERROR    src.node.docker_mananger:docker_mananger.py:89 Max retries reached for container ec99b5632496. Exiting log stream.
INFO     src.node.api_clients.base_client:base_client.py:37 curl -v -X GET "http://127.0.0.1:23275/health" -H "Content-Type: application/json" -d 'None'
INFO     src.node.api_clients.base_client:base_client.py:22 Response status code: 200. Response content: b'{"nodeHealth":"READY","protocolsHealth":[{"Relay":"NOT_READY","desc":"No connected peers"},{"Rln Relay":"NOT_MOUNTED"},{"Lightpush":"NOT_MOUNTED"},{"Legacy Lightpush":"NOT_MOUNTED"},{"Filter":"NOT_MOUNTED"},{"Store":"READY"},{"Legacy Store":"NOT_MOUNTED"},{"Peer Exchange":"READY"},{"Rendezvous":"NOT_READY","desc":"No Rendezvous peers are available yet"},{"Mix":"NOT_MOUNTED"},{"Lightpush Client":"NOT_READY","desc":"No Lightpush service peer available yet"},{"Legacy Lightpush Client":"NOT_READY","desc":"No Lightpush service peer available yet"},{"Store Client":"READY"},{"Legacy Store Client":"NOT_READY","desc":"No Legacy Store service peers are available yet, neither Store service set up for the node"},{"Filter Client":"NOT_READY","desc":"No Filter service peer available yet"}]}'
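
Note: the /health check above is how readiness is decided, with nodeHealth flipping to READY once the REST API and mounted protocols are up. A sketch of the same readiness poll with the requests library follows; the 120 s budget, 1 s interval, and helper name are illustrative choices, not taken from this log.

    import time

    import requests

    def wait_until_ready(rest_port: int, timeout_s: float = 120.0) -> dict:
        # Poll the nwaku REST /health endpoint until the node reports READY.
        deadline = time.time() + timeout_s
        while time.time() < deadline:
            resp = requests.get(f"http://127.0.0.1:{rest_port}/health", timeout=5)
            if resp.ok and resp.json().get("nodeHealth") == "READY":
                return resp.json()
            time.sleep(1)
        raise TimeoutError(f"node on REST port {rest_port} never became READY")

    health = wait_until_ready(23275)
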
INFO     src.node.waku_node:waku_node.py:287 Node protocols are initialized !!
INFO     src.node.api_clients.base_client:base_client.py:37 curl -v -X GET "http://127.0.0.1:23275/debug/v1/info" -H "Content-Type: application/json" -d 'None'
INFO     src.node.api_clients.base_client:base_client.py:22 Response status code: 200. Response content: b'{"listenAddresses":["/ip4/172.18.237.19/tcp/23276/p2p/16Uiu2HAmAb29TfCPVheqJ5vLRujt1stweBnkHCJU27ouPpce2xc6","/ip4/172.18.237.19/tcp/23277/ws/p2p/16Uiu2HAmAb29TfCPVheqJ5vLRujt1stweBnkHCJU27ouPpce2xc6"],"enrUri":"enr:-L24QEiMY9yKyTJblvIW5kriTuRj_6O4GHV-oMtuFgMDJN5HPaZ0r7-bBn2oSHFrsKDfku4brbK1BXCMWCwhcsy9xB8CgmlkgnY0gmlwhKwS7ROKbXVsdGlhZGRyc5YACASsEu0TBlrsAAoErBLtEwZa7d0DgnJzhQADAQAAiXNlY3AyNTZrMaEC4URVba09t1u_UaXf4v-gKcY3buSqxnTSPRwrHAAWiceDdGNwglrsg3VkcIJa7oV3YWt1MgM"}'
INFO     src.node.waku_node:waku_node.py:292 REST service is ready !!
DEBUG    src.node.docker_mananger:docker_mananger.py:19 Docker client initialized with image wakuorg/nwaku:latest
DEBUG    src.node.waku_node:waku_node.py:86 WakuNode instance initialized with log path ./log/docker/store_node1_2025-12-29_04-43-13__fea1bf86-acd2-4bbe-b710-d9332970a454__wakuorg_nwaku:latest.log
DEBUG    src.node.waku_node:waku_node.py:90 Starting Node...
DEBUG    src.node.docker_mananger:docker_mananger.py:22 Attempting to create or retrieve network waku
DEBUG    src.node.docker_mananger:docker_mananger.py:25 Network waku already exists
DEBUG    src.node.docker_mananger:docker_mananger.py:108 Generated random external IP 172.18.62.58
DEBUG    src.node.docker_mananger:docker_mananger.py:101 Generated ports ['52134', '52135', '52136', '52137', '52138']
DEBUG    src.node.waku_node:waku_node.py:439 RLN credentials were not set
INFO     src.node.waku_node:waku_node.py:176 RLN credentials not set or credential store not available, starting without RLN
DEBUG    src.node.waku_node:waku_node.py:178 Using volumes []
DEBUG    src.node.docker_mananger:docker_mananger.py:49 docker run -i -t -p 52134:52134 -p 52135:52135 -p 52136:52136 -p 52137:52137 -p 52138:52138 wakuorg/nwaku:latest --listen-address=0.0.0.0 --rest=true --rest-admin=true --websocket-support=true --log-level=TRACE --rest-relay-cache-capacity=100 --websocket-port=52136 --rest-port=52134 --tcp-port=52135 --discv5-udp-port=52137 --rest-address=0.0.0.0 --nat=extip:172.18.62.58 --peer-exchange=true --discv5-discovery=true --cluster-id=3 --nodekey=bbd98b3ef215b540953d1f2dbbb2b3341ea878a1a9923afa3ddfac27cb90df88 --shard=0 --metrics-server=true --metrics-server-address=0.0.0.0 --metrics-server-port=52138 --metrics-logging=true --discv5-bootstrap-node=enr:-L24QEiMY9yKyTJblvIW5kriTuRj_6O4GHV-oMtuFgMDJN5HPaZ0r7-bBn2oSHFrsKDfku4brbK1BXCMWCwhcsy9xB8CgmlkgnY0gmlwhKwS7ROKbXVsdGlhZGRyc5YACASsEu0TBlrsAAoErBLtEwZa7d0DgnJzhQADAQAAiXNlY3AyNTZrMaEC4URVba09t1u_UaXf4v-gKcY3buSqxnTSPRwrHAAWiceDdGNwglrsg3VkcIJa7oV3YWt1MgM --storenode=/ip4/172.18.237.19/tcp/23276/p2p/16Uiu2HAmAb29TfCPVheqJ5vLRujt1stweBnkHCJU27ouPpce2xc6 --store=true --relay=true
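
Note: the store node's command differs from the publishing node's mainly in its last two flags, pointing it at the first node's ENR for discv5 bootstrap and at its TCP multiaddr as --storenode. Both values come from the first node's /debug/v1/info response shown earlier; the snippet below is a sketch of that lookup (requests assumed available).

    import requests

    # Read the publishing node's ENR and TCP multiaddr back from /debug/v1/info
    # (the response logged above) and build the two flags that distinguish the
    # store node's command line.
    info = requests.get("http://127.0.0.1:23275/debug/v1/info", timeout=5).json()
    enr = info["enrUri"]                   # value for --discv5-bootstrap-node
    tcp_addr = info["listenAddresses"][0]  # /ip4/.../tcp/23276/p2p/16Uiu2HAm..., value for --storenode

    extra_flags = [
        f"--discv5-bootstrap-node={enr}",
        f"--storenode={tcp_addr}",
    ]
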
DEBUG    src.node.docker_mananger:docker_mananger.py:55 docker network connect --ip 172.18.62.58 waku b0a6e6a150b343827b1faab774ef0c1ce1f1456b34024340b1a12e7248ee95e5
DEBUG    src.node.docker_mananger:docker_mananger.py:58 Container started with ID b0a6e6a150b3. Setting up logs at ./log/docker/store_node1_2025-12-29_04-43-13__fea1bf86-acd2-4bbe-b710-d9332970a454__wakuorg_nwaku:latest.log
DEBUG    src.node.waku_node:waku_node.py:190 Started container from image wakuorg/nwaku:latest. REST: 52134
DEBUG    src.libs.common:common.py:47 Sleeping for 1 seconds
INFO     src.node.api_clients.base_client:base_client.py:37 curl -v -X GET "http://127.0.0.1:52134/health" -H "Content-Type: application/json" -d 'None'
INFO     src.node.api_clients.base_client:base_client.py:22 Response status code: 200. Response content: b'{"nodeHealth":"READY","protocolsHealth":[{"Relay":"READY"},{"Rln Relay":"NOT_MOUNTED"},{"Lightpush":"NOT_MOUNTED"},{"Legacy Lightpush":"NOT_MOUNTED"},{"Filter":"NOT_MOUNTED"},{"Store":"READY"},{"Legacy Store":"NOT_MOUNTED"},{"Peer Exchange":"READY"},{"Rendezvous":"NOT_READY","desc":"No Rendezvous peers are available yet"},{"Mix":"NOT_MOUNTED"},{"Lightpush Client":"NOT_READY","desc":"No Lightpush service peer available yet"},{"Legacy Lightpush Client":"NOT_READY","desc":"No Lightpush service peer available yet"},{"Store Client":"READY"},{"Legacy Store Client":"READY"},{"Filter Client":"NOT_READY","desc":"No Filter service peer available yet"}]}'
INFO     src.node.waku_node:waku_node.py:287 Node protocols are initialized !!
INFO     src.node.api_clients.base_client:base_client.py:37 curl -v -X GET "http://127.0.0.1:52134/debug/v1/info" -H "Content-Type: application/json" -d 'None'
INFO     src.node.api_clients.base_client:base_client.py:22 Response status code: 200. Response content: b'{"listenAddresses":["/ip4/172.18.62.58/tcp/52135/p2p/16Uiu2HAmFGsZqNuXiGQzeidutdWCP7mrrW1dPtbakkoHLY17J5iV","/ip4/172.18.62.58/tcp/52136/ws/p2p/16Uiu2HAmFGsZqNuXiGQzeidutdWCP7mrrW1dPtbakkoHLY17J5iV"],"enrUri":"enr:-L24QPYGvCObMcnTxM-bjWRJBesLHGd3D_3I4wge6dci7QlBYuM35egrxESl0bAqAz00Rr6p2UDZls-dltt4_KHessoCgmlkgnY0gmlwhKwSPjqKbXVsdGlhZGRyc5YACASsEj46BsunAAoErBI-OgbLqN0DgnJzhQADAQAAiXNlY3AyNTZrMaEDJuic1I3BreuB7nb8VROavnxCawWwMVFsh3b8HXIMgj6DdGNwgsung3VkcILLqYV3YWt1MgM"}'
INFO     src.node.waku_node:waku_node.py:292 REST service is ready !!
INFO     src.node.api_clients.base_client:base_client.py:37 curl -v -X POST "http://127.0.0.1:52134/admin/v1/peers" -H "Content-Type: application/json" -d '["/ip4/172.18.237.19/tcp/23276/p2p/16Uiu2HAmAb29TfCPVheqJ5vLRujt1stweBnkHCJU27ouPpce2xc6"]'
INFO     src.node.api_clients.base_client:base_client.py:22 Response status code: 200. Response content: b'OK'
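
Note: the admin call above explicitly dials the publishing node from the store node. A sketch of the equivalent request with the requests library (the endpoint, multiaddr, and "OK" body are the ones logged above; everything else is illustrative):

    import requests

    # Add the publishing node as a peer of the store node via the REST admin API
    # (enabled by --rest-admin=true on both nodes).
    peer = "/ip4/172.18.237.19/tcp/23276/p2p/16Uiu2HAmAb29TfCPVheqJ5vLRujt1stweBnkHCJU27ouPpce2xc6"
    resp = requests.post("http://127.0.0.1:52134/admin/v1/peers", json=[peer], timeout=5)
    assert resp.status_code == 200  # the node answers with a plain "OK" body
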
INFO     src.node.api_clients.base_client:base_client.py:37 curl -v -X POST "http://127.0.0.1:23275/relay/v1/subscriptions" -H "Content-Type: application/json" -d '["/waku/2/rs/3/0"]'
INFO     src.node.api_clients.base_client:base_client.py:22 Response status code: 200. Response content: b'OK'
INFO     src.node.api_clients.base_client:base_client.py:37 curl -v -X POST "http://127.0.0.1:52134/relay/v1/subscriptions" -H "Content-Type: application/json" -d '["/waku/2/rs/3/0"]'
INFO     src.node.api_clients.base_client:base_client.py:22 Response status code: 200. Response content: b'OK'
DEBUG    src.steps.store:store.py:132 Relaying message
INFO     src.node.api_clients.base_client:base_client.py:37 curl -v -X POST "http://127.0.0.1:23275/relay/v1/messages/%2Fwaku%2F2%2Frs%2F3%2F0" -H "Content-Type: application/json" -d '{"payload": "TWVzc2FnZV8w", "contentTopic": "/myapp/1/latest/proto", "timestamp": '$(date +%s%N)'}'
INFO     src.node.api_clients.base_client:base_client.py:22 Response status code: 200. Response content: b'OK'
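
Note: in the publish above, "TWVzc2FnZV8w" is simply base64("Message_0"), the pubsub topic /waku/2/rs/3/0 is URL-encoded into the path, and the timestamp is in nanoseconds, matching $(date +%s%N) in the curl command. A sketch of the same publish in Python (requests assumed available):

    import base64
    import time
    import urllib.parse

    import requests

    # Publish one relay message to the /waku/2/rs/3/0 pubsub topic, mirroring the
    # curl command logged above.
    pubsub_topic = urllib.parse.quote("/waku/2/rs/3/0", safe="")  # -> %2Fwaku%2F2%2Frs%2F3%2F0
    message = {
        "payload": base64.b64encode(b"Message_0").decode(),  # "TWVzc2FnZV8w"
        "contentTopic": "/myapp/1/latest/proto",
        "timestamp": time.time_ns(),                          # nanosecond timestamp
    }
    resp = requests.post(
        f"http://127.0.0.1:23275/relay/v1/messages/{pubsub_topic}",
        json=message,
        timeout=5,
    )
    assert resp.status_code == 200
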
DEBUG    src.libs.common:common.py:47 Sleeping for 0.2 seconds
DEBUG    src.steps.store:store.py:132 Relaying message
INFO     src.node.api_clients.base_client:base_client.py:37 curl -v -X POST "http://127.0.0.1:23275/relay/v1/messages/%2Fwaku%2F2%2Frs%2F3%2F0" -H "Content-Type: application/json" -d '{"payload": "TWVzc2FnZV8x", "contentTopic": "/myapp/1/latest/proto", "timestamp": '$(date +%s%N)'}'
INFO     src.node.api_clients.base_client:base_client.py:22 Response status code: 200. Response content: b'OK'
DEBUG    src.libs.common:common.py:47 Sleeping for 0.2 seconds
DEBUG    src.steps.store:store.py:132 Relaying message
INFO     src.node.api_clients.base_client:base_client.py:37 curl -v -X POST "http://127.0.0.1:23275/relay/v1/messages/%2Fwaku%2F2%2Frs%2F3%2F0" -H "Content-Type: application/json" -d '{"payload": "TWVzc2FnZV8y", "contentTopic": "/myapp/1/latest/proto", "timestamp": '$(date +%s%N)'}'
INFO     src.node.api_clients.base_client:base_client.py:22 Response status code: 200. Response content: b'OK'
DEBUG    src.libs.common:common.py:47 Sleeping for 0.2 seconds
DEBUG    src.steps.store:store.py:132 Relaying message
INFO     src.node.api_clients.base_client:base_client.py:37 curl -v -X POST "http://127.0.0.1:23275/relay/v1/messages/%2Fwaku%2F2%2Frs%2F3%2F0" -H "Content-Type: application/json" -d '{"payload": "TWVzc2FnZV8z", "contentTopic": "/myapp/1/latest/proto", "timestamp": '$(date +%s%N)'}'
INFO     src.node.api_clients.base_client:base_client.py:22 Response status code: 200. Response content: b'OK'
DEBUG    src.libs.common:common.py:47 Sleeping for 0.2 seconds
DEBUG    src.steps.store:store.py:132 Relaying message
INFO     src.node.api_clients.base_client:base_client.py:37 curl -v -X POST "http://127.0.0.1:23275/relay/v1/messages/%2Fwaku%2F2%2Frs%2F3%2F0" -H "Content-Type: application/json" -d '{"payload": "TWVzc2FnZV80", "contentTopic": "/myapp/1/latest/proto", "timestamp": '$(date +%s%N)'}'
INFO     src.node.api_clients.base_client:base_client.py:22 Response status code: 200. Response content: b'OK'
DEBUG    src.libs.common:common.py:47 Sleeping for 0.2 seconds
INFO     src.node.api_clients.base_client:base_client.py:37 curl -v -X GET "http://127.0.0.1:23275/store/v3/messages?pubsubTopic=%2Fwaku%2F2%2Frs%2F3%2F0&pageSize=3&ascending=true" -H "Content-Type: application/json" -d 'None'
INFO     src.node.api_clients.base_client:base_client.py:22 Response status code: 200. Response content: b'{"requestId":"","statusCode":200,"statusDesc":"OK","messages":[{"messageHash":"0x2eada0ca14e5fc554d8fa315b28546cade2f75bb59e6b7b5b0924709a4d9e41d"},{"messageHash":"0x39071ea44e9cf94698c6683f6c18f322d1bd2936b5218cd77d9240213d095f7d"},{"messageHash":"0x7a17f6aca102c7de4c986e1037dc942d948dd8011f520d0b49899f9f5dc269e9"}],"paginationCursor":"0x7a17f6aca102c7de4c986e1037dc942d948dd8011f520d0b49899f9f5dc269e9"}'
INFO     src.node.api_clients.base_client:base_client.py:37 curl -v -X GET "http://127.0.0.1:23275/store/v3/messages?pubsubTopic=%2Fwaku%2F2%2Frs%2F3%2F0&pageSize=3&ascending=true&paginationCursor=0x7a17f6aca102c7de4c986e1037dc942d948dd8011f520d0b49899f9f5dc269e9" -H "Content-Type: application/json" -d 'None'
INFO     src.node.api_clients.base_client:base_client.py:22 Response status code: 200. Response content: b'{"requestId":"","statusCode":200,"statusDesc":"OK","messages":[{"messageHash":"0x2eada0ca14e5fc554d8fa315b28546cade2f75bb59e6b7b5b0924709a4d9e41d"},{"messageHash":"0x39071ea44e9cf94698c6683f6c18f322d1bd2936b5218cd77d9240213d095f7d"},{"messageHash":"0x7a17f6aca102c7de4c986e1037dc942d948dd8011f520d0b49899f9f5dc269e9"}],"paginationCursor":"0x7a17f6aca102c7de4c986e1037dc942d948dd8011f520d0b49899f9f5dc269e9"}'
INFO     src.node.api_clients.base_client:base_client.py:37 curl -v -X GET "http://127.0.0.1:23275/store/v3/messages?pubsubTopic=%2Fwaku%2F2%2Frs%2F3%2F0&cursor=0x7a17f6aca102c7de4c986e1037dc942d948dd8011f520d0b49899f9f5dc269e9&pageSize=3&ascending=true" -H "Content-Type: application/json" -d 'None'
INFO     src.node.api_clients.base_client:base_client.py:22 Response status code: 200. Response content: b'{"requestId":"","statusCode":200,"statusDesc":"OK","messages":[{"messageHash":"0xcf014eec622487707f5fd47cb19fd62e8b9fcfb5caadd24116fd6c422967ca24"},{"messageHash":"0xe40031cda2c63b2c01d1a98aeb5d80beaa73f6d148c05a2d83ac5600b91caba9"}]}'
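
Note: the three store queries above are the point of test_invalid_pagination_cursor_param. The first page (pageSize=3, ascending) comes back with a paginationCursor field in the body; passing that value under the unsupported query-parameter name paginationCursor appears to be ignored here, since the same first page is returned, while passing it as cursor advances to the remaining two messages. Below is a sketch of walking all pages with the accepted parameter name (requests assumed available, fetch_all_hashes is a hypothetical helper):

    import requests

    def fetch_all_hashes(rest_port: int, pubsub_topic: str, page_size: int = 3) -> list:
        # Page through the store by feeding the returned paginationCursor back as
        # the "cursor" query parameter on the next request.
        params = {"pubsubTopic": pubsub_topic, "pageSize": page_size, "ascending": "true"}
        hashes = []
        while True:
            body = requests.get(
                f"http://127.0.0.1:{rest_port}/store/v3/messages", params=params, timeout=5
            ).json()
            hashes.extend(m["messageHash"] for m in body.get("messages", []))
            cursor = body.get("paginationCursor")
            if not cursor:            # the last page carries no cursor
                return hashes
            params["cursor"] = cursor

    all_hashes = fetch_all_hashes(23275, "/waku/2/rs/3/0")  # expected: the 5 relayed messages

The same three queries are then repeated against the store node on port 52134 below and return the same message hashes.
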
INFO     src.node.api_clients.base_client:base_client.py:37 curl -v -X GET "http://127.0.0.1:52134/store/v3/messages?pubsubTopic=%2Fwaku%2F2%2Frs%2F3%2F0&pageSize=3&ascending=true" -H "Content-Type: application/json" -d 'None'
INFO     src.node.api_clients.base_client:base_client.py:22 Response status code: 200. Response content: b'{"requestId":"","statusCode":200,"statusDesc":"OK","messages":[{"messageHash":"0x2eada0ca14e5fc554d8fa315b28546cade2f75bb59e6b7b5b0924709a4d9e41d"},{"messageHash":"0x39071ea44e9cf94698c6683f6c18f322d1bd2936b5218cd77d9240213d095f7d"},{"messageHash":"0x7a17f6aca102c7de4c986e1037dc942d948dd8011f520d0b49899f9f5dc269e9"}],"paginationCursor":"0x7a17f6aca102c7de4c986e1037dc942d948dd8011f520d0b49899f9f5dc269e9"}'
INFO     src.node.api_clients.base_client:base_client.py:37 curl -v -X GET "http://127.0.0.1:52134/store/v3/messages?pubsubTopic=%2Fwaku%2F2%2Frs%2F3%2F0&pageSize=3&ascending=true&paginationCursor=0x7a17f6aca102c7de4c986e1037dc942d948dd8011f520d0b49899f9f5dc269e9" -H "Content-Type: application/json" -d 'None'
INFO     src.node.api_clients.base_client:base_client.py:22 Response status code: 200. Response content: b'{"requestId":"","statusCode":200,"statusDesc":"OK","messages":[{"messageHash":"0x2eada0ca14e5fc554d8fa315b28546cade2f75bb59e6b7b5b0924709a4d9e41d"},{"messageHash":"0x39071ea44e9cf94698c6683f6c18f322d1bd2936b5218cd77d9240213d095f7d"},{"messageHash":"0x7a17f6aca102c7de4c986e1037dc942d948dd8011f520d0b49899f9f5dc269e9"}],"paginationCursor":"0x7a17f6aca102c7de4c986e1037dc942d948dd8011f520d0b49899f9f5dc269e9"}'
INFO     src.node.api_clients.base_client:base_client.py:37 curl -v -X GET "http://127.0.0.1:52134/store/v3/messages?pubsubTopic=%2Fwaku%2F2%2Frs%2F3%2F0&cursor=0x7a17f6aca102c7de4c986e1037dc942d948dd8011f520d0b49899f9f5dc269e9&pageSize=3&ascending=true" -H "Content-Type: application/json" -d 'None'
INFO     src.node.api_clients.base_client:base_client.py:22 Response status code: 200. Response content: b'{"requestId":"","statusCode":200,"statusDesc":"OK","messages":[{"messageHash":"0xcf014eec622487707f5fd47cb19fd62e8b9fcfb5caadd24116fd6c422967ca24"},{"messageHash":"0xe40031cda2c63b2c01d1a98aeb5d80beaa73f6d148c05a2d83ac5600b91caba9"}]}'
DEBUG    tests.conftest:conftest.py:59 Running fixture teardown: test_setup
DEBUG    tests.conftest:conftest.py:83 Running fixture teardown: close_open_nodes
DEBUG    src.node.waku_node:waku_node.py:234 Stopping container with id 9f4de6396bd8
DEBUG    src.node.waku_node:waku_node.py:241 Container stopped.
DEBUG    src.node.waku_node:waku_node.py:234 Stopping container with id b0a6e6a150b3
DEBUG    src.node.waku_node:waku_node.py:241 Container stopped.
DEBUG    tests.conftest:conftest.py:98 Running fixture teardown: check_waku_log_errors
DEBUG    src.node.docker_mananger:docker_mananger.py:144 No errors found in the waku logs.
DEBUG    src.node.docker_mananger:docker_mananger.py:144 No errors found in the waku logs.