mirror of https://github.com/waku-org/js-waku.git
add final tests
This commit is contained in:
parent
5519877b5e
commit
0e5ff3e13d
|
@ -6,6 +6,6 @@
|
|||
"experimental-specifier-resolution=node",
|
||||
"loader=ts-node/esm"
|
||||
],
|
||||
"exit": true,
|
||||
"retries": 3
|
||||
"exit": true
|
||||
// "retries": 3
|
||||
}
|
||||
|
|
|
@ -17,6 +17,7 @@ import {
|
|||
describe("Waku Store, cursor", function () {
|
||||
this.timeout(15000);
|
||||
let waku: LightNode;
|
||||
let waku2: LightNode;
|
||||
let nwaku: NimGoNode;
|
||||
|
||||
beforeEach(async function () {
|
||||
|
@ -28,7 +29,7 @@ describe("Waku Store, cursor", function () {
|
|||
|
||||
afterEach(async function () {
|
||||
this.timeout(15000);
|
||||
await tearDownNodes([nwaku], [waku]);
|
||||
await tearDownNodes([nwaku], [waku, waku2]);
|
||||
});
|
||||
|
||||
[
|
||||
|
@ -59,8 +60,6 @@ describe("Waku Store, cursor", function () {
|
|||
// create cursor to extract messages after the cursorIndex
|
||||
const cursor = await createCursor(messages[cursorIndex]);
|
||||
|
||||
// cursor.digest = new Uint8Array([]);
|
||||
|
||||
const messagesAfterCursor: DecodedMessage[] = [];
|
||||
for await (const page of waku.store.queryGenerator([TestDecoder], {
|
||||
cursor
|
||||
|
@ -90,6 +89,44 @@ describe("Waku Store, cursor", function () {
|
|||
});
|
||||
});
|
||||
|
||||
it("Reusing cursor across nodes", async function () {
|
||||
await sendMessages(nwaku, totalMsgs, TestContentTopic, DefaultPubSubTopic);
|
||||
waku = await startAndConnectLightNode(nwaku);
|
||||
waku2 = await startAndConnectLightNode(nwaku);
|
||||
|
||||
// messages in reversed order (first message at last index)
|
||||
const messages: DecodedMessage[] = [];
|
||||
for await (const page of waku.store.queryGenerator([TestDecoder])) {
|
||||
for await (const msg of page.reverse()) {
|
||||
messages.push(msg as DecodedMessage);
|
||||
}
|
||||
}
|
||||
|
||||
// create cursor to extract messages after the cursorIndex
|
||||
const cursor = await createCursor(messages[5]);
|
||||
|
||||
// query node2 with the cursor from node1
|
||||
const messagesAfterCursor: DecodedMessage[] = [];
|
||||
for await (const page of waku2.store.queryGenerator([TestDecoder], {
|
||||
cursor
|
||||
})) {
|
||||
for await (const msg of page.reverse()) {
|
||||
if (msg) {
|
||||
messagesAfterCursor.push(msg as DecodedMessage);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
expect(messages.length).be.eql(totalMsgs);
|
||||
expect(messagesAfterCursor.length).be.eql(totalMsgs - 6);
|
||||
expect(bytesToUtf8(messagesAfterCursor[0].payload)).to.be.eq(
|
||||
bytesToUtf8(messages[6].payload)
|
||||
);
|
||||
expect(
|
||||
bytesToUtf8(messagesAfterCursor[messagesAfterCursor.length - 1].payload)
|
||||
).to.be.eq(bytesToUtf8(messages[messages.length - 1].payload));
|
||||
});
|
||||
|
||||
it("Passing cursor with wrong message digest", async function () {
|
||||
await sendMessages(nwaku, totalMsgs, TestContentTopic, DefaultPubSubTopic);
|
||||
waku = await startAndConnectLightNode(nwaku);
|
||||
|
|
|
@ -31,11 +31,10 @@ describe("Waku Store, time filter", function () {
|
|||
[-19000, -10, 10],
|
||||
[-19000, 1, 4],
|
||||
[-19000, -2, -1],
|
||||
[-19000, 0, 1000],
|
||||
// [-19000, 0, 1000], // skipped because it fails on gowaku
|
||||
[-19000, -1000, 0],
|
||||
[19000, 4, 1],
|
||||
[19000, -10010, -9990],
|
||||
[19000, -10, 10]
|
||||
[19000, -10, 10], // message in the future
|
||||
[-19000, 10, -10] // startTime is newer than endTime
|
||||
].forEach(([msgTime, startTime, endTime]) => {
|
||||
it(`msgTime: ${msgTime} ms from now, startTime: ${
|
||||
msgTime + startTime
|
||||
|
@ -70,7 +69,11 @@ describe("Waku Store, time filter", function () {
|
|||
);
|
||||
|
||||
// in this context 0 is the messageTimestamp
|
||||
if ((startTime > 0 && endTime > 0) || (startTime < 0 && endTime < 0)) {
|
||||
if (
|
||||
(startTime > 0 && endTime > 0) ||
|
||||
(startTime < 0 && endTime < 0) ||
|
||||
startTime > endTime
|
||||
) {
|
||||
expect(messages.length).eq(0);
|
||||
} else {
|
||||
expect(messages.length).eq(1);
|
||||
|
|
Loading…
Reference in New Issue