fix: store v3 validate cursor & remove messages (#2636)

Simon-Pierre Vivier 2024-05-01 14:47:06 -04:00 committed by GitHub
parent 44703f2608
commit db72e2b823
21 changed files with 220 additions and 154 deletions
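
The change in a nutshell, as a minimal client-side sketch (illustrative only, not part of the diff; the import paths and content topic string are assumptions): Store v3 queries now carry an explicit includeData flag, and every returned WakuMessageKeyValue holds an Option[WakuMessage] that has to be unwrapped, since a response may contain hashes only.

# Hypothetical usage after this change; module paths below are assumptions.
import std/options
import waku/waku_core, waku/waku_store/common

let query = StoreQueryRequest(
  requestId: "1",
  includeData: true, # without this, the server may answer with message hashes only
  contentTopics: @["/waku/2/default-content/proto"],
  paginationForward: PagingDirection.FORWARD,
)

proc extractMessages(resp: StoreQueryResponse): seq[WakuMessage] =
  # `message` is now Option[WakuMessage]; skip hash-only entries.
  for kv in resp.messages:
    if kv.message.isSome():
      result.add(kv.message.get())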


@@ -59,10 +59,13 @@ suite "Waku Store - End to End - Sorted Archive":
     fakeWakuMessage(@[byte 09], ts = ts(90, timeOrigin)),
   ]
 archiveMessages = messages.mapIt(
-  WakuMessageKeyValue(messageHash: computeMessageHash(pubsubTopic, it), message: it)
+  WakuMessageKeyValue(
+    messageHash: computeMessageHash(pubsubTopic, it), message: some(it)
+  )
 )
 storeQuery = StoreQueryRequest(
+  includeData: true,
   pubsubTopic: some(pubsubTopic),
   contentTopics: contentTopicSeq,
   paginationForward: PagingDirection.Forward,
@@ -102,6 +105,7 @@ suite "Waku Store - End to End - Sorted Archive":
 # Given the next query
 var otherHistoryQuery = StoreQueryRequest(
+  includeData: true,
   pubsubTopic: some(pubsubTopic),
   contentTopics: contentTopicSeq,
   paginationCursor: queryResponse.get().paginationCursor,
@@ -130,6 +134,7 @@ suite "Waku Store - End to End - Sorted Archive":
 # Given the next query
 var nextHistoryQuery = StoreQueryRequest(
+  includeData: true,
   paginationCursor: queryResponse.get().paginationCursor,
   pubsubTopic: some(pubsubTopic),
   contentTopics: contentTopicSeq,
@@ -159,6 +164,7 @@ suite "Waku Store - End to End - Sorted Archive":
 # Given the next query (2/5)
 let historyQuery2 = StoreQueryRequest(
+  includeData: true,
   paginationCursor: queryResponse1.get().paginationCursor,
   pubsubTopic: some(pubsubTopic),
   contentTopics: contentTopicSeq,
@@ -175,6 +181,7 @@ suite "Waku Store - End to End - Sorted Archive":
 # Given the next query (3/5)
 let historyQuery3 = StoreQueryRequest(
+  includeData: true,
   paginationCursor: queryResponse2.get().paginationCursor,
   pubsubTopic: some(pubsubTopic),
   contentTopics: contentTopicSeq,
@@ -191,6 +198,7 @@ suite "Waku Store - End to End - Sorted Archive":
 # Given the next query (4/5)
 let historyQuery4 = StoreQueryRequest(
+  includeData: true,
   paginationCursor: queryResponse3.get().paginationCursor,
   pubsubTopic: some(pubsubTopic),
   contentTopics: contentTopicSeq,
@@ -207,6 +215,7 @@ suite "Waku Store - End to End - Sorted Archive":
 # Given the next query (5/5)
 let historyQuery5 = StoreQueryRequest(
+  includeData: true,
   paginationCursor: queryResponse4.get().paginationCursor,
   pubsubTopic: some(pubsubTopic),
   contentTopics: contentTopicSeq,
@@ -234,6 +243,7 @@ suite "Waku Store - End to End - Sorted Archive":
 # Given the next query (2/2)
 let historyQuery2 = StoreQueryRequest(
+  includeData: true,
   paginationCursor: queryResponse1.get().paginationCursor,
   pubsubTopic: some(pubsubTopic),
   contentTopics: contentTopicSeq,
@@ -272,6 +282,7 @@ suite "Waku Store - End to End - Sorted Archive":
 # Given the next query (2/3)
 let historyQuery2 = StoreQueryRequest(
+  includeData: true,
   paginationCursor: queryResponse1.get().paginationCursor,
   pubsubTopic: some(pubsubTopic),
   contentTopics: contentTopicSeq,
@@ -288,6 +299,7 @@ suite "Waku Store - End to End - Sorted Archive":
 # Given the next query (3/3)
 let historyQuery3 = StoreQueryRequest(
+  includeData: true,
   paginationCursor: queryResponse2.get().paginationCursor,
   pubsubTopic: some(pubsubTopic),
   contentTopics: contentTopicSeq,
@@ -310,7 +322,7 @@ suite "Waku Store - End to End - Sorted Archive":
 let missingMessagesAmount = archive.DefaultPageSize - currentStoreLen + 5
 let lastMessageTimestamp =
-  archiveMessages[archiveMessages.len - 1].message.timestamp
+  archiveMessages[archiveMessages.len - 1].message.get().timestamp
 var extraMessages: seq[WakuMessage] = @[]
 for i in 0 ..< missingMessagesAmount:
   let
@@ -325,7 +337,7 @@ suite "Waku Store - End to End - Sorted Archive":
 archiveMessages &
   extraMessages.mapIt(
     WakuMessageKeyValue(
-      messageHash: computeMessageHash(pubsubTopic, it), message: it
+      messageHash: computeMessageHash(pubsubTopic, it), message: some(it)
     )
   )
@@ -341,6 +353,7 @@ suite "Waku Store - End to End - Sorted Archive":
 # Given the next query (2/2)
 let historyQuery2 = StoreQueryRequest(
+  includeData: true,
   paginationCursor: queryResponse1.get().paginationCursor,
   pubsubTopic: some(pubsubTopic),
   contentTopics: contentTopicSeq,
@@ -364,7 +377,7 @@ suite "Waku Store - End to End - Sorted Archive":
 let missingMessagesAmount = archive.DefaultPageSize - currentStoreLen + 5
 let lastMessageTimestamp =
-  archiveMessages[archiveMessages.len - 1].message.timestamp
+  archiveMessages[archiveMessages.len - 1].message.get().timestamp
 var extraMessages: seq[WakuMessage] = @[]
 for i in 0 ..< missingMessagesAmount:
   let
@@ -379,12 +392,13 @@ suite "Waku Store - End to End - Sorted Archive":
 archiveMessages &
   extraMessages.mapIt(
     WakuMessageKeyValue(
-      messageHash: computeMessageHash(pubsubTopic, it), message: it
+      messageHash: computeMessageHash(pubsubTopic, it), message: some(it)
     )
   )
 # Given a query with default page size (1/2)
 storeQuery = StoreQueryRequest(
+  includeData: true,
   pubsubTopic: some(pubsubTopic),
   contentTopics: contentTopicSeq,
   paginationForward: PagingDirection.FORWARD,
@@ -399,6 +413,7 @@ suite "Waku Store - End to End - Sorted Archive":
 # Given the next query (2/2)
 let historyQuery2 = StoreQueryRequest(
+  includeData: true,
   paginationCursor: queryResponse.get().paginationCursor,
   pubsubTopic: some(pubsubTopic),
   contentTopics: contentTopicSeq,
@@ -457,8 +472,9 @@ suite "Waku Store - End to End - Sorted Archive":
 asyncTest "Cursor Reusability Across Nodes":
   # Given a different server node with the same archive
   let
-    otherArchiveDriverWithMessages =
-      newArchiveDriverWithMessages(pubsubTopic, archiveMessages.mapIt(it.message))
+    otherArchiveDriverWithMessages = newArchiveDriverWithMessages(
+      pubsubTopic, archiveMessages.mapIt(it.message.get())
+    )
     otherServerKey = generateSecp256k1Key()
     otherServer =
       newTestWakuNode(otherServerKey, ValidIpAddress.init("0.0.0.0"), Port(0))
@@ -483,6 +499,7 @@ suite "Waku Store - End to End - Sorted Archive":
 # When making a history query to the second server node
 let otherHistoryQuery = StoreQueryRequest(
+  includeData: true,
   paginationCursor: paginationCursor,
   pubsubTopic: some(pubsubTopic),
   contentTopics: contentTopicSeq,
@@ -518,6 +535,7 @@ suite "Waku Store - End to End - Unsorted Archive":
 contentTopicSeq = @[contentTopic]
 storeQuery = StoreQueryRequest(
+  includeData: true,
   pubsubTopic: some(pubsubTopic),
   contentTopics: contentTopicSeq,
   paginationForward: PagingDirection.FORWARD,
@@ -539,7 +557,9 @@ suite "Waku Store - End to End - Unsorted Archive":
     fakeWakuMessage(@[byte 05], ts = ts(20, timeOrigin)),
   ]
 unsortedArchiveMessages = messages.mapIt(
-  WakuMessageKeyValue(messageHash: computeMessageHash(pubsubTopic, it), message: it)
+  WakuMessageKeyValue(
+    messageHash: computeMessageHash(pubsubTopic, it), message: some(it)
+  )
 )

 let
@@ -575,17 +595,17 @@ suite "Waku Store - End to End - Unsorted Archive":
 check:
   queryResponse.get().messages.len == 5
-  queryResponse.get().messages[0].message.timestamp ==
-    queryResponse.get().messages[1].message.timestamp
-  queryResponse.get().messages[1].message.timestamp ==
-    queryResponse.get().messages[2].message.timestamp
-  queryResponse.get().messages[2].message.timestamp <
-    queryResponse.get().messages[3].message.timestamp
-  queryResponse.get().messages[3].message.timestamp ==
-    queryResponse.get().messages[4].message.timestamp
+  queryResponse.get().messages[0].message.get().timestamp ==
+    queryResponse.get().messages[1].message.get().timestamp
+  queryResponse.get().messages[1].message.get().timestamp ==
+    queryResponse.get().messages[2].message.get().timestamp
+  queryResponse.get().messages[2].message.get().timestamp <
+    queryResponse.get().messages[3].message.get().timestamp
+  queryResponse.get().messages[3].message.get().timestamp ==
+    queryResponse.get().messages[4].message.get().timestamp
   toHex(queryResponse.get().messages[0].messageHash) <
     toHex(queryResponse.get().messages[1].messageHash)
@@ -598,6 +618,7 @@ suite "Waku Store - End to End - Unsorted Archive":
 # Given the next query
 var historyQuery2 = StoreQueryRequest(
+  includeData: true,
   paginationCursor: queryResponse.get().paginationCursor,
   pubsubTopic: some(pubsubTopic),
   contentTopics: contentTopicSeq,
@@ -610,17 +631,17 @@ suite "Waku Store - End to End - Unsorted Archive":
 # Check the ordering
 check:
-  queryResponse2.get().messages[0].message.timestamp <
-    queryResponse2.get().messages[1].message.timestamp
-  queryResponse2.get().messages[1].message.timestamp ==
-    queryResponse2.get().messages[2].message.timestamp
-  queryResponse2.get().messages[2].message.timestamp ==
-    queryResponse2.get().messages[3].message.timestamp
-  queryResponse2.get().messages[3].message.timestamp ==
-    queryResponse2.get().messages[4].message.timestamp
+  queryResponse2.get().messages[0].message.get().timestamp <
+    queryResponse2.get().messages[1].message.get().timestamp
+  queryResponse2.get().messages[1].message.get().timestamp ==
+    queryResponse2.get().messages[2].message.get().timestamp
+  queryResponse2.get().messages[2].message.get().timestamp ==
+    queryResponse2.get().messages[3].message.get().timestamp
+  queryResponse2.get().messages[3].message.get().timestamp ==
+    queryResponse2.get().messages[4].message.get().timestamp
   toHex(queryResponse2.get().messages[1].messageHash) <
     toHex(queryResponse2.get().messages[2].messageHash)
@@ -651,11 +672,11 @@ suite "Waku Store - End to End - Unsorted Archive":
 check:
   queryResponse.get().messages.len == 3
-  queryResponse.get().messages[0].message.timestamp ==
-    queryResponse.get().messages[1].message.timestamp
-  queryResponse.get().messages[1].message.timestamp ==
-    queryResponse.get().messages[2].message.timestamp
+  queryResponse.get().messages[0].message.get().timestamp ==
+    queryResponse.get().messages[1].message.get().timestamp
+  queryResponse.get().messages[1].message.get().timestamp ==
+    queryResponse.get().messages[2].message.get().timestamp
   toHex(queryResponse.get().messages[0].messageHash) <
     toHex(queryResponse.get().messages[1].messageHash)
@@ -684,20 +705,20 @@ suite "Waku Store - End to End - Unsorted Archive":
 check:
   queryResponse.get().messages.len == 6
-  queryResponse.get().messages[0].message.timestamp ==
-    queryResponse.get().messages[1].message.timestamp
-  queryResponse.get().messages[1].message.timestamp <
-    queryResponse.get().messages[2].message.timestamp
-  queryResponse.get().messages[2].message.timestamp ==
-    queryResponse.get().messages[3].message.timestamp
-  queryResponse.get().messages[3].message.timestamp ==
-    queryResponse.get().messages[4].message.timestamp
-  queryResponse.get().messages[4].message.timestamp ==
-    queryResponse.get().messages[5].message.timestamp
+  queryResponse.get().messages[0].message.get().timestamp ==
+    queryResponse.get().messages[1].message.get().timestamp
+  queryResponse.get().messages[1].message.get().timestamp <
+    queryResponse.get().messages[2].message.get().timestamp
+  queryResponse.get().messages[2].message.get().timestamp ==
+    queryResponse.get().messages[3].message.get().timestamp
+  queryResponse.get().messages[3].message.get().timestamp ==
+    queryResponse.get().messages[4].message.get().timestamp
+  queryResponse.get().messages[4].message.get().timestamp ==
+    queryResponse.get().messages[5].message.get().timestamp
   toHex(queryResponse.get().messages[0].messageHash) <
     toHex(queryResponse.get().messages[1].messageHash)
@@ -730,6 +751,7 @@ suite "Waku Store - End to End - Unsorted Archive without provided Timestamp":
 contentTopicSeq = @[contentTopic]
 storeQuery = StoreQueryRequest(
+  includeData: true,
   pubsubTopic: some(pubsubTopic),
   contentTopics: contentTopicSeq,
   paginationForward: PagingDirection.FORWARD,
@@ -750,7 +772,9 @@ suite "Waku Store - End to End - Unsorted Archive without provided Timestamp":
     fakeWakuMessage(@[byte 08]),
   ]
 unsortedArchiveMessages = messages.mapIt(
-  WakuMessageKeyValue(messageHash: computeMessageHash(pubsubTopic, it), message: it)
+  WakuMessageKeyValue(
+    messageHash: computeMessageHash(pubsubTopic, it), message: some(it)
+  )
 )

 let
@@ -785,20 +809,21 @@ suite "Waku Store - End to End - Unsorted Archive without provided Timestamp":
 check:
   queryResponse.get().messages.len == 5
-  queryResponse.get().messages[0].message.timestamp <=
-    queryResponse.get().messages[1].message.timestamp
-  queryResponse.get().messages[1].message.timestamp <=
-    queryResponse.get().messages[2].message.timestamp
-  queryResponse.get().messages[2].message.timestamp <=
-    queryResponse.get().messages[3].message.timestamp
-  queryResponse.get().messages[3].message.timestamp <=
-    queryResponse.get().messages[4].message.timestamp
+  queryResponse.get().messages[0].message.get().timestamp <=
+    queryResponse.get().messages[1].message.get().timestamp
+  queryResponse.get().messages[1].message.get().timestamp <=
+    queryResponse.get().messages[2].message.get().timestamp
+  queryResponse.get().messages[2].message.get().timestamp <=
+    queryResponse.get().messages[3].message.get().timestamp
+  queryResponse.get().messages[3].message.get().timestamp <=
+    queryResponse.get().messages[4].message.get().timestamp
 # Given the next query
 var historyQuery2 = StoreQueryRequest(
+  includeData: true,
   paginationCursor: queryResponse.get().paginationCursor,
   pubsubTopic: some(pubsubTopic),
   contentTopics: contentTopicSeq,
@@ -820,17 +845,17 @@ suite "Waku Store - End to End - Unsorted Archive without provided Timestamp":
 queryResponse2.get().messages.len == 5
-queryResponse2.get().messages[0].message.timestamp <=
-  queryResponse2.get().messages[1].message.timestamp
-queryResponse2.get().messages[1].message.timestamp <=
-  queryResponse2.get().messages[2].message.timestamp
-queryResponse2.get().messages[2].message.timestamp <=
-  queryResponse2.get().messages[3].message.timestamp
-queryResponse2.get().messages[3].message.timestamp <=
-  queryResponse2.get().messages[4].message.timestamp
+queryResponse2.get().messages[0].message.get().timestamp <=
+  queryResponse2.get().messages[1].message.get().timestamp
+queryResponse2.get().messages[1].message.get().timestamp <=
+  queryResponse2.get().messages[2].message.get().timestamp
+queryResponse2.get().messages[2].message.get().timestamp <=
+  queryResponse2.get().messages[3].message.get().timestamp
+queryResponse2.get().messages[3].message.get().timestamp <=
+  queryResponse2.get().messages[4].message.get().timestamp

 suite "Waku Store - End to End - Archive with Multiple Topics":
   var pubsubTopic {.threadvar.}: PubsubTopic
@@ -861,6 +886,7 @@ suite "Waku Store - End to End - Archive with Multiple Topics":
   @[contentTopic, contentTopicB, contentTopicC, contentTopicSpecials]
 storeQuery = StoreQueryRequest(
+  includeData: true,
   pubsubTopic: some(pubsubTopic),
   contentTopics: contentTopicSeq,
   paginationForward: PagingDirection.FORWARD,
@@ -888,12 +914,14 @@ suite "Waku Store - End to End - Archive with Multiple Topics":
   ]
 archiveMessages = messages.mapIt(
-  WakuMessageKeyValue(messageHash: computeMessageHash(pubsubTopic, it), message: it)
+  WakuMessageKeyValue(
+    messageHash: computeMessageHash(pubsubTopic, it), message: some(it)
+  )
 )
 for i in 6 ..< 10:
   archiveMessages[i].messagehash =
-    computeMessageHash(pubsubTopicB, archiveMessages[i].message)
+    computeMessageHash(pubsubTopicB, archiveMessages[i].message.get())
 let
   serverKey = generateSecp256k1Key()
@@ -961,6 +989,7 @@ suite "Waku Store - End to End - Archive with Multiple Topics":
 # Given the next query
 let historyQuery2 = StoreQueryRequest(
+  includeData: true,
   paginationCursor: queryResponse.get().paginationCursor,
   pubsubTopic: none(PubsubTopic),
   contentTopics: contentTopicSeq,
@@ -1028,6 +1057,7 @@ suite "Waku Store - End to End - Archive with Multiple Topics":
 # Given the next query
 let historyQuery2 = StoreQueryRequest(
+  includeData: true,
   paginationCursor: queryResponse.get().paginationCursor,
   pubsubTopic: none(PubsubTopic),
   contentTopics: contentTopicSeq,
@@ -1244,7 +1274,7 @@ suite "Waku Store - End to End - Archive with Multiple Topics":
 let voluminousArchiveMessages = messages.mapIt(
   WakuMessageKeyValue(
-    messageHash: computeMessageHash(pubsubTopic, it), message: it
+    messageHash: computeMessageHash(pubsubTopic, it), message: some(it)
   )
 )


@@ -183,7 +183,7 @@ procSuite "Waku Archive - find messages":
 waitFor archive.handleMessage("foo", msg2)
 ## Given
-let req = ArchiveQuery(contentTopics: @[topic])
+let req = ArchiveQuery(includeData: true, contentTopics: @[topic])
 ## When
 let queryRes = waitFor archive.findMessages(req)
@@ -218,7 +218,7 @@ procSuite "Waku Archive - find messages":
 waitFor archive.handleMessage("foo", msg3)
 ## Given
-let req = ArchiveQuery(contentTopics: @[topic1, topic3])
+let req = ArchiveQuery(includeData: true, contentTopics: @[topic1, topic3])
 ## When
 let queryRes = waitFor archive.findMessages(req)
@@ -283,7 +283,9 @@ procSuite "Waku Archive - find messages":
 ## Given
 # This query targets: pubsubtopic1 AND (contentTopic1 OR contentTopic3)
 let req = ArchiveQuery(
-  pubsubTopic: some(pubsubTopic1), contentTopics: @[contentTopic1, contentTopic3]
+  includeData: true,
+  pubsubTopic: some(pubsubTopic1),
+  contentTopics: @[contentTopic1, contentTopic3],
 )
 ## When
@@ -349,7 +351,7 @@ procSuite "Waku Archive - find messages":
 waitFor archive.handleMessage(pubsubTopic, msg3)
 ## Given
-let req = ArchiveQuery(pubsubTopic: some(pubsubTopic))
+let req = ArchiveQuery(includeData: true, pubsubTopic: some(pubsubTopic))
 ## When
 let res = waitFor archive.findMessages(req)
@@ -367,7 +369,8 @@ procSuite "Waku Archive - find messages":
 test "handle query with forward pagination":
   ## Given
-  let req = ArchiveQuery(pageSize: 4, direction: PagingDirection.FORWARD)
+  let req =
+    ArchiveQuery(includeData: true, pageSize: 4, direction: PagingDirection.FORWARD)
   ## When
   var nextReq = req # copy
@@ -400,7 +403,8 @@ procSuite "Waku Archive - find messages":
 test "handle query with backward pagination":
   ## Given
-  let req = ArchiveQuery(pageSize: 4, direction: PagingDirection.BACKWARD)
+  let req =
+    ArchiveQuery(includeData: true, pageSize: 4, direction: PagingDirection.BACKWARD)
   ## When
   var nextReq = req # copy
@@ -463,7 +467,7 @@ procSuite "Waku Archive - find messages":
 ).isOk()
 ## Given
-let req = ArchiveQuery(contentTopics: @[DefaultContentTopic])
+let req = ArchiveQuery(includeData: true, contentTopics: @[DefaultContentTopic])
 ## When
 let res = waitFor archive.findMessages(req)
@@ -482,6 +486,7 @@ procSuite "Waku Archive - find messages":
 test "handle temporal history query with a valid time window":
   ## Given
   let req = ArchiveQuery(
+    includeData: true,
     contentTopics: @[ContentTopic("1")],
     startTime: some(ts(15, timeOrigin)),
     endTime: some(ts(55, timeOrigin)),


@@ -38,9 +38,9 @@ suite "Store Client":
 hash3 = computeMessageHash(DefaultPubsubTopic, message3)
 messageSeq =
   @[
-    WakuMessageKeyValue(messageHash: hash1, message: message1),
-    WakuMessageKeyValue(messageHash: hash2, message: message2),
-    WakuMessageKeyValue(messageHash: hash3, message: message3),
+    WakuMessageKeyValue(messageHash: hash1, message: some(message1)),
+    WakuMessageKeyValue(messageHash: hash2, message: some(message2)),
+    WakuMessageKeyValue(messageHash: hash3, message: some(message3)),
   ]
 handlerFuture = newHistoryFuture()
 handler = proc(req: StoreQueryRequest): Future[StoreQueryResult] {.async, gcsafe.} =


@@ -14,7 +14,7 @@ procSuite "Waku Store - RPC codec":
 ## Given
 let query = StoreQueryRequest(
   requestId: "0",
-  includeData: false,
+  includeData: true,
   pubsubTopic: some(DefaultPubsubTopic),
   contentTopics: @[DefaultContentTopic],
   startTime: some(Timestamp(10)),
@@ -58,7 +58,7 @@ procSuite "Waku Store - RPC codec":
 let
   message = fakeWakuMessage()
   hash = computeMessageHash(DefaultPubsubTopic, message)
-  keyValue = WakuMessageKeyValue(messageHash: hash, message: message)
+  keyValue = WakuMessageKeyValue(messageHash: hash, message: some(message))
   res = StoreQueryResponse(
     requestId: "1",
     statusCode: 200,


@@ -29,7 +29,7 @@ suite "Waku Store - query handler":
 let msg = fakeWakuMessage(contentTopic = DefaultContentTopic)
 let hash = computeMessageHash(DefaultPubsubTopic, msg)
-let kv = WakuMessageKeyValue(messageHash: hash, message: msg)
+let kv = WakuMessageKeyValue(messageHash: hash, message: some(msg))
 var queryHandlerFut = newFuture[(StoreQueryRequest)]()


@@ -49,18 +49,19 @@ procSuite "WakuNode - Store":
 let hashes = msgListA.mapIt(computeMessageHash(DefaultPubsubTopic, it))
-let kvs =
-  zip(hashes, msgListA).mapIt(WakuMessageKeyValue(messageHash: it[0], message: it[1]))
+let kvs = zip(hashes, msgListA).mapIt(
+  WakuMessageKeyValue(messageHash: it[0], message: some(it[1]))
+)
 let archiveA = block:
   let driver = newSqliteArchiveDriver()
   for kv in kvs:
-    let msg_digest = computeDigest(kv.message)
+    let message = kv.message.get()
+    let msg_digest = computeDigest(message)
     require (
       waitFor driver.put(
-        DefaultPubsubTopic, kv.message, msg_digest, kv.messageHash,
-        kv.message.timestamp,
+        DefaultPubsubTopic, message, msg_digest, kv.messageHash, message.timestamp
       )
     ).isOk()
@@ -84,7 +85,8 @@ procSuite "WakuNode - Store":
 client.mountStoreClient()
 ## Given
-let req = StoreQueryRequest(contentTopics: @[DefaultContentTopic])
+let req =
+  StoreQueryRequest(includeData: true, contentTopics: @[DefaultContentTopic])
 let serverPeer = server.peerInfo.toRemotePeerInfo()
 ## When
@@ -119,6 +121,7 @@ procSuite "WakuNode - Store":
 ## Given
 let req = StoreQueryRequest(
+  includeData: true,
   contentTopics: @[DefaultContentTopic],
   paginationForward: PagingDirection.FORWARD,
   paginationLimit: some(uint64(7)),
@@ -174,6 +177,7 @@ procSuite "WakuNode - Store":
 ## Given
 let req = StoreQueryRequest(
+  includeData: true,
   contentTopics: @[DefaultContentTopic],
   paginationLimit: some(uint64(7)),
   paginationForward: PagingDirection.BACKWARD,
@@ -261,7 +265,8 @@ procSuite "WakuNode - Store":
 # Wait for the server filter to receive the push message
 require waitFor filterFut.withTimeout(5.seconds)
-let req = StoreQueryRequest(contentTopics: @[DefaultContentTopic])
+let req =
+  StoreQueryRequest(includeData: true, contentTopics: @[DefaultContentTopic])
 let res = waitFor client.query(req, serverPeer)
 ## Then
@@ -270,7 +275,8 @@ procSuite "WakuNode - Store":
 let response = res.get()
 check:
   response.messages.len == 1
-  response.messages[0] == WakuMessageKeyValue(messageHash: hash, message: message)
+  response.messages[0] ==
+    WakuMessageKeyValue(messageHash: hash, message: some(message))
 let (handledPubsubTopic, handledMsg) = filterFut.read()
 check:
@@ -341,7 +347,8 @@ procSuite "WakuNode - Store":
 client.mountStoreClient()
 ## Given
-let req = StoreQueryRequest(contentTopics: @[DefaultContentTopic])
+let req =
+  StoreQueryRequest(includeData: true, contentTopics: @[DefaultContentTopic])
 let serverPeer = server.peerInfo.toRemotePeerInfo()
 let requestProc = proc() {.async.} =
@@ -351,7 +358,7 @@ procSuite "WakuNode - Store":
 let response = queryRes.get()
 check:
-  response.messages.mapIt(it.message) == msgListA
+  response.messages.mapIt(it.message.get()) == msgListA
 for count in 0 ..< 4:
   waitFor requestProc()
@@ -384,7 +391,8 @@ procSuite "WakuNode - Store":
 client.mountStoreClient()
 ## Given
-let req = StoreQueryRequest(contentTopics: @[DefaultContentTopic])
+let req =
+  StoreQueryRequest(includeData: true, contentTopics: @[DefaultContentTopic])
 let serverPeer = server.peerInfo.toRemotePeerInfo()
 let successProc = proc() {.async.} =
@@ -393,7 +401,7 @@ procSuite "WakuNode - Store":
 check queryRes.isOk()
 let response = queryRes.get()
 check:
-  response.messages.mapIt(it.message) == msgListA
+  response.messages.mapIt(it.message.get()) == msgListA
 let failsProc = proc() {.async.} =
   let queryRes = waitFor client.query(req, peer = serverPeer)


@@ -1,7 +1,7 @@
 {.used.}
 import
-  std/[options, times],
+  std/[options, times, sugar],
   stew/shims/net as stewNet,
   chronicles,
   testutils/unittests,
@@ -224,9 +224,10 @@ procSuite "Waku Rest API - Store v3":
   "7", # page size. Empty implies default page size.
 )
-var wakuMessages = newSeq[WakuMessage](0)
-for j in 0 ..< response.data.messages.len:
-  wakuMessages.add(response.data.messages[j].message)
+let wakuMessages = collect(newSeq):
+  for element in response.data.messages:
+    if element.message.isSome():
+      element.message.get()
 pages[i] = wakuMessages
@@ -620,15 +621,16 @@ procSuite "Waku Rest API - Store v3":
 let client = newRestHttpClient(initTAddress(restAddress, restPort))
 # Filtering by a known pubsub topic.
-var response =
-  await client.getStoreMessagesV3(pubsubTopic = encodeUrl(DefaultPubsubTopic))
+var response = await client.getStoreMessagesV3(
+  includeData = "true", pubsubTopic = encodeUrl(DefaultPubsubTopic)
+)
 check:
   response.status == 200
   $response.contentType == $MIMETYPE_JSON
   response.data.messages.len == 1
-let storeMessage = response.data.messages[0].message
+let storeMessage = response.data.messages[0].message.get()
 check:
   storeMessage.payload == msg.payload
@@ -710,9 +712,10 @@ procSuite "Waku Rest API - Store v3":
   "3", # page size. Empty implies default page size.
 )
-var wakuMessages = newSeq[WakuMessage](0)
-for j in 0 ..< response.data.messages.len:
-  wakuMessages.add(response.data.messages[j].message)
+let wakuMessages = collect(newSeq):
+  for element in response.data.messages:
+    if element.message.isSome():
+      element.message.get()
 pages[i] = wakuMessages
@@ -773,9 +776,10 @@ procSuite "Waku Rest API - Store v3":
 response.status == 200
 $response.contentType == $MIMETYPE_JSON
-var wakuMessages = newSeq[WakuMessage](0)
-for j in 0 ..< response.data.messages.len:
-  wakuMessages.add(response.data.messages[j].message)
+let wakuMessages = collect(newSeq):
+  for element in response.data.messages:
+    if element.message.isSome():
+      element.message.get()
 check wakuMessages == msgList[6 .. 9]


@@ -45,8 +45,6 @@ proc write3*(proto: var ProtoBuffer, field: int, value: auto) =
 when value is Option:
   if value.isSome():
     proto.write(field, value.get())
-elif value is bool:
-  proto.write(field, zint(value))
 else:
   proto.write(field, value)


@@ -808,6 +808,7 @@ when defined(waku_exp_store_resume):
 proc toArchiveQuery(request: StoreQueryRequest): ArchiveQuery =
   var query = ArchiveQuery()
+  query.includeData = request.includeData
   query.pubsubTopic = request.pubsubTopic
   query.contentTopics = request.contentTopics
   query.startTime = request.startTime
@@ -834,9 +835,17 @@ proc toStoreResult(res: ArchiveResult): StoreQueryResult =
 res.statusCode = 200
 res.statusDesc = "OK"
-res.messages = response.hashes.zip(response.messages).mapIt(
-  WakuMessageKeyValue(messageHash: it[0], message: it[1])
-)
+for i in 0 ..< response.hashes.len:
+  let hash = response.hashes[i]
+  let kv =
+    store_common.WakuMessageKeyValue(messageHash: hash, message: none(WakuMessage))
+  res.messages.add(kv)
+
+for i in 0 ..< response.messages.len:
+  res.messages[i].message = some(response.messages[i])
 if response.cursor.isSome():
   res.paginationCursor = some(response.cursor.get().hash)


@@ -186,7 +186,9 @@ proc writeValue*(
 writer.beginRecord()
 writer.writeField("message_hash", value.messageHash)
-writer.writeField("message", value.message)
+if value.message.isSome():
+  writer.writeField("message", value.message.get())
 writer.endRecord()
@@ -217,10 +219,7 @@ proc readValue*(
 if messageHash.isNone():
   reader.raiseUnexpectedValue("Field `message_hash` is missing")
-if message.isNone():
-  reader.raiseUnexpectedValue("Field `message` is missing")
-value = WakuMessageKeyValue(messageHash: messageHash.get(), message: message.get())
+value = WakuMessageKeyValue(messageHash: messageHash.get(), message: message)
 ## StoreQueryResponse serde


@@ -144,10 +144,14 @@ proc findMessages*(
 if query.contentTopics.len > 10:
   return err(ArchiveError.invalidQuery("too many content topics"))
+if query.cursor.isSome() and query.cursor.get().hash.len != 32:
+  return err(ArchiveError.invalidQuery("invalid cursor hash length"))
 let queryStartTime = getTime().toUnixFloat()
 let rows = (
   await self.driver.getMessages(
+    includeData = query.includeData,
     contentTopic = query.contentTopics,
     pubsubTopic = query.pubsubTopic,
     cursor = query.cursor,
@@ -174,7 +178,10 @@ proc findMessages*(
 let pageSize = min(rows.len, int(maxPageSize))
 #TODO once store v2 is removed, unzip instead of 2x map
-messages = rows[0 ..< pageSize].mapIt(it[1])
+#TODO once store v2 is removed, update driver to not return messages when not needed
+if query.includeData:
+  messages = rows[0 ..< pageSize].mapIt(it[1])
 hashes = rows[0 ..< pageSize].mapIt(it[4])
 ## Cursor
@@ -206,7 +213,7 @@ proc findMessages*(
 proc findMessagesV2*(
   self: WakuArchive, query: ArchiveQuery
-): Future[ArchiveResult] {.async, gcsafe.} =
+): Future[ArchiveResult] {.async, deprecated, gcsafe.} =
   ## Search the archive to return a single page of messages matching the query criteria
   let maxPageSize =


@@ -43,6 +43,7 @@ type
   hash*: WakuMessageHash
 ArchiveQuery* = object
+  includeData*: bool # indicate if messages should be returned in addition to hashes.
   pubsubTopic*: Option[PubsubTopic]
   contentTopics*: seq[ContentTopic]
   cursor*: Option[ArchiveCursor]


@@ -41,11 +41,12 @@ method getMessagesV2*(
   endTime = none(Timestamp),
   maxPageSize = DefaultPageSize,
   ascendingOrder = true,
-): Future[ArchiveDriverResult[seq[ArchiveRow]]] {.base, async.} =
+): Future[ArchiveDriverResult[seq[ArchiveRow]]] {.base, deprecated, async.} =
   discard

 method getMessages*(
   driver: ArchiveDriver,
+  includeData = false,
   contentTopic = newSeq[ContentTopic](0),
   pubsubTopic = none(PubsubTopic),
   cursor = none(ArchiveCursor),


@@ -377,7 +377,7 @@ proc getMessagesV2ArbitraryQuery(
   endTime = none(Timestamp),
   maxPageSize = DefaultPageSize,
   ascendingOrder = true,
-): Future[ArchiveDriverResult[seq[ArchiveRow]]] {.async.} =
+): Future[ArchiveDriverResult[seq[ArchiveRow]]] {.async, deprecated.} =
   ## This proc allows to handle atypical queries. We don't use prepared statements for those.
   var query =
@@ -521,7 +521,7 @@ proc getMessagesV2PreparedStmt(
   endTime: Timestamp,
   maxPageSize = DefaultPageSize,
   ascOrder = true,
-): Future[ArchiveDriverResult[seq[ArchiveRow]]] {.async.} =
+): Future[ArchiveDriverResult[seq[ArchiveRow]]] {.async, deprecated.} =
   ## This proc aims to run the most typical queries in a more performant way, i.e. by means of
   ## prepared statements.
   ##
@@ -591,6 +591,7 @@ proc getMessagesV2PreparedStmt(
 method getMessages*(
   s: PostgresDriver,
+  includeData = false,
   contentTopicSeq = newSeq[ContentTopic](0),
   pubsubTopic = none(PubsubTopic),
   cursor = none(ArchiveCursor),
@@ -631,7 +632,7 @@ method getMessagesV2*(
   endTime = none(Timestamp),
   maxPageSize = DefaultPageSize,
   ascendingOrder = true,
-): Future[ArchiveDriverResult[seq[ArchiveRow]]] {.async.} =
+): Future[ArchiveDriverResult[seq[ArchiveRow]]] {.async, deprecated.} =
   if contentTopicSeq.len == 1 and pubsubTopic.isSome() and startTime.isSome() and
       endTime.isSome():
     ## Considered the most common query. Therefore, we use prepared statements to optimize it.


@@ -258,6 +258,7 @@ method existsTable*(
 method getMessages*(
   driver: QueueDriver,
+  includeData = false,
   contentTopic: seq[ContentTopic] = @[],
   pubsubTopic = none(PubsubTopic),
   cursor = none(ArchiveCursor),


@@ -292,7 +292,7 @@ proc whereClausev2(
   startTime: Option[Timestamp],
   endTime: Option[Timestamp],
   ascending: bool,
-): Option[string] =
+): Option[string] {.deprecated.} =
   let cursorClause =
     if cursor:
       let comp = if ascending: ">" else: "<"
@@ -336,7 +336,7 @@ proc whereClausev2(
 proc selectMessagesWithLimitQueryv2(
   table: string, where: Option[string], limit: uint, ascending = true, v3 = false
-): SqlQueryStr =
+): SqlQueryStr {.deprecated.} =
   let order = if ascending: "ASC" else: "DESC"
   var query: string
@@ -369,7 +369,7 @@ proc execSelectMessagesV2WithLimitStmt(
   startTime: Option[Timestamp],
   endTime: Option[Timestamp],
   onRowCallback: DataProc,
-): DatabaseResult[void] =
+): DatabaseResult[void] {.deprecated.} =
   let s = RawStmtPtr(s)
   # Bind params
@@ -416,29 +416,6 @@ proc execSelectMessagesV2WithLimitStmt(
   discard sqlite3_reset(s) # same return information as step
   discard sqlite3_clear_bindings(s) # no errors possible

-proc execSelectMessageByHash(
-    s: SqliteStmt, hash: WakuMessageHash, onRowCallback: DataProc
-): DatabaseResult[void] =
-  let s = RawStmtPtr(s)
-  checkErr bindParam(s, 1, toSeq(hash))
-  try:
-    while true:
-      let v = sqlite3_step(s)
-      case v
-      of SQLITE_ROW:
-        onRowCallback(s)
-      of SQLITE_DONE:
-        return ok()
-      else:
-        return err($sqlite3_errstr(v))
-  finally:
-    # release implicit transaction
-    discard sqlite3_reset(s) # same return information as step
-    discard sqlite3_clear_bindings(s)
-    # no errors possible
 proc selectMessagesByHistoryQueryWithLimit*(
   db: SqliteDatabase,
   contentTopic: seq[ContentTopic],
@@ -450,7 +427,7 @@ proc selectMessagesByHistoryQueryWithLimit*(
   ascending: bool,
 ): DatabaseResult[
   seq[(PubsubTopic, WakuMessage, seq[byte], Timestamp, WakuMessageHash)]
-] =
+] {.deprecated.} =
   var messages: seq[(PubsubTopic, WakuMessage, seq[byte], Timestamp, WakuMessageHash)] =
     @[]
@@ -483,6 +460,28 @@ proc selectMessagesByHistoryQueryWithLimit*(
 ### Store v3 ###

+proc execSelectMessageByHash(
+    s: SqliteStmt, hash: WakuMessageHash, onRowCallback: DataProc
+): DatabaseResult[void] =
+  let s = RawStmtPtr(s)
+  checkErr bindParam(s, 1, toSeq(hash))
+  try:
+    while true:
+      let v = sqlite3_step(s)
+      case v
+      of SQLITE_ROW:
+        onRowCallback(s)
+      of SQLITE_DONE:
+        return ok()
+      else:
+        return err($sqlite3_errstr(v))
+  finally:
+    # release implicit transaction
+    discard sqlite3_reset(s) # same return information as step
+    discard sqlite3_clear_bindings(s) # no errors possible
+
 proc selectMessageByHashQuery(): SqlQueryStr =
   var query: string


@@ -92,7 +92,7 @@ method getMessagesV2*(
   endTime = none(Timestamp),
   maxPageSize = DefaultPageSize,
   ascendingOrder = true,
-): Future[ArchiveDriverResult[seq[ArchiveRow]]] {.async.} =
+): Future[ArchiveDriverResult[seq[ArchiveRow]]] {.async, deprecated.} =
   echo "here"
   let cursor = cursor.map(toDbCursor)
@@ -111,6 +111,7 @@ method getMessagesV2*(
 method getMessages*(
   s: SqliteDriver,
+  includeData = false,
   contentTopic = newSeq[ContentTopic](0),
   pubsubTopic = none(PubsubTopic),
   cursor = none(ArchiveCursor),


@@ -18,7 +18,7 @@ proc encode*(message: WakuMessage): ProtoBuffer =
 buf.write3(10, zint64(message.timestamp))
 buf.write3(11, message.meta)
 buf.write3(21, message.proof)
-buf.write3(31, message.ephemeral)
+buf.write3(31, uint32(message.ephemeral))
 buf.finish3()
 buf
@@ -67,7 +67,7 @@ proc decode*(T: type WakuMessage, buffer: seq[byte]): ProtobufResult[T] =
 else:
   msg.proof = proof
-var ephemeral: uint
+var ephemeral: uint32
 if not ?pb.getField(31, ephemeral):
   msg.ephemeral = false
 else:


@@ -37,7 +37,7 @@ type
 WakuMessageKeyValue* = object
   messageHash*: WakuMessageHash
-  message*: WakuMessage
+  message*: Option[WakuMessage]

 StoreQueryResponse* = object
   requestId*: string


@@ -49,11 +49,11 @@ proc handleQueryRequest*(
 var res = StoreQueryResponse()
 let req = StoreQueryRequest.decode(raw_request).valueOr:
-  error "failed to decode rpc", peerId = requestor
+  error "failed to decode rpc", peerId = requestor, error = $error
   waku_store_errors.inc(labelValues = [decodeRpcFailure])
   res.statusCode = uint32(ErrorCode.BAD_REQUEST)
-  res.statusDesc = "decode rpc failed"
+  res.statusDesc = "decoding rpc failed: " & $error
   return res.encode().buffer
@@ -82,10 +82,10 @@ proc handleQueryRequest*(
 res = queryResult.valueOr:
   error "store query failed",
-    peerId = requestor, requestId = requestId, error = queryResult.error
-  res.statusCode = uint32(queryResult.error.kind)
-  res.statusDesc = $queryResult.error
+    peerId = requestor, requestId = requestId, error = $error
+  res.statusCode = uint32(error.kind)
+  res.statusDesc = $error
   return res.encode().buffer


@@ -14,7 +14,7 @@ proc encode*(req: StoreQueryRequest): ProtoBuffer =
 var pb = initProtoBuffer()
 pb.write3(1, req.requestId)
-pb.write3(2, req.includeData)
+pb.write3(2, uint32(req.includeData))
 pb.write3(10, req.pubsubTopic)
@@ -56,11 +56,11 @@ proc decode*(
 if not ?pb.getField(1, req.requestId):
   return err(ProtobufError.missingRequiredField("request_id"))
-var inclData: uint
+var inclData: uint32
 if not ?pb.getField(2, inclData):
   req.includeData = false
 else:
-  req.includeData = inclData == 1
+  req.includeData = inclData > 0
 var pubsubTopic: string
 if not ?pb.getField(10, pubsubTopic):
@@ -124,7 +124,9 @@ proc encode*(keyValue: WakuMessageKeyValue): ProtoBuffer =
 var pb = initProtoBuffer()
 pb.write3(1, keyValue.messageHash)
-pb.write3(2, keyValue.message.encode())
+if keyValue.message.isSome():
+  pb.write3(2, keyValue.message.get().encode())
 pb.finish3()
@@ -163,9 +165,9 @@ proc decode*(
 var proto: ProtoBuffer
 if not ?pb.getField(2, proto):
-  return err(ProtobufError.missingRequiredField("message"))
+  keyValue.message = none(WakuMessage)
 else:
-  keyValue.message = ?WakuMessage.decode(proto.buffer)
+  keyValue.message = some(?WakuMessage.decode(proto.buffer))
 return ok(keyValue)