Mirror of https://github.com/waku-org/nwaku.git

feat: messageHash attribute added in SQLite + migration script ready + testcase

parent 13aeebe46f
commit 1b0e0655b0

migrations/message_store/00008_updatePrimaryKey_rm_id.up.sql (new file, 18 lines)
@@ -0,0 +1,18 @@
+ALTER TABLE message RENAME TO message_backup;
+
+CREATE TABLE IF NOT EXISTS message(
+    pubsubTopic BLOB NOT NULL,
+    contentTopic BLOB NOT NULL,
+    payload BLOB,
+    version INTEGER NOT NULL,
+    timestamp INTEGER NOT NULL,
+    messageHash BLOB,
+    storedAt INTEGER NOT NULL,
+    CONSTRAINT messageIndex PRIMARY KEY (storedAt, messageHash)
+) WITHOUT ROWID;
+
+INSERT OR IGNORE INTO message(pubsubTopic, contentTopic, payload, version, timestamp, messageHash, storedAt)
+SELECT pubsubTopic, contentTopic, payload, version, timestamp, id, storedAt
+FROM message_backup;
+
+DROP TABLE message_backup;
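Review note: the script uses SQLite's rename-copy-drop pattern. The old id column (already a sha256 digest under the previous scheme) is carried over into messageHash, and INSERT OR IGNORE silently drops any backup rows that collide on the new (storedAt, messageHash) primary key. A hypothetical sanity check, in the SQL-in-Nim string style the sqlite driver below uses; it would have to run before the final DROP:

    # Hypothetical check, not part of the commit: any difference between the
    # two counts is the number of rows INSERT OR IGNORE skipped because they
    # collided on the new (storedAt, messageHash) key.
    const countBackup = "SELECT count(*) FROM message_backup;"
    const countNew = "SELECT count(*) FROM message;"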
@@ -19,7 +19,7 @@ proc computeTestCursor(pubsubTopic: PubsubTopic,
     pubsubTopic: pubsubTopic,
     senderTime: message.timestamp,
     storeTime: message.timestamp,
-    digest: computeDigest(message, pubsubTopic)
+    messageHash: computeDigest(message, pubsubTopic)
   )

 suite "Postgres driver":

@@ -87,10 +87,10 @@ suite "Postgres driver":
     require:
       storedMsg.len == 1
       storedMsg.all do (item: auto) -> bool:
-        let (pubsubTopic, actualMsg, digest, storeTimestamp) = item
+        let (pubsubTopic, actualMsg, messageHash, storeTimestamp) = item
         actualMsg.contentTopic == contentTopic and
         pubsubTopic == DefaultPubsubTopic and
-        toHex(computedDigest.data) == toHex(digest) and
+        toHex(computedDigest.data) == toHex(messageHash) and
         toHex(actualMsg.payload) == toHex(msg.payload)

     (await driver.close()).expect("driver to close")

@@ -37,7 +37,7 @@ proc computeTestCursor(pubsubTopic: PubsubTopic, message: WakuMessage): ArchiveC
     pubsubTopic: pubsubTopic,
     senderTime: message.timestamp,
     storeTime: message.timestamp,
-    digest: computeDigest(message, pubsubTopic)
+    messageHash: computeDigest(message, pubsubTopic)
   )

 suite "Postgres driver - query by content topic":

@@ -23,7 +23,7 @@ proc genIndexedWakuMessage(i: int8): IndexedWakuMessage =
     cursor = Index(
       receiverTime: Timestamp(i),
       senderTime: Timestamp(i),
-      digest: MessageDigest(data: data),
+      messageHash: MessageDigest(data: data),
       pubsubTopic: "test-pubsub-topic"
     )

@@ -31,44 +31,44 @@ suite "Queue Driver - index":

   ## Test vars
   let
-    smallIndex1 = Index(digest: hashFromStr("1234"),
+    smallIndex1 = Index(messageHash: hashFromStr("1234"),
                         receiverTime: getNanosecondTime(0),
                         senderTime: getNanosecondTime(1000))
-    smallIndex2 = Index(digest: hashFromStr("1234567"), # digest is less significant than senderTime
+    smallIndex2 = Index(messageHash: hashFromStr("1234567"), # messageHash is less significant than senderTime
                         receiverTime: getNanosecondTime(0),
                         senderTime: getNanosecondTime(1000))
-    largeIndex1 = Index(digest: hashFromStr("1234"),
+    largeIndex1 = Index(messageHash: hashFromStr("1234"),
                         receiverTime: getNanosecondTime(0),
                         senderTime: getNanosecondTime(9000)) # only senderTime differ from smallIndex1
-    largeIndex2 = Index(digest: hashFromStr("12345"), # only digest differs from smallIndex1
+    largeIndex2 = Index(messageHash: hashFromStr("12345"), # only messageHash differs from smallIndex1
                         receiverTime: getNanosecondTime(0),
                         senderTime: getNanosecondTime(1000))
-    eqIndex1 = Index(digest: hashFromStr("0003"),
+    eqIndex1 = Index(messageHash: hashFromStr("0003"),
                      receiverTime: getNanosecondTime(0),
                      senderTime: getNanosecondTime(54321))
-    eqIndex2 = Index(digest: hashFromStr("0003"),
+    eqIndex2 = Index(messageHash: hashFromStr("0003"),
                      receiverTime: getNanosecondTime(0),
                      senderTime: getNanosecondTime(54321))
-    eqIndex3 = Index(digest: hashFromStr("0003"),
+    eqIndex3 = Index(messageHash: hashFromStr("0003"),
                      receiverTime: getNanosecondTime(9999), # receiverTime difference should have no effect on comparisons
                      senderTime: getNanosecondTime(54321))
-    diffPsTopic = Index(digest: hashFromStr("1234"),
+    diffPsTopic = Index(messageHash: hashFromStr("1234"),
                         receiverTime: getNanosecondTime(0),
                         senderTime: getNanosecondTime(1000),
                         pubsubTopic: "zzzz")
-    noSenderTime1 = Index(digest: hashFromStr("1234"),
+    noSenderTime1 = Index(messageHash: hashFromStr("1234"),
                           receiverTime: getNanosecondTime(1100),
                           senderTime: getNanosecondTime(0),
                           pubsubTopic: "zzzz")
-    noSenderTime2 = Index(digest: hashFromStr("1234"),
+    noSenderTime2 = Index(messageHash: hashFromStr("1234"),
                           receiverTime: getNanosecondTime(10000),
                           senderTime: getNanosecondTime(0),
                           pubsubTopic: "zzzz")
-    noSenderTime3 = Index(digest: hashFromStr("1234"),
+    noSenderTime3 = Index(messageHash: hashFromStr("1234"),
                           receiverTime: getNanosecondTime(1200),
                           senderTime: getNanosecondTime(0),
                           pubsubTopic: "aaaa")
-    noSenderTime4 = Index(digest: hashFromStr("0"),
+    noSenderTime4 = Index(messageHash: hashFromStr("0"),
                           receiverTime: getNanosecondTime(1200),
                           senderTime: getNanosecondTime(0),
                           pubsubTopic: "zzzz")

@@ -156,8 +156,8 @@ suite "Queue Driver - index":

     ## Then
     check:
-      index.digest.data.len != 0
-      index.digest.data.len == 32 # sha2 output length in bytes
+      index.messageHash.data.len != 0
+      index.messageHash.data.len == 32 # sha2 output length in bytes
       index.receiverTime == ts2 # the receiver timestamp should be a non-zero value
       index.senderTime == ts
       index.pubsubTopic == DefaultContentTopic

@@ -177,4 +177,4 @@ suite "Queue Driver - index":

     ## Then
     check:
-      index1.digest == index2.digest
+      index1.messageHash == index2.messageHash

@@ -25,7 +25,7 @@ proc getTestQueueDriver(numMessages: int): QueueDriver =
       index: Index(
         receiverTime: Timestamp(i),
         senderTime: Timestamp(i),
-        digest: MessageDigest(data: data)
+        messageHash: MessageDigest(data: data)
       )
     )
     discard testQueueDriver.add(msg)

@@ -156,7 +156,7 @@ procSuite "Queue driver - pagination":
       pubsubTopic: DefaultPubsubTopic,
       senderTime: msg.timestamp,
       storeTime: msg.timestamp,
-      digest: computeDigest(msg, DefaultPubsubTopic)
+      messageHash: computeDigest(msg, DefaultPubsubTopic)
     ).toIndex()

   let

@@ -337,7 +337,7 @@ procSuite "Queue driver - pagination":
       pubsubTopic: DefaultPubsubTopic,
       senderTime: msg.timestamp,
       storeTime: msg.timestamp,
-      digest: computeDigest(msg, DefaultPubsubTopic)
+      messageHash: computeDigest(msg, DefaultPubsubTopic)
     ).toIndex()

   let

@@ -29,7 +29,7 @@ proc computeTestCursor(pubsubTopic: PubsubTopic, message: WakuMessage): ArchiveC
     pubsubTopic: pubsubTopic,
     senderTime: message.timestamp,
     storeTime: message.timestamp,
-    digest: computeDigest(message, pubsubTopic)
+    messageHash: computeDigest(message, pubsubTopic)
   )


@@ -60,7 +60,7 @@ suite "SQLite driver":
     check:
       storedMsg.len == 1
       storedMsg.all do (item: auto) -> bool:
-        let (pubsubTopic, msg, digest, storeTimestamp) = item
+        let (pubsubTopic, msg, messageHash, storeTimestamp) = item
         msg.contentTopic == contentTopic and
         pubsubTopic == DefaultPubsubTopic

@@ -33,7 +33,7 @@ proc computeTestCursor(pubsubTopic: PubsubTopic, message: WakuMessage): ArchiveC
     pubsubTopic: pubsubTopic,
     senderTime: message.timestamp,
     storeTime: message.timestamp,
-    digest: computeDigest(message, pubsubTopic)
+    messageHash: computeDigest(message, pubsubTopic)
   )

@@ -423,6 +423,50 @@ suite "SQLite driver - query by pubsub topic":

     ## Cleanup
     (await driver.close()).expect("driver to close")

+  asyncTest "pubSubTopic messageHash match":
+    ## Given
+    const pubsubTopic1 = "test-pubsub-topic1"
+    const pubsubTopic2 = "test-pubsub-topic2"
+    # two variables to hold the message hashes
+    var msgHash1: seq[byte]
+    var msgHash2: seq[byte]
+
+    let driver = newTestSqliteDriver()
+    var putFutures = newSeq[Future[ArchiveDriverResult[void]]]()
+
+    let msg1 = fakeWakuMessage(contentTopic=DefaultContentTopic, ts=Timestamp(1))
+    putFutures.add(driver.put(pubsubTopic1, msg1, computeDigest(msg1, pubsubTopic1), msg1.timestamp))
+
+    let msg2 = fakeWakuMessage(contentTopic=DefaultContentTopic, ts=Timestamp(2))
+    putFutures.add(driver.put(pubsubTopic2, msg2, computeDigest(msg2, pubsubTopic2), msg2.timestamp))
+
+    discard waitFor allFinished(putFutures)
+
+    # get the messages from the database
+    let storedMsg = (waitFor driver.getAllMessages()).tryGet()
+
+    check:
+      # there must be two messages
+      storedMsg.len > 0
+      storedMsg.len == 2
+
+      # extract the individual messages and their message hash values
+      @[storedMsg[0]].all do (item1: auto) -> bool:
+        let (gotPubsubTopic1, gotMsg1, messageHash1, timestamp1) = item1
+        msgHash1 = messageHash1
+        true
+
+      @[storedMsg[1]].all do (item2: auto) -> bool:
+        let (gotPubsubTopic2, gotMsg2, messageHash2, timestamp2) = item2
+        msgHash2 = messageHash2
+        true
+
+      # compare the message hashes; given the context, they should be different
+      msgHash1 != msgHash2
+
+    ## Cleanup
+    (await driver.close()).expect("driver to close")

 suite "SQLite driver - query by cursor":
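Review note: this test passes only because of the computeDigest change at the bottom of this commit. The two stored messages share the same contentTopic, and the timestamp is not part of the digest preimage, so with identical payloads (as fakeWakuMessage appears to produce here) the only thing separating the two hashes is the pubsub topic. A minimal sketch of the property under test:

    let h1 = computeDigest(msg1, pubsubTopic1)
    let h2 = computeDigest(msg2, pubsubTopic2)
    doAssert h1 != h2   # differs solely because the pubsub topic is hashed in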
@@ -147,7 +147,7 @@ suite "Waku Archive - Retention policy":
     check:
       storedMsg.len == capacity
       storedMsg.all do (item: auto) -> bool:
-        let (pubsubTopic, msg, digest, storeTimestamp) = item
+        let (pubsubTopic, msg, messageHash, storeTimestamp) = item
         msg.contentTopic == contentTopic and
         pubsubTopic == DefaultPubsubTopic

@@ -30,7 +30,7 @@ proc computeTestCursor(pubsubTopic: PubsubTopic, message: WakuMessage): ArchiveC
     pubsubTopic: pubsubTopic,
     senderTime: message.timestamp,
     storeTime: message.timestamp,
-    digest: computeDigest(message, pubsubTopic)
+    messageHash: computeDigest(message, pubsubTopic)
   )


@@ -11,7 +11,7 @@ import
   ../../waku/node/message_store/sqlite_store,
   ../../waku/node/peer_manager,
   ../../waku/waku_core,
-  ../../waku/waku_store,
+  ../../waku/waku_store
   ./testlib/common,
   ./testlib/switch

@@ -58,7 +58,7 @@ procSuite "Waku Store - resume store":
     ]

     for msg in msgList:
-      require store.put(DefaultPubsubTopic, msg, computeDigest(msg), msg.timestamp).isOk()
+      require store.put(DefaultPubsubTopic, msg, computeDigest(msg, DefaultPubsubTopic), msg.timestamp).isOk()

     store

@@ -76,7 +76,7 @@ procSuite "Waku Store - resume store":
     ]

     for msg in msgList2:
-      require store.put(DefaultPubsubTopic, msg, computeDigest(msg), msg.timestamp).isOk()
+      require store.put(DefaultPubsubTopic, msg, computeDigest(msg, DefaultPubsubTopic), msg.timestamp).isOk()

     store

@@ -272,7 +272,7 @@ suite "WakuNode - waku store":
     # Insert the same message in both node's store
     let
       receivedTime3 = now() + getNanosecondTime(10)
-      digest3 = computeDigest(msg3)
+      digest3 = computeDigest(msg3, DefaultPubsubTopic)
     require server.wakuStore.store.put(DefaultPubsubTopic, msg3, digest3, receivedTime3).isOk()
     require client.wakuStore.store.put(DefaultPubsubTopic, msg3, digest3, receivedTime3).isOk()

@@ -34,7 +34,7 @@ proc computeTestCursor(pubsubTopic: PubsubTopic, message: WakuMessage): HistoryC
     pubsubTopic: pubsubTopic,
     senderTime: message.timestamp,
     storeTime: message.timestamp,
-    digest: waku_archive.computeDigest(message, pubsubTopic)
+    messageHash: waku_archive.computeDigest(message, pubsubTopic)
   )

 procSuite "WakuNode - Store":

@@ -57,8 +57,8 @@ procSuite "WakuNode - Store":
     let driver = newTestArchiveDriver()

     for msg in msgListA:
-      let msg_digest = waku_archive.computeDigest(msg, DefaultPubsubTopic)
-      require (waitFor driver.put(DefaultPubsubTopic, msg, msg_digest, msg.timestamp)).isOk()
+      let msg_hash = waku_archive.computeDigest(msg, DefaultPubsubTopic)
+      require (waitFor driver.put(DefaultPubsubTopic, msg, msg_hash, msg.timestamp)).isOk()

     driver

@@ -32,11 +32,11 @@ logScope:

 proc put(store: ArchiveDriver, pubsubTopic: PubsubTopic, message: WakuMessage): Future[Result[void, string]] =
   let
-    digest = waku_archive.computeDigest(message, pubsubTopic)
+    messageHash = waku_archive.computeDigest(message, pubsubTopic)
     receivedTime = if message.timestamp > 0: message.timestamp
                    else: getNanosecondTime(getTime().toUnixFloat())

-  store.put(pubsubTopic, message, digest, receivedTime)
+  store.put(pubsubTopic, message, messageHash, receivedTime)

 # Creates a new WakuNode
 proc testWakuNode(): WakuNode =

@@ -60,7 +60,7 @@ procSuite "Waku v2 Rest API - Store":
       payload: @[byte('H'), byte('i'), byte('!')]
     )

-    let messageDigest = waku_store.computeDigest(wakuMsg)
+    let messageDigest = waku_store.computeDigest(wakuMsg, DefaultPubsubTopic)
     let restMsgDigest = some(messageDigest.toRestStringMessageDigest())
     let parsedMsgDigest = restMsgDigest.parseMsgDigest().value

@@ -129,7 +129,7 @@ procSuite "Waku v2 Rest API - Store":
       "6", # end time
       "", # sender time
       "", # store time
-      "", # base64-encoded digest
+      "", # base64-encoded messageHash
      "", # empty implies default page size
       "true" # ascending
     )

@@ -224,7 +224,7 @@ procSuite "Waku v2 Rest API - Store":
       # populate the cursor for next page
       if response.data.cursor.isSome():
         reqPubsubTopic = response.data.cursor.get().pubsubTopic
-        reqDigest = response.data.cursor.get().digest
+        reqDigest = response.data.cursor.get().messageHash
         reqSenderTime = response.data.cursor.get().senderTime
         reqStoreTime = response.data.cursor.get().storeTime

@@ -769,7 +769,7 @@ proc toArchiveQuery(request: HistoryQuery): ArchiveQuery =
   ArchiveQuery(
     pubsubTopic: request.pubsubTopic,
     contentTopics: request.contentTopics,
-    cursor: request.cursor.map(proc(cursor: HistoryCursor): ArchiveCursor = ArchiveCursor(pubsubTopic: cursor.pubsubTopic, senderTime: cursor.senderTime, storeTime: cursor.storeTime, digest: cursor.digest)),
+    cursor: request.cursor.map(proc(cursor: HistoryCursor): ArchiveCursor = ArchiveCursor(pubsubTopic: cursor.pubsubTopic, senderTime: cursor.senderTime, storeTime: cursor.storeTime, messageHash: cursor.messageHash)),
     startTime: request.startTime,
     endTime: request.endTime,
     pageSize: request.pageSize.uint,

@@ -793,7 +793,7 @@ proc toHistoryResult*(res: ArchiveResult): HistoryResult =
   let response = res.get()
   ok(HistoryResponse(
     messages: response.messages,
-    cursor: response.cursor.map(proc(cursor: ArchiveCursor): HistoryCursor = HistoryCursor(pubsubTopic: cursor.pubsubTopic, senderTime: cursor.senderTime, storeTime: cursor.storeTime, digest: cursor.digest)),
+    cursor: response.cursor.map(proc(cursor: ArchiveCursor): HistoryCursor = HistoryCursor(pubsubTopic: cursor.pubsubTopic, senderTime: cursor.senderTime, storeTime: cursor.storeTime, messageHash: cursor.messageHash)),
   ))

 proc mountStore*(node: WakuNode) {.async, raises: [Defect, LPError].} =
@@ -60,7 +60,7 @@ proc getStoreMessagesV1*(
   # Optional cursor fields
   senderTime: string = "",
   storeTime: string = "",
-  digest: string = "", # base64-encoded digest
+  messageHash: string = "", # base64-encoded digest

   pageSize: string = "",
   ascending: string = ""

@@ -76,7 +76,7 @@ proc parseTime(input: Option[string]):
 proc parseCursor(parsedPubsubTopic: Option[string],
                  senderTime: Option[string],
                  storeTime: Option[string],
-                 digest: Option[string]):
+                 messageHash: Option[string]):
   Result[Option[HistoryCursor], string] =

   # Parse sender time

@@ -90,7 +90,7 @@ proc parseCursor(parsedPubsubTopic: Option[string],
     return err(parsedStoreTime.error)

   # Parse message digest
-  let parsedMsgDigest = parseMsgDigest(digest)
+  let parsedMsgDigest = parseMsgDigest(messageHash)
   if not parsedMsgDigest.isOk():
     return err(parsedMsgDigest.error)

@@ -105,7 +105,7 @@ proc parseCursor(parsedPubsubTopic: Option[string],
       pubsubTopic: parsedPubsubTopic.get(),
       senderTime: parsedSenderTime.value.get(),
       storeTime: parsedStoreTime.value.get(),
-      digest: parsedMsgDigest.value.get())
+      messageHash: parsedMsgDigest.value.get())
     ))
   else:
     return ok(none(HistoryCursor))

@@ -115,7 +115,7 @@ proc createHistoryQuery(pubsubTopic: Option[string],
                         contentTopics: Option[string],
                         senderTime: Option[string],
                         storeTime: Option[string],
-                        digest: Option[string],
+                        messageHash: Option[string],
                         startTime: Option[string],
                         endTime: Option[string],
                         pageSize: Option[string],

@@ -142,7 +142,7 @@ proc createHistoryQuery(pubsubTopic: Option[string],
   let parsedCursor = ? parseCursor(parsedPubsubTopic,
                                    senderTime,
                                    storeTime,
-                                   digest)
+                                   messageHash)

   # Parse page size field
   var parsedPagedSize = DefaultPageSize

@@ -195,7 +195,7 @@ proc installStoreV1Handler(router: var RestRouter,
                  contentTopics: Option[string],
                  senderTime: Option[string],
                  storeTime: Option[string],
-                 digest: Option[string],
+                 messageHash: Option[string],
                  startTime: Option[string],
                  endTime: Option[string],
                  pageSize: Option[string],

@@ -228,7 +228,7 @@ proc installStoreV1Handler(router: var RestRouter,
                                        contentTopics.toOpt(),
                                        senderTime.toOpt(),
                                        storeTime.toOpt(),
-                                       digest.toOpt(),
+                                       messageHash.toOpt(),
                                        startTime.toOpt(),
                                        endTime.toOpt(),
                                        pageSize.toOpt(),

@@ -24,7 +24,7 @@ type
     pubsubTopic*: PubsubTopic
     senderTime*: Timestamp
     storeTime*: Timestamp
-    digest*: MessageDigest
+    messageHash*: MessageDigest

   StoreRequestRest* = object
     # inspired by https://github.com/waku-org/nwaku/blob/f95147f5b7edfd45f914586f2d41cd18fb0e0d18/waku/v2//waku_store/common.nim#L52

@@ -119,7 +119,7 @@ proc toStoreResponseRest*(histResp: HistoryResponse): StoreResponseRest =
       pubsubTopic: histResp.cursor.get().pubsubTopic,
       senderTime: histResp.cursor.get().senderTime,
       storeTime: histResp.cursor.get().storeTime,
-      digest: histResp.cursor.get().digest
+      messageHash: histResp.cursor.get().messageHash
     ))

   StoreResponseRest(

@@ -247,7 +247,7 @@ proc writeValue*(writer: var JsonWriter[RestJson],
   writer.writeField("pubsub_topic", value.pubsubTopic)
   writer.writeField("sender_time", value.senderTime)
   writer.writeField("store_time", value.storeTime)
-  writer.writeField("digest", value.digest)
+  writer.writeField("messageHash", value.messageHash)
   writer.endRecord()

 proc readValue*(reader: var JsonReader[RestJson],

@@ -257,7 +257,7 @@ proc readValue*(reader: var JsonReader[RestJson],
     pubsubTopic = none(PubsubTopic)
     senderTime = none(Timestamp)
     storeTime = none(Timestamp)
-    digest = none(MessageDigest)
+    messageHash = none(MessageDigest)

   for fieldName in readObjectFields(reader):
     case fieldName

@@ -273,10 +273,10 @@ proc readValue*(reader: var JsonReader[RestJson],
       if storeTime.isSome():
         reader.raiseUnexpectedField("Multiple `store_time` fields found", "HistoryCursorRest")
       storeTime = some(reader.readValue(Timestamp))
-    of "digest":
-      if digest.isSome():
-        reader.raiseUnexpectedField("Multiple `digest` fields found", "HistoryCursorRest")
-      digest = some(reader.readValue(MessageDigest))
+    of "messageHash":
+      if messageHash.isSome():
+        reader.raiseUnexpectedField("Multiple `messageHash` fields found", "HistoryCursorRest")
+      messageHash = some(reader.readValue(MessageDigest))
     else:
       reader.raiseUnexpectedField("Unrecognided field", cstring(fieldName))

@@ -289,14 +289,14 @@ proc readValue*(reader: var JsonReader[RestJson],
   if storeTime.isNone():
     reader.raiseUnexpectedValue("Field `store_time` is missing")

-  if digest.isNone():
-    reader.raiseUnexpectedValue("Field `digest` is missing")
+  if messageHash.isNone():
+    reader.raiseUnexpectedValue("Field `messageHash` is missing")

   value = HistoryCursorRest(
     pubsubTopic: pubsubTopic.get(),
     senderTime: senderTime.get(),
     storeTime: storeTime.get(),
-    digest: digest.get()
+    messageHash: messageHash.get()
   )

 ## End of HistoryCursorRest serde
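Review note: this is a breaking change to the REST wire format: the cursor field "digest" becomes "messageHash" (camelCase, unlike the neighbouring snake_case fields), and a client still sending "digest" is now rejected, hitting both the unrecognised-field branch and "Field `messageHash` is missing". An illustrative cursor body after the change, with made-up values:

    # Hypothetical example only; the hash value is invented base64.
    const exampleCursor = """{"pubsub_topic": "/waku/2/default-waku/proto",
                              "sender_time": 1680000000000000000,
                              "store_time": 1680000000000000000,
                              "messageHash": "aGVsbG8gd29ybGQ="}"""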
@@ -104,7 +104,7 @@ proc handleMessage*(w: WakuArchive,
     msgReceivedTime = if msg.timestamp > 0: msg.timestamp
                       else: getNanosecondTime(getTime().toUnixFloat())

-  trace "handling message", pubsubTopic=pubsubTopic, contentTopic=msg.contentTopic, timestamp=msg.timestamp, digest=msgDigest
+  trace "handling message", pubsubTopic=pubsubTopic, contentTopic=msg.contentTopic, timestamp=msg.timestamp, messageHash=msgDigest

   let putRes = await w.driver.put(pubsubTopic, msg, msgDigest, msgReceivedTime)
   if putRes.isErr():

@@ -163,13 +163,13 @@ proc findMessages*(w: WakuArchive, query: ArchiveQuery): Future[ArchiveResult] {
     ## Build last message cursor
     ## The cursor is built from the last message INCLUDED in the response
     ## (i.e. the second last message in the rows list)
-    let (pubsubTopic, message, digest, storeTimestamp) = rows[^2]
+    let (pubsubTopic, message, messageHash, storeTimestamp) = rows[^2]

     # TODO: Improve coherence of MessageDigest type
     let messageDigest = block:
       var data: array[32, byte]
-      for i in 0..<min(digest.len, 32):
-        data[i] = digest[i]
+      for i in 0..<min(messageHash.len, 32):
+        data[i] = messageHash[i]

       MessageDigest(data: data)

@@ -177,7 +177,7 @@ proc findMessages*(w: WakuArchive, query: ArchiveQuery): Future[ArchiveResult] {
       pubsubTopic: pubsubTopic,
      senderTime: message.timestamp,
       storeTime: storeTimestamp,
-      digest: messageDigest
+      messageHash: messageDigest
     ))

   # All messages MUST be returned in chronological order
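Review note: the rename here is mechanical, but the rows[^2] access is worth spelling out: the driver appears to be asked for one row more than the page size, so the extra last row only proves another page exists, and the cursor is built from the second-last row, the last message actually returned. A self-contained illustration of that indexing, under the pageSize + 1 assumption:

    let pageSize = 2
    let rows = @[10, 20, 30]            # pageSize + 1 rows came back
    let page = rows[0 ..< pageSize]     # what the client receives
    doAssert rows[^2] == page[^1]       # cursor row == last included message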
@@ -38,7 +38,7 @@ type DbCursor = object
   pubsubTopic*: PubsubTopic
   senderTime*: Timestamp
   storeTime*: Timestamp
-  digest*: MessageDigest
+  messageHash*: MessageDigest


 ## Public API types

@@ -93,12 +93,12 @@ proc reset*(s: PostgresDriver): Future[ArchiveDriverResult[void]] {.async.} =
 method put*(s: PostgresDriver,
             pubsubTopic: PubsubTopic,
             message: WakuMessage,
-            digest: MessageDigest,
+            messageHash: MessageDigest,
             receivedTime: Timestamp):
             Future[ArchiveDriverResult[void]] {.async.} =

   let ret = await s.connPool.runStmt(insertRow(),
-                                     @[toHex(digest.data),
+                                     @[toHex(messageHash.data),
                                        $receivedTime,
                                        message.contentTopic,
                                        toHex(message.payload),
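Review note: unlike the SQLite driver, which binds the hash as a BLOB, the Postgres driver stores it hex-encoded as text; toArchiveRow below undoes that with parseHexStr. A minimal round-trip sketch using std/strutils plus stew/byteutils, libraries nwaku already depends on:

    import std/strutils, stew/byteutils

    let hashBytes = @[byte 0xde, 0xad, 0xbe, 0xef]
    let stored = hashBytes.toHex()       # "deadbeef": the text bound above
    let restored = parseHexStr(stored)   # raw-byte string, as in toArchiveRow
    doAssert restored.toBytes() == hashBytes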
@@ -116,7 +116,7 @@ proc toArchiveRow(r: Row): ArchiveDriverResult[ArchiveRow] =
   var pubSubTopic: string
   var contentTopic: string
   var storedAt: int64
-  var digest: string
+  var messageHash: string
   var payload: string

   try:

@@ -126,7 +126,7 @@ proc toArchiveRow(r: Row): ArchiveDriverResult[ArchiveRow] =
     pubSubTopic = r[3]
     version = parseUInt(r[4])
     timestamp = parseInt(r[5])
-    digest = parseHexStr(r[6])
+    messageHash = parseHexStr(r[6])
   except ValueError:
     return err("could not parse timestamp")

@@ -137,7 +137,7 @@ proc toArchiveRow(r: Row): ArchiveDriverResult[ArchiveRow] =

   return ok((pubSubTopic,
              wakuMessage,
-             @(digest.toOpenArrayByte(0, digest.high)),
+             @(messageHash.toOpenArrayByte(0, messageHash.high)),
              storedAt))

 method getAllMessages*(s: PostgresDriver):

@@ -190,7 +190,7 @@ method getMessages*(s: PostgresDriver,
     let comp = if ascendingOrder: ">" else: "<"
     statements.add("(storedAt, id) " & comp & " (?,?)")
     args.add($cursor.get().storeTime)
-    args.add(toHex(cursor.get().digest.data))
+    args.add(toHex(cursor.get().messageHash.data))

   if startTime.isSome():
     statements.add("storedAt >= ?")

@@ -16,19 +16,19 @@ type Index* = object
   pubsubTopic*: string
   senderTime*: Timestamp # the time at which the message is generated
   receiverTime*: Timestamp
-  digest*: MessageDigest # calculated over payload and content topic
+  messageHash*: MessageDigest # calculated over the pubsub topic, payload, content topic and meta

 proc compute*(T: type Index, msg: WakuMessage, receivedTime: Timestamp, pubsubTopic: PubsubTopic): T =
   ## Takes a WakuMessage with received timestamp and returns its Index.
   let
-    digest = computeDigest(msg, pubsubTopic)
+    messageHash = computeDigest(msg, pubsubTopic)
     senderTime = msg.timestamp

   Index(
     pubsubTopic: pubsubTopic,
     senderTime: senderTime,
     receiverTime: receivedTime,
-    digest: digest
+    messageHash: messageHash
   )

@@ -37,7 +37,7 @@ proc tohistoryCursor*(index: Index): ArchiveCursor =
     pubsubTopic: index.pubsubTopic,
     senderTime: index.senderTime,
     storeTime: index.receiverTime,
-    digest: index.digest
+    messageHash: index.messageHash
   )

 proc toIndex*(index: ArchiveCursor): Index =

@@ -45,14 +45,14 @@ proc toIndex*(index: ArchiveCursor): Index =
     pubsubTopic: index.pubsubTopic,
     senderTime: index.senderTime,
     receiverTime: index.storeTime,
-    digest: index.digest
+    messageHash: index.messageHash
   )


 proc `==`*(x, y: Index): bool =
   ## receiverTime plays no role in index equality
   (x.senderTime == y.senderTime) and
-  (x.digest == y.digest) and
+  (x.messageHash == y.messageHash) and
   (x.pubsubTopic == y.pubsubTopic)

 proc cmp*(x, y: Index): int =

@@ -84,7 +84,7 @@ proc cmp*(x, y: Index): int =
     return timecmp

   # Continue only when timestamps are equal
-  let digestcmp = cmp(x.digest.data, y.digest.data)
+  let digestcmp = cmp(x.messageHash.data, y.messageHash.data)
   if digestcmp != 0:
     return digestcmp

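Review note: after the rename, cmp still orders indices by time first and consults the hash only to break ties, while `==` ignores receiverTime entirely (the eqIndex3 vector in the queue tests above exercises exactly that). The essence of the ordering, ignoring the sender/receiver-time fallback, over self-contained (time, hash) tuples that mirror the hashFromStr test vectors:

    let a = (1000, "1234")
    let b = (1000, "1234567")
    let c = (9000, "1234")
    doAssert cmp(a, b) < 0   # equal times: the lesser hash sorts first
    doAssert cmp(a, c) < 0   # distinct times: the hash is never consulted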
@@ -138,7 +138,7 @@ proc getPage(driver: QueueDriver,

       numberOfItems += 1

-      outSeq.add((key.pubsubTopic, data.msg, @(key.digest.data), key.receiverTime))
+      outSeq.add((key.pubsubTopic, data.msg, @(key.messageHash.data), key.receiverTime))

       currentEntry = if forward: w.next()
                      else: w.prev()

@@ -227,10 +227,10 @@ proc add*(driver: QueueDriver, msg: IndexedWakuMessage): ArchiveDriverResult[voi
 method put*(driver: QueueDriver,
             pubsubTopic: PubsubTopic,
             message: WakuMessage,
-            digest: MessageDigest,
+            messageHash: MessageDigest,
             receivedTime: Timestamp):
             Future[ArchiveDriverResult[void]] {.async.} =
-  let index = Index(pubsubTopic: pubsubTopic, senderTime: message.timestamp, receiverTime: receivedTime, digest: digest)
+  let index = Index(pubsubTopic: pubsubTopic, senderTime: message.timestamp, receiverTime: receivedTime, messageHash: messageHash)
   let message = IndexedWakuMessage(msg: message, index: index, pubsubTopic: pubsubTopic)
   return driver.add(message)

@@ -10,4 +10,4 @@ import

 type DbCursor* = (Timestamp, seq[byte], PubsubTopic)

-proc toDbCursor*(c: ArchiveCursor): DbCursor = (c.storeTime, @(c.digest.data), c.pubsubTopic)
+proc toDbCursor*(c: ArchiveCursor): DbCursor = (c.storeTime, @(c.messageHash.data), c.pubsubTopic)

@@ -14,12 +14,12 @@ logScope:
   topics = "waku archive migration"


-const SchemaVersion* = 7 # increase this when there is an update in the database schema
+const SchemaVersion* = 8 # increase this when there is an update in the database schema

 template projectRoot: string = currentSourcePath.rsplit(DirSep, 1)[0] / ".." / ".." / ".." / ".."
 const MessageStoreMigrationPath: string = projectRoot / "migrations" / "message_store"

-proc isSchemaVersion7*(db: SqliteDatabase): DatabaseResult[bool] =
+proc isSchemaVersion8*(db: SqliteDatabase): DatabaseResult[bool] =
   ## Temporary proc created to analyse when the table actually belongs to the SchemaVersion 7.
   ##
   ## During many nwaku versions, 0.14.0 until 0.18.0, the SchemaVersion wasn't set or checked.

@@ -48,7 +48,7 @@ proc isSchemaVersion7*(db: SqliteDatabase): DatabaseResult[bool] =
     return ok(true)

   else:
-    info "Not considered schema version 7"
+    info "Not considered schema version 8"
     ok(false)

 proc migrate*(db: SqliteDatabase, targetVersion = SchemaVersion): DatabaseResult[void] =

@@ -63,12 +63,12 @@ proc migrate*(db: SqliteDatabase, targetVersion = SchemaVersion): DatabaseResult
   debug "starting message store's sqlite database migration"

   let userVersion = ? db.getUserVersion()
-  let isSchemaVersion7 = ? db.isSchemaVersion7()
+  let isSchemaVersion8 = ? db.isSchemaVersion8()

-  if userVersion == 0'i64 and isSchemaVersion7:
+  if userVersion == 0'i64 and isSchemaVersion8:
     info "We found user_version 0 but the database schema reflects the user_version 7"
     ## Force the correct schema version
-    ? db.setUserVersion( 7 )
+    ? db.setUserVersion( 8 )

   let migrationRes = migrate(db, targetVersion, migrationsScriptsDir=MessageStoreMigrationPath)
   if migrationRes.isErr():
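Review note: the plausible intent of this hunk, restated: a store written by nwaku 0.14.0-0.18.0 reports user_version 0 even when its tables are up to date, so the version is pinned before the script runner fires (the unchanged log line still says "user_version 7", presumably an oversight). Sketched inside a DatabaseResult-returning proc, using only procs from this file:

    let userVersion = ? db.getUserVersion()
    if userVersion == 0'i64 and (? db.isSchemaVersion8()):
      # the schema already matches version 8 but user_version was never set;
      # pin it so the runner does not re-apply 00008
      ? db.setUserVersion( 8 )
    let migrationRes = migrate(db, targetVersion, migrationsScriptsDir = MessageStoreMigrationPath)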
@@ -70,9 +70,9 @@ proc createTableQuery(table: string): SqlQueryStr =
   " payload BLOB," &
   " version INTEGER NOT NULL," &
   " timestamp INTEGER NOT NULL," &
-  " id BLOB," &
+  " messageHash BLOB," &
   " storedAt INTEGER NOT NULL," &
-  " CONSTRAINT messageIndex PRIMARY KEY (storedAt, id, pubsubTopic)" &
+  " CONSTRAINT messageIndex PRIMARY KEY (storedAt, messageHash)" &
   ") WITHOUT ROWID;"

 proc createTable*(db: SqliteDatabase): DatabaseResult[void] =

@@ -93,7 +93,7 @@ proc createOldestMessageTimestampIndex*(db: SqliteDatabase):


 proc createHistoryQueryIndexQuery(table: string): SqlQueryStr =
-  "CREATE INDEX IF NOT EXISTS i_query ON " & table & " (contentTopic, pubsubTopic, storedAt, id);"
+  "CREATE INDEX IF NOT EXISTS i_query ON " & table & " (contentTopic, pubsubTopic, storedAt, messageHash);"

 proc createHistoryQueryIndex*(db: SqliteDatabase): DatabaseResult[void] =
   let query = createHistoryQueryIndexQuery(DbTable)

@@ -105,7 +105,7 @@ proc createHistoryQueryIndex*(db: SqliteDatabase): DatabaseResult[void] =
 type InsertMessageParams* = (seq[byte], Timestamp, seq[byte], seq[byte], seq[byte], int64, Timestamp)

 proc insertMessageQuery(table: string): SqlQueryStr =
-  "INSERT INTO " & table & "(id, storedAt, contentTopic, payload, pubsubTopic, version, timestamp)" &
+  "INSERT INTO " & table & "(messageHash, storedAt, contentTopic, payload, pubsubTopic, version, timestamp)" &
   " VALUES (?, ?, ?, ?, ?, ?, ?);"

 proc prepareInsertMessageStmt*(db: SqliteDatabase): SqliteStmt[InsertMessageParams, void] =

@@ -181,9 +181,9 @@ proc deleteMessagesOlderThanTimestamp*(db: SqliteDatabase, ts: int64):
 ## Delete oldest messages not within limit

 proc deleteOldestMessagesNotWithinLimitQuery(table: string, limit: int): SqlQueryStr =
-  "DELETE FROM " & table & " WHERE (storedAt, id, pubsubTopic) NOT IN (" &
-  " SELECT storedAt, id, pubsubTopic FROM " & table &
-  " ORDER BY storedAt DESC, id DESC" &
+  "DELETE FROM " & table & " WHERE (storedAt, messageHash, pubsubTopic) NOT IN (" &
+  " SELECT storedAt, messageHash, pubsubTopic FROM " & table &
+  " ORDER BY storedAt DESC, messageHash DESC" &
   " LIMIT " & $limit &
   ");"

@@ -197,7 +197,7 @@ proc deleteOldestMessagesNotWithinLimit*(db: SqliteDatabase, limit: int):
 ## Select all messages

 proc selectAllMessagesQuery(table: string): SqlQueryStr =
-  "SELECT storedAt, contentTopic, payload, pubsubTopic, version, timestamp, id" &
+  "SELECT storedAt, contentTopic, payload, pubsubTopic, version, timestamp, messageHash" &
   " FROM " & table &
   " ORDER BY storedAt ASC"

@@ -211,10 +211,10 @@ proc selectAllMessages*(db: SqliteDatabase): DatabaseResult[seq[(PubsubTopic,
     let
       pubsubTopic = queryRowPubsubTopicCallback(s, pubsubTopicCol=3)
       wakuMessage = queryRowWakuMessageCallback(s, contentTopicCol=1, payloadCol=2, versionCol=4, senderTimestampCol=5)
-      digest = queryRowDigestCallback(s, digestCol=6)
+      messageHash = queryRowDigestCallback(s, digestCol=6)
       storedAt = queryRowReceiverTimestampCallback(s, storedAtCol=0)

-    rows.add((pubsubTopic, wakuMessage, digest, storedAt))
+    rows.add((pubsubTopic, wakuMessage, messageHash, storedAt))

   let query = selectAllMessagesQuery(DbTable)
   let res = db.query(query, queryRowCallback)

@@ -246,7 +246,7 @@ proc whereClause(cursor: Option[DbCursor],
     none(string)
   else:
     let comp = if ascending: ">" else: "<"
-    some("(storedAt, id) " & comp & " (?, ?)")
+    some("(storedAt, messageHash) " & comp & " (?, ?)")

   let pubsubTopicClause = if pubsubTopic.isNone():
     none(string)

@@ -280,13 +280,13 @@ proc selectMessagesWithLimitQuery(table: string, where: Option[string], limit: u

   var query: string

-  query = "SELECT storedAt, contentTopic, payload, pubsubTopic, version, timestamp, id"
+  query = "SELECT storedAt, contentTopic, payload, pubsubTopic, version, timestamp, messageHash"
   query &= " FROM " & table

   if where.isSome():
     query &= " WHERE " & where.get()

-  query &= " ORDER BY storedAt " & order & ", id " & order
+  query &= " ORDER BY storedAt " & order & ", messageHash " & order
   query &= " LIMIT " & $limit & ";"

   query
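Review note: with a cursor present and ascending order, the builder above now emits keyset pagination over the new primary key, producing a query string like the following (the pubsubTopic clause is outside this hunk and assumed here for illustration):

    SELECT storedAt, contentTopic, payload, pubsubTopic, version, timestamp, messageHash
    FROM message
    WHERE (storedAt, messageHash) > (?, ?) AND pubsubTopic = (?)
    ORDER BY storedAt ASC, messageHash ASC
    LIMIT 10;

The row-value comparison (storedAt, messageHash) > (?, ?) requires SQLite 3.15 or newer, and it pages efficiently because it matches both the primary key and the i_query index defined above.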
@@ -308,11 +308,11 @@ proc execSelectMessagesWithLimitStmt(s: SqliteStmt,
   # Bind params
   var paramIndex = 1

-  if cursor.isSome(): # cursor = storedAt, id, pubsubTopic
-    let (storedAt, id, _) = cursor.get()
+  if cursor.isSome(): # cursor = storedAt, messageHash, pubsubTopic
+    let (storedAt, messageHash, _) = cursor.get()
     checkErr bindParam(s, paramIndex, storedAt)
     paramIndex += 1
-    checkErr bindParam(s, paramIndex, id)
+    checkErr bindParam(s, paramIndex, messageHash)
     paramIndex += 1

   if pubsubTopic.isSome():

@@ -369,10 +369,10 @@ proc selectMessagesByHistoryQueryWithLimit*(db: SqliteDatabase,
     let
       pubsubTopic = queryRowPubsubTopicCallback(s, pubsubTopicCol=3)
       message = queryRowWakuMessageCallback(s, contentTopicCol=1, payloadCol=2, versionCol=4, senderTimestampCol=5)
-      digest = queryRowDigestCallback(s, digestCol=6)
+      messageHash = queryRowDigestCallback(s, digestCol=6)
       storedAt = queryRowReceiverTimestampCallback(s, storedAtCol=0)

-    messages.add((pubsubTopic, message, digest, storedAt))
+    messages.add((pubsubTopic, message, messageHash, storedAt))

   let query = block:
     let where = whereClause(cursor, pubsubTopic, contentTopic, startTime, endTime, ascending)

@@ -65,7 +65,7 @@ method put*(s: SqliteDriver,
             Future[ArchiveDriverResult[void]] {.async.} =
   ## Inserts a message into the store
   let res = s.insertStmt.exec((
-    @(digest.data),                # id
+    @(digest.data),                # messageHash
     receivedTime,                  # storedAt
     toBytes(message.contentTopic), # contentTopic
     message.payload,               # payload

@@ -153,11 +153,11 @@ when defined(waku_exp_store_resume):

   proc put(store: ArchiveDriver, pubsubTopic: PubsubTopic, message: WakuMessage): Result[void, string] =
     let
-      digest = waku_archive.computeDigest(message, pubsubTopic)
+      messageHash = waku_archive.computeDigest(message, pubsubTopic)
       receivedTime = if message.timestamp > 0: message.timestamp
                      else: getNanosecondTime(getTime().toUnixFloat())

-    store.put(pubsubTopic, message, digest, receivedTime)
+    store.put(pubsubTopic, message, messageHash, receivedTime)

   proc resume*(w: WakuStoreClient,
                peerList = none(seq[RemotePeerInfo]),

@@ -27,13 +27,15 @@ type WakuStoreResult*[T] = Result[T, string]

 type MessageDigest* = MDigest[256]

-proc computeDigest*(msg: WakuMessage): MessageDigest =
+proc computeDigest*(msg: WakuMessage, pubSubTopic: string): MessageDigest =
   var ctx: sha256
   ctx.init()
   defer: ctx.clear()

-  ctx.update(msg.contentTopic.toBytes())
+  ctx.update(pubSubTopic.toBytes())
   ctx.update(msg.payload)
+  ctx.update(msg.contentTopic.toBytes())
+  ctx.update(msg.meta)

   # Computes the hash
   return ctx.finish()
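Review note: the digest preimage is now pubsubTopic || payload || contentTopic || meta, which is what makes messageHash topic-dependent (and what the new SQLite test above asserts). A self-contained sketch with nimcrypto and stew, the same libraries this file uses; the topic and payload values are made up:

    import nimcrypto, stew/byteutils

    var ctx: sha256
    ctx.init()
    ctx.update("test-pubsub-topic1".toBytes())            # pubSubTopic
    ctx.update("Hi!".toBytes())                           # msg.payload
    ctx.update("/waku/2/default-content/proto".toBytes()) # msg.contentTopic
    ctx.update(newSeq[byte]())                            # msg.meta (empty here)
    echo ctx.finish()                                     # a MessageDigest, i.e. MDigest[256]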
@@ -46,7 +48,7 @@ type
     pubsubTopic*: PubsubTopic
     senderTime*: Timestamp
     storeTime*: Timestamp
-    digest*: MessageDigest
+    messageHash*: MessageDigest

   HistoryQuery* = object
     pubsubTopic*: Option[PubsubTopic]

@@ -18,25 +18,25 @@ type PagingIndexRPC* = object
   pubsubTopic*: PubsubTopic
   senderTime*: Timestamp # the time at which the message is generated
   receiverTime*: Timestamp
-  digest*: MessageDigest # calculated over payload and content topic
+  messageHash*: MessageDigest # calculated over the pubsub topic, payload, content topic and meta

 proc `==`*(x, y: PagingIndexRPC): bool =
   ## receiverTime plays no role in index equality
   (x.senderTime == y.senderTime) and
-  (x.digest == y.digest) and
+  (x.messageHash == y.messageHash) and
   (x.pubsubTopic == y.pubsubTopic)

 proc compute*(T: type PagingIndexRPC, msg: WakuMessage, receivedTime: Timestamp, pubsubTopic: PubsubTopic): T =
   ## Takes a WakuMessage with received timestamp and returns its Index.
   let
-    digest = computeDigest(msg)
+    digest = computeDigest(msg, pubsubTopic)
     senderTime = msg.timestamp

   PagingIndexRPC(
     pubsubTopic: pubsubTopic,
     senderTime: senderTime,
     receiverTime: receivedTime,
-    digest: digest
+    messageHash: digest
   )

@@ -98,7 +98,7 @@ proc toRPC*(cursor: HistoryCursor): PagingIndexRPC {.gcsafe.}=
     pubsubTopic: cursor.pubsubTopic,
     senderTime: cursor.senderTime,
     receiverTime: cursor.storeTime,
-    digest: cursor.digest
+    messageHash: cursor.messageHash
   )

 proc toAPI*(rpc: PagingIndexRPC): HistoryCursor =

@@ -106,7 +106,7 @@ proc toAPI*(rpc: PagingIndexRPC): HistoryCursor =
     pubsubTopic: rpc.pubsubTopic,
     senderTime: rpc.senderTime,
     storeTime: rpc.receiverTime,
-    digest: rpc.digest
+    messageHash: rpc.messageHash
   )

@@ -23,7 +23,7 @@ proc encode*(index: PagingIndexRPC): ProtoBuffer =
   ## returns the resultant ProtoBuffer
   var pb = initProtoBuffer()

-  pb.write3(1, index.digest.data)
+  pb.write3(1, index.messageHash.data)
   pb.write3(2, zint64(index.receiverTime))
   pb.write3(3, zint64(index.senderTime))
   pb.write3(4, index.pubsubTopic)

@@ -38,13 +38,13 @@ proc decode*(T: type PagingIndexRPC, buffer: seq[byte]): ProtobufResult[T] =

   var data: seq[byte]
   if not ?pb.getField(1, data):
-    return err(ProtobufError.missingRequiredField("digest"))
+    return err(ProtobufError.missingRequiredField("messageHash"))
   else:
-    var digest = MessageDigest()
+    var messageHash = MessageDigest()
     for count, b in data:
-      digest.data[count] = b
+      messageHash.data[count] = b

-    rpc.digest = digest
+    rpc.messageHash = messageHash

   var receiverTime: zint64
   if not ?pb.getField(2, receiverTime):