2020-08-31 03:32:41 +00:00
|
|
|
import
|
|
|
|
std/tables,
|
|
|
|
chronos, chronicles, metrics, stew/results,
|
2020-09-24 02:16:25 +00:00
|
|
|
libp2p/switch,
|
2020-08-31 03:32:41 +00:00
|
|
|
libp2p/protocols/protocol,
|
|
|
|
libp2p/protobuf/minprotobuf,
|
|
|
|
libp2p/stream/connection,
|
2020-09-16 02:59:10 +00:00
|
|
|
./message_notifier,
|
|
|
|
./../../node/v2/waku_types
|
2020-08-27 02:44:09 +00:00
|
|
|
|
2020-09-16 04:23:10 +00:00
|
|
|
logScope:
  topics = "wakustore"

const
  # Protocol codec string announced on the switch for the Waku store
  # (history) protocol; peers dial this id to run history queries.
  WakuStoreCodec* = "/vac/waku/store/2.0.0-alpha5"
|
2020-08-27 02:44:09 +00:00
|
|
|
|
2020-08-31 03:32:41 +00:00
|
|
|
proc init*(T: type HistoryQuery, buffer: seq[byte]): ProtoResult[T] =
  ## Decodes a HistoryQuery from its protobuf wire form.
  ## Field 1 carries the request uuid, field 2 the repeated topic list.
  let pb = initProtoBuffer(buffer)
  var query = HistoryQuery()

  discard ? pb.getField(1, query.uuid)

  var decodedTopics: seq[string]
  discard ? pb.getRepeatedField(2, decodedTopics)
  query.topics = decodedTopics

  ok(query)
|
|
|
|
|
|
|
|
proc init*(T: type HistoryResponse, buffer: seq[byte]): ProtoResult[T] =
  ## Decodes a HistoryResponse from protobuf bytes.
  ## Field 1 is the uuid; field 2 is a repeated list of serialized
  ## WakuMessages which are decoded individually.
  let pb = initProtoBuffer(buffer)
  var response = HistoryResponse()

  discard ? pb.getField(1, response.uuid)

  var rawMessages: seq[seq[byte]]
  discard ? pb.getRepeatedField(2, rawMessages)

  for encoded in rawMessages:
    response.messages.add(? WakuMessage.init(encoded))

  ok(response)
|
|
|
|
|
2020-08-31 03:32:41 +00:00
|
|
|
proc encode*(query: HistoryQuery): ProtoBuffer =
  ## Serializes a HistoryQuery to protobuf: uuid in field 1 and
  ## each topic as a repeated entry in field 2.
  var pb = initProtoBuffer()
  pb.write(1, query.uuid)
  for topic in query.topics:
    pb.write(2, topic)
  pb
|
|
|
|
|
2020-08-31 03:32:41 +00:00
|
|
|
proc encode*(response: HistoryResponse): ProtoBuffer =
  ## Serializes a HistoryResponse to protobuf: uuid in field 1 and
  ## every stored message, individually encoded, as repeated field 2.
  var pb = initProtoBuffer()
  pb.write(1, response.uuid)
  for message in response.messages:
    pb.write(2, message.encode())
  pb
|
2020-08-27 02:44:09 +00:00
|
|
|
|
2020-09-24 02:16:25 +00:00
|
|
|
proc findMessages(w: WakuStore, query: HistoryQuery): HistoryResponse =
  ## Builds the response for a history query: every stored message whose
  ## contentTopic appears in the query's topic list.
  ## NOTE(review): `insert` prepends each match, so results are returned in
  ## reverse storage order — confirm this ordering is intended.
  var matched = newSeq[WakuMessage]()
  for stored in w.messages:
    if stored.contentTopic in query.topics:
      matched.insert(stored)
  HistoryResponse(uuid: query.uuid, messages: matched)
|
2020-08-27 02:44:09 +00:00
|
|
|
|
2020-09-18 13:28:19 +00:00
|
|
|
method init*(ws: WakuStore) =
  ## Registers the store protocol handler and codec on this WakuStore.
  ## The handler reads one length-prefixed HistoryQuery from the stream,
  ## looks up matching messages and writes back a HistoryResponse.
  proc handle(conn: Connection, proto: string) {.async, gcsafe, closure.} =
    var message = await conn.readLp(64*1024)
    var rpc = HistoryQuery.init(message)
    if rpc.isErr:
      # Fix: previously a malformed query was dropped silently; log it so
      # decode failures are visible, matching the error handling in query*.
      error "failed to decode rpc"
      return

    info "received query"

    let res = ws.findMessages(rpc.value)

    await conn.writeLp(res.encode().buffer)

  ws.handler = handle
  ws.codec = WakuStoreCodec
|
2020-09-18 13:28:19 +00:00
|
|
|
|
2020-09-24 02:16:25 +00:00
|
|
|
proc init*(T: type WakuStore, switch: Switch): T =
  ## Constructs a WakuStore bound to `switch` and registers its
  ## protocol handler via the instance `init` method.
  let ws = T(switch: switch)
  ws.init()
  ws
|
2020-08-27 02:44:09 +00:00
|
|
|
|
2020-09-24 02:16:25 +00:00
|
|
|
# @TODO Despite the "set" name this appends: repeated calls grow ws.peers.
# Consider renaming to addPeer or adding remove/replace semantics.
proc setPeer*(ws: WakuStore, peer: PeerInfo) =
  ## Registers a peer to be used for history queries.
  ws.peers.add(HistoryPeer(peerInfo: peer))
|
|
|
|
|
2020-09-07 11:26:32 +00:00
|
|
|
proc subscription*(proto: WakuStore): MessageNotificationSubscription =
  ## The filter function returns the pubsub filter for the node.
  ## This is used to pipe messages into the storage, therefore
  ## the filter should be used by the component that receives
  ## new messages.
  proc storeMessage(topic: string, msg: WakuMessage) {.async.} =
    # Every notified message is appended to the in-memory store.
    proto.messages.add(msg)

  MessageNotificationSubscription.init(@[], storeMessage)
|
2020-09-24 02:16:25 +00:00
|
|
|
|
|
|
|
proc query*(w: WakuStore, query: HistoryQuery, handler: QueryHandlerFunc) {.async, gcsafe.} =
  ## Sends a history query to a store peer and invokes `handler` with the
  ## decoded HistoryResponse. On failure (no peer configured, undecodable
  ## response) an error is logged and the handler is not called.
  # @TODO We need to be more strategic about which peers we dial. Right now we just set one on the service.
  # Ideally depending on the query and our set of peers we take a subset of ideal peers.
  # This will require us to check for various factors such as:
  #  - which topics they track
  #  - latency?
  #  - default store peer?

  # Fix: guard against an empty peer list — w.peers[0] previously raised an
  # IndexDefect when query* was called before setPeer.
  if w.peers.len == 0:
    error "no store peers configured"
    return

  let peer = w.peers[0]
  let conn = await w.switch.dial(peer.peerInfo.peerId, peer.peerInfo.addrs, WakuStoreCodec)

  await conn.writeLp(query.encode().buffer)

  var message = await conn.readLp(64*1024)
  let response = HistoryResponse.init(message)

  if response.isErr:
    error "failed to decode response"
    return

  handler(response.value)
|