package rest

import (
	"context"
	"encoding/base64"
	"errors"
	"net/http"
	"strconv"
	"strings"
	"time"

	"github.com/go-chi/chi/v5"
	"github.com/multiformats/go-multiaddr"
	"github.com/waku-org/go-waku/waku/v2/node"
	"github.com/waku-org/go-waku/waku/v2/protocol"
	"github.com/waku-org/go-waku/waku/v2/protocol/legacy_store"
	"github.com/waku-org/go-waku/waku/v2/protocol/pb"
	"github.com/waku-org/go-waku/waku/v2/protocol/store"
	storepb "github.com/waku-org/go-waku/waku/v2/protocol/store/pb"
	"google.golang.org/protobuf/proto"
)
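
// StoreQueryService provides a REST handler that exposes the Waku Store v3
// query protocol of the underlying node.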
type StoreQueryService struct {
	node *node.WakuNode
	mux  *chi.Mux
}
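
// routeStoreMessagesV1 is the relative REST route on which Store v3 queries are served.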
const routeStoreMessagesV1 = "/store/v3/messages"
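
// NewStoreQueryService creates the service and registers its handler on the given chi router.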
func NewStoreQueryService(node *node.WakuNode, m *chi.Mux) *StoreQueryService {
	s := &StoreQueryService{
		node: node,
		mux:  m,
	}

	m.Get(routeStoreMessagesV1, s.getV3Messages)

	return s
}
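
// getStoreParams translates the request's query string into store query criteria
// and request options. Supported parameters: peerAddr, includeData, pubsubTopic,
// contentTopics (comma separated), hashes (comma separated, base64 URL-encoded),
// startTime, endTime, cursor (base64 URL-encoded), pageSize and ascending.
// A query is either hash-based or filter-based: message hashes cannot be combined
// with pubsubTopic/contentTopics.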
func getStoreParams(r *http.Request) (store.Criteria, []store.RequestOption, error) {
	var options []store.RequestOption
	var err error

	// Optional peer to send the store query to, expressed as a multiaddress.
	peerAddrStr := r.URL.Query().Get("peerAddr")
	var m multiaddr.Multiaddr
	if peerAddrStr != "" {
		m, err = multiaddr.NewMultiaddr(peerAddrStr)
		if err != nil {
			return nil, nil, err
		}
		options = append(options, store.WithPeerAddr(m))
	}

	// Whether full message payloads should be returned, or only message hashes.
	includeData := false
	includeDataStr := r.URL.Query().Get("includeData")
	if includeDataStr != "" {
		includeData, err = strconv.ParseBool(includeDataStr)
		if err != nil {
			return nil, nil, errors.New("invalid value for includeData. Use true|false")
		}
	}
	options = append(options, store.IncludeData(includeData))

	pubsubTopic := r.URL.Query().Get("pubsubTopic")

	contentTopics := r.URL.Query().Get("contentTopics")
	var contentTopicsArr []string
	if contentTopics != "" {
		contentTopicsArr = strings.Split(contentTopics, ",")
	}

	// Message hashes are comma separated and base64 URL-encoded.
	hashesStr := r.URL.Query().Get("hashes")
	var hashes []pb.MessageHash
	if hashesStr != "" {
		hashesStrArr := strings.Split(hashesStr, ",")
		for _, hashStr := range hashesStrArr {
			hash, err := base64.URLEncoding.DecodeString(hashStr)
			if err != nil {
				return nil, nil, err
			}
			hashes = append(hashes, pb.ToMessageHash(hash))
		}
	}

	// A query is either by message hash or by content filter, never both.
	isMsgHashCriteria := false
	if len(hashes) != 0 {
		isMsgHashCriteria = true
		if pubsubTopic != "" || len(contentTopicsArr) != 0 {
			return nil, nil, errors.New("cannot use content filters while specifying message hashes")
		}
	} else {
		if pubsubTopic == "" || len(contentTopicsArr) == 0 {
			return nil, nil, errors.New("pubsubTopic and contentTopics are required")
		}
	}

	startTimeStr := r.URL.Query().Get("startTime")
	var startTime *int64
	if startTimeStr != "" {
		startTimeValue, err := strconv.ParseInt(startTimeStr, 10, 64)
		if err != nil {
			return nil, nil, err
		}
		startTime = &startTimeValue
	}

	endTimeStr := r.URL.Query().Get("endTime")
	var endTime *int64
	if endTimeStr != "" {
		endTimeValue, err := strconv.ParseInt(endTimeStr, 10, 64)
		if err != nil {
			return nil, nil, err
		}
		endTime = &endTimeValue
	}

	// Pagination cursor, as returned in a previous response.
	var cursor []byte
	cursorStr := r.URL.Query().Get("cursor")
	if cursorStr != "" {
		cursor, err = base64.URLEncoding.DecodeString(cursorStr)
		if err != nil {
			return nil, nil, err
		}
		options = append(options, store.WithCursor(cursor))
	}

	pageSizeStr := r.URL.Query().Get("pageSize")
	ascendingStr := r.URL.Query().Get("ascending")
	if ascendingStr != "" || pageSizeStr != "" {
		ascending := true
		pageSize := uint64(legacy_store.DefaultPageSize)
		if ascendingStr != "" {
			ascending, err = strconv.ParseBool(ascendingStr)
			if err != nil {
				return nil, nil, err
			}
		}

		if pageSizeStr != "" {
			pageSize, err = strconv.ParseUint(pageSizeStr, 10, 64)
			if err != nil {
				return nil, nil, err
			}
			if pageSize > legacy_store.MaxPageSize {
				pageSize = legacy_store.MaxPageSize
			}
		}

		options = append(options, store.WithPaging(ascending, pageSize))
	}

	var query store.Criteria
	if isMsgHashCriteria {
		query = store.MessageHashCriteria{
			MessageHashes: hashes,
		}
	} else {
		query = store.FilterCriteria{
			ContentFilter: protocol.NewContentFilter(pubsubTopic, contentTopicsArr...),
			TimeStart:     startTime,
			TimeEnd:       endTime,
		}
	}

	return query, options, nil
}
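
// writeStoreError writes a StoreQueryResponse carrying the given HTTP status code
// and the error description.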
func writeStoreError(w http.ResponseWriter, code int, err error) {
	writeResponse(w, &storepb.StoreQueryResponse{StatusCode: proto.Uint32(uint32(code)), StatusDesc: proto.String(err.Error())}, code)
}
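
// getV3Messages handles GET requests on the Store v3 messages route, forwards the
// query to the node's store client and writes the protobuf response. An illustrative
// request (example values only, not taken from this file) could look like:
//
//	GET /store/v3/messages?pubsubTopic=/waku/2/rs/1/0&contentTopics=/app/1/chat/proto&pageSize=20&ascending=true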
func (d *StoreQueryService) getV3Messages(w http.ResponseWriter, r *http.Request) {
	query, options, err := getStoreParams(r)
	if err != nil {
		writeStoreError(w, http.StatusBadRequest, err)
		return
	}

	ctx, cancel := context.WithTimeout(r.Context(), 5*time.Second)
	defer cancel()

	result, err := d.node.Store().Request(ctx, query, options...)
	if err != nil {
		writeStoreError(w, http.StatusInternalServerError, err)
		return
	}

	writeErrOrResponse(w, nil, result.Response())
}
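
// Example (sketch only, assuming a *node.WakuNode named wakuNode is available from
// the surrounding server setup): wiring the service into a chi router and querying it.
//
//	router := chi.NewMux()
//	_ = NewStoreQueryService(wakuNode, router)
//	// GET /store/v3/messages?hashes=<base64url-encoded-hash>&includeData=true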