package activity

import (
	"context"
	"database/sql"
	"math/big"
	"testing"
	"time"

	"go.uber.org/mock/gomock"

	eth "github.com/ethereum/go-ethereum/common"
	"github.com/ethereum/go-ethereum/event"

	"github.com/status-im/status-go/appdatabase"
	"github.com/status-im/status-go/multiaccounts/accounts"
	"github.com/status-im/status-go/rpc/chain"
	mock_rpcclient "github.com/status-im/status-go/rpc/mock/client"
	"github.com/status-im/status-go/services/wallet/bigint"
	"github.com/status-im/status-go/services/wallet/common"
	"github.com/status-im/status-go/services/wallet/thirdparty"
	"github.com/status-im/status-go/services/wallet/token"
	mock_token "github.com/status-im/status-go/services/wallet/token/mock/token"
	"github.com/status-im/status-go/services/wallet/transfer"
	"github.com/status-im/status-go/services/wallet/walletevent"
	"github.com/status-im/status-go/t/helpers"
	"github.com/status-im/status-go/transactions"
	"github.com/status-im/status-go/walletdatabase"

	"github.com/stretchr/testify/mock"
	"github.com/stretchr/testify/require"
)

const shouldNotWaitTimeout = 19999 * time.Second

// mockCollectiblesManager implements the collectibles.ManagerInterface
type mockCollectiblesManager struct {
	mock.Mock
}

func (m *mockCollectiblesManager) FetchAssetsByCollectibleUniqueID(ctx context.Context, uniqueIDs []thirdparty.CollectibleUniqueID, asyncFetch bool) ([]thirdparty.FullCollectibleData, error) {
	args := m.Called(uniqueIDs)
	res := args.Get(0)
	if res == nil {
		return nil, args.Error(1)
	}
	return res.([]thirdparty.FullCollectibleData), args.Error(1)
}

func (m *mockCollectiblesManager) FetchCollectionSocialsAsync(contractID thirdparty.ContractID) error {
	args := m.Called(contractID)
	res := args.Get(0)
	if res == nil {
		return args.Error(1)
	}
	return nil
}

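// testState bundles the Service under test together with its mocked
// dependencies so individual tests can set expectations and release
// resources through close.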
type testState struct {
	service          *Service
	eventFeed        *event.Feed
	tokenMock        *mock_token.MockManagerInterface
	collectiblesMock *mockCollectiblesManager
	close            func()
	pendingTracker   *transactions.PendingTxTracker
	chainClient      *transactions.MockChainClient
	rpcClient        *mock_rpcclient.MockClientInterface
}

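// setupTestService creates in-memory wallet and app databases, wires the
// token, collectibles and RPC mocks into a new Service with a pending
// transaction tracker and returns the aggregated test state; callers must
// defer state.close().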
func setupTestService(tb testing.TB) (state testState) {
	db, err := helpers.SetupTestMemorySQLDB(walletdatabase.DbInitializer{})
	require.NoError(tb, err)

	appDB, err := helpers.SetupTestMemorySQLDB(appdatabase.DbInitializer{})
	require.NoError(tb, err)
	accountsDB, err := accounts.NewDB(appDB)
	require.NoError(tb, err)

	state.eventFeed = new(event.Feed)
	mockCtrl := gomock.NewController(tb)
	state.tokenMock = mock_token.NewMockManagerInterface(mockCtrl)
	state.collectiblesMock = &mockCollectiblesManager{}

	state.chainClient = transactions.NewMockChainClient()
	state.rpcClient = mock_rpcclient.NewMockClientInterface(mockCtrl)
	state.rpcClient.EXPECT().AbstractEthClient(gomock.Any()).DoAndReturn(func(chainID common.ChainID) (chain.BatchCallClient, error) {
		return state.chainClient.AbstractEthClient(chainID)
	}).AnyTimes()

	// Ensure we process pending transactions as needed, only once
	pendingCheckInterval := time.Second
	state.pendingTracker = transactions.NewPendingTxTracker(db, state.rpcClient, nil, state.eventFeed, pendingCheckInterval)

	state.service = NewService(db, accountsDB, state.tokenMock, state.collectiblesMock, state.eventFeed, state.pendingTracker)
	state.service.debounceDuration = 0
	state.close = func() {
		require.NoError(tb, state.pendingTracker.Stop())
		require.NoError(tb, db.Close())
		defer mockCtrl.Finish()
	}

	return state
}

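// arg describes one stub transfer to insert: the chain, the token contract
// address and an optional token ID given as a string; tokenID and
// tokenAddress are filled in by insertStubTransfersWithCollectibles.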
type arg struct {
	chainID         common.ChainID
	tokenAddressStr string
	tokenIDStr      string
	tokenID         *big.Int
	tokenAddress    *eth.Address
}

// insertStubTransfersWithCollectibles will insert a nil tokenID if tokenIDStr is empty
func insertStubTransfersWithCollectibles(t *testing.T, db *sql.DB, args []arg) (fromAddresses, toAddresses []eth.Address) {
	trs, fromAddresses, toAddresses := transfer.GenerateTestTransfers(t, db, 0, len(args))
	for i := range args {
		trs[i].ChainID = args[i].chainID
		if args[i].tokenIDStr == "" {
			args[i].tokenID = nil
		} else {
			args[i].tokenID = new(big.Int)
			args[i].tokenID.SetString(args[i].tokenIDStr, 0)
		}
		args[i].tokenAddress = new(eth.Address)
		*args[i].tokenAddress = eth.HexToAddress(args[i].tokenAddressStr)
		transfer.InsertTestTransferWithOptions(t, db, trs[i].To, &trs[i], &transfer.TestTransferOptions{
			TokenAddress: *args[i].tokenAddress,
			TokenID:      args[i].tokenID,
		})
	}
	return fromAddresses, toAddresses
}

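// TestService_UpdateCollectibleInfo verifies that filtering emits a single
// EventActivityFilteringDone with all matching activities, followed by
// EventActivityFilteringUpdate events carrying the collectible metadata
// fetched for the unique token IDs.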
func TestService_UpdateCollectibleInfo(t *testing.T) {
	state := setupTestService(t)
	defer state.close()

	args := []arg{
		{5, "0xA2838FDA19EB6EED3F8B9EFF411D4CD7D2DE0313", "0x0D", nil, nil},
		{5, "0xA2838FDA19EB6EED3F8B9EFF411D4CD7D2DE0313", "0x762AD3E4934E687F8701F24C7274E5209213FD6208FF952ACEB325D028866949", nil, nil},
		{5, "0xA2838FDA19EB6EED3F8B9EFF411D4CD7D2DE0313", "0x762AD3E4934E687F8701F24C7274E5209213FD6208FF952ACEB325D028866949", nil, nil},
		{5, "0x3d6afaa395c31fcd391fe3d562e75fe9e8ec7e6a", "", nil, nil},
		{5, "0xA2838FDA19EB6EED3F8B9EFF411D4CD7D2DE0313", "0x0F", nil, nil},
	}
	fromAddresses, toAddresses := insertStubTransfersWithCollectibles(t, state.service.db, args)

	ch := make(chan walletevent.Event)
	sub := state.eventFeed.Subscribe(ch)

	// Expect one call for the fungible token
	state.tokenMock.EXPECT().LookupTokenIdentity(uint64(5), eth.HexToAddress("0x3d6afaa395c31fcd391fe3d562e75fe9e8ec7e6a"), false).Return(
		&token.Token{
			ChainID: 5,
			Address: eth.HexToAddress("0x3d6afaa395c31fcd391fe3d562e75fe9e8ec7e6a"),
			Symbol:  "STT",
		},
	).Times(1)
	state.collectiblesMock.On("FetchAssetsByCollectibleUniqueID", []thirdparty.CollectibleUniqueID{
		{
			ContractID: thirdparty.ContractID{
				ChainID: args[4].chainID,
				Address: *args[4].tokenAddress},
			TokenID: &bigint.BigInt{Int: args[4].tokenID},
		}, {
			ContractID: thirdparty.ContractID{
				ChainID: args[1].chainID,
				Address: *args[1].tokenAddress},
			TokenID: &bigint.BigInt{Int: args[1].tokenID},
		}, {
			ContractID: thirdparty.ContractID{
				ChainID: args[0].chainID,
				Address: *args[0].tokenAddress},
			TokenID: &bigint.BigInt{Int: args[0].tokenID},
		},
	}).Return([]thirdparty.FullCollectibleData{
		{
			CollectibleData: thirdparty.CollectibleData{
				ID: thirdparty.CollectibleUniqueID{
					ContractID: thirdparty.ContractID{
						ChainID: args[4].chainID,
						Address: *args[4].tokenAddress},
					TokenID: &bigint.BigInt{Int: args[4].tokenID},
				},
				Name:     "Test 4",
				ImageURL: "test://url/4"},
			CollectionData: nil,
		}, {
			CollectibleData: thirdparty.CollectibleData{
				ID: thirdparty.CollectibleUniqueID{
					ContractID: thirdparty.ContractID{
						ChainID: args[1].chainID,
						Address: *args[1].tokenAddress},
					TokenID: &bigint.BigInt{Int: args[1].tokenID},
				},
				Name:     "Test 1",
				ImageURL: "test://url/1"},
			CollectionData: nil,
		},
		{
			CollectibleData: thirdparty.CollectibleData{
				ID: thirdparty.CollectibleUniqueID{
					ContractID: thirdparty.ContractID{
						ChainID: args[0].chainID,
						Address: *args[0].tokenAddress},
					TokenID: &bigint.BigInt{Int: args[0].tokenID},
				},
				Name:     "Test 0",
				ImageURL: "test://url/0"},
			CollectionData: nil,
		},
	}, nil).Once()

	state.service.FilterActivityAsync(0, append(fromAddresses, toAddresses...), allNetworksFilter(), Filter{}, 0, 10)

	filterResponseCount := 0
	var updates []EntryData

	for i := 0; i < 2; i++ {
		select {
		case res := <-ch:
			switch res.Type {
			case EventActivityFilteringDone:
				payload, err := walletevent.GetPayload[FilterResponse](res)
				require.NoError(t, err)
				require.Equal(t, ErrorCodeSuccess, payload.ErrorCode)
				require.Equal(t, 5, len(payload.Activities))
				filterResponseCount++
			case EventActivityFilteringUpdate:
				err := walletevent.ExtractPayload(res, &updates)
				require.NoError(t, err)
			}
		case <-time.NewTimer(shouldNotWaitTimeout).C:
			require.Fail(t, "timeout while waiting for event")
		}
	}

	// FetchAssetsByCollectibleUniqueID receives only unique IDs, while the number of update entries can be bigger
	require.Equal(t, 1, filterResponseCount)
	require.Equal(t, 4, len(updates))
	require.Equal(t, "Test 4", *updates[0].NftName)
	require.Equal(t, "test://url/4", *updates[0].NftURL)
	require.Equal(t, "Test 1", *updates[1].NftName)
	require.Equal(t, "test://url/1", *updates[1].NftURL)
	require.Equal(t, "Test 1", *updates[2].NftName)
	require.Equal(t, "test://url/1", *updates[2].NftURL)
	require.Equal(t, "Test 0", *updates[3].NftName)
	require.Equal(t, "test://url/0", *updates[3].NftURL)

	sub.Unsubscribe()
}

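// TestService_UpdateCollectibleInfo_Error verifies that no
// EventActivityFilteringUpdate is emitted when fetching the collectible
// metadata fails.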
func TestService_UpdateCollectibleInfo_Error(t *testing.T) {
	state := setupTestService(t)
	defer state.close()

	args := []arg{
		{5, "0xA2838FDA19EB6EED3F8B9EFF411D4CD7D2DE0313", "0x762AD3E4934E687F8701F24C7274E5209213FD6208FF952ACEB325D028866949", nil, nil},
		{5, "0xA2838FDA19EB6EED3F8B9EFF411D4CD7D2DE0313", "0x0D", nil, nil},
	}

	ch := make(chan walletevent.Event, 4)
	sub := state.eventFeed.Subscribe(ch)

	fromAddresses, toAddresses := insertStubTransfersWithCollectibles(t, state.service.db, args)

	state.collectiblesMock.On("FetchAssetsByCollectibleUniqueID", mock.Anything).Return(nil, thirdparty.ErrChainIDNotSupported).Once()

	state.service.FilterActivityAsync(0, append(fromAddresses, toAddresses...), allNetworksFilter(), Filter{}, 0, 5)

	filterResponseCount := 0
	updatesCount := 0

	for i := 0; i < 2; i++ {
		select {
		case res := <-ch:
			switch res.Type {
			case EventActivityFilteringDone:
				payload, err := walletevent.GetPayload[FilterResponse](res)
				require.NoError(t, err)
				require.Equal(t, ErrorCodeSuccess, payload.ErrorCode)
				require.Equal(t, 2, len(payload.Activities))
				filterResponseCount++
			case EventActivityFilteringUpdate:
				updatesCount++
			}
		case <-time.NewTimer(20 * time.Millisecond).C:
			// We wait to ensure the EventActivityFilteringUpdate is never sent
		}
	}

	require.Equal(t, 1, filterResponseCount)
	require.Equal(t, 0, updatesCount)

	sub.Unsubscribe()
}

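// setupTransactions mocks testTxs as pending transactions on the chain
// client, inserts txCount confirmed test transfers into the DB and stubs the
// token lookups; it returns all involved addresses, the pending entries, the
// subscribed event channel and a cleanup function.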
func setupTransactions(t *testing.T, state testState, txCount int, testTxs []transactions.TestTxSummary) (allAddresses []eth.Address, pendings []transactions.PendingTransaction, ch chan walletevent.Event, cleanup func()) {
	ch = make(chan walletevent.Event, 4)
	sub := state.eventFeed.Subscribe(ch)

	pendings = transactions.MockTestTransactions(t, state.chainClient, testTxs)
	for _, p := range pendings {
		allAddresses = append(allAddresses, p.From, p.To)
	}

	txs, fromTrs, toTrs := transfer.GenerateTestTransfers(t, state.service.db, len(pendings), txCount)
	for i := range txs {
		transfer.InsertTestTransfer(t, state.service.db, txs[i].To, &txs[i])
	}

	allAddresses = append(append(allAddresses, fromTrs...), toTrs...)

	state.tokenMock.EXPECT().LookupTokenIdentity(gomock.Any(), gomock.Any(), gomock.Any()).Return(
		&token.Token{
			ChainID: 5,
			Address: eth.Address{},
			Symbol:  "ETH",
		},
	).AnyTimes()

	state.tokenMock.EXPECT().LookupToken(gomock.Any(), gomock.Any()).Return(
		&token.Token{
			ChainID: 5,
			Address: eth.Address{},
			Symbol:  "ETH",
		}, true,
	).AnyTimes()

	return allAddresses, pendings, ch, func() {
		sub.Unsubscribe()
	}
}

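// getValidateSessionUpdateHasNewOnTopFn returns a payload check asserting
// that HasNewOnTop is set and true; it returns false so the caller keeps
// waiting for the expected number of session updates.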
func getValidateSessionUpdateHasNewOnTopFn(t *testing.T) func(payload SessionUpdate) bool {
	return func(payload SessionUpdate) bool {
		require.NotNil(t, payload.HasNewOnTop)
		require.True(t, *payload.HasNewOnTop)
		return false
	}
}

// validateSessionUpdateEvent gives up early if checkPayloadFn returns true, instead of waiting for expectCount session updates
func validateSessionUpdateEvent(t *testing.T, ch chan walletevent.Event, filterResponseCount *int, expectCount int, checkPayloadFn func(payload SessionUpdate) bool) (pendingTransactionUpdate, sessionUpdatesCount int) {
	for sessionUpdatesCount < expectCount {
		select {
		case res := <-ch:
			switch res.Type {
			case transactions.EventPendingTransactionUpdate:
				pendingTransactionUpdate++
			case EventActivitySessionUpdated:
				payload, err := walletevent.GetPayload[SessionUpdate](res)
				require.NoError(t, err)

				if checkPayloadFn != nil && checkPayloadFn(*payload) {
					return
				}

				sessionUpdatesCount++
			case EventActivityFilteringDone:
				(*filterResponseCount)++
			}
		case <-time.NewTimer(shouldNotWaitTimeout).C:
			require.Fail(t, "timeout while waiting for EventActivitySessionUpdated")
		}
	}
	return
}

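// extraExpect carries optional expectations for validateFilteringDone;
// getOptionalExpectations falls back to offset 0 and ErrorCodeSuccess for
// fields that are nil.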
type extraExpect struct {
	offset    *int
	errorCode *ErrorCode
}

func getOptionalExpectations(e *extraExpect) (expectOffset int, expectErrorCode ErrorCode) {
	expectOffset = 0
	expectErrorCode = ErrorCodeSuccess

	if e != nil {
		if e.offset != nil {
			expectOffset = *e.offset
		}
		if e.errorCode != nil {
			expectErrorCode = *e.errorCode
		}
	}
	return
}

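// validateFilteringDone waits for one EventActivityFilteringDone event,
// asserts the expected error code, activity count and offset, and runs
// checkPayloadFn on the payload when provided.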
func validateFilteringDone(t *testing.T, ch chan walletevent.Event, resCount int, checkPayloadFn func(payload FilterResponse), extra *extraExpect) (filterResponseCount int) {
	for filterResponseCount < 1 {
		select {
		case res := <-ch:
			switch res.Type {
			case EventActivityFilteringDone:
				payload, err := walletevent.GetPayload[FilterResponse](res)
				require.NoError(t, err)

				expectOffset, expectErrorCode := getOptionalExpectations(extra)

				require.Equal(t, expectErrorCode, payload.ErrorCode)
				require.Equal(t, resCount, len(payload.Activities))

				require.Equal(t, expectOffset, payload.Offset)
				filterResponseCount++

				if checkPayloadFn != nil {
					checkPayloadFn(*payload)
				}
			}
		case <-time.NewTimer(shouldNotWaitTimeout).C:
			require.Fail(t, "timeout while waiting for EventActivityFilteringDone")
		}
	}
	return
}

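// TestService_IncrementalUpdateOnTop checks that a pending transaction added
// after the initial filtering triggers a session update and, after a session
// reset, shows up on top flagged as new with the expected pending data.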
func TestService_IncrementalUpdateOnTop(t *testing.T) {
	state := setupTestService(t)
	defer state.close()

	transactionCount := 2
	allAddresses, pendings, ch, cleanup := setupTransactions(t, state, transactionCount, []transactions.TestTxSummary{{DontConfirm: true, Timestamp: transactionCount + 1}})
	defer cleanup()

	sessionID := state.service.StartFilterSession(allAddresses, allNetworksFilter(), Filter{}, 5)
	require.Greater(t, sessionID, SessionID(0))
	defer state.service.StopFilterSession(sessionID)

	filterResponseCount := validateFilteringDone(t, ch, 2, nil, nil)

	exp := pendings[0]
	err := state.pendingTracker.StoreAndTrackPendingTx(&exp)
	require.NoError(t, err)

	vFn := getValidateSessionUpdateHasNewOnTopFn(t)
	pendingTransactionUpdate, sessionUpdatesCount := validateSessionUpdateEvent(t, ch, &filterResponseCount, 1, vFn)

	err = state.service.ResetFilterSession(sessionID, 5)
	require.NoError(t, err)

	// Validate the reset data
	eventActivityDoneCount := validateFilteringDone(t, ch, 3, func(payload FilterResponse) {
		require.True(t, payload.Activities[0].isNew)
		require.False(t, payload.Activities[1].isNew)
		require.False(t, payload.Activities[2].isNew)

		// Check the new transaction data
		newTx := payload.Activities[0]
		require.Equal(t, PendingTransactionPT, newTx.payloadType)
		// We don't keep type in the DB
		require.Equal(t, (*int)(nil), newTx.transferType)
		require.Equal(t, SendAT, newTx.activityType)
		require.Equal(t, PendingAS, newTx.activityStatus)
		require.Equal(t, exp.ChainID, newTx.transaction.ChainID)
		require.Equal(t, exp.ChainID, *newTx.chainIDOut)
		require.Equal(t, (*common.ChainID)(nil), newTx.chainIDIn)
		require.Equal(t, exp.Hash, newTx.transaction.Hash)
		// Pending doesn't have address as part of identity
		require.Equal(t, eth.Address{}, newTx.transaction.Address)
		require.Equal(t, exp.From, *newTx.sender)
		require.Equal(t, exp.To, *newTx.recipient)
		require.Equal(t, 0, exp.Value.Int.Cmp((*big.Int)(newTx.amountOut)))
		require.Equal(t, exp.Timestamp, uint64(newTx.timestamp))
		require.Equal(t, exp.Symbol, *newTx.symbolOut)
		require.Equal(t, (*string)(nil), newTx.symbolIn)
		require.Equal(t, &Token{
			TokenType: Native,
			ChainID:   5,
		}, newTx.tokenOut)
		require.Equal(t, (*Token)(nil), newTx.tokenIn)
		require.Equal(t, (*eth.Address)(nil), newTx.contractAddress)

		// Check the order of the following transaction data
		require.Equal(t, SimpleTransactionPT, payload.Activities[1].payloadType)
		require.Equal(t, int64(transactionCount), payload.Activities[1].timestamp)
		require.Equal(t, SimpleTransactionPT, payload.Activities[2].payloadType)
		require.Equal(t, int64(transactionCount-1), payload.Activities[2].timestamp)
	}, nil)

	require.Equal(t, 1, pendingTransactionUpdate)
	require.Equal(t, 1, filterResponseCount)
	require.Equal(t, 1, sessionUpdatesCount)
	require.Equal(t, 1, eventActivityDoneCount)
}

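// TestService_IncrementalUpdateMixed tracks several pending transactions and
// checks that the session updates report them as new entries matching the
// tracked pending list, with the oldest one falling outside the window.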
func TestService_IncrementalUpdateMixed(t *testing.T) {
	state := setupTestService(t)
	defer state.close()

	transactionCount := 5
	allAddresses, pendings, ch, cleanup := setupTransactions(t, state, transactionCount,
		[]transactions.TestTxSummary{
			{DontConfirm: true, Timestamp: 2},
			{DontConfirm: true, Timestamp: 4},
			{DontConfirm: true, Timestamp: 6},
		},
	)
	defer cleanup()

	sessionID := state.service.StartFilterSession(allAddresses, allNetworksFilter(), Filter{}, 5)
	require.Greater(t, sessionID, SessionID(0))
	defer state.service.StopFilterSession(sessionID)

	filterResponseCount := validateFilteringDone(t, ch, 5, nil, nil)

	for i := range pendings {
		err := state.pendingTracker.StoreAndTrackPendingTx(&pendings[i])
		require.NoError(t, err)
	}

	pendingTransactionUpdate, sessionUpdatesCount := validateSessionUpdateEvent(t, ch, &filterResponseCount, 2, func(payload SessionUpdate) bool {
		require.Nil(t, payload.HasNewOnTop)
		require.NotEmpty(t, payload.New)
		for _, update := range payload.New {
			require.True(t, update.Entry.isNew)
			foundIdx := -1
			for i, pTx := range pendings {
				if pTx.Hash == update.Entry.transaction.Hash && pTx.ChainID == update.Entry.transaction.ChainID {
					foundIdx = i
					break
				}
			}
			require.Greater(t, foundIdx, -1, "the updated transaction should be found in the pending list")
			pendings = append(pendings[:foundIdx], pendings[foundIdx+1:]...)
		}
		return len(pendings) == 1
	})

	// Validate that the last one (oldest) is out of the window
	require.Equal(t, 1, len(pendings))
	require.Equal(t, uint64(2), pendings[0].Timestamp)

	require.Equal(t, 3, pendingTransactionUpdate)
	require.LessOrEqual(t, sessionUpdatesCount, 3)
	require.Equal(t, 1, filterResponseCount)
}

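// TestService_IncrementalUpdateFetchWindow checks that resetting the session
// keeps the fetch window and that GetMoreForFilterSession returns the next
// window at the expected offset.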
func TestService_IncrementalUpdateFetchWindow(t *testing.T) {
	state := setupTestService(t)
	defer state.close()

	transactionCount := 5
	allAddresses, pendings, ch, cleanup := setupTransactions(t, state, transactionCount, []transactions.TestTxSummary{{DontConfirm: true, Timestamp: transactionCount + 1}})
	defer cleanup()

	sessionID := state.service.StartFilterSession(allAddresses, allNetworksFilter(), Filter{}, 2)
	require.Greater(t, sessionID, SessionID(0))
	defer state.service.StopFilterSession(sessionID)

	filterResponseCount := validateFilteringDone(t, ch, 2, nil, nil)

	exp := pendings[0]
	err := state.pendingTracker.StoreAndTrackPendingTx(&exp)
	require.NoError(t, err)

	vFn := getValidateSessionUpdateHasNewOnTopFn(t)
	pendingTransactionUpdate, sessionUpdatesCount := validateSessionUpdateEvent(t, ch, &filterResponseCount, 1, vFn)

	err = state.service.ResetFilterSession(sessionID, 2)
	require.NoError(t, err)

	// Validate the reset data
	eventActivityDoneCount := validateFilteringDone(t, ch, 2, func(payload FilterResponse) {
		require.True(t, payload.Activities[0].isNew)
		require.Equal(t, int64(transactionCount+1), payload.Activities[0].timestamp)
		require.False(t, payload.Activities[1].isNew)
		require.Equal(t, int64(transactionCount), payload.Activities[1].timestamp)
	}, nil)

	require.Equal(t, 1, pendingTransactionUpdate)
	require.Equal(t, 1, filterResponseCount)
	require.Equal(t, 1, sessionUpdatesCount)
	require.Equal(t, 1, eventActivityDoneCount)

	err = state.service.GetMoreForFilterSession(sessionID, 2)
	require.NoError(t, err)

	eventActivityDoneCount = validateFilteringDone(t, ch, 2, func(payload FilterResponse) {
		require.False(t, payload.Activities[0].isNew)
		require.Equal(t, int64(transactionCount-1), payload.Activities[0].timestamp)
		require.False(t, payload.Activities[1].isNew)
		require.Equal(t, int64(transactionCount-2), payload.Activities[1].timestamp)
	}, common.NewAndSet(extraExpect{common.NewAndSet(2), nil}))
	require.Equal(t, 1, eventActivityDoneCount)
}

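// TestService_IncrementalUpdateFetchWindowNoReset checks that fetching the
// next window without resetting the session is not affected by a pending
// transaction that arrived in the meantime.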
func TestService_IncrementalUpdateFetchWindowNoReset(t *testing.T) {
	state := setupTestService(t)
	defer state.close()

	transactionCount := 5
	allAddresses, pendings, ch, cleanup := setupTransactions(t, state, transactionCount, []transactions.TestTxSummary{{DontConfirm: true, Timestamp: transactionCount + 1}})
	defer cleanup()

	sessionID := state.service.StartFilterSession(allAddresses, allNetworksFilter(), Filter{}, 2)
	require.Greater(t, sessionID, SessionID(0))
	defer state.service.StopFilterSession(sessionID)

	filterResponseCount := validateFilteringDone(t, ch, 2, func(payload FilterResponse) {
		require.Equal(t, int64(transactionCount), payload.Activities[0].timestamp)
		require.Equal(t, int64(transactionCount-1), payload.Activities[1].timestamp)
	}, nil)

	exp := pendings[0]
	err := state.pendingTracker.StoreAndTrackPendingTx(&exp)
	require.NoError(t, err)

	vFn := getValidateSessionUpdateHasNewOnTopFn(t)
	pendingTransactionUpdate, sessionUpdatesCount := validateSessionUpdateEvent(t, ch, &filterResponseCount, 1, vFn)
	require.Equal(t, 1, pendingTransactionUpdate)
	require.Equal(t, 1, filterResponseCount)
	require.Equal(t, 1, sessionUpdatesCount)

	err = state.service.GetMoreForFilterSession(sessionID, 2)
	require.NoError(t, err)

	// Validate that the client continues loading the next window without being affected by the internal new-entries state
	eventActivityDoneCount := validateFilteringDone(t, ch, 2, func(payload FilterResponse) {
		require.False(t, payload.Activities[0].isNew)
		require.Equal(t, int64(transactionCount-2), payload.Activities[0].timestamp)
		require.False(t, payload.Activities[1].isNew)
		require.Equal(t, int64(transactionCount-3), payload.Activities[1].timestamp)
	}, common.NewAndSet(extraExpect{common.NewAndSet(2), nil}))
	require.Equal(t, 1, eventActivityDoneCount)
}

// Simulate and validate a multi-step user flow that was also a regression in the original implementation
func TestService_FilteredIncrementalUpdateResetAndClear(t *testing.T) {
	state := setupTestService(t)
	defer state.close()

	transactionCount := 5
	allAddresses, pendings, ch, cleanup := setupTransactions(t, state, transactionCount, []transactions.TestTxSummary{{DontConfirm: true, Timestamp: transactionCount + 1}})
	defer cleanup()

	// Generate new transaction for step 5
	newOffset := transactionCount + 2
	newTxs, newFromTrs, newToTrs := transfer.GenerateTestTransfers(t, state.service.db, newOffset, 1)
	allAddresses = append(append(allAddresses, newFromTrs...), newToTrs...)

	// 1. User visualizes transactions for the first time
	sessionID := state.service.StartFilterSession(allAddresses, allNetworksFilter(), Filter{}, 4)
	require.Greater(t, sessionID, SessionID(0))
	defer state.service.StopFilterSession(sessionID)

	validateFilteringDone(t, ch, 4, nil, nil)

	// 2. User applies a filter for pending transactions
	err := state.service.UpdateFilterForSession(sessionID, Filter{Statuses: []Status{PendingAS}}, 4)
	require.NoError(t, err)

	filterResponseCount := validateFilteringDone(t, ch, 0, nil, nil)

	// 3. A pending transaction is added
	exp := pendings[0]
	err = state.pendingTracker.StoreAndTrackPendingTx(&exp)
	require.NoError(t, err)

	vFn := getValidateSessionUpdateHasNewOnTopFn(t)
	pendingTransactionUpdate, sessionUpdatesCount := validateSessionUpdateEvent(t, ch, &filterResponseCount, 1, vFn)

	// 4. User resets the view and the new pending transaction has the new flag
	err = state.service.ResetFilterSession(sessionID, 2)
	require.NoError(t, err)

	// Validate the reset data
	eventActivityDoneCount := validateFilteringDone(t, ch, 1, func(payload FilterResponse) {
		require.True(t, payload.Activities[0].isNew)
		require.Equal(t, int64(transactionCount+1), payload.Activities[0].timestamp)
	}, nil)

	require.Equal(t, 1, pendingTransactionUpdate)
	require.Equal(t, 1, filterResponseCount)
	require.Equal(t, 1, sessionUpdatesCount)
	require.Equal(t, 1, eventActivityDoneCount)

	// 5. A new transaction is downloaded
	transfer.InsertTestTransfer(t, state.service.db, newTxs[0].To, &newTxs[0])

	// 6. User clears the filter and only the new transaction should have the new flag
	err = state.service.UpdateFilterForSession(sessionID, Filter{}, 4)
	require.NoError(t, err)

	eventActivityDoneCount = validateFilteringDone(t, ch, 4, func(payload FilterResponse) {
		require.True(t, payload.Activities[0].isNew)
		require.Equal(t, int64(newOffset), payload.Activities[0].timestamp)
		require.False(t, payload.Activities[1].isNew)
		require.Equal(t, int64(newOffset-1), payload.Activities[1].timestamp)
		require.False(t, payload.Activities[2].isNew)
		require.Equal(t, int64(newOffset-2), payload.Activities[2].timestamp)
		require.False(t, payload.Activities[3].isNew)
		require.Equal(t, int64(newOffset-3), payload.Activities[3].timestamp)
	}, nil)
	require.Equal(t, 1, eventActivityDoneCount)
}