[#9927] Fast blocks sync after being offline
Front end changes: As there is no guarantee that `newblock` events will be dispatched consecutively (e.g. if there was a delay after block #1, the next event might be dispatched for block #2000; there is no guarantee that block #2 will be the next one), a `newTransactions` field was added with a map of accounts to the number of new transactions received up to this block. As a result, if there are new transactions we request them all from the db instead of fetching them only for the block specified in the `newblock` event, as was done previously. Back end changes: - In order to avoid handling of reorganized blocks, we use an offset from the latest known block when we start listening to new blocks. Before this commit the offset was 15 blocks for all networks. This offset is too big for mainnet and causes a noticeable delay in marking a transfer as confirmed in Status (compared to etherscan). So it was changed to 5 blocks on mainnet and is still 15 blocks on other networks. - Also, before this commit all new blocks were handled one by one with a network-specific interval (10s for mainnet), which means that in case of a lost internet connection or application suspension (happens on iOS) receiving of new blocks would be paused and then resumed at the same "speed" — 1 block per 10s. If that pause is big enough, the application would never catch up with the latest block in the network, and this also causes the state of transfers to be delayed in the application. With this commit, if there was more than a 40s delay after receiving the previous block, the whole history in the range between the previously received block and the ("latest" - reorgSafetyDepth) block is checked at once, and the app catches up with the recent state of the chain.
This commit is contained in:
parent
58afae07ba
commit
b41df2f2fc
|
@ -25,14 +25,15 @@
|
|||
{:db (assoc-in db [:ethereum/subscriptions id] handler)})
|
||||
|
||||
(fx/defn new-block
|
||||
[{:keys [db] :as cofx} historical? block-number accounts]
|
||||
[{:keys [db] :as cofx} historical? block-number accounts transactions-per-account]
|
||||
(let [{:keys [:wallet/all-tokens]} db
|
||||
chain (ethereum/chain-keyword db)
|
||||
chain-tokens (into {} (map (juxt :address identity)
|
||||
(tokens/tokens-for all-tokens chain)))]
|
||||
(log/debug "[wallet-subs] new-block"
|
||||
"accounts" accounts
|
||||
"block" block-number)
|
||||
"block" block-number
|
||||
"transactions-per-account" transactions-per-account)
|
||||
(fx/merge cofx
|
||||
(cond-> {}
|
||||
(not historical?)
|
||||
|
@ -41,10 +42,10 @@
|
|||
;;NOTE only get transfers if the new block contains some
|
||||
;; from/to one of the multiaccount accounts
|
||||
(not-empty accounts)
|
||||
(assoc :transactions/get-transfers-from-block
|
||||
(assoc :transactions/get-transfers
|
||||
{:chain-tokens chain-tokens
|
||||
:addresses accounts
|
||||
:block block-number
|
||||
:before-block block-number
|
||||
:historical? historical?}))
|
||||
(transactions/check-watched-transactions))))
|
||||
|
||||
|
@ -91,12 +92,12 @@
|
|||
:historical? true}}))
|
||||
|
||||
(fx/defn new-wallet-event
|
||||
[cofx {:keys [type blockNumber accounts] :as event}]
|
||||
[cofx {:keys [type blockNumber accounts newTransactions] :as event}]
|
||||
(log/debug "[wallet-subs] new-wallet-event"
|
||||
"event-type" type)
|
||||
(case type
|
||||
"newblock" (new-block cofx false blockNumber accounts)
|
||||
"history" (new-block cofx true blockNumber accounts)
|
||||
"newblock" (new-block cofx false blockNumber accounts newTransactions)
|
||||
"history" (new-block cofx true blockNumber accounts nil)
|
||||
"reorg" (reorg cofx event)
|
||||
"recent-history-fetching" (recent-history-fetching-started cofx accounts)
|
||||
"recent-history-ready" (recent-history-fetching-ended cofx event)
|
||||
|
|
|
@ -231,18 +231,22 @@
|
|||
|
||||
(re-frame/reg-fx
|
||||
:transactions/get-transfers
|
||||
(fn [{:keys [chain-tokens addresses before-block page-size]
|
||||
(fn [{:keys [chain-tokens addresses before-block page-size
|
||||
transactions-per-address]
|
||||
:as params
|
||||
:or {page-size 0}}]
|
||||
:or {page-size 20}}]
|
||||
{:pre [(cljs.spec.alpha/valid?
|
||||
(cljs.spec.alpha/coll-of string?)
|
||||
addresses)]}
|
||||
(log/debug "[transactions] get-transfers"
|
||||
"addresses" addresses
|
||||
"block" before-block
|
||||
"page-size" page-size)
|
||||
"page-size" page-size
|
||||
"transactions-per-address" transactions-per-address)
|
||||
(when before-block
|
||||
(doseq [address addresses]
|
||||
(let [page-size (or (get transactions-per-address address)
|
||||
page-size)]
|
||||
(json-rpc/call
|
||||
{:method "wallet_getTransfersByAddress"
|
||||
:params [address (encode/uint before-block) (encode/uint page-size)]
|
||||
|
@ -250,7 +254,7 @@
|
|||
[::new-transfers
|
||||
(enrich-transfers chain-tokens %)
|
||||
(assoc params :address address)])
|
||||
:on-error #(re-frame/dispatch [::tx-fetching-failed address])})))))
|
||||
:on-error #(re-frame/dispatch [::tx-fetching-failed address])}))))))
|
||||
|
||||
(fx/defn initialize
|
||||
[{:keys [db]} addresses]
|
||||
|
@ -261,7 +265,6 @@
|
|||
{:transactions/get-transfers
|
||||
{:chain-tokens chain-tokens
|
||||
:addresses (map eip55/address->checksum addresses)
|
||||
:page-size 20
|
||||
:historical? true}}))
|
||||
|
||||
(fx/defn fetch-more-tx
|
||||
|
@ -282,6 +285,5 @@
|
|||
{:chain-tokens chain-tokens
|
||||
:addresses [address]
|
||||
:before-block min-known-block
|
||||
:page-size 20
|
||||
:historical? true}}
|
||||
(tx-fetching-in-progress [address]))))
|
||||
|
|
|
@ -2,7 +2,7 @@
|
|||
"_comment": "DO NOT EDIT THIS FILE BY HAND. USE 'scripts/update-status-go.sh <tag>' instead",
|
||||
"owner": "status-im",
|
||||
"repo": "status-go",
|
||||
"version": "v0.41.0",
|
||||
"commit-sha1": "8931b14c4e40261e6161a3c776bf16cc0fdd1f3a",
|
||||
"src-sha256": "16r3xbzhh3fljpx11yahm1r5xh4cyvggy5avg6smkpw9m93cb6f7"
|
||||
"version": "v0.41.1",
|
||||
"commit-sha1": "c2f22f1fbc73e68b8d82370c4645c786739fb40b",
|
||||
"src-sha256": "038paj2gwdih154nnafcxc3w02x9p21ifkv1g9k2rr1ac0ypainb"
|
||||
}
|
||||
|
|
Loading…
Reference in New Issue