fix(@desktop/onboarding): onboarding flows for `I already use Status` path do not match Figma requirements (2/2)
Continuation: the desktop app is updated after we decided to remove the 30-second waiting time from `status-go` and introduce it on the desktop app side.
This commit is contained in:
parent 19ba018a1d
commit 6b6aaf3af1
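For context, the approach behind the change is a client-side countdown: instead of `status-go` blocking for 30 seconds, the desktop app repeatedly arms a short background timer until the requested timeout has elapsed, so it can react (or shut down) between steps. A minimal, self-contained sketch of that idea follows; it is not the app code, and `shouldAbort` is a hypothetical stand-in for the app's "closing" check:

import os

proc waitWithChecks(timeoutMs: int, shouldAbort: proc (): bool): bool =
  ## Waits up to `timeoutMs`, but wakes every second so the caller can
  ## abort early (e.g. when the app is closing). Returns true when the
  ## full timeout elapsed, false when aborted.
  const stepMs = 1000
  var remaining = timeoutMs
  while remaining > 0:
    if shouldAbort():
      return false
    sleep(stepMs)        # block this thread for at most one step
    remaining -= stepMs
  result = true

# Example: discard waitWithChecks(30_000, proc (): bool = false)

The diff below implements the same idea with the existing threadpool/QObject slot machinery instead of a blocking loop.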
@@ -134,7 +134,7 @@ proc newAppController*(statusFoundation: StatusFoundation): AppController =
   result.globalUtilsVariant = newQVariant(singletonInstance.utils)
 
   # Services
-  result.generalService = general_service.newService()
+  result.generalService = general_service.newService(statusFoundation.events, statusFoundation.threadpool)
   result.activityCenterService = activity_center_service.newService(statusFoundation.events, statusFoundation.threadpool)
   result.keycardService = keycard_service.newService(statusFoundation.events, statusFoundation.threadpool)
   result.nodeConfigurationService = node_configuration_service.newService(statusFoundation.fleetConfiguration,
@@ -12,7 +12,7 @@ proc fromEvent*(T: type WakuFetchingBackupProgressSignal, event: JsonNode): Waku
   result = WakuFetchingBackupProgressSignal()
   result.fetchingBackupProgress = initTable[string, WakuFetchingBackupProgress]()
 
-  if event["event"]["fetchingBackedUpDataProgress"].kind == JObject:
+  if event["event"].hasKey("fetchingBackedUpDataProgress") and event["event"]{"fetchingBackedUpDataProgress"}.kind == JObject:
     for key in event["event"]["fetchingBackedUpDataProgress"].keys:
       let entity = event["event"]["fetchingBackedUpDataProgress"][key]
       var details = WakuFetchingBackupProgress()
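The rewritten guard matters because Nim's std/json `[]` accessor raises KeyError when a key is absent, while `{}` returns nil instead, so combining hasKey with `{}` keeps the kind check safe. A small standalone illustration with a hypothetical payload (not an actual status-go signal):

import json

let event = parseJson("""{"event": {}}""")

# event["event"]["fetchingBackedUpDataProgress"] would raise KeyError here;
# checking hasKey first and reading with `{}` avoids that:
if event["event"].hasKey("fetchingBackedUpDataProgress") and
    event["event"]{"fetchingBackedUpDataProgress"}.kind == JObject:
  echo "progress object present"
else:
  echo "no progress reported yet"   # this branch runs for the payload above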
@@ -107,6 +107,12 @@ proc connectToFetchingFromWakuEvents*(self: Controller) =
       self.delegate.onFetchingFromWakuMessageReceived(k, v.totalNumber, v.dataNumber)
   self.connectionIds.add(handlerId)
 
+proc connectToTimeoutEventAndStratTimer*(self: Controller, timeoutInMilliseconds: int) =
+  var handlerId = self.events.onWithUUID(SIGNAL_GENERAL_TIMEOUT) do(e: Args):
+    self.delegate.moveToStartupState()
+  self.connectionIds.add(handlerId)
+  self.generalService.runTimer(timeoutInMilliseconds)
+
 proc disconnect*(self: Controller) =
   self.disconnectKeychain()
   for id in self.connectionIds:
@@ -323,6 +323,13 @@ proc prepareAndInitFetchingData[T](self: Module[T]) =
     FetchingFromWakuSettings]
   self.view.createAndInitFetchingDataModel(listOfEntitiesWeExpectToBeSynced)
 
+proc delayStartingApp[T](self: Module[T]) =
+  ## In the following 2 cases:
+  ## - FlowType.FirstRunOldUserImportSeedPhrase
+  ## - FlowType.FirstRunOldUserKeycardImport
+  ## we want to delay app start just to be sure that messages from waku will be received
+  self.controller.connectToTimeoutEventAndStratTimer(timeoutInMilliseconds = 30000) # delay for 30 seconds
+
 proc logoutAndDisplayError[T](self: Module[T], error: string) =
   self.delegate.logout()
   self.moveToStartupState()
@@ -340,7 +347,7 @@ method onNodeLogin*[T](self: Module[T], error: string) =
       self.prepareAndInitFetchingData()
       self.controller.connectToFetchingFromWakuEvents()
       self.view.setCurrentStartupState(newProfileFetchingState(currStateObj.flowType(), nil))
-      self.moveToStartupState()
+      self.delayStartingApp()
   let err = self.delegate.userLoggedIn()
   if err.len > 0:
     self.logoutAndDisplayError(err)
@@ -27,9 +27,7 @@ type AccountDto* = object
 
 type WakuBackedUpProfileDto* = object
   displayName*: string
-  displayNameStored*: bool
   images*: seq[Image]
-  imagesStored*: bool
 
 proc isValid*(self: AccountDto): bool =
   result = self.name.len > 0 and self.keyUid.len > 0
@@ -70,8 +68,6 @@ proc contains*(accounts: seq[AccountDto], keyUid: string): bool =
 proc toWakuBackedUpProfileDto*(jsonObj: JsonNode): WakuBackedUpProfileDto =
   result = WakuBackedUpProfileDto()
   discard jsonObj.getProp("displayName", result.displayName)
-  discard jsonObj.getProp("displayNameStored", result.displayNameStored)
-  discard jsonObj.getProp("imagesStored", result.imagesStored)
 
   var imagesObj: JsonNode
   if(jsonObj.getProp("images", imagesObj) and imagesObj.kind == JArray):
@@ -93,10 +93,8 @@ QtObject:
   proc connectToFetchingFromWakuEvents*(self: Service) =
     self.events.on(SignalType.WakuBackedUpProfile.event) do(e: Args):
       var receivedData = WakuBackedUpProfileSignal(e)
-      if receivedData.backedUpProfile.displayNameStored:
-        self.loggedInAccount.name = receivedData.backedUpProfile.displayName
-      if receivedData.backedUpProfile.imagesStored:
-        self.loggedInAccount.images = receivedData.backedUpProfile.images
+      self.loggedInAccount.name = receivedData.backedUpProfile.displayName
+      self.loggedInAccount.images = receivedData.backedUpProfile.images
 
   proc init*(self: Service) =
     try:
@@ -39,17 +39,4 @@ const lookupContactTask: Task = proc(argEncoded: string) {.gcsafe, nimcall.} =
     "uuid": arg.uuid,
     "reason": arg.reason
   }
   arg.finish(output)
-
-#################################################
-# Async timer
-#################################################
-
-type
-  TimerTaskArg = ref object of QObjectTaskArg
-    timeoutInMilliseconds: int
-
-const timerTask: Task = proc(argEncoded: string) {.gcsafe, nimcall.} =
-  let arg = decode[TimerTaskArg](argEncoded)
-  sleep(arg.timeoutInMilliseconds)
-  arg.finish("done")
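This duplicated "Async timer" block (and the second copy removed further down) is dropped in favour of the shared `include ../../common/async_tasks` that the reworked general service and the keycard service now pull in. That shared file is not shown in this commit; presumably it carries essentially the same task as the removed copies, along these lines:

# Assumed contents of the shared async_tasks include, reconstructed from the
# removed duplicates in this diff (not shown verbatim in the commit):
type
  TimerTaskArg = ref object of QObjectTaskArg
    timeoutInMilliseconds: int

const timerTask: Task = proc(argEncoded: string) {.gcsafe, nimcall.} =
  let arg = decode[TimerTaskArg](argEncoded)
  sleep(arg.timeoutInMilliseconds)
  arg.finish("done")   # one removed copy finished with "done", the other with ""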
@@ -1,52 +1,87 @@
-import os, json, chronicles
+import NimQml, os, json, chronicles
 
 import ../../../backend/general as status_general
+import ../../../app/core/eventemitter
+import ../../../app/core/tasks/[qt, threadpool]
 import ../../../constants as app_constants
 
 import ../accounts/dto/accounts
 
+const TimerIntervalInMilliseconds = 1000 # 1 second
+
+const SIGNAL_GENERAL_TIMEOUT* = "timeoutSignal"
+
 logScope:
   topics = "general-app-service"
 
-type
-  Service* = ref object of RootObj
+include ../../common/async_tasks
 
-proc delete*(self: Service) =
-  discard
+QtObject:
+  type Service* = ref object of QObject
+    events: EventEmitter
+    threadpool: ThreadPool
+    timeoutInMilliseconds: int
 
-proc newService*(): Service =
-  result = Service()
+  proc delete*(self: Service) =
+    self.QObject.delete
 
-proc init*(self: Service) =
-  if not existsDir(app_constants.ROOTKEYSTOREDIR):
-    createDir(app_constants.ROOTKEYSTOREDIR)
+  proc newService*(events: EventEmitter, threadpool: ThreadPool): Service =
+    new(result, delete)
+    result.QObject.setup
+    result.events = events
+    result.threadpool = threadpool
 
-proc startMessenger*(self: Service) =
-  discard status_general.startMessenger()
+  proc init*(self: Service) =
+    if not existsDir(app_constants.ROOTKEYSTOREDIR):
+      createDir(app_constants.ROOTKEYSTOREDIR)
 
-proc logout*(self: Service) =
-  discard status_general.logout()
+  proc startMessenger*(self: Service) =
+    discard status_general.startMessenger()
 
-proc getPasswordStrengthScore*(self: Service, password, userName: string): int =
-  try:
-    let response = status_general.getPasswordStrengthScore(password, @[userName])
-    if(response.result.contains("error")):
-      let errMsg = response.result["error"].getStr()
-      error "error: ", methodName="getPasswordStrengthScore", errDesription = errMsg
-      return
+  proc logout*(self: Service) =
+    discard status_general.logout()
 
-    return response.result["score"].getInt()
-  except Exception as e:
-    error "error: ", methodName="getPasswordStrengthScore", errName = e.name, errDesription = e.msg
+  proc getPasswordStrengthScore*(self: Service, password, userName: string): int =
+    try:
+      let response = status_general.getPasswordStrengthScore(password, @[userName])
+      if(response.result.contains("error")):
+        let errMsg = response.result["error"].getStr()
+        error "error: ", methodName="getPasswordStrengthScore", errDesription = errMsg
+        return
 
-proc generateImages*(self: Service, image: string, aX: int, aY: int, bX: int, bY: int): seq[Image] =
-  try:
-    let response = status_general.generateImages(image, aX, aY, bX, bY)
-    if(response.result.kind != JArray):
-      error "error: ", procName="generateImages", errDesription = "response is not an array"
-      return
+      return response.result["score"].getInt()
+    except Exception as e:
+      error "error: ", methodName="getPasswordStrengthScore", errName = e.name, errDesription = e.msg
 
-    for img in response.result:
-      result.add(toImage(img))
-  except Exception as e:
-    error "error: ", procName="generateImages", errName = e.name, errDesription = e.msg
+  proc generateImages*(self: Service, image: string, aX: int, aY: int, bX: int, bY: int): seq[Image] =
+    try:
+      let response = status_general.generateImages(image, aX, aY, bX, bY)
+      if(response.result.kind != JArray):
+        error "error: ", procName="generateImages", errDesription = "response is not an array"
+        return
+
+      for img in response.result:
+        result.add(toImage(img))
+    except Exception as e:
+      error "error: ", procName="generateImages", errName = e.name, errDesription = e.msg
+
+  proc runTimer(self: Service) =
+    let arg = TimerTaskArg(
+      tptr: cast[ByteAddress](timerTask),
+      vptr: cast[ByteAddress](self.vptr),
+      slot: "onTimeout",
+      timeoutInMilliseconds: TimerIntervalInMilliseconds
+    )
+    self.threadpool.start(arg)
+
+  proc runTimer*(self: Service, timeoutInMilliseconds: int) =
+    ## Runs timer only once. Each 1000ms we check for timeout in order to have non blocking app closing.
+    self.timeoutInMilliseconds = timeoutInMilliseconds
+    self.runTimer()
+
+  proc onTimeout(self: Service, response: string) {.slot.} =
+    self.timeoutInMilliseconds = self.timeoutInMilliseconds - TimerIntervalInMilliseconds
+    if self.timeoutInMilliseconds <= 0:
+      self.events.emit(SIGNAL_GENERAL_TIMEOUT, Args())
+    else:
+      self.runTimer()
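For completeness, a sketch of how a caller is expected to drive the reworked service, mirroring the controller change earlier in this diff; the surrounding proc and the handler body are illustrative only, not part of the commit:

proc startFetchTimeout(events: EventEmitter, generalService: Service) =
  # React once the accumulated 1 s steps add up to the requested timeout.
  discard events.onWithUUID(SIGNAL_GENERAL_TIMEOUT) do(e: Args):
    echo "30 s passed without all waku data - continue into the app"
  # Arms the first 1 s task; onTimeout keeps re-arming it until 30 000 ms are spent.
  generalService.runTimer(timeoutInMilliseconds = 30000)

Because each background task sleeps only one second before calling back into the onTimeout slot, closing the app mid-wait never blocks for more than about a second.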
@@ -46,7 +46,7 @@ logScope:
 include ../../common/json_utils
 include ../../common/mnemonics
 include internal
-include async_tasks
+include ../../common/async_tasks
 
 type
   KeycardArgs* = ref object of Args
@@ -63,16 +63,13 @@ QtObject:
     setPayloadForCurrentFlow: JsonNode
     doLogging: bool
 
-  proc setup(self: Service) =
-    self.QObject.setup
-
   proc delete*(self: Service) =
     self.closingApp = true
     self.QObject.delete
 
   proc newService*(events: EventEmitter, threadpool: ThreadPool): Service =
-    new(result)
-    result.setup()
+    new(result, delete)
+    result.QObject.setup
     result.events = events
     result.threadpool = threadpool
     result.closingApp = false
@@ -90,19 +90,6 @@ const fetchDerivedAddressDetailsTask*: Task = proc(argEncoded: string) {.gcsafe,
       data["error"] = %* err
     arg.finish(data)
 
-#################################################
-# Async timer
-#################################################
-
-type
-  TimerTaskArg = ref object of QObjectTaskArg
-    timeoutInMilliseconds: int
-
-const timerTask: Task = proc(argEncoded: string) {.gcsafe, nimcall.} =
-  let arg = decode[TimerTaskArg](argEncoded)
-  sleep(arg.timeoutInMilliseconds)
-  arg.finish("")
-
 #################################################
 # Async building token
 #################################################