feat(rln-relay-v2): update C FFI APIs and serde (#2385)

* feat(rln-relay-v2): integrate new ffi bindings, serde

* chore: remove ExtendedRateLimitProof, add comments

* fix: typo
Author: Aaryamann Challani, 2024-02-02 00:26:47 +05:30 (committed by GitHub)
Commit: b88facd0b7 (parent: 59d8b6204f)
6 changed files with 251 additions and 99 deletions

View File

@@ -1,6 +1,9 @@
 import
   stint
+import
+  ./protocol_types
 import
   ../waku_keystore
@@ -15,7 +18,8 @@ const
 # inputs of the membership contract constructor
 # TODO may be able to make these constants private and put them inside the waku_rln_relay_utils
 const
-  MembershipFee* = 1000000000000000.u256 # in wei
+  MembershipFee* = 0.u256
   # the current implementation of the rln lib supports a circuit for Merkle tree with depth 20
   MerkleTreeDepth* = 20
   EthClient* = "http://127.0.0.1:8540"
@@ -29,6 +33,14 @@ const
 const
   DefaultRlnTreePath* = "rln_tree.db"

+when defined(rln_v2):
+  const
+    # pre-processed "rln/waku-rln-relay/v2.0.0" to array[32, byte]
+    DefaultRlnIdentifier*: RlnIdentifier = [114, 108, 110, 47, 119, 97, 107, 117,
+                                            45, 114, 108, 110, 45, 114, 101, 108,
+                                            97, 121, 47, 118, 50, 46, 48, 46,
+                                            48, 0, 0, 0, 0, 0, 0, 0]
+
 # temporary variables to test waku-rln-relay performance in the static group mode
 const
   StaticGroupSize* = 10000
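
The hard-coded DefaultRlnIdentifier above is simply the ASCII encoding of "rln/waku-rln-relay/v2.0.0" right-padded with zeroes to 32 bytes. A minimal sketch of that pre-processing step (illustrative only, not part of the diff):

# Sketch: derive a 32-byte RLN identifier from a version string (illustrative).
proc toRlnIdentifier(s: string): array[32, byte] =
  doAssert s.len <= 32, "identifier must fit into 32 bytes"
  for i, c in s:
    result[i] = byte(c)  # remaining bytes stay zero-initialised

when isMainModule:
  # reproduces the pre-processed constant in the hunk above
  echo @(toRlnIdentifier("rln/waku-rln-relay/v2.0.0"))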

View File

@@ -53,18 +53,36 @@ proc encodeLengthPrefix*(input: openArray[byte]): seq[byte] =
   output = concat(@len, @input)
   return output

-proc serialize*(idSecretHash: IdentitySecretHash,
-                memIndex: MembershipIndex,
-                epoch: Epoch,
-                msg: openArray[byte]): seq[byte] =
-  ## a private proc to convert RateLimitProof and the data to a byte seq
-  ## this conversion is used in the proofGen proc
-  ## the serialization is done as instructed in https://github.com/kilic/rln/blob/7ac74183f8b69b399e3bc96c1ae8ab61c026dc43/src/public.rs#L146
-  ## [ id_key<32> | id_index<8> | epoch<32> | signal_len<8> | signal<var> ]
-  let memIndexBytes = toBytes(uint64(memIndex), Endianness.littleEndian)
-  let lenPrefMsg = encodeLengthPrefix(msg)
-  let output = concat(@idSecretHash, @memIndexBytes, @epoch, lenPrefMsg)
-  return output
+when defined(rln_v2):
+  proc serialize*(idSecretHash: IdentitySecretHash,
+                  memIndex: MembershipIndex,
+                  userMessageLimit: UserMessageLimit,
+                  messageId: MessageId,
+                  externalNullifier: ExternalNullifier,
+                  msg: openArray[byte]): seq[byte] =
+    ## a private proc to convert RateLimitProof and the data to a byte seq
+    ## this conversion is used in the proofGen proc
+    ## the serialization is done as instructed in https://github.com/kilic/rln/blob/7ac74183f8b69b399e3bc96c1ae8ab61c026dc43/src/public.rs#L146
+    ## [ id_key<32> | id_index<8> | epoch<32> | signal_len<8> | signal<var> ]
+    let memIndexBytes = toBytes(uint64(memIndex), Endianness.littleEndian)
+    let userMessageLimitBytes = toBytes(uint64(userMessageLimit), Endianness.littleEndian)
+    let messageIdBytes = toBytes(uint64(messageId), Endianness.littleEndian)
+    let lenPrefMsg = encodeLengthPrefix(msg)
+    let output = concat(@idSecretHash, @memIndexBytes, @userMessageLimitBytes, @messageIdBytes, @externalNullifier, lenPrefMsg)
+    return output
+else:
+  proc serialize*(idSecretHash: IdentitySecretHash,
+                  memIndex: MembershipIndex,
+                  epoch: Epoch,
+                  msg: openArray[byte]): seq[byte] =
+    ## a private proc to convert RateLimitProof and the data to a byte seq
+    ## this conversion is used in the proofGen proc
+    ## the serialization is done as instructed in https://github.com/kilic/rln/blob/7ac74183f8b69b399e3bc96c1ae8ab61c026dc43/src/public.rs#L146
+    ## [ id_key<32> | id_index<8> | epoch<32> | signal_len<8> | signal<var> ]
+    let memIndexBytes = toBytes(uint64(memIndex), Endianness.littleEndian)
+    let lenPrefMsg = encodeLengthPrefix(msg)
+    let output = concat(@idSecretHash, @memIndexBytes, @epoch, lenPrefMsg)
+    return output

 proc serialize*(proof: RateLimitProof, data: openArray[byte]): seq[byte] =
@@ -72,14 +90,23 @@ proc serialize*(proof: RateLimitProof, data: openArray[byte]): seq[byte] =
   ## this conversion is used in the proof verification proc
   ## [ proof<128> | root<32> | epoch<32> | share_x<32> | share_y<32> | nullifier<32> | rln_identifier<32> | signal_len<8> | signal<var> ]
   let lenPrefMsg = encodeLengthPrefix(@data)
-  var proofBytes = concat(@(proof.proof),
-                          @(proof.merkleRoot),
-                          @(proof.epoch),
-                          @(proof.shareX),
-                          @(proof.shareY),
-                          @(proof.nullifier),
-                          @(proof.rlnIdentifier),
-                          lenPrefMsg)
+  when defined(rln_v2):
+    var proofBytes = concat(@(proof.proof),
+                            @(proof.merkleRoot),
+                            @(proof.externalNullifier),
+                            @(proof.shareX),
+                            @(proof.shareY),
+                            @(proof.nullifier),
+                            lenPrefMsg)
+  else:
+    var proofBytes = concat(@(proof.proof),
+                            @(proof.merkleRoot),
+                            @(proof.epoch),
+                            @(proof.shareX),
+                            @(proof.shareY),
+                            @(proof.nullifier),
+                            @(proof.rlnIdentifier),
+                            lenPrefMsg)

   return proofBytes
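
As a rough cross-check of the rln_v2 serialize above (a sketch assuming encodeLengthPrefix emits an 8-byte little-endian length, as the v1 layout comments indicate), the size of the proof-generation input works out as follows:

# Sketch: expected size of the rln_v2 proof-generation input produced by the
# serialize proc above, for a signal of a given length (illustrative only).
proc expectedV2InputLen(signalLen: int): int =
  const
    idSecretHashLen = 32       # IdentitySecretHash
    memIndexLen = 8            # MembershipIndex, little-endian uint64
    userMessageLimitLen = 8    # UserMessageLimit, little-endian uint64
    messageIdLen = 8           # MessageId, little-endian uint64
    externalNullifierLen = 32  # ExternalNullifier
    lenPrefixLen = 8           # signal_len prefix from encodeLengthPrefix
  idSecretHashLen + memIndexLen + userMessageLimitLen + messageIdLen +
    externalNullifierLen + lenPrefixLen + signalLen

when isMainModule:
  doAssert expectedV2InputLen(0) == 96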

View File

@@ -135,14 +135,26 @@ template slideRootQueue*(g: GroupManager): untyped =
     discard rootBuffer.slideRootQueue(root)
   rootBuffer

-method verifyProof*(g: GroupManager,
-                    input: openArray[byte],
-                    proof: RateLimitProof): GroupManagerResult[bool] {.base,gcsafe,raises:[].} =
-  ## verifies the proof against the input and the current merkle root
-  let proofVerifyRes = g.rlnInstance.proofVerify(input, proof, g.validRoots.items().toSeq())
-  if proofVerifyRes.isErr():
-    return err("proof verification failed: " & $proofVerifyRes.error())
-  return ok(proofVerifyRes.value())
+when defined(rln_v2):
+  method verifyProof*(g: GroupManager,
+                      input: openArray[byte],
+                      proof: RateLimitProof): GroupManagerResult[bool] {.base,gcsafe,raises:[].} =
+    ## verifies the proof against the input and the current merkle root
+    ## TODO: verify the external nullifier with provided RateLimitProof
+    let proofVerifyRes = g.rlnInstance.proofVerify(input, RateLimitProof(proof), g.validRoots.items().toSeq())
+    if proofVerifyRes.isErr():
+      return err("proof verification failed: " & $proofVerifyRes.error())
+    return ok(proofVerifyRes.value())
+else:
+  method verifyProof*(g: GroupManager,
+                      input: openArray[byte],
+                      proof: RateLimitProof): GroupManagerResult[bool] {.base,gcsafe,raises:[].} =
+    ## verifies the proof against the input and the current merkle root
+    let proofVerifyRes = g.rlnInstance.proofVerify(input, proof, g.validRoots.items().toSeq())
+    if proofVerifyRes.isErr():
+      return err("proof verification failed: " & $proofVerifyRes.error())
+    return ok(proofVerifyRes.value())

 method generateProof*(g: GroupManager,
                       data: openArray[byte],
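
verifyProof keeps the same signature under both build flags, so callers are unaffected; only the serialized proof layout handed down to proofVerify differs. A hypothetical relay-side caller (illustrative sketch, not part of this change) would look like:

# Sketch: accept a message only if its rate-limit proof verifies against a
# currently valid merkle root (hypothetical helper, assumes the group manager
# and protocol_types modules from this diff are imported).
proc shouldAccept(g: GroupManager, payload: seq[byte], proof: RateLimitProof): bool =
  let verifyRes = g.verifyProof(payload, proof)
  if verifyRes.isErr():
    return false            # treat verification errors as a rejected message
  return verifyRes.value()  # true only when the zk proof checks out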

View File

@@ -31,6 +31,12 @@ type
   RlnIdentifier* = array[32, byte]
   ZKSNARK* = array[128, byte]

+when defined(rln_v2):
+  type
+    UserMessageLimit* = uint64
+    MessageId* = uint64
+    ExternalNullifier* = array[32, byte]
+
 # Custom data types defined for waku rln relay -------------------------
 type RateLimitProof* = object
   ## RateLimitProof holds the public inputs to rln circuit as
@@ -39,8 +45,6 @@ type RateLimitProof* = object
   proof*: ZKSNARK
   ## the root of Merkle tree used for the generation of the `proof`
   merkleRoot*: MerkleNode
-  ## the epoch used for the generation of the `proof`
-  epoch*: Epoch
   ## shareX and shareY are shares of user's identity key
   ## these shares are created using Shamir secret sharing scheme
   ## see details in https://hackmd.io/tMTLMYmTR5eynw2lwK9n1w?view#Linear-Equation-amp-SSS
@@ -49,8 +53,13 @@ type RateLimitProof* = object
   ## nullifier enables linking two messages published during the same epoch
   ## see details in https://hackmd.io/tMTLMYmTR5eynw2lwK9n1w?view#Nullifiers
   nullifier*: Nullifier
+  ## the epoch used for the generation of the `proof`
+  epoch*: Epoch
   ## Application specific RLN Identifier
   rlnIdentifier*: RlnIdentifier
+  when defined(rln_v2):
+    ## the external nullifier used for the generation of the `proof` (derived from poseidon([epoch, rln_identifier]))
+    externalNullifier*: ExternalNullifier

 type ProofMetadata* = object
   nullifier*: Nullifier
@@ -69,6 +78,7 @@ type
 # Protobufs enc and init
 proc init*(T: type RateLimitProof, buffer: seq[byte]): ProtoResult[T] =
   var nsp: RateLimitProof
+
   let pb = initProtoBuffer(buffer)
   var proof: seq[byte]
@@ -101,6 +111,7 @@ proc init*(T: type RateLimitProof, buffer: seq[byte]): ProtoResult[T] =
   return ok(nsp)
+
 proc encode*(nsp: RateLimitProof): ProtoBuffer =
   var output = initProtoBuffer()
@@ -113,7 +124,6 @@ proc encode*(nsp: RateLimitProof): ProtoBuffer =
   output.write3(7, nsp.rlnIdentifier)
   output.finish3()
   return output
-
 type
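
Given the sizes fixed above (ZKSNARK is 128 bytes, the remaining public inputs 32 bytes each), the serialized zk proof plus public inputs comes to 320 bytes in the v1 layout and 288 bytes in the v2 layout, which drops the trailing rln_identifier. A small sketch of that arithmetic (illustrative only):

# Sketch: serialized proof sizes implied by the layouts in the FFI comments
# further below (zk proof + public inputs, excluding the length-prefixed signal).
const
  v1ProofLen = 128 + 32 + 32 + 32 + 32 + 32 + 32  # proof | root | epoch | share_x | share_y | nullifier | rln_identifier
  v2ProofLen = 128 + 32 + 32 + 32 + 32 + 32       # proof | root | external_nullifier | share_x | share_y | nullifier

doAssert v1ProofLen == 320  # matches the array[320, byte] cast used in proofGen
doAssert v2ProofLen == 288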

View File

@@ -101,33 +101,46 @@ proc seeded_key_gen*(ctx: ptr RLN, input_buffer: ptr Buffer, output_buffer: ptr
 ## the return bool value indicates the success or failure of the operation

 proc generate_proof*(ctx: ptr RLN,
                      input_buffer: ptr Buffer,
                      output_buffer: ptr Buffer): bool {.importc: "generate_rln_proof".}
+## rln-v2
+## input_buffer has to be serialized as [ identity_secret<32> | identity_index<8> | user_message_limit<32> | message_id<32> | external_nullifier<32> | signal_len<8> | signal<var> ]
+## output_buffer holds the proof data and should be parsed as [ proof<128> | root<32> | external_nullifier<32> | share_x<32> | share_y<32> | nullifier<32> ]
+## rln-v1
 ## input_buffer has to be serialized as [ id_key<32> | id_index<8> | epoch<32> | signal_len<8> | signal<var> ]
 ## output_buffer holds the proof data and should be parsed as [ proof<128> | root<32> | epoch<32> | share_x<32> | share_y<32> | nullifier<32> | rln_identifier<32> ]
 ## integers wrapped in <> indicate value sizes in bytes
 ## the return bool value indicates the success or failure of the operation

 proc verify*(ctx: ptr RLN,
              proof_buffer: ptr Buffer,
              proof_is_valid_ptr: ptr bool): bool {.importc: "verify_rln_proof".}
-## proof_buffer has to be serialized as [ proof<128> | root<32> | epoch<32> | share_x<32> | share_y<32> | nullifier<32> | rln_identifier<32> | signal_len<8> | signal<var> ]
-##
+## rln-v2
+## proof_buffer has to be serialized as [ proof<128> | root<32> | external_nullifier<32> | share_x<32> | share_y<32> | nullifier<32> | signal_len<8> | signal<var> ]
+## rln-v1
+## proof_buffer has to be serialized as [ proof<128> | root<32> | epoch<32> | share_x<32> | share_y<32> | nullifier<32> | rln_identifier<32> | signal_len<8> | signal<var> ]
 ## the return bool value indicates the success or failure of the call to the verify function
 ## the verification of the zk proof is available in proof_is_valid_ptr, where a value of true indicates success and false a failure

 proc verify_with_roots*(ctx: ptr RLN,
                         proof_buffer: ptr Buffer,
                         roots_buffer: ptr Buffer,
                         proof_is_valid_ptr: ptr bool): bool {.importc: "verify_with_roots".}
+## rln-v2
+## proof_buffer has to be serialized as [ proof<128> | root<32> | external_nullifier<32> | share_x<32> | share_y<32> | nullifier<32> | signal_len<8> | signal<var> ]
+## rln-v1
 ## proof_buffer has to be serialized as [ proof<128> | root<32> | epoch<32> | share_x<32> | share_y<32> | nullifier<32> | rln_identifier<32> | signal_len<8> | signal<var> ]
 ## roots_buffer contains the concatenation of 32 bytes long serializations in little endian of root values
 ## the return bool value indicates the success or failure of the call to the verify function
 ## the verification of the zk proof is available in proof_is_valid_ptr, where a value of true indicates success and false a failure

 proc zk_prove*(ctx: ptr RLN,
                input_buffer: ptr Buffer,
                output_buffer: ptr Buffer): bool {.importc: "prove".}
 ## Computes the zkSNARK proof and stores it in output_buffer for input values stored in input_buffer
+## rln-v2
+## input_buffer is serialized as input_data as [ identity_secret<32> | user_message_limit<32> | message_id<32> | path_elements<Vec<32>> | identity_path_index<Vec<1>> | x<32> | external_nullifier<32> ]
+## rln-v1
 ## input_buffer is serialized as input_data as [ id_key<32> | path_elements<Vec<32>> | identity_path_index<Vec<1>> | x<32> | epoch<32> | rln_identifier<32> ]
 ## output_buffer holds the proof data and should be parsed as [ proof<128> ]
 ## path_elements and indentity_path elements serialize a merkle proof for id_key and are vectors of elements of 32 and 1 bytes, respectively (not. Vec<>).
@@ -136,8 +149,8 @@ proc zk_prove*(ctx: ptr RLN,
 ## the return bool value indicates the success or failure of the operation

 proc zk_verify*(ctx: ptr RLN,
                 proof_buffer: ptr Buffer,
                 proof_is_valid_ptr: ptr bool): bool {.importc: "verify".}
 ## Verifies the zkSNARK proof passed in proof_buffer
 ## input_buffer is serialized as input_data as [ proof<128> ]
 ## the verification of the zk proof is available in proof_is_valid_ptr, where a value of true indicates success and false a failure
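
As a worked example of the rln-v2 proof_buffer layout documented above (a sketch assuming the signal_len prefix is a little-endian uint64, as produced by encodeLengthPrefix), a caller could sanity-check a buffer before handing it to verify_with_roots:

# Sketch: structural check of a serialized rln-v2 proof + signal buffer
# (offsets follow the layout comment above; illustrative only).
proc looksLikeV2ProofInput(buf: openArray[byte]): bool =
  const
    publicLen = 128 + 5 * 32    # proof | root | external_nullifier | share_x | share_y | nullifier
    headerLen = publicLen + 8   # plus the 8-byte signal_len prefix
  if buf.len < headerLen:
    return false
  # decode the little-endian signal_len and compare with the trailing bytes
  var signalLen: uint64
  for i in 0 ..< 8:
    signalLen = signalLen or (uint64(buf[publicLen + i]) shl (8 * i))
  return signalLen == uint64(buf.len - headerLen)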

View File

@@ -159,79 +159,157 @@ proc poseidon*(data: seq[seq[byte]]): RlnRelayResult[array[32, byte]] =
   return ok(output)

-# TODO: collocate this proc with the definition of the RateLimitProof
-# and the ProofMetadata types
-proc extractMetadata*(proof: RateLimitProof): RlnRelayResult[ProofMetadata] =
-  let externalNullifierRes = poseidon(@[@(proof.epoch),
-                                        @(proof.rlnIdentifier)])
-  if externalNullifierRes.isErr():
-    return err("could not construct the external nullifier")
-  return ok(ProofMetadata(
-    nullifier: proof.nullifier,
-    shareX: proof.shareX,
-    shareY: proof.shareY,
-    externalNullifier: externalNullifierRes.get()
-  ))
+when defined(rln_v2):
+  # TODO: collocate this proc with the definition of the RateLimitProof
+  # and the ProofMetadata types
+  proc extractMetadata*(proof: RateLimitProof): RlnRelayResult[ProofMetadata] =
+    return ok(ProofMetadata(
+      nullifier: proof.nullifier,
+      shareX: proof.shareX,
+      shareY: proof.shareY,
+      externalNullifier: externalNullifierRes.get()
+    ))
+else:
+  proc extractMetadata*(proof: RateLimitProof): RlnRelayResult[ProofMetadata] =
+    let externalNullifierRes = poseidon(@[@(proof.epoch),
+                                          @(proof.rlnIdentifier)])
+    if externalNullifierRes.isErr():
+      return err("could not construct the external nullifier")
+    return ok(ProofMetadata(
+      nullifier: proof.nullifier,
+      shareX: proof.shareX,
+      shareY: proof.shareY,
+      externalNullifier: externalNullifierRes.get()
+    ))

-proc proofGen*(rlnInstance: ptr RLN, data: openArray[byte],
-               memKeys: IdentityCredential, memIndex: MembershipIndex,
-               epoch: Epoch): RateLimitProofResult =
-
-  # serialize inputs
-  let serializedInputs = serialize(idSecretHash = memKeys.idSecretHash,
-                                   memIndex = memIndex,
-                                   epoch = epoch,
-                                   msg = data)
-  var inputBuffer = toBuffer(serializedInputs)
-
-  debug "input buffer ", inputBuffer= repr(inputBuffer)
-
-  # generate the proof
-  var proof: Buffer
-  let proofIsSuccessful = generate_proof(rlnInstance, addr inputBuffer, addr proof)
-  # check whether the generate_proof call is done successfully
-  if not proofIsSuccessful:
-    return err("could not generate the proof")
-
-  var proofValue = cast[ptr array[320, byte]] (proof.`ptr`)
-  let proofBytes: array[320, byte] = proofValue[]
-  debug "proof content", proofHex = proofValue[].toHex
-
-  ## parse the proof as [ proof<128> | root<32> | epoch<32> | share_x<32> | share_y<32> | nullifier<32> | rln_identifier<32> ]
-  let
-    proofOffset = 128
-    rootOffset = proofOffset + 32
-    epochOffset = rootOffset + 32
-    shareXOffset = epochOffset + 32
-    shareYOffset = shareXOffset + 32
-    nullifierOffset = shareYOffset + 32
-    rlnIdentifierOffset = nullifierOffset + 32
-
-  var
-    zkproof: ZKSNARK
-    proofRoot, shareX, shareY: MerkleNode
-    epoch: Epoch
-    nullifier: Nullifier
-    rlnIdentifier: RlnIdentifier
-
-  discard zkproof.copyFrom(proofBytes[0..proofOffset-1])
-  discard proofRoot.copyFrom(proofBytes[proofOffset..rootOffset-1])
-  discard epoch.copyFrom(proofBytes[rootOffset..epochOffset-1])
-  discard shareX.copyFrom(proofBytes[epochOffset..shareXOffset-1])
-  discard shareY.copyFrom(proofBytes[shareXOffset..shareYOffset-1])
-  discard nullifier.copyFrom(proofBytes[shareYOffset..nullifierOffset-1])
-  discard rlnIdentifier.copyFrom(proofBytes[nullifierOffset..rlnIdentifierOffset-1])
-
-  let output = RateLimitProof(proof: zkproof,
-                              merkleRoot: proofRoot,
-                              epoch: epoch,
-                              shareX: shareX,
-                              shareY: shareY,
-                              nullifier: nullifier,
-                              rlnIdentifier: rlnIdentifier)
-  return ok(output)
+when defined(rln_v2):
+  proc proofGen*(rlnInstance: ptr RLN,
+                 data: openArray[byte],
+                 membership: IdentityCredential,
+                 userMessageLimit: UserMessageLimit,
+                 messageId: MessageId,
+                 index: MembershipIndex,
+                 epoch: Epoch): RateLimitProofResult =
+
+    # obtain the external nullifier
+    let externalNullifierRes = poseidon(@[@(epoch),
+                                          @(DefaultRlnIdentifier)])
+    if externalNullifierRes.isErr():
+      return err("could not construct the external nullifier")
+
+    # serialize inputs
+    let serializedInputs = serialize(idSecretHash = membership.idSecretHash,
+                                     memIndex = index,
+                                     userMessageLimit = userMessageLimit,
+                                     messageId = messageId,
+                                     externalNullifier = externalNullifierRes.get(),
+                                     msg = data)
+    var inputBuffer = toBuffer(serializedInputs)
+
+    debug "input buffer ", inputBuffer= repr(inputBuffer)
+
+    # generate the proof
+    var proof: Buffer
+    let proofIsSuccessful = generate_proof(rlnInstance, addr inputBuffer, addr proof)
+    # check whether the generate_proof call is done successfully
+    if not proofIsSuccessful:
+      return err("could not generate the proof")
+
+    var proofValue = cast[ptr array[320, byte]] (proof.`ptr`)
+    let proofBytes: array[320, byte] = proofValue[]
+    debug "proof content", proofHex = proofValue[].toHex
+
+    ## parse the proof as [ proof<128> | root<32> | external_nullifier<32> | share_x<32> | share_y<32> | nullifier<32> ]
+    let
+      proofOffset = 128
+      rootOffset = proofOffset + 32
+      externalNullifierOffset = rootOffset + 32
+      shareXOffset = externalNullifierOffset + 32
+      shareYOffset = shareXOffset + 32
+      nullifierOffset = shareYOffset + 32
+
+    var
+      zkproof: ZKSNARK
+      proofRoot, shareX, shareY: MerkleNode
+      externalNullifier: ExternalNullifier
+      nullifier: Nullifier
+
+    discard zkproof.copyFrom(proofBytes[0..proofOffset-1])
+    discard proofRoot.copyFrom(proofBytes[proofOffset..rootOffset-1])
+    discard externalNullifier.copyFrom(proofBytes[rootOffset..externalNullifierOffset-1])
+    discard shareX.copyFrom(proofBytes[externalNullifierOffset..shareXOffset-1])
+    discard shareY.copyFrom(proofBytes[shareXOffset..shareYOffset-1])
+    discard nullifier.copyFrom(proofBytes[shareYOffset..nullifierOffset-1])
+
+    let output = RateLimitProof(proof: zkproof,
+                                merkleRoot: proofRoot,
+                                externalNullifier: externalNullifier,
+                                shareX: shareX,
+                                shareY: shareY,
+                                nullifier: nullifier)
+    return ok(output)
+else:
+  proc proofGen*(rlnInstance: ptr RLN, data: openArray[byte],
+                 memKeys: IdentityCredential, memIndex: MembershipIndex,
+                 epoch: Epoch): RateLimitProofResult =
+
+    # serialize inputs
+    let serializedInputs = serialize(idSecretHash = memKeys.idSecretHash,
+                                     memIndex = memIndex,
+                                     epoch = epoch,
+                                     msg = data)
+    var inputBuffer = toBuffer(serializedInputs)
+
+    debug "input buffer ", inputBuffer= repr(inputBuffer)
+
+    # generate the proof
+    var proof: Buffer
+    let proofIsSuccessful = generate_proof(rlnInstance, addr inputBuffer, addr proof)
+    # check whether the generate_proof call is done successfully
+    if not proofIsSuccessful:
+      return err("could not generate the proof")
+
+    var proofValue = cast[ptr array[320, byte]] (proof.`ptr`)
+    let proofBytes: array[320, byte] = proofValue[]
+    debug "proof content", proofHex = proofValue[].toHex
+
+    ## parse the proof as [ proof<128> | root<32> | epoch<32> | share_x<32> | share_y<32> | nullifier<32> | rln_identifier<32> ]
+    let
+      proofOffset = 128
+      rootOffset = proofOffset + 32
+      epochOffset = rootOffset + 32
+      shareXOffset = epochOffset + 32
+      shareYOffset = shareXOffset + 32
+      nullifierOffset = shareYOffset + 32
+      rlnIdentifierOffset = nullifierOffset + 32
+
+    var
+      zkproof: ZKSNARK
+      proofRoot, shareX, shareY: MerkleNode
+      epoch: Epoch
+      nullifier: Nullifier
+      rlnIdentifier: RlnIdentifier
+
+    discard zkproof.copyFrom(proofBytes[0..proofOffset-1])
+    discard proofRoot.copyFrom(proofBytes[proofOffset..rootOffset-1])
+    discard epoch.copyFrom(proofBytes[rootOffset..epochOffset-1])
+    discard shareX.copyFrom(proofBytes[epochOffset..shareXOffset-1])
+    discard shareY.copyFrom(proofBytes[shareXOffset..shareYOffset-1])
+    discard nullifier.copyFrom(proofBytes[shareYOffset..nullifierOffset-1])
+    discard rlnIdentifier.copyFrom(proofBytes[nullifierOffset..rlnIdentifierOffset-1])
+
+    let output = RateLimitProof(proof: zkproof,
+                                merkleRoot: proofRoot,
+                                epoch: epoch,
+                                shareX: shareX,
+                                shareY: shareY,
+                                nullifier: nullifier,
+                                rlnIdentifier: rlnIdentifier)
+    return ok(output)

 # validRoots should contain a sequence of roots in the acceptable windows.
 # As default, it is set to an empty sequence of roots. This implies that the validity check for the proof's root is skipped
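
Putting the v2 pieces together, a caller that already holds an initialised RLN instance and a registered credential would invoke the new proofGen roughly as follows (a hypothetical sketch, not part of this change; the message limit and message id values are illustrative):

# Sketch: generating an rln-v2 rate-limit proof for one message (illustrative).
when defined(rln_v2):
  proc generateExampleProof(rlnInstance: ptr RLN,
                            credential: IdentityCredential,
                            index: MembershipIndex,
                            epoch: Epoch,
                            msg: seq[byte]): RateLimitProofResult =
    # user_message_limit caps messages per epoch; message_id must stay below it
    let userMessageLimit = UserMessageLimit(20)
    let messageId = MessageId(0)
    return proofGen(rlnInstance, msg, credential, userMessageLimit, messageId,
                    index, epoch)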