Mirror of https://github.com/waku-org/nwaku.git (synced 2025-01-13 00:05:10 +00:00)

deploy: 7f23bdf29fc60d89a3ec1d2f759e8bc037122833

This commit is contained in:
parent c6e6802910
commit 38b9f2b6a0
@@ -10,7 +10,7 @@ import

const clientId = "Waku example v1"

-proc run(config: WakuNodeConf, rng: ref BrHmacDrbgContext) =
+proc run(config: WakuNodeConf, rng: ref HmacDrbgContext) =
  # Set up the address according to NAT information.
  let (ipExt, tcpPortExt, udpPortExt) = setupNat(config.nat, clientId,
    Port(config.tcpPort + config.portsShift),
@@ -33,6 +33,8 @@ proc run(config: WakuNodeConf, rng: ref BrHmacDrbgContext) =
    nil, # Database, not required for Waku
    clientId, # Client id string
    addAllCapabilities = false, # Disable default all RLPx capabilities
+   bindUdpPort = address.udpPort, # Assume same as external
+   bindTcpPort = address.tcpPort, # Assume same as external
    rng = rng)

  node.addCapability Waku # Enable only the Waku protocol.
@@ -57,7 +59,7 @@ proc run(config: WakuNodeConf, rng: ref BrHmacDrbgContext) =
  # connection occurs, which is why we use a callback to exit on errors instead of
  # using `await`.
  # TODO: This looks a bit awkward and the API should perhaps be altered here.
- let connectedFut = node.connectToNetwork(@[],
+ let connectedFut = node.connectToNetwork(
    true, # Enable listening
    false # Disable discovery (only discovery v4 is currently supported)
  )
@@ -25,7 +25,7 @@ proc runBackground() {.async.} =
    Port(uint16(conf.tcpPort) + conf.portsShift), extIp, extTcpPort)

  await node.start()
- node.mountRelay()
+ await node.mountRelay()

  # Subscribe to a topic
  let topic = cast[Topic]("foobar")
@@ -214,9 +214,10 @@ proc publish(c: Chat, line: string) =
  when PayloadV1:
    # Use Waku v1 payload encoding/encryption
    let
+     rng = keys.newRng()
      payload = Payload(payload: chat2pb.buffer, symKey: some(c.symKey))
      version = 1'u32
-     encodedPayload = payload.encode(version, c.node.rng[])
+     encodedPayload = payload.encode(version, rng[])
    if encodedPayload.isOk():
      var message = WakuMessage(payload: encodedPayload.get(),
        contentTopic: c.contentTopic, version: version, timestamp: getNanosecondTime(time))
@@ -359,7 +360,7 @@ proc readInput(wfd: AsyncFD) {.thread, raises: [Defect, CatchableError].} =
    discard waitFor transp.write(line & "\r\n")

{.pop.} # @TODO confutils.nim(775, 17) Error: can raise an unlisted exception: ref IOError
-proc processInput(rfd: AsyncFD, rng: ref BrHmacDrbgContext) {.async.} =
+proc processInput(rfd: AsyncFD) {.async.} =
  let transp = fromPipe(rfd)

  let
@@ -375,10 +376,10 @@ proc processInput(rfd: AsyncFD, rng: ref BrHmacDrbgContext) {.async.} =
    wssEnabled = conf.websocketSecureSupport)
  await node.start()

- node.mountRelay(conf.topics.split(" "),
-   relayMessages = conf.relay) # Indicates if node is capable to relay messages
+ if conf.relay:
+   await node.mountRelay(conf.topics.split(" "))

- node.mountLibp2pPing()
+ await node.mountLibp2pPing()

  let nick = await readNick(transp)
  echo "Welcome, " & nick & "!"
@@ -445,10 +446,10 @@ proc processInput(rfd: AsyncFD, rng: ref BrHmacDrbgContext) {.async.} =
  echo &"Listening on\n {listenStr}"

  if conf.swap:
-   node.mountSwap()
+   await node.mountSwap()

  if (conf.storenode != "") or (conf.store == true):
-   node.mountStore(persistMessages = conf.persistMessages)
+   await node.mountStore(persistMessages = conf.persistMessages)

    var storenode: Option[RemotePeerInfo]

@@ -477,12 +478,12 @@ proc processInput(rfd: AsyncFD, rng: ref BrHmacDrbgContext) {.async.} =

  # NOTE Must be mounted after relay
  if conf.lightpushnode != "":
-   mountLightPush(node)
+   await mountLightPush(node)

    node.wakuLightPush.setPeer(parseRemotePeerInfo(conf.lightpushnode))

  if conf.filternode != "":
-   node.mountFilter()
+   await node.mountFilter()

    node.wakuFilter.setPeer(parseRemotePeerInfo(conf.filternode))

@@ -545,7 +546,6 @@ proc processInput(rfd: AsyncFD, rng: ref BrHmacDrbgContext) {.async.} =
  runForever()

proc main() {.async.} =
- let rng = crypto.newRng() # Singe random number source for the whole application
  let (rfd, wfd) = createAsyncPipe()
  if rfd == asyncInvalidPipe or wfd == asyncInvalidPipe:
    raise newException(ValueError, "Could not initialize pipe!")
@@ -553,7 +553,7 @@ proc main() {.async.} =
  var thread: Thread[AsyncFD]
  thread.createThread(readInput, wfd)

- await processInput(rfd, rng)
+ await processInput(rfd)

when isMainModule: # isMainModule = true when the module is compiled as the main file
  waitFor(main())
@@ -24,7 +24,7 @@ type

  nodekey* {.
    desc: "P2P node private key as 64 char hex string.",
-   defaultValue: crypto.PrivateKey.random(Secp256k1, keys.newRng()[]).tryGet()
+   defaultValue: crypto.PrivateKey.random(Secp256k1, crypto.newRng()[]).tryGet()
    name: "nodekey" }: crypto.PrivateKey

  listenAddress* {.
@@ -186,7 +186,7 @@ proc start*(cmb: Chat2MatterBridge) {.async.} =

  # Always mount relay for bridge
  # `triggerSelf` is false on a `bridge` to avoid duplicates
- cmb.nodev2.mountRelay(triggerSelf = false)
+ await cmb.nodev2.mountRelay(triggerSelf = false)

  # Bridging
  # Handle messages on Waku v2 and bridge to Matterbridge
@@ -263,13 +263,13 @@ when isMainModule:

  # Now load rest of config
  # Mount configured Waku v2 protocols
- mountLibp2pPing(bridge.nodev2)
+ waitFor mountLibp2pPing(bridge.nodev2)

  if conf.store:
-   mountStore(bridge.nodev2)
+   waitFor mountStore(bridge.nodev2)

  if conf.filter:
-   mountFilter(bridge.nodev2)
+   waitFor mountFilter(bridge.nodev2)

  if conf.staticnodes.len > 0:
    waitFor connectToNodes(bridge.nodev2, conf.staticnodes)
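The recurring change in the hunks above is that the protocol mount procs (mountRelay, mountFilter, mountStore, mountSwap, mountLightPush, mountLibp2pPing) are now asynchronous and must be awaited, or driven with waitFor from synchronous code. A minimal sketch of the updated calling pattern, assuming the WakuNode type and import paths used elsewhere in this diff:

    import chronos
    # The waku node module import is assumed; its exact path is not shown in this diff.

    proc setupProtocols(node: WakuNode) {.async.} =
      await node.start()
      # Mount procs return futures after this change, so they must be awaited:
      await node.mountRelay()
      await node.mountLibp2pPing()

    # From non-async code they are driven with waitFor instead, as the bridge does:
    #   waitFor mountLibp2pPing(bridge.nodev2)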
nimbus-build-system.paths (new file, 33 lines)
@@ -0,0 +1,33 @@
+--noNimblePath
+--path:"/home/runner/work/nwaku/nwaku/vendor/dnsclient.nim/src"
+--path:"/home/runner/work/nwaku/nwaku/vendor/news/src"
+--path:"/home/runner/work/nwaku/nwaku/vendor/nim-bearssl"
+--path:"/home/runner/work/nwaku/nwaku/vendor/nim-chronicles"
+--path:"/home/runner/work/nwaku/nwaku/vendor/nim-chronos"
+--path:"/home/runner/work/nwaku/nwaku/vendor/nim-confutils"
+--path:"/home/runner/work/nwaku/nwaku/vendor/nim-dnsdisc"
+--path:"/home/runner/work/nwaku/nwaku/vendor/nim-eth"
+--path:"/home/runner/work/nwaku/nwaku/vendor/nim-faststreams"
+--path:"/home/runner/work/nwaku/nwaku/vendor/nim-http-utils"
+--path:"/home/runner/work/nwaku/nwaku/vendor/nim-json-rpc"
+--path:"/home/runner/work/nwaku/nwaku/vendor/nim-json-serialization"
+--path:"/home/runner/work/nwaku/nwaku/vendor/nim-libbacktrace"
+--path:"/home/runner/work/nwaku/nwaku/vendor/nim-libp2p"
+--path:"/home/runner/work/nwaku/nwaku/vendor/nim-metrics"
+--path:"/home/runner/work/nwaku/nwaku/vendor/nim-nat-traversal"
+--path:"/home/runner/work/nwaku/nwaku/vendor/nim-presto"
+--path:"/home/runner/work/nwaku/nwaku/vendor/nim-secp256k1"
+--path:"/home/runner/work/nwaku/nwaku/vendor/nim-serialization"
+--path:"/home/runner/work/nwaku/nwaku/vendor/nim-sqlite3-abi"
+--path:"/home/runner/work/nwaku/nwaku/vendor/nim-stew"
+--path:"/home/runner/work/nwaku/nwaku/vendor/nim-stint"
+--path:"/home/runner/work/nwaku/nwaku/vendor/nim-testutils"
+--path:"/home/runner/work/nwaku/nwaku/vendor/nim-toml-serialization"
+--path:"/home/runner/work/nwaku/nwaku/vendor/nim-unittest2"
+--path:"/home/runner/work/nwaku/nwaku/vendor/nim-web3"
+--path:"/home/runner/work/nwaku/nwaku/vendor/nim-websock"
+--path:"/home/runner/work/nwaku/nwaku/vendor/nim-zlib"
+--path:"/home/runner/work/nwaku/nwaku/vendor/nimbus-build-system"
+--path:"/home/runner/work/nwaku/nwaku/vendor/nimcrypto"
+--path:"/home/runner/work/nwaku/nwaku/vendor/rln/src"
+--path:"/home/runner/work/nwaku/nwaku/vendor/zerokit"
@@ -1,5 +1,5 @@
import
- chronos, bearssl,
+ chronos, bearssl/rand,
  eth/[keys, p2p]

import libp2p/crypto/crypto
@@ -12,22 +12,27 @@ proc localAddress*(port: int): Address =
    ip: parseIpAddress("127.0.0.1"))

proc setupTestNode*(
-   rng: ref BrHmacDrbgContext,
+   rng: ref HmacDrbgContext,
    capabilities: varargs[ProtocolInfo, `protocolInfo`]): EthereumNode =
- let keys1 = keys.KeyPair.random(rng[])
- result = newEthereumNode(keys1, localAddress(nextPort), NetworkId(1), nil,
-   addAllCapabilities = false, rng = rng)
+ let
+   keys1 = keys.KeyPair.random(rng[])
+   address = localAddress(nextPort)
+ result = newEthereumNode(keys1, address, NetworkId(1), nil,
+   addAllCapabilities = false,
+   bindUdpPort = address.udpPort, # Assume same as external
+   bindTcpPort = address.tcpPort, # Assume same as external
+   rng = rng)
  nextPort.inc
  for capability in capabilities:
    result.addCapability capability

# Copied from here: https://github.com/status-im/nim-libp2p/blob/d522537b19a532bc4af94fcd146f779c1f23bad0/tests/helpers.nim#L28
type RngWrap = object
- rng: ref BrHmacDrbgContext
+ rng: ref rand.HmacDrbgContext

var rngVar: RngWrap

-proc getRng(): ref BrHmacDrbgContext =
+proc getRng(): ref rand.HmacDrbgContext =
  # TODO if `rngVar` is a threadvar like it should be, there are random and
  # spurious compile failures on mac - this is not gcsafe but for the
  # purpose of the tests, it's ok as long as we only use a single thread
@@ -36,5 +41,5 @@ proc getRng(): ref BrHmacDrbgContext =
    rngVar.rng = crypto.newRng()
  rngVar.rng

-template rng*(): ref BrHmacDrbgContext =
+template rng*(): ref rand.HmacDrbgContext =
  getRng()
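Both hunks above track nim-bearssl's rename of BrHmacDrbgContext to HmacDrbgContext, now imported via bearssl/rand. A short sketch of how the shared test RNG is used under the new names; it assumes the rng() template and setupTestNode defined in the helper above are in scope:

    import bearssl/rand   # HmacDrbgContext lives here after the rename
    import eth/keys

    let testRng: ref HmacDrbgContext = rng()        # shared, lazily created test RNG
    let nodeKeys = keys.KeyPair.random(testRng[])   # same call as in setupTestNode
    # let node = setupTestNode(testRng, Waku)       # capability argument as used in the tests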
@@ -14,14 +14,14 @@ const sigPath = sourceDir / ParDir / ParDir / "waku" / "v1" / "node" / "rpc" / "
createRpcSigs(RpcSocketClient, sigPath)

proc setupNode(capabilities: varargs[ProtocolInfo, `protocolInfo`],
-   rng: ref BrHmacDrbgContext, ): EthereumNode =
+   rng: ref HmacDrbgContext, ): EthereumNode =
  let
    keypair = KeyPair.random(rng[])
    srvAddress = Address(ip: parseIpAddress("0.0.0.0"), tcpPort: Port(30303),
      udpPort: Port(30303))

  result = newEthereumNode(keypair, srvAddress, NetworkId(1), nil, "waku test rpc",
-   addAllCapabilities = false, rng = rng)
+   addAllCapabilities = false, bindUdpPort = srvAddress.udpPort, bindTcpPort = srvAddress.tcpPort, rng = rng)
  for capability in capabilities:
    result.addCapability capability

@@ -44,11 +44,11 @@ procSuite "Waku connections":
    n3 = setupTestNode(rng, Waku)
    n4 = setupTestNode(rng, Waku)

- var topics: seq[Topic]
+ var topics: seq[waku_protocol.Topic]
  n1.protocolState(Waku).config.topics = some(topics)
  n2.protocolState(Waku).config.topics = some(topics)
- n3.protocolState(Waku).config.topics = none(seq[Topic])
- n4.protocolState(Waku).config.topics = none(seq[Topic])
+ n3.protocolState(Waku).config.topics = none(seq[waku_protocol.Topic])
+ n4.protocolState(Waku).config.topics = none(seq[waku_protocol.Topic])

  n1.startListening()
  n3.startListening()
@@ -499,7 +499,7 @@ procSuite "Waku connections":
  let bloomFilterUpdatedCondition = proc(): bool =
    for peer in wakuNode.peerPool.peers:
      return peer.state(Waku).bloom == bloom and
-       peer.state(Waku).topics == none(seq[Topic])
+       peer.state(Waku).topics == none(seq[waku_protocol.Topic])

  let bloomFilterUpdated =
    await eventually(conditionTimeoutMs, bloomFilterUpdatedCondition)
@@ -47,7 +47,7 @@ procSuite "Waku v2 JSON-RPC API":
  asyncTest "Debug API: get node info":
    waitFor node.start()

-   node.mountRelay()
+   await node.mountRelay()

    # RPC server setup
    let
@@ -74,7 +74,7 @@ procSuite "Waku v2 JSON-RPC API":
  asyncTest "Relay API: publish and subscribe/unsubscribe":
    waitFor node.start()

-   node.mountRelay()
+   await node.mountRelay()

    # RPC server setup
    let
@@ -137,13 +137,13 @@ procSuite "Waku v2 JSON-RPC API":
      message2 = WakuMessage(payload: payload2, contentTopic: contentTopic)

    await node1.start()
-   node1.mountRelay(@[pubSubTopic])
+   await node1.mountRelay(@[pubSubTopic])

    await node2.start()
-   node2.mountRelay(@[pubSubTopic])
+   await node2.mountRelay(@[pubSubTopic])

    await node3.start()
-   node3.mountRelay(@[pubSubTopic])
+   await node3.mountRelay(@[pubSubTopic])

    await node1.connectToNodes(@[node2.switch.peerInfo.toRemotePeerInfo()])
    await node3.connectToNodes(@[node2.switch.peerInfo.toRemotePeerInfo()])
@@ -215,7 +215,7 @@ procSuite "Waku v2 JSON-RPC API":
  asyncTest "Store API: retrieve historical messages":
    waitFor node.start()

-   node.mountRelay()
+   await node.mountRelay()

    # RPC server setup
    let
@@ -231,7 +231,7 @@ procSuite "Waku v2 JSON-RPC API":
      key = wakunode2.PrivateKey.random(ECDSA, rng[]).get()
      peer = PeerInfo.new(key)

-   node.mountStore(persistMessages = true)
+   await node.mountStore(persistMessages = true)

    var listenSwitch = newStandardSwitch(some(key))
    waitFor listenSwitch.start()
@@ -273,9 +273,9 @@ procSuite "Waku v2 JSON-RPC API":
  asyncTest "Filter API: subscribe/unsubscribe":
    waitFor node.start()

-   node.mountRelay()
+   await node.mountRelay()

-   node.mountFilter()
+   await node.mountFilter()

    # RPC server setup
    let
@@ -329,7 +329,7 @@ procSuite "Waku v2 JSON-RPC API":
    installFilterApiHandlers(node, server, newTable[ContentTopic, seq[WakuMessage]]())
    server.start()

-   node.mountFilter()
+   await node.mountFilter()

    let client = newRpcHttpClient()
    await client.connect("127.0.0.1", rpcPort, false)
@@ -412,9 +412,9 @@ procSuite "Waku v2 JSON-RPC API":

    await allFutures([node1.start(), node2.start(), node3.start()])

-   node1.mountRelay()
-   node2.mountRelay()
-   node3.mountRelay()
+   await node1.mountRelay()
+   await node2.mountRelay()
+   await node3.mountRelay()

    # RPC server setup
    let
@@ -469,9 +469,9 @@ procSuite "Waku v2 JSON-RPC API":

    await allFutures([node1.start(), node2.start(), node3.start()])

-   node1.mountRelay()
-   node2.mountRelay()
-   node3.mountRelay()
+   await node1.mountRelay()
+   await node2.mountRelay()
+   await node3.mountRelay()

    # Dial nodes 2 and 3 from node1
    await node1.connectToNodes(@[constructMultiaddrStr(peerInfo2)])
@@ -525,9 +525,9 @@ procSuite "Waku v2 JSON-RPC API":
    let client = newRpcHttpClient()
    await client.connect("127.0.0.1", rpcPort, false)

-   node.mountFilter()
-   node.mountSwap()
-   node.mountStore(persistMessages = true)
+   await node.mountFilter()
+   await node.mountSwap()
+   await node.mountStore(persistMessages = true)

    # Create and set some peers
    let
@@ -577,13 +577,13 @@ procSuite "Waku v2 JSON-RPC API":
      topicCache = newTable[string, seq[WakuMessage]]()

    await node1.start()
-   node1.mountRelay(@[pubSubTopic])
+   await node1.mountRelay(@[pubSubTopic])

    await node2.start()
-   node2.mountRelay(@[pubSubTopic])
+   await node2.mountRelay(@[pubSubTopic])

    await node3.start()
-   node3.mountRelay(@[pubSubTopic])
+   await node3.mountRelay(@[pubSubTopic])

    await node1.connectToNodes(@[node2.switch.peerInfo.toRemotePeerInfo()])
    await node3.connectToNodes(@[node2.switch.peerInfo.toRemotePeerInfo()])
@@ -598,8 +598,8 @@ procSuite "Waku v2 JSON-RPC API":
      server3 = newRpcHttpServer([ta3])

    # Let's connect to nodes 1 and 3 via the API
-   installPrivateApiHandlers(node1, server1, rng, newTable[string, seq[WakuMessage]]())
-   installPrivateApiHandlers(node3, server3, rng, topicCache)
+   installPrivateApiHandlers(node1, server1, newTable[string, seq[WakuMessage]]())
+   installPrivateApiHandlers(node3, server3, topicCache)
    installRelayApiHandlers(node3, server3, topicCache)
    server1.start()
    server3.start()
@@ -668,13 +668,13 @@ procSuite "Waku v2 JSON-RPC API":
      topicCache = newTable[string, seq[WakuMessage]]()

    await node1.start()
-   node1.mountRelay(@[pubSubTopic])
+   await node1.mountRelay(@[pubSubTopic])

    await node2.start()
-   node2.mountRelay(@[pubSubTopic])
+   await node2.mountRelay(@[pubSubTopic])

    await node3.start()
-   node3.mountRelay(@[pubSubTopic])
+   await node3.mountRelay(@[pubSubTopic])

    await node1.connectToNodes(@[node2.switch.peerInfo.toRemotePeerInfo()])
    await node3.connectToNodes(@[node2.switch.peerInfo.toRemotePeerInfo()])
@@ -689,8 +689,8 @@ procSuite "Waku v2 JSON-RPC API":
      server3 = newRpcHttpServer([ta3])

    # Let's connect to nodes 1 and 3 via the API
-   installPrivateApiHandlers(node1, server1, rng, newTable[string, seq[WakuMessage]]())
-   installPrivateApiHandlers(node3, server3, rng, topicCache)
+   installPrivateApiHandlers(node1, server1, newTable[string, seq[WakuMessage]]())
+   installPrivateApiHandlers(node3, server3, topicCache)
    installRelayApiHandlers(node3, server3, topicCache)
    server1.start()
    server3.start()
@@ -52,9 +52,9 @@ procSuite "Peer Exchange":
    peerExchangeHandler = handlePeerExchange
    emptyHandler = ignorePeerExchange

- node1.mountRelay(peerExchangeHandler = some(emptyHandler))
- node2.mountRelay(peerExchangeHandler = some(emptyHandler))
- node3.mountRelay(peerExchangeHandler = some(peerExchangeHandler))
+ await node1.mountRelay(peerExchangeHandler = some(emptyHandler))
+ await node2.mountRelay(peerExchangeHandler = some(emptyHandler))
+ await node3.mountRelay(peerExchangeHandler = some(peerExchangeHandler))

  # Ensure that node1 prunes all peers after the first connection
  node1.wakuRelay.parameters.dHigh = 1
@ -34,8 +34,8 @@ procSuite "Peer Manager":
|
||||
|
||||
await allFutures([node1.start(), node2.start()])
|
||||
|
||||
node1.mountRelay()
|
||||
node2.mountRelay()
|
||||
await node1.mountRelay()
|
||||
await node2.mountRelay()
|
||||
|
||||
# Dial node2 from node1
|
||||
let conn = (await node1.peerManager.dialPeer(peerInfo2.toRemotePeerInfo(), WakuRelayCodec)).get()
|
||||
@ -68,8 +68,8 @@ procSuite "Peer Manager":
|
||||
await node1.start()
|
||||
# Purposefully don't start node2
|
||||
|
||||
node1.mountRelay()
|
||||
node2.mountRelay()
|
||||
await node1.mountRelay()
|
||||
await node2.mountRelay()
|
||||
|
||||
# Dial node2 from node1
|
||||
let connOpt = await node1.peerManager.dialPeer(peerInfo2.toRemotePeerInfo(), WakuRelayCodec, 2.seconds)
|
||||
@ -100,9 +100,9 @@ procSuite "Peer Manager":
|
||||
|
||||
await node.start()
|
||||
|
||||
node.mountFilter()
|
||||
node.mountSwap()
|
||||
node.mountStore(persistMessages = true)
|
||||
await node.mountFilter()
|
||||
await node.mountSwap()
|
||||
await node.mountStore(persistMessages = true)
|
||||
|
||||
node.wakuFilter.setPeer(filterPeer.toRemotePeerInfo())
|
||||
node.wakuSwap.setPeer(swapPeer.toRemotePeerInfo())
|
||||
@ -136,8 +136,8 @@ procSuite "Peer Manager":
|
||||
|
||||
await node1.start()
|
||||
|
||||
node1.mountRelay()
|
||||
node2.mountRelay()
|
||||
await node1.mountRelay()
|
||||
await node2.mountRelay()
|
||||
|
||||
# Test default connectedness for new peers
|
||||
node1.peerManager.addPeer(peerInfo2.toRemotePeerInfo(), WakuRelayCodec)
|
||||
@ -182,8 +182,8 @@ procSuite "Peer Manager":
|
||||
await node1.start()
|
||||
await node2.start()
|
||||
|
||||
node1.mountRelay()
|
||||
node2.mountRelay()
|
||||
await node1.mountRelay()
|
||||
await node2.mountRelay()
|
||||
|
||||
discard await node1.peerManager.dialPeer(peerInfo2.toRemotePeerInfo(), WakuRelayCodec, 2.seconds)
|
||||
check:
|
||||
@ -205,7 +205,7 @@ procSuite "Peer Manager":
|
||||
node3.peerManager.peers().anyIt(it.peerId == peerInfo2.peerId)
|
||||
node3.peerManager.connectedness(peerInfo2.peerId) == NotConnected
|
||||
|
||||
node3.mountRelay() # This should trigger a reconnect
|
||||
await node3.mountRelay() # This should trigger a reconnect
|
||||
|
||||
check:
|
||||
# Reconnected to node2 after "restart"
|
||||
@ -232,9 +232,9 @@ asyncTest "Peer manager support multiple protocol IDs when reconnecting to peers
|
||||
await node1.start()
|
||||
await node2.start()
|
||||
|
||||
node1.mountRelay()
|
||||
await node1.mountRelay()
|
||||
node1.wakuRelay.codec = betaCodec
|
||||
node2.mountRelay()
|
||||
await node2.mountRelay()
|
||||
node2.wakuRelay.codec = betaCodec
|
||||
|
||||
discard await node1.peerManager.dialPeer(peerInfo2.toRemotePeerInfo(), node2.wakuRelay.codec, 2.seconds)
|
||||
@ -251,7 +251,7 @@ asyncTest "Peer manager support multiple protocol IDs when reconnecting to peers
|
||||
node3 = WakuNode.new(nodeKey3, ValidIpAddress.init("0.0.0.0"),
|
||||
Port(60004), peerStorage = storage)
|
||||
|
||||
node3.mountRelay()
|
||||
await node3.mountRelay()
|
||||
node3.wakuRelay.codec = stableCodec
|
||||
check:
|
||||
# Node 2 and 3 have differing codecs
|
||||
|
@ -28,7 +28,7 @@ suite "REST API - Debug":
|
||||
# Given
|
||||
let node = testWakuNode()
|
||||
await node.start()
|
||||
node.mountRelay()
|
||||
await node.mountRelay()
|
||||
|
||||
let restPort = Port(8546)
|
||||
let restAddress = ValidIpAddress.init("0.0.0.0")
|
||||
|
@ -39,7 +39,7 @@ suite "REST API - Relay":
|
||||
# Given
|
||||
let node = testWakuNode()
|
||||
await node.start()
|
||||
node.mountRelay()
|
||||
await node.mountRelay()
|
||||
|
||||
let restPort = Port(8546)
|
||||
let restAddress = ValidIpAddress.init("0.0.0.0")
|
||||
@ -84,7 +84,7 @@ suite "REST API - Relay":
|
||||
# Given
|
||||
let node = testWakuNode()
|
||||
await node.start()
|
||||
node.mountRelay()
|
||||
await node.mountRelay()
|
||||
|
||||
let restPort = Port(8546)
|
||||
let restAddress = ValidIpAddress.init("0.0.0.0")
|
||||
@ -132,7 +132,7 @@ suite "REST API - Relay":
|
||||
# Given
|
||||
let node = testWakuNode()
|
||||
await node.start()
|
||||
node.mountRelay()
|
||||
await node.mountRelay()
|
||||
|
||||
let restPort = Port(8546)
|
||||
let restAddress = ValidIpAddress.init("0.0.0.0")
|
||||
@ -183,7 +183,7 @@ suite "REST API - Relay":
|
||||
# Given
|
||||
let node = testWakuNode()
|
||||
await node.start()
|
||||
node.mountRelay()
|
||||
await node.mountRelay()
|
||||
|
||||
# RPC server setup
|
||||
let restPort = Port(8546)
|
||||
|
@ -85,7 +85,7 @@ procSuite "FloodSub":
|
||||
)
|
||||
|
||||
for node in nodes:
|
||||
node.mountRelay()
|
||||
await node.mountRelay()
|
||||
|
||||
await subscribeNodes(nodes)
|
||||
|
||||
|
@ -31,10 +31,11 @@ procSuite "WakuBridge":
|
||||
|
||||
let
|
||||
rng = keys.newRng()
|
||||
cryptoRng = crypto.newRng()
|
||||
|
||||
# Bridge
|
||||
nodev1Key = keys.KeyPair.random(rng[])
|
||||
nodev2Key = crypto.PrivateKey.random(Secp256k1, rng[])[]
|
||||
nodev2Key = crypto.PrivateKey.random(Secp256k1, cryptoRng[])[]
|
||||
bridge = WakuBridge.new(
|
||||
nodev1Key= nodev1Key,
|
||||
nodev1Address = localAddress(30302),
|
||||
@ -48,7 +49,7 @@ procSuite "WakuBridge":
|
||||
v1Node = setupTestNode(rng, Waku)
|
||||
|
||||
# Waku v2 node
|
||||
v2NodeKey = crypto.PrivateKey.random(Secp256k1, rng[])[]
|
||||
v2NodeKey = crypto.PrivateKey.random(Secp256k1, cryptoRng[])[]
|
||||
v2Node = WakuNode.new(v2NodeKey, ValidIpAddress.init("0.0.0.0"), Port(60002))
|
||||
|
||||
contentTopic = ContentTopic("/waku/1/0x1a2b3c4d/rfc26")
|
||||
@ -118,7 +119,7 @@ procSuite "WakuBridge":
|
||||
waitFor bridge.start()
|
||||
|
||||
waitFor v2Node.start()
|
||||
v2Node.mountRelay(@[DefaultBridgeTopic], triggerSelf = false)
|
||||
await v2Node.mountRelay(@[DefaultBridgeTopic], triggerSelf = false)
|
||||
|
||||
discard waitFor v1Node.rlpxConnect(newNode(bridge.nodev1.toENode()))
|
||||
waitFor v2Node.connectToNodes(@[bridge.nodev2.switch.peerInfo.toRemotePeerInfo()])
|
||||
|
@ -84,9 +84,9 @@ procSuite "Waku Discovery v5":
|
||||
node3.rng
|
||||
)
|
||||
|
||||
node1.mountRelay()
|
||||
node2.mountRelay()
|
||||
node3.mountRelay()
|
||||
await node1.mountRelay()
|
||||
await node2.mountRelay()
|
||||
await node3.mountRelay()
|
||||
|
||||
await allFutures([node1.start(), node2.start(), node3.start()])
|
||||
|
||||
|
@ -34,9 +34,9 @@ procSuite "Waku DNS Discovery":
|
||||
node3 = WakuNode.new(nodeKey3, bindIp, Port(60003))
|
||||
enr3 = node3.enr
|
||||
|
||||
node1.mountRelay()
|
||||
node2.mountRelay()
|
||||
node3.mountRelay()
|
||||
await node1.mountRelay()
|
||||
await node2.mountRelay()
|
||||
await node3.mountRelay()
|
||||
await allFutures([node1.start(), node2.start(), node3.start()])
|
||||
|
||||
# Build and sign tree
|
||||
@ -44,7 +44,7 @@ procSuite "Waku DNS Discovery":
|
||||
@[enr1, enr2, enr3], # ENR entries
|
||||
@[]).get() # No link entries
|
||||
|
||||
let treeKeys = keys.KeyPair.random(rng[])
|
||||
let treeKeys = keys.KeyPair.random(keys.newRng()[])
|
||||
|
||||
# Sign tree
|
||||
check:
|
||||
@ -68,7 +68,7 @@ procSuite "Waku DNS Discovery":
|
||||
nodeKey4 = crypto.PrivateKey.random(Secp256k1, rng[])[]
|
||||
node4 = WakuNode.new(nodeKey4, bindIp, Port(60004))
|
||||
|
||||
node4.mountRelay()
|
||||
await node4.mountRelay()
|
||||
await node4.start()
|
||||
|
||||
var wakuDnsDisc = WakuDnsDiscovery.init(location, resolver).get()
|
||||
|
@ -45,6 +45,7 @@ procSuite "Waku Filter":
|
||||
let
|
||||
serverPeerManager = PeerManager.new(serverSwitch)
|
||||
serverProto = WakuFilter.init(serverPeerManager, rng, dummyHandler)
|
||||
await serverProto.start()
|
||||
serverSwitch.mount(serverProto)
|
||||
|
||||
# Client
|
||||
@ -55,6 +56,7 @@ procSuite "Waku Filter":
|
||||
let
|
||||
clientPeerManager = PeerManager.new(clientSwitch)
|
||||
clientProto = WakuFilter.init(clientPeerManager, rng, handler)
|
||||
await clientProto.start()
|
||||
clientSwitch.mount(clientProto)
|
||||
|
||||
clientProto.setPeer(serverSwitch.peerInfo.toRemotePeerInfo())
|
||||
@ -93,6 +95,7 @@ procSuite "Waku Filter":
|
||||
let
|
||||
serverPeerManager = PeerManager.new(serverSwitch)
|
||||
serverProto = WakuFilter.init(serverPeerManager, rng, dummyHandler)
|
||||
await serverProto.start()
|
||||
serverSwitch.mount(serverProto)
|
||||
|
||||
# Client
|
||||
@ -103,6 +106,7 @@ procSuite "Waku Filter":
|
||||
let
|
||||
clientPeerManager = PeerManager.new(clientSwitch)
|
||||
clientProto = WakuFilter.init(clientPeerManager, rng, handler)
|
||||
await clientProto.start()
|
||||
clientSwitch.mount(clientProto)
|
||||
|
||||
clientProto.setPeer(serverSwitch.peerInfo.toRemotePeerInfo())
|
||||
@ -144,6 +148,7 @@ procSuite "Waku Filter":
|
||||
|
||||
## Given
|
||||
let clientProto = WakuFilter.init(PeerManager.new(clientSwitch), crypto.newRng(), dummyHandler)
|
||||
await clientProto.start()
|
||||
clientSwitch.mount(clientProto)
|
||||
|
||||
## When
|
||||
@ -168,6 +173,7 @@ procSuite "Waku Filter":
|
||||
let
|
||||
serverPeerManager = PeerManager.new(serverSwitch)
|
||||
serverProto = WakuFilter.init(serverPeerManager, rng, dummyHandler, timeout=1.seconds)
|
||||
await serverProto.start()
|
||||
serverSwitch.mount(serverProto)
|
||||
|
||||
# Client
|
||||
@ -178,6 +184,7 @@ procSuite "Waku Filter":
|
||||
let
|
||||
clientPeerManager = PeerManager.new(clientSwitch)
|
||||
clientProto = WakuFilter.init(clientPeerManager, rng, handler)
|
||||
await clientProto.start()
|
||||
clientSwitch.mount(clientProto)
|
||||
|
||||
clientProto.setPeer(serverSwitch.peerInfo.toRemotePeerInfo())
|
||||
@ -242,6 +249,7 @@ procSuite "Waku Filter":
|
||||
let
|
||||
serverPeerManager = PeerManager.new(serverSwitch)
|
||||
serverProto = WakuFilter.init(serverPeerManager, rng, dummyHandler, timeout=2.seconds)
|
||||
await serverProto.start()
|
||||
serverSwitch.mount(serverProto)
|
||||
|
||||
# Client
|
||||
@ -252,6 +260,7 @@ procSuite "Waku Filter":
|
||||
let
|
||||
clientPeerManager = PeerManager.new(clientSwitch)
|
||||
clientProto = WakuFilter.init(clientPeerManager, rng, handler)
|
||||
await clientProto.start()
|
||||
clientSwitch.mount(clientProto)
|
||||
|
||||
clientProto.setPeer(serverSwitch.peerInfo.toRemotePeerInfo())
|
||||
@ -289,6 +298,7 @@ procSuite "Waku Filter":
|
||||
# Start switch with same key as before
|
||||
var clientSwitch2 = newTestSwitch(some(clientKey), some(clientAddress))
|
||||
await clientSwitch2.start()
|
||||
await clientProto.start()
|
||||
clientSwitch2.mount(clientProto)
|
||||
|
||||
# If push succeeds after failure, the peer should removed from failed peers list
|
||||
|
@ -34,12 +34,15 @@ procSuite "Waku Keepalive":
|
||||
completionFut.complete(true)
|
||||
|
||||
await node1.start()
|
||||
node1.mountRelay()
|
||||
node1.mountLibp2pPing()
|
||||
await node1.mountRelay()
|
||||
await node1.mountLibp2pPing()
|
||||
|
||||
await node2.start()
|
||||
node2.mountRelay()
|
||||
node2.switch.mount(Ping.new(handler = pingHandler))
|
||||
await node2.mountRelay()
|
||||
|
||||
let pingProto = Ping.new(handler = pingHandler)
|
||||
await pingProto.start()
|
||||
node2.switch.mount(pingProto)
|
||||
|
||||
await node1.connectToNodes(@[node2.switch.peerInfo.toRemotePeerInfo()])
|
||||
|
||||
|
@ -49,6 +49,7 @@ procSuite "Waku Lightpush":
|
||||
proto = WakuLightPush.init(peerManager, rng, requestHandler)
|
||||
|
||||
proto.setPeer(listenSwitch.peerInfo.toRemotePeerInfo())
|
||||
waitFor proto.start()
|
||||
dialSwitch.mount(proto)
|
||||
|
||||
|
||||
@ -63,7 +64,7 @@ procSuite "Waku Lightpush":
|
||||
peerManager2 = PeerManager.new(listenSwitch)
|
||||
rng2 = crypto.newRng()
|
||||
proto2 = WakuLightPush.init(peerManager2, rng2, requestHandler2)
|
||||
|
||||
waitFor proto2.start()
|
||||
listenSwitch.mount(proto2)
|
||||
|
||||
|
||||
|
@ -44,7 +44,7 @@ procSuite "Waku rln relay":
|
||||
let index = MembershipIndex(5)
|
||||
|
||||
# -------- mount rln-relay in the off-chain mode
|
||||
node.mountRelay(@[RLNRELAY_PUBSUB_TOPIC])
|
||||
await node.mountRelay(@[RLNRELAY_PUBSUB_TOPIC])
|
||||
node.mountRlnRelayStatic(group = groupIDCommitments,
|
||||
memKeyPair = groupKeyPairs[index],
|
||||
memIndex = index,
|
||||
|
@ -346,7 +346,7 @@ procSuite "Waku-rln-relay":
|
||||
|
||||
# test ------------------------------
|
||||
# start rln-relay
|
||||
node.mountRelay(@[RLNRELAY_PUBSUB_TOPIC])
|
||||
await node.mountRelay(@[RLNRELAY_PUBSUB_TOPIC])
|
||||
node.mountRlnRelayStatic(group = group,
|
||||
memKeyPair = keypair.get(),
|
||||
memIndex = index,
|
||||
@ -427,7 +427,7 @@ procSuite "Waku-rln-relay":
|
||||
|
||||
# test ------------------------------
|
||||
# start rln-relay
|
||||
node.mountRelay(@[RLNRELAY_PUBSUB_TOPIC])
|
||||
await node.mountRelay(@[RLNRELAY_PUBSUB_TOPIC])
|
||||
discard await node.mountRlnRelayDynamic(ethClientAddr = EthClient,
|
||||
ethAccAddr = ethacc,
|
||||
ethAccountPrivKeyOpt = some(ethPrivKey),
|
||||
@ -480,7 +480,7 @@ procSuite "Waku-rln-relay":
|
||||
let (ethPrivKey, ethacc) = await createEthAccount()
|
||||
|
||||
# start rln-relay on the first node, leave rln-relay credentials empty
|
||||
node.mountRelay(@[RLNRELAY_PUBSUB_TOPIC])
|
||||
await node.mountRelay(@[RLNRELAY_PUBSUB_TOPIC])
|
||||
discard await node.mountRlnRelayDynamic(ethClientAddr = EthClient,
|
||||
ethAccAddr = ethacc,
|
||||
ethAccountPrivKeyOpt = some(ethPrivKey),
|
||||
@ -493,7 +493,7 @@ procSuite "Waku-rln-relay":
|
||||
|
||||
|
||||
# start rln-relay on the second node, leave rln-relay credentials empty
|
||||
node2.mountRelay(@[RLNRELAY_PUBSUB_TOPIC])
|
||||
await node2.mountRelay(@[RLNRELAY_PUBSUB_TOPIC])
|
||||
discard await node2.mountRlnRelayDynamic(ethClientAddr = EthClient,
|
||||
ethAccAddr = ethacc,
|
||||
ethAccountPrivKeyOpt = some(ethPrivKey),
|
||||
|
@ -53,6 +53,7 @@ proc newTestWakuStore(switch: Switch): WakuStore =
|
||||
store = WakuMessageStore.init(database).tryGet()
|
||||
proto = WakuStore.init(peerManager, rng, store)
|
||||
|
||||
waitFor proto.start()
|
||||
switch.mount(proto)
|
||||
|
||||
return proto
|
||||
@ -468,6 +469,7 @@ procSuite "Waku Store - fault tolerant store":
|
||||
let storePeer = peer.get(listenSwitch.peerInfo.toRemotePeerInfo())
|
||||
proto.setPeer(storePeer)
|
||||
|
||||
await proto.start()
|
||||
listenSwitch.mount(proto)
|
||||
|
||||
return (listenSwitch, dialSwitch, proto)
|
||||
|
@ -62,11 +62,11 @@ procSuite "Waku SWAP Accounting":
|
||||
|
||||
# Start nodes and mount protocols
|
||||
await node1.start()
|
||||
node1.mountSwap()
|
||||
node1.mountStore(persistMessages = true)
|
||||
await node1.mountSwap()
|
||||
await node1.mountStore(persistMessages = true)
|
||||
await node2.start()
|
||||
node2.mountSwap()
|
||||
node2.mountStore(persistMessages = true)
|
||||
await node2.mountSwap()
|
||||
await node2.mountStore(persistMessages = true)
|
||||
|
||||
await node2.wakuStore.handleMessage("/waku/2/default-waku/proto", message)
|
||||
|
||||
@ -112,11 +112,11 @@ procSuite "Waku SWAP Accounting":
|
||||
|
||||
# Start nodes and mount protocols
|
||||
await node1.start()
|
||||
node1.mountSwap(swapConfig)
|
||||
node1.mountStore(persistMessages = true)
|
||||
await node1.mountSwap(swapConfig)
|
||||
await node1.mountStore(persistMessages = true)
|
||||
await node2.start()
|
||||
node2.mountSwap(swapConfig)
|
||||
node2.mountStore(persistMessages = true)
|
||||
await node2.mountSwap(swapConfig)
|
||||
await node2.mountStore(persistMessages = true)
|
||||
|
||||
await node2.wakuStore.handleMessage("/waku/2/default-waku/proto", message)
|
||||
|
||||
|
@ -34,7 +34,7 @@ const KEY_PATH = sourceDir / "resources/test_key.pem"
|
||||
const CERT_PATH = sourceDir / "resources/test_cert.pem"
|
||||
|
||||
procSuite "WakuNode":
|
||||
let rng = keys.newRng()
|
||||
let rng = crypto.newRng()
|
||||
|
||||
asyncTest "Message published with content filter is retrievable":
|
||||
let
|
||||
@ -67,7 +67,7 @@ procSuite "WakuNode":
|
||||
|
||||
await node.start()
|
||||
|
||||
node.mountRelay()
|
||||
await node.mountRelay()
|
||||
|
||||
# Subscribe our node to the pubSubTopic where all chat data go onto.
|
||||
node.subscribe(pubSubTopic, relayHandler)
|
||||
@ -119,11 +119,11 @@ procSuite "WakuNode":
|
||||
|
||||
await allFutures([node1.start(), node2.start()])
|
||||
|
||||
node1.mountRelay()
|
||||
node2.mountRelay()
|
||||
await node1.mountRelay()
|
||||
await node2.mountRelay()
|
||||
|
||||
node1.mountFilter()
|
||||
node2.mountFilter()
|
||||
await node1.mountFilter()
|
||||
await node2.mountFilter()
|
||||
|
||||
# Subscribe our node to the pubSubTopic where all chat data go onto.
|
||||
node1.subscribe(pubSubTopic, relayHandler)
|
||||
@ -166,12 +166,12 @@ procSuite "WakuNode":
|
||||
otherFR = FilterRequest(contentFilters: @[ContentFilter(contentTopic: otherContentTopic)], subscribe: true)
|
||||
|
||||
await node1.start()
|
||||
node1.mountRelay()
|
||||
node1.mountFilter()
|
||||
await node1.mountRelay()
|
||||
await node1.mountFilter()
|
||||
|
||||
await node2.start()
|
||||
node2.mountRelay()
|
||||
node2.mountFilter()
|
||||
await node2.mountRelay()
|
||||
await node2.mountFilter()
|
||||
node2.wakuFilter.setPeer(node1.switch.peerInfo.toRemotePeerInfo())
|
||||
|
||||
var defaultComplete = newFuture[bool]()
|
||||
@ -237,12 +237,11 @@ procSuite "WakuNode":
|
||||
filterRequest = FilterRequest(contentFilters: @[ContentFilter(contentTopic: contentTopic)], subscribe: true)
|
||||
|
||||
await node1.start()
|
||||
node1.mountRelay()
|
||||
node1.mountFilter()
|
||||
await node1.mountRelay()
|
||||
await node1.mountFilter()
|
||||
|
||||
await node2.start()
|
||||
node2.mountRelay(relayMessages=false) # Do not start WakuRelay or subscribe to any topics
|
||||
node2.mountFilter()
|
||||
await node2.mountFilter()
|
||||
node2.wakuFilter.setPeer(node1.switch.peerInfo.toRemotePeerInfo())
|
||||
|
||||
check:
|
||||
@ -286,9 +285,9 @@ procSuite "WakuNode":
|
||||
var completionFut = newFuture[bool]()
|
||||
|
||||
await node1.start()
|
||||
node1.mountStore(persistMessages = true)
|
||||
await node1.mountStore(persistMessages = true)
|
||||
await node2.start()
|
||||
node2.mountStore(persistMessages = true)
|
||||
await node2.mountStore(persistMessages = true)
|
||||
|
||||
await node2.wakuStore.handleMessage("/waku/2/default-waku/proto", message)
|
||||
|
||||
@ -322,9 +321,9 @@ procSuite "WakuNode":
|
||||
var completionFut = newFuture[bool]()
|
||||
|
||||
await node1.start()
|
||||
node1.mountFilter()
|
||||
await node1.mountFilter()
|
||||
await node2.start()
|
||||
node2.mountFilter()
|
||||
await node2.mountFilter()
|
||||
|
||||
node1.wakuFilter.setPeer(node2.switch.peerInfo.toRemotePeerInfo())
|
||||
|
||||
@ -363,12 +362,12 @@ procSuite "WakuNode":
|
||||
storeComplFut = newFuture[bool]()
|
||||
|
||||
await node1.start()
|
||||
node1.mountStore(persistMessages = true)
|
||||
node1.mountFilter()
|
||||
await node1.mountStore(persistMessages = true)
|
||||
await node1.mountFilter()
|
||||
|
||||
await node2.start()
|
||||
node2.mountStore(persistMessages = true)
|
||||
node2.mountFilter()
|
||||
await node2.mountStore(persistMessages = true)
|
||||
await node2.mountFilter()
|
||||
|
||||
node2.wakuFilter.setPeer(node1.switch.peerInfo.toRemotePeerInfo())
|
||||
node1.wakuStore.setPeer(node2.switch.peerInfo.toRemotePeerInfo())
|
||||
@ -422,13 +421,13 @@ procSuite "WakuNode":
|
||||
message = WakuMessage(payload: payload, contentTopic: contentTopic)
|
||||
|
||||
await node1.start()
|
||||
node1.mountRelay(@[pubSubTopic])
|
||||
await node1.mountRelay(@[pubSubTopic])
|
||||
|
||||
await node2.start()
|
||||
node2.mountRelay(@[pubSubTopic])
|
||||
await node2.mountRelay(@[pubSubTopic])
|
||||
|
||||
await node3.start()
|
||||
node3.mountRelay(@[pubSubTopic])
|
||||
await node3.mountRelay(@[pubSubTopic])
|
||||
|
||||
await node1.connectToNodes(@[node2.switch.peerInfo.toRemotePeerInfo()])
|
||||
await node3.connectToNodes(@[node2.switch.peerInfo.toRemotePeerInfo()])
|
||||
@ -472,13 +471,13 @@ procSuite "WakuNode":
|
||||
# Setup node 1 with stable codec "/vac/waku/relay/2.0.0"
|
||||
|
||||
await node1.start()
|
||||
node1.mountRelay(@[pubSubTopic])
|
||||
await node1.mountRelay(@[pubSubTopic])
|
||||
node1.wakuRelay.codec = "/vac/waku/relay/2.0.0"
|
||||
|
||||
# Setup node 2 with beta codec "/vac/waku/relay/2.0.0-beta2"
|
||||
|
||||
await node2.start()
|
||||
node2.mountRelay(@[pubSubTopic])
|
||||
await node2.mountRelay(@[pubSubTopic])
|
||||
node2.wakuRelay.codec = "/vac/waku/relay/2.0.0-beta2"
|
||||
|
||||
check:
|
||||
@ -528,8 +527,8 @@ procSuite "WakuNode":
|
||||
node2PeerId = $(node2.switch.peerInfo.peerId)
|
||||
node2Dns4Addr = "/dns4/localhost/tcp/60002/p2p/" & node2PeerId
|
||||
|
||||
node1.mountRelay()
|
||||
node2.mountRelay()
|
||||
await node1.mountRelay()
|
||||
await node2.mountRelay()
|
||||
|
||||
await allFutures([node1.start(), node2.start()])
|
||||
|
||||
@ -570,13 +569,13 @@ procSuite "WakuNode":
|
||||
|
||||
# start all the nodes
|
||||
await node1.start()
|
||||
node1.mountRelay(@[pubSubTopic])
|
||||
await node1.mountRelay(@[pubSubTopic])
|
||||
|
||||
await node2.start()
|
||||
node2.mountRelay(@[pubSubTopic])
|
||||
await node2.mountRelay(@[pubSubTopic])
|
||||
|
||||
await node3.start()
|
||||
node3.mountRelay(@[pubSubTopic])
|
||||
await node3.mountRelay(@[pubSubTopic])
|
||||
|
||||
await node1.connectToNodes(@[node2.switch.peerInfo.toRemotePeerInfo()])
|
||||
await node3.connectToNodes(@[node2.switch.peerInfo.toRemotePeerInfo()])
|
||||
@ -648,7 +647,7 @@ procSuite "WakuNode":
|
||||
|
||||
await node1.start()
|
||||
|
||||
node1.mountRelay()
|
||||
await node1.mountRelay()
|
||||
|
||||
check:
|
||||
GossipSub(node1.wakuRelay).heartbeatFut.isNil == false
|
||||
@ -660,7 +659,7 @@ procSuite "WakuNode":
|
||||
node2 = WakuNode.new(nodeKey2, ValidIpAddress.init("0.0.0.0"),
|
||||
Port(60002))
|
||||
|
||||
node2.mountRelay()
|
||||
await node2.mountRelay()
|
||||
|
||||
check:
|
||||
# Relay has not yet started as node has not yet started
|
||||
@ -692,17 +691,16 @@ procSuite "WakuNode":
|
||||
|
||||
# Light node, only lightpush
|
||||
await node1.start()
|
||||
node1.mountRelay(relayMessages=false) # Mount WakuRelay, but do not start or subscribe to any topics
|
||||
node1.mountLightPush()
|
||||
await node1.mountLightPush()
|
||||
|
||||
# Intermediate node
|
||||
await node2.start()
|
||||
node2.mountRelay(@[pubSubTopic])
|
||||
node2.mountLightPush()
|
||||
await node2.mountRelay(@[pubSubTopic])
|
||||
await node2.mountLightPush()
|
||||
|
||||
# Receiving node
|
||||
await node3.start()
|
||||
node3.mountRelay(@[pubSubTopic])
|
||||
await node3.mountRelay(@[pubSubTopic])
|
||||
|
||||
discard await node1.peerManager.dialPeer(node2.switch.peerInfo.toRemotePeerInfo(), WakuLightPushCodec)
|
||||
await sleepAsync(5.seconds)
|
||||
@ -757,9 +755,9 @@ procSuite "WakuNode":
|
||||
message = WakuMessage(payload: "hello world".toBytes(), contentTopic: contentTopic)
|
||||
|
||||
await node1.start()
|
||||
node1.mountStore(persistMessages = true)
|
||||
await node1.mountStore(persistMessages = true)
|
||||
await node2.start()
|
||||
node2.mountStore(persistMessages = true)
|
||||
await node2.mountStore(persistMessages = true)
|
||||
|
||||
await node2.wakuStore.handleMessage("/waku/2/default-waku/proto", message)
|
||||
|
||||
@ -797,9 +795,9 @@ procSuite "WakuNode":
|
||||
var completionFut = newFuture[bool]()
|
||||
|
||||
await node1.start()
|
||||
node1.mountStore(persistMessages = true, store = store)
|
||||
await node1.mountStore(persistMessages = true, store = store)
|
||||
await node2.start()
|
||||
node2.mountStore(persistMessages = true)
|
||||
await node2.mountStore(persistMessages = true)
|
||||
|
||||
await node2.wakuStore.handleMessage(DefaultTopic, msg1)
|
||||
await node2.wakuStore.handleMessage(DefaultTopic, msg2)
|
||||
@ -852,15 +850,15 @@ procSuite "WakuNode":
|
||||
|
||||
# Node with connection limit set to 1
|
||||
await node1.start()
|
||||
node1.mountRelay()
|
||||
await node1.mountRelay()
|
||||
|
||||
# Remote node 1
|
||||
await node2.start()
|
||||
node2.mountRelay()
|
||||
await node2.mountRelay()
|
||||
|
||||
# Remote node 2
|
||||
await node3.start()
|
||||
node3.mountRelay()
|
||||
await node3.mountRelay()
|
||||
|
||||
discard await node1.peerManager.dialPeer(node2.switch.peerInfo.toRemotePeerInfo(), WakuRelayCodec)
|
||||
await sleepAsync(3.seconds)
|
||||
@ -888,10 +886,10 @@ procSuite "WakuNode":
|
||||
message = WakuMessage(payload: payload, contentTopic: contentTopic)
|
||||
|
||||
await node1.start()
|
||||
node1.mountRelay(@[pubSubTopic])
|
||||
await node1.mountRelay(@[pubSubTopic])
|
||||
|
||||
await node2.start()
|
||||
node2.mountRelay(@[pubSubTopic])
|
||||
await node2.mountRelay(@[pubSubTopic])
|
||||
|
||||
await node1.connectToNodes(@[node2.switch.peerInfo.toRemotePeerInfo()])
|
||||
|
||||
@ -933,10 +931,10 @@ procSuite "WakuNode":
|
||||
message = WakuMessage(payload: payload, contentTopic: contentTopic)
|
||||
|
||||
await node1.start()
|
||||
node1.mountRelay(@[pubSubTopic])
|
||||
await node1.mountRelay(@[pubSubTopic])
|
||||
|
||||
await node2.start()
|
||||
node2.mountRelay(@[pubSubTopic])
|
||||
await node2.mountRelay(@[pubSubTopic])
|
||||
|
||||
await node1.connectToNodes(@[node2.switch.peerInfo.toRemotePeerInfo()])
|
||||
|
||||
@ -977,10 +975,10 @@ procSuite "WakuNode":
|
||||
message = WakuMessage(payload: payload, contentTopic: contentTopic)
|
||||
|
||||
await node1.start()
|
||||
node1.mountRelay(@[pubSubTopic])
|
||||
await node1.mountRelay(@[pubSubTopic])
|
||||
|
||||
await node2.start()
|
||||
node2.mountRelay(@[pubSubTopic])
|
||||
await node2.mountRelay(@[pubSubTopic])
|
||||
|
||||
#delete websocket peer address
|
||||
# TODO: a better way to find the index - this is too brittle
|
||||
@ -1025,10 +1023,10 @@ procSuite "WakuNode":
|
||||
message = WakuMessage(payload: payload, contentTopic: contentTopic)
|
||||
|
||||
await node1.start()
|
||||
node1.mountRelay(@[pubSubTopic])
|
||||
await node1.mountRelay(@[pubSubTopic])
|
||||
|
||||
await node2.start()
|
||||
node2.mountRelay(@[pubSubTopic])
|
||||
await node2.mountRelay(@[pubSubTopic])
|
||||
|
||||
await node1.connectToNodes(@[node2.switch.peerInfo.toRemotePeerInfo()])
|
||||
|
||||
@ -1078,10 +1076,10 @@ procSuite "WakuNode":
|
||||
message = WakuMessage(payload: payload, contentTopic: contentTopic)
|
||||
|
||||
await node1.start()
|
||||
node1.mountRelay(@[pubSubTopic])
|
||||
await node1.mountRelay(@[pubSubTopic])
|
||||
|
||||
await node2.start()
|
||||
node2.mountRelay(@[pubSubTopic])
|
||||
await node2.mountRelay(@[pubSubTopic])
|
||||
|
||||
await node1.connectToNodes(@[node2.switch.peerInfo.toRemotePeerInfo()])
|
||||
|
||||
|
@ -47,7 +47,7 @@ procSuite "WakuNode - RLN relay":
|
||||
|
||||
# set up three nodes
|
||||
# node1
|
||||
node1.mountRelay(@[rlnRelayPubSubTopic])
|
||||
await node1.mountRelay(@[rlnRelayPubSubTopic])
|
||||
let (groupOpt1, memKeyPairOpt1, memIndexOpt1) = rlnRelayStaticSetUp(1) # set up rln relay inputs
|
||||
# mount rlnrelay in off-chain mode
|
||||
node1.mountRlnRelayStatic(group = groupOpt1.get(),
|
||||
@ -58,7 +58,7 @@ procSuite "WakuNode - RLN relay":
|
||||
await node1.start()
|
||||
|
||||
# node 2
|
||||
node2.mountRelay(@[rlnRelayPubSubTopic])
|
||||
await node2.mountRelay(@[rlnRelayPubSubTopic])
|
||||
let (groupOpt2, memKeyPairOpt2, memIndexOpt2) = rlnRelayStaticSetUp(2) # set up rln relay inputs
|
||||
# mount rlnrelay in off-chain mode
|
||||
node2.mountRlnRelayStatic(group = groupOpt2.get(),
|
||||
@ -69,7 +69,7 @@ procSuite "WakuNode - RLN relay":
|
||||
await node2.start()
|
||||
|
||||
# node 3
|
||||
node3.mountRelay(@[rlnRelayPubSubTopic])
|
||||
await node3.mountRelay(@[rlnRelayPubSubTopic])
|
||||
let (groupOpt3, memKeyPairOpt3, memIndexOpt3) = rlnRelayStaticSetUp(3) # set up rln relay inputs
|
||||
# mount rlnrelay in off-chain mode
|
||||
node3.mountRlnRelayStatic(group = groupOpt3.get(),
|
||||
@ -133,7 +133,7 @@ procSuite "WakuNode - RLN relay":
|
||||
|
||||
# set up three nodes
|
||||
# node1
|
||||
node1.mountRelay(@[rlnRelayPubSubTopic])
|
||||
await node1.mountRelay(@[rlnRelayPubSubTopic])
|
||||
let (groupOpt1, memKeyPairOpt1, memIndexOpt1) = rlnRelayStaticSetUp(1) # set up rln relay inputs
|
||||
# mount rlnrelay in off-chain mode
|
||||
node1.mountRlnRelayStatic(group = groupOpt1.get(),
|
||||
@ -144,7 +144,7 @@ procSuite "WakuNode - RLN relay":
|
||||
await node1.start()
|
||||
|
||||
# node 2
|
||||
node2.mountRelay(@[rlnRelayPubSubTopic])
|
||||
await node2.mountRelay(@[rlnRelayPubSubTopic])
|
||||
let (groupOpt2, memKeyPairOpt2, memIndexOpt2) = rlnRelayStaticSetUp(2) # set up rln relay inputs
|
||||
# mount rlnrelay in off-chain mode
|
||||
node2.mountRlnRelayStatic(group = groupOpt2.get(),
|
||||
@ -155,7 +155,7 @@ procSuite "WakuNode - RLN relay":
|
||||
await node2.start()
|
||||
|
||||
# node 3
|
||||
node3.mountRelay(@[rlnRelayPubSubTopic])
|
||||
await node3.mountRelay(@[rlnRelayPubSubTopic])
|
||||
let (groupOpt3, memKeyPairOpt3, memIndexOpt3) = rlnRelayStaticSetUp(3) # set up rln relay inputs
|
||||
# mount rlnrelay in off-chain mode
|
||||
node3.mountRlnRelayStatic(group = groupOpt3.get(),
|
||||
@ -237,7 +237,7 @@ procSuite "WakuNode - RLN relay":
|
||||
|
||||
# set up three nodes
|
||||
# node1
|
||||
node1.mountRelay(@[rlnRelayPubSubTopic])
|
||||
await node1.mountRelay(@[rlnRelayPubSubTopic])
|
||||
let (groupOpt1, memKeyPairOpt1, memIndexOpt1) = rlnRelayStaticSetUp(1) # set up rln relay inputs
|
||||
# mount rlnrelay in off-chain mode
|
||||
node1.mountRlnRelayStatic(group = groupOpt1.get(),
|
||||
@ -248,7 +248,7 @@ procSuite "WakuNode - RLN relay":
|
||||
await node1.start()
|
||||
|
||||
# node 2
|
||||
node2.mountRelay(@[rlnRelayPubSubTopic])
|
||||
await node2.mountRelay(@[rlnRelayPubSubTopic])
|
||||
let (groupOpt2, memKeyPairOpt2, memIndexOpt2) = rlnRelayStaticSetUp(2) # set up rln relay inputs
|
||||
# mount rlnrelay in off-chain mode
|
||||
node2.mountRlnRelayStatic(group = groupOpt2.get(),
|
||||
@ -259,7 +259,7 @@ procSuite "WakuNode - RLN relay":
|
||||
await node2.start()
|
||||
|
||||
# node 3
|
||||
node3.mountRelay(@[rlnRelayPubSubTopic])
|
||||
await node3.mountRelay(@[rlnRelayPubSubTopic])
|
||||
let (groupOpt3, memKeyPairOpt3, memIndexOpt3) = rlnRelayStaticSetUp(3) # set up rln relay inputs
|
||||
# mount rlnrelay in off-chain mode
|
||||
node3.mountRlnRelayStatic(group = groupOpt3.get(),
|
||||
|
vendor/news/src/news.nim (vendored, 172 changed lines)
@ -1,6 +1,6 @@
|
||||
import
|
||||
strutils, streams, random, base64, uri, strformat, nativesockets, oids,
|
||||
strtabs, std/sha1, net, httpcore
|
||||
import std/[
|
||||
base64, deques, httpcore, nativesockets, net, oids, random, sha1, streams,
|
||||
strformat, strtabs, strutils, uri]
|
||||
|
||||
when not declaredInScope(newsUseChronos):
|
||||
# Currently chronos is second class citizen. To use this library in chronos-based
|
||||
@ -62,13 +62,60 @@ when newsUseChronos:
|
||||
t.closeWait()
|
||||
|
||||
else:
|
||||
import httpcore, asyncdispatch, asyncnet, asynchttpserver
|
||||
import std/[asyncdispatch, asynchttpserver, asyncnet]
|
||||
type Transport = AsyncSocket
|
||||
|
||||
const CRLF = "\c\l"
|
||||
const GUID = "258EAFA5-E914-47DA-95CA-C5AB0DC85B11"
|
||||
|
||||
type
|
||||
Opcode* = enum
|
||||
## 4 bits. Defines the interpretation of the "Payload data".
|
||||
Cont = 0x0 ## denotes a continuation frame
|
||||
Text = 0x1 ## denotes a text frame
|
||||
Binary = 0x2 ## denotes a binary frame
|
||||
# 3-7 are reserved for further non-control frames
|
||||
Close = 0x8 ## denotes a connection close
|
||||
Ping = 0x9 ## denotes a ping
|
||||
Pong = 0xa ## denotes a pong
|
||||
# B-F are reserved for further control frames
|
||||
|
||||
#[
|
||||
0 1 2 3
|
||||
0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
|
||||
+-+-+-+-+-------+-+-------------+-------------------------------+
|
||||
|F|R|R|R| opcode|M| Payload len | Extended payload length |
|
||||
|I|S|S|S| (4) |A| (7) | (16/64) |
|
||||
|N|V|V|V| |S| | (if payload len==126/127) |
|
||||
| |1|2|3| |K| | |
|
||||
+-+-+-+-+-------+-+-------------+ - - - - - - - - - - - - - - - +
|
||||
| Extended payload length continued, if payload len == 127 |
|
||||
+ - - - - - - - - - - - - - - - +-------------------------------+
|
||||
| |Masking-key, if MASK set to 1 |
|
||||
+-------------------------------+-------------------------------+
|
||||
| Masking-key (continued) | Payload Data |
|
||||
+-------------------------------- - - - - - - - - - - - - - - - +
|
||||
: Payload Data continued ... :
|
||||
+ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +
|
||||
| Payload Data continued ... |
|
||||
+---------------------------------------------------------------+
|
||||
]#
|
||||
Frame* = tuple
|
||||
fin: bool ## Indicates that this is the final fragment in a message.
|
||||
rsv1: bool ## MUST be 0 unless negotiated that defines meanings
|
||||
rsv2: bool
|
||||
rsv3: bool
|
||||
opcode: Opcode ## Defines the interpretation of the "Payload data".
|
||||
mask: bool ## Defines whether the "Payload data" is masked.
|
||||
data: string ## Payload data
|
||||
|
||||
Packet* = object
|
||||
case kind*: Opcode
|
||||
of Text, Binary:
|
||||
data*: string
|
||||
else:
|
||||
discard
|
||||
|
||||
ReadyState* = enum
|
||||
Connecting = 0 # The connection is not yet open.
|
||||
Open = 1 # The connection is open and ready to communicate.
|
||||
@ -82,6 +129,8 @@ type
|
||||
protocol*: string
|
||||
readyState*: ReadyState
|
||||
maskFrames*: bool
|
||||
sendFut: Future[void]
|
||||
sendQueue: Deque[tuple[text: string, opcode: Opcode, fut: Future[void]]]
|
||||
|
||||
template `[]`(value: uint8, index: int): bool =
|
||||
## get bits from uint8, uint8[2] gets 2nd bit
|
||||
@ -143,13 +192,14 @@ proc close*(ws: WebSocket) =
|
||||
if not ws.transp.isClosed:
|
||||
ws.transp.close()
|
||||
|
||||
proc closeWait*(ws: WebSocket) {.async.} =
|
||||
## close the socket
|
||||
ws.readyState = Closed
|
||||
if not ws.transp.isClosed:
|
||||
await ws.transp.closeWait()
|
||||
when newsUseChronos:
|
||||
proc closeWait*(ws: WebSocket) {.async.} =
|
||||
## close the socket
|
||||
ws.readyState = Closed
|
||||
if not ws.transp.isClosed:
|
||||
await ws.transp.closeWait()
|
||||
|
||||
when not newsUseChronos:
|
||||
else:
|
||||
proc newWebSocket*(req: Request): Future[WebSocket] {.async.} =
|
||||
## Creates a new socket from a request
|
||||
var ws = WebSocket()
|
||||
@ -243,7 +293,7 @@ proc newWebSocket*(url: string, headers: StringTableRef = nil,
|
||||
ws.transp.writer = newAsyncStreamWriter(tr)
|
||||
|
||||
if uri.scheme == "wss":
|
||||
let s = newTlsClientAsyncStream(ws.transp.reader, ws.transp.writer, serverName = uri.hostName)
|
||||
let s = newTLSClientAsyncStream(ws.transp.reader, ws.transp.writer, serverName = uri.hostname)
|
||||
ws.transp.reader = s.reader
|
||||
ws.transp.writer = s.writer
|
||||
|
||||
@ -300,54 +350,6 @@ proc newWebSocket*(url: string, headers: StringTableRef = nil,
|
||||
|
||||
return ws
|
||||
|
||||
type
|
||||
Opcode* = enum
|
||||
## 4 bits. Defines the interpretation of the "Payload data".
|
||||
Cont = 0x0 ## denotes a continuation frame
|
||||
Text = 0x1 ## denotes a text frame
|
||||
Binary = 0x2 ## denotes a binary frame
|
||||
# 3-7 are reserved for further non-control frames
|
||||
Close = 0x8 ## denotes a connection close
|
||||
Ping = 0x9 ## denotes a ping
|
||||
Pong = 0xa ## denotes a pong
|
||||
# B-F are reserved for further control frames
|
||||
|
||||
#[
|
||||
0 1 2 3
|
||||
0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
|
||||
+-+-+-+-+-------+-+-------------+-------------------------------+
|
||||
|F|R|R|R| opcode|M| Payload len | Extended payload length |
|
||||
|I|S|S|S| (4) |A| (7) | (16/64) |
|
||||
|N|V|V|V| |S| | (if payload len==126/127) |
|
||||
| |1|2|3| |K| | |
|
||||
+-+-+-+-+-------+-+-------------+ - - - - - - - - - - - - - - - +
|
||||
| Extended payload length continued, if payload len == 127 |
|
||||
+ - - - - - - - - - - - - - - - +-------------------------------+
|
||||
| |Masking-key, if MASK set to 1 |
|
||||
+-------------------------------+-------------------------------+
|
||||
| Masking-key (continued) | Payload Data |
|
||||
+-------------------------------- - - - - - - - - - - - - - - - +
|
||||
: Payload Data continued ... :
|
||||
+ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +
|
||||
| Payload Data continued ... |
|
||||
+---------------------------------------------------------------+
|
||||
]#
|
||||
Frame* = tuple
|
||||
fin: bool ## Indicates that this is the final fragment in a message.
|
||||
rsv1: bool ## MUST be 0 unless negotiated that defines meanings
|
||||
rsv2: bool
|
||||
rsv3: bool
|
||||
opcode: Opcode ## Defines the interpretation of the "Payload data".
|
||||
mask: bool ## Defines whether the "Payload data" is masked.
|
||||
data: string ## Payload data
|
||||
|
||||
Packet* = object
|
||||
case kind*: Opcode
|
||||
of Text, Binary:
|
||||
data*: string
|
||||
else:
|
||||
discard
|
||||
|
||||
proc encodeFrame*(f: Frame): string =
|
||||
## Encodes a frame into a string buffer
|
||||
## See https://tools.ietf.org/html/rfc6455#section-5.2
|
||||
@ -407,8 +409,7 @@ proc encodeFrame*(f: Frame): string =
|
||||
ret.setPosition(0)
|
||||
return ret.readAll()
|
||||
|
||||
|
||||
proc send*(ws: WebSocket, text: string, opcode = Opcode.Text): Future[void] {.async.} =
|
||||
proc doSend(ws: WebSocket, text: string, opcode: Opcode): Future[void] {.async.} =
|
||||
try:
|
||||
## write data to WebSocket
|
||||
var frame = encodeFrame((
|
||||
@ -439,6 +440,55 @@ proc send*(ws: WebSocket, text: string, opcode = Opcode.Text): Future[void] {.as
|
||||
raise newException(WebSocketError,
|
||||
&"Could not send packet because of [{e.name}]: {e.msg}")
|
||||
|
||||
proc continueSending(ws: WebSocket) =
|
||||
if ws.sendQueue.len <= 0:
|
||||
return
|
||||
|
||||
let
|
||||
task = ws.sendQueue.popFirst()
|
||||
fut = task.fut
|
||||
sendFut = ws.doSend(task.text, task.opcode)
|
||||
ws.sendFut = sendFut
|
||||
|
||||
proc doHandleSent() =
|
||||
if ws.sendFut.failed:
|
||||
fut.fail(ws.sendFut.error)
|
||||
else:
|
||||
fut.complete()
|
||||
ws.sendFut = nil
|
||||
ws.continueSending()
|
||||
|
||||
when newsUseChronos:
|
||||
proc handleSent(future: pointer) =
|
||||
doHandleSent()
|
||||
else:
|
||||
proc handleSent() =
|
||||
doHandleSent()
|
||||
|
||||
ws.sendFut.addCallback(handleSent)
|
||||
|
||||
proc send*(ws: WebSocket, text: string, opcode = Opcode.Text): Future[void] =
|
||||
if ws.sendFut != nil:
|
||||
let fut = newFuture[void]("send")
|
||||
ws.sendQueue.addLast (text: text, opcode: opcode, fut: fut)
|
||||
return fut
|
||||
|
||||
ws.sendFut = ws.doSend(text, opcode)
|
||||
|
||||
proc doHandleSent() =
|
||||
ws.sendFut = nil
|
||||
ws.continueSending()
|
||||
|
||||
when newsUseChronos:
|
||||
proc handleSent(future: pointer) =
|
||||
doHandleSent()
|
||||
else:
|
||||
proc handleSent() =
|
||||
doHandleSent()
|
||||
|
||||
ws.sendFut.addCallback(handleSent)
|
||||
ws.sendFut
|
||||
|
||||
proc send*(ws: WebSocket, packet: Packet): Future[void] =
|
||||
if packet.kind == Text or packet.kind == Binary:
|
||||
return ws.send(packet.data, packet.kind)
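A short usage sketch of the queued send path above (assuming this is the vendored `news` module and that a `WebSocket` has already been opened): a `send` issued while another frame is still being written is parked in `sendQueue` and flushed by `continueSending` once the in-flight future completes, so frames never interleave on the wire.

```nim
import asyncdispatch   # or chronos, when newsUseChronos is defined
import news            # assumed module name for this vendored websocket library

proc chatter(ws: WebSocket) {.async.} =
  # Both calls return futures immediately; the second frame waits in sendQueue
  # until the first write has been flushed.
  let first = ws.send("hello", Opcode.Text)
  let second = ws.send("world", Opcode.Text)
  await first
  await second
```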
|
||||
|
5
vendor/nim-bearssl/.github/workflows/ci.yml
vendored
@ -155,12 +155,7 @@ jobs:
|
||||
- name: Run tests
|
||||
shell: bash
|
||||
run: |
|
||||
if [[ "${{ matrix.target.os }}" == "windows" ]]; then
|
||||
# https://github.com/status-im/nimbus-eth2/issues/3121
|
||||
export NIMFLAGS="-d:nimRawSetjmp"
|
||||
fi
|
||||
nim --version
|
||||
nimble --version
|
||||
nimble install -y --depsOnly
|
||||
env TEST_LANG="c" nimble test
|
||||
env TEST_LANG="cpp" nimble test
|
||||
|
3
vendor/nim-bearssl/.gitignore
vendored
@ -1,2 +1,5 @@
|
||||
nimcache/
|
||||
*.exe
|
||||
gen
|
||||
nimble.develop
|
||||
nimble.paths
|
||||
|
45
vendor/nim-bearssl/README.md
vendored
@ -7,15 +7,56 @@
|
||||
[![License: Apache](https://img.shields.io/badge/License-Apache%202.0-blue.svg)](https://opensource.org/licenses/Apache-2.0)
|
||||
![Github action](https://github.com/status-im/nim-bearssl/workflows/CI/badge.svg)
|
||||
|
||||
[BearSSL](https://bearssl.org/) wrapper.
|
||||
Simple [BearSSL](https://bearssl.org/) wrapper for Nim, fully integrated with the Nim build system.
|
||||
|
||||
Applications using `nim-bearssl` are fully stand-alone, needing no additional DLL or shared library.
|
||||
|
||||
## Usage
|
||||
|
||||
The library is organised into two parts:
|
||||
|
||||
* `bearssl/` (except for `abi`) exposes thin wrappers around the raw ABI making the functions more convenient to use in Nim
|
||||
* `bearssl/abi` exposes the raw C functions of bearssl
|
||||
|
||||
For each `bearssl` header file, a corresponding Nim file exists - `bearssl_rand.h` ~ `bearssl/rand.nim`.
|
||||
|
||||
```nim
|
||||
# You can import the whole library
|
||||
import bearssl
|
||||
|
||||
# ... or simply parts thereof, which can save compilation time
|
||||
import bearssl/rand
|
||||
```
|
||||
|
||||
In general, the mappings follow the conventions of the original BearSSL library closely. The following conventions exist:
|
||||
|
||||
* the `br_` prefix has been dropped throughout
|
||||
* functions taking a `XxxContext*` use `var` and not `ptr`
|
||||
* `byte` replaces `unsigned char*` - this type is predominantly used for byte buffers
|
||||
* `uint` used instead of `csize_t` - these are the same type in Nim, but spelled more conveniently
|
||||
* Canonical nim code will have to be careful when converting existing `int` lengths, looking out for out-of-range values
|
||||
|
||||
In addition to the raw `C`-like api, convenience functions are added where applicable - these follow a similar set of conventions:
|
||||
|
||||
* named after the function they simplify, but take advantage of types and overload support in Nim
|
||||
* help turn pointers and bytes into Nim types
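To make these conventions concrete, a hedged sketch of seeding and using the HMAC-DRBG through the wrapper; the names `hmacDrbgInit`, `hmacDrbgGenerate` and `sha256Vtable` are assumptions derived from `br_hmac_drbg_init`, `br_hmac_drbg_generate` and `br_sha256_vtable` by the prefix rule above.

```nim
import bearssl/[hash, rand]

var rng: HmacDrbgContext
let seed = [byte 0x01, 0x02, 0x03, 0x04]        # use a real entropy source in practice
hmacDrbgInit(rng, addr sha256Vtable, unsafeAddr seed[0], uint(seed.len))

var key: array[32, byte]
hmacDrbgGenerate(rng, addr key[0], uint(key.len))
```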
|
||||
|
||||
## Installation
|
||||
|
||||
You can install the development version of the library through nimble with the following command
|
||||
You can install the development version of the library through nimble with the following command:
|
||||
|
||||
```
|
||||
nimble install bearssl
|
||||
```
|
||||
|
||||
`BearSSL` itself is compiled as part of your project - there is no need to install any third-party libraries.
|
||||
|
||||
## Developer notes
|
||||
|
||||
When updating the library, `c2nim` is used via `regenerate.sh` to update the RAW ABI files. Manual editing is then needed to make a few adjustments to the mapping, after which the files can be generated.
|
||||
|
||||
When adding new convenience functions, these should be added to `bearssl/` instead of the generated files.
|
||||
|
||||
## License
|
||||
|
||||
Licensed and distributed under either of
|
||||
|
26
vendor/nim-bearssl/bearssl.nim
vendored
@ -1,12 +1,32 @@
|
||||
## Nim-BearSSL
|
||||
## Copyright (c) 2018 Status Research & Development GmbH
|
||||
## Copyright (c) 2018-2022 Status Research & Development GmbH
|
||||
## Licensed under either of
|
||||
## * Apache License, version 2.0, ([LICENSE-APACHE](LICENSE-APACHE))
|
||||
## * MIT license ([LICENSE-MIT](LICENSE-MIT))
|
||||
## at your option.
|
||||
## This file may not be copied, modified, or distributed except according to
|
||||
## those terms.
|
||||
import bearssl/[decls, errors]
|
||||
export decls, errors
|
||||
|
||||
when defined(bearsslSplitAbi):
|
||||
# This will become default in the future - we cannot use it now because there
|
||||
# are duplicate symbols in `decls.nim` - the new ABI can already be accessed
|
||||
# using the more specific imports (`import bearssl/ssl`)
|
||||
import
|
||||
./bearssl/[
|
||||
aead, blockx, brssl, ec, errors, hash, hmac, kdf, pem, prf, rand, rsa,
|
||||
ssl, x509],
|
||||
./bearssl/abi/[cacert, config]
|
||||
|
||||
export
|
||||
aead, blockx, brssl, ec, errors, hash, hmac, kdf, pem, prf, rand, rsa,
|
||||
ssl, x509,
|
||||
cacert, config
|
||||
|
||||
else:
|
||||
import
|
||||
./bearssl/[cacert, errors, decls] # Deprecated, will be removed in the future
|
||||
|
||||
export cacert, errors, decls
|
||||
|
||||
|
||||
when defined(nimHasUsed): {.used.}
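A hypothetical usage note on the switch above: until the duplicate symbols in `decls.nim` are resolved, the split ABI stays opt-in, either via the define or by importing the specific new-style module directly.

```nim
# Option 1: keep the umbrella import and compile with: nim c -d:bearsslSplitAbi app.nim
import bearssl

# Option 2: sidestep the switch and import the new-style module directly.
import bearssl/rand
```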
|
||||
|
2
vendor/nim-bearssl/bearssl.nimble
vendored
@ -15,7 +15,7 @@ requires "nim >= 1.2.0",
|
||||
proc test(env, path: string) =
|
||||
# Compilation language is controlled by TEST_LANG
|
||||
exec "nim " & getEnv("TEST_LANG", "c") & " " & getEnv("NIMFLAGS") & " " & env &
|
||||
" -rf --hints:off --skipParentCfg --styleCheck:usages --styleCheck:error " & path
|
||||
" -d:bearsslSplitAbi -rf --hints:off --skipParentCfg --styleCheck:usages --styleCheck:error " & path
|
||||
|
||||
task test, "Run tests":
|
||||
for path in listFiles(thisDir() / "tests"):
|
||||
|
176
vendor/nim-bearssl/bearssl/abi/bearssl_aead.nim
vendored
Normal file
@ -0,0 +1,176 @@
|
||||
import
|
||||
"."/[bearssl_block, bearssl_hash, csources]
|
||||
|
||||
{.pragma: importcFunc, cdecl, gcsafe, noSideEffect, raises: [].}
|
||||
{.used.}
|
||||
|
||||
const
|
||||
bearAeadPath = bearSrcPath & "aead/"
|
||||
|
||||
{.compile: bearAeadPath & "ccm.c".}
|
||||
{.compile: bearAeadPath & "eax.c".}
|
||||
{.compile: bearAeadPath & "gcm.c".}
|
||||
|
||||
type
|
||||
AeadClass* {.importc: "br_aead_class", header: "bearssl_aead.h", bycopy.} = object
|
||||
tagSize* {.importc: "tag_size".}: uint
|
||||
reset* {.importc: "reset".}: proc (cc: ptr ptr AeadClass; iv: pointer; len: uint) {.
|
||||
importcFunc.}
|
||||
aadInject* {.importc: "aad_inject".}: proc (cc: ptr ptr AeadClass; data: pointer;
|
||||
len: uint) {.importcFunc.}
|
||||
flip* {.importc: "flip".}: proc (cc: ptr ptr AeadClass) {.importcFunc.}
|
||||
run* {.importc: "run".}: proc (cc: ptr ptr AeadClass; encrypt: cint; data: pointer;
|
||||
len: uint) {.importcFunc.}
|
||||
getTag* {.importc: "get_tag".}: proc (cc: ptr ptr AeadClass; tag: pointer) {.importcFunc.}
|
||||
checkTag* {.importc: "check_tag".}: proc (cc: ptr ptr AeadClass; tag: pointer): uint32 {.
|
||||
importcFunc.}
|
||||
getTagTrunc* {.importc: "get_tag_trunc".}: proc (cc: ptr ptr AeadClass;
|
||||
tag: pointer; len: uint) {.importcFunc.}
|
||||
checkTagTrunc* {.importc: "check_tag_trunc".}: proc (cc: ptr ptr AeadClass;
|
||||
tag: pointer; len: uint): uint32 {.importcFunc.}
|
||||
|
||||
|
||||
|
||||
type
|
||||
GcmContext* {.importc: "br_gcm_context", header: "bearssl_aead.h", bycopy.} = object
|
||||
vtable* {.importc: "vtable".}: ptr AeadClass
|
||||
bctx* {.importc: "bctx".}: ptr ptr BlockCtrClass
|
||||
gh* {.importc: "gh".}: Ghash
|
||||
h* {.importc: "h".}: array[16, byte]
|
||||
j01* {.importc: "j0_1".}: array[12, byte]
|
||||
buf* {.importc: "buf".}: array[16, byte]
|
||||
y* {.importc: "y".}: array[16, byte]
|
||||
j02* {.importc: "j0_2".}: uint32
|
||||
jc* {.importc: "jc".}: uint32
|
||||
countAad* {.importc: "count_aad".}: uint64
|
||||
countCtr* {.importc: "count_ctr".}: uint64
|
||||
|
||||
|
||||
|
||||
proc gcmInit*(ctx: var GcmContext; bctx: ptr ptr BlockCtrClass; gh: Ghash) {.importcFunc,
|
||||
importc: "br_gcm_init", header: "bearssl_aead.h".}
|
||||
|
||||
proc gcmReset*(ctx: var GcmContext; iv: pointer; len: uint) {.importcFunc,
|
||||
importc: "br_gcm_reset", header: "bearssl_aead.h".}
|
||||
|
||||
proc gcmAadInject*(ctx: var GcmContext; data: pointer; len: uint) {.importcFunc,
|
||||
importc: "br_gcm_aad_inject", header: "bearssl_aead.h".}
|
||||
|
||||
proc gcmFlip*(ctx: var GcmContext) {.importcFunc, importc: "br_gcm_flip",
|
||||
header: "bearssl_aead.h".}
|
||||
|
||||
proc gcmRun*(ctx: var GcmContext; encrypt: cint; data: pointer; len: uint) {.importcFunc,
|
||||
importc: "br_gcm_run", header: "bearssl_aead.h".}
|
||||
|
||||
proc gcmGetTag*(ctx: var GcmContext; tag: pointer) {.importcFunc, importc: "br_gcm_get_tag",
|
||||
header: "bearssl_aead.h".}
|
||||
|
||||
proc gcmCheckTag*(ctx: var GcmContext; tag: pointer): uint32 {.importcFunc,
|
||||
importc: "br_gcm_check_tag", header: "bearssl_aead.h".}
|
||||
|
||||
proc gcmGetTagTrunc*(ctx: var GcmContext; tag: pointer; len: uint) {.importcFunc,
|
||||
importc: "br_gcm_get_tag_trunc", header: "bearssl_aead.h".}
|
||||
|
||||
proc gcmCheckTagTrunc*(ctx: var GcmContext; tag: pointer; len: uint): uint32 {.importcFunc,
|
||||
importc: "br_gcm_check_tag_trunc", header: "bearssl_aead.h".}
|
||||
|
||||
var gcmVtable* {.importc: "br_gcm_vtable", header: "bearssl_aead.h".}: AeadClass
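A hedged end-to-end sketch of the GCM flow declared above (init, reset, optional AAD, flip, run, tag), meant to be read alongside this module; it assumes the CTR implementation from the sibling `bearssl_block` module and a `ghashCtmul` binding for `br_ghash_ctmul` in `bearssl_hash`. EAX and CCM below follow the same reset/flip/run/tag shape.

```nim
import "."/[bearssl_block, bearssl_hash]      # AesCtCtrKeys, aesCtCtrInit, ghashCtmul (assumed)

var
  bc: AesCtCtrKeys
  gc: GcmContext
  key: array[16, byte]                        # AES-128 key (all-zero here, illustrative)
  iv: array[12, byte]                         # 96-bit nonce
  msg = [byte 1, 2, 3, 4]                     # encrypted in place
  tag: array[16, byte]

aesCtCtrInit(bc, addr key[0], uint(key.len))
gcmInit(gc, addr bc.vtable, ghashCtmul)       # bind GCM to the CTR vtable and a GHASH
gcmReset(gc, addr iv[0], uint(iv.len))        # start a message under this nonce
gcmFlip(gc)                                   # no AAD; move to the payload phase
gcmRun(gc, 1, addr msg[0], uint(msg.len))     # encrypt = 1, data transformed in place
gcmGetTag(gc, addr tag[0])                    # 16-byte authentication tag
```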
|
||||
|
||||
|
||||
type
|
||||
EaxContext* {.importc: "br_eax_context", header: "bearssl_aead.h", bycopy.} = object
|
||||
vtable* {.importc: "vtable".}: ptr AeadClass
|
||||
bctx* {.importc: "bctx".}: ptr ptr BlockCtrcbcClass
|
||||
l2* {.importc: "L2".}: array[16, byte]
|
||||
l4* {.importc: "L4".}: array[16, byte]
|
||||
nonce* {.importc: "nonce".}: array[16, byte]
|
||||
head* {.importc: "head".}: array[16, byte]
|
||||
ctr* {.importc: "ctr".}: array[16, byte]
|
||||
cbcmac* {.importc: "cbcmac".}: array[16, byte]
|
||||
buf* {.importc: "buf".}: array[16, byte]
|
||||
`ptr`* {.importc: "ptr".}: uint
|
||||
|
||||
|
||||
|
||||
type
|
||||
EaxState* {.importc: "br_eax_state", header: "bearssl_aead.h", bycopy.} = object
|
||||
st* {.importc: "st".}: array[3, array[16, byte]]
|
||||
|
||||
|
||||
|
||||
proc eaxInit*(ctx: var EaxContext; bctx: ptr ptr BlockCtrcbcClass) {.importcFunc,
|
||||
importc: "br_eax_init", header: "bearssl_aead.h".}
|
||||
|
||||
proc eaxCapture*(ctx: var EaxContext; st: ptr EaxState) {.importcFunc,
|
||||
importc: "br_eax_capture", header: "bearssl_aead.h".}
|
||||
|
||||
proc eaxReset*(ctx: var EaxContext; nonce: pointer; len: uint) {.importcFunc,
|
||||
importc: "br_eax_reset", header: "bearssl_aead.h".}
|
||||
|
||||
proc eaxResetPreAad*(ctx: var EaxContext; st: ptr EaxState; nonce: pointer; len: uint) {.
|
||||
importcFunc, importc: "br_eax_reset_pre_aad", header: "bearssl_aead.h".}
|
||||
|
||||
proc eaxResetPostAad*(ctx: var EaxContext; st: ptr EaxState; nonce: pointer; len: uint) {.
|
||||
importcFunc, importc: "br_eax_reset_post_aad", header: "bearssl_aead.h".}
|
||||
|
||||
proc eaxAadInject*(ctx: var EaxContext; data: pointer; len: uint) {.importcFunc,
|
||||
importc: "br_eax_aad_inject", header: "bearssl_aead.h".}
|
||||
|
||||
proc eaxFlip*(ctx: var EaxContext) {.importcFunc, importc: "br_eax_flip",
|
||||
header: "bearssl_aead.h".}
|
||||
|
||||
proc eaxGetAadMac*(ctx: var EaxContext; st: ptr EaxState) {.inline.} =
|
||||
copyMem(unsafeAddr st.st[1], unsafeAddr ctx.head, sizeof(ctx.head))
|
||||
|
||||
|
||||
proc eaxRun*(ctx: var EaxContext; encrypt: cint; data: pointer; len: uint) {.importcFunc,
|
||||
importc: "br_eax_run", header: "bearssl_aead.h".}
|
||||
|
||||
proc eaxGetTag*(ctx: var EaxContext; tag: pointer) {.importcFunc, importc: "br_eax_get_tag",
|
||||
header: "bearssl_aead.h".}
|
||||
|
||||
proc eaxCheckTag*(ctx: var EaxContext; tag: pointer): uint32 {.importcFunc,
|
||||
importc: "br_eax_check_tag", header: "bearssl_aead.h".}
|
||||
|
||||
proc eaxGetTagTrunc*(ctx: var EaxContext; tag: pointer; len: uint) {.importcFunc,
|
||||
importc: "br_eax_get_tag_trunc", header: "bearssl_aead.h".}
|
||||
|
||||
proc eaxCheckTagTrunc*(ctx: var EaxContext; tag: pointer; len: uint): uint32 {.importcFunc,
|
||||
importc: "br_eax_check_tag_trunc", header: "bearssl_aead.h".}
|
||||
|
||||
var eaxVtable* {.importc: "br_eax_vtable", header: "bearssl_aead.h".}: AeadClass
|
||||
|
||||
|
||||
type
|
||||
CcmContext* {.importc: "br_ccm_context", header: "bearssl_aead.h", bycopy.} = object
|
||||
bctx* {.importc: "bctx".}: ptr ptr BlockCtrcbcClass
|
||||
ctr* {.importc: "ctr".}: array[16, byte]
|
||||
cbcmac* {.importc: "cbcmac".}: array[16, byte]
|
||||
tagmask* {.importc: "tagmask".}: array[16, byte]
|
||||
buf* {.importc: "buf".}: array[16, byte]
|
||||
`ptr`* {.importc: "ptr".}: uint
|
||||
tagLen* {.importc: "tag_len".}: uint
|
||||
|
||||
|
||||
|
||||
proc ccmInit*(ctx: var CcmContext; bctx: ptr ptr BlockCtrcbcClass) {.importcFunc,
|
||||
importc: "br_ccm_init", header: "bearssl_aead.h".}
|
||||
|
||||
proc ccmReset*(ctx: var CcmContext; nonce: pointer; nonceLen: uint; aadLen: uint64;
|
||||
dataLen: uint64; tagLen: uint): cint {.importcFunc,
|
||||
importc: "br_ccm_reset", header: "bearssl_aead.h".}
|
||||
|
||||
proc ccmAadInject*(ctx: var CcmContext; data: pointer; len: uint) {.importcFunc,
|
||||
importc: "br_ccm_aad_inject", header: "bearssl_aead.h".}
|
||||
|
||||
proc ccmFlip*(ctx: var CcmContext) {.importcFunc, importc: "br_ccm_flip",
|
||||
header: "bearssl_aead.h".}
|
||||
|
||||
proc ccmRun*(ctx: var CcmContext; encrypt: cint; data: pointer; len: uint) {.importcFunc,
|
||||
importc: "br_ccm_run", header: "bearssl_aead.h".}
|
||||
|
||||
proc ccmGetTag*(ctx: var CcmContext; tag: pointer): uint {.importcFunc,
|
||||
importc: "br_ccm_get_tag", header: "bearssl_aead.h".}
|
||||
|
||||
proc ccmCheckTag*(ctx: var CcmContext; tag: pointer): uint32 {.importcFunc,
|
||||
importc: "br_ccm_check_tag", header: "bearssl_aead.h".}
|
908
vendor/nim-bearssl/bearssl/abi/bearssl_block.nim
vendored
Normal file
@ -0,0 +1,908 @@
|
||||
import
|
||||
"."/[csources, intx]
|
||||
|
||||
{.pragma: importcFunc, cdecl, gcsafe, noSideEffect, raises: [].}
|
||||
{.used.}
|
||||
|
||||
const
|
||||
bearSymcPath = bearSrcPath & "symcipher/"
|
||||
|
||||
{.compile: bearSymcPath & "aes_big_cbcdec.c".}
|
||||
{.compile: bearSymcPath & "aes_big_cbcenc.c".}
|
||||
{.compile: bearSymcPath & "aes_big_ctr.c".}
|
||||
{.compile: bearSymcPath & "aes_big_ctrcbc.c".}
|
||||
{.compile: bearSymcPath & "aes_big_dec.c".}
|
||||
{.compile: bearSymcPath & "aes_big_enc.c".}
|
||||
{.compile: bearSymcPath & "aes_common.c".}
|
||||
{.compile: bearSymcPath & "aes_ct.c".}
|
||||
{.compile: bearSymcPath & "aes_ct64.c".}
|
||||
{.compile: bearSymcPath & "aes_ct64_cbcdec.c".}
|
||||
{.compile: bearSymcPath & "aes_ct64_cbcenc.c".}
|
||||
{.compile: bearSymcPath & "aes_ct64_ctr.c".}
|
||||
{.compile: bearSymcPath & "aes_ct64_ctrcbc.c".}
|
||||
{.compile: bearSymcPath & "aes_ct64_dec.c".}
|
||||
{.compile: bearSymcPath & "aes_ct64_enc.c".}
|
||||
{.compile: bearSymcPath & "aes_ct_cbcdec.c".}
|
||||
{.compile: bearSymcPath & "aes_ct_cbcenc.c".}
|
||||
{.compile: bearSymcPath & "aes_ct_ctr.c".}
|
||||
{.compile: bearSymcPath & "aes_ct_ctrcbc.c".}
|
||||
{.compile: bearSymcPath & "aes_ct_dec.c".}
|
||||
{.compile: bearSymcPath & "aes_ct_enc.c".}
|
||||
{.compile: bearSymcPath & "aes_pwr8.c".}
|
||||
{.compile: bearSymcPath & "aes_pwr8_cbcdec.c".}
|
||||
{.compile: bearSymcPath & "aes_pwr8_cbcenc.c".}
|
||||
{.compile: bearSymcPath & "aes_pwr8_ctr.c".}
|
||||
{.compile: bearSymcPath & "aes_pwr8_ctrcbc.c".}
|
||||
{.compile: bearSymcPath & "aes_small_cbcdec.c".}
|
||||
{.compile: bearSymcPath & "aes_small_cbcenc.c".}
|
||||
{.compile: bearSymcPath & "aes_small_ctr.c".}
|
||||
{.compile: bearSymcPath & "aes_small_ctrcbc.c".}
|
||||
{.compile: bearSymcPath & "aes_small_dec.c".}
|
||||
{.compile: bearSymcPath & "aes_small_enc.c".}
|
||||
{.compile: bearSymcPath & "aes_x86ni.c".}
|
||||
{.compile: bearSymcPath & "aes_x86ni_cbcdec.c".}
|
||||
{.compile: bearSymcPath & "aes_x86ni_cbcenc.c".}
|
||||
{.compile: bearSymcPath & "aes_x86ni_ctr.c".}
|
||||
{.compile: bearSymcPath & "aes_x86ni_ctrcbc.c".}
|
||||
{.compile: bearSymcPath & "chacha20_ct.c".}
|
||||
{.compile: bearSymcPath & "chacha20_sse2.c".}
|
||||
{.compile: bearSymcPath & "des_ct.c".}
|
||||
{.compile: bearSymcPath & "des_ct_cbcdec.c".}
|
||||
{.compile: bearSymcPath & "des_ct_cbcenc.c".}
|
||||
{.compile: bearSymcPath & "des_support.c".}
|
||||
{.compile: bearSymcPath & "des_tab.c".}
|
||||
{.compile: bearSymcPath & "des_tab_cbcdec.c".}
|
||||
{.compile: bearSymcPath & "des_tab_cbcenc.c".}
|
||||
{.compile: bearSymcPath & "poly1305_ctmul.c".}
|
||||
{.compile: bearSymcPath & "poly1305_ctmul32.c".}
|
||||
{.compile: bearSymcPath & "poly1305_ctmulq.c".}
|
||||
{.compile: bearSymcPath & "poly1305_i15.c".}
|
||||
|
||||
type
|
||||
BlockCbcencClass* {.importc: "br_block_cbcenc_class", header: "bearssl_block.h",
|
||||
bycopy.} = object
|
||||
contextSize* {.importc: "context_size".}: uint
|
||||
blockSize* {.importc: "block_size".}: cuint
|
||||
logBlockSize* {.importc: "log_block_size".}: cuint
|
||||
init* {.importc: "init".}: proc (ctx: ptr ptr BlockCbcencClass; key: pointer;
|
||||
keyLen: uint) {.importcFunc.}
|
||||
run* {.importc: "run".}: proc (ctx: ptr ptr BlockCbcencClass; iv: pointer;
|
||||
data: pointer; len: uint) {.importcFunc.}
|
||||
|
||||
|
||||
|
||||
type
|
||||
BlockCbcdecClass* {.importc: "br_block_cbcdec_class", header: "bearssl_block.h",
|
||||
bycopy.} = object
|
||||
contextSize* {.importc: "context_size".}: uint
|
||||
blockSize* {.importc: "block_size".}: cuint
|
||||
logBlockSize* {.importc: "log_block_size".}: cuint
|
||||
init* {.importc: "init".}: proc (ctx: ptr ptr BlockCbcdecClass; key: pointer;
|
||||
keyLen: uint) {.importcFunc.}
|
||||
run* {.importc: "run".}: proc (ctx: ptr ptr BlockCbcdecClass; iv: pointer;
|
||||
data: pointer; len: uint) {.importcFunc.}
|
||||
|
||||
|
||||
|
||||
type
|
||||
BlockCtrClass* {.importc: "br_block_ctr_class", header: "bearssl_block.h", bycopy.} = object
|
||||
contextSize* {.importc: "context_size".}: uint
|
||||
blockSize* {.importc: "block_size".}: cuint
|
||||
logBlockSize* {.importc: "log_block_size".}: cuint
|
||||
init* {.importc: "init".}: proc (ctx: ptr ptr BlockCtrClass; key: pointer;
|
||||
keyLen: uint) {.importcFunc.}
|
||||
run* {.importc: "run".}: proc (ctx: ptr ptr BlockCtrClass; iv: pointer; cc: uint32;
|
||||
data: pointer; len: uint): uint32 {.importcFunc.}
|
||||
|
||||
|
||||
|
||||
type
|
||||
BlockCtrcbcClass* {.importc: "br_block_ctrcbc_class", header: "bearssl_block.h",
|
||||
bycopy.} = object
|
||||
contextSize* {.importc: "context_size".}: uint
|
||||
blockSize* {.importc: "block_size".}: cuint
|
||||
logBlockSize* {.importc: "log_block_size".}: cuint
|
||||
init* {.importc: "init".}: proc (ctx: ptr ptr BlockCtrcbcClass; key: pointer;
|
||||
keyLen: uint) {.importcFunc.}
|
||||
encrypt* {.importc: "encrypt".}: proc (ctx: ptr ptr BlockCtrcbcClass; ctr: pointer;
|
||||
cbcmac: pointer; data: pointer; len: uint) {.
|
||||
importcFunc.}
|
||||
decrypt* {.importc: "decrypt".}: proc (ctx: ptr ptr BlockCtrcbcClass; ctr: pointer;
|
||||
cbcmac: pointer; data: pointer; len: uint) {.
|
||||
importcFunc.}
|
||||
ctr* {.importc: "ctr".}: proc (ctx: ptr ptr BlockCtrcbcClass; ctr: pointer;
|
||||
data: pointer; len: uint) {.importcFunc.}
|
||||
mac* {.importc: "mac".}: proc (ctx: ptr ptr BlockCtrcbcClass; cbcmac: pointer;
|
||||
data: pointer; len: uint) {.importcFunc.}
|
||||
|
||||
|
||||
|
||||
const
|
||||
aesBigBLOCK_SIZE* = 16
|
||||
|
||||
|
||||
type
|
||||
AesBigCbcencKeys* {.importc: "br_aes_big_cbcenc_keys", header: "bearssl_block.h",
|
||||
bycopy.} = object
|
||||
vtable* {.importc: "vtable".}: ptr BlockCbcencClass
|
||||
skey* {.importc: "skey".}: array[60, uint32]
|
||||
numRounds* {.importc: "num_rounds".}: cuint
|
||||
|
||||
|
||||
|
||||
type
|
||||
AesBigCbcdecKeys* {.importc: "br_aes_big_cbcdec_keys", header: "bearssl_block.h",
|
||||
bycopy.} = object
|
||||
vtable* {.importc: "vtable".}: ptr BlockCbcdecClass
|
||||
skey* {.importc: "skey".}: array[60, uint32]
|
||||
numRounds* {.importc: "num_rounds".}: cuint
|
||||
|
||||
|
||||
|
||||
type
|
||||
AesBigCtrKeys* {.importc: "br_aes_big_ctr_keys", header: "bearssl_block.h", bycopy.} = object
|
||||
vtable* {.importc: "vtable".}: ptr BlockCtrClass
|
||||
skey* {.importc: "skey".}: array[60, uint32]
|
||||
numRounds* {.importc: "num_rounds".}: cuint
|
||||
|
||||
|
||||
|
||||
type
|
||||
AesBigCtrcbcKeys* {.importc: "br_aes_big_ctrcbc_keys", header: "bearssl_block.h",
|
||||
bycopy.} = object
|
||||
vtable* {.importc: "vtable".}: ptr BlockCtrcbcClass
|
||||
skey* {.importc: "skey".}: array[60, uint32]
|
||||
numRounds* {.importc: "num_rounds".}: cuint
|
||||
|
||||
|
||||
var aesBigCbcencVtable* {.importc: "br_aes_big_cbcenc_vtable", header: "bearssl_block.h".}: BlockCbcencClass
|
||||
|
||||
|
||||
var aesBigCbcdecVtable* {.importc: "br_aes_big_cbcdec_vtable", header: "bearssl_block.h".}: BlockCbcdecClass
|
||||
|
||||
|
||||
var aesBigCtrVtable* {.importc: "br_aes_big_ctr_vtable", header: "bearssl_block.h".}: BlockCtrClass
|
||||
|
||||
|
||||
var aesBigCtrcbcVtable* {.importc: "br_aes_big_ctrcbc_vtable", header: "bearssl_block.h".}: BlockCtrcbcClass
|
||||
|
||||
|
||||
proc aesBigCbcencInit*(ctx: var AesBigCbcencKeys; key: pointer; len: uint) {.importcFunc,
|
||||
importc: "br_aes_big_cbcenc_init", header: "bearssl_block.h".}
|
||||
|
||||
proc aesBigCbcdecInit*(ctx: var AesBigCbcdecKeys; key: pointer; len: uint) {.importcFunc,
|
||||
importc: "br_aes_big_cbcdec_init", header: "bearssl_block.h".}
|
||||
|
||||
proc aesBigCtrInit*(ctx: var AesBigCtrKeys; key: pointer; len: uint) {.importcFunc,
|
||||
importc: "br_aes_big_ctr_init", header: "bearssl_block.h".}
|
||||
|
||||
proc aesBigCtrcbcInit*(ctx: var AesBigCtrcbcKeys; key: pointer; len: uint) {.importcFunc,
|
||||
importc: "br_aes_big_ctrcbc_init", header: "bearssl_block.h".}
|
||||
|
||||
proc aesBigCbcencRun*(ctx: var AesBigCbcencKeys; iv: pointer; data: pointer;
|
||||
len: uint) {.importcFunc, importc: "br_aes_big_cbcenc_run",
|
||||
header: "bearssl_block.h".}
|
||||
|
||||
proc aesBigCbcdecRun*(ctx: var AesBigCbcdecKeys; iv: pointer; data: pointer;
|
||||
len: uint) {.importcFunc, importc: "br_aes_big_cbcdec_run",
|
||||
header: "bearssl_block.h".}
|
||||
|
||||
proc aesBigCtrRun*(ctx: var AesBigCtrKeys; iv: pointer; cc: uint32; data: pointer;
|
||||
len: uint): uint32 {.importcFunc, importc: "br_aes_big_ctr_run",
|
||||
header: "bearssl_block.h".}
|
||||
|
||||
proc aesBigCtrcbcEncrypt*(ctx: var AesBigCtrcbcKeys; ctr: pointer; cbcmac: pointer;
|
||||
data: pointer; len: uint) {.importcFunc,
|
||||
importc: "br_aes_big_ctrcbc_encrypt", header: "bearssl_block.h".}
|
||||
|
||||
proc aesBigCtrcbcDecrypt*(ctx: var AesBigCtrcbcKeys; ctr: pointer; cbcmac: pointer;
|
||||
data: pointer; len: uint) {.importcFunc,
|
||||
importc: "br_aes_big_ctrcbc_decrypt", header: "bearssl_block.h".}
|
||||
|
||||
proc aesBigCtrcbcCtr*(ctx: var AesBigCtrcbcKeys; ctr: pointer; data: pointer;
|
||||
len: uint) {.importcFunc, importc: "br_aes_big_ctrcbc_ctr",
|
||||
header: "bearssl_block.h".}
|
||||
|
||||
proc aesBigCtrcbcMac*(ctx: var AesBigCtrcbcKeys; cbcmac: pointer; data: pointer;
|
||||
len: uint) {.importcFunc, importc: "br_aes_big_ctrcbc_mac",
|
||||
header: "bearssl_block.h".}
|
||||
|
||||
const
|
||||
aesSmallBLOCK_SIZE* = 16
|
||||
|
||||
|
||||
type
|
||||
AesSmallCbcencKeys* {.importc: "br_aes_small_cbcenc_keys",
|
||||
header: "bearssl_block.h", bycopy.} = object
|
||||
vtable* {.importc: "vtable".}: ptr BlockCbcencClass
|
||||
skey* {.importc: "skey".}: array[60, uint32]
|
||||
numRounds* {.importc: "num_rounds".}: cuint
|
||||
|
||||
|
||||
|
||||
type
|
||||
AesSmallCbcdecKeys* {.importc: "br_aes_small_cbcdec_keys",
|
||||
header: "bearssl_block.h", bycopy.} = object
|
||||
vtable* {.importc: "vtable".}: ptr BlockCbcdecClass
|
||||
skey* {.importc: "skey".}: array[60, uint32]
|
||||
numRounds* {.importc: "num_rounds".}: cuint
|
||||
|
||||
|
||||
|
||||
type
|
||||
AesSmallCtrKeys* {.importc: "br_aes_small_ctr_keys", header: "bearssl_block.h",
|
||||
bycopy.} = object
|
||||
vtable* {.importc: "vtable".}: ptr BlockCtrClass
|
||||
skey* {.importc: "skey".}: array[60, uint32]
|
||||
numRounds* {.importc: "num_rounds".}: cuint
|
||||
|
||||
|
||||
|
||||
type
|
||||
AesSmallCtrcbcKeys* {.importc: "br_aes_small_ctrcbc_keys",
|
||||
header: "bearssl_block.h", bycopy.} = object
|
||||
vtable* {.importc: "vtable".}: ptr BlockCtrcbcClass
|
||||
skey* {.importc: "skey".}: array[60, uint32]
|
||||
numRounds* {.importc: "num_rounds".}: cuint
|
||||
|
||||
|
||||
var aesSmallCbcencVtable* {.importc: "br_aes_small_cbcenc_vtable", header: "bearssl_block.h".}: BlockCbcencClass
|
||||
|
||||
|
||||
var aesSmallCbcdecVtable* {.importc: "br_aes_small_cbcdec_vtable", header: "bearssl_block.h".}: BlockCbcdecClass
|
||||
|
||||
|
||||
var aesSmallCtrVtable* {.importc: "br_aes_small_ctr_vtable", header: "bearssl_block.h".}: BlockCtrClass
|
||||
|
||||
|
||||
var aesSmallCtrcbcVtable* {.importc: "br_aes_small_ctrcbc_vtable", header: "bearssl_block.h".}: BlockCtrcbcClass
|
||||
|
||||
|
||||
proc aesSmallCbcencInit*(ctx: var AesSmallCbcencKeys; key: pointer; len: uint) {.
|
||||
importcFunc, importc: "br_aes_small_cbcenc_init", header: "bearssl_block.h".}
|
||||
|
||||
proc aesSmallCbcdecInit*(ctx: var AesSmallCbcdecKeys; key: pointer; len: uint) {.
|
||||
importcFunc, importc: "br_aes_small_cbcdec_init", header: "bearssl_block.h".}
|
||||
|
||||
proc aesSmallCtrInit*(ctx: var AesSmallCtrKeys; key: pointer; len: uint) {.importcFunc,
|
||||
importc: "br_aes_small_ctr_init", header: "bearssl_block.h".}
|
||||
|
||||
proc aesSmallCtrcbcInit*(ctx: var AesSmallCtrcbcKeys; key: pointer; len: uint) {.
|
||||
importcFunc, importc: "br_aes_small_ctrcbc_init", header: "bearssl_block.h".}
|
||||
|
||||
proc aesSmallCbcencRun*(ctx: var AesSmallCbcencKeys; iv: pointer; data: pointer;
|
||||
len: uint) {.importcFunc, importc: "br_aes_small_cbcenc_run",
|
||||
header: "bearssl_block.h".}
|
||||
|
||||
proc aesSmallCbcdecRun*(ctx: var AesSmallCbcdecKeys; iv: pointer; data: pointer;
|
||||
len: uint) {.importcFunc, importc: "br_aes_small_cbcdec_run",
|
||||
header: "bearssl_block.h".}
|
||||
|
||||
proc aesSmallCtrRun*(ctx: var AesSmallCtrKeys; iv: pointer; cc: uint32; data: pointer;
|
||||
len: uint): uint32 {.importcFunc, importc: "br_aes_small_ctr_run",
|
||||
header: "bearssl_block.h".}
|
||||
|
||||
proc aesSmallCtrcbcEncrypt*(ctx: var AesSmallCtrcbcKeys; ctr: pointer;
|
||||
cbcmac: pointer; data: pointer; len: uint) {.importcFunc,
|
||||
importc: "br_aes_small_ctrcbc_encrypt", header: "bearssl_block.h".}
|
||||
|
||||
proc aesSmallCtrcbcDecrypt*(ctx: var AesSmallCtrcbcKeys; ctr: pointer;
|
||||
cbcmac: pointer; data: pointer; len: uint) {.importcFunc,
|
||||
importc: "br_aes_small_ctrcbc_decrypt", header: "bearssl_block.h".}
|
||||
|
||||
proc aesSmallCtrcbcCtr*(ctx: var AesSmallCtrcbcKeys; ctr: pointer; data: pointer;
|
||||
len: uint) {.importcFunc, importc: "br_aes_small_ctrcbc_ctr",
|
||||
header: "bearssl_block.h".}
|
||||
|
||||
proc aesSmallCtrcbcMac*(ctx: var AesSmallCtrcbcKeys; cbcmac: pointer; data: pointer;
|
||||
len: uint) {.importcFunc, importc: "br_aes_small_ctrcbc_mac",
|
||||
header: "bearssl_block.h".}
|
||||
|
||||
const
|
||||
aesCtBLOCK_SIZE* = 16
|
||||
|
||||
|
||||
type
|
||||
AesCtCbcencKeys* {.importc: "br_aes_ct_cbcenc_keys", header: "bearssl_block.h",
|
||||
bycopy.} = object
|
||||
vtable* {.importc: "vtable".}: ptr BlockCbcencClass
|
||||
skey* {.importc: "skey".}: array[60, uint32]
|
||||
numRounds* {.importc: "num_rounds".}: cuint
|
||||
|
||||
|
||||
|
||||
type
|
||||
AesCtCbcdecKeys* {.importc: "br_aes_ct_cbcdec_keys", header: "bearssl_block.h",
|
||||
bycopy.} = object
|
||||
vtable* {.importc: "vtable".}: ptr BlockCbcdecClass
|
||||
skey* {.importc: "skey".}: array[60, uint32]
|
||||
numRounds* {.importc: "num_rounds".}: cuint
|
||||
|
||||
|
||||
|
||||
type
|
||||
AesCtCtrKeys* {.importc: "br_aes_ct_ctr_keys", header: "bearssl_block.h", bycopy.} = object
|
||||
vtable* {.importc: "vtable".}: ptr BlockCtrClass
|
||||
skey* {.importc: "skey".}: array[60, uint32]
|
||||
numRounds* {.importc: "num_rounds".}: cuint
|
||||
|
||||
|
||||
|
||||
type
|
||||
AesCtCtrcbcKeys* {.importc: "br_aes_ct_ctrcbc_keys", header: "bearssl_block.h",
|
||||
bycopy.} = object
|
||||
vtable* {.importc: "vtable".}: ptr BlockCtrcbcClass
|
||||
skey* {.importc: "skey".}: array[60, uint32]
|
||||
numRounds* {.importc: "num_rounds".}: cuint
|
||||
|
||||
|
||||
var aesCtCbcencVtable* {.importc: "br_aes_ct_cbcenc_vtable", header: "bearssl_block.h".}: BlockCbcencClass
|
||||
|
||||
|
||||
var aesCtCbcdecVtable* {.importc: "br_aes_ct_cbcdec_vtable", header: "bearssl_block.h".}: BlockCbcdecClass
|
||||
|
||||
|
||||
var aesCtCtrVtable* {.importc: "br_aes_ct_ctr_vtable", header: "bearssl_block.h".}: BlockCtrClass
|
||||
|
||||
|
||||
var aesCtCtrcbcVtable* {.importc: "br_aes_ct_ctrcbc_vtable", header: "bearssl_block.h".}: BlockCtrcbcClass
|
||||
|
||||
|
||||
proc aesCtCbcencInit*(ctx: var AesCtCbcencKeys; key: pointer; len: uint) {.importcFunc,
|
||||
importc: "br_aes_ct_cbcenc_init", header: "bearssl_block.h".}
|
||||
|
||||
proc aesCtCbcdecInit*(ctx: var AesCtCbcdecKeys; key: pointer; len: uint) {.importcFunc,
|
||||
importc: "br_aes_ct_cbcdec_init", header: "bearssl_block.h".}
|
||||
|
||||
proc aesCtCtrInit*(ctx: var AesCtCtrKeys; key: pointer; len: uint) {.importcFunc,
|
||||
importc: "br_aes_ct_ctr_init", header: "bearssl_block.h".}
|
||||
|
||||
proc aesCtCtrcbcInit*(ctx: var AesCtCtrcbcKeys; key: pointer; len: uint) {.importcFunc,
|
||||
importc: "br_aes_ct_ctrcbc_init", header: "bearssl_block.h".}
|
||||
|
||||
proc aesCtCbcencRun*(ctx: var AesCtCbcencKeys; iv: pointer; data: pointer; len: uint) {.
|
||||
importcFunc, importc: "br_aes_ct_cbcenc_run", header: "bearssl_block.h".}
|
||||
|
||||
proc aesCtCbcdecRun*(ctx: var AesCtCbcdecKeys; iv: pointer; data: pointer; len: uint) {.
|
||||
importcFunc, importc: "br_aes_ct_cbcdec_run", header: "bearssl_block.h".}
|
||||
|
||||
proc aesCtCtrRun*(ctx: var AesCtCtrKeys; iv: pointer; cc: uint32; data: pointer;
|
||||
len: uint): uint32 {.importcFunc, importc: "br_aes_ct_ctr_run",
|
||||
header: "bearssl_block.h".}
|
||||
|
||||
proc aesCtCtrcbcEncrypt*(ctx: var AesCtCtrcbcKeys; ctr: pointer; cbcmac: pointer;
|
||||
data: pointer; len: uint) {.importcFunc,
|
||||
importc: "br_aes_ct_ctrcbc_encrypt", header: "bearssl_block.h".}
|
||||
|
||||
proc aesCtCtrcbcDecrypt*(ctx: var AesCtCtrcbcKeys; ctr: pointer; cbcmac: pointer;
|
||||
data: pointer; len: uint) {.importcFunc,
|
||||
importc: "br_aes_ct_ctrcbc_decrypt", header: "bearssl_block.h".}
|
||||
|
||||
proc aesCtCtrcbcCtr*(ctx: var AesCtCtrcbcKeys; ctr: pointer; data: pointer; len: uint) {.
|
||||
importcFunc, importc: "br_aes_ct_ctrcbc_ctr", header: "bearssl_block.h".}
|
||||
|
||||
proc aesCtCtrcbcMac*(ctx: var AesCtCtrcbcKeys; cbcmac: pointer; data: pointer;
|
||||
len: uint) {.importcFunc, importc: "br_aes_ct_ctrcbc_mac",
|
||||
header: "bearssl_block.h".}
|
||||
|
||||
const
|
||||
aesCt64BLOCK_SIZE* = 16
|
||||
|
||||
|
||||
type
|
||||
AesCt64CbcencKeys* {.importc: "br_aes_ct64_cbcenc_keys",
|
||||
header: "bearssl_block.h", bycopy.} = object
|
||||
vtable* {.importc: "vtable".}: ptr BlockCbcencClass
|
||||
skey* {.importc: "skey".}: array[30, uint64]
|
||||
numRounds* {.importc: "num_rounds".}: cuint
|
||||
|
||||
|
||||
|
||||
type
|
||||
AesCt64CbcdecKeys* {.importc: "br_aes_ct64_cbcdec_keys",
|
||||
header: "bearssl_block.h", bycopy.} = object
|
||||
vtable* {.importc: "vtable".}: ptr BlockCbcdecClass
|
||||
skey* {.importc: "skey".}: array[30, uint64]
|
||||
numRounds* {.importc: "num_rounds".}: cuint
|
||||
|
||||
|
||||
|
||||
type
|
||||
AesCt64CtrKeys* {.importc: "br_aes_ct64_ctr_keys", header: "bearssl_block.h",
|
||||
bycopy.} = object
|
||||
vtable* {.importc: "vtable".}: ptr BlockCtrClass
|
||||
skey* {.importc: "skey".}: array[30, uint64]
|
||||
numRounds* {.importc: "num_rounds".}: cuint
|
||||
|
||||
|
||||
|
||||
type
|
||||
AesCt64CtrcbcKeys* {.importc: "br_aes_ct64_ctrcbc_keys",
|
||||
header: "bearssl_block.h", bycopy.} = object
|
||||
vtable* {.importc: "vtable".}: ptr BlockCtrcbcClass
|
||||
skey* {.importc: "skey".}: array[30, uint64]
|
||||
numRounds* {.importc: "num_rounds".}: cuint
|
||||
|
||||
|
||||
var aesCt64CbcencVtable* {.importc: "br_aes_ct64_cbcenc_vtable", header: "bearssl_block.h".}: BlockCbcencClass
|
||||
|
||||
|
||||
var aesCt64CbcdecVtable* {.importc: "br_aes_ct64_cbcdec_vtable", header: "bearssl_block.h".}: BlockCbcdecClass
|
||||
|
||||
|
||||
var aesCt64CtrVtable* {.importc: "br_aes_ct64_ctr_vtable", header: "bearssl_block.h".}: BlockCtrClass
|
||||
|
||||
|
||||
var aesCt64CtrcbcVtable* {.importc: "br_aes_ct64_ctrcbc_vtable", header: "bearssl_block.h".}: BlockCtrcbcClass
|
||||
|
||||
|
||||
proc aesCt64CbcencInit*(ctx: var AesCt64CbcencKeys; key: pointer; len: uint) {.importcFunc,
|
||||
importc: "br_aes_ct64_cbcenc_init", header: "bearssl_block.h".}
|
||||
|
||||
proc aesCt64CbcdecInit*(ctx: var AesCt64CbcdecKeys; key: pointer; len: uint) {.importcFunc,
|
||||
importc: "br_aes_ct64_cbcdec_init", header: "bearssl_block.h".}
|
||||
|
||||
proc aesCt64CtrInit*(ctx: var AesCt64CtrKeys; key: pointer; len: uint) {.importcFunc,
|
||||
importc: "br_aes_ct64_ctr_init", header: "bearssl_block.h".}
|
||||
|
||||
proc aesCt64CtrcbcInit*(ctx: var AesCt64CtrcbcKeys; key: pointer; len: uint) {.importcFunc,
|
||||
importc: "br_aes_ct64_ctrcbc_init", header: "bearssl_block.h".}
|
||||
|
||||
proc aesCt64CbcencRun*(ctx: var AesCt64CbcencKeys; iv: pointer; data: pointer;
|
||||
len: uint) {.importcFunc, importc: "br_aes_ct64_cbcenc_run",
|
||||
header: "bearssl_block.h".}
|
||||
|
||||
proc aesCt64CbcdecRun*(ctx: var AesCt64CbcdecKeys; iv: pointer; data: pointer;
|
||||
len: uint) {.importcFunc, importc: "br_aes_ct64_cbcdec_run",
|
||||
header: "bearssl_block.h".}
|
||||
|
||||
proc aesCt64CtrRun*(ctx: var AesCt64CtrKeys; iv: pointer; cc: uint32; data: pointer;
|
||||
len: uint): uint32 {.importcFunc, importc: "br_aes_ct64_ctr_run",
|
||||
header: "bearssl_block.h".}
|
||||
|
||||
proc aesCt64CtrcbcEncrypt*(ctx: var AesCt64CtrcbcKeys; ctr: pointer; cbcmac: pointer;
|
||||
data: pointer; len: uint) {.importcFunc,
|
||||
importc: "br_aes_ct64_ctrcbc_encrypt", header: "bearssl_block.h".}
|
||||
|
||||
proc aesCt64CtrcbcDecrypt*(ctx: var AesCt64CtrcbcKeys; ctr: pointer; cbcmac: pointer;
|
||||
data: pointer; len: uint) {.importcFunc,
|
||||
importc: "br_aes_ct64_ctrcbc_decrypt", header: "bearssl_block.h".}
|
||||
|
||||
proc aesCt64CtrcbcCtr*(ctx: var AesCt64CtrcbcKeys; ctr: pointer; data: pointer;
|
||||
len: uint) {.importcFunc, importc: "br_aes_ct64_ctrcbc_ctr",
|
||||
header: "bearssl_block.h".}
|
||||
|
||||
proc aesCt64CtrcbcMac*(ctx: var AesCt64CtrcbcKeys; cbcmac: pointer; data: pointer;
|
||||
len: uint) {.importcFunc, importc: "br_aes_ct64_ctrcbc_mac",
|
||||
header: "bearssl_block.h".}
|
||||
|
||||
const
|
||||
aesX86niBLOCK_SIZE* = 16
|
||||
|
||||
|
||||
type
|
||||
INNER_C_UNION_bearssl_block_1* {.importc: "br_aes_x86ni_cbcenc_keys::no_name",
|
||||
header: "bearssl_block.h", bycopy, union.} = object
|
||||
skni* {.importc: "skni".}: array[16 * 15, byte]
|
||||
|
||||
AesX86niCbcencKeys* {.importc: "br_aes_x86ni_cbcenc_keys",
|
||||
header: "bearssl_block.h", bycopy.} = object
|
||||
vtable* {.importc: "vtable".}: ptr BlockCbcencClass
|
||||
skey* {.importc: "skey".}: INNER_C_UNION_bearssl_block_1
|
||||
numRounds* {.importc: "num_rounds".}: cuint
|
||||
|
||||
|
||||
|
||||
type
|
||||
INNER_C_UNION_bearssl_block_3* {.importc: "br_aes_x86ni_cbcdec_keys::no_name",
|
||||
header: "bearssl_block.h", bycopy, union.} = object
|
||||
skni* {.importc: "skni".}: array[16 * 15, byte]
|
||||
|
||||
AesX86niCbcdecKeys* {.importc: "br_aes_x86ni_cbcdec_keys",
|
||||
header: "bearssl_block.h", bycopy.} = object
|
||||
vtable* {.importc: "vtable".}: ptr BlockCbcdecClass
|
||||
skey* {.importc: "skey".}: INNER_C_UNION_bearssl_block_3
|
||||
numRounds* {.importc: "num_rounds".}: cuint
|
||||
|
||||
|
||||
|
||||
type
|
||||
INNER_C_UNION_bearssl_block_5* {.importc: "br_aes_x86ni_ctr_keys::no_name",
|
||||
header: "bearssl_block.h", bycopy, union.} = object
|
||||
skni* {.importc: "skni".}: array[16 * 15, byte]
|
||||
|
||||
AesX86niCtrKeys* {.importc: "br_aes_x86ni_ctr_keys", header: "bearssl_block.h",
|
||||
bycopy.} = object
|
||||
vtable* {.importc: "vtable".}: ptr BlockCtrClass
|
||||
skey* {.importc: "skey".}: INNER_C_UNION_bearssl_block_5
|
||||
numRounds* {.importc: "num_rounds".}: cuint
|
||||
|
||||
|
||||
|
||||
type
|
||||
INNER_C_UNION_bearssl_block_7* {.importc: "br_aes_x86ni_ctrcbc_keys::no_name",
|
||||
header: "bearssl_block.h", bycopy, union.} = object
|
||||
skni* {.importc: "skni".}: array[16 * 15, byte]
|
||||
|
||||
AesX86niCtrcbcKeys* {.importc: "br_aes_x86ni_ctrcbc_keys",
|
||||
header: "bearssl_block.h", bycopy.} = object
|
||||
vtable* {.importc: "vtable".}: ptr BlockCtrcbcClass
|
||||
skey* {.importc: "skey".}: INNER_C_UNION_bearssl_block_7
|
||||
numRounds* {.importc: "num_rounds".}: cuint
|
||||
|
||||
|
||||
var aesX86niCbcencVtable* {.importc: "br_aes_x86ni_cbcenc_vtable", header: "bearssl_block.h".}: BlockCbcencClass
|
||||
|
||||
|
||||
var aesX86niCbcdecVtable* {.importc: "br_aes_x86ni_cbcdec_vtable", header: "bearssl_block.h".}: BlockCbcdecClass
|
||||
|
||||
|
||||
var aesX86niCtrVtable* {.importc: "br_aes_x86ni_ctr_vtable", header: "bearssl_block.h".}: BlockCtrClass
|
||||
|
||||
|
||||
var aesX86niCtrcbcVtable* {.importc: "br_aes_x86ni_ctrcbc_vtable", header: "bearssl_block.h".}: BlockCtrcbcClass
|
||||
|
||||
|
||||
proc aesX86niCbcencInit*(ctx: var AesX86niCbcencKeys; key: pointer; len: uint) {.
|
||||
importcFunc, importc: "br_aes_x86ni_cbcenc_init", header: "bearssl_block.h".}
|
||||
|
||||
proc aesX86niCbcdecInit*(ctx: var AesX86niCbcdecKeys; key: pointer; len: uint) {.
|
||||
importcFunc, importc: "br_aes_x86ni_cbcdec_init", header: "bearssl_block.h".}
|
||||
|
||||
proc aesX86niCtrInit*(ctx: var AesX86niCtrKeys; key: pointer; len: uint) {.importcFunc,
|
||||
importc: "br_aes_x86ni_ctr_init", header: "bearssl_block.h".}
|
||||
|
||||
proc aesX86niCtrcbcInit*(ctx: var AesX86niCtrcbcKeys; key: pointer; len: uint) {.
|
||||
importcFunc, importc: "br_aes_x86ni_ctrcbc_init", header: "bearssl_block.h".}
|
||||
|
||||
proc aesX86niCbcencRun*(ctx: var AesX86niCbcencKeys; iv: pointer; data: pointer;
|
||||
len: uint) {.importcFunc, importc: "br_aes_x86ni_cbcenc_run",
|
||||
header: "bearssl_block.h".}
|
||||
|
||||
proc aesX86niCbcdecRun*(ctx: var AesX86niCbcdecKeys; iv: pointer; data: pointer;
|
||||
len: uint) {.importcFunc, importc: "br_aes_x86ni_cbcdec_run",
|
||||
header: "bearssl_block.h".}
|
||||
|
||||
proc aesX86niCtrRun*(ctx: var AesX86niCtrKeys; iv: pointer; cc: uint32; data: pointer;
|
||||
len: uint): uint32 {.importcFunc, importc: "br_aes_x86ni_ctr_run",
|
||||
header: "bearssl_block.h".}
|
||||
|
||||
proc aesX86niCtrcbcEncrypt*(ctx: var AesX86niCtrcbcKeys; ctr: pointer;
|
||||
cbcmac: pointer; data: pointer; len: uint) {.importcFunc,
|
||||
importc: "br_aes_x86ni_ctrcbc_encrypt", header: "bearssl_block.h".}
|
||||
|
||||
proc aesX86niCtrcbcDecrypt*(ctx: var AesX86niCtrcbcKeys; ctr: pointer;
|
||||
cbcmac: pointer; data: pointer; len: uint) {.importcFunc,
|
||||
importc: "br_aes_x86ni_ctrcbc_decrypt", header: "bearssl_block.h".}
|
||||
|
||||
proc aesX86niCtrcbcCtr*(ctx: var AesX86niCtrcbcKeys; ctr: pointer; data: pointer;
|
||||
len: uint) {.importcFunc, importc: "br_aes_x86ni_ctrcbc_ctr",
|
||||
header: "bearssl_block.h".}
|
||||
|
||||
proc aesX86niCtrcbcMac*(ctx: var AesX86niCtrcbcKeys; cbcmac: pointer; data: pointer;
|
||||
len: uint) {.importcFunc, importc: "br_aes_x86ni_ctrcbc_mac",
|
||||
header: "bearssl_block.h".}
|
||||
|
||||
proc aesX86niCbcencGetVtable*(): ptr BlockCbcencClass {.importcFunc,
|
||||
importc: "br_aes_x86ni_cbcenc_get_vtable", header: "bearssl_block.h".}
|
||||
|
||||
proc aesX86niCbcdecGetVtable*(): ptr BlockCbcdecClass {.importcFunc,
|
||||
importc: "br_aes_x86ni_cbcdec_get_vtable", header: "bearssl_block.h".}
|
||||
|
||||
proc aesX86niCtrGetVtable*(): ptr BlockCtrClass {.importcFunc,
|
||||
importc: "br_aes_x86ni_ctr_get_vtable", header: "bearssl_block.h".}
|
||||
|
||||
proc aesX86niCtrcbcGetVtable*(): ptr BlockCtrcbcClass {.importcFunc,
|
||||
importc: "br_aes_x86ni_ctrcbc_get_vtable", header: "bearssl_block.h".}
|
||||
|
||||
const
|
||||
aesPwr8BLOCK_SIZE* = 16
|
||||
|
||||
|
||||
type
|
||||
INNER_C_UNION_bearssl_block_9* {.importc: "br_aes_pwr8_cbcenc_keys::no_name",
|
||||
header: "bearssl_block.h", bycopy, union.} = object
|
||||
skni* {.importc: "skni".}: array[16 * 15, byte]
|
||||
|
||||
AesPwr8CbcencKeys* {.importc: "br_aes_pwr8_cbcenc_keys",
|
||||
header: "bearssl_block.h", bycopy.} = object
|
||||
vtable* {.importc: "vtable".}: ptr BlockCbcencClass
|
||||
skey* {.importc: "skey".}: INNER_C_UNION_bearssl_block_9
|
||||
numRounds* {.importc: "num_rounds".}: cuint
|
||||
|
||||
|
||||
|
||||
type
|
||||
INNER_C_UNION_bearssl_block_11* {.importc: "br_aes_pwr8_cbcdec_keys::no_name",
|
||||
header: "bearssl_block.h", bycopy, union.} = object
|
||||
skni* {.importc: "skni".}: array[16 * 15, byte]
|
||||
|
||||
AesPwr8CbcdecKeys* {.importc: "br_aes_pwr8_cbcdec_keys",
|
||||
header: "bearssl_block.h", bycopy.} = object
|
||||
vtable* {.importc: "vtable".}: ptr BlockCbcdecClass
|
||||
skey* {.importc: "skey".}: INNER_C_UNION_bearssl_block_11
|
||||
numRounds* {.importc: "num_rounds".}: cuint
|
||||
|
||||
|
||||
|
||||
type
|
||||
INNER_C_UNION_bearssl_block_13* {.importc: "br_aes_pwr8_ctr_keys::no_name",
|
||||
header: "bearssl_block.h", bycopy, union.} = object
|
||||
skni* {.importc: "skni".}: array[16 * 15, byte]
|
||||
|
||||
AesPwr8CtrKeys* {.importc: "br_aes_pwr8_ctr_keys", header: "bearssl_block.h",
|
||||
bycopy.} = object
|
||||
vtable* {.importc: "vtable".}: ptr BlockCtrClass
|
||||
skey* {.importc: "skey".}: INNER_C_UNION_bearssl_block_13
|
||||
numRounds* {.importc: "num_rounds".}: cuint
|
||||
|
||||
|
||||
|
||||
type
|
||||
INNER_C_UNION_bearssl_block_15* {.importc: "br_aes_pwr8_ctrcbc_keys::no_name",
|
||||
header: "bearssl_block.h", bycopy, union.} = object
|
||||
skni* {.importc: "skni".}: array[16 * 15, byte]
|
||||
|
||||
AesPwr8CtrcbcKeys* {.importc: "br_aes_pwr8_ctrcbc_keys",
|
||||
header: "bearssl_block.h", bycopy.} = object
|
||||
vtable* {.importc: "vtable".}: ptr BlockCtrcbcClass
|
||||
skey* {.importc: "skey".}: INNER_C_UNION_bearssl_block_15
|
||||
numRounds* {.importc: "num_rounds".}: cuint
|
||||
|
||||
|
||||
var aesPwr8CbcencVtable* {.importc: "br_aes_pwr8_cbcenc_vtable", header: "bearssl_block.h".}: BlockCbcencClass
|
||||
|
||||
|
||||
var aesPwr8CbcdecVtable* {.importc: "br_aes_pwr8_cbcdec_vtable", header: "bearssl_block.h".}: BlockCbcdecClass
|
||||
|
||||
|
||||
var aesPwr8CtrVtable* {.importc: "br_aes_pwr8_ctr_vtable", header: "bearssl_block.h".}: BlockCtrClass
|
||||
|
||||
|
||||
var aesPwr8CtrcbcVtable* {.importc: "br_aes_pwr8_ctrcbc_vtable", header: "bearssl_block.h".}: BlockCtrcbcClass
|
||||
|
||||
|
||||
proc aesPwr8CbcencInit*(ctx: var AesPwr8CbcencKeys; key: pointer; len: uint) {.importcFunc,
|
||||
importc: "br_aes_pwr8_cbcenc_init", header: "bearssl_block.h".}
|
||||
|
||||
proc aesPwr8CbcdecInit*(ctx: var AesPwr8CbcdecKeys; key: pointer; len: uint) {.importcFunc,
|
||||
importc: "br_aes_pwr8_cbcdec_init", header: "bearssl_block.h".}
|
||||
|
||||
proc aesPwr8CtrInit*(ctx: var AesPwr8CtrKeys; key: pointer; len: uint) {.importcFunc,
|
||||
importc: "br_aes_pwr8_ctr_init", header: "bearssl_block.h".}
|
||||
|
||||
proc aesPwr8CtrcbcInit*(ctx: var AesPwr8CtrcbcKeys; key: pointer; len: uint) {.importcFunc,
|
||||
importc: "br_aes_pwr8_ctrcbc_init", header: "bearssl_block.h".}
|
||||
|
||||
proc aesPwr8CbcencRun*(ctx: var AesPwr8CbcencKeys; iv: pointer; data: pointer;
|
||||
len: uint) {.importcFunc, importc: "br_aes_pwr8_cbcenc_run",
|
||||
header: "bearssl_block.h".}
|
||||
|
||||
proc aesPwr8CbcdecRun*(ctx: var AesPwr8CbcdecKeys; iv: pointer; data: pointer;
|
||||
len: uint) {.importcFunc, importc: "br_aes_pwr8_cbcdec_run",
|
||||
header: "bearssl_block.h".}
|
||||
|
||||
proc aesPwr8CtrRun*(ctx: var AesPwr8CtrKeys; iv: pointer; cc: uint32; data: pointer;
|
||||
len: uint): uint32 {.importcFunc, importc: "br_aes_pwr8_ctr_run",
|
||||
header: "bearssl_block.h".}
|
||||
|
||||
proc aesPwr8CtrcbcEncrypt*(ctx: var AesPwr8CtrcbcKeys; ctr: pointer; cbcmac: pointer;
|
||||
data: pointer; len: uint) {.importcFunc,
|
||||
importc: "br_aes_pwr8_ctrcbc_encrypt", header: "bearssl_block.h".}
|
||||
|
||||
proc aesPwr8CtrcbcDecrypt*(ctx: var AesPwr8CtrcbcKeys; ctr: pointer; cbcmac: pointer;
|
||||
data: pointer; len: uint) {.importcFunc,
|
||||
importc: "br_aes_pwr8_ctrcbc_decrypt", header: "bearssl_block.h".}
|
||||
|
||||
proc aesPwr8CtrcbcCtr*(ctx: var AesPwr8CtrcbcKeys; ctr: pointer; data: pointer;
|
||||
len: uint) {.importcFunc, importc: "br_aes_pwr8_ctrcbc_ctr",
|
||||
header: "bearssl_block.h".}
|
||||
|
||||
proc aesPwr8CtrcbcMac*(ctx: var AesPwr8CtrcbcKeys; cbcmac: pointer; data: pointer;
|
||||
len: uint) {.importcFunc, importc: "br_aes_pwr8_ctrcbc_mac",
|
||||
header: "bearssl_block.h".}
|
||||
|
||||
proc aesPwr8CbcencGetVtable*(): ptr BlockCbcencClass {.importcFunc,
|
||||
importc: "br_aes_pwr8_cbcenc_get_vtable", header: "bearssl_block.h".}
|
||||
|
||||
proc aesPwr8CbcdecGetVtable*(): ptr BlockCbcdecClass {.importcFunc,
|
||||
importc: "br_aes_pwr8_cbcdec_get_vtable", header: "bearssl_block.h".}
|
||||
|
||||
proc aesPwr8CtrGetVtable*(): ptr BlockCtrClass {.importcFunc,
|
||||
importc: "br_aes_pwr8_ctr_get_vtable", header: "bearssl_block.h".}
|
||||
|
||||
proc aesPwr8CtrcbcGetVtable*(): ptr BlockCtrcbcClass {.importcFunc,
|
||||
importc: "br_aes_pwr8_ctrcbc_get_vtable", header: "bearssl_block.h".}
|
||||
|
||||
type
|
||||
AesGenCbcencKeys* {.importc: "br_aes_gen_cbcenc_keys", header: "bearssl_block.h",
|
||||
bycopy, union.} = object
|
||||
vtable* {.importc: "vtable".}: ptr BlockCbcencClass
|
||||
cBig* {.importc: "c_big".}: AesBigCbcencKeys
|
||||
cSmall* {.importc: "c_small".}: AesSmallCbcencKeys
|
||||
cCt* {.importc: "c_ct".}: AesCtCbcencKeys
|
||||
cCt64* {.importc: "c_ct64".}: AesCt64CbcencKeys
|
||||
cX86ni* {.importc: "c_x86ni".}: AesX86niCbcencKeys
|
||||
cPwr8* {.importc: "c_pwr8".}: AesPwr8CbcencKeys
|
||||
|
||||
|
||||
|
||||
type
|
||||
AesGenCbcdecKeys* {.importc: "br_aes_gen_cbcdec_keys", header: "bearssl_block.h",
|
||||
bycopy, union.} = object
|
||||
vtable* {.importc: "vtable".}: ptr BlockCbcdecClass
|
||||
cBig* {.importc: "c_big".}: AesBigCbcdecKeys
|
||||
cSmall* {.importc: "c_small".}: AesSmallCbcdecKeys
|
||||
cCt* {.importc: "c_ct".}: AesCtCbcdecKeys
|
||||
cCt64* {.importc: "c_ct64".}: AesCt64CbcdecKeys
|
||||
cX86ni* {.importc: "c_x86ni".}: AesX86niCbcdecKeys
|
||||
cPwr8* {.importc: "c_pwr8".}: AesPwr8CbcdecKeys
|
||||
|
||||
|
||||
|
||||
type
|
||||
AesGenCtrKeys* {.importc: "br_aes_gen_ctr_keys", header: "bearssl_block.h", bycopy,
|
||||
union.} = object
|
||||
vtable* {.importc: "vtable".}: ptr BlockCtrClass
|
||||
cBig* {.importc: "c_big".}: AesBigCtrKeys
|
||||
cSmall* {.importc: "c_small".}: AesSmallCtrKeys
|
||||
cCt* {.importc: "c_ct".}: AesCtCtrKeys
|
||||
cCt64* {.importc: "c_ct64".}: AesCt64CtrKeys
|
||||
cX86ni* {.importc: "c_x86ni".}: AesX86niCtrKeys
|
||||
cPwr8* {.importc: "c_pwr8".}: AesPwr8CtrKeys
|
||||
|
||||
|
||||
|
||||
type
|
||||
AesGenCtrcbcKeys* {.importc: "br_aes_gen_ctrcbc_keys", header: "bearssl_block.h",
|
||||
bycopy, union.} = object
|
||||
vtable* {.importc: "vtable".}: ptr BlockCtrcbcClass
|
||||
cBig* {.importc: "c_big".}: AesBigCtrcbcKeys
|
||||
cSmall* {.importc: "c_small".}: AesSmallCtrcbcKeys
|
||||
cCt* {.importc: "c_ct".}: AesCtCtrcbcKeys
|
||||
cCt64* {.importc: "c_ct64".}: AesCt64CtrcbcKeys
|
||||
cX86ni* {.importc: "c_x86ni".}: AesX86niCtrcbcKeys
|
||||
cPwr8* {.importc: "c_pwr8".}: AesPwr8CtrcbcKeys
|
||||
|
||||
|
||||
|
||||
const
|
||||
desTabBLOCK_SIZE* = 8
|
||||
|
||||
|
||||
type
|
||||
DesTabCbcencKeys* {.importc: "br_des_tab_cbcenc_keys", header: "bearssl_block.h",
|
||||
bycopy.} = object
|
||||
vtable* {.importc: "vtable".}: ptr BlockCbcencClass
|
||||
skey* {.importc: "skey".}: array[96, uint32]
|
||||
numRounds* {.importc: "num_rounds".}: cuint
|
||||
|
||||
|
||||
|
||||
type
|
||||
DesTabCbcdecKeys* {.importc: "br_des_tab_cbcdec_keys", header: "bearssl_block.h",
|
||||
bycopy.} = object
|
||||
vtable* {.importc: "vtable".}: ptr BlockCbcdecClass
|
||||
skey* {.importc: "skey".}: array[96, uint32]
|
||||
numRounds* {.importc: "num_rounds".}: cuint
|
||||
|
||||
|
||||
var desTabCbcencVtable* {.importc: "br_des_tab_cbcenc_vtable", header: "bearssl_block.h".}: BlockCbcencClass
|
||||
|
||||
|
||||
var desTabCbcdecVtable* {.importc: "br_des_tab_cbcdec_vtable", header: "bearssl_block.h".}: BlockCbcdecClass
|
||||
|
||||
|
||||
proc desTabCbcencInit*(ctx: var DesTabCbcencKeys; key: pointer; len: uint) {.importcFunc,
|
||||
importc: "br_des_tab_cbcenc_init", header: "bearssl_block.h".}
|
||||
|
||||
proc desTabCbcdecInit*(ctx: var DesTabCbcdecKeys; key: pointer; len: uint) {.importcFunc,
|
||||
importc: "br_des_tab_cbcdec_init", header: "bearssl_block.h".}
|
||||
|
||||
proc desTabCbcencRun*(ctx: var DesTabCbcencKeys; iv: pointer; data: pointer;
|
||||
len: uint) {.importcFunc, importc: "br_des_tab_cbcenc_run",
|
||||
header: "bearssl_block.h".}
|
||||
|
||||
proc desTabCbcdecRun*(ctx: var DesTabCbcdecKeys; iv: pointer; data: pointer;
|
||||
len: uint) {.importcFunc, importc: "br_des_tab_cbcdec_run",
|
||||
header: "bearssl_block.h".}
|
||||
|
||||
const
|
||||
desCtBLOCK_SIZE* = 8
|
||||
|
||||
|
||||
type
|
||||
DesCtCbcencKeys* {.importc: "br_des_ct_cbcenc_keys", header: "bearssl_block.h",
|
||||
bycopy.} = object
|
||||
vtable* {.importc: "vtable".}: ptr BlockCbcencClass
|
||||
skey* {.importc: "skey".}: array[96, uint32]
|
||||
numRounds* {.importc: "num_rounds".}: cuint
|
||||
|
||||
|
||||
|
||||
type
|
||||
DesCtCbcdecKeys* {.importc: "br_des_ct_cbcdec_keys", header: "bearssl_block.h",
|
||||
bycopy.} = object
|
||||
vtable* {.importc: "vtable".}: ptr BlockCbcdecClass
|
||||
skey* {.importc: "skey".}: array[96, uint32]
|
||||
numRounds* {.importc: "num_rounds".}: cuint
|
||||
|
||||
|
||||
var desCtCbcencVtable* {.importc: "br_des_ct_cbcenc_vtable", header: "bearssl_block.h".}: BlockCbcencClass
|
||||
|
||||
|
||||
var desCtCbcdecVtable* {.importc: "br_des_ct_cbcdec_vtable", header: "bearssl_block.h".}: BlockCbcdecClass
|
||||
|
||||
|
||||
proc desCtCbcencInit*(ctx: var DesCtCbcencKeys; key: pointer; len: uint) {.importcFunc,
|
||||
importc: "br_des_ct_cbcenc_init", header: "bearssl_block.h".}
|
||||
|
||||
proc desCtCbcdecInit*(ctx: var DesCtCbcdecKeys; key: pointer; len: uint) {.importcFunc,
|
||||
importc: "br_des_ct_cbcdec_init", header: "bearssl_block.h".}
|
||||
|
||||
proc desCtCbcencRun*(ctx: var DesCtCbcencKeys; iv: pointer; data: pointer; len: uint) {.
|
||||
importcFunc, importc: "br_des_ct_cbcenc_run", header: "bearssl_block.h".}
|
||||
|
||||
proc desCtCbcdecRun*(ctx: var DesCtCbcdecKeys; iv: pointer; data: pointer; len: uint) {.
|
||||
importcFunc, importc: "br_des_ct_cbcdec_run", header: "bearssl_block.h".}
|
||||
|
||||
type
|
||||
DesGenCbcencKeys* {.importc: "br_des_gen_cbcenc_keys", header: "bearssl_block.h",
|
||||
bycopy, union.} = object
|
||||
vtable* {.importc: "vtable".}: ptr BlockCbcencClass
|
||||
tab* {.importc: "tab".}: DesTabCbcencKeys
|
||||
ct* {.importc: "ct".}: DesCtCbcencKeys
|
||||
|
||||
|
||||
|
||||
type
|
||||
DesGenCbcdecKeys* {.importc: "br_des_gen_cbcdec_keys", header: "bearssl_block.h",
|
||||
bycopy, union.} = object
|
||||
vtable* {.importc: "vtable".}: ptr BlockCbcdecClass
|
||||
cTab* {.importc: "c_tab".}: DesTabCbcdecKeys
|
||||
cCt* {.importc: "c_ct".}: DesCtCbcdecKeys
|
||||
|
||||
|
||||
|
||||
type
|
||||
Chacha20Run* {.importc: "br_chacha20_run".} = proc (key: pointer; iv: pointer; cc: uint32; data: pointer; len: uint): uint32 {.
|
||||
importcFunc.}
|
||||
|
||||
|
||||
proc chacha20CtRun*(key: pointer; iv: pointer; cc: uint32; data: pointer; len: uint): uint32 {.
|
||||
importcFunc, importc: "br_chacha20_ct_run", header: "bearssl_block.h".}
|
||||
|
||||
proc chacha20Sse2Run*(key: pointer; iv: pointer; cc: uint32; data: pointer; len: uint): uint32 {.
|
||||
importcFunc, importc: "br_chacha20_sse2_run", header: "bearssl_block.h".}
|
||||
|
||||
proc chacha20Sse2Get*(): Chacha20Run {.importcFunc, importc: "br_chacha20_sse2_get",
|
||||
header: "bearssl_block.h".}
|
||||
|
||||
type
|
||||
Poly1305Run* {.importc: "br_poly1305_run".} = proc (key: pointer; iv: pointer; data: pointer; len: uint;
|
||||
aad: pointer; aadLen: uint; tag: pointer; ichacha: Chacha20Run;
|
||||
encrypt: cint) {.importcFunc.}
|
||||
|
||||
|
||||
proc poly1305CtmulRun*(key: pointer; iv: pointer; data: pointer; len: uint;
|
||||
aad: pointer; aadLen: uint; tag: pointer;
|
||||
ichacha: Chacha20Run; encrypt: cint) {.importcFunc,
|
||||
importc: "br_poly1305_ctmul_run", header: "bearssl_block.h".}
|
||||
|
||||
proc poly1305Ctmul32Run*(key: pointer; iv: pointer; data: pointer; len: uint;
|
||||
aad: pointer; aadLen: uint; tag: pointer;
|
||||
ichacha: Chacha20Run; encrypt: cint) {.importcFunc,
|
||||
importc: "br_poly1305_ctmul32_run", header: "bearssl_block.h".}
|
||||
|
||||
proc poly1305I15Run*(key: pointer; iv: pointer; data: pointer; len: uint;
|
||||
aad: pointer; aadLen: uint; tag: pointer; ichacha: Chacha20Run;
|
||||
encrypt: cint) {.importcFunc, importc: "br_poly1305_i15_run",
|
||||
header: "bearssl_block.h".}
|
||||
|
||||
proc poly1305CtmulqRun*(key: pointer; iv: pointer; data: pointer; len: uint;
|
||||
aad: pointer; aadLen: uint; tag: pointer;
|
||||
ichacha: Chacha20Run; encrypt: cint) {.importcFunc,
|
||||
importc: "br_poly1305_ctmulq_run", header: "bearssl_block.h".}
|
||||
|
||||
proc poly1305CtmulqGet*(): Poly1305Run {.importcFunc, importc: "br_poly1305_ctmulq_get",
|
||||
header: "bearssl_block.h".}
|
334
vendor/nim-bearssl/bearssl/abi/bearssl_ec.nim
vendored
Normal file
@ -0,0 +1,334 @@
|
||||
import
|
||||
"."/[bearssl_hash, bearssl_rand, csources, intx]
|
||||
|
||||
{.pragma: importcFunc, cdecl, gcsafe, noSideEffect, raises: [].}
|
||||
{.used.}
|
||||
|
||||
const
|
||||
bearEcPath = bearSrcPath & "ec/"
|
||||
|
||||
{.compile: bearEcPath & "ecdsa_atr.c".}
|
||||
{.compile: bearEcPath & "ecdsa_default_sign_asn1.c".}
|
||||
{.compile: bearEcPath & "ecdsa_default_sign_raw.c".}
|
||||
{.compile: bearEcPath & "ecdsa_default_vrfy_asn1.c".}
|
||||
{.compile: bearEcPath & "ecdsa_default_vrfy_raw.c".}
|
||||
{.compile: bearEcPath & "ecdsa_i15_bits.c".}
|
||||
{.compile: bearEcPath & "ecdsa_i15_sign_asn1.c".}
|
||||
{.compile: bearEcPath & "ecdsa_i15_sign_raw.c".}
|
||||
{.compile: bearEcPath & "ecdsa_i15_vrfy_asn1.c".}
|
||||
{.compile: bearEcPath & "ecdsa_i15_vrfy_raw.c".}
|
||||
{.compile: bearEcPath & "ecdsa_i31_bits.c".}
|
||||
{.compile: bearEcPath & "ecdsa_i31_sign_asn1.c".}
|
||||
{.compile: bearEcPath & "ecdsa_i31_sign_raw.c".}
|
||||
{.compile: bearEcPath & "ecdsa_i31_vrfy_asn1.c".}
|
||||
{.compile: bearEcPath & "ecdsa_i31_vrfy_raw.c".}
|
||||
{.compile: bearEcPath & "ecdsa_rta.c".}
|
||||
{.compile: bearEcPath & "ec_all_m15.c".}
|
||||
{.compile: bearEcPath & "ec_all_m31.c".}
|
||||
{.compile: bearEcPath & "ec_c25519_i15.c".}
|
||||
{.compile: bearEcPath & "ec_c25519_i31.c".}
|
||||
{.compile: bearEcPath & "ec_c25519_m15.c".}
|
||||
{.compile: bearEcPath & "ec_c25519_m31.c".}
|
||||
{.compile: bearEcPath & "ec_c25519_m62.c".}
|
||||
{.compile: bearEcPath & "ec_c25519_m64.c".}
|
||||
{.compile: bearEcPath & "ec_curve25519.c".}
|
||||
{.compile: bearEcPath & "ec_default.c".}
|
||||
{.compile: bearEcPath & "ec_keygen.c".}
|
||||
{.compile: bearEcPath & "ec_p256_m15.c".}
|
||||
{.compile: bearEcPath & "ec_p256_m31.c".}
|
||||
{.compile: bearEcPath & "ec_p256_m62.c".}
|
||||
{.compile: bearEcPath & "ec_p256_m64.c".}
|
||||
{.compile: bearEcPath & "ec_prime_i15.c".}
|
||||
{.compile: bearEcPath & "ec_prime_i31.c".}
|
||||
{.compile: bearEcPath & "ec_pubkey.c".}
|
||||
{.compile: bearEcPath & "ec_secp256r1.c".}
|
||||
{.compile: bearEcPath & "ec_secp384r1.c".}
|
||||
{.compile: bearEcPath & "ec_secp521r1.c".}
|
||||
|
||||
|
||||
const
|
||||
EC_sect163k1* = 1
|
||||
|
||||
|
||||
const
|
||||
EC_sect163r1* = 2
|
||||
|
||||
|
||||
const
|
||||
EC_sect163r2* = 3
|
||||
|
||||
|
||||
const
|
||||
EC_sect193r1* = 4
|
||||
|
||||
|
||||
const
|
||||
EC_sect193r2* = 5
|
||||
|
||||
|
||||
const
|
||||
EC_sect233k1* = 6
|
||||
|
||||
|
||||
const
|
||||
EC_sect233r1* = 7
|
||||
|
||||
|
||||
const
|
||||
EC_sect239k1* = 8
|
||||
|
||||
|
||||
const
|
||||
EC_sect283k1* = 9
|
||||
|
||||
|
||||
const
|
||||
EC_sect283r1* = 10
|
||||
|
||||
|
||||
const
|
||||
EC_sect409k1* = 11
|
||||
|
||||
|
||||
const
|
||||
EC_sect409r1* = 12
|
||||
|
||||
|
||||
const
|
||||
EC_sect571k1* = 13
|
||||
|
||||
|
||||
const
|
||||
EC_sect571r1* = 14
|
||||
|
||||
|
||||
const
|
||||
EC_secp160k1* = 15
|
||||
|
||||
|
||||
const
|
||||
EC_secp160r1* = 16
|
||||
|
||||
|
||||
const
|
||||
EC_secp160r2* = 17
|
||||
|
||||
|
||||
const
|
||||
EC_secp192k1* = 18
|
||||
|
||||
|
||||
const
|
||||
EC_secp192r1* = 19
|
||||
|
||||
|
||||
const
|
||||
EC_secp224k1* = 20
|
||||
|
||||
|
||||
const
|
||||
EC_secp224r1* = 21
|
||||
|
||||
|
||||
const
|
||||
EC_secp256k1* = 22
|
||||
|
||||
|
||||
const
|
||||
EC_secp256r1* = 23
|
||||
|
||||
|
||||
const
|
||||
EC_secp384r1* = 24
|
||||
|
||||
|
||||
const
|
||||
EC_secp521r1* = 25
|
||||
|
||||
|
||||
const
|
||||
EC_brainpoolP256r1* = 26
|
||||
|
||||
|
||||
const
|
||||
EC_brainpoolP384r1* = 27
|
||||
|
||||
|
||||
const
|
||||
EC_brainpoolP512r1* = 28
|
||||
|
||||
|
||||
const
|
||||
EC_curve25519* = 29
|
||||
|
||||
|
||||
const
|
||||
EC_curve448* = 30
|
||||
|
||||
|
||||
type
|
||||
EcPublicKey* {.importc: "br_ec_public_key", header: "bearssl_ec.h", bycopy.} = object
|
||||
curve* {.importc: "curve".}: cint
|
||||
q* {.importc: "q".}: ptr byte
|
||||
qlen* {.importc: "qlen".}: uint
|
||||
|
||||
|
||||
|
||||
type
|
||||
EcPrivateKey* {.importc: "br_ec_private_key", header: "bearssl_ec.h", bycopy.} = object
|
||||
curve* {.importc: "curve".}: cint
|
||||
x* {.importc: "x".}: ptr byte
|
||||
xlen* {.importc: "xlen".}: uint
|
||||
|
||||
|
||||
|
||||
type
|
||||
EcImpl* {.importc: "br_ec_impl", header: "bearssl_ec.h", bycopy.} = object
|
||||
supportedCurves* {.importc: "supported_curves".}: uint32
|
||||
generator* {.importc: "generator".}: proc (curve: cint; len: var uint): ptr byte {.
|
||||
importcFunc.}
|
||||
order* {.importc: "order".}: proc (curve: cint; len: var uint): ptr byte {.importcFunc.}
|
||||
xoff* {.importc: "xoff".}: proc (curve: cint; len: var uint): uint {.importcFunc.}
|
||||
mul* {.importc: "mul".}: proc (g: ptr byte; glen: uint; x: ptr byte;
|
||||
xlen: uint; curve: cint): uint32 {.importcFunc.}
|
||||
mulgen* {.importc: "mulgen".}: proc (r: ptr byte; x: ptr byte; xlen: uint;
|
||||
curve: cint): uint {.importcFunc.}
|
||||
muladd* {.importc: "muladd".}: proc (a: ptr byte; b: ptr byte; len: uint;
|
||||
x: ptr byte; xlen: uint; y: ptr byte;
|
||||
ylen: uint; curve: cint): uint32 {.importcFunc.}
|
||||
|
||||
|
||||
var ecPrimeI31* {.importc: "br_ec_prime_i31", header: "bearssl_ec.h".}: EcImpl
|
||||
|
||||
|
||||
var ecPrimeI15* {.importc: "br_ec_prime_i15", header: "bearssl_ec.h".}: EcImpl
|
||||
|
||||
|
||||
var ecP256M15* {.importc: "br_ec_p256_m15", header: "bearssl_ec.h".}: EcImpl
|
||||
|
||||
|
||||
var ecP256M31* {.importc: "br_ec_p256_m31", header: "bearssl_ec.h".}: EcImpl
|
||||
|
||||
|
||||
var ecP256M62* {.importc: "br_ec_p256_m62", header: "bearssl_ec.h".}: EcImpl
|
||||
|
||||
|
||||
proc ecP256M62Get*(): ptr EcImpl {.importcFunc, importc: "br_ec_p256_m62_get",
|
||||
header: "bearssl_ec.h".}
|
||||
|
||||
var ecP256M64* {.importc: "br_ec_p256_m64", header: "bearssl_ec.h".}: EcImpl
|
||||
|
||||
|
||||
proc ecP256M64Get*(): ptr EcImpl {.importcFunc, importc: "br_ec_p256_m64_get",
|
||||
header: "bearssl_ec.h".}
|
||||
|
||||
|
||||
var ecC25519I15* {.importc: "br_ec_c25519_i15", header: "bearssl_ec.h".}: EcImpl
|
||||
|
||||
|
||||
var ecC25519I31* {.importc: "br_ec_c25519_i31", header: "bearssl_ec.h".}: EcImpl
|
||||
|
||||
|
||||
var ecC25519M15* {.importc: "br_ec_c25519_m15", header: "bearssl_ec.h".}: EcImpl
|
||||
|
||||
|
||||
var ecC25519M31* {.importc: "br_ec_c25519_m31", header: "bearssl_ec.h".}: EcImpl
|
||||
|
||||
|
||||
var ecC25519M62* {.importc: "br_ec_c25519_m62", header: "bearssl_ec.h".}: EcImpl
|
||||
|
||||
|
||||
proc ecC25519M62Get*(): ptr EcImpl {.importcFunc, importc: "br_ec_c25519_m62_get",
|
||||
header: "bearssl_ec.h".}
|
||||
|
||||
var ecC25519M64* {.importc: "br_ec_c25519_m64", header: "bearssl_ec.h".}: EcImpl
|
||||
|
||||
|
||||
proc ecC25519M64Get*(): ptr EcImpl {.importcFunc, importc: "br_ec_c25519_m64_get",
|
||||
header: "bearssl_ec.h".}
|
||||
|
||||
|
||||
var ecAllM15* {.importc: "br_ec_all_m15", header: "bearssl_ec.h".}: EcImpl
|
||||
|
||||
|
||||
var ecAllM31* {.importc: "br_ec_all_m31", header: "bearssl_ec.h".}: EcImpl
|
||||
|
||||
|
||||
proc ecGetDefault*(): ptr EcImpl {.importcFunc, importc: "br_ec_get_default",
|
||||
header: "bearssl_ec.h".}
|
||||
|
||||
proc ecdsaRawToAsn1*(sig: pointer; sigLen: uint): uint {.importcFunc,
|
||||
importc: "br_ecdsa_raw_to_asn1", header: "bearssl_ec.h".}
|
||||
|
||||
proc ecdsaAsn1ToRaw*(sig: pointer; sigLen: uint): uint {.importcFunc,
|
||||
importc: "br_ecdsa_asn1_to_raw", header: "bearssl_ec.h".}
|
||||
|
||||
type
|
||||
EcdsaSign* {.importc: "br_ecdsa_sign".} = proc (impl: ptr EcImpl; hf: ptr HashClass; hashValue: pointer;
|
||||
sk: ptr EcPrivateKey; sig: pointer): uint {.importcFunc.}
|
||||
|
||||
|
||||
type
|
||||
EcdsaVrfy* {.importc: "br_ecdsa_vrfy".} = proc (impl: ptr EcImpl; hash: pointer; hashLen: uint;
|
||||
pk: ptr EcPublicKey; sig: pointer; sigLen: uint): uint32 {.importcFunc.}
|
||||
|
||||
|
||||
proc ecdsaI31SignAsn1*(impl: ptr EcImpl; hf: ptr HashClass; hashValue: pointer;
|
||||
sk: ptr EcPrivateKey; sig: pointer): uint {.importcFunc,
|
||||
importc: "br_ecdsa_i31_sign_asn1", header: "bearssl_ec.h".}
|
||||
|
||||
proc ecdsaI31SignRaw*(impl: ptr EcImpl; hf: ptr HashClass; hashValue: pointer;
|
||||
sk: ptr EcPrivateKey; sig: pointer): uint {.importcFunc,
|
||||
importc: "br_ecdsa_i31_sign_raw", header: "bearssl_ec.h".}
|
||||
|
||||
proc ecdsaI31VrfyAsn1*(impl: ptr EcImpl; hash: pointer; hashLen: uint;
|
||||
pk: ptr EcPublicKey; sig: pointer; sigLen: uint): uint32 {.
|
||||
importcFunc, importc: "br_ecdsa_i31_vrfy_asn1", header: "bearssl_ec.h".}
|
||||
|
||||
proc ecdsaI31VrfyRaw*(impl: ptr EcImpl; hash: pointer; hashLen: uint;
|
||||
pk: ptr EcPublicKey; sig: pointer; sigLen: uint): uint32 {.
|
||||
importcFunc, importc: "br_ecdsa_i31_vrfy_raw", header: "bearssl_ec.h".}
|
||||
|
||||
proc ecdsaI15SignAsn1*(impl: ptr EcImpl; hf: ptr HashClass; hashValue: pointer;
|
||||
sk: ptr EcPrivateKey; sig: pointer): uint {.importcFunc,
|
||||
importc: "br_ecdsa_i15_sign_asn1", header: "bearssl_ec.h".}
|
||||
|
||||
proc ecdsaI15SignRaw*(impl: ptr EcImpl; hf: ptr HashClass; hashValue: pointer;
|
||||
sk: ptr EcPrivateKey; sig: pointer): uint {.importcFunc,
|
||||
importc: "br_ecdsa_i15_sign_raw", header: "bearssl_ec.h".}
|
||||
|
||||
proc ecdsaI15VrfyAsn1*(impl: ptr EcImpl; hash: pointer; hashLen: uint;
|
||||
pk: ptr EcPublicKey; sig: pointer; sigLen: uint): uint32 {.
|
||||
importcFunc, importc: "br_ecdsa_i15_vrfy_asn1", header: "bearssl_ec.h".}
|
||||
|
||||
proc ecdsaI15VrfyRaw*(impl: ptr EcImpl; hash: pointer; hashLen: uint;
|
||||
pk: ptr EcPublicKey; sig: pointer; sigLen: uint): uint32 {.
|
||||
importcFunc, importc: "br_ecdsa_i15_vrfy_raw", header: "bearssl_ec.h".}
|
||||
|
||||
proc ecdsaSignAsn1GetDefault*(): EcdsaSign {.importcFunc,
|
||||
importc: "br_ecdsa_sign_asn1_get_default", header: "bearssl_ec.h".}
|
||||
|
||||
proc ecdsaSignRawGetDefault*(): EcdsaSign {.importcFunc,
|
||||
importc: "br_ecdsa_sign_raw_get_default", header: "bearssl_ec.h".}
|
||||
|
||||
proc ecdsaVrfyAsn1GetDefault*(): EcdsaVrfy {.importcFunc,
|
||||
importc: "br_ecdsa_vrfy_asn1_get_default", header: "bearssl_ec.h".}
|
||||
|
||||
proc ecdsaVrfyRawGetDefault*(): EcdsaVrfy {.importcFunc,
|
||||
importc: "br_ecdsa_vrfy_raw_get_default", header: "bearssl_ec.h".}
|
||||
|
||||
const
|
||||
EC_KBUF_PRIV_MAX_SIZE* = 72
|
||||
|
||||
|
||||
const
|
||||
EC_KBUF_PUB_MAX_SIZE* = 145
|
||||
|
||||
|
||||
proc ecKeygen*(rngCtx: ptr ptr PrngClass; impl: ptr EcImpl; sk: ptr EcPrivateKey;
|
||||
kbuf: pointer; curve: cint): uint {.importcFunc, importc: "br_ec_keygen",
|
||||
header: "bearssl_ec.h".}
|
||||
|
||||
proc ecComputePub*(impl: ptr EcImpl; pk: ptr EcPublicKey; kbuf: pointer;
|
||||
sk: ptr EcPrivateKey): uint {.importcFunc,
|
||||
importc: "br_ec_compute_pub", header: "bearssl_ec.h".}
|
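A sketch of key generation, signing and verification with the EC API above, combined with the HMAC-DRBG and SHA-256 bindings from the sibling modules. The import paths, the constant seed and the signature buffer size are assumptions, not taken from this diff:

import bearssl/abi/[bearssl_ec, bearssl_hash, bearssl_rand]  # hypothetical paths

# Deterministic placeholder seed; use a real entropy source in practice.
var rng: HmacDrbgContext
var seed = [byte 1, 2, 3, 4]
hmacDrbgInit(rng, addr sha256Vtable, addr seed[0], uint(seed.len))

let impl = ecGetDefault()
var
  sk: EcPrivateKey
  pk: EcPublicKey
  skBuf: array[EC_KBUF_PRIV_MAX_SIZE, byte]
  pkBuf: array[EC_KBUF_PUB_MAX_SIZE, byte]

# Generate a P-256 key pair and derive the public key from it.
doAssert ecKeygen(addr rng.vtable, impl, addr sk, addr skBuf[0],
                  cint(EC_secp256r1)) > 0
doAssert ecComputePub(impl, addr pk, addr pkBuf[0], addr sk) > 0

# Sign a precomputed SHA-256 hash (ASN.1/DER signature encoding).
var hash: array[sha256SIZE, byte]          # fill with the message hash
var sig: array[80, byte]                   # P-256 DER signatures fit in 72 bytes
let sign = ecdsaSignAsn1GetDefault()
let sigLen = sign(impl, addr sha256Vtable, addr hash[0], addr sk, addr sig[0])
doAssert sigLen > 0

let vrfy = ecdsaVrfyAsn1GetDefault()
doAssert vrfy(impl, addr hash[0], uint(sha256SIZE), addr pk,
              addr sig[0], sigLen) == 1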
368 vendor/nim-bearssl/bearssl/abi/bearssl_hash.nim (vendored, new file)
@ -0,0 +1,368 @@
|
||||
import
|
||||
"."/[csources, inner]
|
||||
|
||||
{.pragma: importcFunc, cdecl, gcsafe, noSideEffect, raises: [].}
|
||||
{.used.}
|
||||
|
||||
const
|
||||
bearHashPath = bearSrcPath & "hash/"
|
||||
|
||||
{.compile: bearHashPath & "dig_oid.c".}
|
||||
{.compile: bearHashPath & "dig_size.c".}
|
||||
{.compile: bearHashPath & "ghash_ctmul.c".}
|
||||
{.compile: bearHashPath & "ghash_ctmul32.c".}
|
||||
{.compile: bearHashPath & "ghash_ctmul64.c".}
|
||||
{.compile: bearHashPath & "ghash_pclmul.c".}
|
||||
{.compile: bearHashPath & "ghash_pwr8.c".}
|
||||
{.compile: bearHashPath & "md5.c".}
|
||||
{.compile: bearHashPath & "md5sha1.c".}
|
||||
{.compile: bearHashPath & "mgf1.c".}
|
||||
{.compile: bearHashPath & "multihash.c".}
|
||||
{.compile: bearHashPath & "sha1.c".}
|
||||
{.compile: bearHashPath & "sha2big.c".}
|
||||
{.compile: bearHashPath & "sha2small.c".}
|
||||
|
||||
type
|
||||
HashClass* {.importc: "br_hash_class", header: "bearssl_hash.h", bycopy.} = object
|
||||
contextSize* {.importc: "context_size".}: uint
|
||||
desc* {.importc: "desc".}: uint32
|
||||
init* {.importc: "init".}: proc (ctx: ptr ptr HashClass) {.importcFunc.}
|
||||
update* {.importc: "update".}: proc (ctx: ptr ptr HashClass; data: pointer;
|
||||
len: uint) {.importcFunc.}
|
||||
`out`* {.importc: "out".}: proc (ctx: ptr ptr HashClass; dst: pointer) {.importcFunc.}
|
||||
state* {.importc: "state".}: proc (ctx: ptr ptr HashClass; dst: pointer): uint64 {.
|
||||
importcFunc.}
|
||||
setState* {.importc: "set_state".}: proc (ctx: ptr ptr HashClass; stb: pointer;
|
||||
count: uint64) {.importcFunc.}
|
||||
|
||||
|
||||
template hashdesc_Id*(id: untyped): untyped =
|
||||
((uint32)(id) shl hashdesc_Id_Off)
|
||||
|
||||
const
|
||||
HASHDESC_ID_OFF* = 0
|
||||
HASHDESC_ID_MASK* = 0xFF
|
||||
|
||||
template hashdesc_Out*(size: untyped): untyped =
|
||||
((uint32)(size) shl hashdesc_Out_Off)
|
||||
|
||||
const
|
||||
HASHDESC_OUT_OFF* = 8
|
||||
HASHDESC_OUT_MASK* = 0x7F
|
||||
|
||||
template hashdesc_State*(size: untyped): untyped =
|
||||
((uint32)(size) shl hashdesc_State_Off)
|
||||
|
||||
const
|
||||
HASHDESC_STATE_OFF* = 15
|
||||
HASHDESC_STATE_MASK* = 0xFF
|
||||
|
||||
template hashdesc_Lblen*(ls: untyped): untyped =
|
||||
((uint32)(ls) shl hashdesc_Lblen_Off)
|
||||
|
||||
const
|
||||
HASHDESC_LBLEN_OFF* = 23
|
||||
HASHDESC_LBLEN_MASK* = 0x0F
|
||||
HASHDESC_MD_PADDING* = (1'u32 shl 28)
|
||||
HASHDESC_MD_PADDING_128* = (1'u32 shl 29)
|
||||
HASHDESC_MD_PADDING_BE* = (1'u32 shl 30)
|
||||
|
||||
|
||||
const
|
||||
md5ID* = 1
|
||||
|
||||
|
||||
const
|
||||
md5SIZE* = 16
|
||||
|
||||
|
||||
var md5Vtable* {.importc: "br_md5_vtable", header: "bearssl_hash.h".}: HashClass
|
||||
|
||||
|
||||
type
|
||||
Md5Context* {.importc: "br_md5_context", header: "bearssl_hash.h", bycopy.} = object
|
||||
vtable* {.importc: "vtable".}: ptr HashClass
|
||||
buf* {.importc: "buf".}: array[64, byte]
|
||||
count* {.importc: "count".}: uint64
|
||||
val* {.importc: "val".}: array[4, uint32]
|
||||
|
||||
|
||||
|
||||
proc md5Init*(ctx: var Md5Context) {.importcFunc, importc: "br_md5_init",
|
||||
header: "bearssl_hash.h".}
|
||||
|
||||
proc md5Update*(ctx: var Md5Context; data: pointer; len: uint) {.importcFunc,
|
||||
importc: "br_md5_update", header: "bearssl_hash.h".}
|
||||
|
||||
proc md5Out*(ctx: var Md5Context; `out`: pointer) {.importcFunc, importc: "br_md5_out",
|
||||
header: "bearssl_hash.h".}
|
||||
|
||||
proc md5State*(ctx: var Md5Context; `out`: pointer): uint64 {.importcFunc,
|
||||
importc: "br_md5_state", header: "bearssl_hash.h".}
|
||||
|
||||
proc md5SetState*(ctx: var Md5Context; stb: pointer; count: uint64) {.importcFunc,
|
||||
importc: "br_md5_set_state", header: "bearssl_hash.h".}
|
||||
|
||||
const
|
||||
sha1ID* = 2
|
||||
|
||||
|
||||
const
|
||||
sha1SIZE* = 20
|
||||
|
||||
|
||||
var sha1Vtable* {.importc: "br_sha1_vtable", header: "bearssl_hash.h".}: HashClass
|
||||
|
||||
|
||||
type
|
||||
Sha1Context* {.importc: "br_sha1_context", header: "bearssl_hash.h", bycopy.} = object
|
||||
vtable* {.importc: "vtable".}: ptr HashClass
|
||||
buf* {.importc: "buf".}: array[64, byte]
|
||||
count* {.importc: "count".}: uint64
|
||||
val* {.importc: "val".}: array[5, uint32]
|
||||
|
||||
|
||||
|
||||
proc sha1Init*(ctx: var Sha1Context) {.importcFunc, importc: "br_sha1_init",
|
||||
header: "bearssl_hash.h".}
|
||||
|
||||
proc sha1Update*(ctx: var Sha1Context; data: pointer; len: uint) {.importcFunc,
|
||||
importc: "br_sha1_update", header: "bearssl_hash.h".}
|
||||
|
||||
proc sha1Out*(ctx: var Sha1Context; `out`: pointer) {.importcFunc, importc: "br_sha1_out",
|
||||
header: "bearssl_hash.h".}
|
||||
|
||||
proc sha1State*(ctx: var Sha1Context; `out`: pointer): uint64 {.importcFunc,
|
||||
importc: "br_sha1_state", header: "bearssl_hash.h".}
|
||||
|
||||
proc sha1SetState*(ctx: var Sha1Context; stb: pointer; count: uint64) {.importcFunc,
|
||||
importc: "br_sha1_set_state", header: "bearssl_hash.h".}
|
||||
|
||||
const
|
||||
sha224ID* = 3
|
||||
|
||||
|
||||
const
|
||||
sha224SIZE* = 28
|
||||
|
||||
|
||||
var sha224Vtable* {.importc: "br_sha224_vtable", header: "bearssl_hash.h".}: HashClass
|
||||
|
||||
|
||||
type
|
||||
Sha224Context* {.importc: "br_sha224_context", header: "bearssl_hash.h", bycopy.} = object
|
||||
vtable* {.importc: "vtable".}: ptr HashClass
|
||||
buf* {.importc: "buf".}: array[64, byte]
|
||||
count* {.importc: "count".}: uint64
|
||||
val* {.importc: "val".}: array[8, uint32]
|
||||
|
||||
|
||||
|
||||
proc sha224Init*(ctx: var Sha224Context) {.importcFunc, importc: "br_sha224_init",
|
||||
header: "bearssl_hash.h".}
|
||||
|
||||
proc sha224Update*(ctx: var Sha224Context; data: pointer; len: uint) {.importcFunc,
|
||||
importc: "br_sha224_update", header: "bearssl_hash.h".}
|
||||
|
||||
proc sha224Out*(ctx: var Sha224Context; `out`: pointer) {.importcFunc,
|
||||
importc: "br_sha224_out", header: "bearssl_hash.h".}
|
||||
|
||||
proc sha224State*(ctx: var Sha224Context; `out`: pointer): uint64 {.importcFunc,
|
||||
importc: "br_sha224_state", header: "bearssl_hash.h".}
|
||||
|
||||
proc sha224SetState*(ctx: var Sha224Context; stb: pointer; count: uint64) {.importcFunc,
|
||||
importc: "br_sha224_set_state", header: "bearssl_hash.h".}
|
||||
|
||||
const
|
||||
sha256ID* = 4
|
||||
|
||||
|
||||
const
|
||||
sha256SIZE* = 32
|
||||
|
||||
|
||||
var sha256Vtable* {.importc: "br_sha256_vtable", header: "bearssl_hash.h".}: HashClass
|
||||
|
||||
type
|
||||
Sha256Context* = Sha224Context
|
||||
|
||||
|
||||
proc sha256Init*(ctx: var Sha256Context) {.importcFunc, importc: "br_sha256_init",
|
||||
header: "bearssl_hash.h".}
|
||||
|
||||
template sha256Update*(ctx: var Sha256Context; data: pointer; len: int) =
|
||||
sha224Update(ctx, data, len)
|
||||
|
||||
proc sha256Out*(ctx: var Sha256Context; `out`: pointer) {.importcFunc,
|
||||
importc: "br_sha256_out", header: "bearssl_hash.h".}
|
||||
|
||||
template sha256State*(ctx: var Sha256Context; `out`: pointer): uint64 =
|
||||
sha224State(ctx, `out`)
|
||||
|
||||
template sha256SetState*(ctx: var Sha256Context; stb: pointer; count: uint64) =
|
||||
sha224SetState(ctx, stb, count)
|
||||
|
||||
const
|
||||
sha384ID* = 5
|
||||
|
||||
|
||||
const
|
||||
sha384SIZE* = 48
|
||||
|
||||
|
||||
var sha384Vtable* {.importc: "br_sha384_vtable", header: "bearssl_hash.h".}: HashClass
|
||||
|
||||
|
||||
type
|
||||
Sha384Context* {.importc: "br_sha384_context", header: "bearssl_hash.h", bycopy.} = object
|
||||
vtable* {.importc: "vtable".}: ptr HashClass
|
||||
buf* {.importc: "buf".}: array[128, byte]
|
||||
count* {.importc: "count".}: uint64
|
||||
val* {.importc: "val".}: array[8, uint64]
|
||||
|
||||
|
||||
|
||||
proc sha384Init*(ctx: var Sha384Context) {.importcFunc, importc: "br_sha384_init",
|
||||
header: "bearssl_hash.h".}
|
||||
|
||||
proc sha384Update*(ctx: var Sha384Context; data: pointer; len: uint) {.importcFunc,
|
||||
importc: "br_sha384_update", header: "bearssl_hash.h".}
|
||||
|
||||
proc sha384Out*(ctx: var Sha384Context; `out`: pointer) {.importcFunc,
|
||||
importc: "br_sha384_out", header: "bearssl_hash.h".}
|
||||
|
||||
proc sha384State*(ctx: var Sha384Context; `out`: pointer): uint64 {.importcFunc,
|
||||
importc: "br_sha384_state", header: "bearssl_hash.h".}
|
||||
|
||||
proc sha384SetState*(ctx: var Sha384Context; stb: pointer; count: uint64) {.importcFunc,
|
||||
importc: "br_sha384_set_state", header: "bearssl_hash.h".}
|
||||
|
||||
const
|
||||
sha512ID* = 6
|
||||
|
||||
|
||||
const
|
||||
sha512SIZE* = 64
|
||||
|
||||
|
||||
var sha512Vtable* {.importc: "br_sha512_vtable", header: "bearssl_hash.h".}: HashClass
|
||||
|
||||
type
|
||||
Sha512Context* = Sha384Context
|
||||
|
||||
|
||||
proc sha512Init*(ctx: var Sha512Context) {.importcFunc, importc: "br_sha512_init",
|
||||
header: "bearssl_hash.h".}
|
||||
const
|
||||
sha512Update* = sha384Update
|
||||
|
||||
|
||||
proc sha512Out*(ctx: var Sha512Context; `out`: pointer) {.importcFunc,
|
||||
importc: "br_sha512_out", header: "bearssl_hash.h".}
|
||||
|
||||
const
|
||||
md5sha1ID* = 0
|
||||
|
||||
|
||||
const
|
||||
md5sha1SIZE* = 36
|
||||
|
||||
|
||||
var md5sha1Vtable* {.importc: "br_md5sha1_vtable", header: "bearssl_hash.h".}: HashClass
|
||||
|
||||
|
||||
type
|
||||
Md5sha1Context* {.importc: "br_md5sha1_context", header: "bearssl_hash.h", bycopy.} = object
|
||||
vtable* {.importc: "vtable".}: ptr HashClass
|
||||
buf* {.importc: "buf".}: array[64, byte]
|
||||
count* {.importc: "count".}: uint64
|
||||
valMd5* {.importc: "val_md5".}: array[4, uint32]
|
||||
valSha1* {.importc: "val_sha1".}: array[5, uint32]
|
||||
|
||||
|
||||
|
||||
proc md5sha1Init*(ctx: var Md5sha1Context) {.importcFunc, importc: "br_md5sha1_init",
|
||||
header: "bearssl_hash.h".}
|
||||
|
||||
proc md5sha1Update*(ctx: var Md5sha1Context; data: pointer; len: uint) {.importcFunc,
|
||||
importc: "br_md5sha1_update", header: "bearssl_hash.h".}
|
||||
|
||||
proc md5sha1Out*(ctx: var Md5sha1Context; `out`: pointer) {.importcFunc,
|
||||
importc: "br_md5sha1_out", header: "bearssl_hash.h".}
|
||||
|
||||
proc md5sha1State*(ctx: var Md5sha1Context; `out`: pointer): uint64 {.importcFunc,
|
||||
importc: "br_md5sha1_state", header: "bearssl_hash.h".}
|
||||
|
||||
proc md5sha1SetState*(ctx: var Md5sha1Context; stb: pointer; count: uint64) {.importcFunc,
|
||||
importc: "br_md5sha1_set_state", header: "bearssl_hash.h".}
|
||||
|
||||
type
|
||||
HashCompatContext* {.importc: "br_hash_compat_context", header: "bearssl_hash.h",
|
||||
bycopy, union.} = object
|
||||
vtable* {.importc: "vtable".}: ptr HashClass
|
||||
md5* {.importc: "md5".}: Md5Context
|
||||
sha1* {.importc: "sha1".}: Sha1Context
|
||||
sha224* {.importc: "sha224".}: Sha224Context
|
||||
sha256* {.importc: "sha256".}: Sha256Context
|
||||
sha384* {.importc: "sha384".}: Sha384Context
|
||||
sha512* {.importc: "sha512".}: Sha512Context
|
||||
md5sha1* {.importc: "md5sha1".}: Md5sha1Context
|
||||
|
||||
|
||||
|
||||
type
|
||||
MultihashContext* {.importc: "br_multihash_context", header: "bearssl_hash.h",
|
||||
bycopy.} = object
|
||||
buf* {.importc: "buf".}: array[128, byte]
|
||||
count* {.importc: "count".}: uint64
|
||||
val32* {.importc: "val_32".}: array[25, uint32]
|
||||
val64* {.importc: "val_64".}: array[16, uint64]
|
||||
impl* {.importc: "impl".}: array[6, ptr HashClass]
|
||||
|
||||
|
||||
|
||||
proc multihashZero*(ctx: var MultihashContext) {.importcFunc, importc: "br_multihash_zero",
|
||||
header: "bearssl_hash.h".}
|
||||
|
||||
proc multihashSetimpl*(ctx: var MultihashContext; id: cint; impl: ptr HashClass) {.
|
||||
inline.} =
|
||||
ctx.impl[id - 1] = impl
|
||||
|
||||
|
||||
proc multihashGetimpl*(ctx: var MultihashContext; id: cint): ptr HashClass {.inline.} =
|
||||
return ctx.impl[id - 1]
|
||||
|
||||
|
||||
proc multihashInit*(ctx: var MultihashContext) {.importcFunc, importc: "br_multihash_init",
|
||||
header: "bearssl_hash.h".}
|
||||
|
||||
proc multihashUpdate*(ctx: var MultihashContext; data: pointer; len: uint) {.importcFunc,
|
||||
importc: "br_multihash_update", header: "bearssl_hash.h".}
|
||||
|
||||
proc multihashOut*(ctx: var MultihashContext; id: cint; dst: pointer): uint {.importcFunc,
|
||||
importc: "br_multihash_out", header: "bearssl_hash.h".}
|
||||
|
||||
type
|
||||
Ghash* {.importc: "br_ghash".} = proc (y: pointer; h: pointer; data: pointer; len: uint) {.importcFunc.}
|
||||
|
||||
|
||||
proc ghashCtmul*(y: pointer; h: pointer; data: pointer; len: uint) {.importcFunc,
|
||||
importc: "br_ghash_ctmul", header: "bearssl_hash.h".}
|
||||
|
||||
proc ghashCtmul32*(y: pointer; h: pointer; data: pointer; len: uint) {.importcFunc,
|
||||
importc: "br_ghash_ctmul32", header: "bearssl_hash.h".}
|
||||
|
||||
proc ghashCtmul64*(y: pointer; h: pointer; data: pointer; len: uint) {.importcFunc,
|
||||
importc: "br_ghash_ctmul64", header: "bearssl_hash.h".}
|
||||
|
||||
proc ghashPclmul*(y: pointer; h: pointer; data: pointer; len: uint) {.importcFunc,
|
||||
importc: "br_ghash_pclmul", header: "bearssl_hash.h".}
|
||||
|
||||
proc ghashPclmulGet*(): Ghash {.importcFunc, importc: "br_ghash_pclmul_get",
|
||||
header: "bearssl_hash.h".}
|
||||
|
||||
proc ghashPwr8*(y: pointer; h: pointer; data: pointer; len: uint) {.importcFunc,
|
||||
importc: "br_ghash_pwr8", header: "bearssl_hash.h".}
|
||||
|
||||
proc ghashPwr8Get*(): Ghash {.importcFunc, importc: "br_ghash_pwr8_get",
|
||||
header: "bearssl_hash.h".}
|
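A minimal hashing sketch against the SHA-256 bindings above; the import path is an assumption:

import bearssl/abi/bearssl_hash   # hypothetical import path

var ctx: Sha256Context
var msg = "abc"
var digest: array[sha256SIZE, byte]

sha256Init(ctx)
sha256Update(ctx, addr msg[0], msg.len)   # forwards to br_sha224_update
sha256Out(ctx, addr digest[0])            # digest now holds the 32-byte value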
56 vendor/nim-bearssl/bearssl/abi/bearssl_hmac.nim (vendored, new file)
@ -0,0 +1,56 @@
|
||||
import
|
||||
"."/[bearssl_hash, csources, inner]
|
||||
|
||||
{.pragma: importcFunc, cdecl, gcsafe, noSideEffect, raises: [].}
|
||||
{.used.}
|
||||
|
||||
const
|
||||
bearMacPath = bearSrcPath & "mac/"
|
||||
|
||||
{.compile: bearMacPath & "hmac.c".}
|
||||
{.compile: bearMacPath & "hmac_ct.c".}
|
||||
|
||||
type
|
||||
HmacKeyContext* {.importc: "br_hmac_key_context", header: "bearssl_hmac.h", bycopy.} = object
|
||||
digVtable* {.importc: "dig_vtable".}: ptr HashClass
|
||||
ksi* {.importc: "ksi".}: array[64, byte]
|
||||
kso* {.importc: "kso".}: array[64, byte]
|
||||
|
||||
|
||||
|
||||
proc hmacKeyInit*(kc: var HmacKeyContext; digestVtable: ptr HashClass; key: pointer;
|
||||
keyLen: uint) {.importcFunc, importc: "br_hmac_key_init",
|
||||
header: "bearssl_hmac.h".}
|
||||
|
||||
proc hmacKeyGetDigest*(kc: var HmacKeyContext): ptr HashClass {.inline.} =
|
||||
return kc.digVtable
|
||||
|
||||
|
||||
type
|
||||
HmacContext* {.importc: "br_hmac_context", header: "bearssl_hmac.h", bycopy.} = object
|
||||
dig* {.importc: "dig".}: HashCompatContext
|
||||
kso* {.importc: "kso".}: array[64, byte]
|
||||
outLen* {.importc: "out_len".}: uint
|
||||
|
||||
|
||||
|
||||
proc hmacInit*(ctx: var HmacContext; kc: var HmacKeyContext; outLen: uint) {.importcFunc,
|
||||
importc: "br_hmac_init", header: "bearssl_hmac.h".}
|
||||
|
||||
proc hmacSize*(ctx: var HmacContext): uint {.inline, importcFunc, importc: "br_hmac_size".} =
|
||||
return ctx.outLen
|
||||
|
||||
|
||||
proc hmacGetDigest*(hc: var HmacContext): ptr HashClass {.inline.} =
|
||||
return hc.dig.vtable
|
||||
|
||||
|
||||
proc hmacUpdate*(ctx: var HmacContext; data: pointer; len: uint) {.importcFunc,
|
||||
importc: "br_hmac_update", header: "bearssl_hmac.h".}
|
||||
|
||||
proc hmacOut*(ctx: var HmacContext; `out`: pointer): uint {.importcFunc,
|
||||
importc: "br_hmac_out", header: "bearssl_hmac.h".}
|
||||
|
||||
proc hmacOutCT*(ctx: var HmacContext; data: pointer; len: uint; minLen: uint;
|
||||
maxLen: uint; `out`: pointer): uint {.importcFunc,
|
||||
importc: "br_hmac_outCT", header: "bearssl_hmac.h".}
|
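A short HMAC-SHA256 sketch over the key/context split shown above; a key context is initialised once and can serve many HMAC computations. Import paths and key material are placeholders:

import bearssl/abi/[bearssl_hash, bearssl_hmac]   # hypothetical paths

var key = "placeholder-key"
var msg = "message"
var kc: HmacKeyContext
var ctx: HmacContext
var mac: array[sha256SIZE, byte]

hmacKeyInit(kc, addr sha256Vtable, addr key[0], uint(key.len))
hmacInit(ctx, kc, 0)                        # 0 = natural output size (32 bytes)
hmacUpdate(ctx, addr msg[0], uint(msg.len))
let macLen = hmacOut(ctx, addr mac[0])      # returns the MAC length, here 32
doAssert macLen == uint(sha256SIZE)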
65 vendor/nim-bearssl/bearssl/abi/bearssl_kdf.nim (vendored, new file)
@ -0,0 +1,65 @@
|
||||
import
|
||||
"."/[bearssl_hash, bearssl_hmac, csources]
|
||||
|
||||
{.pragma: importcFunc, cdecl, gcsafe, noSideEffect, raises: [].}
|
||||
{.used.}
|
||||
|
||||
const
|
||||
bearKdfPath = bearSrcPath & "kdf/"
|
||||
|
||||
{.compile: bearKdfPath & "hkdf.c".}
|
||||
{.compile: bearKdfPath & "shake.c".}
|
||||
|
||||
type
|
||||
INNER_C_UNION_bearssl_kdf_1* {.importc: "br_hkdf_context::no_name",
|
||||
header: "bearssl_kdf.h", bycopy, union.} = object
|
||||
hmacCtx* {.importc: "hmac_ctx".}: HmacContext
|
||||
prkCtx* {.importc: "prk_ctx".}: HmacKeyContext
|
||||
|
||||
HkdfContext* {.importc: "br_hkdf_context", header: "bearssl_kdf.h", bycopy.} = object
|
||||
u* {.importc: "u".}: INNER_C_UNION_bearssl_kdf_1
|
||||
buf* {.importc: "buf".}: array[64, byte]
|
||||
`ptr`* {.importc: "ptr".}: uint
|
||||
digLen* {.importc: "dig_len".}: uint
|
||||
chunkNum* {.importc: "chunk_num".}: cuint
|
||||
|
||||
|
||||
|
||||
proc hkdfInit*(hc: var HkdfContext; digestVtable: ptr HashClass; salt: pointer;
|
||||
saltLen: uint) {.importcFunc, importc: "br_hkdf_init",
|
||||
header: "bearssl_kdf.h".}
|
||||
|
||||
|
||||
var hkdfNoSalt* {.importc: "br_hkdf_no_salt", header: "bearssl_kdf.h".}: byte
|
||||
|
||||
|
||||
proc hkdfInject*(hc: var HkdfContext; ikm: pointer; ikmLen: uint) {.importcFunc,
|
||||
importc: "br_hkdf_inject", header: "bearssl_kdf.h".}
|
||||
|
||||
proc hkdfFlip*(hc: var HkdfContext) {.importcFunc, importc: "br_hkdf_flip",
|
||||
header: "bearssl_kdf.h".}
|
||||
|
||||
proc hkdfProduce*(hc: var HkdfContext; info: pointer; infoLen: uint; `out`: pointer;
|
||||
outLen: uint): uint {.importcFunc, importc: "br_hkdf_produce",
|
||||
header: "bearssl_kdf.h".}
|
||||
|
||||
type
|
||||
ShakeContext* {.importc: "br_shake_context", header: "bearssl_kdf.h", bycopy.} = object
|
||||
dbuf* {.importc: "dbuf".}: array[200, byte]
|
||||
dptr* {.importc: "dptr".}: uint
|
||||
rate* {.importc: "rate".}: uint
|
||||
a* {.importc: "A".}: array[25, uint64]
|
||||
|
||||
|
||||
|
||||
proc shakeInit*(sc: var ShakeContext; securityLevel: cint) {.importcFunc,
|
||||
importc: "br_shake_init", header: "bearssl_kdf.h".}
|
||||
|
||||
proc shakeInject*(sc: var ShakeContext; data: pointer; len: uint) {.importcFunc,
|
||||
importc: "br_shake_inject", header: "bearssl_kdf.h".}
|
||||
|
||||
proc shakeFlip*(hc: var ShakeContext) {.importcFunc, importc: "br_shake_flip",
|
||||
header: "bearssl_kdf.h".}
|
||||
|
||||
proc shakeProduce*(sc: var ShakeContext; `out`: pointer; len: uint) {.importcFunc,
|
||||
importc: "br_shake_produce", header: "bearssl_kdf.h".}
|
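An HKDF-SHA256 sketch following the extract/expand flow of the API above; salt, IKM and info are placeholders and the import paths are assumptions:

import bearssl/abi/[bearssl_hash, bearssl_kdf]   # hypothetical paths

var
  ctx: HkdfContext
  salt = [byte 0x00, 0x01, 0x02]
  ikm  = [byte 0x0b, 0x0b, 0x0b]
  info = "application info"
  okm: array[42, byte]

hkdfInit(ctx, addr sha256Vtable, addr salt[0], uint(salt.len))  # extract: salt
hkdfInject(ctx, addr ikm[0], uint(ikm.len))                     # extract: IKM
hkdfFlip(ctx)                                                   # switch to expand
discard hkdfProduce(ctx, addr info[0], uint(info.len),
                    addr okm[0], uint(okm.len))                 # 42 bytes of OKM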
76 vendor/nim-bearssl/bearssl/abi/bearssl_pem.nim (vendored, new file)
@ -0,0 +1,76 @@
|
||||
import
|
||||
"."/[csources]
|
||||
|
||||
{.pragma: importcFunc, cdecl, gcsafe, noSideEffect, raises: [].}
|
||||
{.used.}
|
||||
|
||||
const
|
||||
bearCodecPath = bearSrcPath & "codec/"
|
||||
|
||||
{.compile: bearCodecPath & "pemdec.c".}
|
||||
{.compile: bearCodecPath & "pemenc.c".}
|
||||
|
||||
type
|
||||
INNER_C_STRUCT_bearssl_pem_1* {.importc: "br_pem_decoder_context::no_name",
|
||||
header: "bearssl_pem.h", bycopy.} = object
|
||||
dp* {.importc: "dp".}: ptr uint32
|
||||
rp* {.importc: "rp".}: ptr uint32
|
||||
ip* {.importc: "ip".}: ptr byte
|
||||
|
||||
PemDecoderContext* {.importc: "br_pem_decoder_context", header: "bearssl_pem.h",
|
||||
bycopy.} = object
|
||||
cpu* {.importc: "cpu".}: INNER_C_STRUCT_bearssl_pem_1
|
||||
dpStack* {.importc: "dp_stack".}: array[32, uint32]
|
||||
rpStack* {.importc: "rp_stack".}: array[32, uint32]
|
||||
err* {.importc: "err".}: cint
|
||||
hbuf* {.importc: "hbuf".}: ptr byte
|
||||
hlen* {.importc: "hlen".}: uint
|
||||
dest* {.importc: "dest".}: proc (destCtx: pointer; src: pointer; len: uint) {.importcFunc.}
|
||||
destCtx* {.importc: "dest_ctx".}: pointer
|
||||
event* {.importc: "event".}: byte
|
||||
name* {.importc: "name".}: array[128, char]
|
||||
buf* {.importc: "buf".}: array[255, byte]
|
||||
`ptr`* {.importc: "ptr".}: uint
|
||||
|
||||
|
||||
|
||||
proc pemDecoderInit*(ctx: var PemDecoderContext) {.importcFunc,
|
||||
importc: "br_pem_decoder_init", header: "bearssl_pem.h".}
|
||||
|
||||
proc pemDecoderPush*(ctx: var PemDecoderContext; data: pointer; len: uint): uint {.
|
||||
importcFunc, importc: "br_pem_decoder_push", header: "bearssl_pem.h".}
|
||||
|
||||
proc pemDecoderSetdest*(ctx: var PemDecoderContext; dest: proc (destCtx: pointer;
|
||||
src: pointer; len: uint) {.importcFunc.}; destCtx: pointer) {.inline.} =
|
||||
ctx.dest = dest
|
||||
ctx.destCtx = destCtx
|
||||
|
||||
|
||||
proc pemDecoderEvent*(ctx: var PemDecoderContext): cint {.importcFunc,
|
||||
importc: "br_pem_decoder_event", header: "bearssl_pem.h".}
|
||||
|
||||
const
|
||||
PEM_BEGIN_OBJ* = 1
|
||||
|
||||
|
||||
const
|
||||
PEM_END_OBJ* = 2
|
||||
|
||||
|
||||
const
|
||||
PEM_ERROR* = 3
|
||||
|
||||
|
||||
proc pemDecoderName*(ctx: var PemDecoderContext): cstring {.inline.} =
|
||||
return addr ctx.name
|
||||
|
||||
|
||||
proc pemEncode*(dest: pointer; data: pointer; len: uint; banner: cstring; flags: cuint): uint {.
|
||||
importcFunc, importc: "br_pem_encode", header: "bearssl_pem.h".}
|
||||
|
||||
const
|
||||
PEM_LINE64* = 0x0001
|
||||
|
||||
|
||||
const
|
||||
PEM_CRLF* = 0x0002
|
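The decoder above is event driven (push bytes with pemDecoderPush, poll pemDecoderEvent, collect output through the destination callback); the encoder is a single call. A sizing-then-encoding sketch, with a placeholder DER blob and an assumed import path:

import bearssl/abi/bearssl_pem   # hypothetical import path

var der = [byte 0x30, 0x03, 0x02, 0x01, 0x01]   # placeholder DER blob

# A nil destination only computes the required length (excluding the NUL).
let needed = pemEncode(nil, addr der[0], uint(der.len), "EXAMPLE",
                       cuint(PEM_LINE64))
var pem = newString(int(needed) + 1)            # room for the trailing NUL
discard pemEncode(addr pem[0], addr der[0], uint(der.len), "EXAMPLE",
                  cuint(PEM_LINE64))
pem.setLen(int(needed))                         # drop the NUL byte
echo pem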
37 vendor/nim-bearssl/bearssl/abi/bearssl_prf.nim (vendored, new file)
@ -0,0 +1,37 @@
|
||||
import
|
||||
"."/[csources]
|
||||
|
||||
{.pragma: importcFunc, cdecl, gcsafe, noSideEffect, raises: [].}
|
||||
{.used.}
|
||||
|
||||
const
|
||||
bearSslPath = bearSrcPath & "ssl/"
|
||||
|
||||
{.compile: bearSslPath & "prf.c".}
|
||||
{.compile: bearSslPath & "prf_md5sha1.c".}
|
||||
{.compile: bearSslPath & "prf_sha256.c".}
|
||||
{.compile: bearSslPath & "prf_sha384.c".}
|
||||
|
||||
type
|
||||
TlsPrfSeedChunk* {.importc: "br_tls_prf_seed_chunk", header: "bearssl_prf.h",
|
||||
bycopy.} = object
|
||||
data* {.importc: "data".}: pointer
|
||||
len* {.importc: "len".}: uint
|
||||
|
||||
|
||||
|
||||
proc tls10Prf*(dst: pointer; len: uint; secret: pointer; secretLen: uint;
|
||||
label: cstring; seedNum: uint; seed: ptr TlsPrfSeedChunk) {.importcFunc,
|
||||
importc: "br_tls10_prf", header: "bearssl_prf.h".}
|
||||
|
||||
proc tls12Sha256Prf*(dst: pointer; len: uint; secret: pointer; secretLen: uint;
|
||||
label: cstring; seedNum: uint; seed: ptr TlsPrfSeedChunk) {.
|
||||
importcFunc, importc: "br_tls12_sha256_prf", header: "bearssl_prf.h".}
|
||||
|
||||
proc tls12Sha384Prf*(dst: pointer; len: uint; secret: pointer; secretLen: uint;
|
||||
label: cstring; seedNum: uint; seed: ptr TlsPrfSeedChunk) {.
|
||||
importcFunc, importc: "br_tls12_sha384_prf", header: "bearssl_prf.h".}
|
||||
|
||||
type
|
||||
TlsPrfImpl* {.importc: "br_tls_prf_impl".} = proc (dst: pointer; len: uint; secret: pointer; secretLen: uint;
|
||||
label: cstring; seedNum: uint; seed: ptr TlsPrfSeedChunk) {.importcFunc.}
|
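A sketch of deriving key material with the TLS 1.2 SHA-256 PRF above; the secret, label and seed are placeholders and the import path is an assumption:

import bearssl/abi/bearssl_prf   # hypothetical import path

var
  secret   = [byte 1, 2, 3, 4]   # placeholder secret
  seedData = [byte 5, 6, 7, 8]   # e.g. client_random & server_random in TLS
  outKey: array[48, byte]

var chunk = TlsPrfSeedChunk(data: addr seedData[0], len: uint(seedData.len))
tls12Sha256Prf(addr outKey[0], uint(outKey.len),
               addr secret[0], uint(secret.len),
               "master secret", 1, addr chunk)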
80 vendor/nim-bearssl/bearssl/abi/bearssl_rand.nim (vendored, new file)
@ -0,0 +1,80 @@
|
||||
import
|
||||
"."/[bearssl_hash, bearssl_hmac, csources]
|
||||
|
||||
{.pragma: importcFunc, cdecl, gcsafe, noSideEffect, raises: [].}
|
||||
{.used.}
|
||||
|
||||
const
|
||||
bearRandPath = bearSrcPath & "rand/"
|
||||
|
||||
# {.compile: bearRandPath & "aesctr_drbg.c".}
|
||||
{.compile: bearRandPath & "hmac_drbg.c".}
|
||||
{.compile: bearRandPath & "sysrng.c".}
|
||||
|
||||
type
|
||||
PrngClass* {.importc: "br_prng_class", header: "bearssl_rand.h", bycopy.} = object
|
||||
contextSize* {.importc: "context_size".}: uint
|
||||
init* {.importc: "init".}: proc (ctx: ptr ptr PrngClass; params: pointer;
|
||||
seed: pointer; seedLen: uint) {.importcFunc.}
|
||||
generate* {.importc: "generate".}: proc (ctx: ptr ptr PrngClass; `out`: pointer;
|
||||
len: uint) {.importcFunc.}
|
||||
update* {.importc: "update".}: proc (ctx: ptr ptr PrngClass; seed: pointer;
|
||||
seedLen: uint) {.importcFunc.}
|
||||
|
||||
|
||||
|
||||
type
|
||||
HmacDrbgContext* {.importc: "br_hmac_drbg_context", header: "bearssl_rand.h",
|
||||
bycopy.} = object
|
||||
vtable* {.importc: "vtable".}: ptr PrngClass
|
||||
k* {.importc: "K".}: array[64, byte]
|
||||
v* {.importc: "V".}: array[64, byte]
|
||||
digestClass* {.importc: "digest_class".}: ptr HashClass
|
||||
|
||||
|
||||
|
||||
var hmacDrbgVtable* {.importc: "br_hmac_drbg_vtable", header: "bearssl_rand.h".}: PrngClass
|
||||
|
||||
|
||||
proc hmacDrbgInit*(ctx: var HmacDrbgContext; digestClass: ptr HashClass; seed: pointer;
|
||||
seedLen: uint) {.importcFunc, importc: "br_hmac_drbg_init",
|
||||
header: "bearssl_rand.h".}
|
||||
|
||||
proc hmacDrbgGenerate*(ctx: var HmacDrbgContext; `out`: pointer; len: uint) {.importcFunc,
|
||||
importc: "br_hmac_drbg_generate", header: "bearssl_rand.h".}
|
||||
|
||||
proc hmacDrbgUpdate*(ctx: var HmacDrbgContext; seed: pointer; seedLen: uint) {.importcFunc,
|
||||
importc: "br_hmac_drbg_update", header: "bearssl_rand.h".}
|
||||
|
||||
proc hmacDrbgGetHash*(ctx: var HmacDrbgContext): ptr HashClass {.inline.} =
|
||||
return ctx.digestClass
|
||||
|
||||
|
||||
type
|
||||
PrngSeeder* {.importc: "br_prng_seeder".} = proc (ctx: ptr ptr PrngClass): cint {.importcFunc.}
|
||||
|
||||
|
||||
proc prngSeederSystem*(name: cstringArray): PrngSeeder {.importcFunc,
|
||||
importc: "br_prng_seeder_system", header: "bearssl_rand.h".}
|
||||
|
||||
# type
|
||||
# AesctrDrbgContext* {.importc: "br_aesctr_drbg_context", header: "bearssl_rand.h",
|
||||
# bycopy.} = object
|
||||
# vtable* {.importc: "vtable".}: ptr PrngClass
|
||||
# sk* {.importc: "sk".}: AesGenCtrKeys
|
||||
# cc* {.importc: "cc".}: uint32
|
||||
|
||||
|
||||
|
||||
# var aesctrDrbgVtable* {.importc: "br_aesctr_drbg_vtable", header: "bearssl_rand.h".}: PrngClass
|
||||
|
||||
|
||||
# proc aesctrDrbgInit*(ctx: var AesctrDrbgContext; aesctr: ptr BlockCtrClass;
|
||||
# seed: pointer; seedLen: uint) {.importcFunc,
|
||||
# importc: "br_aesctr_drbg_init", header: "bearssl_rand.h".}
|
||||
|
||||
# proc aesctrDrbgGenerate*(ctx: var AesctrDrbgContext; `out`: pointer; len: uint) {.
|
||||
# importcFunc, importc: "br_aesctr_drbg_generate", header: "bearssl_rand.h".}
|
||||
|
||||
# proc aesctrDrbgUpdate*(ctx: var AesctrDrbgContext; seed: pointer; seedLen: uint) {.
|
||||
# importcFunc, importc: "br_aesctr_drbg_update", header: "bearssl_rand.h".}
|
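A seeding-and-generation sketch for the HMAC-DRBG above. In real use the seed should come from prngSeederSystem or another entropy source; the constant seed and the import paths here are placeholders:

import bearssl/abi/[bearssl_hash, bearssl_rand]   # hypothetical paths

var rng: HmacDrbgContext
var seed = [byte 0x01, 0x02, 0x03, 0x04]          # placeholder, NOT real entropy

hmacDrbgInit(rng, addr sha256Vtable, addr seed[0], uint(seed.len))

var buf: array[16, byte]
hmacDrbgGenerate(rng, addr buf[0], uint(buf.len)) # 16 pseudo-random bytes
hmacDrbgUpdate(rng, addr seed[0], uint(seed.len)) # optional additional reseed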
422 vendor/nim-bearssl/bearssl/abi/bearssl_rsa.nim (vendored, new file)
@ -0,0 +1,422 @@
|
||||
import
|
||||
"."/[bearssl_hash, bearssl_rand, csources, intx]
|
||||
|
||||
{.pragma: importcFunc, cdecl, gcsafe, noSideEffect, raises: [].}
|
||||
{.used.}
|
||||
|
||||
const
|
||||
bearRsaPath = bearSrcPath & "rsa/"
|
||||
|
||||
{.compile: bearRsaPath & "rsa_default_keygen.c".}
|
||||
{.compile: bearRsaPath & "rsa_default_modulus.c".}
|
||||
{.compile: bearRsaPath & "rsa_default_oaep_decrypt.c".}
|
||||
{.compile: bearRsaPath & "rsa_default_oaep_encrypt.c".}
|
||||
{.compile: bearRsaPath & "rsa_default_pkcs1_sign.c".}
|
||||
{.compile: bearRsaPath & "rsa_default_pkcs1_vrfy.c".}
|
||||
{.compile: bearRsaPath & "rsa_default_priv.c".}
|
||||
{.compile: bearRsaPath & "rsa_default_privexp.c".}
|
||||
{.compile: bearRsaPath & "rsa_default_pss_sign.c".}
|
||||
{.compile: bearRsaPath & "rsa_default_pss_vrfy.c".}
|
||||
{.compile: bearRsaPath & "rsa_default_pub.c".}
|
||||
{.compile: bearRsaPath & "rsa_default_pubexp.c".}
|
||||
{.compile: bearRsaPath & "rsa_i15_keygen.c".}
|
||||
{.compile: bearRsaPath & "rsa_i15_modulus.c".}
|
||||
{.compile: bearRsaPath & "rsa_i15_oaep_decrypt.c".}
|
||||
{.compile: bearRsaPath & "rsa_i15_oaep_encrypt.c".}
|
||||
{.compile: bearRsaPath & "rsa_i15_pkcs1_sign.c".}
|
||||
{.compile: bearRsaPath & "rsa_i15_pkcs1_vrfy.c".}
|
||||
{.compile: bearRsaPath & "rsa_i15_priv.c".}
|
||||
{.compile: bearRsaPath & "rsa_i15_privexp.c".}
|
||||
{.compile: bearRsaPath & "rsa_i15_pss_sign.c".}
|
||||
{.compile: bearRsaPath & "rsa_i15_pss_vrfy.c".}
|
||||
{.compile: bearRsaPath & "rsa_i15_pub.c".}
|
||||
{.compile: bearRsaPath & "rsa_i15_pubexp.c".}
|
||||
{.compile: bearRsaPath & "rsa_i31_keygen.c".}
|
||||
{.compile: bearRsaPath & "rsa_i31_keygen_inner.c".}
|
||||
{.compile: bearRsaPath & "rsa_i31_modulus.c".}
|
||||
{.compile: bearRsaPath & "rsa_i31_oaep_decrypt.c".}
|
||||
{.compile: bearRsaPath & "rsa_i31_oaep_encrypt.c".}
|
||||
{.compile: bearRsaPath & "rsa_i31_pkcs1_sign.c".}
|
||||
{.compile: bearRsaPath & "rsa_i31_pkcs1_vrfy.c".}
|
||||
{.compile: bearRsaPath & "rsa_i31_priv.c".}
|
||||
{.compile: bearRsaPath & "rsa_i31_privexp.c".}
|
||||
{.compile: bearRsaPath & "rsa_i31_pss_sign.c".}
|
||||
{.compile: bearRsaPath & "rsa_i31_pss_vrfy.c".}
|
||||
{.compile: bearRsaPath & "rsa_i31_pub.c".}
|
||||
{.compile: bearRsaPath & "rsa_i31_pubexp.c".}
|
||||
{.compile: bearRsaPath & "rsa_i32_oaep_decrypt.c".}
|
||||
{.compile: bearRsaPath & "rsa_i32_oaep_encrypt.c".}
|
||||
{.compile: bearRsaPath & "rsa_i32_pkcs1_sign.c".}
|
||||
{.compile: bearRsaPath & "rsa_i32_pkcs1_vrfy.c".}
|
||||
{.compile: bearRsaPath & "rsa_i32_priv.c".}
|
||||
{.compile: bearRsaPath & "rsa_i32_pss_sign.c".}
|
||||
{.compile: bearRsaPath & "rsa_i32_pss_vrfy.c".}
|
||||
{.compile: bearRsaPath & "rsa_i32_pub.c".}
|
||||
{.compile: bearRsaPath & "rsa_i62_keygen.c".}
|
||||
{.compile: bearRsaPath & "rsa_i62_oaep_decrypt.c".}
|
||||
{.compile: bearRsaPath & "rsa_i62_oaep_encrypt.c".}
|
||||
{.compile: bearRsaPath & "rsa_i62_pkcs1_sign.c".}
|
||||
{.compile: bearRsaPath & "rsa_i62_pkcs1_vrfy.c".}
|
||||
{.compile: bearRsaPath & "rsa_i62_priv.c".}
|
||||
{.compile: bearRsaPath & "rsa_i62_pss_sign.c".}
|
||||
{.compile: bearRsaPath & "rsa_i62_pss_vrfy.c".}
|
||||
{.compile: bearRsaPath & "rsa_i62_pub.c".}
|
||||
{.compile: bearRsaPath & "rsa_oaep_pad.c".}
|
||||
{.compile: bearRsaPath & "rsa_oaep_unpad.c".}
|
||||
{.compile: bearRsaPath & "rsa_pkcs1_sig_pad.c".}
|
||||
{.compile: bearRsaPath & "rsa_pkcs1_sig_unpad.c".}
|
||||
{.compile: bearRsaPath & "rsa_pss_sig_pad.c".}
|
||||
{.compile: bearRsaPath & "rsa_pss_sig_unpad.c".}
|
||||
{.compile: bearRsaPath & "rsa_ssl_decrypt.c".}
|
||||
|
||||
type
|
||||
RsaPublicKey* {.importc: "br_rsa_public_key", header: "bearssl_rsa.h", bycopy.} = object
|
||||
n* {.importc: "n".}: ptr byte
|
||||
nlen* {.importc: "nlen".}: uint
|
||||
e* {.importc: "e".}: ptr byte
|
||||
elen* {.importc: "elen".}: uint
|
||||
|
||||
|
||||
|
||||
type
|
||||
RsaPrivateKey* {.importc: "br_rsa_private_key", header: "bearssl_rsa.h", bycopy.} = object
|
||||
nBitlen* {.importc: "n_bitlen".}: uint32
|
||||
p* {.importc: "p".}: ptr byte
|
||||
plen* {.importc: "plen".}: uint
|
||||
q* {.importc: "q".}: ptr byte
|
||||
qlen* {.importc: "qlen".}: uint
|
||||
dp* {.importc: "dp".}: ptr byte
|
||||
dplen* {.importc: "dplen".}: uint
|
||||
dq* {.importc: "dq".}: ptr byte
|
||||
dqlen* {.importc: "dqlen".}: uint
|
||||
iq* {.importc: "iq".}: ptr byte
|
||||
iqlen* {.importc: "iqlen".}: uint
|
||||
|
||||
|
||||
|
||||
type
|
||||
RsaPublic* {.importc: "br_rsa_public".} = proc (x: ptr byte; xlen: uint; pk: ptr RsaPublicKey): uint32 {.importcFunc.}
|
||||
|
||||
|
||||
type
|
||||
RsaPkcs1Vrfy* {.importc: "br_rsa_pkcs1_vrfy".} = proc (x: ptr byte; xlen: uint; hashOid: ptr byte;
|
||||
hashLen: uint; pk: ptr RsaPublicKey; hashOut: ptr byte): uint32 {.
|
||||
importcFunc.}
|
||||
|
||||
|
||||
type
|
||||
RsaPssVrfy* {.importc: "br_rsa_pss_vrfy".} = proc (x: ptr byte; xlen: uint; hfData: ptr HashClass;
|
||||
hfMgf1: ptr HashClass; hash: pointer; saltLen: uint;
|
||||
pk: ptr RsaPublicKey): uint32 {.importcFunc.}
|
||||
|
||||
|
||||
type
|
||||
RsaOaepEncrypt* {.importc: "br_rsa_oaep_encrypt".} = proc (rnd: ptr ptr PrngClass; dig: ptr HashClass; label: pointer;
|
||||
labelLen: uint; pk: ptr RsaPublicKey; dst: pointer;
|
||||
dstMaxLen: uint; src: pointer; srcLen: uint): uint {.
|
||||
importcFunc.}
|
||||
|
||||
|
||||
type
|
||||
RsaPrivate* {.importc: "br_rsa_private".} = proc (x: ptr byte; sk: ptr RsaPrivateKey): uint32 {.importcFunc.}
|
||||
|
||||
|
||||
type
|
||||
RsaPkcs1Sign* {.importc: "br_rsa_pkcs1_sign".} = proc (hashOid: ptr byte; hash: ptr byte; hashLen: uint;
|
||||
sk: ptr RsaPrivateKey; x: ptr byte): uint32 {.importcFunc.}
|
||||
|
||||
|
||||
type
|
||||
RsaPssSign* {.importc: "br_rsa_pss_sign".} = proc (rng: ptr ptr PrngClass; hfData: ptr HashClass;
|
||||
hfMgf1: ptr HashClass; hashValue: ptr byte; saltLen: uint;
|
||||
sk: ptr RsaPrivateKey; x: ptr byte): uint32 {.importcFunc.}
|
||||
|
||||
|
||||
const
|
||||
HASH_OID_SHA1* = (("\x05+\x0E\x03\x02\x1A"))
|
||||
|
||||
|
||||
const
|
||||
HASH_OID_SHA224* = (("\t`\x86H\x01e\x03\x04\x02\x04"))
|
||||
|
||||
|
||||
const
|
||||
HASH_OID_SHA256* = (("\t`\x86H\x01e\x03\x04\x02\x01"))
|
||||
|
||||
|
||||
const
|
||||
HASH_OID_SHA384* = (("\t`\x86H\x01e\x03\x04\x02\x02"))
|
||||
|
||||
|
||||
const
|
||||
HASH_OID_SHA512* = (("\t`\x86H\x01e\x03\x04\x02\x03"))
|
||||
|
||||
|
||||
type
|
||||
RsaOaepDecrypt* {.importc: "br_rsa_oaep_decrypt".} = proc (dig: ptr HashClass; label: pointer; labelLen: uint;
|
||||
sk: ptr RsaPrivateKey; data: pointer; len: var uint): uint32 {.
|
||||
importcFunc.}
|
||||
|
||||
|
||||
proc rsaI32Public*(x: ptr byte; xlen: uint; pk: ptr RsaPublicKey): uint32 {.importcFunc,
|
||||
importc: "br_rsa_i32_public", header: "bearssl_rsa.h".}
|
||||
|
||||
proc rsaI32Pkcs1Vrfy*(x: ptr byte; xlen: uint; hashOid: ptr byte;
|
||||
hashLen: uint; pk: ptr RsaPublicKey; hashOut: ptr byte): uint32 {.
|
||||
importcFunc, importc: "br_rsa_i32_pkcs1_vrfy", header: "bearssl_rsa.h".}
|
||||
|
||||
proc rsaI32PssVrfy*(x: ptr byte; xlen: uint; hfData: ptr HashClass;
|
||||
hfMgf1: ptr HashClass; hash: pointer; saltLen: uint;
|
||||
pk: ptr RsaPublicKey): uint32 {.importcFunc,
|
||||
importc: "br_rsa_i32_pss_vrfy", header: "bearssl_rsa.h".}
|
||||
|
||||
proc rsaI32Private*(x: ptr byte; sk: ptr RsaPrivateKey): uint32 {.importcFunc,
|
||||
importc: "br_rsa_i32_private", header: "bearssl_rsa.h".}
|
||||
|
||||
proc rsaI32Pkcs1Sign*(hashOid: ptr byte; hash: ptr byte; hashLen: uint;
|
||||
sk: ptr RsaPrivateKey; x: ptr byte): uint32 {.importcFunc,
|
||||
importc: "br_rsa_i32_pkcs1_sign", header: "bearssl_rsa.h".}
|
||||
|
||||
proc rsaI32PssSign*(rng: ptr ptr PrngClass; hfData: ptr HashClass;
|
||||
hfMgf1: ptr HashClass; hashValue: ptr byte; saltLen: uint;
|
||||
sk: ptr RsaPrivateKey; x: ptr byte): uint32 {.importcFunc,
|
||||
importc: "br_rsa_i32_pss_sign", header: "bearssl_rsa.h".}
|
||||
|
||||
proc rsaI31Public*(x: ptr byte; xlen: uint; pk: ptr RsaPublicKey): uint32 {.importcFunc,
|
||||
importc: "br_rsa_i31_public", header: "bearssl_rsa.h".}
|
||||
|
||||
proc rsaI31Pkcs1Vrfy*(x: ptr byte; xlen: uint; hashOid: ptr byte;
|
||||
hashLen: uint; pk: ptr RsaPublicKey; hashOut: ptr byte): uint32 {.
|
||||
importcFunc, importc: "br_rsa_i31_pkcs1_vrfy", header: "bearssl_rsa.h".}
|
||||
|
||||
proc rsaI31PssVrfy*(x: ptr byte; xlen: uint; hfData: ptr HashClass;
|
||||
hfMgf1: ptr HashClass; hash: pointer; saltLen: uint;
|
||||
pk: ptr RsaPublicKey): uint32 {.importcFunc,
|
||||
importc: "br_rsa_i31_pss_vrfy", header: "bearssl_rsa.h".}
|
||||
|
||||
proc rsaI31Private*(x: ptr byte; sk: ptr RsaPrivateKey): uint32 {.importcFunc,
|
||||
importc: "br_rsa_i31_private", header: "bearssl_rsa.h".}
|
||||
|
||||
proc rsaI31Pkcs1Sign*(hashOid: ptr byte; hash: ptr byte; hashLen: uint;
|
||||
sk: ptr RsaPrivateKey; x: ptr byte): uint32 {.importcFunc,
|
||||
importc: "br_rsa_i31_pkcs1_sign", header: "bearssl_rsa.h".}
|
||||
|
||||
proc rsaI31PssSign*(rng: ptr ptr PrngClass; hfData: ptr HashClass;
|
||||
hfMgf1: ptr HashClass; hashValue: ptr byte; saltLen: uint;
|
||||
sk: ptr RsaPrivateKey; x: ptr byte): uint32 {.importcFunc,
|
||||
importc: "br_rsa_i31_pss_sign", header: "bearssl_rsa.h".}
|
||||
|
||||
proc rsaI62Public*(x: ptr byte; xlen: uint; pk: ptr RsaPublicKey): uint32 {.importcFunc,
|
||||
importc: "br_rsa_i62_public", header: "bearssl_rsa.h".}
|
||||
|
||||
proc rsaI62Pkcs1Vrfy*(x: ptr byte; xlen: uint; hashOid: ptr byte;
|
||||
hashLen: uint; pk: ptr RsaPublicKey; hashOut: ptr byte): uint32 {.
|
||||
importcFunc, importc: "br_rsa_i62_pkcs1_vrfy", header: "bearssl_rsa.h".}
|
||||
|
||||
proc rsaI62PssVrfy*(x: ptr byte; xlen: uint; hfData: ptr HashClass;
|
||||
hfMgf1: ptr HashClass; hash: pointer; saltLen: uint;
|
||||
pk: ptr RsaPublicKey): uint32 {.importcFunc,
|
||||
importc: "br_rsa_i62_pss_vrfy", header: "bearssl_rsa.h".}
|
||||
|
||||
proc rsaI62Private*(x: ptr byte; sk: ptr RsaPrivateKey): uint32 {.importcFunc,
|
||||
importc: "br_rsa_i62_private", header: "bearssl_rsa.h".}
|
||||
|
||||
proc rsaI62Pkcs1Sign*(hashOid: ptr byte; hash: ptr byte; hashLen: uint;
|
||||
sk: ptr RsaPrivateKey; x: ptr byte): uint32 {.importcFunc,
|
||||
importc: "br_rsa_i62_pkcs1_sign", header: "bearssl_rsa.h".}
|
||||
|
||||
proc rsaI62PssSign*(rng: ptr ptr PrngClass; hfData: ptr HashClass;
|
||||
hfMgf1: ptr HashClass; hashValue: ptr byte; saltLen: uint;
|
||||
sk: ptr RsaPrivateKey; x: ptr byte): uint32 {.importcFunc,
|
||||
importc: "br_rsa_i62_pss_sign", header: "bearssl_rsa.h".}
|
||||
|
||||
proc rsaI62PublicGet*(): RsaPublic {.importcFunc, importc: "br_rsa_i62_public_get",
|
||||
header: "bearssl_rsa.h".}
|
||||
|
||||
proc rsaI62Pkcs1VrfyGet*(): RsaPkcs1Vrfy {.importcFunc,
|
||||
importc: "br_rsa_i62_pkcs1_vrfy_get",
|
||||
header: "bearssl_rsa.h".}
|
||||
|
||||
proc rsaI62PssVrfyGet*(): RsaPssVrfy {.importcFunc, importc: "br_rsa_i62_pss_vrfy_get",
|
||||
header: "bearssl_rsa.h".}
|
||||
|
||||
proc rsaI62PrivateGet*(): RsaPrivate {.importcFunc, importc: "br_rsa_i62_private_get",
|
||||
header: "bearssl_rsa.h".}
|
||||
|
||||
proc rsaI62Pkcs1SignGet*(): RsaPkcs1Sign {.importcFunc,
|
||||
importc: "br_rsa_i62_pkcs1_sign_get",
|
||||
header: "bearssl_rsa.h".}
|
||||
|
||||
proc rsaI62PssSignGet*(): RsaPssSign {.importcFunc, importc: "br_rsa_i62_pss_sign_get",
|
||||
header: "bearssl_rsa.h".}
|
||||
|
||||
proc rsaI62OaepEncryptGet*(): RsaOaepEncrypt {.importcFunc,
|
||||
importc: "br_rsa_i62_oaep_encrypt_get", header: "bearssl_rsa.h".}
|
||||
|
||||
proc rsaI62OaepDecryptGet*(): RsaOaepDecrypt {.importcFunc,
|
||||
importc: "br_rsa_i62_oaep_decrypt_get", header: "bearssl_rsa.h".}
|
||||
|
||||
proc rsaI15Public*(x: ptr byte; xlen: uint; pk: ptr RsaPublicKey): uint32 {.importcFunc,
|
||||
importc: "br_rsa_i15_public", header: "bearssl_rsa.h".}
|
||||
|
||||
proc rsaI15Pkcs1Vrfy*(x: ptr byte; xlen: uint; hashOid: ptr byte;
|
||||
hashLen: uint; pk: ptr RsaPublicKey; hashOut: ptr byte): uint32 {.
|
||||
importcFunc, importc: "br_rsa_i15_pkcs1_vrfy", header: "bearssl_rsa.h".}
|
||||
|
||||
proc rsaI15PssVrfy*(x: ptr byte; xlen: uint; hfData: ptr HashClass;
|
||||
hfMgf1: ptr HashClass; hash: pointer; saltLen: uint;
|
||||
pk: ptr RsaPublicKey): uint32 {.importcFunc,
|
||||
importc: "br_rsa_i15_pss_vrfy", header: "bearssl_rsa.h".}
|
||||
|
||||
proc rsaI15Private*(x: ptr byte; sk: ptr RsaPrivateKey): uint32 {.importcFunc,
|
||||
importc: "br_rsa_i15_private", header: "bearssl_rsa.h".}
|
||||
|
||||
proc rsaI15Pkcs1Sign*(hashOid: ptr byte; hash: ptr byte; hashLen: uint;
|
||||
sk: ptr RsaPrivateKey; x: ptr byte): uint32 {.importcFunc,
|
||||
importc: "br_rsa_i15_pkcs1_sign", header: "bearssl_rsa.h".}
|
||||
|
||||
proc rsaI15PssSign*(rng: ptr ptr PrngClass; hfData: ptr HashClass;
|
||||
hfMgf1: ptr HashClass; hashValue: ptr byte; saltLen: uint;
|
||||
sk: ptr RsaPrivateKey; x: ptr byte): uint32 {.importcFunc,
|
||||
importc: "br_rsa_i15_pss_sign", header: "bearssl_rsa.h".}
|
||||
|
||||
proc rsaPublicGetDefault*(): RsaPublic {.importcFunc,
|
||||
importc: "br_rsa_public_get_default",
|
||||
header: "bearssl_rsa.h".}
|
||||
|
||||
proc rsaPrivateGetDefault*(): RsaPrivate {.importcFunc,
|
||||
importc: "br_rsa_private_get_default",
|
||||
header: "bearssl_rsa.h".}
|
||||
|
||||
proc rsaPkcs1VrfyGetDefault*(): RsaPkcs1Vrfy {.importcFunc,
|
||||
importc: "br_rsa_pkcs1_vrfy_get_default", header: "bearssl_rsa.h".}
|
||||
|
||||
proc rsaPssVrfyGetDefault*(): RsaPssVrfy {.importcFunc,
|
||||
importc: "br_rsa_pss_vrfy_get_default",
|
||||
header: "bearssl_rsa.h".}
|
||||
|
||||
proc rsaPkcs1SignGetDefault*(): RsaPkcs1Sign {.importcFunc,
|
||||
importc: "br_rsa_pkcs1_sign_get_default", header: "bearssl_rsa.h".}
|
||||
|
||||
proc rsaPssSignGetDefault*(): RsaPssSign {.importcFunc,
|
||||
importc: "br_rsa_pss_sign_get_default",
|
||||
header: "bearssl_rsa.h".}
|
||||
|
||||
proc rsaOaepEncryptGetDefault*(): RsaOaepEncrypt {.importcFunc,
|
||||
importc: "br_rsa_oaep_encrypt_get_default", header: "bearssl_rsa.h".}
|
||||
|
||||
proc rsaOaepDecryptGetDefault*(): RsaOaepDecrypt {.importcFunc,
|
||||
importc: "br_rsa_oaep_decrypt_get_default", header: "bearssl_rsa.h".}
|
||||
|
||||
proc rsaSslDecrypt*(core: RsaPrivate; sk: ptr RsaPrivateKey; data: ptr byte;
|
||||
len: uint): uint32 {.importcFunc, importc: "br_rsa_ssl_decrypt",
|
||||
header: "bearssl_rsa.h".}
|
||||
|
||||
proc rsaI15OaepEncrypt*(rnd: ptr ptr PrngClass; dig: ptr HashClass; label: pointer;
|
||||
labelLen: uint; pk: ptr RsaPublicKey; dst: pointer;
|
||||
dstMaxLen: uint; src: pointer; srcLen: uint): uint {.
|
||||
importcFunc, importc: "br_rsa_i15_oaep_encrypt", header: "bearssl_rsa.h".}
|
||||
|
||||
proc rsaI15OaepDecrypt*(dig: ptr HashClass; label: pointer; labelLen: uint;
|
||||
sk: ptr RsaPrivateKey; data: pointer; len: var uint): uint32 {.
|
||||
importcFunc, importc: "br_rsa_i15_oaep_decrypt", header: "bearssl_rsa.h".}
|
||||
|
||||
proc rsaI31OaepEncrypt*(rnd: ptr ptr PrngClass; dig: ptr HashClass; label: pointer;
|
||||
labelLen: uint; pk: ptr RsaPublicKey; dst: pointer;
|
||||
dstMaxLen: uint; src: pointer; srcLen: uint): uint {.
|
||||
importcFunc, importc: "br_rsa_i31_oaep_encrypt", header: "bearssl_rsa.h".}
|
||||
|
||||
proc rsaI31OaepDecrypt*(dig: ptr HashClass; label: pointer; labelLen: uint;
|
||||
sk: ptr RsaPrivateKey; data: pointer; len: var uint): uint32 {.
|
||||
importcFunc, importc: "br_rsa_i31_oaep_decrypt", header: "bearssl_rsa.h".}
|
||||
|
||||
proc rsaI32OaepEncrypt*(rnd: ptr ptr PrngClass; dig: ptr HashClass; label: pointer;
|
||||
labelLen: uint; pk: ptr RsaPublicKey; dst: pointer;
|
||||
dstMaxLen: uint; src: pointer; srcLen: uint): uint {.
|
||||
importcFunc, importc: "br_rsa_i32_oaep_encrypt", header: "bearssl_rsa.h".}
|
||||
|
||||
proc rsaI32OaepDecrypt*(dig: ptr HashClass; label: pointer; labelLen: uint;
|
||||
sk: ptr RsaPrivateKey; data: pointer; len: var uint): uint32 {.
|
||||
importcFunc, importc: "br_rsa_i32_oaep_decrypt", header: "bearssl_rsa.h".}
|
||||
|
||||
proc rsaI62OaepEncrypt*(rnd: ptr ptr PrngClass; dig: ptr HashClass; label: pointer;
                        labelLen: uint; pk: ptr RsaPublicKey; dst: pointer;
                        dstMaxLen: uint; src: pointer; srcLen: uint): uint {.
    importcFunc, importc: "br_rsa_i62_oaep_encrypt", header: "bearssl_rsa.h".}

proc rsaI62OaepDecrypt*(dig: ptr HashClass; label: pointer; labelLen: uint;
                        sk: ptr RsaPrivateKey; data: pointer; len: var uint): uint32 {.
    importcFunc, importc: "br_rsa_i62_oaep_decrypt", header: "bearssl_rsa.h".}

template rsaKbufPrivSize*(size: untyped): untyped =
  (5 * (((size) + 15) shr 4))

template rsaKbufPubSize*(size: untyped): untyped =
  (4 + (((size) + 7) shr 3))

type
  RsaKeygen* {.importc: "br_rsa_keygen".} = proc (rngCtx: ptr ptr PrngClass; sk: ptr RsaPrivateKey; kbufPriv: pointer;
      pk: ptr RsaPublicKey; kbufPub: pointer; size: cuint; pubexp: uint32): uint32 {.
      importcFunc.}

proc rsaI15Keygen*(rngCtx: ptr ptr PrngClass; sk: ptr RsaPrivateKey; kbufPriv: pointer;
                   pk: ptr RsaPublicKey; kbufPub: pointer; size: cuint; pubexp: uint32): uint32 {.
    importcFunc, importc: "br_rsa_i15_keygen", header: "bearssl_rsa.h".}

proc rsaI31Keygen*(rngCtx: ptr ptr PrngClass; sk: ptr RsaPrivateKey; kbufPriv: pointer;
                   pk: ptr RsaPublicKey; kbufPub: pointer; size: cuint; pubexp: uint32): uint32 {.
    importcFunc, importc: "br_rsa_i31_keygen", header: "bearssl_rsa.h".}

proc rsaI62Keygen*(rngCtx: ptr ptr PrngClass; sk: ptr RsaPrivateKey; kbufPriv: pointer;
                   pk: ptr RsaPublicKey; kbufPub: pointer; size: cuint; pubexp: uint32): uint32 {.
    importcFunc, importc: "br_rsa_i62_keygen", header: "bearssl_rsa.h".}

proc rsaI62KeygenGet*(): RsaKeygen {.importcFunc, importc: "br_rsa_i62_keygen_get",
                                     header: "bearssl_rsa.h".}

proc rsaKeygenGetDefault*(): RsaKeygen {.importcFunc,
    importc: "br_rsa_keygen_get_default",
    header: "bearssl_rsa.h".}

type
  RsaComputeModulus* {.importc: "br_rsa_compute_modulus".} = proc (n: pointer; sk: ptr RsaPrivateKey): uint {.importcFunc.}

proc rsaI15ComputeModulus*(n: pointer; sk: ptr RsaPrivateKey): uint {.importcFunc,
    importc: "br_rsa_i15_compute_modulus", header: "bearssl_rsa.h".}

proc rsaI31ComputeModulus*(n: pointer; sk: ptr RsaPrivateKey): uint {.importcFunc,
    importc: "br_rsa_i31_compute_modulus", header: "bearssl_rsa.h".}

proc rsaComputeModulusGetDefault*(): RsaComputeModulus {.importcFunc,
    importc: "br_rsa_compute_modulus_get_default", header: "bearssl_rsa.h".}

type
  RsaComputePubexp* = proc (sk: ptr RsaPrivateKey): uint32 {.importcFunc.}

proc rsaI15ComputePubexp*(sk: ptr RsaPrivateKey): uint32 {.importcFunc,
    importc: "br_rsa_i15_compute_pubexp", header: "bearssl_rsa.h".}

proc rsaI31ComputePubexp*(sk: ptr RsaPrivateKey): uint32 {.importcFunc,
    importc: "br_rsa_i31_compute_pubexp", header: "bearssl_rsa.h".}

proc rsaComputePubexpGetDefault*(): RsaComputePubexp {.importcFunc,
    importc: "br_rsa_compute_pubexp_get_default", header: "bearssl_rsa.h".}

type
  RsaComputePrivexp* {.importc: "br_rsa_compute_privexp".} = proc (d: pointer; sk: ptr RsaPrivateKey; pubexp: uint32): uint {.
      importcFunc.}

proc rsaI15ComputePrivexp*(d: pointer; sk: ptr RsaPrivateKey; pubexp: uint32): uint {.
    importcFunc, importc: "br_rsa_i15_compute_privexp", header: "bearssl_rsa.h".}

proc rsaI31ComputePrivexp*(d: pointer; sk: ptr RsaPrivateKey; pubexp: uint32): uint {.
    importcFunc, importc: "br_rsa_i31_compute_privexp", header: "bearssl_rsa.h".}

proc rsaComputePrivexpGetDefault*(): RsaComputePrivexp {.importcFunc,
    importc: "br_rsa_compute_privexp_get_default", header: "bearssl_rsa.h".}
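The buffer-size templates and keygen entry points above are enough to produce a key pair; the following is a minimal sketch only, assuming the import paths of this vendored tree and the system-seeded HmacDrbgContext helper added later in this diff.

import bearssl/rand, bearssl/abi/bearssl_rsa   # assumed import paths

let rng = HmacDrbgContext.new()   # system-seeded PRNG; nil if unavailable
doAssert rng != nil

const bits = 2048
var
  sk: RsaPrivateKey
  pk: RsaPublicKey
  kbufPriv: array[rsaKbufPrivSize(bits), byte]   # working buffers sized with the
  kbufPub: array[rsaKbufPubSize(bits), byte]     # templates declared above
# A PRNG is passed as `ptr ptr PrngClass`, i.e. the address of its vtable field.
let ok = rsaI31Keygen(addr rng.vtable, addr sk, addr kbufPriv[0],
                      addr pk, addr kbufPub[0], cuint bits, 65537'u32)
doAssert ok == 1   # BearSSL keygen returns 1 on success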
1425  vendor/nim-bearssl/bearssl/abi/bearssl_ssl.nim  (vendored, new file; diff suppressed: file too large)
488   vendor/nim-bearssl/bearssl/abi/bearssl_x509.nim  (vendored, new file)
@ -0,0 +1,488 @@
|
||||
import
|
||||
"."/[bearssl_ec, bearssl_hash, bearssl_rsa, csources]
|
||||
|
||||
{.pragma: importcFunc, cdecl, gcsafe, noSideEffect, raises: [].}
|
||||
{.used.}
|
||||
|
||||
const
|
||||
bearX509Path = bearSrcPath & "x509/"
|
||||
|
||||
{.compile: bearX509Path & "asn1enc.c".}
|
||||
{.compile: bearX509Path & "encode_ec_pk8der.c".}
|
||||
{.compile: bearX509Path & "encode_ec_rawder.c".}
|
||||
{.compile: bearX509Path & "encode_rsa_pk8der.c".}
|
||||
{.compile: bearX509Path & "encode_rsa_rawder.c".}
|
||||
{.compile: bearX509Path & "skey_decoder.c".}
|
||||
{.compile: bearX509Path & "x509_decoder.c".}
|
||||
{.compile: bearX509Path & "x509_knownkey.c".}
|
||||
{.compile: bearX509Path & "x509_minimal.c".}
|
||||
{.compile: bearX509Path & "x509_minimal_full.c".}
|
||||
|
||||
const
|
||||
ERR_X509_OK* = 32
|
||||
|
||||
|
||||
const
|
||||
ERR_X509_INVALID_VALUE* = 33
|
||||
|
||||
|
||||
const
|
||||
ERR_X509_TRUNCATED* = 34
|
||||
|
||||
|
||||
const
|
||||
ERR_X509_EMPTY_CHAIN* = 35
|
||||
|
||||
|
||||
const
|
||||
ERR_X509_INNER_TRUNC* = 36
|
||||
|
||||
|
||||
const
|
||||
ERR_X509_BAD_TAG_CLASS* = 37
|
||||
|
||||
|
||||
const
|
||||
ERR_X509_BAD_TAG_VALUE* = 38
|
||||
|
||||
|
||||
const
|
||||
ERR_X509_INDEFINITE_LENGTH* = 39
|
||||
|
||||
|
||||
const
|
||||
ERR_X509_EXTRA_ELEMENT* = 40
|
||||
|
||||
|
||||
const
|
||||
ERR_X509_UNEXPECTED* = 41
|
||||
|
||||
|
||||
const
|
||||
ERR_X509_NOT_CONSTRUCTED* = 42
|
||||
|
||||
|
||||
const
|
||||
ERR_X509_NOT_PRIMITIVE* = 43
|
||||
|
||||
|
||||
const
|
||||
ERR_X509_PARTIAL_BYTE* = 44
|
||||
|
||||
|
||||
const
|
||||
ERR_X509_BAD_BOOLEAN* = 45
|
||||
|
||||
|
||||
const
|
||||
ERR_X509_OVERFLOW* = 46
|
||||
|
||||
|
||||
const
|
||||
ERR_X509_BAD_DN* = 47
|
||||
|
||||
|
||||
const
|
||||
ERR_X509_BAD_TIME* = 48
|
||||
|
||||
|
||||
const
|
||||
ERR_X509_UNSUPPORTED* = 49
|
||||
|
||||
|
||||
const
|
||||
ERR_X509_LIMIT_EXCEEDED* = 50
|
||||
|
||||
|
||||
const
|
||||
ERR_X509_WRONG_KEY_TYPE* = 51
|
||||
|
||||
|
||||
const
|
||||
ERR_X509_BAD_SIGNATURE* = 52
|
||||
|
||||
|
||||
const
|
||||
ERR_X509_TIME_UNKNOWN* = 53
|
||||
|
||||
|
||||
const
|
||||
ERR_X509_EXPIRED* = 54
|
||||
|
||||
|
||||
const
|
||||
ERR_X509_DN_MISMATCH* = 55
|
||||
|
||||
|
||||
const
|
||||
ERR_X509_BAD_SERVER_NAME* = 56
|
||||
|
||||
|
||||
const
|
||||
ERR_X509_CRITICAL_EXTENSION* = 57
|
||||
|
||||
|
||||
const
|
||||
ERR_X509_NOT_CA* = 58
|
||||
|
||||
|
||||
const
|
||||
ERR_X509_FORBIDDEN_KEY_USAGE* = 59
|
||||
|
||||
|
||||
const
|
||||
ERR_X509_WEAK_PUBLIC_KEY* = 60
|
||||
|
||||
|
||||
const
|
||||
ERR_X509_NOT_TRUSTED* = 62
|
||||
|
||||
|
||||
type
|
||||
INNER_C_UNION_bearssl_x509_1* {.importc: "br_x509_pkey::no_name",
|
||||
header: "bearssl_x509.h", bycopy, union.} = object
|
||||
rsa* {.importc: "rsa".}: RsaPublicKey
|
||||
ec* {.importc: "ec".}: EcPublicKey
|
||||
|
||||
X509Pkey* {.importc: "br_x509_pkey", header: "bearssl_x509.h", bycopy.} = object
|
||||
keyType* {.importc: "key_type".}: byte
|
||||
key* {.importc: "key".}: INNER_C_UNION_bearssl_x509_1
|
||||
|
||||
|
||||
|
||||
type
|
||||
X500Name* {.importc: "br_x500_name", header: "bearssl_x509.h", bycopy.} = object
|
||||
data* {.importc: "data".}: ptr byte
|
||||
len* {.importc: "len".}: uint
|
||||
|
||||
|
||||
|
||||
type
|
||||
X509TrustAnchor* {.importc: "br_x509_trust_anchor", header: "bearssl_x509.h",
|
||||
bycopy.} = object
|
||||
dn* {.importc: "dn".}: X500Name
|
||||
flags* {.importc: "flags".}: cuint
|
||||
pkey* {.importc: "pkey".}: X509Pkey
|
||||
|
||||
|
||||
|
||||
const
|
||||
X509_TA_CA* = 0x0001
|
||||
|
||||
|
||||
const
|
||||
KEYTYPE_RSA* = 1
|
||||
|
||||
|
||||
const
|
||||
KEYTYPE_EC* = 2
|
||||
|
||||
|
||||
const
|
||||
KEYTYPE_KEYX* = 0x10
|
||||
|
||||
|
||||
const
|
||||
KEYTYPE_SIGN* = 0x20
|
||||
|
||||
|
||||
type
|
||||
X509Class* {.importc: "br_x509_class", header: "bearssl_x509.h", bycopy.} = object
|
||||
contextSize* {.importc: "context_size".}: uint
|
||||
startChain* {.importc: "start_chain".}: proc (ctx: ptr ptr X509Class;
|
||||
serverName: cstring) {.importcFunc.}
|
||||
startCert* {.importc: "start_cert".}: proc (ctx: ptr ptr X509Class; length: uint32) {.
|
||||
importcFunc.}
|
||||
append* {.importc: "append".}: proc (ctx: ptr ptr X509Class; buf: ptr byte;
|
||||
len: uint) {.importcFunc.}
|
||||
endCert* {.importc: "end_cert".}: proc (ctx: ptr ptr X509Class) {.importcFunc.}
|
||||
endChain* {.importc: "end_chain".}: proc (ctx: ptr ptr X509Class): cuint {.importcFunc.}
|
||||
getPkey* {.importc: "get_pkey".}: proc (ctx: ptr ptr X509Class; usages: ptr cuint): ptr X509Pkey {.
|
||||
importcFunc.}
|
||||
|
||||
|
||||
|
||||
type
|
||||
X509KnownkeyContext* {.importc: "br_x509_knownkey_context",
|
||||
header: "bearssl_x509.h", bycopy.} = object
|
||||
vtable* {.importc: "vtable".}: ptr X509Class
|
||||
pkey* {.importc: "pkey".}: X509Pkey
|
||||
usages* {.importc: "usages".}: cuint
|
||||
|
||||
|
||||
var x509KnownkeyVtable* {.importc: "br_x509_knownkey_vtable",
|
||||
header: "bearssl_x509.h".}: X509Class
|
||||
|
||||
proc x509KnownkeyInitRsa*(ctx: var X509KnownkeyContext; pk: ptr RsaPublicKey;
|
||||
usages: cuint) {.importcFunc,
|
||||
importc: "br_x509_knownkey_init_rsa",
|
||||
header: "bearssl_x509.h".}
|
||||
|
||||
proc x509KnownkeyInitEc*(ctx: var X509KnownkeyContext; pk: ptr EcPublicKey;
|
||||
usages: cuint) {.importcFunc,
|
||||
importc: "br_x509_knownkey_init_ec",
|
||||
header: "bearssl_x509.h".}
|
||||
|
||||
const
|
||||
X509_BUFSIZE_KEY* = 520
|
||||
X509_BUFSIZE_SIG* = 512
|
||||
|
||||
|
||||
type
|
||||
NameElement* {.importc: "br_name_element", header: "bearssl_x509.h", bycopy.} = object
|
||||
oid* {.importc: "oid".}: ptr byte
|
||||
buf* {.importc: "buf".}: cstring
|
||||
len* {.importc: "len".}: uint
|
||||
status* {.importc: "status".}: cint
|
||||
|
||||
|
||||
|
||||
type
|
||||
INNER_C_STRUCT_bearssl_x509_3* {.importc: "br_x509_minimal_context::no_name",
|
||||
header: "bearssl_x509.h", bycopy.} = object
|
||||
dp* {.importc: "dp".}: ptr uint32
|
||||
rp* {.importc: "rp".}: ptr uint32
|
||||
ip* {.importc: "ip".}: ptr byte
|
||||
|
||||
X509MinimalContext* {.importc: "br_x509_minimal_context",
|
||||
header: "bearssl_x509.h", bycopy.} = object
|
||||
vtable* {.importc: "vtable".}: ptr X509Class
|
||||
pkey* {.importc: "pkey".}: X509Pkey
|
||||
cpu* {.importc: "cpu".}: INNER_C_STRUCT_bearssl_x509_3
|
||||
dpStack* {.importc: "dp_stack".}: array[32, uint32]
|
||||
rpStack* {.importc: "rp_stack".}: array[32, uint32]
|
||||
err* {.importc: "err".}: cint
|
||||
serverName* {.importc: "server_name".}: cstring
|
||||
keyUsages* {.importc: "key_usages".}: byte
|
||||
days* {.importc: "days".}: uint32
|
||||
seconds* {.importc: "seconds".}: uint32
|
||||
certLength* {.importc: "cert_length".}: uint32
|
||||
numCerts* {.importc: "num_certs".}: uint32
|
||||
hbuf* {.importc: "hbuf".}: ptr byte
|
||||
hlen* {.importc: "hlen".}: uint
|
||||
pad* {.importc: "pad".}: array[256, byte]
|
||||
eePkeyData* {.importc: "ee_pkey_data".}: array[X509_BUFSIZE_KEY, byte]
|
||||
pkeyData* {.importc: "pkey_data".}: array[X509_BUFSIZE_KEY, byte]
|
||||
certSignerKeyType* {.importc: "cert_signer_key_type".}: byte
|
||||
certSigHashOid* {.importc: "cert_sig_hash_oid".}: uint16
|
||||
certSigHashLen* {.importc: "cert_sig_hash_len".}: byte
|
||||
certSig* {.importc: "cert_sig".}: array[X509_BUFSIZE_SIG, byte]
|
||||
certSigLen* {.importc: "cert_sig_len".}: uint16
|
||||
minRsaSize* {.importc: "min_rsa_size".}: int16
|
||||
trustAnchors* {.importc: "trust_anchors".}: ptr X509TrustAnchor
|
||||
trustAnchorsNum* {.importc: "trust_anchors_num".}: uint
|
||||
doMhash* {.importc: "do_mhash".}: byte
|
||||
mhash* {.importc: "mhash".}: MultihashContext
|
||||
tbsHash* {.importc: "tbs_hash".}: array[64, byte]
|
||||
doDnHash* {.importc: "do_dn_hash".}: byte
|
||||
dnHashImpl* {.importc: "dn_hash_impl".}: ptr HashClass
|
||||
dnHash* {.importc: "dn_hash".}: HashCompatContext
|
||||
currentDnHash* {.importc: "current_dn_hash".}: array[64, byte]
|
||||
nextDnHash* {.importc: "next_dn_hash".}: array[64, byte]
|
||||
savedDnHash* {.importc: "saved_dn_hash".}: array[64, byte]
|
||||
nameElts* {.importc: "name_elts".}: ptr NameElement
|
||||
numNameElts* {.importc: "num_name_elts".}: uint
|
||||
irsa* {.importc: "irsa".}: RsaPkcs1Vrfy
|
||||
iecdsa* {.importc: "iecdsa".}: EcdsaVrfy
|
||||
iec* {.importc: "iec".}: ptr EcImpl
|
||||
|
||||
|
||||
var x509MinimalVtable* {.importc: "br_x509_minimal_vtable", header: "bearssl_x509.h".}: X509Class
|
||||
|
||||
proc x509MinimalInit*(ctx: var X509MinimalContext; dnHashImpl: ptr HashClass;
|
||||
trustAnchors: ptr X509TrustAnchor; trustAnchorsNum: uint) {.
|
||||
importcFunc, importc: "br_x509_minimal_init", header: "bearssl_x509.h".}
|
||||
|
||||
proc x509MinimalSetHash*(ctx: var X509MinimalContext; id: cint; impl: ptr HashClass) {.
|
||||
inline.} =
|
||||
multihashSetimpl(ctx.mhash, id, impl)
|
||||
|
||||
|
||||
proc x509MinimalSetRsa*(ctx: var X509MinimalContext; irsa: RsaPkcs1Vrfy) {.inline.} =
|
||||
ctx.irsa = irsa
|
||||
|
||||
|
||||
proc x509MinimalSetEcdsa*(ctx: var X509MinimalContext; iec: ptr EcImpl;
|
||||
iecdsa: EcdsaVrfy) {.inline.} =
|
||||
ctx.iecdsa = iecdsa
|
||||
ctx.iec = iec
|
||||
|
||||
|
||||
proc x509MinimalInitFull*(ctx: var X509MinimalContext;
|
||||
trustAnchors: ptr X509TrustAnchor;
|
||||
trustAnchorsNum: uint) {.importcFunc,
|
||||
importc: "br_x509_minimal_init_full", header: "bearssl_x509.h".}
|
||||
|
||||
proc x509MinimalSetTime*(ctx: var X509MinimalContext; days: uint32; seconds: uint32) {.
|
||||
inline.} =
|
||||
ctx.days = days
|
||||
ctx.seconds = seconds
|
||||
|
||||
|
||||
proc x509MinimalSetMinrsa*(ctx: var X509MinimalContext; byteLength: cint) {.inline,
|
||||
importcFunc.} =
|
||||
ctx.minRsaSize = (int16)(byteLength - 128)
|
||||
|
||||
|
||||
proc x509MinimalSetNameElements*(ctx: var X509MinimalContext; elts: ptr NameElement;
|
||||
numElts: uint) {.inline.} =
|
||||
ctx.nameElts = elts
|
||||
ctx.numNameElts = numElts
|
||||
|
||||
|
||||
type
|
||||
INNER_C_STRUCT_bearssl_x509_5* {.importc: "br_x509_decoder_context::no_name",
|
||||
header: "bearssl_x509.h", bycopy.} = object
|
||||
dp* {.importc: "dp".}: ptr uint32
|
||||
rp* {.importc: "rp".}: ptr uint32
|
||||
ip* {.importc: "ip".}: ptr byte
|
||||
|
||||
X509DecoderContext* {.importc: "br_x509_decoder_context",
|
||||
header: "bearssl_x509.h", bycopy.} = object
|
||||
pkey* {.importc: "pkey".}: X509Pkey
|
||||
cpu* {.importc: "cpu".}: INNER_C_STRUCT_bearssl_x509_5
|
||||
dpStack* {.importc: "dp_stack".}: array[32, uint32]
|
||||
rpStack* {.importc: "rp_stack".}: array[32, uint32]
|
||||
err* {.importc: "err".}: cint
|
||||
pad* {.importc: "pad".}: array[256, byte]
|
||||
decoded* {.importc: "decoded".}: bool
|
||||
notbeforeDays* {.importc: "notbefore_days".}: uint32
|
||||
notbeforeSeconds* {.importc: "notbefore_seconds".}: uint32
|
||||
notafterDays* {.importc: "notafter_days".}: uint32
|
||||
notafterSeconds* {.importc: "notafter_seconds".}: uint32
|
||||
isCA* {.importc: "isCA".}: bool
|
||||
copyDn* {.importc: "copy_dn".}: byte
|
||||
appendDnCtx* {.importc: "append_dn_ctx".}: pointer
|
||||
appendDn* {.importc: "append_dn".}: proc (ctx: pointer; buf: pointer; len: uint) {.
|
||||
importcFunc.}
|
||||
hbuf* {.importc: "hbuf".}: ptr byte
|
||||
hlen* {.importc: "hlen".}: uint
|
||||
pkeyData* {.importc: "pkey_data".}: array[X509_BUFSIZE_KEY, byte]
|
||||
signerKeyType* {.importc: "signer_key_type".}: byte
|
||||
signerHashId* {.importc: "signer_hash_id".}: byte
|
||||
|
||||
|
||||
|
||||
proc x509DecoderInit*(ctx: var X509DecoderContext; appendDn: proc (ctx: pointer;
|
||||
buf: pointer; len: uint) {.importcFunc.}; appendDnCtx: pointer) {.importcFunc,
|
||||
importc: "br_x509_decoder_init", header: "bearssl_x509.h".}
|
||||
|
||||
proc x509DecoderPush*(ctx: var X509DecoderContext; data: pointer; len: uint) {.importcFunc,
|
||||
importc: "br_x509_decoder_push", header: "bearssl_x509.h".}
|
||||
|
||||
proc x509DecoderGetPkey*(ctx: var X509DecoderContext): ptr X509Pkey {.inline.} =
|
||||
if ctx.decoded and ctx.err == 0:
|
||||
return addr(ctx.pkey)
|
||||
else:
|
||||
return nil
|
||||
|
||||
|
||||
proc x509DecoderLastError*(ctx: var X509DecoderContext): cint {.inline.} =
|
||||
if ctx.err != 0:
|
||||
return ctx.err
|
||||
if not ctx.decoded:
|
||||
return ERR_X509_TRUNCATED
|
||||
return 0
|
||||
|
||||
proc x509DecoderIsCA*(ctx: var X509DecoderContext): cint {.inline.} =
|
||||
return cint ctx.isCA
|
||||
|
||||
proc x509DecoderGetSignerKeyType*(ctx: var X509DecoderContext): cint {.inline.} =
|
||||
return cint ctx.signerKeyType
|
||||
|
||||
proc x509DecoderGetSignerHashId*(ctx: var X509DecoderContext): cint {.inline.} =
|
||||
return cint ctx.signerHashId
|
||||
|
||||
type
|
||||
X509Certificate* {.importc: "br_x509_certificate", header: "bearssl_x509.h", bycopy.} = object
|
||||
data* {.importc: "data".}: ptr byte
|
||||
dataLen* {.importc: "data_len".}: uint
|
||||
|
||||
|
||||
|
||||
type
|
||||
INNER_C_UNION_bearssl_x509_8* {.importc: "br_skey_decoder_context::no_name",
|
||||
header: "bearssl_x509.h", bycopy, union.} = object
|
||||
rsa* {.importc: "rsa".}: RsaPrivateKey
|
||||
ec* {.importc: "ec".}: EcPrivateKey
|
||||
|
||||
INNER_C_STRUCT_bearssl_x509_9* {.importc: "br_skey_decoder_context::no_name",
|
||||
header: "bearssl_x509.h", bycopy.} = object
|
||||
dp* {.importc: "dp".}: ptr uint32
|
||||
rp* {.importc: "rp".}: ptr uint32
|
||||
ip* {.importc: "ip".}: ptr byte
|
||||
|
||||
SkeyDecoderContext* {.importc: "br_skey_decoder_context",
|
||||
header: "bearssl_x509.h", bycopy.} = object
|
||||
key* {.importc: "key".}: INNER_C_UNION_bearssl_x509_8
|
||||
cpu* {.importc: "cpu".}: INNER_C_STRUCT_bearssl_x509_9
|
||||
dpStack* {.importc: "dp_stack".}: array[32, uint32]
|
||||
rpStack* {.importc: "rp_stack".}: array[32, uint32]
|
||||
err* {.importc: "err".}: cint
|
||||
hbuf* {.importc: "hbuf".}: ptr byte
|
||||
hlen* {.importc: "hlen".}: uint
|
||||
pad* {.importc: "pad".}: array[256, byte]
|
||||
keyType* {.importc: "key_type".}: byte
|
||||
keyData* {.importc: "key_data".}: array[3 * X509_BUFSIZE_SIG, byte]
|
||||
|
||||
|
||||
|
||||
proc skeyDecoderInit*(ctx: var SkeyDecoderContext) {.importcFunc,
|
||||
importc: "br_skey_decoder_init", header: "bearssl_x509.h".}
|
||||
|
||||
proc skeyDecoderPush*(ctx: var SkeyDecoderContext; data: pointer; len: uint) {.importcFunc,
|
||||
importc: "br_skey_decoder_push", header: "bearssl_x509.h".}
|
||||
|
||||
proc skeyDecoderLastError*(ctx: var SkeyDecoderContext): cint {.inline.} =
|
||||
if ctx.err != 0:
|
||||
return ctx.err
|
||||
if ctx.keyType == '\0'.byte:
|
||||
return ERR_X509_TRUNCATED
|
||||
return 0
|
||||
|
||||
|
||||
proc skeyDecoderKeyType*(ctx: var SkeyDecoderContext): cint {.inline.} =
|
||||
if ctx.err == 0:
|
||||
return cint ctx.keyType
|
||||
else:
|
||||
return 0
|
||||
|
||||
|
||||
proc skeyDecoderGetRsa*(ctx: var SkeyDecoderContext): ptr RsaPrivateKey {.inline.} =
|
||||
if ctx.err == 0 and ctx.keyType == KEYTYPE_RSA:
|
||||
return addr(ctx.key.rsa)
|
||||
else:
|
||||
return nil
|
||||
|
||||
|
||||
proc skeyDecoderGetEc*(ctx: var SkeyDecoderContext): ptr EcPrivateKey {.inline.} =
|
||||
if ctx.err == 0 and ctx.keyType == KEYTYPE_EC:
|
||||
return addr(ctx.key.ec)
|
||||
else:
|
||||
return nil
|
||||
|
||||
|
||||
proc encodeRsaRawDer*(dest: pointer; sk: ptr RsaPrivateKey; pk: ptr RsaPublicKey;
|
||||
d: pointer; dlen: uint): uint {.importcFunc,
|
||||
importc: "br_encode_rsa_raw_der", header: "bearssl_x509.h".}
|
||||
|
||||
proc encodeRsaPkcs8Der*(dest: pointer; sk: ptr RsaPrivateKey; pk: ptr RsaPublicKey;
|
||||
d: pointer; dlen: uint): uint {.importcFunc,
|
||||
importc: "br_encode_rsa_pkcs8_der", header: "bearssl_x509.h".}
|
||||
|
||||
proc encodeEcRawDer*(dest: pointer; sk: ptr EcPrivateKey; pk: ptr EcPublicKey): uint {.
|
||||
importcFunc, importc: "br_encode_ec_raw_der", header: "bearssl_x509.h".}
|
||||
|
||||
proc encodeEcPkcs8Der*(dest: pointer; sk: ptr EcPrivateKey; pk: ptr EcPublicKey): uint {.
|
||||
importcFunc, importc: "br_encode_ec_pkcs8_der", header: "bearssl_x509.h".}
|
||||
|
||||
const
|
||||
ENCODE_PEM_RSA_RAW* = "RSA PRIVATE KEY"
|
||||
|
||||
|
||||
const
|
||||
ENCODE_PEM_EC_RAW* = "EC PRIVATE KEY"
|
||||
|
||||
|
||||
const
|
||||
ENCODE_PEM_PKCS8* = "PRIVATE KEY"
|
26    vendor/nim-bearssl/bearssl/abi/brssl.nim  (vendored, new file)
@ -0,0 +1,26 @@
import
  "."/[csources, bearssl_x509]

{.pragma: importcFunc, cdecl, gcsafe, noSideEffect, raises: [].}
{.used.}

const
  bearToolsPath = bearPath & "tools/"

{.compile: bearToolsPath & "vector.c".}
{.compile: bearToolsPath & "xmem.c".}
{.compile: bearToolsPath & "names.c".}
{.compile: bearToolsPath & "certs.c".}
{.compile: bearToolsPath & "files.c".}

type
  X509NoanchorContext* {.importc: "x509_noanchor_context", header: "brssl.h", bycopy.} = object
    vtable* {.importc: "vtable".}: ptr X509Class
    inner* {.importc: "inner".}: ptr ptr X509Class

proc x509NoanchorInit*(xwc: var X509NoanchorContext; inner: ptr ptr X509Class) {.importcFunc,
    importc: "x509_noanchor_init", header: "brssl.h".}

proc initNoAnchor*(xwc: var X509NoanchorContext, inner: ptr ptr X509Class) {.
    importcFunc, importc: "x509_noanchor_init", header: "brssl.h", deprecated: "x509NoanchorInit".}
23    vendor/nim-bearssl/bearssl/abi/cacert.nim  (vendored, new file)
@ -0,0 +1,23 @@
## Nim-BearSSL
## Copyright (c) 2018-2021 Status Research & Development GmbH
## Licensed under either of
## * Apache License, version 2.0, ([LICENSE-APACHE](LICENSE-APACHE))
## * MIT license ([LICENSE-MIT](LICENSE-MIT))
## at your option.
## This file may not be copied, modified, or distributed except according to
## those terms.

## This module provides access to Mozilla's CA certificate store in PEM format.
## This certificate store was downloaded from
## https://curl.haxx.se/ca/cacert.pem
## And converted to C header using ``brssl ta cacert.pem > cacert.h``.

import ./csources
from ./bearssl_x509 import X509TrustAnchor

{.passc: "-I" & bearPath & "../certs/".}

var MozillaTrustAnchors* {.
  importc: "TAs", header: "cacert20210119.h".}: array[129, X509TrustAnchor]
var MozillaTrustAnchorsCount* {.
  importc: "TAs_NUM", header: "cacert20210119.h".}: cint
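A small sketch of how these anchors plug into the X.509 validation engine declared in bearssl_x509.nim; the import paths are assumptions based on this vendored layout.

import bearssl/abi/[bearssl_x509, cacert]   # assumed import paths

var xc: X509MinimalContext
# Validate certificate chains against the bundled Mozilla trust anchors, with
# all hash and signature implementations enabled ("minimal_full" profile).
x509MinimalInitFull(xc, addr MozillaTrustAnchors[0], uint MozillaTrustAnchorsCount)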
21    vendor/nim-bearssl/bearssl/abi/config.nim  (vendored, new file)
@ -0,0 +1,21 @@
import
  "."/[csources]

{.pragma: importcFunc, cdecl, gcsafe, noSideEffect, raises: [].}
{.pragma: headerFunc, importcFunc, header: "bearssl.h".}
{.used.}

const
  bearRootPath = bearSrcPath

{.compile: bearRootPath & "settings.c".}

type
  ConfigOption* {.importc: "br_config_option", header: "bearssl.h", bycopy.} = object
    name* {.importc: "name".}: cstring
    value* {.importc: "value".}: clong

# TODO: missing `extern "C"` in bearssl.h means this function cannot
# be used from C++
proc getConfig*(): ptr ConfigOption {.importcFunc, importc: "br_get_config",
                                      headerFunc.}
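For reference, br_get_config reports the compile-time options BearSSL was built with as an array whose last entry has a nil name; a hedged usage sketch, with the import path assumed from this vendored layout.

import bearssl/abi/config   # assumed import path

let opts = cast[ptr UncheckedArray[ConfigOption]](getConfig())
var i = 0
while opts[i].name != nil:        # the option list ends at a nil name
  echo opts[i].name, " = ", opts[i].value
  inc i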
63    vendor/nim-bearssl/bearssl/abi/csources.nim  (vendored, new file)
@ -0,0 +1,63 @@
## Nim-BearSSL
## Copyright (c) 2018-2022 Status Research & Development GmbH
## Licensed under either of
## * Apache License, version 2.0, ([LICENSE-APACHE](LICENSE-APACHE))
## * MIT license ([LICENSE-MIT](LICENSE-MIT))
## at your option.
## This file may not be copied, modified, or distributed except according to
## those terms.

import
  std/[os, strutils]

export os

# For each bearssl header file, we create one nim module that compiles the
# C file related to that module. Some C "modules" have dependencies - the Nim
# modules make sure to import these dependencies so that the correct C source
# files get compiled transitively.
#
# Most of the header-like content was generated with c2nim, then hand-edited.
#
# For historical reasons, some functions and types are exposed with a "Br"
# prefix - these have been marked deprecated.
#
# Some functions take a length as input - in bearssl, `csize_t` is used for this
# purpose - wrappers do the same

static: doAssert sizeof(csize_t) == sizeof(int)

const
  bearPath* = currentSourcePath.rsplit({DirSep, AltSep}, 1)[0] & "/../" &
    "csources" & "/"
  bearIncPath* = bearPath & "inc/"
  bearSrcPath* = bearPath & "src/"
  bearToolsPath* = bearPath & "tools/"

# TODO https://github.com/nim-lang/Nim/issues/19864

{.passc: "-I" & quoteShell(bearSrcPath)}
{.passc: "-I" & quoteShell(bearIncPath)}
{.passc: "-I" & quoteShell(bearToolsPath)}

when defined(windows):
  {.passc: "-DBR_USE_WIN32_TIME=1".}
  {.passc: "-DBR_USE_WIN32_RAND=1".}
else:
  {.passc: "-DBR_USE_UNIX_TIME=1".}
  {.passc: "-DBR_USE_URANDOM=1".}

when defined(i386) or defined(amd64) or defined(arm64):
  {.passc: "-DBR_LE_UNALIGNED=1".}
elif defined(powerpc) or defined(powerpc64):
  {.passc: "-DBR_BE_UNALIGNED=1".}
elif defined(powerpc64el):
  {.passc: "-DBR_LE_UNALIGNED=1".}

when sizeof(int) == 8:
  {.passc: "-DBR_64=1".}
  when hostCPU == "amd64":
    {.passc:" -DBR_amd64=1".}
  when defined(vcc):
    {.passc: "-DBR_UMUL128=1".}
  else:
    {.passc: "-DBR_INT128=1".}
21    vendor/nim-bearssl/bearssl/abi/inner.nim  (vendored, new file)
@ -0,0 +1,21 @@
import
  "."/[csources]

{.used.}

const
  bearCodecPath = bearSrcPath & "codec/"

{.compile: bearCodecPath & "ccopy.c".}
{.compile: bearCodecPath & "dec16be.c".}
{.compile: bearCodecPath & "dec16le.c".}
{.compile: bearCodecPath & "dec32be.c".}
{.compile: bearCodecPath & "dec32le.c".}
{.compile: bearCodecPath & "dec64be.c".}
{.compile: bearCodecPath & "dec64le.c".}
{.compile: bearCodecPath & "enc16be.c".}
{.compile: bearCodecPath & "enc16le.c".}
{.compile: bearCodecPath & "enc32be.c".}
{.compile: bearCodecPath & "enc32le.c".}
{.compile: bearCodecPath & "enc64be.c".}
{.compile: bearCodecPath & "enc64le.c".}
64    vendor/nim-bearssl/bearssl/abi/intx.nim  (vendored, new file)
@ -0,0 +1,64 @@
|
||||
import
|
||||
"."/[csources]
|
||||
|
||||
{.used.}
|
||||
|
||||
const
|
||||
bearIntPath = bearSrcPath & "int/"
|
||||
|
||||
{.compile: bearIntPath & "i15_add.c".}
|
||||
{.compile: bearIntPath & "i15_bitlen.c".}
|
||||
{.compile: bearIntPath & "i15_decmod.c".}
|
||||
{.compile: bearIntPath & "i15_decode.c".}
|
||||
{.compile: bearIntPath & "i15_decred.c".}
|
||||
{.compile: bearIntPath & "i15_encode.c".}
|
||||
{.compile: bearIntPath & "i15_fmont.c".}
|
||||
{.compile: bearIntPath & "i15_iszero.c".}
|
||||
{.compile: bearIntPath & "i15_moddiv.c".}
|
||||
{.compile: bearIntPath & "i15_modpow.c".}
|
||||
{.compile: bearIntPath & "i15_modpow2.c".}
|
||||
{.compile: bearIntPath & "i15_montmul.c".}
|
||||
{.compile: bearIntPath & "i15_mulacc.c".}
|
||||
{.compile: bearIntPath & "i15_muladd.c".}
|
||||
{.compile: bearIntPath & "i15_ninv15.c".}
|
||||
{.compile: bearIntPath & "i15_reduce.c".}
|
||||
{.compile: bearIntPath & "i15_rshift.c".}
|
||||
{.compile: bearIntPath & "i15_sub.c".}
|
||||
{.compile: bearIntPath & "i15_tmont.c".}
|
||||
{.compile: bearIntPath & "i31_add.c".}
|
||||
{.compile: bearIntPath & "i31_bitlen.c".}
|
||||
{.compile: bearIntPath & "i31_decmod.c".}
|
||||
{.compile: bearIntPath & "i31_decode.c".}
|
||||
{.compile: bearIntPath & "i31_decred.c".}
|
||||
{.compile: bearIntPath & "i31_encode.c".}
|
||||
{.compile: bearIntPath & "i31_fmont.c".}
|
||||
{.compile: bearIntPath & "i31_iszero.c".}
|
||||
{.compile: bearIntPath & "i31_moddiv.c".}
|
||||
{.compile: bearIntPath & "i31_modpow.c".}
|
||||
{.compile: bearIntPath & "i31_modpow2.c".}
|
||||
{.compile: bearIntPath & "i31_montmul.c".}
|
||||
{.compile: bearIntPath & "i31_mulacc.c".}
|
||||
{.compile: bearIntPath & "i31_muladd.c".}
|
||||
{.compile: bearIntPath & "i31_ninv31.c".}
|
||||
{.compile: bearIntPath & "i31_reduce.c".}
|
||||
{.compile: bearIntPath & "i31_rshift.c".}
|
||||
{.compile: bearIntPath & "i31_sub.c".}
|
||||
{.compile: bearIntPath & "i31_tmont.c".}
|
||||
{.compile: bearIntPath & "i32_add.c".}
|
||||
{.compile: bearIntPath & "i32_bitlen.c".}
|
||||
{.compile: bearIntPath & "i32_decmod.c".}
|
||||
{.compile: bearIntPath & "i32_decode.c".}
|
||||
{.compile: bearIntPath & "i32_decred.c".}
|
||||
{.compile: bearIntPath & "i32_div32.c".}
|
||||
{.compile: bearIntPath & "i32_encode.c".}
|
||||
{.compile: bearIntPath & "i32_fmont.c".}
|
||||
{.compile: bearIntPath & "i32_iszero.c".}
|
||||
{.compile: bearIntPath & "i32_modpow.c".}
|
||||
{.compile: bearIntPath & "i32_montmul.c".}
|
||||
{.compile: bearIntPath & "i32_mulacc.c".}
|
||||
{.compile: bearIntPath & "i32_muladd.c".}
|
||||
{.compile: bearIntPath & "i32_ninv32.c".}
|
||||
{.compile: bearIntPath & "i32_reduce.c".}
|
||||
{.compile: bearIntPath & "i32_sub.c".}
|
||||
{.compile: bearIntPath & "i32_tmont.c".}
|
||||
{.compile: bearIntPath & "i62_modpow2.c".}
|
4     vendor/nim-bearssl/bearssl/aead.nim  (vendored, new file)
@ -0,0 +1,4 @@
import
  ./abi/bearssl_aead

export bearssl_aead
4     vendor/nim-bearssl/bearssl/blockx.nim  (vendored, new file)
@ -0,0 +1,4 @@
import
  ./abi/bearssl_block

export bearssl_block
4     vendor/nim-bearssl/bearssl/brssl.nim  (vendored, new file)
@ -0,0 +1,4 @@
import
  ./abi/brssl

export brssl
1321  vendor/nim-bearssl/bearssl/decls.nim  (vendored; diff suppressed: file too large)
4     vendor/nim-bearssl/bearssl/ec.nim  (vendored, new file)
@ -0,0 +1,4 @@
import
  ./abi/bearssl_ec

export bearssl_ec
4     vendor/nim-bearssl/bearssl/hash.nim  (vendored, new file)
@ -0,0 +1,4 @@
import
  ./abi/bearssl_hash

export bearssl_hash
4     vendor/nim-bearssl/bearssl/hmac.nim  (vendored, new file)
@ -0,0 +1,4 @@
import
  ./abi/bearssl_hmac

export bearssl_hmac
4     vendor/nim-bearssl/bearssl/kdf.nim  (vendored, new file)
@ -0,0 +1,4 @@
import
  ./abi/bearssl_kdf

export bearssl_kdf
45    vendor/nim-bearssl/bearssl/pem.nim  (vendored, new file)
@ -0,0 +1,45 @@
|
||||
import
|
||||
typetraits,
|
||||
./abi/bearssl_pem
|
||||
|
||||
export bearssl_pem
|
||||
|
||||
func init*(v: var PemDecoderContext) =
|
||||
# Careful, PemDecoderContext items are not copyable!
|
||||
# TODO prevent copying
|
||||
pemDecoderInit(v)
|
||||
|
||||
func push*(ctx: var PemDecoderContext, data: openArray[byte|char]): int =
|
||||
if data.len > 0:
|
||||
let consumed = pemDecoderPush(
|
||||
ctx, unsafeAddr data[0], uint data.len)
|
||||
int(consumed)
|
||||
else:
|
||||
0
|
||||
|
||||
func setdest*(
|
||||
ctx: var PemDecoderContext;
|
||||
dest: proc (destCtx: pointer;
|
||||
src: pointer; len: uint) {.cdecl, gcsafe, noSideEffect, raises: [].};
|
||||
destCtx: pointer) =
|
||||
pemDecoderSetdest(ctx, dest, destCtx)
|
||||
|
||||
func lastEvent*(ctx: var PemDecoderContext): cint =
|
||||
pemDecoderEvent(ctx)
|
||||
|
||||
func banner*(ctx: PemDecoderContext): string =
|
||||
## Return the `name` field as a string
|
||||
if ctx.name[ctx.name.high] == char(0):
|
||||
$(unsafeAddr ctx.name)
|
||||
else:
|
||||
var res = newString(ctx.name.len)
|
||||
for i, c in ctx.name: res[i] = ctx.name[i]
|
||||
res
|
||||
|
||||
func pemEncode*(
|
||||
data: openArray[byte|char], banner: cstring, flags: cuint = 0): string =
|
||||
let bytes = pemEncode(nil, nil, uint data.len, banner, flags)
|
||||
result.setLen(int bytes + 1)
|
||||
discard pemEncode(
|
||||
addr result[0], unsafeAddr data[0], uint data.len, banner, flags)
|
||||
result.setLen(int bytes)
|
4     vendor/nim-bearssl/bearssl/prf.nim  (vendored, new file)
@ -0,0 +1,4 @@
import
  ./abi/bearssl_prf

export bearssl_prf
100   vendor/nim-bearssl/bearssl/rand.nim  (vendored, new file)
@ -0,0 +1,100 @@
import
  typetraits,
  ./abi/[bearssl_hash, bearssl_rand]

export bearssl_rand

# About types used in helpers:
# `bool` types are problematic because they only use one bit of the
# entire byte - a similar problem occurs with `object` types with alignment
# gaps - `supportsCopyMem` is wrong here, we should be using `supportsMemCmp` or
# something similar that takes into account these issues, but alas, there's no
# such trait as of now

proc init*[S](T: type HmacDrbgContext, seed: openArray[S]): HmacDrbgContext =
  ## Create a new randomness context with the given seed - typically, a single
  ## instance per thread should be created.
  ##
  ## The seed can later be topped up with `update`.
  static: doAssert supportsCopyMem(S) and sizeof(S) > 0 and S isnot bool

  if seed.len == 0:
    hmacDrbgInit(result, addr bearssl_hash.sha256Vtable, nil, 0)
  else:
    # In theory the multiplication can overflow, but practically we can't
    # allocate that much memory, so it won't
    hmacDrbgInit(
      result, addr sha256Vtable, unsafeAddr seed[0], uint seed.len * sizeof(S))

proc new*(T: type HmacDrbgContext): ref HmacDrbgContext =
  ## Create a new randomness context intended to be shared between randomness
  ## consumers - typically, a single instance per thread should be created.
  ##
  ## The context is seeded with randomness from the OS / system.
  ## Returns `nil` if the OS / system has no randomness API.
  let seeder = prngSeederSystem(nil)
  if seeder == nil:
    return nil

  let rng = (ref HmacDrbgContext)()
  hmacDrbgInit(rng[], addr sha256Vtable, nil, 0)

  if seeder(addr rng.vtable) == 0:
    return nil

  rng

func generate*(ctx: var HmacDrbgContext, v: var auto) =
  ## Fill `v` with random data - `v` must be a simple type
  static: doAssert supportsCopyMem(type v)

  when sizeof(v) > 0:
    when v is bool:
      # `bool` would result in a heavily biased value because >0 == true
      var tmp: byte
      hmacDrbgGenerate(ctx, addr tmp, uint sizeof(tmp))
      v = (tmp and 1'u8) == 1
    else:
      hmacDrbgGenerate(ctx, addr v, uint sizeof(v))

func generate*[V](ctx: var HmacDrbgContext, v: var openArray[V]) =
  ## Fill `v` with random data - `V` must be a simple type
  static: doAssert supportsCopyMem(V) and sizeof(V) > 0

  when V is bool:
    for b in v.mitems:
      ctx.generate(b)
  else:
    if v.len > 0:
      # In theory the multiplication can overflow, but practically we can't
      # allocate that much memory, so it won't
      hmacDrbgGenerate(ctx, addr v[0], uint v.len * sizeof(V))

template generate*[V](ctx: var HmacDrbgContext, v: var seq[V]) =
  generate(ctx, v.toOpenArray(0, v.high()))

func generateBytes*(ctx: var HmacDrbgContext, n: int): seq[byte] =
  # https://github.com/nim-lang/Nim/issues/19357
  if n > 0:
    result = newSeqUninitialized[byte](n)
    ctx.generate(result)

func generate*(ctx: var HmacDrbgContext, T: type): T {.noinit.} =
  ## Create a new instance of `T` filled with random data - `T` must be
  ## a simple type
  ctx.generate(result)

func update*[S](ctx: var HmacDrbgContext, seed: openArray[S]) =
  ## Update context with additional seed data
  static: doAssert supportsCopyMem(S) and sizeof(S) > 0 and S isnot bool

  if seed.len > 0:
    # In theory the multiplication can overflow, but practically we can't
    # allocate that much memory, so it won't
    hmacDrbgUpdate(ctx, unsafeAddr seed[0], uint seed.len * sizeof(S))

# Convenience helpers using bearssl naming

template hmacDrbgGenerate*(
    ctx: var HmacDrbgContext, output: var openArray[byte]) =
  generate(ctx, output)
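Taken together, these helpers give a shareable, system-seeded DRBG; a minimal usage sketch, with the import path assumed from this vendored layout.

import bearssl/rand   # assumed import path

let rng = HmacDrbgContext.new()   # seeded from the OS; nil if no system RNG
doAssert rng != nil

var key: array[32, byte]
rng[].generate(key)                  # fill a buffer in place

let token = rng[].generate(uint64)   # or produce a value of a simple type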
4     vendor/nim-bearssl/bearssl/rsa.nim  (vendored, new file)
@ -0,0 +1,4 @@
import
  ./abi/bearssl_rsa

export bearssl_rsa
4     vendor/nim-bearssl/bearssl/ssl.nim  (vendored, new file)
@ -0,0 +1,4 @@
import
  ./abi/bearssl_ssl

export bearssl_ssl
4     vendor/nim-bearssl/bearssl/x509.nim  (vendored, new file)
@ -0,0 +1,4 @@
import
  ./abi/bearssl_x509

export bearssl_x509
4     vendor/nim-bearssl/config.nims  (vendored, new file)
@ -0,0 +1,4 @@
# begin Nimble config (version 1)
when fileExists("nimble.paths"):
  include "nimble.paths"
# end Nimble config
5     vendor/nim-bearssl/nim.cfg  (vendored, new file)
@ -0,0 +1,5 @@
# Avoid some rare stack corruption while using exceptions with a SEH-enabled
# toolchain: https://github.com/status-im/nimbus-eth2/issues/3121
@if windows and not vcc:
  --define:nimRawSetjmp
@end
43    vendor/nim-bearssl/regenerate.sh  (vendored, new file)
@ -0,0 +1,43 @@
#!/bin/sh

[[ $(c2nim -v) == "0.9.18" ]] || echo "Different c2nim used, check the code"

mkdir -p gen
cp bearssl/csources/inc/*.h gen
cp bearssl/csources/tools/brssl.h gen

# c2nim gets confused by #ifdef inside struct's
unifdef -m -UBR_DOXYGEN_IGNORE gen/*.h

# TODO: several things broken in c2nim 0.9.18
# https://github.com/nim-lang/c2nim/issues/239
# https://github.com/nim-lang/c2nim/issues/240
# https://github.com/nim-lang/c2nim/issues/241
# https://github.com/nim-lang/c2nim/issues/242

c2nim --header --importc --nep1 --prefix:br_ --prefix:BR_ --skipinclude --cdecl --skipcomments gen/*.h

rm gen/*.h

# Fix cosmetic and ease-of-use issues
sed -i \
  -e "s/int16T/int16/g" \
  -e "s/int32T/int32/g" \
  -e "s/int64T/int64/g" \
  -e "s/cuchar/byte/g" \
  -e "s/cdecl/importcFunc/g" \
  -e "s/csize_t/uint/g" \
  gen/*.nim

# The functions taking a "Context" don't allow `nil` being passed to them - use
# `var` instead - ditto for "output" parameters like length
sed -i \
  -e 's/ctx: ptr \(.*\)Context/ctx: var \1Context/g' \
  -e 's/ctx: ptr \(.*\)Keys/ctx: var \1Keys/g' \
  -e 's/hc: ptr \(.*\)Context/hc: var \1Context/g' \
  -e 's/sc: ptr \(.*\)Context/sc: var \1Context/g' \
  -e 's/cc: ptr \(.*\)Context/cc: var \1Context/g' \
  -e 's/kc: ptr \(.*\)Context/kc: var \1Context/g' \
  -e 's/xwc: ptr \(.*\)Context/xwc: var \1Context/g' \
  -e 's/len: ptr uint/len: var uint/g' \
  gen/*.nim
5     vendor/nim-bearssl/tests/nim.cfg  (vendored, new file)
@ -0,0 +1,5 @@
# Avoid some rare stack corruption while using exceptions with a SEH-enabled
# toolchain: https://github.com/status-im/nimbus-eth2/issues/3121
@if windows and not vcc:
  --define:nimRawSetjmp
@end
@ -1,6 +1,8 @@
import std/[strutils, sequtils],
  unittest2,
  ../bearssl
  ../bearssl/hash

{.used.}

suite "Hashing":
  test "MD5":
@ -25,7 +27,7 @@ suite "Hashing":
      ctx = Md5Context()
      res: array[md5SIZE, uint8]

    md5Init(addr ctx)
    md5Update(addr ctx, input[i].cstring, input[i].len)
    md5Out(addr ctx, addr res[0])
    md5Init(ctx)
    md5Update(ctx, input[i].cstring, uint input[i].len)
    md5Out(ctx, addr res[0])
    check res.foldl(a & b.toHex(), "").toLower() == output[i]
11    vendor/nim-bearssl/tests/test_import.nim  (vendored, new file)
@ -0,0 +1,11 @@
# Test the full thing, given we do lots of compile and import tricks

import ../bearssl

# TODO doesn't work from C++ due to missing `export "C"`
# discard getConfig()

# TODO doesn't work from C++ due to `const`:ness issues
# discard ecGetDefault()

discard ghashPwr8Get()
32    vendor/nim-bearssl/tests/test_pem.nim  (vendored, new file)
@ -0,0 +1,32 @@
import
  unittest2,
  ../bearssl/pem

suite "PEM":
  test "roundtrip":
    let
      data = [byte 0, 1, 2, 3]
      pem = pemEncode(data, "")

    var
      ctx: PemDecoderContext
      called = false

    ctx.init()

    proc test(dctx: pointer, data: pointer, len: uint) {.cdecl.} =
      cast[ptr bool](dctx)[] = true

    ctx.setdest(test, addr called)

    var read = 0
    while read < pem.len:
      let
        consumed = ctx.push(pem.toOpenArray(read, pem.high))
      read += consumed
      if read < pem.len:
        check: ctx.lastEvent > 0

    check:
      pem.len > data.len
      called
62    vendor/nim-bearssl/tests/test_rand.nim  (vendored, new file)
@ -0,0 +1,62 @@
|
||||
import
|
||||
unittest2,
|
||||
../bearssl/rand
|
||||
|
||||
{.used.}
|
||||
|
||||
suite "random":
|
||||
test "simple random ops":
|
||||
# Some of these tests may end up triggering false fails, but given their
|
||||
# probability, should be fine
|
||||
|
||||
let rng = HmacDrbgContext.new()
|
||||
|
||||
var v: array[1024, byte]
|
||||
rng[].generate(v)
|
||||
|
||||
let v2 = rng[].generate(array[1024, byte])
|
||||
check:
|
||||
v != default(array[1024, byte]) # probable
|
||||
v2 != default(array[1024, byte]) # probable
|
||||
|
||||
for i in 0..<1000:
|
||||
doAssert cast[int](rng[].generate(bool)) in [0, 1]
|
||||
|
||||
var bools: array[64 * 1024, bool]
|
||||
rng[].generate(bools)
|
||||
|
||||
check:
|
||||
true in bools # probable
|
||||
false in bools # probable
|
||||
|
||||
var
|
||||
xxx = newSeq[int](1024)
|
||||
yyy = xxx
|
||||
rng[].generate(xxx)
|
||||
check:
|
||||
xxx != yyy # probable
|
||||
|
||||
test "seed":
|
||||
for seed in [@[byte 0], @[byte 1], @[byte 1, 1], @[byte 42, 13, 37]]:
|
||||
var
|
||||
rng = HmacDrbgContext.init(seed)
|
||||
rng2 = HmacDrbgContext.init(seed)
|
||||
|
||||
check:
|
||||
rng.generate(uint64) == rng2.generate(uint64)
|
||||
|
||||
for seed in [@[0], @[1], @[1, 1], @[42, 1337, -5]]:
|
||||
var
|
||||
rng = HmacDrbgContext.init(seed)
|
||||
rng2 = HmacDrbgContext.init(seed)
|
||||
|
||||
check:
|
||||
rng.generate(uint64) == rng2.generate(uint64)
|
||||
|
||||
test "antiseed":
|
||||
var
|
||||
rng = HmacDrbgContext.init([0])
|
||||
rng2 = HmacDrbgContext.init([1])
|
||||
|
||||
check:
|
||||
rng.generate(array[1024, byte]) != rng2.generate(array[1024, byte])
|
@ -26,11 +26,11 @@ jobs:
include:
  - target:
      os: linux
    builder: ubuntu-18.04
    builder: ubuntu-20.04
    shell: bash
  - target:
      os: macos
    builder: macos-10.15
    builder: macos-11
    shell: bash
  - target:
      os: windows
2     vendor/nim-chronicles/chronicles.nimble  (vendored)
@ -1,7 +1,7 @@
mode = ScriptMode.Verbose

packageName = "chronicles"
version = "0.10.2"
version = "0.10.3"
author = "Status Research & Development GmbH"
description = "A crafty implementation of structured logging for Nim"
license = "Apache License 2.0"
5     vendor/nim-chronicles/config.nims  (vendored)
@ -2,3 +2,8 @@
when fileExists("nimble.paths"):
  include "nimble.paths"
# end Nimble config

when (NimMajor, NimMinor) < (1, 6):
  switch("styleCheck", "hint")
else:
  switch("styleCheck", "error")
2     vendor/nim-chronicles/nim.cfg  (vendored)
@ -1,3 +1,3 @@
--path: "."
--styleCheck:usages
--styleCheck:error
# --styleCheck:error # set up in config.nims
118   vendor/nim-chronicles/nimble.lock  (vendored, new file)
@ -0,0 +1,118 @@
|
||||
{
|
||||
"version": 1,
|
||||
"packages": {
|
||||
"stew": {
|
||||
"version": "0.1.0",
|
||||
"vcsRevision": "6ad35b876fb6ebe0dfee0f697af173acc47906ee",
|
||||
"url": "https://github.com/status-im/nim-stew.git",
|
||||
"downloadMethod": "git",
|
||||
"dependencies": [],
|
||||
"checksums": {
|
||||
"sha1": "46d58c4feb457f3241e3347778334e325dce5268"
|
||||
}
|
||||
},
|
||||
"unittest2": {
|
||||
"version": "0.0.4",
|
||||
"vcsRevision": "f180f596c88dfd266f746ed6f8dbebce39c824db",
|
||||
"url": "https://github.com/status-im/nim-unittest2.git",
|
||||
"downloadMethod": "git",
|
||||
"dependencies": [],
|
||||
"checksums": {
|
||||
"sha1": "fa309c41eaf6ef57895b9e603f2620a2f6e11780"
|
||||
}
|
||||
},
|
||||
"testutils": {
|
||||
"version": "0.4.2",
|
||||
"vcsRevision": "aa6e5216f4b4ab5aa971cdcdd70e1ec1203cedf2",
|
||||
"url": "https://github.com/status-im/nim-testutils",
|
||||
"downloadMethod": "git",
|
||||
"dependencies": [
|
||||
"unittest2"
|
||||
],
|
||||
"checksums": {
|
||||
"sha1": "94427e0cce0e0c5841edcd3a6530b4e6b857a3cb"
|
||||
}
|
||||
},
|
||||
"bearssl": {
|
||||
"version": "0.1.5",
|
||||
"vcsRevision": "0a82a068280e4e9fb7a4936d92f1d0991c3bb363",
|
||||
"url": "https://github.com/status-im/nim-bearssl.git",
|
||||
"downloadMethod": "git",
|
||||
"dependencies": [
|
||||
"unittest2"
|
||||
],
|
||||
"checksums": {
|
||||
"sha1": "9a34d4a65881261099270a22b34d49c9ca6af372"
|
||||
}
|
||||
},
|
||||
"httputils": {
|
||||
"version": "0.3.0",
|
||||
"vcsRevision": "689da19e9e9cfff4ced85e2b25c6b2b5598ed079",
|
||||
"url": "https://github.com/status-im/nim-http-utils.git",
|
||||
"downloadMethod": "git",
|
||||
"dependencies": [
|
||||
"stew"
|
||||
],
|
||||
"checksums": {
|
||||
"sha1": "4ad3ad68d13c50184180ab4b2eacc0bd7ed2ed44"
|
||||
}
|
||||
},
|
||||
"chronos": {
|
||||
"version": "3.0.11",
|
||||
"vcsRevision": "17fed89c99beac5a92d3668d0d3e9b0e4ac13936",
|
||||
"url": "https://github.com/status-im/nim-chronos.git",
|
||||
"downloadMethod": "git",
|
||||
"dependencies": [
|
||||
"stew",
|
||||
"bearssl",
|
||||
"httputils",
|
||||
"unittest2"
|
||||
],
|
||||
"checksums": {
|
||||
"sha1": "f6fffc87571e5f76af2a77c4ebcc0e00909ced4e"
|
||||
}
|
||||
},
|
||||
"faststreams": {
|
||||
"version": "0.3.0",
|
||||
"vcsRevision": "1b561a9e71b6bdad1c1cdff753418906037e9d09",
|
||||
"url": "https://github.com/status-im/nim-faststreams.git",
|
||||
"downloadMethod": "git",
|
||||
"dependencies": [
|
||||
"stew",
|
||||
"testutils",
|
||||
"chronos",
|
||||
"unittest2"
|
||||
],
|
||||
"checksums": {
|
||||
"sha1": "97edf9797924af48566a0af8267203dc21d80c77"
|
||||
}
|
||||
},
|
||||
"serialization": {
|
||||
"version": "0.1.0",
|
||||
"vcsRevision": "fcd0eadadde0ee000a63df8ab21dc4e9f015a790",
|
||||
"url": "https://github.com/status-im/nim-serialization.git",
|
||||
"downloadMethod": "git",
|
||||
"dependencies": [
|
||||
"faststreams",
|
||||
"unittest2",
|
||||
"stew"
|
||||
],
|
||||
"checksums": {
|
||||
"sha1": "fef59519892cac70cccd81b612085caaa5e3e6cf"
|
||||
}
|
||||
},
|
||||
"json_serialization": {
|
||||
"version": "0.1.0",
|
||||
"vcsRevision": "c5f0e2465e8375dfc7aa0f56ccef67cb680bc6b0",
|
||||
"url": "https://github.com/status-im/nim-json-serialization.git",
|
||||
"downloadMethod": "git",
|
||||
"dependencies": [
|
||||
"serialization",
|
||||
"stew"
|
||||
],
|
||||
"checksums": {
|
||||
"sha1": "d89d79d0679a3a41b350e3ad4be56c0308cc5ec6"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
2     vendor/nim-chronos/chronos.nimble  (vendored)
@ -11,7 +11,7 @@ requires "nim > 1.2.0",
  "stew",
  "bearssl",
  "httputils",
  "https://github.com/status-im/nim-unittest2.git#head"
  "unittest2"

var commandStart = "nim c -r --hints:off --verbosity:0 --skipParentCfg:on --warning[ObservableStores]:off --styleCheck:usages --styleCheck:error"
@ -157,6 +157,7 @@ type
|
||||
contentEncoding*: set[ContentEncodingFlags]
|
||||
transferEncoding*: set[TransferEncodingFlags]
|
||||
contentLength*: uint64
|
||||
contentType*: Opt[ContentTypeData]
|
||||
|
||||
HttpClientResponseRef* = ref HttpClientResponse
|
||||
|
||||
@ -783,13 +784,25 @@ proc prepareResponse(request: HttpClientRequestRef, data: openArray[byte]
|
||||
else:
|
||||
false
|
||||
|
||||
let contentType =
|
||||
block:
|
||||
let list = headers.getList(ContentTypeHeader)
|
||||
if len(list) > 0:
|
||||
let res = getContentType(list)
|
||||
if res.isErr():
|
||||
return err("Invalid headers received, invalid `Content-Type`")
|
||||
else:
|
||||
Opt.some(res.get())
|
||||
else:
|
||||
Opt.none(ContentTypeData)
|
||||
|
||||
let res = HttpClientResponseRef(
|
||||
state: HttpReqRespState.Open, status: resp.code,
|
||||
address: request.address, requestMethod: request.meth,
|
||||
reason: resp.reason(data), version: resp.version, session: request.session,
|
||||
connection: request.connection, headers: headers,
|
||||
contentEncoding: contentEncoding, transferEncoding: transferEncoding,
|
||||
contentLength: contentLength, bodyFlag: bodyFlag
|
||||
contentLength: contentLength, contentType: contentType, bodyFlag: bodyFlag
|
||||
)
|
||||
res.connection.state = HttpClientConnectionState.ResponseHeadersReceived
|
||||
if nobodyFlag:
|
||||
|
@ -32,8 +32,8 @@ const
|
||||
LocationHeader* = "location"
|
||||
AuthorizationHeader* = "authorization"
|
||||
|
||||
UrlEncodedContentType* = "application/x-www-form-urlencoded"
|
||||
MultipartContentType* = "multipart/form-data"
|
||||
UrlEncodedContentType* = MediaType.init("application/x-www-form-urlencoded")
|
||||
MultipartContentType* = MediaType.init("multipart/form-data")
|
||||
|
||||
type
|
||||
HttpResult*[T] = Result[T, string]
|
||||
@ -193,7 +193,7 @@ func getContentEncoding*(ch: openArray[string]): HttpResult[
|
||||
return err("Incorrect Content-Encoding value")
|
||||
ok(res)
|
||||
|
||||
func getContentType*(ch: openArray[string]): HttpResult[string] {.
|
||||
func getContentType*(ch: openArray[string]): HttpResult[ContentTypeData] {.
|
||||
raises: [Defect].} =
|
||||
## Check and prepare value of ``Content-Type`` header.
|
||||
if len(ch) == 0:
|
||||
@ -201,8 +201,10 @@ func getContentType*(ch: openArray[string]): HttpResult[string] {.
|
||||
elif len(ch) > 1:
|
||||
err("Multiple Content-Type values found")
|
||||
else:
|
||||
let mparts = ch[0].split(";")
|
||||
ok(strip(mparts[0]).toLowerAscii())
|
||||
let res = getContentType(ch[0])
|
||||
if res.isErr():
|
||||
return err($res.error())
|
||||
ok(res.get())
|
||||
|
||||
proc bytesToString*(src: openArray[byte], dst: var openArray[char]) =
|
||||
## Convert array of bytes to array of characters.
|
||||
|
203   vendor/nim-chronos/chronos/apps/http/httpserver.nim  (vendored)
@ -67,7 +67,7 @@ type
|
||||
address*: TransportAddress
|
||||
# semaphore*: AsyncSemaphore
|
||||
maxConnections*: int
|
||||
backlogSize: int
|
||||
backlogSize*: int
|
||||
baseUri*: Uri
|
||||
serverIdent*: string
|
||||
flags*: set[HttpServerFlags]
|
||||
@ -75,12 +75,12 @@ type
|
||||
connections*: Table[string, Future[void]]
|
||||
acceptLoop*: Future[void]
|
||||
lifetime*: Future[void]
|
||||
headersTimeout: Duration
|
||||
bufferSize: int
|
||||
maxHeadersSize: int
|
||||
maxRequestBodySize: int
|
||||
processCallback: HttpProcessCallback
|
||||
createConnCallback: HttpConnectionCallback
|
||||
headersTimeout*: Duration
|
||||
bufferSize*: int
|
||||
maxHeadersSize*: int
|
||||
maxRequestBodySize*: int
|
||||
processCallback*: HttpProcessCallback
|
||||
createConnCallback*: HttpConnectionCallback
|
||||
|
||||
HttpServerRef* = ref HttpServer
|
||||
|
||||
@ -98,6 +98,7 @@ type
|
||||
transferEncoding*: set[TransferEncodingFlags]
|
||||
requestFlags*: set[HttpRequestFlags]
|
||||
contentLength: int
|
||||
contentTypeData*: Option[ContentTypeData]
|
||||
connection*: HttpConnectionRef
|
||||
response*: Option[HttpResponseRef]
|
||||
|
||||
@ -135,46 +136,6 @@ proc init(htype: typedesc[HttpProcessError], error: HttpServerError,
|
||||
code: HttpCode): HttpProcessError {.raises: [Defect].} =
|
||||
HttpProcessError(error: error, exc: exc, remote: remote, code: code)
|
||||
|
||||
proc init*(value: var HttpServer,
|
||||
address: TransportAddress,
|
||||
server: StreamServer,
|
||||
processCallback: HttpProcessCallback,
|
||||
createConnCallback: HttpConnectionCallback,
|
||||
serverUri: Uri,
|
||||
serverFlags: set[HttpServerFlags] = {},
|
||||
socketFlags: set[ServerFlags] = {ReuseAddr},
|
||||
serverIdent = "",
|
||||
maxConnections: int = -1,
|
||||
bufferSize: int = 4096,
|
||||
backlogSize: int = 100,
|
||||
httpHeadersTimeout = 10.seconds,
|
||||
maxHeadersSize: int = 8192,
|
||||
maxRequestBodySize: int = 1_048_576) =
|
||||
|
||||
value = HttpServer(
|
||||
address: address,
|
||||
instance: server,
|
||||
processCallback: processCallback,
|
||||
createConnCallback: createConnCallback,
|
||||
baseUri: serverUri,
|
||||
serverIdent: serverIdent,
|
||||
flags: serverFlags,
|
||||
socketFlags: socketFlags,
|
||||
maxConnections: maxConnections,
|
||||
bufferSize: bufferSize,
|
||||
backlogSize: backlogSize,
|
||||
headersTimeout: httpHeadersTimeout,
|
||||
maxHeadersSize: maxHeadersSize,
|
||||
maxRequestBodySize: maxRequestBodySize,
|
||||
# semaphore:
|
||||
# if maxConnections > 0:
|
||||
# newAsyncSemaphore(maxConnections)
|
||||
# else:
|
||||
# nil
|
||||
lifetime: newFuture[void]("http.server.lifetime"),
|
||||
connections: initTable[string, Future[void]]()
|
||||
)
|
||||
|
||||
proc createConnection(server: HttpServerRef,
|
||||
transp: StreamTransport): Future[HttpConnectionRef] {.
|
||||
gcsafe.}
|
||||
@ -191,7 +152,8 @@ proc new*(htype: typedesc[HttpServerRef],
|
||||
backlogSize: int = 100,
|
||||
httpHeadersTimeout = 10.seconds,
|
||||
maxHeadersSize: int = 8192,
|
||||
maxRequestBodySize: int = 1_048_576): HttpResult[HttpServerRef] =
|
||||
maxRequestBodySize: int = 1_048_576): HttpResult[HttpServerRef] {.
|
||||
raises: [Defect].} =
|
||||
|
||||
let serverUri =
|
||||
if len(serverUri.hostname) > 0:
|
||||
@ -211,11 +173,29 @@ proc new*(htype: typedesc[HttpServerRef],
|
||||
except CatchableError as exc:
|
||||
return err(exc.msg)
|
||||
|
||||
var res = HttpServerRef()
|
||||
res[].init(address, serverInstance, processCallback, createConnection,
|
||||
serverUri, serverFlags, socketFlags, serverIdent, maxConnections,
|
||||
bufferSize, backlogSize, httpHeadersTimeout, maxHeadersSize,
|
||||
maxRequestBodySize)
|
||||
var res = HttpServerRef(
|
||||
address: address,
|
||||
instance: serverInstance,
|
||||
processCallback: processCallback,
|
||||
createConnCallback: createConnection,
|
||||
baseUri: serverUri,
|
||||
serverIdent: serverIdent,
|
||||
flags: serverFlags,
|
||||
socketFlags: socketFlags,
|
||||
maxConnections: maxConnections,
|
||||
bufferSize: bufferSize,
|
||||
backlogSize: backlogSize,
|
||||
headersTimeout: httpHeadersTimeout,
|
||||
maxHeadersSize: maxHeadersSize,
|
||||
maxRequestBodySize: maxRequestBodySize,
|
||||
# semaphore:
|
||||
# if maxConnections > 0:
|
||||
# newAsyncSemaphore(maxConnections)
|
||||
# else:
|
||||
# nil
|
||||
lifetime: newFuture[void]("http.server.lifetime"),
|
||||
connections: initTable[string, Future[void]]()
|
||||
)
|
||||
ok(res)
|
||||
|
||||
proc getResponse*(req: HttpRequestRef): HttpResponseRef {.raises: [Defect].} =
|
||||
@ -345,9 +325,10 @@ proc prepareRequest(conn: HttpConnectionRef,
|
||||
# steps to reveal information about body.
|
||||
if ContentLengthHeader in request.headers:
|
||||
let length = request.headers.getInt(ContentLengthHeader)
|
||||
if length > 0:
|
||||
if length >= 0:
|
||||
if request.meth == MethodTrace:
|
||||
return err(Http400)
|
||||
# Because of coversion to `int` we should avoid unexpected OverflowError.
|
||||
if length > uint64(high(int)):
|
||||
return err(Http413)
|
||||
if length > uint64(conn.server.maxRequestBodySize):
|
||||
@ -363,12 +344,14 @@ proc prepareRequest(conn: HttpConnectionRef,
|
||||
if request.hasBody():
|
||||
# If request has body, we going to understand how its encoded.
|
||||
if ContentTypeHeader in request.headers:
|
||||
let contentType = request.headers.getString(ContentTypeHeader)
|
||||
let tmp = strip(contentType).toLowerAscii()
|
||||
if tmp.startsWith(UrlEncodedContentType):
|
||||
let contentType =
|
||||
getContentType(request.headers.getList(ContentTypeHeader)).valueOr:
|
||||
return err(Http415)
|
||||
if contentType == UrlEncodedContentType:
|
||||
request.requestFlags.incl(HttpRequestFlags.UrlencodedForm)
|
||||
elif tmp.startsWith(MultipartContentType):
|
||||
elif contentType == MultipartContentType:
|
||||
request.requestFlags.incl(HttpRequestFlags.MultipartForm)
|
||||
request.contentTypeData = some(contentType)
|
||||
|
||||
if ExpectHeader in request.headers:
|
||||
let expectHeader = request.headers.getString(ExpectHeader)
|
||||
@ -488,9 +471,10 @@ proc preferredContentMediaType*(acceptHeader: string): MediaType =
|
||||
MediaType.init("*", "*")
|
||||
|
||||
proc preferredContentType*(acceptHeader: string,
|
||||
types: varargs[MediaType]): Result[MediaType, cstring] =
|
||||
## Match or obtain preferred content-type using ``Accept`` header specified by
|
||||
## string ``acceptHeader``.
|
||||
types: varargs[MediaType]
|
||||
): Result[MediaType, cstring] =
|
||||
## Match or obtain preferred content type using ``Accept`` header specified by
|
||||
## string ``acceptHeader`` and server preferred content types ``types``.
|
||||
##
|
||||
## If ``Accept`` header is missing in client's request - ``types[0]`` or
|
||||
## ``*/*`` value will be returned as result.
|
||||
@ -498,10 +482,14 @@ proc preferredContentType*(acceptHeader: string,
|
||||
## If ``Accept`` header has incorrect format in client's request -
|
||||
## ``types[0]`` or ``*/*`` value will be returned as result.
|
||||
##
|
||||
## If ``Accept`` header is present and has one or more content types supported
|
||||
## by client, the best value will be selected from ``types`` using
|
||||
## quality value (weight) reported in ``Accept`` header. If client do not
|
||||
## support any methods in ``types`` error will be returned.
|
||||
## If ``Accept`` header is present in request to server and it has one or more
|
||||
## content types supported by client, the best value will be selected from
|
||||
## ``types`` using position and quality value (weight) reported in ``Accept``
|
||||
## header. If client do not support any methods in ``types`` error
|
||||
## will be returned.
|
||||
##
|
||||
## Note: Quality value (weight) for content type has priority over server's
|
||||
## preferred content-type.
|
||||
if len(types) == 0:
|
||||
if len(acceptHeader) == 0:
|
||||
# If `Accept` header is missing, return `*/*`.
|
||||
@ -513,10 +501,19 @@ proc preferredContentType*(acceptHeader: string,
|
||||
ok(wildCardMediaType)
|
||||
else:
|
||||
let mediaTypes = res.get().data
|
||||
if len(mediaTypes) > 0:
|
||||
ok(mediaTypes[0].mediaType)
|
||||
else:
|
||||
var
|
||||
currentType = MediaType()
|
||||
currentWeight = 0.0
|
||||
# `Accept` header values array is not sorted, so we need to find value
|
||||
# with the biggest ``q-value``.
|
||||
for item in mediaTypes:
|
||||
if currentWeight < item.qvalue:
|
||||
currentType = item.mediaType
|
||||
currentWeight = item.qvalue
|
||||
if len(currentType.media) == 0 and len(currentType.subtype) == 0:
|
||||
ok(wildCardMediaType)
|
||||
else:
|
||||
ok(currentType)
|
||||
else:
|
||||
if len(acceptHeader) == 0:
|
||||
# If `Accept` header is missing, client accepts any type of content.
|
||||
@ -527,7 +524,32 @@ proc preferredContentType*(acceptHeader: string,
|
||||
# If `Accept` header is incorrect, client accepts any type of content.
|
||||
ok(types[0])
|
||||
else:
|
||||
selectContentType(ares.get().data, types)
|
||||
# ``maxWeight`` represents maximum possible weight value which can be
|
||||
# obtained.
|
||||
let maxWeight = (1.0, 0)
|
||||
var
|
||||
currentType = MediaType()
|
||||
currentIndex = -1
|
||||
currentWeight = (-1.0, 0)
|
||||
|
||||
for itemType in ares.get().data:
|
||||
let preferredIndex = types.find(itemType.mediaType)
|
||||
if preferredIndex != -1:
|
||||
let weight = (itemType.qvalue, -preferredIndex)
|
||||
if currentWeight < weight:
|
||||
currentType = types[preferredIndex]
|
||||
currentWeight = weight
|
||||
currentIndex = preferredIndex
|
||||
|
||||
if currentWeight == maxWeight:
|
||||
# There is no reason to continue search, because maximum possible
|
||||
# weight is already achieved, so this is the best match.
|
||||
break
|
||||
|
||||
if currentIndex == -1:
|
||||
err("Preferred content type not found")
|
||||
else:
|
||||
ok(currentType)
|
||||
|
||||
proc preferredContentMediaType*(request: HttpRequestRef): MediaType =
|
||||
## Returns preferred content-type using ``Accept`` header specified by
|
||||
@ -535,7 +557,8 @@ proc preferredContentMediaType*(request: HttpRequestRef): MediaType =
|
||||
preferredContentMediaType(request.headers.getString(AcceptHeaderName))
|
||||
|
||||
proc preferredContentType*(request: HttpRequestRef,
|
||||
types: varargs[MediaType]): Result[MediaType, cstring] =
|
||||
types: varargs[MediaType]
|
||||
): Result[MediaType, cstring] =
|
||||
## Match or obtain preferred content-type using ``Accept`` header specified by
|
||||
## client in request ``request``.
|
||||
preferredContentType(request.headers.getString(AcceptHeaderName), types)
|
||||
@ -920,19 +943,17 @@ proc join*(server: HttpServerRef): Future[void] =
|
||||
|
||||
retFuture
|
||||
|
||||
proc getMultipartReader*(req: HttpRequestRef): HttpResult[MultiPartReaderRef] =
|
||||
proc getMultipartReader*(req: HttpRequestRef): HttpResult[MultiPartReaderRef] {.
|
||||
raises: [Defect].} =
|
||||
## Create new MultiPartReader interface for specific request.
|
||||
if req.meth in PostMethods:
|
||||
if MultipartForm in req.requestFlags:
|
||||
let ctype = ? getContentType(req.headers.getList(ContentTypeHeader))
|
||||
if ctype != MultipartContentType:
|
||||
err("Content type is not supported")
|
||||
else:
|
||||
let boundary = ? getMultipartBoundary(
|
||||
req.headers.getList(ContentTypeHeader)
|
||||
)
|
||||
if req.contentTypeData.isSome():
|
||||
let boundary = ? getMultipartBoundary(req.contentTypeData.get())
|
||||
var stream = ? req.getBodyReader()
|
||||
ok(MultiPartReaderRef.new(stream, boundary))
|
||||
else:
|
||||
err("Content type is missing or invalid")
|
||||
else:
|
||||
err("Request's data is not multipart encoded")
|
||||
else:
|
||||
@ -1322,17 +1343,35 @@ proc respond*(req: HttpRequestRef, code: HttpCode): Future[HttpResponseRef] =
|
||||
## Responds to the request with specified ``HttpCode`` only.
|
||||
respond(req, code, "", HttpTable.init())
|
||||
|
||||
proc redirect*(req: HttpRequestRef, code: HttpCode,
|
||||
location: string, headers: HttpTable): Future[HttpResponseRef] =
|
||||
## Responds to the request with redirection to location ``location`` and
|
||||
## additional headers ``headers``.
|
||||
##
|
||||
## Note, ``location`` argument's value has priority over "Location" header's
|
||||
## value in ``headers`` argument.
|
||||
var mheaders = headers
|
||||
mheaders.set("location", location)
|
||||
respond(req, code, "", mheaders)
|
||||
|
||||
proc redirect*(req: HttpRequestRef, code: HttpCode,
|
||||
location: Uri, headers: HttpTable): Future[HttpResponseRef] =
|
||||
## Responds to the request with redirection to location ``location`` and
|
||||
## additional headers ``headers``.
|
||||
##
|
||||
## Note, ``location`` argument's value has priority over "Location" header's
|
||||
## value in ``headers`` argument.
|
||||
redirect(req, code, $location, headers)
|
||||
|
||||
proc redirect*(req: HttpRequestRef, code: HttpCode,
|
||||
location: Uri): Future[HttpResponseRef] =
|
||||
## Responds to the request with redirection to location ``location``.
|
||||
let headers = HttpTable.init([("location", $location)])
|
||||
respond(req, code, "", headers)
|
||||
redirect(req, code, location, HttpTable.init())
|
||||
|
||||
proc redirect*(req: HttpRequestRef, code: HttpCode,
|
||||
location: string): Future[HttpResponseRef] =
|
||||
## Responds to the request with redirection to location ``location``.
|
||||
let headers = HttpTable.init([("location", location)])
|
||||
respond(req, code, "", headers)
|
||||
redirect(req, code, location, HttpTable.init())
|
||||
|
||||
proc responded*(req: HttpRequestRef): bool =
|
||||
## Returns ``true`` if request ``req`` has been responded or responding.
|
||||
|
@ -10,7 +10,10 @@
|
||||
import std/[tables, strutils]
|
||||
import stew/base10
|
||||
|
||||
{.push raises: [Defect].}
|
||||
when (NimMajor, NimMinor) < (1, 4):
|
||||
{.push raises: [Defect].}
|
||||
else:
|
||||
{.push raises: [].}
|
||||
|
||||
type
|
||||
HttpTable* = object
|
||||
|
@ -8,11 +8,11 @@
|
||||
# Apache License, version 2.0, (LICENSE-APACHEv2)
|
||||
# MIT license (LICENSE-MIT)
|
||||
import std/[monotimes, strutils]
|
||||
import stew/results
|
||||
import stew/results, httputils
|
||||
import ../../asyncloop
|
||||
import ../../streams/[asyncstream, boundstream, chunkstream]
|
||||
import httptable, httpcommon, httpbodyrw
|
||||
export asyncloop, httptable, httpcommon, httpbodyrw, asyncstream
|
||||
export asyncloop, httptable, httpcommon, httpbodyrw, asyncstream, httputils
|
||||
|
||||
const
|
||||
UnableToReadMultipartBody = "Unable to read multipart message body"
|
||||
@ -439,55 +439,25 @@ func validateBoundary[B: BChar](boundary: openArray[B]): HttpResult[void] =
|
||||
return err("Content-Type boundary alphabet incorrect")
|
||||
ok()
|
||||
|
||||
func getMultipartBoundary*(ch: openArray[string]): HttpResult[string] {.
|
||||
func getMultipartBoundary*(contentData: ContentTypeData): HttpResult[string] {.
|
||||
raises: [Defect].} =
|
||||
## Returns ``multipart/form-data`` boundary value from ``Content-Type``
|
||||
## header.
|
||||
##
|
||||
## The procedure carries out all the necessary checks:
|
||||
## 1) There should be single `Content-Type` header value in headers.
|
||||
## 2) `Content-Type` must be ``multipart/form-data``.
|
||||
## 3) `boundary` value must be present
|
||||
## 4) `boundary` value must be less then 70 characters length and
|
||||
## 1) `boundary` value must be present.
|
||||
## 2) `boundary` value must be less then 70 characters length and
|
||||
## all characters should be part of specific alphabet.
|
||||
if len(ch) > 1:
|
||||
err("Multiple Content-Type headers found")
|
||||
else:
|
||||
if len(ch) == 0:
|
||||
err("Content-Type header is missing")
|
||||
else:
|
||||
if len(ch[0]) == 0:
|
||||
return err("Content-Type header has empty value")
|
||||
let mparts = ch[0].split(";")
|
||||
if strip(mparts[0]).toLowerAscii() != "multipart/form-data":
|
||||
return err("Content-Type is not multipart")
|
||||
if len(mparts) < 2:
|
||||
return err("Content-Type missing boundary value")
|
||||
|
||||
let index =
|
||||
block:
|
||||
var idx = 0
|
||||
for i in 1 ..< len(mparts):
|
||||
let stripped = strip(mparts[i])
|
||||
if stripped.toLowerAscii().startsWith("boundary="):
|
||||
idx = i
|
||||
break
|
||||
idx
|
||||
|
||||
if index == 0:
|
||||
err("Missing Content-Type boundary key")
|
||||
else:
|
||||
let stripped = strip(mparts[index])
|
||||
let bparts = stripped.split("=", 1)
|
||||
if len(bparts) < 2:
|
||||
err("Missing Content-Type boundary")
|
||||
else:
|
||||
let candidate = strip(bparts[1])
|
||||
let res = validateBoundary(candidate)
|
||||
if res.isErr():
|
||||
err($res.error())
|
||||
else:
|
||||
ok(candidate)
|
||||
let candidate =
|
||||
block:
|
||||
var res: string
|
||||
for item in contentData.params:
|
||||
if cmpIgnoreCase(item.name, "boundary") == 0:
|
||||
res = item.value
|
||||
break
|
||||
res
|
||||
? validateBoundary(candidate)
|
||||
ok(candidate)
|
||||
|
||||
proc quoteCheck(name: string): HttpResult[string] =
|
||||
if len(name) > 0:
|
||||
|
@ -70,7 +70,7 @@ proc new*(htype: typedesc[SecureHttpServerRef],
|
||||
httpHeadersTimeout = 10.seconds,
|
||||
maxHeadersSize: int = 8192,
|
||||
maxRequestBodySize: int = 1_048_576
|
||||
): HttpResult[SecureHttpServerRef] =
|
||||
): HttpResult[SecureHttpServerRef] {.raises: [Defect].} =
|
||||
|
||||
doAssert(not(isNil(tlsPrivateKey)), "TLS private key must not be nil!")
|
||||
doAssert(not(isNil(tlsCertificate)), "TLS certificate must not be nil!")
|
||||
@ -93,13 +93,30 @@ proc new*(htype: typedesc[SecureHttpServerRef],
|
||||
except CatchableError as exc:
|
||||
return err(exc.msg)
|
||||
|
||||
var res = SecureHttpServerRef()
|
||||
HttpServer(res[]).init(address, serverInstance, processCallback,
|
||||
createSecConnection, serverUri, serverFlags,
|
||||
socketFlags, serverIdent, maxConnections,
|
||||
bufferSize, backlogSize, httpHeadersTimeout,
|
||||
maxHeadersSize, maxRequestBodySize)
|
||||
res.tlsCertificate = tlsCertificate
|
||||
res.tlsPrivateKey = tlsPrivateKey
|
||||
res.secureFlags = secureFlags
|
||||
let res = SecureHttpServerRef(
|
||||
address: address,
|
||||
instance: serverInstance,
|
||||
processCallback: processCallback,
|
||||
createConnCallback: createSecConnection,
|
||||
baseUri: serverUri,
|
||||
serverIdent: serverIdent,
|
||||
flags: serverFlags,
|
||||
socketFlags: socketFlags,
|
||||
maxConnections: maxConnections,
|
||||
bufferSize: bufferSize,
|
||||
backlogSize: backlogSize,
|
||||
headersTimeout: httpHeadersTimeout,
|
||||
maxHeadersSize: maxHeadersSize,
|
||||
maxRequestBodySize: maxRequestBodySize,
|
||||
# semaphore:
|
||||
# if maxConnections > 0:
|
||||
# newAsyncSemaphore(maxConnections)
|
||||
# else:
|
||||
# nil
|
||||
lifetime: newFuture[void]("http.server.lifetime"),
|
||||
connections: initTable[string, Future[void]](),
|
||||
tlsCertificate: tlsCertificate,
|
||||
tlsPrivateKey: tlsPrivateKey,
|
||||
secureFlags: secureFlags
|
||||
)
|
||||
ok(res)
|
||||
|
57
vendor/nim-chronos/chronos/asyncloop.nim
vendored
57
vendor/nim-chronos/chronos/asyncloop.nim
vendored
@ -8,7 +8,10 @@
|
||||
# Apache License, version 2.0, (LICENSE-APACHEv2)
|
||||
# MIT license (LICENSE-MIT)
|
||||
|
||||
{.push raises: [Defect].}
|
||||
when (NimMajor, NimMinor) < (1, 4):
|
||||
{.push raises: [Defect].}
|
||||
else:
|
||||
{.push raises: [].}
|
||||
|
||||
import std/[os, tables, strutils, heapqueue, lists, options, nativesockets, net,
|
||||
deques]
|
||||
@ -174,7 +177,13 @@ when defined(windows):
|
||||
elif unixPlatform:
|
||||
import ./selectors2
|
||||
from posix import EINTR, EAGAIN, EINPROGRESS, EWOULDBLOCK, MSG_PEEK,
|
||||
MSG_NOSIGNAL, SIGPIPE
|
||||
MSG_NOSIGNAL
|
||||
from posix import SIGHUP, SIGINT, SIGQUIT, SIGILL, SIGTRAP, SIGABRT,
|
||||
SIGBUS, SIGFPE, SIGKILL, SIGUSR1, SIGSEGV, SIGUSR2,
|
||||
SIGPIPE, SIGALRM, SIGTERM, SIGPIPE
|
||||
export SIGHUP, SIGINT, SIGQUIT, SIGILL, SIGTRAP, SIGABRT,
|
||||
SIGBUS, SIGFPE, SIGKILL, SIGUSR1, SIGSEGV, SIGUSR2,
|
||||
SIGPIPE, SIGALRM, SIGTERM, SIGPIPE
|
||||
|
||||
type
|
||||
CallbackFunc* = proc (arg: pointer) {.gcsafe, raises: [Defect].}
|
||||
@ -851,6 +860,50 @@ proc callIdle*(cbproc: CallbackFunc) {.gcsafe, raises: [Defect].} =
|
||||
|
||||
include asyncfutures2
|
||||
|
||||
when not(defined(windows)):
|
||||
when ioselSupportedPlatform:
|
||||
proc waitSignal*(signal: int): Future[void] {.
|
||||
raises: [Defect].} =
|
||||
var retFuture = newFuture[void]("chronos.waitSignal()")
|
||||
var sigfd: int = -1
|
||||
|
||||
template getSignalException(e: untyped): untyped =
|
||||
newException(AsyncError, "Could not manipulate signal handler, " &
|
||||
"reason [" & $e.name & "]: " & $e.msg)
|
||||
|
||||
proc continuation(udata: pointer) {.gcsafe.} =
|
||||
if not(retFuture.finished()):
|
||||
if sigfd != -1:
|
||||
try:
|
||||
removeSignal(sigfd)
|
||||
retFuture.complete()
|
||||
except IOSelectorsException as exc:
|
||||
retFuture.fail(getSignalException(exc))
|
||||
|
||||
proc cancellation(udata: pointer) {.gcsafe.} =
|
||||
if not(retFuture.finished()):
|
||||
if sigfd != -1:
|
||||
try:
|
||||
removeSignal(sigfd)
|
||||
except IOSelectorsException as exc:
|
||||
retFuture.fail(getSignalException(exc))
|
||||
|
||||
sigfd =
|
||||
try:
|
||||
addSignal(signal, continuation)
|
||||
except IOSelectorsException as exc:
|
||||
retFuture.fail(getSignalException(exc))
|
||||
return retFuture
|
||||
except ValueError as exc:
|
||||
retFuture.fail(getSignalException(exc))
|
||||
return retFuture
|
||||
except OSError as exc:
|
||||
retFuture.fail(getSignalException(exc))
|
||||
return retFuture
|
||||
|
||||
retFuture.cancelCallback = cancellation
|
||||
retFuture
|
||||
|
||||
proc sleepAsync*(duration: Duration): Future[void] =
|
||||
## Suspends the execution of the current async procedure for the next
|
||||
## ``duration`` time.
|
||||
|
261
vendor/nim-chronos/chronos/asyncsync.nim
vendored
261
vendor/nim-chronos/chronos/asyncsync.nim
vendored
@ -10,9 +10,12 @@
|
||||
|
||||
## This module implements some core synchronization primitives.
|
||||
|
||||
{.push raises: [Defect].}
|
||||
when (NimMajor, NimMinor) < (1, 4):
|
||||
{.push raises: [Defect].}
|
||||
else:
|
||||
{.push raises: [].}
|
||||
|
||||
import std/[sequtils, deques, tables, typetraits]
|
||||
import std/[sequtils, math, deques, tables, typetraits]
|
||||
import ./asyncloop
|
||||
export asyncloop
|
||||
|
||||
@ -55,11 +58,11 @@ type
|
||||
queue: Deque[T]
|
||||
maxsize: int
|
||||
|
||||
AsyncQueueEmptyError* = object of CatchableError
|
||||
AsyncQueueEmptyError* = object of AsyncError
|
||||
## ``AsyncQueue`` is empty.
|
||||
AsyncQueueFullError* = object of CatchableError
|
||||
AsyncQueueFullError* = object of AsyncError
|
||||
## ``AsyncQueue`` is full.
|
||||
AsyncLockError* = object of CatchableError
|
||||
AsyncLockError* = object of AsyncError
|
||||
## ``AsyncLock`` is either locked or unlocked.
|
||||
|
||||
EventBusSubscription*[T] = proc(bus: AsyncEventBus,
|
||||
@ -106,6 +109,23 @@ type
|
||||
eventName: string
|
||||
payload: EventPayloadBase
|
||||
|
||||
AsyncEventQueueFullError* = object of AsyncError
|
||||
|
||||
EventQueueKey* = distinct uint64
|
||||
|
||||
EventQueueReader* = object
|
||||
key: EventQueueKey
|
||||
offset: int
|
||||
waiter: Future[void]
|
||||
overflow: bool
|
||||
|
||||
AsyncEventQueue*[T] = ref object of RootObj
|
||||
readers: seq[EventQueueReader]
|
||||
queue: Deque[T]
|
||||
counter: uint64
|
||||
limit: int
|
||||
offset: int
|
||||
|
||||
proc newAsyncLock*(): AsyncLock =
|
||||
## Creates new asynchronous lock ``AsyncLock``.
|
||||
##
|
||||
@ -448,7 +468,9 @@ proc `$`*[T](aq: AsyncQueue[T]): string =
|
||||
template generateKey(typeName, eventName: string): string =
|
||||
"type[" & typeName & "]-key[" & eventName & "]"
|
||||
|
||||
proc newAsyncEventBus*(): AsyncEventBus =
|
||||
proc newAsyncEventBus*(): AsyncEventBus {.
|
||||
deprecated: "Implementation has unfixable flaws, please use" &
|
||||
"AsyncEventQueue[T] instead".} =
|
||||
## Creates new ``AsyncEventBus``.
|
||||
AsyncEventBus(counter: 0'u64, events: initTable[string, EventItem]())
|
||||
|
||||
@ -460,7 +482,9 @@ template location*(payload: EventPayloadBase): SrcLoc =
|
||||
## Returns source location address of event emitter.
|
||||
payload.loc[]
|
||||
|
||||
proc get*(event: AwaitableEvent, T: typedesc): T =
|
||||
proc get*(event: AwaitableEvent, T: typedesc): T {.
|
||||
deprecated: "Implementation has unfixable flaws, please use " &
|
||||
"AsyncEventQueue[T] instead".} =
|
||||
## Returns event's payload of type ``T`` from event ``event``.
|
||||
cast[EventPayload[T]](event.payload).value
|
||||
|
||||
@ -472,7 +496,9 @@ template location*(event: AwaitableEvent): SrcLoc =
|
||||
## Returns source location address of event emitter.
|
||||
event.payload.loc[]
|
||||
|
||||
proc waitEvent*(bus: AsyncEventBus, T: typedesc, event: string): Future[T] =
|
||||
proc waitEvent*(bus: AsyncEventBus, T: typedesc, event: string): Future[T] {.
|
||||
deprecated: "Implementation has unfixable flaws, please use " &
|
||||
"AsyncEventQueue[T] instead".} =
|
||||
## Wait for the event from AsyncEventBus ``bus`` with name ``event``.
|
||||
##
|
||||
## Returned ``Future[T]`` will hold event's payload of type ``T``.
|
||||
@ -488,7 +514,9 @@ proc waitEvent*(bus: AsyncEventBus, T: typedesc, event: string): Future[T] =
|
||||
bus.events.mgetOrPut(eventKey, default).waiters.add(baseFuture)
|
||||
retFuture
|
||||
|
||||
proc waitAllEvents*(bus: AsyncEventBus): Future[AwaitableEvent] =
|
||||
proc waitAllEvents*(bus: AsyncEventBus): Future[AwaitableEvent] {.
|
||||
deprecated: "Implementation has unfixable flaws, please use " &
|
||||
"AsyncEventQueue[T] instead".} =
|
||||
## Wait for any event from AsyncEventBus ``bus``.
|
||||
##
|
||||
## Returns ``Future`` which holds helper object. Using this object you can
|
||||
@ -502,7 +530,9 @@ proc waitAllEvents*(bus: AsyncEventBus): Future[AwaitableEvent] =
|
||||
retFuture
|
||||
|
||||
proc subscribe*[T](bus: AsyncEventBus, event: string,
|
||||
callback: EventBusSubscription[T]): EventBusKey =
|
||||
callback: EventBusSubscription[T]): EventBusKey {.
|
||||
deprecated: "Implementation has unfixable flaws, please use " &
|
||||
"AsyncEventQueue[T] instead".} =
|
||||
## Subscribe to the event ``event`` passed through eventbus ``bus`` with
|
||||
## callback ``callback``.
|
||||
##
|
||||
@ -524,7 +554,9 @@ proc subscribe*[T](bus: AsyncEventBus, event: string,
|
||||
subkey
|
||||
|
||||
proc subscribeAll*(bus: AsyncEventBus,
|
||||
callback: EventBusAllSubscription): EventBusKey =
|
||||
callback: EventBusAllSubscription): EventBusKey {.
|
||||
deprecated: "Implementation has unfixable flaws, please use " &
|
||||
"AsyncEventQueue instead".} =
|
||||
## Subscribe to all events passed through eventbus ``bus`` with callback
|
||||
## ``callback``.
|
||||
##
|
||||
@ -542,7 +574,9 @@ proc subscribeAll*(bus: AsyncEventBus,
|
||||
bus.subscribers.add(subkey)
|
||||
subkey
|
||||
|
||||
proc unsubscribe*(bus: AsyncEventBus, key: EventBusKey) =
|
||||
proc unsubscribe*(bus: AsyncEventBus, key: EventBusKey) {.
|
||||
deprecated: "Implementation has unfixable flaws, please use " &
|
||||
"AsyncEventQueue instead".} =
|
||||
## Cancel subscription of subscriber with key ``key`` from eventbus ``bus``.
|
||||
let eventKey = generateKey(key.typeName, key.eventName)
|
||||
|
||||
@ -590,7 +624,9 @@ proc emit[T](bus: AsyncEventBus, event: string, data: T, loc: ptr SrcLoc) =
|
||||
for subscriber in bus.subscribers:
|
||||
triggerSubscriberCallback(subscriber)
|
||||
|
||||
template emit*[T](bus: AsyncEventBus, event: string, data: T) =
|
||||
template emit*[T](bus: AsyncEventBus, event: string, data: T) {.
|
||||
deprecated: "Implementation has unfixable flaws, please use " &
|
||||
"AsyncEventQueue instead".} =
|
||||
## Emit new event ``event`` to the eventbus ``bus`` with payload ``data``.
|
||||
emit(bus, event, data, getSrcLocation())
|
||||
|
||||
@ -605,8 +641,205 @@ proc emitWait[T](bus: AsyncEventBus, event: string, data: T,
|
||||
return retFuture
|
||||
|
||||
template emitWait*[T](bus: AsyncEventBus, event: string,
|
||||
data: T): Future[void] =
|
||||
data: T): Future[void] {.
|
||||
deprecated: "Implementation has unfixable flaws, please use " &
|
||||
"AsyncEventQueue instead".} =
|
||||
## Emit new event ``event`` to the eventbus ``bus`` with payload ``data`` and
|
||||
## wait until all the subscribers/waiters will receive notification about
|
||||
## event.
|
||||
emitWait(bus, event, data, getSrcLocation())
|
||||
|
||||
proc `==`(a, b: EventQueueKey): bool {.borrow.}
|
||||
|
||||
proc compact(ab: AsyncEventQueue) {.raises: [Defect].} =
|
||||
if len(ab.readers) > 0:
|
||||
let minOffset =
|
||||
block:
|
||||
var res = -1
|
||||
for reader in ab.readers.items():
|
||||
if not(reader.overflow):
|
||||
res = reader.offset
|
||||
break
|
||||
res
|
||||
|
||||
if minOffset == -1:
|
||||
ab.offset += len(ab.queue)
|
||||
ab.queue.clear()
|
||||
else:
|
||||
doAssert(minOffset >= ab.offset)
|
||||
if minOffset > ab.offset:
|
||||
let delta = minOffset - ab.offset
|
||||
ab.queue.shrink(fromFirst = delta)
|
||||
ab.offset += delta
|
||||
else:
|
||||
ab.queue.clear()
|
||||
|
||||
proc getReaderIndex(ab: AsyncEventQueue, key: EventQueueKey): int {.
|
||||
raises: [Defect].} =
|
||||
for index, value in ab.readers.pairs():
|
||||
if value.key == key:
|
||||
return index
|
||||
-1
|
||||
|
||||
proc newAsyncEventQueue*[T](limitSize = 0): AsyncEventQueue[T] {.
|
||||
raises: [Defect].} =
|
||||
## Creates new ``AsyncEventBus`` maximum size of ``limitSize`` (default is
|
||||
## ``0`` which means that there no limits).
|
||||
##
|
||||
## When number of events emitted exceeds ``limitSize`` - emit() procedure
|
||||
## will discard new events, consumers which has number of pending events
|
||||
## more than ``limitSize`` will get ``AsyncEventQueueFullError``
|
||||
## error.
|
||||
doAssert(limitSize >= 0, "Limit size should be non-negative integer")
|
||||
let queue =
|
||||
if limitSize == 0:
|
||||
initDeque[T]()
|
||||
elif isPowerOfTwo(limitSize + 1):
|
||||
initDeque[T](limitSize + 1)
|
||||
else:
|
||||
initDeque[T](nextPowerOfTwo(limitSize + 1))
|
||||
AsyncEventQueue[T](counter: 0'u64, queue: queue, limit: limitSize)
|
||||
|
||||
proc len*(ab: AsyncEventQueue): int {.raises: [Defect].} =
|
||||
len(ab.queue)
|
||||
|
||||
proc register*(ab: AsyncEventQueue): EventQueueKey {.raises: [Defect].} =
|
||||
inc(ab.counter)
|
||||
let reader = EventQueueReader(key: EventQueueKey(ab.counter),
|
||||
offset: ab.offset + len(ab.queue),
|
||||
overflow: false)
|
||||
ab.readers.add(reader)
|
||||
EventQueueKey(ab.counter)
|
||||
|
||||
proc unregister*(ab: AsyncEventQueue, key: EventQueueKey) {.
|
||||
raises: [Defect] .} =
|
||||
let index = ab.getReaderIndex(key)
|
||||
if index >= 0:
|
||||
let reader = ab.readers[index]
|
||||
# Completing pending Future to avoid deadlock.
|
||||
if not(isNil(reader.waiter)) and not(reader.waiter.finished()):
|
||||
reader.waiter.complete()
|
||||
ab.readers.delete(index)
|
||||
ab.compact()
|
||||
|
||||
proc close*(ab: AsyncEventQueue) {.raises: [Defect].} =
|
||||
for reader in ab.readers.items():
|
||||
if not(isNil(reader.waiter)) and not(reader.waiter.finished()):
|
||||
reader.waiter.complete()
|
||||
ab.readers.reset()
|
||||
ab.queue.clear()
|
||||
|
||||
proc closeWait*(ab: AsyncEventQueue): Future[void] {.raises: [Defect].} =
|
||||
var retFuture = newFuture[void]("AsyncEventQueue.closeWait()")
|
||||
proc continuation(udata: pointer) {.gcsafe.} =
|
||||
if not(retFuture.finished()):
|
||||
retFuture.complete()
|
||||
ab.close()
|
||||
# Schedule `continuation` to be called only after all the `reader`
|
||||
# notifications will be scheduled and processed.
|
||||
callSoon(continuation)
|
||||
retFuture
|
||||
|
||||
template readerOverflow*(ab: AsyncEventQueue,
|
||||
reader: EventQueueReader): bool =
|
||||
ab.limit + (reader.offset - ab.offset) <= len(ab.queue)
|
||||
|
||||
proc emit*[T](ab: AsyncEventQueue[T], data: T) {.raises: [Defect].} =
|
||||
if len(ab.readers) > 0:
|
||||
# We enqueue `data` only if there active reader present.
|
||||
var changesPresent = false
|
||||
let couldEmit =
|
||||
if ab.limit == 0:
|
||||
true
|
||||
else:
|
||||
# Because ab.readers is sequence sorted by `offset`, we will apply our
|
||||
# limit to the most recent consumer.
|
||||
if ab.readerOverflow(ab.readers[^1]):
|
||||
false
|
||||
else:
|
||||
true
|
||||
|
||||
if couldEmit:
|
||||
if ab.limit != 0:
|
||||
for reader in ab.readers.mitems():
|
||||
if not(reader.overflow):
|
||||
if ab.readerOverflow(reader):
|
||||
reader.overflow = true
|
||||
changesPresent = true
|
||||
ab.queue.addLast(data)
|
||||
for reader in ab.readers.mitems():
|
||||
if not(isNil(reader.waiter)) and not(reader.waiter.finished()):
|
||||
reader.waiter.complete()
|
||||
else:
|
||||
for reader in ab.readers.mitems():
|
||||
if not(reader.overflow):
|
||||
reader.overflow = true
|
||||
changesPresent = true
|
||||
|
||||
if changesPresent:
|
||||
ab.compact()
|
||||
|
||||
proc waitEvents*[T](ab: AsyncEventQueue[T],
|
||||
key: EventQueueKey,
|
||||
eventsCount = -1): Future[seq[T]] {.async.} =
|
||||
## Wait for events
|
||||
var
|
||||
events: seq[T]
|
||||
resetFuture = false
|
||||
|
||||
while true:
|
||||
# We need to obtain reader index at every iteration, because `ab.readers`
|
||||
# sequence could be changed after `await waitFuture` call.
|
||||
let index = ab.getReaderIndex(key)
|
||||
if index < 0:
|
||||
# We going to return everything we have in `events`.
|
||||
break
|
||||
|
||||
if resetFuture:
|
||||
resetFuture = false
|
||||
ab.readers[index].waiter = nil
|
||||
|
||||
let reader = ab.readers[index]
|
||||
doAssert(isNil(reader.waiter),
|
||||
"Concurrent waits on same key are not allowed!")
|
||||
|
||||
if reader.overflow:
|
||||
raise newException(AsyncEventQueueFullError,
|
||||
"AsyncEventQueue size exceeds limits")
|
||||
|
||||
let length = len(ab.queue) + ab.offset
|
||||
doAssert(length >= ab.readers[index].offset)
|
||||
if length == ab.readers[index].offset:
|
||||
# We are at the end of queue, it means that we should wait for new events.
|
||||
let waitFuture = newFuture[void]("AsyncEventQueue.waitEvents")
|
||||
ab.readers[index].waiter = waitFuture
|
||||
resetFuture = true
|
||||
await waitFuture
|
||||
else:
|
||||
let
|
||||
itemsInQueue = length - ab.readers[index].offset
|
||||
itemsOffset = ab.readers[index].offset - ab.offset
|
||||
itemsCount =
|
||||
if eventsCount <= 0:
|
||||
itemsInQueue
|
||||
else:
|
||||
min(itemsInQueue, eventsCount - len(events))
|
||||
|
||||
for i in 0 ..< itemsCount:
|
||||
events.add(ab.queue[itemsOffset + i])
|
||||
ab.readers[index].offset += itemsCount
|
||||
|
||||
# Keep readers sequence sorted by `offset` field.
|
||||
var slider = index
|
||||
while (slider + 1 < len(ab.readers)) and
|
||||
(ab.readers[slider].offset > ab.readers[slider + 1].offset):
|
||||
swap(ab.readers[slider], ab.readers[slider + 1])
|
||||
inc(slider)
|
||||
|
||||
# Shrink data queue.
|
||||
ab.compact()
|
||||
|
||||
if (eventsCount <= 0) or (len(events) == eventsCount):
|
||||
break
|
||||
|
||||
return events
|
||||
|
5
vendor/nim-chronos/chronos/debugutils.nim
vendored
5
vendor/nim-chronos/chronos/debugutils.nim
vendored
@ -7,7 +7,10 @@
|
||||
# Apache License, version 2.0, (LICENSE-APACHEv2)
|
||||
# MIT license (LICENSE-MIT)
|
||||
|
||||
{.push raises: [Defect].}
|
||||
when (NimMajor, NimMinor) < (1, 4):
|
||||
{.push raises: [Defect].}
|
||||
else:
|
||||
{.push raises: [].}
|
||||
|
||||
import ./asyncloop
|
||||
export asyncloop
|
||||
|
5
vendor/nim-chronos/chronos/handles.nim
vendored
5
vendor/nim-chronos/chronos/handles.nim
vendored
@ -7,7 +7,10 @@
|
||||
# Apache License, version 2.0, (LICENSE-APACHEv2)
|
||||
# MIT license (LICENSE-MIT)
|
||||
|
||||
{.push raises: [Defect].}
|
||||
when (NimMajor, NimMinor) < (1, 4):
|
||||
{.push raises: [Defect].}
|
||||
else:
|
||||
{.push raises: [].}
|
||||
|
||||
import std/[net, nativesockets]
|
||||
import stew/base10
|
||||
|
@ -9,7 +9,10 @@
|
||||
|
||||
# This module implements Linux epoll().
|
||||
|
||||
{.push raises: [Defect].}
|
||||
when (NimMajor, NimMinor) < (1, 4):
|
||||
{.push raises: [Defect].}
|
||||
else:
|
||||
{.push raises: [].}
|
||||
|
||||
import posix, times, epoll
|
||||
|
||||
|
5
vendor/nim-chronos/chronos/sendfile.nim
vendored
5
vendor/nim-chronos/chronos/sendfile.nim
vendored
@ -9,7 +9,10 @@
|
||||
|
||||
## This module provides cross-platform wrapper for ``sendfile()`` syscall.
|
||||
|
||||
{.push raises: [Defect].}
|
||||
when (NimMajor, NimMinor) < (1, 4):
|
||||
{.push raises: [Defect].}
|
||||
else:
|
||||
{.push raises: [].}
|
||||
|
||||
when defined(nimdoc):
|
||||
proc sendfile*(outfd, infd: int, offset: int, count: var int): int =
|
||||
|
5
vendor/nim-chronos/chronos/srcloc.nim
vendored
5
vendor/nim-chronos/chronos/srcloc.nim
vendored
@ -6,7 +6,10 @@
|
||||
# Licensed under either of
|
||||
# Apache License, version 2.0, (LICENSE-APACHEv2)
|
||||
# MIT license (LICENSE-MIT)
|
||||
{.push raises: [Defect].}
|
||||
when (NimMajor, NimMinor) < (1, 4):
|
||||
{.push raises: [Defect].}
|
||||
else:
|
||||
{.push raises: [].}
|
||||
import stew/base10
|
||||
|
||||
type
|
||||
|
@ -7,7 +7,10 @@
|
||||
# Apache License, version 2.0, (LICENSE-APACHEv2)
|
||||
# MIT license (LICENSE-MIT)
|
||||
|
||||
{.push raises: [Defect].}
|
||||
when (NimMajor, NimMinor) < (1, 4):
|
||||
{.push raises: [Defect].}
|
||||
else:
|
||||
{.push raises: [].}
|
||||
|
||||
import ../asyncloop, ../asyncsync
|
||||
import ../transports/common, ../transports/stream
|
||||
@ -820,7 +823,7 @@ proc write*(wstream: AsyncStreamWriter, pbytes: pointer,
|
||||
except CatchableError as exc:
|
||||
raise newAsyncStreamWriteError(exc)
|
||||
|
||||
proc write*(wstream: AsyncStreamWriter, sbytes: seq[byte],
|
||||
proc write*(wstream: AsyncStreamWriter, sbytes: sink seq[byte],
|
||||
msglen = -1) {.async.} =
|
||||
## Write sequence of bytes ``sbytes`` of length ``msglen`` to writer
|
||||
## stream ``wstream``.
|
||||
@ -852,8 +855,11 @@ proc write*(wstream: AsyncStreamWriter, sbytes: seq[byte],
|
||||
wstream.bytesCount = wstream.bytesCount + uint64(length)
|
||||
else:
|
||||
var item = WriteItem(kind: Sequence)
|
||||
if not isLiteral(sbytes):
|
||||
shallowCopy(item.dataSeq, sbytes)
|
||||
when declared(shallowCopy):
|
||||
if not(isLiteral(sbytes)):
|
||||
shallowCopy(item.dataSeq, sbytes)
|
||||
else:
|
||||
item.dataSeq = sbytes
|
||||
else:
|
||||
item.dataSeq = sbytes
|
||||
item.size = length
|
||||
@ -869,7 +875,7 @@ proc write*(wstream: AsyncStreamWriter, sbytes: seq[byte],
|
||||
except CatchableError as exc:
|
||||
raise newAsyncStreamWriteError(exc)
|
||||
|
||||
proc write*(wstream: AsyncStreamWriter, sbytes: string,
|
||||
proc write*(wstream: AsyncStreamWriter, sbytes: sink string,
|
||||
msglen = -1) {.async.} =
|
||||
## Write string ``sbytes`` of length ``msglen`` to writer stream ``wstream``.
|
||||
##
|
||||
@ -900,8 +906,11 @@ proc write*(wstream: AsyncStreamWriter, sbytes: string,
|
||||
wstream.bytesCount = wstream.bytesCount + uint64(length)
|
||||
else:
|
||||
var item = WriteItem(kind: String)
|
||||
if not isLiteral(sbytes):
|
||||
shallowCopy(item.dataStr, sbytes)
|
||||
when declared(shallowCopy):
|
||||
if not(isLiteral(sbytes)):
|
||||
shallowCopy(item.dataStr, sbytes)
|
||||
else:
|
||||
item.dataStr = sbytes
|
||||
else:
|
||||
item.dataStr = sbytes
|
||||
item.size = length
|
||||
|
138
vendor/nim-chronos/chronos/streams/tlsstream.nim
vendored
138
vendor/nim-chronos/chronos/streams/tlsstream.nim
vendored
@ -9,10 +9,12 @@
|
||||
|
||||
## This module implements Transport Layer Security (TLS) stream. This module
|
||||
## uses sources of BearSSL <https://www.bearssl.org> by Thomas Pornin.
|
||||
import bearssl, bearssl/cacert
|
||||
import
|
||||
bearssl/[brssl, ec, errors, pem, rsa, ssl, x509],
|
||||
bearssl/abi/cacert
|
||||
import ../asyncloop, ../timer, ../asyncsync
|
||||
import asyncstream, ../transports/stream, ../transports/common
|
||||
export asyncloop, asyncsync, timer, asyncstream, bearssl
|
||||
export asyncloop, asyncsync, timer, asyncstream
|
||||
|
||||
type
|
||||
TLSStreamKind {.pure.} = enum
|
||||
@ -79,7 +81,7 @@ type
|
||||
handshakeFut*: Future[void]
|
||||
|
||||
TLSAsyncStream* = ref object of RootRef
|
||||
xwc*: X509NoAnchorContext
|
||||
xwc*: X509NoanchorContext
|
||||
ccontext*: SslClientContext
|
||||
scontext*: SslServerContext
|
||||
sbuffer*: seq[byte]
|
||||
@ -141,10 +143,10 @@ proc tlsWriteRec(engine: ptr SslEngineContext,
|
||||
writer: TLSStreamWriter): Future[TLSResult] {.async.} =
|
||||
try:
|
||||
var length = 0'u
|
||||
var buf = sslEngineSendrecBuf(engine, length)
|
||||
var buf = sslEngineSendrecBuf(engine[], length)
|
||||
doAssert(length != 0 and not isNil(buf))
|
||||
await writer.wsource.write(buf, int(length))
|
||||
sslEngineSendrecAck(engine, length)
|
||||
sslEngineSendrecAck(engine[], length)
|
||||
return TLSResult.Success
|
||||
except AsyncStreamError as exc:
|
||||
if writer.state == AsyncStreamState.Running:
|
||||
@ -161,13 +163,13 @@ proc tlsWriteApp(engine: ptr SslEngineContext,
|
||||
var item = await writer.queue.get()
|
||||
if item.size > 0:
|
||||
var length = 0'u
|
||||
var buf = sslEngineSendappBuf(engine, length)
|
||||
var buf = sslEngineSendappBuf(engine[], length)
|
||||
let toWrite = min(int(length), item.size)
|
||||
copyOut(buf, item, toWrite)
|
||||
if int(length) >= item.size:
|
||||
# BearSSL is ready to accept whole item size.
|
||||
sslEngineSendappAck(engine, uint(item.size))
|
||||
sslEngineFlush(engine, 0)
|
||||
sslEngineSendappAck(engine[], uint(item.size))
|
||||
sslEngineFlush(engine[], 0)
|
||||
item.future.complete()
|
||||
return TLSResult.Success
|
||||
else:
|
||||
@ -176,10 +178,10 @@ proc tlsWriteApp(engine: ptr SslEngineContext,
|
||||
item.offset = item.offset + int(length)
|
||||
item.size = item.size - int(length)
|
||||
writer.queue.addFirstNoWait(item)
|
||||
sslEngineSendappAck(engine, length)
|
||||
sslEngineSendappAck(engine[], length)
|
||||
return TLSResult.Success
|
||||
else:
|
||||
sslEngineClose(engine)
|
||||
sslEngineClose(engine[])
|
||||
item.future.complete()
|
||||
return TLSResult.Success
|
||||
except CancelledError:
|
||||
@ -191,11 +193,11 @@ proc tlsReadRec(engine: ptr SslEngineContext,
|
||||
reader: TLSStreamReader): Future[TLSResult] {.async.} =
|
||||
try:
|
||||
var length = 0'u
|
||||
var buf = sslEngineRecvrecBuf(engine, length)
|
||||
var buf = sslEngineRecvrecBuf(engine[], length)
|
||||
let res = await reader.rsource.readOnce(buf, int(length))
|
||||
sslEngineRecvrecAck(engine, uint(res))
|
||||
sslEngineRecvrecAck(engine[], uint(res))
|
||||
if res == 0:
|
||||
sslEngineClose(engine)
|
||||
sslEngineClose(engine[])
|
||||
|
||||
return TLSResult.EOF
|
||||
else:
|
||||
@ -213,9 +215,9 @@ proc tlsReadApp(engine: ptr SslEngineContext,
|
||||
reader: TLSStreamReader): Future[TLSResult] {.async.} =
|
||||
try:
|
||||
var length = 0'u
|
||||
var buf = sslEngineRecvappBuf(engine, length)
|
||||
var buf = sslEngineRecvappBuf(engine[], length)
|
||||
await upload(addr reader.buffer, buf, int(length))
|
||||
sslEngineRecvappAck(engine, length)
|
||||
sslEngineRecvappAck(engine[], length)
|
||||
return TLSResult.Success
|
||||
except CancelledError:
|
||||
if reader.state == AsyncStreamState.Running:
|
||||
@ -291,7 +293,7 @@ proc tlsLoop*(stream: TLSAsyncStream) {.async.} =
|
||||
|
||||
while true:
|
||||
var waiting: seq[Future[TLSResult]]
|
||||
var state = sslEngineCurrentState(engine)
|
||||
var state = sslEngineCurrentState(engine[])
|
||||
|
||||
if (state and SSL_CLOSED) == SSL_CLOSED:
|
||||
if loopState == AsyncStreamState.Running:
|
||||
@ -361,7 +363,7 @@ proc tlsLoop*(stream: TLSAsyncStream) {.async.} =
|
||||
else:
|
||||
newTLSStreamWriteError(stream.reader.error)
|
||||
of AsyncStreamState.Finished:
|
||||
let err = engine.sslEngineLastError()
|
||||
let err = engine[].sslEngineLastError()
|
||||
if err != 0:
|
||||
newTLSStreamProtocolError(err)
|
||||
else:
|
||||
@ -419,13 +421,13 @@ proc tlsReadLoop(stream: AsyncStreamReader) {.async.} =
|
||||
proc getSignerAlgo(xc: X509Certificate): int =
|
||||
## Get certificate's signing algorithm.
|
||||
var dc: X509DecoderContext
|
||||
x509DecoderInit(addr dc, nil, nil)
|
||||
x509DecoderPush(addr dc, xc.data, xc.dataLen)
|
||||
let err = x509DecoderLastError(addr dc)
|
||||
x509DecoderInit(dc, nil, nil)
|
||||
x509DecoderPush(dc, xc.data, xc.dataLen)
|
||||
let err = x509DecoderLastError(dc)
|
||||
if err != 0:
|
||||
-1
|
||||
else:
|
||||
int(x509DecoderGetSignerKeyType(addr dc))
|
||||
int(x509DecoderGetSignerKeyType(dc))
|
||||
|
||||
proc newTLSClientAsyncStream*(rsource: AsyncStreamReader,
|
||||
wsource: AsyncStreamWriter,
|
||||
@ -465,23 +467,23 @@ proc newTLSClientAsyncStream*(rsource: AsyncStreamReader,
|
||||
res.writer = writer
|
||||
|
||||
if TLSFlags.NoVerifyHost in flags:
|
||||
sslClientInitFull(addr res.ccontext, addr res.x509, nil, 0)
|
||||
initNoAnchor(addr res.xwc, addr res.x509.vtable)
|
||||
sslEngineSetX509(addr res.ccontext.eng, addr res.xwc.vtable)
|
||||
sslClientInitFull(res.ccontext, addr res.x509, nil, 0)
|
||||
initNoAnchor(res.xwc, addr res.x509.vtable)
|
||||
sslEngineSetX509(res.ccontext.eng, addr res.xwc.vtable)
|
||||
else:
|
||||
sslClientInitFull(addr res.ccontext, addr res.x509,
|
||||
sslClientInitFull(res.ccontext, addr res.x509,
|
||||
unsafeAddr MozillaTrustAnchors[0],
|
||||
len(MozillaTrustAnchors))
|
||||
uint(len(MozillaTrustAnchors)))
|
||||
|
||||
let size = max(SSL_BUFSIZE_BIDI, bufferSize)
|
||||
res.sbuffer = newSeq[byte](size)
|
||||
sslEngineSetBuffer(addr res.ccontext.eng, addr res.sbuffer[0],
|
||||
sslEngineSetBuffer(res.ccontext.eng, addr res.sbuffer[0],
|
||||
uint(len(res.sbuffer)), 1)
|
||||
sslEngineSetVersions(addr res.ccontext.eng, uint16(minVersion),
|
||||
sslEngineSetVersions(res.ccontext.eng, uint16(minVersion),
|
||||
uint16(maxVersion))
|
||||
|
||||
if TLSFlags.NoVerifyServerName in flags:
|
||||
let err = sslClientReset(addr res.ccontext, "", 0)
|
||||
let err = sslClientReset(res.ccontext, "", 0)
|
||||
if err == 0:
|
||||
raise newException(TLSStreamInitError, "Could not initialize TLS layer")
|
||||
else:
|
||||
@ -489,7 +491,7 @@ proc newTLSClientAsyncStream*(rsource: AsyncStreamReader,
|
||||
raise newException(TLSStreamInitError,
|
||||
"serverName must not be empty string")
|
||||
|
||||
let err = sslClientReset(addr res.ccontext, serverName, 0)
|
||||
let err = sslClientReset(res.ccontext, serverName, 0)
|
||||
if err == 0:
|
||||
raise newException(TLSStreamInitError, "Could not initialize TLS layer")
|
||||
|
||||
@ -545,33 +547,33 @@ proc newTLSServerAsyncStream*(rsource: AsyncStreamReader,
|
||||
let algo = getSignerAlgo(certificate.certs[0])
|
||||
if algo == -1:
|
||||
raiseTLSStreamProtocolError("Could not decode certificate")
|
||||
sslServerInitFullEc(addr res.scontext, addr certificate.certs[0],
|
||||
len(certificate.certs), cuint(algo),
|
||||
sslServerInitFullEc(res.scontext, addr certificate.certs[0],
|
||||
uint(len(certificate.certs)), cuint(algo),
|
||||
addr privateKey.eckey)
|
||||
elif privateKey.kind == TLSKeyType.RSA:
|
||||
sslServerInitFullRsa(addr res.scontext, addr certificate.certs[0],
|
||||
len(certificate.certs), addr privateKey.rsakey)
|
||||
sslServerInitFullRsa(res.scontext, addr certificate.certs[0],
|
||||
uint(len(certificate.certs)), addr privateKey.rsakey)
|
||||
|
||||
let size = max(SSL_BUFSIZE_BIDI, bufferSize)
|
||||
res.sbuffer = newSeq[byte](size)
|
||||
sslEngineSetBuffer(addr res.scontext.eng, addr res.sbuffer[0],
|
||||
sslEngineSetBuffer(res.scontext.eng, addr res.sbuffer[0],
|
||||
uint(len(res.sbuffer)), 1)
|
||||
sslEngineSetVersions(addr res.scontext.eng, uint16(minVersion),
|
||||
sslEngineSetVersions(res.scontext.eng, uint16(minVersion),
|
||||
uint16(maxVersion))
|
||||
|
||||
if not isNil(cache):
|
||||
sslServerSetCache(addr res.scontext, addr cache.context.vtable)
|
||||
sslServerSetCache(res.scontext, addr cache.context.vtable)
|
||||
|
||||
if TLSFlags.EnforceServerPref in flags:
|
||||
sslEngineAddFlags(addr res.scontext.eng, OPT_ENFORCE_SERVER_PREFERENCES)
|
||||
sslEngineAddFlags(res.scontext.eng, OPT_ENFORCE_SERVER_PREFERENCES)
|
||||
if TLSFlags.NoRenegotiation in flags:
|
||||
sslEngineAddFlags(addr res.scontext.eng, OPT_NO_RENEGOTIATION)
|
||||
sslEngineAddFlags(res.scontext.eng, OPT_NO_RENEGOTIATION)
|
||||
if TLSFlags.TolerateNoClientAuth in flags:
|
||||
sslEngineAddFlags(addr res.scontext.eng, OPT_TOLERATE_NO_CLIENT_AUTH)
|
||||
sslEngineAddFlags(res.scontext.eng, OPT_TOLERATE_NO_CLIENT_AUTH)
|
||||
if TLSFlags.FailOnAlpnMismatch in flags:
|
||||
sslEngineAddFlags(addr res.scontext.eng, OPT_FAIL_ON_ALPN_MISMATCH)
|
||||
sslEngineAddFlags(res.scontext.eng, OPT_FAIL_ON_ALPN_MISMATCH)
|
||||
|
||||
let err = sslServerReset(addr res.scontext)
|
||||
let err = sslServerReset(res.scontext)
|
||||
if err == 0:
|
||||
raise newException(TLSStreamInitError, "Could not initialize TLS layer")
|
||||
|
||||
@ -583,27 +585,27 @@ proc newTLSServerAsyncStream*(rsource: AsyncStreamReader,
|
||||
|
||||
proc copyKey(src: RsaPrivateKey): TLSPrivateKey =
|
||||
## Creates copy of RsaPrivateKey ``src``.
|
||||
var offset = 0
|
||||
var offset = 0'u
|
||||
let keySize = src.plen + src.qlen + src.dplen + src.dqlen + src.iqlen
|
||||
var res = TLSPrivateKey(kind: TLSKeyType.RSA, storage: newSeq[byte](keySize))
|
||||
copyMem(addr res.storage[offset], src.p, src.plen)
|
||||
res.rsakey.p = cast[ptr cuchar](addr res.storage[offset])
|
||||
res.rsakey.p = addr res.storage[offset]
|
||||
res.rsakey.plen = src.plen
|
||||
offset = offset + src.plen
|
||||
copyMem(addr res.storage[offset], src.q, src.qlen)
|
||||
res.rsakey.q = cast[ptr cuchar](addr res.storage[offset])
|
||||
res.rsakey.q = addr res.storage[offset]
|
||||
res.rsakey.qlen = src.qlen
|
||||
offset = offset + src.qlen
|
||||
copyMem(addr res.storage[offset], src.dp, src.dplen)
|
||||
res.rsakey.dp = cast[ptr cuchar](addr res.storage[offset])
|
||||
res.rsakey.dp = addr res.storage[offset]
|
||||
res.rsakey.dplen = src.dplen
|
||||
offset = offset + src.dplen
|
||||
copyMem(addr res.storage[offset], src.dq, src.dqlen)
|
||||
res.rsakey.dq = cast[ptr cuchar](addr res.storage[offset])
|
||||
res.rsakey.dq = addr res.storage[offset]
|
||||
res.rsakey.dqlen = src.dqlen
|
||||
offset = offset + src.dqlen
|
||||
copyMem(addr res.storage[offset], src.iq, src.iqlen)
|
||||
res.rsakey.iq = cast[ptr cuchar](addr res.storage[offset])
|
||||
res.rsakey.iq = addr res.storage[offset]
|
||||
res.rsakey.iqlen = src.iqlen
|
||||
res.rsakey.nBitlen = src.nBitlen
|
||||
res
|
||||
@ -614,7 +616,7 @@ proc copyKey(src: EcPrivateKey): TLSPrivateKey =
|
||||
let keySize = src.xlen
|
||||
var res = TLSPrivateKey(kind: TLSKeyType.EC, storage: newSeq[byte](keySize))
|
||||
copyMem(addr res.storage[offset], src.x, src.xlen)
|
||||
res.eckey.x = cast[ptr cuchar](addr res.storage[offset])
|
||||
res.eckey.x = addr res.storage[offset]
|
||||
res.eckey.xlen = src.xlen
|
||||
res.eckey.curve = src.curve
|
||||
res
|
||||
@ -627,12 +629,12 @@ proc init*(tt: typedesc[TLSPrivateKey], data: openArray[byte]): TLSPrivateKey =
|
||||
var ctx: SkeyDecoderContext
|
||||
if len(data) == 0:
|
||||
raiseTLSStreamProtocolError("Incorrect private key")
|
||||
skeyDecoderInit(addr ctx)
|
||||
skeyDecoderPush(addr ctx, cast[pointer](unsafeAddr data[0]), len(data))
|
||||
let err = skeyDecoderLastError(addr ctx)
|
||||
skeyDecoderInit(ctx)
|
||||
skeyDecoderPush(ctx, cast[pointer](unsafeAddr data[0]), uint(len(data)))
|
||||
let err = skeyDecoderLastError(ctx)
|
||||
if err != 0:
|
||||
raiseTLSStreamProtocolError(err)
|
||||
let keyType = skeyDecoderKeyType(addr ctx)
|
||||
let keyType = skeyDecoderKeyType(ctx)
|
||||
let res =
|
||||
if keyType == KEYTYPE_RSA:
|
||||
copyKey(ctx.key.rsa)
|
||||
@ -646,34 +648,32 @@ proc pemDecode*(data: openArray[char]): seq[PEMElement] =
|
||||
## Decode PEM encoded string and get array of binary blobs.
|
||||
if len(data) == 0:
|
||||
raiseTLSStreamProtocolError("Empty PEM message")
|
||||
var ctx: PemDecoderContext
|
||||
var pctx = new PEMContext
|
||||
var res = newSeq[PEMElement]()
|
||||
pemDecoderInit(addr ctx)
|
||||
|
||||
proc itemAppend(ctx: pointer, pbytes: pointer, nbytes: int) {.cdecl.} =
|
||||
proc itemAppend(ctx: pointer, pbytes: pointer, nbytes: uint) {.cdecl.} =
|
||||
var p = cast[PEMContext](ctx)
|
||||
var o = len(p.data)
|
||||
var o = uint(len(p.data))
|
||||
p.data.setLen(o + nbytes)
|
||||
copyMem(addr p.data[o], pbytes, nbytes)
|
||||
|
||||
var length = len(data)
|
||||
var offset = 0
|
||||
var inobj = false
|
||||
var elem: PEMElement
|
||||
|
||||
while length > 0:
|
||||
var tlen = pemDecoderPush(addr ctx,
|
||||
cast[pointer](unsafeAddr data[offset]), length)
|
||||
offset = offset + tlen
|
||||
length = length - tlen
|
||||
var ctx: PemDecoderContext
|
||||
ctx.init()
|
||||
ctx.setdest(itemAppend, cast[pointer](pctx))
|
||||
|
||||
let event = pemDecoderEvent(addr ctx)
|
||||
while offset < data.len:
|
||||
let tlen = ctx.push(data.toOpenArray(offset, data.high))
|
||||
offset = offset + tlen
|
||||
|
||||
let event = ctx.lastEvent()
|
||||
if event == PEM_BEGIN_OBJ:
|
||||
inobj = true
|
||||
elem.name = $pemDecoderName(addr ctx)
|
||||
pctx.data = newSeq[byte]()
|
||||
pemDecoderSetdest(addr ctx, itemAppend, cast[pointer](pctx))
|
||||
elem.name = ctx.banner()
|
||||
pctx.data.setLen(0)
|
||||
elif event == PEM_END_OBJ:
|
||||
if inobj:
|
||||
elem.data = pctx.data
|
||||
@ -715,8 +715,8 @@ proc init*(tt: typedesc[TLSCertificate],
|
||||
let offset = len(res.storage)
|
||||
res.storage.add(item.data)
|
||||
let cert = X509Certificate(
|
||||
data: cast[ptr cuchar](addr res.storage[offset]),
|
||||
dataLen: len(item.data)
|
||||
data: addr res.storage[offset],
|
||||
dataLen: uint(len(item.data))
|
||||
)
|
||||
let ares = getSignerAlgo(cert)
|
||||
if ares == -1:
|
||||
|
23
vendor/nim-chronos/chronos/timer.nim
vendored
23
vendor/nim-chronos/chronos/timer.nim
vendored
@ -24,7 +24,10 @@
|
||||
## You can specify which timer you want to use ``-d:asyncTimer=<system/mono>``.
|
||||
const asyncTimer* {.strdefine.} = "mono"
|
||||
|
||||
{.push raises: [Defect].}
|
||||
when (NimMajor, NimMinor) < (1, 4):
|
||||
{.push raises: [Defect].}
|
||||
else:
|
||||
{.push raises: [].}
|
||||
|
||||
when defined(windows):
|
||||
when asyncTimer == "system":
|
||||
@ -302,6 +305,18 @@ const
|
||||
ZeroDuration* = Duration(value: 0'i64)
|
||||
InfiniteDuration* = Duration(value: high(int64))
|
||||
|
||||
template high*(T: typedesc[Moment]): Moment =
|
||||
Moment(value: high(int64))
|
||||
|
||||
template low*(T: typedesc[Moment]): Moment =
|
||||
Moment(value: 0)
|
||||
|
||||
template high*(T: typedesc[Duration]): Duration =
|
||||
Duration(value: high(int64))
|
||||
|
||||
template low*(T: typedesc[Duration]): Duration =
|
||||
Duration(value: 0)
|
||||
|
||||
func nanoseconds*(v: SomeIntegerI64): Duration {.inline.} =
|
||||
## Initialize Duration with nanoseconds value ``v``.
|
||||
result.value = int64(v)
|
||||
@ -438,6 +453,12 @@ func init*(t: typedesc[Moment], value: int64, precision: Duration): Moment =
|
||||
## ``precision``.
|
||||
result.value = value * precision.value
|
||||
|
||||
func epochSeconds*(moment: Moment): int64 =
|
||||
moment.value div Second.value
|
||||
|
||||
func epochNanoSeconds*(moment: Moment): int64 =
|
||||
moment.value
|
||||
|
||||
proc fromNow*(t: typedesc[Moment], a: Duration): Moment {.inline.} =
|
||||
## Returns moment in time which is equal to current moment + Duration.
|
||||
result = Moment.now() + a
|
||||
|
@ -7,7 +7,10 @@
|
||||
# Apache License, version 2.0, (LICENSE-APACHEv2)
|
||||
# MIT license (LICENSE-MIT)
|
||||
|
||||
{.push raises: [Defect].}
|
||||
when (NimMajor, NimMinor) < (1, 4):
|
||||
{.push raises: [Defect].}
|
||||
else:
|
||||
{.push raises: [].}
|
||||
|
||||
import std/[os, strutils, nativesockets, net]
|
||||
import stew/base10
|
||||
|
@ -7,7 +7,10 @@
|
||||
# Apache License, version 2.0, (LICENSE-APACHEv2)
|
||||
# MIT license (LICENSE-MIT)
|
||||
|
||||
{.push raises: [Defect].}
|
||||
when (NimMajor, NimMinor) < (1, 4):
|
||||
{.push raises: [Defect].}
|
||||
else:
|
||||
{.push raises: [].}
|
||||
|
||||
import std/[net, nativesockets, os, deques]
|
||||
import ".."/[selectors2, asyncloop, handles]
|
||||
@ -768,13 +771,17 @@ proc send*(transp: DatagramTransport, pbytes: pointer,
|
||||
transp.resumeWrite()
|
||||
return retFuture
|
||||
|
||||
proc send*(transp: DatagramTransport, msg: string, msglen = -1): Future[void] =
|
||||
proc send*(transp: DatagramTransport, msg: sink string,
|
||||
msglen = -1): Future[void] =
|
||||
## Send string ``msg`` using transport ``transp`` to remote destination
|
||||
## address which was bounded on transport.
|
||||
var retFuture = newFutureStr[void]("datagram.transport.send(string)")
|
||||
transp.checkClosed(retFuture)
|
||||
if not isLiteral(msg):
|
||||
shallowCopy(retFuture.gcholder, msg)
|
||||
when declared(shallowCopy):
|
||||
if not(isLiteral(msg)):
|
||||
shallowCopy(retFuture.gcholder, msg)
|
||||
else:
|
||||
retFuture.gcholder = msg
|
||||
else:
|
||||
retFuture.gcholder = msg
|
||||
let length = if msglen <= 0: len(msg) else: msglen
|
||||
@ -786,14 +793,17 @@ proc send*(transp: DatagramTransport, msg: string, msglen = -1): Future[void] =
|
||||
transp.resumeWrite()
|
||||
return retFuture
|
||||
|
||||
proc send*[T](transp: DatagramTransport, msg: seq[T],
|
||||
proc send*[T](transp: DatagramTransport, msg: sink seq[T],
|
||||
msglen = -1): Future[void] =
|
||||
## Send string ``msg`` using transport ``transp`` to remote destination
|
||||
## address which was bounded on transport.
|
||||
var retFuture = newFutureSeq[void, T]("datagram.transport.send(seq)")
|
||||
transp.checkClosed(retFuture)
|
||||
if not isLiteral(msg):
|
||||
shallowCopy(retFuture.gcholder, msg)
|
||||
when declared(shallowCopy):
|
||||
if not(isLiteral(msg)):
|
||||
shallowCopy(retFuture.gcholder, msg)
|
||||
else:
|
||||
retFuture.gcholder = msg
|
||||
else:
|
||||
retFuture.gcholder = msg
|
||||
let length = if msglen <= 0: (len(msg) * sizeof(T)) else: (msglen * sizeof(T))
|
||||
@ -819,13 +829,16 @@ proc sendTo*(transp: DatagramTransport, remote: TransportAddress,
|
||||
return retFuture
|
||||
|
||||
proc sendTo*(transp: DatagramTransport, remote: TransportAddress,
|
||||
msg: string, msglen = -1): Future[void] =
|
||||
msg: sink string, msglen = -1): Future[void] =
|
||||
## Send string ``msg`` using transport ``transp`` to remote destination
|
||||
## address ``remote``.
|
||||
var retFuture = newFutureStr[void]("datagram.transport.sendTo(string)")
|
||||
transp.checkClosed(retFuture)
|
||||
if not isLiteral(msg):
|
||||
shallowCopy(retFuture.gcholder, msg)
|
||||
when declared(shallowCopy):
|
||||
if not(isLiteral(msg)):
|
||||
shallowCopy(retFuture.gcholder, msg)
|
||||
else:
|
||||
retFuture.gcholder = msg
|
||||
else:
|
||||
retFuture.gcholder = msg
|
||||
let length = if msglen <= 0: len(msg) else: msglen
|
||||
@ -839,13 +852,16 @@ proc sendTo*(transp: DatagramTransport, remote: TransportAddress,
|
||||
return retFuture
|
||||
|
||||
proc sendTo*[T](transp: DatagramTransport, remote: TransportAddress,
|
||||
msg: seq[T], msglen = -1): Future[void] =
|
||||
msg: sink seq[T], msglen = -1): Future[void] =
|
||||
## Send sequence ``msg`` using transport ``transp`` to remote destination
|
||||
## address ``remote``.
|
||||
var retFuture = newFutureSeq[void, T]("datagram.transport.sendTo(seq)")
|
||||
transp.checkClosed(retFuture)
|
||||
if not isLiteral(msg):
|
||||
shallowCopy(retFuture.gcholder, msg)
|
||||
when declared(shallowCopy):
|
||||
if not(isLiteral(msg)):
|
||||
shallowCopy(retFuture.gcholder, msg)
|
||||
else:
|
||||
retFuture.gcholder = msg
|
||||
else:
|
||||
retFuture.gcholder = msg
|
||||
let length = if msglen <= 0: (len(msg) * sizeof(T)) else: (msglen * sizeof(T))
|
||||
@ -864,7 +880,10 @@ proc peekMessage*(transp: DatagramTransport, msg: var seq[byte],
|
||||
if ReadError in transp.state:
|
||||
transp.state.excl(ReadError)
|
||||
raise transp.getError()
|
||||
shallowCopy(msg, transp.buffer)
|
||||
when declared(shallowCopy):
|
||||
shallowCopy(msg, transp.buffer)
|
||||
else:
|
||||
msg = transp.buffer
|
||||
msglen = transp.buflen
|
||||
|
||||
proc getMessage*(transp: DatagramTransport): seq[byte] {.
|
||||
|
@ -9,7 +9,10 @@
|
||||
|
||||
## This module implements various IP network utility procedures.
|
||||
|
||||
{.push raises: [Defect].}
|
||||
when (NimMajor, NimMinor) < (1, 4):
|
||||
{.push raises: [Defect].}
|
||||
else:
|
||||
{.push raises: [].}
|
||||
|
||||
import std/strutils
|
||||
import stew/endians2
|
||||
|
@ -10,7 +10,10 @@
|
||||
## This module implements cross-platform network interfaces list.
|
||||
## Currently supported OSes are Windows, Linux, MacOS, BSD(not tested).
|
||||
|
||||
{.push raises: [Defect].}
|
||||
when (NimMajor, NimMinor) < (1, 4):
|
||||
{.push raises: [Defect].}
|
||||
else:
|
||||
{.push raises: [].}
|
||||
|
||||
import std/algorithm
|
||||
from std/strutils import toHex
|
||||
|
45
vendor/nim-chronos/chronos/transports/stream.nim
vendored
45
vendor/nim-chronos/chronos/transports/stream.nim
vendored
@ -7,7 +7,10 @@
|
||||
# Apache License, version 2.0, (LICENSE-APACHEv2)
|
||||
# MIT license (LICENSE-MIT)
|
||||
|
||||
{.push raises: [Defect].}
|
||||
when (NimMajor, NimMinor) < (1, 4):
|
||||
{.push raises: [Defect].}
|
||||
else:
|
||||
{.push raises: [].}
|
||||
|
||||
import std/[net, nativesockets, os, deques]
|
||||
import ".."/[asyncloop, handles, selectors2]
|
||||
@ -2116,7 +2119,8 @@ proc write*(transp: StreamTransport, pbytes: pointer,
|
||||
transp.resumeWrite()
|
||||
return retFuture
|
||||
|
||||
proc write*(transp: StreamTransport, msg: string, msglen = -1): Future[int] =
|
||||
proc write*(transp: StreamTransport, msg: sink string,
|
||||
msglen = -1): Future[int] =
|
||||
## Write data from string ``msg`` using transport ``transp``.
|
||||
var retFuture = newFutureStr[int]("stream.transport.write(string)")
|
||||
transp.checkClosed(retFuture)
|
||||
@ -2134,12 +2138,17 @@ proc write*(transp: StreamTransport, msg: string, msglen = -1): Future[int] =
|
||||
let
|
||||
written = nbytes - rbytes # In case fastWrite wrote some
|
||||
|
||||
pbytes = if not(isLiteral(msg)):
|
||||
shallowCopy(retFuture.gcholder, msg)
|
||||
cast[ptr byte](addr retFuture.gcholder[written])
|
||||
else:
|
||||
retFuture.gcholder = msg[written..<nbytes]
|
||||
cast[ptr byte](addr retFuture.gcholder[0])
|
||||
pbytes =
|
||||
when declared(shallowCopy):
|
||||
if not(isLiteral(msg)):
|
||||
shallowCopy(retFuture.gcholder, msg)
|
||||
cast[ptr byte](addr retFuture.gcholder[written])
|
||||
else:
|
||||
retFuture.gcholder = msg[written ..< nbytes]
|
||||
cast[ptr byte](addr retFuture.gcholder[0])
|
||||
else:
|
||||
retFuture.gcholder = msg[written ..< nbytes]
|
||||
cast[ptr byte](addr retFuture.gcholder[0])
|
||||
|
||||
var vector = StreamVector(kind: DataBuffer, writer: retFuture,
|
||||
buf: pbytes, buflen: rbytes, size: nbytes)
|
||||
@ -2147,7 +2156,8 @@ proc write*(transp: StreamTransport, msg: string, msglen = -1): Future[int] =
|
||||
transp.resumeWrite()
|
||||
return retFuture
|
||||
|
||||
proc write*[T](transp: StreamTransport, msg: seq[T], msglen = -1): Future[int] =
|
||||
proc write*[T](transp: StreamTransport, msg: sink seq[T],
|
||||
msglen = -1): Future[int] =
|
||||
## Write sequence ``msg`` using transport ``transp``.
|
||||
var retFuture = newFutureSeq[int, T]("stream.transport.write(seq)")
|
||||
transp.checkClosed(retFuture)
|
||||
@ -2165,12 +2175,17 @@ proc write*[T](transp: StreamTransport, msg: seq[T], msglen = -1): Future[int] =
|
||||
let
|
||||
written = nbytes - rbytes # In case fastWrite wrote some
|
||||
|
||||
pbytes = if not(isLiteral(msg)):
|
||||
shallowCopy(retFuture.gcholder, msg)
|
||||
cast[ptr byte](addr retFuture.gcholder[written])
|
||||
else:
|
||||
retFuture.gcholder = msg[written..<nbytes]
|
||||
cast[ptr byte](addr retFuture.gcholder[0])
|
||||
pbytes =
|
||||
when declared(shallowCopy):
|
||||
if not(isLiteral(msg)):
|
||||
shallowCopy(retFuture.gcholder, msg)
|
||||
cast[ptr byte](addr retFuture.gcholder[written])
|
||||
else:
|
||||
retFuture.gcholder = msg[written ..< nbytes]
|
||||
cast[ptr byte](addr retFuture.gcholder[0])
|
||||
else:
|
||||
retFuture.gcholder = msg[written ..< nbytes]
|
||||
cast[ptr byte](addr retFuture.gcholder[0])
|
||||
|
||||
var vector = StreamVector(kind: DataBuffer, writer: retFuture,
|
||||
buf: pbytes, buflen: rbytes, size: nbytes)
|
||||
|
Some files were not shown because too many files have changed in this diff Show More
Loading…
x
Reference in New Issue
Block a user