#
# Chronos HTTP/S common types
# (c) Copyright 2021-Present
# Status Research & Development GmbH
#
# Licensed under either of
# Apache License, version 2.0, (LICENSE-APACHEv2)
# MIT license (LICENSE-MIT)

import std/[strutils, uri]
import stew/results, httputils
import ../../asyncloop, ../../asyncsync
import ../../streams/[asyncstream, boundstream]
export results, httputils, strutils

const
  HeadersMark* = @[byte(0x0D), byte(0x0A), byte(0x0D), byte(0x0A)]
  PostMethods* = {MethodPost, MethodPatch, MethodPut, MethodDelete}

  MaximumBodySizeError* = "Maximum size of request's body reached"

type
  HttpResult*[T] = Result[T, string]
  HttpResultCode*[T] = Result[T, HttpCode]

  HttpDefect* = object of Defect
  HttpError* = object of CatchableError
  HttpCriticalError* = object of HttpError
    code*: HttpCode
  HttpRecoverableError* = object of HttpError
    code*: HttpCode
  HttpDisconnectError* = object of HttpError

  TransferEncodingFlags* {.pure.} = enum
    Identity, Chunked, Compress, Deflate, Gzip

  ContentEncodingFlags* {.pure.} = enum
    Identity, Br, Compress, Deflate, Gzip

  HttpBodyReader* = ref object of AsyncStreamReader
    streams*: seq[AsyncStreamReader]

proc newHttpBodyReader*(streams: varargs[AsyncStreamReader]): HttpBodyReader =
  ## HttpBodyReader is an AsyncStreamReader which holds references to all the
  ## given ``streams``. On close it will close all of the ``streams`` as well.
  ##
  ## The first stream in the sequence will be used as the source.
  doAssert(len(streams) > 0, "At least one stream must be added")
  var res = HttpBodyReader(streams: @streams)
  res.init(streams[0])
  res

proc closeWait*(bstream: HttpBodyReader) {.async.} =
  ## Close and free the resources allocated by the body reader.
  var res = newSeq[Future[void]]()
  # We close the streams in reverse order, because the stream at position [0]
  # uses data from the stream at position [1].
  for index in countdown((len(bstream.streams) - 1), 0):
    res.add(bstream.streams[index].closeWait())
  await allFutures(res)
  await procCall(closeWait(AsyncStreamReader(bstream)))

proc hasOverflow*(bstream: HttpBodyReader): bool {.raises: [Defect].} =
  if len(bstream.streams) == 1:
    # If HttpBodyReader has only one stream, it is a ``BoundedStreamReader``,
    # so it is impossible to read more bytes than the expected amount.
    false
  else:
    # If HttpBodyReader has two or more streams, we check whether the
    # ``BoundedStreamReader`` is at EOF.
    if bstream.streams[0].atEof():
      for i in 1 ..< len(bstream.streams):
        if not(bstream.streams[i].atEof()):
          return true
      false
    else:
      false

proc raiseHttpCriticalError*(msg: string,
                             code = Http400) {.noinline, noreturn.} =
  raise (ref HttpCriticalError)(code: code, msg: msg)

proc raiseHttpDisconnectError*() {.noinline, noreturn.} =
  raise (ref HttpDisconnectError)(msg: "Remote peer disconnected")

proc raiseHttpDefect*(msg: string) {.noinline, noreturn.} =
  raise (ref HttpDefect)(msg: msg)

iterator queryParams*(query: string): tuple[key: string, value: string] {.
         raises: [Defect].} =
  ## Iterate over an URL-encoded query string, yielding decoded
  ## ``(key, value)`` pairs.
  for pair in query.split('&'):
    let items = pair.split('=', maxsplit = 1)
    let k = items[0]
    if len(k) > 0:
      let v = if len(items) > 1: items[1] else: ""
      yield (decodeUrl(k), decodeUrl(v))
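
# A minimal usage sketch with illustrative values (not part of the module
# API): iterating over a query string yields decoded key/value pairs, and a
# key without ``=`` gets an empty value.
when isMainModule:
  block:
    var decoded: seq[string]
    for key, value in queryParams("a=1&b=hello%20world&c"):
      decoded.add(key & "=" & value)
    doAssert decoded == @["a=1", "b=hello world", "c="]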

func getTransferEncoding*(ch: openarray[string]): HttpResult[
       set[TransferEncodingFlags]] {.raises: [Defect].} =
  ## Parse the values of multiple ``Transfer-Encoding`` HTTP headers and
  ## return them as a set of ``TransferEncodingFlags``.
  var res: set[TransferEncodingFlags] = {}
  if len(ch) == 0:
    res.incl(TransferEncodingFlags.Identity)
    ok(res)
  else:
    for header in ch:
      for item in header.split(","):
        case strip(item.toLowerAscii())
        of "identity":
          res.incl(TransferEncodingFlags.Identity)
        of "chunked":
          res.incl(TransferEncodingFlags.Chunked)
        of "compress":
          res.incl(TransferEncodingFlags.Compress)
        of "deflate":
          res.incl(TransferEncodingFlags.Deflate)
        of "gzip":
          res.incl(TransferEncodingFlags.Gzip)
        of "x-gzip":
          res.incl(TransferEncodingFlags.Gzip)
        of "":
          res.incl(TransferEncodingFlags.Identity)
        else:
          return err("Incorrect Transfer-Encoding value")
    ok(res)
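
# A minimal sketch with assumed header values: tokens from several
# ``Transfer-Encoding`` headers are folded into one flag set, and an unknown
# token produces an error result.
when isMainModule:
  block:
    let flags = getTransferEncoding(["chunked", "gzip, identity"])
    doAssert flags.isOk()
    doAssert flags.get() == {TransferEncodingFlags.Chunked,
                             TransferEncodingFlags.Gzip,
                             TransferEncodingFlags.Identity}
    doAssert getTransferEncoding(["snappy"]).isErr()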

func getContentEncoding*(ch: openarray[string]): HttpResult[
       set[ContentEncodingFlags]] {.raises: [Defect].} =
  ## Parse the values of multiple ``Content-Encoding`` HTTP headers and
  ## return them as a set of ``ContentEncodingFlags``.
  var res: set[ContentEncodingFlags] = {}
  if len(ch) == 0:
    res.incl(ContentEncodingFlags.Identity)
    ok(res)
  else:
    for header in ch:
      for item in header.split(","):
        case strip(item.toLowerAscii())
        of "identity":
          res.incl(ContentEncodingFlags.Identity)
        of "br":
          res.incl(ContentEncodingFlags.Br)
        of "compress":
          res.incl(ContentEncodingFlags.Compress)
        of "deflate":
          res.incl(ContentEncodingFlags.Deflate)
        of "gzip":
          res.incl(ContentEncodingFlags.Gzip)
        of "x-gzip":
          res.incl(ContentEncodingFlags.Gzip)
        of "":
          res.incl(ContentEncodingFlags.Identity)
        else:
          return err("Incorrect Content-Encoding value")
    ok(res)
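
# A minimal sketch with assumed header values: ``Content-Encoding`` tokens
# map to the corresponding flags, and unknown tokens are rejected.
when isMainModule:
  block:
    doAssert getContentEncoding(["gzip, br"]).get() ==
      {ContentEncodingFlags.Gzip, ContentEncodingFlags.Br}
    doAssert getContentEncoding(["zstd"]).isErr()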

func getContentType*(ch: openarray[string]): HttpResult[string] {.
     raises: [Defect].} =
  ## Check and normalize the value of the ``Content-Type`` header: the media
  ## type is returned in lowercase with any parameters stripped.
  if len(ch) == 0:
    err("No Content-Type values found")
  elif len(ch) > 1:
    err("Multiple Content-Type values found")
  else:
    let mparts = ch[0].split(";")
    ok(strip(mparts[0]).toLowerAscii())
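
# A minimal sketch with an assumed header value: the media type is lowercased
# and parameters after ";" are dropped, while missing or duplicate headers
# yield an error result.
when isMainModule:
  block:
    doAssert getContentType(["Text/HTML; charset=UTF-8"]).get() == "text/html"
    doAssert getContentType(newSeq[string]()).isErr()
    doAssert getContentType(["text/html", "text/plain"]).isErr()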

proc bytesToString*(src: openarray[byte], dst: var openarray[char]) =
  ## Convert an array of bytes to an array of characters.
  ##
  ## Note that this procedure assumes ``sizeof(byte) == sizeof(char) == 1``.
  ## If this does not hold, this procedure MUST NOT be used.
  doAssert(len(src) == len(dst))
  if len(src) > 0:
    copyMem(addr dst[0], unsafeAddr src[0], len(src))

proc stringToBytes*(src: openarray[char], dst: var openarray[byte]) =
  ## Convert an array of characters to an array of bytes.
  ##
  ## Note that this procedure assumes ``sizeof(byte) == sizeof(char) == 1``.
  ## If this does not hold, this procedure MUST NOT be used.
  doAssert(len(src) == len(dst))
  if len(src) > 0:
    copyMem(addr dst[0], unsafeAddr src[0], len(src))

func bytesToString*(src: openarray[byte]): string =
  ## Convert an array of bytes to a string.
  ##
  ## Note that this procedure assumes ``sizeof(byte) == sizeof(char) == 1``.
  ## If this does not hold, this procedure MUST NOT be used.
  var default: string
  if len(src) > 0:
    var dst = newString(len(src))
    bytesToString(src, dst)
    dst
  else:
    default

func stringToBytes*(src: openarray[char]): seq[byte] =
  ## Convert a string to a sequence of bytes.
  ##
  ## Note that this procedure assumes ``sizeof(byte) == sizeof(char) == 1``.
  ## If this does not hold, this procedure MUST NOT be used.
  var default: seq[byte]
  if len(src) > 0:
    var dst = newSeq[byte](len(src))
    stringToBytes(src, dst)
    dst
  else:
    default
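
# A minimal roundtrip sketch (illustrative value): a string converted to
# bytes and back is unchanged, assuming the usual
# ``sizeof(byte) == sizeof(char) == 1``.
when isMainModule:
  block:
    let data = "chronos"
    doAssert bytesToString(stringToBytes(data)) == data
    doAssert stringToBytes("").len == 0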