0) {\n if(p < this.DB && (d = this.data[i]>>p) > 0) { m = true; r = int2char(d); }\n while(i >= 0) {\n if(p < k) {\n d = (this.data[i]&((1<>(p+=this.DB-k);\n } else {\n d = (this.data[i]>>(p-=k))&km;\n if(p <= 0) { p += this.DB; --i; }\n }\n if(d > 0) m = true;\n if(m) r += int2char(d);\n }\n }\n return m?r:\"0\";\n}\n\n// (public) -this\nfunction bnNegate() { var r = nbi(); BigInteger.ZERO.subTo(this,r); return r; }\n\n// (public) |this|\nfunction bnAbs() { return (this.s<0)?this.negate():this; }\n\n// (public) return + if this > a, - if this < a, 0 if equal\nfunction bnCompareTo(a) {\n var r = this.s-a.s;\n if(r != 0) return r;\n var i = this.t;\n r = i-a.t;\n if(r != 0) return (this.s<0)?-r:r;\n while(--i >= 0) if((r=this.data[i]-a.data[i]) != 0) return r;\n return 0;\n}\n\n// returns bit length of the integer x\nfunction nbits(x) {\n var r = 1, t;\n if((t=x>>>16) != 0) { x = t; r += 16; }\n if((t=x>>8) != 0) { x = t; r += 8; }\n if((t=x>>4) != 0) { x = t; r += 4; }\n if((t=x>>2) != 0) { x = t; r += 2; }\n if((t=x>>1) != 0) { x = t; r += 1; }\n return r;\n}\n\n// (public) return the number of bits in \"this\"\nfunction bnBitLength() {\n if(this.t <= 0) return 0;\n return this.DB*(this.t-1)+nbits(this.data[this.t-1]^(this.s&this.DM));\n}\n\n// (protected) r = this << n*DB\nfunction bnpDLShiftTo(n,r) {\n var i;\n for(i = this.t-1; i >= 0; --i) r.data[i+n] = this.data[i];\n for(i = n-1; i >= 0; --i) r.data[i] = 0;\n r.t = this.t+n;\n r.s = this.s;\n}\n\n// (protected) r = this >> n*DB\nfunction bnpDRShiftTo(n,r) {\n for(var i = n; i < this.t; ++i) r.data[i-n] = this.data[i];\n r.t = Math.max(this.t-n,0);\n r.s = this.s;\n}\n\n// (protected) r = this << n\nfunction bnpLShiftTo(n,r) {\n var bs = n%this.DB;\n var cbs = this.DB-bs;\n var bm = (1<= 0; --i) {\n r.data[i+ds+1] = (this.data[i]>>cbs)|c;\n c = (this.data[i]&bm)<= 0; --i) r.data[i] = 0;\n r.data[ds] = c;\n r.t = this.t+ds+1;\n r.s = this.s;\n r.clamp();\n}\n\n// (protected) r = this >> n\nfunction bnpRShiftTo(n,r) {\n r.s = this.s;\n var ds = Math.floor(n/this.DB);\n if(ds >= this.t) { r.t = 0; return; }\n var bs = n%this.DB;\n var cbs = this.DB-bs;\n var bm = (1<>bs;\n for(var i = ds+1; i < this.t; ++i) {\n r.data[i-ds-1] |= (this.data[i]&bm)<>bs;\n }\n if(bs > 0) r.data[this.t-ds-1] |= (this.s&bm)<>= this.DB;\n }\n if(a.t < this.t) {\n c -= a.s;\n while(i < this.t) {\n c += this.data[i];\n r.data[i++] = c&this.DM;\n c >>= this.DB;\n }\n c += this.s;\n } else {\n c += this.s;\n while(i < a.t) {\n c -= a.data[i];\n r.data[i++] = c&this.DM;\n c >>= this.DB;\n }\n c -= a.s;\n }\n r.s = (c<0)?-1:0;\n if(c < -1) r.data[i++] = this.DV+c;\n else if(c > 0) r.data[i++] = c;\n r.t = i;\n r.clamp();\n}\n\n// (protected) r = this * a, r != this,a (HAC 14.12)\n// \"this\" should be the larger one if appropriate.\nfunction bnpMultiplyTo(a,r) {\n var x = this.abs(), y = a.abs();\n var i = x.t;\n r.t = i+y.t;\n while(--i >= 0) r.data[i] = 0;\n for(i = 0; i < y.t; ++i) r.data[i+x.t] = x.am(0,y.data[i],r,i,0,x.t);\n r.s = 0;\n r.clamp();\n if(this.s != a.s) BigInteger.ZERO.subTo(r,r);\n}\n\n// (protected) r = this^2, r != this (HAC 14.16)\nfunction bnpSquareTo(r) {\n var x = this.abs();\n var i = r.t = 2*x.t;\n while(--i >= 0) r.data[i] = 0;\n for(i = 0; i < x.t-1; ++i) {\n var c = x.am(i,x.data[i],r,2*i,0,1);\n if((r.data[i+x.t]+=x.am(i+1,2*x.data[i],r,2*i+1,c,x.t-i-1)) >= x.DV) {\n r.data[i+x.t] -= x.DV;\n r.data[i+x.t+1] = 1;\n }\n }\n if(r.t > 0) r.data[r.t-1] += x.am(i,x.data[i],r,2*i,0,1);\n r.s = 0;\n r.clamp();\n}\n\n// (protected) divide 
this by m, quotient and remainder to q, r (HAC 14.20)\n// r != q, this != m. q or r may be null.\nfunction bnpDivRemTo(m,q,r) {\n var pm = m.abs();\n if(pm.t <= 0) return;\n var pt = this.abs();\n if(pt.t < pm.t) {\n if(q != null) q.fromInt(0);\n if(r != null) this.copyTo(r);\n return;\n }\n if(r == null) r = nbi();\n var y = nbi(), ts = this.s, ms = m.s;\n var nsh = this.DB-nbits(pm.data[pm.t-1]);\t// normalize modulus\n if(nsh > 0) { pm.lShiftTo(nsh,y); pt.lShiftTo(nsh,r); } else { pm.copyTo(y); pt.copyTo(r); }\n var ys = y.t;\n var y0 = y.data[ys-1];\n if(y0 == 0) return;\n var yt = y0*(1<1)?y.data[ys-2]>>this.F2:0);\n var d1 = this.FV/yt, d2 = (1<= 0) {\n r.data[r.t++] = 1;\n r.subTo(t,r);\n }\n BigInteger.ONE.dlShiftTo(ys,t);\n t.subTo(y,y);\t// \"negative\" y so we can replace sub with am later\n while(y.t < ys) y.data[y.t++] = 0;\n while(--j >= 0) {\n // Estimate quotient digit\n var qd = (r.data[--i]==y0)?this.DM:Math.floor(r.data[i]*d1+(r.data[i-1]+e)*d2);\n if((r.data[i]+=y.am(0,qd,r,j,0,ys)) < qd) {\t// Try it out\n y.dlShiftTo(j,t);\n r.subTo(t,r);\n while(r.data[i] < --qd) r.subTo(t,r);\n }\n }\n if(q != null) {\n r.drShiftTo(ys,q);\n if(ts != ms) BigInteger.ZERO.subTo(q,q);\n }\n r.t = ys;\n r.clamp();\n if(nsh > 0) r.rShiftTo(nsh,r);\t// Denormalize remainder\n if(ts < 0) BigInteger.ZERO.subTo(r,r);\n}\n\n// (public) this mod a\nfunction bnMod(a) {\n var r = nbi();\n this.abs().divRemTo(a,null,r);\n if(this.s < 0 && r.compareTo(BigInteger.ZERO) > 0) a.subTo(r,r);\n return r;\n}\n\n// Modular reduction using \"classic\" algorithm\nfunction Classic(m) { this.m = m; }\nfunction cConvert(x) {\n if(x.s < 0 || x.compareTo(this.m) >= 0) return x.mod(this.m);\n else return x;\n}\nfunction cRevert(x) { return x; }\nfunction cReduce(x) { x.divRemTo(this.m,null,x); }\nfunction cMulTo(x,y,r) { x.multiplyTo(y,r); this.reduce(r); }\nfunction cSqrTo(x,r) { x.squareTo(r); this.reduce(r); }\n\nClassic.prototype.convert = cConvert;\nClassic.prototype.revert = cRevert;\nClassic.prototype.reduce = cReduce;\nClassic.prototype.mulTo = cMulTo;\nClassic.prototype.sqrTo = cSqrTo;\n\n// (protected) return \"-1/this % 2^DB\"; useful for Mont. 
reduction\n// justification:\n// xy == 1 (mod m)\n// xy = 1+km\n// xy(2-xy) = (1+km)(1-km)\n// x[y(2-xy)] = 1-k^2m^2\n// x[y(2-xy)] == 1 (mod m^2)\n// if y is 1/x mod m, then y(2-xy) is 1/x mod m^2\n// should reduce x and y(2-xy) by m^2 at each step to keep size bounded.\n// JS multiply \"overflows\" differently from C/C++, so care is needed here.\nfunction bnpInvDigit() {\n if(this.t < 1) return 0;\n var x = this.data[0];\n if((x&1) == 0) return 0;\n var y = x&3;\t\t// y == 1/x mod 2^2\n y = (y*(2-(x&0xf)*y))&0xf;\t// y == 1/x mod 2^4\n y = (y*(2-(x&0xff)*y))&0xff;\t// y == 1/x mod 2^8\n y = (y*(2-(((x&0xffff)*y)&0xffff)))&0xffff;\t// y == 1/x mod 2^16\n // last step - calculate inverse mod DV directly;\n // assumes 16 < DB <= 32 and assumes ability to handle 48-bit ints\n y = (y*(2-x*y%this.DV))%this.DV;\t\t// y == 1/x mod 2^dbits\n // we really want the negative inverse, and -DV < y < DV\n return (y>0)?this.DV-y:-y;\n}\n\n// Montgomery reduction\nfunction Montgomery(m) {\n this.m = m;\n this.mp = m.invDigit();\n this.mpl = this.mp&0x7fff;\n this.mph = this.mp>>15;\n this.um = (1<<(m.DB-15))-1;\n this.mt2 = 2*m.t;\n}\n\n// xR mod m\nfunction montConvert(x) {\n var r = nbi();\n x.abs().dlShiftTo(this.m.t,r);\n r.divRemTo(this.m,null,r);\n if(x.s < 0 && r.compareTo(BigInteger.ZERO) > 0) this.m.subTo(r,r);\n return r;\n}\n\n// x/R mod m\nfunction montRevert(x) {\n var r = nbi();\n x.copyTo(r);\n this.reduce(r);\n return r;\n}\n\n// x = x/R mod m (HAC 14.32)\nfunction montReduce(x) {\n while(x.t <= this.mt2)\t// pad x so am has enough room later\n x.data[x.t++] = 0;\n for(var i = 0; i < this.m.t; ++i) {\n // faster way of calculating u0 = x.data[i]*mp mod DV\n var j = x.data[i]&0x7fff;\n var u0 = (j*this.mpl+(((j*this.mph+(x.data[i]>>15)*this.mpl)&this.um)<<15))&x.DM;\n // use am to combine the multiply-shift-add into one call\n j = i+this.m.t;\n x.data[j] += this.m.am(0,u0,x,i,0,this.m.t);\n // propagate carry\n while(x.data[j] >= x.DV) { x.data[j] -= x.DV; x.data[++j]++; }\n }\n x.clamp();\n x.drShiftTo(this.m.t,x);\n if(x.compareTo(this.m) >= 0) x.subTo(this.m,x);\n}\n\n// r = \"x^2/R mod m\"; x != r\nfunction montSqrTo(x,r) { x.squareTo(r); this.reduce(r); }\n\n// r = \"xy/R mod m\"; x,y != r\nfunction montMulTo(x,y,r) { x.multiplyTo(y,r); this.reduce(r); }\n\nMontgomery.prototype.convert = montConvert;\nMontgomery.prototype.revert = montRevert;\nMontgomery.prototype.reduce = montReduce;\nMontgomery.prototype.mulTo = montMulTo;\nMontgomery.prototype.sqrTo = montSqrTo;\n\n// (protected) true iff this is even\nfunction bnpIsEven() { return ((this.t>0)?(this.data[0]&1):this.s) == 0; }\n\n// (protected) this^e, e < 2^32, doing sqr and mul with \"r\" (HAC 14.79)\nfunction bnpExp(e,z) {\n if(e > 0xffffffff || e < 1) return BigInteger.ONE;\n var r = nbi(), r2 = nbi(), g = z.convert(this), i = nbits(e)-1;\n g.copyTo(r);\n while(--i >= 0) {\n z.sqrTo(r,r2);\n if((e&(1< 0) z.mulTo(r2,g,r);\n else { var t = r; r = r2; r2 = t; }\n }\n return z.revert(r);\n}\n\n// (public) this^e % m, 0 <= e < 2^32\nfunction bnModPowInt(e,m) {\n var z;\n if(e < 256 || m.isEven()) z = new Classic(m); else z = new Montgomery(m);\n return this.exp(e,z);\n}\n\n// protected\nBigInteger.prototype.copyTo = bnpCopyTo;\nBigInteger.prototype.fromInt = bnpFromInt;\nBigInteger.prototype.fromString = bnpFromString;\nBigInteger.prototype.clamp = bnpClamp;\nBigInteger.prototype.dlShiftTo = bnpDLShiftTo;\nBigInteger.prototype.drShiftTo = bnpDRShiftTo;\nBigInteger.prototype.lShiftTo = bnpLShiftTo;\nBigInteger.prototype.rShiftTo = 
bnpRShiftTo;\nBigInteger.prototype.subTo = bnpSubTo;\nBigInteger.prototype.multiplyTo = bnpMultiplyTo;\nBigInteger.prototype.squareTo = bnpSquareTo;\nBigInteger.prototype.divRemTo = bnpDivRemTo;\nBigInteger.prototype.invDigit = bnpInvDigit;\nBigInteger.prototype.isEven = bnpIsEven;\nBigInteger.prototype.exp = bnpExp;\n\n// public\nBigInteger.prototype.toString = bnToString;\nBigInteger.prototype.negate = bnNegate;\nBigInteger.prototype.abs = bnAbs;\nBigInteger.prototype.compareTo = bnCompareTo;\nBigInteger.prototype.bitLength = bnBitLength;\nBigInteger.prototype.mod = bnMod;\nBigInteger.prototype.modPowInt = bnModPowInt;\n\n// \"constants\"\nBigInteger.ZERO = nbv(0);\nBigInteger.ONE = nbv(1);\n\n// jsbn2 lib\n\n//Copyright (c) 2005-2009 Tom Wu\n//All Rights Reserved.\n//See \"LICENSE\" for details (See jsbn.js for LICENSE).\n\n//Extended JavaScript BN functions, required for RSA private ops.\n\n//Version 1.1: new BigInteger(\"0\", 10) returns \"proper\" zero\n\n//(public)\nfunction bnClone() { var r = nbi(); this.copyTo(r); return r; }\n\n//(public) return value as integer\nfunction bnIntValue() {\nif(this.s < 0) {\n if(this.t == 1) return this.data[0]-this.DV;\n else if(this.t == 0) return -1;\n} else if(this.t == 1) return this.data[0];\nelse if(this.t == 0) return 0;\n// assumes 16 < DB < 32\nreturn ((this.data[1]&((1<<(32-this.DB))-1))<>24; }\n\n//(public) return value as short (assumes DB>=16)\nfunction bnShortValue() { return (this.t==0)?this.s:(this.data[0]<<16)>>16; }\n\n//(protected) return x s.t. r^x < DV\nfunction bnpChunkSize(r) { return Math.floor(Math.LN2*this.DB/Math.log(r)); }\n\n//(public) 0 if this == 0, 1 if this > 0\nfunction bnSigNum() {\nif(this.s < 0) return -1;\nelse if(this.t <= 0 || (this.t == 1 && this.data[0] <= 0)) return 0;\nelse return 1;\n}\n\n//(protected) convert to radix string\nfunction bnpToRadix(b) {\nif(b == null) b = 10;\nif(this.signum() == 0 || b < 2 || b > 36) return \"0\";\nvar cs = this.chunkSize(b);\nvar a = Math.pow(b,cs);\nvar d = nbv(a), y = nbi(), z = nbi(), r = \"\";\nthis.divRemTo(d,y,z);\nwhile(y.signum() > 0) {\n r = (a+z.intValue()).toString(b).substr(1) + r;\n y.divRemTo(d,y,z);\n}\nreturn z.intValue().toString(b) + r;\n}\n\n//(protected) convert from radix string\nfunction bnpFromRadix(s,b) {\nthis.fromInt(0);\nif(b == null) b = 10;\nvar cs = this.chunkSize(b);\nvar d = Math.pow(b,cs), mi = false, j = 0, w = 0;\nfor(var i = 0; i < s.length; ++i) {\n var x = intAt(s,i);\n if(x < 0) {\n if(s.charAt(i) == \"-\" && this.signum() == 0) mi = true;\n continue;\n }\n w = b*w+x;\n if(++j >= cs) {\n this.dMultiply(d);\n this.dAddOffset(w,0);\n j = 0;\n w = 0;\n }\n}\nif(j > 0) {\n this.dMultiply(Math.pow(b,j));\n this.dAddOffset(w,0);\n}\nif(mi) BigInteger.ZERO.subTo(this,this);\n}\n\n//(protected) alternate constructor\nfunction bnpFromNumber(a,b,c) {\nif(\"number\" == typeof b) {\n // new BigInteger(int,int,RNG)\n if(a < 2) this.fromInt(1);\n else {\n this.fromNumber(a,c);\n if(!this.testBit(a-1)) // force MSB set\n this.bitwiseTo(BigInteger.ONE.shiftLeft(a-1),op_or,this);\n if(this.isEven()) this.dAddOffset(1,0); // force odd\n while(!this.isProbablePrime(b)) {\n this.dAddOffset(2,0);\n if(this.bitLength() > a) this.subTo(BigInteger.ONE.shiftLeft(a-1),this);\n }\n }\n} else {\n // new BigInteger(int,RNG)\n var x = new Array(), t = a&7;\n x.length = (a>>3)+1;\n b.nextBytes(x);\n if(t > 0) x[0] &= ((1< 0) {\n if(p < this.DB && (d = this.data[i]>>p) != (this.s&this.DM)>>p)\n r[k++] = d|(this.s<<(this.DB-p));\n while(i >= 0) {\n if(p < 8) {\n d 
= (this.data[i]&((1<>(p+=this.DB-8);\n } else {\n d = (this.data[i]>>(p-=8))&0xff;\n if(p <= 0) { p += this.DB; --i; }\n }\n if((d&0x80) != 0) d |= -256;\n if(k == 0 && (this.s&0x80) != (d&0x80)) ++k;\n if(k > 0 || d != this.s) r[k++] = d;\n }\n}\nreturn r;\n}\n\nfunction bnEquals(a) { return(this.compareTo(a)==0); }\nfunction bnMin(a) { return(this.compareTo(a)<0)?this:a; }\nfunction bnMax(a) { return(this.compareTo(a)>0)?this:a; }\n\n//(protected) r = this op a (bitwise)\nfunction bnpBitwiseTo(a,op,r) {\nvar i, f, m = Math.min(a.t,this.t);\nfor(i = 0; i < m; ++i) r.data[i] = op(this.data[i],a.data[i]);\nif(a.t < this.t) {\n f = a.s&this.DM;\n for(i = m; i < this.t; ++i) r.data[i] = op(this.data[i],f);\n r.t = this.t;\n} else {\n f = this.s&this.DM;\n for(i = m; i < a.t; ++i) r.data[i] = op(f,a.data[i]);\n r.t = a.t;\n}\nr.s = op(this.s,a.s);\nr.clamp();\n}\n\n//(public) this & a\nfunction op_and(x,y) { return x&y; }\nfunction bnAnd(a) { var r = nbi(); this.bitwiseTo(a,op_and,r); return r; }\n\n//(public) this | a\nfunction op_or(x,y) { return x|y; }\nfunction bnOr(a) { var r = nbi(); this.bitwiseTo(a,op_or,r); return r; }\n\n//(public) this ^ a\nfunction op_xor(x,y) { return x^y; }\nfunction bnXor(a) { var r = nbi(); this.bitwiseTo(a,op_xor,r); return r; }\n\n//(public) this & ~a\nfunction op_andnot(x,y) { return x&~y; }\nfunction bnAndNot(a) { var r = nbi(); this.bitwiseTo(a,op_andnot,r); return r; }\n\n//(public) ~this\nfunction bnNot() {\nvar r = nbi();\nfor(var i = 0; i < this.t; ++i) r.data[i] = this.DM&~this.data[i];\nr.t = this.t;\nr.s = ~this.s;\nreturn r;\n}\n\n//(public) this << n\nfunction bnShiftLeft(n) {\nvar r = nbi();\nif(n < 0) this.rShiftTo(-n,r); else this.lShiftTo(n,r);\nreturn r;\n}\n\n//(public) this >> n\nfunction bnShiftRight(n) {\nvar r = nbi();\nif(n < 0) this.lShiftTo(-n,r); else this.rShiftTo(n,r);\nreturn r;\n}\n\n//return index of lowest 1-bit in x, x < 2^31\nfunction lbit(x) {\nif(x == 0) return -1;\nvar r = 0;\nif((x&0xffff) == 0) { x >>= 16; r += 16; }\nif((x&0xff) == 0) { x >>= 8; r += 8; }\nif((x&0xf) == 0) { x >>= 4; r += 4; }\nif((x&3) == 0) { x >>= 2; r += 2; }\nif((x&1) == 0) ++r;\nreturn r;\n}\n\n//(public) returns index of lowest 1-bit (or -1 if none)\nfunction bnGetLowestSetBit() {\nfor(var i = 0; i < this.t; ++i)\n if(this.data[i] != 0) return i*this.DB+lbit(this.data[i]);\nif(this.s < 0) return this.t*this.DB;\nreturn -1;\n}\n\n//return number of 1 bits in x\nfunction cbit(x) {\nvar r = 0;\nwhile(x != 0) { x &= x-1; ++r; }\nreturn r;\n}\n\n//(public) return number of set bits\nfunction bnBitCount() {\nvar r = 0, x = this.s&this.DM;\nfor(var i = 0; i < this.t; ++i) r += cbit(this.data[i]^x);\nreturn r;\n}\n\n//(public) true iff nth bit is set\nfunction bnTestBit(n) {\nvar j = Math.floor(n/this.DB);\nif(j >= this.t) return(this.s!=0);\nreturn((this.data[j]&(1<<(n%this.DB)))!=0);\n}\n\n//(protected) this op (1<>= this.DB;\n}\nif(a.t < this.t) {\n c += a.s;\n while(i < this.t) {\n c += this.data[i];\n r.data[i++] = c&this.DM;\n c >>= this.DB;\n }\n c += this.s;\n} else {\n c += this.s;\n while(i < a.t) {\n c += a.data[i];\n r.data[i++] = c&this.DM;\n c >>= this.DB;\n }\n c += a.s;\n}\nr.s = (c<0)?-1:0;\nif(c > 0) r.data[i++] = c;\nelse if(c < -1) r.data[i++] = this.DV+c;\nr.t = i;\nr.clamp();\n}\n\n//(public) this + a\nfunction bnAdd(a) { var r = nbi(); this.addTo(a,r); return r; }\n\n//(public) this - a\nfunction bnSubtract(a) { var r = nbi(); this.subTo(a,r); return r; }\n\n//(public) this * a\nfunction bnMultiply(a) { var r = nbi(); 
this.multiplyTo(a,r); return r; }\n\n//(public) this / a\nfunction bnDivide(a) { var r = nbi(); this.divRemTo(a,r,null); return r; }\n\n//(public) this % a\nfunction bnRemainder(a) { var r = nbi(); this.divRemTo(a,null,r); return r; }\n\n//(public) [this/a,this%a]\nfunction bnDivideAndRemainder(a) {\nvar q = nbi(), r = nbi();\nthis.divRemTo(a,q,r);\nreturn new Array(q,r);\n}\n\n//(protected) this *= n, this >= 0, 1 < n < DV\nfunction bnpDMultiply(n) {\nthis.data[this.t] = this.am(0,n-1,this,0,0,this.t);\n++this.t;\nthis.clamp();\n}\n\n//(protected) this += n << w words, this >= 0\nfunction bnpDAddOffset(n,w) {\nif(n == 0) return;\nwhile(this.t <= w) this.data[this.t++] = 0;\nthis.data[w] += n;\nwhile(this.data[w] >= this.DV) {\n this.data[w] -= this.DV;\n if(++w >= this.t) this.data[this.t++] = 0;\n ++this.data[w];\n}\n}\n\n//A \"null\" reducer\nfunction NullExp() {}\nfunction nNop(x) { return x; }\nfunction nMulTo(x,y,r) { x.multiplyTo(y,r); }\nfunction nSqrTo(x,r) { x.squareTo(r); }\n\nNullExp.prototype.convert = nNop;\nNullExp.prototype.revert = nNop;\nNullExp.prototype.mulTo = nMulTo;\nNullExp.prototype.sqrTo = nSqrTo;\n\n//(public) this^e\nfunction bnPow(e) { return this.exp(e,new NullExp()); }\n\n//(protected) r = lower n words of \"this * a\", a.t <= n\n//\"this\" should be the larger one if appropriate.\nfunction bnpMultiplyLowerTo(a,n,r) {\nvar i = Math.min(this.t+a.t,n);\nr.s = 0; // assumes a,this >= 0\nr.t = i;\nwhile(i > 0) r.data[--i] = 0;\nvar j;\nfor(j = r.t-this.t; i < j; ++i) r.data[i+this.t] = this.am(0,a.data[i],r,i,0,this.t);\nfor(j = Math.min(a.t,n); i < j; ++i) this.am(0,a.data[i],r,i,0,n-i);\nr.clamp();\n}\n\n//(protected) r = \"this * a\" without lower n words, n > 0\n//\"this\" should be the larger one if appropriate.\nfunction bnpMultiplyUpperTo(a,n,r) {\n--n;\nvar i = r.t = this.t+a.t-n;\nr.s = 0; // assumes a,this >= 0\nwhile(--i >= 0) r.data[i] = 0;\nfor(i = Math.max(n-this.t,0); i < a.t; ++i)\n r.data[this.t+i-n] = this.am(n-i,a.data[i],r,0,0,this.t+i-n);\nr.clamp();\nr.drShiftTo(1,r);\n}\n\n//Barrett modular reduction\nfunction Barrett(m) {\n// setup Barrett\nthis.r2 = nbi();\nthis.q3 = nbi();\nBigInteger.ONE.dlShiftTo(2*m.t,this.r2);\nthis.mu = this.r2.divide(m);\nthis.m = m;\n}\n\nfunction barrettConvert(x) {\nif(x.s < 0 || x.t > 2*this.m.t) return x.mod(this.m);\nelse if(x.compareTo(this.m) < 0) return x;\nelse { var r = nbi(); x.copyTo(r); this.reduce(r); return r; }\n}\n\nfunction barrettRevert(x) { return x; }\n\n//x = x mod m (HAC 14.42)\nfunction barrettReduce(x) {\nx.drShiftTo(this.m.t-1,this.r2);\nif(x.t > this.m.t+1) { x.t = this.m.t+1; x.clamp(); }\nthis.mu.multiplyUpperTo(this.r2,this.m.t+1,this.q3);\nthis.m.multiplyLowerTo(this.q3,this.m.t+1,this.r2);\nwhile(x.compareTo(this.r2) < 0) x.dAddOffset(1,this.m.t+1);\nx.subTo(this.r2,x);\nwhile(x.compareTo(this.m) >= 0) x.subTo(this.m,x);\n}\n\n//r = x^2 mod m; x != r\nfunction barrettSqrTo(x,r) { x.squareTo(r); this.reduce(r); }\n\n//r = x*y mod m; x,y != r\nfunction barrettMulTo(x,y,r) { x.multiplyTo(y,r); this.reduce(r); }\n\nBarrett.prototype.convert = barrettConvert;\nBarrett.prototype.revert = barrettRevert;\nBarrett.prototype.reduce = barrettReduce;\nBarrett.prototype.mulTo = barrettMulTo;\nBarrett.prototype.sqrTo = barrettSqrTo;\n\n//(public) this^e % m (HAC 14.85)\nfunction bnModPow(e,m) {\nvar i = e.bitLength(), k, r = nbv(1), z;\nif(i <= 0) return r;\nelse if(i < 18) k = 1;\nelse if(i < 48) k = 3;\nelse if(i < 144) k = 4;\nelse if(i < 768) k = 5;\nelse k = 6;\nif(i < 8)\n z = new 
Classic(m);\nelse if(m.isEven())\n z = new Barrett(m);\nelse\n z = new Montgomery(m);\n\n// precomputation\nvar g = new Array(), n = 3, k1 = k-1, km = (1< 1) {\n var g2 = nbi();\n z.sqrTo(g[1],g2);\n while(n <= km) {\n g[n] = nbi();\n z.mulTo(g2,g[n-2],g[n]);\n n += 2;\n }\n}\n\nvar j = e.t-1, w, is1 = true, r2 = nbi(), t;\ni = nbits(e.data[j])-1;\nwhile(j >= 0) {\n if(i >= k1) w = (e.data[j]>>(i-k1))&km;\n else {\n w = (e.data[j]&((1<<(i+1))-1))<<(k1-i);\n if(j > 0) w |= e.data[j-1]>>(this.DB+i-k1);\n }\n\n n = k;\n while((w&1) == 0) { w >>= 1; --n; }\n if((i -= n) < 0) { i += this.DB; --j; }\n if(is1) { // ret == 1, don't bother squaring or multiplying it\n g[w].copyTo(r);\n is1 = false;\n } else {\n while(n > 1) { z.sqrTo(r,r2); z.sqrTo(r2,r); n -= 2; }\n if(n > 0) z.sqrTo(r,r2); else { t = r; r = r2; r2 = t; }\n z.mulTo(r2,g[w],r);\n }\n\n while(j >= 0 && (e.data[j]&(1< 0) {\n x.rShiftTo(g,x);\n y.rShiftTo(g,y);\n}\nwhile(x.signum() > 0) {\n if((i = x.getLowestSetBit()) > 0) x.rShiftTo(i,x);\n if((i = y.getLowestSetBit()) > 0) y.rShiftTo(i,y);\n if(x.compareTo(y) >= 0) {\n x.subTo(y,x);\n x.rShiftTo(1,x);\n } else {\n y.subTo(x,y);\n y.rShiftTo(1,y);\n }\n}\nif(g > 0) y.lShiftTo(g,y);\nreturn y;\n}\n\n//(protected) this % n, n < 2^26\nfunction bnpModInt(n) {\nif(n <= 0) return 0;\nvar d = this.DV%n, r = (this.s<0)?n-1:0;\nif(this.t > 0)\n if(d == 0) r = this.data[0]%n;\n else for(var i = this.t-1; i >= 0; --i) r = (d*r+this.data[i])%n;\nreturn r;\n}\n\n//(public) 1/this % m (HAC 14.61)\nfunction bnModInverse(m) {\nvar ac = m.isEven();\nif((this.isEven() && ac) || m.signum() == 0) return BigInteger.ZERO;\nvar u = m.clone(), v = this.clone();\nvar a = nbv(1), b = nbv(0), c = nbv(0), d = nbv(1);\nwhile(u.signum() != 0) {\n while(u.isEven()) {\n u.rShiftTo(1,u);\n if(ac) {\n if(!a.isEven() || !b.isEven()) { a.addTo(this,a); b.subTo(m,b); }\n a.rShiftTo(1,a);\n } else if(!b.isEven()) b.subTo(m,b);\n b.rShiftTo(1,b);\n }\n while(v.isEven()) {\n v.rShiftTo(1,v);\n if(ac) {\n if(!c.isEven() || !d.isEven()) { c.addTo(this,c); d.subTo(m,d); }\n c.rShiftTo(1,c);\n } else if(!d.isEven()) d.subTo(m,d);\n d.rShiftTo(1,d);\n }\n if(u.compareTo(v) >= 0) {\n u.subTo(v,u);\n if(ac) a.subTo(c,a);\n b.subTo(d,b);\n } else {\n v.subTo(u,v);\n if(ac) c.subTo(a,c);\n d.subTo(b,d);\n }\n}\nif(v.compareTo(BigInteger.ONE) != 0) return BigInteger.ZERO;\nif(d.compareTo(m) >= 0) return d.subtract(m);\nif(d.signum() < 0) d.addTo(m,d); else return d;\nif(d.signum() < 0) return d.add(m); else return d;\n}\n\nvar lowprimes = [2,3,5,7,11,13,17,19,23,29,31,37,41,43,47,53,59,61,67,71,73,79,83,89,97,101,103,107,109,113,127,131,137,139,149,151,157,163,167,173,179,181,191,193,197,199,211,223,227,229,233,239,241,251,257,263,269,271,277,281,283,293,307,311,313,317,331,337,347,349,353,359,367,373,379,383,389,397,401,409,419,421,431,433,439,443,449,457,461,463,467,479,487,491,499,503,509];\nvar lplim = (1<<26)/lowprimes[lowprimes.length-1];\n\n//(public) test primality with certainty >= 1-.5^t\nfunction bnIsProbablePrime(t) {\nvar i, x = this.abs();\nif(x.t == 1 && x.data[0] <= lowprimes[lowprimes.length-1]) {\n for(i = 0; i < lowprimes.length; ++i)\n if(x.data[0] == lowprimes[i]) return true;\n return false;\n}\nif(x.isEven()) return false;\ni = 1;\nwhile(i < lowprimes.length) {\n var m = lowprimes[i], j = i+1;\n while(j < lowprimes.length && m < lplim) m *= lowprimes[j++];\n m = x.modInt(m);\n while(i < j) if(m%lowprimes[i++] == 0) return false;\n}\nreturn x.millerRabin(t);\n}\n\n//(protected) true if probably prime (HAC 
4.24, Miller-Rabin)\nfunction bnpMillerRabin(t) {\nvar n1 = this.subtract(BigInteger.ONE);\nvar k = n1.getLowestSetBit();\nif(k <= 0) return false;\nvar r = n1.shiftRight(k);\nvar prng = bnGetPrng();\nvar a;\nfor(var i = 0; i < t; ++i) {\n // select witness 'a' at random from between 1 and n1\n do {\n a = new BigInteger(this.bitLength(), prng);\n }\n while(a.compareTo(BigInteger.ONE) <= 0 || a.compareTo(n1) >= 0);\n var y = a.modPow(r,this);\n if(y.compareTo(BigInteger.ONE) != 0 && y.compareTo(n1) != 0) {\n var j = 1;\n while(j++ < k && y.compareTo(n1) != 0) {\n y = y.modPowInt(2,this);\n if(y.compareTo(BigInteger.ONE) == 0) return false;\n }\n if(y.compareTo(n1) != 0) return false;\n }\n}\nreturn true;\n}\n\n// get pseudo random number generator\nfunction bnGetPrng() {\n // create prng with api that matches BigInteger secure random\n return {\n // x is an array to fill with bytes\n nextBytes: function(x) {\n for(var i = 0; i < x.length; ++i) {\n x[i] = Math.floor(Math.random() * 0x0100);\n }\n }\n };\n}\n\n//protected\nBigInteger.prototype.chunkSize = bnpChunkSize;\nBigInteger.prototype.toRadix = bnpToRadix;\nBigInteger.prototype.fromRadix = bnpFromRadix;\nBigInteger.prototype.fromNumber = bnpFromNumber;\nBigInteger.prototype.bitwiseTo = bnpBitwiseTo;\nBigInteger.prototype.changeBit = bnpChangeBit;\nBigInteger.prototype.addTo = bnpAddTo;\nBigInteger.prototype.dMultiply = bnpDMultiply;\nBigInteger.prototype.dAddOffset = bnpDAddOffset;\nBigInteger.prototype.multiplyLowerTo = bnpMultiplyLowerTo;\nBigInteger.prototype.multiplyUpperTo = bnpMultiplyUpperTo;\nBigInteger.prototype.modInt = bnpModInt;\nBigInteger.prototype.millerRabin = bnpMillerRabin;\n\n//public\nBigInteger.prototype.clone = bnClone;\nBigInteger.prototype.intValue = bnIntValue;\nBigInteger.prototype.byteValue = bnByteValue;\nBigInteger.prototype.shortValue = bnShortValue;\nBigInteger.prototype.signum = bnSigNum;\nBigInteger.prototype.toByteArray = bnToByteArray;\nBigInteger.prototype.equals = bnEquals;\nBigInteger.prototype.min = bnMin;\nBigInteger.prototype.max = bnMax;\nBigInteger.prototype.and = bnAnd;\nBigInteger.prototype.or = bnOr;\nBigInteger.prototype.xor = bnXor;\nBigInteger.prototype.andNot = bnAndNot;\nBigInteger.prototype.not = bnNot;\nBigInteger.prototype.shiftLeft = bnShiftLeft;\nBigInteger.prototype.shiftRight = bnShiftRight;\nBigInteger.prototype.getLowestSetBit = bnGetLowestSetBit;\nBigInteger.prototype.bitCount = bnBitCount;\nBigInteger.prototype.testBit = bnTestBit;\nBigInteger.prototype.setBit = bnSetBit;\nBigInteger.prototype.clearBit = bnClearBit;\nBigInteger.prototype.flipBit = bnFlipBit;\nBigInteger.prototype.add = bnAdd;\nBigInteger.prototype.subtract = bnSubtract;\nBigInteger.prototype.multiply = bnMultiply;\nBigInteger.prototype.divide = bnDivide;\nBigInteger.prototype.remainder = bnRemainder;\nBigInteger.prototype.divideAndRemainder = bnDivideAndRemainder;\nBigInteger.prototype.modPow = bnModPow;\nBigInteger.prototype.modInverse = bnModInverse;\nBigInteger.prototype.pow = bnPow;\nBigInteger.prototype.gcd = bnGCD;\nBigInteger.prototype.isProbablePrime = bnIsProbablePrime;\n\n//BigInteger interfaces not implemented in jsbn:\n\n//BigInteger(int signum, byte[] magnitude)\n//double doubleValue()\n//float floatValue()\n//int hashCode()\n//long longValue()\n//static BigInteger valueOf(long val)\n\n\n//# sourceURL=webpack://@waku/noise-rtc/./node_modules/node-forge/lib/jsbn.js?");
/***/ }),
/***/ "./node_modules/node-forge/lib/md.js":
/*!*******************************************!*\
!*** ./node_modules/node-forge/lib/md.js ***!
\*******************************************/
/***/ ((module, __unused_webpack_exports, __webpack_require__) => {
eval("/**\n * Node.js module for Forge message digests.\n *\n * @author Dave Longley\n *\n * Copyright 2011-2017 Digital Bazaar, Inc.\n */\nvar forge = __webpack_require__(/*! ./forge */ \"./node_modules/node-forge/lib/forge.js\");\n\nmodule.exports = forge.md = forge.md || {};\nforge.md.algorithms = forge.md.algorithms || {};\n\n\n//# sourceURL=webpack://@waku/noise-rtc/./node_modules/node-forge/lib/md.js?");
/***/ }),
/***/ "./node_modules/node-forge/lib/mgf.js":
/*!********************************************!*\
!*** ./node_modules/node-forge/lib/mgf.js ***!
\********************************************/
/***/ ((module, __unused_webpack_exports, __webpack_require__) => {
eval("/**\n * Node.js module for Forge mask generation functions.\n *\n * @author Stefan Siegl\n *\n * Copyright 2012 Stefan Siegl \n */\nvar forge = __webpack_require__(/*! ./forge */ \"./node_modules/node-forge/lib/forge.js\");\n__webpack_require__(/*! ./mgf1 */ \"./node_modules/node-forge/lib/mgf1.js\");\n\nmodule.exports = forge.mgf = forge.mgf || {};\nforge.mgf.mgf1 = forge.mgf1;\n\n\n//# sourceURL=webpack://@waku/noise-rtc/./node_modules/node-forge/lib/mgf.js?");
/***/ }),
/***/ "./node_modules/node-forge/lib/mgf1.js":
/*!*********************************************!*\
!*** ./node_modules/node-forge/lib/mgf1.js ***!
\*********************************************/
/***/ ((module, __unused_webpack_exports, __webpack_require__) => {
eval("/**\n * Javascript implementation of mask generation function MGF1.\n *\n * @author Stefan Siegl\n * @author Dave Longley\n *\n * Copyright (c) 2012 Stefan Siegl \n * Copyright (c) 2014 Digital Bazaar, Inc.\n */\nvar forge = __webpack_require__(/*! ./forge */ \"./node_modules/node-forge/lib/forge.js\");\n__webpack_require__(/*! ./util */ \"./node_modules/node-forge/lib/util.js\");\n\nforge.mgf = forge.mgf || {};\nvar mgf1 = module.exports = forge.mgf.mgf1 = forge.mgf1 = forge.mgf1 || {};\n\n/**\n * Creates a MGF1 mask generation function object.\n *\n * @param md the message digest API to use (eg: forge.md.sha1.create()).\n *\n * @return a mask generation function object.\n */\nmgf1.create = function(md) {\n var mgf = {\n /**\n * Generate mask of specified length.\n *\n * @param {String} seed The seed for mask generation.\n * @param maskLen Number of bytes to generate.\n * @return {String} The generated mask.\n */\n generate: function(seed, maskLen) {\n /* 2. Let T be the empty octet string. */\n var t = new forge.util.ByteBuffer();\n\n /* 3. For counter from 0 to ceil(maskLen / hLen), do the following: */\n var len = Math.ceil(maskLen / md.digestLength);\n for(var i = 0; i < len; i++) {\n /* a. Convert counter to an octet string C of length 4 octets */\n var c = new forge.util.ByteBuffer();\n c.putInt32(i);\n\n /* b. Concatenate the hash of the seed mgfSeed and C to the octet\n * string T: */\n md.start();\n md.update(seed + c.getBytes());\n t.putBuffer(md.digest());\n }\n\n /* Output the leading maskLen octets of T as the octet string mask. */\n t.truncate(t.length() - maskLen);\n return t.getBytes();\n }\n };\n\n return mgf;\n};\n\n\n//# sourceURL=webpack://@waku/noise-rtc/./node_modules/node-forge/lib/mgf1.js?");
/***/ }),
/***/ "./node_modules/node-forge/lib/oids.js":
/*!*********************************************!*\
!*** ./node_modules/node-forge/lib/oids.js ***!
\*********************************************/
/***/ ((module, __unused_webpack_exports, __webpack_require__) => {
eval("/**\n * Object IDs for ASN.1.\n *\n * @author Dave Longley\n *\n * Copyright (c) 2010-2013 Digital Bazaar, Inc.\n */\nvar forge = __webpack_require__(/*! ./forge */ \"./node_modules/node-forge/lib/forge.js\");\n\nforge.pki = forge.pki || {};\nvar oids = module.exports = forge.pki.oids = forge.oids = forge.oids || {};\n\n// set id to name mapping and name to id mapping\nfunction _IN(id, name) {\n oids[id] = name;\n oids[name] = id;\n}\n// set id to name mapping only\nfunction _I_(id, name) {\n oids[id] = name;\n}\n\n// algorithm OIDs\n_IN('1.2.840.113549.1.1.1', 'rsaEncryption');\n// Note: md2 & md4 not implemented\n//_IN('1.2.840.113549.1.1.2', 'md2WithRSAEncryption');\n//_IN('1.2.840.113549.1.1.3', 'md4WithRSAEncryption');\n_IN('1.2.840.113549.1.1.4', 'md5WithRSAEncryption');\n_IN('1.2.840.113549.1.1.5', 'sha1WithRSAEncryption');\n_IN('1.2.840.113549.1.1.7', 'RSAES-OAEP');\n_IN('1.2.840.113549.1.1.8', 'mgf1');\n_IN('1.2.840.113549.1.1.9', 'pSpecified');\n_IN('1.2.840.113549.1.1.10', 'RSASSA-PSS');\n_IN('1.2.840.113549.1.1.11', 'sha256WithRSAEncryption');\n_IN('1.2.840.113549.1.1.12', 'sha384WithRSAEncryption');\n_IN('1.2.840.113549.1.1.13', 'sha512WithRSAEncryption');\n// Edwards-curve Digital Signature Algorithm (EdDSA) Ed25519\n_IN('1.3.101.112', 'EdDSA25519');\n\n_IN('1.2.840.10040.4.3', 'dsa-with-sha1');\n\n_IN('1.3.14.3.2.7', 'desCBC');\n\n_IN('1.3.14.3.2.26', 'sha1');\n// Deprecated equivalent of sha1WithRSAEncryption\n_IN('1.3.14.3.2.29', 'sha1WithRSASignature');\n_IN('2.16.840.1.101.3.4.2.1', 'sha256');\n_IN('2.16.840.1.101.3.4.2.2', 'sha384');\n_IN('2.16.840.1.101.3.4.2.3', 'sha512');\n_IN('2.16.840.1.101.3.4.2.4', 'sha224');\n_IN('2.16.840.1.101.3.4.2.5', 'sha512-224');\n_IN('2.16.840.1.101.3.4.2.6', 'sha512-256');\n_IN('1.2.840.113549.2.2', 'md2');\n_IN('1.2.840.113549.2.5', 'md5');\n\n// pkcs#7 content types\n_IN('1.2.840.113549.1.7.1', 'data');\n_IN('1.2.840.113549.1.7.2', 'signedData');\n_IN('1.2.840.113549.1.7.3', 'envelopedData');\n_IN('1.2.840.113549.1.7.4', 'signedAndEnvelopedData');\n_IN('1.2.840.113549.1.7.5', 'digestedData');\n_IN('1.2.840.113549.1.7.6', 'encryptedData');\n\n// pkcs#9 oids\n_IN('1.2.840.113549.1.9.1', 'emailAddress');\n_IN('1.2.840.113549.1.9.2', 'unstructuredName');\n_IN('1.2.840.113549.1.9.3', 'contentType');\n_IN('1.2.840.113549.1.9.4', 'messageDigest');\n_IN('1.2.840.113549.1.9.5', 'signingTime');\n_IN('1.2.840.113549.1.9.6', 'counterSignature');\n_IN('1.2.840.113549.1.9.7', 'challengePassword');\n_IN('1.2.840.113549.1.9.8', 'unstructuredAddress');\n_IN('1.2.840.113549.1.9.14', 'extensionRequest');\n\n_IN('1.2.840.113549.1.9.20', 'friendlyName');\n_IN('1.2.840.113549.1.9.21', 'localKeyId');\n_IN('1.2.840.113549.1.9.22.1', 'x509Certificate');\n\n// pkcs#12 safe bags\n_IN('1.2.840.113549.1.12.10.1.1', 'keyBag');\n_IN('1.2.840.113549.1.12.10.1.2', 'pkcs8ShroudedKeyBag');\n_IN('1.2.840.113549.1.12.10.1.3', 'certBag');\n_IN('1.2.840.113549.1.12.10.1.4', 'crlBag');\n_IN('1.2.840.113549.1.12.10.1.5', 'secretBag');\n_IN('1.2.840.113549.1.12.10.1.6', 'safeContentsBag');\n\n// password-based-encryption for pkcs#12\n_IN('1.2.840.113549.1.5.13', 'pkcs5PBES2');\n_IN('1.2.840.113549.1.5.12', 'pkcs5PBKDF2');\n\n_IN('1.2.840.113549.1.12.1.1', 'pbeWithSHAAnd128BitRC4');\n_IN('1.2.840.113549.1.12.1.2', 'pbeWithSHAAnd40BitRC4');\n_IN('1.2.840.113549.1.12.1.3', 'pbeWithSHAAnd3-KeyTripleDES-CBC');\n_IN('1.2.840.113549.1.12.1.4', 'pbeWithSHAAnd2-KeyTripleDES-CBC');\n_IN('1.2.840.113549.1.12.1.5', 'pbeWithSHAAnd128BitRC2-CBC');\n_IN('1.2.840.113549.1.12.1.6', 
'pbewithSHAAnd40BitRC2-CBC');\n\n// hmac OIDs\n_IN('1.2.840.113549.2.7', 'hmacWithSHA1');\n_IN('1.2.840.113549.2.8', 'hmacWithSHA224');\n_IN('1.2.840.113549.2.9', 'hmacWithSHA256');\n_IN('1.2.840.113549.2.10', 'hmacWithSHA384');\n_IN('1.2.840.113549.2.11', 'hmacWithSHA512');\n\n// symmetric key algorithm oids\n_IN('1.2.840.113549.3.7', 'des-EDE3-CBC');\n_IN('2.16.840.1.101.3.4.1.2', 'aes128-CBC');\n_IN('2.16.840.1.101.3.4.1.22', 'aes192-CBC');\n_IN('2.16.840.1.101.3.4.1.42', 'aes256-CBC');\n\n// certificate issuer/subject OIDs\n_IN('2.5.4.3', 'commonName');\n_IN('2.5.4.4', 'surname');\n_IN('2.5.4.5', 'serialNumber');\n_IN('2.5.4.6', 'countryName');\n_IN('2.5.4.7', 'localityName');\n_IN('2.5.4.8', 'stateOrProvinceName');\n_IN('2.5.4.9', 'streetAddress');\n_IN('2.5.4.10', 'organizationName');\n_IN('2.5.4.11', 'organizationalUnitName');\n_IN('2.5.4.12', 'title');\n_IN('2.5.4.13', 'description');\n_IN('2.5.4.15', 'businessCategory');\n_IN('2.5.4.17', 'postalCode');\n_IN('2.5.4.42', 'givenName');\n_IN('1.3.6.1.4.1.311.60.2.1.2', 'jurisdictionOfIncorporationStateOrProvinceName');\n_IN('1.3.6.1.4.1.311.60.2.1.3', 'jurisdictionOfIncorporationCountryName');\n\n// X.509 extension OIDs\n_IN('2.16.840.1.113730.1.1', 'nsCertType');\n_IN('2.16.840.1.113730.1.13', 'nsComment'); // deprecated in theory; still widely used\n_I_('2.5.29.1', 'authorityKeyIdentifier'); // deprecated, use .35\n_I_('2.5.29.2', 'keyAttributes'); // obsolete use .37 or .15\n_I_('2.5.29.3', 'certificatePolicies'); // deprecated, use .32\n_I_('2.5.29.4', 'keyUsageRestriction'); // obsolete use .37 or .15\n_I_('2.5.29.5', 'policyMapping'); // deprecated use .33\n_I_('2.5.29.6', 'subtreesConstraint'); // obsolete use .30\n_I_('2.5.29.7', 'subjectAltName'); // deprecated use .17\n_I_('2.5.29.8', 'issuerAltName'); // deprecated use .18\n_I_('2.5.29.9', 'subjectDirectoryAttributes');\n_I_('2.5.29.10', 'basicConstraints'); // deprecated use .19\n_I_('2.5.29.11', 'nameConstraints'); // deprecated use .30\n_I_('2.5.29.12', 'policyConstraints'); // deprecated use .36\n_I_('2.5.29.13', 'basicConstraints'); // deprecated use .19\n_IN('2.5.29.14', 'subjectKeyIdentifier');\n_IN('2.5.29.15', 'keyUsage');\n_I_('2.5.29.16', 'privateKeyUsagePeriod');\n_IN('2.5.29.17', 'subjectAltName');\n_IN('2.5.29.18', 'issuerAltName');\n_IN('2.5.29.19', 'basicConstraints');\n_I_('2.5.29.20', 'cRLNumber');\n_I_('2.5.29.21', 'cRLReason');\n_I_('2.5.29.22', 'expirationDate');\n_I_('2.5.29.23', 'instructionCode');\n_I_('2.5.29.24', 'invalidityDate');\n_I_('2.5.29.25', 'cRLDistributionPoints'); // deprecated use .31\n_I_('2.5.29.26', 'issuingDistributionPoint'); // deprecated use .28\n_I_('2.5.29.27', 'deltaCRLIndicator');\n_I_('2.5.29.28', 'issuingDistributionPoint');\n_I_('2.5.29.29', 'certificateIssuer');\n_I_('2.5.29.30', 'nameConstraints');\n_IN('2.5.29.31', 'cRLDistributionPoints');\n_IN('2.5.29.32', 'certificatePolicies');\n_I_('2.5.29.33', 'policyMappings');\n_I_('2.5.29.34', 'policyConstraints'); // deprecated use .36\n_IN('2.5.29.35', 'authorityKeyIdentifier');\n_I_('2.5.29.36', 'policyConstraints');\n_IN('2.5.29.37', 'extKeyUsage');\n_I_('2.5.29.46', 'freshestCRL');\n_I_('2.5.29.54', 'inhibitAnyPolicy');\n\n// extKeyUsage purposes\n_IN('1.3.6.1.4.1.11129.2.4.2', 'timestampList');\n_IN('1.3.6.1.5.5.7.1.1', 'authorityInfoAccess');\n_IN('1.3.6.1.5.5.7.3.1', 'serverAuth');\n_IN('1.3.6.1.5.5.7.3.2', 'clientAuth');\n_IN('1.3.6.1.5.5.7.3.3', 'codeSigning');\n_IN('1.3.6.1.5.5.7.3.4', 'emailProtection');\n_IN('1.3.6.1.5.5.7.3.8', 'timeStamping');\n\n\n//# 
sourceURL=webpack://@waku/noise-rtc/./node_modules/node-forge/lib/oids.js?");
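// Illustrative sketch only (not part of node-forge, never invoked by this
// bundle): demonstrates the two registration helpers above. _IN(id, name)
// maps both directions, _I_(id, name) maps id -> name only, which keeps
// reverse lookups pointed at the current OID for deprecated duplicates.
function exampleOidLookup() {
  var sha256Oid = oids['sha256'];             // '2.16.840.1.101.3.4.2.1'
  var rsaName = oids['1.2.840.113549.1.1.1']; // 'rsaEncryption'
  // '2.5.29.10' (deprecated) resolves to 'basicConstraints', but the reverse
  // lookup stays on the current OID registered with _IN:
  var current = oids['basicConstraints'];     // '2.5.29.19'
  return [sha256Oid, rsaName, current];
}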
/***/ }),
/***/ "./node_modules/node-forge/lib/pbe.js":
/*!********************************************!*\
!*** ./node_modules/node-forge/lib/pbe.js ***!
\********************************************/
/***/ ((module, __unused_webpack_exports, __webpack_require__) => {
eval("/**\n * Password-based encryption functions.\n *\n * @author Dave Longley\n * @author Stefan Siegl \n *\n * Copyright (c) 2010-2013 Digital Bazaar, Inc.\n * Copyright (c) 2012 Stefan Siegl \n *\n * An EncryptedPrivateKeyInfo:\n *\n * EncryptedPrivateKeyInfo ::= SEQUENCE {\n * encryptionAlgorithm EncryptionAlgorithmIdentifier,\n * encryptedData EncryptedData }\n *\n * EncryptionAlgorithmIdentifier ::= AlgorithmIdentifier\n *\n * EncryptedData ::= OCTET STRING\n */\nvar forge = __webpack_require__(/*! ./forge */ \"./node_modules/node-forge/lib/forge.js\");\n__webpack_require__(/*! ./aes */ \"./node_modules/node-forge/lib/aes.js\");\n__webpack_require__(/*! ./asn1 */ \"./node_modules/node-forge/lib/asn1.js\");\n__webpack_require__(/*! ./des */ \"./node_modules/node-forge/lib/des.js\");\n__webpack_require__(/*! ./md */ \"./node_modules/node-forge/lib/md.js\");\n__webpack_require__(/*! ./oids */ \"./node_modules/node-forge/lib/oids.js\");\n__webpack_require__(/*! ./pbkdf2 */ \"./node_modules/node-forge/lib/pbkdf2.js\");\n__webpack_require__(/*! ./pem */ \"./node_modules/node-forge/lib/pem.js\");\n__webpack_require__(/*! ./random */ \"./node_modules/node-forge/lib/random.js\");\n__webpack_require__(/*! ./rc2 */ \"./node_modules/node-forge/lib/rc2.js\");\n__webpack_require__(/*! ./rsa */ \"./node_modules/node-forge/lib/rsa.js\");\n__webpack_require__(/*! ./util */ \"./node_modules/node-forge/lib/util.js\");\n\nif(typeof BigInteger === 'undefined') {\n var BigInteger = forge.jsbn.BigInteger;\n}\n\n// shortcut for asn.1 API\nvar asn1 = forge.asn1;\n\n/* Password-based encryption implementation. */\nvar pki = forge.pki = forge.pki || {};\nmodule.exports = pki.pbe = forge.pbe = forge.pbe || {};\nvar oids = pki.oids;\n\n// validator for an EncryptedPrivateKeyInfo structure\n// Note: Currently only works w/algorithm params\nvar encryptedPrivateKeyValidator = {\n name: 'EncryptedPrivateKeyInfo',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.SEQUENCE,\n constructed: true,\n value: [{\n name: 'EncryptedPrivateKeyInfo.encryptionAlgorithm',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.SEQUENCE,\n constructed: true,\n value: [{\n name: 'AlgorithmIdentifier.algorithm',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.OID,\n constructed: false,\n capture: 'encryptionOid'\n }, {\n name: 'AlgorithmIdentifier.parameters',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.SEQUENCE,\n constructed: true,\n captureAsn1: 'encryptionParams'\n }]\n }, {\n // encryptedData\n name: 'EncryptedPrivateKeyInfo.encryptedData',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.OCTETSTRING,\n constructed: false,\n capture: 'encryptedData'\n }]\n};\n\n// validator for a PBES2Algorithms structure\n// Note: Currently only works w/PBKDF2 + AES encryption schemes\nvar PBES2AlgorithmsValidator = {\n name: 'PBES2Algorithms',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.SEQUENCE,\n constructed: true,\n value: [{\n name: 'PBES2Algorithms.keyDerivationFunc',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.SEQUENCE,\n constructed: true,\n value: [{\n name: 'PBES2Algorithms.keyDerivationFunc.oid',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.OID,\n constructed: false,\n capture: 'kdfOid'\n }, {\n name: 'PBES2Algorithms.params',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.SEQUENCE,\n constructed: true,\n value: [{\n name: 'PBES2Algorithms.params.salt',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.OCTETSTRING,\n constructed: false,\n capture: 'kdfSalt'\n }, {\n name: 
'PBES2Algorithms.params.iterationCount',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.INTEGER,\n constructed: false,\n capture: 'kdfIterationCount'\n }, {\n name: 'PBES2Algorithms.params.keyLength',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.INTEGER,\n constructed: false,\n optional: true,\n capture: 'keyLength'\n }, {\n // prf\n name: 'PBES2Algorithms.params.prf',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.SEQUENCE,\n constructed: true,\n optional: true,\n value: [{\n name: 'PBES2Algorithms.params.prf.algorithm',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.OID,\n constructed: false,\n capture: 'prfOid'\n }]\n }]\n }]\n }, {\n name: 'PBES2Algorithms.encryptionScheme',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.SEQUENCE,\n constructed: true,\n value: [{\n name: 'PBES2Algorithms.encryptionScheme.oid',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.OID,\n constructed: false,\n capture: 'encOid'\n }, {\n name: 'PBES2Algorithms.encryptionScheme.iv',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.OCTETSTRING,\n constructed: false,\n capture: 'encIv'\n }]\n }]\n};\n\nvar pkcs12PbeParamsValidator = {\n name: 'pkcs-12PbeParams',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.SEQUENCE,\n constructed: true,\n value: [{\n name: 'pkcs-12PbeParams.salt',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.OCTETSTRING,\n constructed: false,\n capture: 'salt'\n }, {\n name: 'pkcs-12PbeParams.iterations',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.INTEGER,\n constructed: false,\n capture: 'iterations'\n }]\n};\n\n/**\n * Encrypts a ASN.1 PrivateKeyInfo object, producing an EncryptedPrivateKeyInfo.\n *\n * PBES2Algorithms ALGORITHM-IDENTIFIER ::=\n * { {PBES2-params IDENTIFIED BY id-PBES2}, ...}\n *\n * id-PBES2 OBJECT IDENTIFIER ::= {pkcs-5 13}\n *\n * PBES2-params ::= SEQUENCE {\n * keyDerivationFunc AlgorithmIdentifier {{PBES2-KDFs}},\n * encryptionScheme AlgorithmIdentifier {{PBES2-Encs}}\n * }\n *\n * PBES2-KDFs ALGORITHM-IDENTIFIER ::=\n * { {PBKDF2-params IDENTIFIED BY id-PBKDF2}, ... }\n *\n * PBES2-Encs ALGORITHM-IDENTIFIER ::= { ... 
}\n *\n * PBKDF2-params ::= SEQUENCE {\n * salt CHOICE {\n * specified OCTET STRING,\n * otherSource AlgorithmIdentifier {{PBKDF2-SaltSources}}\n * },\n * iterationCount INTEGER (1..MAX),\n * keyLength INTEGER (1..MAX) OPTIONAL,\n * prf AlgorithmIdentifier {{PBKDF2-PRFs}} DEFAULT algid-hmacWithSHA1\n * }\n *\n * @param obj the ASN.1 PrivateKeyInfo object.\n * @param password the password to encrypt with.\n * @param options:\n * algorithm the encryption algorithm to use\n * ('aes128', 'aes192', 'aes256', '3des'), defaults to 'aes128'.\n * count the iteration count to use.\n * saltSize the salt size to use.\n * prfAlgorithm the PRF message digest algorithm to use\n * ('sha1', 'sha224', 'sha256', 'sha384', 'sha512')\n *\n * @return the ASN.1 EncryptedPrivateKeyInfo.\n */\npki.encryptPrivateKeyInfo = function(obj, password, options) {\n // set default options\n options = options || {};\n options.saltSize = options.saltSize || 8;\n options.count = options.count || 2048;\n options.algorithm = options.algorithm || 'aes128';\n options.prfAlgorithm = options.prfAlgorithm || 'sha1';\n\n // generate PBE params\n var salt = forge.random.getBytesSync(options.saltSize);\n var count = options.count;\n var countBytes = asn1.integerToDer(count);\n var dkLen;\n var encryptionAlgorithm;\n var encryptedData;\n if(options.algorithm.indexOf('aes') === 0 || options.algorithm === 'des') {\n // do PBES2\n var ivLen, encOid, cipherFn;\n switch(options.algorithm) {\n case 'aes128':\n dkLen = 16;\n ivLen = 16;\n encOid = oids['aes128-CBC'];\n cipherFn = forge.aes.createEncryptionCipher;\n break;\n case 'aes192':\n dkLen = 24;\n ivLen = 16;\n encOid = oids['aes192-CBC'];\n cipherFn = forge.aes.createEncryptionCipher;\n break;\n case 'aes256':\n dkLen = 32;\n ivLen = 16;\n encOid = oids['aes256-CBC'];\n cipherFn = forge.aes.createEncryptionCipher;\n break;\n case 'des':\n dkLen = 8;\n ivLen = 8;\n encOid = oids['desCBC'];\n cipherFn = forge.des.createEncryptionCipher;\n break;\n default:\n var error = new Error('Cannot encrypt private key. 
Unknown encryption algorithm.');\n error.algorithm = options.algorithm;\n throw error;\n }\n\n // get PRF message digest\n var prfAlgorithm = 'hmacWith' + options.prfAlgorithm.toUpperCase();\n var md = prfAlgorithmToMessageDigest(prfAlgorithm);\n\n // encrypt private key using pbe SHA-1 and AES/DES\n var dk = forge.pkcs5.pbkdf2(password, salt, count, dkLen, md);\n var iv = forge.random.getBytesSync(ivLen);\n var cipher = cipherFn(dk);\n cipher.start(iv);\n cipher.update(asn1.toDer(obj));\n cipher.finish();\n encryptedData = cipher.output.getBytes();\n\n // get PBKDF2-params\n var params = createPbkdf2Params(salt, countBytes, dkLen, prfAlgorithm);\n\n encryptionAlgorithm = asn1.create(\n asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OID, false,\n asn1.oidToDer(oids['pkcs5PBES2']).getBytes()),\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [\n // keyDerivationFunc\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OID, false,\n asn1.oidToDer(oids['pkcs5PBKDF2']).getBytes()),\n // PBKDF2-params\n params\n ]),\n // encryptionScheme\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OID, false,\n asn1.oidToDer(encOid).getBytes()),\n // iv\n asn1.create(\n asn1.Class.UNIVERSAL, asn1.Type.OCTETSTRING, false, iv)\n ])\n ])\n ]);\n } else if(options.algorithm === '3des') {\n // Do PKCS12 PBE\n dkLen = 24;\n\n var saltBytes = new forge.util.ByteBuffer(salt);\n var dk = pki.pbe.generatePkcs12Key(password, saltBytes, 1, count, dkLen);\n var iv = pki.pbe.generatePkcs12Key(password, saltBytes, 2, count, dkLen);\n var cipher = forge.des.createEncryptionCipher(dk);\n cipher.start(iv);\n cipher.update(asn1.toDer(obj));\n cipher.finish();\n encryptedData = cipher.output.getBytes();\n\n encryptionAlgorithm = asn1.create(\n asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OID, false,\n asn1.oidToDer(oids['pbeWithSHAAnd3-KeyTripleDES-CBC']).getBytes()),\n // pkcs-12PbeParams\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [\n // salt\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OCTETSTRING, false, salt),\n // iteration count\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.INTEGER, false,\n countBytes.getBytes())\n ])\n ]);\n } else {\n var error = new Error('Cannot encrypt private key. Unknown encryption algorithm.');\n error.algorithm = options.algorithm;\n throw error;\n }\n\n // EncryptedPrivateKeyInfo\n var rval = asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [\n // encryptionAlgorithm\n encryptionAlgorithm,\n // encryptedData\n asn1.create(\n asn1.Class.UNIVERSAL, asn1.Type.OCTETSTRING, false, encryptedData)\n ]);\n return rval;\n};\n\n/**\n * Decrypts a ASN.1 PrivateKeyInfo object.\n *\n * @param obj the ASN.1 EncryptedPrivateKeyInfo object.\n * @param password the password to decrypt with.\n *\n * @return the ASN.1 PrivateKeyInfo on success, null on failure.\n */\npki.decryptPrivateKeyInfo = function(obj, password) {\n var rval = null;\n\n // get PBE params\n var capture = {};\n var errors = [];\n if(!asn1.validate(obj, encryptedPrivateKeyValidator, capture, errors)) {\n var error = new Error('Cannot read encrypted private key. 
' +\n 'ASN.1 object is not a supported EncryptedPrivateKeyInfo.');\n error.errors = errors;\n throw error;\n }\n\n // get cipher\n var oid = asn1.derToOid(capture.encryptionOid);\n var cipher = pki.pbe.getCipher(oid, capture.encryptionParams, password);\n\n // get encrypted data\n var encrypted = forge.util.createBuffer(capture.encryptedData);\n\n cipher.update(encrypted);\n if(cipher.finish()) {\n rval = asn1.fromDer(cipher.output);\n }\n\n return rval;\n};\n\n/**\n * Converts a EncryptedPrivateKeyInfo to PEM format.\n *\n * @param epki the EncryptedPrivateKeyInfo.\n * @param maxline the maximum characters per line, defaults to 64.\n *\n * @return the PEM-formatted encrypted private key.\n */\npki.encryptedPrivateKeyToPem = function(epki, maxline) {\n // convert to DER, then PEM-encode\n var msg = {\n type: 'ENCRYPTED PRIVATE KEY',\n body: asn1.toDer(epki).getBytes()\n };\n return forge.pem.encode(msg, {maxline: maxline});\n};\n\n/**\n * Converts a PEM-encoded EncryptedPrivateKeyInfo to ASN.1 format. Decryption\n * is not performed.\n *\n * @param pem the EncryptedPrivateKeyInfo in PEM-format.\n *\n * @return the ASN.1 EncryptedPrivateKeyInfo.\n */\npki.encryptedPrivateKeyFromPem = function(pem) {\n var msg = forge.pem.decode(pem)[0];\n\n if(msg.type !== 'ENCRYPTED PRIVATE KEY') {\n var error = new Error('Could not convert encrypted private key from PEM; ' +\n 'PEM header type is \"ENCRYPTED PRIVATE KEY\".');\n error.headerType = msg.type;\n throw error;\n }\n if(msg.procType && msg.procType.type === 'ENCRYPTED') {\n throw new Error('Could not convert encrypted private key from PEM; ' +\n 'PEM is encrypted.');\n }\n\n // convert DER to ASN.1 object\n return asn1.fromDer(msg.body);\n};\n\n/**\n * Encrypts an RSA private key. By default, the key will be wrapped in\n * a PrivateKeyInfo and encrypted to produce a PKCS#8 EncryptedPrivateKeyInfo.\n * This is the standard, preferred way to encrypt a private key.\n *\n * To produce a non-standard PEM-encrypted private key that uses encapsulated\n * headers to indicate the encryption algorithm (old-style non-PKCS#8 OpenSSL\n * private key encryption), set the 'legacy' option to true. Note: Using this\n * option will cause the iteration count to be forced to 1.\n *\n * Note: The 'des' algorithm is supported, but it is not considered to be\n * secure because it only uses a single 56-bit key. 
If possible, it is highly\n * recommended that a different algorithm be used.\n *\n * @param rsaKey the RSA key to encrypt.\n * @param password the password to use.\n * @param options:\n * algorithm: the encryption algorithm to use\n * ('aes128', 'aes192', 'aes256', '3des', 'des').\n * count: the iteration count to use.\n * saltSize: the salt size to use.\n * legacy: output an old non-PKCS#8 PEM-encrypted+encapsulated\n * headers (DEK-Info) private key.\n *\n * @return the PEM-encoded ASN.1 EncryptedPrivateKeyInfo.\n */\npki.encryptRsaPrivateKey = function(rsaKey, password, options) {\n // standard PKCS#8\n options = options || {};\n if(!options.legacy) {\n // encrypt PrivateKeyInfo\n var rval = pki.wrapRsaPrivateKey(pki.privateKeyToAsn1(rsaKey));\n rval = pki.encryptPrivateKeyInfo(rval, password, options);\n return pki.encryptedPrivateKeyToPem(rval);\n }\n\n // legacy non-PKCS#8\n var algorithm;\n var iv;\n var dkLen;\n var cipherFn;\n switch(options.algorithm) {\n case 'aes128':\n algorithm = 'AES-128-CBC';\n dkLen = 16;\n iv = forge.random.getBytesSync(16);\n cipherFn = forge.aes.createEncryptionCipher;\n break;\n case 'aes192':\n algorithm = 'AES-192-CBC';\n dkLen = 24;\n iv = forge.random.getBytesSync(16);\n cipherFn = forge.aes.createEncryptionCipher;\n break;\n case 'aes256':\n algorithm = 'AES-256-CBC';\n dkLen = 32;\n iv = forge.random.getBytesSync(16);\n cipherFn = forge.aes.createEncryptionCipher;\n break;\n case '3des':\n algorithm = 'DES-EDE3-CBC';\n dkLen = 24;\n iv = forge.random.getBytesSync(8);\n cipherFn = forge.des.createEncryptionCipher;\n break;\n case 'des':\n algorithm = 'DES-CBC';\n dkLen = 8;\n iv = forge.random.getBytesSync(8);\n cipherFn = forge.des.createEncryptionCipher;\n break;\n default:\n var error = new Error('Could not encrypt RSA private key; unsupported ' +\n 'encryption algorithm \"' + options.algorithm + '\".');\n error.algorithm = options.algorithm;\n throw error;\n }\n\n // encrypt private key using OpenSSL legacy key derivation\n var dk = forge.pbe.opensslDeriveBytes(password, iv.substr(0, 8), dkLen);\n var cipher = cipherFn(dk);\n cipher.start(iv);\n cipher.update(asn1.toDer(pki.privateKeyToAsn1(rsaKey)));\n cipher.finish();\n\n var msg = {\n type: 'RSA PRIVATE KEY',\n procType: {\n version: '4',\n type: 'ENCRYPTED'\n },\n dekInfo: {\n algorithm: algorithm,\n parameters: forge.util.bytesToHex(iv).toUpperCase()\n },\n body: cipher.output.getBytes()\n };\n return forge.pem.encode(msg);\n};\n\n/**\n * Decrypts an RSA private key.\n *\n * @param pem the PEM-formatted EncryptedPrivateKeyInfo to decrypt.\n * @param password the password to use.\n *\n * @return the RSA key on success, null on failure.\n */\npki.decryptRsaPrivateKey = function(pem, password) {\n var rval = null;\n\n var msg = forge.pem.decode(pem)[0];\n\n if(msg.type !== 'ENCRYPTED PRIVATE KEY' &&\n msg.type !== 'PRIVATE KEY' &&\n msg.type !== 'RSA PRIVATE KEY') {\n var error = new Error('Could not convert private key from PEM; PEM header type ' +\n 'is not \"ENCRYPTED PRIVATE KEY\", \"PRIVATE KEY\", or \"RSA PRIVATE KEY\".');\n error.headerType = error;\n throw error;\n }\n\n if(msg.procType && msg.procType.type === 'ENCRYPTED') {\n var dkLen;\n var cipherFn;\n switch(msg.dekInfo.algorithm) {\n case 'DES-CBC':\n dkLen = 8;\n cipherFn = forge.des.createDecryptionCipher;\n break;\n case 'DES-EDE3-CBC':\n dkLen = 24;\n cipherFn = forge.des.createDecryptionCipher;\n break;\n case 'AES-128-CBC':\n dkLen = 16;\n cipherFn = forge.aes.createDecryptionCipher;\n break;\n case 'AES-192-CBC':\n 
dkLen = 24;\n cipherFn = forge.aes.createDecryptionCipher;\n break;\n case 'AES-256-CBC':\n dkLen = 32;\n cipherFn = forge.aes.createDecryptionCipher;\n break;\n case 'RC2-40-CBC':\n dkLen = 5;\n cipherFn = function(key) {\n return forge.rc2.createDecryptionCipher(key, 40);\n };\n break;\n case 'RC2-64-CBC':\n dkLen = 8;\n cipherFn = function(key) {\n return forge.rc2.createDecryptionCipher(key, 64);\n };\n break;\n case 'RC2-128-CBC':\n dkLen = 16;\n cipherFn = function(key) {\n return forge.rc2.createDecryptionCipher(key, 128);\n };\n break;\n default:\n var error = new Error('Could not decrypt private key; unsupported ' +\n 'encryption algorithm \"' + msg.dekInfo.algorithm + '\".');\n error.algorithm = msg.dekInfo.algorithm;\n throw error;\n }\n\n // use OpenSSL legacy key derivation\n var iv = forge.util.hexToBytes(msg.dekInfo.parameters);\n var dk = forge.pbe.opensslDeriveBytes(password, iv.substr(0, 8), dkLen);\n var cipher = cipherFn(dk);\n cipher.start(iv);\n cipher.update(forge.util.createBuffer(msg.body));\n if(cipher.finish()) {\n rval = cipher.output.getBytes();\n } else {\n return rval;\n }\n } else {\n rval = msg.body;\n }\n\n if(msg.type === 'ENCRYPTED PRIVATE KEY') {\n rval = pki.decryptPrivateKeyInfo(asn1.fromDer(rval), password);\n } else {\n // decryption already performed above\n rval = asn1.fromDer(rval);\n }\n\n if(rval !== null) {\n rval = pki.privateKeyFromAsn1(rval);\n }\n\n return rval;\n};\n\n/**\n * Derives a PKCS#12 key.\n *\n * @param password the password to derive the key material from, null or\n * undefined for none.\n * @param salt the salt, as a ByteBuffer, to use.\n * @param id the PKCS#12 ID byte (1 = key material, 2 = IV, 3 = MAC).\n * @param iter the iteration count.\n * @param n the number of bytes to derive from the password.\n * @param md the message digest to use, defaults to SHA-1.\n *\n * @return a ByteBuffer with the bytes derived from the password.\n */\npki.pbe.generatePkcs12Key = function(password, salt, id, iter, n, md) {\n var j, l;\n\n if(typeof md === 'undefined' || md === null) {\n if(!('sha1' in forge.md)) {\n throw new Error('\"sha1\" hash algorithm unavailable.');\n }\n md = forge.md.sha1.create();\n }\n\n var u = md.digestLength;\n var v = md.blockLength;\n var result = new forge.util.ByteBuffer();\n\n /* Convert password to Unicode byte buffer + trailing 0-byte. */\n var passBuf = new forge.util.ByteBuffer();\n if(password !== null && password !== undefined) {\n for(l = 0; l < password.length; l++) {\n passBuf.putInt16(password.charCodeAt(l));\n }\n passBuf.putInt16(0);\n }\n\n /* Length of salt and password in BYTES. */\n var p = passBuf.length();\n var s = salt.length();\n\n /* 1. Construct a string, D (the \"diversifier\"), by concatenating\n v copies of ID. */\n var D = new forge.util.ByteBuffer();\n D.fillWithByte(id, v);\n\n /* 2. Concatenate copies of the salt together to create a string S of length\n v * ceil(s / v) bytes (the final copy of the salt may be trunacted\n to create S).\n Note that if the salt is the empty string, then so is S. */\n var Slen = v * Math.ceil(s / v);\n var S = new forge.util.ByteBuffer();\n for(l = 0; l < Slen; l++) {\n S.putByte(salt.at(l % s));\n }\n\n /* 3. Concatenate copies of the password together to create a string P of\n length v * ceil(p / v) bytes (the final copy of the password may be\n truncated to create P).\n Note that if the password is the empty string, then so is P. 
*/\n var Plen = v * Math.ceil(p / v);\n var P = new forge.util.ByteBuffer();\n for(l = 0; l < Plen; l++) {\n P.putByte(passBuf.at(l % p));\n }\n\n /* 4. Set I=S||P to be the concatenation of S and P. */\n var I = S;\n I.putBuffer(P);\n\n /* 5. Set c=ceil(n / u). */\n var c = Math.ceil(n / u);\n\n /* 6. For i=1, 2, ..., c, do the following: */\n for(var i = 1; i <= c; i++) {\n /* a) Set Ai=H^r(D||I). (l.e. the rth hash of D||I, H(H(H(...H(D||I)))) */\n var buf = new forge.util.ByteBuffer();\n buf.putBytes(D.bytes());\n buf.putBytes(I.bytes());\n for(var round = 0; round < iter; round++) {\n md.start();\n md.update(buf.getBytes());\n buf = md.digest();\n }\n\n /* b) Concatenate copies of Ai to create a string B of length v bytes (the\n final copy of Ai may be truncated to create B). */\n var B = new forge.util.ByteBuffer();\n for(l = 0; l < v; l++) {\n B.putByte(buf.at(l % u));\n }\n\n /* c) Treating I as a concatenation I0, I1, ..., Ik-1 of v-byte blocks,\n where k=ceil(s / v) + ceil(p / v), modify I by setting\n Ij=(Ij+B+1) mod 2v for each j. */\n var k = Math.ceil(s / v) + Math.ceil(p / v);\n var Inew = new forge.util.ByteBuffer();\n for(j = 0; j < k; j++) {\n var chunk = new forge.util.ByteBuffer(I.getBytes(v));\n var x = 0x1ff;\n for(l = B.length() - 1; l >= 0; l--) {\n x = x >> 8;\n x += B.at(l) + chunk.at(l);\n chunk.setAt(l, x & 0xff);\n }\n Inew.putBuffer(chunk);\n }\n I = Inew;\n\n /* Add Ai to A. */\n result.putBuffer(buf);\n }\n\n result.truncate(result.length() - n);\n return result;\n};\n\n/**\n * Get new Forge cipher object instance.\n *\n * @param oid the OID (in string notation).\n * @param params the ASN.1 params object.\n * @param password the password to decrypt with.\n *\n * @return new cipher object instance.\n */\npki.pbe.getCipher = function(oid, params, password) {\n switch(oid) {\n case pki.oids['pkcs5PBES2']:\n return pki.pbe.getCipherForPBES2(oid, params, password);\n\n case pki.oids['pbeWithSHAAnd3-KeyTripleDES-CBC']:\n case pki.oids['pbewithSHAAnd40BitRC2-CBC']:\n return pki.pbe.getCipherForPKCS12PBE(oid, params, password);\n\n default:\n var error = new Error('Cannot read encrypted PBE data block. Unsupported OID.');\n error.oid = oid;\n error.supportedOids = [\n 'pkcs5PBES2',\n 'pbeWithSHAAnd3-KeyTripleDES-CBC',\n 'pbewithSHAAnd40BitRC2-CBC'\n ];\n throw error;\n }\n};\n\n/**\n * Get new Forge cipher object instance according to PBES2 params block.\n *\n * The returned cipher instance is already started using the IV\n * from PBES2 parameter block.\n *\n * @param oid the PKCS#5 PBKDF2 OID (in string notation).\n * @param params the ASN.1 PBES2-params object.\n * @param password the password to decrypt with.\n *\n * @return new cipher object instance.\n */\npki.pbe.getCipherForPBES2 = function(oid, params, password) {\n // get PBE params\n var capture = {};\n var errors = [];\n if(!asn1.validate(params, PBES2AlgorithmsValidator, capture, errors)) {\n var error = new Error('Cannot read password-based-encryption algorithm ' +\n 'parameters. ASN.1 object is not a supported EncryptedPrivateKeyInfo.');\n error.errors = errors;\n throw error;\n }\n\n // check oids\n oid = asn1.derToOid(capture.kdfOid);\n if(oid !== pki.oids['pkcs5PBKDF2']) {\n var error = new Error('Cannot read encrypted private key. 
' +\n 'Unsupported key derivation function OID.');\n error.oid = oid;\n error.supportedOids = ['pkcs5PBKDF2'];\n throw error;\n }\n oid = asn1.derToOid(capture.encOid);\n if(oid !== pki.oids['aes128-CBC'] &&\n oid !== pki.oids['aes192-CBC'] &&\n oid !== pki.oids['aes256-CBC'] &&\n oid !== pki.oids['des-EDE3-CBC'] &&\n oid !== pki.oids['desCBC']) {\n var error = new Error('Cannot read encrypted private key. ' +\n 'Unsupported encryption scheme OID.');\n error.oid = oid;\n error.supportedOids = [\n 'aes128-CBC', 'aes192-CBC', 'aes256-CBC', 'des-EDE3-CBC', 'desCBC'];\n throw error;\n }\n\n // set PBE params\n var salt = capture.kdfSalt;\n var count = forge.util.createBuffer(capture.kdfIterationCount);\n count = count.getInt(count.length() << 3);\n var dkLen;\n var cipherFn;\n switch(pki.oids[oid]) {\n case 'aes128-CBC':\n dkLen = 16;\n cipherFn = forge.aes.createDecryptionCipher;\n break;\n case 'aes192-CBC':\n dkLen = 24;\n cipherFn = forge.aes.createDecryptionCipher;\n break;\n case 'aes256-CBC':\n dkLen = 32;\n cipherFn = forge.aes.createDecryptionCipher;\n break;\n case 'des-EDE3-CBC':\n dkLen = 24;\n cipherFn = forge.des.createDecryptionCipher;\n break;\n case 'desCBC':\n dkLen = 8;\n cipherFn = forge.des.createDecryptionCipher;\n break;\n }\n\n // get PRF message digest\n var md = prfOidToMessageDigest(capture.prfOid);\n\n // decrypt private key using pbe with chosen PRF and AES/DES\n var dk = forge.pkcs5.pbkdf2(password, salt, count, dkLen, md);\n var iv = capture.encIv;\n var cipher = cipherFn(dk);\n cipher.start(iv);\n\n return cipher;\n};\n\n/**\n * Get new Forge cipher object instance for PKCS#12 PBE.\n *\n * The returned cipher instance is already started using the key & IV\n * derived from the provided password and PKCS#12 PBE salt.\n *\n * @param oid The PKCS#12 PBE OID (in string notation).\n * @param params The ASN.1 PKCS#12 PBE-params object.\n * @param password The password to decrypt with.\n *\n * @return the new cipher object instance.\n */\npki.pbe.getCipherForPKCS12PBE = function(oid, params, password) {\n // get PBE params\n var capture = {};\n var errors = [];\n if(!asn1.validate(params, pkcs12PbeParamsValidator, capture, errors)) {\n var error = new Error('Cannot read password-based-encryption algorithm ' +\n 'parameters. ASN.1 object is not a supported EncryptedPrivateKeyInfo.');\n error.errors = errors;\n throw error;\n }\n\n var salt = forge.util.createBuffer(capture.salt);\n var count = forge.util.createBuffer(capture.iterations);\n count = count.getInt(count.length() << 3);\n\n var dkLen, dIvLen, cipherFn;\n switch(oid) {\n case pki.oids['pbeWithSHAAnd3-KeyTripleDES-CBC']:\n dkLen = 24;\n dIvLen = 8;\n cipherFn = forge.des.startDecrypting;\n break;\n\n case pki.oids['pbewithSHAAnd40BitRC2-CBC']:\n dkLen = 5;\n dIvLen = 8;\n cipherFn = function(key, iv) {\n var cipher = forge.rc2.createDecryptionCipher(key, 40);\n cipher.start(iv, null);\n return cipher;\n };\n break;\n\n default:\n var error = new Error('Cannot read PKCS #12 PBE data block. 
Unsupported OID.');\n error.oid = oid;\n throw error;\n }\n\n // get PRF message digest\n var md = prfOidToMessageDigest(capture.prfOid);\n var key = pki.pbe.generatePkcs12Key(password, salt, 1, count, dkLen, md);\n md.start();\n var iv = pki.pbe.generatePkcs12Key(password, salt, 2, count, dIvLen, md);\n\n return cipherFn(key, iv);\n};\n\n/**\n * OpenSSL's legacy key derivation function.\n *\n * See: http://www.openssl.org/docs/crypto/EVP_BytesToKey.html\n *\n * @param password the password to derive the key from.\n * @param salt the salt to use, null for none.\n * @param dkLen the number of bytes needed for the derived key.\n * @param [options] the options to use:\n * [md] an optional message digest object to use.\n */\npki.pbe.opensslDeriveBytes = function(password, salt, dkLen, md) {\n if(typeof md === 'undefined' || md === null) {\n if(!('md5' in forge.md)) {\n throw new Error('\"md5\" hash algorithm unavailable.');\n }\n md = forge.md.md5.create();\n }\n if(salt === null) {\n salt = '';\n }\n var digests = [hash(md, password + salt)];\n for(var length = 16, i = 1; length < dkLen; ++i, length += 16) {\n digests.push(hash(md, digests[i - 1] + password + salt));\n }\n return digests.join('').substr(0, dkLen);\n};\n\nfunction hash(md, bytes) {\n return md.start().update(bytes).digest().getBytes();\n}\n\nfunction prfOidToMessageDigest(prfOid) {\n // get PRF algorithm, default to SHA-1\n var prfAlgorithm;\n if(!prfOid) {\n prfAlgorithm = 'hmacWithSHA1';\n } else {\n prfAlgorithm = pki.oids[asn1.derToOid(prfOid)];\n if(!prfAlgorithm) {\n var error = new Error('Unsupported PRF OID.');\n error.oid = prfOid;\n error.supported = [\n 'hmacWithSHA1', 'hmacWithSHA224', 'hmacWithSHA256', 'hmacWithSHA384',\n 'hmacWithSHA512'];\n throw error;\n }\n }\n return prfAlgorithmToMessageDigest(prfAlgorithm);\n}\n\nfunction prfAlgorithmToMessageDigest(prfAlgorithm) {\n var factory = forge.md;\n switch(prfAlgorithm) {\n case 'hmacWithSHA224':\n factory = forge.md.sha512;\n case 'hmacWithSHA1':\n case 'hmacWithSHA256':\n case 'hmacWithSHA384':\n case 'hmacWithSHA512':\n prfAlgorithm = prfAlgorithm.substr(8).toLowerCase();\n break;\n default:\n var error = new Error('Unsupported PRF algorithm.');\n error.algorithm = prfAlgorithm;\n error.supported = [\n 'hmacWithSHA1', 'hmacWithSHA224', 'hmacWithSHA256', 'hmacWithSHA384',\n 'hmacWithSHA512'];\n throw error;\n }\n if(!factory || !(prfAlgorithm in factory)) {\n throw new Error('Unknown hash algorithm: ' + prfAlgorithm);\n }\n return factory[prfAlgorithm].create();\n}\n\nfunction createPbkdf2Params(salt, countBytes, dkLen, prfAlgorithm) {\n var params = asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [\n // salt\n asn1.create(\n asn1.Class.UNIVERSAL, asn1.Type.OCTETSTRING, false, salt),\n // iteration count\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.INTEGER, false,\n countBytes.getBytes())\n ]);\n // when PRF algorithm is not SHA-1 default, add key length and PRF algorithm\n if(prfAlgorithm !== 'hmacWithSHA1') {\n params.value.push(\n // key length\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.INTEGER, false,\n forge.util.hexToBytes(dkLen.toString(16))),\n // AlgorithmIdentifier\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [\n // algorithm\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OID, false,\n asn1.oidToDer(pki.oids[prfAlgorithm]).getBytes()),\n // parameters (null)\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.NULL, false, '')\n ]));\n }\n return params;\n}\n\n\n//# 
sourceURL=webpack://@waku/noise-rtc/./node_modules/node-forge/lib/pbe.js?");
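// Usage sketch for the PKCS#12 key derivation implemented in the module above
// (pki.pbe.generatePkcs12Key). A sketch only, under the assumptions that node-forge
// is loaded as `forge`, that `pki.pbe` is exposed as `forge.pki.pbe`, and that the
// salt, iteration count and output lengths below are illustrative values.
//
//   var forge = require('node-forge');
//   var salt = forge.util.createBuffer(forge.random.getBytesSync(8));
//   // id 1 = key material, id 2 = IV (see the @param docs above)
//   var key = forge.pki.pbe.generatePkcs12Key('secret', salt, 1, 2048, 24);
//   var iv  = forge.pki.pbe.generatePkcs12Key('secret', salt, 2, 2048, 8);
//   console.log(key.toHex(), iv.toHex());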
/***/ }),
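// Round-trip sketch for pki.encryptRsaPrivateKey / pki.decryptRsaPrivateKey from the
// module above, assuming node-forge is loaded as `forge`; the key size, password and
// algorithm choices are illustrative only.
//
//   var forge = require('node-forge');
//   var keypair = forge.pki.rsa.generateKeyPair({bits: 2048});
//   // default path: PKCS#8 EncryptedPrivateKeyInfo (PBES2/PBKDF2)
//   var pem = forge.pki.encryptRsaPrivateKey(
//     keypair.privateKey, 'password', {algorithm: 'aes256'});
//   // legacy path: old-style encapsulated Proc-Type / DEK-Info headers
//   var legacyPem = forge.pki.encryptRsaPrivateKey(
//     keypair.privateKey, 'password', {algorithm: 'aes128', legacy: true});
//   var restored = forge.pki.decryptRsaPrivateKey(pem, 'password');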
/***/ "./node_modules/node-forge/lib/pbkdf2.js":
/*!***********************************************!*\
!*** ./node_modules/node-forge/lib/pbkdf2.js ***!
\***********************************************/
/***/ ((module, __unused_webpack_exports, __webpack_require__) => {
eval("/**\n * Password-Based Key-Derivation Function #2 implementation.\n *\n * See RFC 2898 for details.\n *\n * @author Dave Longley\n *\n * Copyright (c) 2010-2013 Digital Bazaar, Inc.\n */\nvar forge = __webpack_require__(/*! ./forge */ \"./node_modules/node-forge/lib/forge.js\");\n__webpack_require__(/*! ./hmac */ \"./node_modules/node-forge/lib/hmac.js\");\n__webpack_require__(/*! ./md */ \"./node_modules/node-forge/lib/md.js\");\n__webpack_require__(/*! ./util */ \"./node_modules/node-forge/lib/util.js\");\n\nvar pkcs5 = forge.pkcs5 = forge.pkcs5 || {};\n\nvar crypto;\nif(forge.util.isNodejs && !forge.options.usePureJavaScript) {\n crypto = __webpack_require__(/*! crypto */ \"?b254\");\n}\n\n/**\n * Derives a key from a password.\n *\n * @param p the password as a binary-encoded string of bytes.\n * @param s the salt as a binary-encoded string of bytes.\n * @param c the iteration count, a positive integer.\n * @param dkLen the intended length, in bytes, of the derived key,\n * (max: 2^32 - 1) * hash length of the PRF.\n * @param [md] the message digest (or algorithm identifier as a string) to use\n * in the PRF, defaults to SHA-1.\n * @param [callback(err, key)] presence triggers asynchronous version, called\n * once the operation completes.\n *\n * @return the derived key, as a binary-encoded string of bytes, for the\n * synchronous version (if no callback is specified).\n */\nmodule.exports = forge.pbkdf2 = pkcs5.pbkdf2 = function(\n p, s, c, dkLen, md, callback) {\n if(typeof md === 'function') {\n callback = md;\n md = null;\n }\n\n // use native implementation if possible and not disabled, note that\n // some node versions only support SHA-1, others allow digest to be changed\n if(forge.util.isNodejs && !forge.options.usePureJavaScript &&\n crypto.pbkdf2 && (md === null || typeof md !== 'object') &&\n (crypto.pbkdf2Sync.length > 4 || (!md || md === 'sha1'))) {\n if(typeof md !== 'string') {\n // default prf to SHA-1\n md = 'sha1';\n }\n p = Buffer.from(p, 'binary');\n s = Buffer.from(s, 'binary');\n if(!callback) {\n if(crypto.pbkdf2Sync.length === 4) {\n return crypto.pbkdf2Sync(p, s, c, dkLen).toString('binary');\n }\n return crypto.pbkdf2Sync(p, s, c, dkLen, md).toString('binary');\n }\n if(crypto.pbkdf2Sync.length === 4) {\n return crypto.pbkdf2(p, s, c, dkLen, function(err, key) {\n if(err) {\n return callback(err);\n }\n callback(null, key.toString('binary'));\n });\n }\n return crypto.pbkdf2(p, s, c, dkLen, md, function(err, key) {\n if(err) {\n return callback(err);\n }\n callback(null, key.toString('binary'));\n });\n }\n\n if(typeof md === 'undefined' || md === null) {\n // default prf to SHA-1\n md = 'sha1';\n }\n if(typeof md === 'string') {\n if(!(md in forge.md.algorithms)) {\n throw new Error('Unknown hash algorithm: ' + md);\n }\n md = forge.md[md].create();\n }\n\n var hLen = md.digestLength;\n\n /* 1. If dkLen > (2^32 - 1) * hLen, output \"derived key too long\" and\n stop. */\n if(dkLen > (0xFFFFFFFF * hLen)) {\n var err = new Error('Derived key is too long.');\n if(callback) {\n return callback(err);\n }\n throw err;\n }\n\n /* 2. Let len be the number of hLen-octet blocks in the derived key,\n rounding up, and let r be the number of octets in the last\n block:\n\n len = CEIL(dkLen / hLen),\n r = dkLen - (len - 1) * hLen. */\n var len = Math.ceil(dkLen / hLen);\n var r = dkLen - (len - 1) * hLen;\n\n /* 3. 
For each block of the derived key apply the function F defined\n below to the password P, the salt S, the iteration count c, and\n the block index to compute the block:\n\n T_1 = F(P, S, c, 1),\n T_2 = F(P, S, c, 2),\n ...\n T_len = F(P, S, c, len),\n\n where the function F is defined as the exclusive-or sum of the\n first c iterates of the underlying pseudorandom function PRF\n applied to the password P and the concatenation of the salt S\n and the block index i:\n\n F(P, S, c, i) = u_1 XOR u_2 XOR ... XOR u_c\n\n where\n\n u_1 = PRF(P, S || INT(i)),\n u_2 = PRF(P, u_1),\n ...\n u_c = PRF(P, u_{c-1}).\n\n Here, INT(i) is a four-octet encoding of the integer i, most\n significant octet first. */\n var prf = forge.hmac.create();\n prf.start(md, p);\n var dk = '';\n var xor, u_c, u_c1;\n\n // sync version\n if(!callback) {\n for(var i = 1; i <= len; ++i) {\n // PRF(P, S || INT(i)) (first iteration)\n prf.start(null, null);\n prf.update(s);\n prf.update(forge.util.int32ToBytes(i));\n xor = u_c1 = prf.digest().getBytes();\n\n // PRF(P, u_{c-1}) (other iterations)\n for(var j = 2; j <= c; ++j) {\n prf.start(null, null);\n prf.update(u_c1);\n u_c = prf.digest().getBytes();\n // F(p, s, c, i)\n xor = forge.util.xorBytes(xor, u_c, hLen);\n u_c1 = u_c;\n }\n\n /* 4. Concatenate the blocks and extract the first dkLen octets to\n produce a derived key DK:\n\n DK = T_1 || T_2 || ... || T_len<0..r-1> */\n dk += (i < len) ? xor : xor.substr(0, r);\n }\n /* 5. Output the derived key DK. */\n return dk;\n }\n\n // async version\n var i = 1, j;\n function outer() {\n if(i > len) {\n // done\n return callback(null, dk);\n }\n\n // PRF(P, S || INT(i)) (first iteration)\n prf.start(null, null);\n prf.update(s);\n prf.update(forge.util.int32ToBytes(i));\n xor = u_c1 = prf.digest().getBytes();\n\n // PRF(P, u_{c-1}) (other iterations)\n j = 2;\n inner();\n }\n\n function inner() {\n if(j <= c) {\n prf.start(null, null);\n prf.update(u_c1);\n u_c = prf.digest().getBytes();\n // F(p, s, c, i)\n xor = forge.util.xorBytes(xor, u_c, hLen);\n u_c1 = u_c;\n ++j;\n return forge.util.setImmediate(inner);\n }\n\n /* 4. Concatenate the blocks and extract the first dkLen octets to\n produce a derived key DK:\n\n DK = T_1 || T_2 || ... || T_len<0..r-1> */\n dk += (i < len) ? xor : xor.substr(0, r);\n\n ++i;\n outer();\n }\n\n outer();\n};\n\n\n//# sourceURL=webpack://@waku/noise-rtc/./node_modules/node-forge/lib/pbkdf2.js?");
/***/ }),
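// Usage sketch for forge.pkcs5.pbkdf2 from the module above, assuming node-forge is
// loaded as `forge`; the salt, iteration count, key length and PRF are illustrative.
//
//   var forge = require('node-forge');
//   var salt = forge.random.getBytesSync(16);
//   // synchronous form: returns dkLen bytes as a binary-encoded string
//   var dk = forge.pkcs5.pbkdf2('password', salt, 10000, 32, 'sha256');
//   console.log(forge.util.bytesToHex(dk));
//   // asynchronous form: a trailing callback switches to the async path
//   forge.pkcs5.pbkdf2('password', salt, 10000, 32, 'sha256', function(err, key) {
//     if(err) throw err;
//     console.log(forge.util.bytesToHex(key));
//   });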
/***/ "./node_modules/node-forge/lib/pem.js":
/*!********************************************!*\
!*** ./node_modules/node-forge/lib/pem.js ***!
\********************************************/
/***/ ((module, __unused_webpack_exports, __webpack_require__) => {
eval("/**\n * Javascript implementation of basic PEM (Privacy Enhanced Mail) algorithms.\n *\n * See: RFC 1421.\n *\n * @author Dave Longley\n *\n * Copyright (c) 2013-2014 Digital Bazaar, Inc.\n *\n * A Forge PEM object has the following fields:\n *\n * type: identifies the type of message (eg: \"RSA PRIVATE KEY\").\n *\n * procType: identifies the type of processing performed on the message,\n * it has two subfields: version and type, eg: 4,ENCRYPTED.\n *\n * contentDomain: identifies the type of content in the message, typically\n * only uses the value: \"RFC822\".\n *\n * dekInfo: identifies the message encryption algorithm and mode and includes\n * any parameters for the algorithm, it has two subfields: algorithm and\n * parameters, eg: DES-CBC,F8143EDE5960C597.\n *\n * headers: contains all other PEM encapsulated headers -- where order is\n * significant (for pairing data like recipient ID + key info).\n *\n * body: the binary-encoded body.\n */\nvar forge = __webpack_require__(/*! ./forge */ \"./node_modules/node-forge/lib/forge.js\");\n__webpack_require__(/*! ./util */ \"./node_modules/node-forge/lib/util.js\");\n\n// shortcut for pem API\nvar pem = module.exports = forge.pem = forge.pem || {};\n\n/**\n * Encodes (serializes) the given PEM object.\n *\n * @param msg the PEM message object to encode.\n * @param options the options to use:\n * maxline the maximum characters per line for the body, (default: 64).\n *\n * @return the PEM-formatted string.\n */\npem.encode = function(msg, options) {\n options = options || {};\n var rval = '-----BEGIN ' + msg.type + '-----\\r\\n';\n\n // encode special headers\n var header;\n if(msg.procType) {\n header = {\n name: 'Proc-Type',\n values: [String(msg.procType.version), msg.procType.type]\n };\n rval += foldHeader(header);\n }\n if(msg.contentDomain) {\n header = {name: 'Content-Domain', values: [msg.contentDomain]};\n rval += foldHeader(header);\n }\n if(msg.dekInfo) {\n header = {name: 'DEK-Info', values: [msg.dekInfo.algorithm]};\n if(msg.dekInfo.parameters) {\n header.values.push(msg.dekInfo.parameters);\n }\n rval += foldHeader(header);\n }\n\n if(msg.headers) {\n // encode all other headers\n for(var i = 0; i < msg.headers.length; ++i) {\n rval += foldHeader(msg.headers[i]);\n }\n }\n\n // terminate header\n if(msg.procType) {\n rval += '\\r\\n';\n }\n\n // add body\n rval += forge.util.encode64(msg.body, options.maxline || 64) + '\\r\\n';\n\n rval += '-----END ' + msg.type + '-----\\r\\n';\n return rval;\n};\n\n/**\n * Decodes (deserializes) all PEM messages found in the given string.\n *\n * @param str the PEM-formatted string to decode.\n *\n * @return the PEM message objects in an array.\n */\npem.decode = function(str) {\n var rval = [];\n\n // split string into PEM messages (be lenient w/EOF on BEGIN line)\n var rMessage = /\\s*-----BEGIN ([A-Z0-9- ]+)-----\\r?\\n?([\\x21-\\x7e\\s]+?(?:\\r?\\n\\r?\\n))?([:A-Za-z0-9+\\/=\\s]+?)-----END \\1-----/g;\n var rHeader = /([\\x21-\\x7e]+):\\s*([\\x21-\\x7e\\s^:]+)/;\n var rCRLF = /\\r?\\n/;\n var match;\n while(true) {\n match = rMessage.exec(str);\n if(!match) {\n break;\n }\n\n // accept \"NEW CERTIFICATE REQUEST\" as \"CERTIFICATE REQUEST\"\n // https://datatracker.ietf.org/doc/html/rfc7468#section-7\n var type = match[1];\n if(type === 'NEW CERTIFICATE REQUEST') {\n type = 'CERTIFICATE REQUEST';\n }\n\n var msg = {\n type: type,\n procType: null,\n contentDomain: null,\n dekInfo: null,\n headers: [],\n body: forge.util.decode64(match[3])\n };\n rval.push(msg);\n\n // no headers\n 
if(!match[2]) {\n continue;\n }\n\n // parse headers\n var lines = match[2].split(rCRLF);\n var li = 0;\n while(match && li < lines.length) {\n // get line, trim any rhs whitespace\n var line = lines[li].replace(/\\s+$/, '');\n\n // RFC2822 unfold any following folded lines\n for(var nl = li + 1; nl < lines.length; ++nl) {\n var next = lines[nl];\n if(!/\\s/.test(next[0])) {\n break;\n }\n line += next;\n li = nl;\n }\n\n // parse header\n match = line.match(rHeader);\n if(match) {\n var header = {name: match[1], values: []};\n var values = match[2].split(',');\n for(var vi = 0; vi < values.length; ++vi) {\n header.values.push(ltrim(values[vi]));\n }\n\n // Proc-Type must be the first header\n if(!msg.procType) {\n if(header.name !== 'Proc-Type') {\n throw new Error('Invalid PEM formatted message. The first ' +\n 'encapsulated header must be \"Proc-Type\".');\n } else if(header.values.length !== 2) {\n throw new Error('Invalid PEM formatted message. The \"Proc-Type\" ' +\n 'header must have two subfields.');\n }\n msg.procType = {version: values[0], type: values[1]};\n } else if(!msg.contentDomain && header.name === 'Content-Domain') {\n // special-case Content-Domain\n msg.contentDomain = values[0] || '';\n } else if(!msg.dekInfo && header.name === 'DEK-Info') {\n // special-case DEK-Info\n if(header.values.length === 0) {\n throw new Error('Invalid PEM formatted message. The \"DEK-Info\" ' +\n 'header must have at least one subfield.');\n }\n msg.dekInfo = {algorithm: values[0], parameters: values[1] || null};\n } else {\n msg.headers.push(header);\n }\n }\n\n ++li;\n }\n\n if(msg.procType === 'ENCRYPTED' && !msg.dekInfo) {\n throw new Error('Invalid PEM formatted message. The \"DEK-Info\" ' +\n 'header must be present if \"Proc-Type\" is \"ENCRYPTED\".');\n }\n }\n\n if(rval.length === 0) {\n throw new Error('Invalid PEM formatted message.');\n }\n\n return rval;\n};\n\nfunction foldHeader(header) {\n var rval = header.name + ': ';\n\n // ensure values with CRLF are folded\n var values = [];\n var insertSpace = function(match, $1) {\n return ' ' + $1;\n };\n for(var i = 0; i < header.values.length; ++i) {\n values.push(header.values[i].replace(/^(\\S+\\r\\n)/, insertSpace));\n }\n rval += values.join(',') + '\\r\\n';\n\n // do folding\n var length = 0;\n var candidate = -1;\n for(var i = 0; i < rval.length; ++i, ++length) {\n if(length > 65 && candidate !== -1) {\n var insert = rval[candidate];\n if(insert === ',') {\n ++candidate;\n rval = rval.substr(0, candidate) + '\\r\\n ' + rval.substr(candidate);\n } else {\n rval = rval.substr(0, candidate) +\n '\\r\\n' + insert + rval.substr(candidate + 1);\n }\n length = (i - candidate - 1);\n candidate = -1;\n ++i;\n } else if(rval[i] === ' ' || rval[i] === '\\t' || rval[i] === ',') {\n candidate = i;\n }\n }\n\n return rval;\n}\n\nfunction ltrim(str) {\n return str.replace(/^\\s+/, '');\n}\n\n\n//# sourceURL=webpack://@waku/noise-rtc/./node_modules/node-forge/lib/pem.js?");
/***/ }),
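// Round-trip sketch for forge.pem.encode / forge.pem.decode from the module above,
// assuming node-forge is loaded as `forge`; the type and body are illustrative only.
//
//   var forge = require('node-forge');
//   var pemString = forge.pem.encode({
//     type: 'RSA PRIVATE KEY',
//     body: forge.util.decode64('AQIDBA==')  // any binary-encoded body
//   });
//   var messages = forge.pem.decode(pemString);  // array of PEM message objects
//   console.log(messages[0].type, forge.util.bytesToHex(messages[0].body));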
/***/ "./node_modules/node-forge/lib/pkcs1.js":
/*!**********************************************!*\
!*** ./node_modules/node-forge/lib/pkcs1.js ***!
\**********************************************/
/***/ ((module, __unused_webpack_exports, __webpack_require__) => {
eval("/**\n * Partial implementation of PKCS#1 v2.2: RSA-OEAP\n *\n * Modified but based on the following MIT and BSD licensed code:\n *\n * https://github.com/kjur/jsjws/blob/master/rsa.js:\n *\n * The 'jsjws'(JSON Web Signature JavaScript Library) License\n *\n * Copyright (c) 2012 Kenji Urushima\n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to deal\n * in the Software without restriction, including without limitation the rights\n * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n * copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN\n * THE SOFTWARE.\n *\n * http://webrsa.cvs.sourceforge.net/viewvc/webrsa/Client/RSAES-OAEP.js?content-type=text%2Fplain:\n *\n * RSAES-OAEP.js\n * $Id: RSAES-OAEP.js,v 1.1.1.1 2003/03/19 15:37:20 ellispritchard Exp $\n * JavaScript Implementation of PKCS #1 v2.1 RSA CRYPTOGRAPHY STANDARD (RSA Laboratories, June 14, 2002)\n * Copyright (C) Ellis Pritchard, Guardian Unlimited 2003.\n * Contact: ellis@nukinetics.com\n * Distributed under the BSD License.\n *\n * Official documentation: http://www.rsa.com/rsalabs/node.asp?id=2125\n *\n * @author Evan Jones (http://evanjones.ca/)\n * @author Dave Longley\n *\n * Copyright (c) 2013-2014 Digital Bazaar, Inc.\n */\nvar forge = __webpack_require__(/*! ./forge */ \"./node_modules/node-forge/lib/forge.js\");\n__webpack_require__(/*! ./util */ \"./node_modules/node-forge/lib/util.js\");\n__webpack_require__(/*! ./random */ \"./node_modules/node-forge/lib/random.js\");\n__webpack_require__(/*! 
./sha1 */ \"./node_modules/node-forge/lib/sha1.js\");\n\n// shortcut for PKCS#1 API\nvar pkcs1 = module.exports = forge.pkcs1 = forge.pkcs1 || {};\n\n/**\n * Encode the given RSAES-OAEP message (M) using key, with optional label (L)\n * and seed.\n *\n * This method does not perform RSA encryption, it only encodes the message\n * using RSAES-OAEP.\n *\n * @param key the RSA key to use.\n * @param message the message to encode.\n * @param options the options to use:\n * label an optional label to use.\n * seed the seed to use.\n * md the message digest object to use, undefined for SHA-1.\n * mgf1 optional mgf1 parameters:\n * md the message digest object to use for MGF1.\n *\n * @return the encoded message bytes.\n */\npkcs1.encode_rsa_oaep = function(key, message, options) {\n // parse arguments\n var label;\n var seed;\n var md;\n var mgf1Md;\n // legacy args (label, seed, md)\n if(typeof options === 'string') {\n label = options;\n seed = arguments[3] || undefined;\n md = arguments[4] || undefined;\n } else if(options) {\n label = options.label || undefined;\n seed = options.seed || undefined;\n md = options.md || undefined;\n if(options.mgf1 && options.mgf1.md) {\n mgf1Md = options.mgf1.md;\n }\n }\n\n // default OAEP to SHA-1 message digest\n if(!md) {\n md = forge.md.sha1.create();\n } else {\n md.start();\n }\n\n // default MGF-1 to same as OAEP\n if(!mgf1Md) {\n mgf1Md = md;\n }\n\n // compute length in bytes and check output\n var keyLength = Math.ceil(key.n.bitLength() / 8);\n var maxLength = keyLength - 2 * md.digestLength - 2;\n if(message.length > maxLength) {\n var error = new Error('RSAES-OAEP input message length is too long.');\n error.length = message.length;\n error.maxLength = maxLength;\n throw error;\n }\n\n if(!label) {\n label = '';\n }\n md.update(label, 'raw');\n var lHash = md.digest();\n\n var PS = '';\n var PS_length = maxLength - message.length;\n for(var i = 0; i < PS_length; i++) {\n PS += '\\x00';\n }\n\n var DB = lHash.getBytes() + PS + '\\x01' + message;\n\n if(!seed) {\n seed = forge.random.getBytes(md.digestLength);\n } else if(seed.length !== md.digestLength) {\n var error = new Error('Invalid RSAES-OAEP seed. 
The seed length must ' +\n 'match the digest length.');\n error.seedLength = seed.length;\n error.digestLength = md.digestLength;\n throw error;\n }\n\n var dbMask = rsa_mgf1(seed, keyLength - md.digestLength - 1, mgf1Md);\n var maskedDB = forge.util.xorBytes(DB, dbMask, DB.length);\n\n var seedMask = rsa_mgf1(maskedDB, md.digestLength, mgf1Md);\n var maskedSeed = forge.util.xorBytes(seed, seedMask, seed.length);\n\n // return encoded message\n return '\\x00' + maskedSeed + maskedDB;\n};\n\n/**\n * Decode the given RSAES-OAEP encoded message (EM) using key, with optional\n * label (L).\n *\n * This method does not perform RSA decryption, it only decodes the message\n * using RSAES-OAEP.\n *\n * @param key the RSA key to use.\n * @param em the encoded message to decode.\n * @param options the options to use:\n * label an optional label to use.\n * md the message digest object to use for OAEP, undefined for SHA-1.\n * mgf1 optional mgf1 parameters:\n * md the message digest object to use for MGF1.\n *\n * @return the decoded message bytes.\n */\npkcs1.decode_rsa_oaep = function(key, em, options) {\n // parse args\n var label;\n var md;\n var mgf1Md;\n // legacy args\n if(typeof options === 'string') {\n label = options;\n md = arguments[3] || undefined;\n } else if(options) {\n label = options.label || undefined;\n md = options.md || undefined;\n if(options.mgf1 && options.mgf1.md) {\n mgf1Md = options.mgf1.md;\n }\n }\n\n // compute length in bytes\n var keyLength = Math.ceil(key.n.bitLength() / 8);\n\n if(em.length !== keyLength) {\n var error = new Error('RSAES-OAEP encoded message length is invalid.');\n error.length = em.length;\n error.expectedLength = keyLength;\n throw error;\n }\n\n // default OAEP to SHA-1 message digest\n if(md === undefined) {\n md = forge.md.sha1.create();\n } else {\n md.start();\n }\n\n // default MGF-1 to same as OAEP\n if(!mgf1Md) {\n mgf1Md = md;\n }\n\n if(keyLength < 2 * md.digestLength + 2) {\n throw new Error('RSAES-OAEP key is too short for the hash function.');\n }\n\n if(!label) {\n label = '';\n }\n md.update(label, 'raw');\n var lHash = md.digest().getBytes();\n\n // split the message into its parts\n var y = em.charAt(0);\n var maskedSeed = em.substring(1, md.digestLength + 1);\n var maskedDB = em.substring(1 + md.digestLength);\n\n var seedMask = rsa_mgf1(maskedDB, md.digestLength, mgf1Md);\n var seed = forge.util.xorBytes(maskedSeed, seedMask, maskedSeed.length);\n\n var dbMask = rsa_mgf1(seed, keyLength - md.digestLength - 1, mgf1Md);\n var db = forge.util.xorBytes(maskedDB, dbMask, maskedDB.length);\n\n var lHashPrime = db.substring(0, md.digestLength);\n\n // constant time check that all values match what is expected\n var error = (y !== '\\x00');\n\n // constant time check lHash vs lHashPrime\n for(var i = 0; i < md.digestLength; ++i) {\n error |= (lHash.charAt(i) !== lHashPrime.charAt(i));\n }\n\n // \"constant time\" find the 0x1 byte separating the padding (zeros) from the\n // message\n // TODO: It must be possible to do this in a better/smarter way?\n var in_ps = 1;\n var index = md.digestLength;\n for(var j = md.digestLength; j < db.length; j++) {\n var code = db.charCodeAt(j);\n\n var is_0 = (code & 0x1) ^ 0x1;\n\n // non-zero if not 0 or 1 in the ps section\n var error_mask = in_ps ? 
0xfffe : 0x0000;\n error |= (code & error_mask);\n\n // latch in_ps to zero after we find 0x1\n in_ps = in_ps & is_0;\n index += in_ps;\n }\n\n if(error || db.charCodeAt(index) !== 0x1) {\n throw new Error('Invalid RSAES-OAEP padding.');\n }\n\n return db.substring(index + 1);\n};\n\nfunction rsa_mgf1(seed, maskLength, hash) {\n // default to SHA-1 message digest\n if(!hash) {\n hash = forge.md.sha1.create();\n }\n var t = '';\n var count = Math.ceil(maskLength / hash.digestLength);\n for(var i = 0; i < count; ++i) {\n var c = String.fromCharCode(\n (i >> 24) & 0xFF, (i >> 16) & 0xFF, (i >> 8) & 0xFF, i & 0xFF);\n hash.start();\n hash.update(seed + c);\n t += hash.digest().getBytes();\n }\n return t.substring(0, maskLength);\n}\n\n\n//# sourceURL=webpack://@waku/noise-rtc/./node_modules/node-forge/lib/pkcs1.js?");
/***/ }),
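// Sketch of the RSAES-OAEP padding pair from the module above (encode_rsa_oaep /
// decode_rsa_oaep). These helpers only apply and remove the OAEP padding; no RSA
// operation is performed here. Assumptions: node-forge loaded as `forge`; the
// 2048-bit key and SHA-256 for both OAEP and MGF1 are illustrative choices.
//
//   var forge = require('node-forge');
//   var keypair = forge.pki.rsa.generateKeyPair({bits: 2048});
//   var em = forge.pkcs1.encode_rsa_oaep(keypair.publicKey, 'secret message', {
//     md: forge.md.sha256.create(),
//     mgf1: {md: forge.md.sha256.create()}
//   });
//   var out = forge.pkcs1.decode_rsa_oaep(keypair.publicKey, em, {
//     md: forge.md.sha256.create(),
//     mgf1: {md: forge.md.sha256.create()}
//   });
//   // out === 'secret message'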
/***/ "./node_modules/node-forge/lib/pkcs7.js":
/*!**********************************************!*\
!*** ./node_modules/node-forge/lib/pkcs7.js ***!
\**********************************************/
/***/ ((module, __unused_webpack_exports, __webpack_require__) => {
eval("/**\n * Javascript implementation of PKCS#7 v1.5.\n *\n * @author Stefan Siegl\n * @author Dave Longley\n *\n * Copyright (c) 2012 Stefan Siegl \n * Copyright (c) 2012-2015 Digital Bazaar, Inc.\n *\n * Currently this implementation only supports ContentType of EnvelopedData,\n * EncryptedData, or SignedData at the root level. The top level elements may\n * contain only a ContentInfo of ContentType Data, i.e. plain data. Further\n * nesting is not (yet) supported.\n *\n * The Forge validators for PKCS #7's ASN.1 structures are available from\n * a separate file pkcs7asn1.js, since those are referenced from other\n * PKCS standards like PKCS #12.\n */\nvar forge = __webpack_require__(/*! ./forge */ \"./node_modules/node-forge/lib/forge.js\");\n__webpack_require__(/*! ./aes */ \"./node_modules/node-forge/lib/aes.js\");\n__webpack_require__(/*! ./asn1 */ \"./node_modules/node-forge/lib/asn1.js\");\n__webpack_require__(/*! ./des */ \"./node_modules/node-forge/lib/des.js\");\n__webpack_require__(/*! ./oids */ \"./node_modules/node-forge/lib/oids.js\");\n__webpack_require__(/*! ./pem */ \"./node_modules/node-forge/lib/pem.js\");\n__webpack_require__(/*! ./pkcs7asn1 */ \"./node_modules/node-forge/lib/pkcs7asn1.js\");\n__webpack_require__(/*! ./random */ \"./node_modules/node-forge/lib/random.js\");\n__webpack_require__(/*! ./util */ \"./node_modules/node-forge/lib/util.js\");\n__webpack_require__(/*! ./x509 */ \"./node_modules/node-forge/lib/x509.js\");\n\n// shortcut for ASN.1 API\nvar asn1 = forge.asn1;\n\n// shortcut for PKCS#7 API\nvar p7 = module.exports = forge.pkcs7 = forge.pkcs7 || {};\n\n/**\n * Converts a PKCS#7 message from PEM format.\n *\n * @param pem the PEM-formatted PKCS#7 message.\n *\n * @return the PKCS#7 message.\n */\np7.messageFromPem = function(pem) {\n var msg = forge.pem.decode(pem)[0];\n\n if(msg.type !== 'PKCS7') {\n var error = new Error('Could not convert PKCS#7 message from PEM; PEM ' +\n 'header type is not \"PKCS#7\".');\n error.headerType = msg.type;\n throw error;\n }\n if(msg.procType && msg.procType.type === 'ENCRYPTED') {\n throw new Error('Could not convert PKCS#7 message from PEM; PEM is encrypted.');\n }\n\n // convert DER to ASN.1 object\n var obj = asn1.fromDer(msg.body);\n\n return p7.messageFromAsn1(obj);\n};\n\n/**\n * Converts a PKCS#7 message to PEM format.\n *\n * @param msg The PKCS#7 message object\n * @param maxline The maximum characters per line, defaults to 64.\n *\n * @return The PEM-formatted PKCS#7 message.\n */\np7.messageToPem = function(msg, maxline) {\n // convert to ASN.1, then DER, then PEM-encode\n var pemObj = {\n type: 'PKCS7',\n body: asn1.toDer(msg.toAsn1()).getBytes()\n };\n return forge.pem.encode(pemObj, {maxline: maxline});\n};\n\n/**\n * Converts a PKCS#7 message from an ASN.1 object.\n *\n * @param obj the ASN.1 representation of a ContentInfo.\n *\n * @return the PKCS#7 message.\n */\np7.messageFromAsn1 = function(obj) {\n // validate root level ContentInfo and capture data\n var capture = {};\n var errors = [];\n if(!asn1.validate(obj, p7.asn1.contentInfoValidator, capture, errors)) {\n var error = new Error('Cannot read PKCS#7 message. 
' +\n 'ASN.1 object is not an PKCS#7 ContentInfo.');\n error.errors = errors;\n throw error;\n }\n\n var contentType = asn1.derToOid(capture.contentType);\n var msg;\n\n switch(contentType) {\n case forge.pki.oids.envelopedData:\n msg = p7.createEnvelopedData();\n break;\n\n case forge.pki.oids.encryptedData:\n msg = p7.createEncryptedData();\n break;\n\n case forge.pki.oids.signedData:\n msg = p7.createSignedData();\n break;\n\n default:\n throw new Error('Cannot read PKCS#7 message. ContentType with OID ' +\n contentType + ' is not (yet) supported.');\n }\n\n msg.fromAsn1(capture.content.value[0]);\n return msg;\n};\n\np7.createSignedData = function() {\n var msg = null;\n msg = {\n type: forge.pki.oids.signedData,\n version: 1,\n certificates: [],\n crls: [],\n // TODO: add json-formatted signer stuff here?\n signers: [],\n // populated during sign()\n digestAlgorithmIdentifiers: [],\n contentInfo: null,\n signerInfos: [],\n\n fromAsn1: function(obj) {\n // validate SignedData content block and capture data.\n _fromAsn1(msg, obj, p7.asn1.signedDataValidator);\n msg.certificates = [];\n msg.crls = [];\n msg.digestAlgorithmIdentifiers = [];\n msg.contentInfo = null;\n msg.signerInfos = [];\n\n if(msg.rawCapture.certificates) {\n var certs = msg.rawCapture.certificates.value;\n for(var i = 0; i < certs.length; ++i) {\n msg.certificates.push(forge.pki.certificateFromAsn1(certs[i]));\n }\n }\n\n // TODO: parse crls\n },\n\n toAsn1: function() {\n // degenerate case with no content\n if(!msg.contentInfo) {\n msg.sign();\n }\n\n var certs = [];\n for(var i = 0; i < msg.certificates.length; ++i) {\n certs.push(forge.pki.certificateToAsn1(msg.certificates[i]));\n }\n\n var crls = [];\n // TODO: implement CRLs\n\n // [0] SignedData\n var signedData = asn1.create(asn1.Class.CONTEXT_SPECIFIC, 0, true, [\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [\n // Version\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.INTEGER, false,\n asn1.integerToDer(msg.version).getBytes()),\n // DigestAlgorithmIdentifiers\n asn1.create(\n asn1.Class.UNIVERSAL, asn1.Type.SET, true,\n msg.digestAlgorithmIdentifiers),\n // ContentInfo\n msg.contentInfo\n ])\n ]);\n if(certs.length > 0) {\n // [0] IMPLICIT ExtendedCertificatesAndCertificates OPTIONAL\n signedData.value[0].value.push(\n asn1.create(asn1.Class.CONTEXT_SPECIFIC, 0, true, certs));\n }\n if(crls.length > 0) {\n // [1] IMPLICIT CertificateRevocationLists OPTIONAL\n signedData.value[0].value.push(\n asn1.create(asn1.Class.CONTEXT_SPECIFIC, 1, true, crls));\n }\n // SignerInfos\n signedData.value[0].value.push(\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SET, true,\n msg.signerInfos));\n\n // ContentInfo\n return asn1.create(\n asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [\n // ContentType\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OID, false,\n asn1.oidToDer(msg.type).getBytes()),\n // [0] SignedData\n signedData\n ]);\n },\n\n /**\n * Add (another) entity to list of signers.\n *\n * Note: If authenticatedAttributes are provided, then, per RFC 2315,\n * they must include at least two attributes: content type and\n * message digest. 
The message digest attribute value will be\n * auto-calculated during signing and will be ignored if provided.\n *\n * Here's an example of providing these two attributes:\n *\n * forge.pkcs7.createSignedData();\n * p7.addSigner({\n * issuer: cert.issuer.attributes,\n * serialNumber: cert.serialNumber,\n * key: privateKey,\n * digestAlgorithm: forge.pki.oids.sha1,\n * authenticatedAttributes: [{\n * type: forge.pki.oids.contentType,\n * value: forge.pki.oids.data\n * }, {\n * type: forge.pki.oids.messageDigest\n * }]\n * });\n *\n * TODO: Support [subjectKeyIdentifier] as signer's ID.\n *\n * @param signer the signer information:\n * key the signer's private key.\n * [certificate] a certificate containing the public key\n * associated with the signer's private key; use this option as\n * an alternative to specifying signer.issuer and\n * signer.serialNumber.\n * [issuer] the issuer attributes (eg: cert.issuer.attributes).\n * [serialNumber] the signer's certificate's serial number in\n * hexadecimal (eg: cert.serialNumber).\n * [digestAlgorithm] the message digest OID, as a string, to use\n * (eg: forge.pki.oids.sha1).\n * [authenticatedAttributes] an optional array of attributes\n * to also sign along with the content.\n */\n addSigner: function(signer) {\n var issuer = signer.issuer;\n var serialNumber = signer.serialNumber;\n if(signer.certificate) {\n var cert = signer.certificate;\n if(typeof cert === 'string') {\n cert = forge.pki.certificateFromPem(cert);\n }\n issuer = cert.issuer.attributes;\n serialNumber = cert.serialNumber;\n }\n var key = signer.key;\n if(!key) {\n throw new Error(\n 'Could not add PKCS#7 signer; no private key specified.');\n }\n if(typeof key === 'string') {\n key = forge.pki.privateKeyFromPem(key);\n }\n\n // ensure OID known for digest algorithm\n var digestAlgorithm = signer.digestAlgorithm || forge.pki.oids.sha1;\n switch(digestAlgorithm) {\n case forge.pki.oids.sha1:\n case forge.pki.oids.sha256:\n case forge.pki.oids.sha384:\n case forge.pki.oids.sha512:\n case forge.pki.oids.md5:\n break;\n default:\n throw new Error(\n 'Could not add PKCS#7 signer; unknown message digest algorithm: ' +\n digestAlgorithm);\n }\n\n // if authenticatedAttributes is present, then the attributes\n // must contain at least PKCS #9 content-type and message-digest\n var authenticatedAttributes = signer.authenticatedAttributes || [];\n if(authenticatedAttributes.length > 0) {\n var contentType = false;\n var messageDigest = false;\n for(var i = 0; i < authenticatedAttributes.length; ++i) {\n var attr = authenticatedAttributes[i];\n if(!contentType && attr.type === forge.pki.oids.contentType) {\n contentType = true;\n if(messageDigest) {\n break;\n }\n continue;\n }\n if(!messageDigest && attr.type === forge.pki.oids.messageDigest) {\n messageDigest = true;\n if(contentType) {\n break;\n }\n continue;\n }\n }\n\n if(!contentType || !messageDigest) {\n throw new Error('Invalid signer.authenticatedAttributes. If ' +\n 'signer.authenticatedAttributes is specified, then it must ' +\n 'contain at least two attributes, PKCS #9 content-type and ' +\n 'PKCS #9 message-digest.');\n }\n }\n\n msg.signers.push({\n key: key,\n version: 1,\n issuer: issuer,\n serialNumber: serialNumber,\n digestAlgorithm: digestAlgorithm,\n signatureAlgorithm: forge.pki.oids.rsaEncryption,\n signature: null,\n authenticatedAttributes: authenticatedAttributes,\n unauthenticatedAttributes: []\n });\n },\n\n /**\n * Signs the content.\n * @param options Options to apply when signing:\n * [detached] boolean. 
If signing should be done in detached mode. Defaults to false.\n */\n sign: function(options) {\n options = options || {};\n // auto-generate content info\n if(typeof msg.content !== 'object' || msg.contentInfo === null) {\n // use Data ContentInfo\n msg.contentInfo = asn1.create(\n asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [\n // ContentType\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OID, false,\n asn1.oidToDer(forge.pki.oids.data).getBytes())\n ]);\n\n // add actual content, if present\n if('content' in msg) {\n var content;\n if(msg.content instanceof forge.util.ByteBuffer) {\n content = msg.content.bytes();\n } else if(typeof msg.content === 'string') {\n content = forge.util.encodeUtf8(msg.content);\n }\n\n if (options.detached) {\n msg.detachedContent = asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OCTETSTRING, false, content);\n } else {\n msg.contentInfo.value.push(\n // [0] EXPLICIT content\n asn1.create(asn1.Class.CONTEXT_SPECIFIC, 0, true, [\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OCTETSTRING, false,\n content)\n ]));\n }\n }\n }\n\n // no signers, return early (degenerate case for certificate container)\n if(msg.signers.length === 0) {\n return;\n }\n\n // generate digest algorithm identifiers\n var mds = addDigestAlgorithmIds();\n\n // generate signerInfos\n addSignerInfos(mds);\n },\n\n verify: function() {\n throw new Error('PKCS#7 signature verification not yet implemented.');\n },\n\n /**\n * Add a certificate.\n *\n * @param cert the certificate to add.\n */\n addCertificate: function(cert) {\n // convert from PEM\n if(typeof cert === 'string') {\n cert = forge.pki.certificateFromPem(cert);\n }\n msg.certificates.push(cert);\n },\n\n /**\n * Add a certificate revokation list.\n *\n * @param crl the certificate revokation list to add.\n */\n addCertificateRevokationList: function(crl) {\n throw new Error('PKCS#7 CRL support not yet implemented.');\n }\n };\n return msg;\n\n function addDigestAlgorithmIds() {\n var mds = {};\n\n for(var i = 0; i < msg.signers.length; ++i) {\n var signer = msg.signers[i];\n var oid = signer.digestAlgorithm;\n if(!(oid in mds)) {\n // content digest\n mds[oid] = forge.md[forge.pki.oids[oid]].create();\n }\n if(signer.authenticatedAttributes.length === 0) {\n // no custom attributes to digest; use content message digest\n signer.md = mds[oid];\n } else {\n // custom attributes to be digested; use own message digest\n // TODO: optimize to just copy message digest state if that\n // feature is ever supported with message digests\n signer.md = forge.md[forge.pki.oids[oid]].create();\n }\n }\n\n // add unique digest algorithm identifiers\n msg.digestAlgorithmIdentifiers = [];\n for(var oid in mds) {\n msg.digestAlgorithmIdentifiers.push(\n // AlgorithmIdentifier\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [\n // algorithm\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OID, false,\n asn1.oidToDer(oid).getBytes()),\n // parameters (null)\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.NULL, false, '')\n ]));\n }\n\n return mds;\n }\n\n function addSignerInfos(mds) {\n var content;\n\n if (msg.detachedContent) {\n // Signature has been made in detached mode.\n content = msg.detachedContent;\n } else {\n // Note: ContentInfo is a SEQUENCE with 2 values, second value is\n // the content field and is optional for a ContentInfo but required here\n // since signers are present\n // get ContentInfo content\n content = msg.contentInfo.value[1];\n // skip [0] EXPLICIT content wrapper\n content = content.value[0];\n }\n\n 
if(!content) {\n throw new Error(\n 'Could not sign PKCS#7 message; there is no content to sign.');\n }\n\n // get ContentInfo content type\n var contentType = asn1.derToOid(msg.contentInfo.value[0].value);\n\n // serialize content\n var bytes = asn1.toDer(content);\n\n // skip identifier and length per RFC 2315 9.3\n // skip identifier (1 byte)\n bytes.getByte();\n // read and discard length bytes\n asn1.getBerValueLength(bytes);\n bytes = bytes.getBytes();\n\n // digest content DER value bytes\n for(var oid in mds) {\n mds[oid].start().update(bytes);\n }\n\n // sign content\n var signingTime = new Date();\n for(var i = 0; i < msg.signers.length; ++i) {\n var signer = msg.signers[i];\n\n if(signer.authenticatedAttributes.length === 0) {\n // if ContentInfo content type is not \"Data\", then\n // authenticatedAttributes must be present per RFC 2315\n if(contentType !== forge.pki.oids.data) {\n throw new Error(\n 'Invalid signer; authenticatedAttributes must be present ' +\n 'when the ContentInfo content type is not PKCS#7 Data.');\n }\n } else {\n // process authenticated attributes\n // [0] IMPLICIT\n signer.authenticatedAttributesAsn1 = asn1.create(\n asn1.Class.CONTEXT_SPECIFIC, 0, true, []);\n\n // per RFC 2315, attributes are to be digested using a SET container\n // not the above [0] IMPLICIT container\n var attrsAsn1 = asn1.create(\n asn1.Class.UNIVERSAL, asn1.Type.SET, true, []);\n\n for(var ai = 0; ai < signer.authenticatedAttributes.length; ++ai) {\n var attr = signer.authenticatedAttributes[ai];\n if(attr.type === forge.pki.oids.messageDigest) {\n // use content message digest as value\n attr.value = mds[signer.digestAlgorithm].digest();\n } else if(attr.type === forge.pki.oids.signingTime) {\n // auto-populate signing time if not already set\n if(!attr.value) {\n attr.value = signingTime;\n }\n }\n\n // convert to ASN.1 and push onto Attributes SET (for signing) and\n // onto authenticatedAttributesAsn1 to complete SignedData ASN.1\n // TODO: optimize away duplication\n attrsAsn1.value.push(_attributeToAsn1(attr));\n signer.authenticatedAttributesAsn1.value.push(_attributeToAsn1(attr));\n }\n\n // DER-serialize and digest SET OF attributes only\n bytes = asn1.toDer(attrsAsn1).getBytes();\n signer.md.start().update(bytes);\n }\n\n // sign digest\n signer.signature = signer.key.sign(signer.md, 'RSASSA-PKCS1-V1_5');\n }\n\n // add signer info\n msg.signerInfos = _signersToAsn1(msg.signers);\n }\n};\n\n/**\n * Creates an empty PKCS#7 message of type EncryptedData.\n *\n * @return the message.\n */\np7.createEncryptedData = function() {\n var msg = null;\n msg = {\n type: forge.pki.oids.encryptedData,\n version: 0,\n encryptedContent: {\n algorithm: forge.pki.oids['aes256-CBC']\n },\n\n /**\n * Reads an EncryptedData content block (in ASN.1 format)\n *\n * @param obj The ASN.1 representation of the EncryptedData content block\n */\n fromAsn1: function(obj) {\n // Validate EncryptedData content block and capture data.\n _fromAsn1(msg, obj, p7.asn1.encryptedDataValidator);\n },\n\n /**\n * Decrypt encrypted content\n *\n * @param key The (symmetric) key as a byte buffer\n */\n decrypt: function(key) {\n if(key !== undefined) {\n msg.encryptedContent.key = key;\n }\n _decryptContent(msg);\n }\n };\n return msg;\n};\n\n/**\n * Creates an empty PKCS#7 message of type EnvelopedData.\n *\n * @return the message.\n */\np7.createEnvelopedData = function() {\n var msg = null;\n msg = {\n type: forge.pki.oids.envelopedData,\n version: 0,\n recipients: [],\n encryptedContent: {\n algorithm: 
forge.pki.oids['aes256-CBC']\n },\n\n /**\n * Reads an EnvelopedData content block (in ASN.1 format)\n *\n * @param obj the ASN.1 representation of the EnvelopedData content block.\n */\n fromAsn1: function(obj) {\n // validate EnvelopedData content block and capture data\n var capture = _fromAsn1(msg, obj, p7.asn1.envelopedDataValidator);\n msg.recipients = _recipientsFromAsn1(capture.recipientInfos.value);\n },\n\n toAsn1: function() {\n // ContentInfo\n return asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [\n // ContentType\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OID, false,\n asn1.oidToDer(msg.type).getBytes()),\n // [0] EnvelopedData\n asn1.create(asn1.Class.CONTEXT_SPECIFIC, 0, true, [\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [\n // Version\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.INTEGER, false,\n asn1.integerToDer(msg.version).getBytes()),\n // RecipientInfos\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SET, true,\n _recipientsToAsn1(msg.recipients)),\n // EncryptedContentInfo\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true,\n _encryptedContentToAsn1(msg.encryptedContent))\n ])\n ])\n ]);\n },\n\n /**\n * Find recipient by X.509 certificate's issuer.\n *\n * @param cert the certificate with the issuer to look for.\n *\n * @return the recipient object.\n */\n findRecipient: function(cert) {\n var sAttr = cert.issuer.attributes;\n\n for(var i = 0; i < msg.recipients.length; ++i) {\n var r = msg.recipients[i];\n var rAttr = r.issuer;\n\n if(r.serialNumber !== cert.serialNumber) {\n continue;\n }\n\n if(rAttr.length !== sAttr.length) {\n continue;\n }\n\n var match = true;\n for(var j = 0; j < sAttr.length; ++j) {\n if(rAttr[j].type !== sAttr[j].type ||\n rAttr[j].value !== sAttr[j].value) {\n match = false;\n break;\n }\n }\n\n if(match) {\n return r;\n }\n }\n\n return null;\n },\n\n /**\n * Decrypt enveloped content\n *\n * @param recipient The recipient object related to the private key\n * @param privKey The (RSA) private key object\n */\n decrypt: function(recipient, privKey) {\n if(msg.encryptedContent.key === undefined && recipient !== undefined &&\n privKey !== undefined) {\n switch(recipient.encryptedContent.algorithm) {\n case forge.pki.oids.rsaEncryption:\n case forge.pki.oids.desCBC:\n var key = privKey.decrypt(recipient.encryptedContent.content);\n msg.encryptedContent.key = forge.util.createBuffer(key);\n break;\n\n default:\n throw new Error('Unsupported asymmetric cipher, ' +\n 'OID ' + recipient.encryptedContent.algorithm);\n }\n }\n\n _decryptContent(msg);\n },\n\n /**\n * Add (another) entity to list of recipients.\n *\n * @param cert The certificate of the entity to add.\n */\n addRecipient: function(cert) {\n msg.recipients.push({\n version: 0,\n issuer: cert.issuer.attributes,\n serialNumber: cert.serialNumber,\n encryptedContent: {\n // We simply assume rsaEncryption here, since forge.pki only\n // supports RSA so far. If the PKI module supports other\n // ciphers one day, we need to modify this one as well.\n algorithm: forge.pki.oids.rsaEncryption,\n key: cert.publicKey\n }\n });\n },\n\n /**\n * Encrypt enveloped content.\n *\n * This function supports two optional arguments, cipher and key, which\n * can be used to influence symmetric encryption. Unless cipher is\n * provided, the cipher specified in encryptedContent.algorithm is used\n * (defaults to AES-256-CBC). If no key is provided, encryptedContent.key\n * is (re-)used. 
If that one's not set, a random key will be generated\n * automatically.\n *\n * @param [key] The key to be used for symmetric encryption.\n * @param [cipher] The OID of the symmetric cipher to use.\n */\n encrypt: function(key, cipher) {\n // Part 1: Symmetric encryption\n if(msg.encryptedContent.content === undefined) {\n cipher = cipher || msg.encryptedContent.algorithm;\n key = key || msg.encryptedContent.key;\n\n var keyLen, ivLen, ciphFn;\n switch(cipher) {\n case forge.pki.oids['aes128-CBC']:\n keyLen = 16;\n ivLen = 16;\n ciphFn = forge.aes.createEncryptionCipher;\n break;\n\n case forge.pki.oids['aes192-CBC']:\n keyLen = 24;\n ivLen = 16;\n ciphFn = forge.aes.createEncryptionCipher;\n break;\n\n case forge.pki.oids['aes256-CBC']:\n keyLen = 32;\n ivLen = 16;\n ciphFn = forge.aes.createEncryptionCipher;\n break;\n\n case forge.pki.oids['des-EDE3-CBC']:\n keyLen = 24;\n ivLen = 8;\n ciphFn = forge.des.createEncryptionCipher;\n break;\n\n default:\n throw new Error('Unsupported symmetric cipher, OID ' + cipher);\n }\n\n if(key === undefined) {\n key = forge.util.createBuffer(forge.random.getBytes(keyLen));\n } else if(key.length() != keyLen) {\n throw new Error('Symmetric key has wrong length; ' +\n 'got ' + key.length() + ' bytes, expected ' + keyLen + '.');\n }\n\n // Keep a copy of the key & IV in the object, so the caller can\n // use it for whatever reason.\n msg.encryptedContent.algorithm = cipher;\n msg.encryptedContent.key = key;\n msg.encryptedContent.parameter = forge.util.createBuffer(\n forge.random.getBytes(ivLen));\n\n var ciph = ciphFn(key);\n ciph.start(msg.encryptedContent.parameter.copy());\n ciph.update(msg.content);\n\n // The finish function does PKCS#7 padding by default, therefore\n // no action required by us.\n if(!ciph.finish()) {\n throw new Error('Symmetric encryption failed.');\n }\n\n msg.encryptedContent.content = ciph.output;\n }\n\n // Part 2: asymmetric encryption for each recipient\n for(var i = 0; i < msg.recipients.length; ++i) {\n var recipient = msg.recipients[i];\n\n // Nothing to do, encryption already done.\n if(recipient.encryptedContent.content !== undefined) {\n continue;\n }\n\n switch(recipient.encryptedContent.algorithm) {\n case forge.pki.oids.rsaEncryption:\n recipient.encryptedContent.content =\n recipient.encryptedContent.key.encrypt(\n msg.encryptedContent.key.data);\n break;\n\n default:\n throw new Error('Unsupported asymmetric cipher, OID ' +\n recipient.encryptedContent.algorithm);\n }\n }\n }\n };\n return msg;\n};\n\n/**\n * Converts a single recipient from an ASN.1 object.\n *\n * @param obj the ASN.1 RecipientInfo.\n *\n * @return the recipient object.\n */\nfunction _recipientFromAsn1(obj) {\n // validate EnvelopedData content block and capture data\n var capture = {};\n var errors = [];\n if(!asn1.validate(obj, p7.asn1.recipientInfoValidator, capture, errors)) {\n var error = new Error('Cannot read PKCS#7 RecipientInfo. ' +\n 'ASN.1 object is not an PKCS#7 RecipientInfo.');\n error.errors = errors;\n throw error;\n }\n\n return {\n version: capture.version.charCodeAt(0),\n issuer: forge.pki.RDNAttributesAsArray(capture.issuer),\n serialNumber: forge.util.createBuffer(capture.serial).toHex(),\n encryptedContent: {\n algorithm: asn1.derToOid(capture.encAlgorithm),\n parameter: capture.encParameter ? 
capture.encParameter.value : undefined,\n content: capture.encKey\n }\n };\n}\n\n/**\n * Converts a single recipient object to an ASN.1 object.\n *\n * @param obj the recipient object.\n *\n * @return the ASN.1 RecipientInfo.\n */\nfunction _recipientToAsn1(obj) {\n return asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [\n // Version\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.INTEGER, false,\n asn1.integerToDer(obj.version).getBytes()),\n // IssuerAndSerialNumber\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [\n // Name\n forge.pki.distinguishedNameToAsn1({attributes: obj.issuer}),\n // Serial\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.INTEGER, false,\n forge.util.hexToBytes(obj.serialNumber))\n ]),\n // KeyEncryptionAlgorithmIdentifier\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [\n // Algorithm\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OID, false,\n asn1.oidToDer(obj.encryptedContent.algorithm).getBytes()),\n // Parameter, force NULL, only RSA supported for now.\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.NULL, false, '')\n ]),\n // EncryptedKey\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OCTETSTRING, false,\n obj.encryptedContent.content)\n ]);\n}\n\n/**\n * Map a set of RecipientInfo ASN.1 objects to recipient objects.\n *\n * @param infos an array of ASN.1 representations RecipientInfo (i.e. SET OF).\n *\n * @return an array of recipient objects.\n */\nfunction _recipientsFromAsn1(infos) {\n var ret = [];\n for(var i = 0; i < infos.length; ++i) {\n ret.push(_recipientFromAsn1(infos[i]));\n }\n return ret;\n}\n\n/**\n * Map an array of recipient objects to ASN.1 RecipientInfo objects.\n *\n * @param recipients an array of recipientInfo objects.\n *\n * @return an array of ASN.1 RecipientInfos.\n */\nfunction _recipientsToAsn1(recipients) {\n var ret = [];\n for(var i = 0; i < recipients.length; ++i) {\n ret.push(_recipientToAsn1(recipients[i]));\n }\n return ret;\n}\n\n/**\n * Converts a single signer from an ASN.1 object.\n *\n * @param obj the ASN.1 representation of a SignerInfo.\n *\n * @return the signer object.\n */\nfunction _signerFromAsn1(obj) {\n // validate EnvelopedData content block and capture data\n var capture = {};\n var errors = [];\n if(!asn1.validate(obj, p7.asn1.signerInfoValidator, capture, errors)) {\n var error = new Error('Cannot read PKCS#7 SignerInfo. 
' +\n 'ASN.1 object is not an PKCS#7 SignerInfo.');\n error.errors = errors;\n throw error;\n }\n\n var rval = {\n version: capture.version.charCodeAt(0),\n issuer: forge.pki.RDNAttributesAsArray(capture.issuer),\n serialNumber: forge.util.createBuffer(capture.serial).toHex(),\n digestAlgorithm: asn1.derToOid(capture.digestAlgorithm),\n signatureAlgorithm: asn1.derToOid(capture.signatureAlgorithm),\n signature: capture.signature,\n authenticatedAttributes: [],\n unauthenticatedAttributes: []\n };\n\n // TODO: convert attributes\n var authenticatedAttributes = capture.authenticatedAttributes || [];\n var unauthenticatedAttributes = capture.unauthenticatedAttributes || [];\n\n return rval;\n}\n\n/**\n * Converts a single signerInfo object to an ASN.1 object.\n *\n * @param obj the signerInfo object.\n *\n * @return the ASN.1 representation of a SignerInfo.\n */\nfunction _signerToAsn1(obj) {\n // SignerInfo\n var rval = asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [\n // version\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.INTEGER, false,\n asn1.integerToDer(obj.version).getBytes()),\n // issuerAndSerialNumber\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [\n // name\n forge.pki.distinguishedNameToAsn1({attributes: obj.issuer}),\n // serial\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.INTEGER, false,\n forge.util.hexToBytes(obj.serialNumber))\n ]),\n // digestAlgorithm\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [\n // algorithm\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OID, false,\n asn1.oidToDer(obj.digestAlgorithm).getBytes()),\n // parameters (null)\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.NULL, false, '')\n ])\n ]);\n\n // authenticatedAttributes (OPTIONAL)\n if(obj.authenticatedAttributesAsn1) {\n // add ASN.1 previously generated during signing\n rval.value.push(obj.authenticatedAttributesAsn1);\n }\n\n // digestEncryptionAlgorithm\n rval.value.push(asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [\n // algorithm\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OID, false,\n asn1.oidToDer(obj.signatureAlgorithm).getBytes()),\n // parameters (null)\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.NULL, false, '')\n ]));\n\n // encryptedDigest\n rval.value.push(asn1.create(\n asn1.Class.UNIVERSAL, asn1.Type.OCTETSTRING, false, obj.signature));\n\n // unauthenticatedAttributes (OPTIONAL)\n if(obj.unauthenticatedAttributes.length > 0) {\n // [1] IMPLICIT\n var attrsAsn1 = asn1.create(asn1.Class.CONTEXT_SPECIFIC, 1, true, []);\n for(var i = 0; i < obj.unauthenticatedAttributes.length; ++i) {\n var attr = obj.unauthenticatedAttributes[i];\n attrsAsn1.values.push(_attributeToAsn1(attr));\n }\n rval.value.push(attrsAsn1);\n }\n\n return rval;\n}\n\n/**\n * Map a set of SignerInfo ASN.1 objects to an array of signer objects.\n *\n * @param signerInfoAsn1s an array of ASN.1 SignerInfos (i.e. 
SET OF).\n *\n * @return an array of signers objects.\n */\nfunction _signersFromAsn1(signerInfoAsn1s) {\n var ret = [];\n for(var i = 0; i < signerInfoAsn1s.length; ++i) {\n ret.push(_signerFromAsn1(signerInfoAsn1s[i]));\n }\n return ret;\n}\n\n/**\n * Map an array of signer objects to ASN.1 objects.\n *\n * @param signers an array of signer objects.\n *\n * @return an array of ASN.1 SignerInfos.\n */\nfunction _signersToAsn1(signers) {\n var ret = [];\n for(var i = 0; i < signers.length; ++i) {\n ret.push(_signerToAsn1(signers[i]));\n }\n return ret;\n}\n\n/**\n * Convert an attribute object to an ASN.1 Attribute.\n *\n * @param attr the attribute object.\n *\n * @return the ASN.1 Attribute.\n */\nfunction _attributeToAsn1(attr) {\n var value;\n\n // TODO: generalize to support more attributes\n if(attr.type === forge.pki.oids.contentType) {\n value = asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OID, false,\n asn1.oidToDer(attr.value).getBytes());\n } else if(attr.type === forge.pki.oids.messageDigest) {\n value = asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OCTETSTRING, false,\n attr.value.bytes());\n } else if(attr.type === forge.pki.oids.signingTime) {\n /* Note per RFC 2985: Dates between 1 January 1950 and 31 December 2049\n (inclusive) MUST be encoded as UTCTime. Any dates with year values\n before 1950 or after 2049 MUST be encoded as GeneralizedTime. [Further,]\n UTCTime values MUST be expressed in Greenwich Mean Time (Zulu) and MUST\n include seconds (i.e., times are YYMMDDHHMMSSZ), even where the\n number of seconds is zero. Midnight (GMT) must be represented as\n \"YYMMDD000000Z\". */\n // TODO: make these module-level constants\n var jan_1_1950 = new Date('1950-01-01T00:00:00Z');\n var jan_1_2050 = new Date('2050-01-01T00:00:00Z');\n var date = attr.value;\n if(typeof date === 'string') {\n // try to parse date\n var timestamp = Date.parse(date);\n if(!isNaN(timestamp)) {\n date = new Date(timestamp);\n } else if(date.length === 13) {\n // YYMMDDHHMMSSZ (13 chars for UTCTime)\n date = asn1.utcTimeToDate(date);\n } else {\n // assume generalized time\n date = asn1.generalizedTimeToDate(date);\n }\n }\n\n if(date >= jan_1_1950 && date < jan_1_2050) {\n value = asn1.create(\n asn1.Class.UNIVERSAL, asn1.Type.UTCTIME, false,\n asn1.dateToUtcTime(date));\n } else {\n value = asn1.create(\n asn1.Class.UNIVERSAL, asn1.Type.GENERALIZEDTIME, false,\n asn1.dateToGeneralizedTime(date));\n }\n }\n\n // TODO: expose as common API call\n // create a RelativeDistinguishedName set\n // each value in the set is an AttributeTypeAndValue first\n // containing the type (an OID) and second the value\n return asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [\n // AttributeType\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OID, false,\n asn1.oidToDer(attr.type).getBytes()),\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SET, true, [\n // AttributeValue\n value\n ])\n ]);\n}\n\n/**\n * Map messages encrypted content to ASN.1 objects.\n *\n * @param ec The encryptedContent object of the message.\n *\n * @return ASN.1 representation of the encryptedContent object (SEQUENCE).\n */\nfunction _encryptedContentToAsn1(ec) {\n return [\n // ContentType, always Data for the moment\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OID, false,\n asn1.oidToDer(forge.pki.oids.data).getBytes()),\n // ContentEncryptionAlgorithmIdentifier\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [\n // Algorithm\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OID, false,\n 
asn1.oidToDer(ec.algorithm).getBytes()),\n // Parameters (IV)\n !ec.parameter ?\n undefined :\n asn1.create(\n asn1.Class.UNIVERSAL, asn1.Type.OCTETSTRING, false,\n ec.parameter.getBytes())\n ]),\n // [0] EncryptedContent\n asn1.create(asn1.Class.CONTEXT_SPECIFIC, 0, true, [\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OCTETSTRING, false,\n ec.content.getBytes())\n ])\n ];\n}\n\n/**\n * Reads the \"common part\" of an PKCS#7 content block (in ASN.1 format)\n *\n * This function reads the \"common part\" of the PKCS#7 content blocks\n * EncryptedData and EnvelopedData, i.e. version number and symmetrically\n * encrypted content block.\n *\n * The result of the ASN.1 validate and capture process is returned\n * to allow the caller to extract further data, e.g. the list of recipients\n * in case of a EnvelopedData object.\n *\n * @param msg the PKCS#7 object to read the data to.\n * @param obj the ASN.1 representation of the content block.\n * @param validator the ASN.1 structure validator object to use.\n *\n * @return the value map captured by validator object.\n */\nfunction _fromAsn1(msg, obj, validator) {\n var capture = {};\n var errors = [];\n if(!asn1.validate(obj, validator, capture, errors)) {\n var error = new Error('Cannot read PKCS#7 message. ' +\n 'ASN.1 object is not a supported PKCS#7 message.');\n error.errors = error;\n throw error;\n }\n\n // Check contentType, so far we only support (raw) Data.\n var contentType = asn1.derToOid(capture.contentType);\n if(contentType !== forge.pki.oids.data) {\n throw new Error('Unsupported PKCS#7 message. ' +\n 'Only wrapped ContentType Data supported.');\n }\n\n if(capture.encryptedContent) {\n var content = '';\n if(forge.util.isArray(capture.encryptedContent)) {\n for(var i = 0; i < capture.encryptedContent.length; ++i) {\n if(capture.encryptedContent[i].type !== asn1.Type.OCTETSTRING) {\n throw new Error('Malformed PKCS#7 message, expecting encrypted ' +\n 'content constructed of only OCTET STRING objects.');\n }\n content += capture.encryptedContent[i].value;\n }\n } else {\n content = capture.encryptedContent;\n }\n msg.encryptedContent = {\n algorithm: asn1.derToOid(capture.encAlgorithm),\n parameter: forge.util.createBuffer(capture.encParameter.value),\n content: forge.util.createBuffer(content)\n };\n }\n\n if(capture.content) {\n var content = '';\n if(forge.util.isArray(capture.content)) {\n for(var i = 0; i < capture.content.length; ++i) {\n if(capture.content[i].type !== asn1.Type.OCTETSTRING) {\n throw new Error('Malformed PKCS#7 message, expecting ' +\n 'content constructed of only OCTET STRING objects.');\n }\n content += capture.content[i].value;\n }\n } else {\n content = capture.content;\n }\n msg.content = forge.util.createBuffer(content);\n }\n\n msg.version = capture.version.charCodeAt(0);\n msg.rawCapture = capture;\n\n return capture;\n}\n\n/**\n * Decrypt the symmetrically encrypted content block of the PKCS#7 message.\n *\n * Decryption is skipped in case the PKCS#7 message object already has a\n * (decrypted) content attribute. 
The algorithm, key and cipher parameters\n * (probably the iv) are taken from the encryptedContent attribute of the\n * message object.\n *\n * @param The PKCS#7 message object.\n */\nfunction _decryptContent(msg) {\n if(msg.encryptedContent.key === undefined) {\n throw new Error('Symmetric key not available.');\n }\n\n if(msg.content === undefined) {\n var ciph;\n\n switch(msg.encryptedContent.algorithm) {\n case forge.pki.oids['aes128-CBC']:\n case forge.pki.oids['aes192-CBC']:\n case forge.pki.oids['aes256-CBC']:\n ciph = forge.aes.createDecryptionCipher(msg.encryptedContent.key);\n break;\n\n case forge.pki.oids['desCBC']:\n case forge.pki.oids['des-EDE3-CBC']:\n ciph = forge.des.createDecryptionCipher(msg.encryptedContent.key);\n break;\n\n default:\n throw new Error('Unsupported symmetric cipher, OID ' +\n msg.encryptedContent.algorithm);\n }\n ciph.start(msg.encryptedContent.parameter);\n ciph.update(msg.encryptedContent.content);\n\n if(!ciph.finish()) {\n throw new Error('Symmetric decryption failed.');\n }\n\n msg.content = ciph.output;\n }\n}\n\n\n//# sourceURL=webpack://@waku/noise-rtc/./node_modules/node-forge/lib/pkcs7.js?");
/***/ }),
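/* Usage sketch for the pkcs7 module above: enveloping data for one recipient.
 * This is illustrative only and assumes the createEnvelopedData(), addRecipient()
 * and messageToPem() entry points defined earlier in pkcs7.js, plus a caller-supplied
 * recipient certificate PEM (certPem is a placeholder). encrypt() generates the
 * symmetric key and IV automatically and RSA-wraps the key for each recipient. */
var forge = require('node-forge');
var cert = forge.pki.certificateFromPem(certPem);        // certPem: placeholder PEM string
var p7 = forge.pkcs7.createEnvelopedData();
p7.addRecipient(cert);
p7.content = forge.util.createBuffer('secret message');
p7.encrypt();                                            // symmetric step + per-recipient key wrap
var pem = forge.pkcs7.messageToPem(p7);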
/***/ "./node_modules/node-forge/lib/pkcs7asn1.js":
/*!**************************************************!*\
!*** ./node_modules/node-forge/lib/pkcs7asn1.js ***!
\**************************************************/
/***/ ((module, __unused_webpack_exports, __webpack_require__) => {
eval("/**\n * Javascript implementation of ASN.1 validators for PKCS#7 v1.5.\n *\n * @author Dave Longley\n * @author Stefan Siegl\n *\n * Copyright (c) 2012-2015 Digital Bazaar, Inc.\n * Copyright (c) 2012 Stefan Siegl \n *\n * The ASN.1 representation of PKCS#7 is as follows\n * (see RFC #2315 for details, http://www.ietf.org/rfc/rfc2315.txt):\n *\n * A PKCS#7 message consists of a ContentInfo on root level, which may\n * contain any number of further ContentInfo nested into it.\n *\n * ContentInfo ::= SEQUENCE {\n * contentType ContentType,\n * content [0] EXPLICIT ANY DEFINED BY contentType OPTIONAL\n * }\n *\n * ContentType ::= OBJECT IDENTIFIER\n *\n * EnvelopedData ::= SEQUENCE {\n * version Version,\n * recipientInfos RecipientInfos,\n * encryptedContentInfo EncryptedContentInfo\n * }\n *\n * EncryptedData ::= SEQUENCE {\n * version Version,\n * encryptedContentInfo EncryptedContentInfo\n * }\n *\n * id-signedData OBJECT IDENTIFIER ::= { iso(1) member-body(2)\n * us(840) rsadsi(113549) pkcs(1) pkcs7(7) 2 }\n *\n * SignedData ::= SEQUENCE {\n * version INTEGER,\n * digestAlgorithms DigestAlgorithmIdentifiers,\n * contentInfo ContentInfo,\n * certificates [0] IMPLICIT Certificates OPTIONAL,\n * crls [1] IMPLICIT CertificateRevocationLists OPTIONAL,\n * signerInfos SignerInfos\n * }\n *\n * SignerInfos ::= SET OF SignerInfo\n *\n * SignerInfo ::= SEQUENCE {\n * version Version,\n * issuerAndSerialNumber IssuerAndSerialNumber,\n * digestAlgorithm DigestAlgorithmIdentifier,\n * authenticatedAttributes [0] IMPLICIT Attributes OPTIONAL,\n * digestEncryptionAlgorithm DigestEncryptionAlgorithmIdentifier,\n * encryptedDigest EncryptedDigest,\n * unauthenticatedAttributes [1] IMPLICIT Attributes OPTIONAL\n * }\n *\n * EncryptedDigest ::= OCTET STRING\n *\n * Attributes ::= SET OF Attribute\n *\n * Attribute ::= SEQUENCE {\n * attrType OBJECT IDENTIFIER,\n * attrValues SET OF AttributeValue\n * }\n *\n * AttributeValue ::= ANY\n *\n * Version ::= INTEGER\n *\n * RecipientInfos ::= SET OF RecipientInfo\n *\n * EncryptedContentInfo ::= SEQUENCE {\n * contentType ContentType,\n * contentEncryptionAlgorithm ContentEncryptionAlgorithmIdentifier,\n * encryptedContent [0] IMPLICIT EncryptedContent OPTIONAL\n * }\n *\n * ContentEncryptionAlgorithmIdentifier ::= AlgorithmIdentifier\n *\n * The AlgorithmIdentifier contains an Object Identifier (OID) and parameters\n * for the algorithm, if any. In the case of AES and DES3, there is only one,\n * the IV.\n *\n * AlgorithmIdentifer ::= SEQUENCE {\n * algorithm OBJECT IDENTIFIER,\n * parameters ANY DEFINED BY algorithm OPTIONAL\n * }\n *\n * EncryptedContent ::= OCTET STRING\n *\n * RecipientInfo ::= SEQUENCE {\n * version Version,\n * issuerAndSerialNumber IssuerAndSerialNumber,\n * keyEncryptionAlgorithm KeyEncryptionAlgorithmIdentifier,\n * encryptedKey EncryptedKey\n * }\n *\n * IssuerAndSerialNumber ::= SEQUENCE {\n * issuer Name,\n * serialNumber CertificateSerialNumber\n * }\n *\n * CertificateSerialNumber ::= INTEGER\n *\n * KeyEncryptionAlgorithmIdentifier ::= AlgorithmIdentifier\n *\n * EncryptedKey ::= OCTET STRING\n */\nvar forge = __webpack_require__(/*! ./forge */ \"./node_modules/node-forge/lib/forge.js\");\n__webpack_require__(/*! ./asn1 */ \"./node_modules/node-forge/lib/asn1.js\");\n__webpack_require__(/*! 
./util */ \"./node_modules/node-forge/lib/util.js\");\n\n// shortcut for ASN.1 API\nvar asn1 = forge.asn1;\n\n// shortcut for PKCS#7 API\nvar p7v = module.exports = forge.pkcs7asn1 = forge.pkcs7asn1 || {};\nforge.pkcs7 = forge.pkcs7 || {};\nforge.pkcs7.asn1 = p7v;\n\nvar contentInfoValidator = {\n name: 'ContentInfo',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.SEQUENCE,\n constructed: true,\n value: [{\n name: 'ContentInfo.ContentType',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.OID,\n constructed: false,\n capture: 'contentType'\n }, {\n name: 'ContentInfo.content',\n tagClass: asn1.Class.CONTEXT_SPECIFIC,\n type: 0,\n constructed: true,\n optional: true,\n captureAsn1: 'content'\n }]\n};\np7v.contentInfoValidator = contentInfoValidator;\n\nvar encryptedContentInfoValidator = {\n name: 'EncryptedContentInfo',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.SEQUENCE,\n constructed: true,\n value: [{\n name: 'EncryptedContentInfo.contentType',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.OID,\n constructed: false,\n capture: 'contentType'\n }, {\n name: 'EncryptedContentInfo.contentEncryptionAlgorithm',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.SEQUENCE,\n constructed: true,\n value: [{\n name: 'EncryptedContentInfo.contentEncryptionAlgorithm.algorithm',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.OID,\n constructed: false,\n capture: 'encAlgorithm'\n }, {\n name: 'EncryptedContentInfo.contentEncryptionAlgorithm.parameter',\n tagClass: asn1.Class.UNIVERSAL,\n captureAsn1: 'encParameter'\n }]\n }, {\n name: 'EncryptedContentInfo.encryptedContent',\n tagClass: asn1.Class.CONTEXT_SPECIFIC,\n type: 0,\n /* The PKCS#7 structure output by OpenSSL somewhat differs from what\n * other implementations do generate.\n *\n * OpenSSL generates a structure like this:\n * SEQUENCE {\n * ...\n * [0]\n * 26 DA 67 D2 17 9C 45 3C B1 2A A8 59 2F 29 33 38\n * C3 C3 DF 86 71 74 7A 19 9F 40 D0 29 BE 85 90 45\n * ...\n * }\n *\n * Whereas other implementations (and this PKCS#7 module) generate:\n * SEQUENCE {\n * ...\n * [0] {\n * OCTET STRING\n * 26 DA 67 D2 17 9C 45 3C B1 2A A8 59 2F 29 33 38\n * C3 C3 DF 86 71 74 7A 19 9F 40 D0 29 BE 85 90 45\n * ...\n * }\n * }\n *\n * In order to support both, we just capture the context specific\n * field here. 
The OCTET STRING bit is removed below.\n */\n capture: 'encryptedContent',\n captureAsn1: 'encryptedContentAsn1'\n }]\n};\n\np7v.envelopedDataValidator = {\n name: 'EnvelopedData',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.SEQUENCE,\n constructed: true,\n value: [{\n name: 'EnvelopedData.Version',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.INTEGER,\n constructed: false,\n capture: 'version'\n }, {\n name: 'EnvelopedData.RecipientInfos',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.SET,\n constructed: true,\n captureAsn1: 'recipientInfos'\n }].concat(encryptedContentInfoValidator)\n};\n\np7v.encryptedDataValidator = {\n name: 'EncryptedData',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.SEQUENCE,\n constructed: true,\n value: [{\n name: 'EncryptedData.Version',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.INTEGER,\n constructed: false,\n capture: 'version'\n }].concat(encryptedContentInfoValidator)\n};\n\nvar signerValidator = {\n name: 'SignerInfo',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.SEQUENCE,\n constructed: true,\n value: [{\n name: 'SignerInfo.version',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.INTEGER,\n constructed: false\n }, {\n name: 'SignerInfo.issuerAndSerialNumber',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.SEQUENCE,\n constructed: true,\n value: [{\n name: 'SignerInfo.issuerAndSerialNumber.issuer',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.SEQUENCE,\n constructed: true,\n captureAsn1: 'issuer'\n }, {\n name: 'SignerInfo.issuerAndSerialNumber.serialNumber',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.INTEGER,\n constructed: false,\n capture: 'serial'\n }]\n }, {\n name: 'SignerInfo.digestAlgorithm',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.SEQUENCE,\n constructed: true,\n value: [{\n name: 'SignerInfo.digestAlgorithm.algorithm',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.OID,\n constructed: false,\n capture: 'digestAlgorithm'\n }, {\n name: 'SignerInfo.digestAlgorithm.parameter',\n tagClass: asn1.Class.UNIVERSAL,\n constructed: false,\n captureAsn1: 'digestParameter',\n optional: true\n }]\n }, {\n name: 'SignerInfo.authenticatedAttributes',\n tagClass: asn1.Class.CONTEXT_SPECIFIC,\n type: 0,\n constructed: true,\n optional: true,\n capture: 'authenticatedAttributes'\n }, {\n name: 'SignerInfo.digestEncryptionAlgorithm',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.SEQUENCE,\n constructed: true,\n capture: 'signatureAlgorithm'\n }, {\n name: 'SignerInfo.encryptedDigest',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.OCTETSTRING,\n constructed: false,\n capture: 'signature'\n }, {\n name: 'SignerInfo.unauthenticatedAttributes',\n tagClass: asn1.Class.CONTEXT_SPECIFIC,\n type: 1,\n constructed: true,\n optional: true,\n capture: 'unauthenticatedAttributes'\n }]\n};\n\np7v.signedDataValidator = {\n name: 'SignedData',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.SEQUENCE,\n constructed: true,\n value: [{\n name: 'SignedData.Version',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.INTEGER,\n constructed: false,\n capture: 'version'\n }, {\n name: 'SignedData.DigestAlgorithms',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.SET,\n constructed: true,\n captureAsn1: 'digestAlgorithms'\n },\n contentInfoValidator,\n {\n name: 'SignedData.Certificates',\n tagClass: asn1.Class.CONTEXT_SPECIFIC,\n type: 0,\n optional: true,\n captureAsn1: 'certificates'\n }, {\n name: 'SignedData.CertificateRevocationLists',\n tagClass: 
asn1.Class.CONTEXT_SPECIFIC,\n type: 1,\n optional: true,\n captureAsn1: 'crls'\n }, {\n name: 'SignedData.SignerInfos',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.SET,\n capture: 'signerInfos',\n optional: true,\n value: [signerValidator]\n }]\n};\n\np7v.recipientInfoValidator = {\n name: 'RecipientInfo',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.SEQUENCE,\n constructed: true,\n value: [{\n name: 'RecipientInfo.version',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.INTEGER,\n constructed: false,\n capture: 'version'\n }, {\n name: 'RecipientInfo.issuerAndSerial',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.SEQUENCE,\n constructed: true,\n value: [{\n name: 'RecipientInfo.issuerAndSerial.issuer',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.SEQUENCE,\n constructed: true,\n captureAsn1: 'issuer'\n }, {\n name: 'RecipientInfo.issuerAndSerial.serialNumber',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.INTEGER,\n constructed: false,\n capture: 'serial'\n }]\n }, {\n name: 'RecipientInfo.keyEncryptionAlgorithm',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.SEQUENCE,\n constructed: true,\n value: [{\n name: 'RecipientInfo.keyEncryptionAlgorithm.algorithm',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.OID,\n constructed: false,\n capture: 'encAlgorithm'\n }, {\n name: 'RecipientInfo.keyEncryptionAlgorithm.parameter',\n tagClass: asn1.Class.UNIVERSAL,\n constructed: false,\n captureAsn1: 'encParameter',\n optional: true\n }]\n }, {\n name: 'RecipientInfo.encryptedKey',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.OCTETSTRING,\n constructed: false,\n capture: 'encKey'\n }]\n};\n\n\n//# sourceURL=webpack://@waku/noise-rtc/./node_modules/node-forge/lib/pkcs7asn1.js?");
/***/ }),
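/* Usage sketch for the validators above: decode an EnvelopedData block and capture
 * its fields with asn1.validate(), mirroring what pkcs7.js does in _fromAsn1().
 * derBytes is a placeholder for caller-supplied DER of the inner EnvelopedData
 * SEQUENCE (not the outer ContentInfo wrapper). */
var forge = require('node-forge');
var asn1 = forge.asn1;
var obj = asn1.fromDer(derBytes);                        // derBytes: placeholder DER string
var capture = {};
var errors = [];
if(asn1.validate(obj, forge.pkcs7asn1.envelopedDataValidator, capture, errors)) {
  console.log('version:', capture.version.charCodeAt(0));
  console.log('content cipher OID:', asn1.derToOid(capture.encAlgorithm));
  console.log('recipient count:', capture.recipientInfos.value.length);
} else {
  console.log('validation errors:', errors);
}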
/***/ "./node_modules/node-forge/lib/prime.js":
/*!**********************************************!*\
!*** ./node_modules/node-forge/lib/prime.js ***!
\**********************************************/
/***/ ((module, __unused_webpack_exports, __webpack_require__) => {
eval("/**\n * Prime number generation API.\n *\n * @author Dave Longley\n *\n * Copyright (c) 2014 Digital Bazaar, Inc.\n */\nvar forge = __webpack_require__(/*! ./forge */ \"./node_modules/node-forge/lib/forge.js\");\n__webpack_require__(/*! ./util */ \"./node_modules/node-forge/lib/util.js\");\n__webpack_require__(/*! ./jsbn */ \"./node_modules/node-forge/lib/jsbn.js\");\n__webpack_require__(/*! ./random */ \"./node_modules/node-forge/lib/random.js\");\n\n(function() {\n\n// forge.prime already defined\nif(forge.prime) {\n module.exports = forge.prime;\n return;\n}\n\n/* PRIME API */\nvar prime = module.exports = forge.prime = forge.prime || {};\n\nvar BigInteger = forge.jsbn.BigInteger;\n\n// primes are 30k+i for i = 1, 7, 11, 13, 17, 19, 23, 29\nvar GCD_30_DELTA = [6, 4, 2, 4, 2, 4, 6, 2];\nvar THIRTY = new BigInteger(null);\nTHIRTY.fromInt(30);\nvar op_or = function(x, y) {return x|y;};\n\n/**\n * Generates a random probable prime with the given number of bits.\n *\n * Alternative algorithms can be specified by name as a string or as an\n * object with custom options like so:\n *\n * {\n * name: 'PRIMEINC',\n * options: {\n * maxBlockTime: ,\n * millerRabinTests: ,\n * workerScript: ,\n * workers: .\n * workLoad: the size of the work load, ie: number of possible prime\n * numbers for each web worker to check per work assignment,\n * (default: 100).\n * }\n * }\n *\n * @param bits the number of bits for the prime number.\n * @param options the options to use.\n * [algorithm] the algorithm to use (default: 'PRIMEINC').\n * [prng] a custom crypto-secure pseudo-random number generator to use,\n * that must define \"getBytesSync\".\n *\n * @return callback(err, num) called once the operation completes.\n */\nprime.generateProbablePrime = function(bits, options, callback) {\n if(typeof options === 'function') {\n callback = options;\n options = {};\n }\n options = options || {};\n\n // default to PRIMEINC algorithm\n var algorithm = options.algorithm || 'PRIMEINC';\n if(typeof algorithm === 'string') {\n algorithm = {name: algorithm};\n }\n algorithm.options = algorithm.options || {};\n\n // create prng with api that matches BigInteger secure random\n var prng = options.prng || forge.random;\n var rng = {\n // x is an array to fill with bytes\n nextBytes: function(x) {\n var b = prng.getBytesSync(x.length);\n for(var i = 0; i < x.length; ++i) {\n x[i] = b.charCodeAt(i);\n }\n }\n };\n\n if(algorithm.name === 'PRIMEINC') {\n return primeincFindPrime(bits, rng, algorithm.options, callback);\n }\n\n throw new Error('Invalid prime generation algorithm: ' + algorithm.name);\n};\n\nfunction primeincFindPrime(bits, rng, options, callback) {\n if('workers' in options) {\n return primeincFindPrimeWithWorkers(bits, rng, options, callback);\n }\n return primeincFindPrimeWithoutWorkers(bits, rng, options, callback);\n}\n\nfunction primeincFindPrimeWithoutWorkers(bits, rng, options, callback) {\n // initialize random number\n var num = generateRandom(bits, rng);\n\n /* Note: All primes are of the form 30k+i for i < 30 and gcd(30, i)=1. The\n number we are given is always aligned at 30k + 1. Each time the number is\n determined not to be prime we add to get to the next 'i', eg: if the number\n was at 30k + 1 we add 6. 
*/\n var deltaIdx = 0;\n\n // get required number of MR tests\n var mrTests = getMillerRabinTests(num.bitLength());\n if('millerRabinTests' in options) {\n mrTests = options.millerRabinTests;\n }\n\n // find prime nearest to 'num' for maxBlockTime ms\n // 10 ms gives 5ms of leeway for other calculations before dropping\n // below 60fps (1000/60 == 16.67), but in reality, the number will\n // likely be higher due to an 'atomic' big int modPow\n var maxBlockTime = 10;\n if('maxBlockTime' in options) {\n maxBlockTime = options.maxBlockTime;\n }\n\n _primeinc(num, bits, rng, deltaIdx, mrTests, maxBlockTime, callback);\n}\n\nfunction _primeinc(num, bits, rng, deltaIdx, mrTests, maxBlockTime, callback) {\n var start = +new Date();\n do {\n // overflow, regenerate random number\n if(num.bitLength() > bits) {\n num = generateRandom(bits, rng);\n }\n // do primality test\n if(num.isProbablePrime(mrTests)) {\n return callback(null, num);\n }\n // get next potential prime\n num.dAddOffset(GCD_30_DELTA[deltaIdx++ % 8], 0);\n } while(maxBlockTime < 0 || (+new Date() - start < maxBlockTime));\n\n // keep trying later\n forge.util.setImmediate(function() {\n _primeinc(num, bits, rng, deltaIdx, mrTests, maxBlockTime, callback);\n });\n}\n\n// NOTE: This algorithm is indeterminate in nature because workers\n// run in parallel looking at different segments of numbers. Even if this\n// algorithm is run twice with the same input from a predictable RNG, it\n// may produce different outputs.\nfunction primeincFindPrimeWithWorkers(bits, rng, options, callback) {\n // web workers unavailable\n if(typeof Worker === 'undefined') {\n return primeincFindPrimeWithoutWorkers(bits, rng, options, callback);\n }\n\n // initialize random number\n var num = generateRandom(bits, rng);\n\n // use web workers to generate keys\n var numWorkers = options.workers;\n var workLoad = options.workLoad || 100;\n var range = workLoad * 30 / 8;\n var workerScript = options.workerScript || 'forge/prime.worker.js';\n if(numWorkers === -1) {\n return forge.util.estimateCores(function(err, cores) {\n if(err) {\n // default to 2\n cores = 2;\n }\n numWorkers = cores - 1;\n generate();\n });\n }\n generate();\n\n function generate() {\n // require at least 1 worker\n numWorkers = Math.max(1, numWorkers);\n\n // TODO: consider optimizing by starting workers outside getPrime() ...\n // note that in order to clean up they will have to be made internally\n // asynchronous which may actually be slower\n\n // start workers immediately\n var workers = [];\n for(var i = 0; i < numWorkers; ++i) {\n // FIXME: fix path or use blob URLs\n workers[i] = new Worker(workerScript);\n }\n var running = numWorkers;\n\n // listen for requests from workers and assign ranges to find prime\n for(var i = 0; i < numWorkers; ++i) {\n workers[i].addEventListener('message', workerMessage);\n }\n\n /* Note: The distribution of random numbers is unknown. Therefore, each\n web worker is continuously allocated a range of numbers to check for a\n random number until one is found.\n\n Every 30 numbers will be checked just 8 times, because prime numbers\n have the form:\n\n 30k+i, for i < 30 and gcd(30, i)=1 (there are 8 values of i for this)\n\n Therefore, if we want a web worker to run N checks before asking for\n a new range of numbers, each range must contain N*30/8 numbers.\n\n For 100 checks (workLoad), this is a range of 375. 
*/\n\n var found = false;\n function workerMessage(e) {\n // ignore message, prime already found\n if(found) {\n return;\n }\n\n --running;\n var data = e.data;\n if(data.found) {\n // terminate all workers\n for(var i = 0; i < workers.length; ++i) {\n workers[i].terminate();\n }\n found = true;\n return callback(null, new BigInteger(data.prime, 16));\n }\n\n // overflow, regenerate random number\n if(num.bitLength() > bits) {\n num = generateRandom(bits, rng);\n }\n\n // assign new range to check\n var hex = num.toString(16);\n\n // start prime search\n e.target.postMessage({\n hex: hex,\n workLoad: workLoad\n });\n\n num.dAddOffset(range, 0);\n }\n }\n}\n\n/**\n * Generates a random number using the given number of bits and RNG.\n *\n * @param bits the number of bits for the number.\n * @param rng the random number generator to use.\n *\n * @return the random number.\n */\nfunction generateRandom(bits, rng) {\n var num = new BigInteger(bits, rng);\n // force MSB set\n var bits1 = bits - 1;\n if(!num.testBit(bits1)) {\n num.bitwiseTo(BigInteger.ONE.shiftLeft(bits1), op_or, num);\n }\n // align number on 30k+1 boundary\n num.dAddOffset(31 - num.mod(THIRTY).byteValue(), 0);\n return num;\n}\n\n/**\n * Returns the required number of Miller-Rabin tests to generate a\n * prime with an error probability of (1/2)^80.\n *\n * See Handbook of Applied Cryptography Chapter 4, Table 4.4.\n *\n * @param bits the bit size.\n *\n * @return the required number of iterations.\n */\nfunction getMillerRabinTests(bits) {\n if(bits <= 100) return 27;\n if(bits <= 150) return 18;\n if(bits <= 200) return 15;\n if(bits <= 250) return 12;\n if(bits <= 300) return 9;\n if(bits <= 350) return 8;\n if(bits <= 400) return 7;\n if(bits <= 500) return 6;\n if(bits <= 600) return 5;\n if(bits <= 800) return 4;\n if(bits <= 1250) return 3;\n return 2;\n}\n\n})();\n\n\n//# sourceURL=webpack://@waku/noise-rtc/./node_modules/node-forge/lib/prime.js?");
/***/ }),
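/* Usage sketch for the prime module above: generate a probable prime with the
 * default PRIMEINC algorithm and no web workers. The bit size is illustrative only. */
var forge = require('node-forge');
forge.prime.generateProbablePrime(512, {algorithm: 'PRIMEINC'}, function(err, num) {
  if(err) {
    return console.error(err);
  }
  // num is a forge.jsbn.BigInteger: candidates are walked along the 30k+i grid and
  // checked with the Miller-Rabin iteration counts from getMillerRabinTests()
  console.log('bits:', num.bitLength(), 'prime (hex):', num.toString(16));
});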
/***/ "./node_modules/node-forge/lib/prng.js":
/*!*********************************************!*\
!*** ./node_modules/node-forge/lib/prng.js ***!
\*********************************************/
/***/ ((module, __unused_webpack_exports, __webpack_require__) => {
eval("/**\n * A javascript implementation of a cryptographically-secure\n * Pseudo Random Number Generator (PRNG). The Fortuna algorithm is followed\n * here though the use of SHA-256 is not enforced; when generating an\n * a PRNG context, the hashing algorithm and block cipher used for\n * the generator are specified via a plugin.\n *\n * @author Dave Longley\n *\n * Copyright (c) 2010-2014 Digital Bazaar, Inc.\n */\nvar forge = __webpack_require__(/*! ./forge */ \"./node_modules/node-forge/lib/forge.js\");\n__webpack_require__(/*! ./util */ \"./node_modules/node-forge/lib/util.js\");\n\nvar _crypto = null;\nif(forge.util.isNodejs && !forge.options.usePureJavaScript &&\n !process.versions['node-webkit']) {\n _crypto = __webpack_require__(/*! crypto */ \"?b254\");\n}\n\n/* PRNG API */\nvar prng = module.exports = forge.prng = forge.prng || {};\n\n/**\n * Creates a new PRNG context.\n *\n * A PRNG plugin must be passed in that will provide:\n *\n * 1. A function that initializes the key and seed of a PRNG context. It\n * will be given a 16 byte key and a 16 byte seed. Any key expansion\n * or transformation of the seed from a byte string into an array of\n * integers (or similar) should be performed.\n * 2. The cryptographic function used by the generator. It takes a key and\n * a seed.\n * 3. A seed increment function. It takes the seed and returns seed + 1.\n * 4. An api to create a message digest.\n *\n * For an example, see random.js.\n *\n * @param plugin the PRNG plugin to use.\n */\nprng.create = function(plugin) {\n var ctx = {\n plugin: plugin,\n key: null,\n seed: null,\n time: null,\n // number of reseeds so far\n reseeds: 0,\n // amount of data generated so far\n generated: 0,\n // no initial key bytes\n keyBytes: ''\n };\n\n // create 32 entropy pools (each is a message digest)\n var md = plugin.md;\n var pools = new Array(32);\n for(var i = 0; i < 32; ++i) {\n pools[i] = md.create();\n }\n ctx.pools = pools;\n\n // entropy pools are written to cyclically, starting at index 0\n ctx.pool = 0;\n\n /**\n * Generates random bytes. The bytes may be generated synchronously or\n * asynchronously. 
Web workers must use the asynchronous interface or\n * else the behavior is undefined.\n *\n * @param count the number of random bytes to generate.\n * @param [callback(err, bytes)] called once the operation completes.\n *\n * @return count random bytes as a string.\n */\n ctx.generate = function(count, callback) {\n // do synchronously\n if(!callback) {\n return ctx.generateSync(count);\n }\n\n // simple generator using counter-based CBC\n var cipher = ctx.plugin.cipher;\n var increment = ctx.plugin.increment;\n var formatKey = ctx.plugin.formatKey;\n var formatSeed = ctx.plugin.formatSeed;\n var b = forge.util.createBuffer();\n\n // paranoid deviation from Fortuna:\n // reset key for every request to protect previously\n // generated random bytes should the key be discovered;\n // there is no 100ms based reseeding because of this\n // forced reseed for every `generate` call\n ctx.key = null;\n\n generate();\n\n function generate(err) {\n if(err) {\n return callback(err);\n }\n\n // sufficient bytes generated\n if(b.length() >= count) {\n return callback(null, b.getBytes(count));\n }\n\n // if amount of data generated is greater than 1 MiB, trigger reseed\n if(ctx.generated > 0xfffff) {\n ctx.key = null;\n }\n\n if(ctx.key === null) {\n // prevent stack overflow\n return forge.util.nextTick(function() {\n _reseed(generate);\n });\n }\n\n // generate the random bytes\n var bytes = cipher(ctx.key, ctx.seed);\n ctx.generated += bytes.length;\n b.putBytes(bytes);\n\n // generate bytes for a new key and seed\n ctx.key = formatKey(cipher(ctx.key, increment(ctx.seed)));\n ctx.seed = formatSeed(cipher(ctx.key, ctx.seed));\n\n forge.util.setImmediate(generate);\n }\n };\n\n /**\n * Generates random bytes synchronously.\n *\n * @param count the number of random bytes to generate.\n *\n * @return count random bytes as a string.\n */\n ctx.generateSync = function(count) {\n // simple generator using counter-based CBC\n var cipher = ctx.plugin.cipher;\n var increment = ctx.plugin.increment;\n var formatKey = ctx.plugin.formatKey;\n var formatSeed = ctx.plugin.formatSeed;\n\n // paranoid deviation from Fortuna:\n // reset key for every request to protect previously\n // generated random bytes should the key be discovered;\n // there is no 100ms based reseeding because of this\n // forced reseed for every `generateSync` call\n ctx.key = null;\n\n var b = forge.util.createBuffer();\n while(b.length() < count) {\n // if amount of data generated is greater than 1 MiB, trigger reseed\n if(ctx.generated > 0xfffff) {\n ctx.key = null;\n }\n\n if(ctx.key === null) {\n _reseedSync();\n }\n\n // generate the random bytes\n var bytes = cipher(ctx.key, ctx.seed);\n ctx.generated += bytes.length;\n b.putBytes(bytes);\n\n // generate bytes for a new key and seed\n ctx.key = formatKey(cipher(ctx.key, increment(ctx.seed)));\n ctx.seed = formatSeed(cipher(ctx.key, ctx.seed));\n }\n\n return b.getBytes(count);\n };\n\n /**\n * Private function that asynchronously reseeds a generator.\n *\n * @param callback(err) called once the operation completes.\n */\n function _reseed(callback) {\n if(ctx.pools[0].messageLength >= 32) {\n _seed();\n return callback();\n }\n // not enough seed data...\n var needed = (32 - ctx.pools[0].messageLength) << 5;\n ctx.seedFile(needed, function(err, bytes) {\n if(err) {\n return callback(err);\n }\n ctx.collect(bytes);\n _seed();\n callback();\n });\n }\n\n /**\n * Private function that synchronously reseeds a generator.\n */\n function _reseedSync() {\n if(ctx.pools[0].messageLength >= 32) 
{\n return _seed();\n }\n // not enough seed data...\n var needed = (32 - ctx.pools[0].messageLength) << 5;\n ctx.collect(ctx.seedFileSync(needed));\n _seed();\n }\n\n /**\n * Private function that seeds a generator once enough bytes are available.\n */\n function _seed() {\n // update reseed count\n ctx.reseeds = (ctx.reseeds === 0xffffffff) ? 0 : ctx.reseeds + 1;\n\n // goal is to update `key` via:\n // key = hash(key + s)\n // where 's' is all collected entropy from selected pools, then...\n\n // create a plugin-based message digest\n var md = ctx.plugin.md.create();\n\n // consume current key bytes\n md.update(ctx.keyBytes);\n\n // digest the entropy of pools whose index k meet the\n // condition 'n mod 2^k == 0' where n is the number of reseeds\n var _2powK = 1;\n for(var k = 0; k < 32; ++k) {\n if(ctx.reseeds % _2powK === 0) {\n md.update(ctx.pools[k].digest().getBytes());\n ctx.pools[k].start();\n }\n _2powK = _2powK << 1;\n }\n\n // get digest for key bytes\n ctx.keyBytes = md.digest().getBytes();\n\n // paranoid deviation from Fortuna:\n // update `seed` via `seed = hash(key)`\n // instead of initializing to zero once and only\n // ever incrementing it\n md.start();\n md.update(ctx.keyBytes);\n var seedBytes = md.digest().getBytes();\n\n // update state\n ctx.key = ctx.plugin.formatKey(ctx.keyBytes);\n ctx.seed = ctx.plugin.formatSeed(seedBytes);\n ctx.generated = 0;\n }\n\n /**\n * The built-in default seedFile. This seedFile is used when entropy\n * is needed immediately.\n *\n * @param needed the number of bytes that are needed.\n *\n * @return the random bytes.\n */\n function defaultSeedFile(needed) {\n // use window.crypto.getRandomValues strong source of entropy if available\n var getRandomValues = null;\n var globalScope = forge.util.globalScope;\n var _crypto = globalScope.crypto || globalScope.msCrypto;\n if(_crypto && _crypto.getRandomValues) {\n getRandomValues = function(arr) {\n return _crypto.getRandomValues(arr);\n };\n }\n\n var b = forge.util.createBuffer();\n if(getRandomValues) {\n while(b.length() < needed) {\n // max byte length is 65536 before QuotaExceededError is thrown\n // http://www.w3.org/TR/WebCryptoAPI/#RandomSource-method-getRandomValues\n var count = Math.max(1, Math.min(needed - b.length(), 65536) / 4);\n var entropy = new Uint32Array(Math.floor(count));\n try {\n getRandomValues(entropy);\n for(var i = 0; i < entropy.length; ++i) {\n b.putInt32(entropy[i]);\n }\n } catch(e) {\n /* only ignore QuotaExceededError */\n if(!(typeof QuotaExceededError !== 'undefined' &&\n e instanceof QuotaExceededError)) {\n throw e;\n }\n }\n }\n }\n\n // be sad and add some weak random data\n if(b.length() < needed) {\n /* Draws from Park-Miller \"minimal standard\" 31 bit PRNG,\n implemented with David G. Carta's optimization: with 32 bit math\n and without division (Public Domain). 
*/\n var hi, lo, next;\n var seed = Math.floor(Math.random() * 0x010000);\n while(b.length() < needed) {\n lo = 16807 * (seed & 0xFFFF);\n hi = 16807 * (seed >> 16);\n lo += (hi & 0x7FFF) << 16;\n lo += hi >> 15;\n lo = (lo & 0x7FFFFFFF) + (lo >> 31);\n seed = lo & 0xFFFFFFFF;\n\n // consume lower 3 bytes of seed\n for(var i = 0; i < 3; ++i) {\n // throw in more pseudo random\n next = seed >>> (i << 3);\n next ^= Math.floor(Math.random() * 0x0100);\n b.putByte(next & 0xFF);\n }\n }\n }\n\n return b.getBytes(needed);\n }\n // initialize seed file APIs\n if(_crypto) {\n // use nodejs async API\n ctx.seedFile = function(needed, callback) {\n _crypto.randomBytes(needed, function(err, bytes) {\n if(err) {\n return callback(err);\n }\n callback(null, bytes.toString());\n });\n };\n // use nodejs sync API\n ctx.seedFileSync = function(needed) {\n return _crypto.randomBytes(needed).toString();\n };\n } else {\n ctx.seedFile = function(needed, callback) {\n try {\n callback(null, defaultSeedFile(needed));\n } catch(e) {\n callback(e);\n }\n };\n ctx.seedFileSync = defaultSeedFile;\n }\n\n /**\n * Adds entropy to a prng ctx's accumulator.\n *\n * @param bytes the bytes of entropy as a string.\n */\n ctx.collect = function(bytes) {\n // iterate over pools distributing entropy cyclically\n var count = bytes.length;\n for(var i = 0; i < count; ++i) {\n ctx.pools[ctx.pool].update(bytes.substr(i, 1));\n ctx.pool = (ctx.pool === 31) ? 0 : ctx.pool + 1;\n }\n };\n\n /**\n * Collects an integer of n bits.\n *\n * @param i the integer entropy.\n * @param n the number of bits in the integer.\n */\n ctx.collectInt = function(i, n) {\n var bytes = '';\n for(var x = 0; x < n; x += 8) {\n bytes += String.fromCharCode((i >> x) & 0xFF);\n }\n ctx.collect(bytes);\n };\n\n /**\n * Registers a Web Worker to receive immediate entropy from the main thread.\n * This method is required until Web Workers can access the native crypto\n * API. This method should be called twice for each created worker, once in\n * the main thread, and once in the worker itself.\n *\n * @param worker the worker to register.\n */\n ctx.registerWorker = function(worker) {\n // worker receives random bytes\n if(worker === self) {\n ctx.seedFile = function(needed, callback) {\n function listener(e) {\n var data = e.data;\n if(data.forge && data.forge.prng) {\n self.removeEventListener('message', listener);\n callback(data.forge.prng.err, data.forge.prng.bytes);\n }\n }\n self.addEventListener('message', listener);\n self.postMessage({forge: {prng: {needed: needed}}});\n };\n } else {\n // main thread sends random bytes upon request\n var listener = function(e) {\n var data = e.data;\n if(data.forge && data.forge.prng) {\n ctx.seedFile(data.forge.prng.needed, function(err, bytes) {\n worker.postMessage({forge: {prng: {err: err, bytes: bytes}}});\n });\n }\n };\n // TODO: do we need to remove the event listener when the worker dies?\n worker.addEventListener('message', listener);\n }\n };\n\n return ctx;\n};\n\n\n//# sourceURL=webpack://@waku/noise-rtc/./node_modules/node-forge/lib/prng.js?");
/***/ }),
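/* Usage sketch for the PRNG context API above, assuming the AES/SHA-256 plugin that
 * random.js (further down in this bundle) passes to forge.prng.create();
 * forge.random.createInstance() returns such a context. */
var forge = require('node-forge');
var ctx = forge.random.createInstance();
ctx.collectInt(Date.now(), 32);             // stir extra entropy into the pools
ctx.collect('caller-supplied entropy');     // arbitrary binary string is also accepted
var sync = ctx.generateSync(16);            // 16 bytes via the synchronous path
ctx.generate(16, function(err, bytes) {     // asynchronous path (required in web workers)
  if(!err) {
    console.log(forge.util.bytesToHex(bytes));
  }
});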
/***/ "./node_modules/node-forge/lib/pss.js":
/*!********************************************!*\
!*** ./node_modules/node-forge/lib/pss.js ***!
\********************************************/
/***/ ((module, __unused_webpack_exports, __webpack_require__) => {
eval("/**\n * Javascript implementation of PKCS#1 PSS signature padding.\n *\n * @author Stefan Siegl\n *\n * Copyright (c) 2012 Stefan Siegl \n */\nvar forge = __webpack_require__(/*! ./forge */ \"./node_modules/node-forge/lib/forge.js\");\n__webpack_require__(/*! ./random */ \"./node_modules/node-forge/lib/random.js\");\n__webpack_require__(/*! ./util */ \"./node_modules/node-forge/lib/util.js\");\n\n// shortcut for PSS API\nvar pss = module.exports = forge.pss = forge.pss || {};\n\n/**\n * Creates a PSS signature scheme object.\n *\n * There are several ways to provide a salt for encoding:\n *\n * 1. Specify the saltLength only and the built-in PRNG will generate it.\n * 2. Specify the saltLength and a custom PRNG with 'getBytesSync' defined that\n * will be used.\n * 3. Specify the salt itself as a forge.util.ByteBuffer.\n *\n * @param options the options to use:\n * md the message digest object to use, a forge md instance.\n * mgf the mask generation function to use, a forge mgf instance.\n * [saltLength] the length of the salt in octets.\n * [prng] the pseudo-random number generator to use to produce a salt.\n * [salt] the salt to use when encoding.\n *\n * @return a signature scheme object.\n */\npss.create = function(options) {\n // backwards compatibility w/legacy args: hash, mgf, sLen\n if(arguments.length === 3) {\n options = {\n md: arguments[0],\n mgf: arguments[1],\n saltLength: arguments[2]\n };\n }\n\n var hash = options.md;\n var mgf = options.mgf;\n var hLen = hash.digestLength;\n\n var salt_ = options.salt || null;\n if(typeof salt_ === 'string') {\n // assume binary-encoded string\n salt_ = forge.util.createBuffer(salt_);\n }\n\n var sLen;\n if('saltLength' in options) {\n sLen = options.saltLength;\n } else if(salt_ !== null) {\n sLen = salt_.length();\n } else {\n throw new Error('Salt length not specified or specific salt not given.');\n }\n\n if(salt_ !== null && salt_.length() !== sLen) {\n throw new Error('Given salt length does not match length of given salt.');\n }\n\n var prng = options.prng || forge.random;\n\n var pssobj = {};\n\n /**\n * Encodes a PSS signature.\n *\n * This function implements EMSA-PSS-ENCODE as per RFC 3447, section 9.1.1.\n *\n * @param md the message digest object with the hash to sign.\n * @param modsBits the length of the RSA modulus in bits.\n *\n * @return the encoded message as a binary-encoded string of length\n * ceil((modBits - 1) / 8).\n */\n pssobj.encode = function(md, modBits) {\n var i;\n var emBits = modBits - 1;\n var emLen = Math.ceil(emBits / 8);\n\n /* 2. Let mHash = Hash(M), an octet string of length hLen. */\n var mHash = md.digest().getBytes();\n\n /* 3. If emLen < hLen + sLen + 2, output \"encoding error\" and stop. */\n if(emLen < hLen + sLen + 2) {\n throw new Error('Message is too long to encrypt.');\n }\n\n /* 4. Generate a random octet string salt of length sLen; if sLen = 0,\n * then salt is the empty string. */\n var salt;\n if(salt_ === null) {\n salt = prng.getBytesSync(sLen);\n } else {\n salt = salt_.bytes();\n }\n\n /* 5. Let M' = (0x)00 00 00 00 00 00 00 00 || mHash || salt; */\n var m_ = new forge.util.ByteBuffer();\n m_.fillWithByte(0, 8);\n m_.putBytes(mHash);\n m_.putBytes(salt);\n\n /* 6. Let H = Hash(M'), an octet string of length hLen. */\n hash.start();\n hash.update(m_.getBytes());\n var h = hash.digest().getBytes();\n\n /* 7. Generate an octet string PS consisting of emLen - sLen - hLen - 2\n * zero octets. The length of PS may be 0. 
*/\n var ps = new forge.util.ByteBuffer();\n ps.fillWithByte(0, emLen - sLen - hLen - 2);\n\n /* 8. Let DB = PS || 0x01 || salt; DB is an octet string of length\n * emLen - hLen - 1. */\n ps.putByte(0x01);\n ps.putBytes(salt);\n var db = ps.getBytes();\n\n /* 9. Let dbMask = MGF(H, emLen - hLen - 1). */\n var maskLen = emLen - hLen - 1;\n var dbMask = mgf.generate(h, maskLen);\n\n /* 10. Let maskedDB = DB \\xor dbMask. */\n var maskedDB = '';\n for(i = 0; i < maskLen; i++) {\n maskedDB += String.fromCharCode(db.charCodeAt(i) ^ dbMask.charCodeAt(i));\n }\n\n /* 11. Set the leftmost 8emLen - emBits bits of the leftmost octet in\n * maskedDB to zero. */\n var mask = (0xFF00 >> (8 * emLen - emBits)) & 0xFF;\n maskedDB = String.fromCharCode(maskedDB.charCodeAt(0) & ~mask) +\n maskedDB.substr(1);\n\n /* 12. Let EM = maskedDB || H || 0xbc.\n * 13. Output EM. */\n return maskedDB + h + String.fromCharCode(0xbc);\n };\n\n /**\n * Verifies a PSS signature.\n *\n * This function implements EMSA-PSS-VERIFY as per RFC 3447, section 9.1.2.\n *\n * @param mHash the message digest hash, as a binary-encoded string, to\n * compare against the signature.\n * @param em the encoded message, as a binary-encoded string\n * (RSA decryption result).\n * @param modsBits the length of the RSA modulus in bits.\n *\n * @return true if the signature was verified, false if not.\n */\n pssobj.verify = function(mHash, em, modBits) {\n var i;\n var emBits = modBits - 1;\n var emLen = Math.ceil(emBits / 8);\n\n /* c. Convert the message representative m to an encoded message EM\n * of length emLen = ceil((modBits - 1) / 8) octets, where modBits\n * is the length in bits of the RSA modulus n */\n em = em.substr(-emLen);\n\n /* 3. If emLen < hLen + sLen + 2, output \"inconsistent\" and stop. */\n if(emLen < hLen + sLen + 2) {\n throw new Error('Inconsistent parameters to PSS signature verification.');\n }\n\n /* 4. If the rightmost octet of EM does not have hexadecimal value\n * 0xbc, output \"inconsistent\" and stop. */\n if(em.charCodeAt(emLen - 1) !== 0xbc) {\n throw new Error('Encoded message does not end in 0xBC.');\n }\n\n /* 5. Let maskedDB be the leftmost emLen - hLen - 1 octets of EM, and\n * let H be the next hLen octets. */\n var maskLen = emLen - hLen - 1;\n var maskedDB = em.substr(0, maskLen);\n var h = em.substr(maskLen, hLen);\n\n /* 6. If the leftmost 8emLen - emBits bits of the leftmost octet in\n * maskedDB are not all equal to zero, output \"inconsistent\" and stop. */\n var mask = (0xFF00 >> (8 * emLen - emBits)) & 0xFF;\n if((maskedDB.charCodeAt(0) & mask) !== 0) {\n throw new Error('Bits beyond keysize not zero as expected.');\n }\n\n /* 7. Let dbMask = MGF(H, emLen - hLen - 1). */\n var dbMask = mgf.generate(h, maskLen);\n\n /* 8. Let DB = maskedDB \\xor dbMask. */\n var db = '';\n for(i = 0; i < maskLen; i++) {\n db += String.fromCharCode(maskedDB.charCodeAt(i) ^ dbMask.charCodeAt(i));\n }\n\n /* 9. Set the leftmost 8emLen - emBits bits of the leftmost octet\n * in DB to zero. */\n db = String.fromCharCode(db.charCodeAt(0) & ~mask) + db.substr(1);\n\n /* 10. If the emLen - hLen - sLen - 2 leftmost octets of DB are not zero\n * or if the octet at position emLen - hLen - sLen - 1 (the leftmost\n * position is \"position 1\") does not have hexadecimal value 0x01,\n * output \"inconsistent\" and stop. 
*/\n var checkLen = emLen - hLen - sLen - 2;\n for(i = 0; i < checkLen; i++) {\n if(db.charCodeAt(i) !== 0x00) {\n throw new Error('Leftmost octets not zero as expected');\n }\n }\n\n if(db.charCodeAt(checkLen) !== 0x01) {\n throw new Error('Inconsistent PSS signature, 0x01 marker not found');\n }\n\n /* 11. Let salt be the last sLen octets of DB. */\n var salt = db.substr(-sLen);\n\n /* 12. Let M' = (0x)00 00 00 00 00 00 00 00 || mHash || salt */\n var m_ = new forge.util.ByteBuffer();\n m_.fillWithByte(0, 8);\n m_.putBytes(mHash);\n m_.putBytes(salt);\n\n /* 13. Let H' = Hash(M'), an octet string of length hLen. */\n hash.start();\n hash.update(m_.getBytes());\n var h_ = hash.digest().getBytes();\n\n /* 14. If H = H', output \"consistent.\" Otherwise, output \"inconsistent.\" */\n return h === h_;\n };\n\n return pssobj;\n};\n\n\n//# sourceURL=webpack://@waku/noise-rtc/./node_modules/node-forge/lib/pss.js?");
/***/ }),
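/* Usage sketch for the PSS module above: RSASSA-PSS sign and verify with SHA-1 and
 * MGF1, following the documented node-forge pattern. forge.pki.rsa and forge.mgf.mgf1
 * come from other modules of this bundle; the 1024-bit key is for illustration only. */
var forge = require('node-forge');
var keys = forge.pki.rsa.generateKeyPair({bits: 1024, e: 0x10001});
var md = forge.md.sha1.create();
md.update('sign this', 'utf8');
var pss = forge.pss.create({
  md: forge.md.sha1.create(),
  mgf: forge.mgf.mgf1.create(forge.md.sha1.create()),
  saltLength: 20
});
var signature = keys.privateKey.sign(md, pss);
// verify() takes the raw digest bytes plus the same PSS scheme object
var verified = keys.publicKey.verify(md.digest().getBytes(), signature, pss);
console.log('verified:', verified);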
/***/ "./node_modules/node-forge/lib/random.js":
/*!***********************************************!*\
!*** ./node_modules/node-forge/lib/random.js ***!
\***********************************************/
/***/ ((module, __unused_webpack_exports, __webpack_require__) => {
eval("/**\n * An API for getting cryptographically-secure random bytes. The bytes are\n * generated using the Fortuna algorithm devised by Bruce Schneier and\n * Niels Ferguson.\n *\n * Getting strong random bytes is not yet easy to do in javascript. The only\n * truish random entropy that can be collected is from the mouse, keyboard, or\n * from timing with respect to page loads, etc. This generator makes a poor\n * attempt at providing random bytes when those sources haven't yet provided\n * enough entropy to initially seed or to reseed the PRNG.\n *\n * @author Dave Longley\n *\n * Copyright (c) 2009-2014 Digital Bazaar, Inc.\n */\nvar forge = __webpack_require__(/*! ./forge */ \"./node_modules/node-forge/lib/forge.js\");\n__webpack_require__(/*! ./aes */ \"./node_modules/node-forge/lib/aes.js\");\n__webpack_require__(/*! ./sha256 */ \"./node_modules/node-forge/lib/sha256.js\");\n__webpack_require__(/*! ./prng */ \"./node_modules/node-forge/lib/prng.js\");\n__webpack_require__(/*! ./util */ \"./node_modules/node-forge/lib/util.js\");\n\n(function() {\n\n// forge.random already defined\nif(forge.random && forge.random.getBytes) {\n module.exports = forge.random;\n return;\n}\n\n(function(jQuery) {\n\n// the default prng plugin, uses AES-128\nvar prng_aes = {};\nvar _prng_aes_output = new Array(4);\nvar _prng_aes_buffer = forge.util.createBuffer();\nprng_aes.formatKey = function(key) {\n // convert the key into 32-bit integers\n var tmp = forge.util.createBuffer(key);\n key = new Array(4);\n key[0] = tmp.getInt32();\n key[1] = tmp.getInt32();\n key[2] = tmp.getInt32();\n key[3] = tmp.getInt32();\n\n // return the expanded key\n return forge.aes._expandKey(key, false);\n};\nprng_aes.formatSeed = function(seed) {\n // convert seed into 32-bit integers\n var tmp = forge.util.createBuffer(seed);\n seed = new Array(4);\n seed[0] = tmp.getInt32();\n seed[1] = tmp.getInt32();\n seed[2] = tmp.getInt32();\n seed[3] = tmp.getInt32();\n return seed;\n};\nprng_aes.cipher = function(key, seed) {\n forge.aes._updateBlock(key, seed, _prng_aes_output, false);\n _prng_aes_buffer.putInt32(_prng_aes_output[0]);\n _prng_aes_buffer.putInt32(_prng_aes_output[1]);\n _prng_aes_buffer.putInt32(_prng_aes_output[2]);\n _prng_aes_buffer.putInt32(_prng_aes_output[3]);\n return _prng_aes_buffer.getBytes();\n};\nprng_aes.increment = function(seed) {\n // FIXME: do we care about carry or signed issues?\n ++seed[3];\n return seed;\n};\nprng_aes.md = forge.md.sha256;\n\n/**\n * Creates a new PRNG.\n */\nfunction spawnPrng() {\n var ctx = forge.prng.create(prng_aes);\n\n /**\n * Gets random bytes. If a native secure crypto API is unavailable, this\n * method tries to make the bytes more unpredictable by drawing from data that\n * can be collected from the user of the browser, eg: mouse movement.\n *\n * If a callback is given, this method will be called asynchronously.\n *\n * @param count the number of random bytes to get.\n * @param [callback(err, bytes)] called once the operation completes.\n *\n * @return the random bytes in a string.\n */\n ctx.getBytes = function(count, callback) {\n return ctx.generate(count, callback);\n };\n\n /**\n * Gets random bytes asynchronously. 
If a native secure crypto API is\n * unavailable, this method tries to make the bytes more unpredictable by\n * drawing from data that can be collected from the user of the browser,\n * eg: mouse movement.\n *\n * @param count the number of random bytes to get.\n *\n * @return the random bytes in a string.\n */\n ctx.getBytesSync = function(count) {\n return ctx.generate(count);\n };\n\n return ctx;\n}\n\n// create default prng context\nvar _ctx = spawnPrng();\n\n// add other sources of entropy only if window.crypto.getRandomValues is not\n// available -- otherwise this source will be automatically used by the prng\nvar getRandomValues = null;\nvar globalScope = forge.util.globalScope;\nvar _crypto = globalScope.crypto || globalScope.msCrypto;\nif(_crypto && _crypto.getRandomValues) {\n getRandomValues = function(arr) {\n return _crypto.getRandomValues(arr);\n };\n}\n\nif(forge.options.usePureJavaScript ||\n (!forge.util.isNodejs && !getRandomValues)) {\n // if this is a web worker, do not use weak entropy, instead register to\n // receive strong entropy asynchronously from the main thread\n if(typeof window === 'undefined' || window.document === undefined) {\n // FIXME:\n }\n\n // get load time entropy\n _ctx.collectInt(+new Date(), 32);\n\n // add some entropy from navigator object\n if(typeof(navigator) !== 'undefined') {\n var _navBytes = '';\n for(var key in navigator) {\n try {\n if(typeof(navigator[key]) == 'string') {\n _navBytes += navigator[key];\n }\n } catch(e) {\n /* Some navigator keys might not be accessible, e.g. the geolocation\n attribute throws an exception if touched in Mozilla chrome://\n context.\n\n Silently ignore this and just don't use this as a source of\n entropy. */\n }\n }\n _ctx.collect(_navBytes);\n _navBytes = null;\n }\n\n // add mouse and keyboard collectors if jquery is available\n if(jQuery) {\n // set up mouse entropy capture\n jQuery().mousemove(function(e) {\n // add mouse coords\n _ctx.collectInt(e.clientX, 16);\n _ctx.collectInt(e.clientY, 16);\n });\n\n // set up keyboard entropy capture\n jQuery().keypress(function(e) {\n _ctx.collectInt(e.charCode, 8);\n });\n }\n}\n\n/* Random API */\nif(!forge.random) {\n forge.random = _ctx;\n} else {\n // extend forge.random with _ctx\n for(var key in _ctx) {\n forge.random[key] = _ctx[key];\n }\n}\n\n// expose spawn PRNG\nforge.random.createInstance = spawnPrng;\n\nmodule.exports = forge.random;\n\n})(typeof(jQuery) !== 'undefined' ? jQuery : null);\n\n})();\n\n\n//# sourceURL=webpack://@waku/noise-rtc/./node_modules/node-forge/lib/random.js?");
/***/ }),
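/* Usage sketch for the random module above: the default Fortuna-backed context exposed
 * as forge.random, showing both the synchronous and asynchronous byte APIs. */
var forge = require('node-forge');
var bytes = forge.random.getBytesSync(32);
console.log(forge.util.bytesToHex(bytes));
forge.random.getBytes(32, function(err, b) {
  if(!err) {
    console.log(forge.util.bytesToHex(b));
  }
});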
/***/ "./node_modules/node-forge/lib/rc2.js":
/*!********************************************!*\
!*** ./node_modules/node-forge/lib/rc2.js ***!
\********************************************/
/***/ ((module, __unused_webpack_exports, __webpack_require__) => {
eval("/**\n * RC2 implementation.\n *\n * @author Stefan Siegl\n *\n * Copyright (c) 2012 Stefan Siegl \n *\n * Information on the RC2 cipher is available from RFC #2268,\n * http://www.ietf.org/rfc/rfc2268.txt\n */\nvar forge = __webpack_require__(/*! ./forge */ \"./node_modules/node-forge/lib/forge.js\");\n__webpack_require__(/*! ./util */ \"./node_modules/node-forge/lib/util.js\");\n\nvar piTable = [\n 0xd9, 0x78, 0xf9, 0xc4, 0x19, 0xdd, 0xb5, 0xed, 0x28, 0xe9, 0xfd, 0x79, 0x4a, 0xa0, 0xd8, 0x9d,\n 0xc6, 0x7e, 0x37, 0x83, 0x2b, 0x76, 0x53, 0x8e, 0x62, 0x4c, 0x64, 0x88, 0x44, 0x8b, 0xfb, 0xa2,\n 0x17, 0x9a, 0x59, 0xf5, 0x87, 0xb3, 0x4f, 0x13, 0x61, 0x45, 0x6d, 0x8d, 0x09, 0x81, 0x7d, 0x32,\n 0xbd, 0x8f, 0x40, 0xeb, 0x86, 0xb7, 0x7b, 0x0b, 0xf0, 0x95, 0x21, 0x22, 0x5c, 0x6b, 0x4e, 0x82,\n 0x54, 0xd6, 0x65, 0x93, 0xce, 0x60, 0xb2, 0x1c, 0x73, 0x56, 0xc0, 0x14, 0xa7, 0x8c, 0xf1, 0xdc,\n 0x12, 0x75, 0xca, 0x1f, 0x3b, 0xbe, 0xe4, 0xd1, 0x42, 0x3d, 0xd4, 0x30, 0xa3, 0x3c, 0xb6, 0x26,\n 0x6f, 0xbf, 0x0e, 0xda, 0x46, 0x69, 0x07, 0x57, 0x27, 0xf2, 0x1d, 0x9b, 0xbc, 0x94, 0x43, 0x03,\n 0xf8, 0x11, 0xc7, 0xf6, 0x90, 0xef, 0x3e, 0xe7, 0x06, 0xc3, 0xd5, 0x2f, 0xc8, 0x66, 0x1e, 0xd7,\n 0x08, 0xe8, 0xea, 0xde, 0x80, 0x52, 0xee, 0xf7, 0x84, 0xaa, 0x72, 0xac, 0x35, 0x4d, 0x6a, 0x2a,\n 0x96, 0x1a, 0xd2, 0x71, 0x5a, 0x15, 0x49, 0x74, 0x4b, 0x9f, 0xd0, 0x5e, 0x04, 0x18, 0xa4, 0xec,\n 0xc2, 0xe0, 0x41, 0x6e, 0x0f, 0x51, 0xcb, 0xcc, 0x24, 0x91, 0xaf, 0x50, 0xa1, 0xf4, 0x70, 0x39,\n 0x99, 0x7c, 0x3a, 0x85, 0x23, 0xb8, 0xb4, 0x7a, 0xfc, 0x02, 0x36, 0x5b, 0x25, 0x55, 0x97, 0x31,\n 0x2d, 0x5d, 0xfa, 0x98, 0xe3, 0x8a, 0x92, 0xae, 0x05, 0xdf, 0x29, 0x10, 0x67, 0x6c, 0xba, 0xc9,\n 0xd3, 0x00, 0xe6, 0xcf, 0xe1, 0x9e, 0xa8, 0x2c, 0x63, 0x16, 0x01, 0x3f, 0x58, 0xe2, 0x89, 0xa9,\n 0x0d, 0x38, 0x34, 0x1b, 0xab, 0x33, 0xff, 0xb0, 0xbb, 0x48, 0x0c, 0x5f, 0xb9, 0xb1, 0xcd, 0x2e,\n 0xc5, 0xf3, 0xdb, 0x47, 0xe5, 0xa5, 0x9c, 0x77, 0x0a, 0xa6, 0x20, 0x68, 0xfe, 0x7f, 0xc1, 0xad\n];\n\nvar s = [1, 2, 3, 5];\n\n/**\n * Rotate a word left by given number of bits.\n *\n * Bits that are shifted out on the left are put back in on the right\n * hand side.\n *\n * @param word The word to shift left.\n * @param bits The number of bits to shift by.\n * @return The rotated word.\n */\nvar rol = function(word, bits) {\n return ((word << bits) & 0xffff) | ((word & 0xffff) >> (16 - bits));\n};\n\n/**\n * Rotate a word right by given number of bits.\n *\n * Bits that are shifted out on the right are put back in on the left\n * hand side.\n *\n * @param word The word to shift right.\n * @param bits The number of bits to shift by.\n * @return The rotated word.\n */\nvar ror = function(word, bits) {\n return ((word & 0xffff) >> bits) | ((word << (16 - bits)) & 0xffff);\n};\n\n/* RC2 API */\nmodule.exports = forge.rc2 = forge.rc2 || {};\n\n/**\n * Perform RC2 key expansion as per RFC #2268, section 2.\n *\n * @param key variable-length user key (between 1 and 128 bytes)\n * @param effKeyBits number of effective key bits (default: 128)\n * @return the expanded RC2 key (ByteBuffer of 128 bytes)\n */\nforge.rc2.expandKey = function(key, effKeyBits) {\n if(typeof key === 'string') {\n key = forge.util.createBuffer(key);\n }\n effKeyBits = effKeyBits || 128;\n\n /* introduce variables that match the names used in RFC #2268 */\n var L = key;\n var T = key.length();\n var T1 = effKeyBits;\n var T8 = Math.ceil(T1 / 8);\n var TM = 0xff >> (T1 & 0x07);\n var i;\n\n for(i = T; i < 128; i++) {\n L.putByte(piTable[(L.at(i - 1) + L.at(i - T)) & 0xff]);\n }\n\n 
L.setAt(128 - T8, piTable[L.at(128 - T8) & TM]);\n\n for(i = 127 - T8; i >= 0; i--) {\n L.setAt(i, piTable[L.at(i + 1) ^ L.at(i + T8)]);\n }\n\n return L;\n};\n\n/**\n * Creates a RC2 cipher object.\n *\n * @param key the symmetric key to use (as base for key generation).\n * @param bits the number of effective key bits.\n * @param encrypt false for decryption, true for encryption.\n *\n * @return the cipher.\n */\nvar createCipher = function(key, bits, encrypt) {\n var _finish = false, _input = null, _output = null, _iv = null;\n var mixRound, mashRound;\n var i, j, K = [];\n\n /* Expand key and fill into K[] Array */\n key = forge.rc2.expandKey(key, bits);\n for(i = 0; i < 64; i++) {\n K.push(key.getInt16Le());\n }\n\n if(encrypt) {\n /**\n * Perform one mixing round \"in place\".\n *\n * @param R Array of four words to perform mixing on.\n */\n mixRound = function(R) {\n for(i = 0; i < 4; i++) {\n R[i] += K[j] + (R[(i + 3) % 4] & R[(i + 2) % 4]) +\n ((~R[(i + 3) % 4]) & R[(i + 1) % 4]);\n R[i] = rol(R[i], s[i]);\n j++;\n }\n };\n\n /**\n * Perform one mashing round \"in place\".\n *\n * @param R Array of four words to perform mashing on.\n */\n mashRound = function(R) {\n for(i = 0; i < 4; i++) {\n R[i] += K[R[(i + 3) % 4] & 63];\n }\n };\n } else {\n /**\n * Perform one r-mixing round \"in place\".\n *\n * @param R Array of four words to perform mixing on.\n */\n mixRound = function(R) {\n for(i = 3; i >= 0; i--) {\n R[i] = ror(R[i], s[i]);\n R[i] -= K[j] + (R[(i + 3) % 4] & R[(i + 2) % 4]) +\n ((~R[(i + 3) % 4]) & R[(i + 1) % 4]);\n j--;\n }\n };\n\n /**\n * Perform one r-mashing round \"in place\".\n *\n * @param R Array of four words to perform mashing on.\n */\n mashRound = function(R) {\n for(i = 3; i >= 0; i--) {\n R[i] -= K[R[(i + 3) % 4] & 63];\n }\n };\n }\n\n /**\n * Run the specified cipher execution plan.\n *\n * This function takes four words from the input buffer, applies the IV on\n * it (if requested) and runs the provided execution plan.\n *\n * The plan must be put together in form of a array of arrays. Where the\n * outer one is simply a list of steps to perform and the inner one needs\n * to have two elements: the first one telling how many rounds to perform,\n * the second one telling what to do (i.e. the function to call).\n *\n * @param {Array} plan The plan to execute.\n */\n var runPlan = function(plan) {\n var R = [];\n\n /* Get data from input buffer and fill the four words into R */\n for(i = 0; i < 4; i++) {\n var val = _input.getInt16Le();\n\n if(_iv !== null) {\n if(encrypt) {\n /* We're encrypting, apply the IV first. */\n val ^= _iv.getInt16Le();\n } else {\n /* We're decryption, keep cipher text for next block. */\n _iv.putInt16Le(val);\n }\n }\n\n R.push(val & 0xffff);\n }\n\n /* Reset global \"j\" variable as per spec. */\n j = encrypt ? 0 : 63;\n\n /* Run execution plan. */\n for(var ptr = 0; ptr < plan.length; ptr++) {\n for(var ctr = 0; ctr < plan[ptr][0]; ctr++) {\n plan[ptr][1](R);\n }\n }\n\n /* Write back result to output buffer. */\n for(i = 0; i < 4; i++) {\n if(_iv !== null) {\n if(encrypt) {\n /* We're encrypting in CBC-mode, feed back encrypted bytes into\n IV buffer to carry it forward to next block. 
*/\n _iv.putInt16Le(R[i]);\n } else {\n R[i] ^= _iv.getInt16Le();\n }\n }\n\n _output.putInt16Le(R[i]);\n }\n };\n\n /* Create cipher object */\n var cipher = null;\n cipher = {\n /**\n * Starts or restarts the encryption or decryption process, whichever\n * was previously configured.\n *\n * To use the cipher in CBC mode, iv may be given either as a string\n * of bytes, or as a byte buffer. For ECB mode, give null as iv.\n *\n * @param iv the initialization vector to use, null for ECB mode.\n * @param output the output the buffer to write to, null to create one.\n */\n start: function(iv, output) {\n if(iv) {\n /* CBC mode */\n if(typeof iv === 'string') {\n iv = forge.util.createBuffer(iv);\n }\n }\n\n _finish = false;\n _input = forge.util.createBuffer();\n _output = output || new forge.util.createBuffer();\n _iv = iv;\n\n cipher.output = _output;\n },\n\n /**\n * Updates the next block.\n *\n * @param input the buffer to read from.\n */\n update: function(input) {\n if(!_finish) {\n // not finishing, so fill the input buffer with more input\n _input.putBuffer(input);\n }\n\n while(_input.length() >= 8) {\n runPlan([\n [ 5, mixRound ],\n [ 1, mashRound ],\n [ 6, mixRound ],\n [ 1, mashRound ],\n [ 5, mixRound ]\n ]);\n }\n },\n\n /**\n * Finishes encrypting or decrypting.\n *\n * @param pad a padding function to use, null for PKCS#7 padding,\n * signature(blockSize, buffer, decrypt).\n *\n * @return true if successful, false on error.\n */\n finish: function(pad) {\n var rval = true;\n\n if(encrypt) {\n if(pad) {\n rval = pad(8, _input, !encrypt);\n } else {\n // add PKCS#7 padding to block (each pad byte is the\n // value of the number of pad bytes)\n var padding = (_input.length() === 8) ? 8 : (8 - _input.length());\n _input.fillWithByte(padding, padding);\n }\n }\n\n if(rval) {\n // do final update\n _finish = true;\n cipher.update();\n }\n\n if(!encrypt) {\n // check for error: input data not a multiple of block size\n rval = (_input.length() === 0);\n if(rval) {\n if(pad) {\n rval = pad(8, _output, !encrypt);\n } else {\n // ensure padding byte count is valid\n var len = _output.length();\n var count = _output.at(len - 1);\n\n if(count > len) {\n rval = false;\n } else {\n // trim off padding bytes\n _output.truncate(count);\n }\n }\n }\n }\n\n return rval;\n }\n };\n\n return cipher;\n};\n\n/**\n * Creates an RC2 cipher object to encrypt data in ECB or CBC mode using the\n * given symmetric key. 
The output will be stored in the 'output' member\n * of the returned cipher.\n *\n * The key and iv may be given as a string of bytes or a byte buffer.\n * The cipher is initialized to use 128 effective key bits.\n *\n * @param key the symmetric key to use.\n * @param iv the initialization vector to use.\n * @param output the buffer to write to, null to create one.\n *\n * @return the cipher.\n */\nforge.rc2.startEncrypting = function(key, iv, output) {\n var cipher = forge.rc2.createEncryptionCipher(key, 128);\n cipher.start(iv, output);\n return cipher;\n};\n\n/**\n * Creates an RC2 cipher object to encrypt data in ECB or CBC mode using the\n * given symmetric key.\n *\n * The key may be given as a string of bytes or a byte buffer.\n *\n * To start encrypting call start() on the cipher with an iv and optional\n * output buffer.\n *\n * @param key the symmetric key to use.\n *\n * @return the cipher.\n */\nforge.rc2.createEncryptionCipher = function(key, bits) {\n return createCipher(key, bits, true);\n};\n\n/**\n * Creates an RC2 cipher object to decrypt data in ECB or CBC mode using the\n * given symmetric key. The output will be stored in the 'output' member\n * of the returned cipher.\n *\n * The key and iv may be given as a string of bytes or a byte buffer.\n * The cipher is initialized to use 128 effective key bits.\n *\n * @param key the symmetric key to use.\n * @param iv the initialization vector to use.\n * @param output the buffer to write to, null to create one.\n *\n * @return the cipher.\n */\nforge.rc2.startDecrypting = function(key, iv, output) {\n var cipher = forge.rc2.createDecryptionCipher(key, 128);\n cipher.start(iv, output);\n return cipher;\n};\n\n/**\n * Creates an RC2 cipher object to decrypt data in ECB or CBC mode using the\n * given symmetric key.\n *\n * The key may be given as a string of bytes or a byte buffer.\n *\n * To start decrypting call start() on the cipher with an iv and optional\n * output buffer.\n *\n * @param key the symmetric key to use.\n *\n * @return the cipher.\n */\nforge.rc2.createDecryptionCipher = function(key, bits) {\n return createCipher(key, bits, false);\n};\n\n\n//# sourceURL=webpack://@waku/noise-rtc/./node_modules/node-forge/lib/rc2.js?");
/***/ }),
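/* Usage sketch (editorial note, not part of the bundle): the forge.rc2 module above
   implements the RFC 2268 RC2 cipher in ECB/CBC mode with 128 effective key bits by
   default. A minimal CBC round trip, assuming node-forge is installed:

     var forge = require('node-forge');

     var key = forge.random.getBytesSync(16); // 128-bit key material
     var iv = forge.random.getBytesSync(8);   // RC2 uses a 64-bit block, so an 8-byte IV

     // encrypt (CBC mode because an IV is given; pass null as iv for ECB)
     var cipher = forge.rc2.startEncrypting(key, iv, null);
     cipher.update(forge.util.createBuffer('some plaintext', 'utf8'));
     cipher.finish(); // applies PKCS#7 padding
     var ciphertext = cipher.output.getBytes();

     // decrypt
     var decipher = forge.rc2.startDecrypting(key, iv, null);
     decipher.update(forge.util.createBuffer(ciphertext));
     decipher.finish(); // verifies and strips PKCS#7 padding
     var plaintext = decipher.output.getBytes(); // binary string; equals the UTF-8 input here
*/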
/***/ "./node_modules/node-forge/lib/rsa.js":
/*!********************************************!*\
!*** ./node_modules/node-forge/lib/rsa.js ***!
\********************************************/
/***/ ((module, __unused_webpack_exports, __webpack_require__) => {
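/* Usage sketch (editorial note, not part of the bundle): the eval'd module below
   implements forge's RSA primitives (key generation, PKCS#1 v1.5 and OAEP padding,
   CRT-based private-key operations with blinding). Because the module body is a single
   eval'd string, this sketch of the exported API is placed ahead of it; it assumes
   node-forge is installed as a dependency:

     var forge = require('node-forge');

     // generate a 2048-bit key pair asynchronously (uses native APIs when available)
     forge.pki.rsa.generateKeyPair({bits: 2048, workers: -1}, function(err, keypair) {
       if(err) throw err;

       // encrypt/decrypt with RSA-OAEP
       var ciphertext = keypair.publicKey.encrypt('secret', 'RSA-OAEP');
       var plaintext = keypair.privateKey.decrypt(ciphertext, 'RSA-OAEP');

       // sign/verify a SHA-256 digest (RSASSA-PKCS1-v1_5 by default)
       var md = forge.md.sha256.create();
       md.update('message to sign', 'utf8');
       var signature = keypair.privateKey.sign(md);
       var ok = keypair.publicKey.verify(md.digest().bytes(), signature);
     });
*/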
eval("/**\n * Javascript implementation of basic RSA algorithms.\n *\n * @author Dave Longley\n *\n * Copyright (c) 2010-2014 Digital Bazaar, Inc.\n *\n * The only algorithm currently supported for PKI is RSA.\n *\n * An RSA key is often stored in ASN.1 DER format. The SubjectPublicKeyInfo\n * ASN.1 structure is composed of an algorithm of type AlgorithmIdentifier\n * and a subjectPublicKey of type bit string.\n *\n * The AlgorithmIdentifier contains an Object Identifier (OID) and parameters\n * for the algorithm, if any. In the case of RSA, there aren't any.\n *\n * SubjectPublicKeyInfo ::= SEQUENCE {\n * algorithm AlgorithmIdentifier,\n * subjectPublicKey BIT STRING\n * }\n *\n * AlgorithmIdentifer ::= SEQUENCE {\n * algorithm OBJECT IDENTIFIER,\n * parameters ANY DEFINED BY algorithm OPTIONAL\n * }\n *\n * For an RSA public key, the subjectPublicKey is:\n *\n * RSAPublicKey ::= SEQUENCE {\n * modulus INTEGER, -- n\n * publicExponent INTEGER -- e\n * }\n *\n * PrivateKeyInfo ::= SEQUENCE {\n * version Version,\n * privateKeyAlgorithm PrivateKeyAlgorithmIdentifier,\n * privateKey PrivateKey,\n * attributes [0] IMPLICIT Attributes OPTIONAL\n * }\n *\n * Version ::= INTEGER\n * PrivateKeyAlgorithmIdentifier ::= AlgorithmIdentifier\n * PrivateKey ::= OCTET STRING\n * Attributes ::= SET OF Attribute\n *\n * An RSA private key as the following structure:\n *\n * RSAPrivateKey ::= SEQUENCE {\n * version Version,\n * modulus INTEGER, -- n\n * publicExponent INTEGER, -- e\n * privateExponent INTEGER, -- d\n * prime1 INTEGER, -- p\n * prime2 INTEGER, -- q\n * exponent1 INTEGER, -- d mod (p-1)\n * exponent2 INTEGER, -- d mod (q-1)\n * coefficient INTEGER -- (inverse of q) mod p\n * }\n *\n * Version ::= INTEGER\n *\n * The OID for the RSA key algorithm is: 1.2.840.113549.1.1.1\n */\nvar forge = __webpack_require__(/*! ./forge */ \"./node_modules/node-forge/lib/forge.js\");\n__webpack_require__(/*! ./asn1 */ \"./node_modules/node-forge/lib/asn1.js\");\n__webpack_require__(/*! ./jsbn */ \"./node_modules/node-forge/lib/jsbn.js\");\n__webpack_require__(/*! ./oids */ \"./node_modules/node-forge/lib/oids.js\");\n__webpack_require__(/*! ./pkcs1 */ \"./node_modules/node-forge/lib/pkcs1.js\");\n__webpack_require__(/*! ./prime */ \"./node_modules/node-forge/lib/prime.js\");\n__webpack_require__(/*! ./random */ \"./node_modules/node-forge/lib/random.js\");\n__webpack_require__(/*! ./util */ \"./node_modules/node-forge/lib/util.js\");\n\nif(typeof BigInteger === 'undefined') {\n var BigInteger = forge.jsbn.BigInteger;\n}\n\nvar _crypto = forge.util.isNodejs ? __webpack_require__(/*! 
crypto */ \"?b254\") : null;\n\n// shortcut for asn.1 API\nvar asn1 = forge.asn1;\n\n// shortcut for util API\nvar util = forge.util;\n\n/*\n * RSA encryption and decryption, see RFC 2313.\n */\nforge.pki = forge.pki || {};\nmodule.exports = forge.pki.rsa = forge.rsa = forge.rsa || {};\nvar pki = forge.pki;\n\n// for finding primes, which are 30k+i for i = 1, 7, 11, 13, 17, 19, 23, 29\nvar GCD_30_DELTA = [6, 4, 2, 4, 2, 4, 6, 2];\n\n// validator for a PrivateKeyInfo structure\nvar privateKeyValidator = {\n // PrivateKeyInfo\n name: 'PrivateKeyInfo',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.SEQUENCE,\n constructed: true,\n value: [{\n // Version (INTEGER)\n name: 'PrivateKeyInfo.version',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.INTEGER,\n constructed: false,\n capture: 'privateKeyVersion'\n }, {\n // privateKeyAlgorithm\n name: 'PrivateKeyInfo.privateKeyAlgorithm',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.SEQUENCE,\n constructed: true,\n value: [{\n name: 'AlgorithmIdentifier.algorithm',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.OID,\n constructed: false,\n capture: 'privateKeyOid'\n }]\n }, {\n // PrivateKey\n name: 'PrivateKeyInfo',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.OCTETSTRING,\n constructed: false,\n capture: 'privateKey'\n }]\n};\n\n// validator for an RSA private key\nvar rsaPrivateKeyValidator = {\n // RSAPrivateKey\n name: 'RSAPrivateKey',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.SEQUENCE,\n constructed: true,\n value: [{\n // Version (INTEGER)\n name: 'RSAPrivateKey.version',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.INTEGER,\n constructed: false,\n capture: 'privateKeyVersion'\n }, {\n // modulus (n)\n name: 'RSAPrivateKey.modulus',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.INTEGER,\n constructed: false,\n capture: 'privateKeyModulus'\n }, {\n // publicExponent (e)\n name: 'RSAPrivateKey.publicExponent',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.INTEGER,\n constructed: false,\n capture: 'privateKeyPublicExponent'\n }, {\n // privateExponent (d)\n name: 'RSAPrivateKey.privateExponent',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.INTEGER,\n constructed: false,\n capture: 'privateKeyPrivateExponent'\n }, {\n // prime1 (p)\n name: 'RSAPrivateKey.prime1',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.INTEGER,\n constructed: false,\n capture: 'privateKeyPrime1'\n }, {\n // prime2 (q)\n name: 'RSAPrivateKey.prime2',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.INTEGER,\n constructed: false,\n capture: 'privateKeyPrime2'\n }, {\n // exponent1 (d mod (p-1))\n name: 'RSAPrivateKey.exponent1',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.INTEGER,\n constructed: false,\n capture: 'privateKeyExponent1'\n }, {\n // exponent2 (d mod (q-1))\n name: 'RSAPrivateKey.exponent2',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.INTEGER,\n constructed: false,\n capture: 'privateKeyExponent2'\n }, {\n // coefficient ((inverse of q) mod p)\n name: 'RSAPrivateKey.coefficient',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.INTEGER,\n constructed: false,\n capture: 'privateKeyCoefficient'\n }]\n};\n\n// validator for an RSA public key\nvar rsaPublicKeyValidator = {\n // RSAPublicKey\n name: 'RSAPublicKey',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.SEQUENCE,\n constructed: true,\n value: [{\n // modulus (n)\n name: 'RSAPublicKey.modulus',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.INTEGER,\n constructed: false,\n capture: 'publicKeyModulus'\n }, {\n // 
publicExponent (e)\n name: 'RSAPublicKey.exponent',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.INTEGER,\n constructed: false,\n capture: 'publicKeyExponent'\n }]\n};\n\n// validator for an SubjectPublicKeyInfo structure\n// Note: Currently only works with an RSA public key\nvar publicKeyValidator = forge.pki.rsa.publicKeyValidator = {\n name: 'SubjectPublicKeyInfo',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.SEQUENCE,\n constructed: true,\n captureAsn1: 'subjectPublicKeyInfo',\n value: [{\n name: 'SubjectPublicKeyInfo.AlgorithmIdentifier',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.SEQUENCE,\n constructed: true,\n value: [{\n name: 'AlgorithmIdentifier.algorithm',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.OID,\n constructed: false,\n capture: 'publicKeyOid'\n }]\n }, {\n // subjectPublicKey\n name: 'SubjectPublicKeyInfo.subjectPublicKey',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.BITSTRING,\n constructed: false,\n value: [{\n // RSAPublicKey\n name: 'SubjectPublicKeyInfo.subjectPublicKey.RSAPublicKey',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.SEQUENCE,\n constructed: true,\n optional: true,\n captureAsn1: 'rsaPublicKey'\n }]\n }]\n};\n\n// validator for a DigestInfo structure\nvar digestInfoValidator = {\n name: 'DigestInfo',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.SEQUENCE,\n constructed: true,\n value: [{\n name: 'DigestInfo.DigestAlgorithm',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.SEQUENCE,\n constructed: true,\n value: [{\n name: 'DigestInfo.DigestAlgorithm.algorithmIdentifier',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.OID,\n constructed: false,\n capture: 'algorithmIdentifier'\n }, {\n // NULL paramters\n name: 'DigestInfo.DigestAlgorithm.parameters',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.NULL,\n // captured only to check existence for md2 and md5\n capture: 'parameters',\n optional: true,\n constructed: false\n }]\n }, {\n // digest\n name: 'DigestInfo.digest',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.OCTETSTRING,\n constructed: false,\n capture: 'digest'\n }]\n};\n\n/**\n * Wrap digest in DigestInfo object.\n *\n * This function implements EMSA-PKCS1-v1_5-ENCODE as per RFC 3447.\n *\n * DigestInfo ::= SEQUENCE {\n * digestAlgorithm DigestAlgorithmIdentifier,\n * digest Digest\n * }\n *\n * DigestAlgorithmIdentifier ::= AlgorithmIdentifier\n * Digest ::= OCTET STRING\n *\n * @param md the message digest object with the hash to sign.\n *\n * @return the encoded message (ready for RSA encrytion)\n */\nvar emsaPkcs1v15encode = function(md) {\n // get the oid for the algorithm\n var oid;\n if(md.algorithm in pki.oids) {\n oid = pki.oids[md.algorithm];\n } else {\n var error = new Error('Unknown message digest algorithm.');\n error.algorithm = md.algorithm;\n throw error;\n }\n var oidBytes = asn1.oidToDer(oid).getBytes();\n\n // create the digest info\n var digestInfo = asn1.create(\n asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, []);\n var digestAlgorithm = asn1.create(\n asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, []);\n digestAlgorithm.value.push(asn1.create(\n asn1.Class.UNIVERSAL, asn1.Type.OID, false, oidBytes));\n digestAlgorithm.value.push(asn1.create(\n asn1.Class.UNIVERSAL, asn1.Type.NULL, false, ''));\n var digest = asn1.create(\n asn1.Class.UNIVERSAL, asn1.Type.OCTETSTRING,\n false, md.digest().getBytes());\n digestInfo.value.push(digestAlgorithm);\n digestInfo.value.push(digest);\n\n // encode digest info\n return 
asn1.toDer(digestInfo).getBytes();\n};\n\n/**\n * Performs x^c mod n (RSA encryption or decryption operation).\n *\n * @param x the number to raise and mod.\n * @param key the key to use.\n * @param pub true if the key is public, false if private.\n *\n * @return the result of x^c mod n.\n */\nvar _modPow = function(x, key, pub) {\n if(pub) {\n return x.modPow(key.e, key.n);\n }\n\n if(!key.p || !key.q) {\n // allow calculation without CRT params (slow)\n return x.modPow(key.d, key.n);\n }\n\n // pre-compute dP, dQ, and qInv if necessary\n if(!key.dP) {\n key.dP = key.d.mod(key.p.subtract(BigInteger.ONE));\n }\n if(!key.dQ) {\n key.dQ = key.d.mod(key.q.subtract(BigInteger.ONE));\n }\n if(!key.qInv) {\n key.qInv = key.q.modInverse(key.p);\n }\n\n /* Chinese remainder theorem (CRT) states:\n\n Suppose n1, n2, ..., nk are positive integers which are pairwise\n coprime (n1 and n2 have no common factors other than 1). For any\n integers x1, x2, ..., xk there exists an integer x solving the\n system of simultaneous congruences (where ~= means modularly\n congruent so a ~= b mod n means a mod n = b mod n):\n\n x ~= x1 mod n1\n x ~= x2 mod n2\n ...\n x ~= xk mod nk\n\n This system of congruences has a single simultaneous solution x\n between 0 and n - 1. Furthermore, each xk solution and x itself\n is congruent modulo the product n = n1*n2*...*nk.\n So x1 mod n = x2 mod n = xk mod n = x mod n.\n\n The single simultaneous solution x can be solved with the following\n equation:\n\n x = sum(xi*ri*si) mod n where ri = n/ni and si = ri^-1 mod ni.\n\n Where x is less than n, xi = x mod ni.\n\n For RSA we are only concerned with k = 2. The modulus n = pq, where\n p and q are coprime. The RSA decryption algorithm is:\n\n y = x^d mod n\n\n Given the above:\n\n x1 = x^d mod p\n r1 = n/p = q\n s1 = q^-1 mod p\n x2 = x^d mod q\n r2 = n/q = p\n s2 = p^-1 mod q\n\n So y = (x1r1s1 + x2r2s2) mod n\n = ((x^d mod p)q(q^-1 mod p) + (x^d mod q)p(p^-1 mod q)) mod n\n\n According to Fermat's Little Theorem, if the modulus P is prime,\n for any integer A not evenly divisible by P, A^(P-1) ~= 1 mod P.\n Since A is not divisible by P it follows that if:\n N ~= M mod (P - 1), then A^N mod P = A^M mod P. Therefore:\n\n A^N mod P = A^(M mod (P - 1)) mod P. (The latter takes less effort\n to calculate). In order to calculate x^d mod p more quickly the\n exponent d mod (p - 1) is stored in the RSA private key (the same\n is done for x^d mod q). These values are referred to as dP and dQ\n respectively. Therefore we now have:\n\n y = ((x^dP mod p)q(q^-1 mod p) + (x^dQ mod q)p(p^-1 mod q)) mod n\n\n Since we'll be reducing x^dP by modulo p (same for q) we can also\n reduce x by p (and q respectively) before hand. Therefore, let\n\n xp = ((x mod p)^dP mod p), and\n xq = ((x mod q)^dQ mod q), yielding:\n\n y = (xp*q*(q^-1 mod p) + xq*p*(p^-1 mod q)) mod n\n\n This can be further reduced to a simple algorithm that only\n requires 1 inverse (the q inverse is used) to be used and stored.\n The algorithm is called Garner's algorithm. If qInv is the\n inverse of q, we simply calculate:\n\n y = (qInv*(xp - xq) mod p) * q + xq\n\n However, there are two further complications. First, we need to\n ensure that xp > xq to prevent signed BigIntegers from being used\n so we add p until this is true (since we will be mod'ing with\n p anyway). Then, there is a known timing attack on algorithms\n using the CRT. To mitigate this risk, \"cryptographic blinding\"\n should be used. 
This requires simply generating a random number r\n between 0 and n-1 and its inverse and multiplying x by r^e before\n calculating y and then multiplying y by r^-1 afterwards. Note that\n r must be coprime with n (gcd(r, n) === 1) in order to have an\n inverse.\n */\n\n // cryptographic blinding\n var r;\n do {\n r = new BigInteger(\n forge.util.bytesToHex(forge.random.getBytes(key.n.bitLength() / 8)),\n 16);\n } while(r.compareTo(key.n) >= 0 || !r.gcd(key.n).equals(BigInteger.ONE));\n x = x.multiply(r.modPow(key.e, key.n)).mod(key.n);\n\n // calculate xp and xq\n var xp = x.mod(key.p).modPow(key.dP, key.p);\n var xq = x.mod(key.q).modPow(key.dQ, key.q);\n\n // xp must be larger than xq to avoid signed bit usage\n while(xp.compareTo(xq) < 0) {\n xp = xp.add(key.p);\n }\n\n // do last step\n var y = xp.subtract(xq)\n .multiply(key.qInv).mod(key.p)\n .multiply(key.q).add(xq);\n\n // remove effect of random for cryptographic blinding\n y = y.multiply(r.modInverse(key.n)).mod(key.n);\n\n return y;\n};\n\n/**\n * NOTE: THIS METHOD IS DEPRECATED, use 'sign' on a private key object or\n * 'encrypt' on a public key object instead.\n *\n * Performs RSA encryption.\n *\n * The parameter bt controls whether to put padding bytes before the\n * message passed in. Set bt to either true or false to disable padding\n * completely (in order to handle e.g. EMSA-PSS encoding seperately before),\n * signaling whether the encryption operation is a public key operation\n * (i.e. encrypting data) or not, i.e. private key operation (data signing).\n *\n * For PKCS#1 v1.5 padding pass in the block type to use, i.e. either 0x01\n * (for signing) or 0x02 (for encryption). The key operation mode (private\n * or public) is derived from this flag in that case).\n *\n * @param m the message to encrypt as a byte string.\n * @param key the RSA key to use.\n * @param bt for PKCS#1 v1.5 padding, the block type to use\n * (0x01 for private key, 0x02 for public),\n * to disable padding: true = public key, false = private key.\n *\n * @return the encrypted bytes as a string.\n */\npki.rsa.encrypt = function(m, key, bt) {\n var pub = bt;\n var eb;\n\n // get the length of the modulus in bytes\n var k = Math.ceil(key.n.bitLength() / 8);\n\n if(bt !== false && bt !== true) {\n // legacy, default to PKCS#1 v1.5 padding\n pub = (bt === 0x02);\n eb = _encodePkcs1_v1_5(m, key, bt);\n } else {\n eb = forge.util.createBuffer();\n eb.putBytes(m);\n }\n\n // load encryption block as big integer 'x'\n // FIXME: hex conversion inefficient, get BigInteger w/byte strings\n var x = new BigInteger(eb.toHex(), 16);\n\n // do RSA encryption\n var y = _modPow(x, key, pub);\n\n // convert y into the encrypted data byte string, if y is shorter in\n // bytes than k, then prepend zero bytes to fill up ed\n // FIXME: hex conversion inefficient, get BigInteger w/byte strings\n var yhex = y.toString(16);\n var ed = forge.util.createBuffer();\n var zeros = k - Math.ceil(yhex.length / 2);\n while(zeros > 0) {\n ed.putByte(0x00);\n --zeros;\n }\n ed.putBytes(forge.util.hexToBytes(yhex));\n return ed.getBytes();\n};\n\n/**\n * NOTE: THIS METHOD IS DEPRECATED, use 'decrypt' on a private key object or\n * 'verify' on a public key object instead.\n *\n * Performs RSA decryption.\n *\n * The parameter ml controls whether to apply PKCS#1 v1.5 padding\n * or not. Set ml = false to disable padding removal completely\n * (in order to handle e.g. 
EMSA-PSS later on) and simply pass back\n * the RSA encryption block.\n *\n * @param ed the encrypted data to decrypt in as a byte string.\n * @param key the RSA key to use.\n * @param pub true for a public key operation, false for private.\n * @param ml the message length, if known, false to disable padding.\n *\n * @return the decrypted message as a byte string.\n */\npki.rsa.decrypt = function(ed, key, pub, ml) {\n // get the length of the modulus in bytes\n var k = Math.ceil(key.n.bitLength() / 8);\n\n // error if the length of the encrypted data ED is not k\n if(ed.length !== k) {\n var error = new Error('Encrypted message length is invalid.');\n error.length = ed.length;\n error.expected = k;\n throw error;\n }\n\n // convert encrypted data into a big integer\n // FIXME: hex conversion inefficient, get BigInteger w/byte strings\n var y = new BigInteger(forge.util.createBuffer(ed).toHex(), 16);\n\n // y must be less than the modulus or it wasn't the result of\n // a previous mod operation (encryption) using that modulus\n if(y.compareTo(key.n) >= 0) {\n throw new Error('Encrypted message is invalid.');\n }\n\n // do RSA decryption\n var x = _modPow(y, key, pub);\n\n // create the encryption block, if x is shorter in bytes than k, then\n // prepend zero bytes to fill up eb\n // FIXME: hex conversion inefficient, get BigInteger w/byte strings\n var xhex = x.toString(16);\n var eb = forge.util.createBuffer();\n var zeros = k - Math.ceil(xhex.length / 2);\n while(zeros > 0) {\n eb.putByte(0x00);\n --zeros;\n }\n eb.putBytes(forge.util.hexToBytes(xhex));\n\n if(ml !== false) {\n // legacy, default to PKCS#1 v1.5 padding\n return _decodePkcs1_v1_5(eb.getBytes(), key, pub);\n }\n\n // return message\n return eb.getBytes();\n};\n\n/**\n * Creates an RSA key-pair generation state object. It is used to allow\n * key-generation to be performed in steps. 
It also allows for a UI to\n * display progress updates.\n *\n * @param bits the size for the private key in bits, defaults to 2048.\n * @param e the public exponent to use, defaults to 65537 (0x10001).\n * @param [options] the options to use.\n * prng a custom crypto-secure pseudo-random number generator to use,\n * that must define \"getBytesSync\".\n * algorithm the algorithm to use (default: 'PRIMEINC').\n *\n * @return the state object to use to generate the key-pair.\n */\npki.rsa.createKeyPairGenerationState = function(bits, e, options) {\n // TODO: migrate step-based prime generation code to forge.prime\n\n // set default bits\n if(typeof(bits) === 'string') {\n bits = parseInt(bits, 10);\n }\n bits = bits || 2048;\n\n // create prng with api that matches BigInteger secure random\n options = options || {};\n var prng = options.prng || forge.random;\n var rng = {\n // x is an array to fill with bytes\n nextBytes: function(x) {\n var b = prng.getBytesSync(x.length);\n for(var i = 0; i < x.length; ++i) {\n x[i] = b.charCodeAt(i);\n }\n }\n };\n\n var algorithm = options.algorithm || 'PRIMEINC';\n\n // create PRIMEINC algorithm state\n var rval;\n if(algorithm === 'PRIMEINC') {\n rval = {\n algorithm: algorithm,\n state: 0,\n bits: bits,\n rng: rng,\n eInt: e || 65537,\n e: new BigInteger(null),\n p: null,\n q: null,\n qBits: bits >> 1,\n pBits: bits - (bits >> 1),\n pqState: 0,\n num: null,\n keys: null\n };\n rval.e.fromInt(rval.eInt);\n } else {\n throw new Error('Invalid key generation algorithm: ' + algorithm);\n }\n\n return rval;\n};\n\n/**\n * Attempts to runs the key-generation algorithm for at most n seconds\n * (approximately) using the given state. When key-generation has completed,\n * the keys will be stored in state.keys.\n *\n * To use this function to update a UI while generating a key or to prevent\n * causing browser lockups/warnings, set \"n\" to a value other than 0. 
A\n * simple pattern for generating a key and showing a progress indicator is:\n *\n * var state = pki.rsa.createKeyPairGenerationState(2048);\n * var step = function() {\n * // step key-generation, run algorithm for 100 ms, repeat\n * if(!forge.pki.rsa.stepKeyPairGenerationState(state, 100)) {\n * setTimeout(step, 1);\n * } else {\n * // key-generation complete\n * // TODO: turn off progress indicator here\n * // TODO: use the generated key-pair in \"state.keys\"\n * }\n * };\n * // TODO: turn on progress indicator here\n * setTimeout(step, 0);\n *\n * @param state the state to use.\n * @param n the maximum number of milliseconds to run the algorithm for, 0\n * to run the algorithm to completion.\n *\n * @return true if the key-generation completed, false if not.\n */\npki.rsa.stepKeyPairGenerationState = function(state, n) {\n // set default algorithm if not set\n if(!('algorithm' in state)) {\n state.algorithm = 'PRIMEINC';\n }\n\n // TODO: migrate step-based prime generation code to forge.prime\n // TODO: abstract as PRIMEINC algorithm\n\n // do key generation (based on Tom Wu's rsa.js, see jsbn.js license)\n // with some minor optimizations and designed to run in steps\n\n // local state vars\n var THIRTY = new BigInteger(null);\n THIRTY.fromInt(30);\n var deltaIdx = 0;\n var op_or = function(x, y) {return x | y;};\n\n // keep stepping until time limit is reached or done\n var t1 = +new Date();\n var t2;\n var total = 0;\n while(state.keys === null && (n <= 0 || total < n)) {\n // generate p or q\n if(state.state === 0) {\n /* Note: All primes are of the form:\n\n 30k+i, for i < 30 and gcd(30, i)=1, where there are 8 values for i\n\n When we generate a random number, we always align it at 30k + 1. Each\n time the number is determined not to be prime we add to get to the\n next 'i', eg: if the number was at 30k + 1 we add 6. */\n var bits = (state.p === null) ? state.pBits : state.qBits;\n var bits1 = bits - 1;\n\n // get a random number\n if(state.pqState === 0) {\n state.num = new BigInteger(bits, state.rng);\n // force MSB set\n if(!state.num.testBit(bits1)) {\n state.num.bitwiseTo(\n BigInteger.ONE.shiftLeft(bits1), op_or, state.num);\n }\n // align number on 30k+1 boundary\n state.num.dAddOffset(31 - state.num.mod(THIRTY).byteValue(), 0);\n deltaIdx = 0;\n\n ++state.pqState;\n } else if(state.pqState === 1) {\n // try to make the number a prime\n if(state.num.bitLength() > bits) {\n // overflow, try again\n state.pqState = 0;\n // do primality test\n } else if(state.num.isProbablePrime(\n _getMillerRabinTests(state.num.bitLength()))) {\n ++state.pqState;\n } else {\n // get next potential prime\n state.num.dAddOffset(GCD_30_DELTA[deltaIdx++ % 8], 0);\n }\n } else if(state.pqState === 2) {\n // ensure number is coprime with e\n state.pqState =\n (state.num.subtract(BigInteger.ONE).gcd(state.e)\n .compareTo(BigInteger.ONE) === 0) ? 
3 : 0;\n } else if(state.pqState === 3) {\n // store p or q\n state.pqState = 0;\n if(state.p === null) {\n state.p = state.num;\n } else {\n state.q = state.num;\n }\n\n // advance state if both p and q are ready\n if(state.p !== null && state.q !== null) {\n ++state.state;\n }\n state.num = null;\n }\n } else if(state.state === 1) {\n // ensure p is larger than q (swap them if not)\n if(state.p.compareTo(state.q) < 0) {\n state.num = state.p;\n state.p = state.q;\n state.q = state.num;\n }\n ++state.state;\n } else if(state.state === 2) {\n // compute phi: (p - 1)(q - 1) (Euler's totient function)\n state.p1 = state.p.subtract(BigInteger.ONE);\n state.q1 = state.q.subtract(BigInteger.ONE);\n state.phi = state.p1.multiply(state.q1);\n ++state.state;\n } else if(state.state === 3) {\n // ensure e and phi are coprime\n if(state.phi.gcd(state.e).compareTo(BigInteger.ONE) === 0) {\n // phi and e are coprime, advance\n ++state.state;\n } else {\n // phi and e aren't coprime, so generate a new p and q\n state.p = null;\n state.q = null;\n state.state = 0;\n }\n } else if(state.state === 4) {\n // create n, ensure n is has the right number of bits\n state.n = state.p.multiply(state.q);\n\n // ensure n is right number of bits\n if(state.n.bitLength() === state.bits) {\n // success, advance\n ++state.state;\n } else {\n // failed, get new q\n state.q = null;\n state.state = 0;\n }\n } else if(state.state === 5) {\n // set keys\n var d = state.e.modInverse(state.phi);\n state.keys = {\n privateKey: pki.rsa.setPrivateKey(\n state.n, state.e, d, state.p, state.q,\n d.mod(state.p1), d.mod(state.q1),\n state.q.modInverse(state.p)),\n publicKey: pki.rsa.setPublicKey(state.n, state.e)\n };\n }\n\n // update timing\n t2 = +new Date();\n total += t2 - t1;\n t1 = t2;\n }\n\n return state.keys !== null;\n};\n\n/**\n * Generates an RSA public-private key pair in a single call.\n *\n * To generate a key-pair in steps (to allow for progress updates and to\n * prevent blocking or warnings in slow browsers) then use the key-pair\n * generation state functions.\n *\n * To generate a key-pair asynchronously (either through web-workers, if\n * available, or by breaking up the work on the main thread), pass a\n * callback function.\n *\n * @param [bits] the size for the private key in bits, defaults to 2048.\n * @param [e] the public exponent to use, defaults to 65537.\n * @param [options] options for key-pair generation, if given then 'bits'\n * and 'e' must *not* be given:\n * bits the size for the private key in bits, (default: 2048).\n * e the public exponent to use, (default: 65537 (0x10001)).\n * workerScript the worker script URL.\n * workers the number of web workers (if supported) to use,\n * (default: 2).\n * workLoad the size of the work load, ie: number of possible prime\n * numbers for each web worker to check per work assignment,\n * (default: 100).\n * prng a custom crypto-secure pseudo-random number generator to use,\n * that must define \"getBytesSync\". 
Disables use of native APIs.\n * algorithm the algorithm to use (default: 'PRIMEINC').\n * @param [callback(err, keypair)] called once the operation completes.\n *\n * @return an object with privateKey and publicKey properties.\n */\npki.rsa.generateKeyPair = function(bits, e, options, callback) {\n // (bits), (options), (callback)\n if(arguments.length === 1) {\n if(typeof bits === 'object') {\n options = bits;\n bits = undefined;\n } else if(typeof bits === 'function') {\n callback = bits;\n bits = undefined;\n }\n } else if(arguments.length === 2) {\n // (bits, e), (bits, options), (bits, callback), (options, callback)\n if(typeof bits === 'number') {\n if(typeof e === 'function') {\n callback = e;\n e = undefined;\n } else if(typeof e !== 'number') {\n options = e;\n e = undefined;\n }\n } else {\n options = bits;\n callback = e;\n bits = undefined;\n e = undefined;\n }\n } else if(arguments.length === 3) {\n // (bits, e, options), (bits, e, callback), (bits, options, callback)\n if(typeof e === 'number') {\n if(typeof options === 'function') {\n callback = options;\n options = undefined;\n }\n } else {\n callback = options;\n options = e;\n e = undefined;\n }\n }\n options = options || {};\n if(bits === undefined) {\n bits = options.bits || 2048;\n }\n if(e === undefined) {\n e = options.e || 0x10001;\n }\n\n // use native code if permitted, available, and parameters are acceptable\n if(!forge.options.usePureJavaScript && !options.prng &&\n bits >= 256 && bits <= 16384 && (e === 0x10001 || e === 3)) {\n if(callback) {\n // try native async\n if(_detectNodeCrypto('generateKeyPair')) {\n return _crypto.generateKeyPair('rsa', {\n modulusLength: bits,\n publicExponent: e,\n publicKeyEncoding: {\n type: 'spki',\n format: 'pem'\n },\n privateKeyEncoding: {\n type: 'pkcs8',\n format: 'pem'\n }\n }, function(err, pub, priv) {\n if(err) {\n return callback(err);\n }\n callback(null, {\n privateKey: pki.privateKeyFromPem(priv),\n publicKey: pki.publicKeyFromPem(pub)\n });\n });\n }\n if(_detectSubtleCrypto('generateKey') &&\n _detectSubtleCrypto('exportKey')) {\n // use standard native generateKey\n return util.globalScope.crypto.subtle.generateKey({\n name: 'RSASSA-PKCS1-v1_5',\n modulusLength: bits,\n publicExponent: _intToUint8Array(e),\n hash: {name: 'SHA-256'}\n }, true /* key can be exported*/, ['sign', 'verify'])\n .then(function(pair) {\n return util.globalScope.crypto.subtle.exportKey(\n 'pkcs8', pair.privateKey);\n // avoiding catch(function(err) {...}) to support IE <= 8\n }).then(undefined, function(err) {\n callback(err);\n }).then(function(pkcs8) {\n if(pkcs8) {\n var privateKey = pki.privateKeyFromAsn1(\n asn1.fromDer(forge.util.createBuffer(pkcs8)));\n callback(null, {\n privateKey: privateKey,\n publicKey: pki.setRsaPublicKey(privateKey.n, privateKey.e)\n });\n }\n });\n }\n if(_detectSubtleMsCrypto('generateKey') &&\n _detectSubtleMsCrypto('exportKey')) {\n var genOp = util.globalScope.msCrypto.subtle.generateKey({\n name: 'RSASSA-PKCS1-v1_5',\n modulusLength: bits,\n publicExponent: _intToUint8Array(e),\n hash: {name: 'SHA-256'}\n }, true /* key can be exported*/, ['sign', 'verify']);\n genOp.oncomplete = function(e) {\n var pair = e.target.result;\n var exportOp = util.globalScope.msCrypto.subtle.exportKey(\n 'pkcs8', pair.privateKey);\n exportOp.oncomplete = function(e) {\n var pkcs8 = e.target.result;\n var privateKey = pki.privateKeyFromAsn1(\n asn1.fromDer(forge.util.createBuffer(pkcs8)));\n callback(null, {\n privateKey: privateKey,\n publicKey: 
pki.setRsaPublicKey(privateKey.n, privateKey.e)\n });\n };\n exportOp.onerror = function(err) {\n callback(err);\n };\n };\n genOp.onerror = function(err) {\n callback(err);\n };\n return;\n }\n } else {\n // try native sync\n if(_detectNodeCrypto('generateKeyPairSync')) {\n var keypair = _crypto.generateKeyPairSync('rsa', {\n modulusLength: bits,\n publicExponent: e,\n publicKeyEncoding: {\n type: 'spki',\n format: 'pem'\n },\n privateKeyEncoding: {\n type: 'pkcs8',\n format: 'pem'\n }\n });\n return {\n privateKey: pki.privateKeyFromPem(keypair.privateKey),\n publicKey: pki.publicKeyFromPem(keypair.publicKey)\n };\n }\n }\n }\n\n // use JavaScript implementation\n var state = pki.rsa.createKeyPairGenerationState(bits, e, options);\n if(!callback) {\n pki.rsa.stepKeyPairGenerationState(state, 0);\n return state.keys;\n }\n _generateKeyPair(state, options, callback);\n};\n\n/**\n * Sets an RSA public key from BigIntegers modulus and exponent.\n *\n * @param n the modulus.\n * @param e the exponent.\n *\n * @return the public key.\n */\npki.setRsaPublicKey = pki.rsa.setPublicKey = function(n, e) {\n var key = {\n n: n,\n e: e\n };\n\n /**\n * Encrypts the given data with this public key. Newer applications\n * should use the 'RSA-OAEP' decryption scheme, 'RSAES-PKCS1-V1_5' is for\n * legacy applications.\n *\n * @param data the byte string to encrypt.\n * @param scheme the encryption scheme to use:\n * 'RSAES-PKCS1-V1_5' (default),\n * 'RSA-OAEP',\n * 'RAW', 'NONE', or null to perform raw RSA encryption,\n * an object with an 'encode' property set to a function\n * with the signature 'function(data, key)' that returns\n * a binary-encoded string representing the encoded data.\n * @param schemeOptions any scheme-specific options.\n *\n * @return the encrypted byte string.\n */\n key.encrypt = function(data, scheme, schemeOptions) {\n if(typeof scheme === 'string') {\n scheme = scheme.toUpperCase();\n } else if(scheme === undefined) {\n scheme = 'RSAES-PKCS1-V1_5';\n }\n\n if(scheme === 'RSAES-PKCS1-V1_5') {\n scheme = {\n encode: function(m, key, pub) {\n return _encodePkcs1_v1_5(m, key, 0x02).getBytes();\n }\n };\n } else if(scheme === 'RSA-OAEP' || scheme === 'RSAES-OAEP') {\n scheme = {\n encode: function(m, key) {\n return forge.pkcs1.encode_rsa_oaep(key, m, schemeOptions);\n }\n };\n } else if(['RAW', 'NONE', 'NULL', null].indexOf(scheme) !== -1) {\n scheme = {encode: function(e) {return e;}};\n } else if(typeof scheme === 'string') {\n throw new Error('Unsupported encryption scheme: \"' + scheme + '\".');\n }\n\n // do scheme-based encoding then rsa encryption\n var e = scheme.encode(data, key, true);\n return pki.rsa.encrypt(e, key, true);\n };\n\n /**\n * Verifies the given signature against the given digest.\n *\n * PKCS#1 supports multiple (currently two) signature schemes:\n * RSASSA-PKCS1-V1_5 and RSASSA-PSS.\n *\n * By default this implementation uses the \"old scheme\", i.e.\n * RSASSA-PKCS1-V1_5, in which case once RSA-decrypted, the\n * signature is an OCTET STRING that holds a DigestInfo.\n *\n * DigestInfo ::= SEQUENCE {\n * digestAlgorithm DigestAlgorithmIdentifier,\n * digest Digest\n * }\n * DigestAlgorithmIdentifier ::= AlgorithmIdentifier\n * Digest ::= OCTET STRING\n *\n * To perform PSS signature verification, provide an instance\n * of Forge PSS object as the scheme parameter.\n *\n * @param digest the message digest hash to compare against the signature,\n * as a binary-encoded string.\n * @param signature the signature to verify, as a binary-encoded string.\n * 
@param scheme signature verification scheme to use:\n * 'RSASSA-PKCS1-V1_5' or undefined for RSASSA PKCS#1 v1.5,\n * a Forge PSS object for RSASSA-PSS,\n * 'NONE' or null for none, DigestInfo will not be expected, but\n * PKCS#1 v1.5 padding will still be used.\n * @param options optional verify options\n * _parseAllDigestBytes testing flag to control parsing of all\n * digest bytes. Unsupported and not for general usage.\n * (default: true)\n *\n * @return true if the signature was verified, false if not.\n */\n key.verify = function(digest, signature, scheme, options) {\n if(typeof scheme === 'string') {\n scheme = scheme.toUpperCase();\n } else if(scheme === undefined) {\n scheme = 'RSASSA-PKCS1-V1_5';\n }\n if(options === undefined) {\n options = {\n _parseAllDigestBytes: true\n };\n }\n if(!('_parseAllDigestBytes' in options)) {\n options._parseAllDigestBytes = true;\n }\n\n if(scheme === 'RSASSA-PKCS1-V1_5') {\n scheme = {\n verify: function(digest, d) {\n // remove padding\n d = _decodePkcs1_v1_5(d, key, true);\n // d is ASN.1 BER-encoded DigestInfo\n var obj = asn1.fromDer(d, {\n parseAllBytes: options._parseAllDigestBytes\n });\n\n // validate DigestInfo\n var capture = {};\n var errors = [];\n if(!asn1.validate(obj, digestInfoValidator, capture, errors)) {\n var error = new Error(\n 'ASN.1 object does not contain a valid RSASSA-PKCS1-v1_5 ' +\n 'DigestInfo value.');\n error.errors = errors;\n throw error;\n }\n // check hash algorithm identifier\n // see PKCS1-v1-5DigestAlgorithms in RFC 8017\n // FIXME: add support to vaidator for strict value choices\n var oid = asn1.derToOid(capture.algorithmIdentifier);\n if(!(oid === forge.oids.md2 ||\n oid === forge.oids.md5 ||\n oid === forge.oids.sha1 ||\n oid === forge.oids.sha224 ||\n oid === forge.oids.sha256 ||\n oid === forge.oids.sha384 ||\n oid === forge.oids.sha512 ||\n oid === forge.oids['sha512-224'] ||\n oid === forge.oids['sha512-256'])) {\n var error = new Error(\n 'Unknown RSASSA-PKCS1-v1_5 DigestAlgorithm identifier.');\n error.oid = oid;\n throw error;\n }\n\n // special check for md2 and md5 that NULL parameters exist\n if(oid === forge.oids.md2 || oid === forge.oids.md5) {\n if(!('parameters' in capture)) {\n throw new Error(\n 'ASN.1 object does not contain a valid RSASSA-PKCS1-v1_5 ' +\n 'DigestInfo value. 
' +\n 'Missing algorithm identifer NULL parameters.');\n }\n }\n\n // compare the given digest to the decrypted one\n return digest === capture.digest;\n }\n };\n } else if(scheme === 'NONE' || scheme === 'NULL' || scheme === null) {\n scheme = {\n verify: function(digest, d) {\n // remove padding\n d = _decodePkcs1_v1_5(d, key, true);\n return digest === d;\n }\n };\n }\n\n // do rsa decryption w/o any decoding, then verify -- which does decoding\n var d = pki.rsa.decrypt(signature, key, true, false);\n return scheme.verify(digest, d, key.n.bitLength());\n };\n\n return key;\n};\n\n/**\n * Sets an RSA private key from BigIntegers modulus, exponent, primes,\n * prime exponents, and modular multiplicative inverse.\n *\n * @param n the modulus.\n * @param e the public exponent.\n * @param d the private exponent ((inverse of e) mod n).\n * @param p the first prime.\n * @param q the second prime.\n * @param dP exponent1 (d mod (p-1)).\n * @param dQ exponent2 (d mod (q-1)).\n * @param qInv ((inverse of q) mod p)\n *\n * @return the private key.\n */\npki.setRsaPrivateKey = pki.rsa.setPrivateKey = function(\n n, e, d, p, q, dP, dQ, qInv) {\n var key = {\n n: n,\n e: e,\n d: d,\n p: p,\n q: q,\n dP: dP,\n dQ: dQ,\n qInv: qInv\n };\n\n /**\n * Decrypts the given data with this private key. The decryption scheme\n * must match the one used to encrypt the data.\n *\n * @param data the byte string to decrypt.\n * @param scheme the decryption scheme to use:\n * 'RSAES-PKCS1-V1_5' (default),\n * 'RSA-OAEP',\n * 'RAW', 'NONE', or null to perform raw RSA decryption.\n * @param schemeOptions any scheme-specific options.\n *\n * @return the decrypted byte string.\n */\n key.decrypt = function(data, scheme, schemeOptions) {\n if(typeof scheme === 'string') {\n scheme = scheme.toUpperCase();\n } else if(scheme === undefined) {\n scheme = 'RSAES-PKCS1-V1_5';\n }\n\n // do rsa decryption w/o any decoding\n var d = pki.rsa.decrypt(data, key, false, false);\n\n if(scheme === 'RSAES-PKCS1-V1_5') {\n scheme = {decode: _decodePkcs1_v1_5};\n } else if(scheme === 'RSA-OAEP' || scheme === 'RSAES-OAEP') {\n scheme = {\n decode: function(d, key) {\n return forge.pkcs1.decode_rsa_oaep(key, d, schemeOptions);\n }\n };\n } else if(['RAW', 'NONE', 'NULL', null].indexOf(scheme) !== -1) {\n scheme = {decode: function(d) {return d;}};\n } else {\n throw new Error('Unsupported encryption scheme: \"' + scheme + '\".');\n }\n\n // decode according to scheme\n return scheme.decode(d, key, false);\n };\n\n /**\n * Signs the given digest, producing a signature.\n *\n * PKCS#1 supports multiple (currently two) signature schemes:\n * RSASSA-PKCS1-V1_5 and RSASSA-PSS.\n *\n * By default this implementation uses the \"old scheme\", i.e.\n * RSASSA-PKCS1-V1_5. In order to generate a PSS signature, provide\n * an instance of Forge PSS object as the scheme parameter.\n *\n * @param md the message digest object with the hash to sign.\n * @param scheme the signature scheme to use:\n * 'RSASSA-PKCS1-V1_5' or undefined for RSASSA PKCS#1 v1.5,\n * a Forge PSS object for RSASSA-PSS,\n * 'NONE' or null for none, DigestInfo will not be used but\n * PKCS#1 v1.5 padding will still be used.\n *\n * @return the signature as a byte string.\n */\n key.sign = function(md, scheme) {\n /* Note: The internal implementation of RSA operations is being\n transitioned away from a PKCS#1 v1.5 hard-coded scheme. Some legacy\n code like the use of an encoding block identifier 'bt' will eventually\n be removed. 
*/\n\n // private key operation\n var bt = false;\n\n if(typeof scheme === 'string') {\n scheme = scheme.toUpperCase();\n }\n\n if(scheme === undefined || scheme === 'RSASSA-PKCS1-V1_5') {\n scheme = {encode: emsaPkcs1v15encode};\n bt = 0x01;\n } else if(scheme === 'NONE' || scheme === 'NULL' || scheme === null) {\n scheme = {encode: function() {return md;}};\n bt = 0x01;\n }\n\n // encode and then encrypt\n var d = scheme.encode(md, key.n.bitLength());\n return pki.rsa.encrypt(d, key, bt);\n };\n\n return key;\n};\n\n/**\n * Wraps an RSAPrivateKey ASN.1 object in an ASN.1 PrivateKeyInfo object.\n *\n * @param rsaKey the ASN.1 RSAPrivateKey.\n *\n * @return the ASN.1 PrivateKeyInfo.\n */\npki.wrapRsaPrivateKey = function(rsaKey) {\n // PrivateKeyInfo\n return asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [\n // version (0)\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.INTEGER, false,\n asn1.integerToDer(0).getBytes()),\n // privateKeyAlgorithm\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [\n asn1.create(\n asn1.Class.UNIVERSAL, asn1.Type.OID, false,\n asn1.oidToDer(pki.oids.rsaEncryption).getBytes()),\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.NULL, false, '')\n ]),\n // PrivateKey\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OCTETSTRING, false,\n asn1.toDer(rsaKey).getBytes())\n ]);\n};\n\n/**\n * Converts a private key from an ASN.1 object.\n *\n * @param obj the ASN.1 representation of a PrivateKeyInfo containing an\n * RSAPrivateKey or an RSAPrivateKey.\n *\n * @return the private key.\n */\npki.privateKeyFromAsn1 = function(obj) {\n // get PrivateKeyInfo\n var capture = {};\n var errors = [];\n if(asn1.validate(obj, privateKeyValidator, capture, errors)) {\n obj = asn1.fromDer(forge.util.createBuffer(capture.privateKey));\n }\n\n // get RSAPrivateKey\n capture = {};\n errors = [];\n if(!asn1.validate(obj, rsaPrivateKeyValidator, capture, errors)) {\n var error = new Error('Cannot read private key. 
' +\n 'ASN.1 object does not contain an RSAPrivateKey.');\n error.errors = errors;\n throw error;\n }\n\n // Note: Version is currently ignored.\n // capture.privateKeyVersion\n // FIXME: inefficient, get a BigInteger that uses byte strings\n var n, e, d, p, q, dP, dQ, qInv;\n n = forge.util.createBuffer(capture.privateKeyModulus).toHex();\n e = forge.util.createBuffer(capture.privateKeyPublicExponent).toHex();\n d = forge.util.createBuffer(capture.privateKeyPrivateExponent).toHex();\n p = forge.util.createBuffer(capture.privateKeyPrime1).toHex();\n q = forge.util.createBuffer(capture.privateKeyPrime2).toHex();\n dP = forge.util.createBuffer(capture.privateKeyExponent1).toHex();\n dQ = forge.util.createBuffer(capture.privateKeyExponent2).toHex();\n qInv = forge.util.createBuffer(capture.privateKeyCoefficient).toHex();\n\n // set private key\n return pki.setRsaPrivateKey(\n new BigInteger(n, 16),\n new BigInteger(e, 16),\n new BigInteger(d, 16),\n new BigInteger(p, 16),\n new BigInteger(q, 16),\n new BigInteger(dP, 16),\n new BigInteger(dQ, 16),\n new BigInteger(qInv, 16));\n};\n\n/**\n * Converts a private key to an ASN.1 RSAPrivateKey.\n *\n * @param key the private key.\n *\n * @return the ASN.1 representation of an RSAPrivateKey.\n */\npki.privateKeyToAsn1 = pki.privateKeyToRSAPrivateKey = function(key) {\n // RSAPrivateKey\n return asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [\n // version (0 = only 2 primes, 1 multiple primes)\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.INTEGER, false,\n asn1.integerToDer(0).getBytes()),\n // modulus (n)\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.INTEGER, false,\n _bnToBytes(key.n)),\n // publicExponent (e)\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.INTEGER, false,\n _bnToBytes(key.e)),\n // privateExponent (d)\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.INTEGER, false,\n _bnToBytes(key.d)),\n // privateKeyPrime1 (p)\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.INTEGER, false,\n _bnToBytes(key.p)),\n // privateKeyPrime2 (q)\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.INTEGER, false,\n _bnToBytes(key.q)),\n // privateKeyExponent1 (dP)\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.INTEGER, false,\n _bnToBytes(key.dP)),\n // privateKeyExponent2 (dQ)\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.INTEGER, false,\n _bnToBytes(key.dQ)),\n // coefficient (qInv)\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.INTEGER, false,\n _bnToBytes(key.qInv))\n ]);\n};\n\n/**\n * Converts a public key from an ASN.1 SubjectPublicKeyInfo or RSAPublicKey.\n *\n * @param obj the asn1 representation of a SubjectPublicKeyInfo or RSAPublicKey.\n *\n * @return the public key.\n */\npki.publicKeyFromAsn1 = function(obj) {\n // get SubjectPublicKeyInfo\n var capture = {};\n var errors = [];\n if(asn1.validate(obj, publicKeyValidator, capture, errors)) {\n // get oid\n var oid = asn1.derToOid(capture.publicKeyOid);\n if(oid !== pki.oids.rsaEncryption) {\n var error = new Error('Cannot read public key. Unknown OID.');\n error.oid = oid;\n throw error;\n }\n obj = capture.rsaPublicKey;\n }\n\n // get RSA params\n errors = [];\n if(!asn1.validate(obj, rsaPublicKeyValidator, capture, errors)) {\n var error = new Error('Cannot read public key. 
' +\n 'ASN.1 object does not contain an RSAPublicKey.');\n error.errors = errors;\n throw error;\n }\n\n // FIXME: inefficient, get a BigInteger that uses byte strings\n var n = forge.util.createBuffer(capture.publicKeyModulus).toHex();\n var e = forge.util.createBuffer(capture.publicKeyExponent).toHex();\n\n // set public key\n return pki.setRsaPublicKey(\n new BigInteger(n, 16),\n new BigInteger(e, 16));\n};\n\n/**\n * Converts a public key to an ASN.1 SubjectPublicKeyInfo.\n *\n * @param key the public key.\n *\n * @return the asn1 representation of a SubjectPublicKeyInfo.\n */\npki.publicKeyToAsn1 = pki.publicKeyToSubjectPublicKeyInfo = function(key) {\n // SubjectPublicKeyInfo\n return asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [\n // AlgorithmIdentifier\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [\n // algorithm\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OID, false,\n asn1.oidToDer(pki.oids.rsaEncryption).getBytes()),\n // parameters (null)\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.NULL, false, '')\n ]),\n // subjectPublicKey\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.BITSTRING, false, [\n pki.publicKeyToRSAPublicKey(key)\n ])\n ]);\n};\n\n/**\n * Converts a public key to an ASN.1 RSAPublicKey.\n *\n * @param key the public key.\n *\n * @return the asn1 representation of a RSAPublicKey.\n */\npki.publicKeyToRSAPublicKey = function(key) {\n // RSAPublicKey\n return asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [\n // modulus (n)\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.INTEGER, false,\n _bnToBytes(key.n)),\n // publicExponent (e)\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.INTEGER, false,\n _bnToBytes(key.e))\n ]);\n};\n\n/**\n * Encodes a message using PKCS#1 v1.5 padding.\n *\n * @param m the message to encode.\n * @param key the RSA key to use.\n * @param bt the block type to use, i.e. either 0x01 (for signing) or 0x02\n * (for encryption).\n *\n * @return the padded byte buffer.\n */\nfunction _encodePkcs1_v1_5(m, key, bt) {\n var eb = forge.util.createBuffer();\n\n // get the length of the modulus in bytes\n var k = Math.ceil(key.n.bitLength() / 8);\n\n /* use PKCS#1 v1.5 padding */\n if(m.length > (k - 11)) {\n var error = new Error('Message is too long for PKCS#1 v1.5 padding.');\n error.length = m.length;\n error.max = k - 11;\n throw error;\n }\n\n /* A block type BT, a padding string PS, and the data D shall be\n formatted into an octet string EB, the encryption block:\n\n EB = 00 || BT || PS || 00 || D\n\n The block type BT shall be a single octet indicating the structure of\n the encryption block. For this version of the document it shall have\n value 00, 01, or 02. For a private-key operation, the block type\n shall be 00 or 01. For a public-key operation, it shall be 02.\n\n The padding string PS shall consist of k-3-||D|| octets. For block\n type 00, the octets shall have value 00; for block type 01, they\n shall have value FF; and for block type 02, they shall be\n pseudorandomly generated and nonzero. This makes the length of the\n encryption block EB equal to k. */\n\n // build the encryption block\n eb.putByte(0x00);\n eb.putByte(bt);\n\n // create the padding\n var padNum = k - 3 - m.length;\n var padByte;\n // private key op\n if(bt === 0x00 || bt === 0x01) {\n padByte = (bt === 0x00) ? 
0x00 : 0xFF;\n for(var i = 0; i < padNum; ++i) {\n eb.putByte(padByte);\n }\n } else {\n // public key op\n // pad with random non-zero values\n while(padNum > 0) {\n var numZeros = 0;\n var padBytes = forge.random.getBytes(padNum);\n for(var i = 0; i < padNum; ++i) {\n padByte = padBytes.charCodeAt(i);\n if(padByte === 0) {\n ++numZeros;\n } else {\n eb.putByte(padByte);\n }\n }\n padNum = numZeros;\n }\n }\n\n // zero followed by message\n eb.putByte(0x00);\n eb.putBytes(m);\n\n return eb;\n}\n\n/**\n * Decodes a message using PKCS#1 v1.5 padding.\n *\n * @param em the message to decode.\n * @param key the RSA key to use.\n * @param pub true if the key is a public key, false if it is private.\n * @param ml the message length, if specified.\n *\n * @return the decoded bytes.\n */\nfunction _decodePkcs1_v1_5(em, key, pub, ml) {\n // get the length of the modulus in bytes\n var k = Math.ceil(key.n.bitLength() / 8);\n\n /* It is an error if any of the following conditions occurs:\n\n 1. The encryption block EB cannot be parsed unambiguously.\n 2. The padding string PS consists of fewer than eight octets\n or is inconsisent with the block type BT.\n 3. The decryption process is a public-key operation and the block\n type BT is not 00 or 01, or the decryption process is a\n private-key operation and the block type is not 02.\n */\n\n // parse the encryption block\n var eb = forge.util.createBuffer(em);\n var first = eb.getByte();\n var bt = eb.getByte();\n if(first !== 0x00 ||\n (pub && bt !== 0x00 && bt !== 0x01) ||\n (!pub && bt != 0x02) ||\n (pub && bt === 0x00 && typeof(ml) === 'undefined')) {\n throw new Error('Encryption block is invalid.');\n }\n\n var padNum = 0;\n if(bt === 0x00) {\n // check all padding bytes for 0x00\n padNum = k - 3 - ml;\n for(var i = 0; i < padNum; ++i) {\n if(eb.getByte() !== 0x00) {\n throw new Error('Encryption block is invalid.');\n }\n }\n } else if(bt === 0x01) {\n // find the first byte that isn't 0xFF, should be after all padding\n padNum = 0;\n while(eb.length() > 1) {\n if(eb.getByte() !== 0xFF) {\n --eb.read;\n break;\n }\n ++padNum;\n }\n } else if(bt === 0x02) {\n // look for 0x00 byte\n padNum = 0;\n while(eb.length() > 1) {\n if(eb.getByte() === 0x00) {\n --eb.read;\n break;\n }\n ++padNum;\n }\n }\n\n // zero must be 0x00 and padNum must be (k - 3 - message length)\n var zero = eb.getByte();\n if(zero !== 0x00 || padNum !== (k - 3 - eb.length())) {\n throw new Error('Encryption block is invalid.');\n }\n\n return eb.getBytes();\n}\n\n/**\n * Runs the key-generation algorithm asynchronously, either in the background\n * via Web Workers, or using the main thread and setImmediate.\n *\n * @param state the key-pair generation state.\n * @param [options] options for key-pair generation:\n * workerScript the worker script URL.\n * workers the number of web workers (if supported) to use,\n * (default: 2, -1 to use estimated cores minus one).\n * workLoad the size of the work load, ie: number of possible prime\n * numbers for each web worker to check per work assignment,\n * (default: 100).\n * @param callback(err, keypair) called once the operation completes.\n */\nfunction _generateKeyPair(state, options, callback) {\n if(typeof options === 'function') {\n callback = options;\n options = {};\n }\n options = options || {};\n\n var opts = {\n algorithm: {\n name: options.algorithm || 'PRIMEINC',\n options: {\n workers: options.workers || 2,\n workLoad: options.workLoad || 100,\n workerScript: options.workerScript\n }\n }\n };\n if('prng' in options) {\n 
opts.prng = options.prng;\n }\n\n generate();\n\n function generate() {\n // find p and then q (done in series to simplify)\n getPrime(state.pBits, function(err, num) {\n if(err) {\n return callback(err);\n }\n state.p = num;\n if(state.q !== null) {\n return finish(err, state.q);\n }\n getPrime(state.qBits, finish);\n });\n }\n\n function getPrime(bits, callback) {\n forge.prime.generateProbablePrime(bits, opts, callback);\n }\n\n function finish(err, num) {\n if(err) {\n return callback(err);\n }\n\n // set q\n state.q = num;\n\n // ensure p is larger than q (swap them if not)\n if(state.p.compareTo(state.q) < 0) {\n var tmp = state.p;\n state.p = state.q;\n state.q = tmp;\n }\n\n // ensure p is coprime with e\n if(state.p.subtract(BigInteger.ONE).gcd(state.e)\n .compareTo(BigInteger.ONE) !== 0) {\n state.p = null;\n generate();\n return;\n }\n\n // ensure q is coprime with e\n if(state.q.subtract(BigInteger.ONE).gcd(state.e)\n .compareTo(BigInteger.ONE) !== 0) {\n state.q = null;\n getPrime(state.qBits, finish);\n return;\n }\n\n // compute phi: (p - 1)(q - 1) (Euler's totient function)\n state.p1 = state.p.subtract(BigInteger.ONE);\n state.q1 = state.q.subtract(BigInteger.ONE);\n state.phi = state.p1.multiply(state.q1);\n\n // ensure e and phi are coprime\n if(state.phi.gcd(state.e).compareTo(BigInteger.ONE) !== 0) {\n // phi and e aren't coprime, so generate a new p and q\n state.p = state.q = null;\n generate();\n return;\n }\n\n // create n, ensure n is has the right number of bits\n state.n = state.p.multiply(state.q);\n if(state.n.bitLength() !== state.bits) {\n // failed, get new q\n state.q = null;\n getPrime(state.qBits, finish);\n return;\n }\n\n // set keys\n var d = state.e.modInverse(state.phi);\n state.keys = {\n privateKey: pki.rsa.setPrivateKey(\n state.n, state.e, d, state.p, state.q,\n d.mod(state.p1), d.mod(state.q1),\n state.q.modInverse(state.p)),\n publicKey: pki.rsa.setPublicKey(state.n, state.e)\n };\n\n callback(null, state.keys);\n }\n}\n\n/**\n * Converts a positive BigInteger into 2's-complement big-endian bytes.\n *\n * @param b the big integer to convert.\n *\n * @return the bytes.\n */\nfunction _bnToBytes(b) {\n // prepend 0x00 if first byte >= 0x80\n var hex = b.toString(16);\n if(hex[0] >= '8') {\n hex = '00' + hex;\n }\n var bytes = forge.util.hexToBytes(hex);\n\n // ensure integer is minimally-encoded\n if(bytes.length > 1 &&\n // leading 0x00 for positive integer\n ((bytes.charCodeAt(0) === 0 &&\n (bytes.charCodeAt(1) & 0x80) === 0) ||\n // leading 0xFF for negative integer\n (bytes.charCodeAt(0) === 0xFF &&\n (bytes.charCodeAt(1) & 0x80) === 0x80))) {\n return bytes.substr(1);\n }\n return bytes;\n}\n\n/**\n * Returns the required number of Miller-Rabin tests to generate a\n * prime with an error probability of (1/2)^80.\n *\n * See Handbook of Applied Cryptography Chapter 4, Table 4.4.\n *\n * @param bits the bit size.\n *\n * @return the required number of iterations.\n */\nfunction _getMillerRabinTests(bits) {\n if(bits <= 100) return 27;\n if(bits <= 150) return 18;\n if(bits <= 200) return 15;\n if(bits <= 250) return 12;\n if(bits <= 300) return 9;\n if(bits <= 350) return 8;\n if(bits <= 400) return 7;\n if(bits <= 500) return 6;\n if(bits <= 600) return 5;\n if(bits <= 800) return 4;\n if(bits <= 1250) return 3;\n return 2;\n}\n\n/**\n * Performs feature detection on the Node crypto interface.\n *\n * @param fn the feature (function) to detect.\n *\n * @return true if detected, false if not.\n */\nfunction _detectNodeCrypto(fn) {\n return 
forge.util.isNodejs && typeof _crypto[fn] === 'function';\n}\n\n/**\n * Performs feature detection on the SubtleCrypto interface.\n *\n * @param fn the feature (function) to detect.\n *\n * @return true if detected, false if not.\n */\nfunction _detectSubtleCrypto(fn) {\n return (typeof util.globalScope !== 'undefined' &&\n typeof util.globalScope.crypto === 'object' &&\n typeof util.globalScope.crypto.subtle === 'object' &&\n typeof util.globalScope.crypto.subtle[fn] === 'function');\n}\n\n/**\n * Performs feature detection on the deprecated Microsoft Internet Explorer\n * outdated SubtleCrypto interface. This function should only be used after\n * checking for the modern, standard SubtleCrypto interface.\n *\n * @param fn the feature (function) to detect.\n *\n * @return true if detected, false if not.\n */\nfunction _detectSubtleMsCrypto(fn) {\n return (typeof util.globalScope !== 'undefined' &&\n typeof util.globalScope.msCrypto === 'object' &&\n typeof util.globalScope.msCrypto.subtle === 'object' &&\n typeof util.globalScope.msCrypto.subtle[fn] === 'function');\n}\n\nfunction _intToUint8Array(x) {\n var bytes = forge.util.hexToBytes(x.toString(16));\n var buffer = new Uint8Array(bytes.length);\n for(var i = 0; i < bytes.length; ++i) {\n buffer[i] = bytes.charCodeAt(i);\n }\n return buffer;\n}\n\nfunction _privateKeyFromJwk(jwk) {\n if(jwk.kty !== 'RSA') {\n throw new Error(\n 'Unsupported key algorithm \"' + jwk.kty + '\"; algorithm must be \"RSA\".');\n }\n return pki.setRsaPrivateKey(\n _base64ToBigInt(jwk.n),\n _base64ToBigInt(jwk.e),\n _base64ToBigInt(jwk.d),\n _base64ToBigInt(jwk.p),\n _base64ToBigInt(jwk.q),\n _base64ToBigInt(jwk.dp),\n _base64ToBigInt(jwk.dq),\n _base64ToBigInt(jwk.qi));\n}\n\nfunction _publicKeyFromJwk(jwk) {\n if(jwk.kty !== 'RSA') {\n throw new Error('Key algorithm must be \"RSA\".');\n }\n return pki.setRsaPublicKey(\n _base64ToBigInt(jwk.n),\n _base64ToBigInt(jwk.e));\n}\n\nfunction _base64ToBigInt(b64) {\n return new BigInteger(forge.util.bytesToHex(forge.util.decode64(b64)), 16);\n}\n\n\n//# sourceURL=webpack://@waku/noise-rtc/./node_modules/node-forge/lib/rsa.js?");
/***/ }),
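// Usage sketch for the RSA module above (not part of the generated bundle itself):
// it assumes the published node-forge API, with require('node-forge') standing in
// for however the consumer obtains `forge`. generateKeyPair() drives the async
// _generateKeyPair() worker flow, and the ASN.1 helpers round-trip the public key.
/*
var forge = require('node-forge');
var pki = forge.pki;

pki.rsa.generateKeyPair({bits: 2048, workers: 2}, function(err, keypair) {
  if(err) { return console.error(err); }
  // round-trip the public key through its ASN.1 SubjectPublicKeyInfo form
  var spki = pki.publicKeyToAsn1(keypair.publicKey);
  var publicKey = pki.publicKeyFromAsn1(spki);
  // RSAES-PKCS1-v1_5 is the default scheme, i.e. the padding built by _encodePkcs1_v1_5
  var ciphertext = publicKey.encrypt('secret message');
  console.log(keypair.privateKey.decrypt(ciphertext)); // 'secret message'
});
*/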
/***/ "./node_modules/node-forge/lib/sha1.js":
/*!*********************************************!*\
!*** ./node_modules/node-forge/lib/sha1.js ***!
\*********************************************/
/***/ ((module, __unused_webpack_exports, __webpack_require__) => {
eval("/**\n * Secure Hash Algorithm with 160-bit digest (SHA-1) implementation.\n *\n * @author Dave Longley\n *\n * Copyright (c) 2010-2015 Digital Bazaar, Inc.\n */\nvar forge = __webpack_require__(/*! ./forge */ \"./node_modules/node-forge/lib/forge.js\");\n__webpack_require__(/*! ./md */ \"./node_modules/node-forge/lib/md.js\");\n__webpack_require__(/*! ./util */ \"./node_modules/node-forge/lib/util.js\");\n\nvar sha1 = module.exports = forge.sha1 = forge.sha1 || {};\nforge.md.sha1 = forge.md.algorithms.sha1 = sha1;\n\n/**\n * Creates a SHA-1 message digest object.\n *\n * @return a message digest object.\n */\nsha1.create = function() {\n // do initialization as necessary\n if(!_initialized) {\n _init();\n }\n\n // SHA-1 state contains five 32-bit integers\n var _state = null;\n\n // input buffer\n var _input = forge.util.createBuffer();\n\n // used for word storage\n var _w = new Array(80);\n\n // message digest object\n var md = {\n algorithm: 'sha1',\n blockLength: 64,\n digestLength: 20,\n // 56-bit length of message so far (does not including padding)\n messageLength: 0,\n // true message length\n fullMessageLength: null,\n // size of message length in bytes\n messageLengthSize: 8\n };\n\n /**\n * Starts the digest.\n *\n * @return this digest object.\n */\n md.start = function() {\n // up to 56-bit message length for convenience\n md.messageLength = 0;\n\n // full message length (set md.messageLength64 for backwards-compatibility)\n md.fullMessageLength = md.messageLength64 = [];\n var int32s = md.messageLengthSize / 4;\n for(var i = 0; i < int32s; ++i) {\n md.fullMessageLength.push(0);\n }\n _input = forge.util.createBuffer();\n _state = {\n h0: 0x67452301,\n h1: 0xEFCDAB89,\n h2: 0x98BADCFE,\n h3: 0x10325476,\n h4: 0xC3D2E1F0\n };\n return md;\n };\n // start digest automatically for first time\n md.start();\n\n /**\n * Updates the digest with the given message input. The given input can\n * treated as raw input (no encoding will be applied) or an encoding of\n * 'utf8' maybe given to encode the input using UTF-8.\n *\n * @param msg the message input to update with.\n * @param encoding the encoding to use (default: 'raw', other: 'utf8').\n *\n * @return this digest object.\n */\n md.update = function(msg, encoding) {\n if(encoding === 'utf8') {\n msg = forge.util.encodeUtf8(msg);\n }\n\n // update message length\n var len = msg.length;\n md.messageLength += len;\n len = [(len / 0x100000000) >>> 0, len >>> 0];\n for(var i = md.fullMessageLength.length - 1; i >= 0; --i) {\n md.fullMessageLength[i] += len[1];\n len[1] = len[0] + ((md.fullMessageLength[i] / 0x100000000) >>> 0);\n md.fullMessageLength[i] = md.fullMessageLength[i] >>> 0;\n len[0] = ((len[1] / 0x100000000) >>> 0);\n }\n\n // add bytes to input buffer\n _input.putBytes(msg);\n\n // process bytes\n _update(_state, _w, _input);\n\n // compact input buffer every 2K or if empty\n if(_input.read > 2048 || _input.length() === 0) {\n _input.compact();\n }\n\n return md;\n };\n\n /**\n * Produces the digest.\n *\n * @return a byte buffer containing the digest value.\n */\n md.digest = function() {\n /* Note: Here we copy the remaining bytes in the input buffer and\n add the appropriate SHA-1 padding. Then we do the final update\n on a copy of the state so that if the user wants to get\n intermediate digests they can do so. */\n\n /* Determine the number of bytes that must be added to the message\n to ensure its length is congruent to 448 mod 512. 
In other words,\n the data to be digested must be a multiple of 512 bits (or 128 bytes).\n This data includes the message, some padding, and the length of the\n message. Since the length of the message will be encoded as 8 bytes (64\n bits), that means that the last segment of the data must have 56 bytes\n (448 bits) of message and padding. Therefore, the length of the message\n plus the padding must be congruent to 448 mod 512 because\n 512 - 128 = 448.\n\n In order to fill up the message length it must be filled with\n padding that begins with 1 bit followed by all 0 bits. Padding\n must *always* be present, so if the message length is already\n congruent to 448 mod 512, then 512 padding bits must be added. */\n\n var finalBlock = forge.util.createBuffer();\n finalBlock.putBytes(_input.bytes());\n\n // compute remaining size to be digested (include message length size)\n var remaining = (\n md.fullMessageLength[md.fullMessageLength.length - 1] +\n md.messageLengthSize);\n\n // add padding for overflow blockSize - overflow\n // _padding starts with 1 byte with first bit is set (byte value 128), then\n // there may be up to (blockSize - 1) other pad bytes\n var overflow = remaining & (md.blockLength - 1);\n finalBlock.putBytes(_padding.substr(0, md.blockLength - overflow));\n\n // serialize message length in bits in big-endian order; since length\n // is stored in bytes we multiply by 8 and add carry from next int\n var next, carry;\n var bits = md.fullMessageLength[0] * 8;\n for(var i = 0; i < md.fullMessageLength.length - 1; ++i) {\n next = md.fullMessageLength[i + 1] * 8;\n carry = (next / 0x100000000) >>> 0;\n bits += carry;\n finalBlock.putInt32(bits >>> 0);\n bits = next >>> 0;\n }\n finalBlock.putInt32(bits);\n\n var s2 = {\n h0: _state.h0,\n h1: _state.h1,\n h2: _state.h2,\n h3: _state.h3,\n h4: _state.h4\n };\n _update(s2, _w, finalBlock);\n var rval = forge.util.createBuffer();\n rval.putInt32(s2.h0);\n rval.putInt32(s2.h1);\n rval.putInt32(s2.h2);\n rval.putInt32(s2.h3);\n rval.putInt32(s2.h4);\n return rval;\n };\n\n return md;\n};\n\n// sha-1 padding bytes not initialized yet\nvar _padding = null;\nvar _initialized = false;\n\n/**\n * Initializes the constant tables.\n */\nfunction _init() {\n // create padding\n _padding = String.fromCharCode(128);\n _padding += forge.util.fillString(String.fromCharCode(0x00), 64);\n\n // now initialized\n _initialized = true;\n}\n\n/**\n * Updates a SHA-1 state with the given byte buffer.\n *\n * @param s the SHA-1 state to update.\n * @param w the array to use to store words.\n * @param bytes the byte buffer to update with.\n */\nfunction _update(s, w, bytes) {\n // consume 512 bit (64 byte) chunks\n var t, a, b, c, d, e, f, i;\n var len = bytes.length();\n while(len >= 64) {\n // the w array will be populated with sixteen 32-bit big-endian words\n // and then extended into 80 32-bit words according to SHA-1 algorithm\n // and for 32-79 using Max Locktyukhin's optimization\n\n // initialize hash value for this chunk\n a = s.h0;\n b = s.h1;\n c = s.h2;\n d = s.h3;\n e = s.h4;\n\n // round 1\n for(i = 0; i < 16; ++i) {\n t = bytes.getInt32();\n w[i] = t;\n f = d ^ (b & (c ^ d));\n t = ((a << 5) | (a >>> 27)) + f + e + 0x5A827999 + t;\n e = d;\n d = c;\n // `>>> 0` necessary to avoid iOS/Safari 10 optimization bug\n c = ((b << 30) | (b >>> 2)) >>> 0;\n b = a;\n a = t;\n }\n for(; i < 20; ++i) {\n t = (w[i - 3] ^ w[i - 8] ^ w[i - 14] ^ w[i - 16]);\n t = (t << 1) | (t >>> 31);\n w[i] = t;\n f = d ^ (b & (c ^ d));\n t = ((a << 5) | (a >>> 27)) + 
f + e + 0x5A827999 + t;\n e = d;\n d = c;\n // `>>> 0` necessary to avoid iOS/Safari 10 optimization bug\n c = ((b << 30) | (b >>> 2)) >>> 0;\n b = a;\n a = t;\n }\n // round 2\n for(; i < 32; ++i) {\n t = (w[i - 3] ^ w[i - 8] ^ w[i - 14] ^ w[i - 16]);\n t = (t << 1) | (t >>> 31);\n w[i] = t;\n f = b ^ c ^ d;\n t = ((a << 5) | (a >>> 27)) + f + e + 0x6ED9EBA1 + t;\n e = d;\n d = c;\n // `>>> 0` necessary to avoid iOS/Safari 10 optimization bug\n c = ((b << 30) | (b >>> 2)) >>> 0;\n b = a;\n a = t;\n }\n for(; i < 40; ++i) {\n t = (w[i - 6] ^ w[i - 16] ^ w[i - 28] ^ w[i - 32]);\n t = (t << 2) | (t >>> 30);\n w[i] = t;\n f = b ^ c ^ d;\n t = ((a << 5) | (a >>> 27)) + f + e + 0x6ED9EBA1 + t;\n e = d;\n d = c;\n // `>>> 0` necessary to avoid iOS/Safari 10 optimization bug\n c = ((b << 30) | (b >>> 2)) >>> 0;\n b = a;\n a = t;\n }\n // round 3\n for(; i < 60; ++i) {\n t = (w[i - 6] ^ w[i - 16] ^ w[i - 28] ^ w[i - 32]);\n t = (t << 2) | (t >>> 30);\n w[i] = t;\n f = (b & c) | (d & (b ^ c));\n t = ((a << 5) | (a >>> 27)) + f + e + 0x8F1BBCDC + t;\n e = d;\n d = c;\n // `>>> 0` necessary to avoid iOS/Safari 10 optimization bug\n c = ((b << 30) | (b >>> 2)) >>> 0;\n b = a;\n a = t;\n }\n // round 4\n for(; i < 80; ++i) {\n t = (w[i - 6] ^ w[i - 16] ^ w[i - 28] ^ w[i - 32]);\n t = (t << 2) | (t >>> 30);\n w[i] = t;\n f = b ^ c ^ d;\n t = ((a << 5) | (a >>> 27)) + f + e + 0xCA62C1D6 + t;\n e = d;\n d = c;\n // `>>> 0` necessary to avoid iOS/Safari 10 optimization bug\n c = ((b << 30) | (b >>> 2)) >>> 0;\n b = a;\n a = t;\n }\n\n // update hash state\n s.h0 = (s.h0 + a) | 0;\n s.h1 = (s.h1 + b) | 0;\n s.h2 = (s.h2 + c) | 0;\n s.h3 = (s.h3 + d) | 0;\n s.h4 = (s.h4 + e) | 0;\n\n len -= 64;\n }\n}\n\n\n//# sourceURL=webpack://@waku/noise-rtc/./node_modules/node-forge/lib/sha1.js?");
/***/ }),
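// Usage sketch for the SHA-1 module above: assumes the published node-forge md API
// (require('node-forge') stands in for however the consumer obtains `forge`).
// digest() returns a ByteStringBuffer, so toHex() yields the familiar hex form.
/*
var forge = require('node-forge');
var md = forge.md.sha1.create();
md.update('abc', 'utf8');
console.log(md.digest().toHex());
// 'a9993e364706816aba3e25717850c26c9cd0d89d' (FIPS 180 test vector for 'abc')
*/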
/***/ "./node_modules/node-forge/lib/sha256.js":
/*!***********************************************!*\
!*** ./node_modules/node-forge/lib/sha256.js ***!
\***********************************************/
/***/ ((module, __unused_webpack_exports, __webpack_require__) => {
eval("/**\n * Secure Hash Algorithm with 256-bit digest (SHA-256) implementation.\n *\n * See FIPS 180-2 for details.\n *\n * @author Dave Longley\n *\n * Copyright (c) 2010-2015 Digital Bazaar, Inc.\n */\nvar forge = __webpack_require__(/*! ./forge */ \"./node_modules/node-forge/lib/forge.js\");\n__webpack_require__(/*! ./md */ \"./node_modules/node-forge/lib/md.js\");\n__webpack_require__(/*! ./util */ \"./node_modules/node-forge/lib/util.js\");\n\nvar sha256 = module.exports = forge.sha256 = forge.sha256 || {};\nforge.md.sha256 = forge.md.algorithms.sha256 = sha256;\n\n/**\n * Creates a SHA-256 message digest object.\n *\n * @return a message digest object.\n */\nsha256.create = function() {\n // do initialization as necessary\n if(!_initialized) {\n _init();\n }\n\n // SHA-256 state contains eight 32-bit integers\n var _state = null;\n\n // input buffer\n var _input = forge.util.createBuffer();\n\n // used for word storage\n var _w = new Array(64);\n\n // message digest object\n var md = {\n algorithm: 'sha256',\n blockLength: 64,\n digestLength: 32,\n // 56-bit length of message so far (does not including padding)\n messageLength: 0,\n // true message length\n fullMessageLength: null,\n // size of message length in bytes\n messageLengthSize: 8\n };\n\n /**\n * Starts the digest.\n *\n * @return this digest object.\n */\n md.start = function() {\n // up to 56-bit message length for convenience\n md.messageLength = 0;\n\n // full message length (set md.messageLength64 for backwards-compatibility)\n md.fullMessageLength = md.messageLength64 = [];\n var int32s = md.messageLengthSize / 4;\n for(var i = 0; i < int32s; ++i) {\n md.fullMessageLength.push(0);\n }\n _input = forge.util.createBuffer();\n _state = {\n h0: 0x6A09E667,\n h1: 0xBB67AE85,\n h2: 0x3C6EF372,\n h3: 0xA54FF53A,\n h4: 0x510E527F,\n h5: 0x9B05688C,\n h6: 0x1F83D9AB,\n h7: 0x5BE0CD19\n };\n return md;\n };\n // start digest automatically for first time\n md.start();\n\n /**\n * Updates the digest with the given message input. The given input can\n * treated as raw input (no encoding will be applied) or an encoding of\n * 'utf8' maybe given to encode the input using UTF-8.\n *\n * @param msg the message input to update with.\n * @param encoding the encoding to use (default: 'raw', other: 'utf8').\n *\n * @return this digest object.\n */\n md.update = function(msg, encoding) {\n if(encoding === 'utf8') {\n msg = forge.util.encodeUtf8(msg);\n }\n\n // update message length\n var len = msg.length;\n md.messageLength += len;\n len = [(len / 0x100000000) >>> 0, len >>> 0];\n for(var i = md.fullMessageLength.length - 1; i >= 0; --i) {\n md.fullMessageLength[i] += len[1];\n len[1] = len[0] + ((md.fullMessageLength[i] / 0x100000000) >>> 0);\n md.fullMessageLength[i] = md.fullMessageLength[i] >>> 0;\n len[0] = ((len[1] / 0x100000000) >>> 0);\n }\n\n // add bytes to input buffer\n _input.putBytes(msg);\n\n // process bytes\n _update(_state, _w, _input);\n\n // compact input buffer every 2K or if empty\n if(_input.read > 2048 || _input.length() === 0) {\n _input.compact();\n }\n\n return md;\n };\n\n /**\n * Produces the digest.\n *\n * @return a byte buffer containing the digest value.\n */\n md.digest = function() {\n /* Note: Here we copy the remaining bytes in the input buffer and\n add the appropriate SHA-256 padding. Then we do the final update\n on a copy of the state so that if the user wants to get\n intermediate digests they can do so. 
*/\n\n /* Determine the number of bytes that must be added to the message\n to ensure its length is congruent to 448 mod 512. In other words,\n the data to be digested must be a multiple of 512 bits (or 128 bytes).\n This data includes the message, some padding, and the length of the\n message. Since the length of the message will be encoded as 8 bytes (64\n bits), that means that the last segment of the data must have 56 bytes\n (448 bits) of message and padding. Therefore, the length of the message\n plus the padding must be congruent to 448 mod 512 because\n 512 - 128 = 448.\n\n In order to fill up the message length it must be filled with\n padding that begins with 1 bit followed by all 0 bits. Padding\n must *always* be present, so if the message length is already\n congruent to 448 mod 512, then 512 padding bits must be added. */\n\n var finalBlock = forge.util.createBuffer();\n finalBlock.putBytes(_input.bytes());\n\n // compute remaining size to be digested (include message length size)\n var remaining = (\n md.fullMessageLength[md.fullMessageLength.length - 1] +\n md.messageLengthSize);\n\n // add padding for overflow blockSize - overflow\n // _padding starts with 1 byte with first bit is set (byte value 128), then\n // there may be up to (blockSize - 1) other pad bytes\n var overflow = remaining & (md.blockLength - 1);\n finalBlock.putBytes(_padding.substr(0, md.blockLength - overflow));\n\n // serialize message length in bits in big-endian order; since length\n // is stored in bytes we multiply by 8 and add carry from next int\n var next, carry;\n var bits = md.fullMessageLength[0] * 8;\n for(var i = 0; i < md.fullMessageLength.length - 1; ++i) {\n next = md.fullMessageLength[i + 1] * 8;\n carry = (next / 0x100000000) >>> 0;\n bits += carry;\n finalBlock.putInt32(bits >>> 0);\n bits = next >>> 0;\n }\n finalBlock.putInt32(bits);\n\n var s2 = {\n h0: _state.h0,\n h1: _state.h1,\n h2: _state.h2,\n h3: _state.h3,\n h4: _state.h4,\n h5: _state.h5,\n h6: _state.h6,\n h7: _state.h7\n };\n _update(s2, _w, finalBlock);\n var rval = forge.util.createBuffer();\n rval.putInt32(s2.h0);\n rval.putInt32(s2.h1);\n rval.putInt32(s2.h2);\n rval.putInt32(s2.h3);\n rval.putInt32(s2.h4);\n rval.putInt32(s2.h5);\n rval.putInt32(s2.h6);\n rval.putInt32(s2.h7);\n return rval;\n };\n\n return md;\n};\n\n// sha-256 padding bytes not initialized yet\nvar _padding = null;\nvar _initialized = false;\n\n// table of constants\nvar _k = null;\n\n/**\n * Initializes the constant tables.\n */\nfunction _init() {\n // create padding\n _padding = String.fromCharCode(128);\n _padding += forge.util.fillString(String.fromCharCode(0x00), 64);\n\n // create K table for SHA-256\n _k = [\n 0x428a2f98, 0x71374491, 0xb5c0fbcf, 0xe9b5dba5,\n 0x3956c25b, 0x59f111f1, 0x923f82a4, 0xab1c5ed5,\n 0xd807aa98, 0x12835b01, 0x243185be, 0x550c7dc3,\n 0x72be5d74, 0x80deb1fe, 0x9bdc06a7, 0xc19bf174,\n 0xe49b69c1, 0xefbe4786, 0x0fc19dc6, 0x240ca1cc,\n 0x2de92c6f, 0x4a7484aa, 0x5cb0a9dc, 0x76f988da,\n 0x983e5152, 0xa831c66d, 0xb00327c8, 0xbf597fc7,\n 0xc6e00bf3, 0xd5a79147, 0x06ca6351, 0x14292967,\n 0x27b70a85, 0x2e1b2138, 0x4d2c6dfc, 0x53380d13,\n 0x650a7354, 0x766a0abb, 0x81c2c92e, 0x92722c85,\n 0xa2bfe8a1, 0xa81a664b, 0xc24b8b70, 0xc76c51a3,\n 0xd192e819, 0xd6990624, 0xf40e3585, 0x106aa070,\n 0x19a4c116, 0x1e376c08, 0x2748774c, 0x34b0bcb5,\n 0x391c0cb3, 0x4ed8aa4a, 0x5b9cca4f, 0x682e6ff3,\n 0x748f82ee, 0x78a5636f, 0x84c87814, 0x8cc70208,\n 0x90befffa, 0xa4506ceb, 0xbef9a3f7, 0xc67178f2];\n\n // now initialized\n _initialized = 
true;\n}\n\n/**\n * Updates a SHA-256 state with the given byte buffer.\n *\n * @param s the SHA-256 state to update.\n * @param w the array to use to store words.\n * @param bytes the byte buffer to update with.\n */\nfunction _update(s, w, bytes) {\n // consume 512 bit (64 byte) chunks\n var t1, t2, s0, s1, ch, maj, i, a, b, c, d, e, f, g, h;\n var len = bytes.length();\n while(len >= 64) {\n // the w array will be populated with sixteen 32-bit big-endian words\n // and then extended into 64 32-bit words according to SHA-256\n for(i = 0; i < 16; ++i) {\n w[i] = bytes.getInt32();\n }\n for(; i < 64; ++i) {\n // XOR word 2 words ago rot right 17, rot right 19, shft right 10\n t1 = w[i - 2];\n t1 =\n ((t1 >>> 17) | (t1 << 15)) ^\n ((t1 >>> 19) | (t1 << 13)) ^\n (t1 >>> 10);\n // XOR word 15 words ago rot right 7, rot right 18, shft right 3\n t2 = w[i - 15];\n t2 =\n ((t2 >>> 7) | (t2 << 25)) ^\n ((t2 >>> 18) | (t2 << 14)) ^\n (t2 >>> 3);\n // sum(t1, word 7 ago, t2, word 16 ago) modulo 2^32\n w[i] = (t1 + w[i - 7] + t2 + w[i - 16]) | 0;\n }\n\n // initialize hash value for this chunk\n a = s.h0;\n b = s.h1;\n c = s.h2;\n d = s.h3;\n e = s.h4;\n f = s.h5;\n g = s.h6;\n h = s.h7;\n\n // round function\n for(i = 0; i < 64; ++i) {\n // Sum1(e)\n s1 =\n ((e >>> 6) | (e << 26)) ^\n ((e >>> 11) | (e << 21)) ^\n ((e >>> 25) | (e << 7));\n // Ch(e, f, g) (optimized the same way as SHA-1)\n ch = g ^ (e & (f ^ g));\n // Sum0(a)\n s0 =\n ((a >>> 2) | (a << 30)) ^\n ((a >>> 13) | (a << 19)) ^\n ((a >>> 22) | (a << 10));\n // Maj(a, b, c) (optimized the same way as SHA-1)\n maj = (a & b) | (c & (a ^ b));\n\n // main algorithm\n t1 = h + s1 + ch + _k[i] + w[i];\n t2 = s0 + maj;\n h = g;\n g = f;\n f = e;\n // `>>> 0` necessary to avoid iOS/Safari 10 optimization bug\n // can't truncate with `| 0`\n e = (d + t1) >>> 0;\n d = c;\n c = b;\n b = a;\n // `>>> 0` necessary to avoid iOS/Safari 10 optimization bug\n // can't truncate with `| 0`\n a = (t1 + t2) >>> 0;\n }\n\n // update hash state\n s.h0 = (s.h0 + a) | 0;\n s.h1 = (s.h1 + b) | 0;\n s.h2 = (s.h2 + c) | 0;\n s.h3 = (s.h3 + d) | 0;\n s.h4 = (s.h4 + e) | 0;\n s.h5 = (s.h5 + f) | 0;\n s.h6 = (s.h6 + g) | 0;\n s.h7 = (s.h7 + h) | 0;\n len -= 64;\n }\n}\n\n\n//# sourceURL=webpack://@waku/noise-rtc/./node_modules/node-forge/lib/sha256.js?");
/***/ }),
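// Usage sketch for the SHA-256 module above. As the digest() comment notes, the
// final update runs on a copy of the state, so intermediate digests are allowed.
// require('node-forge') stands in for however the consumer obtains `forge`.
/*
var forge = require('node-forge');
var md = forge.md.sha256.create();
md.update('ab', 'utf8');
var partial = md.digest().toHex(); // intermediate digest; md can keep updating
md.update('c', 'utf8');
console.log(md.digest().toHex());
// 'ba7816bf8f01cfea414140de5dae2223b00361a396177a9cb410ff61f20015ad' (SHA-256 of 'abc')
*/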
/***/ "./node_modules/node-forge/lib/sha512.js":
/*!***********************************************!*\
!*** ./node_modules/node-forge/lib/sha512.js ***!
\***********************************************/
/***/ ((module, __unused_webpack_exports, __webpack_require__) => {
eval("/**\n * Secure Hash Algorithm with a 1024-bit block size implementation.\n *\n * This includes: SHA-512, SHA-384, SHA-512/224, and SHA-512/256. For\n * SHA-256 (block size 512 bits), see sha256.js.\n *\n * See FIPS 180-4 for details.\n *\n * @author Dave Longley\n *\n * Copyright (c) 2014-2015 Digital Bazaar, Inc.\n */\nvar forge = __webpack_require__(/*! ./forge */ \"./node_modules/node-forge/lib/forge.js\");\n__webpack_require__(/*! ./md */ \"./node_modules/node-forge/lib/md.js\");\n__webpack_require__(/*! ./util */ \"./node_modules/node-forge/lib/util.js\");\n\nvar sha512 = module.exports = forge.sha512 = forge.sha512 || {};\n\n// SHA-512\nforge.md.sha512 = forge.md.algorithms.sha512 = sha512;\n\n// SHA-384\nvar sha384 = forge.sha384 = forge.sha512.sha384 = forge.sha512.sha384 || {};\nsha384.create = function() {\n return sha512.create('SHA-384');\n};\nforge.md.sha384 = forge.md.algorithms.sha384 = sha384;\n\n// SHA-512/256\nforge.sha512.sha256 = forge.sha512.sha256 || {\n create: function() {\n return sha512.create('SHA-512/256');\n }\n};\nforge.md['sha512/256'] = forge.md.algorithms['sha512/256'] =\n forge.sha512.sha256;\n\n// SHA-512/224\nforge.sha512.sha224 = forge.sha512.sha224 || {\n create: function() {\n return sha512.create('SHA-512/224');\n }\n};\nforge.md['sha512/224'] = forge.md.algorithms['sha512/224'] =\n forge.sha512.sha224;\n\n/**\n * Creates a SHA-2 message digest object.\n *\n * @param algorithm the algorithm to use (SHA-512, SHA-384, SHA-512/224,\n * SHA-512/256).\n *\n * @return a message digest object.\n */\nsha512.create = function(algorithm) {\n // do initialization as necessary\n if(!_initialized) {\n _init();\n }\n\n if(typeof algorithm === 'undefined') {\n algorithm = 'SHA-512';\n }\n\n if(!(algorithm in _states)) {\n throw new Error('Invalid SHA-512 algorithm: ' + algorithm);\n }\n\n // SHA-512 state contains eight 64-bit integers (each as two 32-bit ints)\n var _state = _states[algorithm];\n var _h = null;\n\n // input buffer\n var _input = forge.util.createBuffer();\n\n // used for 64-bit word storage\n var _w = new Array(80);\n for(var wi = 0; wi < 80; ++wi) {\n _w[wi] = new Array(2);\n }\n\n // determine digest length by algorithm name (default)\n var digestLength = 64;\n switch(algorithm) {\n case 'SHA-384':\n digestLength = 48;\n break;\n case 'SHA-512/256':\n digestLength = 32;\n break;\n case 'SHA-512/224':\n digestLength = 28;\n break;\n }\n\n // message digest object\n var md = {\n // SHA-512 => sha512\n algorithm: algorithm.replace('-', '').toLowerCase(),\n blockLength: 128,\n digestLength: digestLength,\n // 56-bit length of message so far (does not including padding)\n messageLength: 0,\n // true message length\n fullMessageLength: null,\n // size of message length in bytes\n messageLengthSize: 16\n };\n\n /**\n * Starts the digest.\n *\n * @return this digest object.\n */\n md.start = function() {\n // up to 56-bit message length for convenience\n md.messageLength = 0;\n\n // full message length (set md.messageLength128 for backwards-compatibility)\n md.fullMessageLength = md.messageLength128 = [];\n var int32s = md.messageLengthSize / 4;\n for(var i = 0; i < int32s; ++i) {\n md.fullMessageLength.push(0);\n }\n _input = forge.util.createBuffer();\n _h = new Array(_state.length);\n for(var i = 0; i < _state.length; ++i) {\n _h[i] = _state[i].slice(0);\n }\n return md;\n };\n // start digest automatically for first time\n md.start();\n\n /**\n * Updates the digest with the given message input. 
The given input can\n * treated as raw input (no encoding will be applied) or an encoding of\n * 'utf8' maybe given to encode the input using UTF-8.\n *\n * @param msg the message input to update with.\n * @param encoding the encoding to use (default: 'raw', other: 'utf8').\n *\n * @return this digest object.\n */\n md.update = function(msg, encoding) {\n if(encoding === 'utf8') {\n msg = forge.util.encodeUtf8(msg);\n }\n\n // update message length\n var len = msg.length;\n md.messageLength += len;\n len = [(len / 0x100000000) >>> 0, len >>> 0];\n for(var i = md.fullMessageLength.length - 1; i >= 0; --i) {\n md.fullMessageLength[i] += len[1];\n len[1] = len[0] + ((md.fullMessageLength[i] / 0x100000000) >>> 0);\n md.fullMessageLength[i] = md.fullMessageLength[i] >>> 0;\n len[0] = ((len[1] / 0x100000000) >>> 0);\n }\n\n // add bytes to input buffer\n _input.putBytes(msg);\n\n // process bytes\n _update(_h, _w, _input);\n\n // compact input buffer every 2K or if empty\n if(_input.read > 2048 || _input.length() === 0) {\n _input.compact();\n }\n\n return md;\n };\n\n /**\n * Produces the digest.\n *\n * @return a byte buffer containing the digest value.\n */\n md.digest = function() {\n /* Note: Here we copy the remaining bytes in the input buffer and\n add the appropriate SHA-512 padding. Then we do the final update\n on a copy of the state so that if the user wants to get\n intermediate digests they can do so. */\n\n /* Determine the number of bytes that must be added to the message\n to ensure its length is congruent to 896 mod 1024. In other words,\n the data to be digested must be a multiple of 1024 bits (or 128 bytes).\n This data includes the message, some padding, and the length of the\n message. Since the length of the message will be encoded as 16 bytes (128\n bits), that means that the last segment of the data must have 112 bytes\n (896 bits) of message and padding. Therefore, the length of the message\n plus the padding must be congruent to 896 mod 1024 because\n 1024 - 128 = 896.\n\n In order to fill up the message length it must be filled with\n padding that begins with 1 bit followed by all 0 bits. Padding\n must *always* be present, so if the message length is already\n congruent to 896 mod 1024, then 1024 padding bits must be added. 
*/\n\n var finalBlock = forge.util.createBuffer();\n finalBlock.putBytes(_input.bytes());\n\n // compute remaining size to be digested (include message length size)\n var remaining = (\n md.fullMessageLength[md.fullMessageLength.length - 1] +\n md.messageLengthSize);\n\n // add padding for overflow blockSize - overflow\n // _padding starts with 1 byte with first bit is set (byte value 128), then\n // there may be up to (blockSize - 1) other pad bytes\n var overflow = remaining & (md.blockLength - 1);\n finalBlock.putBytes(_padding.substr(0, md.blockLength - overflow));\n\n // serialize message length in bits in big-endian order; since length\n // is stored in bytes we multiply by 8 and add carry from next int\n var next, carry;\n var bits = md.fullMessageLength[0] * 8;\n for(var i = 0; i < md.fullMessageLength.length - 1; ++i) {\n next = md.fullMessageLength[i + 1] * 8;\n carry = (next / 0x100000000) >>> 0;\n bits += carry;\n finalBlock.putInt32(bits >>> 0);\n bits = next >>> 0;\n }\n finalBlock.putInt32(bits);\n\n var h = new Array(_h.length);\n for(var i = 0; i < _h.length; ++i) {\n h[i] = _h[i].slice(0);\n }\n _update(h, _w, finalBlock);\n var rval = forge.util.createBuffer();\n var hlen;\n if(algorithm === 'SHA-512') {\n hlen = h.length;\n } else if(algorithm === 'SHA-384') {\n hlen = h.length - 2;\n } else {\n hlen = h.length - 4;\n }\n for(var i = 0; i < hlen; ++i) {\n rval.putInt32(h[i][0]);\n if(i !== hlen - 1 || algorithm !== 'SHA-512/224') {\n rval.putInt32(h[i][1]);\n }\n }\n return rval;\n };\n\n return md;\n};\n\n// sha-512 padding bytes not initialized yet\nvar _padding = null;\nvar _initialized = false;\n\n// table of constants\nvar _k = null;\n\n// initial hash states\nvar _states = null;\n\n/**\n * Initializes the constant tables.\n */\nfunction _init() {\n // create padding\n _padding = String.fromCharCode(128);\n _padding += forge.util.fillString(String.fromCharCode(0x00), 128);\n\n // create K table for SHA-512\n _k = [\n [0x428a2f98, 0xd728ae22], [0x71374491, 0x23ef65cd],\n [0xb5c0fbcf, 0xec4d3b2f], [0xe9b5dba5, 0x8189dbbc],\n [0x3956c25b, 0xf348b538], [0x59f111f1, 0xb605d019],\n [0x923f82a4, 0xaf194f9b], [0xab1c5ed5, 0xda6d8118],\n [0xd807aa98, 0xa3030242], [0x12835b01, 0x45706fbe],\n [0x243185be, 0x4ee4b28c], [0x550c7dc3, 0xd5ffb4e2],\n [0x72be5d74, 0xf27b896f], [0x80deb1fe, 0x3b1696b1],\n [0x9bdc06a7, 0x25c71235], [0xc19bf174, 0xcf692694],\n [0xe49b69c1, 0x9ef14ad2], [0xefbe4786, 0x384f25e3],\n [0x0fc19dc6, 0x8b8cd5b5], [0x240ca1cc, 0x77ac9c65],\n [0x2de92c6f, 0x592b0275], [0x4a7484aa, 0x6ea6e483],\n [0x5cb0a9dc, 0xbd41fbd4], [0x76f988da, 0x831153b5],\n [0x983e5152, 0xee66dfab], [0xa831c66d, 0x2db43210],\n [0xb00327c8, 0x98fb213f], [0xbf597fc7, 0xbeef0ee4],\n [0xc6e00bf3, 0x3da88fc2], [0xd5a79147, 0x930aa725],\n [0x06ca6351, 0xe003826f], [0x14292967, 0x0a0e6e70],\n [0x27b70a85, 0x46d22ffc], [0x2e1b2138, 0x5c26c926],\n [0x4d2c6dfc, 0x5ac42aed], [0x53380d13, 0x9d95b3df],\n [0x650a7354, 0x8baf63de], [0x766a0abb, 0x3c77b2a8],\n [0x81c2c92e, 0x47edaee6], [0x92722c85, 0x1482353b],\n [0xa2bfe8a1, 0x4cf10364], [0xa81a664b, 0xbc423001],\n [0xc24b8b70, 0xd0f89791], [0xc76c51a3, 0x0654be30],\n [0xd192e819, 0xd6ef5218], [0xd6990624, 0x5565a910],\n [0xf40e3585, 0x5771202a], [0x106aa070, 0x32bbd1b8],\n [0x19a4c116, 0xb8d2d0c8], [0x1e376c08, 0x5141ab53],\n [0x2748774c, 0xdf8eeb99], [0x34b0bcb5, 0xe19b48a8],\n [0x391c0cb3, 0xc5c95a63], [0x4ed8aa4a, 0xe3418acb],\n [0x5b9cca4f, 0x7763e373], [0x682e6ff3, 0xd6b2b8a3],\n [0x748f82ee, 0x5defb2fc], [0x78a5636f, 0x43172f60],\n [0x84c87814, 
0xa1f0ab72], [0x8cc70208, 0x1a6439ec],\n [0x90befffa, 0x23631e28], [0xa4506ceb, 0xde82bde9],\n [0xbef9a3f7, 0xb2c67915], [0xc67178f2, 0xe372532b],\n [0xca273ece, 0xea26619c], [0xd186b8c7, 0x21c0c207],\n [0xeada7dd6, 0xcde0eb1e], [0xf57d4f7f, 0xee6ed178],\n [0x06f067aa, 0x72176fba], [0x0a637dc5, 0xa2c898a6],\n [0x113f9804, 0xbef90dae], [0x1b710b35, 0x131c471b],\n [0x28db77f5, 0x23047d84], [0x32caab7b, 0x40c72493],\n [0x3c9ebe0a, 0x15c9bebc], [0x431d67c4, 0x9c100d4c],\n [0x4cc5d4be, 0xcb3e42b6], [0x597f299c, 0xfc657e2a],\n [0x5fcb6fab, 0x3ad6faec], [0x6c44198c, 0x4a475817]\n ];\n\n // initial hash states\n _states = {};\n _states['SHA-512'] = [\n [0x6a09e667, 0xf3bcc908],\n [0xbb67ae85, 0x84caa73b],\n [0x3c6ef372, 0xfe94f82b],\n [0xa54ff53a, 0x5f1d36f1],\n [0x510e527f, 0xade682d1],\n [0x9b05688c, 0x2b3e6c1f],\n [0x1f83d9ab, 0xfb41bd6b],\n [0x5be0cd19, 0x137e2179]\n ];\n _states['SHA-384'] = [\n [0xcbbb9d5d, 0xc1059ed8],\n [0x629a292a, 0x367cd507],\n [0x9159015a, 0x3070dd17],\n [0x152fecd8, 0xf70e5939],\n [0x67332667, 0xffc00b31],\n [0x8eb44a87, 0x68581511],\n [0xdb0c2e0d, 0x64f98fa7],\n [0x47b5481d, 0xbefa4fa4]\n ];\n _states['SHA-512/256'] = [\n [0x22312194, 0xFC2BF72C],\n [0x9F555FA3, 0xC84C64C2],\n [0x2393B86B, 0x6F53B151],\n [0x96387719, 0x5940EABD],\n [0x96283EE2, 0xA88EFFE3],\n [0xBE5E1E25, 0x53863992],\n [0x2B0199FC, 0x2C85B8AA],\n [0x0EB72DDC, 0x81C52CA2]\n ];\n _states['SHA-512/224'] = [\n [0x8C3D37C8, 0x19544DA2],\n [0x73E19966, 0x89DCD4D6],\n [0x1DFAB7AE, 0x32FF9C82],\n [0x679DD514, 0x582F9FCF],\n [0x0F6D2B69, 0x7BD44DA8],\n [0x77E36F73, 0x04C48942],\n [0x3F9D85A8, 0x6A1D36C8],\n [0x1112E6AD, 0x91D692A1]\n ];\n\n // now initialized\n _initialized = true;\n}\n\n/**\n * Updates a SHA-512 state with the given byte buffer.\n *\n * @param s the SHA-512 state to update.\n * @param w the array to use to store words.\n * @param bytes the byte buffer to update with.\n */\nfunction _update(s, w, bytes) {\n // consume 512 bit (128 byte) chunks\n var t1_hi, t1_lo;\n var t2_hi, t2_lo;\n var s0_hi, s0_lo;\n var s1_hi, s1_lo;\n var ch_hi, ch_lo;\n var maj_hi, maj_lo;\n var a_hi, a_lo;\n var b_hi, b_lo;\n var c_hi, c_lo;\n var d_hi, d_lo;\n var e_hi, e_lo;\n var f_hi, f_lo;\n var g_hi, g_lo;\n var h_hi, h_lo;\n var i, hi, lo, w2, w7, w15, w16;\n var len = bytes.length();\n while(len >= 128) {\n // the w array will be populated with sixteen 64-bit big-endian words\n // and then extended into 64 64-bit words according to SHA-512\n for(i = 0; i < 16; ++i) {\n w[i][0] = bytes.getInt32() >>> 0;\n w[i][1] = bytes.getInt32() >>> 0;\n }\n for(; i < 80; ++i) {\n // for word 2 words ago: ROTR 19(x) ^ ROTR 61(x) ^ SHR 6(x)\n w2 = w[i - 2];\n hi = w2[0];\n lo = w2[1];\n\n // high bits\n t1_hi = (\n ((hi >>> 19) | (lo << 13)) ^ // ROTR 19\n ((lo >>> 29) | (hi << 3)) ^ // ROTR 61/(swap + ROTR 29)\n (hi >>> 6)) >>> 0; // SHR 6\n // low bits\n t1_lo = (\n ((hi << 13) | (lo >>> 19)) ^ // ROTR 19\n ((lo << 3) | (hi >>> 29)) ^ // ROTR 61/(swap + ROTR 29)\n ((hi << 26) | (lo >>> 6))) >>> 0; // SHR 6\n\n // for word 15 words ago: ROTR 1(x) ^ ROTR 8(x) ^ SHR 7(x)\n w15 = w[i - 15];\n hi = w15[0];\n lo = w15[1];\n\n // high bits\n t2_hi = (\n ((hi >>> 1) | (lo << 31)) ^ // ROTR 1\n ((hi >>> 8) | (lo << 24)) ^ // ROTR 8\n (hi >>> 7)) >>> 0; // SHR 7\n // low bits\n t2_lo = (\n ((hi << 31) | (lo >>> 1)) ^ // ROTR 1\n ((hi << 24) | (lo >>> 8)) ^ // ROTR 8\n ((hi << 25) | (lo >>> 7))) >>> 0; // SHR 7\n\n // sum(t1, word 7 ago, t2, word 16 ago) modulo 2^64 (carry lo overflow)\n w7 = w[i - 7];\n w16 = w[i - 16];\n lo = 
(t1_lo + w7[1] + t2_lo + w16[1]);\n w[i][0] = (t1_hi + w7[0] + t2_hi + w16[0] +\n ((lo / 0x100000000) >>> 0)) >>> 0;\n w[i][1] = lo >>> 0;\n }\n\n // initialize hash value for this chunk\n a_hi = s[0][0];\n a_lo = s[0][1];\n b_hi = s[1][0];\n b_lo = s[1][1];\n c_hi = s[2][0];\n c_lo = s[2][1];\n d_hi = s[3][0];\n d_lo = s[3][1];\n e_hi = s[4][0];\n e_lo = s[4][1];\n f_hi = s[5][0];\n f_lo = s[5][1];\n g_hi = s[6][0];\n g_lo = s[6][1];\n h_hi = s[7][0];\n h_lo = s[7][1];\n\n // round function\n for(i = 0; i < 80; ++i) {\n // Sum1(e) = ROTR 14(e) ^ ROTR 18(e) ^ ROTR 41(e)\n s1_hi = (\n ((e_hi >>> 14) | (e_lo << 18)) ^ // ROTR 14\n ((e_hi >>> 18) | (e_lo << 14)) ^ // ROTR 18\n ((e_lo >>> 9) | (e_hi << 23))) >>> 0; // ROTR 41/(swap + ROTR 9)\n s1_lo = (\n ((e_hi << 18) | (e_lo >>> 14)) ^ // ROTR 14\n ((e_hi << 14) | (e_lo >>> 18)) ^ // ROTR 18\n ((e_lo << 23) | (e_hi >>> 9))) >>> 0; // ROTR 41/(swap + ROTR 9)\n\n // Ch(e, f, g) (optimized the same way as SHA-1)\n ch_hi = (g_hi ^ (e_hi & (f_hi ^ g_hi))) >>> 0;\n ch_lo = (g_lo ^ (e_lo & (f_lo ^ g_lo))) >>> 0;\n\n // Sum0(a) = ROTR 28(a) ^ ROTR 34(a) ^ ROTR 39(a)\n s0_hi = (\n ((a_hi >>> 28) | (a_lo << 4)) ^ // ROTR 28\n ((a_lo >>> 2) | (a_hi << 30)) ^ // ROTR 34/(swap + ROTR 2)\n ((a_lo >>> 7) | (a_hi << 25))) >>> 0; // ROTR 39/(swap + ROTR 7)\n s0_lo = (\n ((a_hi << 4) | (a_lo >>> 28)) ^ // ROTR 28\n ((a_lo << 30) | (a_hi >>> 2)) ^ // ROTR 34/(swap + ROTR 2)\n ((a_lo << 25) | (a_hi >>> 7))) >>> 0; // ROTR 39/(swap + ROTR 7)\n\n // Maj(a, b, c) (optimized the same way as SHA-1)\n maj_hi = ((a_hi & b_hi) | (c_hi & (a_hi ^ b_hi))) >>> 0;\n maj_lo = ((a_lo & b_lo) | (c_lo & (a_lo ^ b_lo))) >>> 0;\n\n // main algorithm\n // t1 = (h + s1 + ch + _k[i] + _w[i]) modulo 2^64 (carry lo overflow)\n lo = (h_lo + s1_lo + ch_lo + _k[i][1] + w[i][1]);\n t1_hi = (h_hi + s1_hi + ch_hi + _k[i][0] + w[i][0] +\n ((lo / 0x100000000) >>> 0)) >>> 0;\n t1_lo = lo >>> 0;\n\n // t2 = s0 + maj modulo 2^64 (carry lo overflow)\n lo = s0_lo + maj_lo;\n t2_hi = (s0_hi + maj_hi + ((lo / 0x100000000) >>> 0)) >>> 0;\n t2_lo = lo >>> 0;\n\n h_hi = g_hi;\n h_lo = g_lo;\n\n g_hi = f_hi;\n g_lo = f_lo;\n\n f_hi = e_hi;\n f_lo = e_lo;\n\n // e = (d + t1) modulo 2^64 (carry lo overflow)\n lo = d_lo + t1_lo;\n e_hi = (d_hi + t1_hi + ((lo / 0x100000000) >>> 0)) >>> 0;\n e_lo = lo >>> 0;\n\n d_hi = c_hi;\n d_lo = c_lo;\n\n c_hi = b_hi;\n c_lo = b_lo;\n\n b_hi = a_hi;\n b_lo = a_lo;\n\n // a = (t1 + t2) modulo 2^64 (carry lo overflow)\n lo = t1_lo + t2_lo;\n a_hi = (t1_hi + t2_hi + ((lo / 0x100000000) >>> 0)) >>> 0;\n a_lo = lo >>> 0;\n }\n\n // update hash state (additional modulo 2^64)\n lo = s[0][1] + a_lo;\n s[0][0] = (s[0][0] + a_hi + ((lo / 0x100000000) >>> 0)) >>> 0;\n s[0][1] = lo >>> 0;\n\n lo = s[1][1] + b_lo;\n s[1][0] = (s[1][0] + b_hi + ((lo / 0x100000000) >>> 0)) >>> 0;\n s[1][1] = lo >>> 0;\n\n lo = s[2][1] + c_lo;\n s[2][0] = (s[2][0] + c_hi + ((lo / 0x100000000) >>> 0)) >>> 0;\n s[2][1] = lo >>> 0;\n\n lo = s[3][1] + d_lo;\n s[3][0] = (s[3][0] + d_hi + ((lo / 0x100000000) >>> 0)) >>> 0;\n s[3][1] = lo >>> 0;\n\n lo = s[4][1] + e_lo;\n s[4][0] = (s[4][0] + e_hi + ((lo / 0x100000000) >>> 0)) >>> 0;\n s[4][1] = lo >>> 0;\n\n lo = s[5][1] + f_lo;\n s[5][0] = (s[5][0] + f_hi + ((lo / 0x100000000) >>> 0)) >>> 0;\n s[5][1] = lo >>> 0;\n\n lo = s[6][1] + g_lo;\n s[6][0] = (s[6][0] + g_hi + ((lo / 0x100000000) >>> 0)) >>> 0;\n s[6][1] = lo >>> 0;\n\n lo = s[7][1] + h_lo;\n s[7][0] = (s[7][0] + h_hi + ((lo / 0x100000000) >>> 0)) >>> 0;\n s[7][1] = lo >>> 0;\n\n len -= 128;\n 
}\n}\n\n\n//# sourceURL=webpack://@waku/noise-rtc/./node_modules/node-forge/lib/sha512.js?");
/***/ }),
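// Usage sketch for the SHA-512 module above: one implementation backs SHA-512,
// SHA-384, SHA-512/256 and SHA-512/224, selected by algorithm name at create().
// require('node-forge') stands in for however the consumer obtains `forge`.
/*
var forge = require('node-forge');
var md512 = forge.md.sha512.create();           // 64-byte (512-bit) digest
var md384 = forge.md.sha384.create();           // 48-byte (384-bit) digest
var md256 = forge.md['sha512/256'].create();    // 32-byte (256-bit) digest
md384.update('abc', 'utf8');
console.log(md384.digest().toHex().length / 2); // 48 bytes
*/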
/***/ "./node_modules/node-forge/lib/util.js":
/*!*********************************************!*\
!*** ./node_modules/node-forge/lib/util.js ***!
\*********************************************/
/***/ ((module, __unused_webpack_exports, __webpack_require__) => {
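// Usage sketch for the util module defined below: forge.util.createBuffer() returns
// a ByteStringBuffer, whose put/get methods operate on a binary-encoded string.
// require('node-forge') stands in for however the consumer obtains `forge`.
/*
var forge = require('node-forge');
var buf = forge.util.createBuffer();
buf.putInt32(0x01020304);                   // big-endian 32-bit write
buf.putBytes('\xff\xfe');                   // raw binary-string bytes
console.log(buf.toHex());                   // '01020304fffe'
console.log(buf.getInt32());                // 16909060 (0x01020304); advances read pointer
console.log(buf.length());                  // 2 bytes left unread
console.log(buf.getBytes() === '\xff\xfe'); // true; drains the buffer
*/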
eval("/**\n * Utility functions for web applications.\n *\n * @author Dave Longley\n *\n * Copyright (c) 2010-2018 Digital Bazaar, Inc.\n */\nvar forge = __webpack_require__(/*! ./forge */ \"./node_modules/node-forge/lib/forge.js\");\nvar baseN = __webpack_require__(/*! ./baseN */ \"./node_modules/node-forge/lib/baseN.js\");\n\n/* Utilities API */\nvar util = module.exports = forge.util = forge.util || {};\n\n// define setImmediate and nextTick\n(function() {\n // use native nextTick (unless we're in webpack)\n // webpack (or better node-libs-browser polyfill) sets process.browser.\n // this way we can detect webpack properly\n if(typeof process !== 'undefined' && process.nextTick && !process.browser) {\n util.nextTick = process.nextTick;\n if(typeof setImmediate === 'function') {\n util.setImmediate = setImmediate;\n } else {\n // polyfill setImmediate with nextTick, older versions of node\n // (those w/o setImmediate) won't totally starve IO\n util.setImmediate = util.nextTick;\n }\n return;\n }\n\n // polyfill nextTick with native setImmediate\n if(typeof setImmediate === 'function') {\n util.setImmediate = function() { return setImmediate.apply(undefined, arguments); };\n util.nextTick = function(callback) {\n return setImmediate(callback);\n };\n return;\n }\n\n /* Note: A polyfill upgrade pattern is used here to allow combining\n polyfills. For example, MutationObserver is fast, but blocks UI updates,\n so it needs to allow UI updates periodically, so it falls back on\n postMessage or setTimeout. */\n\n // polyfill with setTimeout\n util.setImmediate = function(callback) {\n setTimeout(callback, 0);\n };\n\n // upgrade polyfill to use postMessage\n if(typeof window !== 'undefined' &&\n typeof window.postMessage === 'function') {\n var msg = 'forge.setImmediate';\n var callbacks = [];\n util.setImmediate = function(callback) {\n callbacks.push(callback);\n // only send message when one hasn't been sent in\n // the current turn of the event loop\n if(callbacks.length === 1) {\n window.postMessage(msg, '*');\n }\n };\n function handler(event) {\n if(event.source === window && event.data === msg) {\n event.stopPropagation();\n var copy = callbacks.slice();\n callbacks.length = 0;\n copy.forEach(function(callback) {\n callback();\n });\n }\n }\n window.addEventListener('message', handler, true);\n }\n\n // upgrade polyfill to use MutationObserver\n if(typeof MutationObserver !== 'undefined') {\n // polyfill with MutationObserver\n var now = Date.now();\n var attr = true;\n var div = document.createElement('div');\n var callbacks = [];\n new MutationObserver(function() {\n var copy = callbacks.slice();\n callbacks.length = 0;\n copy.forEach(function(callback) {\n callback();\n });\n }).observe(div, {attributes: true});\n var oldSetImmediate = util.setImmediate;\n util.setImmediate = function(callback) {\n if(Date.now() - now > 15) {\n now = Date.now();\n oldSetImmediate(callback);\n } else {\n callbacks.push(callback);\n // only trigger observer when it hasn't been triggered in\n // the current turn of the event loop\n if(callbacks.length === 1) {\n div.setAttribute('a', attr = !attr);\n }\n }\n };\n }\n\n util.nextTick = util.setImmediate;\n})();\n\n// check if running under Node.js\nutil.isNodejs =\n typeof process !== 'undefined' && process.versions && process.versions.node;\n\n\n// 'self' will also work in Web Workers (instance of WorkerGlobalScope) while\n// it will point to `window` in the main thread.\n// To remain compatible with older browsers, we fall back to 'window' if 
'self'\n// is not available.\nutil.globalScope = (function() {\n if(util.isNodejs) {\n return __webpack_require__.g;\n }\n\n return typeof self === 'undefined' ? window : self;\n})();\n\n// define isArray\nutil.isArray = Array.isArray || function(x) {\n return Object.prototype.toString.call(x) === '[object Array]';\n};\n\n// define isArrayBuffer\nutil.isArrayBuffer = function(x) {\n return typeof ArrayBuffer !== 'undefined' && x instanceof ArrayBuffer;\n};\n\n// define isArrayBufferView\nutil.isArrayBufferView = function(x) {\n return x && util.isArrayBuffer(x.buffer) && x.byteLength !== undefined;\n};\n\n/**\n * Ensure a bits param is 8, 16, 24, or 32. Used to validate input for\n * algorithms where bit manipulation, JavaScript limitations, and/or algorithm\n * design only allow for byte operations of a limited size.\n *\n * @param n number of bits.\n *\n * Throw Error if n invalid.\n */\nfunction _checkBitsParam(n) {\n if(!(n === 8 || n === 16 || n === 24 || n === 32)) {\n throw new Error('Only 8, 16, 24, or 32 bits supported: ' + n);\n }\n}\n\n// TODO: set ByteBuffer to best available backing\nutil.ByteBuffer = ByteStringBuffer;\n\n/** Buffer w/BinaryString backing */\n\n/**\n * Constructor for a binary string backed byte buffer.\n *\n * @param [b] the bytes to wrap (either encoded as string, one byte per\n * character, or as an ArrayBuffer or Typed Array).\n */\nfunction ByteStringBuffer(b) {\n // TODO: update to match DataBuffer API\n\n // the data in this buffer\n this.data = '';\n // the pointer for reading from this buffer\n this.read = 0;\n\n if(typeof b === 'string') {\n this.data = b;\n } else if(util.isArrayBuffer(b) || util.isArrayBufferView(b)) {\n if(typeof Buffer !== 'undefined' && b instanceof Buffer) {\n this.data = b.toString('binary');\n } else {\n // convert native buffer to forge buffer\n // FIXME: support native buffers internally instead\n var arr = new Uint8Array(b);\n try {\n this.data = String.fromCharCode.apply(null, arr);\n } catch(e) {\n for(var i = 0; i < arr.length; ++i) {\n this.putByte(arr[i]);\n }\n }\n }\n } else if(b instanceof ByteStringBuffer ||\n (typeof b === 'object' && typeof b.data === 'string' &&\n typeof b.read === 'number')) {\n // copy existing buffer\n this.data = b.data;\n this.read = b.read;\n }\n\n // used for v8 optimization\n this._constructedStringLength = 0;\n}\nutil.ByteStringBuffer = ByteStringBuffer;\n\n/* Note: This is an optimization for V8-based browsers. When V8 concatenates\n a string, the strings are only joined logically using a \"cons string\" or\n \"constructed/concatenated string\". These containers keep references to one\n another and can result in very large memory usage. For example, if a 2MB\n string is constructed by concatenating 4 bytes together at a time, the\n memory usage will be ~44MB; so ~22x increase. The strings are only joined\n together when an operation requiring their joining takes place, such as\n substr(). This function is called when adding data to this buffer to ensure\n these types of strings are periodically joined to reduce the memory\n footprint. 
*/\nvar _MAX_CONSTRUCTED_STRING_LENGTH = 4096;\nutil.ByteStringBuffer.prototype._optimizeConstructedString = function(x) {\n this._constructedStringLength += x;\n if(this._constructedStringLength > _MAX_CONSTRUCTED_STRING_LENGTH) {\n // this substr() should cause the constructed string to join\n this.data.substr(0, 1);\n this._constructedStringLength = 0;\n }\n};\n\n/**\n * Gets the number of bytes in this buffer.\n *\n * @return the number of bytes in this buffer.\n */\nutil.ByteStringBuffer.prototype.length = function() {\n return this.data.length - this.read;\n};\n\n/**\n * Gets whether or not this buffer is empty.\n *\n * @return true if this buffer is empty, false if not.\n */\nutil.ByteStringBuffer.prototype.isEmpty = function() {\n return this.length() <= 0;\n};\n\n/**\n * Puts a byte in this buffer.\n *\n * @param b the byte to put.\n *\n * @return this buffer.\n */\nutil.ByteStringBuffer.prototype.putByte = function(b) {\n return this.putBytes(String.fromCharCode(b));\n};\n\n/**\n * Puts a byte in this buffer N times.\n *\n * @param b the byte to put.\n * @param n the number of bytes of value b to put.\n *\n * @return this buffer.\n */\nutil.ByteStringBuffer.prototype.fillWithByte = function(b, n) {\n b = String.fromCharCode(b);\n var d = this.data;\n while(n > 0) {\n if(n & 1) {\n d += b;\n }\n n >>>= 1;\n if(n > 0) {\n b += b;\n }\n }\n this.data = d;\n this._optimizeConstructedString(n);\n return this;\n};\n\n/**\n * Puts bytes in this buffer.\n *\n * @param bytes the bytes (as a binary encoded string) to put.\n *\n * @return this buffer.\n */\nutil.ByteStringBuffer.prototype.putBytes = function(bytes) {\n this.data += bytes;\n this._optimizeConstructedString(bytes.length);\n return this;\n};\n\n/**\n * Puts a UTF-16 encoded string into this buffer.\n *\n * @param str the string to put.\n *\n * @return this buffer.\n */\nutil.ByteStringBuffer.prototype.putString = function(str) {\n return this.putBytes(util.encodeUtf8(str));\n};\n\n/**\n * Puts a 16-bit integer in this buffer in big-endian order.\n *\n * @param i the 16-bit integer.\n *\n * @return this buffer.\n */\nutil.ByteStringBuffer.prototype.putInt16 = function(i) {\n return this.putBytes(\n String.fromCharCode(i >> 8 & 0xFF) +\n String.fromCharCode(i & 0xFF));\n};\n\n/**\n * Puts a 24-bit integer in this buffer in big-endian order.\n *\n * @param i the 24-bit integer.\n *\n * @return this buffer.\n */\nutil.ByteStringBuffer.prototype.putInt24 = function(i) {\n return this.putBytes(\n String.fromCharCode(i >> 16 & 0xFF) +\n String.fromCharCode(i >> 8 & 0xFF) +\n String.fromCharCode(i & 0xFF));\n};\n\n/**\n * Puts a 32-bit integer in this buffer in big-endian order.\n *\n * @param i the 32-bit integer.\n *\n * @return this buffer.\n */\nutil.ByteStringBuffer.prototype.putInt32 = function(i) {\n return this.putBytes(\n String.fromCharCode(i >> 24 & 0xFF) +\n String.fromCharCode(i >> 16 & 0xFF) +\n String.fromCharCode(i >> 8 & 0xFF) +\n String.fromCharCode(i & 0xFF));\n};\n\n/**\n * Puts a 16-bit integer in this buffer in little-endian order.\n *\n * @param i the 16-bit integer.\n *\n * @return this buffer.\n */\nutil.ByteStringBuffer.prototype.putInt16Le = function(i) {\n return this.putBytes(\n String.fromCharCode(i & 0xFF) +\n String.fromCharCode(i >> 8 & 0xFF));\n};\n\n/**\n * Puts a 24-bit integer in this buffer in little-endian order.\n *\n * @param i the 24-bit integer.\n *\n * @return this buffer.\n */\nutil.ByteStringBuffer.prototype.putInt24Le = function(i) {\n return this.putBytes(\n String.fromCharCode(i & 0xFF) 
+\n String.fromCharCode(i >> 8 & 0xFF) +\n String.fromCharCode(i >> 16 & 0xFF));\n};\n\n/**\n * Puts a 32-bit integer in this buffer in little-endian order.\n *\n * @param i the 32-bit integer.\n *\n * @return this buffer.\n */\nutil.ByteStringBuffer.prototype.putInt32Le = function(i) {\n return this.putBytes(\n String.fromCharCode(i & 0xFF) +\n String.fromCharCode(i >> 8 & 0xFF) +\n String.fromCharCode(i >> 16 & 0xFF) +\n String.fromCharCode(i >> 24 & 0xFF));\n};\n\n/**\n * Puts an n-bit integer in this buffer in big-endian order.\n *\n * @param i the n-bit integer.\n * @param n the number of bits in the integer (8, 16, 24, or 32).\n *\n * @return this buffer.\n */\nutil.ByteStringBuffer.prototype.putInt = function(i, n) {\n _checkBitsParam(n);\n var bytes = '';\n do {\n n -= 8;\n bytes += String.fromCharCode((i >> n) & 0xFF);\n } while(n > 0);\n return this.putBytes(bytes);\n};\n\n/**\n * Puts a signed n-bit integer in this buffer in big-endian order. Two's\n * complement representation is used.\n *\n * @param i the n-bit integer.\n * @param n the number of bits in the integer (8, 16, 24, or 32).\n *\n * @return this buffer.\n */\nutil.ByteStringBuffer.prototype.putSignedInt = function(i, n) {\n // putInt checks n\n if(i < 0) {\n i += 2 << (n - 1);\n }\n return this.putInt(i, n);\n};\n\n/**\n * Puts the given buffer into this buffer.\n *\n * @param buffer the buffer to put into this one.\n *\n * @return this buffer.\n */\nutil.ByteStringBuffer.prototype.putBuffer = function(buffer) {\n return this.putBytes(buffer.getBytes());\n};\n\n/**\n * Gets a byte from this buffer and advances the read pointer by 1.\n *\n * @return the byte.\n */\nutil.ByteStringBuffer.prototype.getByte = function() {\n return this.data.charCodeAt(this.read++);\n};\n\n/**\n * Gets a uint16 from this buffer in big-endian order and advances the read\n * pointer by 2.\n *\n * @return the uint16.\n */\nutil.ByteStringBuffer.prototype.getInt16 = function() {\n var rval = (\n this.data.charCodeAt(this.read) << 8 ^\n this.data.charCodeAt(this.read + 1));\n this.read += 2;\n return rval;\n};\n\n/**\n * Gets a uint24 from this buffer in big-endian order and advances the read\n * pointer by 3.\n *\n * @return the uint24.\n */\nutil.ByteStringBuffer.prototype.getInt24 = function() {\n var rval = (\n this.data.charCodeAt(this.read) << 16 ^\n this.data.charCodeAt(this.read + 1) << 8 ^\n this.data.charCodeAt(this.read + 2));\n this.read += 3;\n return rval;\n};\n\n/**\n * Gets a uint32 from this buffer in big-endian order and advances the read\n * pointer by 4.\n *\n * @return the word.\n */\nutil.ByteStringBuffer.prototype.getInt32 = function() {\n var rval = (\n this.data.charCodeAt(this.read) << 24 ^\n this.data.charCodeAt(this.read + 1) << 16 ^\n this.data.charCodeAt(this.read + 2) << 8 ^\n this.data.charCodeAt(this.read + 3));\n this.read += 4;\n return rval;\n};\n\n/**\n * Gets a uint16 from this buffer in little-endian order and advances the read\n * pointer by 2.\n *\n * @return the uint16.\n */\nutil.ByteStringBuffer.prototype.getInt16Le = function() {\n var rval = (\n this.data.charCodeAt(this.read) ^\n this.data.charCodeAt(this.read + 1) << 8);\n this.read += 2;\n return rval;\n};\n\n/**\n * Gets a uint24 from this buffer in little-endian order and advances the read\n * pointer by 3.\n *\n * @return the uint24.\n */\nutil.ByteStringBuffer.prototype.getInt24Le = function() {\n var rval = (\n this.data.charCodeAt(this.read) ^\n this.data.charCodeAt(this.read + 1) << 8 ^\n this.data.charCodeAt(this.read + 2) << 16);\n 
this.read += 3;\n return rval;\n};\n\n/**\n * Gets a uint32 from this buffer in little-endian order and advances the read\n * pointer by 4.\n *\n * @return the word.\n */\nutil.ByteStringBuffer.prototype.getInt32Le = function() {\n var rval = (\n this.data.charCodeAt(this.read) ^\n this.data.charCodeAt(this.read + 1) << 8 ^\n this.data.charCodeAt(this.read + 2) << 16 ^\n this.data.charCodeAt(this.read + 3) << 24);\n this.read += 4;\n return rval;\n};\n\n/**\n * Gets an n-bit integer from this buffer in big-endian order and advances the\n * read pointer by ceil(n/8).\n *\n * @param n the number of bits in the integer (8, 16, 24, or 32).\n *\n * @return the integer.\n */\nutil.ByteStringBuffer.prototype.getInt = function(n) {\n _checkBitsParam(n);\n var rval = 0;\n do {\n // TODO: Use (rval * 0x100) if adding support for 33 to 53 bits.\n rval = (rval << 8) + this.data.charCodeAt(this.read++);\n n -= 8;\n } while(n > 0);\n return rval;\n};\n\n/**\n * Gets a signed n-bit integer from this buffer in big-endian order, using\n * two's complement, and advances the read pointer by n/8.\n *\n * @param n the number of bits in the integer (8, 16, 24, or 32).\n *\n * @return the integer.\n */\nutil.ByteStringBuffer.prototype.getSignedInt = function(n) {\n // getInt checks n\n var x = this.getInt(n);\n var max = 2 << (n - 2);\n if(x >= max) {\n x -= max << 1;\n }\n return x;\n};\n\n/**\n * Reads bytes out as a binary encoded string and clears them from the\n * buffer. Note that the resulting string is binary encoded (in node.js this\n * encoding is referred to as `binary`, it is *not* `utf8`).\n *\n * @param count the number of bytes to read, undefined or null for all.\n *\n * @return a binary encoded string of bytes.\n */\nutil.ByteStringBuffer.prototype.getBytes = function(count) {\n var rval;\n if(count) {\n // read count bytes\n count = Math.min(this.length(), count);\n rval = this.data.slice(this.read, this.read + count);\n this.read += count;\n } else if(count === 0) {\n rval = '';\n } else {\n // read all bytes, optimize to only copy when needed\n rval = (this.read === 0) ? 
this.data : this.data.slice(this.read);\n this.clear();\n }\n return rval;\n};\n\n/**\n * Gets a binary encoded string of the bytes from this buffer without\n * modifying the read pointer.\n *\n * @param count the number of bytes to get, omit to get all.\n *\n * @return a string full of binary encoded characters.\n */\nutil.ByteStringBuffer.prototype.bytes = function(count) {\n return (typeof(count) === 'undefined' ?\n this.data.slice(this.read) :\n this.data.slice(this.read, this.read + count));\n};\n\n/**\n * Gets a byte at the given index without modifying the read pointer.\n *\n * @param i the byte index.\n *\n * @return the byte.\n */\nutil.ByteStringBuffer.prototype.at = function(i) {\n return this.data.charCodeAt(this.read + i);\n};\n\n/**\n * Puts a byte at the given index without modifying the read pointer.\n *\n * @param i the byte index.\n * @param b the byte to put.\n *\n * @return this buffer.\n */\nutil.ByteStringBuffer.prototype.setAt = function(i, b) {\n this.data = this.data.substr(0, this.read + i) +\n String.fromCharCode(b) +\n this.data.substr(this.read + i + 1);\n return this;\n};\n\n/**\n * Gets the last byte without modifying the read pointer.\n *\n * @return the last byte.\n */\nutil.ByteStringBuffer.prototype.last = function() {\n return this.data.charCodeAt(this.data.length - 1);\n};\n\n/**\n * Creates a copy of this buffer.\n *\n * @return the copy.\n */\nutil.ByteStringBuffer.prototype.copy = function() {\n var c = util.createBuffer(this.data);\n c.read = this.read;\n return c;\n};\n\n/**\n * Compacts this buffer.\n *\n * @return this buffer.\n */\nutil.ByteStringBuffer.prototype.compact = function() {\n if(this.read > 0) {\n this.data = this.data.slice(this.read);\n this.read = 0;\n }\n return this;\n};\n\n/**\n * Clears this buffer.\n *\n * @return this buffer.\n */\nutil.ByteStringBuffer.prototype.clear = function() {\n this.data = '';\n this.read = 0;\n return this;\n};\n\n/**\n * Shortens this buffer by triming bytes off of the end of this buffer.\n *\n * @param count the number of bytes to trim off.\n *\n * @return this buffer.\n */\nutil.ByteStringBuffer.prototype.truncate = function(count) {\n var len = Math.max(0, this.length() - count);\n this.data = this.data.substr(this.read, len);\n this.read = 0;\n return this;\n};\n\n/**\n * Converts this buffer to a hexadecimal string.\n *\n * @return a hexadecimal string.\n */\nutil.ByteStringBuffer.prototype.toHex = function() {\n var rval = '';\n for(var i = this.read; i < this.data.length; ++i) {\n var b = this.data.charCodeAt(i);\n if(b < 16) {\n rval += '0';\n }\n rval += b.toString(16);\n }\n return rval;\n};\n\n/**\n * Converts this buffer to a UTF-16 string (standard JavaScript string).\n *\n * @return a UTF-16 string.\n */\nutil.ByteStringBuffer.prototype.toString = function() {\n return util.decodeUtf8(this.bytes());\n};\n\n/** End Buffer w/BinaryString backing */\n\n/** Buffer w/UInt8Array backing */\n\n/**\n * FIXME: Experimental. Do not use yet.\n *\n * Constructor for an ArrayBuffer-backed byte buffer.\n *\n * The buffer may be constructed from a string, an ArrayBuffer, DataView, or a\n * TypedArray.\n *\n * If a string is given, its encoding should be provided as an option,\n * otherwise it will default to 'binary'. A 'binary' string is encoded such\n * that each character is one byte in length and size.\n *\n * If an ArrayBuffer, DataView, or TypedArray is given, it will be used\n * *directly* without any copying. 
Note that, if a write to the buffer requires\n * more space, the buffer will allocate a new backing ArrayBuffer to\n * accommodate. The starting read and write offsets for the buffer may be\n * given as options.\n *\n * @param [b] the initial bytes for this buffer.\n * @param options the options to use:\n * [readOffset] the starting read offset to use (default: 0).\n * [writeOffset] the starting write offset to use (default: the\n * length of the first parameter).\n * [growSize] the minimum amount, in bytes, to grow the buffer by to\n * accommodate writes (default: 1024).\n * [encoding] the encoding ('binary', 'utf8', 'utf16', 'hex') for the\n * first parameter, if it is a string (default: 'binary').\n */\nfunction DataBuffer(b, options) {\n // default options\n options = options || {};\n\n // pointers for read from/write to buffer\n this.read = options.readOffset || 0;\n this.growSize = options.growSize || 1024;\n\n var isArrayBuffer = util.isArrayBuffer(b);\n var isArrayBufferView = util.isArrayBufferView(b);\n if(isArrayBuffer || isArrayBufferView) {\n // use ArrayBuffer directly\n if(isArrayBuffer) {\n this.data = new DataView(b);\n } else {\n // TODO: adjust read/write offset based on the type of view\n // or specify that this must be done in the options ... that the\n // offsets are byte-based\n this.data = new DataView(b.buffer, b.byteOffset, b.byteLength);\n }\n this.write = ('writeOffset' in options ?\n options.writeOffset : this.data.byteLength);\n return;\n }\n\n // initialize to empty array buffer and add any given bytes using putBytes\n this.data = new DataView(new ArrayBuffer(0));\n this.write = 0;\n\n if(b !== null && b !== undefined) {\n this.putBytes(b);\n }\n\n if('writeOffset' in options) {\n this.write = options.writeOffset;\n }\n}\nutil.DataBuffer = DataBuffer;\n\n/**\n * Gets the number of bytes in this buffer.\n *\n * @return the number of bytes in this buffer.\n */\nutil.DataBuffer.prototype.length = function() {\n return this.write - this.read;\n};\n\n/**\n * Gets whether or not this buffer is empty.\n *\n * @return true if this buffer is empty, false if not.\n */\nutil.DataBuffer.prototype.isEmpty = function() {\n return this.length() <= 0;\n};\n\n/**\n * Ensures this buffer has enough empty space to accommodate the given number\n * of bytes. An optional parameter may be given that indicates a minimum\n * amount to grow the buffer if necessary. 
If the parameter is not given,\n * the buffer will be grown by some previously-specified default amount\n * or heuristic.\n *\n * @param amount the number of bytes to accommodate.\n * @param [growSize] the minimum amount, in bytes, to grow the buffer by if\n * necessary.\n */\nutil.DataBuffer.prototype.accommodate = function(amount, growSize) {\n if(this.length() >= amount) {\n return this;\n }\n growSize = Math.max(growSize || this.growSize, amount);\n\n // grow buffer\n var src = new Uint8Array(\n this.data.buffer, this.data.byteOffset, this.data.byteLength);\n var dst = new Uint8Array(this.length() + growSize);\n dst.set(src);\n this.data = new DataView(dst.buffer);\n\n return this;\n};\n\n/**\n * Puts a byte in this buffer.\n *\n * @param b the byte to put.\n *\n * @return this buffer.\n */\nutil.DataBuffer.prototype.putByte = function(b) {\n this.accommodate(1);\n this.data.setUint8(this.write++, b);\n return this;\n};\n\n/**\n * Puts a byte in this buffer N times.\n *\n * @param b the byte to put.\n * @param n the number of bytes of value b to put.\n *\n * @return this buffer.\n */\nutil.DataBuffer.prototype.fillWithByte = function(b, n) {\n this.accommodate(n);\n for(var i = 0; i < n; ++i) {\n this.data.setUint8(b);\n }\n return this;\n};\n\n/**\n * Puts bytes in this buffer. The bytes may be given as a string, an\n * ArrayBuffer, a DataView, or a TypedArray.\n *\n * @param bytes the bytes to put.\n * @param [encoding] the encoding for the first parameter ('binary', 'utf8',\n * 'utf16', 'hex'), if it is a string (default: 'binary').\n *\n * @return this buffer.\n */\nutil.DataBuffer.prototype.putBytes = function(bytes, encoding) {\n if(util.isArrayBufferView(bytes)) {\n var src = new Uint8Array(bytes.buffer, bytes.byteOffset, bytes.byteLength);\n var len = src.byteLength - src.byteOffset;\n this.accommodate(len);\n var dst = new Uint8Array(this.data.buffer, this.write);\n dst.set(src);\n this.write += len;\n return this;\n }\n\n if(util.isArrayBuffer(bytes)) {\n var src = new Uint8Array(bytes);\n this.accommodate(src.byteLength);\n var dst = new Uint8Array(this.data.buffer);\n dst.set(src, this.write);\n this.write += src.byteLength;\n return this;\n }\n\n // bytes is a util.DataBuffer or equivalent\n if(bytes instanceof util.DataBuffer ||\n (typeof bytes === 'object' &&\n typeof bytes.read === 'number' && typeof bytes.write === 'number' &&\n util.isArrayBufferView(bytes.data))) {\n var src = new Uint8Array(bytes.data.byteLength, bytes.read, bytes.length());\n this.accommodate(src.byteLength);\n var dst = new Uint8Array(bytes.data.byteLength, this.write);\n dst.set(src);\n this.write += src.byteLength;\n return this;\n }\n\n if(bytes instanceof util.ByteStringBuffer) {\n // copy binary string and process as the same as a string parameter below\n bytes = bytes.data;\n encoding = 'binary';\n }\n\n // string conversion\n encoding = encoding || 'binary';\n if(typeof bytes === 'string') {\n var view;\n\n // decode from string\n if(encoding === 'hex') {\n this.accommodate(Math.ceil(bytes.length / 2));\n view = new Uint8Array(this.data.buffer, this.write);\n this.write += util.binary.hex.decode(bytes, view, this.write);\n return this;\n }\n if(encoding === 'base64') {\n this.accommodate(Math.ceil(bytes.length / 4) * 3);\n view = new Uint8Array(this.data.buffer, this.write);\n this.write += util.binary.base64.decode(bytes, view, this.write);\n return this;\n }\n\n // encode text as UTF-8 bytes\n if(encoding === 'utf8') {\n // encode as UTF-8 then decode string as raw binary\n bytes = 
util.encodeUtf8(bytes);\n encoding = 'binary';\n }\n\n // decode string as raw binary\n if(encoding === 'binary' || encoding === 'raw') {\n // one byte per character\n this.accommodate(bytes.length);\n view = new Uint8Array(this.data.buffer, this.write);\n this.write += util.binary.raw.decode(view);\n return this;\n }\n\n // encode text as UTF-16 bytes\n if(encoding === 'utf16') {\n // two bytes per character\n this.accommodate(bytes.length * 2);\n view = new Uint16Array(this.data.buffer, this.write);\n this.write += util.text.utf16.encode(view);\n return this;\n }\n\n throw new Error('Invalid encoding: ' + encoding);\n }\n\n throw Error('Invalid parameter: ' + bytes);\n};\n\n/**\n * Puts the given buffer into this buffer.\n *\n * @param buffer the buffer to put into this one.\n *\n * @return this buffer.\n */\nutil.DataBuffer.prototype.putBuffer = function(buffer) {\n this.putBytes(buffer);\n buffer.clear();\n return this;\n};\n\n/**\n * Puts a string into this buffer.\n *\n * @param str the string to put.\n * @param [encoding] the encoding for the string (default: 'utf16').\n *\n * @return this buffer.\n */\nutil.DataBuffer.prototype.putString = function(str) {\n return this.putBytes(str, 'utf16');\n};\n\n/**\n * Puts a 16-bit integer in this buffer in big-endian order.\n *\n * @param i the 16-bit integer.\n *\n * @return this buffer.\n */\nutil.DataBuffer.prototype.putInt16 = function(i) {\n this.accommodate(2);\n this.data.setInt16(this.write, i);\n this.write += 2;\n return this;\n};\n\n/**\n * Puts a 24-bit integer in this buffer in big-endian order.\n *\n * @param i the 24-bit integer.\n *\n * @return this buffer.\n */\nutil.DataBuffer.prototype.putInt24 = function(i) {\n this.accommodate(3);\n this.data.setInt16(this.write, i >> 8 & 0xFFFF);\n this.data.setInt8(this.write, i >> 16 & 0xFF);\n this.write += 3;\n return this;\n};\n\n/**\n * Puts a 32-bit integer in this buffer in big-endian order.\n *\n * @param i the 32-bit integer.\n *\n * @return this buffer.\n */\nutil.DataBuffer.prototype.putInt32 = function(i) {\n this.accommodate(4);\n this.data.setInt32(this.write, i);\n this.write += 4;\n return this;\n};\n\n/**\n * Puts a 16-bit integer in this buffer in little-endian order.\n *\n * @param i the 16-bit integer.\n *\n * @return this buffer.\n */\nutil.DataBuffer.prototype.putInt16Le = function(i) {\n this.accommodate(2);\n this.data.setInt16(this.write, i, true);\n this.write += 2;\n return this;\n};\n\n/**\n * Puts a 24-bit integer in this buffer in little-endian order.\n *\n * @param i the 24-bit integer.\n *\n * @return this buffer.\n */\nutil.DataBuffer.prototype.putInt24Le = function(i) {\n this.accommodate(3);\n this.data.setInt8(this.write, i >> 16 & 0xFF);\n this.data.setInt16(this.write, i >> 8 & 0xFFFF, true);\n this.write += 3;\n return this;\n};\n\n/**\n * Puts a 32-bit integer in this buffer in little-endian order.\n *\n * @param i the 32-bit integer.\n *\n * @return this buffer.\n */\nutil.DataBuffer.prototype.putInt32Le = function(i) {\n this.accommodate(4);\n this.data.setInt32(this.write, i, true);\n this.write += 4;\n return this;\n};\n\n/**\n * Puts an n-bit integer in this buffer in big-endian order.\n *\n * @param i the n-bit integer.\n * @param n the number of bits in the integer (8, 16, 24, or 32).\n *\n * @return this buffer.\n */\nutil.DataBuffer.prototype.putInt = function(i, n) {\n _checkBitsParam(n);\n this.accommodate(n / 8);\n do {\n n -= 8;\n this.data.setInt8(this.write++, (i >> n) & 0xFF);\n } while(n > 0);\n return this;\n};\n\n/**\n * Puts a 
signed n-bit integer in this buffer in big-endian order. Two's\n * complement representation is used.\n *\n * @param i the n-bit integer.\n * @param n the number of bits in the integer.\n *\n * @return this buffer.\n */\nutil.DataBuffer.prototype.putSignedInt = function(i, n) {\n _checkBitsParam(n);\n this.accommodate(n / 8);\n if(i < 0) {\n i += 2 << (n - 1);\n }\n return this.putInt(i, n);\n};\n\n/**\n * Gets a byte from this buffer and advances the read pointer by 1.\n *\n * @return the byte.\n */\nutil.DataBuffer.prototype.getByte = function() {\n return this.data.getInt8(this.read++);\n};\n\n/**\n * Gets a uint16 from this buffer in big-endian order and advances the read\n * pointer by 2.\n *\n * @return the uint16.\n */\nutil.DataBuffer.prototype.getInt16 = function() {\n var rval = this.data.getInt16(this.read);\n this.read += 2;\n return rval;\n};\n\n/**\n * Gets a uint24 from this buffer in big-endian order and advances the read\n * pointer by 3.\n *\n * @return the uint24.\n */\nutil.DataBuffer.prototype.getInt24 = function() {\n var rval = (\n this.data.getInt16(this.read) << 8 ^\n this.data.getInt8(this.read + 2));\n this.read += 3;\n return rval;\n};\n\n/**\n * Gets a uint32 from this buffer in big-endian order and advances the read\n * pointer by 4.\n *\n * @return the word.\n */\nutil.DataBuffer.prototype.getInt32 = function() {\n var rval = this.data.getInt32(this.read);\n this.read += 4;\n return rval;\n};\n\n/**\n * Gets a uint16 from this buffer in little-endian order and advances the read\n * pointer by 2.\n *\n * @return the uint16.\n */\nutil.DataBuffer.prototype.getInt16Le = function() {\n var rval = this.data.getInt16(this.read, true);\n this.read += 2;\n return rval;\n};\n\n/**\n * Gets a uint24 from this buffer in little-endian order and advances the read\n * pointer by 3.\n *\n * @return the uint24.\n */\nutil.DataBuffer.prototype.getInt24Le = function() {\n var rval = (\n this.data.getInt8(this.read) ^\n this.data.getInt16(this.read + 1, true) << 8);\n this.read += 3;\n return rval;\n};\n\n/**\n * Gets a uint32 from this buffer in little-endian order and advances the read\n * pointer by 4.\n *\n * @return the word.\n */\nutil.DataBuffer.prototype.getInt32Le = function() {\n var rval = this.data.getInt32(this.read, true);\n this.read += 4;\n return rval;\n};\n\n/**\n * Gets an n-bit integer from this buffer in big-endian order and advances the\n * read pointer by n/8.\n *\n * @param n the number of bits in the integer (8, 16, 24, or 32).\n *\n * @return the integer.\n */\nutil.DataBuffer.prototype.getInt = function(n) {\n _checkBitsParam(n);\n var rval = 0;\n do {\n // TODO: Use (rval * 0x100) if adding support for 33 to 53 bits.\n rval = (rval << 8) + this.data.getInt8(this.read++);\n n -= 8;\n } while(n > 0);\n return rval;\n};\n\n/**\n * Gets a signed n-bit integer from this buffer in big-endian order, using\n * two's complement, and advances the read pointer by n/8.\n *\n * @param n the number of bits in the integer (8, 16, 24, or 32).\n *\n * @return the integer.\n */\nutil.DataBuffer.prototype.getSignedInt = function(n) {\n // getInt checks n\n var x = this.getInt(n);\n var max = 2 << (n - 2);\n if(x >= max) {\n x -= max << 1;\n }\n return x;\n};\n\n/**\n * Reads bytes out as a binary encoded string and clears them from the\n * buffer.\n *\n * @param count the number of bytes to read, undefined or null for all.\n *\n * @return a binary encoded string of bytes.\n */\nutil.DataBuffer.prototype.getBytes = function(count) {\n // TODO: deprecate this method, it 
is poorly named and\n // this.toString('binary') replaces it\n // add a toTypedArray()/toArrayBuffer() function\n var rval;\n if(count) {\n // read count bytes\n count = Math.min(this.length(), count);\n rval = this.data.slice(this.read, this.read + count);\n this.read += count;\n } else if(count === 0) {\n rval = '';\n } else {\n // read all bytes, optimize to only copy when needed\n rval = (this.read === 0) ? this.data : this.data.slice(this.read);\n this.clear();\n }\n return rval;\n};\n\n/**\n * Gets a binary encoded string of the bytes from this buffer without\n * modifying the read pointer.\n *\n * @param count the number of bytes to get, omit to get all.\n *\n * @return a string full of binary encoded characters.\n */\nutil.DataBuffer.prototype.bytes = function(count) {\n // TODO: deprecate this method, it is poorly named, add \"getString()\"\n return (typeof(count) === 'undefined' ?\n this.data.slice(this.read) :\n this.data.slice(this.read, this.read + count));\n};\n\n/**\n * Gets a byte at the given index without modifying the read pointer.\n *\n * @param i the byte index.\n *\n * @return the byte.\n */\nutil.DataBuffer.prototype.at = function(i) {\n return this.data.getUint8(this.read + i);\n};\n\n/**\n * Puts a byte at the given index without modifying the read pointer.\n *\n * @param i the byte index.\n * @param b the byte to put.\n *\n * @return this buffer.\n */\nutil.DataBuffer.prototype.setAt = function(i, b) {\n this.data.setUint8(i, b);\n return this;\n};\n\n/**\n * Gets the last byte without modifying the read pointer.\n *\n * @return the last byte.\n */\nutil.DataBuffer.prototype.last = function() {\n return this.data.getUint8(this.write - 1);\n};\n\n/**\n * Creates a copy of this buffer.\n *\n * @return the copy.\n */\nutil.DataBuffer.prototype.copy = function() {\n return new util.DataBuffer(this);\n};\n\n/**\n * Compacts this buffer.\n *\n * @return this buffer.\n */\nutil.DataBuffer.prototype.compact = function() {\n if(this.read > 0) {\n var src = new Uint8Array(this.data.buffer, this.read);\n var dst = new Uint8Array(src.byteLength);\n dst.set(src);\n this.data = new DataView(dst);\n this.write -= this.read;\n this.read = 0;\n }\n return this;\n};\n\n/**\n * Clears this buffer.\n *\n * @return this buffer.\n */\nutil.DataBuffer.prototype.clear = function() {\n this.data = new DataView(new ArrayBuffer(0));\n this.read = this.write = 0;\n return this;\n};\n\n/**\n * Shortens this buffer by triming bytes off of the end of this buffer.\n *\n * @param count the number of bytes to trim off.\n *\n * @return this buffer.\n */\nutil.DataBuffer.prototype.truncate = function(count) {\n this.write = Math.max(0, this.length() - count);\n this.read = Math.min(this.read, this.write);\n return this;\n};\n\n/**\n * Converts this buffer to a hexadecimal string.\n *\n * @return a hexadecimal string.\n */\nutil.DataBuffer.prototype.toHex = function() {\n var rval = '';\n for(var i = this.read; i < this.data.byteLength; ++i) {\n var b = this.data.getUint8(i);\n if(b < 16) {\n rval += '0';\n }\n rval += b.toString(16);\n }\n return rval;\n};\n\n/**\n * Converts this buffer to a string, using the given encoding. 
If no\n * encoding is given, 'utf8' (UTF-8) is used.\n *\n * @param [encoding] the encoding to use: 'binary', 'utf8', 'utf16', 'hex',\n * 'base64' (default: 'utf8').\n *\n * @return a string representation of the bytes in this buffer.\n */\nutil.DataBuffer.prototype.toString = function(encoding) {\n var view = new Uint8Array(this.data, this.read, this.length());\n encoding = encoding || 'utf8';\n\n // encode to string\n if(encoding === 'binary' || encoding === 'raw') {\n return util.binary.raw.encode(view);\n }\n if(encoding === 'hex') {\n return util.binary.hex.encode(view);\n }\n if(encoding === 'base64') {\n return util.binary.base64.encode(view);\n }\n\n // decode to text\n if(encoding === 'utf8') {\n return util.text.utf8.decode(view);\n }\n if(encoding === 'utf16') {\n return util.text.utf16.decode(view);\n }\n\n throw new Error('Invalid encoding: ' + encoding);\n};\n\n/** End Buffer w/UInt8Array backing */\n\n/**\n * Creates a buffer that stores bytes. A value may be given to populate the\n * buffer with data. This value can either be string of encoded bytes or a\n * regular string of characters. When passing a string of binary encoded\n * bytes, the encoding `raw` should be given. This is also the default. When\n * passing a string of characters, the encoding `utf8` should be given.\n *\n * @param [input] a string with encoded bytes to store in the buffer.\n * @param [encoding] (default: 'raw', other: 'utf8').\n */\nutil.createBuffer = function(input, encoding) {\n // TODO: deprecate, use new ByteBuffer() instead\n encoding = encoding || 'raw';\n if(input !== undefined && encoding === 'utf8') {\n input = util.encodeUtf8(input);\n }\n return new util.ByteBuffer(input);\n};\n\n/**\n * Fills a string with a particular value. If you want the string to be a byte\n * string, pass in String.fromCharCode(theByte).\n *\n * @param c the character to fill the string with, use String.fromCharCode\n * to fill the string with a byte value.\n * @param n the number of characters of value c to fill with.\n *\n * @return the filled string.\n */\nutil.fillString = function(c, n) {\n var s = '';\n while(n > 0) {\n if(n & 1) {\n s += c;\n }\n n >>>= 1;\n if(n > 0) {\n c += c;\n }\n }\n return s;\n};\n\n/**\n * Performs a per byte XOR between two byte strings and returns the result as a\n * string of bytes.\n *\n * @param s1 first string of bytes.\n * @param s2 second string of bytes.\n * @param n the number of bytes to XOR.\n *\n * @return the XOR'd result.\n */\nutil.xorBytes = function(s1, s2, n) {\n var s3 = '';\n var b = '';\n var t = '';\n var i = 0;\n var c = 0;\n for(; n > 0; --n, ++i) {\n b = s1.charCodeAt(i) ^ s2.charCodeAt(i);\n if(c >= 10) {\n s3 += t;\n t = '';\n c = 0;\n }\n t += String.fromCharCode(b);\n ++c;\n }\n s3 += t;\n return s3;\n};\n\n/**\n * Converts a hex string into a 'binary' encoded string of bytes.\n *\n * @param hex the hexadecimal string to convert.\n *\n * @return the binary-encoded string of bytes.\n */\nutil.hexToBytes = function(hex) {\n // TODO: deprecate: \"Deprecated. 
Use util.binary.hex.decode instead.\"\n var rval = '';\n var i = 0;\n if(hex.length & 1 == 1) {\n // odd number of characters, convert first character alone\n i = 1;\n rval += String.fromCharCode(parseInt(hex[0], 16));\n }\n // convert 2 characters (1 byte) at a time\n for(; i < hex.length; i += 2) {\n rval += String.fromCharCode(parseInt(hex.substr(i, 2), 16));\n }\n return rval;\n};\n\n/**\n * Converts a 'binary' encoded string of bytes to hex.\n *\n * @param bytes the byte string to convert.\n *\n * @return the string of hexadecimal characters.\n */\nutil.bytesToHex = function(bytes) {\n // TODO: deprecate: \"Deprecated. Use util.binary.hex.encode instead.\"\n return util.createBuffer(bytes).toHex();\n};\n\n/**\n * Converts an 32-bit integer to 4-big-endian byte string.\n *\n * @param i the integer.\n *\n * @return the byte string.\n */\nutil.int32ToBytes = function(i) {\n return (\n String.fromCharCode(i >> 24 & 0xFF) +\n String.fromCharCode(i >> 16 & 0xFF) +\n String.fromCharCode(i >> 8 & 0xFF) +\n String.fromCharCode(i & 0xFF));\n};\n\n// base64 characters, reverse mapping\nvar _base64 =\n 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/=';\nvar _base64Idx = [\n/*43 -43 = 0*/\n/*'+', 1, 2, 3,'/' */\n 62, -1, -1, -1, 63,\n\n/*'0','1','2','3','4','5','6','7','8','9' */\n 52, 53, 54, 55, 56, 57, 58, 59, 60, 61,\n\n/*15, 16, 17,'=', 19, 20, 21 */\n -1, -1, -1, 64, -1, -1, -1,\n\n/*65 - 43 = 22*/\n/*'A','B','C','D','E','F','G','H','I','J','K','L','M', */\n 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12,\n\n/*'N','O','P','Q','R','S','T','U','V','W','X','Y','Z' */\n 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25,\n\n/*91 - 43 = 48 */\n/*48, 49, 50, 51, 52, 53 */\n -1, -1, -1, -1, -1, -1,\n\n/*97 - 43 = 54*/\n/*'a','b','c','d','e','f','g','h','i','j','k','l','m' */\n 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38,\n\n/*'n','o','p','q','r','s','t','u','v','w','x','y','z' */\n 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51\n];\n\n// base58 characters (Bitcoin alphabet)\nvar _base58 = '123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz';\n\n/**\n * Base64 encodes a 'binary' encoded string of bytes.\n *\n * @param input the binary encoded string of bytes to base64-encode.\n * @param maxline the maximum number of encoded characters per line to use,\n * defaults to none.\n *\n * @return the base64-encoded output.\n */\nutil.encode64 = function(input, maxline) {\n // TODO: deprecate: \"Deprecated. Use util.binary.base64.encode instead.\"\n var line = '';\n var output = '';\n var chr1, chr2, chr3;\n var i = 0;\n while(i < input.length) {\n chr1 = input.charCodeAt(i++);\n chr2 = input.charCodeAt(i++);\n chr3 = input.charCodeAt(i++);\n\n // encode 4 character group\n line += _base64.charAt(chr1 >> 2);\n line += _base64.charAt(((chr1 & 3) << 4) | (chr2 >> 4));\n if(isNaN(chr2)) {\n line += '==';\n } else {\n line += _base64.charAt(((chr2 & 15) << 2) | (chr3 >> 6));\n line += isNaN(chr3) ? '=' : _base64.charAt(chr3 & 63);\n }\n\n if(maxline && line.length > maxline) {\n output += line.substr(0, maxline) + '\\r\\n';\n line = line.substr(maxline);\n }\n }\n output += line;\n return output;\n};\n\n/**\n * Base64 decodes a string into a 'binary' encoded string of bytes.\n *\n * @param input the base64-encoded input.\n *\n * @return the binary encoded string.\n */\nutil.decode64 = function(input) {\n // TODO: deprecate: \"Deprecated. 
Use util.binary.base64.decode instead.\"\n\n // remove all non-base64 characters\n input = input.replace(/[^A-Za-z0-9\\+\\/\\=]/g, '');\n\n var output = '';\n var enc1, enc2, enc3, enc4;\n var i = 0;\n\n while(i < input.length) {\n enc1 = _base64Idx[input.charCodeAt(i++) - 43];\n enc2 = _base64Idx[input.charCodeAt(i++) - 43];\n enc3 = _base64Idx[input.charCodeAt(i++) - 43];\n enc4 = _base64Idx[input.charCodeAt(i++) - 43];\n\n output += String.fromCharCode((enc1 << 2) | (enc2 >> 4));\n if(enc3 !== 64) {\n // decoded at least 2 bytes\n output += String.fromCharCode(((enc2 & 15) << 4) | (enc3 >> 2));\n if(enc4 !== 64) {\n // decoded 3 bytes\n output += String.fromCharCode(((enc3 & 3) << 6) | enc4);\n }\n }\n }\n\n return output;\n};\n\n/**\n * Encodes the given string of characters (a standard JavaScript\n * string) as a binary encoded string where the bytes represent\n * a UTF-8 encoded string of characters. Non-ASCII characters will be\n * encoded as multiple bytes according to UTF-8.\n *\n * @param str a standard string of characters to encode.\n *\n * @return the binary encoded string.\n */\nutil.encodeUtf8 = function(str) {\n return unescape(encodeURIComponent(str));\n};\n\n/**\n * Decodes a binary encoded string that contains bytes that\n * represent a UTF-8 encoded string of characters -- into a\n * string of characters (a standard JavaScript string).\n *\n * @param str the binary encoded string to decode.\n *\n * @return the resulting standard string of characters.\n */\nutil.decodeUtf8 = function(str) {\n return decodeURIComponent(escape(str));\n};\n\n// binary encoding/decoding tools\n// FIXME: Experimental. Do not use yet.\nutil.binary = {\n raw: {},\n hex: {},\n base64: {},\n base58: {},\n baseN : {\n encode: baseN.encode,\n decode: baseN.decode\n }\n};\n\n/**\n * Encodes a Uint8Array as a binary-encoded string. This encoding uses\n * a value between 0 and 255 for each character.\n *\n * @param bytes the Uint8Array to encode.\n *\n * @return the binary-encoded string.\n */\nutil.binary.raw.encode = function(bytes) {\n return String.fromCharCode.apply(null, bytes);\n};\n\n/**\n * Decodes a binary-encoded string to a Uint8Array. This encoding uses\n * a value between 0 and 255 for each character.\n *\n * @param str the binary-encoded string to decode.\n * @param [output] an optional Uint8Array to write the output to; if it\n * is too small, an exception will be thrown.\n * @param [offset] the start offset for writing to the output (default: 0).\n *\n * @return the Uint8Array or the number of bytes written if output was given.\n */\nutil.binary.raw.decode = function(str, output, offset) {\n var out = output;\n if(!out) {\n out = new Uint8Array(str.length);\n }\n offset = offset || 0;\n var j = offset;\n for(var i = 0; i < str.length; ++i) {\n out[j++] = str.charCodeAt(i);\n }\n return output ? 
(j - offset) : out;\n};\n\n/**\n * Encodes a 'binary' string, ArrayBuffer, DataView, TypedArray, or\n * ByteBuffer as a string of hexadecimal characters.\n *\n * @param bytes the bytes to convert.\n *\n * @return the string of hexadecimal characters.\n */\nutil.binary.hex.encode = util.bytesToHex;\n\n/**\n * Decodes a hex-encoded string to a Uint8Array.\n *\n * @param hex the hexadecimal string to convert.\n * @param [output] an optional Uint8Array to write the output to; if it\n * is too small, an exception will be thrown.\n * @param [offset] the start offset for writing to the output (default: 0).\n *\n * @return the Uint8Array or the number of bytes written if output was given.\n */\nutil.binary.hex.decode = function(hex, output, offset) {\n var out = output;\n if(!out) {\n out = new Uint8Array(Math.ceil(hex.length / 2));\n }\n offset = offset || 0;\n var i = 0, j = offset;\n if(hex.length & 1) {\n // odd number of characters, convert first character alone\n i = 1;\n out[j++] = parseInt(hex[0], 16);\n }\n // convert 2 characters (1 byte) at a time\n for(; i < hex.length; i += 2) {\n out[j++] = parseInt(hex.substr(i, 2), 16);\n }\n return output ? (j - offset) : out;\n};\n\n/**\n * Base64-encodes a Uint8Array.\n *\n * @param input the Uint8Array to encode.\n * @param maxline the maximum number of encoded characters per line to use,\n * defaults to none.\n *\n * @return the base64-encoded output string.\n */\nutil.binary.base64.encode = function(input, maxline) {\n var line = '';\n var output = '';\n var chr1, chr2, chr3;\n var i = 0;\n while(i < input.byteLength) {\n chr1 = input[i++];\n chr2 = input[i++];\n chr3 = input[i++];\n\n // encode 4 character group\n line += _base64.charAt(chr1 >> 2);\n line += _base64.charAt(((chr1 & 3) << 4) | (chr2 >> 4));\n if(isNaN(chr2)) {\n line += '==';\n } else {\n line += _base64.charAt(((chr2 & 15) << 2) | (chr3 >> 6));\n line += isNaN(chr3) ? '=' : _base64.charAt(chr3 & 63);\n }\n\n if(maxline && line.length > maxline) {\n output += line.substr(0, maxline) + '\\r\\n';\n line = line.substr(maxline);\n }\n }\n output += line;\n return output;\n};\n\n/**\n * Decodes a base64-encoded string to a Uint8Array.\n *\n * @param input the base64-encoded input string.\n * @param [output] an optional Uint8Array to write the output to; if it\n * is too small, an exception will be thrown.\n * @param [offset] the start offset for writing to the output (default: 0).\n *\n * @return the Uint8Array or the number of bytes written if output was given.\n */\nutil.binary.base64.decode = function(input, output, offset) {\n var out = output;\n if(!out) {\n out = new Uint8Array(Math.ceil(input.length / 4) * 3);\n }\n\n // remove all non-base64 characters\n input = input.replace(/[^A-Za-z0-9\\+\\/\\=]/g, '');\n\n offset = offset || 0;\n var enc1, enc2, enc3, enc4;\n var i = 0, j = offset;\n\n while(i < input.length) {\n enc1 = _base64Idx[input.charCodeAt(i++) - 43];\n enc2 = _base64Idx[input.charCodeAt(i++) - 43];\n enc3 = _base64Idx[input.charCodeAt(i++) - 43];\n enc4 = _base64Idx[input.charCodeAt(i++) - 43];\n\n out[j++] = (enc1 << 2) | (enc2 >> 4);\n if(enc3 !== 64) {\n // decoded at least 2 bytes\n out[j++] = ((enc2 & 15) << 4) | (enc3 >> 2);\n if(enc4 !== 64) {\n // decoded 3 bytes\n out[j++] = ((enc3 & 3) << 6) | enc4;\n }\n }\n }\n\n // make sure result is the exact decoded length\n return output ? 
(j - offset) : out.subarray(0, j);\n};\n\n// add support for base58 encoding/decoding with Bitcoin alphabet\nutil.binary.base58.encode = function(input, maxline) {\n return util.binary.baseN.encode(input, _base58, maxline);\n};\nutil.binary.base58.decode = function(input, maxline) {\n return util.binary.baseN.decode(input, _base58, maxline);\n};\n\n// text encoding/decoding tools\n// FIXME: Experimental. Do not use yet.\nutil.text = {\n utf8: {},\n utf16: {}\n};\n\n/**\n * Encodes the given string as UTF-8 in a Uint8Array.\n *\n * @param str the string to encode.\n * @param [output] an optional Uint8Array to write the output to; if it\n * is too small, an exception will be thrown.\n * @param [offset] the start offset for writing to the output (default: 0).\n *\n * @return the Uint8Array or the number of bytes written if output was given.\n */\nutil.text.utf8.encode = function(str, output, offset) {\n str = util.encodeUtf8(str);\n var out = output;\n if(!out) {\n out = new Uint8Array(str.length);\n }\n offset = offset || 0;\n var j = offset;\n for(var i = 0; i < str.length; ++i) {\n out[j++] = str.charCodeAt(i);\n }\n return output ? (j - offset) : out;\n};\n\n/**\n * Decodes the UTF-8 contents from a Uint8Array.\n *\n * @param bytes the Uint8Array to decode.\n *\n * @return the resulting string.\n */\nutil.text.utf8.decode = function(bytes) {\n return util.decodeUtf8(String.fromCharCode.apply(null, bytes));\n};\n\n/**\n * Encodes the given string as UTF-16 in a Uint8Array.\n *\n * @param str the string to encode.\n * @param [output] an optional Uint8Array to write the output to; if it\n * is too small, an exception will be thrown.\n * @param [offset] the start offset for writing to the output (default: 0).\n *\n * @return the Uint8Array or the number of bytes written if output was given.\n */\nutil.text.utf16.encode = function(str, output, offset) {\n var out = output;\n if(!out) {\n out = new Uint8Array(str.length * 2);\n }\n var view = new Uint16Array(out.buffer);\n offset = offset || 0;\n var j = offset;\n var k = offset;\n for(var i = 0; i < str.length; ++i) {\n view[k++] = str.charCodeAt(i);\n j += 2;\n }\n return output ? 
(j - offset) : out;\n};\n\n/**\n * Decodes the UTF-16 contents from a Uint8Array.\n *\n * @param bytes the Uint8Array to decode.\n *\n * @return the resulting string.\n */\nutil.text.utf16.decode = function(bytes) {\n return String.fromCharCode.apply(null, new Uint16Array(bytes.buffer));\n};\n\n/**\n * Deflates the given data using a flash interface.\n *\n * @param api the flash interface.\n * @param bytes the data.\n * @param raw true to return only raw deflate data, false to include zlib\n * header and trailer.\n *\n * @return the deflated data as a string.\n */\nutil.deflate = function(api, bytes, raw) {\n bytes = util.decode64(api.deflate(util.encode64(bytes)).rval);\n\n // strip zlib header and trailer if necessary\n if(raw) {\n // zlib header is 2 bytes (CMF,FLG) where FLG indicates that\n // there is a 4-byte DICT (alder-32) block before the data if\n // its 5th bit is set\n var start = 2;\n var flg = bytes.charCodeAt(1);\n if(flg & 0x20) {\n start = 6;\n }\n // zlib trailer is 4 bytes of adler-32\n bytes = bytes.substring(start, bytes.length - 4);\n }\n\n return bytes;\n};\n\n/**\n * Inflates the given data using a flash interface.\n *\n * @param api the flash interface.\n * @param bytes the data.\n * @param raw true if the incoming data has no zlib header or trailer and is\n * raw DEFLATE data.\n *\n * @return the inflated data as a string, null on error.\n */\nutil.inflate = function(api, bytes, raw) {\n // TODO: add zlib header and trailer if necessary/possible\n var rval = api.inflate(util.encode64(bytes)).rval;\n return (rval === null) ? null : util.decode64(rval);\n};\n\n/**\n * Sets a storage object.\n *\n * @param api the storage interface.\n * @param id the storage ID to use.\n * @param obj the storage object, null to remove.\n */\nvar _setStorageObject = function(api, id, obj) {\n if(!api) {\n throw new Error('WebStorage not available.');\n }\n\n var rval;\n if(obj === null) {\n rval = api.removeItem(id);\n } else {\n // json-encode and base64-encode object\n obj = util.encode64(JSON.stringify(obj));\n rval = api.setItem(id, obj);\n }\n\n // handle potential flash error\n if(typeof(rval) !== 'undefined' && rval.rval !== true) {\n var error = new Error(rval.error.message);\n error.id = rval.error.id;\n error.name = rval.error.name;\n throw error;\n }\n};\n\n/**\n * Gets a storage object.\n *\n * @param api the storage interface.\n * @param id the storage ID to use.\n *\n * @return the storage object entry or null if none exists.\n */\nvar _getStorageObject = function(api, id) {\n if(!api) {\n throw new Error('WebStorage not available.');\n }\n\n // get the existing entry\n var rval = api.getItem(id);\n\n /* Note: We check api.init because we can't do (api == localStorage)\n on IE because of \"Class doesn't support Automation\" exception. Only\n the flash api has an init method so this works too, but we need a\n better solution in the future. 
*/\n\n // flash returns item wrapped in an object, handle special case\n if(api.init) {\n if(rval.rval === null) {\n if(rval.error) {\n var error = new Error(rval.error.message);\n error.id = rval.error.id;\n error.name = rval.error.name;\n throw error;\n }\n // no error, but also no item\n rval = null;\n } else {\n rval = rval.rval;\n }\n }\n\n // handle decoding\n if(rval !== null) {\n // base64-decode and json-decode data\n rval = JSON.parse(util.decode64(rval));\n }\n\n return rval;\n};\n\n/**\n * Stores an item in local storage.\n *\n * @param api the storage interface.\n * @param id the storage ID to use.\n * @param key the key for the item.\n * @param data the data for the item (any javascript object/primitive).\n */\nvar _setItem = function(api, id, key, data) {\n // get storage object\n var obj = _getStorageObject(api, id);\n if(obj === null) {\n // create a new storage object\n obj = {};\n }\n // update key\n obj[key] = data;\n\n // set storage object\n _setStorageObject(api, id, obj);\n};\n\n/**\n * Gets an item from local storage.\n *\n * @param api the storage interface.\n * @param id the storage ID to use.\n * @param key the key for the item.\n *\n * @return the item.\n */\nvar _getItem = function(api, id, key) {\n // get storage object\n var rval = _getStorageObject(api, id);\n if(rval !== null) {\n // return data at key\n rval = (key in rval) ? rval[key] : null;\n }\n\n return rval;\n};\n\n/**\n * Removes an item from local storage.\n *\n * @param api the storage interface.\n * @param id the storage ID to use.\n * @param key the key for the item.\n */\nvar _removeItem = function(api, id, key) {\n // get storage object\n var obj = _getStorageObject(api, id);\n if(obj !== null && key in obj) {\n // remove key\n delete obj[key];\n\n // see if entry has no keys remaining\n var empty = true;\n for(var prop in obj) {\n empty = false;\n break;\n }\n if(empty) {\n // remove entry entirely if no keys are left\n obj = null;\n }\n\n // set storage object\n _setStorageObject(api, id, obj);\n }\n};\n\n/**\n * Clears the local disk storage identified by the given ID.\n *\n * @param api the storage interface.\n * @param id the storage ID to use.\n */\nvar _clearItems = function(api, id) {\n _setStorageObject(api, id, null);\n};\n\n/**\n * Calls a storage function.\n *\n * @param func the function to call.\n * @param args the arguments for the function.\n * @param location the location argument.\n *\n * @return the return value from the function.\n */\nvar _callStorageFunction = function(func, args, location) {\n var rval = null;\n\n // default storage types\n if(typeof(location) === 'undefined') {\n location = ['web', 'flash'];\n }\n\n // apply storage types in order of preference\n var type;\n var done = false;\n var exception = null;\n for(var idx in location) {\n type = location[idx];\n try {\n if(type === 'flash' || type === 'both') {\n if(args[0] === null) {\n throw new Error('Flash local storage not available.');\n }\n rval = func.apply(this, args);\n done = (type === 'flash');\n }\n if(type === 'web' || type === 'both') {\n args[0] = localStorage;\n rval = func.apply(this, args);\n done = true;\n }\n } catch(ex) {\n exception = ex;\n }\n if(done) {\n break;\n }\n }\n\n if(!done) {\n throw exception;\n }\n\n return rval;\n};\n\n/**\n * Stores an item on local disk.\n *\n * The available types of local storage include 'flash', 'web', and 'both'.\n *\n * The type 'flash' refers to flash local storage (SharedObject). 
In order\n * to use flash local storage, the 'api' parameter must be valid. The type\n * 'web' refers to WebStorage, if supported by the browser. The type 'both'\n * refers to storing using both 'flash' and 'web', not just one or the\n * other.\n *\n * The location array should list the storage types to use in order of\n * preference:\n *\n * ['flash']: flash only storage\n * ['web']: web only storage\n * ['both']: try to store in both\n * ['flash','web']: store in flash first, but if not available, 'web'\n * ['web','flash']: store in web first, but if not available, 'flash'\n *\n * The location array defaults to: ['web', 'flash']\n *\n * @param api the flash interface, null to use only WebStorage.\n * @param id the storage ID to use.\n * @param key the key for the item.\n * @param data the data for the item (any javascript object/primitive).\n * @param location an array with the preferred types of storage to use.\n */\nutil.setItem = function(api, id, key, data, location) {\n _callStorageFunction(_setItem, arguments, location);\n};\n\n/**\n * Gets an item on local disk.\n *\n * Set setItem() for details on storage types.\n *\n * @param api the flash interface, null to use only WebStorage.\n * @param id the storage ID to use.\n * @param key the key for the item.\n * @param location an array with the preferred types of storage to use.\n *\n * @return the item.\n */\nutil.getItem = function(api, id, key, location) {\n return _callStorageFunction(_getItem, arguments, location);\n};\n\n/**\n * Removes an item on local disk.\n *\n * Set setItem() for details on storage types.\n *\n * @param api the flash interface.\n * @param id the storage ID to use.\n * @param key the key for the item.\n * @param location an array with the preferred types of storage to use.\n */\nutil.removeItem = function(api, id, key, location) {\n _callStorageFunction(_removeItem, arguments, location);\n};\n\n/**\n * Clears the local disk storage identified by the given ID.\n *\n * Set setItem() for details on storage types.\n *\n * @param api the flash interface if flash is available.\n * @param id the storage ID to use.\n * @param location an array with the preferred types of storage to use.\n */\nutil.clearItems = function(api, id, location) {\n _callStorageFunction(_clearItems, arguments, location);\n};\n\n/**\n * Check if an object is empty.\n *\n * Taken from:\n * http://stackoverflow.com/questions/679915/how-do-i-test-for-an-empty-javascript-object-from-json/679937#679937\n *\n * @param object the object to check.\n */\nutil.isEmpty = function(obj) {\n for(var prop in obj) {\n if(obj.hasOwnProperty(prop)) {\n return false;\n }\n }\n return true;\n};\n\n/**\n * Format with simple printf-style interpolation.\n *\n * %%: literal '%'\n * %s,%o: convert next argument into a string.\n *\n * @param format the string to format.\n * @param ... 
arguments to interpolate into the format string.\n */\nutil.format = function(format) {\n var re = /%./g;\n // current match\n var match;\n // current part\n var part;\n // current arg index\n var argi = 0;\n // collected parts to recombine later\n var parts = [];\n // last index found\n var last = 0;\n // loop while matches remain\n while((match = re.exec(format))) {\n part = format.substring(last, re.lastIndex - 2);\n // don't add empty strings (ie, parts between %s%s)\n if(part.length > 0) {\n parts.push(part);\n }\n last = re.lastIndex;\n // switch on % code\n var code = match[0][1];\n switch(code) {\n case 's':\n case 'o':\n // check if enough arguments were given\n if(argi < arguments.length) {\n parts.push(arguments[argi++ + 1]);\n } else {\n parts.push('>');\n }\n break;\n // FIXME: do proper formating for numbers, etc\n //case 'f':\n //case 'd':\n case '%':\n parts.push('%');\n break;\n default:\n parts.push('<%' + code + '?>');\n }\n }\n // add trailing part of format string\n parts.push(format.substring(last));\n return parts.join('');\n};\n\n/**\n * Formats a number.\n *\n * http://snipplr.com/view/5945/javascript-numberformat--ported-from-php/\n */\nutil.formatNumber = function(number, decimals, dec_point, thousands_sep) {\n // http://kevin.vanzonneveld.net\n // + original by: Jonas Raoni Soares Silva (http://www.jsfromhell.com)\n // + improved by: Kevin van Zonneveld (http://kevin.vanzonneveld.net)\n // + bugfix by: Michael White (http://crestidg.com)\n // + bugfix by: Benjamin Lupton\n // + bugfix by: Allan Jensen (http://www.winternet.no)\n // + revised by: Jonas Raoni Soares Silva (http://www.jsfromhell.com)\n // * example 1: number_format(1234.5678, 2, '.', '');\n // * returns 1: 1234.57\n\n var n = number, c = isNaN(decimals = Math.abs(decimals)) ? 2 : decimals;\n var d = dec_point === undefined ? ',' : dec_point;\n var t = thousands_sep === undefined ?\n '.' : thousands_sep, s = n < 0 ? '-' : '';\n var i = parseInt((n = Math.abs(+n || 0).toFixed(c)), 10) + '';\n var j = (i.length > 3) ? i.length % 3 : 0;\n return s + (j ? i.substr(0, j) + t : '') +\n i.substr(j).replace(/(\\d{3})(?=\\d)/g, '$1' + t) +\n (c ? 
d + Math.abs(n - i).toFixed(c).slice(2) : '');\n};\n\n/**\n * Formats a byte size.\n *\n * http://snipplr.com/view/5949/format-humanize-file-byte-size-presentation-in-javascript/\n */\nutil.formatSize = function(size) {\n if(size >= 1073741824) {\n size = util.formatNumber(size / 1073741824, 2, '.', '') + ' GiB';\n } else if(size >= 1048576) {\n size = util.formatNumber(size / 1048576, 2, '.', '') + ' MiB';\n } else if(size >= 1024) {\n size = util.formatNumber(size / 1024, 0) + ' KiB';\n } else {\n size = util.formatNumber(size, 0) + ' bytes';\n }\n return size;\n};\n\n/**\n * Converts an IPv4 or IPv6 string representation into bytes (in network order).\n *\n * @param ip the IPv4 or IPv6 address to convert.\n *\n * @return the 4-byte IPv6 or 16-byte IPv6 address or null if the address can't\n * be parsed.\n */\nutil.bytesFromIP = function(ip) {\n if(ip.indexOf('.') !== -1) {\n return util.bytesFromIPv4(ip);\n }\n if(ip.indexOf(':') !== -1) {\n return util.bytesFromIPv6(ip);\n }\n return null;\n};\n\n/**\n * Converts an IPv4 string representation into bytes (in network order).\n *\n * @param ip the IPv4 address to convert.\n *\n * @return the 4-byte address or null if the address can't be parsed.\n */\nutil.bytesFromIPv4 = function(ip) {\n ip = ip.split('.');\n if(ip.length !== 4) {\n return null;\n }\n var b = util.createBuffer();\n for(var i = 0; i < ip.length; ++i) {\n var num = parseInt(ip[i], 10);\n if(isNaN(num)) {\n return null;\n }\n b.putByte(num);\n }\n return b.getBytes();\n};\n\n/**\n * Converts an IPv6 string representation into bytes (in network order).\n *\n * @param ip the IPv6 address to convert.\n *\n * @return the 16-byte address or null if the address can't be parsed.\n */\nutil.bytesFromIPv6 = function(ip) {\n var blanks = 0;\n ip = ip.split(':').filter(function(e) {\n if(e.length === 0) ++blanks;\n return true;\n });\n var zeros = (8 - ip.length + blanks) * 2;\n var b = util.createBuffer();\n for(var i = 0; i < 8; ++i) {\n if(!ip[i] || ip[i].length === 0) {\n b.fillWithByte(0, zeros);\n zeros = 0;\n continue;\n }\n var bytes = util.hexToBytes(ip[i]);\n if(bytes.length < 2) {\n b.putByte(0);\n }\n b.putBytes(bytes);\n }\n return b.getBytes();\n};\n\n/**\n * Converts 4-bytes into an IPv4 string representation or 16-bytes into\n * an IPv6 string representation. The bytes must be in network order.\n *\n * @param bytes the bytes to convert.\n *\n * @return the IPv4 or IPv6 string representation if 4 or 16 bytes,\n * respectively, are given, otherwise null.\n */\nutil.bytesToIP = function(bytes) {\n if(bytes.length === 4) {\n return util.bytesToIPv4(bytes);\n }\n if(bytes.length === 16) {\n return util.bytesToIPv6(bytes);\n }\n return null;\n};\n\n/**\n * Converts 4-bytes into an IPv4 string representation. The bytes must be\n * in network order.\n *\n * @param bytes the bytes to convert.\n *\n * @return the IPv4 string representation or null for an invalid # of bytes.\n */\nutil.bytesToIPv4 = function(bytes) {\n if(bytes.length !== 4) {\n return null;\n }\n var ip = [];\n for(var i = 0; i < bytes.length; ++i) {\n ip.push(bytes.charCodeAt(i));\n }\n return ip.join('.');\n};\n\n/**\n * Converts 16-bytes into an IPv16 string representation. 
The bytes must be\n * in network order.\n *\n * @param bytes the bytes to convert.\n *\n * @return the IPv16 string representation or null for an invalid # of bytes.\n */\nutil.bytesToIPv6 = function(bytes) {\n if(bytes.length !== 16) {\n return null;\n }\n var ip = [];\n var zeroGroups = [];\n var zeroMaxGroup = 0;\n for(var i = 0; i < bytes.length; i += 2) {\n var hex = util.bytesToHex(bytes[i] + bytes[i + 1]);\n // canonicalize zero representation\n while(hex[0] === '0' && hex !== '0') {\n hex = hex.substr(1);\n }\n if(hex === '0') {\n var last = zeroGroups[zeroGroups.length - 1];\n var idx = ip.length;\n if(!last || idx !== last.end + 1) {\n zeroGroups.push({start: idx, end: idx});\n } else {\n last.end = idx;\n if((last.end - last.start) >\n (zeroGroups[zeroMaxGroup].end - zeroGroups[zeroMaxGroup].start)) {\n zeroMaxGroup = zeroGroups.length - 1;\n }\n }\n }\n ip.push(hex);\n }\n if(zeroGroups.length > 0) {\n var group = zeroGroups[zeroMaxGroup];\n // only shorten group of length > 0\n if(group.end - group.start > 0) {\n ip.splice(group.start, group.end - group.start + 1, '');\n if(group.start === 0) {\n ip.unshift('');\n }\n if(group.end === 7) {\n ip.push('');\n }\n }\n }\n return ip.join(':');\n};\n\n/**\n * Estimates the number of processes that can be run concurrently. If\n * creating Web Workers, keep in mind that the main JavaScript process needs\n * its own core.\n *\n * @param options the options to use:\n * update true to force an update (not use the cached value).\n * @param callback(err, max) called once the operation completes.\n */\nutil.estimateCores = function(options, callback) {\n if(typeof options === 'function') {\n callback = options;\n options = {};\n }\n options = options || {};\n if('cores' in util && !options.update) {\n return callback(null, util.cores);\n }\n if(typeof navigator !== 'undefined' &&\n 'hardwareConcurrency' in navigator &&\n navigator.hardwareConcurrency > 0) {\n util.cores = navigator.hardwareConcurrency;\n return callback(null, util.cores);\n }\n if(typeof Worker === 'undefined') {\n // workers not available\n util.cores = 1;\n return callback(null, util.cores);\n }\n if(typeof Blob === 'undefined') {\n // can't estimate, default to 2\n util.cores = 2;\n return callback(null, util.cores);\n }\n\n // create worker concurrency estimation code as blob\n var blobUrl = URL.createObjectURL(new Blob(['(',\n function() {\n self.addEventListener('message', function(e) {\n // run worker for 4 ms\n var st = Date.now();\n var et = st + 4;\n while(Date.now() < et);\n self.postMessage({st: st, et: et});\n });\n }.toString(),\n ')()'], {type: 'application/javascript'}));\n\n // take 5 samples using 16 workers\n sample([], 5, 16);\n\n function sample(max, samples, numWorkers) {\n if(samples === 0) {\n // get overlap average\n var avg = Math.floor(max.reduce(function(avg, x) {\n return avg + x;\n }, 0) / max.length);\n util.cores = Math.max(1, avg);\n URL.revokeObjectURL(blobUrl);\n return callback(null, util.cores);\n }\n map(numWorkers, function(err, results) {\n max.push(reduce(numWorkers, results));\n sample(max, samples - 1, numWorkers);\n });\n }\n\n function map(numWorkers, callback) {\n var workers = [];\n var results = [];\n for(var i = 0; i < numWorkers; ++i) {\n var worker = new Worker(blobUrl);\n worker.addEventListener('message', function(e) {\n results.push(e.data);\n if(results.length === numWorkers) {\n for(var i = 0; i < numWorkers; ++i) {\n workers[i].terminate();\n }\n callback(null, results);\n }\n });\n workers.push(worker);\n }\n for(var 
i = 0; i < numWorkers; ++i) {\n workers[i].postMessage(i);\n }\n }\n\n function reduce(numWorkers, results) {\n // find overlapping time windows\n var overlaps = [];\n for(var n = 0; n < numWorkers; ++n) {\n var r1 = results[n];\n var overlap = overlaps[n] = [];\n for(var i = 0; i < numWorkers; ++i) {\n if(n === i) {\n continue;\n }\n var r2 = results[i];\n if((r1.st > r2.st && r1.st < r2.et) ||\n (r2.st > r1.st && r2.st < r1.et)) {\n overlap.push(i);\n }\n }\n }\n // get maximum overlaps ... don't include overlapping worker itself\n // as the main JS process was also being scheduled during the work and\n // would have to be subtracted from the estimate anyway\n return overlaps.reduce(function(max, overlap) {\n return Math.max(max, overlap.length);\n }, 0);\n }\n};\n\n\n//# sourceURL=webpack://@waku/noise-rtc/./node_modules/node-forge/lib/util.js?");
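// Editor's note: a minimal, hedged usage sketch of the buffer and codec helpers
// defined in the util module above (util.createBuffer / ByteStringBuffer,
// toHex, getBytes, encode64/decode64, bytesToHex). It assumes the node-forge
// package API as exposed outside this bundle (require('node-forge')); it is
// kept inside a comment so nothing here executes when the bundle loads.
/*
var forge = require('node-forge');              // assumption: top-level forge API
var buf = forge.util.createBuffer();            // binary-string backed buffer
buf.putInt16(0xBEEF);                           // 16-bit integer, big-endian
buf.putBytes('\x01\x02');                       // raw binary-encoded bytes
console.log(buf.toHex());                       // -> 'beef0102' (read pointer untouched)
var b64 = forge.util.encode64(buf.getBytes());  // base64 of the raw bytes; clears buffer
console.log(forge.util.bytesToHex(forge.util.decode64(b64))); // -> 'beef0102'
*/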
/***/ }),
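// Editor's note: a short, illustrative sketch of the IP-address and printf-style
// helpers exported by the util module above (util.bytesFromIP, util.bytesToIP,
// util.bytesToHex, util.format). It assumes the same require('node-forge')
// entry point as the previous note and is commented out so it is not executed.
/*
var util = require('node-forge').util;            // assumption: bundled forge API
var v4 = util.bytesFromIP('192.168.0.1');         // 4-byte binary string, network order
console.log(util.bytesToHex(v4));                 // -> 'c0a80001'
console.log(util.bytesToIP(v4));                  // -> '192.168.0.1'
console.log(util.format('%s of %s done', 3, 4));  // -> '3 of 4 done'
*/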
/***/ "./node_modules/node-forge/lib/x509.js":
/*!*********************************************!*\
!*** ./node_modules/node-forge/lib/x509.js ***!
\*********************************************/
/***/ ((module, __unused_webpack_exports, __webpack_require__) => {
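// Editor's note: the header comment below documents the X.509v3 ASN.1 layout this
// module parses against x509CertificateValidator. As a hedged illustration, a
// typical consumer reads a PEM certificate roughly as sketched here; the parsing
// entry point pki.certificateFromPem is defined later in this module, beyond the
// excerpt shown, and pemString is a hypothetical PEM-encoded certificate string.
/*
var pki = require('node-forge').pki;              // assumption: bundled forge API
var cert = pki.certificateFromPem(pemString);     // pemString: PEM-encoded certificate
console.log(cert.subject.getField('CN').value);   // commonName from the subject RDNs
console.log(cert.validity.notBefore, cert.validity.notAfter); // Validity times
*/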
eval("/**\n * Javascript implementation of X.509 and related components (such as\n * Certification Signing Requests) of a Public Key Infrastructure.\n *\n * @author Dave Longley\n *\n * Copyright (c) 2010-2014 Digital Bazaar, Inc.\n *\n * The ASN.1 representation of an X.509v3 certificate is as follows\n * (see RFC 2459):\n *\n * Certificate ::= SEQUENCE {\n * tbsCertificate TBSCertificate,\n * signatureAlgorithm AlgorithmIdentifier,\n * signatureValue BIT STRING\n * }\n *\n * TBSCertificate ::= SEQUENCE {\n * version [0] EXPLICIT Version DEFAULT v1,\n * serialNumber CertificateSerialNumber,\n * signature AlgorithmIdentifier,\n * issuer Name,\n * validity Validity,\n * subject Name,\n * subjectPublicKeyInfo SubjectPublicKeyInfo,\n * issuerUniqueID [1] IMPLICIT UniqueIdentifier OPTIONAL,\n * -- If present, version shall be v2 or v3\n * subjectUniqueID [2] IMPLICIT UniqueIdentifier OPTIONAL,\n * -- If present, version shall be v2 or v3\n * extensions [3] EXPLICIT Extensions OPTIONAL\n * -- If present, version shall be v3\n * }\n *\n * Version ::= INTEGER { v1(0), v2(1), v3(2) }\n *\n * CertificateSerialNumber ::= INTEGER\n *\n * Name ::= CHOICE {\n * // only one possible choice for now\n * RDNSequence\n * }\n *\n * RDNSequence ::= SEQUENCE OF RelativeDistinguishedName\n *\n * RelativeDistinguishedName ::= SET OF AttributeTypeAndValue\n *\n * AttributeTypeAndValue ::= SEQUENCE {\n * type AttributeType,\n * value AttributeValue\n * }\n * AttributeType ::= OBJECT IDENTIFIER\n * AttributeValue ::= ANY DEFINED BY AttributeType\n *\n * Validity ::= SEQUENCE {\n * notBefore Time,\n * notAfter Time\n * }\n *\n * Time ::= CHOICE {\n * utcTime UTCTime,\n * generalTime GeneralizedTime\n * }\n *\n * UniqueIdentifier ::= BIT STRING\n *\n * SubjectPublicKeyInfo ::= SEQUENCE {\n * algorithm AlgorithmIdentifier,\n * subjectPublicKey BIT STRING\n * }\n *\n * Extensions ::= SEQUENCE SIZE (1..MAX) OF Extension\n *\n * Extension ::= SEQUENCE {\n * extnID OBJECT IDENTIFIER,\n * critical BOOLEAN DEFAULT FALSE,\n * extnValue OCTET STRING\n * }\n *\n * The only key algorithm currently supported for PKI is RSA.\n *\n * RSASSA-PSS signatures are described in RFC 3447 and RFC 4055.\n *\n * PKCS#10 v1.7 describes certificate signing requests:\n *\n * CertificationRequestInfo:\n *\n * CertificationRequestInfo ::= SEQUENCE {\n * version INTEGER { v1(0) } (v1,...),\n * subject Name,\n * subjectPKInfo SubjectPublicKeyInfo{{ PKInfoAlgorithms }},\n * attributes [0] Attributes{{ CRIAttributes }}\n * }\n *\n * Attributes { ATTRIBUTE:IOSet } ::= SET OF Attribute{{ IOSet }}\n *\n * CRIAttributes ATTRIBUTE ::= {\n * ... -- add any locally defined attributes here -- }\n *\n * Attribute { ATTRIBUTE:IOSet } ::= SEQUENCE {\n * type ATTRIBUTE.&id({IOSet}),\n * values SET SIZE(1..MAX) OF ATTRIBUTE.&Type({IOSet}{@type})\n * }\n *\n * CertificationRequest ::= SEQUENCE {\n * certificationRequestInfo CertificationRequestInfo,\n * signatureAlgorithm AlgorithmIdentifier{{ SignatureAlgorithms }},\n * signature BIT STRING\n * }\n */\nvar forge = __webpack_require__(/*! ./forge */ \"./node_modules/node-forge/lib/forge.js\");\n__webpack_require__(/*! ./aes */ \"./node_modules/node-forge/lib/aes.js\");\n__webpack_require__(/*! ./asn1 */ \"./node_modules/node-forge/lib/asn1.js\");\n__webpack_require__(/*! ./des */ \"./node_modules/node-forge/lib/des.js\");\n__webpack_require__(/*! ./md */ \"./node_modules/node-forge/lib/md.js\");\n__webpack_require__(/*! ./mgf */ \"./node_modules/node-forge/lib/mgf.js\");\n__webpack_require__(/*! 
./oids */ \"./node_modules/node-forge/lib/oids.js\");\n__webpack_require__(/*! ./pem */ \"./node_modules/node-forge/lib/pem.js\");\n__webpack_require__(/*! ./pss */ \"./node_modules/node-forge/lib/pss.js\");\n__webpack_require__(/*! ./rsa */ \"./node_modules/node-forge/lib/rsa.js\");\n__webpack_require__(/*! ./util */ \"./node_modules/node-forge/lib/util.js\");\n\n// shortcut for asn.1 API\nvar asn1 = forge.asn1;\n\n/* Public Key Infrastructure (PKI) implementation. */\nvar pki = module.exports = forge.pki = forge.pki || {};\nvar oids = pki.oids;\n\n// short name OID mappings\nvar _shortNames = {};\n_shortNames['CN'] = oids['commonName'];\n_shortNames['commonName'] = 'CN';\n_shortNames['C'] = oids['countryName'];\n_shortNames['countryName'] = 'C';\n_shortNames['L'] = oids['localityName'];\n_shortNames['localityName'] = 'L';\n_shortNames['ST'] = oids['stateOrProvinceName'];\n_shortNames['stateOrProvinceName'] = 'ST';\n_shortNames['O'] = oids['organizationName'];\n_shortNames['organizationName'] = 'O';\n_shortNames['OU'] = oids['organizationalUnitName'];\n_shortNames['organizationalUnitName'] = 'OU';\n_shortNames['E'] = oids['emailAddress'];\n_shortNames['emailAddress'] = 'E';\n\n// validator for an SubjectPublicKeyInfo structure\n// Note: Currently only works with an RSA public key\nvar publicKeyValidator = forge.pki.rsa.publicKeyValidator;\n\n// validator for an X.509v3 certificate\nvar x509CertificateValidator = {\n name: 'Certificate',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.SEQUENCE,\n constructed: true,\n value: [{\n name: 'Certificate.TBSCertificate',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.SEQUENCE,\n constructed: true,\n captureAsn1: 'tbsCertificate',\n value: [{\n name: 'Certificate.TBSCertificate.version',\n tagClass: asn1.Class.CONTEXT_SPECIFIC,\n type: 0,\n constructed: true,\n optional: true,\n value: [{\n name: 'Certificate.TBSCertificate.version.integer',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.INTEGER,\n constructed: false,\n capture: 'certVersion'\n }]\n }, {\n name: 'Certificate.TBSCertificate.serialNumber',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.INTEGER,\n constructed: false,\n capture: 'certSerialNumber'\n }, {\n name: 'Certificate.TBSCertificate.signature',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.SEQUENCE,\n constructed: true,\n value: [{\n name: 'Certificate.TBSCertificate.signature.algorithm',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.OID,\n constructed: false,\n capture: 'certinfoSignatureOid'\n }, {\n name: 'Certificate.TBSCertificate.signature.parameters',\n tagClass: asn1.Class.UNIVERSAL,\n optional: true,\n captureAsn1: 'certinfoSignatureParams'\n }]\n }, {\n name: 'Certificate.TBSCertificate.issuer',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.SEQUENCE,\n constructed: true,\n captureAsn1: 'certIssuer'\n }, {\n name: 'Certificate.TBSCertificate.validity',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.SEQUENCE,\n constructed: true,\n // Note: UTC and generalized times may both appear so the capture\n // names are based on their detected order, the names used below\n // are only for the common case, which validity time really means\n // \"notBefore\" and which means \"notAfter\" will be determined by order\n value: [{\n // notBefore (Time) (UTC time case)\n name: 'Certificate.TBSCertificate.validity.notBefore (utc)',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.UTCTIME,\n constructed: false,\n optional: true,\n capture: 'certValidity1UTCTime'\n }, {\n // notBefore (Time) 
(generalized time case)\n name: 'Certificate.TBSCertificate.validity.notBefore (generalized)',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.GENERALIZEDTIME,\n constructed: false,\n optional: true,\n capture: 'certValidity2GeneralizedTime'\n }, {\n // notAfter (Time) (only UTC time is supported)\n name: 'Certificate.TBSCertificate.validity.notAfter (utc)',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.UTCTIME,\n constructed: false,\n optional: true,\n capture: 'certValidity3UTCTime'\n }, {\n // notAfter (Time) (only UTC time is supported)\n name: 'Certificate.TBSCertificate.validity.notAfter (generalized)',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.GENERALIZEDTIME,\n constructed: false,\n optional: true,\n capture: 'certValidity4GeneralizedTime'\n }]\n }, {\n // Name (subject) (RDNSequence)\n name: 'Certificate.TBSCertificate.subject',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.SEQUENCE,\n constructed: true,\n captureAsn1: 'certSubject'\n },\n // SubjectPublicKeyInfo\n publicKeyValidator,\n {\n // issuerUniqueID (optional)\n name: 'Certificate.TBSCertificate.issuerUniqueID',\n tagClass: asn1.Class.CONTEXT_SPECIFIC,\n type: 1,\n constructed: true,\n optional: true,\n value: [{\n name: 'Certificate.TBSCertificate.issuerUniqueID.id',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.BITSTRING,\n constructed: false,\n // TODO: support arbitrary bit length ids\n captureBitStringValue: 'certIssuerUniqueId'\n }]\n }, {\n // subjectUniqueID (optional)\n name: 'Certificate.TBSCertificate.subjectUniqueID',\n tagClass: asn1.Class.CONTEXT_SPECIFIC,\n type: 2,\n constructed: true,\n optional: true,\n value: [{\n name: 'Certificate.TBSCertificate.subjectUniqueID.id',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.BITSTRING,\n constructed: false,\n // TODO: support arbitrary bit length ids\n captureBitStringValue: 'certSubjectUniqueId'\n }]\n }, {\n // Extensions (optional)\n name: 'Certificate.TBSCertificate.extensions',\n tagClass: asn1.Class.CONTEXT_SPECIFIC,\n type: 3,\n constructed: true,\n captureAsn1: 'certExtensions',\n optional: true\n }]\n }, {\n // AlgorithmIdentifier (signature algorithm)\n name: 'Certificate.signatureAlgorithm',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.SEQUENCE,\n constructed: true,\n value: [{\n // algorithm\n name: 'Certificate.signatureAlgorithm.algorithm',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.OID,\n constructed: false,\n capture: 'certSignatureOid'\n }, {\n name: 'Certificate.TBSCertificate.signature.parameters',\n tagClass: asn1.Class.UNIVERSAL,\n optional: true,\n captureAsn1: 'certSignatureParams'\n }]\n }, {\n // SignatureValue\n name: 'Certificate.signatureValue',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.BITSTRING,\n constructed: false,\n captureBitStringValue: 'certSignature'\n }]\n};\n\nvar rsassaPssParameterValidator = {\n name: 'rsapss',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.SEQUENCE,\n constructed: true,\n value: [{\n name: 'rsapss.hashAlgorithm',\n tagClass: asn1.Class.CONTEXT_SPECIFIC,\n type: 0,\n constructed: true,\n value: [{\n name: 'rsapss.hashAlgorithm.AlgorithmIdentifier',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Class.SEQUENCE,\n constructed: true,\n optional: true,\n value: [{\n name: 'rsapss.hashAlgorithm.AlgorithmIdentifier.algorithm',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.OID,\n constructed: false,\n capture: 'hashOid'\n /* parameter block omitted, for SHA1 NULL anyhow. 
*/\n }]\n }]\n }, {\n name: 'rsapss.maskGenAlgorithm',\n tagClass: asn1.Class.CONTEXT_SPECIFIC,\n type: 1,\n constructed: true,\n value: [{\n name: 'rsapss.maskGenAlgorithm.AlgorithmIdentifier',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Class.SEQUENCE,\n constructed: true,\n optional: true,\n value: [{\n name: 'rsapss.maskGenAlgorithm.AlgorithmIdentifier.algorithm',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.OID,\n constructed: false,\n capture: 'maskGenOid'\n }, {\n name: 'rsapss.maskGenAlgorithm.AlgorithmIdentifier.params',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.SEQUENCE,\n constructed: true,\n value: [{\n name: 'rsapss.maskGenAlgorithm.AlgorithmIdentifier.params.algorithm',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.OID,\n constructed: false,\n capture: 'maskGenHashOid'\n /* parameter block omitted, for SHA1 NULL anyhow. */\n }]\n }]\n }]\n }, {\n name: 'rsapss.saltLength',\n tagClass: asn1.Class.CONTEXT_SPECIFIC,\n type: 2,\n optional: true,\n value: [{\n name: 'rsapss.saltLength.saltLength',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Class.INTEGER,\n constructed: false,\n capture: 'saltLength'\n }]\n }, {\n name: 'rsapss.trailerField',\n tagClass: asn1.Class.CONTEXT_SPECIFIC,\n type: 3,\n optional: true,\n value: [{\n name: 'rsapss.trailer.trailer',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Class.INTEGER,\n constructed: false,\n capture: 'trailer'\n }]\n }]\n};\n\n// validator for a CertificationRequestInfo structure\nvar certificationRequestInfoValidator = {\n name: 'CertificationRequestInfo',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.SEQUENCE,\n constructed: true,\n captureAsn1: 'certificationRequestInfo',\n value: [{\n name: 'CertificationRequestInfo.integer',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.INTEGER,\n constructed: false,\n capture: 'certificationRequestInfoVersion'\n }, {\n // Name (subject) (RDNSequence)\n name: 'CertificationRequestInfo.subject',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.SEQUENCE,\n constructed: true,\n captureAsn1: 'certificationRequestInfoSubject'\n },\n // SubjectPublicKeyInfo\n publicKeyValidator,\n {\n name: 'CertificationRequestInfo.attributes',\n tagClass: asn1.Class.CONTEXT_SPECIFIC,\n type: 0,\n constructed: true,\n optional: true,\n capture: 'certificationRequestInfoAttributes',\n value: [{\n name: 'CertificationRequestInfo.attributes',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.SEQUENCE,\n constructed: true,\n value: [{\n name: 'CertificationRequestInfo.attributes.type',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.OID,\n constructed: false\n }, {\n name: 'CertificationRequestInfo.attributes.value',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.SET,\n constructed: true\n }]\n }]\n }]\n};\n\n// validator for a CertificationRequest structure\nvar certificationRequestValidator = {\n name: 'CertificationRequest',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.SEQUENCE,\n constructed: true,\n captureAsn1: 'csr',\n value: [\n certificationRequestInfoValidator, {\n // AlgorithmIdentifier (signature algorithm)\n name: 'CertificationRequest.signatureAlgorithm',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.SEQUENCE,\n constructed: true,\n value: [{\n // algorithm\n name: 'CertificationRequest.signatureAlgorithm.algorithm',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.OID,\n constructed: false,\n capture: 'csrSignatureOid'\n }, {\n name: 'CertificationRequest.signatureAlgorithm.parameters',\n tagClass: asn1.Class.UNIVERSAL,\n optional: 
true,\n captureAsn1: 'csrSignatureParams'\n }]\n }, {\n // signature\n name: 'CertificationRequest.signature',\n tagClass: asn1.Class.UNIVERSAL,\n type: asn1.Type.BITSTRING,\n constructed: false,\n captureBitStringValue: 'csrSignature'\n }\n ]\n};\n\n/**\n * Converts an RDNSequence of ASN.1 DER-encoded RelativeDistinguishedName\n * sets into an array with objects that have type and value properties.\n *\n * @param rdn the RDNSequence to convert.\n * @param md a message digest to append type and value to if provided.\n */\npki.RDNAttributesAsArray = function(rdn, md) {\n var rval = [];\n\n // each value in 'rdn' in is a SET of RelativeDistinguishedName\n var set, attr, obj;\n for(var si = 0; si < rdn.value.length; ++si) {\n // get the RelativeDistinguishedName set\n set = rdn.value[si];\n\n // each value in the SET is an AttributeTypeAndValue sequence\n // containing first a type (an OID) and second a value (defined by\n // the OID)\n for(var i = 0; i < set.value.length; ++i) {\n obj = {};\n attr = set.value[i];\n obj.type = asn1.derToOid(attr.value[0].value);\n obj.value = attr.value[1].value;\n obj.valueTagClass = attr.value[1].type;\n // if the OID is known, get its name and short name\n if(obj.type in oids) {\n obj.name = oids[obj.type];\n if(obj.name in _shortNames) {\n obj.shortName = _shortNames[obj.name];\n }\n }\n if(md) {\n md.update(obj.type);\n md.update(obj.value);\n }\n rval.push(obj);\n }\n }\n\n return rval;\n};\n\n/**\n * Converts ASN.1 CRIAttributes into an array with objects that have type and\n * value properties.\n *\n * @param attributes the CRIAttributes to convert.\n */\npki.CRIAttributesAsArray = function(attributes) {\n var rval = [];\n\n // each value in 'attributes' in is a SEQUENCE with an OID and a SET\n for(var si = 0; si < attributes.length; ++si) {\n // get the attribute sequence\n var seq = attributes[si];\n\n // each value in the SEQUENCE containing first a type (an OID) and\n // second a set of values (defined by the OID)\n var type = asn1.derToOid(seq.value[0].value);\n var values = seq.value[1].value;\n for(var vi = 0; vi < values.length; ++vi) {\n var obj = {};\n obj.type = type;\n obj.value = values[vi].value;\n obj.valueTagClass = values[vi].type;\n // if the OID is known, get its name and short name\n if(obj.type in oids) {\n obj.name = oids[obj.type];\n if(obj.name in _shortNames) {\n obj.shortName = _shortNames[obj.name];\n }\n }\n // parse extensions\n if(obj.type === oids.extensionRequest) {\n obj.extensions = [];\n for(var ei = 0; ei < obj.value.length; ++ei) {\n obj.extensions.push(pki.certificateExtensionFromAsn1(obj.value[ei]));\n }\n }\n rval.push(obj);\n }\n }\n\n return rval;\n};\n\n/**\n * Gets an issuer or subject attribute from its name, type, or short name.\n *\n * @param obj the issuer or subject object.\n * @param options a short name string or an object with:\n * shortName the short name for the attribute.\n * name the name for the attribute.\n * type the type for the attribute.\n *\n * @return the attribute.\n */\nfunction _getAttribute(obj, options) {\n if(typeof options === 'string') {\n options = {shortName: options};\n }\n\n var rval = null;\n var attr;\n for(var i = 0; rval === null && i < obj.attributes.length; ++i) {\n attr = obj.attributes[i];\n if(options.type && options.type === attr.type) {\n rval = attr;\n } else if(options.name && options.name === attr.name) {\n rval = attr;\n } else if(options.shortName && options.shortName === attr.shortName) {\n rval = attr;\n }\n }\n return rval;\n}\n\n/**\n * Converts signature 
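// Usage sketch: reading the attribute objects that pki.RDNAttributesAsArray
// and _getAttribute expose on a parsed certificate. The PEM below is a
// placeholder; substitute a real X.509v3 RSA certificate before running.
var forge = require('node-forge');
var certPem = '-----BEGIN CERTIFICATE-----\n...\n-----END CERTIFICATE-----';
var cert = forge.pki.certificateFromPem(certPem);
// look up a single attribute by short name, full name, or OID type
var cn = cert.subject.getField('CN');
var org = cert.issuer.getField({name: 'organizationName'});
console.log(cn && cn.value, org && org.value);
// or walk the raw attribute array produced by RDNAttributesAsArray
cert.subject.attributes.forEach(function(attr) {
  console.log((attr.shortName || attr.name || attr.type) + ' = ' + attr.value);
});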
parameters from ASN.1 structure.\n *\n * Currently only RSASSA-PSS supported. The PKCS#1 v1.5 signature scheme had\n * no parameters.\n *\n * RSASSA-PSS-params ::= SEQUENCE {\n * hashAlgorithm [0] HashAlgorithm DEFAULT\n * sha1Identifier,\n * maskGenAlgorithm [1] MaskGenAlgorithm DEFAULT\n * mgf1SHA1Identifier,\n * saltLength [2] INTEGER DEFAULT 20,\n * trailerField [3] INTEGER DEFAULT 1\n * }\n *\n * HashAlgorithm ::= AlgorithmIdentifier\n *\n * MaskGenAlgorithm ::= AlgorithmIdentifier\n *\n * AlgorithmIdentifer ::= SEQUENCE {\n * algorithm OBJECT IDENTIFIER,\n * parameters ANY DEFINED BY algorithm OPTIONAL\n * }\n *\n * @param oid The OID specifying the signature algorithm\n * @param obj The ASN.1 structure holding the parameters\n * @param fillDefaults Whether to use return default values where omitted\n * @return signature parameter object\n */\nvar _readSignatureParameters = function(oid, obj, fillDefaults) {\n var params = {};\n\n if(oid !== oids['RSASSA-PSS']) {\n return params;\n }\n\n if(fillDefaults) {\n params = {\n hash: {\n algorithmOid: oids['sha1']\n },\n mgf: {\n algorithmOid: oids['mgf1'],\n hash: {\n algorithmOid: oids['sha1']\n }\n },\n saltLength: 20\n };\n }\n\n var capture = {};\n var errors = [];\n if(!asn1.validate(obj, rsassaPssParameterValidator, capture, errors)) {\n var error = new Error('Cannot read RSASSA-PSS parameter block.');\n error.errors = errors;\n throw error;\n }\n\n if(capture.hashOid !== undefined) {\n params.hash = params.hash || {};\n params.hash.algorithmOid = asn1.derToOid(capture.hashOid);\n }\n\n if(capture.maskGenOid !== undefined) {\n params.mgf = params.mgf || {};\n params.mgf.algorithmOid = asn1.derToOid(capture.maskGenOid);\n params.mgf.hash = params.mgf.hash || {};\n params.mgf.hash.algorithmOid = asn1.derToOid(capture.maskGenHashOid);\n }\n\n if(capture.saltLength !== undefined) {\n params.saltLength = capture.saltLength.charCodeAt(0);\n }\n\n return params;\n};\n\n/**\n * Create signature digest for OID.\n *\n * @param options\n * signatureOid: the OID specifying the signature algorithm.\n * type: a human readable type for error messages\n * @return a created md instance. throws if unknown oid.\n */\nvar _createSignatureDigest = function(options) {\n switch(oids[options.signatureOid]) {\n case 'sha1WithRSAEncryption':\n // deprecated alias\n case 'sha1WithRSASignature':\n return forge.md.sha1.create();\n case 'md5WithRSAEncryption':\n return forge.md.md5.create();\n case 'sha256WithRSAEncryption':\n return forge.md.sha256.create();\n case 'sha384WithRSAEncryption':\n return forge.md.sha384.create();\n case 'sha512WithRSAEncryption':\n return forge.md.sha512.create();\n case 'RSASSA-PSS':\n return forge.md.sha256.create();\n default:\n var error = new Error(\n 'Could not compute ' + options.type + ' digest. ' +\n 'Unknown signature OID.');\n error.signatureOid = options.signatureOid;\n throw error;\n }\n};\n\n/**\n * Verify signature on certificate or CSR.\n *\n * @param options:\n * certificate the certificate or CSR to verify.\n * md the signature digest.\n * signature the signature\n * @return a created md instance. 
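// Usage sketch: what _readSignatureParameters and _createSignatureDigest
// surface to callers. The signature OID captured from a certificate maps back
// to an algorithm name through forge.pki.oids; for RSASSA-PSS certificates
// the decoded parameters are exposed as well. certPem is a placeholder.
var forge = require('node-forge');
var certPem = '-----BEGIN CERTIFICATE-----\n...\n-----END CERTIFICATE-----';
var cert = forge.pki.certificateFromPem(certPem);
console.log(cert.signatureOid);                  // e.g. '1.2.840.113549.1.1.11'
console.log(forge.pki.oids[cert.signatureOid]);  // e.g. 'sha256WithRSAEncryption'
// RSASSA-PSS only: hash, MGF and salt length parsed from the parameter block
console.log(cert.signatureParameters);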
throws if unknown oid.\n */\nvar _verifySignature = function(options) {\n var cert = options.certificate;\n var scheme;\n\n switch(cert.signatureOid) {\n case oids.sha1WithRSAEncryption:\n // deprecated alias\n case oids.sha1WithRSASignature:\n /* use PKCS#1 v1.5 padding scheme */\n break;\n case oids['RSASSA-PSS']:\n var hash, mgf;\n\n /* initialize mgf */\n hash = oids[cert.signatureParameters.mgf.hash.algorithmOid];\n if(hash === undefined || forge.md[hash] === undefined) {\n var error = new Error('Unsupported MGF hash function.');\n error.oid = cert.signatureParameters.mgf.hash.algorithmOid;\n error.name = hash;\n throw error;\n }\n\n mgf = oids[cert.signatureParameters.mgf.algorithmOid];\n if(mgf === undefined || forge.mgf[mgf] === undefined) {\n var error = new Error('Unsupported MGF function.');\n error.oid = cert.signatureParameters.mgf.algorithmOid;\n error.name = mgf;\n throw error;\n }\n\n mgf = forge.mgf[mgf].create(forge.md[hash].create());\n\n /* initialize hash function */\n hash = oids[cert.signatureParameters.hash.algorithmOid];\n if(hash === undefined || forge.md[hash] === undefined) {\n var error = new Error('Unsupported RSASSA-PSS hash function.');\n error.oid = cert.signatureParameters.hash.algorithmOid;\n error.name = hash;\n throw error;\n }\n\n scheme = forge.pss.create(\n forge.md[hash].create(), mgf, cert.signatureParameters.saltLength\n );\n break;\n }\n\n // verify signature on cert using public key\n return cert.publicKey.verify(\n options.md.digest().getBytes(), options.signature, scheme\n );\n};\n\n/**\n * Converts an X.509 certificate from PEM format.\n *\n * Note: If the certificate is to be verified then compute hash should\n * be set to true. This will scan the TBSCertificate part of the ASN.1\n * object while it is converted so it doesn't need to be converted back\n * to ASN.1-DER-encoding later.\n *\n * @param pem the PEM-formatted certificate.\n * @param computeHash true to compute the hash for verification.\n * @param strict true to be strict when checking ASN.1 value lengths, false to\n * allow truncated values (default: true).\n *\n * @return the certificate.\n */\npki.certificateFromPem = function(pem, computeHash, strict) {\n var msg = forge.pem.decode(pem)[0];\n\n if(msg.type !== 'CERTIFICATE' &&\n msg.type !== 'X509 CERTIFICATE' &&\n msg.type !== 'TRUSTED CERTIFICATE') {\n var error = new Error(\n 'Could not convert certificate from PEM; PEM header type ' +\n 'is not \"CERTIFICATE\", \"X509 CERTIFICATE\", or \"TRUSTED CERTIFICATE\".');\n error.headerType = msg.type;\n throw error;\n }\n if(msg.procType && msg.procType.type === 'ENCRYPTED') {\n throw new Error(\n 'Could not convert certificate from PEM; PEM is encrypted.');\n }\n\n // convert DER to ASN.1 object\n var obj = asn1.fromDer(msg.body, strict);\n\n return pki.certificateFromAsn1(obj, computeHash);\n};\n\n/**\n * Converts an X.509 certificate to PEM format.\n *\n * @param cert the certificate.\n * @param maxline the maximum characters per line, defaults to 64.\n *\n * @return the PEM-formatted certificate.\n */\npki.certificateToPem = function(cert, maxline) {\n // convert to ASN.1, then DER, then PEM-encode\n var msg = {\n type: 'CERTIFICATE',\n body: asn1.toDer(pki.certificateToAsn1(cert)).getBytes()\n };\n return forge.pem.encode(msg, {maxline: maxline});\n};\n\n/**\n * Converts an RSA public key from PEM format.\n *\n * @param pem the PEM-formatted public key.\n *\n * @return the public key.\n */\npki.publicKeyFromPem = function(pem) {\n var msg = forge.pem.decode(pem)[0];\n\n 
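// Usage sketch for certificateFromPem/certificateToPem: parse, inspect and
// re-encode a certificate. Pass computeHash=true when the signature will be
// verified later. pem is a placeholder for a real certificate.
var forge = require('node-forge');
var pem = '-----BEGIN CERTIFICATE-----\n...\n-----END CERTIFICATE-----';
var cert = forge.pki.certificateFromPem(pem, true /* computeHash */);
console.log(cert.serialNumber,
            cert.validity.notBefore.toISOString(),
            cert.validity.notAfter.toISOString());
// re-encoding reuses the captured tbsCertificate, so the original signature
// is preserved in the output
var reencoded = forge.pki.certificateToPem(cert);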
if(msg.type !== 'PUBLIC KEY' && msg.type !== 'RSA PUBLIC KEY') {\n var error = new Error('Could not convert public key from PEM; PEM header ' +\n 'type is not \"PUBLIC KEY\" or \"RSA PUBLIC KEY\".');\n error.headerType = msg.type;\n throw error;\n }\n if(msg.procType && msg.procType.type === 'ENCRYPTED') {\n throw new Error('Could not convert public key from PEM; PEM is encrypted.');\n }\n\n // convert DER to ASN.1 object\n var obj = asn1.fromDer(msg.body);\n\n return pki.publicKeyFromAsn1(obj);\n};\n\n/**\n * Converts an RSA public key to PEM format (using a SubjectPublicKeyInfo).\n *\n * @param key the public key.\n * @param maxline the maximum characters per line, defaults to 64.\n *\n * @return the PEM-formatted public key.\n */\npki.publicKeyToPem = function(key, maxline) {\n // convert to ASN.1, then DER, then PEM-encode\n var msg = {\n type: 'PUBLIC KEY',\n body: asn1.toDer(pki.publicKeyToAsn1(key)).getBytes()\n };\n return forge.pem.encode(msg, {maxline: maxline});\n};\n\n/**\n * Converts an RSA public key to PEM format (using an RSAPublicKey).\n *\n * @param key the public key.\n * @param maxline the maximum characters per line, defaults to 64.\n *\n * @return the PEM-formatted public key.\n */\npki.publicKeyToRSAPublicKeyPem = function(key, maxline) {\n // convert to ASN.1, then DER, then PEM-encode\n var msg = {\n type: 'RSA PUBLIC KEY',\n body: asn1.toDer(pki.publicKeyToRSAPublicKey(key)).getBytes()\n };\n return forge.pem.encode(msg, {maxline: maxline});\n};\n\n/**\n * Gets a fingerprint for the given public key.\n *\n * @param options the options to use.\n * [md] the message digest object to use (defaults to forge.md.sha1).\n * [type] the type of fingerprint, such as 'RSAPublicKey',\n * 'SubjectPublicKeyInfo' (defaults to 'RSAPublicKey').\n * [encoding] an alternative output encoding, such as 'hex'\n * (defaults to none, outputs a byte buffer).\n * [delimiter] the delimiter to use between bytes for 'hex' encoded\n * output, eg: ':' (defaults to none).\n *\n * @return the fingerprint as a byte buffer or other encoding based on options.\n */\npki.getPublicKeyFingerprint = function(key, options) {\n options = options || {};\n var md = options.md || forge.md.sha1.create();\n var type = options.type || 'RSAPublicKey';\n\n var bytes;\n switch(type) {\n case 'RSAPublicKey':\n bytes = asn1.toDer(pki.publicKeyToRSAPublicKey(key)).getBytes();\n break;\n case 'SubjectPublicKeyInfo':\n bytes = asn1.toDer(pki.publicKeyToAsn1(key)).getBytes();\n break;\n default:\n throw new Error('Unknown fingerprint type \"' + options.type + '\".');\n }\n\n // hash public key bytes\n md.start();\n md.update(bytes);\n var digest = md.digest();\n if(options.encoding === 'hex') {\n var hex = digest.toHex();\n if(options.delimiter) {\n return hex.match(/.{2}/g).join(options.delimiter);\n }\n return hex;\n } else if(options.encoding === 'binary') {\n return digest.getBytes();\n } else if(options.encoding) {\n throw new Error('Unknown encoding \"' + options.encoding + '\".');\n }\n return digest;\n};\n\n/**\n * Converts a PKCS#10 certification request (CSR) from PEM format.\n *\n * Note: If the certification request is to be verified then compute hash\n * should be set to true. 
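// Usage sketch for the public-key helpers and getPublicKeyFingerprint:
// export a key both as SubjectPublicKeyInfo and as a raw RSAPublicKey, then
// fingerprint it. The 2048-bit key size is illustrative.
var forge = require('node-forge');
var keys = forge.pki.rsa.generateKeyPair(2048);
var spkiPem = forge.pki.publicKeyToPem(keys.publicKey);             // "PUBLIC KEY"
var rsaPem = forge.pki.publicKeyToRSAPublicKeyPem(keys.publicKey);  // "RSA PUBLIC KEY"
var parsedAgain = forge.pki.publicKeyFromPem(spkiPem);
var fingerprint = forge.pki.getPublicKeyFingerprint(keys.publicKey, {
  md: forge.md.sha256.create(),    // defaults to SHA-1 when omitted
  type: 'SubjectPublicKeyInfo',    // or 'RSAPublicKey' (the default)
  encoding: 'hex',
  delimiter: ':'
});
console.log(fingerprint);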
This will scan the CertificationRequestInfo part of\n * the ASN.1 object while it is converted so it doesn't need to be converted\n * back to ASN.1-DER-encoding later.\n *\n * @param pem the PEM-formatted certificate.\n * @param computeHash true to compute the hash for verification.\n * @param strict true to be strict when checking ASN.1 value lengths, false to\n * allow truncated values (default: true).\n *\n * @return the certification request (CSR).\n */\npki.certificationRequestFromPem = function(pem, computeHash, strict) {\n var msg = forge.pem.decode(pem)[0];\n\n if(msg.type !== 'CERTIFICATE REQUEST') {\n var error = new Error('Could not convert certification request from PEM; ' +\n 'PEM header type is not \"CERTIFICATE REQUEST\".');\n error.headerType = msg.type;\n throw error;\n }\n if(msg.procType && msg.procType.type === 'ENCRYPTED') {\n throw new Error('Could not convert certification request from PEM; ' +\n 'PEM is encrypted.');\n }\n\n // convert DER to ASN.1 object\n var obj = asn1.fromDer(msg.body, strict);\n\n return pki.certificationRequestFromAsn1(obj, computeHash);\n};\n\n/**\n * Converts a PKCS#10 certification request (CSR) to PEM format.\n *\n * @param csr the certification request.\n * @param maxline the maximum characters per line, defaults to 64.\n *\n * @return the PEM-formatted certification request.\n */\npki.certificationRequestToPem = function(csr, maxline) {\n // convert to ASN.1, then DER, then PEM-encode\n var msg = {\n type: 'CERTIFICATE REQUEST',\n body: asn1.toDer(pki.certificationRequestToAsn1(csr)).getBytes()\n };\n return forge.pem.encode(msg, {maxline: maxline});\n};\n\n/**\n * Creates an empty X.509v3 RSA certificate.\n *\n * @return the certificate.\n */\npki.createCertificate = function() {\n var cert = {};\n cert.version = 0x02;\n cert.serialNumber = '00';\n cert.signatureOid = null;\n cert.signature = null;\n cert.siginfo = {};\n cert.siginfo.algorithmOid = null;\n cert.validity = {};\n cert.validity.notBefore = new Date();\n cert.validity.notAfter = new Date();\n\n cert.issuer = {};\n cert.issuer.getField = function(sn) {\n return _getAttribute(cert.issuer, sn);\n };\n cert.issuer.addField = function(attr) {\n _fillMissingFields([attr]);\n cert.issuer.attributes.push(attr);\n };\n cert.issuer.attributes = [];\n cert.issuer.hash = null;\n\n cert.subject = {};\n cert.subject.getField = function(sn) {\n return _getAttribute(cert.subject, sn);\n };\n cert.subject.addField = function(attr) {\n _fillMissingFields([attr]);\n cert.subject.attributes.push(attr);\n };\n cert.subject.attributes = [];\n cert.subject.hash = null;\n\n cert.extensions = [];\n cert.publicKey = null;\n cert.md = null;\n\n /**\n * Sets the subject of this certificate.\n *\n * @param attrs the array of subject attributes to use.\n * @param uniqueId an optional a unique ID to use.\n */\n cert.setSubject = function(attrs, uniqueId) {\n // set new attributes, clear hash\n _fillMissingFields(attrs);\n cert.subject.attributes = attrs;\n delete cert.subject.uniqueId;\n if(uniqueId) {\n // TODO: support arbitrary bit length ids\n cert.subject.uniqueId = uniqueId;\n }\n cert.subject.hash = null;\n };\n\n /**\n * Sets the issuer of this certificate.\n *\n * @param attrs the array of issuer attributes to use.\n * @param uniqueId an optional a unique ID to use.\n */\n cert.setIssuer = function(attrs, uniqueId) {\n // set new attributes, clear hash\n _fillMissingFields(attrs);\n cert.issuer.attributes = attrs;\n delete cert.issuer.uniqueId;\n if(uniqueId) {\n // TODO: support arbitrary bit 
length ids\n cert.issuer.uniqueId = uniqueId;\n }\n cert.issuer.hash = null;\n };\n\n /**\n * Sets the extensions of this certificate.\n *\n * @param exts the array of extensions to use.\n */\n cert.setExtensions = function(exts) {\n for(var i = 0; i < exts.length; ++i) {\n _fillMissingExtensionFields(exts[i], {cert: cert});\n }\n // set new extensions\n cert.extensions = exts;\n };\n\n /**\n * Gets an extension by its name or id.\n *\n * @param options the name to use or an object with:\n * name the name to use.\n * id the id to use.\n *\n * @return the extension or null if not found.\n */\n cert.getExtension = function(options) {\n if(typeof options === 'string') {\n options = {name: options};\n }\n\n var rval = null;\n var ext;\n for(var i = 0; rval === null && i < cert.extensions.length; ++i) {\n ext = cert.extensions[i];\n if(options.id && ext.id === options.id) {\n rval = ext;\n } else if(options.name && ext.name === options.name) {\n rval = ext;\n }\n }\n return rval;\n };\n\n /**\n * Signs this certificate using the given private key.\n *\n * @param key the private key to sign with.\n * @param md the message digest object to use (defaults to forge.md.sha1).\n */\n cert.sign = function(key, md) {\n // TODO: get signature OID from private key\n cert.md = md || forge.md.sha1.create();\n var algorithmOid = oids[cert.md.algorithm + 'WithRSAEncryption'];\n if(!algorithmOid) {\n var error = new Error('Could not compute certificate digest. ' +\n 'Unknown message digest algorithm OID.');\n error.algorithm = cert.md.algorithm;\n throw error;\n }\n cert.signatureOid = cert.siginfo.algorithmOid = algorithmOid;\n\n // get TBSCertificate, convert to DER\n cert.tbsCertificate = pki.getTBSCertificate(cert);\n var bytes = asn1.toDer(cert.tbsCertificate);\n\n // digest and sign\n cert.md.update(bytes.getBytes());\n cert.signature = key.sign(cert.md);\n };\n\n /**\n * Attempts verify the signature on the passed certificate using this\n * certificate's public key.\n *\n * @param child the certificate to verify.\n *\n * @return true if verified, false if not.\n */\n cert.verify = function(child) {\n var rval = false;\n\n if(!cert.issued(child)) {\n var issuer = child.issuer;\n var subject = cert.subject;\n var error = new Error(\n 'The parent certificate did not issue the given child ' +\n 'certificate; the child certificate\\'s issuer does not match the ' +\n 'parent\\'s subject.');\n error.expectedIssuer = subject.attributes;\n error.actualIssuer = issuer.attributes;\n throw error;\n }\n\n var md = child.md;\n if(md === null) {\n // create digest for OID signature types\n md = _createSignatureDigest({\n signatureOid: child.signatureOid,\n type: 'certificate'\n });\n\n // produce DER formatted TBSCertificate and digest it\n var tbsCertificate = child.tbsCertificate || pki.getTBSCertificate(child);\n var bytes = asn1.toDer(tbsCertificate);\n md.update(bytes.getBytes());\n }\n\n if(md !== null) {\n rval = _verifySignature({\n certificate: cert, md: md, signature: child.signature\n });\n }\n\n return rval;\n };\n\n /**\n * Returns true if this certificate's issuer matches the passed\n * certificate's subject. 
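// Usage sketch for createCertificate/sign/verify: build a minimal self-signed
// certificate. Attribute values and the one-year validity are illustrative.
var forge = require('node-forge');
var keys = forge.pki.rsa.generateKeyPair(2048);
var cert = forge.pki.createCertificate();
cert.publicKey = keys.publicKey;
cert.serialNumber = '01';
cert.validity.notBefore = new Date();
cert.validity.notAfter = new Date();
cert.validity.notAfter.setFullYear(cert.validity.notBefore.getFullYear() + 1);
var attrs = [
  {name: 'commonName', value: 'example.org'},
  {name: 'countryName', value: 'US'},
  {name: 'organizationName', value: 'Example Org'}
];
cert.setSubject(attrs);
cert.setIssuer(attrs);   // self-signed: issuer equals subject
cert.sign(keys.privateKey, forge.md.sha256.create());
// a self-signed certificate verifies against itself
console.log(cert.verify(cert));   // true
var selfSignedPem = forge.pki.certificateToPem(cert);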
Note that no signature check is performed.\n *\n * @param parent the certificate to check.\n *\n * @return true if this certificate's issuer matches the passed certificate's\n * subject.\n */\n cert.isIssuer = function(parent) {\n var rval = false;\n\n var i = cert.issuer;\n var s = parent.subject;\n\n // compare hashes if present\n if(i.hash && s.hash) {\n rval = (i.hash === s.hash);\n } else if(i.attributes.length === s.attributes.length) {\n // all attributes are the same so issuer matches subject\n rval = true;\n var iattr, sattr;\n for(var n = 0; rval && n < i.attributes.length; ++n) {\n iattr = i.attributes[n];\n sattr = s.attributes[n];\n if(iattr.type !== sattr.type || iattr.value !== sattr.value) {\n // attribute mismatch\n rval = false;\n }\n }\n }\n\n return rval;\n };\n\n /**\n * Returns true if this certificate's subject matches the issuer of the\n * given certificate). Note that not signature check is performed.\n *\n * @param child the certificate to check.\n *\n * @return true if this certificate's subject matches the passed\n * certificate's issuer.\n */\n cert.issued = function(child) {\n return child.isIssuer(cert);\n };\n\n /**\n * Generates the subjectKeyIdentifier for this certificate as byte buffer.\n *\n * @return the subjectKeyIdentifier for this certificate as byte buffer.\n */\n cert.generateSubjectKeyIdentifier = function() {\n /* See: 4.2.1.2 section of the the RFC3280, keyIdentifier is either:\n\n (1) The keyIdentifier is composed of the 160-bit SHA-1 hash of the\n value of the BIT STRING subjectPublicKey (excluding the tag,\n length, and number of unused bits).\n\n (2) The keyIdentifier is composed of a four bit type field with\n the value 0100 followed by the least significant 60 bits of the\n SHA-1 hash of the value of the BIT STRING subjectPublicKey\n (excluding the tag, length, and number of unused bit string bits).\n */\n\n // skipping the tag, length, and number of unused bits is the same\n // as just using the RSAPublicKey (for RSA keys, which are the\n // only ones supported)\n return pki.getPublicKeyFingerprint(cert.publicKey, {type: 'RSAPublicKey'});\n };\n\n /**\n * Verifies the subjectKeyIdentifier extension value for this certificate\n * against its public key. If no extension is found, false will be\n * returned.\n *\n * @return true if verified, false if not.\n */\n cert.verifySubjectKeyIdentifier = function() {\n var oid = oids['subjectKeyIdentifier'];\n for(var i = 0; i < cert.extensions.length; ++i) {\n var ext = cert.extensions[i];\n if(ext.id === oid) {\n var ski = cert.generateSubjectKeyIdentifier().getBytes();\n return (forge.util.hexToBytes(ext.subjectKeyIdentifier) === ski);\n }\n }\n return false;\n };\n\n return cert;\n};\n\n/**\n * Converts an X.509v3 RSA certificate from an ASN.1 object.\n *\n * Note: If the certificate is to be verified then compute hash should\n * be set to true. There is currently no implementation for converting\n * a certificate back to ASN.1 so the TBSCertificate part of the ASN.1\n * object needs to be scanned before the cert object is created.\n *\n * @param obj the asn1 representation of an X.509v3 RSA certificate.\n * @param computeHash true to compute the hash for verification.\n *\n * @return the certificate.\n */\npki.certificateFromAsn1 = function(obj, computeHash) {\n // validate certificate and capture data\n var capture = {};\n var errors = [];\n if(!asn1.validate(obj, x509CertificateValidator, capture, errors)) {\n var error = new Error('Cannot read X.509 certificate. 
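// Usage sketch for issued()/isIssuer()/verify()/verifySubjectKeyIdentifier():
// checking a two-certificate chain. Both PEM strings are placeholders.
var forge = require('node-forge');
var caPem = '-----BEGIN CERTIFICATE-----\n...\n-----END CERTIFICATE-----';
var leafPem = '-----BEGIN CERTIFICATE-----\n...\n-----END CERTIFICATE-----';
var caCert = forge.pki.certificateFromPem(caPem);
var leafCert = forge.pki.certificateFromPem(leafPem);
if(caCert.issued(leafCert)) {   // issuer/subject names match
  console.log('signature valid:', caCert.verify(leafCert));
}
// compare the leaf's subjectKeyIdentifier extension with its public key
console.log('SKI matches key:', leafCert.verifySubjectKeyIdentifier());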
' +\n 'ASN.1 object is not an X509v3 Certificate.');\n error.errors = errors;\n throw error;\n }\n\n // get oid\n var oid = asn1.derToOid(capture.publicKeyOid);\n if(oid !== pki.oids.rsaEncryption) {\n throw new Error('Cannot read public key. OID is not RSA.');\n }\n\n // create certificate\n var cert = pki.createCertificate();\n cert.version = capture.certVersion ?\n capture.certVersion.charCodeAt(0) : 0;\n var serial = forge.util.createBuffer(capture.certSerialNumber);\n cert.serialNumber = serial.toHex();\n cert.signatureOid = forge.asn1.derToOid(capture.certSignatureOid);\n cert.signatureParameters = _readSignatureParameters(\n cert.signatureOid, capture.certSignatureParams, true);\n cert.siginfo.algorithmOid = forge.asn1.derToOid(capture.certinfoSignatureOid);\n cert.siginfo.parameters = _readSignatureParameters(cert.siginfo.algorithmOid,\n capture.certinfoSignatureParams, false);\n cert.signature = capture.certSignature;\n\n var validity = [];\n if(capture.certValidity1UTCTime !== undefined) {\n validity.push(asn1.utcTimeToDate(capture.certValidity1UTCTime));\n }\n if(capture.certValidity2GeneralizedTime !== undefined) {\n validity.push(asn1.generalizedTimeToDate(\n capture.certValidity2GeneralizedTime));\n }\n if(capture.certValidity3UTCTime !== undefined) {\n validity.push(asn1.utcTimeToDate(capture.certValidity3UTCTime));\n }\n if(capture.certValidity4GeneralizedTime !== undefined) {\n validity.push(asn1.generalizedTimeToDate(\n capture.certValidity4GeneralizedTime));\n }\n if(validity.length > 2) {\n throw new Error('Cannot read notBefore/notAfter validity times; more ' +\n 'than two times were provided in the certificate.');\n }\n if(validity.length < 2) {\n throw new Error('Cannot read notBefore/notAfter validity times; they ' +\n 'were not provided as either UTCTime or GeneralizedTime.');\n }\n cert.validity.notBefore = validity[0];\n cert.validity.notAfter = validity[1];\n\n // keep TBSCertificate to preserve signature when exporting\n cert.tbsCertificate = capture.tbsCertificate;\n\n if(computeHash) {\n // create digest for OID signature type\n cert.md = _createSignatureDigest({\n signatureOid: cert.signatureOid,\n type: 'certificate'\n });\n\n // produce DER formatted TBSCertificate and digest it\n var bytes = asn1.toDer(cert.tbsCertificate);\n cert.md.update(bytes.getBytes());\n }\n\n // handle issuer, build issuer message digest\n var imd = forge.md.sha1.create();\n var ibytes = asn1.toDer(capture.certIssuer);\n imd.update(ibytes.getBytes());\n cert.issuer.getField = function(sn) {\n return _getAttribute(cert.issuer, sn);\n };\n cert.issuer.addField = function(attr) {\n _fillMissingFields([attr]);\n cert.issuer.attributes.push(attr);\n };\n cert.issuer.attributes = pki.RDNAttributesAsArray(capture.certIssuer);\n if(capture.certIssuerUniqueId) {\n cert.issuer.uniqueId = capture.certIssuerUniqueId;\n }\n cert.issuer.hash = imd.digest().toHex();\n\n // handle subject, build subject message digest\n var smd = forge.md.sha1.create();\n var sbytes = asn1.toDer(capture.certSubject);\n smd.update(sbytes.getBytes());\n cert.subject.getField = function(sn) {\n return _getAttribute(cert.subject, sn);\n };\n cert.subject.addField = function(attr) {\n _fillMissingFields([attr]);\n cert.subject.attributes.push(attr);\n };\n cert.subject.attributes = pki.RDNAttributesAsArray(capture.certSubject);\n if(capture.certSubjectUniqueId) {\n cert.subject.uniqueId = capture.certSubjectUniqueId;\n }\n cert.subject.hash = smd.digest().toHex();\n\n // handle extensions\n if(capture.certExtensions) 
{\n cert.extensions = pki.certificateExtensionsFromAsn1(capture.certExtensions);\n } else {\n cert.extensions = [];\n }\n\n // convert RSA public key from ASN.1\n cert.publicKey = pki.publicKeyFromAsn1(capture.subjectPublicKeyInfo);\n\n return cert;\n};\n\n/**\n * Converts an ASN.1 extensions object (with extension sequences as its\n * values) into an array of extension objects with types and values.\n *\n * Supported extensions:\n *\n * id-ce-keyUsage OBJECT IDENTIFIER ::= { id-ce 15 }\n * KeyUsage ::= BIT STRING {\n * digitalSignature (0),\n * nonRepudiation (1),\n * keyEncipherment (2),\n * dataEncipherment (3),\n * keyAgreement (4),\n * keyCertSign (5),\n * cRLSign (6),\n * encipherOnly (7),\n * decipherOnly (8)\n * }\n *\n * id-ce-basicConstraints OBJECT IDENTIFIER ::= { id-ce 19 }\n * BasicConstraints ::= SEQUENCE {\n * cA BOOLEAN DEFAULT FALSE,\n * pathLenConstraint INTEGER (0..MAX) OPTIONAL\n * }\n *\n * subjectAltName EXTENSION ::= {\n * SYNTAX GeneralNames\n * IDENTIFIED BY id-ce-subjectAltName\n * }\n *\n * GeneralNames ::= SEQUENCE SIZE (1..MAX) OF GeneralName\n *\n * GeneralName ::= CHOICE {\n * otherName [0] INSTANCE OF OTHER-NAME,\n * rfc822Name [1] IA5String,\n * dNSName [2] IA5String,\n * x400Address [3] ORAddress,\n * directoryName [4] Name,\n * ediPartyName [5] EDIPartyName,\n * uniformResourceIdentifier [6] IA5String,\n * IPAddress [7] OCTET STRING,\n * registeredID [8] OBJECT IDENTIFIER\n * }\n *\n * OTHER-NAME ::= TYPE-IDENTIFIER\n *\n * EDIPartyName ::= SEQUENCE {\n * nameAssigner [0] DirectoryString {ub-name} OPTIONAL,\n * partyName [1] DirectoryString {ub-name}\n * }\n *\n * @param exts the extensions ASN.1 with extension sequences to parse.\n *\n * @return the array.\n */\npki.certificateExtensionsFromAsn1 = function(exts) {\n var rval = [];\n for(var i = 0; i < exts.value.length; ++i) {\n // get extension sequence\n var extseq = exts.value[i];\n for(var ei = 0; ei < extseq.value.length; ++ei) {\n rval.push(pki.certificateExtensionFromAsn1(extseq.value[ei]));\n }\n }\n\n return rval;\n};\n\n/**\n * Parses a single certificate extension from ASN.1.\n *\n * @param ext the extension in ASN.1 format.\n *\n * @return the parsed extension as an object.\n */\npki.certificateExtensionFromAsn1 = function(ext) {\n // an extension has:\n // [0] extnID OBJECT IDENTIFIER\n // [1] critical BOOLEAN DEFAULT FALSE\n // [2] extnValue OCTET STRING\n var e = {};\n e.id = asn1.derToOid(ext.value[0].value);\n e.critical = false;\n if(ext.value[1].type === asn1.Type.BOOLEAN) {\n e.critical = (ext.value[1].value.charCodeAt(0) !== 0x00);\n e.value = ext.value[2].value;\n } else {\n e.value = ext.value[1].value;\n }\n // if the oid is known, get its name\n if(e.id in oids) {\n e.name = oids[e.id];\n\n // handle key usage\n if(e.name === 'keyUsage') {\n // get value as BIT STRING\n var ev = asn1.fromDer(e.value);\n var b2 = 0x00;\n var b3 = 0x00;\n if(ev.value.length > 1) {\n // skip first byte, just indicates unused bits which\n // will be padded with 0s anyway\n // get bytes with flag bits\n b2 = ev.value.charCodeAt(1);\n b3 = ev.value.length > 2 ? 
ev.value.charCodeAt(2) : 0;\n }\n // set flags\n e.digitalSignature = (b2 & 0x80) === 0x80;\n e.nonRepudiation = (b2 & 0x40) === 0x40;\n e.keyEncipherment = (b2 & 0x20) === 0x20;\n e.dataEncipherment = (b2 & 0x10) === 0x10;\n e.keyAgreement = (b2 & 0x08) === 0x08;\n e.keyCertSign = (b2 & 0x04) === 0x04;\n e.cRLSign = (b2 & 0x02) === 0x02;\n e.encipherOnly = (b2 & 0x01) === 0x01;\n e.decipherOnly = (b3 & 0x80) === 0x80;\n } else if(e.name === 'basicConstraints') {\n // handle basic constraints\n // get value as SEQUENCE\n var ev = asn1.fromDer(e.value);\n // get cA BOOLEAN flag (defaults to false)\n if(ev.value.length > 0 && ev.value[0].type === asn1.Type.BOOLEAN) {\n e.cA = (ev.value[0].value.charCodeAt(0) !== 0x00);\n } else {\n e.cA = false;\n }\n // get path length constraint\n var value = null;\n if(ev.value.length > 0 && ev.value[0].type === asn1.Type.INTEGER) {\n value = ev.value[0].value;\n } else if(ev.value.length > 1) {\n value = ev.value[1].value;\n }\n if(value !== null) {\n e.pathLenConstraint = asn1.derToInteger(value);\n }\n } else if(e.name === 'extKeyUsage') {\n // handle extKeyUsage\n // value is a SEQUENCE of OIDs\n var ev = asn1.fromDer(e.value);\n for(var vi = 0; vi < ev.value.length; ++vi) {\n var oid = asn1.derToOid(ev.value[vi].value);\n if(oid in oids) {\n e[oids[oid]] = true;\n } else {\n e[oid] = true;\n }\n }\n } else if(e.name === 'nsCertType') {\n // handle nsCertType\n // get value as BIT STRING\n var ev = asn1.fromDer(e.value);\n var b2 = 0x00;\n if(ev.value.length > 1) {\n // skip first byte, just indicates unused bits which\n // will be padded with 0s anyway\n // get bytes with flag bits\n b2 = ev.value.charCodeAt(1);\n }\n // set flags\n e.client = (b2 & 0x80) === 0x80;\n e.server = (b2 & 0x40) === 0x40;\n e.email = (b2 & 0x20) === 0x20;\n e.objsign = (b2 & 0x10) === 0x10;\n e.reserved = (b2 & 0x08) === 0x08;\n e.sslCA = (b2 & 0x04) === 0x04;\n e.emailCA = (b2 & 0x02) === 0x02;\n e.objCA = (b2 & 0x01) === 0x01;\n } else if(\n e.name === 'subjectAltName' ||\n e.name === 'issuerAltName') {\n // handle subjectAltName/issuerAltName\n e.altNames = [];\n\n // ev is a SYNTAX SEQUENCE\n var gn;\n var ev = asn1.fromDer(e.value);\n for(var n = 0; n < ev.value.length; ++n) {\n // get GeneralName\n gn = ev.value[n];\n\n var altName = {\n type: gn.type,\n value: gn.value\n };\n e.altNames.push(altName);\n\n // Note: Support for types 1,2,6,7,8\n switch(gn.type) {\n // rfc822Name\n case 1:\n // dNSName\n case 2:\n // uniformResourceIdentifier (URI)\n case 6:\n break;\n // IPAddress\n case 7:\n // convert to IPv4/IPv6 string representation\n altName.ip = forge.util.bytesToIP(gn.value);\n break;\n // registeredID\n case 8:\n altName.oid = asn1.derToOid(gn.value);\n break;\n default:\n // unsupported\n }\n }\n } else if(e.name === 'subjectKeyIdentifier') {\n // value is an OCTETSTRING w/the hash of the key-type specific\n // public key structure (eg: RSAPublicKey)\n var ev = asn1.fromDer(e.value);\n e.subjectKeyIdentifier = forge.util.bytesToHex(ev.value);\n }\n }\n return e;\n};\n\n/**\n * Converts a PKCS#10 certification request (CSR) from an ASN.1 object.\n *\n * Note: If the certification request is to be verified then compute hash\n * should be set to true. 
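// Usage sketch for the decoded extension objects produced by
// certificateExtensionFromAsn1: known extensions are turned into boolean
// flags and structured fields. certPem is a placeholder.
var forge = require('node-forge');
var certPem = '-----BEGIN CERTIFICATE-----\n...\n-----END CERTIFICATE-----';
var cert = forge.pki.certificateFromPem(certPem);
var keyUsage = cert.getExtension('keyUsage');
if(keyUsage) {
  console.log('digitalSignature:', keyUsage.digitalSignature,
              'keyCertSign:', keyUsage.keyCertSign);
}
var basicConstraints = cert.getExtension('basicConstraints');
if(basicConstraints) {
  console.log('cA:', basicConstraints.cA,
              'pathLenConstraint:', basicConstraints.pathLenConstraint);
}
var san = cert.getExtension('subjectAltName');
if(san) {
  san.altNames.forEach(function(altName) {
    // type 2 = dNSName, 6 = URI, 7 = IPAddress (altName.ip), 8 = registeredID
    console.log(altName.type, altName.ip || altName.value);
  });
}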
There is currently no implementation for converting\n * a certificate back to ASN.1 so the CertificationRequestInfo part of the\n * ASN.1 object needs to be scanned before the csr object is created.\n *\n * @param obj the asn1 representation of a PKCS#10 certification request (CSR).\n * @param computeHash true to compute the hash for verification.\n *\n * @return the certification request (CSR).\n */\npki.certificationRequestFromAsn1 = function(obj, computeHash) {\n // validate certification request and capture data\n var capture = {};\n var errors = [];\n if(!asn1.validate(obj, certificationRequestValidator, capture, errors)) {\n var error = new Error('Cannot read PKCS#10 certificate request. ' +\n 'ASN.1 object is not a PKCS#10 CertificationRequest.');\n error.errors = errors;\n throw error;\n }\n\n // get oid\n var oid = asn1.derToOid(capture.publicKeyOid);\n if(oid !== pki.oids.rsaEncryption) {\n throw new Error('Cannot read public key. OID is not RSA.');\n }\n\n // create certification request\n var csr = pki.createCertificationRequest();\n csr.version = capture.csrVersion ? capture.csrVersion.charCodeAt(0) : 0;\n csr.signatureOid = forge.asn1.derToOid(capture.csrSignatureOid);\n csr.signatureParameters = _readSignatureParameters(\n csr.signatureOid, capture.csrSignatureParams, true);\n csr.siginfo.algorithmOid = forge.asn1.derToOid(capture.csrSignatureOid);\n csr.siginfo.parameters = _readSignatureParameters(\n csr.siginfo.algorithmOid, capture.csrSignatureParams, false);\n csr.signature = capture.csrSignature;\n\n // keep CertificationRequestInfo to preserve signature when exporting\n csr.certificationRequestInfo = capture.certificationRequestInfo;\n\n if(computeHash) {\n // create digest for OID signature type\n csr.md = _createSignatureDigest({\n signatureOid: csr.signatureOid,\n type: 'certification request'\n });\n\n // produce DER formatted CertificationRequestInfo and digest it\n var bytes = asn1.toDer(csr.certificationRequestInfo);\n csr.md.update(bytes.getBytes());\n }\n\n // handle subject, build subject message digest\n var smd = forge.md.sha1.create();\n csr.subject.getField = function(sn) {\n return _getAttribute(csr.subject, sn);\n };\n csr.subject.addField = function(attr) {\n _fillMissingFields([attr]);\n csr.subject.attributes.push(attr);\n };\n csr.subject.attributes = pki.RDNAttributesAsArray(\n capture.certificationRequestInfoSubject, smd);\n csr.subject.hash = smd.digest().toHex();\n\n // convert RSA public key from ASN.1\n csr.publicKey = pki.publicKeyFromAsn1(capture.subjectPublicKeyInfo);\n\n // convert attributes from ASN.1\n csr.getAttribute = function(sn) {\n return _getAttribute(csr, sn);\n };\n csr.addAttribute = function(attr) {\n _fillMissingFields([attr]);\n csr.attributes.push(attr);\n };\n csr.attributes = pki.CRIAttributesAsArray(\n capture.certificationRequestInfoAttributes || []);\n\n return csr;\n};\n\n/**\n * Creates an empty certification request (a CSR or certificate signing\n * request). 
Once created, its public key and attributes can be set and then\n * it can be signed.\n *\n * @return the empty certification request.\n */\npki.createCertificationRequest = function() {\n var csr = {};\n csr.version = 0x00;\n csr.signatureOid = null;\n csr.signature = null;\n csr.siginfo = {};\n csr.siginfo.algorithmOid = null;\n\n csr.subject = {};\n csr.subject.getField = function(sn) {\n return _getAttribute(csr.subject, sn);\n };\n csr.subject.addField = function(attr) {\n _fillMissingFields([attr]);\n csr.subject.attributes.push(attr);\n };\n csr.subject.attributes = [];\n csr.subject.hash = null;\n\n csr.publicKey = null;\n csr.attributes = [];\n csr.getAttribute = function(sn) {\n return _getAttribute(csr, sn);\n };\n csr.addAttribute = function(attr) {\n _fillMissingFields([attr]);\n csr.attributes.push(attr);\n };\n csr.md = null;\n\n /**\n * Sets the subject of this certification request.\n *\n * @param attrs the array of subject attributes to use.\n */\n csr.setSubject = function(attrs) {\n // set new attributes\n _fillMissingFields(attrs);\n csr.subject.attributes = attrs;\n csr.subject.hash = null;\n };\n\n /**\n * Sets the attributes of this certification request.\n *\n * @param attrs the array of attributes to use.\n */\n csr.setAttributes = function(attrs) {\n // set new attributes\n _fillMissingFields(attrs);\n csr.attributes = attrs;\n };\n\n /**\n * Signs this certification request using the given private key.\n *\n * @param key the private key to sign with.\n * @param md the message digest object to use (defaults to forge.md.sha1).\n */\n csr.sign = function(key, md) {\n // TODO: get signature OID from private key\n csr.md = md || forge.md.sha1.create();\n var algorithmOid = oids[csr.md.algorithm + 'WithRSAEncryption'];\n if(!algorithmOid) {\n var error = new Error('Could not compute certification request digest. 
' +\n 'Unknown message digest algorithm OID.');\n error.algorithm = csr.md.algorithm;\n throw error;\n }\n csr.signatureOid = csr.siginfo.algorithmOid = algorithmOid;\n\n // get CertificationRequestInfo, convert to DER\n csr.certificationRequestInfo = pki.getCertificationRequestInfo(csr);\n var bytes = asn1.toDer(csr.certificationRequestInfo);\n\n // digest and sign\n csr.md.update(bytes.getBytes());\n csr.signature = key.sign(csr.md);\n };\n\n /**\n * Attempts verify the signature on the passed certification request using\n * its public key.\n *\n * A CSR that has been exported to a file in PEM format can be verified using\n * OpenSSL using this command:\n *\n * openssl req -in -verify -noout -text\n *\n * @return true if verified, false if not.\n */\n csr.verify = function() {\n var rval = false;\n\n var md = csr.md;\n if(md === null) {\n md = _createSignatureDigest({\n signatureOid: csr.signatureOid,\n type: 'certification request'\n });\n\n // produce DER formatted CertificationRequestInfo and digest it\n var cri = csr.certificationRequestInfo ||\n pki.getCertificationRequestInfo(csr);\n var bytes = asn1.toDer(cri);\n md.update(bytes.getBytes());\n }\n\n if(md !== null) {\n rval = _verifySignature({\n certificate: csr, md: md, signature: csr.signature\n });\n }\n\n return rval;\n };\n\n return csr;\n};\n\n/**\n * Converts an X.509 subject or issuer to an ASN.1 RDNSequence.\n *\n * @param obj the subject or issuer (distinguished name).\n *\n * @return the ASN.1 RDNSequence.\n */\nfunction _dnToAsn1(obj) {\n // create an empty RDNSequence\n var rval = asn1.create(\n asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, []);\n\n // iterate over attributes\n var attr, set;\n var attrs = obj.attributes;\n for(var i = 0; i < attrs.length; ++i) {\n attr = attrs[i];\n var value = attr.value;\n\n // reuse tag class for attribute value if available\n var valueTagClass = asn1.Type.PRINTABLESTRING;\n if('valueTagClass' in attr) {\n valueTagClass = attr.valueTagClass;\n\n if(valueTagClass === asn1.Type.UTF8) {\n value = forge.util.encodeUtf8(value);\n }\n // FIXME: handle more encodings\n }\n\n // create a RelativeDistinguishedName set\n // each value in the set is an AttributeTypeAndValue first\n // containing the type (an OID) and second the value\n set = asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SET, true, [\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [\n // AttributeType\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OID, false,\n asn1.oidToDer(attr.type).getBytes()),\n // AttributeValue\n asn1.create(asn1.Class.UNIVERSAL, valueTagClass, false, value)\n ])\n ]);\n rval.value.push(set);\n }\n\n return rval;\n}\n\n/**\n * Gets all printable attributes (typically of an issuer or subject) in a\n * simplified JSON format for display.\n *\n * @param attrs the attributes.\n *\n * @return the JSON for display.\n */\nfunction _getAttributesAsJson(attrs) {\n var rval = {};\n for(var i = 0; i < attrs.length; ++i) {\n var attr = attrs[i];\n if(attr.shortName && (\n attr.valueTagClass === asn1.Type.UTF8 ||\n attr.valueTagClass === asn1.Type.PRINTABLESTRING ||\n attr.valueTagClass === asn1.Type.IA5STRING)) {\n var value = attr.value;\n if(attr.valueTagClass === asn1.Type.UTF8) {\n value = forge.util.encodeUtf8(attr.value);\n }\n if(!(attr.shortName in rval)) {\n rval[attr.shortName] = value;\n } else if(forge.util.isArray(rval[attr.shortName])) {\n rval[attr.shortName].push(value);\n } else {\n rval[attr.shortName] = [rval[attr.shortName], value];\n }\n }\n }\n return rval;\n}\n\n/**\n * Fills 
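// Usage sketch for createCertificationRequest/sign/verify: build a CSR with a
// challengePassword attribute and a subjectAltName extension request, then
// export it as PEM. All values are illustrative.
var forge = require('node-forge');
var keys = forge.pki.rsa.generateKeyPair(2048);
var csr = forge.pki.createCertificationRequest();
csr.publicKey = keys.publicKey;
csr.setSubject([
  {name: 'commonName', value: 'example.org'},
  {name: 'countryName', value: 'US'}
]);
csr.setAttributes([
  {name: 'challengePassword', value: 'password'},
  {name: 'extensionRequest', extensions: [{
    name: 'subjectAltName',
    altNames: [
      {type: 2, value: 'www.example.org'},   // dNSName
      {type: 7, ip: '127.0.0.1'}             // IPAddress
    ]
  }]}
]);
csr.sign(keys.privateKey, forge.md.sha256.create());
console.log('self-check:', csr.verify());   // true
var csrPem = forge.pki.certificationRequestToPem(csr);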
in missing fields in attributes.\n *\n * @param attrs the attributes to fill missing fields in.\n */\nfunction _fillMissingFields(attrs) {\n var attr;\n for(var i = 0; i < attrs.length; ++i) {\n attr = attrs[i];\n\n // populate missing name\n if(typeof attr.name === 'undefined') {\n if(attr.type && attr.type in pki.oids) {\n attr.name = pki.oids[attr.type];\n } else if(attr.shortName && attr.shortName in _shortNames) {\n attr.name = pki.oids[_shortNames[attr.shortName]];\n }\n }\n\n // populate missing type (OID)\n if(typeof attr.type === 'undefined') {\n if(attr.name && attr.name in pki.oids) {\n attr.type = pki.oids[attr.name];\n } else {\n var error = new Error('Attribute type not specified.');\n error.attribute = attr;\n throw error;\n }\n }\n\n // populate missing shortname\n if(typeof attr.shortName === 'undefined') {\n if(attr.name && attr.name in _shortNames) {\n attr.shortName = _shortNames[attr.name];\n }\n }\n\n // convert extensions to value\n if(attr.type === oids.extensionRequest) {\n attr.valueConstructed = true;\n attr.valueTagClass = asn1.Type.SEQUENCE;\n if(!attr.value && attr.extensions) {\n attr.value = [];\n for(var ei = 0; ei < attr.extensions.length; ++ei) {\n attr.value.push(pki.certificateExtensionToAsn1(\n _fillMissingExtensionFields(attr.extensions[ei])));\n }\n }\n }\n\n if(typeof attr.value === 'undefined') {\n var error = new Error('Attribute value not specified.');\n error.attribute = attr;\n throw error;\n }\n }\n}\n\n/**\n * Fills in missing fields in certificate extensions.\n *\n * @param e the extension.\n * @param [options] the options to use.\n * [cert] the certificate the extensions are for.\n *\n * @return the extension.\n */\nfunction _fillMissingExtensionFields(e, options) {\n options = options || {};\n\n // populate missing name\n if(typeof e.name === 'undefined') {\n if(e.id && e.id in pki.oids) {\n e.name = pki.oids[e.id];\n }\n }\n\n // populate missing id\n if(typeof e.id === 'undefined') {\n if(e.name && e.name in pki.oids) {\n e.id = pki.oids[e.name];\n } else {\n var error = new Error('Extension ID not specified.');\n error.extension = e;\n throw error;\n }\n }\n\n if(typeof e.value !== 'undefined') {\n return e;\n }\n\n // handle missing value:\n\n // value is a BIT STRING\n if(e.name === 'keyUsage') {\n // build flags\n var unused = 0;\n var b2 = 0x00;\n var b3 = 0x00;\n if(e.digitalSignature) {\n b2 |= 0x80;\n unused = 7;\n }\n if(e.nonRepudiation) {\n b2 |= 0x40;\n unused = 6;\n }\n if(e.keyEncipherment) {\n b2 |= 0x20;\n unused = 5;\n }\n if(e.dataEncipherment) {\n b2 |= 0x10;\n unused = 4;\n }\n if(e.keyAgreement) {\n b2 |= 0x08;\n unused = 3;\n }\n if(e.keyCertSign) {\n b2 |= 0x04;\n unused = 2;\n }\n if(e.cRLSign) {\n b2 |= 0x02;\n unused = 1;\n }\n if(e.encipherOnly) {\n b2 |= 0x01;\n unused = 0;\n }\n if(e.decipherOnly) {\n b3 |= 0x80;\n unused = 7;\n }\n\n // create bit string\n var value = String.fromCharCode(unused);\n if(b3 !== 0) {\n value += String.fromCharCode(b2) + String.fromCharCode(b3);\n } else if(b2 !== 0) {\n value += String.fromCharCode(b2);\n }\n e.value = asn1.create(\n asn1.Class.UNIVERSAL, asn1.Type.BITSTRING, false, value);\n } else if(e.name === 'basicConstraints') {\n // basicConstraints is a SEQUENCE\n e.value = asn1.create(\n asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, []);\n // cA BOOLEAN flag defaults to false\n if(e.cA) {\n e.value.value.push(asn1.create(\n asn1.Class.UNIVERSAL, asn1.Type.BOOLEAN, false,\n String.fromCharCode(0xFF)));\n }\n if('pathLenConstraint' in e) {\n 
e.value.value.push(asn1.create(\n asn1.Class.UNIVERSAL, asn1.Type.INTEGER, false,\n asn1.integerToDer(e.pathLenConstraint).getBytes()));\n }\n } else if(e.name === 'extKeyUsage') {\n // extKeyUsage is a SEQUENCE of OIDs\n e.value = asn1.create(\n asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, []);\n var seq = e.value.value;\n for(var key in e) {\n if(e[key] !== true) {\n continue;\n }\n // key is name in OID map\n if(key in oids) {\n seq.push(asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OID,\n false, asn1.oidToDer(oids[key]).getBytes()));\n } else if(key.indexOf('.') !== -1) {\n // assume key is an OID\n seq.push(asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OID,\n false, asn1.oidToDer(key).getBytes()));\n }\n }\n } else if(e.name === 'nsCertType') {\n // nsCertType is a BIT STRING\n // build flags\n var unused = 0;\n var b2 = 0x00;\n\n if(e.client) {\n b2 |= 0x80;\n unused = 7;\n }\n if(e.server) {\n b2 |= 0x40;\n unused = 6;\n }\n if(e.email) {\n b2 |= 0x20;\n unused = 5;\n }\n if(e.objsign) {\n b2 |= 0x10;\n unused = 4;\n }\n if(e.reserved) {\n b2 |= 0x08;\n unused = 3;\n }\n if(e.sslCA) {\n b2 |= 0x04;\n unused = 2;\n }\n if(e.emailCA) {\n b2 |= 0x02;\n unused = 1;\n }\n if(e.objCA) {\n b2 |= 0x01;\n unused = 0;\n }\n\n // create bit string\n var value = String.fromCharCode(unused);\n if(b2 !== 0) {\n value += String.fromCharCode(b2);\n }\n e.value = asn1.create(\n asn1.Class.UNIVERSAL, asn1.Type.BITSTRING, false, value);\n } else if(e.name === 'subjectAltName' || e.name === 'issuerAltName') {\n // SYNTAX SEQUENCE\n e.value = asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, []);\n\n var altName;\n for(var n = 0; n < e.altNames.length; ++n) {\n altName = e.altNames[n];\n var value = altName.value;\n // handle IP\n if(altName.type === 7 && altName.ip) {\n value = forge.util.bytesFromIP(altName.ip);\n if(value === null) {\n var error = new Error(\n 'Extension \"ip\" value is not a valid IPv4 or IPv6 address.');\n error.extension = e;\n throw error;\n }\n } else if(altName.type === 8) {\n // handle OID\n if(altName.oid) {\n value = asn1.oidToDer(asn1.oidToDer(altName.oid));\n } else {\n // deprecated ... 
convert value to OID\n value = asn1.oidToDer(value);\n }\n }\n e.value.value.push(asn1.create(\n asn1.Class.CONTEXT_SPECIFIC, altName.type, false,\n value));\n }\n } else if(e.name === 'nsComment' && options.cert) {\n // sanity check value is ASCII (req'd) and not too big\n if(!(/^[\\x00-\\x7F]*$/.test(e.comment)) ||\n (e.comment.length < 1) || (e.comment.length > 128)) {\n throw new Error('Invalid \"nsComment\" content.');\n }\n // IA5STRING opaque comment\n e.value = asn1.create(\n asn1.Class.UNIVERSAL, asn1.Type.IA5STRING, false, e.comment);\n } else if(e.name === 'subjectKeyIdentifier' && options.cert) {\n var ski = options.cert.generateSubjectKeyIdentifier();\n e.subjectKeyIdentifier = ski.toHex();\n // OCTETSTRING w/digest\n e.value = asn1.create(\n asn1.Class.UNIVERSAL, asn1.Type.OCTETSTRING, false, ski.getBytes());\n } else if(e.name === 'authorityKeyIdentifier' && options.cert) {\n // SYNTAX SEQUENCE\n e.value = asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, []);\n var seq = e.value.value;\n\n if(e.keyIdentifier) {\n var keyIdentifier = (e.keyIdentifier === true ?\n options.cert.generateSubjectKeyIdentifier().getBytes() :\n e.keyIdentifier);\n seq.push(\n asn1.create(asn1.Class.CONTEXT_SPECIFIC, 0, false, keyIdentifier));\n }\n\n if(e.authorityCertIssuer) {\n var authorityCertIssuer = [\n asn1.create(asn1.Class.CONTEXT_SPECIFIC, 4, true, [\n _dnToAsn1(e.authorityCertIssuer === true ?\n options.cert.issuer : e.authorityCertIssuer)\n ])\n ];\n seq.push(\n asn1.create(asn1.Class.CONTEXT_SPECIFIC, 1, true, authorityCertIssuer));\n }\n\n if(e.serialNumber) {\n var serialNumber = forge.util.hexToBytes(e.serialNumber === true ?\n options.cert.serialNumber : e.serialNumber);\n seq.push(\n asn1.create(asn1.Class.CONTEXT_SPECIFIC, 2, false, serialNumber));\n }\n } else if(e.name === 'cRLDistributionPoints') {\n e.value = asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, []);\n var seq = e.value.value;\n\n // Create sub SEQUENCE of DistributionPointName\n var subSeq = asn1.create(\n asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, []);\n\n // Create fullName CHOICE\n var fullNameGeneralNames = asn1.create(\n asn1.Class.CONTEXT_SPECIFIC, 0, true, []);\n var altName;\n for(var n = 0; n < e.altNames.length; ++n) {\n altName = e.altNames[n];\n var value = altName.value;\n // handle IP\n if(altName.type === 7 && altName.ip) {\n value = forge.util.bytesFromIP(altName.ip);\n if(value === null) {\n var error = new Error(\n 'Extension \"ip\" value is not a valid IPv4 or IPv6 address.');\n error.extension = e;\n throw error;\n }\n } else if(altName.type === 8) {\n // handle OID\n if(altName.oid) {\n value = asn1.oidToDer(asn1.oidToDer(altName.oid));\n } else {\n // deprecated ... 
convert value to OID\n value = asn1.oidToDer(value);\n }\n }\n fullNameGeneralNames.value.push(asn1.create(\n asn1.Class.CONTEXT_SPECIFIC, altName.type, false,\n value));\n }\n\n // Add to the parent SEQUENCE\n subSeq.value.push(asn1.create(\n asn1.Class.CONTEXT_SPECIFIC, 0, true, [fullNameGeneralNames]));\n seq.push(subSeq);\n }\n\n // ensure value has been defined by now\n if(typeof e.value === 'undefined') {\n var error = new Error('Extension value not specified.');\n error.extension = e;\n throw error;\n }\n\n return e;\n}\n\n/**\n * Convert signature parameters object to ASN.1\n *\n * @param {String} oid Signature algorithm OID\n * @param params The signature parametrs object\n * @return ASN.1 object representing signature parameters\n */\nfunction _signatureParametersToAsn1(oid, params) {\n switch(oid) {\n case oids['RSASSA-PSS']:\n var parts = [];\n\n if(params.hash.algorithmOid !== undefined) {\n parts.push(asn1.create(asn1.Class.CONTEXT_SPECIFIC, 0, true, [\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OID, false,\n asn1.oidToDer(params.hash.algorithmOid).getBytes()),\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.NULL, false, '')\n ])\n ]));\n }\n\n if(params.mgf.algorithmOid !== undefined) {\n parts.push(asn1.create(asn1.Class.CONTEXT_SPECIFIC, 1, true, [\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OID, false,\n asn1.oidToDer(params.mgf.algorithmOid).getBytes()),\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OID, false,\n asn1.oidToDer(params.mgf.hash.algorithmOid).getBytes()),\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.NULL, false, '')\n ])\n ])\n ]));\n }\n\n if(params.saltLength !== undefined) {\n parts.push(asn1.create(asn1.Class.CONTEXT_SPECIFIC, 2, true, [\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.INTEGER, false,\n asn1.integerToDer(params.saltLength).getBytes())\n ]));\n }\n\n return asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, parts);\n\n default:\n return asn1.create(asn1.Class.UNIVERSAL, asn1.Type.NULL, false, '');\n }\n}\n\n/**\n * Converts a certification request's attributes to an ASN.1 set of\n * CRIAttributes.\n *\n * @param csr certification request.\n *\n * @return the ASN.1 set of CRIAttributes.\n */\nfunction _CRIAttributesToAsn1(csr) {\n // create an empty context-specific container\n var rval = asn1.create(asn1.Class.CONTEXT_SPECIFIC, 0, true, []);\n\n // no attributes, return empty container\n if(csr.attributes.length === 0) {\n return rval;\n }\n\n // each attribute has a sequence with a type and a set of values\n var attrs = csr.attributes;\n for(var i = 0; i < attrs.length; ++i) {\n var attr = attrs[i];\n var value = attr.value;\n\n // reuse tag class for attribute value if available\n var valueTagClass = asn1.Type.UTF8;\n if('valueTagClass' in attr) {\n valueTagClass = attr.valueTagClass;\n }\n if(valueTagClass === asn1.Type.UTF8) {\n value = forge.util.encodeUtf8(value);\n }\n var valueConstructed = false;\n if('valueConstructed' in attr) {\n valueConstructed = attr.valueConstructed;\n }\n // FIXME: handle more encodings\n\n // create a RelativeDistinguishedName set\n // each value in the set is an AttributeTypeAndValue first\n // containing the type (an OID) and second the value\n var seq = asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [\n // AttributeType\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OID, false,\n 
asn1.oidToDer(attr.type).getBytes()),\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SET, true, [\n // AttributeValue\n asn1.create(\n asn1.Class.UNIVERSAL, valueTagClass, valueConstructed, value)\n ])\n ]);\n rval.value.push(seq);\n }\n\n return rval;\n}\n\nvar jan_1_1950 = new Date('1950-01-01T00:00:00Z');\nvar jan_1_2050 = new Date('2050-01-01T00:00:00Z');\n\n/**\n * Converts a Date object to ASN.1\n * Handles the different format before and after 1st January 2050\n *\n * @param date date object.\n *\n * @return the ASN.1 object representing the date.\n */\nfunction _dateToAsn1(date) {\n if(date >= jan_1_1950 && date < jan_1_2050) {\n return asn1.create(\n asn1.Class.UNIVERSAL, asn1.Type.UTCTIME, false,\n asn1.dateToUtcTime(date));\n } else {\n return asn1.create(\n asn1.Class.UNIVERSAL, asn1.Type.GENERALIZEDTIME, false,\n asn1.dateToGeneralizedTime(date));\n }\n}\n\n/**\n * Gets the ASN.1 TBSCertificate part of an X.509v3 certificate.\n *\n * @param cert the certificate.\n *\n * @return the asn1 TBSCertificate.\n */\npki.getTBSCertificate = function(cert) {\n // TBSCertificate\n var notBefore = _dateToAsn1(cert.validity.notBefore);\n var notAfter = _dateToAsn1(cert.validity.notAfter);\n var tbs = asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [\n // version\n asn1.create(asn1.Class.CONTEXT_SPECIFIC, 0, true, [\n // integer\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.INTEGER, false,\n asn1.integerToDer(cert.version).getBytes())\n ]),\n // serialNumber\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.INTEGER, false,\n forge.util.hexToBytes(cert.serialNumber)),\n // signature\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [\n // algorithm\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OID, false,\n asn1.oidToDer(cert.siginfo.algorithmOid).getBytes()),\n // parameters\n _signatureParametersToAsn1(\n cert.siginfo.algorithmOid, cert.siginfo.parameters)\n ]),\n // issuer\n _dnToAsn1(cert.issuer),\n // validity\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [\n notBefore,\n notAfter\n ]),\n // subject\n _dnToAsn1(cert.subject),\n // SubjectPublicKeyInfo\n pki.publicKeyToAsn1(cert.publicKey)\n ]);\n\n if(cert.issuer.uniqueId) {\n // issuerUniqueID (optional)\n tbs.value.push(\n asn1.create(asn1.Class.CONTEXT_SPECIFIC, 1, true, [\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.BITSTRING, false,\n // TODO: support arbitrary bit length ids\n String.fromCharCode(0x00) +\n cert.issuer.uniqueId\n )\n ])\n );\n }\n if(cert.subject.uniqueId) {\n // subjectUniqueID (optional)\n tbs.value.push(\n asn1.create(asn1.Class.CONTEXT_SPECIFIC, 2, true, [\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.BITSTRING, false,\n // TODO: support arbitrary bit length ids\n String.fromCharCode(0x00) +\n cert.subject.uniqueId\n )\n ])\n );\n }\n\n if(cert.extensions.length > 0) {\n // extensions (optional)\n tbs.value.push(pki.certificateExtensionsToAsn1(cert.extensions));\n }\n\n return tbs;\n};\n\n/**\n * Gets the ASN.1 CertificationRequestInfo part of a\n * PKCS#10 CertificationRequest.\n *\n * @param csr the certification request.\n *\n * @return the asn1 CertificationRequestInfo.\n */\npki.getCertificationRequestInfo = function(csr) {\n // CertificationRequestInfo\n var cri = asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [\n // version\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.INTEGER, false,\n asn1.integerToDer(csr.version).getBytes()),\n // subject\n _dnToAsn1(csr.subject),\n // SubjectPublicKeyInfo\n pki.publicKeyToAsn1(csr.publicKey),\n // attributes\n 
_CRIAttributesToAsn1(csr)\n ]);\n\n return cri;\n};\n\n/**\n * Converts a DistinguishedName (subject or issuer) to an ASN.1 object.\n *\n * @param dn the DistinguishedName.\n *\n * @return the asn1 representation of a DistinguishedName.\n */\npki.distinguishedNameToAsn1 = function(dn) {\n return _dnToAsn1(dn);\n};\n\n/**\n * Converts an X.509v3 RSA certificate to an ASN.1 object.\n *\n * @param cert the certificate.\n *\n * @return the asn1 representation of an X.509v3 RSA certificate.\n */\npki.certificateToAsn1 = function(cert) {\n // prefer cached TBSCertificate over generating one\n var tbsCertificate = cert.tbsCertificate || pki.getTBSCertificate(cert);\n\n // Certificate\n return asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [\n // TBSCertificate\n tbsCertificate,\n // AlgorithmIdentifier (signature algorithm)\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [\n // algorithm\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OID, false,\n asn1.oidToDer(cert.signatureOid).getBytes()),\n // parameters\n _signatureParametersToAsn1(cert.signatureOid, cert.signatureParameters)\n ]),\n // SignatureValue\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.BITSTRING, false,\n String.fromCharCode(0x00) + cert.signature)\n ]);\n};\n\n/**\n * Converts X.509v3 certificate extensions to ASN.1.\n *\n * @param exts the extensions to convert.\n *\n * @return the extensions in ASN.1 format.\n */\npki.certificateExtensionsToAsn1 = function(exts) {\n // create top-level extension container\n var rval = asn1.create(asn1.Class.CONTEXT_SPECIFIC, 3, true, []);\n\n // create extension sequence (stores a sequence for each extension)\n var seq = asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, []);\n rval.value.push(seq);\n\n for(var i = 0; i < exts.length; ++i) {\n seq.value.push(pki.certificateExtensionToAsn1(exts[i]));\n }\n\n return rval;\n};\n\n/**\n * Converts a single certificate extension to ASN.1.\n *\n * @param ext the extension to convert.\n *\n * @return the extension in ASN.1 format.\n */\npki.certificateExtensionToAsn1 = function(ext) {\n // create a sequence for each extension\n var extseq = asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, []);\n\n // extnID (OID)\n extseq.value.push(asn1.create(\n asn1.Class.UNIVERSAL, asn1.Type.OID, false,\n asn1.oidToDer(ext.id).getBytes()));\n\n // critical defaults to false\n if(ext.critical) {\n // critical BOOLEAN DEFAULT FALSE\n extseq.value.push(asn1.create(\n asn1.Class.UNIVERSAL, asn1.Type.BOOLEAN, false,\n String.fromCharCode(0xFF)));\n }\n\n var value = ext.value;\n if(typeof ext.value !== 'string') {\n // value is asn.1\n value = asn1.toDer(value).getBytes();\n }\n\n // extnValue (OCTET STRING)\n extseq.value.push(asn1.create(\n asn1.Class.UNIVERSAL, asn1.Type.OCTETSTRING, false, value));\n\n return extseq;\n};\n\n/**\n * Converts a PKCS#10 certification request to an ASN.1 object.\n *\n * @param csr the certification request.\n *\n * @return the asn1 representation of a certification request.\n */\npki.certificationRequestToAsn1 = function(csr) {\n // prefer cached CertificationRequestInfo over generating one\n var cri = csr.certificationRequestInfo ||\n pki.getCertificationRequestInfo(csr);\n\n // Certificate\n return asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [\n // CertificationRequestInfo\n cri,\n // AlgorithmIdentifier (signature algorithm)\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [\n // algorithm\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OID, false,\n 
asn1.oidToDer(csr.signatureOid).getBytes()),\n // parameters\n _signatureParametersToAsn1(csr.signatureOid, csr.signatureParameters)\n ]),\n // signature\n asn1.create(asn1.Class.UNIVERSAL, asn1.Type.BITSTRING, false,\n String.fromCharCode(0x00) + csr.signature)\n ]);\n};\n\n/**\n * Creates a CA store.\n *\n * @param certs an optional array of certificate objects or PEM-formatted\n * certificate strings to add to the CA store.\n *\n * @return the CA store.\n */\npki.createCaStore = function(certs) {\n // create CA store\n var caStore = {\n // stored certificates\n certs: {}\n };\n\n /**\n * Gets the certificate that issued the passed certificate or its\n * 'parent'.\n *\n * @param cert the certificate to get the parent for.\n *\n * @return the parent certificate or null if none was found.\n */\n caStore.getIssuer = function(cert) {\n var rval = getBySubject(cert.issuer);\n\n // see if there are multiple matches\n /*if(forge.util.isArray(rval)) {\n // TODO: resolve multiple matches by checking\n // authorityKey/subjectKey/issuerUniqueID/other identifiers, etc.\n // FIXME: or alternatively do authority key mapping\n // if possible (X.509v1 certs can't work?)\n throw new Error('Resolving multiple issuer matches not implemented yet.');\n }*/\n\n return rval;\n };\n\n /**\n * Adds a trusted certificate to the store.\n *\n * @param cert the certificate to add as a trusted certificate (either a\n * pki.certificate object or a PEM-formatted certificate).\n */\n caStore.addCertificate = function(cert) {\n // convert from pem if necessary\n if(typeof cert === 'string') {\n cert = forge.pki.certificateFromPem(cert);\n }\n\n ensureSubjectHasHash(cert.subject);\n\n if(!caStore.hasCertificate(cert)) { // avoid duplicate certificates in store\n if(cert.subject.hash in caStore.certs) {\n // subject hash already exists, append to array\n var tmp = caStore.certs[cert.subject.hash];\n if(!forge.util.isArray(tmp)) {\n tmp = [tmp];\n }\n tmp.push(cert);\n caStore.certs[cert.subject.hash] = tmp;\n } else {\n caStore.certs[cert.subject.hash] = cert;\n }\n }\n };\n\n /**\n * Checks to see if the given certificate is in the store.\n *\n * @param cert the certificate to check (either a pki.certificate or a\n * PEM-formatted certificate).\n *\n * @return true if the certificate is in the store, false if not.\n */\n caStore.hasCertificate = function(cert) {\n // convert from pem if necessary\n if(typeof cert === 'string') {\n cert = forge.pki.certificateFromPem(cert);\n }\n\n var match = getBySubject(cert.subject);\n if(!match) {\n return false;\n }\n if(!forge.util.isArray(match)) {\n match = [match];\n }\n // compare DER-encoding of certificates\n var der1 = asn1.toDer(pki.certificateToAsn1(cert)).getBytes();\n for(var i = 0; i < match.length; ++i) {\n var der2 = asn1.toDer(pki.certificateToAsn1(match[i])).getBytes();\n if(der1 === der2) {\n return true;\n }\n }\n return false;\n };\n\n /**\n * Lists all of the certificates kept in the store.\n *\n * @return an array of all of the pki.certificate objects in the store.\n */\n caStore.listAllCertificates = function() {\n var certList = [];\n\n for(var hash in caStore.certs) {\n if(caStore.certs.hasOwnProperty(hash)) {\n var value = caStore.certs[hash];\n if(!forge.util.isArray(value)) {\n certList.push(value);\n } else {\n for(var i = 0; i < value.length; ++i) {\n certList.push(value[i]);\n }\n }\n }\n }\n\n return certList;\n };\n\n /**\n * Removes a certificate from the store.\n *\n * @param cert the certificate to remove (either a pki.certificate or a\n * 
PEM-formatted certificate).\n *\n * @return the certificate that was removed or null if the certificate\n * wasn't in store.\n */\n caStore.removeCertificate = function(cert) {\n var result;\n\n // convert from pem if necessary\n if(typeof cert === 'string') {\n cert = forge.pki.certificateFromPem(cert);\n }\n ensureSubjectHasHash(cert.subject);\n if(!caStore.hasCertificate(cert)) {\n return null;\n }\n\n var match = getBySubject(cert.subject);\n\n if(!forge.util.isArray(match)) {\n result = caStore.certs[cert.subject.hash];\n delete caStore.certs[cert.subject.hash];\n return result;\n }\n\n // compare DER-encoding of certificates\n var der1 = asn1.toDer(pki.certificateToAsn1(cert)).getBytes();\n for(var i = 0; i < match.length; ++i) {\n var der2 = asn1.toDer(pki.certificateToAsn1(match[i])).getBytes();\n if(der1 === der2) {\n result = match[i];\n match.splice(i, 1);\n }\n }\n if(match.length === 0) {\n delete caStore.certs[cert.subject.hash];\n }\n\n return result;\n };\n\n function getBySubject(subject) {\n ensureSubjectHasHash(subject);\n return caStore.certs[subject.hash] || null;\n }\n\n function ensureSubjectHasHash(subject) {\n // produce subject hash if it doesn't exist\n if(!subject.hash) {\n var md = forge.md.sha1.create();\n subject.attributes = pki.RDNAttributesAsArray(_dnToAsn1(subject), md);\n subject.hash = md.digest().toHex();\n }\n }\n\n // auto-add passed in certs\n if(certs) {\n // parse PEM-formatted certificates as necessary\n for(var i = 0; i < certs.length; ++i) {\n var cert = certs[i];\n caStore.addCertificate(cert);\n }\n }\n\n return caStore;\n};\n\n/**\n * Certificate verification errors, based on TLS.\n */\npki.certificateError = {\n bad_certificate: 'forge.pki.BadCertificate',\n unsupported_certificate: 'forge.pki.UnsupportedCertificate',\n certificate_revoked: 'forge.pki.CertificateRevoked',\n certificate_expired: 'forge.pki.CertificateExpired',\n certificate_unknown: 'forge.pki.CertificateUnknown',\n unknown_ca: 'forge.pki.UnknownCertificateAuthority'\n};\n\n/**\n * Verifies a certificate chain against the given Certificate Authority store\n * with an optional custom verify callback.\n *\n * @param caStore a certificate store to verify against.\n * @param chain the certificate chain to verify, with the root or highest\n * authority at the end (an array of certificates).\n * @param options a callback to be called for every certificate in the chain or\n * an object with:\n * verify a callback to be called for every certificate in the\n * chain\n * validityCheckDate the date against which the certificate\n * validity period should be checked. Pass null to not check\n * the validity period. 
By default, the current date is used.\n *\n * The verify callback has the following signature:\n *\n * verified - Set to true if certificate was verified, otherwise the\n * pki.certificateError for why the certificate failed.\n * depth - The current index in the chain, where 0 is the end point's cert.\n * certs - The certificate chain, *NOTE* an empty chain indicates an anonymous\n * end point.\n *\n * The function returns true on success and on failure either the appropriate\n * pki.certificateError or an object with 'error' set to the appropriate\n * pki.certificateError and 'message' set to a custom error message.\n *\n * @return true if successful, error thrown if not.\n */\npki.verifyCertificateChain = function(caStore, chain, options) {\n /* From: RFC3280 - Internet X.509 Public Key Infrastructure Certificate\n Section 6: Certification Path Validation\n See inline parentheticals related to this particular implementation.\n\n The primary goal of path validation is to verify the binding between\n a subject distinguished name or a subject alternative name and subject\n public key, as represented in the end entity certificate, based on the\n public key of the trust anchor. This requires obtaining a sequence of\n certificates that support that binding. That sequence should be provided\n in the passed 'chain'. The trust anchor should be in the given CA\n store. The 'end entity' certificate is the certificate provided by the\n end point (typically a server) and is the first in the chain.\n\n To meet this goal, the path validation process verifies, among other\n things, that a prospective certification path (a sequence of n\n certificates or a 'chain') satisfies the following conditions:\n\n (a) for all x in {1, ..., n-1}, the subject of certificate x is\n the issuer of certificate x+1;\n\n (b) certificate 1 is issued by the trust anchor;\n\n (c) certificate n is the certificate to be validated; and\n\n (d) for all x in {1, ..., n}, the certificate was valid at the\n time in question.\n\n Note that here 'n' is index 0 in the chain and 1 is the last certificate\n in the chain and it must be signed by a certificate in the connection's\n CA store.\n\n The path validation process also determines the set of certificate\n policies that are valid for this path, based on the certificate policies\n extension, policy mapping extension, policy constraints extension, and\n inhibit any-policy extension.\n\n Note: Policy mapping extension not supported (Not Required).\n\n Note: If the certificate has an unsupported critical extension, then it\n must be rejected.\n\n Note: A certificate is self-issued if the DNs that appear in the subject\n and issuer fields are identical and are not empty.\n\n The path validation algorithm assumes the following seven inputs are\n provided to the path processing logic. What this specific implementation\n will use is provided parenthetically:\n\n (a) a prospective certification path of length n (the 'chain')\n (b) the current date/time: ('now').\n (c) user-initial-policy-set: A set of certificate policy identifiers\n naming the policies that are acceptable to the certificate user.\n The user-initial-policy-set contains the special value any-policy\n if the user is not concerned about certificate policy\n (Not implemented. Any policy is accepted).\n (d) trust anchor information, describing a CA that serves as a trust\n anchor for the certification path. 
The trust anchor information\n includes:\n\n (1) the trusted issuer name,\n (2) the trusted public key algorithm,\n (3) the trusted public key, and\n (4) optionally, the trusted public key parameters associated\n with the public key.\n\n (Trust anchors are provided via certificates in the CA store).\n\n The trust anchor information may be provided to the path processing\n procedure in the form of a self-signed certificate. The trusted anchor\n information is trusted because it was delivered to the path processing\n procedure by some trustworthy out-of-band procedure. If the trusted\n public key algorithm requires parameters, then the parameters are\n provided along with the trusted public key (No parameters used in this\n implementation).\n\n (e) initial-policy-mapping-inhibit, which indicates if policy mapping is\n allowed in the certification path.\n (Not implemented, no policy checking)\n\n (f) initial-explicit-policy, which indicates if the path must be valid\n for at least one of the certificate policies in the user-initial-\n policy-set.\n (Not implemented, no policy checking)\n\n (g) initial-any-policy-inhibit, which indicates whether the\n anyPolicy OID should be processed if it is included in a\n certificate.\n (Not implemented, so any policy is valid provided that it is\n not marked as critical) */\n\n /* Basic Path Processing:\n\n For each certificate in the 'chain', the following is checked:\n\n 1. The certificate validity period includes the current time.\n 2. The certificate was signed by its parent (where the parent is either\n the next in the chain or from the CA store). Allow processing to\n continue to the next step if no parent is found but the certificate is\n in the CA store.\n 3. TODO: The certificate has not been revoked.\n 4. The certificate issuer name matches the parent's subject name.\n 5. TODO: If the certificate is self-issued and not the final certificate\n in the chain, skip this step, otherwise verify that the subject name\n is within one of the permitted subtrees of X.500 distinguished names\n and that each of the alternative names in the subjectAltName extension\n (critical or non-critical) is within one of the permitted subtrees for\n that name type.\n 6. TODO: If the certificate is self-issued and not the final certificate\n in the chain, skip this step, otherwise verify that the subject name\n is not within one of the excluded subtrees for X.500 distinguished\n names and none of the subjectAltName extension names are excluded for\n that name type.\n 7. The other steps in the algorithm for basic path processing involve\n handling the policy extension which is not presently supported in this\n implementation. Instead, if a critical policy extension is found, the\n certificate is rejected as not supported.\n 8. If the certificate is not the first or if its the only certificate in\n the chain (having no parent from the CA store or is self-signed) and it\n has a critical key usage extension, verify that the keyCertSign bit is\n set. If the key usage extension exists, verify that the basic\n constraints extension exists. If the basic constraints extension exists,\n verify that the cA flag is set. If pathLenConstraint is set, ensure that\n the number of certificates that precede in the chain (come earlier\n in the chain as implemented below), excluding the very first in the\n chain (typically the end-entity one), isn't greater than the\n pathLenConstraint. 
This constraint limits the number of intermediate\n CAs that may appear below a CA before only end-entity certificates\n may be issued. */\n\n // if a verify callback is passed as the third parameter, package it within\n // the options object. This is to support a legacy function signature that\n // expected the verify callback as the third parameter.\n if(typeof options === 'function') {\n options = {verify: options};\n }\n options = options || {};\n\n // copy cert chain references to another array to protect against changes\n // in verify callback\n chain = chain.slice(0);\n var certs = chain.slice(0);\n\n var validityCheckDate = options.validityCheckDate;\n // if no validityCheckDate is specified, default to the current date. Make\n // sure to maintain the value null because it indicates that the validity\n // period should not be checked.\n if(typeof validityCheckDate === 'undefined') {\n validityCheckDate = new Date();\n }\n\n // verify each cert in the chain using its parent, where the parent\n // is either the next in the chain or from the CA store\n var first = true;\n var error = null;\n var depth = 0;\n do {\n var cert = chain.shift();\n var parent = null;\n var selfSigned = false;\n\n if(validityCheckDate) {\n // 1. check valid time\n if(validityCheckDate < cert.validity.notBefore ||\n validityCheckDate > cert.validity.notAfter) {\n error = {\n message: 'Certificate is not valid yet or has expired.',\n error: pki.certificateError.certificate_expired,\n notBefore: cert.validity.notBefore,\n notAfter: cert.validity.notAfter,\n // TODO: we might want to reconsider renaming 'now' to\n // 'validityCheckDate' should this API be changed in the future.\n now: validityCheckDate\n };\n }\n }\n\n // 2. verify with parent from chain or CA store\n if(error === null) {\n parent = chain[0] || caStore.getIssuer(cert);\n if(parent === null) {\n // check for self-signed cert\n if(cert.isIssuer(cert)) {\n selfSigned = true;\n parent = cert;\n }\n }\n\n if(parent) {\n // FIXME: current CA store implementation might have multiple\n // certificates where the issuer can't be determined from the\n // certificate (happens rarely with, eg: old certificates) so normalize\n // by always putting parents into an array\n // TODO: there's may be an extreme degenerate case currently uncovered\n // where an old intermediate certificate seems to have a matching parent\n // but none of the parents actually verify ... but the intermediate\n // is in the CA and it should pass this check; needs investigation\n var parents = parent;\n if(!forge.util.isArray(parents)) {\n parents = [parents];\n }\n\n // try to verify with each possible parent (typically only one)\n var verified = false;\n while(!verified && parents.length > 0) {\n parent = parents.shift();\n try {\n verified = parent.verify(cert);\n } catch(ex) {\n // failure to verify, don't care why, try next one\n }\n }\n\n if(!verified) {\n error = {\n message: 'Certificate signature is invalid.',\n error: pki.certificateError.bad_certificate\n };\n }\n }\n\n if(error === null && (!parent || selfSigned) &&\n !caStore.hasCertificate(cert)) {\n // no parent issuer and certificate itself is not trusted\n error = {\n message: 'Certificate is not trusted.',\n error: pki.certificateError.unknown_ca\n };\n }\n }\n\n // TODO: 3. check revoked\n\n // 4. 
check for matching issuer/subject\n if(error === null && parent && !cert.isIssuer(parent)) {\n // parent is not issuer\n error = {\n message: 'Certificate issuer is invalid.',\n error: pki.certificateError.bad_certificate\n };\n }\n\n // 5. TODO: check names with permitted names tree\n\n // 6. TODO: check names against excluded names tree\n\n // 7. check for unsupported critical extensions\n if(error === null) {\n // supported extensions\n var se = {\n keyUsage: true,\n basicConstraints: true\n };\n for(var i = 0; error === null && i < cert.extensions.length; ++i) {\n var ext = cert.extensions[i];\n if(ext.critical && !(ext.name in se)) {\n error = {\n message:\n 'Certificate has an unsupported critical extension.',\n error: pki.certificateError.unsupported_certificate\n };\n }\n }\n }\n\n // 8. check for CA if cert is not first or is the only certificate\n // remaining in chain with no parent or is self-signed\n if(error === null &&\n (!first || (chain.length === 0 && (!parent || selfSigned)))) {\n // first check keyUsage extension and then basic constraints\n var bcExt = cert.getExtension('basicConstraints');\n var keyUsageExt = cert.getExtension('keyUsage');\n if(keyUsageExt !== null) {\n // keyCertSign must be true and there must be a basic\n // constraints extension\n if(!keyUsageExt.keyCertSign || bcExt === null) {\n // bad certificate\n error = {\n message:\n 'Certificate keyUsage or basicConstraints conflict ' +\n 'or indicate that the certificate is not a CA. ' +\n 'If the certificate is the only one in the chain or ' +\n 'isn\\'t the first then the certificate must be a ' +\n 'valid CA.',\n error: pki.certificateError.bad_certificate\n };\n }\n }\n // basic constraints cA flag must be set\n if(error === null && bcExt !== null && !bcExt.cA) {\n // bad certificate\n error = {\n message:\n 'Certificate basicConstraints indicates the certificate ' +\n 'is not a CA.',\n error: pki.certificateError.bad_certificate\n };\n }\n // if error is not null and keyUsage is available, then we know it\n // has keyCertSign and there is a basic constraints extension too,\n // which means we can check pathLenConstraint (if it exists)\n if(error === null && keyUsageExt !== null &&\n 'pathLenConstraint' in bcExt) {\n // pathLen is the maximum # of intermediate CA certs that can be\n // found between the current certificate and the end-entity (depth 0)\n // certificate; this number does not include the end-entity (depth 0,\n // last in the chain) even if it happens to be a CA certificate itself\n var pathLen = depth - 1;\n if(pathLen > bcExt.pathLenConstraint) {\n // pathLenConstraint violated, bad certificate\n error = {\n message:\n 'Certificate basicConstraints pathLenConstraint violated.',\n error: pki.certificateError.bad_certificate\n };\n }\n }\n }\n\n // call application callback\n var vfd = (error === null) ? true : error.error;\n var ret = options.verify ? 
options.verify(vfd, depth, certs) : vfd;\n if(ret === true) {\n // clear any set error\n error = null;\n } else {\n // if passed basic tests, set default message and alert\n if(vfd === true) {\n error = {\n message: 'The application rejected the certificate.',\n error: pki.certificateError.bad_certificate\n };\n }\n\n // check for custom error info\n if(ret || ret === 0) {\n // set custom message and error\n if(typeof ret === 'object' && !forge.util.isArray(ret)) {\n if(ret.message) {\n error.message = ret.message;\n }\n if(ret.error) {\n error.error = ret.error;\n }\n } else if(typeof ret === 'string') {\n // set custom error\n error.error = ret;\n }\n }\n\n // throw error\n throw error;\n }\n\n // no longer first cert in chain\n first = false;\n ++depth;\n } while(chain.length > 0);\n\n return true;\n};\n\n\n//# sourceURL=webpack://@waku/noise-rtc/./node_modules/node-forge/lib/x509.js?");
/***/ }),
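/*
 A minimal usage sketch for the node-forge x509 module above (createCaStore,
 certificateFromPem and verifyCertificateChain), kept as a comment so the
 bundle stays valid. `caPem` and `certPem` stand in for caller-supplied PEM
 strings and are assumptions, not part of the bundle.

   const forge = require('node-forge');
   const pki = forge.pki;

   const caStore = pki.createCaStore([caPem]);    // trusted root(s)
   const cert = pki.certificateFromPem(certPem);  // end-entity certificate

   try {
     // returns true on success, throws an object carrying a
     // pki.certificateError code on failure
     pki.verifyCertificateChain(caStore, [cert], {
       validityCheckDate: new Date()              // the default behaviour
     });
     console.log('chain verified');
   } catch (e) {
     console.error('verification failed:', e.message || e);
   }
*/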
/***/ "./node_modules/p-fifo/index.js":
/*!**************************************!*\
!*** ./node_modules/p-fifo/index.js ***!
\**************************************/
/***/ ((module, __unused_webpack_exports, __webpack_require__) => {
eval("const Fifo = __webpack_require__(/*! fast-fifo */ \"./node_modules/fast-fifo/index.js\")\nconst defer = __webpack_require__(/*! p-defer */ \"./node_modules/p-fifo/node_modules/p-defer/index.js\")\n\nmodule.exports = class PFifo {\n constructor () {\n this._buffer = new Fifo()\n this._waitingConsumers = new Fifo()\n }\n\n push (chunk) {\n const { promise, resolve } = defer()\n this._buffer.push({ chunk, resolve })\n this._consume()\n return promise\n }\n\n _consume () {\n while (!this._waitingConsumers.isEmpty() && !this._buffer.isEmpty()) {\n const nextConsumer = this._waitingConsumers.shift()\n const nextChunk = this._buffer.shift()\n nextConsumer.resolve(nextChunk.chunk)\n nextChunk.resolve()\n }\n }\n\n shift () {\n const { promise, resolve } = defer()\n this._waitingConsumers.push({ resolve })\n this._consume()\n return promise\n }\n\n isEmpty () {\n return this._buffer.isEmpty()\n }\n}\n\n\n//# sourceURL=webpack://@waku/noise-rtc/./node_modules/p-fifo/index.js?");
/***/ }),
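/*
 A usage sketch for the PFifo class above: push() resolves only once the
 chunk has been taken by a matching shift(). Shown as a comment; assumes
 p-fifo is installed and resolvable via require.

   const PFifo = require('p-fifo');

   async function demo() {
     const fifo = new PFifo();
     const pushed = fifo.push('hello'); // resolves after a consumer takes it
     const chunk = await fifo.shift();  // 'hello'
     await pushed;                      // settled by the shift above
     console.log(chunk, fifo.isEmpty()); // 'hello' true
   }
   demo();
*/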
/***/ "./node_modules/p-fifo/node_modules/p-defer/index.js":
/*!***********************************************************!*\
!*** ./node_modules/p-fifo/node_modules/p-defer/index.js ***!
\***********************************************************/
/***/ ((module) => {
"use strict";
eval("\n\nconst pDefer = () => {\n\tconst deferred = {};\n\n\tdeferred.promise = new Promise((resolve, reject) => {\n\t\tdeferred.resolve = resolve;\n\t\tdeferred.reject = reject;\n\t});\n\n\treturn deferred;\n};\n\nmodule.exports = pDefer;\n\n\n//# sourceURL=webpack://@waku/noise-rtc/./node_modules/p-fifo/node_modules/p-defer/index.js?");
/***/ }),
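/*
 A sketch of the pDefer helper above: it hands back a promise together with
 its resolve/reject functions so settlement can happen elsewhere. The delay()
 wrapper is illustrative only.

   const pDefer = require('p-defer');

   function delay(ms) {
     const deferred = pDefer();
     setTimeout(() => deferred.resolve('done'), ms); // settle later
     return deferred.promise;
   }

   delay(100).then(console.log); // 'done'
*/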
/***/ "./node_modules/p-queue/node_modules/eventemitter3/index.js":
/*!******************************************************************!*\
!*** ./node_modules/p-queue/node_modules/eventemitter3/index.js ***!
\******************************************************************/
/***/ ((module) => {
"use strict";
eval("\n\nvar has = Object.prototype.hasOwnProperty\n , prefix = '~';\n\n/**\n * Constructor to create a storage for our `EE` objects.\n * An `Events` instance is a plain object whose properties are event names.\n *\n * @constructor\n * @private\n */\nfunction Events() {}\n\n//\n// We try to not inherit from `Object.prototype`. In some engines creating an\n// instance in this way is faster than calling `Object.create(null)` directly.\n// If `Object.create(null)` is not supported we prefix the event names with a\n// character to make sure that the built-in object properties are not\n// overridden or used as an attack vector.\n//\nif (Object.create) {\n Events.prototype = Object.create(null);\n\n //\n // This hack is needed because the `__proto__` property is still inherited in\n // some old browsers like Android 4, iPhone 5.1, Opera 11 and Safari 5.\n //\n if (!new Events().__proto__) prefix = false;\n}\n\n/**\n * Representation of a single event listener.\n *\n * @param {Function} fn The listener function.\n * @param {*} context The context to invoke the listener with.\n * @param {Boolean} [once=false] Specify if the listener is a one-time listener.\n * @constructor\n * @private\n */\nfunction EE(fn, context, once) {\n this.fn = fn;\n this.context = context;\n this.once = once || false;\n}\n\n/**\n * Add a listener for a given event.\n *\n * @param {EventEmitter} emitter Reference to the `EventEmitter` instance.\n * @param {(String|Symbol)} event The event name.\n * @param {Function} fn The listener function.\n * @param {*} context The context to invoke the listener with.\n * @param {Boolean} once Specify if the listener is a one-time listener.\n * @returns {EventEmitter}\n * @private\n */\nfunction addListener(emitter, event, fn, context, once) {\n if (typeof fn !== 'function') {\n throw new TypeError('The listener must be a function');\n }\n\n var listener = new EE(fn, context || emitter, once)\n , evt = prefix ? prefix + event : event;\n\n if (!emitter._events[evt]) emitter._events[evt] = listener, emitter._eventsCount++;\n else if (!emitter._events[evt].fn) emitter._events[evt].push(listener);\n else emitter._events[evt] = [emitter._events[evt], listener];\n\n return emitter;\n}\n\n/**\n * Clear event by name.\n *\n * @param {EventEmitter} emitter Reference to the `EventEmitter` instance.\n * @param {(String|Symbol)} evt The Event name.\n * @private\n */\nfunction clearEvent(emitter, evt) {\n if (--emitter._eventsCount === 0) emitter._events = new Events();\n else delete emitter._events[evt];\n}\n\n/**\n * Minimal `EventEmitter` interface that is molded against the Node.js\n * `EventEmitter` interface.\n *\n * @constructor\n * @public\n */\nfunction EventEmitter() {\n this._events = new Events();\n this._eventsCount = 0;\n}\n\n/**\n * Return an array listing the events for which the emitter has registered\n * listeners.\n *\n * @returns {Array}\n * @public\n */\nEventEmitter.prototype.eventNames = function eventNames() {\n var names = []\n , events\n , name;\n\n if (this._eventsCount === 0) return names;\n\n for (name in (events = this._events)) {\n if (has.call(events, name)) names.push(prefix ? 
name.slice(1) : name);\n }\n\n if (Object.getOwnPropertySymbols) {\n return names.concat(Object.getOwnPropertySymbols(events));\n }\n\n return names;\n};\n\n/**\n * Return the listeners registered for a given event.\n *\n * @param {(String|Symbol)} event The event name.\n * @returns {Array} The registered listeners.\n * @public\n */\nEventEmitter.prototype.listeners = function listeners(event) {\n var evt = prefix ? prefix + event : event\n , handlers = this._events[evt];\n\n if (!handlers) return [];\n if (handlers.fn) return [handlers.fn];\n\n for (var i = 0, l = handlers.length, ee = new Array(l); i < l; i++) {\n ee[i] = handlers[i].fn;\n }\n\n return ee;\n};\n\n/**\n * Return the number of listeners listening to a given event.\n *\n * @param {(String|Symbol)} event The event name.\n * @returns {Number} The number of listeners.\n * @public\n */\nEventEmitter.prototype.listenerCount = function listenerCount(event) {\n var evt = prefix ? prefix + event : event\n , listeners = this._events[evt];\n\n if (!listeners) return 0;\n if (listeners.fn) return 1;\n return listeners.length;\n};\n\n/**\n * Calls each of the listeners registered for a given event.\n *\n * @param {(String|Symbol)} event The event name.\n * @returns {Boolean} `true` if the event had listeners, else `false`.\n * @public\n */\nEventEmitter.prototype.emit = function emit(event, a1, a2, a3, a4, a5) {\n var evt = prefix ? prefix + event : event;\n\n if (!this._events[evt]) return false;\n\n var listeners = this._events[evt]\n , len = arguments.length\n , args\n , i;\n\n if (listeners.fn) {\n if (listeners.once) this.removeListener(event, listeners.fn, undefined, true);\n\n switch (len) {\n case 1: return listeners.fn.call(listeners.context), true;\n case 2: return listeners.fn.call(listeners.context, a1), true;\n case 3: return listeners.fn.call(listeners.context, a1, a2), true;\n case 4: return listeners.fn.call(listeners.context, a1, a2, a3), true;\n case 5: return listeners.fn.call(listeners.context, a1, a2, a3, a4), true;\n case 6: return listeners.fn.call(listeners.context, a1, a2, a3, a4, a5), true;\n }\n\n for (i = 1, args = new Array(len -1); i < len; i++) {\n args[i - 1] = arguments[i];\n }\n\n listeners.fn.apply(listeners.context, args);\n } else {\n var length = listeners.length\n , j;\n\n for (i = 0; i < length; i++) {\n if (listeners[i].once) this.removeListener(event, listeners[i].fn, undefined, true);\n\n switch (len) {\n case 1: listeners[i].fn.call(listeners[i].context); break;\n case 2: listeners[i].fn.call(listeners[i].context, a1); break;\n case 3: listeners[i].fn.call(listeners[i].context, a1, a2); break;\n case 4: listeners[i].fn.call(listeners[i].context, a1, a2, a3); break;\n default:\n if (!args) for (j = 1, args = new Array(len -1); j < len; j++) {\n args[j - 1] = arguments[j];\n }\n\n listeners[i].fn.apply(listeners[i].context, args);\n }\n }\n }\n\n return true;\n};\n\n/**\n * Add a listener for a given event.\n *\n * @param {(String|Symbol)} event The event name.\n * @param {Function} fn The listener function.\n * @param {*} [context=this] The context to invoke the listener with.\n * @returns {EventEmitter} `this`.\n * @public\n */\nEventEmitter.prototype.on = function on(event, fn, context) {\n return addListener(this, event, fn, context, false);\n};\n\n/**\n * Add a one-time listener for a given event.\n *\n * @param {(String|Symbol)} event The event name.\n * @param {Function} fn The listener function.\n * @param {*} [context=this] The context to invoke the listener with.\n * @returns 
{EventEmitter} `this`.\n * @public\n */\nEventEmitter.prototype.once = function once(event, fn, context) {\n return addListener(this, event, fn, context, true);\n};\n\n/**\n * Remove the listeners of a given event.\n *\n * @param {(String|Symbol)} event The event name.\n * @param {Function} fn Only remove the listeners that match this function.\n * @param {*} context Only remove the listeners that have this context.\n * @param {Boolean} once Only remove one-time listeners.\n * @returns {EventEmitter} `this`.\n * @public\n */\nEventEmitter.prototype.removeListener = function removeListener(event, fn, context, once) {\n var evt = prefix ? prefix + event : event;\n\n if (!this._events[evt]) return this;\n if (!fn) {\n clearEvent(this, evt);\n return this;\n }\n\n var listeners = this._events[evt];\n\n if (listeners.fn) {\n if (\n listeners.fn === fn &&\n (!once || listeners.once) &&\n (!context || listeners.context === context)\n ) {\n clearEvent(this, evt);\n }\n } else {\n for (var i = 0, events = [], length = listeners.length; i < length; i++) {\n if (\n listeners[i].fn !== fn ||\n (once && !listeners[i].once) ||\n (context && listeners[i].context !== context)\n ) {\n events.push(listeners[i]);\n }\n }\n\n //\n // Reset the array, or remove it completely if we have no more listeners.\n //\n if (events.length) this._events[evt] = events.length === 1 ? events[0] : events;\n else clearEvent(this, evt);\n }\n\n return this;\n};\n\n/**\n * Remove all listeners, or those of the specified event.\n *\n * @param {(String|Symbol)} [event] The event name.\n * @returns {EventEmitter} `this`.\n * @public\n */\nEventEmitter.prototype.removeAllListeners = function removeAllListeners(event) {\n var evt;\n\n if (event) {\n evt = prefix ? prefix + event : event;\n if (this._events[evt]) clearEvent(this, evt);\n } else {\n this._events = new Events();\n this._eventsCount = 0;\n }\n\n return this;\n};\n\n//\n// Alias methods names because people roll like that.\n//\nEventEmitter.prototype.off = EventEmitter.prototype.removeListener;\nEventEmitter.prototype.addListener = EventEmitter.prototype.on;\n\n//\n// Expose the prefix.\n//\nEventEmitter.prefixed = prefix;\n\n//\n// Allow `EventEmitter` to be imported as module namespace.\n//\nEventEmitter.EventEmitter = EventEmitter;\n\n//\n// Expose the module.\n//\nif (true) {\n module.exports = EventEmitter;\n}\n\n\n//# sourceURL=webpack://@waku/noise-rtc/./node_modules/p-queue/node_modules/eventemitter3/index.js?");
/***/ }),
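/*
 A sketch for the EventEmitter implementation above, assuming eventemitter3
 is available to require directly; on/once/emit/removeAllListeners mirror the
 Node.js EventEmitter surface.

   const EventEmitter = require('eventemitter3');

   const ee = new EventEmitter();
   ee.once('ready', (who) => console.log('ready fired once for', who));
   ee.on('data', (chunk) => console.log('data:', chunk));

   ee.emit('ready', 'peer-a'); // logged
   ee.emit('ready', 'peer-a'); // ignored, one-time listener already removed
   ee.emit('data', 42);        // logged
   ee.removeAllListeners('data');
*/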
/***/ "./node_modules/pkcs7-padding/index.js":
/*!*********************************************!*\
!*** ./node_modules/pkcs7-padding/index.js ***!
\*********************************************/
/***/ ((module) => {
eval("var DEF_PAD_LENGTH = 16;\n\n/**\n * Append PKCS#7 padding to a buffer or a string.\n *\n * @see {@link http://tools.ietf.org/html/rfc5652|RFC 5652 section 6.3}\n *\n * @param {!(string|Uint8Array|Uint8ClampedArray)} data - the data to be padded\n * @param {number=} size - the block size to pad for\n * @return {!(string|Uint8Array|Uint8ClampedArray)} the padded data, if the function succeed\n */\nfunction pad(data, size) {\n var out = data;\n if (typeof size !== 'number') {\n size = DEF_PAD_LENGTH;\n } else if (size > 255) {\n throw new RangeError('pad(): PKCS#7 padding cannot be longer than 255 bytes');\n } else if (size < 0) {\n throw new RangeError('pad(): PKCS#7 padding size must be positive');\n }\n if (typeof data === 'string') {\n var padLen = size - data.length % size;\n if (isNaN(padLen)) padLen = 0;\n var padChar = String.fromCharCode(padLen);\n for (var i = 0; i < padLen; i++) {\n out += padChar;\n }\n } else if (data instanceof Uint8Array || data instanceof Uint8ClampedArray) {\n var baseLen = data.byteLength;\n padLen = size - baseLen % size;\n if (isNaN(padLen)) padLen = 0;\n var newLen = baseLen + padLen;\n out = new data.constructor(newLen);\n out.set(data);\n for (i = baseLen; i < newLen; i++) {\n out[i] = padLen;\n }\n } else {\n throw new TypeError('pad(): data could not be padded');\n }\n return out;\n}\n\n/**\n * Remove the PKCS#7 padding from a buffer or a string.\n *\n * @see {@link http://tools.ietf.org/html/rfc5652|RFC 5652 section 6.3}\n *\n * @param {!(string|Uint8Array|Uint8ClampedArray)} data - the data to be unpadded\n * @return {!(string|Uint8Array|Uint8ClampedArray)} the unpadded data, if the function succeed\n */\nfunction unpad(data) {\n var out = data;\n if (typeof data === 'string' && data.length > 0) {\n var padLen = data.charCodeAt(data.length - 1);\n if (padLen > data.length) {\n throw new Error('unpad(): cannot remove ' + padLen + ' bytes from a ' +\n data.length + '-byte(s) string');\n }\n for (var i = data.length - 2, end = data.length - padLen; i >= end; i--) {\n if (data.charCodeAt(i) !== padLen) {\n throw new Error('unpad(): found a padding byte of ' + data.charCodeAt(i) +\n ' instead of ' + padLen + ' at position ' + i);\n }\n }\n out = data.substring(0, end);\n } else if (data instanceof Uint8Array || data instanceof Uint8ClampedArray) {\n var baseLen = data.byteLength;\n padLen = data[baseLen - 1];\n var newLen = baseLen - padLen;\n if (newLen < 0) {\n throw new Error('unpad(): cannot remove ' + padLen + ' bytes from a ' +\n baseLen + '-byte(s) string');\n }\n for (i = baseLen - 2; i >= newLen; i--) {\n if (data[i] !== padLen) {\n throw new Error('unpad(): found a padding byte of ' + data[i] +\n ' instead of ' + padLen + ' at position ' + i);\n }\n }\n out = data.slice(0, newLen);\n }\n return out;\n}\n\nmodule.exports = { pad: pad, unpad: unpad };\n\n\n//# sourceURL=webpack://@waku/noise-rtc/./node_modules/pkcs7-padding/index.js?");
/***/ }),
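/*
 A sketch for the pad()/unpad() helpers above: PKCS#7 always appends between
 1 and `size` bytes, so five bytes pad to a full 16-byte block and an exact
 16-byte input pads to 32 bytes. Assumes pkcs7-padding is installed.

   const { pad, unpad } = require('pkcs7-padding');

   const data = new Uint8Array([1, 2, 3, 4, 5]);
   const padded = pad(data, 16);   // length 16, last 11 bytes are all 0x0b
   const restored = unpad(padded); // Uint8Array [1, 2, 3, 4, 5]
   console.log(padded.length, restored);
*/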
/***/ "./node_modules/protobufjs/index.js":
/*!******************************************!*\
!*** ./node_modules/protobufjs/index.js ***!
\******************************************/
/***/ ((module, __unused_webpack_exports, __webpack_require__) => {
"use strict";
eval("// full library entry point.\n\n\nmodule.exports = __webpack_require__(/*! ./src/index */ \"./node_modules/protobufjs/src/index.js\");\n\n\n//# sourceURL=webpack://@waku/noise-rtc/./node_modules/protobufjs/index.js?");
/***/ }),
/***/ "./node_modules/protobufjs/src/common.js":
/*!***********************************************!*\
!*** ./node_modules/protobufjs/src/common.js ***!
\***********************************************/
/***/ ((module) => {
"use strict";
eval("\nmodule.exports = common;\n\nvar commonRe = /\\/|\\./;\n\n/**\n * Provides common type definitions.\n * Can also be used to provide additional google types or your own custom types.\n * @param {string} name Short name as in `google/protobuf/[name].proto` or full file name\n * @param {Object.} json JSON definition within `google.protobuf` if a short name, otherwise the file's root definition\n * @returns {undefined}\n * @property {INamespace} google/protobuf/any.proto Any\n * @property {INamespace} google/protobuf/duration.proto Duration\n * @property {INamespace} google/protobuf/empty.proto Empty\n * @property {INamespace} google/protobuf/field_mask.proto FieldMask\n * @property {INamespace} google/protobuf/struct.proto Struct, Value, NullValue and ListValue\n * @property {INamespace} google/protobuf/timestamp.proto Timestamp\n * @property {INamespace} google/protobuf/wrappers.proto Wrappers\n * @example\n * // manually provides descriptor.proto (assumes google/protobuf/ namespace and .proto extension)\n * protobuf.common(\"descriptor\", descriptorJson);\n *\n * // manually provides a custom definition (uses my.foo namespace)\n * protobuf.common(\"my/foo/bar.proto\", myFooBarJson);\n */\nfunction common(name, json) {\n if (!commonRe.test(name)) {\n name = \"google/protobuf/\" + name + \".proto\";\n json = { nested: { google: { nested: { protobuf: { nested: json } } } } };\n }\n common[name] = json;\n}\n\n// Not provided because of limited use (feel free to discuss or to provide yourself):\n//\n// google/protobuf/descriptor.proto\n// google/protobuf/source_context.proto\n// google/protobuf/type.proto\n//\n// Stripped and pre-parsed versions of these non-bundled files are instead available as part of\n// the repository or package within the google/protobuf directory.\n\ncommon(\"any\", {\n\n /**\n * Properties of a google.protobuf.Any message.\n * @interface IAny\n * @type {Object}\n * @property {string} [typeUrl]\n * @property {Uint8Array} [bytes]\n * @memberof common\n */\n Any: {\n fields: {\n type_url: {\n type: \"string\",\n id: 1\n },\n value: {\n type: \"bytes\",\n id: 2\n }\n }\n }\n});\n\nvar timeType;\n\ncommon(\"duration\", {\n\n /**\n * Properties of a google.protobuf.Duration message.\n * @interface IDuration\n * @type {Object}\n * @property {number|Long} [seconds]\n * @property {number} [nanos]\n * @memberof common\n */\n Duration: timeType = {\n fields: {\n seconds: {\n type: \"int64\",\n id: 1\n },\n nanos: {\n type: \"int32\",\n id: 2\n }\n }\n }\n});\n\ncommon(\"timestamp\", {\n\n /**\n * Properties of a google.protobuf.Timestamp message.\n * @interface ITimestamp\n * @type {Object}\n * @property {number|Long} [seconds]\n * @property {number} [nanos]\n * @memberof common\n */\n Timestamp: timeType\n});\n\ncommon(\"empty\", {\n\n /**\n * Properties of a google.protobuf.Empty message.\n * @interface IEmpty\n * @memberof common\n */\n Empty: {\n fields: {}\n }\n});\n\ncommon(\"struct\", {\n\n /**\n * Properties of a google.protobuf.Struct message.\n * @interface IStruct\n * @type {Object}\n * @property {Object.} [fields]\n * @memberof common\n */\n Struct: {\n fields: {\n fields: {\n keyType: \"string\",\n type: \"Value\",\n id: 1\n }\n }\n },\n\n /**\n * Properties of a google.protobuf.Value message.\n * @interface IValue\n * @type {Object}\n * @property {string} [kind]\n * @property {0} [nullValue]\n * @property {number} [numberValue]\n * @property {string} [stringValue]\n * @property {boolean} [boolValue]\n * @property {IStruct} [structValue]\n * @property 
{IListValue} [listValue]\n * @memberof common\n */\n Value: {\n oneofs: {\n kind: {\n oneof: [\n \"nullValue\",\n \"numberValue\",\n \"stringValue\",\n \"boolValue\",\n \"structValue\",\n \"listValue\"\n ]\n }\n },\n fields: {\n nullValue: {\n type: \"NullValue\",\n id: 1\n },\n numberValue: {\n type: \"double\",\n id: 2\n },\n stringValue: {\n type: \"string\",\n id: 3\n },\n boolValue: {\n type: \"bool\",\n id: 4\n },\n structValue: {\n type: \"Struct\",\n id: 5\n },\n listValue: {\n type: \"ListValue\",\n id: 6\n }\n }\n },\n\n NullValue: {\n values: {\n NULL_VALUE: 0\n }\n },\n\n /**\n * Properties of a google.protobuf.ListValue message.\n * @interface IListValue\n * @type {Object}\n * @property {Array.} [values]\n * @memberof common\n */\n ListValue: {\n fields: {\n values: {\n rule: \"repeated\",\n type: \"Value\",\n id: 1\n }\n }\n }\n});\n\ncommon(\"wrappers\", {\n\n /**\n * Properties of a google.protobuf.DoubleValue message.\n * @interface IDoubleValue\n * @type {Object}\n * @property {number} [value]\n * @memberof common\n */\n DoubleValue: {\n fields: {\n value: {\n type: \"double\",\n id: 1\n }\n }\n },\n\n /**\n * Properties of a google.protobuf.FloatValue message.\n * @interface IFloatValue\n * @type {Object}\n * @property {number} [value]\n * @memberof common\n */\n FloatValue: {\n fields: {\n value: {\n type: \"float\",\n id: 1\n }\n }\n },\n\n /**\n * Properties of a google.protobuf.Int64Value message.\n * @interface IInt64Value\n * @type {Object}\n * @property {number|Long} [value]\n * @memberof common\n */\n Int64Value: {\n fields: {\n value: {\n type: \"int64\",\n id: 1\n }\n }\n },\n\n /**\n * Properties of a google.protobuf.UInt64Value message.\n * @interface IUInt64Value\n * @type {Object}\n * @property {number|Long} [value]\n * @memberof common\n */\n UInt64Value: {\n fields: {\n value: {\n type: \"uint64\",\n id: 1\n }\n }\n },\n\n /**\n * Properties of a google.protobuf.Int32Value message.\n * @interface IInt32Value\n * @type {Object}\n * @property {number} [value]\n * @memberof common\n */\n Int32Value: {\n fields: {\n value: {\n type: \"int32\",\n id: 1\n }\n }\n },\n\n /**\n * Properties of a google.protobuf.UInt32Value message.\n * @interface IUInt32Value\n * @type {Object}\n * @property {number} [value]\n * @memberof common\n */\n UInt32Value: {\n fields: {\n value: {\n type: \"uint32\",\n id: 1\n }\n }\n },\n\n /**\n * Properties of a google.protobuf.BoolValue message.\n * @interface IBoolValue\n * @type {Object}\n * @property {boolean} [value]\n * @memberof common\n */\n BoolValue: {\n fields: {\n value: {\n type: \"bool\",\n id: 1\n }\n }\n },\n\n /**\n * Properties of a google.protobuf.StringValue message.\n * @interface IStringValue\n * @type {Object}\n * @property {string} [value]\n * @memberof common\n */\n StringValue: {\n fields: {\n value: {\n type: \"string\",\n id: 1\n }\n }\n },\n\n /**\n * Properties of a google.protobuf.BytesValue message.\n * @interface IBytesValue\n * @type {Object}\n * @property {Uint8Array} [value]\n * @memberof common\n */\n BytesValue: {\n fields: {\n value: {\n type: \"bytes\",\n id: 1\n }\n }\n }\n});\n\ncommon(\"field_mask\", {\n\n /**\n * Properties of a google.protobuf.FieldMask message.\n * @interface IDoubleValue\n * @type {Object}\n * @property {number} [value]\n * @memberof common\n */\n FieldMask: {\n fields: {\n paths: {\n rule: \"repeated\",\n type: \"string\",\n id: 1\n }\n }\n }\n});\n\n/**\n * Gets the root definition of the specified common proto file.\n *\n * Bundled definitions are:\n * - 
google/protobuf/any.proto\n * - google/protobuf/duration.proto\n * - google/protobuf/empty.proto\n * - google/protobuf/field_mask.proto\n * - google/protobuf/struct.proto\n * - google/protobuf/timestamp.proto\n * - google/protobuf/wrappers.proto\n *\n * @param {string} file Proto file name\n * @returns {INamespace|null} Root definition or `null` if not defined\n */\ncommon.get = function get(file) {\n return common[file] || null;\n};\n\n\n//# sourceURL=webpack://@waku/noise-rtc/./node_modules/protobufjs/src/common.js?");
/***/ }),
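/*
 A sketch for protobuf.common above: common.get() returns the bundled root
 definition for a well-known proto file, or null when that file is not
 bundled (e.g. google/protobuf/type.proto). Assumes the full protobufjs
 entry point is required.

   const protobuf = require('protobufjs');

   const anyDef = protobuf.common.get('google/protobuf/any.proto');
   console.log(anyDef !== null);                                    // true
   console.log(protobuf.common.get('google/protobuf/type.proto'));  // null
*/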
/***/ "./node_modules/protobufjs/src/converter.js":
/*!**************************************************!*\
!*** ./node_modules/protobufjs/src/converter.js ***!
\**************************************************/
/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
"use strict";
eval("\n/**\n * Runtime message from/to plain object converters.\n * @namespace\n */\nvar converter = exports;\n\nvar Enum = __webpack_require__(/*! ./enum */ \"./node_modules/protobufjs/src/enum.js\"),\n util = __webpack_require__(/*! ./util */ \"./node_modules/protobufjs/src/util.js\");\n\n/**\n * Generates a partial value fromObject conveter.\n * @param {Codegen} gen Codegen instance\n * @param {Field} field Reflected field\n * @param {number} fieldIndex Field index\n * @param {string} prop Property reference\n * @returns {Codegen} Codegen instance\n * @ignore\n */\nfunction genValuePartial_fromObject(gen, field, fieldIndex, prop) {\n var defaultAlreadyEmitted = false;\n /* eslint-disable no-unexpected-multiline, block-scoped-var, no-redeclare */\n if (field.resolvedType) {\n if (field.resolvedType instanceof Enum) { gen\n (\"switch(d%s){\", prop);\n for (var values = field.resolvedType.values, keys = Object.keys(values), i = 0; i < keys.length; ++i) {\n // enum unknown values passthrough\n if (values[keys[i]] === field.typeDefault && !defaultAlreadyEmitted) { gen\n (\"default:\")\n (\"if(typeof(d%s)===\\\"number\\\"){m%s=d%s;break}\", prop, prop, prop);\n if (!field.repeated) gen // fallback to default value only for\n // arrays, to avoid leaving holes.\n (\"break\"); // for non-repeated fields, just ignore\n defaultAlreadyEmitted = true;\n }\n gen\n (\"case%j:\", keys[i])\n (\"case %i:\", values[keys[i]])\n (\"m%s=%j\", prop, values[keys[i]])\n (\"break\");\n } gen\n (\"}\");\n } else gen\n (\"if(typeof d%s!==\\\"object\\\")\", prop)\n (\"throw TypeError(%j)\", field.fullName + \": object expected\")\n (\"m%s=types[%i].fromObject(d%s)\", prop, fieldIndex, prop);\n } else {\n var isUnsigned = false;\n switch (field.type) {\n case \"double\":\n case \"float\": gen\n (\"m%s=Number(d%s)\", prop, prop); // also catches \"NaN\", \"Infinity\"\n break;\n case \"uint32\":\n case \"fixed32\": gen\n (\"m%s=d%s>>>0\", prop, prop);\n break;\n case \"int32\":\n case \"sint32\":\n case \"sfixed32\": gen\n (\"m%s=d%s|0\", prop, prop);\n break;\n case \"uint64\":\n isUnsigned = true;\n // eslint-disable-line no-fallthrough\n case \"int64\":\n case \"sint64\":\n case \"fixed64\":\n case \"sfixed64\": gen\n (\"if(util.Long)\")\n (\"(m%s=util.Long.fromValue(d%s)).unsigned=%j\", prop, prop, isUnsigned)\n (\"else if(typeof d%s===\\\"string\\\")\", prop)\n (\"m%s=parseInt(d%s,10)\", prop, prop)\n (\"else if(typeof d%s===\\\"number\\\")\", prop)\n (\"m%s=d%s\", prop, prop)\n (\"else if(typeof d%s===\\\"object\\\")\", prop)\n (\"m%s=new util.LongBits(d%s.low>>>0,d%s.high>>>0).toNumber(%s)\", prop, prop, prop, isUnsigned ? 
\"true\" : \"\");\n break;\n case \"bytes\": gen\n (\"if(typeof d%s===\\\"string\\\")\", prop)\n (\"util.base64.decode(d%s,m%s=util.newBuffer(util.base64.length(d%s)),0)\", prop, prop, prop)\n (\"else if(d%s.length >= 0)\", prop)\n (\"m%s=d%s\", prop, prop);\n break;\n case \"string\": gen\n (\"m%s=String(d%s)\", prop, prop);\n break;\n case \"bool\": gen\n (\"m%s=Boolean(d%s)\", prop, prop);\n break;\n /* default: gen\n (\"m%s=d%s\", prop, prop);\n break; */\n }\n }\n return gen;\n /* eslint-enable no-unexpected-multiline, block-scoped-var, no-redeclare */\n}\n\n/**\n * Generates a plain object to runtime message converter specific to the specified message type.\n * @param {Type} mtype Message type\n * @returns {Codegen} Codegen instance\n */\nconverter.fromObject = function fromObject(mtype) {\n /* eslint-disable no-unexpected-multiline, block-scoped-var, no-redeclare */\n var fields = mtype.fieldsArray;\n var gen = util.codegen([\"d\"], mtype.name + \"$fromObject\")\n (\"if(d instanceof this.ctor)\")\n (\"return d\");\n if (!fields.length) return gen\n (\"return new this.ctor\");\n gen\n (\"var m=new this.ctor\");\n for (var i = 0; i < fields.length; ++i) {\n var field = fields[i].resolve(),\n prop = util.safeProp(field.name);\n\n // Map fields\n if (field.map) { gen\n (\"if(d%s){\", prop)\n (\"if(typeof d%s!==\\\"object\\\")\", prop)\n (\"throw TypeError(%j)\", field.fullName + \": object expected\")\n (\"m%s={}\", prop)\n (\"for(var ks=Object.keys(d%s),i=0;i>>0,m%s.high>>>0).toNumber(%s):m%s\", prop, prop, prop, prop, isUnsigned ? \"true\": \"\", prop);\n break;\n case \"bytes\": gen\n (\"d%s=o.bytes===String?util.base64.encode(m%s,0,m%s.length):o.bytes===Array?Array.prototype.slice.call(m%s):m%s\", prop, prop, prop, prop, prop);\n break;\n default: gen\n (\"d%s=m%s\", prop, prop);\n break;\n }\n }\n return gen;\n /* eslint-enable no-unexpected-multiline, block-scoped-var, no-redeclare */\n}\n\n/**\n * Generates a runtime message to plain object converter specific to the specified message type.\n * @param {Type} mtype Message type\n * @returns {Codegen} Codegen instance\n */\nconverter.toObject = function toObject(mtype) {\n /* eslint-disable no-unexpected-multiline, block-scoped-var, no-redeclare */\n var fields = mtype.fieldsArray.slice().sort(util.compareFieldsById);\n if (!fields.length)\n return util.codegen()(\"return {}\");\n var gen = util.codegen([\"m\", \"o\"], mtype.name + \"$toObject\")\n (\"if(!o)\")\n (\"o={}\")\n (\"var d={}\");\n\n var repeatedFields = [],\n mapFields = [],\n normalFields = [],\n i = 0;\n for (; i < fields.length; ++i)\n if (!fields[i].partOf)\n ( fields[i].resolve().repeated ? repeatedFields\n : fields[i].map ? 
mapFields\n : normalFields).push(fields[i]);\n\n if (repeatedFields.length) { gen\n (\"if(o.arrays||o.defaults){\");\n for (i = 0; i < repeatedFields.length; ++i) gen\n (\"d%s=[]\", util.safeProp(repeatedFields[i].name));\n gen\n (\"}\");\n }\n\n if (mapFields.length) { gen\n (\"if(o.objects||o.defaults){\");\n for (i = 0; i < mapFields.length; ++i) gen\n (\"d%s={}\", util.safeProp(mapFields[i].name));\n gen\n (\"}\");\n }\n\n if (normalFields.length) { gen\n (\"if(o.defaults){\");\n for (i = 0; i < normalFields.length; ++i) {\n var field = normalFields[i],\n prop = util.safeProp(field.name);\n if (field.resolvedType instanceof Enum) gen\n (\"d%s=o.enums===String?%j:%j\", prop, field.resolvedType.valuesById[field.typeDefault], field.typeDefault);\n else if (field.long) gen\n (\"if(util.Long){\")\n (\"var n=new util.Long(%i,%i,%j)\", field.typeDefault.low, field.typeDefault.high, field.typeDefault.unsigned)\n (\"d%s=o.longs===String?n.toString():o.longs===Number?n.toNumber():n\", prop)\n (\"}else\")\n (\"d%s=o.longs===String?%j:%i\", prop, field.typeDefault.toString(), field.typeDefault.toNumber());\n else if (field.bytes) {\n var arrayDefault = \"[\" + Array.prototype.slice.call(field.typeDefault).join(\",\") + \"]\";\n gen\n (\"if(o.bytes===String)d%s=%j\", prop, String.fromCharCode.apply(String, field.typeDefault))\n (\"else{\")\n (\"d%s=%s\", prop, arrayDefault)\n (\"if(o.bytes!==Array)d%s=util.newBuffer(d%s)\", prop, prop)\n (\"}\");\n } else gen\n (\"d%s=%j\", prop, field.typeDefault); // also messages (=null)\n } gen\n (\"}\");\n }\n var hasKs2 = false;\n for (i = 0; i < fields.length; ++i) {\n var field = fields[i],\n index = mtype._fieldsArray.indexOf(field),\n prop = util.safeProp(field.name);\n if (field.map) {\n if (!hasKs2) { hasKs2 = true; gen\n (\"var ks2\");\n } gen\n (\"if(m%s&&(ks2=Object.keys(m%s)).length){\", prop, prop)\n (\"d%s={}\", prop)\n (\"for(var j=0;j {
"use strict";
eval("\nmodule.exports = decoder;\n\nvar Enum = __webpack_require__(/*! ./enum */ \"./node_modules/protobufjs/src/enum.js\"),\n types = __webpack_require__(/*! ./types */ \"./node_modules/protobufjs/src/types.js\"),\n util = __webpack_require__(/*! ./util */ \"./node_modules/protobufjs/src/util.js\");\n\nfunction missing(field) {\n return \"missing required '\" + field.name + \"'\";\n}\n\n/**\n * Generates a decoder specific to the specified message type.\n * @param {Type} mtype Message type\n * @returns {Codegen} Codegen instance\n */\nfunction decoder(mtype) {\n /* eslint-disable no-unexpected-multiline */\n var gen = util.codegen([\"r\", \"l\"], mtype.name + \"$decode\")\n (\"if(!(r instanceof Reader))\")\n (\"r=Reader.create(r)\")\n (\"var c=l===undefined?r.len:r.pos+l,m=new this.ctor\" + (mtype.fieldsArray.filter(function(field) { return field.map; }).length ? \",k,value\" : \"\"))\n (\"while(r.pos>>3){\");\n\n var i = 0;\n for (; i < /* initializes */ mtype.fieldsArray.length; ++i) {\n var field = mtype._fieldsArray[i].resolve(),\n type = field.resolvedType instanceof Enum ? \"int32\" : field.type,\n ref = \"m\" + util.safeProp(field.name); gen\n (\"case %i: {\", field.id);\n\n // Map fields\n if (field.map) { gen\n (\"if(%s===util.emptyObject)\", ref)\n (\"%s={}\", ref)\n (\"var c2 = r.uint32()+r.pos\");\n\n if (types.defaults[field.keyType] !== undefined) gen\n (\"k=%j\", types.defaults[field.keyType]);\n else gen\n (\"k=null\");\n\n if (types.defaults[type] !== undefined) gen\n (\"value=%j\", types.defaults[type]);\n else gen\n (\"value=null\");\n\n gen\n (\"while(r.pos>>3){\")\n (\"case 1: k=r.%s(); break\", field.keyType)\n (\"case 2:\");\n\n if (types.basic[type] === undefined) gen\n (\"value=types[%i].decode(r,r.uint32())\", i); // can't be groups\n else gen\n (\"value=r.%s()\", type);\n\n gen\n (\"break\")\n (\"default:\")\n (\"r.skipType(tag2&7)\")\n (\"break\")\n (\"}\")\n (\"}\");\n\n if (types.long[field.keyType] !== undefined) gen\n (\"%s[typeof k===\\\"object\\\"?util.longToHash(k):k]=value\", ref);\n else gen\n (\"%s[k]=value\", ref);\n\n // Repeated fields\n } else if (field.repeated) { gen\n\n (\"if(!(%s&&%s.length))\", ref, ref)\n (\"%s=[]\", ref);\n\n // Packable (always check for forward and backward compatiblity)\n if (types.packed[type] !== undefined) gen\n (\"if((t&7)===2){\")\n (\"var c2=r.uint32()+r.pos\")\n (\"while(r.pos {
"use strict";
eval("\nmodule.exports = encoder;\n\nvar Enum = __webpack_require__(/*! ./enum */ \"./node_modules/protobufjs/src/enum.js\"),\n types = __webpack_require__(/*! ./types */ \"./node_modules/protobufjs/src/types.js\"),\n util = __webpack_require__(/*! ./util */ \"./node_modules/protobufjs/src/util.js\");\n\n/**\n * Generates a partial message type encoder.\n * @param {Codegen} gen Codegen instance\n * @param {Field} field Reflected field\n * @param {number} fieldIndex Field index\n * @param {string} ref Variable reference\n * @returns {Codegen} Codegen instance\n * @ignore\n */\nfunction genTypePartial(gen, field, fieldIndex, ref) {\n return field.resolvedType.group\n ? gen(\"types[%i].encode(%s,w.uint32(%i)).uint32(%i)\", fieldIndex, ref, (field.id << 3 | 3) >>> 0, (field.id << 3 | 4) >>> 0)\n : gen(\"types[%i].encode(%s,w.uint32(%i).fork()).ldelim()\", fieldIndex, ref, (field.id << 3 | 2) >>> 0);\n}\n\n/**\n * Generates an encoder specific to the specified message type.\n * @param {Type} mtype Message type\n * @returns {Codegen} Codegen instance\n */\nfunction encoder(mtype) {\n /* eslint-disable no-unexpected-multiline, block-scoped-var, no-redeclare */\n var gen = util.codegen([\"m\", \"w\"], mtype.name + \"$encode\")\n (\"if(!w)\")\n (\"w=Writer.create()\");\n\n var i, ref;\n\n // \"when a message is serialized its known fields should be written sequentially by field number\"\n var fields = /* initializes */ mtype.fieldsArray.slice().sort(util.compareFieldsById);\n\n for (var i = 0; i < fields.length; ++i) {\n var field = fields[i].resolve(),\n index = mtype._fieldsArray.indexOf(field),\n type = field.resolvedType instanceof Enum ? \"int32\" : field.type,\n wireType = types.basic[type];\n ref = \"m\" + util.safeProp(field.name);\n\n // Map fields\n if (field.map) {\n gen\n (\"if(%s!=null&&Object.hasOwnProperty.call(m,%j)){\", ref, field.name) // !== undefined && !== null\n (\"for(var ks=Object.keys(%s),i=0;i>> 0, 8 | types.mapKey[field.keyType], field.keyType);\n if (wireType === undefined) gen\n (\"types[%i].encode(%s[ks[i]],w.uint32(18).fork()).ldelim().ldelim()\", index, ref); // can't be groups\n else gen\n (\".uint32(%i).%s(%s[ks[i]]).ldelim()\", 16 | wireType, type, ref);\n gen\n (\"}\")\n (\"}\");\n\n // Repeated fields\n } else if (field.repeated) { gen\n (\"if(%s!=null&&%s.length){\", ref, ref); // !== undefined && !== null\n\n // Packed repeated\n if (field.packed && types.packed[type] !== undefined) { gen\n\n (\"w.uint32(%i).fork()\", (field.id << 3 | 2) >>> 0)\n (\"for(var i=0;i<%s.length;++i)\", ref)\n (\"w.%s(%s[i])\", type, ref)\n (\"w.ldelim()\");\n\n // Non-packed\n } else { gen\n\n (\"for(var i=0;i<%s.length;++i)\", ref);\n if (wireType === undefined)\n genTypePartial(gen, field, index, ref + \"[i]\");\n else gen\n (\"w.uint32(%i).%s(%s[i])\", (field.id << 3 | wireType) >>> 0, type, ref);\n\n } gen\n (\"}\");\n\n // Non-repeated\n } else {\n if (field.optional) gen\n (\"if(%s!=null&&Object.hasOwnProperty.call(m,%j))\", ref, field.name); // !== undefined && !== null\n\n if (wireType === undefined)\n genTypePartial(gen, field, index, ref);\n else gen\n (\"w.uint32(%i).%s(%s)\", (field.id << 3 | wireType) >>> 0, type, ref);\n\n }\n }\n\n return gen\n (\"return w\");\n /* eslint-enable no-unexpected-multiline, block-scoped-var, no-redeclare */\n}\n\n\n//# sourceURL=webpack://@waku/noise-rtc/./node_modules/protobufjs/src/encoder.js?");
/***/ }),
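/*
 * The decoder and encoder modules above generate per-type codecs at runtime;
 * applications exercise them through Type#encode and Type#decode. A minimal
 * round-trip sketch, assuming a hypothetical `Greeting` type obtained from a
 * parsed .proto (see the parse sketch after the index.js module below):
 *
 *   var payload = { text: "hello" };
 *   var err = Greeting.verify(payload);             // null when valid
 *   var message = Greeting.create(payload);         // runtime message instance
 *   var buffer = Greeting.encode(message).finish(); // Uint8Array in wire format
 *   var decoded = Greeting.decode(buffer);          // back to a message instance
 */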
/***/ "./node_modules/protobufjs/src/enum.js":
/*!*********************************************!*\
!*** ./node_modules/protobufjs/src/enum.js ***!
\*********************************************/
/***/ ((module, __unused_webpack_exports, __webpack_require__) => {
"use strict";
eval("\nmodule.exports = Enum;\n\n// extends ReflectionObject\nvar ReflectionObject = __webpack_require__(/*! ./object */ \"./node_modules/protobufjs/src/object.js\");\n((Enum.prototype = Object.create(ReflectionObject.prototype)).constructor = Enum).className = \"Enum\";\n\nvar Namespace = __webpack_require__(/*! ./namespace */ \"./node_modules/protobufjs/src/namespace.js\"),\n util = __webpack_require__(/*! ./util */ \"./node_modules/protobufjs/src/util.js\");\n\n/**\n * Constructs a new enum instance.\n * @classdesc Reflected enum.\n * @extends ReflectionObject\n * @constructor\n * @param {string} name Unique name within its namespace\n * @param {Object.} [values] Enum values as an object, by name\n * @param {Object.} [options] Declared options\n * @param {string} [comment] The comment for this enum\n * @param {Object.} [comments] The value comments for this enum\n * @param {Object.>|undefined} [valuesOptions] The value options for this enum\n */\nfunction Enum(name, values, options, comment, comments, valuesOptions) {\n ReflectionObject.call(this, name, options);\n\n if (values && typeof values !== \"object\")\n throw TypeError(\"values must be an object\");\n\n /**\n * Enum values by id.\n * @type {Object.}\n */\n this.valuesById = {};\n\n /**\n * Enum values by name.\n * @type {Object.}\n */\n this.values = Object.create(this.valuesById); // toJSON, marker\n\n /**\n * Enum comment text.\n * @type {string|null}\n */\n this.comment = comment;\n\n /**\n * Value comment texts, if any.\n * @type {Object.}\n */\n this.comments = comments || {};\n\n /**\n * Values options, if any\n * @type {Object>|undefined}\n */\n this.valuesOptions = valuesOptions;\n\n /**\n * Reserved ranges, if any.\n * @type {Array.}\n */\n this.reserved = undefined; // toJSON\n\n // Note that values inherit valuesById on their prototype which makes them a TypeScript-\n // compatible enum. This is used by pbts to write actual enum definitions that work for\n // static and reflection code alike instead of emitting generic object definitions.\n\n if (values)\n for (var keys = Object.keys(values), i = 0; i < keys.length; ++i)\n if (typeof values[keys[i]] === \"number\") // use forward entries only\n this.valuesById[ this.values[keys[i]] = values[keys[i]] ] = keys[i];\n}\n\n/**\n * Enum descriptor.\n * @interface IEnum\n * @property {Object.} values Enum values\n * @property {Object.} [options] Enum options\n */\n\n/**\n * Constructs an enum from an enum descriptor.\n * @param {string} name Enum name\n * @param {IEnum} json Enum descriptor\n * @returns {Enum} Created enum\n * @throws {TypeError} If arguments are invalid\n */\nEnum.fromJSON = function fromJSON(name, json) {\n var enm = new Enum(name, json.values, json.options, json.comment, json.comments);\n enm.reserved = json.reserved;\n return enm;\n};\n\n/**\n * Converts this enum to an enum descriptor.\n * @param {IToJSONOptions} [toJSONOptions] JSON conversion options\n * @returns {IEnum} Enum descriptor\n */\nEnum.prototype.toJSON = function toJSON(toJSONOptions) {\n var keepComments = toJSONOptions ? Boolean(toJSONOptions.keepComments) : false;\n return util.toObject([\n \"options\" , this.options,\n \"valuesOptions\" , this.valuesOptions,\n \"values\" , this.values,\n \"reserved\" , this.reserved && this.reserved.length ? this.reserved : undefined,\n \"comment\" , keepComments ? this.comment : undefined,\n \"comments\" , keepComments ? 
this.comments : undefined\n ]);\n};\n\n/**\n * Adds a value to this enum.\n * @param {string} name Value name\n * @param {number} id Value id\n * @param {string} [comment] Comment, if any\n * @param {Object.|undefined} [options] Options, if any\n * @returns {Enum} `this`\n * @throws {TypeError} If arguments are invalid\n * @throws {Error} If there is already a value with this name or id\n */\nEnum.prototype.add = function add(name, id, comment, options) {\n // utilized by the parser but not by .fromJSON\n\n if (!util.isString(name))\n throw TypeError(\"name must be a string\");\n\n if (!util.isInteger(id))\n throw TypeError(\"id must be an integer\");\n\n if (this.values[name] !== undefined)\n throw Error(\"duplicate name '\" + name + \"' in \" + this);\n\n if (this.isReservedId(id))\n throw Error(\"id \" + id + \" is reserved in \" + this);\n\n if (this.isReservedName(name))\n throw Error(\"name '\" + name + \"' is reserved in \" + this);\n\n if (this.valuesById[id] !== undefined) {\n if (!(this.options && this.options.allow_alias))\n throw Error(\"duplicate id \" + id + \" in \" + this);\n this.values[name] = id;\n } else\n this.valuesById[this.values[name] = id] = name;\n\n if (options) {\n if (this.valuesOptions === undefined)\n this.valuesOptions = {};\n this.valuesOptions[name] = options || null;\n }\n\n this.comments[name] = comment || null;\n return this;\n};\n\n/**\n * Removes a value from this enum\n * @param {string} name Value name\n * @returns {Enum} `this`\n * @throws {TypeError} If arguments are invalid\n * @throws {Error} If `name` is not a name of this enum\n */\nEnum.prototype.remove = function remove(name) {\n\n if (!util.isString(name))\n throw TypeError(\"name must be a string\");\n\n var val = this.values[name];\n if (val == null)\n throw Error(\"name '\" + name + \"' does not exist in \" + this);\n\n delete this.valuesById[val];\n delete this.values[name];\n delete this.comments[name];\n if (this.valuesOptions)\n delete this.valuesOptions[name];\n\n return this;\n};\n\n/**\n * Tests if the specified id is reserved.\n * @param {number} id Id to test\n * @returns {boolean} `true` if reserved, otherwise `false`\n */\nEnum.prototype.isReservedId = function isReservedId(id) {\n return Namespace.isReservedId(this.reserved, id);\n};\n\n/**\n * Tests if the specified name is reserved.\n * @param {string} name Name to test\n * @returns {boolean} `true` if reserved, otherwise `false`\n */\nEnum.prototype.isReservedName = function isReservedName(name) {\n return Namespace.isReservedName(this.reserved, name);\n};\n\n\n//# sourceURL=webpack://@waku/noise-rtc/./node_modules/protobufjs/src/enum.js?");
/***/ }),
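/*
 * A short sketch of the reflected Enum API defined above, assuming the full
 * protobufjs build is available as `protobuf`; the value names and ids are
 * illustrative only.
 *
 *   var Color = new protobuf.Enum("Color", { RED: 0, GREEN: 1 });
 *   Color.add("BLUE", 2);      // throws on duplicate names or ids
 *   Color.values.BLUE;         // 2
 *   Color.valuesById[2];       // "BLUE"
 *   Color.remove("BLUE");
 */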
/***/ "./node_modules/protobufjs/src/field.js":
/*!**********************************************!*\
!*** ./node_modules/protobufjs/src/field.js ***!
\**********************************************/
/***/ ((module, __unused_webpack_exports, __webpack_require__) => {
"use strict";
eval("\nmodule.exports = Field;\n\n// extends ReflectionObject\nvar ReflectionObject = __webpack_require__(/*! ./object */ \"./node_modules/protobufjs/src/object.js\");\n((Field.prototype = Object.create(ReflectionObject.prototype)).constructor = Field).className = \"Field\";\n\nvar Enum = __webpack_require__(/*! ./enum */ \"./node_modules/protobufjs/src/enum.js\"),\n types = __webpack_require__(/*! ./types */ \"./node_modules/protobufjs/src/types.js\"),\n util = __webpack_require__(/*! ./util */ \"./node_modules/protobufjs/src/util.js\");\n\nvar Type; // cyclic\n\nvar ruleRe = /^required|optional|repeated$/;\n\n/**\n * Constructs a new message field instance. Note that {@link MapField|map fields} have their own class.\n * @name Field\n * @classdesc Reflected message field.\n * @extends FieldBase\n * @constructor\n * @param {string} name Unique name within its namespace\n * @param {number} id Unique id within its namespace\n * @param {string} type Value type\n * @param {string|Object.} [rule=\"optional\"] Field rule\n * @param {string|Object.} [extend] Extended type if different from parent\n * @param {Object.} [options] Declared options\n */\n\n/**\n * Constructs a field from a field descriptor.\n * @param {string} name Field name\n * @param {IField} json Field descriptor\n * @returns {Field} Created field\n * @throws {TypeError} If arguments are invalid\n */\nField.fromJSON = function fromJSON(name, json) {\n return new Field(name, json.id, json.type, json.rule, json.extend, json.options, json.comment);\n};\n\n/**\n * Not an actual constructor. Use {@link Field} instead.\n * @classdesc Base class of all reflected message fields. This is not an actual class but here for the sake of having consistent type definitions.\n * @exports FieldBase\n * @extends ReflectionObject\n * @constructor\n * @param {string} name Unique name within its namespace\n * @param {number} id Unique id within its namespace\n * @param {string} type Value type\n * @param {string|Object.} [rule=\"optional\"] Field rule\n * @param {string|Object.} [extend] Extended type if different from parent\n * @param {Object.} [options] Declared options\n * @param {string} [comment] Comment associated with this field\n */\nfunction Field(name, id, type, rule, extend, options, comment) {\n\n if (util.isObject(rule)) {\n comment = extend;\n options = rule;\n rule = extend = undefined;\n } else if (util.isObject(extend)) {\n comment = options;\n options = extend;\n extend = undefined;\n }\n\n ReflectionObject.call(this, name, options);\n\n if (!util.isInteger(id) || id < 0)\n throw TypeError(\"id must be a non-negative integer\");\n\n if (!util.isString(type))\n throw TypeError(\"type must be a string\");\n\n if (rule !== undefined && !ruleRe.test(rule = rule.toString().toLowerCase()))\n throw TypeError(\"rule must be a string rule\");\n\n if (extend !== undefined && !util.isString(extend))\n throw TypeError(\"extend must be a string\");\n\n /**\n * Field rule, if any.\n * @type {string|undefined}\n */\n if (rule === \"proto3_optional\") {\n rule = \"optional\";\n }\n this.rule = rule && rule !== \"optional\" ? 
rule : undefined; // toJSON\n\n /**\n * Field type.\n * @type {string}\n */\n this.type = type; // toJSON\n\n /**\n * Unique field id.\n * @type {number}\n */\n this.id = id; // toJSON, marker\n\n /**\n * Extended type if different from parent.\n * @type {string|undefined}\n */\n this.extend = extend || undefined; // toJSON\n\n /**\n * Whether this field is required.\n * @type {boolean}\n */\n this.required = rule === \"required\";\n\n /**\n * Whether this field is optional.\n * @type {boolean}\n */\n this.optional = !this.required;\n\n /**\n * Whether this field is repeated.\n * @type {boolean}\n */\n this.repeated = rule === \"repeated\";\n\n /**\n * Whether this field is a map or not.\n * @type {boolean}\n */\n this.map = false;\n\n /**\n * Message this field belongs to.\n * @type {Type|null}\n */\n this.message = null;\n\n /**\n * OneOf this field belongs to, if any,\n * @type {OneOf|null}\n */\n this.partOf = null;\n\n /**\n * The field type's default value.\n * @type {*}\n */\n this.typeDefault = null;\n\n /**\n * The field's default value on prototypes.\n * @type {*}\n */\n this.defaultValue = null;\n\n /**\n * Whether this field's value should be treated as a long.\n * @type {boolean}\n */\n this.long = util.Long ? types.long[type] !== undefined : /* istanbul ignore next */ false;\n\n /**\n * Whether this field's value is a buffer.\n * @type {boolean}\n */\n this.bytes = type === \"bytes\";\n\n /**\n * Resolved type if not a basic type.\n * @type {Type|Enum|null}\n */\n this.resolvedType = null;\n\n /**\n * Sister-field within the extended type if a declaring extension field.\n * @type {Field|null}\n */\n this.extensionField = null;\n\n /**\n * Sister-field within the declaring namespace if an extended field.\n * @type {Field|null}\n */\n this.declaringField = null;\n\n /**\n * Internally remembers whether this field is packed.\n * @type {boolean|null}\n * @private\n */\n this._packed = null;\n\n /**\n * Comment for this field.\n * @type {string|null}\n */\n this.comment = comment;\n}\n\n/**\n * Determines whether this field is packed. Only relevant when repeated and working with proto2.\n * @name Field#packed\n * @type {boolean}\n * @readonly\n */\nObject.defineProperty(Field.prototype, \"packed\", {\n get: function() {\n // defaults to packed=true if not explicity set to false\n if (this._packed === null)\n this._packed = this.getOption(\"packed\") !== false;\n return this._packed;\n }\n});\n\n/**\n * @override\n */\nField.prototype.setOption = function setOption(name, value, ifNotSet) {\n if (name === \"packed\") // clear cached before setting\n this._packed = null;\n return ReflectionObject.prototype.setOption.call(this, name, value, ifNotSet);\n};\n\n/**\n * Field descriptor.\n * @interface IField\n * @property {string} [rule=\"optional\"] Field rule\n * @property {string} type Field type\n * @property {number} id Field id\n * @property {Object.} [options] Field options\n */\n\n/**\n * Extension field descriptor.\n * @interface IExtensionField\n * @extends IField\n * @property {string} extend Extended type\n */\n\n/**\n * Converts this field to a field descriptor.\n * @param {IToJSONOptions} [toJSONOptions] JSON conversion options\n * @returns {IField} Field descriptor\n */\nField.prototype.toJSON = function toJSON(toJSONOptions) {\n var keepComments = toJSONOptions ? 
Boolean(toJSONOptions.keepComments) : false;\n return util.toObject([\n \"rule\" , this.rule !== \"optional\" && this.rule || undefined,\n \"type\" , this.type,\n \"id\" , this.id,\n \"extend\" , this.extend,\n \"options\" , this.options,\n \"comment\" , keepComments ? this.comment : undefined\n ]);\n};\n\n/**\n * Resolves this field's type references.\n * @returns {Field} `this`\n * @throws {Error} If any reference cannot be resolved\n */\nField.prototype.resolve = function resolve() {\n\n if (this.resolved)\n return this;\n\n if ((this.typeDefault = types.defaults[this.type]) === undefined) { // if not a basic type, resolve it\n this.resolvedType = (this.declaringField ? this.declaringField.parent : this.parent).lookupTypeOrEnum(this.type);\n if (this.resolvedType instanceof Type)\n this.typeDefault = null;\n else // instanceof Enum\n this.typeDefault = this.resolvedType.values[Object.keys(this.resolvedType.values)[0]]; // first defined\n } else if (this.options && this.options.proto3_optional) {\n // proto3 scalar value marked optional; should default to null\n this.typeDefault = null;\n }\n\n // use explicitly set default value if present\n if (this.options && this.options[\"default\"] != null) {\n this.typeDefault = this.options[\"default\"];\n if (this.resolvedType instanceof Enum && typeof this.typeDefault === \"string\")\n this.typeDefault = this.resolvedType.values[this.typeDefault];\n }\n\n // remove unnecessary options\n if (this.options) {\n if (this.options.packed === true || this.options.packed !== undefined && this.resolvedType && !(this.resolvedType instanceof Enum))\n delete this.options.packed;\n if (!Object.keys(this.options).length)\n this.options = undefined;\n }\n\n // convert to internal data type if necesssary\n if (this.long) {\n this.typeDefault = util.Long.fromNumber(this.typeDefault, this.type.charAt(0) === \"u\");\n\n /* istanbul ignore else */\n if (Object.freeze)\n Object.freeze(this.typeDefault); // long instances are meant to be immutable anyway (i.e. 
use small int cache that even requires it)\n\n } else if (this.bytes && typeof this.typeDefault === \"string\") {\n var buf;\n if (util.base64.test(this.typeDefault))\n util.base64.decode(this.typeDefault, buf = util.newBuffer(util.base64.length(this.typeDefault)), 0);\n else\n util.utf8.write(this.typeDefault, buf = util.newBuffer(util.utf8.length(this.typeDefault)), 0);\n this.typeDefault = buf;\n }\n\n // take special care of maps and repeated fields\n if (this.map)\n this.defaultValue = util.emptyObject;\n else if (this.repeated)\n this.defaultValue = util.emptyArray;\n else\n this.defaultValue = this.typeDefault;\n\n // ensure proper value on prototype\n if (this.parent instanceof Type)\n this.parent.ctor.prototype[this.name] = this.defaultValue;\n\n return ReflectionObject.prototype.resolve.call(this);\n};\n\n/**\n * Decorator function as returned by {@link Field.d} and {@link MapField.d} (TypeScript).\n * @typedef FieldDecorator\n * @type {function}\n * @param {Object} prototype Target prototype\n * @param {string} fieldName Field name\n * @returns {undefined}\n */\n\n/**\n * Field decorator (TypeScript).\n * @name Field.d\n * @function\n * @param {number} fieldId Field id\n * @param {\"double\"|\"float\"|\"int32\"|\"uint32\"|\"sint32\"|\"fixed32\"|\"sfixed32\"|\"int64\"|\"uint64\"|\"sint64\"|\"fixed64\"|\"sfixed64\"|\"string\"|\"bool\"|\"bytes\"|Object} fieldType Field type\n * @param {\"optional\"|\"required\"|\"repeated\"} [fieldRule=\"optional\"] Field rule\n * @param {T} [defaultValue] Default value\n * @returns {FieldDecorator} Decorator function\n * @template T extends number | number[] | Long | Long[] | string | string[] | boolean | boolean[] | Uint8Array | Uint8Array[] | Buffer | Buffer[]\n */\nField.d = function decorateField(fieldId, fieldType, fieldRule, defaultValue) {\n\n // submessage: decorate the submessage and use its name as the type\n if (typeof fieldType === \"function\")\n fieldType = util.decorateType(fieldType).name;\n\n // enum reference: create a reflected copy of the enum and keep reuseing it\n else if (fieldType && typeof fieldType === \"object\")\n fieldType = util.decorateEnum(fieldType).name;\n\n return function fieldDecorator(prototype, fieldName) {\n util.decorateType(prototype.constructor)\n .add(new Field(fieldName, fieldId, fieldType, fieldRule, { \"default\": defaultValue }));\n };\n};\n\n/**\n * Field decorator (TypeScript).\n * @name Field.d\n * @function\n * @param {number} fieldId Field id\n * @param {Constructor|string} fieldType Field type\n * @param {\"optional\"|\"required\"|\"repeated\"} [fieldRule=\"optional\"] Field rule\n * @returns {FieldDecorator} Decorator function\n * @template T extends Message\n * @variation 2\n */\n// like Field.d but without a default value\n\n// Sets up cyclic dependencies (called in index-light)\nField._configure = function configure(Type_) {\n Type = Type_;\n};\n\n\n//# sourceURL=webpack://@waku/noise-rtc/./node_modules/protobufjs/src/field.js?");
/***/ }),
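/*
 * A reflection-only sketch for the Field class above, assuming `protobuf.Type`,
 * `protobuf.Field` and `protobuf.Root` from the full build; all names and ids
 * are illustrative.
 *
 *   var AwesomeMessage = new protobuf.Type("AwesomeMessage")
 *     .add(new protobuf.Field("awesomeField", 1, "string"));
 *   var ns = new protobuf.Root().define("awesomepackage").add(AwesomeMessage);
 *   var buf = AwesomeMessage.encode({ awesomeField: "hi" }).finish();
 */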
/***/ "./node_modules/protobufjs/src/index-light.js":
/*!****************************************************!*\
!*** ./node_modules/protobufjs/src/index-light.js ***!
\****************************************************/
/***/ ((module, __unused_webpack_exports, __webpack_require__) => {
"use strict";
eval("\nvar protobuf = module.exports = __webpack_require__(/*! ./index-minimal */ \"./node_modules/protobufjs/src/index-minimal.js\");\n\nprotobuf.build = \"light\";\n\n/**\n * A node-style callback as used by {@link load} and {@link Root#load}.\n * @typedef LoadCallback\n * @type {function}\n * @param {Error|null} error Error, if any, otherwise `null`\n * @param {Root} [root] Root, if there hasn't been an error\n * @returns {undefined}\n */\n\n/**\n * Loads one or multiple .proto or preprocessed .json files into a common root namespace and calls the callback.\n * @param {string|string[]} filename One or multiple files to load\n * @param {Root} root Root namespace, defaults to create a new one if omitted.\n * @param {LoadCallback} callback Callback function\n * @returns {undefined}\n * @see {@link Root#load}\n */\nfunction load(filename, root, callback) {\n if (typeof root === \"function\") {\n callback = root;\n root = new protobuf.Root();\n } else if (!root)\n root = new protobuf.Root();\n return root.load(filename, callback);\n}\n\n/**\n * Loads one or multiple .proto or preprocessed .json files into a common root namespace and calls the callback.\n * @name load\n * @function\n * @param {string|string[]} filename One or multiple files to load\n * @param {LoadCallback} callback Callback function\n * @returns {undefined}\n * @see {@link Root#load}\n * @variation 2\n */\n// function load(filename:string, callback:LoadCallback):undefined\n\n/**\n * Loads one or multiple .proto or preprocessed .json files into a common root namespace and returns a promise.\n * @name load\n * @function\n * @param {string|string[]} filename One or multiple files to load\n * @param {Root} [root] Root namespace, defaults to create a new one if omitted.\n * @returns {Promise} Promise\n * @see {@link Root#load}\n * @variation 3\n */\n// function load(filename:string, [root:Root]):Promise\n\nprotobuf.load = load;\n\n/**\n * Synchronously loads one or multiple .proto or preprocessed .json files into a common root namespace (node only).\n * @param {string|string[]} filename One or multiple files to load\n * @param {Root} [root] Root namespace, defaults to create a new one if omitted.\n * @returns {Root} Root namespace\n * @throws {Error} If synchronous fetching is not supported (i.e. in browsers) or if a file's syntax is invalid\n * @see {@link Root#loadSync}\n */\nfunction loadSync(filename, root) {\n if (!root)\n root = new protobuf.Root();\n return root.loadSync(filename);\n}\n\nprotobuf.loadSync = loadSync;\n\n// Serialization\nprotobuf.encoder = __webpack_require__(/*! ./encoder */ \"./node_modules/protobufjs/src/encoder.js\");\nprotobuf.decoder = __webpack_require__(/*! ./decoder */ \"./node_modules/protobufjs/src/decoder.js\");\nprotobuf.verifier = __webpack_require__(/*! ./verifier */ \"./node_modules/protobufjs/src/verifier.js\");\nprotobuf.converter = __webpack_require__(/*! ./converter */ \"./node_modules/protobufjs/src/converter.js\");\n\n// Reflection\nprotobuf.ReflectionObject = __webpack_require__(/*! ./object */ \"./node_modules/protobufjs/src/object.js\");\nprotobuf.Namespace = __webpack_require__(/*! ./namespace */ \"./node_modules/protobufjs/src/namespace.js\");\nprotobuf.Root = __webpack_require__(/*! ./root */ \"./node_modules/protobufjs/src/root.js\");\nprotobuf.Enum = __webpack_require__(/*! ./enum */ \"./node_modules/protobufjs/src/enum.js\");\nprotobuf.Type = __webpack_require__(/*! ./type */ \"./node_modules/protobufjs/src/type.js\");\nprotobuf.Field = __webpack_require__(/*! 
./field */ \"./node_modules/protobufjs/src/field.js\");\nprotobuf.OneOf = __webpack_require__(/*! ./oneof */ \"./node_modules/protobufjs/src/oneof.js\");\nprotobuf.MapField = __webpack_require__(/*! ./mapfield */ \"./node_modules/protobufjs/src/mapfield.js\");\nprotobuf.Service = __webpack_require__(/*! ./service */ \"./node_modules/protobufjs/src/service.js\");\nprotobuf.Method = __webpack_require__(/*! ./method */ \"./node_modules/protobufjs/src/method.js\");\n\n// Runtime\nprotobuf.Message = __webpack_require__(/*! ./message */ \"./node_modules/protobufjs/src/message.js\");\nprotobuf.wrappers = __webpack_require__(/*! ./wrappers */ \"./node_modules/protobufjs/src/wrappers.js\");\n\n// Utility\nprotobuf.types = __webpack_require__(/*! ./types */ \"./node_modules/protobufjs/src/types.js\");\nprotobuf.util = __webpack_require__(/*! ./util */ \"./node_modules/protobufjs/src/util.js\");\n\n// Set up possibly cyclic reflection dependencies\nprotobuf.ReflectionObject._configure(protobuf.Root);\nprotobuf.Namespace._configure(protobuf.Type, protobuf.Service, protobuf.Enum);\nprotobuf.Root._configure(protobuf.Type);\nprotobuf.Field._configure(protobuf.Type);\n\n\n//# sourceURL=webpack://@waku/noise-rtc/./node_modules/protobufjs/src/index-light.js?");
/***/ }),
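/*
 * Usage sketch for the load helpers exported by index-light above; the file
 * name "awesome.proto" and the type path are hypothetical. Callback, promise
 * and synchronous (node-only) forms:
 *
 *   protobuf.load("awesome.proto", function(err, root) {
 *     if (err) throw err;
 *     var AwesomeMessage = root.lookupType("awesomepackage.AwesomeMessage");
 *   });
 *
 *   protobuf.load("awesome.proto").then(function(root) {
 *     return root.lookupType("awesomepackage.AwesomeMessage");
 *   });
 *
 *   var root = protobuf.loadSync("awesome.proto"); // throws where sync fetching is unsupported (browsers)
 */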
/***/ "./node_modules/protobufjs/src/index-minimal.js":
/*!******************************************************!*\
!*** ./node_modules/protobufjs/src/index-minimal.js ***!
\******************************************************/
/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
"use strict";
eval("\nvar protobuf = exports;\n\n/**\n * Build type, one of `\"full\"`, `\"light\"` or `\"minimal\"`.\n * @name build\n * @type {string}\n * @const\n */\nprotobuf.build = \"minimal\";\n\n// Serialization\nprotobuf.Writer = __webpack_require__(/*! ./writer */ \"./node_modules/protobufjs/src/writer.js\");\nprotobuf.BufferWriter = __webpack_require__(/*! ./writer_buffer */ \"./node_modules/protobufjs/src/writer_buffer.js\");\nprotobuf.Reader = __webpack_require__(/*! ./reader */ \"./node_modules/protobufjs/src/reader.js\");\nprotobuf.BufferReader = __webpack_require__(/*! ./reader_buffer */ \"./node_modules/protobufjs/src/reader_buffer.js\");\n\n// Utility\nprotobuf.util = __webpack_require__(/*! ./util/minimal */ \"./node_modules/protobufjs/src/util/minimal.js\");\nprotobuf.rpc = __webpack_require__(/*! ./rpc */ \"./node_modules/protobufjs/src/rpc.js\");\nprotobuf.roots = __webpack_require__(/*! ./roots */ \"./node_modules/protobufjs/src/roots.js\");\nprotobuf.configure = configure;\n\n/* istanbul ignore next */\n/**\n * Reconfigures the library according to the environment.\n * @returns {undefined}\n */\nfunction configure() {\n protobuf.util._configure();\n protobuf.Writer._configure(protobuf.BufferWriter);\n protobuf.Reader._configure(protobuf.BufferReader);\n}\n\n// Set up buffer utility according to the environment\nconfigure();\n\n\n//# sourceURL=webpack://@waku/noise-rtc/./node_modules/protobufjs/src/index-minimal.js?");
/***/ }),
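/*
 * The minimal build only exposes the low-level wire primitives set up above.
 * A tiny sketch of writing and reading one varint field; the field number and
 * value are illustrative:
 *
 *   var writer = protobuf.Writer.create();
 *   writer.uint32((1 << 3 | 0) >>> 0); // key: field 1, wire type 0 (varint)
 *   writer.int32(150);                 // value
 *   var buffer = writer.finish();      // Uint8Array
 *
 *   var reader = protobuf.Reader.create(buffer);
 *   var key = reader.uint32();         // 8
 *   var value = reader.int32();        // 150
 */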
/***/ "./node_modules/protobufjs/src/index.js":
/*!**********************************************!*\
!*** ./node_modules/protobufjs/src/index.js ***!
\**********************************************/
/***/ ((module, __unused_webpack_exports, __webpack_require__) => {
"use strict";
eval("\nvar protobuf = module.exports = __webpack_require__(/*! ./index-light */ \"./node_modules/protobufjs/src/index-light.js\");\n\nprotobuf.build = \"full\";\n\n// Parser\nprotobuf.tokenize = __webpack_require__(/*! ./tokenize */ \"./node_modules/protobufjs/src/tokenize.js\");\nprotobuf.parse = __webpack_require__(/*! ./parse */ \"./node_modules/protobufjs/src/parse.js\");\nprotobuf.common = __webpack_require__(/*! ./common */ \"./node_modules/protobufjs/src/common.js\");\n\n// Configure parser\nprotobuf.Root._configure(protobuf.Type, protobuf.parse, protobuf.common);\n\n\n//# sourceURL=webpack://@waku/noise-rtc/./node_modules/protobufjs/src/index.js?");
/***/ }),
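/*
 * The full build above adds the .proto parser. A minimal in-memory parse
 * sketch; the message definition is illustrative and also serves as the
 * `Greeting` type referenced in the earlier encoder/decoder sketch:
 *
 *   var source = "syntax = \"proto3\"; message Greeting { string text = 1; }";
 *   var parsed = protobuf.parse(source);   // { package, imports, syntax, root, ... }
 *   var Greeting = parsed.root.lookupType("Greeting");
 *   var buf = Greeting.encode({ text: "hello" }).finish();
 */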
/***/ "./node_modules/protobufjs/src/mapfield.js":
/*!*************************************************!*\
!*** ./node_modules/protobufjs/src/mapfield.js ***!
\*************************************************/
/***/ ((module, __unused_webpack_exports, __webpack_require__) => {
"use strict";
eval("\nmodule.exports = MapField;\n\n// extends Field\nvar Field = __webpack_require__(/*! ./field */ \"./node_modules/protobufjs/src/field.js\");\n((MapField.prototype = Object.create(Field.prototype)).constructor = MapField).className = \"MapField\";\n\nvar types = __webpack_require__(/*! ./types */ \"./node_modules/protobufjs/src/types.js\"),\n util = __webpack_require__(/*! ./util */ \"./node_modules/protobufjs/src/util.js\");\n\n/**\n * Constructs a new map field instance.\n * @classdesc Reflected map field.\n * @extends FieldBase\n * @constructor\n * @param {string} name Unique name within its namespace\n * @param {number} id Unique id within its namespace\n * @param {string} keyType Key type\n * @param {string} type Value type\n * @param {Object.} [options] Declared options\n * @param {string} [comment] Comment associated with this field\n */\nfunction MapField(name, id, keyType, type, options, comment) {\n Field.call(this, name, id, type, undefined, undefined, options, comment);\n\n /* istanbul ignore if */\n if (!util.isString(keyType))\n throw TypeError(\"keyType must be a string\");\n\n /**\n * Key type.\n * @type {string}\n */\n this.keyType = keyType; // toJSON, marker\n\n /**\n * Resolved key type if not a basic type.\n * @type {ReflectionObject|null}\n */\n this.resolvedKeyType = null;\n\n // Overrides Field#map\n this.map = true;\n}\n\n/**\n * Map field descriptor.\n * @interface IMapField\n * @extends {IField}\n * @property {string} keyType Key type\n */\n\n/**\n * Extension map field descriptor.\n * @interface IExtensionMapField\n * @extends IMapField\n * @property {string} extend Extended type\n */\n\n/**\n * Constructs a map field from a map field descriptor.\n * @param {string} name Field name\n * @param {IMapField} json Map field descriptor\n * @returns {MapField} Created map field\n * @throws {TypeError} If arguments are invalid\n */\nMapField.fromJSON = function fromJSON(name, json) {\n return new MapField(name, json.id, json.keyType, json.type, json.options, json.comment);\n};\n\n/**\n * Converts this map field to a map field descriptor.\n * @param {IToJSONOptions} [toJSONOptions] JSON conversion options\n * @returns {IMapField} Map field descriptor\n */\nMapField.prototype.toJSON = function toJSON(toJSONOptions) {\n var keepComments = toJSONOptions ? Boolean(toJSONOptions.keepComments) : false;\n return util.toObject([\n \"keyType\" , this.keyType,\n \"type\" , this.type,\n \"id\" , this.id,\n \"extend\" , this.extend,\n \"options\" , this.options,\n \"comment\" , keepComments ? 
this.comment : undefined\n ]);\n};\n\n/**\n * @override\n */\nMapField.prototype.resolve = function resolve() {\n if (this.resolved)\n return this;\n\n // Besides a value type, map fields have a key type that may be \"any scalar type except for floating point types and bytes\"\n if (types.mapKey[this.keyType] === undefined)\n throw Error(\"invalid key type: \" + this.keyType);\n\n return Field.prototype.resolve.call(this);\n};\n\n/**\n * Map field decorator (TypeScript).\n * @name MapField.d\n * @function\n * @param {number} fieldId Field id\n * @param {\"int32\"|\"uint32\"|\"sint32\"|\"fixed32\"|\"sfixed32\"|\"int64\"|\"uint64\"|\"sint64\"|\"fixed64\"|\"sfixed64\"|\"bool\"|\"string\"} fieldKeyType Field key type\n * @param {\"double\"|\"float\"|\"int32\"|\"uint32\"|\"sint32\"|\"fixed32\"|\"sfixed32\"|\"int64\"|\"uint64\"|\"sint64\"|\"fixed64\"|\"sfixed64\"|\"bool\"|\"string\"|\"bytes\"|Object|Constructor<{}>} fieldValueType Field value type\n * @returns {FieldDecorator} Decorator function\n * @template T extends { [key: string]: number | Long | string | boolean | Uint8Array | Buffer | number[] | Message<{}> }\n */\nMapField.d = function decorateMapField(fieldId, fieldKeyType, fieldValueType) {\n\n // submessage value: decorate the submessage and use its name as the type\n if (typeof fieldValueType === \"function\")\n fieldValueType = util.decorateType(fieldValueType).name;\n\n // enum reference value: create a reflected copy of the enum and keep reuseing it\n else if (fieldValueType && typeof fieldValueType === \"object\")\n fieldValueType = util.decorateEnum(fieldValueType).name;\n\n return function mapFieldDecorator(prototype, fieldName) {\n util.decorateType(prototype.constructor)\n .add(new MapField(fieldName, fieldId, fieldKeyType, fieldValueType));\n };\n};\n\n\n//# sourceURL=webpack://@waku/noise-rtc/./node_modules/protobufjs/src/mapfield.js?");
/***/ }),
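/*
 * Map fields pair a scalar key type with a value type. A sketch using the
 * parser from the full build; the message and data are illustrative:
 *
 *   var parsed = protobuf.parse(
 *     "syntax = \"proto3\"; message Scores { map<string, int32> byPlayer = 1; }");
 *   var Scores = parsed.root.lookupType("Scores");
 *   var buf = Scores.encode({ byPlayer: { alice: 3, bob: 5 } }).finish();
 *   var counts = Scores.decode(buf).byPlayer; // { alice: 3, bob: 5 }
 */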
/***/ "./node_modules/protobufjs/src/message.js":
/*!************************************************!*\
!*** ./node_modules/protobufjs/src/message.js ***!
\************************************************/
/***/ ((module, __unused_webpack_exports, __webpack_require__) => {
"use strict";
eval("\nmodule.exports = Message;\n\nvar util = __webpack_require__(/*! ./util/minimal */ \"./node_modules/protobufjs/src/util/minimal.js\");\n\n/**\n * Constructs a new message instance.\n * @classdesc Abstract runtime message.\n * @constructor\n * @param {Properties} [properties] Properties to set\n * @template T extends object = object\n */\nfunction Message(properties) {\n // not used internally\n if (properties)\n for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i)\n this[keys[i]] = properties[keys[i]];\n}\n\n/**\n * Reference to the reflected type.\n * @name Message.$type\n * @type {Type}\n * @readonly\n */\n\n/**\n * Reference to the reflected type.\n * @name Message#$type\n * @type {Type}\n * @readonly\n */\n\n/*eslint-disable valid-jsdoc*/\n\n/**\n * Creates a new message of this type using the specified properties.\n * @param {Object.} [properties] Properties to set\n * @returns {Message} Message instance\n * @template T extends Message\n * @this Constructor\n */\nMessage.create = function create(properties) {\n return this.$type.create(properties);\n};\n\n/**\n * Encodes a message of this type.\n * @param {T|Object.} message Message to encode\n * @param {Writer} [writer] Writer to use\n * @returns {Writer} Writer\n * @template T extends Message\n * @this Constructor\n */\nMessage.encode = function encode(message, writer) {\n return this.$type.encode(message, writer);\n};\n\n/**\n * Encodes a message of this type preceeded by its length as a varint.\n * @param {T|Object.} message Message to encode\n * @param {Writer} [writer] Writer to use\n * @returns {Writer} Writer\n * @template T extends Message\n * @this Constructor\n */\nMessage.encodeDelimited = function encodeDelimited(message, writer) {\n return this.$type.encodeDelimited(message, writer);\n};\n\n/**\n * Decodes a message of this type.\n * @name Message.decode\n * @function\n * @param {Reader|Uint8Array} reader Reader or buffer to decode\n * @returns {T} Decoded message\n * @template T extends Message\n * @this Constructor\n */\nMessage.decode = function decode(reader) {\n return this.$type.decode(reader);\n};\n\n/**\n * Decodes a message of this type preceeded by its length as a varint.\n * @name Message.decodeDelimited\n * @function\n * @param {Reader|Uint8Array} reader Reader or buffer to decode\n * @returns {T} Decoded message\n * @template T extends Message\n * @this Constructor\n */\nMessage.decodeDelimited = function decodeDelimited(reader) {\n return this.$type.decodeDelimited(reader);\n};\n\n/**\n * Verifies a message of this type.\n * @name Message.verify\n * @function\n * @param {Object.} message Plain object to verify\n * @returns {string|null} `null` if valid, otherwise the reason why it is not\n */\nMessage.verify = function verify(message) {\n return this.$type.verify(message);\n};\n\n/**\n * Creates a new message of this type from a plain object. Also converts values to their respective internal types.\n * @param {Object.} object Plain object\n * @returns {T} Message instance\n * @template T extends Message\n * @this Constructor\n */\nMessage.fromObject = function fromObject(object) {\n return this.$type.fromObject(object);\n};\n\n/**\n * Creates a plain object from a message of this type. 
Also converts values to other types if specified.\n * @param {T} message Message instance\n * @param {IConversionOptions} [options] Conversion options\n * @returns {Object.} Plain object\n * @template T extends Message\n * @this Constructor\n */\nMessage.toObject = function toObject(message, options) {\n return this.$type.toObject(message, options);\n};\n\n/**\n * Converts this message to JSON.\n * @returns {Object.} JSON object\n */\nMessage.prototype.toJSON = function toJSON() {\n return this.$type.toObject(this, util.toJSONOptions);\n};\n\n/*eslint-enable valid-jsdoc*/\n\n//# sourceURL=webpack://@waku/noise-rtc/./node_modules/protobufjs/src/message.js?");
/***/ }),
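/*
 * Runtime messages created through a reflected Type delegate to the Message
 * helpers above; `Greeting` is the illustrative type from the parse sketch:
 *
 *   var msg = Greeting.create({ text: "hello" });
 *   msg instanceof Greeting.ctor;                    // true
 *   var plain = Greeting.toObject(msg, { defaults: true });
 *   JSON.stringify(msg);                             // uses Message#toJSON
 */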
/***/ "./node_modules/protobufjs/src/method.js":
/*!***********************************************!*\
!*** ./node_modules/protobufjs/src/method.js ***!
\***********************************************/
/***/ ((module, __unused_webpack_exports, __webpack_require__) => {
"use strict";
eval("\nmodule.exports = Method;\n\n// extends ReflectionObject\nvar ReflectionObject = __webpack_require__(/*! ./object */ \"./node_modules/protobufjs/src/object.js\");\n((Method.prototype = Object.create(ReflectionObject.prototype)).constructor = Method).className = \"Method\";\n\nvar util = __webpack_require__(/*! ./util */ \"./node_modules/protobufjs/src/util.js\");\n\n/**\n * Constructs a new service method instance.\n * @classdesc Reflected service method.\n * @extends ReflectionObject\n * @constructor\n * @param {string} name Method name\n * @param {string|undefined} type Method type, usually `\"rpc\"`\n * @param {string} requestType Request message type\n * @param {string} responseType Response message type\n * @param {boolean|Object.} [requestStream] Whether the request is streamed\n * @param {boolean|Object.} [responseStream] Whether the response is streamed\n * @param {Object.} [options] Declared options\n * @param {string} [comment] The comment for this method\n * @param {Object.} [parsedOptions] Declared options, properly parsed into an object\n */\nfunction Method(name, type, requestType, responseType, requestStream, responseStream, options, comment, parsedOptions) {\n\n /* istanbul ignore next */\n if (util.isObject(requestStream)) {\n options = requestStream;\n requestStream = responseStream = undefined;\n } else if (util.isObject(responseStream)) {\n options = responseStream;\n responseStream = undefined;\n }\n\n /* istanbul ignore if */\n if (!(type === undefined || util.isString(type)))\n throw TypeError(\"type must be a string\");\n\n /* istanbul ignore if */\n if (!util.isString(requestType))\n throw TypeError(\"requestType must be a string\");\n\n /* istanbul ignore if */\n if (!util.isString(responseType))\n throw TypeError(\"responseType must be a string\");\n\n ReflectionObject.call(this, name, options);\n\n /**\n * Method type.\n * @type {string}\n */\n this.type = type || \"rpc\"; // toJSON\n\n /**\n * Request type.\n * @type {string}\n */\n this.requestType = requestType; // toJSON, marker\n\n /**\n * Whether requests are streamed or not.\n * @type {boolean|undefined}\n */\n this.requestStream = requestStream ? true : undefined; // toJSON\n\n /**\n * Response type.\n * @type {string}\n */\n this.responseType = responseType; // toJSON\n\n /**\n * Whether responses are streamed or not.\n * @type {boolean|undefined}\n */\n this.responseStream = responseStream ? 
true : undefined; // toJSON\n\n /**\n * Resolved request type.\n * @type {Type|null}\n */\n this.resolvedRequestType = null;\n\n /**\n * Resolved response type.\n * @type {Type|null}\n */\n this.resolvedResponseType = null;\n\n /**\n * Comment for this method\n * @type {string|null}\n */\n this.comment = comment;\n\n /**\n * Options properly parsed into an object\n */\n this.parsedOptions = parsedOptions;\n}\n\n/**\n * Method descriptor.\n * @interface IMethod\n * @property {string} [type=\"rpc\"] Method type\n * @property {string} requestType Request type\n * @property {string} responseType Response type\n * @property {boolean} [requestStream=false] Whether requests are streamed\n * @property {boolean} [responseStream=false] Whether responses are streamed\n * @property {Object.} [options] Method options\n * @property {string} comment Method comments\n * @property {Object.} [parsedOptions] Method options properly parsed into an object\n */\n\n/**\n * Constructs a method from a method descriptor.\n * @param {string} name Method name\n * @param {IMethod} json Method descriptor\n * @returns {Method} Created method\n * @throws {TypeError} If arguments are invalid\n */\nMethod.fromJSON = function fromJSON(name, json) {\n return new Method(name, json.type, json.requestType, json.responseType, json.requestStream, json.responseStream, json.options, json.comment, json.parsedOptions);\n};\n\n/**\n * Converts this method to a method descriptor.\n * @param {IToJSONOptions} [toJSONOptions] JSON conversion options\n * @returns {IMethod} Method descriptor\n */\nMethod.prototype.toJSON = function toJSON(toJSONOptions) {\n var keepComments = toJSONOptions ? Boolean(toJSONOptions.keepComments) : false;\n return util.toObject([\n \"type\" , this.type !== \"rpc\" && /* istanbul ignore next */ this.type || undefined,\n \"requestType\" , this.requestType,\n \"requestStream\" , this.requestStream,\n \"responseType\" , this.responseType,\n \"responseStream\" , this.responseStream,\n \"options\" , this.options,\n \"comment\" , keepComments ? this.comment : undefined,\n \"parsedOptions\" , this.parsedOptions,\n ]);\n};\n\n/**\n * @override\n */\nMethod.prototype.resolve = function resolve() {\n\n /* istanbul ignore if */\n if (this.resolved)\n return this;\n\n this.resolvedRequestType = this.parent.lookupType(this.requestType);\n this.resolvedResponseType = this.parent.lookupType(this.responseType);\n\n return ReflectionObject.prototype.resolve.call(this);\n};\n\n\n//# sourceURL=webpack://@waku/noise-rtc/./node_modules/protobufjs/src/method.js?");
/***/ }),
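/*
 * Methods describe the rpc endpoints of a Service. A hedged sketch, assuming
 * the parser from the full build and an application-provided `rpcImpl`
 * transport function with the signature rpcImpl(method, requestData, callback):
 *
 *   var parsed = protobuf.parse(
 *     "syntax = \"proto3\";" +
 *     " message Ping { string msg = 1; } message Pong { string msg = 1; }" +
 *     " service Echo { rpc Send (Ping) returns (Pong); }");
 *   var Echo = parsed.root.lookupService("Echo");
 *   var echo = Echo.create(rpcImpl, false, false);   // no length-delimiting
 *   echo.send({ msg: "hi" }, function(err, pong) {
 *     if (err) throw err;
 *   });
 */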
/***/ "./node_modules/protobufjs/src/namespace.js":
/*!**************************************************!*\
!*** ./node_modules/protobufjs/src/namespace.js ***!
\**************************************************/
/***/ ((module, __unused_webpack_exports, __webpack_require__) => {
"use strict";
eval("\nmodule.exports = Namespace;\n\n// extends ReflectionObject\nvar ReflectionObject = __webpack_require__(/*! ./object */ \"./node_modules/protobufjs/src/object.js\");\n((Namespace.prototype = Object.create(ReflectionObject.prototype)).constructor = Namespace).className = \"Namespace\";\n\nvar Field = __webpack_require__(/*! ./field */ \"./node_modules/protobufjs/src/field.js\"),\n util = __webpack_require__(/*! ./util */ \"./node_modules/protobufjs/src/util.js\"),\n OneOf = __webpack_require__(/*! ./oneof */ \"./node_modules/protobufjs/src/oneof.js\");\n\nvar Type, // cyclic\n Service,\n Enum;\n\n/**\n * Constructs a new namespace instance.\n * @name Namespace\n * @classdesc Reflected namespace.\n * @extends NamespaceBase\n * @constructor\n * @param {string} name Namespace name\n * @param {Object.} [options] Declared options\n */\n\n/**\n * Constructs a namespace from JSON.\n * @memberof Namespace\n * @function\n * @param {string} name Namespace name\n * @param {Object.} json JSON object\n * @returns {Namespace} Created namespace\n * @throws {TypeError} If arguments are invalid\n */\nNamespace.fromJSON = function fromJSON(name, json) {\n return new Namespace(name, json.options).addJSON(json.nested);\n};\n\n/**\n * Converts an array of reflection objects to JSON.\n * @memberof Namespace\n * @param {ReflectionObject[]} array Object array\n * @param {IToJSONOptions} [toJSONOptions] JSON conversion options\n * @returns {Object.|undefined} JSON object or `undefined` when array is empty\n */\nfunction arrayToJSON(array, toJSONOptions) {\n if (!(array && array.length))\n return undefined;\n var obj = {};\n for (var i = 0; i < array.length; ++i)\n obj[array[i].name] = array[i].toJSON(toJSONOptions);\n return obj;\n}\n\nNamespace.arrayToJSON = arrayToJSON;\n\n/**\n * Tests if the specified id is reserved.\n * @param {Array.|undefined} reserved Array of reserved ranges and names\n * @param {number} id Id to test\n * @returns {boolean} `true` if reserved, otherwise `false`\n */\nNamespace.isReservedId = function isReservedId(reserved, id) {\n if (reserved)\n for (var i = 0; i < reserved.length; ++i)\n if (typeof reserved[i] !== \"string\" && reserved[i][0] <= id && reserved[i][1] > id)\n return true;\n return false;\n};\n\n/**\n * Tests if the specified name is reserved.\n * @param {Array.|undefined} reserved Array of reserved ranges and names\n * @param {string} name Name to test\n * @returns {boolean} `true` if reserved, otherwise `false`\n */\nNamespace.isReservedName = function isReservedName(reserved, name) {\n if (reserved)\n for (var i = 0; i < reserved.length; ++i)\n if (reserved[i] === name)\n return true;\n return false;\n};\n\n/**\n * Not an actual constructor. Use {@link Namespace} instead.\n * @classdesc Base class of all reflection objects containing nested objects. 
This is not an actual class but here for the sake of having consistent type definitions.\n * @exports NamespaceBase\n * @extends ReflectionObject\n * @abstract\n * @constructor\n * @param {string} name Namespace name\n * @param {Object.} [options] Declared options\n * @see {@link Namespace}\n */\nfunction Namespace(name, options) {\n ReflectionObject.call(this, name, options);\n\n /**\n * Nested objects by name.\n * @type {Object.|undefined}\n */\n this.nested = undefined; // toJSON\n\n /**\n * Cached nested objects as an array.\n * @type {ReflectionObject[]|null}\n * @private\n */\n this._nestedArray = null;\n}\n\nfunction clearCache(namespace) {\n namespace._nestedArray = null;\n return namespace;\n}\n\n/**\n * Nested objects of this namespace as an array for iteration.\n * @name NamespaceBase#nestedArray\n * @type {ReflectionObject[]}\n * @readonly\n */\nObject.defineProperty(Namespace.prototype, \"nestedArray\", {\n get: function() {\n return this._nestedArray || (this._nestedArray = util.toArray(this.nested));\n }\n});\n\n/**\n * Namespace descriptor.\n * @interface INamespace\n * @property {Object.} [options] Namespace options\n * @property {Object.} [nested] Nested object descriptors\n */\n\n/**\n * Any extension field descriptor.\n * @typedef AnyExtensionField\n * @type {IExtensionField|IExtensionMapField}\n */\n\n/**\n * Any nested object descriptor.\n * @typedef AnyNestedObject\n * @type {IEnum|IType|IService|AnyExtensionField|INamespace|IOneOf}\n */\n\n/**\n * Converts this namespace to a namespace descriptor.\n * @param {IToJSONOptions} [toJSONOptions] JSON conversion options\n * @returns {INamespace} Namespace descriptor\n */\nNamespace.prototype.toJSON = function toJSON(toJSONOptions) {\n return util.toObject([\n \"options\" , this.options,\n \"nested\" , arrayToJSON(this.nestedArray, toJSONOptions)\n ]);\n};\n\n/**\n * Adds nested objects to this namespace from nested object descriptors.\n * @param {Object.} nestedJson Any nested object descriptors\n * @returns {Namespace} `this`\n */\nNamespace.prototype.addJSON = function addJSON(nestedJson) {\n var ns = this;\n /* istanbul ignore else */\n if (nestedJson) {\n for (var names = Object.keys(nestedJson), i = 0, nested; i < names.length; ++i) {\n nested = nestedJson[names[i]];\n ns.add( // most to least likely\n ( nested.fields !== undefined\n ? Type.fromJSON\n : nested.values !== undefined\n ? Enum.fromJSON\n : nested.methods !== undefined\n ? Service.fromJSON\n : nested.id !== undefined\n ? 
Field.fromJSON\n : Namespace.fromJSON )(names[i], nested)\n );\n }\n }\n return this;\n};\n\n/**\n * Gets the nested object of the specified name.\n * @param {string} name Nested object name\n * @returns {ReflectionObject|null} The reflection object or `null` if it doesn't exist\n */\nNamespace.prototype.get = function get(name) {\n return this.nested && this.nested[name]\n || null;\n};\n\n/**\n * Gets the values of the nested {@link Enum|enum} of the specified name.\n * This methods differs from {@link Namespace#get|get} in that it returns an enum's values directly and throws instead of returning `null`.\n * @param {string} name Nested enum name\n * @returns {Object.} Enum values\n * @throws {Error} If there is no such enum\n */\nNamespace.prototype.getEnum = function getEnum(name) {\n if (this.nested && this.nested[name] instanceof Enum)\n return this.nested[name].values;\n throw Error(\"no such enum: \" + name);\n};\n\n/**\n * Adds a nested object to this namespace.\n * @param {ReflectionObject} object Nested object to add\n * @returns {Namespace} `this`\n * @throws {TypeError} If arguments are invalid\n * @throws {Error} If there is already a nested object with this name\n */\nNamespace.prototype.add = function add(object) {\n\n if (!(object instanceof Field && object.extend !== undefined || object instanceof Type || object instanceof OneOf || object instanceof Enum || object instanceof Service || object instanceof Namespace))\n throw TypeError(\"object must be a valid nested object\");\n\n if (!this.nested)\n this.nested = {};\n else {\n var prev = this.get(object.name);\n if (prev) {\n if (prev instanceof Namespace && object instanceof Namespace && !(prev instanceof Type || prev instanceof Service)) {\n // replace plain namespace but keep existing nested elements and options\n var nested = prev.nestedArray;\n for (var i = 0; i < nested.length; ++i)\n object.add(nested[i]);\n this.remove(prev);\n if (!this.nested)\n this.nested = {};\n object.setOptions(prev.options, true);\n\n } else\n throw Error(\"duplicate name '\" + object.name + \"' in \" + this);\n }\n }\n this.nested[object.name] = object;\n object.onAdd(this);\n return clearCache(this);\n};\n\n/**\n * Removes a nested object from this namespace.\n * @param {ReflectionObject} object Nested object to remove\n * @returns {Namespace} `this`\n * @throws {TypeError} If arguments are invalid\n * @throws {Error} If `object` is not a member of this namespace\n */\nNamespace.prototype.remove = function remove(object) {\n\n if (!(object instanceof ReflectionObject))\n throw TypeError(\"object must be a ReflectionObject\");\n if (object.parent !== this)\n throw Error(object + \" is not a member of \" + this);\n\n delete this.nested[object.name];\n if (!Object.keys(this.nested).length)\n this.nested = undefined;\n\n object.onRemove(this);\n return clearCache(this);\n};\n\n/**\n * Defines additial namespaces within this one if not yet existing.\n * @param {string|string[]} path Path to create\n * @param {*} [json] Nested types to create from JSON\n * @returns {Namespace} Pointer to the last namespace created or `this` if path is empty\n */\nNamespace.prototype.define = function define(path, json) {\n\n if (util.isString(path))\n path = path.split(\".\");\n else if (!Array.isArray(path))\n throw TypeError(\"illegal path\");\n if (path && path.length && path[0] === \"\")\n throw Error(\"path must be relative\");\n\n var ptr = this;\n while (path.length > 0) {\n var part = path.shift();\n if (ptr.nested && ptr.nested[part]) {\n ptr = 
ptr.nested[part];\n if (!(ptr instanceof Namespace))\n throw Error(\"path conflicts with non-namespace objects\");\n } else\n ptr.add(ptr = new Namespace(part));\n }\n if (json)\n ptr.addJSON(json);\n return ptr;\n};\n\n/**\n * Resolves this namespace's and all its nested objects' type references. Useful to validate a reflection tree, but comes at a cost.\n * @returns {Namespace} `this`\n */\nNamespace.prototype.resolveAll = function resolveAll() {\n var nested = this.nestedArray, i = 0;\n while (i < nested.length)\n if (nested[i] instanceof Namespace)\n nested[i++].resolveAll();\n else\n nested[i++].resolve();\n return this.resolve();\n};\n\n/**\n * Recursively looks up the reflection object matching the specified path in the scope of this namespace.\n * @param {string|string[]} path Path to look up\n * @param {*|Array.<*>} filterTypes Filter types, any combination of the constructors of `protobuf.Type`, `protobuf.Enum`, `protobuf.Service` etc.\n * @param {boolean} [parentAlreadyChecked=false] If known, whether the parent has already been checked\n * @returns {ReflectionObject|null} Looked up object or `null` if none could be found\n */\nNamespace.prototype.lookup = function lookup(path, filterTypes, parentAlreadyChecked) {\n\n /* istanbul ignore next */\n if (typeof filterTypes === \"boolean\") {\n parentAlreadyChecked = filterTypes;\n filterTypes = undefined;\n } else if (filterTypes && !Array.isArray(filterTypes))\n filterTypes = [ filterTypes ];\n\n if (util.isString(path) && path.length) {\n if (path === \".\")\n return this.root;\n path = path.split(\".\");\n } else if (!path.length)\n return this;\n\n // Start at root if path is absolute\n if (path[0] === \"\")\n return this.root.lookup(path.slice(1), filterTypes);\n\n // Test if the first part matches any nested object, and if so, traverse if path contains more\n var found = this.get(path[0]);\n if (found) {\n if (path.length === 1) {\n if (!filterTypes || filterTypes.indexOf(found.constructor) > -1)\n return found;\n } else if (found instanceof Namespace && (found = found.lookup(path.slice(1), filterTypes, true)))\n return found;\n\n // Otherwise try each nested namespace\n } else\n for (var i = 0; i < this.nestedArray.length; ++i)\n if (this._nestedArray[i] instanceof Namespace && (found = this._nestedArray[i].lookup(path, filterTypes, true)))\n return found;\n\n // If there hasn't been a match, try again at the parent\n if (this.parent === null || parentAlreadyChecked)\n return null;\n return this.parent.lookup(path, filterTypes);\n};\n\n/**\n * Looks up the reflection object at the specified path, relative to this namespace.\n * @name NamespaceBase#lookup\n * @function\n * @param {string|string[]} path Path to look up\n * @param {boolean} [parentAlreadyChecked=false] Whether the parent has already been checked\n * @returns {ReflectionObject|null} Looked up object or `null` if none could be found\n * @variation 2\n */\n// lookup(path: string, [parentAlreadyChecked: boolean])\n\n/**\n * Looks up the {@link Type|type} at the specified path, relative to this namespace.\n * Besides its signature, this methods differs from {@link Namespace#lookup|lookup} in that it throws instead of returning `null`.\n * @param {string|string[]} path Path to look up\n * @returns {Type} Looked up type\n * @throws {Error} If `path` does not point to a type\n */\nNamespace.prototype.lookupType = function lookupType(path) {\n var found = this.lookup(path, [ Type ]);\n if (!found)\n throw Error(\"no such type: \" + path);\n return found;\n};\n\n/**\n * 
Looks up the values of the {@link Enum|enum} at the specified path, relative to this namespace.\n * Besides its signature, this methods differs from {@link Namespace#lookup|lookup} in that it throws instead of returning `null`.\n * @param {string|string[]} path Path to look up\n * @returns {Enum} Looked up enum\n * @throws {Error} If `path` does not point to an enum\n */\nNamespace.prototype.lookupEnum = function lookupEnum(path) {\n var found = this.lookup(path, [ Enum ]);\n if (!found)\n throw Error(\"no such Enum '\" + path + \"' in \" + this);\n return found;\n};\n\n/**\n * Looks up the {@link Type|type} or {@link Enum|enum} at the specified path, relative to this namespace.\n * Besides its signature, this methods differs from {@link Namespace#lookup|lookup} in that it throws instead of returning `null`.\n * @param {string|string[]} path Path to look up\n * @returns {Type} Looked up type or enum\n * @throws {Error} If `path` does not point to a type or enum\n */\nNamespace.prototype.lookupTypeOrEnum = function lookupTypeOrEnum(path) {\n var found = this.lookup(path, [ Type, Enum ]);\n if (!found)\n throw Error(\"no such Type or Enum '\" + path + \"' in \" + this);\n return found;\n};\n\n/**\n * Looks up the {@link Service|service} at the specified path, relative to this namespace.\n * Besides its signature, this methods differs from {@link Namespace#lookup|lookup} in that it throws instead of returning `null`.\n * @param {string|string[]} path Path to look up\n * @returns {Service} Looked up service\n * @throws {Error} If `path` does not point to a service\n */\nNamespace.prototype.lookupService = function lookupService(path) {\n var found = this.lookup(path, [ Service ]);\n if (!found)\n throw Error(\"no such Service '\" + path + \"' in \" + this);\n return found;\n};\n\n// Sets up cyclic dependencies (called in index-light)\nNamespace._configure = function(Type_, Service_, Enum_) {\n Type = Type_;\n Service = Service_;\n Enum = Enum_;\n};\n\n\n//# sourceURL=webpack://@waku/noise-rtc/./node_modules/protobufjs/src/namespace.js?");
/***/ }),
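/*
 * Illustrative usage sketch (not part of the bundle; "my.pkg" and "Message" are
 * placeholder names): how the Namespace reflection API above is typically driven
 * through protobufjs. `define` creates nested namespaces on demand, `lookupType`
 * throws on a missing path while `lookup` returns null.
 *
 *   var protobuf = require("protobufjs");
 *   var root = new protobuf.Root();                   // Root extends Namespace
 *   var pkg = root.define("my.pkg");                  // creates "my" and "my.pkg"
 *   pkg.add(new protobuf.Type("Message"));            // nest a reflected type
 *   var Message = root.lookupType("my.pkg.Message");  // throws if not found
 *   root.lookup("does.not.Exist");                    // -> null
 */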
/***/ "./node_modules/protobufjs/src/object.js":
/*!***********************************************!*\
!*** ./node_modules/protobufjs/src/object.js ***!
\***********************************************/
/***/ ((module, __unused_webpack_exports, __webpack_require__) => {
"use strict";
eval("\nmodule.exports = ReflectionObject;\n\nReflectionObject.className = \"ReflectionObject\";\n\nvar util = __webpack_require__(/*! ./util */ \"./node_modules/protobufjs/src/util.js\");\n\nvar Root; // cyclic\n\n/**\n * Constructs a new reflection object instance.\n * @classdesc Base class of all reflection objects.\n * @constructor\n * @param {string} name Object name\n * @param {Object.} [options] Declared options\n * @abstract\n */\nfunction ReflectionObject(name, options) {\n\n if (!util.isString(name))\n throw TypeError(\"name must be a string\");\n\n if (options && !util.isObject(options))\n throw TypeError(\"options must be an object\");\n\n /**\n * Options.\n * @type {Object.|undefined}\n */\n this.options = options; // toJSON\n\n /**\n * Parsed Options.\n * @type {Array.>|undefined}\n */\n this.parsedOptions = null;\n\n /**\n * Unique name within its namespace.\n * @type {string}\n */\n this.name = name;\n\n /**\n * Parent namespace.\n * @type {Namespace|null}\n */\n this.parent = null;\n\n /**\n * Whether already resolved or not.\n * @type {boolean}\n */\n this.resolved = false;\n\n /**\n * Comment text, if any.\n * @type {string|null}\n */\n this.comment = null;\n\n /**\n * Defining file name.\n * @type {string|null}\n */\n this.filename = null;\n}\n\nObject.defineProperties(ReflectionObject.prototype, {\n\n /**\n * Reference to the root namespace.\n * @name ReflectionObject#root\n * @type {Root}\n * @readonly\n */\n root: {\n get: function() {\n var ptr = this;\n while (ptr.parent !== null)\n ptr = ptr.parent;\n return ptr;\n }\n },\n\n /**\n * Full name including leading dot.\n * @name ReflectionObject#fullName\n * @type {string}\n * @readonly\n */\n fullName: {\n get: function() {\n var path = [ this.name ],\n ptr = this.parent;\n while (ptr) {\n path.unshift(ptr.name);\n ptr = ptr.parent;\n }\n return path.join(\".\");\n }\n }\n});\n\n/**\n * Converts this reflection object to its descriptor representation.\n * @returns {Object.} Descriptor\n * @abstract\n */\nReflectionObject.prototype.toJSON = /* istanbul ignore next */ function toJSON() {\n throw Error(); // not implemented, shouldn't happen\n};\n\n/**\n * Called when this object is added to a parent.\n * @param {ReflectionObject} parent Parent added to\n * @returns {undefined}\n */\nReflectionObject.prototype.onAdd = function onAdd(parent) {\n if (this.parent && this.parent !== parent)\n this.parent.remove(this);\n this.parent = parent;\n this.resolved = false;\n var root = parent.root;\n if (root instanceof Root)\n root._handleAdd(this);\n};\n\n/**\n * Called when this object is removed from a parent.\n * @param {ReflectionObject} parent Parent removed from\n * @returns {undefined}\n */\nReflectionObject.prototype.onRemove = function onRemove(parent) {\n var root = parent.root;\n if (root instanceof Root)\n root._handleRemove(this);\n this.parent = null;\n this.resolved = false;\n};\n\n/**\n * Resolves this objects type references.\n * @returns {ReflectionObject} `this`\n */\nReflectionObject.prototype.resolve = function resolve() {\n if (this.resolved)\n return this;\n if (this.root instanceof Root)\n this.resolved = true; // only if part of a root\n return this;\n};\n\n/**\n * Gets an option value.\n * @param {string} name Option name\n * @returns {*} Option value or `undefined` if not set\n */\nReflectionObject.prototype.getOption = function getOption(name) {\n if (this.options)\n return this.options[name];\n return undefined;\n};\n\n/**\n * Sets an option.\n * @param {string} name Option name\n * @param {*} value 
Option value\n * @param {boolean} [ifNotSet] Sets the option only if it isn't currently set\n * @returns {ReflectionObject} `this`\n */\nReflectionObject.prototype.setOption = function setOption(name, value, ifNotSet) {\n if (!ifNotSet || !this.options || this.options[name] === undefined)\n (this.options || (this.options = {}))[name] = value;\n return this;\n};\n\n/**\n * Sets a parsed option.\n * @param {string} name parsed Option name\n * @param {*} value Option value\n * @param {string} propName dot '.' delimited full path of property within the option to set. if undefined\\empty, will add a new option with that value\n * @returns {ReflectionObject} `this`\n */\nReflectionObject.prototype.setParsedOption = function setParsedOption(name, value, propName) {\n if (!this.parsedOptions) {\n this.parsedOptions = [];\n }\n var parsedOptions = this.parsedOptions;\n if (propName) {\n // If setting a sub property of an option then try to merge it\n // with an existing option\n var opt = parsedOptions.find(function (opt) {\n return Object.prototype.hasOwnProperty.call(opt, name);\n });\n if (opt) {\n // If we found an existing option - just merge the property value\n var newValue = opt[name];\n util.setProperty(newValue, propName, value);\n } else {\n // otherwise, create a new option, set it's property and add it to the list\n opt = {};\n opt[name] = util.setProperty({}, propName, value);\n parsedOptions.push(opt);\n }\n } else {\n // Always create a new option when setting the value of the option itself\n var newOpt = {};\n newOpt[name] = value;\n parsedOptions.push(newOpt);\n }\n return this;\n};\n\n/**\n * Sets multiple options.\n * @param {Object.} options Options to set\n * @param {boolean} [ifNotSet] Sets an option only if it isn't currently set\n * @returns {ReflectionObject} `this`\n */\nReflectionObject.prototype.setOptions = function setOptions(options, ifNotSet) {\n if (options)\n for (var keys = Object.keys(options), i = 0; i < keys.length; ++i)\n this.setOption(keys[i], options[keys[i]], ifNotSet);\n return this;\n};\n\n/**\n * Converts this instance to its string representation.\n * @returns {string} Class name[, space, full name]\n */\nReflectionObject.prototype.toString = function toString() {\n var className = this.constructor.className,\n fullName = this.fullName;\n if (fullName.length)\n return className + \" \" + fullName;\n return className;\n};\n\n// Sets up cyclic dependencies (called in index-light)\nReflectionObject._configure = function(Root_) {\n Root = Root_;\n};\n\n\n//# sourceURL=webpack://@waku/noise-rtc/./node_modules/protobufjs/src/object.js?");
/***/ }),
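/*
 * Illustrative sketch (assumes a Type "my.pkg.Message" exists in `root`, as in the
 * namespace sketch above): every reflected construct (Type, Enum, Service, Field,
 * OneOf) inherits from ReflectionObject, so the option and naming helpers defined
 * above apply uniformly.
 *
 *   var Message = root.lookupType("my.pkg.Message");
 *   Message.fullName;                       // ".my.pkg.Message" (leading dot)
 *   Message.setOption("deprecated", true);  // stored on Message.options
 *   Message.getOption("deprecated");        // -> true
 *   Message.toString();                     // "Type .my.pkg.Message"
 */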
/***/ "./node_modules/protobufjs/src/oneof.js":
/*!**********************************************!*\
!*** ./node_modules/protobufjs/src/oneof.js ***!
\**********************************************/
/***/ ((module, __unused_webpack_exports, __webpack_require__) => {
"use strict";
eval("\nmodule.exports = OneOf;\n\n// extends ReflectionObject\nvar ReflectionObject = __webpack_require__(/*! ./object */ \"./node_modules/protobufjs/src/object.js\");\n((OneOf.prototype = Object.create(ReflectionObject.prototype)).constructor = OneOf).className = \"OneOf\";\n\nvar Field = __webpack_require__(/*! ./field */ \"./node_modules/protobufjs/src/field.js\"),\n util = __webpack_require__(/*! ./util */ \"./node_modules/protobufjs/src/util.js\");\n\n/**\n * Constructs a new oneof instance.\n * @classdesc Reflected oneof.\n * @extends ReflectionObject\n * @constructor\n * @param {string} name Oneof name\n * @param {string[]|Object.} [fieldNames] Field names\n * @param {Object.} [options] Declared options\n * @param {string} [comment] Comment associated with this field\n */\nfunction OneOf(name, fieldNames, options, comment) {\n if (!Array.isArray(fieldNames)) {\n options = fieldNames;\n fieldNames = undefined;\n }\n ReflectionObject.call(this, name, options);\n\n /* istanbul ignore if */\n if (!(fieldNames === undefined || Array.isArray(fieldNames)))\n throw TypeError(\"fieldNames must be an Array\");\n\n /**\n * Field names that belong to this oneof.\n * @type {string[]}\n */\n this.oneof = fieldNames || []; // toJSON, marker\n\n /**\n * Fields that belong to this oneof as an array for iteration.\n * @type {Field[]}\n * @readonly\n */\n this.fieldsArray = []; // declared readonly for conformance, possibly not yet added to parent\n\n /**\n * Comment for this field.\n * @type {string|null}\n */\n this.comment = comment;\n}\n\n/**\n * Oneof descriptor.\n * @interface IOneOf\n * @property {Array.} oneof Oneof field names\n * @property {Object.} [options] Oneof options\n */\n\n/**\n * Constructs a oneof from a oneof descriptor.\n * @param {string} name Oneof name\n * @param {IOneOf} json Oneof descriptor\n * @returns {OneOf} Created oneof\n * @throws {TypeError} If arguments are invalid\n */\nOneOf.fromJSON = function fromJSON(name, json) {\n return new OneOf(name, json.oneof, json.options, json.comment);\n};\n\n/**\n * Converts this oneof to a oneof descriptor.\n * @param {IToJSONOptions} [toJSONOptions] JSON conversion options\n * @returns {IOneOf} Oneof descriptor\n */\nOneOf.prototype.toJSON = function toJSON(toJSONOptions) {\n var keepComments = toJSONOptions ? Boolean(toJSONOptions.keepComments) : false;\n return util.toObject([\n \"options\" , this.options,\n \"oneof\" , this.oneof,\n \"comment\" , keepComments ? 
this.comment : undefined\n ]);\n};\n\n/**\n * Adds the fields of the specified oneof to the parent if not already done so.\n * @param {OneOf} oneof The oneof\n * @returns {undefined}\n * @inner\n * @ignore\n */\nfunction addFieldsToParent(oneof) {\n if (oneof.parent)\n for (var i = 0; i < oneof.fieldsArray.length; ++i)\n if (!oneof.fieldsArray[i].parent)\n oneof.parent.add(oneof.fieldsArray[i]);\n}\n\n/**\n * Adds a field to this oneof and removes it from its current parent, if any.\n * @param {Field} field Field to add\n * @returns {OneOf} `this`\n */\nOneOf.prototype.add = function add(field) {\n\n /* istanbul ignore if */\n if (!(field instanceof Field))\n throw TypeError(\"field must be a Field\");\n\n if (field.parent && field.parent !== this.parent)\n field.parent.remove(field);\n this.oneof.push(field.name);\n this.fieldsArray.push(field);\n field.partOf = this; // field.parent remains null\n addFieldsToParent(this);\n return this;\n};\n\n/**\n * Removes a field from this oneof and puts it back to the oneof's parent.\n * @param {Field} field Field to remove\n * @returns {OneOf} `this`\n */\nOneOf.prototype.remove = function remove(field) {\n\n /* istanbul ignore if */\n if (!(field instanceof Field))\n throw TypeError(\"field must be a Field\");\n\n var index = this.fieldsArray.indexOf(field);\n\n /* istanbul ignore if */\n if (index < 0)\n throw Error(field + \" is not a member of \" + this);\n\n this.fieldsArray.splice(index, 1);\n index = this.oneof.indexOf(field.name);\n\n /* istanbul ignore else */\n if (index > -1) // theoretical\n this.oneof.splice(index, 1);\n\n field.partOf = null;\n return this;\n};\n\n/**\n * @override\n */\nOneOf.prototype.onAdd = function onAdd(parent) {\n ReflectionObject.prototype.onAdd.call(this, parent);\n var self = this;\n // Collect present fields\n for (var i = 0; i < this.oneof.length; ++i) {\n var field = parent.get(this.oneof[i]);\n if (field && !field.partOf) {\n field.partOf = self;\n self.fieldsArray.push(field);\n }\n }\n // Add not yet present fields\n addFieldsToParent(this);\n};\n\n/**\n * @override\n */\nOneOf.prototype.onRemove = function onRemove(parent) {\n for (var i = 0, field; i < this.fieldsArray.length; ++i)\n if ((field = this.fieldsArray[i]).parent)\n field.parent.remove(field);\n ReflectionObject.prototype.onRemove.call(this, parent);\n};\n\n/**\n * Decorator function as returned by {@link OneOf.d} (TypeScript).\n * @typedef OneOfDecorator\n * @type {function}\n * @param {Object} prototype Target prototype\n * @param {string} oneofName OneOf name\n * @returns {undefined}\n */\n\n/**\n * OneOf decorator (TypeScript).\n * @function\n * @param {...string} fieldNames Field names\n * @returns {OneOfDecorator} Decorator function\n * @template T extends string\n */\nOneOf.d = function decorateOneOf() {\n var fieldNames = new Array(arguments.length),\n index = 0;\n while (index < arguments.length)\n fieldNames[index] = arguments[index++];\n return function oneOfDecorator(prototype, oneofName) {\n util.decorateType(prototype.constructor)\n .add(new OneOf(oneofName, fieldNames));\n Object.defineProperty(prototype, oneofName, {\n get: util.oneOfGetter(fieldNames),\n set: util.oneOfSetter(fieldNames)\n });\n };\n};\n\n\n//# sourceURL=webpack://@waku/noise-rtc/./node_modules/protobufjs/src/oneof.js?");
/***/ }),
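/*
 * Illustrative sketch (placeholder names; the virtual property relies on the
 * generated message constructor): building a oneof via reflection. Adding the
 * OneOf to a Type groups the named fields, and message instances should report
 * the currently set member through a virtual property of the same name.
 *
 *   var Shape = new protobuf.Type("Shape")
 *       .add(new protobuf.Field("circle", 1, "double"))
 *       .add(new protobuf.Field("square", 2, "double"))
 *       .add(new protobuf.OneOf("kind", ["circle", "square"]));
 *   var msg = Shape.create({ circle: 1.5 });
 *   // expected: msg.kind === "circle"
 */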
/***/ "./node_modules/protobufjs/src/parse.js":
/*!**********************************************!*\
!*** ./node_modules/protobufjs/src/parse.js ***!
\**********************************************/
/***/ ((module, __unused_webpack_exports, __webpack_require__) => {
"use strict";
eval("\nmodule.exports = parse;\n\nparse.filename = null;\nparse.defaults = { keepCase: false };\n\nvar tokenize = __webpack_require__(/*! ./tokenize */ \"./node_modules/protobufjs/src/tokenize.js\"),\n Root = __webpack_require__(/*! ./root */ \"./node_modules/protobufjs/src/root.js\"),\n Type = __webpack_require__(/*! ./type */ \"./node_modules/protobufjs/src/type.js\"),\n Field = __webpack_require__(/*! ./field */ \"./node_modules/protobufjs/src/field.js\"),\n MapField = __webpack_require__(/*! ./mapfield */ \"./node_modules/protobufjs/src/mapfield.js\"),\n OneOf = __webpack_require__(/*! ./oneof */ \"./node_modules/protobufjs/src/oneof.js\"),\n Enum = __webpack_require__(/*! ./enum */ \"./node_modules/protobufjs/src/enum.js\"),\n Service = __webpack_require__(/*! ./service */ \"./node_modules/protobufjs/src/service.js\"),\n Method = __webpack_require__(/*! ./method */ \"./node_modules/protobufjs/src/method.js\"),\n types = __webpack_require__(/*! ./types */ \"./node_modules/protobufjs/src/types.js\"),\n util = __webpack_require__(/*! ./util */ \"./node_modules/protobufjs/src/util.js\");\n\nvar base10Re = /^[1-9][0-9]*$/,\n base10NegRe = /^-?[1-9][0-9]*$/,\n base16Re = /^0[x][0-9a-fA-F]+$/,\n base16NegRe = /^-?0[x][0-9a-fA-F]+$/,\n base8Re = /^0[0-7]+$/,\n base8NegRe = /^-?0[0-7]+$/,\n numberRe = /^(?![eE])[0-9]*(?:\\.[0-9]*)?(?:[eE][+-]?[0-9]+)?$/,\n nameRe = /^[a-zA-Z_][a-zA-Z_0-9]*$/,\n typeRefRe = /^(?:\\.?[a-zA-Z_][a-zA-Z_0-9]*)(?:\\.[a-zA-Z_][a-zA-Z_0-9]*)*$/,\n fqTypeRefRe = /^(?:\\.[a-zA-Z_][a-zA-Z_0-9]*)+$/;\n\n/**\n * Result object returned from {@link parse}.\n * @interface IParserResult\n * @property {string|undefined} package Package name, if declared\n * @property {string[]|undefined} imports Imports, if any\n * @property {string[]|undefined} weakImports Weak imports, if any\n * @property {string|undefined} syntax Syntax, if specified (either `\"proto2\"` or `\"proto3\"`)\n * @property {Root} root Populated root instance\n */\n\n/**\n * Options modifying the behavior of {@link parse}.\n * @interface IParseOptions\n * @property {boolean} [keepCase=false] Keeps field casing instead of converting to camel case\n * @property {boolean} [alternateCommentMode=false] Recognize double-slash comments in addition to doc-block comments.\n * @property {boolean} [preferTrailingComment=false] Use trailing comment when both leading comment and trailing comment exist.\n */\n\n/**\n * Options modifying the behavior of JSON serialization.\n * @interface IToJSONOptions\n * @property {boolean} [keepComments=false] Serializes comments.\n */\n\n/**\n * Parses the given .proto source and returns an object with the parsed contents.\n * @param {string} source Source contents\n * @param {Root} root Root to populate\n * @param {IParseOptions} [options] Parse options. 
Defaults to {@link parse.defaults} when omitted.\n * @returns {IParserResult} Parser result\n * @property {string} filename=null Currently processing file name for error reporting, if known\n * @property {IParseOptions} defaults Default {@link IParseOptions}\n */\nfunction parse(source, root, options) {\n /* eslint-disable callback-return */\n if (!(root instanceof Root)) {\n options = root;\n root = new Root();\n }\n if (!options)\n options = parse.defaults;\n\n var preferTrailingComment = options.preferTrailingComment || false;\n var tn = tokenize(source, options.alternateCommentMode || false),\n next = tn.next,\n push = tn.push,\n peek = tn.peek,\n skip = tn.skip,\n cmnt = tn.cmnt;\n\n var head = true,\n pkg,\n imports,\n weakImports,\n syntax,\n isProto3 = false;\n\n var ptr = root;\n\n var applyCase = options.keepCase ? function(name) { return name; } : util.camelCase;\n\n /* istanbul ignore next */\n function illegal(token, name, insideTryCatch) {\n var filename = parse.filename;\n if (!insideTryCatch)\n parse.filename = null;\n return Error(\"illegal \" + (name || \"token\") + \" '\" + token + \"' (\" + (filename ? filename + \", \" : \"\") + \"line \" + tn.line + \")\");\n }\n\n function readString() {\n var values = [],\n token;\n do {\n /* istanbul ignore if */\n if ((token = next()) !== \"\\\"\" && token !== \"'\")\n throw illegal(token);\n\n values.push(next());\n skip(token);\n token = peek();\n } while (token === \"\\\"\" || token === \"'\");\n return values.join(\"\");\n }\n\n function readValue(acceptTypeRef) {\n var token = next();\n switch (token) {\n case \"'\":\n case \"\\\"\":\n push(token);\n return readString();\n case \"true\": case \"TRUE\":\n return true;\n case \"false\": case \"FALSE\":\n return false;\n }\n try {\n return parseNumber(token, /* insideTryCatch */ true);\n } catch (e) {\n\n /* istanbul ignore else */\n if (acceptTypeRef && typeRefRe.test(token))\n return token;\n\n /* istanbul ignore next */\n throw illegal(token, \"value\");\n }\n }\n\n function readRanges(target, acceptStrings) {\n var token, start;\n do {\n if (acceptStrings && ((token = peek()) === \"\\\"\" || token === \"'\"))\n target.push(readString());\n else\n target.push([ start = parseId(next()), skip(\"to\", true) ? 
parseId(next()) : start ]);\n } while (skip(\",\", true));\n skip(\";\");\n }\n\n function parseNumber(token, insideTryCatch) {\n var sign = 1;\n if (token.charAt(0) === \"-\") {\n sign = -1;\n token = token.substring(1);\n }\n switch (token) {\n case \"inf\": case \"INF\": case \"Inf\":\n return sign * Infinity;\n case \"nan\": case \"NAN\": case \"Nan\": case \"NaN\":\n return NaN;\n case \"0\":\n return 0;\n }\n if (base10Re.test(token))\n return sign * parseInt(token, 10);\n if (base16Re.test(token))\n return sign * parseInt(token, 16);\n if (base8Re.test(token))\n return sign * parseInt(token, 8);\n\n /* istanbul ignore else */\n if (numberRe.test(token))\n return sign * parseFloat(token);\n\n /* istanbul ignore next */\n throw illegal(token, \"number\", insideTryCatch);\n }\n\n function parseId(token, acceptNegative) {\n switch (token) {\n case \"max\": case \"MAX\": case \"Max\":\n return 536870911;\n case \"0\":\n return 0;\n }\n\n /* istanbul ignore if */\n if (!acceptNegative && token.charAt(0) === \"-\")\n throw illegal(token, \"id\");\n\n if (base10NegRe.test(token))\n return parseInt(token, 10);\n if (base16NegRe.test(token))\n return parseInt(token, 16);\n\n /* istanbul ignore else */\n if (base8NegRe.test(token))\n return parseInt(token, 8);\n\n /* istanbul ignore next */\n throw illegal(token, \"id\");\n }\n\n function parsePackage() {\n\n /* istanbul ignore if */\n if (pkg !== undefined)\n throw illegal(\"package\");\n\n pkg = next();\n\n /* istanbul ignore if */\n if (!typeRefRe.test(pkg))\n throw illegal(pkg, \"name\");\n\n ptr = ptr.define(pkg);\n skip(\";\");\n }\n\n function parseImport() {\n var token = peek();\n var whichImports;\n switch (token) {\n case \"weak\":\n whichImports = weakImports || (weakImports = []);\n next();\n break;\n case \"public\":\n next();\n // eslint-disable-line no-fallthrough\n default:\n whichImports = imports || (imports = []);\n break;\n }\n token = readString();\n skip(\";\");\n whichImports.push(token);\n }\n\n function parseSyntax() {\n skip(\"=\");\n syntax = readString();\n isProto3 = syntax === \"proto3\";\n\n /* istanbul ignore if */\n if (!isProto3 && syntax !== \"proto2\")\n throw illegal(syntax, \"syntax\");\n\n skip(\";\");\n }\n\n function parseCommon(parent, token) {\n switch (token) {\n\n case \"option\":\n parseOption(parent, token);\n skip(\";\");\n return true;\n\n case \"message\":\n parseType(parent, token);\n return true;\n\n case \"enum\":\n parseEnum(parent, token);\n return true;\n\n case \"service\":\n parseService(parent, token);\n return true;\n\n case \"extend\":\n parseExtension(parent, token);\n return true;\n }\n return false;\n }\n\n function ifBlock(obj, fnIf, fnElse) {\n var trailingLine = tn.line;\n if (obj) {\n if(typeof obj.comment !== \"string\") {\n obj.comment = cmnt(); // try block-type comment\n }\n obj.filename = parse.filename;\n }\n if (skip(\"{\", true)) {\n var token;\n while ((token = next()) !== \"}\")\n fnIf(token);\n skip(\";\", true);\n } else {\n if (fnElse)\n fnElse();\n skip(\";\");\n if (obj && (typeof obj.comment !== \"string\" || preferTrailingComment))\n obj.comment = cmnt(trailingLine) || obj.comment; // try line-type comment\n }\n }\n\n function parseType(parent, token) {\n\n /* istanbul ignore if */\n if (!nameRe.test(token = next()))\n throw illegal(token, \"type name\");\n\n var type = new Type(token);\n ifBlock(type, function parseType_block(token) {\n if (parseCommon(type, token))\n return;\n\n switch (token) {\n\n case \"map\":\n parseMapField(type, token);\n break;\n\n 
case \"required\":\n case \"repeated\":\n parseField(type, token);\n break;\n\n case \"optional\":\n /* istanbul ignore if */\n if (isProto3) {\n parseField(type, \"proto3_optional\");\n } else {\n parseField(type, \"optional\");\n }\n break;\n\n case \"oneof\":\n parseOneOf(type, token);\n break;\n\n case \"extensions\":\n readRanges(type.extensions || (type.extensions = []));\n break;\n\n case \"reserved\":\n readRanges(type.reserved || (type.reserved = []), true);\n break;\n\n default:\n /* istanbul ignore if */\n if (!isProto3 || !typeRefRe.test(token))\n throw illegal(token);\n\n push(token);\n parseField(type, \"optional\");\n break;\n }\n });\n parent.add(type);\n }\n\n function parseField(parent, rule, extend) {\n var type = next();\n if (type === \"group\") {\n parseGroup(parent, rule);\n return;\n }\n\n /* istanbul ignore if */\n if (!typeRefRe.test(type))\n throw illegal(type, \"type\");\n\n var name = next();\n\n /* istanbul ignore if */\n if (!nameRe.test(name))\n throw illegal(name, \"name\");\n\n name = applyCase(name);\n skip(\"=\");\n\n var field = new Field(name, parseId(next()), type, rule, extend);\n ifBlock(field, function parseField_block(token) {\n\n /* istanbul ignore else */\n if (token === \"option\") {\n parseOption(field, token);\n skip(\";\");\n } else\n throw illegal(token);\n\n }, function parseField_line() {\n parseInlineOptions(field);\n });\n\n if (rule === \"proto3_optional\") {\n // for proto3 optional fields, we create a single-member Oneof to mimic \"optional\" behavior\n var oneof = new OneOf(\"_\" + name);\n field.setOption(\"proto3_optional\", true);\n oneof.add(field);\n parent.add(oneof);\n } else {\n parent.add(field);\n }\n\n // JSON defaults to packed=true if not set so we have to set packed=false explicity when\n // parsing proto2 descriptors without the option, where applicable. 
This must be done for\n // all known packable types and anything that could be an enum (= is not a basic type).\n if (!isProto3 && field.repeated && (types.packed[type] !== undefined || types.basic[type] === undefined))\n field.setOption(\"packed\", false, /* ifNotSet */ true);\n }\n\n function parseGroup(parent, rule) {\n var name = next();\n\n /* istanbul ignore if */\n if (!nameRe.test(name))\n throw illegal(name, \"name\");\n\n var fieldName = util.lcFirst(name);\n if (name === fieldName)\n name = util.ucFirst(name);\n skip(\"=\");\n var id = parseId(next());\n var type = new Type(name);\n type.group = true;\n var field = new Field(fieldName, id, name, rule);\n field.filename = parse.filename;\n ifBlock(type, function parseGroup_block(token) {\n switch (token) {\n\n case \"option\":\n parseOption(type, token);\n skip(\";\");\n break;\n\n case \"required\":\n case \"repeated\":\n parseField(type, token);\n break;\n\n case \"optional\":\n /* istanbul ignore if */\n if (isProto3) {\n parseField(type, \"proto3_optional\");\n } else {\n parseField(type, \"optional\");\n }\n break;\n\n case \"message\":\n parseType(type, token);\n break;\n\n case \"enum\":\n parseEnum(type, token);\n break;\n\n /* istanbul ignore next */\n default:\n throw illegal(token); // there are no groups with proto3 semantics\n }\n });\n parent.add(type)\n .add(field);\n }\n\n function parseMapField(parent) {\n skip(\"<\");\n var keyType = next();\n\n /* istanbul ignore if */\n if (types.mapKey[keyType] === undefined)\n throw illegal(keyType, \"type\");\n\n skip(\",\");\n var valueType = next();\n\n /* istanbul ignore if */\n if (!typeRefRe.test(valueType))\n throw illegal(valueType, \"type\");\n\n skip(\">\");\n var name = next();\n\n /* istanbul ignore if */\n if (!nameRe.test(name))\n throw illegal(name, \"name\");\n\n skip(\"=\");\n var field = new MapField(applyCase(name), parseId(next()), keyType, valueType);\n ifBlock(field, function parseMapField_block(token) {\n\n /* istanbul ignore else */\n if (token === \"option\") {\n parseOption(field, token);\n skip(\";\");\n } else\n throw illegal(token);\n\n }, function parseMapField_line() {\n parseInlineOptions(field);\n });\n parent.add(field);\n }\n\n function parseOneOf(parent, token) {\n\n /* istanbul ignore if */\n if (!nameRe.test(token = next()))\n throw illegal(token, \"name\");\n\n var oneof = new OneOf(applyCase(token));\n ifBlock(oneof, function parseOneOf_block(token) {\n if (token === \"option\") {\n parseOption(oneof, token);\n skip(\";\");\n } else {\n push(token);\n parseField(oneof, \"optional\");\n }\n });\n parent.add(oneof);\n }\n\n function parseEnum(parent, token) {\n\n /* istanbul ignore if */\n if (!nameRe.test(token = next()))\n throw illegal(token, \"name\");\n\n var enm = new Enum(token);\n ifBlock(enm, function parseEnum_block(token) {\n switch(token) {\n case \"option\":\n parseOption(enm, token);\n skip(\";\");\n break;\n\n case \"reserved\":\n readRanges(enm.reserved || (enm.reserved = []), true);\n break;\n\n default:\n parseEnumValue(enm, token);\n }\n });\n parent.add(enm);\n }\n\n function parseEnumValue(parent, token) {\n\n /* istanbul ignore if */\n if (!nameRe.test(token))\n throw illegal(token, \"name\");\n\n skip(\"=\");\n var value = parseId(next(), true),\n dummy = {\n options: undefined\n };\n dummy.setOption = function(name, value) {\n if (this.options === undefined)\n this.options = {};\n this.options[name] = value;\n };\n ifBlock(dummy, function parseEnumValue_block(token) {\n\n /* istanbul ignore else */\n if (token === 
\"option\") {\n parseOption(dummy, token); // skip\n skip(\";\");\n } else\n throw illegal(token);\n\n }, function parseEnumValue_line() {\n parseInlineOptions(dummy); // skip\n });\n parent.add(token, value, dummy.comment, dummy.options);\n }\n\n function parseOption(parent, token) {\n var isCustom = skip(\"(\", true);\n\n /* istanbul ignore if */\n if (!typeRefRe.test(token = next()))\n throw illegal(token, \"name\");\n\n var name = token;\n var option = name;\n var propName;\n\n if (isCustom) {\n skip(\")\");\n name = \"(\" + name + \")\";\n option = name;\n token = peek();\n if (fqTypeRefRe.test(token)) {\n propName = token.slice(1); //remove '.' before property name\n name += token;\n next();\n }\n }\n skip(\"=\");\n var optionValue = parseOptionValue(parent, name);\n setParsedOption(parent, option, optionValue, propName);\n }\n\n function parseOptionValue(parent, name) {\n // { a: \"foo\" b { c: \"bar\" } }\n if (skip(\"{\", true)) {\n var objectResult = {};\n\n while (!skip(\"}\", true)) {\n /* istanbul ignore if */\n if (!nameRe.test(token = next())) {\n throw illegal(token, \"name\");\n }\n\n var value;\n var propName = token;\n\n skip(\":\", true);\n\n if (peek() === \"{\")\n value = parseOptionValue(parent, name + \".\" + token);\n else if (peek() === \"[\") {\n // option (my_option) = {\n // repeated_value: [ \"foo\", \"bar\" ]\n // };\n value = [];\n var lastValue;\n if (skip(\"[\", true)) {\n do {\n lastValue = readValue(true);\n value.push(lastValue);\n } while (skip(\",\", true));\n skip(\"]\");\n if (typeof lastValue !== \"undefined\") {\n setOption(parent, name + \".\" + token, lastValue);\n }\n }\n } else {\n value = readValue(true);\n setOption(parent, name + \".\" + token, value);\n }\n\n var prevValue = objectResult[propName];\n\n if (prevValue)\n value = [].concat(prevValue).concat(value);\n\n objectResult[propName] = value;\n\n // Semicolons and commas can be optional\n skip(\",\", true);\n skip(\";\", true);\n }\n\n return objectResult;\n }\n\n var simpleValue = readValue(true);\n setOption(parent, name, simpleValue);\n return simpleValue;\n // Does not enforce a delimiter to be universal\n }\n\n function setOption(parent, name, value) {\n if (parent.setOption)\n parent.setOption(name, value);\n }\n\n function setParsedOption(parent, name, value, propName) {\n if (parent.setParsedOption)\n parent.setParsedOption(name, value, propName);\n }\n\n function parseInlineOptions(parent) {\n if (skip(\"[\", true)) {\n do {\n parseOption(parent, \"option\");\n } while (skip(\",\", true));\n skip(\"]\");\n }\n return parent;\n }\n\n function parseService(parent, token) {\n\n /* istanbul ignore if */\n if (!nameRe.test(token = next()))\n throw illegal(token, \"service name\");\n\n var service = new Service(token);\n ifBlock(service, function parseService_block(token) {\n if (parseCommon(service, token))\n return;\n\n /* istanbul ignore else */\n if (token === \"rpc\")\n parseMethod(service, token);\n else\n throw illegal(token);\n });\n parent.add(service);\n }\n\n function parseMethod(parent, token) {\n // Get the comment of the preceding line now (if one exists) in case the\n // method is defined across multiple lines.\n var commentText = cmnt();\n\n var type = token;\n\n /* istanbul ignore if */\n if (!nameRe.test(token = next()))\n throw illegal(token, \"name\");\n\n var name = token,\n requestType, requestStream,\n responseType, responseStream;\n\n skip(\"(\");\n if (skip(\"stream\", true))\n requestStream = true;\n\n /* istanbul ignore if */\n if (!typeRefRe.test(token = 
next()))\n throw illegal(token);\n\n requestType = token;\n skip(\")\"); skip(\"returns\"); skip(\"(\");\n if (skip(\"stream\", true))\n responseStream = true;\n\n /* istanbul ignore if */\n if (!typeRefRe.test(token = next()))\n throw illegal(token);\n\n responseType = token;\n skip(\")\");\n\n var method = new Method(name, type, requestType, responseType, requestStream, responseStream);\n method.comment = commentText;\n ifBlock(method, function parseMethod_block(token) {\n\n /* istanbul ignore else */\n if (token === \"option\") {\n parseOption(method, token);\n skip(\";\");\n } else\n throw illegal(token);\n\n });\n parent.add(method);\n }\n\n function parseExtension(parent, token) {\n\n /* istanbul ignore if */\n if (!typeRefRe.test(token = next()))\n throw illegal(token, \"reference\");\n\n var reference = token;\n ifBlock(null, function parseExtension_block(token) {\n switch (token) {\n\n case \"required\":\n case \"repeated\":\n parseField(parent, token, reference);\n break;\n\n case \"optional\":\n /* istanbul ignore if */\n if (isProto3) {\n parseField(parent, \"proto3_optional\", reference);\n } else {\n parseField(parent, \"optional\", reference);\n }\n break;\n\n default:\n /* istanbul ignore if */\n if (!isProto3 || !typeRefRe.test(token))\n throw illegal(token);\n push(token);\n parseField(parent, \"optional\", reference);\n break;\n }\n });\n }\n\n var token;\n while ((token = next()) !== null) {\n switch (token) {\n\n case \"package\":\n\n /* istanbul ignore if */\n if (!head)\n throw illegal(token);\n\n parsePackage();\n break;\n\n case \"import\":\n\n /* istanbul ignore if */\n if (!head)\n throw illegal(token);\n\n parseImport();\n break;\n\n case \"syntax\":\n\n /* istanbul ignore if */\n if (!head)\n throw illegal(token);\n\n parseSyntax();\n break;\n\n case \"option\":\n\n parseOption(ptr, token);\n skip(\";\");\n break;\n\n default:\n\n /* istanbul ignore else */\n if (parseCommon(ptr, token)) {\n head = false;\n continue;\n }\n\n /* istanbul ignore next */\n throw illegal(token);\n }\n }\n\n parse.filename = null;\n return {\n \"package\" : pkg,\n \"imports\" : imports,\n weakImports : weakImports,\n syntax : syntax,\n root : root\n };\n}\n\n/**\n * Parses the given .proto source and returns an object with the parsed contents.\n * @name parse\n * @function\n * @param {string} source Source contents\n * @param {IParseOptions} [options] Parse options. Defaults to {@link parse.defaults} when omitted.\n * @returns {IParserResult} Parser result\n * @property {string} filename=null Currently processing file name for error reporting, if known\n * @property {IParseOptions} defaults Default {@link IParseOptions}\n * @variation 2\n */\n\n\n//# sourceURL=webpack://@waku/noise-rtc/./node_modules/protobufjs/src/parse.js?");
/***/ }),
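/*
 * Illustrative sketch: feeding .proto source text to the parser above. The package
 * and message names are placeholders.
 *
 *   var result = protobuf.parse(
 *       'syntax = "proto3"; package demo; message Ping { string payload = 1; }',
 *       { keepCase: true }                  // keep snake_case field names as-is
 *   );
 *   result.syntax;                          // "proto3"
 *   result["package"];                      // "demo"
 *   var Ping = result.root.lookupType("demo.Ping");
 */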
/***/ "./node_modules/protobufjs/src/reader.js":
/*!***********************************************!*\
!*** ./node_modules/protobufjs/src/reader.js ***!
\***********************************************/
/***/ ((module, __unused_webpack_exports, __webpack_require__) => {
"use strict";
eval("\nmodule.exports = Reader;\n\nvar util = __webpack_require__(/*! ./util/minimal */ \"./node_modules/protobufjs/src/util/minimal.js\");\n\nvar BufferReader; // cyclic\n\nvar LongBits = util.LongBits,\n utf8 = util.utf8;\n\n/* istanbul ignore next */\nfunction indexOutOfRange(reader, writeLength) {\n return RangeError(\"index out of range: \" + reader.pos + \" + \" + (writeLength || 1) + \" > \" + reader.len);\n}\n\n/**\n * Constructs a new reader instance using the specified buffer.\n * @classdesc Wire format reader using `Uint8Array` if available, otherwise `Array`.\n * @constructor\n * @param {Uint8Array} buffer Buffer to read from\n */\nfunction Reader(buffer) {\n\n /**\n * Read buffer.\n * @type {Uint8Array}\n */\n this.buf = buffer;\n\n /**\n * Read buffer position.\n * @type {number}\n */\n this.pos = 0;\n\n /**\n * Read buffer length.\n * @type {number}\n */\n this.len = buffer.length;\n}\n\nvar create_array = typeof Uint8Array !== \"undefined\"\n ? function create_typed_array(buffer) {\n if (buffer instanceof Uint8Array || Array.isArray(buffer))\n return new Reader(buffer);\n throw Error(\"illegal buffer\");\n }\n /* istanbul ignore next */\n : function create_array(buffer) {\n if (Array.isArray(buffer))\n return new Reader(buffer);\n throw Error(\"illegal buffer\");\n };\n\nvar create = function create() {\n return util.Buffer\n ? function create_buffer_setup(buffer) {\n return (Reader.create = function create_buffer(buffer) {\n return util.Buffer.isBuffer(buffer)\n ? new BufferReader(buffer)\n /* istanbul ignore next */\n : create_array(buffer);\n })(buffer);\n }\n /* istanbul ignore next */\n : create_array;\n};\n\n/**\n * Creates a new reader using the specified buffer.\n * @function\n * @param {Uint8Array|Buffer} buffer Buffer to read from\n * @returns {Reader|BufferReader} A {@link BufferReader} if `buffer` is a Buffer, otherwise a {@link Reader}\n * @throws {Error} If `buffer` is not a valid buffer\n */\nReader.create = create();\n\nReader.prototype._slice = util.Array.prototype.subarray || /* istanbul ignore next */ util.Array.prototype.slice;\n\n/**\n * Reads a varint as an unsigned 32 bit value.\n * @function\n * @returns {number} Value read\n */\nReader.prototype.uint32 = (function read_uint32_setup() {\n var value = 4294967295; // optimizer type-hint, tends to deopt otherwise (?!)\n return function read_uint32() {\n value = ( this.buf[this.pos] & 127 ) >>> 0; if (this.buf[this.pos++] < 128) return value;\n value = (value | (this.buf[this.pos] & 127) << 7) >>> 0; if (this.buf[this.pos++] < 128) return value;\n value = (value | (this.buf[this.pos] & 127) << 14) >>> 0; if (this.buf[this.pos++] < 128) return value;\n value = (value | (this.buf[this.pos] & 127) << 21) >>> 0; if (this.buf[this.pos++] < 128) return value;\n value = (value | (this.buf[this.pos] & 15) << 28) >>> 0; if (this.buf[this.pos++] < 128) return value;\n\n /* istanbul ignore if */\n if ((this.pos += 5) > this.len) {\n this.pos = this.len;\n throw indexOutOfRange(this, 10);\n }\n return value;\n };\n})();\n\n/**\n * Reads a varint as a signed 32 bit value.\n * @returns {number} Value read\n */\nReader.prototype.int32 = function read_int32() {\n return this.uint32() | 0;\n};\n\n/**\n * Reads a zig-zag encoded varint as a signed 32 bit value.\n * @returns {number} Value read\n */\nReader.prototype.sint32 = function read_sint32() {\n var value = this.uint32();\n return value >>> 1 ^ -(value & 1) | 0;\n};\n\n/* eslint-disable no-invalid-this */\n\nfunction readLongVarint() {\n // tends to deopt with 
local vars for octet etc.\n var bits = new LongBits(0, 0);\n var i = 0;\n if (this.len - this.pos > 4) { // fast route (lo)\n for (; i < 4; ++i) {\n // 1st..4th\n bits.lo = (bits.lo | (this.buf[this.pos] & 127) << i * 7) >>> 0;\n if (this.buf[this.pos++] < 128)\n return bits;\n }\n // 5th\n bits.lo = (bits.lo | (this.buf[this.pos] & 127) << 28) >>> 0;\n bits.hi = (bits.hi | (this.buf[this.pos] & 127) >> 4) >>> 0;\n if (this.buf[this.pos++] < 128)\n return bits;\n i = 0;\n } else {\n for (; i < 3; ++i) {\n /* istanbul ignore if */\n if (this.pos >= this.len)\n throw indexOutOfRange(this);\n // 1st..3th\n bits.lo = (bits.lo | (this.buf[this.pos] & 127) << i * 7) >>> 0;\n if (this.buf[this.pos++] < 128)\n return bits;\n }\n // 4th\n bits.lo = (bits.lo | (this.buf[this.pos++] & 127) << i * 7) >>> 0;\n return bits;\n }\n if (this.len - this.pos > 4) { // fast route (hi)\n for (; i < 5; ++i) {\n // 6th..10th\n bits.hi = (bits.hi | (this.buf[this.pos] & 127) << i * 7 + 3) >>> 0;\n if (this.buf[this.pos++] < 128)\n return bits;\n }\n } else {\n for (; i < 5; ++i) {\n /* istanbul ignore if */\n if (this.pos >= this.len)\n throw indexOutOfRange(this);\n // 6th..10th\n bits.hi = (bits.hi | (this.buf[this.pos] & 127) << i * 7 + 3) >>> 0;\n if (this.buf[this.pos++] < 128)\n return bits;\n }\n }\n /* istanbul ignore next */\n throw Error(\"invalid varint encoding\");\n}\n\n/* eslint-enable no-invalid-this */\n\n/**\n * Reads a varint as a signed 64 bit value.\n * @name Reader#int64\n * @function\n * @returns {Long} Value read\n */\n\n/**\n * Reads a varint as an unsigned 64 bit value.\n * @name Reader#uint64\n * @function\n * @returns {Long} Value read\n */\n\n/**\n * Reads a zig-zag encoded varint as a signed 64 bit value.\n * @name Reader#sint64\n * @function\n * @returns {Long} Value read\n */\n\n/**\n * Reads a varint as a boolean.\n * @returns {boolean} Value read\n */\nReader.prototype.bool = function read_bool() {\n return this.uint32() !== 0;\n};\n\nfunction readFixed32_end(buf, end) { // note that this uses `end`, not `pos`\n return (buf[end - 4]\n | buf[end - 3] << 8\n | buf[end - 2] << 16\n | buf[end - 1] << 24) >>> 0;\n}\n\n/**\n * Reads fixed 32 bits as an unsigned 32 bit integer.\n * @returns {number} Value read\n */\nReader.prototype.fixed32 = function read_fixed32() {\n\n /* istanbul ignore if */\n if (this.pos + 4 > this.len)\n throw indexOutOfRange(this, 4);\n\n return readFixed32_end(this.buf, this.pos += 4);\n};\n\n/**\n * Reads fixed 32 bits as a signed 32 bit integer.\n * @returns {number} Value read\n */\nReader.prototype.sfixed32 = function read_sfixed32() {\n\n /* istanbul ignore if */\n if (this.pos + 4 > this.len)\n throw indexOutOfRange(this, 4);\n\n return readFixed32_end(this.buf, this.pos += 4) | 0;\n};\n\n/* eslint-disable no-invalid-this */\n\nfunction readFixed64(/* this: Reader */) {\n\n /* istanbul ignore if */\n if (this.pos + 8 > this.len)\n throw indexOutOfRange(this, 8);\n\n return new LongBits(readFixed32_end(this.buf, this.pos += 4), readFixed32_end(this.buf, this.pos += 4));\n}\n\n/* eslint-enable no-invalid-this */\n\n/**\n * Reads fixed 64 bits.\n * @name Reader#fixed64\n * @function\n * @returns {Long} Value read\n */\n\n/**\n * Reads zig-zag encoded fixed 64 bits.\n * @name Reader#sfixed64\n * @function\n * @returns {Long} Value read\n */\n\n/**\n * Reads a float (32 bit) as a number.\n * @function\n * @returns {number} Value read\n */\nReader.prototype.float = function read_float() {\n\n /* istanbul ignore if */\n if (this.pos + 4 > this.len)\n throw 
indexOutOfRange(this, 4);\n\n var value = util.float.readFloatLE(this.buf, this.pos);\n this.pos += 4;\n return value;\n};\n\n/**\n * Reads a double (64 bit float) as a number.\n * @function\n * @returns {number} Value read\n */\nReader.prototype.double = function read_double() {\n\n /* istanbul ignore if */\n if (this.pos + 8 > this.len)\n throw indexOutOfRange(this, 4);\n\n var value = util.float.readDoubleLE(this.buf, this.pos);\n this.pos += 8;\n return value;\n};\n\n/**\n * Reads a sequence of bytes preceeded by its length as a varint.\n * @returns {Uint8Array} Value read\n */\nReader.prototype.bytes = function read_bytes() {\n var length = this.uint32(),\n start = this.pos,\n end = this.pos + length;\n\n /* istanbul ignore if */\n if (end > this.len)\n throw indexOutOfRange(this, length);\n\n this.pos += length;\n if (Array.isArray(this.buf)) // plain array\n return this.buf.slice(start, end);\n return start === end // fix for IE 10/Win8 and others' subarray returning array of size 1\n ? new this.buf.constructor(0)\n : this._slice.call(this.buf, start, end);\n};\n\n/**\n * Reads a string preceeded by its byte length as a varint.\n * @returns {string} Value read\n */\nReader.prototype.string = function read_string() {\n var bytes = this.bytes();\n return utf8.read(bytes, 0, bytes.length);\n};\n\n/**\n * Skips the specified number of bytes if specified, otherwise skips a varint.\n * @param {number} [length] Length if known, otherwise a varint is assumed\n * @returns {Reader} `this`\n */\nReader.prototype.skip = function skip(length) {\n if (typeof length === \"number\") {\n /* istanbul ignore if */\n if (this.pos + length > this.len)\n throw indexOutOfRange(this, length);\n this.pos += length;\n } else {\n do {\n /* istanbul ignore if */\n if (this.pos >= this.len)\n throw indexOutOfRange(this);\n } while (this.buf[this.pos++] & 128);\n }\n return this;\n};\n\n/**\n * Skips the next element of the specified wire type.\n * @param {number} wireType Wire type received\n * @returns {Reader} `this`\n */\nReader.prototype.skipType = function(wireType) {\n switch (wireType) {\n case 0:\n this.skip();\n break;\n case 1:\n this.skip(8);\n break;\n case 2:\n this.skip(this.uint32());\n break;\n case 3:\n while ((wireType = this.uint32() & 7) !== 4) {\n this.skipType(wireType);\n }\n break;\n case 5:\n this.skip(4);\n break;\n\n /* istanbul ignore next */\n default:\n throw Error(\"invalid wire type \" + wireType + \" at offset \" + this.pos);\n }\n return this;\n};\n\nReader._configure = function(BufferReader_) {\n BufferReader = BufferReader_;\n Reader.create = create();\n BufferReader._configure();\n\n var fn = util.Long ? \"toLong\" : /* istanbul ignore next */ \"toNumber\";\n util.merge(Reader.prototype, {\n\n int64: function read_int64() {\n return readLongVarint.call(this)[fn](false);\n },\n\n uint64: function read_uint64() {\n return readLongVarint.call(this)[fn](true);\n },\n\n sint64: function read_sint64() {\n return readLongVarint.call(this).zzDecode()[fn](false);\n },\n\n fixed64: function read_fixed64() {\n return readFixed64.call(this)[fn](true);\n },\n\n sfixed64: function read_sfixed64() {\n return readFixed64.call(this)[fn](false);\n }\n\n });\n};\n\n\n//# sourceURL=webpack://@waku/noise-rtc/./node_modules/protobufjs/src/reader.js?");
/***/ }),
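/*
 * Illustrative sketch: decoding a wire-format buffer by hand with the Reader above.
 * The three bytes encode field 1 (varint) with value 150.
 *
 *   var reader = protobuf.Reader.create(new Uint8Array([0x08, 0x96, 0x01]));
 *   var tag = reader.uint32();   // 8 -> field number 1 (tag >>> 3), wire type 0 (tag & 7)
 *   reader.uint32();             // 150
 *   reader.pos === reader.len;   // true, buffer fully consumed
 */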
/***/ "./node_modules/protobufjs/src/reader_buffer.js":
/*!******************************************************!*\
!*** ./node_modules/protobufjs/src/reader_buffer.js ***!
\******************************************************/
/***/ ((module, __unused_webpack_exports, __webpack_require__) => {
"use strict";
eval("\nmodule.exports = BufferReader;\n\n// extends Reader\nvar Reader = __webpack_require__(/*! ./reader */ \"./node_modules/protobufjs/src/reader.js\");\n(BufferReader.prototype = Object.create(Reader.prototype)).constructor = BufferReader;\n\nvar util = __webpack_require__(/*! ./util/minimal */ \"./node_modules/protobufjs/src/util/minimal.js\");\n\n/**\n * Constructs a new buffer reader instance.\n * @classdesc Wire format reader using node buffers.\n * @extends Reader\n * @constructor\n * @param {Buffer} buffer Buffer to read from\n */\nfunction BufferReader(buffer) {\n Reader.call(this, buffer);\n\n /**\n * Read buffer.\n * @name BufferReader#buf\n * @type {Buffer}\n */\n}\n\nBufferReader._configure = function () {\n /* istanbul ignore else */\n if (util.Buffer)\n BufferReader.prototype._slice = util.Buffer.prototype.slice;\n};\n\n\n/**\n * @override\n */\nBufferReader.prototype.string = function read_string_buffer() {\n var len = this.uint32(); // modifies pos\n return this.buf.utf8Slice\n ? this.buf.utf8Slice(this.pos, this.pos = Math.min(this.pos + len, this.len))\n : this.buf.toString(\"utf-8\", this.pos, this.pos = Math.min(this.pos + len, this.len));\n};\n\n/**\n * Reads a sequence of bytes preceeded by its length as a varint.\n * @name BufferReader#bytes\n * @function\n * @returns {Buffer} Value read\n */\n\nBufferReader._configure();\n\n\n//# sourceURL=webpack://@waku/noise-rtc/./node_modules/protobufjs/src/reader_buffer.js?");
/***/ }),
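/*
 * Illustrative sketch (Node.js only): when a Buffer is passed, Reader.create hands
 * back the BufferReader subclass above, which uses the Buffer's own UTF-8 slicing.
 * The bytes encode field 1 (length-delimited) carrying the string "hi".
 *
 *   var reader = protobuf.Reader.create(Buffer.from([0x0a, 0x02, 0x68, 0x69]));
 *   reader.uint32();             // 10 -> field 1, wire type 2
 *   reader.string();             // "hi"
 */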
/***/ "./node_modules/protobufjs/src/root.js":
/*!*********************************************!*\
!*** ./node_modules/protobufjs/src/root.js ***!
\*********************************************/
/***/ ((module, __unused_webpack_exports, __webpack_require__) => {
"use strict";
eval("\nmodule.exports = Root;\n\n// extends Namespace\nvar Namespace = __webpack_require__(/*! ./namespace */ \"./node_modules/protobufjs/src/namespace.js\");\n((Root.prototype = Object.create(Namespace.prototype)).constructor = Root).className = \"Root\";\n\nvar Field = __webpack_require__(/*! ./field */ \"./node_modules/protobufjs/src/field.js\"),\n Enum = __webpack_require__(/*! ./enum */ \"./node_modules/protobufjs/src/enum.js\"),\n OneOf = __webpack_require__(/*! ./oneof */ \"./node_modules/protobufjs/src/oneof.js\"),\n util = __webpack_require__(/*! ./util */ \"./node_modules/protobufjs/src/util.js\");\n\nvar Type, // cyclic\n parse, // might be excluded\n common; // \"\n\n/**\n * Constructs a new root namespace instance.\n * @classdesc Root namespace wrapping all types, enums, services, sub-namespaces etc. that belong together.\n * @extends NamespaceBase\n * @constructor\n * @param {Object.} [options] Top level options\n */\nfunction Root(options) {\n Namespace.call(this, \"\", options);\n\n /**\n * Deferred extension fields.\n * @type {Field[]}\n */\n this.deferred = [];\n\n /**\n * Resolved file names of loaded files.\n * @type {string[]}\n */\n this.files = [];\n}\n\n/**\n * Loads a namespace descriptor into a root namespace.\n * @param {INamespace} json Nameespace descriptor\n * @param {Root} [root] Root namespace, defaults to create a new one if omitted\n * @returns {Root} Root namespace\n */\nRoot.fromJSON = function fromJSON(json, root) {\n if (!root)\n root = new Root();\n if (json.options)\n root.setOptions(json.options);\n return root.addJSON(json.nested);\n};\n\n/**\n * Resolves the path of an imported file, relative to the importing origin.\n * This method exists so you can override it with your own logic in case your imports are scattered over multiple directories.\n * @function\n * @param {string} origin The file name of the importing file\n * @param {string} target The file name being imported\n * @returns {string|null} Resolved path to `target` or `null` to skip the file\n */\nRoot.prototype.resolvePath = util.path.resolve;\n\n/**\n * Fetch content from file path or url\n * This method exists so you can override it with your own logic.\n * @function\n * @param {string} path File path or url\n * @param {FetchCallback} callback Callback function\n * @returns {undefined}\n */\nRoot.prototype.fetch = util.fetch;\n\n// A symbol-like function to safely signal synchronous loading\n/* istanbul ignore next */\nfunction SYNC() {} // eslint-disable-line no-empty-function\n\n/**\n * Loads one or multiple .proto or preprocessed .json files into this root namespace and calls the callback.\n * @param {string|string[]} filename Names of one or multiple files to load\n * @param {IParseOptions} options Parse options\n * @param {LoadCallback} callback Callback function\n * @returns {undefined}\n */\nRoot.prototype.load = function load(filename, options, callback) {\n if (typeof options === \"function\") {\n callback = options;\n options = undefined;\n }\n var self = this;\n if (!callback)\n return util.asPromise(load, self, filename, options);\n\n var sync = callback === SYNC; // undocumented\n\n // Finishes loading by calling the callback (exactly once)\n function finish(err, root) {\n /* istanbul ignore if */\n if (!callback)\n return;\n var cb = callback;\n callback = null;\n if (sync)\n throw err;\n cb(err, root);\n }\n\n // Bundled definition existence checking\n function getBundledFileName(filename) {\n var idx = filename.lastIndexOf(\"google/protobuf/\");\n if (idx > -1) {\n 
var altname = filename.substring(idx);\n if (altname in common) return altname;\n }\n return null;\n }\n\n // Processes a single file\n function process(filename, source) {\n try {\n if (util.isString(source) && source.charAt(0) === \"{\")\n source = JSON.parse(source);\n if (!util.isString(source))\n self.setOptions(source.options).addJSON(source.nested);\n else {\n parse.filename = filename;\n var parsed = parse(source, self, options),\n resolved,\n i = 0;\n if (parsed.imports)\n for (; i < parsed.imports.length; ++i)\n if (resolved = getBundledFileName(parsed.imports[i]) || self.resolvePath(filename, parsed.imports[i]))\n fetch(resolved);\n if (parsed.weakImports)\n for (i = 0; i < parsed.weakImports.length; ++i)\n if (resolved = getBundledFileName(parsed.weakImports[i]) || self.resolvePath(filename, parsed.weakImports[i]))\n fetch(resolved, true);\n }\n } catch (err) {\n finish(err);\n }\n if (!sync && !queued)\n finish(null, self); // only once anyway\n }\n\n // Fetches a single file\n function fetch(filename, weak) {\n\n // Skip if already loaded / attempted\n if (self.files.indexOf(filename) > -1)\n return;\n self.files.push(filename);\n\n // Shortcut bundled definitions\n if (filename in common) {\n if (sync)\n process(filename, common[filename]);\n else {\n ++queued;\n setTimeout(function() {\n --queued;\n process(filename, common[filename]);\n });\n }\n return;\n }\n\n // Otherwise fetch from disk or network\n if (sync) {\n var source;\n try {\n source = util.fs.readFileSync(filename).toString(\"utf8\");\n } catch (err) {\n if (!weak)\n finish(err);\n return;\n }\n process(filename, source);\n } else {\n ++queued;\n self.fetch(filename, function(err, source) {\n --queued;\n /* istanbul ignore if */\n if (!callback)\n return; // terminated meanwhile\n if (err) {\n /* istanbul ignore else */\n if (!weak)\n finish(err);\n else if (!queued) // can't be covered reliably\n finish(null, self);\n return;\n }\n process(filename, source);\n });\n }\n }\n var queued = 0;\n\n // Assembling the root namespace doesn't require working type\n // references anymore, so we can load everything in parallel\n if (util.isString(filename))\n filename = [ filename ];\n for (var i = 0, resolved; i < filename.length; ++i)\n if (resolved = self.resolvePath(\"\", filename[i]))\n fetch(resolved);\n\n if (sync)\n return self;\n if (!queued)\n finish(null, self);\n return undefined;\n};\n// function load(filename:string, options:IParseOptions, callback:LoadCallback):undefined\n\n/**\n * Loads one or multiple .proto or preprocessed .json files into this root namespace and calls the callback.\n * @function Root#load\n * @param {string|string[]} filename Names of one or multiple files to load\n * @param {LoadCallback} callback Callback function\n * @returns {undefined}\n * @variation 2\n */\n// function load(filename:string, callback:LoadCallback):undefined\n\n/**\n * Loads one or multiple .proto or preprocessed .json files into this root namespace and returns a promise.\n * @function Root#load\n * @param {string|string[]} filename Names of one or multiple files to load\n * @param {IParseOptions} [options] Parse options. 
Defaults to {@link parse.defaults} when omitted.\n * @returns {Promise} Promise\n * @variation 3\n */\n// function load(filename:string, [options:IParseOptions]):Promise\n\n/**\n * Synchronously loads one or multiple .proto or preprocessed .json files into this root namespace (node only).\n * @function Root#loadSync\n * @param {string|string[]} filename Names of one or multiple files to load\n * @param {IParseOptions} [options] Parse options. Defaults to {@link parse.defaults} when omitted.\n * @returns {Root} Root namespace\n * @throws {Error} If synchronous fetching is not supported (i.e. in browsers) or if a file's syntax is invalid\n */\nRoot.prototype.loadSync = function loadSync(filename, options) {\n if (!util.isNode)\n throw Error(\"not supported\");\n return this.load(filename, options, SYNC);\n};\n\n/**\n * @override\n */\nRoot.prototype.resolveAll = function resolveAll() {\n if (this.deferred.length)\n throw Error(\"unresolvable extensions: \" + this.deferred.map(function(field) {\n return \"'extend \" + field.extend + \"' in \" + field.parent.fullName;\n }).join(\", \"));\n return Namespace.prototype.resolveAll.call(this);\n};\n\n// only uppercased (and thus conflict-free) children are exposed, see below\nvar exposeRe = /^[A-Z]/;\n\n/**\n * Handles a deferred declaring extension field by creating a sister field to represent it within its extended type.\n * @param {Root} root Root instance\n * @param {Field} field Declaring extension field witin the declaring type\n * @returns {boolean} `true` if successfully added to the extended type, `false` otherwise\n * @inner\n * @ignore\n */\nfunction tryHandleExtension(root, field) {\n var extendedType = field.parent.lookup(field.extend);\n if (extendedType) {\n var sisterField = new Field(field.fullName, field.id, field.type, field.rule, undefined, field.options);\n sisterField.declaringField = field;\n field.extensionField = sisterField;\n extendedType.add(sisterField);\n return true;\n }\n return false;\n}\n\n/**\n * Called when any object is added to this root or its sub-namespaces.\n * @param {ReflectionObject} object Object added\n * @returns {undefined}\n * @private\n */\nRoot.prototype._handleAdd = function _handleAdd(object) {\n if (object instanceof Field) {\n\n if (/* an extension field (implies not part of a oneof) */ object.extend !== undefined && /* not already handled */ !object.extensionField)\n if (!tryHandleExtension(this, object))\n this.deferred.push(object);\n\n } else if (object instanceof Enum) {\n\n if (exposeRe.test(object.name))\n object.parent[object.name] = object.values; // expose enum values as property of its parent\n\n } else if (!(object instanceof OneOf)) /* everything else is a namespace */ {\n\n if (object instanceof Type) // Try to handle any deferred extensions\n for (var i = 0; i < this.deferred.length;)\n if (tryHandleExtension(this, this.deferred[i]))\n this.deferred.splice(i, 1);\n else\n ++i;\n for (var j = 0; j < /* initializes */ object.nestedArray.length; ++j) // recurse into the namespace\n this._handleAdd(object._nestedArray[j]);\n if (exposeRe.test(object.name))\n object.parent[object.name] = object; // expose namespace as property of its parent\n }\n\n // The above also adds uppercased (and thus conflict-free) nested types, services and enums as\n // properties of namespaces just like static code does. 
This allows using a .d.ts generated for\n // a static module with reflection-based solutions where the condition is met.\n};\n\n/**\n * Called when any object is removed from this root or its sub-namespaces.\n * @param {ReflectionObject} object Object removed\n * @returns {undefined}\n * @private\n */\nRoot.prototype._handleRemove = function _handleRemove(object) {\n if (object instanceof Field) {\n\n if (/* an extension field */ object.extend !== undefined) {\n if (/* already handled */ object.extensionField) { // remove its sister field\n object.extensionField.parent.remove(object.extensionField);\n object.extensionField = null;\n } else { // cancel the extension\n var index = this.deferred.indexOf(object);\n /* istanbul ignore else */\n if (index > -1)\n this.deferred.splice(index, 1);\n }\n }\n\n } else if (object instanceof Enum) {\n\n if (exposeRe.test(object.name))\n delete object.parent[object.name]; // unexpose enum values\n\n } else if (object instanceof Namespace) {\n\n for (var i = 0; i < /* initializes */ object.nestedArray.length; ++i) // recurse into the namespace\n this._handleRemove(object._nestedArray[i]);\n\n if (exposeRe.test(object.name))\n delete object.parent[object.name]; // unexpose namespaces\n\n }\n};\n\n// Sets up cyclic dependencies (called in index-light)\nRoot._configure = function(Type_, parse_, common_) {\n Type = Type_;\n parse = parse_;\n common = common_;\n};\n\n\n//# sourceURL=webpack://@waku/noise-rtc/./node_modules/protobufjs/src/root.js?");
/***/ }),
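/* Usage sketch (editor's illustration, not emitted by webpack): Root above drives .proto
 * loading. Assuming `protobuf` is this bundle's protobufjs export and a hypothetical
 * "awesome.proto" defines awesomepackage.AwesomeMessage:
 *
 *   var root = new protobuf.Root();
 *
 *   // asynchronous, callback style (Root#load)
 *   root.load("awesome.proto", function(err, root) {
 *     if (err) throw err;
 *     var AwesomeMessage = root.lookupType("awesomepackage.AwesomeMessage"); // Namespace#lookupType, defined elsewhere in this bundle
 *   });
 *
 *   // alternatively, promise style: omitting the callback returns a Promise
 *   root.load("awesome.proto").then(function(root) { console.log(root.files); });
 *
 *   // synchronous, node only (Root#loadSync throws in browsers)
 *   var syncRoot = new protobuf.Root().loadSync("awesome.proto");
 *
 * Root#resolvePath may be overridden to remap import paths before they are fetched.
 */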
/***/ "./node_modules/protobufjs/src/roots.js":
/*!**********************************************!*\
!*** ./node_modules/protobufjs/src/roots.js ***!
\**********************************************/
/***/ ((module) => {
"use strict";
eval("\nmodule.exports = {};\n\n/**\n * Named roots.\n * This is where pbjs stores generated structures (the option `-r, --root` specifies a name).\n * Can also be used manually to make roots available across modules.\n * @name roots\n * @type {Object.}\n * @example\n * // pbjs -r myroot -o compiled.js ...\n *\n * // in another module:\n * require(\"./compiled.js\");\n *\n * // in any subsequent module:\n * var root = protobuf.roots[\"myroot\"];\n */\n\n\n//# sourceURL=webpack://@waku/noise-rtc/./node_modules/protobufjs/src/roots.js?");
/***/ }),
/***/ "./node_modules/protobufjs/src/rpc.js":
/*!********************************************!*\
!*** ./node_modules/protobufjs/src/rpc.js ***!
\********************************************/
/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
"use strict";
eval("\n\n/**\n * Streaming RPC helpers.\n * @namespace\n */\nvar rpc = exports;\n\n/**\n * RPC implementation passed to {@link Service#create} performing a service request on network level, i.e. by utilizing http requests or websockets.\n * @typedef RPCImpl\n * @type {function}\n * @param {Method|rpc.ServiceMethod,Message<{}>>} method Reflected or static method being called\n * @param {Uint8Array} requestData Request data\n * @param {RPCImplCallback} callback Callback function\n * @returns {undefined}\n * @example\n * function rpcImpl(method, requestData, callback) {\n * if (protobuf.util.lcFirst(method.name) !== \"myMethod\") // compatible with static code\n * throw Error(\"no such method\");\n * asynchronouslyObtainAResponse(requestData, function(err, responseData) {\n * callback(err, responseData);\n * });\n * }\n */\n\n/**\n * Node-style callback as used by {@link RPCImpl}.\n * @typedef RPCImplCallback\n * @type {function}\n * @param {Error|null} error Error, if any, otherwise `null`\n * @param {Uint8Array|null} [response] Response data or `null` to signal end of stream, if there hasn't been an error\n * @returns {undefined}\n */\n\nrpc.Service = __webpack_require__(/*! ./rpc/service */ \"./node_modules/protobufjs/src/rpc/service.js\");\n\n\n//# sourceURL=webpack://@waku/noise-rtc/./node_modules/protobufjs/src/rpc.js?");
/***/ }),
/***/ "./node_modules/protobufjs/src/rpc/service.js":
/*!****************************************************!*\
!*** ./node_modules/protobufjs/src/rpc/service.js ***!
\****************************************************/
/***/ ((module, __unused_webpack_exports, __webpack_require__) => {
"use strict";
eval("\nmodule.exports = Service;\n\nvar util = __webpack_require__(/*! ../util/minimal */ \"./node_modules/protobufjs/src/util/minimal.js\");\n\n// Extends EventEmitter\n(Service.prototype = Object.create(util.EventEmitter.prototype)).constructor = Service;\n\n/**\n * A service method callback as used by {@link rpc.ServiceMethod|ServiceMethod}.\n *\n * Differs from {@link RPCImplCallback} in that it is an actual callback of a service method which may not return `response = null`.\n * @typedef rpc.ServiceMethodCallback\n * @template TRes extends Message\n * @type {function}\n * @param {Error|null} error Error, if any\n * @param {TRes} [response] Response message\n * @returns {undefined}\n */\n\n/**\n * A service method part of a {@link rpc.Service} as created by {@link Service.create}.\n * @typedef rpc.ServiceMethod\n * @template TReq extends Message\n * @template TRes extends Message\n * @type {function}\n * @param {TReq|Properties} request Request message or plain object\n * @param {rpc.ServiceMethodCallback} [callback] Node-style callback called with the error, if any, and the response message\n * @returns {Promise>} Promise if `callback` has been omitted, otherwise `undefined`\n */\n\n/**\n * Constructs a new RPC service instance.\n * @classdesc An RPC service as returned by {@link Service#create}.\n * @exports rpc.Service\n * @extends util.EventEmitter\n * @constructor\n * @param {RPCImpl} rpcImpl RPC implementation\n * @param {boolean} [requestDelimited=false] Whether requests are length-delimited\n * @param {boolean} [responseDelimited=false] Whether responses are length-delimited\n */\nfunction Service(rpcImpl, requestDelimited, responseDelimited) {\n\n if (typeof rpcImpl !== \"function\")\n throw TypeError(\"rpcImpl must be a function\");\n\n util.EventEmitter.call(this);\n\n /**\n * RPC implementation. Becomes `null` once the service is ended.\n * @type {RPCImpl|null}\n */\n this.rpcImpl = rpcImpl;\n\n /**\n * Whether requests are length-delimited.\n * @type {boolean}\n */\n this.requestDelimited = Boolean(requestDelimited);\n\n /**\n * Whether responses are length-delimited.\n * @type {boolean}\n */\n this.responseDelimited = Boolean(responseDelimited);\n}\n\n/**\n * Calls a service method through {@link rpc.Service#rpcImpl|rpcImpl}.\n * @param {Method|rpc.ServiceMethod} method Reflected or static method\n * @param {Constructor} requestCtor Request constructor\n * @param {Constructor} responseCtor Response constructor\n * @param {TReq|Properties} request Request message or plain object\n * @param {rpc.ServiceMethodCallback} callback Service callback\n * @returns {undefined}\n * @template TReq extends Message\n * @template TRes extends Message\n */\nService.prototype.rpcCall = function rpcCall(method, requestCtor, responseCtor, request, callback) {\n\n if (!request)\n throw TypeError(\"request must be specified\");\n\n var self = this;\n if (!callback)\n return util.asPromise(rpcCall, self, method, requestCtor, responseCtor, request);\n\n if (!self.rpcImpl) {\n setTimeout(function() { callback(Error(\"already ended\")); }, 0);\n return undefined;\n }\n\n try {\n return self.rpcImpl(\n method,\n requestCtor[self.requestDelimited ? 
\"encodeDelimited\" : \"encode\"](request).finish(),\n function rpcCallback(err, response) {\n\n if (err) {\n self.emit(\"error\", err, method);\n return callback(err);\n }\n\n if (response === null) {\n self.end(/* endedByRPC */ true);\n return undefined;\n }\n\n if (!(response instanceof responseCtor)) {\n try {\n response = responseCtor[self.responseDelimited ? \"decodeDelimited\" : \"decode\"](response);\n } catch (err) {\n self.emit(\"error\", err, method);\n return callback(err);\n }\n }\n\n self.emit(\"data\", response, method);\n return callback(null, response);\n }\n );\n } catch (err) {\n self.emit(\"error\", err, method);\n setTimeout(function() { callback(err); }, 0);\n return undefined;\n }\n};\n\n/**\n * Ends this service and emits the `end` event.\n * @param {boolean} [endedByRPC=false] Whether the service has been ended by the RPC implementation.\n * @returns {rpc.Service} `this`\n */\nService.prototype.end = function end(endedByRPC) {\n if (this.rpcImpl) {\n if (!endedByRPC) // signal end to rpcImpl\n this.rpcImpl(null, null, null);\n this.rpcImpl = null;\n this.emit(\"end\").off();\n }\n return this;\n};\n\n\n//# sourceURL=webpack://@waku/noise-rtc/./node_modules/protobufjs/src/rpc/service.js?");
/***/ }),
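/* Usage sketch (editor's illustration): rpc.Service above bridges reflected/static service
 * methods and a user-supplied RPCImpl transport. A minimal stub rpcImpl (names hypothetical),
 * assuming `protobuf` is this bundle's protobufjs export:
 *
 *   function rpcImpl(method, requestData, callback) {
 *     if (!method) return; // end() signals termination with (null, null, null)
 *     // a real transport would send requestData and pass back the response bytes
 *     setTimeout(function() { callback(null, requestData); }, 0);
 *   }
 *
 *   var svc = new protobuf.rpc.Service(rpcImpl, false, false);
 *   svc.on("data", function(response, method) { console.log("response for", method.name); });
 *   svc.on("error", function(err) { console.error(err); });
 *   // rpcCall is normally invoked for you by the reflected Service#create (see service.js below);
 *   // calling svc.end() detaches rpcImpl and emits "end":
 *   svc.end();
 */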
/***/ "./node_modules/protobufjs/src/service.js":
/*!************************************************!*\
!*** ./node_modules/protobufjs/src/service.js ***!
\************************************************/
/***/ ((module, __unused_webpack_exports, __webpack_require__) => {
"use strict";
eval("\nmodule.exports = Service;\n\n// extends Namespace\nvar Namespace = __webpack_require__(/*! ./namespace */ \"./node_modules/protobufjs/src/namespace.js\");\n((Service.prototype = Object.create(Namespace.prototype)).constructor = Service).className = \"Service\";\n\nvar Method = __webpack_require__(/*! ./method */ \"./node_modules/protobufjs/src/method.js\"),\n util = __webpack_require__(/*! ./util */ \"./node_modules/protobufjs/src/util.js\"),\n rpc = __webpack_require__(/*! ./rpc */ \"./node_modules/protobufjs/src/rpc.js\");\n\n/**\n * Constructs a new service instance.\n * @classdesc Reflected service.\n * @extends NamespaceBase\n * @constructor\n * @param {string} name Service name\n * @param {Object.} [options] Service options\n * @throws {TypeError} If arguments are invalid\n */\nfunction Service(name, options) {\n Namespace.call(this, name, options);\n\n /**\n * Service methods.\n * @type {Object.}\n */\n this.methods = {}; // toJSON, marker\n\n /**\n * Cached methods as an array.\n * @type {Method[]|null}\n * @private\n */\n this._methodsArray = null;\n}\n\n/**\n * Service descriptor.\n * @interface IService\n * @extends INamespace\n * @property {Object.} methods Method descriptors\n */\n\n/**\n * Constructs a service from a service descriptor.\n * @param {string} name Service name\n * @param {IService} json Service descriptor\n * @returns {Service} Created service\n * @throws {TypeError} If arguments are invalid\n */\nService.fromJSON = function fromJSON(name, json) {\n var service = new Service(name, json.options);\n /* istanbul ignore else */\n if (json.methods)\n for (var names = Object.keys(json.methods), i = 0; i < names.length; ++i)\n service.add(Method.fromJSON(names[i], json.methods[names[i]]));\n if (json.nested)\n service.addJSON(json.nested);\n service.comment = json.comment;\n return service;\n};\n\n/**\n * Converts this service to a service descriptor.\n * @param {IToJSONOptions} [toJSONOptions] JSON conversion options\n * @returns {IService} Service descriptor\n */\nService.prototype.toJSON = function toJSON(toJSONOptions) {\n var inherited = Namespace.prototype.toJSON.call(this, toJSONOptions);\n var keepComments = toJSONOptions ? Boolean(toJSONOptions.keepComments) : false;\n return util.toObject([\n \"options\" , inherited && inherited.options || undefined,\n \"methods\" , Namespace.arrayToJSON(this.methodsArray, toJSONOptions) || /* istanbul ignore next */ {},\n \"nested\" , inherited && inherited.nested || undefined,\n \"comment\" , keepComments ? 
this.comment : undefined\n ]);\n};\n\n/**\n * Methods of this service as an array for iteration.\n * @name Service#methodsArray\n * @type {Method[]}\n * @readonly\n */\nObject.defineProperty(Service.prototype, \"methodsArray\", {\n get: function() {\n return this._methodsArray || (this._methodsArray = util.toArray(this.methods));\n }\n});\n\nfunction clearCache(service) {\n service._methodsArray = null;\n return service;\n}\n\n/**\n * @override\n */\nService.prototype.get = function get(name) {\n return this.methods[name]\n || Namespace.prototype.get.call(this, name);\n};\n\n/**\n * @override\n */\nService.prototype.resolveAll = function resolveAll() {\n var methods = this.methodsArray;\n for (var i = 0; i < methods.length; ++i)\n methods[i].resolve();\n return Namespace.prototype.resolve.call(this);\n};\n\n/**\n * @override\n */\nService.prototype.add = function add(object) {\n\n /* istanbul ignore if */\n if (this.get(object.name))\n throw Error(\"duplicate name '\" + object.name + \"' in \" + this);\n\n if (object instanceof Method) {\n this.methods[object.name] = object;\n object.parent = this;\n return clearCache(this);\n }\n return Namespace.prototype.add.call(this, object);\n};\n\n/**\n * @override\n */\nService.prototype.remove = function remove(object) {\n if (object instanceof Method) {\n\n /* istanbul ignore if */\n if (this.methods[object.name] !== object)\n throw Error(object + \" is not a member of \" + this);\n\n delete this.methods[object.name];\n object.parent = null;\n return clearCache(this);\n }\n return Namespace.prototype.remove.call(this, object);\n};\n\n/**\n * Creates a runtime service using the specified rpc implementation.\n * @param {RPCImpl} rpcImpl RPC implementation\n * @param {boolean} [requestDelimited=false] Whether requests are length-delimited\n * @param {boolean} [responseDelimited=false] Whether responses are length-delimited\n * @returns {rpc.Service} RPC service. Useful where requests and/or responses are streamed.\n */\nService.prototype.create = function create(rpcImpl, requestDelimited, responseDelimited) {\n var rpcService = new rpc.Service(rpcImpl, requestDelimited, responseDelimited);\n for (var i = 0, method; i < /* initializes */ this.methodsArray.length; ++i) {\n var methodName = util.lcFirst((method = this._methodsArray[i]).resolve().name).replace(/[^$\\w_]/g, \"\");\n rpcService[methodName] = util.codegen([\"r\",\"c\"], util.isReserved(methodName) ? methodName + \"_\" : methodName)(\"return this.rpcCall(m,q,s,r,c)\")({\n m: method,\n q: method.resolvedRequestType.ctor,\n s: method.resolvedResponseType.ctor\n });\n }\n return rpcService;\n};\n\n\n//# sourceURL=webpack://@waku/noise-rtc/./node_modules/protobufjs/src/service.js?");
/***/ }),
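/* Usage sketch (editor's illustration, names hypothetical): the reflected Service's create()
 * above wraps each method in an rpc.Service, exposing it under its lcFirst'd name
 * (rpcImpl as sketched after rpc/service.js above):
 *
 *   // given a loaded root containing:  service Greeter { rpc SayHello (Req) returns (Res); }
 *   var Greeter = root.lookup("example.Greeter");         // reflected Service
 *   var greeter = Greeter.create(rpcImpl, false, false);  // rpc.Service instance
 *   greeter.sayHello({ name: "world" }, function(err, res) {
 *     if (err) throw err;
 *     console.log(res);
 *   });
 *   // omitting the callback returns a Promise instead (util.asPromise in rpcCall)
 */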
/***/ "./node_modules/protobufjs/src/tokenize.js":
/*!*************************************************!*\
!*** ./node_modules/protobufjs/src/tokenize.js ***!
\*************************************************/
/***/ ((module) => {
"use strict";
eval("\nmodule.exports = tokenize;\n\nvar delimRe = /[\\s{}=;:[\\],'\"()<>]/g,\n stringDoubleRe = /(?:\"([^\"\\\\]*(?:\\\\.[^\"\\\\]*)*)\")/g,\n stringSingleRe = /(?:'([^'\\\\]*(?:\\\\.[^'\\\\]*)*)')/g;\n\nvar setCommentRe = /^ *[*/]+ */,\n setCommentAltRe = /^\\s*\\*?\\/*/,\n setCommentSplitRe = /\\n/g,\n whitespaceRe = /\\s/,\n unescapeRe = /\\\\(.?)/g;\n\nvar unescapeMap = {\n \"0\": \"\\0\",\n \"r\": \"\\r\",\n \"n\": \"\\n\",\n \"t\": \"\\t\"\n};\n\n/**\n * Unescapes a string.\n * @param {string} str String to unescape\n * @returns {string} Unescaped string\n * @property {Object.} map Special characters map\n * @memberof tokenize\n */\nfunction unescape(str) {\n return str.replace(unescapeRe, function($0, $1) {\n switch ($1) {\n case \"\\\\\":\n case \"\":\n return $1;\n default:\n return unescapeMap[$1] || \"\";\n }\n });\n}\n\ntokenize.unescape = unescape;\n\n/**\n * Gets the next token and advances.\n * @typedef TokenizerHandleNext\n * @type {function}\n * @returns {string|null} Next token or `null` on eof\n */\n\n/**\n * Peeks for the next token.\n * @typedef TokenizerHandlePeek\n * @type {function}\n * @returns {string|null} Next token or `null` on eof\n */\n\n/**\n * Pushes a token back to the stack.\n * @typedef TokenizerHandlePush\n * @type {function}\n * @param {string} token Token\n * @returns {undefined}\n */\n\n/**\n * Skips the next token.\n * @typedef TokenizerHandleSkip\n * @type {function}\n * @param {string} expected Expected token\n * @param {boolean} [optional=false] If optional\n * @returns {boolean} Whether the token matched\n * @throws {Error} If the token didn't match and is not optional\n */\n\n/**\n * Gets the comment on the previous line or, alternatively, the line comment on the specified line.\n * @typedef TokenizerHandleCmnt\n * @type {function}\n * @param {number} [line] Line number\n * @returns {string|null} Comment text or `null` if none\n */\n\n/**\n * Handle object returned from {@link tokenize}.\n * @interface ITokenizerHandle\n * @property {TokenizerHandleNext} next Gets the next token and advances (`null` on eof)\n * @property {TokenizerHandlePeek} peek Peeks for the next token (`null` on eof)\n * @property {TokenizerHandlePush} push Pushes a token back to the stack\n * @property {TokenizerHandleSkip} skip Skips a token, returns its presence and advances or, if non-optional and not present, throws\n * @property {TokenizerHandleCmnt} cmnt Gets the comment on the previous line or the line comment on the specified line, if any\n * @property {number} line Current line number\n */\n\n/**\n * Tokenizes the given .proto source and returns an object with useful utility functions.\n * @param {string} source Source contents\n * @param {boolean} alternateCommentMode Whether we should activate alternate comment parsing mode.\n * @returns {ITokenizerHandle} Tokenizer handle\n */\nfunction tokenize(source, alternateCommentMode) {\n /* eslint-disable callback-return */\n source = source.toString();\n\n var offset = 0,\n length = source.length,\n line = 1,\n lastCommentLine = 0,\n comments = {};\n\n var stack = [];\n\n var stringDelim = null;\n\n /* istanbul ignore next */\n /**\n * Creates an error for illegal syntax.\n * @param {string} subject Subject\n * @returns {Error} Error created\n * @inner\n */\n function illegal(subject) {\n return Error(\"illegal \" + subject + \" (line \" + line + \")\");\n }\n\n /**\n * Reads a string till its end.\n * @returns {string} String read\n * @inner\n */\n function readString() {\n var re = stringDelim === \"'\" ? 
stringSingleRe : stringDoubleRe;\n re.lastIndex = offset - 1;\n var match = re.exec(source);\n if (!match)\n throw illegal(\"string\");\n offset = re.lastIndex;\n push(stringDelim);\n stringDelim = null;\n return unescape(match[1]);\n }\n\n /**\n * Gets the character at `pos` within the source.\n * @param {number} pos Position\n * @returns {string} Character\n * @inner\n */\n function charAt(pos) {\n return source.charAt(pos);\n }\n\n /**\n * Sets the current comment text.\n * @param {number} start Start offset\n * @param {number} end End offset\n * @param {boolean} isLeading set if a leading comment\n * @returns {undefined}\n * @inner\n */\n function setComment(start, end, isLeading) {\n var comment = {\n type: source.charAt(start++),\n lineEmpty: false,\n leading: isLeading,\n };\n var lookback;\n if (alternateCommentMode) {\n lookback = 2; // alternate comment parsing: \"//\" or \"/*\"\n } else {\n lookback = 3; // \"///\" or \"/**\"\n }\n var commentOffset = start - lookback,\n c;\n do {\n if (--commentOffset < 0 ||\n (c = source.charAt(commentOffset)) === \"\\n\") {\n comment.lineEmpty = true;\n break;\n }\n } while (c === \" \" || c === \"\\t\");\n var lines = source\n .substring(start, end)\n .split(setCommentSplitRe);\n for (var i = 0; i < lines.length; ++i)\n lines[i] = lines[i]\n .replace(alternateCommentMode ? setCommentAltRe : setCommentRe, \"\")\n .trim();\n comment.text = lines\n .join(\"\\n\")\n .trim();\n\n comments[line] = comment;\n lastCommentLine = line;\n }\n\n function isDoubleSlashCommentLine(startOffset) {\n var endOffset = findEndOfLine(startOffset);\n\n // see if remaining line matches comment pattern\n var lineText = source.substring(startOffset, endOffset);\n // look for 1 or 2 slashes since startOffset would already point past\n // the first slash that started the comment.\n var isComment = /^\\s*\\/{1,2}/.test(lineText);\n return isComment;\n }\n\n function findEndOfLine(cursor) {\n // find end of cursor's line\n var endOffset = cursor;\n while (endOffset < length && charAt(endOffset) !== \"\\n\") {\n endOffset++;\n }\n return endOffset;\n }\n\n /**\n * Obtains the next token.\n * @returns {string|null} Next token or `null` on eof\n * @inner\n */\n function next() {\n if (stack.length > 0)\n return stack.shift();\n if (stringDelim)\n return readString();\n var repeat,\n prev,\n curr,\n start,\n isDoc,\n isLeadingComment = offset === 0;\n do {\n if (offset === length)\n return null;\n repeat = false;\n while (whitespaceRe.test(curr = charAt(offset))) {\n if (curr === \"\\n\") {\n isLeadingComment = true;\n ++line;\n }\n if (++offset === length)\n return null;\n }\n\n if (charAt(offset) === \"/\") {\n if (++offset === length) {\n throw illegal(\"comment\");\n }\n if (charAt(offset) === \"/\") { // Line\n if (!alternateCommentMode) {\n // check for triple-slash comment\n isDoc = charAt(start = offset + 1) === \"/\";\n\n while (charAt(++offset) !== \"\\n\") {\n if (offset === length) {\n return null;\n }\n }\n ++offset;\n if (isDoc) {\n setComment(start, offset - 1, isLeadingComment);\n // Trailing comment cannot not be multi-line,\n // so leading comment state should be reset to handle potential next comments\n isLeadingComment = true;\n }\n ++line;\n repeat = true;\n } else {\n // check for double-slash comments, consolidating consecutive lines\n start = offset;\n isDoc = false;\n if (isDoubleSlashCommentLine(offset)) {\n isDoc = true;\n do {\n offset = findEndOfLine(offset);\n if (offset === length) {\n break;\n }\n offset++;\n if (!isLeadingComment) {\n // 
Trailing comment cannot not be multi-line\n break;\n }\n } while (isDoubleSlashCommentLine(offset));\n } else {\n offset = Math.min(length, findEndOfLine(offset) + 1);\n }\n if (isDoc) {\n setComment(start, offset, isLeadingComment);\n isLeadingComment = true;\n }\n line++;\n repeat = true;\n }\n } else if ((curr = charAt(offset)) === \"*\") { /* Block */\n // check for /** (regular comment mode) or /* (alternate comment mode)\n start = offset + 1;\n isDoc = alternateCommentMode || charAt(start) === \"*\";\n do {\n if (curr === \"\\n\") {\n ++line;\n }\n if (++offset === length) {\n throw illegal(\"comment\");\n }\n prev = curr;\n curr = charAt(offset);\n } while (prev !== \"*\" || curr !== \"/\");\n ++offset;\n if (isDoc) {\n setComment(start, offset - 2, isLeadingComment);\n isLeadingComment = true;\n }\n repeat = true;\n } else {\n return \"/\";\n }\n }\n } while (repeat);\n\n // offset !== length if we got here\n\n var end = offset;\n delimRe.lastIndex = 0;\n var delim = delimRe.test(charAt(end++));\n if (!delim)\n while (end < length && !delimRe.test(charAt(end)))\n ++end;\n var token = source.substring(offset, offset = end);\n if (token === \"\\\"\" || token === \"'\")\n stringDelim = token;\n return token;\n }\n\n /**\n * Pushes a token back to the stack.\n * @param {string} token Token\n * @returns {undefined}\n * @inner\n */\n function push(token) {\n stack.push(token);\n }\n\n /**\n * Peeks for the next token.\n * @returns {string|null} Token or `null` on eof\n * @inner\n */\n function peek() {\n if (!stack.length) {\n var token = next();\n if (token === null)\n return null;\n push(token);\n }\n return stack[0];\n }\n\n /**\n * Skips a token.\n * @param {string} expected Expected token\n * @param {boolean} [optional=false] Whether the token is optional\n * @returns {boolean} `true` when skipped, `false` if not\n * @throws {Error} When a required token is not present\n * @inner\n */\n function skip(expected, optional) {\n var actual = peek(),\n equals = actual === expected;\n if (equals) {\n next();\n return true;\n }\n if (!optional)\n throw illegal(\"token '\" + actual + \"', '\" + expected + \"' expected\");\n return false;\n }\n\n /**\n * Gets a comment.\n * @param {number} [trailingLine] Line number if looking for a trailing comment\n * @returns {string|null} Comment text\n * @inner\n */\n function cmnt(trailingLine) {\n var ret = null;\n var comment;\n if (trailingLine === undefined) {\n comment = comments[line - 1];\n delete comments[line - 1];\n if (comment && (alternateCommentMode || comment.type === \"*\" || comment.lineEmpty)) {\n ret = comment.leading ? comment.text : null;\n }\n } else {\n /* istanbul ignore else */\n if (lastCommentLine < trailingLine) {\n peek();\n }\n comment = comments[trailingLine];\n delete comments[trailingLine];\n if (comment && !comment.lineEmpty && (alternateCommentMode || comment.type === \"/\")) {\n ret = comment.leading ? null : comment.text;\n }\n }\n return ret;\n }\n\n return Object.defineProperty({\n next: next,\n peek: peek,\n push: push,\n skip: skip,\n cmnt: cmnt\n }, \"line\", {\n get: function() { return line; }\n });\n /* eslint-enable callback-return */\n}\n\n\n//# sourceURL=webpack://@waku/noise-rtc/./node_modules/protobufjs/src/tokenize.js?");
/***/ }),
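/* Usage sketch (editor's illustration): tokenize() above returns the handle consumed by
 * parse.js. Assuming the full build exposes it as protobuf.tokenize:
 *
 *   var tn = protobuf.tokenize('syntax = "proto3";', false);
 *   tn.next();      // "syntax"
 *   tn.skip("=");   // true, consumes "="
 *   tn.next();      // '"'      (opening quote token)
 *   tn.next();      // "proto3" (string contents, unescaped)
 *   tn.next();      // '"'      (closing quote token, pushed back by readString)
 *   tn.next();      // ";"
 *   tn.next();      // null on eof
 *   tn.line;        // current line number via the defined getter
 */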
/***/ "./node_modules/protobufjs/src/type.js":
/*!*********************************************!*\
!*** ./node_modules/protobufjs/src/type.js ***!
\*********************************************/
/***/ ((module, __unused_webpack_exports, __webpack_require__) => {
"use strict";
eval("\nmodule.exports = Type;\n\n// extends Namespace\nvar Namespace = __webpack_require__(/*! ./namespace */ \"./node_modules/protobufjs/src/namespace.js\");\n((Type.prototype = Object.create(Namespace.prototype)).constructor = Type).className = \"Type\";\n\nvar Enum = __webpack_require__(/*! ./enum */ \"./node_modules/protobufjs/src/enum.js\"),\n OneOf = __webpack_require__(/*! ./oneof */ \"./node_modules/protobufjs/src/oneof.js\"),\n Field = __webpack_require__(/*! ./field */ \"./node_modules/protobufjs/src/field.js\"),\n MapField = __webpack_require__(/*! ./mapfield */ \"./node_modules/protobufjs/src/mapfield.js\"),\n Service = __webpack_require__(/*! ./service */ \"./node_modules/protobufjs/src/service.js\"),\n Message = __webpack_require__(/*! ./message */ \"./node_modules/protobufjs/src/message.js\"),\n Reader = __webpack_require__(/*! ./reader */ \"./node_modules/protobufjs/src/reader.js\"),\n Writer = __webpack_require__(/*! ./writer */ \"./node_modules/protobufjs/src/writer.js\"),\n util = __webpack_require__(/*! ./util */ \"./node_modules/protobufjs/src/util.js\"),\n encoder = __webpack_require__(/*! ./encoder */ \"./node_modules/protobufjs/src/encoder.js\"),\n decoder = __webpack_require__(/*! ./decoder */ \"./node_modules/protobufjs/src/decoder.js\"),\n verifier = __webpack_require__(/*! ./verifier */ \"./node_modules/protobufjs/src/verifier.js\"),\n converter = __webpack_require__(/*! ./converter */ \"./node_modules/protobufjs/src/converter.js\"),\n wrappers = __webpack_require__(/*! ./wrappers */ \"./node_modules/protobufjs/src/wrappers.js\");\n\n/**\n * Constructs a new reflected message type instance.\n * @classdesc Reflected message type.\n * @extends NamespaceBase\n * @constructor\n * @param {string} name Message name\n * @param {Object.} [options] Declared options\n */\nfunction Type(name, options) {\n Namespace.call(this, name, options);\n\n /**\n * Message fields.\n * @type {Object.}\n */\n this.fields = {}; // toJSON, marker\n\n /**\n * Oneofs declared within this namespace, if any.\n * @type {Object.}\n */\n this.oneofs = undefined; // toJSON\n\n /**\n * Extension ranges, if any.\n * @type {number[][]}\n */\n this.extensions = undefined; // toJSON\n\n /**\n * Reserved ranges, if any.\n * @type {Array.}\n */\n this.reserved = undefined; // toJSON\n\n /*?\n * Whether this type is a legacy group.\n * @type {boolean|undefined}\n */\n this.group = undefined; // toJSON\n\n /**\n * Cached fields by id.\n * @type {Object.|null}\n * @private\n */\n this._fieldsById = null;\n\n /**\n * Cached fields as an array.\n * @type {Field[]|null}\n * @private\n */\n this._fieldsArray = null;\n\n /**\n * Cached oneofs as an array.\n * @type {OneOf[]|null}\n * @private\n */\n this._oneofsArray = null;\n\n /**\n * Cached constructor.\n * @type {Constructor<{}>}\n * @private\n */\n this._ctor = null;\n}\n\nObject.defineProperties(Type.prototype, {\n\n /**\n * Message fields by id.\n * @name Type#fieldsById\n * @type {Object.}\n * @readonly\n */\n fieldsById: {\n get: function() {\n\n /* istanbul ignore if */\n if (this._fieldsById)\n return this._fieldsById;\n\n this._fieldsById = {};\n for (var names = Object.keys(this.fields), i = 0; i < names.length; ++i) {\n var field = this.fields[names[i]],\n id = field.id;\n\n /* istanbul ignore if */\n if (this._fieldsById[id])\n throw Error(\"duplicate id \" + id + \" in \" + this);\n\n this._fieldsById[id] = field;\n }\n return this._fieldsById;\n }\n },\n\n /**\n * Fields of this message as an array for iteration.\n * @name Type#fieldsArray\n * 
@type {Field[]}\n * @readonly\n */\n fieldsArray: {\n get: function() {\n return this._fieldsArray || (this._fieldsArray = util.toArray(this.fields));\n }\n },\n\n /**\n * Oneofs of this message as an array for iteration.\n * @name Type#oneofsArray\n * @type {OneOf[]}\n * @readonly\n */\n oneofsArray: {\n get: function() {\n return this._oneofsArray || (this._oneofsArray = util.toArray(this.oneofs));\n }\n },\n\n /**\n * The registered constructor, if any registered, otherwise a generic constructor.\n * Assigning a function replaces the internal constructor. If the function does not extend {@link Message} yet, its prototype will be setup accordingly and static methods will be populated. If it already extends {@link Message}, it will just replace the internal constructor.\n * @name Type#ctor\n * @type {Constructor<{}>}\n */\n ctor: {\n get: function() {\n return this._ctor || (this.ctor = Type.generateConstructor(this)());\n },\n set: function(ctor) {\n\n // Ensure proper prototype\n var prototype = ctor.prototype;\n if (!(prototype instanceof Message)) {\n (ctor.prototype = new Message()).constructor = ctor;\n util.merge(ctor.prototype, prototype);\n }\n\n // Classes and messages reference their reflected type\n ctor.$type = ctor.prototype.$type = this;\n\n // Mix in static methods\n util.merge(ctor, Message, true);\n\n this._ctor = ctor;\n\n // Messages have non-enumerable default values on their prototype\n var i = 0;\n for (; i < /* initializes */ this.fieldsArray.length; ++i)\n this._fieldsArray[i].resolve(); // ensures a proper value\n\n // Messages have non-enumerable getters and setters for each virtual oneof field\n var ctorProperties = {};\n for (i = 0; i < /* initializes */ this.oneofsArray.length; ++i)\n ctorProperties[this._oneofsArray[i].resolve().name] = {\n get: util.oneOfGetter(this._oneofsArray[i].oneof),\n set: util.oneOfSetter(this._oneofsArray[i].oneof)\n };\n if (i)\n Object.defineProperties(ctor.prototype, ctorProperties);\n }\n }\n});\n\n/**\n * Generates a constructor function for the specified type.\n * @param {Type} mtype Message type\n * @returns {Codegen} Codegen instance\n */\nType.generateConstructor = function generateConstructor(mtype) {\n /* eslint-disable no-unexpected-multiline */\n var gen = util.codegen([\"p\"], mtype.name);\n // explicitly initialize mutable object/array fields so that these aren't just inherited from the prototype\n for (var i = 0, field; i < mtype.fieldsArray.length; ++i)\n if ((field = mtype._fieldsArray[i]).map) gen\n (\"this%s={}\", util.safeProp(field.name));\n else if (field.repeated) gen\n (\"this%s=[]\", util.safeProp(field.name));\n return gen\n (\"if(p)for(var ks=Object.keys(p),i=0;i} [oneofs] Oneof descriptors\n * @property {Object.} fields Field descriptors\n * @property {number[][]} [extensions] Extension ranges\n * @property {number[][]} [reserved] Reserved ranges\n * @property {boolean} [group=false] Whether a legacy group or not\n */\n\n/**\n * Creates a message type from a message type descriptor.\n * @param {string} name Message name\n * @param {IType} json Message type descriptor\n * @returns {Type} Created message type\n */\nType.fromJSON = function fromJSON(name, json) {\n var type = new Type(name, json.options);\n type.extensions = json.extensions;\n type.reserved = json.reserved;\n var names = Object.keys(json.fields),\n i = 0;\n for (; i < names.length; ++i)\n type.add(\n ( typeof json.fields[names[i]].keyType !== \"undefined\"\n ? 
MapField.fromJSON\n : Field.fromJSON )(names[i], json.fields[names[i]])\n );\n if (json.oneofs)\n for (names = Object.keys(json.oneofs), i = 0; i < names.length; ++i)\n type.add(OneOf.fromJSON(names[i], json.oneofs[names[i]]));\n if (json.nested)\n for (names = Object.keys(json.nested), i = 0; i < names.length; ++i) {\n var nested = json.nested[names[i]];\n type.add( // most to least likely\n ( nested.id !== undefined\n ? Field.fromJSON\n : nested.fields !== undefined\n ? Type.fromJSON\n : nested.values !== undefined\n ? Enum.fromJSON\n : nested.methods !== undefined\n ? Service.fromJSON\n : Namespace.fromJSON )(names[i], nested)\n );\n }\n if (json.extensions && json.extensions.length)\n type.extensions = json.extensions;\n if (json.reserved && json.reserved.length)\n type.reserved = json.reserved;\n if (json.group)\n type.group = true;\n if (json.comment)\n type.comment = json.comment;\n return type;\n};\n\n/**\n * Converts this message type to a message type descriptor.\n * @param {IToJSONOptions} [toJSONOptions] JSON conversion options\n * @returns {IType} Message type descriptor\n */\nType.prototype.toJSON = function toJSON(toJSONOptions) {\n var inherited = Namespace.prototype.toJSON.call(this, toJSONOptions);\n var keepComments = toJSONOptions ? Boolean(toJSONOptions.keepComments) : false;\n return util.toObject([\n \"options\" , inherited && inherited.options || undefined,\n \"oneofs\" , Namespace.arrayToJSON(this.oneofsArray, toJSONOptions),\n \"fields\" , Namespace.arrayToJSON(this.fieldsArray.filter(function(obj) { return !obj.declaringField; }), toJSONOptions) || {},\n \"extensions\" , this.extensions && this.extensions.length ? this.extensions : undefined,\n \"reserved\" , this.reserved && this.reserved.length ? this.reserved : undefined,\n \"group\" , this.group || undefined,\n \"nested\" , inherited && inherited.nested || undefined,\n \"comment\" , keepComments ? this.comment : undefined\n ]);\n};\n\n/**\n * @override\n */\nType.prototype.resolveAll = function resolveAll() {\n var fields = this.fieldsArray, i = 0;\n while (i < fields.length)\n fields[i++].resolve();\n var oneofs = this.oneofsArray; i = 0;\n while (i < oneofs.length)\n oneofs[i++].resolve();\n return Namespace.prototype.resolveAll.call(this);\n};\n\n/**\n * @override\n */\nType.prototype.get = function get(name) {\n return this.fields[name]\n || this.oneofs && this.oneofs[name]\n || this.nested && this.nested[name]\n || null;\n};\n\n/**\n * Adds a nested object to this type.\n * @param {ReflectionObject} object Nested object to add\n * @returns {Type} `this`\n * @throws {TypeError} If arguments are invalid\n * @throws {Error} If there is already a nested object with this name or, if a field, when there is already a field with this id\n */\nType.prototype.add = function add(object) {\n\n if (this.get(object.name))\n throw Error(\"duplicate name '\" + object.name + \"' in \" + this);\n\n if (object instanceof Field && object.extend === undefined) {\n // NOTE: Extension fields aren't actual fields on the declaring type, but nested objects.\n // The root object takes care of adding distinct sister-fields to the respective extended\n // type instead.\n\n // avoids calling the getter if not absolutely necessary because it's called quite frequently\n if (this._fieldsById ? 
/* istanbul ignore next */ this._fieldsById[object.id] : this.fieldsById[object.id])\n throw Error(\"duplicate id \" + object.id + \" in \" + this);\n if (this.isReservedId(object.id))\n throw Error(\"id \" + object.id + \" is reserved in \" + this);\n if (this.isReservedName(object.name))\n throw Error(\"name '\" + object.name + \"' is reserved in \" + this);\n\n if (object.parent)\n object.parent.remove(object);\n this.fields[object.name] = object;\n object.message = this;\n object.onAdd(this);\n return clearCache(this);\n }\n if (object instanceof OneOf) {\n if (!this.oneofs)\n this.oneofs = {};\n this.oneofs[object.name] = object;\n object.onAdd(this);\n return clearCache(this);\n }\n return Namespace.prototype.add.call(this, object);\n};\n\n/**\n * Removes a nested object from this type.\n * @param {ReflectionObject} object Nested object to remove\n * @returns {Type} `this`\n * @throws {TypeError} If arguments are invalid\n * @throws {Error} If `object` is not a member of this type\n */\nType.prototype.remove = function remove(object) {\n if (object instanceof Field && object.extend === undefined) {\n // See Type#add for the reason why extension fields are excluded here.\n\n /* istanbul ignore if */\n if (!this.fields || this.fields[object.name] !== object)\n throw Error(object + \" is not a member of \" + this);\n\n delete this.fields[object.name];\n object.parent = null;\n object.onRemove(this);\n return clearCache(this);\n }\n if (object instanceof OneOf) {\n\n /* istanbul ignore if */\n if (!this.oneofs || this.oneofs[object.name] !== object)\n throw Error(object + \" is not a member of \" + this);\n\n delete this.oneofs[object.name];\n object.parent = null;\n object.onRemove(this);\n return clearCache(this);\n }\n return Namespace.prototype.remove.call(this, object);\n};\n\n/**\n * Tests if the specified id is reserved.\n * @param {number} id Id to test\n * @returns {boolean} `true` if reserved, otherwise `false`\n */\nType.prototype.isReservedId = function isReservedId(id) {\n return Namespace.isReservedId(this.reserved, id);\n};\n\n/**\n * Tests if the specified name is reserved.\n * @param {string} name Name to test\n * @returns {boolean} `true` if reserved, otherwise `false`\n */\nType.prototype.isReservedName = function isReservedName(name) {\n return Namespace.isReservedName(this.reserved, name);\n};\n\n/**\n * Creates a new message of this type using the specified properties.\n * @param {Object.} [properties] Properties to set\n * @returns {Message<{}>} Message instance\n */\nType.prototype.create = function create(properties) {\n return new this.ctor(properties);\n};\n\n/**\n * Sets up {@link Type#encode|encode}, {@link Type#decode|decode} and {@link Type#verify|verify}.\n * @returns {Type} `this`\n */\nType.prototype.setup = function setup() {\n // Sets up everything at once so that the prototype chain does not have to be re-evaluated\n // multiple times (V8, soft-deopt prototype-check).\n\n var fullName = this.fullName,\n types = [];\n for (var i = 0; i < /* initializes */ this.fieldsArray.length; ++i)\n types.push(this._fieldsArray[i].resolve().resolvedType);\n\n // Replace setup methods with type-specific generated functions\n this.encode = encoder(this)({\n Writer : Writer,\n types : types,\n util : util\n });\n this.decode = decoder(this)({\n Reader : Reader,\n types : types,\n util : util\n });\n this.verify = verifier(this)({\n types : types,\n util : util\n });\n this.fromObject = converter.fromObject(this)({\n types : types,\n util : util\n });\n this.toObject = 
converter.toObject(this)({\n types : types,\n util : util\n });\n\n // Inject custom wrappers for common types\n var wrapper = wrappers[fullName];\n if (wrapper) {\n var originalThis = Object.create(this);\n // if (wrapper.fromObject) {\n originalThis.fromObject = this.fromObject;\n this.fromObject = wrapper.fromObject.bind(originalThis);\n // }\n // if (wrapper.toObject) {\n originalThis.toObject = this.toObject;\n this.toObject = wrapper.toObject.bind(originalThis);\n // }\n }\n\n return this;\n};\n\n/**\n * Encodes a message of this type. Does not implicitly {@link Type#verify|verify} messages.\n * @param {Message<{}>|Object.} message Message instance or plain object\n * @param {Writer} [writer] Writer to encode to\n * @returns {Writer} writer\n */\nType.prototype.encode = function encode_setup(message, writer) {\n return this.setup().encode(message, writer); // overrides this method\n};\n\n/**\n * Encodes a message of this type preceeded by its byte length as a varint. Does not implicitly {@link Type#verify|verify} messages.\n * @param {Message<{}>|Object.} message Message instance or plain object\n * @param {Writer} [writer] Writer to encode to\n * @returns {Writer} writer\n */\nType.prototype.encodeDelimited = function encodeDelimited(message, writer) {\n return this.encode(message, writer && writer.len ? writer.fork() : writer).ldelim();\n};\n\n/**\n * Decodes a message of this type.\n * @param {Reader|Uint8Array} reader Reader or buffer to decode from\n * @param {number} [length] Length of the message, if known beforehand\n * @returns {Message<{}>} Decoded message\n * @throws {Error} If the payload is not a reader or valid buffer\n * @throws {util.ProtocolError<{}>} If required fields are missing\n */\nType.prototype.decode = function decode_setup(reader, length) {\n return this.setup().decode(reader, length); // overrides this method\n};\n\n/**\n * Decodes a message of this type preceeded by its byte length as a varint.\n * @param {Reader|Uint8Array} reader Reader or buffer to decode from\n * @returns {Message<{}>} Decoded message\n * @throws {Error} If the payload is not a reader or valid buffer\n * @throws {util.ProtocolError} If required fields are missing\n */\nType.prototype.decodeDelimited = function decodeDelimited(reader) {\n if (!(reader instanceof Reader))\n reader = Reader.create(reader);\n return this.decode(reader, reader.uint32());\n};\n\n/**\n * Verifies that field values are valid and that required fields are present.\n * @param {Object.} message Plain object to verify\n * @returns {null|string} `null` if valid, otherwise the reason why it is not\n */\nType.prototype.verify = function verify_setup(message) {\n return this.setup().verify(message); // overrides this method\n};\n\n/**\n * Creates a new message of this type from a plain object. 
Also converts values to their respective internal types.\n * @param {Object.} object Plain object to convert\n * @returns {Message<{}>} Message instance\n */\nType.prototype.fromObject = function fromObject(object) {\n return this.setup().fromObject(object);\n};\n\n/**\n * Conversion options as used by {@link Type#toObject} and {@link Message.toObject}.\n * @interface IConversionOptions\n * @property {Function} [longs] Long conversion type.\n * Valid values are `String` and `Number` (the global types).\n * Defaults to copy the present value, which is a possibly unsafe number without and a {@link Long} with a long library.\n * @property {Function} [enums] Enum value conversion type.\n * Only valid value is `String` (the global type).\n * Defaults to copy the present value, which is the numeric id.\n * @property {Function} [bytes] Bytes value conversion type.\n * Valid values are `Array` and (a base64 encoded) `String` (the global types).\n * Defaults to copy the present value, which usually is a Buffer under node and an Uint8Array in the browser.\n * @property {boolean} [defaults=false] Also sets default values on the resulting object\n * @property {boolean} [arrays=false] Sets empty arrays for missing repeated fields even if `defaults=false`\n * @property {boolean} [objects=false] Sets empty objects for missing map fields even if `defaults=false`\n * @property {boolean} [oneofs=false] Includes virtual oneof properties set to the present field's name, if any\n * @property {boolean} [json=false] Performs additional JSON compatibility conversions, i.e. NaN and Infinity to strings\n */\n\n/**\n * Creates a plain object from a message of this type. Also converts values to other types if specified.\n * @param {Message<{}>} message Message instance\n * @param {IConversionOptions} [options] Conversion options\n * @returns {Object.} Plain object\n */\nType.prototype.toObject = function toObject(message, options) {\n return this.setup().toObject(message, options);\n};\n\n/**\n * Decorator function as returned by {@link Type.d} (TypeScript).\n * @typedef TypeDecorator\n * @type {function}\n * @param {Constructor} target Target constructor\n * @returns {undefined}\n * @template T extends Message\n */\n\n/**\n * Type decorator (TypeScript).\n * @param {string} [typeName] Type name, defaults to the constructor's name\n * @returns {TypeDecorator} Decorator function\n * @template T extends Message\n */\nType.d = function decorateType(typeName) {\n return function typeDecorator(target) {\n util.decorateType(target, typeName);\n };\n};\n\n\n//# sourceURL=webpack://@waku/noise-rtc/./node_modules/protobufjs/src/type.js?");
/***/ }),
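/* Usage sketch (editor's illustration, names hypothetical): Type above is the reflected
 * message class; setup() swaps in generated encode/decode/verify on first use. Building a
 * type at runtime and round-tripping a message:
 *
 *   var AwesomeMessage = new protobuf.Type("AwesomeMessage")
 *     .add(new protobuf.Field("awesomeField", 1, "string"));
 *   new protobuf.Root().add(AwesomeMessage);   // attach to a namespace so fields resolve
 *
 *   var message = AwesomeMessage.create({ awesomeField: "hello" });
 *   var buffer  = AwesomeMessage.encode(message).finish();   // Uint8Array
 *   var decoded = AwesomeMessage.decode(buffer);
 *   AwesomeMessage.verify({ awesomeField: 42 });  // returns an error string, or null if valid
 */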
/***/ "./node_modules/protobufjs/src/types.js":
/*!**********************************************!*\
!*** ./node_modules/protobufjs/src/types.js ***!
\**********************************************/
/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
"use strict";
eval("\n\n/**\n * Common type constants.\n * @namespace\n */\nvar types = exports;\n\nvar util = __webpack_require__(/*! ./util */ \"./node_modules/protobufjs/src/util.js\");\n\nvar s = [\n \"double\", // 0\n \"float\", // 1\n \"int32\", // 2\n \"uint32\", // 3\n \"sint32\", // 4\n \"fixed32\", // 5\n \"sfixed32\", // 6\n \"int64\", // 7\n \"uint64\", // 8\n \"sint64\", // 9\n \"fixed64\", // 10\n \"sfixed64\", // 11\n \"bool\", // 12\n \"string\", // 13\n \"bytes\" // 14\n];\n\nfunction bake(values, offset) {\n var i = 0, o = {};\n offset |= 0;\n while (i < values.length) o[s[i + offset]] = values[i++];\n return o;\n}\n\n/**\n * Basic type wire types.\n * @type {Object.}\n * @const\n * @property {number} double=1 Fixed64 wire type\n * @property {number} float=5 Fixed32 wire type\n * @property {number} int32=0 Varint wire type\n * @property {number} uint32=0 Varint wire type\n * @property {number} sint32=0 Varint wire type\n * @property {number} fixed32=5 Fixed32 wire type\n * @property {number} sfixed32=5 Fixed32 wire type\n * @property {number} int64=0 Varint wire type\n * @property {number} uint64=0 Varint wire type\n * @property {number} sint64=0 Varint wire type\n * @property {number} fixed64=1 Fixed64 wire type\n * @property {number} sfixed64=1 Fixed64 wire type\n * @property {number} bool=0 Varint wire type\n * @property {number} string=2 Ldelim wire type\n * @property {number} bytes=2 Ldelim wire type\n */\ntypes.basic = bake([\n /* double */ 1,\n /* float */ 5,\n /* int32 */ 0,\n /* uint32 */ 0,\n /* sint32 */ 0,\n /* fixed32 */ 5,\n /* sfixed32 */ 5,\n /* int64 */ 0,\n /* uint64 */ 0,\n /* sint64 */ 0,\n /* fixed64 */ 1,\n /* sfixed64 */ 1,\n /* bool */ 0,\n /* string */ 2,\n /* bytes */ 2\n]);\n\n/**\n * Basic type defaults.\n * @type {Object.}\n * @const\n * @property {number} double=0 Double default\n * @property {number} float=0 Float default\n * @property {number} int32=0 Int32 default\n * @property {number} uint32=0 Uint32 default\n * @property {number} sint32=0 Sint32 default\n * @property {number} fixed32=0 Fixed32 default\n * @property {number} sfixed32=0 Sfixed32 default\n * @property {number} int64=0 Int64 default\n * @property {number} uint64=0 Uint64 default\n * @property {number} sint64=0 Sint32 default\n * @property {number} fixed64=0 Fixed64 default\n * @property {number} sfixed64=0 Sfixed64 default\n * @property {boolean} bool=false Bool default\n * @property {string} string=\"\" String default\n * @property {Array.} bytes=Array(0) Bytes default\n * @property {null} message=null Message default\n */\ntypes.defaults = bake([\n /* double */ 0,\n /* float */ 0,\n /* int32 */ 0,\n /* uint32 */ 0,\n /* sint32 */ 0,\n /* fixed32 */ 0,\n /* sfixed32 */ 0,\n /* int64 */ 0,\n /* uint64 */ 0,\n /* sint64 */ 0,\n /* fixed64 */ 0,\n /* sfixed64 */ 0,\n /* bool */ false,\n /* string */ \"\",\n /* bytes */ util.emptyArray,\n /* message */ null\n]);\n\n/**\n * Basic long type wire types.\n * @type {Object.}\n * @const\n * @property {number} int64=0 Varint wire type\n * @property {number} uint64=0 Varint wire type\n * @property {number} sint64=0 Varint wire type\n * @property {number} fixed64=1 Fixed64 wire type\n * @property {number} sfixed64=1 Fixed64 wire type\n */\ntypes.long = bake([\n /* int64 */ 0,\n /* uint64 */ 0,\n /* sint64 */ 0,\n /* fixed64 */ 1,\n /* sfixed64 */ 1\n], 7);\n\n/**\n * Allowed types for map keys with their associated wire type.\n * @type {Object.}\n * @const\n * @property {number} int32=0 Varint wire type\n * @property {number} uint32=0 Varint wire 
type\n * @property {number} sint32=0 Varint wire type\n * @property {number} fixed32=5 Fixed32 wire type\n * @property {number} sfixed32=5 Fixed32 wire type\n * @property {number} int64=0 Varint wire type\n * @property {number} uint64=0 Varint wire type\n * @property {number} sint64=0 Varint wire type\n * @property {number} fixed64=1 Fixed64 wire type\n * @property {number} sfixed64=1 Fixed64 wire type\n * @property {number} bool=0 Varint wire type\n * @property {number} string=2 Ldelim wire type\n */\ntypes.mapKey = bake([\n /* int32 */ 0,\n /* uint32 */ 0,\n /* sint32 */ 0,\n /* fixed32 */ 5,\n /* sfixed32 */ 5,\n /* int64 */ 0,\n /* uint64 */ 0,\n /* sint64 */ 0,\n /* fixed64 */ 1,\n /* sfixed64 */ 1,\n /* bool */ 0,\n /* string */ 2\n], 2);\n\n/**\n * Allowed types for packed repeated fields with their associated wire type.\n * @type {Object.}\n * @const\n * @property {number} double=1 Fixed64 wire type\n * @property {number} float=5 Fixed32 wire type\n * @property {number} int32=0 Varint wire type\n * @property {number} uint32=0 Varint wire type\n * @property {number} sint32=0 Varint wire type\n * @property {number} fixed32=5 Fixed32 wire type\n * @property {number} sfixed32=5 Fixed32 wire type\n * @property {number} int64=0 Varint wire type\n * @property {number} uint64=0 Varint wire type\n * @property {number} sint64=0 Varint wire type\n * @property {number} fixed64=1 Fixed64 wire type\n * @property {number} sfixed64=1 Fixed64 wire type\n * @property {number} bool=0 Varint wire type\n */\ntypes.packed = bake([\n /* double */ 1,\n /* float */ 5,\n /* int32 */ 0,\n /* uint32 */ 0,\n /* sint32 */ 0,\n /* fixed32 */ 5,\n /* sfixed32 */ 5,\n /* int64 */ 0,\n /* uint64 */ 0,\n /* sint64 */ 0,\n /* fixed64 */ 1,\n /* sfixed64 */ 1,\n /* bool */ 0\n]);\n\n\n//# sourceURL=webpack://@waku/noise-rtc/./node_modules/protobufjs/src/types.js?");
/***/ }),
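/* Reference sketch (editor's illustration): the tables above map basic type names to wire
 * types and defaults. Assuming protobuf.types is exposed by the full build:
 *
 *   protobuf.types.basic.string;       // 2  (length-delimited)
 *   protobuf.types.basic.fixed64;      // 1  (64-bit)
 *   protobuf.types.defaults.bool;      // false
 *   protobuf.types.defaults.bytes;     // util.emptyArray
 *   protobuf.types.long.sint64;        // 0  (varint)
 *   "double" in protobuf.types.packed; // true, i.e. repeated doubles may be packed
 */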
/***/ "./node_modules/protobufjs/src/util.js":
/*!*********************************************!*\
!*** ./node_modules/protobufjs/src/util.js ***!
\*********************************************/
/***/ ((module, __unused_webpack_exports, __webpack_require__) => {
"use strict";
eval("\n\n/**\n * Various utility functions.\n * @namespace\n */\nvar util = module.exports = __webpack_require__(/*! ./util/minimal */ \"./node_modules/protobufjs/src/util/minimal.js\");\n\nvar roots = __webpack_require__(/*! ./roots */ \"./node_modules/protobufjs/src/roots.js\");\n\nvar Type, // cyclic\n Enum;\n\nutil.codegen = __webpack_require__(/*! @protobufjs/codegen */ \"./node_modules/@protobufjs/codegen/index.js\");\nutil.fetch = __webpack_require__(/*! @protobufjs/fetch */ \"./node_modules/@protobufjs/fetch/index.js\");\nutil.path = __webpack_require__(/*! @protobufjs/path */ \"./node_modules/@protobufjs/path/index.js\");\n\n/**\n * Node's fs module if available.\n * @type {Object.}\n */\nutil.fs = util.inquire(\"fs\");\n\n/**\n * Converts an object's values to an array.\n * @param {Object.} object Object to convert\n * @returns {Array.<*>} Converted array\n */\nutil.toArray = function toArray(object) {\n if (object) {\n var keys = Object.keys(object),\n array = new Array(keys.length),\n index = 0;\n while (index < keys.length)\n array[index] = object[keys[index++]];\n return array;\n }\n return [];\n};\n\n/**\n * Converts an array of keys immediately followed by their respective value to an object, omitting undefined values.\n * @param {Array.<*>} array Array to convert\n * @returns {Object.} Converted object\n */\nutil.toObject = function toObject(array) {\n var object = {},\n index = 0;\n while (index < array.length) {\n var key = array[index++],\n val = array[index++];\n if (val !== undefined)\n object[key] = val;\n }\n return object;\n};\n\nvar safePropBackslashRe = /\\\\/g,\n safePropQuoteRe = /\"/g;\n\n/**\n * Tests whether the specified name is a reserved word in JS.\n * @param {string} name Name to test\n * @returns {boolean} `true` if reserved, otherwise `false`\n */\nutil.isReserved = function isReserved(name) {\n return /^(?:do|if|in|for|let|new|try|var|case|else|enum|eval|false|null|this|true|void|with|break|catch|class|const|super|throw|while|yield|delete|export|import|public|return|static|switch|typeof|default|extends|finally|package|private|continue|debugger|function|arguments|interface|protected|implements|instanceof)$/.test(name);\n};\n\n/**\n * Returns a safe property accessor for the specified property name.\n * @param {string} prop Property name\n * @returns {string} Safe accessor\n */\nutil.safeProp = function safeProp(prop) {\n if (!/^[$\\w_]+$/.test(prop) || util.isReserved(prop))\n return \"[\\\"\" + prop.replace(safePropBackslashRe, \"\\\\\\\\\").replace(safePropQuoteRe, \"\\\\\\\"\") + \"\\\"]\";\n return \".\" + prop;\n};\n\n/**\n * Converts the first character of a string to upper case.\n * @param {string} str String to convert\n * @returns {string} Converted string\n */\nutil.ucFirst = function ucFirst(str) {\n return str.charAt(0).toUpperCase() + str.substring(1);\n};\n\nvar camelCaseRe = /_([a-z])/g;\n\n/**\n * Converts a string to camel case.\n * @param {string} str String to convert\n * @returns {string} Converted string\n */\nutil.camelCase = function camelCase(str) {\n return str.substring(0, 1)\n + str.substring(1)\n .replace(camelCaseRe, function($0, $1) { return $1.toUpperCase(); });\n};\n\n/**\n * Compares reflected fields by id.\n * @param {Field} a First field\n * @param {Field} b Second field\n * @returns {number} Comparison value\n */\nutil.compareFieldsById = function compareFieldsById(a, b) {\n return a.id - b.id;\n};\n\n/**\n * Decorator helper for types (TypeScript).\n * @param {Constructor} ctor Constructor function\n * @param 
{string} [typeName] Type name, defaults to the constructor's name\n * @returns {Type} Reflected type\n * @template T extends Message\n * @property {Root} root Decorators root\n */\nutil.decorateType = function decorateType(ctor, typeName) {\n\n /* istanbul ignore if */\n if (ctor.$type) {\n if (typeName && ctor.$type.name !== typeName) {\n util.decorateRoot.remove(ctor.$type);\n ctor.$type.name = typeName;\n util.decorateRoot.add(ctor.$type);\n }\n return ctor.$type;\n }\n\n /* istanbul ignore next */\n if (!Type)\n Type = __webpack_require__(/*! ./type */ \"./node_modules/protobufjs/src/type.js\");\n\n var type = new Type(typeName || ctor.name);\n util.decorateRoot.add(type);\n type.ctor = ctor; // sets up .encode, .decode etc.\n Object.defineProperty(ctor, \"$type\", { value: type, enumerable: false });\n Object.defineProperty(ctor.prototype, \"$type\", { value: type, enumerable: false });\n return type;\n};\n\nvar decorateEnumIndex = 0;\n\n/**\n * Decorator helper for enums (TypeScript).\n * @param {Object} object Enum object\n * @returns {Enum} Reflected enum\n */\nutil.decorateEnum = function decorateEnum(object) {\n\n /* istanbul ignore if */\n if (object.$type)\n return object.$type;\n\n /* istanbul ignore next */\n if (!Enum)\n Enum = __webpack_require__(/*! ./enum */ \"./node_modules/protobufjs/src/enum.js\");\n\n var enm = new Enum(\"Enum\" + decorateEnumIndex++, object);\n util.decorateRoot.add(enm);\n Object.defineProperty(object, \"$type\", { value: enm, enumerable: false });\n return enm;\n};\n\n\n/**\n * Sets the value of a property by property path. If a value already exists, it is turned to an array\n * @param {Object.} dst Destination object\n * @param {string} path dot '.' delimited path of the property to set\n * @param {Object} value the value to set\n * @returns {Object.} Destination object\n */\nutil.setProperty = function setProperty(dst, path, value) {\n function setProp(dst, path, value) {\n var part = path.shift();\n if (part === \"__proto__\") {\n return dst;\n }\n if (path.length > 0) {\n dst[part] = setProp(dst[part] || {}, path, value);\n } else {\n var prevValue = dst[part];\n if (prevValue)\n value = [].concat(prevValue).concat(value);\n dst[part] = value;\n }\n return dst;\n }\n\n if (typeof dst !== \"object\")\n throw TypeError(\"dst must be an object\");\n if (!path)\n throw TypeError(\"path must be specified\");\n\n path = path.split(\".\");\n return setProp(dst, path, value);\n};\n\n/**\n * Decorator root (TypeScript).\n * @name util.decorateRoot\n * @type {Root}\n * @readonly\n */\nObject.defineProperty(util, \"decorateRoot\", {\n get: function() {\n return roots[\"decorated\"] || (roots[\"decorated\"] = new (__webpack_require__(/*! ./root */ \"./node_modules/protobufjs/src/root.js\"))());\n }\n});\n\n\n//# sourceURL=webpack://@waku/noise-rtc/./node_modules/protobufjs/src/util.js?");
/***/ }),
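/* The util module above layers reflection helpers (camel casing, reserved-word
 * safe accessors, dotted-path assignment) on top of util/minimal. A minimal
 * usage sketch, kept in a comment so the module map stays intact; it assumes
 * the protobufjs src files are requireable directly, as they are in this
 * bundle, and the values shown follow the implementations above:
 *
 *   const util = require("protobufjs/src/util");
 *   util.camelCase("foo_bar_baz");     // "fooBarBaz"
 *   util.safeProp("name");             // ".name"
 *   util.safeProp("class");            // '["class"]' (reserved word, bracket access)
 *   util.setProperty({}, "a.b", 1);    // { a: { b: 1 } }; assigning "a.b" again
 *                                      // turns the stored value into an array
 */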
/***/ "./node_modules/protobufjs/src/util/longbits.js":
/*!******************************************************!*\
!*** ./node_modules/protobufjs/src/util/longbits.js ***!
\******************************************************/
/***/ ((module, __unused_webpack_exports, __webpack_require__) => {
"use strict";
eval("\nmodule.exports = LongBits;\n\nvar util = __webpack_require__(/*! ../util/minimal */ \"./node_modules/protobufjs/src/util/minimal.js\");\n\n/**\n * Constructs new long bits.\n * @classdesc Helper class for working with the low and high bits of a 64 bit value.\n * @memberof util\n * @constructor\n * @param {number} lo Low 32 bits, unsigned\n * @param {number} hi High 32 bits, unsigned\n */\nfunction LongBits(lo, hi) {\n\n // note that the casts below are theoretically unnecessary as of today, but older statically\n // generated converter code might still call the ctor with signed 32bits. kept for compat.\n\n /**\n * Low bits.\n * @type {number}\n */\n this.lo = lo >>> 0;\n\n /**\n * High bits.\n * @type {number}\n */\n this.hi = hi >>> 0;\n}\n\n/**\n * Zero bits.\n * @memberof util.LongBits\n * @type {util.LongBits}\n */\nvar zero = LongBits.zero = new LongBits(0, 0);\n\nzero.toNumber = function() { return 0; };\nzero.zzEncode = zero.zzDecode = function() { return this; };\nzero.length = function() { return 1; };\n\n/**\n * Zero hash.\n * @memberof util.LongBits\n * @type {string}\n */\nvar zeroHash = LongBits.zeroHash = \"\\0\\0\\0\\0\\0\\0\\0\\0\";\n\n/**\n * Constructs new long bits from the specified number.\n * @param {number} value Value\n * @returns {util.LongBits} Instance\n */\nLongBits.fromNumber = function fromNumber(value) {\n if (value === 0)\n return zero;\n var sign = value < 0;\n if (sign)\n value = -value;\n var lo = value >>> 0,\n hi = (value - lo) / 4294967296 >>> 0;\n if (sign) {\n hi = ~hi >>> 0;\n lo = ~lo >>> 0;\n if (++lo > 4294967295) {\n lo = 0;\n if (++hi > 4294967295)\n hi = 0;\n }\n }\n return new LongBits(lo, hi);\n};\n\n/**\n * Constructs new long bits from a number, long or string.\n * @param {Long|number|string} value Value\n * @returns {util.LongBits} Instance\n */\nLongBits.from = function from(value) {\n if (typeof value === \"number\")\n return LongBits.fromNumber(value);\n if (util.isString(value)) {\n /* istanbul ignore else */\n if (util.Long)\n value = util.Long.fromString(value);\n else\n return LongBits.fromNumber(parseInt(value, 10));\n }\n return value.low || value.high ? new LongBits(value.low >>> 0, value.high >>> 0) : zero;\n};\n\n/**\n * Converts this long bits to a possibly unsafe JavaScript number.\n * @param {boolean} [unsigned=false] Whether unsigned or not\n * @returns {number} Possibly unsafe number\n */\nLongBits.prototype.toNumber = function toNumber(unsigned) {\n if (!unsigned && this.hi >>> 31) {\n var lo = ~this.lo + 1 >>> 0,\n hi = ~this.hi >>> 0;\n if (!lo)\n hi = hi + 1 >>> 0;\n return -(lo + hi * 4294967296);\n }\n return this.lo + this.hi * 4294967296;\n};\n\n/**\n * Converts this long bits to a long.\n * @param {boolean} [unsigned=false] Whether unsigned or not\n * @returns {Long} Long\n */\nLongBits.prototype.toLong = function toLong(unsigned) {\n return util.Long\n ? 
new util.Long(this.lo | 0, this.hi | 0, Boolean(unsigned))\n /* istanbul ignore next */\n : { low: this.lo | 0, high: this.hi | 0, unsigned: Boolean(unsigned) };\n};\n\nvar charCodeAt = String.prototype.charCodeAt;\n\n/**\n * Constructs new long bits from the specified 8 characters long hash.\n * @param {string} hash Hash\n * @returns {util.LongBits} Bits\n */\nLongBits.fromHash = function fromHash(hash) {\n if (hash === zeroHash)\n return zero;\n return new LongBits(\n ( charCodeAt.call(hash, 0)\n | charCodeAt.call(hash, 1) << 8\n | charCodeAt.call(hash, 2) << 16\n | charCodeAt.call(hash, 3) << 24) >>> 0\n ,\n ( charCodeAt.call(hash, 4)\n | charCodeAt.call(hash, 5) << 8\n | charCodeAt.call(hash, 6) << 16\n | charCodeAt.call(hash, 7) << 24) >>> 0\n );\n};\n\n/**\n * Converts this long bits to a 8 characters long hash.\n * @returns {string} Hash\n */\nLongBits.prototype.toHash = function toHash() {\n return String.fromCharCode(\n this.lo & 255,\n this.lo >>> 8 & 255,\n this.lo >>> 16 & 255,\n this.lo >>> 24 ,\n this.hi & 255,\n this.hi >>> 8 & 255,\n this.hi >>> 16 & 255,\n this.hi >>> 24\n );\n};\n\n/**\n * Zig-zag encodes this long bits.\n * @returns {util.LongBits} `this`\n */\nLongBits.prototype.zzEncode = function zzEncode() {\n var mask = this.hi >> 31;\n this.hi = ((this.hi << 1 | this.lo >>> 31) ^ mask) >>> 0;\n this.lo = ( this.lo << 1 ^ mask) >>> 0;\n return this;\n};\n\n/**\n * Zig-zag decodes this long bits.\n * @returns {util.LongBits} `this`\n */\nLongBits.prototype.zzDecode = function zzDecode() {\n var mask = -(this.lo & 1);\n this.lo = ((this.lo >>> 1 | this.hi << 31) ^ mask) >>> 0;\n this.hi = ( this.hi >>> 1 ^ mask) >>> 0;\n return this;\n};\n\n/**\n * Calculates the length of this longbits when encoded as a varint.\n * @returns {number} Length\n */\nLongBits.prototype.length = function length() {\n var part0 = this.lo,\n part1 = (this.lo >>> 28 | this.hi << 4) >>> 0,\n part2 = this.hi >>> 24;\n return part2 === 0\n ? part1 === 0\n ? part0 < 16384\n ? part0 < 128 ? 1 : 2\n : part0 < 2097152 ? 3 : 4\n : part1 < 16384\n ? part1 < 128 ? 5 : 6\n : part1 < 2097152 ? 7 : 8\n : part2 < 128 ? 9 : 10;\n};\n\n\n//# sourceURL=webpack://@waku/noise-rtc/./node_modules/protobufjs/src/util/longbits.js?");
/***/ }),
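/* LongBits above represents a 64-bit value as two unsigned 32-bit halves and
 * implements zig-zag coding plus varint length calculation. A minimal sketch,
 * assuming the module is required directly; the values follow the code above:
 *
 *   const LongBits = require("protobufjs/src/util/longbits");
 *   const bits = LongBits.fromNumber(-1); // lo = 0xFFFFFFFF, hi = 0xFFFFFFFF
 *   bits.zzEncode();                      // zig-zag: -1 -> 1 (lo = 1, hi = 0)
 *   bits.length();                        // 1, fits in a single varint byte
 *   bits.zzDecode().toNumber();           // -1 again
 */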
/***/ "./node_modules/protobufjs/src/util/minimal.js":
/*!*****************************************************!*\
!*** ./node_modules/protobufjs/src/util/minimal.js ***!
\*****************************************************/
/***/ (function(__unused_webpack_module, exports, __webpack_require__) {
"use strict";
eval("\nvar util = exports;\n\n// used to return a Promise where callback is omitted\nutil.asPromise = __webpack_require__(/*! @protobufjs/aspromise */ \"./node_modules/@protobufjs/aspromise/index.js\");\n\n// converts to / from base64 encoded strings\nutil.base64 = __webpack_require__(/*! @protobufjs/base64 */ \"./node_modules/@protobufjs/base64/index.js\");\n\n// base class of rpc.Service\nutil.EventEmitter = __webpack_require__(/*! @protobufjs/eventemitter */ \"./node_modules/@protobufjs/eventemitter/index.js\");\n\n// float handling accross browsers\nutil.float = __webpack_require__(/*! @protobufjs/float */ \"./node_modules/@protobufjs/float/index.js\");\n\n// requires modules optionally and hides the call from bundlers\nutil.inquire = __webpack_require__(/*! @protobufjs/inquire */ \"./node_modules/@protobufjs/inquire/index.js\");\n\n// converts to / from utf8 encoded strings\nutil.utf8 = __webpack_require__(/*! @protobufjs/utf8 */ \"./node_modules/@protobufjs/utf8/index.js\");\n\n// provides a node-like buffer pool in the browser\nutil.pool = __webpack_require__(/*! @protobufjs/pool */ \"./node_modules/@protobufjs/pool/index.js\");\n\n// utility to work with the low and high bits of a 64 bit value\nutil.LongBits = __webpack_require__(/*! ./longbits */ \"./node_modules/protobufjs/src/util/longbits.js\");\n\n/**\n * Whether running within node or not.\n * @memberof util\n * @type {boolean}\n */\nutil.isNode = Boolean(typeof __webpack_require__.g !== \"undefined\"\n && __webpack_require__.g\n && __webpack_require__.g.process\n && __webpack_require__.g.process.versions\n && __webpack_require__.g.process.versions.node);\n\n/**\n * Global object reference.\n * @memberof util\n * @type {Object}\n */\nutil.global = util.isNode && __webpack_require__.g\n || typeof window !== \"undefined\" && window\n || typeof self !== \"undefined\" && self\n || this; // eslint-disable-line no-invalid-this\n\n/**\n * An immuable empty array.\n * @memberof util\n * @type {Array.<*>}\n * @const\n */\nutil.emptyArray = Object.freeze ? Object.freeze([]) : /* istanbul ignore next */ []; // used on prototypes\n\n/**\n * An immutable empty object.\n * @type {Object}\n * @const\n */\nutil.emptyObject = Object.freeze ? 
Object.freeze({}) : /* istanbul ignore next */ {}; // used on prototypes\n\n/**\n * Tests if the specified value is an integer.\n * @function\n * @param {*} value Value to test\n * @returns {boolean} `true` if the value is an integer\n */\nutil.isInteger = Number.isInteger || /* istanbul ignore next */ function isInteger(value) {\n return typeof value === \"number\" && isFinite(value) && Math.floor(value) === value;\n};\n\n/**\n * Tests if the specified value is a string.\n * @param {*} value Value to test\n * @returns {boolean} `true` if the value is a string\n */\nutil.isString = function isString(value) {\n return typeof value === \"string\" || value instanceof String;\n};\n\n/**\n * Tests if the specified value is a non-null object.\n * @param {*} value Value to test\n * @returns {boolean} `true` if the value is a non-null object\n */\nutil.isObject = function isObject(value) {\n return value && typeof value === \"object\";\n};\n\n/**\n * Checks if a property on a message is considered to be present.\n * This is an alias of {@link util.isSet}.\n * @function\n * @param {Object} obj Plain object or message instance\n * @param {string} prop Property name\n * @returns {boolean} `true` if considered to be present, otherwise `false`\n */\nutil.isset =\n\n/**\n * Checks if a property on a message is considered to be present.\n * @param {Object} obj Plain object or message instance\n * @param {string} prop Property name\n * @returns {boolean} `true` if considered to be present, otherwise `false`\n */\nutil.isSet = function isSet(obj, prop) {\n var value = obj[prop];\n if (value != null && obj.hasOwnProperty(prop)) // eslint-disable-line eqeqeq, no-prototype-builtins\n return typeof value !== \"object\" || (Array.isArray(value) ? value.length : Object.keys(value).length) > 0;\n return false;\n};\n\n/**\n * Any compatible Buffer instance.\n * This is a minimal stand-alone definition of a Buffer instance. The actual type is that exported by node's typings.\n * @interface Buffer\n * @extends Uint8Array\n */\n\n/**\n * Node's Buffer class if available.\n * @type {Constructor}\n */\nutil.Buffer = (function() {\n try {\n var Buffer = util.inquire(\"buffer\").Buffer;\n // refuse to use non-node buffers if not explicitly assigned (perf reasons):\n return Buffer.prototype.utf8Write ? Buffer : /* istanbul ignore next */ null;\n } catch (e) {\n /* istanbul ignore next */\n return null;\n }\n})();\n\n// Internal alias of or polyfull for Buffer.from.\nutil._Buffer_from = null;\n\n// Internal alias of or polyfill for Buffer.allocUnsafe.\nutil._Buffer_allocUnsafe = null;\n\n/**\n * Creates a new buffer of whatever type supported by the environment.\n * @param {number|number[]} [sizeOrArray=0] Buffer size or number array\n * @returns {Uint8Array|Buffer} Buffer\n */\nutil.newBuffer = function newBuffer(sizeOrArray) {\n /* istanbul ignore next */\n return typeof sizeOrArray === \"number\"\n ? util.Buffer\n ? util._Buffer_allocUnsafe(sizeOrArray)\n : new util.Array(sizeOrArray)\n : util.Buffer\n ? util._Buffer_from(sizeOrArray)\n : typeof Uint8Array === \"undefined\"\n ? sizeOrArray\n : new Uint8Array(sizeOrArray);\n};\n\n/**\n * Array implementation used in the browser. `Uint8Array` if supported, otherwise `Array`.\n * @type {Constructor}\n */\nutil.Array = typeof Uint8Array !== \"undefined\" ? Uint8Array /* istanbul ignore next */ : Array;\n\n/**\n * Any compatible Long instance.\n * This is a minimal stand-alone definition of a Long instance. 
The actual type is that exported by long.js.\n * @interface Long\n * @property {number} low Low bits\n * @property {number} high High bits\n * @property {boolean} unsigned Whether unsigned or not\n */\n\n/**\n * Long.js's Long class if available.\n * @type {Constructor}\n */\nutil.Long = /* istanbul ignore next */ util.global.dcodeIO && /* istanbul ignore next */ util.global.dcodeIO.Long\n || /* istanbul ignore next */ util.global.Long\n || util.inquire(\"long\");\n\n/**\n * Regular expression used to verify 2 bit (`bool`) map keys.\n * @type {RegExp}\n * @const\n */\nutil.key2Re = /^true|false|0|1$/;\n\n/**\n * Regular expression used to verify 32 bit (`int32` etc.) map keys.\n * @type {RegExp}\n * @const\n */\nutil.key32Re = /^-?(?:0|[1-9][0-9]*)$/;\n\n/**\n * Regular expression used to verify 64 bit (`int64` etc.) map keys.\n * @type {RegExp}\n * @const\n */\nutil.key64Re = /^(?:[\\\\x00-\\\\xff]{8}|-?(?:0|[1-9][0-9]*))$/;\n\n/**\n * Converts a number or long to an 8 characters long hash string.\n * @param {Long|number} value Value to convert\n * @returns {string} Hash\n */\nutil.longToHash = function longToHash(value) {\n return value\n ? util.LongBits.from(value).toHash()\n : util.LongBits.zeroHash;\n};\n\n/**\n * Converts an 8 characters long hash string to a long or number.\n * @param {string} hash Hash\n * @param {boolean} [unsigned=false] Whether unsigned or not\n * @returns {Long|number} Original value\n */\nutil.longFromHash = function longFromHash(hash, unsigned) {\n var bits = util.LongBits.fromHash(hash);\n if (util.Long)\n return util.Long.fromBits(bits.lo, bits.hi, unsigned);\n return bits.toNumber(Boolean(unsigned));\n};\n\n/**\n * Merges the properties of the source object into the destination object.\n * @memberof util\n * @param {Object.} dst Destination object\n * @param {Object.} src Source object\n * @param {boolean} [ifNotSet=false] Merges only if the key is not already set\n * @returns {Object.} Destination object\n */\nfunction merge(dst, src, ifNotSet) { // used by converters\n for (var keys = Object.keys(src), i = 0; i < keys.length; ++i)\n if (dst[keys[i]] === undefined || !ifNotSet)\n dst[keys[i]] = src[keys[i]];\n return dst;\n}\n\nutil.merge = merge;\n\n/**\n * Converts the first character of a string to lower case.\n * @param {string} str String to convert\n * @returns {string} Converted string\n */\nutil.lcFirst = function lcFirst(str) {\n return str.charAt(0).toLowerCase() + str.substring(1);\n};\n\n/**\n * Creates a custom error constructor.\n * @memberof util\n * @param {string} name Error name\n * @returns {Constructor} Custom error constructor\n */\nfunction newError(name) {\n\n function CustomError(message, properties) {\n\n if (!(this instanceof CustomError))\n return new CustomError(message, properties);\n\n // Error.call(this, message);\n // ^ just returns a new error instance because the ctor can be called as a function\n\n Object.defineProperty(this, \"message\", { get: function() { return message; } });\n\n /* istanbul ignore next */\n if (Error.captureStackTrace) // node\n Error.captureStackTrace(this, CustomError);\n else\n Object.defineProperty(this, \"stack\", { value: new Error().stack || \"\" });\n\n if (properties)\n merge(this, properties);\n }\n\n CustomError.prototype = Object.create(Error.prototype, {\n constructor: {\n value: CustomError,\n writable: true,\n enumerable: false,\n configurable: true,\n },\n name: {\n get() { return name; },\n set: undefined,\n enumerable: false,\n // configurable: false would accurately preserve the 
behavior of\n // the original, but I'm guessing that was not intentional.\n // For an actual error subclass, this property would\n // be configurable.\n configurable: true,\n },\n toString: {\n value() { return this.name + \": \" + this.message; },\n writable: true,\n enumerable: false,\n configurable: true,\n },\n });\n\n return CustomError;\n}\n\nutil.newError = newError;\n\n/**\n * Constructs a new protocol error.\n * @classdesc Error subclass indicating a protocol specifc error.\n * @memberof util\n * @extends Error\n * @template T extends Message\n * @constructor\n * @param {string} message Error message\n * @param {Object.} [properties] Additional properties\n * @example\n * try {\n * MyMessage.decode(someBuffer); // throws if required fields are missing\n * } catch (e) {\n * if (e instanceof ProtocolError && e.instance)\n * console.log(\"decoded so far: \" + JSON.stringify(e.instance));\n * }\n */\nutil.ProtocolError = newError(\"ProtocolError\");\n\n/**\n * So far decoded message instance.\n * @name util.ProtocolError#instance\n * @type {Message}\n */\n\n/**\n * A OneOf getter as returned by {@link util.oneOfGetter}.\n * @typedef OneOfGetter\n * @type {function}\n * @returns {string|undefined} Set field name, if any\n */\n\n/**\n * Builds a getter for a oneof's present field name.\n * @param {string[]} fieldNames Field names\n * @returns {OneOfGetter} Unbound getter\n */\nutil.oneOfGetter = function getOneOf(fieldNames) {\n var fieldMap = {};\n for (var i = 0; i < fieldNames.length; ++i)\n fieldMap[fieldNames[i]] = 1;\n\n /**\n * @returns {string|undefined} Set field name, if any\n * @this Object\n * @ignore\n */\n return function() { // eslint-disable-line consistent-return\n for (var keys = Object.keys(this), i = keys.length - 1; i > -1; --i)\n if (fieldMap[keys[i]] === 1 && this[keys[i]] !== undefined && this[keys[i]] !== null)\n return keys[i];\n };\n};\n\n/**\n * A OneOf setter as returned by {@link util.oneOfSetter}.\n * @typedef OneOfSetter\n * @type {function}\n * @param {string|undefined} value Field name\n * @returns {undefined}\n */\n\n/**\n * Builds a setter for a oneof's present field name.\n * @param {string[]} fieldNames Field names\n * @returns {OneOfSetter} Unbound setter\n */\nutil.oneOfSetter = function setOneOf(fieldNames) {\n\n /**\n * @param {string} name Field name\n * @returns {undefined}\n * @this Object\n * @ignore\n */\n return function(name) {\n for (var i = 0; i < fieldNames.length; ++i)\n if (fieldNames[i] !== name)\n delete this[fieldNames[i]];\n };\n};\n\n/**\n * Default conversion options used for {@link Message#toJSON} implementations.\n *\n * These options are close to proto3's JSON mapping with the exception that internal types like Any are handled just like messages. 
More precisely:\n *\n * - Longs become strings\n * - Enums become string keys\n * - Bytes become base64 encoded strings\n * - (Sub-)Messages become plain objects\n * - Maps become plain objects with all string keys\n * - Repeated fields become arrays\n * - NaN and Infinity for float and double fields become strings\n *\n * @type {IConversionOptions}\n * @see https://developers.google.com/protocol-buffers/docs/proto3?hl=en#json\n */\nutil.toJSONOptions = {\n longs: String,\n enums: String,\n bytes: String,\n json: true\n};\n\n// Sets up buffer utility according to the environment (called in index-minimal)\nutil._configure = function() {\n var Buffer = util.Buffer;\n /* istanbul ignore if */\n if (!Buffer) {\n util._Buffer_from = util._Buffer_allocUnsafe = null;\n return;\n }\n // because node 4.x buffers are incompatible & immutable\n // see: https://github.com/dcodeIO/protobuf.js/pull/665\n util._Buffer_from = Buffer.from !== Uint8Array.from && Buffer.from ||\n /* istanbul ignore next */\n function Buffer_from(value, encoding) {\n return new Buffer(value, encoding);\n };\n util._Buffer_allocUnsafe = Buffer.allocUnsafe ||\n /* istanbul ignore next */\n function Buffer_allocUnsafe(size) {\n return new Buffer(size);\n };\n};\n\n\n//# sourceURL=webpack://@waku/noise-rtc/./node_modules/protobufjs/src/util/minimal.js?");
/***/ }),
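/* util/minimal above bundles environment detection, Buffer/Long handling and
 * the oneof getter/setter factories used by generated code. A minimal sketch
 * of the oneof helpers and the hash conversion, assuming direct require; the
 * message object and field names are illustrative:
 *
 *   const util = require("protobufjs/src/util/minimal");
 *   const getOneof = util.oneOfGetter(["foo", "bar"]);
 *   const setOneof = util.oneOfSetter(["foo", "bar"]);
 *   const msg = { foo: 1 };
 *   getOneof.call(msg);                  // "foo" (the member currently set)
 *   setOneof.call(msg, "bar");           // clears the other members, here msg.foo
 *   util.longToHash(1);                  // "\x01\0\0\0\0\0\0\0", 8-char little-endian hash
 *   util.longFromHash("\x01\0\0\0\0\0\0\0"); // 1 (a Long instance if long.js is present)
 */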
/***/ "./node_modules/protobufjs/src/verifier.js":
/*!*************************************************!*\
!*** ./node_modules/protobufjs/src/verifier.js ***!
\*************************************************/
/***/ ((module, __unused_webpack_exports, __webpack_require__) => {
"use strict";
eval("\nmodule.exports = verifier;\n\nvar Enum = __webpack_require__(/*! ./enum */ \"./node_modules/protobufjs/src/enum.js\"),\n util = __webpack_require__(/*! ./util */ \"./node_modules/protobufjs/src/util.js\");\n\nfunction invalid(field, expected) {\n return field.name + \": \" + expected + (field.repeated && expected !== \"array\" ? \"[]\" : field.map && expected !== \"object\" ? \"{k:\"+field.keyType+\"}\" : \"\") + \" expected\";\n}\n\n/**\n * Generates a partial value verifier.\n * @param {Codegen} gen Codegen instance\n * @param {Field} field Reflected field\n * @param {number} fieldIndex Field index\n * @param {string} ref Variable reference\n * @returns {Codegen} Codegen instance\n * @ignore\n */\nfunction genVerifyValue(gen, field, fieldIndex, ref) {\n /* eslint-disable no-unexpected-multiline */\n if (field.resolvedType) {\n if (field.resolvedType instanceof Enum) { gen\n (\"switch(%s){\", ref)\n (\"default:\")\n (\"return%j\", invalid(field, \"enum value\"));\n for (var keys = Object.keys(field.resolvedType.values), j = 0; j < keys.length; ++j) gen\n (\"case %i:\", field.resolvedType.values[keys[j]]);\n gen\n (\"break\")\n (\"}\");\n } else {\n gen\n (\"{\")\n (\"var e=types[%i].verify(%s);\", fieldIndex, ref)\n (\"if(e)\")\n (\"return%j+e\", field.name + \".\")\n (\"}\");\n }\n } else {\n switch (field.type) {\n case \"int32\":\n case \"uint32\":\n case \"sint32\":\n case \"fixed32\":\n case \"sfixed32\": gen\n (\"if(!util.isInteger(%s))\", ref)\n (\"return%j\", invalid(field, \"integer\"));\n break;\n case \"int64\":\n case \"uint64\":\n case \"sint64\":\n case \"fixed64\":\n case \"sfixed64\": gen\n (\"if(!util.isInteger(%s)&&!(%s&&util.isInteger(%s.low)&&util.isInteger(%s.high)))\", ref, ref, ref, ref)\n (\"return%j\", invalid(field, \"integer|Long\"));\n break;\n case \"float\":\n case \"double\": gen\n (\"if(typeof %s!==\\\"number\\\")\", ref)\n (\"return%j\", invalid(field, \"number\"));\n break;\n case \"bool\": gen\n (\"if(typeof %s!==\\\"boolean\\\")\", ref)\n (\"return%j\", invalid(field, \"boolean\"));\n break;\n case \"string\": gen\n (\"if(!util.isString(%s))\", ref)\n (\"return%j\", invalid(field, \"string\"));\n break;\n case \"bytes\": gen\n (\"if(!(%s&&typeof %s.length===\\\"number\\\"||util.isString(%s)))\", ref, ref, ref)\n (\"return%j\", invalid(field, \"buffer\"));\n break;\n }\n }\n return gen;\n /* eslint-enable no-unexpected-multiline */\n}\n\n/**\n * Generates a partial key verifier.\n * @param {Codegen} gen Codegen instance\n * @param {Field} field Reflected field\n * @param {string} ref Variable reference\n * @returns {Codegen} Codegen instance\n * @ignore\n */\nfunction genVerifyKey(gen, field, ref) {\n /* eslint-disable no-unexpected-multiline */\n switch (field.keyType) {\n case \"int32\":\n case \"uint32\":\n case \"sint32\":\n case \"fixed32\":\n case \"sfixed32\": gen\n (\"if(!util.key32Re.test(%s))\", ref)\n (\"return%j\", invalid(field, \"integer key\"));\n break;\n case \"int64\":\n case \"uint64\":\n case \"sint64\":\n case \"fixed64\":\n case \"sfixed64\": gen\n (\"if(!util.key64Re.test(%s))\", ref) // see comment above: x is ok, d is not\n (\"return%j\", invalid(field, \"integer|Long key\"));\n break;\n case \"bool\": gen\n (\"if(!util.key2Re.test(%s))\", ref)\n (\"return%j\", invalid(field, \"boolean key\"));\n break;\n }\n return gen;\n /* eslint-enable no-unexpected-multiline */\n}\n\n/**\n * Generates a verifier specific to the specified message type.\n * @param {Type} mtype Message type\n * @returns {Codegen} Codegen instance\n 
*/\nfunction verifier(mtype) {\n /* eslint-disable no-unexpected-multiline */\n\n var gen = util.codegen([\"m\"], mtype.name + \"$verify\")\n (\"if(typeof m!==\\\"object\\\"||m===null)\")\n (\"return%j\", \"object expected\");\n var oneofs = mtype.oneofsArray,\n seenFirstField = {};\n if (oneofs.length) gen\n (\"var p={}\");\n\n for (var i = 0; i < /* initializes */ mtype.fieldsArray.length; ++i) {\n var field = mtype._fieldsArray[i].resolve(),\n ref = \"m\" + util.safeProp(field.name);\n\n if (field.optional) gen\n (\"if(%s!=null&&m.hasOwnProperty(%j)){\", ref, field.name); // !== undefined && !== null\n\n // map fields\n if (field.map) { gen\n (\"if(!util.isObject(%s))\", ref)\n (\"return%j\", invalid(field, \"object\"))\n (\"var k=Object.keys(%s)\", ref)\n (\"for(var i=0;i {
"use strict";
eval("\n\n/**\n * Wrappers for common types.\n * @type {Object.}\n * @const\n */\nvar wrappers = exports;\n\nvar Message = __webpack_require__(/*! ./message */ \"./node_modules/protobufjs/src/message.js\");\n\n/**\n * From object converter part of an {@link IWrapper}.\n * @typedef WrapperFromObjectConverter\n * @type {function}\n * @param {Object.} object Plain object\n * @returns {Message<{}>} Message instance\n * @this Type\n */\n\n/**\n * To object converter part of an {@link IWrapper}.\n * @typedef WrapperToObjectConverter\n * @type {function}\n * @param {Message<{}>} message Message instance\n * @param {IConversionOptions} [options] Conversion options\n * @returns {Object.} Plain object\n * @this Type\n */\n\n/**\n * Common type wrapper part of {@link wrappers}.\n * @interface IWrapper\n * @property {WrapperFromObjectConverter} [fromObject] From object converter\n * @property {WrapperToObjectConverter} [toObject] To object converter\n */\n\n// Custom wrapper for Any\nwrappers[\".google.protobuf.Any\"] = {\n\n fromObject: function(object) {\n\n // unwrap value type if mapped\n if (object && object[\"@type\"]) {\n // Only use fully qualified type name after the last '/'\n var name = object[\"@type\"].substring(object[\"@type\"].lastIndexOf(\"/\") + 1);\n var type = this.lookup(name);\n /* istanbul ignore else */\n if (type) {\n // type_url does not accept leading \".\"\n var type_url = object[\"@type\"].charAt(0) === \".\" ?\n object[\"@type\"].slice(1) : object[\"@type\"];\n // type_url prefix is optional, but path seperator is required\n if (type_url.indexOf(\"/\") === -1) {\n type_url = \"/\" + type_url;\n }\n return this.create({\n type_url: type_url,\n value: type.encode(type.fromObject(object)).finish()\n });\n }\n }\n\n return this.fromObject(object);\n },\n\n toObject: function(message, options) {\n\n // Default prefix\n var googleApi = \"type.googleapis.com/\";\n var prefix = \"\";\n var name = \"\";\n\n // decode value if requested and unmapped\n if (options && options.json && message.type_url && message.value) {\n // Only use fully qualified type name after the last '/'\n name = message.type_url.substring(message.type_url.lastIndexOf(\"/\") + 1);\n // Separate the prefix used\n prefix = message.type_url.substring(0, message.type_url.lastIndexOf(\"/\") + 1);\n var type = this.lookup(name);\n /* istanbul ignore else */\n if (type)\n message = type.decode(message.value);\n }\n\n // wrap value if unmapped\n if (!(message instanceof this.ctor) && message instanceof Message) {\n var object = message.$type.toObject(message, options);\n var messageName = message.$type.fullName[0] === \".\" ?\n message.$type.fullName.slice(1) : message.$type.fullName;\n // Default to type.googleapis.com prefix if no prefix is used\n if (prefix === \"\") {\n prefix = googleApi;\n }\n name = prefix + messageName;\n object[\"@type\"] = name;\n return object;\n }\n\n return this.toObject(message, options);\n }\n};\n\n\n//# sourceURL=webpack://@waku/noise-rtc/./node_modules/protobufjs/src/wrappers.js?");
/***/ }),
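/* The wrapper above maps google.protobuf.Any between its JSON form (a plain
 * object carrying an "@type" URL) and its wire form (type_url plus encoded
 * bytes). A sketch of the two shapes, assuming the root can look up the
 * hypothetical type my.Value:
 *
 *   fromObject input:   { "@type": "type.googleapis.com/my.Value", x: 1 }
 *   resulting message:  { type_url: "type.googleapis.com/my.Value", value: <encoded my.Value> }
 *
 * toObject(message, { json: true }) reverses this, defaulting the prefix to
 * "type.googleapis.com/" when the stored type_url carries none.
 */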
/***/ "./node_modules/protobufjs/src/writer.js":
/*!***********************************************!*\
!*** ./node_modules/protobufjs/src/writer.js ***!
\***********************************************/
/***/ ((module, __unused_webpack_exports, __webpack_require__) => {
"use strict";
eval("\nmodule.exports = Writer;\n\nvar util = __webpack_require__(/*! ./util/minimal */ \"./node_modules/protobufjs/src/util/minimal.js\");\n\nvar BufferWriter; // cyclic\n\nvar LongBits = util.LongBits,\n base64 = util.base64,\n utf8 = util.utf8;\n\n/**\n * Constructs a new writer operation instance.\n * @classdesc Scheduled writer operation.\n * @constructor\n * @param {function(*, Uint8Array, number)} fn Function to call\n * @param {number} len Value byte length\n * @param {*} val Value to write\n * @ignore\n */\nfunction Op(fn, len, val) {\n\n /**\n * Function to call.\n * @type {function(Uint8Array, number, *)}\n */\n this.fn = fn;\n\n /**\n * Value byte length.\n * @type {number}\n */\n this.len = len;\n\n /**\n * Next operation.\n * @type {Writer.Op|undefined}\n */\n this.next = undefined;\n\n /**\n * Value to write.\n * @type {*}\n */\n this.val = val; // type varies\n}\n\n/* istanbul ignore next */\nfunction noop() {} // eslint-disable-line no-empty-function\n\n/**\n * Constructs a new writer state instance.\n * @classdesc Copied writer state.\n * @memberof Writer\n * @constructor\n * @param {Writer} writer Writer to copy state from\n * @ignore\n */\nfunction State(writer) {\n\n /**\n * Current head.\n * @type {Writer.Op}\n */\n this.head = writer.head;\n\n /**\n * Current tail.\n * @type {Writer.Op}\n */\n this.tail = writer.tail;\n\n /**\n * Current buffer length.\n * @type {number}\n */\n this.len = writer.len;\n\n /**\n * Next state.\n * @type {State|null}\n */\n this.next = writer.states;\n}\n\n/**\n * Constructs a new writer instance.\n * @classdesc Wire format writer using `Uint8Array` if available, otherwise `Array`.\n * @constructor\n */\nfunction Writer() {\n\n /**\n * Current length.\n * @type {number}\n */\n this.len = 0;\n\n /**\n * Operations head.\n * @type {Object}\n */\n this.head = new Op(noop, 0, 0);\n\n /**\n * Operations tail\n * @type {Object}\n */\n this.tail = this.head;\n\n /**\n * Linked forked states.\n * @type {Object|null}\n */\n this.states = null;\n\n // When a value is written, the writer calculates its byte length and puts it into a linked\n // list of operations to perform when finish() is called. This both allows us to allocate\n // buffers of the exact required size and reduces the amount of work we have to do compared\n // to first calculating over objects and then encoding over objects. In our case, the encoding\n // part is just a linked list walk calling operations with already prepared values.\n}\n\nvar create = function create() {\n return util.Buffer\n ? 
function create_buffer_setup() {\n return (Writer.create = function create_buffer() {\n return new BufferWriter();\n })();\n }\n /* istanbul ignore next */\n : function create_array() {\n return new Writer();\n };\n};\n\n/**\n * Creates a new writer.\n * @function\n * @returns {BufferWriter|Writer} A {@link BufferWriter} when Buffers are supported, otherwise a {@link Writer}\n */\nWriter.create = create();\n\n/**\n * Allocates a buffer of the specified size.\n * @param {number} size Buffer size\n * @returns {Uint8Array} Buffer\n */\nWriter.alloc = function alloc(size) {\n return new util.Array(size);\n};\n\n// Use Uint8Array buffer pool in the browser, just like node does with buffers\n/* istanbul ignore else */\nif (util.Array !== Array)\n Writer.alloc = util.pool(Writer.alloc, util.Array.prototype.subarray);\n\n/**\n * Pushes a new operation to the queue.\n * @param {function(Uint8Array, number, *)} fn Function to call\n * @param {number} len Value byte length\n * @param {number} val Value to write\n * @returns {Writer} `this`\n * @private\n */\nWriter.prototype._push = function push(fn, len, val) {\n this.tail = this.tail.next = new Op(fn, len, val);\n this.len += len;\n return this;\n};\n\nfunction writeByte(val, buf, pos) {\n buf[pos] = val & 255;\n}\n\nfunction writeVarint32(val, buf, pos) {\n while (val > 127) {\n buf[pos++] = val & 127 | 128;\n val >>>= 7;\n }\n buf[pos] = val;\n}\n\n/**\n * Constructs a new varint writer operation instance.\n * @classdesc Scheduled varint writer operation.\n * @extends Op\n * @constructor\n * @param {number} len Value byte length\n * @param {number} val Value to write\n * @ignore\n */\nfunction VarintOp(len, val) {\n this.len = len;\n this.next = undefined;\n this.val = val;\n}\n\nVarintOp.prototype = Object.create(Op.prototype);\nVarintOp.prototype.fn = writeVarint32;\n\n/**\n * Writes an unsigned 32 bit value as a varint.\n * @param {number} value Value to write\n * @returns {Writer} `this`\n */\nWriter.prototype.uint32 = function write_uint32(value) {\n // here, the call to this.push has been inlined and a varint specific Op subclass is used.\n // uint32 is by far the most frequently used operation and benefits significantly from this.\n this.len += (this.tail = this.tail.next = new VarintOp(\n (value = value >>> 0)\n < 128 ? 1\n : value < 16384 ? 2\n : value < 2097152 ? 3\n : value < 268435456 ? 4\n : 5,\n value)).len;\n return this;\n};\n\n/**\n * Writes a signed 32 bit value as a varint.\n * @function\n * @param {number} value Value to write\n * @returns {Writer} `this`\n */\nWriter.prototype.int32 = function write_int32(value) {\n return value < 0\n ? 
this._push(writeVarint64, 10, LongBits.fromNumber(value)) // 10 bytes per spec\n : this.uint32(value);\n};\n\n/**\n * Writes a 32 bit value as a varint, zig-zag encoded.\n * @param {number} value Value to write\n * @returns {Writer} `this`\n */\nWriter.prototype.sint32 = function write_sint32(value) {\n return this.uint32((value << 1 ^ value >> 31) >>> 0);\n};\n\nfunction writeVarint64(val, buf, pos) {\n while (val.hi) {\n buf[pos++] = val.lo & 127 | 128;\n val.lo = (val.lo >>> 7 | val.hi << 25) >>> 0;\n val.hi >>>= 7;\n }\n while (val.lo > 127) {\n buf[pos++] = val.lo & 127 | 128;\n val.lo = val.lo >>> 7;\n }\n buf[pos++] = val.lo;\n}\n\n/**\n * Writes an unsigned 64 bit value as a varint.\n * @param {Long|number|string} value Value to write\n * @returns {Writer} `this`\n * @throws {TypeError} If `value` is a string and no long library is present.\n */\nWriter.prototype.uint64 = function write_uint64(value) {\n var bits = LongBits.from(value);\n return this._push(writeVarint64, bits.length(), bits);\n};\n\n/**\n * Writes a signed 64 bit value as a varint.\n * @function\n * @param {Long|number|string} value Value to write\n * @returns {Writer} `this`\n * @throws {TypeError} If `value` is a string and no long library is present.\n */\nWriter.prototype.int64 = Writer.prototype.uint64;\n\n/**\n * Writes a signed 64 bit value as a varint, zig-zag encoded.\n * @param {Long|number|string} value Value to write\n * @returns {Writer} `this`\n * @throws {TypeError} If `value` is a string and no long library is present.\n */\nWriter.prototype.sint64 = function write_sint64(value) {\n var bits = LongBits.from(value).zzEncode();\n return this._push(writeVarint64, bits.length(), bits);\n};\n\n/**\n * Writes a boolish value as a varint.\n * @param {boolean} value Value to write\n * @returns {Writer} `this`\n */\nWriter.prototype.bool = function write_bool(value) {\n return this._push(writeByte, 1, value ? 
1 : 0);\n};\n\nfunction writeFixed32(val, buf, pos) {\n buf[pos ] = val & 255;\n buf[pos + 1] = val >>> 8 & 255;\n buf[pos + 2] = val >>> 16 & 255;\n buf[pos + 3] = val >>> 24;\n}\n\n/**\n * Writes an unsigned 32 bit value as fixed 32 bits.\n * @param {number} value Value to write\n * @returns {Writer} `this`\n */\nWriter.prototype.fixed32 = function write_fixed32(value) {\n return this._push(writeFixed32, 4, value >>> 0);\n};\n\n/**\n * Writes a signed 32 bit value as fixed 32 bits.\n * @function\n * @param {number} value Value to write\n * @returns {Writer} `this`\n */\nWriter.prototype.sfixed32 = Writer.prototype.fixed32;\n\n/**\n * Writes an unsigned 64 bit value as fixed 64 bits.\n * @param {Long|number|string} value Value to write\n * @returns {Writer} `this`\n * @throws {TypeError} If `value` is a string and no long library is present.\n */\nWriter.prototype.fixed64 = function write_fixed64(value) {\n var bits = LongBits.from(value);\n return this._push(writeFixed32, 4, bits.lo)._push(writeFixed32, 4, bits.hi);\n};\n\n/**\n * Writes a signed 64 bit value as fixed 64 bits.\n * @function\n * @param {Long|number|string} value Value to write\n * @returns {Writer} `this`\n * @throws {TypeError} If `value` is a string and no long library is present.\n */\nWriter.prototype.sfixed64 = Writer.prototype.fixed64;\n\n/**\n * Writes a float (32 bit).\n * @function\n * @param {number} value Value to write\n * @returns {Writer} `this`\n */\nWriter.prototype.float = function write_float(value) {\n return this._push(util.float.writeFloatLE, 4, value);\n};\n\n/**\n * Writes a double (64 bit float).\n * @function\n * @param {number} value Value to write\n * @returns {Writer} `this`\n */\nWriter.prototype.double = function write_double(value) {\n return this._push(util.float.writeDoubleLE, 8, value);\n};\n\nvar writeBytes = util.Array.prototype.set\n ? function writeBytes_set(val, buf, pos) {\n buf.set(val, pos); // also works for plain array values\n }\n /* istanbul ignore next */\n : function writeBytes_for(val, buf, pos) {\n for (var i = 0; i < val.length; ++i)\n buf[pos + i] = val[i];\n };\n\n/**\n * Writes a sequence of bytes.\n * @param {Uint8Array|string} value Buffer or base64 encoded string to write\n * @returns {Writer} `this`\n */\nWriter.prototype.bytes = function write_bytes(value) {\n var len = value.length >>> 0;\n if (!len)\n return this._push(writeByte, 1, 0);\n if (util.isString(value)) {\n var buf = Writer.alloc(len = base64.length(value));\n base64.decode(value, buf, 0);\n value = buf;\n }\n return this.uint32(len)._push(writeBytes, len, value);\n};\n\n/**\n * Writes a string.\n * @param {string} value Value to write\n * @returns {Writer} `this`\n */\nWriter.prototype.string = function write_string(value) {\n var len = utf8.length(value);\n return len\n ? 
this.uint32(len)._push(utf8.write, len, value)\n : this._push(writeByte, 1, 0);\n};\n\n/**\n * Forks this writer's state by pushing it to a stack.\n * Calling {@link Writer#reset|reset} or {@link Writer#ldelim|ldelim} resets the writer to the previous state.\n * @returns {Writer} `this`\n */\nWriter.prototype.fork = function fork() {\n this.states = new State(this);\n this.head = this.tail = new Op(noop, 0, 0);\n this.len = 0;\n return this;\n};\n\n/**\n * Resets this instance to the last state.\n * @returns {Writer} `this`\n */\nWriter.prototype.reset = function reset() {\n if (this.states) {\n this.head = this.states.head;\n this.tail = this.states.tail;\n this.len = this.states.len;\n this.states = this.states.next;\n } else {\n this.head = this.tail = new Op(noop, 0, 0);\n this.len = 0;\n }\n return this;\n};\n\n/**\n * Resets to the last state and appends the fork state's current write length as a varint followed by its operations.\n * @returns {Writer} `this`\n */\nWriter.prototype.ldelim = function ldelim() {\n var head = this.head,\n tail = this.tail,\n len = this.len;\n this.reset().uint32(len);\n if (len) {\n this.tail.next = head.next; // skip noop\n this.tail = tail;\n this.len += len;\n }\n return this;\n};\n\n/**\n * Finishes the write operation.\n * @returns {Uint8Array} Finished buffer\n */\nWriter.prototype.finish = function finish() {\n var head = this.head.next, // skip noop\n buf = this.constructor.alloc(this.len),\n pos = 0;\n while (head) {\n head.fn(head.val, buf, pos);\n pos += head.len;\n head = head.next;\n }\n // this.head = this.tail = null;\n return buf;\n};\n\nWriter._configure = function(BufferWriter_) {\n BufferWriter = BufferWriter_;\n Writer.create = create();\n BufferWriter._configure();\n};\n\n\n//# sourceURL=webpack://@waku/noise-rtc/./node_modules/protobufjs/src/writer.js?");
/***/ }),
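/* The Writer above queues write operations in a linked list and only
 * allocates the output buffer once finish() knows the exact size; fork() and
 * ldelim() produce length-delimited (embedded message) fields without a
 * second pass. A minimal sketch, assuming direct require; the byte values
 * follow the varint and zig-zag code above:
 *
 *   const Writer = require("protobufjs/src/writer");
 *   const w = Writer.create();   // BufferWriter under node, plain Writer otherwise
 *   w.uint32(300);               // varint          -> AC 02
 *   w.sint32(-1);                // zig-zag varint  -> 01
 *   w.fork();                    // start a length-delimited region
 *   w.string("hi");              //   02 68 69
 *   w.ldelim();                  // prefix the region with its length: 03 02 68 69
 *   w.finish();                  // 7 bytes total: AC 02 01 03 02 68 69
 */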
/***/ "./node_modules/protobufjs/src/writer_buffer.js":
/*!******************************************************!*\
!*** ./node_modules/protobufjs/src/writer_buffer.js ***!
\******************************************************/
/***/ ((module, __unused_webpack_exports, __webpack_require__) => {
"use strict";
eval("\nmodule.exports = BufferWriter;\n\n// extends Writer\nvar Writer = __webpack_require__(/*! ./writer */ \"./node_modules/protobufjs/src/writer.js\");\n(BufferWriter.prototype = Object.create(Writer.prototype)).constructor = BufferWriter;\n\nvar util = __webpack_require__(/*! ./util/minimal */ \"./node_modules/protobufjs/src/util/minimal.js\");\n\n/**\n * Constructs a new buffer writer instance.\n * @classdesc Wire format writer using node buffers.\n * @extends Writer\n * @constructor\n */\nfunction BufferWriter() {\n Writer.call(this);\n}\n\nBufferWriter._configure = function () {\n /**\n * Allocates a buffer of the specified size.\n * @function\n * @param {number} size Buffer size\n * @returns {Buffer} Buffer\n */\n BufferWriter.alloc = util._Buffer_allocUnsafe;\n\n BufferWriter.writeBytesBuffer = util.Buffer && util.Buffer.prototype instanceof Uint8Array && util.Buffer.prototype.set.name === \"set\"\n ? function writeBytesBuffer_set(val, buf, pos) {\n buf.set(val, pos); // faster than copy (requires node >= 4 where Buffers extend Uint8Array and set is properly inherited)\n // also works for plain array values\n }\n /* istanbul ignore next */\n : function writeBytesBuffer_copy(val, buf, pos) {\n if (val.copy) // Buffer values\n val.copy(buf, pos, 0, val.length);\n else for (var i = 0; i < val.length;) // plain array values\n buf[pos++] = val[i++];\n };\n};\n\n\n/**\n * @override\n */\nBufferWriter.prototype.bytes = function write_bytes_buffer(value) {\n if (util.isString(value))\n value = util._Buffer_from(value, \"base64\");\n var len = value.length >>> 0;\n this.uint32(len);\n if (len)\n this._push(BufferWriter.writeBytesBuffer, len, value);\n return this;\n};\n\nfunction writeStringBuffer(val, buf, pos) {\n if (val.length < 40) // plain js is faster for short strings (probably due to redundant assertions)\n util.utf8.write(val, buf, pos);\n else if (buf.utf8Write)\n buf.utf8Write(val, pos);\n else\n buf.write(val, pos);\n}\n\n/**\n * @override\n */\nBufferWriter.prototype.string = function write_string_buffer(value) {\n var len = util.Buffer.byteLength(value);\n this.uint32(len);\n if (len)\n this._push(writeStringBuffer, len, value);\n return this;\n};\n\n\n/**\n * Finishes the write operation.\n * @name BufferWriter#finish\n * @function\n * @returns {Buffer} Finished buffer\n */\n\nBufferWriter._configure();\n\n\n//# sourceURL=webpack://@waku/noise-rtc/./node_modules/protobufjs/src/writer_buffer.js?");
/***/ }),
/***/ "./node_modules/qrcode/lib/browser.js":
/*!********************************************!*\
!*** ./node_modules/qrcode/lib/browser.js ***!
\********************************************/
/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
eval("\nconst canPromise = __webpack_require__(/*! ./can-promise */ \"./node_modules/qrcode/lib/can-promise.js\")\n\nconst QRCode = __webpack_require__(/*! ./core/qrcode */ \"./node_modules/qrcode/lib/core/qrcode.js\")\nconst CanvasRenderer = __webpack_require__(/*! ./renderer/canvas */ \"./node_modules/qrcode/lib/renderer/canvas.js\")\nconst SvgRenderer = __webpack_require__(/*! ./renderer/svg-tag.js */ \"./node_modules/qrcode/lib/renderer/svg-tag.js\")\n\nfunction renderCanvas (renderFunc, canvas, text, opts, cb) {\n const args = [].slice.call(arguments, 1)\n const argsNum = args.length\n const isLastArgCb = typeof args[argsNum - 1] === 'function'\n\n if (!isLastArgCb && !canPromise()) {\n throw new Error('Callback required as last argument')\n }\n\n if (isLastArgCb) {\n if (argsNum < 2) {\n throw new Error('Too few arguments provided')\n }\n\n if (argsNum === 2) {\n cb = text\n text = canvas\n canvas = opts = undefined\n } else if (argsNum === 3) {\n if (canvas.getContext && typeof cb === 'undefined') {\n cb = opts\n opts = undefined\n } else {\n cb = opts\n opts = text\n text = canvas\n canvas = undefined\n }\n }\n } else {\n if (argsNum < 1) {\n throw new Error('Too few arguments provided')\n }\n\n if (argsNum === 1) {\n text = canvas\n canvas = opts = undefined\n } else if (argsNum === 2 && !canvas.getContext) {\n opts = text\n text = canvas\n canvas = undefined\n }\n\n return new Promise(function (resolve, reject) {\n try {\n const data = QRCode.create(text, opts)\n resolve(renderFunc(data, canvas, opts))\n } catch (e) {\n reject(e)\n }\n })\n }\n\n try {\n const data = QRCode.create(text, opts)\n cb(null, renderFunc(data, canvas, opts))\n } catch (e) {\n cb(e)\n }\n}\n\nexports.create = QRCode.create\nexports.toCanvas = renderCanvas.bind(null, CanvasRenderer.render)\nexports.toDataURL = renderCanvas.bind(null, CanvasRenderer.renderToDataURL)\n\n// only svg for now.\nexports.toString = renderCanvas.bind(null, function (data, _, opts) {\n return SvgRenderer.render(data, opts)\n})\n\n\n//# sourceURL=webpack://@waku/noise-rtc/./node_modules/qrcode/lib/browser.js?");
/***/ }),
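/* browser.js above is the public entry point of the qrcode package: it
 * normalizes the (canvas, text, opts, cb) argument combinations and falls
 * back to a Promise when no callback is given. A minimal usage sketch; the
 * element id and text are illustrative:
 *
 *   const QRCode = require("qrcode");
 *   QRCode.toDataURL("hello world", { errorCorrectionLevel: "M" })
 *     .then(url => console.log(url));        // data: URL of the rendered image
 *   QRCode.toCanvas(document.getElementById("qr"), "hello world", err => {
 *     if (err) console.error(err);           // callback form with an explicit canvas
 *   });
 *   QRCode.toString("hello world")
 *     .then(svg => console.log(svg));        // SVG markup (the only toString target here)
 */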
/***/ "./node_modules/qrcode/lib/can-promise.js":
/*!************************************************!*\
!*** ./node_modules/qrcode/lib/can-promise.js ***!
\************************************************/
/***/ ((module) => {
eval("// can-promise has a crash in some versions of react native that dont have\n// standard global objects\n// https://github.com/soldair/node-qrcode/issues/157\n\nmodule.exports = function () {\n return typeof Promise === 'function' && Promise.prototype && Promise.prototype.then\n}\n\n\n//# sourceURL=webpack://@waku/noise-rtc/./node_modules/qrcode/lib/can-promise.js?");
/***/ }),
/***/ "./node_modules/qrcode/lib/core/alignment-pattern.js":
/*!***********************************************************!*\
!*** ./node_modules/qrcode/lib/core/alignment-pattern.js ***!
\***********************************************************/
/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
eval("/**\n * Alignment pattern are fixed reference pattern in defined positions\n * in a matrix symbology, which enables the decode software to re-synchronise\n * the coordinate mapping of the image modules in the event of moderate amounts\n * of distortion of the image.\n *\n * Alignment patterns are present only in QR Code symbols of version 2 or larger\n * and their number depends on the symbol version.\n */\n\nconst getSymbolSize = (__webpack_require__(/*! ./utils */ \"./node_modules/qrcode/lib/core/utils.js\").getSymbolSize)\n\n/**\n * Calculate the row/column coordinates of the center module of each alignment pattern\n * for the specified QR Code version.\n *\n * The alignment patterns are positioned symmetrically on either side of the diagonal\n * running from the top left corner of the symbol to the bottom right corner.\n *\n * Since positions are simmetrical only half of the coordinates are returned.\n * Each item of the array will represent in turn the x and y coordinate.\n * @see {@link getPositions}\n *\n * @param {Number} version QR Code version\n * @return {Array} Array of coordinate\n */\nexports.getRowColCoords = function getRowColCoords (version) {\n if (version === 1) return []\n\n const posCount = Math.floor(version / 7) + 2\n const size = getSymbolSize(version)\n const intervals = size === 145 ? 26 : Math.ceil((size - 13) / (2 * posCount - 2)) * 2\n const positions = [size - 7] // Last coord is always (size - 7)\n\n for (let i = 1; i < posCount - 1; i++) {\n positions[i] = positions[i - 1] - intervals\n }\n\n positions.push(6) // First coord is always 6\n\n return positions.reverse()\n}\n\n/**\n * Returns an array containing the positions of each alignment pattern.\n * Each array's element represent the center point of the pattern as (x, y) coordinates\n *\n * Coordinates are calculated expanding the row/column coordinates returned by {@link getRowColCoords}\n * and filtering out the items that overlaps with finder pattern\n *\n * @example\n * For a Version 7 symbol {@link getRowColCoords} returns values 6, 22 and 38.\n * The alignment patterns, therefore, are to be centered on (row, column)\n * positions (6,22), (22,6), (22,22), (22,38), (38,22), (38,38).\n * Note that the coordinates (6,6), (6,38), (38,6) are occupied by finder patterns\n * and are not therefore used for alignment patterns.\n *\n * let pos = getPositions(7)\n * // [[6,22], [22,6], [22,22], [22,38], [38,22], [38,38]]\n *\n * @param {Number} version QR Code version\n * @return {Array} Array of coordinates\n */\nexports.getPositions = function getPositions (version) {\n const coords = []\n const pos = exports.getRowColCoords(version)\n const posLength = pos.length\n\n for (let i = 0; i < posLength; i++) {\n for (let j = 0; j < posLength; j++) {\n // Skip if position is occupied by finder patterns\n if ((i === 0 && j === 0) || // top-left\n (i === 0 && j === posLength - 1) || // bottom-left\n (i === posLength - 1 && j === 0)) { // top-right\n continue\n }\n\n coords.push([pos[i], pos[j]])\n }\n }\n\n return coords\n}\n\n\n//# sourceURL=webpack://@waku/noise-rtc/./node_modules/qrcode/lib/core/alignment-pattern.js?");
/***/ }),
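/* The helpers above derive where a symbol's alignment patterns sit from the
 * version alone. A minimal sketch for version 2 (a 25x25 symbol), assuming
 * direct require of the qrcode internals; the results follow the code above:
 *
 *   const alignment = require("qrcode/lib/core/alignment-pattern");
 *   alignment.getRowColCoords(2);  // [6, 18]
 *   alignment.getPositions(2);     // [[18, 18]], the single pattern of a version-2 symbol
 *                                  // (the (6,6), (6,18) and (18,6) slots overlap finder patterns)
 */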
/***/ "./node_modules/qrcode/lib/core/alphanumeric-data.js":
/*!***********************************************************!*\
!*** ./node_modules/qrcode/lib/core/alphanumeric-data.js ***!
\***********************************************************/
/***/ ((module, __unused_webpack_exports, __webpack_require__) => {
eval("const Mode = __webpack_require__(/*! ./mode */ \"./node_modules/qrcode/lib/core/mode.js\")\n\n/**\n * Array of characters available in alphanumeric mode\n *\n * As per QR Code specification, to each character\n * is assigned a value from 0 to 44 which in this case coincides\n * with the array index\n *\n * @type {Array}\n */\nconst ALPHA_NUM_CHARS = [\n '0', '1', '2', '3', '4', '5', '6', '7', '8', '9',\n 'A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'I', 'J', 'K', 'L', 'M',\n 'N', 'O', 'P', 'Q', 'R', 'S', 'T', 'U', 'V', 'W', 'X', 'Y', 'Z',\n ' ', '$', '%', '*', '+', '-', '.', '/', ':'\n]\n\nfunction AlphanumericData (data) {\n this.mode = Mode.ALPHANUMERIC\n this.data = data\n}\n\nAlphanumericData.getBitsLength = function getBitsLength (length) {\n return 11 * Math.floor(length / 2) + 6 * (length % 2)\n}\n\nAlphanumericData.prototype.getLength = function getLength () {\n return this.data.length\n}\n\nAlphanumericData.prototype.getBitsLength = function getBitsLength () {\n return AlphanumericData.getBitsLength(this.data.length)\n}\n\nAlphanumericData.prototype.write = function write (bitBuffer) {\n let i\n\n // Input data characters are divided into groups of two characters\n // and encoded as 11-bit binary codes.\n for (i = 0; i + 2 <= this.data.length; i += 2) {\n // The character value of the first character is multiplied by 45\n let value = ALPHA_NUM_CHARS.indexOf(this.data[i]) * 45\n\n // The character value of the second digit is added to the product\n value += ALPHA_NUM_CHARS.indexOf(this.data[i + 1])\n\n // The sum is then stored as 11-bit binary number\n bitBuffer.put(value, 11)\n }\n\n // If the number of input data characters is not a multiple of two,\n // the character value of the final character is encoded as a 6-bit binary number.\n if (this.data.length % 2) {\n bitBuffer.put(ALPHA_NUM_CHARS.indexOf(this.data[i]), 6)\n }\n}\n\nmodule.exports = AlphanumericData\n\n\n//# sourceURL=webpack://@waku/noise-rtc/./node_modules/qrcode/lib/core/alphanumeric-data.js?");
/***/ }),
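/* AlphanumericData above packs characters in pairs, first value * 45 plus
 * second value into 11 bits, with a trailing odd character taking 6 bits.
 * A minimal sketch, assuming direct require; "AB5" is an illustrative payload:
 *
 *   const BitBuffer = require("qrcode/lib/core/bit-buffer");
 *   const AlphanumericData = require("qrcode/lib/core/alphanumeric-data");
 *   const data = new AlphanumericData("AB5");
 *   data.getBitsLength();          // 11 * 1 + 6 = 17 bits
 *   const bits = new BitBuffer();
 *   data.write(bits);              // "AB" -> 10 * 45 + 11 = 461 (11 bits), "5" -> 5 (6 bits)
 */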
/***/ "./node_modules/qrcode/lib/core/bit-buffer.js":
/*!****************************************************!*\
!*** ./node_modules/qrcode/lib/core/bit-buffer.js ***!
\****************************************************/
/***/ ((module) => {
eval("function BitBuffer () {\n this.buffer = []\n this.length = 0\n}\n\nBitBuffer.prototype = {\n\n get: function (index) {\n const bufIndex = Math.floor(index / 8)\n return ((this.buffer[bufIndex] >>> (7 - index % 8)) & 1) === 1\n },\n\n put: function (num, length) {\n for (let i = 0; i < length; i++) {\n this.putBit(((num >>> (length - i - 1)) & 1) === 1)\n }\n },\n\n getLengthInBits: function () {\n return this.length\n },\n\n putBit: function (bit) {\n const bufIndex = Math.floor(this.length / 8)\n if (this.buffer.length <= bufIndex) {\n this.buffer.push(0)\n }\n\n if (bit) {\n this.buffer[bufIndex] |= (0x80 >>> (this.length % 8))\n }\n\n this.length++\n }\n}\n\nmodule.exports = BitBuffer\n\n\n//# sourceURL=webpack://@waku/noise-rtc/./node_modules/qrcode/lib/core/bit-buffer.js?");
/***/ }),
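/* BitBuffer above appends bits MSB-first into a growing byte array. A minimal
 * sketch, assuming direct require; the values follow the implementation above:
 *
 *   const BitBuffer = require("qrcode/lib/core/bit-buffer");
 *   const bb = new BitBuffer();
 *   bb.put(0b101, 3);              // appends bits 1, 0, 1
 *   bb.getLengthInBits();          // 3
 *   bb.get(0);                     // true  (first bit)
 *   bb.get(1);                     // false
 *   bb.buffer;                     // [0xA0], bits packed MSB-first into bytes
 */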
/***/ "./node_modules/qrcode/lib/core/bit-matrix.js":
/*!****************************************************!*\
!*** ./node_modules/qrcode/lib/core/bit-matrix.js ***!
\****************************************************/
/***/ ((module) => {
eval("/**\n * Helper class to handle QR Code symbol modules\n *\n * @param {Number} size Symbol size\n */\nfunction BitMatrix (size) {\n if (!size || size < 1) {\n throw new Error('BitMatrix size must be defined and greater than 0')\n }\n\n this.size = size\n this.data = new Uint8Array(size * size)\n this.reservedBit = new Uint8Array(size * size)\n}\n\n/**\n * Set bit value at specified location\n * If reserved flag is set, this bit will be ignored during masking process\n *\n * @param {Number} row\n * @param {Number} col\n * @param {Boolean} value\n * @param {Boolean} reserved\n */\nBitMatrix.prototype.set = function (row, col, value, reserved) {\n const index = row * this.size + col\n this.data[index] = value\n if (reserved) this.reservedBit[index] = true\n}\n\n/**\n * Returns bit value at specified location\n *\n * @param {Number} row\n * @param {Number} col\n * @return {Boolean}\n */\nBitMatrix.prototype.get = function (row, col) {\n return this.data[row * this.size + col]\n}\n\n/**\n * Applies xor operator at specified location\n * (used during masking process)\n *\n * @param {Number} row\n * @param {Number} col\n * @param {Boolean} value\n */\nBitMatrix.prototype.xor = function (row, col, value) {\n this.data[row * this.size + col] ^= value\n}\n\n/**\n * Check if bit at specified location is reserved\n *\n * @param {Number} row\n * @param {Number} col\n * @return {Boolean}\n */\nBitMatrix.prototype.isReserved = function (row, col) {\n return this.reservedBit[row * this.size + col]\n}\n\nmodule.exports = BitMatrix\n\n\n//# sourceURL=webpack://@waku/noise-rtc/./node_modules/qrcode/lib/core/bit-matrix.js?");
/***/ }),
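/* BitMatrix above backs the module grid with flat Uint8Arrays, one for the
 * module values and one for the "reserved" flags that the masking step must
 * skip. A minimal sketch, assuming direct require:
 *
 *   const BitMatrix = require("qrcode/lib/core/bit-matrix");
 *   const m = new BitMatrix(21);   // a version-1 symbol is 21x21 modules
 *   m.set(0, 0, 1, true);          // dark module, reserved (ignored while masking)
 *   m.get(0, 0);                   // 1
 *   m.isReserved(0, 0);            // truthy
 *   m.xor(1, 1, 1);                // toggle the module at row 1, col 1
 */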
/***/ "./node_modules/qrcode/lib/core/byte-data.js":
/*!***************************************************!*\
!*** ./node_modules/qrcode/lib/core/byte-data.js ***!
\***************************************************/
/***/ ((module, __unused_webpack_exports, __webpack_require__) => {
eval("const encodeUtf8 = __webpack_require__(/*! encode-utf8 */ \"./node_modules/encode-utf8/index.js\")\nconst Mode = __webpack_require__(/*! ./mode */ \"./node_modules/qrcode/lib/core/mode.js\")\n\nfunction ByteData (data) {\n this.mode = Mode.BYTE\n if (typeof (data) === 'string') {\n data = encodeUtf8(data)\n }\n this.data = new Uint8Array(data)\n}\n\nByteData.getBitsLength = function getBitsLength (length) {\n return length * 8\n}\n\nByteData.prototype.getLength = function getLength () {\n return this.data.length\n}\n\nByteData.prototype.getBitsLength = function getBitsLength () {\n return ByteData.getBitsLength(this.data.length)\n}\n\nByteData.prototype.write = function (bitBuffer) {\n for (let i = 0, l = this.data.length; i < l; i++) {\n bitBuffer.put(this.data[i], 8)\n }\n}\n\nmodule.exports = ByteData\n\n\n//# sourceURL=webpack://@waku/noise-rtc/./node_modules/qrcode/lib/core/byte-data.js?");
/***/ }),
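/* Usage sketch (comment only): ByteData UTF-8 encodes string input and writes each
   byte as 8 bits into a BitBuffer. Deep require paths assumed.

const ByteData = require('qrcode/lib/core/byte-data')
const BitBuffer = require('qrcode/lib/core/bit-buffer')

const seg = new ByteData('hi')       // strings are UTF-8 encoded first
console.log(seg.getLength())         // 2 bytes
console.log(seg.getBitsLength())     // 16 (8 bits per byte)

const bb = new BitBuffer()
seg.write(bb)                        // packs 0x68, 0x69 into the bit stream
console.log(bb.buffer)               // [ 104, 105 ]
*/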
/***/ "./node_modules/qrcode/lib/core/error-correction-code.js":
/*!***************************************************************!*\
!*** ./node_modules/qrcode/lib/core/error-correction-code.js ***!
\***************************************************************/
/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
eval("const ECLevel = __webpack_require__(/*! ./error-correction-level */ \"./node_modules/qrcode/lib/core/error-correction-level.js\")\r\n\r\nconst EC_BLOCKS_TABLE = [\r\n// L M Q H\r\n 1, 1, 1, 1,\r\n 1, 1, 1, 1,\r\n 1, 1, 2, 2,\r\n 1, 2, 2, 4,\r\n 1, 2, 4, 4,\r\n 2, 4, 4, 4,\r\n 2, 4, 6, 5,\r\n 2, 4, 6, 6,\r\n 2, 5, 8, 8,\r\n 4, 5, 8, 8,\r\n 4, 5, 8, 11,\r\n 4, 8, 10, 11,\r\n 4, 9, 12, 16,\r\n 4, 9, 16, 16,\r\n 6, 10, 12, 18,\r\n 6, 10, 17, 16,\r\n 6, 11, 16, 19,\r\n 6, 13, 18, 21,\r\n 7, 14, 21, 25,\r\n 8, 16, 20, 25,\r\n 8, 17, 23, 25,\r\n 9, 17, 23, 34,\r\n 9, 18, 25, 30,\r\n 10, 20, 27, 32,\r\n 12, 21, 29, 35,\r\n 12, 23, 34, 37,\r\n 12, 25, 34, 40,\r\n 13, 26, 35, 42,\r\n 14, 28, 38, 45,\r\n 15, 29, 40, 48,\r\n 16, 31, 43, 51,\r\n 17, 33, 45, 54,\r\n 18, 35, 48, 57,\r\n 19, 37, 51, 60,\r\n 19, 38, 53, 63,\r\n 20, 40, 56, 66,\r\n 21, 43, 59, 70,\r\n 22, 45, 62, 74,\r\n 24, 47, 65, 77,\r\n 25, 49, 68, 81\r\n]\r\n\r\nconst EC_CODEWORDS_TABLE = [\r\n// L M Q H\r\n 7, 10, 13, 17,\r\n 10, 16, 22, 28,\r\n 15, 26, 36, 44,\r\n 20, 36, 52, 64,\r\n 26, 48, 72, 88,\r\n 36, 64, 96, 112,\r\n 40, 72, 108, 130,\r\n 48, 88, 132, 156,\r\n 60, 110, 160, 192,\r\n 72, 130, 192, 224,\r\n 80, 150, 224, 264,\r\n 96, 176, 260, 308,\r\n 104, 198, 288, 352,\r\n 120, 216, 320, 384,\r\n 132, 240, 360, 432,\r\n 144, 280, 408, 480,\r\n 168, 308, 448, 532,\r\n 180, 338, 504, 588,\r\n 196, 364, 546, 650,\r\n 224, 416, 600, 700,\r\n 224, 442, 644, 750,\r\n 252, 476, 690, 816,\r\n 270, 504, 750, 900,\r\n 300, 560, 810, 960,\r\n 312, 588, 870, 1050,\r\n 336, 644, 952, 1110,\r\n 360, 700, 1020, 1200,\r\n 390, 728, 1050, 1260,\r\n 420, 784, 1140, 1350,\r\n 450, 812, 1200, 1440,\r\n 480, 868, 1290, 1530,\r\n 510, 924, 1350, 1620,\r\n 540, 980, 1440, 1710,\r\n 570, 1036, 1530, 1800,\r\n 570, 1064, 1590, 1890,\r\n 600, 1120, 1680, 1980,\r\n 630, 1204, 1770, 2100,\r\n 660, 1260, 1860, 2220,\r\n 720, 1316, 1950, 2310,\r\n 750, 1372, 2040, 2430\r\n]\r\n\r\n/**\r\n * Returns the number of error correction block that the QR Code should contain\r\n * for the specified version and error correction level.\r\n *\r\n * @param {Number} version QR Code version\r\n * @param {Number} errorCorrectionLevel Error correction level\r\n * @return {Number} Number of error correction blocks\r\n */\r\nexports.getBlocksCount = function getBlocksCount (version, errorCorrectionLevel) {\r\n switch (errorCorrectionLevel) {\r\n case ECLevel.L:\r\n return EC_BLOCKS_TABLE[(version - 1) * 4 + 0]\r\n case ECLevel.M:\r\n return EC_BLOCKS_TABLE[(version - 1) * 4 + 1]\r\n case ECLevel.Q:\r\n return EC_BLOCKS_TABLE[(version - 1) * 4 + 2]\r\n case ECLevel.H:\r\n return EC_BLOCKS_TABLE[(version - 1) * 4 + 3]\r\n default:\r\n return undefined\r\n }\r\n}\r\n\r\n/**\r\n * Returns the number of error correction codewords to use for the specified\r\n * version and error correction level.\r\n *\r\n * @param {Number} version QR Code version\r\n * @param {Number} errorCorrectionLevel Error correction level\r\n * @return {Number} Number of error correction codewords\r\n */\r\nexports.getTotalCodewordsCount = function getTotalCodewordsCount (version, errorCorrectionLevel) {\r\n switch (errorCorrectionLevel) {\r\n case ECLevel.L:\r\n return EC_CODEWORDS_TABLE[(version - 1) * 4 + 0]\r\n case ECLevel.M:\r\n return EC_CODEWORDS_TABLE[(version - 1) * 4 + 1]\r\n case ECLevel.Q:\r\n return EC_CODEWORDS_TABLE[(version - 1) * 4 + 2]\r\n case ECLevel.H:\r\n return EC_CODEWORDS_TABLE[(version - 1) * 4 + 3]\r\n default:\r\n return undefined\r\n }\r\n}\r\n\n\n//# 
sourceURL=webpack://@waku/noise-rtc/./node_modules/qrcode/lib/core/error-correction-code.js?");
/***/ }),
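/* Usage sketch (comment only): the two tables above are flat arrays indexed by
   (version - 1) * 4 + level column (L, M, Q, H). Deep require paths assumed.

const ECCode = require('qrcode/lib/core/error-correction-code')
const ECLevel = require('qrcode/lib/core/error-correction-level')

// Version 2 at level M: a single block carrying 16 EC codewords
console.log(ECCode.getBlocksCount(2, ECLevel.M))          // 1
console.log(ECCode.getTotalCodewordsCount(2, ECLevel.M))  // 16

// Version 5 at level Q: 4 blocks, 72 EC codewords in total
console.log(ECCode.getBlocksCount(5, ECLevel.Q))          // 4
console.log(ECCode.getTotalCodewordsCount(5, ECLevel.Q))  // 72
*/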
/***/ "./node_modules/qrcode/lib/core/error-correction-level.js":
/*!****************************************************************!*\
!*** ./node_modules/qrcode/lib/core/error-correction-level.js ***!
\****************************************************************/
/***/ ((__unused_webpack_module, exports) => {
eval("exports.L = { bit: 1 }\nexports.M = { bit: 0 }\nexports.Q = { bit: 3 }\nexports.H = { bit: 2 }\n\nfunction fromString (string) {\n if (typeof string !== 'string') {\n throw new Error('Param is not a string')\n }\n\n const lcStr = string.toLowerCase()\n\n switch (lcStr) {\n case 'l':\n case 'low':\n return exports.L\n\n case 'm':\n case 'medium':\n return exports.M\n\n case 'q':\n case 'quartile':\n return exports.Q\n\n case 'h':\n case 'high':\n return exports.H\n\n default:\n throw new Error('Unknown EC Level: ' + string)\n }\n}\n\nexports.isValid = function isValid (level) {\n return level && typeof level.bit !== 'undefined' &&\n level.bit >= 0 && level.bit < 4\n}\n\nexports.from = function from (value, defaultValue) {\n if (exports.isValid(value)) {\n return value\n }\n\n try {\n return fromString(value)\n } catch (e) {\n return defaultValue\n }\n}\n\n\n//# sourceURL=webpack://@waku/noise-rtc/./node_modules/qrcode/lib/core/error-correction-level.js?");
/***/ }),
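/* Usage sketch (comment only): levels can be given as their single-letter or long
   names, and invalid values fall back to the supplied default. Deep require path assumed.

const ECLevel = require('qrcode/lib/core/error-correction-level')

console.log(ECLevel.from('quartile'))                           // { bit: 3 } i.e. ECLevel.Q
console.log(ECLevel.from('h') === ECLevel.H)                    // true
console.log(ECLevel.from({ bit: 9 }, ECLevel.M) === ECLevel.M)  // invalid -> fallback
console.log(ECLevel.isValid(ECLevel.L))                         // true
*/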
/***/ "./node_modules/qrcode/lib/core/finder-pattern.js":
/*!********************************************************!*\
!*** ./node_modules/qrcode/lib/core/finder-pattern.js ***!
\********************************************************/
/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
eval("const getSymbolSize = (__webpack_require__(/*! ./utils */ \"./node_modules/qrcode/lib/core/utils.js\").getSymbolSize)\nconst FINDER_PATTERN_SIZE = 7\n\n/**\n * Returns an array containing the positions of each finder pattern.\n * Each array's element represent the top-left point of the pattern as (x, y) coordinates\n *\n * @param {Number} version QR Code version\n * @return {Array} Array of coordinates\n */\nexports.getPositions = function getPositions (version) {\n const size = getSymbolSize(version)\n\n return [\n // top-left\n [0, 0],\n // top-right\n [size - FINDER_PATTERN_SIZE, 0],\n // bottom-left\n [0, size - FINDER_PATTERN_SIZE]\n ]\n}\n\n\n//# sourceURL=webpack://@waku/noise-rtc/./node_modules/qrcode/lib/core/finder-pattern.js?");
/***/ }),
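/* Usage sketch (comment only): with a 7-module finder pattern and a 21x21 version 1
   symbol, the three patterns sit at the top-left, top-right and bottom-left corners.
   Deep require path assumed.

const FinderPattern = require('qrcode/lib/core/finder-pattern')
console.log(FinderPattern.getPositions(1))
// [ [ 0, 0 ], [ 14, 0 ], [ 0, 14 ] ]
*/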
/***/ "./node_modules/qrcode/lib/core/format-info.js":
/*!*****************************************************!*\
!*** ./node_modules/qrcode/lib/core/format-info.js ***!
\*****************************************************/
/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
eval("const Utils = __webpack_require__(/*! ./utils */ \"./node_modules/qrcode/lib/core/utils.js\")\n\nconst G15 = (1 << 10) | (1 << 8) | (1 << 5) | (1 << 4) | (1 << 2) | (1 << 1) | (1 << 0)\nconst G15_MASK = (1 << 14) | (1 << 12) | (1 << 10) | (1 << 4) | (1 << 1)\nconst G15_BCH = Utils.getBCHDigit(G15)\n\n/**\n * Returns format information with relative error correction bits\n *\n * The format information is a 15-bit sequence containing 5 data bits,\n * with 10 error correction bits calculated using the (15, 5) BCH code.\n *\n * @param {Number} errorCorrectionLevel Error correction level\n * @param {Number} mask Mask pattern\n * @return {Number} Encoded format information bits\n */\nexports.getEncodedBits = function getEncodedBits (errorCorrectionLevel, mask) {\n const data = ((errorCorrectionLevel.bit << 3) | mask)\n let d = data << 10\n\n while (Utils.getBCHDigit(d) - G15_BCH >= 0) {\n d ^= (G15 << (Utils.getBCHDigit(d) - G15_BCH))\n }\n\n // xor final data with mask pattern in order to ensure that\n // no combination of Error Correction Level and data mask pattern\n // will result in an all-zero data string\n return ((data << 10) | d) ^ G15_MASK\n}\n\n\n//# sourceURL=webpack://@waku/noise-rtc/./node_modules/qrcode/lib/core/format-info.js?");
/***/ }),
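/* Usage sketch (comment only): level M encodes as data bits 00, so with mask 0 the
   five data bits are 00000 and the masked (15, 5) BCH sequence is the familiar
   101010000010010 pattern from the QR specification. Deep require paths assumed.

const FormatInfo = require('qrcode/lib/core/format-info')
const ECLevel = require('qrcode/lib/core/error-correction-level')

const bits = FormatInfo.getEncodedBits(ECLevel.M, 0)
console.log(bits.toString(2).padStart(15, '0'))  // '101010000010010'
*/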
/***/ "./node_modules/qrcode/lib/core/galois-field.js":
/*!******************************************************!*\
!*** ./node_modules/qrcode/lib/core/galois-field.js ***!
\******************************************************/
/***/ ((__unused_webpack_module, exports) => {
eval("const EXP_TABLE = new Uint8Array(512)\nconst LOG_TABLE = new Uint8Array(256)\n/**\n * Precompute the log and anti-log tables for faster computation later\n *\n * For each possible value in the galois field 2^8, we will pre-compute\n * the logarithm and anti-logarithm (exponential) of this value\n *\n * ref {@link https://en.wikiversity.org/wiki/Reed%E2%80%93Solomon_codes_for_coders#Introduction_to_mathematical_fields}\n */\n;(function initTables () {\n let x = 1\n for (let i = 0; i < 255; i++) {\n EXP_TABLE[i] = x\n LOG_TABLE[x] = i\n\n x <<= 1 // multiply by 2\n\n // The QR code specification says to use byte-wise modulo 100011101 arithmetic.\n // This means that when a number is 256 or larger, it should be XORed with 0x11D.\n if (x & 0x100) { // similar to x >= 256, but a lot faster (because 0x100 == 256)\n x ^= 0x11D\n }\n }\n\n // Optimization: double the size of the anti-log table so that we don't need to mod 255 to\n // stay inside the bounds (because we will mainly use this table for the multiplication of\n // two GF numbers, no more).\n // @see {@link mul}\n for (let i = 255; i < 512; i++) {\n EXP_TABLE[i] = EXP_TABLE[i - 255]\n }\n}())\n\n/**\n * Returns log value of n inside Galois Field\n *\n * @param {Number} n\n * @return {Number}\n */\nexports.log = function log (n) {\n if (n < 1) throw new Error('log(' + n + ')')\n return LOG_TABLE[n]\n}\n\n/**\n * Returns anti-log value of n inside Galois Field\n *\n * @param {Number} n\n * @return {Number}\n */\nexports.exp = function exp (n) {\n return EXP_TABLE[n]\n}\n\n/**\n * Multiplies two number inside Galois Field\n *\n * @param {Number} x\n * @param {Number} y\n * @return {Number}\n */\nexports.mul = function mul (x, y) {\n if (x === 0 || y === 0) return 0\n\n // should be EXP_TABLE[(LOG_TABLE[x] + LOG_TABLE[y]) % 255] if EXP_TABLE wasn't oversized\n // @see {@link initTables}\n return EXP_TABLE[LOG_TABLE[x] + LOG_TABLE[y]]\n}\n\n\n//# sourceURL=webpack://@waku/noise-rtc/./node_modules/qrcode/lib/core/galois-field.js?");
/***/ }),
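/* Usage sketch (comment only): products in GF(2^8) are computed via the log/anti-log
   tables, with overflow reduced by the QR polynomial 0x11D. Deep require path assumed.

const GF = require('qrcode/lib/core/galois-field')

console.log(GF.exp(0))        // 1  (alpha^0)
console.log(GF.exp(1))        // 2  (alpha^1)
console.log(GF.log(2))        // 1
console.log(GF.mul(2, 4))     // 8  (no reduction needed)
console.log(GF.mul(0x80, 2))  // 29 (0x100 overflows and is reduced by 0x11D)
*/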
/***/ "./node_modules/qrcode/lib/core/kanji-data.js":
/*!****************************************************!*\
!*** ./node_modules/qrcode/lib/core/kanji-data.js ***!
\****************************************************/
/***/ ((module, __unused_webpack_exports, __webpack_require__) => {
eval("const Mode = __webpack_require__(/*! ./mode */ \"./node_modules/qrcode/lib/core/mode.js\")\nconst Utils = __webpack_require__(/*! ./utils */ \"./node_modules/qrcode/lib/core/utils.js\")\n\nfunction KanjiData (data) {\n this.mode = Mode.KANJI\n this.data = data\n}\n\nKanjiData.getBitsLength = function getBitsLength (length) {\n return length * 13\n}\n\nKanjiData.prototype.getLength = function getLength () {\n return this.data.length\n}\n\nKanjiData.prototype.getBitsLength = function getBitsLength () {\n return KanjiData.getBitsLength(this.data.length)\n}\n\nKanjiData.prototype.write = function (bitBuffer) {\n let i\n\n // In the Shift JIS system, Kanji characters are represented by a two byte combination.\n // These byte values are shifted from the JIS X 0208 values.\n // JIS X 0208 gives details of the shift coded representation.\n for (i = 0; i < this.data.length; i++) {\n let value = Utils.toSJIS(this.data[i])\n\n // For characters with Shift JIS values from 0x8140 to 0x9FFC:\n if (value >= 0x8140 && value <= 0x9FFC) {\n // Subtract 0x8140 from Shift JIS value\n value -= 0x8140\n\n // For characters with Shift JIS values from 0xE040 to 0xEBBF\n } else if (value >= 0xE040 && value <= 0xEBBF) {\n // Subtract 0xC140 from Shift JIS value\n value -= 0xC140\n } else {\n throw new Error(\n 'Invalid SJIS character: ' + this.data[i] + '\\n' +\n 'Make sure your charset is UTF-8')\n }\n\n // Multiply most significant byte of result by 0xC0\n // and add least significant byte to product\n value = (((value >>> 8) & 0xff) * 0xC0) + (value & 0xff)\n\n // Convert result to a 13-bit binary string\n bitBuffer.put(value, 13)\n }\n}\n\nmodule.exports = KanjiData\n\n\n//# sourceURL=webpack://@waku/noise-rtc/./node_modules/qrcode/lib/core/kanji-data.js?");
/***/ }),
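/* Illustrative arithmetic only (comment): KanjiData.write needs a toSJIS function to
   be registered via Utils.setToSJISFunction before it can run, so this just traces the
   13-bit compaction for an assumed Shift JIS value in the 0x8140-0x9FFC range.
   Deep require path assumed.

const value = 0x935F - 0x8140                                   // 0x121F
const compact = ((value >>> 8) & 0xff) * 0xC0 + (value & 0xff)  // msb * 0xC0 + lsb
console.log(compact.toString(16))                               // 'd9f' -> written as 13 bits

const KanjiData = require('qrcode/lib/core/kanji-data')
console.log(KanjiData.getBitsLength(4))                         // 52 (13 bits per character)
*/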
/***/ "./node_modules/qrcode/lib/core/mask-pattern.js":
/*!******************************************************!*\
!*** ./node_modules/qrcode/lib/core/mask-pattern.js ***!
\******************************************************/
/***/ ((__unused_webpack_module, exports) => {
eval("/**\n * Data mask pattern reference\n * @type {Object}\n */\nexports.Patterns = {\n PATTERN000: 0,\n PATTERN001: 1,\n PATTERN010: 2,\n PATTERN011: 3,\n PATTERN100: 4,\n PATTERN101: 5,\n PATTERN110: 6,\n PATTERN111: 7\n}\n\n/**\n * Weighted penalty scores for the undesirable features\n * @type {Object}\n */\nconst PenaltyScores = {\n N1: 3,\n N2: 3,\n N3: 40,\n N4: 10\n}\n\n/**\n * Check if mask pattern value is valid\n *\n * @param {Number} mask Mask pattern\n * @return {Boolean} true if valid, false otherwise\n */\nexports.isValid = function isValid (mask) {\n return mask != null && mask !== '' && !isNaN(mask) && mask >= 0 && mask <= 7\n}\n\n/**\n * Returns mask pattern from a value.\n * If value is not valid, returns undefined\n *\n * @param {Number|String} value Mask pattern value\n * @return {Number} Valid mask pattern or undefined\n */\nexports.from = function from (value) {\n return exports.isValid(value) ? parseInt(value, 10) : undefined\n}\n\n/**\n* Find adjacent modules in row/column with the same color\n* and assign a penalty value.\n*\n* Points: N1 + i\n* i is the amount by which the number of adjacent modules of the same color exceeds 5\n*/\nexports.getPenaltyN1 = function getPenaltyN1 (data) {\n const size = data.size\n let points = 0\n let sameCountCol = 0\n let sameCountRow = 0\n let lastCol = null\n let lastRow = null\n\n for (let row = 0; row < size; row++) {\n sameCountCol = sameCountRow = 0\n lastCol = lastRow = null\n\n for (let col = 0; col < size; col++) {\n let module = data.get(row, col)\n if (module === lastCol) {\n sameCountCol++\n } else {\n if (sameCountCol >= 5) points += PenaltyScores.N1 + (sameCountCol - 5)\n lastCol = module\n sameCountCol = 1\n }\n\n module = data.get(col, row)\n if (module === lastRow) {\n sameCountRow++\n } else {\n if (sameCountRow >= 5) points += PenaltyScores.N1 + (sameCountRow - 5)\n lastRow = module\n sameCountRow = 1\n }\n }\n\n if (sameCountCol >= 5) points += PenaltyScores.N1 + (sameCountCol - 5)\n if (sameCountRow >= 5) points += PenaltyScores.N1 + (sameCountRow - 5)\n }\n\n return points\n}\n\n/**\n * Find 2x2 blocks with the same color and assign a penalty value\n *\n * Points: N2 * (m - 1) * (n - 1)\n */\nexports.getPenaltyN2 = function getPenaltyN2 (data) {\n const size = data.size\n let points = 0\n\n for (let row = 0; row < size - 1; row++) {\n for (let col = 0; col < size - 1; col++) {\n const last = data.get(row, col) +\n data.get(row, col + 1) +\n data.get(row + 1, col) +\n data.get(row + 1, col + 1)\n\n if (last === 4 || last === 0) points++\n }\n }\n\n return points * PenaltyScores.N2\n}\n\n/**\n * Find 1:1:3:1:1 ratio (dark:light:dark:light:dark) pattern in row/column,\n * preceded or followed by light area 4 modules wide\n *\n * Points: N3 * number of pattern found\n */\nexports.getPenaltyN3 = function getPenaltyN3 (data) {\n const size = data.size\n let points = 0\n let bitsCol = 0\n let bitsRow = 0\n\n for (let row = 0; row < size; row++) {\n bitsCol = bitsRow = 0\n for (let col = 0; col < size; col++) {\n bitsCol = ((bitsCol << 1) & 0x7FF) | data.get(row, col)\n if (col >= 10 && (bitsCol === 0x5D0 || bitsCol === 0x05D)) points++\n\n bitsRow = ((bitsRow << 1) & 0x7FF) | data.get(col, row)\n if (col >= 10 && (bitsRow === 0x5D0 || bitsRow === 0x05D)) points++\n }\n }\n\n return points * PenaltyScores.N3\n}\n\n/**\n * Calculate proportion of dark modules in entire symbol\n *\n * Points: N4 * k\n *\n * k is the rating of the deviation of the proportion of dark modules\n * in the symbol from 50% in steps of 5%\n 
*/\nexports.getPenaltyN4 = function getPenaltyN4 (data) {\n let darkCount = 0\n const modulesCount = data.data.length\n\n for (let i = 0; i < modulesCount; i++) darkCount += data.data[i]\n\n const k = Math.abs(Math.ceil((darkCount * 100 / modulesCount) / 5) - 10)\n\n return k * PenaltyScores.N4\n}\n\n/**\n * Return mask value at given position\n *\n * @param {Number} maskPattern Pattern reference value\n * @param {Number} i Row\n * @param {Number} j Column\n * @return {Boolean} Mask value\n */\nfunction getMaskAt (maskPattern, i, j) {\n switch (maskPattern) {\n case exports.Patterns.PATTERN000: return (i + j) % 2 === 0\n case exports.Patterns.PATTERN001: return i % 2 === 0\n case exports.Patterns.PATTERN010: return j % 3 === 0\n case exports.Patterns.PATTERN011: return (i + j) % 3 === 0\n case exports.Patterns.PATTERN100: return (Math.floor(i / 2) + Math.floor(j / 3)) % 2 === 0\n case exports.Patterns.PATTERN101: return (i * j) % 2 + (i * j) % 3 === 0\n case exports.Patterns.PATTERN110: return ((i * j) % 2 + (i * j) % 3) % 2 === 0\n case exports.Patterns.PATTERN111: return ((i * j) % 3 + (i + j) % 2) % 2 === 0\n\n default: throw new Error('bad maskPattern:' + maskPattern)\n }\n}\n\n/**\n * Apply a mask pattern to a BitMatrix\n *\n * @param {Number} pattern Pattern reference number\n * @param {BitMatrix} data BitMatrix data\n */\nexports.applyMask = function applyMask (pattern, data) {\n const size = data.size\n\n for (let col = 0; col < size; col++) {\n for (let row = 0; row < size; row++) {\n if (data.isReserved(row, col)) continue\n data.xor(row, col, getMaskAt(pattern, row, col))\n }\n }\n}\n\n/**\n * Returns the best mask pattern for data\n *\n * @param {BitMatrix} data\n * @return {Number} Mask pattern reference number\n */\nexports.getBestMask = function getBestMask (data, setupFormatFunc) {\n const numPatterns = Object.keys(exports.Patterns).length\n let bestPattern = 0\n let lowerPenalty = Infinity\n\n for (let p = 0; p < numPatterns; p++) {\n setupFormatFunc(p)\n exports.applyMask(p, data)\n\n // Calculate penalty\n const penalty =\n exports.getPenaltyN1(data) +\n exports.getPenaltyN2(data) +\n exports.getPenaltyN3(data) +\n exports.getPenaltyN4(data)\n\n // Undo previously applied mask\n exports.applyMask(p, data)\n\n if (penalty < lowerPenalty) {\n lowerPenalty = penalty\n bestPattern = p\n }\n }\n\n return bestPattern\n}\n\n\n//# sourceURL=webpack://@waku/noise-rtc/./node_modules/qrcode/lib/core/mask-pattern.js?");
/***/ }),
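/* Usage sketch (comment only): pattern 0 darkens modules where (row + col) % 2 === 0,
   so applying it to an empty matrix yields a checkerboard, and applying it twice is a
   no-op (XOR). Reserved modules are skipped. Deep require paths assumed.

const MaskPattern = require('qrcode/lib/core/mask-pattern')
const BitMatrix = require('qrcode/lib/core/bit-matrix')

console.log(MaskPattern.isValid(3), MaskPattern.from('2'))  // true 2

const m = new BitMatrix(21)
MaskPattern.applyMask(MaskPattern.Patterns.PATTERN000, m)
console.log(m.get(0, 0), m.get(0, 1))  // 1 0
MaskPattern.applyMask(MaskPattern.Patterns.PATTERN000, m)
console.log(m.get(0, 0), m.get(0, 1))  // 0 0
*/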
/***/ "./node_modules/qrcode/lib/core/mode.js":
/*!**********************************************!*\
!*** ./node_modules/qrcode/lib/core/mode.js ***!
\**********************************************/
/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
eval("const VersionCheck = __webpack_require__(/*! ./version-check */ \"./node_modules/qrcode/lib/core/version-check.js\")\nconst Regex = __webpack_require__(/*! ./regex */ \"./node_modules/qrcode/lib/core/regex.js\")\n\n/**\n * Numeric mode encodes data from the decimal digit set (0 - 9)\n * (byte values 30HEX to 39HEX).\n * Normally, 3 data characters are represented by 10 bits.\n *\n * @type {Object}\n */\nexports.NUMERIC = {\n id: 'Numeric',\n bit: 1 << 0,\n ccBits: [10, 12, 14]\n}\n\n/**\n * Alphanumeric mode encodes data from a set of 45 characters,\n * i.e. 10 numeric digits (0 - 9),\n * 26 alphabetic characters (A - Z),\n * and 9 symbols (SP, $, %, *, +, -, ., /, :).\n * Normally, two input characters are represented by 11 bits.\n *\n * @type {Object}\n */\nexports.ALPHANUMERIC = {\n id: 'Alphanumeric',\n bit: 1 << 1,\n ccBits: [9, 11, 13]\n}\n\n/**\n * In byte mode, data is encoded at 8 bits per character.\n *\n * @type {Object}\n */\nexports.BYTE = {\n id: 'Byte',\n bit: 1 << 2,\n ccBits: [8, 16, 16]\n}\n\n/**\n * The Kanji mode efficiently encodes Kanji characters in accordance with\n * the Shift JIS system based on JIS X 0208.\n * The Shift JIS values are shifted from the JIS X 0208 values.\n * JIS X 0208 gives details of the shift coded representation.\n * Each two-byte character value is compacted to a 13-bit binary codeword.\n *\n * @type {Object}\n */\nexports.KANJI = {\n id: 'Kanji',\n bit: 1 << 3,\n ccBits: [8, 10, 12]\n}\n\n/**\n * Mixed mode will contain a sequences of data in a combination of any of\n * the modes described above\n *\n * @type {Object}\n */\nexports.MIXED = {\n bit: -1\n}\n\n/**\n * Returns the number of bits needed to store the data length\n * according to QR Code specifications.\n *\n * @param {Mode} mode Data mode\n * @param {Number} version QR Code version\n * @return {Number} Number of bits\n */\nexports.getCharCountIndicator = function getCharCountIndicator (mode, version) {\n if (!mode.ccBits) throw new Error('Invalid mode: ' + mode)\n\n if (!VersionCheck.isValid(version)) {\n throw new Error('Invalid version: ' + version)\n }\n\n if (version >= 1 && version < 10) return mode.ccBits[0]\n else if (version < 27) return mode.ccBits[1]\n return mode.ccBits[2]\n}\n\n/**\n * Returns the most efficient mode to store the specified data\n *\n * @param {String} dataStr Input data string\n * @return {Mode} Best mode\n */\nexports.getBestModeForData = function getBestModeForData (dataStr) {\n if (Regex.testNumeric(dataStr)) return exports.NUMERIC\n else if (Regex.testAlphanumeric(dataStr)) return exports.ALPHANUMERIC\n else if (Regex.testKanji(dataStr)) return exports.KANJI\n else return exports.BYTE\n}\n\n/**\n * Return mode name as string\n *\n * @param {Mode} mode Mode object\n * @returns {String} Mode name\n */\nexports.toString = function toString (mode) {\n if (mode && mode.id) return mode.id\n throw new Error('Invalid mode')\n}\n\n/**\n * Check if input param is a valid mode object\n *\n * @param {Mode} mode Mode object\n * @returns {Boolean} True if valid mode, false otherwise\n */\nexports.isValid = function isValid (mode) {\n return mode && mode.bit && mode.ccBits\n}\n\n/**\n * Get mode object from its name\n *\n * @param {String} string Mode name\n * @returns {Mode} Mode object\n */\nfunction fromString (string) {\n if (typeof string !== 'string') {\n throw new Error('Param is not a string')\n }\n\n const lcStr = string.toLowerCase()\n\n switch (lcStr) {\n case 'numeric':\n return exports.NUMERIC\n case 'alphanumeric':\n return 
exports.ALPHANUMERIC\n case 'kanji':\n return exports.KANJI\n case 'byte':\n return exports.BYTE\n default:\n throw new Error('Unknown mode: ' + string)\n }\n}\n\n/**\n * Returns mode from a value.\n * If value is not a valid mode, returns defaultValue\n *\n * @param {Mode|String} value Encoding mode\n * @param {Mode} defaultValue Fallback value\n * @return {Mode} Encoding mode\n */\nexports.from = function from (value, defaultValue) {\n if (exports.isValid(value)) {\n return value\n }\n\n try {\n return fromString(value)\n } catch (e) {\n return defaultValue\n }\n}\n\n\n//# sourceURL=webpack://@waku/noise-rtc/./node_modules/qrcode/lib/core/mode.js?");
/***/ }),
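/* Usage sketch (comment only): mode detection picks the densest encoding the data
   allows, and the character count indicator width depends on the version range.
   Deep require path assumed.

const Mode = require('qrcode/lib/core/mode')

console.log(Mode.toString(Mode.getBestModeForData('12345')))        // 'Numeric'
console.log(Mode.toString(Mode.getBestModeForData('HELLO WORLD')))  // 'Alphanumeric'
console.log(Mode.toString(Mode.getBestModeForData('hello')))        // 'Byte' (lowercase)

console.log(Mode.getCharCountIndicator(Mode.BYTE, 1))    // 8  (versions 1-9)
console.log(Mode.getCharCountIndicator(Mode.BYTE, 10))   // 16 (versions 10-26)
console.log(Mode.from('kanji', Mode.BYTE) === Mode.KANJI)  // true
*/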
/***/ "./node_modules/qrcode/lib/core/numeric-data.js":
/*!******************************************************!*\
!*** ./node_modules/qrcode/lib/core/numeric-data.js ***!
\******************************************************/
/***/ ((module, __unused_webpack_exports, __webpack_require__) => {
eval("const Mode = __webpack_require__(/*! ./mode */ \"./node_modules/qrcode/lib/core/mode.js\")\n\nfunction NumericData (data) {\n this.mode = Mode.NUMERIC\n this.data = data.toString()\n}\n\nNumericData.getBitsLength = function getBitsLength (length) {\n return 10 * Math.floor(length / 3) + ((length % 3) ? ((length % 3) * 3 + 1) : 0)\n}\n\nNumericData.prototype.getLength = function getLength () {\n return this.data.length\n}\n\nNumericData.prototype.getBitsLength = function getBitsLength () {\n return NumericData.getBitsLength(this.data.length)\n}\n\nNumericData.prototype.write = function write (bitBuffer) {\n let i, group, value\n\n // The input data string is divided into groups of three digits,\n // and each group is converted to its 10-bit binary equivalent.\n for (i = 0; i + 3 <= this.data.length; i += 3) {\n group = this.data.substr(i, 3)\n value = parseInt(group, 10)\n\n bitBuffer.put(value, 10)\n }\n\n // If the number of input digits is not an exact multiple of three,\n // the final one or two digits are converted to 4 or 7 bits respectively.\n const remainingNum = this.data.length - i\n if (remainingNum > 0) {\n group = this.data.substr(i)\n value = parseInt(group, 10)\n\n bitBuffer.put(value, remainingNum * 3 + 1)\n }\n}\n\nmodule.exports = NumericData\n\n\n//# sourceURL=webpack://@waku/noise-rtc/./node_modules/qrcode/lib/core/numeric-data.js?");
/***/ }),
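/* Usage sketch (comment only): '12345678' splits into two full 3-digit groups
   (10 bits each) plus a trailing 2-digit group (7 bits), 27 bits in total.
   Deep require paths assumed.

const NumericData = require('qrcode/lib/core/numeric-data')
const BitBuffer = require('qrcode/lib/core/bit-buffer')

const seg = new NumericData('12345678')
console.log(seg.getBitsLength())   // 27

const bb = new BitBuffer()
seg.write(bb)                      // puts 123 and 456 as 10-bit values, 78 as 7 bits
console.log(bb.getLengthInBits())  // 27
*/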
/***/ "./node_modules/qrcode/lib/core/polynomial.js":
/*!****************************************************!*\
!*** ./node_modules/qrcode/lib/core/polynomial.js ***!
\****************************************************/
/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
eval("const GF = __webpack_require__(/*! ./galois-field */ \"./node_modules/qrcode/lib/core/galois-field.js\")\n\n/**\n * Multiplies two polynomials inside Galois Field\n *\n * @param {Uint8Array} p1 Polynomial\n * @param {Uint8Array} p2 Polynomial\n * @return {Uint8Array} Product of p1 and p2\n */\nexports.mul = function mul (p1, p2) {\n const coeff = new Uint8Array(p1.length + p2.length - 1)\n\n for (let i = 0; i < p1.length; i++) {\n for (let j = 0; j < p2.length; j++) {\n coeff[i + j] ^= GF.mul(p1[i], p2[j])\n }\n }\n\n return coeff\n}\n\n/**\n * Calculate the remainder of polynomials division\n *\n * @param {Uint8Array} divident Polynomial\n * @param {Uint8Array} divisor Polynomial\n * @return {Uint8Array} Remainder\n */\nexports.mod = function mod (divident, divisor) {\n let result = new Uint8Array(divident)\n\n while ((result.length - divisor.length) >= 0) {\n const coeff = result[0]\n\n for (let i = 0; i < divisor.length; i++) {\n result[i] ^= GF.mul(divisor[i], coeff)\n }\n\n // remove all zeros from buffer head\n let offset = 0\n while (offset < result.length && result[offset] === 0) offset++\n result = result.slice(offset)\n }\n\n return result\n}\n\n/**\n * Generate an irreducible generator polynomial of specified degree\n * (used by Reed-Solomon encoder)\n *\n * @param {Number} degree Degree of the generator polynomial\n * @return {Uint8Array} Buffer containing polynomial coefficients\n */\nexports.generateECPolynomial = function generateECPolynomial (degree) {\n let poly = new Uint8Array([1])\n for (let i = 0; i < degree; i++) {\n poly = exports.mul(poly, new Uint8Array([1, GF.exp(i)]))\n }\n\n return poly\n}\n\n\n//# sourceURL=webpack://@waku/noise-rtc/./node_modules/qrcode/lib/core/polynomial.js?");
/***/ }),
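/* Usage sketch (comment only): a degree-2 generator polynomial is (x + 1)(x + alpha)
   over GF(256), and the EC codewords are the remainder of the padded message divided
   by it. Expected values below were worked out by hand from the code above.
   Deep require path assumed.

const Polynomial = require('qrcode/lib/core/polynomial')

const gen = Polynomial.generateECPolynomial(2)
console.log(Array.from(gen))  // [ 1, 3, 2 ]

const msg = new Uint8Array([1, 2, 3, 0, 0])  // message 1,2,3 padded by the degree
console.log(Array.from(Polynomial.mod(msg, gen)))  // [ 4, 4 ]
*/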
/***/ "./node_modules/qrcode/lib/core/qrcode.js":
/*!************************************************!*\
!*** ./node_modules/qrcode/lib/core/qrcode.js ***!
\************************************************/
/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
eval("const Utils = __webpack_require__(/*! ./utils */ \"./node_modules/qrcode/lib/core/utils.js\")\nconst ECLevel = __webpack_require__(/*! ./error-correction-level */ \"./node_modules/qrcode/lib/core/error-correction-level.js\")\nconst BitBuffer = __webpack_require__(/*! ./bit-buffer */ \"./node_modules/qrcode/lib/core/bit-buffer.js\")\nconst BitMatrix = __webpack_require__(/*! ./bit-matrix */ \"./node_modules/qrcode/lib/core/bit-matrix.js\")\nconst AlignmentPattern = __webpack_require__(/*! ./alignment-pattern */ \"./node_modules/qrcode/lib/core/alignment-pattern.js\")\nconst FinderPattern = __webpack_require__(/*! ./finder-pattern */ \"./node_modules/qrcode/lib/core/finder-pattern.js\")\nconst MaskPattern = __webpack_require__(/*! ./mask-pattern */ \"./node_modules/qrcode/lib/core/mask-pattern.js\")\nconst ECCode = __webpack_require__(/*! ./error-correction-code */ \"./node_modules/qrcode/lib/core/error-correction-code.js\")\nconst ReedSolomonEncoder = __webpack_require__(/*! ./reed-solomon-encoder */ \"./node_modules/qrcode/lib/core/reed-solomon-encoder.js\")\nconst Version = __webpack_require__(/*! ./version */ \"./node_modules/qrcode/lib/core/version.js\")\nconst FormatInfo = __webpack_require__(/*! ./format-info */ \"./node_modules/qrcode/lib/core/format-info.js\")\nconst Mode = __webpack_require__(/*! ./mode */ \"./node_modules/qrcode/lib/core/mode.js\")\nconst Segments = __webpack_require__(/*! ./segments */ \"./node_modules/qrcode/lib/core/segments.js\")\n\n/**\n * QRCode for JavaScript\n *\n * modified by Ryan Day for nodejs support\n * Copyright (c) 2011 Ryan Day\n *\n * Licensed under the MIT license:\n * http://www.opensource.org/licenses/mit-license.php\n *\n//---------------------------------------------------------------------\n// QRCode for JavaScript\n//\n// Copyright (c) 2009 Kazuhiko Arase\n//\n// URL: http://www.d-project.com/\n//\n// Licensed under the MIT license:\n// http://www.opensource.org/licenses/mit-license.php\n//\n// The word \"QR Code\" is registered trademark of\n// DENSO WAVE INCORPORATED\n// http://www.denso-wave.com/qrcode/faqpatent-e.html\n//\n//---------------------------------------------------------------------\n*/\n\n/**\n * Add finder patterns bits to matrix\n *\n * @param {BitMatrix} matrix Modules matrix\n * @param {Number} version QR Code version\n */\nfunction setupFinderPattern (matrix, version) {\n const size = matrix.size\n const pos = FinderPattern.getPositions(version)\n\n for (let i = 0; i < pos.length; i++) {\n const row = pos[i][0]\n const col = pos[i][1]\n\n for (let r = -1; r <= 7; r++) {\n if (row + r <= -1 || size <= row + r) continue\n\n for (let c = -1; c <= 7; c++) {\n if (col + c <= -1 || size <= col + c) continue\n\n if ((r >= 0 && r <= 6 && (c === 0 || c === 6)) ||\n (c >= 0 && c <= 6 && (r === 0 || r === 6)) ||\n (r >= 2 && r <= 4 && c >= 2 && c <= 4)) {\n matrix.set(row + r, col + c, true, true)\n } else {\n matrix.set(row + r, col + c, false, true)\n }\n }\n }\n }\n}\n\n/**\n * Add timing pattern bits to matrix\n *\n * Note: this function must be called before {@link setupAlignmentPattern}\n *\n * @param {BitMatrix} matrix Modules matrix\n */\nfunction setupTimingPattern (matrix) {\n const size = matrix.size\n\n for (let r = 8; r < size - 8; r++) {\n const value = r % 2 === 0\n matrix.set(r, 6, value, true)\n matrix.set(6, r, value, true)\n }\n}\n\n/**\n * Add alignment patterns bits to matrix\n *\n * Note: this function must be called after {@link setupTimingPattern}\n *\n * @param {BitMatrix} matrix Modules matrix\n * 
@param {Number} version QR Code version\n */\nfunction setupAlignmentPattern (matrix, version) {\n const pos = AlignmentPattern.getPositions(version)\n\n for (let i = 0; i < pos.length; i++) {\n const row = pos[i][0]\n const col = pos[i][1]\n\n for (let r = -2; r <= 2; r++) {\n for (let c = -2; c <= 2; c++) {\n if (r === -2 || r === 2 || c === -2 || c === 2 ||\n (r === 0 && c === 0)) {\n matrix.set(row + r, col + c, true, true)\n } else {\n matrix.set(row + r, col + c, false, true)\n }\n }\n }\n }\n}\n\n/**\n * Add version info bits to matrix\n *\n * @param {BitMatrix} matrix Modules matrix\n * @param {Number} version QR Code version\n */\nfunction setupVersionInfo (matrix, version) {\n const size = matrix.size\n const bits = Version.getEncodedBits(version)\n let row, col, mod\n\n for (let i = 0; i < 18; i++) {\n row = Math.floor(i / 3)\n col = i % 3 + size - 8 - 3\n mod = ((bits >> i) & 1) === 1\n\n matrix.set(row, col, mod, true)\n matrix.set(col, row, mod, true)\n }\n}\n\n/**\n * Add format info bits to matrix\n *\n * @param {BitMatrix} matrix Modules matrix\n * @param {ErrorCorrectionLevel} errorCorrectionLevel Error correction level\n * @param {Number} maskPattern Mask pattern reference value\n */\nfunction setupFormatInfo (matrix, errorCorrectionLevel, maskPattern) {\n const size = matrix.size\n const bits = FormatInfo.getEncodedBits(errorCorrectionLevel, maskPattern)\n let i, mod\n\n for (i = 0; i < 15; i++) {\n mod = ((bits >> i) & 1) === 1\n\n // vertical\n if (i < 6) {\n matrix.set(i, 8, mod, true)\n } else if (i < 8) {\n matrix.set(i + 1, 8, mod, true)\n } else {\n matrix.set(size - 15 + i, 8, mod, true)\n }\n\n // horizontal\n if (i < 8) {\n matrix.set(8, size - i - 1, mod, true)\n } else if (i < 9) {\n matrix.set(8, 15 - i - 1 + 1, mod, true)\n } else {\n matrix.set(8, 15 - i - 1, mod, true)\n }\n }\n\n // fixed module\n matrix.set(size - 8, 8, 1, true)\n}\n\n/**\n * Add encoded data bits to matrix\n *\n * @param {BitMatrix} matrix Modules matrix\n * @param {Uint8Array} data Data codewords\n */\nfunction setupData (matrix, data) {\n const size = matrix.size\n let inc = -1\n let row = size - 1\n let bitIndex = 7\n let byteIndex = 0\n\n for (let col = size - 1; col > 0; col -= 2) {\n if (col === 6) col--\n\n while (true) {\n for (let c = 0; c < 2; c++) {\n if (!matrix.isReserved(row, col - c)) {\n let dark = false\n\n if (byteIndex < data.length) {\n dark = (((data[byteIndex] >>> bitIndex) & 1) === 1)\n }\n\n matrix.set(row, col - c, dark)\n bitIndex--\n\n if (bitIndex === -1) {\n byteIndex++\n bitIndex = 7\n }\n }\n }\n\n row += inc\n\n if (row < 0 || size <= row) {\n row -= inc\n inc = -inc\n break\n }\n }\n }\n}\n\n/**\n * Create encoded codewords from data input\n *\n * @param {Number} version QR Code version\n * @param {ErrorCorrectionLevel} errorCorrectionLevel Error correction level\n * @param {ByteData} data Data input\n * @return {Uint8Array} Buffer containing encoded codewords\n */\nfunction createData (version, errorCorrectionLevel, segments) {\n // Prepare data buffer\n const buffer = new BitBuffer()\n\n segments.forEach(function (data) {\n // prefix data with mode indicator (4 bits)\n buffer.put(data.mode.bit, 4)\n\n // Prefix data with character count indicator.\n // The character count indicator is a string of bits that represents the\n // number of characters that are being encoded.\n // The character count indicator must be placed after the mode indicator\n // and must be a certain number of bits long, depending on the QR version\n // and data mode\n // @see 
{@link Mode.getCharCountIndicator}.\n buffer.put(data.getLength(), Mode.getCharCountIndicator(data.mode, version))\n\n // add binary data sequence to buffer\n data.write(buffer)\n })\n\n // Calculate required number of bits\n const totalCodewords = Utils.getSymbolTotalCodewords(version)\n const ecTotalCodewords = ECCode.getTotalCodewordsCount(version, errorCorrectionLevel)\n const dataTotalCodewordsBits = (totalCodewords - ecTotalCodewords) * 8\n\n // Add a terminator.\n // If the bit string is shorter than the total number of required bits,\n // a terminator of up to four 0s must be added to the right side of the string.\n // If the bit string is more than four bits shorter than the required number of bits,\n // add four 0s to the end.\n if (buffer.getLengthInBits() + 4 <= dataTotalCodewordsBits) {\n buffer.put(0, 4)\n }\n\n // If the bit string is fewer than four bits shorter, add only the number of 0s that\n // are needed to reach the required number of bits.\n\n // After adding the terminator, if the number of bits in the string is not a multiple of 8,\n // pad the string on the right with 0s to make the string's length a multiple of 8.\n while (buffer.getLengthInBits() % 8 !== 0) {\n buffer.putBit(0)\n }\n\n // Add pad bytes if the string is still shorter than the total number of required bits.\n // Extend the buffer to fill the data capacity of the symbol corresponding to\n // the Version and Error Correction Level by adding the Pad Codewords 11101100 (0xEC)\n // and 00010001 (0x11) alternately.\n const remainingByte = (dataTotalCodewordsBits - buffer.getLengthInBits()) / 8\n for (let i = 0; i < remainingByte; i++) {\n buffer.put(i % 2 ? 0x11 : 0xEC, 8)\n }\n\n return createCodewords(buffer, version, errorCorrectionLevel)\n}\n\n/**\n * Encode input data with Reed-Solomon and return codewords with\n * relative error correction bits\n *\n * @param {BitBuffer} bitBuffer Data to encode\n * @param {Number} version QR Code version\n * @param {ErrorCorrectionLevel} errorCorrectionLevel Error correction level\n * @return {Uint8Array} Buffer containing encoded codewords\n */\nfunction createCodewords (bitBuffer, version, errorCorrectionLevel) {\n // Total codewords for this QR code version (Data + Error correction)\n const totalCodewords = Utils.getSymbolTotalCodewords(version)\n\n // Total number of error correction codewords\n const ecTotalCodewords = ECCode.getTotalCodewordsCount(version, errorCorrectionLevel)\n\n // Total number of data codewords\n const dataTotalCodewords = totalCodewords - ecTotalCodewords\n\n // Total number of blocks\n const ecTotalBlocks = ECCode.getBlocksCount(version, errorCorrectionLevel)\n\n // Calculate how many blocks each group should contain\n const blocksInGroup2 = totalCodewords % ecTotalBlocks\n const blocksInGroup1 = ecTotalBlocks - blocksInGroup2\n\n const totalCodewordsInGroup1 = Math.floor(totalCodewords / ecTotalBlocks)\n\n const dataCodewordsInGroup1 = Math.floor(dataTotalCodewords / ecTotalBlocks)\n const dataCodewordsInGroup2 = dataCodewordsInGroup1 + 1\n\n // Number of EC codewords is the same for both groups\n const ecCount = totalCodewordsInGroup1 - dataCodewordsInGroup1\n\n // Initialize a Reed-Solomon encoder with a generator polynomial of degree ecCount\n const rs = new ReedSolomonEncoder(ecCount)\n\n let offset = 0\n const dcData = new Array(ecTotalBlocks)\n const ecData = new Array(ecTotalBlocks)\n let maxDataSize = 0\n const buffer = new Uint8Array(bitBuffer.buffer)\n\n // Divide the buffer into the required number of blocks\n for (let b = 
0; b < ecTotalBlocks; b++) {\n const dataSize = b < blocksInGroup1 ? dataCodewordsInGroup1 : dataCodewordsInGroup2\n\n // extract a block of data from buffer\n dcData[b] = buffer.slice(offset, offset + dataSize)\n\n // Calculate EC codewords for this data block\n ecData[b] = rs.encode(dcData[b])\n\n offset += dataSize\n maxDataSize = Math.max(maxDataSize, dataSize)\n }\n\n // Create final data\n // Interleave the data and error correction codewords from each block\n const data = new Uint8Array(totalCodewords)\n let index = 0\n let i, r\n\n // Add data codewords\n for (i = 0; i < maxDataSize; i++) {\n for (r = 0; r < ecTotalBlocks; r++) {\n if (i < dcData[r].length) {\n data[index++] = dcData[r][i]\n }\n }\n }\n\n // Apped EC codewords\n for (i = 0; i < ecCount; i++) {\n for (r = 0; r < ecTotalBlocks; r++) {\n data[index++] = ecData[r][i]\n }\n }\n\n return data\n}\n\n/**\n * Build QR Code symbol\n *\n * @param {String} data Input string\n * @param {Number} version QR Code version\n * @param {ErrorCorretionLevel} errorCorrectionLevel Error level\n * @param {MaskPattern} maskPattern Mask pattern\n * @return {Object} Object containing symbol data\n */\nfunction createSymbol (data, version, errorCorrectionLevel, maskPattern) {\n let segments\n\n if (Array.isArray(data)) {\n segments = Segments.fromArray(data)\n } else if (typeof data === 'string') {\n let estimatedVersion = version\n\n if (!estimatedVersion) {\n const rawSegments = Segments.rawSplit(data)\n\n // Estimate best version that can contain raw splitted segments\n estimatedVersion = Version.getBestVersionForData(rawSegments, errorCorrectionLevel)\n }\n\n // Build optimized segments\n // If estimated version is undefined, try with the highest version\n segments = Segments.fromString(data, estimatedVersion || 40)\n } else {\n throw new Error('Invalid data')\n }\n\n // Get the min version that can contain data\n const bestVersion = Version.getBestVersionForData(segments, errorCorrectionLevel)\n\n // If no version is found, data cannot be stored\n if (!bestVersion) {\n throw new Error('The amount of data is too big to be stored in a QR Code')\n }\n\n // If not specified, use min version as default\n if (!version) {\n version = bestVersion\n\n // Check if the specified version can contain the data\n } else if (version < bestVersion) {\n throw new Error('\\n' +\n 'The chosen QR Code version cannot contain this amount of data.\\n' +\n 'Minimum version required to store current data is: ' + bestVersion + '.\\n'\n )\n }\n\n const dataBits = createData(version, errorCorrectionLevel, segments)\n\n // Allocate matrix buffer\n const moduleCount = Utils.getSymbolSize(version)\n const modules = new BitMatrix(moduleCount)\n\n // Add function modules\n setupFinderPattern(modules, version)\n setupTimingPattern(modules)\n setupAlignmentPattern(modules, version)\n\n // Add temporary dummy bits for format info just to set them as reserved.\n // This is needed to prevent these bits from being masked by {@link MaskPattern.applyMask}\n // since the masking operation must be performed only on the encoding region.\n // These blocks will be replaced with correct values later in code.\n setupFormatInfo(modules, errorCorrectionLevel, 0)\n\n if (version >= 7) {\n setupVersionInfo(modules, version)\n }\n\n // Add data codewords\n setupData(modules, dataBits)\n\n if (isNaN(maskPattern)) {\n // Find best mask pattern\n maskPattern = MaskPattern.getBestMask(modules,\n setupFormatInfo.bind(null, modules, errorCorrectionLevel))\n }\n\n // Apply mask pattern\n 
MaskPattern.applyMask(maskPattern, modules)\n\n // Replace format info bits with correct values\n setupFormatInfo(modules, errorCorrectionLevel, maskPattern)\n\n return {\n modules: modules,\n version: version,\n errorCorrectionLevel: errorCorrectionLevel,\n maskPattern: maskPattern,\n segments: segments\n }\n}\n\n/**\n * QR Code\n *\n * @param {String | Array} data Input data\n * @param {Object} options Optional configurations\n * @param {Number} options.version QR Code version\n * @param {String} options.errorCorrectionLevel Error correction level\n * @param {Function} options.toSJISFunc Helper func to convert utf8 to sjis\n */\nexports.create = function create (data, options) {\n if (typeof data === 'undefined' || data === '') {\n throw new Error('No input text')\n }\n\n let errorCorrectionLevel = ECLevel.M\n let version\n let mask\n\n if (typeof options !== 'undefined') {\n // Use higher error correction level as default\n errorCorrectionLevel = ECLevel.from(options.errorCorrectionLevel, ECLevel.M)\n version = Version.from(options.version)\n mask = MaskPattern.from(options.maskPattern)\n\n if (options.toSJISFunc) {\n Utils.setToSJISFunction(options.toSJISFunc)\n }\n }\n\n return createSymbol(data, version, errorCorrectionLevel, mask)\n}\n\n\n//# sourceURL=webpack://@waku/noise-rtc/./node_modules/qrcode/lib/core/qrcode.js?");
/***/ }),
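/* Usage sketch (comment only): the classic 'HELLO WORLD' example at level Q should fit
   in the smallest symbol, so create() picks version 1 (21x21) with a single
   alphanumeric segment. Deep require path assumed.

const QRCode = require('qrcode/lib/core/qrcode')

const qr = QRCode.create('HELLO WORLD', { errorCorrectionLevel: 'Q' })
console.log(qr.version)                       // 1
console.log(qr.modules.size)                  // 21
console.log(qr.errorCorrectionLevel)          // { bit: 3 }
console.log(qr.segments.map(s => s.mode.id))  // [ 'Alphanumeric' ]
*/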
/***/ "./node_modules/qrcode/lib/core/reed-solomon-encoder.js":
/*!**************************************************************!*\
!*** ./node_modules/qrcode/lib/core/reed-solomon-encoder.js ***!
\**************************************************************/
/***/ ((module, __unused_webpack_exports, __webpack_require__) => {
eval("const Polynomial = __webpack_require__(/*! ./polynomial */ \"./node_modules/qrcode/lib/core/polynomial.js\")\n\nfunction ReedSolomonEncoder (degree) {\n this.genPoly = undefined\n this.degree = degree\n\n if (this.degree) this.initialize(this.degree)\n}\n\n/**\n * Initialize the encoder.\n * The input param should correspond to the number of error correction codewords.\n *\n * @param {Number} degree\n */\nReedSolomonEncoder.prototype.initialize = function initialize (degree) {\n // create an irreducible generator polynomial\n this.degree = degree\n this.genPoly = Polynomial.generateECPolynomial(this.degree)\n}\n\n/**\n * Encodes a chunk of data\n *\n * @param {Uint8Array} data Buffer containing input data\n * @return {Uint8Array} Buffer containing encoded data\n */\nReedSolomonEncoder.prototype.encode = function encode (data) {\n if (!this.genPoly) {\n throw new Error('Encoder not initialized')\n }\n\n // Calculate EC for this data block\n // extends data size to data+genPoly size\n const paddedData = new Uint8Array(data.length + this.degree)\n paddedData.set(data)\n\n // The error correction codewords are the remainder after dividing the data codewords\n // by a generator polynomial\n const remainder = Polynomial.mod(paddedData, this.genPoly)\n\n // return EC data blocks (last n byte, where n is the degree of genPoly)\n // If coefficients number in remainder are less than genPoly degree,\n // pad with 0s to the left to reach the needed number of coefficients\n const start = this.degree - remainder.length\n if (start > 0) {\n const buff = new Uint8Array(this.degree)\n buff.set(remainder, start)\n\n return buff\n }\n\n return remainder\n}\n\nmodule.exports = ReedSolomonEncoder\n\n\n//# sourceURL=webpack://@waku/noise-rtc/./node_modules/qrcode/lib/core/reed-solomon-encoder.js?");
/***/ }),
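/* Usage sketch (comment only): an encoder of degree 2 appends two EC codewords to a
   data block; the expected output matches the polynomial remainder example above.
   Deep require path assumed.

const ReedSolomonEncoder = require('qrcode/lib/core/reed-solomon-encoder')

const rs = new ReedSolomonEncoder(2)
console.log(Array.from(rs.encode(new Uint8Array([1, 2, 3]))))  // [ 4, 4 ]
*/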
/***/ "./node_modules/qrcode/lib/core/regex.js":
/*!***********************************************!*\
!*** ./node_modules/qrcode/lib/core/regex.js ***!
\***********************************************/
/***/ ((__unused_webpack_module, exports) => {
eval("const numeric = '[0-9]+'\nconst alphanumeric = '[A-Z $%*+\\\\-./:]+'\nlet kanji = '(?:[u3000-u303F]|[u3040-u309F]|[u30A0-u30FF]|' +\n '[uFF00-uFFEF]|[u4E00-u9FAF]|[u2605-u2606]|[u2190-u2195]|u203B|' +\n '[u2010u2015u2018u2019u2025u2026u201Cu201Du2225u2260]|' +\n '[u0391-u0451]|[u00A7u00A8u00B1u00B4u00D7u00F7])+'\nkanji = kanji.replace(/u/g, '\\\\u')\n\nconst byte = '(?:(?![A-Z0-9 $%*+\\\\-./:]|' + kanji + ')(?:.|[\\r\\n]))+'\n\nexports.KANJI = new RegExp(kanji, 'g')\nexports.BYTE_KANJI = new RegExp('[^A-Z0-9 $%*+\\\\-./:]+', 'g')\nexports.BYTE = new RegExp(byte, 'g')\nexports.NUMERIC = new RegExp(numeric, 'g')\nexports.ALPHANUMERIC = new RegExp(alphanumeric, 'g')\n\nconst TEST_KANJI = new RegExp('^' + kanji + '$')\nconst TEST_NUMERIC = new RegExp('^' + numeric + '$')\nconst TEST_ALPHANUMERIC = new RegExp('^[A-Z0-9 $%*+\\\\-./:]+$')\n\nexports.testKanji = function testKanji (str) {\n return TEST_KANJI.test(str)\n}\n\nexports.testNumeric = function testNumeric (str) {\n return TEST_NUMERIC.test(str)\n}\n\nexports.testAlphanumeric = function testAlphanumeric (str) {\n return TEST_ALPHANUMERIC.test(str)\n}\n\n\n//# sourceURL=webpack://@waku/noise-rtc/./node_modules/qrcode/lib/core/regex.js?");
/***/ }),
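/* Usage sketch (comment only): the test helpers check whether a whole string fits a
   single mode; lowercase letters are outside the 45-character alphanumeric set.
   Deep require path assumed.

const Regex = require('qrcode/lib/core/regex')

console.log(Regex.testNumeric('123456'))            // true
console.log(Regex.testNumeric('12a'))               // false
console.log(Regex.testAlphanumeric('HELLO WORLD'))  // true
console.log(Regex.testAlphanumeric('hello'))        // false
*/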
/***/ "./node_modules/qrcode/lib/core/segments.js":
/*!**************************************************!*\
!*** ./node_modules/qrcode/lib/core/segments.js ***!
\**************************************************/
/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
eval("const Mode = __webpack_require__(/*! ./mode */ \"./node_modules/qrcode/lib/core/mode.js\")\nconst NumericData = __webpack_require__(/*! ./numeric-data */ \"./node_modules/qrcode/lib/core/numeric-data.js\")\nconst AlphanumericData = __webpack_require__(/*! ./alphanumeric-data */ \"./node_modules/qrcode/lib/core/alphanumeric-data.js\")\nconst ByteData = __webpack_require__(/*! ./byte-data */ \"./node_modules/qrcode/lib/core/byte-data.js\")\nconst KanjiData = __webpack_require__(/*! ./kanji-data */ \"./node_modules/qrcode/lib/core/kanji-data.js\")\nconst Regex = __webpack_require__(/*! ./regex */ \"./node_modules/qrcode/lib/core/regex.js\")\nconst Utils = __webpack_require__(/*! ./utils */ \"./node_modules/qrcode/lib/core/utils.js\")\nconst dijkstra = __webpack_require__(/*! dijkstrajs */ \"./node_modules/dijkstrajs/dijkstra.js\")\n\n/**\n * Returns UTF8 byte length\n *\n * @param {String} str Input string\n * @return {Number} Number of byte\n */\nfunction getStringByteLength (str) {\n return unescape(encodeURIComponent(str)).length\n}\n\n/**\n * Get a list of segments of the specified mode\n * from a string\n *\n * @param {Mode} mode Segment mode\n * @param {String} str String to process\n * @return {Array} Array of object with segments data\n */\nfunction getSegments (regex, mode, str) {\n const segments = []\n let result\n\n while ((result = regex.exec(str)) !== null) {\n segments.push({\n data: result[0],\n index: result.index,\n mode: mode,\n length: result[0].length\n })\n }\n\n return segments\n}\n\n/**\n * Extracts a series of segments with the appropriate\n * modes from a string\n *\n * @param {String} dataStr Input string\n * @return {Array} Array of object with segments data\n */\nfunction getSegmentsFromString (dataStr) {\n const numSegs = getSegments(Regex.NUMERIC, Mode.NUMERIC, dataStr)\n const alphaNumSegs = getSegments(Regex.ALPHANUMERIC, Mode.ALPHANUMERIC, dataStr)\n let byteSegs\n let kanjiSegs\n\n if (Utils.isKanjiModeEnabled()) {\n byteSegs = getSegments(Regex.BYTE, Mode.BYTE, dataStr)\n kanjiSegs = getSegments(Regex.KANJI, Mode.KANJI, dataStr)\n } else {\n byteSegs = getSegments(Regex.BYTE_KANJI, Mode.BYTE, dataStr)\n kanjiSegs = []\n }\n\n const segs = numSegs.concat(alphaNumSegs, byteSegs, kanjiSegs)\n\n return segs\n .sort(function (s1, s2) {\n return s1.index - s2.index\n })\n .map(function (obj) {\n return {\n data: obj.data,\n mode: obj.mode,\n length: obj.length\n }\n })\n}\n\n/**\n * Returns how many bits are needed to encode a string of\n * specified length with the specified mode\n *\n * @param {Number} length String length\n * @param {Mode} mode Segment mode\n * @return {Number} Bit length\n */\nfunction getSegmentBitsLength (length, mode) {\n switch (mode) {\n case Mode.NUMERIC:\n return NumericData.getBitsLength(length)\n case Mode.ALPHANUMERIC:\n return AlphanumericData.getBitsLength(length)\n case Mode.KANJI:\n return KanjiData.getBitsLength(length)\n case Mode.BYTE:\n return ByteData.getBitsLength(length)\n }\n}\n\n/**\n * Merges adjacent segments which have the same mode\n *\n * @param {Array} segs Array of object with segments data\n * @return {Array} Array of object with segments data\n */\nfunction mergeSegments (segs) {\n return segs.reduce(function (acc, curr) {\n const prevSeg = acc.length - 1 >= 0 ? 
acc[acc.length - 1] : null\n if (prevSeg && prevSeg.mode === curr.mode) {\n acc[acc.length - 1].data += curr.data\n return acc\n }\n\n acc.push(curr)\n return acc\n }, [])\n}\n\n/**\n * Generates a list of all possible nodes combination which\n * will be used to build a segments graph.\n *\n * Nodes are divided by groups. Each group will contain a list of all the modes\n * in which is possible to encode the given text.\n *\n * For example the text '12345' can be encoded as Numeric, Alphanumeric or Byte.\n * The group for '12345' will contain then 3 objects, one for each\n * possible encoding mode.\n *\n * Each node represents a possible segment.\n *\n * @param {Array} segs Array of object with segments data\n * @return {Array} Array of object with segments data\n */\nfunction buildNodes (segs) {\n const nodes = []\n for (let i = 0; i < segs.length; i++) {\n const seg = segs[i]\n\n switch (seg.mode) {\n case Mode.NUMERIC:\n nodes.push([seg,\n { data: seg.data, mode: Mode.ALPHANUMERIC, length: seg.length },\n { data: seg.data, mode: Mode.BYTE, length: seg.length }\n ])\n break\n case Mode.ALPHANUMERIC:\n nodes.push([seg,\n { data: seg.data, mode: Mode.BYTE, length: seg.length }\n ])\n break\n case Mode.KANJI:\n nodes.push([seg,\n { data: seg.data, mode: Mode.BYTE, length: getStringByteLength(seg.data) }\n ])\n break\n case Mode.BYTE:\n nodes.push([\n { data: seg.data, mode: Mode.BYTE, length: getStringByteLength(seg.data) }\n ])\n }\n }\n\n return nodes\n}\n\n/**\n * Builds a graph from a list of nodes.\n * All segments in each node group will be connected with all the segments of\n * the next group and so on.\n *\n * At each connection will be assigned a weight depending on the\n * segment's byte length.\n *\n * @param {Array} nodes Array of object with segments data\n * @param {Number} version QR Code version\n * @return {Object} Graph of all possible segments\n */\nfunction buildGraph (nodes, version) {\n const table = {}\n const graph = { start: {} }\n let prevNodeIds = ['start']\n\n for (let i = 0; i < nodes.length; i++) {\n const nodeGroup = nodes[i]\n const currentNodeIds = []\n\n for (let j = 0; j < nodeGroup.length; j++) {\n const node = nodeGroup[j]\n const key = '' + i + j\n\n currentNodeIds.push(key)\n table[key] = { node: node, lastCount: 0 }\n graph[key] = {}\n\n for (let n = 0; n < prevNodeIds.length; n++) {\n const prevNodeId = prevNodeIds[n]\n\n if (table[prevNodeId] && table[prevNodeId].node.mode === node.mode) {\n graph[prevNodeId][key] =\n getSegmentBitsLength(table[prevNodeId].lastCount + node.length, node.mode) -\n getSegmentBitsLength(table[prevNodeId].lastCount, node.mode)\n\n table[prevNodeId].lastCount += node.length\n } else {\n if (table[prevNodeId]) table[prevNodeId].lastCount = node.length\n\n graph[prevNodeId][key] = getSegmentBitsLength(node.length, node.mode) +\n 4 + Mode.getCharCountIndicator(node.mode, version) // switch cost\n }\n }\n }\n\n prevNodeIds = currentNodeIds\n }\n\n for (let n = 0; n < prevNodeIds.length; n++) {\n graph[prevNodeIds[n]].end = 0\n }\n\n return { map: graph, table: table }\n}\n\n/**\n * Builds a segment from a specified data and mode.\n * If a mode is not specified, the more suitable will be used.\n *\n * @param {String} data Input data\n * @param {Mode | String} modesHint Data mode\n * @return {Segment} Segment\n */\nfunction buildSingleSegment (data, modesHint) {\n let mode\n const bestMode = Mode.getBestModeForData(data)\n\n mode = Mode.from(modesHint, bestMode)\n\n // Make sure data can be encoded\n if (mode !== Mode.BYTE && 
mode.bit < bestMode.bit) {\n throw new Error('\"' + data + '\"' +\n ' cannot be encoded with mode ' + Mode.toString(mode) +\n '.\\n Suggested mode is: ' + Mode.toString(bestMode))\n }\n\n // Use Mode.BYTE if Kanji support is disabled\n if (mode === Mode.KANJI && !Utils.isKanjiModeEnabled()) {\n mode = Mode.BYTE\n }\n\n switch (mode) {\n case Mode.NUMERIC:\n return new NumericData(data)\n\n case Mode.ALPHANUMERIC:\n return new AlphanumericData(data)\n\n case Mode.KANJI:\n return new KanjiData(data)\n\n case Mode.BYTE:\n return new ByteData(data)\n }\n}\n\n/**\n * Builds a list of segments from an array.\n * Array can contain Strings or Objects with segment's info.\n *\n * For each item which is a string, will be generated a segment with the given\n * string and the more appropriate encoding mode.\n *\n * For each item which is an object, will be generated a segment with the given\n * data and mode.\n * Objects must contain at least the property \"data\".\n * If property \"mode\" is not present, the more suitable mode will be used.\n *\n * @param {Array} array Array of objects with segments data\n * @return {Array} Array of Segments\n */\nexports.fromArray = function fromArray (array) {\n return array.reduce(function (acc, seg) {\n if (typeof seg === 'string') {\n acc.push(buildSingleSegment(seg, null))\n } else if (seg.data) {\n acc.push(buildSingleSegment(seg.data, seg.mode))\n }\n\n return acc\n }, [])\n}\n\n/**\n * Builds an optimized sequence of segments from a string,\n * which will produce the shortest possible bitstream.\n *\n * @param {String} data Input string\n * @param {Number} version QR Code version\n * @return {Array} Array of segments\n */\nexports.fromString = function fromString (data, version) {\n const segs = getSegmentsFromString(data, Utils.isKanjiModeEnabled())\n\n const nodes = buildNodes(segs)\n const graph = buildGraph(nodes, version)\n const path = dijkstra.find_path(graph.map, 'start', 'end')\n\n const optimizedSegs = []\n for (let i = 1; i < path.length - 1; i++) {\n optimizedSegs.push(graph.table[path[i]].node)\n }\n\n return exports.fromArray(mergeSegments(optimizedSegs))\n}\n\n/**\n * Splits a string in various segments with the modes which\n * best represent their content.\n * The produced segments are far from being optimized.\n * The output of this function is only used to estimate a QR Code version\n * which may contain the data.\n *\n * @param {string} data Input string\n * @return {Array} Array of segments\n */\nexports.rawSplit = function rawSplit (data) {\n return exports.fromArray(\n getSegmentsFromString(data, Utils.isKanjiModeEnabled())\n )\n}\n\n\n//# sourceURL=webpack://@waku/noise-rtc/./node_modules/qrcode/lib/core/segments.js?");
/***/ }),
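/* Usage sketch (comment only): fromString runs the segment graph through dijkstra to
   minimise the bit stream; for mixed content this typically yields one segment per
   run of same-mode characters (expected output below is a hand-derived estimate).
   Deep require paths assumed.

const Segments = require('qrcode/lib/core/segments')
const Mode = require('qrcode/lib/core/mode')

const segs = Segments.fromString('ABC123456789', 1)
console.log(segs.map(s => ({ mode: s.mode.id, length: s.getLength() })))
// e.g. [ { mode: 'Alphanumeric', length: 3 }, { mode: 'Numeric', length: 9 } ]

// Explicit segments can also be passed as strings or { data, mode } objects
const fixed = Segments.fromArray([{ data: '1234', mode: Mode.NUMERIC }, 'abc'])
console.log(fixed.map(s => s.mode.id))  // [ 'Numeric', 'Byte' ]
*/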
/***/ "./node_modules/qrcode/lib/core/utils.js":
/*!***********************************************!*\
!*** ./node_modules/qrcode/lib/core/utils.js ***!
\***********************************************/
/***/ ((__unused_webpack_module, exports) => {
eval("let toSJISFunction\nconst CODEWORDS_COUNT = [\n 0, // Not used\n 26, 44, 70, 100, 134, 172, 196, 242, 292, 346,\n 404, 466, 532, 581, 655, 733, 815, 901, 991, 1085,\n 1156, 1258, 1364, 1474, 1588, 1706, 1828, 1921, 2051, 2185,\n 2323, 2465, 2611, 2761, 2876, 3034, 3196, 3362, 3532, 3706\n]\n\n/**\n * Returns the QR Code size for the specified version\n *\n * @param {Number} version QR Code version\n * @return {Number} size of QR code\n */\nexports.getSymbolSize = function getSymbolSize (version) {\n if (!version) throw new Error('\"version\" cannot be null or undefined')\n if (version < 1 || version > 40) throw new Error('\"version\" should be in range from 1 to 40')\n return version * 4 + 17\n}\n\n/**\n * Returns the total number of codewords used to store data and EC information.\n *\n * @param {Number} version QR Code version\n * @return {Number} Data length in bits\n */\nexports.getSymbolTotalCodewords = function getSymbolTotalCodewords (version) {\n return CODEWORDS_COUNT[version]\n}\n\n/**\n * Encode data with Bose-Chaudhuri-Hocquenghem\n *\n * @param {Number} data Value to encode\n * @return {Number} Encoded value\n */\nexports.getBCHDigit = function (data) {\n let digit = 0\n\n while (data !== 0) {\n digit++\n data >>>= 1\n }\n\n return digit\n}\n\nexports.setToSJISFunction = function setToSJISFunction (f) {\n if (typeof f !== 'function') {\n throw new Error('\"toSJISFunc\" is not a valid function.')\n }\n\n toSJISFunction = f\n}\n\nexports.isKanjiModeEnabled = function () {\n return typeof toSJISFunction !== 'undefined'\n}\n\nexports.toSJIS = function toSJIS (kanji) {\n return toSJISFunction(kanji)\n}\n\n\n//# sourceURL=webpack://@waku/noise-rtc/./node_modules/qrcode/lib/core/utils.js?");
/***/ }),
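/* Usage sketch for the helpers above (the numeric results follow directly from the code;
 * module path and the converter name are illustrative):
 *
 *   const Utils = require('qrcode/lib/core/utils')
 *
 *   Utils.getSymbolSize(7)            // 45 modules per side: 7 * 4 + 17
 *   Utils.getSymbolTotalCodewords(7)  // 196 codewords (data + error correction)
 *   Utils.getBCHDigit(0b10000)        // 5: position of the highest set bit
 *
 *   // Kanji mode stays disabled until a Shift JIS converter is registered
 *   // (any function mapping a kanji character to its Shift JIS code):
 *   Utils.isKanjiModeEnabled()        // false
 *   Utils.setToSJISFunction(myToSJIS) // myToSJIS is a placeholder converter
 *   Utils.isKanjiModeEnabled()        // true
 */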
/***/ "./node_modules/qrcode/lib/core/version-check.js":
/*!*******************************************************!*\
!*** ./node_modules/qrcode/lib/core/version-check.js ***!
\*******************************************************/
/***/ ((__unused_webpack_module, exports) => {
eval("/**\n * Check if QR Code version is valid\n *\n * @param {Number} version QR Code version\n * @return {Boolean} true if valid version, false otherwise\n */\nexports.isValid = function isValid (version) {\n return !isNaN(version) && version >= 1 && version <= 40\n}\n\n\n//# sourceURL=webpack://@waku/noise-rtc/./node_modules/qrcode/lib/core/version-check.js?");
/***/ }),
/***/ "./node_modules/qrcode/lib/core/version.js":
/*!*************************************************!*\
!*** ./node_modules/qrcode/lib/core/version.js ***!
\*************************************************/
/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
eval("const Utils = __webpack_require__(/*! ./utils */ \"./node_modules/qrcode/lib/core/utils.js\")\nconst ECCode = __webpack_require__(/*! ./error-correction-code */ \"./node_modules/qrcode/lib/core/error-correction-code.js\")\nconst ECLevel = __webpack_require__(/*! ./error-correction-level */ \"./node_modules/qrcode/lib/core/error-correction-level.js\")\nconst Mode = __webpack_require__(/*! ./mode */ \"./node_modules/qrcode/lib/core/mode.js\")\nconst VersionCheck = __webpack_require__(/*! ./version-check */ \"./node_modules/qrcode/lib/core/version-check.js\")\n\n// Generator polynomial used to encode version information\nconst G18 = (1 << 12) | (1 << 11) | (1 << 10) | (1 << 9) | (1 << 8) | (1 << 5) | (1 << 2) | (1 << 0)\nconst G18_BCH = Utils.getBCHDigit(G18)\n\nfunction getBestVersionForDataLength (mode, length, errorCorrectionLevel) {\n for (let currentVersion = 1; currentVersion <= 40; currentVersion++) {\n if (length <= exports.getCapacity(currentVersion, errorCorrectionLevel, mode)) {\n return currentVersion\n }\n }\n\n return undefined\n}\n\nfunction getReservedBitsCount (mode, version) {\n // Character count indicator + mode indicator bits\n return Mode.getCharCountIndicator(mode, version) + 4\n}\n\nfunction getTotalBitsFromDataArray (segments, version) {\n let totalBits = 0\n\n segments.forEach(function (data) {\n const reservedBits = getReservedBitsCount(data.mode, version)\n totalBits += reservedBits + data.getBitsLength()\n })\n\n return totalBits\n}\n\nfunction getBestVersionForMixedData (segments, errorCorrectionLevel) {\n for (let currentVersion = 1; currentVersion <= 40; currentVersion++) {\n const length = getTotalBitsFromDataArray(segments, currentVersion)\n if (length <= exports.getCapacity(currentVersion, errorCorrectionLevel, Mode.MIXED)) {\n return currentVersion\n }\n }\n\n return undefined\n}\n\n/**\n * Returns version number from a value.\n * If value is not a valid version, returns defaultValue\n *\n * @param {Number|String} value QR Code version\n * @param {Number} defaultValue Fallback value\n * @return {Number} QR Code version number\n */\nexports.from = function from (value, defaultValue) {\n if (VersionCheck.isValid(value)) {\n return parseInt(value, 10)\n }\n\n return defaultValue\n}\n\n/**\n * Returns how much data can be stored with the specified QR code version\n * and error correction level\n *\n * @param {Number} version QR Code version (1-40)\n * @param {Number} errorCorrectionLevel Error correction level\n * @param {Mode} mode Data mode\n * @return {Number} Quantity of storable data\n */\nexports.getCapacity = function getCapacity (version, errorCorrectionLevel, mode) {\n if (!VersionCheck.isValid(version)) {\n throw new Error('Invalid QR Code version')\n }\n\n // Use Byte mode as default\n if (typeof mode === 'undefined') mode = Mode.BYTE\n\n // Total codewords for this QR code version (Data + Error correction)\n const totalCodewords = Utils.getSymbolTotalCodewords(version)\n\n // Total number of error correction codewords\n const ecTotalCodewords = ECCode.getTotalCodewordsCount(version, errorCorrectionLevel)\n\n // Total number of data codewords\n const dataTotalCodewordsBits = (totalCodewords - ecTotalCodewords) * 8\n\n if (mode === Mode.MIXED) return dataTotalCodewordsBits\n\n const usableBits = dataTotalCodewordsBits - getReservedBitsCount(mode, version)\n\n // Return max number of storable codewords\n switch (mode) {\n case Mode.NUMERIC:\n return Math.floor((usableBits / 10) * 3)\n\n case Mode.ALPHANUMERIC:\n return Math.floor((usableBits / 
11) * 2)\n\n case Mode.KANJI:\n return Math.floor(usableBits / 13)\n\n case Mode.BYTE:\n default:\n return Math.floor(usableBits / 8)\n }\n}\n\n/**\n * Returns the minimum version needed to contain the amount of data\n *\n * @param {Segment} data Segment of data\n * @param {Number} [errorCorrectionLevel=H] Error correction level\n * @param {Mode} mode Data mode\n * @return {Number} QR Code version\n */\nexports.getBestVersionForData = function getBestVersionForData (data, errorCorrectionLevel) {\n let seg\n\n const ecl = ECLevel.from(errorCorrectionLevel, ECLevel.M)\n\n if (Array.isArray(data)) {\n if (data.length > 1) {\n return getBestVersionForMixedData(data, ecl)\n }\n\n if (data.length === 0) {\n return 1\n }\n\n seg = data[0]\n } else {\n seg = data\n }\n\n return getBestVersionForDataLength(seg.mode, seg.getLength(), ecl)\n}\n\n/**\n * Returns version information with relative error correction bits\n *\n * The version information is included in QR Code symbols of version 7 or larger.\n * It consists of an 18-bit sequence containing 6 data bits,\n * with 12 error correction bits calculated using the (18, 6) Golay code.\n *\n * @param {Number} version QR Code version\n * @return {Number} Encoded version info bits\n */\nexports.getEncodedBits = function getEncodedBits (version) {\n if (!VersionCheck.isValid(version) || version < 7) {\n throw new Error('Invalid QR Code version')\n }\n\n let d = version << 12\n\n while (Utils.getBCHDigit(d) - G18_BCH >= 0) {\n d ^= (G18 << (Utils.getBCHDigit(d) - G18_BCH))\n }\n\n return (version << 12) | d\n}\n\n\n//# sourceURL=webpack://@waku/noise-rtc/./node_modules/qrcode/lib/core/version.js?");
/***/ }),
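/* Usage sketch for the version helpers above (module paths illustrative; ECLevel and
 * Mode are the sibling modules required at the top of this file):
 *
 *   const Version = require('qrcode/lib/core/version')
 *   const ECLevel = require('qrcode/lib/core/error-correction-level')
 *   const Mode = require('qrcode/lib/core/mode')
 *
 *   // Characters storable in a version-10 symbol at level M, per mode
 *   Version.getCapacity(10, ECLevel.M, Mode.NUMERIC)
 *   Version.getCapacity(10, ECLevel.M, Mode.BYTE)
 *
 *   // Smallest version able to hold a list of segments (level defaults to M)
 *   const version = Version.getBestVersionForData(segs, ECLevel.M) // segs: see segments.js above
 *
 *   // 18-bit version information field (6 data bits + 12 BCH bits), defined for v7+
 *   Version.getEncodedBits(7).toString(2)
 */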
/***/ "./node_modules/qrcode/lib/renderer/canvas.js":
/*!****************************************************!*\
!*** ./node_modules/qrcode/lib/renderer/canvas.js ***!
\****************************************************/
/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
eval("const Utils = __webpack_require__(/*! ./utils */ \"./node_modules/qrcode/lib/renderer/utils.js\")\n\nfunction clearCanvas (ctx, canvas, size) {\n ctx.clearRect(0, 0, canvas.width, canvas.height)\n\n if (!canvas.style) canvas.style = {}\n canvas.height = size\n canvas.width = size\n canvas.style.height = size + 'px'\n canvas.style.width = size + 'px'\n}\n\nfunction getCanvasElement () {\n try {\n return document.createElement('canvas')\n } catch (e) {\n throw new Error('You need to specify a canvas element')\n }\n}\n\nexports.render = function render (qrData, canvas, options) {\n let opts = options\n let canvasEl = canvas\n\n if (typeof opts === 'undefined' && (!canvas || !canvas.getContext)) {\n opts = canvas\n canvas = undefined\n }\n\n if (!canvas) {\n canvasEl = getCanvasElement()\n }\n\n opts = Utils.getOptions(opts)\n const size = Utils.getImageWidth(qrData.modules.size, opts)\n\n const ctx = canvasEl.getContext('2d')\n const image = ctx.createImageData(size, size)\n Utils.qrToImageData(image.data, qrData, opts)\n\n clearCanvas(ctx, canvasEl, size)\n ctx.putImageData(image, 0, 0)\n\n return canvasEl\n}\n\nexports.renderToDataURL = function renderToDataURL (qrData, canvas, options) {\n let opts = options\n\n if (typeof opts === 'undefined' && (!canvas || !canvas.getContext)) {\n opts = canvas\n canvas = undefined\n }\n\n if (!opts) opts = {}\n\n const canvasEl = exports.render(qrData, canvas, opts)\n\n const type = opts.type || 'image/png'\n const rendererOpts = opts.rendererOpts || {}\n\n return canvasEl.toDataURL(type, rendererOpts.quality)\n}\n\n\n//# sourceURL=webpack://@waku/noise-rtc/./node_modules/qrcode/lib/renderer/canvas.js?");
/***/ }),
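/* Usage sketch for the canvas renderer above. `qrData` stands for the object produced by
 * the core encoder (assumed here to be the package's lib/core/qrcode `create()`); only its
 * `modules.size` / `modules.data` fields are read. Browser-only: a DOM is required, and the
 * 'qr' element id is illustrative.
 *
 *   const CanvasRenderer = require('qrcode/lib/renderer/canvas')
 *
 *   const qrData = require('qrcode/lib/core/qrcode').create('hello', { errorCorrectionLevel: 'M' })
 *   const canvas = CanvasRenderer.render(qrData, document.getElementById('qr'), { scale: 6 })
 *
 *   // Or omit the element and get a data URI from a freshly created canvas:
 *   const uri = CanvasRenderer.renderToDataURL(qrData, undefined, { type: 'image/png' })
 */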
/***/ "./node_modules/qrcode/lib/renderer/svg-tag.js":
/*!*****************************************************!*\
!*** ./node_modules/qrcode/lib/renderer/svg-tag.js ***!
\*****************************************************/
/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
eval("const Utils = __webpack_require__(/*! ./utils */ \"./node_modules/qrcode/lib/renderer/utils.js\")\n\nfunction getColorAttrib (color, attrib) {\n const alpha = color.a / 255\n const str = attrib + '=\"' + color.hex + '\"'\n\n return alpha < 1\n ? str + ' ' + attrib + '-opacity=\"' + alpha.toFixed(2).slice(1) + '\"'\n : str\n}\n\nfunction svgCmd (cmd, x, y) {\n let str = cmd + x\n if (typeof y !== 'undefined') str += ' ' + y\n\n return str\n}\n\nfunction qrToPath (data, size, margin) {\n let path = ''\n let moveBy = 0\n let newRow = false\n let lineLength = 0\n\n for (let i = 0; i < data.length; i++) {\n const col = Math.floor(i % size)\n const row = Math.floor(i / size)\n\n if (!col && !newRow) newRow = true\n\n if (data[i]) {\n lineLength++\n\n if (!(i > 0 && col > 0 && data[i - 1])) {\n path += newRow\n ? svgCmd('M', col + margin, 0.5 + row + margin)\n : svgCmd('m', moveBy, 0)\n\n moveBy = 0\n newRow = false\n }\n\n if (!(col + 1 < size && data[i + 1])) {\n path += svgCmd('h', lineLength)\n lineLength = 0\n }\n } else {\n moveBy++\n }\n }\n\n return path\n}\n\nexports.render = function render (qrData, options, cb) {\n const opts = Utils.getOptions(options)\n const size = qrData.modules.size\n const data = qrData.modules.data\n const qrcodesize = size + opts.margin * 2\n\n const bg = !opts.color.light.a\n ? ''\n : ''\n\n const path =\n ''\n\n const viewBox = 'viewBox=\"' + '0 0 ' + qrcodesize + ' ' + qrcodesize + '\"'\n\n const width = !opts.width ? '' : 'width=\"' + opts.width + '\" height=\"' + opts.width + '\" '\n\n const svgTag = '\\n'\n\n if (typeof cb === 'function') {\n cb(null, svgTag)\n }\n\n return svgTag\n}\n\n\n//# sourceURL=webpack://@waku/noise-rtc/./node_modules/qrcode/lib/renderer/svg-tag.js?");
/***/ }),
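/* Usage sketch for the SVG renderer above. qrToPath() run-length encodes each row of dark
 * modules into 'M x y' / 'm dx 0' moves plus 'h n' strokes at half-module offsets, so the
 * whole symbol becomes a single stroked path; render() wraps that path (plus an optional
 * background shape when the light colour is not fully transparent) in an SVG tag string.
 * qrData is produced by the core encoder, as in the canvas sketch above.
 *
 *   const SvgRenderer = require('qrcode/lib/renderer/svg-tag')
 *
 *   const svg = SvgRenderer.render(qrData, { margin: 2, color: { dark: '#000000ff' } })
 *
 *   // Callback style is also supported:
 *   SvgRenderer.render(qrData, {}, (err, svgTag) => { if (!err) console.log(svgTag) })
 */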
/***/ "./node_modules/qrcode/lib/renderer/utils.js":
/*!***************************************************!*\
!*** ./node_modules/qrcode/lib/renderer/utils.js ***!
\***************************************************/
/***/ ((__unused_webpack_module, exports) => {
eval("function hex2rgba (hex) {\n if (typeof hex === 'number') {\n hex = hex.toString()\n }\n\n if (typeof hex !== 'string') {\n throw new Error('Color should be defined as hex string')\n }\n\n let hexCode = hex.slice().replace('#', '').split('')\n if (hexCode.length < 3 || hexCode.length === 5 || hexCode.length > 8) {\n throw new Error('Invalid hex color: ' + hex)\n }\n\n // Convert from short to long form (fff -> ffffff)\n if (hexCode.length === 3 || hexCode.length === 4) {\n hexCode = Array.prototype.concat.apply([], hexCode.map(function (c) {\n return [c, c]\n }))\n }\n\n // Add default alpha value\n if (hexCode.length === 6) hexCode.push('F', 'F')\n\n const hexValue = parseInt(hexCode.join(''), 16)\n\n return {\n r: (hexValue >> 24) & 255,\n g: (hexValue >> 16) & 255,\n b: (hexValue >> 8) & 255,\n a: hexValue & 255,\n hex: '#' + hexCode.slice(0, 6).join('')\n }\n}\n\nexports.getOptions = function getOptions (options) {\n if (!options) options = {}\n if (!options.color) options.color = {}\n\n const margin = typeof options.margin === 'undefined' ||\n options.margin === null ||\n options.margin < 0\n ? 4\n : options.margin\n\n const width = options.width && options.width >= 21 ? options.width : undefined\n const scale = options.scale || 4\n\n return {\n width: width,\n scale: width ? 4 : scale,\n margin: margin,\n color: {\n dark: hex2rgba(options.color.dark || '#000000ff'),\n light: hex2rgba(options.color.light || '#ffffffff')\n },\n type: options.type,\n rendererOpts: options.rendererOpts || {}\n }\n}\n\nexports.getScale = function getScale (qrSize, opts) {\n return opts.width && opts.width >= qrSize + opts.margin * 2\n ? opts.width / (qrSize + opts.margin * 2)\n : opts.scale\n}\n\nexports.getImageWidth = function getImageWidth (qrSize, opts) {\n const scale = exports.getScale(qrSize, opts)\n return Math.floor((qrSize + opts.margin * 2) * scale)\n}\n\nexports.qrToImageData = function qrToImageData (imgData, qr, opts) {\n const size = qr.modules.size\n const data = qr.modules.data\n const scale = exports.getScale(size, opts)\n const symbolSize = Math.floor((size + opts.margin * 2) * scale)\n const scaledMargin = opts.margin * scale\n const palette = [opts.color.light, opts.color.dark]\n\n for (let i = 0; i < symbolSize; i++) {\n for (let j = 0; j < symbolSize; j++) {\n let posDst = (i * symbolSize + j) * 4\n let pxColor = opts.color.light\n\n if (i >= scaledMargin && j >= scaledMargin &&\n i < symbolSize - scaledMargin && j < symbolSize - scaledMargin) {\n const iSrc = Math.floor((i - scaledMargin) / scale)\n const jSrc = Math.floor((j - scaledMargin) / scale)\n pxColor = palette[data[iSrc * size + jSrc] ? 1 : 0]\n }\n\n imgData[posDst++] = pxColor.r\n imgData[posDst++] = pxColor.g\n imgData[posDst++] = pxColor.b\n imgData[posDst] = pxColor.a\n }\n }\n}\n\n\n//# sourceURL=webpack://@waku/noise-rtc/./node_modules/qrcode/lib/renderer/utils.js?");
/***/ }),
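/* Worked example for the option helpers above (the numbers follow from the code):
 *
 *   const RendererUtils = require('qrcode/lib/renderer/utils')
 *
 *   const opts = RendererUtils.getOptions({ margin: 2, scale: 4, color: { dark: '#123456' } })
 *   // opts.color.dark -> { r: 18, g: 52, b: 86, a: 255, hex: '#123456' }  (alpha defaults to FF)
 *   // opts.color.light defaults to opaque white ('#ffffffff')
 *
 *   RendererUtils.getImageWidth(25, opts)   // (25 + 2*2) * 4 = 116 pixels
 *
 *   // A fixed width overrides scale: the scale is derived so symbol plus margin fits it
 *   const fixed = RendererUtils.getOptions({ width: 300 })
 *   RendererUtils.getScale(25, fixed)       // 300 / (25 + 2*4) = 9.09...
 */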
/***/ "./node_modules/rate-limiter-flexible/index.js":
/*!*****************************************************!*\
!*** ./node_modules/rate-limiter-flexible/index.js ***!
\*****************************************************/
/***/ ((module, __unused_webpack_exports, __webpack_require__) => {
eval("const RateLimiterRedis = __webpack_require__(/*! ./lib/RateLimiterRedis */ \"./node_modules/rate-limiter-flexible/lib/RateLimiterRedis.js\");\nconst RateLimiterMongo = __webpack_require__(/*! ./lib/RateLimiterMongo */ \"./node_modules/rate-limiter-flexible/lib/RateLimiterMongo.js\");\nconst RateLimiterMySQL = __webpack_require__(/*! ./lib/RateLimiterMySQL */ \"./node_modules/rate-limiter-flexible/lib/RateLimiterMySQL.js\");\nconst RateLimiterPostgres = __webpack_require__(/*! ./lib/RateLimiterPostgres */ \"./node_modules/rate-limiter-flexible/lib/RateLimiterPostgres.js\");\nconst {RateLimiterClusterMaster, RateLimiterClusterMasterPM2, RateLimiterCluster} = __webpack_require__(/*! ./lib/RateLimiterCluster */ \"./node_modules/rate-limiter-flexible/lib/RateLimiterCluster.js\");\nconst RateLimiterMemory = __webpack_require__(/*! ./lib/RateLimiterMemory */ \"./node_modules/rate-limiter-flexible/lib/RateLimiterMemory.js\");\nconst RateLimiterMemcache = __webpack_require__(/*! ./lib/RateLimiterMemcache */ \"./node_modules/rate-limiter-flexible/lib/RateLimiterMemcache.js\");\nconst RLWrapperBlackAndWhite = __webpack_require__(/*! ./lib/RLWrapperBlackAndWhite */ \"./node_modules/rate-limiter-flexible/lib/RLWrapperBlackAndWhite.js\");\nconst RateLimiterUnion = __webpack_require__(/*! ./lib/RateLimiterUnion */ \"./node_modules/rate-limiter-flexible/lib/RateLimiterUnion.js\");\nconst RateLimiterQueue = __webpack_require__(/*! ./lib/RateLimiterQueue */ \"./node_modules/rate-limiter-flexible/lib/RateLimiterQueue.js\");\nconst BurstyRateLimiter = __webpack_require__(/*! ./lib/BurstyRateLimiter */ \"./node_modules/rate-limiter-flexible/lib/BurstyRateLimiter.js\");\nconst RateLimiterRes = __webpack_require__(/*! ./lib/RateLimiterRes */ \"./node_modules/rate-limiter-flexible/lib/RateLimiterRes.js\");\n\nmodule.exports = {\n RateLimiterRedis,\n RateLimiterMongo,\n RateLimiterMySQL,\n RateLimiterPostgres,\n RateLimiterMemory,\n RateLimiterMemcache,\n RateLimiterClusterMaster,\n RateLimiterClusterMasterPM2,\n RateLimiterCluster,\n RLWrapperBlackAndWhite,\n RateLimiterUnion,\n RateLimiterQueue,\n BurstyRateLimiter,\n RateLimiterRes,\n};\n\n\n//# sourceURL=webpack://@waku/noise-rtc/./node_modules/rate-limiter-flexible/index.js?");
/***/ }),
/***/ "./node_modules/rate-limiter-flexible/lib/BurstyRateLimiter.js":
/*!*********************************************************************!*\
!*** ./node_modules/rate-limiter-flexible/lib/BurstyRateLimiter.js ***!
\*********************************************************************/
/***/ ((module, __unused_webpack_exports, __webpack_require__) => {
eval("const RateLimiterRes = __webpack_require__(/*! ./RateLimiterRes */ \"./node_modules/rate-limiter-flexible/lib/RateLimiterRes.js\");\n\n/**\n * Bursty rate limiter exposes only msBeforeNext time and doesn't expose points from bursty limiter by default\n * @type {BurstyRateLimiter}\n */\nmodule.exports = class BurstyRateLimiter {\n constructor(rateLimiter, burstLimiter) {\n this._rateLimiter = rateLimiter;\n this._burstLimiter = burstLimiter\n }\n\n /**\n * Merge rate limiter response objects. Responses can be null\n *\n * @param {RateLimiterRes} [rlRes] Rate limiter response\n * @param {RateLimiterRes} [blRes] Bursty limiter response\n */\n _combineRes(rlRes, blRes) {\n return new RateLimiterRes(\n rlRes.remainingPoints,\n Math.min(rlRes.msBeforeNext, blRes.msBeforeNext),\n rlRes.consumedPoints,\n rlRes.isFirstInDuration\n )\n }\n\n /**\n * @param key\n * @param pointsToConsume\n * @param options\n * @returns {Promise}\n */\n consume(key, pointsToConsume = 1, options = {}) {\n return this._rateLimiter.consume(key, pointsToConsume, options)\n .catch((rlRej) => {\n if (rlRej instanceof RateLimiterRes) {\n return this._burstLimiter.consume(key, pointsToConsume, options)\n .then((blRes) => {\n return Promise.resolve(this._combineRes(rlRej, blRes))\n })\n .catch((blRej) => {\n if (blRej instanceof RateLimiterRes) {\n return Promise.reject(this._combineRes(rlRej, blRej))\n } else {\n return Promise.reject(blRej)\n }\n }\n )\n } else {\n return Promise.reject(rlRej)\n }\n })\n }\n\n /**\n * It doesn't expose available points from burstLimiter\n *\n * @param key\n * @returns {Promise}\n */\n get(key) {\n return Promise.all([\n this._rateLimiter.get(key),\n this._burstLimiter.get(key),\n ]).then(([rlRes, blRes]) => {\n return this._combineRes(rlRes, blRes);\n });\n }\n\n get points() {\n return this._rateLimiter.points;\n }\n};\n\n\n//# sourceURL=webpack://@waku/noise-rtc/./node_modules/rate-limiter-flexible/lib/BurstyRateLimiter.js?");
/***/ }),
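/* Usage sketch: the wrapper above only consults the bursty limiter once the primary
 * limiter rejects, so the pair below allows a sustained 2 req/s plus short bursts of up
 * to 5 extra requests per 10 s window (key prefixes and numbers are illustrative; the
 * in-memory limiter defined further down in this bundle is used as the backing store).
 *
 *   const { RateLimiterMemory, BurstyRateLimiter } = require('rate-limiter-flexible')
 *
 *   const limiter = new BurstyRateLimiter(
 *     new RateLimiterMemory({ keyPrefix: 'sustained', points: 2, duration: 1 }),
 *     new RateLimiterMemory({ keyPrefix: 'burst', points: 5, duration: 10 })
 *   )
 *
 *   limiter.consume(remoteAddress)
 *     .then((res) => {
 *       // allowed; res.remainingPoints reflects the primary limiter only
 *     })
 *     .catch((rej) => {
 *       // both limiters exhausted (rej is a RateLimiterRes) or an underlying store error
 *     })
 */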
/***/ "./node_modules/rate-limiter-flexible/lib/RLWrapperBlackAndWhite.js":
/*!**************************************************************************!*\
!*** ./node_modules/rate-limiter-flexible/lib/RLWrapperBlackAndWhite.js ***!
\**************************************************************************/
/***/ ((module, __unused_webpack_exports, __webpack_require__) => {
eval("const RateLimiterRes = __webpack_require__(/*! ./RateLimiterRes */ \"./node_modules/rate-limiter-flexible/lib/RateLimiterRes.js\");\n\nmodule.exports = class RLWrapperBlackAndWhite {\n constructor(opts = {}) {\n this.limiter = opts.limiter;\n this.blackList = opts.blackList;\n this.whiteList = opts.whiteList;\n this.isBlackListed = opts.isBlackListed;\n this.isWhiteListed = opts.isWhiteListed;\n this.runActionAnyway = opts.runActionAnyway;\n }\n\n get limiter() {\n return this._limiter;\n }\n\n set limiter(value) {\n if (typeof value === 'undefined') {\n throw new Error('limiter is not set');\n }\n\n this._limiter = value;\n }\n\n get runActionAnyway() {\n return this._runActionAnyway;\n }\n\n set runActionAnyway(value) {\n this._runActionAnyway = typeof value === 'undefined' ? false : value;\n }\n\n get blackList() {\n return this._blackList;\n }\n\n set blackList(value) {\n this._blackList = Array.isArray(value) ? value : [];\n }\n\n get isBlackListed() {\n return this._isBlackListed;\n }\n\n set isBlackListed(func) {\n if (typeof func === 'undefined') {\n func = () => false;\n }\n if (typeof func !== 'function') {\n throw new Error('isBlackListed must be function');\n }\n this._isBlackListed = func;\n }\n\n get whiteList() {\n return this._whiteList;\n }\n\n set whiteList(value) {\n this._whiteList = Array.isArray(value) ? value : [];\n }\n\n get isWhiteListed() {\n return this._isWhiteListed;\n }\n\n set isWhiteListed(func) {\n if (typeof func === 'undefined') {\n func = () => false;\n }\n if (typeof func !== 'function') {\n throw new Error('isWhiteListed must be function');\n }\n this._isWhiteListed = func;\n }\n\n isBlackListedSomewhere(key) {\n return this.blackList.indexOf(key) >= 0 || this.isBlackListed(key);\n }\n\n isWhiteListedSomewhere(key) {\n return this.whiteList.indexOf(key) >= 0 || this.isWhiteListed(key);\n }\n\n getBlackRes() {\n return new RateLimiterRes(0, Number.MAX_SAFE_INTEGER, 0, false);\n }\n\n getWhiteRes() {\n return new RateLimiterRes(Number.MAX_SAFE_INTEGER, 0, 0, false);\n }\n\n rejectBlack() {\n return Promise.reject(this.getBlackRes());\n }\n\n resolveBlack() {\n return Promise.resolve(this.getBlackRes());\n }\n\n resolveWhite() {\n return Promise.resolve(this.getWhiteRes());\n }\n\n consume(key, pointsToConsume = 1) {\n let res;\n if (this.isWhiteListedSomewhere(key)) {\n res = this.resolveWhite();\n } else if (this.isBlackListedSomewhere(key)) {\n res = this.rejectBlack();\n }\n\n if (typeof res === 'undefined') {\n return this.limiter.consume(key, pointsToConsume);\n }\n\n if (this.runActionAnyway) {\n this.limiter.consume(key, pointsToConsume).catch(() => {});\n }\n return res;\n }\n\n block(key, secDuration) {\n let res;\n if (this.isWhiteListedSomewhere(key)) {\n res = this.resolveWhite();\n } else if (this.isBlackListedSomewhere(key)) {\n res = this.resolveBlack();\n }\n\n if (typeof res === 'undefined') {\n return this.limiter.block(key, secDuration);\n }\n\n if (this.runActionAnyway) {\n this.limiter.block(key, secDuration).catch(() => {});\n }\n return res;\n }\n\n penalty(key, points) {\n let res;\n if (this.isWhiteListedSomewhere(key)) {\n res = this.resolveWhite();\n } else if (this.isBlackListedSomewhere(key)) {\n res = this.resolveBlack();\n }\n\n if (typeof res === 'undefined') {\n return this.limiter.penalty(key, points);\n }\n\n if (this.runActionAnyway) {\n this.limiter.penalty(key, points).catch(() => {});\n }\n return res;\n }\n\n reward(key, points) {\n let res;\n if (this.isWhiteListedSomewhere(key)) {\n res = 
this.resolveWhite();\n } else if (this.isBlackListedSomewhere(key)) {\n res = this.resolveBlack();\n }\n\n if (typeof res === 'undefined') {\n return this.limiter.reward(key, points);\n }\n\n if (this.runActionAnyway) {\n this.limiter.reward(key, points).catch(() => {});\n }\n return res;\n }\n\n get(key) {\n let res;\n if (this.isWhiteListedSomewhere(key)) {\n res = this.resolveWhite();\n } else if (this.isBlackListedSomewhere(key)) {\n res = this.resolveBlack();\n }\n\n if (typeof res === 'undefined' || this.runActionAnyway) {\n return this.limiter.get(key);\n }\n\n return res;\n }\n\n delete(key) {\n return this.limiter.delete(key);\n }\n};\n\n\n//# sourceURL=webpack://@waku/noise-rtc/./node_modules/rate-limiter-flexible/lib/RLWrapperBlackAndWhite.js?");
/***/ }),
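/* Usage sketch: the wrapper above short-circuits white-listed keys to an "unlimited"
 * result and black-listed keys to an immediate rejection, optionally still counting the
 * action on the wrapped limiter (keys below are illustrative).
 *
 *   const { RateLimiterMemory, RLWrapperBlackAndWhite } = require('rate-limiter-flexible')
 *
 *   const limiter = new RLWrapperBlackAndWhite({
 *     limiter: new RateLimiterMemory({ points: 10, duration: 1 }),
 *     whiteList: ['10.0.0.1'],
 *     blackList: ['203.0.113.7'],
 *     isBlackListed: (key) => key.startsWith('bot:'),  // dynamic checks are also supported
 *     runActionAnyway: false,                          // if true, the wrapped limiter is still hit
 *   })
 *
 *   limiter.consume('10.0.0.1')     // resolves with MAX_SAFE_INTEGER remaining points
 *   limiter.consume('203.0.113.7')  // rejects with a zero-point RateLimiterRes
 */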
/***/ "./node_modules/rate-limiter-flexible/lib/RateLimiterAbstract.js":
/*!***********************************************************************!*\
!*** ./node_modules/rate-limiter-flexible/lib/RateLimiterAbstract.js ***!
\***********************************************************************/
/***/ ((module) => {
eval("module.exports = class RateLimiterAbstract {\n /**\n *\n * @param opts Object Defaults {\n * points: 4, // Number of points\n * duration: 1, // Per seconds\n * blockDuration: 0, // Block if consumed more than points in current duration for blockDuration seconds\n * execEvenly: false, // Execute allowed actions evenly over duration\n * execEvenlyMinDelayMs: duration * 1000 / points, // ms, works with execEvenly=true option\n * keyPrefix: 'rlflx',\n * }\n */\n constructor(opts = {}) {\n this.points = opts.points;\n this.duration = opts.duration;\n this.blockDuration = opts.blockDuration;\n this.execEvenly = opts.execEvenly;\n this.execEvenlyMinDelayMs = opts.execEvenlyMinDelayMs;\n this.keyPrefix = opts.keyPrefix;\n }\n\n get points() {\n return this._points;\n }\n\n set points(value) {\n this._points = value >= 0 ? value : 4;\n }\n\n get duration() {\n return this._duration;\n }\n\n set duration(value) {\n this._duration = typeof value === 'undefined' ? 1 : value;\n }\n\n get msDuration() {\n return this.duration * 1000;\n }\n\n get blockDuration() {\n return this._blockDuration;\n }\n\n set blockDuration(value) {\n this._blockDuration = typeof value === 'undefined' ? 0 : value;\n }\n\n get msBlockDuration() {\n return this.blockDuration * 1000;\n }\n\n get execEvenly() {\n return this._execEvenly;\n }\n\n set execEvenly(value) {\n this._execEvenly = typeof value === 'undefined' ? false : Boolean(value);\n }\n\n get execEvenlyMinDelayMs() {\n return this._execEvenlyMinDelayMs;\n }\n\n set execEvenlyMinDelayMs(value) {\n this._execEvenlyMinDelayMs = typeof value === 'undefined' ? Math.ceil(this.msDuration / this.points) : value;\n }\n\n get keyPrefix() {\n return this._keyPrefix;\n }\n\n set keyPrefix(value) {\n if (typeof value === 'undefined') {\n value = 'rlflx';\n }\n if (typeof value !== 'string') {\n throw new Error('keyPrefix must be string');\n }\n this._keyPrefix = value;\n }\n\n _getKeySecDuration(options = {}) {\n return options && options.customDuration >= 0\n ? options.customDuration\n : this.duration;\n }\n\n getKey(key) {\n return this.keyPrefix.length > 0 ? `${this.keyPrefix}:${key}` : key;\n }\n\n parseKey(rlKey) {\n return rlKey.substring(this.keyPrefix.length);\n }\n\n consume() {\n throw new Error(\"You have to implement the method 'consume'!\");\n }\n\n penalty() {\n throw new Error(\"You have to implement the method 'penalty'!\");\n }\n\n reward() {\n throw new Error(\"You have to implement the method 'reward'!\");\n }\n\n get() {\n throw new Error(\"You have to implement the method 'get'!\");\n }\n\n set() {\n throw new Error(\"You have to implement the method 'set'!\");\n }\n\n block() {\n throw new Error(\"You have to implement the method 'block'!\");\n }\n\n delete() {\n throw new Error(\"You have to implement the method 'delete'!\");\n }\n};\n\n\n//# sourceURL=webpack://@waku/noise-rtc/./node_modules/rate-limiter-flexible/lib/RateLimiterAbstract.js?");
/***/ }),
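/* Configuration sketch: the defaults documented above mean every option below is
 * optional; shown with their effective fallbacks (the in-memory implementation defined
 * further down is used purely for illustration).
 *
 *   const { RateLimiterMemory } = require('rate-limiter-flexible')
 *
 *   const limiter = new RateLimiterMemory({
 *     points: 4,                  // allowed actions ...
 *     duration: 1,                // ... per second
 *     blockDuration: 0,           // extra block once points are exceeded (0 = none)
 *     execEvenly: false,          // spread allowed actions evenly over the duration
 *     execEvenlyMinDelayMs: 250,  // default: ceil(duration * 1000 / points)
 *     keyPrefix: 'rlflx',         // storage keys become 'rlflx:<key>'
 *   })
 *
 *   limiter.getKey('user:42')     // 'rlflx:user:42'
 */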
/***/ "./node_modules/rate-limiter-flexible/lib/RateLimiterCluster.js":
/*!**********************************************************************!*\
!*** ./node_modules/rate-limiter-flexible/lib/RateLimiterCluster.js ***!
\**********************************************************************/
/***/ ((module, __unused_webpack_exports, __webpack_require__) => {
eval("/**\n * Implements rate limiting in cluster using built-in IPC\n *\n * Two classes are described here: master and worker\n * Master have to be create in the master process without any options.\n * Any number of rate limiters can be created in workers, but each rate limiter must be with unique keyPrefix\n *\n * Workflow:\n * 1. master rate limiter created in master process\n * 2. worker rate limiter sends 'init' message with necessary options during creating\n * 3. master receives options and adds new rate limiter by keyPrefix if it isn't created yet\n * 4. master sends 'init' back to worker's rate limiter\n * 5. worker can process requests immediately,\n * but they will be postponed by 'workerWaitInit' until master sends 'init' to worker\n * 6. every request to worker rate limiter creates a promise\n * 7. if master doesn't response for 'timeout', promise is rejected\n * 8. master sends 'resolve' or 'reject' command to worker\n * 9. worker resolves or rejects promise depending on message from master\n *\n */\n\nconst cluster = __webpack_require__(/*! cluster */ \"?76c6\");\nconst crypto = __webpack_require__(/*! crypto */ \"?4a7d\");\nconst RateLimiterAbstract = __webpack_require__(/*! ./RateLimiterAbstract */ \"./node_modules/rate-limiter-flexible/lib/RateLimiterAbstract.js\");\nconst RateLimiterMemory = __webpack_require__(/*! ./RateLimiterMemory */ \"./node_modules/rate-limiter-flexible/lib/RateLimiterMemory.js\");\nconst RateLimiterRes = __webpack_require__(/*! ./RateLimiterRes */ \"./node_modules/rate-limiter-flexible/lib/RateLimiterRes.js\");\n\nconst channel = 'rate_limiter_flexible';\nlet masterInstance = null;\n\nconst masterSendToWorker = function (worker, msg, type, res) {\n let data;\n if (res === null || res === true || res === false) {\n data = res;\n } else {\n data = {\n remainingPoints: res.remainingPoints,\n msBeforeNext: res.msBeforeNext,\n consumedPoints: res.consumedPoints,\n isFirstInDuration: res.isFirstInDuration,\n };\n }\n worker.send({\n channel,\n keyPrefix: msg.keyPrefix, // which rate limiter exactly\n promiseId: msg.promiseId,\n type,\n data,\n });\n};\n\nconst workerWaitInit = function (payload) {\n setTimeout(() => {\n if (this._initiated) {\n process.send(payload);\n // Promise will be removed by timeout if too long\n } else if (typeof this._promises[payload.promiseId] !== 'undefined') {\n workerWaitInit.call(this, payload);\n }\n }, 30);\n};\n\nconst workerSendToMaster = function (func, promiseId, key, arg, opts) {\n const payload = {\n channel,\n keyPrefix: this.keyPrefix,\n func,\n promiseId,\n data: {\n key,\n arg,\n opts,\n },\n };\n\n if (!this._initiated) {\n // Wait init before sending messages to master\n workerWaitInit.call(this, payload);\n } else {\n process.send(payload);\n }\n};\n\nconst masterProcessMsg = function (worker, msg) {\n if (!msg || msg.channel !== channel || typeof this._rateLimiters[msg.keyPrefix] === 'undefined') {\n return false;\n }\n\n let promise;\n\n switch (msg.func) {\n case 'consume':\n promise = this._rateLimiters[msg.keyPrefix].consume(msg.data.key, msg.data.arg, msg.data.opts);\n break;\n case 'penalty':\n promise = this._rateLimiters[msg.keyPrefix].penalty(msg.data.key, msg.data.arg, msg.data.opts);\n break;\n case 'reward':\n promise = this._rateLimiters[msg.keyPrefix].reward(msg.data.key, msg.data.arg, msg.data.opts);\n break;\n case 'block':\n promise = this._rateLimiters[msg.keyPrefix].block(msg.data.key, msg.data.arg, msg.data.opts);\n break;\n case 'get':\n promise = 
this._rateLimiters[msg.keyPrefix].get(msg.data.key, msg.data.opts);\n break;\n case 'delete':\n promise = this._rateLimiters[msg.keyPrefix].delete(msg.data.key, msg.data.opts);\n break;\n default:\n return false;\n }\n\n if (promise) {\n promise\n .then((res) => {\n masterSendToWorker(worker, msg, 'resolve', res);\n })\n .catch((rejRes) => {\n masterSendToWorker(worker, msg, 'reject', rejRes);\n });\n }\n};\n\nconst workerProcessMsg = function (msg) {\n if (!msg || msg.channel !== channel || msg.keyPrefix !== this.keyPrefix) {\n return false;\n }\n\n if (this._promises[msg.promiseId]) {\n clearTimeout(this._promises[msg.promiseId].timeoutId);\n let res;\n if (msg.data === null || msg.data === true || msg.data === false) {\n res = msg.data;\n } else {\n res = new RateLimiterRes(\n msg.data.remainingPoints,\n msg.data.msBeforeNext,\n msg.data.consumedPoints,\n msg.data.isFirstInDuration // eslint-disable-line comma-dangle\n );\n }\n\n switch (msg.type) {\n case 'resolve':\n this._promises[msg.promiseId].resolve(res);\n break;\n case 'reject':\n this._promises[msg.promiseId].reject(res);\n break;\n default:\n throw new Error(`RateLimiterCluster: no such message type '${msg.type}'`);\n }\n\n delete this._promises[msg.promiseId];\n }\n};\n/**\n * Prepare options to send to master\n * Master will create rate limiter depending on options\n *\n * @returns {{points: *, duration: *, blockDuration: *, execEvenly: *, execEvenlyMinDelayMs: *, keyPrefix: *}}\n */\nconst getOpts = function () {\n return {\n points: this.points,\n duration: this.duration,\n blockDuration: this.blockDuration,\n execEvenly: this.execEvenly,\n execEvenlyMinDelayMs: this.execEvenlyMinDelayMs,\n keyPrefix: this.keyPrefix,\n };\n};\n\nconst savePromise = function (resolve, reject) {\n const hrtime = process.hrtime();\n let promiseId = hrtime[0].toString() + hrtime[1].toString();\n\n if (typeof this._promises[promiseId] !== 'undefined') {\n promiseId += crypto.randomBytes(12).toString('base64');\n }\n\n this._promises[promiseId] = {\n resolve,\n reject,\n timeoutId: setTimeout(() => {\n delete this._promises[promiseId];\n reject(new Error('RateLimiterCluster timeout: no answer from master in time'));\n }, this.timeoutMs),\n };\n\n return promiseId;\n};\n\nclass RateLimiterClusterMaster {\n constructor() {\n if (masterInstance) {\n return masterInstance;\n }\n\n this._rateLimiters = {};\n\n cluster.setMaxListeners(0);\n\n cluster.on('message', (worker, msg) => {\n if (msg && msg.channel === channel && msg.type === 'init') {\n // If init request, check or create rate limiter by key prefix and send 'init' back to worker\n if (typeof this._rateLimiters[msg.opts.keyPrefix] === 'undefined') {\n this._rateLimiters[msg.opts.keyPrefix] = new RateLimiterMemory(msg.opts);\n }\n\n worker.send({\n channel,\n type: 'init',\n keyPrefix: msg.opts.keyPrefix,\n });\n } else {\n masterProcessMsg.call(this, worker, msg);\n }\n });\n\n masterInstance = this;\n }\n}\n\nclass RateLimiterClusterMasterPM2 {\n constructor(pm2) {\n if (masterInstance) {\n return masterInstance;\n }\n\n this._rateLimiters = {};\n\n pm2.launchBus((err, pm2Bus) => {\n pm2Bus.on('process:msg', (packet) => {\n const msg = packet.raw;\n if (msg && msg.channel === channel && msg.type === 'init') {\n // If init request, check or create rate limiter by key prefix and send 'init' back to worker\n if (typeof this._rateLimiters[msg.opts.keyPrefix] === 'undefined') {\n this._rateLimiters[msg.opts.keyPrefix] = new RateLimiterMemory(msg.opts);\n }\n\n 
pm2.sendDataToProcessId(packet.process.pm_id, {\n data: {},\n topic: channel,\n channel,\n type: 'init',\n keyPrefix: msg.opts.keyPrefix,\n }, (sendErr, res) => {\n if (sendErr) {\n console.log(sendErr, res);\n }\n });\n } else {\n const worker = {\n send: (msgData) => {\n const pm2Message = msgData;\n pm2Message.topic = channel;\n if (typeof pm2Message.data === 'undefined') {\n pm2Message.data = {};\n }\n pm2.sendDataToProcessId(packet.process.pm_id, pm2Message, (sendErr, res) => {\n if (sendErr) {\n console.log(sendErr, res);\n }\n });\n },\n };\n masterProcessMsg.call(this, worker, msg);\n }\n });\n });\n\n masterInstance = this;\n }\n}\n\nclass RateLimiterClusterWorker extends RateLimiterAbstract {\n get timeoutMs() {\n return this._timeoutMs;\n }\n\n set timeoutMs(value) {\n this._timeoutMs = typeof value === 'undefined' ? 5000 : Math.abs(parseInt(value));\n }\n\n constructor(opts = {}) {\n super(opts);\n\n process.setMaxListeners(0);\n\n this.timeoutMs = opts.timeoutMs;\n\n this._initiated = false;\n\n process.on('message', (msg) => {\n if (msg && msg.channel === channel && msg.type === 'init' && msg.keyPrefix === this.keyPrefix) {\n this._initiated = true;\n } else {\n workerProcessMsg.call(this, msg);\n }\n });\n\n // Create limiter on master with specific options\n process.send({\n channel,\n type: 'init',\n opts: getOpts.call(this),\n });\n\n this._promises = {};\n }\n\n consume(key, pointsToConsume = 1, options = {}) {\n return new Promise((resolve, reject) => {\n const promiseId = savePromise.call(this, resolve, reject);\n\n workerSendToMaster.call(this, 'consume', promiseId, key, pointsToConsume, options);\n });\n }\n\n penalty(key, points = 1, options = {}) {\n return new Promise((resolve, reject) => {\n const promiseId = savePromise.call(this, resolve, reject);\n\n workerSendToMaster.call(this, 'penalty', promiseId, key, points, options);\n });\n }\n\n reward(key, points = 1, options = {}) {\n return new Promise((resolve, reject) => {\n const promiseId = savePromise.call(this, resolve, reject);\n\n workerSendToMaster.call(this, 'reward', promiseId, key, points, options);\n });\n }\n\n block(key, secDuration, options = {}) {\n return new Promise((resolve, reject) => {\n const promiseId = savePromise.call(this, resolve, reject);\n\n workerSendToMaster.call(this, 'block', promiseId, key, secDuration, options);\n });\n }\n\n get(key, options = {}) {\n return new Promise((resolve, reject) => {\n const promiseId = savePromise.call(this, resolve, reject);\n\n workerSendToMaster.call(this, 'get', promiseId, key, options);\n });\n }\n\n delete(key, options = {}) {\n return new Promise((resolve, reject) => {\n const promiseId = savePromise.call(this, resolve, reject);\n\n workerSendToMaster.call(this, 'delete', promiseId, key, options);\n });\n }\n}\n\nmodule.exports = {\n RateLimiterClusterMaster,\n RateLimiterClusterMasterPM2,\n RateLimiterCluster: RateLimiterClusterWorker,\n};\n\n\n//# sourceURL=webpack://@waku/noise-rtc/./node_modules/rate-limiter-flexible/lib/RateLimiterCluster.js?");
/***/ }),
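/* Usage sketch for the cluster workflow described above: one master limiter in the
 * primary process, worker-side limiters identified by a unique keyPrefix (numbers and
 * the key are illustrative; `cluster.isMaster` is `isPrimary` on newer Node versions).
 *
 *   const cluster = require('cluster')
 *   const { RateLimiterClusterMaster, RateLimiterCluster } = require('rate-limiter-flexible')
 *
 *   if (cluster.isMaster) {
 *     new RateLimiterClusterMaster()   // owns the shared in-memory counters, no options
 *     cluster.fork()
 *     cluster.fork()
 *   } else {
 *     const limiter = new RateLimiterCluster({
 *       keyPrefix: 'api',    // must be unique per limiter across the app
 *       points: 100,
 *       duration: 60,
 *       timeoutMs: 3000,     // reject if the master does not answer in time
 *     })
 *
 *     limiter.consume(remoteAddress)
 *       .then(() => {
 *         // handle the request
 *       })
 *       .catch(() => {
 *         // rate limited or master timeout; respond with 429
 *       })
 *   }
 */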
/***/ "./node_modules/rate-limiter-flexible/lib/RateLimiterMemcache.js":
/*!***********************************************************************!*\
!*** ./node_modules/rate-limiter-flexible/lib/RateLimiterMemcache.js ***!
\***********************************************************************/
/***/ ((module, __unused_webpack_exports, __webpack_require__) => {
eval("const RateLimiterStoreAbstract = __webpack_require__(/*! ./RateLimiterStoreAbstract */ \"./node_modules/rate-limiter-flexible/lib/RateLimiterStoreAbstract.js\");\nconst RateLimiterRes = __webpack_require__(/*! ./RateLimiterRes */ \"./node_modules/rate-limiter-flexible/lib/RateLimiterRes.js\");\n\nclass RateLimiterMemcache extends RateLimiterStoreAbstract {\n /**\n *\n * @param {Object} opts\n * Defaults {\n * ... see other in RateLimiterStoreAbstract\n *\n * storeClient: memcacheClient\n * }\n */\n constructor(opts) {\n super(opts);\n\n this.client = opts.storeClient;\n }\n\n _getRateLimiterRes(rlKey, changedPoints, result) {\n const res = new RateLimiterRes();\n res.consumedPoints = parseInt(result.consumedPoints);\n res.isFirstInDuration = result.consumedPoints === changedPoints;\n res.remainingPoints = Math.max(this.points - res.consumedPoints, 0);\n res.msBeforeNext = result.msBeforeNext;\n\n return res;\n }\n\n _upsert(rlKey, points, msDuration, forceExpire = false, options = {}) {\n return new Promise((resolve, reject) => {\n const nowMs = Date.now();\n const secDuration = Math.floor(msDuration / 1000);\n\n if (forceExpire) {\n this.client.set(rlKey, points, secDuration, (err) => {\n if (!err) {\n this.client.set(\n `${rlKey}_expire`,\n secDuration > 0 ? nowMs + (secDuration * 1000) : -1,\n secDuration,\n () => {\n const res = {\n consumedPoints: points,\n msBeforeNext: secDuration > 0 ? secDuration * 1000 : -1,\n };\n resolve(res);\n }\n );\n } else {\n reject(err);\n }\n });\n } else {\n this.client.incr(rlKey, points, (err, consumedPoints) => {\n if (err || consumedPoints === false) {\n this.client.add(rlKey, points, secDuration, (errAddKey, createdNew) => {\n if (errAddKey || !createdNew) {\n // Try to upsert again in case of race condition\n if (typeof options.attemptNumber === 'undefined' || options.attemptNumber < 3) {\n const nextOptions = Object.assign({}, options);\n nextOptions.attemptNumber = nextOptions.attemptNumber ? (nextOptions.attemptNumber + 1) : 1;\n\n this._upsert(rlKey, points, msDuration, forceExpire, nextOptions)\n .then(resUpsert => resolve(resUpsert))\n .catch(errUpsert => reject(errUpsert));\n } else {\n reject(new Error('Can not add key'));\n }\n } else {\n this.client.add(\n `${rlKey}_expire`,\n secDuration > 0 ? nowMs + (secDuration * 1000) : -1,\n secDuration,\n () => {\n const res = {\n consumedPoints: points,\n msBeforeNext: secDuration > 0 ? secDuration * 1000 : -1,\n };\n resolve(res);\n }\n );\n }\n });\n } else {\n this.client.get(`${rlKey}_expire`, (errGetExpire, resGetExpireMs) => {\n if (errGetExpire) {\n reject(errGetExpire);\n } else {\n const expireMs = resGetExpireMs === false ? 0 : resGetExpireMs;\n const res = {\n consumedPoints,\n msBeforeNext: expireMs >= 0 ? Math.max(expireMs - nowMs, 0) : -1,\n };\n resolve(res);\n }\n });\n }\n });\n }\n });\n }\n\n _get(rlKey) {\n return new Promise((resolve, reject) => {\n const nowMs = Date.now();\n\n this.client.get(rlKey, (err, consumedPoints) => {\n if (!consumedPoints) {\n resolve(null);\n } else {\n this.client.get(`${rlKey}_expire`, (errGetExpire, resGetExpireMs) => {\n if (errGetExpire) {\n reject(errGetExpire);\n } else {\n const expireMs = resGetExpireMs === false ? 0 : resGetExpireMs;\n const res = {\n consumedPoints,\n msBeforeNext: expireMs >= 0 ? 
Math.max(expireMs - nowMs, 0) : -1,\n };\n resolve(res);\n }\n });\n }\n });\n });\n }\n\n _delete(rlKey) {\n return new Promise((resolve, reject) => {\n this.client.del(rlKey, (err, res) => {\n if (err) {\n reject(err);\n } else if (res === false) {\n resolve(res);\n } else {\n this.client.del(`${rlKey}_expire`, (errDelExpire) => {\n if (errDelExpire) {\n reject(errDelExpire);\n } else {\n resolve(res);\n }\n });\n }\n });\n });\n }\n}\n\nmodule.exports = RateLimiterMemcache;\n\n\n//# sourceURL=webpack://@waku/noise-rtc/./node_modules/rate-limiter-flexible/lib/RateLimiterMemcache.js?");
/***/ }),
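/* Usage sketch: the store above drives any Memcache client exposing callback-style
 * set / add / incr / get / del (the `memcached` package is one such client and is an
 * assumption here, not a requirement of this module; key and numbers are illustrative).
 *
 *   const Memcached = require('memcached')
 *   const { RateLimiterMemcache } = require('rate-limiter-flexible')
 *
 *   const limiter = new RateLimiterMemcache({
 *     storeClient: new Memcached('127.0.0.1:11211'),
 *     keyPrefix: 'login',
 *     points: 5,
 *     duration: 60,
 *   })
 *
 *   // Each consumed key also gets a companion '<key>_expire' entry holding its deadline.
 *   limiter.consume(username, 1).catch(() => {
 *     // too many attempts in the window
 *   })
 */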
/***/ "./node_modules/rate-limiter-flexible/lib/RateLimiterMemory.js":
/*!*********************************************************************!*\
!*** ./node_modules/rate-limiter-flexible/lib/RateLimiterMemory.js ***!
\*********************************************************************/
/***/ ((module, __unused_webpack_exports, __webpack_require__) => {
eval("const RateLimiterAbstract = __webpack_require__(/*! ./RateLimiterAbstract */ \"./node_modules/rate-limiter-flexible/lib/RateLimiterAbstract.js\");\nconst MemoryStorage = __webpack_require__(/*! ./component/MemoryStorage/MemoryStorage */ \"./node_modules/rate-limiter-flexible/lib/component/MemoryStorage/MemoryStorage.js\");\nconst RateLimiterRes = __webpack_require__(/*! ./RateLimiterRes */ \"./node_modules/rate-limiter-flexible/lib/RateLimiterRes.js\");\n\nclass RateLimiterMemory extends RateLimiterAbstract {\n constructor(opts = {}) {\n super(opts);\n\n this._memoryStorage = new MemoryStorage();\n }\n /**\n *\n * @param key\n * @param pointsToConsume\n * @param {Object} options\n * @returns {Promise}\n */\n consume(key, pointsToConsume = 1, options = {}) {\n return new Promise((resolve, reject) => {\n const rlKey = this.getKey(key);\n const secDuration = this._getKeySecDuration(options);\n let res = this._memoryStorage.incrby(rlKey, pointsToConsume, secDuration);\n res.remainingPoints = Math.max(this.points - res.consumedPoints, 0);\n\n if (res.consumedPoints > this.points) {\n // Block only first time when consumed more than points\n if (this.blockDuration > 0 && res.consumedPoints <= (this.points + pointsToConsume)) {\n // Block key\n res = this._memoryStorage.set(rlKey, res.consumedPoints, this.blockDuration);\n }\n reject(res);\n } else if (this.execEvenly && res.msBeforeNext > 0 && !res.isFirstInDuration) {\n // Execute evenly\n let delay = Math.ceil(res.msBeforeNext / (res.remainingPoints + 2));\n if (delay < this.execEvenlyMinDelayMs) {\n delay = res.consumedPoints * this.execEvenlyMinDelayMs;\n }\n\n setTimeout(resolve, delay, res);\n } else {\n resolve(res);\n }\n });\n }\n\n penalty(key, points = 1, options = {}) {\n const rlKey = this.getKey(key);\n return new Promise((resolve) => {\n const secDuration = this._getKeySecDuration(options);\n const res = this._memoryStorage.incrby(rlKey, points, secDuration);\n res.remainingPoints = Math.max(this.points - res.consumedPoints, 0);\n resolve(res);\n });\n }\n\n reward(key, points = 1, options = {}) {\n const rlKey = this.getKey(key);\n return new Promise((resolve) => {\n const secDuration = this._getKeySecDuration(options);\n const res = this._memoryStorage.incrby(rlKey, -points, secDuration);\n res.remainingPoints = Math.max(this.points - res.consumedPoints, 0);\n resolve(res);\n });\n }\n\n /**\n * Block any key for secDuration seconds\n *\n * @param key\n * @param secDuration\n */\n block(key, secDuration) {\n const msDuration = secDuration * 1000;\n const initPoints = this.points + 1;\n\n this._memoryStorage.set(this.getKey(key), initPoints, secDuration);\n return Promise.resolve(\n new RateLimiterRes(0, msDuration === 0 ? -1 : msDuration, initPoints)\n );\n }\n\n set(key, points, secDuration) {\n const msDuration = (secDuration >= 0 ? secDuration : this.duration) * 1000;\n\n this._memoryStorage.set(this.getKey(key), points, secDuration);\n return Promise.resolve(\n new RateLimiterRes(0, msDuration === 0 ? -1 : msDuration, points)\n );\n }\n\n get(key) {\n const res = this._memoryStorage.get(this.getKey(key));\n if (res !== null) {\n res.remainingPoints = Math.max(this.points - res.consumedPoints, 0);\n }\n\n return Promise.resolve(res);\n }\n\n delete(key) {\n return Promise.resolve(this._memoryStorage.delete(this.getKey(key)));\n }\n}\n\nmodule.exports = RateLimiterMemory;\n\n\n\n//# sourceURL=webpack://@waku/noise-rtc/./node_modules/rate-limiter-flexible/lib/RateLimiterMemory.js?");
/***/ }),
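/* Usage sketch for the in-memory limiter above: 5 points per second, with offenders
 * blocked for 10 s the first time they exceed the limit (key and numbers illustrative).
 *
 *   const { RateLimiterMemory } = require('rate-limiter-flexible')
 *
 *   const limiter = new RateLimiterMemory({ points: 5, duration: 1, blockDuration: 10 })
 *
 *   limiter.consume('user:42', 1)
 *     .then((res) => {
 *       // res: RateLimiterRes { remainingPoints, msBeforeNext, consumedPoints, isFirstInDuration }
 *     })
 *     .catch((rej) => {
 *       // over the limit: rej.msBeforeNext says how long to wait (the key is now blocked)
 *     })
 *
 *   // Administrative helpers:
 *   limiter.penalty('user:42', 2)   // charge extra points
 *   limiter.reward('user:42', 1)    // give points back
 *   limiter.block('user:42', 30)    // hard-block for 30 s
 *   limiter.get('user:42')          // current state, or null if unknown
 *   limiter.delete('user:42')       // reset the key
 */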
/***/ "./node_modules/rate-limiter-flexible/lib/RateLimiterMongo.js":
/*!********************************************************************!*\
!*** ./node_modules/rate-limiter-flexible/lib/RateLimiterMongo.js ***!
\********************************************************************/
/***/ ((module, __unused_webpack_exports, __webpack_require__) => {
eval("const RateLimiterStoreAbstract = __webpack_require__(/*! ./RateLimiterStoreAbstract */ \"./node_modules/rate-limiter-flexible/lib/RateLimiterStoreAbstract.js\");\nconst RateLimiterRes = __webpack_require__(/*! ./RateLimiterRes */ \"./node_modules/rate-limiter-flexible/lib/RateLimiterRes.js\");\n\n/**\n * Get MongoDB driver version as upsert options differ\n * @params {Object} Client instance\n * @returns {Object} Version Object containing major, feature & minor versions.\n */\nfunction getDriverVersion(client) {\n try {\n const _client = client.client ? client.client : client;\n\n const { version } = _client.topology.s.options.metadata.driver;\n const _v = version.split('.').map(v => parseInt(v));\n\n return {\n major: _v[0],\n feature: _v[1],\n patch: _v[2],\n };\n } catch (err) {\n return { major: 0, feature: 0, patch: 0 };\n }\n}\n\nclass RateLimiterMongo extends RateLimiterStoreAbstract {\n /**\n *\n * @param {Object} opts\n * Defaults {\n * indexKeyPrefix: {attr1: 1, attr2: 1}\n * ... see other in RateLimiterStoreAbstract\n *\n * mongo: MongoClient\n * }\n */\n constructor(opts) {\n super(opts);\n\n this.dbName = opts.dbName;\n this.tableName = opts.tableName;\n this.indexKeyPrefix = opts.indexKeyPrefix;\n\n if (opts.mongo) {\n this.client = opts.mongo;\n } else {\n this.client = opts.storeClient;\n }\n if (typeof this.client.then === 'function') {\n // If Promise\n this.client\n .then((conn) => {\n this.client = conn;\n this._initCollection();\n this._driverVersion = getDriverVersion(this.client);\n });\n } else {\n this._initCollection();\n this._driverVersion = getDriverVersion(this.client);\n }\n }\n\n get dbName() {\n return this._dbName;\n }\n\n set dbName(value) {\n this._dbName = typeof value === 'undefined' ? RateLimiterMongo.getDbName() : value;\n }\n\n static getDbName() {\n return 'node-rate-limiter-flexible';\n }\n\n get tableName() {\n return this._tableName;\n }\n\n set tableName(value) {\n this._tableName = typeof value === 'undefined' ? this.keyPrefix : value;\n }\n\n get client() {\n return this._client;\n }\n\n set client(value) {\n if (typeof value === 'undefined') {\n throw new Error('mongo is not set');\n }\n this._client = value;\n }\n\n get indexKeyPrefix() {\n return this._indexKeyPrefix;\n }\n\n set indexKeyPrefix(obj) {\n this._indexKeyPrefix = obj || {};\n }\n\n _initCollection() {\n const db = typeof this.client.db === 'function'\n ? this.client.db(this.dbName)\n : this.client;\n\n const collection = db.collection(this.tableName);\n collection.createIndex({ expire: -1 }, { expireAfterSeconds: 0 });\n collection.createIndex(Object.assign({}, this.indexKeyPrefix, { key: 1 }), { unique: true });\n\n this._collection = collection;\n }\n\n _getRateLimiterRes(rlKey, changedPoints, result) {\n const res = new RateLimiterRes();\n\n let doc;\n if (typeof result.value === 'undefined') {\n doc = result;\n } else {\n doc = result.value;\n }\n\n res.isFirstInDuration = doc.points === changedPoints;\n res.consumedPoints = doc.points;\n\n res.remainingPoints = Math.max(this.points - res.consumedPoints, 0);\n res.msBeforeNext = doc.expire !== null\n ? 
Math.max(new Date(doc.expire).getTime() - Date.now(), 0)\n : -1;\n\n return res;\n }\n\n _upsert(key, points, msDuration, forceExpire = false, options = {}) {\n if (!this._collection) {\n return Promise.reject(Error('Mongo connection is not established'));\n }\n\n const docAttrs = options.attrs || {};\n\n let where;\n let upsertData;\n if (forceExpire) {\n where = { key };\n where = Object.assign(where, docAttrs);\n upsertData = {\n $set: {\n key,\n points,\n expire: msDuration > 0 ? new Date(Date.now() + msDuration) : null,\n },\n };\n upsertData.$set = Object.assign(upsertData.$set, docAttrs);\n } else {\n where = {\n $or: [\n { expire: { $gt: new Date() } },\n { expire: { $eq: null } },\n ],\n key,\n };\n where = Object.assign(where, docAttrs);\n upsertData = {\n $setOnInsert: {\n key,\n expire: msDuration > 0 ? new Date(Date.now() + msDuration) : null,\n },\n $inc: { points },\n };\n upsertData.$setOnInsert = Object.assign(upsertData.$setOnInsert, docAttrs);\n }\n\n // Options for collection updates differ between driver versions\n const upsertOptions = {\n upsert: true,\n };\n if ((this._driverVersion.major >= 4) ||\n (this._driverVersion.major === 3 &&\n (this._driverVersion.feature >=7) || \n (this._driverVersion.feature >= 6 && \n this._driverVersion.patch >= 7 ))) \n {\n upsertOptions.returnDocument = 'after';\n } else {\n upsertOptions.returnOriginal = false;\n }\n\n /*\n * 1. Find actual limit and increment points\n * 2. If limit expired, but Mongo doesn't clean doc by TTL yet, try to replace limit doc completely\n * 3. If 2 or more Mongo threads try to insert the new limit doc, only the first succeed\n * 4. Try to upsert from step 1. Actual limit is created now, points are incremented without problems\n */\n return new Promise((resolve, reject) => {\n this._collection.findOneAndUpdate(\n where,\n upsertData,\n upsertOptions\n ).then((res) => {\n resolve(res);\n }).catch((errUpsert) => {\n if (errUpsert && errUpsert.code === 11000) { // E11000 duplicate key error collection\n const replaceWhere = Object.assign({ // try to replace OLD limit doc\n $or: [\n { expire: { $lte: new Date() } },\n { expire: { $eq: null } },\n ],\n key,\n }, docAttrs);\n\n const replaceTo = {\n $set: Object.assign({\n key,\n points,\n expire: msDuration > 0 ? 
new Date(Date.now() + msDuration) : null,\n }, docAttrs)\n };\n\n this._collection.findOneAndUpdate(\n replaceWhere,\n replaceTo,\n upsertOptions\n ).then((res) => {\n resolve(res);\n }).catch((errReplace) => {\n if (errReplace && errReplace.code === 11000) { // E11000 duplicate key error collection\n this._upsert(key, points, msDuration, forceExpire)\n .then(res => resolve(res))\n .catch(err => reject(err));\n } else {\n reject(errReplace);\n }\n });\n } else {\n reject(errUpsert);\n }\n });\n });\n }\n\n _get(rlKey, options = {}) {\n if (!this._collection) {\n return Promise.reject(Error('Mongo connection is not established'));\n }\n\n const docAttrs = options.attrs || {};\n\n const where = Object.assign({\n key: rlKey,\n $or: [\n { expire: { $gt: new Date() } },\n { expire: { $eq: null } },\n ],\n }, docAttrs);\n\n return this._collection.findOne(where);\n }\n\n _delete(rlKey, options = {}) {\n if (!this._collection) {\n return Promise.reject(Error('Mongo connection is not established'));\n }\n\n const docAttrs = options.attrs || {};\n const where = Object.assign({ key: rlKey }, docAttrs);\n\n return this._collection.deleteOne(where)\n .then(res => res.deletedCount > 0);\n }\n}\n\nmodule.exports = RateLimiterMongo;\n\n\n//# sourceURL=webpack://@waku/noise-rtc/./node_modules/rate-limiter-flexible/lib/RateLimiterMongo.js?");
/***/ }),
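/* Usage sketch: the store above accepts either a connected MongoClient or a pending
 * connection promise (index creation is deferred until the promise resolves). The
 * connection string, key and numbers are illustrative.
 *
 *   const { MongoClient } = require('mongodb')
 *   const { RateLimiterMongo } = require('rate-limiter-flexible')
 *
 *   const mongoConn = MongoClient.connect('mongodb://127.0.0.1:27017')
 *
 *   const limiter = new RateLimiterMongo({
 *     storeClient: mongoConn,               // a promise is accepted, see the constructor above
 *     dbName: 'node-rate-limiter-flexible', // default when omitted
 *     tableName: 'rlflx',                   // collection name, defaults to keyPrefix
 *     points: 10,
 *     duration: 1,
 *   })
 *
 *   limiter.consume(apiToken).catch(() => {
 *     // limit reached, or the Mongo connection is not established yet
 *   })
 */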
/***/ "./node_modules/rate-limiter-flexible/lib/RateLimiterMySQL.js":
/*!********************************************************************!*\
!*** ./node_modules/rate-limiter-flexible/lib/RateLimiterMySQL.js ***!
\********************************************************************/
/***/ ((module, __unused_webpack_exports, __webpack_require__) => {
eval("const RateLimiterStoreAbstract = __webpack_require__(/*! ./RateLimiterStoreAbstract */ \"./node_modules/rate-limiter-flexible/lib/RateLimiterStoreAbstract.js\");\nconst RateLimiterRes = __webpack_require__(/*! ./RateLimiterRes */ \"./node_modules/rate-limiter-flexible/lib/RateLimiterRes.js\");\n\nclass RateLimiterMySQL extends RateLimiterStoreAbstract {\n /**\n * @callback callback\n * @param {Object} err\n *\n * @param {Object} opts\n * @param {callback} cb\n * Defaults {\n * ... see other in RateLimiterStoreAbstract\n *\n * storeClient: anySqlClient,\n * storeType: 'knex', // required only for Knex instance\n * dbName: 'string',\n * tableName: 'string',\n * }\n */\n constructor(opts, cb = null) {\n super(opts);\n\n this.client = opts.storeClient;\n this.clientType = opts.storeType;\n\n this.dbName = opts.dbName;\n this.tableName = opts.tableName;\n\n this.clearExpiredByTimeout = opts.clearExpiredByTimeout;\n\n this.tableCreated = opts.tableCreated;\n if (!this.tableCreated) {\n this._createDbAndTable()\n .then(() => {\n this.tableCreated = true;\n if (this.clearExpiredByTimeout) {\n this._clearExpiredHourAgo();\n }\n if (typeof cb === 'function') {\n cb();\n }\n })\n .catch((err) => {\n if (typeof cb === 'function') {\n cb(err);\n } else {\n throw err;\n }\n });\n } else {\n if (this.clearExpiredByTimeout) {\n this._clearExpiredHourAgo();\n }\n if (typeof cb === 'function') {\n cb();\n }\n }\n }\n\n clearExpired(expire) {\n return new Promise((resolve) => {\n this._getConnection()\n .then((conn) => {\n conn.query(`DELETE FROM ??.?? WHERE expire < ?`, [this.dbName, this.tableName, expire], () => {\n this._releaseConnection(conn);\n resolve();\n });\n })\n .catch(() => {\n resolve();\n });\n });\n }\n\n _clearExpiredHourAgo() {\n if (this._clearExpiredTimeoutId) {\n clearTimeout(this._clearExpiredTimeoutId);\n }\n this._clearExpiredTimeoutId = setTimeout(() => {\n this.clearExpired(Date.now() - 3600000) // Never rejected\n .then(() => {\n this._clearExpiredHourAgo();\n });\n }, 300000);\n this._clearExpiredTimeoutId.unref();\n }\n\n /**\n *\n * @return Promise\n * @private\n */\n _getConnection() {\n switch (this.clientType) {\n case 'pool':\n return new Promise((resolve, reject) => {\n this.client.getConnection((errConn, conn) => {\n if (errConn) {\n return reject(errConn);\n }\n\n resolve(conn);\n });\n });\n case 'sequelize':\n return this.client.connectionManager.getConnection();\n case 'knex':\n return this.client.client.acquireConnection();\n default:\n return Promise.resolve(this.client);\n }\n }\n\n _releaseConnection(conn) {\n switch (this.clientType) {\n case 'pool':\n return conn.release();\n case 'sequelize':\n return this.client.connectionManager.releaseConnection(conn);\n case 'knex':\n return this.client.client.releaseConnection(conn);\n default:\n return true;\n }\n }\n\n /**\n *\n * @returns {Promise}\n * @private\n */\n _createDbAndTable() {\n return new Promise((resolve, reject) => {\n this._getConnection()\n .then((conn) => {\n conn.query(`CREATE DATABASE IF NOT EXISTS \\`${this.dbName}\\`;`, (errDb) => {\n if (errDb) {\n this._releaseConnection(conn);\n return reject(errDb);\n }\n conn.query(this._getCreateTableStmt(), (err) => {\n if (err) {\n this._releaseConnection(conn);\n return reject(err);\n }\n this._releaseConnection(conn);\n resolve();\n });\n });\n })\n .catch((err) => {\n reject(err);\n });\n });\n }\n\n _getCreateTableStmt() {\n return `CREATE TABLE IF NOT EXISTS \\`${this.dbName}\\`.\\`${this.tableName}\\` (` +\n '`key` VARCHAR(255) CHARACTER SET 
utf8 NOT NULL,' +\n '`points` INT(9) NOT NULL default 0,' +\n '`expire` BIGINT UNSIGNED,' +\n 'PRIMARY KEY (`key`)' +\n ') ENGINE = INNODB;';\n }\n\n get clientType() {\n return this._clientType;\n }\n\n set clientType(value) {\n if (typeof value === 'undefined') {\n if (this.client.constructor.name === 'Connection') {\n value = 'connection';\n } else if (this.client.constructor.name === 'Pool') {\n value = 'pool';\n } else if (this.client.constructor.name === 'Sequelize') {\n value = 'sequelize';\n } else {\n throw new Error('storeType is not defined');\n }\n }\n this._clientType = value.toLowerCase();\n }\n\n get dbName() {\n return this._dbName;\n }\n\n set dbName(value) {\n this._dbName = typeof value === 'undefined' ? 'rtlmtrflx' : value;\n }\n\n get tableName() {\n return this._tableName;\n }\n\n set tableName(value) {\n this._tableName = typeof value === 'undefined' ? this.keyPrefix : value;\n }\n\n get tableCreated() {\n return this._tableCreated\n }\n\n set tableCreated(value) {\n this._tableCreated = typeof value === 'undefined' ? false : !!value;\n }\n\n get clearExpiredByTimeout() {\n return this._clearExpiredByTimeout;\n }\n\n set clearExpiredByTimeout(value) {\n this._clearExpiredByTimeout = typeof value === 'undefined' ? true : Boolean(value);\n }\n\n _getRateLimiterRes(rlKey, changedPoints, result) {\n const res = new RateLimiterRes();\n const [row] = result;\n\n res.isFirstInDuration = changedPoints === row.points;\n res.consumedPoints = res.isFirstInDuration ? changedPoints : row.points;\n\n res.remainingPoints = Math.max(this.points - res.consumedPoints, 0);\n res.msBeforeNext = row.expire\n ? Math.max(row.expire - Date.now(), 0)\n : -1;\n\n return res;\n }\n\n _upsertTransaction(conn, key, points, msDuration, forceExpire) {\n return new Promise((resolve, reject) => {\n conn.query('BEGIN', (errBegin) => {\n if (errBegin) {\n conn.rollback();\n\n return reject(errBegin);\n }\n\n const dateNow = Date.now();\n const newExpire = msDuration > 0 ? dateNow + msDuration : null;\n\n let q;\n let values;\n if (forceExpire) {\n q = `INSERT INTO ??.?? VALUES (?, ?, ?)\n ON DUPLICATE KEY UPDATE \n points = ?, \n expire = ?;`;\n values = [\n this.dbName, this.tableName, key, points, newExpire,\n points,\n newExpire,\n ];\n } else {\n q = `INSERT INTO ??.?? VALUES (?, ?, ?)\n ON DUPLICATE KEY UPDATE \n points = IF(expire <= ?, ?, points + (?)), \n expire = IF(expire <= ?, ?, expire);`;\n values = [\n this.dbName, this.tableName, key, points, newExpire,\n dateNow, points, points,\n dateNow, newExpire,\n ];\n }\n\n conn.query(q, values, (errUpsert) => {\n if (errUpsert) {\n conn.rollback();\n\n return reject(errUpsert);\n }\n conn.query('SELECT points, expire FROM ??.?? 
WHERE `key` = ?;', [this.dbName, this.tableName, key], (errSelect, res) => {\n if (errSelect) {\n conn.rollback();\n\n return reject(errSelect);\n }\n\n conn.query('COMMIT', (err) => {\n if (err) {\n conn.rollback();\n\n return reject(err);\n }\n\n resolve(res);\n });\n });\n });\n });\n });\n }\n\n _upsert(key, points, msDuration, forceExpire = false) {\n if (!this.tableCreated) {\n return Promise.reject(Error('Table is not created yet'));\n }\n\n return new Promise((resolve, reject) => {\n this._getConnection()\n .then((conn) => {\n this._upsertTransaction(conn, key, points, msDuration, forceExpire)\n .then((res) => {\n resolve(res);\n this._releaseConnection(conn);\n })\n .catch((err) => {\n reject(err);\n this._releaseConnection(conn);\n });\n })\n .catch((err) => {\n reject(err);\n });\n });\n }\n\n _get(rlKey) {\n if (!this.tableCreated) {\n return Promise.reject(Error('Table is not created yet'));\n }\n\n return new Promise((resolve, reject) => {\n this._getConnection()\n .then((conn) => {\n conn.query(\n 'SELECT points, expire FROM ??.?? WHERE `key` = ? AND (`expire` > ? OR `expire` IS NULL)',\n [this.dbName, this.tableName, rlKey, Date.now()],\n (err, res) => {\n if (err) {\n reject(err);\n } else if (res.length === 0) {\n resolve(null);\n } else {\n resolve(res);\n }\n\n this._releaseConnection(conn);\n } // eslint-disable-line\n );\n })\n .catch((err) => {\n reject(err);\n });\n });\n }\n\n _delete(rlKey) {\n if (!this.tableCreated) {\n return Promise.reject(Error('Table is not created yet'));\n }\n\n return new Promise((resolve, reject) => {\n this._getConnection()\n .then((conn) => {\n conn.query(\n 'DELETE FROM ??.?? WHERE `key` = ?',\n [this.dbName, this.tableName, rlKey],\n (err, res) => {\n if (err) {\n reject(err);\n } else {\n resolve(res.affectedRows > 0);\n }\n\n this._releaseConnection(conn);\n } // eslint-disable-line\n );\n })\n .catch((err) => {\n reject(err);\n });\n });\n }\n}\n\nmodule.exports = RateLimiterMySQL;\n\n\n//# sourceURL=webpack://@waku/noise-rtc/./node_modules/rate-limiter-flexible/lib/RateLimiterMySQL.js?");
/***/ }),
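/*
 * Usage sketch for RateLimiterMySQL above (not part of the bundled library source;
 * assumes the `mysql2` package is installed next to `rate-limiter-flexible`):
 *
 *   const mysql = require('mysql2');
 *   const { RateLimiterMySQL } = require('rate-limiter-flexible');
 *
 *   const pool = mysql.createPool({ host: 'localhost', user: 'root', password: 'secret' });
 *
 *   const limiter = new RateLimiterMySQL({
 *     storeClient: pool,
 *     storeType: 'pool',        // explicit here; otherwise inferred from the client constructor name
 *     dbName: 'rtlmtrflx',      // default database name used by the limiter
 *     tableName: 'rate_limits', // defaults to keyPrefix when omitted
 *     points: 5,                // 5 points
 *     duration: 1,              // per second
 *   }, (err) => {
 *     if (err) {
 *       // database/table could not be created, the limiter is unusable
 *       return;
 *     }
 *     limiter.consume('192.168.0.1', 1)
 *       .then((res) => {
 *         // allowed: res.remainingPoints / res.msBeforeNext are available
 *       })
 *       .catch((rej) => {
 *         // a RateLimiterRes when rate limited, an Error on a store failure
 *       });
 *   });
 */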
/***/ "./node_modules/rate-limiter-flexible/lib/RateLimiterPostgres.js":
/*!***********************************************************************!*\
!*** ./node_modules/rate-limiter-flexible/lib/RateLimiterPostgres.js ***!
\***********************************************************************/
/***/ ((module, __unused_webpack_exports, __webpack_require__) => {
eval("const RateLimiterStoreAbstract = __webpack_require__(/*! ./RateLimiterStoreAbstract */ \"./node_modules/rate-limiter-flexible/lib/RateLimiterStoreAbstract.js\");\nconst RateLimiterRes = __webpack_require__(/*! ./RateLimiterRes */ \"./node_modules/rate-limiter-flexible/lib/RateLimiterRes.js\");\n\nclass RateLimiterPostgres extends RateLimiterStoreAbstract {\n /**\n * @callback callback\n * @param {Object} err\n *\n * @param {Object} opts\n * @param {callback} cb\n * Defaults {\n * ... see other in RateLimiterStoreAbstract\n *\n * storeClient: postgresClient,\n * storeType: 'knex', // required only for Knex instance\n * tableName: 'string',\n * }\n */\n constructor(opts, cb = null) {\n super(opts);\n\n this.client = opts.storeClient;\n this.clientType = opts.storeType;\n\n this.tableName = opts.tableName;\n\n this.clearExpiredByTimeout = opts.clearExpiredByTimeout;\n\n this.tableCreated = opts.tableCreated;\n if (!this.tableCreated) {\n this._createTable()\n .then(() => {\n this.tableCreated = true;\n if (this.clearExpiredByTimeout) {\n this._clearExpiredHourAgo();\n }\n if (typeof cb === 'function') {\n cb();\n }\n })\n .catch((err) => {\n if (typeof cb === 'function') {\n cb(err);\n } else {\n throw err;\n }\n });\n } else {\n if (typeof cb === 'function') {\n cb();\n }\n }\n }\n\n clearExpired(expire) {\n return new Promise((resolve) => {\n const q = {\n name: 'rlflx-clear-expired',\n text: `DELETE FROM ${this.tableName} WHERE expire < $1`,\n values: [expire],\n };\n this._query(q)\n .then(() => {\n resolve();\n })\n .catch(() => {\n // Deleting expired query is not critical\n resolve();\n });\n });\n }\n\n /**\n * Delete all rows expired 1 hour ago once per 5 minutes\n *\n * @private\n */\n _clearExpiredHourAgo() {\n if (this._clearExpiredTimeoutId) {\n clearTimeout(this._clearExpiredTimeoutId);\n }\n this._clearExpiredTimeoutId = setTimeout(() => {\n this.clearExpired(Date.now() - 3600000) // Never rejected\n .then(() => {\n this._clearExpiredHourAgo();\n });\n }, 300000);\n this._clearExpiredTimeoutId.unref();\n }\n\n /**\n *\n * @return Promise\n * @private\n */\n _getConnection() {\n switch (this.clientType) {\n case 'pool':\n return Promise.resolve(this.client);\n case 'sequelize':\n return this.client.connectionManager.getConnection();\n case 'knex':\n return this.client.client.acquireConnection();\n case 'typeorm':\n return Promise.resolve(this.client.driver.master);\n default:\n return Promise.resolve(this.client);\n }\n }\n\n _releaseConnection(conn) {\n switch (this.clientType) {\n case 'pool':\n return true;\n case 'sequelize':\n return this.client.connectionManager.releaseConnection(conn);\n case 'knex':\n return this.client.client.releaseConnection(conn);\n case 'typeorm':\n return true;\n default:\n return true;\n }\n }\n\n /**\n *\n * @returns {Promise}\n * @private\n */\n _createTable() {\n return new Promise((resolve, reject) => {\n this._query({\n text: this._getCreateTableStmt(),\n })\n .then(() => {\n resolve();\n })\n .catch((err) => {\n if (err.code === '23505') {\n // Error: duplicate key value violates unique constraint \"pg_type_typname_nsp_index\"\n // Postgres doesn't handle concurrent table creation\n // It is supposed, that table is created by another worker\n resolve();\n } else {\n reject(err);\n }\n });\n });\n }\n\n _getCreateTableStmt() {\n return `CREATE TABLE IF NOT EXISTS ${this.tableName} ( \n key varchar(255) PRIMARY KEY,\n points integer NOT NULL DEFAULT 0,\n expire bigint\n );`;\n }\n\n get clientType() {\n return this._clientType;\n }\n\n 
set clientType(value) {\n const constructorName = this.client.constructor.name;\n\n if (typeof value === 'undefined') {\n if (constructorName === 'Client') {\n value = 'client';\n } else if (\n constructorName === 'Pool' ||\n constructorName === 'BoundPool'\n ) {\n value = 'pool';\n } else if (constructorName === 'Sequelize') {\n value = 'sequelize';\n } else {\n throw new Error('storeType is not defined');\n }\n }\n\n this._clientType = value.toLowerCase();\n }\n\n get tableName() {\n return this._tableName;\n }\n\n set tableName(value) {\n this._tableName = typeof value === 'undefined' ? this.keyPrefix : value;\n }\n\n get tableCreated() {\n return this._tableCreated\n }\n\n set tableCreated(value) {\n this._tableCreated = typeof value === 'undefined' ? false : !!value;\n }\n\n get clearExpiredByTimeout() {\n return this._clearExpiredByTimeout;\n }\n\n set clearExpiredByTimeout(value) {\n this._clearExpiredByTimeout = typeof value === 'undefined' ? true : Boolean(value);\n }\n\n _getRateLimiterRes(rlKey, changedPoints, result) {\n const res = new RateLimiterRes();\n const row = result.rows[0];\n\n res.isFirstInDuration = changedPoints === row.points;\n res.consumedPoints = res.isFirstInDuration ? changedPoints : row.points;\n\n res.remainingPoints = Math.max(this.points - res.consumedPoints, 0);\n res.msBeforeNext = row.expire\n ? Math.max(row.expire - Date.now(), 0)\n : -1;\n\n return res;\n }\n\n _query(q) {\n const prefix = this.tableName.toLowerCase();\n const queryObj = { name: `${prefix}:${q.name}`, text: q.text, values: q.values };\n return new Promise((resolve, reject) => {\n this._getConnection()\n .then((conn) => {\n conn.query(queryObj)\n .then((res) => {\n resolve(res);\n this._releaseConnection(conn);\n })\n .catch((err) => {\n reject(err);\n this._releaseConnection(conn);\n });\n })\n .catch((err) => {\n reject(err);\n });\n });\n }\n\n _upsert(key, points, msDuration, forceExpire = false) {\n if (!this.tableCreated) {\n return Promise.reject(Error('Table is not created yet'));\n }\n\n const newExpire = msDuration > 0 ? Date.now() + msDuration : null;\n const expireQ = forceExpire\n ? ' $3 '\n : ` CASE\n WHEN ${this.tableName}.expire <= $4 THEN $3\n ELSE ${this.tableName}.expire\n END `;\n\n return this._query({\n name: forceExpire ? 'rlflx-upsert-force' : 'rlflx-upsert',\n text: `\n INSERT INTO ${this.tableName} VALUES ($1, $2, $3)\n ON CONFLICT(key) DO UPDATE SET\n points = CASE\n WHEN (${this.tableName}.expire <= $4 OR 1=${forceExpire ? 
1 : 0}) THEN $2\n ELSE ${this.tableName}.points + ($2)\n END,\n expire = ${expireQ}\n RETURNING points, expire;`,\n values: [key, points, newExpire, Date.now()],\n });\n }\n\n _get(rlKey) {\n if (!this.tableCreated) {\n return Promise.reject(Error('Table is not created yet'));\n }\n\n return new Promise((resolve, reject) => {\n this._query({\n name: 'rlflx-get',\n text: `\n SELECT points, expire FROM ${this.tableName} WHERE key = $1 AND (expire > $2 OR expire IS NULL);`,\n values: [rlKey, Date.now()],\n })\n .then((res) => {\n if (res.rowCount === 0) {\n res = null;\n }\n resolve(res);\n })\n .catch((err) => {\n reject(err);\n });\n });\n }\n\n _delete(rlKey) {\n if (!this.tableCreated) {\n return Promise.reject(Error('Table is not created yet'));\n }\n\n return this._query({\n name: 'rlflx-delete',\n text: `DELETE FROM ${this.tableName} WHERE key = $1`,\n values: [rlKey],\n })\n .then(res => res.rowCount > 0);\n }\n}\n\nmodule.exports = RateLimiterPostgres;\n\n\n//# sourceURL=webpack://@waku/noise-rtc/./node_modules/rate-limiter-flexible/lib/RateLimiterPostgres.js?");
/***/ }),
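/*
 * Usage sketch for RateLimiterPostgres above (not part of the bundled library source;
 * assumes the `pg` package):
 *
 *   const { Pool } = require('pg');
 *   const { RateLimiterPostgres } = require('rate-limiter-flexible');
 *
 *   const pool = new Pool({ connectionString: 'postgres://user:secret@localhost:5432/app' });
 *
 *   const limiter = new RateLimiterPostgres({
 *     storeClient: pool,         // 'pool' is inferred for pg Pool/BoundPool instances
 *     tableName: 'rate_limits',  // defaults to keyPrefix when omitted
 *     points: 10,
 *     duration: 60,              // 10 points per minute
 *   }, (err) => {
 *     if (err) {
 *       // table creation failed (and was not done concurrently by another worker)
 *       return;
 *     }
 *     limiter.consume('api:42')
 *       .then((res) => {
 *         // allowed
 *       })
 *       .catch((rej) => {
 *         // a RateLimiterRes when rate limited, an Error on a store failure
 *       });
 *   });
 */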
/***/ "./node_modules/rate-limiter-flexible/lib/RateLimiterQueue.js":
/*!********************************************************************!*\
!*** ./node_modules/rate-limiter-flexible/lib/RateLimiterQueue.js ***!
\********************************************************************/
/***/ ((module, __unused_webpack_exports, __webpack_require__) => {
eval("const RateLimiterQueueError = __webpack_require__(/*! ./component/RateLimiterQueueError */ \"./node_modules/rate-limiter-flexible/lib/component/RateLimiterQueueError.js\")\nconst MAX_QUEUE_SIZE = 4294967295;\nconst KEY_DEFAULT = 'limiter';\n\nmodule.exports = class RateLimiterQueue {\n constructor(limiterFlexible, opts = {\n maxQueueSize: MAX_QUEUE_SIZE,\n }) {\n this._queueLimiters = {\n KEY_DEFAULT: new RateLimiterQueueInternal(limiterFlexible, opts)\n };\n this._limiterFlexible = limiterFlexible;\n this._maxQueueSize = opts.maxQueueSize\n }\n\n getTokensRemaining(key = KEY_DEFAULT) {\n if (this._queueLimiters[key]) {\n return this._queueLimiters[key].getTokensRemaining()\n } else {\n return Promise.resolve(this._limiterFlexible.points)\n }\n }\n\n removeTokens(tokens, key = KEY_DEFAULT) {\n if (!this._queueLimiters[key]) {\n this._queueLimiters[key] = new RateLimiterQueueInternal(\n this._limiterFlexible, {\n key,\n maxQueueSize: this._maxQueueSize,\n })\n }\n\n return this._queueLimiters[key].removeTokens(tokens)\n }\n};\n\nclass RateLimiterQueueInternal {\n\n constructor(limiterFlexible, opts = {\n maxQueueSize: MAX_QUEUE_SIZE,\n key: KEY_DEFAULT,\n }) {\n this._key = opts.key;\n this._waitTimeout = null;\n this._queue = [];\n this._limiterFlexible = limiterFlexible;\n\n this._maxQueueSize = opts.maxQueueSize\n }\n\n getTokensRemaining() {\n return this._limiterFlexible.get(this._key)\n .then((rlRes) => {\n return rlRes !== null ? rlRes.remainingPoints : this._limiterFlexible.points;\n })\n }\n\n removeTokens(tokens) {\n const _this = this;\n\n return new Promise((resolve, reject) => {\n if (tokens > _this._limiterFlexible.points) {\n reject(new RateLimiterQueueError(`Requested tokens ${tokens} exceeds maximum ${_this._limiterFlexible.points} tokens per interval`));\n return\n }\n\n if (_this._queue.length > 0) {\n _this._queueRequest.call(_this, resolve, reject, tokens);\n } else {\n _this._limiterFlexible.consume(_this._key, tokens)\n .then((res) => {\n resolve(res.remainingPoints);\n })\n .catch((rej) => {\n if (rej instanceof Error) {\n reject(rej);\n } else {\n _this._queueRequest.call(_this, resolve, reject, tokens);\n if (_this._waitTimeout === null) {\n _this._waitTimeout = setTimeout(_this._processFIFO.bind(_this), rej.msBeforeNext);\n }\n }\n });\n }\n })\n }\n\n _queueRequest(resolve, reject, tokens) {\n const _this = this;\n if (_this._queue.length < _this._maxQueueSize) {\n _this._queue.push({resolve, reject, tokens});\n } else {\n reject(new RateLimiterQueueError(`Number of requests reached it's maximum ${_this._maxQueueSize}`))\n }\n }\n\n _processFIFO() {\n const _this = this;\n\n if (_this._waitTimeout !== null) {\n clearTimeout(_this._waitTimeout);\n _this._waitTimeout = null;\n }\n\n if (_this._queue.length === 0) {\n return;\n }\n\n const item = _this._queue.shift();\n _this._limiterFlexible.consume(_this._key, item.tokens)\n .then((res) => {\n item.resolve(res.remainingPoints);\n _this._processFIFO.call(_this);\n })\n .catch((rej) => {\n if (rej instanceof Error) {\n item.reject(rej);\n _this._processFIFO.call(_this);\n } else {\n _this._queue.unshift(item);\n if (_this._waitTimeout === null) {\n _this._waitTimeout = setTimeout(_this._processFIFO.bind(_this), rej.msBeforeNext);\n }\n }\n });\n }\n}\n\n\n//# sourceURL=webpack://@waku/noise-rtc/./node_modules/rate-limiter-flexible/lib/RateLimiterQueue.js?");
/***/ }),
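/*
 * Usage sketch for RateLimiterQueue above (not part of the bundled library source).
 * It wraps any limiter from this package and queues requests that exceed the
 * current window instead of rejecting them, releasing tokens in FIFO order:
 *
 *   const { RateLimiterMemory, RateLimiterQueue } = require('rate-limiter-flexible');
 *
 *   const limiterFlexible = new RateLimiterMemory({ points: 2, duration: 1 });
 *   const limiterQueue = new RateLimiterQueue(limiterFlexible, { maxQueueSize: 100 });
 *
 *   async function handle(job) {
 *     // Resolves with the remaining tokens once a token is granted; rejects with
 *     // RateLimiterQueueError if more tokens than `points` are requested or the queue is full.
 *     const remainingTokens = await limiterQueue.removeTokens(1);
 *     return runJob(job, remainingTokens); // `runJob` is a hypothetical application callback
 *   }
 */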
/***/ "./node_modules/rate-limiter-flexible/lib/RateLimiterRedis.js":
/*!********************************************************************!*\
!*** ./node_modules/rate-limiter-flexible/lib/RateLimiterRedis.js ***!
\********************************************************************/
/***/ ((module, __unused_webpack_exports, __webpack_require__) => {
eval("const RateLimiterStoreAbstract = __webpack_require__(/*! ./RateLimiterStoreAbstract */ \"./node_modules/rate-limiter-flexible/lib/RateLimiterStoreAbstract.js\");\nconst RateLimiterRes = __webpack_require__(/*! ./RateLimiterRes */ \"./node_modules/rate-limiter-flexible/lib/RateLimiterRes.js\");\n\nconst incrTtlLuaScript = `redis.call('set', KEYS[1], 0, 'EX', ARGV[2], 'NX') \\\nlocal consumed = redis.call('incrby', KEYS[1], ARGV[1]) \\\nlocal ttl = redis.call('pttl', KEYS[1]) \\\nif ttl == -1 then \\\n redis.call('expire', KEYS[1], ARGV[2]) \\\n ttl = 1000 * ARGV[2] \\\nend \\\nreturn {consumed, ttl} \\\n`;\n\nclass RateLimiterRedis extends RateLimiterStoreAbstract {\n /**\n *\n * @param {Object} opts\n * Defaults {\n * ... see other in RateLimiterStoreAbstract\n *\n * redis: RedisClient\n * rejectIfRedisNotReady: boolean = false - reject / invoke insuranceLimiter immediately when redis connection is not \"ready\"\n * }\n */\n constructor(opts) {\n super(opts);\n if (opts.redis) {\n this.client = opts.redis;\n } else {\n this.client = opts.storeClient;\n }\n\n this._rejectIfRedisNotReady = !!opts.rejectIfRedisNotReady;\n\n if (typeof this.client.defineCommand === 'function') {\n this.client.defineCommand(\"rlflxIncr\", {\n numberOfKeys: 1,\n lua: incrTtlLuaScript,\n });\n }\n }\n\n /**\n * Prevent actual redis call if redis connection is not ready\n * Because of different connection state checks for ioredis and node-redis, only this clients would be actually checked.\n * For any other clients all the requests would be passed directly to redis client\n * @return {boolean}\n * @private\n */\n _isRedisReady() {\n if (!this._rejectIfRedisNotReady) {\n return true;\n }\n // ioredis client\n if (this.client.status && this.client.status !== 'ready') {\n return false;\n }\n // node-redis client\n if (typeof this.client.isReady === 'function' && !this.client.isReady()) {\n return false;\n }\n return true;\n }\n\n _getRateLimiterRes(rlKey, changedPoints, result) {\n let [consumed, resTtlMs] = result;\n // Support ioredis results format\n if (Array.isArray(consumed)) {\n [, consumed] = consumed;\n [, resTtlMs] = resTtlMs;\n }\n\n const res = new RateLimiterRes();\n res.consumedPoints = parseInt(consumed);\n res.isFirstInDuration = res.consumedPoints === changedPoints;\n res.remainingPoints = Math.max(this.points - res.consumedPoints, 0);\n res.msBeforeNext = resTtlMs;\n\n return res;\n }\n\n _upsert(rlKey, points, msDuration, forceExpire = false) {\n return new Promise((resolve, reject) => {\n if (!this._isRedisReady()) {\n return reject(new Error('Redis connection is not ready'));\n }\n\n const secDuration = Math.floor(msDuration / 1000);\n const multi = this.client.multi();\n if (forceExpire) {\n if (secDuration > 0) {\n multi.set(rlKey, points, 'EX', secDuration);\n } else {\n multi.set(rlKey, points);\n }\n\n multi.pttl(rlKey)\n .exec((err, res) => {\n if (err) {\n return reject(err);\n }\n\n return resolve(res);\n });\n } else {\n if (secDuration > 0) {\n const incrCallback = function(err, result) {\n if (err) {\n return reject(err);\n }\n\n return resolve(result);\n };\n\n if (typeof this.client.rlflxIncr === 'function') {\n this.client.rlflxIncr(rlKey, points, secDuration, incrCallback);\n } else {\n this.client.eval(incrTtlLuaScript, 1, rlKey, points, secDuration, incrCallback);\n }\n } else {\n multi.incrby(rlKey, points)\n .pttl(rlKey)\n .exec((err, res) => {\n if (err) {\n return reject(err);\n }\n\n return resolve(res);\n });\n }\n }\n });\n }\n\n _get(rlKey) {\n return new 
Promise((resolve, reject) => {\n if (!this._isRedisReady()) {\n return reject(new Error('Redis connection is not ready'));\n }\n\n this.client\n .multi()\n .get(rlKey)\n .pttl(rlKey)\n .exec((err, res) => {\n if (err) {\n reject(err);\n } else {\n const [points] = res;\n if (points === null) {\n return resolve(null)\n }\n\n resolve(res);\n }\n });\n });\n }\n\n _delete(rlKey) {\n return new Promise((resolve, reject) => {\n this.client.del(rlKey, (err, res) => {\n if (err) {\n reject(err);\n } else {\n resolve(res > 0);\n }\n });\n });\n }\n}\n\nmodule.exports = RateLimiterRedis;\n\n\n//# sourceURL=webpack://@waku/noise-rtc/./node_modules/rate-limiter-flexible/lib/RateLimiterRedis.js?");
/***/ }),
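/*
 * Usage sketch for RateLimiterRedis above (not part of the bundled library source;
 * assumes the `ioredis` package, for which the `rlflxIncr` command is registered
 * via defineCommand in the constructor):
 *
 *   const Redis = require('ioredis');
 *   const { RateLimiterRedis } = require('rate-limiter-flexible');
 *
 *   const redisClient = new Redis({ enableOfflineQueue: false });
 *
 *   const limiter = new RateLimiterRedis({
 *     storeClient: redisClient,
 *     keyPrefix: 'rlflx',
 *     points: 10,                   // 10 points
 *     duration: 1,                  // per second
 *     rejectIfRedisNotReady: true,  // fail fast while the connection is not 'ready'
 *   });
 *
 *   limiter.consume('ip:127.0.0.1', 1)
 *     .then((res) => {
 *       // allowed
 *     })
 *     .catch((rej) => {
 *       if (rej instanceof Error) {
 *         // Redis error (and no insuranceLimiter configured)
 *       } else {
 *         // rate limited; rej.msBeforeNext tells how long to wait
 *       }
 *     });
 */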
/***/ "./node_modules/rate-limiter-flexible/lib/RateLimiterRes.js":
/*!******************************************************************!*\
!*** ./node_modules/rate-limiter-flexible/lib/RateLimiterRes.js ***!
\******************************************************************/
/***/ ((module) => {
eval("module.exports = class RateLimiterRes {\n constructor(remainingPoints, msBeforeNext, consumedPoints, isFirstInDuration) {\n this.remainingPoints = typeof remainingPoints === 'undefined' ? 0 : remainingPoints; // Remaining points in current duration\n this.msBeforeNext = typeof msBeforeNext === 'undefined' ? 0 : msBeforeNext; // Milliseconds before next action\n this.consumedPoints = typeof consumedPoints === 'undefined' ? 0 : consumedPoints; // Consumed points in current duration\n this.isFirstInDuration = typeof isFirstInDuration === 'undefined' ? false : isFirstInDuration;\n }\n\n get msBeforeNext() {\n return this._msBeforeNext;\n }\n\n set msBeforeNext(ms) {\n this._msBeforeNext = ms;\n return this;\n }\n\n get remainingPoints() {\n return this._remainingPoints;\n }\n\n set remainingPoints(p) {\n this._remainingPoints = p;\n return this;\n }\n\n get consumedPoints() {\n return this._consumedPoints;\n }\n\n set consumedPoints(p) {\n this._consumedPoints = p;\n return this;\n }\n\n get isFirstInDuration() {\n return this._isFirstInDuration;\n }\n\n set isFirstInDuration(value) {\n this._isFirstInDuration = Boolean(value);\n }\n\n _getDecoratedProperties() {\n return {\n remainingPoints: this.remainingPoints,\n msBeforeNext: this.msBeforeNext,\n consumedPoints: this.consumedPoints,\n isFirstInDuration: this.isFirstInDuration,\n };\n }\n\n [Symbol.for(\"nodejs.util.inspect.custom\")]() {\n return this._getDecoratedProperties();\n }\n\n toString() {\n return JSON.stringify(this._getDecoratedProperties());\n }\n\n toJSON() {\n return this._getDecoratedProperties();\n }\n};\n\n\n//# sourceURL=webpack://@waku/noise-rtc/./node_modules/rate-limiter-flexible/lib/RateLimiterRes.js?");
/***/ }),
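/*
 * Shape of RateLimiterRes as resolved (or rejected) by the limiters above;
 * a reading sketch, not part of the bundled library source:
 *
 *   const { RateLimiterMemory } = require('rate-limiter-flexible');
 *   const limiter = new RateLimiterMemory({ points: 3, duration: 10 });
 *
 *   limiter.consume('key', 1).then((res) => {
 *     res.remainingPoints;    // points left in the current duration
 *     res.msBeforeNext;       // milliseconds until the current duration ends
 *     res.consumedPoints;     // points consumed in the current duration
 *     res.isFirstInDuration;  // true when this consume opened the duration window
 *     JSON.stringify(res);    // toJSON() exposes exactly the four fields above
 *   });
 */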
/***/ "./node_modules/rate-limiter-flexible/lib/RateLimiterStoreAbstract.js":
/*!****************************************************************************!*\
!*** ./node_modules/rate-limiter-flexible/lib/RateLimiterStoreAbstract.js ***!
\****************************************************************************/
/***/ ((module, __unused_webpack_exports, __webpack_require__) => {
eval("const RateLimiterAbstract = __webpack_require__(/*! ./RateLimiterAbstract */ \"./node_modules/rate-limiter-flexible/lib/RateLimiterAbstract.js\");\nconst BlockedKeys = __webpack_require__(/*! ./component/BlockedKeys */ \"./node_modules/rate-limiter-flexible/lib/component/BlockedKeys/index.js\");\nconst RateLimiterRes = __webpack_require__(/*! ./RateLimiterRes */ \"./node_modules/rate-limiter-flexible/lib/RateLimiterRes.js\");\n\nmodule.exports = class RateLimiterStoreAbstract extends RateLimiterAbstract {\n /**\n *\n * @param opts Object Defaults {\n * ... see other in RateLimiterAbstract\n *\n * inMemoryBlockOnConsumed: 40, // Number of points when key is blocked\n * inMemoryBlockDuration: 10, // Block duration in seconds\n * insuranceLimiter: RateLimiterAbstract\n * }\n */\n constructor(opts = {}) {\n super(opts);\n\n this.inMemoryBlockOnConsumed = opts.inMemoryBlockOnConsumed || opts.inmemoryBlockOnConsumed;\n this.inMemoryBlockDuration = opts.inMemoryBlockDuration || opts.inmemoryBlockDuration;\n this.insuranceLimiter = opts.insuranceLimiter;\n this._inMemoryBlockedKeys = new BlockedKeys();\n }\n\n get client() {\n return this._client;\n }\n\n set client(value) {\n if (typeof value === 'undefined') {\n throw new Error('storeClient is not set');\n }\n this._client = value;\n }\n\n /**\n * Have to be launched after consume\n * It blocks key and execute evenly depending on result from store\n *\n * It uses _getRateLimiterRes function to prepare RateLimiterRes from store result\n *\n * @param resolve\n * @param reject\n * @param rlKey\n * @param changedPoints\n * @param storeResult\n * @param {Object} options\n * @private\n */\n _afterConsume(resolve, reject, rlKey, changedPoints, storeResult, options = {}) {\n const res = this._getRateLimiterRes(rlKey, changedPoints, storeResult);\n\n if (this.inMemoryBlockOnConsumed > 0 && !(this.inMemoryBlockDuration > 0)\n && res.consumedPoints >= this.inMemoryBlockOnConsumed\n ) {\n this._inMemoryBlockedKeys.addMs(rlKey, res.msBeforeNext);\n if (res.consumedPoints > this.points) {\n return reject(res);\n } else {\n return resolve(res)\n }\n } else if (res.consumedPoints > this.points) {\n let blockPromise = Promise.resolve();\n // Block only first time when consumed more than points\n if (this.blockDuration > 0 && res.consumedPoints <= (this.points + changedPoints)) {\n res.msBeforeNext = this.msBlockDuration;\n blockPromise = this._block(rlKey, res.consumedPoints, this.msBlockDuration, options);\n }\n\n if (this.inMemoryBlockOnConsumed > 0 && res.consumedPoints >= this.inMemoryBlockOnConsumed) {\n // Block key for this.inMemoryBlockDuration seconds\n this._inMemoryBlockedKeys.add(rlKey, this.inMemoryBlockDuration);\n res.msBeforeNext = this.msInMemoryBlockDuration;\n }\n\n blockPromise\n .then(() => {\n reject(res);\n })\n .catch((err) => {\n reject(err);\n });\n } else if (this.execEvenly && res.msBeforeNext > 0 && !res.isFirstInDuration) {\n let delay = Math.ceil(res.msBeforeNext / (res.remainingPoints + 2));\n if (delay < this.execEvenlyMinDelayMs) {\n delay = res.consumedPoints * this.execEvenlyMinDelayMs;\n }\n\n setTimeout(resolve, delay, res);\n } else {\n resolve(res);\n }\n }\n\n _handleError(err, funcName, resolve, reject, key, data = false, options = {}) {\n if (!(this.insuranceLimiter instanceof RateLimiterAbstract)) {\n reject(err);\n } else {\n this.insuranceLimiter[funcName](key, data, options)\n .then((res) => {\n resolve(res);\n })\n .catch((res) => {\n reject(res);\n });\n }\n }\n\n /**\n * @deprecated Use camelCase version\n 
* @returns {BlockedKeys}\n * @private\n */\n get _inmemoryBlockedKeys() {\n return this._inMemoryBlockedKeys\n }\n\n /**\n * @deprecated Use camelCase version\n * @param rlKey\n * @returns {number}\n */\n getInmemoryBlockMsBeforeExpire(rlKey) {\n return this.getInMemoryBlockMsBeforeExpire(rlKey)\n }\n\n /**\n * @deprecated Use camelCase version\n * @returns {number|number}\n */\n get inmemoryBlockOnConsumed() {\n return this.inMemoryBlockOnConsumed;\n }\n\n /**\n * @deprecated Use camelCase version\n * @param value\n */\n set inmemoryBlockOnConsumed(value) {\n this.inMemoryBlockOnConsumed = value;\n }\n\n /**\n * @deprecated Use camelCase version\n * @returns {number|number}\n */\n get inmemoryBlockDuration() {\n return this.inMemoryBlockDuration;\n }\n\n /**\n * @deprecated Use camelCase version\n * @param value\n */\n set inmemoryBlockDuration(value) {\n this.inMemoryBlockDuration = value\n }\n\n /**\n * @deprecated Use camelCase version\n * @returns {number}\n */\n get msInmemoryBlockDuration() {\n return this.inMemoryBlockDuration * 1000;\n }\n\n getInMemoryBlockMsBeforeExpire(rlKey) {\n if (this.inMemoryBlockOnConsumed > 0) {\n return this._inMemoryBlockedKeys.msBeforeExpire(rlKey);\n }\n\n return 0;\n }\n\n get inMemoryBlockOnConsumed() {\n return this._inMemoryBlockOnConsumed;\n }\n\n set inMemoryBlockOnConsumed(value) {\n this._inMemoryBlockOnConsumed = value ? parseInt(value) : 0;\n if (this.inMemoryBlockOnConsumed > 0 && this.points > this.inMemoryBlockOnConsumed) {\n throw new Error('inMemoryBlockOnConsumed option must be greater or equal \"points\" option');\n }\n }\n\n get inMemoryBlockDuration() {\n return this._inMemoryBlockDuration;\n }\n\n set inMemoryBlockDuration(value) {\n this._inMemoryBlockDuration = value ? parseInt(value) : 0;\n if (this.inMemoryBlockDuration > 0 && this.inMemoryBlockOnConsumed === 0) {\n throw new Error('inMemoryBlockOnConsumed option must be set up');\n }\n }\n\n get msInMemoryBlockDuration() {\n return this._inMemoryBlockDuration * 1000;\n }\n\n get insuranceLimiter() {\n return this._insuranceLimiter;\n }\n\n set insuranceLimiter(value) {\n if (typeof value !== 'undefined' && !(value instanceof RateLimiterAbstract)) {\n throw new Error('insuranceLimiter must be instance of RateLimiterAbstract');\n }\n this._insuranceLimiter = value;\n if (this._insuranceLimiter) {\n this._insuranceLimiter.blockDuration = this.blockDuration;\n this._insuranceLimiter.execEvenly = this.execEvenly;\n }\n }\n\n /**\n * Block any key for secDuration seconds\n *\n * @param key\n * @param secDuration\n * @param {Object} options\n *\n * @return Promise\n */\n block(key, secDuration, options = {}) {\n const msDuration = secDuration * 1000;\n return this._block(this.getKey(key), this.points + 1, msDuration, options);\n }\n\n /**\n * Set points by key for any duration\n *\n * @param key\n * @param points\n * @param secDuration\n * @param {Object} options\n *\n * @return Promise\n */\n set(key, points, secDuration, options = {}) {\n const msDuration = (secDuration >= 0 ? 
secDuration : this.duration) * 1000;\n return this._block(this.getKey(key), points, msDuration, options);\n }\n\n /**\n *\n * @param key\n * @param pointsToConsume\n * @param {Object} options\n * @returns Promise\n */\n consume(key, pointsToConsume = 1, options = {}) {\n return new Promise((resolve, reject) => {\n const rlKey = this.getKey(key);\n\n const inMemoryBlockMsBeforeExpire = this.getInMemoryBlockMsBeforeExpire(rlKey);\n if (inMemoryBlockMsBeforeExpire > 0) {\n return reject(new RateLimiterRes(0, inMemoryBlockMsBeforeExpire));\n }\n\n this._upsert(rlKey, pointsToConsume, this._getKeySecDuration(options) * 1000, false, options)\n .then((res) => {\n this._afterConsume(resolve, reject, rlKey, pointsToConsume, res);\n })\n .catch((err) => {\n this._handleError(err, 'consume', resolve, reject, key, pointsToConsume, options);\n });\n });\n }\n\n /**\n *\n * @param key\n * @param points\n * @param {Object} options\n * @returns Promise\n */\n penalty(key, points = 1, options = {}) {\n const rlKey = this.getKey(key);\n return new Promise((resolve, reject) => {\n this._upsert(rlKey, points, this._getKeySecDuration(options) * 1000, false, options)\n .then((res) => {\n resolve(this._getRateLimiterRes(rlKey, points, res));\n })\n .catch((err) => {\n this._handleError(err, 'penalty', resolve, reject, key, points, options);\n });\n });\n }\n\n /**\n *\n * @param key\n * @param points\n * @param {Object} options\n * @returns Promise\n */\n reward(key, points = 1, options = {}) {\n const rlKey = this.getKey(key);\n return new Promise((resolve, reject) => {\n this._upsert(rlKey, -points, this._getKeySecDuration(options) * 1000, false, options)\n .then((res) => {\n resolve(this._getRateLimiterRes(rlKey, -points, res));\n })\n .catch((err) => {\n this._handleError(err, 'reward', resolve, reject, key, points, options);\n });\n });\n }\n\n /**\n *\n * @param key\n * @param {Object} options\n * @returns Promise|null\n */\n get(key, options = {}) {\n const rlKey = this.getKey(key);\n return new Promise((resolve, reject) => {\n this._get(rlKey, options)\n .then((res) => {\n if (res === null || typeof res === 'undefined') {\n resolve(null);\n } else {\n resolve(this._getRateLimiterRes(rlKey, 0, res));\n }\n })\n .catch((err) => {\n this._handleError(err, 'get', resolve, reject, key, options);\n });\n });\n }\n\n /**\n *\n * @param key\n * @param {Object} options\n * @returns Promise\n */\n delete(key, options = {}) {\n const rlKey = this.getKey(key);\n return new Promise((resolve, reject) => {\n this._delete(rlKey, options)\n .then((res) => {\n this._inMemoryBlockedKeys.delete(rlKey);\n resolve(res);\n })\n .catch((err) => {\n this._handleError(err, 'delete', resolve, reject, key, options);\n });\n });\n }\n\n /**\n * Cleanup keys no-matter expired or not.\n */\n deleteInMemoryBlockedAll() {\n this._inMemoryBlockedKeys.delete();\n }\n\n /**\n * Get RateLimiterRes object filled depending on storeResult, which specific for exact store\n *\n * @param rlKey\n * @param changedPoints\n * @param storeResult\n * @private\n */\n _getRateLimiterRes(rlKey, changedPoints, storeResult) { // eslint-disable-line no-unused-vars\n throw new Error(\"You have to implement the method '_getRateLimiterRes'!\");\n }\n\n /**\n * Block key for this.msBlockDuration milliseconds\n * Usually, it just prolongs lifetime of key\n *\n * @param rlKey\n * @param initPoints\n * @param msDuration\n * @param {Object} options\n *\n * @return Promise\n */\n _block(rlKey, initPoints, msDuration, options = {}) {\n return new Promise((resolve, 
reject) => {\n this._upsert(rlKey, initPoints, msDuration, true, options)\n .then(() => {\n resolve(new RateLimiterRes(0, msDuration > 0 ? msDuration : -1, initPoints));\n })\n .catch((err) => {\n this._handleError(err, 'block', resolve, reject, this.parseKey(rlKey), msDuration / 1000, options);\n });\n });\n }\n\n /**\n * Have to be implemented in every limiter\n * Resolve with raw result from Store OR null if rlKey is not set\n * or Reject with error\n *\n * @param rlKey\n * @param {Object} options\n * @private\n *\n * @return Promise\n */\n _get(rlKey, options = {}) { // eslint-disable-line no-unused-vars\n throw new Error(\"You have to implement the method '_get'!\");\n }\n\n /**\n * Have to be implemented\n * Resolve with true OR false if rlKey doesn't exist\n * or Reject with error\n *\n * @param rlKey\n * @param {Object} options\n * @private\n *\n * @return Promise\n */\n _delete(rlKey, options = {}) { // eslint-disable-line no-unused-vars\n throw new Error(\"You have to implement the method '_delete'!\");\n }\n\n /**\n * Have to be implemented\n * Resolve with object used for {@link _getRateLimiterRes} to generate {@link RateLimiterRes}\n *\n * @param {string} rlKey\n * @param {number} points\n * @param {number} msDuration\n * @param {boolean} forceExpire\n * @param {Object} options\n * @abstract\n *\n * @return Promise