Mirror of https://github.com/logos-messaging/examples.waku.org.git
(synced 2026-01-05 06:13:11 +00:00).
File size: 47493 lines, 1.5 MiB.
// Best-effort reference to the global object across runtimes:
// prefer standard `globalThis`, then browser `window`, Node `global`,
// worker `self`, and finally an empty object as a last resort.
var commonjsGlobal =
	typeof globalThis !== 'undefined' ? globalThis :
	typeof window !== 'undefined' ? window :
	typeof global !== 'undefined' ? global :
	typeof self !== 'undefined' ? self :
	{};
|
|
|
|
/**
 * Unwraps a transpiled-ES-module namespace: when `x` is marked
 * `__esModule` and carries its own `default` property, return that
 * default export; otherwise return `x` unchanged.
 */
function getDefaultExportFromCjs (x) {
	const hasDefault =
		x &&
		x.__esModule &&
		Object.prototype.hasOwnProperty.call(x, 'default');
	return hasDefault ? x['default'] : x;
}
|
|
|
|
/**
 * Wraps a CommonJS-interop namespace object `n` into an ES-module-shaped
 * object: marks it `__esModule` and mirrors every own key of `n` through
 * live getters. When `n.default` is a function, the wrapper itself is
 * callable and delegates to that function.
 */
function getAugmentedNamespace(n) {
	const f = n.default;
	let a;
	if (typeof f === "function") {
		// Keep the namespace callable so `require(...)()` still works.
		a = function () {
			return f.apply(this, arguments);
		};
		a.prototype = f.prototype;
	} else {
		a = {};
	}
	Object.defineProperty(a, '__esModule', {value: true});
	for (const k of Object.keys(n)) {
		const d = Object.getOwnPropertyDescriptor(n, k);
		// Preserve accessor properties as-is; expose data properties
		// through getters so reads stay live against `n`.
		Object.defineProperty(a, k, d.get ? d : {
			enumerable: true,
			get: function () {
				return n[k];
			}
		});
	}
	return a;
}
|
|
|
|
// Placeholder CommonJS module record for the `debug` browser build;
// populated later by the IIFE that receives (module, exports).
var browser = {exports: {}};
|
|
|
|
/**
|
|
* Helpers.
|
|
*/
|
|
|
|
// Lazily-initialized singleton for the vendored `ms` module.
var ms;
var hasRequiredMs;

/**
 * Initializes (once) and returns the vendored `ms` duration helper, which
 * parses strings like "2 days" into milliseconds and formats millisecond
 * counts back into short ("1m") or long ("1 minute") strings.
 *
 * @return {Function} the `ms(val, [options])` helper
 */
function requireMs () {
	if (hasRequiredMs) return ms;
	hasRequiredMs = 1;

	// Unit sizes expressed in milliseconds.
	var SEC = 1000;
	var MIN = SEC * 60;
	var HOUR = MIN * 60;
	var DAY = HOUR * 24;
	var WEEK = DAY * 7;
	var YEAR = DAY * 365.25;

	// Millisecond multiplier for every unit spelling accepted by `parse`.
	// The regex below restricts matches to exactly these keys.
	var UNIT_FACTORS = {
		years: YEAR, year: YEAR, yrs: YEAR, yr: YEAR, y: YEAR,
		weeks: WEEK, week: WEEK, w: WEEK,
		days: DAY, day: DAY, d: DAY,
		hours: HOUR, hour: HOUR, hrs: HOUR, hr: HOUR, h: HOUR,
		minutes: MIN, minute: MIN, mins: MIN, min: MIN, m: MIN,
		seconds: SEC, second: SEC, secs: SEC, sec: SEC, s: SEC,
		milliseconds: 1, millisecond: 1, msecs: 1, msec: 1, ms: 1
	};

	/**
	 * Parse or format the given `val`.
	 *
	 * Options:
	 *
	 *  - `long` verbose formatting [false]
	 *
	 * @param {String|Number} val
	 * @param {Object} [options]
	 * @throws {Error} throw an error if val is not a non-empty string or a number
	 * @return {String|Number}
	 * @api public
	 */
	ms = function (val, options) {
		options = options || {};
		var type = typeof val;
		if (type === 'string' && val.length > 0) {
			return parse(val);
		}
		if (type === 'number' && isFinite(val)) {
			return options.long ? fmtLong(val) : fmtShort(val);
		}
		throw new Error(
			'val is not a non-empty string or a valid number. val=' +
			JSON.stringify(val)
		);
	};

	/**
	 * Parse the given `str` and return milliseconds; `undefined` for
	 * unparseable input. A bare number string defaults to milliseconds.
	 *
	 * @param {String} str
	 * @return {Number}
	 * @api private
	 */
	function parse(str) {
		str = String(str);
		if (str.length > 100) {
			// Guard against pathological inputs before running the regex.
			return;
		}
		var match = /^(-?(?:\d+)?\.?\d+) *(milliseconds?|msecs?|ms|seconds?|secs?|s|minutes?|mins?|m|hours?|hrs?|h|days?|d|weeks?|w|years?|yrs?|y)?$/i.exec(
			str
		);
		if (!match) {
			return;
		}
		var n = parseFloat(match[1]);
		var unit = (match[2] || 'ms').toLowerCase();
		var factor = UNIT_FACTORS[unit];
		return factor === undefined ? undefined : n * factor;
	}

	// Largest-first unit thresholds for the two formatters.
	var SHORT_STEPS = [[DAY, 'd'], [HOUR, 'h'], [MIN, 'm'], [SEC, 's']];
	var LONG_STEPS = [[DAY, 'day'], [HOUR, 'hour'], [MIN, 'minute'], [SEC, 'second']];

	/**
	 * Short format for `ms`, e.g. 60000 -> "1m".
	 *
	 * @param {Number} msVal
	 * @return {String}
	 * @api private
	 */
	function fmtShort(msVal) {
		var msAbs = Math.abs(msVal);
		for (var i = 0; i < SHORT_STEPS.length; i++) {
			if (msAbs >= SHORT_STEPS[i][0]) {
				return Math.round(msVal / SHORT_STEPS[i][0]) + SHORT_STEPS[i][1];
			}
		}
		return msVal + 'ms';
	}

	/**
	 * Long format for `ms`, e.g. 60000 -> "1 minute".
	 *
	 * @param {Number} msVal
	 * @return {String}
	 * @api private
	 */
	function fmtLong(msVal) {
		var msAbs = Math.abs(msVal);
		for (var i = 0; i < LONG_STEPS.length; i++) {
			if (msAbs >= LONG_STEPS[i][0]) {
				return plural(msVal, msAbs, LONG_STEPS[i][0], LONG_STEPS[i][1]);
			}
		}
		return msVal + ' ms';
	}

	/**
	 * Pluralization helper: rounds and appends "s" when the magnitude is
	 * at least 1.5 units.
	 */
	function plural(msVal, msAbs, n, name) {
		var isPlural = msAbs >= n * 1.5;
		return Math.round(msVal / n) + ' ' + name + (isPlural ? 's' : '');
	}

	return ms;
}
|
|
|
|
/**
|
|
* This is the common logic for both the Node.js and web browser
|
|
* implementations of `debug()`.
|
|
*/
|
|
|
|
function setup(env) {
	// Expose the factory under several aliases and attach the shared helpers.
	createDebug.debug = createDebug;
	createDebug.default = createDebug;
	createDebug.coerce = coerce;
	createDebug.disable = disable;
	createDebug.enable = enable;
	createDebug.enabled = enabled;
	createDebug.humanize = requireMs();
	createDebug.destroy = destroy;

	// Copy environment-specific hooks (formatArgs, save, load, useColors,
	// storage, log, colors, ...) from the adapter onto the factory.
	Object.keys(env).forEach(key => {
		createDebug[key] = env[key];
	});

	/**
	 * The currently active debug mode names, and names to skip.
	 */
	createDebug.names = [];
	createDebug.skips = [];

	/**
	 * Map of special "%n" handling functions, for the debug "format" argument.
	 *
	 * Valid key names are a single, lower or upper-case letter, i.e. "n" and "N".
	 */
	createDebug.formatters = {};
|
|
|
|
	/**
	 * Selects a color for a debug namespace
	 * @param {String} namespace The namespace string for the debug instance to be colored
	 * @return {Number|String} An ANSI color code for the given namespace
	 * @api private
	 */
	function selectColor(namespace) {
		let hash = 0;

		// djb2-style rolling hash over the namespace's UTF-16 code units.
		for (let i = 0; i < namespace.length; i++) {
			hash = ((hash << 5) - hash) + namespace.charCodeAt(i);
			hash |= 0; // Convert to 32bit integer
		}

		// Map the hash onto the adapter-provided palette.
		return createDebug.colors[Math.abs(hash) % createDebug.colors.length];
	}
	createDebug.selectColor = selectColor;
|
|
|
|
	/**
	 * Create a debugger with the given `namespace`.
	 *
	 * @param {String} namespace
	 * @return {Function}
	 * @api public
	 */
	function createDebug(namespace) {
		let prevTime;              // timestamp of the previous call; drives `diff`
		let enableOverride = null; // explicit enable/disable set via the `enabled` setter
		let namespacesCache;       // last-seen createDebug.namespaces value
		let enabledCache;          // memoized createDebug.enabled(namespace) result

		function debug(...args) {
			// Disabled?
			if (!debug.enabled) {
				return;
			}

			const self = debug;

			// Set `diff` timestamp
			const curr = Number(new Date());
			const ms = curr - (prevTime || curr);
			self.diff = ms;
			self.prev = prevTime;
			self.curr = curr;
			prevTime = curr;

			args[0] = createDebug.coerce(args[0]);

			if (typeof args[0] !== 'string') {
				// Anything else let's inspect with %O
				args.unshift('%O');
			}

			// Apply any `formatters` transformations
			let index = 0;
			args[0] = args[0].replace(/%([a-zA-Z%])/g, (match, format) => {
				// If we encounter an escaped % then don't increase the array index
				if (match === '%%') {
					return '%';
				}
				index++;
				const formatter = createDebug.formatters[format];
				if (typeof formatter === 'function') {
					const val = args[index];
					match = formatter.call(self, val);

					// Now we need to remove `args[index]` since it's inlined in the `format`
					args.splice(index, 1);
					index--;
				}
				return match;
			});

			// Apply env-specific formatting (colors, etc.)
			createDebug.formatArgs.call(self, args);

			// Prefer a per-instance log sink over the adapter's default.
			const logFn = self.log || createDebug.log;
			logFn.apply(self, args);
		}

		debug.namespace = namespace;
		debug.useColors = createDebug.useColors();
		debug.color = createDebug.selectColor(namespace);
		debug.extend = extend;
		debug.destroy = createDebug.destroy; // XXX Temporary. Will be removed in the next major release.

		// `enabled` is computed lazily from the global namespace selection,
		// unless a caller assigned it explicitly (enableOverride).
		Object.defineProperty(debug, 'enabled', {
			enumerable: true,
			configurable: false,
			get: () => {
				if (enableOverride !== null) {
					return enableOverride;
				}
				// Re-evaluate only when the global selection string changed.
				if (namespacesCache !== createDebug.namespaces) {
					namespacesCache = createDebug.namespaces;
					enabledCache = createDebug.enabled(namespace);
				}

				return enabledCache;
			},
			set: v => {
				enableOverride = v;
			}
		});

		// Env-specific initialization logic for debug instances
		if (typeof createDebug.init === 'function') {
			createDebug.init(debug);
		}

		return debug;
	}
|
|
|
|
function extend(namespace, delimiter) {
|
|
const newDebug = createDebug(this.namespace + (typeof delimiter === 'undefined' ? ':' : delimiter) + namespace);
|
|
newDebug.log = this.log;
|
|
return newDebug;
|
|
}
|
|
|
|
	/**
	 * Enables a debug mode by namespaces. This can include modes
	 * separated by a colon and wildcards.
	 *
	 * @param {String} namespaces
	 * @api public
	 */
	function enable(namespaces) {
		createDebug.save(namespaces);
		createDebug.namespaces = namespaces;

		// Rebuild the include/exclude matcher lists from scratch.
		createDebug.names = [];
		createDebug.skips = [];

		let i;
		// Patterns may be separated by commas and/or whitespace.
		const split = (typeof namespaces === 'string' ? namespaces : '').split(/[\s,]+/);
		const len = split.length;

		for (i = 0; i < len; i++) {
			if (!split[i]) {
				// ignore empty strings
				continue;
			}

			// Only '*' is treated specially (becomes '.*?'); other regex
			// metacharacters in the pattern are passed to RegExp as-is.
			namespaces = split[i].replace(/\*/g, '.*?');

			if (namespaces[0] === '-') {
				// A leading '-' marks the pattern as an exclusion.
				createDebug.skips.push(new RegExp('^' + namespaces.slice(1) + '$'));
			} else {
				createDebug.names.push(new RegExp('^' + namespaces + '$'));
			}
		}
	}
|
|
|
|
	/**
	 * Disable debug output.
	 *
	 * @return {String} namespaces - the selection that was active before
	 *   disabling (exclusions re-prefixed with '-'), suitable for re-enable.
	 * @api public
	 */
	function disable() {
		// Reconstruct the textual selection from the compiled matchers.
		const namespaces = [
			...createDebug.names.map(toNamespace),
			...createDebug.skips.map(toNamespace).map(namespace => '-' + namespace)
		].join(',');
		createDebug.enable('');
		return namespaces;
	}
|
|
|
|
/**
|
|
* Returns true if the given mode name is enabled, false otherwise.
|
|
*
|
|
* @param {String} name
|
|
* @return {Boolean}
|
|
* @api public
|
|
*/
|
|
function enabled(name) {
|
|
if (name[name.length - 1] === '*') {
|
|
return true;
|
|
}
|
|
|
|
let i;
|
|
let len;
|
|
|
|
for (i = 0, len = createDebug.skips.length; i < len; i++) {
|
|
if (createDebug.skips[i].test(name)) {
|
|
return false;
|
|
}
|
|
}
|
|
|
|
for (i = 0, len = createDebug.names.length; i < len; i++) {
|
|
if (createDebug.names[i].test(name)) {
|
|
return true;
|
|
}
|
|
}
|
|
|
|
return false;
|
|
}
|
|
|
|
/**
|
|
* Convert regexp to namespace
|
|
*
|
|
* @param {RegExp} regxep
|
|
* @return {String} namespace
|
|
* @api private
|
|
*/
|
|
function toNamespace(regexp) {
|
|
return regexp.toString()
|
|
.substring(2, regexp.toString().length - 2)
|
|
.replace(/\.\*\?$/, '*');
|
|
}
|
|
|
|
/**
|
|
* Coerce `val`.
|
|
*
|
|
* @param {Mixed} val
|
|
* @return {Mixed}
|
|
* @api private
|
|
*/
|
|
function coerce(val) {
|
|
if (val instanceof Error) {
|
|
return val.stack || val.message;
|
|
}
|
|
return val;
|
|
}
|
|
|
|
	/**
	 * XXX DO NOT USE. This is a temporary stub function.
	 * XXX It WILL be removed in the next major release.
	 */
	function destroy() {
		// Deprecation shim: warns on every call (the browser adapter's own
		// `destroy` replacement warns only once).
		console.warn('Instance method `debug.destroy()` is deprecated and no longer does anything. It will be removed in the next major version of `debug`.');
	}
|
|
|
|
	// Activate whatever selection the environment persisted (e.g.
	// localStorage / $DEBUG), then return the configured factory.
	createDebug.enable(createDebug.load());

	return createDebug;
}
|
|
|
|
// Shared `debug` core factory; invoked below with the browser adapter.
var common$1 = setup;
|
|
|
|
/* eslint-env browser */

(function (module, exports) {
/**
 * This is the web browser implementation of `debug()`.
 */

// Environment hooks consumed by the shared setup() factory.
exports.formatArgs = formatArgs;
exports.save = save;
exports.load = load;
exports.useColors = useColors;
exports.storage = localstorage();
// Deprecated no-op retained for API compatibility; warns once per bundle.
exports.destroy = (() => {
	let warned = false;

	return () => {
		if (!warned) {
			warned = true;
			console.warn('Instance method `debug.destroy()` is deprecated and no longer does anything. It will be removed in the next major version of `debug`.');
		}
	};
})();
|
|
|
|
/**
 * Colors.
 */

// Palette of CSS hex colors used for namespace coloring in DevTools
// (selectColor() picks one by hashing the namespace).
exports.colors = [
	'#0000CC', '#0000FF', '#0033CC', '#0033FF', '#0066CC', '#0066FF',
	'#0099CC', '#0099FF', '#00CC00', '#00CC33', '#00CC66', '#00CC99',
	'#00CCCC', '#00CCFF', '#3300CC', '#3300FF', '#3333CC', '#3333FF',
	'#3366CC', '#3366FF', '#3399CC', '#3399FF', '#33CC00', '#33CC33',
	'#33CC66', '#33CC99', '#33CCCC', '#33CCFF', '#6600CC', '#6600FF',
	'#6633CC', '#6633FF', '#66CC00', '#66CC33', '#9900CC', '#9900FF',
	'#9933CC', '#9933FF', '#99CC00', '#99CC33', '#CC0000', '#CC0033',
	'#CC0066', '#CC0099', '#CC00CC', '#CC00FF', '#CC3300', '#CC3333',
	'#CC3366', '#CC3399', '#CC33CC', '#CC33FF', '#CC6600', '#CC6633',
	'#CC9900', '#CC9933', '#CCCC00', '#CCCC33', '#FF0000', '#FF0033',
	'#FF0066', '#FF0099', '#FF00CC', '#FF00FF', '#FF3300', '#FF3333',
	'#FF3366', '#FF3399', '#FF33CC', '#FF33FF', '#FF6600', '#FF6633',
	'#FF9900', '#FF9933', '#FFCC00', '#FFCC33'
];
|
|
|
|
/**
 * Currently only WebKit-based Web Inspectors, Firefox >= v31,
 * and the Firebug extension (any Firefox version) are known
 * to support "%c" CSS customizations.
 *
 * TODO: add a `localStorage` variable to explicitly enable/disable colors
 */

// eslint-disable-next-line complexity
function useColors() {
	// NB: In an Electron preload script, document will be defined but not fully
	// initialized. Since we know we're in Chrome, we'll just detect this case
	// explicitly
	if (typeof window !== 'undefined' && window.process && (window.process.type === 'renderer' || window.process.__nwjs)) {
		return true;
	}

	// Internet Explorer and Edge do not support colors.
	if (typeof navigator !== 'undefined' && navigator.userAgent && navigator.userAgent.toLowerCase().match(/(edge|trident)\/(\d+)/)) {
		return false;
	}

	// NOTE(review): this expression can yield a truthy match array or null
	// rather than a strict boolean; callers only use the result truthily.
	// Is webkit? http://stackoverflow.com/a/16459606/376773
	// document is undefined in react-native: https://github.com/facebook/react-native/pull/1632
	return (typeof document !== 'undefined' && document.documentElement && document.documentElement.style && document.documentElement.style.WebkitAppearance) ||
		// Is firebug? http://stackoverflow.com/a/398120/376773
		(typeof window !== 'undefined' && window.console && (window.console.firebug || (window.console.exception && window.console.table))) ||
		// Is firefox >= v31?
		// https://developer.mozilla.org/en-US/docs/Tools/Web_Console#Styling_messages
		(typeof navigator !== 'undefined' && navigator.userAgent && navigator.userAgent.toLowerCase().match(/firefox\/(\d+)/) && parseInt(RegExp.$1, 10) >= 31) ||
		// Double check webkit in userAgent just in case we are in a worker
		(typeof navigator !== 'undefined' && navigator.userAgent && navigator.userAgent.toLowerCase().match(/applewebkit\/(\d+)/));
}
|
|
|
|
/**
 * Colorize log arguments if enabled.
 * Mutates `args` in place: prefixes args[0] with the namespace and appends
 * the humanized time diff (e.g. "+10ms"), inserting %c CSS directives and
 * the matching color strings when colors are on.
 *
 * @api public
 */
function formatArgs(args) {
	args[0] = (this.useColors ? '%c' : '') +
		this.namespace +
		(this.useColors ? ' %c' : ' ') +
		args[0] +
		(this.useColors ? '%c ' : ' ') +
		'+' + module.exports.humanize(this.diff);

	if (!this.useColors) {
		return;
	}

	// Color for the namespace, then reset before the user's message.
	const c = 'color: ' + this.color;
	args.splice(1, 0, c, 'color: inherit');

	// The final "%c" is somewhat tricky, because there could be other
	// arguments passed either before or after the %c, so we need to
	// figure out the correct index to insert the CSS into
	let index = 0;
	let lastC = 0;
	args[0].replace(/%[a-zA-Z%]/g, match => {
		if (match === '%%') {
			return;
		}
		index++;
		if (match === '%c') {
			// We only are interested in the *last* %c
			// (the user may have provided their own)
			lastC = index;
		}
	});

	// Insert the color argument where the last %c will consume it.
	args.splice(lastC, 0, c);
}
|
|
|
|
/**
 * Invokes `console.debug()` when available.
 * No-op when `console.debug` is not a "function".
 * If `console.debug` is not available, falls back
 * to `console.log`.
 *
 * @api public
 */
exports.log = console.debug || console.log || (() => {});
|
|
|
|
/**
 * Persist `namespaces` in the environment's storage under the 'debug' key
 * so the selection survives reloads; a falsy value clears the persisted
 * entry instead.
 *
 * @param {String} namespaces
 * @api private
 */
function save(namespaces) {
	try {
		const storage = exports.storage;
		if (namespaces) {
			storage.setItem('debug', namespaces);
		} else {
			storage.removeItem('debug');
		}
	} catch (error) {
		// Storage can be unavailable or throw (e.g. private browsing);
		// the selection simply won't persist in that case.
		// XXX (@Qix-) should we be logging these?
	}
}
|
|
|
|
/**
 * Load `namespaces` previously persisted in storage, falling back to the
 * DEBUG environment variable when a `process.env` is present (Electron).
 *
 * @return {String} returns the previously persisted debug modes
 * @api private
 */
function load() {
	let stored;
	try {
		stored = exports.storage.getItem('debug');
	} catch (error) {
		// Storage access may throw when disabled by the user; ignore.
		// XXX (@Qix-) should we be logging these?
	}

	// If debug isn't set in LS, and we're in Electron, try to load $DEBUG
	if (!stored && typeof process !== 'undefined' && 'env' in process) {
		stored = process.env.DEBUG;
	}

	return stored;
}
|
|
|
|
/**
 * Localstorage attempts to return the localstorage.
 *
 * This is necessary because safari throws
 * when a user disables cookies/localstorage
 * and you attempt to access it.
 *
 * Returns undefined when storage is inaccessible.
 *
 * @return {LocalStorage}
 * @api private
 */
function localstorage() {
	try {
		// TVMLKit (Apple TV JS Runtime) does not have a window object, just localStorage in the global context
		// The Browser also has localStorage in the global context.
		return localStorage;
	} catch (error) {
		// Swallow
		// XXX (@Qix-) should we be logging these?
	}
}
|
|
|
|
// Build the debug factory from the browser hooks defined above.
module.exports = common$1(exports);

const {formatters} = module.exports;

/**
 * Map %j to `JSON.stringify()`, since no Web Inspectors do that by default.
 */
formatters.j = function (v) {
	try {
		return JSON.stringify(v);
	} catch (error) {
		// Stringify can throw (e.g. circular structures); surface it inline.
		return '[UnexpectedJSONParseError]: ' + error.message;
	}
};

} (browser, browser.exports));
|
|
|
|
// Entry point of the vendored `debug` package (browser build).
var debug = browser.exports;

/**
 * DefaultPubSubTopic is the default gossipsub topic to use for Waku.
 */
const DefaultPubSubTopic = "/waku/2/default-waku/proto";
|
|
// Waku Store protocol identifiers, shaped like a TypeScript string enum.
var StoreCodecs$1;
(function (StoreCodecs) {
	StoreCodecs.V2Beta3 = "/vac/waku/store/2.0.0-beta3";
	StoreCodecs.V2Beta4 = "/vac/waku/store/2.0.0-beta4";
})(StoreCodecs$1 || (StoreCodecs$1 = {}));
|
|
|
|
// Empty stand-in for Node's `crypto` module in this browser bundle.
var nodeCrypto = {};

// Frozen ES-module-shaped namespace wrapping the stub above
// (presumably consumed where noble-secp256k1 would import node crypto —
// the import site is outside this chunk).
var nodeCrypto$1 = /*#__PURE__*/Object.freeze({
	__proto__: null,
	'default': nodeCrypto
});
|
|
|
|
/*! noble-secp256k1 - MIT License (c) 2019 Paul Miller (paulmillr.com) */
// Small BigInt constants hoisted so they are allocated once.
const _0n$1 = BigInt(0);
const _1n$1 = BigInt(1);
const _2n$1 = BigInt(2);
const _3n = BigInt(3);
const _8n = BigInt(8);
// secp256k1 parameters for the short Weierstrass form y² = x³ + a·x + b.
const CURVE$1 = Object.freeze({
	a: _0n$1,
	b: BigInt(7),
	// Field prime P.
	P: BigInt('0xfffffffffffffffffffffffffffffffffffffffffffffffffffffffefffffc2f'),
	// Curve group order n.
	n: BigInt('0xfffffffffffffffffffffffffffffffebaaedce6af48a03bbfd25e8cd0364141'),
	// Cofactor.
	h: _1n$1,
	// Generator (base point) coordinates.
	Gx: BigInt('55066263022277343669578718895168534326250603453777594175500187360389116729240'),
	Gy: BigInt('32670510020758816978083085130507043184471273380659243275938904335757337482424'),
	// Endomorphism constant used for GLV scalar splitting (see splitScalarEndo usage below).
	beta: BigInt('0x7ae96a2b657c07106e64479eac3434e99cf0497512f58995c1396c28719501ee'),
});
|
|
/**
 * Evaluates the curve's Weierstrass equation right-hand side,
 * y² = x³ + a·x + b, at `x`, reduced mod P. (The historical misspelling
 * of the name is kept because other code in this file references it.)
 */
function weistrass(x) {
	const { a, b } = CURVE$1;
	const xSquared = mod$1(x * x);
	const xCubed = mod$1(xSquared * x);
	return mod$1(xCubed + a * x + b);
}
|
|
// secp256k1 has a = 0, which enables the GLV-endomorphism fast paths below.
const USE_ENDOMORPHISM = CURVE$1.a === _0n$1;
|
|
/**
 * Error subclass used by this library for hash-related failures.
 * (The exact throw sites are outside this chunk.)
 *
 * Fix vs. original: sets `name`, so instances report "ShaError" in logs
 * and stack traces instead of the generic "Error".
 */
class ShaError extends Error {
	/**
	 * @param {string} message - human-readable description of the failure.
	 */
	constructor(message) {
		super(message);
		this.name = 'ShaError';
	}
}
|
|
/**
 * secp256k1 point in Jacobian (projective) coordinates (X, Y, Z); the
 * corresponding affine point is (X/Z², Y/Z³). Working projectively defers
 * expensive modular inversions until toAffine().
 */
class JacobianPoint {
	constructor(x, y, z) {
		this.x = x;
		this.y = y;
		this.z = z;
	}
	/** Lifts an affine Point into Jacobian coordinates (Z = 1). */
	static fromAffine(p) {
		if (!(p instanceof Point$1)) {
			throw new TypeError('JacobianPoint#fromAffine: expected Point');
		}
		return new JacobianPoint(p.x, p.y, _1n$1);
	}
	/** Converts many points to affine with one shared batched inversion. */
	static toAffineBatch(points) {
		const toInv = invertBatch$1(points.map((p) => p.z));
		return points.map((p, i) => p.toAffine(toInv[i]));
	}
	/** Rescales all points to Z = 1 via a batch affine round-trip. */
	static normalizeZ(points) {
		return JacobianPoint.toAffineBatch(points).map(JacobianPoint.fromAffine);
	}
	/** Projective equality: compares cross-multiplied x and y coordinates. */
	equals(other) {
		if (!(other instanceof JacobianPoint))
			throw new TypeError('JacobianPoint expected');
		const { x: X1, y: Y1, z: Z1 } = this;
		const { x: X2, y: Y2, z: Z2 } = other;
		const Z1Z1 = mod$1(Z1 * Z1);
		const Z2Z2 = mod$1(Z2 * Z2);
		const U1 = mod$1(X1 * Z2Z2);
		const U2 = mod$1(X2 * Z1Z1);
		const S1 = mod$1(mod$1(Y1 * Z2) * Z2Z2);
		const S2 = mod$1(mod$1(Y2 * Z1) * Z1Z1);
		return U1 === U2 && S1 === S2;
	}
	/** Negation: flip Y mod P. */
	negate() {
		return new JacobianPoint(this.x, mod$1(-this.y), this.z);
	}
	/** Point doubling using Jacobian-coordinate doubling formulas. */
	double() {
		const { x: X1, y: Y1, z: Z1 } = this;
		const A = mod$1(X1 * X1);
		const B = mod$1(Y1 * Y1);
		const C = mod$1(B * B);
		const x1b = X1 + B;
		const D = mod$1(_2n$1 * (mod$1(x1b * x1b) - A - C));
		const E = mod$1(_3n * A);
		const F = mod$1(E * E);
		const X3 = mod$1(F - _2n$1 * D);
		const Y3 = mod$1(E * (D - X3) - _8n * C);
		const Z3 = mod$1(_2n$1 * Y1 * Z1);
		return new JacobianPoint(X3, Y3, Z3);
	}
	/** Point addition; handles the identity and the P + (±P) special cases. */
	add(other) {
		if (!(other instanceof JacobianPoint))
			throw new TypeError('JacobianPoint expected');
		const { x: X1, y: Y1, z: Z1 } = this;
		const { x: X2, y: Y2, z: Z2 } = other;
		// Either operand with a zero coordinate is treated as the identity.
		if (X2 === _0n$1 || Y2 === _0n$1)
			return this;
		if (X1 === _0n$1 || Y1 === _0n$1)
			return other;
		const Z1Z1 = mod$1(Z1 * Z1);
		const Z2Z2 = mod$1(Z2 * Z2);
		const U1 = mod$1(X1 * Z2Z2);
		const U2 = mod$1(X2 * Z1Z1);
		const S1 = mod$1(mod$1(Y1 * Z2) * Z2Z2);
		const S2 = mod$1(mod$1(Y2 * Z1) * Z1Z1);
		const H = mod$1(U2 - U1);
		const r = mod$1(S2 - S1);
		// H == 0 means equal x: same point (double it) or opposite points
		// (result is the point at infinity).
		if (H === _0n$1) {
			if (r === _0n$1) {
				return this.double();
			}
			else {
				return JacobianPoint.ZERO;
			}
		}
		const HH = mod$1(H * H);
		const HHH = mod$1(H * HH);
		const V = mod$1(U1 * HH);
		const X3 = mod$1(r * r - HHH - _2n$1 * V);
		const Y3 = mod$1(r * (V - X3) - S1 * HHH);
		const Z3 = mod$1(Z1 * Z2 * H);
		return new JacobianPoint(X3, Y3, Z3);
	}
	subtract(other) {
		return this.add(other.negate());
	}
	/**
	 * Variable-time scalar multiplication ("unsafe" per upstream naming:
	 * its timing depends on the scalar bits, so it must not be used with
	 * secret scalars).
	 */
	multiplyUnsafe(scalar) {
		const P0 = JacobianPoint.ZERO;
		if (typeof scalar === 'bigint' && scalar === _0n$1)
			return P0;
		let n = normalizeScalar$1(scalar);
		if (n === _1n$1)
			return this;
		if (!USE_ENDOMORPHISM) {
			// Plain double-and-add over the scalar bits.
			let p = P0;
			let d = this;
			while (n > _0n$1) {
				if (n & _1n$1)
					p = p.add(d);
				d = d.double();
				n >>= _1n$1;
			}
			return p;
		}
		// GLV path: split n into two half-width scalars, multiply both in
		// one shared doubling loop, then recombine via the beta endomorphism.
		let { k1neg, k1, k2neg, k2 } = splitScalarEndo(n);
		let k1p = P0;
		let k2p = P0;
		let d = this;
		while (k1 > _0n$1 || k2 > _0n$1) {
			if (k1 & _1n$1)
				k1p = k1p.add(d);
			if (k2 & _1n$1)
				k2p = k2p.add(d);
			d = d.double();
			k1 >>= _1n$1;
			k2 >>= _1n$1;
		}
		if (k1neg)
			k1p = k1p.negate();
		if (k2neg)
			k2p = k2p.negate();
		k2p = new JacobianPoint(mod$1(k2p.x * CURVE$1.beta), k2p.y, k2p.z);
		return k1p.add(k2p);
	}
	/** Precomputes the multiples of this point used by wNAF for window width W. */
	precomputeWindow(W) {
		// With the endomorphism only 128-bit scalars are processed per half.
		const windows = USE_ENDOMORPHISM ? 128 / W + 1 : 256 / W + 1;
		const points = [];
		let p = this;
		let base = p;
		for (let window = 0; window < windows; window++) {
			base = p;
			points.push(base);
			for (let i = 1; i < 2 ** (W - 1); i++) {
				base = base.add(p);
				points.push(base);
			}
			p = base.double();
		}
		return points;
	}
	/**
	 * Windowed (wNAF-style) scalar multiplication. Returns the real result
	 * `p` plus a dummy accumulator `f`: zero window digits add into `f`
	 * instead of being skipped, so the amount of work done does not depend
	 * on the scalar's bit pattern.
	 */
	wNAF(n, affinePoint) {
		if (!affinePoint && this.equals(JacobianPoint.BASE))
			affinePoint = Point$1.BASE;
		const W = (affinePoint && affinePoint._WINDOW_SIZE) || 1;
		if (256 % W) {
			throw new Error('Point#wNAF: Invalid precomputation window, must be power of 2');
		}
		// Reuse cached precomputes for this affine point when available.
		let precomputes = affinePoint && pointPrecomputes$1.get(affinePoint);
		if (!precomputes) {
			precomputes = this.precomputeWindow(W);
			if (affinePoint && W !== 1) {
				precomputes = JacobianPoint.normalizeZ(precomputes);
				pointPrecomputes$1.set(affinePoint, precomputes);
			}
		}
		let p = JacobianPoint.ZERO;
		let f = JacobianPoint.ZERO;
		const windows = 1 + (USE_ENDOMORPHISM ? 128 / W : 256 / W);
		const windowSize = 2 ** (W - 1);
		const mask = BigInt(2 ** W - 1);
		const maxNumber = 2 ** W;
		const shiftBy = BigInt(W);
		for (let window = 0; window < windows; window++) {
			const offset = window * windowSize;
			let wbits = Number(n & mask);
			n >>= shiftBy;
			// Re-center the digit into [-windowSize, windowSize], carrying
			// into the next window when needed.
			if (wbits > windowSize) {
				wbits -= maxNumber;
				n += _1n$1;
			}
			if (wbits === 0) {
				// Zero digit: perform a dummy add into `f`.
				let pr = precomputes[offset];
				if (window % 2)
					pr = pr.negate();
				f = f.add(pr);
			}
			else {
				let cached = precomputes[offset + Math.abs(wbits) - 1];
				if (wbits < 0)
					cached = cached.negate();
				p = p.add(cached);
			}
		}
		return { p, f };
	}
	/**
	 * Scalar multiplication used for secret scalars; combines wNAF with the
	 * GLV endomorphism when available.
	 */
	multiply(scalar, affinePoint) {
		let n = normalizeScalar$1(scalar);
		let point;
		let fake;
		if (USE_ENDOMORPHISM) {
			const { k1neg, k1, k2neg, k2 } = splitScalarEndo(n);
			let { p: k1p, f: f1p } = this.wNAF(k1, affinePoint);
			let { p: k2p, f: f2p } = this.wNAF(k2, affinePoint);
			if (k1neg)
				k1p = k1p.negate();
			if (k2neg)
				k2p = k2p.negate();
			k2p = new JacobianPoint(mod$1(k2p.x * CURVE$1.beta), k2p.y, k2p.z);
			point = k1p.add(k2p);
			fake = f1p.add(f2p);
		}
		else {
			const { p, f } = this.wNAF(n, affinePoint);
			point = p;
			fake = f;
		}
		// Normalize the real and dummy results together so both take the
		// same code path; only the real one is returned.
		return JacobianPoint.normalizeZ([point, fake])[0];
	}
	/** Converts to affine: (x·z⁻², y·z⁻³); verifies the supplied inverse. */
	toAffine(invZ = invert$1(this.z)) {
		const { x, y, z } = this;
		const iz1 = invZ;
		const iz2 = mod$1(iz1 * iz1);
		const iz3 = mod$1(iz2 * iz1);
		const ax = mod$1(x * iz2);
		const ay = mod$1(y * iz3);
		const zz = mod$1(z * iz1);
		// z * invZ must be 1; otherwise the caller passed a wrong inverse.
		if (zz !== _1n$1)
			throw new Error('invZ was invalid');
		return new Point$1(ax, ay);
	}
}
|
|
// Generator and the point-at-infinity representation in Jacobian coordinates.
JacobianPoint.BASE = new JacobianPoint(CURVE$1.Gx, CURVE$1.Gy, _1n$1);
JacobianPoint.ZERO = new JacobianPoint(_0n$1, _1n$1, _0n$1);
// Per-point cache of wNAF precompute tables (weak, so points can be GC'd).
const pointPrecomputes$1 = new WeakMap();
|
|
/**
 * Affine point (x, y) on secp256k1; infinity is represented as (0, 0)
 * (Point$1.ZERO). Group operations delegate to JacobianPoint.
 *
 * Fix vs. original: corrected the typo in the recovery error message
 * ("point at infinify" -> "point at infinity").
 */
class Point$1 {
	constructor(x, y) {
		this.x = x;
		this.y = y;
	}
	/**
	 * Sets the wNAF precomputation window size and drops any cached
	 * precomputes for this point.
	 */
	_setWindowSize(windowSize) {
		this._WINDOW_SIZE = windowSize;
		pointPrecomputes$1.delete(this);
	}
	/** True when y is even; selects the 02/03 compressed-key prefix. */
	hasEvenY() {
		return this.y % _2n$1 === _0n$1;
	}
	/**
	 * Decodes a compressed point: 32 bytes (x only) or 33 bytes
	 * (02/03 prefix + x). Recovers y from the curve equation.
	 * @throws when x is not a field element or the point is off-curve.
	 */
	static fromCompressedHex(bytes) {
		const isShort = bytes.length === 32;
		const x = bytesToNumber(isShort ? bytes : bytes.subarray(1));
		if (!isValidFieldElement(x))
			throw new Error('Point is not on curve');
		const y2 = weistrass(x);
		let y = sqrtMod(y2);
		const isYOdd = (y & _1n$1) === _1n$1;
		if (isShort) {
			// 32-byte x-only input: force the even-y solution.
			if (isYOdd)
				y = mod$1(-y);
		}
		else {
			// The 02/03 prefix encodes y's parity; flip y if it disagrees.
			const isFirstByteOdd = (bytes[0] & 1) === 1;
			if (isFirstByteOdd !== isYOdd)
				y = mod$1(-y);
		}
		const point = new Point$1(x, y);
		point.assertValidity();
		return point;
	}
	/** Decodes an uncompressed point: 0x04 prefix + 32-byte x + 32-byte y. */
	static fromUncompressedHex(bytes) {
		const x = bytesToNumber(bytes.subarray(1, 33));
		const y = bytesToNumber(bytes.subarray(33, 65));
		const point = new Point$1(x, y);
		point.assertValidity();
		return point;
	}
	/**
	 * Decodes a point from hex/bytes, dispatching on length and prefix.
	 * @throws on any other length/prefix combination.
	 */
	static fromHex(hex) {
		const bytes = ensureBytes$1(hex);
		const len = bytes.length;
		const header = bytes[0];
		if (len === 32 || (len === 33 && (header === 0x02 || header === 0x03))) {
			return this.fromCompressedHex(bytes);
		}
		if (len === 65 && header === 0x04)
			return this.fromUncompressedHex(bytes);
		throw new Error(`Point.fromHex: received invalid point. Expected 32-33 compressed bytes or 65 uncompressed bytes, not ${len}`);
	}
	/** Derives the public-key point for a private key: d·G. */
	static fromPrivateKey(privateKey) {
		return Point$1.BASE.multiply(normalizePrivateKey(privateKey));
	}
	/**
	 * Public-key recovery from an ECDSA signature:
	 * computes Q = u1·G + u2·R with u1 = −h·r⁻¹, u2 = s·r⁻¹, where R is
	 * rebuilt from r and the recovery bit (which picks y's parity).
	 * @throws on an invalid recovery bit or when recovery yields infinity.
	 */
	static fromSignature(msgHash, signature, recovery) {
		msgHash = ensureBytes$1(msgHash);
		const h = truncateHash(msgHash);
		const { r, s } = normalizeSignature(signature);
		if (recovery !== 0 && recovery !== 1) {
			throw new Error('Cannot recover signature: invalid recovery bit');
		}
		const prefix = recovery & 1 ? '03' : '02';
		const R = Point$1.fromHex(prefix + numTo32bStr(r));
		const { n } = CURVE$1;
		const rinv = invert$1(r, n);
		const u1 = mod$1(-h * rinv, n);
		const u2 = mod$1(s * rinv, n);
		const Q = Point$1.BASE.multiplyAndAddUnsafe(R, u1, u2);
		if (!Q)
			throw new Error('Cannot recover signature: point at infinity');
		Q.assertValidity();
		return Q;
	}
	toRawBytes(isCompressed = false) {
		return hexToBytes$2(this.toHex(isCompressed));
	}
	/** SEC1 hex encoding: 02/03 + x (compressed) or 04 + x + y. */
	toHex(isCompressed = false) {
		const x = numTo32bStr(this.x);
		if (isCompressed) {
			const prefix = this.hasEvenY() ? '02' : '03';
			return `${prefix}${x}`;
		}
		else {
			return `04${x}${numTo32bStr(this.y)}`;
		}
	}
	/** x-only hex (drops the 1-byte parity prefix). */
	toHexX() {
		return this.toHex(true).slice(2);
	}
	/** x-only bytes (drops the 1-byte parity prefix). */
	toRawX() {
		return this.toRawBytes(true).slice(1);
	}
	/** @throws unless (x, y) satisfies y² = x³ + ax + b with valid coordinates. */
	assertValidity() {
		const msg = 'Point is not on elliptic curve';
		const { x, y } = this;
		if (!isValidFieldElement(x) || !isValidFieldElement(y))
			throw new Error(msg);
		const left = mod$1(y * y);
		const right = weistrass(x);
		if (mod$1(left - right) !== _0n$1)
			throw new Error(msg);
	}
	equals(other) {
		return this.x === other.x && this.y === other.y;
	}
	negate() {
		return new Point$1(this.x, mod$1(-this.y));
	}
	// Group operations delegate to the Jacobian implementation.
	double() {
		return JacobianPoint.fromAffine(this).double().toAffine();
	}
	add(other) {
		return JacobianPoint.fromAffine(this).add(JacobianPoint.fromAffine(other)).toAffine();
	}
	subtract(other) {
		return this.add(other.negate());
	}
	multiply(scalar) {
		return JacobianPoint.fromAffine(this).multiply(scalar, this).toAffine();
	}
	/**
	 * Computes a·this + b·Q without constant-time guarantees (used where
	 * the scalars are public, e.g. verification/recovery).
	 * @returns {Point$1|undefined} undefined when the sum is infinity.
	 */
	multiplyAndAddUnsafe(Q, a, b) {
		const P = JacobianPoint.fromAffine(this);
		const aP = a === _0n$1 || a === _1n$1 || this !== Point$1.BASE ? P.multiplyUnsafe(a) : P.multiply(a);
		const bQ = JacobianPoint.fromAffine(Q).multiplyUnsafe(b);
		const sum = aP.add(bQ);
		return sum.equals(JacobianPoint.ZERO) ? undefined : sum.toAffine();
	}
}
|
|
// Generator (base point) and the affine representation of infinity.
Point$1.BASE = new Point$1(CURVE$1.Gx, CURVE$1.Gy);
Point$1.ZERO = new Point$1(_0n$1, _0n$1);
|
|
/**
 * DER stores integers as signed values: when the leading hex digit has
 * its high bit set (>= 8) the number would read as negative, so a zero
 * byte ("00") is prepended to keep it positive.
 *
 * @param {string} s big-endian hex string of the integer
 * @return {string} hex string safe to embed in a DER INTEGER
 */
function sliceDER(s) {
	const highNibble = Number.parseInt(s[0], 16);
	return highNibble >= 8 ? '00' + s : s;
}
|
|
/**
 * Parses one DER INTEGER (tag 0x02) from the front of `data`.
 * Returns the integer as a bigint plus the remaining bytes after it.
 * @throws on a wrong tag, a truncated/zero length, or a superfluous
 *   0x00 pad byte (non-minimal encoding).
 */
function parseDERInt(data) {
	if (data.length < 2 || data[0] !== 0x02) {
		throw new Error(`Invalid signature integer tag: ${bytesToHex$2(data)}`);
	}
	const len = data[1];
	const res = data.subarray(2, len + 2);
	if (!len || res.length !== len) {
		throw new Error(`Invalid signature integer: wrong length`);
	}
	// A leading 0x00 is only allowed when the next byte has its high bit
	// set; otherwise the encoding is not minimal.
	if (res[0] === 0x00 && res[1] <= 0x7f) {
		throw new Error('Invalid signature integer: trailing length');
	}
	return { data: bytesToNumber(res), left: data.subarray(len + 2) };
}
|
|
/**
 * Parses an ASN.1 DER ECDSA signature:
 *   0x30 totalLen 0x02 rLen r 0x02 sLen s
 * @throws on a wrong SEQUENCE tag, a length mismatch, or trailing bytes
 *   after the two integers.
 */
function parseDERSignature(data) {
	if (data.length < 2 || data[0] != 0x30) {
		throw new Error(`Invalid signature tag: ${bytesToHex$2(data)}`);
	}
	// The declared SEQUENCE length must account for the entire remainder.
	if (data[1] !== data.length - 2) {
		throw new Error('Invalid signature: incorrect length');
	}
	const { data: r, left: sBytes } = parseDERInt(data.subarray(2));
	const { data: s, left: rBytesLeft } = parseDERInt(sBytes);
	if (rBytesLeft.length) {
		throw new Error(`Invalid signature: left bytes after parsing: ${bytesToHex$2(rBytesLeft)}`);
	}
	return { r, s };
}
|
|
/**
 * ECDSA signature (r, s) over secp256k1. Validates its components on
 * construction; supports DER and compact (64-byte) encodings.
 */
class Signature$1 {
	constructor(r, s) {
		this.r = r;
		this.s = s;
		this.assertValidity();
	}
	/** Parses a 64-byte (128 hex chars) compact signature: r || s. */
	static fromCompact(hex) {
		const arr = hex instanceof Uint8Array;
		const name = 'Signature.fromCompact';
		if (typeof hex !== 'string' && !arr)
			throw new TypeError(`${name}: Expected string or Uint8Array`);
		const str = arr ? bytesToHex$2(hex) : hex;
		if (str.length !== 128)
			throw new Error(`${name}: Expected 64-byte hex`);
		return new Signature$1(hexToNumber(str.slice(0, 64)), hexToNumber(str.slice(64, 128)));
	}
	/** Parses an ASN.1 DER-encoded signature. */
	static fromDER(hex) {
		const arr = hex instanceof Uint8Array;
		if (typeof hex !== 'string' && !arr)
			throw new TypeError(`Signature.fromDER: Expected string or Uint8Array`);
		const { r, s } = parseDERSignature(arr ? hex : hexToBytes$2(hex));
		return new Signature$1(r, s);
	}
	// Alias: generic hex input is assumed to be DER.
	static fromHex(hex) {
		return this.fromDER(hex);
	}
	/** @throws when r or s is outside (0, n). */
	assertValidity() {
		const { r, s } = this;
		if (!isWithinCurveOrder(r))
			throw new Error('Invalid Signature: r must be 0 < r < n');
		if (!isWithinCurveOrder(s))
			throw new Error('Invalid Signature: s must be 0 < s < n');
	}
	/** True when s is in the upper half of the curve order (s > n/2). */
	hasHighS() {
		const HALF = CURVE$1.n >> _1n$1;
		return this.s > HALF;
	}
	/** Returns an equivalent signature with low s (canonical form). */
	normalizeS() {
		return this.hasHighS() ? new Signature$1(this.r, CURVE$1.n - this.s) : this;
	}
	// NOTE(review): in the DER methods below, `isCompressed = true` means
	// "emit s only" — unusual flag semantics inherited from upstream.
	toDERRawBytes(isCompressed = false) {
		return hexToBytes$2(this.toDERHex(isCompressed));
	}
	toDERHex(isCompressed = false) {
		const sHex = sliceDER(numberToHexUnpadded(this.s));
		if (isCompressed)
			return sHex;
		const rHex = sliceDER(numberToHexUnpadded(this.r));
		const rLen = numberToHexUnpadded(rHex.length / 2);
		const sLen = numberToHexUnpadded(sHex.length / 2);
		const length = numberToHexUnpadded(rHex.length / 2 + sHex.length / 2 + 4);
		// SEQUENCE(0x30) len | INTEGER(0x02) rLen r | INTEGER(0x02) sLen s
		return `30${length}02${rLen}${rHex}02${sLen}${sHex}`;
	}
	toRawBytes() {
		return this.toDERRawBytes();
	}
	toHex() {
		return this.toDERHex();
	}
	/** Compact form: r and s each as 32 big-endian bytes. */
	toCompactRawBytes() {
		return hexToBytes$2(this.toCompactHex());
	}
	toCompactHex() {
		return numTo32bStr(this.r) + numTo32bStr(this.s);
	}
}
|
|
/**
 * Concatenates any number of Uint8Arrays into a single new Uint8Array.
 * The single-argument case returns the input itself (no copy), matching
 * the original behavior.
 * @throws {Error} when any argument is not a Uint8Array
 */
function concatBytes$1(...arrays) {
  const allTyped = arrays.every((chunk) => chunk instanceof Uint8Array);
  if (!allTyped)
    throw new Error('Uint8Array list expected');
  if (arrays.length === 1)
    return arrays[0];
  let total = 0;
  for (const chunk of arrays) {
    total += chunk.length;
  }
  const joined = new Uint8Array(total);
  let offset = 0;
  for (const chunk of arrays) {
    joined.set(chunk, offset);
    offset += chunk.length;
  }
  return joined;
}
|
|
// Precomputed two-character hex strings for every byte value 0..255.
const hexes$1 = Array.from({ length: 256 }, (v, i) => i.toString(16).padStart(2, '0'));
/**
 * Converts a Uint8Array to a lowercase hex string via table lookup.
 * @throws {Error} when the input is not a Uint8Array
 */
function bytesToHex$2(uint8a) {
  if (!(uint8a instanceof Uint8Array))
    throw new Error('Expected Uint8Array');
  const parts = [];
  for (const byte of uint8a) {
    parts.push(hexes$1[byte]);
  }
  return parts.join('');
}
|
|
// 2^256 — exclusive upper bound for values serialized to 32 bytes below.
const POW_2_256$1 = BigInt('0x10000000000000000000000000000000000000000000000000000000000000000');
// Serializes a bigint in [0, 2^256) to a zero-padded 64-character hex string.
function numTo32bStr(num) {
if (typeof num !== 'bigint')
throw new Error('Expected bigint');
if (!(_0n$1 <= num && num < POW_2_256$1))
throw new Error('Expected number < 2^256');
return num.toString(16).padStart(64, '0');
}
|
|
// Serializes a bigint to exactly 32 big-endian bytes.
function numTo32b(num) {
const b = hexToBytes$2(numTo32bStr(num));
// numTo32bStr already guarantees 64 hex chars; this is a defensive re-check.
if (b.length !== 32)
throw new Error('Error: expected 32 bytes');
return b;
}
|
|
/**
 * Converts a number/bigint to hex, padded to an even number of digits
 * (whole bytes) but with no fixed width.
 */
function numberToHexUnpadded(num) {
  const hex = num.toString(16);
  const oddLength = hex.length % 2 === 1;
  return oddLength ? `0${hex}` : hex;
}
|
|
/**
 * Parses a (prefix-less) hex string into a bigint.
 * @throws {TypeError} when the input is not a string
 */
function hexToNumber(hex) {
  if (typeof hex === 'string') {
    return BigInt(`0x${hex}`);
  }
  throw new TypeError('hexToNumber: expected string, got ' + typeof hex);
}
|
|
/**
 * Decodes an even-length hex string into a Uint8Array.
 * @throws {TypeError} non-string input
 * @throws {Error} odd length or non-hex characters
 */
function hexToBytes$2(hex) {
  if (typeof hex !== 'string') {
    throw new TypeError('hexToBytes: expected string, got ' + typeof hex);
  }
  if (hex.length % 2)
    throw new Error('hexToBytes: received invalid unpadded hex' + hex.length);
  const out = new Uint8Array(hex.length / 2);
  for (let idx = 0; idx < out.length; idx++) {
    const start = idx * 2;
    const parsed = Number.parseInt(hex.slice(start, start + 2), 16);
    if (Number.isNaN(parsed) || parsed < 0)
      throw new Error('Invalid byte sequence');
    out[idx] = parsed;
  }
  return out;
}
|
|
// Interprets a byte array as a big-endian unsigned bigint.
function bytesToNumber(bytes) {
return hexToNumber(bytesToHex$2(bytes));
}
|
|
/**
 * Normalizes hex-string-or-bytes input into a fresh Uint8Array.
 * Always returns a copy, so callers may mutate the result safely.
 */
function ensureBytes$1(hex) {
  if (hex instanceof Uint8Array) {
    return Uint8Array.from(hex);
  }
  return hexToBytes$2(hex);
}
|
|
// Coerces a number or bigint scalar into a bigint in (0, n).
// Numbers must be positive safe integers; bigints must be within curve order.
function normalizeScalar$1(num) {
if (typeof num === 'number' && Number.isSafeInteger(num) && num > 0)
return BigInt(num);
if (typeof num === 'bigint' && isWithinCurveOrder(num))
return num;
throw new TypeError('Expected valid private scalar: 0 < scalar < curve.n');
}
|
|
// Positive modular reduction: result is always in [0, b), unlike JS `%`
// which preserves the sign of the dividend.
function mod$1(a, b = CURVE$1.P) {
const result = a % b;
return result >= _0n$1 ? result : b + result;
}
|
|
// Repeated squaring: computes x^(2^power) mod P. `power` is a bigint counter.
function pow2$1(x, power) {
const { P } = CURVE$1;
let res = x;
while (power-- > _0n$1) {
res *= res;
res %= P;
}
return res;
}
|
|
// Modular square root over the field P using a hard-coded addition chain
// (each bK below is x raised to an exponent of K consecutive 1-bits).
// NOTE(review): the chain appears to compute x^((P+1)/4), the standard sqrt
// for P ≡ 3 (mod 4) — confirm against the curve constants before modifying.
// Do not reorder: every step depends on the previous intermediate.
function sqrtMod(x) {
const { P } = CURVE$1;
const _6n = BigInt(6);
const _11n = BigInt(11);
const _22n = BigInt(22);
const _23n = BigInt(23);
const _44n = BigInt(44);
const _88n = BigInt(88);
const b2 = (x * x * x) % P;
const b3 = (b2 * b2 * x) % P;
const b6 = (pow2$1(b3, _3n) * b3) % P;
const b9 = (pow2$1(b6, _3n) * b3) % P;
const b11 = (pow2$1(b9, _2n$1) * b2) % P;
const b22 = (pow2$1(b11, _11n) * b11) % P;
const b44 = (pow2$1(b22, _22n) * b22) % P;
const b88 = (pow2$1(b44, _44n) * b44) % P;
const b176 = (pow2$1(b88, _88n) * b88) % P;
const b220 = (pow2$1(b176, _44n) * b44) % P;
const b223 = (pow2$1(b220, _3n) * b3) % P;
const t1 = (pow2$1(b223, _23n) * b22) % P;
const t2 = (pow2$1(t1, _6n) * b2) % P;
return pow2$1(t2, _2n$1);
}
|
|
// Modular multiplicative inverse via the extended Euclidean algorithm.
// Throws when the inverse does not exist (gcd != 1) or inputs are invalid.
function invert$1(number, modulo = CURVE$1.P) {
if (number === _0n$1 || modulo <= _0n$1) {
throw new Error(`invert: expected positive integers, got n=${number} mod=${modulo}`);
}
let a = mod$1(number, modulo);
let b = modulo;
// x tracks the Bezout coefficient of `number`; u is its next value.
let x = _0n$1, u = _1n$1;
while (a !== _0n$1) {
const q = b / a;
const r = b % a;
const m = x - u * q;
b = a, a = r, x = u, u = m;
}
const gcd = b;
if (gcd !== _1n$1)
throw new Error('invert: does not exist');
return mod$1(x, modulo);
}
|
|
// Batch inversion (Montgomery's trick): inverts all elements with a single
// invert() call plus O(n) multiplications. Zero entries are skipped and their
// slot in the returned array stays undefined.
function invertBatch$1(nums, p = CURVE$1.P) {
const scratch = new Array(nums.length);
// Forward pass: scratch[i] = product of all non-zero nums before index i.
const lastMultiplied = nums.reduce((acc, num, i) => {
if (num === _0n$1)
return acc;
scratch[i] = acc;
return mod$1(acc * num, p);
}, _1n$1);
const inverted = invert$1(lastMultiplied, p);
// Backward pass: peel one factor off the running inverse per element.
nums.reduceRight((acc, num, i) => {
if (num === _0n$1)
return acc;
scratch[i] = mod$1(acc * scratch[i], p);
return mod$1(acc * num, p);
}, inverted);
return scratch;
}
|
|
// Rounds the quotient a/b to the nearest integer (bigint arithmetic).
const divNearest = (a, b) => (a + b / _2n$1) / b;
// Precomputed lattice-basis constants for the scalar decomposition used by
// splitScalarEndo. NOTE(review): these come with the curve parameters and
// should be treated as opaque — do not edit.
const ENDO = {
a1: BigInt('0x3086d221a7d46bcde86c90e49284eb15'),
b1: -_1n$1 * BigInt('0xe4437ed6010e88286f547fa90abfe4c3'),
a2: BigInt('0x114ca50f7a8e2f3f657c1108d9d44cfd8'),
b2: BigInt('0x3086d221a7d46bcde86c90e49284eb15'),
POW_2_128: BigInt('0x100000000000000000000000000000000'),
};
|
|
// Splits scalar k into two half-width scalars (k1, k2) for the curve
// endomorphism speedup, with explicit sign flags. Each half must fit in
// 128 bits after sign adjustment; otherwise the split failed.
function splitScalarEndo(k) {
const { n } = CURVE$1;
const { a1, b1, a2, b2, POW_2_128 } = ENDO;
const c1 = divNearest(b2 * k, n);
const c2 = divNearest(-b1 * k, n);
let k1 = mod$1(k - c1 * a1 - c2 * a2, n);
let k2 = mod$1(-c1 * b1 - c2 * b2, n);
// Values above 2^128 represent negatives mod n; record the sign and flip.
const k1neg = k1 > POW_2_128;
const k2neg = k2 > POW_2_128;
if (k1neg)
k1 = n - k1;
if (k2neg)
k2 = n - k2;
if (k1 > POW_2_128 || k2 > POW_2_128) {
throw new Error('splitScalarEndo: Endomorphism failed, k=' + k);
}
return { k1neg, k1, k2neg, k2 };
}
|
|
// Converts a message hash to an integer for ECDSA: keeps the leftmost 256
// bits, then subtracts n once if needed (a single subtraction, not full mod).
function truncateHash(hash) {
const { n } = CURVE$1;
const byteLength = hash.length;
const delta = byteLength * 8 - 256;
let h = bytesToNumber(hash);
if (delta > 0)
h = h >> BigInt(delta);
if (h >= n)
h -= n;
return h;
}
|
|
// Optional synchronous hash backends, installed once via the write-once
// utils accessor properties defined further down in this file.
let _sha256Sync;
let _hmacSha256Sync;
// Minimal HMAC-DRBG used to derive deterministic ECDSA nonces (RFC 6979
// style). Async methods go through utils.hmacSha256; the *Sync variants
// require hmacSha256Sync to have been set.
class HmacDrbg {
constructor() {
// Standard DRBG initial state: V = 0x01..01, K = 0x00..00.
this.v = new Uint8Array(32).fill(1);
this.k = new Uint8Array(32).fill(0);
this.counter = 0;
}
hmac(...values) {
return utils$1.hmacSha256(this.k, ...values);
}
hmacSync(...values) {
return _hmacSha256Sync(this.k, ...values);
}
checkSync() {
if (typeof _hmacSha256Sync !== 'function')
throw new ShaError('hmacSha256Sync needs to be set');
}
// Hard cap on nonce candidates so a broken hash cannot loop forever.
incr() {
if (this.counter >= 1000)
throw new Error('Tried 1,000 k values for sign(), all were invalid');
this.counter += 1;
}
// DRBG update step: K = HMAC(K, V || 0x00 || seed); V = HMAC(K, V);
// then a second round with 0x01 when seed material was provided.
async reseed(seed = new Uint8Array()) {
this.k = await this.hmac(this.v, Uint8Array.from([0x00]), seed);
this.v = await this.hmac(this.v);
if (seed.length === 0)
return;
this.k = await this.hmac(this.v, Uint8Array.from([0x01]), seed);
this.v = await this.hmac(this.v);
}
// Synchronous mirror of reseed().
reseedSync(seed = new Uint8Array()) {
this.checkSync();
this.k = this.hmacSync(this.v, Uint8Array.from([0x00]), seed);
this.v = this.hmacSync(this.v);
if (seed.length === 0)
return;
this.k = this.hmacSync(this.v, Uint8Array.from([0x01]), seed);
this.v = this.hmacSync(this.v);
}
// Produces the next 32-byte candidate: V = HMAC(K, V).
async generate() {
this.incr();
this.v = await this.hmac(this.v);
return this.v;
}
generateSync() {
this.checkSync();
this.incr();
this.v = this.hmacSync(this.v);
return this.v;
}
}
|
|
// True when 0 < num < n, i.e. a valid ECDSA scalar / private key.
function isWithinCurveOrder(num) {
  return num > _0n$1 && num < CURVE$1.n;
}
|
|
// True when 0 < num < P, i.e. a valid field element (e.g. an x-coordinate).
function isValidFieldElement(num) {
  return num > _0n$1 && num < CURVE$1.P;
}
|
|
// Turns a candidate nonce k (bytes), message integer m, and private scalar d
// into an ECDSA signature. Returns undefined when k, r, or s is out of range
// so the caller's DRBG loop can retry with the next candidate.
function kmdToSig(kBytes, m, d) {
const k = bytesToNumber(kBytes);
if (!isWithinCurveOrder(k))
return;
const { n } = CURVE$1;
const q = Point$1.BASE.multiply(k);
// r = (k*G).x mod n
const r = mod$1(q.x, n);
if (r === _0n$1)
return;
// s = k^-1 * (m + d*r) mod n
const s = mod$1(invert$1(k, n) * mod$1(m + d * r, n), n);
if (s === _0n$1)
return;
const sig = new Signature$1(r, s);
// Recovery id: bit 1 set when q.x wrapped past n, bit 0 is q.y's parity.
const recovery = (q.x === sig.r ? 0 : 2) | Number(q.y & _1n$1);
return { sig, recovery };
}
|
|
// Coerces a private key given as bigint, positive safe integer, 64-char hex
// string, or 32-byte Uint8Array into a bigint, and validates 0 < key < n.
function normalizePrivateKey(key) {
let num;
if (typeof key === 'bigint') {
num = key;
}
else if (typeof key === 'number' && Number.isSafeInteger(key) && key > 0) {
num = BigInt(key);
}
else if (typeof key === 'string') {
if (key.length !== 64)
throw new Error('Expected 32 bytes of private key');
num = hexToNumber(key);
}
else if (key instanceof Uint8Array) {
if (key.length !== 32)
throw new Error('Expected 32 bytes of private key');
num = bytesToNumber(key);
}
else {
throw new TypeError('Expected valid private key');
}
if (!isWithinCurveOrder(num))
throw new Error('Expected private key: 0 < key < n');
return num;
}
|
|
/**
 * Accepts either a Point instance or a hex/bytes encoding and returns a
 * validated Point. Point inputs are re-validated; encoded inputs are parsed
 * (fromHex validates internally).
 */
function normalizePublicKey(publicKey) {
  if (!(publicKey instanceof Point$1)) {
    return Point$1.fromHex(publicKey);
  }
  publicKey.assertValidity();
  return publicKey;
}
|
|
// Accepts a Signature instance or an encoded signature. Tries DER first and
// falls back to the 64-byte compact format when DER parsing fails.
function normalizeSignature(signature) {
if (signature instanceof Signature$1) {
signature.assertValidity();
return signature;
}
try {
return Signature$1.fromDER(signature);
}
catch (error) {
return Signature$1.fromCompact(signature);
}
}
|
|
// Derives the public-key bytes for a private key; isCompressed selects the
// compressed point encoding.
function getPublicKey$1(privateKey, isCompressed = false) {
return Point$1.fromPrivateKey(privateKey).toRawBytes(isCompressed);
}
|
|
/**
 * RFC 6979-style bits2int: interprets the leftmost 32 bytes of `bytes` as a
 * big-endian integer; trailing bytes beyond 32 are dropped.
 */
function bits2int(bytes) {
  const head = bytes.length > 32 ? bytes.slice(0, 32) : bytes;
  return bytesToNumber(head);
}
|
|
// RFC 6979 bits2octets: reduce the hash integer mod n and re-serialize to
// 32 bytes for use as DRBG seed material.
function bits2octets(bytes) {
const z1 = bits2int(bytes);
const z2 = mod$1(z1, CURVE$1.n);
// z2 cannot actually be negative here; the fallback mirrors the reference algorithm.
return int2octets(z2 < _0n$1 ? z1 : z2);
}
|
|
// RFC 6979 int2octets: fixed-width 32-byte big-endian serialization.
function int2octets(num) {
return numTo32b(num);
}
|
|
// Validates sign() inputs and builds the deterministic-nonce DRBG seed:
// seed = int2octets(d) || bits2octets(h1) [ || extraEntropy ].
// extraEntropy === true draws 32 fresh random bytes.
function initSigArgs(msgHash, privateKey, extraEntropy) {
if (msgHash == null)
throw new Error(`sign: expected valid message hash, not "${msgHash}"`);
const h1 = ensureBytes$1(msgHash);
const d = normalizePrivateKey(privateKey);
const seedArgs = [int2octets(d), bits2octets(h1)];
if (extraEntropy != null) {
if (extraEntropy === true)
extraEntropy = utils$1.randomBytes(32);
const e = ensureBytes$1(extraEntropy);
if (e.length !== 32)
throw new Error('sign: Expected 32 bytes of extra data');
seedArgs.push(e);
}
const seed = concatBytes$1(...seedArgs);
const m = bits2int(h1);
return { seed, m, d };
}
|
|
// Applies output options to a freshly produced signature:
// - canonical (default true): lower a high s value, flipping the recovery bit;
// - der (default true): DER bytes vs 64-byte compact bytes;
// - recovered: also return the recovery id as [bytes, recovery].
function finalizeSig(recSig, opts) {
let { sig, recovery } = recSig;
const { canonical, der, recovered } = Object.assign({ canonical: true, der: true }, opts);
if (canonical && sig.hasHighS()) {
sig = sig.normalizeS();
recovery ^= 1;
}
const hashed = der ? sig.toDERRawBytes() : sig.toCompactRawBytes();
return recovered ? [hashed, recovery] : hashed;
}
|
|
// Deterministic ECDSA signing: seeds an HMAC-DRBG from the private key and
// message hash, then draws nonce candidates until one yields a valid
// signature (kmdToSig returns undefined for invalid candidates).
async function sign$2(msgHash, privKey, opts = {}) {
const { seed, m, d } = initSigArgs(msgHash, privKey, opts.extraEntropy);
let sig;
const drbg = new HmacDrbg();
await drbg.reseed(seed);
while (!(sig = kmdToSig(await drbg.generate(), m, d)))
await drbg.reseed();
return finalizeSig(sig, opts);
}
|
|
// Default verify options: strict mode rejects malleable high-s signatures.
const vopts = { strict: true };
// Verifies an ECDSA signature. Malformed signature/hash/key inputs return
// false instead of throwing.
function verify$1(signature, msgHash, publicKey, opts = vopts) {
let sig;
try {
sig = normalizeSignature(signature);
msgHash = ensureBytes$1(msgHash);
}
catch (error) {
return false;
}
const { r, s } = sig;
if (opts.strict && sig.hasHighS())
return false;
const h = truncateHash(msgHash);
let P;
try {
P = normalizePublicKey(publicKey);
}
catch (error) {
return false;
}
const { n } = CURVE$1;
// Standard ECDSA check: R = (h/s)*G + (r/s)*P must satisfy R.x ≡ r (mod n).
const sinv = invert$1(s, n);
const u1 = mod$1(h * sinv, n);
const u2 = mod$1(r * sinv, n);
const R = Point$1.BASE.multiplyAndAddUnsafe(P, u1, u2);
// R === undefined means the sum was the point at infinity.
if (!R)
return false;
const v = mod$1(R.x, n);
return v === r;
}
|
|
// Enable an 8-bit precomputation window on the generator point.
Point$1.BASE._setWindowSize(8);
// Available crypto backends: node's crypto module, and WebCrypto when
// `self.crypto` exists (browser / worker environments).
const crypto$3 = {
node: nodeCrypto$1,
web: typeof self === 'object' && 'crypto' in self ? self.crypto : undefined,
};
// Cache of sha256(tag) || sha256(tag) prefixes used by taggedHash, keyed by tag.
const TAGGED_HASH_PREFIXES = {};
|
|
// Public utility namespace: byte/hex helpers, key generation, and pluggable
// sha256 / hmac-sha256 backends (async WebCrypto or node, plus optional
// synchronous hooks installed via the accessors defined after this object).
const utils$1 = {
bytesToHex: bytesToHex$2,
hexToBytes: hexToBytes$2,
concatBytes: concatBytes$1,
mod: mod$1,
invert: invert$1,
// True when the input parses as a valid private scalar; never throws.
isValidPrivateKey(privateKey) {
try {
normalizePrivateKey(privateKey);
return true;
}
catch (error) {
return false;
}
},
_bigintTo32Bytes: numTo32b,
_normalizePrivateKey: normalizePrivateKey,
// Derives a private key from 40-1024 bytes of hash output by reducing
// into [1, n-1], which avoids modulo bias for sufficiently long input.
hashToPrivateKey: (hash) => {
hash = ensureBytes$1(hash);
if (hash.length < 40 || hash.length > 1024)
throw new Error('Expected 40-1024 bytes of private key as per FIPS 186');
const num = mod$1(bytesToNumber(hash), CURVE$1.n - _1n$1) + _1n$1;
return numTo32b(num);
},
// CSPRNG bytes from WebCrypto or node crypto; throws when neither exists.
randomBytes: (bytesLength = 32) => {
if (crypto$3.web) {
return crypto$3.web.getRandomValues(new Uint8Array(bytesLength));
}
else if (crypto$3.node) {
const { randomBytes } = crypto$3.node;
return Uint8Array.from(randomBytes(bytesLength));
}
else {
throw new Error("The environment doesn't have randomBytes function");
}
},
// 40 random bytes through hashToPrivateKey -> uniform non-zero scalar.
randomPrivateKey: () => {
return utils$1.hashToPrivateKey(utils$1.randomBytes(40));
},
// sha256 over the concatenation of all message chunks.
sha256: async (...messages) => {
if (crypto$3.web) {
const buffer = await crypto$3.web.subtle.digest('SHA-256', concatBytes$1(...messages));
return new Uint8Array(buffer);
}
else if (crypto$3.node) {
const { createHash } = crypto$3.node;
const hash = createHash('sha256');
messages.forEach((m) => hash.update(m));
return Uint8Array.from(hash.digest());
}
else {
throw new Error("The environment doesn't have sha256 function");
}
},
// HMAC-SHA256 keyed by `key` over the concatenation of all message chunks.
hmacSha256: async (key, ...messages) => {
if (crypto$3.web) {
const ckey = await crypto$3.web.subtle.importKey('raw', key, { name: 'HMAC', hash: { name: 'SHA-256' } }, false, ['sign']);
const message = concatBytes$1(...messages);
const buffer = await crypto$3.web.subtle.sign('HMAC', ckey, message);
return new Uint8Array(buffer);
}
else if (crypto$3.node) {
const { createHmac } = crypto$3.node;
const hash = createHmac('sha256', key);
messages.forEach((m) => hash.update(m));
return Uint8Array.from(hash.digest());
}
else {
throw new Error("The environment doesn't have hmac-sha256 function");
}
},
// Placeholders; replaced by write-once accessor properties defined below.
sha256Sync: undefined,
hmacSha256Sync: undefined,
// Tagged hash: sha256(sha256(tag) || sha256(tag) || ...messages), with the
// per-tag prefix cached in TAGGED_HASH_PREFIXES.
taggedHash: async (tag, ...messages) => {
let tagP = TAGGED_HASH_PREFIXES[tag];
if (tagP === undefined) {
const tagH = await utils$1.sha256(Uint8Array.from(tag, (c) => c.charCodeAt(0)));
tagP = concatBytes$1(tagH, tagH);
TAGGED_HASH_PREFIXES[tag] = tagP;
}
return utils$1.sha256(tagP, ...messages);
},
// Synchronous taggedHash; requires sha256Sync to have been installed.
taggedHashSync: (tag, ...messages) => {
if (typeof _sha256Sync !== 'function')
throw new ShaError('sha256Sync is undefined, you need to set it');
let tagP = TAGGED_HASH_PREFIXES[tag];
if (tagP === undefined) {
const tagH = _sha256Sync(Uint8Array.from(tag, (c) => c.charCodeAt(0)));
tagP = concatBytes$1(tagH, tagH);
TAGGED_HASH_PREFIXES[tag] = tagP;
}
return _sha256Sync(tagP, ...messages);
},
// Precomputes point multiples with the given window size; the throwaway
// multiply forces the table to be built eagerly.
precompute(windowSize = 8, point = Point$1.BASE) {
const cached = point === Point$1.BASE ? point : new Point$1(point.x, point.y);
cached._setWindowSize(windowSize);
cached.multiply(_3n);
return cached;
},
};
|
|
// sha256Sync / hmacSha256Sync are write-once: the first assignment wins and
// later assignments are silently ignored, preventing backend swapping after
// the library is in use.
Object.defineProperties(utils$1, {
sha256Sync: {
configurable: false,
get() {
return _sha256Sync;
},
set(val) {
if (!_sha256Sync)
_sha256Sync = val;
},
},
hmacSha256Sync: {
configurable: false,
get() {
return _hmacSha256Sync;
},
set(val) {
if (!_hmacSha256Sync)
_hmacSha256Sync = val;
},
},
});
|
|
|
|
var sha3$1 = {exports: {}};
|
|
|
|
/**
|
|
* [js-sha3]{@link https://github.com/emn178/js-sha3}
|
|
*
|
|
* @version 0.8.0
|
|
* @author Chen, Yi-Cyuan [emn178@gmail.com]
|
|
* @copyright Chen, Yi-Cyuan 2015-2018
|
|
* @license MIT
|
|
*/
|
|
|
|
(function (module) {
|
|
/*jslint bitwise: true */
|
|
(function () {
|
|
|
|
// ---- js-sha3 module-wide constants and environment detection ----
var INPUT_ERROR = 'input is invalid type';
var FINALIZE_ERROR = 'finalize already called';
var WINDOW = typeof window === 'object';
var root = WINDOW ? window : {};
if (root.JS_SHA3_NO_WINDOW) {
WINDOW = false;
}
var WEB_WORKER = !WINDOW && typeof self === 'object';
var NODE_JS = !root.JS_SHA3_NO_NODE_JS && typeof process === 'object' && process.versions && process.versions.node;
if (NODE_JS) {
root = commonjsGlobal;
} else if (WEB_WORKER) {
root = self;
}
var COMMON_JS = !root.JS_SHA3_NO_COMMON_JS && 'object' === 'object' && module.exports;
var ARRAY_BUFFER = !root.JS_SHA3_NO_ARRAY_BUFFER && typeof ArrayBuffer !== 'undefined';
var HEX_CHARS = '0123456789abcdef'.split('');
// Domain-separation padding words for each variant, indexed by (byteIndex & 3).
var SHAKE_PADDING = [31, 7936, 2031616, 520093696];
var CSHAKE_PADDING = [4, 1024, 262144, 67108864];
var KECCAK_PADDING = [1, 256, 65536, 16777216];
var PADDING = [6, 1536, 393216, 100663296];
// Bit shift to place a byte within its 32-bit word (little-endian packing).
var SHIFT = [0, 8, 16, 24];
// Keccak-f[1600] round constants as interleaved 32-bit word pairs.
// NOTE(review): fixed by the Keccak specification — do not edit.
var RC = [1, 0, 32898, 0, 32906, 2147483648, 2147516416, 2147483648, 32907, 0, 2147483649,
0, 2147516545, 2147483648, 32777, 2147483648, 138, 0, 136, 0, 2147516425, 0,
2147483658, 0, 2147516555, 0, 139, 2147483648, 32905, 2147483648, 32771,
2147483648, 32770, 2147483648, 128, 2147483648, 32778, 0, 2147483658, 2147483648,
2147516545, 2147483648, 32896, 2147483648, 2147483649, 0, 2147516424, 2147483648];
var BITS = [224, 256, 384, 512];
var SHAKE_BITS = [128, 256];
var OUTPUT_TYPES = ['hex', 'buffer', 'arrayBuffer', 'array', 'digest'];
// cSHAKE bytepad width (rate in bytes) per security level.
var CSHAKE_BYTEPAD = {
'128': 168,
'256': 136
};

// Polyfills for very old engines (also force-enabled via the JS_SHA3_NO_* flags).
if (root.JS_SHA3_NO_NODE_JS || !Array.isArray) {
Array.isArray = function (obj) {
return Object.prototype.toString.call(obj) === '[object Array]';
};
}

if (ARRAY_BUFFER && (root.JS_SHA3_NO_ARRAY_BUFFER_IS_VIEW || !ArrayBuffer.isView)) {
ArrayBuffer.isView = function (obj) {
return typeof obj === 'object' && obj.buffer && obj.buffer.constructor === ArrayBuffer;
};
}
|
|
|
|
// Builds a one-shot hashing function: hash(message) -> digest in `outputType`.
var createOutputMethod = function (bits, padding, outputType) {
return function (message) {
return new Keccak(bits, padding, bits).update(message)[outputType]();
};
};

// Same for SHAKE: the caller chooses the output length per call.
var createShakeOutputMethod = function (bits, padding, outputType) {
return function (message, outputBits) {
return new Keccak(bits, padding, outputBits).update(message)[outputType]();
};
};

// cSHAKE one-shot delegates to the registered method's `update`, so the
// plain-SHAKE fallback (empty n and s) is applied consistently.
var createCshakeOutputMethod = function (bits, padding, outputType) {
return function (message, outputBits, n, s) {
return methods['cshake' + bits].update(message, outputBits, n, s)[outputType]();
};
};

// KMAC one-shot: keyed variant, likewise routed through the registered method.
var createKmacOutputMethod = function (bits, padding, outputType) {
return function (key, message, outputBits, s) {
return methods['kmac' + bits].update(key, message, outputBits, s)[outputType]();
};
};

// Attaches one variant per OUTPUT_TYPES entry (hex/buffer/arrayBuffer/array/
// digest) onto `method` and returns it.
var createOutputMethods = function (method, createMethod, bits, padding) {
for (var i = 0; i < OUTPUT_TYPES.length; ++i) {
var type = OUTPUT_TYPES[i];
method[type] = createMethod(bits, padding, type);
}
return method;
};
|
|
|
|
// Builds the public API object for a fixed-output algorithm (keccak/sha3):
// callable as hash(message), plus .create() / .update() and per-output-type
// variants.
var createMethod = function (bits, padding) {
var method = createOutputMethod(bits, padding, 'hex');
method.create = function () {
return new Keccak(bits, padding, bits);
};
method.update = function (message) {
return method.create().update(message);
};
return createOutputMethods(method, createOutputMethod, bits, padding);
};

// Same for SHAKE, where create/update take the desired output length.
var createShakeMethod = function (bits, padding) {
var method = createShakeOutputMethod(bits, padding, 'hex');
method.create = function (outputBits) {
return new Keccak(bits, padding, outputBits);
};
method.update = function (message, outputBits) {
return method.create(outputBits).update(message);
};
return createOutputMethods(method, createShakeOutputMethod, bits, padding);
};

// cSHAKE: with empty function-name n and customization s it degrades to
// plain SHAKE; otherwise n and s are absorbed via bytepad first.
var createCshakeMethod = function (bits, padding) {
var w = CSHAKE_BYTEPAD[bits];
var method = createCshakeOutputMethod(bits, padding, 'hex');
method.create = function (outputBits, n, s) {
if (!n && !s) {
return methods['shake' + bits].create(outputBits);
} else {
return new Keccak(bits, padding, outputBits).bytepad([n, s], w);
}
};
method.update = function (message, outputBits, n, s) {
return method.create(outputBits, n, s).update(message);
};
return createOutputMethods(method, createCshakeOutputMethod, bits, padding);
};

// KMAC: absorbs the 'KMAC' label + customization s, then the key, both
// bytepad-framed, before the message.
var createKmacMethod = function (bits, padding) {
var w = CSHAKE_BYTEPAD[bits];
var method = createKmacOutputMethod(bits, padding, 'hex');
method.create = function (key, outputBits, s) {
return new Kmac(bits, padding, outputBits).bytepad(['KMAC', s], w).bytepad([key], w);
};
method.update = function (key, message, outputBits, s) {
return method.create(key, outputBits, s).update(message);
};
return createOutputMethods(method, createKmacOutputMethod, bits, padding);
};
|
|
|
|
// Registry of algorithm families; each expands into one method per supported
// bit size, registered under both underscore ('keccak_256') and, for
// non-sha3 families, the concatenated alias ('keccak256').
var algorithms = [
{ name: 'keccak', padding: KECCAK_PADDING, bits: BITS, createMethod: createMethod },
{ name: 'sha3', padding: PADDING, bits: BITS, createMethod: createMethod },
{ name: 'shake', padding: SHAKE_PADDING, bits: SHAKE_BITS, createMethod: createShakeMethod },
{ name: 'cshake', padding: CSHAKE_PADDING, bits: SHAKE_BITS, createMethod: createCshakeMethod },
{ name: 'kmac', padding: CSHAKE_PADDING, bits: SHAKE_BITS, createMethod: createKmacMethod }
];

var methods = {}, methodNames = [];

for (var i = 0; i < algorithms.length; ++i) {
var algorithm = algorithms[i];
var bits = algorithm.bits;
for (var j = 0; j < bits.length; ++j) {
var methodName = algorithm.name + '_' + bits[j];
methodNames.push(methodName);
methods[methodName] = algorithm.createMethod(bits[j], algorithm.padding);
if (algorithm.name !== 'sha3') {
var newMethodName = algorithm.name + bits[j];
methodNames.push(newMethodName);
methods[newMethodName] = methods[methodName];
}
}
}
|
|
|
|
// Keccak sponge instance.
// bits: security level (capacity = 2*bits), padding: per-variant
// domain-separation words, outputBits: digest length in bits.
function Keccak(bits, padding, outputBits) {
this.blocks = [];        // current input block as little-endian 32-bit words
this.s = [];             // 1600-bit state as 50 32-bit words
this.padding = padding;
this.outputBits = outputBits;
this.reset = true;       // true when blocks[] must be re-initialized
this.finalized = false;
this.block = 0;          // carry word between absorbed blocks
this.start = 0;          // byte offset within the current block
this.blockCount = (1600 - (bits << 1)) >> 5;  // rate in 32-bit words
this.byteCount = this.blockCount << 2;        // rate in bytes
this.outputBlocks = outputBits >> 5;          // whole 32-bit output words
this.extraBytes = (outputBits & 31) >> 3;     // output bytes beyond whole words

for (var i = 0; i < 50; ++i) {
this.s[i] = 0;
}
}
|
|
|
|
// Absorbs input into the sponge. Accepts a string (UTF-8 encoded on the fly),
// plain array, ArrayBuffer, or typed-array view. Chainable.
Keccak.prototype.update = function (message) {
if (this.finalized) {
throw new Error(FINALIZE_ERROR);
}
var notString, type = typeof message;
// Input validation / normalization: ArrayBuffer becomes a Uint8Array view;
// anything else must be a string, array, or ArrayBuffer view.
if (type !== 'string') {
if (type === 'object') {
if (message === null) {
throw new Error(INPUT_ERROR);
} else if (ARRAY_BUFFER && message.constructor === ArrayBuffer) {
message = new Uint8Array(message);
} else if (!Array.isArray(message)) {
if (!ARRAY_BUFFER || !ArrayBuffer.isView(message)) {
throw new Error(INPUT_ERROR);
}
}
} else {
throw new Error(INPUT_ERROR);
}
notString = true;
}
var blocks = this.blocks, byteCount = this.byteCount, length = message.length,
blockCount = this.blockCount, index = 0, s = this.s, i, code;

while (index < length) {
// Begin a fresh rate-sized block, seeding word 0 with the carry word.
if (this.reset) {
this.reset = false;
blocks[0] = this.block;
for (i = 1; i < blockCount + 1; ++i) {
blocks[i] = 0;
}
}
if (notString) {
// Byte input: pack bytes little-endian into 32-bit words.
for (i = this.start; index < length && i < byteCount; ++index) {
blocks[i >> 2] |= message[index] << SHIFT[i++ & 3];
}
} else {
// String input: UTF-8 encode inline (1-4 bytes per code point,
// including surrogate-pair handling).
for (i = this.start; index < length && i < byteCount; ++index) {
code = message.charCodeAt(index);
if (code < 0x80) {
blocks[i >> 2] |= code << SHIFT[i++ & 3];
} else if (code < 0x800) {
blocks[i >> 2] |= (0xc0 | (code >> 6)) << SHIFT[i++ & 3];
blocks[i >> 2] |= (0x80 | (code & 0x3f)) << SHIFT[i++ & 3];
} else if (code < 0xd800 || code >= 0xe000) {
blocks[i >> 2] |= (0xe0 | (code >> 12)) << SHIFT[i++ & 3];
blocks[i >> 2] |= (0x80 | ((code >> 6) & 0x3f)) << SHIFT[i++ & 3];
blocks[i >> 2] |= (0x80 | (code & 0x3f)) << SHIFT[i++ & 3];
} else {
// Surrogate pair -> single code point above U+FFFF.
code = 0x10000 + (((code & 0x3ff) << 10) | (message.charCodeAt(++index) & 0x3ff));
blocks[i >> 2] |= (0xf0 | (code >> 18)) << SHIFT[i++ & 3];
blocks[i >> 2] |= (0x80 | ((code >> 12) & 0x3f)) << SHIFT[i++ & 3];
blocks[i >> 2] |= (0x80 | ((code >> 6) & 0x3f)) << SHIFT[i++ & 3];
blocks[i >> 2] |= (0x80 | (code & 0x3f)) << SHIFT[i++ & 3];
}
}
}
this.lastByteIndex = i;
if (i >= byteCount) {
// Block full: XOR it into the state and run the permutation. Bytes
// written past the rate spill into blocks[blockCount] and carry over.
this.start = i - byteCount;
this.block = blocks[blockCount];
for (i = 0; i < blockCount; ++i) {
s[i] ^= blocks[i];
}
f(s);
this.reset = true;
} else {
this.start = i;
}
}
return this;
};
|
|
|
|
// left_encode / right_encode (NIST SP 800-185): absorbs x as a minimal
// big-endian byte string with its byte count prepended (left) or appended
// (right = true). Returns the number of bytes absorbed.
Keccak.prototype.encode = function (x, right) {
var o = x & 255, n = 1;
var bytes = [o];
x = x >> 8;
o = x & 255;
while (o > 0) {
bytes.unshift(o);
x = x >> 8;
o = x & 255;
++n;
}
if (right) {
bytes.push(n);
} else {
bytes.unshift(n);
}
this.update(bytes);
return bytes.length;
};
|
|
|
|
// encode_string (NIST SP 800-185): absorbs left_encode(bit length) followed
// by the string itself. For JS strings the byte length is the UTF-8 length.
// Returns the total number of bytes absorbed.
Keccak.prototype.encodeString = function (str) {
var notString, type = typeof str;
// Same input normalization as update().
if (type !== 'string') {
if (type === 'object') {
if (str === null) {
throw new Error(INPUT_ERROR);
} else if (ARRAY_BUFFER && str.constructor === ArrayBuffer) {
str = new Uint8Array(str);
} else if (!Array.isArray(str)) {
if (!ARRAY_BUFFER || !ArrayBuffer.isView(str)) {
throw new Error(INPUT_ERROR);
}
}
} else {
throw new Error(INPUT_ERROR);
}
notString = true;
}
var bytes = 0, length = str.length;
if (notString) {
bytes = length;
} else {
// Count UTF-8 bytes per code point (1-4), pairing surrogates.
for (var i = 0; i < str.length; ++i) {
var code = str.charCodeAt(i);
if (code < 0x80) {
bytes += 1;
} else if (code < 0x800) {
bytes += 2;
} else if (code < 0xd800 || code >= 0xe000) {
bytes += 3;
} else {
code = 0x10000 + (((code & 0x3ff) << 10) | (str.charCodeAt(++i) & 0x3ff));
bytes += 4;
}
}
}
bytes += this.encode(bytes * 8);
this.update(str);
return bytes;
};
|
|
|
|
// bytepad (NIST SP 800-185): absorbs left_encode(w) and each encoded string,
// then zero-pads the absorbed length up to a multiple of w. Chainable.
Keccak.prototype.bytepad = function (strs, w) {
var bytes = this.encode(w);
for (var i = 0; i < strs.length; ++i) {
bytes += this.encodeString(strs[i]);
}
var paddingBytes = w - bytes % w;
var zeros = [];
zeros.length = paddingBytes;
this.update(zeros);
return this;
};
|
|
|
|
// Applies the variant's domain-separation padding plus the final 1 bit,
// absorbs the last block, and runs the permutation. Idempotent.
Keccak.prototype.finalize = function () {
if (this.finalized) {
return;
}
this.finalized = true;
var blocks = this.blocks, i = this.lastByteIndex, blockCount = this.blockCount, s = this.s;
blocks[i >> 2] |= this.padding[i & 3];
// If input ended exactly on a block boundary, start a fresh block
// (seeded with the carry word) to hold the padding.
if (this.lastByteIndex === this.byteCount) {
blocks[0] = blocks[blockCount];
for (i = 1; i < blockCount + 1; ++i) {
blocks[i] = 0;
}
}
// Final 1 bit at the end of the rate (pad10*1).
blocks[blockCount - 1] |= 0x80000000;
for (i = 0; i < blockCount; ++i) {
s[i] ^= blocks[i];
}
f(s);
};
|
|
|
|
// Squeezes the digest as a lowercase hex string, running extra permutations
// when the requested output exceeds one rate's worth of state.
Keccak.prototype.toString = Keccak.prototype.hex = function () {
this.finalize();

var blockCount = this.blockCount, s = this.s, outputBlocks = this.outputBlocks,
extraBytes = this.extraBytes, i = 0, j = 0;
var hex = '', block;
while (j < outputBlocks) {
for (i = 0; i < blockCount && j < outputBlocks; ++i, ++j) {
block = s[i];
// Emit the word's 4 bytes in little-endian byte order, hex per byte.
hex += HEX_CHARS[(block >> 4) & 0x0F] + HEX_CHARS[block & 0x0F] +
HEX_CHARS[(block >> 12) & 0x0F] + HEX_CHARS[(block >> 8) & 0x0F] +
HEX_CHARS[(block >> 20) & 0x0F] + HEX_CHARS[(block >> 16) & 0x0F] +
HEX_CHARS[(block >> 28) & 0x0F] + HEX_CHARS[(block >> 24) & 0x0F];
}
if (j % blockCount === 0) {
f(s);
i = 0;
}
}
// Remaining 1-3 bytes of a partial trailing word.
if (extraBytes) {
block = s[i];
hex += HEX_CHARS[(block >> 4) & 0x0F] + HEX_CHARS[block & 0x0F];
if (extraBytes > 1) {
hex += HEX_CHARS[(block >> 12) & 0x0F] + HEX_CHARS[(block >> 8) & 0x0F];
}
if (extraBytes > 2) {
hex += HEX_CHARS[(block >> 20) & 0x0F] + HEX_CHARS[(block >> 16) & 0x0F];
}
}
return hex;
};
|
|
|
|
// Squeezes the digest into an ArrayBuffer of exactly outputBits/8 bytes,
// copying state words and trimming any partial trailing word.
Keccak.prototype.arrayBuffer = function () {
this.finalize();

var blockCount = this.blockCount, s = this.s, outputBlocks = this.outputBlocks,
extraBytes = this.extraBytes, i = 0, j = 0;
var bytes = this.outputBits >> 3;
var buffer;
// Over-allocate by one word when the output is not word-aligned; the
// buffer is sliced down to the exact byte length afterwards.
if (extraBytes) {
buffer = new ArrayBuffer((outputBlocks + 1) << 2);
} else {
buffer = new ArrayBuffer(bytes);
}
var array = new Uint32Array(buffer);
while (j < outputBlocks) {
for (i = 0; i < blockCount && j < outputBlocks; ++i, ++j) {
array[j] = s[i];
}
if (j % blockCount === 0) {
f(s);
}
}
if (extraBytes) {
array[i] = s[i];
buffer = buffer.slice(0, bytes);
}
return buffer;
};

Keccak.prototype.buffer = Keccak.prototype.arrayBuffer;
|
|
|
|
// Squeezes the digest as a plain array of byte values (little-endian byte
// order within each 32-bit state word).
Keccak.prototype.digest = Keccak.prototype.array = function () {
this.finalize();

var blockCount = this.blockCount, s = this.s, outputBlocks = this.outputBlocks,
extraBytes = this.extraBytes, i = 0, j = 0;
var array = [], offset, block;
while (j < outputBlocks) {
for (i = 0; i < blockCount && j < outputBlocks; ++i, ++j) {
offset = j << 2;
block = s[i];
array[offset] = block & 0xFF;
array[offset + 1] = (block >> 8) & 0xFF;
array[offset + 2] = (block >> 16) & 0xFF;
array[offset + 3] = (block >> 24) & 0xFF;
}
if (j % blockCount === 0) {
f(s);
}
}
// Remaining 1-3 bytes of a partial trailing word.
if (extraBytes) {
offset = j << 2;
block = s[i];
array[offset] = block & 0xFF;
if (extraBytes > 1) {
array[offset + 1] = (block >> 8) & 0xFF;
}
if (extraBytes > 2) {
array[offset + 2] = (block >> 16) & 0xFF;
}
}
return array;
};
|
|
|
|
// KMAC variant of the Keccak sponge: identical construction, but
// finalization additionally absorbs an encoding of the output length
// (see Kmac.prototype.finalize below).
function Kmac(bits, padding, outputBits) {
  Keccak.call(this, bits, padding, outputBits);
}

// Prototype chain via a throwaway Keccak instance (legacy inheritance
// pattern preserved from the vendored js-sha3 source).
Kmac.prototype = new Keccak();
|
|
|
|
// Absorbs the encoded output bit length before running the standard
// Keccak finalization — the step that distinguishes KMAC from plain
// Keccak output.
Kmac.prototype.finalize = function () {
  this.encode(this.outputBits, true);
  return Keccak.prototype.finalize.call(this);
};
|
|
|
|
// Keccak-f[1600] permutation over the 50-element state `s`. Each 64-bit
// lane is split across two adjacent 32-bit entries (the paired h/l
// operations below act on the two halves together). The loop runs 24
// rounds, consuming two round-constant words (RC, defined elsewhere in
// this module) per round. Fully unrolled for speed — do not reorder.
var f = function (s) {
  var h, l, n, c0, c1, c2, c3, c4, c5, c6, c7, c8, c9,
    b0, b1, b2, b3, b4, b5, b6, b7, b8, b9, b10, b11, b12, b13, b14, b15, b16, b17,
    b18, b19, b20, b21, b22, b23, b24, b25, b26, b27, b28, b29, b30, b31, b32, b33,
    b34, b35, b36, b37, b38, b39, b40, b41, b42, b43, b44, b45, b46, b47, b48, b49;
  for (n = 0; n < 48; n += 2) {
    // --- theta: column parities ---
    c0 = s[0] ^ s[10] ^ s[20] ^ s[30] ^ s[40];
    c1 = s[1] ^ s[11] ^ s[21] ^ s[31] ^ s[41];
    c2 = s[2] ^ s[12] ^ s[22] ^ s[32] ^ s[42];
    c3 = s[3] ^ s[13] ^ s[23] ^ s[33] ^ s[43];
    c4 = s[4] ^ s[14] ^ s[24] ^ s[34] ^ s[44];
    c5 = s[5] ^ s[15] ^ s[25] ^ s[35] ^ s[45];
    c6 = s[6] ^ s[16] ^ s[26] ^ s[36] ^ s[46];
    c7 = s[7] ^ s[17] ^ s[27] ^ s[37] ^ s[47];
    c8 = s[8] ^ s[18] ^ s[28] ^ s[38] ^ s[48];
    c9 = s[9] ^ s[19] ^ s[29] ^ s[39] ^ s[49];

    // --- theta: mix each column with the rotated parity of its neighbors ---
    h = c8 ^ ((c2 << 1) | (c3 >>> 31));
    l = c9 ^ ((c3 << 1) | (c2 >>> 31));
    s[0] ^= h;
    s[1] ^= l;
    s[10] ^= h;
    s[11] ^= l;
    s[20] ^= h;
    s[21] ^= l;
    s[30] ^= h;
    s[31] ^= l;
    s[40] ^= h;
    s[41] ^= l;
    h = c0 ^ ((c4 << 1) | (c5 >>> 31));
    l = c1 ^ ((c5 << 1) | (c4 >>> 31));
    s[2] ^= h;
    s[3] ^= l;
    s[12] ^= h;
    s[13] ^= l;
    s[22] ^= h;
    s[23] ^= l;
    s[32] ^= h;
    s[33] ^= l;
    s[42] ^= h;
    s[43] ^= l;
    h = c2 ^ ((c6 << 1) | (c7 >>> 31));
    l = c3 ^ ((c7 << 1) | (c6 >>> 31));
    s[4] ^= h;
    s[5] ^= l;
    s[14] ^= h;
    s[15] ^= l;
    s[24] ^= h;
    s[25] ^= l;
    s[34] ^= h;
    s[35] ^= l;
    s[44] ^= h;
    s[45] ^= l;
    h = c4 ^ ((c8 << 1) | (c9 >>> 31));
    l = c5 ^ ((c9 << 1) | (c8 >>> 31));
    s[6] ^= h;
    s[7] ^= l;
    s[16] ^= h;
    s[17] ^= l;
    s[26] ^= h;
    s[27] ^= l;
    s[36] ^= h;
    s[37] ^= l;
    s[46] ^= h;
    s[47] ^= l;
    h = c6 ^ ((c0 << 1) | (c1 >>> 31));
    l = c7 ^ ((c1 << 1) | (c0 >>> 31));
    s[8] ^= h;
    s[9] ^= l;
    s[18] ^= h;
    s[19] ^= l;
    s[28] ^= h;
    s[29] ^= l;
    s[38] ^= h;
    s[39] ^= l;
    s[48] ^= h;
    s[49] ^= l;

    // --- rho + pi: rotate each lane and move it to its new position ---
    b0 = s[0];
    b1 = s[1];
    b32 = (s[11] << 4) | (s[10] >>> 28);
    b33 = (s[10] << 4) | (s[11] >>> 28);
    b14 = (s[20] << 3) | (s[21] >>> 29);
    b15 = (s[21] << 3) | (s[20] >>> 29);
    b46 = (s[31] << 9) | (s[30] >>> 23);
    b47 = (s[30] << 9) | (s[31] >>> 23);
    b28 = (s[40] << 18) | (s[41] >>> 14);
    b29 = (s[41] << 18) | (s[40] >>> 14);
    b20 = (s[2] << 1) | (s[3] >>> 31);
    b21 = (s[3] << 1) | (s[2] >>> 31);
    b2 = (s[13] << 12) | (s[12] >>> 20);
    b3 = (s[12] << 12) | (s[13] >>> 20);
    b34 = (s[22] << 10) | (s[23] >>> 22);
    b35 = (s[23] << 10) | (s[22] >>> 22);
    b16 = (s[33] << 13) | (s[32] >>> 19);
    b17 = (s[32] << 13) | (s[33] >>> 19);
    b48 = (s[42] << 2) | (s[43] >>> 30);
    b49 = (s[43] << 2) | (s[42] >>> 30);
    b40 = (s[5] << 30) | (s[4] >>> 2);
    b41 = (s[4] << 30) | (s[5] >>> 2);
    b22 = (s[14] << 6) | (s[15] >>> 26);
    b23 = (s[15] << 6) | (s[14] >>> 26);
    b4 = (s[25] << 11) | (s[24] >>> 21);
    b5 = (s[24] << 11) | (s[25] >>> 21);
    b36 = (s[34] << 15) | (s[35] >>> 17);
    b37 = (s[35] << 15) | (s[34] >>> 17);
    b18 = (s[45] << 29) | (s[44] >>> 3);
    b19 = (s[44] << 29) | (s[45] >>> 3);
    b10 = (s[6] << 28) | (s[7] >>> 4);
    b11 = (s[7] << 28) | (s[6] >>> 4);
    b42 = (s[17] << 23) | (s[16] >>> 9);
    b43 = (s[16] << 23) | (s[17] >>> 9);
    b24 = (s[26] << 25) | (s[27] >>> 7);
    b25 = (s[27] << 25) | (s[26] >>> 7);
    b6 = (s[36] << 21) | (s[37] >>> 11);
    b7 = (s[37] << 21) | (s[36] >>> 11);
    b38 = (s[47] << 24) | (s[46] >>> 8);
    b39 = (s[46] << 24) | (s[47] >>> 8);
    b30 = (s[8] << 27) | (s[9] >>> 5);
    b31 = (s[9] << 27) | (s[8] >>> 5);
    b12 = (s[18] << 20) | (s[19] >>> 12);
    b13 = (s[19] << 20) | (s[18] >>> 12);
    b44 = (s[29] << 7) | (s[28] >>> 25);
    b45 = (s[28] << 7) | (s[29] >>> 25);
    b26 = (s[38] << 8) | (s[39] >>> 24);
    b27 = (s[39] << 8) | (s[38] >>> 24);
    b8 = (s[48] << 14) | (s[49] >>> 18);
    b9 = (s[49] << 14) | (s[48] >>> 18);

    // --- chi: non-linear row mixing, s[x] = b[x] ^ (~b[x+2] & b[x+4]) per row ---
    s[0] = b0 ^ (~b2 & b4);
    s[1] = b1 ^ (~b3 & b5);
    s[10] = b10 ^ (~b12 & b14);
    s[11] = b11 ^ (~b13 & b15);
    s[20] = b20 ^ (~b22 & b24);
    s[21] = b21 ^ (~b23 & b25);
    s[30] = b30 ^ (~b32 & b34);
    s[31] = b31 ^ (~b33 & b35);
    s[40] = b40 ^ (~b42 & b44);
    s[41] = b41 ^ (~b43 & b45);
    s[2] = b2 ^ (~b4 & b6);
    s[3] = b3 ^ (~b5 & b7);
    s[12] = b12 ^ (~b14 & b16);
    s[13] = b13 ^ (~b15 & b17);
    s[22] = b22 ^ (~b24 & b26);
    s[23] = b23 ^ (~b25 & b27);
    s[32] = b32 ^ (~b34 & b36);
    s[33] = b33 ^ (~b35 & b37);
    s[42] = b42 ^ (~b44 & b46);
    s[43] = b43 ^ (~b45 & b47);
    s[4] = b4 ^ (~b6 & b8);
    s[5] = b5 ^ (~b7 & b9);
    s[14] = b14 ^ (~b16 & b18);
    s[15] = b15 ^ (~b17 & b19);
    s[24] = b24 ^ (~b26 & b28);
    s[25] = b25 ^ (~b27 & b29);
    s[34] = b34 ^ (~b36 & b38);
    s[35] = b35 ^ (~b37 & b39);
    s[44] = b44 ^ (~b46 & b48);
    s[45] = b45 ^ (~b47 & b49);
    s[6] = b6 ^ (~b8 & b0);
    s[7] = b7 ^ (~b9 & b1);
    s[16] = b16 ^ (~b18 & b10);
    s[17] = b17 ^ (~b19 & b11);
    s[26] = b26 ^ (~b28 & b20);
    s[27] = b27 ^ (~b29 & b21);
    s[36] = b36 ^ (~b38 & b30);
    s[37] = b37 ^ (~b39 & b31);
    s[46] = b46 ^ (~b48 & b40);
    s[47] = b47 ^ (~b49 & b41);
    s[8] = b8 ^ (~b0 & b2);
    s[9] = b9 ^ (~b1 & b3);
    s[18] = b18 ^ (~b10 & b12);
    s[19] = b19 ^ (~b11 & b13);
    s[28] = b28 ^ (~b20 & b22);
    s[29] = b29 ^ (~b21 & b23);
    s[38] = b38 ^ (~b30 & b32);
    s[39] = b39 ^ (~b31 & b33);
    s[48] = b48 ^ (~b40 & b42);
    s[49] = b49 ^ (~b41 & b43);

    // --- iota: fold in the two 32-bit halves of this round's constant ---
    s[0] ^= RC[n];
    s[1] ^= RC[n + 1];
  }
};
|
|
|
|
if (COMMON_JS) {
|
|
module.exports = methods;
|
|
} else {
|
|
for (i = 0; i < methodNames.length; ++i) {
|
|
root[methodNames[i]] = methods[methodNames[i]];
|
|
}
|
|
}
|
|
})();
|
|
} (sha3$1));
|
|
|
|
// CommonJS export surface of the bundled js-sha3 module above.
var sha3 = sha3$1.exports;
|
|
|
|
/**
 * Builds a bitcoin-style (base-x) codec for an arbitrary alphabet.
 * Leading zero bytes map 1:1 to leading first-alphabet characters.
 *
 * @param {string} ALPHABET - the digit characters, lowest value first
 * @param {string} name - codec name, used in decode error messages
 * @returns {{encode: Function, decodeUnsafe: Function, decode: Function}}
 * @throws {TypeError} on an over-long or ambiguous alphabet
 */
function base(ALPHABET, name) {
  if (ALPHABET.length >= 255) {
    throw new TypeError('Alphabet too long');
  }
  // Reverse lookup: char code -> digit value; 255 marks "not in alphabet".
  var BASE_MAP = new Uint8Array(256).fill(255);
  for (var i = 0; i < ALPHABET.length; i++) {
    var charCode = ALPHABET.charCodeAt(i);
    if (BASE_MAP[charCode] !== 255) {
      throw new TypeError(ALPHABET.charAt(i) + ' is ambiguous');
    }
    BASE_MAP[charCode] = i;
  }
  var BASE = ALPHABET.length;
  var LEADER = ALPHABET.charAt(0);
  var FACTOR = Math.log(BASE) / Math.log(256);   // digits -> bytes growth
  var iFACTOR = Math.log(256) / Math.log(BASE);  // bytes -> digits growth

  /** Encodes bytes (Uint8Array/TypedArray/Array) to a base-x string. */
  function encode(source) {
    if (source instanceof Uint8Array) {
      // already the shape we need
    } else if (ArrayBuffer.isView(source)) {
      source = new Uint8Array(source.buffer, source.byteOffset, source.byteLength);
    } else if (Array.isArray(source)) {
      source = Uint8Array.from(source);
    }
    if (!(source instanceof Uint8Array)) {
      throw new TypeError('Expected Uint8Array');
    }
    if (source.length === 0) {
      return '';
    }
    // Count leading zero bytes; each becomes one LEADER character.
    var zeroes = 0;
    var begin = 0;
    var end = source.length;
    while (begin !== end && source[begin] === 0) {
      begin++;
      zeroes++;
    }
    // Upper bound on the number of base-BASE digits needed.
    var size = ((end - begin) * iFACTOR + 1) >>> 0;
    var digits = new Uint8Array(size);
    var length = 0;
    // Standard big-integer base conversion: fold each byte into the
    // digit buffer, propagating the carry from least-significant end.
    while (begin !== end) {
      var carry = source[begin];
      var applied = 0;
      for (var pos = size - 1; (carry !== 0 || applied < length) && pos !== -1; pos--, applied++) {
        carry += (256 * digits[pos]) >>> 0;
        digits[pos] = (carry % BASE) >>> 0;
        carry = (carry / BASE) >>> 0;
      }
      if (carry !== 0) {
        throw new Error('Non-zero carry');
      }
      length = applied;
      begin++;
    }
    // Skip leading zero digits in the work buffer.
    var it = size - length;
    while (it !== size && digits[it] === 0) {
      it++;
    }
    var str = LEADER.repeat(zeroes);
    for (; it < size; ++it) {
      str += ALPHABET.charAt(digits[it]);
    }
    return str;
  }

  /**
   * Decodes a base-x string to bytes, or returns undefined on invalid
   * input (leading space or character outside the alphabet).
   */
  function decodeUnsafe(source) {
    if (typeof source !== 'string') {
      throw new TypeError('Expected String');
    }
    if (source.length === 0) {
      return new Uint8Array();
    }
    var psz = 0;
    if (source[psz] === ' ') {
      return;
    }
    // Leading LEADER characters become leading zero bytes.
    var zeroes = 0;
    while (source[psz] === LEADER) {
      zeroes++;
      psz++;
    }
    // Upper bound on the number of output bytes.
    var size = ((source.length - psz) * FACTOR + 1) >>> 0;
    var bytes = new Uint8Array(size);
    var length = 0;
    while (source[psz]) {
      var carry = BASE_MAP[source.charCodeAt(psz)];
      if (carry === 255) {
        return; // character outside the alphabet
      }
      var applied = 0;
      for (var pos = size - 1; (carry !== 0 || applied < length) && pos !== -1; pos--, applied++) {
        carry += (BASE * bytes[pos]) >>> 0;
        bytes[pos] = (carry % 256) >>> 0;
        carry = (carry / 256) >>> 0;
      }
      if (carry !== 0) {
        throw new Error('Non-zero carry');
      }
      length = applied;
      psz++;
    }
    if (source[psz] === ' ') {
      return;
    }
    // Trim leading zero bytes in the work buffer, then prepend the
    // zeroes counted from LEADER characters.
    var it = size - length;
    while (it !== size && bytes[it] === 0) {
      it++;
    }
    var out = new Uint8Array(zeroes + (size - it));
    var j = zeroes;
    while (it !== size) {
      out[j++] = bytes[it++];
    }
    return out;
  }

  /** Strict decode: throws instead of returning undefined. */
  function decode(string) {
    var buffer = decodeUnsafe(string);
    if (buffer) {
      return buffer;
    }
    throw new Error(`Non-${ name } character`);
  }

  return {
    encode: encode,
    decodeUnsafe: decodeUnsafe,
    decode: decode
  };
}
|
|
// base-x module export, re-exported under the name the bundled
// multiformats code expects for its inlined ("brrp") dependency.
var src$1 = base;
var _brrp__multiformats_scope_baseX = src$1;
|
|
|
|
// Byte-wise equality of two binary views; identity short-circuits.
const equals$2 = (aa, bb) => {
  if (aa === bb) {
    return true;
  }
  if (aa.byteLength !== bb.byteLength) {
    return false;
  }
  let idx = aa.byteLength;
  while (idx--) {
    if (aa[idx] !== bb[idx]) {
      return false;
    }
  }
  return true;
};
|
|
// Normalizes any binary-ish value to a plain Uint8Array.
// A genuine Uint8Array (not a subclass) passes through untouched; an
// ArrayBuffer is wrapped; any other view is re-viewed without copying.
const coerce = o => {
  if (o instanceof Uint8Array && o.constructor.name === 'Uint8Array') {
    return o;
  }
  if (o instanceof ArrayBuffer) {
    return new Uint8Array(o);
  }
  if (ArrayBuffer.isView(o)) {
    return new Uint8Array(o.buffer, o.byteOffset, o.byteLength);
  }
  throw new Error('Unknown type, must be binary type');
};
|
|
// UTF-8 string <-> bytes helpers shared by the multibase codecs.
const fromString$2 = str => {
  return new TextEncoder().encode(str);
};
const toString$4 = b => {
  return new TextDecoder().decode(b);
};
|
|
|
|
/**
 * One-way multibase encoder: prepends the single-character multibase
 * prefix to the output of the supplied base encoding function.
 */
class Encoder {
  constructor(name, prefix, baseEncode) {
    this.name = name;
    this.prefix = prefix;
    this.baseEncode = baseEncode;
  }
  /**
   * @param {Uint8Array} bytes - payload to encode
   * @returns {string} prefix + base-encoded payload
   * @throws {Error} when the input is not a Uint8Array
   */
  encode(bytes) {
    if (!(bytes instanceof Uint8Array)) {
      throw Error('Unknown type, must be binary type');
    }
    return `${ this.prefix }${ this.baseEncode(bytes) }`;
  }
}
|
|
/**
 * One-way multibase decoder: validates the single-character prefix,
 * strips it, and hands the remainder to the supplied base decoder.
 */
class Decoder {
  constructor(name, prefix, baseDecode) {
    this.name = name;
    this.prefix = prefix;
    const codePoint = prefix.codePointAt(0);
    if (codePoint === undefined) {
      throw new Error('Invalid prefix character');
    }
    this.prefixCodePoint = codePoint;
    this.baseDecode = baseDecode;
  }
  /**
   * @param {string} text - multibase string, including its prefix
   * @returns decoded payload from baseDecode
   * @throws {Error} on non-string input or a mismatched prefix
   */
  decode(text) {
    if (typeof text !== 'string') {
      throw Error('Can only multibase decode strings');
    }
    if (text.codePointAt(0) !== this.prefixCodePoint) {
      throw Error(`Unable to decode multibase string ${ JSON.stringify(text) }, ${ this.name } decoder only supports inputs prefixed with ${ this.prefix }`);
    }
    return this.baseDecode(text.slice(this.prefix.length));
  }
  // Combines this decoder with another into a prefix-dispatching one.
  or(decoder) {
    return or(this, decoder);
  }
}
|
|
/**
 * Dispatching decoder built from several prefix-keyed decoders: the
 * first character of the input selects which decoder handles it.
 */
class ComposedDecoder {
  constructor(decoders) {
    this.decoders = decoders;
  }
  // Extends the dispatch table with another decoder (or composition).
  or(decoder) {
    return or(this, decoder);
  }
  decode(input) {
    const prefix = input[0];
    const delegate = this.decoders[prefix];
    if (!delegate) {
      throw RangeError(`Unable to decode multibase string ${ JSON.stringify(input) }, only inputs prefixed with ${ Object.keys(this.decoders) } are supported`);
    }
    // The delegate receives the full input, prefix included.
    return delegate.decode(input);
  }
}
|
|
// Merges two decoders (plain or composed) into one ComposedDecoder.
// A composed side contributes its whole dispatch table; a plain side
// contributes a single { prefix: decoder } entry. Right-hand entries
// win on prefix collisions.
const or = (left, right) => {
  const decoders = {
    ...(left.decoders || { [left.prefix]: left }),
    ...(right.decoders || { [right.prefix]: right })
  };
  return new ComposedDecoder(decoders);
};
|
|
// Bundles matching multibase encode/decode halves under one name, and
// exposes pre-built single-direction Encoder/Decoder views.
class Codec {
  constructor(name, prefix, baseEncode, baseDecode) {
    this.name = name;
    this.prefix = prefix;
    this.baseEncode = baseEncode;
    this.baseDecode = baseDecode;
    // Directional views over the same pair of functions.
    this.encoder = new Encoder(name, prefix, baseEncode);
    this.decoder = new Decoder(name, prefix, baseDecode);
  }
  encode(input) {
    return this.encoder.encode(input);
  }
  decode(input) {
    return this.decoder.decode(input);
  }
}

// Factory used by every codec definition below.
const from$1 = ({name, prefix, encode, decode}) => new Codec(name, prefix, encode, decode);
|
|
// Builds a multibase codec backed by the bitcoin-style base-x
// implementation above; decode output is normalized via coerce.
const baseX = ({prefix, name, alphabet}) => {
  const {encode, decode} = _brrp__multiformats_scope_baseX(alphabet, name);
  return from$1({
    prefix,
    name,
    encode,
    decode: text => coerce(decode(text))
  });
};
|
|
/**
 * RFC 4648-style decoder: maps each character to `bitsPerChar` bits and
 * packs them into bytes. Trailing '=' padding is ignored.
 *
 * @param {string} string - encoded input
 * @param {string} alphabet - digit characters, value = index
 * @param {number} bitsPerChar - bits contributed by each character
 * @param {string} name - codec name, used in error messages
 * @returns {Uint8Array}
 * @throws {SyntaxError} on foreign characters or dangling bits
 */
const decode$7 = (string, alphabet, bitsPerChar, name) => {
  // Character -> value lookup table.
  const codes = {};
  for (let idx = 0; idx < alphabet.length; ++idx) {
    codes[alphabet[idx]] = idx;
  }
  // Ignore trailing '=' padding characters.
  let end = string.length;
  while (string[end - 1] === '=') {
    --end;
  }
  const out = new Uint8Array((end * bitsPerChar / 8) | 0);
  let bitCount = 0;
  let acc = 0;
  let outIndex = 0;
  for (let idx = 0; idx < end; ++idx) {
    const value = codes[string[idx]];
    if (value === undefined) {
      throw new SyntaxError(`Non-${ name } character`);
    }
    // Shift the new digit into the accumulator; emit a byte once we
    // have at least 8 bits buffered.
    acc = (acc << bitsPerChar) | value;
    bitCount += bitsPerChar;
    if (bitCount >= 8) {
      bitCount -= 8;
      out[outIndex++] = 255 & (acc >> bitCount);
    }
  }
  // Leftover bits must be fewer than one character's worth and all zero.
  if (bitCount >= bitsPerChar || 255 & (acc << (8 - bitCount))) {
    throw new SyntaxError('Unexpected end of data');
  }
  return out;
};
|
|
/**
 * RFC 4648-style encoder: emits one alphabet character per
 * `bitsPerChar` bits of input. If the alphabet's last character is '='
 * the output is padded to a whole number of bytes' worth of characters.
 *
 * @param {Uint8Array} data - bytes to encode
 * @param {string} alphabet - digit characters, value = index
 * @param {number} bitsPerChar - bits consumed per output character
 * @returns {string}
 */
const encode$6 = (data, alphabet, bitsPerChar) => {
  const hasPad = alphabet[alphabet.length - 1] === '=';
  const mask = (1 << bitsPerChar) - 1;
  let out = '';
  let bitCount = 0;
  let acc = 0;
  for (const byte of data) {
    // Buffer the byte, then emit full characters while enough bits remain.
    acc = (acc << 8) | byte;
    bitCount += 8;
    while (bitCount > bitsPerChar) {
      bitCount -= bitsPerChar;
      out += alphabet[mask & (acc >> bitCount)];
    }
  }
  // Flush a final partial character, zero-padded on the right.
  if (bitCount) {
    out += alphabet[mask & (acc << (bitsPerChar - bitCount))];
  }
  if (hasPad) {
    while ((out.length * bitsPerChar) & 7) {
      out += '=';
    }
  }
  return out;
};
|
|
// Builds a multibase codec for an RFC 4648-style fixed-bits-per-char
// alphabet (used for the base2/8/16/32/64 families below).
const rfc4648 = ({name, prefix, bitsPerChar, alphabet}) => {
  return from$1({
    prefix,
    name,
    encode(input) {
      return encode$6(input, alphabet, bitsPerChar);
    },
    decode(input) {
      return decode$7(input, alphabet, bitsPerChar, name);
    }
  });
};
|
|
|
|
// multibase 'identity' (prefix NUL): payload is the raw UTF-8 text;
// encode/decode are plain UTF-8 conversions.
const identity$2 = from$1({
  prefix: '\0',
  name: 'identity',
  encode: buf => toString$4(buf),
  decode: str => fromString$2(str)
});

// Frozen namespace mirroring the original ESM module export shape.
var identityBase = /*#__PURE__*/Object.freeze({
  __proto__: null,
  identity: identity$2
});
|
|
|
|
// multibase 'base2' (prefix '0'): one bit per character.
const base2 = rfc4648({
  prefix: '0',
  name: 'base2',
  alphabet: '01',
  bitsPerChar: 1
});

var base2$1 = /*#__PURE__*/Object.freeze({
  __proto__: null,
  base2: base2
});

// multibase 'base8' (prefix '7'): octal, three bits per character.
const base8 = rfc4648({
  prefix: '7',
  name: 'base8',
  alphabet: '01234567',
  bitsPerChar: 3
});

var base8$1 = /*#__PURE__*/Object.freeze({
  __proto__: null,
  base8: base8
});
|
|
|
|
// multibase 'base10' (prefix '9'): decimal big-integer encoding.
const base10 = baseX({
  prefix: '9',
  name: 'base10',
  alphabet: '0123456789'
});

var base10$1 = /*#__PURE__*/Object.freeze({
  __proto__: null,
  base10: base10
});

// multibase 'base16' / 'base16upper' (prefixes 'f' / 'F'): hexadecimal.
const base16 = rfc4648({
  prefix: 'f',
  name: 'base16',
  alphabet: '0123456789abcdef',
  bitsPerChar: 4
});
const base16upper = rfc4648({
  prefix: 'F',
  name: 'base16upper',
  alphabet: '0123456789ABCDEF',
  bitsPerChar: 4
});

var base16$1 = /*#__PURE__*/Object.freeze({
  __proto__: null,
  base16: base16,
  base16upper: base16upper
});
|
|
|
|
// multibase base32 family (5 bits per character): lower/upper,
// padded/unpadded, hex-alphabet and z-base-32 variants.
const base32 = rfc4648({
  prefix: 'b',
  name: 'base32',
  alphabet: 'abcdefghijklmnopqrstuvwxyz234567',
  bitsPerChar: 5
});
const base32upper = rfc4648({
  prefix: 'B',
  name: 'base32upper',
  alphabet: 'ABCDEFGHIJKLMNOPQRSTUVWXYZ234567',
  bitsPerChar: 5
});
const base32pad = rfc4648({
  prefix: 'c',
  name: 'base32pad',
  alphabet: 'abcdefghijklmnopqrstuvwxyz234567=',
  bitsPerChar: 5
});
const base32padupper = rfc4648({
  prefix: 'C',
  name: 'base32padupper',
  alphabet: 'ABCDEFGHIJKLMNOPQRSTUVWXYZ234567=',
  bitsPerChar: 5
});
const base32hex = rfc4648({
  prefix: 'v',
  name: 'base32hex',
  alphabet: '0123456789abcdefghijklmnopqrstuv',
  bitsPerChar: 5
});
const base32hexupper = rfc4648({
  prefix: 'V',
  name: 'base32hexupper',
  alphabet: '0123456789ABCDEFGHIJKLMNOPQRSTUV',
  bitsPerChar: 5
});
const base32hexpad = rfc4648({
  prefix: 't',
  name: 'base32hexpad',
  alphabet: '0123456789abcdefghijklmnopqrstuv=',
  bitsPerChar: 5
});
const base32hexpadupper = rfc4648({
  prefix: 'T',
  name: 'base32hexpadupper',
  alphabet: '0123456789ABCDEFGHIJKLMNOPQRSTUV=',
  bitsPerChar: 5
});
const base32z = rfc4648({
  prefix: 'h',
  name: 'base32z',
  alphabet: 'ybndrfg8ejkmcpqxot1uwisza345h769',
  bitsPerChar: 5
});

var base32$1 = /*#__PURE__*/Object.freeze({
  __proto__: null,
  base32: base32,
  base32upper: base32upper,
  base32pad: base32pad,
  base32padupper: base32padupper,
  base32hex: base32hex,
  base32hexupper: base32hexupper,
  base32hexpad: base32hexpad,
  base32hexpadupper: base32hexpadupper,
  base32z: base32z
});
|
|
|
|
// multibase 'base36' / 'base36upper' (prefixes 'k' / 'K'):
// big-integer encoding over digits + letters.
const base36 = baseX({
  prefix: 'k',
  name: 'base36',
  alphabet: '0123456789abcdefghijklmnopqrstuvwxyz'
});
const base36upper = baseX({
  prefix: 'K',
  name: 'base36upper',
  alphabet: '0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZ'
});

var base36$1 = /*#__PURE__*/Object.freeze({
  __proto__: null,
  base36: base36,
  base36upper: base36upper
});

// multibase 'base58btc' / 'base58flickr' (prefixes 'z' / 'Z'):
// bitcoin- and flickr-alphabet base58.
const base58btc = baseX({
  name: 'base58btc',
  prefix: 'z',
  alphabet: '123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz'
});
const base58flickr = baseX({
  name: 'base58flickr',
  prefix: 'Z',
  alphabet: '123456789abcdefghijkmnopqrstuvwxyzABCDEFGHJKLMNPQRSTUVWXYZ'
});

var base58 = /*#__PURE__*/Object.freeze({
  __proto__: null,
  base58btc: base58btc,
  base58flickr: base58flickr
});
|
|
|
|
// multibase base64 family (6 bits per character): standard and
// URL-safe alphabets, each with a padded variant.
const base64$2 = rfc4648({
  prefix: 'm',
  name: 'base64',
  alphabet: 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/',
  bitsPerChar: 6
});
const base64pad = rfc4648({
  prefix: 'M',
  name: 'base64pad',
  alphabet: 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/=',
  bitsPerChar: 6
});
const base64url = rfc4648({
  prefix: 'u',
  name: 'base64url',
  alphabet: 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789-_',
  bitsPerChar: 6
});
const base64urlpad = rfc4648({
  prefix: 'U',
  name: 'base64urlpad',
  alphabet: 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789-_=',
  bitsPerChar: 6
});

var base64$3 = /*#__PURE__*/Object.freeze({
  __proto__: null,
  base64: base64$2,
  base64pad: base64pad,
  base64url: base64url,
  base64urlpad: base64urlpad
});
|
|
|
|
const alphabet = Array.from('\uD83D\uDE80\uD83E\uDE90\u2604\uD83D\uDEF0\uD83C\uDF0C\uD83C\uDF11\uD83C\uDF12\uD83C\uDF13\uD83C\uDF14\uD83C\uDF15\uD83C\uDF16\uD83C\uDF17\uD83C\uDF18\uD83C\uDF0D\uD83C\uDF0F\uD83C\uDF0E\uD83D\uDC09\u2600\uD83D\uDCBB\uD83D\uDDA5\uD83D\uDCBE\uD83D\uDCBF\uD83D\uDE02\u2764\uD83D\uDE0D\uD83E\uDD23\uD83D\uDE0A\uD83D\uDE4F\uD83D\uDC95\uD83D\uDE2D\uD83D\uDE18\uD83D\uDC4D\uD83D\uDE05\uD83D\uDC4F\uD83D\uDE01\uD83D\uDD25\uD83E\uDD70\uD83D\uDC94\uD83D\uDC96\uD83D\uDC99\uD83D\uDE22\uD83E\uDD14\uD83D\uDE06\uD83D\uDE44\uD83D\uDCAA\uD83D\uDE09\u263A\uD83D\uDC4C\uD83E\uDD17\uD83D\uDC9C\uD83D\uDE14\uD83D\uDE0E\uD83D\uDE07\uD83C\uDF39\uD83E\uDD26\uD83C\uDF89\uD83D\uDC9E\u270C\u2728\uD83E\uDD37\uD83D\uDE31\uD83D\uDE0C\uD83C\uDF38\uD83D\uDE4C\uD83D\uDE0B\uD83D\uDC97\uD83D\uDC9A\uD83D\uDE0F\uD83D\uDC9B\uD83D\uDE42\uD83D\uDC93\uD83E\uDD29\uD83D\uDE04\uD83D\uDE00\uD83D\uDDA4\uD83D\uDE03\uD83D\uDCAF\uD83D\uDE48\uD83D\uDC47\uD83C\uDFB6\uD83D\uDE12\uD83E\uDD2D\u2763\uD83D\uDE1C\uD83D\uDC8B\uD83D\uDC40\uD83D\uDE2A\uD83D\uDE11\uD83D\uDCA5\uD83D\uDE4B\uD83D\uDE1E\uD83D\uDE29\uD83D\uDE21\uD83E\uDD2A\uD83D\uDC4A\uD83E\uDD73\uD83D\uDE25\uD83E\uDD24\uD83D\uDC49\uD83D\uDC83\uD83D\uDE33\u270B\uD83D\uDE1A\uD83D\uDE1D\uD83D\uDE34\uD83C\uDF1F\uD83D\uDE2C\uD83D\uDE43\uD83C\uDF40\uD83C\uDF37\uD83D\uDE3B\uD83D\uDE13\u2B50\u2705\uD83E\uDD7A\uD83C\uDF08\uD83D\uDE08\uD83E\uDD18\uD83D\uDCA6\u2714\uD83D\uDE23\uD83C\uDFC3\uD83D\uDC90\u2639\uD83C\uDF8A\uD83D\uDC98\uD83D\uDE20\u261D\uD83D\uDE15\uD83C\uDF3A\uD83C\uDF82\uD83C\uDF3B\uD83D\uDE10\uD83D\uDD95\uD83D\uDC9D\uD83D\uDE4A\uD83D\uDE39\uD83D\uDDE3\uD83D\uDCAB\uD83D\uDC80\uD83D\uDC51\uD83C\uDFB5\uD83E\uDD1E\uD83D\uDE1B\uD83D\uDD34\uD83D\uDE24\uD83C\uDF3C\uD83D\uDE2B\u26BD\uD83E\uDD19\u2615\uD83C\uDFC6\uD83E\uDD2B\uD83D\uDC48\uD83D\uDE2E\uD83D\uDE46\uD83C\uDF7B\uD83C\uDF43\uD83D\uDC36\uD83D\uDC81\uD83D\uDE32\uD83C\uDF3F\uD83E\uDDE1\uD83C\uDF81\u26A1\uD83C\uDF1E\uD83C\uDF88\u274C\u270A\uD83D\uDC4B\uD83D\uDE30\uD83E\uDD28\uD83D\uDE36\uD
83E\uDD1D\uD83D\uDEB6\uD83D\uDCB0\uD83C\uDF53\uD83D\uDCA2\uD83E\uDD1F\uD83D\uDE41\uD83D\uDEA8\uD83D\uDCA8\uD83E\uDD2C\u2708\uD83C\uDF80\uD83C\uDF7A\uD83E\uDD13\uD83D\uDE19\uD83D\uDC9F\uD83C\uDF31\uD83D\uDE16\uD83D\uDC76\uD83E\uDD74\u25B6\u27A1\u2753\uD83D\uDC8E\uD83D\uDCB8\u2B07\uD83D\uDE28\uD83C\uDF1A\uD83E\uDD8B\uD83D\uDE37\uD83D\uDD7A\u26A0\uD83D\uDE45\uD83D\uDE1F\uD83D\uDE35\uD83D\uDC4E\uD83E\uDD32\uD83E\uDD20\uD83E\uDD27\uD83D\uDCCC\uD83D\uDD35\uD83D\uDC85\uD83E\uDDD0\uD83D\uDC3E\uD83C\uDF52\uD83D\uDE17\uD83E\uDD11\uD83C\uDF0A\uD83E\uDD2F\uD83D\uDC37\u260E\uD83D\uDCA7\uD83D\uDE2F\uD83D\uDC86\uD83D\uDC46\uD83C\uDFA4\uD83D\uDE47\uD83C\uDF51\u2744\uD83C\uDF34\uD83D\uDCA3\uD83D\uDC38\uD83D\uDC8C\uD83D\uDCCD\uD83E\uDD40\uD83E\uDD22\uD83D\uDC45\uD83D\uDCA1\uD83D\uDCA9\uD83D\uDC50\uD83D\uDCF8\uD83D\uDC7B\uD83E\uDD10\uD83E\uDD2E\uD83C\uDFBC\uD83E\uDD75\uD83D\uDEA9\uD83C\uDF4E\uD83C\uDF4A\uD83D\uDC7C\uD83D\uDC8D\uD83D\uDCE3\uD83E\uDD42');
|
|
// Forward table: byte value (0-255) -> emoji character.
const alphabetBytesToChars = alphabet.reduce((p, c, i) => {
  p[i] = c;
  return p;
}, []);
// Reverse table: emoji code point -> byte value (sparse array).
const alphabetCharsToBytes = alphabet.reduce((p, c, i) => {
  p[c.codePointAt(0)] = i;
  return p;
}, []);
// Encodes bytes as a string of emoji, one emoji per byte.
function encode$5(data) {
  return data.reduce((p, c) => {
    p += alphabetBytesToChars[c];
    return p;
  }, '');
}
// Decodes an emoji string back to bytes; throws on any character
// outside the 256-emoji alphabet.
function decode$6(str) {
  const byts = [];
  for (const char of str) {
    const byt = alphabetCharsToBytes[char.codePointAt(0)];
    if (byt === undefined) {
      throw new Error(`Non-base256emoji character: ${ char }`);
    }
    byts.push(byt);
  }
  return new Uint8Array(byts);
}
// multibase 'base256emoji' (prefix U+1F680, the rocket emoji).
const base256emoji = from$1({
  prefix: '\uD83D\uDE80',
  name: 'base256emoji',
  encode: encode$5,
  decode: decode$6
});

var base256emoji$1 = /*#__PURE__*/Object.freeze({
  __proto__: null,
  base256emoji: base256emoji
});
|
|
|
|
// Unsigned LEB128 (protobuf varint) encoder, vendored from the
// `varint` npm package.
var encode_1$1 = encode$4;

var MSB$2 = 128, REST$2 = 127, MSBALL$1 = ~REST$2, INT$1 = Math.pow(2, 31);

/**
 * Encodes `num` as a varint into `out` (created if omitted) starting at
 * `offset`. After the call, `encode$4.bytes` holds the number of bytes
 * written.
 *
 * @param {number} num - non-negative integer to encode
 * @param {number[]} [out] - destination array
 * @param {number} [offset] - write position in `out`
 * @returns {number[]} the destination array
 */
function encode$4(num, out, offset) {
  out = out || [];
  offset = offset || 0;
  var start = offset;
  // Above 2^31 bitwise ops would truncate, so peel 7-bit groups by
  // division until the value fits in 32-bit arithmetic.
  while (num >= INT$1) {
    out[offset++] = (num & 255) | MSB$2;
    num /= 128;
  }
  // Emit continuation bytes while more than 7 significant bits remain.
  while (num & MSBALL$1) {
    out[offset++] = (num & 255) | MSB$2;
    num >>>= 7;
  }
  // Final byte has the MSB clear.
  out[offset] = num | 0;
  encode$4.bytes = offset - start + 1;
  return out;
}
|
|
// Unsigned LEB128 (protobuf varint) decoder, vendored from the
// `varint` npm package.
var decode$5 = read$1;

var MSB$1$1 = 128, REST$1$1 = 127;

/**
 * Decodes a varint from `buf` starting at `offset`. After the call,
 * `read$1.bytes` holds the number of bytes consumed (0 on failure).
 *
 * @param {number[]|Uint8Array} buf - source bytes
 * @param {number} [offset] - read position
 * @returns {number} decoded value
 * @throws {RangeError} when the buffer ends mid-varint
 */
function read$1(buf, offset) {
  offset = offset || 0;
  var result = 0;
  var shift = 0;
  var position = offset;
  var len = buf.length;
  var byte;
  do {
    if (position >= len) {
      read$1.bytes = 0;
      throw new RangeError('Could not decode varint');
    }
    byte = buf[position++];
    // Past 28 bits a left shift would overflow 32-bit ints, so switch
    // to multiplication for the high groups.
    result += shift < 28 ? (byte & REST$1$1) << shift : (byte & REST$1$1) * Math.pow(2, shift);
    shift += 7;
  } while (byte >= MSB$1$1);
  read$1.bytes = position - offset;
  return result;
}
|
|
// Powers of 2^(7k): the thresholds at which a varint grows by one byte.
var N1$2 = Math.pow(2, 7);
var N2$2 = Math.pow(2, 14);
var N3$2 = Math.pow(2, 21);
var N4$2 = Math.pow(2, 28);
var N5$2 = Math.pow(2, 35);
var N6$2 = Math.pow(2, 42);
var N7$2 = Math.pow(2, 49);
var N8$2 = Math.pow(2, 56);
var N9$2 = Math.pow(2, 63);

// Byte length of the varint encoding of `value` (1-10 bytes).
var length$1 = function (value) {
  if (value < N1$2) { return 1; }
  if (value < N2$2) { return 2; }
  if (value < N3$2) { return 3; }
  if (value < N4$2) { return 4; }
  if (value < N5$2) { return 5; }
  if (value < N6$2) { return 6; }
  if (value < N7$2) { return 7; }
  if (value < N8$2) { return 8; }
  if (value < N9$2) { return 9; }
  return 10;
};
|
|
// Public API object of the vendored `varint` package, re-exported under
// the name the bundled multiformats code expects.
var varint$1 = {
  encode: encode_1$1,
  decode: decode$5,
  encodingLength: length$1
};

var _brrp_varint = varint$1;
|
|
|
|
// Reads a varint from `data` at `offset`; returns [value, bytesRead].
const decode$4 = (data, offset = 0) => {
  const code = _brrp_varint.decode(data, offset);
  return [
    code,
    _brrp_varint.decode.bytes
  ];
};
// Writes the varint encoding of `int` into `target` at `offset`;
// returns `target` for chaining.
const encodeTo = (int, target, offset = 0) => {
  _brrp_varint.encode(int, target, offset);
  return target;
};
// Number of bytes the varint encoding of `int` occupies.
const encodingLength = int => {
  return _brrp_varint.encodingLength(int);
};
|
|
|
|
// Builds a Digest whose `bytes` carry the full multihash encoding:
// <varint hash code><varint digest size><digest bytes>.
const create$5 = (code, digest) => {
  const size = digest.byteLength;
  const sizeOffset = encodingLength(code);
  const digestOffset = sizeOffset + encodingLength(size);
  const bytes = new Uint8Array(digestOffset + size);
  encodeTo(code, bytes, 0);
  encodeTo(size, bytes, sizeOffset);
  bytes.set(digest, digestOffset);
  return new Digest(code, size, digest, bytes);
};
// Parses multihash bytes back into a Digest, validating that the
// declared size matches the actual digest length.
const decode$3 = multihash => {
  const bytes = coerce(multihash);
  const [code, sizeOffset] = decode$4(bytes);
  const [size, digestOffset] = decode$4(bytes.subarray(sizeOffset));
  const digest = bytes.subarray(sizeOffset + digestOffset);
  if (digest.byteLength !== size) {
    throw new Error('Incorrect length');
  }
  return new Digest(code, size, digest, bytes);
};
|
|
// Multihash digest equality: same hash-function code, same declared
// size, and byte-identical multihash encoding.
const equals$1 = (a, b) => {
  if (a === b) {
    return true;
  }
  return a.code === b.code && a.size === b.size && equals$2(a.bytes, b.bytes);
};
|
|
// Value object holding a parsed multihash.
class Digest {
  /**
   * @param {number} code - multihash hash-function code
   * @param {number} size - digest length in bytes
   * @param {Uint8Array} digest - raw digest bytes
   * @param {Uint8Array} bytes - full multihash encoding (code + size + digest)
   */
  constructor(code, size, digest, bytes) {
    this.code = code;
    this.size = size;
    this.digest = digest;
    this.bytes = bytes;
  }
}
|
|
|
|
// Factory used by the hasher definitions below.
const from = ({name, code, encode}) => new Hasher(name, code, encode);

/**
 * Pairs a multihash code with a digest function. `digest` accepts only
 * Uint8Array input; sync hashers return a Digest directly, async ones
 * return a promise of one.
 */
class Hasher {
  constructor(name, code, encode) {
    this.name = name;
    this.code = code;
    this.encode = encode;
  }
  digest(input) {
    if (!(input instanceof Uint8Array)) {
      throw Error('Unknown type, must be binary type');
    }
    const result = this.encode(input);
    if (result instanceof Uint8Array) {
      return create$5(this.code, result);
    }
    // Async hasher: wrap the eventual digest bytes.
    return result.then(digest => create$5(this.code, digest));
  }
}
|
|
|
|
// Async hasher backed by WebCrypto for the named digest algorithm.
const sha = name => async data => new Uint8Array(await crypto.subtle.digest(name, data));
const sha256$1 = from({
  name: 'sha2-256',
  code: 18, // multihash code 0x12
  encode: sha('SHA-256')
});
const sha512$1 = from({
  name: 'sha2-512',
  code: 19, // multihash code 0x13
  encode: sha('SHA-512')
});

var sha2 = /*#__PURE__*/Object.freeze({
  __proto__: null,
  sha256: sha256$1,
  sha512: sha512$1
});
|
|
|
|
// 'identity' multihash (code 0): the digest is the input itself,
// merely coerced to Uint8Array and wrapped in a Digest.
const code = 0;
const name = 'identity';
const encode$3 = coerce;
const digest = input => create$5(code, encode$3(input));
const identity = {
  code,
  name,
  encode: encode$3,
  digest
};

var identity$1 = /*#__PURE__*/Object.freeze({
  __proto__: null,
  identity: identity
});
|
|
|
|
// Bundler residue: side-effect-free constructor calls left over after
// tree-shaking removed the bindings that used them; harmless at runtime.
new TextEncoder();
new TextDecoder();
|
|
|
|
class CID {
|
|
constructor(version, code, multihash, bytes) {
|
|
this.code = code;
|
|
this.version = version;
|
|
this.multihash = multihash;
|
|
this.bytes = bytes;
|
|
this.byteOffset = bytes.byteOffset;
|
|
this.byteLength = bytes.byteLength;
|
|
this.asCID = this;
|
|
this._baseCache = new Map();
|
|
Object.defineProperties(this, {
|
|
byteOffset: hidden,
|
|
byteLength: hidden,
|
|
code: readonly,
|
|
version: readonly,
|
|
multihash: readonly,
|
|
bytes: readonly,
|
|
_baseCache: hidden,
|
|
asCID: hidden
|
|
});
|
|
}
|
|
toV0() {
|
|
switch (this.version) {
|
|
case 0: {
|
|
return this;
|
|
}
|
|
default: {
|
|
const {code, multihash} = this;
|
|
if (code !== DAG_PB_CODE) {
|
|
throw new Error('Cannot convert a non dag-pb CID to CIDv0');
|
|
}
|
|
if (multihash.code !== SHA_256_CODE) {
|
|
throw new Error('Cannot convert non sha2-256 multihash CID to CIDv0');
|
|
}
|
|
return CID.createV0(multihash);
|
|
}
|
|
}
|
|
}
|
|
toV1() {
|
|
switch (this.version) {
|
|
case 0: {
|
|
const {code, digest} = this.multihash;
|
|
const multihash = create$5(code, digest);
|
|
return CID.createV1(this.code, multihash);
|
|
}
|
|
case 1: {
|
|
return this;
|
|
}
|
|
default: {
|
|
throw Error(`Can not convert CID version ${ this.version } to version 0. This is a bug please report`);
|
|
}
|
|
}
|
|
}
|
|
equals(other) {
|
|
return other && this.code === other.code && this.version === other.version && equals$1(this.multihash, other.multihash);
|
|
}
|
|
toString(base) {
|
|
const {bytes, version, _baseCache} = this;
|
|
switch (version) {
|
|
case 0:
|
|
return toStringV0(bytes, _baseCache, base || base58btc.encoder);
|
|
default:
|
|
return toStringV1(bytes, _baseCache, base || base32.encoder);
|
|
}
|
|
}
|
|
toJSON() {
|
|
return {
|
|
code: this.code,
|
|
version: this.version,
|
|
hash: this.multihash.bytes
|
|
};
|
|
}
|
|
get [Symbol.toStringTag]() {
|
|
return 'CID';
|
|
}
|
|
[Symbol.for('nodejs.util.inspect.custom')]() {
|
|
return 'CID(' + this.toString() + ')';
|
|
}
|
|
static isCID(value) {
|
|
deprecate(/^0\.0/, IS_CID_DEPRECATION);
|
|
return !!(value && (value[cidSymbol] || value.asCID === value));
|
|
}
|
|
get toBaseEncodedString() {
|
|
throw new Error('Deprecated, use .toString()');
|
|
}
|
|
get codec() {
|
|
throw new Error('"codec" property is deprecated, use integer "code" property instead');
|
|
}
|
|
get buffer() {
|
|
throw new Error('Deprecated .buffer property, use .bytes to get Uint8Array instead');
|
|
}
|
|
get multibaseName() {
|
|
throw new Error('"multibaseName" property is deprecated');
|
|
}
|
|
get prefix() {
|
|
throw new Error('"prefix" property is deprecated');
|
|
}
|
|
static asCID(value) {
|
|
if (value instanceof CID) {
|
|
return value;
|
|
} else if (value != null && value.asCID === value) {
|
|
const {version, code, multihash, bytes} = value;
|
|
return new CID(version, code, multihash, bytes || encodeCID(version, code, multihash.bytes));
|
|
} else if (value != null && value[cidSymbol] === true) {
|
|
const {version, multihash, code} = value;
|
|
const digest = decode$3(multihash);
|
|
return CID.create(version, code, digest);
|
|
} else {
|
|
return null;
|
|
}
|
|
}
|
|
/**
 * Validating CID factory. `version` must be 0 or 1; `code` must be an
 * integer multicodec (string codec names are rejected); `digest` is a
 * multihash Digest whose `.bytes` supply the encoded multihash.
 * Version 0 additionally requires the dag-pb codec (code 112).
 */
static create(version, code, digest) {
    if (typeof code !== 'number') {
        throw new Error('String codecs are no longer supported');
    }
    switch (version) {
    case 0: {
        if (code !== DAG_PB_CODE) {
            throw new Error(`Version 0 CID must use dag-pb (code: ${ DAG_PB_CODE }) block encoding`);
        } else {
            // v0 has no version/codec prefix: CID bytes == multihash bytes.
            return new CID(version, code, digest, digest.bytes);
        }
    }
    case 1: {
        const bytes = encodeCID(version, code, digest.bytes);
        return new CID(version, code, digest, bytes);
    }
    default: {
        throw new Error('Invalid version');
    }
    }
}
|
|
// Shorthand for a version-0, dag-pb CID.
static createV0(digest) {
    return CID.create(0, DAG_PB_CODE, digest);
}
|
|
// Shorthand for a version-1 CID with an explicit integer codec.
static createV1(code, digest) {
    return CID.create(1, code, digest);
}
|
|
static decode(bytes) {
|
|
const [cid, remainder] = CID.decodeFirst(bytes);
|
|
if (remainder.length) {
|
|
throw new Error('Incorrect length');
|
|
}
|
|
return cid;
|
|
}
|
|
/**
 * Decode a CID from the start of `bytes`; returns [cid, remainder] where
 * `remainder` is the unconsumed tail of the input. Throws when the
 * multihash portion is truncated.
 */
static decodeFirst(bytes) {
    const specs = CID.inspectBytes(bytes);
    const prefixSize = specs.size - specs.multihashSize;
    const multihashBytes = coerce(bytes.subarray(prefixSize, prefixSize + specs.multihashSize));
    if (multihashBytes.byteLength !== specs.multihashSize) {
        throw new Error('Incorrect length');
    }
    // Digest bytes are the tail of the multihash (code+size varints stripped).
    const digestBytes = multihashBytes.subarray(specs.multihashSize - specs.digestSize);
    const digest = new Digest(specs.multihashCode, specs.digestSize, digestBytes, multihashBytes);
    const cid = specs.version === 0 ? CID.createV0(digest) : CID.createV1(specs.codec, digest);
    return [
        cid,
        bytes.subarray(specs.size)
    ];
}
|
|
/**
 * Parse the varint header of an encoded CID without materializing it.
 * Returns { version, codec, multihashCode, digestSize, multihashSize, size }.
 * A leading varint of 18 (0x12 = sha2-256 multihash code) signals an
 * implicit CIDv0: reading restarts at offset 0 and the codec is dag-pb.
 */
static inspectBytes(initialBytes) {
    let offset = 0;
    // Read one unsigned varint and advance the cursor.
    const next = () => {
        const [i, length] = decode$4(initialBytes.subarray(offset));
        offset += length;
        return i;
    };
    let version = next();
    let codec = DAG_PB_CODE;
    if (version === 18) {
        // CIDv0: no version/codec prefix, the first varint was the multihash code.
        version = 0;
        offset = 0;
    } else if (version === 1) {
        codec = next();
    }
    if (version !== 0 && version !== 1) {
        throw new RangeError(`Invalid CID version ${ version }`);
    }
    const prefixSize = offset;
    const multihashCode = next();
    const digestSize = next();
    const size = offset + digestSize;
    const multihashSize = size - prefixSize;
    return {
        version,
        codec,
        multihashCode,
        digestSize,
        multihashSize,
        size
    };
}
|
|
/**
 * Parse a multibase-encoded CID string (optionally with an explicit
 * `base` decoder) and cache the source string on the instance so a later
 * toString() with the same base avoids re-encoding.
 */
static parse(source, base) {
    const [prefix, bytes] = parseCIDtoBytes(source, base);
    const cid = CID.decode(bytes);
    // Remember the original string keyed by its multibase prefix.
    cid._baseCache.set(prefix, source);
    return cid;
}
|
|
}
|
|
/**
 * Decode a CID string to [multibasePrefix, bytes].
 * 'Q…' is a bare (prefixless) base58btc CIDv0; otherwise the first
 * character selects base58btc or base32; any other prefix requires an
 * explicit `base` decoder from the caller.
 */
const parseCIDtoBytes = (source, base) => {
    switch (source[0]) {
    case 'Q': {
        const decoder = base || base58btc;
        return [
            base58btc.prefix,
            // Re-attach the implicit multibase prefix before decoding.
            decoder.decode(`${ base58btc.prefix }${ source }`)
        ];
    }
    case base58btc.prefix: {
        const decoder = base || base58btc;
        return [
            base58btc.prefix,
            decoder.decode(source)
        ];
    }
    case base32.prefix: {
        const decoder = base || base32;
        return [
            base32.prefix,
            decoder.decode(source)
        ];
    }
    default: {
        if (base == null) {
            throw Error('To parse non base32 or base58btc encoded CID multibase decoder must be provided');
        }
        return [
            source[0],
            base.decode(source)
        ];
    }
    }
};
|
|
const toStringV0 = (bytes, cache, base) => {
    // CIDv0 is only representable in base58btc, rendered without the
    // multibase prefix character (hence the .slice(1)). The string form is
    // memoized in `cache` keyed by the base prefix.
    const { prefix } = base;
    if (prefix !== base58btc.prefix) {
        throw Error(`Cannot string encode V0 in ${ base.name } encoding`);
    }
    let encoded = cache.get(prefix);
    if (encoded == null) {
        encoded = base.encode(bytes).slice(1);
        cache.set(prefix, encoded);
    }
    return encoded;
};
|
|
const toStringV1 = (bytes, cache, base) => {
    // Memoize the string form per multibase prefix; a cache hit skips the
    // (comparatively expensive) base encoding entirely.
    let encoded = cache.get(base.prefix);
    if (encoded == null) {
        encoded = base.encode(bytes);
        cache.set(base.prefix, encoded);
    }
    return encoded;
};
|
|
const DAG_PB_CODE = 112;
|
|
const SHA_256_CODE = 18;
|
|
/**
 * Serialize a CID header: varint(version) + varint(code) + multihash bytes.
 * `encodingLength`/`encodeTo` are the unsigned-varint helpers defined
 * elsewhere in this bundle.
 */
const encodeCID = (version, code, multihash) => {
    const codeOffset = encodingLength(version);
    const hashOffset = codeOffset + encodingLength(code);
    const bytes = new Uint8Array(hashOffset + multihash.byteLength);
    encodeTo(version, bytes, 0);
    encodeTo(code, bytes, codeOffset);
    bytes.set(multihash, hashOffset);
    return bytes;
};
|
|
const cidSymbol = Symbol.for('@ipld/js-cid/CID');
|
|
const readonly = {
|
|
writable: false,
|
|
configurable: false,
|
|
enumerable: true
|
|
};
|
|
const hidden = {
|
|
writable: false,
|
|
enumerable: false,
|
|
configurable: false
|
|
};
|
|
const version$3 = '0.0.0-dev';
|
|
/**
 * Deprecation helper: when the bundle's version (version$3) matches
 * `range` (i.e. a dev/0.0.x build) only warn; otherwise throw.
 */
const deprecate = (range, message) => {
    if (range.test(version$3)) {
        console.warn(message);
    } else {
        throw new Error(message);
    }
};
|
|
const IS_CID_DEPRECATION = `CID.isCID(v) is deprecated and will be removed in the next major release.
|
|
Following code pattern:
|
|
|
|
if (CID.isCID(value)) {
|
|
doSomethingWithCID(value)
|
|
}
|
|
|
|
Is replaced with:
|
|
|
|
const cid = CID.asCID(value)
|
|
if (cid) {
|
|
// Make sure to use cid instead of value
|
|
doSomethingWithCID(cid)
|
|
}
|
|
`;
|
|
|
|
const bases = {
|
|
...identityBase,
|
|
...base2$1,
|
|
...base8$1,
|
|
...base10$1,
|
|
...base16$1,
|
|
...base32$1,
|
|
...base36$1,
|
|
...base58,
|
|
...base64$3,
|
|
...base256emoji$1
|
|
};
|
|
({
|
|
...sha2,
|
|
...identity$1
|
|
});
|
|
|
|
function alloc(size = 0) {
    // Zero-filled allocation: prefer Buffer.alloc under Node, otherwise a
    // plain Uint8Array (which is always zero-initialized).
    const NodeBuffer = globalThis.Buffer;
    if (NodeBuffer != null && NodeBuffer.alloc != null) {
        return NodeBuffer.alloc(size);
    }
    return new Uint8Array(size);
}
|
|
function allocUnsafe$1(size = 0) {
    // Possibly-uninitialized allocation for buffers that will be fully
    // overwritten: Buffer.allocUnsafe under Node, plain Uint8Array otherwise.
    const NodeBuffer = globalThis.Buffer;
    if (NodeBuffer != null && NodeBuffer.allocUnsafe != null) {
        return NodeBuffer.allocUnsafe(size);
    }
    return new Uint8Array(size);
}
|
|
|
|
function createCodec$1(name, prefix, encode, decode) {
    // Minimal multibase-style codec: an encoder carrying name/prefix and a
    // bare decoder, bundled with the codec's own name and prefix.
    const encoder = { name, prefix, encode };
    const decoder = { decode };
    return { name, prefix, encoder, decoder };
}
|
|
const string = createCodec$1('utf8', 'u', buf => {
|
|
const decoder = new TextDecoder('utf8');
|
|
return 'u' + decoder.decode(buf);
|
|
}, str => {
|
|
const encoder = new TextEncoder();
|
|
return encoder.encode(str.substring(1));
|
|
});
|
|
const ascii = createCodec$1('ascii', 'a', buf => {
|
|
let string = 'a';
|
|
for (let i = 0; i < buf.length; i++) {
|
|
string += String.fromCharCode(buf[i]);
|
|
}
|
|
return string;
|
|
}, str => {
|
|
str = str.substring(1);
|
|
const buf = allocUnsafe$1(str.length);
|
|
for (let i = 0; i < str.length; i++) {
|
|
buf[i] = str.charCodeAt(i);
|
|
}
|
|
return buf;
|
|
});
|
|
const BASES = {
|
|
utf8: string,
|
|
'utf-8': string,
|
|
hex: bases.base16,
|
|
latin1: ascii,
|
|
ascii: ascii,
|
|
binary: ascii,
|
|
...bases
|
|
};
|
|
|
|
/**
 * Decode `string` into a Uint8Array using the named `encoding` (utf8 by
 * default; any key of BASES). Throws on unknown encodings. Under Node,
 * utf8 takes the fast Buffer.from path.
 */
function fromString$1(string, encoding = 'utf8') {
    const base = BASES[encoding];
    if (!base) {
        throw new Error(`Unsupported encoding "${ encoding }"`);
    }
    if ((encoding === 'utf8' || encoding === 'utf-8') && globalThis.Buffer != null && globalThis.Buffer.from != null) {
        return globalThis.Buffer.from(string, 'utf8');
    }
    // Multibase decoders expect their prefix character to be present.
    return base.decoder.decode(`${ base.prefix }${ string }`);
}
|
|
|
|
/**
 * Encode a Uint8Array to a string in the named `encoding` (utf8 by
 * default; any key of BASES). Under Node, utf8 takes the fast Buffer path.
 * Multibase output has its prefix character stripped.
 */
function toString$3(array, encoding = 'utf8') {
    const base = BASES[encoding];
    if (!base) {
        throw new Error(`Unsupported encoding "${ encoding }"`);
    }
    if ((encoding === 'utf8' || encoding === 'utf-8') && globalThis.Buffer != null && globalThis.Buffer.from != null) {
        // Wrap the underlying ArrayBuffer view without copying.
        return globalThis.Buffer.from(array.buffer, array.byteOffset, array.byteLength).toString('utf8');
    }
    // Drop the leading multibase prefix character from the encoded form.
    return base.encoder.encode(array).substring(1);
}
|
|
|
|
/**
 * Convert input to a byte array.
 *
 * Handles both `0x` prefixed and non-prefixed hex strings; a non-string
 * input is assumed to already be bytes and is returned unchanged.
 */
function hexToBytes$1(hex) {
    if (typeof hex === "string") {
        const _hex = hex.replace(/^0x/i, "");
        // base16 decoder requires lowercase input.
        return fromString$1(_hex.toLowerCase(), "base16");
    }
    return hex;
}
|
|
/**
|
|
* Convert byte array to hex string (no `0x` prefix).
|
|
*/
|
|
const bytesToHex$1 = (bytes) => toString$3(bytes, "base16");
|
|
/**
|
|
* Decode byte array to utf-8 string.
|
|
*/
|
|
const bytesToUtf8 = (b) => toString$3(b, "utf8");
|
|
/**
|
|
* Encode utf-8 string to byte array.
|
|
*/
|
|
const utf8ToBytes = (s) => fromString$1(s, "utf8");
|
|
/**
 * Concatenate byte arrays into one Uint8Array. Uses Uint8Array (never
 * Buffer) as `Buffer` has a different behavior with `DataView`.
 * `totalLength` may be supplied to skip the summing pass; when larger
 * than the combined input, the tail stays zero-filled.
 */
function concat$1(byteArrays, totalLength) {
    let size = totalLength;
    if (size == null) {
        size = 0;
        for (const chunk of byteArrays) {
            size += chunk.length;
        }
    }
    const result = new Uint8Array(size);
    let cursor = 0;
    for (const chunk of byteArrays) {
        result.set(chunk, cursor);
        cursor += chunk.length;
    }
    return result;
}
|
|
|
|
const randomBytes$1 = utils$1.randomBytes;
|
|
utils$1.sha256;
|
|
// keccak-256 digest (via js-sha3) returned as a 32-byte Uint8Array.
function keccak256(input) {
    return new Uint8Array(sha3.keccak256.arrayBuffer(input));
}
|
|
/**
 * Compress a secp256k1 public key to its 33-byte SEC1 form.
 * A raw 64-byte (x||y) key first gets the 0x04 uncompressed marker
 * prepended so Point.fromHex accepts it.
 */
function compressPublicKey$1(publicKey) {
    if (publicKey.length === 64) {
        publicKey = concat$1([new Uint8Array([4]), publicKey], 65);
    }
    const point = Point$1.fromHex(publicKey);
    return point.toRawBytes(true);
}
|
|
/**
 * Verify an ECDSA signature.
 *
 * Only the first 64 bytes (compact r||s) of `signature` are used; any
 * recovery byte beyond that is ignored. Parse or verification failures
 * are reported as `false` rather than thrown.
 */
function verifySignature(signature, message, publicKey) {
    try {
        const _signature = Signature$1.fromCompact(signature.slice(0, 64));
        return verify$1(_signature, message, publicKey);
    }
    catch {
        // Malformed signature/key counts as "not valid", not an error.
        return false;
    }
}
|
|
|
|
// Maximum encoded size of an ENR
|
|
const MAX_RECORD_SIZE = 300;
|
|
const ERR_INVALID_ID = "Invalid record id";
|
|
const ERR_NO_SIGNATURE = "No valid signature found";
|
|
// The maximum length of byte size of a multiaddr to encode in the `multiaddr` field
|
|
// The size is a big endian 16-bit unsigned integer
|
|
const MULTIADDR_LENGTH_SIZE = 2;
|
|
|
|
const version$2 = "logger/5.7.0";
|
|
|
|
let _permanentCensorErrors = false;
|
|
let _censorErrors = false;
|
|
const LogLevels = { debug: 1, "default": 2, info: 2, warning: 3, error: 4, off: 5 };
|
|
let _logLevel = LogLevels["default"];
|
|
let _globalLogger = null;
|
|
function _checkNormalize() {
    // Probe String.prototype.normalize at module load. Returns null when the
    // platform implementation is sound, otherwise a message describing what
    // is broken (missing forms, or an incorrect NFD decomposition of é).
    try {
        const missing = [];
        for (const form of ["NFD", "NFC", "NFKD", "NFKC"]) {
            try {
                if ("test".normalize(form) !== "test") {
                    throw new Error("bad normalize");
                }
            }
            catch (error) {
                missing.push(form);
            }
        }
        if (missing.length) {
            throw new Error("missing " + missing.join(", "));
        }
        // é must decompose into "e" + combining acute accent.
        if (String.fromCharCode(0xe9).normalize("NFD") !== String.fromCharCode(0x65, 0x0301)) {
            throw new Error("broken implementation");
        }
    }
    catch (error) {
        return error.message;
    }
    return null;
}
|
|
const _normalizeError = _checkNormalize();
|
|
var LogLevel;
|
|
(function (LogLevel) {
|
|
LogLevel["DEBUG"] = "DEBUG";
|
|
LogLevel["INFO"] = "INFO";
|
|
LogLevel["WARNING"] = "WARNING";
|
|
LogLevel["ERROR"] = "ERROR";
|
|
LogLevel["OFF"] = "OFF";
|
|
})(LogLevel || (LogLevel = {}));
|
|
var ErrorCode;
|
|
(function (ErrorCode) {
|
|
///////////////////
|
|
// Generic Errors
|
|
// Unknown Error
|
|
ErrorCode["UNKNOWN_ERROR"] = "UNKNOWN_ERROR";
|
|
// Not Implemented
|
|
ErrorCode["NOT_IMPLEMENTED"] = "NOT_IMPLEMENTED";
|
|
// Unsupported Operation
|
|
// - operation
|
|
ErrorCode["UNSUPPORTED_OPERATION"] = "UNSUPPORTED_OPERATION";
|
|
// Network Error (i.e. Ethereum Network, such as an invalid chain ID)
|
|
// - event ("noNetwork" is not re-thrown in provider.ready; otherwise thrown)
|
|
ErrorCode["NETWORK_ERROR"] = "NETWORK_ERROR";
|
|
// Some sort of bad response from the server
|
|
ErrorCode["SERVER_ERROR"] = "SERVER_ERROR";
|
|
// Timeout
|
|
ErrorCode["TIMEOUT"] = "TIMEOUT";
|
|
///////////////////
|
|
// Operational Errors
|
|
// Buffer Overrun
|
|
ErrorCode["BUFFER_OVERRUN"] = "BUFFER_OVERRUN";
|
|
// Numeric Fault
|
|
// - operation: the operation being executed
|
|
// - fault: the reason this faulted
|
|
ErrorCode["NUMERIC_FAULT"] = "NUMERIC_FAULT";
|
|
///////////////////
|
|
// Argument Errors
|
|
// Missing new operator to an object
|
|
// - name: The name of the class
|
|
ErrorCode["MISSING_NEW"] = "MISSING_NEW";
|
|
// Invalid argument (e.g. value is incompatible with type) to a function:
|
|
// - argument: The argument name that was invalid
|
|
// - value: The value of the argument
|
|
ErrorCode["INVALID_ARGUMENT"] = "INVALID_ARGUMENT";
|
|
// Missing argument to a function:
|
|
// - count: The number of arguments received
|
|
// - expectedCount: The number of arguments expected
|
|
ErrorCode["MISSING_ARGUMENT"] = "MISSING_ARGUMENT";
|
|
// Too many arguments
|
|
// - count: The number of arguments received
|
|
// - expectedCount: The number of arguments expected
|
|
ErrorCode["UNEXPECTED_ARGUMENT"] = "UNEXPECTED_ARGUMENT";
|
|
///////////////////
|
|
// Blockchain Errors
|
|
// Call exception
|
|
// - transaction: the transaction
|
|
// - address?: the contract address
|
|
// - args?: The arguments passed into the function
|
|
// - method?: The Solidity method signature
|
|
// - errorSignature?: The EIP848 error signature
|
|
// - errorArgs?: The EIP848 error parameters
|
|
// - reason: The reason (only for EIP848 "Error(string)")
|
|
ErrorCode["CALL_EXCEPTION"] = "CALL_EXCEPTION";
|
|
// Insufficient funds (< value + gasLimit * gasPrice)
|
|
// - transaction: the transaction attempted
|
|
ErrorCode["INSUFFICIENT_FUNDS"] = "INSUFFICIENT_FUNDS";
|
|
// Nonce has already been used
|
|
// - transaction: the transaction attempted
|
|
ErrorCode["NONCE_EXPIRED"] = "NONCE_EXPIRED";
|
|
// The replacement fee for the transaction is too low
|
|
// - transaction: the transaction attempted
|
|
ErrorCode["REPLACEMENT_UNDERPRICED"] = "REPLACEMENT_UNDERPRICED";
|
|
// The gas limit could not be estimated
|
|
// - transaction: the transaction passed to estimateGas
|
|
ErrorCode["UNPREDICTABLE_GAS_LIMIT"] = "UNPREDICTABLE_GAS_LIMIT";
|
|
// The transaction was replaced by one with a higher gas price
|
|
// - reason: "cancelled", "replaced" or "repriced"
|
|
// - cancelled: true if reason == "cancelled" or reason == "replaced")
|
|
// - hash: original transaction hash
|
|
// - replacement: the full TransactionsResponse for the replacement
|
|
// - receipt: the receipt of the replacement
|
|
ErrorCode["TRANSACTION_REPLACED"] = "TRANSACTION_REPLACED";
|
|
///////////////////
|
|
// Interaction Errors
|
|
// The user rejected the action, such as signing a message or sending
|
|
// a transaction
|
|
ErrorCode["ACTION_REJECTED"] = "ACTION_REJECTED";
|
|
})(ErrorCode || (ErrorCode = {}));
|
|
const HEX = "0123456789abcdef";
|
|
/**
 * Console logger with leveled output and rich, coded error construction.
 * Bundled from ethers.js (version$2 = "logger/5.7.0"). Errors it builds
 * carry a machine-readable `code` (ErrorCode), the original `reason`, and
 * every entry of `params` copied onto the Error object.
 */
class Logger {
    // `version` is frozen onto the instance and appended to every error.
    constructor(version) {
        Object.defineProperty(this, "version", {
            enumerable: true,
            value: version,
            writable: false
        });
    }
    // Emit `args` via console.log when `logLevel` meets the module-wide
    // threshold (_logLevel); unknown level names throw INVALID_ARGUMENT.
    _log(logLevel, args) {
        const level = logLevel.toLowerCase();
        if (LogLevels[level] == null) {
            this.throwArgumentError("invalid log level name", "logLevel", logLevel);
        }
        if (_logLevel > LogLevels[level]) {
            return;
        }
        console.log.apply(console, args);
    }
    debug(...args) {
        this._log(Logger.levels.DEBUG, args);
    }
    info(...args) {
        this._log(Logger.levels.INFO, args);
    }
    warn(...args) {
        this._log(Logger.levels.WARNING, args);
    }
    /**
     * Build (but do not throw) an Error whose message embeds serialized
     * params, the error code, this logger's version and — for well-known
     * codes — a v5-errors documentation URL. Params are also copied onto
     * the returned Error object.
     */
    makeError(message, code, params) {
        // Errors are being censored
        if (_censorErrors) {
            return this.makeError("censored error", code, {});
        }
        if (!code) {
            code = Logger.errors.UNKNOWN_ERROR;
        }
        if (!params) {
            params = {};
        }
        const messageDetails = [];
        Object.keys(params).forEach((key) => {
            const value = params[key];
            try {
                if (value instanceof Uint8Array) {
                    // Render byte params as hex for readability.
                    let hex = "";
                    for (let i = 0; i < value.length; i++) {
                        hex += HEX[value[i] >> 4];
                        hex += HEX[value[i] & 0x0f];
                    }
                    messageDetails.push(key + "=Uint8Array(0x" + hex + ")");
                }
                else {
                    messageDetails.push(key + "=" + JSON.stringify(value));
                }
            }
            catch (error) {
                // Unserializable value: fall back to its string form.
                messageDetails.push(key + "=" + JSON.stringify(params[key].toString()));
            }
        });
        messageDetails.push(`code=${code}`);
        messageDetails.push(`version=${this.version}`);
        const reason = message;
        let url = "";
        switch (code) {
            case ErrorCode.NUMERIC_FAULT: {
                url = "NUMERIC_FAULT";
                const fault = message;
                switch (fault) {
                    case "overflow":
                    case "underflow":
                    case "division-by-zero":
                        url += "-" + fault;
                        break;
                    case "negative-power":
                    case "negative-width":
                        url += "-unsupported";
                        break;
                    case "unbound-bitwise-result":
                        url += "-unbound-result";
                        break;
                }
                break;
            }
            case ErrorCode.CALL_EXCEPTION:
            case ErrorCode.INSUFFICIENT_FUNDS:
            case ErrorCode.MISSING_NEW:
            case ErrorCode.NONCE_EXPIRED:
            case ErrorCode.REPLACEMENT_UNDERPRICED:
            case ErrorCode.TRANSACTION_REPLACED:
            case ErrorCode.UNPREDICTABLE_GAS_LIMIT:
                url = code;
                break;
        }
        if (url) {
            message += " [ See: https:/\/links.ethers.org/v5-errors-" + url + " ]";
        }
        if (messageDetails.length) {
            message += " (" + messageDetails.join(", ") + ")";
        }
        // @TODO: Any??
        const error = new Error(message);
        error.reason = reason;
        error.code = code;
        Object.keys(params).forEach(function (key) {
            error[key] = params[key];
        });
        return error;
    }
    throwError(message, code, params) {
        throw this.makeError(message, code, params);
    }
    // Shorthand for INVALID_ARGUMENT errors with the offending name/value.
    throwArgumentError(message, name, value) {
        return this.throwError(message, Logger.errors.INVALID_ARGUMENT, {
            argument: name,
            value: value
        });
    }
    assert(condition, message, code, params) {
        if (!!condition) {
            return;
        }
        this.throwError(message, code, params);
    }
    assertArgument(condition, message, name, value) {
        if (!!condition) {
            return;
        }
        this.throwArgumentError(message, name, value);
    }
    // Throws UNSUPPORTED_OPERATION when String.prototype.normalize was found
    // broken at module load (see _checkNormalize / _normalizeError).
    // NOTE(review): the `message` parameter is accepted but never used.
    checkNormalize(message) {
        if (_normalizeError) {
            this.throwError("platform missing String.prototype.normalize", Logger.errors.UNSUPPORTED_OPERATION, {
                operation: "String.prototype.normalize", form: _normalizeError
            });
        }
    }
    // Ensure a numeric value is a non-negative integer below 2^53; values
    // that are not numbers pass through unchecked.
    checkSafeUint53(value, message) {
        if (typeof (value) !== "number") {
            return;
        }
        if (message == null) {
            message = "value not safe";
        }
        if (value < 0 || value >= 0x1fffffffffffff) {
            this.throwError(message, Logger.errors.NUMERIC_FAULT, {
                operation: "checkSafeInteger",
                fault: "out-of-safe-range",
                value: value
            });
        }
        if (value % 1) {
            this.throwError(message, Logger.errors.NUMERIC_FAULT, {
                operation: "checkSafeInteger",
                fault: "non-integer",
                value: value
            });
        }
    }
    // Throws MISSING_ARGUMENT / UNEXPECTED_ARGUMENT when a call received
    // fewer / more arguments than expected.
    checkArgumentCount(count, expectedCount, message) {
        if (message) {
            message = ": " + message;
        }
        else {
            message = "";
        }
        if (count < expectedCount) {
            this.throwError("missing argument" + message, Logger.errors.MISSING_ARGUMENT, {
                count: count,
                expectedCount: expectedCount
            });
        }
        if (count > expectedCount) {
            this.throwError("too many arguments" + message, Logger.errors.UNEXPECTED_ARGUMENT, {
                count: count,
                expectedCount: expectedCount
            });
        }
    }
    // Guard that a constructor was invoked with `new`.
    checkNew(target, kind) {
        if (target === Object || target == null) {
            this.throwError("missing new", Logger.errors.MISSING_NEW, { name: kind.name });
        }
    }
    // Guard that an abstract base class is only instantiated via a subclass.
    checkAbstract(target, kind) {
        if (target === kind) {
            this.throwError("cannot instantiate abstract class " + JSON.stringify(kind.name) + " directly; use a sub-class", Logger.errors.UNSUPPORTED_OPERATION, { name: target.name, operation: "new" });
        }
        else if (target === Object || target == null) {
            this.throwError("missing new", Logger.errors.MISSING_NEW, { name: kind.name });
        }
    }
    // Lazily-created singleton bound to this bundle's logger version.
    static globalLogger() {
        if (!_globalLogger) {
            _globalLogger = new Logger(version$2);
        }
        return _globalLogger;
    }
    // Toggle replacing error details with "censored error"; `permanent`
    // locks the current setting for the rest of the process lifetime.
    static setCensorship(censorship, permanent) {
        if (!censorship && permanent) {
            this.globalLogger().throwError("cannot permanently disable censorship", Logger.errors.UNSUPPORTED_OPERATION, {
                operation: "setCensorship"
            });
        }
        if (_permanentCensorErrors) {
            if (!censorship) {
                return;
            }
            this.globalLogger().throwError("error censorship permanent", Logger.errors.UNSUPPORTED_OPERATION, {
                operation: "setCensorship"
            });
        }
        _censorErrors = !!censorship;
        _permanentCensorErrors = !!permanent;
    }
    // Set the module-wide minimum level; warns (does not throw) on bad names.
    static setLogLevel(logLevel) {
        const level = LogLevels[logLevel.toLowerCase()];
        if (level == null) {
            Logger.globalLogger().warn("invalid log level - " + logLevel);
            return;
        }
        _logLevel = level;
    }
    static from(version) {
        return new Logger(version);
    }
}
|
|
Logger.errors = ErrorCode;
|
|
Logger.levels = LogLevel;
|
|
|
|
const version$1 = "bytes/5.7.0";
|
|
|
|
const logger$2 = new Logger(version$1);
|
|
///////////////////////////////
|
|
// Hexable = anything exposing a truthy `toHexString` member (e.g. BigNumber).
// Note: like the original, this throws if `value` is null/undefined.
function isHexable(value) {
    const candidate = value.toHexString;
    return Boolean(candidate);
}
|
|
/**
 * Ensure `array` exposes a .slice method. Array-likes without one get a
 * slice attached that copies the selected range into a new Uint8Array
 * (itself passed back through addSlice so further slices also work).
 * Mutates and returns the same object.
 */
function addSlice(array) {
    if (array.slice) {
        return array;
    }
    array.slice = function () {
        const args = Array.prototype.slice.call(arguments);
        return addSlice(new Uint8Array(Array.prototype.slice.apply(array, args)));
    };
    return array;
}
|
|
// BytesLike = an even-length "0x…" hex string, or an array of byte values.
function isBytesLike(value) {
    return ((isHexString(value) && !(value.length % 2)) || isBytes(value));
}
|
|
function isInteger(value) {
    // Whole-number check: NaN fails the self-equality test, and ±Infinity
    // fails `% 1 === 0` (the remainder is NaN, not 0).
    if (typeof value !== "number") {
        return false;
    }
    if (value !== value) {
        return false;
    }
    return value % 1 === 0;
}
|
|
function isBytes(value) {
    // A value is "bytes" when it is a real Uint8Array, or a non-string
    // array-like whose length is a non-negative integer and whose every
    // element is an integer in [0, 256).
    if (value == null) {
        return false;
    }
    if (value.constructor === Uint8Array) {
        return true;
    }
    if (typeof value === "string") {
        return false;
    }
    if (!isInteger(value.length) || value.length < 0) {
        return false;
    }
    for (let index = 0; index < value.length; index++) {
        const byte = value[index];
        if (!isInteger(byte) || byte < 0 || byte >= 256) {
            return false;
        }
    }
    return true;
}
|
|
/**
 * Normalize a number / hex string / Hexable / byte array into a
 * Uint8Array (augmented via addSlice). Options:
 *   allowMissingPrefix - accept hex strings without a "0x" prefix
 *   hexPad             - "left"/"right" zero-padding for odd-length hex
 * Throws INVALID_ARGUMENT (via logger$2) for anything else.
 */
function arrayify(value, options) {
    if (!options) {
        options = {};
    }
    if (typeof (value) === "number") {
        logger$2.checkSafeUint53(value, "invalid arrayify value");
        // Minimal big-endian encoding of the integer; 0 becomes [0].
        const result = [];
        while (value) {
            result.unshift(value & 0xff);
            value = parseInt(String(value / 256));
        }
        if (result.length === 0) {
            result.push(0);
        }
        return addSlice(new Uint8Array(result));
    }
    if (options.allowMissingPrefix && typeof (value) === "string" && value.substring(0, 2) !== "0x") {
        value = "0x" + value;
    }
    if (isHexable(value)) {
        value = value.toHexString();
    }
    if (isHexString(value)) {
        let hex = value.substring(2);
        if (hex.length % 2) {
            // Odd nibble count: pad per options, or reject.
            if (options.hexPad === "left") {
                hex = "0" + hex;
            }
            else if (options.hexPad === "right") {
                hex += "0";
            }
            else {
                logger$2.throwArgumentError("hex data is odd-length", "value", value);
            }
        }
        const result = [];
        for (let i = 0; i < hex.length; i += 2) {
            result.push(parseInt(hex.substring(i, i + 2), 16));
        }
        return addSlice(new Uint8Array(result));
    }
    if (isBytes(value)) {
        return addSlice(new Uint8Array(value));
    }
    return logger$2.throwArgumentError("invalid arrayify value", "value", value);
}
|
|
function isHexString(value, length) {
    // Must be a "0x"-prefixed hex string (any nibble count, including zero).
    // When `length` (in bytes) is given, the string must encode exactly
    // that many bytes.
    if (typeof value !== "string") {
        return false;
    }
    if (!/^0x[0-9A-Fa-f]*$/.test(value)) {
        return false;
    }
    if (length && value.length !== 2 + 2 * length) {
        return false;
    }
    return true;
}
|
|
const HexCharacters = "0123456789abcdef";
|
|
/**
 * Normalize a number / bigint / hex string / Hexable / byte array into a
 * lowercase "0x…" hex string. Options:
 *   allowMissingPrefix - accept hex strings without a "0x" prefix
 *   hexPad             - "left"/"right" zero-padding for odd-length hex
 * Throws INVALID_ARGUMENT (via logger$2) for anything else.
 */
function hexlify(value, options) {
    if (!options) {
        options = {};
    }
    if (typeof (value) === "number") {
        logger$2.checkSafeUint53(value, "invalid hexlify value");
        let hex = "";
        while (value) {
            hex = HexCharacters[value & 0xf] + hex;
            value = Math.floor(value / 16);
        }
        if (hex.length) {
            // Pad to a whole byte count.
            if (hex.length % 2) {
                hex = "0" + hex;
            }
            return "0x" + hex;
        }
        return "0x00";
    }
    if (typeof (value) === "bigint") {
        value = value.toString(16);
        if (value.length % 2) {
            return ("0x0" + value);
        }
        return "0x" + value;
    }
    if (options.allowMissingPrefix && typeof (value) === "string" && value.substring(0, 2) !== "0x") {
        value = "0x" + value;
    }
    if (isHexable(value)) {
        return value.toHexString();
    }
    if (isHexString(value)) {
        if (value.length % 2) {
            // Odd nibble count: pad per options, or reject.
            if (options.hexPad === "left") {
                value = "0x0" + value.substring(2);
            }
            else if (options.hexPad === "right") {
                value += "0";
            }
            else {
                logger$2.throwArgumentError("hex data is odd-length", "value", value);
            }
        }
        return value.toLowerCase();
    }
    if (isBytes(value)) {
        let result = "0x";
        for (let i = 0; i < value.length; i++) {
            let v = value[i];
            result += HexCharacters[(v & 0xf0) >> 4] + HexCharacters[v & 0x0f];
        }
        return result;
    }
    return logger$2.throwArgumentError("invalid hexlify value", "value", value);
}
|
|
|
|
const version = "rlp/5.7.0";
|
|
|
|
const logger$1 = new Logger(version);
|
|
function arrayifyInteger(value) {
    // Minimal big-endian byte representation of a non-negative integer;
    // zero encodes as an empty array (used for RLP length prefixes).
    const bytes = [];
    while (value) {
        bytes.push(value & 0xff);
        value >>= 8;
    }
    bytes.reverse();
    return bytes;
}
|
|
function unarrayifyInteger(data, offset, length) {
    // Read `length` big-endian bytes starting at `offset` as an unsigned
    // integer (inverse of arrayifyInteger; used for RLP length prefixes).
    let value = 0;
    for (let i = offset; i < offset + length; i++) {
        value = value * 256 + data[i];
    }
    return value;
}
|
|
/**
 * Recursive RLP encoder returning the encoding as an array of byte values.
 * Arrays become RLP lists (0xc0+len short form, 0xf7+lenOfLen long form);
 * BytesLike become RLP strings (0x80+len / 0xb7+lenOfLen); a single byte
 * <= 0x7f encodes as itself.
 */
function _encode(object) {
    if (Array.isArray(object)) {
        let payload = [];
        object.forEach(function (child) {
            payload = payload.concat(_encode(child));
        });
        if (payload.length <= 55) {
            // Short list: single header byte.
            payload.unshift(0xc0 + payload.length);
            return payload;
        }
        // Long list: header byte + big-endian payload length.
        const length = arrayifyInteger(payload.length);
        length.unshift(0xf7 + length.length);
        return length.concat(payload);
    }
    if (!isBytesLike(object)) {
        logger$1.throwArgumentError("RLP object must be BytesLike", "object", object);
    }
    const data = Array.prototype.slice.call(arrayify(object));
    if (data.length === 1 && data[0] <= 0x7f) {
        // Single low byte is its own encoding.
        return data;
    }
    else if (data.length <= 55) {
        data.unshift(0x80 + data.length);
        return data;
    }
    const length = arrayifyInteger(data.length);
    length.unshift(0xb7 + length.length);
    return length.concat(data);
}
|
|
// Public RLP encode: returns the encoding as a "0x…" hex string.
function encode$2(object) {
    return hexlify(_encode(object));
}
|
|
/**
 * Decode consecutive RLP items of a list whose header starts at `offset`,
 * beginning at `childOffset`, until the payload (`length` bytes counted
 * from offset+1) is consumed. A child overrunning the payload boundary
 * raises BUFFER_OVERRUN.
 */
function _decodeChildren(data, offset, childOffset, length) {
    const result = [];
    while (childOffset < offset + 1 + length) {
        const decoded = _decode(data, childOffset);
        result.push(decoded.result);
        childOffset += decoded.consumed;
        if (childOffset > offset + 1 + length) {
            logger$1.throwError("child data too short", Logger.errors.BUFFER_OVERRUN, {});
        }
    }
    return { consumed: (1 + length), result: result };
}
|
|
// returns { consumed: number, result: Object }
|
|
/**
 * Decode one RLP item at `offset`, dispatching on the header byte:
 *   >= 0xf8 long list, >= 0xc0 short list, >= 0xb8 long string,
 *   >= 0x80 short string, otherwise a literal single byte.
 * String results come back as "0x…" hex; lists as nested arrays.
 * Truncated input raises BUFFER_OVERRUN.
 */
function _decode(data, offset) {
    if (data.length === 0) {
        logger$1.throwError("data too short", Logger.errors.BUFFER_OVERRUN, {});
    }
    // Array with extra length prefix
    if (data[offset] >= 0xf8) {
        const lengthLength = data[offset] - 0xf7;
        if (offset + 1 + lengthLength > data.length) {
            logger$1.throwError("data short segment too short", Logger.errors.BUFFER_OVERRUN, {});
        }
        const length = unarrayifyInteger(data, offset + 1, lengthLength);
        if (offset + 1 + lengthLength + length > data.length) {
            logger$1.throwError("data long segment too short", Logger.errors.BUFFER_OVERRUN, {});
        }
        return _decodeChildren(data, offset, offset + 1 + lengthLength, lengthLength + length);
    }
    else if (data[offset] >= 0xc0) {
        // Short list: payload length is encoded in the header byte itself.
        const length = data[offset] - 0xc0;
        if (offset + 1 + length > data.length) {
            logger$1.throwError("data array too short", Logger.errors.BUFFER_OVERRUN, {});
        }
        return _decodeChildren(data, offset, offset + 1, length);
    }
    else if (data[offset] >= 0xb8) {
        // Long string: header is followed by a big-endian payload length.
        const lengthLength = data[offset] - 0xb7;
        if (offset + 1 + lengthLength > data.length) {
            logger$1.throwError("data array too short", Logger.errors.BUFFER_OVERRUN, {});
        }
        const length = unarrayifyInteger(data, offset + 1, lengthLength);
        if (offset + 1 + lengthLength + length > data.length) {
            logger$1.throwError("data array too short", Logger.errors.BUFFER_OVERRUN, {});
        }
        const result = hexlify(data.slice(offset + 1 + lengthLength, offset + 1 + lengthLength + length));
        return { consumed: (1 + lengthLength + length), result: result };
    }
    else if (data[offset] >= 0x80) {
        // Short string.
        const length = data[offset] - 0x80;
        if (offset + 1 + length > data.length) {
            logger$1.throwError("data too short", Logger.errors.BUFFER_OVERRUN, {});
        }
        const result = hexlify(data.slice(offset + 1, offset + 1 + length));
        return { consumed: (1 + length), result: result };
    }
    // Single byte <= 0x7f is its own encoding.
    return { consumed: 1, result: hexlify(data[offset]) };
}
|
|
// Public RLP decode; the input must be exactly one item — trailing bytes
// after the first decoded item are rejected as invalid.
function decode$2(data) {
    const bytes = arrayify(data);
    const decoded = _decode(bytes, 0);
    if (decoded.consumed !== bytes.length) {
        logger$1.throwArgumentError("invalid rlp data", "data", data);
    }
    return decoded.result;
}
|
|
|
|
// Character class for a single hex-or-colon "word" character of an IP.
const word = '[a-fA-F\\d:]';

// Optional look-around boundaries so IP patterns embedded in running text
// do not match inside larger hex-like tokens; empty unless requested.
const boundry = options => {
    if (!options || !options.includeBoundaries) {
        return '';
    }
    return `(?:(?<=\\s|^)(?=${word})|(?<=${word})(?=\\s|$))`;
};
|
|
|
|
const v4 = '(?:25[0-5]|2[0-4]\\d|1\\d\\d|[1-9]\\d|\\d)(?:\\.(?:25[0-5]|2[0-4]\\d|1\\d\\d|[1-9]\\d|\\d)){3}';
|
|
|
|
const v6segment = '[a-fA-F\\d]{1,4}';
|
|
|
|
const v6 = `
|
|
(?:
|
|
(?:${v6segment}:){7}(?:${v6segment}|:)| // 1:2:3:4:5:6:7:: 1:2:3:4:5:6:7:8
|
|
(?:${v6segment}:){6}(?:${v4}|:${v6segment}|:)| // 1:2:3:4:5:6:: 1:2:3:4:5:6::8 1:2:3:4:5:6::8 1:2:3:4:5:6::1.2.3.4
|
|
(?:${v6segment}:){5}(?::${v4}|(?::${v6segment}){1,2}|:)| // 1:2:3:4:5:: 1:2:3:4:5::7:8 1:2:3:4:5::8 1:2:3:4:5::7:1.2.3.4
|
|
(?:${v6segment}:){4}(?:(?::${v6segment}){0,1}:${v4}|(?::${v6segment}){1,3}|:)| // 1:2:3:4:: 1:2:3:4::6:7:8 1:2:3:4::8 1:2:3:4::6:7:1.2.3.4
|
|
(?:${v6segment}:){3}(?:(?::${v6segment}){0,2}:${v4}|(?::${v6segment}){1,4}|:)| // 1:2:3:: 1:2:3::5:6:7:8 1:2:3::8 1:2:3::5:6:7:1.2.3.4
|
|
(?:${v6segment}:){2}(?:(?::${v6segment}){0,3}:${v4}|(?::${v6segment}){1,5}|:)| // 1:2:: 1:2::4:5:6:7:8 1:2::8 1:2::4:5:6:7:1.2.3.4
|
|
(?:${v6segment}:){1}(?:(?::${v6segment}){0,4}:${v4}|(?::${v6segment}){1,6}|:)| // 1:: 1::3:4:5:6:7:8 1::8 1::3:4:5:6:7:1.2.3.4
|
|
(?::(?:(?::${v6segment}){0,5}:${v4}|(?::${v6segment}){1,7}|:)) // ::2:3:4:5:6:7:8 ::2:3:4:5:6:7:8 ::8 ::1.2.3.4
|
|
)(?:%[0-9a-zA-Z]{1,})? // %eth0 %1
|
|
`.replace(/\s*\/\/.*$/gm, '').replace(/\n/g, '').trim();
|
|
|
|
// Pre-compile only the exact regexes because adding a global flag make regexes stateful
|
|
const v46Exact = new RegExp(`(?:^${v4}$)|(?:^${v6}$)`);
|
|
const v4exact = new RegExp(`^${v4}$`);
|
|
const v6exact = new RegExp(`^${v6}$`);
|
|
|
|
const ipRegex = options => options && options.exact
|
|
? v46Exact
|
|
: new RegExp(`(?:${boundry(options)}${v4}${boundry(options)})|(?:${boundry(options)}${v6}${boundry(options)})`, 'g');
|
|
|
|
ipRegex.v4 = options => options && options.exact ? v4exact : new RegExp(`${boundry(options)}${v4}${boundry(options)}`, 'g');
|
|
ipRegex.v6 = options => options && options.exact ? v6exact : new RegExp(`${boundry(options)}${v6}${boundry(options)}`, 'g');
|
|
|
|
// Even though the browser version is a no-op, we wrap it to ensure consistent behavior.
|
|
function functionTimeout(function_) {
    // Browser build: no real timeout support, so the wrapper simply passes
    // the call through. We still set a descriptive name for debugging
    // parity with the Node version.
    const wrapped = (...args) => function_(...args);

    Object.defineProperty(wrapped, 'name', {
        value: `functionTimeout(${function_.name || '<anonymous>'})`,
        configurable: true,
    });

    return wrapped;
}
|
|
|
|
const {toString: toString$2} = Object.prototype;

// Cross-realm RegExp detection via the Object.prototype.toString tag
// (instanceof would fail for regexes created in another realm).
function isRegexp(value) {
    const tag = toString$2.call(value);
    return tag === '[object RegExp]';
}
|
|
|
|
const flagMap = {
|
|
global: 'g',
|
|
ignoreCase: 'i',
|
|
multiline: 'm',
|
|
dotAll: 's',
|
|
sticky: 'y',
|
|
unicode: 'u'
|
|
};
|
|
|
|
/**
 * Clone a RegExp, optionally overriding its `source`, `lastIndex`, or any
 * individual flag (global/ignoreCase/multiline/dotAll/sticky/unicode) via
 * boolean options; unspecified flags are inherited from the original.
 */
function clonedRegexp(regexp, options = {}) {
    if (!isRegexp(regexp)) {
        throw new TypeError('Expected a RegExp instance');
    }
    // Rebuild the flag string, letting boolean options win over the original.
    const flags = Object.keys(flagMap).map(flag => (
        (typeof options[flag] === 'boolean' ? options[flag] : regexp[flag]) ? flagMap[flag] : ''
    )).join('');
    // NOTE: this local deliberately shadows the enclosing function name;
    // only the local binding is used below.
    const clonedRegexp = new RegExp(options.source || regexp.source, flags);
    clonedRegexp.lastIndex = typeof options.lastIndex === 'number' ?
        options.lastIndex :
        regexp.lastIndex;
    return clonedRegexp;
}
|
|
|
|
/**
 * Tests `string` against a clone of `regex`. The clone keeps the caller's
 * regex free of lastIndex side effects (global/sticky regexes are stateful).
 * In this browser build `functionTimeout` is a pass-through, so `timeout`
 * is accepted for API compatibility but has no effect.
 * @param {RegExp} regex Regex to test with
 * @param {string} string Input to test
 * @param {{timeout?: number}} [options] Timeout (ignored in the browser)
 * @returns {boolean} Whether the regex matches
 */
function isMatch(regex, string, {timeout} = {}) {
    // The previous try/catch only rethrew the error unchanged — removed.
    return functionTimeout(() => clonedRegexp(regex).test(string), {timeout})();
}
|
|
|
|
// Upper bounds on the textual length of an address: '255.255.255.255' is
// 15 chars; the longest IPv6 form (including an embedded IPv4 tail) is 45.
// Inputs are truncated to these lengths before regex matching.
const maxIPv4Length = 15;
const maxIPv6Length = 45;

// Options forwarded to isMatch; `timeout` only has effect in the Node build
// of function-timeout — the browser shim above ignores it.
const options = {
	timeout: 400,
};
|
|
|
|
/**
 * True when `string` is exactly a valid IPv4 or IPv6 address.
 * The input is truncated to the longest possible address length first so
 * the regex never scans unbounded input.
 */
function isIP(string) {
    const candidate = string.slice(0, maxIPv6Length);
    return isMatch(ipRegex({exact: true}), candidate, options);
}

/** True when `string` is exactly a valid IPv6 address. */
function isIPv6(string) {
    const candidate = string.slice(0, maxIPv6Length);
    return isMatch(ipRegex.v6({exact: true}), candidate, options);
}

/** True when `string` is exactly a valid IPv4 address. */
function isIPv4(string) {
    const candidate = string.slice(0, maxIPv4Length);
    return isMatch(ipRegex.v4({exact: true}), candidate, options);
}

// Short aliases used by the multiaddr codec below.
const isV4 = isIPv4;
const isV6 = isIPv6;
|
|
// Copied from https://github.com/indutny/node-ip/blob/master/lib/ip.js#L7
// but with buf/offset args removed because we don't use them
//
// Parses a textual IPv4/IPv6 address into its network-byte-order bytes:
// 4 bytes for IPv4, 16 for IPv6. Throws on anything that is neither.
const toBytes = function (ip) {
    let offset = 0;
    let result;
    ip = ip.trim();
    if (isV4(ip)) {
        // IPv4: four decimal octets, one byte each.
        result = new Uint8Array(offset + 4);
        ip.split(/\./g).forEach((byte) => {
            result[offset++] = parseInt(byte, 10) & 0xff;
        });
    }
    else if (isV6(ip)) {
        const sections = ip.split(':', 8);
        let i;
        for (i = 0; i < sections.length; i++) {
            const isv4 = isV4(sections[i]);
            let v4Buffer;
            if (isv4) {
                // Embedded IPv4 tail (e.g. ::ffff:1.2.3.4): recurse to get its
                // 4 bytes, then rewrite this section as the first hex word.
                v4Buffer = toBytes(sections[i]);
                sections[i] = toString$3(v4Buffer.slice(0, 2), 'base16');
            }
            // Insert the second hex word of the IPv4 tail right after the
            // first (the ++i also skips over the inserted section).
            if (v4Buffer != null && ++i < 8) {
                sections.splice(i, 0, toString$3(v4Buffer.slice(2, 4), 'base16'));
            }
        }
        // Expand the '::' shorthand back to a full eight words.
        if (sections[0] === '') {
            // '::' at the start — pad zeros on the left.
            while (sections.length < 8)
                sections.unshift('0');
        }
        else if (sections[sections.length - 1] === '') {
            // '::' at the end — pad zeros on the right.
            while (sections.length < 8)
                sections.push('0');
        }
        else if (sections.length < 8) {
            // '::' in the middle — locate the empty section and replace it
            // with however many zero words are missing.
            for (i = 0; i < sections.length && sections[i] !== ''; i++)
                ;
            const argv = [i, 1];
            for (i = 9 - sections.length; i > 0; i--) {
                argv.push('0');
            }
            sections.splice.apply(sections, argv);
        }
        // Emit each 16-bit word big-endian.
        result = new Uint8Array(offset + 16);
        for (i = 0; i < sections.length; i++) {
            const word = parseInt(sections[i], 16);
            result[offset++] = (word >> 8) & 0xff;
            result[offset++] = word & 0xff;
        }
    }
    if (result == null) {
        throw new Error(`invalid ip address "${ip}"`);
    }
    return result;
};
|
|
// Copied from https://github.com/indutny/node-ip/blob/master/lib/ip.js#L63
//
// Renders 4 bytes as dotted-quad IPv4 or 16 bytes as compressed IPv6.
// Any other `length` yields '' (callers validate via isIP afterwards).
const toString$1 = function (buf, offset = 0, length) {
    offset = ~~offset;
    length = length ?? (buf.length - offset);
    const result = [];
    let string = '';
    // FIX: honour the view's byteOffset/byteLength. `new DataView(buf.buffer)`
    // alone reads from the start of the underlying ArrayBuffer, which is
    // wrong when `buf` is a subarray view — the indexed IPv4 path below is
    // already view-relative, so the IPv6 path must be too.
    const view = new DataView(buf.buffer, buf.byteOffset, buf.byteLength);
    if (length === 4) {
        // IPv4
        for (let i = 0; i < length; i++) {
            result.push(buf[offset + i]);
        }
        string = result.join('.');
    }
    else if (length === 16) {
        // IPv6: sixteen bytes as eight big-endian hex words.
        for (let i = 0; i < length; i += 2) {
            result.push(view.getUint16(offset + i).toString(16));
        }
        string = result.join(':');
        // Collapse the first run of zero words into '::'.
        string = string.replace(/(^|:)0(:0)*:0(:|$)/, '$1::$3');
        string = string.replace(/:{3,4}/, '::');
    }
    return string;
};
|
|
|
|
// Sentinel size meaning "variable length" (payload is varint length-prefixed).
const V$1 = -1;
// Protocol lookup tables, keyed by name and numeric code; populated from
// `table$1` just below.
const names$1 = {};
const codes$2 = {};
// Multiaddr protocol registry: [code, size-in-bits, name, resolvable?, path?].
// A size of 0 means the protocol carries no address payload.
const table$1 = [
    [4, 32, 'ip4'],
    [6, 16, 'tcp'],
    [33, 16, 'dccp'],
    [41, 128, 'ip6'],
    [42, V$1, 'ip6zone'],
    [53, V$1, 'dns', true],
    [54, V$1, 'dns4', true],
    [55, V$1, 'dns6', true],
    [56, V$1, 'dnsaddr', true],
    [132, 16, 'sctp'],
    [273, 16, 'udp'],
    [275, 0, 'p2p-webrtc-star'],
    [276, 0, 'p2p-webrtc-direct'],
    [277, 0, 'p2p-stardust'],
    [280, 0, 'webrtc'],
    [290, 0, 'p2p-circuit'],
    [301, 0, 'udt'],
    [302, 0, 'utp'],
    [400, V$1, 'unix', false, true],
    // `ipfs` is added before `p2p` for legacy support.
    // All text representations will default to `p2p`, but `ipfs` will
    // still be supported
    [421, V$1, 'ipfs'],
    // `p2p` is the preferred name for 421, and is now the default
    [421, V$1, 'p2p'],
    [443, 0, 'https'],
    [444, 96, 'onion'],
    [445, 296, 'onion3'],
    [446, V$1, 'garlic64'],
    [460, 0, 'quic'],
    [465, 0, 'webtransport'],
    [466, V$1, 'certhash'],
    [477, 0, 'ws'],
    [478, 0, 'wss'],
    [479, 0, 'p2p-websocket-star'],
    [480, 0, 'http'],
    [777, V$1, 'memory']
];
|
|
// populate tables — later rows win on duplicate codes, so code 421 resolves
// to 'p2p' while names$1 still carries the legacy 'ipfs' entry.
for (const row of table$1) {
    const proto = createProtocol$1(...row);
    codes$2[proto.code] = proto;
    names$1[proto.name] = proto;
}
|
|
/**
 * Builds a protocol descriptor from one registry row, normalising the
 * optional flags to real booleans.
 */
function createProtocol$1(code, size, name, resolvable, path) {
    const proto = { code, size, name };
    proto.resolvable = Boolean(resolvable);
    proto.path = Boolean(path);
    return proto;
}
|
|
/**
 * Looks a protocol up by numeric code or by name.
 * @throws {Error} If the protocol is unknown or `proto` is neither a
 *   number nor a string.
 */
function getProtocol$1(proto) {
    if (typeof proto === 'number') {
        const found = codes$2[proto];
        if (found != null) {
            return found;
        }
        throw new Error(`no protocol with code: ${proto}`);
    }
    if (typeof proto === 'string') {
        const found = names$1[proto];
        if (found != null) {
            return found;
        }
        throw new Error(`no protocol with name: ${proto}`);
    }
    throw new Error(`invalid protocol id type: ${typeof proto}`);
}
|
|
|
|
var encode_1 = encode$1;

var MSB$1 = 0x80
  , REST$1 = 0x7F
  , MSBALL = ~REST$1
  , INT = Math.pow(2, 31);

/**
 * Encodes `num` as an unsigned LEB128 varint into `out` (created if omitted)
 * starting at `offset`. Sets `encode$1.bytes` to the number of bytes written.
 * @throws {RangeError} For values above Number.MAX_SAFE_INTEGER
 */
function encode$1(num, out, offset) {
  if (Number.MAX_SAFE_INTEGER && num > Number.MAX_SAFE_INTEGER) {
    encode$1.bytes = 0;
    throw new RangeError('Could not encode varint')
  }
  out = out || [];
  offset = offset || 0;
  var start = offset;

  // Above 2^31 bitwise ops would truncate, so divide down first.
  while (num >= INT) {
    out[offset++] = (num & 0xFF) | MSB$1;
    num /= 128;
  }
  // Remaining value fits in 31 bits; emit 7-bit groups with the MSB set.
  while (num & MSBALL) {
    out[offset++] = (num & 0xFF) | MSB$1;
    num >>>= 7;
  }
  // Final group has the continuation bit clear.
  out[offset] = num | 0;

  encode$1.bytes = offset - start + 1;

  return out
}
|
|
|
|
var decode$1 = read;

var MSB = 0x80
  , REST = 0x7F;

/**
 * Decodes an unsigned LEB128 varint from `buf` starting at `offset`.
 * Sets `read.bytes` to the number of bytes consumed.
 * @throws {RangeError} On truncated input or varints longer than 8 groups
 */
function read(buf, offset) {
  offset = offset || 0;
  var result = 0
    , shift = 0
    , position = offset
    , byte
    , len = buf.length;

  do {
    if (position >= len || shift > 49) {
      read.bytes = 0;
      throw new RangeError('Could not decode varint')
    }
    byte = buf[position++];
    // Below 2^28 plain shifts are exact; beyond that, multiply to stay
    // within double precision instead of overflowing 32-bit bitwise ops.
    result += shift < 28
      ? (byte & REST) << shift
      : (byte & REST) * Math.pow(2, shift);
    shift += 7;
  } while (byte >= MSB)

  read.bytes = position - offset;

  return result
}
|
|
|
|
// Smallest values that need 2..10 varint bytes respectively.
var N1$1 = Math.pow(2, 7);
var N2$1 = Math.pow(2, 14);
var N3$1 = Math.pow(2, 21);
var N4$1 = Math.pow(2, 28);
var N5$1 = Math.pow(2, 35);
var N6$1 = Math.pow(2, 42);
var N7$1 = Math.pow(2, 49);
var N8$1 = Math.pow(2, 56);
var N9$1 = Math.pow(2, 63);

/**
 * Returns how many bytes the varint encoding of `value` occupies (1..10).
 */
var length = function (value) {
  var limits = [N1$1, N2$1, N3$1, N4$1, N5$1, N6$1, N7$1, N8$1, N9$1];
  for (var bytes = 0; bytes < limits.length; bytes++) {
    if (value < limits[bytes]) {
      return bytes + 1;
    }
  }
  return 10;
};
|
|
|
|
// Aggregate API of the vendored varint module
// (https://github.com/chrisdickinson/varint) used by the codec helpers below.
var varint = {
    encode: encode_1
  , decode: decode$1
  , encodingLength: length
};
|
|
|
|
/**
 * Concatenates `arrays` into a single Uint8Array.
 * @param {Uint8Array[]} arrays Chunks to join
 * @param {number} [length] Total byte length; computed when omitted/0
 * @returns {Uint8Array} Joined bytes
 */
function concat(arrays, length) {
    if (!length) {
        length = 0;
        for (const arr of arrays) {
            length += arr.length;
        }
    }
    const output = allocUnsafe$1(length);
    let offset = 0;
    for (const arr of arrays) {
        output.set(arr, offset);
        offset += arr.length;
    }
    return output;
}
|
|
|
|
/**
 * Convert [code,Uint8Array] to string
 */
function convertToString(proto, buf) {
    const code = getProtocol$1(proto).code;
    if (code === 4 || code === 41) { // ip4 / ip6
        return bytes2ip(buf);
    }
    if (code === 6 || code === 273 || code === 33 || code === 132) { // tcp / udp / dccp / sctp
        return bytes2port(buf).toString();
    }
    if (code === 53 || code === 54 || code === 55 || code === 56 || code === 400 || code === 777) {
        // dns / dns4 / dns6 / dnsaddr / unix / memory — varint-prefixed utf8
        return bytes2str(buf);
    }
    if (code === 421) { // ipfs / p2p
        return bytes2mh(buf);
    }
    if (code === 444 || code === 445) { // onion / onion3
        return bytes2onion(buf);
    }
    if (code === 466) { // certhash
        return bytes2mb(buf);
    }
    return toString$3(buf, 'base16'); // no clue. convert to hex
}
|
|
/**
 * Convert a protocol's string address representation to its bytes.
 */
function convertToBytes(proto, str) {
    const code = getProtocol$1(proto).code;
    if (code === 4 || code === 41) { // ip4 / ip6
        return ip2bytes(str);
    }
    if (code === 6 || code === 273 || code === 33 || code === 132) { // tcp / udp / dccp / sctp
        return port2bytes(parseInt(str, 10));
    }
    if (code === 53 || code === 54 || code === 55 || code === 56 || code === 400 || code === 777) {
        // dns / dns4 / dns6 / dnsaddr / unix / memory — varint-prefixed utf8
        return str2bytes(str);
    }
    if (code === 421) { // ipfs / p2p
        return mh2bytes(str);
    }
    if (code === 444) { // onion
        return onion2bytes(str);
    }
    if (code === 445) { // onion3
        return onion32bytes(str);
    }
    if (code === 466) { // certhash
        return mb2bytes(str);
    }
    return fromString$1(str, 'base16'); // no clue. convert from hex
}
|
|
// One decoder per known multibase; OR them all together so a string in any
// supported base (identified by its multibase prefix) can be decoded.
const decoders$1 = Object.values(bases).map((c) => c.decoder);
const anybaseDecoder = decoders$1.reduce((acc, decoder) => acc.or(decoder));
|
|
/** Validates `ipString` and converts it to network-order bytes. */
function ip2bytes(ipString) {
    if (isIP(ipString)) {
        return toBytes(ipString);
    }
    throw new Error(`invalid ip address "${ipString}"`);
}
|
|
/** Converts address bytes back to their textual IP form, validating it. */
function bytes2ip(ipBuff) {
    const ipString = toString$1(ipBuff, 0, ipBuff.length);
    if (ipString == null) {
        throw new Error('ipBuff is required');
    }
    // toString$1 yields '' for unsupported lengths, which fails this check.
    if (!isIP(ipString)) {
        throw new Error(`invalid ip address "${ipString}"`);
    }
    return ipString;
}
|
|
/** Encodes a port number as two big-endian bytes (network order). */
function port2bytes(port) {
    const bytes = new Uint8Array(2);
    new DataView(bytes.buffer).setUint16(0, port);
    return bytes;
}
|
|
/** Decodes a big-endian 16-bit port from the start of `buf`. */
function bytes2port(buf) {
    // Reading at buf.byteOffset keeps subarray views correct.
    const view = new DataView(buf.buffer);
    return view.getUint16(buf.byteOffset);
}
|
|
/** Encodes `str` as a varint length prefix followed by its utf8 bytes. */
function str2bytes(str) {
    const payload = fromString$1(str);
    const prefix = Uint8Array.from(varint.encode(payload.length));
    return concat([prefix, payload], prefix.length + payload.length);
}
|
|
/** Decodes a varint-length-prefixed utf8 string, validating the length. */
function bytes2str(buf) {
    const expected = varint.decode(buf);
    const payload = buf.slice(varint.decode.bytes);
    if (payload.length !== expected) {
        throw new Error('inconsistent lengths');
    }
    return toString$3(payload);
}
|
|
/** Converts a multihash/CID string to varint-length-prefixed multihash bytes. */
function mh2bytes(hash) {
    // Legacy base58btc hashes (Qm… / 1…) decode directly; anything else is
    // parsed as a CID string and its multihash extracted.
    const mh = (hash[0] === 'Q' || hash[0] === '1')
        ? decode$3(base58btc.decode(`z${hash}`)).bytes
        : CID.parse(hash).multihash.bytes;
    // the address is a varint prefixed multihash string representation
    const prefix = Uint8Array.from(varint.encode(mh.length));
    return concat([prefix, mh], prefix.length + mh.length);
}
|
|
/** Decodes a multibase string and prefixes the bytes with a varint length. */
function mb2bytes(mbstr) {
    const decoded = anybaseDecoder.decode(mbstr);
    const prefix = Uint8Array.from(varint.encode(decoded.length));
    return concat([prefix, decoded], prefix.length + decoded.length);
}
|
|
/** Renders varint-length-prefixed bytes as a base64url multibase string. */
function bytes2mb(buf) {
    const expected = varint.decode(buf);
    const digest = buf.slice(varint.decode.bytes);
    if (digest.length !== expected) {
        throw new Error('inconsistent lengths');
    }
    // 'u' is the multibase prefix for base64url.
    return 'u' + toString$3(digest, 'base64url');
}
|
|
/**
 * Converts varint-length-prefixed multihash bytes to a base58btc string.
 */
function bytes2mh(buf) {
    const expected = varint.decode(buf);
    const mh = buf.slice(varint.decode.bytes);
    if (mh.length !== expected) {
        throw new Error('inconsistent lengths');
    }
    return toString$3(mh, 'base58btc');
}
|
|
/**
 * Parses an onion (v2) address of the form `<16-char-base32-host>:<port>`
 * into host bytes followed by a big-endian 16-bit port.
 * @throws {Error} On a missing port, wrong host length, or port out of range
 */
function onion2bytes(str) {
    const addr = str.split(':');
    if (addr.length !== 2) {
        throw new Error(`failed to parse onion addr: ["'${addr.join('", "')}'"]' does not contain a port number`);
    }
    if (addr[0].length !== 16) {
        throw new Error(`failed to parse onion addr: ${addr[0]} not a Tor onion address.`);
    }
    // onion addresses do not include the multibase prefix, add it before decoding
    const buf = base32.decode('b' + addr[0]);
    // onion port number
    const port = parseInt(addr[1], 10);
    // FIX: upper bound was `> 65536`, letting 65536 through even though it
    // does not fit in 16 bits — setUint16 would silently truncate it to 0.
    if (port < 1 || port > 65535) {
        throw new Error('Port number is not in range(1, 65536)');
    }
    const portBuf = port2bytes(port);
    return concat([buf, portBuf], buf.length + portBuf.length);
}
|
|
/**
 * Parses an onion3 address of the form `<56-char-base32-host>:<port>`
 * into host bytes followed by a big-endian 16-bit port.
 * @throws {Error} On a missing port, wrong host length, or port out of range
 */
function onion32bytes(str) {
    const addr = str.split(':');
    if (addr.length !== 2) {
        throw new Error(`failed to parse onion addr: ["'${addr.join('", "')}'"]' does not contain a port number`);
    }
    if (addr[0].length !== 56) {
        throw new Error(`failed to parse onion addr: ${addr[0]} not a Tor onion3 address.`);
    }
    // onion addresses do not include the multibase prefix, add it before decoding
    const buf = base32.decode(`b${addr[0]}`);
    // onion port number
    const port = parseInt(addr[1], 10);
    // FIX: upper bound was `> 65536`, letting 65536 through even though it
    // does not fit in 16 bits — setUint16 would silently truncate it to 0.
    if (port < 1 || port > 65535) {
        throw new Error('Port number is not in range(1, 65536)');
    }
    const portBuf = port2bytes(port);
    return concat([buf, portBuf], buf.length + portBuf.length);
}
|
|
/** Renders onion address bytes as `<base32-host>:<port>`. */
function bytes2onion(buf) {
    // The last two bytes are the big-endian port; the rest is the host.
    const split = buf.length - 2;
    const host = toString$3(buf.slice(0, split), 'base32');
    const port = bytes2port(buf.slice(split));
    return `${host}:${port}`;
}
|
|
|
|
// CommonJS module shells for the bundled protobufjs build. Each pairs a
// lazily-populated exports object with a matching require*() initializer
// defined further below.
var protobufjs = {exports: {}};

var src = {exports: {}};

var indexLight = {exports: {}};

var indexMinimal$1 = {};

var minimal$2 = {};
|
|
|
|
var aspromise;
var hasRequiredAspromise;

function requireAspromise () {
	if (hasRequiredAspromise) return aspromise;
	hasRequiredAspromise = 1;
	aspromise = asPromise;

	/**
	 * Callback as used by {@link util.asPromise}.
	 * @typedef asPromiseCallback
	 * @type {function}
	 * @param {Error|null} error Error, if any
	 * @param {...*} params Additional arguments
	 * @returns {undefined}
	 */

	/**
	 * Returns a promise from a node-style callback function. The callback is
	 * appended after any extra arguments; the promise resolves with the
	 * callback's first non-error argument.
	 * @memberof util
	 * @param {asPromiseCallback} fn Function to call
	 * @param {*} ctx Function context
	 * @param {...*} params Function arguments
	 * @returns {Promise<*>} Promisified function
	 */
	function asPromise(fn, ctx/*, varargs */) {
	    var params = Array.prototype.slice.call(arguments, 2);
	    var pending = true; // guards against settling twice (callback + throw)
	    return new Promise(function executor(resolve, reject) {
	        params.push(function callback(err/*, varargs */) {
	            if (!pending)
	                return;
	            pending = false;
	            if (err)
	                reject(err);
	            else
	                resolve.apply(null, Array.prototype.slice.call(arguments, 1));
	        });
	        try {
	            fn.apply(ctx || null, params);
	        } catch (err) {
	            if (pending) {
	                pending = false;
	                reject(err);
	            }
	        }
	    });
	}
	return aspromise;
}
|
|
|
|
var base64$1 = {};

var hasRequiredBase64;

function requireBase64 () {
	if (hasRequiredBase64) return base64$1;
	hasRequiredBase64 = 1;
	(function (exports) {

		/**
		 * A minimal base64 implementation for number arrays.
		 * @memberof util
		 * @namespace
		 */
		var base64 = exports;

		/**
		 * Calculates the byte length of a base64 encoded string.
		 * @param {string} string Base64 encoded string
		 * @returns {number} Byte length
		 */
		base64.length = function length(string) {
			var p = string.length;
			if (!p)
				return 0;
			// Count trailing '=' padding characters (at most two).
			var padding = 0;
			while (--p % 4 > 1 && string.charAt(p) === "=")
				++padding;
			return Math.ceil(string.length * 3) / 4 - padding;
		};

		// Base64 encoding table
		var b64 = new Array(64);

		// Base64 decoding table
		var s64 = new Array(123);

		// 65..90, 97..122, 48..57, 43, 47
		for (var i = 0; i < 64;)
			s64[b64[i] = i < 26 ? i + 65 : i < 52 ? i + 71 : i < 62 ? i - 4 : i - 59 | 43] = i++;

		/**
		 * Encodes a buffer to a base64 encoded string.
		 * @param {Uint8Array} buffer Source buffer
		 * @param {number} start Source start
		 * @param {number} end Source end
		 * @returns {string} Base64 encoded string
		 */
		base64.encode = function encode(buffer, start, end) {
			var parts = null,
			    chunk = [];
			var i = 0,     // next write index into chunk
			    state = 0, // position within the current 3-byte group
			    carry;     // bits carried over from the previous byte
			while (start < end) {
				var b = buffer[start++];
				switch (state) {
					case 0:
						chunk[i++] = b64[b >> 2];
						carry = (b & 3) << 4;
						state = 1;
						break;
					case 1:
						chunk[i++] = b64[carry | b >> 4];
						carry = (b & 15) << 2;
						state = 2;
						break;
					case 2:
						chunk[i++] = b64[carry | b >> 6];
						chunk[i++] = b64[b & 63];
						state = 0;
						break;
				}
				// Flush periodically so fromCharCode.apply stays below
				// engine argument-count limits.
				if (i > 8191) {
					(parts || (parts = [])).push(String.fromCharCode.apply(String, chunk));
					i = 0;
				}
			}
			// Emit the final partial group plus '=' padding.
			if (state) {
				chunk[i++] = b64[carry];
				chunk[i++] = 61;
				if (state === 1)
					chunk[i++] = 61;
			}
			if (parts) {
				if (i)
					parts.push(String.fromCharCode.apply(String, chunk.slice(0, i)));
				return parts.join("");
			}
			return String.fromCharCode.apply(String, chunk.slice(0, i));
		};

		var invalidEncoding = "invalid encoding";

		/**
		 * Decodes a base64 encoded string to a buffer.
		 * @param {string} string Source string
		 * @param {Uint8Array} buffer Destination buffer
		 * @param {number} offset Destination offset
		 * @returns {number} Number of bytes written
		 * @throws {Error} If encoding is invalid
		 */
		base64.decode = function decode(string, buffer, offset) {
			var start = offset;
			var state = 0, // position within the current 4-char group
			    carry;     // bits carried over from the previous character
			for (var i = 0; i < string.length;) {
				var c = string.charCodeAt(i++);
				// '=' padding is only legal in the 3rd/4th slot of a group.
				if (c === 61 && state > 1)
					break;
				if ((c = s64[c]) === undefined)
					throw Error(invalidEncoding);
				switch (state) {
					case 0:
						carry = c;
						state = 1;
						break;
					case 1:
						buffer[offset++] = carry << 2 | (c & 48) >> 4;
						carry = c;
						state = 2;
						break;
					case 2:
						buffer[offset++] = (carry & 15) << 4 | (c & 60) >> 2;
						carry = c;
						state = 3;
						break;
					case 3:
						buffer[offset++] = (carry & 3) << 6 | c;
						state = 0;
						break;
				}
			}
			// A group cannot stop after a single character.
			if (state === 1)
				throw Error(invalidEncoding);
			return offset - start;
		};

		/**
		 * Tests if the specified string appears to be base64 encoded.
		 * @param {string} string String to test
		 * @returns {boolean} `true` if probably base64 encoded, otherwise false
		 */
		base64.test = function test(string) {
			return /^(?:[A-Za-z0-9+/]{4})*(?:[A-Za-z0-9+/]{2}==|[A-Za-z0-9+/]{3}=)?$/.test(string);
		};
	} (base64$1));
	return base64$1;
}
|
|
|
|
var eventemitter;
var hasRequiredEventemitter;

function requireEventemitter () {
	if (hasRequiredEventemitter) return eventemitter;
	hasRequiredEventemitter = 1;
	eventemitter = EventEmitter;

	/**
	 * Constructs a new event emitter instance.
	 * @classdesc A minimal event emitter.
	 * @memberof util
	 * @constructor
	 */
	function EventEmitter() {

		/**
		 * Registered listeners, keyed by event name.
		 * @type {Object.<string,*>}
		 * @private
		 */
		this._listeners = {};
	}

	/**
	 * Registers an event listener.
	 * @param {string} evt Event name
	 * @param {function} fn Listener
	 * @param {*} [ctx] Listener context (defaults to the emitter itself)
	 * @returns {util.EventEmitter} `this`
	 */
	EventEmitter.prototype.on = function on(evt, fn, ctx) {
		var list = this._listeners[evt];
		if (!list)
			list = this._listeners[evt] = [];
		list.push({ fn: fn, ctx: ctx || this });
		return this;
	};

	/**
	 * Removes an event listener or any matching listeners if arguments are omitted.
	 * @param {string} [evt] Event name. Removes all listeners if omitted.
	 * @param {function} [fn] Listener to remove. Removes all listeners of `evt` if omitted.
	 * @returns {util.EventEmitter} `this`
	 */
	EventEmitter.prototype.off = function off(evt, fn) {
		if (evt === undefined) {
			this._listeners = {};
		} else if (fn === undefined) {
			this._listeners[evt] = [];
		} else {
			var list = this._listeners[evt];
			var i = 0;
			while (i < list.length) {
				if (list[i].fn === fn)
					list.splice(i, 1);
				else
					++i;
			}
		}
		return this;
	};

	/**
	 * Emits an event by calling its listeners with the specified arguments.
	 * @param {string} evt Event name
	 * @param {...*} args Arguments
	 * @returns {util.EventEmitter} `this`
	 */
	EventEmitter.prototype.emit = function emit(evt) {
		var list = this._listeners[evt];
		if (list) {
			var args = Array.prototype.slice.call(arguments, 1);
			for (var i = 0; i < list.length; ++i)
				list[i].fn.apply(list[i].ctx, args);
		}
		return this;
	};
	return eventemitter;
}
|
|
|
|
var float;
|
|
var hasRequiredFloat;
|
|
|
|
function requireFloat () {
|
|
if (hasRequiredFloat) return float;
|
|
hasRequiredFloat = 1;
|
|
|
|
float = factory(factory);
|
|
|
|
/**
|
|
* Reads / writes floats / doubles from / to buffers.
|
|
* @name util.float
|
|
* @namespace
|
|
*/
|
|
|
|
/**
|
|
* Writes a 32 bit float to a buffer using little endian byte order.
|
|
* @name util.float.writeFloatLE
|
|
* @function
|
|
* @param {number} val Value to write
|
|
* @param {Uint8Array} buf Target buffer
|
|
* @param {number} pos Target buffer offset
|
|
* @returns {undefined}
|
|
*/
|
|
|
|
/**
|
|
* Writes a 32 bit float to a buffer using big endian byte order.
|
|
* @name util.float.writeFloatBE
|
|
* @function
|
|
* @param {number} val Value to write
|
|
* @param {Uint8Array} buf Target buffer
|
|
* @param {number} pos Target buffer offset
|
|
* @returns {undefined}
|
|
*/
|
|
|
|
/**
|
|
* Reads a 32 bit float from a buffer using little endian byte order.
|
|
* @name util.float.readFloatLE
|
|
* @function
|
|
* @param {Uint8Array} buf Source buffer
|
|
* @param {number} pos Source buffer offset
|
|
* @returns {number} Value read
|
|
*/
|
|
|
|
/**
|
|
* Reads a 32 bit float from a buffer using big endian byte order.
|
|
* @name util.float.readFloatBE
|
|
* @function
|
|
* @param {Uint8Array} buf Source buffer
|
|
* @param {number} pos Source buffer offset
|
|
* @returns {number} Value read
|
|
*/
|
|
|
|
/**
|
|
* Writes a 64 bit double to a buffer using little endian byte order.
|
|
* @name util.float.writeDoubleLE
|
|
* @function
|
|
* @param {number} val Value to write
|
|
* @param {Uint8Array} buf Target buffer
|
|
* @param {number} pos Target buffer offset
|
|
* @returns {undefined}
|
|
*/
|
|
|
|
/**
|
|
* Writes a 64 bit double to a buffer using big endian byte order.
|
|
* @name util.float.writeDoubleBE
|
|
* @function
|
|
* @param {number} val Value to write
|
|
* @param {Uint8Array} buf Target buffer
|
|
* @param {number} pos Target buffer offset
|
|
* @returns {undefined}
|
|
*/
|
|
|
|
/**
|
|
* Reads a 64 bit double from a buffer using little endian byte order.
|
|
* @name util.float.readDoubleLE
|
|
* @function
|
|
* @param {Uint8Array} buf Source buffer
|
|
* @param {number} pos Source buffer offset
|
|
* @returns {number} Value read
|
|
*/
|
|
|
|
/**
|
|
* Reads a 64 bit double from a buffer using big endian byte order.
|
|
* @name util.float.readDoubleBE
|
|
* @function
|
|
* @param {Uint8Array} buf Source buffer
|
|
* @param {number} pos Source buffer offset
|
|
* @returns {number} Value read
|
|
*/
|
|
|
|
// Factory function for the purpose of node-based testing in modified global environments
|
|
function factory(exports) {
|
|
|
|
// float: typed array
|
|
if (typeof Float32Array !== "undefined") (function() {
|
|
|
|
var f32 = new Float32Array([ -0 ]),
|
|
f8b = new Uint8Array(f32.buffer),
|
|
le = f8b[3] === 128;
|
|
|
|
function writeFloat_f32_cpy(val, buf, pos) {
|
|
f32[0] = val;
|
|
buf[pos ] = f8b[0];
|
|
buf[pos + 1] = f8b[1];
|
|
buf[pos + 2] = f8b[2];
|
|
buf[pos + 3] = f8b[3];
|
|
}
|
|
|
|
function writeFloat_f32_rev(val, buf, pos) {
|
|
f32[0] = val;
|
|
buf[pos ] = f8b[3];
|
|
buf[pos + 1] = f8b[2];
|
|
buf[pos + 2] = f8b[1];
|
|
buf[pos + 3] = f8b[0];
|
|
}
|
|
|
|
/* istanbul ignore next */
|
|
exports.writeFloatLE = le ? writeFloat_f32_cpy : writeFloat_f32_rev;
|
|
/* istanbul ignore next */
|
|
exports.writeFloatBE = le ? writeFloat_f32_rev : writeFloat_f32_cpy;
|
|
|
|
function readFloat_f32_cpy(buf, pos) {
|
|
f8b[0] = buf[pos ];
|
|
f8b[1] = buf[pos + 1];
|
|
f8b[2] = buf[pos + 2];
|
|
f8b[3] = buf[pos + 3];
|
|
return f32[0];
|
|
}
|
|
|
|
function readFloat_f32_rev(buf, pos) {
|
|
f8b[3] = buf[pos ];
|
|
f8b[2] = buf[pos + 1];
|
|
f8b[1] = buf[pos + 2];
|
|
f8b[0] = buf[pos + 3];
|
|
return f32[0];
|
|
}
|
|
|
|
/* istanbul ignore next */
|
|
exports.readFloatLE = le ? readFloat_f32_cpy : readFloat_f32_rev;
|
|
/* istanbul ignore next */
|
|
exports.readFloatBE = le ? readFloat_f32_rev : readFloat_f32_cpy;
|
|
|
|
// float: ieee754
|
|
})(); else (function() {
|
|
|
|
function writeFloat_ieee754(writeUint, val, buf, pos) {
|
|
var sign = val < 0 ? 1 : 0;
|
|
if (sign)
|
|
val = -val;
|
|
if (val === 0)
|
|
writeUint(1 / val > 0 ? /* positive */ 0 : /* negative 0 */ 2147483648, buf, pos);
|
|
else if (isNaN(val))
|
|
writeUint(2143289344, buf, pos);
|
|
else if (val > 3.4028234663852886e+38) // +-Infinity
|
|
writeUint((sign << 31 | 2139095040) >>> 0, buf, pos);
|
|
else if (val < 1.1754943508222875e-38) // denormal
|
|
writeUint((sign << 31 | Math.round(val / 1.401298464324817e-45)) >>> 0, buf, pos);
|
|
else {
|
|
var exponent = Math.floor(Math.log(val) / Math.LN2),
|
|
mantissa = Math.round(val * Math.pow(2, -exponent) * 8388608) & 8388607;
|
|
writeUint((sign << 31 | exponent + 127 << 23 | mantissa) >>> 0, buf, pos);
|
|
}
|
|
}
|
|
|
|
exports.writeFloatLE = writeFloat_ieee754.bind(null, writeUintLE);
|
|
exports.writeFloatBE = writeFloat_ieee754.bind(null, writeUintBE);
|
|
|
|
function readFloat_ieee754(readUint, buf, pos) {
|
|
var uint = readUint(buf, pos),
|
|
sign = (uint >> 31) * 2 + 1,
|
|
exponent = uint >>> 23 & 255,
|
|
mantissa = uint & 8388607;
|
|
return exponent === 255
|
|
? mantissa
|
|
? NaN
|
|
: sign * Infinity
|
|
: exponent === 0 // denormal
|
|
? sign * 1.401298464324817e-45 * mantissa
|
|
: sign * Math.pow(2, exponent - 150) * (mantissa + 8388608);
|
|
}
|
|
|
|
exports.readFloatLE = readFloat_ieee754.bind(null, readUintLE);
|
|
exports.readFloatBE = readFloat_ieee754.bind(null, readUintBE);
|
|
|
|
})();
|
|
|
|
// double: typed array
|
|
if (typeof Float64Array !== "undefined") (function() {
|
|
|
|
var f64 = new Float64Array([-0]),
|
|
f8b = new Uint8Array(f64.buffer),
|
|
le = f8b[7] === 128;
|
|
|
|
function writeDouble_f64_cpy(val, buf, pos) {
|
|
f64[0] = val;
|
|
buf[pos ] = f8b[0];
|
|
buf[pos + 1] = f8b[1];
|
|
buf[pos + 2] = f8b[2];
|
|
buf[pos + 3] = f8b[3];
|
|
buf[pos + 4] = f8b[4];
|
|
buf[pos + 5] = f8b[5];
|
|
buf[pos + 6] = f8b[6];
|
|
buf[pos + 7] = f8b[7];
|
|
}
|
|
|
|
function writeDouble_f64_rev(val, buf, pos) {
|
|
f64[0] = val;
|
|
buf[pos ] = f8b[7];
|
|
buf[pos + 1] = f8b[6];
|
|
buf[pos + 2] = f8b[5];
|
|
buf[pos + 3] = f8b[4];
|
|
buf[pos + 4] = f8b[3];
|
|
buf[pos + 5] = f8b[2];
|
|
buf[pos + 6] = f8b[1];
|
|
buf[pos + 7] = f8b[0];
|
|
}
|
|
|
|
/* istanbul ignore next */
|
|
exports.writeDoubleLE = le ? writeDouble_f64_cpy : writeDouble_f64_rev;
|
|
/* istanbul ignore next */
|
|
exports.writeDoubleBE = le ? writeDouble_f64_rev : writeDouble_f64_cpy;
|
|
|
|
function readDouble_f64_cpy(buf, pos) {
|
|
f8b[0] = buf[pos ];
|
|
f8b[1] = buf[pos + 1];
|
|
f8b[2] = buf[pos + 2];
|
|
f8b[3] = buf[pos + 3];
|
|
f8b[4] = buf[pos + 4];
|
|
f8b[5] = buf[pos + 5];
|
|
f8b[6] = buf[pos + 6];
|
|
f8b[7] = buf[pos + 7];
|
|
return f64[0];
|
|
}
|
|
|
|
function readDouble_f64_rev(buf, pos) {
|
|
f8b[7] = buf[pos ];
|
|
f8b[6] = buf[pos + 1];
|
|
f8b[5] = buf[pos + 2];
|
|
f8b[4] = buf[pos + 3];
|
|
f8b[3] = buf[pos + 4];
|
|
f8b[2] = buf[pos + 5];
|
|
f8b[1] = buf[pos + 6];
|
|
f8b[0] = buf[pos + 7];
|
|
return f64[0];
|
|
}
|
|
|
|
/* istanbul ignore next */
|
|
exports.readDoubleLE = le ? readDouble_f64_cpy : readDouble_f64_rev;
|
|
/* istanbul ignore next */
|
|
exports.readDoubleBE = le ? readDouble_f64_rev : readDouble_f64_cpy;
|
|
|
|
// double: ieee754
|
|
})(); else (function() {
|
|
|
|
function writeDouble_ieee754(writeUint, off0, off1, val, buf, pos) {
|
|
var sign = val < 0 ? 1 : 0;
|
|
if (sign)
|
|
val = -val;
|
|
if (val === 0) {
|
|
writeUint(0, buf, pos + off0);
|
|
writeUint(1 / val > 0 ? /* positive */ 0 : /* negative 0 */ 2147483648, buf, pos + off1);
|
|
} else if (isNaN(val)) {
|
|
writeUint(0, buf, pos + off0);
|
|
writeUint(2146959360, buf, pos + off1);
|
|
} else if (val > 1.7976931348623157e+308) { // +-Infinity
|
|
writeUint(0, buf, pos + off0);
|
|
writeUint((sign << 31 | 2146435072) >>> 0, buf, pos + off1);
|
|
} else {
|
|
var mantissa;
|
|
if (val < 2.2250738585072014e-308) { // denormal
|
|
mantissa = val / 5e-324;
|
|
writeUint(mantissa >>> 0, buf, pos + off0);
|
|
writeUint((sign << 31 | mantissa / 4294967296) >>> 0, buf, pos + off1);
|
|
} else {
|
|
var exponent = Math.floor(Math.log(val) / Math.LN2);
|
|
if (exponent === 1024)
|
|
exponent = 1023;
|
|
mantissa = val * Math.pow(2, -exponent);
|
|
writeUint(mantissa * 4503599627370496 >>> 0, buf, pos + off0);
|
|
writeUint((sign << 31 | exponent + 1023 << 20 | mantissa * 1048576 & 1048575) >>> 0, buf, pos + off1);
|
|
}
|
|
}
|
|
}
|
|
|
|
exports.writeDoubleLE = writeDouble_ieee754.bind(null, writeUintLE, 0, 4);
|
|
exports.writeDoubleBE = writeDouble_ieee754.bind(null, writeUintBE, 4, 0);
|
|
|
|
function readDouble_ieee754(readUint, off0, off1, buf, pos) {
|
|
var lo = readUint(buf, pos + off0),
|
|
hi = readUint(buf, pos + off1);
|
|
var sign = (hi >> 31) * 2 + 1,
|
|
exponent = hi >>> 20 & 2047,
|
|
mantissa = 4294967296 * (hi & 1048575) + lo;
|
|
return exponent === 2047
|
|
? mantissa
|
|
? NaN
|
|
: sign * Infinity
|
|
: exponent === 0 // denormal
|
|
? sign * 5e-324 * mantissa
|
|
: sign * Math.pow(2, exponent - 1075) * (mantissa + 4503599627370496);
|
|
}
|
|
|
|
exports.readDoubleLE = readDouble_ieee754.bind(null, readUintLE, 0, 4);
|
|
exports.readDoubleBE = readDouble_ieee754.bind(null, readUintBE, 4, 0);
|
|
|
|
})();
|
|
|
|
return exports;
|
|
}
|
|
|
|
// uint helpers
|
|
|
|
function writeUintLE(val, buf, pos) {
|
|
buf[pos ] = val & 255;
|
|
buf[pos + 1] = val >>> 8 & 255;
|
|
buf[pos + 2] = val >>> 16 & 255;
|
|
buf[pos + 3] = val >>> 24;
|
|
}
|
|
|
|
function writeUintBE(val, buf, pos) {
|
|
buf[pos ] = val >>> 24;
|
|
buf[pos + 1] = val >>> 16 & 255;
|
|
buf[pos + 2] = val >>> 8 & 255;
|
|
buf[pos + 3] = val & 255;
|
|
}
|
|
|
|
function readUintLE(buf, pos) {
|
|
return (buf[pos ]
|
|
| buf[pos + 1] << 8
|
|
| buf[pos + 2] << 16
|
|
| buf[pos + 3] << 24) >>> 0;
|
|
}
|
|
|
|
function readUintBE(buf, pos) {
|
|
return (buf[pos ] << 24
|
|
| buf[pos + 1] << 16
|
|
| buf[pos + 2] << 8
|
|
| buf[pos + 3]) >>> 0;
|
|
}
|
|
return float;
|
|
}
|
|
|
|
var inquire_1;
var hasRequiredInquire;

function requireInquire () {
	if (hasRequiredInquire) return inquire_1;
	hasRequiredInquire = 1;
	inquire_1 = inquire;

	/**
	 * Requires a module only if available.
	 * @memberof util
	 * @param {string} moduleName Module to require
	 * @returns {?Object} Required module if available and not empty, otherwise `null`
	 */
	function inquire(moduleName) {
	    try {
	        // eval("re"+"quire") hides the call from bundlers' static analysis.
	        var mod = eval("quire".replace(/^/,"re"))(moduleName); // eslint-disable-line no-eval
	        var nonEmpty = mod && (mod.length || Object.keys(mod).length);
	        if (nonEmpty)
	            return mod;
	    } catch (e) {} // eslint-disable-line no-empty
	    return null;
	}
	return inquire_1;
}
|
|
|
|
var utf8$2 = {};

var hasRequiredUtf8;

// Lazily initializes and caches the UTF8 helper (memoized CommonJS-style module).
function requireUtf8 () {
    if (hasRequiredUtf8) return utf8$2;
    hasRequiredUtf8 = 1;
    (function (exports) {

        /**
         * A minimal UTF8 implementation for number arrays.
         * @memberof util
         * @namespace
         */
        var utf8 = exports;

        /**
         * Calculates the UTF8 byte length of a string.
         * @param {string} string String
         * @returns {number} Byte length
         */
        utf8.length = function utf8_length(string) {
            var total = 0;
            for (var i = 0; i < string.length; ++i) {
                var code = string.charCodeAt(i);
                if (code < 128) {
                    total += 1;
                } else if (code < 2048) {
                    total += 2;
                } else if ((code & 0xFC00) === 0xD800 && (string.charCodeAt(i + 1) & 0xFC00) === 0xDC00) {
                    // valid surrogate pair: both UTF-16 units encode as one 4 byte sequence
                    ++i;
                    total += 4;
                } else {
                    total += 3;
                }
            }
            return total;
        };

        /**
         * Reads UTF8 bytes as a string.
         * @param {Uint8Array} buffer Source buffer
         * @param {number} start Source start
         * @param {number} end Source end
         * @returns {string} String read
         */
        utf8.read = function utf8_read(buffer, start, end) {
            if (end - start < 1)
                return "";
            var parts = null;  // accumulated string pieces, only when the input is large
            var chunk = [];    // pending char codes
            var i = 0;         // char offset within chunk
            while (start < end) {
                var b = buffer[start++];
                if (b < 128) {                     // 1 byte sequence
                    chunk[i++] = b;
                } else if (b > 191 && b < 224) {   // 2 byte sequence
                    chunk[i++] = (b & 31) << 6 | buffer[start++] & 63;
                } else if (b > 239 && b < 365) {   // 4 byte sequence, emitted as a surrogate pair
                    var cp = ((b & 7) << 18 | (buffer[start++] & 63) << 12 | (buffer[start++] & 63) << 6 | buffer[start++] & 63) - 0x10000;
                    chunk[i++] = 0xD800 + (cp >> 10);
                    chunk[i++] = 0xDC00 + (cp & 1023);
                } else {                           // 3 byte sequence
                    chunk[i++] = (b & 15) << 12 | (buffer[start++] & 63) << 6 | buffer[start++] & 63;
                }
                if (i > 8191) {                    // flush to avoid fromCharCode.apply argument limits
                    (parts || (parts = [])).push(String.fromCharCode.apply(String, chunk));
                    i = 0;
                }
            }
            if (parts) {
                if (i)
                    parts.push(String.fromCharCode.apply(String, chunk.slice(0, i)));
                return parts.join("");
            }
            return String.fromCharCode.apply(String, chunk.slice(0, i));
        };

        /**
         * Writes a string as UTF8 bytes.
         * @param {string} string Source string
         * @param {Uint8Array} buffer Destination buffer
         * @param {number} offset Destination offset
         * @returns {number} Bytes written
         */
        utf8.write = function utf8_write(string, buffer, offset) {
            var start = offset;
            for (var i = 0; i < string.length; ++i) {
                var c1 = string.charCodeAt(i);
                var c2;
                if (c1 < 128) {
                    buffer[offset++] = c1;
                } else if (c1 < 2048) {
                    buffer[offset++] = c1 >> 6 | 192;
                    buffer[offset++] = c1 & 63 | 128;
                } else if ((c1 & 0xFC00) === 0xD800 && ((c2 = string.charCodeAt(i + 1)) & 0xFC00) === 0xDC00) {
                    // combine the surrogate pair into a code point, then emit 4 bytes
                    c1 = 0x10000 + ((c1 & 0x03FF) << 10) + (c2 & 0x03FF);
                    ++i;
                    buffer[offset++] = c1 >> 18 | 240;
                    buffer[offset++] = c1 >> 12 & 63 | 128;
                    buffer[offset++] = c1 >> 6 & 63 | 128;
                    buffer[offset++] = c1 & 63 | 128;
                } else {
                    buffer[offset++] = c1 >> 12 | 224;
                    buffer[offset++] = c1 >> 6 & 63 | 128;
                    buffer[offset++] = c1 & 63 | 128;
                }
            }
            return offset - start;
        };

    } (utf8$2));
    return utf8$2;
}
|
|
|
|
var pool_1;
var hasRequiredPool;

// Lazily initializes and caches the buffer pool helper (memoized CommonJS-style module).
function requirePool () {
    if (hasRequiredPool) return pool_1;
    hasRequiredPool = 1;
    pool_1 = pool;

    /**
     * An allocator as used by {@link util.pool}.
     * @typedef PoolAllocator
     * @type {function}
     * @param {number} size Buffer size
     * @returns {Uint8Array} Buffer
     */

    /**
     * A slicer as used by {@link util.pool}.
     * @typedef PoolSlicer
     * @type {function}
     * @param {number} start Start offset
     * @param {number} end End offset
     * @returns {Uint8Array} Buffer slice
     * @this {Uint8Array}
     */

    /**
     * A general purpose buffer pool.
     * @memberof util
     * @function
     * @param {PoolAllocator} alloc Allocator
     * @param {PoolSlicer} slice Slicer
     * @param {number} [size=8192] Slab size
     * @returns {PoolAllocator} Pooled allocator
     */
    function pool(alloc, slice, size) {
        var SIZE = size || 8192;   // slab size
        var MAX = SIZE >>> 1;      // largest request served from the slab
        var slab = null;
        var offset = SIZE;         // "full" initially, forcing a slab on first pooled request
        return function pool_alloc(size) {
            // Requests outside the pooled range are allocated directly.
            if (size < 1 || size > MAX)
                return alloc(size);
            // Current slab exhausted: start a fresh one.
            if (offset + size > SIZE) {
                slab = alloc(SIZE);
                offset = 0;
            }
            var begin = offset;
            offset += size;
            var buf = slice.call(slab, begin, offset);
            // Keep the next offset 8 byte aligned (32 bit friendly).
            if (offset & 7)
                offset = (offset | 7) + 1;
            return buf;
        };
    }
    return pool_1;
}
|
|
|
|
var longbits$1;
var hasRequiredLongbits$1;

// Lazily initializes and caches the LongBits helper (memoized CommonJS-style module).
function requireLongbits$1 () {
    if (hasRequiredLongbits$1) return longbits$1;
    hasRequiredLongbits$1 = 1;
    longbits$1 = LongBits;

    // Circular dependency: requireMinimal$2's initializer calls back into this module.
    // The re-entrant call below returns minimal's (partially initialized) exports object.
    var util = requireMinimal$2();

    /**
     * Constructs new long bits.
     * @classdesc Helper class for working with the low and high bits of a 64 bit value.
     * @memberof util
     * @constructor
     * @param {number} lo Low 32 bits, unsigned
     * @param {number} hi High 32 bits, unsigned
     */
    function LongBits(lo, hi) {

        // note that the casts below are theoretically unnecessary as of today, but older statically
        // generated converter code might still call the ctor with signed 32bits. kept for compat.

        /**
         * Low bits.
         * @type {number}
         */
        this.lo = lo >>> 0;

        /**
         * High bits.
         * @type {number}
         */
        this.hi = hi >>> 0;
    }

    /**
     * Zero bits.
     * @memberof util.LongBits
     * @type {util.LongBits}
     */
    var zero = LongBits.zero = new LongBits(0, 0);

    // Specialized no-op overrides so operations on the shared zero instance never mutate it.
    zero.toNumber = function() { return 0; };
    zero.zzEncode = zero.zzDecode = function() { return this; };
    zero.length = function() { return 1; };

    /**
     * Zero hash.
     * @memberof util.LongBits
     * @type {string}
     */
    var zeroHash = LongBits.zeroHash = "\0\0\0\0\0\0\0\0";

    /**
     * Constructs new long bits from the specified number.
     * @param {number} value Value
     * @returns {util.LongBits} Instance
     */
    LongBits.fromNumber = function fromNumber(value) {
        if (value === 0)
            return zero;
        var sign = value < 0;
        if (sign)
            value = -value;
        var lo = value >>> 0,
            hi = (value - lo) / 4294967296 >>> 0;
        if (sign) {
            // two's complement: invert both words, then add one with carry from lo into hi
            hi = ~hi >>> 0;
            lo = ~lo >>> 0;
            if (++lo > 4294967295) {
                lo = 0;
                if (++hi > 4294967295)
                    hi = 0;
            }
        }
        return new LongBits(lo, hi);
    };

    /**
     * Constructs new long bits from a number, long or string.
     * @param {Long|number|string} value Value
     * @returns {util.LongBits} Instance
     */
    LongBits.from = function from(value) {
        if (typeof value === "number")
            return LongBits.fromNumber(value);
        if (util.isString(value)) {
            /* istanbul ignore else */
            if (util.Long)
                value = util.Long.fromString(value);
            else
                // without a long library, fall back to (possibly lossy) number parsing
                return LongBits.fromNumber(parseInt(value, 10));
        }
        // value is Long-like here ({ low, high })
        return value.low || value.high ? new LongBits(value.low >>> 0, value.high >>> 0) : zero;
    };

    /**
     * Converts this long bits to a possibly unsafe JavaScript number.
     * @param {boolean} [unsigned=false] Whether unsigned or not
     * @returns {number} Possibly unsafe number
     */
    LongBits.prototype.toNumber = function toNumber(unsigned) {
        if (!unsigned && this.hi >>> 31) {
            // signed and negative: undo two's complement, then negate the magnitude
            var lo = ~this.lo + 1 >>> 0,
                hi = ~this.hi >>> 0;
            if (!lo)
                hi = hi + 1 >>> 0;
            return -(lo + hi * 4294967296);
        }
        return this.lo + this.hi * 4294967296;
    };

    /**
     * Converts this long bits to a long.
     * @param {boolean} [unsigned=false] Whether unsigned or not
     * @returns {Long} Long
     */
    LongBits.prototype.toLong = function toLong(unsigned) {
        return util.Long
            ? new util.Long(this.lo | 0, this.hi | 0, Boolean(unsigned))
            /* istanbul ignore next */
            : { low: this.lo | 0, high: this.hi | 0, unsigned: Boolean(unsigned) };
    };

    var charCodeAt = String.prototype.charCodeAt;

    /**
     * Constructs new long bits from the specified 8 characters long hash.
     * @param {string} hash Hash
     * @returns {util.LongBits} Bits
     */
    LongBits.fromHash = function fromHash(hash) {
        if (hash === zeroHash)
            return zero;
        // each hash character carries one byte, little-endian within each word
        return new LongBits(
            ( charCodeAt.call(hash, 0)
            | charCodeAt.call(hash, 1) << 8
            | charCodeAt.call(hash, 2) << 16
            | charCodeAt.call(hash, 3) << 24) >>> 0
        ,
            ( charCodeAt.call(hash, 4)
            | charCodeAt.call(hash, 5) << 8
            | charCodeAt.call(hash, 6) << 16
            | charCodeAt.call(hash, 7) << 24) >>> 0
        );
    };

    /**
     * Converts this long bits to a 8 characters long hash.
     * @returns {string} Hash
     */
    LongBits.prototype.toHash = function toHash() {
        return String.fromCharCode(
            this.lo & 255,
            this.lo >>> 8 & 255,
            this.lo >>> 16 & 255,
            this.lo >>> 24 ,
            this.hi & 255,
            this.hi >>> 8 & 255,
            this.hi >>> 16 & 255,
            this.hi >>> 24
        );
    };

    /**
     * Zig-zag encodes this long bits.
     * @returns {util.LongBits} `this`
     */
    LongBits.prototype.zzEncode = function zzEncode() {
        // 64 bit (value << 1) ^ (value >> 63), spread across the lo/hi pair
        var mask = this.hi >> 31;
        this.hi = ((this.hi << 1 | this.lo >>> 31) ^ mask) >>> 0;
        this.lo = ( this.lo << 1 ^ mask) >>> 0;
        return this;
    };

    /**
     * Zig-zag decodes this long bits.
     * @returns {util.LongBits} `this`
     */
    LongBits.prototype.zzDecode = function zzDecode() {
        // 64 bit (value >>> 1) ^ -(value & 1), spread across the lo/hi pair
        var mask = -(this.lo & 1);
        this.lo = ((this.lo >>> 1 | this.hi << 31) ^ mask) >>> 0;
        this.hi = ( this.hi >>> 1 ^ mask) >>> 0;
        return this;
    };

    /**
     * Calculates the length of this longbits when encoded as a varint.
     * @returns {number} Length
     */
    LongBits.prototype.length = function length() {
        // split the 64 bits into three groups of 28/28/8 bits and bucket by magnitude
        var part0 = this.lo,
            part1 = (this.lo >>> 28 | this.hi << 4) >>> 0,
            part2 = this.hi >>> 24;
        return part2 === 0
             ? part1 === 0
               ? part0 < 16384
                 ? part0 < 128 ? 1 : 2
                 : part0 < 2097152 ? 3 : 4
               : part1 < 16384
                 ? part1 < 128 ? 5 : 6
                 : part1 < 2097152 ? 7 : 8
             : part2 < 128 ? 9 : 10;
    };

    return longbits$1;
}
|
|
|
|
var hasRequiredMinimal$2;

// Lazily initializes and caches protobuf's minimal runtime utilities (memoized
// CommonJS-style module populating the shared `minimal$2` exports object).
function requireMinimal$2 () {
    if (hasRequiredMinimal$2) return minimal$2;
    hasRequiredMinimal$2 = 1;
    (function (exports) {
        var util = exports;

        // used to return a Promise where callback is omitted
        util.asPromise = requireAspromise();

        // converts to / from base64 encoded strings
        util.base64 = requireBase64();

        // base class of rpc.Service
        util.EventEmitter = requireEventemitter();

        // float handling across browsers
        util.float = requireFloat();

        // requires modules optionally and hides the call from bundlers
        util.inquire = requireInquire();

        // converts to / from utf8 encoded strings
        util.utf8 = requireUtf8();

        // provides a node-like buffer pool in the browser
        util.pool = requirePool();

        // utility to work with the low and high bits of a 64 bit value
        util.LongBits = requireLongbits$1();

        /**
         * Whether running within node or not.
         * @memberof util
         * @type {boolean}
         */
        util.isNode = Boolean(typeof commonjsGlobal !== "undefined"
                           && commonjsGlobal
                           && commonjsGlobal.process
                           && commonjsGlobal.process.versions
                           && commonjsGlobal.process.versions.node);

        /**
         * Global object reference.
         * @memberof util
         * @type {Object}
         */
        util.global = util.isNode && commonjsGlobal
                   || typeof window !== "undefined" && window
                   || typeof self !== "undefined" && self
                   || commonjsGlobal; // eslint-disable-line no-invalid-this

        /**
         * An immutable empty array.
         * @memberof util
         * @type {Array.<*>}
         * @const
         */
        util.emptyArray = Object.freeze ? Object.freeze([]) : /* istanbul ignore next */ []; // used on prototypes

        /**
         * An immutable empty object.
         * @type {Object}
         * @const
         */
        util.emptyObject = Object.freeze ? Object.freeze({}) : /* istanbul ignore next */ {}; // used on prototypes

        /**
         * Tests if the specified value is an integer.
         * @function
         * @param {*} value Value to test
         * @returns {boolean} `true` if the value is an integer
         */
        util.isInteger = Number.isInteger || /* istanbul ignore next */ function isInteger(value) {
            return typeof value === "number" && isFinite(value) && Math.floor(value) === value;
        };

        /**
         * Tests if the specified value is a string.
         * @param {*} value Value to test
         * @returns {boolean} `true` if the value is a string
         */
        util.isString = function isString(value) {
            return typeof value === "string" || value instanceof String;
        };

        /**
         * Tests if the specified value is a non-null object.
         * @param {*} value Value to test
         * @returns {boolean} `true` if the value is a non-null object
         */
        util.isObject = function isObject(value) {
            return value && typeof value === "object";
        };

        /**
         * Checks if a property on a message is considered to be present.
         * This is an alias of {@link util.isSet}.
         * @function
         * @param {Object} obj Plain object or message instance
         * @param {string} prop Property name
         * @returns {boolean} `true` if considered to be present, otherwise `false`
         */
        util.isset =

        /**
         * Checks if a property on a message is considered to be present.
         * @param {Object} obj Plain object or message instance
         * @param {string} prop Property name
         * @returns {boolean} `true` if considered to be present, otherwise `false`
         */
        util.isSet = function isSet(obj, prop) {
            var value = obj[prop];
            // present means: own, non-null, and (for objects/arrays) non-empty
            if (value != null && obj.hasOwnProperty(prop)) // eslint-disable-line eqeqeq, no-prototype-builtins
                return typeof value !== "object" || (Array.isArray(value) ? value.length : Object.keys(value).length) > 0;
            return false;
        };

        /**
         * Any compatible Buffer instance.
         * This is a minimal stand-alone definition of a Buffer instance. The actual type is that exported by node's typings.
         * @interface Buffer
         * @extends Uint8Array
         */

        /**
         * Node's Buffer class if available.
         * @type {Constructor<Buffer>}
         */
        util.Buffer = (function() {
            try {
                var Buffer = util.inquire("buffer").Buffer;
                // refuse to use non-node buffers if not explicitly assigned (perf reasons):
                return Buffer.prototype.utf8Write ? Buffer : /* istanbul ignore next */ null;
            } catch (e) {
                /* istanbul ignore next */
                return null;
            }
        })();

        // Internal alias of or polyfill for Buffer.from.
        util._Buffer_from = null;

        // Internal alias of or polyfill for Buffer.allocUnsafe.
        util._Buffer_allocUnsafe = null;

        /**
         * Creates a new buffer of whatever type supported by the environment.
         * @param {number|number[]} [sizeOrArray=0] Buffer size or number array
         * @returns {Uint8Array|Buffer} Buffer
         */
        util.newBuffer = function newBuffer(sizeOrArray) {
            /* istanbul ignore next */
            return typeof sizeOrArray === "number"
                ? util.Buffer
                    ? util._Buffer_allocUnsafe(sizeOrArray)
                    : new util.Array(sizeOrArray)
                : util.Buffer
                    ? util._Buffer_from(sizeOrArray)
                    : typeof Uint8Array === "undefined"
                        ? sizeOrArray
                        : new Uint8Array(sizeOrArray);
        };

        /**
         * Array implementation used in the browser. `Uint8Array` if supported, otherwise `Array`.
         * @type {Constructor<Uint8Array>}
         */
        util.Array = typeof Uint8Array !== "undefined" ? Uint8Array /* istanbul ignore next */ : Array;

        /**
         * Any compatible Long instance.
         * This is a minimal stand-alone definition of a Long instance. The actual type is that exported by long.js.
         * @interface Long
         * @property {number} low Low bits
         * @property {number} high High bits
         * @property {boolean} unsigned Whether unsigned or not
         */

        /**
         * Long.js's Long class if available.
         * @type {Constructor<Long>}
         */
        util.Long = /* istanbul ignore next */ util.global.dcodeIO && /* istanbul ignore next */ util.global.dcodeIO.Long
                 || /* istanbul ignore next */ util.global.Long
                 || util.inquire("long");

        /**
         * Regular expression used to verify 2 bit (`bool`) map keys.
         * @type {RegExp}
         * @const
         */
        util.key2Re = /^true|false|0|1$/;

        /**
         * Regular expression used to verify 32 bit (`int32` etc.) map keys.
         * @type {RegExp}
         * @const
         */
        util.key32Re = /^-?(?:0|[1-9][0-9]*)$/;

        /**
         * Regular expression used to verify 64 bit (`int64` etc.) map keys.
         * @type {RegExp}
         * @const
         */
        util.key64Re = /^(?:[\\x00-\\xff]{8}|-?(?:0|[1-9][0-9]*))$/;

        /**
         * Converts a number or long to an 8 characters long hash string.
         * @param {Long|number} value Value to convert
         * @returns {string} Hash
         */
        util.longToHash = function longToHash(value) {
            return value
                ? util.LongBits.from(value).toHash()
                : util.LongBits.zeroHash;
        };

        /**
         * Converts an 8 characters long hash string to a long or number.
         * @param {string} hash Hash
         * @param {boolean} [unsigned=false] Whether unsigned or not
         * @returns {Long|number} Original value
         */
        util.longFromHash = function longFromHash(hash, unsigned) {
            var bits = util.LongBits.fromHash(hash);
            if (util.Long)
                return util.Long.fromBits(bits.lo, bits.hi, unsigned);
            return bits.toNumber(Boolean(unsigned));
        };

        /**
         * Merges the properties of the source object into the destination object.
         * @memberof util
         * @param {Object.<string,*>} dst Destination object
         * @param {Object.<string,*>} src Source object
         * @param {boolean} [ifNotSet=false] Merges only if the key is not already set
         * @returns {Object.<string,*>} Destination object
         */
        function merge(dst, src, ifNotSet) { // used by converters
            for (var keys = Object.keys(src), i = 0; i < keys.length; ++i)
                if (dst[keys[i]] === undefined || !ifNotSet)
                    dst[keys[i]] = src[keys[i]];
            return dst;
        }

        util.merge = merge;

        /**
         * Converts the first character of a string to lower case.
         * @param {string} str String to convert
         * @returns {string} Converted string
         */
        util.lcFirst = function lcFirst(str) {
            return str.charAt(0).toLowerCase() + str.substring(1);
        };

        /**
         * Creates a custom error constructor.
         * @memberof util
         * @param {string} name Error name
         * @returns {Constructor<Error>} Custom error constructor
         */
        function newError(name) {

            function CustomError(message, properties) {

                // make `new` optional: calling as a function still constructs
                if (!(this instanceof CustomError))
                    return new CustomError(message, properties);

                // Error.call(this, message);
                // ^ just returns a new error instance because the ctor can be called as a function

                Object.defineProperty(this, "message", { get: function() { return message; } });

                /* istanbul ignore next */
                if (Error.captureStackTrace) // node
                    Error.captureStackTrace(this, CustomError);
                else
                    Object.defineProperty(this, "stack", { value: new Error().stack || "" });

                if (properties)
                    merge(this, properties);
            }

            CustomError.prototype = Object.create(Error.prototype, {
                constructor: {
                    value: CustomError,
                    writable: true,
                    enumerable: false,
                    configurable: true,
                },
                name: {
                    get() { return name; },
                    set: undefined,
                    enumerable: false,
                    // configurable: false would accurately preserve the behavior of
                    // the original, but I'm guessing that was not intentional.
                    // For an actual error subclass, this property would
                    // be configurable.
                    configurable: true,
                },
                toString: {
                    value() { return this.name + ": " + this.message; },
                    writable: true,
                    enumerable: false,
                    configurable: true,
                },
            });

            return CustomError;
        }

        util.newError = newError;

        /**
         * Constructs a new protocol error.
         * @classdesc Error subclass indicating a protocol specific error.
         * @memberof util
         * @extends Error
         * @template T extends Message<T>
         * @constructor
         * @param {string} message Error message
         * @param {Object.<string,*>} [properties] Additional properties
         * @example
         * try {
         *     MyMessage.decode(someBuffer); // throws if required fields are missing
         * } catch (e) {
         *     if (e instanceof ProtocolError && e.instance)
         *         console.log("decoded so far: " + JSON.stringify(e.instance));
         * }
         */
        util.ProtocolError = newError("ProtocolError");

        /**
         * So far decoded message instance.
         * @name util.ProtocolError#instance
         * @type {Message<T>}
         */

        /**
         * A OneOf getter as returned by {@link util.oneOfGetter}.
         * @typedef OneOfGetter
         * @type {function}
         * @returns {string|undefined} Set field name, if any
         */

        /**
         * Builds a getter for a oneof's present field name.
         * @param {string[]} fieldNames Field names
         * @returns {OneOfGetter} Unbound getter
         */
        util.oneOfGetter = function getOneOf(fieldNames) {
            var fieldMap = {};
            for (var i = 0; i < fieldNames.length; ++i)
                fieldMap[fieldNames[i]] = 1;

            /**
             * @returns {string|undefined} Set field name, if any
             * @this Object
             * @ignore
             */
            return function() { // eslint-disable-line consistent-return
                // iterate own keys backwards so the most recently set field wins
                for (var keys = Object.keys(this), i = keys.length - 1; i > -1; --i)
                    if (fieldMap[keys[i]] === 1 && this[keys[i]] !== undefined && this[keys[i]] !== null)
                        return keys[i];
            };
        };

        /**
         * A OneOf setter as returned by {@link util.oneOfSetter}.
         * @typedef OneOfSetter
         * @type {function}
         * @param {string|undefined} value Field name
         * @returns {undefined}
         */

        /**
         * Builds a setter for a oneof's present field name.
         * @param {string[]} fieldNames Field names
         * @returns {OneOfSetter} Unbound setter
         */
        util.oneOfSetter = function setOneOf(fieldNames) {

            /**
             * @param {string} name Field name
             * @returns {undefined}
             * @this Object
             * @ignore
             */
            return function(name) {
                // clear every other member of the oneof so only `name` remains set
                for (var i = 0; i < fieldNames.length; ++i)
                    if (fieldNames[i] !== name)
                        delete this[fieldNames[i]];
            };
        };

        /**
         * Default conversion options used for {@link Message#toJSON} implementations.
         *
         * These options are close to proto3's JSON mapping with the exception that internal types like Any are handled just like messages. More precisely:
         *
         * - Longs become strings
         * - Enums become string keys
         * - Bytes become base64 encoded strings
         * - (Sub-)Messages become plain objects
         * - Maps become plain objects with all string keys
         * - Repeated fields become arrays
         * - NaN and Infinity for float and double fields become strings
         *
         * @type {IConversionOptions}
         * @see https://developers.google.com/protocol-buffers/docs/proto3?hl=en#json
         */
        util.toJSONOptions = {
            longs: String,
            enums: String,
            bytes: String,
            json: true
        };

        // Sets up buffer utility according to the environment (called in index-minimal)
        util._configure = function() {
            var Buffer = util.Buffer;
            /* istanbul ignore if */
            if (!Buffer) {
                util._Buffer_from = util._Buffer_allocUnsafe = null;
                return;
            }
            // because node 4.x buffers are incompatible & immutable
            // see: https://github.com/dcodeIO/protobuf.js/pull/665
            util._Buffer_from = Buffer.from !== Uint8Array.from && Buffer.from ||
                /* istanbul ignore next */
                function Buffer_from(value, encoding) {
                    return new Buffer(value, encoding);
                };
            util._Buffer_allocUnsafe = Buffer.allocUnsafe ||
                /* istanbul ignore next */
                function Buffer_allocUnsafe(size) {
                    return new Buffer(size);
                };
        };

    } (minimal$2));
    return minimal$2;
}
|
|
|
|
// Wire format writer module. Writer$2 is a hoisted function declaration defined below.
var writer$1 = Writer$2;

var util$9 = requireMinimal$2();

var BufferWriter$1; // cyclic dependency: assigned later, once the buffer writer exists

// Frequently used utilities, aliased locally.
var LongBits$2 = util$9.LongBits,
    base64 = util$9.base64,
    utf8$1 = util$9.utf8;
|
|
|
|
/**
 * Constructs a new writer operation instance.
 * @classdesc Scheduled writer operation: a (fn, len, val) triple queued for later serialization.
 * @constructor
 * @param {function(*, Uint8Array, number)} fn Function to call
 * @param {number} len Value byte length
 * @param {*} val Value to write
 * @ignore
 */
function Op(fn, len, val) {
    /** Function invoked to serialize {@link Op#val}. @type {function(Uint8Array, number, *)} */
    this.fn = fn;
    /** Number of bytes this operation will emit. @type {number} */
    this.len = len;
    /** Next operation in the linked list, if any. @type {Writer.Op|undefined} */
    this.next = undefined;
    /** Value to serialize; its type depends on {@link Op#fn}. @type {*} */
    this.val = val;
}
|
|
|
|
/* istanbul ignore next */
// Shared no-op, used as the sentinel head operation's fn in the Writer constructor.
function noop$1() {} // eslint-disable-line no-empty-function
|
|
|
/**
 * Constructs a new writer state instance.
 * @classdesc Copied writer state, snapshotting a writer's operation list for fork/reset.
 * @memberof Writer
 * @constructor
 * @param {Writer} writer Writer to copy state from
 * @ignore
 */
function State(writer) {
    /** Head operation at fork time. @type {Writer.Op} */
    this.head = writer.head;
    /** Tail operation at fork time. @type {Writer.Op} */
    this.tail = writer.tail;
    /** Buffer length at fork time. @type {number} */
    this.len = writer.len;
    /** Previously pushed state, forming a stack. @type {State|null} */
    this.next = writer.states;
}
|
|
|
|
/**
 * Constructs a new writer instance.
 * @classdesc Wire format writer using `Uint8Array` if available, otherwise `Array`.
 * @constructor
 */
function Writer$2() {
    /** Total byte length of all queued operations. @type {number} */
    this.len = 0;
    /** Sentinel head of the operation list. @type {Object} */
    this.head = new Op(noop$1, 0, 0);
    /** Last queued operation; initially the sentinel itself. @type {Object} */
    this.tail = this.head;
    /** Stack of forked states, if any. @type {Object|null} */
    this.states = null;

    // Writing queues (fn, len, val) operations instead of encoding immediately. This
    // lets finish() allocate one buffer of the exact required size and then serialize
    // everything with a single linked-list walk over already prepared values.
}
|
|
|
|
var create$4 = function create() {
    return util$9.Buffer
        ? function create_buffer_setup() {
            // On first call, permanently replace Writer$2.create with the cheaper
            // direct BufferWriter factory, then invoke it.
            return (Writer$2.create = function create_buffer() {
                return new BufferWriter$1();
            })();
        }
        /* istanbul ignore next */
        : function create_array() {
            return new Writer$2();
        };
};

/**
 * Creates a new writer.
 * @function
 * @returns {BufferWriter|Writer} A {@link BufferWriter} when Buffers are supported, otherwise a {@link Writer}
 */
Writer$2.create = create$4();

/**
 * Allocates a buffer of the specified size.
 * @param {number} size Buffer size
 * @returns {Uint8Array} Buffer
 */
Writer$2.alloc = function alloc(size) {
    return new util$9.Array(size);
};

// Use Uint8Array buffer pool in the browser, just like node does with buffers
/* istanbul ignore else */
if (util$9.Array !== Array)
    Writer$2.alloc = util$9.pool(Writer$2.alloc, util$9.Array.prototype.subarray);
|
|
|
|
/**
 * Pushes a new operation to the queue.
 * @param {function(Uint8Array, number, *)} fn Function to call
 * @param {number} len Value byte length
 * @param {number} val Value to write
 * @returns {Writer} `this`
 * @private
 */
Writer$2.prototype._push = function push(fn, len, val) {
    // Append to the linked list and account for the bytes it will emit.
    var op = new Op(fn, len, val);
    this.tail.next = op;
    this.tail = op;
    this.len += len;
    return this;
};
|
|
|
|
/**
 * Writes the low 8 bits of a value to a buffer.
 * @param {number} val Value to write
 * @param {Uint8Array} buf Destination buffer
 * @param {number} pos Write position
 */
function writeByte(val, buf, pos) {
    buf[pos] = val & 0xFF;
}
|
|
|
|
/**
 * Writes an unsigned 32 bit value as a base 128 varint.
 * @param {number} val Value to write
 * @param {Uint8Array} buf Destination buffer
 * @param {number} pos Write position
 */
function writeVarint32(val, buf, pos) {
    // Emit 7 bits per byte, setting the continuation bit on all but the last byte.
    for (; val > 127; val >>>= 7)
        buf[pos++] = val & 127 | 128;
    buf[pos] = val;
}
|
|
|
|
/**
 * Constructs a new varint writer operation instance.
 * @classdesc Scheduled varint writer operation.
 * @extends Op
 * @constructor
 * @param {number} len Value byte length
 * @param {number} val Value to write
 * @ignore
 */
function VarintOp(len, val) {
    // Fields are assigned directly rather than chaining Op() to keep this hot-path
    // constructor cheap; `fn` is shared on the prototype below.
    this.len = len;
    this.next = undefined;
    this.val = val;
}

VarintOp.prototype = Object.create(Op.prototype);
VarintOp.prototype.fn = writeVarint32;
|
|
|
|
/**
 * Writes an unsigned 32 bit value as a varint.
 * @param {number} value Value to write
 * @returns {Writer} `this`
 */
Writer$2.prototype.uint32 = function write_uint32(value) {
    // Hot path: the _push call is inlined and a varint-specific Op subclass is used,
    // since uint32 (field tags, lengths) is by far the most frequent operation.
    value = value >>> 0;
    var byteLength = value < 128 ? 1
        : value < 16384 ? 2
        : value < 2097152 ? 3
        : value < 268435456 ? 4
        : 5;
    var op = new VarintOp(byteLength, value);
    this.tail.next = op;
    this.tail = op;
    this.len += byteLength;
    return this;
};
|
|
|
|
/**
 * Writes a signed 32 bit value as a varint.
 * @function
 * @param {number} value Value to write
 * @returns {Writer} `this`
 */
Writer$2.prototype.int32 = function write_int32(value) {
    if (value < 0)
        // negative int32 values are sign-extended to 64 bits: always 10 bytes per spec
        return this._push(writeVarint64, 10, LongBits$2.fromNumber(value));
    return this.uint32(value);
};

/**
 * Writes a 32 bit value as a varint, zig-zag encoded.
 * @param {number} value Value to write
 * @returns {Writer} `this`
 */
Writer$2.prototype.sint32 = function write_sint32(value) {
    // zig-zag: (n << 1) ^ (n >> 31) maps small magnitudes to small unsigned values
    var zigzag = (value << 1 ^ value >> 31) >>> 0;
    return this.uint32(zigzag);
};
|
|
|
|
/**
 * Writes a 64 bit value (as lo/hi word pair) as a base 128 varint.
 * Note: mutates `val` while draining its bits.
 * @param {{lo: number, hi: number}} val LongBits-like value to write
 * @param {Uint8Array} buf Destination buffer
 * @param {number} pos Write position
 */
function writeVarint64(val, buf, pos) {
    // While the high word has bits left, every emitted byte needs the continuation
    // flag, and 7 bits at a time are shifted down from hi into lo.
    while (val.hi) {
        buf[pos++] = val.lo & 127 | 128;
        val.lo = (val.lo >>> 7 | val.hi << 25) >>> 0;
        val.hi >>>= 7;
    }
    // The remaining low word is a plain 32 bit varint.
    for (;;) {
        if (val.lo < 128) {
            buf[pos++] = val.lo;
            return;
        }
        buf[pos++] = val.lo & 127 | 128;
        val.lo = val.lo >>> 7;
    }
}
|
|
|
|
/**
 * Writes an unsigned 64 bit value as a varint.
 * @param {Long|number|string} value Value to write
 * @returns {Writer} `this`
 * @throws {TypeError} If `value` is a string and no long library is present.
 */
Writer$2.prototype.uint64 = function write_uint64(value) {
    var bits = LongBits$2.from(value);
    var byteLength = bits.length();
    return this._push(writeVarint64, byteLength, bits);
};

/**
 * Writes a signed 64 bit value as a varint. Same wire representation as uint64.
 * @function
 * @param {Long|number|string} value Value to write
 * @returns {Writer} `this`
 * @throws {TypeError} If `value` is a string and no long library is present.
 */
Writer$2.prototype.int64 = Writer$2.prototype.uint64;

/**
 * Writes a signed 64 bit value as a varint, zig-zag encoded.
 * @param {Long|number|string} value Value to write
 * @returns {Writer} `this`
 * @throws {TypeError} If `value` is a string and no long library is present.
 */
Writer$2.prototype.sint64 = function write_sint64(value) {
    var bits = LongBits$2.from(value).zzEncode();
    var byteLength = bits.length();
    return this._push(writeVarint64, byteLength, bits);
};

/**
 * Writes a boolish value as a varint (single byte, 0 or 1).
 * @param {boolean} value Value to write
 * @returns {Writer} `this`
 */
Writer$2.prototype.bool = function write_bool(value) {
    var bit = value ? 1 : 0;
    return this._push(writeByte, 1, bit);
};
|
|
|
|
// Writes `val` into `buf` at `pos` as four little-endian bytes.
function writeFixed32(val, buf, pos) {
    for (var i = 0; i < 4; ++i) {
        buf[pos + i] = val & 255;
        val >>>= 8;
    }
}
|
|
|
|
/**
 * Writes an unsigned 32 bit value as fixed 32 bits.
 * @param {number} value Value to write
 * @returns {Writer} `this`
 */
Writer$2.prototype.fixed32 = function write_fixed32(value) {
    // `>>> 0` coerces to an unsigned 32 bit integer.
    return this._push(writeFixed32, 4, value >>> 0);
};

/**
 * Writes a signed 32 bit value as fixed 32 bits.
 * @function
 * @param {number} value Value to write
 * @returns {Writer} `this`
 */
// Identical byte layout for signed values (two's complement).
Writer$2.prototype.sfixed32 = Writer$2.prototype.fixed32;

/**
 * Writes an unsigned 64 bit value as fixed 64 bits.
 * @param {Long|number|string} value Value to write
 * @returns {Writer} `this`
 * @throws {TypeError} If `value` is a string and no long library is present.
 */
Writer$2.prototype.fixed64 = function write_fixed64(value) {
    // Little-endian overall: low 32 bits first, then high 32 bits.
    var bits = LongBits$2.from(value);
    return this._push(writeFixed32, 4, bits.lo)._push(writeFixed32, 4, bits.hi);
};

/**
 * Writes a signed 64 bit value as fixed 64 bits.
 * @function
 * @param {Long|number|string} value Value to write
 * @returns {Writer} `this`
 * @throws {TypeError} If `value` is a string and no long library is present.
 */
Writer$2.prototype.sfixed64 = Writer$2.prototype.fixed64;

/**
 * Writes a float (32 bit).
 * @function
 * @param {number} value Value to write
 * @returns {Writer} `this`
 */
Writer$2.prototype.float = function write_float(value) {
    return this._push(util$9.float.writeFloatLE, 4, value);
};

/**
 * Writes a double (64 bit float).
 * @function
 * @param {number} value Value to write
 * @returns {Writer} `this`
 */
Writer$2.prototype.double = function write_double(value) {
    return this._push(util$9.float.writeDoubleLE, 8, value);
};
|
|
|
|
// Copies a byte sequence into `buf` at `pos`. Uses the typed array `set`
// method when available, falling back to a manual byte copy otherwise.
var writeBytes = util$9.Array.prototype.set
    ? function writeBytes_set(val, buf, pos) {
        buf.set(val, pos); // also works for plain array values
    }
    /* istanbul ignore next */
    : function writeBytes_for(val, buf, pos) {
        for (var i = 0; i < val.length; ++i)
            buf[pos + i] = val[i];
    };
|
|
|
|
/**
 * Writes a sequence of bytes.
 * @param {Uint8Array|string} value Buffer or base64 encoded string to write
 * @returns {Writer} `this`
 */
Writer$2.prototype.bytes = function write_bytes(value) {
    var len = value.length >>> 0;
    if (!len)
        return this._push(writeByte, 1, 0); // empty: single zero length byte
    if (util$9.isString(value)) {
        // Strings are interpreted as base64 and decoded before writing;
        // `len` is recomputed as the decoded byte length.
        var buf = Writer$2.alloc(len = base64.length(value));
        base64.decode(value, buf, 0);
        value = buf;
    }
    return this.uint32(len)._push(writeBytes, len, value);
};
|
|
|
|
/**
 * Writes a string, length-delimited as UTF-8.
 * @param {string} value Value to write
 * @returns {Writer} `this`
 */
Writer$2.prototype.string = function write_string(value) {
    var len = utf8$1.length(value);
    if (!len)
        return this._push(writeByte, 1, 0); // empty string: single zero length byte
    return this.uint32(len)._push(utf8$1.write, len, value);
};
|
|
|
|
/**
 * Forks this writer's state by pushing it to a stack.
 * Calling {@link Writer#reset|reset} or {@link Writer#ldelim|ldelim} resets the writer to the previous state.
 * @returns {Writer} `this`
 */
Writer$2.prototype.fork = function fork() {
    // Save the current op chain on the states stack and start a fresh,
    // empty chain headed by a noop sentinel op.
    this.states = new State(this);
    this.head = this.tail = new Op(noop$1, 0, 0);
    this.len = 0;
    return this;
};

/**
 * Resets this instance to the last state.
 * @returns {Writer} `this`
 */
Writer$2.prototype.reset = function reset() {
    if (this.states) {
        // Restore the chain saved by the most recent fork().
        this.head = this.states.head;
        this.tail = this.states.tail;
        this.len = this.states.len;
        this.states = this.states.next;
    } else {
        // No saved state: start over with an empty chain.
        this.head = this.tail = new Op(noop$1, 0, 0);
        this.len = 0;
    }
    return this;
};

/**
 * Resets to the last state and appends the fork state's current write length as a varint followed by its operations.
 * @returns {Writer} `this`
 */
Writer$2.prototype.ldelim = function ldelim() {
    var head = this.head,
        tail = this.tail,
        len = this.len;
    // Pop back to the pre-fork state, then write the forked byte count
    // as the length prefix.
    this.reset().uint32(len);
    if (len) {
        // Splice the forked ops (minus their noop sentinel) onto the chain.
        this.tail.next = head.next; // skip noop
        this.tail = tail;
        this.len += len;
    }
    return this;
};
|
|
|
|
/**
 * Finishes the write operation.
 * @returns {Uint8Array} Finished buffer
 */
Writer$2.prototype.finish = function finish() {
    var buf = this.constructor.alloc(this.len);
    var pos = 0;
    // Replay the queued write operations into the freshly allocated
    // buffer, skipping the leading noop sentinel.
    for (var op = this.head.next; op; op = op.next) {
        op.fn(op.val, buf, pos);
        pos += op.len;
    }
    // note: intentionally does not clear the writer's own op chain
    return buf;
};
|
|
|
|
// Installs the cyclic BufferWriter dependency and (re)creates the
// environment-specific Writer.create factory.
Writer$2._configure = function(BufferWriter_) {
    BufferWriter$1 = BufferWriter_;
    Writer$2.create = create$4();
    BufferWriter$1._configure();
};
|
|
|
|
var writer_buffer$1 = BufferWriter;

// extends Writer
var Writer$1 = writer$1;
(BufferWriter.prototype = Object.create(Writer$1.prototype)).constructor = BufferWriter;

var util$8 = requireMinimal$2();

/**
 * Constructs a new buffer writer instance.
 * @classdesc Wire format writer using node buffers.
 * @extends Writer
 * @constructor
 */
function BufferWriter() {
    Writer$1.call(this);
}

// Binds the Buffer-specific allocation and byte-copy helpers; re-run when
// the environment's Buffer implementation is (re)configured.
BufferWriter._configure = function () {
    /**
     * Allocates a buffer of the specified size.
     * @function
     * @param {number} size Buffer size
     * @returns {Buffer} Buffer
     */
    BufferWriter.alloc = util$8._Buffer_allocUnsafe;

    // Prefer the inherited typed-array `set` when Buffer properly extends
    // Uint8Array; otherwise fall back to Buffer#copy or a manual loop.
    BufferWriter.writeBytesBuffer = util$8.Buffer && util$8.Buffer.prototype instanceof Uint8Array && util$8.Buffer.prototype.set.name === "set"
        ? function writeBytesBuffer_set(val, buf, pos) {
            buf.set(val, pos); // faster than copy (requires node >= 4 where Buffers extend Uint8Array and set is properly inherited)
            // also works for plain array values
        }
        /* istanbul ignore next */
        : function writeBytesBuffer_copy(val, buf, pos) {
            if (val.copy) // Buffer values
                val.copy(buf, pos, 0, val.length);
            else for (var i = 0; i < val.length;) // plain array values
                buf[pos++] = val[i++];
        };
};
|
|
|
|
|
|
/**
 * @override
 */
BufferWriter.prototype.bytes = function write_bytes_buffer(value) {
    if (util$8.isString(value))
        value = util$8._Buffer_from(value, "base64"); // base64 string input
    var len = value.length >>> 0;
    this.uint32(len);
    if (len)
        this._push(BufferWriter.writeBytesBuffer, len, value);
    return this;
};

// Writes a UTF-8 string into `buf` at `pos`, choosing the fastest
// mechanism available for the string's length.
function writeStringBuffer(val, buf, pos) {
    if (val.length < 40) // plain js is faster for short strings (probably due to redundant assertions)
        util$8.utf8.write(val, buf, pos);
    else if (buf.utf8Write)
        buf.utf8Write(val, pos);
    else
        buf.write(val, pos);
}

/**
 * @override
 */
BufferWriter.prototype.string = function write_string_buffer(value) {
    // Buffer.byteLength gives the UTF-8 encoded length for the prefix.
    var len = util$8.Buffer.byteLength(value);
    this.uint32(len);
    if (len)
        this._push(writeStringBuffer, len, value);
    return this;
};

/**
 * Finishes the write operation.
 * @name BufferWriter#finish
 * @function
 * @returns {Buffer} Finished buffer
 */

BufferWriter._configure();
|
|
|
|
var reader$1 = Reader$2;

var util$7 = requireMinimal$2();

var BufferReader$1; // cyclic

var LongBits$1 = util$7.LongBits,
    utf8 = util$7.utf8;

/* istanbul ignore next */
// Builds the RangeError thrown when a read would pass the end of the
// buffer; `writeLength` defaults to 1 when omitted or zero.
function indexOutOfRange(reader, writeLength) {
    return RangeError("index out of range: " + reader.pos + " + " + (writeLength || 1) + " > " + reader.len);
}
|
|
|
|
/**
 * Constructs a new reader instance using the specified buffer.
 * @classdesc Wire format reader using `Uint8Array` if available, otherwise `Array`.
 * @constructor
 * @param {Uint8Array} buffer Buffer to read from
 */
function Reader$2(buffer) {

    /**
     * Read buffer.
     * @type {Uint8Array}
     */
    this.buf = buffer;

    /**
     * Read buffer position.
     * @type {number}
     */
    this.pos = 0;

    /**
     * Read buffer length.
     * @type {number}
     */
    // Captured once at construction; the reader never re-reads buffer.length.
    this.len = buffer.length;
}
|
|
|
|
// Reader factory for Uint8Array / plain array inputs.
var create_array = typeof Uint8Array !== "undefined"
    ? function create_typed_array(buffer) {
        if (buffer instanceof Uint8Array || Array.isArray(buffer))
            return new Reader$2(buffer);
        throw Error("illegal buffer");
    }
    /* istanbul ignore next */
    : function create_array(buffer) {
        if (Array.isArray(buffer))
            return new Reader$2(buffer);
        throw Error("illegal buffer");
    };

// Returns the environment-specific Reader.create implementation. The Buffer
// variant rebinds Reader$2.create on first use so later calls skip setup.
var create$3 = function create() {
    return util$7.Buffer
        ? function create_buffer_setup(buffer) {
            return (Reader$2.create = function create_buffer(buffer) {
                return util$7.Buffer.isBuffer(buffer)
                    ? new BufferReader$1(buffer)
                    /* istanbul ignore next */
                    : create_array(buffer);
            })(buffer);
        }
        /* istanbul ignore next */
        : create_array;
};

/**
 * Creates a new reader using the specified buffer.
 * @function
 * @param {Uint8Array|Buffer} buffer Buffer to read from
 * @returns {Reader|BufferReader} A {@link BufferReader} if `buffer` is a Buffer, otherwise a {@link Reader}
 * @throws {Error} If `buffer` is not a valid buffer
 */
Reader$2.create = create$3();

// subarray returns a view without copying; slice is the legacy fallback.
Reader$2.prototype._slice = util$7.Array.prototype.subarray || /* istanbul ignore next */ util$7.Array.prototype.slice;
|
|
|
|
/**
 * Reads a varint as an unsigned 32 bit value.
 * @function
 * @returns {number} Value read
 */
Reader$2.prototype.uint32 = (function read_uint32_setup() {
    var value = 4294967295; // optimizer type-hint, tends to deopt otherwise (?!)
    return function read_uint32() {
        // Unrolled: up to 5 bytes of 7 payload bits each; a byte < 128
        // (continuation bit clear) terminates the varint.
        value = ( this.buf[this.pos] & 127 ) >>> 0; if (this.buf[this.pos++] < 128) return value;
        value = (value | (this.buf[this.pos] & 127) << 7) >>> 0; if (this.buf[this.pos++] < 128) return value;
        value = (value | (this.buf[this.pos] & 127) << 14) >>> 0; if (this.buf[this.pos++] < 128) return value;
        value = (value | (this.buf[this.pos] & 127) << 21) >>> 0; if (this.buf[this.pos++] < 128) return value;
        value = (value | (this.buf[this.pos] & 15) << 28) >>> 0; if (this.buf[this.pos++] < 128) return value;

        /* istanbul ignore if */
        // A longer (64 bit) varint on the wire: skip its remaining bytes,
        // keeping the truncated low 32 bits, but fail if that would run
        // past the end of the buffer (10 bytes is the varint maximum).
        if ((this.pos += 5) > this.len) {
            this.pos = this.len;
            throw indexOutOfRange(this, 10);
        }
        return value;
    };
})();
|
|
|
|
/**
 * Reads a varint as a signed 32 bit value.
 * @returns {number} Value read
 */
Reader$2.prototype.int32 = function read_int32() {
    // `| 0` reinterprets the unsigned varint as two's complement.
    var unsigned = this.uint32();
    return unsigned | 0;
};

/**
 * Reads a zig-zag encoded varint as a signed 32 bit value.
 * @returns {number} Value read
 */
Reader$2.prototype.sint32 = function read_sint32() {
    // Undo zig-zag: shift the magnitude down and restore the sign bit.
    var zigzag = this.uint32();
    return ((zigzag >>> 1) ^ -(zigzag & 1)) | 0;
};
|
|
|
|
/* eslint-disable no-invalid-this */

// Reads a 64 bit varint into a LongBits pair; called with a Reader as
// `this`. Takes an unchecked fast path while at least 5 bytes remain,
// and a bounds-checked slow path near the end of the buffer.
function readLongVarint() {
    // tends to deopt with local vars for octet etc.
    var bits = new LongBits$1(0, 0);
    var i = 0;
    if (this.len - this.pos > 4) { // fast route (lo)
        for (; i < 4; ++i) {
            // 1st..4th
            bits.lo = (bits.lo | (this.buf[this.pos] & 127) << i * 7) >>> 0;
            if (this.buf[this.pos++] < 128)
                return bits;
        }
        // 5th byte straddles lo and hi: 4 bits into lo, 3 bits into hi.
        bits.lo = (bits.lo | (this.buf[this.pos] & 127) << 28) >>> 0;
        bits.hi = (bits.hi | (this.buf[this.pos] & 127) >> 4) >>> 0;
        if (this.buf[this.pos++] < 128)
            return bits;
        i = 0;
    } else {
        for (; i < 3; ++i) {
            /* istanbul ignore if */
            if (this.pos >= this.len)
                throw indexOutOfRange(this);
            // 1st..3th
            bits.lo = (bits.lo | (this.buf[this.pos] & 127) << i * 7) >>> 0;
            if (this.buf[this.pos++] < 128)
                return bits;
        }
        // 4th: at most 4 bytes remained, so this must be the last byte.
        bits.lo = (bits.lo | (this.buf[this.pos++] & 127) << i * 7) >>> 0;
        return bits;
    }
    if (this.len - this.pos > 4) { // fast route (hi)
        for (; i < 5; ++i) {
            // 6th..10th
            bits.hi = (bits.hi | (this.buf[this.pos] & 127) << i * 7 + 3) >>> 0;
            if (this.buf[this.pos++] < 128)
                return bits;
        }
    } else {
        for (; i < 5; ++i) {
            /* istanbul ignore if */
            if (this.pos >= this.len)
                throw indexOutOfRange(this);
            // 6th..10th
            bits.hi = (bits.hi | (this.buf[this.pos] & 127) << i * 7 + 3) >>> 0;
            if (this.buf[this.pos++] < 128)
                return bits;
        }
    }
    /* istanbul ignore next */
    // More than 10 continuation bytes is not a valid varint.
    throw Error("invalid varint encoding");
}

/* eslint-enable no-invalid-this */
|
|
|
|
/**
|
|
* Reads a varint as a signed 64 bit value.
|
|
* @name Reader#int64
|
|
* @function
|
|
* @returns {Long} Value read
|
|
*/
|
|
|
|
/**
|
|
* Reads a varint as an unsigned 64 bit value.
|
|
* @name Reader#uint64
|
|
* @function
|
|
* @returns {Long} Value read
|
|
*/
|
|
|
|
/**
|
|
* Reads a zig-zag encoded varint as a signed 64 bit value.
|
|
* @name Reader#sint64
|
|
* @function
|
|
* @returns {Long} Value read
|
|
*/
|
|
|
|
/**
 * Reads a varint as a boolean.
 * @returns {boolean} Value read
 */
Reader$2.prototype.bool = function read_bool() {
    // Any non-zero varint counts as true.
    var value = this.uint32();
    return value !== 0;
};
|
|
|
|
// Reassembles four little-endian bytes ending at `end` (exclusive) into an
// unsigned 32 bit integer. Note that this indexes relative to `end`, not a
// start position.
function readFixed32_end(buf, end) { // note that this uses `end`, not `pos`
    var value = 0;
    for (var i = 0; i < 4; ++i)
        value |= buf[end - 4 + i] << (i * 8);
    return value >>> 0;
}
|
|
|
|
/**
 * Reads fixed 32 bits as an unsigned 32 bit integer.
 * @returns {number} Value read
 */
Reader$2.prototype.fixed32 = function read_fixed32() {

    /* istanbul ignore if */
    if (this.pos + 4 > this.len)
        throw indexOutOfRange(this, 4);

    // `this.pos += 4` evaluates to the new position, which is the
    // exclusive end that readFixed32_end expects.
    return readFixed32_end(this.buf, this.pos += 4);
};

/**
 * Reads fixed 32 bits as a signed 32 bit integer.
 * @returns {number} Value read
 */
Reader$2.prototype.sfixed32 = function read_sfixed32() {

    /* istanbul ignore if */
    if (this.pos + 4 > this.len)
        throw indexOutOfRange(this, 4);

    // `| 0` reinterprets the unsigned result as two's complement.
    return readFixed32_end(this.buf, this.pos += 4) | 0;
};

/* eslint-disable no-invalid-this */

// Reads 8 little-endian bytes as a LongBits pair (lo word first, then hi).
// Called with a Reader as `this`.
function readFixed64(/* this: Reader */) {

    /* istanbul ignore if */
    if (this.pos + 8 > this.len)
        throw indexOutOfRange(this, 8);

    return new LongBits$1(readFixed32_end(this.buf, this.pos += 4), readFixed32_end(this.buf, this.pos += 4));
}

/* eslint-enable no-invalid-this */
|
|
|
|
/**
|
|
* Reads fixed 64 bits.
|
|
* @name Reader#fixed64
|
|
* @function
|
|
* @returns {Long} Value read
|
|
*/
|
|
|
|
/**
|
|
* Reads zig-zag encoded fixed 64 bits.
|
|
* @name Reader#sfixed64
|
|
* @function
|
|
* @returns {Long} Value read
|
|
*/
|
|
|
|
/**
 * Reads a float (32 bit) as a number.
 * @function
 * @returns {number} Value read
 */
Reader$2.prototype.float = function read_float() {

    /* istanbul ignore if */
    if (this.pos + 4 > this.len)
        throw indexOutOfRange(this, 4);

    var value = util$7.float.readFloatLE(this.buf, this.pos);
    this.pos += 4;
    return value;
};
|
|
|
|
/**
 * Reads a double (64 bit float) as a number.
 * @function
 * @returns {number} Value read
 * @throws {RangeError} If fewer than 8 bytes are left to read
 */
Reader$2.prototype.double = function read_double() {

    /* istanbul ignore if */
    if (this.pos + 8 > this.len)
        // was indexOutOfRange(this, 4): the error message misreported the
        // number of bytes a double actually requires
        throw indexOutOfRange(this, 8);

    var value = util$7.float.readDoubleLE(this.buf, this.pos);
    this.pos += 8;
    return value;
};
|
|
|
|
/**
 * Reads a sequence of bytes preceeded by its length as a varint.
 * @returns {Uint8Array} Value read
 */
Reader$2.prototype.bytes = function read_bytes() {
    var length = this.uint32(),
        start = this.pos,
        end = this.pos + length;

    /* istanbul ignore if */
    if (end > this.len)
        throw indexOutOfRange(this, length);

    this.pos += length;
    if (Array.isArray(this.buf)) // plain array
        return this.buf.slice(start, end);
    return start === end // fix for IE 10/Win8 and others' subarray returning array of size 1
        ? new this.buf.constructor(0)
        : this._slice.call(this.buf, start, end);
};

/**
 * Reads a string preceeded by its byte length as a varint.
 * @returns {string} Value read
 */
Reader$2.prototype.string = function read_string() {
    // Delegates to bytes() for bounds checking, then decodes as UTF-8.
    var bytes = this.bytes();
    return utf8.read(bytes, 0, bytes.length);
};
|
|
|
|
/**
 * Skips the specified number of bytes if specified, otherwise skips a varint.
 * @param {number} [length] Length if known, otherwise a varint is assumed
 * @returns {Reader} `this`
 * @throws {RangeError} If skipping would pass the end of the buffer
 */
Reader$2.prototype.skip = function skip(length) {
    if (typeof length === "number") {
        /* istanbul ignore if */
        if (this.pos + length > this.len)
            throw indexOutOfRange(this, length);
        this.pos += length;
    } else {
        // No length: consume varint bytes until a byte with the
        // continuation bit (0x80) clear.
        do {
            /* istanbul ignore if */
            if (this.pos >= this.len)
                throw indexOutOfRange(this);
        } while (this.buf[this.pos++] & 128);
    }
    return this;
};
|
|
|
|
/**
 * Skips the next element of the specified wire type.
 * @param {number} wireType Wire type received
 * @returns {Reader} `this`
 * @throws {Error} If `wireType` is not a valid wire type
 */
Reader$2.prototype.skipType = function(wireType) {
    switch (wireType) {
        case 0: // varint
            this.skip();
            break;
        case 1: // fixed 64
            this.skip(8);
            break;
        case 2: // length-delimited
            this.skip(this.uint32());
            break;
        case 3: // start group: skip nested fields until the end group tag (4)
            while ((wireType = this.uint32() & 7) !== 4) {
                this.skipType(wireType);
            }
            break;
        case 5: // fixed 32
            this.skip(4);
            break;

        /* istanbul ignore next */
        default:
            throw Error("invalid wire type " + wireType + " at offset " + this.pos);
    }
    return this;
};
|
|
|
|
// Installs the cyclic BufferReader dependency, recreates the create
// factory and adds the 64 bit read methods: they return Long instances
// when a long library is present, otherwise (possibly lossy) numbers.
Reader$2._configure = function(BufferReader_) {
    BufferReader$1 = BufferReader_;
    Reader$2.create = create$3();
    BufferReader$1._configure();

    var fn = util$7.Long ? "toLong" : /* istanbul ignore next */ "toNumber";
    util$7.merge(Reader$2.prototype, {

        int64: function read_int64() {
            return readLongVarint.call(this)[fn](false);
        },

        uint64: function read_uint64() {
            return readLongVarint.call(this)[fn](true);
        },

        sint64: function read_sint64() {
            // zig-zag decode before converting.
            return readLongVarint.call(this).zzDecode()[fn](false);
        },

        fixed64: function read_fixed64() {
            return readFixed64.call(this)[fn](true);
        },

        sfixed64: function read_sfixed64() {
            return readFixed64.call(this)[fn](false);
        }

    });
};
|
|
|
|
var reader_buffer$1 = BufferReader;

// extends Reader
var Reader$1 = reader$1;
(BufferReader.prototype = Object.create(Reader$1.prototype)).constructor = BufferReader;

var util$6 = requireMinimal$2();

/**
 * Constructs a new buffer reader instance.
 * @classdesc Wire format reader using node buffers.
 * @extends Reader
 * @constructor
 * @param {Buffer} buffer Buffer to read from
 */
function BufferReader(buffer) {
    Reader$1.call(this, buffer);

    /**
     * Read buffer.
     * @name BufferReader#buf
     * @type {Buffer}
     */
}

BufferReader._configure = function () {
    /* istanbul ignore else */
    // Buffer#slice returns a view without copying, unlike Uint8Array#slice.
    if (util$6.Buffer)
        BufferReader.prototype._slice = util$6.Buffer.prototype.slice;
};
|
|
|
|
|
|
/**
 * @override
 */
BufferReader.prototype.string = function read_string_buffer() {
    var len = this.uint32(); // modifies pos
    // utf8Slice (node internal) is fastest when present; toString otherwise.
    // Math.min clamps the end so a corrupt length cannot read out of range.
    return this.buf.utf8Slice
        ? this.buf.utf8Slice(this.pos, this.pos = Math.min(this.pos + len, this.len))
        : this.buf.toString("utf-8", this.pos, this.pos = Math.min(this.pos + len, this.len));
};

/**
 * Reads a sequence of bytes preceeded by its length as a varint.
 * @name BufferReader#bytes
 * @function
 * @returns {Buffer} Value read
 */

BufferReader._configure();
|
|
|
|
var rpc$2 = {};

var service$2 = Service$1;

var util$5 = requireMinimal$2();

// Extends EventEmitter so services can emit "data", "error" and "end".
(Service$1.prototype = Object.create(util$5.EventEmitter.prototype)).constructor = Service$1;
|
|
|
|
/**
 * A service method callback as used by {@link rpc.ServiceMethod|ServiceMethod}.
 *
 * Differs from {@link RPCImplCallback} in that it is an actual callback of a service method which may not return `response = null`.
 * @typedef rpc.ServiceMethodCallback
 * @template TRes extends Message<TRes>
 * @type {function}
 * @param {Error|null} error Error, if any
 * @param {TRes} [response] Response message
 * @returns {undefined}
 */

/**
 * A service method part of a {@link rpc.Service} as created by {@link Service.create}.
 * @typedef rpc.ServiceMethod
 * @template TReq extends Message<TReq>
 * @template TRes extends Message<TRes>
 * @type {function}
 * @param {TReq|Properties<TReq>} request Request message or plain object
 * @param {rpc.ServiceMethodCallback<TRes>} [callback] Node-style callback called with the error, if any, and the response message
 * @returns {Promise<Message<TRes>>} Promise if `callback` has been omitted, otherwise `undefined`
 */

/**
 * Constructs a new RPC service instance.
 * @classdesc An RPC service as returned by {@link Service#create}.
 * @exports rpc.Service
 * @extends util.EventEmitter
 * @constructor
 * @param {RPCImpl} rpcImpl RPC implementation
 * @param {boolean} [requestDelimited=false] Whether requests are length-delimited
 * @param {boolean} [responseDelimited=false] Whether responses are length-delimited
 * @throws {TypeError} If `rpcImpl` is not a function
 */
function Service$1(rpcImpl, requestDelimited, responseDelimited) {

    if (typeof rpcImpl !== "function")
        throw TypeError("rpcImpl must be a function");

    util$5.EventEmitter.call(this);

    /**
     * RPC implementation. Becomes `null` once the service is ended.
     * @type {RPCImpl|null}
     */
    this.rpcImpl = rpcImpl;

    /**
     * Whether requests are length-delimited.
     * @type {boolean}
     */
    this.requestDelimited = Boolean(requestDelimited);

    /**
     * Whether responses are length-delimited.
     * @type {boolean}
     */
    this.responseDelimited = Boolean(responseDelimited);
}
|
|
|
|
/**
 * Calls a service method through {@link rpc.Service#rpcImpl|rpcImpl}.
 * @param {Method|rpc.ServiceMethod<TReq,TRes>} method Reflected or static method
 * @param {Constructor<TReq>} requestCtor Request constructor
 * @param {Constructor<TRes>} responseCtor Response constructor
 * @param {TReq|Properties<TReq>} request Request message or plain object
 * @param {rpc.ServiceMethodCallback<TRes>} callback Service callback
 * @returns {undefined}
 * @template TReq extends Message<TReq>
 * @template TRes extends Message<TRes>
 */
Service$1.prototype.rpcCall = function rpcCall(method, requestCtor, responseCtor, request, callback) {

    if (!request)
        throw TypeError("request must be specified");

    var self = this;
    // Without a callback, promisify and re-enter with one.
    if (!callback)
        return util$5.asPromise(rpcCall, self, method, requestCtor, responseCtor, request);

    // Calling after end(): report asynchronously so the callback always
    // fires on a later tick.
    if (!self.rpcImpl) {
        setTimeout(function() { callback(Error("already ended")); }, 0);
        return undefined;
    }

    try {
        return self.rpcImpl(
            method,
            // Encode the request with the configured (possibly delimited) form.
            requestCtor[self.requestDelimited ? "encodeDelimited" : "encode"](request).finish(),
            function rpcCallback(err, response) {

                if (err) {
                    self.emit("error", err, method);
                    return callback(err);
                }

                // A null response signals end of stream from the transport.
                if (response === null) {
                    self.end(/* endedByRPC */ true);
                    return undefined;
                }

                if (!(response instanceof responseCtor)) {
                    // Decode raw response bytes unless already a message.
                    try {
                        response = responseCtor[self.responseDelimited ? "decodeDelimited" : "decode"](response);
                    } catch (err) {
                        self.emit("error", err, method);
                        return callback(err);
                    }
                }

                self.emit("data", response, method);
                return callback(null, response);
            }
        );
    } catch (err) {
        // Synchronous failure in rpcImpl: surface it asynchronously too.
        self.emit("error", err, method);
        setTimeout(function() { callback(err); }, 0);
        return undefined;
    }
};
|
|
|
|
/**
 * Ends this service and emits the `end` event.
 * @param {boolean} [endedByRPC=false] Whether the service has been ended by the RPC implementation.
 * @returns {rpc.Service} `this`
 */
Service$1.prototype.end = function end(endedByRPC) {
    // Already ended: nothing to do.
    if (!this.rpcImpl)
        return this;
    if (!endedByRPC) // signal end to rpcImpl
        this.rpcImpl(null, null, null);
    this.rpcImpl = null;
    this.emit("end").off();
    return this;
};
|
|
|
|
(function (exports) {

    /**
     * Streaming RPC helpers.
     * @namespace
     */
    var rpc = exports;

    /**
     * RPC implementation passed to {@link Service#create} performing a service request on network level, i.e. by utilizing http requests or websockets.
     * @typedef RPCImpl
     * @type {function}
     * @param {Method|rpc.ServiceMethod<Message<{}>,Message<{}>>} method Reflected or static method being called
     * @param {Uint8Array} requestData Request data
     * @param {RPCImplCallback} callback Callback function
     * @returns {undefined}
     * @example
     * function rpcImpl(method, requestData, callback) {
     *     if (protobuf.util.lcFirst(method.name) !== "myMethod") // compatible with static code
     *         throw Error("no such method");
     *     asynchronouslyObtainAResponse(requestData, function(err, responseData) {
     *         callback(err, responseData);
     *     });
     * }
     */

    /**
     * Node-style callback as used by {@link RPCImpl}.
     * @typedef RPCImplCallback
     * @type {function}
     * @param {Error|null} error Error, if any, otherwise `null`
     * @param {Uint8Array|null} [response] Response data or `null` to signal end of stream, if there hasn't been an error
     * @returns {undefined}
     */

    // Expose the Service class on the rpc namespace.
    rpc.Service = service$2;
} (rpc$2));
|
|
|
|
var roots$1 = {};

// Assembles the minimal protobuf.js runtime on the module's exports object.
(function (exports) {
    var protobuf = exports;

    /**
     * Build type, one of `"full"`, `"light"` or `"minimal"`.
     * @name build
     * @type {string}
     * @const
     */
    protobuf.build = "minimal";

    // Serialization
    protobuf.Writer = writer$1;
    protobuf.BufferWriter = writer_buffer$1;
    protobuf.Reader = reader$1;
    protobuf.BufferReader = reader_buffer$1;

    // Utility
    protobuf.util = requireMinimal$2();
    protobuf.rpc = rpc$2;
    protobuf.roots = roots$1;
    protobuf.configure = configure;

    /* istanbul ignore next */
    /**
     * Reconfigures the library according to the environment.
     * @returns {undefined}
     */
    function configure() {
        protobuf.util._configure();
        protobuf.Writer._configure(protobuf.BufferWriter);
        protobuf.Reader._configure(protobuf.BufferReader);
    }

    // Set up buffer utility according to the environment
    configure();
} (indexMinimal$1));
|
|
|
|
var util$4 = {exports: {}};

var codegen_1;
var hasRequiredCodegen;

// Lazily defines the codegen helper (runtime generation of functions from
// printf-style format strings); memoized via hasRequiredCodegen.
function requireCodegen () {
    if (hasRequiredCodegen) return codegen_1;
    hasRequiredCodegen = 1;
    codegen_1 = codegen;

    /**
     * Begins generating a function.
     * @memberof util
     * @param {string[]} functionParams Function parameter names
     * @param {string} [functionName] Function name if not anonymous
     * @returns {Codegen} Appender that appends code to the function's body
     */
    function codegen(functionParams, functionName) {

        /* istanbul ignore if */
        // Called with only a name: shift arguments.
        if (typeof functionParams === "string") {
            functionName = functionParams;
            functionParams = undefined;
        }

        var body = [];

        /**
         * Appends code to the function's body or finishes generation.
         * @typedef Codegen
         * @type {function}
         * @param {string|Object.<string,*>} [formatStringOrScope] Format string or, to finish the function, an object of additional scope variables, if any
         * @param {...*} [formatParams] Format parameters
         * @returns {Codegen|Function} Itself or the generated function if finished
         * @throws {Error} If format parameter counts do not match
         */

        function Codegen(formatStringOrScope) {
            // note that explicit array handling below makes this ~50% faster

            // finish the function
            if (typeof formatStringOrScope !== "string") {
                var source = toString();
                if (codegen.verbose)
                    console.log("codegen: " + source); // eslint-disable-line no-console
                source = "return " + source;
                if (formatStringOrScope) {
                    // Compile with the scope object's keys as parameter names
                    // and immediately apply its values, binding them.
                    var scopeKeys = Object.keys(formatStringOrScope),
                        scopeParams = new Array(scopeKeys.length + 1),
                        scopeValues = new Array(scopeKeys.length),
                        scopeOffset = 0;
                    while (scopeOffset < scopeKeys.length) {
                        scopeParams[scopeOffset] = scopeKeys[scopeOffset];
                        scopeValues[scopeOffset] = formatStringOrScope[scopeKeys[scopeOffset++]];
                    }
                    scopeParams[scopeOffset] = source;
                    return Function.apply(null, scopeParams).apply(null, scopeValues); // eslint-disable-line no-new-func
                }
                return Function(source)(); // eslint-disable-line no-new-func
            }

            // otherwise append to body
            var formatParams = new Array(arguments.length - 1),
                formatOffset = 0;
            while (formatOffset < formatParams.length)
                formatParams[formatOffset] = arguments[++formatOffset];
            formatOffset = 0;
            // printf-style substitution: %d/%f number, %i integer,
            // %j JSON, %s string, %% literal percent.
            formatStringOrScope = formatStringOrScope.replace(/%([%dfijs])/g, function replace($0, $1) {
                var value = formatParams[formatOffset++];
                switch ($1) {
                    case "d": case "f": return String(Number(value));
                    case "i": return String(Math.floor(value));
                    case "j": return JSON.stringify(value);
                    case "s": return String(value);
                }
                return "%";
            });
            if (formatOffset !== formatParams.length)
                throw Error("parameter count mismatch");
            body.push(formatStringOrScope);
            return Codegen;
        }

        function toString(functionNameOverride) {
            return "function " + (functionNameOverride || functionName || "") + "(" + (functionParams && functionParams.join(",") || "") + "){\n " + body.join("\n ") + "\n}";
        }

        Codegen.toString = toString;
        return Codegen;
    }

    /**
     * Begins generating a function.
     * @memberof util
     * @function codegen
     * @param {string} [functionName] Function name if not anonymous
     * @returns {Codegen} Appender that appends code to the function's body
     * @variation 2
     */

    /**
     * When set to `true`, codegen will log generated code to console. Useful for debugging.
     * @name util.codegen.verbose
     * @type {boolean}
     */
    codegen.verbose = false;
    return codegen_1;
}
|
|
|
|
var fetch_1;
var hasRequiredFetch;

// Lazy CommonJS-style initializer for the util.fetch module (rollup bundle
// pattern): the first call builds and caches the export; later calls return it.
function requireFetch () {
	if (hasRequiredFetch) return fetch_1;
	hasRequiredFetch = 1;
	fetch_1 = fetch;

	var asPromise = requireAspromise(),
	    inquire = requireInquire();

	// inquire() yields null when "fs" cannot be loaded (i.e. in browsers),
	// which makes fetch() fall through to the XHR implementation below.
	var fs = inquire("fs");

	/**
	 * Node-style callback as used by {@link util.fetch}.
	 * @typedef FetchCallback
	 * @type {function}
	 * @param {?Error} error Error, if any, otherwise `null`
	 * @param {string} [contents] File contents, if there hasn't been an error
	 * @returns {undefined}
	 */

	/**
	 * Options as used by {@link util.fetch}.
	 * @typedef FetchOptions
	 * @type {Object}
	 * @property {boolean} [binary=false] Whether expecting a binary response
	 * @property {boolean} [xhr=false] If `true`, forces the use of XMLHttpRequest
	 */

	/**
	 * Fetches the contents of a file.
	 * @memberof util
	 * @param {string} filename File path or url
	 * @param {FetchOptions} options Fetch options
	 * @param {FetchCallback} callback Callback function
	 * @returns {undefined}
	 */
	function fetch(filename, options, callback) {
	    // (filename, callback) overload: shift arguments.
	    if (typeof options === "function") {
	        callback = options;
	        options = {};
	    } else if (!options)
	        options = {};

	    // No callback given: promisify by re-invoking fetch with one.
	    if (!callback)
	        return asPromise(fetch, this, filename, options); // eslint-disable-line no-invalid-this

	    // if a node-like filesystem is present, try it first but fall back to XHR if nothing is found.
	    if (!options.xhr && fs && fs.readFile)
	        return fs.readFile(filename, function fetchReadFileCallback(err, contents) {
	            return err && typeof XMLHttpRequest !== "undefined"
	                ? fetch.xhr(filename, options, callback)
	                : err
	                ? callback(err)
	                : callback(null, options.binary ? contents : contents.toString("utf8"));
	        });

	    // use the XHR version otherwise.
	    return fetch.xhr(filename, options, callback);
	}

	/**
	 * Fetches the contents of a file.
	 * @name util.fetch
	 * @function
	 * @param {string} path File path or url
	 * @param {FetchCallback} callback Callback function
	 * @returns {undefined}
	 * @variation 2
	 */

	/**
	 * Fetches the contents of a file.
	 * @name util.fetch
	 * @function
	 * @param {string} path File path or url
	 * @param {FetchOptions} [options] Fetch options
	 * @returns {Promise<string|Uint8Array>} Promise
	 * @variation 3
	 */

	/**
	 * XHR-based implementation used in browsers (and when `options.xhr` is set).
	 * Same signature as {@link util.fetch} with a required callback.
	 */
	fetch.xhr = function fetch_xhr(filename, options, callback) {
	    var xhr = new XMLHttpRequest();
	    xhr.onreadystatechange /* works everywhere */ = function fetchOnReadyStateChange() {

	        // Only act once the request is DONE.
	        if (xhr.readyState !== 4)
	            return undefined;

	        // local cors security errors return status 0 / empty string, too. afaik this cannot be
	        // reliably distinguished from an actually empty file for security reasons. feel free
	        // to send a pull request if you are aware of a solution.
	        if (xhr.status !== 0 && xhr.status !== 200)
	            return callback(Error("status " + xhr.status));

	        // if binary data is expected, make sure that some sort of array is returned, even if
	        // ArrayBuffers are not supported. the binary string fallback, however, is unsafe.
	        if (options.binary) {
	            var buffer = xhr.response;
	            if (!buffer) {
	                // Rebuild bytes from the (x-user-defined charset) response text.
	                buffer = [];
	                for (var i = 0; i < xhr.responseText.length; ++i)
	                    buffer.push(xhr.responseText.charCodeAt(i) & 255);
	            }
	            return callback(null, typeof Uint8Array !== "undefined" ? new Uint8Array(buffer) : buffer);
	        }
	        return callback(null, xhr.responseText);
	    };

	    if (options.binary) {
	        // ref: https://developer.mozilla.org/en-US/docs/Web/API/XMLHttpRequest/Sending_and_Receiving_Binary_Data#Receiving_binary_data_in_older_browsers
	        if ("overrideMimeType" in xhr)
	            xhr.overrideMimeType("text/plain; charset=x-user-defined");
	        xhr.responseType = "arraybuffer";
	    }

	    xhr.open("GET", filename);
	    xhr.send();
	};
	return fetch_1;
}
|
|
|
|
var path = {};

var hasRequiredPath;

// Lazy initializer for the minimal path module (rollup bundle pattern):
// builds the exports object on first call and returns the cached object after.
function requirePath () {
	if (hasRequiredPath) return path;
	hasRequiredPath = 1;
	(function (exports) {

		/**
		 * A minimal path module to resolve Unix, Windows and URL paths alike.
		 * @memberof util
		 * @namespace
		 */
		var path = exports;

		/**
		 * Tests if the specified path is absolute.
		 * @param {string} p Path to test
		 * @returns {boolean} `true` if path is absolute
		 */
		var isAbsolute =
		path.isAbsolute = function isAbsolute(p) {
			// Absolute means a leading "/" or a drive/scheme prefix like "C:" or "http:".
			return /^(?:\/|\w+:)/.test(p);
		};

		/**
		 * Normalizes the specified path: unifies separators, collapses duplicate
		 * slashes and resolves "." and ".." segments where possible.
		 * @param {string} p Path to normalize
		 * @returns {string} Normalized path
		 */
		var normalize =
		path.normalize = function normalize(p) {
			// Convert Windows separators and squash runs of slashes first.
			p = p.replace(/\\/g, "/").replace(/\/{2,}/g, "/");
			var absolute = isAbsolute(p);
			var segments = p.split("/");
			var prefix = "";
			if (absolute)
				prefix = segments.shift() + "/";
			var index = 0;
			while (index < segments.length) {
				var segment = segments[index];
				if (segment === ".") {
					// A "." segment is always redundant.
					segments.splice(index, 1);
				} else if (segment === "..") {
					if (index > 0 && segments[index - 1] !== "..") {
						// Cancel out against the preceding real segment.
						segments.splice(index - 1, 2);
						--index;
					} else if (absolute) {
						// Cannot go above the root of an absolute path.
						segments.splice(index, 1);
					} else {
						// Leading ".." on a relative path is kept.
						++index;
					}
				} else {
					++index;
				}
			}
			return prefix + segments.join("/");
		};

		/**
		 * Resolves the specified include path against the specified origin path.
		 * @param {string} originPath Path to the origin file
		 * @param {string} includePath Include path relative to origin path
		 * @param {boolean} [alreadyNormalized=false] `true` if both paths are already known to be normalized
		 * @returns {string} Path to the include file
		 */
		path.resolve = function resolve(originPath, includePath, alreadyNormalized) {
			if (!alreadyNormalized)
				includePath = normalize(includePath);
			// An absolute include path wins outright.
			if (isAbsolute(includePath))
				return includePath;
			if (!alreadyNormalized)
				originPath = normalize(originPath);
			// Strip the file name portion of the origin path, then join.
			originPath = originPath.replace(/(?:\/|^)[^/]+$/, "");
			return originPath.length === 0 ? includePath : normalize(originPath + "/" + includePath);
		};
	} (path));
	return path;
}
|
|
|
|
var types$1 = {};

var hasRequiredTypes;

// Lazy initializer for the common protobuf type-constant tables (wire types,
// defaults, long/map-key/packed subsets). Rollup bundle lazy-require pattern.
function requireTypes () {
	if (hasRequiredTypes) return types$1;
	hasRequiredTypes = 1;
	(function (exports) {

		/**
		 * Common type constants.
		 * @namespace
		 */
		var types = exports;

		var util = requireUtil();

		// Canonical ordering of all basic type names. bake() maps slices of this
		// list (starting at an optional offset) onto parallel value arrays below,
		// so the indices here are load-bearing.
		var s = [
		    "double", // 0
		    "float", // 1
		    "int32", // 2
		    "uint32", // 3
		    "sint32", // 4
		    "fixed32", // 5
		    "sfixed32", // 6
		    "int64", // 7
		    "uint64", // 8
		    "sint64", // 9
		    "fixed64", // 10
		    "sfixed64", // 11
		    "bool", // 12
		    "string", // 13
		    "bytes" // 14
		];

		// Builds an object mapping type names (entries of `s`, starting at
		// `offset`) to the corresponding entries of `values`.
		function bake(values, offset) {
		    var i = 0, o = {};
		    offset |= 0;
		    while (i < values.length) o[s[i + offset]] = values[i++];
		    return o;
		}

		/**
		 * Basic type wire types.
		 * @type {Object.<string,number>}
		 * @const
		 * @property {number} double=1 Fixed64 wire type
		 * @property {number} float=5 Fixed32 wire type
		 * @property {number} int32=0 Varint wire type
		 * @property {number} uint32=0 Varint wire type
		 * @property {number} sint32=0 Varint wire type
		 * @property {number} fixed32=5 Fixed32 wire type
		 * @property {number} sfixed32=5 Fixed32 wire type
		 * @property {number} int64=0 Varint wire type
		 * @property {number} uint64=0 Varint wire type
		 * @property {number} sint64=0 Varint wire type
		 * @property {number} fixed64=1 Fixed64 wire type
		 * @property {number} sfixed64=1 Fixed64 wire type
		 * @property {number} bool=0 Varint wire type
		 * @property {number} string=2 Ldelim wire type
		 * @property {number} bytes=2 Ldelim wire type
		 */
		types.basic = bake([
		    /* double   */ 1,
		    /* float    */ 5,
		    /* int32    */ 0,
		    /* uint32   */ 0,
		    /* sint32   */ 0,
		    /* fixed32  */ 5,
		    /* sfixed32 */ 5,
		    /* int64    */ 0,
		    /* uint64   */ 0,
		    /* sint64   */ 0,
		    /* fixed64  */ 1,
		    /* sfixed64 */ 1,
		    /* bool     */ 0,
		    /* string   */ 2,
		    /* bytes    */ 2
		]);

		/**
		 * Basic type defaults.
		 * @type {Object.<string,*>}
		 * @const
		 * @property {number} double=0 Double default
		 * @property {number} float=0 Float default
		 * @property {number} int32=0 Int32 default
		 * @property {number} uint32=0 Uint32 default
		 * @property {number} sint32=0 Sint32 default
		 * @property {number} fixed32=0 Fixed32 default
		 * @property {number} sfixed32=0 Sfixed32 default
		 * @property {number} int64=0 Int64 default
		 * @property {number} uint64=0 Uint64 default
		 * @property {number} sint64=0 Sint32 default
		 * @property {number} fixed64=0 Fixed64 default
		 * @property {number} sfixed64=0 Sfixed64 default
		 * @property {boolean} bool=false Bool default
		 * @property {string} string="" String default
		 * @property {Array.<number>} bytes=Array(0) Bytes default
		 * @property {null} message=null Message default
		 */
		types.defaults = bake([
		    /* double   */ 0,
		    /* float    */ 0,
		    /* int32    */ 0,
		    /* uint32   */ 0,
		    /* sint32   */ 0,
		    /* fixed32  */ 0,
		    /* sfixed32 */ 0,
		    /* int64    */ 0,
		    /* uint64   */ 0,
		    /* sint64   */ 0,
		    /* fixed64  */ 0,
		    /* sfixed64 */ 0,
		    /* bool     */ false,
		    /* string   */ "",
		    /* bytes    */ util.emptyArray,
		    /* message  */ null
		]);

		/**
		 * Basic long type wire types.
		 * @type {Object.<string,number>}
		 * @const
		 * @property {number} int64=0 Varint wire type
		 * @property {number} uint64=0 Varint wire type
		 * @property {number} sint64=0 Varint wire type
		 * @property {number} fixed64=1 Fixed64 wire type
		 * @property {number} sfixed64=1 Fixed64 wire type
		 */
		types.long = bake([
		    /* int64    */ 0,
		    /* uint64   */ 0,
		    /* sint64   */ 0,
		    /* fixed64  */ 1,
		    /* sfixed64 */ 1
		], 7); // offset 7 = index of "int64", the first 64-bit type in `s`

		/**
		 * Allowed types for map keys with their associated wire type.
		 * @type {Object.<string,number>}
		 * @const
		 * @property {number} int32=0 Varint wire type
		 * @property {number} uint32=0 Varint wire type
		 * @property {number} sint32=0 Varint wire type
		 * @property {number} fixed32=5 Fixed32 wire type
		 * @property {number} sfixed32=5 Fixed32 wire type
		 * @property {number} int64=0 Varint wire type
		 * @property {number} uint64=0 Varint wire type
		 * @property {number} sint64=0 Varint wire type
		 * @property {number} fixed64=1 Fixed64 wire type
		 * @property {number} sfixed64=1 Fixed64 wire type
		 * @property {number} bool=0 Varint wire type
		 * @property {number} string=2 Ldelim wire type
		 */
		types.mapKey = bake([
		    /* int32    */ 0,
		    /* uint32   */ 0,
		    /* sint32   */ 0,
		    /* fixed32  */ 5,
		    /* sfixed32 */ 5,
		    /* int64    */ 0,
		    /* uint64   */ 0,
		    /* sint64   */ 0,
		    /* fixed64  */ 1,
		    /* sfixed64 */ 1,
		    /* bool     */ 0,
		    /* string   */ 2
		], 2); // offset 2 = index of "int32"; skips double/float, which cannot be map keys

		/**
		 * Allowed types for packed repeated fields with their associated wire type.
		 * @type {Object.<string,number>}
		 * @const
		 * @property {number} double=1 Fixed64 wire type
		 * @property {number} float=5 Fixed32 wire type
		 * @property {number} int32=0 Varint wire type
		 * @property {number} uint32=0 Varint wire type
		 * @property {number} sint32=0 Varint wire type
		 * @property {number} fixed32=5 Fixed32 wire type
		 * @property {number} sfixed32=5 Fixed32 wire type
		 * @property {number} int64=0 Varint wire type
		 * @property {number} uint64=0 Varint wire type
		 * @property {number} sint64=0 Varint wire type
		 * @property {number} fixed64=1 Fixed64 wire type
		 * @property {number} sfixed64=1 Fixed64 wire type
		 * @property {number} bool=0 Varint wire type
		 */
		types.packed = bake([
		    /* double   */ 1,
		    /* float    */ 5,
		    /* int32    */ 0,
		    /* uint32   */ 0,
		    /* sint32   */ 0,
		    /* fixed32  */ 5,
		    /* sfixed32 */ 5,
		    /* int64    */ 0,
		    /* uint64   */ 0,
		    /* sint64   */ 0,
		    /* fixed64  */ 1,
		    /* sfixed64 */ 1,
		    /* bool     */ 0
		]);
	} (types$1));
	return types$1;
}
|
|
|
|
var field;
var hasRequiredField;

// Lazy initializer for the reflected message Field class (rollup bundle
// lazy-require pattern): first call builds and caches the export.
function requireField () {
	if (hasRequiredField) return field;
	hasRequiredField = 1;
	field = Field;

	// extends ReflectionObject
	var ReflectionObject = requireObject();
	((Field.prototype = Object.create(ReflectionObject.prototype)).constructor = Field).className = "Field";

	var Enum = require_enum(),
	    types = requireTypes(),
	    util = requireUtil();

	var Type; // cyclic; injected later via Field._configure

	// Valid values for the field rule (after lowercasing).
	var ruleRe = /^required|optional|repeated$/;

	/**
	 * Constructs a new message field instance. Note that {@link MapField|map fields} have their own class.
	 * @name Field
	 * @classdesc Reflected message field.
	 * @extends FieldBase
	 * @constructor
	 * @param {string} name Unique name within its namespace
	 * @param {number} id Unique id within its namespace
	 * @param {string} type Value type
	 * @param {string|Object.<string,*>} [rule="optional"] Field rule
	 * @param {string|Object.<string,*>} [extend] Extended type if different from parent
	 * @param {Object.<string,*>} [options] Declared options
	 */

	/**
	 * Constructs a field from a field descriptor.
	 * @param {string} name Field name
	 * @param {IField} json Field descriptor
	 * @returns {Field} Created field
	 * @throws {TypeError} If arguments are invalid
	 */
	Field.fromJSON = function fromJSON(name, json) {
	    return new Field(name, json.id, json.type, json.rule, json.extend, json.options, json.comment);
	};

	/**
	 * Not an actual constructor. Use {@link Field} instead.
	 * @classdesc Base class of all reflected message fields. This is not an actual class but here for the sake of having consistent type definitions.
	 * @exports FieldBase
	 * @extends ReflectionObject
	 * @constructor
	 * @param {string} name Unique name within its namespace
	 * @param {number} id Unique id within its namespace
	 * @param {string} type Value type
	 * @param {string|Object.<string,*>} [rule="optional"] Field rule
	 * @param {string|Object.<string,*>} [extend] Extended type if different from parent
	 * @param {Object.<string,*>} [options] Declared options
	 * @param {string} [comment] Comment associated with this field
	 */
	function Field(name, id, type, rule, extend, options, comment) {

	    // Overloads: (name, id, type, options[, comment]) and
	    // (name, id, type, rule, options[, comment]) — shift args accordingly.
	    if (util.isObject(rule)) {
	        comment = extend;
	        options = rule;
	        rule = extend = undefined;
	    } else if (util.isObject(extend)) {
	        comment = options;
	        options = extend;
	        extend = undefined;
	    }

	    ReflectionObject.call(this, name, options);

	    if (!util.isInteger(id) || id < 0)
	        throw TypeError("id must be a non-negative integer");

	    if (!util.isString(type))
	        throw TypeError("type must be a string");

	    // Note: lowercases `rule` in place as a side effect of the test.
	    if (rule !== undefined && !ruleRe.test(rule = rule.toString().toLowerCase()))
	        throw TypeError("rule must be a string rule");

	    if (extend !== undefined && !util.isString(extend))
	        throw TypeError("extend must be a string");

	    /**
	     * Field rule, if any.
	     * @type {string|undefined}
	     */
	    if (rule === "proto3_optional") {
	        rule = "optional";
	    }
	    this.rule = rule && rule !== "optional" ? rule : undefined; // toJSON

	    /**
	     * Field type.
	     * @type {string}
	     */
	    this.type = type; // toJSON

	    /**
	     * Unique field id.
	     * @type {number}
	     */
	    this.id = id; // toJSON, marker

	    /**
	     * Extended type if different from parent.
	     * @type {string|undefined}
	     */
	    this.extend = extend || undefined; // toJSON

	    /**
	     * Whether this field is required.
	     * @type {boolean}
	     */
	    this.required = rule === "required";

	    /**
	     * Whether this field is optional.
	     * @type {boolean}
	     */
	    this.optional = !this.required;

	    /**
	     * Whether this field is repeated.
	     * @type {boolean}
	     */
	    this.repeated = rule === "repeated";

	    /**
	     * Whether this field is a map or not.
	     * @type {boolean}
	     */
	    this.map = false;

	    /**
	     * Message this field belongs to.
	     * @type {Type|null}
	     */
	    this.message = null;

	    /**
	     * OneOf this field belongs to, if any,
	     * @type {OneOf|null}
	     */
	    this.partOf = null;

	    /**
	     * The field type's default value.
	     * @type {*}
	     */
	    this.typeDefault = null;

	    /**
	     * The field's default value on prototypes.
	     * @type {*}
	     */
	    this.defaultValue = null;

	    /**
	     * Whether this field's value should be treated as a long.
	     * @type {boolean}
	     */
	    this.long = util.Long ? types.long[type] !== undefined : /* istanbul ignore next */ false;

	    /**
	     * Whether this field's value is a buffer.
	     * @type {boolean}
	     */
	    this.bytes = type === "bytes";

	    /**
	     * Resolved type if not a basic type.
	     * @type {Type|Enum|null}
	     */
	    this.resolvedType = null;

	    /**
	     * Sister-field within the extended type if a declaring extension field.
	     * @type {Field|null}
	     */
	    this.extensionField = null;

	    /**
	     * Sister-field within the declaring namespace if an extended field.
	     * @type {Field|null}
	     */
	    this.declaringField = null;

	    /**
	     * Internally remembers whether this field is packed.
	     * @type {boolean|null}
	     * @private
	     */
	    this._packed = null;

	    /**
	     * Comment for this field.
	     * @type {string|null}
	     */
	    this.comment = comment;
	}

	/**
	 * Determines whether this field is packed. Only relevant when repeated and working with proto2.
	 * @name Field#packed
	 * @type {boolean}
	 * @readonly
	 */
	Object.defineProperty(Field.prototype, "packed", {
	    get: function() {
	        // defaults to packed=true if not explicity set to false
	        if (this._packed === null)
	            this._packed = this.getOption("packed") !== false;
	        return this._packed;
	    }
	});

	/**
	 * @override
	 */
	Field.prototype.setOption = function setOption(name, value, ifNotSet) {
	    if (name === "packed") // clear cached before setting
	        this._packed = null;
	    return ReflectionObject.prototype.setOption.call(this, name, value, ifNotSet);
	};

	/**
	 * Field descriptor.
	 * @interface IField
	 * @property {string} [rule="optional"] Field rule
	 * @property {string} type Field type
	 * @property {number} id Field id
	 * @property {Object.<string,*>} [options] Field options
	 */

	/**
	 * Extension field descriptor.
	 * @interface IExtensionField
	 * @extends IField
	 * @property {string} extend Extended type
	 */

	/**
	 * Converts this field to a field descriptor.
	 * @param {IToJSONOptions} [toJSONOptions] JSON conversion options
	 * @returns {IField} Field descriptor
	 */
	Field.prototype.toJSON = function toJSON(toJSONOptions) {
	    var keepComments = toJSONOptions ? Boolean(toJSONOptions.keepComments) : false;
	    return util.toObject([
	        "rule" , this.rule !== "optional" && this.rule || undefined,
	        "type" , this.type,
	        "id" , this.id,
	        "extend" , this.extend,
	        "options" , this.options,
	        "comment" , keepComments ? this.comment : undefined
	    ]);
	};

	/**
	 * Resolves this field's type references.
	 * @returns {Field} `this`
	 * @throws {Error} If any reference cannot be resolved
	 */
	Field.prototype.resolve = function resolve() {

	    if (this.resolved)
	        return this;

	    if ((this.typeDefault = types.defaults[this.type]) === undefined) { // if not a basic type, resolve it
	        this.resolvedType = (this.declaringField ? this.declaringField.parent : this.parent).lookupTypeOrEnum(this.type);
	        if (this.resolvedType instanceof Type)
	            this.typeDefault = null;
	        else // instanceof Enum
	            this.typeDefault = this.resolvedType.values[Object.keys(this.resolvedType.values)[0]]; // first defined
	    } else if (this.options && this.options.proto3_optional) {
	        // proto3 scalar value marked optional; should default to null
	        this.typeDefault = null;
	    }

	    // use explicitly set default value if present
	    if (this.options && this.options["default"] != null) {
	        this.typeDefault = this.options["default"];
	        if (this.resolvedType instanceof Enum && typeof this.typeDefault === "string")
	            this.typeDefault = this.resolvedType.values[this.typeDefault];
	    }

	    // remove unnecessary options
	    if (this.options) {
	        if (this.options.packed === true || this.options.packed !== undefined && this.resolvedType && !(this.resolvedType instanceof Enum))
	            delete this.options.packed;
	        if (!Object.keys(this.options).length)
	            this.options = undefined;
	    }

	    // convert to internal data type if necesssary
	    if (this.long) {
	        this.typeDefault = util.Long.fromNumber(this.typeDefault, this.type.charAt(0) === "u");

	        /* istanbul ignore else */
	        if (Object.freeze)
	            Object.freeze(this.typeDefault); // long instances are meant to be immutable anyway (i.e. use small int cache that even requires it)

	    } else if (this.bytes && typeof this.typeDefault === "string") {
	        // Decode a string default for bytes fields: base64 when it looks like
	        // base64, raw UTF-8 otherwise.
	        var buf;
	        if (util.base64.test(this.typeDefault))
	            util.base64.decode(this.typeDefault, buf = util.newBuffer(util.base64.length(this.typeDefault)), 0);
	        else
	            util.utf8.write(this.typeDefault, buf = util.newBuffer(util.utf8.length(this.typeDefault)), 0);
	        this.typeDefault = buf;
	    }

	    // take special care of maps and repeated fields
	    if (this.map)
	        this.defaultValue = util.emptyObject;
	    else if (this.repeated)
	        this.defaultValue = util.emptyArray;
	    else
	        this.defaultValue = this.typeDefault;

	    // ensure proper value on prototype
	    if (this.parent instanceof Type)
	        this.parent.ctor.prototype[this.name] = this.defaultValue;

	    return ReflectionObject.prototype.resolve.call(this);
	};

	/**
	 * Decorator function as returned by {@link Field.d} and {@link MapField.d} (TypeScript).
	 * @typedef FieldDecorator
	 * @type {function}
	 * @param {Object} prototype Target prototype
	 * @param {string} fieldName Field name
	 * @returns {undefined}
	 */

	/**
	 * Field decorator (TypeScript).
	 * @name Field.d
	 * @function
	 * @param {number} fieldId Field id
	 * @param {"double"|"float"|"int32"|"uint32"|"sint32"|"fixed32"|"sfixed32"|"int64"|"uint64"|"sint64"|"fixed64"|"sfixed64"|"string"|"bool"|"bytes"|Object} fieldType Field type
	 * @param {"optional"|"required"|"repeated"} [fieldRule="optional"] Field rule
	 * @param {T} [defaultValue] Default value
	 * @returns {FieldDecorator} Decorator function
	 * @template T extends number | number[] | Long | Long[] | string | string[] | boolean | boolean[] | Uint8Array | Uint8Array[] | Buffer | Buffer[]
	 */
	Field.d = function decorateField(fieldId, fieldType, fieldRule, defaultValue) {

	    // submessage: decorate the submessage and use its name as the type
	    if (typeof fieldType === "function")
	        fieldType = util.decorateType(fieldType).name;

	    // enum reference: create a reflected copy of the enum and keep reuseing it
	    else if (fieldType && typeof fieldType === "object")
	        fieldType = util.decorateEnum(fieldType).name;

	    return function fieldDecorator(prototype, fieldName) {
	        util.decorateType(prototype.constructor)
	            .add(new Field(fieldName, fieldId, fieldType, fieldRule, { "default": defaultValue }));
	    };
	};

	/**
	 * Field decorator (TypeScript).
	 * @name Field.d
	 * @function
	 * @param {number} fieldId Field id
	 * @param {Constructor<T>|string} fieldType Field type
	 * @param {"optional"|"required"|"repeated"} [fieldRule="optional"] Field rule
	 * @returns {FieldDecorator} Decorator function
	 * @template T extends Message<T>
	 * @variation 2
	 */
	// like Field.d but without a default value

	// Sets up cyclic dependencies (called in index-light)
	Field._configure = function configure(Type_) {
	    Type = Type_;
	};
	return field;
}
|
|
|
|
var oneof;
var hasRequiredOneof;

// Lazy initializer for the reflected OneOf class (rollup bundle
// lazy-require pattern): first call builds and caches the export.
function requireOneof () {
	if (hasRequiredOneof) return oneof;
	hasRequiredOneof = 1;
	oneof = OneOf;

	// extends ReflectionObject
	var ReflectionObject = requireObject();
	((OneOf.prototype = Object.create(ReflectionObject.prototype)).constructor = OneOf).className = "OneOf";

	var Field = requireField(),
	    util = requireUtil();

	/**
	 * Constructs a new oneof instance.
	 * @classdesc Reflected oneof.
	 * @extends ReflectionObject
	 * @constructor
	 * @param {string} name Oneof name
	 * @param {string[]|Object.<string,*>} [fieldNames] Field names
	 * @param {Object.<string,*>} [options] Declared options
	 * @param {string} [comment] Comment associated with this field
	 */
	function OneOf(name, fieldNames, options, comment) {
	    // (name, options) overload: shift arguments.
	    if (!Array.isArray(fieldNames)) {
	        options = fieldNames;
	        fieldNames = undefined;
	    }
	    ReflectionObject.call(this, name, options);

	    /* istanbul ignore if */
	    if (!(fieldNames === undefined || Array.isArray(fieldNames)))
	        throw TypeError("fieldNames must be an Array");

	    /**
	     * Field names that belong to this oneof.
	     * @type {string[]}
	     */
	    this.oneof = fieldNames || []; // toJSON, marker

	    /**
	     * Fields that belong to this oneof as an array for iteration.
	     * @type {Field[]}
	     * @readonly
	     */
	    this.fieldsArray = []; // declared readonly for conformance, possibly not yet added to parent

	    /**
	     * Comment for this field.
	     * @type {string|null}
	     */
	    this.comment = comment;
	}

	/**
	 * Oneof descriptor.
	 * @interface IOneOf
	 * @property {Array.<string>} oneof Oneof field names
	 * @property {Object.<string,*>} [options] Oneof options
	 */

	/**
	 * Constructs a oneof from a oneof descriptor.
	 * @param {string} name Oneof name
	 * @param {IOneOf} json Oneof descriptor
	 * @returns {OneOf} Created oneof
	 * @throws {TypeError} If arguments are invalid
	 */
	OneOf.fromJSON = function fromJSON(name, json) {
	    return new OneOf(name, json.oneof, json.options, json.comment);
	};

	/**
	 * Converts this oneof to a oneof descriptor.
	 * @param {IToJSONOptions} [toJSONOptions] JSON conversion options
	 * @returns {IOneOf} Oneof descriptor
	 */
	OneOf.prototype.toJSON = function toJSON(toJSONOptions) {
	    var keepComments = toJSONOptions ? Boolean(toJSONOptions.keepComments) : false;
	    return util.toObject([
	        "options" , this.options,
	        "oneof" , this.oneof,
	        "comment" , keepComments ? this.comment : undefined
	    ]);
	};

	/**
	 * Adds the fields of the specified oneof to the parent if not already done so.
	 * @param {OneOf} oneof The oneof
	 * @returns {undefined}
	 * @inner
	 * @ignore
	 */
	function addFieldsToParent(oneof) {
	    if (oneof.parent)
	        for (var i = 0; i < oneof.fieldsArray.length; ++i)
	            if (!oneof.fieldsArray[i].parent)
	                oneof.parent.add(oneof.fieldsArray[i]);
	}

	/**
	 * Adds a field to this oneof and removes it from its current parent, if any.
	 * @param {Field} field Field to add
	 * @returns {OneOf} `this`
	 */
	OneOf.prototype.add = function add(field) {

	    /* istanbul ignore if */
	    if (!(field instanceof Field))
	        throw TypeError("field must be a Field");

	    // Detach from a foreign parent; re-attachment to this oneof's parent
	    // happens in addFieldsToParent below.
	    if (field.parent && field.parent !== this.parent)
	        field.parent.remove(field);
	    this.oneof.push(field.name);
	    this.fieldsArray.push(field);
	    field.partOf = this; // field.parent remains null
	    addFieldsToParent(this);
	    return this;
	};

	/**
	 * Removes a field from this oneof and puts it back to the oneof's parent.
	 * @param {Field} field Field to remove
	 * @returns {OneOf} `this`
	 */
	OneOf.prototype.remove = function remove(field) {

	    /* istanbul ignore if */
	    if (!(field instanceof Field))
	        throw TypeError("field must be a Field");

	    var index = this.fieldsArray.indexOf(field);

	    /* istanbul ignore if */
	    if (index < 0)
	        throw Error(field + " is not a member of " + this);

	    this.fieldsArray.splice(index, 1);
	    index = this.oneof.indexOf(field.name);

	    /* istanbul ignore else */
	    if (index > -1) // theoretical
	        this.oneof.splice(index, 1);

	    field.partOf = null;
	    return this;
	};

	/**
	 * @override
	 */
	OneOf.prototype.onAdd = function onAdd(parent) {
	    ReflectionObject.prototype.onAdd.call(this, parent);
	    var self = this;
	    // Collect present fields
	    for (var i = 0; i < this.oneof.length; ++i) {
	        var field = parent.get(this.oneof[i]);
	        if (field && !field.partOf) {
	            field.partOf = self;
	            self.fieldsArray.push(field);
	        }
	    }
	    // Add not yet present fields
	    addFieldsToParent(this);
	};

	/**
	 * @override
	 */
	OneOf.prototype.onRemove = function onRemove(parent) {
	    // Detach all member fields from the parent before detaching the oneof itself.
	    for (var i = 0, field; i < this.fieldsArray.length; ++i)
	        if ((field = this.fieldsArray[i]).parent)
	            field.parent.remove(field);
	    ReflectionObject.prototype.onRemove.call(this, parent);
	};

	/**
	 * Decorator function as returned by {@link OneOf.d} (TypeScript).
	 * @typedef OneOfDecorator
	 * @type {function}
	 * @param {Object} prototype Target prototype
	 * @param {string} oneofName OneOf name
	 * @returns {undefined}
	 */

	/**
	 * OneOf decorator (TypeScript).
	 * @function
	 * @param {...string} fieldNames Field names
	 * @returns {OneOfDecorator} Decorator function
	 * @template T extends string
	 */
	OneOf.d = function decorateOneOf() {
	    var fieldNames = new Array(arguments.length),
	        index = 0;
	    while (index < arguments.length)
	        fieldNames[index] = arguments[index++];
	    return function oneOfDecorator(prototype, oneofName) {
	        util.decorateType(prototype.constructor)
	            .add(new OneOf(oneofName, fieldNames));
	        // Virtual property reflecting which member field is currently set.
	        Object.defineProperty(prototype, oneofName, {
	            get: util.oneOfGetter(fieldNames),
	            set: util.oneOfSetter(fieldNames)
	        });
	    };
	};
	return oneof;
}
|
|
|
|
var namespace;
|
|
var hasRequiredNamespace;
|
|
|
|
function requireNamespace () {
|
|
if (hasRequiredNamespace) return namespace;
|
|
hasRequiredNamespace = 1;
|
|
namespace = Namespace;
|
|
|
|
// extends ReflectionObject
|
|
var ReflectionObject = requireObject();
|
|
((Namespace.prototype = Object.create(ReflectionObject.prototype)).constructor = Namespace).className = "Namespace";
|
|
|
|
var Field = requireField(),
|
|
util = requireUtil(),
|
|
OneOf = requireOneof();
|
|
|
|
var Type, // cyclic
|
|
Service,
|
|
Enum;
|
|
|
|
/**
|
|
* Constructs a new namespace instance.
|
|
* @name Namespace
|
|
* @classdesc Reflected namespace.
|
|
* @extends NamespaceBase
|
|
* @constructor
|
|
* @param {string} name Namespace name
|
|
* @param {Object.<string,*>} [options] Declared options
|
|
*/
|
|
|
|
/**
|
|
* Constructs a namespace from JSON.
|
|
* @memberof Namespace
|
|
* @function
|
|
* @param {string} name Namespace name
|
|
* @param {Object.<string,*>} json JSON object
|
|
* @returns {Namespace} Created namespace
|
|
* @throws {TypeError} If arguments are invalid
|
|
*/
|
|
Namespace.fromJSON = function fromJSON(name, json) {
|
|
return new Namespace(name, json.options).addJSON(json.nested);
|
|
};
|
|
|
|
/**
|
|
* Converts an array of reflection objects to JSON.
|
|
* @memberof Namespace
|
|
* @param {ReflectionObject[]} array Object array
|
|
* @param {IToJSONOptions} [toJSONOptions] JSON conversion options
|
|
* @returns {Object.<string,*>|undefined} JSON object or `undefined` when array is empty
|
|
*/
|
|
function arrayToJSON(array, toJSONOptions) {
|
|
if (!(array && array.length))
|
|
return undefined;
|
|
var obj = {};
|
|
for (var i = 0; i < array.length; ++i)
|
|
obj[array[i].name] = array[i].toJSON(toJSONOptions);
|
|
return obj;
|
|
}
|
|
|
|
Namespace.arrayToJSON = arrayToJSON;
|
|
|
|
/**
|
|
* Tests if the specified id is reserved.
|
|
* @param {Array.<number[]|string>|undefined} reserved Array of reserved ranges and names
|
|
* @param {number} id Id to test
|
|
* @returns {boolean} `true` if reserved, otherwise `false`
|
|
*/
|
|
Namespace.isReservedId = function isReservedId(reserved, id) {
|
|
if (reserved)
|
|
for (var i = 0; i < reserved.length; ++i)
|
|
if (typeof reserved[i] !== "string" && reserved[i][0] <= id && reserved[i][1] > id)
|
|
return true;
|
|
return false;
|
|
};
|
|
|
|
/**
|
|
* Tests if the specified name is reserved.
|
|
* @param {Array.<number[]|string>|undefined} reserved Array of reserved ranges and names
|
|
* @param {string} name Name to test
|
|
* @returns {boolean} `true` if reserved, otherwise `false`
|
|
*/
|
|
Namespace.isReservedName = function isReservedName(reserved, name) {
|
|
if (reserved)
|
|
for (var i = 0; i < reserved.length; ++i)
|
|
if (reserved[i] === name)
|
|
return true;
|
|
return false;
|
|
};
|
|
|
|
/**
|
|
* Not an actual constructor. Use {@link Namespace} instead.
|
|
* @classdesc Base class of all reflection objects containing nested objects. This is not an actual class but here for the sake of having consistent type definitions.
|
|
* @exports NamespaceBase
|
|
* @extends ReflectionObject
|
|
* @abstract
|
|
* @constructor
|
|
* @param {string} name Namespace name
|
|
* @param {Object.<string,*>} [options] Declared options
|
|
* @see {@link Namespace}
|
|
*/
|
|
function Namespace(name, options) {
    ReflectionObject.call(this, name, options);

    /**
     * Nested objects by name.
     * Stays `undefined` while the namespace is empty; `remove` resets it to
     * `undefined` again when the last member is deleted.
     * @type {Object.<string,ReflectionObject>|undefined}
     */
    this.nested = undefined; // toJSON

    /**
     * Cached nested objects as an array.
     * Populated lazily by the `nestedArray` getter and invalidated by
     * `clearCache` whenever members are added or removed.
     * @type {ReflectionObject[]|null}
     * @private
     */
    this._nestedArray = null;
}
|
|
|
|
// Drops the memoized nested-object array so the next `nestedArray` access
// rebuilds it; returns the namespace for call chaining.
function clearCache(namespace) {
    namespace._nestedArray = null;
    return namespace;
}
|
|
|
|
/**
|
|
* Nested objects of this namespace as an array for iteration.
|
|
* @name NamespaceBase#nestedArray
|
|
* @type {ReflectionObject[]}
|
|
* @readonly
|
|
*/
|
|
Object.defineProperty(Namespace.prototype, "nestedArray", {
    get: function() {
        // Memoize: `_nestedArray` is either null (invalidated) or the cache.
        if (this._nestedArray === null)
            this._nestedArray = util.toArray(this.nested);
        return this._nestedArray;
    }
});
|
|
|
|
/**
|
|
* Namespace descriptor.
|
|
* @interface INamespace
|
|
* @property {Object.<string,*>} [options] Namespace options
|
|
* @property {Object.<string,AnyNestedObject>} [nested] Nested object descriptors
|
|
*/
|
|
|
|
/**
|
|
* Any extension field descriptor.
|
|
* @typedef AnyExtensionField
|
|
* @type {IExtensionField|IExtensionMapField}
|
|
*/
|
|
|
|
/**
|
|
* Any nested object descriptor.
|
|
* @typedef AnyNestedObject
|
|
* @type {IEnum|IType|IService|AnyExtensionField|INamespace|IOneOf}
|
|
*/
|
|
|
|
/**
|
|
* Converts this namespace to a namespace descriptor.
|
|
* @param {IToJSONOptions} [toJSONOptions] JSON conversion options
|
|
* @returns {INamespace} Namespace descriptor
|
|
*/
|
|
Namespace.prototype.toJSON = function toJSON(toJSONOptions) {
    // Alternating key/value pairs; util.toObject skips undefined values.
    var pairs = [
        "options", this.options,
        "nested",  arrayToJSON(this.nestedArray, toJSONOptions)
    ];
    return util.toObject(pairs);
};
|
|
|
|
/**
|
|
* Adds nested objects to this namespace from nested object descriptors.
|
|
* @param {Object.<string,AnyNestedObject>} nestedJson Any nested object descriptors
|
|
* @returns {Namespace} `this`
|
|
*/
|
|
Namespace.prototype.addJSON = function addJSON(nestedJson) {
    var ns = this;
    /* istanbul ignore else */
    if (nestedJson) {
        var names = Object.keys(nestedJson);
        for (var i = 0; i < names.length; ++i) {
            var name = names[i];
            var nested = nestedJson[name];
            // Select the reflection class from the descriptor's distinguishing
            // property, checked from most to least likely.
            var fromJSON;
            if (nested.fields !== undefined)
                fromJSON = Type.fromJSON;
            else if (nested.values !== undefined)
                fromJSON = Enum.fromJSON;
            else if (nested.methods !== undefined)
                fromJSON = Service.fromJSON;
            else if (nested.id !== undefined)
                fromJSON = Field.fromJSON;
            else
                fromJSON = Namespace.fromJSON;
            ns.add(fromJSON(name, nested));
        }
    }
    return this;
};
|
|
|
|
/**
|
|
* Gets the nested object of the specified name.
|
|
* @param {string} name Nested object name
|
|
* @returns {ReflectionObject|null} The reflection object or `null` if it doesn't exist
|
|
*/
|
|
Namespace.prototype.get = function get(name) {
    if (this.nested && this.nested[name])
        return this.nested[name];
    return null;
};
|
|
|
|
/**
|
|
* Gets the values of the nested {@link Enum|enum} of the specified name.
|
|
* This methods differs from {@link Namespace#get|get} in that it returns an enum's values directly and throws instead of returning `null`.
|
|
* @param {string} name Nested enum name
|
|
* @returns {Object.<string,number>} Enum values
|
|
* @throws {Error} If there is no such enum
|
|
*/
|
|
Namespace.prototype.getEnum = function getEnum(name) {
    var nested = this.nested && this.nested[name];
    // Only a reflected Enum qualifies; anything else (or absence) throws.
    if (nested instanceof Enum)
        return nested.values;
    throw Error("no such enum: " + name);
};
|
|
|
|
/**
|
|
* Adds a nested object to this namespace.
|
|
* @param {ReflectionObject} object Nested object to add
|
|
* @returns {Namespace} `this`
|
|
* @throws {TypeError} If arguments are invalid
|
|
* @throws {Error} If there is already a nested object with this name
|
|
*/
|
|
Namespace.prototype.add = function add(object) {

    // Plain (non-extension) fields may not live directly inside a namespace:
    // a Field is only accepted when it declares `extend`.
    if (!(object instanceof Field && object.extend !== undefined || object instanceof Type || object instanceof OneOf || object instanceof Enum || object instanceof Service || object instanceof Namespace))
        throw TypeError("object must be a valid nested object");

    if (!this.nested)
        this.nested = {};
    else {
        var prev = this.get(object.name);
        if (prev) {
            if (prev instanceof Namespace && object instanceof Namespace && !(prev instanceof Type || prev instanceof Service)) {
                // replace plain namespace but keep existing nested elements and options
                var nested = prev.nestedArray;
                for (var i = 0; i < nested.length; ++i)
                    object.add(nested[i]);
                this.remove(prev);
                // remove() resets `nested` to undefined when the last member
                // goes away, so it must be re-created before inserting below.
                if (!this.nested)
                    this.nested = {};
                object.setOptions(prev.options, true);

            } else
                throw Error("duplicate name '" + object.name + "' in " + this);
        }
    }
    this.nested[object.name] = object;
    object.onAdd(this);
    // Invalidate the cached nestedArray and return `this` for chaining.
    return clearCache(this);
};
|
|
|
|
/**
|
|
* Removes a nested object from this namespace.
|
|
* @param {ReflectionObject} object Nested object to remove
|
|
* @returns {Namespace} `this`
|
|
* @throws {TypeError} If arguments are invalid
|
|
* @throws {Error} If `object` is not a member of this namespace
|
|
*/
|
|
Namespace.prototype.remove = function remove(object) {
    if (!(object instanceof ReflectionObject))
        throw TypeError("object must be a ReflectionObject");
    if (object.parent !== this)
        throw Error(object + " is not a member of " + this);

    delete this.nested[object.name];
    // An empty namespace goes back to `undefined` so toJSON omits it.
    if (Object.keys(this.nested).length === 0)
        this.nested = undefined;

    object.onRemove(this);
    return clearCache(this);
};
|
|
|
|
/**
 * Defines additional namespaces within this one if not yet existing.
 * @param {string|string[]} path Path to create
 * @param {*} [json] Nested types to create from JSON
 * @returns {Namespace} Pointer to the last namespace created or `this` if path is empty
 */
Namespace.prototype.define = function define(path, json) {

    if (util.isString(path))
        path = path.split(".");
    else if (!Array.isArray(path))
        throw TypeError("illegal path");
    // A leading "" segment means the path started with "." (absolute).
    if (path && path.length && path[0] === "")
        throw Error("path must be relative");

    var ptr = this;
    while (path.length > 0) {
        var part = path.shift();
        if (ptr.nested && ptr.nested[part]) {
            // Descend into an existing member, which must itself be a namespace.
            ptr = ptr.nested[part];
            if (!(ptr instanceof Namespace))
                throw Error("path conflicts with non-namespace objects");
        } else
            // Create the missing segment; the inner assignment advances `ptr`
            // to the new namespace before add() links it to the old parent.
            ptr.add(ptr = new Namespace(part));
    }
    if (json)
        ptr.addJSON(json);
    return ptr;
};
|
|
|
|
/**
|
|
* Resolves this namespace's and all its nested objects' type references. Useful to validate a reflection tree, but comes at a cost.
|
|
* @returns {Namespace} `this`
|
|
*/
|
|
Namespace.prototype.resolveAll = function resolveAll() {
    // Recurse into nested namespaces; resolve leaf objects directly.
    var children = this.nestedArray;
    for (var index = 0; index < children.length; index++) {
        var child = children[index];
        if (child instanceof Namespace)
            child.resolveAll();
        else
            child.resolve();
    }
    return this.resolve();
};
|
|
|
|
/**
 * Recursively looks up the reflection object matching the specified path in the scope of this namespace.
 * Resolution order: exact nested match first, then any nested namespace, then
 * the parent scope (unless `parentAlreadyChecked`).
 * @param {string|string[]} path Path to look up
 * @param {*|Array.<*>} filterTypes Filter types, any combination of the constructors of `protobuf.Type`, `protobuf.Enum`, `protobuf.Service` etc.
 * @param {boolean} [parentAlreadyChecked=false] If known, whether the parent has already been checked
 * @returns {ReflectionObject|null} Looked up object or `null` if none could be found
 */
Namespace.prototype.lookup = function lookup(path, filterTypes, parentAlreadyChecked) {

    // Overload: lookup(path, parentAlreadyChecked) without filter types.
    /* istanbul ignore next */
    if (typeof filterTypes === "boolean") {
        parentAlreadyChecked = filterTypes;
        filterTypes = undefined;
    } else if (filterTypes && !Array.isArray(filterTypes))
        filterTypes = [ filterTypes ];

    if (util.isString(path) && path.length) {
        if (path === ".")
            return this.root;
        path = path.split(".");
    } else if (!path.length)
        return this;

    // Start at root if path is absolute
    if (path[0] === "")
        return this.root.lookup(path.slice(1), filterTypes);

    // Test if the first part matches any nested object, and if so, traverse if path contains more
    var found = this.get(path[0]);
    if (found) {
        if (path.length === 1) {
            // Final segment: honor the type filter, if any.
            if (!filterTypes || filterTypes.indexOf(found.constructor) > -1)
                return found;
        } else if (found instanceof Namespace && (found = found.lookup(path.slice(1), filterTypes, true)))
            return found;

    // Otherwise try each nested namespace
    } else
        // Accessing `this.nestedArray` first (re)builds `_nestedArray`,
        // which the loop body then indexes directly.
        for (var i = 0; i < this.nestedArray.length; ++i)
            if (this._nestedArray[i] instanceof Namespace && (found = this._nestedArray[i].lookup(path, filterTypes, true)))
                return found;

    // If there hasn't been a match, try again at the parent
    if (this.parent === null || parentAlreadyChecked)
        return null;
    return this.parent.lookup(path, filterTypes);
};
|
|
|
|
/**
|
|
* Looks up the reflection object at the specified path, relative to this namespace.
|
|
* @name NamespaceBase#lookup
|
|
* @function
|
|
* @param {string|string[]} path Path to look up
|
|
* @param {boolean} [parentAlreadyChecked=false] Whether the parent has already been checked
|
|
* @returns {ReflectionObject|null} Looked up object or `null` if none could be found
|
|
* @variation 2
|
|
*/
|
|
// lookup(path: string, [parentAlreadyChecked: boolean])
|
|
|
|
/**
|
|
* Looks up the {@link Type|type} at the specified path, relative to this namespace.
|
|
* Besides its signature, this methods differs from {@link Namespace#lookup|lookup} in that it throws instead of returning `null`.
|
|
* @param {string|string[]} path Path to look up
|
|
* @returns {Type} Looked up type
|
|
* @throws {Error} If `path` does not point to a type
|
|
*/
|
|
Namespace.prototype.lookupType = function lookupType(path) {
    // Like lookup(), but restricted to types and throwing on absence.
    var result = this.lookup(path, [ Type ]);
    if (result)
        return result;
    throw Error("no such type: " + path);
};
|
|
|
|
/**
|
|
* Looks up the values of the {@link Enum|enum} at the specified path, relative to this namespace.
|
|
* Besides its signature, this methods differs from {@link Namespace#lookup|lookup} in that it throws instead of returning `null`.
|
|
* @param {string|string[]} path Path to look up
|
|
* @returns {Enum} Looked up enum
|
|
* @throws {Error} If `path` does not point to an enum
|
|
*/
|
|
Namespace.prototype.lookupEnum = function lookupEnum(path) {
    // Like lookup(), but restricted to enums and throwing on absence.
    var result = this.lookup(path, [ Enum ]);
    if (result)
        return result;
    throw Error("no such Enum '" + path + "' in " + this);
};
|
|
|
|
/**
|
|
* Looks up the {@link Type|type} or {@link Enum|enum} at the specified path, relative to this namespace.
|
|
* Besides its signature, this methods differs from {@link Namespace#lookup|lookup} in that it throws instead of returning `null`.
|
|
* @param {string|string[]} path Path to look up
|
|
* @returns {Type} Looked up type or enum
|
|
* @throws {Error} If `path` does not point to a type or enum
|
|
*/
|
|
Namespace.prototype.lookupTypeOrEnum = function lookupTypeOrEnum(path) {
    // Accepts either a message type or an enum; throws when neither exists.
    var result = this.lookup(path, [ Type, Enum ]);
    if (result)
        return result;
    throw Error("no such Type or Enum '" + path + "' in " + this);
};
|
|
|
|
/**
|
|
* Looks up the {@link Service|service} at the specified path, relative to this namespace.
|
|
* Besides its signature, this methods differs from {@link Namespace#lookup|lookup} in that it throws instead of returning `null`.
|
|
* @param {string|string[]} path Path to look up
|
|
* @returns {Service} Looked up service
|
|
* @throws {Error} If `path` does not point to a service
|
|
*/
|
|
Namespace.prototype.lookupService = function lookupService(path) {
    // Like lookup(), but restricted to services and throwing on absence.
    var result = this.lookup(path, [ Service ]);
    if (result)
        return result;
    throw Error("no such Service '" + path + "' in " + this);
};
|
|
|
|
// Sets up cyclic dependencies (called in index-light)
// Injects the Type, Service and Enum constructors into this module's closure
// after everything has loaded, avoiding require cycles between the modules.
Namespace._configure = function(Type_, Service_, Enum_) {
    Type = Type_;
    Service = Service_;
    Enum = Enum_;
};
|
|
return namespace;
|
|
}
|
|
|
|
var mapfield;
|
|
var hasRequiredMapfield;
|
|
|
|
// Lazy module initializer for the MapField reflection class (CommonJS-style
// memoized require, as emitted by the bundler).
function requireMapfield () {
	if (hasRequiredMapfield) return mapfield;
	hasRequiredMapfield = 1;
	mapfield = MapField;

	// extends Field
	var Field = requireField();
	((MapField.prototype = Object.create(Field.prototype)).constructor = MapField).className = "MapField";

	var types = requireTypes(),
	    util = requireUtil();

	/**
	 * Constructs a new map field instance.
	 * @classdesc Reflected map field.
	 * @extends FieldBase
	 * @constructor
	 * @param {string} name Unique name within its namespace
	 * @param {number} id Unique id within its namespace
	 * @param {string} keyType Key type
	 * @param {string} type Value type
	 * @param {Object.<string,*>} [options] Declared options
	 * @param {string} [comment] Comment associated with this field
	 */
	function MapField(name, id, keyType, type, options, comment) {
		// Map fields are never repeated and carry no extend target themselves.
		Field.call(this, name, id, type, undefined, undefined, options, comment);

		/* istanbul ignore if */
		if (!util.isString(keyType))
			throw TypeError("keyType must be a string");

		/**
		 * Key type.
		 * @type {string}
		 */
		this.keyType = keyType; // toJSON, marker

		/**
		 * Resolved key type if not a basic type.
		 * @type {ReflectionObject|null}
		 */
		this.resolvedKeyType = null;

		// Overrides Field#map
		this.map = true;
	}

	/**
	 * Map field descriptor.
	 * @interface IMapField
	 * @extends {IField}
	 * @property {string} keyType Key type
	 */

	/**
	 * Extension map field descriptor.
	 * @interface IExtensionMapField
	 * @extends IMapField
	 * @property {string} extend Extended type
	 */

	/**
	 * Constructs a map field from a map field descriptor.
	 * @param {string} name Field name
	 * @param {IMapField} json Map field descriptor
	 * @returns {MapField} Created map field
	 * @throws {TypeError} If arguments are invalid
	 */
	MapField.fromJSON = function fromJSON(name, json) {
		return new MapField(name, json.id, json.keyType, json.type, json.options, json.comment);
	};

	/**
	 * Converts this map field to a map field descriptor.
	 * @param {IToJSONOptions} [toJSONOptions] JSON conversion options
	 * @returns {IMapField} Map field descriptor
	 */
	MapField.prototype.toJSON = function toJSON(toJSONOptions) {
		var keepComments = toJSONOptions ? Boolean(toJSONOptions.keepComments) : false;
		return util.toObject([
			"keyType" , this.keyType,
			"type"    , this.type,
			"id"      , this.id,
			"extend"  , this.extend,
			"options" , this.options,
			"comment" , keepComments ? this.comment : undefined
		]);
	};

	/**
	 * @override
	 */
	MapField.prototype.resolve = function resolve() {
		if (this.resolved)
			return this;

		// Besides a value type, map fields have a key type that may be "any scalar type except for floating point types and bytes"
		if (types.mapKey[this.keyType] === undefined)
			throw Error("invalid key type: " + this.keyType);

		return Field.prototype.resolve.call(this);
	};

	/**
	 * Map field decorator (TypeScript).
	 * @name MapField.d
	 * @function
	 * @param {number} fieldId Field id
	 * @param {"int32"|"uint32"|"sint32"|"fixed32"|"sfixed32"|"int64"|"uint64"|"sint64"|"fixed64"|"sfixed64"|"bool"|"string"} fieldKeyType Field key type
	 * @param {"double"|"float"|"int32"|"uint32"|"sint32"|"fixed32"|"sfixed32"|"int64"|"uint64"|"sint64"|"fixed64"|"sfixed64"|"bool"|"string"|"bytes"|Object|Constructor<{}>} fieldValueType Field value type
	 * @returns {FieldDecorator} Decorator function
	 * @template T extends { [key: string]: number | Long | string | boolean | Uint8Array | Buffer | number[] | Message<{}> }
	 */
	MapField.d = function decorateMapField(fieldId, fieldKeyType, fieldValueType) {

		// submessage value: decorate the submessage and use its name as the type
		if (typeof fieldValueType === "function")
			fieldValueType = util.decorateType(fieldValueType).name;

		// enum reference value: create a reflected copy of the enum and keep reusing it
		else if (fieldValueType && typeof fieldValueType === "object")
			fieldValueType = util.decorateEnum(fieldValueType).name;

		return function mapFieldDecorator(prototype, fieldName) {
			util.decorateType(prototype.constructor)
				.add(new MapField(fieldName, fieldId, fieldKeyType, fieldValueType));
		};
	};
	return mapfield;
}
|
|
|
|
var method;
|
|
var hasRequiredMethod;
|
|
|
|
// Lazy module initializer for the Method reflection class (memoized require).
function requireMethod () {
	if (hasRequiredMethod) return method;
	hasRequiredMethod = 1;
	method = Method;

	// extends ReflectionObject
	var ReflectionObject = requireObject();
	((Method.prototype = Object.create(ReflectionObject.prototype)).constructor = Method).className = "Method";

	var util = requireUtil();

	/**
	 * Constructs a new service method instance.
	 * @classdesc Reflected service method.
	 * @extends ReflectionObject
	 * @constructor
	 * @param {string} name Method name
	 * @param {string|undefined} type Method type, usually `"rpc"`
	 * @param {string} requestType Request message type
	 * @param {string} responseType Response message type
	 * @param {boolean|Object.<string,*>} [requestStream] Whether the request is streamed
	 * @param {boolean|Object.<string,*>} [responseStream] Whether the response is streamed
	 * @param {Object.<string,*>} [options] Declared options
	 * @param {string} [comment] The comment for this method
	 * @param {Object.<string,*>} [parsedOptions] Declared options, properly parsed into an object
	 */
	function Method(name, type, requestType, responseType, requestStream, responseStream, options, comment, parsedOptions) {

		// Overload handling: an options object may be passed in place of the
		// stream flags, shifting the remaining arguments.
		/* istanbul ignore next */
		if (util.isObject(requestStream)) {
			options = requestStream;
			requestStream = responseStream = undefined;
		} else if (util.isObject(responseStream)) {
			options = responseStream;
			responseStream = undefined;
		}

		/* istanbul ignore if */
		if (!(type === undefined || util.isString(type)))
			throw TypeError("type must be a string");

		/* istanbul ignore if */
		if (!util.isString(requestType))
			throw TypeError("requestType must be a string");

		/* istanbul ignore if */
		if (!util.isString(responseType))
			throw TypeError("responseType must be a string");

		ReflectionObject.call(this, name, options);

		/**
		 * Method type.
		 * @type {string}
		 */
		this.type = type || "rpc"; // toJSON

		/**
		 * Request type.
		 * @type {string}
		 */
		this.requestType = requestType; // toJSON, marker

		/**
		 * Whether requests are streamed or not.
		 * Normalized to `true` or `undefined` so toJSON can omit it.
		 * @type {boolean|undefined}
		 */
		this.requestStream = requestStream ? true : undefined; // toJSON

		/**
		 * Response type.
		 * @type {string}
		 */
		this.responseType = responseType; // toJSON

		/**
		 * Whether responses are streamed or not.
		 * @type {boolean|undefined}
		 */
		this.responseStream = responseStream ? true : undefined; // toJSON

		/**
		 * Resolved request type.
		 * @type {Type|null}
		 */
		this.resolvedRequestType = null;

		/**
		 * Resolved response type.
		 * @type {Type|null}
		 */
		this.resolvedResponseType = null;

		/**
		 * Comment for this method
		 * @type {string|null}
		 */
		this.comment = comment;

		/**
		 * Options properly parsed into an object
		 */
		this.parsedOptions = parsedOptions;
	}

	/**
	 * Method descriptor.
	 * @interface IMethod
	 * @property {string} [type="rpc"] Method type
	 * @property {string} requestType Request type
	 * @property {string} responseType Response type
	 * @property {boolean} [requestStream=false] Whether requests are streamed
	 * @property {boolean} [responseStream=false] Whether responses are streamed
	 * @property {Object.<string,*>} [options] Method options
	 * @property {string} comment Method comments
	 * @property {Object.<string,*>} [parsedOptions] Method options properly parsed into an object
	 */

	/**
	 * Constructs a method from a method descriptor.
	 * @param {string} name Method name
	 * @param {IMethod} json Method descriptor
	 * @returns {Method} Created method
	 * @throws {TypeError} If arguments are invalid
	 */
	Method.fromJSON = function fromJSON(name, json) {
		return new Method(name, json.type, json.requestType, json.responseType, json.requestStream, json.responseStream, json.options, json.comment, json.parsedOptions);
	};

	/**
	 * Converts this method to a method descriptor.
	 * @param {IToJSONOptions} [toJSONOptions] JSON conversion options
	 * @returns {IMethod} Method descriptor
	 */
	Method.prototype.toJSON = function toJSON(toJSONOptions) {
		var keepComments = toJSONOptions ? Boolean(toJSONOptions.keepComments) : false;
		return util.toObject([
			// "rpc" is the default type and therefore omitted from output.
			"type"           , this.type !== "rpc" && /* istanbul ignore next */ this.type || undefined,
			"requestType"    , this.requestType,
			"requestStream"  , this.requestStream,
			"responseType"   , this.responseType,
			"responseStream" , this.responseStream,
			"options"        , this.options,
			"comment"        , keepComments ? this.comment : undefined,
			"parsedOptions"  , this.parsedOptions,
		]);
	};

	/**
	 * @override
	 */
	Method.prototype.resolve = function resolve() {

		/* istanbul ignore if */
		if (this.resolved)
			return this;

		// Resolve request/response type names against the enclosing service's scope.
		this.resolvedRequestType = this.parent.lookupType(this.requestType);
		this.resolvedResponseType = this.parent.lookupType(this.responseType);

		return ReflectionObject.prototype.resolve.call(this);
	};
	return method;
}
|
|
|
|
var service$1;
|
|
var hasRequiredService$1;
|
|
|
|
// Lazy module initializer for the Service reflection class (memoized require).
function requireService$1 () {
	if (hasRequiredService$1) return service$1;
	hasRequiredService$1 = 1;
	service$1 = Service;

	// extends Namespace
	var Namespace = requireNamespace();
	((Service.prototype = Object.create(Namespace.prototype)).constructor = Service).className = "Service";

	var Method = requireMethod(),
	    util = requireUtil(),
	    rpc = rpc$2;

	/**
	 * Constructs a new service instance.
	 * @classdesc Reflected service.
	 * @extends NamespaceBase
	 * @constructor
	 * @param {string} name Service name
	 * @param {Object.<string,*>} [options] Service options
	 * @throws {TypeError} If arguments are invalid
	 */
	function Service(name, options) {
		Namespace.call(this, name, options);

		/**
		 * Service methods.
		 * Kept separate from `nested` so methods and nested types can coexist.
		 * @type {Object.<string,Method>}
		 */
		this.methods = {}; // toJSON, marker

		/**
		 * Cached methods as an array.
		 * @type {Method[]|null}
		 * @private
		 */
		this._methodsArray = null;
	}

	/**
	 * Service descriptor.
	 * @interface IService
	 * @extends INamespace
	 * @property {Object.<string,IMethod>} methods Method descriptors
	 */

	/**
	 * Constructs a service from a service descriptor.
	 * @param {string} name Service name
	 * @param {IService} json Service descriptor
	 * @returns {Service} Created service
	 * @throws {TypeError} If arguments are invalid
	 */
	Service.fromJSON = function fromJSON(name, json) {
		var service = new Service(name, json.options);
		/* istanbul ignore else */
		if (json.methods)
			for (var names = Object.keys(json.methods), i = 0; i < names.length; ++i)
				service.add(Method.fromJSON(names[i], json.methods[names[i]]));
		if (json.nested)
			service.addJSON(json.nested);
		service.comment = json.comment;
		return service;
	};

	/**
	 * Converts this service to a service descriptor.
	 * @param {IToJSONOptions} [toJSONOptions] JSON conversion options
	 * @returns {IService} Service descriptor
	 */
	Service.prototype.toJSON = function toJSON(toJSONOptions) {
		// Merge the Namespace portion (options, nested) with the methods map.
		var inherited = Namespace.prototype.toJSON.call(this, toJSONOptions);
		var keepComments = toJSONOptions ? Boolean(toJSONOptions.keepComments) : false;
		return util.toObject([
			"options" , inherited && inherited.options || undefined,
			"methods" , Namespace.arrayToJSON(this.methodsArray, toJSONOptions) || /* istanbul ignore next */ {},
			"nested"  , inherited && inherited.nested || undefined,
			"comment" , keepComments ? this.comment : undefined
		]);
	};

	/**
	 * Methods of this service as an array for iteration.
	 * @name Service#methodsArray
	 * @type {Method[]}
	 * @readonly
	 */
	Object.defineProperty(Service.prototype, "methodsArray", {
		get: function() {
			return this._methodsArray || (this._methodsArray = util.toArray(this.methods));
		}
	});

	// Invalidates the memoized methods array; returns the service for chaining.
	function clearCache(service) {
		service._methodsArray = null;
		return service;
	}

	/**
	 * @override
	 */
	Service.prototype.get = function get(name) {
		// Methods take precedence over nested objects of the same name.
		return this.methods[name]
		    || Namespace.prototype.get.call(this, name);
	};

	/**
	 * @override
	 */
	Service.prototype.resolveAll = function resolveAll() {
		var methods = this.methodsArray;
		for (var i = 0; i < methods.length; ++i)
			methods[i].resolve();
		// NOTE(review): delegates to Namespace#resolve (not resolveAll) for
		// the service itself — nested objects are not recursed into here.
		return Namespace.prototype.resolve.call(this);
	};

	/**
	 * @override
	 */
	Service.prototype.add = function add(object) {

		/* istanbul ignore if */
		if (this.get(object.name))
			throw Error("duplicate name '" + object.name + "' in " + this);

		if (object instanceof Method) {
			this.methods[object.name] = object;
			object.parent = this;
			return clearCache(this);
		}
		return Namespace.prototype.add.call(this, object);
	};

	/**
	 * @override
	 */
	Service.prototype.remove = function remove(object) {
		if (object instanceof Method) {

			/* istanbul ignore if */
			if (this.methods[object.name] !== object)
				throw Error(object + " is not a member of " + this);

			delete this.methods[object.name];
			object.parent = null;
			return clearCache(this);
		}
		return Namespace.prototype.remove.call(this, object);
	};

	/**
	 * Creates a runtime service using the specified rpc implementation.
	 * @param {RPCImpl} rpcImpl RPC implementation
	 * @param {boolean} [requestDelimited=false] Whether requests are length-delimited
	 * @param {boolean} [responseDelimited=false] Whether responses are length-delimited
	 * @returns {rpc.Service} RPC service. Useful where requests and/or responses are streamed.
	 */
	Service.prototype.create = function create(rpcImpl, requestDelimited, responseDelimited) {
		var rpcService = new rpc.Service(rpcImpl, requestDelimited, responseDelimited);
		for (var i = 0, method; i < /* initializes */ this.methodsArray.length; ++i) {
			// Sanitize the lower-cased method name into a valid identifier.
			var methodName = util.lcFirst((method = this._methodsArray[i]).resolve().name).replace(/[^$\w_]/g, "");
			// Generates e.g. `function sayHello(r, c) { return this.rpcCall(m, q, s, r, c); }`
			// with m = reflected method, q = request ctor, s = response ctor.
			rpcService[methodName] = util.codegen(["r","c"], util.isReserved(methodName) ? methodName + "_" : methodName)("return this.rpcCall(m,q,s,r,c)")({
				m: method,
				q: method.resolvedRequestType.ctor,
				s: method.resolvedResponseType.ctor
			});
		}
		return rpcService;
	};
	return service$1;
}
|
|
|
|
var message$1 = Message;
|
|
|
|
var util$3 = requireMinimal$2();
|
|
|
|
/**
 * Constructs a new message instance.
 * @classdesc Abstract runtime message. Concrete behavior lives on the
 * reflected type referenced by `$type`; every static below delegates to it.
 * @constructor
 * @param {Properties<T>} [properties] Properties to set
 * @template T extends object = object
 */
function Message(properties) {
    // not used internally
    if (properties) {
        var keys = Object.keys(properties);
        for (var index = 0; index < keys.length; index++) {
            var key = keys[index];
            this[key] = properties[key];
        }
    }
}

/**
 * Reference to the reflected type.
 * @name Message.$type
 * @type {Type}
 * @readonly
 */

/**
 * Reference to the reflected type.
 * @name Message#$type
 * @type {Type}
 * @readonly
 */

/*eslint-disable valid-jsdoc*/

/**
 * Creates a new message of this type using the specified properties.
 * @param {Object.<string,*>} [properties] Properties to set
 * @returns {Message<T>} Message instance
 * @template T extends Message<T>
 * @this Constructor<T>
 */
Message.create = function create(properties) {
    var reflectedType = this.$type;
    return reflectedType.create(properties);
};

/**
 * Encodes a message of this type.
 * @param {T|Object.<string,*>} message Message to encode
 * @param {Writer} [writer] Writer to use
 * @returns {Writer} Writer
 * @template T extends Message<T>
 * @this Constructor<T>
 */
Message.encode = function encode(message, writer) {
    var reflectedType = this.$type;
    return reflectedType.encode(message, writer);
};

/**
 * Encodes a message of this type preceded by its length as a varint.
 * @param {T|Object.<string,*>} message Message to encode
 * @param {Writer} [writer] Writer to use
 * @returns {Writer} Writer
 * @template T extends Message<T>
 * @this Constructor<T>
 */
Message.encodeDelimited = function encodeDelimited(message, writer) {
    var reflectedType = this.$type;
    return reflectedType.encodeDelimited(message, writer);
};

/**
 * Decodes a message of this type.
 * @name Message.decode
 * @function
 * @param {Reader|Uint8Array} reader Reader or buffer to decode
 * @returns {T} Decoded message
 * @template T extends Message<T>
 * @this Constructor<T>
 */
Message.decode = function decode(reader) {
    var reflectedType = this.$type;
    return reflectedType.decode(reader);
};

/**
 * Decodes a message of this type preceded by its length as a varint.
 * @name Message.decodeDelimited
 * @function
 * @param {Reader|Uint8Array} reader Reader or buffer to decode
 * @returns {T} Decoded message
 * @template T extends Message<T>
 * @this Constructor<T>
 */
Message.decodeDelimited = function decodeDelimited(reader) {
    var reflectedType = this.$type;
    return reflectedType.decodeDelimited(reader);
};

/**
 * Verifies a message of this type.
 * @name Message.verify
 * @function
 * @param {Object.<string,*>} message Plain object to verify
 * @returns {string|null} `null` if valid, otherwise the reason why it is not
 */
Message.verify = function verify(message) {
    var reflectedType = this.$type;
    return reflectedType.verify(message);
};

/**
 * Creates a new message of this type from a plain object. Also converts values to their respective internal types.
 * @param {Object.<string,*>} object Plain object
 * @returns {T} Message instance
 * @template T extends Message<T>
 * @this Constructor<T>
 */
Message.fromObject = function fromObject(object) {
    var reflectedType = this.$type;
    return reflectedType.fromObject(object);
};

/**
 * Creates a plain object from a message of this type. Also converts values to other types if specified.
 * @param {T} message Message instance
 * @param {IConversionOptions} [options] Conversion options
 * @returns {Object.<string,*>} Plain object
 * @template T extends Message<T>
 * @this Constructor<T>
 */
Message.toObject = function toObject(message, options) {
    var reflectedType = this.$type;
    return reflectedType.toObject(message, options);
};

/**
 * Converts this message to JSON.
 * @returns {Object.<string,*>} JSON object
 */
Message.prototype.toJSON = function toJSON() {
    return this.$type.toObject(this, util$3.toJSONOptions);
};
|
|
|
|
var decoder_1;
|
|
var hasRequiredDecoder;
|
|
|
|
function requireDecoder () {
|
|
if (hasRequiredDecoder) return decoder_1;
|
|
hasRequiredDecoder = 1;
|
|
decoder_1 = decoder;
|
|
|
|
var Enum = require_enum(),
|
|
types = requireTypes(),
|
|
util = requireUtil();
|
|
|
|
// Builds the error message reported when a required field is absent.
function missing(field) {
    var quotedName = "'" + field.name + "'";
    return "missing required " + quotedName;
}
|
|
|
|
/**
|
|
* Generates a decoder specific to the specified message type.
|
|
* @param {Type} mtype Message type
|
|
* @returns {Codegen} Codegen instance
|
|
*/
|
|
function decoder(mtype) {
|
|
/* eslint-disable no-unexpected-multiline */
|
|
var gen = util.codegen(["r", "l"], mtype.name + "$decode")
|
|
("if(!(r instanceof Reader))")
|
|
("r=Reader.create(r)")
|
|
("var c=l===undefined?r.len:r.pos+l,m=new this.ctor" + (mtype.fieldsArray.filter(function(field) { return field.map; }).length ? ",k,value" : ""))
|
|
("while(r.pos<c){")
|
|
("var t=r.uint32()");
|
|
if (mtype.group) gen
|
|
("if((t&7)===4)")
|
|
("break");
|
|
gen
|
|
("switch(t>>>3){");
|
|
|
|
var i = 0;
|
|
for (; i < /* initializes */ mtype.fieldsArray.length; ++i) {
|
|
var field = mtype._fieldsArray[i].resolve(),
|
|
type = field.resolvedType instanceof Enum ? "int32" : field.type,
|
|
ref = "m" + util.safeProp(field.name); gen
|
|
("case %i: {", field.id);
|
|
|
|
// Map fields
|
|
if (field.map) { gen
|
|
("if(%s===util.emptyObject)", ref)
|
|
("%s={}", ref)
|
|
("var c2 = r.uint32()+r.pos");
|
|
|
|
if (types.defaults[field.keyType] !== undefined) gen
|
|
("k=%j", types.defaults[field.keyType]);
|
|
else gen
|
|
("k=null");
|
|
|
|
if (types.defaults[type] !== undefined) gen
|
|
("value=%j", types.defaults[type]);
|
|
else gen
|
|
("value=null");
|
|
|
|
gen
|
|
("while(r.pos<c2){")
|
|
("var tag2=r.uint32()")
|
|
("switch(tag2>>>3){")
|
|
("case 1: k=r.%s(); break", field.keyType)
|
|
("case 2:");
|
|
|
|
if (types.basic[type] === undefined) gen
|
|
("value=types[%i].decode(r,r.uint32())", i); // can't be groups
|
|
else gen
|
|
("value=r.%s()", type);
|
|
|
|
gen
|
|
("break")
|
|
("default:")
|
|
("r.skipType(tag2&7)")
|
|
("break")
|
|
("}")
|
|
("}");
|
|
|
|
if (types.long[field.keyType] !== undefined) gen
|
|
("%s[typeof k===\"object\"?util.longToHash(k):k]=value", ref);
|
|
else gen
|
|
("%s[k]=value", ref);
|
|
|
|
// Repeated fields
|
|
} else if (field.repeated) { gen
|
|
|
|
("if(!(%s&&%s.length))", ref, ref)
|
|
("%s=[]", ref);
|
|
|
|
// Packable (always check for forward and backward compatiblity)
|
|
if (types.packed[type] !== undefined) gen
|
|
("if((t&7)===2){")
|
|
("var c2=r.uint32()+r.pos")
|
|
("while(r.pos<c2)")
|
|
("%s.push(r.%s())", ref, type)
|
|
("}else");
|
|
|
|
// Non-packed
|
|
if (types.basic[type] === undefined) gen(field.resolvedType.group
|
|
? "%s.push(types[%i].decode(r))"
|
|
: "%s.push(types[%i].decode(r,r.uint32()))", ref, i);
|
|
else gen
|
|
("%s.push(r.%s())", ref, type);
|
|
|
|
// Non-repeated
|
|
} else if (types.basic[type] === undefined) gen(field.resolvedType.group
|
|
? "%s=types[%i].decode(r)"
|
|
: "%s=types[%i].decode(r,r.uint32())", ref, i);
|
|
else gen
|
|
("%s=r.%s()", ref, type);
|
|
gen
|
|
("break")
|
|
("}");
|
|
// Unknown fields
|
|
} gen
|
|
("default:")
|
|
("r.skipType(t&7)")
|
|
("break")
|
|
|
|
("}")
|
|
("}");
|
|
|
|
// Field presence
|
|
for (i = 0; i < mtype._fieldsArray.length; ++i) {
|
|
var rfield = mtype._fieldsArray[i];
|
|
if (rfield.required) gen
|
|
("if(!m.hasOwnProperty(%j))", rfield.name)
|
|
("throw util.ProtocolError(%j,{instance:m})", missing(rfield));
|
|
}
|
|
|
|
return gen
|
|
("return m");
|
|
/* eslint-enable no-unexpected-multiline */
|
|
}
|
|
return decoder_1;
|
|
}
|
|
|
|
var verifier_1;          // cached export of the verifier-generator module
var hasRequiredVerifier; // truthy once the module body below has run

// Lazy CommonJS-style module initializer produced by the bundler: the module
// body runs at most once and its export (`verifier`) is cached in `verifier_1`.
function requireVerifier () {
	if (hasRequiredVerifier) return verifier_1;
	hasRequiredVerifier = 1;
	verifier_1 = verifier;

	var Enum = require_enum(),
	    util = requireUtil();

	// Builds the "<field>: <expected> expected" message, appending "[]" for
	// repeated fields and "{k:<keyType>}" for map fields where appropriate.
	function invalid(field, expected) {
	    return field.name + ": " + expected + (field.repeated && expected !== "array" ? "[]" : field.map && expected !== "object" ? "{k:"+field.keyType+"}" : "") + " expected";
	}

	/**
	 * Generates a partial value verifier.
	 *
	 * NOTE(review): gen("...") appends one line of generated source; "%s"/"%i"/
	 * "%j" appear to be sprintf-style placeholders expanded by util.codegen.
	 * `types` and `util` inside the generated strings are resolved in the
	 * generated function's scope at eval time, not in this module.
	 * @param {Codegen} gen Codegen instance
	 * @param {Field} field Reflected field
	 * @param {number} fieldIndex Field index
	 * @param {string} ref Variable reference
	 * @returns {Codegen} Codegen instance
	 * @ignore
	 */
	function genVerifyValue(gen, field, fieldIndex, ref) {
	    /* eslint-disable no-unexpected-multiline */
	    if (field.resolvedType) {
	        // Enum values must be one of the declared numeric values.
	        if (field.resolvedType instanceof Enum) { gen
	            ("switch(%s){", ref)
	                ("default:")
	                    ("return%j", invalid(field, "enum value"));
	            for (var keys = Object.keys(field.resolvedType.values), j = 0; j < keys.length; ++j) gen
	                ("case %i:", field.resolvedType.values[keys[j]]);
	            gen
	                    ("break")
	            ("}");
	        } else {
	            // Message values recurse into the sub-type's verifier, prefixing
	            // the field name onto any returned error.
	            gen
	            ("{")
	                ("var e=types[%i].verify(%s);", fieldIndex, ref)
	                ("if(e)")
	                    ("return%j+e", field.name + ".")
	            ("}");
	        }
	    } else {
	        switch (field.type) {
	            case "int32":
	            case "uint32":
	            case "sint32":
	            case "fixed32":
	            case "sfixed32": gen
	                ("if(!util.isInteger(%s))", ref)
	                    ("return%j", invalid(field, "integer"));
	                break;
	            // 64-bit types also accept Long-like objects ({low, high}).
	            case "int64":
	            case "uint64":
	            case "sint64":
	            case "fixed64":
	            case "sfixed64": gen
	                ("if(!util.isInteger(%s)&&!(%s&&util.isInteger(%s.low)&&util.isInteger(%s.high)))", ref, ref, ref, ref)
	                    ("return%j", invalid(field, "integer|Long"));
	                break;
	            case "float":
	            case "double": gen
	                ("if(typeof %s!==\"number\")", ref)
	                    ("return%j", invalid(field, "number"));
	                break;
	            case "bool": gen
	                ("if(typeof %s!==\"boolean\")", ref)
	                    ("return%j", invalid(field, "boolean"));
	                break;
	            case "string": gen
	                ("if(!util.isString(%s))", ref)
	                    ("return%j", invalid(field, "string"));
	                break;
	            // Bytes accept anything with a numeric length (buffers, arrays)
	            // or a (base64) string.
	            case "bytes": gen
	                ("if(!(%s&&typeof %s.length===\"number\"||util.isString(%s)))", ref, ref, ref)
	                    ("return%j", invalid(field, "buffer"));
	                break;
	        }
	    }
	    return gen;
	    /* eslint-enable no-unexpected-multiline */
	}

	/**
	 * Generates a partial key verifier (map keys arrive as object-key strings,
	 * hence the regex tests against their string form).
	 * @param {Codegen} gen Codegen instance
	 * @param {Field} field Reflected field
	 * @param {string} ref Variable reference
	 * @returns {Codegen} Codegen instance
	 * @ignore
	 */
	function genVerifyKey(gen, field, ref) {
	    /* eslint-disable no-unexpected-multiline */
	    switch (field.keyType) {
	        case "int32":
	        case "uint32":
	        case "sint32":
	        case "fixed32":
	        case "sfixed32": gen
	            ("if(!util.key32Re.test(%s))", ref)
	                ("return%j", invalid(field, "integer key"));
	            break;
	        case "int64":
	        case "uint64":
	        case "sint64":
	        case "fixed64":
	        case "sfixed64": gen
	            ("if(!util.key64Re.test(%s))", ref) // see comment above: x is ok, d is not
	                ("return%j", invalid(field, "integer|Long key"));
	            break;
	        case "bool": gen
	            ("if(!util.key2Re.test(%s))", ref)
	                ("return%j", invalid(field, "boolean key"));
	            break;
	    }
	    return gen;
	    /* eslint-enable no-unexpected-multiline */
	}

	/**
	 * Generates a verifier specific to the specified message type. The
	 * generated function takes a plain object `m` and returns `null` when
	 * valid or a descriptive string when not.
	 * @param {Type} mtype Message type
	 * @returns {Codegen} Codegen instance
	 */
	function verifier(mtype) {
	    /* eslint-disable no-unexpected-multiline */

	    var gen = util.codegen(["m"], mtype.name + "$verify")
	    ("if(typeof m!==\"object\"||m===null)")
	        ("return%j", "object expected");
	    var oneofs = mtype.oneofsArray,
	        seenFirstField = {};
	    // `p` tracks which oneofs have already been seen, to reject multiple
	    // set members of the same oneof.
	    if (oneofs.length) gen
	    ("var p={}");

	    for (var i = 0; i < /* initializes */ mtype.fieldsArray.length; ++i) {
	        var field = mtype._fieldsArray[i].resolve(),
	            ref = "m" + util.safeProp(field.name);

	        // Optional fields are only checked when actually present.
	        if (field.optional) gen
	        ("if(%s!=null&&m.hasOwnProperty(%j)){", ref, field.name); // !== undefined && !== null

	        // map fields
	        if (field.map) { gen
	            ("if(!util.isObject(%s))", ref)
	                ("return%j", invalid(field, "object"))
	            ("var k=Object.keys(%s)", ref)
	            ("for(var i=0;i<k.length;++i){");
	                genVerifyKey(gen, field, "k[i]");
	                genVerifyValue(gen, field, i, ref + "[k[i]]")
	            ("}");

	        // repeated fields
	        } else if (field.repeated) { gen
	            ("if(!Array.isArray(%s))", ref)
	                ("return%j", invalid(field, "array"))
	            ("for(var i=0;i<%s.length;++i){", ref);
	                genVerifyValue(gen, field, i, ref + "[i]")
	            ("}");

	        // required or present fields
	        } else {
	            if (field.partOf) {
	                var oneofProp = util.safeProp(field.partOf.name);
	                // Only emit the duplicate-member check once per oneof.
	                if (seenFirstField[field.partOf.name] === 1) gen
	            ("if(p%s===1)", oneofProp)
	                ("return%j", field.partOf.name + ": multiple values");
	                seenFirstField[field.partOf.name] = 1;
	                gen
	            ("p%s=1", oneofProp);
	            }
	            genVerifyValue(gen, field, i, ref);
	        }
	        if (field.optional) gen
	        ("}");
	    }
	    return gen
	    ("return null");
	    /* eslint-enable no-unexpected-multiline */
	}
	return verifier_1;
}
|
|
|
|
var converter = {}; // module exports object, populated once by requireConverter

var hasRequiredConverter; // truthy once the module body below has run

// Lazy CommonJS-style module initializer produced by the bundler: the module
// body runs at most once and populates the shared `converter` exports object.
function requireConverter () {
	if (hasRequiredConverter) return converter;
	hasRequiredConverter = 1;
	(function (exports) {

		/**
		 * Runtime message from/to plain object converters.
		 * @namespace
		 */
		var converter = exports;

		var Enum = require_enum(),
		    util = requireUtil();

		/**
		 * Generates a partial value fromObject converter.
		 *
		 * NOTE(review): gen("...") appends one line of generated source;
		 * "%s"/"%i"/"%j" appear to be sprintf-style placeholders expanded by
		 * util.codegen. In generated code, `d` is the source plain object and
		 * `m` the message instance being populated; `types`/`util` resolve in
		 * the generated function's scope at eval time.
		 * @param {Codegen} gen Codegen instance
		 * @param {Field} field Reflected field
		 * @param {number} fieldIndex Field index
		 * @param {string} prop Property reference
		 * @returns {Codegen} Codegen instance
		 * @ignore
		 */
		function genValuePartial_fromObject(gen, field, fieldIndex, prop) {
			var defaultAlreadyEmitted = false;
			/* eslint-disable no-unexpected-multiline, block-scoped-var, no-redeclare */
			if (field.resolvedType) {
				if (field.resolvedType instanceof Enum) { gen
					("switch(d%s){", prop);
					for (var values = field.resolvedType.values, keys = Object.keys(values), i = 0; i < keys.length; ++i) {
						// enum unknown values passthrough
						if (values[keys[i]] === field.typeDefault && !defaultAlreadyEmitted) { gen
							("default:")
								("if(typeof(d%s)===\"number\"){m%s=d%s;break}", prop, prop, prop);
							if (!field.repeated) gen // fallback to default value only for
							                         // arrays, to avoid leaving holes.
								("break"); // for non-repeated fields, just ignore
							defaultAlreadyEmitted = true;
						}
						gen
						("case%j:", keys[i])
						("case %i:", values[keys[i]])
							("m%s=%j", prop, values[keys[i]])
							("break");
					} gen
					("}");
				} else gen
					("if(typeof d%s!==\"object\")", prop)
						("throw TypeError(%j)", field.fullName + ": object expected")
					("m%s=types[%i].fromObject(d%s)", prop, fieldIndex, prop);
			} else {
				var isUnsigned = false;
				switch (field.type) {
					case "double":
					case "float": gen
						("m%s=Number(d%s)", prop, prop); // also catches "NaN", "Infinity"
						break;
					case "uint32":
					case "fixed32": gen
						("m%s=d%s>>>0", prop, prop);
						break;
					case "int32":
					case "sint32":
					case "sfixed32": gen
						("m%s=d%s|0", prop, prop);
						break;
					// 64-bit types accept Long instances, strings, numbers and
					// {low, high} objects, preferring util.Long when available.
					case "uint64":
						isUnsigned = true;
						// eslint-disable-line no-fallthrough
					case "int64":
					case "sint64":
					case "fixed64":
					case "sfixed64": gen
						("if(util.Long)")
							("(m%s=util.Long.fromValue(d%s)).unsigned=%j", prop, prop, isUnsigned)
						("else if(typeof d%s===\"string\")", prop)
							("m%s=parseInt(d%s,10)", prop, prop)
						("else if(typeof d%s===\"number\")", prop)
							("m%s=d%s", prop, prop)
						("else if(typeof d%s===\"object\")", prop)
							("m%s=new util.LongBits(d%s.low>>>0,d%s.high>>>0).toNumber(%s)", prop, prop, prop, isUnsigned ? "true" : "");
						break;
					// Strings are treated as base64; array-likes are taken as-is.
					case "bytes": gen
						("if(typeof d%s===\"string\")", prop)
							("util.base64.decode(d%s,m%s=util.newBuffer(util.base64.length(d%s)),0)", prop, prop, prop)
						("else if(d%s.length >= 0)", prop)
							("m%s=d%s", prop, prop);
						break;
					case "string": gen
						("m%s=String(d%s)", prop, prop);
						break;
					case "bool": gen
						("m%s=Boolean(d%s)", prop, prop);
						break;
					/* default: gen
						("m%s=d%s", prop, prop);
						break; */
				}
			}
			return gen;
			/* eslint-enable no-unexpected-multiline, block-scoped-var, no-redeclare */
		}

		/**
		 * Generates a plain object to runtime message converter specific to the
		 * specified message type. The generated function takes a plain object
		 * `d` and returns a message instance.
		 * @param {Type} mtype Message type
		 * @returns {Codegen} Codegen instance
		 */
		converter.fromObject = function fromObject(mtype) {
			/* eslint-disable no-unexpected-multiline, block-scoped-var, no-redeclare */
			var fields = mtype.fieldsArray;
			var gen = util.codegen(["d"], mtype.name + "$fromObject")
			("if(d instanceof this.ctor)")
				("return d");
			if (!fields.length) return gen
			("return new this.ctor");
			gen
			("var m=new this.ctor");
			for (var i = 0; i < fields.length; ++i) {
				var field = fields[i].resolve(),
				    prop = util.safeProp(field.name);

				// Map fields
				if (field.map) { gen
					("if(d%s){", prop)
						("if(typeof d%s!==\"object\")", prop)
							("throw TypeError(%j)", field.fullName + ": object expected")
						("m%s={}", prop)
						("for(var ks=Object.keys(d%s),i=0;i<ks.length;++i){", prop);
					genValuePartial_fromObject(gen, field, /* not sorted */ i, prop + "[ks[i]]")
						("}")
					("}");

				// Repeated fields
				} else if (field.repeated) { gen
					("if(d%s){", prop)
						("if(!Array.isArray(d%s))", prop)
							("throw TypeError(%j)", field.fullName + ": array expected")
						("m%s=[]", prop)
						("for(var i=0;i<d%s.length;++i){", prop);
					genValuePartial_fromObject(gen, field, /* not sorted */ i, prop + "[i]")
						("}")
					("}");

				// Non-repeated fields
				} else {
					if (!(field.resolvedType instanceof Enum)) gen // no need to test for null/undefined if an enum (uses switch)
					("if(d%s!=null){", prop); // !== undefined && !== null
					genValuePartial_fromObject(gen, field, /* not sorted */ i, prop);
					if (!(field.resolvedType instanceof Enum)) gen
					("}");
				}
			} return gen
			("return m");
			/* eslint-enable no-unexpected-multiline, block-scoped-var, no-redeclare */
		};

		/**
		 * Generates a partial value toObject converter. In generated code, `m`
		 * is the message instance, `d` the plain object being built, and `o`
		 * the conversion options.
		 * @param {Codegen} gen Codegen instance
		 * @param {Field} field Reflected field
		 * @param {number} fieldIndex Field index
		 * @param {string} prop Property reference
		 * @returns {Codegen} Codegen instance
		 * @ignore
		 */
		function genValuePartial_toObject(gen, field, fieldIndex, prop) {
			/* eslint-disable no-unexpected-multiline, block-scoped-var, no-redeclare */
			if (field.resolvedType) {
				// With o.enums===String, known values map to their names while
				// unknown numeric values pass through unchanged.
				if (field.resolvedType instanceof Enum) gen
					("d%s=o.enums===String?(types[%i].values[m%s]===undefined?m%s:types[%i].values[m%s]):m%s", prop, fieldIndex, prop, prop, fieldIndex, prop, prop);
				else gen
					("d%s=types[%i].toObject(m%s,o)", prop, fieldIndex, prop);
			} else {
				var isUnsigned = false;
				switch (field.type) {
					case "double":
					case "float": gen
						("d%s=o.json&&!isFinite(m%s)?String(m%s):m%s", prop, prop, prop, prop);
						break;
					case "uint64":
						isUnsigned = true;
						// eslint-disable-line no-fallthrough
					case "int64":
					case "sint64":
					case "fixed64":
					case "sfixed64": gen
						("if(typeof m%s===\"number\")", prop)
							("d%s=o.longs===String?String(m%s):m%s", prop, prop, prop)
						("else") // Long-like
							("d%s=o.longs===String?util.Long.prototype.toString.call(m%s):o.longs===Number?new util.LongBits(m%s.low>>>0,m%s.high>>>0).toNumber(%s):m%s", prop, prop, prop, prop, isUnsigned ? "true": "", prop);
						break;
					case "bytes": gen
						("d%s=o.bytes===String?util.base64.encode(m%s,0,m%s.length):o.bytes===Array?Array.prototype.slice.call(m%s):m%s", prop, prop, prop, prop, prop);
						break;
					default: gen
						("d%s=m%s", prop, prop);
						break;
				}
			}
			return gen;
			/* eslint-enable no-unexpected-multiline, block-scoped-var, no-redeclare */
		}

		/**
		 * Generates a runtime message to plain object converter specific to the
		 * specified message type. The generated function takes a message `m`
		 * and options `o` and returns a plain object `d`.
		 * @param {Type} mtype Message type
		 * @returns {Codegen} Codegen instance
		 */
		converter.toObject = function toObject(mtype) {
			/* eslint-disable no-unexpected-multiline, block-scoped-var, no-redeclare */
			// Fields are emitted sorted by id; `index` below maps back to the
			// unsorted types[] registry used by the value converters.
			var fields = mtype.fieldsArray.slice().sort(util.compareFieldsById);
			if (!fields.length)
				return util.codegen()("return {}");
			var gen = util.codegen(["m", "o"], mtype.name + "$toObject")
			("if(!o)")
				("o={}")
			("var d={}");

			var repeatedFields = [],
			    mapFields = [],
			    normalFields = [],
			    i = 0;
			for (; i < fields.length; ++i)
				if (!fields[i].partOf)
					( fields[i].resolve().repeated ? repeatedFields
					: fields[i].map ? mapFields
					: normalFields).push(fields[i]);

			// Pre-populate empty arrays when o.arrays or o.defaults is set.
			if (repeatedFields.length) { gen
				("if(o.arrays||o.defaults){");
				for (i = 0; i < repeatedFields.length; ++i) gen
				("d%s=[]", util.safeProp(repeatedFields[i].name));
				gen
				("}");
			}

			// Pre-populate empty objects when o.objects or o.defaults is set.
			if (mapFields.length) { gen
				("if(o.objects||o.defaults){");
				for (i = 0; i < mapFields.length; ++i) gen
				("d%s={}", util.safeProp(mapFields[i].name));
				gen
				("}");
			}

			// Emit compile-time default values when o.defaults is set.
			if (normalFields.length) { gen
				("if(o.defaults){");
				for (i = 0; i < normalFields.length; ++i) {
					var field = normalFields[i],
					    prop = util.safeProp(field.name);
					if (field.resolvedType instanceof Enum) gen
					("d%s=o.enums===String?%j:%j", prop, field.resolvedType.valuesById[field.typeDefault], field.typeDefault);
					else if (field.long) gen
					("if(util.Long){")
						("var n=new util.Long(%i,%i,%j)", field.typeDefault.low, field.typeDefault.high, field.typeDefault.unsigned)
						("d%s=o.longs===String?n.toString():o.longs===Number?n.toNumber():n", prop)
					("}else")
						("d%s=o.longs===String?%j:%i", prop, field.typeDefault.toString(), field.typeDefault.toNumber());
					else if (field.bytes) {
						var arrayDefault = "[" + Array.prototype.slice.call(field.typeDefault).join(",") + "]";
						gen
						("if(o.bytes===String)d%s=%j", prop, String.fromCharCode.apply(String, field.typeDefault))
						("else{")
							("d%s=%s", prop, arrayDefault)
							("if(o.bytes!==Array)d%s=util.newBuffer(d%s)", prop, prop)
						("}");
					} else gen
					("d%s=%j", prop, field.typeDefault); // also messages (=null)
				} gen
				("}");
			}
			var hasKs2 = false;
			for (i = 0; i < fields.length; ++i) {
				var field = fields[i],
				    index = mtype._fieldsArray.indexOf(field),
				    prop = util.safeProp(field.name);
				if (field.map) {
					// `ks2` is declared once and reused across all map fields.
					if (!hasKs2) { hasKs2 = true; gen
					("var ks2");
					} gen
					("if(m%s&&(ks2=Object.keys(m%s)).length){", prop, prop)
						("d%s={}", prop)
						("for(var j=0;j<ks2.length;++j){");
					genValuePartial_toObject(gen, field, /* sorted */ index, prop + "[ks2[j]]")
						("}");
				} else if (field.repeated) { gen
					("if(m%s&&m%s.length){", prop, prop)
						("d%s=[]", prop)
						("for(var j=0;j<m%s.length;++j){", prop);
					genValuePartial_toObject(gen, field, /* sorted */ index, prop + "[j]")
						("}");
				} else { gen
					("if(m%s!=null&&m.hasOwnProperty(%j)){", prop, field.name); // !== undefined && !== null
					genValuePartial_toObject(gen, field, /* sorted */ index, prop);
					// Oneof membership is reported under the oneof's name when
					// o.oneofs is set.
					if (field.partOf) gen
					("if(o.oneofs)")
						("d%s=%j", util.safeProp(field.partOf.name), field.name);
				}
				gen
				("}");
			}
			return gen
			("return d");
			/* eslint-enable no-unexpected-multiline, block-scoped-var, no-redeclare */
		};

	} (converter));
	return converter;
}
|
|
|
|
var wrappers = {}; // populated by the IIFE below (eager module, no require shim)

(function (exports) {

	/**
	 * Wrappers for common types.
	 * @type {Object.<string,IWrapper>}
	 * @const
	 */
	var wrappers = exports;

	var Message = message$1;

	/**
	 * From object converter part of an {@link IWrapper}.
	 * @typedef WrapperFromObjectConverter
	 * @type {function}
	 * @param {Object.<string,*>} object Plain object
	 * @returns {Message<{}>} Message instance
	 * @this Type
	 */

	/**
	 * To object converter part of an {@link IWrapper}.
	 * @typedef WrapperToObjectConverter
	 * @type {function}
	 * @param {Message<{}>} message Message instance
	 * @param {IConversionOptions} [options] Conversion options
	 * @returns {Object.<string,*>} Plain object
	 * @this Type
	 */

	/**
	 * Common type wrapper part of {@link wrappers}.
	 * @interface IWrapper
	 * @property {WrapperFromObjectConverter} [fromObject] From object converter
	 * @property {WrapperToObjectConverter} [toObject] To object converter
	 */

	// Custom wrapper for Any: packs/unpacks arbitrary messages using the
	// JSON "@type" discriminator. Both converters run with `this` bound to
	// the reflected Any type.
	wrappers[".google.protobuf.Any"] = {

		fromObject: function(object) {

			// unwrap value type if mapped
			if (object && object["@type"]) {
				// Only use fully qualified type name after the last '/'
				var name = object["@type"].substring(object["@type"].lastIndexOf("/") + 1);
				var type = this.lookup(name);
				/* istanbul ignore else */
				if (type) {
					// type_url does not accept leading "."
					var type_url = object["@type"].charAt(0) === "." ?
						object["@type"].slice(1) : object["@type"];
					// type_url prefix is optional, but path separator is required
					if (type_url.indexOf("/") === -1) {
						type_url = "/" + type_url;
					}
					// Re-encode the payload message into Any's bytes `value`.
					return this.create({
						type_url: type_url,
						value: type.encode(type.fromObject(object)).finish()
					});
				}
			}

			// Fall back to the type's own (generated) fromObject.
			return this.fromObject(object);
		},

		toObject: function(message, options) {

			// Default prefix
			var googleApi = "type.googleapis.com/";
			var prefix = "";
			var name = "";

			// decode value if requested and unmapped
			if (options && options.json && message.type_url && message.value) {
				// Only use fully qualified type name after the last '/'
				name = message.type_url.substring(message.type_url.lastIndexOf("/") + 1);
				// Separate the prefix used
				prefix = message.type_url.substring(0, message.type_url.lastIndexOf("/") + 1);
				var type = this.lookup(name);
				/* istanbul ignore else */
				if (type)
					message = type.decode(message.value);
			}

			// wrap value if unmapped: emit the inner message's fields plus an
			// "@type" discriminator instead of {type_url, value}.
			if (!(message instanceof this.ctor) && message instanceof Message) {
				var object = message.$type.toObject(message, options);
				var messageName = message.$type.fullName[0] === "." ?
					message.$type.fullName.slice(1) : message.$type.fullName;
				// Default to type.googleapis.com prefix if no prefix is used
				if (prefix === "") {
					prefix = googleApi;
				}
				name = prefix + messageName;
				object["@type"] = name;
				return object;
			}

			// Fall back to the type's own (generated) toObject.
			return this.toObject(message, options);
		}
	};
} (wrappers));
|
|
|
|
var type;            // cached export of the Type module (set by requireType)
var hasRequiredType; // truthy once requireType's module body has run
|
|
function requireType () {
|
|
if (hasRequiredType) return type;
|
|
hasRequiredType = 1;
|
|
type = Type;
|
|
|
|
// extends Namespace
|
|
var Namespace = requireNamespace();
|
|
((Type.prototype = Object.create(Namespace.prototype)).constructor = Type).className = "Type";
|
|
|
|
var Enum = require_enum(),
|
|
OneOf = requireOneof(),
|
|
Field = requireField(),
|
|
MapField = requireMapfield(),
|
|
Service = requireService$1(),
|
|
Message = message$1,
|
|
Reader = reader$1,
|
|
Writer = writer$1,
|
|
util = requireUtil(),
|
|
encoder = requireEncoder(),
|
|
decoder = requireDecoder(),
|
|
verifier = requireVerifier(),
|
|
converter = requireConverter(),
|
|
wrappers$1 = wrappers;
|
|
|
|
/**
 * Constructs a new reflected message type instance.
 * @classdesc Reflected message type.
 * @extends NamespaceBase
 * @constructor
 * @param {string} name Message name
 * @param {Object.<string,*>} [options] Declared options
 */
function Type(name, options) {
	Namespace.call(this, name, options);

	// NOTE: "toJSON" markers flag properties serialized by Type#toJSON; the
	// `_`-prefixed caches below are invalidated via clearCache().

	/**
	 * Message fields.
	 * @type {Object.<string,Field>}
	 */
	this.fields = {}; // toJSON, marker

	/**
	 * Oneofs declared within this namespace, if any.
	 * @type {Object.<string,OneOf>}
	 */
	this.oneofs = undefined; // toJSON

	/**
	 * Extension ranges, if any.
	 * @type {number[][]}
	 */
	this.extensions = undefined; // toJSON

	/**
	 * Reserved ranges, if any.
	 * @type {Array.<number[]|string>}
	 */
	this.reserved = undefined; // toJSON

	/*?
	 * Whether this type is a legacy group.
	 * @type {boolean|undefined}
	 */
	this.group = undefined; // toJSON

	/**
	 * Cached fields by id.
	 * @type {Object.<number,Field>|null}
	 * @private
	 */
	this._fieldsById = null;

	/**
	 * Cached fields as an array.
	 * @type {Field[]|null}
	 * @private
	 */
	this._fieldsArray = null;

	/**
	 * Cached oneofs as an array.
	 * @type {OneOf[]|null}
	 * @private
	 */
	this._oneofsArray = null;

	/**
	 * Cached constructor.
	 * @type {Constructor<{}>}
	 * @private
	 */
	this._ctor = null;
}
|
|
|
|
Object.defineProperties(Type.prototype, {

	/**
	 * Message fields by id. Built lazily on first access and memoized in
	 * `_fieldsById`; throws if two fields share an id.
	 * @name Type#fieldsById
	 * @type {Object.<number,Field>}
	 * @readonly
	 */
	fieldsById: {
		get: function() {

			/* istanbul ignore if */
			if (this._fieldsById)
				return this._fieldsById;

			this._fieldsById = {};
			for (var names = Object.keys(this.fields), i = 0; i < names.length; ++i) {
				var field = this.fields[names[i]],
				    id = field.id;

				/* istanbul ignore if */
				if (this._fieldsById[id])
					throw Error("duplicate id " + id + " in " + this);

				this._fieldsById[id] = field;
			}
			return this._fieldsById;
		}
	},

	/**
	 * Fields of this message as an array for iteration (lazily memoized).
	 * @name Type#fieldsArray
	 * @type {Field[]}
	 * @readonly
	 */
	fieldsArray: {
		get: function() {
			return this._fieldsArray || (this._fieldsArray = util.toArray(this.fields));
		}
	},

	/**
	 * Oneofs of this message as an array for iteration (lazily memoized).
	 * @name Type#oneofsArray
	 * @type {OneOf[]}
	 * @readonly
	 */
	oneofsArray: {
		get: function() {
			return this._oneofsArray || (this._oneofsArray = util.toArray(this.oneofs));
		}
	},

	/**
	 * The registered constructor, if any registered, otherwise a generic constructor.
	 * Assigning a function replaces the internal constructor. If the function does not extend {@link Message} yet, its prototype will be setup accordingly and static methods will be populated. If it already extends {@link Message}, it will just replace the internal constructor.
	 * @name Type#ctor
	 * @type {Constructor<{}>}
	 */
	ctor: {
		get: function() {
			// First access generates a constructor and routes it through the
			// setter below (note: assigns `this.ctor`, not `this._ctor`).
			return this._ctor || (this.ctor = Type.generateConstructor(this)());
		},
		set: function(ctor) {

			// Ensure proper prototype
			var prototype = ctor.prototype;
			if (!(prototype instanceof Message)) {
				(ctor.prototype = new Message()).constructor = ctor;
				util.merge(ctor.prototype, prototype);
			}

			// Classes and messages reference their reflected type
			ctor.$type = ctor.prototype.$type = this;

			// Mix in static methods
			util.merge(ctor, Message, true);

			this._ctor = ctor;

			// Messages have non-enumerable default values on their prototype
			var i = 0;
			for (; i < /* initializes */ this.fieldsArray.length; ++i)
				this._fieldsArray[i].resolve(); // ensures a proper value

			// Messages have non-enumerable getters and setters for each virtual oneof field
			var ctorProperties = {};
			for (i = 0; i < /* initializes */ this.oneofsArray.length; ++i)
				ctorProperties[this._oneofsArray[i].resolve().name] = {
					get: util.oneOfGetter(this._oneofsArray[i].oneof),
					set: util.oneOfSetter(this._oneofsArray[i].oneof)
				};
			// Only define properties when at least one oneof exists.
			if (i)
				Object.defineProperties(ctor.prototype, ctorProperties);
		}
	}
});
|
|
|
|
/**
 * Generates a constructor function for the specified type. The generated
 * constructor takes an optional properties object `p` and copies its
 * non-null own enumerable keys onto the instance.
 * @param {Type} mtype Message type
 * @returns {Codegen} Codegen instance
 */
Type.generateConstructor = function generateConstructor(mtype) {
	/* eslint-disable no-unexpected-multiline */
	var gen = util.codegen(["p"], mtype.name);
	// explicitly initialize mutable object/array fields so that these aren't just inherited from the prototype
	for (var i = 0, field; i < mtype.fieldsArray.length; ++i)
		if ((field = mtype._fieldsArray[i]).map) gen
			("this%s={}", util.safeProp(field.name));
		else if (field.repeated) gen
			("this%s=[]", util.safeProp(field.name));
	return gen
	("if(p)for(var ks=Object.keys(p),i=0;i<ks.length;++i)if(p[ks[i]]!=null)") // omit undefined or null
		("this[ks[i]]=p[ks[i]]");
	/* eslint-enable no-unexpected-multiline */
};
|
|
|
|
/**
 * Drops all memoized lookups and lazily generated codec methods on a
 * reflected type so they are rebuilt on next access.
 * @param {Type} type Reflected type to invalidate
 * @returns {Type} `type`, for chaining
 */
function clearCache(type) {
	type._fieldsById = null;
	type._fieldsArray = null;
	type._oneofsArray = null;
	delete type.encode;
	delete type.decode;
	delete type.verify;
	return type;
}
|
|
|
|
/**
|
|
* Message type descriptor.
|
|
* @interface IType
|
|
* @extends INamespace
|
|
* @property {Object.<string,IOneOf>} [oneofs] Oneof descriptors
|
|
* @property {Object.<string,IField>} fields Field descriptors
|
|
* @property {number[][]} [extensions] Extension ranges
|
|
* @property {number[][]} [reserved] Reserved ranges
|
|
* @property {boolean} [group=false] Whether a legacy group or not
|
|
*/
|
|
|
|
/**
 * Creates a message type from a message type descriptor.
 * @param {string} name Message name
 * @param {IType} json Message type descriptor
 * @returns {Type} Created message type
 */
Type.fromJSON = function fromJSON(name, json) {
	var type = new Type(name, json.options);
	type.extensions = json.extensions;
	type.reserved = json.reserved;
	var names = Object.keys(json.fields),
	    i = 0;
	// A field descriptor with a keyType is a map field.
	for (; i < names.length; ++i)
		type.add(
			( typeof json.fields[names[i]].keyType !== "undefined"
			? MapField.fromJSON
			: Field.fromJSON )(names[i], json.fields[names[i]])
		);
	if (json.oneofs)
		for (names = Object.keys(json.oneofs), i = 0; i < names.length; ++i)
			type.add(OneOf.fromJSON(names[i], json.oneofs[names[i]]));
	if (json.nested)
		for (names = Object.keys(json.nested), i = 0; i < names.length; ++i) {
			var nested = json.nested[names[i]];
			// Discriminate the nested descriptor's kind by its marker property.
			type.add( // most to least likely
				( nested.id !== undefined
				? Field.fromJSON
				: nested.fields !== undefined
				? Type.fromJSON
				: nested.values !== undefined
				? Enum.fromJSON
				: nested.methods !== undefined
				? Service.fromJSON
				: Namespace.fromJSON )(names[i], nested)
			);
		}
	if (json.extensions && json.extensions.length)
		type.extensions = json.extensions;
	if (json.reserved && json.reserved.length)
		type.reserved = json.reserved;
	if (json.group)
		type.group = true;
	if (json.comment)
		type.comment = json.comment;
	return type;
};
|
|
|
|
/**
 * Converts this message type to a message type descriptor.
 * @param {IToJSONOptions} [toJSONOptions] JSON conversion options
 * @returns {IType} Message type descriptor
 */
Type.prototype.toJSON = function toJSON(toJSONOptions) {
	var inherited = Namespace.prototype.toJSON.call(this, toJSONOptions);
	var keepComments = toJSONOptions ? Boolean(toJSONOptions.keepComments) : false;
	// util.toObject takes a flat [key, value, ...] list; undefined values are
	// presumably omitted from the result — confirm against util.toObject.
	return util.toObject([
		"options" , inherited && inherited.options || undefined,
		"oneofs" , Namespace.arrayToJSON(this.oneofsArray, toJSONOptions),
		// Extension sister-fields carry a declaringField and are excluded.
		"fields" , Namespace.arrayToJSON(this.fieldsArray.filter(function(obj) { return !obj.declaringField; }), toJSONOptions) || {},
		"extensions" , this.extensions && this.extensions.length ? this.extensions : undefined,
		"reserved" , this.reserved && this.reserved.length ? this.reserved : undefined,
		"group" , this.group || undefined,
		"nested" , inherited && inherited.nested || undefined,
		"comment" , keepComments ? this.comment : undefined
	]);
};
|
|
|
|
/**
 * Resolves every field and oneof of this type, then delegates to the
 * namespace implementation to resolve nested objects.
 * @override
 */
Type.prototype.resolveAll = function resolveAll() {
	var fields = this.fieldsArray, i = 0;
	while (i < fields.length)
		fields[i++].resolve();
	var oneofs = this.oneofsArray; i = 0;
	while (i < oneofs.length)
		oneofs[i++].resolve();
	return Namespace.prototype.resolveAll.call(this);
};
|
|
|
|
/**
 * @override
 */
Type.prototype.get = function get(name) {
    // Lookup order: fields, then oneofs, then other nested objects.
    var found = this.fields[name];
    if (!found && this.oneofs)
        found = this.oneofs[name];
    if (!found && this.nested)
        found = this.nested[name];
    return found || null;
};
|
|
|
|
/**
 * Adds a nested object to this type.
 * @param {ReflectionObject} object Nested object to add
 * @returns {Type} `this`
 * @throws {TypeError} If arguments are invalid
 * @throws {Error} If there is already a nested object with this name or, if a field, when there is already a field with this id
 */
Type.prototype.add = function add(object) {

    // Names must be unique across fields, oneofs and nested objects alike.
    if (this.get(object.name))
        throw Error("duplicate name '" + object.name + "' in " + this);

    if (object instanceof Field && object.extend === undefined) {
        // NOTE: Extension fields aren't actual fields on the declaring type, but nested objects.
        // The root object takes care of adding distinct sister-fields to the respective extended
        // type instead.

        // avoids calling the getter if not absolutely necessary because it's called quite frequently
        if (this._fieldsById ? /* istanbul ignore next */ this._fieldsById[object.id] : this.fieldsById[object.id])
            throw Error("duplicate id " + object.id + " in " + this);
        if (this.isReservedId(object.id))
            throw Error("id " + object.id + " is reserved in " + this);
        if (this.isReservedName(object.name))
            throw Error("name '" + object.name + "' is reserved in " + this);

        // Re-parent: a field can belong to at most one type at a time.
        if (object.parent)
            object.parent.remove(object);
        this.fields[object.name] = object;
        object.message = this;
        object.onAdd(this);
        // NOTE(review): clearCache appears to invalidate this type's memoized
        // field/oneof lookup views — it is defined outside this excerpt; confirm.
        return clearCache(this);
    }
    if (object instanceof OneOf) {
        // Oneofs are stored lazily; the map is created on first use.
        if (!this.oneofs)
            this.oneofs = {};
        this.oneofs[object.name] = object;
        object.onAdd(this);
        return clearCache(this);
    }
    // Anything else (nested types, enums, services, ...) is handled by Namespace.
    return Namespace.prototype.add.call(this, object);
};
|
|
|
|
/**
 * Removes a nested object from this type.
 * @param {ReflectionObject} object Nested object to remove
 * @returns {Type} `this`
 * @throws {TypeError} If arguments are invalid
 * @throws {Error} If `object` is not a member of this type
 */
Type.prototype.remove = function remove(object) {
    // See Type#add for the reason why extension fields are excluded here:
    // they are treated as plain nested objects, handled by Namespace below.
    var isPlainField = object instanceof Field && object.extend === undefined;
    var isOneOf = object instanceof OneOf;
    if (!isPlainField && !isOneOf)
        return Namespace.prototype.remove.call(this, object);

    // Fields and oneofs are removed identically, just from different maps.
    var map = isPlainField ? this.fields : this.oneofs;

    /* istanbul ignore if */
    if (!map || map[object.name] !== object)
        throw Error(object + " is not a member of " + this);

    delete map[object.name];
    object.parent = null;
    object.onRemove(this);
    return clearCache(this);
};
|
|
|
|
/**
 * Tests if the specified id is reserved.
 * @param {number} id Id to test
 * @returns {boolean} `true` if reserved, otherwise `false`
 */
Type.prototype.isReservedId = function isReservedId(id) {
    var reservedRanges = this.reserved;
    return Namespace.isReservedId(reservedRanges, id);
};
|
|
|
|
/**
 * Tests if the specified name is reserved.
 * @param {string} name Name to test
 * @returns {boolean} `true` if reserved, otherwise `false`
 */
Type.prototype.isReservedName = function isReservedName(name) {
    var reservedNames = this.reserved;
    return Namespace.isReservedName(reservedNames, name);
};
|
|
|
|
/**
 * Creates a new message of this type using the specified properties.
 * @param {Object.<string,*>} [properties] Properties to set
 * @returns {Message<{}>} Message instance
 */
Type.prototype.create = function create(properties) {
    var MessageCtor = this.ctor;
    return new MessageCtor(properties);
};
|
|
|
|
/**
 * Sets up {@link Type#encode|encode}, {@link Type#decode|decode} and {@link Type#verify|verify}.
 * @returns {Type} `this`
 */
Type.prototype.setup = function setup() {
    // Sets up everything at once so that the prototype chain does not have to be re-evaluated
    // multiple times (V8, soft-deopt prototype-check).

    var fullName = this.fullName,
        types = [];
    // Resolve each field's type reference up front; the generated codecs below
    // reference this array positionally (one entry per field, in field order).
    for (var i = 0; i < /* initializes */ this.fieldsArray.length; ++i)
        types.push(this._fieldsArray[i].resolve().resolvedType);

    // Replace setup methods with type-specific generated functions.
    // Each generator returns a factory that is immediately invoked with the
    // runtime objects the generated code closes over.
    this.encode = encoder(this)({
        Writer : Writer,
        types : types,
        util : util
    });
    this.decode = decoder(this)({
        Reader : Reader,
        types : types,
        util : util
    });
    this.verify = verifier(this)({
        types : types,
        util : util
    });
    this.fromObject = converter.fromObject(this)({
        types : types,
        util : util
    });
    this.toObject = converter.toObject(this)({
        types : types,
        util : util
    });

    // Inject custom wrappers for common types
    var wrapper = wrappers$1[fullName];
    if (wrapper) {
        // The generated converters stay reachable through a prototype-derived
        // object so the wrapper can delegate to the original implementations.
        var originalThis = Object.create(this);
        // if (wrapper.fromObject) {
        originalThis.fromObject = this.fromObject;
        this.fromObject = wrapper.fromObject.bind(originalThis);
        // }
        // if (wrapper.toObject) {
        originalThis.toObject = this.toObject;
        this.toObject = wrapper.toObject.bind(originalThis);
        // }
    }

    return this;
};
|
|
|
|
/**
 * Encodes a message of this type. Does not implicitly {@link Type#verify|verify} messages.
 * @param {Message<{}>|Object.<string,*>} message Message instance or plain object
 * @param {Writer} [writer] Writer to encode to
 * @returns {Writer} writer
 */
Type.prototype.encode = function encode_setup(message, writer) {
    // setup() installs a generated encoder over this method; delegate once.
    var ready = this.setup();
    return ready.encode(message, writer);
};
|
|
|
|
/**
 * Encodes a message of this type preceded by its byte length as a varint. Does not implicitly {@link Type#verify|verify} messages.
 * @param {Message<{}>|Object.<string,*>} message Message instance or plain object
 * @param {Writer} [writer] Writer to encode to
 * @returns {Writer} writer
 */
Type.prototype.encodeDelimited = function encodeDelimited(message, writer) {
    // Fork a non-empty writer so ldelim() prefixes just this message's length.
    var target = writer && writer.len ? writer.fork() : writer;
    return this.encode(message, target).ldelim();
};
|
|
|
|
/**
 * Decodes a message of this type.
 * @param {Reader|Uint8Array} reader Reader or buffer to decode from
 * @param {number} [length] Length of the message, if known beforehand
 * @returns {Message<{}>} Decoded message
 * @throws {Error} If the payload is not a reader or valid buffer
 * @throws {util.ProtocolError<{}>} If required fields are missing
 */
Type.prototype.decode = function decode_setup(reader, length) {
    // setup() installs a generated decoder over this method; delegate once.
    var ready = this.setup();
    return ready.decode(reader, length);
};
|
|
|
|
/**
 * Decodes a message of this type preceded by its byte length as a varint.
 * @param {Reader|Uint8Array} reader Reader or buffer to decode from
 * @returns {Message<{}>} Decoded message
 * @throws {Error} If the payload is not a reader or valid buffer
 * @throws {util.ProtocolError} If required fields are missing
 */
Type.prototype.decodeDelimited = function decodeDelimited(reader) {
    if (!(reader instanceof Reader))
        reader = Reader.create(reader);
    // The leading varint carries the byte length of the message that follows.
    var length = reader.uint32();
    return this.decode(reader, length);
};
|
|
|
|
/**
 * Verifies that field values are valid and that required fields are present.
 * @param {Object.<string,*>} message Plain object to verify
 * @returns {null|string} `null` if valid, otherwise the reason why it is not
 */
Type.prototype.verify = function verify_setup(message) {
    // setup() installs a generated verifier over this method; delegate once.
    var ready = this.setup();
    return ready.verify(message);
};
|
|
|
|
/**
 * Creates a new message of this type from a plain object. Also converts values to their respective internal types.
 * @param {Object.<string,*>} object Plain object to convert
 * @returns {Message<{}>} Message instance
 */
Type.prototype.fromObject = function fromObject(object) {
    // setup() installs a generated converter over this method; delegate once.
    var ready = this.setup();
    return ready.fromObject(object);
};
|
|
|
|
/**
|
|
* Conversion options as used by {@link Type#toObject} and {@link Message.toObject}.
|
|
* @interface IConversionOptions
|
|
* @property {Function} [longs] Long conversion type.
|
|
* Valid values are `String` and `Number` (the global types).
|
|
* Defaults to copy the present value, which is a possibly unsafe number without and a {@link Long} with a long library.
|
|
* @property {Function} [enums] Enum value conversion type.
|
|
* Only valid value is `String` (the global type).
|
|
* Defaults to copy the present value, which is the numeric id.
|
|
* @property {Function} [bytes] Bytes value conversion type.
|
|
* Valid values are `Array` and (a base64 encoded) `String` (the global types).
|
|
* Defaults to copy the present value, which usually is a Buffer under node and an Uint8Array in the browser.
|
|
* @property {boolean} [defaults=false] Also sets default values on the resulting object
|
|
* @property {boolean} [arrays=false] Sets empty arrays for missing repeated fields even if `defaults=false`
|
|
* @property {boolean} [objects=false] Sets empty objects for missing map fields even if `defaults=false`
|
|
* @property {boolean} [oneofs=false] Includes virtual oneof properties set to the present field's name, if any
|
|
* @property {boolean} [json=false] Performs additional JSON compatibility conversions, i.e. NaN and Infinity to strings
|
|
*/
|
|
|
|
/**
 * Creates a plain object from a message of this type. Also converts values to other types if specified.
 * @param {Message<{}>} message Message instance
 * @param {IConversionOptions} [options] Conversion options
 * @returns {Object.<string,*>} Plain object
 */
Type.prototype.toObject = function toObject(message, options) {
    // setup() installs a generated converter over this method; delegate once.
    var ready = this.setup();
    return ready.toObject(message, options);
};
|
|
|
|
/**
|
|
* Decorator function as returned by {@link Type.d} (TypeScript).
|
|
* @typedef TypeDecorator
|
|
* @type {function}
|
|
* @param {Constructor<T>} target Target constructor
|
|
* @returns {undefined}
|
|
* @template T extends Message<T>
|
|
*/
|
|
|
|
/**
 * Type decorator (TypeScript).
 * @param {string} [typeName] Type name, defaults to the constructor's name
 * @returns {TypeDecorator<T>} Decorator function
 * @template T extends Message<T>
 */
Type.d = function decorateType(typeName) {
    // The returned decorator registers the target constructor under typeName.
    function typeDecorator(target) {
        util.decorateType(target, typeName);
    }
    return typeDecorator;
};
|
|
return type;
|
|
}
|
|
|
|
var root;
var hasRequiredRoot;

// CommonJS-style lazy module wrapper produced by the bundler: the first call
// builds the Root reflection class; subsequent calls return the cached export.
function requireRoot () {
    if (hasRequiredRoot) return root;
    hasRequiredRoot = 1;
    root = Root;

    // extends Namespace
    var Namespace = requireNamespace();
    ((Root.prototype = Object.create(Namespace.prototype)).constructor = Root).className = "Root";

    var Field = requireField(),
        Enum = require_enum(),
        OneOf = requireOneof(),
        util = requireUtil();

    var Type, // cyclic
        parse, // might be excluded
        common; // "

    /**
     * Constructs a new root namespace instance.
     * @classdesc Root namespace wrapping all types, enums, services, sub-namespaces etc. that belong together.
     * @extends NamespaceBase
     * @constructor
     * @param {Object.<string,*>} [options] Top level options
     */
    function Root(options) {
        Namespace.call(this, "", options);

        /**
         * Deferred extension fields.
         * @type {Field[]}
         */
        this.deferred = [];

        /**
         * Resolved file names of loaded files.
         * @type {string[]}
         */
        this.files = [];
    }

    /**
     * Loads a namespace descriptor into a root namespace.
     * @param {INamespace} json Namespace descriptor
     * @param {Root} [root] Root namespace, defaults to create a new one if omitted
     * @returns {Root} Root namespace
     */
    Root.fromJSON = function fromJSON(json, root) {
        if (!root)
            root = new Root();
        if (json.options)
            root.setOptions(json.options);
        return root.addJSON(json.nested);
    };

    /**
     * Resolves the path of an imported file, relative to the importing origin.
     * This method exists so you can override it with your own logic in case your imports are scattered over multiple directories.
     * @function
     * @param {string} origin The file name of the importing file
     * @param {string} target The file name being imported
     * @returns {string|null} Resolved path to `target` or `null` to skip the file
     */
    Root.prototype.resolvePath = util.path.resolve;

    /**
     * Fetch content from file path or url
     * This method exists so you can override it with your own logic.
     * @function
     * @param {string} path File path or url
     * @param {FetchCallback} callback Callback function
     * @returns {undefined}
     */
    Root.prototype.fetch = util.fetch;

    // A symbol-like function to safely signal synchronous loading
    /* istanbul ignore next */
    function SYNC() {} // eslint-disable-line no-empty-function

    /**
     * Loads one or multiple .proto or preprocessed .json files into this root namespace and calls the callback.
     * @param {string|string[]} filename Names of one or multiple files to load
     * @param {IParseOptions} options Parse options
     * @param {LoadCallback} callback Callback function
     * @returns {undefined}
     */
    Root.prototype.load = function load(filename, options, callback) {
        if (typeof options === "function") {
            callback = options;
            options = undefined;
        }
        var self = this;
        // No callback given: promisify by re-entering this same function.
        if (!callback)
            return util.asPromise(load, self, filename, options);

        var sync = callback === SYNC; // undocumented

        // Finishes loading by calling the callback (exactly once)
        function finish(err, root) {
            /* istanbul ignore if */
            if (!callback)
                return;
            var cb = callback;
            callback = null;
            // In sync mode finish() is only reached on error paths (success
            // returns `self` directly below), so the error is rethrown to the
            // loadSync caller instead of being passed to a callback.
            if (sync)
                throw err;
            cb(err, root);
        }

        // Bundled definition existence checking
        function getBundledFileName(filename) {
            var idx = filename.lastIndexOf("google/protobuf/");
            if (idx > -1) {
                var altname = filename.substring(idx);
                if (altname in common) return altname;
            }
            return null;
        }

        // Processes a single file
        function process(filename, source) {
            try {
                // A leading "{" marks preprocessed JSON rather than .proto text.
                if (util.isString(source) && source.charAt(0) === "{")
                    source = JSON.parse(source);
                if (!util.isString(source))
                    self.setOptions(source.options).addJSON(source.nested);
                else {
                    parse.filename = filename;
                    var parsed = parse(source, self, options),
                        resolved,
                        i = 0;
                    // Recursively fetch (weak) imports, preferring bundled
                    // google/protobuf definitions over files on disk.
                    if (parsed.imports)
                        for (; i < parsed.imports.length; ++i)
                            if (resolved = getBundledFileName(parsed.imports[i]) || self.resolvePath(filename, parsed.imports[i]))
                                fetch(resolved);
                    if (parsed.weakImports)
                        for (i = 0; i < parsed.weakImports.length; ++i)
                            if (resolved = getBundledFileName(parsed.weakImports[i]) || self.resolvePath(filename, parsed.weakImports[i]))
                                fetch(resolved, true);
                }
            } catch (err) {
                finish(err);
            }
            if (!sync && !queued)
                finish(null, self); // only once anyway
        }

        // Fetches a single file
        function fetch(filename, weak) {

            // Skip if already loaded / attempted
            if (self.files.indexOf(filename) > -1)
                return;
            self.files.push(filename);

            // Shortcut bundled definitions
            if (filename in common) {
                if (sync)
                    process(filename, common[filename]);
                else {
                    // Defer so async completion semantics match the I/O path.
                    ++queued;
                    setTimeout(function() {
                        --queued;
                        process(filename, common[filename]);
                    });
                }
                return;
            }

            // Otherwise fetch from disk or network
            if (sync) {
                var source;
                try {
                    source = util.fs.readFileSync(filename).toString("utf8");
                } catch (err) {
                    // Weak imports are best-effort: a failed read is ignored.
                    if (!weak)
                        finish(err);
                    return;
                }
                process(filename, source);
            } else {
                ++queued;
                self.fetch(filename, function(err, source) {
                    --queued;
                    /* istanbul ignore if */
                    if (!callback)
                        return; // terminated meanwhile
                    if (err) {
                        /* istanbul ignore else */
                        if (!weak)
                            finish(err);
                        else if (!queued) // can't be covered reliably
                            finish(null, self);
                        return;
                    }
                    process(filename, source);
                });
            }
        }
        var queued = 0;

        // Assembling the root namespace doesn't require working type
        // references anymore, so we can load everything in parallel
        if (util.isString(filename))
            filename = [ filename ];
        for (var i = 0, resolved; i < filename.length; ++i)
            if (resolved = self.resolvePath("", filename[i]))
                fetch(resolved);

        if (sync)
            return self;
        if (!queued)
            finish(null, self);
        return undefined;
    };
    // function load(filename:string, options:IParseOptions, callback:LoadCallback):undefined

    /**
     * Loads one or multiple .proto or preprocessed .json files into this root namespace and calls the callback.
     * @function Root#load
     * @param {string|string[]} filename Names of one or multiple files to load
     * @param {LoadCallback} callback Callback function
     * @returns {undefined}
     * @variation 2
     */
    // function load(filename:string, callback:LoadCallback):undefined

    /**
     * Loads one or multiple .proto or preprocessed .json files into this root namespace and returns a promise.
     * @function Root#load
     * @param {string|string[]} filename Names of one or multiple files to load
     * @param {IParseOptions} [options] Parse options. Defaults to {@link parse.defaults} when omitted.
     * @returns {Promise<Root>} Promise
     * @variation 3
     */
    // function load(filename:string, [options:IParseOptions]):Promise<Root>

    /**
     * Synchronously loads one or multiple .proto or preprocessed .json files into this root namespace (node only).
     * @function Root#loadSync
     * @param {string|string[]} filename Names of one or multiple files to load
     * @param {IParseOptions} [options] Parse options. Defaults to {@link parse.defaults} when omitted.
     * @returns {Root} Root namespace
     * @throws {Error} If synchronous fetching is not supported (i.e. in browsers) or if a file's syntax is invalid
     */
    Root.prototype.loadSync = function loadSync(filename, options) {
        if (!util.isNode)
            throw Error("not supported");
        return this.load(filename, options, SYNC);
    };

    /**
     * @override
     */
    Root.prototype.resolveAll = function resolveAll() {
        if (this.deferred.length)
            throw Error("unresolvable extensions: " + this.deferred.map(function(field) {
                return "'extend " + field.extend + "' in " + field.parent.fullName;
            }).join(", "));
        return Namespace.prototype.resolveAll.call(this);
    };

    // only uppercased (and thus conflict-free) children are exposed, see below
    var exposeRe = /^[A-Z]/;

    /**
     * Handles a deferred declaring extension field by creating a sister field to represent it within its extended type.
     * @param {Root} root Root instance
     * @param {Field} field Declaring extension field within the declaring type
     * @returns {boolean} `true` if successfully added to the extended type, `false` otherwise
     * @inner
     * @ignore
     */
    function tryHandleExtension(root, field) {
        var extendedType = field.parent.lookup(field.extend);
        if (extendedType) {
            var sisterField = new Field(field.fullName, field.id, field.type, field.rule, undefined, field.options);
            sisterField.declaringField = field;
            field.extensionField = sisterField;
            extendedType.add(sisterField);
            return true;
        }
        return false;
    }

    /**
     * Called when any object is added to this root or its sub-namespaces.
     * @param {ReflectionObject} object Object added
     * @returns {undefined}
     * @private
     */
    Root.prototype._handleAdd = function _handleAdd(object) {
        if (object instanceof Field) {

            if (/* an extension field (implies not part of a oneof) */ object.extend !== undefined && /* not already handled */ !object.extensionField)
                if (!tryHandleExtension(this, object))
                    this.deferred.push(object);

        } else if (object instanceof Enum) {

            if (exposeRe.test(object.name))
                object.parent[object.name] = object.values; // expose enum values as property of its parent

        } else if (!(object instanceof OneOf)) /* everything else is a namespace */ {

            if (object instanceof Type) // Try to handle any deferred extensions
                for (var i = 0; i < this.deferred.length;)
                    if (tryHandleExtension(this, this.deferred[i]))
                        this.deferred.splice(i, 1);
                    else
                        ++i;
            for (var j = 0; j < /* initializes */ object.nestedArray.length; ++j) // recurse into the namespace
                this._handleAdd(object._nestedArray[j]);
            if (exposeRe.test(object.name))
                object.parent[object.name] = object; // expose namespace as property of its parent
        }

        // The above also adds uppercased (and thus conflict-free) nested types, services and enums as
        // properties of namespaces just like static code does. This allows using a .d.ts generated for
        // a static module with reflection-based solutions where the condition is met.
    };

    /**
     * Called when any object is removed from this root or its sub-namespaces.
     * @param {ReflectionObject} object Object removed
     * @returns {undefined}
     * @private
     */
    Root.prototype._handleRemove = function _handleRemove(object) {
        if (object instanceof Field) {

            if (/* an extension field */ object.extend !== undefined) {
                if (/* already handled */ object.extensionField) { // remove its sister field
                    object.extensionField.parent.remove(object.extensionField);
                    object.extensionField = null;
                } else { // cancel the extension
                    var index = this.deferred.indexOf(object);
                    /* istanbul ignore else */
                    if (index > -1)
                        this.deferred.splice(index, 1);
                }
            }

        } else if (object instanceof Enum) {

            if (exposeRe.test(object.name))
                delete object.parent[object.name]; // unexpose enum values

        } else if (object instanceof Namespace) {

            for (var i = 0; i < /* initializes */ object.nestedArray.length; ++i) // recurse into the namespace
                this._handleRemove(object._nestedArray[i]);

            if (exposeRe.test(object.name))
                delete object.parent[object.name]; // unexpose namespaces

        }
    };

    // Sets up cyclic dependencies (called in index-light)
    Root._configure = function(Type_, parse_, common_) {
        Type = Type_;
        parse = parse_;
        common = common_;
    };
    return root;
}
|
|
|
|
var hasRequiredUtil;

// CommonJS-style lazy module wrapper for protobuf's reflection-level utilities.
// First call populates util$4.exports; later calls return the cached export.
function requireUtil () {
    if (hasRequiredUtil) return util$4.exports;
    hasRequiredUtil = 1;
    (function (module) {

        /**
         * Various utility functions.
         * @namespace
         */
        var util = module.exports = requireMinimal$2();

        var roots = roots$1;

        var Type, // cyclic
            Enum;

        util.codegen = requireCodegen();
        util.fetch = requireFetch();
        util.path = requirePath();

        /**
         * Node's fs module if available.
         * @type {Object.<string,*>}
         */
        util.fs = util.inquire("fs");

        /**
         * Converts an object's values to an array.
         * @param {Object.<string,*>} object Object to convert
         * @returns {Array.<*>} Converted array
         */
        util.toArray = function toArray(object) {
            if (object) {
                var keys = Object.keys(object),
                    array = new Array(keys.length),
                    index = 0;
                while (index < keys.length)
                    array[index] = object[keys[index++]];
                return array;
            }
            return [];
        };

        /**
         * Converts an array of keys immediately followed by their respective value to an object, omitting undefined values.
         * @param {Array.<*>} array Array to convert
         * @returns {Object.<string,*>} Converted object
         */
        util.toObject = function toObject(array) {
            var object = {},
                index = 0;
            while (index < array.length) {
                var key = array[index++],
                    val = array[index++];
                if (val !== undefined)
                    object[key] = val;
            }
            return object;
        };

        var safePropBackslashRe = /\\/g,
            safePropQuoteRe = /"/g;

        /**
         * Tests whether the specified name is a reserved word in JS.
         * @param {string} name Name to test
         * @returns {boolean} `true` if reserved, otherwise `false`
         */
        util.isReserved = function isReserved(name) {
            return /^(?:do|if|in|for|let|new|try|var|case|else|enum|eval|false|null|this|true|void|with|break|catch|class|const|super|throw|while|yield|delete|export|import|public|return|static|switch|typeof|default|extends|finally|package|private|continue|debugger|function|arguments|interface|protected|implements|instanceof)$/.test(name);
        };

        /**
         * Returns a safe property accessor for the specified property name.
         * @param {string} prop Property name
         * @returns {string} Safe accessor
         */
        util.safeProp = function safeProp(prop) {
            if (!/^[$\w_]+$/.test(prop) || util.isReserved(prop))
                return "[\"" + prop.replace(safePropBackslashRe, "\\\\").replace(safePropQuoteRe, "\\\"") + "\"]";
            return "." + prop;
        };

        /**
         * Converts the first character of a string to upper case.
         * @param {string} str String to convert
         * @returns {string} Converted string
         */
        util.ucFirst = function ucFirst(str) {
            return str.charAt(0).toUpperCase() + str.substring(1);
        };

        var camelCaseRe = /_([a-z])/g;

        /**
         * Converts a string to camel case.
         * @param {string} str String to convert
         * @returns {string} Converted string
         */
        util.camelCase = function camelCase(str) {
            return str.substring(0, 1)
                 + str.substring(1)
                   .replace(camelCaseRe, function($0, $1) { return $1.toUpperCase(); });
        };

        /**
         * Compares reflected fields by id.
         * @param {Field} a First field
         * @param {Field} b Second field
         * @returns {number} Comparison value
         */
        util.compareFieldsById = function compareFieldsById(a, b) {
            return a.id - b.id;
        };

        /**
         * Decorator helper for types (TypeScript).
         * @param {Constructor<T>} ctor Constructor function
         * @param {string} [typeName] Type name, defaults to the constructor's name
         * @returns {Type} Reflected type
         * @template T extends Message<T>
         * @property {Root} root Decorators root
         */
        util.decorateType = function decorateType(ctor, typeName) {

            /* istanbul ignore if */
            if (ctor.$type) {
                if (typeName && ctor.$type.name !== typeName) {
                    util.decorateRoot.remove(ctor.$type);
                    ctor.$type.name = typeName;
                    util.decorateRoot.add(ctor.$type);
                }
                return ctor.$type;
            }

            /* istanbul ignore next */
            if (!Type)
                Type = requireType();

            var type = new Type(typeName || ctor.name);
            util.decorateRoot.add(type);
            type.ctor = ctor; // sets up .encode, .decode etc.
            Object.defineProperty(ctor, "$type", { value: type, enumerable: false });
            Object.defineProperty(ctor.prototype, "$type", { value: type, enumerable: false });
            return type;
        };

        var decorateEnumIndex = 0;

        /**
         * Decorator helper for enums (TypeScript).
         * @param {Object} object Enum object
         * @returns {Enum} Reflected enum
         */
        util.decorateEnum = function decorateEnum(object) {

            /* istanbul ignore if */
            if (object.$type)
                return object.$type;

            /* istanbul ignore next */
            if (!Enum)
                Enum = require_enum();

            var enm = new Enum("Enum" + decorateEnumIndex++, object);
            util.decorateRoot.add(enm);
            Object.defineProperty(object, "$type", { value: enm, enumerable: false });
            return enm;
        };

        /**
         * Sets the value of a property by property path. If a value already exists, it is turned to an array
         * @param {Object.<string,*>} dst Destination object
         * @param {string} path dot '.' delimited path of the property to set
         * @param {Object} value the value to set
         * @returns {Object.<string,*>} Destination object
         */
        util.setProperty = function setProperty(dst, path, value) {
            function setProp(dst, path, value) {
                var part = path.shift();
                // SECURITY FIX: also refuse "prototype" (not just "__proto__").
                // A path like "constructor.prototype.x" would otherwise pollute
                // Object.prototype via attacker-controlled option paths
                // (protobufjs advisory GHSA-h755-8qp9-cq85 / CVE-2023-36665).
                if (part === "__proto__" || part === "prototype") {
                    return dst;
                }
                if (path.length > 0) {
                    dst[part] = setProp(dst[part] || {}, path, value);
                } else {
                    // Repeated assignments to the same path accumulate into an array.
                    var prevValue = dst[part];
                    if (prevValue)
                        value = [].concat(prevValue).concat(value);
                    dst[part] = value;
                }
                return dst;
            }

            if (typeof dst !== "object")
                throw TypeError("dst must be an object");
            if (!path)
                throw TypeError("path must be specified");

            path = path.split(".");
            return setProp(dst, path, value);
        };

        /**
         * Decorator root (TypeScript).
         * @name util.decorateRoot
         * @type {Root}
         * @readonly
         */
        Object.defineProperty(util, "decorateRoot", {
            get: function() {
                // Created lazily on first decorator use and then shared.
                return roots["decorated"] || (roots["decorated"] = new (requireRoot())());
            }
        });
    } (util$4));
    return util$4.exports;
}
|
|
|
|
var object;
|
|
var hasRequiredObject;
|
|
|
|
function requireObject () {
|
|
if (hasRequiredObject) return object;
|
|
hasRequiredObject = 1;
|
|
object = ReflectionObject;
|
|
|
|
ReflectionObject.className = "ReflectionObject";
|
|
|
|
var util = requireUtil();
|
|
|
|
var Root; // cyclic
|
|
|
|
/**
 * Constructs a new reflection object instance.
 * @classdesc Base class of all reflection objects.
 * @constructor
 * @param {string} name Object name
 * @param {Object.<string,*>} [options] Declared options
 * @throws {TypeError} If `name` is not a string or `options` is given but not an object
 * @abstract
 */
function ReflectionObject(name, options) {

    if (!util.isString(name))
        throw TypeError("name must be a string");

    if (options && !util.isObject(options))
        throw TypeError("options must be an object");

    /**
     * Options.
     * @type {Object.<string,*>|undefined}
     */
    this.options = options; // toJSON

    /**
     * Parsed Options.
     * @type {Array.<Object.<string,*>>|undefined}
     */
    this.parsedOptions = null;

    /**
     * Unique name within its namespace.
     * @type {string}
     */
    this.name = name;

    /**
     * Parent namespace.
     * @type {Namespace|null}
     */
    this.parent = null; // set by onAdd / cleared by onRemove

    /**
     * Whether already resolved or not.
     * @type {boolean}
     */
    this.resolved = false;

    /**
     * Comment text, if any.
     * @type {string|null}
     */
    this.comment = null;

    /**
     * Defining file name.
     * @type {string|null}
     */
    this.filename = null;
}
|
|
|
|
Object.defineProperties(ReflectionObject.prototype, {

    /**
     * Reference to the root namespace.
     * @name ReflectionObject#root
     * @type {Root}
     * @readonly
     */
    root: {
        get: function() {
            // Walk up until the node with no parent, i.e. the root.
            var node = this;
            while (node.parent !== null)
                node = node.parent;
            return node;
        }
    },

    /**
     * Full name including leading dot.
     * @name ReflectionObject#fullName
     * @type {string}
     * @readonly
     */
    fullName: {
        get: function() {
            // Collect names from this object up to the root, outermost first.
            var segments = [];
            for (var node = this; node; node = node.parent)
                segments.unshift(node.name);
            return segments.join(".");
        }
    }
});
|
|
|
|
/**
 * Converts this reflection object to its descriptor representation.
 * Abstract: every concrete subclass overrides this with its own descriptor shape.
 * @returns {Object.<string,*>} Descriptor
 * @abstract
 */
ReflectionObject.prototype.toJSON = /* istanbul ignore next */ function toJSON() {
    throw Error(); // not implemented, shouldn't happen
};
|
|
|
|
/**
 * Called when this object is added to a parent.
 * @param {ReflectionObject} parent Parent added to
 * @returns {undefined}
 */
ReflectionObject.prototype.onAdd = function onAdd(parent) {
    // Detach from any previous, different parent first.
    var previousParent = this.parent;
    if (previousParent && previousParent !== parent)
        previousParent.remove(this);
    this.parent = parent;
    this.resolved = false;
    // Notify the root (if attached to one) so it can expose/track this object.
    var newRoot = parent.root;
    if (newRoot instanceof Root)
        newRoot._handleAdd(this);
};
|
|
|
|
/**
 * Called when this object is removed from a parent.
 * @param {ReflectionObject} parent Parent removed from
 * @returns {undefined}
 */
ReflectionObject.prototype.onRemove = function onRemove(parent) {
    // Notify the root first, while parent linkage is still intact.
    var rootNs = parent.root;
    if (rootNs instanceof Root)
        rootNs._handleRemove(this);
    // Then sever the link and invalidate resolution state.
    this.parent = null;
    this.resolved = false;
};
|
|
|
|
/**
 * Resolves this objects type references.
 * @returns {ReflectionObject} `this`
 */
ReflectionObject.prototype.resolve = function resolve() {
    // Only objects attached to a Root can be marked resolved; detached
    // objects stay unresolved until re-added.
    if (!this.resolved && this.root instanceof Root)
        this.resolved = true;
    return this;
};
|
|
|
|
/**
 * Gets an option value.
 * @param {string} name Option name
 * @returns {*} Option value or `undefined` if not set
 */
ReflectionObject.prototype.getOption = function getOption(name) {
    var opts = this.options;
    return opts ? opts[name] : undefined;
};
|
|
|
|
/**
 * Sets an option.
 * @param {string} name Option name
 * @param {*} value Option value
 * @param {boolean} [ifNotSet] Sets the option only if it isn't currently set
 * @returns {ReflectionObject} `this`
 */
ReflectionObject.prototype.setOption = function setOption(name, value, ifNotSet) {
    var alreadySet = this.options && this.options[name] !== undefined;
    // Write unless the caller asked to preserve an existing value.
    if (!(ifNotSet && alreadySet)) {
        if (!this.options)
            this.options = {};
        this.options[name] = value;
    }
    return this;
};
|
|
|
|
/**
 * Sets a parsed option.
 * @param {string} name parsed Option name
 * @param {*} value Option value
 * @param {string} propName dot '.' delimited full path of property within the option to set. if undefined\empty, will add a new option with that value
 * @returns {ReflectionObject} `this`
 */
ReflectionObject.prototype.setParsedOption = function setParsedOption(name, value, propName) {
    if (!this.parsedOptions) {
        this.parsedOptions = [];
    }
    var parsedOptions = this.parsedOptions;
    if (propName) {
        // If setting a sub property of an option then try to merge it
        // with an existing option
        var opt = parsedOptions.find(function (opt) {
            return Object.prototype.hasOwnProperty.call(opt, name);
        });
        if (opt) {
            // If we found an existing option - just merge the property value.
            // Relies on util.setProperty updating newValue in place (its
            // return value is unused on this path).
            var newValue = opt[name];
            util.setProperty(newValue, propName, value);
        } else {
            // otherwise, create a new option, set its property and add it to the list
            opt = {};
            opt[name] = util.setProperty({}, propName, value);
            parsedOptions.push(opt);
        }
    } else {
        // Always create a new option when setting the value of the option itself
        var newOpt = {};
        newOpt[name] = value;
        parsedOptions.push(newOpt);
    }
    return this;
};
|
|
|
|
/**
 * Sets multiple options.
 * @param {Object.<string,*>} options Options to set
 * @param {boolean} [ifNotSet] Sets an option only if it isn't currently set
 * @returns {ReflectionObject} `this`
 */
ReflectionObject.prototype.setOptions = function setOptions(options, ifNotSet) {
    if (options) {
        // Delegate each entry to setOption so ifNotSet semantics apply uniformly.
        var names = Object.keys(options);
        for (var i = 0; i < names.length; ++i)
            this.setOption(names[i], options[names[i]], ifNotSet);
    }
    return this;
};
|
|
|
|
/**
 * Converts this instance to its string representation.
 * @returns {string} Class name[, space, full name]
 */
ReflectionObject.prototype.toString = function toString() {
    var qualified = this.fullName;
    // Append the full name only when this object actually has one.
    if (qualified.length)
        return this.constructor.className + " " + qualified;
    return this.constructor.className;
};
|
|
|
|
// Sets up cyclic dependencies (called in index-light)
// Root cannot be required from this module directly; its constructor is
// injected late to break the require cycle — see the
// `protobuf.ReflectionObject._configure(protobuf.Root)` call in the
// light-build setup further down in this file.
ReflectionObject._configure = function(Root_) {
    Root = Root_;
};
|
|
return object;
|
|
}
|
|
|
|
// Lazily-initialized module singleton for the Enum reflection class.
var _enum;
var hasRequired_enum;

// CommonJS-style lazy require wrapper produced by the bundler: the module
// body runs once and the result is cached in `_enum`.
function require_enum () {
    if (hasRequired_enum) return _enum;
    hasRequired_enum = 1;
    _enum = Enum;

    // extends ReflectionObject
    var ReflectionObject = requireObject();
    ((Enum.prototype = Object.create(ReflectionObject.prototype)).constructor = Enum).className = "Enum";

    var Namespace = requireNamespace(),
        util = requireUtil();

    /**
     * Constructs a new enum instance.
     * @classdesc Reflected enum.
     * @extends ReflectionObject
     * @constructor
     * @param {string} name Unique name within its namespace
     * @param {Object.<string,number>} [values] Enum values as an object, by name
     * @param {Object.<string,*>} [options] Declared options
     * @param {string} [comment] The comment for this enum
     * @param {Object.<string,string>} [comments] The value comments for this enum
     * @param {Object.<string,Object<string,*>>|undefined} [valuesOptions] The value options for this enum
     */
    function Enum(name, values, options, comment, comments, valuesOptions) {
        ReflectionObject.call(this, name, options);

        if (values && typeof values !== "object")
            throw TypeError("values must be an object");

        /**
         * Enum values by id.
         * @type {Object.<number,string>}
         */
        this.valuesById = {};

        /**
         * Enum values by name.
         * @type {Object.<string,number>}
         */
        this.values = Object.create(this.valuesById); // toJSON, marker

        /**
         * Enum comment text.
         * @type {string|null}
         */
        this.comment = comment;

        /**
         * Value comment texts, if any.
         * @type {Object.<string,string>}
         */
        this.comments = comments || {};

        /**
         * Values options, if any
         * @type {Object<string, Object<string, *>>|undefined}
         */
        this.valuesOptions = valuesOptions;

        /**
         * Reserved ranges, if any.
         * @type {Array.<number[]|string>}
         */
        this.reserved = undefined; // toJSON

        // Note that values inherit valuesById on their prototype which makes them a TypeScript-
        // compatible enum. This is used by pbts to write actual enum definitions that work for
        // static and reflection code alike instead of emitting generic object definitions.

        if (values)
            for (var keys = Object.keys(values), i = 0; i < keys.length; ++i)
                if (typeof values[keys[i]] === "number") // use forward entries only
                    this.valuesById[ this.values[keys[i]] = values[keys[i]] ] = keys[i];
    }

    /**
     * Enum descriptor.
     * @interface IEnum
     * @property {Object.<string,number>} values Enum values
     * @property {Object.<string,*>} [options] Enum options
     */

    /**
     * Constructs an enum from an enum descriptor.
     * @param {string} name Enum name
     * @param {IEnum} json Enum descriptor
     * @returns {Enum} Created enum
     * @throws {TypeError} If arguments are invalid
     */
    Enum.fromJSON = function fromJSON(name, json) {
        var enm = new Enum(name, json.values, json.options, json.comment, json.comments);
        enm.reserved = json.reserved;
        return enm;
    };

    /**
     * Converts this enum to an enum descriptor.
     * @param {IToJSONOptions} [toJSONOptions] JSON conversion options
     * @returns {IEnum} Enum descriptor
     */
    Enum.prototype.toJSON = function toJSON(toJSONOptions) {
        var keepComments = toJSONOptions ? Boolean(toJSONOptions.keepComments) : false;
        return util.toObject([
            "options" , this.options,
            "valuesOptions" , this.valuesOptions,
            "values" , this.values,
            "reserved" , this.reserved && this.reserved.length ? this.reserved : undefined,
            "comment" , keepComments ? this.comment : undefined,
            "comments" , keepComments ? this.comments : undefined
        ]);
    };

    /**
     * Adds a value to this enum.
     * @param {string} name Value name
     * @param {number} id Value id
     * @param {string} [comment] Comment, if any
     * @param {Object.<string, *>|undefined} [options] Options, if any
     * @returns {Enum} `this`
     * @throws {TypeError} If arguments are invalid
     * @throws {Error} If there is already a value with this name or id
     */
    Enum.prototype.add = function add(name, id, comment, options) {
        // utilized by the parser but not by .fromJSON

        if (!util.isString(name))
            throw TypeError("name must be a string");

        if (!util.isInteger(id))
            throw TypeError("id must be an integer");

        if (this.values[name] !== undefined)
            throw Error("duplicate name '" + name + "' in " + this);

        if (this.isReservedId(id))
            throw Error("id " + id + " is reserved in " + this);

        if (this.isReservedName(name))
            throw Error("name '" + name + "' is reserved in " + this);

        if (this.valuesById[id] !== undefined) {
            if (!(this.options && this.options.allow_alias))
                throw Error("duplicate id " + id + " in " + this);
            // Aliased id: only record the name->id mapping, keeping the
            // original name as the id's canonical reverse mapping.
            this.values[name] = id;
        } else
            this.valuesById[this.values[name] = id] = name;

        if (options) {
            if (this.valuesOptions === undefined)
                this.valuesOptions = {};
            this.valuesOptions[name] = options || null;
        }

        this.comments[name] = comment || null;
        return this;
    };

    /**
     * Removes a value from this enum
     * @param {string} name Value name
     * @returns {Enum} `this`
     * @throws {TypeError} If arguments are invalid
     * @throws {Error} If `name` is not a name of this enum
     */
    Enum.prototype.remove = function remove(name) {

        if (!util.isString(name))
            throw TypeError("name must be a string");

        var val = this.values[name];
        if (val == null)
            throw Error("name '" + name + "' does not exist in " + this);

        delete this.valuesById[val];
        delete this.values[name];
        delete this.comments[name];
        if (this.valuesOptions)
            delete this.valuesOptions[name];

        return this;
    };

    /**
     * Tests if the specified id is reserved.
     * @param {number} id Id to test
     * @returns {boolean} `true` if reserved, otherwise `false`
     */
    Enum.prototype.isReservedId = function isReservedId(id) {
        return Namespace.isReservedId(this.reserved, id);
    };

    /**
     * Tests if the specified name is reserved.
     * @param {string} name Name to test
     * @returns {boolean} `true` if reserved, otherwise `false`
     */
    Enum.prototype.isReservedName = function isReservedName(name) {
        return Namespace.isReservedName(this.reserved, name);
    };

    return _enum;
}
|
|
|
|
// Lazily-initialized module singleton for the message encoder generator.
var encoder_1;
var hasRequiredEncoder;

// CommonJS-style lazy require wrapper produced by the bundler.
function requireEncoder () {
    if (hasRequiredEncoder) return encoder_1;
    hasRequiredEncoder = 1;
    encoder_1 = encoder;

    var Enum = require_enum(),
        types = requireTypes(),
        util = requireUtil();

    /**
     * Generates a partial message type encoder.
     * @param {Codegen} gen Codegen instance
     * @param {Field} field Reflected field
     * @param {number} fieldIndex Field index
     * @param {string} ref Variable reference
     * @returns {Codegen} Codegen instance
     * @ignore
     */
    function genTypePartial(gen, field, fieldIndex, ref) {
        // (id << 3 | wireType) >>> 0 is the field's wire-format tag. Groups are
        // bracketed by start/end tags (wire types 3 and 4); other message
        // fields are length-delimited (wire type 2) via fork()/ldelim().
        return field.resolvedType.group
            ? gen("types[%i].encode(%s,w.uint32(%i)).uint32(%i)", fieldIndex, ref, (field.id << 3 | 3) >>> 0, (field.id << 3 | 4) >>> 0)
            : gen("types[%i].encode(%s,w.uint32(%i).fork()).ldelim()", fieldIndex, ref, (field.id << 3 | 2) >>> 0);
    }

    /**
     * Generates an encoder specific to the specified message type.
     * @param {Type} mtype Message type
     * @returns {Codegen} Codegen instance
     */
    function encoder(mtype) {
        /* eslint-disable no-unexpected-multiline, block-scoped-var, no-redeclare */
        // Generated function signature is (m, w). The generated source refers
        // to `Writer` and `types`, which util.codegen's compilation step is
        // expected to bind — NOTE(review): that utility lives outside this
        // chunk; confirm against the codegen module.
        var gen = util.codegen(["m", "w"], mtype.name + "$encode")
        ("if(!w)")
        ("w=Writer.create()");

        var i, ref;

        // "when a message is serialized its known fields should be written sequentially by field number"
        var fields = /* initializes */ mtype.fieldsArray.slice().sort(util.compareFieldsById);

        for (var i = 0; i < fields.length; ++i) {
            var field = fields[i].resolve(),
                index = mtype._fieldsArray.indexOf(field),
                type = field.resolvedType instanceof Enum ? "int32" : field.type,
                wireType = types.basic[type];
            ref = "m" + util.safeProp(field.name);

            // Map fields
            if (field.map) {
                gen
        ("if(%s!=null&&Object.hasOwnProperty.call(m,%j)){", ref, field.name) // !== undefined && !== null
        ("for(var ks=Object.keys(%s),i=0;i<ks.length;++i){", ref)
        ("w.uint32(%i).fork().uint32(%i).%s(ks[i])", (field.id << 3 | 2) >>> 0, 8 | types.mapKey[field.keyType], field.keyType);
                // 18 = (2 << 3 | 2): tag of the map entry's value field
                // (field 2, length-delimited).
                if (wireType === undefined) gen
        ("types[%i].encode(%s[ks[i]],w.uint32(18).fork()).ldelim().ldelim()", index, ref); // can't be groups
                else gen
        (".uint32(%i).%s(%s[ks[i]]).ldelim()", 16 | wireType, type, ref);
                gen
        ("}")
        ("}");

            // Repeated fields
            } else if (field.repeated) { gen
        ("if(%s!=null&&%s.length){", ref, ref); // !== undefined && !== null

                // Packed repeated
                if (field.packed && types.packed[type] !== undefined) { gen

        ("w.uint32(%i).fork()", (field.id << 3 | 2) >>> 0)
        ("for(var i=0;i<%s.length;++i)", ref)
        ("w.%s(%s[i])", type, ref)
        ("w.ldelim()");

                // Non-packed
                } else { gen

        ("for(var i=0;i<%s.length;++i)", ref);
                    if (wireType === undefined)
                        genTypePartial(gen, field, index, ref + "[i]");
                    else gen
        ("w.uint32(%i).%s(%s[i])", (field.id << 3 | wireType) >>> 0, type, ref);

                } gen
        ("}");

            // Non-repeated
            } else {
                if (field.optional) gen
        ("if(%s!=null&&Object.hasOwnProperty.call(m,%j))", ref, field.name); // !== undefined && !== null

                if (wireType === undefined)
                    genTypePartial(gen, field, index, ref);
                else gen
        ("w.uint32(%i).%s(%s)", (field.id << 3 | wireType) >>> 0, type, ref);

            }
        }

        return gen
        ("return w");
        /* eslint-enable no-unexpected-multiline, block-scoped-var, no-redeclare */
    }
    return encoder_1;
}
|
|
|
|
// Light-build entry point: extends the minimal runtime (indexMinimal$1) with
// reflection support and wires the cyclic reflection dependencies.
(function (module) {
    var protobuf = module.exports = indexMinimal$1;

    protobuf.build = "light";

    /**
     * A node-style callback as used by {@link load} and {@link Root#load}.
     * @typedef LoadCallback
     * @type {function}
     * @param {Error|null} error Error, if any, otherwise `null`
     * @param {Root} [root] Root, if there hasn't been an error
     * @returns {undefined}
     */

    /**
     * Loads one or multiple .proto or preprocessed .json files into a common root namespace and calls the callback.
     * @param {string|string[]} filename One or multiple files to load
     * @param {Root} root Root namespace, defaults to create a new one if omitted.
     * @param {LoadCallback} callback Callback function
     * @returns {undefined}
     * @see {@link Root#load}
     */
    function load(filename, root, callback) {
        // Support the (filename, callback) overload.
        if (typeof root === "function") {
            callback = root;
            root = new protobuf.Root();
        } else if (!root)
            root = new protobuf.Root();
        return root.load(filename, callback);
    }

    /**
     * Loads one or multiple .proto or preprocessed .json files into a common root namespace and calls the callback.
     * @name load
     * @function
     * @param {string|string[]} filename One or multiple files to load
     * @param {LoadCallback} callback Callback function
     * @returns {undefined}
     * @see {@link Root#load}
     * @variation 2
     */
    // function load(filename:string, callback:LoadCallback):undefined

    /**
     * Loads one or multiple .proto or preprocessed .json files into a common root namespace and returns a promise.
     * @name load
     * @function
     * @param {string|string[]} filename One or multiple files to load
     * @param {Root} [root] Root namespace, defaults to create a new one if omitted.
     * @returns {Promise<Root>} Promise
     * @see {@link Root#load}
     * @variation 3
     */
    // function load(filename:string, [root:Root]):Promise<Root>

    protobuf.load = load;

    /**
     * Synchronously loads one or multiple .proto or preprocessed .json files into a common root namespace (node only).
     * @param {string|string[]} filename One or multiple files to load
     * @param {Root} [root] Root namespace, defaults to create a new one if omitted.
     * @returns {Root} Root namespace
     * @throws {Error} If synchronous fetching is not supported (i.e. in browsers) or if a file's syntax is invalid
     * @see {@link Root#loadSync}
     */
    function loadSync(filename, root) {
        if (!root)
            root = new protobuf.Root();
        return root.loadSync(filename);
    }

    protobuf.loadSync = loadSync;

    // Serialization
    protobuf.encoder = requireEncoder();
    protobuf.decoder = requireDecoder();
    protobuf.verifier = requireVerifier();
    protobuf.converter = requireConverter();

    // Reflection
    protobuf.ReflectionObject = requireObject();
    protobuf.Namespace = requireNamespace();
    protobuf.Root = requireRoot();
    protobuf.Enum = require_enum();
    protobuf.Type = requireType();
    protobuf.Field = requireField();
    protobuf.OneOf = requireOneof();
    protobuf.MapField = requireMapfield();
    protobuf.Service = requireService$1();
    protobuf.Method = requireMethod();

    // Runtime
    protobuf.Message = message$1;
    protobuf.wrappers = wrappers;

    // Utility
    protobuf.types = requireTypes();
    protobuf.util = requireUtil();

    // Set up possibly cyclic reflection dependencies
    protobuf.ReflectionObject._configure(protobuf.Root);
    protobuf.Namespace._configure(protobuf.Type, protobuf.Service, protobuf.Enum);
    protobuf.Root._configure(protobuf.Type);
    protobuf.Field._configure(protobuf.Type);
} (indexLight));
|
|
|
|
// Public tokenizer entry (exported as this module's value).
var tokenize_1 = tokenize$1;

// Token delimiters and string-literal matchers. These are /g so callers
// reposition them via lastIndex before matching.
var delimRe = /[\s{}=;:[\],'"()<>]/g,
    stringDoubleRe = /(?:"([^"\\]*(?:\\.[^"\\]*)*)")/g,
    stringSingleRe = /(?:'([^'\\]*(?:\\.[^'\\]*)*)')/g;

// Comment trimming/splitting helpers used by setComment, plus the escape
// matcher used by unescape (captures the character after each backslash).
var setCommentRe = /^ *[*/]+ */,
    setCommentAltRe = /^\s*\*?\/*/,
    setCommentSplitRe = /\n/g,
    whitespaceRe = /\s/,
    unescapeRe = /\\(.?)/g;

// Recognized escape sequences; escapes not listed here (other than "\\")
// unescape to the empty string — see unescape below.
var unescapeMap = {
    "0": "\0",
    "r": "\r",
    "n": "\n",
    "t": "\t"
};
|
|
|
|
/**
 * Unescapes a string.
 * @param {string} str String to unescape
 * @returns {string} Unescaped string
 * @property {Object.<string,string>} map Special characters map
 * @memberof tokenize
 */
function unescape$1(str) {
    // unescapeRe captures the single character following each backslash
    // (or "" when the backslash ends the string).
    return str.replace(unescapeRe, function(match, escaped) {
        // A literal "\\" and a trailing lone backslash pass through;
        // known escapes map through unescapeMap; unknown ones are dropped.
        if (escaped === "\\" || escaped === "")
            return escaped;
        return unescapeMap[escaped] || "";
    });
}

tokenize$1.unescape = unescape$1;
|
|
|
|
/**
|
|
* Gets the next token and advances.
|
|
* @typedef TokenizerHandleNext
|
|
* @type {function}
|
|
* @returns {string|null} Next token or `null` on eof
|
|
*/
|
|
|
|
/**
|
|
* Peeks for the next token.
|
|
* @typedef TokenizerHandlePeek
|
|
* @type {function}
|
|
* @returns {string|null} Next token or `null` on eof
|
|
*/
|
|
|
|
/**
|
|
* Pushes a token back to the stack.
|
|
* @typedef TokenizerHandlePush
|
|
* @type {function}
|
|
* @param {string} token Token
|
|
* @returns {undefined}
|
|
*/
|
|
|
|
/**
|
|
* Skips the next token.
|
|
* @typedef TokenizerHandleSkip
|
|
* @type {function}
|
|
* @param {string} expected Expected token
|
|
* @param {boolean} [optional=false] If optional
|
|
* @returns {boolean} Whether the token matched
|
|
* @throws {Error} If the token didn't match and is not optional
|
|
*/
|
|
|
|
/**
|
|
* Gets the comment on the previous line or, alternatively, the line comment on the specified line.
|
|
* @typedef TokenizerHandleCmnt
|
|
* @type {function}
|
|
* @param {number} [line] Line number
|
|
* @returns {string|null} Comment text or `null` if none
|
|
*/
|
|
|
|
/**
|
|
* Handle object returned from {@link tokenize}.
|
|
* @interface ITokenizerHandle
|
|
* @property {TokenizerHandleNext} next Gets the next token and advances (`null` on eof)
|
|
* @property {TokenizerHandlePeek} peek Peeks for the next token (`null` on eof)
|
|
* @property {TokenizerHandlePush} push Pushes a token back to the stack
|
|
* @property {TokenizerHandleSkip} skip Skips a token, returns its presence and advances or, if non-optional and not present, throws
|
|
* @property {TokenizerHandleCmnt} cmnt Gets the comment on the previous line or the line comment on the specified line, if any
|
|
* @property {number} line Current line number
|
|
*/
|
|
|
|
/**
 * Tokenizes the given .proto source and returns an object with useful utility functions.
 * @param {string} source Source contents
 * @param {boolean} alternateCommentMode Whether we should activate alternate comment parsing mode.
 * @returns {ITokenizerHandle} Tokenizer handle
 */
function tokenize$1(source, alternateCommentMode) {
    /* eslint-disable callback-return */
    source = source.toString();

    // Scanner state: current read offset, 1-based line counter, and the
    // doc comments collected so far keyed by the line they end on.
    var offset = 0,
        length = source.length,
        line = 1,
        lastCommentLine = 0,
        comments = {};

    // Tokens pushed back via push() are consumed before scanning resumes.
    var stack = [];

    // Non-null while positioned inside a string literal; holds the quote char.
    var stringDelim = null;

    /* istanbul ignore next */
    /**
     * Creates an error for illegal syntax.
     * @param {string} subject Subject
     * @returns {Error} Error created
     * @inner
     */
    function illegal(subject) {
        return Error("illegal " + subject + " (line " + line + ")");
    }

    /**
     * Reads a string till its end.
     * @returns {string} String read
     * @inner
     */
    function readString() {
        var re = stringDelim === "'" ? stringSingleRe : stringDoubleRe;
        re.lastIndex = offset - 1; // include the opening quote in the match
        var match = re.exec(source);
        if (!match)
            throw illegal("string");
        offset = re.lastIndex;
        push(stringDelim); // the closing quote becomes the next token
        stringDelim = null;
        return unescape$1(match[1]);
    }

    /**
     * Gets the character at `pos` within the source.
     * @param {number} pos Position
     * @returns {string} Character
     * @inner
     */
    function charAt(pos) {
        return source.charAt(pos);
    }

    /**
     * Sets the current comment text.
     * @param {number} start Start offset
     * @param {number} end End offset
     * @param {boolean} isLeading set if a leading comment
     * @returns {undefined}
     * @inner
     */
    function setComment(start, end, isLeading) {
        var comment = {
            type: source.charAt(start++),
            lineEmpty: false,
            leading: isLeading,
        };
        var lookback;
        if (alternateCommentMode) {
            lookback = 2; // alternate comment parsing: "//" or "/*"
        } else {
            lookback = 3; // "///" or "/**"
        }
        // Scan backwards from the comment opener: if only spaces/tabs precede
        // it on its line, the comment stands on its own (lineEmpty).
        var commentOffset = start - lookback,
            c;
        do {
            if (--commentOffset < 0 ||
                    (c = source.charAt(commentOffset)) === "\n") {
                comment.lineEmpty = true;
                break;
            }
        } while (c === " " || c === "\t");
        // Strip per-line comment decoration and normalize whitespace.
        var lines = source
            .substring(start, end)
            .split(setCommentSplitRe);
        for (var i = 0; i < lines.length; ++i)
            lines[i] = lines[i]
                .replace(alternateCommentMode ? setCommentAltRe : setCommentRe, "")
                .trim();
        comment.text = lines
            .join("\n")
            .trim();

        comments[line] = comment;
        lastCommentLine = line;
    }

    // Tests whether the rest of the line at startOffset is a "//" comment.
    function isDoubleSlashCommentLine(startOffset) {
        var endOffset = findEndOfLine(startOffset);

        // see if remaining line matches comment pattern
        var lineText = source.substring(startOffset, endOffset);
        // look for 1 or 2 slashes since startOffset would already point past
        // the first slash that started the comment.
        var isComment = /^\s*\/{1,2}/.test(lineText);
        return isComment;
    }

    // Returns the offset of the "\n" terminating cursor's line (or eof).
    function findEndOfLine(cursor) {
        // find end of cursor's line
        var endOffset = cursor;
        while (endOffset < length && charAt(endOffset) !== "\n") {
            endOffset++;
        }
        return endOffset;
    }

    /**
     * Obtains the next token.
     * @returns {string|null} Next token or `null` on eof
     * @inner
     */
    function next() {
        if (stack.length > 0)
            return stack.shift();
        if (stringDelim)
            return readString();
        var repeat,
            prev,
            curr,
            start,
            isDoc,
            isLeadingComment = offset === 0;
        do {
            if (offset === length)
                return null;
            repeat = false;
            // Skip whitespace, tracking line numbers.
            while (whitespaceRe.test(curr = charAt(offset))) {
                if (curr === "\n") {
                    isLeadingComment = true;
                    ++line;
                }
                if (++offset === length)
                    return null;
            }

            if (charAt(offset) === "/") {
                if (++offset === length) {
                    throw illegal("comment");
                }
                if (charAt(offset) === "/") { // Line
                    if (!alternateCommentMode) {
                        // check for triple-slash comment
                        isDoc = charAt(start = offset + 1) === "/";

                        while (charAt(++offset) !== "\n") {
                            if (offset === length) {
                                return null;
                            }
                        }
                        ++offset;
                        if (isDoc) {
                            setComment(start, offset - 1, isLeadingComment);
                            // Trailing comment cannot be multi-line,
                            // so leading comment state should be reset to handle potential next comments
                            isLeadingComment = true;
                        }
                        ++line;
                        repeat = true;
                    } else {
                        // check for double-slash comments, consolidating consecutive lines
                        start = offset;
                        isDoc = false;
                        if (isDoubleSlashCommentLine(offset)) {
                            isDoc = true;
                            do {
                                offset = findEndOfLine(offset);
                                if (offset === length) {
                                    break;
                                }
                                offset++;
                                if (!isLeadingComment) {
                                    // Trailing comment cannot be multi-line
                                    break;
                                }
                            } while (isDoubleSlashCommentLine(offset));
                        } else {
                            offset = Math.min(length, findEndOfLine(offset) + 1);
                        }
                        if (isDoc) {
                            setComment(start, offset, isLeadingComment);
                            isLeadingComment = true;
                        }
                        line++;
                        repeat = true;
                    }
                } else if ((curr = charAt(offset)) === "*") { /* Block */
                    // check for /** (regular comment mode) or /* (alternate comment mode)
                    start = offset + 1;
                    isDoc = alternateCommentMode || charAt(start) === "*";
                    do {
                        if (curr === "\n") {
                            ++line;
                        }
                        if (++offset === length) {
                            throw illegal("comment");
                        }
                        prev = curr;
                        curr = charAt(offset);
                    } while (prev !== "*" || curr !== "/");
                    ++offset;
                    if (isDoc) {
                        setComment(start, offset - 2, isLeadingComment);
                        isLeadingComment = true;
                    }
                    repeat = true;
                } else {
                    // A lone "/" is an ordinary token.
                    return "/";
                }
            }
        } while (repeat);

        // offset !== length if we got here

        // Scan up to the next delimiter; a delimiter character is itself a
        // one-character token.
        var end = offset;
        delimRe.lastIndex = 0;
        var delim = delimRe.test(charAt(end++));
        if (!delim)
            while (end < length && !delimRe.test(charAt(end)))
                ++end;
        var token = source.substring(offset, offset = end);
        if (token === "\"" || token === "'")
            stringDelim = token; // next call will read the string body
        return token;
    }

    /**
     * Pushes a token back to the stack.
     * @param {string} token Token
     * @returns {undefined}
     * @inner
     */
    function push(token) {
        stack.push(token);
    }

    /**
     * Peeks for the next token.
     * @returns {string|null} Token or `null` on eof
     * @inner
     */
    function peek() {
        if (!stack.length) {
            var token = next();
            if (token === null)
                return null;
            push(token);
        }
        return stack[0];
    }

    /**
     * Skips a token.
     * @param {string} expected Expected token
     * @param {boolean} [optional=false] Whether the token is optional
     * @returns {boolean} `true` when skipped, `false` if not
     * @throws {Error} When a required token is not present
     * @inner
     */
    function skip(expected, optional) {
        var actual = peek(),
            equals = actual === expected;
        if (equals) {
            next();
            return true;
        }
        if (!optional)
            throw illegal("token '" + actual + "', '" + expected + "' expected");
        return false;
    }

    /**
     * Gets a comment.
     * @param {number} [trailingLine] Line number if looking for a trailing comment
     * @returns {string|null} Comment text
     * @inner
     */
    function cmnt(trailingLine) {
        var ret = null;
        var comment;
        if (trailingLine === undefined) {
            // Leading comment: must end on the line directly above the cursor.
            comment = comments[line - 1];
            delete comments[line - 1];
            if (comment && (alternateCommentMode || comment.type === "*" || comment.lineEmpty)) {
                ret = comment.leading ? comment.text : null;
            }
        } else {
            /* istanbul ignore else */
            if (lastCommentLine < trailingLine) {
                peek(); // advance the scanner so the trailing comment is collected
            }
            comment = comments[trailingLine];
            delete comments[trailingLine];
            if (comment && !comment.lineEmpty && (alternateCommentMode || comment.type === "/")) {
                ret = comment.leading ? null : comment.text;
            }
        }
        return ret;
    }

    return Object.defineProperty({
        next: next,
        peek: peek,
        push: push,
        skip: skip,
        cmnt: cmnt
    }, "line", {
        get: function() { return line; }
    });
    /* eslint-enable callback-return */
}
|
|
|
|
// Public parser entry (exported as this module's value).
var parse_1 = parse$1;

// Currently processing file name for error reporting, if known.
parse$1.filename = null;
// Default parse options (see IParseOptions below).
parse$1.defaults = { keepCase: false };

var tokenize = tokenize_1,
    Root = requireRoot(),
    Type = requireType(),
    Field = requireField(),
    MapField = requireMapfield(),
    OneOf = requireOneof(),
    Enum = require_enum(),
    Service = requireService$1(),
    Method = requireMethod(),
    types = requireTypes(),
    util$2 = requireUtil();

// Numeric literal classifiers (decimal, hex, octal, float) used when parsing
// number tokens (see parseNumber), plus identifier and type-reference shapes.
var base10Re = /^[1-9][0-9]*$/,
    base10NegRe = /^-?[1-9][0-9]*$/,
    base16Re = /^0[x][0-9a-fA-F]+$/,
    base16NegRe = /^-?0[x][0-9a-fA-F]+$/,
    base8Re = /^0[0-7]+$/,
    base8NegRe = /^-?0[0-7]+$/,
    numberRe = /^(?![eE])[0-9]*(?:\.[0-9]*)?(?:[eE][+-]?[0-9]+)?$/,
    nameRe = /^[a-zA-Z_][a-zA-Z_0-9]*$/,
    typeRefRe = /^(?:\.?[a-zA-Z_][a-zA-Z_0-9]*)(?:\.[a-zA-Z_][a-zA-Z_0-9]*)*$/,
    fqTypeRefRe = /^(?:\.[a-zA-Z_][a-zA-Z_0-9]*)+$/;
|
|
|
|
/**
|
|
* Result object returned from {@link parse}.
|
|
* @interface IParserResult
|
|
* @property {string|undefined} package Package name, if declared
|
|
* @property {string[]|undefined} imports Imports, if any
|
|
* @property {string[]|undefined} weakImports Weak imports, if any
|
|
* @property {string|undefined} syntax Syntax, if specified (either `"proto2"` or `"proto3"`)
|
|
* @property {Root} root Populated root instance
|
|
*/
|
|
|
|
/**
|
|
* Options modifying the behavior of {@link parse}.
|
|
* @interface IParseOptions
|
|
* @property {boolean} [keepCase=false] Keeps field casing instead of converting to camel case
|
|
* @property {boolean} [alternateCommentMode=false] Recognize double-slash comments in addition to doc-block comments.
|
|
* @property {boolean} [preferTrailingComment=false] Use trailing comment when both leading comment and trailing comment exist.
|
|
*/
|
|
|
|
/**
|
|
* Options modifying the behavior of JSON serialization.
|
|
* @interface IToJSONOptions
|
|
* @property {boolean} [keepComments=false] Serializes comments.
|
|
*/
|
|
|
|
/**
|
|
* Parses the given .proto source and returns an object with the parsed contents.
|
|
* @param {string} source Source contents
|
|
* @param {Root} root Root to populate
|
|
* @param {IParseOptions} [options] Parse options. Defaults to {@link parse.defaults} when omitted.
|
|
* @returns {IParserResult} Parser result
|
|
* @property {string} filename=null Currently processing file name for error reporting, if known
|
|
* @property {IParseOptions} defaults Default {@link IParseOptions}
|
|
*/
|
|
function parse$1(source, root, options) {
|
|
/* eslint-disable callback-return */
|
|
if (!(root instanceof Root)) {
|
|
options = root;
|
|
root = new Root();
|
|
}
|
|
if (!options)
|
|
options = parse$1.defaults;
|
|
|
|
var preferTrailingComment = options.preferTrailingComment || false;
|
|
var tn = tokenize(source, options.alternateCommentMode || false),
|
|
next = tn.next,
|
|
push = tn.push,
|
|
peek = tn.peek,
|
|
skip = tn.skip,
|
|
cmnt = tn.cmnt;
|
|
|
|
var head = true,
|
|
pkg,
|
|
imports,
|
|
weakImports,
|
|
syntax,
|
|
isProto3 = false;
|
|
|
|
var ptr = root;
|
|
|
|
var applyCase = options.keepCase ? function(name) { return name; } : util$2.camelCase;
|
|
|
|
/**
 * Creates an error for an illegal token, annotated with the current file
 * name (if known) and line number. Clears parse$1.filename unless the
 * caller is probing inside a try/catch and may continue parsing.
 * @param {string} token Offending token
 * @param {string} [name] What the token was expected to be
 * @param {boolean} [insideTryCatch] Whether called from a recoverable probe
 * @returns {Error} Error created
 * @inner
 */
/* istanbul ignore next */
function illegal(token, name, insideTryCatch) {
    var filename = parse$1.filename;
    if (!insideTryCatch)
        parse$1.filename = null;
    return Error("illegal " + (name || "token") + " '" + token + "' (" + (filename ? filename + ", " : "") + "line " + tn.line + ")");
}
|
|
|
|
function readString() {
|
|
var values = [],
|
|
token;
|
|
do {
|
|
/* istanbul ignore if */
|
|
if ((token = next()) !== "\"" && token !== "'")
|
|
throw illegal(token);
|
|
|
|
values.push(next());
|
|
skip(token);
|
|
token = peek();
|
|
} while (token === "\"" || token === "'");
|
|
return values.join("");
|
|
}
|
|
|
|
function readValue(acceptTypeRef) {
|
|
var token = next();
|
|
switch (token) {
|
|
case "'":
|
|
case "\"":
|
|
push(token);
|
|
return readString();
|
|
case "true": case "TRUE":
|
|
return true;
|
|
case "false": case "FALSE":
|
|
return false;
|
|
}
|
|
try {
|
|
return parseNumber(token, /* insideTryCatch */ true);
|
|
} catch (e) {
|
|
|
|
/* istanbul ignore else */
|
|
if (acceptTypeRef && typeRefRe.test(token))
|
|
return token;
|
|
|
|
/* istanbul ignore next */
|
|
throw illegal(token, "value");
|
|
}
|
|
}
|
|
|
|
    /**
     * Reads a comma-separated list of ranges (used by `extensions` and
     * `reserved`) terminated by a semicolon. Each range is pushed as a
     * two-element [from, to] array; with acceptStrings, quoted field names
     * (reserved names) are pushed as plain strings.
     * @param {Array.<number[]|string>} target Array to push parsed entries into
     * @param {boolean} [acceptStrings] Whether string entries are allowed
     * @returns {undefined}
     */
    function readRanges(target, acceptStrings) {
        var token, start;
        do {
            if (acceptStrings && ((token = peek()) === "\"" || token === "'"))
                target.push(readString());
            else
                // "N to M" yields [N, M]; a lone id N yields [N, N]
                target.push([ start = parseId(next()), skip("to", true) ? parseId(next()) : start ]);
        } while (skip(",", true));
        skip(";");
    }
function parseNumber(token, insideTryCatch) {
|
|
var sign = 1;
|
|
if (token.charAt(0) === "-") {
|
|
sign = -1;
|
|
token = token.substring(1);
|
|
}
|
|
switch (token) {
|
|
case "inf": case "INF": case "Inf":
|
|
return sign * Infinity;
|
|
case "nan": case "NAN": case "Nan": case "NaN":
|
|
return NaN;
|
|
case "0":
|
|
return 0;
|
|
}
|
|
if (base10Re.test(token))
|
|
return sign * parseInt(token, 10);
|
|
if (base16Re.test(token))
|
|
return sign * parseInt(token, 16);
|
|
if (base8Re.test(token))
|
|
return sign * parseInt(token, 8);
|
|
|
|
/* istanbul ignore else */
|
|
if (numberRe.test(token))
|
|
return sign * parseFloat(token);
|
|
|
|
/* istanbul ignore next */
|
|
throw illegal(token, "number", insideTryCatch);
|
|
}
|
|
|
|
function parseId(token, acceptNegative) {
|
|
switch (token) {
|
|
case "max": case "MAX": case "Max":
|
|
return 536870911;
|
|
case "0":
|
|
return 0;
|
|
}
|
|
|
|
/* istanbul ignore if */
|
|
if (!acceptNegative && token.charAt(0) === "-")
|
|
throw illegal(token, "id");
|
|
|
|
if (base10NegRe.test(token))
|
|
return parseInt(token, 10);
|
|
if (base16NegRe.test(token))
|
|
return parseInt(token, 16);
|
|
|
|
/* istanbul ignore else */
|
|
if (base8NegRe.test(token))
|
|
return parseInt(token, 8);
|
|
|
|
/* istanbul ignore next */
|
|
throw illegal(token, "id");
|
|
}
|
|
|
|
    /**
     * Parses a `package` statement and descends the namespace pointer into
     * the declared package. Only one package statement is allowed per file.
     * @returns {undefined}
     */
    function parsePackage() {

        /* istanbul ignore if */
        if (pkg !== undefined)
            throw illegal("package");

        pkg = next();

        /* istanbul ignore if */
        if (!typeRefRe.test(pkg))
            throw illegal(pkg, "name");

        ptr = ptr.define(pkg); // subsequent definitions land inside the package
        skip(";");
    }
    /**
     * Parses an `import` statement. `weak` imports are collected separately
     * from regular ones; `public` imports are treated like plain imports
     * (note the intentional switch fall-through into the default case).
     * @returns {undefined}
     */
    function parseImport() {
        var token = peek();
        var whichImports;
        switch (token) {
            case "weak":
                whichImports = weakImports || (weakImports = []);
                next();
                break;
            case "public":
                next();
                // eslint-disable-line no-fallthrough
            default:
                whichImports = imports || (imports = []);
                break;
        }
        token = readString();
        skip(";");
        whichImports.push(token);
    }
function parseSyntax() {
|
|
skip("=");
|
|
syntax = readString();
|
|
isProto3 = syntax === "proto3";
|
|
|
|
/* istanbul ignore if */
|
|
if (!isProto3 && syntax !== "proto2")
|
|
throw illegal(syntax, "syntax");
|
|
|
|
skip(";");
|
|
}
|
|
|
|
function parseCommon(parent, token) {
|
|
switch (token) {
|
|
|
|
case "option":
|
|
parseOption(parent, token);
|
|
skip(";");
|
|
return true;
|
|
|
|
case "message":
|
|
parseType(parent, token);
|
|
return true;
|
|
|
|
case "enum":
|
|
parseEnum(parent, token);
|
|
return true;
|
|
|
|
case "service":
|
|
parseService(parent, token);
|
|
return true;
|
|
|
|
case "extend":
|
|
parseExtension(parent, token);
|
|
return true;
|
|
}
|
|
return false;
|
|
}
|
|
|
|
    /**
     * Parses either a `{ ... }` block body (invoking fnIf once per token) or
     * a single `;`-terminated statement (invoking fnElse), attaching leading
     * or trailing comments and the current file name to obj when given.
     * @param {ReflectionObject|null} obj Object to annotate with comment/filename, if any
     * @param {function(string)} fnIf Called for each token inside a block
     * @param {function()} [fnElse] Called when no block follows
     * @returns {undefined}
     */
    function ifBlock(obj, fnIf, fnElse) {
        var trailingLine = tn.line; // remember line for trailing-comment lookup
        if (obj) {
            if(typeof obj.comment !== "string") {
              obj.comment = cmnt(); // try block-type comment
            }
            obj.filename = parse$1.filename;
        }
        if (skip("{", true)) {
            var token;
            while ((token = next()) !== "}")
                fnIf(token);
            skip(";", true);
        } else {
            if (fnElse)
                fnElse();
            skip(";");
            if (obj && (typeof obj.comment !== "string" || preferTrailingComment))
                obj.comment = cmnt(trailingLine) || obj.comment; // try line-type comment
        }
    }
    /**
     * Parses a `message` definition and adds the resulting type to parent.
     * @param {Namespace} parent Parent namespace
     * @param {string} token Current token ("message")
     * @returns {undefined}
     */
    function parseType(parent, token) {

        /* istanbul ignore if */
        if (!nameRe.test(token = next()))
            throw illegal(token, "type name");

        var type = new Type(token);
        ifBlock(type, function parseType_block(token) {
            if (parseCommon(type, token))
                return;

            switch (token) {

                case "map":
                    parseMapField(type);
                    break;

                case "required":
                case "repeated":
                    parseField(type, token);
                    break;

                case "optional":
                    /* istanbul ignore if */
                    if (isProto3) {
                        // proto3 `optional` is modeled as a synthetic oneof
                        parseField(type, "proto3_optional");
                    } else {
                        parseField(type, "optional");
                    }
                    break;

                case "oneof":
                    parseOneOf(type, token);
                    break;

                case "extensions":
                    readRanges(type.extensions || (type.extensions = []));
                    break;

                case "reserved":
                    readRanges(type.reserved || (type.reserved = []), true);
                    break;

                default:
                    /* istanbul ignore if */
                    if (!isProto3 || !typeRefRe.test(token))
                        throw illegal(token);

                    // proto3 allows omitting the rule; treat as optional
                    push(token);
                    parseField(type, "optional");
                    break;
            }
        });
        parent.add(type);
    }
    /**
     * Parses a single field declaration and adds it to parent. proto3
     * optional fields are wrapped in a synthetic single-member oneof.
     * @param {Type|OneOf} parent Parent object
     * @param {string} rule Field rule ("required"/"repeated"/"optional"/"proto3_optional")
     * @param {string} [extend] Extended type reference when inside `extend`
     * @returns {undefined}
     */
    function parseField(parent, rule, extend) {
        var type = next();
        if (type === "group") {
            // proto2 groups get dedicated handling
            parseGroup(parent, rule);
            return;
        }

        /* istanbul ignore if */
        if (!typeRefRe.test(type))
            throw illegal(type, "type");

        var name = next();

        /* istanbul ignore if */
        if (!nameRe.test(name))
            throw illegal(name, "name");

        name = applyCase(name);
        skip("=");

        var field = new Field(name, parseId(next()), type, rule, extend);
        ifBlock(field, function parseField_block(token) {

            /* istanbul ignore else */
            if (token === "option") {
                parseOption(field, token);
                skip(";");
            } else
                throw illegal(token);

        }, function parseField_line() {
            parseInlineOptions(field);
        });

        if (rule === "proto3_optional") {
            // for proto3 optional fields, we create a single-member Oneof to mimic "optional" behavior
            var oneof = new OneOf("_" + name);
            field.setOption("proto3_optional", true);
            oneof.add(field);
            parent.add(oneof);
        } else {
            parent.add(field);
        }

        // JSON defaults to packed=true if not set so we have to set packed=false explicity when
        // parsing proto2 descriptors without the option, where applicable. This must be done for
        // all known packable types and anything that could be an enum (= is not a basic type).
        if (!isProto3 && field.repeated && (types.packed[type] !== undefined || types.basic[type] === undefined))
            field.setOption("packed", false, /* ifNotSet */ true);
    }
    /**
     * Parses a proto2 `group` declaration, producing both a nested Type
     * (flagged group=true) and a Field referencing it on the parent.
     * @param {Type} parent Parent message type
     * @param {string} rule Field rule of the group
     * @returns {undefined}
     */
    function parseGroup(parent, rule) {
        var name = next();

        /* istanbul ignore if */
        if (!nameRe.test(name))
            throw illegal(name, "name");

        // Per descriptor rules: the field is lower-cased, the type upper-cased
        var fieldName = util$2.lcFirst(name);
        if (name === fieldName)
            name = util$2.ucFirst(name);
        skip("=");
        var id = parseId(next());
        var type = new Type(name);
        type.group = true;
        var field = new Field(fieldName, id, name, rule);
        field.filename = parse$1.filename;
        ifBlock(type, function parseGroup_block(token) {
            switch (token) {

                case "option":
                    parseOption(type, token);
                    skip(";");
                    break;

                case "required":
                case "repeated":
                    parseField(type, token);
                    break;

                case "optional":
                    /* istanbul ignore if */
                    if (isProto3) {
                        parseField(type, "proto3_optional");
                    } else {
                        parseField(type, "optional");
                    }
                    break;

                case "message":
                    parseType(type, token);
                    break;

                case "enum":
                    parseEnum(type, token);
                    break;

                /* istanbul ignore next */
                default:
                    throw illegal(token); // there are no groups with proto3 semantics
            }
        });
        parent.add(type)
            .add(field);
    }
    /**
     * Parses a `map<K, V>` field declaration and adds it to parent.
     * Key types are restricted to the protobuf map-key whitelist.
     * @param {Type} parent Parent message type
     * @returns {undefined}
     */
    function parseMapField(parent) {
        skip("<");
        var keyType = next();

        /* istanbul ignore if */
        if (types.mapKey[keyType] === undefined)
            throw illegal(keyType, "type");

        skip(",");
        var valueType = next();

        /* istanbul ignore if */
        if (!typeRefRe.test(valueType))
            throw illegal(valueType, "type");

        skip(">");
        var name = next();

        /* istanbul ignore if */
        if (!nameRe.test(name))
            throw illegal(name, "name");

        skip("=");
        var field = new MapField(applyCase(name), parseId(next()), keyType, valueType);
        ifBlock(field, function parseMapField_block(token) {

            /* istanbul ignore else */
            if (token === "option") {
                parseOption(field, token);
                skip(";");
            } else
                throw illegal(token);

        }, function parseMapField_line() {
            parseInlineOptions(field);
        });
        parent.add(field);
    }
function parseOneOf(parent, token) {
|
|
|
|
/* istanbul ignore if */
|
|
if (!nameRe.test(token = next()))
|
|
throw illegal(token, "name");
|
|
|
|
var oneof = new OneOf(applyCase(token));
|
|
ifBlock(oneof, function parseOneOf_block(token) {
|
|
if (token === "option") {
|
|
parseOption(oneof, token);
|
|
skip(";");
|
|
} else {
|
|
push(token);
|
|
parseField(oneof, "optional");
|
|
}
|
|
});
|
|
parent.add(oneof);
|
|
}
|
|
|
|
    /**
     * Parses an `enum` definition and adds it to parent.
     * @param {Namespace} parent Parent namespace
     * @param {string} token Current token ("enum")
     * @returns {undefined}
     */
    function parseEnum(parent, token) {

        /* istanbul ignore if */
        if (!nameRe.test(token = next()))
            throw illegal(token, "name");

        var enm = new Enum(token);
        ifBlock(enm, function parseEnum_block(token) {
            switch(token) {
                case "option":
                    parseOption(enm, token);
                    skip(";");
                    break;

                case "reserved":
                    readRanges(enm.reserved || (enm.reserved = []), true);
                    break;

                default:
                    // anything else must be an enum value declaration
                    parseEnumValue(enm, token);
            }
        });
        parent.add(enm);
    }
    /**
     * Parses a single enum value (`NAME = id [options];`) and adds it to
     * its parent enum. Options are collected on a throwaway object with a
     * minimal setOption shim, since enum values are not reflection objects.
     * @param {Enum} parent Parent enum
     * @param {string} token Value name token
     * @returns {undefined}
     */
    function parseEnumValue(parent, token) {

        /* istanbul ignore if */
        if (!nameRe.test(token))
            throw illegal(token, "name");

        skip("=");
        var value = parseId(next(), true), // negative enum values are legal
            dummy = {
                options: undefined
            };
        dummy.setOption = function(name, value) {
            if (this.options === undefined)
                this.options = {};
            this.options[name] = value;
        };
        ifBlock(dummy, function parseEnumValue_block(token) {

            /* istanbul ignore else */
            if (token === "option") {
                parseOption(dummy, token); // skip
                skip(";");
            } else
                throw illegal(token);

        }, function parseEnumValue_line() {
            parseInlineOptions(dummy); // skip
        });
        parent.add(token, value, dummy.comment, dummy.options);
    }
    /**
     * Parses an `option` statement, supporting custom `(extension)` names
     * and a dotted property path after the parenthesized part, e.g.
     * `option (my_opt).prop = value;`.
     * @param {ReflectionObject} parent Object to receive the option
     * @param {string} token Current token ("option")
     * @returns {undefined}
     */
    function parseOption(parent, token) {
        var isCustom = skip("(", true);

        /* istanbul ignore if */
        if (!typeRefRe.test(token = next()))
            throw illegal(token, "name");

        var name = token;     // full option name, possibly with property path
        var option = name;    // option name without the property path
        var propName;

        if (isCustom) {
            skip(")");
            name = "(" + name + ")";
            option = name;
            token = peek();
            if (fqTypeRefRe.test(token)) {
                propName = token.slice(1); //remove '.' before property name
                name += token;
                next();
            }
        }
        skip("=");
        var optionValue = parseOptionValue(parent, name);
        setParsedOption(parent, option, optionValue, propName);
    }
    /**
     * Parses an option value: either an aggregate `{ ... }` text-format
     * object (possibly nested; repeated keys accumulate into arrays) or a
     * simple scalar/string/type-ref value. Scalar values are also forwarded
     * to setOption under dotted names for backward compatibility.
     * Note: `token` here is the hoisted `var token` of parse$1's main loop.
     * @param {ReflectionObject} parent Object receiving the option
     * @param {string} name Fully qualified option name so far
     * @returns {*} The parsed value
     */
    function parseOptionValue(parent, name) {
        // { a: "foo" b { c: "bar" } }
        if (skip("{", true)) {
            var objectResult = {};

            while (!skip("}", true)) {
                /* istanbul ignore if */
                if (!nameRe.test(token = next())) {
                    throw illegal(token, "name");
                }

                var value;
                var propName = token;

                skip(":", true); // the colon between key and value is optional

                if (peek() === "{")
                    value = parseOptionValue(parent, name + "." + token);
                else if (peek() === "[") {
                    // option (my_option) = {
                    //     repeated_value: [ "foo", "bar" ]
                    // };
                    value = [];
                    var lastValue;
                    if (skip("[", true)) {
                        do {
                            lastValue = readValue(true);
                            value.push(lastValue);
                        } while (skip(",", true));
                        skip("]");
                        if (typeof lastValue !== "undefined") {
                            setOption(parent, name + "." + token, lastValue);
                        }
                    }
                } else {
                    value = readValue(true);
                    setOption(parent, name + "." + token, value);
                }

                var prevValue = objectResult[propName];

                // repeated keys accumulate into an array
                if (prevValue)
                    value = [].concat(prevValue).concat(value);

                objectResult[propName] = value;

                // Semicolons and commas can be optional
                skip(",", true);
                skip(";", true);
            }

            return objectResult;
        }

        var simpleValue = readValue(true);
        setOption(parent, name, simpleValue);
        return simpleValue;
        // Does not enforce a delimiter to be universal
    }
function setOption(parent, name, value) {
|
|
if (parent.setOption)
|
|
parent.setOption(name, value);
|
|
}
|
|
|
|
function setParsedOption(parent, name, value, propName) {
|
|
if (parent.setParsedOption)
|
|
parent.setParsedOption(name, value, propName);
|
|
}
|
|
|
|
    /**
     * Parses field-level inline options in brackets, e.g.
     * `[deprecated = true, packed = false]`.
     * @param {ReflectionObject} parent Object to receive the options
     * @returns {ReflectionObject} The same parent, for chaining
     */
    function parseInlineOptions(parent) {
        if (skip("[", true)) {
            do {
                parseOption(parent, "option");
            } while (skip(",", true));
            skip("]");
        }
        return parent;
    }
function parseService(parent, token) {
|
|
|
|
/* istanbul ignore if */
|
|
if (!nameRe.test(token = next()))
|
|
throw illegal(token, "service name");
|
|
|
|
var service = new Service(token);
|
|
ifBlock(service, function parseService_block(token) {
|
|
if (parseCommon(service, token))
|
|
return;
|
|
|
|
/* istanbul ignore else */
|
|
if (token === "rpc")
|
|
parseMethod(service, token);
|
|
else
|
|
throw illegal(token);
|
|
});
|
|
parent.add(service);
|
|
}
|
|
|
|
    /**
     * Parses an `rpc` method declaration inside a service, including
     * `stream` markers on the request and response types.
     * @param {Service} parent Parent service
     * @param {string} token Current token ("rpc")
     * @returns {undefined}
     */
    function parseMethod(parent, token) {
        // Get the comment of the preceding line now (if one exists) in case the
        // method is defined across multiple lines.
        var commentText = cmnt();

        var type = token;

        /* istanbul ignore if */
        if (!nameRe.test(token = next()))
            throw illegal(token, "name");

        var name = token,
            requestType, requestStream,
            responseType, responseStream;

        skip("(");
        if (skip("stream", true))
            requestStream = true;

        /* istanbul ignore if */
        if (!typeRefRe.test(token = next()))
            throw illegal(token);

        requestType = token;
        skip(")"); skip("returns"); skip("(");
        if (skip("stream", true))
            responseStream = true;

        /* istanbul ignore if */
        if (!typeRefRe.test(token = next()))
            throw illegal(token);

        responseType = token;
        skip(")");

        var method = new Method(name, type, requestType, responseType, requestStream, responseStream);
        method.comment = commentText;
        ifBlock(method, function parseMethod_block(token) {

            /* istanbul ignore else */
            if (token === "option") {
                parseOption(method, token);
                skip(";");
            } else
                throw illegal(token);

        });
        parent.add(method);
    }
    /**
     * Parses an `extend` block, adding its fields to parent with a
     * reference to the extended type.
     * @param {Namespace} parent Parent namespace
     * @param {string} token Current token ("extend")
     * @returns {undefined}
     */
    function parseExtension(parent, token) {

        /* istanbul ignore if */
        if (!typeRefRe.test(token = next()))
            throw illegal(token, "reference");

        var reference = token;
        ifBlock(null, function parseExtension_block(token) {
            switch (token) {

                case "required":
                case "repeated":
                    parseField(parent, token, reference);
                    break;

                case "optional":
                    /* istanbul ignore if */
                    if (isProto3) {
                        parseField(parent, "proto3_optional", reference);
                    } else {
                        parseField(parent, "optional", reference);
                    }
                    break;

                default:
                    /* istanbul ignore if */
                    if (!isProto3 || !typeRefRe.test(token))
                        throw illegal(token);
                    // proto3 allows omitting the rule; treat as optional
                    push(token);
                    parseField(parent, "optional", reference);
                    break;
            }
        });
    }
    // Main loop over top-level statements. package/import/syntax are only
    // legal while `head` is true, i.e. before the first definition.
    var token;
    while ((token = next()) !== null) {
        switch (token) {

            case "package":

                /* istanbul ignore if */
                if (!head)
                    throw illegal(token);

                parsePackage();
                break;

            case "import":

                /* istanbul ignore if */
                if (!head)
                    throw illegal(token);

                parseImport();
                break;

            case "syntax":

                /* istanbul ignore if */
                if (!head)
                    throw illegal(token);

                parseSyntax();
                break;

            case "option":

                parseOption(ptr, token);
                skip(";");
                break;

            default:

                /* istanbul ignore else */
                if (parseCommon(ptr, token)) {
                    head = false; // a definition was seen
                    continue;
                }

                /* istanbul ignore next */
                throw illegal(token);
        }
    }

    // Reset the static filename and assemble the parser result
    parse$1.filename = null;
    return {
        "package"   : pkg,
        "imports"   : imports,
        weakImports : weakImports,
        syntax      : syntax,
        root        : root
    };
}
var common_1 = common;

// Distinguishes full file names (containing a slash or dot) from short names
var commonRe = /\/|\./;

/**
 * Provides common type definitions.
 * Can also be used to provide additional google types or your own custom types.
 * @param {string} name Short name as in `google/protobuf/[name].proto` or full file name
 * @param {Object.<string,*>} json JSON definition within `google.protobuf` if a short name, otherwise the file's root definition
 * @returns {undefined}
 * @property {INamespace} google/protobuf/any.proto Any
 * @property {INamespace} google/protobuf/duration.proto Duration
 * @property {INamespace} google/protobuf/empty.proto Empty
 * @property {INamespace} google/protobuf/field_mask.proto FieldMask
 * @property {INamespace} google/protobuf/struct.proto Struct, Value, NullValue and ListValue
 * @property {INamespace} google/protobuf/timestamp.proto Timestamp
 * @property {INamespace} google/protobuf/wrappers.proto Wrappers
 * @example
 * // manually provides descriptor.proto (assumes google/protobuf/ namespace and .proto extension)
 * protobuf.common("descriptor", descriptorJson);
 *
 * // manually provides a custom definition (uses my.foo namespace)
 * protobuf.common("my/foo/bar.proto", myFooBarJson);
 */
function common(name, json) {
    // A short name like "any" maps to google/protobuf/any.proto with its
    // definition nested under the google.protobuf namespace.
    var isShortName = !commonRe.test(name);
    if (isShortName) {
        name = "google/protobuf/" + name + ".proto";
        json = { nested: { google: { nested: { protobuf: { nested: json } } } } };
    }
    common[name] = json; // definitions are stored as properties of common itself
}
// Not provided because of limited use (feel free to discuss or to provide yourself):
|
|
//
|
|
// google/protobuf/descriptor.proto
|
|
// google/protobuf/source_context.proto
|
|
// google/protobuf/type.proto
|
|
//
|
|
// Stripped and pre-parsed versions of these non-bundled files are instead available as part of
|
|
// the repository or package within the google/protobuf directory.
|
|
|
|
// Bundled definition of google/protobuf/any.proto
common("any", {

    /**
     * Properties of a google.protobuf.Any message.
     * @interface IAny
     * @type {Object}
     * @property {string} [typeUrl]
     * @property {Uint8Array} [bytes]
     * @memberof common
     */
    Any: {
        fields: {
            type_url: {
                type: "string",
                id: 1
            },
            value: {
                type: "bytes",
                id: 2
            }
        }
    }
});
// Duration and Timestamp share the same field layout (int64 seconds +
// int32 nanos), so the literal is reused via timeType.
var timeType;

common("duration", {

    /**
     * Properties of a google.protobuf.Duration message.
     * @interface IDuration
     * @type {Object}
     * @property {number|Long} [seconds]
     * @property {number} [nanos]
     * @memberof common
     */
    Duration: timeType = {
        fields: {
            seconds: {
                type: "int64",
                id: 1
            },
            nanos: {
                type: "int32",
                id: 2
            }
        }
    }
});

common("timestamp", {

    /**
     * Properties of a google.protobuf.Timestamp message.
     * @interface ITimestamp
     * @type {Object}
     * @property {number|Long} [seconds]
     * @property {number} [nanos]
     * @memberof common
     */
    Timestamp: timeType
});
// Bundled definition of google/protobuf/empty.proto
common("empty", {

    /**
     * Properties of a google.protobuf.Empty message.
     * @interface IEmpty
     * @memberof common
     */
    Empty: {
        fields: {}
    }
});
// Bundled definition of google/protobuf/struct.proto
common("struct", {

    /**
     * Properties of a google.protobuf.Struct message.
     * @interface IStruct
     * @type {Object}
     * @property {Object.<string,IValue>} [fields]
     * @memberof common
     */
    Struct: {
        fields: {
            fields: {
                keyType: "string",
                type: "Value",
                id: 1
            }
        }
    },

    /**
     * Properties of a google.protobuf.Value message.
     * @interface IValue
     * @type {Object}
     * @property {string} [kind]
     * @property {0} [nullValue]
     * @property {number} [numberValue]
     * @property {string} [stringValue]
     * @property {boolean} [boolValue]
     * @property {IStruct} [structValue]
     * @property {IListValue} [listValue]
     * @memberof common
     */
    Value: {
        // exactly one of these fields may be set at a time
        oneofs: {
            kind: {
                oneof: [
                    "nullValue",
                    "numberValue",
                    "stringValue",
                    "boolValue",
                    "structValue",
                    "listValue"
                ]
            }
        },
        fields: {
            nullValue: {
                type: "NullValue",
                id: 1
            },
            numberValue: {
                type: "double",
                id: 2
            },
            stringValue: {
                type: "string",
                id: 3
            },
            boolValue: {
                type: "bool",
                id: 4
            },
            structValue: {
                type: "Struct",
                id: 5
            },
            listValue: {
                type: "ListValue",
                id: 6
            }
        }
    },

    // singleton enum used by Value.nullValue
    NullValue: {
        values: {
            NULL_VALUE: 0
        }
    },

    /**
     * Properties of a google.protobuf.ListValue message.
     * @interface IListValue
     * @type {Object}
     * @property {Array.<IValue>} [values]
     * @memberof common
     */
    ListValue: {
        fields: {
            values: {
                rule: "repeated",
                type: "Value",
                id: 1
            }
        }
    }
});
// Bundled definition of google/protobuf/wrappers.proto: each wrapper is a
// single-field message around the corresponding scalar type.
common("wrappers", {

    /**
     * Properties of a google.protobuf.DoubleValue message.
     * @interface IDoubleValue
     * @type {Object}
     * @property {number} [value]
     * @memberof common
     */
    DoubleValue: {
        fields: {
            value: {
                type: "double",
                id: 1
            }
        }
    },

    /**
     * Properties of a google.protobuf.FloatValue message.
     * @interface IFloatValue
     * @type {Object}
     * @property {number} [value]
     * @memberof common
     */
    FloatValue: {
        fields: {
            value: {
                type: "float",
                id: 1
            }
        }
    },

    /**
     * Properties of a google.protobuf.Int64Value message.
     * @interface IInt64Value
     * @type {Object}
     * @property {number|Long} [value]
     * @memberof common
     */
    Int64Value: {
        fields: {
            value: {
                type: "int64",
                id: 1
            }
        }
    },

    /**
     * Properties of a google.protobuf.UInt64Value message.
     * @interface IUInt64Value
     * @type {Object}
     * @property {number|Long} [value]
     * @memberof common
     */
    UInt64Value: {
        fields: {
            value: {
                type: "uint64",
                id: 1
            }
        }
    },

    /**
     * Properties of a google.protobuf.Int32Value message.
     * @interface IInt32Value
     * @type {Object}
     * @property {number} [value]
     * @memberof common
     */
    Int32Value: {
        fields: {
            value: {
                type: "int32",
                id: 1
            }
        }
    },

    /**
     * Properties of a google.protobuf.UInt32Value message.
     * @interface IUInt32Value
     * @type {Object}
     * @property {number} [value]
     * @memberof common
     */
    UInt32Value: {
        fields: {
            value: {
                type: "uint32",
                id: 1
            }
        }
    },

    /**
     * Properties of a google.protobuf.BoolValue message.
     * @interface IBoolValue
     * @type {Object}
     * @property {boolean} [value]
     * @memberof common
     */
    BoolValue: {
        fields: {
            value: {
                type: "bool",
                id: 1
            }
        }
    },

    /**
     * Properties of a google.protobuf.StringValue message.
     * @interface IStringValue
     * @type {Object}
     * @property {string} [value]
     * @memberof common
     */
    StringValue: {
        fields: {
            value: {
                type: "string",
                id: 1
            }
        }
    },

    /**
     * Properties of a google.protobuf.BytesValue message.
     * @interface IBytesValue
     * @type {Object}
     * @property {Uint8Array} [value]
     * @memberof common
     */
    BytesValue: {
        fields: {
            value: {
                type: "bytes",
                id: 1
            }
        }
    }
});
// Bundled definition of google/protobuf/field_mask.proto
common("field_mask", {

    /**
     * Properties of a google.protobuf.FieldMask message.
     * @interface IFieldMask
     * @type {Object}
     * @property {Array.<string>} [paths]
     * @memberof common
     */
    FieldMask: {
        fields: {
            paths: {
                rule: "repeated",
                type: "string",
                id: 1
            }
        }
    }
});
/**
 * Gets the root definition of the specified common proto file.
 *
 * Bundled definitions are:
 * - google/protobuf/any.proto
 * - google/protobuf/duration.proto
 * - google/protobuf/empty.proto
 * - google/protobuf/field_mask.proto
 * - google/protobuf/struct.proto
 * - google/protobuf/timestamp.proto
 * - google/protobuf/wrappers.proto
 *
 * @param {string} file Proto file name
 * @returns {INamespace|null} Root definition or `null` if not defined
 */
common.get = function get(file) {
    var json = common[file];
    return json || null;
};
// Full-build entry point: extends the light build with the textual .proto
// parser (tokenize/parse/common) and wires it into Root for load().
(function (module) {
    var protobuf = module.exports = indexLight.exports;

    protobuf.build = "full";

    // Parser
    protobuf.tokenize = tokenize_1;
    protobuf.parse = parse_1;
    protobuf.common = common_1;

    // Configure parser
    protobuf.Root._configure(protobuf.Type, protobuf.parse, protobuf.common);
} (src));
// Re-export the full build as the top-level protobufjs module
(function (module) {
    module.exports = src.exports;
} (protobufjs));
var pb = /*@__PURE__*/getDefaultExportFromCjs(protobufjs.exports);

const Reader = pb.Reader;
// monkey patch the reader to add native bigint support
const methods$1 = [
    'uint64', 'int64', 'sint64', 'fixed64', 'sfixed64'
];
methods$1.forEach(method => {
    // keep the original Long-returning implementation, then wrap it so the
    // patched method returns a native BigInt instead
    // @ts-expect-error
    const original = Reader.prototype[method];
    // @ts-expect-error
    Reader.prototype[method] = function () {
        return BigInt(original.call(this).toString());
    };
});
/**
 * Decodes protobuf-encoded bytes using the given codec.
 * @param buf Bytes to decode (Uint8Array or Uint8ArrayList)
 * @param codec Codec whose decode() is applied
 * @returns The decoded message object
 */
function decodeMessage(buf, codec) {
    const bytes = buf instanceof Uint8Array ? buf : buf.subarray();
    const reader = Reader.create(bytes);
    // @ts-expect-error
    return codec.decode(reader);
}
const Writer = pb.Writer;
// monkey patch the writer to add native bigint support
const methods = [
    'uint64', 'int64', 'sint64', 'fixed64', 'sfixed64'
];
methods.forEach(method => {
    // wrap the original so BigInt input is stringified before delegating
    // @ts-expect-error
    const original = Writer.prototype[method];
    // @ts-expect-error
    Writer.prototype[method] = function (val) {
        return original.call(this, val.toString());
    };
});
/**
 * Encodes a message object to bytes using the given codec.
 * @param message Message object to encode
 * @param codec Codec whose encode() is applied
 * @returns Encoded bytes as a Uint8Array
 */
function encodeMessage(message, codec) {
    const writer = Writer.create();
    // top-level messages are written without a length prefix
    // @ts-expect-error
    codec.encode(message, writer, {
        lengthDelimited: false
    });
    return writer.finish();
}
// https://developers.google.com/protocol-buffers/docs/encoding#structure
// Protobuf wire types, in TypeScript-compiled enum form (bidirectional map)
var CODEC_TYPES;
(function (CODEC_TYPES) {
    CODEC_TYPES[CODEC_TYPES["VARINT"] = 0] = "VARINT";
    CODEC_TYPES[CODEC_TYPES["BIT64"] = 1] = "BIT64";
    CODEC_TYPES[CODEC_TYPES["LENGTH_DELIMITED"] = 2] = "LENGTH_DELIMITED";
    CODEC_TYPES[CODEC_TYPES["START_GROUP"] = 3] = "START_GROUP";
    CODEC_TYPES[CODEC_TYPES["END_GROUP"] = 4] = "END_GROUP";
    CODEC_TYPES[CODEC_TYPES["BIT32"] = 5] = "BIT32";
})(CODEC_TYPES || (CODEC_TYPES = {}));
/**
 * Bundles a codec's name, wire type and encode/decode functions into a
 * plain codec object.
 * @param name Codec name
 * @param type Wire type (one of CODEC_TYPES)
 * @param encode Encode function
 * @param decode Decode function
 * @returns The codec object
 */
function createCodec(name, type, encode, decode) {
    const codec = { name, type, encode, decode };
    return codec;
}
/**
 * Creates a varint codec for a TypeScript-style enum object, validating
 * values through the enum's reverse mapping in both directions.
 * @param v Enum object with forward and reverse mappings
 * @returns A VARINT codec for the enum
 */
function enumeration(v) {
    function findValue(val) {
        // Use the reverse mapping to look up the enum key for the stored value
        // https://www.typescriptlang.org/docs/handbook/enums.html#reverse-mappings
        if (v[val.toString()] == null) {
            throw new Error('Invalid enum value');
        }
        return v[val];
    }
    const encode = function enumEncode(val, writer) {
        writer.int32(findValue(val));
    };
    const decode = function enumDecode(reader) {
        return findValue(reader.uint32());
    };
    // @ts-expect-error yeah yeah
    return createCodec('enum', CODEC_TYPES.VARINT, encode, decode);
}
/**
 * Creates a length-delimited message codec from encode/decode functions.
 * @param encode Message encode function
 * @param decode Message decode function
 * @returns A LENGTH_DELIMITED codec
 */
function message(encode, decode) {
    const codec = createCodec('message', CODEC_TYPES.LENGTH_DELIMITED, encode, decode);
    return codec;
}
|
/* eslint-disable import/export */
// KeyType string enum mirroring the libp2p-crypto keys.proto definition
var KeyType;
(function (KeyType) {
    KeyType["RSA"] = "RSA";
    KeyType["Ed25519"] = "Ed25519";
    KeyType["Secp256k1"] = "Secp256k1";
})(KeyType || (KeyType = {}));
// Numeric wire values backing the string enum above
var __KeyTypeValues;
(function (__KeyTypeValues) {
    __KeyTypeValues[__KeyTypeValues["RSA"] = 0] = "RSA";
    __KeyTypeValues[__KeyTypeValues["Ed25519"] = 1] = "Ed25519";
    __KeyTypeValues[__KeyTypeValues["Secp256k1"] = 2] = "Secp256k1";
})(__KeyTypeValues || (__KeyTypeValues = {}));
(function (KeyType) {
    // Codec for the enum, encoding/decoding via the numeric mapping
    KeyType.codec = () => {
        return enumeration(__KeyTypeValues);
    };
})(KeyType || (KeyType = {}));
// PublicKey message codec: required KeyType Type = 1, required bytes Data = 2
var PublicKey;
(function (PublicKey) {
    let _codec;
    // Lazily creates and caches the message codec
    PublicKey.codec = () => {
        if (_codec == null) {
            _codec = message((obj, writer, opts = {}) => {
                if (opts.lengthDelimited !== false) {
                    writer.fork();
                }
                if (obj.Type != null) {
                    writer.uint32(8); // field 1, wire type VARINT
                    KeyType.codec().encode(obj.Type, writer);
                }
                else {
                    throw new Error('Protocol error: required field "Type" was not found in object');
                }
                if (obj.Data != null) {
                    writer.uint32(18); // field 2, wire type LENGTH_DELIMITED
                    writer.bytes(obj.Data);
                }
                else {
                    throw new Error('Protocol error: required field "Data" was not found in object');
                }
                if (opts.lengthDelimited !== false) {
                    writer.ldelim();
                }
            }, (reader, length) => {
                // defaults are overwritten as fields are read
                const obj = {
                    Type: KeyType.RSA,
                    Data: new Uint8Array(0)
                };
                const end = length == null ? reader.len : reader.pos + length;
                while (reader.pos < end) {
                    const tag = reader.uint32();
                    switch (tag >>> 3) {
                        case 1:
                            obj.Type = KeyType.codec().decode(reader);
                            break;
                        case 2:
                            obj.Data = reader.bytes();
                            break;
                        default:
                            // unknown field: skip by wire type
                            reader.skipType(tag & 7);
                            break;
                    }
                }
                if (obj.Type == null) {
                    throw new Error('Protocol error: value for required field "Type" was not found in protobuf');
                }
                if (obj.Data == null) {
                    throw new Error('Protocol error: value for required field "Data" was not found in protobuf');
                }
                return obj;
            });
        }
        return _codec;
    };
    PublicKey.encode = (obj) => {
        return encodeMessage(obj, PublicKey.codec());
    };
    PublicKey.decode = (buf) => {
        return decodeMessage(buf, PublicKey.codec());
    };
})(PublicKey || (PublicKey = {}));
// PrivateKey message codec: identical wire layout to PublicKey
// (required KeyType Type = 1, required bytes Data = 2)
var PrivateKey;
(function (PrivateKey) {
    let _codec;
    // Lazily creates and caches the message codec
    PrivateKey.codec = () => {
        if (_codec == null) {
            _codec = message((obj, writer, opts = {}) => {
                if (opts.lengthDelimited !== false) {
                    writer.fork();
                }
                if (obj.Type != null) {
                    writer.uint32(8); // field 1, wire type VARINT
                    KeyType.codec().encode(obj.Type, writer);
                }
                else {
                    throw new Error('Protocol error: required field "Type" was not found in object');
                }
                if (obj.Data != null) {
                    writer.uint32(18); // field 2, wire type LENGTH_DELIMITED
                    writer.bytes(obj.Data);
                }
                else {
                    throw new Error('Protocol error: required field "Data" was not found in object');
                }
                if (opts.lengthDelimited !== false) {
                    writer.ldelim();
                }
            }, (reader, length) => {
                // defaults are overwritten as fields are read
                const obj = {
                    Type: KeyType.RSA,
                    Data: new Uint8Array(0)
                };
                const end = length == null ? reader.len : reader.pos + length;
                while (reader.pos < end) {
                    const tag = reader.uint32();
                    switch (tag >>> 3) {
                        case 1:
                            obj.Type = KeyType.codec().decode(reader);
                            break;
                        case 2:
                            obj.Data = reader.bytes();
                            break;
                        default:
                            // unknown field: skip by wire type
                            reader.skipType(tag & 7);
                            break;
                    }
                }
                if (obj.Type == null) {
                    throw new Error('Protocol error: value for required field "Type" was not found in protobuf');
                }
                if (obj.Data == null) {
                    throw new Error('Protocol error: value for required field "Data" was not found in protobuf');
                }
                return obj;
            });
        }
        return _codec;
    };
    PrivateKey.encode = (obj) => {
        return encodeMessage(obj, PrivateKey.codec());
    };
    PrivateKey.decode = (buf) => {
        return decodeMessage(buf, PrivateKey.codec());
    };
})(PrivateKey || (PrivateKey = {}));
|
/**
|
|
* Node.js module for Forge.
|
|
*
|
|
* @author Dave Longley
|
|
*
|
|
* Copyright 2011-2016 Digital Bazaar, Inc.
|
|
*/
|
|
|
|
// Root forge namespace object; sub-modules attach themselves to it below.
var forge$m = {};

// default options
forge$m.options = {
  // when true, forge avoids native (e.g. Node crypto) implementations
  usePureJavaScript: false
};
|
|
|
|
/**
|
|
* Base-N/Base-X encoding/decoding functions.
|
|
*
|
|
* Original implementation from base-x:
|
|
* https://github.com/cryptocoinjs/base-x
|
|
*
|
|
* Which is MIT licensed:
|
|
*
|
|
* The MIT License (MIT)
|
|
*
|
|
* Copyright base-x contributors (c) 2016
|
|
*
|
|
* Permission is hereby granted, free of charge, to any person obtaining a copy
|
|
* of this software and associated documentation files (the "Software"), to deal
|
|
* in the Software without restriction, including without limitation the rights
|
|
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
|
* copies of the Software, and to permit persons to whom the Software is
|
|
* furnished to do so, subject to the following conditions:
|
|
*
|
|
* The above copyright notice and this permission notice shall be included in
|
|
* all copies or substantial portions of the Software.
|
|
*
|
|
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
|
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
|
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
|
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
|
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
|
|
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
|
|
* DEALINGS IN THE SOFTWARE.
|
|
*/
|
|
|
|
// Namespace object for the Base-N API; `baseN$1` is the exported alias
// consumed by the forge util module below.
var api = {};
var baseN$1 = api;

// baseN alphabet indexes
// Cache of reverse-lookup tables keyed by alphabet string; each table maps a
// character code to its digit value. Built lazily by api.decode.
var _reverseAlphabets = {};
|
|
|
|
/**
|
|
* BaseN-encodes a Uint8Array using the given alphabet.
|
|
*
|
|
* @param input the Uint8Array to encode.
|
|
* @param maxline the maximum number of encoded characters per line to use,
|
|
* defaults to none.
|
|
*
|
|
* @return the baseN-encoded output string.
|
|
*/
|
|
api.encode = function(input, alphabet, maxline) {
  if(typeof alphabet !== 'string') {
    throw new TypeError('"alphabet" must be a string.');
  }
  if(maxline !== undefined && typeof maxline !== 'number') {
    throw new TypeError('"maxline" must be a number.');
  }

  var output = '';

  if(!(input instanceof Uint8Array)) {
    // assume forge byte buffer (anything exposing length() and at())
    output = _encodeWithByteBuffer(input, alphabet);
  } else {
    // Treat the bytes as one big base-256 integer; `digits` accumulates the
    // base-N digits, least-significant digit first.
    var i = 0;
    var base = alphabet.length;
    var first = alphabet.charAt(0);
    var digits = [0];
    for(i = 0; i < input.length; ++i) {
      // multiply the accumulated number by 256 and add the next byte,
      // propagating the carry through the existing digits
      for(var j = 0, carry = input[i]; j < digits.length; ++j) {
        carry += digits[j] << 8;
        digits[j] = carry % base;
        carry = (carry / base) | 0;
      }

      // leftover carry grows the digit count
      while(carry > 0) {
        digits.push(carry % base);
        carry = (carry / base) | 0;
      }
    }

    // deal with leading zeros: the big-integer math cannot represent them,
    // so emit one "zero" character (alphabet[0]) per leading zero byte
    for(i = 0; input[i] === 0 && i < input.length - 1; ++i) {
      output += first;
    }
    // convert digits to a string, most-significant digit first
    for(i = digits.length - 1; i >= 0; --i) {
      output += alphabet[digits[i]];
    }
  }

  if(maxline) {
    // wrap the output at `maxline` characters using CRLF line breaks
    var regex = new RegExp('.{1,' + maxline + '}', 'g');
    output = output.match(regex).join('\r\n');
  }

  return output;
};
|
|
|
|
/**
|
|
* Decodes a baseN-encoded (using the given alphabet) string to a
|
|
* Uint8Array.
|
|
*
|
|
* @param input the baseN-encoded input string.
|
|
*
|
|
* @return the Uint8Array.
|
|
*/
|
|
api.decode = function(input, alphabet) {
  if(typeof input !== 'string') {
    throw new TypeError('"input" must be a string.');
  }
  if(typeof alphabet !== 'string') {
    throw new TypeError('"alphabet" must be a string.');
  }

  // Fetch (or lazily build and cache) the reverse table that maps a
  // character code to its digit value for this alphabet.
  var table = _reverseAlphabets[alphabet];
  if(!table) {
    // compute reverse alphabet
    table = _reverseAlphabets[alphabet] = [];
    for(var i = 0; i < alphabet.length; ++i) {
      table[alphabet.charCodeAt(i)] = i;
    }
  }

  // remove whitespace characters
  input = input.replace(/\s/g, '');

  // Accumulate the value as a big base-256 integer; `bytes` holds the
  // byte digits, least-significant byte first.
  var base = alphabet.length;
  var first = alphabet.charAt(0);
  var bytes = [0];
  for(var i = 0; i < input.length; i++) {
    var value = table[input.charCodeAt(i)];
    // character not in the alphabet: signal failure by returning undefined
    if(value === undefined) {
      return;
    }

    // multiply the accumulated number by the base and add the digit,
    // propagating the carry through the existing bytes
    for(var j = 0, carry = value; j < bytes.length; ++j) {
      carry += bytes[j] * base;
      bytes[j] = carry & 0xff;
      carry >>= 8;
    }

    while(carry > 0) {
      bytes.push(carry & 0xff);
      carry >>= 8;
    }
  }

  // deal with leading zeros: each leading "zero" character contributes one
  // zero byte that the big-integer math dropped
  for(var k = 0; input[k] === first && k < input.length - 1; ++k) {
    bytes.push(0);
  }

  // prefer a Node.js Buffer when available, otherwise a plain Uint8Array
  if(typeof Buffer !== 'undefined') {
    return Buffer.from(bytes.reverse());
  }

  return new Uint8Array(bytes.reverse());
};
|
|
|
|
/**
 * Base-N encodes a forge byte buffer (anything exposing length() and at())
 * using the given alphabet. Same big-integer division scheme as the
 * Uint8Array path in api.encode.
 *
 * @param input the byte-buffer-like object to encode.
 * @param alphabet the alphabet string to encode with.
 *
 * @return the baseN-encoded output string.
 */
function _encodeWithByteBuffer(input, alphabet) {
  var radix = alphabet.length;
  var zeroChar = alphabet.charAt(0);
  var byteCount = input.length();

  // base-`radix` digits of the running big integer, least-significant first
  var digits = [0];
  var idx;
  var pos;
  var acc;
  for(idx = 0; idx < byteCount; ++idx) {
    // multiply the accumulated number by 256 and add the next byte
    acc = input.at(idx);
    for(pos = 0; pos < digits.length; ++pos) {
      acc += digits[pos] << 8;
      digits[pos] = acc % radix;
      acc = (acc / radix) | 0;
    }
    // leftover carry grows the digit count
    while(acc > 0) {
      digits.push(acc % radix);
      acc = (acc / radix) | 0;
    }
  }

  var result = '';

  // one "zero" character per leading zero byte (all but the last byte)
  for(idx = 0; input.at(idx) === 0 && idx < byteCount - 1; ++idx) {
    result += zeroChar;
  }
  // digits are stored least-significant first; emit most-significant first
  for(pos = digits.length - 1; pos >= 0; --pos) {
    result += alphabet[digits[pos]];
  }

  return result;
}
|
|
|
|
/**
|
|
* Utility functions for web applications.
|
|
*
|
|
* @author Dave Longley
|
|
*
|
|
* Copyright (c) 2010-2018 Digital Bazaar, Inc.
|
|
*/
|
|
|
|
var forge$l = forge$m;
|
|
var baseN = baseN$1;
|
|
|
|
/* Utilities API */
|
|
var util$1 = forge$l.util = forge$l.util || {};
|
|
|
|
// define setImmediate and nextTick
// Picks the fastest available async-scheduling primitive for this
// environment, in order of preference:
//   1. Node.js: process.nextTick (+ native setImmediate when present)
//   2. native setImmediate (older IE / workers)
//   3. setTimeout(cb, 0), progressively upgraded to postMessage and then
//      MutationObserver where those are available.
(function() {
  // use native nextTick (unless we're in webpack)
  // webpack (or better node-libs-browser polyfill) sets process.browser.
  // this way we can detect webpack properly
  if(typeof process !== 'undefined' && process.nextTick && !process.browser) {
    util$1.nextTick = process.nextTick;
    if(typeof setImmediate === 'function') {
      util$1.setImmediate = setImmediate;
    } else {
      // polyfill setImmediate with nextTick, older versions of node
      // (those w/o setImmediate) won't totally starve IO
      util$1.setImmediate = util$1.nextTick;
    }
    return;
  }

  // polyfill nextTick with native setImmediate
  if(typeof setImmediate === 'function') {
    // wrapped so the bare function reference is never detached from its host
    util$1.setImmediate = function() { return setImmediate.apply(undefined, arguments); };
    util$1.nextTick = function(callback) {
      return setImmediate(callback);
    };
    return;
  }

  /* Note: A polyfill upgrade pattern is used here to allow combining
  polyfills. For example, MutationObserver is fast, but blocks UI updates,
  so it needs to allow UI updates periodically, so it falls back on
  postMessage or setTimeout. */

  // polyfill with setTimeout (baseline; always works but slow ~4ms clamp)
  util$1.setImmediate = function(callback) {
    setTimeout(callback, 0);
  };

  // upgrade polyfill to use postMessage
  if(typeof window !== 'undefined' &&
    typeof window.postMessage === 'function') {
    var msg = 'forge.setImmediate';
    var callbacks = [];
    util$1.setImmediate = function(callback) {
      callbacks.push(callback);
      // only send message when one hasn't been sent in
      // the current turn of the event loop
      if(callbacks.length === 1) {
        window.postMessage(msg, '*');
      }
    };
    // drains all queued callbacks when our sentinel message arrives
    function handler(event) {
      if(event.source === window && event.data === msg) {
        event.stopPropagation();
        // snapshot and reset so callbacks queued during dispatch run in
        // the next turn, not this one
        var copy = callbacks.slice();
        callbacks.length = 0;
        copy.forEach(function(callback) {
          callback();
        });
      }
    }
    window.addEventListener('message', handler, true);
  }

  // upgrade polyfill to use MutationObserver
  if(typeof MutationObserver !== 'undefined') {
    // polyfill with MutationObserver
    var now = Date.now();
    var attr = true;
    var div = document.createElement('div');
    var callbacks = [];
    new MutationObserver(function() {
      var copy = callbacks.slice();
      callbacks.length = 0;
      copy.forEach(function(callback) {
        callback();
      });
    }).observe(div, {attributes: true});
    // keep the previous (postMessage/setTimeout) implementation so we can
    // periodically yield to it and let the UI update
    var oldSetImmediate = util$1.setImmediate;
    util$1.setImmediate = function(callback) {
      if(Date.now() - now > 15) {
        // been busy for >15ms: fall back once so UI events can run
        now = Date.now();
        oldSetImmediate(callback);
      } else {
        callbacks.push(callback);
        // only trigger observer when it hasn't been triggered in
        // the current turn of the event loop
        if(callbacks.length === 1) {
          // toggling the attribute fires the observer asynchronously
          div.setAttribute('a', attr = !attr);
        }
      }
    };
  }

  util$1.nextTick = util$1.setImmediate;
})();
|
|
|
|
// check if running under Node.js
// Truthy (the node version string) when a Node-style `process` object with
// version info exists; used below to choose the global scope.
util$1.isNodejs =
  typeof process !== 'undefined' && process.versions && process.versions.node;


// 'self' will also work in Web Workers (instance of WorkerGlobalScope) while
// it will point to `window` in the main thread.
// To remain compatible with older browsers, we fall back to 'window' if 'self'
// is not available.
util$1.globalScope = (function() {
  if(util$1.isNodejs) {
    // bundler-provided global object (see commonjsGlobal at top of file)
    return commonjsGlobal;
  }

  return typeof self === 'undefined' ? window : self;
})();
|
|
|
|
// define isArray
/**
 * Tests whether a value is an Array. Prefers native Array.isArray and falls
 * back to the Object#toString tag check on very old engines.
 */
util$1.isArray = Array.isArray || function(candidate) {
  var tag = Object.prototype.toString.call(candidate);
  return tag === '[object Array]';
};

// define isArrayBuffer
/**
 * Tests whether a value is an ArrayBuffer (false when the environment has
 * no ArrayBuffer at all).
 */
util$1.isArrayBuffer = function(candidate) {
  return typeof ArrayBuffer !== 'undefined' && candidate instanceof ArrayBuffer;
};

// define isArrayBufferView
/**
 * Tests whether a value looks like an ArrayBuffer view (TypedArray or
 * DataView): it must carry an ArrayBuffer-backed `buffer` and define
 * `byteLength`.
 */
util$1.isArrayBufferView = function(candidate) {
  return candidate &&
    util$1.isArrayBuffer(candidate.buffer) &&
    candidate.byteLength !== undefined;
};
|
|
|
|
/**
|
|
* Ensure a bits param is 8, 16, 24, or 32. Used to validate input for
|
|
* algorithms where bit manipulation, JavaScript limitations, and/or algorithm
|
|
* design only allow for byte operations of a limited size.
|
|
*
|
|
* @param n number of bits.
|
|
*
|
|
* Throw Error if n invalid.
|
|
*/
|
|
/**
 * Validates that a bit-width argument is one of the byte-aligned sizes the
 * integer read/write helpers support.
 *
 * @param n number of bits.
 *
 * Throw Error if n is not 8, 16, 24, or 32.
 */
function _checkBitsParam(n) {
  switch(n) {
    case 8:
    case 16:
    case 24:
    case 32:
      return;
    default:
      throw new Error('Only 8, 16, 24, or 32 bits supported: ' + n);
  }
}
|
|
|
|
// TODO: set ByteBuffer to best available backing
// For now the public ByteBuffer is always the binary-string backed
// implementation defined below (function declaration, so hoisted).
util$1.ByteBuffer = ByteStringBuffer;
|
|
|
|
/** Buffer w/BinaryString backing */
|
|
|
|
/**
 * Constructor for a binary string backed byte buffer.
 *
 * Accepted inputs:
 * - a binary-encoded string (one byte per character), used as-is;
 * - an ArrayBuffer / typed array / Node.js Buffer, whose bytes are copied
 *   into the backing binary string;
 * - another ByteStringBuffer (or duck-typed {data, read} object), whose
 *   contents and read pointer are copied.
 *
 * @param [b] the bytes to wrap (either encoded as string, one byte per
 *          character, or as an ArrayBuffer or Typed Array).
 */
function ByteStringBuffer(b) {
  // TODO: update to match DataBuffer API

  // the data in this buffer
  this.data = '';
  // the pointer for reading from this buffer
  this.read = 0;

  if(typeof b === 'string') {
    this.data = b;
  } else if(util$1.isArrayBuffer(b) || util$1.isArrayBufferView(b)) {
    if(typeof Buffer !== 'undefined' && b instanceof Buffer) {
      // Node.js Buffer: use its native binary-string conversion
      this.data = b.toString('binary');
    } else {
      // convert native buffer to forge buffer
      // FIXME: support native buffers internally instead
      var arr = new Uint8Array(b);
      try {
        // fast path: convert every byte in one fromCharCode call
        this.data = String.fromCharCode.apply(null, arr);
      } catch(e) {
        // apply() can exceed the engine's argument-count limit for large
        // arrays; fall back to appending one byte at a time
        for(var i = 0; i < arr.length; ++i) {
          this.putByte(arr[i]);
        }
      }
    }
  } else if(b instanceof ByteStringBuffer ||
    (typeof b === 'object' && typeof b.data === 'string' &&
    typeof b.read === 'number')) {
    // copy existing buffer
    this.data = b.data;
    this.read = b.read;
  }

  // used for v8 optimization
  this._constructedStringLength = 0;
}
util$1.ByteStringBuffer = ByteStringBuffer;
|
|
|
|
/* Note: This is an optimization for V8-based browsers. When V8 concatenates
|
|
a string, the strings are only joined logically using a "cons string" or
|
|
"constructed/concatenated string". These containers keep references to one
|
|
another and can result in very large memory usage. For example, if a 2MB
|
|
string is constructed by concatenating 4 bytes together at a time, the
|
|
memory usage will be ~44MB; so ~22x increase. The strings are only joined
|
|
together when an operation requiring their joining takes place, such as
|
|
substr(). This function is called when adding data to this buffer to ensure
|
|
these types of strings are periodically joined to reduce the memory
|
|
footprint. */
|
|
// threshold (in appended bytes) after which the backing string is flattened
var _MAX_CONSTRUCTED_STRING_LENGTH = 4096;
// Tracks how many bytes have been appended since the last flatten; once the
// threshold is crossed, forces V8 to join its lazy "cons string" chain (see
// the note above for why this matters for memory usage).
util$1.ByteStringBuffer.prototype._optimizeConstructedString = function(x) {
  this._constructedStringLength += x;
  if(this._constructedStringLength > _MAX_CONSTRUCTED_STRING_LENGTH) {
    // this substr() should cause the constructed string to join
    // (the return value is intentionally discarded; only the flattening
    // side effect is wanted)
    this.data.substr(0, 1);
    this._constructedStringLength = 0;
  }
};
|
|
|
|
/**
 * Gets the number of unread bytes in this buffer.
 *
 * @return the number of bytes in this buffer.
 */
util$1.ByteStringBuffer.prototype.length = function() {
  var unread = this.data.length - this.read;
  return unread;
};

/**
 * Gets whether or not this buffer is empty.
 *
 * @return true if this buffer is empty, false if not.
 */
util$1.ByteStringBuffer.prototype.isEmpty = function() {
  return !(this.length() > 0);
};

/**
 * Appends a single byte to this buffer.
 *
 * @param b the byte to put.
 *
 * @return this buffer.
 */
util$1.ByteStringBuffer.prototype.putByte = function(b) {
  var ch = String.fromCharCode(b);
  return this.putBytes(ch);
};
|
|
|
|
/**
 * Puts a byte in this buffer N times.
 *
 * Builds the fill string by repeated doubling, so only O(log n) string
 * concatenations are performed.
 *
 * @param b the byte to put.
 * @param n the number of bytes of value b to put.
 *
 * @return this buffer.
 */
util$1.ByteStringBuffer.prototype.fillWithByte = function(b, n) {
  b = String.fromCharCode(b);
  var d = this.data;
  // BUG FIX: the doubling loop below consumes `n`, so the original code
  // always passed 0 to _optimizeConstructedString and the cons-string
  // flattening counter never advanced for this method. Remember the real
  // byte count up front. (Buffer contents are unaffected by this fix.)
  var count = n;
  while(n > 0) {
    if(n & 1) {
      d += b;
    }
    n >>>= 1;
    if(n > 0) {
      b += b;
    }
  }
  this.data = d;
  this._optimizeConstructedString(count);
  return this;
};
|
|
|
|
/**
 * Puts bytes in this buffer.
 *
 * @param bytes the bytes (as a binary encoded string) to put.
 *
 * @return this buffer.
 */
util$1.ByteStringBuffer.prototype.putBytes = function(bytes) {
  this.data = this.data + bytes;
  // keep V8's lazy string rope from growing unbounded
  this._optimizeConstructedString(bytes.length);
  return this;
};

/**
 * Puts a UTF-16 encoded string into this buffer.
 *
 * @param str the string to put.
 *
 * @return this buffer.
 */
util$1.ByteStringBuffer.prototype.putString = function(str) {
  var encoded = util$1.encodeUtf8(str);
  return this.putBytes(encoded);
};
|
|
|
|
/**
 * Puts a 16-bit integer in this buffer in big-endian order.
 *
 * @param i the 16-bit integer.
 *
 * @return this buffer.
 */
util$1.ByteStringBuffer.prototype.putInt16 = function(i) {
  var be = String.fromCharCode((i >> 8) & 0xFF, i & 0xFF);
  return this.putBytes(be);
};

/**
 * Puts a 24-bit integer in this buffer in big-endian order.
 *
 * @param i the 24-bit integer.
 *
 * @return this buffer.
 */
util$1.ByteStringBuffer.prototype.putInt24 = function(i) {
  var be = String.fromCharCode(
    (i >> 16) & 0xFF,
    (i >> 8) & 0xFF,
    i & 0xFF);
  return this.putBytes(be);
};

/**
 * Puts a 32-bit integer in this buffer in big-endian order.
 *
 * @param i the 32-bit integer.
 *
 * @return this buffer.
 */
util$1.ByteStringBuffer.prototype.putInt32 = function(i) {
  var be = String.fromCharCode(
    (i >> 24) & 0xFF,
    (i >> 16) & 0xFF,
    (i >> 8) & 0xFF,
    i & 0xFF);
  return this.putBytes(be);
};

/**
 * Puts a 16-bit integer in this buffer in little-endian order.
 *
 * @param i the 16-bit integer.
 *
 * @return this buffer.
 */
util$1.ByteStringBuffer.prototype.putInt16Le = function(i) {
  var le = String.fromCharCode(i & 0xFF, (i >> 8) & 0xFF);
  return this.putBytes(le);
};

/**
 * Puts a 24-bit integer in this buffer in little-endian order.
 *
 * @param i the 24-bit integer.
 *
 * @return this buffer.
 */
util$1.ByteStringBuffer.prototype.putInt24Le = function(i) {
  var le = String.fromCharCode(
    i & 0xFF,
    (i >> 8) & 0xFF,
    (i >> 16) & 0xFF);
  return this.putBytes(le);
};

/**
 * Puts a 32-bit integer in this buffer in little-endian order.
 *
 * @param i the 32-bit integer.
 *
 * @return this buffer.
 */
util$1.ByteStringBuffer.prototype.putInt32Le = function(i) {
  var le = String.fromCharCode(
    i & 0xFF,
    (i >> 8) & 0xFF,
    (i >> 16) & 0xFF,
    (i >> 24) & 0xFF);
  return this.putBytes(le);
};
|
|
|
|
/**
 * Puts an n-bit integer in this buffer in big-endian order.
 *
 * @param i the n-bit integer.
 * @param n the number of bits in the integer (8, 16, 24, or 32).
 *
 * @return this buffer.
 */
util$1.ByteStringBuffer.prototype.putInt = function(i, n) {
  _checkBitsParam(n);
  var out = '';
  // emit bytes most-significant first: shifts n-8, n-16, ..., 0
  for(var shift = n - 8; shift >= 0; shift -= 8) {
    out += String.fromCharCode((i >> shift) & 0xFF);
  }
  return this.putBytes(out);
};
|
|
|
|
/**
 * Puts a signed n-bit integer in this buffer in big-endian order. Two's
 * complement representation is used.
 *
 * @param i the n-bit integer.
 * @param n the number of bits in the integer (8, 16, 24, or 32).
 *
 * @return this buffer.
 */
util$1.ByteStringBuffer.prototype.putSignedInt = function(i, n) {
  // putInt checks n
  // Map a negative value to its two's-complement representation by adding
  // 2^n. NOTE: for n === 32, `2 << 31` is 0 under JS 32-bit shift
  // arithmetic, so nothing is added; putInt's per-byte `& 0xFF` masking of
  // the (negative) value still yields the correct two's-complement bytes.
  if(i < 0) {
    i += 2 << (n - 1);
  }
  return this.putInt(i, n);
};
|
|
|
|
/**
 * Puts the given buffer into this buffer. Note that this drains the source
 * buffer (getBytes consumes its contents).
 *
 * @param buffer the buffer to put into this one.
 *
 * @return this buffer.
 */
util$1.ByteStringBuffer.prototype.putBuffer = function(buffer) {
  var drained = buffer.getBytes();
  return this.putBytes(drained);
};

/**
 * Gets a byte from this buffer and advances the read pointer by 1.
 *
 * @return the byte.
 */
util$1.ByteStringBuffer.prototype.getByte = function() {
  var b = this.data.charCodeAt(this.read);
  ++this.read;
  return b;
};
|
|
|
|
/**
 * Gets a uint16 from this buffer in big-endian order and advances the read
 * pointer by 2.
 *
 * @return the uint16.
 */
util$1.ByteStringBuffer.prototype.getInt16 = function() {
  var b0 = this.data.charCodeAt(this.read);
  var b1 = this.data.charCodeAt(this.read + 1);
  this.read += 2;
  return (b0 << 8) ^ b1;
};

/**
 * Gets a uint24 from this buffer in big-endian order and advances the read
 * pointer by 3.
 *
 * @return the uint24.
 */
util$1.ByteStringBuffer.prototype.getInt24 = function() {
  var b0 = this.data.charCodeAt(this.read);
  var b1 = this.data.charCodeAt(this.read + 1);
  var b2 = this.data.charCodeAt(this.read + 2);
  this.read += 3;
  return (b0 << 16) ^ (b1 << 8) ^ b2;
};

/**
 * Gets a uint32 from this buffer in big-endian order and advances the read
 * pointer by 4. (May be negative as a signed 32-bit result of `<< 24`.)
 *
 * @return the word.
 */
util$1.ByteStringBuffer.prototype.getInt32 = function() {
  var b0 = this.data.charCodeAt(this.read);
  var b1 = this.data.charCodeAt(this.read + 1);
  var b2 = this.data.charCodeAt(this.read + 2);
  var b3 = this.data.charCodeAt(this.read + 3);
  this.read += 4;
  return (b0 << 24) ^ (b1 << 16) ^ (b2 << 8) ^ b3;
};

/**
 * Gets a uint16 from this buffer in little-endian order and advances the read
 * pointer by 2.
 *
 * @return the uint16.
 */
util$1.ByteStringBuffer.prototype.getInt16Le = function() {
  var b0 = this.data.charCodeAt(this.read);
  var b1 = this.data.charCodeAt(this.read + 1);
  this.read += 2;
  return b0 ^ (b1 << 8);
};

/**
 * Gets a uint24 from this buffer in little-endian order and advances the read
 * pointer by 3.
 *
 * @return the uint24.
 */
util$1.ByteStringBuffer.prototype.getInt24Le = function() {
  var b0 = this.data.charCodeAt(this.read);
  var b1 = this.data.charCodeAt(this.read + 1);
  var b2 = this.data.charCodeAt(this.read + 2);
  this.read += 3;
  return b0 ^ (b1 << 8) ^ (b2 << 16);
};

/**
 * Gets a uint32 from this buffer in little-endian order and advances the read
 * pointer by 4. (May be negative as a signed 32-bit result of `<< 24`.)
 *
 * @return the word.
 */
util$1.ByteStringBuffer.prototype.getInt32Le = function() {
  var b0 = this.data.charCodeAt(this.read);
  var b1 = this.data.charCodeAt(this.read + 1);
  var b2 = this.data.charCodeAt(this.read + 2);
  var b3 = this.data.charCodeAt(this.read + 3);
  this.read += 4;
  return b0 ^ (b1 << 8) ^ (b2 << 16) ^ (b3 << 24);
};
|
|
|
|
/**
 * Gets an n-bit integer from this buffer in big-endian order and advances the
 * read pointer by ceil(n/8).
 *
 * @param n the number of bits in the integer (8, 16, 24, or 32).
 *
 * @return the integer.
 */
util$1.ByteStringBuffer.prototype.getInt = function(n) {
  _checkBitsParam(n);
  var rval = 0;
  // fold one byte per 8 bits, most-significant byte first
  // TODO: Use (rval * 0x100) if adding support for 33 to 53 bits.
  for(var remaining = n; remaining > 0; remaining -= 8) {
    rval = (rval << 8) + this.data.charCodeAt(this.read++);
  }
  return rval;
};
|
|
|
|
/**
 * Gets a signed n-bit integer from this buffer in big-endian order, using
 * two's complement, and advances the read pointer by n/8.
 *
 * @param n the number of bits in the integer (8, 16, 24, or 32).
 *
 * @return the integer.
 */
util$1.ByteStringBuffer.prototype.getSignedInt = function(n) {
  // getInt checks n
  var x = this.getInt(n);
  // `2 << (n - 2)` is 2^(n-1): the smallest unsigned value whose sign bit
  // is set. Values at or above it represent negatives in two's complement.
  var max = 2 << (n - 2);
  if(x >= max) {
    // subtract 2^n to recover the negative value
    x -= max << 1;
  }
  return x;
};
|
|
|
|
/**
 * Reads bytes out as a binary encoded string and clears them from the
 * buffer. Note that the resulting string is binary encoded (in node.js this
 * encoding is referred to as `binary`, it is *not* `utf8`).
 *
 * @param count the number of bytes to read, undefined or null for all.
 *
 * @return a binary encoded string of bytes.
 */
util$1.ByteStringBuffer.prototype.getBytes = function(count) {
  if(count) {
    // read exactly `count` bytes, clamped to what is available
    var take = Math.min(this.length(), count);
    var chunk = this.data.slice(this.read, this.read + take);
    this.read += take;
    return chunk;
  }
  if(count === 0) {
    return '';
  }
  // read everything; avoid copying when nothing has been consumed yet
  var all = (this.read === 0) ? this.data : this.data.slice(this.read);
  this.clear();
  return all;
};
|
|
|
|
/**
 * Gets a binary encoded string of the bytes from this buffer without
 * modifying the read pointer.
 *
 * @param count the number of bytes to get, omit to get all.
 *
 * @return a string full of binary encoded characters.
 */
util$1.ByteStringBuffer.prototype.bytes = function(count) {
  if(typeof count === 'undefined') {
    return this.data.slice(this.read);
  }
  return this.data.slice(this.read, this.read + count);
};

/**
 * Gets a byte at the given index without modifying the read pointer.
 *
 * @param i the byte index (relative to the read pointer).
 *
 * @return the byte.
 */
util$1.ByteStringBuffer.prototype.at = function(i) {
  var pos = this.read + i;
  return this.data.charCodeAt(pos);
};
|
|
|
|
/**
 * Puts a byte at the given index without modifying the read pointer.
 *
 * @param i the byte index (relative to the read pointer).
 * @param b the byte to put.
 *
 * @return this buffer.
 */
util$1.ByteStringBuffer.prototype.setAt = function(i, b) {
  // strings are immutable: rebuild around the replaced character
  var pos = this.read + i;
  var before = this.data.substr(0, pos);
  var after = this.data.substr(pos + 1);
  this.data = before + String.fromCharCode(b) + after;
  return this;
};

/**
 * Gets the last byte without modifying the read pointer.
 *
 * @return the last byte.
 */
util$1.ByteStringBuffer.prototype.last = function() {
  var lastIndex = this.data.length - 1;
  return this.data.charCodeAt(lastIndex);
};
|
|
|
|
/**
 * Creates a copy of this buffer (same data and read pointer).
 *
 * @return the copy.
 */
util$1.ByteStringBuffer.prototype.copy = function() {
  var duplicate = util$1.createBuffer(this.data);
  duplicate.read = this.read;
  return duplicate;
};

/**
 * Compacts this buffer by discarding already-read bytes and resetting the
 * read pointer to 0.
 *
 * @return this buffer.
 */
util$1.ByteStringBuffer.prototype.compact = function() {
  if(this.read <= 0) {
    // nothing consumed yet; no work to do
    return this;
  }
  this.data = this.data.slice(this.read);
  this.read = 0;
  return this;
};
|
|
|
|
/**
 * Clears this buffer (empties the data and resets the read pointer).
 *
 * @return this buffer.
 */
util$1.ByteStringBuffer.prototype.clear = function() {
  this.read = 0;
  this.data = '';
  return this;
};

/**
 * Shortens this buffer by trimming bytes off of the end of this buffer.
 * Also compacts away already-read bytes and resets the read pointer.
 *
 * @param count the number of bytes to trim off.
 *
 * @return this buffer.
 */
util$1.ByteStringBuffer.prototype.truncate = function(count) {
  // keep everything that remains after dropping `count` bytes from the end
  var keep = Math.max(0, this.length() - count);
  this.data = this.data.substr(this.read, keep);
  this.read = 0;
  return this;
};
|
|
|
|
/**
 * Converts the unread portion of this buffer to a hexadecimal string
 * without modifying the read pointer.
 *
 * @return a hexadecimal string.
 */
util$1.ByteStringBuffer.prototype.toHex = function() {
  var hex = '';
  for(var i = this.read; i < this.data.length; ++i) {
    var code = this.data.charCodeAt(i);
    var pair = code.toString(16);
    // zero-pad single-digit values to two hex characters
    if(pair.length === 1) {
      pair = '0' + pair;
    }
    hex += pair;
  }
  return hex;
};

/**
 * Converts this buffer to a UTF-16 string (standard JavaScript string),
 * interpreting the unread bytes as UTF-8.
 *
 * @return a UTF-16 string.
 */
util$1.ByteStringBuffer.prototype.toString = function() {
  var raw = this.bytes();
  return util$1.decodeUtf8(raw);
};
|
|
|
|
/** End Buffer w/BinaryString backing */
|
|
|
|
/** Buffer w/UInt8Array backing */
|
|
|
|
/**
 * FIXME: Experimental. Do not use yet.
 *
 * Constructor for an ArrayBuffer-backed byte buffer.
 *
 * The buffer may be constructed from a string, an ArrayBuffer, DataView, or a
 * TypedArray.
 *
 * If a string is given, its encoding should be provided as an option,
 * otherwise it will default to 'binary'. A 'binary' string is encoded such
 * that each character is one byte in length and size.
 *
 * If an ArrayBuffer, DataView, or TypedArray is given, it will be used
 * *directly* without any copying. Note that, if a write to the buffer requires
 * more space, the buffer will allocate a new backing ArrayBuffer to
 * accommodate. The starting read and write offsets for the buffer may be
 * given as options.
 *
 * @param [b] the initial bytes for this buffer.
 * @param options the options to use:
 *          [readOffset] the starting read offset to use (default: 0).
 *          [writeOffset] the starting write offset to use (default: the
 *            length of the first parameter).
 *          [growSize] the minimum amount, in bytes, to grow the buffer by to
 *            accommodate writes (default: 1024).
 *          [encoding] the encoding ('binary', 'utf8', 'utf16', 'hex') for the
 *            first parameter, if it is a string (default: 'binary').
 */
function DataBuffer(b, options) {
  // default options
  options = options || {};

  // pointers for read from/write to buffer
  this.read = options.readOffset || 0;
  this.growSize = options.growSize || 1024;

  var isArrayBuffer = util$1.isArrayBuffer(b);
  var isArrayBufferView = util$1.isArrayBufferView(b);
  if(isArrayBuffer || isArrayBufferView) {
    // use ArrayBuffer directly (zero-copy; caller's buffer is aliased)
    if(isArrayBuffer) {
      this.data = new DataView(b);
    } else {
      // TODO: adjust read/write offset based on the type of view
      // or specify that this must be done in the options ... that the
      // offsets are byte-based
      this.data = new DataView(b.buffer, b.byteOffset, b.byteLength);
    }
    // default write pointer: end of the provided data
    this.write = ('writeOffset' in options ?
      options.writeOffset : this.data.byteLength);
    return;
  }

  // initialize to empty array buffer and add any given bytes using putBytes
  this.data = new DataView(new ArrayBuffer(0));
  this.write = 0;

  if(b !== null && b !== undefined) {
    this.putBytes(b);
  }

  // explicit writeOffset overrides whatever putBytes advanced to
  if('writeOffset' in options) {
    this.write = options.writeOffset;
  }
}
util$1.DataBuffer = DataBuffer;
|
|
|
|
/**
 * Gets the number of bytes in this buffer (bytes written but not yet read).
 *
 * @return the number of bytes in this buffer.
 */
util$1.DataBuffer.prototype.length = function() {
  var pending = this.write - this.read;
  return pending;
};

/**
 * Gets whether or not this buffer is empty.
 *
 * @return true if this buffer is empty, false if not.
 */
util$1.DataBuffer.prototype.isEmpty = function() {
  return !(this.length() > 0);
};
|
|
|
|
/**
 * Ensures this buffer has enough empty space to accommodate the given number
 * of bytes. An optional parameter may be given that indicates a minimum
 * amount to grow the buffer if necessary. If the parameter is not given,
 * the buffer will be grown by some previously-specified default amount
 * or heuristic.
 *
 * @param amount the number of bytes to accommodate.
 * @param [growSize] the minimum amount, in bytes, to grow the buffer by if
 *          necessary.
 */
util$1.DataBuffer.prototype.accommodate = function(amount, growSize) {
  // NOTE(review): this compares the number of *buffered* bytes
  // (write - read) against `amount`, not the free space past `write`, and
  // the new backing store below is sized length() + growSize which can be
  // smaller than the old view. The whole class is marked experimental
  // above — confirm against upstream forge before relying on this method.
  if(this.length() >= amount) {
    return this;
  }
  growSize = Math.max(growSize || this.growSize, amount);

  // grow buffer: copy the current view into a larger backing ArrayBuffer
  var src = new Uint8Array(
    this.data.buffer, this.data.byteOffset, this.data.byteLength);
  var dst = new Uint8Array(this.length() + growSize);
  dst.set(src);
  this.data = new DataView(dst.buffer);

  return this;
};
|
|
|
|
/**
 * Puts a byte in this buffer at the current write position and advances
 * the write pointer.
 *
 * @param b the byte to put.
 *
 * @return this buffer.
 */
util$1.DataBuffer.prototype.putByte = function(b) {
  this.accommodate(1);
  this.data.setUint8(this.write, b);
  this.write += 1;
  return this;
};
|
|
|
|
/**
 * Puts a byte in this buffer N times, advancing the write pointer by N.
 *
 * @param b the byte to put.
 * @param n the number of bytes of value b to put.
 *
 * @return this buffer.
 */
util$1.DataBuffer.prototype.fillWithByte = function(b, n) {
  this.accommodate(n);
  for(var i = 0; i < n; ++i) {
    // BUG FIX: the original called this.data.setUint8(b), which treated `b`
    // as the byte OFFSET (writing the value 0, i.e. undefined coerced, at
    // position b) and never advanced the write pointer. Write `b` at the
    // current write position instead, mirroring putByte above.
    this.data.setUint8(this.write++, b);
  }
  return this;
};
|
|
|
|
/**
 * Puts bytes in this buffer. The bytes may be given as a string, an
 * ArrayBuffer, a DataView, a TypedArray, a DataBuffer, or a
 * ByteStringBuffer.
 *
 * @param bytes the bytes to put.
 * @param [encoding] the encoding for the first parameter ('binary', 'utf8',
 *          'utf16', 'hex', 'base64'), if it is a string (default: 'binary').
 *
 * @return this buffer.
 *
 * @throws Error if the encoding is unknown or the parameter type is not
 *           supported.
 */
util$1.DataBuffer.prototype.putBytes = function(bytes, encoding) {
  if(util$1.isArrayBufferView(bytes)) {
    var src = new Uint8Array(bytes.buffer, bytes.byteOffset, bytes.byteLength);
    // src.byteLength is already the view's length; the original subtracted
    // src.byteOffset again, under-counting for views with a non-zero offset
    var len = src.byteLength;
    this.accommodate(len);
    var dst = new Uint8Array(this.data.buffer, this.write);
    dst.set(src);
    this.write += len;
    return this;
  }

  if(util$1.isArrayBuffer(bytes)) {
    var src = new Uint8Array(bytes);
    this.accommodate(src.byteLength);
    var dst = new Uint8Array(this.data.buffer);
    dst.set(src, this.write);
    this.write += src.byteLength;
    return this;
  }

  // bytes is a util.DataBuffer or equivalent
  if(bytes instanceof util$1.DataBuffer ||
    (typeof bytes === 'object' &&
    typeof bytes.read === 'number' && typeof bytes.write === 'number' &&
    util$1.isArrayBufferView(bytes.data))) {
    // view the unread region of the source buffer; the original passed
    // bytes.data.byteLength (a number) where an ArrayBuffer is required
    var src = new Uint8Array(bytes.data.buffer, bytes.read, bytes.length());
    this.accommodate(src.byteLength);
    // likewise, the destination view must be built from this.data.buffer
    var dst = new Uint8Array(this.data.buffer, this.write);
    dst.set(src);
    this.write += src.byteLength;
    return this;
  }

  if(bytes instanceof util$1.ByteStringBuffer) {
    // copy binary string and process as the same as a string parameter below
    bytes = bytes.data;
    encoding = 'binary';
  }

  // string conversion
  encoding = encoding || 'binary';
  if(typeof bytes === 'string') {
    var view;

    // decode from string; each view below already starts at this.write, so
    // the decoders are told to write at offset 0 *within the view* (the
    // original passed this.write again, double-applying the offset)
    if(encoding === 'hex') {
      this.accommodate(Math.ceil(bytes.length / 2));
      view = new Uint8Array(this.data.buffer, this.write);
      this.write += util$1.binary.hex.decode(bytes, view, 0);
      return this;
    }
    if(encoding === 'base64') {
      this.accommodate(Math.ceil(bytes.length / 4) * 3);
      view = new Uint8Array(this.data.buffer, this.write);
      this.write += util$1.binary.base64.decode(bytes, view, 0);
      return this;
    }

    // encode text as UTF-8 bytes
    if(encoding === 'utf8') {
      // encode as UTF-8 then decode string as raw binary
      bytes = util$1.encodeUtf8(bytes);
      encoding = 'binary';
    }

    // decode string as raw binary
    if(encoding === 'binary' || encoding === 'raw') {
      // one byte per character; the original dropped the string argument
      // entirely, so nothing was ever decoded
      this.accommodate(bytes.length);
      view = new Uint8Array(this.data.buffer, this.write);
      this.write += util$1.binary.raw.decode(bytes, view, 0);
      return this;
    }

    // encode text as UTF-16 bytes
    if(encoding === 'utf16') {
      // two bytes per character; encode into a fresh array first, then copy
      // it into place (utf16.encode writes through the target's underlying
      // buffer and does not honor a view's byteOffset)
      this.accommodate(bytes.length * 2);
      var encoded = util$1.text.utf16.encode(bytes);
      new Uint8Array(this.data.buffer, this.write).set(encoded);
      this.write += encoded.byteLength;
      return this;
    }

    throw new Error('Invalid encoding: ' + encoding);
  }

  throw Error('Invalid parameter: ' + bytes);
};
|
|
|
|
/**
 * Puts the given buffer into this buffer.
 *
 * Note: this drains the source buffer — it is cleared after its bytes are
 * copied in.
 *
 * @param buffer the buffer to put into this one.
 *
 * @return this buffer.
 */
util$1.DataBuffer.prototype.putBuffer = function(buffer) {
  this.putBytes(buffer);
  // empty the source buffer so its bytes are consumed, not shared
  buffer.clear();
  return this;
};

/**
 * Puts a string into this buffer.
 *
 * @param str the string to put.
 * @param [encoding] the encoding for the string (default: 'utf16').
 *
 * @return this buffer.
 */
util$1.DataBuffer.prototype.putString = function(str) {
  // always stored as UTF-16 (two bytes per character)
  return this.putBytes(str, 'utf16');
};
|
|
|
|
/**
 * Puts a 16-bit integer in this buffer in big-endian order and advances
 * the write pointer by 2.
 *
 * @param i the 16-bit integer.
 *
 * @return this buffer.
 */
util$1.DataBuffer.prototype.putInt16 = function(i) {
  this.accommodate(2);
  this.data.setInt16(this.write, i);
  this.write += 2;
  return this;
};
|
|
|
|
/**
 * Puts a 24-bit integer in this buffer in big-endian order and advances
 * the write pointer by 3.
 *
 * @param i the 24-bit integer.
 *
 * @return this buffer.
 */
util$1.DataBuffer.prototype.putInt24 = function(i) {
  this.accommodate(3);
  // high 16 bits first, then the low byte at the following offset; the
  // original wrote both values at this.write (the second store clobbered
  // the first) and took the low byte from the wrong shift (i >> 16)
  this.data.setInt16(this.write, i >> 8 & 0xFFFF);
  this.data.setInt8(this.write + 2, i & 0xFF);
  this.write += 3;
  return this;
};
|
|
|
|
/**
 * Puts a 32-bit integer in this buffer in big-endian order and advances
 * the write pointer by 4.
 *
 * @param i the 32-bit integer.
 *
 * @return this buffer.
 */
util$1.DataBuffer.prototype.putInt32 = function(i) {
  this.accommodate(4);
  this.data.setInt32(this.write, i);
  this.write += 4;
  return this;
};

/**
 * Puts a 16-bit integer in this buffer in little-endian order and advances
 * the write pointer by 2.
 *
 * @param i the 16-bit integer.
 *
 * @return this buffer.
 */
util$1.DataBuffer.prototype.putInt16Le = function(i) {
  this.accommodate(2);
  // third DataView argument selects little-endian byte order
  this.data.setInt16(this.write, i, true);
  this.write += 2;
  return this;
};
|
|
|
|
/**
 * Puts a 24-bit integer in this buffer in little-endian order and advances
 * the write pointer by 3.
 *
 * @param i the 24-bit integer.
 *
 * @return this buffer.
 */
util$1.DataBuffer.prototype.putInt24Le = function(i) {
  this.accommodate(3);
  // low byte first, then the high 16 bits little-endian at offset + 1; the
  // original wrote both values at this.write (the second store clobbered
  // the first) and took the low byte from the wrong shift (i >> 16)
  this.data.setInt8(this.write, i & 0xFF);
  this.data.setInt16(this.write + 1, i >> 8 & 0xFFFF, true);
  this.write += 3;
  return this;
};
|
|
|
|
/**
 * Puts a 32-bit integer in this buffer in little-endian order and advances
 * the write pointer by 4.
 *
 * @param i the 32-bit integer.
 *
 * @return this buffer.
 */
util$1.DataBuffer.prototype.putInt32Le = function(i) {
  this.accommodate(4);
  this.data.setInt32(this.write, i, true);
  this.write += 4;
  return this;
};

/**
 * Puts an n-bit integer in this buffer in big-endian order and advances
 * the write pointer by n/8.
 *
 * @param i the n-bit integer.
 * @param n the number of bits in the integer (8, 16, 24, or 32).
 *
 * @return this buffer.
 *
 * @throws Error if n is not one of the supported bit widths
 *           (enforced by _checkBitsParam).
 */
util$1.DataBuffer.prototype.putInt = function(i, n) {
  _checkBitsParam(n);
  this.accommodate(n / 8);
  // emit the most significant byte first, one byte per iteration
  do {
    n -= 8;
    this.data.setInt8(this.write++, (i >> n) & 0xFF);
  } while(n > 0);
  return this;
};

/**
 * Puts a signed n-bit integer in this buffer in big-endian order. Two's
 * complement representation is used.
 *
 * @param i the n-bit integer.
 * @param n the number of bits in the integer.
 *
 * @return this buffer.
 */
util$1.DataBuffer.prototype.putSignedInt = function(i, n) {
  _checkBitsParam(n);
  this.accommodate(n / 8);
  if(i < 0) {
    // 2 << (n - 1) === 2^n: maps a negative value to its two's-complement
    // unsigned representation before writing
    i += 2 << (n - 1);
  }
  return this.putInt(i, n);
};
|
|
|
|
/**
 * Gets a byte from this buffer and advances the read pointer by 1.
 *
 * NOTE(review): this reads with getInt8, so bytes >= 0x80 come back as
 * negative values — confirm callers expect signed bytes here.
 *
 * @return the byte.
 */
util$1.DataBuffer.prototype.getByte = function() {
  return this.data.getInt8(this.read++);
};

/**
 * Gets a uint16 from this buffer in big-endian order and advances the read
 * pointer by 2.
 *
 * @return the uint16.
 */
util$1.DataBuffer.prototype.getInt16 = function() {
  var rval = this.data.getInt16(this.read);
  this.read += 2;
  return rval;
};
|
|
|
|
/**
 * Gets a uint24 from this buffer in big-endian order and advances the read
 * pointer by 3.
 *
 * @return the uint24.
 */
util$1.DataBuffer.prototype.getInt24 = function() {
  // read the low byte unsigned; the original used getInt8, which
  // sign-extends bytes >= 0x80 and corrupts the combined value
  var rval = (
    this.data.getInt16(this.read) << 8 ^
    this.data.getUint8(this.read + 2));
  this.read += 3;
  return rval;
};
|
|
|
|
/**
 * Gets a uint32 from this buffer in big-endian order and advances the read
 * pointer by 4.
 *
 * @return the word.
 */
util$1.DataBuffer.prototype.getInt32 = function() {
  var rval = this.data.getInt32(this.read);
  this.read += 4;
  return rval;
};

/**
 * Gets a uint16 from this buffer in little-endian order and advances the read
 * pointer by 2.
 *
 * @return the uint16.
 */
util$1.DataBuffer.prototype.getInt16Le = function() {
  // third DataView argument selects little-endian byte order
  var rval = this.data.getInt16(this.read, true);
  this.read += 2;
  return rval;
};
|
|
|
|
/**
 * Gets a uint24 from this buffer in little-endian order and advances the read
 * pointer by 3.
 *
 * @return the uint24.
 */
util$1.DataBuffer.prototype.getInt24Le = function() {
  // read the low byte unsigned; the original used getInt8, which
  // sign-extends bytes >= 0x80 and corrupts the combined value
  var rval = (
    this.data.getUint8(this.read) ^
    this.data.getInt16(this.read + 1, true) << 8);
  this.read += 3;
  return rval;
};
|
|
|
|
/**
 * Gets a uint32 from this buffer in little-endian order and advances the read
 * pointer by 4.
 *
 * @return the word.
 */
util$1.DataBuffer.prototype.getInt32Le = function() {
  var rval = this.data.getInt32(this.read, true);
  this.read += 4;
  return rval;
};

/**
 * Gets an n-bit integer from this buffer in big-endian order and advances the
 * read pointer by n/8.
 *
 * NOTE(review): bytes are accumulated via getInt8, so a set high bit in the
 * first byte yields a negative result — confirm this matches callers.
 *
 * @param n the number of bits in the integer (8, 16, 24, or 32).
 *
 * @return the integer.
 */
util$1.DataBuffer.prototype.getInt = function(n) {
  _checkBitsParam(n);
  var rval = 0;
  do {
    // TODO: Use (rval * 0x100) if adding support for 33 to 53 bits.
    rval = (rval << 8) + this.data.getInt8(this.read++);
    n -= 8;
  } while(n > 0);
  return rval;
};

/**
 * Gets a signed n-bit integer from this buffer in big-endian order, using
 * two's complement, and advances the read pointer by n/8.
 *
 * @param n the number of bits in the integer (8, 16, 24, or 32).
 *
 * @return the integer.
 */
util$1.DataBuffer.prototype.getSignedInt = function(n) {
  // getInt checks n
  var x = this.getInt(n);
  // 2 << (n - 2) === 2^(n-1): the smallest value whose sign bit is set
  var max = 2 << (n - 2);
  if(x >= max) {
    // value had its sign bit set; subtract 2^n to recover the negative value
    x -= max << 1;
  }
  return x;
};
|
|
|
|
/**
 * Reads bytes out as a binary encoded string and clears them from the
 * buffer.
 *
 * NOTE(review): this.data is a DataView (see clear()), and DataView has no
 * slice() method — these paths look broken as written; confirm before use
 * (the whole DataBuffer API is marked experimental elsewhere in this file).
 *
 * @param count the number of bytes to read, undefined or null for all.
 *
 * @return a binary encoded string of bytes.
 */
util$1.DataBuffer.prototype.getBytes = function(count) {
  // TODO: deprecate this method, it is poorly named and
  // this.toString('binary') replaces it
  // add a toTypedArray()/toArrayBuffer() function
  var rval;
  if(count) {
    // read count bytes
    count = Math.min(this.length(), count);
    rval = this.data.slice(this.read, this.read + count);
    this.read += count;
  } else if(count === 0) {
    rval = '';
  } else {
    // read all bytes, optimize to only copy when needed
    rval = (this.read === 0) ? this.data : this.data.slice(this.read);
    this.clear();
  }
  return rval;
};

/**
 * Gets a binary encoded string of the bytes from this buffer without
 * modifying the read pointer.
 *
 * NOTE(review): same DataView.slice concern as getBytes above.
 *
 * @param count the number of bytes to get, omit to get all.
 *
 * @return a string full of binary encoded characters.
 */
util$1.DataBuffer.prototype.bytes = function(count) {
  // TODO: deprecate this method, it is poorly named, add "getString()"
  return (typeof(count) === 'undefined' ?
    this.data.slice(this.read) :
    this.data.slice(this.read, this.read + count));
};
|
|
|
|
/**
 * Gets a byte at the given index without modifying the read pointer. The
 * index is relative to the current read position.
 *
 * @param i the byte index.
 *
 * @return the byte.
 */
util$1.DataBuffer.prototype.at = function(i) {
  return this.data.getUint8(this.read + i);
};

/**
 * Puts a byte at the given index without modifying the read pointer.
 *
 * NOTE(review): unlike at(), this writes at the absolute index i, not at
 * this.read + i — confirm whether the asymmetry is intentional.
 *
 * @param i the byte index.
 * @param b the byte to put.
 *
 * @return this buffer.
 */
util$1.DataBuffer.prototype.setAt = function(i, b) {
  this.data.setUint8(i, b);
  return this;
};

/**
 * Gets the last written byte without modifying the read pointer.
 *
 * @return the last byte.
 */
util$1.DataBuffer.prototype.last = function() {
  return this.data.getUint8(this.write - 1);
};

/**
 * Creates a copy of this buffer.
 *
 * @return the copy.
 */
util$1.DataBuffer.prototype.copy = function() {
  return new util$1.DataBuffer(this);
};
|
|
|
|
/**
 * Compacts this buffer: discards already-read bytes by shifting the unread
 * region to the start of a fresh backing store and resetting the read
 * pointer to 0.
 *
 * @return this buffer.
 */
util$1.DataBuffer.prototype.compact = function() {
  if(this.read > 0) {
    var src = new Uint8Array(this.data.buffer, this.read);
    var dst = new Uint8Array(src.byteLength);
    dst.set(src);
    // DataView requires an ArrayBuffer; the original passed the Uint8Array
    // itself (new DataView(dst)), which throws a TypeError
    this.data = new DataView(dst.buffer);
    this.write -= this.read;
    this.read = 0;
  }
  return this;
};
|
|
|
|
/**
 * Clears this buffer: drops the backing store and resets both pointers.
 *
 * @return this buffer.
 */
util$1.DataBuffer.prototype.clear = function() {
  this.data = new DataView(new ArrayBuffer(0));
  this.read = this.write = 0;
  return this;
};

/**
 * Shortens this buffer by trimming bytes off of the end of this buffer.
 *
 * @param count the number of bytes to trim off.
 *
 * @return this buffer.
 */
util$1.DataBuffer.prototype.truncate = function(count) {
  this.write = Math.max(0, this.length() - count);
  // keep the read pointer inside the (possibly shrunken) written region
  this.read = Math.min(this.read, this.write);
  return this;
};
|
|
|
|
/**
 * Converts this buffer to a hexadecimal string, covering every byte from
 * the read pointer to the end of the backing store.
 *
 * @return a hexadecimal string.
 */
util$1.DataBuffer.prototype.toHex = function() {
  var rval = '';
  for(var i = this.read; i < this.data.byteLength; ++i) {
    var b = this.data.getUint8(i);
    // zero-pad single-digit bytes so every byte yields two hex characters
    if(b < 16) {
      rval += '0';
    }
    rval += b.toString(16);
  }
  return rval;
};
|
|
|
|
/**
 * Converts this buffer to a string, using the given encoding. If no
 * encoding is given, 'utf8' (UTF-8) is used.
 *
 * @param [encoding] the encoding to use: 'binary', 'utf8', 'utf16', 'hex',
 *          'base64' (default: 'utf8').
 *
 * @return a string representation of the bytes in this buffer.
 *
 * @throws Error if the encoding is unknown.
 */
util$1.DataBuffer.prototype.toString = function(encoding) {
  // Uint8Array must be constructed from the underlying ArrayBuffer; the
  // original passed this.data (a DataView), which throws a TypeError
  var view = new Uint8Array(this.data.buffer, this.read, this.length());
  encoding = encoding || 'utf8';

  // encode to string
  if(encoding === 'binary' || encoding === 'raw') {
    return util$1.binary.raw.encode(view);
  }
  if(encoding === 'hex') {
    return util$1.binary.hex.encode(view);
  }
  if(encoding === 'base64') {
    return util$1.binary.base64.encode(view);
  }

  // decode to text
  if(encoding === 'utf8') {
    return util$1.text.utf8.decode(view);
  }
  if(encoding === 'utf16') {
    return util$1.text.utf16.decode(view);
  }

  throw new Error('Invalid encoding: ' + encoding);
};
|
|
|
|
/** End Buffer w/UInt8Array backing */
|
|
|
|
/**
 * Creates a buffer that stores bytes. A value may be given to populate the
 * buffer with data. This value can either be string of encoded bytes or a
 * regular string of characters. When passing a string of binary encoded
 * bytes, the encoding `raw` should be given. This is also the default. When
 * passing a string of characters, the encoding `utf8` should be given.
 *
 * @param [input] a string with encoded bytes to store in the buffer.
 * @param [encoding] (default: 'raw', other: 'utf8').
 */
util$1.createBuffer = function(input, encoding) {
  // TODO: deprecate, use new ByteBuffer() instead
  encoding = encoding || 'raw';
  if(input !== undefined && encoding === 'utf8') {
    // convert character string to a binary (byte) string first
    input = util$1.encodeUtf8(input);
  }
  return new util$1.ByteBuffer(input);
};

/**
 * Fills a string with a particular value. If you want the string to be a byte
 * string, pass in String.fromCharCode(theByte).
 *
 * Uses repeated doubling of c, so it runs in O(log n) concatenations.
 *
 * @param c the character to fill the string with, use String.fromCharCode
 *          to fill the string with a byte value.
 * @param n the number of characters of value c to fill with.
 *
 * @return the filled string.
 */
util$1.fillString = function(c, n) {
  var s = '';
  while(n > 0) {
    // append the current power-of-two chunk when the corresponding bit is set
    if(n & 1) {
      s += c;
    }
    n >>>= 1;
    if(n > 0) {
      // double the chunk for the next bit
      c += c;
    }
  }
  return s;
};
|
|
|
|
/**
 * Performs a per byte XOR between two byte strings and returns the result as a
 * string of bytes.
 *
 * @param s1 first string of bytes.
 * @param s2 second string of bytes.
 * @param n the number of bytes to XOR.
 *
 * @return the XOR'd result.
 */
util$1.xorBytes = function(s1, s2, n) {
  var s3 = '';
  var b = '';
  var t = '';
  var i = 0;
  var c = 0;
  for(; n > 0; --n, ++i) {
    b = s1.charCodeAt(i) ^ s2.charCodeAt(i);
    // flush the temporary chunk every 10 characters to limit the cost of
    // repeated string concatenation
    if(c >= 10) {
      s3 += t;
      t = '';
      c = 0;
    }
    t += String.fromCharCode(b);
    ++c;
  }
  // append any remaining partial chunk
  s3 += t;
  return s3;
};
|
|
|
|
/**
 * Converts a hex string into a 'binary' encoded string of bytes. An
 * odd-length input is handled by decoding the first character on its own.
 *
 * @param hex the hexadecimal string to convert.
 *
 * @return the binary-encoded string of bytes.
 */
util$1.hexToBytes = function(hex) {
  // TODO: deprecate: "Deprecated. Use util.binary.hex.decode instead."
  var chars = [];
  var pos = 0;
  if((hex.length & 1) === 1) {
    // odd-length input: treat the first character as a lone nibble
    chars.push(String.fromCharCode(parseInt(hex[0], 16)));
    pos = 1;
  }
  // consume one byte (two hex characters) per iteration
  while(pos < hex.length) {
    chars.push(String.fromCharCode(parseInt(hex.substr(pos, 2), 16)));
    pos += 2;
  }
  return chars.join('');
};
|
|
|
|
/**
 * Converts a 'binary' encoded string of bytes to hex.
 *
 * @param bytes the byte string to convert.
 *
 * @return the string of hexadecimal characters.
 */
util$1.bytesToHex = function(bytes) {
  // TODO: deprecate: "Deprecated. Use util.binary.hex.encode instead."
  return util$1.createBuffer(bytes).toHex();
};

/**
 * Converts an 32-bit integer to 4-big-endian byte string.
 *
 * @param i the integer.
 *
 * @return the byte string.
 */
util$1.int32ToBytes = function(i) {
  // most significant byte first
  return (
    String.fromCharCode(i >> 24 & 0xFF) +
    String.fromCharCode(i >> 16 & 0xFF) +
    String.fromCharCode(i >> 8 & 0xFF) +
    String.fromCharCode(i & 0xFF));
};
|
|
|
|
// base64 characters, reverse mapping
var _base64 =
  'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/=';
// maps charCode - 43 to the 6-bit base64 value; -1 marks invalid
// characters and 64 marks the '=' padding character
var _base64Idx = [
/*43 -43 = 0*/
/*'+',  1,  2,  3,'/' */
   62, -1, -1, -1, 63,

/*'0','1','2','3','4','5','6','7','8','9' */
   52, 53, 54, 55, 56, 57, 58, 59, 60, 61,

/*15, 16, 17,'=', 19, 20, 21 */
  -1, -1, -1, 64, -1, -1, -1,

/*65 - 43 = 22*/
/*'A','B','C','D','E','F','G','H','I','J','K','L','M', */
   0,  1,  2,  3,  4,  5,  6,  7,  8,  9, 10, 11, 12,

/*'N','O','P','Q','R','S','T','U','V','W','X','Y','Z' */
   13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25,

/*91 - 43 = 48 */
/*48, 49, 50, 51, 52, 53 */
  -1, -1, -1, -1, -1, -1,

/*97 - 43 = 54*/
/*'a','b','c','d','e','f','g','h','i','j','k','l','m' */
   26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38,

/*'n','o','p','q','r','s','t','u','v','w','x','y','z' */
   39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51
];

// base58 characters (Bitcoin alphabet: no 0, O, I, or l)
var _base58 = '123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz';
|
|
|
|
/**
 * Base64 encodes a 'binary' encoded string of bytes.
 *
 * @param input the binary encoded string of bytes to base64-encode.
 * @param maxline the maximum number of encoded characters per line to use,
 *          defaults to none.
 *
 * @return the base64-encoded output.
 */
util$1.encode64 = function(input, maxline) {
  // TODO: deprecate: "Deprecated. Use util.binary.base64.encode instead."
  var line = '';
  var output = '';
  var chr1, chr2, chr3;
  var i = 0;
  // process 3 input bytes -> 4 output characters per iteration; reading
  // past the end yields NaN, which triggers '=' padding below
  while(i < input.length) {
    chr1 = input.charCodeAt(i++);
    chr2 = input.charCodeAt(i++);
    chr3 = input.charCodeAt(i++);

    // encode 4 character group
    line += _base64.charAt(chr1 >> 2);
    line += _base64.charAt(((chr1 & 3) << 4) | (chr2 >> 4));
    if(isNaN(chr2)) {
      line += '==';
    } else {
      line += _base64.charAt(((chr2 & 15) << 2) | (chr3 >> 6));
      line += isNaN(chr3) ? '=' : _base64.charAt(chr3 & 63);
    }

    // wrap output lines with CRLF when a maximum line length is given
    if(maxline && line.length > maxline) {
      output += line.substr(0, maxline) + '\r\n';
      line = line.substr(maxline);
    }
  }
  output += line;
  return output;
};
|
|
|
|
/**
 * Base64 decodes a string into a 'binary' encoded string of bytes.
 *
 * @param input the base64-encoded input.
 *
 * @return the binary encoded string.
 */
util$1.decode64 = function(input) {
  // TODO: deprecate: "Deprecated. Use util.binary.base64.decode instead."

  // remove all non-base64 characters
  input = input.replace(/[^A-Za-z0-9\+\/\=]/g, '');

  var output = '';
  var enc1, enc2, enc3, enc4;
  var i = 0;

  // process 4 input characters -> up to 3 output bytes per iteration;
  // _base64Idx maps charCode - 43 to the 6-bit value, with 64 meaning '='
  while(i < input.length) {
    enc1 = _base64Idx[input.charCodeAt(i++) - 43];
    enc2 = _base64Idx[input.charCodeAt(i++) - 43];
    enc3 = _base64Idx[input.charCodeAt(i++) - 43];
    enc4 = _base64Idx[input.charCodeAt(i++) - 43];

    output += String.fromCharCode((enc1 << 2) | (enc2 >> 4));
    if(enc3 !== 64) {
      // decoded at least 2 bytes
      output += String.fromCharCode(((enc2 & 15) << 4) | (enc3 >> 2));
      if(enc4 !== 64) {
        // decoded 3 bytes
        output += String.fromCharCode(((enc3 & 3) << 6) | enc4);
      }
    }
  }

  return output;
};
|
|
|
|
/**
 * Encodes the given string of characters (a standard JavaScript
 * string) as a binary encoded string where the bytes represent
 * a UTF-8 encoded string of characters. Non-ASCII characters will be
 * encoded as multiple bytes according to UTF-8.
 *
 * Relies on the classic unescape(encodeURIComponent(...)) trick; both
 * functions are deprecated but still widely supported.
 *
 * @param str a standard string of characters to encode.
 *
 * @return the binary encoded string.
 */
util$1.encodeUtf8 = function(str) {
  return unescape(encodeURIComponent(str));
};

/**
 * Decodes a binary encoded string that contains bytes that
 * represent a UTF-8 encoded string of characters -- into a
 * string of characters (a standard JavaScript string).
 *
 * Inverse of encodeUtf8 (escape/decodeURIComponent trick).
 *
 * @param str the binary encoded string to decode.
 *
 * @return the resulting standard string of characters.
 */
util$1.decodeUtf8 = function(str) {
  return decodeURIComponent(escape(str));
};
|
|
|
|
// binary encoding/decoding tools
// FIXME: Experimental. Do not use yet.
// namespace holding the raw/hex/base64/base58/baseN codecs defined below
util$1.binary = {
  raw: {},
  hex: {},
  base64: {},
  base58: {},
  baseN : {
    encode: baseN.encode,
    decode: baseN.decode
  }
};

/**
 * Encodes a Uint8Array as a binary-encoded string. This encoding uses
 * a value between 0 and 255 for each character.
 *
 * NOTE(review): fromCharCode.apply spreads the array as arguments, which
 * can exceed engine argument limits for very large inputs — confirm input
 * sizes stay modest.
 *
 * @param bytes the Uint8Array to encode.
 *
 * @return the binary-encoded string.
 */
util$1.binary.raw.encode = function(bytes) {
  return String.fromCharCode.apply(null, bytes);
};

/**
 * Decodes a binary-encoded string to a Uint8Array. This encoding uses
 * a value between 0 and 255 for each character.
 *
 * @param str the binary-encoded string to decode.
 * @param [output] an optional Uint8Array to write the output to; if it
 *          is too small, an exception will be thrown.
 * @param [offset] the start offset for writing to the output (default: 0).
 *
 * @return the Uint8Array or the number of bytes written if output was given.
 */
util$1.binary.raw.decode = function(str, output, offset) {
  var out = output;
  if(!out) {
    out = new Uint8Array(str.length);
  }
  offset = offset || 0;
  var j = offset;
  // one byte per character; charCodeAt yields the raw byte value
  for(var i = 0; i < str.length; ++i) {
    out[j++] = str.charCodeAt(i);
  }
  // callers that supplied an output get the byte count back instead
  return output ? (j - offset) : out;
};
|
|
|
|
/**
 * Encodes a 'binary' string, ArrayBuffer, DataView, TypedArray, or
 * ByteBuffer as a string of hexadecimal characters.
 *
 * @param bytes the bytes to convert.
 *
 * @return the string of hexadecimal characters.
 */
util$1.binary.hex.encode = util$1.bytesToHex;

/**
 * Decodes a hex-encoded string to a Uint8Array.
 *
 * @param hex the hexadecimal string to convert.
 * @param [output] an optional Uint8Array to write the output to; if it
 *          is too small, an exception will be thrown.
 * @param [offset] the start offset for writing to the output (default: 0).
 *
 * @return the Uint8Array or the number of bytes written if output was given.
 */
util$1.binary.hex.decode = function(hex, output, offset) {
  var out = output;
  if(!out) {
    out = new Uint8Array(Math.ceil(hex.length / 2));
  }
  offset = offset || 0;
  var i = 0, j = offset;
  if(hex.length & 1) {
    // odd number of characters, convert first character alone
    i = 1;
    out[j++] = parseInt(hex[0], 16);
  }
  // convert 2 characters (1 byte) at a time
  for(; i < hex.length; i += 2) {
    out[j++] = parseInt(hex.substr(i, 2), 16);
  }
  // callers that supplied an output get the byte count back instead
  return output ? (j - offset) : out;
};
|
|
|
|
/**
 * Base64-encodes a Uint8Array.
 *
 * @param input the Uint8Array to encode.
 * @param maxline the maximum number of encoded characters per line to use,
 *          defaults to none.
 *
 * @return the base64-encoded output string.
 */
util$1.binary.base64.encode = function(input, maxline) {
  var line = '';
  var output = '';
  var chr1, chr2, chr3;
  var i = 0;
  // process 3 input bytes -> 4 output characters per iteration; reading
  // past the end yields undefined (NaN in isNaN), triggering '=' padding
  while(i < input.byteLength) {
    chr1 = input[i++];
    chr2 = input[i++];
    chr3 = input[i++];

    // encode 4 character group
    line += _base64.charAt(chr1 >> 2);
    line += _base64.charAt(((chr1 & 3) << 4) | (chr2 >> 4));
    if(isNaN(chr2)) {
      line += '==';
    } else {
      line += _base64.charAt(((chr2 & 15) << 2) | (chr3 >> 6));
      line += isNaN(chr3) ? '=' : _base64.charAt(chr3 & 63);
    }

    // wrap output lines with CRLF when a maximum line length is given
    if(maxline && line.length > maxline) {
      output += line.substr(0, maxline) + '\r\n';
      line = line.substr(maxline);
    }
  }
  output += line;
  return output;
};

/**
 * Decodes a base64-encoded string to a Uint8Array.
 *
 * @param input the base64-encoded input string.
 * @param [output] an optional Uint8Array to write the output to; if it
 *          is too small, an exception will be thrown.
 * @param [offset] the start offset for writing to the output (default: 0).
 *
 * @return the Uint8Array or the number of bytes written if output was given.
 */
util$1.binary.base64.decode = function(input, output, offset) {
  var out = output;
  if(!out) {
    // worst case: every 4 input characters produce 3 bytes
    out = new Uint8Array(Math.ceil(input.length / 4) * 3);
  }

  // remove all non-base64 characters
  input = input.replace(/[^A-Za-z0-9\+\/\=]/g, '');

  offset = offset || 0;
  var enc1, enc2, enc3, enc4;
  var i = 0, j = offset;

  // process 4 input characters -> up to 3 output bytes per iteration;
  // _base64Idx maps charCode - 43 to the 6-bit value, with 64 meaning '='
  while(i < input.length) {
    enc1 = _base64Idx[input.charCodeAt(i++) - 43];
    enc2 = _base64Idx[input.charCodeAt(i++) - 43];
    enc3 = _base64Idx[input.charCodeAt(i++) - 43];
    enc4 = _base64Idx[input.charCodeAt(i++) - 43];

    out[j++] = (enc1 << 2) | (enc2 >> 4);
    if(enc3 !== 64) {
      // decoded at least 2 bytes
      out[j++] = ((enc2 & 15) << 4) | (enc3 >> 2);
      if(enc4 !== 64) {
        // decoded 3 bytes
        out[j++] = ((enc3 & 3) << 6) | enc4;
      }
    }
  }

  // make sure result is the exact decoded length
  return output ? (j - offset) : out.subarray(0, j);
};
|
|
|
|
// add support for base58 encoding/decoding with Bitcoin alphabet
// (thin wrappers around the generic baseN codec with the _base58 alphabet)
util$1.binary.base58.encode = function(input, maxline) {
  return util$1.binary.baseN.encode(input, _base58, maxline);
};
util$1.binary.base58.decode = function(input, maxline) {
  return util$1.binary.baseN.decode(input, _base58, maxline);
};
|
|
|
|
// text encoding/decoding tools
// FIXME: Experimental. Do not use yet.
// namespace holding the utf8/utf16 text codecs defined below
util$1.text = {
  utf8: {},
  utf16: {}
};

/**
 * Encodes the given string as UTF-8 in a Uint8Array.
 *
 * @param str the string to encode.
 * @param [output] an optional Uint8Array to write the output to; if it
 *          is too small, an exception will be thrown.
 * @param [offset] the start offset for writing to the output (default: 0).
 *
 * @return the Uint8Array or the number of bytes written if output was given.
 */
util$1.text.utf8.encode = function(str, output, offset) {
  // convert to a binary string first so each character is a single byte
  str = util$1.encodeUtf8(str);
  var out = output;
  if(!out) {
    out = new Uint8Array(str.length);
  }
  offset = offset || 0;
  var j = offset;
  for(var i = 0; i < str.length; ++i) {
    out[j++] = str.charCodeAt(i);
  }
  // callers that supplied an output get the byte count back instead
  return output ? (j - offset) : out;
};

/**
 * Decodes the UTF-8 contents from a Uint8Array.
 *
 * @param bytes the Uint8Array to decode.
 *
 * @return the resulting string.
 */
util$1.text.utf8.decode = function(bytes) {
  return util$1.decodeUtf8(String.fromCharCode.apply(null, bytes));
};
|
|
|
|
/**
 * Encodes the given string as UTF-16 in a Uint8Array.
 *
 * NOTE(review): the Uint16Array view is built from out.buffer at buffer
 * offset 0 and `offset` is used directly as a 16-bit element index, so a
 * byte offset and/or a view with a non-zero byteOffset will not be honored
 * — confirm callers only pass freshly allocated outputs with offset 0.
 *
 * @param str the string to encode.
 * @param [output] an optional Uint8Array to write the output to; if it
 *          is too small, an exception will be thrown.
 * @param [offset] the start offset for writing to the output (default: 0).
 *
 * @return the Uint8Array or the number of bytes written if output was given.
 */
util$1.text.utf16.encode = function(str, output, offset) {
  var out = output;
  if(!out) {
    out = new Uint8Array(str.length * 2);
  }
  var view = new Uint16Array(out.buffer);
  offset = offset || 0;
  // j tracks bytes written (returned to callers); k indexes 16-bit units
  var j = offset;
  var k = offset;
  for(var i = 0; i < str.length; ++i) {
    view[k++] = str.charCodeAt(i);
    j += 2;
  }
  return output ? (j - offset) : out;
};

/**
 * Decodes the UTF-16 contents from a Uint8Array.
 *
 * NOTE(review): wraps bytes.buffer from offset 0 for its whole length, so
 * a subarray view's byteOffset/length are ignored — confirm callers pass
 * views covering their entire buffer.
 *
 * @param bytes the Uint8Array to decode.
 *
 * @return the resulting string.
 */
util$1.text.utf16.decode = function(bytes) {
  return String.fromCharCode.apply(null, new Uint16Array(bytes.buffer));
};
|
|
|
|
/**
 * Deflates the given data using a flash interface. The data is base64
 * round-tripped across the flash boundary.
 *
 * @param api the flash interface.
 * @param bytes the data.
 * @param raw true to return only raw deflate data, false to include zlib
 *          header and trailer.
 *
 * @return the deflated data as a string.
 */
util$1.deflate = function(api, bytes, raw) {
  bytes = util$1.decode64(api.deflate(util$1.encode64(bytes)).rval);

  // strip zlib header and trailer if necessary
  if(raw) {
    // zlib header is 2 bytes (CMF,FLG) where FLG indicates that
    // there is a 4-byte DICT (alder-32) block before the data if
    // its 5th bit is set
    var start = 2;
    var flg = bytes.charCodeAt(1);
    if(flg & 0x20) {
      start = 6;
    }
    // zlib trailer is 4 bytes of adler-32
    bytes = bytes.substring(start, bytes.length - 4);
  }

  return bytes;
};

/**
 * Inflates the given data using a flash interface. The data is base64
 * round-tripped across the flash boundary.
 *
 * @param api the flash interface.
 * @param bytes the data.
 * @param raw true if the incoming data has no zlib header or trailer and is
 *          raw DEFLATE data.
 *
 * @return the inflated data as a string, null on error.
 */
util$1.inflate = function(api, bytes, raw) {
  // TODO: add zlib header and trailer if necessary/possible
  var rval = api.inflate(util$1.encode64(bytes)).rval;
  return (rval === null) ? null : util$1.decode64(rval);
};
|
|
|
|
/**
 * Sets a storage object: JSON+base64 encodes it and stores it under the
 * given ID via the WebStorage-style API.
 *
 * @param api the storage interface.
 * @param id the storage ID to use.
 * @param obj the storage object, null to remove.
 *
 * @throws Error if no storage API is available or the (flash) backend
 *           reports an error.
 */
var _setStorageObject = function(api, id, obj) {
  if(!api) {
    throw new Error('WebStorage not available.');
  }

  var rval;
  if(obj === null) {
    rval = api.removeItem(id);
  } else {
    // json-encode and base64-encode object
    obj = util$1.encode64(JSON.stringify(obj));
    rval = api.setItem(id, obj);
  }

  // handle potential flash error; native localStorage returns undefined,
  // the flash backend returns a {rval, error} wrapper
  if(typeof(rval) !== 'undefined' && rval.rval !== true) {
    var error = new Error(rval.error.message);
    error.id = rval.error.id;
    error.name = rval.error.name;
    throw error;
  }
};
|
|
|
|
/**
 * Gets a storage object: fetches the raw entry, unwraps flash-backend
 * responses, then base64+JSON decodes it.
 *
 * @param api the storage interface.
 * @param id the storage ID to use.
 *
 * @return the storage object entry or null if none exists.
 *
 * @throws Error if no storage API is available or the flash backend
 *           reports an error.
 */
var _getStorageObject = function(api, id) {
  if(!api) {
    throw new Error('WebStorage not available.');
  }

  // get the existing entry
  var rval = api.getItem(id);

  /* Note: We check api.init because we can't do (api == localStorage)
    on IE because of "Class doesn't support Automation" exception. Only
    the flash api has an init method so this works too, but we need a
    better solution in the future. */

  // flash returns item wrapped in an object, handle special case
  if(api.init) {
    if(rval.rval === null) {
      if(rval.error) {
        var error = new Error(rval.error.message);
        error.id = rval.error.id;
        error.name = rval.error.name;
        throw error;
      }
      // no error, but also no item
      rval = null;
    } else {
      rval = rval.rval;
    }
  }

  // handle decoding
  if(rval !== null) {
    // base64-decode and json-decode data
    rval = JSON.parse(util$1.decode64(rval));
  }

  return rval;
};
|
|
|
|
/**
 * Stores an item in local storage.
 *
 * @param api the storage interface.
 * @param id the storage ID to use.
 * @param key the key for the item.
 * @param data the data for the item (any javascript object/primitive).
 */
var _setItem = function(api, id, key, data) {
  // load the existing storage object, creating a fresh one if absent
  var entry = _getStorageObject(api, id);
  if(entry === null) {
    entry = {};
  }

  // update key and write the object back
  entry[key] = data;
  _setStorageObject(api, id, entry);
};
|
|
|
|
/**
 * Gets an item from local storage.
 *
 * @param api the storage interface.
 * @param id the storage ID to use.
 * @param key the key for the item.
 *
 * @return the item, or null if the entry or the key does not exist.
 */
var _getItem = function(api, id, key) {
  // fetch the storage object and pull the requested key from it
  var entry = _getStorageObject(api, id);
  if(entry === null) {
    return null;
  }
  return (key in entry) ? entry[key] : null;
};
|
|
|
|
/**
 * Removes an item from local storage.
 *
 * @param api the storage interface.
 * @param id the storage ID to use.
 * @param key the key for the item.
 */
var _removeItem = function(api, id, key) {
  var entry = _getStorageObject(api, id);
  if(entry === null || !(key in entry)) {
    // nothing to remove
    return;
  }

  // remove key
  delete entry[key];

  // drop the entire entry if no keys remain
  var hasKeys = false;
  for(var prop in entry) {
    hasKeys = true;
    break;
  }
  if(!hasKeys) {
    entry = null;
  }

  // set storage object (null removes it entirely)
  _setStorageObject(api, id, entry);
};
|
|
|
|
/**
 * Clears the local disk storage identified by the given ID.
 *
 * @param api the storage interface.
 * @param id the storage ID to use.
 */
var _clearItems = function(api, id) {
  // storing null removes the entry entirely
  _setStorageObject(api, id, null);
};
|
|
|
|
/**
 * Calls a storage function against each preferred storage type in turn.
 *
 * @param func the function to call (_setItem, _getItem, _removeItem, or
 *          _clearItems).
 * @param args the arguments for the function; args[0] is the storage API
 *          and is overwritten with localStorage when the 'web' type is
 *          attempted (note: this mutates the caller-supplied arguments
 *          object).
 * @param location an array of storage types ('flash', 'web', or 'both') in
 *          order of preference; defaults to ['web', 'flash'].
 *
 * @return the return value from the function.
 */
var _callStorageFunction = function(func, args, location) {
  var rval = null;

  // default storage types
  if(typeof(location) === 'undefined') {
    location = ['web', 'flash'];
  }

  // apply storage types in order of preference
  var type;
  var done = false;
  var exception = null;
  for(var idx in location) {
    type = location[idx];
    try {
      if(type === 'flash' || type === 'both') {
        // flash requires a non-null API in args[0]
        if(args[0] === null) {
          throw new Error('Flash local storage not available.');
        }
        rval = func.apply(this, args);
        // for 'both', continue on to web storage below as well
        done = (type === 'flash');
      }
      if(type === 'web' || type === 'both') {
        // switch the API argument to WebStorage before calling
        args[0] = localStorage;
        rval = func.apply(this, args);
        done = true;
      }
    } catch(ex) {
      // remember the failure and fall through to the next preference
      exception = ex;
    }
    if(done) {
      break;
    }
  }

  // every preferred storage type failed; surface the last error
  if(!done) {
    throw exception;
  }

  return rval;
};
|
|
|
|
/**
 * Stores an item on local disk.
 *
 * The available types of local storage include 'flash', 'web', and 'both'.
 *
 * The type 'flash' refers to flash local storage (SharedObject); using it
 * requires a valid 'api' parameter. The type 'web' refers to WebStorage, if
 * supported by the browser. The type 'both' stores using both 'flash' and
 * 'web', not just one or the other.
 *
 * The location array lists the storage types to use in order of preference:
 *
 * ['flash']: flash only storage
 * ['web']: web only storage
 * ['both']: try to store in both
 * ['flash','web']: store in flash first, but if not available, 'web'
 * ['web','flash']: store in web first, but if not available, 'flash'
 *
 * The location array defaults to: ['web', 'flash']
 *
 * @param api the flash interface, null to use only WebStorage.
 * @param id the storage ID to use.
 * @param key the key for the item.
 * @param data the data for the item (any javascript object/primitive).
 * @param location an array with the preferred types of storage to use.
 */
util$1.setItem = function(api, id, key, data, location) {
  _callStorageFunction(_setItem, [api, id, key, data], location);
};
|
|
|
|
/**
 * Gets an item on local disk.
 *
 * See setItem() for details on storage types.
 *
 * @param api the flash interface, null to use only WebStorage.
 * @param id the storage ID to use.
 * @param key the key for the item.
 * @param location an array with the preferred types of storage to use.
 *
 * @return the item.
 */
util$1.getItem = function(api, id, key, location) {
  return _callStorageFunction(_getItem, [api, id, key], location);
};
|
|
|
|
/**
 * Removes an item on local disk.
 *
 * See setItem() for details on storage types.
 *
 * @param api the flash interface.
 * @param id the storage ID to use.
 * @param key the key for the item.
 * @param location an array with the preferred types of storage to use.
 */
util$1.removeItem = function(api, id, key, location) {
  _callStorageFunction(_removeItem, [api, id, key], location);
};
|
|
|
|
/**
 * Clears the local disk storage identified by the given ID.
 *
 * See setItem() for details on storage types.
 *
 * @param api the flash interface if flash is available.
 * @param id the storage ID to use.
 * @param location an array with the preferred types of storage to use.
 */
util$1.clearItems = function(api, id, location) {
  _callStorageFunction(_clearItems, [api, id], location);
};
|
|
|
|
/**
 * Check if an object is empty.
 *
 * Taken from:
 * http://stackoverflow.com/questions/679915/how-do-i-test-for-an-empty-javascript-object-from-json/679937#679937
 *
 * @param obj the object to check.
 *
 * @return true if the object has no own enumerable properties.
 */
util$1.isEmpty = function(obj) {
  for(var prop in obj) {
    // call hasOwnProperty via Object.prototype so objects that shadow
    // hasOwnProperty (or were created with Object.create(null)) cannot
    // break the check
    if(Object.prototype.hasOwnProperty.call(obj, prop)) {
      return false;
    }
  }
  return true;
};
|
|
|
|
/**
 * Format with simple printf-style interpolation.
 *
 * %%: literal '%'
 * %s,%o: convert next argument into a string.
 *
 * A missing argument is rendered as '<?>'; an unknown code %x is rendered
 * as '<%x?>'.
 *
 * @param format the string to format.
 * @param ... arguments to interpolate into the format string.
 *
 * @return the formatted string.
 */
util$1.format = function(format) {
  // matches '%' followed by any single character; /g keeps state in
  // re.lastIndex across exec() calls within this invocation
  var re = /%./g;
  // current match
  var match;
  // current part
  var part;
  // current arg index
  var argi = 0;
  // collected parts to recombine later
  var parts = [];
  // last index found
  var last = 0;
  // loop while matches remain
  while((match = re.exec(format))) {
    // literal text between the previous code and this one
    part = format.substring(last, re.lastIndex - 2);
    // don't add empty strings (ie, parts between %s%s)
    if(part.length > 0) {
      parts.push(part);
    }
    last = re.lastIndex;
    // switch on % code
    var code = match[0][1];
    switch(code) {
    case 's':
    case 'o':
      // check if enough arguments were given; arguments[0] is the format
      // string itself, so the last usable interpolation argument is
      // arguments[arguments.length - 1] (hence the "- 1": without it the
      // final missing argument reads past the end and joins as '' instead
      // of '<?>')
      if(argi < arguments.length - 1) {
        parts.push(arguments[argi++ + 1]);
      } else {
        parts.push('<?>');
      }
      break;
    // FIXME: do proper formating for numbers, etc
    //case 'f':
    //case 'd':
    case '%':
      parts.push('%');
      break;
    default:
      parts.push('<%' + code + '?>');
    }
  }
  // add trailing part of format string
  parts.push(format.substring(last));
  return parts.join('');
};
|
|
|
|
/**
 * Formats a number.
 *
 * http://snipplr.com/view/5945/javascript-numberformat--ported-from-php/
 *
 * @param number the number to format.
 * @param decimals the number of decimal places (defaults to 2 when NaN).
 * @param dec_point the decimal separator (defaults to ',').
 * @param thousands_sep the thousands separator (defaults to '.').
 *
 * @return the formatted number as a string.
 */
util$1.formatNumber = function(number, decimals, dec_point, thousands_sep) {
  // http://kevin.vanzonneveld.net
  // + original by: Jonas Raoni Soares Silva (http://www.jsfromhell.com)
  // + improved by: Kevin van Zonneveld (http://kevin.vanzonneveld.net)
  // + bugfix by: Michael White (http://crestidg.com)
  // + bugfix by: Benjamin Lupton
  // + bugfix by: Allan Jensen (http://www.winternet.no)
  // + revised by: Jonas Raoni Soares Silva (http://www.jsfromhell.com)
  // * example 1: number_format(1234.5678, 2, '.', '');
  // * returns 1: 1234.57

  // note: the defaults (',' decimal point, '.' thousands separator) follow
  // the European convention used by the original PHP port
  var n = number, c = isNaN(decimals = Math.abs(decimals)) ? 2 : decimals;
  var d = dec_point === undefined ? ',' : dec_point;
  var t = thousands_sep === undefined ?
    '.' : thousands_sep, s = n < 0 ? '-' : '';
  // i: the integer part of |n| rounded to c decimals, as a string
  var i = parseInt((n = Math.abs(+n || 0).toFixed(c)), 10) + '';
  // j: length of the leading digit group (1-3 digits) before the first
  // thousands separator
  var j = (i.length > 3) ? i.length % 3 : 0;
  // note: (n - i) relies on implicit string-to-number coercion of both
  // operands to recover the fractional part
  return s + (j ? i.substr(0, j) + t : '') +
    i.substr(j).replace(/(\d{3})(?=\d)/g, '$1' + t) +
    (c ? d + Math.abs(n - i).toFixed(c).slice(2) : '');
};
|
|
|
|
/**
 * Formats a byte size as a human-readable string.
 *
 * http://snipplr.com/view/5949/format-humanize-file-byte-size-presentation-in-javascript/
 */
util$1.formatSize = function(size) {
  var GIB = 1073741824;
  var MIB = 1048576;
  var KIB = 1024;
  if(size >= GIB) {
    return util$1.formatNumber(size / GIB, 2, '.', '') + ' GiB';
  }
  if(size >= MIB) {
    return util$1.formatNumber(size / MIB, 2, '.', '') + ' MiB';
  }
  if(size >= KIB) {
    return util$1.formatNumber(size / KIB, 0) + ' KiB';
  }
  return util$1.formatNumber(size, 0) + ' bytes';
};
|
|
|
|
/**
 * Converts an IPv4 or IPv6 string representation into bytes (in network
 * order).
 *
 * @param ip the IPv4 or IPv6 address to convert.
 *
 * @return the 4-byte IPv4 or 16-byte IPv6 address or null if the address
 *          can't be parsed.
 */
util$1.bytesFromIP = function(ip) {
  // a dot marks dotted-quad IPv4 notation; a colon marks IPv6
  var looksLikeV4 = ip.indexOf('.') !== -1;
  if(looksLikeV4) {
    return util$1.bytesFromIPv4(ip);
  }
  var looksLikeV6 = ip.indexOf(':') !== -1;
  return looksLikeV6 ? util$1.bytesFromIPv6(ip) : null;
};
|
|
|
|
/**
 * Converts an IPv4 string representation into bytes (in network order).
 *
 * @param ip the IPv4 address to convert.
 *
 * @return the 4-byte address or null if the address can't be parsed.
 */
util$1.bytesFromIPv4 = function(ip) {
  var parts = ip.split('.');
  if(parts.length !== 4) {
    return null;
  }
  var b = util$1.createBuffer();
  for(var i = 0; i < parts.length; ++i) {
    // each dotted component must be a decimal octet in [0, 255]; reject
    // anything else (empty, non-numeric like '1e3', out-of-range like
    // '999') so invalid addresses yield null rather than garbage bytes
    if(!/^\d+$/.test(parts[i])) {
      return null;
    }
    var num = parseInt(parts[i], 10);
    if(num > 255) {
      return null;
    }
    b.putByte(num);
  }
  return b.getBytes();
};
|
|
|
|
/**
 * Converts an IPv6 string representation into bytes (in network order).
 *
 * @param ip the IPv6 address to convert.
 *
 * @return the 16-byte address or null if the address can't be parsed.
 */
util$1.bytesFromIPv6 = function(ip) {
  // count the empty groups produced by '::' (and leading/trailing ':')
  var blanks = 0;
  ip = ip.split(':').filter(function(e) {
    if(e.length === 0) ++blanks;
    return true;
  });
  // number of zero BYTES the '::' abbreviation stands for
  var zeros = (8 - ip.length + blanks) * 2;
  var b = util$1.createBuffer();
  for(var i = 0; i < 8; ++i) {
    // an empty/missing group marks the '::' expansion point
    if(!ip[i] || ip[i].length === 0) {
      b.fillWithByte(0, zeros);
      // only expand once; later blanks contribute nothing
      zeros = 0;
      continue;
    }
    var bytes = util$1.hexToBytes(ip[i]);
    // left-pad groups that decoded to a single byte (1-2 hex chars)
    if(bytes.length < 2) {
      b.putByte(0);
    }
    b.putBytes(bytes);
  }
  return b.getBytes();
};
|
|
|
|
/**
 * Converts 4-bytes into an IPv4 string representation or 16-bytes into
 * an IPv6 string representation. The bytes must be in network order.
 *
 * @param bytes the bytes to convert.
 *
 * @return the IPv4 or IPv6 string representation if 4 or 16 bytes,
 *          respectively, are given, otherwise null.
 */
util$1.bytesToIP = function(bytes) {
  switch(bytes.length) {
  case 4:
    return util$1.bytesToIPv4(bytes);
  case 16:
    return util$1.bytesToIPv6(bytes);
  default:
    return null;
  }
};
|
|
|
|
/**
 * Converts 4-bytes into an IPv4 string representation. The bytes must be
 * in network order.
 *
 * @param bytes the bytes to convert.
 *
 * @return the IPv4 string representation or null for an invalid # of bytes.
 */
util$1.bytesToIPv4 = function(bytes) {
  if(bytes.length !== 4) {
    return null;
  }
  // each byte value becomes one dotted-decimal component
  var octets = [];
  for(var i = 0; i < 4; ++i) {
    octets[i] = bytes.charCodeAt(i);
  }
  return octets.join('.');
};
|
|
|
|
/**
 * Converts 16-bytes into an IPv6 string representation. The bytes must be
 * in network order.
 *
 * @param bytes the bytes to convert.
 *
 * @return the IPv6 string representation or null for an invalid # of bytes.
 */
util$1.bytesToIPv6 = function(bytes) {
  if(bytes.length !== 16) {
    return null;
  }
  // hextets (16-bit groups) collected so far, as hex strings
  var ip = [];
  // runs of consecutive zero hextets, as {start, end} index ranges into ip
  var zeroGroups = [];
  // index into zeroGroups of the longest run seen so far
  var zeroMaxGroup = 0;
  for(var i = 0; i < bytes.length; i += 2) {
    var hex = util$1.bytesToHex(bytes[i] + bytes[i + 1]);
    // canonicalize zero representation (strip leading zeros)
    while(hex[0] === '0' && hex !== '0') {
      hex = hex.substr(1);
    }
    if(hex === '0') {
      // extend the current run of zero hextets or start a new one
      var last = zeroGroups[zeroGroups.length - 1];
      var idx = ip.length;
      if(!last || idx !== last.end + 1) {
        zeroGroups.push({start: idx, end: idx});
      } else {
        last.end = idx;
        // track the longest run for '::' compression below
        if((last.end - last.start) >
          (zeroGroups[zeroMaxGroup].end - zeroGroups[zeroMaxGroup].start)) {
          zeroMaxGroup = zeroGroups.length - 1;
        }
      }
    }
    ip.push(hex);
  }
  if(zeroGroups.length > 0) {
    var group = zeroGroups[zeroMaxGroup];
    // only shorten group of length > 0 (i.e. two or more zero hextets)
    if(group.end - group.start > 0) {
      // collapse the run to a single '' so join(':') emits '::'
      ip.splice(group.start, group.end - group.start + 1, '');
      // preserve the leading/trailing ':' when the run touches an edge
      if(group.start === 0) {
        ip.unshift('');
      }
      if(group.end === 7) {
        ip.push('');
      }
    }
  }
  return ip.join(':');
};
|
|
|
|
/**
 * Estimates the number of processes that can be run concurrently. If
 * creating Web Workers, keep in mind that the main JavaScript process needs
 * its own core.
 *
 * The result is cached on util.cores. When no direct browser report is
 * available, the estimate is derived by timing batches of short-lived Web
 * Workers and counting how many ran in overlapping time windows.
 *
 * @param options the options to use:
 *          update true to force an update (not use the cached value).
 * @param callback(err, max) called once the operation completes.
 */
util$1.estimateCores = function(options, callback) {
  // support the estimateCores(callback) call shape
  if(typeof options === 'function') {
    callback = options;
    options = {};
  }
  options = options || {};
  // return the cached estimate unless an update was requested
  if('cores' in util$1 && !options.update) {
    return callback(null, util$1.cores);
  }
  // prefer the browser-reported value when available
  if(typeof navigator !== 'undefined' &&
    'hardwareConcurrency' in navigator &&
    navigator.hardwareConcurrency > 0) {
    util$1.cores = navigator.hardwareConcurrency;
    return callback(null, util$1.cores);
  }
  if(typeof Worker === 'undefined') {
    // workers not available
    util$1.cores = 1;
    return callback(null, util$1.cores);
  }
  if(typeof Blob === 'undefined') {
    // can't estimate, default to 2
    util$1.cores = 2;
    return callback(null, util$1.cores);
  }

  // create worker concurrency estimation code as blob; each worker replies
  // to a message with its {start, end} busy window
  var blobUrl = URL.createObjectURL(new Blob(['(',
    function() {
      self.addEventListener('message', function(e) {
        // run worker for 4 ms
        var st = Date.now();
        var et = st + 4;
        self.postMessage({st: st, et: et});
      });
    }.toString(),
  ')()'], {type: 'application/javascript'}));

  // take 5 samples using 16 workers
  sample([], 5, 16);

  // runs one sampling round, recursing until `samples` reaches 0, then
  // averages the per-round overlap maxima and reports the estimate
  function sample(max, samples, numWorkers) {
    if(samples === 0) {
      // get overlap average
      var avg = Math.floor(max.reduce(function(avg, x) {
        return avg + x;
      }, 0) / max.length);
      util$1.cores = Math.max(1, avg);
      URL.revokeObjectURL(blobUrl);
      return callback(null, util$1.cores);
    }
    map(numWorkers, function(err, results) {
      max.push(reduce(numWorkers, results));
      sample(max, samples - 1, numWorkers);
    });
  }

  // spawns numWorkers workers, collects one timing message from each,
  // terminates them all, then invokes callback(null, results)
  function map(numWorkers, callback) {
    var workers = [];
    var results = [];
    for(var i = 0; i < numWorkers; ++i) {
      var worker = new Worker(blobUrl);
      worker.addEventListener('message', function(e) {
        results.push(e.data);
        if(results.length === numWorkers) {
          for(var i = 0; i < numWorkers; ++i) {
            workers[i].terminate();
          }
          callback(null, results);
        }
      });
      workers.push(worker);
    }
    for(var i = 0; i < numWorkers; ++i) {
      workers[i].postMessage(i);
    }
  }

  // counts, for each worker, how many other workers' busy windows
  // overlapped its own, and returns the largest such count
  function reduce(numWorkers, results) {
    // find overlapping time windows
    var overlaps = [];
    for(var n = 0; n < numWorkers; ++n) {
      var r1 = results[n];
      var overlap = overlaps[n] = [];
      for(var i = 0; i < numWorkers; ++i) {
        if(n === i) {
          continue;
        }
        var r2 = results[i];
        if((r1.st > r2.st && r1.st < r2.et) ||
          (r2.st > r1.st && r2.st < r1.et)) {
          overlap.push(i);
        }
      }
    }
    // get maximum overlaps ... don't include overlapping worker itself
    // as the main JS process was also being scheduled during the work and
    // would have to be subtracted from the estimate anyway
    return overlaps.reduce(function(max, overlap) {
      return Math.max(max, overlap.length);
    }, 0);
  }
};
|
|
|
|
/**
 * Object IDs for ASN.1.
 *
 * @author Dave Longley
 *
 * Copyright (c) 2010-2013 Digital Bazaar, Inc.
 */

var forge$k = forge$m;

// ensure the pki namespace exists and expose the shared OID map as both
// forge.pki.oids and forge.oids
forge$k.pki = forge$k.pki || {};
var oids$1 = forge$k.pki.oids = forge$k.oids = forge$k.oids || {};
|
|
|
|
// set id to name mapping and name to id mapping
function _IN(id, name) {
  oids$1[name] = id;
  oids$1[id] = name;
}
|
|
// set id to name mapping only (no reverse entry; the same name may map to
// several ids below, e.g. deprecated extension OIDs)
function _I_(id, name) {
  oids$1[id] = name;
}
|
|
|
|
// Registration of well-known OIDs. _IN installs both directions
// (id -> name and name -> id); _I_ installs only id -> name.

// algorithm OIDs
_IN('1.2.840.113549.1.1.1', 'rsaEncryption');
// Note: md2 & md4 not implemented
//_IN('1.2.840.113549.1.1.2', 'md2WithRSAEncryption');
//_IN('1.2.840.113549.1.1.3', 'md4WithRSAEncryption');
_IN('1.2.840.113549.1.1.4', 'md5WithRSAEncryption');
_IN('1.2.840.113549.1.1.5', 'sha1WithRSAEncryption');
_IN('1.2.840.113549.1.1.7', 'RSAES-OAEP');
_IN('1.2.840.113549.1.1.8', 'mgf1');
_IN('1.2.840.113549.1.1.9', 'pSpecified');
_IN('1.2.840.113549.1.1.10', 'RSASSA-PSS');
_IN('1.2.840.113549.1.1.11', 'sha256WithRSAEncryption');
_IN('1.2.840.113549.1.1.12', 'sha384WithRSAEncryption');
_IN('1.2.840.113549.1.1.13', 'sha512WithRSAEncryption');
// Edwards-curve Digital Signature Algorithm (EdDSA) Ed25519
_IN('1.3.101.112', 'EdDSA25519');

_IN('1.2.840.10040.4.3', 'dsa-with-sha1');

_IN('1.3.14.3.2.7', 'desCBC');

_IN('1.3.14.3.2.26', 'sha1');
// Deprecated equivalent of sha1WithRSAEncryption
_IN('1.3.14.3.2.29', 'sha1WithRSASignature');
_IN('2.16.840.1.101.3.4.2.1', 'sha256');
_IN('2.16.840.1.101.3.4.2.2', 'sha384');
_IN('2.16.840.1.101.3.4.2.3', 'sha512');
_IN('2.16.840.1.101.3.4.2.4', 'sha224');
_IN('2.16.840.1.101.3.4.2.5', 'sha512-224');
_IN('2.16.840.1.101.3.4.2.6', 'sha512-256');
_IN('1.2.840.113549.2.2', 'md2');
_IN('1.2.840.113549.2.5', 'md5');

// pkcs#7 content types
_IN('1.2.840.113549.1.7.1', 'data');
_IN('1.2.840.113549.1.7.2', 'signedData');
_IN('1.2.840.113549.1.7.3', 'envelopedData');
_IN('1.2.840.113549.1.7.4', 'signedAndEnvelopedData');
_IN('1.2.840.113549.1.7.5', 'digestedData');
_IN('1.2.840.113549.1.7.6', 'encryptedData');

// pkcs#9 oids
_IN('1.2.840.113549.1.9.1', 'emailAddress');
_IN('1.2.840.113549.1.9.2', 'unstructuredName');
_IN('1.2.840.113549.1.9.3', 'contentType');
_IN('1.2.840.113549.1.9.4', 'messageDigest');
_IN('1.2.840.113549.1.9.5', 'signingTime');
_IN('1.2.840.113549.1.9.6', 'counterSignature');
_IN('1.2.840.113549.1.9.7', 'challengePassword');
_IN('1.2.840.113549.1.9.8', 'unstructuredAddress');
_IN('1.2.840.113549.1.9.14', 'extensionRequest');

_IN('1.2.840.113549.1.9.20', 'friendlyName');
_IN('1.2.840.113549.1.9.21', 'localKeyId');
_IN('1.2.840.113549.1.9.22.1', 'x509Certificate');

// pkcs#12 safe bags
_IN('1.2.840.113549.1.12.10.1.1', 'keyBag');
_IN('1.2.840.113549.1.12.10.1.2', 'pkcs8ShroudedKeyBag');
_IN('1.2.840.113549.1.12.10.1.3', 'certBag');
_IN('1.2.840.113549.1.12.10.1.4', 'crlBag');
_IN('1.2.840.113549.1.12.10.1.5', 'secretBag');
_IN('1.2.840.113549.1.12.10.1.6', 'safeContentsBag');

// password-based-encryption for pkcs#12
_IN('1.2.840.113549.1.5.13', 'pkcs5PBES2');
_IN('1.2.840.113549.1.5.12', 'pkcs5PBKDF2');

_IN('1.2.840.113549.1.12.1.1', 'pbeWithSHAAnd128BitRC4');
_IN('1.2.840.113549.1.12.1.2', 'pbeWithSHAAnd40BitRC4');
_IN('1.2.840.113549.1.12.1.3', 'pbeWithSHAAnd3-KeyTripleDES-CBC');
_IN('1.2.840.113549.1.12.1.4', 'pbeWithSHAAnd2-KeyTripleDES-CBC');
_IN('1.2.840.113549.1.12.1.5', 'pbeWithSHAAnd128BitRC2-CBC');
_IN('1.2.840.113549.1.12.1.6', 'pbewithSHAAnd40BitRC2-CBC');

// hmac OIDs
_IN('1.2.840.113549.2.7', 'hmacWithSHA1');
_IN('1.2.840.113549.2.8', 'hmacWithSHA224');
_IN('1.2.840.113549.2.9', 'hmacWithSHA256');
_IN('1.2.840.113549.2.10', 'hmacWithSHA384');
_IN('1.2.840.113549.2.11', 'hmacWithSHA512');

// symmetric key algorithm oids
_IN('1.2.840.113549.3.7', 'des-EDE3-CBC');
_IN('2.16.840.1.101.3.4.1.2', 'aes128-CBC');
_IN('2.16.840.1.101.3.4.1.22', 'aes192-CBC');
_IN('2.16.840.1.101.3.4.1.42', 'aes256-CBC');

// certificate issuer/subject OIDs
_IN('2.5.4.3', 'commonName');
_IN('2.5.4.4', 'surname');
_IN('2.5.4.5', 'serialNumber');
_IN('2.5.4.6', 'countryName');
_IN('2.5.4.7', 'localityName');
_IN('2.5.4.8', 'stateOrProvinceName');
_IN('2.5.4.9', 'streetAddress');
_IN('2.5.4.10', 'organizationName');
_IN('2.5.4.11', 'organizationalUnitName');
_IN('2.5.4.12', 'title');
_IN('2.5.4.13', 'description');
_IN('2.5.4.15', 'businessCategory');
_IN('2.5.4.17', 'postalCode');
_IN('2.5.4.42', 'givenName');
_IN('1.3.6.1.4.1.311.60.2.1.2', 'jurisdictionOfIncorporationStateOrProvinceName');
_IN('1.3.6.1.4.1.311.60.2.1.3', 'jurisdictionOfIncorporationCountryName');

// X.509 extension OIDs
_IN('2.16.840.1.113730.1.1', 'nsCertType');
_IN('2.16.840.1.113730.1.13', 'nsComment'); // deprecated in theory; still widely used
_I_('2.5.29.1', 'authorityKeyIdentifier'); // deprecated, use .35
_I_('2.5.29.2', 'keyAttributes'); // obsolete use .37 or .15
_I_('2.5.29.3', 'certificatePolicies'); // deprecated, use .32
_I_('2.5.29.4', 'keyUsageRestriction'); // obsolete use .37 or .15
_I_('2.5.29.5', 'policyMapping'); // deprecated use .33
_I_('2.5.29.6', 'subtreesConstraint'); // obsolete use .30
_I_('2.5.29.7', 'subjectAltName'); // deprecated use .17
_I_('2.5.29.8', 'issuerAltName'); // deprecated use .18
_I_('2.5.29.9', 'subjectDirectoryAttributes');
_I_('2.5.29.10', 'basicConstraints'); // deprecated use .19
_I_('2.5.29.11', 'nameConstraints'); // deprecated use .30
_I_('2.5.29.12', 'policyConstraints'); // deprecated use .36
_I_('2.5.29.13', 'basicConstraints'); // deprecated use .19
_IN('2.5.29.14', 'subjectKeyIdentifier');
_IN('2.5.29.15', 'keyUsage');
_I_('2.5.29.16', 'privateKeyUsagePeriod');
_IN('2.5.29.17', 'subjectAltName');
_IN('2.5.29.18', 'issuerAltName');
_IN('2.5.29.19', 'basicConstraints');
_I_('2.5.29.20', 'cRLNumber');
_I_('2.5.29.21', 'cRLReason');
_I_('2.5.29.22', 'expirationDate');
_I_('2.5.29.23', 'instructionCode');
_I_('2.5.29.24', 'invalidityDate');
_I_('2.5.29.25', 'cRLDistributionPoints'); // deprecated use .31
_I_('2.5.29.26', 'issuingDistributionPoint'); // deprecated use .28
_I_('2.5.29.27', 'deltaCRLIndicator');
_I_('2.5.29.28', 'issuingDistributionPoint');
_I_('2.5.29.29', 'certificateIssuer');
_I_('2.5.29.30', 'nameConstraints');
_IN('2.5.29.31', 'cRLDistributionPoints');
_IN('2.5.29.32', 'certificatePolicies');
_I_('2.5.29.33', 'policyMappings');
_I_('2.5.29.34', 'policyConstraints'); // deprecated use .36
_IN('2.5.29.35', 'authorityKeyIdentifier');
_I_('2.5.29.36', 'policyConstraints');
_IN('2.5.29.37', 'extKeyUsage');
_I_('2.5.29.46', 'freshestCRL');
_I_('2.5.29.54', 'inhibitAnyPolicy');

// extKeyUsage purposes
_IN('1.3.6.1.4.1.11129.2.4.2', 'timestampList');
_IN('1.3.6.1.5.5.7.1.1', 'authorityInfoAccess');
_IN('1.3.6.1.5.5.7.3.1', 'serverAuth');
_IN('1.3.6.1.5.5.7.3.2', 'clientAuth');
_IN('1.3.6.1.5.5.7.3.3', 'codeSigning');
_IN('1.3.6.1.5.5.7.3.4', 'emailProtection');
_IN('1.3.6.1.5.5.7.3.8', 'timeStamping');
|
|
|
|
/**
|
|
* Javascript implementation of Abstract Syntax Notation Number One.
|
|
*
|
|
* @author Dave Longley
|
|
*
|
|
* Copyright (c) 2010-2015 Digital Bazaar, Inc.
|
|
*
|
|
* An API for storing data using the Abstract Syntax Notation Number One
|
|
* format using DER (Distinguished Encoding Rules) encoding. This encoding is
|
|
* commonly used to store data for PKI, i.e. X.509 Certificates, and this
|
|
* implementation exists for that purpose.
|
|
*
|
|
* Abstract Syntax Notation Number One (ASN.1) is used to define the abstract
|
|
* syntax of information without restricting the way the information is encoded
|
|
* for transmission. It provides a standard that allows for open systems
|
|
* communication. ASN.1 defines the syntax of information data and a number of
|
|
* simple data types as well as a notation for describing them and specifying
|
|
* values for them.
|
|
*
|
|
* The RSA algorithm creates public and private keys that are often stored in
|
|
* X.509 or PKCS#X formats -- which use ASN.1 (encoded in DER format). This
|
|
* class provides the most basic functionality required to store and load DSA
|
|
* keys that are encoded according to ASN.1.
|
|
*
|
|
* The most common binary encodings for ASN.1 are BER (Basic Encoding Rules)
|
|
* and DER (Distinguished Encoding Rules). DER is just a subset of BER that
|
|
* has stricter requirements for how data must be encoded.
|
|
*
|
|
* Each ASN.1 structure has a tag (a byte identifying the ASN.1 structure type)
|
|
* and a byte array for the value of this ASN1 structure which may be data or a
|
|
* list of ASN.1 structures.
|
|
*
|
|
* Each ASN.1 structure using BER is (Tag-Length-Value):
|
|
*
|
|
* | byte 0 | bytes X | bytes Y |
|
|
* |--------|---------|----------
|
|
* | tag | length | value |
|
|
*
|
|
* ASN.1 allows for tags to be of "High-tag-number form" which allows a tag to
|
|
* be two or more octets, but that is not supported by this class. A tag is
|
|
* only 1 byte. Bits 1-5 give the tag number (ie the data type within a
|
|
* particular 'class'), 6 indicates whether or not the ASN.1 value is
|
|
* constructed from other ASN.1 values, and bits 7 and 8 give the 'class'. If
|
|
* bits 7 and 8 are both zero, the class is UNIVERSAL. If only bit 7 is set,
|
|
* then the class is APPLICATION. If only bit 8 is set, then the class is
|
|
* CONTEXT_SPECIFIC. If both bits 7 and 8 are set, then the class is PRIVATE.
|
|
* The tag numbers for the data types for the class UNIVERSAL are listed below:
|
|
*
|
|
* UNIVERSAL 0 Reserved for use by the encoding rules
|
|
* UNIVERSAL 1 Boolean type
|
|
* UNIVERSAL 2 Integer type
|
|
* UNIVERSAL 3 Bitstring type
|
|
* UNIVERSAL 4 Octetstring type
|
|
* UNIVERSAL 5 Null type
|
|
* UNIVERSAL 6 Object identifier type
|
|
* UNIVERSAL 7 Object descriptor type
|
|
* UNIVERSAL 8 External type and Instance-of type
|
|
* UNIVERSAL 9 Real type
|
|
* UNIVERSAL 10 Enumerated type
|
|
* UNIVERSAL 11 Embedded-pdv type
|
|
* UNIVERSAL 12 UTF8String type
|
|
* UNIVERSAL 13 Relative object identifier type
|
|
* UNIVERSAL 14-15 Reserved for future editions
|
|
* UNIVERSAL 16 Sequence and Sequence-of types
|
|
* UNIVERSAL 17 Set and Set-of types
|
|
* UNIVERSAL 18-22, 25-30 Character string types
|
|
* UNIVERSAL 23-24 Time types
|
|
*
|
|
* The length of an ASN.1 structure is specified after the tag identifier.
|
|
* There is a definite form and an indefinite form. The indefinite form may
|
|
* be used if the encoding is constructed and not all immediately available.
|
|
* The indefinite form is encoded using a length byte with only the 8th bit
|
|
* set. The end of the constructed object is marked using end-of-contents
|
|
* octets (two zero bytes).
|
|
*
|
|
* The definite form looks like this:
|
|
*
|
|
* The length may take up 1 or more bytes, it depends on the length of the
|
|
* value of the ASN.1 structure. DER encoding requires that if the ASN.1
|
|
* structure has a value that has a length greater than 127, more than 1 byte
|
|
* will be used to store its length, otherwise just one byte will be used.
|
|
* This is strict.
|
|
*
|
|
* In the case that the length of the ASN.1 value is less than 127, 1 octet
|
|
* (byte) is used to store the "short form" length. The 8th bit has a value of
|
|
* 0 indicating the length is "short form" and not "long form" and bits 7-1
|
|
* give the length of the data. (The 8th bit is the left-most, most significant
|
|
* bit: also known as big endian or network format).
|
|
*
|
|
* In the case that the length of the ASN.1 value is greater than 127, 2 to
|
|
* 127 octets (bytes) are used to store the "long form" length. The first
|
|
* byte's 8th bit is set to 1 to indicate the length is "long form." Bits 7-1
|
|
* give the number of additional octets. All following octets are in base 256
|
|
* with the most significant digit first (typical big-endian binary unsigned
|
|
* integer storage). So, for instance, if the length of a value was 257, the
|
|
* first byte would be set to:
|
|
*
|
|
* 10000010 = 130 = 0x82.
|
|
*
|
|
* This indicates there are 2 octets (base 256) for the length. The second and
|
|
* third bytes (the octets just mentioned) would store the length in base 256:
|
|
*
|
|
* octet 2: 00000001 = 1 * 256^1 = 256
|
|
* octet 3: 00000001 = 1 * 256^0 = 1
|
|
* total = 257
|
|
*
|
|
* The algorithm for converting a js integer value of 257 to base-256 is:
|
|
*
|
|
* var value = 257;
|
|
* var bytes = [];
|
|
* bytes[0] = (value >>> 8) & 0xFF; // most significant byte first
|
|
* bytes[1] = value & 0xFF; // least significant byte last
|
|
*
|
|
* On the ASN.1 UNIVERSAL Object Identifier (OID) type:
|
|
*
|
|
* An OID can be written like: "value1.value2.value3...valueN"
|
|
*
|
|
* The DER encoding rules:
|
|
*
|
|
* The first byte has the value 40 * value1 + value2.
|
|
* The following bytes, if any, encode the remaining values. Each value is
|
|
* encoded in base 128, most significant digit first (big endian), with as
|
|
* few digits as possible, and the most significant bit of each byte set
|
|
* to 1 except the last in each value's encoding. For example: Given the
|
|
* OID "1.2.840.113549", its DER encoding is (remember each byte except the
|
|
* last one in each encoding is OR'd with 0x80):
|
|
*
|
|
* byte 1: 40 * 1 + 2 = 42 = 0x2A.
|
|
* bytes 2-3: 128 * 6 + 72 = 840 = 6 72 = 6 72 = 0x0648 = 0x8648
|
|
* bytes 4-6: 16384 * 6 + 128 * 119 + 13 = 6 119 13 = 0x06770D = 0x86F70D
|
|
*
|
|
* The final value is: 0x2A864886F70D.
|
|
* The full OID (including ASN.1 tag and length of 6 bytes) is:
|
|
* 0x06062A864886F70D
|
|
*/
|
|
|
|
var forge$j = forge$m;

/* ASN.1 API */
// expose the asn1 namespace on the shared forge object
var asn1$2 = forge$j.asn1 = forge$j.asn1 || {};
|
|
|
|
/**
|
|
* ASN.1 classes.
|
|
*/
|
|
asn1$2.Class = {
|
|
UNIVERSAL: 0x00,
|
|
APPLICATION: 0x40,
|
|
CONTEXT_SPECIFIC: 0x80,
|
|
PRIVATE: 0xC0
|
|
};
|
|
|
|
/**
 * ASN.1 types. Not all types are supported by this implementation, only
 * those necessary to implement a simple PKI are implemented.
 *
 * Each value is the UNIVERSAL tag number (bits 5-1 of the identifier byte).
 */
asn1$2.Type = {
  NONE: 0,
  BOOLEAN: 1,
  INTEGER: 2,
  BITSTRING: 3,
  OCTETSTRING: 4,
  NULL: 5,
  OID: 6, // Object Identifier
  ODESC: 7, // Object Descriptor
  EXTERNAL: 8,
  REAL: 9,
  ENUMERATED: 10,
  EMBEDDED: 11,
  UTF8: 12, // UTF8String
  ROID: 13,
  SEQUENCE: 16,
  SET: 17,
  PRINTABLESTRING: 19,
  IA5STRING: 22,
  UTCTIME: 23,
  GENERALIZEDTIME: 24,
  BMPSTRING: 30
};
|
|
|
|
/**
 * Creates a new asn1 object.
 *
 * @param tagClass the tag class for the object.
 * @param type the data type (tag number) for the object.
 * @param constructed true if the asn1 object is in constructed form.
 * @param value the value for the object, if it is not constructed.
 * @param [options] the options to use:
 *          [bitStringContents] the plain BIT STRING content including padding
 *            byte.
 *
 * @return the asn1 object.
 */
asn1$2.create = function(tagClass, type, constructed, value, options) {
  /* An asn1 object carries a tagClass, a type, and a constructed flag.
    When constructed, its value is a list of child asn1 objects; otherwise
    the value is the raw ASN.1 content bytes for the given data type. */

  // strip undefined entries out of an array value
  if(forge$j.util.isArray(value)) {
    var filtered = [];
    for(var idx = 0; idx < value.length; ++idx) {
      if(value[idx] !== undefined) {
        filtered.push(value[idx]);
      }
    }
    value = filtered;
  }

  var obj = {
    tagClass: tagClass,
    type: type,
    constructed: constructed,
    composed: constructed || forge$j.util.isArray(value),
    value: value
  };

  if(options && 'bitStringContents' in options) {
    // TODO: copy byte buffer if it's a buffer not a string
    obj.bitStringContents = options.bitStringContents;
    // TODO: add readonly flag to avoid this overhead
    // keep a snapshot so toDer() can detect later modifications
    obj.original = asn1$2.copy(obj);
  }

  return obj;
};
|
|
|
|
/**
 * Copies an asn1 object.
 *
 * @param obj the asn1 object.
 * @param [options] copy options:
 *          [excludeBitStringContents] true to not copy bitStringContents
 *
 * @return the a copy of the asn1 object.
 */
asn1$2.copy = function(obj, options) {
  // arrays are copied element-by-element, recursively
  if(forge$j.util.isArray(obj)) {
    var list = [];
    for(var idx = 0; idx < obj.length; ++idx) {
      list.push(asn1$2.copy(obj[idx], options));
    }
    return list;
  }

  // strings are immutable; return them as-is
  // TODO: copy byte buffer if it's a buffer not a string
  if(typeof obj === 'string') {
    return obj;
  }

  var result = {
    tagClass: obj.tagClass,
    type: obj.type,
    constructed: obj.constructed,
    composed: obj.composed,
    value: asn1$2.copy(obj.value, options)
  };
  if(options && !options.excludeBitStringContents) {
    // TODO: copy byte buffer if it's a buffer not a string
    result.bitStringContents = obj.bitStringContents;
  }
  return result;
};
|
|
|
|
/**
 * Compares asn1 objects for equality.
 *
 * Note this function does not run in constant time.
 *
 * @param obj1 the first asn1 object.
 * @param obj2 the second asn1 object.
 * @param [options] compare options:
 *          [includeBitStringContents] true to compare bitStringContents
 *
 * @return true if the asn1 objects are equal.
 */
asn1$2.equals = function(obj1, obj2, options) {
  // arrays must both be arrays of the same length with equal elements
  if(forge$j.util.isArray(obj1)) {
    if(!forge$j.util.isArray(obj2) || obj1.length !== obj2.length) {
      return false;
    }
    for(var idx = 0; idx < obj1.length; ++idx) {
      if(!asn1$2.equals(obj1[idx], obj2[idx])) {
        return false;
      }
    }
    return true;
  }

  if(typeof obj1 !== typeof obj2) {
    return false;
  }

  // raw string values compare directly
  if(typeof obj1 === 'string') {
    return obj1 === obj2;
  }

  // compare all structural fields plus the value, recursively
  var same = obj1.tagClass === obj2.tagClass &&
    obj1.type === obj2.type &&
    obj1.constructed === obj2.constructed &&
    obj1.composed === obj2.composed &&
    asn1$2.equals(obj1.value, obj2.value);
  if(options && options.includeBitStringContents) {
    same = same && (obj1.bitStringContents === obj2.bitStringContents);
  }

  return same;
};
|
|
|
|
/**
 * Gets the length of a BER-encoded ASN.1 value.
 *
 * In case the length is not specified, undefined is returned.
 *
 * @param b the BER-encoded ASN.1 byte buffer, starting with the first
 *          length byte.
 *
 * @return the length of the BER-encoded ASN.1 value or undefined.
 */
asn1$2.getBerValueLength = function(b) {
  // TODO: move this function and related DER/BER functions to a der.js
  // file; better abstract ASN.1 away from der/ber.
  var first = b.getByte();

  // 0x80 signals the indefinite-length form
  if(first === 0x80) {
    return undefined;
  }

  if((first & 0x80) === 0) {
    // short form: the byte itself is the length
    return first;
  }

  // long form: bits 7-1 give the number of big-endian base-256 octets
  return b.getInt((first & 0x7F) << 3);
};
|
|
|
|
/**
 * Check if the byte buffer has enough bytes. Throws an Error if not.
 *
 * @param bytes the byte buffer to parse from.
 * @param remaining the bytes remaining in the current parsing state.
 * @param n the number of bytes the buffer must have.
 */
function _checkBufferLength(bytes, remaining, n) {
  if(n <= remaining) {
    return;
  }
  // annotate the error with parsing state for easier diagnosis
  var error = new Error('Too few bytes to parse DER.');
  error.available = bytes.length();
  error.remaining = remaining;
  error.requested = n;
  throw error;
}
|
|
|
|
/**
 * Gets the length of a BER-encoded ASN.1 value.
 *
 * In case the length is not specified, undefined is returned.
 *
 * @param bytes the byte buffer to parse from.
 * @param remaining the bytes remaining in the current parsing state.
 *
 * @return the length of the BER-encoded ASN.1 value or undefined.
 */
var _getValueLength = function(bytes, remaining) {
  // TODO: move this function and related DER/BER functions to a der.js
  // file; better abstract ASN.1 away from der/ber.
  // fromDer already checked that this byte exists
  var first = bytes.getByte();
  remaining--;

  // 0x80 signals the indefinite-length form
  if(first === 0x80) {
    return undefined;
  }

  var length;
  if((first & 0x80) === 0) {
    // short form: the byte itself is the length
    length = first;
  } else {
    // long form: bits 7-1 give the number of big-endian base-256 octets
    var numOctets = first & 0x7F;
    _checkBufferLength(bytes, remaining, numOctets);
    length = bytes.getInt(numOctets << 3);
  }

  // FIXME: this will only happen for 32 bit getInt with high bit set
  if(length < 0) {
    throw new Error('Negative length: ' + length);
  }
  return length;
};
|
|
|
|
/**
 * Parses an asn1 object from a byte buffer in DER format.
 *
 * @param bytes the byte buffer to parse from.
 * @param [strict] true to be strict when checking value lengths, false to
 *          allow truncated values (default: true).
 * @param [options] object with options or boolean strict flag
 *          [strict] true to be strict when checking value lengths, false to
 *            allow truncated values (default: true).
 *          [parseAllBytes] true to ensure all bytes are parsed
 *            (default: true)
 *          [decodeBitStrings] true to attempt to decode the content of
 *            BIT STRINGs (not OCTET STRINGs) using strict mode. Note that
 *            without schema support to understand the data context this can
 *            erroneously decode values that happen to be valid ASN.1. This
 *            flag will be deprecated or removed as soon as schema support is
 *            available. (default: true)
 *
 * @throws Will throw an error for various malformed input conditions.
 *
 * @return the parsed asn1 object.
 */
asn1$2.fromDer = function(bytes, options) {
  // normalize the legacy call forms: missing options or a bare strict flag
  if(options === undefined) {
    options = {strict: true, parseAllBytes: true, decodeBitStrings: true};
  } else if(typeof options === 'boolean') {
    options = {strict: options, parseAllBytes: true, decodeBitStrings: true};
  }

  // fill in any options the caller omitted (all default to true)
  var optionNames = ['strict', 'parseAllBytes', 'decodeBitStrings'];
  for(var idx = 0; idx < optionNames.length; ++idx) {
    if(!(optionNames[idx] in options)) {
      options[optionNames[idx]] = true;
    }
  }

  // wrap in buffer if needed
  if(typeof bytes === 'string') {
    bytes = forge$j.util.createBuffer(bytes);
  }

  var byteCount = bytes.length();
  var result = _fromDer(bytes, bytes.length(), 0, options);

  // optionally require that the input held exactly one DER structure
  if(options.parseAllBytes && bytes.length() !== 0) {
    var error = new Error('Unparsed DER bytes remain after ASN.1 parsing.');
    error.byteCount = byteCount;
    error.remaining = bytes.length();
    throw error;
  }

  return result;
};
|
|
|
|
/**
 * Internal function to parse an asn1 object from a byte buffer in DER format.
 *
 * Consumption accounting: `remaining` tracks how many bytes of the current
 * chunk may still be read; every read below decrements it (directly or via
 * buffer-length deltas around recursive calls).
 *
 * @param bytes the byte buffer to parse from.
 * @param remaining the number of bytes remaining for this chunk.
 * @param depth the current parsing depth.
 * @param options object with same options as fromDer().
 *
 * @return the parsed asn1 object.
 */
function _fromDer(bytes, remaining, depth, options) {
  // temporary storage for consumption calculations
  var start;

  // minimum length for ASN.1 DER structure is 2 (identifier + length byte)
  _checkBufferLength(bytes, remaining, 2);

  // get the first byte (the identifier octet)
  var b1 = bytes.getByte();
  // consumed one byte
  remaining--;

  // get the tag class (bits 8-7)
  var tagClass = (b1 & 0xC0);

  // get the type (bits 1-5)
  var type = b1 & 0x1F;

  // get the variable value length and adjust remaining bytes
  // (consumption is measured as the drop in buffer length across the call)
  start = bytes.length();
  var length = _getValueLength(bytes, remaining);
  remaining -= start - bytes.length();

  // ensure there are enough bytes to get the value
  if(length !== undefined && length > remaining) {
    if(options.strict) {
      var error = new Error('Too few bytes to read ASN.1 value.');
      error.available = bytes.length();
      error.remaining = remaining;
      error.requested = length;
      throw error;
    }
    // Note: be lenient with truncated values and use remaining state bytes
    length = remaining;
  }

  // value storage
  var value;
  // possible BIT STRING contents storage
  var bitStringContents;

  // constructed flag is bit 6 (32 = 0x20) of the first byte
  var constructed = ((b1 & 0x20) === 0x20);
  if(constructed) {
    // parse child asn1 objects from the value
    value = [];
    if(length === undefined) {
      // asn1 object of indefinite length, read until end tag (0x00 0x00)
      for(;;) {
        _checkBufferLength(bytes, remaining, 2);
        if(bytes.bytes(2) === String.fromCharCode(0, 0)) {
          bytes.getBytes(2);
          remaining -= 2;
          break;
        }
        start = bytes.length();
        value.push(_fromDer(bytes, remaining, depth + 1, options));
        remaining -= start - bytes.length();
      }
    } else {
      // parsing asn1 object of definite length
      while(length > 0) {
        start = bytes.length();
        value.push(_fromDer(bytes, length, depth + 1, options));
        remaining -= start - bytes.length();
        length -= start - bytes.length();
      }
    }
  }

  // if a BIT STRING, save the contents including padding byte
  // (peeked, not consumed: bytes.bytes() does not advance the read pointer)
  if(value === undefined && tagClass === asn1$2.Class.UNIVERSAL &&
    type === asn1$2.Type.BITSTRING) {
    bitStringContents = bytes.bytes(length);
  }

  // determine if a non-constructed value should be decoded as a composed
  // value that contains other ASN.1 objects. BIT STRINGs (and OCTET STRINGs)
  // can be used this way.
  if(value === undefined && options.decodeBitStrings &&
    tagClass === asn1$2.Class.UNIVERSAL &&
    // FIXME: OCTET STRINGs not yet supported here
    // .. other parts of forge expect to decode OCTET STRINGs manually
    (type === asn1$2.Type.BITSTRING /*|| type === asn1.Type.OCTETSTRING*/) &&
    length > 1) {
    // save read position so the speculative parse below can be rolled back
    var savedRead = bytes.read;
    var savedRemaining = remaining;
    var unused = 0;
    if(type === asn1$2.Type.BITSTRING) {
      /* The first octet gives the number of bits by which the length of the
        bit string is less than the next multiple of eight (this is called
        the "number of unused bits").

        The second and following octets give the value of the bit string
        converted to an octet string. */
      _checkBufferLength(bytes, remaining, 1);
      unused = bytes.getByte();
      remaining--;
    }
    // if all bits are used, maybe the BIT/OCTET STRING holds ASN.1 objs
    if(unused === 0) {
      try {
        // attempt to parse child asn1 object from the value
        // (stored in array to signal composed value)
        start = bytes.length();
        var subOptions = {
          // enforce strict mode to avoid parsing ASN.1 from plain data
          strict: true,
          decodeBitStrings: true
        };
        var composed = _fromDer(bytes, remaining, depth + 1, subOptions);
        var used = start - bytes.length();
        remaining -= used;
        if(type == asn1$2.Type.BITSTRING) {
          // account for the unused-bits byte consumed above
          used++;
        }

        // if the data all decoded and the class indicates UNIVERSAL or
        // CONTEXT_SPECIFIC then assume we've got an encapsulated ASN.1 object
        var tc = composed.tagClass;
        if(used === length &&
          (tc === asn1$2.Class.UNIVERSAL || tc === asn1$2.Class.CONTEXT_SPECIFIC)) {
          value = [composed];
        }
      } catch(ex) {
        // speculative decode failed; fall through and treat as raw bytes
      }
    }
    if(value === undefined) {
      // restore read position (undo the speculative parse)
      bytes.read = savedRead;
      remaining = savedRemaining;
    }
  }

  if(value === undefined) {
    // asn1 not constructed or composed, get raw value
    // TODO: do DER to OID conversion and vice-versa in .toDer?

    if(length === undefined) {
      if(options.strict) {
        throw new Error('Non-constructed ASN.1 object of indefinite length.');
      }
      // be lenient and use remaining state bytes
      length = remaining;
    }

    if(type === asn1$2.Type.BMPSTRING) {
      // BMPSTRING is UCS-2: decode two bytes per character
      value = '';
      for(; length > 0; length -= 2) {
        _checkBufferLength(bytes, remaining, 2);
        value += String.fromCharCode(bytes.getInt16());
        remaining -= 2;
      }
    } else {
      value = bytes.getBytes(length);
      remaining -= length;
    }
  }

  // add BIT STRING contents if available
  var asn1Options = bitStringContents === undefined ? null : {
    bitStringContents: bitStringContents
  };

  // create and return asn1 object
  return asn1$2.create(tagClass, type, constructed, value, asn1Options);
}
|
|
|
|
/**
 * Converts the given asn1 object to a buffer of bytes in DER format.
 *
 * @param asn1 the asn1 object to convert to bytes.
 *
 * @return the buffer of bytes.
 */
asn1$2.toDer = function(obj) {
  var bytes = forge$j.util.createBuffer();

  // build the first byte (identifier octet: tag class | tag number)
  var b1 = obj.tagClass | obj.type;

  // for storing the ASN.1 value
  var value = forge$j.util.createBuffer();

  // use BIT STRING contents if available and data not changed
  // (obj.original is the snapshot taken by asn1.create; reuse the raw
  // contents only when the object still equals that snapshot)
  var useBitStringContents = false;
  if('bitStringContents' in obj) {
    useBitStringContents = true;
    if(obj.original) {
      useBitStringContents = asn1$2.equals(obj, obj.original);
    }
  }

  if(useBitStringContents) {
    // emit the saved raw contents verbatim (including the padding byte)
    value.putBytes(obj.bitStringContents);
  } else if(obj.composed) {
    // if composed, use each child asn1 object's DER bytes as value
    // turn on 6th bit (0x20 = 32) to indicate asn1 is constructed
    // from other asn1 objects
    if(obj.constructed) {
      b1 |= 0x20;
    } else {
      // type is a bit string, add unused bits of 0x00
      value.putByte(0x00);
    }

    // add all of the child DER bytes together
    for(var i = 0; i < obj.value.length; ++i) {
      if(obj.value[i] !== undefined) {
        value.putBuffer(asn1$2.toDer(obj.value[i]));
      }
    }
  } else {
    // use asn1.value directly
    if(obj.type === asn1$2.Type.BMPSTRING) {
      // BMPSTRING is UCS-2: two bytes per character
      for(var i = 0; i < obj.value.length; ++i) {
        value.putInt16(obj.value.charCodeAt(i));
      }
    } else {
      // ensure integer is minimally-encoded
      // TODO: should all leading bytes be stripped vs just one?
      // .. ex '00 00 01' => '01'?
      if(obj.type === asn1$2.Type.INTEGER &&
        obj.value.length > 1 &&
        // leading 0x00 for positive integer
        ((obj.value.charCodeAt(0) === 0 &&
        (obj.value.charCodeAt(1) & 0x80) === 0) ||
        // leading 0xFF for negative integer
        (obj.value.charCodeAt(0) === 0xFF &&
        (obj.value.charCodeAt(1) & 0x80) === 0x80))) {
        // drop the single redundant sign byte
        value.putBytes(obj.value.substr(1));
      } else {
        value.putBytes(obj.value);
      }
    }
  }

  // add tag byte
  bytes.putByte(b1);

  // use "short form" encoding
  if(value.length() <= 127) {
    // one byte describes the length
    // bit 8 = 0 and bits 7-1 = length
    bytes.putByte(value.length() & 0x7F);
  } else {
    // use "long form" encoding
    // 2 to 127 bytes describe the length
    // first byte: bit 8 = 1 and bits 7-1 = # of additional bytes
    // other bytes: length in base 256, big-endian
    var len = value.length();
    var lenBytes = '';
    do {
      lenBytes += String.fromCharCode(len & 0xFF);
      len = len >>> 8;
    } while(len > 0);

    // set first byte to # bytes used to store the length and turn on
    // bit 8 to indicate long-form length is used
    bytes.putByte(lenBytes.length | 0x80);

    // concatenate length bytes in reverse since they were generated
    // little endian and we need big endian
    for(var i = lenBytes.length - 1; i >= 0; --i) {
      bytes.putByte(lenBytes.charCodeAt(i));
    }
  }

  // concatenate value bytes
  bytes.putBuffer(value);
  return bytes;
};
|
|
|
|
/**
 * Converts an OID dot-separated string to a byte buffer. The byte buffer
 * contains only the DER-encoded value, not any tag or length bytes.
 *
 * @param oid the OID dot-separated string.
 *
 * @return the byte buffer.
 */
asn1$2.oidToDer = function(oid) {
  // split OID into individual values
  var values = oid.split('.');
  var bytes = forge$j.util.createBuffer();

  // first byte is 40 * value1 + value2
  bytes.putByte(40 * parseInt(values[0], 10) + parseInt(values[1], 10));

  // each remaining value is emitted in base 128, most significant byte
  // first, with the 8th bit set on every byte except the last of the value
  for(var i = 2; i < values.length; ++i) {
    var num = parseInt(values[i], 10);

    // build the bytes little-endian on a stack, flagging all but the
    // first-pushed (least significant, i.e. last emitted) byte with 0x80
    var stack = [num & 0x7F];
    num = num >>> 7;
    while(num > 0) {
      stack.push((num & 0x7F) | 0x80);
      num = num >>> 7;
    }

    // pop to emit in big-endian order
    while(stack.length > 0) {
      bytes.putByte(stack.pop());
    }
  }

  return bytes;
};
|
|
|
|
/**
 * Converts a DER-encoded byte buffer to an OID dot-separated string. The
 * byte buffer should contain only the DER-encoded value, not any tag or
 * length bytes.
 *
 * @param bytes the byte buffer.
 *
 * @return the OID dot-separated string.
 */
asn1$2.derToOid = function(bytes) {
  // wrap in buffer if needed
  if(typeof bytes === 'string') {
    bytes = forge$j.util.createBuffer(bytes);
  }

  // first byte encodes 40 * value1 + value2
  var b = bytes.getByte();
  var oid = Math.floor(b / 40) + '.' + (b % 40);

  // remaining bytes encode each value in base 128; the 8th bit is set on
  // every byte except the last one of a value
  var acc = 0;
  while(bytes.length() > 0) {
    b = bytes.getByte();
    acc = acc << 7;
    if(b & 0x80) {
      // continuation byte: fold in the low 7 bits
      acc += b & 0x7F;
    } else {
      // final byte of this value: append and reset the accumulator
      oid += '.' + (acc + b);
      acc = 0;
    }
  }

  return oid;
};
|
|
|
|
/**
 * Converts a UTCTime value to a date.
 *
 * Note: GeneralizedTime has 4 digits for the year and is used for X.509
 * dates past 2049. Parsing that structure hasn't been implemented yet.
 *
 * @param utc the UTCTime value to convert.
 *
 * @return the date.
 */
asn1$2.utcTimeToDate = function(utc) {
  /* The following formats can be used:

    YYMMDDhhmmZ
    YYMMDDhhmm+hh'mm'
    YYMMDDhhmm-hh'mm'
    YYMMDDhhmmssZ
    YYMMDDhhmmss+hh'mm'
    YYMMDDhhmmss-hh'mm'

    Where:

    YY is the least significant two digits of the year
    MM is the month (01 to 12)
    DD is the day (01 to 31)
    hh is the hour (00 to 23)
    mm are the minutes (00 to 59)
    ss are the seconds (00 to 59)
    Z indicates that local time is GMT, + indicates that local time is
    later than GMT, and - indicates that local time is earlier than GMT
    hh' is the absolute value of the offset from GMT in hours
    mm' is the absolute value of the offset from GMT in minutes */
  var date = new Date();

  // if YY >= 50 use 19xx, if YY < 50 use 20xx
  var year = parseInt(utc.substr(0, 2), 10);
  year = (year >= 50) ? 1900 + year : 2000 + year;
  var MM = parseInt(utc.substr(2, 2), 10) - 1; // use 0-11 for month
  var DD = parseInt(utc.substr(4, 2), 10);
  var hh = parseInt(utc.substr(6, 2), 10);
  var mm = parseInt(utc.substr(8, 2), 10);
  var ss = 0;

  // not just YYMMDDhhmmZ
  if(utc.length > 11) {
    // get character after minutes
    var c = utc.charAt(10);
    // NOTE: `end` is function-scoped (var hoisting); when utc.length <= 11
    // it stays undefined, so the `if(end)` offset block below is skipped.
    var end = 10;

    // see if seconds are present
    if(c !== '+' && c !== '-') {
      // get seconds
      ss = parseInt(utc.substr(10, 2), 10);
      end += 2;
    }
  }

  // update date
  date.setUTCFullYear(year, MM, DD);
  date.setUTCHours(hh, mm, ss, 0);

  if(end) {
    // get +/- after end of time
    c = utc.charAt(end);
    if(c === '+' || c === '-') {
      // get hours+minutes offset
      var hhoffset = parseInt(utc.substr(end + 1, 2), 10);
      var mmoffset = parseInt(utc.substr(end + 4, 2), 10);

      // calculate offset in milliseconds
      var offset = hhoffset * 60 + mmoffset;
      offset *= 60000;

      // apply offset: '+' means local is ahead of GMT, so subtract
      if(c === '+') {
        date.setTime(+date - offset);
      } else {
        date.setTime(+date + offset);
      }
    }
  }

  return date;
};
|
|
|
|
/**
 * Converts a GeneralizedTime value to a date.
 *
 * @param gentime the GeneralizedTime value to convert.
 *
 * @return the date.
 */
asn1$2.generalizedTimeToDate = function(gentime) {
  /* The following formats can be used:

    YYYYMMDDHHMMSS
    YYYYMMDDHHMMSS.fff
    YYYYMMDDHHMMSSZ
    YYYYMMDDHHMMSS.fffZ
    YYYYMMDDHHMMSS+hh'mm'
    YYYYMMDDHHMMSS.fff+hh'mm'
    YYYYMMDDHHMMSS-hh'mm'
    YYYYMMDDHHMMSS.fff-hh'mm'

    Where:

    YYYY is the year
    MM is the month (01 to 12)
    DD is the day (01 to 31)
    hh is the hour (00 to 23)
    mm are the minutes (00 to 59)
    ss are the seconds (00 to 59)
    .fff is the second fraction, accurate to three decimal places
    Z indicates that local time is GMT, + indicates that local time is
    later than GMT, and - indicates that local time is earlier than GMT
    hh' is the absolute value of the offset from GMT in hours
    mm' is the absolute value of the offset from GMT in minutes */
  var date = new Date();

  // fixed-position fields of the timestamp
  var YYYY = parseInt(gentime.substr(0, 4), 10);
  var MM = parseInt(gentime.substr(4, 2), 10) - 1; // use 0-11 for month
  var DD = parseInt(gentime.substr(6, 2), 10);
  var hh = parseInt(gentime.substr(8, 2), 10);
  var mm = parseInt(gentime.substr(10, 2), 10);
  var ss = parseInt(gentime.substr(12, 2), 10);
  var fff = 0; // fractional seconds, in milliseconds
  var offset = 0; // GMT offset in milliseconds
  var isUTC = false;

  // trailing 'Z' marks the time as GMT
  if(gentime.charAt(gentime.length - 1) === 'Z') {
    isUTC = true;
  }

  // a '+hhmm'/'-hhmm' suffix occupies the last 5 characters
  var end = gentime.length - 5, c = gentime.charAt(end);
  if(c === '+' || c === '-') {
    // get hours+minutes offset
    var hhoffset = parseInt(gentime.substr(end + 1, 2), 10);
    var mmoffset = parseInt(gentime.substr(end + 4, 2), 10);

    // calculate offset in milliseconds
    offset = hhoffset * 60 + mmoffset;
    offset *= 60000;

    // apply offset: '+' means local is ahead of GMT, so subtract later
    if(c === '+') {
      offset *= -1;
    }

    isUTC = true;
  }

  // check for second fraction (e.g. '.fff' right after the seconds)
  if(gentime.charAt(14) === '.') {
    // parseFloat reads up to the next non-numeric character
    fff = parseFloat(gentime.substr(14), 10) * 1000;
  }

  if(isUTC) {
    date.setUTCFullYear(YYYY, MM, DD);
    date.setUTCHours(hh, mm, ss, fff);

    // apply offset
    date.setTime(+date + offset);
  } else {
    // no zone designator: interpret as local time
    date.setFullYear(YYYY, MM, DD);
    date.setHours(hh, mm, ss, fff);
  }

  return date;
};
|
|
|
|
/**
 * Converts a date to a UTCTime value.
 *
 * Note: GeneralizedTime has 4 digits for the year and is used for X.509
 * dates past 2049. Converting to a GeneralizedTime hasn't been
 * implemented yet.
 *
 * @param date the date to convert.
 *
 * @return the UTCTime value.
 */
asn1$2.dateToUtcTime = function(date) {
  // TODO: validate; currently assumes proper format
  // pass a preformatted string through unchanged
  if(typeof date === 'string') {
    return date;
  }

  // gather the fields of the YYMMDDhhmmssZ format
  var parts = [
    ('' + date.getUTCFullYear()).substr(2),
    '' + (date.getUTCMonth() + 1),
    '' + date.getUTCDate(),
    '' + date.getUTCHours(),
    '' + date.getUTCMinutes(),
    '' + date.getUTCSeconds()
  ];

  // zero-pad each field to 2 digits while concatenating
  var rval = '';
  for(var i = 0; i < parts.length; ++i) {
    if(parts[i].length < 2) {
      rval += '0';
    }
    rval += parts[i];
  }

  return rval + 'Z';
};
|
|
|
|
/**
 * Converts a date to a GeneralizedTime value.
 *
 * @param date the date to convert.
 *
 * @return the GeneralizedTime value as a string.
 */
asn1$2.dateToGeneralizedTime = function(date) {
  // TODO: validate; currently assumes proper format
  // pass a preformatted string through unchanged
  if(typeof date === 'string') {
    return date;
  }

  // gather the fields of the YYYYMMDDHHMMSSZ format
  var parts = [
    '' + date.getUTCFullYear(),
    '' + (date.getUTCMonth() + 1),
    '' + date.getUTCDate(),
    '' + date.getUTCHours(),
    '' + date.getUTCMinutes(),
    '' + date.getUTCSeconds()
  ];

  // zero-pad each field to 2 digits while concatenating
  var rval = '';
  for(var i = 0; i < parts.length; ++i) {
    if(parts[i].length < 2) {
      rval += '0';
    }
    rval += parts[i];
  }

  return rval + 'Z';
};
|
|
|
|
/**
 * Converts a javascript integer to a DER-encoded byte buffer to be used
 * as the value for an INTEGER type.
 *
 * @param x the integer.
 *
 * @return the byte buffer.
 */
asn1$2.integerToDer = function(x) {
  var rval = forge$j.util.createBuffer();

  // pick the smallest signed width (8/16/24/32 bits) that can hold x;
  // each entry is [exclusive magnitude bound, bit width]
  var widths = [
    [0x80, 8],
    [0x8000, 16],
    [0x800000, 24],
    [0x80000000, 32]
  ];
  for(var i = 0; i < widths.length; ++i) {
    var bound = widths[i][0];
    if(x >= -bound && x < bound) {
      return rval.putSignedInt(x, widths[i][1]);
    }
  }

  var error = new Error('Integer too large; max is 32-bits.');
  error.integer = x;
  throw error;
};
|
|
|
|
/**
 * Converts a DER-encoded byte buffer to a javascript integer. This is
 * typically used to decode the value of an INTEGER type.
 *
 * @param bytes the byte buffer.
 *
 * @return the integer.
 */
asn1$2.derToInteger = function(bytes) {
  // wrap in buffer if needed
  if(typeof bytes === 'string') {
    bytes = forge$j.util.createBuffer(bytes);
  }

  // refuse anything that cannot fit a 32-bit signed integer
  var bitCount = bytes.length() * 8;
  if(bitCount > 32) {
    throw new Error('Integer too large; max is 32-bits.');
  }
  return bytes.getSignedInt(bitCount);
};
|
|
|
|
/**
 * Validates that the given ASN.1 object is at least a super set of the
 * given ASN.1 structure. Only tag classes and types are checked. An
 * optional map may also be provided to capture ASN.1 values while the
 * structure is checked.
 *
 * To capture an ASN.1 value, set an object in the validator's 'capture'
 * parameter to the key to use in the capture map. To capture the full
 * ASN.1 object, specify 'captureAsn1'. To capture BIT STRING bytes, including
 * the leading unused bits counter byte, specify 'captureBitStringContents'.
 * To capture BIT STRING bytes, without the leading unused bits counter byte,
 * specify 'captureBitStringValue'.
 *
 * Objects in the validator may set a field 'optional' to true to indicate
 * that it isn't necessary to pass validation.
 *
 * @param obj the ASN.1 object to validate.
 * @param v the ASN.1 structure validator.
 * @param capture an optional map to capture values in.
 * @param errors an optional array for storing validation errors.
 *
 * @return true on success, false on failure.
 */
asn1$2.validate = function(obj, v, capture, errors) {
  var rval = false;

  // ensure tag class and type are the same if specified
  // (an undefined validator field acts as a wildcard)
  if((obj.tagClass === v.tagClass || typeof(v.tagClass) === 'undefined') &&
    (obj.type === v.type || typeof(v.type) === 'undefined')) {
    // ensure constructed flag is the same if specified
    if(obj.constructed === v.constructed ||
      typeof(v.constructed) === 'undefined') {
      rval = true;

      // handle sub values
      if(v.value && forge$j.util.isArray(v.value)) {
        // j tracks position in obj.value; it only advances when a
        // validator entry actually matched, so optional validator
        // entries can be skipped without consuming an object child
        var j = 0;
        for(var i = 0; rval && i < v.value.length; ++i) {
          rval = v.value[i].optional || false;
          if(obj.value[j]) {
            rval = asn1$2.validate(obj.value[j], v.value[i], capture, errors);
            if(rval) {
              ++j;
            } else if(v.value[i].optional) {
              // mismatch is tolerated for optional entries
              rval = true;
            }
          }
          if(!rval && errors) {
            errors.push(
              '[' + v.name + '] ' +
              'Tag class "' + v.tagClass + '", type "' +
              v.type + '" expected value length "' +
              v.value.length + '", got "' +
              obj.value.length + '"');
          }
        }
      }

      if(rval && capture) {
        // record whichever captures the validator requests
        if(v.capture) {
          capture[v.capture] = obj.value;
        }
        if(v.captureAsn1) {
          capture[v.captureAsn1] = obj;
        }
        if(v.captureBitStringContents && 'bitStringContents' in obj) {
          capture[v.captureBitStringContents] = obj.bitStringContents;
        }
        if(v.captureBitStringValue && 'bitStringContents' in obj) {
          if(obj.bitStringContents.length < 2) {
            // nothing after the unused-bits counter byte
            capture[v.captureBitStringValue] = '';
          } else {
            // FIXME: support unused bits with data shifting
            var unused = obj.bitStringContents.charCodeAt(0);
            if(unused !== 0) {
              throw new Error(
                'captureBitStringValue only supported for zero unused bits');
            }
            // strip the leading unused-bits counter byte
            capture[v.captureBitStringValue] = obj.bitStringContents.slice(1);
          }
        }
      }
    } else if(errors) {
      errors.push(
        '[' + v.name + '] ' +
        'Expected constructed "' + v.constructed + '", got "' +
        obj.constructed + '"');
    }
  } else if(errors) {
    if(obj.tagClass !== v.tagClass) {
      errors.push(
        '[' + v.name + '] ' +
        'Expected tag class "' + v.tagClass + '", got "' +
        obj.tagClass + '"');
    }
    if(obj.type !== v.type) {
      errors.push(
        '[' + v.name + '] ' +
        'Expected type "' + v.type + '", got "' + obj.type + '"');
    }
  }
  return rval;
};
|
|
|
|
// regex for testing for non-latin characters (anything above U+00FF)
// Fix: the original literal was over-escaped (`/[^\\u0000-\\u00ff]/`), which
// made the character class match literal backslash/'u'/'0'/'f' characters and
// the byte range '0'-'\\' instead of the code-point range, so ordinary ASCII
// text was misclassified as non-latin. The escapes now match the documented
// intent: test() is true only when a character outside Latin-1 is present.
var _nonLatinRegex = /[^\u0000-\u00ff]/;
|
/**
 * Pretty prints an ASN.1 object to a human-readable string, recursively
 * descending into composed values.
 *
 * @param obj the object to write out.
 * @param level the level in the tree (0 for the root; controls indentation
 *          and leading newlines).
 * @param indentation the number of spaces per level (default: 2).
 *
 * @return the string.
 */
asn1$2.prettyPrint = function(obj, level, indentation) {
  var rval = '';

  // set default level and indentation
  level = level || 0;
  indentation = indentation || 2;

  // start new line for deep levels
  if(level > 0) {
    rval += '\n';
  }

  // create indent
  var indent = '';
  for(var i = 0; i < level * indentation; ++i) {
    indent += ' ';
  }

  // print class:type
  rval += indent + 'Tag: ';
  switch(obj.tagClass) {
  case asn1$2.Class.UNIVERSAL:
    rval += 'Universal:';
    break;
  case asn1$2.Class.APPLICATION:
    rval += 'Application:';
    break;
  case asn1$2.Class.CONTEXT_SPECIFIC:
    rval += 'Context-Specific:';
    break;
  case asn1$2.Class.PRIVATE:
    rval += 'Private:';
    break;
  }

  if(obj.tagClass === asn1$2.Class.UNIVERSAL) {
    rval += obj.type;

    // known types: append a human-readable label after the numeric type
    switch(obj.type) {
    case asn1$2.Type.NONE:
      rval += ' (None)';
      break;
    case asn1$2.Type.BOOLEAN:
      rval += ' (Boolean)';
      break;
    case asn1$2.Type.INTEGER:
      rval += ' (Integer)';
      break;
    case asn1$2.Type.BITSTRING:
      rval += ' (Bit string)';
      break;
    case asn1$2.Type.OCTETSTRING:
      rval += ' (Octet string)';
      break;
    case asn1$2.Type.NULL:
      rval += ' (Null)';
      break;
    case asn1$2.Type.OID:
      rval += ' (Object Identifier)';
      break;
    case asn1$2.Type.ODESC:
      rval += ' (Object Descriptor)';
      break;
    case asn1$2.Type.EXTERNAL:
      rval += ' (External or Instance of)';
      break;
    case asn1$2.Type.REAL:
      rval += ' (Real)';
      break;
    case asn1$2.Type.ENUMERATED:
      rval += ' (Enumerated)';
      break;
    case asn1$2.Type.EMBEDDED:
      rval += ' (Embedded PDV)';
      break;
    case asn1$2.Type.UTF8:
      rval += ' (UTF8)';
      break;
    case asn1$2.Type.ROID:
      rval += ' (Relative Object Identifier)';
      break;
    case asn1$2.Type.SEQUENCE:
      rval += ' (Sequence)';
      break;
    case asn1$2.Type.SET:
      rval += ' (Set)';
      break;
    case asn1$2.Type.PRINTABLESTRING:
      rval += ' (Printable String)';
      break;
    case asn1$2.Type.IA5String:
      rval += ' (IA5String (ASCII))';
      break;
    case asn1$2.Type.UTCTIME:
      rval += ' (UTC time)';
      break;
    case asn1$2.Type.GENERALIZEDTIME:
      rval += ' (Generalized time)';
      break;
    case asn1$2.Type.BMPSTRING:
      rval += ' (BMP String)';
      break;
    }
  } else {
    // non-universal tag classes: just show the raw numeric type
    rval += obj.type;
  }

  rval += '\n';
  rval += indent + 'Constructed: ' + obj.constructed + '\n';

  if(obj.composed) {
    // composed value: recurse into each defined sub-value
    var subvalues = 0;
    var sub = '';
    for(var i = 0; i < obj.value.length; ++i) {
      if(obj.value[i] !== undefined) {
        subvalues += 1;
        sub += asn1$2.prettyPrint(obj.value[i], level + 1, indentation);
        if((i + 1) < obj.value.length) {
          sub += ',';
        }
      }
    }
    rval += indent + 'Sub values: ' + subvalues + sub;
  } else {
    // primitive value: render according to type
    rval += indent + 'Value: ';
    if(obj.type === asn1$2.Type.OID) {
      var oid = asn1$2.derToOid(obj.value);
      rval += oid;
      // show the friendly OID name when the pki module is loaded
      if(forge$j.pki && forge$j.pki.oids) {
        if(oid in forge$j.pki.oids) {
          rval += ' (' + forge$j.pki.oids[oid] + ') ';
        }
      }
    }
    if(obj.type === asn1$2.Type.INTEGER) {
      try {
        rval += asn1$2.derToInteger(obj.value);
      } catch(ex) {
        // integer too large (or otherwise unparseable); fall back to hex
        rval += '0x' + forge$j.util.bytesToHex(obj.value);
      }
    } else if(obj.type === asn1$2.Type.BITSTRING) {
      // TODO: shift bits as needed to display without padding
      if(obj.value.length > 1) {
        // remove unused bits field (first byte of a DER BIT STRING)
        rval += '0x' + forge$j.util.bytesToHex(obj.value.slice(1));
      } else {
        rval += '(none)';
      }
      // show unused bit count
      if(obj.value.length > 0) {
        var unused = obj.value.charCodeAt(0);
        if(unused == 1) {
          rval += ' (1 unused bit shown)';
        } else if(unused > 1) {
          rval += ' (' + unused + ' unused bits shown)';
        }
      }
    } else if(obj.type === asn1$2.Type.OCTETSTRING) {
      // show printable latin-1 text alongside the hex dump
      if(!_nonLatinRegex.test(obj.value)) {
        rval += '(' + obj.value + ') ';
      }
      rval += '0x' + forge$j.util.bytesToHex(obj.value);
    } else if(obj.type === asn1$2.Type.UTF8) {
      try {
        rval += forge$j.util.decodeUtf8(obj.value);
      } catch(e) {
        if(e.message === 'URI malformed') {
          // invalid UTF-8 sequence; show raw hex instead of throwing
          rval +=
            '0x' + forge$j.util.bytesToHex(obj.value) + ' (malformed UTF8)';
        } else {
          throw e;
        }
      }
    } else if(obj.type === asn1$2.Type.PRINTABLESTRING ||
      obj.type === asn1$2.Type.IA5String) {
      rval += obj.value;
    } else if(_nonLatinRegex.test(obj.value)) {
      rval += '0x' + forge$j.util.bytesToHex(obj.value);
    } else if(obj.value.length === 0) {
      rval += '[null]';
    } else {
      rval += obj.value;
    }
  }

  return rval;
};
|
|
|
|
/**
 * Cipher base API.
 *
 * @author Dave Longley
 *
 * Copyright (c) 2010-2014 Digital Bazaar, Inc.
 */

var forge$i = forge$m;

// namespace for symmetric cipher support
forge$i.cipher = forge$i.cipher || {};

// registered algorithms (name -> factory); populated via registerAlgorithm
forge$i.cipher.algorithms = forge$i.cipher.algorithms || {};
|
|
|
|
/**
 * Creates a cipher object that can be used to encrypt data using the given
 * algorithm and key. The algorithm may be supplied either as the name of a
 * previously registered algorithm or directly as a cipher algorithm API
 * object.
 *
 * @param algorithm the algorithm to use, either a string or an algorithm API
 *          object.
 * @param key the key to use, as a binary-encoded string of bytes or a
 *          byte buffer.
 *
 * @return the cipher.
 */
forge$i.cipher.createCipher = function(algorithm, key) {
  var api = algorithm;
  if(typeof api === 'string') {
    // resolve the registered factory and instantiate the algorithm API
    var factory = forge$i.cipher.getAlgorithm(api);
    api = factory ? factory() : factory;
  }
  if(!api) {
    throw new Error('Unsupported algorithm: ' + algorithm);
  }

  // assume block cipher
  return new forge$i.cipher.BlockCipher({
    algorithm: api,
    key: key,
    decrypt: false
  });
};
|
|
|
|
/**
 * Creates a decipher object that can be used to decrypt data using the given
 * algorithm and key. The algorithm may be supplied either as the name of a
 * previously registered algorithm or directly as a cipher algorithm API
 * object.
 *
 * @param algorithm the algorithm to use, either a string or an algorithm API
 *          object.
 * @param key the key to use, as a binary-encoded string of bytes or a
 *          byte buffer.
 *
 * @return the cipher.
 */
forge$i.cipher.createDecipher = function(algorithm, key) {
  var api = algorithm;
  if(typeof api === 'string') {
    // resolve the registered factory and instantiate the algorithm API
    var registered = forge$i.cipher.getAlgorithm(api);
    if(registered) {
      api = registered();
    } else {
      api = registered;
    }
  }
  if(!api) {
    throw new Error('Unsupported algorithm: ' + algorithm);
  }

  // assume block cipher
  return new forge$i.cipher.BlockCipher({
    algorithm: api,
    key: key,
    decrypt: true
  });
};
|
|
|
|
/**
 * Registers an algorithm factory under a name. Registering a name that is
 * already in use overwrites the previous algorithm API object.
 *
 * @param name the name of the algorithm (stored case-insensitively).
 * @param algorithm the algorithm API object.
 */
forge$i.cipher.registerAlgorithm = function(name, algorithm) {
  // keys are normalized to upper case so lookups are case-insensitive
  var key = name.toUpperCase();
  forge$i.cipher.algorithms[key] = algorithm;
};
|
|
|
|
/**
 * Gets a registered algorithm by name (case-insensitive).
 *
 * @param name the name of the algorithm.
 *
 * @return the algorithm, if found, null if not.
 */
forge$i.cipher.getAlgorithm = function(name) {
  var key = name.toUpperCase();
  return (key in forge$i.cipher.algorithms) ?
    forge$i.cipher.algorithms[key] : null;
};
|
|
|
|
/**
 * Block cipher wrapper that drives an algorithm API object through a cipher
 * mode (ECB/CBC/CFB/OFB/CTR/GCM).
 *
 * @param options the options to use:
 *          algorithm the algorithm API object (provides mode + initialize).
 *          key the key to use.
 *          decrypt true to decrypt, false to encrypt.
 */
var BlockCipher = forge$i.cipher.BlockCipher = function(options) {
  this.algorithm = options.algorithm;
  this.mode = this.algorithm.mode;
  this.blockSize = this.mode.blockSize;
  // true once finish() has been called; flushes remaining input
  this._finish = false;
  // input/output buffers are (re)created in start()
  this._input = null;
  this.output = null;
  // the per-block operation to run: the mode's decrypt or encrypt function
  this._op = options.decrypt ? this.mode.decrypt : this.mode.encrypt;
  this._decrypt = options.decrypt;
  // let the algorithm process the key and any other options
  this.algorithm.initialize(options);
};
|
|
|
|
/**
 * Starts or restarts the encryption or decryption process, whichever
 * was previously configured.
 *
 * For non-GCM mode, the IV may be a binary-encoded string of bytes, an array
 * of bytes, a byte buffer, or an array of 32-bit integers. If the IV is in
 * bytes, then it must be Nb (16) bytes in length. If the IV is given in as
 * 32-bit integers, then it must be 4 integers long.
 *
 * Note: an IV is not required or used in ECB mode.
 *
 * For GCM-mode, the IV must be given as a binary-encoded string of bytes or
 * a byte buffer. The number of bytes should be 12 (96 bits) as recommended
 * by NIST SP-800-38D but another length may be given.
 *
 * @param options the options to use:
 *          iv the initialization vector to use as a binary-encoded string of
 *            bytes, null to reuse the last ciphered block from a previous
 *            update() (this "residue" method is for legacy support only).
 *          additionalData additional authentication data as a binary-encoded
 *            string of bytes, for 'GCM' mode, (default: none).
 *          tagLength desired length of authentication tag, in bits, for
 *            'GCM' mode (0-128, default: 128).
 *          tag the authentication tag to check if decrypting, as a
 *            binary-encoded string of bytes.
 *          output the output the buffer to write to, null to create one.
 */
BlockCipher.prototype.start = function(options) {
  options = options || {};

  // hand the mode a private copy of the options with the decrypt flag
  // forced to match this cipher's configuration
  var opts = {};
  for(var name in options) {
    opts[name] = options[name];
  }
  opts.decrypt = this._decrypt;

  // reset state and (re)create the I/O buffers
  this._finish = false;
  this._input = forge$i.util.createBuffer();
  this.output = options.output || forge$i.util.createBuffer();
  this.mode.start(opts);
};
|
|
|
|
/**
 * Updates the next block according to the cipher mode.
 *
 * @param input the buffer to read from.
 */
BlockCipher.prototype.update = function(input) {
  if(input) {
    // input given, so empty it into the input buffer
    this._input.putBuffer(input);
  }

  // run the mode's per-block operation until it reports it needs more
  // input (truthy return) or we are flushing the final blocks
  var done = false;
  while(!done) {
    done = this._op.call(this.mode, this._input, this.output, this._finish) ||
      this._finish;
  }

  // free consumed memory from input buffer
  this._input.compact();
};
|
|
|
|
/**
 * Finishes encrypting or decrypting.
 *
 * @param pad a padding function to use in CBC mode, null for default,
 *          signature(blockSize, buffer, decrypt).
 *
 * @return true if successful, false on error.
 */
BlockCipher.prototype.finish = function(pad) {
  // backwards-compatibility w/deprecated padding API
  // Note: will overwrite padding functions even after another start() call
  if(pad && (this.mode.name === 'ECB' || this.mode.name === 'CBC')) {
    // adapt the legacy pad(blockSize, buffer, decrypt) signature to the
    // mode-level pad/unpad hooks
    this.mode.pad = function(input) {
      return pad(this.blockSize, input, false);
    };
    this.mode.unpad = function(output) {
      return pad(this.blockSize, output, true);
    };
  }

  // build options for padding and afterFinish functions
  var options = {};
  options.decrypt = this._decrypt;

  // get # of bytes that won't fill a block
  options.overflow = this._input.length() % this.blockSize;

  // when encrypting, pad the remaining plaintext BEFORE the final update
  if(!this._decrypt && this.mode.pad) {
    if(!this.mode.pad(this._input, options)) {
      return false;
    }
  }

  // do final update (flushes any buffered input through the mode)
  this._finish = true;
  this.update();

  // when decrypting, strip padding from the output AFTER the final update
  if(this._decrypt && this.mode.unpad) {
    if(!this.mode.unpad(this.output, options)) {
      return false;
    }
  }

  // give the mode a chance to run a final step, if it defines one
  if(this.mode.afterFinish) {
    if(!this.mode.afterFinish(this.output, options)) {
      return false;
    }
  }

  return true;
};
|
|
|
|
/**
 * Supported cipher modes.
 *
 * @author Dave Longley
 *
 * Copyright (c) 2010-2014 Digital Bazaar, Inc.
 */

var forge$h = forge$m;

forge$h.cipher = forge$h.cipher || {};

// supported cipher modes (name -> mode constructor)
var modes = forge$h.cipher.modes = forge$h.cipher.modes || {};
|
|
|
|
/** Electronic codebook (ECB) (Don't use this; it's not secure) **/

/**
 * ECB mode: each block is ciphered independently, with no IV or chaining.
 *
 * @param options the options to use:
 *          cipher the block cipher algorithm object.
 *          blockSize the block size in bytes (default: 16).
 */
modes.ecb = function(options) {
  options = options || {};
  this.name = 'ECB';
  this.cipher = options.cipher;
  this.blockSize = options.blockSize || 16;
  // number of 32-bit integers per block
  this._ints = this.blockSize / 4;
  // scratch blocks, reused across calls to avoid reallocation
  this._inBlock = new Array(this._ints);
  this._outBlock = new Array(this._ints);
};

// ECB keeps no per-message state, so there is nothing to initialize
modes.ecb.prototype.start = function(options) {};

/**
 * Encrypts one block. Returns true when there is not enough input to
 * process a block (signals the caller to stop looping).
 */
modes.ecb.prototype.encrypt = function(input, output, finish) {
  // not enough input to encrypt
  if(input.length() < this.blockSize && !(finish && input.length() > 0)) {
    return true;
  }

  // get next block
  for(var i = 0; i < this._ints; ++i) {
    this._inBlock[i] = input.getInt32();
  }

  // encrypt block
  this.cipher.encrypt(this._inBlock, this._outBlock);

  // write output
  for(var i = 0; i < this._ints; ++i) {
    output.putInt32(this._outBlock[i]);
  }
};

/**
 * Decrypts one block. Returns true when there is not enough input to
 * process a block.
 */
modes.ecb.prototype.decrypt = function(input, output, finish) {
  // not enough input to decrypt
  if(input.length() < this.blockSize && !(finish && input.length() > 0)) {
    return true;
  }

  // get next block
  for(var i = 0; i < this._ints; ++i) {
    this._inBlock[i] = input.getInt32();
  }

  // decrypt block
  this.cipher.decrypt(this._inBlock, this._outBlock);

  // write output
  for(var i = 0; i < this._ints; ++i) {
    output.putInt32(this._outBlock[i]);
  }
};

modes.ecb.prototype.pad = function(input, options) {
  // add PKCS#7 padding to block (each pad byte is the
  // value of the number of pad bytes)
  // a full block of padding is added when input is already block-aligned
  var padding = (input.length() === this.blockSize ?
    this.blockSize : (this.blockSize - input.length()));
  input.fillWithByte(padding, padding);
  return true;
};

modes.ecb.prototype.unpad = function(output, options) {
  // check for error: input data not a multiple of blockSize
  if(options.overflow > 0) {
    return false;
  }

  // ensure padding byte count is valid
  var len = output.length();
  var count = output.at(len - 1);
  // NOTE(review): this accepts pad counts up to 4x the block size
  // (blockSize << 2); a strict PKCS#7 check would use count > this.blockSize.
  // Preserved as-is to match upstream forge behavior -- confirm before
  // tightening.
  if(count > (this.blockSize << 2)) {
    return false;
  }

  // trim off padding bytes
  output.truncate(count);
  return true;
};
|
|
|
|
/** Cipher-block Chaining (CBC) **/

/**
 * CBC mode: each plaintext block is XOR'd with the previous ciphertext
 * block (or the IV) before being ciphered.
 *
 * @param options the options to use:
 *          cipher the block cipher algorithm object.
 *          blockSize the block size in bytes (default: 16).
 */
modes.cbc = function(options) {
  options = options || {};
  this.name = 'CBC';
  this.cipher = options.cipher;
  this.blockSize = options.blockSize || 16;
  // number of 32-bit integers per block
  this._ints = this.blockSize / 4;
  // scratch blocks, reused across calls
  this._inBlock = new Array(this._ints);
  this._outBlock = new Array(this._ints);
};

modes.cbc.prototype.start = function(options) {
  // Note: legacy support for using IV residue (has security flaws)
  // if IV is null, reuse block from previous processing
  if(options.iv === null) {
    // must have a previous block
    if(!this._prev) {
      throw new Error('Invalid IV parameter.');
    }
    this._iv = this._prev.slice(0);
  } else if(!('iv' in options)) {
    throw new Error('Invalid IV parameter.');
  } else {
    // save IV as "previous" block
    this._iv = transformIV(options.iv, this.blockSize);
    this._prev = this._iv.slice(0);
  }
};

modes.cbc.prototype.encrypt = function(input, output, finish) {
  // not enough input to encrypt
  if(input.length() < this.blockSize && !(finish && input.length() > 0)) {
    return true;
  }

  // get next block
  // CBC XOR's IV (or previous block) with plaintext
  for(var i = 0; i < this._ints; ++i) {
    this._inBlock[i] = this._prev[i] ^ input.getInt32();
  }

  // encrypt block
  this.cipher.encrypt(this._inBlock, this._outBlock);

  // write output, save previous block
  for(var i = 0; i < this._ints; ++i) {
    output.putInt32(this._outBlock[i]);
  }
  // Note: _prev aliases _outBlock (no copy); this appears safe because the
  // next call fully reads _prev before cipher.encrypt overwrites _outBlock
  this._prev = this._outBlock;
};

modes.cbc.prototype.decrypt = function(input, output, finish) {
  // not enough input to decrypt
  if(input.length() < this.blockSize && !(finish && input.length() > 0)) {
    return true;
  }

  // get next block
  for(var i = 0; i < this._ints; ++i) {
    this._inBlock[i] = input.getInt32();
  }

  // decrypt block
  this.cipher.decrypt(this._inBlock, this._outBlock);

  // write output, save previous ciphered block
  // CBC XOR's IV (or previous block) with ciphertext
  for(var i = 0; i < this._ints; ++i) {
    output.putInt32(this._prev[i] ^ this._outBlock[i]);
  }
  // copy (not alias) the ciphertext block, since _inBlock is overwritten
  // before _prev is consumed on the next call
  this._prev = this._inBlock.slice(0);
};

modes.cbc.prototype.pad = function(input, options) {
  // add PKCS#7 padding to block (each pad byte is the
  // value of the number of pad bytes)
  // a full block of padding is added when input is already block-aligned
  var padding = (input.length() === this.blockSize ?
    this.blockSize : (this.blockSize - input.length()));
  input.fillWithByte(padding, padding);
  return true;
};

modes.cbc.prototype.unpad = function(output, options) {
  // check for error: input data not a multiple of blockSize
  if(options.overflow > 0) {
    return false;
  }

  // ensure padding byte count is valid
  var len = output.length();
  var count = output.at(len - 1);
  // NOTE(review): this accepts pad counts up to 4x the block size
  // (blockSize << 2); a strict PKCS#7 check would use count > this.blockSize.
  // Preserved as-is to match upstream forge behavior -- confirm before
  // tightening.
  if(count > (this.blockSize << 2)) {
    return false;
  }

  // trim off padding bytes
  output.truncate(count);
  return true;
};
|
|
|
|
/** Cipher feedback (CFB) **/

/**
 * CFB mode: the keystream is produced by encrypting the previous ciphertext
 * block (IV for the first block); plaintext is XOR'd with that keystream.
 * Supports partial (sub-block) input via the _partial* bookkeeping.
 *
 * @param options the options to use:
 *          cipher the block cipher algorithm object.
 *          blockSize the block size in bytes (default: 16).
 */
modes.cfb = function(options) {
  options = options || {};
  this.name = 'CFB';
  this.cipher = options.cipher;
  this.blockSize = options.blockSize || 16;
  this._ints = this.blockSize / 4;
  // feedback register; seeded from the IV in start()
  this._inBlock = null;
  this._outBlock = new Array(this._ints);
  // staging for sub-block input/output
  this._partialBlock = new Array(this._ints);
  this._partialOutput = forge$h.util.createBuffer();
  // number of bytes of the current block already emitted to the caller
  this._partialBytes = 0;
};

modes.cfb.prototype.start = function(options) {
  if(!('iv' in options)) {
    throw new Error('Invalid IV parameter.');
  }
  // use IV as first input
  this._iv = transformIV(options.iv, this.blockSize);
  this._inBlock = this._iv.slice(0);
  this._partialBytes = 0;
};

modes.cfb.prototype.encrypt = function(input, output, finish) {
  // not enough input to encrypt
  var inputLength = input.length();
  if(inputLength === 0) {
    return true;
  }

  // encrypt block (generate the keystream from the feedback register)
  this.cipher.encrypt(this._inBlock, this._outBlock);

  // handle full block
  if(this._partialBytes === 0 && inputLength >= this.blockSize) {
    // XOR input with output, write input as output
    // (the ciphertext also becomes the next feedback block)
    for(var i = 0; i < this._ints; ++i) {
      this._inBlock[i] = input.getInt32() ^ this._outBlock[i];
      output.putInt32(this._inBlock[i]);
    }
    return;
  }

  // handle partial block
  // partialBytes = number of input bytes available in this (incomplete) block
  var partialBytes = (this.blockSize - inputLength) % this.blockSize;
  if(partialBytes > 0) {
    partialBytes = this.blockSize - partialBytes;
  }

  // XOR input with output, write input as partial output
  this._partialOutput.clear();
  for(var i = 0; i < this._ints; ++i) {
    this._partialBlock[i] = input.getInt32() ^ this._outBlock[i];
    this._partialOutput.putInt32(this._partialBlock[i]);
  }

  if(partialBytes > 0) {
    // block still incomplete, restore input buffer
    input.read -= this.blockSize;
  } else {
    // block complete, update input block
    for(var i = 0; i < this._ints; ++i) {
      this._inBlock[i] = this._partialBlock[i];
    }
  }

  // skip any previous partial bytes (already emitted on an earlier call)
  if(this._partialBytes > 0) {
    this._partialOutput.getBytes(this._partialBytes);
  }

  if(partialBytes > 0 && !finish) {
    // emit only the newly available bytes and remember how many were sent
    output.putBytes(this._partialOutput.getBytes(
      partialBytes - this._partialBytes));
    this._partialBytes = partialBytes;
    return true;
  }

  output.putBytes(this._partialOutput.getBytes(
    inputLength - this._partialBytes));
  this._partialBytes = 0;
};

modes.cfb.prototype.decrypt = function(input, output, finish) {
  // not enough input to decrypt
  var inputLength = input.length();
  if(inputLength === 0) {
    return true;
  }

  // encrypt block (CFB always uses encryption mode)
  this.cipher.encrypt(this._inBlock, this._outBlock);

  // handle full block
  if(this._partialBytes === 0 && inputLength >= this.blockSize) {
    // XOR input with output, write input as output
    // (the ciphertext becomes the next feedback block)
    for(var i = 0; i < this._ints; ++i) {
      this._inBlock[i] = input.getInt32();
      output.putInt32(this._inBlock[i] ^ this._outBlock[i]);
    }
    return;
  }

  // handle partial block
  var partialBytes = (this.blockSize - inputLength) % this.blockSize;
  if(partialBytes > 0) {
    partialBytes = this.blockSize - partialBytes;
  }

  // XOR input with output, write input as partial output
  this._partialOutput.clear();
  for(var i = 0; i < this._ints; ++i) {
    this._partialBlock[i] = input.getInt32();
    this._partialOutput.putInt32(this._partialBlock[i] ^ this._outBlock[i]);
  }

  if(partialBytes > 0) {
    // block still incomplete, restore input buffer
    input.read -= this.blockSize;
  } else {
    // block complete, update input block
    for(var i = 0; i < this._ints; ++i) {
      this._inBlock[i] = this._partialBlock[i];
    }
  }

  // skip any previous partial bytes (already emitted on an earlier call)
  if(this._partialBytes > 0) {
    this._partialOutput.getBytes(this._partialBytes);
  }

  if(partialBytes > 0 && !finish) {
    output.putBytes(this._partialOutput.getBytes(
      partialBytes - this._partialBytes));
    this._partialBytes = partialBytes;
    return true;
  }

  output.putBytes(this._partialOutput.getBytes(
    inputLength - this._partialBytes));
  this._partialBytes = 0;
};
|
|
|
|
/** Output feedback (OFB) **/

/**
 * OFB mode: the keystream is produced by repeatedly encrypting the previous
 * keystream block (IV for the first block); plaintext is XOR'd with it.
 * Encryption and decryption are the same operation.
 *
 * @param options the options to use:
 *          cipher the block cipher algorithm object.
 *          blockSize the block size in bytes (default: 16).
 */
modes.ofb = function(options) {
  options = options || {};
  this.name = 'OFB';
  this.cipher = options.cipher;
  this.blockSize = options.blockSize || 16;
  this._ints = this.blockSize / 4;
  // feedback register; seeded from the IV in start()
  this._inBlock = null;
  this._outBlock = new Array(this._ints);
  // staging for sub-block output
  this._partialOutput = forge$h.util.createBuffer();
  this._partialBytes = 0;
};

modes.ofb.prototype.start = function(options) {
  if(!('iv' in options)) {
    throw new Error('Invalid IV parameter.');
  }
  // use IV as first input
  this._iv = transformIV(options.iv, this.blockSize);
  this._inBlock = this._iv.slice(0);
  this._partialBytes = 0;
};

modes.ofb.prototype.encrypt = function(input, output, finish) {
  // not enough input to encrypt
  var inputLength = input.length();
  // (equivalent to inputLength === 0)
  if(input.length() === 0) {
    return true;
  }

  // encrypt block (OFB always uses encryption mode)
  this.cipher.encrypt(this._inBlock, this._outBlock);

  // handle full block
  if(this._partialBytes === 0 && inputLength >= this.blockSize) {
    // XOR input with output and update next input
    for(var i = 0; i < this._ints; ++i) {
      output.putInt32(input.getInt32() ^ this._outBlock[i]);
      this._inBlock[i] = this._outBlock[i];
    }
    return;
  }

  // handle partial block
  // partialBytes = number of input bytes available in this (incomplete) block
  var partialBytes = (this.blockSize - inputLength) % this.blockSize;
  if(partialBytes > 0) {
    partialBytes = this.blockSize - partialBytes;
  }

  // XOR input with output
  this._partialOutput.clear();
  for(var i = 0; i < this._ints; ++i) {
    this._partialOutput.putInt32(input.getInt32() ^ this._outBlock[i]);
  }

  if(partialBytes > 0) {
    // block still incomplete, restore input buffer
    input.read -= this.blockSize;
  } else {
    // block complete, update input block
    for(var i = 0; i < this._ints; ++i) {
      this._inBlock[i] = this._outBlock[i];
    }
  }

  // skip any previous partial bytes (already emitted on an earlier call)
  if(this._partialBytes > 0) {
    this._partialOutput.getBytes(this._partialBytes);
  }

  if(partialBytes > 0 && !finish) {
    output.putBytes(this._partialOutput.getBytes(
      partialBytes - this._partialBytes));
    this._partialBytes = partialBytes;
    return true;
  }

  output.putBytes(this._partialOutput.getBytes(
    inputLength - this._partialBytes));
  this._partialBytes = 0;
};

// OFB decryption is identical to encryption
modes.ofb.prototype.decrypt = modes.ofb.prototype.encrypt;
|
|
|
|
/** Counter (CTR) **/

/**
 * CTR mode: the keystream is produced by encrypting an incrementing counter
 * (seeded from the IV); plaintext is XOR'd with it. Encryption and
 * decryption are the same operation.
 *
 * @param options the options to use:
 *          cipher the block cipher algorithm object.
 *          blockSize the block size in bytes (default: 16).
 */
modes.ctr = function(options) {
  options = options || {};
  this.name = 'CTR';
  this.cipher = options.cipher;
  this.blockSize = options.blockSize || 16;
  this._ints = this.blockSize / 4;
  // counter block; seeded from the IV in start()
  this._inBlock = null;
  this._outBlock = new Array(this._ints);
  // staging for sub-block output
  this._partialOutput = forge$h.util.createBuffer();
  this._partialBytes = 0;
};

modes.ctr.prototype.start = function(options) {
  if(!('iv' in options)) {
    throw new Error('Invalid IV parameter.');
  }
  // use IV as first input
  this._iv = transformIV(options.iv, this.blockSize);
  this._inBlock = this._iv.slice(0);
  this._partialBytes = 0;
};

modes.ctr.prototype.encrypt = function(input, output, finish) {
  // not enough input to encrypt
  var inputLength = input.length();
  if(inputLength === 0) {
    return true;
  }

  // encrypt block (CTR always uses encryption mode)
  this.cipher.encrypt(this._inBlock, this._outBlock);

  // handle full block
  if(this._partialBytes === 0 && inputLength >= this.blockSize) {
    // XOR input with output
    for(var i = 0; i < this._ints; ++i) {
      output.putInt32(input.getInt32() ^ this._outBlock[i]);
    }
  } else {
    // handle partial block
    var partialBytes = (this.blockSize - inputLength) % this.blockSize;
    if(partialBytes > 0) {
      partialBytes = this.blockSize - partialBytes;
    }

    // XOR input with output
    this._partialOutput.clear();
    for(var i = 0; i < this._ints; ++i) {
      this._partialOutput.putInt32(input.getInt32() ^ this._outBlock[i]);
    }

    if(partialBytes > 0) {
      // block still incomplete, restore input buffer
      input.read -= this.blockSize;
    }

    // skip any previous partial bytes (already emitted on an earlier call)
    if(this._partialBytes > 0) {
      this._partialOutput.getBytes(this._partialBytes);
    }

    if(partialBytes > 0 && !finish) {
      output.putBytes(this._partialOutput.getBytes(
        partialBytes - this._partialBytes));
      this._partialBytes = partialBytes;
      return true;
    }

    output.putBytes(this._partialOutput.getBytes(
      inputLength - this._partialBytes));
    this._partialBytes = 0;
  }

  // block complete, increment counter (input block)
  inc32(this._inBlock);
};

// CTR decryption is identical to encryption
modes.ctr.prototype.decrypt = modes.ctr.prototype.encrypt;
|
|
|
|
/** Galois/Counter Mode (GCM) **/

/**
 * GCM mode: CTR-style encryption combined with a GHASH-based authentication
 * tag over the additional data and ciphertext.
 *
 * @param options the options to use:
 *          cipher the block cipher algorithm object.
 *          blockSize the block size in bytes (default: 16).
 */
modes.gcm = function(options) {
  options = options || {};
  this.name = 'GCM';
  this.cipher = options.cipher;
  this.blockSize = options.blockSize || 16;
  // number of 32-bit integers per block
  this._ints = this.blockSize / 4;
  this._inBlock = new Array(this._ints);
  this._outBlock = new Array(this._ints);
  // staging for sub-block output
  this._partialOutput = forge$h.util.createBuffer();
  this._partialBytes = 0;

  // R is actually this value concatenated with 120 more zero bits, but
  // we only XOR against R so the other zeros have no effect -- we just
  // apply this value to the first integer in a block
  this._R = 0xE1000000;
};
|
|
|
|
/**
 * Initializes GCM state for a message: builds the hash subkey and lookup
 * table, derives the pre-counter block J_0 from the IV, and folds the
 * additional authenticated data into the running GHASH value.
 *
 * @param options the options to use:
 *          iv the initialization vector (12 bytes / 96 bits recommended).
 *          additionalData optional additional authenticated data.
 *          tagLength desired tag length in bits (default: 128).
 *          tag the tag to verify when decrypting.
 *          decrypt true when decrypting (requires a tag).
 */
modes.gcm.prototype.start = function(options) {
  if(!('iv' in options)) {
    throw new Error('Invalid IV parameter.');
  }
  // ensure IV is a byte buffer
  var iv = forge$h.util.createBuffer(options.iv);

  // no ciphered data processed yet
  this._cipherLength = 0;

  // default additional data is none
  var additionalData;
  if('additionalData' in options) {
    additionalData = forge$h.util.createBuffer(options.additionalData);
  } else {
    additionalData = forge$h.util.createBuffer();
  }

  // default tag length is 128 bits
  if('tagLength' in options) {
    this._tagLength = options.tagLength;
  } else {
    this._tagLength = 128;
  }

  // if tag is given, ensure tag matches tag length
  this._tag = null;
  if(options.decrypt) {
    // save tag to check later
    this._tag = forge$h.util.createBuffer(options.tag).getBytes();
    if(this._tag.length !== (this._tagLength / 8)) {
      throw new Error('Authentication tag does not match tag length.');
    }
  }

  // create tmp storage for hash calculation
  this._hashBlock = new Array(this._ints);

  // no tag generated yet
  this.tag = null;

  // generate hash subkey
  // (apply block cipher to "zero" block)
  this._hashSubkey = new Array(this._ints);
  this.cipher.encrypt([0, 0, 0, 0], this._hashSubkey);

  // generate table M
  // use 4-bit tables (32 component decomposition of a 16 byte value)
  // 8-bit tables take more space and are known to have security
  // vulnerabilities (in native implementations)
  this.componentBits = 4;
  this._m = this.generateHashTable(this._hashSubkey, this.componentBits);

  // Note: support IV length different from 96 bits? (only supporting
  // 96 bits is recommended by NIST SP-800-38D)
  // generate J_0
  var ivLength = iv.length();
  if(ivLength === 12) {
    // 96-bit IV: J_0 is IV || 0^31 || 1
    this._j0 = [iv.getInt32(), iv.getInt32(), iv.getInt32(), 1];
  } else {
    // IV is NOT 96-bits: J_0 = GHASH over the IV plus its bit length
    this._j0 = [0, 0, 0, 0];
    while(iv.length() > 0) {
      this._j0 = this.ghash(
        this._hashSubkey, this._j0,
        [iv.getInt32(), iv.getInt32(), iv.getInt32(), iv.getInt32()]);
    }
    this._j0 = this.ghash(
      this._hashSubkey, this._j0, [0, 0].concat(from64To32(ivLength * 8)));
  }

  // generate ICB (initial counter block)
  this._inBlock = this._j0.slice(0);
  inc32(this._inBlock);
  this._partialBytes = 0;

  // consume authentication data
  additionalData = forge$h.util.createBuffer(additionalData);
  // save additional data length as a BE 64-bit number
  this._aDataLength = from64To32(additionalData.length() * 8);
  // pad additional data to 128 bit (16 byte) block size
  var overflow = additionalData.length() % this.blockSize;
  if(overflow) {
    additionalData.fillWithByte(0, this.blockSize - overflow);
  }
  // fold all additional data blocks into the running GHASH value S
  this._s = [0, 0, 0, 0];
  while(additionalData.length() > 0) {
    this._s = this.ghash(this._hashSubkey, this._s, [
      additionalData.getInt32(),
      additionalData.getInt32(),
      additionalData.getInt32(),
      additionalData.getInt32()
    ]);
  }
};
|
|
|
|
/**
 * Encrypts one block in GCM: XORs plaintext with the encrypted counter,
 * folds the resulting ciphertext block into the running GHASH value S,
 * and increments the counter.
 */
modes.gcm.prototype.encrypt = function(input, output, finish) {
  // not enough input to encrypt
  var inputLength = input.length();
  if(inputLength === 0) {
    return true;
  }

  // encrypt block (keystream from the counter block)
  this.cipher.encrypt(this._inBlock, this._outBlock);

  // handle full block
  if(this._partialBytes === 0 && inputLength >= this.blockSize) {
    // XOR input with output
    // (note: _outBlock now holds the ciphertext, ready for hashing below)
    for(var i = 0; i < this._ints; ++i) {
      output.putInt32(this._outBlock[i] ^= input.getInt32());
    }
    this._cipherLength += this.blockSize;
  } else {
    // handle partial block
    var partialBytes = (this.blockSize - inputLength) % this.blockSize;
    if(partialBytes > 0) {
      partialBytes = this.blockSize - partialBytes;
    }

    // XOR input with output
    this._partialOutput.clear();
    for(var i = 0; i < this._ints; ++i) {
      this._partialOutput.putInt32(input.getInt32() ^ this._outBlock[i]);
    }

    if(partialBytes <= 0 || finish) {
      // handle overflow prior to hashing
      if(finish) {
        // get block overflow
        var overflow = inputLength % this.blockSize;
        this._cipherLength += overflow;
        // truncate for hash function (zero-pad semantics for final block)
        this._partialOutput.truncate(this.blockSize - overflow);
      } else {
        this._cipherLength += this.blockSize;
      }

      // get output block for hashing
      for(var i = 0; i < this._ints; ++i) {
        this._outBlock[i] = this._partialOutput.getInt32();
      }
      // rewind so the bytes can still be emitted to the caller below
      this._partialOutput.read -= this.blockSize;
    }

    // skip any previous partial bytes (already emitted on an earlier call)
    if(this._partialBytes > 0) {
      this._partialOutput.getBytes(this._partialBytes);
    }

    if(partialBytes > 0 && !finish) {
      // block still incomplete, restore input buffer, get partial output,
      // and return early
      input.read -= this.blockSize;
      output.putBytes(this._partialOutput.getBytes(
        partialBytes - this._partialBytes));
      this._partialBytes = partialBytes;
      return true;
    }

    output.putBytes(this._partialOutput.getBytes(
      inputLength - this._partialBytes));
    this._partialBytes = 0;
  }

  // update hash block S
  this._s = this.ghash(this._hashSubkey, this._s, this._outBlock);

  // increment counter (input block)
  inc32(this._inBlock);
};
|
|
|
|
/**
 * Decrypts one block of ciphertext in GCM mode. The ciphertext block is
 * folded into the running GHASH value S first (the tag authenticates
 * ciphertext), then XOR'd with the encrypted counter block to recover
 * the plaintext.
 *
 * @param input the byte buffer of ciphertext to read from.
 * @param output the byte buffer to write plaintext to.
 * @param finish true if this is the final call and a partial block of
 *          input is permitted.
 *
 * @return true if no block was processed (insufficient input); otherwise
 *         undefined.
 */
modes.gcm.prototype.decrypt = function(input, output, finish) {
  // not enough input to decrypt
  var inputLength = input.length();
  if(inputLength < this.blockSize && !(finish && inputLength > 0)) {
    return true;
  }

  // encrypt block (GCM always uses encryption mode)
  this.cipher.encrypt(this._inBlock, this._outBlock);

  // increment counter (input block)
  inc32(this._inBlock);

  // update hash block S with the ciphertext before recovering plaintext
  this._hashBlock[0] = input.getInt32();
  this._hashBlock[1] = input.getInt32();
  this._hashBlock[2] = input.getInt32();
  this._hashBlock[3] = input.getInt32();
  this._s = this.ghash(this._hashSubkey, this._s, this._hashBlock);

  // XOR hash input with output (keystream XOR ciphertext = plaintext)
  for(var i = 0; i < this._ints; ++i) {
    output.putInt32(this._outBlock[i] ^ this._hashBlock[i]);
  }

  // increment cipher data length; only count the bytes actually present
  // in a trailing partial block
  if(inputLength < this.blockSize) {
    this._cipherLength += inputLength % this.blockSize;
  } else {
    this._cipherLength += this.blockSize;
  }
};
|
|
|
|
/**
 * Finishes GCM processing: truncates any decryption overflow, folds the
 * AAD/ciphertext bit lengths into the GHASH, and computes the
 * authentication tag (stored in this.tag). When decrypting, the computed
 * tag is compared against the expected tag supplied at start.
 *
 * @param output the output buffer (truncated in place when decrypting
 *          with a partial final block).
 * @param options the options provided by the cipher framework:
 *          decrypt true when running in decryption mode.
 *          overflow the number of padding bytes in the last block.
 *
 * @return true on success, false if tag verification failed.
 */
modes.gcm.prototype.afterFinish = function(output, options) {
  var rval = true;

  // handle overflow: drop the padding bytes emitted for a partial block
  if(options.decrypt && options.overflow) {
    output.truncate(this.blockSize - options.overflow);
  }

  // handle authentication tag
  this.tag = forge$h.util.createBuffer();

  // concatenate additional data length with cipher length (both as
  // BE 64-bit bit counts, per the GHASH length block)
  var lengths = this._aDataLength.concat(from64To32(this._cipherLength * 8));

  // include lengths in hash
  this._s = this.ghash(this._hashSubkey, this._s, lengths);

  // do GCTR(J_0, S): encrypt the pre-counter block and XOR with S
  var tag = [];
  this.cipher.encrypt(this._j0, tag);
  for(var i = 0; i < this._ints; ++i) {
    this.tag.putInt32(this._s[i] ^ tag[i]);
  }

  // trim tag to length (e.g. a 96-bit tag keeps only the first 12 bytes)
  this.tag.truncate(this.tag.length() % (this._tagLength / 8));

  // check authentication tag
  // NOTE(review): this is a non-constant-time string comparison; a caller
  // exposed to timing attacks may leak tag bytes -- consider a
  // constant-time compare. Confirm against upstream forge behavior.
  if(options.decrypt && this.tag.bytes() !== this._tag) {
    rval = false;
  }

  return rval;
};
|
|
|
|
/**
 * See NIST SP-800-38D 6.3 (Algorithm 1). Performs multiplication of two
 * 128-bit blocks in GF(2^128), where the field is defined by the
 * polynomial:
 *
 *   x^128 + x^7 + x^2 + x + 1
 *
 * The reduction constant R is the little-endian bit pattern 11100001
 * (0xe1) followed by 120 zero bits. The product starts at zero; for each
 * of the 128 bits of X (MSB first), if the bit is set the current value
 * of Y is added (XOR'd) into the product, and Y is then multiplied by x
 * (a right shift with conditional reduction by R, see pow()).
 *
 * @param x the first block to multiply by the second.
 * @param y the second block to multiply by the first.
 *
 * @return the block result of the multiplication.
 */
modes.gcm.prototype.multiply = function(x, y) {
  var product = [0, 0, 0, 0];
  var v = y.slice(0);

  // walk all 128 bits of x, most significant first
  for(var bit = 0; bit < 128; ++bit) {
    var word = (bit / 32) | 0;
    var mask = 1 << (31 - bit % 32);
    if(x[word] & mask) {
      // bit set: add V into the product (GF addition is XOR)
      product[0] ^= v[0];
      product[1] ^= v[1];
      product[2] ^= v[2];
      product[3] ^= v[3];
    }

    // V = V * x: shift right once, reducing by R when the LSB was set
    this.pow(v, v);
  }

  return product;
};
|
|
|
|
/**
 * Multiplies a 128-bit field element by x: a logical right shift of the
 * whole 128-bit value, followed by a reduction (XOR with R) when the
 * dropped bit was 1. Writing into `out` is safe even when `out` is the
 * same array as `x`.
 *
 * @param x the 128-bit block (four 32-bit ints) to raise.
 * @param out the array to receive the result (may alias x).
 */
modes.gcm.prototype.pow = function(x, out) {
  // remember the least significant bit before shifting; if it is set the
  // shifted value must be reduced by R afterwards
  var needsReduction = x[3] & 1;

  // logical right shift across the four 32-bit words: each word takes the
  // dropped low bit of the word to its left as its new top bit (processed
  // right-to-left so aliasing x === out is safe)
  out[3] = (x[3] >>> 1) | ((x[2] & 1) << 31);
  out[2] = (x[2] >>> 1) | ((x[1] & 1) << 31);
  out[1] = (x[1] >>> 1) | ((x[0] & 1) << 31);
  out[0] = x[0] >>> 1;

  // R technically ends with 120 zero bits, so only the first integer
  // needs to be XOR'd to perform the reduction
  if(needsReduction) {
    out[0] ^= this._R;
  }
};
|
|
|
|
/**
 * Multiplies a block by the hash subkey using the precomputed 4-bit
 * tables in this._m. The block is viewed as 32 four-bit components; each
 * component indexes its own subtable and the looked-up values are
 * composed with XOR.
 *
 * @param x the block (four 32-bit ints) to multiply by the hash subkey.
 *
 * @return the block result of the multiplication.
 */
modes.gcm.prototype.tableMultiply = function(x) {
  // assumes 4-bit tables are used
  var product = [0, 0, 0, 0];
  for(var nib = 0; nib < 32; ++nib) {
    // locate the nibble: 8 nibbles per 32-bit word, high nibble first
    var wordIdx = (nib / 8) | 0;
    var shift = (7 - nib % 8) * 4;
    var nibble = (x[wordIdx] >>> shift) & 0xF;
    var entry = this._m[nib][nibble];
    product[0] ^= entry[0];
    product[1] ^= entry[1];
    product[2] ^= entry[2];
    product[3] ^= entry[3];
  }
  return product;
};
|
|
|
|
/**
 * A continuing version of the GHASH algorithm that operates on a single
 * block. The hash subkey block, the previous hash value (Ym), and the
 * new block to hash are given.
 *
 * @param h the hash subkey block.
 * @param y the previous value for Ym, use [0, 0, 0, 0] for a new hash.
 * @param x the block to hash.
 *
 * @return the hashed value (Ym).
 */
modes.gcm.prototype.ghash = function(h, y, x) {
  // fold the new block into the running hash (GF addition is XOR) ...
  for(var i = 0; i < 4; ++i) {
    y[i] ^= x[i];
  }
  // ... then multiply by the hash subkey via the precomputed tables;
  // the table-free equivalent would be: return this.multiply(y, h);
  return this.tableMultiply(y);
};
|
|
|
|
/**
 * Precomputes a table for multiplying against the hash subkey. This
 * mechanism provides a substantial speed increase over multiplication
 * performed without a table. The table-based multiplication solves
 * X * H by multiplying each component of X by H and composing the
 * results with XOR.
 *
 * The table could be generated for other component sizes, but this
 * implementation assumes X is split into 32 components of 4 bits each
 * (X is a 16 byte vector), i.e. it is called with bits=4.
 *
 * @param h the hash subkey.
 * @param bits the bit size for a component.
 *
 * @return the table: one subtable per component.
 */
modes.gcm.prototype.generateHashTable = function(h, bits) {
  // TODO: There are further optimizations that would use only the
  // first table M_0 (or some variant) along with a remainder table;
  // this can be explored in the future
  var componentsPerByte = 8 / bits;
  var componentsPerInt = 4 * componentsPerByte;
  var componentCount = 16 * componentsPerByte;
  var tables = new Array(componentCount);
  for(var c = 0; c < componentCount; ++c) {
    // build a block that has only this component's high bit set ...
    var block = [0, 0, 0, 0];
    var intIdx = (c / componentsPerInt) | 0;
    var shift = (componentsPerInt - 1 - (c % componentsPerInt)) * bits;
    block[intIdx] = (1 << (bits - 1)) << shift;
    // ... premultiply it by H and expand into a full subtable covering
    // every possible value of this component
    tables[c] = this.generateSubHashTable(this.multiply(block, h), bits);
  }
  return tables;
};
|
|
|
|
/**
 * Generates a table for multiplying against the hash subkey for one
 * particular component (out of all possible component values).
 *
 * @param mid the pre-multiplied value for the middle key of the table.
 * @param bits the bit size for a component.
 *
 * @return the subtable, indexed by component value (size 2^bits).
 */
modes.gcm.prototype.generateSubHashTable = function(mid, bits) {
  // compute the table quickly by minimizing the number of
  // POW operations -- they only need to be performed for powers of 2,
  // all other entries can be composed from those powers using XOR
  var size = 1 << bits;
  var half = size >>> 1;
  var m = new Array(size);
  m[half] = mid.slice(0);
  // fill the power-of-two entries below the midpoint: each is the next
  // higher power multiplied by x (one POW per halving)
  var i = half >>> 1;
  while(i > 0) {
    // raise m0[2 * i] and store in m0[i]
    this.pow(m[2 * i], m[i] = []);
    i >>= 1;
  }
  // compose every remaining lower-half entry by XOR'ing the power-of-two
  // entries that make up its index
  i = 2;
  while(i < half) {
    for(var j = 1; j < i; ++j) {
      var m_i = m[i];
      var m_j = m[j];
      m[i + j] = [
        m_i[0] ^ m_j[0],
        m_i[1] ^ m_j[1],
        m_i[2] ^ m_j[2],
        m_i[3] ^ m_j[3]
      ];
    }
    i *= 2;
  }
  m[0] = [0, 0, 0, 0];
  /* Note: We could avoid storing the top half by composing these entries
    on the fly during multiplication, but precomputing them here is
    preferred for speed. Each is the midpoint XOR'd with a lower entry. */
  for(i = half + 1; i < size; ++i) {
    var c = m[i ^ half];
    m[i] = [mid[0] ^ c[0], mid[1] ^ c[1], mid[2] ^ c[2], mid[3] ^ c[3]];
  }
  return m;
};
|
|
|
|
/** Utility functions */
|
|
|
|
/**
 * Normalizes an IV given as a binary string, byte array, or byte buffer
 * into an array of 32-bit integers of blockSize/4 entries.
 *
 * @param iv the IV (string of bytes, byte array, byte buffer, or an
 *          array of 32-bit integers, which is returned as-is).
 * @param blockSize the cipher block size in bytes.
 *
 * @return the IV as an array of 32-bit integers.
 */
function transformIV(iv, blockSize) {
  if(typeof iv === 'string') {
    // wrap a binary-encoded string in a byte buffer
    iv = forge$h.util.createBuffer(iv);
  }

  if(forge$h.util.isArray(iv) && iv.length > 4) {
    // more than 4 entries, so this is a byte array (not 32-bit ints);
    // copy the bytes into a byte buffer
    var bytes = iv;
    iv = forge$h.util.createBuffer();
    for(var b = 0; b < bytes.length; ++b) {
      iv.putByte(bytes[b]);
    }
  }

  if(iv.length() < blockSize) {
    throw new Error(
      'Invalid IV length; got ' + iv.length() +
      ' bytes and expected ' + blockSize + ' bytes.');
  }

  if(!forge$h.util.isArray(iv)) {
    // read the byte buffer out as an array of 32-bit integers
    var words = [];
    var wordCount = blockSize / 4;
    for(var w = 0; w < wordCount; ++w) {
      words.push(iv.getInt32());
    }
    iv = words;
  }

  return iv;
}
|
|
|
|
/**
 * Increments the last 32 bits of a counter block, wrapping modulo 2^32;
 * the higher words are never touched (GCM inc32).
 *
 * @param block the counter block (array of 32-bit ints), modified in place.
 */
function inc32(block) {
  var last = block.length - 1;
  block[last] = (block[last] + 1) & 0xFFFFFFFF;
}
|
|
|
|
/**
 * Splits a JavaScript number (holding a 64-bit length) into two
 * big-endian 32-bit integers: [high, low].
 *
 * @param num the number to split.
 *
 * @return a two-element array [high 32 bits, low 32 bits].
 */
function from64To32(num) {
  var high = (num / 0x100000000) | 0;
  var low = num & 0xFFFFFFFF;
  return [high, low];
}
|
|
|
|
/**
|
|
* Advanced Encryption Standard (AES) implementation.
|
|
*
|
|
* This implementation is based on the public domain library 'jscrypto' which
|
|
* was written by:
|
|
*
|
|
* Emily Stark (estark@stanford.edu)
|
|
* Mike Hamburg (mhamburg@stanford.edu)
|
|
* Dan Boneh (dabo@cs.stanford.edu)
|
|
*
|
|
* Parts of this code are based on the OpenSSL implementation of AES:
|
|
* http://www.openssl.org
|
|
*
|
|
* @author Dave Longley
|
|
*
|
|
* Copyright (c) 2010-2014 Digital Bazaar, Inc.
|
|
*/
|
|
|
|
// local alias for the shared forge module object used by this section
var forge$g = forge$m;

/* AES API */
// create the aes namespace if it does not already exist
forge$g.aes = forge$g.aes || {};
|
|
|
|
/**
 * Deprecated. Instead, use:
 *
 *   var cipher = forge.cipher.createCipher('AES-<mode>', key);
 *   cipher.start({iv: iv});
 *
 * Creates and starts an AES cipher object to encrypt data using the given
 * symmetric key. The output will be stored in the 'output' member of the
 * returned cipher.
 *
 * The key and iv may be given as a string of bytes, an array of bytes,
 * a byte buffer, or an array of 32-bit words.
 *
 * @param key the symmetric key to use.
 * @param iv the initialization vector to use.
 * @param output the buffer to write to, null to create one.
 * @param mode the cipher mode to use (default: 'CBC').
 *
 * @return the started cipher.
 */
forge$g.aes.startEncrypting = function(key, iv, output, mode) {
  var options = {
    key: key,
    output: output,
    decrypt: false,
    mode: mode
  };
  var cipher = _createCipher$1(options);
  cipher.start(iv);
  return cipher;
};
|
|
|
|
/**
 * Deprecated. Instead, use:
 *
 *   var cipher = forge.cipher.createCipher('AES-<mode>', key);
 *
 * Creates an (unstarted) AES cipher object to encrypt data using the
 * given symmetric key.
 *
 * The key may be given as a string of bytes, an array of bytes, a
 * byte buffer, or an array of 32-bit words.
 *
 * @param key the symmetric key to use.
 * @param mode the cipher mode to use (default: 'CBC').
 *
 * @return the cipher.
 */
forge$g.aes.createEncryptionCipher = function(key, mode) {
  var options = {
    key: key,
    output: null,
    decrypt: false,
    mode: mode
  };
  return _createCipher$1(options);
};
|
|
|
|
/**
 * Deprecated. Instead, use:
 *
 *   var decipher = forge.cipher.createDecipher('AES-<mode>', key);
 *   decipher.start({iv: iv});
 *
 * Creates and starts an AES cipher object to decrypt data using the given
 * symmetric key. The output will be stored in the 'output' member of the
 * returned cipher.
 *
 * The key and iv may be given as a string of bytes, an array of bytes,
 * a byte buffer, or an array of 32-bit words.
 *
 * @param key the symmetric key to use.
 * @param iv the initialization vector to use.
 * @param output the buffer to write to, null to create one.
 * @param mode the cipher mode to use (default: 'CBC').
 *
 * @return the started cipher.
 */
forge$g.aes.startDecrypting = function(key, iv, output, mode) {
  var options = {
    key: key,
    output: output,
    decrypt: true,
    mode: mode
  };
  var cipher = _createCipher$1(options);
  cipher.start(iv);
  return cipher;
};
|
|
|
|
/**
 * Deprecated. Instead, use:
 *
 *   var decipher = forge.cipher.createDecipher('AES-<mode>', key);
 *
 * Creates an (unstarted) AES cipher object to decrypt data using the
 * given symmetric key.
 *
 * The key may be given as a string of bytes, an array of bytes, a
 * byte buffer, or an array of 32-bit words.
 *
 * @param key the symmetric key to use.
 * @param mode the cipher mode to use (default: 'CBC').
 *
 * @return the cipher.
 */
forge$g.aes.createDecryptionCipher = function(key, mode) {
  var options = {
    key: key,
    output: null,
    decrypt: true,
    mode: mode
  };
  return _createCipher$1(options);
};
|
|
|
|
/**
 * Creates a new AES cipher algorithm object.
 *
 * @param name the name of the algorithm.
 * @param mode the mode factory function.
 *
 * @return the AES algorithm object.
 */
forge$g.aes.Algorithm = function(name, mode) {
  // lazily build the AES lookup tables on first construction
  if(!init$1) {
    initialize();
  }
  // capture the instance so the cipher callbacks below can reach the
  // expanded key (this._w) after initialize(options) has run
  var algorithm = this;
  algorithm.name = name;
  algorithm.mode = new mode({
    blockSize: 16,
    cipher: {
      encrypt: function(inBlock, outBlock) {
        return _updateBlock$1(algorithm._w, inBlock, outBlock, false);
      },
      decrypt: function(inBlock, outBlock) {
        return _updateBlock$1(algorithm._w, inBlock, outBlock, true);
      }
    }
  });
  // key expansion happens later, in initialize(options)
  algorithm._init = false;
};
|
|
|
|
/**
 * Initializes this AES algorithm by normalizing the key to an array of
 * 32-bit integers and expanding it. Idempotent: subsequent calls are
 * no-ops.
 *
 * @param options the options to use.
 *          key the key to use with this algorithm (string of bytes,
 *            byte array, byte buffer, or array of 32-bit integers).
 *          decrypt true if the algorithm should be initialized for
 *            decryption, false for encryption.
 */
forge$g.aes.Algorithm.prototype.initialize = function(options) {
  if(this._init) {
    return;
  }

  var key = options.key;
  var tmp;

  /* Note: The key may be a string of bytes, an array of bytes, a byte
    buffer, or an array of 32-bit integers. If the key is in bytes, then
    it must be 16, 24, or 32 bytes in length. If it is in 32-bit
    integers, it must be 4, 6, or 8 integers long. */

  if(typeof key === 'string' &&
    (key.length === 16 || key.length === 24 || key.length === 32)) {
    // convert key string into byte buffer
    key = forge$g.util.createBuffer(key);
  } else if(forge$g.util.isArray(key) &&
    (key.length === 16 || key.length === 24 || key.length === 32)) {
    // convert key byte array into byte buffer (16/24/32 entries means
    // bytes, not 32-bit integers)
    tmp = key;
    key = forge$g.util.createBuffer();
    for(var i = 0; i < tmp.length; ++i) {
      key.putByte(tmp[i]);
    }
  }

  // convert key byte buffer into 32-bit integer array
  if(!forge$g.util.isArray(key)) {
    tmp = key;
    key = [];

    // key lengths of 16, 24, 32 bytes allowed
    var len = tmp.length();
    if(len === 16 || len === 24 || len === 32) {
      len = len >>> 2;
      for(var i = 0; i < len; ++i) {
        key.push(tmp.getInt32());
      }
    }
  }

  // key must be an array of 32-bit integers by now; anything else
  // (including a byte buffer of invalid length, left unconverted above)
  // is rejected here
  if(!forge$g.util.isArray(key) ||
    !(key.length === 4 || key.length === 6 || key.length === 8)) {
    throw new Error('Invalid key parameter.');
  }

  // encryption operation is always used for these (stream-like) modes,
  // even when decrypting
  var mode = this.mode.name;
  var encryptOp = (['CFB', 'OFB', 'CTR', 'GCM'].indexOf(mode) !== -1);

  // do key expansion
  this._w = _expandKey(key, options.decrypt && !encryptOp);
  this._init = true;
};
|
|
|
|
/**
 * Expands a key. Typically only used for testing.
 *
 * @param key the symmetric key to expand, as an array of 32-bit words.
 * @param decrypt true to expand for decryption, false for encryption.
 *
 * @return the expanded key.
 */
forge$g.aes._expandKey = function(key, decrypt) {
  // make sure the precomputed AES tables exist before expanding
  if(init$1 === false) {
    initialize();
  }
  return _expandKey(key, decrypt);
};
|
|
|
|
/**
 * Updates a single block. Typically only used for testing.
 *
 * @param w the expanded key to use.
 * @param input an array of block-size 32-bit words.
 * @param output an array of block-size 32-bit words.
 * @param decrypt true to decrypt, false to encrypt.
 */
// exposed directly as the internal implementation
forge$g.aes._updateBlock = _updateBlock$1;
|
|
|
|
/** Register AES algorithms **/

// register each supported AES block-cipher mode with the cipher API so
// forge.cipher.createCipher('AES-<mode>', key) can find it
registerAlgorithm$1('AES-ECB', forge$g.cipher.modes.ecb);
registerAlgorithm$1('AES-CBC', forge$g.cipher.modes.cbc);
registerAlgorithm$1('AES-CFB', forge$g.cipher.modes.cfb);
registerAlgorithm$1('AES-OFB', forge$g.cipher.modes.ofb);
registerAlgorithm$1('AES-CTR', forge$g.cipher.modes.ctr);
registerAlgorithm$1('AES-GCM', forge$g.cipher.modes.gcm);
|
|
|
|
/**
 * Registers an AES algorithm under the given name with the cipher API.
 *
 * @param name the algorithm name (e.g. 'AES-CBC').
 * @param mode the mode factory function.
 */
function registerAlgorithm$1(name, mode) {
  // register a factory that creates a fresh algorithm instance per cipher
  forge$g.cipher.registerAlgorithm(name, function() {
    return new forge$g.aes.Algorithm(name, mode);
  });
}
|
|
|
|
/** AES implementation **/

// module-level state shared by the AES routines; the tables below are
// populated once by initialize() (see init$1 guard)
var init$1 = false; // true once the lookup tables below have been built
var Nb = 4; // number of words comprising the state (AES = 4)
var sbox; // non-linear substitution table used in key expansion
var isbox; // inversion of sbox
var rcon; // round constant word array
var mix; // mix-columns table
var imix; // inverse mix-columns table
|
|
|
|
/**
|
|
* Performs initialization, ie: precomputes tables to optimize for speed.
|
|
*
|
|
* One way to understand how AES works is to imagine that 'addition' and
|
|
* 'multiplication' are interfaces that require certain mathematical
|
|
* properties to hold true (ie: they are associative) but they might have
|
|
* different implementations and produce different kinds of results ...
|
|
* provided that their mathematical properties remain true. AES defines
|
|
* its own methods of addition and multiplication but keeps some important
|
|
* properties the same, ie: associativity and distributivity. The
|
|
* explanation below tries to shed some light on how AES defines addition
|
|
* and multiplication of bytes and 32-bit words in order to perform its
|
|
* encryption and decryption algorithms.
|
|
*
|
|
* The basics:
|
|
*
|
|
* The AES algorithm views bytes as binary representations of polynomials
|
|
* that have either 1 or 0 as the coefficients. It defines the addition
|
|
* or subtraction of two bytes as the XOR operation. It also defines the
|
|
* multiplication of two bytes as a finite field referred to as GF(2^8)
|
|
* (Note: 'GF' means "Galois Field" which is a field that contains a finite
|
|
* number of elements so GF(2^8) has 256 elements).
|
|
*
|
|
* This means that any two bytes can be represented as binary polynomials;
|
|
* when they multiplied together and modularly reduced by an irreducible
|
|
* polynomial of the 8th degree, the results are the field GF(2^8). The
|
|
* specific irreducible polynomial that AES uses in hexadecimal is 0x11b.
|
|
* This multiplication is associative with 0x01 as the identity:
|
|
*
|
|
* (b * 0x01 = GF(b, 0x01) = b).
|
|
*
|
|
* The operation GF(b, 0x02) can be performed at the byte level by left
|
|
* shifting b once and then XOR'ing it (to perform the modular reduction)
|
|
* with 0x11b if b is >= 128. Repeated application of the multiplication
|
|
* of 0x02 can be used to implement the multiplication of any two bytes.
|
|
*
|
|
* For instance, multiplying 0x57 and 0x13, denoted as GF(0x57, 0x13), can
|
|
* be performed by factoring 0x13 into 0x01, 0x02, and 0x10. Then these
|
|
* factors can each be multiplied by 0x57 and then added together. To do
|
|
* the multiplication, values for 0x57 multiplied by each of these 3 factors
|
|
* can be precomputed and stored in a table. To add them, the values from
|
|
* the table are XOR'd together.
|
|
*
|
|
* AES also defines addition and multiplication of words, that is 4-byte
|
|
* numbers represented as polynomials of 3 degrees where the coefficients
|
|
* are the values of the bytes.
|
|
*
|
|
* The word [a0, a1, a2, a3] is a polynomial a3x^3 + a2x^2 + a1x + a0.
|
|
*
|
|
* Addition is performed by XOR'ing like powers of x. Multiplication
|
|
* is performed in two steps, the first is an algebriac expansion as
|
|
* you would do normally (where addition is XOR). But the result is
|
|
* a polynomial larger than 3 degrees and thus it cannot fit in a word. So
|
|
* next the result is modularly reduced by an AES-specific polynomial of
|
|
* degree 4 which will always produce a polynomial of less than 4 degrees
|
|
* such that it will fit in a word. In AES, this polynomial is x^4 + 1.
|
|
*
|
|
* The modular product of two polynomials 'a' and 'b' is thus:
|
|
*
|
|
* d(x) = d3x^3 + d2x^2 + d1x + d0
|
|
* with
|
|
* d0 = GF(a0, b0) ^ GF(a3, b1) ^ GF(a2, b2) ^ GF(a1, b3)
|
|
* d1 = GF(a1, b0) ^ GF(a0, b1) ^ GF(a3, b2) ^ GF(a2, b3)
|
|
* d2 = GF(a2, b0) ^ GF(a1, b1) ^ GF(a0, b2) ^ GF(a3, b3)
|
|
* d3 = GF(a3, b0) ^ GF(a2, b1) ^ GF(a1, b2) ^ GF(a0, b3)
|
|
*
|
|
* As a matrix:
|
|
*
|
|
* [d0] = [a0 a3 a2 a1][b0]
|
|
* [d1] [a1 a0 a3 a2][b1]
|
|
* [d2] [a2 a1 a0 a3][b2]
|
|
* [d3] [a3 a2 a1 a0][b3]
|
|
*
|
|
* Special polynomials defined by AES (0x02 == {02}):
|
|
* a(x) = {03}x^3 + {01}x^2 + {01}x + {02}
|
|
* a^-1(x) = {0b}x^3 + {0d}x^2 + {09}x + {0e}.
|
|
*
|
|
* These polynomials are used in the MixColumns() and InverseMixColumns()
|
|
* operations, respectively, to cause each element in the state to affect
|
|
* the output (referred to as diffusing).
|
|
*
|
|
* RotWord() uses: a0 = a1 = a2 = {00} and a3 = {01}, which is the
|
|
* polynomial x3.
|
|
*
|
|
* The ShiftRows() method modifies the last 3 rows in the state (where
|
|
* the state is 4 words with 4 bytes per word) by shifting bytes cyclically.
|
|
* The 1st byte in the second row is moved to the end of the row. The 1st
|
|
* and 2nd bytes in the third row are moved to the end of the row. The 1st,
|
|
* 2nd, and 3rd bytes are moved in the fourth row.
|
|
*
|
|
* More details on how AES arithmetic works:
|
|
*
|
|
* In the polynomial representation of binary numbers, XOR performs addition
|
|
* and subtraction and multiplication in GF(2^8) denoted as GF(a, b)
|
|
* corresponds with the multiplication of polynomials modulo an irreducible
|
|
* polynomial of degree 8. In other words, for AES, GF(a, b) will multiply
|
|
* polynomial 'a' with polynomial 'b' and then do a modular reduction by
|
|
* an AES-specific irreducible polynomial of degree 8.
|
|
*
|
|
* A polynomial is irreducible if its only divisors are one and itself. For
|
|
* the AES algorithm, this irreducible polynomial is:
|
|
*
|
|
* m(x) = x^8 + x^4 + x^3 + x + 1,
|
|
*
|
|
* or {01}{1b} in hexadecimal notation, where each coefficient is a bit:
|
|
* 100011011 = 283 = 0x11b.
|
|
*
|
|
* For example, GF(0x57, 0x83) = 0xc1 because
|
|
*
|
|
* 0x57 = 87 = 01010111 = x^6 + x^4 + x^2 + x + 1
|
|
 * 0x83 = 131 = 10000011 = x^7 + x + 1
|
|
*
|
|
* (x^6 + x^4 + x^2 + x + 1) * (x^7 + x + 1)
|
|
* = x^13 + x^11 + x^9 + x^8 + x^7 +
|
|
* x^7 + x^5 + x^3 + x^2 + x +
|
|
* x^6 + x^4 + x^2 + x + 1
|
|
* = x^13 + x^11 + x^9 + x^8 + x^6 + x^5 + x^4 + x^3 + 1 = y
|
|
* y modulo (x^8 + x^4 + x^3 + x + 1)
|
|
* = x^7 + x^6 + 1.
|
|
*
|
|
* The modular reduction by m(x) guarantees the result will be a binary
|
|
* polynomial of less than degree 8, so that it can fit in a byte.
|
|
*
|
|
* The operation to multiply a binary polynomial b with x (the polynomial
|
|
* x in binary representation is 00000010) is:
|
|
*
|
|
* b_7x^8 + b_6x^7 + b_5x^6 + b_4x^5 + b_3x^4 + b_2x^3 + b_1x^2 + b_0x^1
|
|
*
|
|
* To get GF(b, x) we must reduce that by m(x). If b_7 is 0 (that is the
|
|
* most significant bit is 0 in b) then the result is already reduced. If
|
|
* it is 1, then we can reduce it by subtracting m(x) via an XOR.
|
|
*
|
|
* It follows that multiplication by x (00000010 or 0x02) can be implemented
|
|
* by performing a left shift followed by a conditional bitwise XOR with
|
|
* 0x1b. This operation on bytes is denoted by xtime(). Multiplication by
|
|
* higher powers of x can be implemented by repeated application of xtime().
|
|
*
|
|
* By adding intermediate results, multiplication by any constant can be
|
|
* implemented. For instance:
|
|
*
|
|
* GF(0x57, 0x13) = 0xfe because:
|
|
*
|
|
* xtime(b) = (b & 128) ? (b << 1 ^ 0x11b) : (b << 1)
|
|
*
|
|
* Note: We XOR with 0x11b instead of 0x1b because in javascript our
|
|
* datatype for b can be larger than 1 byte, so a left shift will not
|
|
* automatically eliminate bits that overflow a byte ... by XOR'ing the
|
|
* overflow bit with 1 (the extra one from 0x11b) we zero it out.
|
|
*
|
|
* GF(0x57, 0x02) = xtime(0x57) = 0xae
|
|
* GF(0x57, 0x04) = xtime(0xae) = 0x47
|
|
* GF(0x57, 0x08) = xtime(0x47) = 0x8e
|
|
* GF(0x57, 0x10) = xtime(0x8e) = 0x07
|
|
*
|
|
* GF(0x57, 0x13) = GF(0x57, (0x01 ^ 0x02 ^ 0x10))
|
|
*
|
|
* And by the distributive property (since XOR is addition and GF() is
|
|
* multiplication):
|
|
*
|
|
* = GF(0x57, 0x01) ^ GF(0x57, 0x02) ^ GF(0x57, 0x10)
|
|
* = 0x57 ^ 0xae ^ 0x07
|
|
* = 0xfe.
|
|
*/
|
|
function initialize() {
|
|
init$1 = true;
|
|
|
|
/* Populate the Rcon table. These are the values given by
|
|
[x^(i-1),{00},{00},{00}] where x^(i-1) are powers of x (and x = 0x02)
|
|
in the field of GF(2^8), where i starts at 1.
|
|
|
|
rcon[0] = [0x00, 0x00, 0x00, 0x00]
|
|
rcon[1] = [0x01, 0x00, 0x00, 0x00] 2^(1-1) = 2^0 = 1
|
|
rcon[2] = [0x02, 0x00, 0x00, 0x00] 2^(2-1) = 2^1 = 2
|
|
...
|
|
rcon[9] = [0x1B, 0x00, 0x00, 0x00] 2^(9-1) = 2^8 = 0x1B
|
|
rcon[10] = [0x36, 0x00, 0x00, 0x00] 2^(10-1) = 2^9 = 0x36
|
|
|
|
We only store the first byte because it is the only one used.
|
|
*/
|
|
rcon = [0x00, 0x01, 0x02, 0x04, 0x08, 0x10, 0x20, 0x40, 0x80, 0x1B, 0x36];
|
|
|
|
// compute xtime table which maps i onto GF(i, 0x02)
|
|
var xtime = new Array(256);
|
|
for(var i = 0; i < 128; ++i) {
|
|
xtime[i] = i << 1;
|
|
xtime[i + 128] = (i + 128) << 1 ^ 0x11B;
|
|
}
|
|
|
|
// compute all other tables
|
|
sbox = new Array(256);
|
|
isbox = new Array(256);
|
|
mix = new Array(4);
|
|
imix = new Array(4);
|
|
for(var i = 0; i < 4; ++i) {
|
|
mix[i] = new Array(256);
|
|
imix[i] = new Array(256);
|
|
}
|
|
var e = 0, ei = 0, e2, e4, e8, sx, sx2, me, ime;
|
|
for(var i = 0; i < 256; ++i) {
|
|
/* We need to generate the SubBytes() sbox and isbox tables so that
|
|
we can perform byte substitutions. This requires us to traverse
|
|
all of the elements in GF, find their multiplicative inverses,
|
|
and apply to each the following affine transformation:
|
|
|
|
bi' = bi ^ b(i + 4) mod 8 ^ b(i + 5) mod 8 ^ b(i + 6) mod 8 ^
|
|
b(i + 7) mod 8 ^ ci
|
|
for 0 <= i < 8, where bi is the ith bit of the byte, and ci is the
|
|
ith bit of a byte c with the value {63} or {01100011}.
|
|
|
|
It is possible to traverse every possible value in a Galois field
|
|
using what is referred to as a 'generator'. There are many
|
|
generators (128 out of 256): 3,5,6,9,11,82 to name a few. To fully
|
|
traverse GF we iterate 255 times, multiplying by our generator
|
|
each time.
|
|
|
|
On each iteration we can determine the multiplicative inverse for
|
|
the current element.
|
|
|
|
Suppose there is an element in GF 'e'. For a given generator 'g',
|
|
e = g^x. The multiplicative inverse of e is g^(255 - x). It turns
|
|
out that if use the inverse of a generator as another generator
|
|
it will produce all of the corresponding multiplicative inverses
|
|
at the same time. For this reason, we choose 5 as our inverse
|
|
generator because it only requires 2 multiplies and 1 add and its
|
|
inverse, 82, requires relatively few operations as well.
|
|
|
|
In order to apply the affine transformation, the multiplicative
|
|
inverse 'ei' of 'e' can be repeatedly XOR'd (4 times) with a
|
|
bit-cycling of 'ei'. To do this 'ei' is first stored in 's' and
|
|
'x'. Then 's' is left shifted and the high bit of 's' is made the
|
|
low bit. The resulting value is stored in 's'. Then 'x' is XOR'd
|
|
with 's' and stored in 'x'. On each subsequent iteration the same
|
|
operation is performed. When 4 iterations are complete, 'x' is
|
|
XOR'd with 'c' (0x63) and the transformed value is stored in 'x'.
|
|
For example:
|
|
|
|
s = 01000001
|
|
x = 01000001
|
|
|
|
iteration 1: s = 10000010, x ^= s
|
|
iteration 2: s = 00000101, x ^= s
|
|
iteration 3: s = 00001010, x ^= s
|
|
iteration 4: s = 00010100, x ^= s
|
|
x ^= 0x63
|
|
|
|
This can be done with a loop where s = (s << 1) | (s >> 7). However,
|
|
it can also be done by using a single 16-bit (in this case 32-bit)
|
|
number 'sx'. Since XOR is an associative operation, we can set 'sx'
|
|
to 'ei' and then XOR it with 'sx' left-shifted 1,2,3, and 4 times.
|
|
The most significant bits will flow into the high 8 bit positions
|
|
and be correctly XOR'd with one another. All that remains will be
|
|
to cycle the high 8 bits by XOR'ing them all with the lower 8 bits
|
|
afterwards.
|
|
|
|
At the same time we're populating sbox and isbox we can precompute
|
|
the multiplication we'll need to do to do MixColumns() later.
|
|
*/
|
|
|
|
// apply affine transformation
|
|
sx = ei ^ (ei << 1) ^ (ei << 2) ^ (ei << 3) ^ (ei << 4);
|
|
sx = (sx >> 8) ^ (sx & 255) ^ 0x63;
|
|
|
|
// update tables
|
|
sbox[e] = sx;
|
|
isbox[sx] = e;
|
|
|
|
/* Mixing columns is done using matrix multiplication. The columns
|
|
that are to be mixed are each a single word in the current state.
|
|
The state has Nb columns (4 columns). Therefore each column is a
|
|
4 byte word. So to mix the columns in a single column 'c' where
|
|
its rows are r0, r1, r2, and r3, we use the following matrix
|
|
multiplication:
|
|
|
|
[2 3 1 1]*[r0,c]=[r'0,c]
|
|
[1 2 3 1] [r1,c] [r'1,c]
|
|
[1 1 2 3] [r2,c] [r'2,c]
|
|
[3 1 1 2] [r3,c] [r'3,c]
|
|
|
|
r0, r1, r2, and r3 are each 1 byte of one of the words in the
|
|
state (a column). To do matrix multiplication for each mixed
|
|
column c' we multiply the corresponding row from the left matrix
|
|
with the corresponding column from the right matrix. In total, we
|
|
get 4 equations:
|
|
|
|
r0,c' = 2*r0,c + 3*r1,c + 1*r2,c + 1*r3,c
|
|
r1,c' = 1*r0,c + 2*r1,c + 3*r2,c + 1*r3,c
|
|
r2,c' = 1*r0,c + 1*r1,c + 2*r2,c + 3*r3,c
|
|
r3,c' = 3*r0,c + 1*r1,c + 1*r2,c + 2*r3,c
|
|
|
|
As usual, the multiplication is as previously defined and the
|
|
addition is XOR. In order to optimize mixing columns we can store
|
|
the multiplication results in tables. If you think of the whole
|
|
column as a word (it might help to visualize by mentally rotating
|
|
the equations above by counterclockwise 90 degrees) then you can
|
|
see that it would be useful to map the multiplications performed on
|
|
each byte (r0, r1, r2, r3) onto a word as well. For instance, we
|
|
could map 2*r0,1*r0,1*r0,3*r0 onto a word by storing 2*r0 in the
|
|
highest 8 bits and 3*r0 in the lowest 8 bits (with the other two
|
|
respectively in the middle). This means that a table can be
|
|
constructed that uses r0 as an index to the word. We can do the
|
|
same with r1, r2, and r3, creating a total of 4 tables.
|
|
|
|
To construct a full c', we can just look up each byte of c in
|
|
their respective tables and XOR the results together.
|
|
|
|
Also, to build each table we only have to calculate the word
|
|
for 2,1,1,3 for every byte ... which we can do on each iteration
|
|
of this loop since we will iterate over every byte. After we have
|
|
calculated 2,1,1,3 we can get the results for the other tables
|
|
by cycling the byte at the end to the beginning. For instance
|
|
we can take the result of table 2,1,1,3 and produce table 3,2,1,1
|
|
by moving the right most byte to the left most position just like
|
|
how you can imagine the 3 moved out of 2,1,1,3 and to the front
|
|
to produce 3,2,1,1.
|
|
|
|
There is another optimization in that the same multiples of
|
|
the current element we need in order to advance our generator
|
|
to the next iteration can be reused in performing the 2,1,1,3
|
|
calculation. We also calculate the inverse mix column tables,
|
|
with e,9,d,b being the inverse of 2,1,1,3.
|
|
|
|
When we're done, and we need to actually mix columns, the first
|
|
byte of each state word should be put through mix[0] (2,1,1,3),
|
|
the second through mix[1] (3,2,1,1) and so forth. Then they should
|
|
be XOR'd together to produce the fully mixed column.
|
|
*/
|
|
|
|
// calculate mix and imix table values
|
|
sx2 = xtime[sx];
|
|
e2 = xtime[e];
|
|
e4 = xtime[e2];
|
|
e8 = xtime[e4];
|
|
me =
|
|
(sx2 << 24) ^ // 2
|
|
(sx << 16) ^ // 1
|
|
(sx << 8) ^ // 1
|
|
(sx ^ sx2); // 3
|
|
ime =
|
|
(e2 ^ e4 ^ e8) << 24 ^ // E (14)
|
|
(e ^ e8) << 16 ^ // 9
|
|
(e ^ e4 ^ e8) << 8 ^ // D (13)
|
|
(e ^ e2 ^ e8); // B (11)
|
|
// produce each of the mix tables by rotating the 2,1,1,3 value
|
|
for(var n = 0; n < 4; ++n) {
|
|
mix[n][e] = me;
|
|
imix[n][sx] = ime;
|
|
// cycle the right most byte to the left most position
|
|
// ie: 2,1,1,3 becomes 3,2,1,1
|
|
me = me << 24 | me >>> 8;
|
|
ime = ime << 24 | ime >>> 8;
|
|
}
|
|
|
|
// get next element and inverse
|
|
if(e === 0) {
|
|
// 1 is the inverse of 1
|
|
e = ei = 1;
|
|
} else {
|
|
// e = 2e + 2*2*2*(10e)) = multiply e by 82 (chosen generator)
|
|
// ei = ei + 2*2*ei = multiply ei by 5 (inverse generator)
|
|
e = e2 ^ xtime[xtime[xtime[e2 ^ e8]]];
|
|
ei ^= xtime[xtime[ei]];
|
|
}
|
|
}
|
|
}
|
|
|
|
/**
|
|
* Generates a key schedule using the AES key expansion algorithm.
|
|
*
|
|
* The AES algorithm takes the Cipher Key, K, and performs a Key Expansion
|
|
* routine to generate a key schedule. The Key Expansion generates a total
|
|
* of Nb*(Nr + 1) words: the algorithm requires an initial set of Nb words,
|
|
* and each of the Nr rounds requires Nb words of key data. The resulting
|
|
* key schedule consists of a linear array of 4-byte words, denoted [wi ],
|
|
* with i in the range 0 <= i < Nb(Nr + 1).
|
|
*
|
|
* KeyExpansion(byte key[4*Nk], word w[Nb*(Nr+1)], Nk)
|
|
* AES-128 (Nb=4, Nk=4, Nr=10)
|
|
* AES-192 (Nb=4, Nk=6, Nr=12)
|
|
* AES-256 (Nb=4, Nk=8, Nr=14)
|
|
* Note: Nr=Nk+6.
|
|
*
|
|
* Nb is the number of columns (32-bit words) comprising the State (or
|
|
* number of bytes in a block). For AES, Nb=4.
|
|
*
|
|
* @param key the key to schedule (as an array of 32-bit words).
|
|
* @param decrypt true to modify the key schedule to decrypt, false not to.
|
|
*
|
|
* @return the generated key schedule.
|
|
*/
|
|
function _expandKey(key, decrypt) {
|
|
// copy the key's words to initialize the key schedule
|
|
var w = key.slice(0);
|
|
|
|
/* RotWord() will rotate a word, moving the first byte to the last
|
|
byte's position (shifting the other bytes left).
|
|
|
|
We will be getting the value of Rcon at i / Nk. 'i' will iterate
|
|
from Nk to (Nb * Nr+1). Nk = 4 (4 byte key), Nb = 4 (4 words in
|
|
a block), Nr = Nk + 6 (10). Therefore 'i' will iterate from
|
|
4 to 44 (exclusive). Each time we iterate 4 times, i / Nk will
|
|
increase by 1. We use a counter iNk to keep track of this.
|
|
*/
|
|
|
|
// go through the rounds expanding the key
|
|
var temp, iNk = 1;
|
|
var Nk = w.length;
|
|
var Nr1 = Nk + 6 + 1;
|
|
var end = Nb * Nr1;
|
|
for(var i = Nk; i < end; ++i) {
|
|
temp = w[i - 1];
|
|
if(i % Nk === 0) {
|
|
// temp = SubWord(RotWord(temp)) ^ Rcon[i / Nk]
|
|
temp =
|
|
sbox[temp >>> 16 & 255] << 24 ^
|
|
sbox[temp >>> 8 & 255] << 16 ^
|
|
sbox[temp & 255] << 8 ^
|
|
sbox[temp >>> 24] ^ (rcon[iNk] << 24);
|
|
iNk++;
|
|
} else if(Nk > 6 && (i % Nk === 4)) {
|
|
// temp = SubWord(temp)
|
|
temp =
|
|
sbox[temp >>> 24] << 24 ^
|
|
sbox[temp >>> 16 & 255] << 16 ^
|
|
sbox[temp >>> 8 & 255] << 8 ^
|
|
sbox[temp & 255];
|
|
}
|
|
w[i] = w[i - Nk] ^ temp;
|
|
}
|
|
|
|
/* When we are updating a cipher block we always use the code path for
|
|
encryption whether we are decrypting or not (to shorten code and
|
|
simplify the generation of look up tables). However, because there
|
|
are differences in the decryption algorithm, other than just swapping
|
|
in different look up tables, we must transform our key schedule to
|
|
account for these changes:
|
|
|
|
1. The decryption algorithm gets its key rounds in reverse order.
|
|
2. The decryption algorithm adds the round key before mixing columns
|
|
instead of afterwards.
|
|
|
|
We don't need to modify our key schedule to handle the first case,
|
|
we can just traverse the key schedule in reverse order when decrypting.
|
|
|
|
The second case requires a little work.
|
|
|
|
The tables we built for performing rounds will take an input and then
|
|
perform SubBytes() and MixColumns() or, for the decrypt version,
|
|
InvSubBytes() and InvMixColumns(). But the decrypt algorithm requires
|
|
us to AddRoundKey() before InvMixColumns(). This means we'll need to
|
|
apply some transformations to the round key to inverse-mix its columns
|
|
so they'll be correct for moving AddRoundKey() to after the state has
|
|
had its columns inverse-mixed.
|
|
|
|
To inverse-mix the columns of the state when we're decrypting we use a
|
|
lookup table that will apply InvSubBytes() and InvMixColumns() at the
|
|
same time. However, the round key's bytes are not inverse-substituted
|
|
in the decryption algorithm. To get around this problem, we can first
|
|
substitute the bytes in the round key so that when we apply the
|
|
transformation via the InvSubBytes()+InvMixColumns() table, it will
|
|
undo our substitution leaving us with the original value that we
|
|
want -- and then inverse-mix that value.
|
|
|
|
This change will correctly alter our key schedule so that we can XOR
|
|
each round key with our already transformed decryption state. This
|
|
allows us to use the same code path as the encryption algorithm.
|
|
|
|
We make one more change to the decryption key. Since the decryption
|
|
algorithm runs in reverse from the encryption algorithm, we reverse
|
|
the order of the round keys to avoid having to iterate over the key
|
|
schedule backwards when running the encryption algorithm later in
|
|
decryption mode. In addition to reversing the order of the round keys,
|
|
we also swap each round key's 2nd and 4th rows. See the comments
|
|
section where rounds are performed for more details about why this is
|
|
done. These changes are done inline with the other substitution
|
|
described above.
|
|
*/
|
|
if(decrypt) {
|
|
var tmp;
|
|
var m0 = imix[0];
|
|
var m1 = imix[1];
|
|
var m2 = imix[2];
|
|
var m3 = imix[3];
|
|
var wnew = w.slice(0);
|
|
end = w.length;
|
|
for(var i = 0, wi = end - Nb; i < end; i += Nb, wi -= Nb) {
|
|
// do not sub the first or last round key (round keys are Nb
|
|
// words) as no column mixing is performed before they are added,
|
|
// but do change the key order
|
|
if(i === 0 || i === (end - Nb)) {
|
|
wnew[i] = w[wi];
|
|
wnew[i + 1] = w[wi + 3];
|
|
wnew[i + 2] = w[wi + 2];
|
|
wnew[i + 3] = w[wi + 1];
|
|
} else {
|
|
// substitute each round key byte because the inverse-mix
|
|
// table will inverse-substitute it (effectively cancel the
|
|
// substitution because round key bytes aren't sub'd in
|
|
// decryption mode) and swap indexes 3 and 1
|
|
for(var n = 0; n < Nb; ++n) {
|
|
tmp = w[wi + n];
|
|
wnew[i + (3&-n)] =
|
|
m0[sbox[tmp >>> 24]] ^
|
|
m1[sbox[tmp >>> 16 & 255]] ^
|
|
m2[sbox[tmp >>> 8 & 255]] ^
|
|
m3[sbox[tmp & 255]];
|
|
}
|
|
}
|
|
}
|
|
w = wnew;
|
|
}
|
|
|
|
return w;
|
|
}
|
|
|
|
/**
 * Updates a single block (16 bytes) using AES. The update will either
 * encrypt or decrypt the block.
 *
 * Both directions share this one code path. Decryption differs only in
 * the lookup tables used (inverse mix/substitution tables), in the key
 * schedule (already reversed and transformed by _expandKey), and in a
 * swap of state words 1 and 3 on input and output. That swap makes the
 * InvShiftRows byte-cycling pattern line up with the ShiftRows pattern
 * used below, so one set of round equations serves both directions.
 *
 * @param w the key schedule.
 * @param input the input block (an array of 32-bit words).
 * @param output the updated output block.
 * @param decrypt true to decrypt the block, false to encrypt it.
 */
function _updateBlock$1(w, input, output, decrypt) {
  // the schedule holds (Nr + 1) round keys of 4 words each
  var Nr = w.length / 4 - 1;

  // pick the direction's lookup tables: each mNX table maps one state
  // byte to a 32-bit word that applies (Inv)SubBytes + (Inv)MixColumns;
  // `sub` is the plain (inverse) s-box used for the final round
  var m0, m1, m2, m3, sub;
  if(decrypt) {
    m0 = imix[0];
    m1 = imix[1];
    m2 = imix[2];
    m3 = imix[3];
    sub = isbox;
  } else {
    m0 = mix[0];
    m1 = mix[1];
    m2 = mix[2];
    m3 = mix[3];
    sub = sbox;
  }

  // initial AddRoundKey; words 1 and 3 are swapped in decrypt mode
  var r0 = input[0] ^ w[0];
  var r1 = input[decrypt ? 3 : 1] ^ w[1];
  var r2 = input[2] ^ w[2];
  var r3 = input[decrypt ? 1 : 3] ^ w[3];
  var ki = 4; // index of the next round-key word to consume

  // Nr-1 full rounds. Each output word pulls its four input bytes from
  // successive state words (r0's top byte, r1's next byte, ...), which
  // performs ShiftRows inline while the tables do SubBytes+MixColumns.
  var n0, n1, n2;
  for(var round = 1; round < Nr; ++round) {
    n0 =
      m0[r0 >>> 24] ^
      m1[r1 >>> 16 & 255] ^
      m2[r2 >>> 8 & 255] ^
      m3[r3 & 255] ^ w[ki++];
    n1 =
      m0[r1 >>> 24] ^
      m1[r2 >>> 16 & 255] ^
      m2[r3 >>> 8 & 255] ^
      m3[r0 & 255] ^ w[ki++];
    n2 =
      m0[r2 >>> 24] ^
      m1[r3 >>> 16 & 255] ^
      m2[r0 >>> 8 & 255] ^
      m3[r1 & 255] ^ w[ki++];
    r3 =
      m0[r3 >>> 24] ^
      m1[r0 >>> 16 & 255] ^
      m2[r1 >>> 8 & 255] ^
      m3[r2 & 255] ^ w[ki++];
    r0 = n0;
    r1 = n1;
    r2 = n2;
  }

  // final round: (Inv)SubBytes + row shift + AddRoundKey, no column mix;
  // decrypt mode swaps output words 1 and 3 to undo the input swap
  output[0] =
    (sub[r0 >>> 24] << 24) ^
    (sub[r1 >>> 16 & 255] << 16) ^
    (sub[r2 >>> 8 & 255] << 8) ^
    (sub[r3 & 255]) ^ w[ki++];
  output[decrypt ? 3 : 1] =
    (sub[r1 >>> 24] << 24) ^
    (sub[r2 >>> 16 & 255] << 16) ^
    (sub[r3 >>> 8 & 255] << 8) ^
    (sub[r0 & 255]) ^ w[ki++];
  output[2] =
    (sub[r2 >>> 24] << 24) ^
    (sub[r3 >>> 16 & 255] << 16) ^
    (sub[r0 >>> 8 & 255] << 8) ^
    (sub[r1 & 255]) ^ w[ki++];
  output[decrypt ? 1 : 3] =
    (sub[r3 >>> 24] << 24) ^
    (sub[r0 >>> 16 & 255] << 16) ^
    (sub[r1 >>> 8 & 255] << 8) ^
    (sub[r2 & 255]) ^ w[ki++];
}
|
|
|
|
/**
 * Deprecated. Instead, use:
 *
 *   forge.cipher.createCipher('AES-<mode>', key);
 *   forge.cipher.createDecipher('AES-<mode>', key);
 *
 * Creates a deprecated AES cipher object. This object's mode will default to
 * CBC (cipher-block-chaining).
 *
 * The key and iv may be given as a string of bytes, an array of bytes, a
 * byte buffer, or an array of 32-bit words.
 *
 * @param options the options to use.
 *          key the symmetric key to use.
 *          output the buffer to write to.
 *          decrypt true for decryption, false for encryption.
 *          mode the cipher mode to use (default: 'CBC').
 *
 * @return the cipher.
 */
function _createCipher$1(options) {
  options = options || {};
  var mode = (options.mode || 'CBC').toUpperCase();
  var algorithm = 'AES-' + mode;

  // delegate to the modern cipher API
  var cipher = options.decrypt ?
    forge$g.cipher.createDecipher(algorithm, options.key) :
    forge$g.cipher.createCipher(algorithm, options.key);

  // wrap start() for backwards compatibility: the legacy API passed the
  // IV first and, optionally, an output byte buffer second
  var legacyStart = cipher.start;
  cipher.start = function(iv, opts) {
    var output = null;
    if(opts instanceof forge$g.util.ByteBuffer) {
      output = opts;
      opts = {};
    }
    opts = opts || {};
    opts.output = output;
    opts.iv = iv;
    legacyStart.call(cipher, opts);
  };

  return cipher;
}
|
|
|
|
/**
|
|
* DES (Data Encryption Standard) implementation.
|
|
*
|
|
* This implementation supports DES as well as 3DES-EDE in ECB and CBC mode.
|
|
* It is based on the BSD-licensed implementation by Paul Tero:
|
|
*
|
|
* Paul Tero, July 2001
|
|
* http://www.tero.co.uk/des/
|
|
*
|
|
* Optimised for performance with large blocks by
|
|
* Michael Hayworth, November 2001
|
|
* http://www.netdealing.com
|
|
*
|
|
* THIS SOFTWARE IS PROVIDED "AS IS" AND
|
|
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
|
|
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
|
|
* ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
|
|
* FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
|
|
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
|
|
* OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
|
|
* HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
|
|
* LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
|
|
* OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
|
|
* SUCH DAMAGE.
|
|
*
|
|
* @author Stefan Siegl
|
|
* @author Dave Longley
|
|
*
|
|
* Copyright (c) 2012 Stefan Siegl <stesie@brokenpipe.de>
|
|
* Copyright (c) 2012-2014 Digital Bazaar, Inc.
|
|
*/
|
|
|
|
// Alias the shared forge core module for the DES implementation below.
var forge$f = forge$m;

/* DES API */
// Namespace for the DES/3DES cipher API; reuse it if already present.
forge$f.des = forge$f.des || {};
|
|
|
|
/**
 * Deprecated. Instead, use:
 *
 *   var cipher = forge.cipher.createCipher('DES-<mode>', key);
 *   cipher.start({iv: iv});
 *
 * Creates a DES cipher object to encrypt data using the given symmetric key
 * and immediately starts it with the given IV. The output will be stored in
 * the 'output' member of the returned cipher.
 *
 * The key and iv may be given as binary-encoded strings of bytes or
 * byte buffers.
 *
 * @param key the symmetric key to use (64 or 192 bits).
 * @param iv the initialization vector to use.
 * @param output the buffer to write to, null to create one.
 * @param mode the cipher mode to use (default: 'CBC' if IV is
 *          given, 'ECB' if null).
 *
 * @return the cipher.
 */
forge$f.des.startEncrypting = function(key, iv, output, mode) {
  // no explicit mode: infer ECB when there is no IV, otherwise CBC
  var resolvedMode = mode || (iv === null ? 'ECB' : 'CBC');
  var cipher = _createCipher({
    key: key,
    output: output,
    decrypt: false,
    mode: resolvedMode
  });
  cipher.start(iv);
  return cipher;
};
|
|
|
|
/**
 * Deprecated. Instead, use:
 *
 *   var cipher = forge.cipher.createCipher('DES-<mode>', key);
 *
 * Creates a DES cipher object to encrypt data using the given symmetric key.
 * Unlike startEncrypting(), the returned cipher has not been started.
 *
 * The key may be given as a binary-encoded string of bytes or a byte buffer.
 *
 * @param key the symmetric key to use (64 or 192 bits).
 * @param mode the cipher mode to use (default: 'CBC').
 *
 * @return the cipher.
 */
forge$f.des.createEncryptionCipher = function(key, mode) {
  var cipherOptions = {
    key: key,
    output: null,
    decrypt: false,
    mode: mode
  };
  return _createCipher(cipherOptions);
};
|
|
|
|
/**
 * Deprecated. Instead, use:
 *
 *   var decipher = forge.cipher.createDecipher('DES-<mode>', key);
 *   decipher.start({iv: iv});
 *
 * Creates a DES cipher object to decrypt data using the given symmetric key
 * and immediately starts it with the given IV. The output will be stored in
 * the 'output' member of the returned cipher.
 *
 * The key and iv may be given as binary-encoded strings of bytes or
 * byte buffers.
 *
 * @param key the symmetric key to use (64 or 192 bits).
 * @param iv the initialization vector to use.
 * @param output the buffer to write to, null to create one.
 * @param mode the cipher mode to use (default: 'CBC' if IV is
 *          given, 'ECB' if null).
 *
 * @return the cipher.
 */
forge$f.des.startDecrypting = function(key, iv, output, mode) {
  // no explicit mode: infer ECB when there is no IV, otherwise CBC
  var resolvedMode = mode || (iv === null ? 'ECB' : 'CBC');
  var cipher = _createCipher({
    key: key,
    output: output,
    decrypt: true,
    mode: resolvedMode
  });
  cipher.start(iv);
  return cipher;
};
|
|
|
|
/**
 * Deprecated. Instead, use:
 *
 *   var decipher = forge.cipher.createDecipher('DES-<mode>', key);
 *
 * Creates a DES cipher object to decrypt data using the given symmetric key.
 * Unlike startDecrypting(), the returned cipher has not been started.
 *
 * The key may be given as a binary-encoded string of bytes or a byte buffer.
 *
 * @param key the symmetric key to use (64 or 192 bits).
 * @param mode the cipher mode to use (default: 'CBC').
 *
 * @return the cipher.
 */
forge$f.des.createDecryptionCipher = function(key, mode) {
  var cipherOptions = {
    key: key,
    output: null,
    decrypt: true,
    mode: mode
  };
  return _createCipher(cipherOptions);
};
|
|
|
|
/**
 * Creates a new DES cipher algorithm object.
 *
 * @param name the name of the algorithm (e.g. 'DES-CBC' or '3DES-ECB').
 * @param mode the mode factory function (from forge.cipher.modes).
 *
 * @return the DES algorithm object.
 */
forge$f.des.Algorithm = function(name, mode) {
  // capture `this` so the mode's cipher callbacks can reach _keys
  var algorithm = this;
  algorithm.name = name;
  // wire the raw block primitives into the requested mode of operation;
  // DES operates on 64-bit (8 byte) blocks
  algorithm.mode = new mode({
    blockSize: 8,
    cipher: {
      encrypt: function(inBlock, outBlock) {
        return _updateBlock(algorithm._keys, inBlock, outBlock, false);
      },
      decrypt: function(inBlock, outBlock) {
        return _updateBlock(algorithm._keys, inBlock, outBlock, true);
      }
    }
  });
  // key expansion is deferred until initialize() supplies the key
  algorithm._init = false;
};
|
|
|
|
/**
 * Initializes this DES algorithm by expanding its key.
 *
 * @param options the options to use.
 *          key the key to use with this algorithm.
 *          decrypt true if the algorithm should be initialized for decryption,
 *            false for encryption.
 */
forge$f.des.Algorithm.prototype.initialize = function(options) {
  // key expansion only needs to happen once per algorithm instance
  if(this._init) {
    return;
  }

  var key = forge$f.util.createBuffer(options.key);
  // triple-DES requires exactly a 192-bit (24 byte) key
  if(this.name.indexOf('3DES') === 0 && key.length() !== 24) {
    throw new Error('Invalid Triple-DES key size: ' + key.length() * 8);
  }

  // do key expansion to 16 or 48 subkeys (single or triple DES)
  this._keys = _createKeys(key);
  this._init = true;
};
|
|
|
|
/** Register DES algorithms **/

// register single-DES in every supported mode, then triple-DES in the
// same modes, preserving the original registration order per family
['ecb', 'cbc', 'cfb', 'ofb', 'ctr'].forEach(function(mode) {
  registerAlgorithm('DES-' + mode.toUpperCase(), forge$f.cipher.modes[mode]);
});
['ecb', 'cbc', 'cfb', 'ofb', 'ctr'].forEach(function(mode) {
  registerAlgorithm('3DES-' + mode.toUpperCase(), forge$f.cipher.modes[mode]);
});
|
|
|
|
/**
 * Registers a DES algorithm factory with the forge cipher registry.
 *
 * @param name the algorithm name (e.g. 'DES-CBC').
 * @param mode the mode factory function.
 */
function registerAlgorithm(name, mode) {
  forge$f.cipher.registerAlgorithm(name, function() {
    return new forge$f.des.Algorithm(name, mode);
  });
}
|
|
|
|
/** DES implementation **/

// Precomputed DES "SP" lookup tables. Each table folds one of the eight
// DES S-boxes together with the P permutation so that a round's
// substitution-and-permute step becomes eight table lookups instead of
// bit-by-bit work. Entries are 32-bit words; the negative literals in
// spfunction2 are the same bit patterns written as signed 32-bit integers.
var spfunction1 = [0x1010400,0,0x10000,0x1010404,0x1010004,0x10404,0x4,0x10000,0x400,0x1010400,0x1010404,0x400,0x1000404,0x1010004,0x1000000,0x4,0x404,0x1000400,0x1000400,0x10400,0x10400,0x1010000,0x1010000,0x1000404,0x10004,0x1000004,0x1000004,0x10004,0,0x404,0x10404,0x1000000,0x10000,0x1010404,0x4,0x1010000,0x1010400,0x1000000,0x1000000,0x400,0x1010004,0x10000,0x10400,0x1000004,0x400,0x4,0x1000404,0x10404,0x1010404,0x10004,0x1010000,0x1000404,0x1000004,0x404,0x10404,0x1010400,0x404,0x1000400,0x1000400,0,0x10004,0x10400,0,0x1010004];
var spfunction2 = [-0x7fef7fe0,-0x7fff8000,0x8000,0x108020,0x100000,0x20,-0x7fefffe0,-0x7fff7fe0,-0x7fffffe0,-0x7fef7fe0,-0x7fef8000,-0x80000000,-0x7fff8000,0x100000,0x20,-0x7fefffe0,0x108000,0x100020,-0x7fff7fe0,0,-0x80000000,0x8000,0x108020,-0x7ff00000,0x100020,-0x7fffffe0,0,0x108000,0x8020,-0x7fef8000,-0x7ff00000,0x8020,0,0x108020,-0x7fefffe0,0x100000,-0x7fff7fe0,-0x7ff00000,-0x7fef8000,0x8000,-0x7ff00000,-0x7fff8000,0x20,-0x7fef7fe0,0x108020,0x20,0x8000,-0x80000000,0x8020,-0x7fef8000,0x100000,-0x7fffffe0,0x100020,-0x7fff7fe0,-0x7fffffe0,0x100020,0x108000,0,-0x7fff8000,0x8020,-0x80000000,-0x7fefffe0,-0x7fef7fe0,0x108000];
var spfunction3 = [0x208,0x8020200,0,0x8020008,0x8000200,0,0x20208,0x8000200,0x20008,0x8000008,0x8000008,0x20000,0x8020208,0x20008,0x8020000,0x208,0x8000000,0x8,0x8020200,0x200,0x20200,0x8020000,0x8020008,0x20208,0x8000208,0x20200,0x20000,0x8000208,0x8,0x8020208,0x200,0x8000000,0x8020200,0x8000000,0x20008,0x208,0x20000,0x8020200,0x8000200,0,0x200,0x20008,0x8020208,0x8000200,0x8000008,0x200,0,0x8020008,0x8000208,0x20000,0x8000000,0x8020208,0x8,0x20208,0x20200,0x8000008,0x8020000,0x8000208,0x208,0x8020000,0x20208,0x8,0x8020008,0x20200];
var spfunction4 = [0x802001,0x2081,0x2081,0x80,0x802080,0x800081,0x800001,0x2001,0,0x802000,0x802000,0x802081,0x81,0,0x800080,0x800001,0x1,0x2000,0x800000,0x802001,0x80,0x800000,0x2001,0x2080,0x800081,0x1,0x2080,0x800080,0x2000,0x802080,0x802081,0x81,0x800080,0x800001,0x802000,0x802081,0x81,0,0,0x802000,0x2080,0x800080,0x800081,0x1,0x802001,0x2081,0x2081,0x80,0x802081,0x81,0x1,0x2000,0x800001,0x2001,0x802080,0x800081,0x2001,0x2080,0x800000,0x802001,0x80,0x800000,0x2000,0x802080];
var spfunction5 = [0x100,0x2080100,0x2080000,0x42000100,0x80000,0x100,0x40000000,0x2080000,0x40080100,0x80000,0x2000100,0x40080100,0x42000100,0x42080000,0x80100,0x40000000,0x2000000,0x40080000,0x40080000,0,0x40000100,0x42080100,0x42080100,0x2000100,0x42080000,0x40000100,0,0x42000000,0x2080100,0x2000000,0x42000000,0x80100,0x80000,0x42000100,0x100,0x2000000,0x40000000,0x2080000,0x42000100,0x40080100,0x2000100,0x40000000,0x42080000,0x2080100,0x40080100,0x100,0x2000000,0x42080000,0x42080100,0x80100,0x42000000,0x42080100,0x2080000,0,0x40080000,0x42000000,0x80100,0x2000100,0x40000100,0x80000,0,0x40080000,0x2080100,0x40000100];
var spfunction6 = [0x20000010,0x20400000,0x4000,0x20404010,0x20400000,0x10,0x20404010,0x400000,0x20004000,0x404010,0x400000,0x20000010,0x400010,0x20004000,0x20000000,0x4010,0,0x400010,0x20004010,0x4000,0x404000,0x20004010,0x10,0x20400010,0x20400010,0,0x404010,0x20404000,0x4010,0x404000,0x20404000,0x20000000,0x20004000,0x10,0x20400010,0x404000,0x20404010,0x400000,0x4010,0x20000010,0x400000,0x20004000,0x20000000,0x4010,0x20000010,0x20404010,0x404000,0x20400000,0x404010,0x20404000,0,0x20400010,0x10,0x4000,0x20400000,0x404010,0x4000,0x400010,0x20004010,0,0x20404000,0x20000000,0x400010,0x20004010];
var spfunction7 = [0x200000,0x4200002,0x4000802,0,0x800,0x4000802,0x200802,0x4200800,0x4200802,0x200000,0,0x4000002,0x2,0x4000000,0x4200002,0x802,0x4000800,0x200802,0x200002,0x4000800,0x4000002,0x4200000,0x4200800,0x200002,0x4200000,0x800,0x802,0x4200802,0x200800,0x2,0x4000000,0x200800,0x4000000,0x200800,0x200000,0x4000802,0x4000802,0x4200002,0x4200002,0x2,0x200002,0x4000000,0x4000800,0x200000,0x4200800,0x802,0x200802,0x4200800,0x802,0x4000002,0x4200802,0x4200000,0x200800,0,0x2,0x4200802,0,0x200802,0x4200000,0x800,0x4000002,0x4000800,0x800,0x200002];
var spfunction8 = [0x10001040,0x1000,0x40000,0x10041040,0x10000000,0x10001040,0x40,0x10000000,0x40040,0x10040000,0x10041040,0x41000,0x10041000,0x41040,0x1000,0x40,0x10040000,0x10000040,0x10001000,0x1040,0x41000,0x40040,0x10040040,0x10041000,0x1040,0,0,0x10040040,0x10000040,0x10001000,0x41040,0x40000,0x41040,0x40000,0x10041000,0x1000,0x40,0x10040040,0x1000,0x41040,0x10001000,0x40,0x10000040,0x10040000,0x10040040,0x10000000,0x40000,0x10001040,0,0x10041040,0x40040,0x10000040,0x10040000,0x10001000,0x10001040,0,0x10041040,0x41000,0x41000,0x1040,0x1040,0x40040,0x10000000,0x10041000];
|
|
|
|
/**
 * Create necessary sub keys.
 *
 * Expands a DES (64-bit) or Triple DES (192-bit) key into the per-round
 * subkeys. PC-1 is implemented as a series of in-register bit swaps and
 * PC-2 as precomputed 4-bit-slice lookup tables; the result is two 32-bit
 * words per round (32 words for DES, 96 for Triple DES).
 *
 * @param key the 64-bit or 192-bit key as a forge byte buffer.
 *
 * @return the expanded keys (an array of 32-bit words).
 */
function _createKeys(key) {
  // PC-2 lookup tables: pc2bytesN maps one 4-bit slice of a rotated key
  // half to that slice's contribution to the round subkey
  var pc2bytes0 = [0,0x4,0x20000000,0x20000004,0x10000,0x10004,0x20010000,0x20010004,0x200,0x204,0x20000200,0x20000204,0x10200,0x10204,0x20010200,0x20010204],
    pc2bytes1 = [0,0x1,0x100000,0x100001,0x4000000,0x4000001,0x4100000,0x4100001,0x100,0x101,0x100100,0x100101,0x4000100,0x4000101,0x4100100,0x4100101],
    pc2bytes2 = [0,0x8,0x800,0x808,0x1000000,0x1000008,0x1000800,0x1000808,0,0x8,0x800,0x808,0x1000000,0x1000008,0x1000800,0x1000808],
    pc2bytes3 = [0,0x200000,0x8000000,0x8200000,0x2000,0x202000,0x8002000,0x8202000,0x20000,0x220000,0x8020000,0x8220000,0x22000,0x222000,0x8022000,0x8222000],
    pc2bytes4 = [0,0x40000,0x10,0x40010,0,0x40000,0x10,0x40010,0x1000,0x41000,0x1010,0x41010,0x1000,0x41000,0x1010,0x41010],
    pc2bytes5 = [0,0x400,0x20,0x420,0,0x400,0x20,0x420,0x2000000,0x2000400,0x2000020,0x2000420,0x2000000,0x2000400,0x2000020,0x2000420],
    pc2bytes6 = [0,0x10000000,0x80000,0x10080000,0x2,0x10000002,0x80002,0x10080002,0,0x10000000,0x80000,0x10080000,0x2,0x10000002,0x80002,0x10080002],
    pc2bytes7 = [0,0x10000,0x800,0x10800,0x20000000,0x20010000,0x20000800,0x20010800,0x20000,0x30000,0x20800,0x30800,0x20020000,0x20030000,0x20020800,0x20030800],
    pc2bytes8 = [0,0x40000,0,0x40000,0x2,0x40002,0x2,0x40002,0x2000000,0x2040000,0x2000000,0x2040000,0x2000002,0x2040002,0x2000002,0x2040002],
    pc2bytes9 = [0,0x10000000,0x8,0x10000008,0,0x10000000,0x8,0x10000008,0x400,0x10000400,0x408,0x10000408,0x400,0x10000400,0x408,0x10000408],
    pc2bytes10 = [0,0x20,0,0x20,0x100000,0x100020,0x100000,0x100020,0x2000,0x2020,0x2000,0x2020,0x102000,0x102020,0x102000,0x102020],
    pc2bytes11 = [0,0x1000000,0x200,0x1000200,0x200000,0x1200000,0x200200,0x1200200,0x4000000,0x5000000,0x4000200,0x5000200,0x4200000,0x5200000,0x4200200,0x5200200],
    pc2bytes12 = [0,0x1000,0x8000000,0x8001000,0x80000,0x81000,0x8080000,0x8081000,0x10,0x1010,0x8000010,0x8001010,0x80010,0x81010,0x8080010,0x8081010],
    pc2bytes13 = [0,0x4,0x100,0x104,0,0x4,0x100,0x104,0x1,0x5,0x101,0x105,0x1,0x5,0x101,0x105];

  // how many iterations (1 for des, 3 for triple des)
  // changed by Paul 16/6/2007 to use Triple DES for 9+ byte keys
  var iterations = key.length() > 8 ? 3 : 1;

  // stores the return keys
  var keys = [];

  // now define the left shifts which need to be done
  // (0 => rotate 1 bit, 1 => rotate 2 bits; see the per-round loop below)
  var shifts = [0, 0, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 0];

  var n = 0, tmp;
  for(var j = 0; j < iterations; j++) {
    // read the next 64-bit key chunk as two 32-bit words
    var left = key.getInt32();
    var right = key.getInt32();

    // apply PC-1 via bit-swap steps; note that JS shift counts are taken
    // modulo 32, so `>>> -16` and `<< -16` below behave as `>>> 16` and
    // `<< 16` respectively
    tmp = ((left >>> 4) ^ right) & 0x0f0f0f0f;
    right ^= tmp;
    left ^= (tmp << 4);

    tmp = ((right >>> -16) ^ left) & 0x0000ffff;
    left ^= tmp;
    right ^= (tmp << -16);

    tmp = ((left >>> 2) ^ right) & 0x33333333;
    right ^= tmp;
    left ^= (tmp << 2);

    tmp = ((right >>> -16) ^ left) & 0x0000ffff;
    left ^= tmp;
    right ^= (tmp << -16);

    tmp = ((left >>> 1) ^ right) & 0x55555555;
    right ^= tmp;
    left ^= (tmp << 1);

    tmp = ((right >>> 8) ^ left) & 0x00ff00ff;
    left ^= tmp;
    right ^= (tmp << 8);

    tmp = ((left >>> 1) ^ right) & 0x55555555;
    right ^= tmp;
    left ^= (tmp << 1);

    // right needs to be shifted and OR'd with last four bits of left
    tmp = (left << 8) | ((right >>> 20) & 0x000000f0);

    // left needs to be put upside down
    left = ((right << 24) | ((right << 8) & 0xff0000) |
      ((right >>> 8) & 0xff00) | ((right >>> 24) & 0xf0));
    right = tmp;

    // now go through and perform these shifts on the left and right keys
    for(var i = 0; i < shifts.length; ++i) {
      //shift the keys either one or two bits to the left
      if(shifts[i]) {
        left = (left << 2) | (left >>> 26);
        right = (right << 2) | (right >>> 26);
      } else {
        left = (left << 1) | (left >>> 27);
        right = (right << 1) | (right >>> 27);
      }
      // NOTE(review): -0xf masks with 0xfffffff1; presumably this clears
      // the bits rotated out of the 28-bit key halves -- confirm against
      // the FIPS 46-3 key schedule
      left &= -0xf;
      right &= -0xf;

      // now apply PC-2, in such a way that E is easier when encrypting or
      // decrypting this conversion will look like PC-2 except only the last 6
      // bits of each byte are used rather than 48 consecutive bits and the
      // order of lines will be according to how the S selection functions will
      // be applied: S2, S4, S6, S8, S1, S3, S5, S7
      var lefttmp = (
        pc2bytes0[left >>> 28] | pc2bytes1[(left >>> 24) & 0xf] |
        pc2bytes2[(left >>> 20) & 0xf] | pc2bytes3[(left >>> 16) & 0xf] |
        pc2bytes4[(left >>> 12) & 0xf] | pc2bytes5[(left >>> 8) & 0xf] |
        pc2bytes6[(left >>> 4) & 0xf]);
      var righttmp = (
        pc2bytes7[right >>> 28] | pc2bytes8[(right >>> 24) & 0xf] |
        pc2bytes9[(right >>> 20) & 0xf] | pc2bytes10[(right >>> 16) & 0xf] |
        pc2bytes11[(right >>> 12) & 0xf] | pc2bytes12[(right >>> 8) & 0xf] |
        pc2bytes13[(right >>> 4) & 0xf]);
      tmp = ((righttmp >>> 16) ^ lefttmp) & 0x0000ffff;
      keys[n++] = lefttmp ^ tmp;
      keys[n++] = righttmp ^ (tmp << 16);
    }
  }

  return keys;
}
|
|
|
/**
 * Updates a single 64-bit block (two 32-bit words) using DES. The update
 * will either encrypt or decrypt the block.
 *
 * Applies the initial permutation (IP), 16 Feistel rounds per DES pass
 * (one pass for DES, three passes for Triple DES in EDE order), then the
 * final permutation (IP^-1).
 *
 * @param keys the expanded keys from _createKeys().
 * @param input the input block (an array of two 32-bit words).
 * @param output the updated output block (two 32-bit words written).
 * @param decrypt true to decrypt the block, false to encrypt it.
 */
function _updateBlock(keys, input, output, decrypt) {
  // set up loops for single or triple DES
  // (looping holds [start, end, step] triples: one per DES pass; for
  // decryption the key schedule is walked in reverse)
  var iterations = keys.length === 32 ? 3 : 9;
  var looping;
  if(iterations === 3) {
    looping = decrypt ? [30, -2, -2] : [0, 32, 2];
  } else {
    looping = (decrypt ?
      [94, 62, -2, 32, 64, 2, 30, -2, -2] :
      [0, 32, 2, 62, 30, -2, 64, 96, 2]);
  }

  var tmp;

  var left = input[0];
  var right = input[1];

  // first each 64 bit chunk of the message must be permuted according to IP
  tmp = ((left >>> 4) ^ right) & 0x0f0f0f0f;
  right ^= tmp;
  left ^= (tmp << 4);

  tmp = ((left >>> 16) ^ right) & 0x0000ffff;
  right ^= tmp;
  left ^= (tmp << 16);

  tmp = ((right >>> 2) ^ left) & 0x33333333;
  left ^= tmp;
  right ^= (tmp << 2);

  tmp = ((right >>> 8) ^ left) & 0x00ff00ff;
  left ^= tmp;
  right ^= (tmp << 8);

  tmp = ((left >>> 1) ^ right) & 0x55555555;
  right ^= tmp;
  left ^= (tmp << 1);

  // rotate left 1 bit
  left = ((left << 1) | (left >>> 31));
  right = ((right << 1) | (right >>> 31));

  for(var j = 0; j < iterations; j += 3) {
    var endloop = looping[j + 1];
    var loopinc = looping[j + 2];

    // now go through and perform the encryption or decryption
    for(var i = looping[j]; i != endloop; i += loopinc) {
      var right1 = right ^ keys[i];
      var right2 = ((right >>> 4) | (right << 28)) ^ keys[i + 1];

      // passing these bytes through the S selection functions
      tmp = left;
      left = right;
      right = tmp ^ (
        spfunction2[(right1 >>> 24) & 0x3f] |
        spfunction4[(right1 >>> 16) & 0x3f] |
        spfunction6[(right1 >>> 8) & 0x3f] |
        spfunction8[right1 & 0x3f] |
        spfunction1[(right2 >>> 24) & 0x3f] |
        spfunction3[(right2 >>> 16) & 0x3f] |
        spfunction5[(right2 >>> 8) & 0x3f] |
        spfunction7[right2 & 0x3f]);
    }
    // unreverse left and right
    tmp = left;
    left = right;
    right = tmp;
  }

  // rotate right 1 bit
  left = ((left >>> 1) | (left << 31));
  right = ((right >>> 1) | (right << 31));

  // now perform IP-1, which is IP in the opposite direction
  tmp = ((left >>> 1) ^ right) & 0x55555555;
  right ^= tmp;
  left ^= (tmp << 1);

  tmp = ((right >>> 8) ^ left) & 0x00ff00ff;
  left ^= tmp;
  right ^= (tmp << 8);

  tmp = ((right >>> 2) ^ left) & 0x33333333;
  left ^= tmp;
  right ^= (tmp << 2);

  tmp = ((left >>> 16) ^ right) & 0x0000ffff;
  right ^= tmp;
  left ^= (tmp << 16);

  tmp = ((left >>> 4) ^ right) & 0x0f0f0f0f;
  right ^= tmp;
  left ^= (tmp << 4);

  output[0] = left;
  output[1] = right;
}
|
|
|
/**
 * Deprecated. Instead, use:
 *
 *   forge.cipher.createCipher('DES-<mode>', key);
 *   forge.cipher.createDecipher('DES-<mode>', key);
 *
 * Creates a deprecated DES cipher object. This object's mode will default to
 * CBC (cipher-block-chaining).
 *
 * The key may be given as a binary-encoded string of bytes or a byte buffer.
 *
 * @param options the options to use.
 *          key the symmetric key to use (64 or 192 bits).
 *          output the buffer to write to.
 *          decrypt true for decryption, false for encryption.
 *          mode the cipher mode to use (default: 'CBC').
 *
 * @return the cipher.
 */
function _createCipher(options) {
  options = options || {};
  var algorithm = 'DES-' + (options.mode || 'CBC').toUpperCase();

  // pick a cipher or decipher according to the requested direction
  var cipher = options.decrypt ?
    forge$f.cipher.createDecipher(algorithm, options.key) :
    forge$f.cipher.createCipher(algorithm, options.key);

  // wrap start() so the legacy call form start(iv, outputBuffer) keeps
  // working alongside the modern start(iv, options) form
  var originalStart = cipher.start;
  cipher.start = function(iv, opts) {
    var output = null;
    if(opts instanceof forge$f.util.ByteBuffer) {
      // second argument is an output buffer, not an options object
      output = opts;
      opts = {};
    }
    opts = opts || {};
    opts.output = output;
    opts.iv = iv;
    originalStart.call(cipher, opts);
  };

  return cipher;
}
|
|
|
|
/**
 * Node.js module for Forge message digests.
 *
 * @author Dave Longley
 *
 * Copyright 2011-2017 Digital Bazaar, Inc.
 */

var forge$e = forge$m;

// ensure the md namespace and its algorithm registry exist; digest
// implementations (e.g. sha256 below) register themselves in
// forge.md.algorithms so they can be looked up by name
forge$e.md = forge$e.md || {};
forge$e.md.algorithms = forge$e.md.algorithms || {};
|
|
|
/**
 * Hash-based Message Authentication Code implementation. Requires a message
 * digest object that can be obtained, for example, from forge.md.sha1 or
 * forge.md.md5.
 *
 * @author Dave Longley
 *
 * Copyright (c) 2010-2012 Digital Bazaar, Inc. All rights reserved.
 */

var forge$d = forge$m;

/* HMAC API */
var hmac = forge$d.hmac = forge$d.hmac || {};

/**
 * Creates an HMAC object that uses the given message digest object.
 *
 * The returned object keeps its key, digest, and paddings in closure
 * state, so each HMAC instance is independent.
 *
 * @return an HMAC object.
 */
hmac.create = function() {
  // the hmac key to use
  var _key = null;

  // the message digest to use
  var _md = null;

  // the inner padding
  var _ipadding = null;

  // the outer padding
  var _opadding = null;

  // hmac context
  var ctx = {};

  /**
   * Starts or restarts the HMAC with the given key and message digest.
   *
   * @param md the message digest to use, null to reuse the previous one,
   *           a string to use builtin 'sha1', 'md5', 'sha256'.
   * @param key the key to use as a string, array of bytes, byte buffer,
   *          or null to reuse the previous key.
   */
  ctx.start = function(md, key) {
    if(md !== null) {
      if(typeof md === 'string') {
        // create builtin message digest
        md = md.toLowerCase();
        if(md in forge$d.md.algorithms) {
          _md = forge$d.md.algorithms[md].create();
        } else {
          throw new Error('Unknown hash algorithm "' + md + '"');
        }
      } else {
        // store message digest
        _md = md;
      }
    }

    if(key === null) {
      // reuse previous key
      key = _key;
    } else {
      if(typeof key === 'string') {
        // convert string into byte buffer
        key = forge$d.util.createBuffer(key);
      } else if(forge$d.util.isArray(key)) {
        // convert byte array into byte buffer
        var tmp = key;
        key = forge$d.util.createBuffer();
        for(var i = 0; i < tmp.length; ++i) {
          key.putByte(tmp[i]);
        }
      }

      // if key is longer than blocksize, hash it first (per RFC 2104)
      var keylen = key.length();
      if(keylen > _md.blockLength) {
        _md.start();
        _md.update(key.bytes());
        key = _md.digest();
      }

      // mix key into inner and outer padding
      // ipadding = [0x36 * blocksize] ^ key
      // opadding = [0x5C * blocksize] ^ key
      _ipadding = forge$d.util.createBuffer();
      _opadding = forge$d.util.createBuffer();
      keylen = key.length();
      for(var i = 0; i < keylen; ++i) {
        var tmp = key.at(i);
        _ipadding.putByte(0x36 ^ tmp);
        _opadding.putByte(0x5C ^ tmp);
      }

      // if key is shorter than blocksize, add additional padding
      // (equivalent to zero-padding the key before the XOR above)
      if(keylen < _md.blockLength) {
        var tmp = _md.blockLength - keylen;
        for(var i = 0; i < tmp; ++i) {
          _ipadding.putByte(0x36);
          _opadding.putByte(0x5C);
        }
      }
      _key = key;
      _ipadding = _ipadding.bytes();
      _opadding = _opadding.bytes();
    }

    // digest is done like so: hash(opadding | hash(ipadding | message))

    // prepare to do inner hash
    // hash(ipadding | message)
    _md.start();
    _md.update(_ipadding);
  };

  /**
   * Updates the HMAC with the given message bytes.
   *
   * @param bytes the bytes to update with.
   */
  ctx.update = function(bytes) {
    _md.update(bytes);
  };

  /**
   * Produces the Message Authentication Code (MAC).
   *
   * @return a byte buffer containing the digest value.
   */
  ctx.getMac = function() {
    // digest is done like so: hash(opadding | hash(ipadding | message))
    // here we do the outer hashing
    var inner = _md.digest().bytes();
    _md.start();
    _md.update(_opadding);
    _md.update(inner);
    return _md.digest();
  };
  // alias for getMac
  ctx.digest = ctx.getMac;

  return ctx;
};
|
|
|
|
// bundler shim: wraps the Node.js crypto module namespace so it can be
// consumed like a CommonJS default export
var require$$8 = /*@__PURE__*/getAugmentedNamespace(nodeCrypto$1);

/**
 * Password-Based Key-Derivation Function #2 implementation.
 *
 * See RFC 2898 for details.
 *
 * @author Dave Longley
 *
 * Copyright (c) 2010-2013 Digital Bazaar, Inc.
 */

var forge$c = forge$m;

var pkcs5 = forge$c.pkcs5 = forge$c.pkcs5 || {};

// use Node's native crypto for PBKDF2 when available and not explicitly
// disabled via forge.options.usePureJavaScript
var crypto$2;
if(forge$c.util.isNodejs && !forge$c.options.usePureJavaScript) {
  crypto$2 = require$$8;
}
|
|
|
|
/**
 * Derives a key from a password.
 *
 * Implements PBKDF2 (RFC 2898 section 5.2). Delegates to Node's native
 * crypto.pbkdf2 when possible; otherwise runs a pure-JS HMAC-based
 * implementation with both synchronous and asynchronous entry points.
 *
 * @param p the password as a binary-encoded string of bytes.
 * @param s the salt as a binary-encoded string of bytes.
 * @param c the iteration count, a positive integer.
 * @param dkLen the intended length, in bytes, of the derived key,
 *          (max: 2^32 - 1) * hash length of the PRF.
 * @param [md] the message digest (or algorithm identifier as a string) to use
 *          in the PRF, defaults to SHA-1.
 * @param [callback(err, key)] presence triggers asynchronous version, called
 *          once the operation completes.
 *
 * @return the derived key, as a binary-encoded string of bytes, for the
 *           synchronous version (if no callback is specified).
 */
forge$c.pbkdf2 = pkcs5.pbkdf2 = function(
  p, s, c, dkLen, md, callback) {
  // support pbkdf2(p, s, c, dkLen, callback) with md omitted
  if(typeof md === 'function') {
    callback = md;
    md = null;
  }

  // use native implementation if possible and not disabled, note that
  // some node versions only support SHA-1, others allow digest to be changed
  // (detected via pbkdf2Sync's arity: 4 args means SHA-1 only)
  if(forge$c.util.isNodejs && !forge$c.options.usePureJavaScript &&
    crypto$2.pbkdf2 && (md === null || typeof md !== 'object') &&
    (crypto$2.pbkdf2Sync.length > 4 || (!md || md === 'sha1'))) {
    if(typeof md !== 'string') {
      // default prf to SHA-1
      md = 'sha1';
    }
    p = Buffer.from(p, 'binary');
    s = Buffer.from(s, 'binary');
    if(!callback) {
      if(crypto$2.pbkdf2Sync.length === 4) {
        return crypto$2.pbkdf2Sync(p, s, c, dkLen).toString('binary');
      }
      return crypto$2.pbkdf2Sync(p, s, c, dkLen, md).toString('binary');
    }
    if(crypto$2.pbkdf2Sync.length === 4) {
      return crypto$2.pbkdf2(p, s, c, dkLen, function(err, key) {
        if(err) {
          return callback(err);
        }
        callback(null, key.toString('binary'));
      });
    }
    return crypto$2.pbkdf2(p, s, c, dkLen, md, function(err, key) {
      if(err) {
        return callback(err);
      }
      callback(null, key.toString('binary'));
    });
  }

  // pure-JS fallback below
  if(typeof md === 'undefined' || md === null) {
    // default prf to SHA-1
    md = 'sha1';
  }
  if(typeof md === 'string') {
    if(!(md in forge$c.md.algorithms)) {
      throw new Error('Unknown hash algorithm: ' + md);
    }
    md = forge$c.md[md].create();
  }

  var hLen = md.digestLength;

  /* 1. If dkLen > (2^32 - 1) * hLen, output "derived key too long" and
    stop. */
  if(dkLen > (0xFFFFFFFF * hLen)) {
    var err = new Error('Derived key is too long.');
    if(callback) {
      return callback(err);
    }
    throw err;
  }

  /* 2. Let len be the number of hLen-octet blocks in the derived key,
    rounding up, and let r be the number of octets in the last
    block:

    len = CEIL(dkLen / hLen),
    r = dkLen - (len - 1) * hLen. */
  var len = Math.ceil(dkLen / hLen);
  var r = dkLen - (len - 1) * hLen;

  /* 3. For each block of the derived key apply the function F defined
    below to the password P, the salt S, the iteration count c, and
    the block index to compute the block:

    T_1 = F(P, S, c, 1),
    T_2 = F(P, S, c, 2),
    ...
    T_len = F(P, S, c, len),

    where the function F is defined as the exclusive-or sum of the
    first c iterates of the underlying pseudorandom function PRF
    applied to the password P and the concatenation of the salt S
    and the block index i:

    F(P, S, c, i) = u_1 XOR u_2 XOR ... XOR u_c

    where

    u_1 = PRF(P, S || INT(i)),
    u_2 = PRF(P, u_1),
    ...
    u_c = PRF(P, u_{c-1}).

    Here, INT(i) is a four-octet encoding of the integer i, most
    significant octet first. */
  var prf = forge$c.hmac.create();
  prf.start(md, p);
  var dk = '';
  var xor, u_c, u_c1;

  // sync version
  if(!callback) {
    for(var i = 1; i <= len; ++i) {
      // PRF(P, S || INT(i)) (first iteration)
      prf.start(null, null);
      prf.update(s);
      prf.update(forge$c.util.int32ToBytes(i));
      xor = u_c1 = prf.digest().getBytes();

      // PRF(P, u_{c-1}) (other iterations)
      for(var j = 2; j <= c; ++j) {
        prf.start(null, null);
        prf.update(u_c1);
        u_c = prf.digest().getBytes();
        // F(p, s, c, i)
        xor = forge$c.util.xorBytes(xor, u_c, hLen);
        u_c1 = u_c;
      }

      /* 4. Concatenate the blocks and extract the first dkLen octets to
        produce a derived key DK:

        DK = T_1 || T_2 || ... || T_len<0..r-1> */
      dk += (i < len) ? xor : xor.substr(0, r);
    }
    /* 5. Output the derived key DK. */
    return dk;
  }

  // async version: each PRF iteration is deferred via setImmediate so
  // long derivations do not block the event loop
  var i = 1, j;
  function outer() {
    if(i > len) {
      // done
      return callback(null, dk);
    }

    // PRF(P, S || INT(i)) (first iteration)
    prf.start(null, null);
    prf.update(s);
    prf.update(forge$c.util.int32ToBytes(i));
    xor = u_c1 = prf.digest().getBytes();

    // PRF(P, u_{c-1}) (other iterations)
    j = 2;
    inner();
  }

  function inner() {
    if(j <= c) {
      prf.start(null, null);
      prf.update(u_c1);
      u_c = prf.digest().getBytes();
      // F(p, s, c, i)
      xor = forge$c.util.xorBytes(xor, u_c, hLen);
      u_c1 = u_c;
      ++j;
      return forge$c.util.setImmediate(inner);
    }

    /* 4. Concatenate the blocks and extract the first dkLen octets to
      produce a derived key DK:

      DK = T_1 || T_2 || ... || T_len<0..r-1> */
    dk += (i < len) ? xor : xor.substr(0, r);

    ++i;
    outer();
  }

  outer();
};
|
|
|
|
/**
 * Javascript implementation of basic PEM (Privacy Enhanced Mail) algorithms.
 *
 * See: RFC 1421.
 *
 * @author Dave Longley
 *
 * Copyright (c) 2013-2014 Digital Bazaar, Inc.
 *
 * A Forge PEM object has the following fields:
 *
 * type: identifies the type of message (eg: "RSA PRIVATE KEY").
 *
 * procType: identifies the type of processing performed on the message,
 *   it has two subfields: version and type, eg: 4,ENCRYPTED.
 *
 * contentDomain: identifies the type of content in the message, typically
 *   only uses the value: "RFC822".
 *
 * dekInfo: identifies the message encryption algorithm and mode and includes
 *   any parameters for the algorithm, it has two subfields: algorithm and
 *   parameters, eg: DES-CBC,F8143EDE5960C597.
 *
 * headers: contains all other PEM encapsulated headers -- where order is
 *   significant (for pairing data like recipient ID + key info).
 *
 * body: the binary-encoded body.
 */

var forge$b = forge$m;

// shortcut for pem API
var pem = forge$b.pem = forge$b.pem || {};
|
|
|
|
/**
 * Encodes (serializes) the given PEM object.
 *
 * Emits BEGIN/END armor lines, the RFC 1421 encapsulated headers
 * (Proc-Type first, then Content-Domain, DEK-Info, and any others), and
 * the base64-encoded body, all with CRLF line endings.
 *
 * @param msg the PEM message object to encode (see the module header for
 *          its fields).
 * @param options the options to use:
 *          maxline the maximum characters per line for the body, (default: 64).
 *
 * @return the PEM-formatted string.
 */
pem.encode = function(msg, options) {
  options = options || {};
  var rval = '-----BEGIN ' + msg.type + '-----\r\n';

  // encode special headers
  var header;
  if(msg.procType) {
    header = {
      name: 'Proc-Type',
      values: [String(msg.procType.version), msg.procType.type]
    };
    rval += foldHeader(header);
  }
  if(msg.contentDomain) {
    header = {name: 'Content-Domain', values: [msg.contentDomain]};
    rval += foldHeader(header);
  }
  if(msg.dekInfo) {
    header = {name: 'DEK-Info', values: [msg.dekInfo.algorithm]};
    if(msg.dekInfo.parameters) {
      header.values.push(msg.dekInfo.parameters);
    }
    rval += foldHeader(header);
  }

  if(msg.headers) {
    // encode all other headers
    for(var i = 0; i < msg.headers.length; ++i) {
      rval += foldHeader(msg.headers[i]);
    }
  }

  // terminate header section with a blank line (only when headers were
  // present, which RFC 1421 ties to Proc-Type)
  if(msg.procType) {
    rval += '\r\n';
  }

  // add body
  rval += forge$b.util.encode64(msg.body, options.maxline || 64) + '\r\n';

  rval += '-----END ' + msg.type + '-----\r\n';
  return rval;
};
|
|
|
|
/**
 * Decodes (deserializes) all PEM messages found in the given string.
 *
 * Parses every BEGIN/END armored section, unfolds RFC 2822-style folded
 * header lines, and validates the Proc-Type / DEK-Info encapsulated
 * headers per RFC 1421.
 *
 * @param str the PEM-formatted string to decode.
 *
 * @return the PEM message objects in an array.
 *
 * @throws Error if no PEM message is found or a message's encapsulated
 *           headers are malformed.
 */
pem.decode = function(str) {
  var rval = [];

  // split string into PEM messages (be lenient w/EOF on BEGIN line)
  var rMessage = /\s*-----BEGIN ([A-Z0-9- ]+)-----\r?\n?([\x21-\x7e\s]+?(?:\r?\n\r?\n))?([:A-Za-z0-9+\/=\s]+?)-----END \1-----/g;
  var rHeader = /([\x21-\x7e]+):\s*([\x21-\x7e\s^:]+)/;
  var rCRLF = /\r?\n/;
  var match;
  while(true) {
    match = rMessage.exec(str);
    if(!match) {
      break;
    }

    // accept "NEW CERTIFICATE REQUEST" as "CERTIFICATE REQUEST"
    // https://datatracker.ietf.org/doc/html/rfc7468#section-7
    var type = match[1];
    if(type === 'NEW CERTIFICATE REQUEST') {
      type = 'CERTIFICATE REQUEST';
    }

    var msg = {
      type: type,
      procType: null,
      contentDomain: null,
      dekInfo: null,
      headers: [],
      body: forge$b.util.decode64(match[3])
    };
    rval.push(msg);

    // no headers
    if(!match[2]) {
      continue;
    }

    // parse headers
    var lines = match[2].split(rCRLF);
    var li = 0;
    while(match && li < lines.length) {
      // get line, trim any rhs whitespace
      var line = lines[li].replace(/\s+$/, '');

      // RFC2822 unfold any following folded lines (continuation lines
      // start with whitespace)
      for(var nl = li + 1; nl < lines.length; ++nl) {
        var next = lines[nl];
        if(!/\s/.test(next[0])) {
          break;
        }
        line += next;
        li = nl;
      }

      // parse header
      match = line.match(rHeader);
      if(match) {
        var header = {name: match[1], values: []};
        var values = match[2].split(',');
        for(var vi = 0; vi < values.length; ++vi) {
          header.values.push(ltrim(values[vi]));
        }

        // Proc-Type must be the first header
        if(!msg.procType) {
          if(header.name !== 'Proc-Type') {
            throw new Error('Invalid PEM formatted message. The first ' +
              'encapsulated header must be "Proc-Type".');
          } else if(header.values.length !== 2) {
            throw new Error('Invalid PEM formatted message. The "Proc-Type" ' +
              'header must have two subfields.');
          }
          msg.procType = {version: values[0], type: values[1]};
        } else if(!msg.contentDomain && header.name === 'Content-Domain') {
          // special-case Content-Domain
          msg.contentDomain = values[0] || '';
        } else if(!msg.dekInfo && header.name === 'DEK-Info') {
          // special-case DEK-Info
          if(header.values.length === 0) {
            throw new Error('Invalid PEM formatted message. The "DEK-Info" ' +
              'header must have at least one subfield.');
          }
          msg.dekInfo = {algorithm: values[0], parameters: values[1] || null};
        } else {
          msg.headers.push(header);
        }
      }

      ++li;
    }

    // BUGFIX: msg.procType is an object ({version, type}); the previous
    // code compared the object itself to the string 'ENCRYPTED', which is
    // always false, so this validation could never fire. Check the type
    // subfield instead (RFC 1421: "Proc-Type: 4,ENCRYPTED").
    if(msg.procType && msg.procType.type === 'ENCRYPTED' && !msg.dekInfo) {
      throw new Error('Invalid PEM formatted message. The "DEK-Info" ' +
        'header must be present if "Proc-Type" is "ENCRYPTED".');
    }
  }

  if(rval.length === 0) {
    throw new Error('Invalid PEM formatted message.');
  }

  return rval;
};
|
|
|
|
/**
 * Serializes one PEM encapsulated header as "Name: v1,v2,...\r\n",
 * folding the line (RFC 1421/RFC 2822 style) so no physical line exceeds
 * roughly 65 characters. Folds happen at the most recent space, tab, or
 * comma; a fold at a comma keeps the comma on the current line.
 *
 * @param header the header object with `name` and `values` fields.
 *
 * @return the folded header string, CRLF-terminated.
 */
function foldHeader(header) {
  var rval = header.name + ': ';

  // ensure values with CRLF are folded
  var values = [];
  var insertSpace = function(match, $1) {
    return ' ' + $1;
  };
  for(var i = 0; i < header.values.length; ++i) {
    values.push(header.values[i].replace(/^(\S+\r\n)/, insertSpace));
  }
  rval += values.join(',') + '\r\n';

  // do folding
  // `length` counts characters on the current physical line; `candidate`
  // is the index of the last foldable character seen on that line
  var length = 0;
  var candidate = -1;
  for(var i = 0; i < rval.length; ++i, ++length) {
    if(length > 65 && candidate !== -1) {
      var insert = rval[candidate];
      if(insert === ',') {
        // fold after the comma
        ++candidate;
        rval = rval.substr(0, candidate) + '\r\n ' + rval.substr(candidate);
      } else {
        // fold before the whitespace char, reusing it as the leading
        // whitespace of the continuation line
        rval = rval.substr(0, candidate) +
          '\r\n' + insert + rval.substr(candidate + 1);
      }
      // restart the per-line counter from just after the fold point
      length = (i - candidate - 1);
      candidate = -1;
      ++i;
    } else if(rval[i] === ' ' || rval[i] === '\t' || rval[i] === ',') {
      candidate = i;
    }
  }

  return rval;
}
|
|
|
|
/**
 * Strips leading whitespace from a string; trailing whitespace is kept.
 *
 * @param str the string to trim.
 *
 * @return the string with any leading whitespace removed.
 */
function ltrim(str) {
  var leadingWhitespace = /^\s+/;
  return str.replace(leadingWhitespace, '');
}
|
|
|
|
/**
 * Secure Hash Algorithm with 256-bit digest (SHA-256) implementation.
 *
 * See FIPS 180-2 for details.
 *
 * @author Dave Longley
 *
 * Copyright (c) 2010-2015 Digital Bazaar, Inc.
 */

var forge$a = forge$m;

// expose the implementation and register it in the md algorithm registry
// so it can be looked up by name (e.g. by hmac.start('sha256', ...))
var sha256 = forge$a.sha256 = forge$a.sha256 || {};
forge$a.md.sha256 = forge$a.md.algorithms.sha256 = sha256;
|
|
|
|
/**
 * Creates a SHA-256 message digest object.
 *
 * State, input buffer, and word scratch space live in closure variables,
 * so each digest object is independent. The object starts out already
 * initialized (start() is called before returning).
 *
 * @return a message digest object.
 */
sha256.create = function() {
  // do initialization as necessary (padding string and K constants are
  // shared, module-level lazies)
  if(!_initialized$2) {
    _init$2();
  }

  // SHA-256 state contains eight 32-bit integers
  var _state = null;

  // input buffer
  var _input = forge$a.util.createBuffer();

  // used for word storage
  var _w = new Array(64);

  // message digest object
  var md = {
    algorithm: 'sha256',
    blockLength: 64,
    digestLength: 32,
    // 56-bit length of message so far (does not include padding)
    messageLength: 0,
    // true message length
    fullMessageLength: null,
    // size of message length in bytes
    messageLengthSize: 8
  };

  /**
   * Starts the digest.
   *
   * @return this digest object.
   */
  md.start = function() {
    // up to 56-bit message length for convenience
    md.messageLength = 0;

    // full message length (set md.messageLength64 for backwards-compatibility)
    md.fullMessageLength = md.messageLength64 = [];
    var int32s = md.messageLengthSize / 4;
    for(var i = 0; i < int32s; ++i) {
      md.fullMessageLength.push(0);
    }
    _input = forge$a.util.createBuffer();
    // FIPS 180-2 initial hash values
    _state = {
      h0: 0x6A09E667,
      h1: 0xBB67AE85,
      h2: 0x3C6EF372,
      h3: 0xA54FF53A,
      h4: 0x510E527F,
      h5: 0x9B05688C,
      h6: 0x1F83D9AB,
      h7: 0x5BE0CD19
    };
    return md;
  };
  // start digest automatically for first time
  md.start();

  /**
   * Updates the digest with the given message input. The given input can
   * treated as raw input (no encoding will be applied) or an encoding of
   * 'utf8' maybe given to encode the input using UTF-8.
   *
   * @param msg the message input to update with.
   * @param encoding the encoding to use (default: 'raw', other: 'utf8').
   *
   * @return this digest object.
   */
  md.update = function(msg, encoding) {
    if(encoding === 'utf8') {
      msg = forge$a.util.encodeUtf8(msg);
    }

    // update message length, carrying overflow through the 32-bit words
    // of fullMessageLength (stored most-significant word first)
    var len = msg.length;
    md.messageLength += len;
    len = [(len / 0x100000000) >>> 0, len >>> 0];
    for(var i = md.fullMessageLength.length - 1; i >= 0; --i) {
      md.fullMessageLength[i] += len[1];
      len[1] = len[0] + ((md.fullMessageLength[i] / 0x100000000) >>> 0);
      md.fullMessageLength[i] = md.fullMessageLength[i] >>> 0;
      len[0] = ((len[1] / 0x100000000) >>> 0);
    }

    // add bytes to input buffer
    _input.putBytes(msg);

    // process bytes
    _update$2(_state, _w, _input);

    // compact input buffer every 2K or if empty
    if(_input.read > 2048 || _input.length() === 0) {
      _input.compact();
    }

    return md;
  };

  /**
   * Produces the digest.
   *
   * @return a byte buffer containing the digest value.
   */
  md.digest = function() {
    /* Note: Here we copy the remaining bytes in the input buffer and
    add the appropriate SHA-256 padding. Then we do the final update
    on a copy of the state so that if the user wants to get
    intermediate digests they can do so. */

    /* Determine the number of bytes that must be added to the message
    to ensure its length is congruent to 448 mod 512. In other words,
    the data to be digested must be a multiple of 512 bits (or 128 bytes).
    This data includes the message, some padding, and the length of the
    message. Since the length of the message will be encoded as 8 bytes (64
    bits), that means that the last segment of the data must have 56 bytes
    (448 bits) of message and padding. Therefore, the length of the message
    plus the padding must be congruent to 448 mod 512 because
    512 - 128 = 448.

    In order to fill up the message length it must be filled with
    padding that begins with 1 bit followed by all 0 bits. Padding
    must *always* be present, so if the message length is already
    congruent to 448 mod 512, then 512 padding bits must be added. */

    var finalBlock = forge$a.util.createBuffer();
    finalBlock.putBytes(_input.bytes());

    // compute remaining size to be digested (include message length size)
    var remaining = (
      md.fullMessageLength[md.fullMessageLength.length - 1] +
      md.messageLengthSize);

    // add padding for overflow blockSize - overflow
    // _padding starts with 1 byte with first bit is set (byte value 128), then
    // there may be up to (blockSize - 1) other pad bytes
    var overflow = remaining & (md.blockLength - 1);
    finalBlock.putBytes(_padding$2.substr(0, md.blockLength - overflow));

    // serialize message length in bits in big-endian order; since length
    // is stored in bytes we multiply by 8 and add carry from next int
    var next, carry;
    var bits = md.fullMessageLength[0] * 8;
    for(var i = 0; i < md.fullMessageLength.length - 1; ++i) {
      next = md.fullMessageLength[i + 1] * 8;
      carry = (next / 0x100000000) >>> 0;
      bits += carry;
      finalBlock.putInt32(bits >>> 0);
      bits = next >>> 0;
    }
    finalBlock.putInt32(bits);

    // run the final update on a copy of the state so intermediate
    // digests do not disturb the ongoing hash
    var s2 = {
      h0: _state.h0,
      h1: _state.h1,
      h2: _state.h2,
      h3: _state.h3,
      h4: _state.h4,
      h5: _state.h5,
      h6: _state.h6,
      h7: _state.h7
    };
    _update$2(s2, _w, finalBlock);
    var rval = forge$a.util.createBuffer();
    rval.putInt32(s2.h0);
    rval.putInt32(s2.h1);
    rval.putInt32(s2.h2);
    rval.putInt32(s2.h3);
    rval.putInt32(s2.h4);
    rval.putInt32(s2.h5);
    rval.putInt32(s2.h6);
    rval.putInt32(s2.h7);
    return rval;
  };

  return md;
};
|
|
|
|
// sha-256 padding bytes (0x80 then zero bytes); lazily built by _init$2()
var _padding$2 = null;
var _initialized$2 = false;

// table of SHA-256 round constants (K); lazily built by _init$2()
var _k$1 = null;
|
|
|
|
/**
 * Lazily builds the module-level SHA-256 tables: the padding string and
 * the round-constant (K) table. Runs once; later digests reuse them.
 */
function _init$2() {
  // padding: a single byte with the top bit set (0x80) followed by 64 zero
  // bytes; digest() takes however many of these it needs for the final block
  var zero = String.fromCharCode(0x00);
  _padding$2 = String.fromCharCode(128) + forge$a.util.fillString(zero, 64);

  // K table for SHA-256 rounds
  _k$1 = [
    0x428a2f98, 0x71374491, 0xb5c0fbcf, 0xe9b5dba5, 0x3956c25b, 0x59f111f1,
    0x923f82a4, 0xab1c5ed5, 0xd807aa98, 0x12835b01, 0x243185be, 0x550c7dc3,
    0x72be5d74, 0x80deb1fe, 0x9bdc06a7, 0xc19bf174, 0xe49b69c1, 0xefbe4786,
    0x0fc19dc6, 0x240ca1cc, 0x2de92c6f, 0x4a7484aa, 0x5cb0a9dc, 0x76f988da,
    0x983e5152, 0xa831c66d, 0xb00327c8, 0xbf597fc7, 0xc6e00bf3, 0xd5a79147,
    0x06ca6351, 0x14292967, 0x27b70a85, 0x2e1b2138, 0x4d2c6dfc, 0x53380d13,
    0x650a7354, 0x766a0abb, 0x81c2c92e, 0x92722c85, 0xa2bfe8a1, 0xa81a664b,
    0xc24b8b70, 0xc76c51a3, 0xd192e819, 0xd6990624, 0xf40e3585, 0x106aa070,
    0x19a4c116, 0x1e376c08, 0x2748774c, 0x34b0bcb5, 0x391c0cb3, 0x4ed8aa4a,
    0x5b9cca4f, 0x682e6ff3, 0x748f82ee, 0x78a5636f, 0x84c87814, 0x8cc70208,
    0x90befffa, 0xa4506ceb, 0xbef9a3f7, 0xc67178f2
  ];

  // mark tables as ready
  _initialized$2 = true;
}
|
|
|
|
/**
 * Runs the SHA-256 compression function over every complete 64-byte chunk
 * available in the given buffer, folding each chunk into the hash state.
 *
 * @param s the SHA-256 state (h0..h7) to update in place.
 * @param w scratch array used for the 64-entry message schedule.
 * @param bytes the byte buffer to consume 64-byte chunks from.
 */
function _update$2(s, w, bytes) {
  var a, b, c, d, e, f, g, h;
  var i, x, y, sigma0, sigma1, ch, maj, tmp1, tmp2;
  var remaining = bytes.length();

  // consume 512-bit (64-byte) chunks
  while(remaining >= 64) {
    // load sixteen 32-bit big-endian words...
    for(i = 0; i < 16; ++i) {
      w[i] = bytes.getInt32();
    }
    // ...then extend to the full 64-word schedule
    for(i = 16; i < 64; ++i) {
      // small sigma 1 of word i-2: rotr 17 ^ rotr 19 ^ shr 10
      x = w[i - 2];
      x = ((x >>> 17) | (x << 15)) ^ ((x >>> 19) | (x << 13)) ^ (x >>> 10);
      // small sigma 0 of word i-15: rotr 7 ^ rotr 18 ^ shr 3
      y = w[i - 15];
      y = ((y >>> 7) | (y << 25)) ^ ((y >>> 18) | (y << 14)) ^ (y >>> 3);
      // w[i] = sigma1 + w[i-7] + sigma0 + w[i-16] (mod 2^32)
      w[i] = (x + w[i - 7] + y + w[i - 16]) | 0;
    }

    // working variables start from the current hash state
    a = s.h0;
    b = s.h1;
    c = s.h2;
    d = s.h3;
    e = s.h4;
    f = s.h5;
    g = s.h6;
    h = s.h7;

    // 64 rounds
    for(i = 0; i < 64; ++i) {
      // big sigma 1 of e: rotr 6 ^ rotr 11 ^ rotr 25
      sigma1 = ((e >>> 6) | (e << 26)) ^
        ((e >>> 11) | (e << 21)) ^
        ((e >>> 25) | (e << 7));
      // Ch(e, f, g), using the single-XOR optimization
      ch = g ^ (e & (f ^ g));
      // big sigma 0 of a: rotr 2 ^ rotr 13 ^ rotr 22
      sigma0 = ((a >>> 2) | (a << 30)) ^
        ((a >>> 13) | (a << 19)) ^
        ((a >>> 22) | (a << 10));
      // Maj(a, b, c), using the single-XOR optimization
      maj = (a & b) | (c & (a ^ b));

      tmp1 = h + sigma1 + ch + _k$1[i] + w[i];
      tmp2 = sigma0 + maj;
      h = g;
      g = f;
      f = e;
      // `>>> 0` (rather than `| 0`) works around an iOS/Safari 10
      // JIT optimization bug
      e = (d + tmp1) >>> 0;
      d = c;
      c = b;
      b = a;
      // same Safari workaround as above
      a = (tmp1 + tmp2) >>> 0;
    }

    // fold the chunk back into the running hash state
    s.h0 = (s.h0 + a) | 0;
    s.h1 = (s.h1 + b) | 0;
    s.h2 = (s.h2 + c) | 0;
    s.h3 = (s.h3 + d) | 0;
    s.h4 = (s.h4 + e) | 0;
    s.h5 = (s.h5 + f) | 0;
    s.h6 = (s.h6 + g) | 0;
    s.h7 = (s.h7 + h) | 0;
    remaining -= 64;
  }
}
|
|
|
|
/**
|
|
* A javascript implementation of a cryptographically-secure
|
|
* Pseudo Random Number Generator (PRNG). The Fortuna algorithm is followed
|
|
* here though the use of SHA-256 is not enforced; when generating an
|
|
* a PRNG context, the hashing algorithm and block cipher used for
|
|
* the generator are specified via a plugin.
|
|
*
|
|
* @author Dave Longley
|
|
*
|
|
* Copyright (c) 2010-2014 Digital Bazaar, Inc.
|
|
*/
|
|
|
|
var forge$9 = forge$m;

// Node.js crypto module, used as an entropy source when running under
// Node (but not node-webkit) and pure-JS mode is not forced
var _crypto$1 = null;
if(forge$9.util.isNodejs && !forge$9.options.usePureJavaScript &&
  !process.versions['node-webkit']) {
  _crypto$1 = require$$8;
}

/* PRNG API */
var prng = forge$9.prng = forge$9.prng || {};
|
|
|
|
/**
 * Creates a new PRNG context.
 *
 * A PRNG plugin must be passed in that will provide:
 *
 * 1. A function that initializes the key and seed of a PRNG context. It
 *   will be given a 16 byte key and a 16 byte seed. Any key expansion
 *   or transformation of the seed from a byte string into an array of
 *   integers (or similar) should be performed.
 * 2. The cryptographic function used by the generator. It takes a key and
 *   a seed.
 * 3. A seed increment function. It takes the seed and returns seed + 1.
 * 4. An api to create a message digest.
 *
 * For an example, see random.js.
 *
 * @param plugin the PRNG plugin to use.
 */
prng.create = function(plugin) {
  var ctx = {
    plugin: plugin,
    key: null,
    seed: null,
    time: null,
    // number of reseeds so far
    reseeds: 0,
    // amount of data generated so far
    generated: 0,
    // no initial key bytes
    keyBytes: ''
  };

  // create 32 entropy pools (each is a message digest)
  var md = plugin.md;
  var pools = new Array(32);
  for(var i = 0; i < 32; ++i) {
    pools[i] = md.create();
  }
  ctx.pools = pools;

  // entropy pools are written to cyclically, starting at index 0
  ctx.pool = 0;

  /**
   * Generates random bytes. The bytes may be generated synchronously or
   * asynchronously. Web workers must use the asynchronous interface or
   * else the behavior is undefined.
   *
   * @param count the number of random bytes to generate.
   * @param [callback(err, bytes)] called once the operation completes.
   *
   * @return count random bytes as a string.
   */
  ctx.generate = function(count, callback) {
    // do synchronously
    if(!callback) {
      return ctx.generateSync(count);
    }

    // simple generator using counter-based CBC
    var cipher = ctx.plugin.cipher;
    var increment = ctx.plugin.increment;
    var formatKey = ctx.plugin.formatKey;
    var formatSeed = ctx.plugin.formatSeed;
    var b = forge$9.util.createBuffer();

    // paranoid deviation from Fortuna:
    // reset key for every request to protect previously
    // generated random bytes should the key be discovered;
    // there is no 100ms based reseeding because of this
    // forced reseed for every `generate` call
    ctx.key = null;

    generate();

    // async loop: each iteration produces one cipher block of output and
    // rotates the key/seed, yielding to the event loop between iterations
    function generate(err) {
      if(err) {
        return callback(err);
      }

      // sufficient bytes generated
      if(b.length() >= count) {
        return callback(null, b.getBytes(count));
      }

      // if amount of data generated is greater than 1 MiB, trigger reseed
      if(ctx.generated > 0xfffff) {
        ctx.key = null;
      }

      if(ctx.key === null) {
        // prevent stack overflow
        return forge$9.util.nextTick(function() {
          _reseed(generate);
        });
      }

      // generate the random bytes
      var bytes = cipher(ctx.key, ctx.seed);
      ctx.generated += bytes.length;
      b.putBytes(bytes);

      // generate bytes for a new key and seed
      ctx.key = formatKey(cipher(ctx.key, increment(ctx.seed)));
      ctx.seed = formatSeed(cipher(ctx.key, ctx.seed));

      forge$9.util.setImmediate(generate);
    }
  };

  /**
   * Generates random bytes synchronously.
   *
   * @param count the number of random bytes to generate.
   *
   * @return count random bytes as a string.
   */
  ctx.generateSync = function(count) {
    // simple generator using counter-based CBC
    var cipher = ctx.plugin.cipher;
    var increment = ctx.plugin.increment;
    var formatKey = ctx.plugin.formatKey;
    var formatSeed = ctx.plugin.formatSeed;

    // paranoid deviation from Fortuna:
    // reset key for every request to protect previously
    // generated random bytes should the key be discovered;
    // there is no 100ms based reseeding because of this
    // forced reseed for every `generateSync` call
    ctx.key = null;

    var b = forge$9.util.createBuffer();
    while(b.length() < count) {
      // if amount of data generated is greater than 1 MiB, trigger reseed
      if(ctx.generated > 0xfffff) {
        ctx.key = null;
      }

      if(ctx.key === null) {
        _reseedSync();
      }

      // generate the random bytes
      var bytes = cipher(ctx.key, ctx.seed);
      ctx.generated += bytes.length;
      b.putBytes(bytes);

      // generate bytes for a new key and seed
      ctx.key = formatKey(cipher(ctx.key, increment(ctx.seed)));
      ctx.seed = formatSeed(cipher(ctx.key, ctx.seed));
    }

    return b.getBytes(count);
  };

  /**
   * Private function that asynchronously reseeds a generator.
   *
   * @param callback(err) called once the operation completes.
   */
  function _reseed(callback) {
    if(ctx.pools[0].messageLength >= 32) {
      _seed();
      return callback();
    }
    // not enough seed data; request 32 bytes per pool (`<< 5` is * 32)
    var needed = (32 - ctx.pools[0].messageLength) << 5;
    ctx.seedFile(needed, function(err, bytes) {
      if(err) {
        return callback(err);
      }
      ctx.collect(bytes);
      _seed();
      callback();
    });
  }

  /**
   * Private function that synchronously reseeds a generator.
   */
  function _reseedSync() {
    if(ctx.pools[0].messageLength >= 32) {
      return _seed();
    }
    // not enough seed data; request 32 bytes per pool (`<< 5` is * 32)
    var needed = (32 - ctx.pools[0].messageLength) << 5;
    ctx.collect(ctx.seedFileSync(needed));
    _seed();
  }

  /**
   * Private function that seeds a generator once enough bytes are available.
   */
  function _seed() {
    // update reseed count (wraps at 2^32 - 1)
    ctx.reseeds = (ctx.reseeds === 0xffffffff) ? 0 : ctx.reseeds + 1;

    // goal is to update `key` via:
    // key = hash(key + s)
    // where 's' is all collected entropy from selected pools, then...

    // create a plugin-based message digest
    var md = ctx.plugin.md.create();

    // consume current key bytes
    md.update(ctx.keyBytes);

    // digest the entropy of pools whose index k meet the
    // condition 'n mod 2^k == 0' where n is the number of reseeds
    var _2powK = 1;
    for(var k = 0; k < 32; ++k) {
      if(ctx.reseeds % _2powK === 0) {
        md.update(ctx.pools[k].digest().getBytes());
        // restart the pool so it accumulates fresh entropy
        ctx.pools[k].start();
      }
      _2powK = _2powK << 1;
    }

    // get digest for key bytes
    ctx.keyBytes = md.digest().getBytes();

    // paranoid deviation from Fortuna:
    // update `seed` via `seed = hash(key)`
    // instead of initializing to zero once and only
    // ever incrementing it
    md.start();
    md.update(ctx.keyBytes);
    var seedBytes = md.digest().getBytes();

    // update state
    ctx.key = ctx.plugin.formatKey(ctx.keyBytes);
    ctx.seed = ctx.plugin.formatSeed(seedBytes);
    ctx.generated = 0;
  }

  /**
   * The built-in default seedFile. This seedFile is used when entropy
   * is needed immediately.
   *
   * @param needed the number of bytes that are needed.
   *
   * @return the random bytes.
   */
  function defaultSeedFile(needed) {
    // use window.crypto.getRandomValues strong source of entropy if available
    var getRandomValues = null;
    var globalScope = forge$9.util.globalScope;
    var _crypto = globalScope.crypto || globalScope.msCrypto;
    if(_crypto && _crypto.getRandomValues) {
      getRandomValues = function(arr) {
        return _crypto.getRandomValues(arr);
      };
    }

    var b = forge$9.util.createBuffer();
    if(getRandomValues) {
      while(b.length() < needed) {
        // max byte length is 65536 before QuotaExceededError is thrown
        // http://www.w3.org/TR/WebCryptoAPI/#RandomSource-method-getRandomValues
        var count = Math.max(1, Math.min(needed - b.length(), 65536) / 4);
        var entropy = new Uint32Array(Math.floor(count));
        try {
          getRandomValues(entropy);
          for(var i = 0; i < entropy.length; ++i) {
            b.putInt32(entropy[i]);
          }
        } catch(e) {
          /* only ignore QuotaExceededError */
          if(!(typeof QuotaExceededError !== 'undefined' &&
            e instanceof QuotaExceededError)) {
            throw e;
          }
        }
      }
    }

    // be sad and add some weak random data
    if(b.length() < needed) {
      /* Draws from Park-Miller "minimal standard" 31 bit PRNG,
        implemented with David G. Carta's optimization: with 32 bit math
        and without division (Public Domain). */
      var hi, lo, next;
      var seed = Math.floor(Math.random() * 0x010000);
      while(b.length() < needed) {
        lo = 16807 * (seed & 0xFFFF);
        hi = 16807 * (seed >> 16);
        lo += (hi & 0x7FFF) << 16;
        lo += hi >> 15;
        lo = (lo & 0x7FFFFFFF) + (lo >> 31);
        seed = lo & 0xFFFFFFFF;

        // consume lower 3 bytes of seed
        for(var i = 0; i < 3; ++i) {
          // throw in more pseudo random
          next = seed >>> (i << 3);
          next ^= Math.floor(Math.random() * 0x0100);
          b.putByte(next & 0xFF);
        }
      }
    }

    return b.getBytes(needed);
  }
  // initialize seed file APIs
  if(_crypto$1) {
    // use nodejs async API
    ctx.seedFile = function(needed, callback) {
      _crypto$1.randomBytes(needed, function(err, bytes) {
        if(err) {
          return callback(err);
        }
        callback(null, bytes.toString());
      });
    };
    // use nodejs sync API
    ctx.seedFileSync = function(needed) {
      return _crypto$1.randomBytes(needed).toString();
    };
  } else {
    ctx.seedFile = function(needed, callback) {
      try {
        callback(null, defaultSeedFile(needed));
      } catch(e) {
        callback(e);
      }
    };
    ctx.seedFileSync = defaultSeedFile;
  }

  /**
   * Adds entropy to a prng ctx's accumulator.
   *
   * @param bytes the bytes of entropy as a string.
   */
  ctx.collect = function(bytes) {
    // iterate over pools distributing entropy cyclically
    var count = bytes.length;
    for(var i = 0; i < count; ++i) {
      ctx.pools[ctx.pool].update(bytes.substr(i, 1));
      ctx.pool = (ctx.pool === 31) ? 0 : ctx.pool + 1;
    }
  };

  /**
   * Collects an integer of n bits.
   *
   * @param i the integer entropy.
   * @param n the number of bits in the integer.
   */
  ctx.collectInt = function(i, n) {
    var bytes = '';
    for(var x = 0; x < n; x += 8) {
      bytes += String.fromCharCode((i >> x) & 0xFF);
    }
    ctx.collect(bytes);
  };

  /**
   * Registers a Web Worker to receive immediate entropy from the main thread.
   * This method is required until Web Workers can access the native crypto
   * API. This method should be called twice for each created worker, once in
   * the main thread, and once in the worker itself.
   *
   * @param worker the worker to register.
   */
  ctx.registerWorker = function(worker) {
    // worker receives random bytes
    if(worker === self) {
      ctx.seedFile = function(needed, callback) {
        function listener(e) {
          var data = e.data;
          if(data.forge && data.forge.prng) {
            self.removeEventListener('message', listener);
            callback(data.forge.prng.err, data.forge.prng.bytes);
          }
        }
        self.addEventListener('message', listener);
        self.postMessage({forge: {prng: {needed: needed}}});
      };
    } else {
      // main thread sends random bytes upon request
      var listener = function(e) {
        var data = e.data;
        if(data.forge && data.forge.prng) {
          ctx.seedFile(data.forge.prng.needed, function(err, bytes) {
            worker.postMessage({forge: {prng: {err: err, bytes: bytes}}});
          });
        }
      };
      // TODO: do we need to remove the event listener when the worker dies?
      worker.addEventListener('message', listener);
    }
  };

  return ctx;
};
|
|
|
|
/**
|
|
* An API for getting cryptographically-secure random bytes. The bytes are
|
|
* generated using the Fortuna algorithm devised by Bruce Schneier and
|
|
* Niels Ferguson.
|
|
*
|
|
* Getting strong random bytes is not yet easy to do in javascript. The only
|
|
* truish random entropy that can be collected is from the mouse, keyboard, or
|
|
* from timing with respect to page loads, etc. This generator makes a poor
|
|
* attempt at providing random bytes when those sources haven't yet provided
|
|
* enough entropy to initially seed or to reseed the PRNG.
|
|
*
|
|
* @author Dave Longley
|
|
*
|
|
* Copyright (c) 2009-2014 Digital Bazaar, Inc.
|
|
*/
|
|
|
|
var forge$8 = forge$m;

// wires up `forge.random` as a Fortuna PRNG instance backed by AES-128,
// collecting browser entropy (mouse/keyboard/navigator) as a fallback
// when no native secure RNG is available
(function() {

// forge.random already defined
if(forge$8.random && forge$8.random.getBytes) {
  return;
}

(function(jQuery) {

// the default prng plugin, uses AES-128
var prng_aes = {};
var _prng_aes_output = new Array(4);
var _prng_aes_buffer = forge$8.util.createBuffer();
prng_aes.formatKey = function(key) {
  // convert the key into 32-bit integers
  var tmp = forge$8.util.createBuffer(key);
  key = new Array(4);
  key[0] = tmp.getInt32();
  key[1] = tmp.getInt32();
  key[2] = tmp.getInt32();
  key[3] = tmp.getInt32();

  // return the expanded key
  return forge$8.aes._expandKey(key, false);
};
prng_aes.formatSeed = function(seed) {
  // convert seed into 32-bit integers
  var tmp = forge$8.util.createBuffer(seed);
  seed = new Array(4);
  seed[0] = tmp.getInt32();
  seed[1] = tmp.getInt32();
  seed[2] = tmp.getInt32();
  seed[3] = tmp.getInt32();
  return seed;
};
prng_aes.cipher = function(key, seed) {
  // encrypt one AES block and return its 16 bytes
  forge$8.aes._updateBlock(key, seed, _prng_aes_output, false);
  _prng_aes_buffer.putInt32(_prng_aes_output[0]);
  _prng_aes_buffer.putInt32(_prng_aes_output[1]);
  _prng_aes_buffer.putInt32(_prng_aes_output[2]);
  _prng_aes_buffer.putInt32(_prng_aes_output[3]);
  return _prng_aes_buffer.getBytes();
};
prng_aes.increment = function(seed) {
  // FIXME: do we care about carry or signed issues?
  ++seed[3];
  return seed;
};
prng_aes.md = forge$8.md.sha256;

/**
 * Creates a new PRNG.
 */
function spawnPrng() {
  var ctx = forge$8.prng.create(prng_aes);

  /**
   * Gets random bytes. If a native secure crypto API is unavailable, this
   * method tries to make the bytes more unpredictable by drawing from data that
   * can be collected from the user of the browser, eg: mouse movement.
   *
   * If a callback is given, this method will be called asynchronously.
   *
   * @param count the number of random bytes to get.
   * @param [callback(err, bytes)] called once the operation completes.
   *
   * @return the random bytes in a string.
   */
  ctx.getBytes = function(count, callback) {
    return ctx.generate(count, callback);
  };

  /**
   * Gets random bytes asynchronously. If a native secure crypto API is
   * unavailable, this method tries to make the bytes more unpredictable by
   * drawing from data that can be collected from the user of the browser,
   * eg: mouse movement.
   *
   * @param count the number of random bytes to get.
   *
   * @return the random bytes in a string.
   */
  ctx.getBytesSync = function(count) {
    return ctx.generate(count);
  };

  return ctx;
}

// create default prng context
var _ctx = spawnPrng();

// add other sources of entropy only if window.crypto.getRandomValues is not
// available -- otherwise this source will be automatically used by the prng
var getRandomValues = null;
var globalScope = forge$8.util.globalScope;
var _crypto = globalScope.crypto || globalScope.msCrypto;
if(_crypto && _crypto.getRandomValues) {
  getRandomValues = function(arr) {
    return _crypto.getRandomValues(arr);
  };
}

if((!forge$8.util.isNodejs && !getRandomValues)) {

  // get load time entropy
  _ctx.collectInt(+new Date(), 32);

  // add some entropy from navigator object
  if(typeof(navigator) !== 'undefined') {
    var _navBytes = '';
    for(var key in navigator) {
      try {
        if(typeof(navigator[key]) == 'string') {
          _navBytes += navigator[key];
        }
      } catch(e) {
        /* Some navigator keys might not be accessible, e.g. the geolocation
          attribute throws an exception if touched in Mozilla chrome://
          context.

          Silently ignore this and just don't use this as a source of
          entropy. */
      }
    }
    _ctx.collect(_navBytes);
    _navBytes = null;
  }

  // add mouse and keyboard collectors if jquery is available
  if(jQuery) {
    // set up mouse entropy capture
    jQuery().mousemove(function(e) {
      // add mouse coords
      _ctx.collectInt(e.clientX, 16);
      _ctx.collectInt(e.clientY, 16);
    });

    // set up keyboard entropy capture
    jQuery().keypress(function(e) {
      _ctx.collectInt(e.charCode, 8);
    });
  }
}

/* Random API */
if(!forge$8.random) {
  forge$8.random = _ctx;
} else {
  // extend forge.random with _ctx
  for(var key in _ctx) {
    forge$8.random[key] = _ctx[key];
  }
}

// expose spawn PRNG
forge$8.random.createInstance = spawnPrng;

})(typeof(jQuery) !== 'undefined' ? jQuery : null);

})();
|
|
|
|
/**
|
|
* RC2 implementation.
|
|
*
|
|
* @author Stefan Siegl
|
|
*
|
|
* Copyright (c) 2012 Stefan Siegl <stesie@brokenpipe.de>
|
|
*
|
|
* Information on the RC2 cipher is available from RFC #2268,
|
|
* http://www.ietf.org/rfc/rfc2268.txt
|
|
*/
|
|
|
|
var forge$7 = forge$m;

// RC2 PITABLE: the 256-entry byte substitution table from RFC 2268,
// section 2, used during key expansion
var piTable = [
  0xd9, 0x78, 0xf9, 0xc4, 0x19, 0xdd, 0xb5, 0xed, 0x28, 0xe9, 0xfd, 0x79, 0x4a, 0xa0, 0xd8, 0x9d,
  0xc6, 0x7e, 0x37, 0x83, 0x2b, 0x76, 0x53, 0x8e, 0x62, 0x4c, 0x64, 0x88, 0x44, 0x8b, 0xfb, 0xa2,
  0x17, 0x9a, 0x59, 0xf5, 0x87, 0xb3, 0x4f, 0x13, 0x61, 0x45, 0x6d, 0x8d, 0x09, 0x81, 0x7d, 0x32,
  0xbd, 0x8f, 0x40, 0xeb, 0x86, 0xb7, 0x7b, 0x0b, 0xf0, 0x95, 0x21, 0x22, 0x5c, 0x6b, 0x4e, 0x82,
  0x54, 0xd6, 0x65, 0x93, 0xce, 0x60, 0xb2, 0x1c, 0x73, 0x56, 0xc0, 0x14, 0xa7, 0x8c, 0xf1, 0xdc,
  0x12, 0x75, 0xca, 0x1f, 0x3b, 0xbe, 0xe4, 0xd1, 0x42, 0x3d, 0xd4, 0x30, 0xa3, 0x3c, 0xb6, 0x26,
  0x6f, 0xbf, 0x0e, 0xda, 0x46, 0x69, 0x07, 0x57, 0x27, 0xf2, 0x1d, 0x9b, 0xbc, 0x94, 0x43, 0x03,
  0xf8, 0x11, 0xc7, 0xf6, 0x90, 0xef, 0x3e, 0xe7, 0x06, 0xc3, 0xd5, 0x2f, 0xc8, 0x66, 0x1e, 0xd7,
  0x08, 0xe8, 0xea, 0xde, 0x80, 0x52, 0xee, 0xf7, 0x84, 0xaa, 0x72, 0xac, 0x35, 0x4d, 0x6a, 0x2a,
  0x96, 0x1a, 0xd2, 0x71, 0x5a, 0x15, 0x49, 0x74, 0x4b, 0x9f, 0xd0, 0x5e, 0x04, 0x18, 0xa4, 0xec,
  0xc2, 0xe0, 0x41, 0x6e, 0x0f, 0x51, 0xcb, 0xcc, 0x24, 0x91, 0xaf, 0x50, 0xa1, 0xf4, 0x70, 0x39,
  0x99, 0x7c, 0x3a, 0x85, 0x23, 0xb8, 0xb4, 0x7a, 0xfc, 0x02, 0x36, 0x5b, 0x25, 0x55, 0x97, 0x31,
  0x2d, 0x5d, 0xfa, 0x98, 0xe3, 0x8a, 0x92, 0xae, 0x05, 0xdf, 0x29, 0x10, 0x67, 0x6c, 0xba, 0xc9,
  0xd3, 0x00, 0xe6, 0xcf, 0xe1, 0x9e, 0xa8, 0x2c, 0x63, 0x16, 0x01, 0x3f, 0x58, 0xe2, 0x89, 0xa9,
  0x0d, 0x38, 0x34, 0x1b, 0xab, 0x33, 0xff, 0xb0, 0xbb, 0x48, 0x0c, 0x5f, 0xb9, 0xb1, 0xcd, 0x2e,
  0xc5, 0xf3, 0xdb, 0x47, 0xe5, 0xa5, 0x9c, 0x77, 0x0a, 0xa6, 0x20, 0x68, 0xfe, 0x7f, 0xc1, 0xad
];

// per-word left-rotation amounts s[0..3] used by the RC2 mix rounds
var s = [1, 2, 3, 5];
|
|
|
|
/**
 * Rotates a 16-bit word left by the given number of bits.
 *
 * Bits shifted out on the left re-enter on the right-hand side.
 *
 * @param word The word to rotate.
 * @param bits The number of bits to rotate by.
 * @return The rotated 16-bit word.
 */
var rol = function(word, bits) {
  var shiftedLeft = (word << bits) & 0xffff;
  var wrapped = (word & 0xffff) >> (16 - bits);
  return shiftedLeft | wrapped;
};
|
|
|
|
/**
 * Rotates a 16-bit word right by the given number of bits.
 *
 * Bits shifted out on the right re-enter on the left-hand side.
 *
 * @param word The word to rotate.
 * @param bits The number of bits to rotate by.
 * @return The rotated 16-bit word.
 */
var ror = function(word, bits) {
  var shiftedRight = (word & 0xffff) >> bits;
  var wrapped = (word << (16 - bits)) & 0xffff;
  return shiftedRight | wrapped;
};
|
|
|
|
/* RC2 API namespace (created on demand) */
forge$7.rc2 = forge$7.rc2 || {};
|
|
|
|
/**
 * Perform RC2 key expansion as per RFC #2268, section 2.
 *
 * @param key variable-length user key (between 1 and 128 bytes)
 * @param effKeyBits number of effective key bits (default: 128)
 * @return the expanded RC2 key (ByteBuffer of 128 bytes)
 */
forge$7.rc2.expandKey = function(key, effKeyBits) {
  if(typeof key === 'string') {
    key = forge$7.util.createBuffer(key);
  }
  effKeyBits = effKeyBits || 128;

  /* introduce variables that match the names used in RFC #2268 */
  var L = key;          // key buffer, expanded in place to 128 bytes
  var T = key.length(); // user key length in bytes
  var T1 = effKeyBits;  // effective key length in bits
  var T8 = Math.ceil(T1 / 8); // effective key length in bytes
  // NOTE(review): RFC 2268 defines TM as 255 MOD 2^(8 + T1 - 8*T8); this
  // shift only coincides with that for the common effective sizes where
  // T1 is a multiple of 8 (e.g. 40/64/128 bits) -- confirm before relying
  // on other effective key sizes.
  var TM = 0xff >> (T1 & 0x07);
  var i;

  // phase 1: extend the key forward to 128 bytes via the PI table
  for(i = T; i < 128; i++) {
    L.putByte(piTable[(L.at(i - 1) + L.at(i - T)) & 0xff]);
  }

  // phase 2: reduce the effective key size by masking byte 128 - T8
  L.setAt(128 - T8, piTable[L.at(128 - T8) & TM]);

  // phase 3: mix backwards so every byte depends on the effective key
  for(i = 127 - T8; i >= 0; i--) {
    L.setAt(i, piTable[L.at(i + 1) ^ L.at(i + T8)]);
  }

  return L;
};
|
|
|
|
/**
 * Creates a RC2 cipher object.
 *
 * @param key the symmetric key to use (as base for key generation).
 * @param bits the number of effective key bits.
 * @param encrypt false for decryption, true for encryption.
 *
 * @return the cipher.
 */
var createCipher = function(key, bits, encrypt) {
  // closure state: finishing flag, input/output buffers, CBC IV buffer
  var _finish = false, _input = null, _output = null, _iv = null;
  var mixRound, mashRound;
  // `j` indexes into K during mix rounds and is shared across the round
  // functions below (reset per block by runPlan)
  var i, j, K = [];

  /* Expand key and fill into K[] Array (64 little-endian 16-bit words) */
  key = forge$7.rc2.expandKey(key, bits);
  for(i = 0; i < 64; i++) {
    K.push(key.getInt16Le());
  }

  if(encrypt) {
    /**
     * Perform one mixing round "in place".
     *
     * @param R Array of four words to perform mixing on.
     */
    mixRound = function(R) {
      for(i = 0; i < 4; i++) {
        R[i] += K[j] + (R[(i + 3) % 4] & R[(i + 2) % 4]) +
          ((~R[(i + 3) % 4]) & R[(i + 1) % 4]);
        R[i] = rol(R[i], s[i]);
        j++;
      }
    };

    /**
     * Perform one mashing round "in place".
     *
     * @param R Array of four words to perform mashing on.
     */
    mashRound = function(R) {
      for(i = 0; i < 4; i++) {
        R[i] += K[R[(i + 3) % 4] & 63];
      }
    };
  } else {
    /**
     * Perform one r-mixing round "in place" (inverse of mixRound).
     *
     * @param R Array of four words to perform mixing on.
     */
    mixRound = function(R) {
      for(i = 3; i >= 0; i--) {
        R[i] = ror(R[i], s[i]);
        R[i] -= K[j] + (R[(i + 3) % 4] & R[(i + 2) % 4]) +
          ((~R[(i + 3) % 4]) & R[(i + 1) % 4]);
        j--;
      }
    };

    /**
     * Perform one r-mashing round "in place" (inverse of mashRound).
     *
     * @param R Array of four words to perform mashing on.
     */
    mashRound = function(R) {
      for(i = 3; i >= 0; i--) {
        R[i] -= K[R[(i + 3) % 4] & 63];
      }
    };
  }

  /**
   * Run the specified cipher execution plan.
   *
   * This function takes four words from the input buffer, applies the IV on
   * it (if requested) and runs the provided execution plan.
   *
   * The plan must be put together in form of a array of arrays. Where the
   * outer one is simply a list of steps to perform and the inner one needs
   * to have two elements: the first one telling how many rounds to perform,
   * the second one telling what to do (i.e. the function to call).
   *
   * @param {Array} plan The plan to execute.
   */
  var runPlan = function(plan) {
    var R = [];

    /* Get data from input buffer and fill the four words into R */
    for(i = 0; i < 4; i++) {
      var val = _input.getInt16Le();

      if(_iv !== null) {
        if(encrypt) {
          /* We're encrypting, apply the IV first. */
          val ^= _iv.getInt16Le();
        } else {
          /* We're decryption, keep cipher text for next block. */
          _iv.putInt16Le(val);
        }
      }

      R.push(val & 0xffff);
    }

    /* Reset global "j" variable as per spec. */
    j = encrypt ? 0 : 63;

    /* Run execution plan. */
    for(var ptr = 0; ptr < plan.length; ptr++) {
      for(var ctr = 0; ctr < plan[ptr][0]; ctr++) {
        plan[ptr][1](R);
      }
    }

    /* Write back result to output buffer. */
    for(i = 0; i < 4; i++) {
      if(_iv !== null) {
        if(encrypt) {
          /* We're encrypting in CBC-mode, feed back encrypted bytes into
            IV buffer to carry it forward to next block. */
          _iv.putInt16Le(R[i]);
        } else {
          R[i] ^= _iv.getInt16Le();
        }
      }

      _output.putInt16Le(R[i]);
    }
  };

  /* Create cipher object */
  var cipher = null;
  cipher = {
    /**
     * Starts or restarts the encryption or decryption process, whichever
     * was previously configured.
     *
     * To use the cipher in CBC mode, iv may be given either as a string
     * of bytes, or as a byte buffer. For ECB mode, give null as iv.
     *
     * @param iv the initialization vector to use, null for ECB mode.
     * @param output the output the buffer to write to, null to create one.
     */
    start: function(iv, output) {
      if(iv) {
        /* CBC mode */
        if(typeof iv === 'string') {
          iv = forge$7.util.createBuffer(iv);
        }
      }

      _finish = false;
      _input = forge$7.util.createBuffer();
      // NOTE(review): `new` on the createBuffer factory -- harmless since
      // the factory returns an object, but looks unintentional; confirm.
      _output = output || new forge$7.util.createBuffer();
      _iv = iv;

      cipher.output = _output;
    },

    /**
     * Updates the next block.
     *
     * @param input the buffer to read from.
     */
    update: function(input) {
      if(!_finish) {
        // not finishing, so fill the input buffer with more input
        _input.putBuffer(input);
      }

      // process all complete 8-byte blocks with the standard RC2 schedule:
      // 5 mix, 1 mash, 6 mix, 1 mash, 5 mix
      while(_input.length() >= 8) {
        runPlan([
          [ 5, mixRound ],
          [ 1, mashRound ],
          [ 6, mixRound ],
          [ 1, mashRound ],
          [ 5, mixRound ]
        ]);
      }
    },

    /**
     * Finishes encrypting or decrypting.
     *
     * @param pad a padding function to use, null for PKCS#7 padding,
     *          signature(blockSize, buffer, decrypt).
     *
     * @return true if successful, false on error.
     */
    finish: function(pad) {
      var rval = true;

      if(encrypt) {
        if(pad) {
          rval = pad(8, _input, !encrypt);
        } else {
          // add PKCS#7 padding to block (each pad byte is the
          // value of the number of pad bytes)
          var padding = (_input.length() === 8) ? 8 : (8 - _input.length());
          _input.fillWithByte(padding, padding);
        }
      }

      if(rval) {
        // do final update
        _finish = true;
        cipher.update();
      }

      if(!encrypt) {
        // check for error: input data not a multiple of block size
        rval = (_input.length() === 0);
        if(rval) {
          if(pad) {
            rval = pad(8, _output, !encrypt);
          } else {
            // ensure padding byte count is valid
            var len = _output.length();
            var count = _output.at(len - 1);

            if(count > len) {
              rval = false;
            } else {
              // trim off padding bytes
              _output.truncate(count);
            }
          }
        }
      }

      return rval;
    }
  };

  return cipher;
};
|
|
|
|
/**
 * Creates an RC2 cipher object to encrypt data in ECB or CBC mode using the
 * given symmetric key and immediately starts it. The output will be stored
 * in the 'output' member of the returned cipher.
 *
 * The key and iv may be given as a string of bytes or a byte buffer.
 * The cipher is initialized to use 128 effective key bits.
 *
 * @param key the symmetric key to use.
 * @param iv the initialization vector to use.
 * @param output the buffer to write to, null to create one.
 *
 * @return the started cipher.
 */
forge$7.rc2.startEncrypting = function(key, iv, output) {
  var c = forge$7.rc2.createEncryptionCipher(key, 128);
  c.start(iv, output);
  return c;
};
|
|
|
|
/**
 * Creates an RC2 cipher object for encrypting in ECB or CBC mode with the
 * given symmetric key (string of bytes or byte buffer).
 *
 * The cipher is not started; call start() on it with an iv and an optional
 * output buffer to begin encrypting.
 *
 * @param key the symmetric key to use.
 * @param bits the number of effective key bits.
 *
 * @return the cipher.
 */
forge$7.rc2.createEncryptionCipher = function(key, bits) {
  // third argument selects encrypt mode
  return createCipher(key, bits, true);
};
|
|
|
|
/**
 * Creates and immediately starts an RC2 cipher that decrypts data in ECB or
 * CBC mode with the given symmetric key, using 128 effective key bits.
 * Plaintext accumulates in the returned cipher's 'output' member.
 *
 * @param key the symmetric key to use (string of bytes or byte buffer).
 * @param iv the initialization vector to use (string of bytes or byte buffer).
 * @param output the buffer to write to, null to create one.
 *
 * @return the started cipher.
 */
forge$7.rc2.startDecrypting = function(key, iv, output) {
  // fixed at 128 effective key bits, per this helper's documented contract
  var c = forge$7.rc2.createDecryptionCipher(key, 128);
  c.start(iv, output);
  return c;
};
|
|
|
|
/**
 * Creates an RC2 cipher object for decrypting in ECB or CBC mode with the
 * given symmetric key (string of bytes or byte buffer).
 *
 * The cipher is not started; call start() on it with an iv and an optional
 * output buffer to begin decrypting.
 *
 * @param key the symmetric key to use.
 * @param bits the number of effective key bits.
 *
 * @return the cipher.
 */
forge$7.rc2.createDecryptionCipher = function(key, bits) {
  // third argument selects decrypt mode
  return createCipher(key, bits, false);
};
|
|
|
|
// Copyright (c) 2005 Tom Wu
|
|
// All Rights Reserved.
|
|
// See "LICENSE" for details.
|
|
|
|
// Basic JavaScript BN library - subset useful for RSA encryption.
|
|
|
|
/*
|
|
Licensing (LICENSE)
|
|
-------------------
|
|
|
|
This software is covered under the following copyright:
|
|
*/
|
|
/*
|
|
* Copyright (c) 2003-2005 Tom Wu
|
|
* All Rights Reserved.
|
|
*
|
|
* Permission is hereby granted, free of charge, to any person obtaining
|
|
* a copy of this software and associated documentation files (the
|
|
* "Software"), to deal in the Software without restriction, including
|
|
* without limitation the rights to use, copy, modify, merge, publish,
|
|
* distribute, sublicense, and/or sell copies of the Software, and to
|
|
* permit persons to whom the Software is furnished to do so, subject to
|
|
* the following conditions:
|
|
*
|
|
* The above copyright notice and this permission notice shall be
|
|
* included in all copies or substantial portions of the Software.
|
|
*
|
|
* THE SOFTWARE IS PROVIDED "AS-IS" AND WITHOUT WARRANTY OF ANY KIND,
|
|
* EXPRESS, IMPLIED OR OTHERWISE, INCLUDING WITHOUT LIMITATION, ANY
|
|
* WARRANTY OF MERCHANTABILITY OR FITNESS FOR A PARTICULAR PURPOSE.
|
|
*
|
|
* IN NO EVENT SHALL TOM WU BE LIABLE FOR ANY SPECIAL, INCIDENTAL,
|
|
* INDIRECT OR CONSEQUENTIAL DAMAGES OF ANY KIND, OR ANY DAMAGES WHATSOEVER
|
|
* RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER OR NOT ADVISED OF
|
|
* THE POSSIBILITY OF DAMAGE, AND ON ANY THEORY OF LIABILITY, ARISING OUT
|
|
* OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
|
|
*
|
|
* In addition, the following condition applies:
|
|
*
|
|
* All redistributions must retain an intact copy of this copyright notice
|
|
* and disclaimer.
|
|
*/
|
|
/*
|
|
Address all questions regarding this license to:
|
|
|
|
Tom Wu
|
|
tjw@cs.Stanford.EDU
|
|
*/
|
|
var forge$6 = forge$m;

// namespace for the jsbn BigInteger implementation
forge$6.jsbn = forge$6.jsbn || {};

// Bits per digit; assigned below once the fastest `am` routine is selected
// for this environment.
var dbits;

// (public) Constructor.
// a: a number (bit length), a string, or a byte array; b: a radix (with a
// string/array) or extra constructor argument; c: extra argument forwarded
// to fromNumber. Digits live in this.data, least significant at index 0.
// Note the braceless chained if/else: the `else` branches bind to the inner
// `if("number" == typeof a)`.
function BigInteger$2(a,b,c) {
  this.data = [];
  if(a != null)
    if("number" == typeof a) this.fromNumber(a,b,c);
    else if(b == null && "string" != typeof a) this.fromString(a,256);
    else this.fromString(a,b);
}
forge$6.jsbn.BigInteger = BigInteger$2;

// return new, unset BigInteger (no digits, no sign yet)
function nbi() { return new BigInteger$2(null); }
|
|
|
|
// am: Compute w_j += (x*this_i), propagate carries,
// c is initial carry, returns final carry.
// c < 3*dvalue, x < 2*dvalue, this_i < dvalue
// We need to select the fastest one that works in this environment.

// am1: use a single mult and divide to get the high bits,
// max digit bits should be 26 because
// max internal value = 2*dvalue^2-2*dvalue (< 2^53)
function am1(i,x,w,j,c,n) {
  while(--n >= 0) {
    // full product fits in an IEEE double mantissa; split with divide/mask
    var v = x*this.data[i++]+w.data[j]+c;
    c = Math.floor(v/0x4000000);
    w.data[j++] = v&0x3ffffff;
  }
  return c;
}
// am2 avoids a big mult-and-extract completely.
// Max digit bits should be <= 30 because we do bitwise ops
// on values up to 2*hdvalue^2-hdvalue-1 (< 2^31)
function am2(i,x,w,j,c,n) {
  // split x into 15-bit halves and cross-multiply so every intermediate
  // stays within 31 bits (safe for JS bitwise ops)
  var xl = x&0x7fff, xh = x>>15;
  while(--n >= 0) {
    var l = this.data[i]&0x7fff;
    var h = this.data[i++]>>15;
    var m = xh*l+h*xl;
    l = xl*l+((m&0x7fff)<<15)+w.data[j]+(c&0x3fffffff);
    c = (l>>>30)+(m>>>15)+xh*h+(c>>>30);
    w.data[j++] = l&0x3fffffff;
  }
  return c;
}
// Alternately, set max digit bits to 28 since some
// browsers slow down when dealing with 32-bit numbers.
function am3(i,x,w,j,c,n) {
  // same halving trick as am2, but with 14-bit halves / 28-bit digits
  var xl = x&0x3fff, xh = x>>14;
  while(--n >= 0) {
    var l = this.data[i]&0x3fff;
    var h = this.data[i++]>>14;
    var m = xh*l+h*xl;
    l = xl*l+((m&0x3fff)<<14)+w.data[j]+c;
    c = (l>>28)+(m>>14)+xh*h;
    w.data[j++] = l&0xfffffff;
  }
  return c;
}

// Select the am routine and digit size for this environment.
// node.js (no browser)
if(typeof(navigator) === 'undefined')
{
  BigInteger$2.prototype.am = am3;
  dbits = 28;
} else if((navigator.appName == "Microsoft Internet Explorer")) {
  BigInteger$2.prototype.am = am2;
  dbits = 30;
} else if((navigator.appName != "Netscape")) {
  BigInteger$2.prototype.am = am1;
  dbits = 26;
} else { // Mozilla/Netscape seems to prefer am3
  BigInteger$2.prototype.am = am3;
  dbits = 28;
}
|
|
|
|
BigInteger$2.prototype.DB = dbits;           // bits per digit
BigInteger$2.prototype.DM = ((1<<dbits)-1);  // digit mask (all ones)
BigInteger$2.prototype.DV = (1<<dbits);      // digit radix

// constants for estimating quotient digits with IEEE doubles (52-bit mantissa)
var BI_FP = 52;
BigInteger$2.prototype.FV = Math.pow(2,BI_FP);
BigInteger$2.prototype.F1 = BI_FP-dbits;
BigInteger$2.prototype.F2 = 2*dbits-BI_FP;

// Digit conversions:
// BI_RM maps a digit value to its character; BI_RC maps a character code
// back to a digit value, accepting both lower- and upper-case letters.
var BI_RM = "0123456789abcdefghijklmnopqrstuvwxyz";
var BI_RC = new Array();
var rr,vv;
rr = "0".charCodeAt(0);
for(vv = 0; vv <= 9; ++vv) BI_RC[rr++] = vv;
rr = "a".charCodeAt(0);
for(vv = 10; vv < 36; ++vv) BI_RC[rr++] = vv;
rr = "A".charCodeAt(0);
for(vv = 10; vv < 36; ++vv) BI_RC[rr++] = vv;

// digit value -> character
function int2char(n) { return BI_RM.charAt(n); }
// character at s[i] -> digit value, or -1 if it is not a valid digit
function intAt(s,i) {
  var c = BI_RC[s.charCodeAt(i)];
  return (c==null)?-1:c;
}
|
|
|
|
// (protected) copy this BigInteger's digits, length, and sign into r
function bnpCopyTo(r) {
  var k = this.t;
  while(--k >= 0) {
    r.data[k] = this.data[k];
  }
  r.t = this.t;
  r.s = this.s;
}

// (protected) set from an integer value x, where -DV <= x < DV
function bnpFromInt(x) {
  this.t = 1;
  this.s = (x < 0) ? -1 : 0;
  if(x > 0) {
    this.data[0] = x;
  } else if(x < -1) {
    this.data[0] = x + this.DV;
  } else {
    // 0 and -1 are represented with no digits (sign word only)
    this.t = 0;
  }
}

// return a BigInteger initialized to the given small integer value
function nbv(i) {
  var r = nbi();
  r.fromInt(i);
  return r;
}
|
|
|
|
// (protected) set from string and radix. Radices 2, 4, 8, 16, 32 and 256
// (byte array) are parsed by direct bit packing; anything else is delegated
// to fromRadix.
function bnpFromString(s,b) {
  var k; // bits contributed per input character/byte
  if(b == 16) k = 4;
  else if(b == 8) k = 3;
  else if(b == 256) k = 8; // byte array
  else if(b == 2) k = 1;
  else if(b == 32) k = 5;
  else if(b == 4) k = 2;
  else { this.fromRadix(s,b); return; }
  this.t = 0;
  this.s = 0;
  // scan from the end (least significant) packing k bits at a time;
  // sh is the current bit offset within the top digit
  var i = s.length, mi = false, sh = 0;
  while(--i >= 0) {
    var x = (k==8)?s[i]&0xff:intAt(s,i);
    if(x < 0) {
      // non-digit character: only a leading "-" is meaningful
      if(s.charAt(i) == "-") mi = true;
      continue;
    }
    mi = false;
    if(sh == 0)
      this.data[this.t++] = x;
    else if(sh+k > this.DB) {
      // value straddles a digit boundary: split it across two digits
      this.data[this.t-1] |= (x&((1<<(this.DB-sh))-1))<<sh;
      this.data[this.t++] = (x>>(this.DB-sh));
    } else
      this.data[this.t-1] |= x<<sh;
    sh += k;
    if(sh >= this.DB) sh -= this.DB;
  }
  if(k == 8 && (s[0]&0x80) != 0) {
    // byte arrays are two's complement: a set high bit means negative,
    // so sign-extend the top partial digit
    this.s = -1;
    if(sh > 0) this.data[this.t-1] |= ((1<<(this.DB-sh))-1)<<sh;
  }
  this.clamp();
  if(mi) BigInteger$2.ZERO.subTo(this,this);
}
|
|
|
|
// (protected) clamp off excess high words: drop leading digits equal to the
// sign-extension word so t indexes only significant digits.
function bnpClamp() {
  var signWord = this.s & this.DM;
  while(this.t > 0 && this.data[this.t - 1] == signWord) {
    --this.t;
  }
}
|
|
|
|
// (public) return string representation in given radix. Power-of-two radices
// are handled by direct bit extraction; anything else goes through toRadix.
function bnToString(b) {
  if(this.s < 0) return "-"+this.negate().toString(b);
  var k; // bits per output character
  if(b == 16) k = 4;
  else if(b == 8) k = 3;
  else if(b == 2) k = 1;
  else if(b == 32) k = 5;
  else if(b == 4) k = 2;
  else return this.toRadix(b);
  // walk the digits from the top, emitting k bits per character;
  // m stays false until the first non-zero character (leading-zero suppression)
  var km = (1<<k)-1, d, m = false, r = "", i = this.t;
  var p = this.DB-(i*this.DB)%k;
  if(i-- > 0) {
    if(p < this.DB && (d = this.data[i]>>p) > 0) { m = true; r = int2char(d); }
    while(i >= 0) {
      if(p < k) {
        // output character straddles a digit boundary
        d = (this.data[i]&((1<<p)-1))<<(k-p);
        d |= this.data[--i]>>(p+=this.DB-k);
      } else {
        d = (this.data[i]>>(p-=k))&km;
        if(p <= 0) { p += this.DB; --i; }
      }
      if(d > 0) m = true;
      if(m) r += int2char(d);
    }
  }
  return m?r:"0";
}
|
|
|
|
// (public) -this
function bnNegate() {
  var r = nbi();
  BigInteger$2.ZERO.subTo(this, r);
  return r;
}

// (public) |this| (returns this itself when already non-negative)
function bnAbs() {
  return (this.s < 0) ? this.negate() : this;
}

// (public) sign of (this - a): positive if this > a, negative if this < a,
// 0 if equal. Compares sign, then digit count, then digits from the top.
function bnCompareTo(a) {
  var d = this.s - a.s;
  if(d != 0) return d;
  var k = this.t;
  d = k - a.t;
  if(d != 0) return (this.s < 0) ? -d : d;
  while(--k >= 0) {
    d = this.data[k] - a.data[k];
    if(d != 0) return d;
  }
  return 0;
}
|
|
|
|
// returns bit length of the integer x via a binary search over shift
// amounts (note: returns 1 for x == 0, matching historical jsbn behavior)
function nbits(x) {
  var bits = 1;
  var chunk;
  if((chunk = x >>> 16) != 0) { x = chunk; bits += 16; }
  if((chunk = x >> 8) != 0) { x = chunk; bits += 8; }
  if((chunk = x >> 4) != 0) { x = chunk; bits += 4; }
  if((chunk = x >> 2) != 0) { x = chunk; bits += 2; }
  if((chunk = x >> 1) != 0) { x = chunk; bits += 1; }
  return bits;
}

// (public) return the number of bits in "this"
function bnBitLength() {
  if(this.t <= 0) return 0;
  // xor the top digit with the sign word so only significant bits count
  var top = this.data[this.t - 1] ^ (this.s & this.DM);
  return this.DB * (this.t - 1) + nbits(top);
}
|
|
|
|
// (protected) r = this << n*DB — shift left by whole digits.
// Digits are copied top-down, then the low n slots are zero-filled.
function bnpDLShiftTo(n,r) {
  var idx;
  for(idx = this.t - 1; idx >= 0; --idx) {
    r.data[idx + n] = this.data[idx];
  }
  for(idx = n - 1; idx >= 0; --idx) {
    r.data[idx] = 0;
  }
  r.t = this.t + n;
  r.s = this.s;
}

// (protected) r = this >> n*DB — drop the low n digits.
function bnpDRShiftTo(n,r) {
  for(var src = n; src < this.t; ++src) {
    r.data[src - n] = this.data[src];
  }
  r.t = Math.max(this.t - n, 0);
  r.s = this.s;
}
|
|
|
|
// (protected) r = this << n (bit shift; n need not be a multiple of DB)
function bnpLShiftTo(n,r) {
  var bs = n%this.DB;       // shift amount within a digit
  var cbs = this.DB-bs;     // complementary shift
  var bm = (1<<cbs)-1;      // mask for bits that spill into the next digit
  var ds = Math.floor(n/this.DB), c = (this.s<<bs)&this.DM, i;
  for(i = this.t-1; i >= 0; --i) {
    r.data[i+ds+1] = (this.data[i]>>cbs)|c;
    c = (this.data[i]&bm)<<bs;
  }
  for(i = ds-1; i >= 0; --i) r.data[i] = 0;
  r.data[ds] = c;
  r.t = this.t+ds+1;
  r.s = this.s;
  r.clamp();
}

// (protected) r = this >> n (arithmetic shift: sign bits fill in from the top)
function bnpRShiftTo(n,r) {
  r.s = this.s;
  var ds = Math.floor(n/this.DB);
  // shifting past every stored digit leaves only the sign
  if(ds >= this.t) { r.t = 0; return; }
  var bs = n%this.DB;
  var cbs = this.DB-bs;
  var bm = (1<<bs)-1;
  r.data[0] = this.data[ds]>>bs;
  for(var i = ds+1; i < this.t; ++i) {
    // low bits of this digit complete the previous output digit
    r.data[i-ds-1] |= (this.data[i]&bm)<<cbs;
    r.data[i-ds] = this.data[i]>>bs;
  }
  if(bs > 0) r.data[this.t-ds-1] |= (this.s&bm)<<cbs;
  r.t = this.t-ds;
  r.clamp();
}
|
|
|
|
// (protected) r = this - a, digit by digit with borrow propagation.
// c acts as a carry/borrow word: after each `c >>= DB` it is 0 or -1.
function bnpSubTo(a,r) {
  var i = 0, c = 0, m = Math.min(a.t,this.t);
  // subtract over the digits both operands share
  while(i < m) {
    c += this.data[i]-a.data[i];
    r.data[i++] = c&this.DM;
    c >>= this.DB;
  }
  if(a.t < this.t) {
    // finish with this's digits, folding in a's sign extension
    c -= a.s;
    while(i < this.t) {
      c += this.data[i];
      r.data[i++] = c&this.DM;
      c >>= this.DB;
    }
    c += this.s;
  } else {
    // finish with a's digits, folding in this's sign extension
    c += this.s;
    while(i < a.t) {
      c -= a.data[i];
      r.data[i++] = c&this.DM;
      c >>= this.DB;
    }
    c -= a.s;
  }
  // final carry determines the sign and possibly one extra digit
  r.s = (c<0)?-1:0;
  if(c < -1) r.data[i++] = this.DV+c;
  else if(c > 0) r.data[i++] = c;
  r.t = i;
  r.clamp();
}
|
|
|
|
// (protected) r = this * a, r != this,a (HAC 14.12)
// "this" should be the larger one if appropriate.
function bnpMultiplyTo(a,r) {
  var x = this.abs(), y = a.abs();
  var i = x.t;
  r.t = i+y.t;
  while(--i >= 0) r.data[i] = 0;
  // schoolbook multiplication: one am() pass per digit of y
  for(i = 0; i < y.t; ++i) r.data[i+x.t] = x.am(0,y.data[i],r,i,0,x.t);
  r.s = 0;
  r.clamp();
  // the product is negative iff the operand signs differ
  if(this.s != a.s) BigInteger$2.ZERO.subTo(r,r);
}

// (protected) r = this^2, r != this (HAC 14.16)
// Exploits symmetry: each cross product x_i*x_j (i != j) is added once
// with a doubled multiplier instead of twice.
function bnpSquareTo(r) {
  var x = this.abs();
  var i = r.t = 2*x.t;
  while(--i >= 0) r.data[i] = 0;
  for(i = 0; i < x.t-1; ++i) {
    var c = x.am(i,x.data[i],r,2*i,0,1);
    // doubling 2*x.data[i] can overflow a digit; normalize immediately
    if((r.data[i+x.t]+=x.am(i+1,2*x.data[i],r,2*i+1,c,x.t-i-1)) >= x.DV) {
      r.data[i+x.t] -= x.DV;
      r.data[i+x.t+1] = 1;
    }
  }
  if(r.t > 0) r.data[r.t-1] += x.am(i,x.data[i],r,2*i,0,1);
  r.s = 0;
  r.clamp();
}
|
|
|
|
// (protected) divide this by m, quotient and remainder to q, r (HAC 14.20)
// r != q, this != m. q or r may be null.
function bnpDivRemTo(m,q,r) {
  var pm = m.abs();
  // division by zero: leave outputs untouched
  if(pm.t <= 0) return;
  var pt = this.abs();
  if(pt.t < pm.t) {
    // |this| < |m|: quotient is 0, remainder is this
    if(q != null) q.fromInt(0);
    if(r != null) this.copyTo(r);
    return;
  }
  if(r == null) r = nbi();
  var y = nbi(), ts = this.s, ms = m.s;
  var nsh = this.DB-nbits(pm.data[pm.t-1]); // normalize modulus
  if(nsh > 0) { pm.lShiftTo(nsh,y); pt.lShiftTo(nsh,r); } else { pm.copyTo(y); pt.copyTo(r); }
  var ys = y.t;
  var y0 = y.data[ys-1];
  if(y0 == 0) return;
  // yt approximates the top of y as a double, used to estimate quotient digits
  var yt = y0*(1<<this.F1)+((ys>1)?y.data[ys-2]>>this.F2:0);
  var d1 = this.FV/yt, d2 = (1<<this.F1)/yt, e = 1<<this.F2;
  var i = r.t, j = i-ys, t = (q==null)?nbi():q;
  y.dlShiftTo(j,t);
  if(r.compareTo(t) >= 0) {
    r.data[r.t++] = 1;
    r.subTo(t,r);
  }
  BigInteger$2.ONE.dlShiftTo(ys,t);
  t.subTo(y,y); // "negative" y so we can replace sub with am later
  while(y.t < ys) y.data[y.t++] = 0;
  while(--j >= 0) {
    // Estimate quotient digit
    var qd = (r.data[--i]==y0)?this.DM:Math.floor(r.data[i]*d1+(r.data[i-1]+e)*d2);
    if((r.data[i]+=y.am(0,qd,r,j,0,ys)) < qd) { // Try it out
      // estimate overshot: correct it downward
      y.dlShiftTo(j,t);
      r.subTo(t,r);
      while(r.data[i] < --qd) r.subTo(t,r);
    }
  }
  if(q != null) {
    r.drShiftTo(ys,q);
    // quotient sign: negative iff operand signs differ
    if(ts != ms) BigInteger$2.ZERO.subTo(q,q);
  }
  r.t = ys;
  r.clamp();
  if(nsh > 0) r.rShiftTo(nsh,r); // Denormalize remainder
  if(ts < 0) BigInteger$2.ZERO.subTo(r,r);
}
|
|
|
|
// (public) this mod a; for a > 0 the result is in [0, a)
function bnMod(a) {
  var r = nbi();
  this.abs().divRemTo(a,null,r);
  // for a negative dividend, fold the remainder back into the positive range
  if(this.s < 0 && r.compareTo(BigInteger$2.ZERO) > 0) a.subTo(r,r);
  return r;
}
|
|
|
|
// Modular reduction using the "classic" algorithm: plain divide-and-remainder.
function Classic(m) { this.m = m; }

// Map x into the reducer's representation — just x mod m here.
function cConvert(x) {
  if(x.s < 0 || x.compareTo(this.m) >= 0) {
    return x.mod(this.m);
  }
  return x;
}

// The classic representation is the value itself, so revert is the identity.
function cRevert(x) { return x; }

// x = x mod m, in place.
function cReduce(x) {
  x.divRemTo(this.m, null, x);
}

// r = (x * y) mod m
function cMulTo(x, y, r) {
  x.multiplyTo(y, r);
  this.reduce(r);
}

// r = (x^2) mod m
function cSqrTo(x, r) {
  x.squareTo(r);
  this.reduce(r);
}

Classic.prototype.convert = cConvert;
Classic.prototype.revert = cRevert;
Classic.prototype.reduce = cReduce;
Classic.prototype.mulTo = cMulTo;
Classic.prototype.sqrTo = cSqrTo;
|
|
|
|
// (protected) return "-1/this % 2^DB"; useful for Mont. reduction
// Newton/Hensel lifting: each step doubles the number of correct low bits.
// justification:
// xy == 1 (mod m)
// xy = 1+km
// xy(2-xy) = (1+km)(1-km)
// x[y(2-xy)] = 1-k^2m^2
// x[y(2-xy)] == 1 (mod m^2)
// if y is 1/x mod m, then y(2-xy) is 1/x mod m^2
// should reduce x and y(2-xy) by m^2 at each step to keep size bounded.
// JS multiply "overflows" differently from C/C++, so care is needed here.
function bnpInvDigit() {
  if(this.t < 1) return 0;
  var x = this.data[0];
  // even numbers have no inverse modulo a power of two
  if((x&1) == 0) return 0;
  var y = x&3; // y == 1/x mod 2^2
  y = (y*(2-(x&0xf)*y))&0xf; // y == 1/x mod 2^4
  y = (y*(2-(x&0xff)*y))&0xff; // y == 1/x mod 2^8
  y = (y*(2-(((x&0xffff)*y)&0xffff)))&0xffff; // y == 1/x mod 2^16
  // last step - calculate inverse mod DV directly;
  // assumes 16 < DB <= 32 and assumes ability to handle 48-bit ints
  y = (y*(2-x*y%this.DV))%this.DV; // y == 1/x mod 2^dbits
  // we really want the negative inverse, and -DV < y < DV
  return (y>0)?this.DV-y:-y;
}
|
|
|
|
// Montgomery reduction (HAC 14.32): values are kept as xR mod m so that
// reductions need no trial division. Requires an odd modulus (invDigit).
function Montgomery(m) {
  this.m = m;
  this.mp = m.invDigit();   // -1/m mod 2^DB
  // split mp into 15-bit halves so the multiplies in reduce() stay
  // within safe bitwise-op range
  this.mpl = this.mp&0x7fff;
  this.mph = this.mp>>15;
  this.um = (1<<(m.DB-15))-1;
  this.mt2 = 2*m.t;
}

// xR mod m
function montConvert(x) {
  var r = nbi();
  x.abs().dlShiftTo(this.m.t,r);
  r.divRemTo(this.m,null,r);
  // fold negative inputs back into [0, m)
  if(x.s < 0 && r.compareTo(BigInteger$2.ZERO) > 0) this.m.subTo(r,r);
  return r;
}

// x/R mod m
function montRevert(x) {
  var r = nbi();
  x.copyTo(r);
  this.reduce(r);
  return r;
}

// x = x/R mod m (HAC 14.32)
function montReduce(x) {
  while(x.t <= this.mt2) // pad x so am has enough room later
    x.data[x.t++] = 0;
  for(var i = 0; i < this.m.t; ++i) {
    // faster way of calculating u0 = x.data[i]*mp mod DV
    var j = x.data[i]&0x7fff;
    var u0 = (j*this.mpl+(((j*this.mph+(x.data[i]>>15)*this.mpl)&this.um)<<15))&x.DM;
    // use am to combine the multiply-shift-add into one call
    j = i+this.m.t;
    x.data[j] += this.m.am(0,u0,x,i,0,this.m.t);
    // propagate carry
    while(x.data[j] >= x.DV) { x.data[j] -= x.DV; x.data[++j]++; }
  }
  x.clamp();
  x.drShiftTo(this.m.t,x);
  if(x.compareTo(this.m) >= 0) x.subTo(this.m,x);
}

// r = "x^2/R mod m"; x != r
function montSqrTo(x,r) { x.squareTo(r); this.reduce(r); }

// r = "xy/R mod m"; x,y != r
function montMulTo(x,y,r) { x.multiplyTo(y,r); this.reduce(r); }

Montgomery.prototype.convert = montConvert;
Montgomery.prototype.revert = montRevert;
Montgomery.prototype.reduce = montReduce;
Montgomery.prototype.mulTo = montMulTo;
Montgomery.prototype.sqrTo = montSqrTo;
|
|
|
|
// (protected) true iff this is even; with no stored digits the sign word
// stands in for the low bit (0 or -1).
function bnpIsEven() {
  var low = (this.t > 0) ? (this.data[0] & 1) : this.s;
  return low == 0;
}
|
|
|
|
// (protected) this^e, e < 2^32, doing sqr and mul with "r" (HAC 14.79)
// z supplies the reduction strategy (Classic/Montgomery/Barrett/NullExp).
function bnpExp(e,z) {
  if(e > 0xffffffff || e < 1) return BigInteger$2.ONE;
  var r = nbi(), r2 = nbi(), g = z.convert(this), i = nbits(e)-1;
  g.copyTo(r);
  // square-and-multiply, scanning e's bits from high to low; r and r2 are
  // swapped instead of copied when no multiply is needed
  while(--i >= 0) {
    z.sqrTo(r,r2);
    if((e&(1<<i)) > 0) z.mulTo(r2,g,r);
    else { var t = r; r = r2; r2 = t; }
  }
  return z.revert(r);
}

// (public) this^e % m, 0 <= e < 2^32
function bnModPowInt(e,m) {
  var z;
  // Montgomery needs an odd modulus, and its setup cost is not worth it
  // for small exponents
  if(e < 256 || m.isEven()) z = new Classic(m); else z = new Montgomery(m);
  return this.exp(e,z);
}
|
|
|
|
// protected — internal helpers wired onto the prototype
BigInteger$2.prototype.copyTo = bnpCopyTo;
BigInteger$2.prototype.fromInt = bnpFromInt;
BigInteger$2.prototype.fromString = bnpFromString;
BigInteger$2.prototype.clamp = bnpClamp;
BigInteger$2.prototype.dlShiftTo = bnpDLShiftTo;
BigInteger$2.prototype.drShiftTo = bnpDRShiftTo;
BigInteger$2.prototype.lShiftTo = bnpLShiftTo;
BigInteger$2.prototype.rShiftTo = bnpRShiftTo;
BigInteger$2.prototype.subTo = bnpSubTo;
BigInteger$2.prototype.multiplyTo = bnpMultiplyTo;
BigInteger$2.prototype.squareTo = bnpSquareTo;
BigInteger$2.prototype.divRemTo = bnpDivRemTo;
BigInteger$2.prototype.invDigit = bnpInvDigit;
BigInteger$2.prototype.isEven = bnpIsEven;
BigInteger$2.prototype.exp = bnpExp;

// public API
BigInteger$2.prototype.toString = bnToString;
BigInteger$2.prototype.negate = bnNegate;
BigInteger$2.prototype.abs = bnAbs;
BigInteger$2.prototype.compareTo = bnCompareTo;
BigInteger$2.prototype.bitLength = bnBitLength;
BigInteger$2.prototype.mod = bnMod;
BigInteger$2.prototype.modPowInt = bnModPowInt;

// "constants" — shared singletons; must be created after the wiring above
BigInteger$2.ZERO = nbv(0);
BigInteger$2.ONE = nbv(1);
|
|
|
|
// jsbn2 lib

//Copyright (c) 2005-2009 Tom Wu
//All Rights Reserved.
//See "LICENSE" for details (See jsbn.js for LICENSE).

//Extended JavaScript BN functions, required for RSA private ops.

//Version 1.1: new BigInteger("0", 10) returns "proper" zero

//(public) return an independent copy of this BigInteger
function bnClone() { var r = nbi(); this.copyTo(r); return r; }

//(public) return value as a JS integer, using at most the low 32 bits
function bnIntValue() {
  if(this.s < 0) {
    // single-digit negatives map back through the digit radix
    if(this.t == 1) return this.data[0]-this.DV;
    else if(this.t == 0) return -1;
  } else if(this.t == 1) return this.data[0];
  else if(this.t == 0) return 0;
  // assumes 16 < DB < 32
  return ((this.data[1]&((1<<(32-this.DB))-1))<<this.DB)|this.data[0];
}
|
|
|
|
//(public) return value as a signed byte (low 8 bits, sign-extended)
function bnByteValue() {
  if(this.t == 0) return this.s;
  return (this.data[0] << 24) >> 24;
}

//(public) return value as a signed short (low 16 bits, sign-extended; assumes DB >= 16)
function bnShortValue() {
  if(this.t == 0) return this.s;
  return (this.data[0] << 16) >> 16;
}

//(protected) return the largest x such that r^x < DV
//(i.e. how many radix-r digits fit into one internal digit)
function bnpChunkSize(r) {
  return Math.floor(Math.LN2 * this.DB / Math.log(r));
}

//(public) sign: -1 if this < 0, 0 if this == 0, 1 if this > 0
function bnSigNum() {
  if(this.s < 0) return -1;
  if(this.t <= 0 || (this.t == 1 && this.data[0] <= 0)) return 0;
  return 1;
}
|
|
|
|
//(protected) convert to a string in an arbitrary radix (2..36)
function bnpToRadix(b) {
  if(b == null) b = 10;
  if(this.signum() == 0 || b < 2 || b > 36) return "0";
  var cs = this.chunkSize(b);   // radix-b digits produced per division
  var a = Math.pow(b,cs);
  var d = nbv(a), y = nbi(), z = nbi(), r = "";
  this.divRemTo(d,y,z);
  // peel off cs digits at a time; the (a + remainder) trick plus substr(1)
  // forces zero-padding of each chunk
  while(y.signum() > 0) {
    r = (a+z.intValue()).toString(b).substr(1) + r;
    y.divRemTo(d,y,z);
  }
  return z.intValue().toString(b) + r;
}

//(protected) convert from a string in an arbitrary radix
function bnpFromRadix(s,b) {
  this.fromInt(0);
  if(b == null) b = 10;
  var cs = this.chunkSize(b);
  var d = Math.pow(b,cs), mi = false, j = 0, w = 0;
  for(var i = 0; i < s.length; ++i) {
    var x = intAt(s,i);
    if(x < 0) {
      // only a leading "-" is honored; other non-digits are skipped
      if(s.charAt(i) == "-" && this.signum() == 0) mi = true;
      continue;
    }
    // accumulate cs digits in a plain number before touching the bignum
    w = b*w+x;
    if(++j >= cs) {
      this.dMultiply(d);
      this.dAddOffset(w,0);
      j = 0;
      w = 0;
    }
  }
  if(j > 0) {
    // flush the final partial chunk
    this.dMultiply(Math.pow(b,j));
    this.dAddOffset(w,0);
  }
  if(mi) BigInteger$2.ZERO.subTo(this,this);
}
|
|
|
|
//(protected) alternate constructor
function bnpFromNumber(a,b,c) {
  if("number" == typeof b) {
    // new BigInteger(int,int,RNG): random probable prime of a bits,
    // with certainty parameter b
    if(a < 2) this.fromInt(1);
    else {
      this.fromNumber(a,c);
      if(!this.testBit(a-1)) // force MSB set
        this.bitwiseTo(BigInteger$2.ONE.shiftLeft(a-1),op_or,this);
      if(this.isEven()) this.dAddOffset(1,0); // force odd
      // step by 2 until probably prime, wrapping back into a bits if needed
      while(!this.isProbablePrime(b)) {
        this.dAddOffset(2,0);
        if(this.bitLength() > a) this.subTo(BigInteger$2.ONE.shiftLeft(a-1),this);
      }
    }
  } else {
    // new BigInteger(int,RNG): uniformly random number of up to a bits
    var x = new Array(), t = a&7;
    x.length = (a>>3)+1;
    b.nextBytes(x);
    // mask the top byte so the result has at most a bits
    if(t > 0) x[0] &= ((1<<t)-1); else x[0] = 0;
    this.fromString(x,256);
  }
}
|
|
|
|
//(public) convert to a big-endian two's-complement byte array of minimal
//length (bytes are signed: -128..127)
function bnToByteArray() {
  var i = this.t, r = new Array();
  r[0] = this.s;
  var p = this.DB-(i*this.DB)%8, d, k = 0;
  if(i-- > 0) {
    // emit the top partial digit first, if it carries significant bits
    if(p < this.DB && (d = this.data[i]>>p) != (this.s&this.DM)>>p)
      r[k++] = d|(this.s<<(this.DB-p));
    while(i >= 0) {
      if(p < 8) {
        // output byte straddles a digit boundary
        d = (this.data[i]&((1<<p)-1))<<(8-p);
        d |= this.data[--i]>>(p+=this.DB-8);
      } else {
        d = (this.data[i]>>(p-=8))&0xff;
        if(p <= 0) { p += this.DB; --i; }
      }
      if((d&0x80) != 0) d |= -256;  // sign-extend the byte
      // keep one leading sign byte when the top data bit disagrees with s
      if(k == 0 && (this.s&0x80) != (d&0x80)) ++k;
      if(k > 0 || d != this.s) r[k++] = d;
    }
  }
  return r;
}
|
|
|
|
// (public) this == a
function bnEquals(a) {
  return this.compareTo(a) == 0;
}
// (public) min(this, a)
function bnMin(a) {
  return (this.compareTo(a) < 0) ? this : a;
}
// (public) max(this, a)
function bnMax(a) {
  return (this.compareTo(a) > 0) ? this : a;
}
|
|
|
|
//(protected) r = this op a (bitwise), sign-extending the shorter operand
function bnpBitwiseTo(a,op,r) {
  var i, f, m = Math.min(a.t,this.t);
  for(i = 0; i < m; ++i) r.data[i] = op(this.data[i],a.data[i]);
  if(a.t < this.t) {
    f = a.s&this.DM;  // a's sign-extension word
    for(i = m; i < this.t; ++i) r.data[i] = op(this.data[i],f);
    r.t = this.t;
  } else {
    f = this.s&this.DM;  // this's sign-extension word
    for(i = m; i < a.t; ++i) r.data[i] = op(f,a.data[i]);
    r.t = a.t;
  }
  // the result sign follows from applying op to the sign words
  r.s = op(this.s,a.s);
  r.clamp();
}

//(public) this & a
function op_and(x,y) { return x&y; }
function bnAnd(a) { var r = nbi(); this.bitwiseTo(a,op_and,r); return r; }

//(public) this | a
function op_or(x,y) { return x|y; }
function bnOr(a) { var r = nbi(); this.bitwiseTo(a,op_or,r); return r; }

//(public) this ^ a
function op_xor(x,y) { return x^y; }
function bnXor(a) { var r = nbi(); this.bitwiseTo(a,op_xor,r); return r; }

//(public) this & ~a
function op_andnot(x,y) { return x&~y; }
function bnAndNot(a) { var r = nbi(); this.bitwiseTo(a,op_andnot,r); return r; }

//(public) ~this
function bnNot() {
  var r = nbi();
  for(var i = 0; i < this.t; ++i) r.data[i] = this.DM&~this.data[i];
  r.t = this.t;
  r.s = ~this.s;
  return r;
}
|
|
|
|
//(public) this << n (a negative n shifts right instead)
function bnShiftLeft(n) {
  var r = nbi();
  if(n < 0) this.rShiftTo(-n,r); else this.lShiftTo(n,r);
  return r;
}

//(public) this >> n, arithmetic (a negative n shifts left instead)
function bnShiftRight(n) {
  var r = nbi();
  if(n < 0) this.lShiftTo(-n,r); else this.rShiftTo(n,r);
  return r;
}
|
|
|
|
//return the index of the lowest set bit in x, or -1 if x == 0 (x < 2^31);
//binary search over halves of the word
function lbit(x) {
  if(x == 0) return -1;
  var idx = 0;
  if((x & 0xffff) == 0) { x >>= 16; idx += 16; }
  if((x & 0xff) == 0) { x >>= 8; idx += 8; }
  if((x & 0xf) == 0) { x >>= 4; idx += 4; }
  if((x & 3) == 0) { x >>= 2; idx += 2; }
  if((x & 1) == 0) ++idx;
  return idx;
}

//(public) index of the lowest set bit, or -1 if this == 0
function bnGetLowestSetBit() {
  for(var i = 0; i < this.t; ++i) {
    if(this.data[i] != 0) {
      return i * this.DB + lbit(this.data[i]);
    }
  }
  // all stored digits are zero but the value is negative: the sign
  // extension supplies set bits starting right past the stored digits
  if(this.s < 0) return this.t * this.DB;
  return -1;
}
|
|
|
|
//return the number of set bits in x (Kernighan's clear-lowest-bit loop)
function cbit(x) {
  var count = 0;
  while(x != 0) {
    x &= x - 1; // clears the lowest set bit
    ++count;
  }
  return count;
}

//(public) number of bits that differ from the sign bit
function bnBitCount() {
  var total = 0;
  var signWord = this.s & this.DM;
  for(var i = 0; i < this.t; ++i) {
    total += cbit(this.data[i] ^ signWord);
  }
  return total;
}

//(public) true iff the nth bit is set
function bnTestBit(n) {
  var word = Math.floor(n / this.DB);
  if(word >= this.t) {
    // past the stored digits every bit matches the sign word
    return this.s != 0;
  }
  return (this.data[word] & (1 << (n % this.DB))) != 0;
}
|
|
|
|
//(protected) return this op (1<<n) as a new BigInteger (this is unchanged)
function bnpChangeBit(n,op) {
  var r = BigInteger$2.ONE.shiftLeft(n);
  this.bitwiseTo(r,op,r);
  return r;
}

//(public) this | (1<<n)
function bnSetBit(n) { return this.changeBit(n,op_or); }

//(public) this & ~(1<<n)
function bnClearBit(n) { return this.changeBit(n,op_andnot); }

//(public) this ^ (1<<n)
function bnFlipBit(n) { return this.changeBit(n,op_xor); }
|
|
|
|
//(protected) r = this + a, digit by digit with carry propagation
//(mirror image of bnpSubTo; c is the running carry word)
function bnpAddTo(a,r) {
  var i = 0, c = 0, m = Math.min(a.t,this.t);
  // add over the digits both operands share
  while(i < m) {
    c += this.data[i]+a.data[i];
    r.data[i++] = c&this.DM;
    c >>= this.DB;
  }
  if(a.t < this.t) {
    // finish with this's digits, folding in a's sign extension
    c += a.s;
    while(i < this.t) {
      c += this.data[i];
      r.data[i++] = c&this.DM;
      c >>= this.DB;
    }
    c += this.s;
  } else {
    // finish with a's digits, folding in this's sign extension
    c += this.s;
    while(i < a.t) {
      c += a.data[i];
      r.data[i++] = c&this.DM;
      c >>= this.DB;
    }
    c += a.s;
  }
  // final carry determines the sign and possibly one extra digit
  r.s = (c<0)?-1:0;
  if(c > 0) r.data[i++] = c;
  else if(c < -1) r.data[i++] = this.DV+c;
  r.t = i;
  r.clamp();
}
|
|
|
|
//(public) this + a
function bnAdd(a) { var r = nbi(); this.addTo(a,r); return r; }

//(public) this - a
function bnSubtract(a) { var r = nbi(); this.subTo(a,r); return r; }

//(public) this * a
function bnMultiply(a) { var r = nbi(); this.multiplyTo(a,r); return r; }

//(public) this / a
function bnDivide(a) { var r = nbi(); this.divRemTo(a,r,null); return r; }

//(public) this % a
function bnRemainder(a) { var r = nbi(); this.divRemTo(a,null,r); return r; }

//(public) [this/a,this%a] — both results from a single division pass
function bnDivideAndRemainder(a) {
  var q = nbi(), r = nbi();
  this.divRemTo(a,q,r);
  return new Array(q,r);
}
|
|
|
|
//(protected) this *= n, this >= 0, 1 < n < DV — in-place single-digit multiply
function bnpDMultiply(n) {
  // am with multiplier n-1 computes this*(n-1) added onto this, i.e. this*n
  this.data[this.t] = this.am(0,n-1,this,0,0,this.t);
  ++this.t;
  this.clamp();
}

//(protected) this += n << w words, this >= 0
function bnpDAddOffset(n,w) {
  if(n == 0) return;
  // grow the magnitude so digit w exists
  while(this.t <= w) this.data[this.t++] = 0;
  this.data[w] += n;
  // propagate carry upward, extending the magnitude as needed
  while(this.data[w] >= this.DV) {
    this.data[w] -= this.DV;
    if(++w >= this.t) this.data[this.t++] = 0;
    ++this.data[w];
  }
}
|
|
|
|
//A "null" reducer: applies no modular reduction at all, so exponentiation
//through it produces exact (unreduced) results.
function NullExp() {}
//identity conversion in both directions
function nNop(x) { return x; }
//r = x * y, unreduced
function nMulTo(x,y,r) { x.multiplyTo(y,r); }
//r = x^2, unreduced
function nSqrTo(x,r) { x.squareTo(r); }

NullExp.prototype.convert = nNop;
NullExp.prototype.revert = nNop;
NullExp.prototype.mulTo = nMulTo;
NullExp.prototype.sqrTo = nSqrTo;

//(public) this^e — exact exponentiation via the null reducer
function bnPow(e) {
  return this.exp(e, new NullExp());
}
|
|
|
|
//(protected) r = lower n words of "this * a", a.t <= n
//"this" should be the larger one if appropriate.
//(partial product used by Barrett reduction)
function bnpMultiplyLowerTo(a,n,r) {
  var i = Math.min(this.t+a.t,n);
  r.s = 0; // assumes a,this >= 0
  r.t = i;
  while(i > 0) r.data[--i] = 0;
  var j;
  // full columns first, then truncated columns past the cutoff
  for(j = r.t-this.t; i < j; ++i) r.data[i+this.t] = this.am(0,a.data[i],r,i,0,this.t);
  for(j = Math.min(a.t,n); i < j; ++i) this.am(0,a.data[i],r,i,0,n-i);
  r.clamp();
}

//(protected) r = "this * a" without lower n words, n > 0
//"this" should be the larger one if appropriate.
//(the complementary partial product, also for Barrett reduction)
function bnpMultiplyUpperTo(a,n,r) {
  --n;
  var i = r.t = this.t+a.t-n;
  r.s = 0; // assumes a,this >= 0
  while(--i >= 0) r.data[i] = 0;
  // only columns that contribute to words >= n are computed
  for(i = Math.max(n-this.t,0); i < a.t; ++i)
    r.data[this.t+i-n] = this.am(n-i,a.data[i],r,0,0,this.t+i-n);
  r.clamp();
  r.drShiftTo(1,r);
}
|
|
|
|
//Barrett modular reduction (HAC 14.42): trades the per-reduction division
//for two partial multiplications against a precomputed reciprocal.
function Barrett(m) {
  // setup Barrett
  this.r2 = nbi();
  this.q3 = nbi();
  // mu = floor(b^(2k) / m), computed once per modulus
  BigInteger$2.ONE.dlShiftTo(2*m.t,this.r2);
  this.mu = this.r2.divide(m);
  this.m = m;
}

function barrettConvert(x) {
  // out-of-range or negative values need a real mod; in-range values pass through
  if(x.s < 0 || x.t > 2*this.m.t) return x.mod(this.m);
  else if(x.compareTo(this.m) < 0) return x;
  else { var r = nbi(); x.copyTo(r); this.reduce(r); return r; }
}

// Barrett representation is the value itself
function barrettRevert(x) { return x; }

//x = x mod m (HAC 14.42)
function barrettReduce(x) {
  x.drShiftTo(this.m.t-1,this.r2);
  if(x.t > this.m.t+1) { x.t = this.m.t+1; x.clamp(); }
  // estimate the quotient with partial products against mu
  this.mu.multiplyUpperTo(this.r2,this.m.t+1,this.q3);
  this.m.multiplyLowerTo(this.q3,this.m.t+1,this.r2);
  while(x.compareTo(this.r2) < 0) x.dAddOffset(1,this.m.t+1);
  x.subTo(this.r2,x);
  // the estimate can leave x a few multiples of m too high; correct it
  while(x.compareTo(this.m) >= 0) x.subTo(this.m,x);
}

//r = x^2 mod m; x != r
function barrettSqrTo(x,r) { x.squareTo(r); this.reduce(r); }

//r = x*y mod m; x,y != r
function barrettMulTo(x,y,r) { x.multiplyTo(y,r); this.reduce(r); }

Barrett.prototype.convert = barrettConvert;
Barrett.prototype.revert = barrettRevert;
Barrett.prototype.reduce = barrettReduce;
Barrett.prototype.mulTo = barrettMulTo;
Barrett.prototype.sqrTo = barrettSqrTo;
|
|
|
|
// (public) this^e % m (HAC 14.85) — sliding-window modular exponentiation
function bnModPow(e,m) {
  var i = e.bitLength(), k, r = nbv(1), z;
  if(i <= 0) return r;
  // choose window size k from the exponent bit length
  else if(i < 18) k = 1;
  else if(i < 48) k = 3;
  else if(i < 144) k = 4;
  else if(i < 768) k = 5;
  else k = 6;
  // choose a reduction strategy: Classic for tiny exponents, Barrett for
  // even moduli (Montgomery reduction requires an odd modulus), Montgomery
  // otherwise
  if(i < 8)
    z = new Classic(m);
  else if(m.isEven())
    z = new Barrett(m);
  else
    z = new Montgomery(m);

  // precomputation: g[w] = this^w (in reducer representation) for odd w <= 2^k-1
  var g = new Array(), n = 3, k1 = k-1, km = (1<<k)-1;
  g[1] = z.convert(this);
  if(k > 1) {
    var g2 = nbi();
    z.sqrTo(g[1],g2); // g2 = this^2
    while(n <= km) {
      g[n] = nbi();
      z.mulTo(g2,g[n-2],g[n]);
      n += 2;
    }
  }

  // j: index of the current exponent word; i: bit position within that word
  var j = e.t-1, w, is1 = true, r2 = nbi(), t;
  i = nbits(e.data[j])-1;
  while(j >= 0) {
    // extract the next k-bit window w (may straddle a word boundary)
    if(i >= k1) w = (e.data[j]>>(i-k1))&km;
    else {
      w = (e.data[j]&((1<<(i+1))-1))<<(k1-i);
      if(j > 0) w |= e.data[j-1]>>(this.DB+i-k1);
    }

    // shrink the window so it ends in a set bit (w becomes odd)
    n = k;
    while((w&1) == 0) { w >>= 1; --n; }
    if((i -= n) < 0) { i += this.DB; --j; }
    if(is1) { // ret == 1, don't bother squaring or multiplying it
      g[w].copyTo(r);
      is1 = false;
    } else {
      // square n times, then multiply in the precomputed g[w]
      while(n > 1) { z.sqrTo(r,r2); z.sqrTo(r2,r); n -= 2; }
      if(n > 0) z.sqrTo(r,r2); else { t = r; r = r2; r2 = t; }
      z.mulTo(r2,g[w],r);
    }

    // square over any run of zero bits between windows
    while(j >= 0 && (e.data[j]&(1<<i)) == 0) {
      z.sqrTo(r,r2); t = r; r = r2; r2 = t;
      if(--i < 0) { i = this.DB-1; --j; }
    }
  }
  // convert the result out of the reducer representation
  return z.revert(r);
}
|
// (public) gcd(this,a) (HAC 14.54) — binary (Stein) GCD on magnitudes
function bnGCD(a) {
  // work on non-negative copies; gcd ignores sign
  var x = (this.s<0)?this.negate():this.clone();
  var y = (a.s<0)?a.negate():a.clone();
  if(x.compareTo(y) < 0) { var t = x; x = y; y = t; }
  var i = x.getLowestSetBit(), g = y.getLowestSetBit();
  if(g < 0) return x; // y == 0, so gcd is x
  if(i < g) g = i;
  // factor out the common power of two (g = shared trailing zero bits)
  if(g > 0) {
    x.rShiftTo(g,x);
    y.rShiftTo(g,y);
  }
  while(x.signum() > 0) {
    // strip remaining factors of two from each operand
    if((i = x.getLowestSetBit()) > 0) x.rShiftTo(i,x);
    if((i = y.getLowestSetBit()) > 0) y.rShiftTo(i,y);
    // subtract smaller from larger; both are odd so the difference is even
    // and can immediately be halved
    if(x.compareTo(y) >= 0) {
      x.subTo(y,x);
      x.rShiftTo(1,x);
    } else {
      y.subTo(x,y);
      y.rShiftTo(1,y);
    }
  }
  // restore the common power of two factored out above
  if(g > 0) y.lShiftTo(g,y);
  return y;
}
|
// (protected) this % n, n < 2^26
// Folds the words of this into a running remainder using d = DV mod n
// (Horner evaluation of the base-DV digits, mod n).
function bnpModInt(n) {
  if(n <= 0) return 0;
  var d = this.DV % n;
  var r = (this.s < 0) ? n - 1 : 0;
  if(this.t > 0) {
    if(d === 0) {
      // n divides the word base: only the lowest word matters
      r = this.data[0] % n;
    } else {
      for(var i = this.t - 1; i >= 0; --i) {
        r = (d * r + this.data[i]) % n;
      }
    }
  }
  return r;
}
|
|
|
|
// (public) 1/this % m (HAC 14.61) — binary extended Euclidean algorithm
function bnModInverse(m) {
  var ac = m.isEven();
  // no inverse exists when both operands are even or the modulus is zero
  if((this.isEven() && ac) || m.signum() == 0) return BigInteger$2.ZERO;
  var u = m.clone(), v = this.clone();
  // Bezout-style coefficients (HAC 14.61); a and c are only tracked when
  // the modulus is even ('ac'), the odd-modulus case needs only b and d
  var a = nbv(1), b = nbv(0), c = nbv(0), d = nbv(1);
  while(u.signum() != 0) {
    while(u.isEven()) {
      u.rShiftTo(1,u);
      if(ac) {
        // make a and b even before halving so the division stays exact
        if(!a.isEven() || !b.isEven()) { a.addTo(this,a); b.subTo(m,b); }
        a.rShiftTo(1,a);
      } else if(!b.isEven()) b.subTo(m,b);
      b.rShiftTo(1,b);
    }
    while(v.isEven()) {
      v.rShiftTo(1,v);
      if(ac) {
        if(!c.isEven() || !d.isEven()) { c.addTo(this,c); d.subTo(m,d); }
        c.rShiftTo(1,c);
      } else if(!d.isEven()) d.subTo(m,d);
      d.rShiftTo(1,d);
    }
    // subtract the smaller pair from the larger, preserving the relation
    if(u.compareTo(v) >= 0) {
      u.subTo(v,u);
      if(ac) a.subTo(c,a);
      b.subTo(d,b);
    } else {
      v.subTo(u,v);
      if(ac) c.subTo(a,c);
      d.subTo(b,d);
    }
  }
  // v now holds gcd(this, m); an inverse exists only when gcd == 1
  if(v.compareTo(BigInteger$2.ONE) != 0) return BigInteger$2.ZERO;
  // normalize d into the canonical range [0, m)
  if(d.compareTo(m) >= 0) return d.subtract(m);
  if(d.signum() < 0) d.addTo(m,d); else return d;
  if(d.signum() < 0) return d.add(m); else return d;
}
|
|
// small primes used for trial division before running Miller-Rabin
var lowprimes = [2,3,5,7,11,13,17,19,23,29,31,37,41,43,47,53,59,61,67,71,73,79,83,89,97,101,103,107,109,113,127,131,137,139,149,151,157,163,167,173,179,181,191,193,197,199,211,223,227,229,233,239,241,251,257,263,269,271,277,281,283,293,307,311,313,317,331,337,347,349,353,359,367,373,379,383,389,397,401,409,419,421,431,433,439,443,449,457,461,463,467,479,487,491,499,503,509];
// bound keeping a running product of small primes below 2^26, the limit
// required by modInt (see bnpModInt: "this % n, n < 2^26")
var lplim = (1<<26)/lowprimes[lowprimes.length-1];
|
|
// (public) test primality with certainty >= 1-.5^t
function bnIsProbablePrime(t) {
  var i, x = this.abs();
  // small single-word values: answer directly from the table
  if(x.t == 1 && x.data[0] <= lowprimes[lowprimes.length-1]) {
    for(i = 0; i < lowprimes.length; ++i)
      if(x.data[0] == lowprimes[i]) return true;
    return false;
  }
  if(x.isEven()) return false;
  // trial division: multiply several small primes together (product kept
  // below lplim so it stays < 2^26) so one modInt call covers all of them
  i = 1;
  while(i < lowprimes.length) {
    var m = lowprimes[i], j = i+1;
    while(j < lowprimes.length && m < lplim) m *= lowprimes[j++];
    m = x.modInt(m);
    while(i < j) if(m%lowprimes[i++] == 0) return false;
  }
  // survived trial division: run t rounds of Miller-Rabin
  return x.millerRabin(t);
}
|
|
// (protected) true if probably prime (HAC 4.24, Miller-Rabin)
function bnpMillerRabin(t) {
  // write this-1 as 2^k * r with r odd
  var n1 = this.subtract(BigInteger$2.ONE);
  var k = n1.getLowestSetBit();
  if(k <= 0) return false;
  var r = n1.shiftRight(k);
  // NOTE(review): witnesses come from bnGetPrng, which is Math.random-based
  // and not cryptographically strong — confirm acceptable for the use case
  var prng = bnGetPrng();
  var a;
  for(var i = 0; i < t; ++i) {
    // select witness 'a' at random from between 1 and n1
    do {
      a = new BigInteger$2(this.bitLength(), prng);
    }
    while(a.compareTo(BigInteger$2.ONE) <= 0 || a.compareTo(n1) >= 0);
    var y = a.modPow(r,this);
    if(y.compareTo(BigInteger$2.ONE) != 0 && y.compareTo(n1) != 0) {
      var j = 1;
      // keep squaring; composite unless y reaches n1 (== -1 mod n) before 1
      while(j++ < k && y.compareTo(n1) != 0) {
        y = y.modPowInt(2,this);
        if(y.compareTo(BigInteger$2.ONE) == 0) return false;
      }
      if(y.compareTo(n1) != 0) return false;
    }
  }
  return true;
}
|
|
// get pseudo random number generator
// Returns an object exposing the same `nextBytes` API that BigInteger
// expects from a secure random source.
// NOTE(review): Math.random() is not cryptographically secure; this PRNG is
// used for Miller-Rabin witness selection — confirm that is acceptable for
// the threat model, or substitute a CSPRNG.
function bnGetPrng() {
  return {
    // x is an array to fill with bytes
    nextBytes: function(x) {
      var i = x.length;
      while(i--) {
        x[i] = Math.floor(Math.random() * 0x0100);
      }
    }
  };
}
|
|
|
|
// protected (internal helper methods)
BigInteger$2.prototype.chunkSize = bnpChunkSize;
BigInteger$2.prototype.toRadix = bnpToRadix;
BigInteger$2.prototype.fromRadix = bnpFromRadix;
BigInteger$2.prototype.fromNumber = bnpFromNumber;
BigInteger$2.prototype.bitwiseTo = bnpBitwiseTo;
BigInteger$2.prototype.changeBit = bnpChangeBit;
BigInteger$2.prototype.addTo = bnpAddTo;
BigInteger$2.prototype.dMultiply = bnpDMultiply;
BigInteger$2.prototype.dAddOffset = bnpDAddOffset;
BigInteger$2.prototype.multiplyLowerTo = bnpMultiplyLowerTo;
BigInteger$2.prototype.multiplyUpperTo = bnpMultiplyUpperTo;
BigInteger$2.prototype.modInt = bnpModInt;
BigInteger$2.prototype.millerRabin = bnpMillerRabin;

// public API
BigInteger$2.prototype.clone = bnClone;
BigInteger$2.prototype.intValue = bnIntValue;
BigInteger$2.prototype.byteValue = bnByteValue;
BigInteger$2.prototype.shortValue = bnShortValue;
BigInteger$2.prototype.signum = bnSigNum;
BigInteger$2.prototype.toByteArray = bnToByteArray;
BigInteger$2.prototype.equals = bnEquals;
BigInteger$2.prototype.min = bnMin;
BigInteger$2.prototype.max = bnMax;
BigInteger$2.prototype.and = bnAnd;
BigInteger$2.prototype.or = bnOr;
BigInteger$2.prototype.xor = bnXor;
BigInteger$2.prototype.andNot = bnAndNot;
BigInteger$2.prototype.not = bnNot;
BigInteger$2.prototype.shiftLeft = bnShiftLeft;
BigInteger$2.prototype.shiftRight = bnShiftRight;
BigInteger$2.prototype.getLowestSetBit = bnGetLowestSetBit;
BigInteger$2.prototype.bitCount = bnBitCount;
BigInteger$2.prototype.testBit = bnTestBit;
BigInteger$2.prototype.setBit = bnSetBit;
BigInteger$2.prototype.clearBit = bnClearBit;
BigInteger$2.prototype.flipBit = bnFlipBit;
BigInteger$2.prototype.add = bnAdd;
BigInteger$2.prototype.subtract = bnSubtract;
BigInteger$2.prototype.multiply = bnMultiply;
BigInteger$2.prototype.divide = bnDivide;
BigInteger$2.prototype.remainder = bnRemainder;
BigInteger$2.prototype.divideAndRemainder = bnDivideAndRemainder;
BigInteger$2.prototype.modPow = bnModPow;
BigInteger$2.prototype.modInverse = bnModInverse;
BigInteger$2.prototype.pow = bnPow;
BigInteger$2.prototype.gcd = bnGCD;
BigInteger$2.prototype.isProbablePrime = bnIsProbablePrime;
|
|
|
/**
|
|
* Secure Hash Algorithm with 160-bit digest (SHA-1) implementation.
|
|
*
|
|
* @author Dave Longley
|
|
*
|
|
* Copyright (c) 2010-2015 Digital Bazaar, Inc.
|
|
*/
|
|
|
|
var forge$5 = forge$m;

// expose SHA-1 under forge.sha1 and register it in the message-digest
// algorithm table (forge.md.algorithms)
var sha1$1 = forge$5.sha1 = forge$5.sha1 || {};
forge$5.md.sha1 = forge$5.md.algorithms.sha1 = sha1$1;
|
|
|
/**
 * Creates a SHA-1 message digest object. All digest state (_state, _input,
 * _w) is private to this closure, so each call returns an independent digest.
 *
 * @return a message digest object.
 */
sha1$1.create = function() {
  // do initialization as necessary (builds the shared padding table)
  if(!_initialized$1) {
    _init$1();
  }

  // SHA-1 state contains five 32-bit integers
  var _state = null;

  // input buffer
  var _input = forge$5.util.createBuffer();

  // used for word storage
  var _w = new Array(80);

  // message digest object
  var md = {
    algorithm: 'sha1',
    blockLength: 64,
    digestLength: 20,
    // 56-bit length of message so far (does not include padding)
    messageLength: 0,
    // true message length
    fullMessageLength: null,
    // size of message length in bytes
    messageLengthSize: 8
  };

  /**
   * Starts the digest.
   *
   * @return this digest object.
   */
  md.start = function() {
    // up to 56-bit message length for convenience
    md.messageLength = 0;

    // full message length (set md.messageLength64 for backwards-compatibility)
    md.fullMessageLength = md.messageLength64 = [];
    var int32s = md.messageLengthSize / 4;
    for(var i = 0; i < int32s; ++i) {
      md.fullMessageLength.push(0);
    }
    _input = forge$5.util.createBuffer();
    // SHA-1 initial hash values
    _state = {
      h0: 0x67452301,
      h1: 0xEFCDAB89,
      h2: 0x98BADCFE,
      h3: 0x10325476,
      h4: 0xC3D2E1F0
    };
    return md;
  };
  // start digest automatically for first time
  md.start();

  /**
   * Updates the digest with the given message input. The given input can
   * be treated as raw input (no encoding will be applied) or an encoding of
   * 'utf8' may be given to encode the input using UTF-8.
   *
   * @param msg the message input to update with.
   * @param encoding the encoding to use (default: 'raw', other: 'utf8').
   *
   * @return this digest object.
   */
  md.update = function(msg, encoding) {
    if(encoding === 'utf8') {
      msg = forge$5.util.encodeUtf8(msg);
    }

    // update message length, tracked as an array of 32-bit big-endian ints
    // with manual carry propagation between the words
    var len = msg.length;
    md.messageLength += len;
    len = [(len / 0x100000000) >>> 0, len >>> 0];
    for(var i = md.fullMessageLength.length - 1; i >= 0; --i) {
      md.fullMessageLength[i] += len[1];
      len[1] = len[0] + ((md.fullMessageLength[i] / 0x100000000) >>> 0);
      md.fullMessageLength[i] = md.fullMessageLength[i] >>> 0;
      len[0] = ((len[1] / 0x100000000) >>> 0);
    }

    // add bytes to input buffer
    _input.putBytes(msg);

    // process bytes
    _update$1(_state, _w, _input);

    // compact input buffer every 2K or if empty
    if(_input.read > 2048 || _input.length() === 0) {
      _input.compact();
    }

    return md;
  };

  /**
   * Produces the digest.
   *
   * @return a byte buffer containing the digest value.
   */
  md.digest = function() {
    /* Note: Here we copy the remaining bytes in the input buffer and
    add the appropriate SHA-1 padding. Then we do the final update
    on a copy of the state so that if the user wants to get
    intermediate digests they can do so. */

    /* Determine the number of bytes that must be added to the message
    to ensure its length is congruent to 448 mod 512. In other words,
    the data to be digested must be a multiple of 512 bits (or 64 bytes).
    This data includes the message, some padding, and the length of the
    message. Since the length of the message will be encoded as 8 bytes (64
    bits), that means that the last segment of the data must have 56 bytes
    (448 bits) of message and padding. Therefore, the length of the message
    plus the padding must be congruent to 448 mod 512 because
    512 - 64 = 448.

    In order to fill up the message length it must be filled with
    padding that begins with 1 bit followed by all 0 bits. Padding
    must *always* be present, so if the message length is already
    congruent to 448 mod 512, then 512 padding bits must be added. */

    var finalBlock = forge$5.util.createBuffer();
    finalBlock.putBytes(_input.bytes());

    // compute remaining size to be digested (include message length size)
    var remaining = (
      md.fullMessageLength[md.fullMessageLength.length - 1] +
      md.messageLengthSize);

    // add padding for overflow blockSize - overflow
    // _padding starts with 1 byte with first bit is set (byte value 128), then
    // there may be up to (blockSize - 1) other pad bytes
    var overflow = remaining & (md.blockLength - 1);
    finalBlock.putBytes(_padding$1.substr(0, md.blockLength - overflow));

    // serialize message length in bits in big-endian order; since length
    // is stored in bytes we multiply by 8 and add carry from next int
    var next, carry;
    var bits = md.fullMessageLength[0] * 8;
    for(var i = 0; i < md.fullMessageLength.length - 1; ++i) {
      next = md.fullMessageLength[i + 1] * 8;
      carry = (next / 0x100000000) >>> 0;
      bits += carry;
      finalBlock.putInt32(bits >>> 0);
      bits = next >>> 0;
    }
    finalBlock.putInt32(bits);

    // run the final compression on a copy of the state so intermediate
    // digests leave the running state untouched
    var s2 = {
      h0: _state.h0,
      h1: _state.h1,
      h2: _state.h2,
      h3: _state.h3,
      h4: _state.h4
    };
    _update$1(s2, _w, finalBlock);
    var rval = forge$5.util.createBuffer();
    rval.putInt32(s2.h0);
    rval.putInt32(s2.h1);
    rval.putInt32(s2.h2);
    rval.putInt32(s2.h3);
    rval.putInt32(s2.h4);
    return rval;
  };

  return md;
};
|
|
|
|
// sha-1 padding bytes not initialized yet (built lazily by _init$1)
var _padding$1 = null;
var _initialized$1 = false;
|
|
|
/**
 * Initializes the constant tables: builds the shared SHA-1 padding string
 * (one 0x80 byte followed by 64 zero bytes) and marks the module as
 * initialized.
 */
function _init$1() {
  var zero = String.fromCharCode(0x00);
  _padding$1 = String.fromCharCode(128) + forge$5.util.fillString(zero, 64);
  _initialized$1 = true;
}
|
|
|
|
/**
 * Updates a SHA-1 state with the given byte buffer.
 *
 * @param s the SHA-1 state to update.
 * @param w the array to use to store words.
 * @param bytes the byte buffer to update with.
 */
function _update$1(s, w, bytes) {
  // consume the input in 512-bit (64-byte) chunks
  var a, b, c, d, e, f, k, t, i;
  var len = bytes.length();
  while(len >= 64) {
    // load the sixteen 32-bit big-endian words of this chunk ...
    for(i = 0; i < 16; ++i) {
      w[i] = bytes.getInt32();
    }
    // ... and extend them to eighty words with the standard SHA-1 message
    // schedule (bitwise-equivalent to the rounds-32..79 optimization the
    // original interleaved into its round loops)
    for(i = 16; i < 80; ++i) {
      t = w[i - 3] ^ w[i - 8] ^ w[i - 14] ^ w[i - 16];
      w[i] = (t << 1) | (t >>> 31);
    }

    // initialize working variables from the current hash state
    a = s.h0;
    b = s.h1;
    c = s.h2;
    d = s.h3;
    e = s.h4;

    // eighty rounds; the round function f and constant k change every twenty
    for(i = 0; i < 80; ++i) {
      if(i < 20) {
        f = d ^ (b & (c ^ d));
        k = 0x5A827999;
      } else if(i < 40) {
        f = b ^ c ^ d;
        k = 0x6ED9EBA1;
      } else if(i < 60) {
        f = (b & c) | (d & (b ^ c));
        k = 0x8F1BBCDC;
      } else {
        f = b ^ c ^ d;
        k = 0xCA62C1D6;
      }
      t = ((a << 5) | (a >>> 27)) + f + e + k + w[i];
      e = d;
      d = c;
      // `>>> 0` necessary to avoid iOS/Safari 10 optimization bug
      c = ((b << 30) | (b >>> 2)) >>> 0;
      b = a;
      a = t;
    }

    // fold this chunk into the running hash state
    s.h0 = (s.h0 + a) | 0;
    s.h1 = (s.h1 + b) | 0;
    s.h2 = (s.h2 + c) | 0;
    s.h3 = (s.h3 + d) | 0;
    s.h4 = (s.h4 + e) | 0;

    len -= 64;
  }
}
|
|
|
|
/**
|
|
* Partial implementation of PKCS#1 v2.2: RSA-OEAP
|
|
*
|
|
* Modified but based on the following MIT and BSD licensed code:
|
|
*
|
|
* https://github.com/kjur/jsjws/blob/master/rsa.js:
|
|
*
|
|
* The 'jsjws'(JSON Web Signature JavaScript Library) License
|
|
*
|
|
* Copyright (c) 2012 Kenji Urushima
|
|
*
|
|
* Permission is hereby granted, free of charge, to any person obtaining a copy
|
|
* of this software and associated documentation files (the "Software"), to deal
|
|
* in the Software without restriction, including without limitation the rights
|
|
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
|
* copies of the Software, and to permit persons to whom the Software is
|
|
* furnished to do so, subject to the following conditions:
|
|
*
|
|
* The above copyright notice and this permission notice shall be included in
|
|
* all copies or substantial portions of the Software.
|
|
*
|
|
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
|
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
|
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
|
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
|
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
|
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
|
* THE SOFTWARE.
|
|
*
|
|
* http://webrsa.cvs.sourceforge.net/viewvc/webrsa/Client/RSAES-OAEP.js?content-type=text%2Fplain:
|
|
*
|
|
* RSAES-OAEP.js
|
|
* $Id: RSAES-OAEP.js,v 1.1.1.1 2003/03/19 15:37:20 ellispritchard Exp $
|
|
* JavaScript Implementation of PKCS #1 v2.1 RSA CRYPTOGRAPHY STANDARD (RSA Laboratories, June 14, 2002)
|
|
* Copyright (C) Ellis Pritchard, Guardian Unlimited 2003.
|
|
* Contact: ellis@nukinetics.com
|
|
* Distributed under the BSD License.
|
|
*
|
|
* Official documentation: http://www.rsa.com/rsalabs/node.asp?id=2125
|
|
*
|
|
* @author Evan Jones (http://evanjones.ca/)
|
|
* @author Dave Longley
|
|
*
|
|
* Copyright (c) 2013-2014 Digital Bazaar, Inc.
|
|
*/
|
|
|
|
var forge$4 = forge$m;

// shortcut for PKCS#1 API (created on demand, shared with any prior definition)
var pkcs1 = forge$4.pkcs1 = forge$4.pkcs1 || {};
|
|
|
/**
 * Encode the given RSAES-OAEP message (M) using key, with optional label (L)
 * and seed.
 *
 * This method does not perform RSA encryption, it only encodes the message
 * using RSAES-OAEP.
 *
 * @param key the RSA key to use.
 * @param message the message to encode.
 * @param options the options to use:
 *          label an optional label to use.
 *          seed the seed to use.
 *          md the message digest object to use, undefined for SHA-1.
 *          mgf1 optional mgf1 parameters:
 *            md the message digest object to use for MGF1.
 *
 * @return the encoded message bytes.
 */
pkcs1.encode_rsa_oaep = function(key, message, options) {
  // parse arguments
  var label;
  var seed;
  var md;
  var mgf1Md;
  // legacy args (label, seed, md)
  if(typeof options === 'string') {
    label = options;
    seed = arguments[3] || undefined;
    md = arguments[4] || undefined;
  } else if(options) {
    label = options.label || undefined;
    seed = options.seed || undefined;
    md = options.md || undefined;
    if(options.mgf1 && options.mgf1.md) {
      mgf1Md = options.mgf1.md;
    }
  }

  // default OAEP to SHA-1 message digest
  if(!md) {
    md = forge$4.md.sha1.create();
  } else {
    md.start();
  }

  // default MGF-1 to same as OAEP
  if(!mgf1Md) {
    mgf1Md = md;
  }

  // compute length in bytes and check output
  var keyLength = Math.ceil(key.n.bitLength() / 8);
  var maxLength = keyLength - 2 * md.digestLength - 2;
  if(message.length > maxLength) {
    var error = new Error('RSAES-OAEP input message length is too long.');
    error.length = message.length;
    error.maxLength = maxLength;
    throw error;
  }

  // lHash = Hash(L), hash of the (possibly empty) label
  if(!label) {
    label = '';
  }
  md.update(label, 'raw');
  var lHash = md.digest();

  // PS: zero-byte padding filling the data block up to its full size
  var PS = '';
  var PS_length = maxLength - message.length;
  for(var i = 0; i < PS_length; i++) {
    PS += '\x00';
  }

  // data block: DB = lHash || PS || 0x01 || M
  var DB = lHash.getBytes() + PS + '\x01' + message;

  // generate a random seed of digest length, or validate the caller-provided one
  if(!seed) {
    seed = forge$4.random.getBytes(md.digestLength);
  } else if(seed.length !== md.digestLength) {
    var error = new Error('Invalid RSAES-OAEP seed. The seed length must ' +
      'match the digest length.');
    error.seedLength = seed.length;
    error.digestLength = md.digestLength;
    throw error;
  }

  // mask DB with MGF1(seed), then mask the seed with MGF1(maskedDB)
  var dbMask = rsa_mgf1(seed, keyLength - md.digestLength - 1, mgf1Md);
  var maskedDB = forge$4.util.xorBytes(DB, dbMask, DB.length);

  var seedMask = rsa_mgf1(maskedDB, md.digestLength, mgf1Md);
  var maskedSeed = forge$4.util.xorBytes(seed, seedMask, seed.length);

  // return encoded message: EM = 0x00 || maskedSeed || maskedDB
  return '\x00' + maskedSeed + maskedDB;
};
|
|
|
|
/**
 * Decode the given RSAES-OAEP encoded message (EM) using key, with optional
 * label (L).
 *
 * This method does not perform RSA decryption, it only decodes the message
 * using RSAES-OAEP.
 *
 * @param key the RSA key to use.
 * @param em the encoded message to decode.
 * @param options the options to use:
 *          label an optional label to use.
 *          md the message digest object to use for OAEP, undefined for SHA-1.
 *          mgf1 optional mgf1 parameters:
 *            md the message digest object to use for MGF1.
 *
 * @return the decoded message bytes.
 */
pkcs1.decode_rsa_oaep = function(key, em, options) {
  // parse args
  var label;
  var md;
  var mgf1Md;
  // legacy args
  if(typeof options === 'string') {
    label = options;
    md = arguments[3] || undefined;
  } else if(options) {
    label = options.label || undefined;
    md = options.md || undefined;
    if(options.mgf1 && options.mgf1.md) {
      mgf1Md = options.mgf1.md;
    }
  }

  // compute length in bytes
  var keyLength = Math.ceil(key.n.bitLength() / 8);

  if(em.length !== keyLength) {
    var error = new Error('RSAES-OAEP encoded message length is invalid.');
    error.length = em.length;
    error.expectedLength = keyLength;
    throw error;
  }

  // default OAEP to SHA-1 message digest
  if(md === undefined) {
    md = forge$4.md.sha1.create();
  } else {
    md.start();
  }

  // default MGF-1 to same as OAEP
  if(!mgf1Md) {
    mgf1Md = md;
  }

  if(keyLength < 2 * md.digestLength + 2) {
    throw new Error('RSAES-OAEP key is too short for the hash function.');
  }

  // lHash = Hash(L), hash of the (possibly empty) label
  if(!label) {
    label = '';
  }
  md.update(label, 'raw');
  var lHash = md.digest().getBytes();

  // split the message into its parts: EM = Y || maskedSeed || maskedDB
  var y = em.charAt(0);
  var maskedSeed = em.substring(1, md.digestLength + 1);
  var maskedDB = em.substring(1 + md.digestLength);

  // unmask the seed, then use it to unmask the data block
  var seedMask = rsa_mgf1(maskedDB, md.digestLength, mgf1Md);
  var seed = forge$4.util.xorBytes(maskedSeed, seedMask, maskedSeed.length);

  var dbMask = rsa_mgf1(seed, keyLength - md.digestLength - 1, mgf1Md);
  var db = forge$4.util.xorBytes(maskedDB, dbMask, maskedDB.length);

  var lHashPrime = db.substring(0, md.digestLength);

  // constant time check that all values match what is expected
  // (accumulate errors instead of branching, to avoid timing leaks)
  var error = (y !== '\x00');

  // constant time check lHash vs lHashPrime
  for(var i = 0; i < md.digestLength; ++i) {
    error |= (lHash.charAt(i) !== lHashPrime.charAt(i));
  }

  // "constant time" find the 0x1 byte separating the padding (zeros) from the
  // message
  // TODO: It must be possible to do this in a better/smarter way?
  var in_ps = 1;
  var index = md.digestLength;
  for(var j = md.digestLength; j < db.length; j++) {
    var code = db.charCodeAt(j);

    // is_0 is 1 only when the low bit of code is 0
    var is_0 = (code & 0x1) ^ 0x1;

    // non-zero if not 0 or 1 in the ps section
    var error_mask = in_ps ? 0xfffe : 0x0000;
    error |= (code & error_mask);

    // latch in_ps to zero after we find 0x1
    in_ps = in_ps & is_0;
    index += in_ps;
  }

  if(error || db.charCodeAt(index) !== 0x1) {
    throw new Error('Invalid RSAES-OAEP padding.');
  }

  // message follows the 0x01 separator
  return db.substring(index + 1);
};
|
|
|
|
/**
 * MGF1 mask generation (PKCS#1): hashes seed || counter for successive
 * 32-bit big-endian counter values and concatenates the digests until at
 * least maskLength bytes are produced, then truncates.
 *
 * @param seed the seed byte string.
 * @param maskLength the number of mask bytes to produce.
 * @param hash the message digest object to use (SHA-1 when not given).
 *
 * @return the mask as a byte string of length maskLength.
 */
function rsa_mgf1(seed, maskLength, hash) {
  if(!hash) {
    // default to SHA-1 message digest
    hash = forge$4.md.sha1.create();
  }
  var blocks = [];
  var iterations = Math.ceil(maskLength / hash.digestLength);
  for(var counter = 0; counter < iterations; ++counter) {
    // 4-byte big-endian counter
    var counterBytes = String.fromCharCode(
      (counter >> 24) & 0xFF, (counter >> 16) & 0xFF,
      (counter >> 8) & 0xFF, counter & 0xFF);
    hash.start();
    hash.update(seed + counterBytes);
    blocks.push(hash.digest().getBytes());
  }
  return blocks.join('').substring(0, maskLength);
}
|
|
|
|
/**
|
|
* Prime number generation API.
|
|
*
|
|
* @author Dave Longley
|
|
*
|
|
* Copyright (c) 2014 Digital Bazaar, Inc.
|
|
*/
|
|
|
|
var forge$3 = forge$m;
|
|
|
|
|
|
|
|
|
|
(function() {
|
|
|
|
// forge.prime already defined
|
|
if(forge$3.prime) {
|
|
return;
|
|
}
|
|
|
|
/* PRIME API */
|
|
var prime = forge$3.prime = forge$3.prime || {};
|
|
|
|
var BigInteger = forge$3.jsbn.BigInteger;
|
|
|
|
// primes are 30k+i for i = 1, 7, 11, 13, 17, 19, 23, 29
|
|
var GCD_30_DELTA = [6, 4, 2, 4, 2, 4, 6, 2];
|
|
var THIRTY = new BigInteger(null);
|
|
THIRTY.fromInt(30);
|
|
var op_or = function(x, y) {return x|y;};
|
|
|
|
/**
|
|
* Generates a random probable prime with the given number of bits.
|
|
*
|
|
* Alternative algorithms can be specified by name as a string or as an
|
|
* object with custom options like so:
|
|
*
|
|
* {
|
|
* name: 'PRIMEINC',
|
|
* options: {
|
|
* maxBlockTime: <the maximum amount of time to block the main
|
|
* thread before allowing I/O other JS to run>,
|
|
* millerRabinTests: <the number of miller-rabin tests to run>,
|
|
* workerScript: <the worker script URL>,
|
|
* workers: <the number of web workers (if supported) to use,
|
|
* -1 to use estimated cores minus one>.
|
|
* workLoad: the size of the work load, ie: number of possible prime
|
|
* numbers for each web worker to check per work assignment,
|
|
* (default: 100).
|
|
* }
|
|
* }
|
|
*
|
|
* @param bits the number of bits for the prime number.
|
|
* @param options the options to use.
|
|
* [algorithm] the algorithm to use (default: 'PRIMEINC').
|
|
* [prng] a custom crypto-secure pseudo-random number generator to use,
|
|
* that must define "getBytesSync".
|
|
*
|
|
* @return callback(err, num) called once the operation completes.
|
|
*/
|
|
prime.generateProbablePrime = function(bits, options, callback) {
|
|
if(typeof options === 'function') {
|
|
callback = options;
|
|
options = {};
|
|
}
|
|
options = options || {};
|
|
|
|
// default to PRIMEINC algorithm
|
|
var algorithm = options.algorithm || 'PRIMEINC';
|
|
if(typeof algorithm === 'string') {
|
|
algorithm = {name: algorithm};
|
|
}
|
|
algorithm.options = algorithm.options || {};
|
|
|
|
// create prng with api that matches BigInteger secure random
|
|
var prng = options.prng || forge$3.random;
|
|
var rng = {
|
|
// x is an array to fill with bytes
|
|
nextBytes: function(x) {
|
|
var b = prng.getBytesSync(x.length);
|
|
for(var i = 0; i < x.length; ++i) {
|
|
x[i] = b.charCodeAt(i);
|
|
}
|
|
}
|
|
};
|
|
|
|
if(algorithm.name === 'PRIMEINC') {
|
|
return primeincFindPrime(bits, rng, algorithm.options, callback);
|
|
}
|
|
|
|
throw new Error('Invalid prime generation algorithm: ' + algorithm.name);
|
|
};
|
|
|
|
function primeincFindPrime(bits, rng, options, callback) {
|
|
if('workers' in options) {
|
|
return primeincFindPrimeWithWorkers(bits, rng, options, callback);
|
|
}
|
|
return primeincFindPrimeWithoutWorkers(bits, rng, options, callback);
|
|
}
|
|
|
|
function primeincFindPrimeWithoutWorkers(bits, rng, options, callback) {
|
|
// initialize random number
|
|
var num = generateRandom(bits, rng);
|
|
|
|
/* Note: All primes are of the form 30k+i for i < 30 and gcd(30, i)=1. The
|
|
number we are given is always aligned at 30k + 1. Each time the number is
|
|
determined not to be prime we add to get to the next 'i', eg: if the number
|
|
was at 30k + 1 we add 6. */
|
|
var deltaIdx = 0;
|
|
|
|
// get required number of MR tests
|
|
var mrTests = getMillerRabinTests(num.bitLength());
|
|
if('millerRabinTests' in options) {
|
|
mrTests = options.millerRabinTests;
|
|
}
|
|
|
|
// find prime nearest to 'num' for maxBlockTime ms
|
|
// 10 ms gives 5ms of leeway for other calculations before dropping
|
|
// below 60fps (1000/60 == 16.67), but in reality, the number will
|
|
// likely be higher due to an 'atomic' big int modPow
|
|
var maxBlockTime = 10;
|
|
if('maxBlockTime' in options) {
|
|
maxBlockTime = options.maxBlockTime;
|
|
}
|
|
|
|
_primeinc(num, bits, rng, deltaIdx, mrTests, maxBlockTime, callback);
|
|
}
|
|
|
|
function _primeinc(num, bits, rng, deltaIdx, mrTests, maxBlockTime, callback) {
|
|
var start = +new Date();
|
|
do {
|
|
// overflow, regenerate random number
|
|
if(num.bitLength() > bits) {
|
|
num = generateRandom(bits, rng);
|
|
}
|
|
// do primality test
|
|
if(num.isProbablePrime(mrTests)) {
|
|
return callback(null, num);
|
|
}
|
|
// get next potential prime
|
|
num.dAddOffset(GCD_30_DELTA[deltaIdx++ % 8], 0);
|
|
} while(maxBlockTime < 0 || (+new Date() - start < maxBlockTime));
|
|
|
|
// keep trying later
|
|
forge$3.util.setImmediate(function() {
|
|
_primeinc(num, bits, rng, deltaIdx, mrTests, maxBlockTime, callback);
|
|
});
|
|
}
|
|
|
|
// NOTE: This algorithm is indeterminate in nature because workers
// run in parallel looking at different segments of numbers. Even if this
// algorithm is run twice with the same input from a predictable RNG, it
// may produce different outputs.
function primeincFindPrimeWithWorkers(bits, rng, options, callback) {
  // web workers unavailable; fall back to the single-threaded search
  if(typeof Worker === 'undefined') {
    return primeincFindPrimeWithoutWorkers(bits, rng, options, callback);
  }

  // initialize random number (shared cursor advanced as ranges are handed out)
  var num = generateRandom(bits, rng);

  // use web workers to generate keys
  var numWorkers = options.workers;
  var workLoad = options.workLoad || 100;
  // each assignment covers 'workLoad' primality checks; only 8 of every 30
  // integers can be prime (30k+i with gcd(30, i) = 1), hence * 30 / 8
  var range = workLoad * 30 / 8;
  var workerScript = options.workerScript || 'forge/prime.worker.js';
  // workers === -1 means auto-detect: one worker per estimated core, minus one
  if(numWorkers === -1) {
    return forge$3.util.estimateCores(function(err, cores) {
      if(err) {
        // default to 2
        cores = 2;
      }
      numWorkers = cores - 1;
      generate();
    });
  }
  generate();

  function generate() {
    // require at least 1 worker
    numWorkers = Math.max(1, numWorkers);

    // TODO: consider optimizing by starting workers outside getPrime() ...
    // note that in order to clean up they will have to be made internally
    // asynchronous which may actually be slower

    // start workers immediately
    var workers = [];
    for(var i = 0; i < numWorkers; ++i) {
      // FIXME: fix path or use blob URLs
      workers[i] = new Worker(workerScript);
    }

    // listen for requests from workers and assign ranges to find prime
    for(var i = 0; i < numWorkers; ++i) {
      workers[i].addEventListener('message', workerMessage);
    }

    /* Note: The distribution of random numbers is unknown. Therefore, each
    web worker is continuously allocated a range of numbers to check for a
    random number until one is found.

    Every 30 numbers will be checked just 8 times, because prime numbers
    have the form:

    30k+i, for i < 30 and gcd(30, i)=1 (there are 8 values of i for this)

    Therefore, if we want a web worker to run N checks before asking for
    a new range of numbers, each range must contain N*30/8 numbers.

    For 100 checks (workLoad), this is a range of 375. */

    var found = false;
    function workerMessage(e) {
      // ignore message, prime already found
      if(found) {
        return;
      }
      var data = e.data;
      if(data.found) {
        // terminate all workers
        for(var i = 0; i < workers.length; ++i) {
          workers[i].terminate();
        }
        found = true;
        return callback(null, new BigInteger(data.prime, 16));
      }

      // overflow, regenerate random number
      if(num.bitLength() > bits) {
        num = generateRandom(bits, rng);
      }

      // assign new range to check
      var hex = num.toString(16);

      // start prime search in the requesting worker
      e.target.postMessage({
        hex: hex,
        workLoad: workLoad
      });

      // advance the shared cursor so the next request gets the next range
      num.dAddOffset(range, 0);
    }
  }
}
|
|
|
|
/**
 * Generates a random number using the given number of bits and RNG.
 *
 * The result always has its most significant bit set (so it is exactly
 * 'bits' bits long) and is aligned on a 30k+1 boundary so the GCD_30_DELTA
 * increments can step through all candidates coprime with 30.
 *
 * @param bits the number of bits for the number.
 * @param rng the random number generator to use.
 *
 * @return the random number.
 */
function generateRandom(bits, rng) {
  var num = new BigInteger(bits, rng);
  // force MSB set
  var bits1 = bits - 1;
  if(!num.testBit(bits1)) {
    num.bitwiseTo(BigInteger.ONE.shiftLeft(bits1), op_or, num);
  }
  // align number on 30k+1 boundary: adding (31 - num mod 30) lands on the
  // next integer congruent to 1 (mod 30) above num
  num.dAddOffset(31 - num.mod(THIRTY).byteValue(), 0);
  return num;
}
|
|
|
|
/**
 * Returns the required number of Miller-Rabin tests to generate a
 * prime with an error probability of (1/2)^80.
 *
 * See Handbook of Applied Cryptography Chapter 4, Table 4.4.
 *
 * @param bits the bit size.
 *
 * @return the required number of iterations.
 */
function getMillerRabinTests(bits) {
  // [maximum bit size, iterations] pairs in ascending threshold order
  var table = [
    [100, 27], [150, 18], [200, 15], [250, 12], [300, 9], [350, 8],
    [400, 7], [500, 6], [600, 5], [800, 4], [1250, 3]
  ];
  for(var idx = 0; idx < table.length; ++idx) {
    if(bits <= table[idx][0]) {
      return table[idx][1];
    }
  }
  // anything larger than 1250 bits needs only 2 iterations
  return 2;
}
|
|
|
|
})();
|
|
|
|
/**
|
|
* Javascript implementation of basic RSA algorithms.
|
|
*
|
|
* @author Dave Longley
|
|
*
|
|
* Copyright (c) 2010-2014 Digital Bazaar, Inc.
|
|
*
|
|
* The only algorithm currently supported for PKI is RSA.
|
|
*
|
|
* An RSA key is often stored in ASN.1 DER format. The SubjectPublicKeyInfo
|
|
* ASN.1 structure is composed of an algorithm of type AlgorithmIdentifier
|
|
* and a subjectPublicKey of type bit string.
|
|
*
|
|
* The AlgorithmIdentifier contains an Object Identifier (OID) and parameters
|
|
* for the algorithm, if any. In the case of RSA, there aren't any.
|
|
*
|
|
* SubjectPublicKeyInfo ::= SEQUENCE {
|
|
* algorithm AlgorithmIdentifier,
|
|
* subjectPublicKey BIT STRING
|
|
* }
|
|
*
|
|
 * AlgorithmIdentifier ::= SEQUENCE {
|
|
* algorithm OBJECT IDENTIFIER,
|
|
* parameters ANY DEFINED BY algorithm OPTIONAL
|
|
* }
|
|
*
|
|
* For an RSA public key, the subjectPublicKey is:
|
|
*
|
|
* RSAPublicKey ::= SEQUENCE {
|
|
* modulus INTEGER, -- n
|
|
* publicExponent INTEGER -- e
|
|
* }
|
|
*
|
|
* PrivateKeyInfo ::= SEQUENCE {
|
|
* version Version,
|
|
* privateKeyAlgorithm PrivateKeyAlgorithmIdentifier,
|
|
* privateKey PrivateKey,
|
|
* attributes [0] IMPLICIT Attributes OPTIONAL
|
|
* }
|
|
*
|
|
* Version ::= INTEGER
|
|
* PrivateKeyAlgorithmIdentifier ::= AlgorithmIdentifier
|
|
* PrivateKey ::= OCTET STRING
|
|
* Attributes ::= SET OF Attribute
|
|
*
|
|
 * An RSA private key has the following structure:
|
|
*
|
|
* RSAPrivateKey ::= SEQUENCE {
|
|
* version Version,
|
|
* modulus INTEGER, -- n
|
|
* publicExponent INTEGER, -- e
|
|
* privateExponent INTEGER, -- d
|
|
* prime1 INTEGER, -- p
|
|
* prime2 INTEGER, -- q
|
|
* exponent1 INTEGER, -- d mod (p-1)
|
|
* exponent2 INTEGER, -- d mod (q-1)
|
|
* coefficient INTEGER -- (inverse of q) mod p
|
|
* }
|
|
*
|
|
* Version ::= INTEGER
|
|
*
|
|
* The OID for the RSA key algorithm is: 1.2.840.113549.1.1.1
|
|
*/
|
|
|
|
var forge$2 = forge$m;

// use the jsbn BigInteger bundled with forge unless one is already in
// scope; the `var` declaration inside the `if` is hoisted, which is what
// makes the typeof guard safe — do not convert to let/const
if(typeof BigInteger$1 === 'undefined') {
  var BigInteger$1 = forge$2.jsbn.BigInteger;
}

// Node's crypto module when running under Node.js, null in browsers
var _crypto = forge$2.util.isNodejs ? require$$8 : null;

// shortcut for asn.1 API
var asn1$1 = forge$2.asn1;

// shortcut for util API
var util = forge$2.util;

/*
 * RSA encryption and decryption, see RFC 2313.
 */
forge$2.pki = forge$2.pki || {};
forge$2.pki.rsa = forge$2.rsa = forge$2.rsa || {};
var pki$1 = forge$2.pki;

// for finding primes, which are 30k+i for i = 1, 7, 11, 13, 17, 19, 23, 29
// (these are the gaps between consecutive values of i)
var GCD_30_DELTA = [6, 4, 2, 4, 2, 4, 6, 2];
|
|
|
|
// validator for a PrivateKeyInfo structure; the 'capture' names collect
// matched values during asn1 validation (see forge.asn1)
var privateKeyValidator = {
  // PrivateKeyInfo
  name: 'PrivateKeyInfo',
  tagClass: asn1$1.Class.UNIVERSAL,
  type: asn1$1.Type.SEQUENCE,
  constructed: true,
  value: [{
    // Version (INTEGER)
    name: 'PrivateKeyInfo.version',
    tagClass: asn1$1.Class.UNIVERSAL,
    type: asn1$1.Type.INTEGER,
    constructed: false,
    capture: 'privateKeyVersion'
  }, {
    // privateKeyAlgorithm (AlgorithmIdentifier)
    name: 'PrivateKeyInfo.privateKeyAlgorithm',
    tagClass: asn1$1.Class.UNIVERSAL,
    type: asn1$1.Type.SEQUENCE,
    constructed: true,
    value: [{
      name: 'AlgorithmIdentifier.algorithm',
      tagClass: asn1$1.Class.UNIVERSAL,
      type: asn1$1.Type.OID,
      constructed: false,
      capture: 'privateKeyOid'
    }]
  }, {
    // PrivateKey (the wrapped key as an OCTET STRING)
    name: 'PrivateKeyInfo',
    tagClass: asn1$1.Class.UNIVERSAL,
    type: asn1$1.Type.OCTETSTRING,
    constructed: false,
    capture: 'privateKey'
  }]
};
|
|
|
|
// validator for an RSA private key (RSAPrivateKey, RFC 8017 A.1.2);
// 'capture' names collect the matched INTEGER values during validation
var rsaPrivateKeyValidator = {
  // RSAPrivateKey
  name: 'RSAPrivateKey',
  tagClass: asn1$1.Class.UNIVERSAL,
  type: asn1$1.Type.SEQUENCE,
  constructed: true,
  value: [{
    // Version (INTEGER)
    name: 'RSAPrivateKey.version',
    tagClass: asn1$1.Class.UNIVERSAL,
    type: asn1$1.Type.INTEGER,
    constructed: false,
    capture: 'privateKeyVersion'
  }, {
    // modulus (n)
    name: 'RSAPrivateKey.modulus',
    tagClass: asn1$1.Class.UNIVERSAL,
    type: asn1$1.Type.INTEGER,
    constructed: false,
    capture: 'privateKeyModulus'
  }, {
    // publicExponent (e)
    name: 'RSAPrivateKey.publicExponent',
    tagClass: asn1$1.Class.UNIVERSAL,
    type: asn1$1.Type.INTEGER,
    constructed: false,
    capture: 'privateKeyPublicExponent'
  }, {
    // privateExponent (d)
    name: 'RSAPrivateKey.privateExponent',
    tagClass: asn1$1.Class.UNIVERSAL,
    type: asn1$1.Type.INTEGER,
    constructed: false,
    capture: 'privateKeyPrivateExponent'
  }, {
    // prime1 (p)
    name: 'RSAPrivateKey.prime1',
    tagClass: asn1$1.Class.UNIVERSAL,
    type: asn1$1.Type.INTEGER,
    constructed: false,
    capture: 'privateKeyPrime1'
  }, {
    // prime2 (q)
    name: 'RSAPrivateKey.prime2',
    tagClass: asn1$1.Class.UNIVERSAL,
    type: asn1$1.Type.INTEGER,
    constructed: false,
    capture: 'privateKeyPrime2'
  }, {
    // exponent1 (d mod (p-1))
    name: 'RSAPrivateKey.exponent1',
    tagClass: asn1$1.Class.UNIVERSAL,
    type: asn1$1.Type.INTEGER,
    constructed: false,
    capture: 'privateKeyExponent1'
  }, {
    // exponent2 (d mod (q-1))
    name: 'RSAPrivateKey.exponent2',
    tagClass: asn1$1.Class.UNIVERSAL,
    type: asn1$1.Type.INTEGER,
    constructed: false,
    capture: 'privateKeyExponent2'
  }, {
    // coefficient ((inverse of q) mod p)
    name: 'RSAPrivateKey.coefficient',
    tagClass: asn1$1.Class.UNIVERSAL,
    type: asn1$1.Type.INTEGER,
    constructed: false,
    capture: 'privateKeyCoefficient'
  }]
};
|
|
|
|
// validator for an RSA public key (RSAPublicKey, RFC 8017 A.1.1)
var rsaPublicKeyValidator = {
  // RSAPublicKey
  name: 'RSAPublicKey',
  tagClass: asn1$1.Class.UNIVERSAL,
  type: asn1$1.Type.SEQUENCE,
  constructed: true,
  value: [{
    // modulus (n)
    name: 'RSAPublicKey.modulus',
    tagClass: asn1$1.Class.UNIVERSAL,
    type: asn1$1.Type.INTEGER,
    constructed: false,
    capture: 'publicKeyModulus'
  }, {
    // publicExponent (e)
    name: 'RSAPublicKey.exponent',
    tagClass: asn1$1.Class.UNIVERSAL,
    type: asn1$1.Type.INTEGER,
    constructed: false,
    capture: 'publicKeyExponent'
  }]
};
|
|
|
|
// validator for a SubjectPublicKeyInfo structure
// Note: Currently only works with an RSA public key
var publicKeyValidator = forge$2.pki.rsa.publicKeyValidator = {
  name: 'SubjectPublicKeyInfo',
  tagClass: asn1$1.Class.UNIVERSAL,
  type: asn1$1.Type.SEQUENCE,
  constructed: true,
  captureAsn1: 'subjectPublicKeyInfo',
  value: [{
    name: 'SubjectPublicKeyInfo.AlgorithmIdentifier',
    tagClass: asn1$1.Class.UNIVERSAL,
    type: asn1$1.Type.SEQUENCE,
    constructed: true,
    value: [{
      name: 'AlgorithmIdentifier.algorithm',
      tagClass: asn1$1.Class.UNIVERSAL,
      type: asn1$1.Type.OID,
      constructed: false,
      capture: 'publicKeyOid'
    }]
  }, {
    // subjectPublicKey (BIT STRING wrapping the RSAPublicKey DER)
    name: 'SubjectPublicKeyInfo.subjectPublicKey',
    tagClass: asn1$1.Class.UNIVERSAL,
    type: asn1$1.Type.BITSTRING,
    constructed: false,
    value: [{
      // RSAPublicKey (captured whole as an asn1 object, not a value)
      name: 'SubjectPublicKeyInfo.subjectPublicKey.RSAPublicKey',
      tagClass: asn1$1.Class.UNIVERSAL,
      type: asn1$1.Type.SEQUENCE,
      constructed: true,
      optional: true,
      captureAsn1: 'rsaPublicKey'
    }]
  }]
};
|
|
|
|
// validator for a DigestInfo structure (used when verifying
// EMSA-PKCS1-v1_5 signatures)
var digestInfoValidator = {
  name: 'DigestInfo',
  tagClass: asn1$1.Class.UNIVERSAL,
  type: asn1$1.Type.SEQUENCE,
  constructed: true,
  value: [{
    name: 'DigestInfo.DigestAlgorithm',
    tagClass: asn1$1.Class.UNIVERSAL,
    type: asn1$1.Type.SEQUENCE,
    constructed: true,
    value: [{
      name: 'DigestInfo.DigestAlgorithm.algorithmIdentifier',
      tagClass: asn1$1.Class.UNIVERSAL,
      type: asn1$1.Type.OID,
      constructed: false,
      capture: 'algorithmIdentifier'
    }, {
      // NULL parameters
      name: 'DigestInfo.DigestAlgorithm.parameters',
      tagClass: asn1$1.Class.UNIVERSAL,
      type: asn1$1.Type.NULL,
      // captured only to check existence for md2 and md5
      capture: 'parameters',
      optional: true,
      constructed: false
    }]
  }, {
    // digest (the raw hash bytes)
    name: 'DigestInfo.digest',
    tagClass: asn1$1.Class.UNIVERSAL,
    type: asn1$1.Type.OCTETSTRING,
    constructed: false,
    capture: 'digest'
  }]
};
|
|
|
|
/**
 * Wrap digest in DigestInfo object.
 *
 * This function implements EMSA-PKCS1-v1_5-ENCODE as per RFC 3447.
 *
 * DigestInfo ::= SEQUENCE {
 *   digestAlgorithm DigestAlgorithmIdentifier,
 *   digest Digest
 * }
 *
 * DigestAlgorithmIdentifier ::= AlgorithmIdentifier
 * Digest ::= OCTET STRING
 *
 * @param md the message digest object with the hash to sign.
 *
 * @return the encoded message (ready for RSA encryption)
 */
var emsaPkcs1v15encode = function(md) {
  // map the digest algorithm name to its OID; unknown algorithms are fatal
  if(!(md.algorithm in pki$1.oids)) {
    var error = new Error('Unknown message digest algorithm.');
    error.algorithm = md.algorithm;
    throw error;
  }
  var oid = pki$1.oids[md.algorithm];
  var oidBytes = asn1$1.oidToDer(oid).getBytes();

  // DigestAlgorithmIdentifier ::= SEQUENCE { OID, NULL }
  var digestAlgorithm = asn1$1.create(
    asn1$1.Class.UNIVERSAL, asn1$1.Type.SEQUENCE, true, [
      asn1$1.create(asn1$1.Class.UNIVERSAL, asn1$1.Type.OID, false, oidBytes),
      asn1$1.create(asn1$1.Class.UNIVERSAL, asn1$1.Type.NULL, false, '')
    ]);

  // Digest ::= OCTET STRING (the raw hash bytes)
  var digest = asn1$1.create(
    asn1$1.Class.UNIVERSAL, asn1$1.Type.OCTETSTRING,
    false, md.digest().getBytes());

  // DigestInfo ::= SEQUENCE { digestAlgorithm, digest }
  var digestInfo = asn1$1.create(
    asn1$1.Class.UNIVERSAL, asn1$1.Type.SEQUENCE, true,
    [digestAlgorithm, digest]);

  // DER-encode the result
  return asn1$1.toDer(digestInfo).getBytes();
};
|
|
|
|
/**
 * Performs x^c mod n (RSA encryption or decryption operation).
 *
 * Private key operations use CRT (Garner's algorithm) when p and q are
 * available, with cryptographic blinding to mitigate timing attacks.
 *
 * @param x the number to raise and mod.
 * @param key the key to use.
 * @param pub true if the key is public, false if private.
 *
 * @return the result of x^c mod n.
 */
var _modPow = function(x, key, pub) {
  if(pub) {
    return x.modPow(key.e, key.n);
  }

  if(!key.p || !key.q) {
    // allow calculation without CRT params (slow)
    return x.modPow(key.d, key.n);
  }

  // pre-compute dP, dQ, and qInv if necessary (cached on the key object)
  if(!key.dP) {
    key.dP = key.d.mod(key.p.subtract(BigInteger$1.ONE));
  }
  if(!key.dQ) {
    key.dQ = key.d.mod(key.q.subtract(BigInteger$1.ONE));
  }
  if(!key.qInv) {
    key.qInv = key.q.modInverse(key.p);
  }

  /* Chinese remainder theorem (CRT) states:

    Suppose n1, n2, ..., nk are positive integers which are pairwise
    coprime (n1 and n2 have no common factors other than 1). For any
    integers x1, x2, ..., xk there exists an integer x solving the
    system of simultaneous congruences (where ~= means modularly
    congruent so a ~= b mod n means a mod n = b mod n):

    x ~= x1 mod n1
    x ~= x2 mod n2
    ...
    x ~= xk mod nk

    This system of congruences has a single simultaneous solution x
    between 0 and n - 1. Furthermore, each xk solution and x itself
    is congruent modulo the product n = n1*n2*...*nk.
    So x1 mod n = x2 mod n = xk mod n = x mod n.

    The single simultaneous solution x can be solved with the following
    equation:

    x = sum(xi*ri*si) mod n where ri = n/ni and si = ri^-1 mod ni.

    Where x is less than n, xi = x mod ni.

    For RSA we are only concerned with k = 2. The modulus n = pq, where
    p and q are coprime. The RSA decryption algorithm is:

    y = x^d mod n

    Given the above:

    x1 = x^d mod p
    r1 = n/p = q
    s1 = q^-1 mod p
    x2 = x^d mod q
    r2 = n/q = p
    s2 = p^-1 mod q

    So y = (x1r1s1 + x2r2s2) mod n
         = ((x^d mod p)q(q^-1 mod p) + (x^d mod q)p(p^-1 mod q)) mod n

    According to Fermat's Little Theorem, if the modulus P is prime,
    for any integer A not evenly divisible by P, A^(P-1) ~= 1 mod P.
    Since A is not divisible by P it follows that if:
    N ~= M mod (P - 1), then A^N mod P = A^M mod P. Therefore:

    A^N mod P = A^(M mod (P - 1)) mod P. (The latter takes less effort
    to calculate). In order to calculate x^d mod p more quickly the
    exponent d mod (p - 1) is stored in the RSA private key (the same
    is done for x^d mod q). These values are referred to as dP and dQ
    respectively. Therefore we now have:

    y = ((x^dP mod p)q(q^-1 mod p) + (x^dQ mod q)p(p^-1 mod q)) mod n

    Since we'll be reducing x^dP by modulo p (same for q) we can also
    reduce x by p (and q respectively) before hand. Therefore, let

    xp = ((x mod p)^dP mod p), and
    xq = ((x mod q)^dQ mod q), yielding:

    y = (xp*q*(q^-1 mod p) + xq*p*(p^-1 mod q)) mod n

    This can be further reduced to a simple algorithm that only
    requires 1 inverse (the q inverse is used) to be used and stored.
    The algorithm is called Garner's algorithm. If qInv is the
    inverse of q, we simply calculate:

    y = (qInv*(xp - xq) mod p) * q + xq

    However, there are two further complications. First, we need to
    ensure that xp > xq to prevent signed BigIntegers from being used
    so we add p until this is true (since we will be mod'ing with
    p anyway). Then, there is a known timing attack on algorithms
    using the CRT. To mitigate this risk, "cryptographic blinding"
    should be used. This requires simply generating a random number r
    between 0 and n-1 and its inverse and multiplying x by r^e before
    calculating y and then multiplying y by r^-1 afterwards. Note that
    r must be coprime with n (gcd(r, n) === 1) in order to have an
    inverse.
  */

  // cryptographic blinding: draw r until 0 <= r < n and gcd(r, n) = 1
  // (coprimality guarantees r has an inverse mod n)
  var r;
  do {
    r = new BigInteger$1(
      forge$2.util.bytesToHex(forge$2.random.getBytes(key.n.bitLength() / 8)),
      16);
  } while(r.compareTo(key.n) >= 0 || !r.gcd(key.n).equals(BigInteger$1.ONE));
  x = x.multiply(r.modPow(key.e, key.n)).mod(key.n);

  // calculate xp and xq
  var xp = x.mod(key.p).modPow(key.dP, key.p);
  var xq = x.mod(key.q).modPow(key.dQ, key.q);

  // xp must be larger than xq to avoid signed bit usage
  while(xp.compareTo(xq) < 0) {
    xp = xp.add(key.p);
  }

  // do last step: Garner's recombination y = (qInv*(xp - xq) mod p)*q + xq
  var y = xp.subtract(xq)
    .multiply(key.qInv).mod(key.p)
    .multiply(key.q).add(xq);

  // remove effect of random for cryptographic blinding
  y = y.multiply(r.modInverse(key.n)).mod(key.n);

  return y;
};
|
|
|
|
/**
 * NOTE: THIS METHOD IS DEPRECATED, use 'sign' on a private key object or
 * 'encrypt' on a public key object instead.
 *
 * Performs RSA encryption.
 *
 * The parameter bt controls whether to put padding bytes before the
 * message passed in. Set bt to either true or false to disable padding
 * completely (in order to handle e.g. EMSA-PSS encoding separately before),
 * signaling whether the encryption operation is a public key operation
 * (i.e. encrypting data) or not, i.e. private key operation (data signing).
 *
 * For PKCS#1 v1.5 padding pass in the block type to use, i.e. either 0x01
 * (for signing) or 0x02 (for encryption). The key operation mode (private
 * or public) is derived from this flag in that case).
 *
 * @param m the message to encrypt as a byte string.
 * @param key the RSA key to use.
 * @param bt for PKCS#1 v1.5 padding, the block type to use
 *   (0x01 for private key, 0x02 for public),
 *   to disable padding: true = public key, false = private key.
 *
 * @return the encrypted bytes as a string.
 */
pki$1.rsa.encrypt = function(m, key, bt) {
  // when bt is boolean it directly selects the key operation mode
  var pub = bt;
  var eb;

  // get the length of the modulus in bytes
  var k = Math.ceil(key.n.bitLength() / 8);

  if(bt !== false && bt !== true) {
    // legacy, default to PKCS#1 v1.5 padding; block type 0x02 implies a
    // public-key operation, 0x01 a private-key one
    pub = (bt === 0x02);
    eb = _encodePkcs1_v1_5(m, key, bt);
  } else {
    // padding disabled: use the message as the encryption block directly
    eb = forge$2.util.createBuffer();
    eb.putBytes(m);
  }

  // load encryption block as big integer 'x'
  // FIXME: hex conversion inefficient, get BigInteger w/byte strings
  var x = new BigInteger$1(eb.toHex(), 16);

  // do RSA encryption
  var y = _modPow(x, key, pub);

  // convert y into the encrypted data byte string, if y is shorter in
  // bytes than k, then prepend zero bytes to fill up ed
  // FIXME: hex conversion inefficient, get BigInteger w/byte strings
  var yhex = y.toString(16);
  var ed = forge$2.util.createBuffer();
  var zeros = k - Math.ceil(yhex.length / 2);
  while(zeros > 0) {
    ed.putByte(0x00);
    --zeros;
  }
  ed.putBytes(forge$2.util.hexToBytes(yhex));
  return ed.getBytes();
};
|
|
|
|
/**
 * NOTE: THIS METHOD IS DEPRECATED, use 'decrypt' on a private key object or
 * 'verify' on a public key object instead.
 *
 * Performs RSA decryption.
 *
 * The parameter ml controls whether to apply PKCS#1 v1.5 padding
 * or not. Set ml = false to disable padding removal completely
 * (in order to handle e.g. EMSA-PSS later on) and simply pass back
 * the RSA encryption block.
 *
 * @param ed the encrypted data to decrypt in as a byte string.
 * @param key the RSA key to use.
 * @param pub true for a public key operation, false for private.
 * @param ml the message length, if known, false to disable padding.
 *
 * @return the decrypted message as a byte string.
 */
pki$1.rsa.decrypt = function(ed, key, pub, ml) {
  // get the length of the modulus in bytes
  var k = Math.ceil(key.n.bitLength() / 8);

  // error if the length of the encrypted data ED is not k
  if(ed.length !== k) {
    var error = new Error('Encrypted message length is invalid.');
    error.length = ed.length;
    error.expected = k;
    throw error;
  }

  // convert encrypted data into a big integer
  // FIXME: hex conversion inefficient, get BigInteger w/byte strings
  var y = new BigInteger$1(forge$2.util.createBuffer(ed).toHex(), 16);

  // y must be less than the modulus or it wasn't the result of
  // a previous mod operation (encryption) using that modulus
  if(y.compareTo(key.n) >= 0) {
    throw new Error('Encrypted message is invalid.');
  }

  // do RSA decryption
  var x = _modPow(y, key, pub);

  // create the encryption block, if x is shorter in bytes than k, then
  // prepend zero bytes to fill up eb
  // FIXME: hex conversion inefficient, get BigInteger w/byte strings
  var xhex = x.toString(16);
  var eb = forge$2.util.createBuffer();
  var zeros = k - Math.ceil(xhex.length / 2);
  while(zeros > 0) {
    eb.putByte(0x00);
    --zeros;
  }
  eb.putBytes(forge$2.util.hexToBytes(xhex));

  if(ml !== false) {
    // legacy, default to PKCS#1 v1.5 padding removal
    return _decodePkcs1_v1_5(eb.getBytes(), key, pub);
  }

  // return message (raw encryption block, padding untouched)
  return eb.getBytes();
};
|
|
|
|
/**
 * Creates an RSA key-pair generation state object. It is used to allow
 * key-generation to be performed in steps. It also allows for a UI to
 * display progress updates.
 *
 * @param bits the size for the private key in bits, defaults to 2048.
 * @param e the public exponent to use, defaults to 65537 (0x10001).
 * @param [options] the options to use.
 *          prng a custom crypto-secure pseudo-random number generator to use,
 *            that must define "getBytesSync".
 *          algorithm the algorithm to use (default: 'PRIMEINC').
 *
 * @return the state object to use to generate the key-pair.
 */
pki$1.rsa.createKeyPairGenerationState = function(bits, e, options) {
  // TODO: migrate step-based prime generation code to forge.prime

  // normalize bit size (string values are accepted for legacy callers)
  if(typeof(bits) === 'string') {
    bits = parseInt(bits, 10);
  }
  bits = bits || 2048;

  options = options || {};
  var prng = options.prng || forge$2.random;
  // adapt the prng to the interface BigInteger expects for secure random
  // numbers: fill the given byte array in place
  var rng = {
    nextBytes: function(x) {
      var bytes = prng.getBytesSync(x.length);
      for(var j = 0; j < x.length; ++j) {
        x[j] = bytes.charCodeAt(j);
      }
    }
  };

  var algorithm = options.algorithm || 'PRIMEINC';
  if(algorithm !== 'PRIMEINC') {
    throw new Error('Invalid key generation algorithm: ' + algorithm);
  }

  // PRIMEINC algorithm state: p gets the larger half of the bits so that
  // pBits + qBits === bits even when bits is odd
  var rval = {
    algorithm: algorithm,
    state: 0,
    bits: bits,
    rng: rng,
    eInt: e || 65537,
    e: new BigInteger$1(null),
    p: null,
    q: null,
    qBits: bits >> 1,
    pBits: bits - (bits >> 1),
    pqState: 0,
    num: null,
    keys: null
  };
  rval.e.fromInt(rval.eInt);

  return rval;
};
|
|
|
|
/**
 * Attempts to run the key-generation algorithm for at most n milliseconds
 * (approximately) using the given state. When key-generation has completed,
 * the keys will be stored in state.keys.
 *
 * To use this function to update a UI while generating a key or to prevent
 * causing browser lockups/warnings, set "n" to a value other than 0. A
 * simple pattern for generating a key and showing a progress indicator is:
 *
 * var state = pki.rsa.createKeyPairGenerationState(2048);
 * var step = function() {
 *   // step key-generation, run algorithm for 100 ms, repeat
 *   if(!forge.pki.rsa.stepKeyPairGenerationState(state, 100)) {
 *     setTimeout(step, 1);
 *   } else {
 *     // key-generation complete
 *     // TODO: turn off progress indicator here
 *     // TODO: use the generated key-pair in "state.keys"
 *   }
 * };
 * // TODO: turn on progress indicator here
 * setTimeout(step, 0);
 *
 * @param state the state to use.
 * @param n the maximum number of milliseconds to run the algorithm for, 0
 *          to run the algorithm to completion.
 *
 * @return true if the key-generation completed, false if not.
 */
pki$1.rsa.stepKeyPairGenerationState = function(state, n) {
  // set default algorithm if not set
  if(!('algorithm' in state)) {
    state.algorithm = 'PRIMEINC';
  }

  // TODO: migrate step-based prime generation code to forge.prime
  // TODO: abstract as PRIMEINC algorithm

  // do key generation (based on Tom Wu's rsa.js, see jsbn.js license)
  // with some minor optimizations and designed to run in steps

  // local state vars
  var THIRTY = new BigInteger$1(null);
  THIRTY.fromInt(30);
  var deltaIdx = 0;
  var op_or = function(x, y) {return x | y;};

  // keep stepping until time limit is reached or done
  var t1 = +new Date();
  var t2;
  var total = 0;
  while(state.keys === null && (n <= 0 || total < n)) {
    // state 0: generate p or q
    if(state.state === 0) {
      /* Note: All primes are of the form:

        30k+i, for i < 30 and gcd(30, i)=1, where there are 8 values for i

        When we generate a random number, we always align it at 30k + 1. Each
        time the number is determined not to be prime we add to get to the
        next 'i', eg: if the number was at 30k + 1 we add 6. */
      var bits = (state.p === null) ? state.pBits : state.qBits;
      var bits1 = bits - 1;

      // pqState 0: get a random number
      if(state.pqState === 0) {
        state.num = new BigInteger$1(bits, state.rng);
        // force MSB set so the candidate is exactly 'bits' bits long
        if(!state.num.testBit(bits1)) {
          state.num.bitwiseTo(
            BigInteger$1.ONE.shiftLeft(bits1), op_or, state.num);
        }
        // align number on 30k+1 boundary
        state.num.dAddOffset(31 - state.num.mod(THIRTY).byteValue(), 0);
        deltaIdx = 0;

        ++state.pqState;
      } else if(state.pqState === 1) {
        // pqState 1: try to make the number a prime
        if(state.num.bitLength() > bits) {
          // overflow, try again
          state.pqState = 0;
          // do primality test
        } else if(state.num.isProbablePrime(
          _getMillerRabinTests(state.num.bitLength()))) {
          ++state.pqState;
        } else {
          // get next potential prime
          state.num.dAddOffset(GCD_30_DELTA[deltaIdx++ % 8], 0);
        }
      } else if(state.pqState === 2) {
        // pqState 2: ensure number is coprime with e, i.e.
        // gcd(num - 1, e) === 1; otherwise restart with a new candidate
        state.pqState =
          (state.num.subtract(BigInteger$1.ONE).gcd(state.e)
          .compareTo(BigInteger$1.ONE) === 0) ? 3 : 0;
      } else if(state.pqState === 3) {
        // pqState 3: store p or q
        state.pqState = 0;
        if(state.p === null) {
          state.p = state.num;
        } else {
          state.q = state.num;
        }

        // advance state if both p and q are ready
        if(state.p !== null && state.q !== null) {
          ++state.state;
        }
        state.num = null;
      }
    } else if(state.state === 1) {
      // state 1: ensure p is larger than q (swap them if not)
      if(state.p.compareTo(state.q) < 0) {
        state.num = state.p;
        state.p = state.q;
        state.q = state.num;
      }
      ++state.state;
    } else if(state.state === 2) {
      // state 2: compute phi: (p - 1)(q - 1) (Euler's totient function)
      state.p1 = state.p.subtract(BigInteger$1.ONE);
      state.q1 = state.q.subtract(BigInteger$1.ONE);
      state.phi = state.p1.multiply(state.q1);
      ++state.state;
    } else if(state.state === 3) {
      // state 3: ensure e and phi are coprime (required for d to exist)
      if(state.phi.gcd(state.e).compareTo(BigInteger$1.ONE) === 0) {
        // phi and e are coprime, advance
        ++state.state;
      } else {
        // phi and e aren't coprime, so generate a new p and q
        state.p = null;
        state.q = null;
        state.state = 0;
      }
    } else if(state.state === 4) {
      // state 4: create n, ensure n has the right number of bits
      state.n = state.p.multiply(state.q);

      // ensure n is right number of bits
      if(state.n.bitLength() === state.bits) {
        // success, advance
        ++state.state;
      } else {
        // failed, get new q
        state.q = null;
        state.state = 0;
      }
    } else if(state.state === 5) {
      // state 5: derive the private exponent and build the key pair
      var d = state.e.modInverse(state.phi);
      state.keys = {
        privateKey: pki$1.rsa.setPrivateKey(
          state.n, state.e, d, state.p, state.q,
          d.mod(state.p1), d.mod(state.q1),
          state.q.modInverse(state.p)),
        publicKey: pki$1.rsa.setPublicKey(state.n, state.e)
      };
    }

    // update timing
    t2 = +new Date();
    total += t2 - t1;
    t1 = t2;
  }

  return state.keys !== null;
};
|
|
|
|
/**
|
|
* Generates an RSA public-private key pair in a single call.
|
|
*
|
|
* To generate a key-pair in steps (to allow for progress updates and to
|
|
* prevent blocking or warnings in slow browsers) then use the key-pair
|
|
* generation state functions.
|
|
*
|
|
* To generate a key-pair asynchronously (either through web-workers, if
|
|
* available, or by breaking up the work on the main thread), pass a
|
|
* callback function.
|
|
*
|
|
* @param [bits] the size for the private key in bits, defaults to 2048.
|
|
* @param [e] the public exponent to use, defaults to 65537.
|
|
* @param [options] options for key-pair generation, if given then 'bits'
|
|
* and 'e' must *not* be given:
|
|
* bits the size for the private key in bits, (default: 2048).
|
|
* e the public exponent to use, (default: 65537 (0x10001)).
|
|
* workerScript the worker script URL.
|
|
* workers the number of web workers (if supported) to use,
|
|
* (default: 2).
|
|
* workLoad the size of the work load, ie: number of possible prime
|
|
* numbers for each web worker to check per work assignment,
|
|
* (default: 100).
|
|
* prng a custom crypto-secure pseudo-random number generator to use,
|
|
* that must define "getBytesSync". Disables use of native APIs.
|
|
* algorithm the algorithm to use (default: 'PRIMEINC').
|
|
* @param [callback(err, keypair)] called once the operation completes.
|
|
*
|
|
* @return an object with privateKey and publicKey properties.
|
|
*/
|
|
pki$1.rsa.generateKeyPair = function(bits, e, options, callback) {
  // Normalize the overloaded argument list. Supported call shapes:
  // (bits), (options), (callback)
  if(arguments.length === 1) {
    if(typeof bits === 'object') {
      options = bits;
      bits = undefined;
    } else if(typeof bits === 'function') {
      callback = bits;
      bits = undefined;
    }
  } else if(arguments.length === 2) {
    // (bits, e), (bits, options), (bits, callback), (options, callback)
    if(typeof bits === 'number') {
      if(typeof e === 'function') {
        callback = e;
        e = undefined;
      } else if(typeof e !== 'number') {
        options = e;
        e = undefined;
      }
    } else {
      options = bits;
      callback = e;
      bits = undefined;
      e = undefined;
    }
  } else if(arguments.length === 3) {
    // (bits, e, options), (bits, e, callback), (bits, options, callback)
    if(typeof e === 'number') {
      if(typeof options === 'function') {
        callback = options;
        options = undefined;
      }
    } else {
      callback = options;
      options = e;
      e = undefined;
    }
  }
  options = options || {};
  // fall back to the documented defaults: 2048-bit modulus, e = 65537
  if(bits === undefined) {
    bits = options.bits || 2048;
  }
  if(e === undefined) {
    e = options.e || 0x10001;
  }

  // use native code if permitted, available, and parameters are acceptable
  // (a custom PRNG forces the pure-JS path; native APIs only accept the
  // common exponents 65537 and 3 here)
  if(!options.prng &&
    bits >= 256 && bits <= 16384 && (e === 0x10001 || e === 3)) {
    if(callback) {
      // try native async
      if(_detectNodeCrypto('generateKeyPair')) {
        return _crypto.generateKeyPair('rsa', {
          modulusLength: bits,
          publicExponent: e,
          publicKeyEncoding: {
            type: 'spki',
            format: 'pem'
          },
          privateKeyEncoding: {
            type: 'pkcs8',
            format: 'pem'
          }
        }, function(err, pub, priv) {
          if(err) {
            return callback(err);
          }
          // convert the PEM output back into forge key objects
          callback(null, {
            privateKey: pki$1.privateKeyFromPem(priv),
            publicKey: pki$1.publicKeyFromPem(pub)
          });
        });
      }
      if(_detectSubtleCrypto('generateKey') &&
        _detectSubtleCrypto('exportKey')) {
        // use standard native generateKey
        return util.globalScope.crypto.subtle.generateKey({
          name: 'RSASSA-PKCS1-v1_5',
          modulusLength: bits,
          publicExponent: _intToUint8Array(e),
          hash: {name: 'SHA-256'}
        }, true /* key can be exported*/, ['sign', 'verify'])
        .then(function(pair) {
          return util.globalScope.crypto.subtle.exportKey(
            'pkcs8', pair.privateKey);
        // avoiding catch(function(err) {...}) to support IE <= 8
        }).then(undefined, function(err) {
          callback(err);
        }).then(function(pkcs8) {
          // pkcs8 is undefined when the previous rejection handler ran,
          // so only invoke the success callback when export succeeded
          if(pkcs8) {
            var privateKey = pki$1.privateKeyFromAsn1(
              asn1$1.fromDer(forge$2.util.createBuffer(pkcs8)));
            callback(null, {
              privateKey: privateKey,
              publicKey: pki$1.setRsaPublicKey(privateKey.n, privateKey.e)
            });
          }
        });
      }
      if(_detectSubtleMsCrypto('generateKey') &&
        _detectSubtleMsCrypto('exportKey')) {
        // legacy IE msCrypto uses event-style CryptoOperation objects
        // instead of Promises
        var genOp = util.globalScope.msCrypto.subtle.generateKey({
          name: 'RSASSA-PKCS1-v1_5',
          modulusLength: bits,
          publicExponent: _intToUint8Array(e),
          hash: {name: 'SHA-256'}
        }, true /* key can be exported*/, ['sign', 'verify']);
        genOp.oncomplete = function(e) {
          var pair = e.target.result;
          var exportOp = util.globalScope.msCrypto.subtle.exportKey(
            'pkcs8', pair.privateKey);
          exportOp.oncomplete = function(e) {
            var pkcs8 = e.target.result;
            var privateKey = pki$1.privateKeyFromAsn1(
              asn1$1.fromDer(forge$2.util.createBuffer(pkcs8)));
            callback(null, {
              privateKey: privateKey,
              publicKey: pki$1.setRsaPublicKey(privateKey.n, privateKey.e)
            });
          };
          exportOp.onerror = function(err) {
            callback(err);
          };
        };
        genOp.onerror = function(err) {
          callback(err);
        };
        return;
      }
    } else {
      // try native sync
      if(_detectNodeCrypto('generateKeyPairSync')) {
        var keypair = _crypto.generateKeyPairSync('rsa', {
          modulusLength: bits,
          publicExponent: e,
          publicKeyEncoding: {
            type: 'spki',
            format: 'pem'
          },
          privateKeyEncoding: {
            type: 'pkcs8',
            format: 'pem'
          }
        });
        return {
          privateKey: pki$1.privateKeyFromPem(keypair.privateKey),
          publicKey: pki$1.publicKeyFromPem(keypair.publicKey)
        };
      }
    }
  }

  // use JavaScript implementation
  var state = pki$1.rsa.createKeyPairGenerationState(bits, e, options);
  if(!callback) {
    // synchronous: run the generation state machine to completion
    pki$1.rsa.stepKeyPairGenerationState(state, 0);
    return state.keys;
  }
  // asynchronous: generate in the background (web workers / setImmediate)
  _generateKeyPair(state, options, callback);
};
|
|
|
|
/**
|
|
* Sets an RSA public key from BigIntegers modulus and exponent.
|
|
*
|
|
* @param n the modulus.
|
|
* @param e the exponent.
|
|
*
|
|
* @return the public key.
|
|
*/
|
|
pki$1.setRsaPublicKey = pki$1.rsa.setPublicKey = function(n, e) {
  // the public key is a plain object exposing n/e plus closure-bound
  // encrypt() and verify() methods
  var key = {
    n: n,
    e: e
  };

  /**
   * Encrypts the given data with this public key. Newer applications
   * should use the 'RSA-OAEP' decryption scheme, 'RSAES-PKCS1-V1_5' is for
   * legacy applications.
   *
   * @param data the byte string to encrypt.
   * @param scheme the encryption scheme to use:
   *          'RSAES-PKCS1-V1_5' (default),
   *          'RSA-OAEP',
   *          'RAW', 'NONE', or null to perform raw RSA encryption,
   *          an object with an 'encode' property set to a function
   *          with the signature 'function(data, key)' that returns
   *          a binary-encoded string representing the encoded data.
   * @param schemeOptions any scheme-specific options.
   *
   * @return the encrypted byte string.
   */
  key.encrypt = function(data, scheme, schemeOptions) {
    if(typeof scheme === 'string') {
      scheme = scheme.toUpperCase();
    } else if(scheme === undefined) {
      scheme = 'RSAES-PKCS1-V1_5';
    }

    // replace the scheme name with an object exposing encode()
    if(scheme === 'RSAES-PKCS1-V1_5') {
      scheme = {
        encode: function(m, key, pub) {
          // block type 0x02 = public-key encryption padding
          return _encodePkcs1_v1_5(m, key, 0x02).getBytes();
        }
      };
    } else if(scheme === 'RSA-OAEP' || scheme === 'RSAES-OAEP') {
      scheme = {
        encode: function(m, key) {
          return forge$2.pkcs1.encode_rsa_oaep(key, m, schemeOptions);
        }
      };
    } else if(['RAW', 'NONE', 'NULL', null].indexOf(scheme) !== -1) {
      // raw RSA: identity encoding
      scheme = {encode: function(e) {return e;}};
    } else if(typeof scheme === 'string') {
      // still a string here means it matched none of the known names
      throw new Error('Unsupported encryption scheme: "' + scheme + '".');
    }

    // do scheme-based encoding then rsa encryption
    // (local 'e' intentionally shadows the outer public exponent)
    var e = scheme.encode(data, key, true);
    return pki$1.rsa.encrypt(e, key, true);
  };

  /**
   * Verifies the given signature against the given digest.
   *
   * PKCS#1 supports multiple (currently two) signature schemes:
   * RSASSA-PKCS1-V1_5 and RSASSA-PSS.
   *
   * By default this implementation uses the "old scheme", i.e.
   * RSASSA-PKCS1-V1_5, in which case once RSA-decrypted, the
   * signature is an OCTET STRING that holds a DigestInfo.
   *
   * DigestInfo ::= SEQUENCE {
   *   digestAlgorithm DigestAlgorithmIdentifier,
   *   digest Digest
   * }
   * DigestAlgorithmIdentifier ::= AlgorithmIdentifier
   * Digest ::= OCTET STRING
   *
   * To perform PSS signature verification, provide an instance
   * of Forge PSS object as the scheme parameter.
   *
   * @param digest the message digest hash to compare against the signature,
   *          as a binary-encoded string.
   * @param signature the signature to verify, as a binary-encoded string.
   * @param scheme signature verification scheme to use:
   *          'RSASSA-PKCS1-V1_5' or undefined for RSASSA PKCS#1 v1.5,
   *          a Forge PSS object for RSASSA-PSS,
   *          'NONE' or null for none, DigestInfo will not be expected, but
   *            PKCS#1 v1.5 padding will still be used.
   * @param options optional verify options
   *          _parseAllDigestBytes testing flag to control parsing of all
   *            digest bytes. Unsupported and not for general usage.
   *            (default: true)
   *
   * @return true if the signature was verified, false if not.
   */
  key.verify = function(digest, signature, scheme, options) {
    if(typeof scheme === 'string') {
      scheme = scheme.toUpperCase();
    } else if(scheme === undefined) {
      scheme = 'RSASSA-PKCS1-V1_5';
    }
    if(options === undefined) {
      options = {
        _parseAllDigestBytes: true
      };
    }
    if(!('_parseAllDigestBytes' in options)) {
      options._parseAllDigestBytes = true;
    }

    // replace the scheme name with an object exposing verify()
    if(scheme === 'RSASSA-PKCS1-V1_5') {
      scheme = {
        verify: function(digest, d) {
          // remove padding
          d = _decodePkcs1_v1_5(d, key, true);
          // d is ASN.1 BER-encoded DigestInfo
          var obj = asn1$1.fromDer(d, {
            parseAllBytes: options._parseAllDigestBytes
          });

          // validate DigestInfo
          var capture = {};
          var errors = [];
          if(!asn1$1.validate(obj, digestInfoValidator, capture, errors)) {
            var error = new Error(
              'ASN.1 object does not contain a valid RSASSA-PKCS1-v1_5 ' +
              'DigestInfo value.');
            error.errors = errors;
            throw error;
          }
          // check hash algorithm identifier
          // see PKCS1-v1-5DigestAlgorithms in RFC 8017
          // FIXME: add support to validator for strict value choices
          var oid = asn1$1.derToOid(capture.algorithmIdentifier);
          if(!(oid === forge$2.oids.md2 ||
            oid === forge$2.oids.md5 ||
            oid === forge$2.oids.sha1 ||
            oid === forge$2.oids.sha224 ||
            oid === forge$2.oids.sha256 ||
            oid === forge$2.oids.sha384 ||
            oid === forge$2.oids.sha512 ||
            oid === forge$2.oids['sha512-224'] ||
            oid === forge$2.oids['sha512-256'])) {
            var error = new Error(
              'Unknown RSASSA-PKCS1-v1_5 DigestAlgorithm identifier.');
            error.oid = oid;
            throw error;
          }

          // special check for md2 and md5 that NULL parameters exist
          if(oid === forge$2.oids.md2 || oid === forge$2.oids.md5) {
            if(!('parameters' in capture)) {
              throw new Error(
                'ASN.1 object does not contain a valid RSASSA-PKCS1-v1_5 ' +
                'DigestInfo value. ' +
                'Missing algorithm identifer NULL parameters.');
            }
          }

          // compare the given digest to the decrypted one
          return digest === capture.digest;
        }
      };
    } else if(scheme === 'NONE' || scheme === 'NULL' || scheme === null) {
      scheme = {
        verify: function(digest, d) {
          // remove padding
          d = _decodePkcs1_v1_5(d, key, true);
          // no DigestInfo expected; compare raw bytes
          return digest === d;
        }
      };
    }

    // do rsa decryption w/o any decoding, then verify -- which does decoding
    var d = pki$1.rsa.decrypt(signature, key, true, false);
    return scheme.verify(digest, d, key.n.bitLength());
  };

  return key;
};
|
|
|
|
/**
|
|
* Sets an RSA private key from BigIntegers modulus, exponent, primes,
|
|
* prime exponents, and modular multiplicative inverse.
|
|
*
|
|
* @param n the modulus.
|
|
* @param e the public exponent.
|
|
* @param d the private exponent ((inverse of e) mod n).
|
|
* @param p the first prime.
|
|
* @param q the second prime.
|
|
* @param dP exponent1 (d mod (p-1)).
|
|
* @param dQ exponent2 (d mod (q-1)).
|
|
* @param qInv ((inverse of q) mod p)
|
|
*
|
|
* @return the private key.
|
|
*/
|
|
pki$1.setRsaPrivateKey = pki$1.rsa.setPrivateKey = function(
  n, e, d, p, q, dP, dQ, qInv) {
  // the private key is a plain object exposing all CRT components plus
  // closure-bound decrypt() and sign() methods
  var key = {
    n: n,
    e: e,
    d: d,
    p: p,
    q: q,
    dP: dP,
    dQ: dQ,
    qInv: qInv
  };

  /**
   * Decrypts the given data with this private key. The decryption scheme
   * must match the one used to encrypt the data.
   *
   * @param data the byte string to decrypt.
   * @param scheme the decryption scheme to use:
   *          'RSAES-PKCS1-V1_5' (default),
   *          'RSA-OAEP',
   *          'RAW', 'NONE', or null to perform raw RSA decryption.
   * @param schemeOptions any scheme-specific options.
   *
   * @return the decrypted byte string.
   */
  key.decrypt = function(data, scheme, schemeOptions) {
    if(typeof scheme === 'string') {
      scheme = scheme.toUpperCase();
    } else if(scheme === undefined) {
      scheme = 'RSAES-PKCS1-V1_5';
    }

    // do rsa decryption w/o any decoding
    // (local 'd' intentionally shadows the outer private exponent)
    var d = pki$1.rsa.decrypt(data, key, false, false);

    // replace the scheme name with an object exposing decode()
    if(scheme === 'RSAES-PKCS1-V1_5') {
      scheme = {decode: _decodePkcs1_v1_5};
    } else if(scheme === 'RSA-OAEP' || scheme === 'RSAES-OAEP') {
      scheme = {
        decode: function(d, key) {
          return forge$2.pkcs1.decode_rsa_oaep(key, d, schemeOptions);
        }
      };
    } else if(['RAW', 'NONE', 'NULL', null].indexOf(scheme) !== -1) {
      // raw RSA: identity decoding
      scheme = {decode: function(d) {return d;}};
    } else {
      throw new Error('Unsupported encryption scheme: "' + scheme + '".');
    }

    // decode according to scheme
    return scheme.decode(d, key, false);
  };

  /**
   * Signs the given digest, producing a signature.
   *
   * PKCS#1 supports multiple (currently two) signature schemes:
   * RSASSA-PKCS1-V1_5 and RSASSA-PSS.
   *
   * By default this implementation uses the "old scheme", i.e.
   * RSASSA-PKCS1-V1_5. In order to generate a PSS signature, provide
   * an instance of Forge PSS object as the scheme parameter.
   *
   * @param md the message digest object with the hash to sign.
   * @param scheme the signature scheme to use:
   *          'RSASSA-PKCS1-V1_5' or undefined for RSASSA PKCS#1 v1.5,
   *          a Forge PSS object for RSASSA-PSS,
   *          'NONE' or null for none, DigestInfo will not be used but
   *            PKCS#1 v1.5 padding will still be used.
   *
   * @return the signature as a byte string.
   */
  key.sign = function(md, scheme) {
    /* Note: The internal implementation of RSA operations is being
      transitioned away from a PKCS#1 v1.5 hard-coded scheme. Some legacy
      code like the use of an encoding block identifier 'bt' will eventually
      be removed. */

    // private key operation
    // bt stays false for custom scheme objects (e.g. a Forge PSS instance)
    var bt = false;

    if(typeof scheme === 'string') {
      scheme = scheme.toUpperCase();
    }

    if(scheme === undefined || scheme === 'RSASSA-PKCS1-V1_5') {
      scheme = {encode: emsaPkcs1v15encode};
      bt = 0x01;
    } else if(scheme === 'NONE' || scheme === 'NULL' || scheme === null) {
      // sign the digest bytes directly, still with block type 0x01 padding
      scheme = {encode: function() {return md;}};
      bt = 0x01;
    }

    // encode and then encrypt (private-key operation)
    var d = scheme.encode(md, key.n.bitLength());
    return pki$1.rsa.encrypt(d, key, bt);
  };

  return key;
};
|
|
|
|
/**
|
|
* Wraps an RSAPrivateKey ASN.1 object in an ASN.1 PrivateKeyInfo object.
|
|
*
|
|
* @param rsaKey the ASN.1 RSAPrivateKey.
|
|
*
|
|
* @return the ASN.1 PrivateKeyInfo.
|
|
*/
|
|
pki$1.wrapRsaPrivateKey = function(rsaKey) {
  // version (0)
  var versionField = asn1$1.create(
    asn1$1.Class.UNIVERSAL, asn1$1.Type.INTEGER, false,
    asn1$1.integerToDer(0).getBytes());

  // privateKeyAlgorithm: AlgorithmIdentifier { rsaEncryption, NULL }
  var algorithmField = asn1$1.create(
    asn1$1.Class.UNIVERSAL, asn1$1.Type.SEQUENCE, true, [
      asn1$1.create(
        asn1$1.Class.UNIVERSAL, asn1$1.Type.OID, false,
        asn1$1.oidToDer(pki$1.oids.rsaEncryption).getBytes()),
      asn1$1.create(asn1$1.Class.UNIVERSAL, asn1$1.Type.NULL, false, '')
    ]);

  // privateKey: DER of the RSAPrivateKey wrapped in an OCTET STRING
  var privateKeyField = asn1$1.create(
    asn1$1.Class.UNIVERSAL, asn1$1.Type.OCTETSTRING, false,
    asn1$1.toDer(rsaKey).getBytes());

  // PrivateKeyInfo ::= SEQUENCE { version, algorithm, privateKey }
  return asn1$1.create(
    asn1$1.Class.UNIVERSAL, asn1$1.Type.SEQUENCE, true,
    [versionField, algorithmField, privateKeyField]);
};
|
|
|
|
/**
|
|
* Converts a private key from an ASN.1 object.
|
|
*
|
|
* @param obj the ASN.1 representation of a PrivateKeyInfo containing an
|
|
* RSAPrivateKey or an RSAPrivateKey.
|
|
*
|
|
* @return the private key.
|
|
*/
|
|
pki$1.privateKeyFromAsn1 = function(obj) {
  // If the object is a PrivateKeyInfo wrapper, unwrap the inner
  // RSAPrivateKey DER first.
  var wrapperCapture = {};
  var wrapperErrors = [];
  if(asn1$1.validate(obj, privateKeyValidator, wrapperCapture, wrapperErrors)) {
    obj = asn1$1.fromDer(forge$2.util.createBuffer(wrapperCapture.privateKey));
  }

  // Validate the RSAPrivateKey structure itself.
  var capture = {};
  var errors = [];
  if(!asn1$1.validate(obj, rsaPrivateKeyValidator, capture, errors)) {
    var error = new Error('Cannot read private key. ' +
      'ASN.1 object does not contain an RSAPrivateKey.');
    error.errors = errors;
    throw error;
  }

  // Note: Version is currently ignored (capture.privateKeyVersion).
  // FIXME: inefficient, get a BigInteger that uses byte strings
  function toBigInt(bytes) {
    return new BigInteger$1(forge$2.util.createBuffer(bytes).toHex(), 16);
  }

  // build the private key from the captured CRT components
  return pki$1.setRsaPrivateKey(
    toBigInt(capture.privateKeyModulus),
    toBigInt(capture.privateKeyPublicExponent),
    toBigInt(capture.privateKeyPrivateExponent),
    toBigInt(capture.privateKeyPrime1),
    toBigInt(capture.privateKeyPrime2),
    toBigInt(capture.privateKeyExponent1),
    toBigInt(capture.privateKeyExponent2),
    toBigInt(capture.privateKeyCoefficient));
};
|
|
|
|
/**
|
|
* Converts a private key to an ASN.1 RSAPrivateKey.
|
|
*
|
|
* @param key the private key.
|
|
*
|
|
* @return the ASN.1 representation of an RSAPrivateKey.
|
|
*/
|
|
pki$1.privateKeyToAsn1 = pki$1.privateKeyToRSAPrivateKey = function(key) {
  // helper: wrap raw bytes as a universal INTEGER node
  function derInteger(bytes) {
    return asn1$1.create(
      asn1$1.Class.UNIVERSAL, asn1$1.Type.INTEGER, false, bytes);
  }

  // RSAPrivateKey ::= SEQUENCE of nine INTEGERs
  return asn1$1.create(asn1$1.Class.UNIVERSAL, asn1$1.Type.SEQUENCE, true, [
    // version (0 = only 2 primes, 1 multiple primes)
    derInteger(asn1$1.integerToDer(0).getBytes()),
    // modulus (n)
    derInteger(_bnToBytes(key.n)),
    // publicExponent (e)
    derInteger(_bnToBytes(key.e)),
    // privateExponent (d)
    derInteger(_bnToBytes(key.d)),
    // prime1 (p)
    derInteger(_bnToBytes(key.p)),
    // prime2 (q)
    derInteger(_bnToBytes(key.q)),
    // exponent1 (dP)
    derInteger(_bnToBytes(key.dP)),
    // exponent2 (dQ)
    derInteger(_bnToBytes(key.dQ)),
    // coefficient (qInv)
    derInteger(_bnToBytes(key.qInv))
  ]);
};
|
|
|
|
/**
|
|
* Converts a public key from an ASN.1 SubjectPublicKeyInfo or RSAPublicKey.
|
|
*
|
|
* @param obj the asn1 representation of a SubjectPublicKeyInfo or RSAPublicKey.
|
|
*
|
|
* @return the public key.
|
|
*/
|
|
pki$1.publicKeyFromAsn1 = function(obj) {
  // If the object is a SubjectPublicKeyInfo wrapper, check its OID and
  // unwrap the inner RSAPublicKey.
  var capture = {};
  var errors = [];
  if(asn1$1.validate(obj, publicKeyValidator, capture, errors)) {
    var oid = asn1$1.derToOid(capture.publicKeyOid);
    if(oid !== pki$1.oids.rsaEncryption) {
      var error = new Error('Cannot read public key. Unknown OID.');
      error.oid = oid;
      throw error;
    }
    obj = capture.rsaPublicKey;
  }

  // Validate the RSAPublicKey structure (reuses the same capture object).
  errors = [];
  if(!asn1$1.validate(obj, rsaPublicKeyValidator, capture, errors)) {
    var error = new Error('Cannot read public key. ' +
      'ASN.1 object does not contain an RSAPublicKey.');
    error.errors = errors;
    throw error;
  }

  // FIXME: inefficient, get a BigInteger that uses byte strings
  var modulusHex = forge$2.util.createBuffer(capture.publicKeyModulus).toHex();
  var exponentHex = forge$2.util.createBuffer(capture.publicKeyExponent).toHex();

  // build the public key from modulus and exponent
  return pki$1.setRsaPublicKey(
    new BigInteger$1(modulusHex, 16),
    new BigInteger$1(exponentHex, 16));
};
|
|
|
|
/**
|
|
* Converts a public key to an ASN.1 SubjectPublicKeyInfo.
|
|
*
|
|
* @param key the public key.
|
|
*
|
|
* @return the asn1 representation of a SubjectPublicKeyInfo.
|
|
*/
|
|
pki$1.publicKeyToAsn1 = pki$1.publicKeyToSubjectPublicKeyInfo = function(key) {
  // AlgorithmIdentifier { rsaEncryption, NULL parameters }
  var algorithmIdentifier = asn1$1.create(
    asn1$1.Class.UNIVERSAL, asn1$1.Type.SEQUENCE, true, [
      asn1$1.create(asn1$1.Class.UNIVERSAL, asn1$1.Type.OID, false,
        asn1$1.oidToDer(pki$1.oids.rsaEncryption).getBytes()),
      asn1$1.create(asn1$1.Class.UNIVERSAL, asn1$1.Type.NULL, false, '')
    ]);

  // subjectPublicKey: BIT STRING holding the RSAPublicKey structure
  var subjectPublicKey = asn1$1.create(
    asn1$1.Class.UNIVERSAL, asn1$1.Type.BITSTRING, false, [
      pki$1.publicKeyToRSAPublicKey(key)
    ]);

  // SubjectPublicKeyInfo ::= SEQUENCE { algorithm, subjectPublicKey }
  return asn1$1.create(
    asn1$1.Class.UNIVERSAL, asn1$1.Type.SEQUENCE, true,
    [algorithmIdentifier, subjectPublicKey]);
};
|
|
|
|
/**
|
|
* Converts a public key to an ASN.1 RSAPublicKey.
|
|
*
|
|
* @param key the public key.
|
|
*
|
|
* @return the asn1 representation of a RSAPublicKey.
|
|
*/
|
|
pki$1.publicKeyToRSAPublicKey = function(key) {
  // modulus (n)
  var modulusField = asn1$1.create(
    asn1$1.Class.UNIVERSAL, asn1$1.Type.INTEGER, false,
    _bnToBytes(key.n));

  // publicExponent (e)
  var exponentField = asn1$1.create(
    asn1$1.Class.UNIVERSAL, asn1$1.Type.INTEGER, false,
    _bnToBytes(key.e));

  // RSAPublicKey ::= SEQUENCE { modulus, publicExponent }
  return asn1$1.create(
    asn1$1.Class.UNIVERSAL, asn1$1.Type.SEQUENCE, true,
    [modulusField, exponentField]);
};
|
|
|
|
/**
|
|
* Encodes a message using PKCS#1 v1.5 padding.
|
|
*
|
|
* @param m the message to encode.
|
|
* @param key the RSA key to use.
|
|
* @param bt the block type to use, i.e. either 0x01 (for signing) or 0x02
|
|
* (for encryption).
|
|
*
|
|
* @return the padded byte buffer.
|
|
*/
|
|
function _encodePkcs1_v1_5(m, key, bt) {
  var eb = forge$2.util.createBuffer();

  // get the length of the modulus in bytes
  var k = Math.ceil(key.n.bitLength() / 8);

  /* use PKCS#1 v1.5 padding */
  // the padded block needs room for: 0x00, BT, >= 8 padding bytes, 0x00
  if(m.length > (k - 11)) {
    var error = new Error('Message is too long for PKCS#1 v1.5 padding.');
    error.length = m.length;
    error.max = k - 11;
    throw error;
  }

  /* A block type BT, a padding string PS, and the data D shall be
    formatted into an octet string EB, the encryption block:

    EB = 00 || BT || PS || 00 || D

    The block type BT shall be a single octet indicating the structure of
    the encryption block. For this version of the document it shall have
    value 00, 01, or 02. For a private-key operation, the block type
    shall be 00 or 01. For a public-key operation, it shall be 02.

    The padding string PS shall consist of k-3-||D|| octets. For block
    type 00, the octets shall have value 00; for block type 01, they
    shall have value FF; and for block type 02, they shall be
    pseudorandomly generated and nonzero. This makes the length of the
    encryption block EB equal to k. */

  // build the encryption block
  eb.putByte(0x00);
  eb.putByte(bt);

  // create the padding
  var padNum = k - 3 - m.length;
  var padByte;
  // private key op
  if(bt === 0x00 || bt === 0x01) {
    padByte = (bt === 0x00) ? 0x00 : 0xFF;
    for(var i = 0; i < padNum; ++i) {
      eb.putByte(padByte);
    }
  } else {
    // public key op
    // pad with random non-zero values; zero bytes from the PRNG are
    // discarded and regenerated until padNum non-zero bytes were emitted
    while(padNum > 0) {
      var numZeros = 0;
      var padBytes = forge$2.random.getBytes(padNum);
      for(var i = 0; i < padNum; ++i) {
        padByte = padBytes.charCodeAt(i);
        if(padByte === 0) {
          ++numZeros;
        } else {
          eb.putByte(padByte);
        }
      }
      // retry only for the zero bytes that were skipped
      padNum = numZeros;
    }
  }

  // zero followed by message
  eb.putByte(0x00);
  eb.putBytes(m);

  return eb;
}
|
|
|
|
/**
|
|
* Decodes a message using PKCS#1 v1.5 padding.
|
|
*
|
|
* @param em the message to decode.
|
|
* @param key the RSA key to use.
|
|
* @param pub true if the key is a public key, false if it is private.
|
|
* @param ml the message length, if specified.
|
|
*
|
|
* @return the decoded bytes.
|
|
*/
|
|
function _decodePkcs1_v1_5(em, key, pub, ml) {
  // get the length of the modulus in bytes
  var k = Math.ceil(key.n.bitLength() / 8);

  /* It is an error if any of the following conditions occurs:

    1. The encryption block EB cannot be parsed unambiguously.
    2. The padding string PS consists of fewer than eight octets
      or is inconsisent with the block type BT.
    3. The decryption process is a public-key operation and the block
      type BT is not 00 or 01, or the decryption process is a
      private-key operation and the block type is not 02.
  */

  // parse the encryption block: EB = 00 || BT || PS || 00 || D
  var eb = forge$2.util.createBuffer(em);
  var first = eb.getByte();
  var bt = eb.getByte();
  // use strict (in)equality throughout; bt is always a number here so
  // the original loose `bt != 0x02` behaved identically but was
  // inconsistent with the surrounding comparisons
  if(first !== 0x00 ||
    (pub && bt !== 0x00 && bt !== 0x01) ||
    (!pub && bt !== 0x02) ||
    (pub && bt === 0x00 && typeof ml === 'undefined')) {
    throw new Error('Encryption block is invalid.');
  }

  var padNum = 0;
  if(bt === 0x00) {
    // check all padding bytes for 0x00 (requires the caller-provided
    // message length, ml, since 0x00 padding is ambiguous)
    padNum = k - 3 - ml;
    for(var i = 0; i < padNum; ++i) {
      if(eb.getByte() !== 0x00) {
        throw new Error('Encryption block is invalid.');
      }
    }
  } else if(bt === 0x01) {
    // find the first byte that isn't 0xFF, should be after all padding
    padNum = 0;
    while(eb.length() > 1) {
      if(eb.getByte() !== 0xFF) {
        // push the non-padding byte back for the trailing-zero check
        --eb.read;
        break;
      }
      ++padNum;
    }
  } else if(bt === 0x02) {
    // look for 0x00 byte terminating the random padding
    padNum = 0;
    while(eb.length() > 1) {
      if(eb.getByte() === 0x00) {
        --eb.read;
        break;
      }
      ++padNum;
    }
  }

  // zero must be 0x00 and padNum must be (k - 3 - message length)
  var zero = eb.getByte();
  if(zero !== 0x00 || padNum !== (k - 3 - eb.length())) {
    throw new Error('Encryption block is invalid.');
  }

  // remaining bytes are the message D
  return eb.getBytes();
}
|
|
|
|
/**
|
|
* Runs the key-generation algorithm asynchronously, either in the background
|
|
* via Web Workers, or using the main thread and setImmediate.
|
|
*
|
|
* @param state the key-pair generation state.
|
|
* @param [options] options for key-pair generation:
|
|
* workerScript the worker script URL.
|
|
* workers the number of web workers (if supported) to use,
|
|
* (default: 2, -1 to use estimated cores minus one).
|
|
* workLoad the size of the work load, ie: number of possible prime
|
|
* numbers for each web worker to check per work assignment,
|
|
* (default: 100).
|
|
* @param callback(err, keypair) called once the operation completes.
|
|
*/
|
|
function _generateKeyPair(state, options, callback) {
  if(typeof options === 'function') {
    callback = options;
    options = {};
  }
  options = options || {};

  // options forwarded to the probable-prime generator
  var opts = {
    algorithm: {
      name: options.algorithm || 'PRIMEINC',
      options: {
        workers: options.workers || 2,
        workLoad: options.workLoad || 100,
        workerScript: options.workerScript
      }
    }
  };
  if('prng' in options) {
    opts.prng = options.prng;
  }

  generate();

  function generate() {
    // find p and then q (done in series to simplify)
    getPrime(state.pBits, function(err, num) {
      if(err) {
        return callback(err);
      }
      state.p = num;
      // q may already exist from an earlier attempt that only reset p
      if(state.q !== null) {
        return finish(err, state.q);
      }
      getPrime(state.qBits, finish);
    });
  }

  function getPrime(bits, callback) {
    forge$2.prime.generateProbablePrime(bits, opts, callback);
  }

  // validates the candidate primes and either derives the key pair or
  // restarts prime generation (regenerate q only, or both p and q)
  function finish(err, num) {
    if(err) {
      return callback(err);
    }

    // set q
    state.q = num;

    // ensure p is larger than q (swap them if not)
    if(state.p.compareTo(state.q) < 0) {
      var tmp = state.p;
      state.p = state.q;
      state.q = tmp;
    }

    // ensure p is coprime with e, i.e. gcd(p - 1, e) === 1
    if(state.p.subtract(BigInteger$1.ONE).gcd(state.e)
      .compareTo(BigInteger$1.ONE) !== 0) {
      // p unusable: restart from a fresh p (q is kept and reused)
      state.p = null;
      generate();
      return;
    }

    // ensure q is coprime with e
    if(state.q.subtract(BigInteger$1.ONE).gcd(state.e)
      .compareTo(BigInteger$1.ONE) !== 0) {
      // q unusable: only q needs regenerating
      state.q = null;
      getPrime(state.qBits, finish);
      return;
    }

    // compute phi: (p - 1)(q - 1) (Euler's totient function)
    state.p1 = state.p.subtract(BigInteger$1.ONE);
    state.q1 = state.q.subtract(BigInteger$1.ONE);
    state.phi = state.p1.multiply(state.q1);

    // ensure e and phi are coprime
    if(state.phi.gcd(state.e).compareTo(BigInteger$1.ONE) !== 0) {
      // phi and e aren't coprime, so generate a new p and q
      state.p = state.q = null;
      generate();
      return;
    }

    // create n, ensure n is has the right number of bits
    state.n = state.p.multiply(state.q);
    if(state.n.bitLength() !== state.bits) {
      // failed, get new q
      state.q = null;
      getPrime(state.qBits, finish);
      return;
    }

    // set keys: d = e^-1 mod phi, plus the CRT parameters
    var d = state.e.modInverse(state.phi);
    state.keys = {
      privateKey: pki$1.rsa.setPrivateKey(
        state.n, state.e, d, state.p, state.q,
        d.mod(state.p1), d.mod(state.q1),
        state.q.modInverse(state.p)),
      publicKey: pki$1.rsa.setPublicKey(state.n, state.e)
    };

    callback(null, state.keys);
  }
}
|
|
|
|
/**
|
|
* Converts a positive BigInteger into 2's-complement big-endian bytes.
|
|
*
|
|
* @param b the big integer to convert.
|
|
*
|
|
* @return the bytes.
|
|
*/
|
|
function _bnToBytes(b) {
  // Hex digits order lexicographically as 0-9 < a-f, so comparing the
  // first character against '8' detects a set high bit in the leading
  // nibble; prepend a 0x00 sign byte in that case so the 2's-complement
  // value stays positive.
  var hex = b.toString(16);
  if(hex[0] >= '8') {
    hex = '00' + hex;
  }
  var bytes = forge$2.util.hexToBytes(hex);

  // ensure integer is minimally-encoded: drop one redundant leading byte
  // (0x00 before a positive value, or 0xFF before a negative value)
  if(bytes.length > 1) {
    var lead = bytes.charCodeAt(0);
    var nextHighBit = bytes.charCodeAt(1) & 0x80;
    var redundantZero = lead === 0 && nextHighBit === 0;
    var redundantFF = lead === 0xFF && nextHighBit === 0x80;
    if(redundantZero || redundantFF) {
      return bytes.substr(1);
    }
  }
  return bytes;
}
|
|
|
|
/**
|
|
* Returns the required number of Miller-Rabin tests to generate a
|
|
* prime with an error probability of (1/2)^80.
|
|
*
|
|
* See Handbook of Applied Cryptography Chapter 4, Table 4.4.
|
|
*
|
|
* @param bits the bit size.
|
|
*
|
|
* @return the required number of iterations.
|
|
*/
|
|
function _getMillerRabinTests(bits) {
  // Iteration counts from Handbook of Applied Cryptography, Table 4.4,
  // giving an error probability of at most (1/2)^80. Each entry is
  // [maximum bit size, required iterations].
  var thresholds = [
    [100, 27], [150, 18], [200, 15], [250, 12], [300, 9],
    [350, 8], [400, 7], [500, 6], [600, 5], [800, 4], [1250, 3]
  ];
  for(var i = 0; i < thresholds.length; ++i) {
    if(bits <= thresholds[i][0]) {
      return thresholds[i][1];
    }
  }
  // anything larger than 1250 bits only needs 2 iterations
  return 2;
}
|
|
|
|
/**
|
|
* Performs feature detection on the Node crypto interface.
|
|
*
|
|
* @param fn the feature (function) to detect.
|
|
*
|
|
* @return true if detected, false if not.
|
|
*/
|
|
function _detectNodeCrypto(fn) {
  // only usable under Node.js and only when the named API exists
  if(!forge$2.util.isNodejs) {
    return false;
  }
  return typeof _crypto[fn] === 'function';
}
|
|
|
|
/**
|
|
* Performs feature detection on the SubtleCrypto interface.
|
|
*
|
|
* @param fn the feature (function) to detect.
|
|
*
|
|
* @return true if detected, false if not.
|
|
*/
|
|
function _detectSubtleCrypto(fn) {
  // walk globalScope -> crypto -> subtle -> fn, checking each link's type
  // in the same order as the original short-circuit chain
  var scope = util.globalScope;
  return (typeof scope !== 'undefined' &&
    typeof scope.crypto === 'object' &&
    typeof scope.crypto.subtle === 'object' &&
    typeof scope.crypto.subtle[fn] === 'function');
}
|
|
|
|
/**
|
|
* Performs feature detection on the deprecated Microsoft Internet Explorer
|
|
* outdated SubtleCrypto interface. This function should only be used after
|
|
* checking for the modern, standard SubtleCrypto interface.
|
|
*
|
|
* @param fn the feature (function) to detect.
|
|
*
|
|
* @return true if detected, false if not.
|
|
*/
|
|
function _detectSubtleMsCrypto(fn) {
  // same shape as _detectSubtleCrypto but against the legacy IE msCrypto
  // namespace; checks are ordered exactly as the original chain
  var scope = util.globalScope;
  return (typeof scope !== 'undefined' &&
    typeof scope.msCrypto === 'object' &&
    typeof scope.msCrypto.subtle === 'object' &&
    typeof scope.msCrypto.subtle[fn] === 'function');
}
|
|
|
|
function _intToUint8Array(x) {
  // hex-encode the integer, then copy each binary-string byte's char
  // code into a big-endian Uint8Array
  var byteString = forge$2.util.hexToBytes(x.toString(16));
  var out = new Uint8Array(byteString.length);
  var idx = byteString.length;
  while(idx--) {
    out[idx] = byteString.charCodeAt(idx);
  }
  return out;
}
|
|
|
|
/**
|
|
* Password-based encryption functions.
|
|
*
|
|
* @author Dave Longley
|
|
* @author Stefan Siegl <stesie@brokenpipe.de>
|
|
*
|
|
* Copyright (c) 2010-2013 Digital Bazaar, Inc.
|
|
* Copyright (c) 2012 Stefan Siegl <stesie@brokenpipe.de>
|
|
*
|
|
* An EncryptedPrivateKeyInfo:
|
|
*
|
|
* EncryptedPrivateKeyInfo ::= SEQUENCE {
|
|
* encryptionAlgorithm EncryptionAlgorithmIdentifier,
|
|
* encryptedData EncryptedData }
|
|
*
|
|
* EncryptionAlgorithmIdentifier ::= AlgorithmIdentifier
|
|
*
|
|
* EncryptedData ::= OCTET STRING
|
|
*/
|
|
|
|
// Module-local alias for the bundled forge core object.
var forge$1 = forge$m;

// jsbn's BigInteger may already have been declared by another module in this
// bundle; only pull it from forge when it is missing.
// NOTE: `var` is intentional — the declaration is hoisted out of the `if`
// block so `BigInteger` is a module-scope binding either way.
if(typeof BigInteger === 'undefined') {
  var BigInteger = forge$1.jsbn.BigInteger;
}

// shortcut for asn.1 API
var asn1 = forge$1.asn1;

/* Password-based encryption implementation. */
// The PBE API is exposed both as forge.pki.pbe and forge.pbe (same object).
var pki = forge$1.pki = forge$1.pki || {};
pki.pbe = forge$1.pbe = forge$1.pbe || {};
// shortcut for the shared OID <-> name registry
var oids = pki.oids;
|
|
|
|
// validator for an EncryptedPrivateKeyInfo structure (PKCS#8):
//   EncryptedPrivateKeyInfo ::= SEQUENCE {
//     encryptionAlgorithm AlgorithmIdentifier,
//     encryptedData OCTET STRING }
// Captures: encryptionOid (DER OID bytes), encryptionParams (ASN.1 object),
// encryptedData (raw octets).
// Note: Currently only works w/algorithm params
var encryptedPrivateKeyValidator = {
  name: 'EncryptedPrivateKeyInfo',
  tagClass: asn1.Class.UNIVERSAL,
  type: asn1.Type.SEQUENCE,
  constructed: true,
  value: [{
    // AlgorithmIdentifier naming the PBE scheme used for encryptedData
    name: 'EncryptedPrivateKeyInfo.encryptionAlgorithm',
    tagClass: asn1.Class.UNIVERSAL,
    type: asn1.Type.SEQUENCE,
    constructed: true,
    value: [{
      name: 'AlgorithmIdentifier.algorithm',
      tagClass: asn1.Class.UNIVERSAL,
      type: asn1.Type.OID,
      constructed: false,
      capture: 'encryptionOid'
    }, {
      // scheme-specific parameters; interpreted later by pki.pbe.getCipher
      name: 'AlgorithmIdentifier.parameters',
      tagClass: asn1.Class.UNIVERSAL,
      type: asn1.Type.SEQUENCE,
      constructed: true,
      captureAsn1: 'encryptionParams'
    }]
  }, {
    // encryptedData
    name: 'EncryptedPrivateKeyInfo.encryptedData',
    tagClass: asn1.Class.UNIVERSAL,
    type: asn1.Type.OCTETSTRING,
    constructed: false,
    capture: 'encryptedData'
  }]
};
|
|
|
|
// validator for a PBES2Algorithms structure (RFC 8018 PBES2-params):
//   PBES2-params ::= SEQUENCE {
//     keyDerivationFunc AlgorithmIdentifier {{PBES2-KDFs}},
//     encryptionScheme AlgorithmIdentifier {{PBES2-Encs}} }
// Captures: kdfOid, kdfSalt, kdfIterationCount, keyLength (optional),
// prfOid (optional; absent means the HMAC-SHA1 default), encOid, encIv.
// Note: Currently only works w/PBKDF2 + AES encryption schemes
var PBES2AlgorithmsValidator = {
  name: 'PBES2Algorithms',
  tagClass: asn1.Class.UNIVERSAL,
  type: asn1.Type.SEQUENCE,
  constructed: true,
  value: [{
    name: 'PBES2Algorithms.keyDerivationFunc',
    tagClass: asn1.Class.UNIVERSAL,
    type: asn1.Type.SEQUENCE,
    constructed: true,
    value: [{
      name: 'PBES2Algorithms.keyDerivationFunc.oid',
      tagClass: asn1.Class.UNIVERSAL,
      type: asn1.Type.OID,
      constructed: false,
      capture: 'kdfOid'
    }, {
      // PBKDF2-params
      name: 'PBES2Algorithms.params',
      tagClass: asn1.Class.UNIVERSAL,
      type: asn1.Type.SEQUENCE,
      constructed: true,
      value: [{
        // only the `specified OCTET STRING` salt CHOICE is supported
        name: 'PBES2Algorithms.params.salt',
        tagClass: asn1.Class.UNIVERSAL,
        type: asn1.Type.OCTETSTRING,
        constructed: false,
        capture: 'kdfSalt'
      }, {
        name: 'PBES2Algorithms.params.iterationCount',
        tagClass: asn1.Class.UNIVERSAL,
        type: asn1.Type.INTEGER,
        constructed: false,
        capture: 'kdfIterationCount'
      }, {
        name: 'PBES2Algorithms.params.keyLength',
        tagClass: asn1.Class.UNIVERSAL,
        type: asn1.Type.INTEGER,
        constructed: false,
        optional: true,
        capture: 'keyLength'
      }, {
        // prf
        name: 'PBES2Algorithms.params.prf',
        tagClass: asn1.Class.UNIVERSAL,
        type: asn1.Type.SEQUENCE,
        constructed: true,
        optional: true,
        value: [{
          name: 'PBES2Algorithms.params.prf.algorithm',
          tagClass: asn1.Class.UNIVERSAL,
          type: asn1.Type.OID,
          constructed: false,
          capture: 'prfOid'
        }]
      }]
    }]
  }, {
    name: 'PBES2Algorithms.encryptionScheme',
    tagClass: asn1.Class.UNIVERSAL,
    type: asn1.Type.SEQUENCE,
    constructed: true,
    value: [{
      name: 'PBES2Algorithms.encryptionScheme.oid',
      tagClass: asn1.Class.UNIVERSAL,
      type: asn1.Type.OID,
      constructed: false,
      capture: 'encOid'
    }, {
      // CBC initialization vector for the chosen cipher
      name: 'PBES2Algorithms.encryptionScheme.iv',
      tagClass: asn1.Class.UNIVERSAL,
      type: asn1.Type.OCTETSTRING,
      constructed: false,
      capture: 'encIv'
    }]
  }]
};
|
|
|
|
// validator for PKCS#12 pkcs-12PbeParams (RFC 7292 Appendix C):
//   pkcs-12PbeParams ::= SEQUENCE { salt OCTET STRING, iterations INTEGER }
// Captures: salt (raw octets), iterations (DER integer bytes).
var pkcs12PbeParamsValidator = {
  name: 'pkcs-12PbeParams',
  tagClass: asn1.Class.UNIVERSAL,
  type: asn1.Type.SEQUENCE,
  constructed: true,
  value: [{
    name: 'pkcs-12PbeParams.salt',
    tagClass: asn1.Class.UNIVERSAL,
    type: asn1.Type.OCTETSTRING,
    constructed: false,
    capture: 'salt'
  }, {
    name: 'pkcs-12PbeParams.iterations',
    tagClass: asn1.Class.UNIVERSAL,
    type: asn1.Type.INTEGER,
    constructed: false,
    capture: 'iterations'
  }]
};
|
|
|
|
/**
|
|
* Encrypts a ASN.1 PrivateKeyInfo object, producing an EncryptedPrivateKeyInfo.
|
|
*
|
|
* PBES2Algorithms ALGORITHM-IDENTIFIER ::=
|
|
* { {PBES2-params IDENTIFIED BY id-PBES2}, ...}
|
|
*
|
|
* id-PBES2 OBJECT IDENTIFIER ::= {pkcs-5 13}
|
|
*
|
|
* PBES2-params ::= SEQUENCE {
|
|
* keyDerivationFunc AlgorithmIdentifier {{PBES2-KDFs}},
|
|
* encryptionScheme AlgorithmIdentifier {{PBES2-Encs}}
|
|
* }
|
|
*
|
|
* PBES2-KDFs ALGORITHM-IDENTIFIER ::=
|
|
* { {PBKDF2-params IDENTIFIED BY id-PBKDF2}, ... }
|
|
*
|
|
* PBES2-Encs ALGORITHM-IDENTIFIER ::= { ... }
|
|
*
|
|
* PBKDF2-params ::= SEQUENCE {
|
|
* salt CHOICE {
|
|
* specified OCTET STRING,
|
|
* otherSource AlgorithmIdentifier {{PBKDF2-SaltSources}}
|
|
* },
|
|
* iterationCount INTEGER (1..MAX),
|
|
* keyLength INTEGER (1..MAX) OPTIONAL,
|
|
* prf AlgorithmIdentifier {{PBKDF2-PRFs}} DEFAULT algid-hmacWithSHA1
|
|
* }
|
|
*
|
|
* @param obj the ASN.1 PrivateKeyInfo object.
|
|
* @param password the password to encrypt with.
|
|
* @param options:
|
|
* algorithm the encryption algorithm to use
|
|
* ('aes128', 'aes192', 'aes256', '3des'), defaults to 'aes128'.
|
|
* count the iteration count to use.
|
|
* saltSize the salt size to use.
|
|
* prfAlgorithm the PRF message digest algorithm to use
|
|
* ('sha1', 'sha224', 'sha256', 'sha384', 'sha512')
|
|
*
|
|
* @return the ASN.1 EncryptedPrivateKeyInfo.
|
|
*/
|
|
pki.encryptPrivateKeyInfo = function(obj, password, options) {
  // set default options (see JSDoc above for the full option list)
  options = options || {};
  options.saltSize = options.saltSize || 8;
  options.count = options.count || 2048;
  options.algorithm = options.algorithm || 'aes128';
  options.prfAlgorithm = options.prfAlgorithm || 'sha1';

  // generate PBE params shared by both encryption branches
  var salt = forge$1.random.getBytesSync(options.saltSize);
  var count = options.count;
  var countBytes = asn1.integerToDer(count);
  var dkLen;
  var encryptionAlgorithm;
  var encryptedData;
  if(options.algorithm.indexOf('aes') === 0 || options.algorithm === 'des') {
    // do PBES2 (RFC 8018): PBKDF2 key derivation + CBC-mode cipher
    var ivLen, encOid, cipherFn;
    switch(options.algorithm) {
      case 'aes128':
        dkLen = 16;
        ivLen = 16;
        encOid = oids['aes128-CBC'];
        cipherFn = forge$1.aes.createEncryptionCipher;
        break;
      case 'aes192':
        dkLen = 24;
        ivLen = 16;
        encOid = oids['aes192-CBC'];
        cipherFn = forge$1.aes.createEncryptionCipher;
        break;
      case 'aes256':
        dkLen = 32;
        ivLen = 16;
        encOid = oids['aes256-CBC'];
        cipherFn = forge$1.aes.createEncryptionCipher;
        break;
      case 'des':
        // single DES: weak, retained for compatibility only
        dkLen = 8;
        ivLen = 8;
        encOid = oids['desCBC'];
        cipherFn = forge$1.des.createEncryptionCipher;
        break;
      default:
        var error = new Error('Cannot encrypt private key. Unknown encryption algorithm.');
        error.algorithm = options.algorithm;
        throw error;
    }

    // get PRF message digest, e.g. 'sha1' -> 'hmacWithSHA1'
    var prfAlgorithm = 'hmacWith' + options.prfAlgorithm.toUpperCase();
    var md = prfAlgorithmToMessageDigest(prfAlgorithm);

    // encrypt the DER-encoded PrivateKeyInfo with the PBKDF2-derived key
    var dk = forge$1.pkcs5.pbkdf2(password, salt, count, dkLen, md);
    var iv = forge$1.random.getBytesSync(ivLen);
    var cipher = cipherFn(dk);
    cipher.start(iv);
    cipher.update(asn1.toDer(obj));
    cipher.finish();
    encryptedData = cipher.output.getBytes();

    // get PBKDF2-params (adds keyLength + prf when PRF is not SHA-1)
    var params = createPbkdf2Params(salt, countBytes, dkLen, prfAlgorithm);

    // AlgorithmIdentifier: id-PBES2 + PBES2-params
    encryptionAlgorithm = asn1.create(
      asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [
        asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OID, false,
          asn1.oidToDer(oids['pkcs5PBES2']).getBytes()),
        asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [
          // keyDerivationFunc
          asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [
            asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OID, false,
              asn1.oidToDer(oids['pkcs5PBKDF2']).getBytes()),
            // PBKDF2-params
            params
          ]),
          // encryptionScheme
          asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [
            asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OID, false,
              asn1.oidToDer(encOid).getBytes()),
            // iv
            asn1.create(
              asn1.Class.UNIVERSAL, asn1.Type.OCTETSTRING, false, iv)
          ])
        ])
      ]);
  } else if(options.algorithm === '3des') {
    // Do PKCS12 PBE: key and IV both come from the PKCS#12 KDF
    // (ID byte 1 = key material, ID byte 2 = IV)
    dkLen = 24;

    var saltBytes = new forge$1.util.ByteBuffer(salt);
    var dk = pki.pbe.generatePkcs12Key(password, saltBytes, 1, count, dkLen);
    var iv = pki.pbe.generatePkcs12Key(password, saltBytes, 2, count, dkLen);
    var cipher = forge$1.des.createEncryptionCipher(dk);
    cipher.start(iv);
    cipher.update(asn1.toDer(obj));
    cipher.finish();
    encryptedData = cipher.output.getBytes();

    // AlgorithmIdentifier: pbeWithSHAAnd3-KeyTripleDES-CBC + pkcs-12PbeParams
    encryptionAlgorithm = asn1.create(
      asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [
        asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OID, false,
          asn1.oidToDer(oids['pbeWithSHAAnd3-KeyTripleDES-CBC']).getBytes()),
        // pkcs-12PbeParams
        asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [
          // salt
          asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OCTETSTRING, false, salt),
          // iteration count
          asn1.create(asn1.Class.UNIVERSAL, asn1.Type.INTEGER, false,
            countBytes.getBytes())
        ])
      ]);
  } else {
    var error = new Error('Cannot encrypt private key. Unknown encryption algorithm.');
    error.algorithm = options.algorithm;
    throw error;
  }

  // EncryptedPrivateKeyInfo ::= SEQUENCE { encryptionAlgorithm, encryptedData }
  var rval = asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [
    // encryptionAlgorithm
    encryptionAlgorithm,
    // encryptedData
    asn1.create(
      asn1.Class.UNIVERSAL, asn1.Type.OCTETSTRING, false, encryptedData)
  ]);
  return rval;
};
|
|
|
|
/**
 * Decrypts an ASN.1 EncryptedPrivateKeyInfo object.
 *
 * @param obj the ASN.1 EncryptedPrivateKeyInfo object.
 * @param password the password to decrypt with.
 *
 * @return the ASN.1 PrivateKeyInfo on success, null on failure (e.g. a
 *           wrong password producing invalid padding).
 */
pki.decryptPrivateKeyInfo = function(obj, password) {
  // parse the structure, collecting the PBE scheme OID, its parameters
  // and the encrypted octets
  var capture = {};
  var errors = [];
  var valid = asn1.validate(obj, encryptedPrivateKeyValidator, capture, errors);
  if(!valid) {
    var error = new Error('Cannot read encrypted private key. ' +
      'ASN.1 object is not a supported EncryptedPrivateKeyInfo.');
    error.errors = errors;
    throw error;
  }

  // build a cipher, already started, for the declared PBE scheme
  var oid = asn1.derToOid(capture.encryptionOid);
  var cipher = pki.pbe.getCipher(oid, capture.encryptionParams, password);

  // decrypt; `finish()` fails when the result is malformed
  var encrypted = forge$1.util.createBuffer(capture.encryptedData);
  cipher.update(encrypted);
  if(!cipher.finish()) {
    return null;
  }
  return asn1.fromDer(cipher.output);
};
|
|
|
|
/**
 * Converts an EncryptedPrivateKeyInfo to PEM format.
 *
 * @param epki the EncryptedPrivateKeyInfo.
 * @param maxline the maximum characters per line, defaults to 64.
 *
 * @return the PEM-formatted encrypted private key.
 */
pki.encryptedPrivateKeyToPem = function(epki, maxline) {
  // DER-encode the structure, then wrap it in a PEM envelope
  var der = asn1.toDer(epki);
  var msg = {
    type: 'ENCRYPTED PRIVATE KEY',
    body: der.getBytes()
  };
  return forge$1.pem.encode(msg, {maxline: maxline});
};
|
|
|
|
/**
 * Converts a PEM-encoded EncryptedPrivateKeyInfo to ASN.1 format. Decryption
 * is not performed.
 *
 * @param pem the EncryptedPrivateKeyInfo in PEM-format.
 *
 * @return the ASN.1 EncryptedPrivateKeyInfo.
 * @throws Error if the PEM header type is wrong or the PEM envelope itself
 *           is Proc-Type encrypted (unsupported here).
 */
pki.encryptedPrivateKeyFromPem = function(pem) {
  var msg = forge$1.pem.decode(pem)[0];

  if(msg.type !== 'ENCRYPTED PRIVATE KEY') {
    // fix: the message previously stated the header type *is*
    // "ENCRYPTED PRIVATE KEY", which is the opposite of this failure
    var error = new Error('Could not convert encrypted private key from PEM; ' +
      'PEM header type is not "ENCRYPTED PRIVATE KEY".');
    error.headerType = msg.type;
    throw error;
  }
  if(msg.procType && msg.procType.type === 'ENCRYPTED') {
    throw new Error('Could not convert encrypted private key from PEM; ' +
      'PEM is encrypted.');
  }

  // convert DER to ASN.1 object
  return asn1.fromDer(msg.body);
};
|
|
|
|
/**
|
|
* Encrypts an RSA private key. By default, the key will be wrapped in
|
|
* a PrivateKeyInfo and encrypted to produce a PKCS#8 EncryptedPrivateKeyInfo.
|
|
* This is the standard, preferred way to encrypt a private key.
|
|
*
|
|
* To produce a non-standard PEM-encrypted private key that uses encapsulated
|
|
* headers to indicate the encryption algorithm (old-style non-PKCS#8 OpenSSL
|
|
* private key encryption), set the 'legacy' option to true. Note: Using this
|
|
* option will cause the iteration count to be forced to 1.
|
|
*
|
|
* Note: The 'des' algorithm is supported, but it is not considered to be
|
|
* secure because it only uses a single 56-bit key. If possible, it is highly
|
|
* recommended that a different algorithm be used.
|
|
*
|
|
* @param rsaKey the RSA key to encrypt.
|
|
* @param password the password to use.
|
|
* @param options:
|
|
* algorithm: the encryption algorithm to use
|
|
* ('aes128', 'aes192', 'aes256', '3des', 'des').
|
|
* count: the iteration count to use.
|
|
* saltSize: the salt size to use.
|
|
* legacy: output an old non-PKCS#8 PEM-encrypted+encapsulated
|
|
* headers (DEK-Info) private key.
|
|
*
|
|
* @return the PEM-encoded ASN.1 EncryptedPrivateKeyInfo.
|
|
*/
|
|
pki.encryptRsaPrivateKey = function(rsaKey, password, options) {
  // standard PKCS#8 path (default): wrap the key in a PrivateKeyInfo,
  // encrypt it via pki.encryptPrivateKeyInfo, and PEM-encode the result
  options = options || {};
  if(!options.legacy) {
    // encrypt PrivateKeyInfo
    var rval = pki.wrapRsaPrivateKey(pki.privateKeyToAsn1(rsaKey));
    rval = pki.encryptPrivateKeyInfo(rval, password, options);
    return pki.encryptedPrivateKeyToPem(rval);
  }

  // legacy non-PKCS#8: OpenSSL-style encrypted PEM with DEK-Info headers.
  // Select algorithm label, derived-key length, IV and cipher factory.
  var algorithm;
  var iv;
  var dkLen;
  var cipherFn;
  switch(options.algorithm) {
    case 'aes128':
      algorithm = 'AES-128-CBC';
      dkLen = 16;
      iv = forge$1.random.getBytesSync(16);
      cipherFn = forge$1.aes.createEncryptionCipher;
      break;
    case 'aes192':
      algorithm = 'AES-192-CBC';
      dkLen = 24;
      iv = forge$1.random.getBytesSync(16);
      cipherFn = forge$1.aes.createEncryptionCipher;
      break;
    case 'aes256':
      algorithm = 'AES-256-CBC';
      dkLen = 32;
      iv = forge$1.random.getBytesSync(16);
      cipherFn = forge$1.aes.createEncryptionCipher;
      break;
    case '3des':
      algorithm = 'DES-EDE3-CBC';
      dkLen = 24;
      iv = forge$1.random.getBytesSync(8);
      cipherFn = forge$1.des.createEncryptionCipher;
      break;
    case 'des':
      // weak single DES; see the warning in the JSDoc above
      algorithm = 'DES-CBC';
      dkLen = 8;
      iv = forge$1.random.getBytesSync(8);
      cipherFn = forge$1.des.createEncryptionCipher;
      break;
    default:
      var error = new Error('Could not encrypt RSA private key; unsupported ' +
        'encryption algorithm "' + options.algorithm + '".');
      error.algorithm = options.algorithm;
      throw error;
  }

  // encrypt private key using OpenSSL legacy key derivation
  // (EVP_BytesToKey; only the first 8 IV bytes act as the salt)
  var dk = forge$1.pbe.opensslDeriveBytes(password, iv.substr(0, 8), dkLen);
  var cipher = cipherFn(dk);
  cipher.start(iv);
  cipher.update(asn1.toDer(pki.privateKeyToAsn1(rsaKey)));
  cipher.finish();

  // emit a PEM envelope with Proc-Type/DEK-Info encapsulated headers;
  // the IV doubles as the DEK-Info parameter (uppercase hex)
  var msg = {
    type: 'RSA PRIVATE KEY',
    procType: {
      version: '4',
      type: 'ENCRYPTED'
    },
    dekInfo: {
      algorithm: algorithm,
      parameters: forge$1.util.bytesToHex(iv).toUpperCase()
    },
    body: cipher.output.getBytes()
  };
  return forge$1.pem.encode(msg);
};
|
|
|
|
/**
|
|
* Decrypts an RSA private key.
|
|
*
|
|
* @param pem the PEM-formatted EncryptedPrivateKeyInfo to decrypt.
|
|
* @param password the password to use.
|
|
*
|
|
* @return the RSA key on success, null on failure.
|
|
*/
|
|
/**
 * Decrypts an RSA private key given in any of the three supported PEM
 * forms: PKCS#8 "ENCRYPTED PRIVATE KEY", plain "PRIVATE KEY", or legacy
 * OpenSSL "RSA PRIVATE KEY" (optionally with DEK-Info PEM-level encryption).
 *
 * @param pem the PEM-formatted key to decrypt.
 * @param password the password to use.
 *
 * @return the RSA key on success, null on failure (e.g. wrong password).
 */
pki.decryptRsaPrivateKey = function(pem, password) {
  var rval = null;

  var msg = forge$1.pem.decode(pem)[0];

  if(msg.type !== 'ENCRYPTED PRIVATE KEY' &&
    msg.type !== 'PRIVATE KEY' &&
    msg.type !== 'RSA PRIVATE KEY') {
    var error = new Error('Could not convert private key from PEM; PEM header type ' +
      'is not "ENCRYPTED PRIVATE KEY", "PRIVATE KEY", or "RSA PRIVATE KEY".');
    // fix: previously assigned the Error object to itself
    // (`error.headerType = error;`); expose the offending header type instead
    error.headerType = msg.type;
    throw error;
  }

  // legacy PEM-level (Proc-Type: ENCRYPTED) encryption with DEK-Info headers
  if(msg.procType && msg.procType.type === 'ENCRYPTED') {
    var dkLen;
    var cipherFn;
    switch(msg.dekInfo.algorithm) {
      case 'DES-CBC':
        dkLen = 8;
        cipherFn = forge$1.des.createDecryptionCipher;
        break;
      case 'DES-EDE3-CBC':
        dkLen = 24;
        cipherFn = forge$1.des.createDecryptionCipher;
        break;
      case 'AES-128-CBC':
        dkLen = 16;
        cipherFn = forge$1.aes.createDecryptionCipher;
        break;
      case 'AES-192-CBC':
        dkLen = 24;
        cipherFn = forge$1.aes.createDecryptionCipher;
        break;
      case 'AES-256-CBC':
        dkLen = 32;
        cipherFn = forge$1.aes.createDecryptionCipher;
        break;
      case 'RC2-40-CBC':
        dkLen = 5;
        cipherFn = function(key) {
          return forge$1.rc2.createDecryptionCipher(key, 40);
        };
        break;
      case 'RC2-64-CBC':
        dkLen = 8;
        cipherFn = function(key) {
          return forge$1.rc2.createDecryptionCipher(key, 64);
        };
        break;
      case 'RC2-128-CBC':
        dkLen = 16;
        cipherFn = function(key) {
          return forge$1.rc2.createDecryptionCipher(key, 128);
        };
        break;
      default:
        var error = new Error('Could not decrypt private key; unsupported ' +
          'encryption algorithm "' + msg.dekInfo.algorithm + '".');
        error.algorithm = msg.dekInfo.algorithm;
        throw error;
    }

    // use OpenSSL legacy key derivation (EVP_BytesToKey); the DEK-Info
    // parameter is the IV and its first 8 bytes act as the salt
    var iv = forge$1.util.hexToBytes(msg.dekInfo.parameters);
    var dk = forge$1.pbe.opensslDeriveBytes(password, iv.substr(0, 8), dkLen);
    var cipher = cipherFn(dk);
    cipher.start(iv);
    cipher.update(forge$1.util.createBuffer(msg.body));
    if(cipher.finish()) {
      rval = cipher.output.getBytes();
    } else {
      // bad password / corrupt data: report failure via null
      return rval;
    }
  } else {
    rval = msg.body;
  }

  if(msg.type === 'ENCRYPTED PRIVATE KEY') {
    // PKCS#8: decrypt the EncryptedPrivateKeyInfo (may yield null)
    rval = pki.decryptPrivateKeyInfo(asn1.fromDer(rval), password);
  } else {
    // decryption already performed above
    rval = asn1.fromDer(rval);
  }

  if(rval !== null) {
    rval = pki.privateKeyFromAsn1(rval);
  }

  return rval;
};
|
|
|
|
/**
|
|
* Derives a PKCS#12 key.
|
|
*
|
|
* @param password the password to derive the key material from, null or
|
|
* undefined for none.
|
|
* @param salt the salt, as a ByteBuffer, to use.
|
|
* @param id the PKCS#12 ID byte (1 = key material, 2 = IV, 3 = MAC).
|
|
* @param iter the iteration count.
|
|
* @param n the number of bytes to derive from the password.
|
|
* @param md the message digest to use, defaults to SHA-1.
|
|
*
|
|
* @return a ByteBuffer with the bytes derived from the password.
|
|
*/
|
|
pki.pbe.generatePkcs12Key = function(password, salt, id, iter, n, md) {
  var j, l;

  // default digest is SHA-1 per RFC 7292 Appendix B
  if(typeof md === 'undefined' || md === null) {
    if(!('sha1' in forge$1.md)) {
      throw new Error('"sha1" hash algorithm unavailable.');
    }
    md = forge$1.md.sha1.create();
  }

  var u = md.digestLength;
  var v = md.blockLength;
  var result = new forge$1.util.ByteBuffer();

  /* Convert password to Unicode byte buffer + trailing 0-byte. */
  // (big-endian UTF-16 code units, as PKCS#12 requires)
  var passBuf = new forge$1.util.ByteBuffer();
  if(password !== null && password !== undefined) {
    for(l = 0; l < password.length; l++) {
      passBuf.putInt16(password.charCodeAt(l));
    }
    passBuf.putInt16(0);
  }

  /* Length of salt and password in BYTES. */
  var p = passBuf.length();
  var s = salt.length();

  /* 1. Construct a string, D (the "diversifier"), by concatenating
        v copies of ID. */
  var D = new forge$1.util.ByteBuffer();
  D.fillWithByte(id, v);

  /* 2. Concatenate copies of the salt together to create a string S of length
        v * ceil(s / v) bytes (the final copy of the salt may be trunacted
        to create S).
        Note that if the salt is the empty string, then so is S. */
  var Slen = v * Math.ceil(s / v);
  var S = new forge$1.util.ByteBuffer();
  for(l = 0; l < Slen; l++) {
    S.putByte(salt.at(l % s));
  }

  /* 3. Concatenate copies of the password together to create a string P of
        length v * ceil(p / v) bytes (the final copy of the password may be
        truncated to create P).
        Note that if the password is the empty string, then so is P. */
  var Plen = v * Math.ceil(p / v);
  var P = new forge$1.util.ByteBuffer();
  for(l = 0; l < Plen; l++) {
    P.putByte(passBuf.at(l % p));
  }

  /* 4. Set I=S||P to be the concatenation of S and P. */
  var I = S;
  I.putBuffer(P);

  /* 5. Set c=ceil(n / u). */
  var c = Math.ceil(n / u);

  /* 6. For i=1, 2, ..., c, do the following: */
  for(var i = 1; i <= c; i++) {
    /* a) Set Ai=H^r(D||I). (l.e. the rth hash of D||I, H(H(H(...H(D||I)))) */
    var buf = new forge$1.util.ByteBuffer();
    buf.putBytes(D.bytes());
    buf.putBytes(I.bytes());
    for(var round = 0; round < iter; round++) {
      md.start();
      md.update(buf.getBytes());
      buf = md.digest();
    }

    /* b) Concatenate copies of Ai to create a string B of length v bytes (the
          final copy of Ai may be truncated to create B). */
    var B = new forge$1.util.ByteBuffer();
    for(l = 0; l < v; l++) {
      B.putByte(buf.at(l % u));
    }

    /* c) Treating I as a concatenation I0, I1, ..., Ik-1 of v-byte blocks,
          where k=ceil(s / v) + ceil(p / v), modify I by setting
          Ij=(Ij+B+1) mod 2v for each j. */
    var k = Math.ceil(s / v) + Math.ceil(p / v);
    var Inew = new forge$1.util.ByteBuffer();
    for(j = 0; j < k; j++) {
      var chunk = new forge$1.util.ByteBuffer(I.getBytes(v));
      // big-integer add of B+1 to the chunk, byte by byte from the least
      // significant end; x starts at 0x1ff so the first shift leaves the
      // "+1" carry in place
      var x = 0x1ff;
      for(l = B.length() - 1; l >= 0; l--) {
        x = x >> 8;
        x += B.at(l) + chunk.at(l);
        chunk.setAt(l, x & 0xff);
      }
      Inew.putBuffer(chunk);
    }
    I = Inew;

    /* Add Ai to A. */
    result.putBuffer(buf);
  }

  // keep only the first n derived bytes
  result.truncate(result.length() - n);
  return result;
};
|
|
|
|
/**
 * Get a new Forge cipher object instance for the given PBE scheme.
 *
 * @param oid the scheme OID (in string notation).
 * @param params the ASN.1 params object for the scheme.
 * @param password the password to decrypt with.
 *
 * @return new cipher object instance, already started.
 */
pki.pbe.getCipher = function(oid, params, password) {
  // dispatch on the PBE scheme OID
  if(oid === pki.oids['pkcs5PBES2']) {
    return pki.pbe.getCipherForPBES2(oid, params, password);
  }
  if(oid === pki.oids['pbeWithSHAAnd3-KeyTripleDES-CBC'] ||
    oid === pki.oids['pbewithSHAAnd40BitRC2-CBC']) {
    return pki.pbe.getCipherForPKCS12PBE(oid, params, password);
  }

  var error = new Error('Cannot read encrypted PBE data block. Unsupported OID.');
  error.oid = oid;
  error.supportedOids = [
    'pkcs5PBES2',
    'pbeWithSHAAnd3-KeyTripleDES-CBC',
    'pbewithSHAAnd40BitRC2-CBC'
  ];
  throw error;
};
|
|
|
|
/**
|
|
* Get new Forge cipher object instance according to PBES2 params block.
|
|
*
|
|
* The returned cipher instance is already started using the IV
|
|
* from PBES2 parameter block.
|
|
*
|
|
* @param oid the PKCS#5 PBKDF2 OID (in string notation).
|
|
* @param params the ASN.1 PBES2-params object.
|
|
* @param password the password to decrypt with.
|
|
*
|
|
* @return new cipher object instance.
|
|
*/
|
|
pki.pbe.getCipherForPBES2 = function(oid, params, password) {
  // parse the PBES2-params block, collecting KDF and cipher settings
  var capture = {};
  var errors = [];
  if(!asn1.validate(params, PBES2AlgorithmsValidator, capture, errors)) {
    var error = new Error('Cannot read password-based-encryption algorithm ' +
      'parameters. ASN.1 object is not a supported EncryptedPrivateKeyInfo.');
    error.errors = errors;
    throw error;
  }

  // check oids: only PBKDF2 is supported as the key derivation function
  oid = asn1.derToOid(capture.kdfOid);
  if(oid !== pki.oids['pkcs5PBKDF2']) {
    var error = new Error('Cannot read encrypted private key. ' +
      'Unsupported key derivation function OID.');
    error.oid = oid;
    error.supportedOids = ['pkcs5PBKDF2'];
    throw error;
  }
  // ...and only AES-CBC / (3)DES-CBC as the encryption scheme
  oid = asn1.derToOid(capture.encOid);
  if(oid !== pki.oids['aes128-CBC'] &&
    oid !== pki.oids['aes192-CBC'] &&
    oid !== pki.oids['aes256-CBC'] &&
    oid !== pki.oids['des-EDE3-CBC'] &&
    oid !== pki.oids['desCBC']) {
    var error = new Error('Cannot read encrypted private key. ' +
      'Unsupported encryption scheme OID.');
    error.oid = oid;
    error.supportedOids = [
      'aes128-CBC', 'aes192-CBC', 'aes256-CBC', 'des-EDE3-CBC', 'desCBC'];
    throw error;
  }

  // set PBE params; the iteration count is a big-endian DER integer
  var salt = capture.kdfSalt;
  var count = forge$1.util.createBuffer(capture.kdfIterationCount);
  count = count.getInt(count.length() << 3);
  var dkLen;
  var cipherFn;
  // no `default` case needed: the encryption OID was validated above
  switch(pki.oids[oid]) {
    case 'aes128-CBC':
      dkLen = 16;
      cipherFn = forge$1.aes.createDecryptionCipher;
      break;
    case 'aes192-CBC':
      dkLen = 24;
      cipherFn = forge$1.aes.createDecryptionCipher;
      break;
    case 'aes256-CBC':
      dkLen = 32;
      cipherFn = forge$1.aes.createDecryptionCipher;
      break;
    case 'des-EDE3-CBC':
      dkLen = 24;
      cipherFn = forge$1.des.createDecryptionCipher;
      break;
    case 'desCBC':
      dkLen = 8;
      cipherFn = forge$1.des.createDecryptionCipher;
      break;
  }

  // get PRF message digest (HMAC-SHA1 when prfOid was absent)
  var md = prfOidToMessageDigest(capture.prfOid);

  // decrypt private key using pbe with chosen PRF and AES/DES
  var dk = forge$1.pkcs5.pbkdf2(password, salt, count, dkLen, md);
  var iv = capture.encIv;
  var cipher = cipherFn(dk);
  cipher.start(iv);

  return cipher;
};
|
|
|
|
/**
|
|
* Get new Forge cipher object instance for PKCS#12 PBE.
|
|
*
|
|
* The returned cipher instance is already started using the key & IV
|
|
* derived from the provided password and PKCS#12 PBE salt.
|
|
*
|
|
* @param oid The PKCS#12 PBE OID (in string notation).
|
|
* @param params The ASN.1 PKCS#12 PBE-params object.
|
|
* @param password The password to decrypt with.
|
|
*
|
|
* @return the new cipher object instance.
|
|
*/
|
|
pki.pbe.getCipherForPKCS12PBE = function(oid, params, password) {
  // parse pkcs-12PbeParams, collecting salt and iteration count
  var capture = {};
  var errors = [];
  if(!asn1.validate(params, pkcs12PbeParamsValidator, capture, errors)) {
    var error = new Error('Cannot read password-based-encryption algorithm ' +
      'parameters. ASN.1 object is not a supported EncryptedPrivateKeyInfo.');
    error.errors = errors;
    throw error;
  }

  var salt = forge$1.util.createBuffer(capture.salt);
  // iteration count is a big-endian DER integer
  var count = forge$1.util.createBuffer(capture.iterations);
  count = count.getInt(count.length() << 3);

  // pick derived-key length, IV length and cipher factory per scheme OID
  var dkLen, dIvLen, cipherFn;
  switch(oid) {
    case pki.oids['pbeWithSHAAnd3-KeyTripleDES-CBC']:
      dkLen = 24;
      dIvLen = 8;
      cipherFn = forge$1.des.startDecrypting;
      break;

    case pki.oids['pbewithSHAAnd40BitRC2-CBC']:
      dkLen = 5;
      dIvLen = 8;
      cipherFn = function(key, iv) {
        var cipher = forge$1.rc2.createDecryptionCipher(key, 40);
        cipher.start(iv, null);
        return cipher;
      };
      break;

    default:
      var error = new Error('Cannot read PKCS #12 PBE data block. Unsupported OID.');
      error.oid = oid;
      throw error;
  }

  // get PRF message digest
  // NOTE(review): pkcs12PbeParamsValidator does not capture a `prfOid`
  // field, so this resolves to the HMAC-SHA1 default — confirm intended
  var md = prfOidToMessageDigest(capture.prfOid);
  // derive key (ID byte 1) and IV (ID byte 2) from the same password/salt;
  // md.start() resets digest state between the two derivations
  var key = pki.pbe.generatePkcs12Key(password, salt, 1, count, dkLen, md);
  md.start();
  var iv = pki.pbe.generatePkcs12Key(password, salt, 2, count, dIvLen, md);

  return cipherFn(key, iv);
};
|
|
|
|
/**
 * OpenSSL's legacy key derivation function (EVP_BytesToKey with one
 * round): repeatedly hashes digest||password||salt and concatenates the
 * digests until dkLen bytes are available.
 *
 * See: http://www.openssl.org/docs/crypto/EVP_BytesToKey.html
 *
 * @param password the password to derive the key from.
 * @param salt the salt to use, null for none.
 * @param dkLen the number of bytes needed for the derived key.
 * @param [md] an optional message digest object to use, defaults to MD5
 *          (OpenSSL's historical default for this KDF).
 *
 * @return the derived key as a binary-encoded string of dkLen bytes.
 */
pki.pbe.opensslDeriveBytes = function(password, salt, dkLen, md) {
  if(typeof md === 'undefined' || md === null) {
    if(!('md5' in forge$1.md)) {
      throw new Error('"md5" hash algorithm unavailable.');
    }
    md = forge$1.md.md5.create();
  }
  if(salt === null) {
    salt = '';
  }
  // D_1 = H(password || salt); D_i = H(D_{i-1} || password || salt)
  var digests = [hash(md, password + salt)];
  for(var length = 16, i = 1; length < dkLen; ++i, length += 16) {
    digests.push(hash(md, digests[i - 1] + password + salt));
  }
  // concatenate and truncate to the requested length
  return digests.join('').substr(0, dkLen);
};
|
|
|
|
/**
 * Runs one complete digest cycle over `bytes`.
 *
 * @param md a forge message digest object (reused; state is reset).
 * @param bytes the binary-encoded string to hash.
 *
 * @return the raw digest bytes as a binary-encoded string.
 */
function hash(md, bytes) {
  md.start();
  md.update(bytes);
  return md.digest().getBytes();
}
|
|
|
|
/**
 * Maps a PRF AlgorithmIdentifier OID (DER bytes) to a forge message digest.
 *
 * @param prfOid the captured DER OID bytes, or a falsy value when the
 *          PBKDF2 params omitted the PRF (meaning the HMAC-SHA1 default).
 *
 * @return a new forge message digest instance for the PRF.
 * @throws Error if the OID names an unsupported PRF.
 */
function prfOidToMessageDigest(prfOid) {
  // absent PRF means the RFC 8018 default, HMAC-SHA1
  var name = 'hmacWithSHA1';
  if(prfOid) {
    name = pki.oids[asn1.derToOid(prfOid)];
    if(!name) {
      var error = new Error('Unsupported PRF OID.');
      error.oid = prfOid;
      error.supported = [
        'hmacWithSHA1', 'hmacWithSHA224', 'hmacWithSHA256', 'hmacWithSHA384',
        'hmacWithSHA512'];
      throw error;
    }
  }
  return prfAlgorithmToMessageDigest(name);
}
|
|
|
|
/**
 * Maps a PRF algorithm name (e.g. 'hmacWithSHA256') to a new forge message
 * digest instance.
 *
 * @param prfAlgorithm one of the supported 'hmacWithSHA*' names.
 *
 * @return a new message digest instance.
 * @throws Error if the algorithm is unsupported or the digest is missing.
 */
function prfAlgorithmToMessageDigest(prfAlgorithm) {
  var factory = forge$1.md;
  switch(prfAlgorithm) {
    case 'hmacWithSHA224':
      // SHA-224 lives on forge's sha512 module, so swap the factory first...
      factory = forge$1.md.sha512;
      // ...then DELIBERATELY fall through to the shared name handling below
    case 'hmacWithSHA1':
    case 'hmacWithSHA256':
    case 'hmacWithSHA384':
    case 'hmacWithSHA512':
      // strip the 'hmacWith' prefix: 'hmacWithSHA256' -> 'sha256'
      prfAlgorithm = prfAlgorithm.substr(8).toLowerCase();
      break;
    default:
      var error = new Error('Unsupported PRF algorithm.');
      error.algorithm = prfAlgorithm;
      error.supported = [
        'hmacWithSHA1', 'hmacWithSHA224', 'hmacWithSHA256', 'hmacWithSHA384',
        'hmacWithSHA512'];
      throw error;
  }
  if(!factory || !(prfAlgorithm in factory)) {
    throw new Error('Unknown hash algorithm: ' + prfAlgorithm);
  }
  return factory[prfAlgorithm].create();
}
|
|
|
|
/**
 * Builds an ASN.1 PBKDF2-params structure (RFC 8018).
 *
 * @param salt the salt as a binary-encoded string.
 * @param countBytes the iteration count as DER integer bytes.
 * @param dkLen the derived key length in bytes.
 * @param prfAlgorithm the PRF name (e.g. 'hmacWithSHA256').
 *
 * @return the ASN.1 PBKDF2-params SEQUENCE.
 */
function createPbkdf2Params(salt, countBytes, dkLen, prfAlgorithm) {
  var params = asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [
    // salt
    asn1.create(
      asn1.Class.UNIVERSAL, asn1.Type.OCTETSTRING, false, salt),
    // iteration count
    asn1.create(asn1.Class.UNIVERSAL, asn1.Type.INTEGER, false,
      countBytes.getBytes())
  ]);
  // when PRF algorithm is not SHA-1 default, add key length and PRF algorithm
  // (both are optional in the schema and omitted for the default)
  if(prfAlgorithm !== 'hmacWithSHA1') {
    params.value.push(
      // key length
      asn1.create(asn1.Class.UNIVERSAL, asn1.Type.INTEGER, false,
        forge$1.util.hexToBytes(dkLen.toString(16))),
      // AlgorithmIdentifier
      asn1.create(asn1.Class.UNIVERSAL, asn1.Type.SEQUENCE, true, [
        // algorithm
        asn1.create(asn1.Class.UNIVERSAL, asn1.Type.OID, false,
          asn1.oidToDer(pki.oids[prfAlgorithm]).getBytes()),
        // parameters (null)
        asn1.create(asn1.Class.UNIVERSAL, asn1.Type.NULL, false, '')
      ]));
  }
  return params;
}
|
|
|
|
/**
|
|
* @typedef {{ [key: string]: any }} Extensions
|
|
* @typedef {Error} Err
|
|
* @property {string} message
|
|
*/
|
|
|
|
/**
 * Copies every enumerable property of `props` onto `obj` as an enumerable,
 * configurable (but non-writable) own property.
 *
 * @param {Error} obj - the target (typically an Error)
 * @param {Extensions} props - properties to define on the target
 * @returns {Error & Extensions} the same `obj`, mutated
 */
function assign(obj, props) {
  // for...in (not Object.keys) so inherited enumerable props are included
  for (const name in props) {
    const descriptor = {
      value: props[name],
      enumerable: true,
      configurable: true,
    };
    Object.defineProperty(obj, name, descriptor);
  }
  return obj;
}
|
|
|
|
/**
 * Attaches a code and/or extra properties to an existing Error
 * (err-code semantics).
 *
 * @param {any} err - An Error
 * @param {string|Extensions} code - A string code or props to set on the error
 * @param {Extensions} [props] - Props to set on the error
 * @returns {Error & Extensions}
 */
function createError(err, code, props) {
  if (!err || typeof err === 'string') {
    throw new TypeError('Please pass an Error to err-code');
  }

  props = props || {};

  // two-argument form: createError(err, { ...props })
  if (typeof code === 'object') {
    props = code;
    code = '';
  }
  if (code) {
    props.code = code;
  }

  try {
    return assign(err, props);
  } catch (_) {
    // `err` refused the property definitions (e.g. frozen); build a
    // stand-in object sharing its prototype and carrying its message/stack
    props.message = err.message;
    props.stack = err.stack;

    const ErrClass = function () {};
    ErrClass.prototype = Object.create(Object.getPrototypeOf(err));

    // @ts-ignore
    return assign(new ErrClass(), props);
  }
}

var errCode = createError;
|
|
|
|
/* eslint-env browser */
// Check native crypto exists and is enabled (In insecure context `self.crypto`
// exists but `self.crypto.subtle` does not).
var webcrypto = {
  /**
   * Returns the native Web Crypto implementation on `win` (default:
   * `globalThis`). Throws a coded error when SubtleCrypto is unavailable,
   * which in browsers typically means an insecure (non-HTTPS) context.
   *
   * @param {typeof globalThis} [win] - Global object to probe.
   * @returns {Crypto} The native `crypto` object with `subtle` present.
   * @throws {Error} With `code: 'ERR_MISSING_WEB_CRYPTO'` when missing.
   */
  get(win = globalThis) {
    const nativeCrypto = win.crypto;
    if (nativeCrypto == null || nativeCrypto.subtle == null) {
      throw Object.assign(new Error('Missing Web Crypto API. ' +
        'The most likely cause of this error is that this page is being accessed ' +
        'from an insecure context (i.e. not HTTPS). For more information and ' +
        'possible resolutions see ' +
        'https://github.com/libp2p/js-libp2p-crypto/blob/master/README.md#web-crypto-api'), { code: 'ERR_MISSING_WEB_CRYPTO' });
    }
    return nativeCrypto;
  }
};
|
|
|
|
/**
 * Encodes a forge `jsbn.BigInteger` as an unsigned big-endian base64url
 * string, optionally left-padded with zero bytes to a fixed length.
 *
 * @param num - forge BigInteger (treated as unsigned via `.abs()`).
 * @param {number} [len] - Optional fixed byte length for the output.
 * @returns {string} base64url encoding of the magnitude bytes.
 * @throws {Error} If the magnitude does not fit into `len` bytes.
 */
function bigIntegerToUintBase64url(num, len) {
  // Call `.abs()` to convert to unsigned
  let buf = Uint8Array.from(num.abs().toByteArray()); // toByteArray converts to big endian
  // toByteArray() gives us back a signed array, which will include a leading 0
  // byte if the most significant bit of the number is 1:
  // https://docs.microsoft.com/en-us/windows/win32/seccertenroll/about-integer
  // Our number will always be positive so we should remove the leading padding.
  buf = buf[0] === 0 ? buf.slice(1) : buf;
  if (len != null) {
    if (buf.length > len)
      throw new Error('byte array longer than desired length');
    // Left-pad with zero bytes so the value is exactly `len` bytes wide.
    buf = concat([new Uint8Array(len - buf.length), buf]);
  }
  return toString$3(buf, 'base64url');
}
|
|
// Convert a base64url encoded string to a BigInteger
function base64urlToBigInteger(str) {
  const buf = base64urlToBuffer(str);
  // Bytes are interpreted as an unsigned big-endian integer by round-tripping
  // through a hex string into forge's jsbn BigInteger.
  return new forge$m.jsbn.BigInteger(toString$3(buf, 'base16'), 16);
}

/**
 * Decodes a base64url (padded) string into a Uint8Array, optionally
 * left-padding the result with zero bytes up to a fixed length.
 *
 * @param {string} str - base64urlpad-encoded input.
 * @param {number} [len] - Optional fixed byte length for the output.
 * @returns {Uint8Array}
 * @throws {Error} If the decoded bytes exceed `len`.
 */
function base64urlToBuffer(str, len) {
  let buf = fromString$1(str, 'base64urlpad');
  if (len != null) {
    if (buf.length > len)
      throw new Error('byte array longer than desired length');
    buf = concat([new Uint8Array(len - buf.length), buf]);
  }
  return buf;
}
|
|
|
|
/**
 * Byte-wise equality check for two byte buffers (e.g. Uint8Array).
 * Short-circuits on reference identity, then on length, then on the
 * first differing byte. Not timing-safe; intended for plain equality.
 *
 * @param {Uint8Array} a
 * @param {Uint8Array} b
 * @returns {boolean} true when both hold identical bytes.
 */
function equals(a, b) {
  // Same buffer object: trivially equal.
  if (a === b) {
    return true;
  }

  // Different lengths can never hold the same bytes.
  if (a.byteLength !== b.byteLength) {
    return false;
  }

  // Walk the bytes back-to-front; any mismatch means not equal.
  let index = a.byteLength;
  while (index--) {
    if (a[index] !== b[index]) {
      return false;
    }
  }

  return true;
}
|
|
|
|
// Key size in bits for each supported NIST elliptic curve name.
const bits = {
  'P-256': 256,
  'P-384': 384,
  'P-521': 521
};
const curveTypes = Object.keys(bits);
// NOTE(review): the joined string is discarded — this looks like bundler
// residue of code that interpolated the supported-curve list into an error
// message. Harmless, but has no effect here.
curveTypes.join(' / ');
|
|
|
|
// Based off of code from https://github.com/luke-park/SecureCompatibleEncryptionExamples
/**
 * Creates a password-based cipher: PBKDF2-derived key + AES-GCM (by default)
 * via the Web Crypto API. Encrypted output layout is
 * `salt || nonce || ciphertext` so decrypt can recover its inputs.
 *
 * @param {object} [opts] - Optional overrides: `algorithm` ('AES-GCM'),
 *   `keyLength` in bytes (16), `nonceLength` (12), `digest` ('SHA-256'),
 *   `saltLength` (16), `iterations` (32767).
 * @returns {{ encrypt: Function, decrypt: Function }}
 */
function create$2(opts) {
  const algorithm = opts?.algorithm ?? 'AES-GCM';
  let keyLength = opts?.keyLength ?? 16;
  const nonceLength = opts?.nonceLength ?? 12;
  const digest = opts?.digest ?? 'SHA-256';
  const saltLength = opts?.saltLength ?? 16;
  const iterations = opts?.iterations ?? 32767;
  const crypto = webcrypto.get();
  keyLength *= 8; // Browser crypto uses bits instead of bytes
  /**
   * Uses the provided password to derive a pbkdf2 key. The key
   * will then be used to encrypt the data.
   */
  async function encrypt(data, password) {
    // Fresh random salt and nonce for every encryption.
    const salt = crypto.getRandomValues(new Uint8Array(saltLength));
    const nonce = crypto.getRandomValues(new Uint8Array(nonceLength));
    const aesGcm = { name: algorithm, iv: nonce };
    if (typeof password === 'string') {
      password = fromString$1(password);
    }
    // Derive a key using PBKDF2.
    const deriveParams = { name: 'PBKDF2', salt, iterations, hash: { name: digest } };
    const rawKey = await crypto.subtle.importKey('raw', password, { name: 'PBKDF2' }, false, ['deriveKey', 'deriveBits']);
    const cryptoKey = await crypto.subtle.deriveKey(deriveParams, rawKey, { name: algorithm, length: keyLength }, true, ['encrypt']);
    // Encrypt the string.
    const ciphertext = await crypto.subtle.encrypt(aesGcm, cryptoKey, data);
    // Output: salt || nonce || ciphertext (decrypt() slices these back out).
    return concat([salt, aesGcm.iv, new Uint8Array(ciphertext)]);
  }
  /**
   * Uses the provided password to derive a pbkdf2 key. The key
   * will then be used to decrypt the data. The options used to create
   * this decryption cipher must be the same as those used to create
   * the encryption cipher.
   */
  async function decrypt(data, password) {
    // Recover the salt and nonce prepended by encrypt().
    const salt = data.slice(0, saltLength);
    const nonce = data.slice(saltLength, saltLength + nonceLength);
    const ciphertext = data.slice(saltLength + nonceLength);
    const aesGcm = { name: algorithm, iv: nonce };
    if (typeof password === 'string') {
      password = fromString$1(password);
    }
    // Derive the key using PBKDF2.
    const deriveParams = { name: 'PBKDF2', salt, iterations, hash: { name: digest } };
    const rawKey = await crypto.subtle.importKey('raw', password, { name: 'PBKDF2' }, false, ['deriveKey', 'deriveBits']);
    const cryptoKey = await crypto.subtle.deriveKey(deriveParams, rawKey, { name: algorithm, length: keyLength }, true, ['decrypt']);
    // Decrypt the string.
    const plaintext = await crypto.subtle.decrypt(aesGcm, cryptoKey, ciphertext);
    return new Uint8Array(plaintext);
  }
  const cipher = {
    encrypt,
    decrypt
  };
  return cipher;
}
|
|
|
|
/**
 * Secure Hash Algorithm with a 1024-bit block size implementation.
 *
 * This includes: SHA-512, SHA-384, SHA-512/224, and SHA-512/256. For
 * SHA-256 (block size 512 bits), see sha256.js.
 *
 * See FIPS 180-4 for details.
 *
 * @author Dave Longley
 *
 * Copyright (c) 2014-2015 Digital Bazaar, Inc.
 */

var forge = forge$m;

// Shared namespace for the SHA-512 family; each variant below is a thin
// wrapper around sha512.create(<algorithm name>).
var sha512 = forge.sha512 = forge.sha512 || {};

// SHA-512
forge.md.sha512 = forge.md.algorithms.sha512 = sha512;

// SHA-384
var sha384 = forge.sha384 = forge.sha512.sha384 = forge.sha512.sha384 || {};
sha384.create = function() {
  return sha512.create('SHA-384');
};
forge.md.sha384 = forge.md.algorithms.sha384 = sha384;

// SHA-512/256
forge.sha512.sha256 = forge.sha512.sha256 || {
  create: function() {
    return sha512.create('SHA-512/256');
  }
};
forge.md['sha512/256'] = forge.md.algorithms['sha512/256'] =
  forge.sha512.sha256;

// SHA-512/224
forge.sha512.sha224 = forge.sha512.sha224 || {
  create: function() {
    return sha512.create('SHA-512/224');
  }
};
forge.md['sha512/224'] = forge.md.algorithms['sha512/224'] =
  forge.sha512.sha224;
|
|
|
|
/**
 * Creates a SHA-2 message digest object.
 *
 * All 64-bit arithmetic is emulated with pairs of 32-bit integers
 * ([hi, lo]); carries from the low word are propagated manually.
 *
 * @param algorithm the algorithm to use (SHA-512, SHA-384, SHA-512/224,
 *          SHA-512/256).
 *
 * @return a message digest object.
 */
sha512.create = function(algorithm) {
  // do initialization as necessary (lazily builds _padding/_k/_states)
  if(!_initialized) {
    _init();
  }

  if(typeof algorithm === 'undefined') {
    algorithm = 'SHA-512';
  }

  if(!(algorithm in _states)) {
    throw new Error('Invalid SHA-512 algorithm: ' + algorithm);
  }

  // SHA-512 state contains eight 64-bit integers (each as two 32-bit ints)
  var _state = _states[algorithm];
  var _h = null;

  // input buffer
  var _input = forge.util.createBuffer();

  // used for 64-bit word storage
  var _w = new Array(80);
  for(var wi = 0; wi < 80; ++wi) {
    _w[wi] = new Array(2);
  }

  // determine digest length by algorithm name (default)
  var digestLength = 64;
  switch(algorithm) {
    case 'SHA-384':
      digestLength = 48;
      break;
    case 'SHA-512/256':
      digestLength = 32;
      break;
    case 'SHA-512/224':
      digestLength = 28;
      break;
  }

  // message digest object
  var md = {
    // SHA-512 => sha512
    // NOTE(review): replace('-', '') only strips the first '-', so
    // 'SHA-512/256' becomes 'sha512/256' — existing forge behavior.
    algorithm: algorithm.replace('-', '').toLowerCase(),
    blockLength: 128,
    digestLength: digestLength,
    // 56-bit length of message so far (does not including padding)
    messageLength: 0,
    // true message length
    fullMessageLength: null,
    // size of message length in bytes
    messageLengthSize: 16
  };

  /**
   * Starts the digest.
   *
   * @return this digest object.
   */
  md.start = function() {
    // up to 56-bit message length for convenience
    md.messageLength = 0;

    // full message length (set md.messageLength128 for backwards-compatibility)
    md.fullMessageLength = md.messageLength128 = [];
    var int32s = md.messageLengthSize / 4;
    for(var i = 0; i < int32s; ++i) {
      md.fullMessageLength.push(0);
    }
    _input = forge.util.createBuffer();
    // copy the initial hash state so updates don't mutate the template
    _h = new Array(_state.length);
    for(var i = 0; i < _state.length; ++i) {
      _h[i] = _state[i].slice(0);
    }
    return md;
  };
  // start digest automatically for first time
  md.start();

  /**
   * Updates the digest with the given message input. The given input can
   * treated as raw input (no encoding will be applied) or an encoding of
   * 'utf8' maybe given to encode the input using UTF-8.
   *
   * @param msg the message input to update with.
   * @param encoding the encoding to use (default: 'raw', other: 'utf8').
   *
   * @return this digest object.
   */
  md.update = function(msg, encoding) {
    if(encoding === 'utf8') {
      msg = forge.util.encodeUtf8(msg);
    }

    // update message length (128-bit counter kept as four 32-bit ints,
    // with carries rippled from least to most significant)
    var len = msg.length;
    md.messageLength += len;
    len = [(len / 0x100000000) >>> 0, len >>> 0];
    for(var i = md.fullMessageLength.length - 1; i >= 0; --i) {
      md.fullMessageLength[i] += len[1];
      len[1] = len[0] + ((md.fullMessageLength[i] / 0x100000000) >>> 0);
      md.fullMessageLength[i] = md.fullMessageLength[i] >>> 0;
      len[0] = ((len[1] / 0x100000000) >>> 0);
    }

    // add bytes to input buffer
    _input.putBytes(msg);

    // process bytes
    _update(_h, _w, _input);

    // compact input buffer every 2K or if empty
    if(_input.read > 2048 || _input.length() === 0) {
      _input.compact();
    }

    return md;
  };

  /**
   * Produces the digest.
   *
   * @return a byte buffer containing the digest value.
   */
  md.digest = function() {
    /* Note: Here we copy the remaining bytes in the input buffer and
    add the appropriate SHA-512 padding. Then we do the final update
    on a copy of the state so that if the user wants to get
    intermediate digests they can do so. */

    /* Determine the number of bytes that must be added to the message
    to ensure its length is congruent to 896 mod 1024. In other words,
    the data to be digested must be a multiple of 1024 bits (or 128 bytes).
    This data includes the message, some padding, and the length of the
    message. Since the length of the message will be encoded as 16 bytes (128
    bits), that means that the last segment of the data must have 112 bytes
    (896 bits) of message and padding. Therefore, the length of the message
    plus the padding must be congruent to 896 mod 1024 because
    1024 - 128 = 896.

    In order to fill up the message length it must be filled with
    padding that begins with 1 bit followed by all 0 bits. Padding
    must *always* be present, so if the message length is already
    congruent to 896 mod 1024, then 1024 padding bits must be added. */

    var finalBlock = forge.util.createBuffer();
    finalBlock.putBytes(_input.bytes());

    // compute remaining size to be digested (include message length size)
    var remaining = (
      md.fullMessageLength[md.fullMessageLength.length - 1] +
      md.messageLengthSize);

    // add padding for overflow blockSize - overflow
    // _padding starts with 1 byte with first bit is set (byte value 128), then
    // there may be up to (blockSize - 1) other pad bytes
    var overflow = remaining & (md.blockLength - 1);
    finalBlock.putBytes(_padding.substr(0, md.blockLength - overflow));

    // serialize message length in bits in big-endian order; since length
    // is stored in bytes we multiply by 8 and add carry from next int
    var next, carry;
    var bits = md.fullMessageLength[0] * 8;
    for(var i = 0; i < md.fullMessageLength.length - 1; ++i) {
      next = md.fullMessageLength[i + 1] * 8;
      carry = (next / 0x100000000) >>> 0;
      bits += carry;
      finalBlock.putInt32(bits >>> 0);
      bits = next >>> 0;
    }
    finalBlock.putInt32(bits);

    // run the final compression on a copy of the state so intermediate
    // digests remain possible
    var h = new Array(_h.length);
    for(var i = 0; i < _h.length; ++i) {
      h[i] = _h[i].slice(0);
    }
    _update(h, _w, finalBlock);
    var rval = forge.util.createBuffer();
    var hlen;
    if(algorithm === 'SHA-512') {
      hlen = h.length;
    } else if(algorithm === 'SHA-384') {
      hlen = h.length - 2;
    } else {
      hlen = h.length - 4;
    }
    for(var i = 0; i < hlen; ++i) {
      rval.putInt32(h[i][0]);
      // SHA-512/224 truncates mid-word: drop the final low 32 bits
      if(i !== hlen - 1 || algorithm !== 'SHA-512/224') {
        rval.putInt32(h[i][1]);
      }
    }
    return rval;
  };

  return md;
};
|
|
|
|
// sha-512 padding bytes not initialized yet; built lazily by _init() on the
// first sha512.create() call.
var _padding = null;
var _initialized = false;

// table of constants (the 80 SHA-512 round constants K, each as [hi, lo])
var _k = null;

// initial hash states, keyed by algorithm name
var _states = null;
|
|
|
|
/**
 * Initializes the constant tables.
 *
 * Builds the padding string, the 80 round constants K (FIPS 180-4,
 * each 64-bit value split as [hi32, lo32]), and the initial hash state
 * for each supported algorithm. Runs once; guarded by `_initialized`.
 */
function _init() {
  // create padding: one 0x80 byte followed by up to 128 zero bytes
  _padding = String.fromCharCode(128);
  _padding += forge.util.fillString(String.fromCharCode(0x00), 128);

  // create K table for SHA-512
  _k = [
    [0x428a2f98, 0xd728ae22], [0x71374491, 0x23ef65cd],
    [0xb5c0fbcf, 0xec4d3b2f], [0xe9b5dba5, 0x8189dbbc],
    [0x3956c25b, 0xf348b538], [0x59f111f1, 0xb605d019],
    [0x923f82a4, 0xaf194f9b], [0xab1c5ed5, 0xda6d8118],
    [0xd807aa98, 0xa3030242], [0x12835b01, 0x45706fbe],
    [0x243185be, 0x4ee4b28c], [0x550c7dc3, 0xd5ffb4e2],
    [0x72be5d74, 0xf27b896f], [0x80deb1fe, 0x3b1696b1],
    [0x9bdc06a7, 0x25c71235], [0xc19bf174, 0xcf692694],
    [0xe49b69c1, 0x9ef14ad2], [0xefbe4786, 0x384f25e3],
    [0x0fc19dc6, 0x8b8cd5b5], [0x240ca1cc, 0x77ac9c65],
    [0x2de92c6f, 0x592b0275], [0x4a7484aa, 0x6ea6e483],
    [0x5cb0a9dc, 0xbd41fbd4], [0x76f988da, 0x831153b5],
    [0x983e5152, 0xee66dfab], [0xa831c66d, 0x2db43210],
    [0xb00327c8, 0x98fb213f], [0xbf597fc7, 0xbeef0ee4],
    [0xc6e00bf3, 0x3da88fc2], [0xd5a79147, 0x930aa725],
    [0x06ca6351, 0xe003826f], [0x14292967, 0x0a0e6e70],
    [0x27b70a85, 0x46d22ffc], [0x2e1b2138, 0x5c26c926],
    [0x4d2c6dfc, 0x5ac42aed], [0x53380d13, 0x9d95b3df],
    [0x650a7354, 0x8baf63de], [0x766a0abb, 0x3c77b2a8],
    [0x81c2c92e, 0x47edaee6], [0x92722c85, 0x1482353b],
    [0xa2bfe8a1, 0x4cf10364], [0xa81a664b, 0xbc423001],
    [0xc24b8b70, 0xd0f89791], [0xc76c51a3, 0x0654be30],
    [0xd192e819, 0xd6ef5218], [0xd6990624, 0x5565a910],
    [0xf40e3585, 0x5771202a], [0x106aa070, 0x32bbd1b8],
    [0x19a4c116, 0xb8d2d0c8], [0x1e376c08, 0x5141ab53],
    [0x2748774c, 0xdf8eeb99], [0x34b0bcb5, 0xe19b48a8],
    [0x391c0cb3, 0xc5c95a63], [0x4ed8aa4a, 0xe3418acb],
    [0x5b9cca4f, 0x7763e373], [0x682e6ff3, 0xd6b2b8a3],
    [0x748f82ee, 0x5defb2fc], [0x78a5636f, 0x43172f60],
    [0x84c87814, 0xa1f0ab72], [0x8cc70208, 0x1a6439ec],
    [0x90befffa, 0x23631e28], [0xa4506ceb, 0xde82bde9],
    [0xbef9a3f7, 0xb2c67915], [0xc67178f2, 0xe372532b],
    [0xca273ece, 0xea26619c], [0xd186b8c7, 0x21c0c207],
    [0xeada7dd6, 0xcde0eb1e], [0xf57d4f7f, 0xee6ed178],
    [0x06f067aa, 0x72176fba], [0x0a637dc5, 0xa2c898a6],
    [0x113f9804, 0xbef90dae], [0x1b710b35, 0x131c471b],
    [0x28db77f5, 0x23047d84], [0x32caab7b, 0x40c72493],
    [0x3c9ebe0a, 0x15c9bebc], [0x431d67c4, 0x9c100d4c],
    [0x4cc5d4be, 0xcb3e42b6], [0x597f299c, 0xfc657e2a],
    [0x5fcb6fab, 0x3ad6faec], [0x6c44198c, 0x4a475817]
  ];

  // initial hash states
  _states = {};
  _states['SHA-512'] = [
    [0x6a09e667, 0xf3bcc908],
    [0xbb67ae85, 0x84caa73b],
    [0x3c6ef372, 0xfe94f82b],
    [0xa54ff53a, 0x5f1d36f1],
    [0x510e527f, 0xade682d1],
    [0x9b05688c, 0x2b3e6c1f],
    [0x1f83d9ab, 0xfb41bd6b],
    [0x5be0cd19, 0x137e2179]
  ];
  _states['SHA-384'] = [
    [0xcbbb9d5d, 0xc1059ed8],
    [0x629a292a, 0x367cd507],
    [0x9159015a, 0x3070dd17],
    [0x152fecd8, 0xf70e5939],
    [0x67332667, 0xffc00b31],
    [0x8eb44a87, 0x68581511],
    [0xdb0c2e0d, 0x64f98fa7],
    [0x47b5481d, 0xbefa4fa4]
  ];
  _states['SHA-512/256'] = [
    [0x22312194, 0xFC2BF72C],
    [0x9F555FA3, 0xC84C64C2],
    [0x2393B86B, 0x6F53B151],
    [0x96387719, 0x5940EABD],
    [0x96283EE2, 0xA88EFFE3],
    [0xBE5E1E25, 0x53863992],
    [0x2B0199FC, 0x2C85B8AA],
    [0x0EB72DDC, 0x81C52CA2]
  ];
  _states['SHA-512/224'] = [
    [0x8C3D37C8, 0x19544DA2],
    [0x73E19966, 0x89DCD4D6],
    [0x1DFAB7AE, 0x32FF9C82],
    [0x679DD514, 0x582F9FCF],
    [0x0F6D2B69, 0x7BD44DA8],
    [0x77E36F73, 0x04C48942],
    [0x3F9D85A8, 0x6A1D36C8],
    [0x1112E6AD, 0x91D692A1]
  ];

  // now initialized
  _initialized = true;
}
|
|
|
|
/**
 * Updates a SHA-512 state with the given byte buffer.
 *
 * Consumes the buffer in 128-byte chunks; any trailing partial chunk is
 * left in `bytes` for a later call. Each 64-bit quantity is represented
 * as a [hi32, lo32] pair, with carries from the low word folded into the
 * high word via `(lo / 0x100000000) >>> 0`.
 *
 * @param s the SHA-512 state to update.
 * @param w the array to use to store words.
 * @param bytes the byte buffer to update with.
 */
function _update(s, w, bytes) {
  // consume 512 bit (128 byte) chunks
  var t1_hi, t1_lo;
  var t2_hi, t2_lo;
  var s0_hi, s0_lo;
  var s1_hi, s1_lo;
  var ch_hi, ch_lo;
  var maj_hi, maj_lo;
  var a_hi, a_lo;
  var b_hi, b_lo;
  var c_hi, c_lo;
  var d_hi, d_lo;
  var e_hi, e_lo;
  var f_hi, f_lo;
  var g_hi, g_lo;
  var h_hi, h_lo;
  var i, hi, lo, w2, w7, w15, w16;
  var len = bytes.length();
  while(len >= 128) {
    // the w array will be populated with sixteen 64-bit big-endian words
    // and then extended into 64 64-bit words according to SHA-512
    for(i = 0; i < 16; ++i) {
      w[i][0] = bytes.getInt32() >>> 0;
      w[i][1] = bytes.getInt32() >>> 0;
    }
    // message schedule expansion (words 16..79)
    for(; i < 80; ++i) {
      // for word 2 words ago: ROTR 19(x) ^ ROTR 61(x) ^ SHR 6(x)
      w2 = w[i - 2];
      hi = w2[0];
      lo = w2[1];

      // high bits
      t1_hi = (
        ((hi >>> 19) | (lo << 13)) ^ // ROTR 19
        ((lo >>> 29) | (hi << 3)) ^ // ROTR 61/(swap + ROTR 29)
        (hi >>> 6)) >>> 0; // SHR 6
      // low bits
      t1_lo = (
        ((hi << 13) | (lo >>> 19)) ^ // ROTR 19
        ((lo << 3) | (hi >>> 29)) ^ // ROTR 61/(swap + ROTR 29)
        ((hi << 26) | (lo >>> 6))) >>> 0; // SHR 6

      // for word 15 words ago: ROTR 1(x) ^ ROTR 8(x) ^ SHR 7(x)
      w15 = w[i - 15];
      hi = w15[0];
      lo = w15[1];

      // high bits
      t2_hi = (
        ((hi >>> 1) | (lo << 31)) ^ // ROTR 1
        ((hi >>> 8) | (lo << 24)) ^ // ROTR 8
        (hi >>> 7)) >>> 0; // SHR 7
      // low bits
      t2_lo = (
        ((hi << 31) | (lo >>> 1)) ^ // ROTR 1
        ((hi << 24) | (lo >>> 8)) ^ // ROTR 8
        ((hi << 25) | (lo >>> 7))) >>> 0; // SHR 7

      // sum(t1, word 7 ago, t2, word 16 ago) modulo 2^64 (carry lo overflow)
      w7 = w[i - 7];
      w16 = w[i - 16];
      lo = (t1_lo + w7[1] + t2_lo + w16[1]);
      w[i][0] = (t1_hi + w7[0] + t2_hi + w16[0] +
        ((lo / 0x100000000) >>> 0)) >>> 0;
      w[i][1] = lo >>> 0;
    }

    // initialize hash value for this chunk
    a_hi = s[0][0];
    a_lo = s[0][1];
    b_hi = s[1][0];
    b_lo = s[1][1];
    c_hi = s[2][0];
    c_lo = s[2][1];
    d_hi = s[3][0];
    d_lo = s[3][1];
    e_hi = s[4][0];
    e_lo = s[4][1];
    f_hi = s[5][0];
    f_lo = s[5][1];
    g_hi = s[6][0];
    g_lo = s[6][1];
    h_hi = s[7][0];
    h_lo = s[7][1];

    // round function
    for(i = 0; i < 80; ++i) {
      // Sum1(e) = ROTR 14(e) ^ ROTR 18(e) ^ ROTR 41(e)
      s1_hi = (
        ((e_hi >>> 14) | (e_lo << 18)) ^ // ROTR 14
        ((e_hi >>> 18) | (e_lo << 14)) ^ // ROTR 18
        ((e_lo >>> 9) | (e_hi << 23))) >>> 0; // ROTR 41/(swap + ROTR 9)
      s1_lo = (
        ((e_hi << 18) | (e_lo >>> 14)) ^ // ROTR 14
        ((e_hi << 14) | (e_lo >>> 18)) ^ // ROTR 18
        ((e_lo << 23) | (e_hi >>> 9))) >>> 0; // ROTR 41/(swap + ROTR 9)

      // Ch(e, f, g) (optimized the same way as SHA-1)
      ch_hi = (g_hi ^ (e_hi & (f_hi ^ g_hi))) >>> 0;
      ch_lo = (g_lo ^ (e_lo & (f_lo ^ g_lo))) >>> 0;

      // Sum0(a) = ROTR 28(a) ^ ROTR 34(a) ^ ROTR 39(a)
      s0_hi = (
        ((a_hi >>> 28) | (a_lo << 4)) ^ // ROTR 28
        ((a_lo >>> 2) | (a_hi << 30)) ^ // ROTR 34/(swap + ROTR 2)
        ((a_lo >>> 7) | (a_hi << 25))) >>> 0; // ROTR 39/(swap + ROTR 7)
      s0_lo = (
        ((a_hi << 4) | (a_lo >>> 28)) ^ // ROTR 28
        ((a_lo << 30) | (a_hi >>> 2)) ^ // ROTR 34/(swap + ROTR 2)
        ((a_lo << 25) | (a_hi >>> 7))) >>> 0; // ROTR 39/(swap + ROTR 7)

      // Maj(a, b, c) (optimized the same way as SHA-1)
      maj_hi = ((a_hi & b_hi) | (c_hi & (a_hi ^ b_hi))) >>> 0;
      maj_lo = ((a_lo & b_lo) | (c_lo & (a_lo ^ b_lo))) >>> 0;

      // main algorithm
      // t1 = (h + s1 + ch + _k[i] + _w[i]) modulo 2^64 (carry lo overflow)
      lo = (h_lo + s1_lo + ch_lo + _k[i][1] + w[i][1]);
      t1_hi = (h_hi + s1_hi + ch_hi + _k[i][0] + w[i][0] +
        ((lo / 0x100000000) >>> 0)) >>> 0;
      t1_lo = lo >>> 0;

      // t2 = s0 + maj modulo 2^64 (carry lo overflow)
      lo = s0_lo + maj_lo;
      t2_hi = (s0_hi + maj_hi + ((lo / 0x100000000) >>> 0)) >>> 0;
      t2_lo = lo >>> 0;

      h_hi = g_hi;
      h_lo = g_lo;

      g_hi = f_hi;
      g_lo = f_lo;

      f_hi = e_hi;
      f_lo = e_lo;

      // e = (d + t1) modulo 2^64 (carry lo overflow)
      lo = d_lo + t1_lo;
      e_hi = (d_hi + t1_hi + ((lo / 0x100000000) >>> 0)) >>> 0;
      e_lo = lo >>> 0;

      d_hi = c_hi;
      d_lo = c_lo;

      c_hi = b_hi;
      c_lo = b_lo;

      b_hi = a_hi;
      b_lo = a_lo;

      // a = (t1 + t2) modulo 2^64 (carry lo overflow)
      lo = t1_lo + t2_lo;
      a_hi = (t1_hi + t2_hi + ((lo / 0x100000000) >>> 0)) >>> 0;
      a_lo = lo >>> 0;
    }

    // update hash state (additional modulo 2^64)
    lo = s[0][1] + a_lo;
    s[0][0] = (s[0][0] + a_hi + ((lo / 0x100000000) >>> 0)) >>> 0;
    s[0][1] = lo >>> 0;

    lo = s[1][1] + b_lo;
    s[1][0] = (s[1][0] + b_hi + ((lo / 0x100000000) >>> 0)) >>> 0;
    s[1][1] = lo >>> 0;

    lo = s[2][1] + c_lo;
    s[2][0] = (s[2][0] + c_hi + ((lo / 0x100000000) >>> 0)) >>> 0;
    s[2][1] = lo >>> 0;

    lo = s[3][1] + d_lo;
    s[3][0] = (s[3][0] + d_hi + ((lo / 0x100000000) >>> 0)) >>> 0;
    s[3][1] = lo >>> 0;

    lo = s[4][1] + e_lo;
    s[4][0] = (s[4][0] + e_hi + ((lo / 0x100000000) >>> 0)) >>> 0;
    s[4][1] = lo >>> 0;

    lo = s[5][1] + f_lo;
    s[5][0] = (s[5][0] + f_hi + ((lo / 0x100000000) >>> 0)) >>> 0;
    s[5][1] = lo >>> 0;

    lo = s[6][1] + g_lo;
    s[6][0] = (s[6][0] + g_hi + ((lo / 0x100000000) >>> 0)) >>> 0;
    s[6][1] = lo >>> 0;

    lo = s[7][1] + h_lo;
    s[7][0] = (s[7][0] + h_hi + ((lo / 0x100000000) >>> 0)) >>> 0;
    s[7][1] = lo >>> 0;

    len -= 128;
  }
}
|
|
|
|
/**
 * Returns `length` cryptographically-secure random bytes.
 *
 * @param {number} length - Number of bytes; must be a positive number.
 * @returns {Uint8Array}
 * @throws {Error} With `code: 'ERR_INVALID_LENGTH'` when `length` is not a
 *   positive finite number.
 */
function randomBytes(length) {
  // Number.isFinite (unlike the coercing global isNaN) also rejects
  // non-number inputs such as numeric strings and Infinity outright,
  // instead of letting them slip through to the underlying generator.
  if (!Number.isFinite(length) || length <= 0) {
    throw errCode(new Error('random bytes length must be a Number bigger than 0'), 'ERR_INVALID_LENGTH');
  }
  return utils$1.randomBytes(length);
}
|
|
|
|
// Convert a PKCS#1 in ASN1 DER format to a JWK key
/**
 * @param {Uint8Array} bytes - DER-encoded PKCS#1 RSA private key.
 * @returns {object} JWK private key (RFC 7518 §6.3 field names).
 */
function pkcs1ToJwk(bytes) {
  const asn1 = forge$m.asn1.fromDer(toString$3(bytes, 'ascii'));
  const privateKey = forge$m.pki.privateKeyFromAsn1(asn1);
  // https://tools.ietf.org/html/rfc7518#section-6.3.1
  return {
    kty: 'RSA',
    n: bigIntegerToUintBase64url(privateKey.n),
    e: bigIntegerToUintBase64url(privateKey.e),
    d: bigIntegerToUintBase64url(privateKey.d),
    p: bigIntegerToUintBase64url(privateKey.p),
    q: bigIntegerToUintBase64url(privateKey.q),
    dp: bigIntegerToUintBase64url(privateKey.dP),
    dq: bigIntegerToUintBase64url(privateKey.dQ),
    qi: bigIntegerToUintBase64url(privateKey.qInv),
    alg: 'RS256'
  };
}
|
|
// Convert a JWK key into PKCS#1 in ASN1 DER format
/**
 * @param {object} jwk - JWK RSA private key; all CRT components required.
 * @returns {Uint8Array} DER-encoded PKCS#1 private key.
 * @throws {Error} With `code: 'ERR_INVALID_PARAMETERS'` on missing fields.
 */
function jwkToPkcs1(jwk) {
  if (jwk.n == null || jwk.e == null || jwk.d == null || jwk.p == null || jwk.q == null || jwk.dp == null || jwk.dq == null || jwk.qi == null) {
    throw errCode(new Error('JWK was missing components'), 'ERR_INVALID_PARAMETERS');
  }
  const asn1 = forge$m.pki.privateKeyToAsn1({
    n: base64urlToBigInteger(jwk.n),
    e: base64urlToBigInteger(jwk.e),
    d: base64urlToBigInteger(jwk.d),
    p: base64urlToBigInteger(jwk.p),
    q: base64urlToBigInteger(jwk.q),
    dP: base64urlToBigInteger(jwk.dp),
    dQ: base64urlToBigInteger(jwk.dq),
    qInv: base64urlToBigInteger(jwk.qi)
  });
  return fromString$1(forge$m.asn1.toDer(asn1).getBytes(), 'ascii');
}
|
|
// Convert a PKCIX in ASN1 DER format to a JWK key
// (sic: "PKCIX" — PKIX SubjectPublicKeyInfo)
/**
 * @param {Uint8Array} bytes - DER-encoded SubjectPublicKeyInfo.
 * @returns {object} JWK public key with modulus and exponent.
 */
function pkixToJwk(bytes) {
  const asn1 = forge$m.asn1.fromDer(toString$3(bytes, 'ascii'));
  const publicKey = forge$m.pki.publicKeyFromAsn1(asn1);
  return {
    kty: 'RSA',
    n: bigIntegerToUintBase64url(publicKey.n),
    e: bigIntegerToUintBase64url(publicKey.e)
  };
}
|
|
// Convert a JWK key to PKCIX in ASN1 DER format
/**
 * @param {object} jwk - JWK RSA public key (`n`, `e` required).
 * @returns {Uint8Array} DER-encoded SubjectPublicKeyInfo.
 * @throws {Error} With `code: 'ERR_INVALID_PARAMETERS'` on missing fields.
 */
function jwkToPkix(jwk) {
  if (jwk.n == null || jwk.e == null) {
    throw errCode(new Error('JWK was missing components'), 'ERR_INVALID_PARAMETERS');
  }
  const asn1 = forge$m.pki.publicKeyToAsn1({
    n: base64urlToBigInteger(jwk.n),
    e: base64urlToBigInteger(jwk.e)
  });
  return fromString$1(forge$m.asn1.toDer(asn1).getBytes(), 'ascii');
}
|
|
|
|
// Maps the named base64url JWK fields of `key` to forge BigIntegers,
// preserving the order given in `types`.
function convert(key, types) {
  return types.map(t => base64urlToBigInteger(key[t]));
}
// Builds a forge RSA private key from a JWK private key.
function jwk2priv(key) {
  return forge$m.pki.setRsaPrivateKey(...convert(key, ['n', 'e', 'd', 'p', 'q', 'dp', 'dq', 'qi']));
}
// Builds a forge RSA public key from a JWK public key.
function jwk2pub(key) {
  return forge$m.pki.setRsaPublicKey(...convert(key, ['n', 'e']));
}
|
|
|
|
/**
 * Generates an RSASSA-PKCS1-v1_5/SHA-256 key pair of `bits` modulus length
 * via Web Crypto, with public exponent 65537 (0x010001).
 *
 * @param {number} bits - RSA modulus length in bits.
 * @returns {Promise<{ privateKey: object, publicKey: object }>} JWK pair.
 */
async function generateKey$2(bits) {
  const pair = await webcrypto.get().subtle.generateKey({
    name: 'RSASSA-PKCS1-v1_5',
    modulusLength: bits,
    publicExponent: new Uint8Array([0x01, 0x00, 0x01]),
    hash: { name: 'SHA-256' }
  }, true, ['sign', 'verify']);
  const keys = await exportKey(pair);
  return {
    privateKey: keys[0],
    publicKey: keys[1]
  };
}
|
|
// Takes a jwk key
/**
 * Imports a JWK private key into Web Crypto, derives the matching public
 * key from its `n`/`e` components, and re-exports both as JWK.
 *
 * @param {object} key - JWK RSA private key.
 * @returns {Promise<{ privateKey: object, publicKey: object }>}
 */
async function unmarshalPrivateKey$1(key) {
  const privateKey = await webcrypto.get().subtle.importKey('jwk', key, {
    name: 'RSASSA-PKCS1-v1_5',
    hash: { name: 'SHA-256' }
  }, true, ['sign']);
  const pair = [
    privateKey,
    await derivePublicFromPrivate(key)
  ];
  const keys = await exportKey({
    privateKey: pair[0],
    publicKey: pair[1]
  });
  return {
    privateKey: keys[0],
    publicKey: keys[1]
  };
}
|
|
/**
 * Signs `msg` with the JWK private key using RSASSA-PKCS1-v1_5/SHA-256.
 *
 * @param {object} key - JWK RSA private key.
 * @param {Uint8Array|number[]} msg - Message bytes to sign.
 * @returns {Promise<Uint8Array>} Signature bytes.
 */
async function hashAndSign$2(key, msg) {
  const privateKey = await webcrypto.get().subtle.importKey('jwk', key, {
    name: 'RSASSA-PKCS1-v1_5',
    hash: { name: 'SHA-256' }
  }, false, ['sign']);
  const sig = await webcrypto.get().subtle.sign({ name: 'RSASSA-PKCS1-v1_5' }, privateKey, Uint8Array.from(msg));
  // `sig` is an ArrayBuffer; wrap it without copying.
  return new Uint8Array(sig, 0, sig.byteLength);
}
|
|
/**
 * Verifies an RSASSA-PKCS1-v1_5/SHA-256 signature with a JWK public key.
 *
 * @param {object} key - JWK RSA public key.
 * @param {Uint8Array} sig - Signature bytes.
 * @param {Uint8Array} msg - Message bytes that were signed.
 * @returns {Promise<boolean>} true when the signature is valid.
 */
async function hashAndVerify$2(key, sig, msg) {
  const publicKey = await webcrypto.get().subtle.importKey('jwk', key, {
    name: 'RSASSA-PKCS1-v1_5',
    hash: { name: 'SHA-256' }
  }, false, ['verify']);
  return await webcrypto.get().subtle.verify({ name: 'RSASSA-PKCS1-v1_5' }, publicKey, sig, msg);
}
|
|
/**
 * Exports both halves of a CryptoKey pair as JWK objects.
 *
 * @param {{ privateKey: CryptoKey, publicKey: CryptoKey }} pair
 * @returns {Promise<[object, object]>} `[privateJwk, publicJwk]`.
 * @throws {Error} With `code: 'ERR_INVALID_PARAMETERS'` if either is missing.
 */
async function exportKey(pair) {
  if (pair.privateKey == null || pair.publicKey == null) {
    throw errCode(new Error('Private and public key are required'), 'ERR_INVALID_PARAMETERS');
  }
  return await Promise.all([
    webcrypto.get().subtle.exportKey('jwk', pair.privateKey),
    webcrypto.get().subtle.exportKey('jwk', pair.publicKey)
  ]);
}
|
|
/**
 * Derives the public CryptoKey from a JWK private key by importing only
 * its public components (`kty`, `n`, `e`).
 *
 * @param {object} jwKey - JWK RSA private key.
 * @returns {Promise<CryptoKey>} Verification-capable public key.
 */
async function derivePublicFromPrivate(jwKey) {
  return await webcrypto.get().subtle.importKey('jwk', {
    kty: jwKey.kty,
    n: jwKey.n,
    e: jwKey.e
  }, {
    name: 'RSASSA-PKCS1-v1_5',
    hash: { name: 'SHA-256' }
  }, true, ['verify']);
}
|
|
/*

RSA encryption/decryption for the browser with webcrypto workaround
"bloody dark magic. webcrypto's why."

Explanation:
- Convert JWK to nodeForge
- Convert msg Uint8Array to nodeForge buffer: ByteBuffer is a "binary-string backed buffer", so let's make our Uint8Array a binary string
- Convert resulting nodeForge buffer to Uint8Array: it returns a binary string, turn that into a Uint8Array

*/
// Bridges a JWK key and a byte message into forge's binary-string world,
// runs `handle(binaryMsg, forgeKey)`, and converts the result back to bytes.
function convertKey(key, pub, msg, handle) {
  const fkey = pub ? jwk2pub(key) : jwk2priv(key);
  const fmsg = toString$3(Uint8Array.from(msg), 'ascii');
  const fomsg = handle(fmsg, fkey);
  return fromString$1(fomsg, 'ascii');
}
// RSA-encrypts `msg` with a JWK public key.
function encrypt(key, msg) {
  return convertKey(key, true, msg, (msg, key) => key.encrypt(msg));
}
// RSA-decrypts `msg` with a JWK private key.
function decrypt(key, msg) {
  return convertKey(key, false, msg, (msg, key) => key.decrypt(msg));
}
|
|
|
|
/**
 * Exports the given PrivateKey as a base64 encoded string.
 * The PrivateKey is encrypted via a password derived PBKDF2 key
 * leveraging the aes-gcm cipher algorithm.
 *
 * @param {Uint8Array} privateKey - Marshalled private key bytes.
 * @param {string|Uint8Array} password - Password for the PBKDF2 derivation.
 * @returns {Promise<string>} base64-encoded ciphertext.
 */
async function exporter(privateKey, password) {
  // Default cipher options: AES-GCM, 16-byte key/salt, 32767 iterations.
  const cipher = create$2();
  const encryptedKey = await cipher.encrypt(privateKey, password);
  return base64$2.encode(encryptedKey);
}
|
|
|
|
/**
 * libp2p RSA public key backed by a JWK object.
 */
class RsaPublicKey {
  /** @param {object} key - JWK RSA public key. */
  constructor(key) {
    this._key = key;
  }
  /** Verifies `sig` over `data` (RSASSA-PKCS1-v1_5/SHA-256). */
  async verify(data, sig) {
    return await hashAndVerify$2(this._key, sig, data);
  }
  /** Returns the DER-encoded SubjectPublicKeyInfo bytes. */
  marshal() {
    return jwkToPkix(this._key);
  }
  // Protobuf-framed key bytes (`PublicKey`/`KeyType` are the codecs
  // declared elsewhere in this bundle).
  get bytes() {
    return PublicKey.encode({
      Type: KeyType.RSA,
      Data: this.marshal()
    }).subarray();
  }
  /** RSA-encrypts `bytes` with this public key. */
  encrypt(bytes) {
    return encrypt(this._key, bytes);
  }
  /** Byte-wise comparison of the protobuf-framed key bytes. */
  equals(key) {
    return equals(this.bytes, key.bytes);
  }
  /** SHA-256 multihash of the protobuf-framed key bytes. */
  async hash() {
    const { bytes } = await sha256$1.digest(this.bytes);
    return bytes;
  }
}
|
|
/**
 * libp2p RSA private key backed by JWK objects (private + public half).
 */
class RsaPrivateKey {
  /**
   * @param {object} key - JWK RSA private key.
   * @param {object} publicKey - JWK RSA public key.
   */
  constructor(key, publicKey) {
    this._key = key;
    this._publicKey = publicKey;
  }
  /** Returns 16 fresh cryptographically-secure random bytes. */
  genSecret() {
    return randomBytes(16);
  }
  /** Signs `message` (RSASSA-PKCS1-v1_5/SHA-256). */
  async sign(message) {
    return await hashAndSign$2(this._key, message);
  }
  /** The corresponding RsaPublicKey; throws if none was provided. */
  get public() {
    if (this._publicKey == null) {
      throw errCode(new Error('public key not provided'), 'ERR_PUBKEY_NOT_PROVIDED');
    }
    return new RsaPublicKey(this._publicKey);
  }
  /** RSA-decrypts `bytes` with this private key. */
  decrypt(bytes) {
    return decrypt(this._key, bytes);
  }
  /** Returns the DER-encoded PKCS#1 private key bytes. */
  marshal() {
    return jwkToPkcs1(this._key);
  }
  // Protobuf-framed key bytes (`PrivateKey`/`KeyType` are the codecs
  // declared elsewhere in this bundle).
  get bytes() {
    return PrivateKey.encode({
      Type: KeyType.RSA,
      Data: this.marshal()
    }).subarray();
  }
  /** Byte-wise comparison of the protobuf-framed key bytes. */
  equals(key) {
    return equals(this.bytes, key.bytes);
  }
  /** SHA-256 multihash of the protobuf-framed key bytes. */
  async hash() {
    const { bytes } = await sha256$1.digest(this.bytes);
    return bytes;
  }
  /**
   * Gets the ID of the key.
   *
   * The key id is the base58 encoding of the SHA-256 multihash of its public key.
   * The public key is a protobuf encoding containing a type and the DER encoding
   * of the PKCS SubjectPublicKeyInfo.
   */
  async id() {
    const hash = await this.public.hash();
    return toString$3(hash, 'base58btc');
  }
  /**
   * Exports the key into a password protected PEM format
   *
   * @param {string} password - Password protecting the export.
   * @param {'pkcs-8'|'libp2p-key'} [format] - Output format.
   * @throws {Error} With `code: 'ERR_INVALID_EXPORT_FORMAT'` otherwise.
   */
  async export(password, format = 'pkcs-8') {
    if (format === 'pkcs-8') {
      // Re-parse our PKCS#1 bytes with forge so it can produce an
      // encrypted PEM (PBES2: aes256 + sha512 PRF, 10000 iterations).
      const buffer = new forge$m.util.ByteBuffer(this.marshal());
      const asn1 = forge$m.asn1.fromDer(buffer);
      const privateKey = forge$m.pki.privateKeyFromAsn1(asn1);
      const options = {
        algorithm: 'aes256',
        count: 10000,
        saltSize: 128 / 8,
        prfAlgorithm: 'sha512'
      };
      return forge$m.pki.encryptRsaPrivateKey(privateKey, password, options);
    }
    else if (format === 'libp2p-key') {
      // AES-GCM + PBKDF2 encryption of the protobuf-framed key bytes.
      return await exporter(this.bytes, password);
    }
    else {
      throw errCode(new Error(`export format '${format}' is not supported`), 'ERR_INVALID_EXPORT_FORMAT');
    }
  }
}
|
|
/**
 * Builds an RsaPrivateKey from DER-encoded PKCS#1 private key bytes.
 *
 * @param {Uint8Array} bytes - DER PKCS#1 private key.
 * @returns {Promise<RsaPrivateKey>}
 */
async function unmarshalRsaPrivateKey(bytes) {
  const jwk = pkcs1ToJwk(bytes);
  const keys = await unmarshalPrivateKey$1(jwk);
  return new RsaPrivateKey(keys.privateKey, keys.publicKey);
}
|
|
/**
 * Builds an RsaPublicKey from DER-encoded SubjectPublicKeyInfo bytes.
 *
 * @param {Uint8Array} bytes - DER public key.
 * @returns {RsaPublicKey}
 */
function unmarshalRsaPublicKey(bytes) {
  const jwk = pkixToJwk(bytes);
  return new RsaPublicKey(jwk);
}
|
|
async function fromJwk(jwk) {
|
|
const keys = await unmarshalPrivateKey$1(jwk);
|
|
return new RsaPrivateKey(keys.privateKey, keys.publicKey);
|
|
}
|
|
async function generateKeyPair$2(bits) {
|
|
const keys = await generateKey$2(bits);
|
|
return new RsaPrivateKey(keys.privateKey, keys.publicKey);
|
|
}
|
|
|
|
// Namespace object for the RSA implementation (rollup module wrapper).
var RSA = /*#__PURE__*/Object.freeze({
    __proto__: null,
    RsaPublicKey: RsaPublicKey,
    RsaPrivateKey: RsaPrivateKey,
    unmarshalRsaPrivateKey: unmarshalRsaPrivateKey,
    unmarshalRsaPublicKey: unmarshalRsaPublicKey,
    fromJwk: fromJwk,
    generateKeyPair: generateKeyPair$2
});
|
|
|
|
/*! noble-ed25519 - MIT License (c) 2019 Paul Miller (paulmillr.com) */
// Small BigInt constants hoisted so hot paths don't re-allocate them.
const _0n = BigInt(0);
const _1n = BigInt(1);
const _2n = BigInt(2);
// Subgroup order l (also exposed as CURVE.l / CURVE.n below).
const CU_O = BigInt('7237005577332262213973186563042994240857116359379907606001950938285454250989');
// ed25519 curve parameters: a*x^2 + y^2 = 1 + d*x^2*y^2 over GF(P), with a = -1.
const CURVE = Object.freeze({
    a: BigInt(-1),
    d: BigInt('37095705934669439343138083508754565189542113879843219016388785533085940283555'),
    P: BigInt('57896044618658097711785492504343953926634992332820282019728792003956564819949'),
    l: CU_O,
    n: CU_O,
    h: BigInt(8),
    Gx: BigInt('15112221349535400772501151409588531511454012693041857206046113283949847762202'),
    Gy: BigInt('46316835694926478169428394003475163141307993866256225615783033603165251855960'),
});
// 2^256; upper bound used when normalizing non-strict scalars.
const POW_2_256 = BigInt('0x10000000000000000000000000000000000000000000000000000000000000000');
// sqrt(-1) mod P, used when taking square roots in GF(P).
const SQRT_M1 = BigInt('19681161376707505956807079304988542015446066515923890162744021073123829784752');
// NOTE(review): this value is computed and immediately discarded — apparently
// dead code left over from bundling/tree-shaking of the upstream library.
BigInt('6853475219497561581579357271197624642482790079785650197046958215289687604742');
// Constants used by the Ristretto encode/decode routines below.
const SQRT_AD_MINUS_ONE = BigInt('25063068953384623474111414158702152701244531502492656460079210482610430750235');
const INVSQRT_A_MINUS_D = BigInt('54469307008909316920995813868745141605393597292927456921205312896311721017578');
const ONE_MINUS_D_SQ = BigInt('1159843021668779879193775521855586647937357759715417654439879720876111806838');
const D_MINUS_ONE_SQ = BigInt('40440834346308536858101042469323190826248399146238708352240133220865137265952');
|
|
/**
 * Curve point in extended homogeneous coordinates (X, Y, Z, T), where the
 * affine point is (X/Z, Y/Z) and T = X*Y/Z (see fromAffine/toAffine below).
 * Keeping points projective defers expensive field inversions until a point
 * is converted back to affine form.
 */
class ExtendedPoint {
    constructor(x, y, z, t) {
        this.x = x;
        this.y = y;
        this.z = z;
        this.t = t;
    }
    // Lift an affine Point: Z = 1, T = x*y.
    static fromAffine(p) {
        if (!(p instanceof Point)) {
            throw new TypeError('ExtendedPoint#fromAffine: expected Point');
        }
        if (p.equals(Point.ZERO))
            return ExtendedPoint.ZERO;
        return new ExtendedPoint(p.x, p.y, _1n, mod(p.x * p.y));
    }
    // Convert many points to affine using a single batched inversion of the Zs.
    static toAffineBatch(points) {
        const toInv = invertBatch(points.map((p) => p.z));
        return points.map((p, i) => p.toAffine(toInv[i]));
    }
    // Re-normalize points so every Z becomes 1 (affine round-trip).
    static normalizeZ(points) {
        return this.toAffineBatch(points).map(this.fromAffine);
    }
    // Projective equality: X1/Z1 == X2/Z2 and Y1/Z1 == Y2/Z2, compared by
    // cross-multiplication to avoid inversions.
    equals(other) {
        assertExtPoint(other);
        const { x: X1, y: Y1, z: Z1 } = this;
        const { x: X2, y: Y2, z: Z2 } = other;
        const X1Z2 = mod(X1 * Z2);
        const X2Z1 = mod(X2 * Z1);
        const Y1Z2 = mod(Y1 * Z2);
        const Y2Z1 = mod(Y2 * Z1);
        return X1Z2 === X2Z1 && Y1Z2 === Y2Z1;
    }
    // Negation flips x (and therefore T = x*y).
    negate() {
        return new ExtendedPoint(mod(-this.x), this.y, this.z, mod(-this.t));
    }
    // Point doubling in extended coordinates. The intermediate names (A..H)
    // follow the standard extended-twisted-Edwards doubling formulas; the
    // statement order matters — do not reorder.
    double() {
        const { x: X1, y: Y1, z: Z1 } = this;
        const { a } = CURVE;
        const A = mod(X1 * X1);
        const B = mod(Y1 * Y1);
        const C = mod(_2n * mod(Z1 * Z1));
        const D = mod(a * A);
        const x1y1 = X1 + Y1;
        const E = mod(mod(x1y1 * x1y1) - A - B);
        const G = D + B;
        const F = G - C;
        const H = D - B;
        const X3 = mod(E * F);
        const Y3 = mod(G * H);
        const T3 = mod(E * H);
        const Z3 = mod(F * G);
        return new ExtendedPoint(X3, Y3, Z3, T3);
    }
    // Point addition in extended coordinates. When F == 0 the generic formula
    // degenerates (the operands are equal), so fall back to double().
    add(other) {
        assertExtPoint(other);
        const { x: X1, y: Y1, z: Z1, t: T1 } = this;
        const { x: X2, y: Y2, z: Z2, t: T2 } = other;
        const A = mod((Y1 - X1) * (Y2 + X2));
        const B = mod((Y1 + X1) * (Y2 - X2));
        const F = mod(B - A);
        if (F === _0n)
            return this.double();
        const C = mod(Z1 * _2n * T2);
        const D = mod(T1 * _2n * Z2);
        const E = D + C;
        const G = B + A;
        const H = D - C;
        const X3 = mod(E * F);
        const Y3 = mod(G * H);
        const T3 = mod(E * H);
        const Z3 = mod(F * G);
        return new ExtendedPoint(X3, Y3, Z3, T3);
    }
    subtract(other) {
        return this.add(other.negate());
    }
    // Precompute the odd multiples of `this` for every W-bit window of a
    // 256-bit scalar; used by wNAF() below.
    precomputeWindow(W) {
        const windows = 1 + 256 / W;
        const points = [];
        let p = this;
        let base = p;
        for (let window = 0; window < windows; window++) {
            base = p;
            points.push(base);
            for (let i = 1; i < 2 ** (W - 1); i++) {
                base = base.add(p);
                points.push(base);
            }
            p = base.double();
        }
        return points;
    }
    /**
     * Windowed scalar multiplication. The `f` accumulator absorbs a dummy
     * addition for every zero window so the amount of work does not depend on
     * the scalar's bit pattern. Precomputed tables are cached per affine point
     * in the module-level `pointPrecomputes` WeakMap.
     */
    wNAF(n, affinePoint) {
        if (!affinePoint && this.equals(ExtendedPoint.BASE))
            affinePoint = Point.BASE;
        const W = (affinePoint && affinePoint._WINDOW_SIZE) || 1;
        if (256 % W) {
            throw new Error('Point#wNAF: Invalid precomputation window, must be power of 2');
        }
        let precomputes = affinePoint && pointPrecomputes.get(affinePoint);
        if (!precomputes) {
            precomputes = this.precomputeWindow(W);
            if (affinePoint && W !== 1) {
                precomputes = ExtendedPoint.normalizeZ(precomputes);
                pointPrecomputes.set(affinePoint, precomputes);
            }
        }
        let p = ExtendedPoint.ZERO;
        let f = ExtendedPoint.ZERO;
        const windows = 1 + 256 / W;
        const windowSize = 2 ** (W - 1);
        const mask = BigInt(2 ** W - 1);
        const maxNumber = 2 ** W;
        const shiftBy = BigInt(W);
        for (let window = 0; window < windows; window++) {
            const offset = window * windowSize;
            let wbits = Number(n & mask);
            n >>= shiftBy;
            // Re-center the window into [-2^(W-1), 2^(W-1)) and carry into n.
            if (wbits > windowSize) {
                wbits -= maxNumber;
                n += _1n;
            }
            if (wbits === 0) {
                // Zero window: do a dummy addition into `f` instead of `p`.
                let pr = precomputes[offset];
                if (window % 2)
                    pr = pr.negate();
                f = f.add(pr);
            }
            else {
                let cached = precomputes[offset + Math.abs(wbits) - 1];
                if (wbits < 0)
                    cached = cached.negate();
                p = p.add(cached);
            }
        }
        return ExtendedPoint.normalizeZ([p, f])[0];
    }
    // Scalar multiplication; the scalar must satisfy 0 < scalar < CURVE.l.
    multiply(scalar, affinePoint) {
        return this.wNAF(normalizeScalar(scalar, CURVE.l), affinePoint);
    }
    // Variable-time double-and-add multiplication. Faster, but its timing
    // depends on the scalar — only use with public values. Accepts scalar 0.
    multiplyUnsafe(scalar) {
        let n = normalizeScalar(scalar, CURVE.l, false);
        const G = ExtendedPoint.BASE;
        const P0 = ExtendedPoint.ZERO;
        if (n === _0n)
            return P0;
        if (this.equals(P0) || n === _1n)
            return this;
        if (this.equals(G))
            return this.wNAF(n);
        let p = P0;
        let d = this;
        while (n > _0n) {
            if (n & _1n)
                p = p.add(d);
            d = d.double();
            n >>= _1n;
        }
        return p;
    }
    // True when the point lies in the small (order-h) torsion subgroup.
    isSmallOrder() {
        return this.multiplyUnsafe(CURVE.h).equals(ExtendedPoint.ZERO);
    }
    // True when l * P == 0, i.e. the point is in the prime-order subgroup.
    isTorsionFree() {
        return this.multiplyUnsafe(CURVE.l).equals(ExtendedPoint.ZERO);
    }
    // Convert to affine; `invZ` may be supplied by a batch inversion. The
    // z*invZ == 1 check guards against a wrong precomputed inverse.
    toAffine(invZ = invert(this.z)) {
        const { x, y, z } = this;
        const ax = mod(x * invZ);
        const ay = mod(y * invZ);
        const zz = mod(z * invZ);
        if (zz !== _1n)
            throw new Error('invZ was invalid');
        return new Point(ax, ay);
    }
    // Removed legacy API — always throws; use RistrettoPoint instead.
    fromRistrettoBytes() {
        legacyRist();
    }
    toRistrettoBytes() {
        legacyRist();
    }
    fromRistrettoHash() {
        legacyRist();
    }
}
|
|
// Base point G and the neutral element in extended coordinates.
ExtendedPoint.BASE = new ExtendedPoint(CURVE.Gx, CURVE.Gy, _1n, mod(CURVE.Gx * CURVE.Gy));
ExtendedPoint.ZERO = new ExtendedPoint(_0n, _1n, _1n, _0n);
// Runtime type guard: throws unless `other` is an ExtendedPoint.
function assertExtPoint(other) {
    if (!(other instanceof ExtendedPoint))
        throw new TypeError('ExtendedPoint expected');
}
// Runtime type guard: throws unless `other` is a RistrettoPoint.
function assertRstPoint(other) {
    if (!(other instanceof RistrettoPoint))
        throw new TypeError('RistrettoPoint expected');
}
// Stub for the removed legacy Ristretto helpers on ExtendedPoint.
function legacyRist() {
    throw new Error('Legacy method: switch to RistrettoPoint');
}
|
|
/**
 * ristretto255 point: a prime-order group abstraction layered over the
 * ed25519 curve. Wraps an ExtendedPoint (`ep`) and provides the canonical
 * 32-byte encode/decode and hash-to-group operations.
 */
class RistrettoPoint {
    constructor(ep) {
        this.ep = ep;
    }
    // One-way "Elligator" map from a field element r0 to a curve point; used
    // by hashToCurve. Formula structure follows the ristretto255 spec.
    static calcElligatorRistrettoMap(r0) {
        const { d } = CURVE;
        const r = mod(SQRT_M1 * r0 * r0);
        const Ns = mod((r + _1n) * ONE_MINUS_D_SQ);
        let c = BigInt(-1);
        const D = mod((c - d * r) * mod(r + d));
        let { isValid: Ns_D_is_sq, value: s } = uvRatio(Ns, D);
        let s_ = mod(s * r0);
        if (!edIsNegative(s_))
            s_ = mod(-s_);
        if (!Ns_D_is_sq)
            s = s_;
        if (!Ns_D_is_sq)
            c = r;
        const Nt = mod(c * (r - _1n) * D_MINUS_ONE_SQ - D);
        const s2 = s * s;
        const W0 = mod((s + s) * D);
        const W1 = mod(Nt * SQRT_AD_MINUS_ONE);
        const W2 = mod(_1n - s2);
        const W3 = mod(_1n + s2);
        return new ExtendedPoint(mod(W0 * W3), mod(W2 * W1), mod(W1 * W3), mod(W0 * W2));
    }
    // Map a uniformly random 64-byte input to a group element: each 32-byte
    // half goes through the Elligator map and the results are added.
    static hashToCurve(hex) {
        hex = ensureBytes(hex, 64);
        const r1 = bytes255ToNumberLE(hex.slice(0, 32));
        const R1 = this.calcElligatorRistrettoMap(r1);
        const r2 = bytes255ToNumberLE(hex.slice(32, 64));
        const R2 = this.calcElligatorRistrettoMap(r2);
        return new RistrettoPoint(R1.add(R2));
    }
    // Decode the canonical 32-byte encoding; rejects non-canonical and
    // invalid encodings with a single uniform error message.
    static fromHex(hex) {
        hex = ensureBytes(hex, 32);
        const { a, d } = CURVE;
        const emsg = 'RistrettoPoint.fromHex: the hex is not valid encoding of RistrettoPoint';
        const s = bytes255ToNumberLE(hex);
        // s must be canonically encoded (round-trips to the same bytes) and non-negative.
        if (!equalBytes(numberTo32BytesLE(s), hex) || edIsNegative(s))
            throw new Error(emsg);
        const s2 = mod(s * s);
        const u1 = mod(_1n + a * s2);
        const u2 = mod(_1n - a * s2);
        const u1_2 = mod(u1 * u1);
        const u2_2 = mod(u2 * u2);
        const v = mod(a * d * u1_2 - u2_2);
        const { isValid, value: I } = invertSqrt(mod(v * u2_2));
        const Dx = mod(I * u2);
        const Dy = mod(I * Dx * v);
        let x = mod((s + s) * Dx);
        if (edIsNegative(x))
            x = mod(-x);
        const y = mod(u1 * Dy);
        const t = mod(x * y);
        if (!isValid || edIsNegative(t) || y === _0n)
            throw new Error(emsg);
        return new RistrettoPoint(new ExtendedPoint(x, y, _1n, t));
    }
    // Produce the canonical 32-byte encoding of this group element.
    toRawBytes() {
        let { x, y, z, t } = this.ep;
        const u1 = mod(mod(z + y) * mod(z - y));
        const u2 = mod(x * y);
        const u2sq = mod(u2 * u2);
        const { value: invsqrt } = invertSqrt(mod(u1 * u2sq));
        const D1 = mod(invsqrt * u1);
        const D2 = mod(invsqrt * u2);
        const zInv = mod(D1 * D2 * t);
        let D;
        if (edIsNegative(t * zInv)) {
            // Rotate the point by sqrt(-1) to land in the canonical coset.
            let _x = mod(y * SQRT_M1);
            let _y = mod(x * SQRT_M1);
            x = _x;
            y = _y;
            D = mod(D1 * INVSQRT_A_MINUS_D);
        }
        else {
            D = D2;
        }
        if (edIsNegative(x * zInv))
            y = mod(-y);
        let s = mod((z - y) * D);
        if (edIsNegative(s))
            s = mod(-s);
        return numberTo32BytesLE(s);
    }
    toHex() {
        return bytesToHex(this.toRawBytes());
    }
    toString() {
        return this.toHex();
    }
    // Ristretto equality: two representatives encode the same element when
    // either cross-product relation below holds.
    equals(other) {
        assertRstPoint(other);
        const a = this.ep;
        const b = other.ep;
        const one = mod(a.x * b.y) === mod(a.y * b.x);
        const two = mod(a.y * b.y) === mod(a.x * b.x);
        return one || two;
    }
    add(other) {
        assertRstPoint(other);
        return new RistrettoPoint(this.ep.add(other.ep));
    }
    subtract(other) {
        assertRstPoint(other);
        return new RistrettoPoint(this.ep.subtract(other.ep));
    }
    multiply(scalar) {
        return new RistrettoPoint(this.ep.multiply(scalar));
    }
    // Variable-time multiplication — only for public scalars.
    multiplyUnsafe(scalar) {
        return new RistrettoPoint(this.ep.multiplyUnsafe(scalar));
    }
}
RistrettoPoint.BASE = new RistrettoPoint(ExtendedPoint.BASE);
RistrettoPoint.ZERO = new RistrettoPoint(ExtendedPoint.ZERO);
// Cache of wNAF precomputation tables, keyed weakly by affine Point.
const pointPrecomputes = new WeakMap();
|
|
/**
 * Affine ed25519 point (x, y). Arithmetic delegates to ExtendedPoint;
 * this class also handles the RFC 8032 32-byte point encoding.
 */
class Point {
    constructor(x, y) {
        this.x = x;
        this.y = y;
    }
    // Set the wNAF window size for this point and drop any cached table.
    _setWindowSize(windowSize) {
        this._WINDOW_SIZE = windowSize;
        pointPrecomputes.delete(this);
    }
    /**
     * Decode a 32-byte point: y is the low 255 bits (little-endian), the top
     * bit carries the sign of x; x is recovered from the curve equation.
     * With strict=true, y must be fully reduced (y < P).
     */
    static fromHex(hex, strict = true) {
        const { d, P } = CURVE;
        hex = ensureBytes(hex, 32);
        const normed = hex.slice();
        normed[31] = hex[31] & ~0x80;
        const y = bytesToNumberLE(normed);
        if (strict && y >= P)
            throw new Error('Expected 0 < hex < P');
        if (!strict && y >= POW_2_256)
            throw new Error('Expected 0 < hex < 2**256');
        // Solve x^2 = (y^2 - 1) / (d*y^2 + 1) for x.
        const y2 = mod(y * y);
        const u = mod(y2 - _1n);
        const v = mod(d * y2 + _1n);
        let { isValid, value: x } = uvRatio(u, v);
        if (!isValid)
            throw new Error('Point.fromHex: invalid y coordinate');
        // Pick the root whose parity matches the encoded sign bit.
        const isXOdd = (x & _1n) === _1n;
        const isLastByteOdd = (hex[31] & 0x80) !== 0;
        if (isLastByteOdd !== isXOdd) {
            x = mod(-x);
        }
        return new Point(x, y);
    }
    // Public-key point for a private key.
    static async fromPrivateKey(privateKey) {
        return (await getExtendedPublicKey(privateKey)).point;
    }
    // Encode: 32 bytes of y (LE) with x's parity stored in the top bit.
    toRawBytes() {
        const bytes = numberTo32BytesLE(this.y);
        bytes[31] |= this.x & _1n ? 0x80 : 0;
        return bytes;
    }
    toHex() {
        return bytesToHex(this.toRawBytes());
    }
    // Birational map to the curve25519 Montgomery u-coordinate: u = (1+y)/(1-y).
    toX25519() {
        const { y } = this;
        const u = mod((_1n + y) * invert(_1n - y));
        return numberTo32BytesLE(u);
    }
    isTorsionFree() {
        return ExtendedPoint.fromAffine(this).isTorsionFree();
    }
    equals(other) {
        return this.x === other.x && this.y === other.y;
    }
    negate() {
        return new Point(mod(-this.x), this.y);
    }
    add(other) {
        return ExtendedPoint.fromAffine(this).add(ExtendedPoint.fromAffine(other)).toAffine();
    }
    subtract(other) {
        return this.add(other.negate());
    }
    multiply(scalar) {
        return ExtendedPoint.fromAffine(this).multiply(scalar, this).toAffine();
    }
}
// Generator and the identity element in affine form.
Point.BASE = new Point(CURVE.Gx, CURVE.Gy);
Point.ZERO = new Point(_0n, _1n);
|
|
/**
 * EdDSA signature: point R and scalar s, serialized as R (32 bytes) || s
 * (32 bytes, little-endian). Validity (s within the group order) is checked
 * on construction.
 */
class Signature {
    constructor(r, s) {
        this.r = r;
        this.s = s;
        this.assertValidity();
    }
    // Parse a 64-byte signature. R is decoded non-strictly (RFC 8032 allows
    // unreduced encodings here); s is read as a little-endian integer.
    static fromHex(hex) {
        const bytes = ensureBytes(hex, 64);
        const r = Point.fromHex(bytes.slice(0, 32), false);
        const s = bytesToNumberLE(bytes.slice(32, 64));
        return new Signature(r, s);
    }
    // Throws when r is not a Point or s is not in [0, l); returns `this`.
    assertValidity() {
        const { r, s } = this;
        if (!(r instanceof Point))
            throw new Error('Expected Point instance');
        // normalizeScalar throws for out-of-range s; its result is discarded.
        normalizeScalar(s, CURVE.l, false);
        return this;
    }
    toRawBytes() {
        const u8 = new Uint8Array(64);
        u8.set(this.r.toRawBytes());
        u8.set(numberTo32BytesLE(this.s), 32);
        return u8;
    }
    toHex() {
        return bytesToHex(this.toRawBytes());
    }
}
|
|
/**
 * Concatenate Uint8Arrays into one buffer.
 * A single argument is returned as-is (no copy); every argument must be a
 * Uint8Array or an Error is thrown.
 */
function concatBytes(...arrays) {
    if (!arrays.every((a) => a instanceof Uint8Array))
        throw new Error('Expected Uint8Array list');
    if (arrays.length === 1)
        return arrays[0];
    const total = arrays.reduce((sum, chunk) => sum + chunk.length, 0);
    const out = new Uint8Array(total);
    let offset = 0;
    for (const chunk of arrays) {
        out.set(chunk, offset);
        offset += chunk.length;
    }
    return out;
}
|
|
// Lookup table: byte value -> two lowercase hex characters.
const hexes = Array.from({ length: 256 }, (v, i) => i.toString(16).padStart(2, '0'));
/**
 * Encode a Uint8Array as a lowercase hex string.
 * Throws for any non-Uint8Array input.
 */
function bytesToHex(uint8a) {
    if (!(uint8a instanceof Uint8Array))
        throw new Error('Uint8Array expected');
    return Array.from(uint8a, (byte) => hexes[byte]).join('');
}
|
|
/**
 * Decode a hex string (even length, upper- or lowercase) into a Uint8Array.
 *
 * @param {string} hex - hex string; must contain only [0-9a-fA-F]
 * @returns {Uint8Array} decoded bytes
 * @throws {TypeError} when `hex` is not a string
 * @throws {Error} on odd length or any non-hex character
 */
function hexToBytes(hex) {
    if (typeof hex !== 'string') {
        throw new TypeError('hexToBytes: expected string, got ' + typeof hex);
    }
    if (hex.length % 2)
        throw new Error('hexToBytes: received invalid unpadded hex');
    // parseInt alone is insufficient: it accepts trailing garbage ('1x' -> 1),
    // signs ('+1' -> 1) and leading whitespace, so malformed hex used to decode
    // silently to wrong bytes. Validate both characters explicitly first.
    const HEX_BYTE = /^[0-9a-fA-F]{2}$/;
    const array = new Uint8Array(hex.length / 2);
    for (let i = 0; i < array.length; i++) {
        const j = i * 2;
        const hexByte = hex.slice(j, j + 2);
        if (!HEX_BYTE.test(hexByte))
            throw new Error('Invalid byte sequence');
        array[i] = Number.parseInt(hexByte, 16);
    }
    return array;
}
|
|
// Serialize a non-negative bigint as exactly 32 big-endian bytes
// (via a zero-padded hex round-trip).
function numberTo32BytesBE(num) {
    const length = 32;
    const hex = num.toString(16).padStart(length * 2, '0');
    return hexToBytes(hex);
}
// Same, but little-endian.
function numberTo32BytesLE(num) {
    return numberTo32BytesBE(num).reverse();
}
// "Negative" in the ed25519 sense: the field element is odd after reduction mod P.
function edIsNegative(num) {
    return (mod(num) & _1n) === _1n;
}
// Interpret bytes as a little-endian bigint.
// NOTE(review): an empty array would yield BigInt('0x') and throw a
// SyntaxError; callers appear to always pass non-empty buffers.
function bytesToNumberLE(uint8a) {
    if (!(uint8a instanceof Uint8Array))
        throw new Error('Expected Uint8Array');
    return BigInt('0x' + bytesToHex(Uint8Array.from(uint8a).reverse()));
}
// Mask for the low 255 bits of a 256-bit value.
const MAX_255B = BigInt('0x7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff');
// Interpret bytes as little-endian, drop the top bit, and reduce mod P.
function bytes255ToNumberLE(bytes) {
    return mod(bytesToNumberLE(bytes) & MAX_255B);
}
|
|
/**
 * Euclidean-style modulo: the result is always in [0, b) for b > 0,
 * unlike JavaScript's `%` which preserves the sign of `a`.
 */
function mod(a, b = CURVE.P) {
    const remainder = a % b;
    if (remainder < 0n) {
        return remainder + b;
    }
    return remainder;
}
|
|
/**
 * Modular multiplicative inverse via the extended Euclidean algorithm.
 * Throws when number is 0, modulo is non-positive, or the inverse does not
 * exist (gcd(number, modulo) != 1).
 */
function invert(number, modulo = CURVE.P) {
    if (number === 0n || modulo <= 0n) {
        throw new Error(`invert: expected positive integers, got n=${number} mod=${modulo}`);
    }
    // Reduce into [0, modulo) first (handles negative inputs).
    let a = ((number % modulo) + modulo) % modulo;
    let b = modulo;
    let x = 0n;
    let u = 1n;
    while (a !== 0n) {
        const quotient = b / a;
        const remainder = b % a;
        const next = x - u * quotient;
        [b, a, x, u] = [a, remainder, u, next];
    }
    if (b !== 1n)
        throw new Error('invert: does not exist');
    // x may be negative; normalize into [0, modulo).
    return ((x % modulo) + modulo) % modulo;
}
|
|
// Batch modular inversion (Montgomery's trick): one invert() plus O(n)
// multiplications instead of n inversions. Zero entries are skipped and left
// undefined (hole) in the result array.
function invertBatch(nums, p = CURVE.P) {
    const tmp = new Array(nums.length);
    // Forward pass: tmp[i] = product of all non-zero nums before index i.
    const lastMultiplied = nums.reduce((acc, num, i) => {
        if (num === _0n)
            return acc;
        tmp[i] = acc;
        return mod(acc * num, p);
    }, _1n);
    const inverted = invert(lastMultiplied, p);
    // Backward pass: peel one factor off the running inverse per element.
    nums.reduceRight((acc, num, i) => {
        if (num === _0n)
            return acc;
        tmp[i] = mod(acc * tmp[i], p);
        return mod(acc * num, p);
    }, inverted);
    return tmp;
}
// Repeated squaring: x^(2^power) mod P. `power` is a BigInt iteration count.
function pow2(x, power) {
    const { P } = CURVE;
    let res = x;
    while (power-- > _0n) {
        res *= res;
        res %= P;
    }
    return res;
}
// Addition-chain computation of x^((P-5)/8) mod P (pow_p_5_8) together with
// x^3 (b2); both are needed by uvRatio's square-root computation. The
// statement order encodes the addition chain — do not reorder.
function pow_2_252_3(x) {
    const { P } = CURVE;
    const _5n = BigInt(5);
    const _10n = BigInt(10);
    const _20n = BigInt(20);
    const _40n = BigInt(40);
    const _80n = BigInt(80);
    const x2 = (x * x) % P;
    const b2 = (x2 * x) % P;
    const b4 = (pow2(b2, _2n) * b2) % P;
    const b5 = (pow2(b4, _1n) * x) % P;
    const b10 = (pow2(b5, _5n) * b5) % P;
    const b20 = (pow2(b10, _10n) * b10) % P;
    const b40 = (pow2(b20, _20n) * b20) % P;
    const b80 = (pow2(b40, _40n) * b40) % P;
    const b160 = (pow2(b80, _80n) * b80) % P;
    const b240 = (pow2(b160, _80n) * b80) % P;
    const b250 = (pow2(b240, _10n) * b10) % P;
    const pow_p_5_8 = (pow2(b250, _2n) * x) % P;
    return { pow_p_5_8, b2 };
}
|
|
// Compute sqrt(u/v) in GF(P) without a separate inversion. Returns
// { isValid, value }: isValid is false when u/v is not a square, in which
// case `value` holds sqrt(SQRT_M1 * u/v) (used by the Ristretto routines).
// The returned root is always the non-negative one (even, per edIsNegative).
function uvRatio(u, v) {
    const v3 = mod(v * v * v);
    const v7 = mod(v3 * v3 * v);
    // Candidate root x = u * v^3 * (u * v^7)^((P-5)/8).
    const pow = pow_2_252_3(u * v7).pow_p_5_8;
    let x = mod(u * v3 * pow);
    // Classify by checking v * x^2 against u, -u, and -u*sqrt(-1).
    const vx2 = mod(v * x * x);
    const root1 = x;
    const root2 = mod(x * SQRT_M1);
    const useRoot1 = vx2 === u;
    const useRoot2 = vx2 === mod(-u);
    const noRoot = vx2 === mod(-u * SQRT_M1);
    if (useRoot1)
        x = root1;
    if (useRoot2 || noRoot)
        x = root2;
    if (edIsNegative(x))
        x = mod(-x);
    return { isValid: useRoot1 || useRoot2, value: x };
}
// sqrt(1/number) in GF(P): { isValid, value }.
function invertSqrt(number) {
    return uvRatio(_1n, number);
}
// Interpret a little-endian hash as an integer and reduce it mod the
// curve order l.
function modlLE(hash) {
    return mod(bytesToNumberLE(hash), CURVE.l);
}
|
|
/**
 * Byte-wise equality of two byte sequences.
 * Not constant-time; used for comparing public encodings, not secrets.
 */
function equalBytes(b1, b2) {
    if (b1.length !== b2.length) {
        return false;
    }
    return b1.every((byte, index) => byte === b2[index]);
}
|
|
/**
 * Normalize input to a fresh Uint8Array: copies a Uint8Array, or decodes a
 * hex string. Optionally enforces an exact byte length.
 */
function ensureBytes(hex, expectedLength) {
    let bytes;
    if (hex instanceof Uint8Array) {
        bytes = Uint8Array.from(hex);
    }
    else {
        bytes = hexToBytes(hex);
    }
    if (typeof expectedLength === 'number' && bytes.length !== expectedLength) {
        throw new Error(`Expected ${expectedLength} bytes`);
    }
    return bytes;
}
|
|
/**
 * Normalize a scalar to a bigint and range-check it against `max`.
 * strict=true requires 0 < num < max; strict=false allows num = 0.
 * Safe-integer numbers are promoted to bigint; anything else is rejected.
 */
function normalizeScalar(num, max, strict = true) {
    if (!max)
        throw new TypeError('Specify max value');
    let value = num;
    if (typeof value === 'number' && Number.isSafeInteger(value)) {
        value = BigInt(value);
    }
    if (typeof value === 'bigint' && value < max) {
        const aboveLowerBound = strict ? value > 0n : value >= 0n;
        if (aboveLowerBound) {
            return value;
        }
    }
    throw new TypeError('Expected valid scalar: 0 < scalar < max');
}
|
|
/**
 * Clamp a 32-byte scalar in place (RFC 8032 / curve25519 key clamping):
 * clear the 3 low bits, clear the top bit, set the second-highest bit.
 * Returns the same (mutated) buffer.
 */
function adjustBytes25519(bytes) {
    bytes[0] &= 248;                       // multiple of the cofactor 8
    bytes[31] = (bytes[31] & 127) | 64;    // fix the high two bits to 01
    return bytes;
}
|
|
// Normalize a private key given as bigint/number/bytes/hex into exactly 32 bytes.
function checkPrivateKey(key) {
    key =
        typeof key === 'bigint' || typeof key === 'number'
            ? numberTo32BytesBE(normalizeScalar(key, POW_2_256))
            : ensureBytes(key);
    if (key.length !== 32)
        throw new Error(`Expected 32 bytes`);
    return key;
}
// Derive key material from the 64-byte SHA-512 of the seed (RFC 8032 key
// generation): the first half is clamped and reduced into the signing scalar,
// the second half becomes the deterministic nonce prefix.
function getKeyFromHash(hashed) {
    const head = adjustBytes25519(hashed.slice(0, 32));
    const prefix = hashed.slice(32, 64);
    const scalar = modlLE(head);
    const point = Point.BASE.multiply(scalar);
    const pointBytes = point.toRawBytes();
    return { head, prefix, scalar, point, pointBytes };
}
// Optional synchronous SHA-512 implementation; settable once via utils.sha512Sync.
let _sha512Sync;
// Expand a private key into { head, prefix, scalar, point, pointBytes }.
async function getExtendedPublicKey(key) {
    return getKeyFromHash(await utils.sha512(checkPrivateKey(key)));
}
// Compute the 32-byte ed25519 public key for a private key.
async function getPublicKey(privateKey) {
    return (await getExtendedPublicKey(privateKey)).pointBytes;
}
|
|
// RFC 8032 EdDSA signing: r = H(prefix || msg), R = r*G,
// k = H(R || A || msg), s = r + k*scalar (mod l); signature is R || s.
async function sign$1(message, privateKey) {
    message = ensureBytes(message);
    const { prefix, scalar, pointBytes } = await getExtendedPublicKey(privateKey);
    const r = modlLE(await utils.sha512(prefix, message));
    const R = Point.BASE.multiply(r);
    const k = modlLE(await utils.sha512(R.toRawBytes(), pointBytes, message));
    const s = mod(r + k * scalar, CURVE.l);
    return new Signature(R, s).toRawBytes();
}
// Parse and normalize verification inputs and precompute s*G; shared by the
// async verify() below (and any sync variant wired in elsewhere).
function prepareVerification(sig, message, publicKey) {
    message = ensureBytes(message);
    if (!(publicKey instanceof Point))
        publicKey = Point.fromHex(publicKey, false);
    const { r, s } = sig instanceof Signature ? sig.assertValidity() : Signature.fromHex(sig);
    const SB = ExtendedPoint.BASE.multiplyUnsafe(s);
    return { r, s, SB, pub: publicKey, msg: message };
}
// Cofactored verification equation: h * (R + k*A - s*G) == 0.
function finishVerification(publicKey, r, SB, hashed) {
    const k = modlLE(hashed);
    const kA = ExtendedPoint.fromAffine(publicKey).multiplyUnsafe(k);
    const RkA = ExtendedPoint.fromAffine(r).add(kA);
    return RkA.subtract(SB).multiplyUnsafe(CURVE.h).equals(ExtendedPoint.ZERO);
}
// Verify an ed25519 signature; returns a boolean.
async function verify(sig, message, publicKey) {
    const { r, SB, msg, pub } = prepareVerification(sig, message, publicKey);
    const hashed = await utils.sha512(r.toRawBytes(), pub.toRawBytes(), msg);
    return finishVerification(pub, r, SB, hashed);
}
|
|
// Use an 8-bit precomputation window for the base point (table built lazily
// on first multiply).
Point.BASE._setWindowSize(8);
// Available crypto backends: a bundled Node crypto module and, when running
// in an environment exposing `self.crypto`, the WebCrypto API.
const crypto$1 = {
    node: nodeCrypto$1,
    web: typeof self === 'object' && 'crypto' in self ? self.crypto : undefined,
};
|
|
// Utility namespace exported by noble-ed25519: byte helpers, hashing and
// randomness backed by WebCrypto or Node crypto.
const utils = {
    bytesToHex,
    hexToBytes,
    concatBytes,
    getExtendedPublicKey,
    mod,
    invert,
    // The 8 small-order points of the curve, hex-encoded.
    TORSION_SUBGROUP: [
        '0100000000000000000000000000000000000000000000000000000000000000',
        'c7176a703d4dd84fba3c0b760d10670f2a2053fa2c39ccc64ec7fd7792ac037a',
        '0000000000000000000000000000000000000000000000000000000000000080',
        '26e8958fc2b227b045c3f489f2ef98f0d5dfac05d3c63339b13802886d53fc05',
        'ecffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff7f',
        '26e8958fc2b227b045c3f489f2ef98f0d5dfac05d3c63339b13802886d53fc85',
        '0000000000000000000000000000000000000000000000000000000000000000',
        'c7176a703d4dd84fba3c0b760d10670f2a2053fa2c39ccc64ec7fd7792ac03fa',
    ],
    // Reduce 40-1024 bytes of hash output to a non-zero scalar in [1, l-1]
    // (FIPS 186-style modular reduction with negligible bias).
    hashToPrivateScalar: (hash) => {
        hash = ensureBytes(hash);
        if (hash.length < 40 || hash.length > 1024)
            throw new Error('Expected 40-1024 bytes of private key as per FIPS 186');
        return mod(bytesToNumberLE(hash), CURVE.l - _1n) + _1n;
    },
    // Cryptographically secure random bytes from WebCrypto or Node crypto.
    randomBytes: (bytesLength = 32) => {
        if (crypto$1.web) {
            return crypto$1.web.getRandomValues(new Uint8Array(bytesLength));
        }
        else if (crypto$1.node) {
            const { randomBytes } = crypto$1.node;
            return new Uint8Array(randomBytes(bytesLength).buffer);
        }
        else {
            throw new Error("The environment doesn't have randomBytes function");
        }
    },
    // A random 32-byte private key.
    randomPrivateKey: () => {
        return utils.randomBytes(32);
    },
    // SHA-512 over the concatenation of all arguments, using whichever
    // backend is available.
    sha512: async (...messages) => {
        const message = concatBytes(...messages);
        if (crypto$1.web) {
            const buffer = await crypto$1.web.subtle.digest('SHA-512', message.buffer);
            return new Uint8Array(buffer);
        }
        else if (crypto$1.node) {
            return Uint8Array.from(crypto$1.node.createHash('sha512').update(message).digest());
        }
        else {
            throw new Error("The environment doesn't have sha512 function");
        }
    },
    // Warm the wNAF precomputation cache for a point (defaults to the base
    // point); the dummy multiply triggers table construction.
    precompute(windowSize = 8, point = Point.BASE) {
        const cached = point.equals(Point.BASE) ? point : new Point(point.x, point.y);
        cached._setWindowSize(windowSize);
        cached.multiply(_2n);
        return cached;
    },
    sha512Sync: undefined,
};
// Make utils.sha512Sync write-once: the first assignment sticks, later
// assignments are silently ignored.
Object.defineProperties(utils, {
    sha512Sync: {
        configurable: false,
        get() {
            return _sha512Sync;
        },
        set(val) {
            if (!_sha512Sync)
                _sha512Sync = val;
        },
    },
});
|
|
|
|
// ed25519 key sizes. The "private key" stored by this module is 64 bytes:
// the 32-byte secret seed followed by the 32-byte public key.
const PUBLIC_KEY_BYTE_LENGTH = 32;
const PRIVATE_KEY_BYTE_LENGTH = 64; // private key is actually 32 bytes but for historical reasons we concat private and public keys
const KEYS_BYTE_LENGTH = 32;
// Generate a fresh ed25519 keypair: { privateKey: 64 bytes, publicKey: 32 bytes }.
async function generateKey$1() {
    // the actual private key (32 bytes)
    const privateKeyRaw = utils.randomPrivateKey();
    const publicKey = await getPublicKey(privateKeyRaw);
    // concatenated the public key to the private key
    const privateKey = concatKeys(privateKeyRaw, publicKey);
    return {
        privateKey,
        publicKey
    };
}
|
|
/**
 * Generate keypair from a 32 byte uint8array
 *
 * @param {Uint8Array} seed - 32-byte seed, used directly as the raw private key
 * @returns {Promise<{privateKey: Uint8Array, publicKey: Uint8Array}>}
 * @throws {TypeError} when seed is not a Uint8Array or not 32 bytes long
 */
async function generateKeyFromSeed(seed) {
    // Validate the type BEFORE touching `.length`: the previous ordering let a
    // non-Uint8Array with length 32 (e.g. a plain Array) bypass the type check
    // entirely, and reported the wrong error (or a raw property-access
    // TypeError) for other non-buffer inputs.
    if (!(seed instanceof Uint8Array)) {
        throw new TypeError('"seed" must be a node.js Buffer, or Uint8Array.');
    }
    if (seed.length !== KEYS_BYTE_LENGTH) {
        throw new TypeError('"seed" must be 32 bytes in length.');
    }
    // based on node forges algorithm, the seed is used directly as private key
    const privateKeyRaw = seed;
    const publicKey = await getPublicKey(privateKeyRaw);
    const privateKey = concatKeys(privateKeyRaw, publicKey);
    return {
        privateKey,
        publicKey
    };
}
|
|
// Sign `msg` with an ed25519 private key. Only the first 32 bytes (the raw
// seed) are used; the trailing public-key half of the 64-byte format is ignored.
async function hashAndSign$1(privateKey, msg) {
    const privateKeyRaw = privateKey.slice(0, KEYS_BYTE_LENGTH);
    return await sign$1(msg, privateKeyRaw);
}
// Verify an ed25519 signature against a 32-byte public key.
async function hashAndVerify$1(publicKey, sig, msg) {
    return await verify(sig, msg, publicKey);
}
// Build the 64-byte storage format: seed (32 bytes) || public key (32 bytes).
function concatKeys(privateKeyRaw, publicKey) {
    const privateKey = new Uint8Array(PRIVATE_KEY_BYTE_LENGTH);
    for (let i = 0; i < KEYS_BYTE_LENGTH; i++) {
        privateKey[i] = privateKeyRaw[i];
        privateKey[KEYS_BYTE_LENGTH + i] = publicKey[i];
    }
    return privateKey;
}
|
|
|
|
// libp2p wrapper around a raw 32-byte ed25519 public key.
class Ed25519PublicKey {
    constructor(key) {
        this._key = ensureKey(key, PUBLIC_KEY_BYTE_LENGTH);
    }
    // Verify `sig` over `data` with this key.
    async verify(data, sig) {
        return await hashAndVerify$1(this._key, sig, data);
    }
    // The raw 32-byte key.
    marshal() {
        return this._key;
    }
    // Protobuf encoding: { Type: Ed25519, Data: raw key }.
    get bytes() {
        return PublicKey.encode({
            Type: KeyType.Ed25519,
            Data: this.marshal()
        }).subarray();
    }
    // Keys are equal when their protobuf encodings are byte-identical.
    equals(key) {
        return equals(this.bytes, key.bytes);
    }
    // SHA-256 multihash digest of the protobuf-encoded key.
    async hash() {
        const { bytes } = await sha256$1.digest(this.bytes);
        return bytes;
    }
}
|
|
// libp2p wrapper around an ed25519 private key (64-byte seed||pub format).
class Ed25519PrivateKey {
    // key - 64 byte Uint8Array containing private key
    // publicKey - 32 byte Uint8Array containing public key
    constructor(key, publicKey) {
        this._key = ensureKey(key, PRIVATE_KEY_BYTE_LENGTH);
        this._publicKey = ensureKey(publicKey, PUBLIC_KEY_BYTE_LENGTH);
    }
    // Sign `message` with this key.
    async sign(message) {
        return await hashAndSign$1(this._key, message);
    }
    // The corresponding public key.
    get public() {
        return new Ed25519PublicKey(this._publicKey);
    }
    // The raw 64-byte key material.
    marshal() {
        return this._key;
    }
    // Protobuf encoding: { Type: Ed25519, Data: raw key material }.
    get bytes() {
        return PrivateKey.encode({
            Type: KeyType.Ed25519,
            Data: this.marshal()
        }).subarray();
    }
    // Keys are equal when their protobuf encodings are byte-identical.
    equals(key) {
        return equals(this.bytes, key.bytes);
    }
    // SHA-256 multihash digest of the protobuf-encoded key.
    async hash() {
        const { bytes } = await sha256$1.digest(this.bytes);
        return bytes;
    }
    /**
     * Gets the ID of the key.
     *
     * The key id is the base58 encoding of the identity multihash containing its public key.
     * The public key is a protobuf encoding containing a type and the DER encoding
     * of the PKCS SubjectPublicKeyInfo.
     *
     * @returns {Promise<string>}
     */
    async id() {
        const encoding = await identity.digest(this.public.bytes);
        // substring(1) strips the multibase 'z' prefix added by base58btc.
        return base58btc.encode(encoding.bytes).substring(1);
    }
    /**
     * Exports the key into a password protected `format`
     * (only 'libp2p-key' is supported).
     */
    async export(password, format = 'libp2p-key') {
        if (format === 'libp2p-key') {
            return await exporter(this.bytes, password);
        }
        else {
            throw errCode(new Error(`export format '${format}' is not supported`), 'ERR_INVALID_EXPORT_FORMAT');
        }
    }
}
|
|
// Parse a stored ed25519 private key. Supports both the current 64-byte
// format (seed || public key) and an older 96-byte format that appended the
// public key a second time.
function unmarshalEd25519PrivateKey(bytes) {
    // Try the old, redundant public key version
    if (bytes.length > PRIVATE_KEY_BYTE_LENGTH) {
        bytes = ensureKey(bytes, PRIVATE_KEY_BYTE_LENGTH + PUBLIC_KEY_BYTE_LENGTH);
        const privateKeyBytes = bytes.slice(0, PRIVATE_KEY_BYTE_LENGTH);
        const publicKeyBytes = bytes.slice(PRIVATE_KEY_BYTE_LENGTH, bytes.length);
        return new Ed25519PrivateKey(privateKeyBytes, publicKeyBytes);
    }
    bytes = ensureKey(bytes, PRIVATE_KEY_BYTE_LENGTH);
    const privateKeyBytes = bytes.slice(0, PRIVATE_KEY_BYTE_LENGTH);
    // the public key is the trailing 32 bytes of the 64-byte private key
    const publicKeyBytes = bytes.slice(PUBLIC_KEY_BYTE_LENGTH);
    return new Ed25519PrivateKey(privateKeyBytes, publicKeyBytes);
}
// Wrap a raw 32-byte public key.
function unmarshalEd25519PublicKey(bytes) {
    bytes = ensureKey(bytes, PUBLIC_KEY_BYTE_LENGTH);
    return new Ed25519PublicKey(bytes);
}
// Generate a fresh random ed25519 keypair.
async function generateKeyPair$1() {
    const { privateKey, publicKey } = await generateKey$1();
    return new Ed25519PrivateKey(privateKey, publicKey);
}
// Derive a deterministic ed25519 keypair from a 32-byte seed.
async function generateKeyPairFromSeed(seed) {
    const { privateKey, publicKey } = await generateKeyFromSeed(seed);
    return new Ed25519PrivateKey(privateKey, publicKey);
}
|
|
/**
 * Copy key material into a fresh Uint8Array (treating null/undefined as
 * empty) and enforce the exact expected length.
 *
 * @throws ERR_INVALID_KEY_TYPE when the length does not match
 */
function ensureKey(key, length) {
    const copy = Uint8Array.from(key ?? []);
    if (copy.length !== length) {
        throw errCode(new Error(`Key must be a Uint8Array of length ${length}, got ${copy.length}`), 'ERR_INVALID_KEY_TYPE');
    }
    return copy;
}
|
|
|
|
// Namespace object for the ed25519 implementation (rollup module wrapper).
var Ed25519 = /*#__PURE__*/Object.freeze({
    __proto__: null,
    Ed25519PublicKey: Ed25519PublicKey,
    Ed25519PrivateKey: Ed25519PrivateKey,
    unmarshalEd25519PrivateKey: unmarshalEd25519PrivateKey,
    unmarshalEd25519PublicKey: unmarshalEd25519PublicKey,
    generateKeyPair: generateKeyPair$1,
    generateKeyPairFromSeed: generateKeyPairFromSeed
});
// Generate a random secp256k1 private key (raw bytes).
function generateKey() {
    return utils$1.randomPrivateKey();
}
|
|
/**
 * Hash and sign message with private key
 * (secp256k1 signs the SHA-256 digest, not the raw message).
 */
async function hashAndSign(key, msg) {
    const { digest } = await sha256$1.digest(msg);
    try {
        return await sign$2(digest, key);
    }
    catch (err) {
        // Wrap backend errors with a stable libp2p error code.
        throw errCode(err, 'ERR_INVALID_INPUT');
    }
}
/**
 * Hash message and verify signature with public key
 */
async function hashAndVerify(key, sig, msg) {
    try {
        const { digest } = await sha256$1.digest(msg);
        return verify$1(sig, digest, key);
    }
    catch (err) {
        throw errCode(err, 'ERR_INVALID_INPUT');
    }
}
// Re-encode a secp256k1 public key in compressed (33-byte) form.
function compressPublicKey(key) {
    const point = Point$1.fromHex(key).toRawBytes(true);
    return point;
}
// Throws ERR_INVALID_PRIVATE_KEY unless `key` yields a valid public key.
function validatePrivateKey(key) {
    try {
        getPublicKey$1(key, true);
    }
    catch (err) {
        throw errCode(err, 'ERR_INVALID_PRIVATE_KEY');
    }
}
// Throws ERR_INVALID_PUBLIC_KEY unless `key` decodes to a curve point.
function validatePublicKey(key) {
    try {
        Point$1.fromHex(key);
    }
    catch (err) {
        throw errCode(err, 'ERR_INVALID_PUBLIC_KEY');
    }
}
// Derive the compressed public key for a private key.
function computePublicKey(privateKey) {
    try {
        return getPublicKey$1(privateKey, true);
    }
    catch (err) {
        throw errCode(err, 'ERR_INVALID_PRIVATE_KEY');
    }
}
|
|
|
|
// libp2p wrapper around a secp256k1 public key (validated on construction).
class Secp256k1PublicKey {
    constructor(key) {
        validatePublicKey(key);
        this._key = key;
    }
    // Verify `sig` over (the SHA-256 of) `data`.
    async verify(data, sig) {
        return await hashAndVerify(this._key, sig, data);
    }
    // Serialize in compressed (33-byte) form.
    marshal() {
        return compressPublicKey(this._key);
    }
    // Protobuf encoding: { Type: Secp256k1, Data: compressed key }.
    get bytes() {
        return PublicKey.encode({
            Type: KeyType.Secp256k1,
            Data: this.marshal()
        }).subarray();
    }
    // Keys are equal when their protobuf encodings are byte-identical.
    equals(key) {
        return equals(this.bytes, key.bytes);
    }
    // SHA-256 multihash digest of the protobuf-encoded key.
    async hash() {
        const { bytes } = await sha256$1.digest(this.bytes);
        return bytes;
    }
}
|
|
/**
 * secp256k1 private key wrapper implementing the libp2p PrivateKey
 * interface. The public half is derived from the private key when the
 * caller does not supply it.
 */
class Secp256k1PrivateKey {
    constructor(key, publicKey) {
        this._key = key;
        // derive the public key when one was not provided
        this._publicKey = publicKey ?? computePublicKey(key);
        validatePrivateKey(this._key);
        validatePublicKey(this._publicKey);
    }
    /**
     * Sign the SHA-256 digest of `message`.
     */
    async sign(message) {
        return await hashAndSign(this._key, message);
    }
    // Fresh wrapper instance over the stored public key bytes.
    get public() {
        return new Secp256k1PublicKey(this._publicKey);
    }
    /**
     * Raw private key bytes.
     */
    marshal() {
        return this._key;
    }
    /**
     * Protobuf-encoded private key: type tag + raw key bytes.
     */
    get bytes() {
        return PrivateKey.encode({
            Type: KeyType.Secp256k1,
            Data: this.marshal()
        }).subarray();
    }
    // Equality is over the protobuf encodings.
    equals(key) {
        return equals(this.bytes, key.bytes);
    }
    /**
     * SHA-256 multihash digest bytes of the protobuf-encoded key.
     */
    async hash() {
        const { bytes } = await sha256$1.digest(this.bytes);
        return bytes;
    }
    /**
     * Gets the ID of the key.
     *
     * The key id is the base58 encoding of the SHA-256 multihash of its public key.
     * The public key is a protobuf encoding containing a type and the DER encoding
     * of the PKCS SubjectPublicKeyInfo.
     */
    async id() {
        const hash = await this.public.hash();
        return toString$3(hash, 'base58btc');
    }
    /**
     * Exports the key into a password protected `format`
     * (only 'libp2p-key' is supported).
     */
    async export(password, format = 'libp2p-key') {
        if (format === 'libp2p-key') {
            return await exporter(this.bytes, password);
        }
        else {
            throw errCode(new Error(`export format '${format}' is not supported`), 'ERR_INVALID_EXPORT_FORMAT');
        }
    }
}
|
|
/**
 * Wrap raw private key bytes in a Secp256k1PrivateKey (validates on
 * construction).
 */
function unmarshalSecp256k1PrivateKey(bytes) {
    const privateKey = new Secp256k1PrivateKey(bytes);
    return privateKey;
}
|
|
/**
 * Wrap raw public key bytes in a Secp256k1PublicKey (validates on
 * construction).
 */
function unmarshalSecp256k1PublicKey(bytes) {
    const publicKey = new Secp256k1PublicKey(bytes);
    return publicKey;
}
|
|
/**
 * Generate a new secp256k1 private key wrapped in a Secp256k1PrivateKey.
 *
 * `generateKey()` is synchronous (it just draws random bytes), so there is
 * nothing to await; the async signature is kept for API compatibility.
 */
async function generateKeyPair() {
    const privateKeyBytes = generateKey();
    return new Secp256k1PrivateKey(privateKeyBytes);
}
|
|
|
|
// Frozen namespace object emulating the module exports of the secp256k1
// key implementation; consumed below via `supportedKeys.secp256k1`.
var Secp256k1 = /*#__PURE__*/Object.freeze({
    __proto__: null,
    Secp256k1PublicKey: Secp256k1PublicKey,
    Secp256k1PrivateKey: Secp256k1PrivateKey,
    unmarshalSecp256k1PrivateKey: unmarshalSecp256k1PrivateKey,
    unmarshalSecp256k1PublicKey: unmarshalSecp256k1PublicKey,
    generateKeyPair: generateKeyPair
});
|
|
|
|
// Registry of key implementations keyed by lower-case algorithm name.
const supportedKeys = {
    rsa: RSA,
    ed25519: Ed25519,
    secp256k1: Secp256k1
};
|
|
/**
 * Build (not throw) a coded error listing the supported key types.
 */
function unsupportedKey(type) {
    const supported = Object.keys(supportedKeys).join(' / ');
    const message = `invalid or unsupported key type ${type}. Must be ${supported}`;
    return errCode(new Error(message), 'ERR_UNSUPPORTED_KEY_TYPE');
}
|
|
/**
 * Map a (case-insensitive) key type name to its implementation namespace;
 * throws a coded error for unknown types.
 */
function typeToKey(type) {
    const normalised = type.toLowerCase();
    switch (normalised) {
        case 'rsa':
        case 'ed25519':
        case 'secp256k1':
            return supportedKeys[normalised];
        default:
            throw unsupportedKey(normalised);
    }
}
|
|
// Converts a protobuf serialized public key into its
// representative object. The protobuf carries a Type tag and the raw
// key bytes (Data); dispatch to the matching implementation.
function unmarshalPublicKey(buf) {
    const decoded = PublicKey.decode(buf);
    const data = decoded.Data;
    switch (decoded.Type) {
        case KeyType.RSA:
            return supportedKeys.rsa.unmarshalRsaPublicKey(data);
        case KeyType.Ed25519:
            return supportedKeys.ed25519.unmarshalEd25519PublicKey(data);
        case KeyType.Secp256k1:
            return supportedKeys.secp256k1.unmarshalSecp256k1PublicKey(data);
        default:
            throw unsupportedKey(decoded.Type);
    }
}
|
|
// Converts a public key object into a protobuf serialized public key.
// `type` defaults to 'rsa'; it is only validated, the actual encoding
// comes from the key object itself.
function marshalPublicKey(key, type) {
    const normalised = (type ?? 'rsa').toLowerCase();
    typeToKey(normalised); // throws for unknown types
    return key.bytes;
}
|
|
// Converts a protobuf serialized private key into its
// representative object. Only the RSA path is asynchronous; the other
// branches return synchronously-constructed keys.
async function unmarshalPrivateKey(buf) {
    const decoded = PrivateKey.decode(buf);
    const data = decoded.Data;
    switch (decoded.Type) {
        case KeyType.RSA:
            return await supportedKeys.rsa.unmarshalRsaPrivateKey(data);
        case KeyType.Ed25519:
            return supportedKeys.ed25519.unmarshalEd25519PrivateKey(data);
        case KeyType.Secp256k1:
            return supportedKeys.secp256k1.unmarshalSecp256k1PrivateKey(data);
        default:
            throw unsupportedKey(decoded.Type);
    }
}
|
|
|
|
// Brand symbol identifying PeerId instances (see PeerIdImpl below).
const symbol$2 = Symbol.for('@libp2p/peer-id');

// Composite multibase decoder accepting any of the registered base
// encodings.
const baseDecoder = Object
    .values(bases)
    .map(codec => codec.decoder)
    // @ts-expect-error https://github.com/multiformats/js-multiformats/issues/141
    .reduce((acc, curr) => acc.or(curr), bases.identity.decoder);
// these values are from https://github.com/multiformats/multicodec/blob/master/table.csv
const LIBP2P_KEY_CODE = 0x72;
// Byte lengths of protobuf-marshalled public keys embedded in identity
// multihashes; used to tell ed25519 and secp256k1 PeerIds apart.
const MARSHALLED_ED225519_PUBLIC_KEY_LENGTH = 36;
const MARSHALLED_SECP256K1_PUBLIC_KEY_LENGTH = 37;
|
|
/**
 * Base PeerId implementation: a multihash plus optional key material.
 */
class PeerIdImpl {
    constructor(init) {
        this.type = init.type;
        this.multihash = init.multihash;
        this.privateKey = init.privateKey;
        // mark string cache as non-enumerable
        Object.defineProperty(this, 'string', {
            enumerable: false,
            writable: true
        });
    }
    get [Symbol.toStringTag]() {
        return `PeerId(${this.toString()})`;
    }
    // Brand used by instanceof-free PeerId checks.
    get [symbol$2]() {
        return true;
    }
    // base58btc-encoded multihash with the leading multibase prefix
    // stripped; cached after the first call.
    toString() {
        if (this.string == null) {
            this.string = base58btc.encode(this.multihash.bytes).slice(1);
        }
        return this.string;
    }
    // return self-describing String representation
    // in default format from RFC 0001: https://github.com/libp2p/specs/pull/209
    toCID() {
        return CID.createV1(LIBP2P_KEY_CODE, this.multihash);
    }
    toBytes() {
        return this.multihash.bytes;
    }
    /**
     * Returns the PeerId as a JSON encoded string
     */
    toJSON() {
        return this.toString();
    }
    /**
     * Checks the equality of `this` peer against a given PeerId.
     * Accepts raw multihash bytes, a string form, or another PeerId-like
     * object with `multihash.bytes`.
     */
    equals(id) {
        if (id instanceof Uint8Array) {
            return equals(this.multihash.bytes, id);
        }
        else if (typeof id === 'string') {
            return peerIdFromString(id).equals(this);
        }
        else if (id?.multihash?.bytes != null) {
            return equals(this.multihash.bytes, id.multihash.bytes);
        }
        else {
            throw new Error('not valid Id');
        }
    }
}
|
|
// PeerId backed by the sha2-256 hash of an RSA public key; the public key
// bytes are only available when supplied at construction time.
class RSAPeerIdImpl extends PeerIdImpl {
    constructor(init) {
        super({ ...init, type: 'RSA' });
        this.type = 'RSA';
        this.publicKey = init.publicKey;
    }
}
|
|
// PeerId backed by an identity multihash whose digest is the marshalled
// ed25519 public key itself.
class Ed25519PeerIdImpl extends PeerIdImpl {
    constructor(init) {
        super({ ...init, type: 'Ed25519' });
        this.type = 'Ed25519';
        this.publicKey = init.multihash.digest;
    }
}
|
|
// PeerId backed by an identity multihash whose digest is the marshalled
// secp256k1 public key itself.
class Secp256k1PeerIdImpl extends PeerIdImpl {
    constructor(init) {
        super({ ...init, type: 'secp256k1' });
        this.type = 'secp256k1';
        this.publicKey = init.multihash.digest;
    }
}
|
|
// Parse a PeerId from its string form. NOTE(review): the `decoder`
// parameter is currently unused (kept for API compatibility); multibase
// strings are decoded with the shared `baseDecoder`.
function peerIdFromString(str, decoder) {
    if (str.charAt(0) === '1' || str.charAt(0) === 'Q') {
        // identity hash ed25519/secp256k1 key or sha2-256 hash of
        // rsa public key - base58btc encoded either way
        const multihash = decode$3(base58btc.decode(`z${str}`));
        // the string prefix identifies the key type
        if (str.startsWith('12D')) {
            return new Ed25519PeerIdImpl({ multihash });
        }
        else if (str.startsWith('16U')) {
            return new Secp256k1PeerIdImpl({ multihash });
        }
        else {
            return new RSAPeerIdImpl({ multihash });
        }
    }
    // otherwise treat the string as a multibase-encoded CID
    return peerIdFromBytes(baseDecoder.decode(str));
}
|
|
// Parse a PeerId from raw bytes: either a bare multihash, or (on parse
// failure) a CID wrapping one.
function peerIdFromBytes(buf) {
    try {
        const multihash = decode$3(buf);
        if (multihash.code === identity.code) {
            // identity multihash: the digest is the marshalled public key
            // itself; its length distinguishes ed25519 from secp256k1
            if (multihash.digest.length === MARSHALLED_ED225519_PUBLIC_KEY_LENGTH) {
                return new Ed25519PeerIdImpl({ multihash });
            }
            else if (multihash.digest.length === MARSHALLED_SECP256K1_PUBLIC_KEY_LENGTH) {
                return new Secp256k1PeerIdImpl({ multihash });
            }
        }
        if (multihash.code === sha256$1.code) {
            // sha2-256 multihash of an RSA public key
            return new RSAPeerIdImpl({ multihash });
        }
    }
    catch {
        // not a bare multihash - try interpreting the bytes as a CID
        return peerIdFromCID(CID.decode(buf));
    }
    throw new Error('Supplied PeerID CID is invalid');
}
|
|
// Parse a PeerId from a CID. v1 CIDs must carry the libp2p-key codec;
// the wrapped multihash determines the key type exactly as in
// peerIdFromBytes.
function peerIdFromCID(cid) {
    if (cid == null || cid.multihash == null || cid.version == null || (cid.version === 1 && cid.code !== LIBP2P_KEY_CODE)) {
        throw new Error('Supplied PeerID CID is invalid');
    }
    const multihash = cid.multihash;
    if (multihash.code === sha256$1.code) {
        return new RSAPeerIdImpl({ multihash: cid.multihash });
    }
    else if (multihash.code === identity.code) {
        // identity multihash digest length distinguishes the key type
        if (multihash.digest.length === MARSHALLED_ED225519_PUBLIC_KEY_LENGTH) {
            return new Ed25519PeerIdImpl({ multihash: cid.multihash });
        }
        else if (multihash.digest.length === MARSHALLED_SECP256K1_PUBLIC_KEY_LENGTH) {
            return new Secp256k1PeerIdImpl({ multihash: cid.multihash });
        }
    }
    throw new Error('Supplied PeerID CID is invalid');
}
|
|
/**
 * Build a PeerId from marshalled key material.
 *
 * ed25519/secp256k1 public keys are small enough to embed verbatim in an
 * identity multihash; anything else is treated as RSA and hashed with
 * sha2-256.
 *
 * @param publicKey - A marshalled public key
 * @param privateKey - A marshalled private key
 */
async function peerIdFromKeys(publicKey, privateKey) {
    if (publicKey.length === MARSHALLED_ED225519_PUBLIC_KEY_LENGTH) {
        return new Ed25519PeerIdImpl({ multihash: create$5(identity.code, publicKey), privateKey });
    }
    if (publicKey.length === MARSHALLED_SECP256K1_PUBLIC_KEY_LENGTH) {
        return new Secp256k1PeerIdImpl({ multihash: create$5(identity.code, publicKey), privateKey });
    }
    return new RSAPeerIdImpl({ multihash: await sha256$1.digest(publicKey), publicKey, privateKey });
}
|
|
|
|
// Numeric enum of keypair algorithms (TypeScript-style double-mapped
// enum: name -> number and number -> name).
var KeypairType;
(function (KeypairType) {
    KeypairType[KeypairType["rsa"] = 0] = "rsa";
    KeypairType[KeypairType["ed25519"] = 1] = "ed25519";
    KeypairType[KeypairType["secp256k1"] = 2] = "secp256k1";
})(KeypairType || (KeypairType = {}));
|
|
|
|
/**
 * secp256k1 keypair in which either half may be absent. Public keys are
 * normalised to compressed form on construction.
 */
class Secp256k1Keypair {
    constructor(privateKey, publicKey) {
        let pub = publicKey;
        if (pub) {
            pub = compressPublicKey$1(pub);
        }
        // NOTE: the assignments inside the conditions are deliberate —
        // the fields are set even when the argument is undefined, and the
        // verify step only runs when a key was actually supplied.
        if ((this._privateKey = privateKey) && !this.privateKeyVerify()) {
            throw new Error("Invalid private key");
        }
        if ((this._publicKey = pub) && !this.publicKeyVerify()) {
            throw new Error("Invalid public key");
        }
        this.type = KeypairType.secp256k1;
    }
    // Generate a fresh keypair from 32 random bytes.
    static async generate() {
        const privateKey = randomBytes$1(32);
        const publicKey = getPublicKey$1(privateKey);
        return new Secp256k1Keypair(privateKey, publicKey);
    }
    // True when the key is valid, or when no key is set at all.
    privateKeyVerify(key = this._privateKey) {
        if (key) {
            return utils$1.isValidPrivateKey(key);
        }
        return true;
    }
    // True when the key parses as a curve point, or when no key is set.
    publicKeyVerify(key = this._publicKey) {
        if (key) {
            try {
                Point$1.fromHex(key);
                return true;
            }
            catch {
                return false;
            }
        }
        return true;
    }
    // Throws when the keypair has no private component.
    get privateKey() {
        if (!this._privateKey) {
            throw new Error();
        }
        return this._privateKey;
    }
    // Throws when the keypair has no public component.
    get publicKey() {
        if (!this._publicKey) {
            throw new Error();
        }
        return this._publicKey;
    }
    hasPrivateKey() {
        return !!this._privateKey;
    }
}
|
|
|
|
const ERR_TYPE_NOT_IMPLEMENTED = "Keypair type not implemented";
// Build a Keypair of the given KeypairType; only secp256k1 is
// implemented here.
function createKeypair(type, privateKey, publicKey) {
    switch (type) {
        case KeypairType.secp256k1:
            return new Secp256k1Keypair(privateKey, publicKey);
        default:
            throw new Error(ERR_TYPE_NOT_IMPLEMENTED);
    }
}
|
|
// Convert an internal Keypair into a libp2p PeerId. Only secp256k1 is
// supported; the private key is carried over when present.
async function createPeerIdFromKeypair(keypair) {
    switch (keypair.type) {
        case KeypairType.secp256k1: {
            const publicKey = new supportedKeys.secp256k1.Secp256k1PublicKey(keypair.publicKey);
            const privateKey = keypair.hasPrivateKey()
                ? new supportedKeys.secp256k1.Secp256k1PrivateKey(keypair.privateKey)
                : undefined;
            return peerIdFromKeys(publicKey.bytes, privateKey?.bytes);
        }
        default:
            throw new Error(ERR_TYPE_NOT_IMPLEMENTED);
    }
}
|
|
// Convert a libp2p PeerId back into an internal Keypair, unmarshalling
// whichever key halves the PeerId carries.
async function createKeypairFromPeerId(peerId) {
    let keypairType;
    switch (peerId.type) {
        case "RSA":
            keypairType = KeypairType.rsa;
            break;
        case "Ed25519":
            keypairType = KeypairType.ed25519;
            break;
        case "secp256k1":
            keypairType = KeypairType.secp256k1;
            break;
        default:
            throw new Error("Unsupported peer id type");
    }
    const publicKey = peerId.publicKey
        ? unmarshalPublicKey(peerId.publicKey)
        : undefined;
    const privateKey = peerId.privateKey
        ? await unmarshalPrivateKey(peerId.privateKey)
        : undefined;
    // NOTE: createKeypair throws for non-secp256k1 types, so RSA/Ed25519
    // PeerIds will fail here despite the mapping above.
    return createKeypair(keypairType, privateKey?.marshal(), publicKey?.marshal());
}
|
|
|
|
/**
 * string -> [[str name, str addr]... ]
 *
 * Split a multiaddr string into (protocol name, address) tuples. The loop
 * index is advanced manually to consume each protocol's address segment.
 */
function stringToStringTuples(str) {
    const tuples = [];
    const parts = str.split('/').slice(1); // skip first empty elem
    if (parts.length === 1 && parts[0] === '') {
        // the empty multiaddr '/'
        return [];
    }
    for (let p = 0; p < parts.length; p++) {
        const part = parts[p];
        const proto = getProtocol$1(part);
        if (proto.size === 0) {
            // address-less protocol: name only
            tuples.push([part]);
            continue;
        }
        p++; // advance addr part
        if (p >= parts.length) {
            // protocol expected an address but the string ended
            throw ParseError('invalid address: ' + str);
        }
        // if it's a path proto, take the rest
        if (proto.path === true) {
            tuples.push([
                part,
                // TODO: should we need to check each path part to see if it's a proto?
                // This would allow for other protocols to be added after a unix path,
                // however it would have issues if the path had a protocol name in the path
                cleanPath(parts.slice(p).join('/'))
            ]);
            break;
        }
        tuples.push([part, parts[p]]);
    }
    return tuples;
}
|
|
/**
 * [[str name, str addr]... ] -> string
 *
 * Render string tuples back into a '/'-joined multiaddr string.
 */
function stringTuplesToString(tuples) {
    const parts = [];
    // for...of instead of .map(): we only want the side effect of
    // accumulating parts, not a mapped array of nulls.
    for (const tup of tuples) {
        const proto = protoFromTuple(tup);
        parts.push(proto.name);
        if (tup.length > 1 && tup[1] != null) {
            parts.push(tup[1]);
        }
    }
    return cleanPath(parts.join('/'));
}
|
|
/**
 * [[str name, str addr]... ] -> [[int code, Uint8Array]... ]
 *
 * Bare (non-array) entries are treated as single-element tuples.
 */
function stringTuplesToTuples(tuples) {
    return tuples.map((tup) => {
        const normalised = Array.isArray(tup) ? tup : [tup];
        const proto = protoFromTuple(normalised);
        return normalised.length > 1
            ? [proto.code, convertToBytes(proto.code, normalised[1])]
            : [proto.code];
    });
}
|
|
/**
 * Convert tuples to string tuples
 *
 * [[int code, Uint8Array]... ] -> [[int code, str addr]... ]
 */
function tuplesToStringTuples(tuples) {
    return tuples.map((tup) => {
        const proto = protoFromTuple(tup);
        return tup[1] != null
            ? [proto.code, convertToString(proto.code, tup[1])]
            : [proto.code];
    });
}
|
|
/**
 * [[int code, Uint8Array ]... ] -> Uint8Array
 *
 * Each tuple becomes varint(code) [+ address bytes]; the concatenated
 * result is validated and copied by fromBytes.
 */
function tuplesToBytes(tuples) {
    return fromBytes(concat(tuples.map((tup) => {
        const proto = protoFromTuple(tup);
        let buf = Uint8Array.from(varint.encode(proto.code));
        if (tup.length > 1 && tup[1] != null) {
            buf = concat([buf, tup[1]]); // add address buffer
        }
        return buf;
    })));
}
|
|
// Number of bytes the address portion of protocol `p` occupies at the
// head of `addr`. Fixed-size protocols report their size in bits;
// negative-size protocols are varint length-prefixed.
function sizeForAddr(p, addr) {
    if (p.size > 0) {
        return p.size / 8;
    }
    else if (p.size === 0) {
        return 0;
    }
    else {
        const size = varint.decode(addr);
        // varint.decode.bytes is the (stateful) byte count of the last
        // decode — include the length prefix itself in the total
        return size + varint.decode.bytes;
    }
}
|
|
// Uint8Array -> [[int code, Uint8Array addr]... ]
// Walks the buffer decoding varint protocol codes and slicing out each
// protocol's address bytes; throws when the data is truncated.
function bytesToTuples(buf) {
    const tuples = [];
    let i = 0;
    while (i < buf.length) {
        const code = varint.decode(buf, i);
        // number of bytes the varint above consumed (stateful property)
        const n = varint.decode.bytes;
        const p = getProtocol$1(code);
        const size = sizeForAddr(p, buf.slice(i + n));
        if (size === 0) {
            // address-less protocol
            tuples.push([code]);
            i += n;
            continue;
        }
        const addr = buf.slice(i + n, i + n + size);
        i += (size + n);
        if (i > buf.length) { // did not end _exactly_ at buffer.length
            throw ParseError('Invalid address Uint8Array: ' + toString$3(buf, 'base16'));
        }
        // ok, tuple seems good.
        tuples.push([code, addr]);
    }
    return tuples;
}
|
|
/**
 * Uint8Array -> String
 */
function bytesToString(buf) {
    return stringTuplesToString(tuplesToStringTuples(bytesToTuples(buf)));
}
|
|
/**
 * String -> Uint8Array
 */
function stringToBytes$1(str) {
    const normalised = cleanPath(str);
    return tuplesToBytes(stringTuplesToTuples(stringToStringTuples(normalised)));
}
|
|
/**
 * String -> Uint8Array
 *
 * Thin alias over stringToBytes$1 kept for API symmetry with fromBytes.
 */
function fromString(str) {
    return stringToBytes$1(str);
}
|
|
/**
 * Uint8Array -> Uint8Array
 *
 * Validates the buffer parses as multiaddr bytes, then returns a
 * defensive copy so callers cannot mutate our view.
 */
function fromBytes(buf) {
    const validationError = validateBytes(buf);
    if (validationError != null) {
        throw validationError;
    }
    return Uint8Array.from(buf); // copy
}
|
|
/**
 * Returns the parse error for `buf` (or undefined when valid) instead of
 * throwing.
 */
function validateBytes(buf) {
    try {
        bytesToTuples(buf); // try to parse. will throw if breaks
        return undefined;
    } catch (err) {
        return err;
    }
}
|
|
/**
 * Normalise a path string: trim whitespace, collapse repeated '/' and
 * guarantee exactly one leading slash.
 */
function cleanPath(str) {
    const segments = str
        .trim()
        .split('/')
        .filter((segment) => segment !== '');
    return `/${segments.join('/')}`;
}
|
|
/**
 * Factory for address-parse failures; returns (does not throw) the Error.
 */
function ParseError(str) {
    return new Error(`Error parsing address: ${str}`);
}
|
|
/**
 * Look up the protocol described by a tuple's first element (its code or
 * name).
 */
function protoFromTuple(tup) {
    return getProtocol$1(tup[0]);
}
|
|
|
|
// Node.js custom-inspection hook symbol.
const inspect = Symbol.for('nodejs.util.inspect.custom');
// Protocol codes for the DNS family of addresses.
const DNS_CODES = [
    getProtocol$1('dns').code,
    getProtocol$1('dns4').code,
    getProtocol$1('dns6').code,
    getProtocol$1('dnsaddr').code
];
// 'ipfs' is the legacy name for 'p2p'.
const P2P_CODES = [
    getProtocol$1('p2p').code,
    getProtocol$1('ipfs').code
];
// Registry of protocol-name -> resolver function used by Multiaddr#resolve.
const resolvers = new Map();
// Brand symbol identifying Multiaddr instances.
const symbol$1 = Symbol.for('@multiformats/js-multiaddr/multiaddr');
|
|
/**
 * Creates a [multiaddr](https://github.com/multiformats/multiaddr) from
 * a Uint8Array, String or another Multiaddr instance.
 */
class Multiaddr {
    /**
     * @example
     * ```js
     * new Multiaddr('/ip4/127.0.0.1/tcp/4001')
     * // <Multiaddr 047f000001060fa1 - /ip4/127.0.0.1/tcp/4001>
     * ```
     *
     * @param {MultiaddrInput} [addr] - If String or Uint8Array, needs to adhere to the address format of a [multiaddr](https://github.com/multiformats/multiaddr#string-format)
     */
    constructor(addr) {
        // default
        if (addr == null) {
            addr = '';
        }
        // Define symbol
        Object.defineProperty(this, symbol$1, { value: true });
        if (addr instanceof Uint8Array) {
            this.bytes = fromBytes(addr);
        }
        else if (typeof addr === 'string') {
            if (addr.length > 0 && addr.charAt(0) !== '/') {
                throw new Error(`multiaddr "${addr}" must start with a "/"`);
            }
            this.bytes = fromString(addr);
        }
        else if (Multiaddr.isMultiaddr(addr)) { // Multiaddr
            this.bytes = fromBytes(addr.bytes); // validate + copy buffer
        }
        else {
            throw new Error('addr must be a string, Buffer, or another Multiaddr');
        }
    }
    /**
     * Returns Multiaddr as a String
     *
     * @example
     * ```js
     * new Multiaddr('/ip4/127.0.0.1/tcp/4001').toString()
     * // '/ip4/127.0.0.1/tcp/4001'
     * ```
     */
    toString() {
        return bytesToString(this.bytes);
    }
    /**
     * Returns Multiaddr as a JSON encoded object
     *
     * @example
     * ```js
     * JSON.stringify(new Multiaddr('/ip4/127.0.0.1/tcp/4001'))
     * // '/ip4/127.0.0.1/tcp/4001'
     * ```
     */
    toJSON() {
        return this.toString();
    }
    /**
     * Returns Multiaddr as a convenient options object to be used with net.createConnection
     *
     * @example
     * ```js
     * new Multiaddr('/ip4/127.0.0.1/tcp/4001').toOptions()
     * // { family: 4, host: '127.0.0.1', transport: 'tcp', port: 4001 }
     * ```
     */
    toOptions() {
        const codes = this.protoCodes();
        const parts = this.toString().split('/').slice(1);
        let transport;
        let port;
        if (parts.length > 2) {
            // default to tcp/443 when protocol & port are omitted from DNS addrs
            if (DNS_CODES.includes(codes[0]) && P2P_CODES.includes(codes[1])) {
                transport = getProtocol$1('tcp').name;
                port = 443;
            }
            else {
                transport = getProtocol$1(parts[2]).name;
                port = parseInt(parts[3]);
            }
        }
        else if (DNS_CODES.includes(codes[0])) {
            transport = getProtocol$1('tcp').name;
            port = 443;
        }
        else {
            throw new Error('multiaddr must have a valid format: "/{ip4, ip6, dns4, dns6, dnsaddr}/{address}/{tcp, udp}/{port}".');
        }
        const opts = {
            // 41 = ip6, 55 = dns6 in the multicodec table linked above
            family: (codes[0] === 41 || codes[0] === 55) ? 6 : 4,
            host: parts[1],
            transport,
            port
        };
        return opts;
    }
    /**
     * Returns the protocols the Multiaddr is defined with, as an array of objects, in
     * left-to-right order. Each object contains the protocol code, protocol name,
     * and the size of its address space in bits.
     * [See list of protocols](https://github.com/multiformats/multiaddr/blob/master/protocols.csv)
     *
     * @example
     * ```js
     * new Multiaddr('/ip4/127.0.0.1/tcp/4001').protos()
     * // [ { code: 4, size: 32, name: 'ip4' },
     * //   { code: 6, size: 16, name: 'tcp' } ]
     * ```
     */
    protos() {
        return this.protoCodes().map(code => Object.assign({}, getProtocol$1(code)));
    }
    /**
     * Returns the codes of the protocols in left-to-right order.
     * [See list of protocols](https://github.com/multiformats/multiaddr/blob/master/protocols.csv)
     *
     * @example
     * ```js
     * Multiaddr('/ip4/127.0.0.1/tcp/4001').protoCodes()
     * // [ 4, 6 ]
     * ```
     */
    protoCodes() {
        const codes = [];
        const buf = this.bytes;
        let i = 0;
        // walk the byte form, decoding each protocol header and skipping
        // over its address bytes
        while (i < buf.length) {
            const code = varint.decode(buf, i);
            const n = varint.decode.bytes;
            const p = getProtocol$1(code);
            const size = sizeForAddr(p, buf.slice(i + n));
            i += (size + n);
            codes.push(code);
        }
        return codes;
    }
    /**
     * Returns the names of the protocols in left-to-right order.
     * [See list of protocols](https://github.com/multiformats/multiaddr/blob/master/protocols.csv)
     *
     * @example
     * ```js
     * new Multiaddr('/ip4/127.0.0.1/tcp/4001').protoNames()
     * // [ 'ip4', 'tcp' ]
     * ```
     */
    protoNames() {
        return this.protos().map(proto => proto.name);
    }
    /**
     * Returns a tuple of parts
     *
     * @example
     * ```js
     * new Multiaddr("/ip4/127.0.0.1/tcp/4001").tuples()
     * // [ [ 4, <Buffer 7f 00 00 01> ], [ 6, <Buffer 0f a1> ] ]
     * ```
     */
    tuples() {
        return bytesToTuples(this.bytes);
    }
    /**
     * Returns a tuple of string/number parts
     * - tuples[][0] = code of protocol
     * - tuples[][1] = contents of address
     *
     * @example
     * ```js
     * new Multiaddr("/ip4/127.0.0.1/tcp/4001").stringTuples()
     * // [ [ 4, '127.0.0.1' ], [ 6, '4001' ] ]
     * ```
     */
    stringTuples() {
        const t = bytesToTuples(this.bytes);
        return tuplesToStringTuples(t);
    }
    /**
     * Encapsulates a Multiaddr in another Multiaddr
     *
     * @example
     * ```js
     * const mh1 = new Multiaddr('/ip4/8.8.8.8/tcp/1080')
     * // <Multiaddr 0408080808060438 - /ip4/8.8.8.8/tcp/1080>
     *
     * const mh2 = new Multiaddr('/ip4/127.0.0.1/tcp/4001')
     * // <Multiaddr 047f000001060fa1 - /ip4/127.0.0.1/tcp/4001>
     *
     * const mh3 = mh1.encapsulate(mh2)
     * // <Multiaddr 0408080808060438047f000001060fa1 - /ip4/8.8.8.8/tcp/1080/ip4/127.0.0.1/tcp/4001>
     *
     * mh3.toString()
     * // '/ip4/8.8.8.8/tcp/1080/ip4/127.0.0.1/tcp/4001'
     * ```
     *
     * @param {MultiaddrInput} addr - Multiaddr to add into this Multiaddr
     */
    encapsulate(addr) {
        addr = new Multiaddr(addr);
        return new Multiaddr(this.toString() + addr.toString());
    }
    /**
     * Decapsulates a Multiaddr from another Multiaddr
     *
     * @example
     * ```js
     * const mh1 = new Multiaddr('/ip4/8.8.8.8/tcp/1080')
     * // <Multiaddr 0408080808060438 - /ip4/8.8.8.8/tcp/1080>
     *
     * const mh2 = new Multiaddr('/ip4/127.0.0.1/tcp/4001')
     * // <Multiaddr 047f000001060fa1 - /ip4/127.0.0.1/tcp/4001>
     *
     * const mh3 = mh1.encapsulate(mh2)
     * // <Multiaddr 0408080808060438047f000001060fa1 - /ip4/8.8.8.8/tcp/1080/ip4/127.0.0.1/tcp/4001>
     *
     * mh3.decapsulate(mh2).toString()
     * // '/ip4/8.8.8.8/tcp/1080'
     * ```
     *
     * @param {Multiaddr | string} addr - Multiaddr to remove from this Multiaddr
     */
    decapsulate(addr) {
        const addrString = addr.toString();
        const s = this.toString();
        // string-based removal: everything from the LAST occurrence of
        // `addr`'s string form onward is dropped
        const i = s.lastIndexOf(addrString);
        if (i < 0) {
            throw new Error(`Address ${this.toString()} does not contain subaddress: ${addr.toString()}`);
        }
        return new Multiaddr(s.slice(0, i));
    }
    /**
     * A more reliable version of `decapsulate` if you are targeting a
     * specific code, such as 421 (the `p2p` protocol code). The last index of the code
     * will be removed from the `Multiaddr`, and a new instance will be returned.
     * If the code is not present, the original `Multiaddr` is returned.
     *
     * @example
     * ```js
     * const addr = new Multiaddr('/ip4/0.0.0.0/tcp/8080/p2p/QmcgpsyWgH8Y8ajJz1Cu72KnS5uo2Aa2LpzU7kinSupNKC')
     * // <Multiaddr 0400... - /ip4/0.0.0.0/tcp/8080/p2p/QmcgpsyWgH8Y8ajJz1Cu72KnS5uo2Aa2LpzU7kinSupNKC>
     *
     * addr.decapsulateCode(421).toString()
     * // '/ip4/0.0.0.0/tcp/8080'
     *
     * new Multiaddr('/ip4/127.0.0.1/tcp/8080').decapsulateCode(421).toString()
     * // '/ip4/127.0.0.1/tcp/8080'
     * ```
     */
    decapsulateCode(code) {
        const tuples = this.tuples();
        // scan from the right for the last occurrence of `code`
        for (let i = tuples.length - 1; i >= 0; i--) {
            if (tuples[i][0] === code) {
                return new Multiaddr(tuplesToBytes(tuples.slice(0, i)));
            }
        }
        return this;
    }
    /**
     * Extract the peerId if the multiaddr contains one
     *
     * @example
     * ```js
     * const mh1 = new Multiaddr('/ip4/8.8.8.8/tcp/1080/ipfs/QmValidBase58string')
     * // <Multiaddr 0408080808060438 - /ip4/8.8.8.8/tcp/1080/ipfs/QmValidBase58string>
     *
     * // should return QmValidBase58string or null if the id is missing or invalid
     * const peerId = mh1.getPeerId()
     * ```
     */
    getPeerId() {
        try {
            const tuples = this.stringTuples().filter((tuple) => {
                if (tuple[0] === names$1.ipfs.code) {
                    return true;
                }
                return false;
            });
            // Get the last ipfs tuple ['ipfs', 'peerid string']
            const tuple = tuples.pop();
            if (tuple?.[1] != null) {
                const peerIdStr = tuple[1];
                // peer id is base58btc encoded string but not multibase encoded so add the `z`
                // prefix so we can validate that it is correctly encoded
                if (peerIdStr[0] === 'Q' || peerIdStr[0] === '1') {
                    return toString$3(base58btc.decode(`z${peerIdStr}`), 'base58btc');
                }
                // try to parse peer id as CID
                return toString$3(CID.parse(peerIdStr).multihash.bytes, 'base58btc');
            }
            return null;
        }
        catch (e) {
            // malformed/missing peer id -> null rather than throwing
            return null;
        }
    }
    /**
     * Extract the path if the multiaddr contains one
     *
     * @example
     * ```js
     * const mh1 = new Multiaddr('/ip4/8.8.8.8/tcp/1080/unix/tmp/p2p.sock')
     * // <Multiaddr 0408080808060438 - /ip4/8.8.8.8/tcp/1080/unix/tmp/p2p.sock>
     *
     * // should return utf8 string or null if the id is missing or invalid
     * const path = mh1.getPath()
     * ```
     */
    getPath() {
        let path = null;
        try {
            // indexing [0][1] throws when no path-type tuple exists; the
            // catch below turns that into null
            path = this.stringTuples().filter((tuple) => {
                const proto = getProtocol$1(tuple[0]);
                if (proto.path === true) {
                    return true;
                }
                return false;
            })[0][1];
            if (path == null) {
                path = null;
            }
        }
        catch {
            path = null;
        }
        return path;
    }
    /**
     * Checks if two Multiaddrs are the same
     *
     * @example
     * ```js
     * const mh1 = new Multiaddr('/ip4/8.8.8.8/tcp/1080')
     * // <Multiaddr 0408080808060438 - /ip4/8.8.8.8/tcp/1080>
     *
     * const mh2 = new Multiaddr('/ip4/127.0.0.1/tcp/4001')
     * // <Multiaddr 047f000001060fa1 - /ip4/127.0.0.1/tcp/4001>
     *
     * mh1.equals(mh1)
     * // true
     *
     * mh1.equals(mh2)
     * // false
     * ```
     */
    equals(addr) {
        return equals(this.bytes, addr.bytes);
    }
    /**
     * Resolve multiaddr if containing resolvable hostname.
     *
     * @example
     * ```js
     * Multiaddr.resolvers.set('dnsaddr', resolverFunction)
     * const mh1 = new Multiaddr('/dnsaddr/bootstrap.libp2p.io/p2p/QmbLHAnMoJPWSCR5Zhtx6BHJX9KiKNN6tpvbUcqanj75Nb')
     * const resolvedMultiaddrs = await mh1.resolve()
     * // [
     * //   <Multiaddr 04934b5353060fa1a503221220c10f9319dac35c270a6b74cd644cb3acfc1f6efc8c821f8eb282599fd1814f64 - /ip4/147.75.83.83/tcp/4001/p2p/QmbLHAnMoJPWSCR5Zhtx6BHJX9KiKNN6tpvbUcqanj75Nb>,
     * //   <Multiaddr 04934b53530601bbde03a503221220c10f9319dac35c270a6b74cd644cb3acfc1f6efc8c821f8eb282599fd1814f64 - /ip4/147.75.83.83/tcp/443/wss/p2p/QmbLHAnMoJPWSCR5Zhtx6BHJX9KiKNN6tpvbUcqanj75Nb>,
     * //   <Multiaddr 04934b535391020fa1cc03a503221220c10f9319dac35c270a6b74cd644cb3acfc1f6efc8c821f8eb282599fd1814f64 - /ip4/147.75.83.83/udp/4001/quic/p2p/QmbLHAnMoJPWSCR5Zhtx6BHJX9KiKNN6tpvbUcqanj75Nb>
     * // ]
     * ```
     */
    async resolve(options) {
        const resolvableProto = this.protos().find((p) => p.resolvable);
        // Multiaddr is not resolvable?
        if (resolvableProto == null) {
            return [this];
        }
        // resolvers are registered externally via Multiaddr.resolvers
        const resolver = resolvers.get(resolvableProto.name);
        if (resolver == null) {
            throw errCode(new Error(`no available resolver for ${resolvableProto.name}`), 'ERR_NO_AVAILABLE_RESOLVER');
        }
        const addresses = await resolver(this, options);
        return addresses.map((a) => new Multiaddr(a));
    }
    /**
     * Gets a Multiaddrs node-friendly address object. Note that protocol information
     * is left out: in Node (and most network systems) the protocol is unknowable
     * given only the address.
     *
     * Has to be a ThinWaist Address, otherwise throws error
     *
     * @example
     * ```js
     * new Multiaddr('/ip4/127.0.0.1/tcp/4001').nodeAddress()
     * // {family: 4, address: '127.0.0.1', port: 4001}
     * ```
     */
    nodeAddress() {
        const options = this.toOptions();
        if (options.transport !== 'tcp' && options.transport !== 'udp') {
            throw new Error(`multiaddr must have a valid format - no protocol with name: "${options.transport}". Must have a valid transport protocol: "{tcp, udp}"`);
        }
        return {
            family: options.family,
            address: options.host,
            port: options.port
        };
    }
    /**
     * Returns if a Multiaddr is a Thin Waist address or not.
     *
     * Thin Waist is if a Multiaddr adheres to the standard combination of:
     *
     * `{IPv4, IPv6}/{TCP, UDP}`
     *
     * @example
     * ```js
     * const mh1 = new Multiaddr('/ip4/127.0.0.1/tcp/4001')
     * // <Multiaddr 047f000001060fa1 - /ip4/127.0.0.1/tcp/4001>
     * const mh2 = new Multiaddr('/ip4/192.168.2.1/tcp/5001')
     * // <Multiaddr 04c0a80201061389 - /ip4/192.168.2.1/tcp/5001>
     * const mh3 = mh1.encapsulate(mh2)
     * // <Multiaddr 047f000001060fa104c0a80201061389 - /ip4/127.0.0.1/tcp/4001/ip4/192.168.2.1/tcp/5001>
     * const mh4 = new Multiaddr('/ip4/127.0.0.1/tcp/2000/wss/p2p-webrtc-star/p2p/QmcgpsyWgH8Y8ajJz1Cu72KnS5uo2Aa2LpzU7kinSooo2a')
     * // <Multiaddr 047f0000010607d0de039302a503221220d52ebb89d85b02a284948203a62ff28389c57c9f42beec4ec20db76a64835843 - /ip4/127.0.0.1/tcp/2000/wss/p2p-webrtc-star/p2p/QmcgpsyWgH8Y8ajJz1Cu72KnS5uo2Aa2LpzU7kinSooo2a>
     * mh1.isThinWaistAddress()
     * // true
     * mh2.isThinWaistAddress()
     * // true
     * mh3.isThinWaistAddress()
     * // false
     * mh4.isThinWaistAddress()
     * // false
     * ```
     */
    isThinWaistAddress(addr) {
        const protos = (addr ?? this).protos();
        if (protos.length !== 2) {
            return false;
        }
        // 4 = ip4, 41 = ip6 per the multicodec table
        if (protos[0].code !== 4 && protos[0].code !== 41) {
            return false;
        }
        // 6 = tcp, 273 = udp per the multicodec table
        if (protos[1].code !== 6 && protos[1].code !== 273) {
            return false;
        }
        return true;
    }
    /**
     * Creates a Multiaddr from a node-friendly address object
     *
     * @example
     * ```js
     * Multiaddr.fromNodeAddress({address: '127.0.0.1', port: '4001'}, 'tcp')
     * // <Multiaddr 047f000001060fa1 - /ip4/127.0.0.1/tcp/4001>
     * ```
     */
    static fromNodeAddress(addr, transport) {
        if (addr == null) {
            throw new Error('requires node address object');
        }
        if (transport == null) {
            throw new Error('requires transport protocol');
        }
        let ip;
        switch (addr.family) {
            case 4:
                ip = 'ip4';
                break;
            case 6:
                ip = 'ip6';
                break;
            default:
                throw Error('Invalid addr family, should be 4 or 6.');
        }
        return new Multiaddr('/' + [ip, addr.address, transport, addr.port].join('/'));
    }
    /**
     * Returns if something is a Multiaddr that is a name
     */
    static isName(addr) {
        if (!Multiaddr.isMultiaddr(addr)) {
            return false;
        }
        // if a part of the multiaddr is resolvable, then return true
        return addr.protos().some((proto) => proto.resolvable);
    }
    /**
     * Check if object is a Multiaddr instance (via the brand symbol)
     */
    static isMultiaddr(value) {
        return Boolean(value?.[symbol$1]);
    }
    /**
     * Returns Multiaddr as a human-readable string.
     * For post Node.js v10.0.0.
     * https://nodejs.org/api/deprecations.html#deprecations_dep0079_custom_inspection_function_on_objects_via_inspect
     *
     * @example
     * ```js
     * console.log(new Multiaddr('/ip4/127.0.0.1/tcp/4001'))
     * // '<Multiaddr 047f000001060fa1 - /ip4/127.0.0.1/tcp/4001>'
     * ```
     */
    [inspect]() {
        return '<Multiaddr ' +
            toString$3(this.bytes, 'base16') + ' - ' +
            bytesToString(this.bytes) + '>';
    }
    /**
     * Returns Multiaddr as a human-readable string.
     * Fallback for pre Node.js v10.0.0.
     * https://nodejs.org/api/deprecations.html#deprecations_dep0079_custom_inspection_function_on_objects_via_inspect
     *
     * @example
     * ```js
     * new Multiaddr('/ip4/127.0.0.1/tcp/4001').inspect()
     * // '<Multiaddr 047f000001060fa1 - /ip4/127.0.0.1/tcp/4001>'
     * ```
     */
    inspect() {
        return '<Multiaddr ' +
            toString$3(this.bytes, 'base16') + ' - ' +
            bytesToString(this.bytes) + '>';
    }
}
|
|
Multiaddr.resolvers = resolvers;
|
|
|
|
/**
 * Builds a two-component multiaddr ("/<family>/<ip>/<protocol>/<value>")
 * from raw ENR field bytes.
 */
function multiaddrFromFields(ipFamily, protocol, ipBytes, protocolBytes) {
    const ipPart = new Multiaddr("/" + ipFamily + "/" + convertToString(ipFamily, ipBytes));
    const protoPart = new Multiaddr("/" + protocol + "/" + convertToString(protocol, protocolBytes));
    return ipPart.encapsulate(protoPart);
}
|
|
|
|
/**
 * Decodes a length-prefixed sequence of multiaddrs.
 *
 * Wire format: repeated [uint16 big-endian length][multiaddr bytes].
 *
 * @param {Uint8Array} bytes - concatenated length-prefixed multiaddrs
 * @returns {Multiaddr[]}
 */
function decodeMultiaddrs(bytes) {
    const multiaddrs = [];
    let index = 0;
    while (index < bytes.length) {
        // `bytes` may be a subarray view into a larger ArrayBuffer, so offsets
        // into `bytes.buffer` must be shifted by `bytes.byteOffset` — without
        // this the length prefix is read from the wrong position (or throws).
        const sizeDataView = new DataView(bytes.buffer, bytes.byteOffset + index, MULTIADDR_LENGTH_SIZE);
        const size = sizeDataView.getUint16(0);
        index += MULTIADDR_LENGTH_SIZE;
        // `slice` copies, so the resulting Multiaddr owns its bytes.
        const multiaddrBytes = bytes.slice(index, index + size);
        index += size;
        const multiaddr = new Multiaddr(multiaddrBytes);
        multiaddrs.push(multiaddr);
    }
    return multiaddrs;
}
|
|
/**
 * Encodes multiaddrs into the length-prefixed wire format used by the
 * `multiaddrs` ENR field: repeated [uint16 big-endian length][bytes].
 * Entries must be location multiaddrs, i.e. without a peer id.
 */
function encodeMultiaddrs(multiaddrs) {
    // First pass: compute the total output size.
    let totalLength = 0;
    for (const ma of multiaddrs) {
        totalLength += MULTIADDR_LENGTH_SIZE + ma.bytes.length;
    }
    const bytes = new Uint8Array(totalLength);
    const dataView = new DataView(bytes.buffer);
    // Second pass: write each entry behind its uint16 length prefix.
    let offset = 0;
    for (const multiaddr of multiaddrs) {
        if (multiaddr.getPeerId())
            throw new Error("`multiaddr` field MUST not contain peer id");
        // Prepend the size of the next entry
        dataView.setUint16(offset, multiaddr.bytes.length);
        offset += MULTIADDR_LENGTH_SIZE;
        bytes.set(multiaddr.bytes, offset);
        offset += multiaddr.bytes.length;
    }
    return bytes;
}
|
|
|
|
/**
 * Signs the keccak-256 digest of `msg` with a secp256k1 private key,
 * producing a non-DER (compact) signature.
 */
async function sign(privKey, msg) {
    const digest = keccak256(msg);
    return sign$2(digest, privKey, { der: false });
}
|
|
/**
 * Derives the ENR node id: keccak-256 over the uncompressed public key
 * with the leading 0x04 marker byte removed, returned hex-encoded.
 */
function nodeId(pubKey) {
    const point = Point$1.fromHex(pubKey);
    // `false` -> uncompressed (65-byte) encoding; drop the 0x04 prefix.
    const uncompressed = point.toRawBytes(false);
    return bytesToHex$1(keccak256(uncompressed.slice(1)));
}
|
|
|
|
/**
 * Packs waku2 protocol flags into a single bitfield byte:
 * bit 0 = relay, bit 1 = store, bit 2 = filter, bit 3 = lightPush.
 */
function encodeWaku2(protocols) {
    let bitfield = 0;
    if (protocols.relay)
        bitfield |= 1;
    if (protocols.store)
        bitfield |= 2;
    if (protocols.filter)
        bitfield |= 4;
    if (protocols.lightPush)
        bitfield |= 8;
    return bitfield;
}
|
|
/**
 * Unpacks the waku2 bitfield byte produced by `encodeWaku2` into
 * per-protocol boolean flags.
 */
function decodeWaku2(byte) {
    return {
        relay: Boolean(byte & 1),
        store: Boolean(byte & 2),
        filter: Boolean(byte & 4),
        lightPush: Boolean(byte & 8),
    };
}
|
|
|
|
const log$5 = debug("waku:enr");
|
|
/**
 * Ethereum Node Record (EIP-778) with Waku-specific extensions
 * (`multiaddrs`, `waku2`). The record is a Map of string keys to
 * Uint8Array values plus a sequence number and an RLP signature.
 */
class ENR extends Map {
    // kvs: initial key/value pairs; seq: record sequence number;
    // signature: signature bytes, or null while the record is unsigned.
    constructor(kvs = {}, seq = BigInt(1), signature = null) {
        super(Object.entries(kvs));
        this.seq = seq;
        this.signature = signature;
    }
    /**
     * Async factory. Builds the ENR and, when a secp256k1 public key is
     * present, pre-computes the matching libp2p peer id. Failure to derive
     * the peer id is logged but does not fail creation.
     */
    static async create(kvs = {}, seq = BigInt(1), signature = null) {
        const enr = new ENR(kvs, seq, signature);
        try {
            const publicKey = enr.publicKey;
            if (publicKey) {
                const keypair = createKeypair(enr.keypairType, undefined, publicKey);
                enr.peerId = await createPeerIdFromKeypair(keypair);
            }
        }
        catch (e) {
            log$5("Could not calculate peer id for ENR", e);
        }
        return enr;
    }
    /**
     * Creates a "v4" identity-scheme record for the given secp256k1 public key.
     */
    static createV4(publicKey, kvs = {}) {
        // EIP-778 specifies that the key must be in compressed format, 33 bytes
        if (publicKey.length !== 33) {
            publicKey = compressPublicKey$1(publicKey);
        }
        return ENR.create({
            ...kvs,
            id: utf8ToBytes("v4"),
            secp256k1: publicKey,
        });
    }
    /**
     * Creates a record from a libp2p peer id. Only secp256k1 peer ids are
     * supported. NOTE(review): the default branch throws an Error with no
     * message — consider adding one upstream.
     */
    static async createFromPeerId(peerId, kvs = {}) {
        const keypair = await createKeypairFromPeerId(peerId);
        switch (keypair.type) {
            case KeypairType.secp256k1:
                return ENR.createV4(keypair.publicKey, kvs);
            default:
                throw new Error();
        }
    }
    /**
     * Rebuilds an ENR from its RLP-decoded value list:
     * [signature, seq, k1, v1, k2, v2, ...]. Verifies the signature over the
     * re-encoded [seq, ...kvs] content before returning.
     *
     * @throws {Error} on malformed input or signature verification failure
     */
    static async decodeFromValues(decoded) {
        if (!Array.isArray(decoded)) {
            throw new Error("Decoded ENR must be an array");
        }
        if (decoded.length % 2 !== 0) {
            throw new Error("Decoded ENR must have an even number of elements");
        }
        const [signature, seq, ...kvs] = decoded;
        // RLP lists decode to arrays; signature and seq must be byte arrays,
        // not nested lists.
        if (!signature || Array.isArray(signature)) {
            throw new Error("Decoded ENR invalid signature: must be a byte array");
        }
        if (!seq || Array.isArray(seq)) {
            throw new Error("Decoded ENR invalid sequence number: must be a byte array");
        }
        const obj = {};
        // Keys arrive as UTF-8 bytes; pairs with undecodable keys are skipped.
        for (let i = 0; i < kvs.length; i += 2) {
            try {
                obj[bytesToUtf8(kvs[i])] = kvs[i + 1];
            }
            catch (e) {
                log$5("Failed to decode ENR key to UTF-8, skipping it", kvs[i], e);
            }
        }
        // If seq is an empty array, translate as value 0
        const hexSeq = "0x" + (seq.length ? bytesToHex$1(seq) : "00");
        const enr = await ENR.create(obj, BigInt(hexSeq), signature);
        // The signature covers the RLP encoding of [seq, ...kvs].
        const rlpEncodedBytes = hexToBytes$1(encode$2([seq, ...kvs]));
        if (!enr.verify(rlpEncodedBytes, signature)) {
            throw new Error("Unable to verify ENR signature");
        }
        return enr;
    }
    /**
     * Decodes an RLP-encoded ENR byte array.
     */
    static decode(encoded) {
        const decoded = decode$2(encoded).map(hexToBytes$1);
        return ENR.decodeFromValues(decoded);
    }
    /**
     * Decodes a textual "enr:<base64url>" record.
     */
    static decodeTxt(encoded) {
        if (!encoded.startsWith(this.RECORD_PREFIX)) {
            throw new Error(`"string encoded ENR must start with '${this.RECORD_PREFIX}'`);
        }
        // slice(4) strips the 4-character "enr:" prefix.
        return ENR.decode(fromString$1(encoded.slice(4), "base64url"));
    }
    // Any mutation invalidates the stored signature and bumps the sequence
    // number. NOTE(review): seq is incremented once per key set, so building a
    // record with several fields advances seq by more than one — confirm this
    // matches the intended EIP-778 semantics.
    set(k, v) {
        this.signature = null;
        this.seq++;
        return super.set(k, v);
    }
    // Identity scheme name (e.g. "v4"), decoded from the "id" field.
    get id() {
        const id = this.get("id");
        if (!id)
            throw new Error("id not found.");
        return bytesToUtf8(id);
    }
    // Keypair type implied by the identity scheme; only "v4" (secp256k1) is known.
    get keypairType() {
        switch (this.id) {
            case "v4":
                return KeypairType.secp256k1;
            default:
                throw new Error(ERR_INVALID_ID);
        }
    }
    // Compressed secp256k1 public key bytes, per the identity scheme.
    get publicKey() {
        switch (this.id) {
            case "v4":
                return this.get("secp256k1");
            default:
                throw new Error(ERR_INVALID_ID);
        }
    }
    // Public-only keypair derived from the record, or undefined when no key is set.
    get keypair() {
        if (this.publicKey) {
            const publicKey = this.publicKey;
            return createKeypair(this.keypairType, undefined, publicKey);
        }
        return;
    }
    // Node id (keccak-256 of the uncompressed public key), or undefined.
    get nodeId() {
        switch (this.id) {
            case "v4":
                return this.publicKey ? nodeId(this.publicKey) : undefined;
            default:
                throw new Error(ERR_INVALID_ID);
        }
    }
    // IPv4 address as a string, or undefined when unset.
    get ip() {
        const raw = this.get("ip");
        if (raw) {
            return convertToString("ip4", raw);
        }
        else {
            return undefined;
        }
    }
    // Setting a falsy value removes the field.
    set ip(ip) {
        if (ip) {
            this.set("ip", convertToBytes("ip4", ip));
        }
        else {
            this.delete("ip");
        }
    }
    // TCP port over IPv4, or undefined when unset.
    get tcp() {
        const raw = this.get("tcp");
        if (raw) {
            return Number(convertToString("tcp", raw));
        }
        else {
            return undefined;
        }
    }
    set tcp(port) {
        if (port === undefined) {
            this.delete("tcp");
        }
        else {
            this.set("tcp", convertToBytes("tcp", port.toString(10)));
        }
    }
    // UDP port over IPv4, or undefined when unset.
    get udp() {
        const raw = this.get("udp");
        if (raw) {
            return Number(convertToString("udp", raw));
        }
        else {
            return undefined;
        }
    }
    set udp(port) {
        if (port === undefined) {
            this.delete("udp");
        }
        else {
            this.set("udp", convertToBytes("udp", port.toString(10)));
        }
    }
    // IPv6 address as a string, or undefined when unset.
    get ip6() {
        const raw = this.get("ip6");
        if (raw) {
            return convertToString("ip6", raw);
        }
        else {
            return undefined;
        }
    }
    set ip6(ip) {
        if (ip) {
            this.set("ip6", convertToBytes("ip6", ip));
        }
        else {
            this.delete("ip6");
        }
    }
    // TCP port over IPv6, or undefined when unset.
    get tcp6() {
        const raw = this.get("tcp6");
        if (raw) {
            return Number(convertToString("tcp", raw));
        }
        else {
            return undefined;
        }
    }
    set tcp6(port) {
        if (port === undefined) {
            this.delete("tcp6");
        }
        else {
            this.set("tcp6", convertToBytes("tcp", port.toString(10)));
        }
    }
    // UDP port over IPv6, or undefined when unset.
    get udp6() {
        const raw = this.get("udp6");
        if (raw) {
            return Number(convertToString("udp", raw));
        }
        else {
            return undefined;
        }
    }
    set udp6(port) {
        if (port === undefined) {
            this.delete("udp6");
        }
        else {
            this.set("udp6", convertToBytes("udp", port.toString(10)));
        }
    }
    /**
     * Get the `multiaddrs` field from ENR.
     *
     * This field is used to store multiaddresses that cannot be stored with the current ENR pre-defined keys.
     * These can be a multiaddresses that include encapsulation (e.g. wss) or do not use `ip4` nor `ip6` for the host
     * address (e.g. `dns4`, `dnsaddr`, etc)..
     *
     * If the peer information only contains information that can be represented with the ENR pre-defined keys
     * (ip, tcp, etc) then the usage of { @link getLocationMultiaddr } should be preferred.
     *
     * The multiaddresses stored in this field are expected to be location multiaddresses, ie, peer id less.
     */
    get multiaddrs() {
        const raw = this.get("multiaddrs");
        if (raw)
            return decodeMultiaddrs(raw);
        return;
    }
    /**
     * Set the `multiaddrs` field on the ENR.
     *
     * This field is used to store multiaddresses that cannot be stored with the current ENR pre-defined keys.
     * These can be a multiaddresses that include encapsulation (e.g. wss) or do not use `ip4` nor `ip6` for the host
     * address (e.g. `dns4`, `dnsaddr`, etc)..
     *
     * If the peer information only contains information that can be represented with the ENR pre-defined keys
     * (ip, tcp, etc) then the usage of { @link setLocationMultiaddr } should be preferred.
     * The multiaddresses stored in this field must be location multiaddresses,
     * ie, without a peer id.
     */
    set multiaddrs(multiaddrs) {
        if (multiaddrs === undefined) {
            this.delete("multiaddrs");
        }
        else {
            const multiaddrsBuf = encodeMultiaddrs(multiaddrs);
            this.set("multiaddrs", multiaddrsBuf);
        }
    }
    /**
     * Builds a location multiaddr (ip + port, no peer id) from the pre-defined
     * ENR fields for the given protocol ("udp", "tcp", "udp4", "tcp6", ...).
     * The bare "udp"/"tcp" forms try IPv4 first, then IPv6. Returns undefined
     * when the needed fields are missing.
     */
    getLocationMultiaddr(protocol) {
        if (protocol === "udp") {
            return (this.getLocationMultiaddr("udp4") || this.getLocationMultiaddr("udp6"));
        }
        if (protocol === "tcp") {
            return (this.getLocationMultiaddr("tcp4") || this.getLocationMultiaddr("tcp6"));
        }
        const isIpv6 = protocol.endsWith("6");
        const ipVal = this.get(isIpv6 ? "ip6" : "ip");
        if (!ipVal) {
            return;
        }
        const isUdp = protocol.startsWith("udp");
        const isTcp = protocol.startsWith("tcp");
        let protoName, protoVal;
        if (isUdp) {
            protoName = "udp";
            protoVal = isIpv6 ? this.get("udp6") : this.get("udp");
        }
        else if (isTcp) {
            protoName = "tcp";
            protoVal = isIpv6 ? this.get("tcp6") : this.get("tcp");
        }
        else {
            return;
        }
        if (!protoVal) {
            return;
        }
        return multiaddrFromFields(isIpv6 ? "ip6" : "ip4", protoName, ipVal, protoVal);
    }
    /**
     * Stores a thin-waist multiaddr ({ip4,ip6}/{tcp,udp}) into the pre-defined
     * ENR fields (ip/ip6 + tcp/udp/tcp6/udp6).
     *
     * NOTE(review): the validation uses `&&`, so a malformed multiaddr only
     * throws when ALL checks fail — confirm whether `||` was intended.
     */
    setLocationMultiaddr(multiaddr) {
        const protoNames = multiaddr.protoNames();
        if (protoNames.length !== 2 &&
            protoNames[1] !== "udp" &&
            protoNames[1] !== "tcp") {
            throw new Error("Invalid multiaddr");
        }
        const tuples = multiaddr.tuples();
        if (!tuples[0][1] || !tuples[1][1]) {
            throw new Error("Invalid multiaddr");
        }
        // IPv4
        if (tuples[0][0] === 4) {
            this.set("ip", tuples[0][1]);
            this.set(protoNames[1], tuples[1][1]);
        }
        else {
            this.set("ip6", tuples[0][1]);
            this.set(protoNames[1] + "6", tuples[1][1]);
        }
    }
    /**
     * Returns the full multiaddr from the ENR fields matching the provided
     * `protocol` parameter.
     * To return full multiaddrs from the `multiaddrs` ENR field,
     * use { @link ENR.getFullMultiaddrs }.
     *
     * @param protocol
     */
    getFullMultiaddr(protocol) {
        if (this.peerId) {
            const locationMultiaddr = this.getLocationMultiaddr(protocol);
            if (locationMultiaddr) {
                return locationMultiaddr.encapsulate(`/p2p/${this.peerId.toString()}`);
            }
        }
        return;
    }
    /**
     * Returns the full multiaddrs from the `multiaddrs` ENR field.
     */
    getFullMultiaddrs() {
        if (this.peerId && this.multiaddrs) {
            const peerId = this.peerId;
            return this.multiaddrs.map((ma) => {
                return ma.encapsulate(`/p2p/${peerId.toString()}`);
            });
        }
        return [];
    }
    /**
     * Get the `waku2` field from ENR.
     */
    get waku2() {
        const raw = this.get("waku2");
        if (raw)
            return decodeWaku2(raw[0]);
        return;
    }
    /**
     * Set the `waku2` field on the ENR.
     */
    set waku2(waku2) {
        if (waku2 === undefined) {
            this.delete("waku2");
        }
        else {
            const byte = encodeWaku2(waku2);
            this.set("waku2", new Uint8Array([byte]));
        }
    }
    /**
     * Verifies `signature` over keccak256(data) with the record's public key.
     *
     * @throws {Error} for non-"v4" records or when no public key is set
     */
    verify(data, signature) {
        if (!this.get("id") || this.id !== "v4") {
            throw new Error(ERR_INVALID_ID);
        }
        if (!this.publicKey) {
            throw new Error("Failed to verify ENR: No public key");
        }
        return verifySignature(signature, keccak256(data), this.publicKey);
    }
    /**
     * Signs `data` with `privateKey` and stores the signature on the record.
     */
    async sign(data, privateKey) {
        switch (this.id) {
            case "v4":
                this.signature = await sign(privateKey, data);
                break;
            default:
                throw new Error(ERR_INVALID_ID);
        }
        return this.signature;
    }
    /**
     * Produces the RLP value list [signature, seq, k1, v1, ...] with keys
     * sorted. When `privateKey` is given the content is (re-)signed;
     * otherwise the existing signature is reused.
     *
     * NOTE(review): seq is encoded as `new Uint8Array([Number(this.seq)])`,
     * i.e. a single byte — sequence numbers above 255 are truncated. Confirm
     * against the EIP-778 encoding before relying on high seq values.
     */
    async encodeToValues(privateKey) {
        // sort keys and flatten into [k, v, k, v, ...]
        const content = Array.from(this.keys())
            .sort((a, b) => a.localeCompare(b))
            .map((k) => [k, this.get(k)])
            .map(([k, v]) => [utf8ToBytes(k), v])
            .flat();
        content.unshift(new Uint8Array([Number(this.seq)]));
        if (privateKey) {
            // Sign the RLP encoding of [seq, ...kvs], then prepend the signature.
            content.unshift(await this.sign(hexToBytes$1(encode$2(content)), privateKey));
        }
        else {
            if (!this.signature) {
                throw new Error(ERR_NO_SIGNATURE);
            }
            content.unshift(this.signature);
        }
        return content;
    }
    /**
     * RLP-encodes the record; enforces the EIP-778 300-byte size limit.
     */
    async encode(privateKey) {
        const encoded = hexToBytes$1(encode$2(await this.encodeToValues(privateKey)));
        if (encoded.length >= MAX_RECORD_SIZE) {
            throw new Error("ENR must be less than 300 bytes");
        }
        return encoded;
    }
    /**
     * Encodes the record as a textual "enr:<base64url>" string.
     */
    async encodeTxt(privateKey) {
        return (ENR.RECORD_PREFIX + toString$3(await this.encode(privateKey), "base64url"));
    }
}
|
|
ENR.RECORD_PREFIX = "enr:";
|
|
|
|
/* eslint-disable import/export */
// Protobuf codec for the RLN RateLimitProof message. All seven fields are
// required `bytes` fields (wire type 2); tag bytes are (fieldNumber << 3) | 2:
//   1 proof (10), 2 merkleRoot (18), 3 epoch (26), 4 shareX (34),
//   5 shareY (42), 6 nullifier (50), 7 rlnIdentifier (58).
var RateLimitProof$4;
(function (RateLimitProof) {
    // Codec is built lazily on first use and cached.
    let _codec;
    RateLimitProof.codec = () => {
        if (_codec == null) {
            _codec = message((obj, writer, opts = {}) => {
                // When length-delimited, reserve space for the length prefix.
                if (opts.lengthDelimited !== false) {
                    writer.fork();
                }
                if (obj.proof != null) {
                    writer.uint32(10);
                    writer.bytes(obj.proof);
                }
                else {
                    throw new Error('Protocol error: required field "proof" was not found in object');
                }
                if (obj.merkleRoot != null) {
                    writer.uint32(18);
                    writer.bytes(obj.merkleRoot);
                }
                else {
                    throw new Error('Protocol error: required field "merkleRoot" was not found in object');
                }
                if (obj.epoch != null) {
                    writer.uint32(26);
                    writer.bytes(obj.epoch);
                }
                else {
                    throw new Error('Protocol error: required field "epoch" was not found in object');
                }
                if (obj.shareX != null) {
                    writer.uint32(34);
                    writer.bytes(obj.shareX);
                }
                else {
                    throw new Error('Protocol error: required field "shareX" was not found in object');
                }
                if (obj.shareY != null) {
                    writer.uint32(42);
                    writer.bytes(obj.shareY);
                }
                else {
                    throw new Error('Protocol error: required field "shareY" was not found in object');
                }
                if (obj.nullifier != null) {
                    writer.uint32(50);
                    writer.bytes(obj.nullifier);
                }
                else {
                    throw new Error('Protocol error: required field "nullifier" was not found in object');
                }
                if (obj.rlnIdentifier != null) {
                    writer.uint32(58);
                    writer.bytes(obj.rlnIdentifier);
                }
                else {
                    throw new Error('Protocol error: required field "rlnIdentifier" was not found in object');
                }
                if (opts.lengthDelimited !== false) {
                    writer.ldelim();
                }
            }, (reader, length) => {
                // Defaults are empty byte arrays, so the post-loop null checks
                // below can only fire if a field is explicitly set to null.
                const obj = {
                    proof: new Uint8Array(0),
                    merkleRoot: new Uint8Array(0),
                    epoch: new Uint8Array(0),
                    shareX: new Uint8Array(0),
                    shareY: new Uint8Array(0),
                    nullifier: new Uint8Array(0),
                    rlnIdentifier: new Uint8Array(0),
                };
                const end = length == null ? reader.len : reader.pos + length;
                while (reader.pos < end) {
                    const tag = reader.uint32();
                    // tag >>> 3 is the field number; tag & 7 is the wire type.
                    switch (tag >>> 3) {
                        case 1:
                            obj.proof = reader.bytes();
                            break;
                        case 2:
                            obj.merkleRoot = reader.bytes();
                            break;
                        case 3:
                            obj.epoch = reader.bytes();
                            break;
                        case 4:
                            obj.shareX = reader.bytes();
                            break;
                        case 5:
                            obj.shareY = reader.bytes();
                            break;
                        case 6:
                            obj.nullifier = reader.bytes();
                            break;
                        case 7:
                            obj.rlnIdentifier = reader.bytes();
                            break;
                        default:
                            // Unknown field: skip per its wire type.
                            reader.skipType(tag & 7);
                            break;
                    }
                }
                if (obj.proof == null) {
                    throw new Error('Protocol error: value for required field "proof" was not found in protobuf');
                }
                if (obj.merkleRoot == null) {
                    throw new Error('Protocol error: value for required field "merkleRoot" was not found in protobuf');
                }
                if (obj.epoch == null) {
                    throw new Error('Protocol error: value for required field "epoch" was not found in protobuf');
                }
                if (obj.shareX == null) {
                    throw new Error('Protocol error: value for required field "shareX" was not found in protobuf');
                }
                if (obj.shareY == null) {
                    throw new Error('Protocol error: value for required field "shareY" was not found in protobuf');
                }
                if (obj.nullifier == null) {
                    throw new Error('Protocol error: value for required field "nullifier" was not found in protobuf');
                }
                if (obj.rlnIdentifier == null) {
                    throw new Error('Protocol error: value for required field "rlnIdentifier" was not found in protobuf');
                }
                return obj;
            });
        }
        return _codec;
    };
    // Serializes a RateLimitProof object to protobuf bytes.
    RateLimitProof.encode = (obj) => {
        return encodeMessage(obj, RateLimitProof.codec());
    };
    // Parses protobuf bytes into a RateLimitProof object.
    RateLimitProof.decode = (buf) => {
        return decodeMessage(buf, RateLimitProof.codec());
    };
})(RateLimitProof$4 || (RateLimitProof$4 = {}));
|
|
// Protobuf codec for WakuMessage. All fields are optional. Field layout:
//   1 payload: bytes (tag 10), 2 contentTopic: string (tag 18),
//   3 version: uint32 (tag 24, varint), 4 timestampDeprecated: double (tag 33),
//   10 timestamp: sint64 (tag 80, varint), 21 rateLimitProof: embedded
//   RateLimitProof message (tag 170).
var WakuMessage$4;
(function (WakuMessage) {
    // Codec is built lazily on first use and cached.
    let _codec;
    WakuMessage.codec = () => {
        if (_codec == null) {
            _codec = message((obj, writer, opts = {}) => {
                if (opts.lengthDelimited !== false) {
                    writer.fork();
                }
                if (obj.payload != null) {
                    writer.uint32(10);
                    writer.bytes(obj.payload);
                }
                if (obj.contentTopic != null) {
                    writer.uint32(18);
                    writer.string(obj.contentTopic);
                }
                if (obj.version != null) {
                    writer.uint32(24);
                    writer.uint32(obj.version);
                }
                if (obj.timestampDeprecated != null) {
                    writer.uint32(33);
                    writer.double(obj.timestampDeprecated);
                }
                if (obj.timestamp != null) {
                    writer.uint32(80);
                    writer.sint64(obj.timestamp);
                }
                if (obj.rateLimitProof != null) {
                    writer.uint32(170);
                    // Delegate to the nested RateLimitProof codec.
                    RateLimitProof$4.codec().encode(obj.rateLimitProof, writer);
                }
                if (opts.lengthDelimited !== false) {
                    writer.ldelim();
                }
            }, (reader, length) => {
                const obj = {};
                const end = length == null ? reader.len : reader.pos + length;
                while (reader.pos < end) {
                    const tag = reader.uint32();
                    // tag >>> 3 is the field number; tag & 7 is the wire type.
                    switch (tag >>> 3) {
                        case 1:
                            obj.payload = reader.bytes();
                            break;
                        case 2:
                            obj.contentTopic = reader.string();
                            break;
                        case 3:
                            obj.version = reader.uint32();
                            break;
                        case 4:
                            obj.timestampDeprecated = reader.double();
                            break;
                        case 10:
                            obj.timestamp = reader.sint64();
                            break;
                        case 21:
                            // Embedded message: reader.uint32() reads its byte length.
                            obj.rateLimitProof = RateLimitProof$4.codec().decode(reader, reader.uint32());
                            break;
                        default:
                            // Unknown field: skip per its wire type.
                            reader.skipType(tag & 7);
                            break;
                    }
                }
                return obj;
            });
        }
        return _codec;
    };
    // Serializes a WakuMessage object to protobuf bytes.
    WakuMessage.encode = (obj) => {
        return encodeMessage(obj, WakuMessage.codec());
    };
    // Parses protobuf bytes into a WakuMessage object.
    WakuMessage.decode = (buf) => {
        return decodeMessage(buf, WakuMessage.codec());
    };
})(WakuMessage$4 || (WakuMessage$4 = {}));
|
|
|
|
/* eslint-disable import/export */
// Lightweight codec that parses only the `contentTopic` field (field 2,
// wire type 2) of a WakuMessage, skipping every other field. Used to peek at
// the topic without decoding the full message.
var TopicOnlyMessage$1;
(function (TopicOnlyMessage) {
    let _codec;
    TopicOnlyMessage.codec = () => {
        if (_codec == null) {
            const encodeFn = (obj, writer, opts = {}) => {
                const lengthDelimited = opts.lengthDelimited !== false;
                if (lengthDelimited) {
                    writer.fork();
                }
                if (obj.contentTopic != null) {
                    writer.uint32(18);
                    writer.string(obj.contentTopic);
                }
                if (lengthDelimited) {
                    writer.ldelim();
                }
            };
            const decodeFn = (reader, length) => {
                const obj = {};
                const end = length == null ? reader.len : reader.pos + length;
                while (reader.pos < end) {
                    const tag = reader.uint32();
                    if (tag >>> 3 === 2) {
                        obj.contentTopic = reader.string();
                    }
                    else {
                        // Skip every field other than contentTopic.
                        reader.skipType(tag & 7);
                    }
                }
                return obj;
            };
            _codec = message(encodeFn, decodeFn);
        }
        return _codec;
    };
    TopicOnlyMessage.encode = (obj) => {
        return encodeMessage(obj, TopicOnlyMessage.codec());
    };
    TopicOnlyMessage.decode = (buf) => {
        return decodeMessage(buf, TopicOnlyMessage.codec());
    };
})(TopicOnlyMessage$1 || (TopicOnlyMessage$1 = {}));
|
|
|
|
const symbol = Symbol.for('@achingbrain/uint8arraylist');
|
|
/**
 * Locates which backing buffer holds global byte `index`.
 *
 * @param {Uint8Array[]} bufs - ordered list of backing buffers
 * @param {number} index - global byte index into the concatenation of `bufs`
 * @returns {{buf: Uint8Array, index: number}} the buffer plus the local index within it
 * @throws {RangeError} when `index` is nullish, negative, or past the end
 */
function findBufAndOffset(bufs, index) {
    if (index == null || index < 0) {
        throw new RangeError('index is out of bounds');
    }
    let start = 0;
    for (const buf of bufs) {
        const end = start + buf.byteLength;
        if (index < end) {
            return { buf, index: index - start };
        }
        start = end;
    }
    throw new RangeError('index is out of bounds');
}
|
|
/**
 * Returns true when `value` carries the Uint8ArrayList brand symbol
 * (duck-type check that works across bundled module instances).
 */
function isUint8ArrayList(value) {
    if (value == null) {
        return false;
    }
    return Boolean(value[symbol]);
}
|
|
class Uint8ArrayList {
|
|
// Accepts any number of Uint8Array / Uint8ArrayList initial chunks.
constructor(...data) {
    // Brand the instance so `isUint8ArrayList` can detect it across realms.
    Object.defineProperty(this, symbol, { value: true });
    // Ordered list of backing Uint8Array chunks (appending does not copy).
    this.bufs = [];
    // Total byte length across all backing chunks.
    this.length = 0;
    if (data.length > 0) {
        this.appendAll(data);
    }
}
|
|
// Iterates over the backing Uint8Array chunks (not individual bytes).
*[Symbol.iterator]() {
    yield* this.bufs;
}
|
|
// Total number of bytes in the list (alias of `length`).
get byteLength() {
    return this.length;
}
|
|
/**
 * Add one or more `bufs` to the end of this Uint8ArrayList
 *
 * @param {...(Uint8Array|Uint8ArrayList)} bufs - chunks to append (no copy)
 */
append(...bufs) {
    this.appendAll(bufs);
}
|
|
/**
|
|
* Add all `bufs` to the end of this Uint8ArrayList
|
|
*/
|
|
appendAll(bufs) {
|
|
let length = 0;
|
|
for (const buf of bufs) {
|
|
if (buf instanceof Uint8Array) {
|
|
length += buf.byteLength;
|
|
this.bufs.push(buf);
|
|
}
|
|
else if (isUint8ArrayList(buf)) {
|
|
length += buf.byteLength;
|
|
this.bufs.push(...buf.bufs);
|
|
}
|
|
else {
|
|
throw new Error('Could not append value, must be an Uint8Array or a Uint8ArrayList');
|
|
}
|
|
}
|
|
this.length += length;
|
|
}
|
|
/**
 * Add one or more `bufs` to the start of this Uint8ArrayList
 *
 * @param {...(Uint8Array|Uint8ArrayList)} bufs - chunks to prepend (no copy)
 */
prepend(...bufs) {
    this.prependAll(bufs);
}
|
|
/**
|
|
* Add all `bufs` to the start of this Uint8ArrayList
|
|
*/
|
|
prependAll(bufs) {
|
|
let length = 0;
|
|
for (const buf of bufs.reverse()) {
|
|
if (buf instanceof Uint8Array) {
|
|
length += buf.byteLength;
|
|
this.bufs.unshift(buf);
|
|
}
|
|
else if (isUint8ArrayList(buf)) {
|
|
length += buf.byteLength;
|
|
this.bufs.unshift(...buf.bufs);
|
|
}
|
|
else {
|
|
throw new Error('Could not prepend value, must be an Uint8Array or a Uint8ArrayList');
|
|
}
|
|
}
|
|
this.length += length;
|
|
}
|
|
/**
|
|
* Read the value at `index`
|
|
*/
|
|
get(index) {
|
|
const res = findBufAndOffset(this.bufs, index);
|
|
return res.buf[res.index];
|
|
}
|
|
/**
|
|
* Set the value at `index` to `value`
|
|
*/
|
|
set(index, value) {
|
|
const res = findBufAndOffset(this.bufs, index);
|
|
res.buf[res.index] = value;
|
|
}
|
|
/**
|
|
* Copy bytes from `buf` to the index specified by `offset`
|
|
*/
|
|
write(buf, offset = 0) {
|
|
if (buf instanceof Uint8Array) {
|
|
for (let i = 0; i < buf.length; i++) {
|
|
this.set(offset + i, buf[i]);
|
|
}
|
|
}
|
|
else if (isUint8ArrayList(buf)) {
|
|
for (let i = 0; i < buf.length; i++) {
|
|
this.set(offset + i, buf.get(i));
|
|
}
|
|
}
|
|
else {
|
|
throw new Error('Could not write value, must be an Uint8Array or a Uint8ArrayList');
|
|
}
|
|
}
|
|
/**
|
|
* Remove bytes from the front of the pool
|
|
*/
|
|
consume(bytes) {
|
|
// first, normalize the argument, in accordance with how Buffer does it
|
|
bytes = Math.trunc(bytes);
|
|
// do nothing if not a positive number
|
|
if (Number.isNaN(bytes) || bytes <= 0) {
|
|
return;
|
|
}
|
|
while (this.bufs.length > 0) {
|
|
if (bytes >= this.bufs[0].byteLength) {
|
|
bytes -= this.bufs[0].byteLength;
|
|
this.length -= this.bufs[0].byteLength;
|
|
this.bufs.shift();
|
|
}
|
|
else {
|
|
this.bufs[0] = this.bufs[0].subarray(bytes);
|
|
this.length -= bytes;
|
|
break;
|
|
}
|
|
}
|
|
}
|
|
/**
 * Extracts a section of an array and returns a new array.
 *
 * This is a copy operation as it is with Uint8Arrays and Arrays
 * - note this is different to the behaviour of Node Buffers.
 *
 * @param {number} [beginInclusive] - start index (negative counts from end)
 * @param {number} [endExclusive] - end index (negative counts from end)
 * @returns {Uint8Array} a freshly allocated copy of the selected range
 */
slice(beginInclusive, endExclusive) {
    const { bufs, length } = this._subList(beginInclusive, endExclusive);
    // Always copies: the selected chunks are concatenated into a new array.
    return concat(bufs, length);
}
|
|
/**
 * Returns a alloc from the given start and end element index.
 *
 * In the best case where the data extracted comes from a single Uint8Array
 * internally this is a no-copy operation otherwise it is a copy operation.
 *
 * @param {number} [beginInclusive] - start index (negative counts from end)
 * @param {number} [endExclusive] - end index (negative counts from end)
 * @returns {Uint8Array} a view when the range falls within one chunk, otherwise a copy
 */
subarray(beginInclusive, endExclusive) {
    const { bufs, length } = this._subList(beginInclusive, endExclusive);
    if (bufs.length === 1) {
        // Single chunk: return it directly (no copy).
        return bufs[0];
    }
    // Range spans chunks: fall back to copying.
    return concat(bufs, length);
}
|
|
/**
 * Returns a allocList from the given start and end element index.
 *
 * This is a no-copy operation.
 *
 * @param {number} [beginInclusive] - start index (negative counts from end)
 * @param {number} [endExclusive] - end index (negative counts from end)
 * @returns {Uint8ArrayList} a new list sharing the selected chunks
 */
sublist(beginInclusive, endExclusive) {
    const { bufs, length } = this._subList(beginInclusive, endExclusive);
    const list = new Uint8ArrayList();
    list.length = length;
    // don't loop, just set the bufs
    list.bufs = bufs;
    return list;
}
|
|
// Internal: resolves a [beginInclusive, endExclusive) range into the minimal
// list of chunk (sub)views covering it, plus the resulting length. Shares
// chunk memory wherever possible (only `subarray` views, no copies).
// Negative indices count from the end. Throws RangeError when the resolved
// range falls outside the list.
_subList(beginInclusive, endExclusive) {
    beginInclusive = beginInclusive ?? 0;
    endExclusive = endExclusive ?? this.length;
    // Resolve negative indices relative to the total length.
    if (beginInclusive < 0) {
        beginInclusive = this.length + beginInclusive;
    }
    if (endExclusive < 0) {
        endExclusive = this.length + endExclusive;
    }
    if (beginInclusive < 0 || endExclusive > this.length) {
        throw new RangeError('index is out of bounds');
    }
    // Empty range: nothing to collect.
    if (beginInclusive === endExclusive) {
        return { bufs: [], length: 0 };
    }
    // Whole list: share every chunk (shallow copy of the chunk array).
    if (beginInclusive === 0 && endExclusive === this.length) {
        return { bufs: [...this.bufs], length: this.length };
    }
    const bufs = [];
    let offset = 0;
    for (let i = 0; i < this.bufs.length; i++) {
        const buf = this.bufs[i];
        const bufStart = offset;
        const bufEnd = bufStart + buf.byteLength;
        // for next loop
        offset = bufEnd;
        if (beginInclusive >= bufEnd) {
            // start after this buf
            continue;
        }
        const sliceStartInBuf = beginInclusive >= bufStart && beginInclusive < bufEnd;
        const sliceEndsInBuf = endExclusive > bufStart && endExclusive <= bufEnd;
        if (sliceStartInBuf && sliceEndsInBuf) {
            // slice is wholly contained within this buffer
            if (beginInclusive === bufStart && endExclusive === bufEnd) {
                // requested whole buffer
                bufs.push(buf);
                break;
            }
            // requested part of buffer
            const start = beginInclusive - bufStart;
            bufs.push(buf.subarray(start, start + (endExclusive - beginInclusive)));
            break;
        }
        if (sliceStartInBuf) {
            // slice starts in this buffer
            if (beginInclusive === 0) {
                // requested whole buffer
                bufs.push(buf);
                continue;
            }
            // requested part of buffer
            bufs.push(buf.subarray(beginInclusive - bufStart));
            continue;
        }
        if (sliceEndsInBuf) {
            if (endExclusive === bufEnd) {
                // requested whole buffer
                bufs.push(buf);
                break;
            }
            // requested part of buffer
            bufs.push(buf.subarray(0, endExclusive - bufStart));
            break;
        }
        // slice started before this buffer and ends after it
        bufs.push(buf);
    }
    return { bufs, length: endExclusive - beginInclusive };
}
|
|
getInt8(byteOffset) {
|
|
const buf = this.subarray(byteOffset, byteOffset + 1);
|
|
const view = new DataView(buf.buffer, buf.byteOffset, buf.byteLength);
|
|
return view.getInt8(0);
|
|
}
|
|
setInt8(byteOffset, value) {
|
|
const buf = allocUnsafe$1(1);
|
|
const view = new DataView(buf.buffer, buf.byteOffset, buf.byteLength);
|
|
view.setInt8(0, value);
|
|
this.write(buf, byteOffset);
|
|
}
|
|
getInt16(byteOffset, littleEndian) {
|
|
const buf = this.subarray(byteOffset, byteOffset + 2);
|
|
const view = new DataView(buf.buffer, buf.byteOffset, buf.byteLength);
|
|
return view.getInt16(0, littleEndian);
|
|
}
|
|
setInt16(byteOffset, value, littleEndian) {
|
|
const buf = alloc(2);
|
|
const view = new DataView(buf.buffer, buf.byteOffset, buf.byteLength);
|
|
view.setInt16(0, value, littleEndian);
|
|
this.write(buf, byteOffset);
|
|
}
|
|
getInt32(byteOffset, littleEndian) {
|
|
const buf = this.subarray(byteOffset, byteOffset + 4);
|
|
const view = new DataView(buf.buffer, buf.byteOffset, buf.byteLength);
|
|
return view.getInt32(0, littleEndian);
|
|
}
|
|
setInt32(byteOffset, value, littleEndian) {
|
|
const buf = alloc(4);
|
|
const view = new DataView(buf.buffer, buf.byteOffset, buf.byteLength);
|
|
view.setInt32(0, value, littleEndian);
|
|
this.write(buf, byteOffset);
|
|
}
|
|
getBigInt64(byteOffset, littleEndian) {
|
|
const buf = this.subarray(byteOffset, byteOffset + 8);
|
|
const view = new DataView(buf.buffer, buf.byteOffset, buf.byteLength);
|
|
return view.getBigInt64(0, littleEndian);
|
|
}
|
|
setBigInt64(byteOffset, value, littleEndian) {
|
|
const buf = alloc(8);
|
|
const view = new DataView(buf.buffer, buf.byteOffset, buf.byteLength);
|
|
view.setBigInt64(0, value, littleEndian);
|
|
this.write(buf, byteOffset);
|
|
}
|
|
getUint8(byteOffset) {
|
|
const buf = this.subarray(byteOffset, byteOffset + 1);
|
|
const view = new DataView(buf.buffer, buf.byteOffset, buf.byteLength);
|
|
return view.getUint8(0);
|
|
}
|
|
    /**
     * Write an unsigned 8-bit integer at `byteOffset`.
     * allocUnsafe$1 is safe here: the single byte is overwritten before use.
     */
    setUint8(byteOffset, value) {
        const buf = allocUnsafe$1(1);
        const view = new DataView(buf.buffer, buf.byteOffset, buf.byteLength);
        view.setUint8(0, value);
        this.write(buf, byteOffset);
    }
|
|
    /** Read an unsigned 16-bit integer at `byteOffset`; big-endian unless `littleEndian` is true. */
    getUint16(byteOffset, littleEndian) {
        const buf = this.subarray(byteOffset, byteOffset + 2);
        const view = new DataView(buf.buffer, buf.byteOffset, buf.byteLength);
        return view.getUint16(0, littleEndian);
    }
|
|
    /** Write an unsigned 16-bit integer at `byteOffset` via a 2-byte scratch buffer. */
    setUint16(byteOffset, value, littleEndian) {
        const buf = alloc(2);
        const view = new DataView(buf.buffer, buf.byteOffset, buf.byteLength);
        view.setUint16(0, value, littleEndian);
        this.write(buf, byteOffset);
    }
|
|
    /** Read an unsigned 32-bit integer at `byteOffset`; big-endian unless `littleEndian` is true. */
    getUint32(byteOffset, littleEndian) {
        const buf = this.subarray(byteOffset, byteOffset + 4);
        const view = new DataView(buf.buffer, buf.byteOffset, buf.byteLength);
        return view.getUint32(0, littleEndian);
    }
|
|
    /** Write an unsigned 32-bit integer at `byteOffset` via a 4-byte scratch buffer. */
    setUint32(byteOffset, value, littleEndian) {
        const buf = alloc(4);
        const view = new DataView(buf.buffer, buf.byteOffset, buf.byteLength);
        view.setUint32(0, value, littleEndian);
        this.write(buf, byteOffset);
    }
|
|
    /** Read an unsigned 64-bit BigInt at `byteOffset`; big-endian unless `littleEndian` is true. */
    getBigUint64(byteOffset, littleEndian) {
        const buf = this.subarray(byteOffset, byteOffset + 8);
        const view = new DataView(buf.buffer, buf.byteOffset, buf.byteLength);
        return view.getBigUint64(0, littleEndian);
    }
|
|
    /** Write an unsigned 64-bit BigInt at `byteOffset` via an 8-byte scratch buffer. */
    setBigUint64(byteOffset, value, littleEndian) {
        const buf = alloc(8);
        const view = new DataView(buf.buffer, buf.byteOffset, buf.byteLength);
        view.setBigUint64(0, value, littleEndian);
        this.write(buf, byteOffset);
    }
|
|
    /** Read a 32-bit float at `byteOffset`; big-endian unless `littleEndian` is true. */
    getFloat32(byteOffset, littleEndian) {
        const buf = this.subarray(byteOffset, byteOffset + 4);
        const view = new DataView(buf.buffer, buf.byteOffset, buf.byteLength);
        return view.getFloat32(0, littleEndian);
    }
|
|
    /** Write a 32-bit float at `byteOffset` via a 4-byte scratch buffer. */
    setFloat32(byteOffset, value, littleEndian) {
        const buf = alloc(4);
        const view = new DataView(buf.buffer, buf.byteOffset, buf.byteLength);
        view.setFloat32(0, value, littleEndian);
        this.write(buf, byteOffset);
    }
|
|
    /** Read a 64-bit float at `byteOffset`; big-endian unless `littleEndian` is true. */
    getFloat64(byteOffset, littleEndian) {
        const buf = this.subarray(byteOffset, byteOffset + 8);
        const view = new DataView(buf.buffer, buf.byteOffset, buf.byteLength);
        return view.getFloat64(0, littleEndian);
    }
|
|
    /** Write a 64-bit float at `byteOffset` via an 8-byte scratch buffer. */
    setFloat64(byteOffset, value, littleEndian) {
        const buf = alloc(8);
        const view = new DataView(buf.buffer, buf.byteOffset, buf.byteLength);
        view.setFloat64(0, value, littleEndian);
        this.write(buf, byteOffset);
    }
|
|
    /**
     * Compare with another Uint8ArrayList: same number of internal buffers
     * and pairwise byte-equality (via the module-level `equals` helper
     * defined elsewhere in this bundle).
     *
     * NOTE(review): two lists holding identical bytes split differently
     * across internal buffers compare unequal here — confirm intentional.
     */
    equals(other) {
        if (other == null) {
            return false;
        }
        if (!(other instanceof Uint8ArrayList)) {
            return false;
        }
        if (other.bufs.length !== this.bufs.length) {
            return false;
        }
        for (let i = 0; i < this.bufs.length; i++) {
            if (!equals(this.bufs[i], other.bufs[i])) {
                return false;
            }
        }
        return true;
    }
|
|
    /**
     * Create a Uint8ArrayList from a pre-existing list of Uint8Arrays. Use this
     * method if you know the total size of all the Uint8Arrays ahead of time.
     *
     * NOTE: `bufs` is adopted, not copied — callers must not mutate the array
     * afterwards. When `length` is omitted it is computed by summing the
     * byteLength of every entry.
     */
    static fromUint8Arrays(bufs, length) {
        const list = new Uint8ArrayList();
        list.bufs = bufs;
        if (length == null) {
            length = bufs.reduce((acc, curr) => acc + curr.byteLength, 0);
        }
        list.length = length;
        return list;
    }
|
|
}
|
|
|
|
/**
 * Normalise byte access: a Uint8Array supports direct indexing, while
 * Uint8ArrayList-style objects expose get/set methods. Either way the
 * caller receives a uniform `{ get(index), set(index, value) }` pair.
 */
function accessor(buf) {
    if (buf instanceof Uint8Array) {
        return {
            get: (index) => buf[index],
            set: (index, value) => {
                buf[index] = value;
            }
        };
    }
    return {
        get: (index) => buf.get(index),
        set: (index, value) => {
            buf.set(index, value);
        }
    };
}
|
|
|
|
// 2^32 (4294967296)
const TWO_32 = 4294967296;
|
|
/**
 * A 64-bit value held as two unsigned 32-bit halves (protobuf-style),
 * with varint and ZigZag codecs.
 */
class LongBits {
    /**
     * @param {number} hi - high 32 bits (unsigned)
     * @param {number} lo - low 32 bits (unsigned)
     */
    constructor(hi = 0, lo = 0) {
        this.hi = hi;
        this.lo = lo;
    }
    /**
     * Returns these hi/lo bits as a BigInt
     */
    toBigInt(unsigned) {
        if (unsigned === true) {
            return BigInt(this.lo >>> 0) + (BigInt(this.hi >>> 0) << 32n);
        }
        if ((this.hi >>> 31) !== 0) {
            // sign bit set: negate both halves (two's complement, with
            // carry from lo into hi) and return the negated magnitude
            const lo = ~this.lo + 1 >>> 0;
            let hi = ~this.hi >>> 0;
            if (lo === 0) {
                hi = hi + 1 >>> 0;
            }
            return -(BigInt(lo) + (BigInt(hi) << 32n));
        }
        return BigInt(this.lo >>> 0) + (BigInt(this.hi >>> 0) << 32n);
    }
    /**
     * Returns these hi/lo bits as a Number - this may overflow, toBigInt
     * should be preferred
     */
    toNumber(unsigned) {
        return Number(this.toBigInt(unsigned));
    }
    /**
     * ZigZag decode a LongBits object
     */
    zzDecode() {
        const mask = -(this.lo & 1);
        const lo = ((this.lo >>> 1 | this.hi << 31) ^ mask) >>> 0;
        const hi = (this.hi >>> 1 ^ mask) >>> 0;
        return new LongBits(hi, lo);
    }
    /**
     * ZigZag encode a LongBits object
     */
    zzEncode() {
        const mask = this.hi >> 31;
        const hi = ((this.hi << 1 | this.lo >>> 31) ^ mask) >>> 0;
        const lo = (this.lo << 1 ^ mask) >>> 0;
        return new LongBits(hi, lo);
    }
    /**
     * Encode a LongBits object as a varint byte array.
     * NOTE: destructive — this.hi/this.lo are consumed by the shifts.
     */
    toBytes(buf, offset = 0) {
        const access = accessor(buf);
        while (this.hi > 0) {
            access.set(offset++, this.lo & 127 | 128);
            this.lo = (this.lo >>> 7 | this.hi << 25) >>> 0;
            this.hi >>>= 7;
        }
        while (this.lo > 127) {
            access.set(offset++, this.lo & 127 | 128);
            this.lo = this.lo >>> 7;
        }
        access.set(offset++, this.lo);
    }
    /**
     * Parse a LongBits object from a BigInt
     */
    static fromBigInt(value) {
        if (value === 0n) {
            return new LongBits();
        }
        const negative = value < 0;
        if (negative) {
            value = -value;
        }
        let hi = Number(value >> 32n) | 0;
        let lo = Number(value - (BigInt(hi) << 32n)) | 0;
        if (negative) {
            // two's complement of the magnitude: invert both halves, then
            // add one with carry. A half wraps when the increment exceeds
            // 0xFFFFFFFF (4294967295).
            // BUGFIX: this previously compared against 2^32 (4294967296),
            // which could never fire — e.g. fromBigInt(-(2n ** 32n)) left
            // lo at 2^32 and hi off by one. fromNumber below already used
            // the correct 4294967295 bound.
            hi = ~hi >>> 0;
            lo = ~lo >>> 0;
            if (++lo > 4294967295) {
                lo = 0;
                if (++hi > 4294967295) {
                    hi = 0;
                }
            }
        }
        return new LongBits(hi, lo);
    }
    /**
     * Parse a LongBits object from a Number
     */
    static fromNumber(value) {
        if (value === 0) {
            return new LongBits();
        }
        const sign = value < 0;
        if (sign) {
            value = -value;
        }
        let lo = value >>> 0;
        let hi = (value - lo) / 4294967296 >>> 0;
        if (sign) {
            // two's complement, mirroring fromBigInt above
            hi = ~hi >>> 0;
            lo = ~lo >>> 0;
            if (++lo > 4294967295) {
                lo = 0;
                if (++hi > 4294967295) {
                    hi = 0;
                }
            }
        }
        return new LongBits(hi, lo);
    }
    /**
     * Parse a LongBits object from a varint byte array
     */
    static fromBytes(buf, offset = 0) {
        const access = accessor(buf);
        // tends to deopt with local vars for octet etc.
        const bits = new LongBits();
        let i = 0;
        if (buf.length - offset > 4) { // fast route (lo)
            for (; i < 4; ++i) {
                // 1st..4th
                bits.lo = (bits.lo | (access.get(offset) & 127) << i * 7) >>> 0;
                if (access.get(offset++) < 128) {
                    return bits;
                }
            }
            // 5th
            bits.lo = (bits.lo | (access.get(offset) & 127) << 28) >>> 0;
            bits.hi = (bits.hi | (access.get(offset) & 127) >> 4) >>> 0;
            if (access.get(offset++) < 128) {
                return bits;
            }
            i = 0;
        }
        else {
            for (; i < 4; ++i) {
                /* istanbul ignore if */
                if (offset >= buf.length) {
                    throw RangeError(`index out of range: ${offset} > ${buf.length}`);
                }
                // 1st..4th
                bits.lo = (bits.lo | (access.get(offset) & 127) << i * 7) >>> 0;
                if (access.get(offset++) < 128) {
                    return bits;
                }
            }
        }
        if (buf.length - offset > 4) { // fast route (hi)
            for (; i < 5; ++i) {
                // 6th..10th
                bits.hi = (bits.hi | (access.get(offset) & 127) << i * 7 + 3) >>> 0;
                if (access.get(offset++) < 128) {
                    return bits;
                }
            }
        }
        else if (offset < buf.byteLength) {
            for (; i < 5; ++i) {
                /* istanbul ignore if */
                if (offset >= buf.length) {
                    throw RangeError(`index out of range: ${offset} > ${buf.length}`);
                }
                // 6th..10th
                bits.hi = (bits.hi | (access.get(offset) & 127) << i * 7 + 3) >>> 0;
                if (access.get(offset++) < 128) {
                    return bits;
                }
            }
        }
        /* istanbul ignore next */
        throw RangeError('invalid varint encoding');
    }
}
|
|
|
|
const N1 = Math.pow(2, 7);
const N2 = Math.pow(2, 14);
const N3 = Math.pow(2, 21);
const N4 = Math.pow(2, 28);
const N5 = Math.pow(2, 35);
const N6 = Math.pow(2, 42);
const N7 = Math.pow(2, 49);
const N8 = Math.pow(2, 56);
const N9 = Math.pow(2, 63);
/**
 * Unsigned base-128 varint codec (protobuf wire format).
 */
const unsigned = {
    /**
     * Number of bytes (1-10) needed to varint-encode `value`: one byte per
     * started 7-bit group.
     */
    encodingLength(value) {
        const limits = [N1, N2, N3, N4, N5, N6, N7, N8, N9];
        for (let size = 0; size < limits.length; size++) {
            if (value < limits[size]) {
                return size + 1;
            }
        }
        return 10;
    },
    /**
     * Encode `value` into `buf` at `offset`; allocates a right-sized buffer
     * when none is supplied.
     *
     * @throws {RangeError} for values above Number.MAX_SAFE_INTEGER
     */
    encode(value, buf, offset = 0) {
        if (Number.MAX_SAFE_INTEGER != null && value > Number.MAX_SAFE_INTEGER) {
            throw new RangeError('Could not encode varint');
        }
        if (buf == null) {
            buf = allocUnsafe$1(unsigned.encodingLength(value));
        }
        LongBits.fromNumber(value).toBytes(buf, offset);
        return buf;
    },
    /**
     * Decode an unsigned varint from `buf` starting at `offset`.
     */
    decode(buf, offset = 0) {
        return LongBits.fromBytes(buf, offset).toNumber(true);
    }
};
|
|
|
|
/**
 * Allocate `len` bytes, skipping zero-fill when Node's Buffer is available
 * (pooled, uninitialised memory); falls back to a zero-filled Uint8Array
 * in environments without Buffer (browsers).
 */
function allocUnsafe(len) {
    return globalThis?.Buffer?.allocUnsafe != null
        ? globalThis.Buffer.allocUnsafe(len)
        : new Uint8Array(len);
}
|
|
|
|
// Default length-prefix encoder: unsigned varint. After each call the number
// of bytes produced is recorded on the mutable function property
// `defaultEncoder.bytes` (the varint module's convention).
const defaultEncoder = (length) => {
    const lengthLength = unsigned.encodingLength(length);
    const lengthBuf = allocUnsafe(lengthLength);
    unsigned.encode(length, lengthBuf);
    defaultEncoder.bytes = lengthLength;
    return lengthBuf;
};
defaultEncoder.bytes = 0;
|
|
/**
 * it-length-prefixed encoder: returns an async transform that emits, for
 * every chunk of `source`, its encoded byte length followed by the chunk.
 * Both prefix and chunk may be a plain Uint8Array or an iterable of them
 * (e.g. a Uint8ArrayList); iterables are flattened so consumers only ever
 * see Uint8Arrays.
 *
 * @param {{ lengthEncoder?: Function }} [options]
 */
function encode(options) {
    const encodeLength = (options ?? {}).lengthEncoder ?? defaultEncoder;
    return async function* (source) {
        for await (const chunk of source) {
            const prefix = encodeLength(chunk.byteLength);
            yield* (prefix instanceof Uint8Array ? [prefix] : prefix);
            yield* (chunk instanceof Uint8Array ? [chunk] : chunk);
        }
    };
}
|
|
// Encode one chunk as a single length-prefixed Uint8ArrayList:
// [varint length][chunk bytes].
encode.single = (chunk, options) => {
    options = options ?? {};
    const encodeLength = options.lengthEncoder ?? defaultEncoder;
    return new Uint8ArrayList(encodeLength(chunk.byteLength), chunk);
};
|
|
|
|
/* eslint max-depth: ["error", 6] */
// Maximum length of the length section of the message
const MAX_LENGTH_LENGTH = 8; // Varint.encode(Number.MAX_SAFE_INTEGER).length
// Maximum length of the data section of the message
const MAX_DATA_LENGTH = 1024 * 1024 * 4;
// Decoder state machine: currently reading the varint length prefix (LENGTH)
// or the payload bytes (DATA).
var ReadMode;
(function (ReadMode) {
    ReadMode[ReadMode["LENGTH"] = 0] = "LENGTH";
    ReadMode[ReadMode["DATA"] = 1] = "DATA";
})(ReadMode || (ReadMode = {}));
// Default length-prefix decoder: unsigned varint. Records the number of
// bytes consumed on `defaultDecoder.bytes`; a short read surfaces as the
// RangeError thrown by the varint parser.
const defaultDecoder = (buf) => {
    const length = unsigned.decode(buf);
    defaultDecoder.bytes = unsigned.encodingLength(length);
    return length;
};
defaultDecoder.bytes = 0;
|
|
/**
 * it-length-prefixed decoder: returns an async transform that buffers the
 * source and yields one Uint8ArrayList sublist per length-prefixed message.
 *
 * Errors (via errCode): ERR_INVALID_MSG_LENGTH for a negative length,
 * ERR_MSG_DATA_TOO_LONG above maxDataLength, ERR_MSG_LENGTH_TOO_LONG when
 * the prefix itself exceeds maxLengthLength, ERR_UNEXPECTED_EOF when the
 * source ends mid-message.
 */
function decode(options) {
    const decoder = async function* (source) {
        const buffer = new Uint8ArrayList();
        let mode = ReadMode.LENGTH;
        let dataLength = -1;
        const lengthDecoder = options?.lengthDecoder ?? defaultDecoder;
        const maxLengthLength = options?.maxLengthLength ?? MAX_LENGTH_LENGTH;
        const maxDataLength = options?.maxDataLength ?? MAX_DATA_LENGTH;
        for await (const buf of source) {
            buffer.append(buf);
            // Drain as many complete messages as the buffer now holds.
            while (buffer.byteLength > 0) {
                if (mode === ReadMode.LENGTH) {
                    // read length, ignore errors for short reads
                    try {
                        dataLength = lengthDecoder(buffer);
                        if (dataLength < 0) {
                            throw errCode(new Error('invalid message length'), 'ERR_INVALID_MSG_LENGTH');
                        }
                        if (dataLength > maxDataLength) {
                            throw errCode(new Error('message length too long'), 'ERR_MSG_DATA_TOO_LONG');
                        }
                        const dataLengthLength = lengthDecoder.bytes;
                        buffer.consume(dataLengthLength);
                        if (options?.onLength != null) {
                            options.onLength(dataLength);
                        }
                        mode = ReadMode.DATA;
                    }
                    catch (err) {
                        if (err instanceof RangeError) {
                            // short read from the varint decoder: wait for more
                            // bytes — unless the prefix is already too long
                            if (buffer.byteLength > maxLengthLength) {
                                throw errCode(new Error('message length length too long'), 'ERR_MSG_LENGTH_TOO_LONG');
                            }
                            break;
                        }
                        throw err;
                    }
                }
                if (mode === ReadMode.DATA) {
                    if (buffer.byteLength < dataLength) {
                        // not enough data, wait for more
                        break;
                    }
                    const data = buffer.sublist(0, dataLength);
                    buffer.consume(dataLength);
                    if (options?.onData != null) {
                        options.onData(data);
                    }
                    yield data;
                    mode = ReadMode.LENGTH;
                }
            }
        }
        // Leftover bytes after the source finished means a truncated message.
        if (buffer.byteLength > 0) {
            throw errCode(new Error('unexpected end of input'), 'ERR_UNEXPECTED_EOF');
        }
    };
    return decoder;
}
|
|
/**
|
|
* @param {*} reader
|
|
* @param {import('./types').DecoderOptions} [options]
|
|
* @returns
|
|
*/
|
|
// Decode length-prefixed messages from a pull-style reader: request one byte
// at a time while parsing the varint prefix, then exactly `length` bytes for
// the payload (steered via the onLength callback below).
decode.fromReader = (reader, options) => {
    let byteLength = 1; // Read single byte chunks until the length is known
    const varByteSource = (async function* () {
        while (true) {
            try {
                const { done, value } = await reader.next(byteLength);
                if (done === true) {
                    return;
                }
                if (value != null) {
                    yield value;
                }
            }
            catch (err) {
                // reader signalled a short read: treat as a clean end of stream
                if (err.code === 'ERR_UNDER_READ') {
                    return { done: true, value: null };
                }
                throw err;
            }
            finally {
                // Reset the byteLength so we continue to check for varints
                byteLength = 1;
            }
        }
    }());
    /**
     * Once the length has been parsed, read chunk for that length
     */
    const onLength = (l) => { byteLength = l; };
    return decode({
        ...(options ?? {}),
        onLength
    })(varByteSource);
};
|
|
|
|
// ported from https://www.npmjs.com/package/fast-fifo
|
|
// ported from https://www.npmjs.com/package/fast-fifo
class FixedFIFO$1 {
    /**
     * Fixed-capacity circular queue. `hwm` must be a power of two so the
     * read/write cursors can wrap with a bit mask; `undefined` marks an
     * empty slot (so `undefined` itself cannot be stored).
     */
    constructor(hwm) {
        if (!(hwm > 0) || ((hwm - 1) & hwm) !== 0) {
            throw new Error('Max size for a FixedFIFO should be a power of two');
        }
        this.buffer = new Array(hwm);
        this.mask = hwm - 1;
        this.top = 0;
        this.btm = 0;
        this.next = null;
    }
    // Returns false (without writing) when the queue is full.
    push(data) {
        const writeAt = this.top;
        if (this.buffer[writeAt] !== undefined) {
            return false;
        }
        this.buffer[writeAt] = data;
        this.top = (writeAt + 1) & this.mask;
        return true;
    }
    // Removes and returns the oldest entry, or undefined when empty.
    shift() {
        const oldest = this.buffer[this.btm];
        if (oldest === undefined) {
            return undefined;
        }
        this.buffer[this.btm] = undefined;
        this.btm = (this.btm + 1) & this.mask;
        return oldest;
    }
    isEmpty() {
        return this.buffer[this.btm] === undefined;
    }
}
|
|
/**
 * Unbounded FIFO built from linked FixedFIFO$1 segments: when the head
 * segment fills, a segment of twice the capacity is linked after it.
 * Tracks the total byteLength of buffered `{ value }` entries in `size`.
 */
class FIFO$1 {
    constructor(options = {}) {
        this.hwm = options.splitLimit ?? 16;
        this.head = new FixedFIFO$1(this.hwm);
        this.tail = this.head;
        this.size = 0;
    }
    // Entries without a byteLength count as 1 towards `size`.
    calculateSize(obj) {
        return obj?.byteLength ?? 1;
    }
    push(val) {
        if (val?.value != null) {
            this.size += this.calculateSize(val.value);
        }
        if (this.head.push(val)) {
            return;
        }
        // head segment full — grow by linking a doubled segment and retry
        const full = this.head;
        this.head = full.next = new FixedFIFO$1(2 * full.buffer.length);
        this.head.push(val);
    }
    shift() {
        let entry = this.tail.shift();
        if (entry === undefined && this.tail.next != null) {
            // current segment drained — advance to the next linked segment
            const following = this.tail.next;
            this.tail.next = null;
            this.tail = following;
            entry = this.tail.shift();
        }
        if (entry?.value != null) {
            this.size -= this.calculateSize(entry.value);
        }
        return entry;
    }
    isEmpty() {
        return this.head.isEmpty();
    }
}
|
|
|
|
/**
 * Create an async-iterable "pushable": values pushed onto it are buffered
 * until a consumer pulls them via the async iterator protocol.
 */
function pushable$1(options = {}) {
    // Drain one buffered entry and normalise it into an iterator result;
    // buffered errors are rethrown to the consumer.
    const getNext = (buffer) => {
        const next = buffer.shift();
        if (next == null) {
            return { done: true };
        }
        if (next.error != null) {
            throw next.error;
        }
        return {
            done: next.done === true,
            // @ts-expect-error
            value: next.value
        };
    };
    return _pushable(getNext, options);
}
|
|
/**
 * Shared implementation behind pushable$1(): an async iterable whose values
 * arrive imperatively via push()/end(). `getNext` drains one result from the
 * internal FIFO. When options.onEnd is supplied, the iterable is wrapped so
 * onEnd fires exactly once on end/return/throw.
 */
function _pushable(getNext, options) {
    options = options ?? {};
    let onEnd = options.onEnd;
    let buffer = new FIFO$1();
    let pushable;
    // Resolver installed while a consumer is awaiting the next value.
    let onNext;
    let ended;
    const waitNext = async () => {
        if (!buffer.isEmpty()) {
            return getNext(buffer);
        }
        if (ended) {
            return { done: true };
        }
        // Nothing buffered: park the consumer until the next push/end call.
        return await new Promise((resolve, reject) => {
            onNext = (next) => {
                onNext = null;
                buffer.push(next);
                try {
                    resolve(getNext(buffer));
                }
                catch (err) {
                    reject(err);
                }
                return pushable;
            };
        });
    };
    // Deliver straight to a waiting consumer, else buffer for later.
    const bufferNext = (next) => {
        if (onNext != null) {
            return onNext(next);
        }
        buffer.push(next);
        return pushable;
    };
    // An error discards everything buffered and is delivered next.
    const bufferError = (err) => {
        buffer = new FIFO$1();
        if (onNext != null) {
            return onNext({ error: err });
        }
        buffer.push({ error: err });
        return pushable;
    };
    const push = (value) => {
        if (ended) {
            return pushable;
        }
        // @ts-expect-error `byteLength` is not declared on PushType
        if (options?.objectMode !== true && value?.byteLength == null) {
            throw new Error('objectMode was not true but tried to push non-Uint8Array value');
        }
        return bufferNext({ done: false, value });
    };
    const end = (err) => {
        if (ended)
            return pushable;
        ended = true;
        return (err != null) ? bufferError(err) : bufferNext({ done: true });
    };
    // Iterator protocol: return() drops buffered values, throw() forwards
    // the error; both end the iterable.
    const _return = () => {
        buffer = new FIFO$1();
        end();
        return { done: true };
    };
    const _throw = (err) => {
        end(err);
        return { done: true };
    };
    pushable = {
        [Symbol.asyncIterator]() { return this; },
        next: waitNext,
        return: _return,
        throw: _throw,
        push,
        end,
        get readableLength() {
            return buffer.size;
        }
    };
    if (onEnd == null) {
        return pushable;
    }
    // Wrap so onEnd fires exactly once when the iterable terminates.
    const _pushable = pushable;
    pushable = {
        [Symbol.asyncIterator]() { return this; },
        next() {
            return _pushable.next();
        },
        throw(err) {
            _pushable.throw(err);
            if (onEnd != null) {
                onEnd(err);
                onEnd = undefined;
            }
            return { done: true };
        },
        return() {
            _pushable.return();
            if (onEnd != null) {
                onEnd();
                onEnd = undefined;
            }
            return { done: true };
        },
        push,
        end(err) {
            _pushable.end(err);
            if (onEnd != null) {
                onEnd(err);
                onEnd = undefined;
            }
            return pushable;
        },
        get readableLength() {
            return _pushable.readableLength;
        }
    };
    return pushable;
}
|
|
|
|
var fixedSize = class FixedFIFO {
    /**
     * Fixed-capacity circular queue (fast-fifo, CJS copy). Capacity must be
     * a power of two so the cursors wrap with a bit mask; `undefined` marks
     * an empty slot.
     */
    constructor (hwm) {
        if (!(hwm > 0) || ((hwm - 1) & hwm) !== 0) {
            throw new Error('Max size for a FixedFIFO should be a power of two')
        }
        this.buffer = new Array(hwm);
        this.mask = hwm - 1;
        this.top = 0;
        this.btm = 0;
        this.next = null;
    }

    // Returns false (without writing) when the queue is full.
    push (data) {
        const writeAt = this.top;
        if (this.buffer[writeAt] !== undefined) {
            return false
        }
        this.buffer[writeAt] = data;
        this.top = (writeAt + 1) & this.mask;
        return true
    }

    // Removes and returns the oldest entry, or undefined when empty.
    shift () {
        const oldest = this.buffer[this.btm];
        if (oldest === undefined) {
            return undefined
        }
        this.buffer[this.btm] = undefined;
        this.btm = (this.btm + 1) & this.mask;
        return oldest
    }

    // Oldest entry without removing it.
    peek () {
        return this.buffer[this.btm]
    }

    isEmpty () {
        return this.buffer[this.btm] === undefined
    }
};
|
|
|
|
// Alias for the fast-fifo fixed-size segment class (CJS copy above).
const FixedFIFO = fixedSize;
|
|
|
|
// Growable FIFO built from linked fixed-size segments: when the head
// segment fills, a segment of twice the capacity is linked after it
// (fast-fifo, CJS copy).
var fastFifo = class FastFIFO {
    constructor (hwm) {
        this.hwm = hwm || 16;
        this.head = new FixedFIFO(this.hwm);
        this.tail = this.head;
    }

    push (val) {
        if (!this.head.push(val)) {
            // head segment full — grow and push onto the new segment
            const prev = this.head;
            this.head = prev.next = new FixedFIFO(2 * this.head.buffer.length);
            this.head.push(val);
        }
    }

    shift () {
        const val = this.tail.shift();
        if (val === undefined && this.tail.next) {
            // current segment drained — advance to the next linked segment
            const next = this.tail.next;
            this.tail.next = null;
            this.tail = next;
            return this.tail.shift()
        }
        return val
    }

    peek () {
        return this.tail.peek()
    }

    isEmpty () {
        return this.head.isEmpty()
    }
};
|
|
|
|
// Alias for the growable fast-fifo queue (CJS copy above).
const FIFO = fastFifo;
|
|
|
|
/**
 * it-pushable (older CJS variant): create an async iterable fed imperatively
 * via push()/end(). Accepts an options object or a bare onEnd callback.
 * With options.writev, all buffered values are delivered as one array per
 * next() call.
 */
var itPushable = (options) => {
    options = options || {};
    let onEnd;

    // Support the `pushable(onEnd)` shorthand.
    if (typeof options === 'function') {
        onEnd = options;
        options = {};
    } else {
        onEnd = options.onEnd;
    }

    let buffer = new FIFO();
    let pushable, onNext, ended;

    const waitNext = () => {
        if (!buffer.isEmpty()) {
            if (options.writev) {
                // Drain everything buffered into a single array-valued result.
                let next;
                const values = [];
                while (!buffer.isEmpty()) {
                    next = buffer.shift();
                    if (next.error) throw next.error
                    values.push(next.value);
                }

                return { done: next.done, value: values }
            }

            const next = buffer.shift();
            if (next.error) throw next.error
            return next
        }

        if (ended) return { done: true }

        // Nothing buffered: park the consumer until the next push/end call.
        return new Promise((resolve, reject) => {
            onNext = next => {
                onNext = null;
                if (next.error) {
                    reject(next.error);
                } else {
                    if (options.writev && !next.done) {
                        resolve({ done: next.done, value: [next.value] });
                    } else {
                        resolve(next);
                    }
                }

                return pushable
            };
        })
    };

    // Deliver straight to a waiting consumer, else buffer for later.
    const bufferNext = next => {
        if (onNext) return onNext(next)
        buffer.push(next);
        return pushable
    };

    // An error discards everything buffered and is delivered next.
    const bufferError = err => {
        buffer = new FIFO();
        if (onNext) return onNext({ error: err })
        buffer.push({ error: err });
        return pushable
    };

    const push = value => {
        if (ended) return pushable
        return bufferNext({ done: false, value })
    };
    const end = err => {
        if (ended) return pushable
        ended = true;
        return err ? bufferError(err) : bufferNext({ done: true })
    };
    // Iterator protocol hooks: return() drops buffered values; throw()
    // forwards the error. Both end the iterable.
    const _return = () => {
        buffer = new FIFO();
        end();
        return { done: true }
    };
    const _throw = err => {
        end(err);
        return { done: true }
    };

    pushable = {
        [Symbol.asyncIterator] () { return this },
        next: waitNext,
        return: _return,
        throw: _throw,
        push,
        end
    };

    if (!onEnd) return pushable

    // Wrap so onEnd fires exactly once when the iterable terminates.
    const _pushable = pushable;

    pushable = {
        [Symbol.asyncIterator] () { return this },
        next () {
            return _pushable.next()
        },
        throw (err) {
            _pushable.throw(err);
            if (onEnd) {
                onEnd(err);
                onEnd = null;
            }
            return { done: true }
        },
        return () {
            _pushable.return();
            if (onEnd) {
                onEnd();
                onEnd = null;
            }
            return { done: true }
        },
        push,
        end (err) {
            _pushable.end(err);
            if (onEnd) {
                onEnd(err);
                onEnd = null;
            }
            return pushable
        }
    };

    return pushable
};
|
|
|
|
// it-pushable factory (CJS copy) — used by the merge helper below.
const pushable = itPushable;
|
|
|
|
/**
|
|
* Treat one or more iterables as a single iterable.
|
|
*
|
|
* Nb. sources are iterated over in parallel so the
|
|
* order of emitted items is not guaranteed.
|
|
*
|
|
* @template T
|
|
* @param {...AsyncIterable<T>|Iterable<T>} sources
|
|
* @returns {AsyncIterable<T>}
|
|
*/
|
|
const merge = async function * (...sources) {
    const output = pushable();

    // Kick off draining on the next tick so `output` exists before any
    // source can push; all sources are consumed concurrently via Promise.all.
    setTimeout(async () => {
        try {
            await Promise.all(
                sources.map(async (source) => {
                    for await (const item of source) {
                        output.push(item);
                    }
                })
            );

            output.end();
        } catch (/** @type {any} */ err) {
            // First source error terminates the merged stream with that error.
            output.end(err);
        }
    }, 0);

    yield * output;
};

var itMerge = merge;
|
|
|
|
// Left-to-right function composition: threads the (initially undefined)
// value through every stage and returns the final result.
const rawPipe = (...fns) => {
    let acc;
    for (const fn of fns) {
        acc = fn(acc);
    }
    return acc;
};
|
|
// True for anything that looks consumable: async iterable, sync iterable,
// or a bare iterator exposing next().
const isIterable = (obj) => {
    if (obj == null) {
        return false;
    }
    return typeof obj[Symbol.asyncIterator] === 'function' ||
        typeof obj[Symbol.iterator] === 'function' ||
        typeof obj.next === 'function'; // Probably, right?
};
|
|
// A duplex is anything exposing a `sink` function plus an iterable `source`.
const isDuplex = (obj) => {
    return obj != null && typeof obj.sink === 'function' && isIterable(obj.source);
};
|
|
// Adapt a duplex into a pipeline stage: feed the upstream source into the
// duplex's sink and pass its source downstream. When sink() returns a
// promise, merge in a sentinel pushable so sink rejection/completion also
// terminates the downstream iteration.
const duplexPipelineFn = (duplex) => {
    return (source) => {
        const p = duplex.sink(source);
        if (p.then != null) {
            const stream = pushable$1({
                objectMode: true
            });
            p.then(() => {
                stream.end();
            }, (err) => {
                stream.end(err);
            });
            // End the sentinel once the duplex source is fully drained, so
            // the merged iterable can finish.
            const sourceWrap = async function* () {
                yield* duplex.source;
                stream.end();
            };
            return itMerge(stream, sourceWrap());
        }
        return duplex.source;
    };
};
|
|
/**
 * it-pipe: chain a source, zero or more transforms/duplexes, and a sink into
 * one pipeline. The first argument may be an iterable, a duplex (its source
 * is used), or a function; duplexes in the middle are adapted via
 * duplexPipelineFn and a trailing duplex contributes its sink.
 */
function pipe(first, ...rest) {
    // Duplex at start: wrap in function and return duplex source
    if (isDuplex(first)) {
        const duplex = first;
        first = () => duplex.source;
        // Iterable at start: wrap in function
    }
    else if (isIterable(first)) {
        const source = first;
        first = () => source;
    }
    const fns = [first, ...rest];
    if (fns.length > 1) {
        // Duplex at end: use duplex sink
        if (isDuplex(fns[fns.length - 1])) {
            fns[fns.length - 1] = fns[fns.length - 1].sink;
        }
    }
    if (fns.length > 2) {
        // Duplex in the middle, consume source with duplex sink and return duplex source
        for (let i = 1; i < fns.length - 1; i++) {
            if (isDuplex(fns[i])) {
                fns[i] = duplexPipelineFn(fns[i]);
            }
        }
    }
    return rawPipe(...fns);
}
|
|
|
|
// NOTE(review): the logger instance returned by debug() is discarded here —
// presumably a bundler artifact of an unused `debug('waku:select-peer')`
// binding; confirm before removing.
debug("waku:select-peer");
|
|
|
|
// Canonical RFC 4122 form, versions 1-5, plus the special-cased nil UUID.
var REGEX = /^(?:[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}|00000000-0000-0000-0000-000000000000)$/i;

/**
 * Check whether `uuid` is a string in canonical UUID form.
 */
function validate(uuid) {
    if (typeof uuid !== 'string') {
        return false;
    }
    return REGEX.test(uuid);
}
|
|
|
|
/**
 * Convert array of 16 byte values to UUID string format of the form:
 * XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX
 */

// Lookup table: byte value (0-255) -> zero-padded two-char lowercase hex.
var byteToHex = [];

for (var i = 0; i < 256; ++i) {
    byteToHex.push((i + 0x100).toString(16).substr(1));
}
|
|
|
|
/**
 * Render the 16 bytes starting at `offset` as a canonical lowercase UUID
 * string. Throws TypeError when the result does not validate (e.g. an input
 * value outside 0-255 yields "undefined" segments).
 */
function stringify(arr) {
    var offset = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : 0;
    // Note: Be careful editing this code! It's been tuned for performance
    // and works in ways you may not expect. See https://github.com/uuidjs/uuid/pull/434
    var uuid = (byteToHex[arr[offset + 0]] + byteToHex[arr[offset + 1]] + byteToHex[arr[offset + 2]] + byteToHex[arr[offset + 3]] + '-' + byteToHex[arr[offset + 4]] + byteToHex[arr[offset + 5]] + '-' + byteToHex[arr[offset + 6]] + byteToHex[arr[offset + 7]] + '-' + byteToHex[arr[offset + 8]] + byteToHex[arr[offset + 9]] + '-' + byteToHex[arr[offset + 10]] + byteToHex[arr[offset + 11]] + byteToHex[arr[offset + 12]] + byteToHex[arr[offset + 13]] + byteToHex[arr[offset + 14]] + byteToHex[arr[offset + 15]]).toLowerCase(); // Consistency check for valid UUID. If this throws, it's likely due to one
    // of the following:
    // - One or more input array values don't map to a hex octet (leading to
    // "undefined" in the uuid)
    // - Invalid input values for the RFC `version` or `variant` fields

    if (!validate(uuid)) {
        throw TypeError('Stringified UUID is invalid');
    }

    return uuid;
}
|
|
|
|
/**
 * Parse a canonical UUID string into its 16 bytes.
 *
 * @param {string} uuid
 * @returns {Uint8Array} 16 bytes, most-significant first
 * @throws {TypeError} when `uuid` fails validation
 */
function parse(uuid) {
    if (!validate(uuid)) {
        throw TypeError('Invalid UUID');
    }

    // validate() guarantees the canonical 8-4-4-4-12 layout, so stripping
    // the dashes leaves exactly 32 hex digits — 16 byte pairs in order.
    var hex = uuid.replace(/-/g, '');
    var arr = new Uint8Array(16);

    for (var i = 0; i < 16; ++i) {
        arr[i] = parseInt(hex.slice(i * 2, i * 2 + 2), 16);
    }

    return arr;
}
|
|
|
|
/**
 * UTF-8 encode `str` and return its bytes as an array of numbers, using the
 * legacy unescape(encodeURIComponent()) trick so each resulting char unit is
 * a single byte (0-255).
 */
function stringToBytes(str) {
    var utf8 = unescape(encodeURIComponent(str)); // UTF8 escape
    return Array.from(utf8, function (ch) {
        return ch.charCodeAt(0);
    });
}
|
|
|
|
// RFC 4122 Appendix C namespace IDs for name-based (v3/v5) UUID generation.
var DNS = '6ba7b810-9dad-11d1-80b4-00c04fd430c8';
var URL$1 = '6ba7b811-9dad-11d1-80b4-00c04fd430c8';
|
|
/**
 * Factory for name-based UUID generators (v3 = MD5, v5 = SHA-1): hashes
 * namespace + name with `hashfunc`, stamps the RFC 4122 version/variant
 * bits, and either writes 16 bytes into `buf` or returns the canonical
 * string.
 */
function v35 (name, version, hashfunc) {
    function generateUUID(value, namespace, buf, offset) {
        if (typeof value === 'string') {
            value = stringToBytes(value);
        }

        if (typeof namespace === 'string') {
            namespace = parse(namespace);
        }

        if (namespace.length !== 16) {
            throw TypeError('Namespace must be array-like (16 iterable integer values, 0-255)');
        } // Compute hash of namespace and value, Per 4.3
        // Future: Use spread syntax when supported on all platforms, e.g. `bytes =
        // hashfunc([...namespace, ... value])`

        var bytes = new Uint8Array(16 + value.length);
        bytes.set(namespace);
        bytes.set(value, namespace.length);
        bytes = hashfunc(bytes);
        // Version goes in the high nibble of byte 6; variant bits in byte 8.
        bytes[6] = bytes[6] & 0x0f | version;
        bytes[8] = bytes[8] & 0x3f | 0x80;

        if (buf) {
            offset = offset || 0;

            for (var i = 0; i < 16; ++i) {
                buf[offset + i] = bytes[i];
            }

            return buf;
        }

        return stringify(bytes);
    } // Function#name is not settable on some platforms (#270)

    try {
        generateUUID.name = name; // eslint-disable-next-line no-empty
    } catch (err) {} // For CommonJS default export support

    generateUUID.DNS = DNS;
    generateUUID.URL = URL$1;
    return generateUUID;
}
|
|
|
|
/*
|
|
* Browser-compatible JavaScript MD5
|
|
*
|
|
* Modification of JavaScript MD5
|
|
* https://github.com/blueimp/JavaScript-MD5
|
|
*
|
|
* Copyright 2011, Sebastian Tschan
|
|
* https://blueimp.net
|
|
*
|
|
* Licensed under the MIT license:
|
|
* https://opensource.org/licenses/MIT
|
|
*
|
|
* Based on
|
|
* A JavaScript implementation of the RSA Data Security, Inc. MD5 Message
|
|
* Digest Algorithm, as defined in RFC 1321.
|
|
* Version 2.2 Copyright (C) Paul Johnston 1999 - 2009
|
|
* Other contributors: Greg Holt, Andrew Kepert, Ydnar, Lostinet
|
|
* Distributed under the BSD License
|
|
* See http://pajhome.org.uk/crypt/md5 for more info.
|
|
*/
|
|
/**
 * MD5 digest (blueimp port). Accepts a string (UTF-8 encoded first) or a
 * byte array and returns an array of 16 byte values. Relies on the
 * bytesToWords/wordsToMd5 helpers defined nearby in this bundle.
 */
function md5(bytes) {
    if (typeof bytes === 'string') {
        var msg = unescape(encodeURIComponent(bytes)); // UTF8 escape

        bytes = new Uint8Array(msg.length);

        for (var i = 0; i < msg.length; ++i) {
            bytes[i] = msg.charCodeAt(i);
        }
    }

    return md5ToHexEncodedArray(wordsToMd5(bytesToWords(bytes), bytes.length * 8));
}
|
|
/*
|
|
* Convert an array of little-endian words to an array of bytes
|
|
*/
|
|
|
|
|
|
/*
 * Convert an array of little-endian words to an array of bytes
 */
function md5ToHexEncodedArray(input) {
    // NOTE: despite the name, this returns numeric byte VALUES (0-255), not
    // hex strings — each 32-bit word contributes 4 bytes, low byte first.
    // The original code converted every byte to two hex characters and
    // immediately parseInt'ed them back: an identity round-trip, removed
    // here without changing the result.
    var output = [];
    var length32 = input.length * 32;

    for (var i = 0; i < length32; i += 8) {
        output.push(input[i >> 5] >>> i % 32 & 0xff);
    }

    return output;
}
|
|
/**
|
|
* Calculate output length with padding and bit length
|
|
*/
|
|
|
|
|
|
/**
 * Calculate output length with padding and bit length
 */
function getOutputLength(inputLength8) {
    // Round the bit length (plus the mandatory 0x80 pad byte's 64-bit
    // allowance) up to whole 512-bit blocks, expressed in 32-bit words,
    // then reserve 14 words plus one for the bit-count slot.
    const paddedBlocks = (inputLength8 + 64) >>> 9;
    return (paddedBlocks << 4) + 14 + 1;
}
|
|
/*
 * Calculate the MD5 of an array of little-endian words, and a bit length.
 * Implements the four-round compression function of RFC 1321; returns the
 * digest as four (possibly negative) signed 32-bit words [a, b, c, d].
 */
function wordsToMd5(x, len) {
  /* append padding */
  x[len >> 5] |= 0x80 << len % 32;
  // Store the bit length in the final word. Only the low 32 bits are kept;
  // the high length word is assumed zero (inputs shorter than 2^32 bits).
  x[getOutputLength(len) - 1] = len;
  // Initial state A, B, C, D from RFC 1321 (as signed 32-bit values).
  var a = 1732584193;
  var b = -271733879;
  var c = -1732584194;
  var d = 271733878;

  // Process the message in 512-bit (16-word) blocks.
  for (var i = 0; i < x.length; i += 16) {
    var olda = a;
    var oldb = b;
    var oldc = c;
    var oldd = d;
    // Round 1 (F function), 16 steps.
    a = md5ff(a, b, c, d, x[i], 7, -680876936);
    d = md5ff(d, a, b, c, x[i + 1], 12, -389564586);
    c = md5ff(c, d, a, b, x[i + 2], 17, 606105819);
    b = md5ff(b, c, d, a, x[i + 3], 22, -1044525330);
    a = md5ff(a, b, c, d, x[i + 4], 7, -176418897);
    d = md5ff(d, a, b, c, x[i + 5], 12, 1200080426);
    c = md5ff(c, d, a, b, x[i + 6], 17, -1473231341);
    b = md5ff(b, c, d, a, x[i + 7], 22, -45705983);
    a = md5ff(a, b, c, d, x[i + 8], 7, 1770035416);
    d = md5ff(d, a, b, c, x[i + 9], 12, -1958414417);
    c = md5ff(c, d, a, b, x[i + 10], 17, -42063);
    b = md5ff(b, c, d, a, x[i + 11], 22, -1990404162);
    a = md5ff(a, b, c, d, x[i + 12], 7, 1804603682);
    d = md5ff(d, a, b, c, x[i + 13], 12, -40341101);
    c = md5ff(c, d, a, b, x[i + 14], 17, -1502002290);
    b = md5ff(b, c, d, a, x[i + 15], 22, 1236535329);
    // Round 2 (G function), 16 steps.
    a = md5gg(a, b, c, d, x[i + 1], 5, -165796510);
    d = md5gg(d, a, b, c, x[i + 6], 9, -1069501632);
    c = md5gg(c, d, a, b, x[i + 11], 14, 643717713);
    b = md5gg(b, c, d, a, x[i], 20, -373897302);
    a = md5gg(a, b, c, d, x[i + 5], 5, -701558691);
    d = md5gg(d, a, b, c, x[i + 10], 9, 38016083);
    c = md5gg(c, d, a, b, x[i + 15], 14, -660478335);
    b = md5gg(b, c, d, a, x[i + 4], 20, -405537848);
    a = md5gg(a, b, c, d, x[i + 9], 5, 568446438);
    d = md5gg(d, a, b, c, x[i + 14], 9, -1019803690);
    c = md5gg(c, d, a, b, x[i + 3], 14, -187363961);
    b = md5gg(b, c, d, a, x[i + 8], 20, 1163531501);
    a = md5gg(a, b, c, d, x[i + 13], 5, -1444681467);
    d = md5gg(d, a, b, c, x[i + 2], 9, -51403784);
    c = md5gg(c, d, a, b, x[i + 7], 14, 1735328473);
    b = md5gg(b, c, d, a, x[i + 12], 20, -1926607734);
    // Round 3 (H function), 16 steps.
    a = md5hh(a, b, c, d, x[i + 5], 4, -378558);
    d = md5hh(d, a, b, c, x[i + 8], 11, -2022574463);
    c = md5hh(c, d, a, b, x[i + 11], 16, 1839030562);
    b = md5hh(b, c, d, a, x[i + 14], 23, -35309556);
    a = md5hh(a, b, c, d, x[i + 1], 4, -1530992060);
    d = md5hh(d, a, b, c, x[i + 4], 11, 1272893353);
    c = md5hh(c, d, a, b, x[i + 7], 16, -155497632);
    b = md5hh(b, c, d, a, x[i + 10], 23, -1094730640);
    a = md5hh(a, b, c, d, x[i + 13], 4, 681279174);
    d = md5hh(d, a, b, c, x[i], 11, -358537222);
    c = md5hh(c, d, a, b, x[i + 3], 16, -722521979);
    b = md5hh(b, c, d, a, x[i + 6], 23, 76029189);
    a = md5hh(a, b, c, d, x[i + 9], 4, -640364487);
    d = md5hh(d, a, b, c, x[i + 12], 11, -421815835);
    c = md5hh(c, d, a, b, x[i + 15], 16, 530742520);
    b = md5hh(b, c, d, a, x[i + 2], 23, -995338651);
    // Round 4 (I function), 16 steps.
    a = md5ii(a, b, c, d, x[i], 6, -198630844);
    d = md5ii(d, a, b, c, x[i + 7], 10, 1126891415);
    c = md5ii(c, d, a, b, x[i + 14], 15, -1416354905);
    b = md5ii(b, c, d, a, x[i + 5], 21, -57434055);
    a = md5ii(a, b, c, d, x[i + 12], 6, 1700485571);
    d = md5ii(d, a, b, c, x[i + 3], 10, -1894986606);
    c = md5ii(c, d, a, b, x[i + 10], 15, -1051523);
    b = md5ii(b, c, d, a, x[i + 1], 21, -2054922799);
    a = md5ii(a, b, c, d, x[i + 8], 6, 1873313359);
    d = md5ii(d, a, b, c, x[i + 15], 10, -30611744);
    c = md5ii(c, d, a, b, x[i + 6], 15, -1560198380);
    b = md5ii(b, c, d, a, x[i + 13], 21, 1309151649);
    a = md5ii(a, b, c, d, x[i + 4], 6, -145523070);
    d = md5ii(d, a, b, c, x[i + 11], 10, -1120210379);
    c = md5ii(c, d, a, b, x[i + 2], 15, 718787259);
    b = md5ii(b, c, d, a, x[i + 9], 21, -343485551);
    // Add this block's result into the running state (mod 2^32).
    a = safeAdd(a, olda);
    b = safeAdd(b, oldb);
    c = safeAdd(c, oldc);
    d = safeAdd(d, oldd);
  }

  return [a, b, c, d];
}
|
|
/*
 * Convert an array bytes to an array of little-endian words
 * Characters >255 have their high-byte silently ignored.
 * The returned buffer is pre-sized for MD5 padding via getOutputLength.
 */
function bytesToWords(input) {
  if (input.length === 0) {
    return [];
  }

  var bitLen = input.length * 8;
  var words = new Uint32Array(getOutputLength(bitLen));

  for (var idx = 0; idx < input.length; ++idx) {
    var bit = idx * 8;
    words[bit >> 5] |= (input[idx] & 0xff) << bit % 32;
  }

  return words;
}
|
|
/*
 * Add integers, wrapping at 2^32. This uses 16-bit operations internally
 * to work around bugs in some JS interpreters.
 */
function safeAdd(x, y) {
  var low = (x & 0xffff) + (y & 0xffff);
  var high = (x >> 16) + (y >> 16) + (low >> 16);
  return high << 16 | low & 0xffff;
}
|
|
/*
 * Bitwise rotate a 32-bit number to the left.
 */
function bitRotateLeft(num, cnt) {
  var shifted = num << cnt;
  var wrapped = num >>> 32 - cnt;
  return shifted | wrapped;
}
|
|
/*
 * These functions implement the four basic operations the algorithm uses.
 * md5cmn: common step — add, rotate, add (all additions wrap at 2^32).
 */
function md5cmn(q, a, b, x, s, t) {
  var sum = safeAdd(safeAdd(a, q), safeAdd(x, t));
  return safeAdd(bitRotateLeft(sum, s), b);
}
|
|
|
|
// Round-1 step: F(b, c, d) = (b AND c) OR (NOT b AND d).
function md5ff(a, b, c, d, x, s, t) {
  var mix = b & c | ~b & d;
  return md5cmn(mix, a, b, x, s, t);
}
|
|
|
|
// Round-2 step: G(b, c, d) = (b AND d) OR (c AND NOT d).
function md5gg(a, b, c, d, x, s, t) {
  var mix = b & d | c & ~d;
  return md5cmn(mix, a, b, x, s, t);
}
|
|
|
|
// Round-3 step: H(b, c, d) = b XOR c XOR d.
function md5hh(a, b, c, d, x, s, t) {
  var mix = b ^ c ^ d;
  return md5cmn(mix, a, b, x, s, t);
}
|
|
|
|
// Round-4 step: I(b, c, d) = c XOR (b OR NOT d).
function md5ii(a, b, c, d, x, s, t) {
  var mix = c ^ (b | ~d);
  return md5cmn(mix, a, b, x, s, t);
}
|
|
|
|
// Register the namespaced UUID v3 generator (version nibble 0x30), backed by MD5.
v35('v3', 0x30, md5);
|
|
|
|
// Adapted from Chris Veness' SHA1 code at
// http://www.movable-type.co.uk/scripts/sha1.html
// SHA-1 round function, selected by the 20-round stage index s (0-3).
// Stages 1 and 3 both use parity (x XOR y XOR z), per the SHA-1 spec.
// Any other s yields undefined.
function f(s, x, y, z) {
  if (s === 0) {
    return x & y ^ ~x & z;
  }

  if (s === 1 || s === 3) {
    return x ^ y ^ z;
  }

  if (s === 2) {
    return x & y ^ x & z ^ y & z;
  }
}
|
|
|
|
// Rotate the 32-bit value x left by n bits.
function ROTL(x, n) {
  var shifted = x << n;
  var wrapped = x >>> 32 - n;
  return shifted | wrapped;
}
|
|
|
|
/**
 * Compute the SHA-1 digest of the given input.
 *
 * @param {string|Array<number>|ArrayLike<number>} bytes - Input data; strings
 *   are first encoded as UTF-8, array-likes are copied to a plain array.
 * @returns {Array<number>} The 20-byte digest as an array of byte values.
 */
function sha1(bytes) {
  // Round constants K and initial hash state H (FIPS 180, SHA-1).
  var K = [0x5a827999, 0x6ed9eba1, 0x8f1bbcdc, 0xca62c1d6];
  var H = [0x67452301, 0xefcdab89, 0x98badcfe, 0x10325476, 0xc3d2e1f0];

  if (typeof bytes === 'string') {
    var msg = unescape(encodeURIComponent(bytes)); // UTF8 escape

    bytes = [];

    for (var i = 0; i < msg.length; ++i) {
      bytes.push(msg.charCodeAt(i));
    }
  } else if (!Array.isArray(bytes)) {
    // Convert Array-like to Array
    bytes = Array.prototype.slice.call(bytes);
  }

  // Append the 0x80 terminator, then lay the message out as N big-endian
  // 512-bit (16-word) blocks. Missing trailing bytes read as undefined,
  // which the shifts coerce to 0 (zero padding).
  bytes.push(0x80);
  var l = bytes.length / 4 + 2;
  var N = Math.ceil(l / 16);
  var M = new Array(N);

  for (var _i = 0; _i < N; ++_i) {
    var arr = new Uint32Array(16);

    for (var j = 0; j < 16; ++j) {
      arr[j] = bytes[_i * 64 + j * 4] << 24 | bytes[_i * 64 + j * 4 + 1] << 16 | bytes[_i * 64 + j * 4 + 2] << 8 | bytes[_i * 64 + j * 4 + 3];
    }

    M[_i] = arr;
  }

  // Write the original bit length into the last two words of the final
  // block: high word (via division, floored) then low word.
  M[N - 1][14] = (bytes.length - 1) * 8 / Math.pow(2, 32);
  M[N - 1][14] = Math.floor(M[N - 1][14]);
  M[N - 1][15] = (bytes.length - 1) * 8 & 0xffffffff;

  for (var _i2 = 0; _i2 < N; ++_i2) {
    var W = new Uint32Array(80);

    // Message schedule: first 16 words copied, remaining 64 derived.
    for (var t = 0; t < 16; ++t) {
      W[t] = M[_i2][t];
    }

    for (var _t = 16; _t < 80; ++_t) {
      W[_t] = ROTL(W[_t - 3] ^ W[_t - 8] ^ W[_t - 14] ^ W[_t - 16], 1);
    }

    var a = H[0];
    var b = H[1];
    var c = H[2];
    var d = H[3];
    var e = H[4];

    // 80 compression rounds; f() and K[s] select per-20-round behaviour.
    // `>>> 0` forces results back to unsigned 32-bit.
    for (var _t2 = 0; _t2 < 80; ++_t2) {
      var s = Math.floor(_t2 / 20);
      var T = ROTL(a, 5) + f(s, b, c, d) + e + K[s] + W[_t2] >>> 0;
      e = d;
      d = c;
      c = ROTL(b, 30) >>> 0;
      b = a;
      a = T;
    }

    // Fold this block's result into the running state (mod 2^32).
    H[0] = H[0] + a >>> 0;
    H[1] = H[1] + b >>> 0;
    H[2] = H[2] + c >>> 0;
    H[3] = H[3] + d >>> 0;
    H[4] = H[4] + e >>> 0;
  }

  // Serialize the five 32-bit state words to 20 big-endian bytes.
  return [H[0] >> 24 & 0xff, H[0] >> 16 & 0xff, H[0] >> 8 & 0xff, H[0] & 0xff, H[1] >> 24 & 0xff, H[1] >> 16 & 0xff, H[1] >> 8 & 0xff, H[1] & 0xff, H[2] >> 24 & 0xff, H[2] >> 16 & 0xff, H[2] >> 8 & 0xff, H[2] & 0xff, H[3] >> 24 & 0xff, H[3] >> 16 & 0xff, H[3] >> 8 & 0xff, H[3] & 0xff, H[4] >> 24 & 0xff, H[4] >> 16 & 0xff, H[4] >> 8 & 0xff, H[4] & 0xff];
}
|
|
|
|
// Register the namespaced UUID v5 generator (version nibble 0x50), backed by SHA-1.
v35('v5', 0x50, sha1);
|
|
|
|
/* eslint-disable import/export */
// Protobuf codec for the Waku filter protocol's FilterRequest message.
// Pattern used by every codec namespace in this bundle: `codec()` lazily
// builds and memoizes a `message(encodeFn, decodeFn)` codec; `encode`/`decode`
// wrap it with `encodeMessage`/`decodeMessage`. Protobuf tags are written as
// precomputed varints: tag = (fieldNumber << 3) | wireType.
var FilterRequest;
(function (FilterRequest) {
    // Nested ContentFilter message: { contentTopic?: string } (field 1).
    (function (ContentFilter) {
        let _codec;
        ContentFilter.codec = () => {
            if (_codec == null) {
                _codec = message((obj, writer, opts = {}) => {
                    if (opts.lengthDelimited !== false) {
                        writer.fork();
                    }
                    if (obj.contentTopic != null) {
                        // 10 = field 1, wire type 2 (length-delimited).
                        writer.uint32(10);
                        writer.string(obj.contentTopic);
                    }
                    if (opts.lengthDelimited !== false) {
                        writer.ldelim();
                    }
                }, (reader, length) => {
                    const obj = {};
                    const end = length == null ? reader.len : reader.pos + length;
                    while (reader.pos < end) {
                        const tag = reader.uint32();
                        switch (tag >>> 3) {
                            case 1:
                                obj.contentTopic = reader.string();
                                break;
                            default:
                                // Unknown field: skip by wire type.
                                reader.skipType(tag & 7);
                                break;
                        }
                    }
                    return obj;
                });
            }
            return _codec;
        };
        ContentFilter.encode = (obj) => {
            return encodeMessage(obj, ContentFilter.codec());
        };
        ContentFilter.decode = (buf) => {
            return decodeMessage(buf, ContentFilter.codec());
        };
    })(FilterRequest.ContentFilter || (FilterRequest.ContentFilter = {}));
    let _codec;
    FilterRequest.codec = () => {
        if (_codec == null) {
            _codec = message((obj, writer, opts = {}) => {
                if (opts.lengthDelimited !== false) {
                    writer.fork();
                }
                if (obj.subscribe != null) {
                    // 8 = field 1, wire type 0 (varint).
                    writer.uint32(8);
                    writer.bool(obj.subscribe);
                }
                if (obj.topic != null) {
                    // 18 = field 2, wire type 2.
                    writer.uint32(18);
                    writer.string(obj.topic);
                }
                if (obj.contentFilters != null) {
                    // Required repeated field 3: one length-delimited entry each.
                    for (const value of obj.contentFilters) {
                        writer.uint32(26);
                        FilterRequest.ContentFilter.codec().encode(value, writer);
                    }
                }
                else {
                    throw new Error('Protocol error: required field "contentFilters" was not found in object');
                }
                if (opts.lengthDelimited !== false) {
                    writer.ldelim();
                }
            }, (reader, length) => {
                const obj = {
                    contentFilters: [],
                };
                const end = length == null ? reader.len : reader.pos + length;
                while (reader.pos < end) {
                    const tag = reader.uint32();
                    switch (tag >>> 3) {
                        case 1:
                            obj.subscribe = reader.bool();
                            break;
                        case 2:
                            obj.topic = reader.string();
                            break;
                        case 3:
                            obj.contentFilters.push(FilterRequest.ContentFilter.codec().decode(reader, reader.uint32()));
                            break;
                        default:
                            reader.skipType(tag & 7);
                            break;
                    }
                }
                return obj;
            });
        }
        return _codec;
    };
    FilterRequest.encode = (obj) => {
        return encodeMessage(obj, FilterRequest.codec());
    };
    FilterRequest.decode = (buf) => {
        return decodeMessage(buf, FilterRequest.codec());
    };
})(FilterRequest || (FilterRequest = {}));
|
|
// Protobuf codec for the filter protocol's MessagePush:
// { messages: WakuMessage[] } — required repeated field 1.
var MessagePush;
(function (MessagePush) {
    let _codec;
    MessagePush.codec = () => {
        if (_codec == null) {
            _codec = message((obj, writer, opts = {}) => {
                if (opts.lengthDelimited !== false) {
                    writer.fork();
                }
                if (obj.messages != null) {
                    // 10 = field 1, wire type 2 (length-delimited), repeated.
                    for (const value of obj.messages) {
                        writer.uint32(10);
                        WakuMessage$3.codec().encode(value, writer);
                    }
                }
                else {
                    throw new Error('Protocol error: required field "messages" was not found in object');
                }
                if (opts.lengthDelimited !== false) {
                    writer.ldelim();
                }
            }, (reader, length) => {
                const obj = {
                    messages: [],
                };
                const end = length == null ? reader.len : reader.pos + length;
                while (reader.pos < end) {
                    const tag = reader.uint32();
                    switch (tag >>> 3) {
                        case 1:
                            obj.messages.push(WakuMessage$3.codec().decode(reader, reader.uint32()));
                            break;
                        default:
                            // Unknown field: skip by wire type.
                            reader.skipType(tag & 7);
                            break;
                    }
                }
                return obj;
            });
        }
        return _codec;
    };
    MessagePush.encode = (obj) => {
        return encodeMessage(obj, MessagePush.codec());
    };
    MessagePush.decode = (buf) => {
        return decodeMessage(buf, MessagePush.codec());
    };
})(MessagePush || (MessagePush = {}));
|
|
// Protobuf codec for the filter protocol's top-level RPC envelope:
// requestId (field 1, string), request (field 2, FilterRequest),
// push (field 3, MessagePush) — all optional.
var FilterRPC;
(function (FilterRPC) {
    let _codec;
    FilterRPC.codec = () => {
        if (_codec == null) {
            _codec = message((obj, writer, opts = {}) => {
                if (opts.lengthDelimited !== false) {
                    writer.fork();
                }
                if (obj.requestId != null) {
                    // 10 = field 1, wire type 2 (length-delimited).
                    writer.uint32(10);
                    writer.string(obj.requestId);
                }
                if (obj.request != null) {
                    // 18 = field 2, wire type 2.
                    writer.uint32(18);
                    FilterRequest.codec().encode(obj.request, writer);
                }
                if (obj.push != null) {
                    // 26 = field 3, wire type 2.
                    writer.uint32(26);
                    MessagePush.codec().encode(obj.push, writer);
                }
                if (opts.lengthDelimited !== false) {
                    writer.ldelim();
                }
            }, (reader, length) => {
                const obj = {};
                const end = length == null ? reader.len : reader.pos + length;
                while (reader.pos < end) {
                    const tag = reader.uint32();
                    switch (tag >>> 3) {
                        case 1:
                            obj.requestId = reader.string();
                            break;
                        case 2:
                            obj.request = FilterRequest.codec().decode(reader, reader.uint32());
                            break;
                        case 3:
                            obj.push = MessagePush.codec().decode(reader, reader.uint32());
                            break;
                        default:
                            // Unknown field: skip by wire type.
                            reader.skipType(tag & 7);
                            break;
                    }
                }
                return obj;
            });
        }
        return _codec;
    };
    FilterRPC.encode = (obj) => {
        return encodeMessage(obj, FilterRPC.codec());
    };
    FilterRPC.decode = (buf) => {
        return decodeMessage(buf, FilterRPC.codec());
    };
})(FilterRPC || (FilterRPC = {}));
|
|
// Protobuf codec for the RLN RateLimitProof carried by filter-protocol
// WakuMessages. All seven fields (1-7) are required bytes: proof, merkleRoot,
// epoch, shareX, shareY, nullifier, rlnIdentifier. Encoding throws if any is
// missing; decoding initialises each to an empty Uint8Array and re-checks
// afterwards.
var RateLimitProof$3;
(function (RateLimitProof) {
    let _codec;
    RateLimitProof.codec = () => {
        if (_codec == null) {
            _codec = message((obj, writer, opts = {}) => {
                if (opts.lengthDelimited !== false) {
                    writer.fork();
                }
                // Tags 10/18/26/34/42/50/58 = fields 1-7, wire type 2.
                if (obj.proof != null) {
                    writer.uint32(10);
                    writer.bytes(obj.proof);
                }
                else {
                    throw new Error('Protocol error: required field "proof" was not found in object');
                }
                if (obj.merkleRoot != null) {
                    writer.uint32(18);
                    writer.bytes(obj.merkleRoot);
                }
                else {
                    throw new Error('Protocol error: required field "merkleRoot" was not found in object');
                }
                if (obj.epoch != null) {
                    writer.uint32(26);
                    writer.bytes(obj.epoch);
                }
                else {
                    throw new Error('Protocol error: required field "epoch" was not found in object');
                }
                if (obj.shareX != null) {
                    writer.uint32(34);
                    writer.bytes(obj.shareX);
                }
                else {
                    throw new Error('Protocol error: required field "shareX" was not found in object');
                }
                if (obj.shareY != null) {
                    writer.uint32(42);
                    writer.bytes(obj.shareY);
                }
                else {
                    throw new Error('Protocol error: required field "shareY" was not found in object');
                }
                if (obj.nullifier != null) {
                    writer.uint32(50);
                    writer.bytes(obj.nullifier);
                }
                else {
                    throw new Error('Protocol error: required field "nullifier" was not found in object');
                }
                if (obj.rlnIdentifier != null) {
                    writer.uint32(58);
                    writer.bytes(obj.rlnIdentifier);
                }
                else {
                    throw new Error('Protocol error: required field "rlnIdentifier" was not found in object');
                }
                if (opts.lengthDelimited !== false) {
                    writer.ldelim();
                }
            }, (reader, length) => {
                // Defaults so required fields are always present post-decode.
                const obj = {
                    proof: new Uint8Array(0),
                    merkleRoot: new Uint8Array(0),
                    epoch: new Uint8Array(0),
                    shareX: new Uint8Array(0),
                    shareY: new Uint8Array(0),
                    nullifier: new Uint8Array(0),
                    rlnIdentifier: new Uint8Array(0),
                };
                const end = length == null ? reader.len : reader.pos + length;
                while (reader.pos < end) {
                    const tag = reader.uint32();
                    switch (tag >>> 3) {
                        case 1:
                            obj.proof = reader.bytes();
                            break;
                        case 2:
                            obj.merkleRoot = reader.bytes();
                            break;
                        case 3:
                            obj.epoch = reader.bytes();
                            break;
                        case 4:
                            obj.shareX = reader.bytes();
                            break;
                        case 5:
                            obj.shareY = reader.bytes();
                            break;
                        case 6:
                            obj.nullifier = reader.bytes();
                            break;
                        case 7:
                            obj.rlnIdentifier = reader.bytes();
                            break;
                        default:
                            // Unknown field: skip by wire type.
                            reader.skipType(tag & 7);
                            break;
                    }
                }
                // Re-validate required fields (unreachable with the defaults
                // above unless a field was explicitly set to null).
                if (obj.proof == null) {
                    throw new Error('Protocol error: value for required field "proof" was not found in protobuf');
                }
                if (obj.merkleRoot == null) {
                    throw new Error('Protocol error: value for required field "merkleRoot" was not found in protobuf');
                }
                if (obj.epoch == null) {
                    throw new Error('Protocol error: value for required field "epoch" was not found in protobuf');
                }
                if (obj.shareX == null) {
                    throw new Error('Protocol error: value for required field "shareX" was not found in protobuf');
                }
                if (obj.shareY == null) {
                    throw new Error('Protocol error: value for required field "shareY" was not found in protobuf');
                }
                if (obj.nullifier == null) {
                    throw new Error('Protocol error: value for required field "nullifier" was not found in protobuf');
                }
                if (obj.rlnIdentifier == null) {
                    throw new Error('Protocol error: value for required field "rlnIdentifier" was not found in protobuf');
                }
                return obj;
            });
        }
        return _codec;
    };
    RateLimitProof.encode = (obj) => {
        return encodeMessage(obj, RateLimitProof.codec());
    };
    RateLimitProof.decode = (buf) => {
        return decodeMessage(buf, RateLimitProof.codec());
    };
})(RateLimitProof$3 || (RateLimitProof$3 = {}));
|
|
// Protobuf codec for the filter protocol's WakuMessage: payload (1, bytes),
// contentTopic (2, string), version (3, uint32), timestampDeprecated
// (4, double), timestamp (10, sint64), rateLimitProof (21, message).
var WakuMessage$3;
(function (WakuMessage) {
    let _codec;
    WakuMessage.codec = () => {
        if (_codec == null) {
            _codec = message((obj, writer, opts = {}) => {
                if (opts.lengthDelimited !== false) {
                    writer.fork();
                }
                if (obj.payload != null) {
                    // 10 = field 1, wire type 2 (length-delimited).
                    writer.uint32(10);
                    writer.bytes(obj.payload);
                }
                if (obj.contentTopic != null) {
                    // 18 = field 2, wire type 2.
                    writer.uint32(18);
                    writer.string(obj.contentTopic);
                }
                if (obj.version != null) {
                    // 24 = field 3, wire type 0 (varint).
                    writer.uint32(24);
                    writer.uint32(obj.version);
                }
                if (obj.timestampDeprecated != null) {
                    // 33 = field 4, wire type 1 (64-bit fixed).
                    writer.uint32(33);
                    writer.double(obj.timestampDeprecated);
                }
                if (obj.timestamp != null) {
                    // 80 = field 10, wire type 0 (varint, zigzag).
                    writer.uint32(80);
                    writer.sint64(obj.timestamp);
                }
                if (obj.rateLimitProof != null) {
                    // 170 = field 21, wire type 2.
                    writer.uint32(170);
                    RateLimitProof$3.codec().encode(obj.rateLimitProof, writer);
                }
                if (opts.lengthDelimited !== false) {
                    writer.ldelim();
                }
            }, (reader, length) => {
                const obj = {};
                const end = length == null ? reader.len : reader.pos + length;
                while (reader.pos < end) {
                    const tag = reader.uint32();
                    switch (tag >>> 3) {
                        case 1:
                            obj.payload = reader.bytes();
                            break;
                        case 2:
                            obj.contentTopic = reader.string();
                            break;
                        case 3:
                            obj.version = reader.uint32();
                            break;
                        case 4:
                            obj.timestampDeprecated = reader.double();
                            break;
                        case 10:
                            obj.timestamp = reader.sint64();
                            break;
                        case 21:
                            obj.rateLimitProof = RateLimitProof$3.codec().decode(reader, reader.uint32());
                            break;
                        default:
                            // Unknown field: skip by wire type.
                            reader.skipType(tag & 7);
                            break;
                    }
                }
                return obj;
            });
        }
        return _codec;
    };
    WakuMessage.encode = (obj) => {
        return encodeMessage(obj, WakuMessage.codec());
    };
    WakuMessage.decode = (buf) => {
        return decodeMessage(buf, WakuMessage.codec());
    };
})(WakuMessage$3 || (WakuMessage$3 = {}));
|
|
|
|
// Instantiate the "waku:filter" debug logger; the return value is discarded
// here (presumably tree-shaking residue from bundling — the original module
// kept a named logger).
debug("waku:filter");
|
|
|
|
/* eslint-disable import/export */
// Protobuf codec for the light-push protocol's PushRequest:
// pubSubTopic (field 1, string), message (field 2, WakuMessage).
var PushRequest;
(function (PushRequest) {
    let _codec;
    PushRequest.codec = () => {
        if (_codec == null) {
            _codec = message((obj, writer, opts = {}) => {
                if (opts.lengthDelimited !== false) {
                    writer.fork();
                }
                if (obj.pubSubTopic != null) {
                    // 10 = field 1, wire type 2 (length-delimited).
                    writer.uint32(10);
                    writer.string(obj.pubSubTopic);
                }
                if (obj.message != null) {
                    // 18 = field 2, wire type 2.
                    writer.uint32(18);
                    WakuMessage$2.codec().encode(obj.message, writer);
                }
                if (opts.lengthDelimited !== false) {
                    writer.ldelim();
                }
            }, (reader, length) => {
                const obj = {};
                const end = length == null ? reader.len : reader.pos + length;
                while (reader.pos < end) {
                    const tag = reader.uint32();
                    switch (tag >>> 3) {
                        case 1:
                            obj.pubSubTopic = reader.string();
                            break;
                        case 2:
                            obj.message = WakuMessage$2.codec().decode(reader, reader.uint32());
                            break;
                        default:
                            // Unknown field: skip by wire type.
                            reader.skipType(tag & 7);
                            break;
                    }
                }
                return obj;
            });
        }
        return _codec;
    };
    PushRequest.encode = (obj) => {
        return encodeMessage(obj, PushRequest.codec());
    };
    PushRequest.decode = (buf) => {
        return decodeMessage(buf, PushRequest.codec());
    };
})(PushRequest || (PushRequest = {}));
|
|
// Protobuf codec for the light-push protocol's PushResponse:
// isSuccess (field 1, bool), info (field 2, string).
var PushResponse;
(function (PushResponse) {
    let _codec;
    PushResponse.codec = () => {
        if (_codec == null) {
            _codec = message((obj, writer, opts = {}) => {
                if (opts.lengthDelimited !== false) {
                    writer.fork();
                }
                if (obj.isSuccess != null) {
                    // 8 = field 1, wire type 0 (varint).
                    writer.uint32(8);
                    writer.bool(obj.isSuccess);
                }
                if (obj.info != null) {
                    // 18 = field 2, wire type 2 (length-delimited).
                    writer.uint32(18);
                    writer.string(obj.info);
                }
                if (opts.lengthDelimited !== false) {
                    writer.ldelim();
                }
            }, (reader, length) => {
                const obj = {};
                const end = length == null ? reader.len : reader.pos + length;
                while (reader.pos < end) {
                    const tag = reader.uint32();
                    switch (tag >>> 3) {
                        case 1:
                            obj.isSuccess = reader.bool();
                            break;
                        case 2:
                            obj.info = reader.string();
                            break;
                        default:
                            // Unknown field: skip by wire type.
                            reader.skipType(tag & 7);
                            break;
                    }
                }
                return obj;
            });
        }
        return _codec;
    };
    PushResponse.encode = (obj) => {
        return encodeMessage(obj, PushResponse.codec());
    };
    PushResponse.decode = (buf) => {
        return decodeMessage(buf, PushResponse.codec());
    };
})(PushResponse || (PushResponse = {}));
|
|
// Protobuf codec for the light-push protocol's top-level RPC envelope:
// requestId (field 1, string), request (field 2, PushRequest),
// response (field 3, PushResponse) — all optional.
var PushRPC;
(function (PushRPC) {
    let _codec;
    PushRPC.codec = () => {
        if (_codec == null) {
            _codec = message((obj, writer, opts = {}) => {
                if (opts.lengthDelimited !== false) {
                    writer.fork();
                }
                if (obj.requestId != null) {
                    // 10 = field 1, wire type 2 (length-delimited).
                    writer.uint32(10);
                    writer.string(obj.requestId);
                }
                if (obj.request != null) {
                    // 18 = field 2, wire type 2.
                    writer.uint32(18);
                    PushRequest.codec().encode(obj.request, writer);
                }
                if (obj.response != null) {
                    // 26 = field 3, wire type 2.
                    writer.uint32(26);
                    PushResponse.codec().encode(obj.response, writer);
                }
                if (opts.lengthDelimited !== false) {
                    writer.ldelim();
                }
            }, (reader, length) => {
                const obj = {};
                const end = length == null ? reader.len : reader.pos + length;
                while (reader.pos < end) {
                    const tag = reader.uint32();
                    switch (tag >>> 3) {
                        case 1:
                            obj.requestId = reader.string();
                            break;
                        case 2:
                            obj.request = PushRequest.codec().decode(reader, reader.uint32());
                            break;
                        case 3:
                            obj.response = PushResponse.codec().decode(reader, reader.uint32());
                            break;
                        default:
                            // Unknown field: skip by wire type.
                            reader.skipType(tag & 7);
                            break;
                    }
                }
                return obj;
            });
        }
        return _codec;
    };
    PushRPC.encode = (obj) => {
        return encodeMessage(obj, PushRPC.codec());
    };
    PushRPC.decode = (buf) => {
        return decodeMessage(buf, PushRPC.codec());
    };
})(PushRPC || (PushRPC = {}));
|
|
// Protobuf codec for the RLN RateLimitProof used by the light-push protocol's
// WakuMessage (a per-protocol duplicate of RateLimitProof$3; the bundler
// generated one copy per source module). All seven fields (1-7) are required
// bytes: proof, merkleRoot, epoch, shareX, shareY, nullifier, rlnIdentifier.
var RateLimitProof$2;
(function (RateLimitProof) {
    let _codec;
    RateLimitProof.codec = () => {
        if (_codec == null) {
            _codec = message((obj, writer, opts = {}) => {
                if (opts.lengthDelimited !== false) {
                    writer.fork();
                }
                // Tags 10/18/26/34/42/50/58 = fields 1-7, wire type 2.
                if (obj.proof != null) {
                    writer.uint32(10);
                    writer.bytes(obj.proof);
                }
                else {
                    throw new Error('Protocol error: required field "proof" was not found in object');
                }
                if (obj.merkleRoot != null) {
                    writer.uint32(18);
                    writer.bytes(obj.merkleRoot);
                }
                else {
                    throw new Error('Protocol error: required field "merkleRoot" was not found in object');
                }
                if (obj.epoch != null) {
                    writer.uint32(26);
                    writer.bytes(obj.epoch);
                }
                else {
                    throw new Error('Protocol error: required field "epoch" was not found in object');
                }
                if (obj.shareX != null) {
                    writer.uint32(34);
                    writer.bytes(obj.shareX);
                }
                else {
                    throw new Error('Protocol error: required field "shareX" was not found in object');
                }
                if (obj.shareY != null) {
                    writer.uint32(42);
                    writer.bytes(obj.shareY);
                }
                else {
                    throw new Error('Protocol error: required field "shareY" was not found in object');
                }
                if (obj.nullifier != null) {
                    writer.uint32(50);
                    writer.bytes(obj.nullifier);
                }
                else {
                    throw new Error('Protocol error: required field "nullifier" was not found in object');
                }
                if (obj.rlnIdentifier != null) {
                    writer.uint32(58);
                    writer.bytes(obj.rlnIdentifier);
                }
                else {
                    throw new Error('Protocol error: required field "rlnIdentifier" was not found in object');
                }
                if (opts.lengthDelimited !== false) {
                    writer.ldelim();
                }
            }, (reader, length) => {
                // Defaults so required fields are always present post-decode.
                const obj = {
                    proof: new Uint8Array(0),
                    merkleRoot: new Uint8Array(0),
                    epoch: new Uint8Array(0),
                    shareX: new Uint8Array(0),
                    shareY: new Uint8Array(0),
                    nullifier: new Uint8Array(0),
                    rlnIdentifier: new Uint8Array(0),
                };
                const end = length == null ? reader.len : reader.pos + length;
                while (reader.pos < end) {
                    const tag = reader.uint32();
                    switch (tag >>> 3) {
                        case 1:
                            obj.proof = reader.bytes();
                            break;
                        case 2:
                            obj.merkleRoot = reader.bytes();
                            break;
                        case 3:
                            obj.epoch = reader.bytes();
                            break;
                        case 4:
                            obj.shareX = reader.bytes();
                            break;
                        case 5:
                            obj.shareY = reader.bytes();
                            break;
                        case 6:
                            obj.nullifier = reader.bytes();
                            break;
                        case 7:
                            obj.rlnIdentifier = reader.bytes();
                            break;
                        default:
                            // Unknown field: skip by wire type.
                            reader.skipType(tag & 7);
                            break;
                    }
                }
                // Re-validate required fields (unreachable with the defaults
                // above unless a field was explicitly set to null).
                if (obj.proof == null) {
                    throw new Error('Protocol error: value for required field "proof" was not found in protobuf');
                }
                if (obj.merkleRoot == null) {
                    throw new Error('Protocol error: value for required field "merkleRoot" was not found in protobuf');
                }
                if (obj.epoch == null) {
                    throw new Error('Protocol error: value for required field "epoch" was not found in protobuf');
                }
                if (obj.shareX == null) {
                    throw new Error('Protocol error: value for required field "shareX" was not found in protobuf');
                }
                if (obj.shareY == null) {
                    throw new Error('Protocol error: value for required field "shareY" was not found in protobuf');
                }
                if (obj.nullifier == null) {
                    throw new Error('Protocol error: value for required field "nullifier" was not found in protobuf');
                }
                if (obj.rlnIdentifier == null) {
                    throw new Error('Protocol error: value for required field "rlnIdentifier" was not found in protobuf');
                }
                return obj;
            });
        }
        return _codec;
    };
    RateLimitProof.encode = (obj) => {
        return encodeMessage(obj, RateLimitProof.codec());
    };
    RateLimitProof.decode = (buf) => {
        return decodeMessage(buf, RateLimitProof.codec());
    };
})(RateLimitProof$2 || (RateLimitProof$2 = {}));
|
|
// Protobuf codec for the light-push protocol's WakuMessage (duplicate of
// WakuMessage$3, referencing RateLimitProof$2): payload (1, bytes),
// contentTopic (2, string), version (3, uint32), timestampDeprecated
// (4, double), timestamp (10, sint64), rateLimitProof (21, message).
var WakuMessage$2;
(function (WakuMessage) {
    let _codec;
    WakuMessage.codec = () => {
        if (_codec == null) {
            _codec = message((obj, writer, opts = {}) => {
                if (opts.lengthDelimited !== false) {
                    writer.fork();
                }
                if (obj.payload != null) {
                    // 10 = field 1, wire type 2 (length-delimited).
                    writer.uint32(10);
                    writer.bytes(obj.payload);
                }
                if (obj.contentTopic != null) {
                    // 18 = field 2, wire type 2.
                    writer.uint32(18);
                    writer.string(obj.contentTopic);
                }
                if (obj.version != null) {
                    // 24 = field 3, wire type 0 (varint).
                    writer.uint32(24);
                    writer.uint32(obj.version);
                }
                if (obj.timestampDeprecated != null) {
                    // 33 = field 4, wire type 1 (64-bit fixed).
                    writer.uint32(33);
                    writer.double(obj.timestampDeprecated);
                }
                if (obj.timestamp != null) {
                    // 80 = field 10, wire type 0 (varint, zigzag).
                    writer.uint32(80);
                    writer.sint64(obj.timestamp);
                }
                if (obj.rateLimitProof != null) {
                    // 170 = field 21, wire type 2.
                    writer.uint32(170);
                    RateLimitProof$2.codec().encode(obj.rateLimitProof, writer);
                }
                if (opts.lengthDelimited !== false) {
                    writer.ldelim();
                }
            }, (reader, length) => {
                const obj = {};
                const end = length == null ? reader.len : reader.pos + length;
                while (reader.pos < end) {
                    const tag = reader.uint32();
                    switch (tag >>> 3) {
                        case 1:
                            obj.payload = reader.bytes();
                            break;
                        case 2:
                            obj.contentTopic = reader.string();
                            break;
                        case 3:
                            obj.version = reader.uint32();
                            break;
                        case 4:
                            obj.timestampDeprecated = reader.double();
                            break;
                        case 10:
                            obj.timestamp = reader.sint64();
                            break;
                        case 21:
                            obj.rateLimitProof = RateLimitProof$2.codec().decode(reader, reader.uint32());
                            break;
                        default:
                            // Unknown field: skip by wire type.
                            reader.skipType(tag & 7);
                            break;
                    }
                }
                return obj;
            });
        }
        return _codec;
    };
    WakuMessage.encode = (obj) => {
        return encodeMessage(obj, WakuMessage.codec());
    };
    WakuMessage.decode = (buf) => {
        return decodeMessage(buf, WakuMessage.codec());
    };
})(WakuMessage$2 || (WakuMessage$2 = {}));
|
|
|
|
// Instantiate debug loggers; return values discarded (presumably
// tree-shaking residue from bundling).
debug("waku:light-push");

debug("waku:message:version-0");

// Result discarded — appears to be a leftover constant evaluation from
// bundling; kept because BigInt() calls are not provably side-effect free.
BigInt(1000000);
|
|
|
|
/**
 * RelayCodec is the libp2p identifier for the waku relay protocol
 */
// Both the legacy beta protocol ID and the stable 2.0.0 ID are listed so the
// node can interoperate with peers speaking either version.
const RelayCodecs = [
    "/vac/waku/relay/2.0.0-beta2",
    "/vac/waku/relay/2.0.0",
];
|
|
|
|
/* eslint-disable import/export */
// Protobuf codec for the store protocol's pagination Index (message cursor):
// digest (1, bytes), receivedTime (2, sint64), senderTime (3, sint64),
// pubsubTopic (4, string).
var Index$1;
(function (Index) {
    let _codec;
    Index.codec = () => {
        if (_codec == null) {
            _codec = message((obj, writer, opts = {}) => {
                if (opts.lengthDelimited !== false) {
                    writer.fork();
                }
                if (obj.digest != null) {
                    // 10 = field 1, wire type 2 (length-delimited).
                    writer.uint32(10);
                    writer.bytes(obj.digest);
                }
                if (obj.receivedTime != null) {
                    // 16 = field 2, wire type 0 (varint, zigzag).
                    writer.uint32(16);
                    writer.sint64(obj.receivedTime);
                }
                if (obj.senderTime != null) {
                    // 24 = field 3, wire type 0 (varint, zigzag).
                    writer.uint32(24);
                    writer.sint64(obj.senderTime);
                }
                if (obj.pubsubTopic != null) {
                    // 34 = field 4, wire type 2.
                    writer.uint32(34);
                    writer.string(obj.pubsubTopic);
                }
                if (opts.lengthDelimited !== false) {
                    writer.ldelim();
                }
            }, (reader, length) => {
                const obj = {};
                const end = length == null ? reader.len : reader.pos + length;
                while (reader.pos < end) {
                    const tag = reader.uint32();
                    switch (tag >>> 3) {
                        case 1:
                            obj.digest = reader.bytes();
                            break;
                        case 2:
                            obj.receivedTime = reader.sint64();
                            break;
                        case 3:
                            obj.senderTime = reader.sint64();
                            break;
                        case 4:
                            obj.pubsubTopic = reader.string();
                            break;
                        default:
                            // Unknown field: skip by wire type.
                            reader.skipType(tag & 7);
                            break;
                    }
                }
                return obj;
            });
        }
        return _codec;
    };
    Index.encode = (obj) => {
        return encodeMessage(obj, Index.codec());
    };
    Index.decode = (buf) => {
        return decodeMessage(buf, Index.codec());
    };
})(Index$1 || (Index$1 = {}));
|
|
// Protobuf codec for the store protocol's PagingInfo: pageSize (1, uint64),
// cursor (2, Index), direction (3, enum). The Direction enum is represented
// as string keys in JS with numeric wire values 0/1.
var PagingInfo$1;
(function (PagingInfo) {
    // String-valued enum as seen by JS callers.
    (function (Direction) {
        Direction["DIRECTION_BACKWARD_UNSPECIFIED"] = "DIRECTION_BACKWARD_UNSPECIFIED";
        Direction["DIRECTION_FORWARD"] = "DIRECTION_FORWARD";
    })(PagingInfo.Direction || (PagingInfo.Direction = {}));
    // Bidirectional name <-> wire-value map used by the enum codec.
    let __DirectionValues;
    (function (__DirectionValues) {
        __DirectionValues[__DirectionValues["DIRECTION_BACKWARD_UNSPECIFIED"] = 0] = "DIRECTION_BACKWARD_UNSPECIFIED";
        __DirectionValues[__DirectionValues["DIRECTION_FORWARD"] = 1] = "DIRECTION_FORWARD";
    })(__DirectionValues || (__DirectionValues = {}));
    (function (Direction) {
        Direction.codec = () => {
            return enumeration(__DirectionValues);
        };
    })(PagingInfo.Direction || (PagingInfo.Direction = {}));
    let _codec;
    PagingInfo.codec = () => {
        if (_codec == null) {
            _codec = message((obj, writer, opts = {}) => {
                if (opts.lengthDelimited !== false) {
                    writer.fork();
                }
                if (obj.pageSize != null) {
                    // 8 = field 1, wire type 0 (varint).
                    writer.uint32(8);
                    writer.uint64(obj.pageSize);
                }
                if (obj.cursor != null) {
                    // 18 = field 2, wire type 2 (length-delimited).
                    writer.uint32(18);
                    Index$1.codec().encode(obj.cursor, writer);
                }
                if (obj.direction != null) {
                    // 24 = field 3, wire type 0 (varint enum).
                    writer.uint32(24);
                    PagingInfo.Direction.codec().encode(obj.direction, writer);
                }
                if (opts.lengthDelimited !== false) {
                    writer.ldelim();
                }
            }, (reader, length) => {
                const obj = {};
                const end = length == null ? reader.len : reader.pos + length;
                while (reader.pos < end) {
                    const tag = reader.uint32();
                    switch (tag >>> 3) {
                        case 1:
                            obj.pageSize = reader.uint64();
                            break;
                        case 2:
                            obj.cursor = Index$1.codec().decode(reader, reader.uint32());
                            break;
                        case 3:
                            obj.direction = PagingInfo.Direction.codec().decode(reader);
                            break;
                        default:
                            // Unknown field: skip by wire type.
                            reader.skipType(tag & 7);
                            break;
                    }
                }
                return obj;
            });
        }
        return _codec;
    };
    PagingInfo.encode = (obj) => {
        return encodeMessage(obj, PagingInfo.codec());
    };
    PagingInfo.decode = (buf) => {
        return decodeMessage(buf, PagingInfo.codec());
    };
})(PagingInfo$1 || (PagingInfo$1 = {}));
|
|
// Generated protobuf codec for the store `ContentFilter` message: a single
// optional `contentTopic` string (field 1) used to filter history queries.
var ContentFilter$1;
(function (ContentFilter) {
    let _codec; // lazily created, cached codec instance
    ContentFilter.codec = () => {
        if (_codec == null) {
            _codec = message((obj, writer, opts = {}) => {
                if (opts.lengthDelimited !== false) {
                    writer.fork();
                }
                if (obj.contentTopic != null) {
                    writer.uint32(10); // field 1, length-delimited string
                    writer.string(obj.contentTopic);
                }
                if (opts.lengthDelimited !== false) {
                    writer.ldelim();
                }
            }, (reader, length) => {
                const obj = {};
                const end = length == null ? reader.len : reader.pos + length;
                while (reader.pos < end) {
                    const tag = reader.uint32();
                    switch (tag >>> 3) {
                        case 1:
                            obj.contentTopic = reader.string();
                            break;
                        default:
                            reader.skipType(tag & 7); // skip unknown fields
                            break;
                    }
                }
                return obj;
            });
        }
        return _codec;
    };
    ContentFilter.encode = (obj) => {
        return encodeMessage(obj, ContentFilter.codec());
    };
    ContentFilter.decode = (buf) => {
        return decodeMessage(buf, ContentFilter.codec());
    };
})(ContentFilter$1 || (ContentFilter$1 = {}));
|
|
// Generated protobuf codec for the store `HistoryQuery` message.
// In this copy start/end times are sint64 varints (fields 5/6, tags 40/48);
// the older duplicate later in the bundle encodes them as doubles —
// presumably the two bundled store-protocol schema versions (confirm
// against upstream js-waku). Note field 1 is unused; pubSubTopic is field 2.
var HistoryQuery$1;
(function (HistoryQuery) {
    let _codec; // lazily created, cached codec instance
    HistoryQuery.codec = () => {
        if (_codec == null) {
            _codec = message((obj, writer, opts = {}) => {
                if (opts.lengthDelimited !== false) {
                    writer.fork();
                }
                if (obj.pubSubTopic != null) {
                    writer.uint32(18); // field 2, string
                    writer.string(obj.pubSubTopic);
                }
                // contentFilters is required and repeated (one tagged
                // sub-message per element).
                if (obj.contentFilters != null) {
                    for (const value of obj.contentFilters) {
                        writer.uint32(26); // field 3, length-delimited
                        ContentFilter$1.codec().encode(value, writer);
                    }
                }
                else {
                    throw new Error('Protocol error: required field "contentFilters" was not found in object');
                }
                if (obj.pagingInfo != null) {
                    writer.uint32(34); // field 4, length-delimited
                    PagingInfo$1.codec().encode(obj.pagingInfo, writer);
                }
                if (obj.startTime != null) {
                    writer.uint32(40); // field 5, varint (sint64, zigzag)
                    writer.sint64(obj.startTime);
                }
                if (obj.endTime != null) {
                    writer.uint32(48); // field 6, varint (sint64, zigzag)
                    writer.sint64(obj.endTime);
                }
                if (opts.lengthDelimited !== false) {
                    writer.ldelim();
                }
            }, (reader, length) => {
                // Repeated field starts empty so decode always yields an array.
                const obj = {
                    contentFilters: [],
                };
                const end = length == null ? reader.len : reader.pos + length;
                while (reader.pos < end) {
                    const tag = reader.uint32();
                    switch (tag >>> 3) {
                        case 2:
                            obj.pubSubTopic = reader.string();
                            break;
                        case 3:
                            obj.contentFilters.push(ContentFilter$1.codec().decode(reader, reader.uint32()));
                            break;
                        case 4:
                            obj.pagingInfo = PagingInfo$1.codec().decode(reader, reader.uint32());
                            break;
                        case 5:
                            obj.startTime = reader.sint64();
                            break;
                        case 6:
                            obj.endTime = reader.sint64();
                            break;
                        default:
                            reader.skipType(tag & 7); // skip unknown fields
                            break;
                    }
                }
                return obj;
            });
        }
        return _codec;
    };
    HistoryQuery.encode = (obj) => {
        return encodeMessage(obj, HistoryQuery.codec());
    };
    HistoryQuery.decode = (buf) => {
        return decodeMessage(buf, HistoryQuery.codec());
    };
})(HistoryQuery$1 || (HistoryQuery$1 = {}));
|
|
// Generated protobuf codec for the store `HistoryResponse` message:
// the messages page returned for a HistoryQuery, plus pagination state and
// an error enum. Field 1 is unused; messages start at field 2.
var HistoryResponse$1;
(function (HistoryResponse) {
    // String-valued `HistoryError` enum exposed to application code.
    (function (HistoryError) {
        HistoryError["ERROR_NONE_UNSPECIFIED"] = "ERROR_NONE_UNSPECIFIED";
        HistoryError["ERROR_INVALID_CURSOR"] = "ERROR_INVALID_CURSOR";
    })(HistoryResponse.HistoryError || (HistoryResponse.HistoryError = {}));
    // Bidirectional name<->number map used for the wire encoding.
    let __HistoryErrorValues;
    (function (__HistoryErrorValues) {
        __HistoryErrorValues[__HistoryErrorValues["ERROR_NONE_UNSPECIFIED"] = 0] = "ERROR_NONE_UNSPECIFIED";
        __HistoryErrorValues[__HistoryErrorValues["ERROR_INVALID_CURSOR"] = 1] = "ERROR_INVALID_CURSOR";
    })(__HistoryErrorValues || (__HistoryErrorValues = {}));
    (function (HistoryError) {
        HistoryError.codec = () => {
            return enumeration(__HistoryErrorValues);
        };
    })(HistoryResponse.HistoryError || (HistoryResponse.HistoryError = {}));
    let _codec; // lazily created, cached codec instance
    HistoryResponse.codec = () => {
        if (_codec == null) {
            _codec = message((obj, writer, opts = {}) => {
                if (opts.lengthDelimited !== false) {
                    writer.fork();
                }
                // messages is required and repeated.
                if (obj.messages != null) {
                    for (const value of obj.messages) {
                        writer.uint32(18); // field 2, length-delimited
                        WakuMessage$1.codec().encode(value, writer);
                    }
                }
                else {
                    throw new Error('Protocol error: required field "messages" was not found in object');
                }
                if (obj.pagingInfo != null) {
                    writer.uint32(26); // field 3, length-delimited
                    PagingInfo$1.codec().encode(obj.pagingInfo, writer);
                }
                if (obj.error != null) {
                    writer.uint32(32); // field 4, varint (enum)
                    HistoryResponse.HistoryError.codec().encode(obj.error, writer);
                }
                if (opts.lengthDelimited !== false) {
                    writer.ldelim();
                }
            }, (reader, length) => {
                const obj = {
                    messages: [],
                };
                const end = length == null ? reader.len : reader.pos + length;
                while (reader.pos < end) {
                    const tag = reader.uint32();
                    switch (tag >>> 3) {
                        case 2:
                            obj.messages.push(WakuMessage$1.codec().decode(reader, reader.uint32()));
                            break;
                        case 3:
                            obj.pagingInfo = PagingInfo$1.codec().decode(reader, reader.uint32());
                            break;
                        case 4:
                            obj.error = HistoryResponse.HistoryError.codec().decode(reader);
                            break;
                        default:
                            reader.skipType(tag & 7); // skip unknown fields
                            break;
                    }
                }
                return obj;
            });
        }
        return _codec;
    };
    HistoryResponse.encode = (obj) => {
        return encodeMessage(obj, HistoryResponse.codec());
    };
    HistoryResponse.decode = (buf) => {
        return decodeMessage(buf, HistoryResponse.codec());
    };
})(HistoryResponse$1 || (HistoryResponse$1 = {}));
|
|
// Generated protobuf codec for the store `HistoryRPC` envelope: a request id
// plus either a query or a response sub-message.
var HistoryRPC$1;
(function (HistoryRPC) {
    let _codec; // lazily created, cached codec instance
    HistoryRPC.codec = () => {
        if (_codec == null) {
            _codec = message((obj, writer, opts = {}) => {
                if (opts.lengthDelimited !== false) {
                    writer.fork();
                }
                if (obj.requestId != null) {
                    writer.uint32(10); // field 1, string
                    writer.string(obj.requestId);
                }
                if (obj.query != null) {
                    writer.uint32(18); // field 2, length-delimited
                    HistoryQuery$1.codec().encode(obj.query, writer);
                }
                if (obj.response != null) {
                    writer.uint32(26); // field 3, length-delimited
                    HistoryResponse$1.codec().encode(obj.response, writer);
                }
                if (opts.lengthDelimited !== false) {
                    writer.ldelim();
                }
            }, (reader, length) => {
                const obj = {};
                const end = length == null ? reader.len : reader.pos + length;
                while (reader.pos < end) {
                    const tag = reader.uint32();
                    switch (tag >>> 3) {
                        case 1:
                            obj.requestId = reader.string();
                            break;
                        case 2:
                            obj.query = HistoryQuery$1.codec().decode(reader, reader.uint32());
                            break;
                        case 3:
                            obj.response = HistoryResponse$1.codec().decode(reader, reader.uint32());
                            break;
                        default:
                            reader.skipType(tag & 7); // skip unknown fields
                            break;
                    }
                }
                return obj;
            });
        }
        return _codec;
    };
    HistoryRPC.encode = (obj) => {
        return encodeMessage(obj, HistoryRPC.codec());
    };
    HistoryRPC.decode = (buf) => {
        return decodeMessage(buf, HistoryRPC.codec());
    };
})(HistoryRPC$1 || (HistoryRPC$1 = {}));
|
|
// Generated protobuf codec for the RLN `RateLimitProof` message.
// All seven byte fields (fields 1-7, all length-delimited) are required:
// encode throws when any is missing, and decode initializes each to an
// empty Uint8Array then re-checks for null after reading.
var RateLimitProof$1;
(function (RateLimitProof) {
    let _codec; // lazily created, cached codec instance
    RateLimitProof.codec = () => {
        if (_codec == null) {
            _codec = message((obj, writer, opts = {}) => {
                if (opts.lengthDelimited !== false) {
                    writer.fork();
                }
                if (obj.proof != null) {
                    writer.uint32(10); // field 1, bytes
                    writer.bytes(obj.proof);
                }
                else {
                    throw new Error('Protocol error: required field "proof" was not found in object');
                }
                if (obj.merkleRoot != null) {
                    writer.uint32(18); // field 2, bytes
                    writer.bytes(obj.merkleRoot);
                }
                else {
                    throw new Error('Protocol error: required field "merkleRoot" was not found in object');
                }
                if (obj.epoch != null) {
                    writer.uint32(26); // field 3, bytes
                    writer.bytes(obj.epoch);
                }
                else {
                    throw new Error('Protocol error: required field "epoch" was not found in object');
                }
                if (obj.shareX != null) {
                    writer.uint32(34); // field 4, bytes
                    writer.bytes(obj.shareX);
                }
                else {
                    throw new Error('Protocol error: required field "shareX" was not found in object');
                }
                if (obj.shareY != null) {
                    writer.uint32(42); // field 5, bytes
                    writer.bytes(obj.shareY);
                }
                else {
                    throw new Error('Protocol error: required field "shareY" was not found in object');
                }
                if (obj.nullifier != null) {
                    writer.uint32(50); // field 6, bytes
                    writer.bytes(obj.nullifier);
                }
                else {
                    throw new Error('Protocol error: required field "nullifier" was not found in object');
                }
                if (obj.rlnIdentifier != null) {
                    writer.uint32(58); // field 7, bytes
                    writer.bytes(obj.rlnIdentifier);
                }
                else {
                    throw new Error('Protocol error: required field "rlnIdentifier" was not found in object');
                }
                if (opts.lengthDelimited !== false) {
                    writer.ldelim();
                }
            }, (reader, length) => {
                // Defaults ensure every required field is non-null even if
                // absent on the wire (the post-loop checks then never fire
                // for these defaults).
                const obj = {
                    proof: new Uint8Array(0),
                    merkleRoot: new Uint8Array(0),
                    epoch: new Uint8Array(0),
                    shareX: new Uint8Array(0),
                    shareY: new Uint8Array(0),
                    nullifier: new Uint8Array(0),
                    rlnIdentifier: new Uint8Array(0),
                };
                const end = length == null ? reader.len : reader.pos + length;
                while (reader.pos < end) {
                    const tag = reader.uint32();
                    switch (tag >>> 3) {
                        case 1:
                            obj.proof = reader.bytes();
                            break;
                        case 2:
                            obj.merkleRoot = reader.bytes();
                            break;
                        case 3:
                            obj.epoch = reader.bytes();
                            break;
                        case 4:
                            obj.shareX = reader.bytes();
                            break;
                        case 5:
                            obj.shareY = reader.bytes();
                            break;
                        case 6:
                            obj.nullifier = reader.bytes();
                            break;
                        case 7:
                            obj.rlnIdentifier = reader.bytes();
                            break;
                        default:
                            reader.skipType(tag & 7); // skip unknown fields
                            break;
                    }
                }
                if (obj.proof == null) {
                    throw new Error('Protocol error: value for required field "proof" was not found in protobuf');
                }
                if (obj.merkleRoot == null) {
                    throw new Error('Protocol error: value for required field "merkleRoot" was not found in protobuf');
                }
                if (obj.epoch == null) {
                    throw new Error('Protocol error: value for required field "epoch" was not found in protobuf');
                }
                if (obj.shareX == null) {
                    throw new Error('Protocol error: value for required field "shareX" was not found in protobuf');
                }
                if (obj.shareY == null) {
                    throw new Error('Protocol error: value for required field "shareY" was not found in protobuf');
                }
                if (obj.nullifier == null) {
                    throw new Error('Protocol error: value for required field "nullifier" was not found in protobuf');
                }
                if (obj.rlnIdentifier == null) {
                    throw new Error('Protocol error: value for required field "rlnIdentifier" was not found in protobuf');
                }
                return obj;
            });
        }
        return _codec;
    };
    RateLimitProof.encode = (obj) => {
        return encodeMessage(obj, RateLimitProof.codec());
    };
    RateLimitProof.decode = (buf) => {
        return decodeMessage(buf, RateLimitProof.codec());
    };
})(RateLimitProof$1 || (RateLimitProof$1 = {}));
|
|
// Generated protobuf codec for `WakuMessage`. Note the non-contiguous field
// numbers: timestampDeprecated is field 4 (fixed64 double, tag 33),
// timestamp is field 10 (sint64 varint, tag 80), and rateLimitProof is
// field 21 (length-delimited, tag 170 = 21 << 3 | 2).
var WakuMessage$1;
(function (WakuMessage) {
    let _codec; // lazily created, cached codec instance
    WakuMessage.codec = () => {
        if (_codec == null) {
            _codec = message((obj, writer, opts = {}) => {
                if (opts.lengthDelimited !== false) {
                    writer.fork();
                }
                if (obj.payload != null) {
                    writer.uint32(10); // field 1, bytes
                    writer.bytes(obj.payload);
                }
                if (obj.contentTopic != null) {
                    writer.uint32(18); // field 2, string
                    writer.string(obj.contentTopic);
                }
                if (obj.version != null) {
                    writer.uint32(24); // field 3, varint
                    writer.uint32(obj.version);
                }
                if (obj.timestampDeprecated != null) {
                    writer.uint32(33); // field 4, fixed64 (double)
                    writer.double(obj.timestampDeprecated);
                }
                if (obj.timestamp != null) {
                    writer.uint32(80); // field 10, varint (sint64)
                    writer.sint64(obj.timestamp);
                }
                if (obj.rateLimitProof != null) {
                    writer.uint32(170); // field 21, length-delimited
                    RateLimitProof$1.codec().encode(obj.rateLimitProof, writer);
                }
                if (opts.lengthDelimited !== false) {
                    writer.ldelim();
                }
            }, (reader, length) => {
                const obj = {};
                const end = length == null ? reader.len : reader.pos + length;
                while (reader.pos < end) {
                    const tag = reader.uint32();
                    switch (tag >>> 3) {
                        case 1:
                            obj.payload = reader.bytes();
                            break;
                        case 2:
                            obj.contentTopic = reader.string();
                            break;
                        case 3:
                            obj.version = reader.uint32();
                            break;
                        case 4:
                            obj.timestampDeprecated = reader.double();
                            break;
                        case 10:
                            obj.timestamp = reader.sint64();
                            break;
                        case 21:
                            obj.rateLimitProof = RateLimitProof$1.codec().decode(reader, reader.uint32());
                            break;
                        default:
                            reader.skipType(tag & 7); // skip unknown fields
                            break;
                    }
                }
                return obj;
            });
        }
        return _codec;
    };
    WakuMessage.encode = (obj) => {
        return encodeMessage(obj, WakuMessage.codec());
    };
    WakuMessage.decode = (buf) => {
        return decodeMessage(buf, WakuMessage.codec());
    };
})(WakuMessage$1 || (WakuMessage$1 = {}));
|
|
|
|
/* eslint-disable import/export */
|
|
// Generated protobuf codec for the store cursor `Index` message — second
// bundled copy of the bindings (no `$1` suffix). Here receivedTime and
// senderTime are encoded as doubles (fixed64, tags 17/25), unlike the
// sint64 encoding used by the `$1` copy — presumably the older store
// schema version (confirm against upstream js-waku).
var Index;
(function (Index) {
    let _codec; // lazily created, cached codec instance
    Index.codec = () => {
        if (_codec == null) {
            _codec = message((obj, writer, opts = {}) => {
                if (opts.lengthDelimited !== false) {
                    writer.fork();
                }
                if (obj.digest != null) {
                    writer.uint32(10); // field 1, bytes
                    writer.bytes(obj.digest);
                }
                if (obj.receivedTime != null) {
                    writer.uint32(17); // field 2, fixed64 (double)
                    writer.double(obj.receivedTime);
                }
                if (obj.senderTime != null) {
                    writer.uint32(25); // field 3, fixed64 (double)
                    writer.double(obj.senderTime);
                }
                if (opts.lengthDelimited !== false) {
                    writer.ldelim();
                }
            }, (reader, length) => {
                const obj = {};
                const end = length == null ? reader.len : reader.pos + length;
                while (reader.pos < end) {
                    const tag = reader.uint32();
                    switch (tag >>> 3) {
                        case 1:
                            obj.digest = reader.bytes();
                            break;
                        case 2:
                            obj.receivedTime = reader.double();
                            break;
                        case 3:
                            obj.senderTime = reader.double();
                            break;
                        default:
                            reader.skipType(tag & 7); // skip unknown fields
                            break;
                    }
                }
                return obj;
            });
        }
        return _codec;
    };
    Index.encode = (obj) => {
        return encodeMessage(obj, Index.codec());
    };
    Index.decode = (buf) => {
        return decodeMessage(buf, Index.codec());
    };
})(Index || (Index = {}));
|
|
// Generated protobuf codec for `PagingInfo` — second bundled copy (no `$1`
// suffix); structurally identical to PagingInfo$1 but wired to this copy's
// `Index` codec.
var PagingInfo;
(function (PagingInfo) {
    // String-valued `Direction` enum exposed to application code.
    (function (Direction) {
        Direction["DIRECTION_BACKWARD_UNSPECIFIED"] = "DIRECTION_BACKWARD_UNSPECIFIED";
        Direction["DIRECTION_FORWARD"] = "DIRECTION_FORWARD";
    })(PagingInfo.Direction || (PagingInfo.Direction = {}));
    // Bidirectional name<->number map used for the wire (varint) encoding.
    let __DirectionValues;
    (function (__DirectionValues) {
        __DirectionValues[__DirectionValues["DIRECTION_BACKWARD_UNSPECIFIED"] = 0] = "DIRECTION_BACKWARD_UNSPECIFIED";
        __DirectionValues[__DirectionValues["DIRECTION_FORWARD"] = 1] = "DIRECTION_FORWARD";
    })(__DirectionValues || (__DirectionValues = {}));
    (function (Direction) {
        Direction.codec = () => {
            return enumeration(__DirectionValues);
        };
    })(PagingInfo.Direction || (PagingInfo.Direction = {}));
    let _codec; // lazily created, cached codec instance
    PagingInfo.codec = () => {
        if (_codec == null) {
            _codec = message((obj, writer, opts = {}) => {
                if (opts.lengthDelimited !== false) {
                    writer.fork();
                }
                if (obj.pageSize != null) {
                    writer.uint32(8); // field 1, varint
                    writer.uint64(obj.pageSize);
                }
                if (obj.cursor != null) {
                    writer.uint32(18); // field 2, length-delimited (Index)
                    Index.codec().encode(obj.cursor, writer);
                }
                if (obj.direction != null) {
                    writer.uint32(24); // field 3, varint (enum)
                    PagingInfo.Direction.codec().encode(obj.direction, writer);
                }
                if (opts.lengthDelimited !== false) {
                    writer.ldelim();
                }
            }, (reader, length) => {
                const obj = {};
                const end = length == null ? reader.len : reader.pos + length;
                while (reader.pos < end) {
                    const tag = reader.uint32();
                    switch (tag >>> 3) {
                        case 1:
                            obj.pageSize = reader.uint64();
                            break;
                        case 2:
                            obj.cursor = Index.codec().decode(reader, reader.uint32());
                            break;
                        case 3:
                            obj.direction = PagingInfo.Direction.codec().decode(reader);
                            break;
                        default:
                            reader.skipType(tag & 7); // skip unknown fields
                            break;
                    }
                }
                return obj;
            });
        }
        return _codec;
    };
    PagingInfo.encode = (obj) => {
        return encodeMessage(obj, PagingInfo.codec());
    };
    PagingInfo.decode = (buf) => {
        return decodeMessage(buf, PagingInfo.codec());
    };
})(PagingInfo || (PagingInfo = {}));
|
|
// Generated protobuf codec for `ContentFilter` — second bundled copy;
// a single optional `contentTopic` string (field 1).
var ContentFilter;
(function (ContentFilter) {
    let _codec; // lazily created, cached codec instance
    ContentFilter.codec = () => {
        if (_codec == null) {
            _codec = message((obj, writer, opts = {}) => {
                if (opts.lengthDelimited !== false) {
                    writer.fork();
                }
                if (obj.contentTopic != null) {
                    writer.uint32(10); // field 1, length-delimited string
                    writer.string(obj.contentTopic);
                }
                if (opts.lengthDelimited !== false) {
                    writer.ldelim();
                }
            }, (reader, length) => {
                const obj = {};
                const end = length == null ? reader.len : reader.pos + length;
                while (reader.pos < end) {
                    const tag = reader.uint32();
                    switch (tag >>> 3) {
                        case 1:
                            obj.contentTopic = reader.string();
                            break;
                        default:
                            reader.skipType(tag & 7); // skip unknown fields
                            break;
                    }
                }
                return obj;
            });
        }
        return _codec;
    };
    ContentFilter.encode = (obj) => {
        return encodeMessage(obj, ContentFilter.codec());
    };
    ContentFilter.decode = (buf) => {
        return decodeMessage(buf, ContentFilter.codec());
    };
})(ContentFilter || (ContentFilter = {}));
|
|
// Generated protobuf codec for `HistoryQuery` — second bundled copy.
// Unlike HistoryQuery$1, start/end times here are doubles (fixed64,
// tags 41/49) rather than sint64 varints. Field 1 is unused.
var HistoryQuery;
(function (HistoryQuery) {
    let _codec; // lazily created, cached codec instance
    HistoryQuery.codec = () => {
        if (_codec == null) {
            _codec = message((obj, writer, opts = {}) => {
                if (opts.lengthDelimited !== false) {
                    writer.fork();
                }
                if (obj.pubSubTopic != null) {
                    writer.uint32(18); // field 2, string
                    writer.string(obj.pubSubTopic);
                }
                // contentFilters is required and repeated.
                if (obj.contentFilters != null) {
                    for (const value of obj.contentFilters) {
                        writer.uint32(26); // field 3, length-delimited
                        ContentFilter.codec().encode(value, writer);
                    }
                }
                else {
                    throw new Error('Protocol error: required field "contentFilters" was not found in object');
                }
                if (obj.pagingInfo != null) {
                    writer.uint32(34); // field 4, length-delimited
                    PagingInfo.codec().encode(obj.pagingInfo, writer);
                }
                if (obj.startTime != null) {
                    writer.uint32(41); // field 5, fixed64 (double)
                    writer.double(obj.startTime);
                }
                if (obj.endTime != null) {
                    writer.uint32(49); // field 6, fixed64 (double)
                    writer.double(obj.endTime);
                }
                if (opts.lengthDelimited !== false) {
                    writer.ldelim();
                }
            }, (reader, length) => {
                // Repeated field starts empty so decode always yields an array.
                const obj = {
                    contentFilters: [],
                };
                const end = length == null ? reader.len : reader.pos + length;
                while (reader.pos < end) {
                    const tag = reader.uint32();
                    switch (tag >>> 3) {
                        case 2:
                            obj.pubSubTopic = reader.string();
                            break;
                        case 3:
                            obj.contentFilters.push(ContentFilter.codec().decode(reader, reader.uint32()));
                            break;
                        case 4:
                            obj.pagingInfo = PagingInfo.codec().decode(reader, reader.uint32());
                            break;
                        case 5:
                            obj.startTime = reader.double();
                            break;
                        case 6:
                            obj.endTime = reader.double();
                            break;
                        default:
                            reader.skipType(tag & 7); // skip unknown fields
                            break;
                    }
                }
                return obj;
            });
        }
        return _codec;
    };
    HistoryQuery.encode = (obj) => {
        return encodeMessage(obj, HistoryQuery.codec());
    };
    HistoryQuery.decode = (buf) => {
        return decodeMessage(buf, HistoryQuery.codec());
    };
})(HistoryQuery || (HistoryQuery = {}));
|
|
// Generated protobuf codec for `HistoryResponse` — second bundled copy;
// structurally identical to HistoryResponse$1 but wired to this copy's
// `WakuMessage`/`PagingInfo` codecs. Field 1 is unused.
var HistoryResponse;
(function (HistoryResponse) {
    // String-valued `HistoryError` enum exposed to application code.
    (function (HistoryError) {
        HistoryError["ERROR_NONE_UNSPECIFIED"] = "ERROR_NONE_UNSPECIFIED";
        HistoryError["ERROR_INVALID_CURSOR"] = "ERROR_INVALID_CURSOR";
    })(HistoryResponse.HistoryError || (HistoryResponse.HistoryError = {}));
    // Bidirectional name<->number map used for the wire encoding.
    let __HistoryErrorValues;
    (function (__HistoryErrorValues) {
        __HistoryErrorValues[__HistoryErrorValues["ERROR_NONE_UNSPECIFIED"] = 0] = "ERROR_NONE_UNSPECIFIED";
        __HistoryErrorValues[__HistoryErrorValues["ERROR_INVALID_CURSOR"] = 1] = "ERROR_INVALID_CURSOR";
    })(__HistoryErrorValues || (__HistoryErrorValues = {}));
    (function (HistoryError) {
        HistoryError.codec = () => {
            return enumeration(__HistoryErrorValues);
        };
    })(HistoryResponse.HistoryError || (HistoryResponse.HistoryError = {}));
    let _codec; // lazily created, cached codec instance
    HistoryResponse.codec = () => {
        if (_codec == null) {
            _codec = message((obj, writer, opts = {}) => {
                if (opts.lengthDelimited !== false) {
                    writer.fork();
                }
                // messages is required and repeated.
                if (obj.messages != null) {
                    for (const value of obj.messages) {
                        writer.uint32(18); // field 2, length-delimited
                        WakuMessage.codec().encode(value, writer);
                    }
                }
                else {
                    throw new Error('Protocol error: required field "messages" was not found in object');
                }
                if (obj.pagingInfo != null) {
                    writer.uint32(26); // field 3, length-delimited
                    PagingInfo.codec().encode(obj.pagingInfo, writer);
                }
                if (obj.error != null) {
                    writer.uint32(32); // field 4, varint (enum)
                    HistoryResponse.HistoryError.codec().encode(obj.error, writer);
                }
                if (opts.lengthDelimited !== false) {
                    writer.ldelim();
                }
            }, (reader, length) => {
                const obj = {
                    messages: [],
                };
                const end = length == null ? reader.len : reader.pos + length;
                while (reader.pos < end) {
                    const tag = reader.uint32();
                    switch (tag >>> 3) {
                        case 2:
                            obj.messages.push(WakuMessage.codec().decode(reader, reader.uint32()));
                            break;
                        case 3:
                            obj.pagingInfo = PagingInfo.codec().decode(reader, reader.uint32());
                            break;
                        case 4:
                            obj.error = HistoryResponse.HistoryError.codec().decode(reader);
                            break;
                        default:
                            reader.skipType(tag & 7); // skip unknown fields
                            break;
                    }
                }
                return obj;
            });
        }
        return _codec;
    };
    HistoryResponse.encode = (obj) => {
        return encodeMessage(obj, HistoryResponse.codec());
    };
    HistoryResponse.decode = (buf) => {
        return decodeMessage(buf, HistoryResponse.codec());
    };
})(HistoryResponse || (HistoryResponse = {}));
|
|
// Generated protobuf codec for the `HistoryRPC` envelope — second bundled
// copy; request id plus either a query or a response sub-message.
var HistoryRPC;
(function (HistoryRPC) {
    let _codec; // lazily created, cached codec instance
    HistoryRPC.codec = () => {
        if (_codec == null) {
            _codec = message((obj, writer, opts = {}) => {
                if (opts.lengthDelimited !== false) {
                    writer.fork();
                }
                if (obj.requestId != null) {
                    writer.uint32(10); // field 1, string
                    writer.string(obj.requestId);
                }
                if (obj.query != null) {
                    writer.uint32(18); // field 2, length-delimited
                    HistoryQuery.codec().encode(obj.query, writer);
                }
                if (obj.response != null) {
                    writer.uint32(26); // field 3, length-delimited
                    HistoryResponse.codec().encode(obj.response, writer);
                }
                if (opts.lengthDelimited !== false) {
                    writer.ldelim();
                }
            }, (reader, length) => {
                const obj = {};
                const end = length == null ? reader.len : reader.pos + length;
                while (reader.pos < end) {
                    const tag = reader.uint32();
                    switch (tag >>> 3) {
                        case 1:
                            obj.requestId = reader.string();
                            break;
                        case 2:
                            obj.query = HistoryQuery.codec().decode(reader, reader.uint32());
                            break;
                        case 3:
                            obj.response = HistoryResponse.codec().decode(reader, reader.uint32());
                            break;
                        default:
                            reader.skipType(tag & 7); // skip unknown fields
                            break;
                    }
                }
                return obj;
            });
        }
        return _codec;
    };
    HistoryRPC.encode = (obj) => {
        return encodeMessage(obj, HistoryRPC.codec());
    };
    HistoryRPC.decode = (buf) => {
        return decodeMessage(buf, HistoryRPC.codec());
    };
})(HistoryRPC || (HistoryRPC = {}));
|
|
// Generated protobuf codec for the RLN `RateLimitProof` message — second
// bundled copy, byte-for-byte equivalent wire format to RateLimitProof$1.
// All seven byte fields (fields 1-7) are required on encode; decode
// initializes each to an empty Uint8Array, then re-checks for null.
var RateLimitProof;
(function (RateLimitProof) {
    let _codec; // lazily created, cached codec instance
    RateLimitProof.codec = () => {
        if (_codec == null) {
            _codec = message((obj, writer, opts = {}) => {
                if (opts.lengthDelimited !== false) {
                    writer.fork();
                }
                if (obj.proof != null) {
                    writer.uint32(10); // field 1, bytes
                    writer.bytes(obj.proof);
                }
                else {
                    throw new Error('Protocol error: required field "proof" was not found in object');
                }
                if (obj.merkleRoot != null) {
                    writer.uint32(18); // field 2, bytes
                    writer.bytes(obj.merkleRoot);
                }
                else {
                    throw new Error('Protocol error: required field "merkleRoot" was not found in object');
                }
                if (obj.epoch != null) {
                    writer.uint32(26); // field 3, bytes
                    writer.bytes(obj.epoch);
                }
                else {
                    throw new Error('Protocol error: required field "epoch" was not found in object');
                }
                if (obj.shareX != null) {
                    writer.uint32(34); // field 4, bytes
                    writer.bytes(obj.shareX);
                }
                else {
                    throw new Error('Protocol error: required field "shareX" was not found in object');
                }
                if (obj.shareY != null) {
                    writer.uint32(42); // field 5, bytes
                    writer.bytes(obj.shareY);
                }
                else {
                    throw new Error('Protocol error: required field "shareY" was not found in object');
                }
                if (obj.nullifier != null) {
                    writer.uint32(50); // field 6, bytes
                    writer.bytes(obj.nullifier);
                }
                else {
                    throw new Error('Protocol error: required field "nullifier" was not found in object');
                }
                if (obj.rlnIdentifier != null) {
                    writer.uint32(58); // field 7, bytes
                    writer.bytes(obj.rlnIdentifier);
                }
                else {
                    throw new Error('Protocol error: required field "rlnIdentifier" was not found in object');
                }
                if (opts.lengthDelimited !== false) {
                    writer.ldelim();
                }
            }, (reader, length) => {
                const obj = {
                    proof: new Uint8Array(0),
                    merkleRoot: new Uint8Array(0),
                    epoch: new Uint8Array(0),
                    shareX: new Uint8Array(0),
                    shareY: new Uint8Array(0),
                    nullifier: new Uint8Array(0),
                    rlnIdentifier: new Uint8Array(0),
                };
                const end = length == null ? reader.len : reader.pos + length;
                while (reader.pos < end) {
                    const tag = reader.uint32();
                    switch (tag >>> 3) {
                        case 1:
                            obj.proof = reader.bytes();
                            break;
                        case 2:
                            obj.merkleRoot = reader.bytes();
                            break;
                        case 3:
                            obj.epoch = reader.bytes();
                            break;
                        case 4:
                            obj.shareX = reader.bytes();
                            break;
                        case 5:
                            obj.shareY = reader.bytes();
                            break;
                        case 6:
                            obj.nullifier = reader.bytes();
                            break;
                        case 7:
                            obj.rlnIdentifier = reader.bytes();
                            break;
                        default:
                            reader.skipType(tag & 7); // skip unknown fields
                            break;
                    }
                }
                if (obj.proof == null) {
                    throw new Error('Protocol error: value for required field "proof" was not found in protobuf');
                }
                if (obj.merkleRoot == null) {
                    throw new Error('Protocol error: value for required field "merkleRoot" was not found in protobuf');
                }
                if (obj.epoch == null) {
                    throw new Error('Protocol error: value for required field "epoch" was not found in protobuf');
                }
                if (obj.shareX == null) {
                    throw new Error('Protocol error: value for required field "shareX" was not found in protobuf');
                }
                if (obj.shareY == null) {
                    throw new Error('Protocol error: value for required field "shareY" was not found in protobuf');
                }
                if (obj.nullifier == null) {
                    throw new Error('Protocol error: value for required field "nullifier" was not found in protobuf');
                }
                if (obj.rlnIdentifier == null) {
                    throw new Error('Protocol error: value for required field "rlnIdentifier" was not found in protobuf');
                }
                return obj;
            });
        }
        return _codec;
    };
    RateLimitProof.encode = (obj) => {
        return encodeMessage(obj, RateLimitProof.codec());
    };
    RateLimitProof.decode = (buf) => {
        return decodeMessage(buf, RateLimitProof.codec());
    };
})(RateLimitProof || (RateLimitProof = {}));
|
|
// Generated protobuf codec for `WakuMessage` — second bundled copy; same
// wire layout as WakuMessage$1. Non-contiguous field numbers:
// timestampDeprecated is field 4 (double), timestamp field 10 (sint64),
// rateLimitProof field 21 (tag 170 = 21 << 3 | 2).
var WakuMessage;
(function (WakuMessage) {
    let _codec; // lazily created, cached codec instance
    WakuMessage.codec = () => {
        if (_codec == null) {
            _codec = message((obj, writer, opts = {}) => {
                if (opts.lengthDelimited !== false) {
                    writer.fork();
                }
                if (obj.payload != null) {
                    writer.uint32(10); // field 1, bytes
                    writer.bytes(obj.payload);
                }
                if (obj.contentTopic != null) {
                    writer.uint32(18); // field 2, string
                    writer.string(obj.contentTopic);
                }
                if (obj.version != null) {
                    writer.uint32(24); // field 3, varint
                    writer.uint32(obj.version);
                }
                if (obj.timestampDeprecated != null) {
                    writer.uint32(33); // field 4, fixed64 (double)
                    writer.double(obj.timestampDeprecated);
                }
                if (obj.timestamp != null) {
                    writer.uint32(80); // field 10, varint (sint64)
                    writer.sint64(obj.timestamp);
                }
                if (obj.rateLimitProof != null) {
                    writer.uint32(170); // field 21, length-delimited
                    RateLimitProof.codec().encode(obj.rateLimitProof, writer);
                }
                if (opts.lengthDelimited !== false) {
                    writer.ldelim();
                }
            }, (reader, length) => {
                const obj = {};
                const end = length == null ? reader.len : reader.pos + length;
                while (reader.pos < end) {
                    const tag = reader.uint32();
                    switch (tag >>> 3) {
                        case 1:
                            obj.payload = reader.bytes();
                            break;
                        case 2:
                            obj.contentTopic = reader.string();
                            break;
                        case 3:
                            obj.version = reader.uint32();
                            break;
                        case 4:
                            obj.timestampDeprecated = reader.double();
                            break;
                        case 10:
                            obj.timestamp = reader.sint64();
                            break;
                        case 21:
                            obj.rateLimitProof = RateLimitProof.codec().decode(reader, reader.uint32());
                            break;
                        default:
                            reader.skipType(tag & 7); // skip unknown fields
                            break;
                    }
                }
                return obj;
            });
        }
        return _codec;
    };
    WakuMessage.encode = (obj) => {
        return encodeMessage(obj, WakuMessage.codec());
    };
    WakuMessage.decode = (buf) => {
        return decodeMessage(buf, WakuMessage.codec());
    };
})(WakuMessage || (WakuMessage = {}));
|
|
|
|
// libp2p protocol identifiers for the two supported Waku store protocol
// versions (compiled TypeScript string-enum pattern).
var StoreCodecs;
(function (codecs) {
    codecs.V2Beta3 = "/vac/waku/store/2.0.0-beta3";
    codecs.V2Beta4 = "/vac/waku/store/2.0.0-beta4";
})(StoreCodecs || (StoreCodecs = {}));
|
|
|
|
// Bundler tree-shaking residue: the original module bound this BigInt
// constant to a name that was removed, leaving only the (side-effect-free)
// expression. NOTE(review): presumably a time-unit conversion factor in the
// store module — confirm against upstream js-waku before removing.
BigInt(1000000);
|
|
// Direction in which store history pages are traversed
// (compiled TypeScript string-enum pattern).
var PageDirection;
(function (dir) {
    dir.BACKWARD = "backward";
    dir.FORWARD = "forward";
})(PageDirection || (PageDirection = {}));
|
|
|
|
// Bundler tree-shaking residue: a property read and two debug-logger
// creations whose results were bound to names removed by the bundler.
// The `debug(...)` calls still run for their side effect of registering
// the "waku:store" / "waku:waku" namespaces; the bare property access is
// a no-op kept by the bundler.
HistoryResponse$1.HistoryError;
debug("waku:store");
debug("waku:waku");
|
|
// Names of the Waku protocols a node can support
// (compiled TypeScript string-enum pattern).
var Protocols;
(function (proto) {
    proto.Relay = "relay";
    proto.Store = "store";
    proto.LightPush = "lightpush";
    proto.Filter = "filter";
})(Protocols || (Protocols = {}));
|
|
|
|
// Error codes raised by the record-envelope signature verification path.
const codes$1 = { ERR_SIGNATURE_NOT_VALID: 'ERR_SIGNATURE_NOT_VALID' };
|
|
|
|
/* eslint-disable import/export */
|
|
/* eslint-disable import/export */
// Generated protobuf codec for the libp2p signed-record `Envelope` message.
// All four fields are required bytes; note signature is field 5 (tag 42) —
// field 4 is unused in this schema. Decode initializes every field to an
// empty Uint8Array, then re-checks for null after the read loop.
var Envelope;
(function (Envelope) {
    let _codec; // lazily created, cached codec instance
    Envelope.codec = () => {
        if (_codec == null) {
            _codec = message((obj, writer, opts = {}) => {
                if (opts.lengthDelimited !== false) {
                    writer.fork();
                }
                if (obj.publicKey != null) {
                    writer.uint32(10); // field 1, bytes
                    writer.bytes(obj.publicKey);
                }
                else {
                    throw new Error('Protocol error: required field "publicKey" was not found in object');
                }
                if (obj.payloadType != null) {
                    writer.uint32(18); // field 2, bytes
                    writer.bytes(obj.payloadType);
                }
                else {
                    throw new Error('Protocol error: required field "payloadType" was not found in object');
                }
                if (obj.payload != null) {
                    writer.uint32(26); // field 3, bytes
                    writer.bytes(obj.payload);
                }
                else {
                    throw new Error('Protocol error: required field "payload" was not found in object');
                }
                if (obj.signature != null) {
                    writer.uint32(42); // field 5, bytes (field 4 unused)
                    writer.bytes(obj.signature);
                }
                else {
                    throw new Error('Protocol error: required field "signature" was not found in object');
                }
                if (opts.lengthDelimited !== false) {
                    writer.ldelim();
                }
            }, (reader, length) => {
                const obj = {
                    publicKey: new Uint8Array(0),
                    payloadType: new Uint8Array(0),
                    payload: new Uint8Array(0),
                    signature: new Uint8Array(0)
                };
                const end = length == null ? reader.len : reader.pos + length;
                while (reader.pos < end) {
                    const tag = reader.uint32();
                    switch (tag >>> 3) {
                        case 1:
                            obj.publicKey = reader.bytes();
                            break;
                        case 2:
                            obj.payloadType = reader.bytes();
                            break;
                        case 3:
                            obj.payload = reader.bytes();
                            break;
                        case 5:
                            obj.signature = reader.bytes();
                            break;
                        default:
                            reader.skipType(tag & 7); // skip unknown fields
                            break;
                    }
                }
                if (obj.publicKey == null) {
                    throw new Error('Protocol error: value for required field "publicKey" was not found in protobuf');
                }
                if (obj.payloadType == null) {
                    throw new Error('Protocol error: value for required field "payloadType" was not found in protobuf');
                }
                if (obj.payload == null) {
                    throw new Error('Protocol error: value for required field "payload" was not found in protobuf');
                }
                if (obj.signature == null) {
                    throw new Error('Protocol error: value for required field "signature" was not found in protobuf');
                }
                return obj;
            });
        }
        return _codec;
    };
    Envelope.encode = (obj) => {
        return encodeMessage(obj, Envelope.codec());
    };
    Envelope.decode = (buf) => {
        return decodeMessage(buf, Envelope.codec());
    };
})(Envelope || (Envelope = {}));
|
|
|
|
class RecordEnvelope {
    /**
     * The Envelope is responsible for keeping an arbitrary signed record
     * by a libp2p peer.
     *
     * @param {Object} init
     * @param {PeerId} init.peerId - Peer whose key signed (or will verify) the record.
     * @param {Uint8Array} init.payloadType - Multicodec identifying the payload kind.
     * @param {Uint8ArrayList|Uint8Array} init.payload - The marshalled record bytes.
     * @param {Uint8Array} init.signature - Signature over the domain-scoped payload.
     */
    constructor(init) {
        const { peerId, payloadType, payload, signature } = init;
        this.peerId = peerId;
        this.payloadType = payloadType;
        this.payload = payload;
        this.signature = signature;
    }
    /**
     * Marshal the envelope content.
     *
     * Result is memoized in `this.marshaled`, so repeated calls serialize once.
     * @throws {Error} if the peer id carries no public key.
     * @returns {Uint8Array} protobuf-encoded Envelope.
     */
    marshal() {
        if (this.peerId.publicKey == null) {
            throw new Error('Missing public key');
        }
        if (this.marshaled == null) {
            this.marshaled = Envelope.encode({
                publicKey: this.peerId.publicKey,
                payloadType: this.payloadType,
                // subarray() flattens a Uint8ArrayList view into contiguous bytes.
                payload: this.payload.subarray(),
                signature: this.signature
            });
        }
        return this.marshaled;
    }
    /**
     * Verifies if the other Envelope is identical to this one
     * (byte-for-byte comparison of the marshalled forms).
     */
    equals(other) {
        return equals(this.marshal(), other.marshal());
    }
    /**
     * Validate envelope data signature for the given domain.
     *
     * @param {string} domain - Domain-separation string the record was signed under.
     * @returns {Promise<boolean>} true when the signature checks out.
     * @throws {Error} if the peer id carries no public key.
     */
    async validate(domain) {
        // Rebuild the exact byte sequence that was signed (see
        // formatSignaturePayload below), then verify with the peer's key.
        const signData = formatSignaturePayload(domain, this.payloadType, this.payload);
        if (this.peerId.publicKey == null) {
            throw new Error('Missing public key');
        }
        const key = unmarshalPublicKey(this.peerId.publicKey);
        return await key.verify(signData.subarray(), this.signature);
    }
}
|
|
/**
 * Unmarshal a serialized Envelope protobuf message.
 *
 * Decodes the bytes and reconstructs the signing peer's id from the embedded
 * public key. Note: the signature is NOT verified here — use
 * `openAndCertify` for that.
 *
 * @param {Uint8Array} data - protobuf-encoded Envelope.
 * @returns {Promise<RecordEnvelope>}
 */
RecordEnvelope.createFromProtobuf = async (data) => {
    const envelopeData = Envelope.decode(data);
    const peerId = await peerIdFromKeys(envelopeData.publicKey);
    return new RecordEnvelope({
        peerId,
        payloadType: envelopeData.payloadType,
        payload: envelopeData.payload,
        signature: envelopeData.signature
    });
};
|
|
/**
 * Seal marshals the given Record, places the marshaled bytes inside an Envelope
 * and signs it with the given peerId's private key.
 *
 * @param {Record} record - Must expose `domain`, `codec` and `marshal()`.
 * @param {PeerId} peerId - Must carry a private key to sign with.
 * @returns {Promise<RecordEnvelope>}
 * @throws {Error} if the peer id carries no private key.
 */
RecordEnvelope.seal = async (record, peerId) => {
    if (peerId.privateKey == null) {
        throw new Error('Missing private key');
    }
    const domain = record.domain;
    const payloadType = record.codec;
    const payload = record.marshal();
    // Sign the domain-scoped byte sequence, not the raw payload — the domain
    // string prevents cross-protocol signature reuse.
    const signData = formatSignaturePayload(domain, payloadType, payload);
    const key = await unmarshalPrivateKey(peerId.privateKey);
    const signature = await key.sign(signData.subarray());
    return new RecordEnvelope({
        peerId,
        payloadType,
        payload,
        signature
    });
};
|
|
/**
 * Open and certify a given marshalled envelope.
 * Data is unmarshalled and the signature validated for the given domain.
 *
 * @param {Uint8Array} data - protobuf-encoded Envelope.
 * @param {string} domain - Domain-separation string to validate against.
 * @returns {Promise<RecordEnvelope>}
 * @throws {Error} with code ERR_SIGNATURE_NOT_VALID when verification fails.
 */
RecordEnvelope.openAndCertify = async (data, domain) => {
    const envelope = await RecordEnvelope.createFromProtobuf(data);
    const valid = await envelope.validate(domain);
    if (!valid) {
        throw errCode(new Error('envelope signature is not valid for the given domain'), codes$1.ERR_SIGNATURE_NOT_VALID);
    }
    return envelope;
};
|
|
/**
 * Helper function that prepares a Uint8Array to sign or verify a signature.
 *
 * When signing, a peer will prepare a Uint8Array by concatenating the following:
 * - The length of the domain separation string in bytes (unsigned varint)
 * - The domain separation string, encoded as UTF-8
 * - The length of the payload_type field in bytes (unsigned varint)
 * - The value of the payload_type field
 * - The length of the payload field in bytes (unsigned varint)
 * - The value of the payload field
 *
 * @param {string} domain
 * @param {Uint8Array} payloadType
 * @param {Uint8Array|Uint8ArrayList} payload
 * @returns {Uint8ArrayList} the concatenated sign/verify input (no copy).
 */
const formatSignaturePayload = (domain, payloadType, payload) => {
    const domainUint8Array = fromString$1(domain);
    const domainLength = unsigned.encode(domainUint8Array.byteLength);
    const payloadTypeLength = unsigned.encode(payloadType.length);
    const payloadLength = unsigned.encode(payload.length);
    // Uint8ArrayList concatenates the segments lazily without copying.
    return new Uint8ArrayList(domainLength, domainUint8Array, payloadTypeLength, payloadType, payloadLength, payload);
};
|
|
|
|
// Sentinel size meaning "variable length" (the value is length-prefixed).
const V = -1;
// Lookup tables filled in below: protocol name → descriptor, code → descriptor.
const names = {};
const codes = {};
// Multiaddr protocol table. Each row is
// [code, size-in-bits (V = variable, 0 = no value), name, resolvable?, path?].
const table = [
    [4, 32, 'ip4'],
    [6, 16, 'tcp'],
    [33, 16, 'dccp'],
    [41, 128, 'ip6'],
    [42, V, 'ip6zone'],
    [53, V, 'dns', true],
    [54, V, 'dns4', true],
    [55, V, 'dns6', true],
    [56, V, 'dnsaddr', true],
    [132, 16, 'sctp'],
    [273, 16, 'udp'],
    [275, 0, 'p2p-webrtc-star'],
    [276, 0, 'p2p-webrtc-direct'],
    [277, 0, 'p2p-stardust'],
    [280, 0, 'webrtc'],
    [290, 0, 'p2p-circuit'],
    [301, 0, 'udt'],
    [302, 0, 'utp'],
    [400, V, 'unix', false, true],
    // `ipfs` is added before `p2p` for legacy support.
    // All text representations will default to `p2p`, but `ipfs` will
    // still be supported
    [421, V, 'ipfs'],
    // `p2p` is the preferred name for 421, and is now the default
    // (the later row wins in `names`/`codes` since both tables key by
    // code/name and 421 is inserted twice).
    [421, V, 'p2p'],
    [443, 0, 'https'],
    [444, 96, 'onion'],
    [445, 296, 'onion3'],
    [446, V, 'garlic64'],
    [460, 0, 'quic'],
    [465, 0, 'webtransport'],
    [466, V, 'certhash'],
    [477, 0, 'ws'],
    [478, 0, 'wss'],
    [479, 0, 'p2p-websocket-star'],
    [480, 0, 'http'],
    [777, V, 'memory']
];
|
|
// Populate the code → protocol and name → protocol lookup tables from the
// row data above. For duplicate codes/names (ipfs/p2p) the later row wins.
for (const row of table) {
    const proto = createProtocol(...row);
    codes[proto.code] = proto;
    names[proto.name] = proto;
}
|
|
/**
 * Build a multiaddr protocol descriptor from a table row.
 *
 * @param {number} code - Protocol code.
 * @param {number} size - Value size in bits (V = variable, 0 = no value).
 * @param {string} name - Protocol name.
 * @param {boolean} [resolvable] - Whether the value can be resolved (DNS-like).
 * @param {boolean} [path] - Whether the value is a filesystem path.
 * @returns {{code: number, size: number, name: string, resolvable: boolean, path: boolean}}
 */
function createProtocol(code, size, name, resolvable = false, path = false) {
    const resolvableFlag = Boolean(resolvable);
    const pathFlag = Boolean(path);
    return { code, size, name, resolvable: resolvableFlag, path: pathFlag };
}
|
|
/**
 * Look up a multiaddr protocol descriptor by numeric code or by name.
 *
 * @param {number|string} proto - Protocol code or protocol name.
 * @returns {Object} the protocol descriptor from the tables above.
 * @throws {Error} when the code/name is unknown or the argument has another type.
 */
function getProtocol(proto) {
    switch (typeof proto) {
        case 'number': {
            const byCode = codes[proto];
            if (byCode != null) {
                return byCode;
            }
            throw new Error(`no protocol with code: ${proto}`);
        }
        case 'string': {
            const byName = names[proto];
            if (byName != null) {
                return byName;
            }
            throw new Error(`no protocol with name: ${proto}`);
        }
        default:
            throw new Error(`invalid protocol id type: ${typeof proto}`);
    }
}
|
|
|
|
// All multibase decoders known to the bundled `bases` table.
const decoders = Object.values(bases).map((c) => c.decoder);
// Tree-shaking residue: this IIFE originally built a combined "any base"
// decoder by chaining `.or(...)` over every decoder; its return value was
// assigned to a constant that was dropped by the bundler, so the result is
// discarded here.
((function () {
    let acc = decoders[0].or(decoders[1]);
    decoders.slice(2).forEach((d) => (acc = acc.or(d)));
    return acc;
}))();
|
|
|
|
// Tree-shaking residue: these array literals originally initialised named
// constants (the DNS-family protocol codes and the p2p/ipfs alias codes).
// Only the bare expressions survived bundling; the getProtocol() calls still
// run and would throw if a name were missing from the table above.
[
    getProtocol('dns').code,
    getProtocol('dns4').code,
    getProtocol('dns6').code,
    getProtocol('dnsaddr').code
];
[
    getProtocol('p2p').code,
    getProtocol('ipfs').code
];
|
|
|
|
/* eslint-disable import/export */
// Protobuf codec for the libp2p peer record (routing state) wire format.
// PeerRecord fields: 1 = peerId (bytes), 2 = seq (uint64), 3 = addresses
// (repeated AddressInfo). AddressInfo has a single field: 1 = multiaddr (bytes).
var PeerRecord;
(function (PeerRecord) {
    // Nested AddressInfo message codec.
    (function (AddressInfo) {
        let _codec;
        AddressInfo.codec = () => {
            if (_codec == null) {
                _codec = message((obj, writer, opts = {}) => {
                    // --- encoder ---
                    if (opts.lengthDelimited !== false) {
                        writer.fork();
                    }
                    if (obj.multiaddr != null) {
                        writer.uint32(10);
                        writer.bytes(obj.multiaddr);
                    }
                    else {
                        throw new Error('Protocol error: required field "multiaddr" was not found in object');
                    }
                    if (opts.lengthDelimited !== false) {
                        writer.ldelim();
                    }
                }, (reader, length) => {
                    // --- decoder ---
                    const obj = {
                        multiaddr: new Uint8Array(0)
                    };
                    const end = length == null ? reader.len : reader.pos + length;
                    while (reader.pos < end) {
                        const tag = reader.uint32();
                        switch (tag >>> 3) {
                            case 1:
                                obj.multiaddr = reader.bytes();
                                break;
                            default:
                                // Unknown field: skip per its wire type.
                                reader.skipType(tag & 7);
                                break;
                        }
                    }
                    if (obj.multiaddr == null) {
                        throw new Error('Protocol error: value for required field "multiaddr" was not found in protobuf');
                    }
                    return obj;
                });
            }
            return _codec;
        };
        AddressInfo.encode = (obj) => {
            return encodeMessage(obj, AddressInfo.codec());
        };
        AddressInfo.decode = (buf) => {
            return decodeMessage(buf, AddressInfo.codec());
        };
    })(PeerRecord.AddressInfo || (PeerRecord.AddressInfo = {}));
    // Lazily-built codec for the outer PeerRecord message.
    let _codec;
    PeerRecord.codec = () => {
        if (_codec == null) {
            _codec = message((obj, writer, opts = {}) => {
                // --- encoder ---
                if (opts.lengthDelimited !== false) {
                    writer.fork();
                }
                if (obj.peerId != null) {
                    writer.uint32(10);
                    writer.bytes(obj.peerId);
                }
                else {
                    throw new Error('Protocol error: required field "peerId" was not found in object');
                }
                if (obj.seq != null) {
                    // tag 16 = field 2, wire type 0 (varint).
                    writer.uint32(16);
                    writer.uint64(obj.seq);
                }
                else {
                    throw new Error('Protocol error: required field "seq" was not found in object');
                }
                if (obj.addresses != null) {
                    // Repeated field: one tagged, length-delimited submessage per entry.
                    for (const value of obj.addresses) {
                        writer.uint32(26);
                        PeerRecord.AddressInfo.codec().encode(value, writer);
                    }
                }
                else {
                    throw new Error('Protocol error: required field "addresses" was not found in object');
                }
                if (opts.lengthDelimited !== false) {
                    writer.ldelim();
                }
            }, (reader, length) => {
                // --- decoder ---
                // seq defaults to BigInt zero (uint64 values are BigInts).
                const obj = {
                    peerId: new Uint8Array(0),
                    seq: 0n,
                    addresses: []
                };
                const end = length == null ? reader.len : reader.pos + length;
                while (reader.pos < end) {
                    const tag = reader.uint32();
                    switch (tag >>> 3) {
                        case 1:
                            obj.peerId = reader.bytes();
                            break;
                        case 2:
                            obj.seq = reader.uint64();
                            break;
                        case 3:
                            // Submessage length prefix is read here and passed down.
                            obj.addresses.push(PeerRecord.AddressInfo.codec().decode(reader, reader.uint32()));
                            break;
                        default:
                            reader.skipType(tag & 7);
                            break;
                    }
                }
                if (obj.peerId == null) {
                    throw new Error('Protocol error: value for required field "peerId" was not found in protobuf');
                }
                if (obj.seq == null) {
                    throw new Error('Protocol error: value for required field "seq" was not found in protobuf');
                }
                return obj;
            });
        }
        return _codec;
    };
    PeerRecord.encode = (obj) => {
        return encodeMessage(obj, PeerRecord.codec());
    };
    PeerRecord.decode = (buf) => {
        return decodeMessage(buf, PeerRecord.codec());
    };
})(PeerRecord || (PeerRecord = {}));
|
|
|
|
// Custom `debug` printf-style formatters used throughout libp2p logging.
// Each returns the string 'undefined' for nullish input rather than throwing.

// Add a formatter for converting to a base58 string (%b)
debug.formatters.b = (v) => {
    return v == null ? 'undefined' : base58btc.baseEncode(v);
};
// Add a formatter for converting to a base32 string (%t)
debug.formatters.t = (v) => {
    return v == null ? 'undefined' : base32.baseEncode(v);
};
// Add a formatter for converting to a base64 string (%m)
debug.formatters.m = (v) => {
    return v == null ? 'undefined' : base64$2.baseEncode(v);
};
// Add a formatter for stringifying peer ids (%p)
debug.formatters.p = (v) => {
    return v == null ? 'undefined' : v.toString();
};
// Add a formatter for stringifying CIDs (%c)
debug.formatters.c = (v) => {
    return v == null ? 'undefined' : v.toString();
};
// Add a formatter for stringifying Datastore keys (%k)
debug.formatters.k = (v) => {
    return v == null ? 'undefined' : v.toString();
};
|
|
/**
 * Create a namespaced libp2p logger: the returned function logs at the
 * default level, and carries `.error` / `.trace` sub-loggers under the
 * `<name>:error` and `<name>:trace` namespaces.
 *
 * @param {string} name - debug namespace, e.g. "libp2p:dialer".
 * @returns {Function} debug logger augmented with `error` and `trace`.
 */
function logger(name) {
    const log = debug(name);
    log.error = debug(`${name}:error`);
    log.trace = debug(`${name}:trace`);
    return log;
}
|
|
|
|
// Well-known symbol used to brand topology objects across module instances.
const topologySymbol = Symbol.for('@libp2p/topology');

// Default no-op connect/disconnect handler.
const noop = () => { };

/**
 * A topology tracks peers supporting a given protocol and notifies the
 * caller when such peers connect or disconnect.
 */
class TopologyImpl {
    /**
     * @param {Object} init
     * @param {number} [init.min=0] - Minimum number of peers to keep.
     * @param {number} [init.max=Infinity] - Maximum number of peers to keep.
     * @param {Function} [init.onConnect] - Invoked when a matching peer connects.
     * @param {Function} [init.onDisconnect] - Invoked when a matching peer disconnects.
     */
    constructor(init) {
        const { min, max, onConnect, onDisconnect } = init;
        this.min = min ?? 0;
        this.max = max ?? Infinity;
        this.peers = new Set();
        this.onConnect = onConnect ?? noop;
        this.onDisconnect = onDisconnect ?? noop;
    }
    get [Symbol.toStringTag]() {
        return topologySymbol.toString();
    }
    // Brand check: lets isTopology()-style guards recognise this object.
    get [topologySymbol]() {
        return true;
    }
    // Store a reference to the registrar that mounted this topology.
    async setRegistrar(registrar) {
        this.registrar = registrar;
    }
    /**
     * Notify about peer disconnected event
     */
    disconnect(peerId) {
        this.onDisconnect(peerId);
    }
}

/**
 * Factory for TopologyImpl.
 *
 * @param {Object} init - see TopologyImpl constructor.
 * @returns {TopologyImpl}
 */
function createTopology(init) {
    return new TopologyImpl(init);
}
|
|
|
|
// TypeScript emit helper: reads a private class member. `state` is either a
// WeakMap/WeakSet (fields) or the class constructor (static members); `kind`
// selects field ("f"), method ("m") or accessor ("a") semantics.
var __classPrivateFieldGet = (undefined && undefined.__classPrivateFieldGet) || function (receiver, state, kind, f) {
    if (kind === "a" && !f) {
        throw new TypeError("Private accessor was defined without a getter");
    }
    // Membership check: static members compare against the constructor,
    // instance members against the backing WeakMap/WeakSet.
    const isStatic = typeof state === "function";
    if (isStatic ? (receiver !== state || !f) : !state.has(receiver)) {
        throw new TypeError("Cannot read private member from an object whose class did not declare it");
    }
    if (kind === "m") {
        return f;
    }
    if (kind === "a") {
        return f.call(receiver);
    }
    return f ? f.value : state.get(receiver);
};
|
|
// Backing store for per-instance listener bookkeeping (event type → list of
// {callback, once} records), assigned a WeakMap after the class definition.
var _EventEmitter_listeners;
/**
 * Adds types to the EventTarget class. Hopefully this won't be necessary forever.
 *
 * Also tracks registered listeners per event type so that listenerCount()
 * can be implemented on top of the native EventTarget.
 *
 * https://github.com/microsoft/TypeScript/issues/28357
 * https://github.com/microsoft/TypeScript/issues/43477
 * https://github.com/microsoft/TypeScript/issues/299
 * etc
 */
class EventEmitter extends EventTarget {
    constructor() {
        super(...arguments);
        _EventEmitter_listeners.set(this, new Map());
    }
    // Number of listeners currently tracked for `type`.
    listenerCount(type) {
        const listeners = __classPrivateFieldGet(this, _EventEmitter_listeners, "f").get(type);
        if (listeners == null) {
            return 0;
        }
        return listeners.length;
    }
    addEventListener(type, listener, options) {
        super.addEventListener(type, listener, options);
        let list = __classPrivateFieldGet(this, _EventEmitter_listeners, "f").get(type);
        if (list == null) {
            list = [];
            __classPrivateFieldGet(this, _EventEmitter_listeners, "f").set(type, list);
        }
        // `options` may be a boolean capture flag — only object options
        // carry a `once` property.
        list.push({
            callback: listener,
            once: (options !== true && options !== false && options?.once) ?? false
        });
    }
    removeEventListener(type, listener, options) {
        // NOTE(review): `type.toString()` here vs plain `type` in
        // addEventListener — equivalent for string event names.
        super.removeEventListener(type.toString(), listener ?? null, options);
        let list = __classPrivateFieldGet(this, _EventEmitter_listeners, "f").get(type);
        if (list == null) {
            return;
        }
        list = list.filter(({ callback }) => callback !== listener);
        __classPrivateFieldGet(this, _EventEmitter_listeners, "f").set(type, list);
    }
    dispatchEvent(event) {
        const result = super.dispatchEvent(event);
        let list = __classPrivateFieldGet(this, _EventEmitter_listeners, "f").get(event.type);
        if (list == null) {
            return result;
        }
        // `once` listeners have just fired and were auto-removed by
        // EventTarget — drop them from our bookkeeping too.
        list = list.filter(({ once }) => !once);
        __classPrivateFieldGet(this, _EventEmitter_listeners, "f").set(event.type, list);
        return result;
    }
}
_EventEmitter_listeners = new WeakMap();
|
|
/**
 * CustomEvent is a standard event but it's not supported by node.
 *
 * Remove this when https://github.com/nodejs/node/issues/40678 is closed.
 *
 * Ref: https://developer.mozilla.org/en-US/docs/Web/API/CustomEvent
 */
class CustomEventPolyfill extends Event {
    constructor(type, init) {
        super(type, init);
        // Mirror CustomEvent: expose the caller-supplied detail (if any).
        // @ts-expect-error could be undefined
        this.detail = init?.detail;
    }
}

// Prefer the platform implementation when one exists.
const CustomEvent = globalThis.CustomEvent ?? CustomEventPolyfill;
|
|
|
|
/**
 * Gossipsub message cache: holds recently-seen messages in a sliding window
 * of time-bounded history slots.
 */
class MessageCache {
    /**
     * @param {number} gossip - Number of history slots used for gossiping: a
     *   message stops being gossiped once shift() has been called `gossip`
     *   times after it was inserted.
     * @param {number} historyCapacity - Total number of history slots kept
     *   before a message is evicted.
     * @param {Function} msgIdToStrFn - Converts a raw message id to its
     *   string form used as the map key.
     */
    constructor(gossip, historyCapacity, msgIdToStrFn) {
        this.gossip = gossip;
        // msgIdStr → { message, validated, originatingPeers, iwantCounts }
        this.msgs = new Map();
        // history[0] is the current window; older windows follow.
        this.history = [];
        this.msgIdToStrFn = msgIdToStrFn;
        for (let slot = 0; slot < historyCapacity; slot++) {
            this.history[slot] = [];
        }
    }

    /** Number of messages currently cached. */
    get size() {
        return this.msgs.size;
    }

    /**
     * Adds a message to the current window and the cache.
     * Returns true if the message is not known and is inserted in the cache.
     */
    put(messageId, msg, validated = false) {
        const { msgIdStr } = messageId;
        // Don't add duplicate entries to the cache.
        if (this.msgs.has(msgIdStr)) {
            return false;
        }
        this.msgs.set(msgIdStr, {
            message: msg,
            validated,
            originatingPeers: new Set(),
            iwantCounts: new Map()
        });
        this.history[0].push({ ...messageId, topic: msg.topic });
        return true;
    }

    /**
     * Record that `fromPeerIdStr` also sent us an already-cached message.
     * Skipped for validated messages: those were already forwarded, so extra
     * senders need not be tracked.
     */
    observeDuplicate(msgId, fromPeerIdStr) {
        const entry = this.msgs.get(msgId);
        if (entry == null || entry.validated) {
            return;
        }
        entry.originatingPeers.add(fromPeerIdStr);
    }

    /**
     * Retrieves a message from the cache by its ID, if it is still present.
     */
    get(msgId) {
        const entry = this.msgs.get(this.msgIdToStrFn(msgId));
        return entry?.message;
    }

    /**
     * Increases the iwant count for the given message by one and returns the
     * message together with the iwant count, or null when unknown.
     */
    getWithIWantCount(msgIdStr, p) {
        const entry = this.msgs.get(msgIdStr);
        if (entry == null) {
            return null;
        }
        const count = (entry.iwantCounts.get(p) ?? 0) + 1;
        entry.iwantCounts.set(p, count);
        return { msg: entry.message, count };
    }

    /**
     * Retrieves a map of topic → message IDs eligible for gossip, scanning
     * only the first `gossip` history slots and only validated messages.
     */
    getGossipIDs(topics) {
        const msgIdsByTopic = new Map();
        for (let slot = 0; slot < this.gossip; slot++) {
            for (const entry of this.history[slot]) {
                const cached = this.msgs.get(entry.msgIdStr);
                if (cached == null || !cached.validated || !topics.has(entry.topic)) {
                    continue;
                }
                let ids = msgIdsByTopic.get(entry.topic);
                if (ids == null) {
                    ids = [];
                    msgIdsByTopic.set(entry.topic, ids);
                }
                ids.push(entry.msgId);
            }
        }
        return msgIdsByTopic;
    }

    /**
     * Gets a message with msgId and tags it as validated.
     * Also returns the peers known to have sent this message, then clears
     * that set (once forwarded, the originating peers need not be stored).
     */
    validate(msgId) {
        const entry = this.msgs.get(msgId);
        if (entry == null) {
            return null;
        }
        const { message, originatingPeers } = entry;
        entry.validated = true;
        entry.originatingPeers = new Set();
        return { message, originatingPeers };
    }

    /**
     * Shifts the current window, discarding messages older than
     * this.history.length windows.
     */
    shift() {
        const expired = this.history[this.history.length - 1];
        for (const { msgIdStr } of expired) {
            this.msgs.delete(msgIdStr);
        }
        this.history.pop();
        this.history.unshift([]);
    }

    /**
     * Removes a message from the lookup map (its history entry is left in
     * place and dropped by a later shift()). Returns the removed entry or null.
     */
    remove(msgId) {
        const entry = this.msgs.get(msgId);
        if (entry == null) {
            return null;
        }
        this.msgs.delete(msgId);
        return entry;
    }
}
|
|
|
|
/**
 * Stub injected by @rollup/plugin-commonjs for dynamic `require` calls that
 * could not be resolved at bundle time.
 *
 * @param {string} path - The module id that was requested.
 * @throws {Error} Always.
 */
function commonjsRequire(path) {
    throw new Error(`Could not dynamically require "${path}". Please configure the dynamicRequireTargets or/and ignoreDynamicRequires option of @rollup/plugin-commonjs appropriately for this require call to work.`);
}
|
|
|
|
// CommonJS module shells for the vendored protobufjs build. Each `require*`
// factory below fills its shell in on first call (lazy, guarded by a
// `hasRequired*` flag) and then returns the cached exports.
var rpc$1 = {exports: {}};
var minimal$1 = {exports: {}};
var indexMinimal = {};
var minimal = {};
|
|
|
|
// Lazy-init cache for the vendored protobufjs LongBits module.
var longbits;
var hasRequiredLongbits;

function requireLongbits () {
    if (hasRequiredLongbits) return longbits;
    hasRequiredLongbits = 1;
    // LongBits is a function declaration below, so this hoisted reference is safe.
    longbits = LongBits;

    var util = requireMinimal$1();

    /**
     * Constructs new long bits.
     * @classdesc Helper class for working with the low and high bits of a 64 bit value.
     * @memberof util
     * @constructor
     * @param {number} lo Low 32 bits, unsigned
     * @param {number} hi High 32 bits, unsigned
     */
    function LongBits(lo, hi) {

        // note that the casts below are theoretically unnecessary as of today, but older statically
        // generated converter code might still call the ctor with signed 32bits. kept for compat.

        /**
         * Low bits.
         * @type {number}
         */
        this.lo = lo >>> 0;

        /**
         * High bits.
         * @type {number}
         */
        this.hi = hi >>> 0;
    }

    /**
     * Zero bits (shared singleton with short-circuited methods).
     * @memberof util.LongBits
     * @type {util.LongBits}
     */
    var zero = LongBits.zero = new LongBits(0, 0);

    zero.toNumber = function() { return 0; };
    zero.zzEncode = zero.zzDecode = function() { return this; };
    zero.length = function() { return 1; };

    /**
     * Zero hash (8 NUL characters — the hash form of zero).
     * @memberof util.LongBits
     * @type {string}
     */
    var zeroHash = LongBits.zeroHash = "\0\0\0\0\0\0\0\0";

    /**
     * Constructs new long bits from the specified number.
     * @param {number} value Value
     * @returns {util.LongBits} Instance
     */
    LongBits.fromNumber = function fromNumber(value) {
        if (value === 0)
            return zero;
        var sign = value < 0;
        if (sign)
            value = -value;
        // Split the magnitude into unsigned 32-bit halves.
        var lo = value >>> 0,
            hi = (value - lo) / 4294967296 >>> 0;
        // Negative values: two's-complement negate (invert then add one,
        // carrying from lo into hi).
        if (sign) {
            hi = ~hi >>> 0;
            lo = ~lo >>> 0;
            if (++lo > 4294967295) {
                lo = 0;
                if (++hi > 4294967295)
                    hi = 0;
            }
        }
        return new LongBits(lo, hi);
    };

    /**
     * Constructs new long bits from a number, long or string.
     * @param {Long|number|string} value Value
     * @returns {util.LongBits} Instance
     */
    LongBits.from = function from(value) {
        if (typeof value === "number")
            return LongBits.fromNumber(value);
        if (util.isString(value)) {
            /* istanbul ignore else */
            if (util.Long)
                value = util.Long.fromString(value);
            else
                return LongBits.fromNumber(parseInt(value, 10));
        }
        // Long-like object (or the Long parsed above).
        return value.low || value.high ? new LongBits(value.low >>> 0, value.high >>> 0) : zero;
    };

    /**
     * Converts this long bits to a possibly unsafe JavaScript number
     * (precision may be lost beyond 2^53).
     * @param {boolean} [unsigned=false] Whether unsigned or not
     * @returns {number} Possibly unsafe number
     */
    LongBits.prototype.toNumber = function toNumber(unsigned) {
        // Signed and negative (high sign bit set): two's-complement negate
        // first, then return the negated magnitude.
        if (!unsigned && this.hi >>> 31) {
            var lo = ~this.lo + 1 >>> 0,
                hi = ~this.hi >>> 0;
            if (!lo)
                hi = hi + 1 >>> 0;
            return -(lo + hi * 4294967296);
        }
        return this.lo + this.hi * 4294967296;
    };

    /**
     * Converts this long bits to a long.
     * @param {boolean} [unsigned=false] Whether unsigned or not
     * @returns {Long} Long
     */
    LongBits.prototype.toLong = function toLong(unsigned) {
        return util.Long
            ? new util.Long(this.lo | 0, this.hi | 0, Boolean(unsigned))
            /* istanbul ignore next */
            : { low: this.lo | 0, high: this.hi | 0, unsigned: Boolean(unsigned) };
    };

    var charCodeAt = String.prototype.charCodeAt;

    /**
     * Constructs new long bits from the specified 8 characters long hash
     * (each char holds one byte, little-endian within each 32-bit half).
     * @param {string} hash Hash
     * @returns {util.LongBits} Bits
     */
    LongBits.fromHash = function fromHash(hash) {
        if (hash === zeroHash)
            return zero;
        return new LongBits(
            ( charCodeAt.call(hash, 0)
            | charCodeAt.call(hash, 1) << 8
            | charCodeAt.call(hash, 2) << 16
            | charCodeAt.call(hash, 3) << 24) >>> 0
        ,
            ( charCodeAt.call(hash, 4)
            | charCodeAt.call(hash, 5) << 8
            | charCodeAt.call(hash, 6) << 16
            | charCodeAt.call(hash, 7) << 24) >>> 0
        );
    };

    /**
     * Converts this long bits to a 8 characters long hash.
     * @returns {string} Hash
     */
    LongBits.prototype.toHash = function toHash() {
        return String.fromCharCode(
            this.lo        & 255,
            this.lo >>>  8 & 255,
            this.lo >>> 16 & 255,
            this.lo >>> 24      ,
            this.hi        & 255,
            this.hi >>>  8 & 255,
            this.hi >>> 16 & 255,
            this.hi >>> 24
        );
    };

    /**
     * Zig-zag encodes this long bits in place (maps signed values to
     * unsigned so small negatives stay small as varints).
     * @returns {util.LongBits} `this`
     */
    LongBits.prototype.zzEncode = function zzEncode() {
        // mask is all-ones for negative values, zero otherwise.
        var mask =   this.hi >> 31;
        this.hi  = ((this.hi << 1 | this.lo >>> 31) ^ mask) >>> 0;
        this.lo  = ( this.lo << 1                   ^ mask) >>> 0;
        return this;
    };

    /**
     * Zig-zag decodes this long bits in place (inverse of zzEncode).
     * @returns {util.LongBits} `this`
     */
    LongBits.prototype.zzDecode = function zzDecode() {
        // mask is all-ones when the original value was negative (lowest bit set).
        var mask = -(this.lo & 1);
        this.lo  = ((this.lo >>> 1 | this.hi << 31) ^ mask) >>> 0;
        this.hi  = ( this.hi >>> 1                  ^ mask) >>> 0;
        return this;
    };

    /**
     * Calculates the length of this longbits when encoded as a varint
     * (1..10 bytes, 7 payload bits per byte).
     * @returns {number} Length
     */
    LongBits.prototype.length = function length() {
        var part0 =  this.lo,
            part1 = (this.lo >>> 28 | this.hi << 4) >>> 0,
            part2 =  this.hi >>> 24;
        return part2 === 0
             ? part1 === 0
               ? part0 < 16384
                 ? part0 < 128 ? 1 : 2
                 : part0 < 2097152 ? 3 : 4
               : part1 < 16384
                 ? part1 < 128 ? 5 : 6
                 : part1 < 2097152 ? 7 : 8
             : part2 < 128 ? 9 : 10;
    };

    return longbits;
}
|
|
|
|
var hasRequiredMinimal$1;
|
|
|
|
function requireMinimal$1 () {
|
|
if (hasRequiredMinimal$1) return minimal;
|
|
hasRequiredMinimal$1 = 1;
|
|
(function (exports) {
|
|
var util = exports;
|
|
|
|
// used to return a Promise where callback is omitted
|
|
util.asPromise = requireAspromise();
|
|
|
|
// converts to / from base64 encoded strings
|
|
util.base64 = requireBase64();
|
|
|
|
// base class of rpc.Service
|
|
util.EventEmitter = requireEventemitter();
|
|
|
|
// float handling accross browsers
|
|
util.float = requireFloat();
|
|
|
|
// requires modules optionally and hides the call from bundlers
|
|
util.inquire = requireInquire();
|
|
|
|
// converts to / from utf8 encoded strings
|
|
util.utf8 = requireUtf8();
|
|
|
|
// provides a node-like buffer pool in the browser
|
|
util.pool = requirePool();
|
|
|
|
// utility to work with the low and high bits of a 64 bit value
|
|
util.LongBits = requireLongbits();
|
|
|
|
/**
|
|
* Whether running within node or not.
|
|
* @memberof util
|
|
* @type {boolean}
|
|
*/
|
|
util.isNode = Boolean(typeof commonjsGlobal !== "undefined"
|
|
&& commonjsGlobal
|
|
&& commonjsGlobal.process
|
|
&& commonjsGlobal.process.versions
|
|
&& commonjsGlobal.process.versions.node);
|
|
|
|
/**
|
|
* Global object reference.
|
|
* @memberof util
|
|
* @type {Object}
|
|
*/
|
|
util.global = util.isNode && commonjsGlobal
|
|
|| typeof window !== "undefined" && window
|
|
|| typeof self !== "undefined" && self
|
|
|| commonjsGlobal; // eslint-disable-line no-invalid-this
|
|
|
|
/**
|
|
* An immuable empty array.
|
|
* @memberof util
|
|
* @type {Array.<*>}
|
|
* @const
|
|
*/
|
|
util.emptyArray = Object.freeze ? Object.freeze([]) : /* istanbul ignore next */ []; // used on prototypes
|
|
|
|
/**
|
|
* An immutable empty object.
|
|
* @type {Object}
|
|
* @const
|
|
*/
|
|
util.emptyObject = Object.freeze ? Object.freeze({}) : /* istanbul ignore next */ {}; // used on prototypes
|
|
|
|
/**
|
|
* Tests if the specified value is an integer.
|
|
* @function
|
|
* @param {*} value Value to test
|
|
* @returns {boolean} `true` if the value is an integer
|
|
*/
|
|
util.isInteger = Number.isInteger || /* istanbul ignore next */ function isInteger(value) {
|
|
return typeof value === "number" && isFinite(value) && Math.floor(value) === value;
|
|
};
|
|
|
|
/**
|
|
* Tests if the specified value is a string.
|
|
* @param {*} value Value to test
|
|
* @returns {boolean} `true` if the value is a string
|
|
*/
|
|
util.isString = function isString(value) {
|
|
return typeof value === "string" || value instanceof String;
|
|
};
|
|
|
|
/**
|
|
* Tests if the specified value is a non-null object.
|
|
* @param {*} value Value to test
|
|
* @returns {boolean} `true` if the value is a non-null object
|
|
*/
|
|
util.isObject = function isObject(value) {
|
|
return value && typeof value === "object";
|
|
};
|
|
|
|
/**
|
|
* Checks if a property on a message is considered to be present.
|
|
* This is an alias of {@link util.isSet}.
|
|
* @function
|
|
* @param {Object} obj Plain object or message instance
|
|
* @param {string} prop Property name
|
|
* @returns {boolean} `true` if considered to be present, otherwise `false`
|
|
*/
|
|
util.isset =
|
|
|
|
/**
|
|
* Checks if a property on a message is considered to be present.
|
|
* @param {Object} obj Plain object or message instance
|
|
* @param {string} prop Property name
|
|
* @returns {boolean} `true` if considered to be present, otherwise `false`
|
|
*/
|
|
util.isSet = function isSet(obj, prop) {
|
|
var value = obj[prop];
|
|
if (value != null && obj.hasOwnProperty(prop)) // eslint-disable-line eqeqeq, no-prototype-builtins
|
|
return typeof value !== "object" || (Array.isArray(value) ? value.length : Object.keys(value).length) > 0;
|
|
return false;
|
|
};
|
|
|
|
/**
|
|
* Any compatible Buffer instance.
|
|
* This is a minimal stand-alone definition of a Buffer instance. The actual type is that exported by node's typings.
|
|
* @interface Buffer
|
|
* @extends Uint8Array
|
|
*/
|
|
|
|
/**
|
|
* Node's Buffer class if available.
|
|
* @type {Constructor<Buffer>}
|
|
*/
|
|
util.Buffer = (function() {
|
|
try {
|
|
var Buffer = util.inquire("buffer").Buffer;
|
|
// refuse to use non-node buffers if not explicitly assigned (perf reasons):
|
|
return Buffer.prototype.utf8Write ? Buffer : /* istanbul ignore next */ null;
|
|
} catch (e) {
|
|
/* istanbul ignore next */
|
|
return null;
|
|
}
|
|
})();
|
|
|
|
// Internal alias of or polyfull for Buffer.from.
|
|
util._Buffer_from = null;
|
|
|
|
// Internal alias of or polyfill for Buffer.allocUnsafe.
|
|
util._Buffer_allocUnsafe = null;
|
|
|
|
/**
|
|
* Creates a new buffer of whatever type supported by the environment.
|
|
* @param {number|number[]} [sizeOrArray=0] Buffer size or number array
|
|
* @returns {Uint8Array|Buffer} Buffer
|
|
*/
|
|
util.newBuffer = function newBuffer(sizeOrArray) {
|
|
/* istanbul ignore next */
|
|
return typeof sizeOrArray === "number"
|
|
? util.Buffer
|
|
? util._Buffer_allocUnsafe(sizeOrArray)
|
|
: new util.Array(sizeOrArray)
|
|
: util.Buffer
|
|
? util._Buffer_from(sizeOrArray)
|
|
: typeof Uint8Array === "undefined"
|
|
? sizeOrArray
|
|
: new Uint8Array(sizeOrArray);
|
|
};
|
|
|
|
/**
|
|
* Array implementation used in the browser. `Uint8Array` if supported, otherwise `Array`.
|
|
* @type {Constructor<Uint8Array>}
|
|
*/
|
|
util.Array = typeof Uint8Array !== "undefined" ? Uint8Array /* istanbul ignore next */ : Array;
|
|
|
|
/**
|
|
* Any compatible Long instance.
|
|
* This is a minimal stand-alone definition of a Long instance. The actual type is that exported by long.js.
|
|
* @interface Long
|
|
* @property {number} low Low bits
|
|
* @property {number} high High bits
|
|
* @property {boolean} unsigned Whether unsigned or not
|
|
*/
|
|
|
|
/**
|
|
* Long.js's Long class if available.
|
|
* @type {Constructor<Long>}
|
|
*/
|
|
util.Long = /* istanbul ignore next */ util.global.dcodeIO && /* istanbul ignore next */ util.global.dcodeIO.Long
|
|
|| /* istanbul ignore next */ util.global.Long
|
|
|| util.inquire("long");
|
|
|
|
/**
|
|
* Regular expression used to verify 2 bit (`bool`) map keys.
|
|
* @type {RegExp}
|
|
* @const
|
|
*/
|
|
util.key2Re = /^true|false|0|1$/;
|
|
|
|
/**
|
|
* Regular expression used to verify 32 bit (`int32` etc.) map keys.
|
|
* @type {RegExp}
|
|
* @const
|
|
*/
|
|
util.key32Re = /^-?(?:0|[1-9][0-9]*)$/;
|
|
|
|
/**
|
|
* Regular expression used to verify 64 bit (`int64` etc.) map keys.
|
|
* @type {RegExp}
|
|
* @const
|
|
*/
|
|
util.key64Re = /^(?:[\\x00-\\xff]{8}|-?(?:0|[1-9][0-9]*))$/;
|
|
|
|
/**
|
|
* Converts a number or long to an 8 characters long hash string.
|
|
* @param {Long|number} value Value to convert
|
|
* @returns {string} Hash
|
|
*/
|
|
util.longToHash = function longToHash(value) {
|
|
return value
|
|
? util.LongBits.from(value).toHash()
|
|
: util.LongBits.zeroHash;
|
|
};
|
|
|
|
/**
 * Converts an 8 characters long hash string back to a long or number.
 * @param {string} hash Hash
 * @param {boolean} [unsigned=false] Whether unsigned or not
 * @returns {Long|number} Original value
 */
util.longFromHash = function longFromHash(hash, unsigned) {
    var decoded = util.LongBits.fromHash(hash);
    // Prefer a proper Long instance when a long library is present,
    // otherwise fall back to a (possibly precision-losing) JS number.
    return util.Long
        ? util.Long.fromBits(decoded.lo, decoded.hi, unsigned)
        : decoded.toNumber(Boolean(unsigned));
};
|
|
|
|
/**
 * Merges the properties of the source object into the destination object.
 * @memberof util
 * @param {Object.<string,*>} dst Destination object
 * @param {Object.<string,*>} src Source object
 * @param {boolean} [ifNotSet=false] Merges only if the key is not already set
 * @returns {Object.<string,*>} Destination object
 */
function merge(dst, src, ifNotSet) { // used by converters
    var keys = Object.keys(src);
    for (var index = 0; index < keys.length; ++index) {
        var key = keys[index];
        // Copy unconditionally unless ifNotSet is requested and the
        // destination already holds a defined value for this key.
        if (!ifNotSet || dst[key] === undefined)
            dst[key] = src[key];
    }
    return dst;
}
|
|
|
|
util.merge = merge;
|
|
|
|
/**
 * Converts the first character of a string to lower case.
 * @param {string} str String to convert
 * @returns {string} Converted string
 */
util.lcFirst = function lcFirst(str) {
    var head = str.charAt(0);
    return head.toLowerCase() + str.substring(1);
};
|
|
|
|
/**
 * Creates a custom error constructor.
 * @memberof util
 * @param {string} name Error name
 * @returns {Constructor<Error>} Custom error constructor
 */
function newError(name) {

    function CustomError(message, properties) {

        // Support invocation without `new`.
        if (!(this instanceof CustomError))
            return new CustomError(message, properties);

        // Error.call(this, message) would merely create and return a fresh
        // Error (the Error ctor can be called as a plain function), so the
        // message is exposed through a getter instead.
        Object.defineProperty(this, "message", { get: function() { return message; } });

        /* istanbul ignore next */
        if (Error.captureStackTrace) // node
            Error.captureStackTrace(this, CustomError);
        else
            Object.defineProperty(this, "stack", { value: new Error().stack || "" });

        if (properties)
            merge(this, properties);
    }

    var proto = Object.create(Error.prototype);
    proto.constructor = CustomError;
    CustomError.prototype = proto;

    Object.defineProperty(proto, "name", { get: function() { return name; } });

    proto.toString = function toString() {
        return this.name + ": " + this.message;
    };

    return CustomError;
}
|
|
|
|
util.newError = newError;
|
|
|
|
/**
|
|
* Constructs a new protocol error.
|
|
* @classdesc Error subclass indicating a protocol specifc error.
|
|
* @memberof util
|
|
* @extends Error
|
|
* @template T extends Message<T>
|
|
* @constructor
|
|
* @param {string} message Error message
|
|
* @param {Object.<string,*>} [properties] Additional properties
|
|
* @example
|
|
* try {
|
|
* MyMessage.decode(someBuffer); // throws if required fields are missing
|
|
* } catch (e) {
|
|
* if (e instanceof ProtocolError && e.instance)
|
|
* console.log("decoded so far: " + JSON.stringify(e.instance));
|
|
* }
|
|
*/
|
|
util.ProtocolError = newError("ProtocolError");
|
|
|
|
/**
|
|
* So far decoded message instance.
|
|
* @name util.ProtocolError#instance
|
|
* @type {Message<T>}
|
|
*/
|
|
|
|
/**
|
|
* A OneOf getter as returned by {@link util.oneOfGetter}.
|
|
* @typedef OneOfGetter
|
|
* @type {function}
|
|
* @returns {string|undefined} Set field name, if any
|
|
*/
|
|
|
|
/**
 * Builds a getter for a oneof's present field name.
 * @param {string[]} fieldNames Field names
 * @returns {OneOfGetter} Unbound getter
 */
util.oneOfGetter = function getOneOf(fieldNames) {
    var fieldMap = {};
    for (var index = 0; index < fieldNames.length; ++index)
        fieldMap[fieldNames[index]] = 1;

    /**
     * @returns {string|undefined} Set field name, if any
     * @this Object
     * @ignore
     */
    return function() { // eslint-disable-line consistent-return
        // Walk own keys back to front so the most recently assigned
        // member of the oneof wins.
        var keys = Object.keys(this);
        for (var i = keys.length - 1; i > -1; --i) {
            var key = keys[i];
            if (fieldMap[key] === 1 && this[key] !== undefined && this[key] !== null)
                return key;
        }
    };
};
|
|
|
|
/**
|
|
* A OneOf setter as returned by {@link util.oneOfSetter}.
|
|
* @typedef OneOfSetter
|
|
* @type {function}
|
|
* @param {string|undefined} value Field name
|
|
* @returns {undefined}
|
|
*/
|
|
|
|
/**
 * Builds a setter for a oneof's present field name.
 * @param {string[]} fieldNames Field names
 * @returns {OneOfSetter} Unbound setter
 */
util.oneOfSetter = function setOneOf(fieldNames) {

    /**
     * @param {string} name Field name
     * @returns {undefined}
     * @this Object
     * @ignore
     */
    return function(name) {
        // Remove every other member of the oneof so that at most the
        // named field remains set on the instance.
        for (var index = 0; index < fieldNames.length; ++index) {
            var candidate = fieldNames[index];
            if (candidate !== name)
                delete this[candidate];
        }
    };
};
|
|
|
|
/**
|
|
* Default conversion options used for {@link Message#toJSON} implementations.
|
|
*
|
|
* These options are close to proto3's JSON mapping with the exception that internal types like Any are handled just like messages. More precisely:
|
|
*
|
|
* - Longs become strings
|
|
* - Enums become string keys
|
|
* - Bytes become base64 encoded strings
|
|
* - (Sub-)Messages become plain objects
|
|
* - Maps become plain objects with all string keys
|
|
* - Repeated fields become arrays
|
|
* - NaN and Infinity for float and double fields become strings
|
|
*
|
|
* @type {IConversionOptions}
|
|
* @see https://developers.google.com/protocol-buffers/docs/proto3?hl=en#json
|
|
*/
|
|
util.toJSONOptions = {
|
|
longs: String,
|
|
enums: String,
|
|
bytes: String,
|
|
json: true
|
|
};
|
|
|
|
// Sets up buffer utility according to the environment (called in index-minimal).
// Installs util._Buffer_from / util._Buffer_allocUnsafe, or nulls them out when
// no Buffer implementation is available (plain browser environments).
util._configure = function() {
    var Buffer = util.Buffer;
    /* istanbul ignore if */
    if (!Buffer) {
        // No Buffer support: downstream code falls back to Uint8Array/Array.
        util._Buffer_from = util._Buffer_allocUnsafe = null;
        return;
    }
    // because node 4.x buffers are incompatible & immutable
    // see: https://github.com/dcodeIO/protobuf.js/pull/665
    // Buffer.from === Uint8Array.from indicates the inherited (unsafe) variant,
    // in which case the deprecated `new Buffer(...)` constructor is used instead.
    util._Buffer_from = Buffer.from !== Uint8Array.from && Buffer.from ||
        /* istanbul ignore next */
        function Buffer_from(value, encoding) {
            return new Buffer(value, encoding);
        };
    util._Buffer_allocUnsafe = Buffer.allocUnsafe ||
        /* istanbul ignore next */
        function Buffer_allocUnsafe(size) {
            return new Buffer(size);
        };
};
|
|
} (minimal));
|
|
return minimal;
|
|
}
|
|
|
|
var writer;
|
|
var hasRequiredWriter;
|
|
|
|
function requireWriter () {
|
|
if (hasRequiredWriter) return writer;
|
|
hasRequiredWriter = 1;
|
|
writer = Writer;
|
|
|
|
var util = requireMinimal$1();
|
|
|
|
var BufferWriter; // cyclic
|
|
|
|
var LongBits = util.LongBits,
|
|
base64 = util.base64,
|
|
utf8 = util.utf8;
|
|
|
|
/**
 * Constructs a new writer operation instance.
 * @classdesc Scheduled writer operation: a node in the writer's linked list
 * of pending writes, executed when the buffer is finally assembled.
 * @constructor
 * @param {function(*, Uint8Array, number)} fn Function to call
 * @param {number} len Value byte length
 * @param {*} val Value to write
 * @ignore
 */
function Op(fn, len, val) {
    this.fn = fn;          // writer function, invoked as fn(val, buf, pos)
    this.len = len;        // number of bytes this operation emits
    this.next = undefined; // next operation in the linked list, if any
    this.val = val;        // value to write; type varies per operation
}
|
|
|
|
/* istanbul ignore next */
|
|
function noop() {} // eslint-disable-line no-empty-function
|
|
|
|
/**
 * Constructs a new writer state instance.
 * @classdesc Copied writer state, pushed by {@link Writer#fork} and restored
 * by {@link Writer#reset} / {@link Writer#ldelim}.
 * @memberof Writer
 * @constructor
 * @param {Writer} writer Writer to copy state from
 * @ignore
 */
function State(writer) {
    this.head = writer.head;   // current head operation
    this.tail = writer.tail;   // current tail operation
    this.len = writer.len;     // current buffer length
    this.next = writer.states; // previously pushed state, if any
}
|
|
|
|
/**
|
|
* Constructs a new writer instance.
|
|
* @classdesc Wire format writer using `Uint8Array` if available, otherwise `Array`.
|
|
* @constructor
|
|
*/
|
|
function Writer() {
|
|
|
|
/**
|
|
* Current length.
|
|
* @type {number}
|
|
*/
|
|
this.len = 0;
|
|
|
|
/**
|
|
* Operations head.
|
|
* @type {Object}
|
|
*/
|
|
this.head = new Op(noop, 0, 0);
|
|
|
|
/**
|
|
* Operations tail
|
|
* @type {Object}
|
|
*/
|
|
this.tail = this.head;
|
|
|
|
/**
|
|
* Linked forked states.
|
|
* @type {Object|null}
|
|
*/
|
|
this.states = null;
|
|
|
|
// When a value is written, the writer calculates its byte length and puts it into a linked
|
|
// list of operations to perform when finish() is called. This both allows us to allocate
|
|
// buffers of the exact required size and reduces the amount of work we have to do compared
|
|
// to first calculating over objects and then encoding over objects. In our case, the encoding
|
|
// part is just a linked list walk calling operations with already prepared values.
|
|
}
|
|
|
|
var create = function create() {
|
|
return util.Buffer
|
|
? function create_buffer_setup() {
|
|
return (Writer.create = function create_buffer() {
|
|
return new BufferWriter();
|
|
})();
|
|
}
|
|
/* istanbul ignore next */
|
|
: function create_array() {
|
|
return new Writer();
|
|
};
|
|
};
|
|
|
|
/**
|
|
* Creates a new writer.
|
|
* @function
|
|
* @returns {BufferWriter|Writer} A {@link BufferWriter} when Buffers are supported, otherwise a {@link Writer}
|
|
*/
|
|
Writer.create = create();
|
|
|
|
/**
|
|
* Allocates a buffer of the specified size.
|
|
* @param {number} size Buffer size
|
|
* @returns {Uint8Array} Buffer
|
|
*/
|
|
Writer.alloc = function alloc(size) {
|
|
return new util.Array(size);
|
|
};
|
|
|
|
// Use Uint8Array buffer pool in the browser, just like node does with buffers
|
|
/* istanbul ignore else */
|
|
if (util.Array !== Array)
|
|
Writer.alloc = util.pool(Writer.alloc, util.Array.prototype.subarray);
|
|
|
|
/**
|
|
* Pushes a new operation to the queue.
|
|
* @param {function(Uint8Array, number, *)} fn Function to call
|
|
* @param {number} len Value byte length
|
|
* @param {number} val Value to write
|
|
* @returns {Writer} `this`
|
|
* @private
|
|
*/
|
|
Writer.prototype._push = function push(fn, len, val) {
|
|
this.tail = this.tail.next = new Op(fn, len, val);
|
|
this.len += len;
|
|
return this;
|
|
};
|
|
|
|
// Writes a single byte, truncating the value to its lowest 8 bits.
function writeByte(val, buf, pos) {
    buf[pos] = val & 0xff;
}
|
|
|
|
// Writes `val` as a base-128 varint of up to 5 bytes: 7 payload bits per
// byte, with the continuation bit (0x80) set on every byte but the last.
function writeVarint32(val, buf, pos) {
    while (val > 127) {
        buf[pos++] = (val & 127) | 128;
        val >>>= 7;
    }
    buf[pos] = val;
}
|
|
|
|
/**
|
|
* Constructs a new varint writer operation instance.
|
|
* @classdesc Scheduled varint writer operation.
|
|
* @extends Op
|
|
* @constructor
|
|
* @param {number} len Value byte length
|
|
* @param {number} val Value to write
|
|
* @ignore
|
|
*/
|
|
function VarintOp(len, val) {
|
|
this.len = len;
|
|
this.next = undefined;
|
|
this.val = val;
|
|
}
|
|
|
|
VarintOp.prototype = Object.create(Op.prototype);
|
|
VarintOp.prototype.fn = writeVarint32;
|
|
|
|
/**
 * Writes an unsigned 32 bit value as a varint.
 * @param {number} value Value to write
 * @returns {Writer} `this`
 */
Writer.prototype.uint32 = function write_uint32(value) {
    // here, the call to this.push has been inlined and a varint specific Op subclass is used.
    // uint32 is by far the most frequently used operation and benefits significantly from this.
    // The byte length is picked by comparing against powers of 128
    // (128^1, 128^2, ...), i.e. how many 7-bit groups the value needs.
    this.len += (this.tail = this.tail.next = new VarintOp(
        (value = value >>> 0) // coerce to unsigned 32 bit first
                < 128       ? 1
        : value < 16384     ? 2
        : value < 2097152   ? 3
        : value < 268435456 ? 4
        :                     5,
    value)).len;
    return this;
};
|
|
|
|
/**
|
|
* Writes a signed 32 bit value as a varint.
|
|
* @function
|
|
* @param {number} value Value to write
|
|
* @returns {Writer} `this`
|
|
*/
|
|
Writer.prototype.int32 = function write_int32(value) {
|
|
return value < 0
|
|
? this._push(writeVarint64, 10, LongBits.fromNumber(value)) // 10 bytes per spec
|
|
: this.uint32(value);
|
|
};
|
|
|
|
/**
|
|
* Writes a 32 bit value as a varint, zig-zag encoded.
|
|
* @param {number} value Value to write
|
|
* @returns {Writer} `this`
|
|
*/
|
|
Writer.prototype.sint32 = function write_sint32(value) {
|
|
return this.uint32((value << 1 ^ value >> 31) >>> 0);
|
|
};
|
|
|
|
// Writes a 64 bit value, represented as LongBits ({lo, hi} unsigned 32 bit
// halves), as a base-128 varint of up to 10 bytes. Mutates `val` in place.
function writeVarint64(val, buf, pos) {
    // While high bits remain, emit 7 bits from the low word and shift the
    // whole 64 bit quantity right by 7, carrying bits across the word boundary.
    while (val.hi) {
        buf[pos++] = (val.lo & 127) | 128;
        val.lo = ((val.lo >>> 7) | (val.hi << 25)) >>> 0;
        val.hi >>>= 7;
    }
    // The remaining low word encodes like a plain 32 bit varint.
    while (val.lo > 127) {
        buf[pos++] = (val.lo & 127) | 128;
        val.lo = val.lo >>> 7;
    }
    buf[pos++] = val.lo;
}
|
|
|
|
/**
|
|
* Writes an unsigned 64 bit value as a varint.
|
|
* @param {Long|number|string} value Value to write
|
|
* @returns {Writer} `this`
|
|
* @throws {TypeError} If `value` is a string and no long library is present.
|
|
*/
|
|
Writer.prototype.uint64 = function write_uint64(value) {
|
|
var bits = LongBits.from(value);
|
|
return this._push(writeVarint64, bits.length(), bits);
|
|
};
|
|
|
|
/**
|
|
* Writes a signed 64 bit value as a varint.
|
|
* @function
|
|
* @param {Long|number|string} value Value to write
|
|
* @returns {Writer} `this`
|
|
* @throws {TypeError} If `value` is a string and no long library is present.
|
|
*/
|
|
Writer.prototype.int64 = Writer.prototype.uint64;
|
|
|
|
/**
|
|
* Writes a signed 64 bit value as a varint, zig-zag encoded.
|
|
* @param {Long|number|string} value Value to write
|
|
* @returns {Writer} `this`
|
|
* @throws {TypeError} If `value` is a string and no long library is present.
|
|
*/
|
|
Writer.prototype.sint64 = function write_sint64(value) {
|
|
var bits = LongBits.from(value).zzEncode();
|
|
return this._push(writeVarint64, bits.length(), bits);
|
|
};
|
|
|
|
/**
|
|
* Writes a boolish value as a varint.
|
|
* @param {boolean} value Value to write
|
|
* @returns {Writer} `this`
|
|
*/
|
|
Writer.prototype.bool = function write_bool(value) {
|
|
return this._push(writeByte, 1, value ? 1 : 0);
|
|
};
|
|
|
|
// Writes `val` as 4 little-endian bytes (least significant byte first).
function writeFixed32(val, buf, pos) {
    buf[pos++] = val & 0xff;
    buf[pos++] = (val >>> 8) & 0xff;
    buf[pos++] = (val >>> 16) & 0xff;
    buf[pos] = val >>> 24;
}
|
|
|
|
/**
|
|
* Writes an unsigned 32 bit value as fixed 32 bits.
|
|
* @param {number} value Value to write
|
|
* @returns {Writer} `this`
|
|
*/
|
|
Writer.prototype.fixed32 = function write_fixed32(value) {
|
|
return this._push(writeFixed32, 4, value >>> 0);
|
|
};
|
|
|
|
/**
|
|
* Writes a signed 32 bit value as fixed 32 bits.
|
|
* @function
|
|
* @param {number} value Value to write
|
|
* @returns {Writer} `this`
|
|
*/
|
|
Writer.prototype.sfixed32 = Writer.prototype.fixed32;
|
|
|
|
/**
|
|
* Writes an unsigned 64 bit value as fixed 64 bits.
|
|
* @param {Long|number|string} value Value to write
|
|
* @returns {Writer} `this`
|
|
* @throws {TypeError} If `value` is a string and no long library is present.
|
|
*/
|
|
Writer.prototype.fixed64 = function write_fixed64(value) {
|
|
var bits = LongBits.from(value);
|
|
return this._push(writeFixed32, 4, bits.lo)._push(writeFixed32, 4, bits.hi);
|
|
};
|
|
|
|
/**
|
|
* Writes a signed 64 bit value as fixed 64 bits.
|
|
* @function
|
|
* @param {Long|number|string} value Value to write
|
|
* @returns {Writer} `this`
|
|
* @throws {TypeError} If `value` is a string and no long library is present.
|
|
*/
|
|
Writer.prototype.sfixed64 = Writer.prototype.fixed64;
|
|
|
|
/**
|
|
* Writes a float (32 bit).
|
|
* @function
|
|
* @param {number} value Value to write
|
|
* @returns {Writer} `this`
|
|
*/
|
|
Writer.prototype.float = function write_float(value) {
|
|
return this._push(util.float.writeFloatLE, 4, value);
|
|
};
|
|
|
|
/**
|
|
* Writes a double (64 bit float).
|
|
* @function
|
|
* @param {number} value Value to write
|
|
* @returns {Writer} `this`
|
|
*/
|
|
Writer.prototype.double = function write_double(value) {
|
|
return this._push(util.float.writeDoubleLE, 8, value);
|
|
};
|
|
|
|
var writeBytes = util.Array.prototype.set
|
|
? function writeBytes_set(val, buf, pos) {
|
|
buf.set(val, pos); // also works for plain array values
|
|
}
|
|
/* istanbul ignore next */
|
|
: function writeBytes_for(val, buf, pos) {
|
|
for (var i = 0; i < val.length; ++i)
|
|
buf[pos + i] = val[i];
|
|
};
|
|
|
|
/**
|
|
* Writes a sequence of bytes.
|
|
* @param {Uint8Array|string} value Buffer or base64 encoded string to write
|
|
* @returns {Writer} `this`
|
|
*/
|
|
Writer.prototype.bytes = function write_bytes(value) {
|
|
var len = value.length >>> 0;
|
|
if (!len)
|
|
return this._push(writeByte, 1, 0);
|
|
if (util.isString(value)) {
|
|
var buf = Writer.alloc(len = base64.length(value));
|
|
base64.decode(value, buf, 0);
|
|
value = buf;
|
|
}
|
|
return this.uint32(len)._push(writeBytes, len, value);
|
|
};
|
|
|
|
/**
|
|
* Writes a string.
|
|
* @param {string} value Value to write
|
|
* @returns {Writer} `this`
|
|
*/
|
|
Writer.prototype.string = function write_string(value) {
|
|
var len = utf8.length(value);
|
|
return len
|
|
? this.uint32(len)._push(utf8.write, len, value)
|
|
: this._push(writeByte, 1, 0);
|
|
};
|
|
|
|
/**
|
|
* Forks this writer's state by pushing it to a stack.
|
|
* Calling {@link Writer#reset|reset} or {@link Writer#ldelim|ldelim} resets the writer to the previous state.
|
|
* @returns {Writer} `this`
|
|
*/
|
|
Writer.prototype.fork = function fork() {
|
|
this.states = new State(this);
|
|
this.head = this.tail = new Op(noop, 0, 0);
|
|
this.len = 0;
|
|
return this;
|
|
};
|
|
|
|
/**
|
|
* Resets this instance to the last state.
|
|
* @returns {Writer} `this`
|
|
*/
|
|
Writer.prototype.reset = function reset() {
|
|
if (this.states) {
|
|
this.head = this.states.head;
|
|
this.tail = this.states.tail;
|
|
this.len = this.states.len;
|
|
this.states = this.states.next;
|
|
} else {
|
|
this.head = this.tail = new Op(noop, 0, 0);
|
|
this.len = 0;
|
|
}
|
|
return this;
|
|
};
|
|
|
|
/**
|
|
* Resets to the last state and appends the fork state's current write length as a varint followed by its operations.
|
|
* @returns {Writer} `this`
|
|
*/
|
|
Writer.prototype.ldelim = function ldelim() {
|
|
var head = this.head,
|
|
tail = this.tail,
|
|
len = this.len;
|
|
this.reset().uint32(len);
|
|
if (len) {
|
|
this.tail.next = head.next; // skip noop
|
|
this.tail = tail;
|
|
this.len += len;
|
|
}
|
|
return this;
|
|
};
|
|
|
|
/**
|
|
* Finishes the write operation.
|
|
* @returns {Uint8Array} Finished buffer
|
|
*/
|
|
Writer.prototype.finish = function finish() {
|
|
var head = this.head.next, // skip noop
|
|
buf = this.constructor.alloc(this.len),
|
|
pos = 0;
|
|
while (head) {
|
|
head.fn(head.val, buf, pos);
|
|
pos += head.len;
|
|
head = head.next;
|
|
}
|
|
// this.head = this.tail = null;
|
|
return buf;
|
|
};
|
|
|
|
Writer._configure = function(BufferWriter_) {
|
|
BufferWriter = BufferWriter_;
|
|
Writer.create = create();
|
|
BufferWriter._configure();
|
|
};
|
|
return writer;
|
|
}
|
|
|
|
var writer_buffer;
|
|
var hasRequiredWriter_buffer;
|
|
|
|
function requireWriter_buffer () {
|
|
if (hasRequiredWriter_buffer) return writer_buffer;
|
|
hasRequiredWriter_buffer = 1;
|
|
writer_buffer = BufferWriter;
|
|
|
|
// extends Writer
|
|
var Writer = requireWriter();
|
|
(BufferWriter.prototype = Object.create(Writer.prototype)).constructor = BufferWriter;
|
|
|
|
var util = requireMinimal$1();
|
|
|
|
/**
|
|
* Constructs a new buffer writer instance.
|
|
* @classdesc Wire format writer using node buffers.
|
|
* @extends Writer
|
|
* @constructor
|
|
*/
|
|
function BufferWriter() {
|
|
Writer.call(this);
|
|
}
|
|
|
|
BufferWriter._configure = function () {
|
|
/**
|
|
* Allocates a buffer of the specified size.
|
|
* @function
|
|
* @param {number} size Buffer size
|
|
* @returns {Buffer} Buffer
|
|
*/
|
|
BufferWriter.alloc = util._Buffer_allocUnsafe;
|
|
|
|
BufferWriter.writeBytesBuffer = util.Buffer && util.Buffer.prototype instanceof Uint8Array && util.Buffer.prototype.set.name === "set"
|
|
? function writeBytesBuffer_set(val, buf, pos) {
|
|
buf.set(val, pos); // faster than copy (requires node >= 4 where Buffers extend Uint8Array and set is properly inherited)
|
|
// also works for plain array values
|
|
}
|
|
/* istanbul ignore next */
|
|
: function writeBytesBuffer_copy(val, buf, pos) {
|
|
if (val.copy) // Buffer values
|
|
val.copy(buf, pos, 0, val.length);
|
|
else for (var i = 0; i < val.length;) // plain array values
|
|
buf[pos++] = val[i++];
|
|
};
|
|
};
|
|
|
|
|
|
/**
|
|
* @override
|
|
*/
|
|
BufferWriter.prototype.bytes = function write_bytes_buffer(value) {
|
|
if (util.isString(value))
|
|
value = util._Buffer_from(value, "base64");
|
|
var len = value.length >>> 0;
|
|
this.uint32(len);
|
|
if (len)
|
|
this._push(BufferWriter.writeBytesBuffer, len, value);
|
|
return this;
|
|
};
|
|
|
|
// Writes a UTF-8 string into a node Buffer at `pos`, choosing the fastest
// available encoder for the string length and Buffer implementation.
function writeStringBuffer(val, buf, pos) {
    if (val.length < 40) // plain js is faster for short strings (probably due to redundant assertions)
        util.utf8.write(val, buf, pos);
    else if (buf.utf8Write) // internal node fast path, when exposed
        buf.utf8Write(val, pos);
    else // generic Buffer API fallback
        buf.write(val, pos);
}
|
|
|
|
/**
|
|
* @override
|
|
*/
|
|
BufferWriter.prototype.string = function write_string_buffer(value) {
|
|
var len = util.Buffer.byteLength(value);
|
|
this.uint32(len);
|
|
if (len)
|
|
this._push(writeStringBuffer, len, value);
|
|
return this;
|
|
};
|
|
|
|
|
|
/**
|
|
* Finishes the write operation.
|
|
* @name BufferWriter#finish
|
|
* @function
|
|
* @returns {Buffer} Finished buffer
|
|
*/
|
|
|
|
BufferWriter._configure();
|
|
return writer_buffer;
|
|
}
|
|
|
|
var reader;
|
|
var hasRequiredReader;
|
|
|
|
function requireReader () {
|
|
if (hasRequiredReader) return reader;
|
|
hasRequiredReader = 1;
|
|
reader = Reader;
|
|
|
|
var util = requireMinimal$1();
|
|
|
|
var BufferReader; // cyclic
|
|
|
|
var LongBits = util.LongBits,
|
|
utf8 = util.utf8;
|
|
|
|
/* istanbul ignore next */
|
|
function indexOutOfRange(reader, writeLength) {
|
|
return RangeError("index out of range: " + reader.pos + " + " + (writeLength || 1) + " > " + reader.len);
|
|
}
|
|
|
|
/**
|
|
* Constructs a new reader instance using the specified buffer.
|
|
* @classdesc Wire format reader using `Uint8Array` if available, otherwise `Array`.
|
|
* @constructor
|
|
* @param {Uint8Array} buffer Buffer to read from
|
|
*/
|
|
function Reader(buffer) {
|
|
|
|
/**
|
|
* Read buffer.
|
|
* @type {Uint8Array}
|
|
*/
|
|
this.buf = buffer;
|
|
|
|
/**
|
|
* Read buffer position.
|
|
* @type {number}
|
|
*/
|
|
this.pos = 0;
|
|
|
|
/**
|
|
* Read buffer length.
|
|
* @type {number}
|
|
*/
|
|
this.len = buffer.length;
|
|
}
|
|
|
|
var create_array = typeof Uint8Array !== "undefined"
|
|
? function create_typed_array(buffer) {
|
|
if (buffer instanceof Uint8Array || Array.isArray(buffer))
|
|
return new Reader(buffer);
|
|
throw Error("illegal buffer");
|
|
}
|
|
/* istanbul ignore next */
|
|
: function create_array(buffer) {
|
|
if (Array.isArray(buffer))
|
|
return new Reader(buffer);
|
|
throw Error("illegal buffer");
|
|
};
|
|
|
|
var create = function create() {
|
|
return util.Buffer
|
|
? function create_buffer_setup(buffer) {
|
|
return (Reader.create = function create_buffer(buffer) {
|
|
return util.Buffer.isBuffer(buffer)
|
|
? new BufferReader(buffer)
|
|
/* istanbul ignore next */
|
|
: create_array(buffer);
|
|
})(buffer);
|
|
}
|
|
/* istanbul ignore next */
|
|
: create_array;
|
|
};
|
|
|
|
/**
|
|
* Creates a new reader using the specified buffer.
|
|
* @function
|
|
* @param {Uint8Array|Buffer} buffer Buffer to read from
|
|
* @returns {Reader|BufferReader} A {@link BufferReader} if `buffer` is a Buffer, otherwise a {@link Reader}
|
|
* @throws {Error} If `buffer` is not a valid buffer
|
|
*/
|
|
Reader.create = create();
|
|
|
|
Reader.prototype._slice = util.Array.prototype.subarray || /* istanbul ignore next */ util.Array.prototype.slice;
|
|
|
|
/**
 * Reads a varint as an unsigned 32 bit value.
 * Manually unrolled over the maximum 5 bytes a 32 bit varint can span;
 * each step ORs in 7 more payload bits and returns as soon as a byte
 * without the continuation bit (>= 128) is seen.
 * @function
 * @returns {number} Value read
 */
Reader.prototype.uint32 = (function read_uint32_setup() {
    var value = 4294967295; // optimizer type-hint, tends to deopt otherwise (?!)
    return function read_uint32() {
        value = (         this.buf[this.pos] & 127       ) >>> 0; if (this.buf[this.pos++] < 128) return value;
        value = (value | (this.buf[this.pos] & 127) <<  7) >>> 0; if (this.buf[this.pos++] < 128) return value;
        value = (value | (this.buf[this.pos] & 127) << 14) >>> 0; if (this.buf[this.pos++] < 128) return value;
        value = (value | (this.buf[this.pos] & 127) << 21) >>> 0; if (this.buf[this.pos++] < 128) return value;
        value = (value | (this.buf[this.pos] &  15) << 28) >>> 0; if (this.buf[this.pos++] < 128) return value;

        // 5 bytes consumed but the varint continues: skip the remaining
        // bytes of a (up to 10 byte) 64 bit varint, truncating to 32 bits.
        /* istanbul ignore if */
        if ((this.pos += 5) > this.len) {
            this.pos = this.len;
            throw indexOutOfRange(this, 10);
        }
        return value;
    };
})();
|
|
|
|
/**
|
|
* Reads a varint as a signed 32 bit value.
|
|
* @returns {number} Value read
|
|
*/
|
|
Reader.prototype.int32 = function read_int32() {
|
|
return this.uint32() | 0;
|
|
};
|
|
|
|
/**
|
|
* Reads a zig-zag encoded varint as a signed 32 bit value.
|
|
* @returns {number} Value read
|
|
*/
|
|
Reader.prototype.sint32 = function read_sint32() {
|
|
var value = this.uint32();
|
|
return value >>> 1 ^ -(value & 1) | 0;
|
|
};
|
|
|
|
/* eslint-disable no-invalid-this */
|
|
|
|
// Reads a varint of up to 10 bytes into a LongBits {lo, hi} pair.
// The 5th byte straddles the 32 bit word boundary: its low 4 bits complete
// `lo`, its upper 3 payload bits start `hi`. "Fast" routes skip the
// per-byte bounds check when enough bytes are known to remain.
function readLongVarint() {
    // tends to deopt with local vars for octet etc.
    var bits = new LongBits(0, 0);
    var i = 0;
    if (this.len - this.pos > 4) { // fast route (lo)
        for (; i < 4; ++i) {
            // 1st..4th
            bits.lo = (bits.lo | (this.buf[this.pos] & 127) << i * 7) >>> 0;
            if (this.buf[this.pos++] < 128)
                return bits;
        }
        // 5th: low 4 bits go to lo, remaining payload bits start hi
        bits.lo = (bits.lo | (this.buf[this.pos] & 127) << 28) >>> 0;
        bits.hi = (bits.hi | (this.buf[this.pos] & 127) >>  4) >>> 0;
        if (this.buf[this.pos++] < 128)
            return bits;
        i = 0;
    } else {
        for (; i < 3; ++i) {
            /* istanbul ignore if */
            if (this.pos >= this.len)
                throw indexOutOfRange(this);
            // 1st..3th
            bits.lo = (bits.lo | (this.buf[this.pos] & 127) << i * 7) >>> 0;
            if (this.buf[this.pos++] < 128)
                return bits;
        }
        // 4th: with at most 4 bytes available the varint must end here
        bits.lo = (bits.lo | (this.buf[this.pos++] & 127) << i * 7) >>> 0;
        return bits;
    }
    if (this.len - this.pos > 4) { // fast route (hi)
        for (; i < 5; ++i) {
            // 6th..10th
            bits.hi = (bits.hi | (this.buf[this.pos] & 127) << i * 7 + 3) >>> 0;
            if (this.buf[this.pos++] < 128)
                return bits;
        }
    } else {
        for (; i < 5; ++i) {
            /* istanbul ignore if */
            if (this.pos >= this.len)
                throw indexOutOfRange(this);
            // 6th..10th
            bits.hi = (bits.hi | (this.buf[this.pos] & 127) << i * 7 + 3) >>> 0;
            if (this.buf[this.pos++] < 128)
                return bits;
        }
    }
    /* istanbul ignore next */
    throw Error("invalid varint encoding");
}
|
|
|
|
/* eslint-enable no-invalid-this */
|
|
|
|
/**
|
|
* Reads a varint as a signed 64 bit value.
|
|
* @name Reader#int64
|
|
* @function
|
|
* @returns {Long} Value read
|
|
*/
|
|
|
|
/**
|
|
* Reads a varint as an unsigned 64 bit value.
|
|
* @name Reader#uint64
|
|
* @function
|
|
* @returns {Long} Value read
|
|
*/
|
|
|
|
/**
|
|
* Reads a zig-zag encoded varint as a signed 64 bit value.
|
|
* @name Reader#sint64
|
|
* @function
|
|
* @returns {Long} Value read
|
|
*/
|
|
|
|
/**
|
|
* Reads a varint as a boolean.
|
|
* @returns {boolean} Value read
|
|
*/
|
|
Reader.prototype.bool = function read_bool() {
|
|
return this.uint32() !== 0;
|
|
};
|
|
|
|
// Reassembles the 4 little-endian bytes ending at `end` (exclusive) into an
// unsigned 32 bit integer. Note that this takes `end`, not `pos`.
function readFixed32_end(buf, end) {
    return ((buf[end - 1] << 24)
          | (buf[end - 2] << 16)
          | (buf[end - 3] <<  8)
          |  buf[end - 4]) >>> 0;
}
|
|
|
|
/**
|
|
* Reads fixed 32 bits as an unsigned 32 bit integer.
|
|
* @returns {number} Value read
|
|
*/
|
|
Reader.prototype.fixed32 = function read_fixed32() {
|
|
|
|
/* istanbul ignore if */
|
|
if (this.pos + 4 > this.len)
|
|
throw indexOutOfRange(this, 4);
|
|
|
|
return readFixed32_end(this.buf, this.pos += 4);
|
|
};
|
|
|
|
/**
|
|
* Reads fixed 32 bits as a signed 32 bit integer.
|
|
* @returns {number} Value read
|
|
*/
|
|
Reader.prototype.sfixed32 = function read_sfixed32() {
|
|
|
|
/* istanbul ignore if */
|
|
if (this.pos + 4 > this.len)
|
|
throw indexOutOfRange(this, 4);
|
|
|
|
return readFixed32_end(this.buf, this.pos += 4) | 0;
|
|
};
|
|
|
|
/* eslint-disable no-invalid-this */
|
|
|
|
function readFixed64(/* this: Reader */) {
|
|
|
|
/* istanbul ignore if */
|
|
if (this.pos + 8 > this.len)
|
|
throw indexOutOfRange(this, 8);
|
|
|
|
return new LongBits(readFixed32_end(this.buf, this.pos += 4), readFixed32_end(this.buf, this.pos += 4));
|
|
}
|
|
|
|
/* eslint-enable no-invalid-this */
|
|
|
|
/**
|
|
* Reads fixed 64 bits.
|
|
* @name Reader#fixed64
|
|
* @function
|
|
* @returns {Long} Value read
|
|
*/
|
|
|
|
/**
|
|
* Reads zig-zag encoded fixed 64 bits.
|
|
* @name Reader#sfixed64
|
|
* @function
|
|
* @returns {Long} Value read
|
|
*/
|
|
|
|
/**
|
|
* Reads a float (32 bit) as a number.
|
|
* @function
|
|
* @returns {number} Value read
|
|
*/
|
|
Reader.prototype.float = function read_float() {
|
|
|
|
/* istanbul ignore if */
|
|
if (this.pos + 4 > this.len)
|
|
throw indexOutOfRange(this, 4);
|
|
|
|
var value = util.float.readFloatLE(this.buf, this.pos);
|
|
this.pos += 4;
|
|
return value;
|
|
};
|
|
|
|
/**
|
|
* Reads a double (64 bit float) as a number.
|
|
* @function
|
|
* @returns {number} Value read
|
|
*/
|
|
Reader.prototype.double = function read_double() {
|
|
|
|
/* istanbul ignore if */
|
|
if (this.pos + 8 > this.len)
|
|
throw indexOutOfRange(this, 4);
|
|
|
|
var value = util.float.readDoubleLE(this.buf, this.pos);
|
|
this.pos += 8;
|
|
return value;
|
|
};
|
|
|
|
/**
|
|
* Reads a sequence of bytes preceeded by its length as a varint.
|
|
* @returns {Uint8Array} Value read
|
|
*/
|
|
Reader.prototype.bytes = function read_bytes() {
|
|
var length = this.uint32(),
|
|
start = this.pos,
|
|
end = this.pos + length;
|
|
|
|
/* istanbul ignore if */
|
|
if (end > this.len)
|
|
throw indexOutOfRange(this, length);
|
|
|
|
this.pos += length;
|
|
if (Array.isArray(this.buf)) // plain array
|
|
return this.buf.slice(start, end);
|
|
return start === end // fix for IE 10/Win8 and others' subarray returning array of size 1
|
|
? new this.buf.constructor(0)
|
|
: this._slice.call(this.buf, start, end);
|
|
};
|
|
|
|
/**
|
|
* Reads a string preceeded by its byte length as a varint.
|
|
* @returns {string} Value read
|
|
*/
|
|
Reader.prototype.string = function read_string() {
|
|
var bytes = this.bytes();
|
|
return utf8.read(bytes, 0, bytes.length);
|
|
};
|
|
|
|
/**
 * Skips the specified number of bytes if specified, otherwise skips a varint.
 * @param {number} [length] Length if known, otherwise a varint is assumed
 * @returns {Reader} `this`
 * @throws {RangeError} If skipping would move past the end of the buffer
 */
Reader.prototype.skip = function skip(length) {
    if (typeof length === "number") {
        /* istanbul ignore if */
        if (this.pos + length > this.len)
            throw indexOutOfRange(this, length);
        this.pos += length;
    } else {
        // Varint: consume bytes until one without the continuation bit (0x80).
        do {
            /* istanbul ignore if */
            if (this.pos >= this.len)
                throw indexOutOfRange(this);
        } while (this.buf[this.pos++] & 128);
    }
    return this;
};
|
|
|
|
/**
 * Skips the next element of the specified wire type.
 * @param {number} wireType Wire type received
 * @returns {Reader} `this`
 * @throws {Error} If `wireType` is not a known protobuf wire type
 */
Reader.prototype.skipType = function(wireType) {
    switch (wireType) {
        case 0: // varint
            this.skip();
            break;
        case 1: // 64 bit fixed
            this.skip(8);
            break;
        case 2: // length-delimited: varint length prefix, then that many bytes
            this.skip(this.uint32());
            break;
        case 3: // start group: skip nested fields until the matching end-group (4)
            while ((wireType = this.uint32() & 7) !== 4) {
                this.skipType(wireType);
            }
            break;
        case 5: // 32 bit fixed
            this.skip(4);
            break;

        /* istanbul ignore next */
        default:
            throw Error("invalid wire type " + wireType + " at offset " + this.pos);
    }
    return this;
};
|
|
|
|
Reader._configure = function(BufferReader_) {
|
|
BufferReader = BufferReader_;
|
|
Reader.create = create();
|
|
BufferReader._configure();
|
|
|
|
var fn = util.Long ? "toLong" : /* istanbul ignore next */ "toNumber";
|
|
util.merge(Reader.prototype, {
|
|
|
|
int64: function read_int64() {
|
|
return readLongVarint.call(this)[fn](false);
|
|
},
|
|
|
|
uint64: function read_uint64() {
|
|
return readLongVarint.call(this)[fn](true);
|
|
},
|
|
|
|
sint64: function read_sint64() {
|
|
return readLongVarint.call(this).zzDecode()[fn](false);
|
|
},
|
|
|
|
fixed64: function read_fixed64() {
|
|
return readFixed64.call(this)[fn](true);
|
|
},
|
|
|
|
sfixed64: function read_sfixed64() {
|
|
return readFixed64.call(this)[fn](false);
|
|
}
|
|
|
|
});
|
|
};
|
|
return reader;
|
|
}
|
|
|
|
var reader_buffer;
var hasRequiredReader_buffer;

// Lazy CommonJS-style require wrapper: the module body runs once, on first call.
function requireReader_buffer () {
	if (hasRequiredReader_buffer) return reader_buffer;
	hasRequiredReader_buffer = 1;

	/**
	 * Constructs a new buffer reader instance.
	 * @classdesc Wire format reader using node buffers.
	 * @extends Reader
	 * @constructor
	 * @param {Buffer} buffer Buffer to read from
	 */
	function BufferReader(buffer) {
		BaseReader.call(this, buffer);

		/**
		 * Read buffer.
		 * @name BufferReader#buf
		 * @type {Buffer}
		 */
	}

	// Export before wiring the prototype so a re-entrant require sees the constructor.
	reader_buffer = BufferReader;

	// extends Reader
	var BaseReader = requireReader();
	(BufferReader.prototype = Object.create(BaseReader.prototype)).constructor = BufferReader;

	var minimalUtil = requireMinimal$1();

	BufferReader._configure = function () {
		/* istanbul ignore else */
		if (minimalUtil.Buffer)
			BufferReader.prototype._slice = minimalUtil.Buffer.prototype.slice;
	};

	/**
	 * @override
	 */
	BufferReader.prototype.string = function read_string_buffer() {
		var len = this.uint32(); // modifies pos
		// Prefer the fast internal utf8Slice when available; both branches
		// advance pos to the (clamped) end of the string in one expression.
		return this.buf.utf8Slice
			? this.buf.utf8Slice(this.pos, this.pos = Math.min(this.pos + len, this.len))
			: this.buf.toString("utf-8", this.pos, this.pos = Math.min(this.pos + len, this.len));
	};

	/**
	 * Reads a sequence of bytes preceeded by its length as a varint.
	 * @name BufferReader#bytes
	 * @function
	 * @returns {Buffer} Value read
	 */

	// Install the buffer-dependent slice implementation immediately.
	BufferReader._configure();
	return reader_buffer;
}
|
|
|
|
// Shared exports object for the streaming RPC helpers; populated lazily by requireRpc().
var rpc = {};
|
|
|
|
var service;
var hasRequiredService;

// Lazy CommonJS-style require wrapper: the module body runs once, on first call.
function requireService () {
	if (hasRequiredService) return service;
	hasRequiredService = 1;
	// `Service` is a hoisted function declaration, so assigning it here (before
	// its textual definition below) is safe.
	service = Service;

	var util = requireMinimal$1();

	// Extends EventEmitter
	(Service.prototype = Object.create(util.EventEmitter.prototype)).constructor = Service;

	/**
	 * A service method callback as used by {@link rpc.ServiceMethod|ServiceMethod}.
	 *
	 * Differs from {@link RPCImplCallback} in that it is an actual callback of a service method which may not return `response = null`.
	 * @typedef rpc.ServiceMethodCallback
	 * @template TRes extends Message<TRes>
	 * @type {function}
	 * @param {Error|null} error Error, if any
	 * @param {TRes} [response] Response message
	 * @returns {undefined}
	 */

	/**
	 * A service method part of a {@link rpc.Service} as created by {@link Service.create}.
	 * @typedef rpc.ServiceMethod
	 * @template TReq extends Message<TReq>
	 * @template TRes extends Message<TRes>
	 * @type {function}
	 * @param {TReq|Properties<TReq>} request Request message or plain object
	 * @param {rpc.ServiceMethodCallback<TRes>} [callback] Node-style callback called with the error, if any, and the response message
	 * @returns {Promise<Message<TRes>>} Promise if `callback` has been omitted, otherwise `undefined`
	 */

	/**
	 * Constructs a new RPC service instance.
	 * @classdesc An RPC service as returned by {@link Service#create}.
	 * @exports rpc.Service
	 * @extends util.EventEmitter
	 * @constructor
	 * @param {RPCImpl} rpcImpl RPC implementation
	 * @param {boolean} [requestDelimited=false] Whether requests are length-delimited
	 * @param {boolean} [responseDelimited=false] Whether responses are length-delimited
	 */
	function Service(rpcImpl, requestDelimited, responseDelimited) {

		if (typeof rpcImpl !== "function")
			throw TypeError("rpcImpl must be a function");

		util.EventEmitter.call(this);

		/**
		 * RPC implementation. Becomes `null` once the service is ended.
		 * @type {RPCImpl|null}
		 */
		this.rpcImpl = rpcImpl;

		/**
		 * Whether requests are length-delimited.
		 * @type {boolean}
		 */
		this.requestDelimited = Boolean(requestDelimited);

		/**
		 * Whether responses are length-delimited.
		 * @type {boolean}
		 */
		this.responseDelimited = Boolean(responseDelimited);
	}

	/**
	 * Calls a service method through {@link rpc.Service#rpcImpl|rpcImpl}.
	 * @param {Method|rpc.ServiceMethod<TReq,TRes>} method Reflected or static method
	 * @param {Constructor<TReq>} requestCtor Request constructor
	 * @param {Constructor<TRes>} responseCtor Response constructor
	 * @param {TReq|Properties<TReq>} request Request message or plain object
	 * @param {rpc.ServiceMethodCallback<TRes>} callback Service callback
	 * @returns {undefined}
	 * @template TReq extends Message<TReq>
	 * @template TRes extends Message<TRes>
	 */
	Service.prototype.rpcCall = function rpcCall(method, requestCtor, responseCtor, request, callback) {

		if (!request)
			throw TypeError("request must be specified");

		var self = this;
		// No callback: promisify by re-invoking this same method with one.
		if (!callback)
			return util.asPromise(rpcCall, self, method, requestCtor, responseCtor, request);

		// Service already ended: report asynchronously so the callback never
		// runs re-entrantly within this call.
		if (!self.rpcImpl) {
			setTimeout(function() { callback(Error("already ended")); }, 0);
			return undefined;
		}

		try {
			return self.rpcImpl(
				method,
				requestCtor[self.requestDelimited ? "encodeDelimited" : "encode"](request).finish(),
				function rpcCallback(err, response) {

					// Transport-level error: emit first, then notify the caller.
					if (err) {
						self.emit("error", err, method);
						return callback(err);
					}

					// `null` response signals end of stream (no callback invocation).
					if (response === null) {
						self.end(/* endedByRPC */ true);
						return undefined;
					}

					// Decode raw bytes unless the impl already produced a message.
					if (!(response instanceof responseCtor)) {
						try {
							response = responseCtor[self.responseDelimited ? "decodeDelimited" : "decode"](response);
						} catch (err) {
							self.emit("error", err, method);
							return callback(err);
						}
					}

					self.emit("data", response, method);
					return callback(null, response);
				}
			);
		} catch (err) {
			// Synchronous failure in encode/rpcImpl: emit now, deliver the
			// callback asynchronously for consistent caller semantics.
			self.emit("error", err, method);
			setTimeout(function() { callback(err); }, 0);
			return undefined;
		}
	};

	/**
	 * Ends this service and emits the `end` event.
	 * @param {boolean} [endedByRPC=false] Whether the service has been ended by the RPC implementation.
	 * @returns {rpc.Service} `this`
	 */
	Service.prototype.end = function end(endedByRPC) {
		if (this.rpcImpl) {
			if (!endedByRPC) // signal end to rpcImpl
				this.rpcImpl(null, null, null);
			this.rpcImpl = null;
			// Emit `end`, then remove all listeners.
			this.emit("end").off();
		}
		return this;
	};
	return service;
}
|
|
|
|
var hasRequiredRpc;

/**
 * Lazily populates and returns the streaming RPC helpers namespace.
 * @namespace rpc
 */
function requireRpc () {
	if (hasRequiredRpc) return rpc;
	hasRequiredRpc = 1;

	/**
	 * RPC implementation passed to {@link Service#create} performing a service request on network level, i.e. by utilizing http requests or websockets.
	 * @typedef RPCImpl
	 * @type {function}
	 * @param {Method|rpc.ServiceMethod<Message<{}>,Message<{}>>} method Reflected or static method being called
	 * @param {Uint8Array} requestData Request data
	 * @param {RPCImplCallback} callback Callback function
	 * @returns {undefined}
	 * @example
	 * function rpcImpl(method, requestData, callback) {
	 *     if (protobuf.util.lcFirst(method.name) !== "myMethod") // compatible with static code
	 *         throw Error("no such method");
	 *     asynchronouslyObtainAResponse(requestData, function(err, responseData) {
	 *         callback(err, responseData);
	 *     });
	 * }
	 */

	/**
	 * Node-style callback as used by {@link RPCImpl}.
	 * @typedef RPCImplCallback
	 * @type {function}
	 * @param {Error|null} error Error, if any, otherwise `null`
	 * @param {Uint8Array|null} [response] Response data or `null` to signal end of stream, if there hasn't been an error
	 * @returns {undefined}
	 */

	// The original CommonJS module wrapped this in an IIFE over `exports`;
	// populating the shared `rpc` namespace object directly is equivalent.
	rpc.Service = requireService();
	return rpc;
}
|
|
|
|
var roots;
var hasRequiredRoots;

/**
 * Lazily creates and returns the shared named-roots registry.
 *
 * Named roots.
 * This is where pbjs stores generated structures (the option `-r, --root` specifies a name).
 * Can also be used manually to make roots available accross modules.
 * @name roots
 * @type {Object.<string,Root>}
 * @example
 * // pbjs -r myroot -o compiled.js ...
 *
 * // in another module:
 * require("./compiled.js");
 *
 * // in any subsequent module:
 * var root = protobuf.roots["myroot"];
 */
function requireRoots () {
	if (!hasRequiredRoots) {
		hasRequiredRoots = 1;
		roots = {};
	}
	return roots;
}
|
|
|
|
var hasRequiredIndexMinimal;

/**
 * Lazily builds the "minimal" protobuf runtime on the shared `indexMinimal`
 * namespace object; subsequent calls return the same object.
 * @returns {Object} The populated minimal runtime namespace
 */
function requireIndexMinimal () {
	if (hasRequiredIndexMinimal) return indexMinimal;
	hasRequiredIndexMinimal = 1;

	// The original CommonJS module wrapped this in an IIFE over `exports`;
	// assigning onto the shared namespace object directly is equivalent.
	var ns = indexMinimal;

	/**
	 * Build type, one of `"full"`, `"light"` or `"minimal"`.
	 * @name build
	 * @type {string}
	 * @const
	 */
	ns.build = "minimal";

	// Serialization
	ns.Writer = requireWriter();
	ns.BufferWriter = requireWriter_buffer();
	ns.Reader = requireReader();
	ns.BufferReader = requireReader_buffer();

	// Utility
	ns.util = requireMinimal$1();
	ns.rpc = requireRpc();
	ns.roots = requireRoots();
	ns.configure = configure;

	/* istanbul ignore next */
	/**
	 * Reconfigures the library according to the environment.
	 * @returns {undefined}
	 */
	function configure() {
		ns.util._configure();
		ns.Writer._configure(ns.BufferWriter);
		ns.Reader._configure(ns.BufferReader);
	}

	// Set up buffer utility according to the environment
	configure();
	return indexMinimal;
}
|
|
|
|
var hasRequiredMinimal;

/**
 * Lazy require shim for the protobufjs "minimal" entry point. On first call
 * it resolves the shared `indexMinimal` namespace and caches it as the
 * module's exports; every call returns that same object.
 */
function requireMinimal () {
	if (!hasRequiredMinimal) {
		hasRequiredMinimal = 1;
		// Equivalent of `module.exports = require("./index-minimal")` from the
		// original CommonJS module, without the IIFE wrapper.
		minimal$1.exports = requireIndexMinimal();
	}
	return minimal$1.exports;
}
|
|
|
|
(function (module) {
|
|
// @ts-nocheck
|
|
/*eslint-disable*/
|
|
(function (global, factory) {
|
|
/* AMD */ if (typeof commonjsRequire === 'function' && 'object' === 'object' && module && module.exports)
|
|
module.exports = factory(requireMinimal());
|
|
})(commonjsGlobal, function ($protobuf) {
|
|
// Common aliases
|
|
var $Reader = $protobuf.Reader, $Writer = $protobuf.Writer, $util = $protobuf.util;
|
|
// Exported root namespace
|
|
var $root = $protobuf.roots["default"] || ($protobuf.roots["default"] = {});
|
|
$root.RPC = (function () {
|
|
/**
|
|
* Properties of a RPC.
|
|
* @exports IRPC
|
|
* @interface IRPC
|
|
* @property {Array.<RPC.ISubOpts>|null} [subscriptions] RPC subscriptions
|
|
* @property {Array.<RPC.IMessage>|null} [messages] RPC messages
|
|
* @property {RPC.IControlMessage|null} [control] RPC control
|
|
*/
|
|
/**
|
|
* Constructs a new RPC.
|
|
* @exports RPC
|
|
* @classdesc Represents a RPC.
|
|
* @implements IRPC
|
|
* @constructor
|
|
* @param {IRPC=} [p] Properties to set
|
|
*/
|
|
function RPC(p) {
|
|
this.subscriptions = [];
|
|
this.messages = [];
|
|
if (p)
|
|
for (var ks = Object.keys(p), i = 0; i < ks.length; ++i)
|
|
if (p[ks[i]] != null)
|
|
this[ks[i]] = p[ks[i]];
|
|
}
|
|
/**
|
|
* RPC subscriptions.
|
|
* @member {Array.<RPC.ISubOpts>} subscriptions
|
|
* @memberof RPC
|
|
* @instance
|
|
*/
|
|
RPC.prototype.subscriptions = $util.emptyArray;
|
|
/**
|
|
* RPC messages.
|
|
* @member {Array.<RPC.IMessage>} messages
|
|
* @memberof RPC
|
|
* @instance
|
|
*/
|
|
RPC.prototype.messages = $util.emptyArray;
|
|
/**
|
|
* RPC control.
|
|
* @member {RPC.IControlMessage|null|undefined} control
|
|
* @memberof RPC
|
|
* @instance
|
|
*/
|
|
RPC.prototype.control = null;
|
|
// OneOf field names bound to virtual getters and setters
|
|
var $oneOfFields;
|
|
/**
|
|
* RPC _control.
|
|
* @member {"control"|undefined} _control
|
|
* @memberof RPC
|
|
* @instance
|
|
*/
|
|
Object.defineProperty(RPC.prototype, "_control", {
|
|
get: $util.oneOfGetter($oneOfFields = ["control"]),
|
|
set: $util.oneOfSetter($oneOfFields)
|
|
});
|
|
/**
|
|
* Encodes the specified RPC message. Does not implicitly {@link RPC.verify|verify} messages.
|
|
* @function encode
|
|
* @memberof RPC
|
|
* @static
|
|
* @param {IRPC} m RPC message or plain object to encode
|
|
* @param {$protobuf.Writer} [w] Writer to encode to
|
|
* @returns {$protobuf.Writer} Writer
|
|
*/
|
|
RPC.encode = function encode(m, w) {
|
|
if (!w)
|
|
w = $Writer.create();
|
|
if (m.subscriptions != null && m.subscriptions.length) {
|
|
for (var i = 0; i < m.subscriptions.length; ++i)
|
|
$root.RPC.SubOpts.encode(m.subscriptions[i], w.uint32(10).fork()).ldelim();
|
|
}
|
|
if (m.messages != null && m.messages.length) {
|
|
for (var i = 0; i < m.messages.length; ++i)
|
|
$root.RPC.Message.encode(m.messages[i], w.uint32(18).fork()).ldelim();
|
|
}
|
|
if (m.control != null && Object.hasOwnProperty.call(m, "control"))
|
|
$root.RPC.ControlMessage.encode(m.control, w.uint32(26).fork()).ldelim();
|
|
return w;
|
|
};
|
|
/**
|
|
* Decodes a RPC message from the specified reader or buffer.
|
|
* @function decode
|
|
* @memberof RPC
|
|
* @static
|
|
* @param {$protobuf.Reader|Uint8Array} r Reader or buffer to decode from
|
|
* @param {number} [l] Message length if known beforehand
|
|
* @returns {RPC} RPC
|
|
* @throws {Error} If the payload is not a reader or valid buffer
|
|
* @throws {$protobuf.util.ProtocolError} If required fields are missing
|
|
*/
|
|
RPC.decode = function decode(r, l) {
|
|
if (!(r instanceof $Reader))
|
|
r = $Reader.create(r);
|
|
var c = l === undefined ? r.len : r.pos + l, m = new $root.RPC();
|
|
while (r.pos < c) {
|
|
var t = r.uint32();
|
|
switch (t >>> 3) {
|
|
case 1:
|
|
if (!(m.subscriptions && m.subscriptions.length))
|
|
m.subscriptions = [];
|
|
m.subscriptions.push($root.RPC.SubOpts.decode(r, r.uint32()));
|
|
break;
|
|
case 2:
|
|
if (!(m.messages && m.messages.length))
|
|
m.messages = [];
|
|
m.messages.push($root.RPC.Message.decode(r, r.uint32()));
|
|
break;
|
|
case 3:
|
|
m.control = $root.RPC.ControlMessage.decode(r, r.uint32());
|
|
break;
|
|
default:
|
|
r.skipType(t & 7);
|
|
break;
|
|
}
|
|
}
|
|
return m;
|
|
};
|
|
/**
|
|
* Creates a RPC message from a plain object. Also converts values to their respective internal types.
|
|
* @function fromObject
|
|
* @memberof RPC
|
|
* @static
|
|
* @param {Object.<string,*>} d Plain object
|
|
* @returns {RPC} RPC
|
|
*/
|
|
RPC.fromObject = function fromObject(d) {
|
|
if (d instanceof $root.RPC)
|
|
return d;
|
|
var m = new $root.RPC();
|
|
if (d.subscriptions) {
|
|
if (!Array.isArray(d.subscriptions))
|
|
throw TypeError(".RPC.subscriptions: array expected");
|
|
m.subscriptions = [];
|
|
for (var i = 0; i < d.subscriptions.length; ++i) {
|
|
if (typeof d.subscriptions[i] !== "object")
|
|
throw TypeError(".RPC.subscriptions: object expected");
|
|
m.subscriptions[i] = $root.RPC.SubOpts.fromObject(d.subscriptions[i]);
|
|
}
|
|
}
|
|
if (d.messages) {
|
|
if (!Array.isArray(d.messages))
|
|
throw TypeError(".RPC.messages: array expected");
|
|
m.messages = [];
|
|
for (var i = 0; i < d.messages.length; ++i) {
|
|
if (typeof d.messages[i] !== "object")
|
|
throw TypeError(".RPC.messages: object expected");
|
|
m.messages[i] = $root.RPC.Message.fromObject(d.messages[i]);
|
|
}
|
|
}
|
|
if (d.control != null) {
|
|
if (typeof d.control !== "object")
|
|
throw TypeError(".RPC.control: object expected");
|
|
m.control = $root.RPC.ControlMessage.fromObject(d.control);
|
|
}
|
|
return m;
|
|
};
|
|
/**
|
|
* Creates a plain object from a RPC message. Also converts values to other types if specified.
|
|
* @function toObject
|
|
* @memberof RPC
|
|
* @static
|
|
* @param {RPC} m RPC
|
|
* @param {$protobuf.IConversionOptions} [o] Conversion options
|
|
* @returns {Object.<string,*>} Plain object
|
|
*/
|
|
RPC.toObject = function toObject(m, o) {
|
|
if (!o)
|
|
o = {};
|
|
var d = {};
|
|
if (o.arrays || o.defaults) {
|
|
d.subscriptions = [];
|
|
d.messages = [];
|
|
}
|
|
if (m.subscriptions && m.subscriptions.length) {
|
|
d.subscriptions = [];
|
|
for (var j = 0; j < m.subscriptions.length; ++j) {
|
|
d.subscriptions[j] = $root.RPC.SubOpts.toObject(m.subscriptions[j], o);
|
|
}
|
|
}
|
|
if (m.messages && m.messages.length) {
|
|
d.messages = [];
|
|
for (var j = 0; j < m.messages.length; ++j) {
|
|
d.messages[j] = $root.RPC.Message.toObject(m.messages[j], o);
|
|
}
|
|
}
|
|
if (m.control != null && m.hasOwnProperty("control")) {
|
|
d.control = $root.RPC.ControlMessage.toObject(m.control, o);
|
|
if (o.oneofs)
|
|
d._control = "control";
|
|
}
|
|
return d;
|
|
};
|
|
/**
|
|
* Converts this RPC to JSON.
|
|
* @function toJSON
|
|
* @memberof RPC
|
|
* @instance
|
|
* @returns {Object.<string,*>} JSON object
|
|
*/
|
|
RPC.prototype.toJSON = function toJSON() {
|
|
return this.constructor.toObject(this, $protobuf.util.toJSONOptions);
|
|
};
|
|
RPC.SubOpts = (function () {
|
|
/**
|
|
* Properties of a SubOpts.
|
|
* @memberof RPC
|
|
* @interface ISubOpts
|
|
* @property {boolean|null} [subscribe] SubOpts subscribe
|
|
* @property {string|null} [topic] SubOpts topic
|
|
*/
|
|
/**
|
|
* Constructs a new SubOpts.
|
|
* @memberof RPC
|
|
* @classdesc Represents a SubOpts.
|
|
* @implements ISubOpts
|
|
* @constructor
|
|
* @param {RPC.ISubOpts=} [p] Properties to set
|
|
*/
|
|
function SubOpts(p) {
|
|
if (p)
|
|
for (var ks = Object.keys(p), i = 0; i < ks.length; ++i)
|
|
if (p[ks[i]] != null)
|
|
this[ks[i]] = p[ks[i]];
|
|
}
|
|
/**
|
|
* SubOpts subscribe.
|
|
* @member {boolean|null|undefined} subscribe
|
|
* @memberof RPC.SubOpts
|
|
* @instance
|
|
*/
|
|
SubOpts.prototype.subscribe = null;
|
|
/**
|
|
* SubOpts topic.
|
|
* @member {string|null|undefined} topic
|
|
* @memberof RPC.SubOpts
|
|
* @instance
|
|
*/
|
|
SubOpts.prototype.topic = null;
|
|
// OneOf field names bound to virtual getters and setters
|
|
var $oneOfFields;
|
|
/**
|
|
* SubOpts _subscribe.
|
|
* @member {"subscribe"|undefined} _subscribe
|
|
* @memberof RPC.SubOpts
|
|
* @instance
|
|
*/
|
|
Object.defineProperty(SubOpts.prototype, "_subscribe", {
|
|
get: $util.oneOfGetter($oneOfFields = ["subscribe"]),
|
|
set: $util.oneOfSetter($oneOfFields)
|
|
});
|
|
/**
|
|
* SubOpts _topic.
|
|
* @member {"topic"|undefined} _topic
|
|
* @memberof RPC.SubOpts
|
|
* @instance
|
|
*/
|
|
Object.defineProperty(SubOpts.prototype, "_topic", {
|
|
get: $util.oneOfGetter($oneOfFields = ["topic"]),
|
|
set: $util.oneOfSetter($oneOfFields)
|
|
});
|
|
/**
|
|
* Encodes the specified SubOpts message. Does not implicitly {@link RPC.SubOpts.verify|verify} messages.
|
|
* @function encode
|
|
* @memberof RPC.SubOpts
|
|
* @static
|
|
* @param {RPC.ISubOpts} m SubOpts message or plain object to encode
|
|
* @param {$protobuf.Writer} [w] Writer to encode to
|
|
* @returns {$protobuf.Writer} Writer
|
|
*/
|
|
SubOpts.encode = function encode(m, w) {
|
|
if (!w)
|
|
w = $Writer.create();
|
|
if (m.subscribe != null && Object.hasOwnProperty.call(m, "subscribe"))
|
|
w.uint32(8).bool(m.subscribe);
|
|
if (m.topic != null && Object.hasOwnProperty.call(m, "topic"))
|
|
w.uint32(18).string(m.topic);
|
|
return w;
|
|
};
|
|
/**
|
|
* Decodes a SubOpts message from the specified reader or buffer.
|
|
* @function decode
|
|
* @memberof RPC.SubOpts
|
|
* @static
|
|
* @param {$protobuf.Reader|Uint8Array} r Reader or buffer to decode from
|
|
* @param {number} [l] Message length if known beforehand
|
|
* @returns {RPC.SubOpts} SubOpts
|
|
* @throws {Error} If the payload is not a reader or valid buffer
|
|
* @throws {$protobuf.util.ProtocolError} If required fields are missing
|
|
*/
|
|
SubOpts.decode = function decode(r, l) {
|
|
if (!(r instanceof $Reader))
|
|
r = $Reader.create(r);
|
|
var c = l === undefined ? r.len : r.pos + l, m = new $root.RPC.SubOpts();
|
|
while (r.pos < c) {
|
|
var t = r.uint32();
|
|
switch (t >>> 3) {
|
|
case 1:
|
|
m.subscribe = r.bool();
|
|
break;
|
|
case 2:
|
|
m.topic = r.string();
|
|
break;
|
|
default:
|
|
r.skipType(t & 7);
|
|
break;
|
|
}
|
|
}
|
|
return m;
|
|
};
|
|
/**
|
|
* Creates a SubOpts message from a plain object. Also converts values to their respective internal types.
|
|
* @function fromObject
|
|
* @memberof RPC.SubOpts
|
|
* @static
|
|
* @param {Object.<string,*>} d Plain object
|
|
* @returns {RPC.SubOpts} SubOpts
|
|
*/
|
|
SubOpts.fromObject = function fromObject(d) {
|
|
if (d instanceof $root.RPC.SubOpts)
|
|
return d;
|
|
var m = new $root.RPC.SubOpts();
|
|
if (d.subscribe != null) {
|
|
m.subscribe = Boolean(d.subscribe);
|
|
}
|
|
if (d.topic != null) {
|
|
m.topic = String(d.topic);
|
|
}
|
|
return m;
|
|
};
|
|
/**
|
|
* Creates a plain object from a SubOpts message. Also converts values to other types if specified.
|
|
* @function toObject
|
|
* @memberof RPC.SubOpts
|
|
* @static
|
|
* @param {RPC.SubOpts} m SubOpts
|
|
* @param {$protobuf.IConversionOptions} [o] Conversion options
|
|
* @returns {Object.<string,*>} Plain object
|
|
*/
|
|
SubOpts.toObject = function toObject(m, o) {
|
|
if (!o)
|
|
o = {};
|
|
var d = {};
|
|
if (m.subscribe != null && m.hasOwnProperty("subscribe")) {
|
|
d.subscribe = m.subscribe;
|
|
if (o.oneofs)
|
|
d._subscribe = "subscribe";
|
|
}
|
|
if (m.topic != null && m.hasOwnProperty("topic")) {
|
|
d.topic = m.topic;
|
|
if (o.oneofs)
|
|
d._topic = "topic";
|
|
}
|
|
return d;
|
|
};
|
|
/**
|
|
* Converts this SubOpts to JSON.
|
|
* @function toJSON
|
|
* @memberof RPC.SubOpts
|
|
* @instance
|
|
* @returns {Object.<string,*>} JSON object
|
|
*/
|
|
SubOpts.prototype.toJSON = function toJSON() {
|
|
return this.constructor.toObject(this, $protobuf.util.toJSONOptions);
|
|
};
|
|
return SubOpts;
|
|
})();
|
|
RPC.Message = (function () {
|
|
/**
|
|
* Properties of a Message.
|
|
* @memberof RPC
|
|
* @interface IMessage
|
|
* @property {Uint8Array|null} [from] Message from
|
|
* @property {Uint8Array|null} [data] Message data
|
|
* @property {Uint8Array|null} [seqno] Message seqno
|
|
* @property {string} topic Message topic
|
|
* @property {Uint8Array|null} [signature] Message signature
|
|
* @property {Uint8Array|null} [key] Message key
|
|
*/
|
|
/**
|
|
* Constructs a new Message.
|
|
* @memberof RPC
|
|
* @classdesc Represents a Message.
|
|
* @implements IMessage
|
|
* @constructor
|
|
* @param {RPC.IMessage=} [p] Properties to set
|
|
*/
|
|
function Message(p) {
|
|
if (p)
|
|
for (var ks = Object.keys(p), i = 0; i < ks.length; ++i)
|
|
if (p[ks[i]] != null)
|
|
this[ks[i]] = p[ks[i]];
|
|
}
|
|
/**
|
|
* Message from.
|
|
* @member {Uint8Array|null|undefined} from
|
|
* @memberof RPC.Message
|
|
* @instance
|
|
*/
|
|
Message.prototype.from = null;
|
|
/**
|
|
* Message data.
|
|
* @member {Uint8Array|null|undefined} data
|
|
* @memberof RPC.Message
|
|
* @instance
|
|
*/
|
|
Message.prototype.data = null;
|
|
/**
|
|
* Message seqno.
|
|
* @member {Uint8Array|null|undefined} seqno
|
|
* @memberof RPC.Message
|
|
* @instance
|
|
*/
|
|
Message.prototype.seqno = null;
|
|
/**
|
|
* Message topic.
|
|
* @member {string} topic
|
|
* @memberof RPC.Message
|
|
* @instance
|
|
*/
|
|
Message.prototype.topic = "";
|
|
/**
|
|
* Message signature.
|
|
* @member {Uint8Array|null|undefined} signature
|
|
* @memberof RPC.Message
|
|
* @instance
|
|
*/
|
|
Message.prototype.signature = null;
|
|
/**
|
|
* Message key.
|
|
* @member {Uint8Array|null|undefined} key
|
|
* @memberof RPC.Message
|
|
* @instance
|
|
*/
|
|
Message.prototype.key = null;
|
|
// OneOf field names bound to virtual getters and setters
|
|
var $oneOfFields;
|
|
/**
|
|
* Message _from.
|
|
* @member {"from"|undefined} _from
|
|
* @memberof RPC.Message
|
|
* @instance
|
|
*/
|
|
Object.defineProperty(Message.prototype, "_from", {
|
|
get: $util.oneOfGetter($oneOfFields = ["from"]),
|
|
set: $util.oneOfSetter($oneOfFields)
|
|
});
|
|
/**
|
|
* Message _data.
|
|
* @member {"data"|undefined} _data
|
|
* @memberof RPC.Message
|
|
* @instance
|
|
*/
|
|
Object.defineProperty(Message.prototype, "_data", {
|
|
get: $util.oneOfGetter($oneOfFields = ["data"]),
|
|
set: $util.oneOfSetter($oneOfFields)
|
|
});
|
|
/**
|
|
* Message _seqno.
|
|
* @member {"seqno"|undefined} _seqno
|
|
* @memberof RPC.Message
|
|
* @instance
|
|
*/
|
|
Object.defineProperty(Message.prototype, "_seqno", {
|
|
get: $util.oneOfGetter($oneOfFields = ["seqno"]),
|
|
set: $util.oneOfSetter($oneOfFields)
|
|
});
|
|
/**
|
|
* Message _signature.
|
|
* @member {"signature"|undefined} _signature
|
|
* @memberof RPC.Message
|
|
* @instance
|
|
*/
|
|
Object.defineProperty(Message.prototype, "_signature", {
|
|
get: $util.oneOfGetter($oneOfFields = ["signature"]),
|
|
set: $util.oneOfSetter($oneOfFields)
|
|
});
|
|
/**
|
|
* Message _key.
|
|
* @member {"key"|undefined} _key
|
|
* @memberof RPC.Message
|
|
* @instance
|
|
*/
|
|
Object.defineProperty(Message.prototype, "_key", {
|
|
get: $util.oneOfGetter($oneOfFields = ["key"]),
|
|
set: $util.oneOfSetter($oneOfFields)
|
|
});
|
|
/**
|
|
* Encodes the specified Message message. Does not implicitly {@link RPC.Message.verify|verify} messages.
|
|
* @function encode
|
|
* @memberof RPC.Message
|
|
* @static
|
|
* @param {RPC.IMessage} m Message message or plain object to encode
|
|
* @param {$protobuf.Writer} [w] Writer to encode to
|
|
* @returns {$protobuf.Writer} Writer
|
|
*/
|
|
Message.encode = function encode(m, w) {
|
|
if (!w)
|
|
w = $Writer.create();
|
|
if (m.from != null && Object.hasOwnProperty.call(m, "from"))
|
|
w.uint32(10).bytes(m.from);
|
|
if (m.data != null && Object.hasOwnProperty.call(m, "data"))
|
|
w.uint32(18).bytes(m.data);
|
|
if (m.seqno != null && Object.hasOwnProperty.call(m, "seqno"))
|
|
w.uint32(26).bytes(m.seqno);
|
|
w.uint32(34).string(m.topic);
|
|
if (m.signature != null && Object.hasOwnProperty.call(m, "signature"))
|
|
w.uint32(42).bytes(m.signature);
|
|
if (m.key != null && Object.hasOwnProperty.call(m, "key"))
|
|
w.uint32(50).bytes(m.key);
|
|
return w;
|
|
};
|
|
/**
|
|
* Decodes a Message message from the specified reader or buffer.
|
|
* @function decode
|
|
* @memberof RPC.Message
|
|
* @static
|
|
* @param {$protobuf.Reader|Uint8Array} r Reader or buffer to decode from
|
|
* @param {number} [l] Message length if known beforehand
|
|
* @returns {RPC.Message} Message
|
|
* @throws {Error} If the payload is not a reader or valid buffer
|
|
* @throws {$protobuf.util.ProtocolError} If required fields are missing
|
|
*/
|
|
Message.decode = function decode(r, l) {
|
|
if (!(r instanceof $Reader))
|
|
r = $Reader.create(r);
|
|
var c = l === undefined ? r.len : r.pos + l, m = new $root.RPC.Message();
|
|
while (r.pos < c) {
|
|
var t = r.uint32();
|
|
switch (t >>> 3) {
|
|
case 1:
|
|
m.from = r.bytes();
|
|
break;
|
|
case 2:
|
|
m.data = r.bytes();
|
|
break;
|
|
case 3:
|
|
m.seqno = r.bytes();
|
|
break;
|
|
case 4:
|
|
m.topic = r.string();
|
|
break;
|
|
case 5:
|
|
m.signature = r.bytes();
|
|
break;
|
|
case 6:
|
|
m.key = r.bytes();
|
|
break;
|
|
default:
|
|
r.skipType(t & 7);
|
|
break;
|
|
}
|
|
}
|
|
if (!m.hasOwnProperty("topic"))
|
|
throw $util.ProtocolError("missing required 'topic'", { instance: m });
|
|
return m;
|
|
};
|
|
/**
|
|
* Creates a Message message from a plain object. Also converts values to their respective internal types.
|
|
* @function fromObject
|
|
* @memberof RPC.Message
|
|
* @static
|
|
* @param {Object.<string,*>} d Plain object
|
|
* @returns {RPC.Message} Message
|
|
*/
|
|
Message.fromObject = function fromObject(d) {
|
|
if (d instanceof $root.RPC.Message)
|
|
return d;
|
|
var m = new $root.RPC.Message();
|
|
if (d.from != null) {
|
|
if (typeof d.from === "string")
|
|
$util.base64.decode(d.from, m.from = $util.newBuffer($util.base64.length(d.from)), 0);
|
|
else if (d.from.length)
|
|
m.from = d.from;
|
|
}
|
|
if (d.data != null) {
|
|
if (typeof d.data === "string")
|
|
$util.base64.decode(d.data, m.data = $util.newBuffer($util.base64.length(d.data)), 0);
|
|
else if (d.data.length)
|
|
m.data = d.data;
|
|
}
|
|
if (d.seqno != null) {
|
|
if (typeof d.seqno === "string")
|
|
$util.base64.decode(d.seqno, m.seqno = $util.newBuffer($util.base64.length(d.seqno)), 0);
|
|
else if (d.seqno.length)
|
|
m.seqno = d.seqno;
|
|
}
|
|
if (d.topic != null) {
|
|
m.topic = String(d.topic);
|
|
}
|
|
if (d.signature != null) {
|
|
if (typeof d.signature === "string")
|
|
$util.base64.decode(d.signature, m.signature = $util.newBuffer($util.base64.length(d.signature)), 0);
|
|
else if (d.signature.length)
|
|
m.signature = d.signature;
|
|
}
|
|
if (d.key != null) {
|
|
if (typeof d.key === "string")
|
|
$util.base64.decode(d.key, m.key = $util.newBuffer($util.base64.length(d.key)), 0);
|
|
else if (d.key.length)
|
|
m.key = d.key;
|
|
}
|
|
return m;
|
|
};
|
|
/**
|
|
* Creates a plain object from a Message message. Also converts values to other types if specified.
|
|
* @function toObject
|
|
* @memberof RPC.Message
|
|
* @static
|
|
* @param {RPC.Message} m Message
|
|
* @param {$protobuf.IConversionOptions} [o] Conversion options
|
|
* @returns {Object.<string,*>} Plain object
|
|
*/
|
|
Message.toObject = function toObject(m, o) {
|
|
if (!o)
|
|
o = {};
|
|
var d = {};
|
|
if (o.defaults) {
|
|
d.topic = "";
|
|
}
|
|
if (m.from != null && m.hasOwnProperty("from")) {
|
|
d.from = o.bytes === String ? $util.base64.encode(m.from, 0, m.from.length) : o.bytes === Array ? Array.prototype.slice.call(m.from) : m.from;
|
|
if (o.oneofs)
|
|
d._from = "from";
|
|
}
|
|
if (m.data != null && m.hasOwnProperty("data")) {
|
|
d.data = o.bytes === String ? $util.base64.encode(m.data, 0, m.data.length) : o.bytes === Array ? Array.prototype.slice.call(m.data) : m.data;
|
|
if (o.oneofs)
|
|
d._data = "data";
|
|
}
|
|
if (m.seqno != null && m.hasOwnProperty("seqno")) {
|
|
d.seqno = o.bytes === String ? $util.base64.encode(m.seqno, 0, m.seqno.length) : o.bytes === Array ? Array.prototype.slice.call(m.seqno) : m.seqno;
|
|
if (o.oneofs)
|
|
d._seqno = "seqno";
|
|
}
|
|
if (m.topic != null && m.hasOwnProperty("topic")) {
|
|
d.topic = m.topic;
|
|
}
|
|
if (m.signature != null && m.hasOwnProperty("signature")) {
|
|
d.signature = o.bytes === String ? $util.base64.encode(m.signature, 0, m.signature.length) : o.bytes === Array ? Array.prototype.slice.call(m.signature) : m.signature;
|
|
if (o.oneofs)
|
|
d._signature = "signature";
|
|
}
|
|
if (m.key != null && m.hasOwnProperty("key")) {
|
|
d.key = o.bytes === String ? $util.base64.encode(m.key, 0, m.key.length) : o.bytes === Array ? Array.prototype.slice.call(m.key) : m.key;
|
|
if (o.oneofs)
|
|
d._key = "key";
|
|
}
|
|
return d;
|
|
};
|
|
/**
|
|
* Converts this Message to JSON.
|
|
* @function toJSON
|
|
* @memberof RPC.Message
|
|
* @instance
|
|
* @returns {Object.<string,*>} JSON object
|
|
*/
|
|
Message.prototype.toJSON = function toJSON() {
|
|
return this.constructor.toObject(this, $protobuf.util.toJSONOptions);
|
|
};
|
|
return Message;
|
|
})();
|
|
RPC.ControlMessage = (function () {

  /**
   * Properties of a ControlMessage.
   * A gossipsub control frame carrying any combination of IHAVE / IWANT /
   * GRAFT / PRUNE sub-messages (all four fields are repeated).
   * @memberof RPC
   * @interface IControlMessage
   * @property {Array.<RPC.IControlIHave>|null} [ihave] ControlMessage ihave
   * @property {Array.<RPC.IControlIWant>|null} [iwant] ControlMessage iwant
   * @property {Array.<RPC.IControlGraft>|null} [graft] ControlMessage graft
   * @property {Array.<RPC.IControlPrune>|null} [prune] ControlMessage prune
   */

  /**
   * Constructs a new ControlMessage.
   * @memberof RPC
   * @classdesc Represents a ControlMessage.
   * @implements IControlMessage
   * @constructor
   * @param {RPC.IControlMessage=} [p] Properties to set
   */
  function ControlMessage(p) {
    this.ihave = [];
    this.iwant = [];
    this.graft = [];
    this.prune = [];
    // Shallow-copy only non-null own properties from the initializer.
    if (p)
      for (var ks = Object.keys(p), i = 0; i < ks.length; ++i)
        if (p[ks[i]] != null)
          this[ks[i]] = p[ks[i]];
  }

  /**
   * ControlMessage ihave.
   * @member {Array.<RPC.IControlIHave>} ihave
   * @memberof RPC.ControlMessage
   * @instance
   */
  // Prototype default is a shared immutable empty array; instances get
  // their own arrays in the constructor.
  ControlMessage.prototype.ihave = $util.emptyArray;

  /**
   * ControlMessage iwant.
   * @member {Array.<RPC.IControlIWant>} iwant
   * @memberof RPC.ControlMessage
   * @instance
   */
  ControlMessage.prototype.iwant = $util.emptyArray;

  /**
   * ControlMessage graft.
   * @member {Array.<RPC.IControlGraft>} graft
   * @memberof RPC.ControlMessage
   * @instance
   */
  ControlMessage.prototype.graft = $util.emptyArray;

  /**
   * ControlMessage prune.
   * @member {Array.<RPC.IControlPrune>} prune
   * @memberof RPC.ControlMessage
   * @instance
   */
  ControlMessage.prototype.prune = $util.emptyArray;

  /**
   * Encodes the specified ControlMessage message. Does not implicitly {@link RPC.ControlMessage.verify|verify} messages.
   * Each sub-message is written length-delimited via fork()/ldelim().
   * @function encode
   * @memberof RPC.ControlMessage
   * @static
   * @param {RPC.IControlMessage} m ControlMessage message or plain object to encode
   * @param {$protobuf.Writer} [w] Writer to encode to
   * @returns {$protobuf.Writer} Writer
   */
  ControlMessage.encode = function encode(m, w) {
    if (!w)
      w = $Writer.create();
    // Tag bytes are (fieldNumber << 3) | wireType 2 (length-delimited):
    // 10 = field 1, 18 = field 2, 26 = field 3, 34 = field 4.
    if (m.ihave != null && m.ihave.length) {
      for (var i = 0; i < m.ihave.length; ++i)
        $root.RPC.ControlIHave.encode(m.ihave[i], w.uint32(10).fork()).ldelim();
    }
    if (m.iwant != null && m.iwant.length) {
      for (var i = 0; i < m.iwant.length; ++i)
        $root.RPC.ControlIWant.encode(m.iwant[i], w.uint32(18).fork()).ldelim();
    }
    if (m.graft != null && m.graft.length) {
      for (var i = 0; i < m.graft.length; ++i)
        $root.RPC.ControlGraft.encode(m.graft[i], w.uint32(26).fork()).ldelim();
    }
    if (m.prune != null && m.prune.length) {
      for (var i = 0; i < m.prune.length; ++i)
        $root.RPC.ControlPrune.encode(m.prune[i], w.uint32(34).fork()).ldelim();
    }
    return w;
  };

  /**
   * Decodes a ControlMessage message from the specified reader or buffer.
   * Unknown fields are skipped by wire type (forward compatibility).
   * @function decode
   * @memberof RPC.ControlMessage
   * @static
   * @param {$protobuf.Reader|Uint8Array} r Reader or buffer to decode from
   * @param {number} [l] Message length if known beforehand
   * @returns {RPC.ControlMessage} ControlMessage
   * @throws {Error} If the payload is not a reader or valid buffer
   * @throws {$protobuf.util.ProtocolError} If required fields are missing
   */
  ControlMessage.decode = function decode(r, l) {
    if (!(r instanceof $Reader))
      r = $Reader.create(r);
    // c is the end position; t's upper bits are the field number,
    // its low 3 bits the wire type.
    var c = l === undefined ? r.len : r.pos + l, m = new $root.RPC.ControlMessage();
    while (r.pos < c) {
      var t = r.uint32();
      switch (t >>> 3) {
      case 1:
        // Lazily replace the shared prototype default with a fresh array.
        if (!(m.ihave && m.ihave.length))
          m.ihave = [];
        m.ihave.push($root.RPC.ControlIHave.decode(r, r.uint32()));
        break;
      case 2:
        if (!(m.iwant && m.iwant.length))
          m.iwant = [];
        m.iwant.push($root.RPC.ControlIWant.decode(r, r.uint32()));
        break;
      case 3:
        if (!(m.graft && m.graft.length))
          m.graft = [];
        m.graft.push($root.RPC.ControlGraft.decode(r, r.uint32()));
        break;
      case 4:
        if (!(m.prune && m.prune.length))
          m.prune = [];
        m.prune.push($root.RPC.ControlPrune.decode(r, r.uint32()));
        break;
      default:
        r.skipType(t & 7);
        break;
      }
    }
    return m;
  };

  /**
   * Creates a ControlMessage message from a plain object. Also converts values to their respective internal types.
   * Returns the argument unchanged if it is already a ControlMessage.
   * @function fromObject
   * @memberof RPC.ControlMessage
   * @static
   * @param {Object.<string,*>} d Plain object
   * @returns {RPC.ControlMessage} ControlMessage
   * @throws {TypeError} If a repeated field is not an array of objects
   */
  ControlMessage.fromObject = function fromObject(d) {
    if (d instanceof $root.RPC.ControlMessage)
      return d;
    var m = new $root.RPC.ControlMessage();
    if (d.ihave) {
      if (!Array.isArray(d.ihave))
        throw TypeError(".RPC.ControlMessage.ihave: array expected");
      m.ihave = [];
      for (var i = 0; i < d.ihave.length; ++i) {
        if (typeof d.ihave[i] !== "object")
          throw TypeError(".RPC.ControlMessage.ihave: object expected");
        m.ihave[i] = $root.RPC.ControlIHave.fromObject(d.ihave[i]);
      }
    }
    if (d.iwant) {
      if (!Array.isArray(d.iwant))
        throw TypeError(".RPC.ControlMessage.iwant: array expected");
      m.iwant = [];
      for (var i = 0; i < d.iwant.length; ++i) {
        if (typeof d.iwant[i] !== "object")
          throw TypeError(".RPC.ControlMessage.iwant: object expected");
        m.iwant[i] = $root.RPC.ControlIWant.fromObject(d.iwant[i]);
      }
    }
    if (d.graft) {
      if (!Array.isArray(d.graft))
        throw TypeError(".RPC.ControlMessage.graft: array expected");
      m.graft = [];
      for (var i = 0; i < d.graft.length; ++i) {
        if (typeof d.graft[i] !== "object")
          throw TypeError(".RPC.ControlMessage.graft: object expected");
        m.graft[i] = $root.RPC.ControlGraft.fromObject(d.graft[i]);
      }
    }
    if (d.prune) {
      if (!Array.isArray(d.prune))
        throw TypeError(".RPC.ControlMessage.prune: array expected");
      m.prune = [];
      for (var i = 0; i < d.prune.length; ++i) {
        if (typeof d.prune[i] !== "object")
          throw TypeError(".RPC.ControlMessage.prune: object expected");
        m.prune[i] = $root.RPC.ControlPrune.fromObject(d.prune[i]);
      }
    }
    return m;
  };

  /**
   * Creates a plain object from a ControlMessage message. Also converts values to other types if specified.
   * @function toObject
   * @memberof RPC.ControlMessage
   * @static
   * @param {RPC.ControlMessage} m ControlMessage
   * @param {$protobuf.IConversionOptions} [o] Conversion options
   * @returns {Object.<string,*>} Plain object
   */
  ControlMessage.toObject = function toObject(m, o) {
    if (!o)
      o = {};
    var d = {};
    // With o.arrays or o.defaults, repeated fields are always present
    // (as empty arrays) even when the message has no elements.
    if (o.arrays || o.defaults) {
      d.ihave = [];
      d.iwant = [];
      d.graft = [];
      d.prune = [];
    }
    if (m.ihave && m.ihave.length) {
      d.ihave = [];
      for (var j = 0; j < m.ihave.length; ++j) {
        d.ihave[j] = $root.RPC.ControlIHave.toObject(m.ihave[j], o);
      }
    }
    if (m.iwant && m.iwant.length) {
      d.iwant = [];
      for (var j = 0; j < m.iwant.length; ++j) {
        d.iwant[j] = $root.RPC.ControlIWant.toObject(m.iwant[j], o);
      }
    }
    if (m.graft && m.graft.length) {
      d.graft = [];
      for (var j = 0; j < m.graft.length; ++j) {
        d.graft[j] = $root.RPC.ControlGraft.toObject(m.graft[j], o);
      }
    }
    if (m.prune && m.prune.length) {
      d.prune = [];
      for (var j = 0; j < m.prune.length; ++j) {
        d.prune[j] = $root.RPC.ControlPrune.toObject(m.prune[j], o);
      }
    }
    return d;
  };

  /**
   * Converts this ControlMessage to JSON.
   * @function toJSON
   * @memberof RPC.ControlMessage
   * @instance
   * @returns {Object.<string,*>} JSON object
   */
  ControlMessage.prototype.toJSON = function toJSON() {
    return this.constructor.toObject(this, $protobuf.util.toJSONOptions);
  };

  return ControlMessage;
})();
|
|
RPC.ControlIHave = (function () {

  /**
   * Properties of a ControlIHave.
   * Advertises message IDs this peer has seen for a topic.
   * @memberof RPC
   * @interface IControlIHave
   * @property {string|null} [topicID] ControlIHave topicID
   * @property {Array.<Uint8Array>|null} [messageIDs] ControlIHave messageIDs
   */

  /**
   * Constructs a new ControlIHave.
   * @memberof RPC
   * @classdesc Represents a ControlIHave.
   * @implements IControlIHave
   * @constructor
   * @param {RPC.IControlIHave=} [p] Properties to set
   */
  function ControlIHave(p) {
    this.messageIDs = [];
    // Shallow-copy only non-null own properties from the initializer.
    if (p)
      for (var ks = Object.keys(p), i = 0; i < ks.length; ++i)
        if (p[ks[i]] != null)
          this[ks[i]] = p[ks[i]];
  }

  /**
   * ControlIHave topicID.
   * Proto3 optional field: null means "not set on the wire".
   * @member {string|null|undefined} topicID
   * @memberof RPC.ControlIHave
   * @instance
   */
  ControlIHave.prototype.topicID = null;

  /**
   * ControlIHave messageIDs.
   * @member {Array.<Uint8Array>} messageIDs
   * @memberof RPC.ControlIHave
   * @instance
   */
  ControlIHave.prototype.messageIDs = $util.emptyArray;

  // OneOf field names bound to virtual getters and setters
  var $oneOfFields;

  /**
   * ControlIHave _topicID.
   * Virtual synthetic-oneof member: yields "topicID" when the field is
   * set, undefined otherwise (proto3 optional presence tracking).
   * @member {"topicID"|undefined} _topicID
   * @memberof RPC.ControlIHave
   * @instance
   */
  Object.defineProperty(ControlIHave.prototype, "_topicID", {
    get: $util.oneOfGetter($oneOfFields = ["topicID"]),
    set: $util.oneOfSetter($oneOfFields)
  });

  /**
   * Encodes the specified ControlIHave message. Does not implicitly {@link RPC.ControlIHave.verify|verify} messages.
   * @function encode
   * @memberof RPC.ControlIHave
   * @static
   * @param {RPC.IControlIHave} m ControlIHave message or plain object to encode
   * @param {$protobuf.Writer} [w] Writer to encode to
   * @returns {$protobuf.Writer} Writer
   */
  ControlIHave.encode = function encode(m, w) {
    if (!w)
      w = $Writer.create();
    // 10 = (field 1 << 3) | wire type 2; 18 = (field 2 << 3) | 2.
    if (m.topicID != null && Object.hasOwnProperty.call(m, "topicID"))
      w.uint32(10).string(m.topicID);
    if (m.messageIDs != null && m.messageIDs.length) {
      for (var i = 0; i < m.messageIDs.length; ++i)
        w.uint32(18).bytes(m.messageIDs[i]);
    }
    return w;
  };

  /**
   * Decodes a ControlIHave message from the specified reader or buffer.
   * Unknown fields are skipped by wire type.
   * @function decode
   * @memberof RPC.ControlIHave
   * @static
   * @param {$protobuf.Reader|Uint8Array} r Reader or buffer to decode from
   * @param {number} [l] Message length if known beforehand
   * @returns {RPC.ControlIHave} ControlIHave
   * @throws {Error} If the payload is not a reader or valid buffer
   * @throws {$protobuf.util.ProtocolError} If required fields are missing
   */
  ControlIHave.decode = function decode(r, l) {
    if (!(r instanceof $Reader))
      r = $Reader.create(r);
    var c = l === undefined ? r.len : r.pos + l, m = new $root.RPC.ControlIHave();
    while (r.pos < c) {
      var t = r.uint32();
      switch (t >>> 3) {
      case 1:
        m.topicID = r.string();
        break;
      case 2:
        // Lazily replace the shared prototype default with a fresh array.
        if (!(m.messageIDs && m.messageIDs.length))
          m.messageIDs = [];
        m.messageIDs.push(r.bytes());
        break;
      default:
        r.skipType(t & 7);
        break;
      }
    }
    return m;
  };

  /**
   * Creates a ControlIHave message from a plain object. Also converts values to their respective internal types.
   * String message IDs are treated as base64 and decoded to bytes.
   * @function fromObject
   * @memberof RPC.ControlIHave
   * @static
   * @param {Object.<string,*>} d Plain object
   * @returns {RPC.ControlIHave} ControlIHave
   * @throws {TypeError} If messageIDs is present but not an array
   */
  ControlIHave.fromObject = function fromObject(d) {
    if (d instanceof $root.RPC.ControlIHave)
      return d;
    var m = new $root.RPC.ControlIHave();
    if (d.topicID != null) {
      m.topicID = String(d.topicID);
    }
    if (d.messageIDs) {
      if (!Array.isArray(d.messageIDs))
        throw TypeError(".RPC.ControlIHave.messageIDs: array expected");
      m.messageIDs = [];
      for (var i = 0; i < d.messageIDs.length; ++i) {
        if (typeof d.messageIDs[i] === "string")
          $util.base64.decode(d.messageIDs[i], m.messageIDs[i] = $util.newBuffer($util.base64.length(d.messageIDs[i])), 0);
        else if (d.messageIDs[i].length)
          m.messageIDs[i] = d.messageIDs[i];
      }
    }
    return m;
  };

  /**
   * Creates a plain object from a ControlIHave message. Also converts values to other types if specified.
   * o.bytes selects the byte representation: String (base64), Array, or
   * the raw buffer as-is.
   * @function toObject
   * @memberof RPC.ControlIHave
   * @static
   * @param {RPC.ControlIHave} m ControlIHave
   * @param {$protobuf.IConversionOptions} [o] Conversion options
   * @returns {Object.<string,*>} Plain object
   */
  ControlIHave.toObject = function toObject(m, o) {
    if (!o)
      o = {};
    var d = {};
    if (o.arrays || o.defaults) {
      d.messageIDs = [];
    }
    if (m.topicID != null && m.hasOwnProperty("topicID")) {
      d.topicID = m.topicID;
      // o.oneofs also exposes the synthetic-oneof discriminator.
      if (o.oneofs)
        d._topicID = "topicID";
    }
    if (m.messageIDs && m.messageIDs.length) {
      d.messageIDs = [];
      for (var j = 0; j < m.messageIDs.length; ++j) {
        d.messageIDs[j] = o.bytes === String ? $util.base64.encode(m.messageIDs[j], 0, m.messageIDs[j].length) : o.bytes === Array ? Array.prototype.slice.call(m.messageIDs[j]) : m.messageIDs[j];
      }
    }
    return d;
  };

  /**
   * Converts this ControlIHave to JSON.
   * @function toJSON
   * @memberof RPC.ControlIHave
   * @instance
   * @returns {Object.<string,*>} JSON object
   */
  ControlIHave.prototype.toJSON = function toJSON() {
    return this.constructor.toObject(this, $protobuf.util.toJSONOptions);
  };

  return ControlIHave;
})();
|
|
RPC.ControlIWant = (function () {

  /**
   * Properties of a ControlIWant.
   * Requests full messages for previously advertised message IDs.
   * @memberof RPC
   * @interface IControlIWant
   * @property {Array.<Uint8Array>|null} [messageIDs] ControlIWant messageIDs
   */

  /**
   * Constructs a new ControlIWant.
   * @memberof RPC
   * @classdesc Represents a ControlIWant.
   * @implements IControlIWant
   * @constructor
   * @param {RPC.IControlIWant=} [p] Properties to set
   */
  function ControlIWant(p) {
    this.messageIDs = [];
    // Shallow-copy only non-null own properties from the initializer.
    if (p)
      for (var ks = Object.keys(p), i = 0; i < ks.length; ++i)
        if (p[ks[i]] != null)
          this[ks[i]] = p[ks[i]];
  }

  /**
   * ControlIWant messageIDs.
   * @member {Array.<Uint8Array>} messageIDs
   * @memberof RPC.ControlIWant
   * @instance
   */
  ControlIWant.prototype.messageIDs = $util.emptyArray;

  /**
   * Encodes the specified ControlIWant message. Does not implicitly {@link RPC.ControlIWant.verify|verify} messages.
   * @function encode
   * @memberof RPC.ControlIWant
   * @static
   * @param {RPC.IControlIWant} m ControlIWant message or plain object to encode
   * @param {$protobuf.Writer} [w] Writer to encode to
   * @returns {$protobuf.Writer} Writer
   */
  ControlIWant.encode = function encode(m, w) {
    if (!w)
      w = $Writer.create();
    // 10 = (field 1 << 3) | wire type 2 (length-delimited bytes).
    if (m.messageIDs != null && m.messageIDs.length) {
      for (var i = 0; i < m.messageIDs.length; ++i)
        w.uint32(10).bytes(m.messageIDs[i]);
    }
    return w;
  };

  /**
   * Decodes a ControlIWant message from the specified reader or buffer.
   * Unknown fields are skipped by wire type.
   * @function decode
   * @memberof RPC.ControlIWant
   * @static
   * @param {$protobuf.Reader|Uint8Array} r Reader or buffer to decode from
   * @param {number} [l] Message length if known beforehand
   * @returns {RPC.ControlIWant} ControlIWant
   * @throws {Error} If the payload is not a reader or valid buffer
   * @throws {$protobuf.util.ProtocolError} If required fields are missing
   */
  ControlIWant.decode = function decode(r, l) {
    if (!(r instanceof $Reader))
      r = $Reader.create(r);
    var c = l === undefined ? r.len : r.pos + l, m = new $root.RPC.ControlIWant();
    while (r.pos < c) {
      var t = r.uint32();
      switch (t >>> 3) {
      case 1:
        // Lazily replace the shared prototype default with a fresh array.
        if (!(m.messageIDs && m.messageIDs.length))
          m.messageIDs = [];
        m.messageIDs.push(r.bytes());
        break;
      default:
        r.skipType(t & 7);
        break;
      }
    }
    return m;
  };

  /**
   * Creates a ControlIWant message from a plain object. Also converts values to their respective internal types.
   * String message IDs are treated as base64 and decoded to bytes.
   * @function fromObject
   * @memberof RPC.ControlIWant
   * @static
   * @param {Object.<string,*>} d Plain object
   * @returns {RPC.ControlIWant} ControlIWant
   * @throws {TypeError} If messageIDs is present but not an array
   */
  ControlIWant.fromObject = function fromObject(d) {
    if (d instanceof $root.RPC.ControlIWant)
      return d;
    var m = new $root.RPC.ControlIWant();
    if (d.messageIDs) {
      if (!Array.isArray(d.messageIDs))
        throw TypeError(".RPC.ControlIWant.messageIDs: array expected");
      m.messageIDs = [];
      for (var i = 0; i < d.messageIDs.length; ++i) {
        if (typeof d.messageIDs[i] === "string")
          $util.base64.decode(d.messageIDs[i], m.messageIDs[i] = $util.newBuffer($util.base64.length(d.messageIDs[i])), 0);
        else if (d.messageIDs[i].length)
          m.messageIDs[i] = d.messageIDs[i];
      }
    }
    return m;
  };

  /**
   * Creates a plain object from a ControlIWant message. Also converts values to other types if specified.
   * o.bytes selects the byte representation: String (base64), Array, or
   * the raw buffer as-is.
   * @function toObject
   * @memberof RPC.ControlIWant
   * @static
   * @param {RPC.ControlIWant} m ControlIWant
   * @param {$protobuf.IConversionOptions} [o] Conversion options
   * @returns {Object.<string,*>} Plain object
   */
  ControlIWant.toObject = function toObject(m, o) {
    if (!o)
      o = {};
    var d = {};
    if (o.arrays || o.defaults) {
      d.messageIDs = [];
    }
    if (m.messageIDs && m.messageIDs.length) {
      d.messageIDs = [];
      for (var j = 0; j < m.messageIDs.length; ++j) {
        d.messageIDs[j] = o.bytes === String ? $util.base64.encode(m.messageIDs[j], 0, m.messageIDs[j].length) : o.bytes === Array ? Array.prototype.slice.call(m.messageIDs[j]) : m.messageIDs[j];
      }
    }
    return d;
  };

  /**
   * Converts this ControlIWant to JSON.
   * @function toJSON
   * @memberof RPC.ControlIWant
   * @instance
   * @returns {Object.<string,*>} JSON object
   */
  ControlIWant.prototype.toJSON = function toJSON() {
    return this.constructor.toObject(this, $protobuf.util.toJSONOptions);
  };

  return ControlIWant;
})();
|
|
RPC.ControlGraft = (function () {

  /**
   * Properties of a ControlGraft.
   * Asks the receiving peer to add the sender to its mesh for a topic.
   * @memberof RPC
   * @interface IControlGraft
   * @property {string|null} [topicID] ControlGraft topicID
   */

  /**
   * Constructs a new ControlGraft.
   * @memberof RPC
   * @classdesc Represents a ControlGraft.
   * @implements IControlGraft
   * @constructor
   * @param {RPC.IControlGraft=} [p] Properties to set
   */
  function ControlGraft(p) {
    // Shallow-copy only non-null own properties from the initializer.
    if (p)
      for (var ks = Object.keys(p), i = 0; i < ks.length; ++i)
        if (p[ks[i]] != null)
          this[ks[i]] = p[ks[i]];
  }

  /**
   * ControlGraft topicID.
   * Proto3 optional field: null means "not set on the wire".
   * @member {string|null|undefined} topicID
   * @memberof RPC.ControlGraft
   * @instance
   */
  ControlGraft.prototype.topicID = null;

  // OneOf field names bound to virtual getters and setters
  var $oneOfFields;

  /**
   * ControlGraft _topicID.
   * Virtual synthetic-oneof member: yields "topicID" when the field is
   * set, undefined otherwise (proto3 optional presence tracking).
   * @member {"topicID"|undefined} _topicID
   * @memberof RPC.ControlGraft
   * @instance
   */
  Object.defineProperty(ControlGraft.prototype, "_topicID", {
    get: $util.oneOfGetter($oneOfFields = ["topicID"]),
    set: $util.oneOfSetter($oneOfFields)
  });

  /**
   * Encodes the specified ControlGraft message. Does not implicitly {@link RPC.ControlGraft.verify|verify} messages.
   * @function encode
   * @memberof RPC.ControlGraft
   * @static
   * @param {RPC.IControlGraft} m ControlGraft message or plain object to encode
   * @param {$protobuf.Writer} [w] Writer to encode to
   * @returns {$protobuf.Writer} Writer
   */
  ControlGraft.encode = function encode(m, w) {
    if (!w)
      w = $Writer.create();
    // 10 = (field 1 << 3) | wire type 2 (length-delimited string).
    if (m.topicID != null && Object.hasOwnProperty.call(m, "topicID"))
      w.uint32(10).string(m.topicID);
    return w;
  };

  /**
   * Decodes a ControlGraft message from the specified reader or buffer.
   * Unknown fields are skipped by wire type.
   * @function decode
   * @memberof RPC.ControlGraft
   * @static
   * @param {$protobuf.Reader|Uint8Array} r Reader or buffer to decode from
   * @param {number} [l] Message length if known beforehand
   * @returns {RPC.ControlGraft} ControlGraft
   * @throws {Error} If the payload is not a reader or valid buffer
   * @throws {$protobuf.util.ProtocolError} If required fields are missing
   */
  ControlGraft.decode = function decode(r, l) {
    if (!(r instanceof $Reader))
      r = $Reader.create(r);
    var c = l === undefined ? r.len : r.pos + l, m = new $root.RPC.ControlGraft();
    while (r.pos < c) {
      var t = r.uint32();
      switch (t >>> 3) {
      case 1:
        m.topicID = r.string();
        break;
      default:
        r.skipType(t & 7);
        break;
      }
    }
    return m;
  };

  /**
   * Creates a ControlGraft message from a plain object. Also converts values to their respective internal types.
   * @function fromObject
   * @memberof RPC.ControlGraft
   * @static
   * @param {Object.<string,*>} d Plain object
   * @returns {RPC.ControlGraft} ControlGraft
   */
  ControlGraft.fromObject = function fromObject(d) {
    if (d instanceof $root.RPC.ControlGraft)
      return d;
    var m = new $root.RPC.ControlGraft();
    if (d.topicID != null) {
      m.topicID = String(d.topicID);
    }
    return m;
  };

  /**
   * Creates a plain object from a ControlGraft message. Also converts values to other types if specified.
   * @function toObject
   * @memberof RPC.ControlGraft
   * @static
   * @param {RPC.ControlGraft} m ControlGraft
   * @param {$protobuf.IConversionOptions} [o] Conversion options
   * @returns {Object.<string,*>} Plain object
   */
  ControlGraft.toObject = function toObject(m, o) {
    if (!o)
      o = {};
    var d = {};
    if (m.topicID != null && m.hasOwnProperty("topicID")) {
      d.topicID = m.topicID;
      // o.oneofs also exposes the synthetic-oneof discriminator.
      if (o.oneofs)
        d._topicID = "topicID";
    }
    return d;
  };

  /**
   * Converts this ControlGraft to JSON.
   * @function toJSON
   * @memberof RPC.ControlGraft
   * @instance
   * @returns {Object.<string,*>} JSON object
   */
  ControlGraft.prototype.toJSON = function toJSON() {
    return this.constructor.toObject(this, $protobuf.util.toJSONOptions);
  };

  return ControlGraft;
})();
|
|
RPC.ControlPrune = (function () {

  /**
   * Properties of a ControlPrune.
   * Removes the sender from the receiver's mesh for a topic, optionally
   * suggesting alternative peers and a backoff period.
   * @memberof RPC
   * @interface IControlPrune
   * @property {string|null} [topicID] ControlPrune topicID
   * @property {Array.<RPC.IPeerInfo>|null} [peers] ControlPrune peers
   * @property {number|null} [backoff] ControlPrune backoff
   */

  /**
   * Constructs a new ControlPrune.
   * @memberof RPC
   * @classdesc Represents a ControlPrune.
   * @implements IControlPrune
   * @constructor
   * @param {RPC.IControlPrune=} [p] Properties to set
   */
  function ControlPrune(p) {
    this.peers = [];
    // Shallow-copy only non-null own properties from the initializer.
    if (p)
      for (var ks = Object.keys(p), i = 0; i < ks.length; ++i)
        if (p[ks[i]] != null)
          this[ks[i]] = p[ks[i]];
  }

  /**
   * ControlPrune topicID.
   * Proto3 optional field: null means "not set on the wire".
   * @member {string|null|undefined} topicID
   * @memberof RPC.ControlPrune
   * @instance
   */
  ControlPrune.prototype.topicID = null;

  /**
   * ControlPrune peers.
   * @member {Array.<RPC.IPeerInfo>} peers
   * @memberof RPC.ControlPrune
   * @instance
   */
  ControlPrune.prototype.peers = $util.emptyArray;

  /**
   * ControlPrune backoff.
   * Encoded as uint64; may be a number or a Long depending on platform.
   * @member {number|null|undefined} backoff
   * @memberof RPC.ControlPrune
   * @instance
   */
  ControlPrune.prototype.backoff = null;

  // OneOf field names bound to virtual getters and setters
  var $oneOfFields;

  /**
   * ControlPrune _topicID.
   * Virtual synthetic-oneof member: yields "topicID" when the field is
   * set, undefined otherwise (proto3 optional presence tracking).
   * @member {"topicID"|undefined} _topicID
   * @memberof RPC.ControlPrune
   * @instance
   */
  Object.defineProperty(ControlPrune.prototype, "_topicID", {
    get: $util.oneOfGetter($oneOfFields = ["topicID"]),
    set: $util.oneOfSetter($oneOfFields)
  });

  /**
   * ControlPrune _backoff.
   * Virtual synthetic-oneof member for the optional backoff field.
   * @member {"backoff"|undefined} _backoff
   * @memberof RPC.ControlPrune
   * @instance
   */
  Object.defineProperty(ControlPrune.prototype, "_backoff", {
    get: $util.oneOfGetter($oneOfFields = ["backoff"]),
    set: $util.oneOfSetter($oneOfFields)
  });

  /**
   * Encodes the specified ControlPrune message. Does not implicitly {@link RPC.ControlPrune.verify|verify} messages.
   * @function encode
   * @memberof RPC.ControlPrune
   * @static
   * @param {RPC.IControlPrune} m ControlPrune message or plain object to encode
   * @param {$protobuf.Writer} [w] Writer to encode to
   * @returns {$protobuf.Writer} Writer
   */
  ControlPrune.encode = function encode(m, w) {
    if (!w)
      w = $Writer.create();
    // Tags: 10 = field 1/wt 2, 18 = field 2/wt 2, 24 = field 3/wt 0 (varint).
    if (m.topicID != null && Object.hasOwnProperty.call(m, "topicID"))
      w.uint32(10).string(m.topicID);
    if (m.peers != null && m.peers.length) {
      for (var i = 0; i < m.peers.length; ++i)
        $root.RPC.PeerInfo.encode(m.peers[i], w.uint32(18).fork()).ldelim();
    }
    if (m.backoff != null && Object.hasOwnProperty.call(m, "backoff"))
      w.uint32(24).uint64(m.backoff);
    return w;
  };

  /**
   * Decodes a ControlPrune message from the specified reader or buffer.
   * Unknown fields are skipped by wire type.
   * @function decode
   * @memberof RPC.ControlPrune
   * @static
   * @param {$protobuf.Reader|Uint8Array} r Reader or buffer to decode from
   * @param {number} [l] Message length if known beforehand
   * @returns {RPC.ControlPrune} ControlPrune
   * @throws {Error} If the payload is not a reader or valid buffer
   * @throws {$protobuf.util.ProtocolError} If required fields are missing
   */
  ControlPrune.decode = function decode(r, l) {
    if (!(r instanceof $Reader))
      r = $Reader.create(r);
    var c = l === undefined ? r.len : r.pos + l, m = new $root.RPC.ControlPrune();
    while (r.pos < c) {
      var t = r.uint32();
      switch (t >>> 3) {
      case 1:
        m.topicID = r.string();
        break;
      case 2:
        // Lazily replace the shared prototype default with a fresh array.
        if (!(m.peers && m.peers.length))
          m.peers = [];
        m.peers.push($root.RPC.PeerInfo.decode(r, r.uint32()));
        break;
      case 3:
        m.backoff = r.uint64();
        break;
      default:
        r.skipType(t & 7);
        break;
      }
    }
    return m;
  };

  /**
   * Creates a ControlPrune message from a plain object. Also converts values to their respective internal types.
   * backoff accepts a Long, numeric string, number, or {low, high} pair.
   * @function fromObject
   * @memberof RPC.ControlPrune
   * @static
   * @param {Object.<string,*>} d Plain object
   * @returns {RPC.ControlPrune} ControlPrune
   * @throws {TypeError} If peers is present but not an array of objects
   */
  ControlPrune.fromObject = function fromObject(d) {
    if (d instanceof $root.RPC.ControlPrune)
      return d;
    var m = new $root.RPC.ControlPrune();
    if (d.topicID != null) {
      m.topicID = String(d.topicID);
    }
    if (d.peers) {
      if (!Array.isArray(d.peers))
        throw TypeError(".RPC.ControlPrune.peers: array expected");
      m.peers = [];
      for (var i = 0; i < d.peers.length; ++i) {
        if (typeof d.peers[i] !== "object")
          throw TypeError(".RPC.ControlPrune.peers: object expected");
        m.peers[i] = $root.RPC.PeerInfo.fromObject(d.peers[i]);
      }
    }
    if (d.backoff != null) {
      // Prefer Long when available so the full uint64 range is preserved.
      if ($util.Long)
        (m.backoff = $util.Long.fromValue(d.backoff)).unsigned = true;
      else if (typeof d.backoff === "string")
        m.backoff = parseInt(d.backoff, 10);
      else if (typeof d.backoff === "number")
        m.backoff = d.backoff;
      else if (typeof d.backoff === "object")
        m.backoff = new $util.LongBits(d.backoff.low >>> 0, d.backoff.high >>> 0).toNumber(true);
    }
    return m;
  };

  /**
   * Creates a plain object from a ControlPrune message. Also converts values to other types if specified.
   * o.longs selects the 64-bit representation: String, Number, or the
   * stored value (possibly a Long) unchanged.
   * @function toObject
   * @memberof RPC.ControlPrune
   * @static
   * @param {RPC.ControlPrune} m ControlPrune
   * @param {$protobuf.IConversionOptions} [o] Conversion options
   * @returns {Object.<string,*>} Plain object
   */
  ControlPrune.toObject = function toObject(m, o) {
    if (!o)
      o = {};
    var d = {};
    if (o.arrays || o.defaults) {
      d.peers = [];
    }
    if (m.topicID != null && m.hasOwnProperty("topicID")) {
      d.topicID = m.topicID;
      // o.oneofs also exposes the synthetic-oneof discriminator.
      if (o.oneofs)
        d._topicID = "topicID";
    }
    if (m.peers && m.peers.length) {
      d.peers = [];
      for (var j = 0; j < m.peers.length; ++j) {
        d.peers[j] = $root.RPC.PeerInfo.toObject(m.peers[j], o);
      }
    }
    if (m.backoff != null && m.hasOwnProperty("backoff")) {
      if (typeof m.backoff === "number")
        d.backoff = o.longs === String ? String(m.backoff) : m.backoff;
      else
        d.backoff = o.longs === String ? $util.Long.prototype.toString.call(m.backoff) : o.longs === Number ? new $util.LongBits(m.backoff.low >>> 0, m.backoff.high >>> 0).toNumber(true) : m.backoff;
      if (o.oneofs)
        d._backoff = "backoff";
    }
    return d;
  };

  /**
   * Converts this ControlPrune to JSON.
   * @function toJSON
   * @memberof RPC.ControlPrune
   * @instance
   * @returns {Object.<string,*>} JSON object
   */
  ControlPrune.prototype.toJSON = function toJSON() {
    return this.constructor.toObject(this, $protobuf.util.toJSONOptions);
  };

  return ControlPrune;
})();
|
|
RPC.PeerInfo = (function () {
|
|
/**
|
|
* Properties of a PeerInfo.
|
|
* @memberof RPC
|
|
* @interface IPeerInfo
|
|
* @property {Uint8Array|null} [peerID] PeerInfo peerID
|
|
* @property {Uint8Array|null} [signedPeerRecord] PeerInfo signedPeerRecord
|
|
*/
|
|
/**
|
|
* Constructs a new PeerInfo.
|
|
* @memberof RPC
|
|
* @classdesc Represents a PeerInfo.
|
|
* @implements IPeerInfo
|
|
* @constructor
|
|
* @param {RPC.IPeerInfo=} [p] Properties to set
|
|
*/
|
|
function PeerInfo(p) {
|
|
if (p)
|
|
for (var ks = Object.keys(p), i = 0; i < ks.length; ++i)
|
|
if (p[ks[i]] != null)
|
|
this[ks[i]] = p[ks[i]];
|
|
}
|
|
/**
|
|
* PeerInfo peerID.
|
|
* @member {Uint8Array|null|undefined} peerID
|
|
* @memberof RPC.PeerInfo
|
|
* @instance
|
|
*/
|
|
PeerInfo.prototype.peerID = null;
|
|
/**
|
|
* PeerInfo signedPeerRecord.
|
|
* @member {Uint8Array|null|undefined} signedPeerRecord
|
|
* @memberof RPC.PeerInfo
|
|
* @instance
|
|
*/
|
|
PeerInfo.prototype.signedPeerRecord = null;
|
|
// OneOf field names bound to virtual getters and setters
|
|
var $oneOfFields;
|
|
/**
|
|
* PeerInfo _peerID.
|
|
* @member {"peerID"|undefined} _peerID
|
|
* @memberof RPC.PeerInfo
|
|
* @instance
|
|
*/
|
|
Object.defineProperty(PeerInfo.prototype, "_peerID", {
|
|
get: $util.oneOfGetter($oneOfFields = ["peerID"]),
|
|
set: $util.oneOfSetter($oneOfFields)
|
|
});
|
|
/**
|
|
* PeerInfo _signedPeerRecord.
|
|
* @member {"signedPeerRecord"|undefined} _signedPeerRecord
|
|
* @memberof RPC.PeerInfo
|
|
* @instance
|
|
*/
|
|
Object.defineProperty(PeerInfo.prototype, "_signedPeerRecord", {
|
|
get: $util.oneOfGetter($oneOfFields = ["signedPeerRecord"]),
|
|
set: $util.oneOfSetter($oneOfFields)
|
|
});
|
|
/**
|
|
* Encodes the specified PeerInfo message. Does not implicitly {@link RPC.PeerInfo.verify|verify} messages.
|
|
* @function encode
|
|
* @memberof RPC.PeerInfo
|
|
* @static
|
|
* @param {RPC.IPeerInfo} m PeerInfo message or plain object to encode
|
|
* @param {$protobuf.Writer} [w] Writer to encode to
|
|
* @returns {$protobuf.Writer} Writer
|
|
*/
|
|
PeerInfo.encode = function encode(m, w) {
|
|
if (!w)
|
|
w = $Writer.create();
|
|
if (m.peerID != null && Object.hasOwnProperty.call(m, "peerID"))
|
|
w.uint32(10).bytes(m.peerID);
|
|
if (m.signedPeerRecord != null && Object.hasOwnProperty.call(m, "signedPeerRecord"))
|
|
w.uint32(18).bytes(m.signedPeerRecord);
|
|
return w;
|
|
};
|
|
/**
|
|
* Decodes a PeerInfo message from the specified reader or buffer.
|
|
* @function decode
|
|
* @memberof RPC.PeerInfo
|
|
* @static
|
|
* @param {$protobuf.Reader|Uint8Array} r Reader or buffer to decode from
|
|
* @param {number} [l] Message length if known beforehand
|
|
* @returns {RPC.PeerInfo} PeerInfo
|
|
* @throws {Error} If the payload is not a reader or valid buffer
|
|
* @throws {$protobuf.util.ProtocolError} If required fields are missing
|
|
*/
|
|
PeerInfo.decode = function decode(r, l) {
|
|
if (!(r instanceof $Reader))
|
|
r = $Reader.create(r);
|
|
var c = l === undefined ? r.len : r.pos + l, m = new $root.RPC.PeerInfo();
|
|
while (r.pos < c) {
|
|
var t = r.uint32();
|
|
switch (t >>> 3) {
|
|
case 1:
|
|
m.peerID = r.bytes();
|
|
break;
|
|
case 2:
|
|
m.signedPeerRecord = r.bytes();
|
|
break;
|
|
default:
|
|
r.skipType(t & 7);
|
|
break;
|
|
}
|
|
}
|
|
return m;
|
|
};
|
|
/**
 * Creates a PeerInfo message from a plain object. Also converts values to their respective internal types.
 * @function fromObject
 * @memberof RPC.PeerInfo
 * @static
 * @param {Object.<string,*>} d Plain object
 * @returns {RPC.PeerInfo} PeerInfo
 */
PeerInfo.fromObject = function fromObject(d) {
    // Already the right type: return as-is.
    if (d instanceof $root.RPC.PeerInfo) {
        return d;
    }
    const m = new $root.RPC.PeerInfo();
    // Bytes fields accept either a base64 string or an array-like value.
    if (d.peerID != null) {
        if (typeof d.peerID === "string") {
            m.peerID = $util.newBuffer($util.base64.length(d.peerID));
            $util.base64.decode(d.peerID, m.peerID, 0);
        } else if (d.peerID.length) {
            m.peerID = d.peerID;
        }
    }
    if (d.signedPeerRecord != null) {
        if (typeof d.signedPeerRecord === "string") {
            m.signedPeerRecord = $util.newBuffer($util.base64.length(d.signedPeerRecord));
            $util.base64.decode(d.signedPeerRecord, m.signedPeerRecord, 0);
        } else if (d.signedPeerRecord.length) {
            m.signedPeerRecord = d.signedPeerRecord;
        }
    }
    return m;
};
|
|
/**
 * Creates a plain object from a PeerInfo message. Also converts values to other types if specified.
 * @function toObject
 * @memberof RPC.PeerInfo
 * @static
 * @param {RPC.PeerInfo} m PeerInfo
 * @param {$protobuf.IConversionOptions} [o] Conversion options
 * @returns {Object.<string,*>} Plain object
 */
PeerInfo.toObject = function toObject(m, o) {
    const opts = o || {};
    const d = {};
    // Convert a bytes field according to the requested representation:
    // base64 string, plain Array, or the raw buffer unchanged.
    const convertBytes = (value) =>
        opts.bytes === String
            ? $util.base64.encode(value, 0, value.length)
            : opts.bytes === Array
                ? Array.prototype.slice.call(value)
                : value;
    if (m.peerID != null && m.hasOwnProperty("peerID")) {
        d.peerID = convertBytes(m.peerID);
        if (opts.oneofs) {
            d._peerID = "peerID";
        }
    }
    if (m.signedPeerRecord != null && m.hasOwnProperty("signedPeerRecord")) {
        d.signedPeerRecord = convertBytes(m.signedPeerRecord);
        if (opts.oneofs) {
            d._signedPeerRecord = "signedPeerRecord";
        }
    }
    return d;
};
|
|
/**
 * Converts this PeerInfo to JSON.
 * Delegates to the static toObject with protobufjs' default JSON conversion options.
 * @function toJSON
 * @memberof RPC.PeerInfo
 * @instance
 * @returns {Object.<string,*>} JSON object
 */
PeerInfo.prototype.toJSON = function toJSON() {
    return this.constructor.toObject(this, $protobuf.util.toJSONOptions);
};
|
|
return PeerInfo;
|
|
})();
|
|
return RPC;
|
|
})();
|
|
return $root;
|
|
});
|
|
|
|
} (rpc$1));
|
|
|
|
var cjs = rpc$1.exports;
|
|
|
|
const { RPC } = cjs;
|
|
|
|
// Base time units, in milliseconds
const second = 1000;
const minute = 60 * second;
// Protocol identifiers
/** Protocol ID for the legacy floodsub protocol */
const FloodsubID = '/floodsub/1.0.0';
/**
 * The protocol ID for version 1.0.0 of the Gossipsub protocol
 * It is advertised along with GossipsubIDv11 for backwards compatibility
 */
const GossipsubIDv10 = '/meshsub/1.0.0';
/**
 * The protocol ID for version 1.1.0 of the Gossipsub protocol
 * See the spec for details about how v1.1.0 compares to v1.0.0:
 * https://github.com/libp2p/specs/blob/master/pubsub/gossipsub/gossipsub-v1.1.md
 */
const GossipsubIDv11 = '/meshsub/1.1.0';
// Overlay parameters
/**
 * GossipsubD sets the optimal degree for a Gossipsub topic mesh. For example, if GossipsubD == 6,
 * each peer will want to have about six peers in their mesh for each topic they're subscribed to.
 * GossipsubD should be set somewhere between GossipsubDlo and GossipsubDhi.
 */
const GossipsubD = 6;
/**
 * GossipsubDlo sets the lower bound on the number of peers we keep in a Gossipsub topic mesh.
 * If we have fewer than GossipsubDlo peers, we will attempt to graft some more into the mesh at
 * the next heartbeat.
 */
const GossipsubDlo = 4;
/**
 * GossipsubDhi sets the upper bound on the number of peers we keep in a Gossipsub topic mesh.
 * If we have more than GossipsubDhi peers, we will select some to prune from the mesh at the next heartbeat.
 */
const GossipsubDhi = 12;
/**
 * GossipsubDscore affects how peers are selected when pruning a mesh due to over subscription.
 * At least GossipsubDscore of the retained peers will be high-scoring, while the remainder are
 * chosen randomly.
 */
const GossipsubDscore = 4;
/**
 * GossipsubDout sets the quota for the number of outbound connections to maintain in a topic mesh.
 * When the mesh is pruned due to over subscription, we make sure that we have outbound connections
 * to at least GossipsubDout of the survivor peers. This prevents sybil attackers from overwhelming
 * our mesh with incoming connections.
 *
 * GossipsubDout must be set below GossipsubDlo, and must not exceed GossipsubD / 2.
 */
const GossipsubDout = 2;
// Gossip parameters
/**
 * GossipsubHistoryLength controls the size of the message cache used for gossip.
 * The message cache will remember messages for GossipsubHistoryLength heartbeats.
 */
const GossipsubHistoryLength = 5;
/**
 * GossipsubHistoryGossip controls how many cached message ids we will advertise in
 * IHAVE gossip messages. When asked for our seen message IDs, we will return
 * only those from the most recent GossipsubHistoryGossip heartbeats. The slack between
 * GossipsubHistoryGossip and GossipsubHistoryLength allows us to avoid advertising messages
 * that will be expired by the time they're requested.
 *
 * GossipsubHistoryGossip must be less than or equal to GossipsubHistoryLength to
 * avoid a runtime panic.
 */
const GossipsubHistoryGossip = 3;
/**
 * GossipsubDlazy affects how many peers we will emit gossip to at each heartbeat.
 * We will send gossip to at least GossipsubDlazy peers outside our mesh. The actual
 * number may be more, depending on GossipsubGossipFactor and how many peers we're
 * connected to.
 */
const GossipsubDlazy = 6;
/**
 * GossipsubGossipFactor affects how many peers we will emit gossip to at each heartbeat.
 * We will send gossip to GossipsubGossipFactor * (total number of non-mesh peers), or
 * GossipsubDlazy, whichever is greater.
 */
const GossipsubGossipFactor = 0.25;
/**
 * GossipsubGossipRetransmission controls how many times we will allow a peer to request
 * the same message id through IWANT gossip before we start ignoring them. This is designed
 * to prevent peers from spamming us with requests and wasting our resources.
 */
const GossipsubGossipRetransmission = 3;
// Heartbeat interval
/**
 * GossipsubHeartbeatInitialDelay is the short delay before the heartbeat timer begins
 * after the router is initialized.
 */
const GossipsubHeartbeatInitialDelay = 100;
/**
 * GossipsubHeartbeatInterval controls the time between heartbeats.
 */
const GossipsubHeartbeatInterval = second;
/**
 * GossipsubFanoutTTL controls how long we keep track of the fanout state. If it's been
 * GossipsubFanoutTTL since we've published to a topic that we're not subscribed to,
 * we'll delete the fanout map for that topic.
 */
const GossipsubFanoutTTL = minute;
/**
 * GossipsubPrunePeers controls the number of peers to include in prune Peer eXchange.
 * When we prune a peer that's eligible for PX (has a good score, etc), we will try to
 * send them signed peer records for up to GossipsubPrunePeers other peers that we
 * know of.
 */
const GossipsubPrunePeers = 16;
/**
 * GossipsubPruneBackoff controls the backoff time for pruned peers. This is how long
 * a peer must wait before attempting to graft into our mesh again after being pruned.
 * When pruning a peer, we send them our value of GossipsubPruneBackoff so they know
 * the minimum time to wait. Peers running older versions may not send a backoff time,
 * so if we receive a prune message without one, we will wait at least GossipsubPruneBackoff
 * before attempting to re-graft.
 */
const GossipsubPruneBackoff = minute;
/**
 * GossipsubPruneBackoffTicks is the number of heartbeat ticks for attempting to prune expired
 * backoff timers.
 */
const GossipsubPruneBackoffTicks = 15;
/**
 * GossipsubDirectConnectTicks is the number of heartbeat ticks for attempting to reconnect direct peers
 * that are not currently connected.
 */
const GossipsubDirectConnectTicks = 300;
/**
 * GossipsubDirectConnectInitialDelay is the initial delay before opening connections to direct peers
 */
const GossipsubDirectConnectInitialDelay = second;
/**
 * GossipsubOpportunisticGraftTicks is the number of heartbeat ticks for attempting to improve the mesh
 * with opportunistic grafting. Every GossipsubOpportunisticGraftTicks we will attempt to select some
 * high-scoring mesh peers to replace lower-scoring ones, if the median score of our mesh peers falls
 * below a threshold
 */
const GossipsubOpportunisticGraftTicks = 60;
/**
 * GossipsubOpportunisticGraftPeers is the number of peers to opportunistically graft.
 */
const GossipsubOpportunisticGraftPeers = 2;
/**
 * If a GRAFT comes before GossipsubGraftFloodThreshold has elapsed since the last PRUNE,
 * then there is an extra score penalty applied to the peer through P7.
 */
const GossipsubGraftFloodThreshold = 10 * second;
/**
 * GossipsubMaxIHaveLength is the maximum number of messages to include in an IHAVE message.
 * Also controls the maximum number of IHAVE ids we will accept and request with IWANT from a
 * peer within a heartbeat, to protect from IHAVE floods. You should adjust this value from the
 * default if your system is pushing more than 5000 messages in GossipsubHistoryGossip heartbeats;
 * with the defaults this is 1666 messages/s.
 */
const GossipsubMaxIHaveLength = 5000;
/**
 * GossipsubMaxIHaveMessages is the maximum number of IHAVE messages to accept from a peer within a heartbeat.
 */
const GossipsubMaxIHaveMessages = 10;
/**
 * Time to wait for a message requested through IWANT following an IHAVE advertisement.
 * If the message is not received within this window, a broken promise is declared and
 * the router may apply behavioural penalties.
 */
const GossipsubIWantFollowupTime = 3 * second;
/**
 * Time in milliseconds to keep message ids in the seen cache
 */
const GossipsubSeenTTL = 2 * minute;
/** Duration in milliseconds that entries are retained in the time cache */
const TimeCacheDuration = 120 * 1000;
/** Error code: a topic validator rejected the message */
const ERR_TOPIC_VALIDATOR_REJECT = 'ERR_TOPIC_VALIDATOR_REJECT';
/** Error code: a topic validator ignored the message */
const ERR_TOPIC_VALIDATOR_IGNORE = 'ERR_TOPIC_VALIDATOR_IGNORE';
/**
 * If peer score is better than this, we accept messages from this peer
 * within ACCEPT_FROM_WHITELIST_DURATION_MS from the last time computing score.
 **/
const ACCEPT_FROM_WHITELIST_THRESHOLD_SCORE = 0;
/**
 * If peer score >= ACCEPT_FROM_WHITELIST_THRESHOLD_SCORE, accept up to this
 * number of messages from that peer.
 */
const ACCEPT_FROM_WHITELIST_MAX_MESSAGES = 128;
/**
 * If peer score >= ACCEPT_FROM_WHITELIST_THRESHOLD_SCORE, accept messages from
 * this peer up to this time duration.
 */
const ACCEPT_FROM_WHITELIST_DURATION_MS = 1000;
/**
 * The default MeshMessageDeliveriesWindow to be used in metrics.
 */
const DEFAULT_METRIC_MESH_MESSAGE_DELIVERIES_WINDOWS = 1000;
|
|
|
|
/**
 * Create a gossipsub RPC object.
 * @param {Array} [messages] - messages to include in the RPC
 * @param {Object} [control] - optional control section; missing sub-arrays default to []
 * @returns {{subscriptions: Array, messages: Array, control: (Object|undefined)}}
 */
function createGossipRpc(messages = [], control) {
    let controlOut;
    if (control) {
        // Fill in any missing control sub-lists with empty arrays.
        controlOut = {
            graft: control.graft || [],
            prune: control.prune || [],
            ihave: control.ihave || [],
            iwant: control.iwant || []
        };
    }
    return {
        subscriptions: [],
        messages,
        control: controlOut
    };
}
|
|
|
|
/**
 * Pseudo-randomly shuffles an array in place using the Fisher–Yates algorithm.
 *
 * Mutates the input array and returns it.
 *
 * NOTE: the previous implementation chose the swap index uniformly over the
 * whole array for every position, which is known to produce a biased
 * (non-uniform) permutation; Fisher–Yates yields each permutation with equal
 * probability.
 *
 * @param {Array} arr - array to shuffle (mutated)
 * @returns {Array} the same array, shuffled
 */
function shuffle(arr) {
    if (arr.length <= 1) {
        return arr;
    }
    // Walk from the end, swapping each slot with a uniformly chosen index in [0, i].
    for (let i = arr.length - 1; i > 0; i--) {
        const j = Math.floor(Math.random() * (i + 1));
        const tmp = arr[i];
        arr[i] = arr[j];
        arr[j] = tmp;
    }
    return arr;
}
|
|
|
|
/**
 * Browser friendly function to convert Uint8Array message id to base64 string.
 * @param {Uint8Array} msgId - raw message id bytes
 * @returns {string} base64 encoding of the bytes (via the bundled uint8arrays toString helper)
 */
function messageIdToString(msgId) {
    return toString$3(msgId, 'base64');
}
|
|
|
|
// Signature policy constants (string values mirror the SignaturePolicy enum below).
/**
 * Sign-and-verify policy.
 *
 * On the producing side:
 * * Build messages with the signature, key (from may be enough for certain inlineable public key types), from and seqno fields.
 *
 * On the consuming side:
 * * Enforce the fields to be present, reject otherwise.
 * * Propagate only if the fields are valid and signature can be verified, reject otherwise.
 */
const StrictSign = 'StrictSign';
/**
 * Anonymous policy.
 *
 * On the producing side:
 * * Build messages without the signature, key, from and seqno fields.
 * * The corresponding protobuf key-value pairs are absent from the marshalled message, not just empty.
 *
 * On the consuming side:
 * * Enforce the fields to be absent, reject otherwise.
 * * Propagate only if the fields are absent, reject otherwise.
 * * A message_id function will not be able to use the above fields, and should instead rely on the data field. A commonplace strategy is to calculate a hash.
 */
const StrictNoSign = 'StrictNoSign';
|
|
|
|
/**
 * Message signature policy (TypeScript string-enum compiled to a namespace object).
 */
var SignaturePolicy;
(function (policy) {
    // Produce signed messages (signature, key, from, seqno); consumers must
    // verify those fields and the signature before propagating.
    policy.StrictSign = 'StrictSign';
    // Produce anonymous messages with signature, key, from and seqno entirely
    // absent; consumers reject messages that carry any of them.
    policy.StrictNoSign = 'StrictNoSign';
})(SignaturePolicy || (SignaturePolicy = {}));
|
|
/**
 * How outgoing messages are authored (numeric TS enum with reverse mapping).
 */
var PublishConfigType;
(function (cfg) {
    cfg.Signing = 0;
    cfg[0] = 'Signing';
    cfg.Anonymous = 1;
    cfg[1] = 'Anonymous';
})(PublishConfigType || (PublishConfigType = {}));
|
|
/**
 * Outcome of topic validation for a received message.
 */
var MessageAcceptance;
(function (acceptance) {
    // Valid: deliver locally and forward to the network.
    acceptance.Accept = 'accept';
    // Neither delivered nor forwarded, but the P4 penalty is not triggered.
    acceptance.Ignore = 'ignore';
    // Invalid: rejected, triggering the P4 penalty.
    acceptance.Reject = 'reject';
})(MessageAcceptance || (MessageAcceptance = {}));
|
|
/**
 * Why a message was rejected (used for scoring and metrics).
 */
var RejectReason;
(function (reason) {
    // The message failed the configured validation during decoding.
    // SelfOrigin is considered a ValidationError.
    reason.Error = 'error';
    // Custom validator fn reported status IGNORE.
    reason.Ignore = 'ignore';
    // Custom validator fn reported status REJECT.
    reason.Reject = 'reject';
    // The sending peer OR the source `from` field is blacklisted; the message
    // is ignored without penalty and without creating a score record.
    reason.Blacklisted = 'blacklisted';
})(RejectReason || (RejectReason = {}));
|
|
/**
 * Specific validation failures detected while decoding/verifying a message.
 */
var ValidateError;
(function (err) {
    // The message has an invalid signature.
    err.InvalidSignature = 'invalid_signature';
    // The sequence number was the incorrect size.
    err.InvalidSeqno = 'invalid_seqno';
    // The PeerId was invalid.
    err.InvalidPeerId = 'invalid_peerid';
    // Signature present although the policy is anonymous.
    err.SignaturePresent = 'signature_present';
    // Sequence number present although the policy is anonymous.
    err.SeqnoPresent = 'seqno_present';
    // Message source present although the policy is anonymous.
    err.FromPresent = 'from_present';
    // The data transformation failed.
    err.TransformFailed = 'transform_failed';
})(ValidateError || (ValidateError = {}));
|
|
/**
 * Classification of an incoming message after deduplication and validation.
 */
var MessageStatus;
(function (status) {
    status.duplicate = 'duplicate';
    status.invalid = 'invalid';
    status.valid = 'valid';
})(MessageStatus || (MessageStatus = {}));
|
|
/**
 * Typesafe conversion of MessageAcceptance -> RejectReason. TS ensures all values covered.
 * Note: MessageAcceptance.Accept has no mapping and yields undefined by design.
 */
function rejectReasonFromAcceptance(acceptance) {
    if (acceptance === MessageAcceptance.Ignore) {
        return RejectReason.Ignore;
    }
    if (acceptance === MessageAcceptance.Reject) {
        return RejectReason.Reject;
    }
}
|
|
|
|
/**
 * Prepare a PublishConfig object from a PeerId.
 * @param {string} signaturePolicy - StrictSign or StrictNoSign
 * @param {PeerId} [peerId] - required (with both keys) when signing is requested
 * @returns {Promise<Object>} publish configuration for the chosen policy
 * @throws {Error} when the policy is unknown or required key material is missing
 */
async function getPublishConfigFromPeerId(signaturePolicy, peerId) {
    if (signaturePolicy === StrictSign) {
        if (!peerId) {
            throw Error('Must provide PeerId');
        }
        if (peerId.privateKey == null) {
            throw Error('Cannot sign message, no private key present');
        }
        if (peerId.publicKey == null) {
            throw Error('Cannot sign message, no public key present');
        }
        // Transform privateKey once at initialization time instead of once per message
        const privateKey = await unmarshalPrivateKey(peerId.privateKey);
        return {
            type: PublishConfigType.Signing,
            author: peerId,
            key: peerId.publicKey,
            privateKey
        };
    }
    if (signaturePolicy === StrictNoSign) {
        return {
            type: PublishConfigType.Anonymous
        };
    }
    throw new Error(`Unknown signature policy "${signaturePolicy}"`);
}
|
|
|
|
/** Error code attached (via errCode) when peer score parameters fail validation. */
const ERR_INVALID_PEER_SCORE_PARAMS = 'ERR_INVALID_PEER_SCORE_PARAMS';

/**
 * Default peer-level score parameters (topic map, caps, decays, P5–P7 weights).
 * NOTE(review): values appear to mirror the reference gossipsub defaults — confirm
 * against upstream before tuning.
 */
const defaultPeerScoreParams = {
    topics: {},
    topicScoreCap: 10.0,
    appSpecificScore: () => 0.0,
    appSpecificWeight: 10.0,
    IPColocationFactorWeight: -5.0,
    IPColocationFactorThreshold: 10.0,
    IPColocationFactorWhitelist: new Set(),
    behaviourPenaltyWeight: -10.0,
    behaviourPenaltyThreshold: 0.0,
    behaviourPenaltyDecay: 0.2,
    decayInterval: 1000.0,
    decayToZero: 0.1,
    retainScore: 3600 * 1000
};
/**
 * Default per-topic score parameters (weights, caps and decays for P1–P4).
 */
const defaultTopicScoreParams = {
    topicWeight: 0.5,
    timeInMeshWeight: 1,
    timeInMeshQuantum: 1,
    timeInMeshCap: 3600,
    firstMessageDeliveriesWeight: 1,
    firstMessageDeliveriesDecay: 0.5,
    firstMessageDeliveriesCap: 2000,
    meshMessageDeliveriesWeight: -1,
    meshMessageDeliveriesDecay: 0.5,
    meshMessageDeliveriesCap: 100,
    meshMessageDeliveriesThreshold: 20,
    meshMessageDeliveriesWindow: 10,
    meshMessageDeliveriesActivation: 5000,
    meshFailurePenaltyWeight: -1,
    meshFailurePenaltyDecay: 0.5,
    invalidMessageDeliveriesWeight: -1,
    invalidMessageDeliveriesDecay: 0.3
};
|
|
/**
 * Build a full peer-score parameter object by overlaying user overrides on the
 * defaults; every supplied topic config is normalized through createTopicScoreParams.
 */
function createPeerScoreParams(p = {}) {
    const topics = {};
    if (p.topics) {
        for (const [topic, topicScoreParams] of Object.entries(p.topics)) {
            topics[topic] = createTopicScoreParams(topicScoreParams);
        }
    }
    return {
        ...defaultPeerScoreParams,
        ...p,
        topics
    };
}
|
|
/**
 * Build a full topic-score parameter object: defaults overlaid with user overrides.
 */
function createTopicScoreParams(p = {}) {
    return Object.assign({}, defaultTopicScoreParams, p);
}
|
|
// peer score parameter validation
/**
 * Validate the peer-level score parameters, throwing an errCode Error tagged
 * ERR_INVALID_PEER_SCORE_PARAMS at the first violation. Per-topic parameters
 * are validated first via validateTopicScoreParams.
 */
function validatePeerScoreParams(p) {
    const fail = (msg) => {
        throw errCode(new Error(msg), ERR_INVALID_PEER_SCORE_PARAMS);
    };
    // validate each configured topic's parameters
    for (const [topic, params] of Object.entries(p.topics)) {
        try {
            validateTopicScoreParams(params);
        } catch (e) {
            fail(`invalid score parameters for topic ${topic}: ${e.message}`);
        }
    }
    // check that the topic score is 0 or something positive
    if (p.topicScoreCap < 0) {
        fail('invalid topic score cap; must be positive (or 0 for no cap)');
    }
    // check that we have an app specific score; the weight can be anything (but expected positive)
    if (p.appSpecificScore === null || p.appSpecificScore === undefined) {
        fail('missing application specific score function');
    }
    // check the IP colocation factor
    if (p.IPColocationFactorWeight > 0) {
        fail('invalid IPColocationFactorWeight; must be negative (or 0 to disable)');
    }
    if (p.IPColocationFactorWeight !== 0 && p.IPColocationFactorThreshold < 1) {
        fail('invalid IPColocationFactorThreshold; must be at least 1');
    }
    // check the behaviour penalty
    if (p.behaviourPenaltyWeight > 0) {
        fail('invalid BehaviourPenaltyWeight; must be negative (or 0 to disable)');
    }
    if (p.behaviourPenaltyWeight !== 0 && (p.behaviourPenaltyDecay <= 0 || p.behaviourPenaltyDecay >= 1)) {
        fail('invalid BehaviourPenaltyDecay; must be between 0 and 1');
    }
    // check the decay parameters
    if (p.decayInterval < 1000) {
        fail('invalid DecayInterval; must be at least 1s');
    }
    if (p.decayToZero <= 0 || p.decayToZero >= 1) {
        fail('invalid DecayToZero; must be between 0 and 1');
    }
    // no need to check the score retention; a value of 0 means that we don't retain scores
}
|
|
/**
 * Validate a single topic's score parameters (P1–P4 weights, caps and decays).
 * Throws an errCode Error tagged ERR_INVALID_PEER_SCORE_PARAMS on the first violation.
 * Fix: corrected "invallid" typo in the FirstMessageDeliveriesWeight error message.
 */
function validateTopicScoreParams(p) {
    // make sure we have a sane topic weight
    if (p.topicWeight < 0) {
        throw errCode(new Error('invalid topic weight; must be >= 0'), ERR_INVALID_PEER_SCORE_PARAMS);
    }
    // check P1: time in mesh
    if (p.timeInMeshQuantum === 0) {
        throw errCode(new Error('invalid TimeInMeshQuantum; must be non zero'), ERR_INVALID_PEER_SCORE_PARAMS);
    }
    if (p.timeInMeshWeight < 0) {
        throw errCode(new Error('invalid TimeInMeshWeight; must be positive (or 0 to disable)'), ERR_INVALID_PEER_SCORE_PARAMS);
    }
    if (p.timeInMeshWeight !== 0 && p.timeInMeshQuantum <= 0) {
        throw errCode(new Error('invalid TimeInMeshQuantum; must be positive'), ERR_INVALID_PEER_SCORE_PARAMS);
    }
    if (p.timeInMeshWeight !== 0 && p.timeInMeshCap <= 0) {
        throw errCode(new Error('invalid TimeInMeshCap; must be positive'), ERR_INVALID_PEER_SCORE_PARAMS);
    }
    // check P2: first message deliveries
    if (p.firstMessageDeliveriesWeight < 0) {
        throw errCode(new Error('invalid FirstMessageDeliveriesWeight; must be positive (or 0 to disable)'), ERR_INVALID_PEER_SCORE_PARAMS);
    }
    if (p.firstMessageDeliveriesWeight !== 0 &&
        (p.firstMessageDeliveriesDecay <= 0 || p.firstMessageDeliveriesDecay >= 1)) {
        throw errCode(new Error('invalid FirstMessageDeliveriesDecay; must be between 0 and 1'), ERR_INVALID_PEER_SCORE_PARAMS);
    }
    if (p.firstMessageDeliveriesWeight !== 0 && p.firstMessageDeliveriesCap <= 0) {
        throw errCode(new Error('invalid FirstMessageDeliveriesCap; must be positive'), ERR_INVALID_PEER_SCORE_PARAMS);
    }
    // check P3: mesh message delivery rate
    if (p.meshMessageDeliveriesWeight > 0) {
        throw errCode(new Error('invalid MeshMessageDeliveriesWeight; must be negative (or 0 to disable)'), ERR_INVALID_PEER_SCORE_PARAMS);
    }
    if (p.meshMessageDeliveriesWeight !== 0 && (p.meshMessageDeliveriesDecay <= 0 || p.meshMessageDeliveriesDecay >= 1)) {
        throw errCode(new Error('invalid MeshMessageDeliveriesDecay; must be between 0 and 1'), ERR_INVALID_PEER_SCORE_PARAMS);
    }
    if (p.meshMessageDeliveriesWeight !== 0 && p.meshMessageDeliveriesCap <= 0) {
        throw errCode(new Error('invalid MeshMessageDeliveriesCap; must be positive'), ERR_INVALID_PEER_SCORE_PARAMS);
    }
    if (p.meshMessageDeliveriesWeight !== 0 && p.meshMessageDeliveriesThreshold <= 0) {
        throw errCode(new Error('invalid MeshMessageDeliveriesThreshold; must be positive'), ERR_INVALID_PEER_SCORE_PARAMS);
    }
    if (p.meshMessageDeliveriesWindow < 0) {
        throw errCode(new Error('invalid MeshMessageDeliveriesWindow; must be non-negative'), ERR_INVALID_PEER_SCORE_PARAMS);
    }
    if (p.meshMessageDeliveriesWeight !== 0 && p.meshMessageDeliveriesActivation < 1000) {
        throw errCode(new Error('invalid MeshMessageDeliveriesActivation; must be at least 1s'), ERR_INVALID_PEER_SCORE_PARAMS);
    }
    // check P3b: mesh failure penalty
    if (p.meshFailurePenaltyWeight > 0) {
        throw errCode(new Error('invalid MeshFailurePenaltyWeight; must be negative (or 0 to disable)'), ERR_INVALID_PEER_SCORE_PARAMS);
    }
    if (p.meshFailurePenaltyWeight !== 0 && (p.meshFailurePenaltyDecay <= 0 || p.meshFailurePenaltyDecay >= 1)) {
        throw errCode(new Error('invalid MeshFailurePenaltyDecay; must be between 0 and 1'), ERR_INVALID_PEER_SCORE_PARAMS);
    }
    // check P4: invalid message deliveries
    if (p.invalidMessageDeliveriesWeight > 0) {
        throw errCode(new Error('invalid InvalidMessageDeliveriesWeight; must be negative (or 0 to disable)'), ERR_INVALID_PEER_SCORE_PARAMS);
    }
    if (p.invalidMessageDeliveriesDecay <= 0 || p.invalidMessageDeliveriesDecay >= 1) {
        throw errCode(new Error('invalid InvalidMessageDeliveriesDecay; must be between 0 and 1'), ERR_INVALID_PEER_SCORE_PARAMS);
    }
}
|
|
|
|
/**
 * Default peer-score thresholds used by the router to gate gossip, publishing,
 * graylisting, PX acceptance and opportunistic grafting.
 */
const defaultPeerScoreThresholds = {
    gossipThreshold: -10,
    publishThreshold: -50,
    graylistThreshold: -80,
    acceptPXThreshold: 10,
    opportunisticGraftThreshold: 20
};
/**
 * Build a full thresholds object, overlaying any user overrides on the defaults.
 */
function createPeerScoreThresholds(p = {}) {
    return Object.assign({}, defaultPeerScoreThresholds, p);
}
|
|
|
|
/**
 * Compute a peer's score from its stats and the configured parameters.
 * Components P1–P4 are per-topic (weighted by topicWeight and optionally capped
 * by topicScoreCap); P5 is the app-specific score, P6 the IP colocation factor
 * and P7 the behavioural-pattern penalty.
 *
 * @param {string} peer - peer id string, passed to params.appSpecificScore
 * @param {Object} pstats - per-peer stats (topics, ips, behaviourPenalty)
 * @param {Object} params - peer score parameters
 * @param {Map<string, Set<string>>} peerIPs - peers seen per IP address
 * @returns {number} the computed score
 */
function computeScore(peer, pstats, params, peerIPs) {
    let score = 0;
    // Per-topic contributions (P1–P4)
    for (const [topic, tstats] of Object.entries(pstats.topics)) {
        const topicParams = params.topics[topic];
        if (topicParams === undefined) {
            // we are not scoring this topic
            continue;
        }
        let topicScore = 0;
        // P1: time in mesh (capped)
        if (tstats.inMesh) {
            const p1 = Math.min(tstats.meshTime / topicParams.timeInMeshQuantum, topicParams.timeInMeshCap);
            topicScore += p1 * topicParams.timeInMeshWeight;
        }
        // P2: first message deliveries (capped)
        const p2 = Math.min(tstats.firstMessageDeliveries, topicParams.firstMessageDeliveriesCap);
        topicScore += p2 * topicParams.firstMessageDeliveriesWeight;
        // P3: mesh message deliveries — quadratic deficit below the threshold,
        // only once the activation window has passed. Weight is negative.
        if (tstats.meshMessageDeliveriesActive &&
            tstats.meshMessageDeliveries < topicParams.meshMessageDeliveriesThreshold) {
            const deficit = topicParams.meshMessageDeliveriesThreshold - tstats.meshMessageDeliveries;
            topicScore += deficit * deficit * topicParams.meshMessageDeliveriesWeight;
        }
        // P3b: sticky mesh failure penalty (negative weight)
        topicScore += tstats.meshFailurePenalty * topicParams.meshFailurePenaltyWeight;
        // P4: invalid message deliveries, squared (negative weight)
        topicScore +=
            tstats.invalidMessageDeliveries * tstats.invalidMessageDeliveries * topicParams.invalidMessageDeliveriesWeight;
        // mix into the total using the topic weight
        score += topicScore * topicParams.topicWeight;
    }
    // apply the topic score cap, if any
    if (params.topicScoreCap > 0 && score > params.topicScoreCap) {
        score = params.topicScoreCap;
    }
    // P5: application-specific score
    score += params.appSpecificScore(peer) * params.appSpecificWeight;
    // P6: IP colocation factor — quadratic above the threshold, negative weight,
    // skipped entirely for whitelisted IPs.
    pstats.ips.forEach((ip) => {
        if (params.IPColocationFactorWhitelist.has(ip)) {
            return;
        }
        const peersInIP = peerIPs.get(ip);
        const numPeersInIP = peersInIP ? peersInIP.size : 0;
        if (numPeersInIP > params.IPColocationFactorThreshold) {
            const surplus = numPeersInIP - params.IPColocationFactorThreshold;
            score += surplus * surplus * params.IPColocationFactorWeight;
        }
    });
    // P7: behavioural pattern penalty — quadratic excess over the threshold
    if (pstats.behaviourPenalty > params.behaviourPenaltyThreshold) {
        const excess = pstats.behaviourPenalty - params.behaviourPenaltyThreshold;
        score += excess * excess * params.behaviourPenaltyWeight;
    }
    return score;
}
|
|
|
|
/**
|
|
* Custom implementation of a double ended queue.
|
|
*/
|
|
function Denque(array, options) {
|
|
var options = options || {};
|
|
|
|
this._head = 0;
|
|
this._tail = 0;
|
|
this._capacity = options.capacity;
|
|
this._capacityMask = 0x3;
|
|
this._list = new Array(4);
|
|
if (Array.isArray(array)) {
|
|
this._fromArray(array);
|
|
}
|
|
}
|
|
|
|
/**
|
|
* -------------
|
|
* PUBLIC API
|
|
* -------------
|
|
*/
|
|
|
|
/**
|
|
* Returns the item at the specified index from the list.
|
|
* 0 is the first element, 1 is the second, and so on...
|
|
* Elements at negative values are that many from the end: -1 is one before the end
|
|
* (the last element), -2 is two before the end (one before last), etc.
|
|
* @param index
|
|
* @returns {*}
|
|
*/
|
|
Denque.prototype.peekAt = function peekAt(index) {
|
|
var i = index;
|
|
// expect a number or return undefined
|
|
if ((i !== (i | 0))) {
|
|
return void 0;
|
|
}
|
|
var len = this.size();
|
|
if (i >= len || i < -len) return undefined;
|
|
if (i < 0) i += len;
|
|
i = (this._head + i) & this._capacityMask;
|
|
return this._list[i];
|
|
};
|
|
|
|
/**
|
|
* Alias for peekAt()
|
|
* @param i
|
|
* @returns {*}
|
|
*/
|
|
Denque.prototype.get = function get(i) {
|
|
return this.peekAt(i);
|
|
};
|
|
|
|
/**
|
|
* Returns the first item in the list without removing it.
|
|
* @returns {*}
|
|
*/
|
|
Denque.prototype.peek = function peek() {
|
|
if (this._head === this._tail) return undefined;
|
|
return this._list[this._head];
|
|
};
|
|
|
|
/**
|
|
* Alias for peek()
|
|
* @returns {*}
|
|
*/
|
|
Denque.prototype.peekFront = function peekFront() {
|
|
return this.peek();
|
|
};
|
|
|
|
/**
|
|
* Returns the item that is at the back of the queue without removing it.
|
|
* Uses peekAt(-1)
|
|
*/
|
|
Denque.prototype.peekBack = function peekBack() {
|
|
return this.peekAt(-1);
|
|
};
|
|
|
|
/**
|
|
* Returns the current length of the queue
|
|
* @return {Number}
|
|
*/
|
|
Object.defineProperty(Denque.prototype, 'length', {
|
|
get: function length() {
|
|
return this.size();
|
|
}
|
|
});
|
|
|
|
/**
|
|
* Return the number of items on the list, or 0 if empty.
|
|
* @returns {number}
|
|
*/
|
|
Denque.prototype.size = function size() {
|
|
if (this._head === this._tail) return 0;
|
|
if (this._head < this._tail) return this._tail - this._head;
|
|
else return this._capacityMask + 1 - (this._head - this._tail);
|
|
};
|
|
|
|
/**
|
|
* Add an item at the beginning of the list.
|
|
* @param item
|
|
*/
|
|
/**
 * Add an item at the beginning of the list. Adding `undefined` is a no-op
 * that just reports the current size.
 * @param item
 * @returns {number} the queue length after insertion
 */
Denque.prototype.unshift = function unshift(item) {
  if (item === undefined) return this.size();
  const len = this._list.length;
  // Step head backwards (wrapping) and write into the freed slot.
  this._head = (this._head - 1 + len) & this._capacityMask;
  this._list[this._head] = item;
  // Buffer full: double it. Over a bounded capacity: drop from the back.
  if (this._tail === this._head) this._growArray();
  if (this._capacity && this.size() > this._capacity) this.pop();
  // Recompute from the (possibly updated) head/tail rather than cached values.
  return this._head < this._tail
    ? this._tail - this._head
    : this._capacityMask + 1 - (this._head - this._tail);
};
|
|
|
|
/**
|
|
* Remove and return the first item on the list,
|
|
* Returns undefined if the list is empty.
|
|
* @returns {*}
|
|
*/
|
|
/**
 * Remove and return the first item on the list.
 * Returns undefined if the list is empty.
 * @returns {*}
 */
Denque.prototype.shift = function shift() {
  const head = this._head;
  if (head === this._tail) return undefined; // empty queue
  const removed = this._list[head];
  this._list[head] = undefined; // release the slot so it can be collected
  this._head = (head + 1) & this._capacityMask;
  // Occasionally hand memory back when the buffer has drained well below capacity.
  if (head < 2 && this._tail > 10000 && this._tail <= this._list.length >>> 2) {
    this._shrinkArray();
  }
  return removed;
};
|
|
|
|
/**
|
|
* Add an item to the bottom of the list.
|
|
* @param item
|
|
*/
|
|
/**
 * Add an item to the bottom (back) of the list. Pushing `undefined` is a
 * no-op that just reports the current size.
 * @param item
 * @returns {number} the queue length after insertion
 */
Denque.prototype.push = function push(item) {
  if (item === undefined) return this.size();
  const slot = this._tail;
  this._list[slot] = item;
  this._tail = (slot + 1) & this._capacityMask;
  // Tail catching up to head means the ring is full — double the buffer.
  if (this._tail === this._head) {
    this._growArray();
  }
  // With a bounded capacity, evict from the front to stay within the limit.
  if (this._capacity && this.size() > this._capacity) {
    this.shift();
  }
  // Recompute from the (possibly updated) head/tail rather than cached values.
  return this._head < this._tail
    ? this._tail - this._head
    : this._capacityMask + 1 - (this._head - this._tail);
};
|
|
|
|
/**
|
|
* Remove and return the last item on the list.
|
|
* Returns undefined if the list is empty.
|
|
* @returns {*}
|
|
*/
|
|
/**
 * Remove and return the last item on the list.
 * Returns undefined if the list is empty.
 * @returns {*}
 */
Denque.prototype.pop = function pop() {
  const oldTail = this._tail;
  if (oldTail === this._head) return undefined; // empty queue
  const len = this._list.length;
  // Step tail backwards (wrapping); the element lives at the new tail.
  const last = (oldTail - 1 + len) & this._capacityMask;
  this._tail = last;
  const removed = this._list[last];
  this._list[last] = undefined; // release the slot so it can be collected
  // Occasionally hand memory back when the buffer has drained well below capacity.
  if (this._head < 2 && oldTail > 10000 && oldTail <= len >>> 2) this._shrinkArray();
  return removed;
};
|
|
|
|
/**
|
|
* Remove and return the item at the specified index from the list.
|
|
* Returns undefined if the list is empty.
|
|
* @param index
|
|
* @returns {*}
|
|
*/
|
|
/**
 * Remove and return the item at the specified index from the list.
 * Returns undefined if the list is empty, the index is out of range, or the
 * index is not a 32-bit integer. Negative indices count from the end.
 * Shifts the shorter side of the ring buffer to close the gap.
 * @param index
 * @returns {*}
 */
Denque.prototype.removeOne = function removeOne(index) {
  var i = index;
  // expect a number or return undefined
  if ((i !== (i | 0))) {
    return void 0;
  }
  if (this._head === this._tail) return void 0;
  var size = this.size();
  var len = this._list.length;
  if (i >= size || i < -size) return void 0;
  if (i < 0) i += size; // normalize negative indices
  // map the logical index onto a physical slot in the ring buffer
  i = (this._head + i) & this._capacityMask;
  var item = this._list[i];
  var k;
  // NOTE(review): this branch tests the raw `index` argument, which can still
  // be negative here (the normalized value is `i`) — verify against upstream denque.
  if (index < size / 2) {
    // element sits in the front half: shift preceding elements back by one
    for (k = index; k > 0; k--) {
      this._list[i] = this._list[i = (i - 1 + len) & this._capacityMask];
    }
    this._list[i] = void 0;
    this._head = (this._head + 1 + len) & this._capacityMask;
  } else {
    // element sits in the back half: shift following elements forward by one
    for (k = size - 1 - index; k > 0; k--) {
      this._list[i] = this._list[i = ( i + 1 + len) & this._capacityMask];
    }
    this._list[i] = void 0;
    this._tail = (this._tail - 1 + len) & this._capacityMask;
  }
  return item;
};
|
|
|
|
/**
|
|
* Remove number of items from the specified index from the list.
|
|
* Returns array of removed items.
|
|
* Returns undefined if the list is empty.
|
|
* @param index
|
|
* @param count
|
|
* @returns {array}
|
|
*/
|
|
/**
 * Remove `count` items starting at `index` from the list.
 * Returns an array of removed items, or undefined if the list is empty, the
 * index is out of range / non-integer, or count < 1. Negative indices count
 * from the end. Moves whichever side of the buffer is cheaper to shift.
 * @param index
 * @param count
 * @returns {array}
 */
Denque.prototype.remove = function remove(index, count) {
  var i = index;
  var removed;
  var del_count = count;
  // expect a number or return undefined
  if ((i !== (i | 0))) {
    return void 0;
  }
  if (this._head === this._tail) return void 0;
  var size = this.size();
  var len = this._list.length;
  if (i >= size || i < -size || count < 1) return void 0;
  if (i < 0) i += size; // normalize negative indices
  // single removal (or missing count) delegates to removeOne
  if (count === 1 || !count) {
    removed = new Array(1);
    removed[0] = this.removeOne(i);
    return removed;
  }
  // removing everything from the front: snapshot and reset
  if (i === 0 && i + count >= size) {
    removed = this.toArray();
    this.clear();
    return removed;
  }
  // clamp count so the range ends at the last element
  if (i + count > size) count = size - i;
  var k;
  // collect the values being removed before any slots are overwritten
  removed = new Array(count);
  for (k = 0; k < count; k++) {
    removed[k] = this._list[(this._head + i + k) & this._capacityMask];
  }
  i = (this._head + i) & this._capacityMask; // physical start slot
  // range reaches the tail: just pull the tail back and blank the slots
  if (index + count === size) {
    this._tail = (this._tail - count + len) & this._capacityMask;
    for (k = count; k > 0; k--) {
      this._list[i = (i + 1 + len) & this._capacityMask] = void 0;
    }
    return removed;
  }
  // range starts at the head: advance the head and blank the slots
  if (index === 0) {
    this._head = (this._head + count + len) & this._capacityMask;
    for (k = count - 1; k > 0; k--) {
      this._list[i = (i + 1 + len) & this._capacityMask] = void 0;
    }
    return removed;
  }
  // interior range: re-insert the shorter surviving side around the gap.
  // NOTE(review): this branch compares the physical slot `i` (not the logical
  // index) against size/2, and the `index < 0` check below can no longer be
  // true after normalization — verify against upstream denque.
  if (i < size / 2) {
    this._head = (this._head + index + count + len) & this._capacityMask;
    for (k = index; k > 0; k--) {
      this.unshift(this._list[i = (i - 1 + len) & this._capacityMask]);
    }
    i = (this._head - 1 + len) & this._capacityMask;
    while (del_count > 0) {
      this._list[i = (i - 1 + len) & this._capacityMask] = void 0;
      del_count--;
    }
    if (index < 0) this._tail = i;
  } else {
    this._tail = i;
    i = (i + count + len) & this._capacityMask;
    for (k = size - (count + index); k > 0; k--) {
      this.push(this._list[i++]);
    }
    i = this._tail;
    while (del_count > 0) {
      this._list[i = (i + 1 + len) & this._capacityMask] = void 0;
      del_count--;
    }
  }
  // give memory back once the queue has drained well below capacity
  if (this._head < 2 && this._tail > 10000 && this._tail <= len >>> 2) this._shrinkArray();
  return removed;
};
|
|
|
|
/**
|
|
* Native splice implementation.
|
|
* Remove number of items from the specified index from the list and/or add new elements.
|
|
* Returns array of removed items or empty array if count == 0.
|
|
* Returns undefined if the list is empty.
|
|
*
|
|
* @param index
|
|
* @param count
|
|
* @param {...*} [elements]
|
|
* @returns {array}
|
|
*/
|
|
/**
 * Native splice implementation.
 * Remove `count` items from `index` and/or insert the remaining arguments at
 * that position. Returns the array of removed items (empty when count == 0),
 * or undefined if the index is non-integer or past the end.
 * Works on whichever side of the insertion point is shorter.
 *
 * @param index
 * @param count
 * @param {...*} [elements]
 * @returns {array}
 */
Denque.prototype.splice = function splice(index, count) {
  var i = index;
  // expect a number or return undefined
  if ((i !== (i | 0))) {
    return void 0;
  }
  var size = this.size();
  if (i < 0) i += size; // normalize negative indices
  if (i > size) return void 0;
  // elements to insert were passed beyond (index, count)
  if (arguments.length > 2) {
    var k;
    var temp;
    var removed;
    var arg_len = arguments.length;
    var len = this._list.length;
    var arguments_index = 2; // first inserted element lives at arguments[2]
    if (!size || i < size / 2) {
      // front half is shorter: stash the prefix, cut, then rebuild the front
      temp = new Array(i);
      for (k = 0; k < i; k++) {
        temp[k] = this._list[(this._head + k) & this._capacityMask];
      }
      if (count === 0) {
        removed = [];
        if (i > 0) {
          this._head = (this._head + i + len) & this._capacityMask;
        }
      } else {
        removed = this.remove(i, count);
        this._head = (this._head + i + len) & this._capacityMask;
      }
      // insert new elements (in reverse, so they end up in argument order),
      // then restore the stashed prefix
      while (arg_len > arguments_index) {
        this.unshift(arguments[--arg_len]);
      }
      for (k = i; k > 0; k--) {
        this.unshift(temp[k - 1]);
      }
    } else {
      // back half is shorter: stash the suffix, cut, then rebuild the back
      temp = new Array(size - (i + count));
      var leng = temp.length;
      for (k = 0; k < leng; k++) {
        temp[k] = this._list[(this._head + i + count + k) & this._capacityMask];
      }
      if (count === 0) {
        removed = [];
        if (i != size) {
          this._tail = (this._head + i + len) & this._capacityMask;
        }
      } else {
        removed = this.remove(i, count);
        this._tail = (this._tail - leng + len) & this._capacityMask;
      }
      // append new elements in argument order, then restore the stashed suffix
      while (arguments_index < arg_len) {
        this.push(arguments[arguments_index++]);
      }
      for (k = 0; k < leng; k++) {
        this.push(temp[k]);
      }
    }
    return removed;
  } else {
    // no elements to insert: plain removal
    return this.remove(i, count);
  }
};
|
|
|
|
/**
|
|
* Soft clear - does not reset capacity.
|
|
*/
|
|
/**
 * Soft clear — drops all elements by resetting the cursors, but does not
 * shrink or release the backing array (capacity is preserved).
 */
Denque.prototype.clear = function clear() {
  this._tail = 0;
  this._head = 0;
};
|
|
|
|
/**
|
|
* Returns true or false whether the list is empty.
|
|
* @returns {boolean}
|
|
*/
|
|
/**
 * Returns true or false whether the list is empty.
 * The queue is empty exactly when the head and tail cursors coincide.
 * @returns {boolean}
 */
Denque.prototype.isEmpty = function isEmpty() {
  const empty = this._head === this._tail;
  return empty;
};
|
|
|
|
/**
|
|
* Returns an array of all queue items.
|
|
* @returns {Array}
|
|
*/
|
|
/**
 * Returns a new array holding all queue items in order (head first).
 * @returns {Array}
 */
Denque.prototype.toArray = function toArray() {
  const snapshot = this._copyArray(false);
  return snapshot;
};
|
|
|
|
/**
|
|
* -------------
|
|
* INTERNALS
|
|
* -------------
|
|
*/
|
|
|
|
/**
|
|
* Fills the queue with items from an array
|
|
* For use in the constructor
|
|
* @param array
|
|
* @private
|
|
*/
|
|
/**
 * Fills the queue with items from an array (used by the constructor).
 * Each element goes through push(), so `undefined` entries are skipped
 * exactly as push() skips them.
 * @param array
 * @private
 */
Denque.prototype._fromArray = function _fromArray(array) {
  for (let idx = 0; idx < array.length; idx++) {
    this.push(array[idx]);
  }
};
|
|
|
|
/**
|
|
*
|
|
* @param fullCopy
|
|
* @returns {Array}
|
|
* @private
|
|
*/
|
|
/**
 * Copies the live region of the ring buffer into a fresh array.
 * When `fullCopy` is true (or the region wraps past the end of the buffer),
 * both segments — head..end and start..tail — are copied.
 * @param fullCopy
 * @returns {Array}
 * @private
 */
Denque.prototype._copyArray = function _copyArray(fullCopy) {
  const out = [];
  const list = this._list;
  const len = list.length;
  if (fullCopy || this._head > this._tail) {
    // wrapped region: tail segment first, then the wrapped-around prefix
    for (let i = this._head; i < len; i++) out.push(list[i]);
    for (let i = 0; i < this._tail; i++) out.push(list[i]);
  } else {
    // contiguous region
    for (let i = this._head; i < this._tail; i++) out.push(list[i]);
  }
  return out;
};
|
|
|
|
/**
|
|
* Grows the internal list array.
|
|
* @private
|
|
*/
|
|
/**
 * Doubles the internal list array. Called when the ring is full
 * (tail has caught up with head).
 * @private
 */
Denque.prototype._growArray = function _growArray() {
  // If the live data wraps (head != 0), linearize it first so the region
  // occupies indices 0..length-1 of a fresh array.
  if (this._head) {
    this._list = this._copyArray(true);
    this._head = 0;
  }
  // Head is at 0 and the array is full: tail points just past the data,
  // then the backing store and mask are doubled.
  this._tail = this._list.length;
  this._list.length <<= 1;
  this._capacityMask = (this._capacityMask << 1) | 1;
};
|
|
|
|
/**
|
|
* Shrinks the internal list array.
|
|
* @private
|
|
*/
|
|
/**
 * Halves the internal list array and its capacity mask. Callers guarantee
 * the live region fits in the lower half before invoking this.
 * @private
 */
Denque.prototype._shrinkArray = function _shrinkArray() {
  this._list.length = this._list.length >>> 1;
  this._capacityMask = this._capacityMask >>> 1;
};
|
|
|
|
|
|
var denque = Denque;
|
|
|
|
var DeliveryRecordStatus;
// TypeScript-style numeric enum with a reverse (number -> name) mapping:
//   unknown (0): we don't know (yet) if the message is valid
//   valid   (1): we know the message is valid
//   invalid (2): we know the message is invalid
//   ignored (3): we were instructed by the validator to ignore the message
(function (DeliveryRecordStatus) {
  const names = ['unknown', 'valid', 'invalid', 'ignored'];
  for (let code = 0; code < names.length; code++) {
    // Assign both directions: name -> code and code -> name.
    DeliveryRecordStatus[(DeliveryRecordStatus[names[code]] = code)] = names[code];
  }
})(DeliveryRecordStatus || (DeliveryRecordStatus = {}));
|
|
/**
|
|
* Map of canonical message ID to DeliveryRecord
|
|
*
|
|
* Maintains an internal queue for efficient gc of old messages
|
|
*/
|
|
/**
 * Map of canonical message ID to DeliveryRecord.
 *
 * Maintains an internal FIFO queue of { msgId, expire } entries — ordered by
 * insertion, and therefore by expiry — for efficient gc of old messages.
 */
class MessageDeliveries {
  constructor() {
    // msgIdStr -> DeliveryRecord
    this.records = new Map();
    // expiry queue, oldest entry at the front
    this.queue = new denque();
  }
  /**
   * Get the delivery record for a message id, creating it (and scheduling its
   * expiry) on first sight.
   */
  ensureRecord(msgIdStr) {
    const existing = this.records.get(msgIdStr);
    if (existing) {
      return existing;
    }
    // First time we see this message: start an unknown-status record.
    const record = {
      status: DeliveryRecordStatus.unknown,
      firstSeen: Date.now(),
      validated: 0,
      peers: new Set()
    };
    this.records.set(msgIdStr, record);
    // Track the id in the expiry queue for later garbage collection.
    this.queue.push({
      msgId: msgIdStr,
      expire: Date.now() + TimeCacheDuration
    });
    return record;
  }
  /**
   * Drop expired records. The queue is sorted by expiry time, so we pop from
   * the front until the first un-expired entry.
   */
  gc() {
    const now = Date.now();
    for (let front = this.queue.peekFront(); front && front.expire < now; front = this.queue.peekFront()) {
      this.records.delete(front.msgId);
      this.queue.shift();
    }
  }
  /** Remove every record and queued expiry entry. */
  clear() {
    this.records.clear();
    this.queue.clear();
  }
}
|
|
|
|
/**
 * Duck-type check: an object is "startable" when it exposes both `start` and
 * `stop` functions. Returns false for null/undefined.
 */
function isStartable(obj) {
  if (obj == null) return false;
  return typeof obj.start === 'function' && typeof obj.stop === 'function';
}
|
|
|
|
/**
 * Duck-type check: an object is "initializable" when it exposes an `init`
 * function. Returns false for null/undefined.
 */
function isInitializable(obj) {
  if (obj == null) return false;
  return typeof obj.init === 'function';
}
|
|
/**
 * Dependency-injection container for libp2p subsystems.
 *
 * Every component is wired through a set* method so that objects exposing an
 * init() hook (see isInitializable) are handed a back-reference to this
 * container. Lifecycle methods fan out to every held component that exposes
 * start()/stop() (see isStartable). Most get* accessors throw
 * ERR_SERVICE_MISSING when the component was never provided; getMetrics and
 * getConnectionProtector are optional and may return undefined.
 */
class Components {
  constructor(init = {}) {
    this.started = false;
    // Route each provided component through its setter for uniform init() wiring.
    if (init.peerId != null) { this.setPeerId(init.peerId); }
    if (init.addressManager != null) { this.setAddressManager(init.addressManager); }
    if (init.peerStore != null) { this.setPeerStore(init.peerStore); }
    if (init.upgrader != null) { this.setUpgrader(init.upgrader); }
    if (init.metrics != null) { this.setMetrics(init.metrics); }
    if (init.registrar != null) { this.setRegistrar(init.registrar); }
    if (init.connectionManager != null) { this.setConnectionManager(init.connectionManager); }
    if (init.transportManager != null) { this.setTransportManager(init.transportManager); }
    if (init.connectionGater != null) { this.setConnectionGater(init.connectionGater); }
    if (init.contentRouting != null) { this.setContentRouting(init.contentRouting); }
    if (init.peerRouting != null) { this.setPeerRouting(init.peerRouting); }
    if (init.datastore != null) { this.setDatastore(init.datastore); }
    if (init.connectionProtector != null) { this.setConnectionProtector(init.connectionProtector); }
    if (init.dht != null) { this.setDHT(init.dht); }
    if (init.pubsub != null) { this.setPubSub(init.pubsub); }
  }
  isStarted() {
    return this.started;
  }
  // ---- lifecycle fan-out: each phase runs in parallel over startable components ----
  async beforeStart() {
    await Promise.all(Object.values(this).filter(obj => isStartable(obj)).map(async (startable) => {
      // beforeStart is an optional hook on startable components
      if (startable.beforeStart != null) { await startable.beforeStart(); }
    }));
  }
  async start() {
    await Promise.all(Object.values(this).filter(obj => isStartable(obj)).map(async (startable) => {
      await startable.start();
    }));
    this.started = true;
  }
  async afterStart() {
    await Promise.all(Object.values(this).filter(obj => isStartable(obj)).map(async (startable) => {
      if (startable.afterStart != null) { await startable.afterStart(); }
    }));
  }
  async beforeStop() {
    await Promise.all(Object.values(this).filter(obj => isStartable(obj)).map(async (startable) => {
      if (startable.beforeStop != null) { await startable.beforeStop(); }
    }));
  }
  async stop() {
    await Promise.all(Object.values(this).filter(obj => isStartable(obj)).map(async (startable) => {
      await startable.stop();
    }));
    this.started = false;
  }
  async afterStop() {
    await Promise.all(Object.values(this).filter(obj => isStartable(obj)).map(async (startable) => {
      if (startable.afterStop != null) { await startable.afterStop(); }
    }));
  }
  // ---- component accessors; setters call component.init(this) when present ----
  setPeerId(peerId) {
    // peerId is plain data, so no init() wiring here
    this.peerId = peerId;
    return peerId;
  }
  getPeerId() {
    if (this.peerId == null) { throw errCode(new Error('peerId not set'), 'ERR_SERVICE_MISSING'); }
    return this.peerId;
  }
  setMetrics(metrics) {
    this.metrics = metrics;
    if (isInitializable(metrics)) { metrics.init(this); }
    return metrics;
  }
  getMetrics() {
    // metrics are optional: no ERR_SERVICE_MISSING, may be undefined
    return this.metrics;
  }
  setAddressManager(addressManager) {
    this.addressManager = addressManager;
    if (isInitializable(addressManager)) { addressManager.init(this); }
    return addressManager;
  }
  getAddressManager() {
    if (this.addressManager == null) { throw errCode(new Error('addressManager not set'), 'ERR_SERVICE_MISSING'); }
    return this.addressManager;
  }
  setPeerStore(peerStore) {
    this.peerStore = peerStore;
    if (isInitializable(peerStore)) { peerStore.init(this); }
    return peerStore;
  }
  getPeerStore() {
    if (this.peerStore == null) { throw errCode(new Error('peerStore not set'), 'ERR_SERVICE_MISSING'); }
    return this.peerStore;
  }
  setUpgrader(upgrader) {
    this.upgrader = upgrader;
    if (isInitializable(upgrader)) { upgrader.init(this); }
    return upgrader;
  }
  getUpgrader() {
    if (this.upgrader == null) { throw errCode(new Error('upgrader not set'), 'ERR_SERVICE_MISSING'); }
    return this.upgrader;
  }
  setRegistrar(registrar) {
    this.registrar = registrar;
    if (isInitializable(registrar)) { registrar.init(this); }
    return registrar;
  }
  getRegistrar() {
    if (this.registrar == null) { throw errCode(new Error('registrar not set'), 'ERR_SERVICE_MISSING'); }
    return this.registrar;
  }
  setConnectionManager(connectionManager) {
    this.connectionManager = connectionManager;
    if (isInitializable(connectionManager)) { connectionManager.init(this); }
    return connectionManager;
  }
  getConnectionManager() {
    if (this.connectionManager == null) { throw errCode(new Error('connectionManager not set'), 'ERR_SERVICE_MISSING'); }
    return this.connectionManager;
  }
  setTransportManager(transportManager) {
    this.transportManager = transportManager;
    if (isInitializable(transportManager)) { transportManager.init(this); }
    return transportManager;
  }
  getTransportManager() {
    if (this.transportManager == null) { throw errCode(new Error('transportManager not set'), 'ERR_SERVICE_MISSING'); }
    return this.transportManager;
  }
  setConnectionGater(connectionGater) {
    this.connectionGater = connectionGater;
    if (isInitializable(connectionGater)) { connectionGater.init(this); }
    return connectionGater;
  }
  getConnectionGater() {
    if (this.connectionGater == null) { throw errCode(new Error('connectionGater not set'), 'ERR_SERVICE_MISSING'); }
    return this.connectionGater;
  }
  setContentRouting(contentRouting) {
    this.contentRouting = contentRouting;
    if (isInitializable(contentRouting)) { contentRouting.init(this); }
    return contentRouting;
  }
  getContentRouting() {
    if (this.contentRouting == null) { throw errCode(new Error('contentRouting not set'), 'ERR_SERVICE_MISSING'); }
    return this.contentRouting;
  }
  setPeerRouting(peerRouting) {
    this.peerRouting = peerRouting;
    if (isInitializable(peerRouting)) { peerRouting.init(this); }
    return peerRouting;
  }
  getPeerRouting() {
    if (this.peerRouting == null) { throw errCode(new Error('peerRouting not set'), 'ERR_SERVICE_MISSING'); }
    return this.peerRouting;
  }
  setDatastore(datastore) {
    this.datastore = datastore;
    if (isInitializable(datastore)) { datastore.init(this); }
    return datastore;
  }
  getDatastore() {
    if (this.datastore == null) { throw errCode(new Error('datastore not set'), 'ERR_SERVICE_MISSING'); }
    return this.datastore;
  }
  setConnectionProtector(connectionProtector) {
    this.connectionProtector = connectionProtector;
    if (isInitializable(connectionProtector)) { connectionProtector.init(this); }
    return connectionProtector;
  }
  getConnectionProtector() {
    // connection protector is optional: may be undefined
    return this.connectionProtector;
  }
  setDHT(dht) {
    this.dht = dht;
    if (isInitializable(dht)) { dht.init(this); }
    return dht;
  }
  getDHT() {
    if (this.dht == null) { throw errCode(new Error('dht not set'), 'ERR_SERVICE_MISSING'); }
    return this.dht;
  }
  setPubSub(pubsub) {
    this.pubsub = pubsub;
    if (isInitializable(pubsub)) { pubsub.init(this); }
    return pubsub;
  }
  getPubSub() {
    if (this.pubsub == null) { throw errCode(new Error('pubsub not set'), 'ERR_SERVICE_MISSING'); }
    return this.pubsub;
  }
  setDialer(dialer) {
    // note: dialer is settable but not wired through the constructor's init object
    this.dialer = dialer;
    if (isInitializable(dialer)) { dialer.init(this); }
    return dialer;
  }
  getDialer() {
    if (this.dialer == null) { throw errCode(new Error('dialer not set'), 'ERR_SERVICE_MISSING'); }
    return this.dialer;
  }
}
|
|
|
|
const log$4 = logger('libp2p:gossipsub:score');
|
|
class PeerScore {
|
|
/**
 * @param params  peer-score parameters; validated eagerly below
 * @param metrics optional metrics sink (may be null — all uses are `?.`-guarded)
 * @param opts    { scoreCacheValidityMs, computeScore? }
 */
constructor(params, metrics, opts) {
  this.params = params;
  this.metrics = metrics;
  /**
   * Per-peer stats for score calculation
   */
  this.peerStats = new Map();
  /**
   * IP colocation tracking; maps IP => set of peers.
   */
  this.peerIPs = new Map();
  /**
   * Cache score up to decayInterval if topic stats are unchanged.
   */
  this.scoreCache = new Map();
  /**
   * Recent message delivery timing/participants
   */
  this.deliveryRecords = new MessageDeliveries();
  // placeholder until init() injects the real libp2p components
  this.components = new Components();
  // throws if params are malformed (helper defined elsewhere in this bundle)
  validatePeerScoreParams(params);
  this.scoreCacheValidityMs = opts.scoreCacheValidityMs;
  // allow the score function to be overridden; fall back to the bundled default
  this.computeScore = opts.computeScore ?? computeScore;
}
|
|
// Late dependency injection: replaces the placeholder Components created in
// the constructor with the real libp2p component container.
init(components) {
  this.components = components;
}
|
|
// Number of peers currently tracked (connected or retained).
get size() {
  return this.peerStats.size;
}
|
|
/**
|
|
* Start PeerScore instance
|
|
*/
|
|
start() {
  // idempotent: a second start() is a logged no-op
  if (this._backgroundInterval) {
    log$4('Peer score already running');
    return;
  }
  // run decay / IP refresh / delivery-record gc once per decayInterval
  this._backgroundInterval = setInterval(() => this.background(), this.params.decayInterval);
  log$4('started');
}
|
|
/**
|
|
* Stop PeerScore instance
|
|
*/
|
|
stop() {
  // idempotent: stopping when not running is a logged no-op
  if (!this._backgroundInterval) {
    log$4('Peer score already stopped');
    return;
  }
  clearInterval(this._backgroundInterval);
  delete this._backgroundInterval;
  // drop all tracked state; scores do not survive a stop/start cycle
  this.peerIPs.clear();
  this.peerStats.clear();
  this.deliveryRecords.clear();
  log$4('stopped');
}
|
|
/**
|
|
* Periodic maintenance
|
|
*/
|
|
/**
 * Periodic maintenance: decay scores, refresh IP colocation tracking, and
 * garbage-collect expired delivery records.
 */
background() {
  this.refreshScores();
  this.updateIPs();
  this.deliveryRecords.gc();
}
|
|
dumpPeerScoreStats() {
|
|
return Object.fromEntries(Array.from(this.peerStats.entries()).map(([peer, stats]) => [peer, stats]));
|
|
}
|
|
/**
|
|
* Decays scores, and purges score records for disconnected peers once their expiry has elapsed.
|
|
*/
|
|
/**
 * Decays scores, and purges score records for disconnected peers once their
 * expiry has elapsed. Runs on the background interval.
 */
refreshScores() {
  const now = Date.now();
  const decayToZero = this.params.decayToZero;
  this.peerStats.forEach((pstats, id) => {
    if (!pstats.connected) {
      // has the retention period expired?
      if (now > pstats.expire) {
        // yes, throw it away (but clean up the IP tracking first)
        this.removeIPs(id, pstats.ips);
        this.peerStats.delete(id);
        this.scoreCache.delete(id);
      }
      // we don't decay retained scores, as the peer is not active.
      // this way the peer cannot reset a negative score by simply disconnecting and reconnecting,
      // unless the retention period has elapsed.
      // similarly, a well behaved peer does not lose its score by getting disconnected.
      return;
    }
    Object.entries(pstats.topics).forEach(([topic, tstats]) => {
      const tparams = this.params.topics[topic];
      if (tparams === undefined) {
        // we are not scoring this topic
        // should be unreachable, we only add scored topics to pstats
        return;
      }
      // decay counters; anything that falls below decayToZero snaps to 0
      tstats.firstMessageDeliveries *= tparams.firstMessageDeliveriesDecay;
      if (tstats.firstMessageDeliveries < decayToZero) {
        tstats.firstMessageDeliveries = 0;
      }
      tstats.meshMessageDeliveries *= tparams.meshMessageDeliveriesDecay;
      if (tstats.meshMessageDeliveries < decayToZero) {
        tstats.meshMessageDeliveries = 0;
      }
      tstats.meshFailurePenalty *= tparams.meshFailurePenaltyDecay;
      if (tstats.meshFailurePenalty < decayToZero) {
        tstats.meshFailurePenalty = 0;
      }
      tstats.invalidMessageDeliveries *= tparams.invalidMessageDeliveriesDecay;
      if (tstats.invalidMessageDeliveries < decayToZero) {
        tstats.invalidMessageDeliveries = 0;
      }
      // update mesh time and activate mesh message delivery parameter if need be
      if (tstats.inMesh) {
        tstats.meshTime = now - tstats.graftTime;
        if (tstats.meshTime > tparams.meshMessageDeliveriesActivation) {
          tstats.meshMessageDeliveriesActive = true;
        }
      }
    });
    // decay P7 counter (behavioural penalty)
    pstats.behaviourPenalty *= this.params.behaviourPenaltyDecay;
    if (pstats.behaviourPenalty < decayToZero) {
      pstats.behaviourPenalty = 0;
    }
  });
}
|
|
/**
|
|
* Return the score for a peer
|
|
*/
|
|
/**
 * Return the score for a peer. Unknown peers score 0. Results are cached for
 * scoreCacheValidityMs to avoid recomputing on every call.
 */
score(id) {
  this.metrics?.scoreFnCalls.inc();
  const pstats = this.peerStats.get(id);
  if (!pstats) {
    return 0;
  }
  const now = Date.now();
  const cacheEntry = this.scoreCache.get(id);
  // Found cached score within validity period
  if (cacheEntry && cacheEntry.cacheUntil > now) {
    return cacheEntry.score;
  }
  this.metrics?.scoreFnRuns.inc();
  // full recompute via the (possibly user-supplied) score function
  const score = this.computeScore(id, pstats, this.params, this.peerIPs);
  const cacheUntil = now + this.scoreCacheValidityMs;
  if (cacheEntry) {
    // record how far the stale cached value drifted from the fresh one
    this.metrics?.scoreCachedDelta.observe(Math.abs(score - cacheEntry.score));
    cacheEntry.score = score;
    cacheEntry.cacheUntil = cacheUntil;
  }
  else {
    this.scoreCache.set(id, { score, cacheUntil });
  }
  return score;
}
|
|
/**
|
|
* Apply a behavioural penalty to a peer
|
|
*/
|
|
addPenalty(id, penalty, penaltyLabel) {
  const pstats = this.peerStats.get(id);
  // silently ignored for peers we are not tracking
  if (pstats) {
    pstats.behaviourPenalty += penalty;
    this.metrics?.onScorePenalty(penaltyLabel);
  }
}
|
|
// Begin tracking a newly connected peer.
addPeer(id) {
  // create peer stats (not including topic stats for each topic to be scored)
  // topic stats will be added as needed
  const pstats = {
    connected: true,
    expire: 0,
    topics: {},
    ips: [],
    behaviourPenalty: 0
  };
  this.peerStats.set(id, pstats);
  // get + update peer IPs (for IP-colocation scoring)
  const ips = this.getIPs(id);
  this.setIPs(id, ips, pstats.ips);
  pstats.ips = ips;
}
|
|
// Handle a peer disconnect. Positive scores are discarded immediately;
// non-positive scores are retained for `retainScore` ms so a misbehaving peer
// cannot reset its score by reconnecting.
removePeer(id) {
  const pstats = this.peerStats.get(id);
  if (!pstats) {
    return;
  }
  // decide whether to retain the score; this currently only retains non-positive scores
  // to dissuade attacks on the score function.
  if (this.score(id) > 0) {
    this.removeIPs(id, pstats.ips);
    this.peerStats.delete(id);
    return;
  }
  // furthermore, when we decide to retain the score, the firstMessageDelivery counters are
  // reset to 0 and mesh delivery penalties applied.
  Object.entries(pstats.topics).forEach(([topic, tstats]) => {
    tstats.firstMessageDeliveries = 0;
    const threshold = this.params.topics[topic].meshMessageDeliveriesThreshold;
    // apply the sticky mesh-failure penalty if the peer left while under-delivering
    if (tstats.inMesh && tstats.meshMessageDeliveriesActive && tstats.meshMessageDeliveries < threshold) {
      const deficit = threshold - tstats.meshMessageDeliveries;
      tstats.meshFailurePenalty += deficit * deficit;
    }
    tstats.inMesh = false;
    tstats.meshMessageDeliveriesActive = false;
  });
  pstats.connected = false;
  pstats.expire = Date.now() + this.params.retainScore;
}
|
|
/** Handles scoring functionality as a peer GRAFTs to a topic. */
|
|
graft(id, topic) {
  const pstats = this.peerStats.get(id);
  if (pstats) {
    // only topics with score params yield topic stats; others are ignored
    const tstats = this.getPtopicStats(pstats, topic);
    if (tstats) {
      // if we are scoring the topic, update the mesh status.
      tstats.inMesh = true;
      tstats.graftTime = Date.now();
      tstats.meshTime = 0;
      tstats.meshMessageDeliveriesActive = false;
    }
  }
}
|
|
/** Handles scoring functionality as a peer PRUNEs from a topic. */
|
|
prune(id, topic) {
  const pstats = this.peerStats.get(id);
  if (pstats) {
    const tstats = this.getPtopicStats(pstats, topic);
    if (tstats) {
      // sticky mesh delivery rate failure penalty: if the peer leaves the mesh
      // while under the delivery threshold, penalize the squared deficit
      const threshold = this.params.topics[topic].meshMessageDeliveriesThreshold;
      if (tstats.meshMessageDeliveriesActive && tstats.meshMessageDeliveries < threshold) {
        const deficit = threshold - tstats.meshMessageDeliveries;
        tstats.meshFailurePenalty += deficit * deficit;
      }
      tstats.meshMessageDeliveriesActive = false;
      tstats.inMesh = false;
      // TODO: Consider clearing score cache on important penalties
      // this.scoreCache.delete(id)
    }
  }
}
|
|
// A message has entered validation: make sure a delivery record exists so
// duplicate deliveries observed during validation are tracked.
validateMessage(msgIdStr) {
  this.deliveryRecords.ensureRecord(msgIdStr);
}
|
|
// A message passed validation: credit the first deliverer and every mesh peer
// that forwarded it to us while validation was pending.
deliverMessage(from, msgIdStr, topic) {
  this.markFirstMessageDelivery(from, topic);
  const drec = this.deliveryRecords.ensureRecord(msgIdStr);
  const now = Date.now();
  // defensive check that this is the first delivery trace -- delivery status should be unknown
  if (drec.status !== DeliveryRecordStatus.unknown) {
    log$4('unexpected delivery: message from %s was first seen %s ago and has delivery status %s', from, now - drec.firstSeen, DeliveryRecordStatus[drec.status]);
    return;
  }
  // mark the message as valid and reward mesh peers that have already forwarded it to us
  drec.status = DeliveryRecordStatus.valid;
  drec.validated = now;
  drec.peers.forEach((p) => {
    // this check is to make sure a peer can't send us a message twice and get a double count
    // if it is a first delivery.
    if (p !== from.toString()) {
      this.markDuplicateMessageDelivery(p, topic);
    }
  });
}
|
|
/**
|
|
* Similar to `rejectMessage` except does not require the message id or reason for an invalid message.
|
|
*/
|
|
/**
 * Similar to `rejectMessage` except does not require the message id or reason
 * for an invalid message: only the sending peer is penalized.
 */
rejectInvalidMessage(from, topic) {
  this.markInvalidMessageDelivery(from, topic);
}
|
|
// A message failed validation (or was ignored/blacklisted). Penalize the
// sender and, for tracked rejections, every peer that also forwarded it.
rejectMessage(from, msgIdStr, topic, reason) {
  switch (reason) {
    // these messages are not tracked, but the peer is penalized as they are invalid
    case RejectReason.Error:
      this.markInvalidMessageDelivery(from, topic);
      return;
    // we ignore those messages, so do nothing.
    case RejectReason.Blacklisted:
      return;
    // the rest are handled after record creation
  }
  const drec = this.deliveryRecords.ensureRecord(msgIdStr);
  // defensive check that this is the first rejection -- delivery status should be unknown
  if (drec.status !== DeliveryRecordStatus.unknown) {
    log$4('unexpected rejection: message from %s was first seen %s ago and has delivery status %d', from, Date.now() - drec.firstSeen, DeliveryRecordStatus[drec.status]);
    return;
  }
  if (reason === RejectReason.Ignore) {
    // we were explicitly instructed by the validator to ignore the message but not penalize the peer
    drec.status = DeliveryRecordStatus.ignored;
    drec.peers.clear();
    return;
  }
  // mark the message as invalid and penalize peers that have already forwarded it.
  drec.status = DeliveryRecordStatus.invalid;
  this.markInvalidMessageDelivery(from, topic);
  drec.peers.forEach((p) => {
    this.markInvalidMessageDelivery(p, topic);
  });
  // release the delivery time tracking map to free some memory early
  drec.peers.clear();
}
|
|
// A duplicate of an already-seen message arrived from `from`; score it
// according to what we know about the original's validation outcome.
duplicateMessage(from, msgIdStr, topic) {
  const drec = this.deliveryRecords.ensureRecord(msgIdStr);
  if (drec.peers.has(from)) {
    // we have already seen this duplicate from this peer — count it only once
    return;
  }
  switch (drec.status) {
    case DeliveryRecordStatus.unknown:
      // the message is being validated; track the peer delivery and wait for
      // the Deliver/Reject/Ignore notification.
      drec.peers.add(from);
      break;
    case DeliveryRecordStatus.valid:
      // mark the peer delivery time to only count a duplicate delivery once.
      drec.peers.add(from);
      this.markDuplicateMessageDelivery(from, topic, drec.validated);
      break;
    case DeliveryRecordStatus.invalid:
      // we no longer track delivery time
      this.markInvalidMessageDelivery(from, topic);
      break;
    case DeliveryRecordStatus.ignored:
      // the message was ignored; do nothing (we don't know if it was valid)
      break;
  }
}
|
|
/**
|
|
* Increments the "invalid message deliveries" counter for all scored topics the message is published in.
|
|
*/
|
|
/**
 * Increments the "invalid message deliveries" counter for all scored topics
 * the message is published in. No-op for untracked peers or unscored topics.
 */
markInvalidMessageDelivery(from, topic) {
  const pstats = this.peerStats.get(from);
  if (pstats) {
    const tstats = this.getPtopicStats(pstats, topic);
    if (tstats) {
      tstats.invalidMessageDeliveries += 1;
    }
  }
}
|
|
/**
|
|
* Increments the "first message deliveries" counter for all scored topics the message is published in,
|
|
* as well as the "mesh message deliveries" counter, if the peer is in the mesh for the topic.
|
|
* Messages already known (with the seenCache) are counted with markDuplicateMessageDelivery()
|
|
*/
|
|
/**
 * Increments the "first message deliveries" counter for all scored topics the
 * message is published in, as well as the "mesh message deliveries" counter,
 * if the peer is in the mesh for the topic. Both counters are clamped to
 * their configured caps.
 * Messages already known (with the seenCache) are counted with markDuplicateMessageDelivery()
 */
markFirstMessageDelivery(from, topic) {
  const pstats = this.peerStats.get(from);
  if (pstats) {
    const tstats = this.getPtopicStats(pstats, topic);
    if (tstats) {
      let cap = this.params.topics[topic].firstMessageDeliveriesCap;
      tstats.firstMessageDeliveries = Math.min(cap, tstats.firstMessageDeliveries + 1);
      if (tstats.inMesh) {
        cap = this.params.topics[topic].meshMessageDeliveriesCap;
        tstats.meshMessageDeliveries = Math.min(cap, tstats.meshMessageDeliveries + 1);
      }
    }
  }
}
|
|
/**
 * Increments the "mesh message deliveries" (P3) counter for messages we've seen before,
 * as long as the message was received within the configured mesh delivery window
 * (`meshMessageDeliveriesWindow`). Only applies when the peer is in our mesh for the topic.
 *
 * @param from - peer id string that delivered the duplicate
 * @param topic - topic the message belongs to
 * @param validatedTime - ms timestamp when the message finished validation;
 *   undefined means the duplicate arrived before validation completed and thus
 *   automatically falls within the delivery window
 */
markDuplicateMessageDelivery(from, topic, validatedTime) {
    const pstats = this.peerStats.get(from);
    if (pstats) {
        // `now` is only meaningful when validatedTime is known; otherwise it is
        // left as 0 and never read below.
        const now = validatedTime !== undefined ? Date.now() : 0;
        const tstats = this.getPtopicStats(pstats, topic);
        if (tstats && tstats.inMesh) {
            const tparams = this.params.topics[topic];
            // check against the mesh delivery window -- if the validated time is passed as 0, then
            // the message was received before we finished validation and thus falls within the mesh
            // delivery window.
            if (validatedTime !== undefined) {
                const deliveryDelayMs = now - validatedTime;
                const isLateDelivery = deliveryDelayMs > tparams.meshMessageDeliveriesWindow;
                this.metrics?.onDuplicateMsgDelivery(topic, deliveryDelayMs, isLateDelivery);
                if (isLateDelivery) {
                    // outside the window: no P3 credit for this duplicate
                    return;
                }
            }
            const cap = tparams.meshMessageDeliveriesCap;
            tstats.meshMessageDeliveries = Math.min(cap, tstats.meshMessageDeliveries + 1);
        }
    }
}
|
|
/**
 * Gets the current IPs for a peer, derived from its currently open connections.
 *
 * @param id - peer id string
 * @returns array of host strings, one per open connection (may contain duplicates
 *   when multiple connections share a host)
 */
getIPs(id) {
    return this.components
        .getConnectionManager()
        .getConnections(peerIdFromString(id))
        .map((c) => c.remoteAddr.toOptions().host);
}
|
|
/**
|
|
* Adds tracking for the new IPs in the list, and removes tracking from the obsolete IPs.
|
|
*/
|
|
setIPs(id, newIPs, oldIPs) {
|
|
// add the new IPs to the tracking
|
|
// eslint-disable-next-line no-labels
|
|
addNewIPs: for (const ip of newIPs) {
|
|
// check if it is in the old ips list
|
|
for (const xip of oldIPs) {
|
|
if (ip === xip) {
|
|
// eslint-disable-next-line no-labels
|
|
continue addNewIPs;
|
|
}
|
|
}
|
|
// no, it's a new one -- add it to the tracker
|
|
let peers = this.peerIPs.get(ip);
|
|
if (!peers) {
|
|
peers = new Set();
|
|
this.peerIPs.set(ip, peers);
|
|
}
|
|
peers.add(id);
|
|
}
|
|
// remove the obsolete old IPs from the tracking
|
|
// eslint-disable-next-line no-labels
|
|
removeOldIPs: for (const ip of oldIPs) {
|
|
// check if it is in the new ips list
|
|
for (const xip of newIPs) {
|
|
if (ip === xip) {
|
|
// eslint-disable-next-line no-labels
|
|
continue removeOldIPs;
|
|
}
|
|
}
|
|
// no, its obselete -- remove it from the tracker
|
|
const peers = this.peerIPs.get(ip);
|
|
if (!peers) {
|
|
continue;
|
|
}
|
|
peers.delete(id);
|
|
if (!peers.size) {
|
|
this.peerIPs.delete(ip);
|
|
}
|
|
}
|
|
}
|
|
/**
|
|
* Removes an IP list from the tracking list for a peer.
|
|
*/
|
|
removeIPs(id, ips) {
|
|
ips.forEach((ip) => {
|
|
const peers = this.peerIPs.get(ip);
|
|
if (!peers) {
|
|
return;
|
|
}
|
|
peers.delete(id);
|
|
if (!peers.size) {
|
|
this.peerIPs.delete(ip);
|
|
}
|
|
});
|
|
}
|
|
/**
|
|
* Update all peer IPs to currently open connections
|
|
*/
|
|
updateIPs() {
|
|
this.peerStats.forEach((pstats, id) => {
|
|
const newIPs = this.getIPs(id);
|
|
this.setIPs(id, newIPs, pstats.ips);
|
|
pstats.ips = newIPs;
|
|
});
|
|
}
|
|
/**
|
|
* Returns topic stats if they exist, otherwise if the supplied parameters score the
|
|
* topic, inserts the default stats and returns a reference to those. If neither apply, returns None.
|
|
*/
|
|
getPtopicStats(pstats, topic) {
|
|
let topicStats = pstats.topics[topic];
|
|
if (topicStats !== undefined) {
|
|
return topicStats;
|
|
}
|
|
if (this.params.topics[topic] !== undefined) {
|
|
topicStats = {
|
|
inMesh: false,
|
|
graftTime: 0,
|
|
meshTime: 0,
|
|
firstMessageDeliveries: 0,
|
|
meshMessageDeliveries: 0,
|
|
meshMessageDeliveriesActive: false,
|
|
meshFailurePenalty: 0,
|
|
invalidMessageDeliveries: 0
|
|
};
|
|
pstats.topics[topic] = topicStats;
|
|
return topicStats;
|
|
}
|
|
return null;
|
|
}
|
|
}
|
|
|
|
/**
 * IWantTracer is an internal tracer that tracks IWANT requests in order to penalize
 * peers who don't follow up on IWANT requests after an IHAVE advertisement.
 * The tracking of promises is probabilistic to avoid using too much memory.
 *
 * Note: Do not confuse these 'promises' with JS Promise objects.
 * These 'promises' are merely expectations of a peer's behavior.
 */
class IWantTracer {
    constructor(gossipsubIWantFollowupMs, msgIdToStrFn, metrics) {
        this.gossipsubIWantFollowupMs = gossipsubIWantFollowupMs;
        this.msgIdToStrFn = msgIdToStrFn;
        this.metrics = metrics;
        /**
         * Promises to deliver a message.
         * Map of message id string -> (peer -> promise expiration timestamp)
         */
        this.promises = new Map();
        /**
         * First request time by msgId. Used for metrics to track expire times.
         * Necessary to know if peers are actually breaking promises or simply sending them a bit later
         */
        this.requestMsByMsg = new Map();
        this.requestMsByMsgExpire = 10 * gossipsubIWantFollowupMs;
    }
    /** Number of message ids with outstanding promises. */
    get size() {
        return this.promises.size;
    }
    /** Number of message ids with a recorded first-request time. */
    get requestMsByMsgSize() {
        return this.requestMsByMsg.size;
    }
    /**
     * Track a promise to deliver a message from a list of msgIds we are requesting.
     * Only one randomly chosen id from the list is tracked (probabilistic tracking).
     */
    addPromise(from, msgIds) {
        // pick one msgId at random from the list
        const chosen = msgIds[Math.floor(Math.random() * msgIds.length)];
        const msgIdStr = this.msgIdToStrFn(chosen);
        let expireByPeer = this.promises.get(msgIdStr);
        if (expireByPeer === undefined) {
            expireByPeer = new Map();
            this.promises.set(msgIdStr, expireByPeer);
        }
        // If a promise for this message id and peer already exists we don't update the expiry
        if (expireByPeer.has(from)) {
            return;
        }
        const now = Date.now();
        expireByPeer.set(from, now + this.gossipsubIWantFollowupMs);
        if (this.metrics) {
            this.metrics.iwantPromiseStarted.inc(1);
            if (!this.requestMsByMsg.has(msgIdStr)) {
                this.requestMsByMsg.set(msgIdStr, now);
            }
        }
    }
    /**
     * Returns the number of broken promises for each peer who didn't follow up on an IWANT request.
     * Broken entries are removed from the tracker as they are counted.
     *
     * This should be called not too often relative to the expire times, since it iterates over the whole data.
     */
    getBrokenPromises() {
        const now = Date.now();
        const brokenByPeer = new Map();
        let brokenCount = 0;
        for (const [msgId, expireByPeer] of this.promises) {
            for (const [peer, expire] of expireByPeer) {
                if (expire < now) {
                    // the promise has been broken: count it and stop tracking it
                    brokenByPeer.set(peer, (brokenByPeer.get(peer) ?? 0) + 1);
                    expireByPeer.delete(peer);
                    brokenCount++;
                }
            }
            // clean up empty promise maps for a msgId
            if (expireByPeer.size === 0) {
                this.promises.delete(msgId);
            }
        }
        this.metrics?.iwantPromiseBroken.inc(brokenCount);
        return brokenByPeer;
    }
    /**
     * Someone delivered a message, stop tracking promises for it.
     */
    deliverMessage(msgIdStr) {
        this.trackMessage(msgIdStr);
        const expireByPeer = this.promises.get(msgIdStr);
        if (expireByPeer === undefined) {
            // nothing tracked (or already expired)
            return;
        }
        this.promises.delete(msgIdStr);
        if (this.metrics) {
            this.metrics.iwantPromiseResolved.inc(1);
            this.metrics.iwantPromiseResolvedPeers.inc(expireByPeer.size);
        }
    }
    /**
     * A message got rejected, so we can stop tracking promises and let the score
     * penalty apply from invalid message delivery — unless it's an obviously
     * invalid message (transport-level error), which keeps the promise alive.
     */
    rejectMessage(msgIdStr, reason) {
        this.trackMessage(msgIdStr);
        if (reason === RejectReason.Error) {
            return;
        }
        this.promises.delete(msgIdStr);
    }
    /** Drop all tracked promises. */
    clear() {
        this.promises.clear();
    }
    /**
     * Evict stale entries from the first-request-time map. The map is in
     * insertion order, so iteration can stop at the first recent entry.
     */
    prune() {
        const cutoff = Date.now() - this.requestMsByMsgExpire;
        for (const [msgIdStr, requestMs] of this.requestMsByMsg) {
            if (requestMs >= cutoff) {
                // recent message; everything after it is newer still
                break;
            }
            // stayed too long in the map, delete
            this.requestMsByMsg.delete(msgIdStr);
        }
    }
    /** Observe the time between first request and delivery/rejection, once per msgId. */
    trackMessage(msgIdStr) {
        if (!this.metrics) {
            return;
        }
        const requestMs = this.requestMsByMsg.get(msgIdStr);
        if (requestMs === undefined) {
            return;
        }
        this.metrics.iwantPromiseDeliveryTime.observe((Date.now() - requestMs) / 1000);
        this.requestMsByMsg.delete(msgIdStr);
    }
}
|
|
|
|
/**
 * This is similar to https://github.com/daviddias/time-cache/blob/master/src/index.js
 * for our own need, we don't use lodash throttle to improve performance.
 * This gives 4x - 5x performance gain compared to npm TimeCache
 *
 * Entries are kept in Map insertion order, and because put() never refreshes
 * an existing key's expiration, insertion order equals expiration order —
 * prune() relies on this to stop early.
 */
class SimpleTimeCache {
    constructor(opts) {
        this.entries = new Map();
        this.validityMs = opts.validityMs;
        // allow negative validityMs so that this does not cache anything, spec test compliance.spec.js
        // sends duplicate messages and expect peer to receive all. Application likely uses positive validityMs
    }
    get size() {
        return this.entries.size;
    }
    /**
     * Insert a key only if it is not cached yet (first insertion wins).
     *
     * The previous implementation unconditionally re-set existing keys, which
     * refreshed their validUntilMs WITHOUT moving them in Map insertion order.
     * That broke prune()'s early-break assumption (entries ordered by expiry):
     * expired entries positioned after a refreshed one would never be pruned,
     * letting the cache grow without bound under repeated duplicates.
     */
    put(key, value) {
        if (this.entries.has(key)) {
            return;
        }
        this.entries.set(key, { value, validUntilMs: Date.now() + this.validityMs });
    }
    /**
     * Delete expired entries. Entries are sorted by insertion (== expiration)
     * order, so iteration stops at the first still-valid entry.
     */
    prune() {
        const now = Date.now();
        for (const [k, v] of this.entries.entries()) {
            if (v.validUntilMs < now) {
                this.entries.delete(k);
            }
            else {
                // recent entries; everything after is newer still
                break;
            }
        }
    }
    /** True if the key is present, whether or not it has expired (prune() removes expired keys). */
    has(key) {
        return this.entries.has(key);
    }
    /** Return the cached value, or undefined when absent or expired. */
    get(key) {
        const value = this.entries.get(key);
        return value && value.validUntilMs >= Date.now() ? value.value : undefined;
    }
    /** Drop all entries. */
    clear() {
        this.entries.clear();
    }
}
|
|
|
|
// The following blocks are TypeScript string enums compiled to var + IIFE form;
// each builds a frozen-by-convention name -> string-value lookup object.
/** Where an outgoing message originated from. */
var MessageSource;
(function (MessageSource) {
    MessageSource["forward"] = "forward";
    MessageSource["publish"] = "publish";
})(MessageSource || (MessageSource = {}));
/** Reasons why a peer was added to the mesh (for metrics labels). */
var InclusionReason;
(function (InclusionReason) {
    /** Peer was a fanout peer. */
    InclusionReason["Fanout"] = "fanout";
    /** Included from random selection. */
    InclusionReason["Random"] = "random";
    /** Peer subscribed. */
    InclusionReason["Subscribed"] = "subscribed";
    /** On heartbeat, peer was included to fill the outbound quota. */
    InclusionReason["Outbound"] = "outbound";
    /** On heartbeat, not enough peers in mesh */
    InclusionReason["NotEnough"] = "not_enough";
    /** On heartbeat opportunistic grafting due to low mesh score */
    InclusionReason["Opportunistic"] = "opportunistic";
})(InclusionReason || (InclusionReason = {}));
/// Reasons why a peer was removed from the mesh.
var ChurnReason;
(function (ChurnReason) {
    /// Peer disconnected.
    ChurnReason["Dc"] = "disconnected";
    /// Peer had a bad score.
    ChurnReason["BadScore"] = "bad_score";
    /// Peer sent a PRUNE.
    ChurnReason["Prune"] = "prune";
    /// Peer unsubscribed.
    ChurnReason["Unsub"] = "unsubscribed";
    /// Too many peers.
    ChurnReason["Excess"] = "excess";
})(ChurnReason || (ChurnReason = {}));
/// Kinds of reasons a peer's score has been penalized
var ScorePenalty;
(function (ScorePenalty) {
    /// A peer grafted before waiting the back-off time.
    ScorePenalty["GraftBackoff"] = "graft_backoff";
    /// A Peer did not respond to an IWANT request in time.
    ScorePenalty["BrokenPromise"] = "broken_promise";
    /// A Peer did not send enough messages as expected.
    ScorePenalty["MessageDeficit"] = "message_deficit";
    /// Too many peers under one IP address.
    ScorePenalty["IPColocation"] = "IP_colocation";
})(ScorePenalty || (ScorePenalty = {}));
/** Reasons an incoming IHAVE is ignored rather than answered with IWANT. */
var IHaveIgnoreReason;
(function (IHaveIgnoreReason) {
    IHaveIgnoreReason["LowScore"] = "low_score";
    IHaveIgnoreReason["MaxIhave"] = "max_ihave";
    IHaveIgnoreReason["MaxIasked"] = "max_iasked";
})(IHaveIgnoreReason || (IHaveIgnoreReason = {}));
/** Score threshold buckets used for the peers-by-score-threshold metric. */
var ScoreThreshold;
(function (ScoreThreshold) {
    ScoreThreshold["graylist"] = "graylist";
    ScoreThreshold["publish"] = "publish";
    ScoreThreshold["gossip"] = "gossip";
    ScoreThreshold["mesh"] = "mesh";
})(ScoreThreshold || (ScoreThreshold = {}));
|
|
/**
|
|
* A collection of metrics used throughout the Gossipsub behaviour.
|
|
*/
|
|
// eslint-disable-next-line @typescript-eslint/explicit-module-boundary-types
|
|
function getMetrics(register, topicStrToLabel, opts) {
|
|
// Using function style instead of class to prevent having to re-declare all MetricsPrometheus types.
|
|
return {
|
|
/* Metrics for static config */
|
|
protocolsEnabled: register.gauge({
|
|
name: 'gossipsub_protocol',
|
|
help: 'Status of enabled protocols',
|
|
labelNames: ['protocol']
|
|
}),
|
|
/* Metrics per known topic */
|
|
/** Status of our subscription to this topic. This metric allows analyzing other topic metrics
|
|
* filtered by our current subscription status.
|
|
* = rust-libp2p `topic_subscription_status` */
|
|
topicSubscriptionStatus: register.gauge({
|
|
name: 'gossipsub_topic_subscription_status',
|
|
help: 'Status of our subscription to this topic',
|
|
labelNames: ['topicStr']
|
|
}),
|
|
/** Number of peers subscribed to each topic. This allows us to analyze a topic's behaviour
|
|
* regardless of our subscription status. */
|
|
topicPeersCount: register.gauge({
|
|
name: 'gossipsub_topic_peer_count',
|
|
help: 'Number of peers subscribed to each topic',
|
|
labelNames: ['topicStr']
|
|
}),
|
|
/* Metrics regarding mesh state */
|
|
/** Number of peers in our mesh. This metric should be updated with the count of peers for a
|
|
* topic in the mesh regardless of inclusion and churn events.
|
|
* = rust-libp2p `mesh_peer_counts` */
|
|
meshPeerCounts: register.gauge({
|
|
name: 'gossipsub_mesh_peer_count',
|
|
help: 'Number of peers in our mesh',
|
|
labelNames: ['topicStr']
|
|
}),
|
|
/** Number of times we include peers in a topic mesh for different reasons.
|
|
* = rust-libp2p `mesh_peer_inclusion_events` */
|
|
meshPeerInclusionEvents: register.gauge({
|
|
name: 'gossipsub_mesh_peer_inclusion_events_total',
|
|
help: 'Number of times we include peers in a topic mesh for different reasons',
|
|
labelNames: ['topic', 'reason']
|
|
}),
|
|
/** Number of times we remove peers in a topic mesh for different reasons.
|
|
* = rust-libp2p `mesh_peer_churn_events` */
|
|
meshPeerChurnEvents: register.gauge({
|
|
name: 'gossipsub_peer_churn_events_total',
|
|
help: 'Number of times we remove peers in a topic mesh for different reasons',
|
|
labelNames: ['topic', 'reason']
|
|
}),
|
|
/* General Metrics */
|
|
/** Gossipsub supports floodsub, gossipsub v1.0 and gossipsub v1.1. Peers are classified based
|
|
* on which protocol they support. This metric keeps track of the number of peers that are
|
|
* connected of each type. */
|
|
peersPerProtocol: register.gauge({
|
|
name: 'gossipsub_peers_per_protocol_count',
|
|
help: 'Peers connected for each topic',
|
|
labelNames: ['protocol']
|
|
}),
|
|
/** The time it takes to complete one iteration of the heartbeat. */
|
|
heartbeatDuration: register.histogram({
|
|
name: 'gossipsub_heartbeat_duration_seconds',
|
|
help: 'The time it takes to complete one iteration of the heartbeat',
|
|
// Should take <10ms, over 1s it's a huge issue that needs debugging, since a heartbeat will be cancelled
|
|
buckets: [0.01, 0.1, 1]
|
|
}),
|
|
/** Heartbeat run took longer than heartbeat interval so next is skipped */
|
|
heartbeatSkipped: register.gauge({
|
|
name: 'gossipsub_heartbeat_skipped',
|
|
help: 'Heartbeat run took longer than heartbeat interval so next is skipped'
|
|
}),
|
|
/** Message validation results for each topic.
|
|
* Invalid == Reject?
|
|
* = rust-libp2p `invalid_messages`, `accepted_messages`, `ignored_messages`, `rejected_messages` */
|
|
asyncValidationResult: register.gauge({
|
|
name: 'gossipsub_async_validation_result_total',
|
|
help: 'Message validation result for each topic',
|
|
labelNames: ['topic', 'acceptance']
|
|
}),
|
|
/** When the user validates a message, it tries to re propagate it to its mesh peers. If the
|
|
* message expires from the memcache before it can be validated, we count this a cache miss
|
|
* and it is an indicator that the memcache size should be increased.
|
|
* = rust-libp2p `mcache_misses` */
|
|
asyncValidationMcacheHit: register.gauge({
|
|
name: 'gossipsub_async_validation_mcache_hit_total',
|
|
help: 'Async validation result reported by the user layer',
|
|
labelNames: ['hit']
|
|
}),
|
|
// RPC outgoing. Track byte length + data structure sizes
|
|
rpcRecvBytes: register.gauge({ name: 'gossipsub_rpc_recv_bytes_total', help: 'RPC recv' }),
|
|
rpcRecvCount: register.gauge({ name: 'gossipsub_rpc_recv_count_total', help: 'RPC recv' }),
|
|
rpcRecvSubscription: register.gauge({ name: 'gossipsub_rpc_recv_subscription_total', help: 'RPC recv' }),
|
|
rpcRecvMessage: register.gauge({ name: 'gossipsub_rpc_recv_message_total', help: 'RPC recv' }),
|
|
rpcRecvControl: register.gauge({ name: 'gossipsub_rpc_recv_control_total', help: 'RPC recv' }),
|
|
rpcRecvIHave: register.gauge({ name: 'gossipsub_rpc_recv_ihave_total', help: 'RPC recv' }),
|
|
rpcRecvIWant: register.gauge({ name: 'gossipsub_rpc_recv_iwant_total', help: 'RPC recv' }),
|
|
rpcRecvGraft: register.gauge({ name: 'gossipsub_rpc_recv_graft_total', help: 'RPC recv' }),
|
|
rpcRecvPrune: register.gauge({ name: 'gossipsub_rpc_recv_prune_total', help: 'RPC recv' }),
|
|
/** Total count of RPC dropped because acceptFrom() == false */
|
|
rpcRecvNotAccepted: register.gauge({
|
|
name: 'gossipsub_rpc_rcv_not_accepted_total',
|
|
help: 'Total count of RPC dropped because acceptFrom() == false'
|
|
}),
|
|
// RPC incoming. Track byte length + data structure sizes
|
|
rpcSentBytes: register.gauge({ name: 'gossipsub_rpc_sent_bytes_total', help: 'RPC sent' }),
|
|
rpcSentCount: register.gauge({ name: 'gossipsub_rpc_sent_count_total', help: 'RPC sent' }),
|
|
rpcSentSubscription: register.gauge({ name: 'gossipsub_rpc_sent_subscription_total', help: 'RPC sent' }),
|
|
rpcSentMessage: register.gauge({ name: 'gossipsub_rpc_sent_message_total', help: 'RPC sent' }),
|
|
rpcSentControl: register.gauge({ name: 'gossipsub_rpc_sent_control_total', help: 'RPC sent' }),
|
|
rpcSentIHave: register.gauge({ name: 'gossipsub_rpc_sent_ihave_total', help: 'RPC sent' }),
|
|
rpcSentIWant: register.gauge({ name: 'gossipsub_rpc_sent_iwant_total', help: 'RPC sent' }),
|
|
rpcSentGraft: register.gauge({ name: 'gossipsub_rpc_sent_graft_total', help: 'RPC sent' }),
|
|
rpcSentPrune: register.gauge({ name: 'gossipsub_rpc_sent_prune_total', help: 'RPC sent' }),
|
|
// publish message. Track peers sent to and bytes
|
|
/** Total count of msg published by topic */
|
|
msgPublishCount: register.gauge({
|
|
name: 'gossipsub_msg_publish_count_total',
|
|
help: 'Total count of msg published by topic',
|
|
labelNames: ['topic']
|
|
}),
|
|
/** Total count of peers that we publish a msg to */
|
|
msgPublishPeers: register.gauge({
|
|
name: 'gossipsub_msg_publish_peers_total',
|
|
help: 'Total count of peers that we publish a msg to',
|
|
labelNames: ['topic']
|
|
}),
|
|
/** Total count of peers (by group) that we publish a msg to */
|
|
// NOTE: Do not use 'group' label since it's a generic already used by Prometheus to group instances
|
|
msgPublishPeersByGroup: register.gauge({
|
|
name: 'gossipsub_msg_publish_peers_by_group',
|
|
help: 'Total count of peers (by group) that we publish a msg to',
|
|
labelNames: ['topic', 'peerGroup']
|
|
}),
|
|
/** Total count of msg publish data.length bytes */
|
|
msgPublishBytes: register.gauge({
|
|
name: 'gossipsub_msg_publish_bytes_total',
|
|
help: 'Total count of msg publish data.length bytes',
|
|
labelNames: ['topic']
|
|
}),
|
|
/** Total count of msg forwarded by topic */
|
|
msgForwardCount: register.gauge({
|
|
name: 'gossipsub_msg_forward_count_total',
|
|
help: 'Total count of msg forwarded by topic',
|
|
labelNames: ['topic']
|
|
}),
|
|
/** Total count of peers that we forward a msg to */
|
|
msgForwardPeers: register.gauge({
|
|
name: 'gossipsub_msg_forward_peers_total',
|
|
help: 'Total count of peers that we forward a msg to',
|
|
labelNames: ['topic']
|
|
}),
|
|
/** Total count of recv msgs before any validation */
|
|
msgReceivedPreValidation: register.gauge({
|
|
name: 'gossipsub_msg_received_prevalidation_total',
|
|
help: 'Total count of recv msgs before any validation',
|
|
labelNames: ['topic']
|
|
}),
|
|
/** Tracks distribution of recv msgs by duplicate, invalid, valid */
|
|
msgReceivedStatus: register.gauge({
|
|
name: 'gossipsub_msg_received_status_total',
|
|
help: 'Tracks distribution of recv msgs by duplicate, invalid, valid',
|
|
labelNames: ['topic', 'status']
|
|
}),
|
|
/** Tracks specific reason of invalid */
|
|
msgReceivedInvalid: register.gauge({
|
|
name: 'gossipsub_msg_received_invalid_total',
|
|
help: 'Tracks specific reason of invalid',
|
|
labelNames: ['topic', 'error']
|
|
}),
|
|
/** Track duplicate message delivery time */
|
|
duplicateMsgDeliveryDelay: register.histogram({
|
|
name: 'gossisub_duplicate_msg_delivery_delay_seconds',
|
|
help: 'Time since the 1st duplicated message validated',
|
|
labelNames: ['topic'],
|
|
buckets: [
|
|
0.25 * opts.maxMeshMessageDeliveriesWindowSec,
|
|
0.5 * opts.maxMeshMessageDeliveriesWindowSec,
|
|
1 * opts.maxMeshMessageDeliveriesWindowSec,
|
|
2 * opts.maxMeshMessageDeliveriesWindowSec,
|
|
4 * opts.maxMeshMessageDeliveriesWindowSec
|
|
]
|
|
}),
|
|
/** Total count of late msg delivery total by topic */
|
|
duplicateMsgLateDelivery: register.gauge({
|
|
name: 'gossisub_duplicate_msg_late_delivery_total',
|
|
help: 'Total count of late duplicate message delivery by topic, which triggers P3 penalty',
|
|
labelNames: ['topic']
|
|
}),
|
|
/* Metrics related to scoring */
|
|
/** Total times score() is called */
|
|
scoreFnCalls: register.gauge({
|
|
name: 'gossipsub_score_fn_calls_total',
|
|
help: 'Total times score() is called'
|
|
}),
|
|
/** Total times score() call actually computed computeScore(), no cache */
|
|
scoreFnRuns: register.gauge({
|
|
name: 'gossipsub_score_fn_runs_total',
|
|
help: 'Total times score() call actually computed computeScore(), no cache'
|
|
}),
|
|
scoreCachedDelta: register.histogram({
|
|
name: 'gossipsub_score_cache_delta',
|
|
help: 'Delta of score between cached values that expired',
|
|
buckets: [10, 100, 1000]
|
|
}),
|
|
/** Current count of peers by score threshold */
|
|
peersByScoreThreshold: register.gauge({
|
|
name: 'gossipsub_peers_by_score_threshold_count',
|
|
help: 'Current count of peers by score threshold',
|
|
labelNames: ['threshold']
|
|
}),
|
|
score: register.avgMinMax({
|
|
name: 'gossipsub_score',
|
|
help: 'Avg min max of gossip scores',
|
|
labelNames: ['topic', 'p']
|
|
}),
|
|
/** Separate score weights */
|
|
scoreWeights: register.avgMinMax({
|
|
name: 'gossipsub_score_weights',
|
|
help: 'Separate score weights',
|
|
labelNames: ['topic', 'p']
|
|
}),
|
|
/** Histogram of the scores for each mesh topic. */
|
|
// TODO: Not implemented
|
|
scorePerMesh: register.avgMinMax({
|
|
name: 'gossipsub_score_per_mesh',
|
|
help: 'Histogram of the scores for each mesh topic',
|
|
labelNames: ['topic']
|
|
}),
|
|
/** A counter of the kind of penalties being applied to peers. */
|
|
// TODO: Not fully implemented
|
|
scoringPenalties: register.gauge({
|
|
name: 'gossipsub_scoring_penalties_total',
|
|
help: 'A counter of the kind of penalties being applied to peers',
|
|
labelNames: ['penalty']
|
|
}),
|
|
behaviourPenalty: register.histogram({
|
|
name: 'gossipsub_peer_stat_behaviour_penalty',
|
|
help: 'Current peer stat behaviour_penalty at each scrape',
|
|
buckets: [
|
|
0.25 * opts.behaviourPenaltyThreshold,
|
|
0.5 * opts.behaviourPenaltyThreshold,
|
|
1 * opts.behaviourPenaltyThreshold,
|
|
2 * opts.behaviourPenaltyThreshold,
|
|
4 * opts.behaviourPenaltyThreshold
|
|
]
|
|
}),
|
|
// TODO:
|
|
// - iasked per peer (on heartbeat)
|
|
// - when promise is resolved, track messages from promises
|
|
/** Total received IHAVE messages that we ignore for some reason */
|
|
ihaveRcvIgnored: register.gauge({
|
|
name: 'gossipsub_ihave_rcv_ignored_total',
|
|
help: 'Total received IHAVE messages that we ignore for some reason',
|
|
labelNames: ['reason']
|
|
}),
|
|
/** Total received IHAVE messages by topic */
|
|
ihaveRcvMsgids: register.gauge({
|
|
name: 'gossipsub_ihave_rcv_msgids_total',
|
|
help: 'Total received IHAVE messages by topic',
|
|
labelNames: ['topic']
|
|
}),
|
|
/** Total messages per topic we don't have. Not actual requests.
|
|
* The number of times we have decided that an IWANT control message is required for this
|
|
* topic. A very high metric might indicate an underperforming network.
|
|
* = rust-libp2p `topic_iwant_msgs` */
|
|
ihaveRcvNotSeenMsgids: register.gauge({
|
|
name: 'gossipsub_ihave_rcv_not_seen_msgids_total',
|
|
help: 'Total messages per topic we do not have, not actual requests',
|
|
labelNames: ['topic']
|
|
}),
|
|
/** Total received IWANT messages by topic */
|
|
iwantRcvMsgids: register.gauge({
|
|
name: 'gossipsub_iwant_rcv_msgids_total',
|
|
help: 'Total received IWANT messages by topic',
|
|
labelNames: ['topic']
|
|
}),
|
|
/** Total requested messageIDs that we don't have */
|
|
iwantRcvDonthaveMsgids: register.gauge({
|
|
name: 'gossipsub_iwant_rcv_dont_have_msgids_total',
|
|
help: 'Total requested messageIDs that we do not have'
|
|
}),
|
|
iwantPromiseStarted: register.gauge({
|
|
name: 'gossipsub_iwant_promise_sent_total',
|
|
help: 'Total count of started IWANT promises'
|
|
}),
|
|
/** Total count of resolved IWANT promises */
|
|
iwantPromiseResolved: register.gauge({
|
|
name: 'gossipsub_iwant_promise_resolved_total',
|
|
help: 'Total count of resolved IWANT promises'
|
|
}),
|
|
/** Total count of peers we have asked IWANT promises that are resolved */
|
|
iwantPromiseResolvedPeers: register.gauge({
|
|
name: 'gossipsub_iwant_promise_resolved_peers',
|
|
help: 'Total count of peers we have asked IWANT promises that are resolved'
|
|
}),
|
|
iwantPromiseBroken: register.gauge({
|
|
name: 'gossipsub_iwant_promise_broken',
|
|
help: 'Total count of broken IWANT promises'
|
|
}),
|
|
/** Histogram of delivery time of resolved IWANT promises */
|
|
iwantPromiseDeliveryTime: register.histogram({
|
|
name: 'gossipsub_iwant_promise_delivery_seconds',
|
|
help: 'Histogram of delivery time of resolved IWANT promises',
|
|
buckets: [
|
|
0.5 * opts.gossipPromiseExpireSec,
|
|
1 * opts.gossipPromiseExpireSec,
|
|
2 * opts.gossipPromiseExpireSec,
|
|
4 * opts.gossipPromiseExpireSec
|
|
]
|
|
}),
|
|
/* Data structure sizes */
|
|
/** Unbounded cache sizes */
|
|
cacheSize: register.gauge({
|
|
name: 'gossipsub_cache_size',
|
|
help: 'Unbounded cache sizes',
|
|
labelNames: ['cache']
|
|
}),
|
|
/** Current mcache msg count */
|
|
mcacheSize: register.gauge({
|
|
name: 'gossipsub_mcache_size',
|
|
help: 'Current mcache msg count'
|
|
}),
|
|
topicStrToLabel: topicStrToLabel,
|
|
toTopic(topicStr) {
|
|
return this.topicStrToLabel.get(topicStr) ?? topicStr;
|
|
},
|
|
/** We joined a topic */
|
|
onJoin(topicStr) {
|
|
this.topicSubscriptionStatus.set({ topicStr }, 1);
|
|
this.meshPeerCounts.set({ topicStr }, 0); // Reset count
|
|
},
|
|
/** We left a topic */
|
|
onLeave(topicStr) {
|
|
this.topicSubscriptionStatus.set({ topicStr }, 0);
|
|
this.meshPeerCounts.set({ topicStr }, 0); // Reset count
|
|
},
|
|
/** Register the inclusion of peers in our mesh due to some reason. */
|
|
onAddToMesh(topicStr, reason, count) {
|
|
const topic = this.toTopic(topicStr);
|
|
this.meshPeerInclusionEvents.inc({ topic, reason }, count);
|
|
},
|
|
/** Register the removal of peers in our mesh due to some reason */
|
|
// - remove_peer_from_mesh()
|
|
// - heartbeat() Churn::BadScore
|
|
// - heartbeat() Churn::Excess
|
|
// - on_disconnect() Churn::Ds
|
|
onRemoveFromMesh(topicStr, reason, count) {
|
|
const topic = this.toTopic(topicStr);
|
|
this.meshPeerChurnEvents.inc({ topic, reason }, count);
|
|
},
|
|
onReportValidationMcacheHit(hit) {
|
|
this.asyncValidationMcacheHit.inc({ hit: hit ? 'hit' : 'miss' });
|
|
},
|
|
onReportValidation(topicStr, acceptance) {
|
|
const topic = this.toTopic(topicStr);
|
|
this.asyncValidationResult.inc({ topic: topic, acceptance });
|
|
},
|
|
/**
|
|
* - in handle_graft() Penalty::GraftBackoff
|
|
* - in apply_iwant_penalties() Penalty::BrokenPromise
|
|
* - in metric_score() P3 Penalty::MessageDeficit
|
|
* - in metric_score() P6 Penalty::IPColocation
|
|
*/
|
|
onScorePenalty(penalty) {
|
|
// Can this be labeled by topic too?
|
|
this.scoringPenalties.inc({ penalty }, 1);
|
|
},
|
|
onIhaveRcv(topicStr, ihave, idonthave) {
|
|
const topic = this.toTopic(topicStr);
|
|
this.ihaveRcvMsgids.inc({ topic }, ihave);
|
|
this.ihaveRcvNotSeenMsgids.inc({ topic }, idonthave);
|
|
},
|
|
onIwantRcv(iwantByTopic, iwantDonthave) {
|
|
for (const [topicStr, iwant] of iwantByTopic) {
|
|
const topic = this.toTopic(topicStr);
|
|
this.iwantRcvMsgids.inc({ topic }, iwant);
|
|
}
|
|
this.iwantRcvDonthaveMsgids.inc(iwantDonthave);
|
|
},
|
|
onForwardMsg(topicStr, tosendCount) {
|
|
const topic = this.toTopic(topicStr);
|
|
this.msgForwardCount.inc({ topic }, 1);
|
|
this.msgForwardPeers.inc({ topic }, tosendCount);
|
|
},
|
|
onPublishMsg(topicStr, tosendGroupCount, tosendCount, dataLen) {
|
|
const topic = this.toTopic(topicStr);
|
|
this.msgPublishCount.inc({ topic }, 1);
|
|
this.msgPublishBytes.inc({ topic }, tosendCount * dataLen);
|
|
this.msgPublishPeers.inc({ topic }, tosendCount);
|
|
this.msgPublishPeersByGroup.inc({ topic, peerGroup: 'direct' }, tosendGroupCount.direct);
|
|
this.msgPublishPeersByGroup.inc({ topic, peerGroup: 'floodsub' }, tosendGroupCount.floodsub);
|
|
this.msgPublishPeersByGroup.inc({ topic, peerGroup: 'mesh' }, tosendGroupCount.mesh);
|
|
this.msgPublishPeersByGroup.inc({ topic, peerGroup: 'fanout' }, tosendGroupCount.fanout);
|
|
},
|
|
onMsgRecvPreValidation(topicStr) {
|
|
const topic = this.toTopic(topicStr);
|
|
this.msgReceivedPreValidation.inc({ topic }, 1);
|
|
},
|
|
onMsgRecvResult(topicStr, status) {
|
|
const topic = this.toTopic(topicStr);
|
|
this.msgReceivedStatus.inc({ topic, status });
|
|
},
|
|
onMsgRecvInvalid(topicStr, reason) {
|
|
const topic = this.toTopic(topicStr);
|
|
const error = reason.reason === RejectReason.Error ? reason.error : reason.reason;
|
|
this.msgReceivedInvalid.inc({ topic, error }, 1);
|
|
},
|
|
onDuplicateMsgDelivery(topicStr, deliveryDelayMs, isLateDelivery) {
|
|
this.duplicateMsgDeliveryDelay.observe(deliveryDelayMs / 1000);
|
|
if (isLateDelivery) {
|
|
const topic = this.toTopic(topicStr);
|
|
this.duplicateMsgLateDelivery.inc({ topic }, 1);
|
|
}
|
|
},
|
|
onRpcRecv(rpc, rpcBytes) {
|
|
this.rpcRecvBytes.inc(rpcBytes);
|
|
this.rpcRecvCount.inc(1);
|
|
if (rpc.subscriptions)
|
|
this.rpcRecvSubscription.inc(rpc.subscriptions.length);
|
|
if (rpc.messages)
|
|
this.rpcRecvMessage.inc(rpc.messages.length);
|
|
if (rpc.control) {
|
|
this.rpcRecvControl.inc(1);
|
|
if (rpc.control.ihave)
|
|
this.rpcRecvIHave.inc(rpc.control.ihave.length);
|
|
if (rpc.control.iwant)
|
|
this.rpcRecvIWant.inc(rpc.control.iwant.length);
|
|
if (rpc.control.graft)
|
|
this.rpcRecvGraft.inc(rpc.control.graft.length);
|
|
if (rpc.control.prune)
|
|
this.rpcRecvPrune.inc(rpc.control.prune.length);
|
|
}
|
|
},
|
|
onRpcSent(rpc, rpcBytes) {
|
|
this.rpcSentBytes.inc(rpcBytes);
|
|
this.rpcSentCount.inc(1);
|
|
if (rpc.subscriptions)
|
|
this.rpcSentSubscription.inc(rpc.subscriptions.length);
|
|
if (rpc.messages)
|
|
this.rpcSentMessage.inc(rpc.messages.length);
|
|
if (rpc.control) {
|
|
const ihave = rpc.control.ihave?.length ?? 0;
|
|
const iwant = rpc.control.iwant?.length ?? 0;
|
|
const graft = rpc.control.graft?.length ?? 0;
|
|
const prune = rpc.control.prune?.length ?? 0;
|
|
if (ihave > 0)
|
|
this.rpcSentIHave.inc(ihave);
|
|
if (iwant > 0)
|
|
this.rpcSentIWant.inc(iwant);
|
|
if (graft > 0)
|
|
this.rpcSentGraft.inc(graft);
|
|
if (prune > 0)
|
|
this.rpcSentPrune.inc(prune);
|
|
if (ihave > 0 || iwant > 0 || graft > 0 || prune > 0)
|
|
this.rpcSentControl.inc(1);
|
|
}
|
|
},
|
|
registerScores(scores, scoreThresholds) {
|
|
let graylist = 0;
|
|
let publish = 0;
|
|
let gossip = 0;
|
|
let mesh = 0;
|
|
for (const score of scores) {
|
|
if (score >= scoreThresholds.graylistThreshold)
|
|
graylist++;
|
|
if (score >= scoreThresholds.publishThreshold)
|
|
publish++;
|
|
if (score >= scoreThresholds.gossipThreshold)
|
|
gossip++;
|
|
if (score >= 0)
|
|
mesh++;
|
|
}
|
|
this.peersByScoreThreshold.set({ threshold: ScoreThreshold.graylist }, graylist);
|
|
this.peersByScoreThreshold.set({ threshold: ScoreThreshold.publish }, publish);
|
|
this.peersByScoreThreshold.set({ threshold: ScoreThreshold.gossip }, gossip);
|
|
this.peersByScoreThreshold.set({ threshold: ScoreThreshold.mesh }, mesh);
|
|
// Register full score too
|
|
this.score.set(scores);
|
|
},
|
|
registerScoreWeights(sw) {
|
|
for (const [topic, wsTopic] of sw.byTopic) {
|
|
this.scoreWeights.set({ topic, p: 'p1' }, wsTopic.p1w);
|
|
this.scoreWeights.set({ topic, p: 'p2' }, wsTopic.p2w);
|
|
this.scoreWeights.set({ topic, p: 'p3' }, wsTopic.p3w);
|
|
this.scoreWeights.set({ topic, p: 'p3b' }, wsTopic.p3bw);
|
|
this.scoreWeights.set({ topic, p: 'p4' }, wsTopic.p4w);
|
|
}
|
|
this.scoreWeights.set({ p: 'p5' }, sw.p5w);
|
|
this.scoreWeights.set({ p: 'p6' }, sw.p6w);
|
|
this.scoreWeights.set({ p: 'p7' }, sw.p7w);
|
|
},
|
|
registerScorePerMesh(mesh, scoreByPeer) {
|
|
const peersPerTopicLabel = new Map();
|
|
mesh.forEach((peers, topicStr) => {
|
|
// Aggregate by known topicLabel or throw to 'unknown'. This prevent too high cardinality
|
|
const topicLabel = this.topicStrToLabel.get(topicStr) ?? 'unknown';
|
|
let peersInMesh = peersPerTopicLabel.get(topicLabel);
|
|
if (!peersInMesh) {
|
|
peersInMesh = new Set();
|
|
peersPerTopicLabel.set(topicLabel, peersInMesh);
|
|
}
|
|
peers.forEach((p) => peersInMesh?.add(p));
|
|
});
|
|
for (const [topic, peers] of peersPerTopicLabel) {
|
|
const meshScores = [];
|
|
peers.forEach((peer) => {
|
|
meshScores.push(scoreByPeer.get(peer) ?? 0);
|
|
});
|
|
this.scorePerMesh.set({ topic }, meshScores);
|
|
}
|
|
}
|
|
};
|
|
}
|
|
|
|
// Domain-separation prefix prepended to the encoded message bytes before
// signing/verification ("libp2p-pubsub:<protobuf-message>")
const SignPrefix = fromString$1('libp2p-pubsub:');
|
|
/**
 * Build the wire (`raw`) and typed (`msg`) representations of a message to
 * publish, according to the publish config's signature policy.
 *
 * @param publishConfig - Signing (author/key/privateKey) or Anonymous config
 * @param topic - topic string the message is published to
 * @param originalData - payload before any data transform (kept in `msg`)
 * @param transformedData - payload actually sent on the wire (kept in `raw`)
 * @returns `{ raw, msg }` — protobuf-shaped wire message and typed message
 */
async function buildRawMessage(publishConfig, topic, originalData, transformedData) {
    switch (publishConfig.type) {
        case PublishConfigType.Signing: {
            // signature/key MUST be unset while computing the signature
            const rpcMsg = {
                from: publishConfig.author.toBytes(),
                data: transformedData,
                seqno: randomBytes(8),
                topic,
                signature: undefined,
                key: undefined // Exclude key field for signing
            };
            // Get the message in bytes, and prepend with the pubsub prefix
            // the signature is over the bytes "libp2p-pubsub:<protobuf-message>"
            const bytes = concat([SignPrefix, RPC.Message.encode(rpcMsg).finish()]);
            rpcMsg.signature = await publishConfig.privateKey.sign(bytes);
            rpcMsg.key = publishConfig.key;
            const msg = {
                type: 'signed',
                from: publishConfig.author,
                data: originalData,
                // seqno is 8 random bytes interpreted as a big-endian bigint
                sequenceNumber: BigInt(`0x${toString$3(rpcMsg.seqno, 'base16')}`),
                topic,
                signature: rpcMsg.signature,
                key: rpcMsg.key
            };
            return {
                raw: rpcMsg,
                msg: msg
            };
        }
        case PublishConfigType.Anonymous: {
            // Anonymous mode: no author, seqno, signature or key on the wire
            return {
                raw: {
                    from: undefined,
                    data: transformedData,
                    seqno: undefined,
                    topic,
                    signature: undefined,
                    key: undefined
                },
                msg: {
                    type: 'unsigned',
                    data: originalData,
                    topic
                }
            };
        }
    }
}
|
|
/**
 * Validate a raw wire message against the router's signature policy and, on
 * success, convert it into a typed message.
 *
 * @param signaturePolicy - StrictSign or StrictNoSign
 * @param msg - raw protobuf-decoded message
 * @returns `{ valid: true, message }` or `{ valid: false, error }`
 */
async function validateToRawMessage(signaturePolicy, msg) {
    // If strict-sign, verify all
    // If anonymous (no-sign), ensure signature/seqno/key fields are absent
    switch (signaturePolicy) {
        case StrictNoSign:
            if (msg.signature != null)
                return { valid: false, error: ValidateError.SignaturePresent };
            if (msg.seqno != null)
                return { valid: false, error: ValidateError.SeqnoPresent };
            if (msg.key != null)
                return { valid: false, error: ValidateError.FromPresent };
            return { valid: true, message: { type: 'unsigned', topic: msg.topic, data: msg.data ?? new Uint8Array(0) } };
        case StrictSign: {
            // Verify seqno: must be present and exactly 8 bytes
            if (msg.seqno == null)
                return { valid: false, error: ValidateError.InvalidSeqno };
            if (msg.seqno.length !== 8) {
                return { valid: false, error: ValidateError.InvalidSeqno };
            }
            if (msg.signature == null)
                return { valid: false, error: ValidateError.InvalidSignature };
            if (msg.from == null)
                return { valid: false, error: ValidateError.InvalidPeerId };
            let fromPeerId;
            try {
                // TODO: Fix PeerId types
                fromPeerId = peerIdFromBytes(msg.from);
            }
            catch (e) {
                return { valid: false, error: ValidateError.InvalidPeerId };
            }
            // - check from defined
            // - transform source to PeerId
            // - parse signature
            // - get .key, else from source
            // - check key == source if present
            // - verify sig
            let publicKey;
            if (msg.key) {
                publicKey = unmarshalPublicKey(msg.key);
                // If the source peer id embeds its public key, the explicit key must match it
                // TODO: Should `fromPeerId.pubKey` be optional?
                if (fromPeerId.publicKey !== undefined && !equals(publicKey.bytes, fromPeerId.publicKey)) {
                    return { valid: false, error: ValidateError.InvalidPeerId };
                }
            }
            else {
                // No explicit key: the public key must be recoverable from the peer id itself
                if (fromPeerId.publicKey == null) {
                    return { valid: false, error: ValidateError.InvalidPeerId };
                }
                publicKey = unmarshalPublicKey(fromPeerId.publicKey);
            }
            // Reconstruct exactly the bytes that were signed: signature/key unset
            const rpcMsgPreSign = {
                from: msg.from,
                data: msg.data,
                seqno: msg.seqno,
                topic: msg.topic,
                signature: undefined,
                key: undefined // Exclude key field for signing
            };
            // Get the message in bytes, and prepend with the pubsub prefix
            // the signature is over the bytes "libp2p-pubsub:<protobuf-message>"
            const bytes = concat([SignPrefix, RPC.Message.encode(rpcMsgPreSign).finish()]);
            if (!(await publicKey.verify(bytes, msg.signature))) {
                return { valid: false, error: ValidateError.InvalidSignature };
            }
            return {
                valid: true,
                message: {
                    type: 'signed',
                    from: fromPeerId,
                    data: msg.data ?? new Uint8Array(0),
                    sequenceNumber: BigInt(`0x${toString$3(msg.seqno, 'base16')}`),
                    topic: msg.topic,
                    signature: msg.signature,
                    // Normalize: always carry a key in the typed message
                    key: msg.key ?? marshalPublicKey(publicKey)
                }
            };
        }
    }
}
|
|
|
|
/**
 * Generate a message id by concatenating `key` with the 8-byte (16 hex chars)
 * big-endian encoding of `seqno`
 */
const msgId = (key, seqno) => {
    const seqnoHex = seqno.toString(16).padStart(16, '0');
    const seqnoBytes = fromString$1(seqnoHex, 'base16');
    const out = new Uint8Array(key.length + seqnoBytes.length);
    out.set(key);
    out.set(seqnoBytes, key.length);
    return out;
};
|
|
|
|
/**
 * Generate a message id for the StrictSign policy, derived from the sender's
 * peer id (`from`) plus `sequenceNumber`
 * @throws {Error} if the message is not of type 'signed' or lacks a seqno
 */
function msgIdFnStrictSign(msg) {
    if (msg.type !== 'signed') {
        throw new Error('expected signed message type');
    }
    // Should never happen: StrictSign-validated messages always carry a seqno
    if (msg.sequenceNumber == null)
        throw Error('missing seqno field');
    // TODO: Should use .from here or key?
    return msgId(msg.from.toBytes(), msg.sequenceNumber);
}
|
|
/**
 * Generate a message id for the StrictNoSign policy: the sha256 digest of the
 * message `data`
 */
async function msgIdFnStrictNoSign(msg) {
    const digest = await sha256$1.encode(msg.data);
    return digest;
}
|
|
|
|
/**
 * Compute the per-component score weight contributions (P1..P7) for a single
 * peer, mirroring the peer-score computation but keeping each component
 * separate so it can be exported as metrics.
 *
 * @param peer - peer id string, passed to `params.appSpecificScore`
 * @param pstats - peer stats: `{ topics, ips, behaviourPenalty }`
 * @param params - peer score params (per-topic params, global weights, caps)
 * @param peerIPs - Map of ip -> Set of peer id strings sharing that ip
 * @param topicStrToLabel - Map of topic string -> metric label
 * @returns `{ byTopic, p5w, p6w, p7w, score }` weight contributions and total score
 */
function computeScoreWeights(peer, pstats, params, peerIPs, topicStrToLabel) {
    let score = 0;
    const byTopic = new Map();
    // topic stores
    Object.entries(pstats.topics).forEach(([topic, tstats]) => {
        // Aggregate by known topicLabel or fall back to 'unknown'. This prevents too high cardinality
        const topicLabel = topicStrToLabel.get(topic) ?? 'unknown';
        const topicParams = params.topics[topic];
        if (topicParams === undefined) {
            // we are not scoring this topic
            return;
        }
        let topicScores = byTopic.get(topicLabel);
        if (!topicScores) {
            topicScores = {
                p1w: 0,
                p2w: 0,
                p3w: 0,
                p3bw: 0,
                p4w: 0
            };
            byTopic.set(topicLabel, topicScores);
        }
        let p1w = 0;
        let p2w = 0;
        let p3w = 0;
        let p3bw = 0;
        let p4w = 0;
        // P1: time in Mesh
        if (tstats.inMesh) {
            // FIX: `timeInMeshCap` is an upper bound, so cap with Math.min.
            // The previous Math.max floored the value at the cap instead of capping it.
            const p1 = Math.min(tstats.meshTime / topicParams.timeInMeshQuantum, topicParams.timeInMeshCap);
            p1w += p1 * topicParams.timeInMeshWeight;
        }
        // P2: first message deliveries (capped)
        let p2 = tstats.firstMessageDeliveries;
        if (p2 > topicParams.firstMessageDeliveriesCap) {
            p2 = topicParams.firstMessageDeliveriesCap;
        }
        p2w += p2 * topicParams.firstMessageDeliveriesWeight;
        // P3: mesh message deliveries — quadratic penalty on the deficit below threshold
        if (tstats.meshMessageDeliveriesActive &&
            tstats.meshMessageDeliveries < topicParams.meshMessageDeliveriesThreshold) {
            const deficit = topicParams.meshMessageDeliveriesThreshold - tstats.meshMessageDeliveries;
            const p3 = deficit * deficit;
            p3w += p3 * topicParams.meshMessageDeliveriesWeight;
        }
        // P3b:
        // NOTE: the weight of P3b is negative (validated in validateTopicScoreParams) so this detracts
        const p3b = tstats.meshFailurePenalty;
        p3bw += p3b * topicParams.meshFailurePenaltyWeight;
        // P4: invalid messages
        // NOTE: the weight of P4 is negative (validated in validateTopicScoreParams) so this detracts
        const p4 = tstats.invalidMessageDeliveries * tstats.invalidMessageDeliveries;
        p4w += p4 * topicParams.invalidMessageDeliveriesWeight;
        // update score, mixing with topic weight
        score += (p1w + p2w + p3w + p3bw + p4w) * topicParams.topicWeight;
        topicScores.p1w += p1w;
        topicScores.p2w += p2w;
        topicScores.p3w += p3w;
        topicScores.p3bw += p3bw;
        topicScores.p4w += p4w;
    });
    // apply the topic score cap, if any
    if (params.topicScoreCap > 0 && score > params.topicScoreCap) {
        // FIX: compute the scale factor from the UNCAPPED score before
        // overwriting `score`; previously capF was always cap/cap === 1,
        // so the per-topic contributions were never scaled down.
        const capF = params.topicScoreCap / score;
        score = params.topicScoreCap;
        // Proportionally apply cap to all individual contributions
        for (const ws of byTopic.values()) {
            ws.p1w *= capF;
            ws.p2w *= capF;
            ws.p3w *= capF;
            ws.p3bw *= capF;
            ws.p4w *= capF;
        }
    }
    let p5w = 0;
    let p6w = 0;
    let p7w = 0;
    // P5: application-specific score
    const p5 = params.appSpecificScore(peer);
    p5w += p5 * params.appSpecificWeight;
    // P6: IP colocation factor
    pstats.ips.forEach((ip) => {
        if (params.IPColocationFactorWhitelist.has(ip)) {
            return;
        }
        // P6 has a cliff (IPColocationFactorThreshold)
        // It's only applied if at least that many peers are connected to us from that source IP addr.
        // It is quadratic, and the weight is negative (validated in validatePeerScoreParams)
        const peersInIP = peerIPs.get(ip);
        const numPeersInIP = peersInIP ? peersInIP.size : 0;
        if (numPeersInIP > params.IPColocationFactorThreshold) {
            const surplus = numPeersInIP - params.IPColocationFactorThreshold;
            const p6 = surplus * surplus;
            p6w += p6 * params.IPColocationFactorWeight;
        }
    });
    // P7: behavioural pattern penalty (quadratic, negative weight)
    const p7 = pstats.behaviourPenalty * pstats.behaviourPenalty;
    p7w += p7 * params.behaviourPenaltyWeight;
    score += p5w + p6w + p7w;
    return {
        byTopic,
        p5w,
        p6w,
        p7w,
        score
    };
}
|
|
/**
 * Aggregate per-peer score weight contributions across all known peers into
 * arrays suitable for metric export. Peers without stats contribute zeros so
 * all arrays stay index-aligned with `peerIdStrs`.
 */
function computeAllPeersScoreWeights(peerIdStrs, peerStats, params, peerIPs, topicStrToLabel) {
    const sw = {
        byTopic: new Map(),
        p5w: [],
        p6w: [],
        p7w: [],
        score: []
    };
    for (const peerIdStr of peerIdStrs) {
        const pstats = peerStats.get(peerIdStr);
        if (!pstats) {
            // Unknown peer: record zero contributions to keep arrays aligned
            sw.p5w.push(0);
            sw.p6w.push(0);
            sw.p7w.push(0);
            sw.score.push(0);
            continue;
        }
        const swPeer = computeScoreWeights(peerIdStr, pstats, params, peerIPs, topicStrToLabel);
        for (const [topic, swPeerTopic] of swPeer.byTopic) {
            let swTopic = sw.byTopic.get(topic);
            if (swTopic === undefined) {
                swTopic = {
                    p1w: [],
                    p2w: [],
                    p3w: [],
                    p3bw: [],
                    p4w: []
                };
                sw.byTopic.set(topic, swTopic);
            }
            swTopic.p1w.push(swPeerTopic.p1w);
            swTopic.p2w.push(swPeerTopic.p2w);
            swTopic.p3w.push(swPeerTopic.p3w);
            swTopic.p3bw.push(swPeerTopic.p3bw);
            swTopic.p4w.push(swPeerTopic.p4w);
        }
        sw.p5w.push(swPeer.p5w);
        sw.p6w.push(swPeer.p6w);
        sw.p7w.push(swPeer.p7w);
        sw.score.push(swPeer.score);
    }
    return sw;
}
|
|
|
|
/**
 * Remove up to `ineed` items satisfying `cond` from `superSet` (mutating it),
 * returning the removed items as a new set. Items are taken in iteration order.
 */
function removeItemsFromSet(superSet, ineed, cond = () => true) {
    const taken = new Set();
    if (ineed <= 0) {
        return taken;
    }
    for (const item of superSet) {
        if (taken.size >= ineed) {
            break;
        }
        if (!cond(item)) {
            continue;
        }
        taken.add(item);
        superSet.delete(item);
    }
    return taken;
}
|
|
/**
 * Remove up to `ineed` items from `superSet` (no filtering condition),
 * returning the removed items as a new set
 */
function removeFirstNItemsFromSet(superSet, ineed) {
    return removeItemsFromSet(superSet, ineed);
}
|
|
|
|
/**
 * Error thrown when an abortable operation is interrupted.
 * `type` is always 'aborted'; `code` defaults to 'ABORT_ERR'.
 */
class AbortError extends Error {
    constructor(message, code) {
        // `??` (not default params) so that explicit null also falls back
        const finalMessage = message ?? 'The operation was aborted';
        super(finalMessage);
        this.type = 'aborted';
        this.code = code ?? 'ABORT_ERR';
    }
}
|
|
|
|
/**
 * Coerce `obj` into an iterator: accepts sync iterables, async iterables, or
 * objects that already expose a `next` method (checked in that order).
 * @throws {Error} when `obj` is none of the above (or nullish)
 */
function getIterator(obj) {
    if (obj != null) {
        const makeSyncIterator = obj[Symbol.iterator];
        if (typeof makeSyncIterator === 'function') {
            return makeSyncIterator.call(obj);
        }
        const makeAsyncIterator = obj[Symbol.asyncIterator];
        if (typeof makeAsyncIterator === 'function') {
            return makeAsyncIterator.call(obj);
        }
        if (typeof obj.next === 'function') {
            return obj; // probably an iterator
        }
    }
    throw new Error('argument is not an iterator or iterable');
}
|
|
|
|
// Wrap an iterator to make it abortable, allow cleanup when aborted via onAbort.
// Options: abortMessage/abortCode (used to build the AbortError),
// onAbort(source), onReturnError(err), and returnOnAbort (end quietly on abort).
function abortableSource(source, signal, options) {
    const opts = options ?? {};
    const iterator = getIterator(source);
    async function* abortable() {
        // Set while an iterator.next() is in flight; the signal listener
        // forwards the abort into the pending race via this handler
        let nextAbortHandler;
        const abortHandler = () => {
            if (nextAbortHandler != null)
                nextAbortHandler();
        };
        signal.addEventListener('abort', abortHandler);
        while (true) {
            let result;
            try {
                // Already aborted before asking for the next value
                if (signal.aborted) {
                    const { abortMessage, abortCode } = opts;
                    throw new AbortError(abortMessage, abortCode);
                }
                // Promise that only ever rejects (when the signal fires mid-next)
                const abort = new Promise((resolve, reject) => {
                    nextAbortHandler = () => {
                        const { abortMessage, abortCode } = opts;
                        reject(new AbortError(abortMessage, abortCode));
                    };
                });
                // Race the iterator and the abort signals
                result = await Promise.race([abort, iterator.next()]);
                nextAbortHandler = null;
            }
            catch (err) {
                signal.removeEventListener('abort', abortHandler);
                // Might not have been aborted by a known signal
                const isKnownAborter = err.type === 'aborted' && signal.aborted;
                if (isKnownAborter && (opts.onAbort != null)) {
                    // Do any custom abort handling for the iterator
                    await opts.onAbort(source);
                }
                // End the iterator if it is a generator
                if (typeof iterator.return === 'function') {
                    try {
                        const p = iterator.return();
                        if (p instanceof Promise) { // eslint-disable-line max-depth
                            // Swallow return() rejections unless a handler is provided
                            p.catch(err => {
                                if (opts.onReturnError != null) {
                                    opts.onReturnError(err);
                                }
                            });
                        }
                    }
                    catch (err) {
                        if (opts.onReturnError != null) { // eslint-disable-line max-depth
                            opts.onReturnError(err);
                        }
                    }
                }
                // returnOnAbort: end the wrapped iterator quietly instead of rethrowing
                if (isKnownAborter && opts.returnOnAbort === true) {
                    return;
                }
                throw err;
            }
            if (result.done === true) {
                break;
            }
            yield result.value;
        }
        signal.removeEventListener('abort', abortHandler);
    }
    return abortable();
}
|
|
|
|
/**
 * An outbound pubsub stream: bytes pushed via `push()` are length-prefix
 * encoded and piped into the raw libp2p stream until `close()` aborts the
 * pipeline. Pipe errors are reported through `errCallback`.
 */
class OutboundStream {
    constructor(rawStream, errCallback, opts) {
        this.rawStream = rawStream;
        this.pushable = pushable$1({ objectMode: false });
        this.closeController = new AbortController();
        // Infinity by default: no backpressure limit unless configured
        this.maxBufferSize = opts.maxBufferSize ?? Infinity;
        // pushable -> abortable (ends quietly on close) -> encode -> raw stream
        pipe(abortableSource(this.pushable, this.closeController.signal, { returnOnAbort: true }), encode(), this.rawStream).catch(errCallback);
    }
    get protocol() {
        // TODO remove this non-nullish assertion after https://github.com/libp2p/js-libp2p-interfaces/pull/265 is incorporated
        return this.rawStream.stat.protocol;
    }
    // Queue data to send; throws if the internal buffer exceeds maxBufferSize
    push(data) {
        if (this.pushable.readableLength > this.maxBufferSize) {
            throw Error(`OutboundStream buffer full, size > ${this.maxBufferSize}`);
        }
        this.pushable.push(data);
    }
    close() {
        this.closeController.abort();
        // similar to pushable.end() but clear the internal buffer
        this.pushable.return();
        this.rawStream.close();
    }
}
|
|
/**
 * An inbound pubsub stream: exposes `source`, an abortable async iterator of
 * length-prefix decoded messages read from the raw libp2p stream.
 */
class InboundStream {
    constructor(rawStream) {
        this.rawStream = rawStream;
        this.closeController = new AbortController();
        // Decoded message source; ends quietly (no throw) when close() aborts
        this.source = abortableSource(pipe(this.rawStream, decode()), this.closeController.signal, { returnOnAbort: true });
    }
    close() {
        this.closeController.abort();
        this.rawStream.close();
    }
}
|
|
|
|
// Reverse-mapped numeric enum (TypeScript-style):
// GossipStatusCode.started === 0 and GossipStatusCode[0] === 'started'
var GossipStatusCode;
(function (GossipStatusCode) {
    GossipStatusCode[(GossipStatusCode.started = 0)] = 'started';
    GossipStatusCode[(GossipStatusCode.stopped = 1)] = 'stopped';
})(GossipStatusCode || (GossipStatusCode = {}));
|
|
class GossipSub extends EventEmitter {
|
|
/**
 * Build a GossipSub router. `options` may override any protocol default
 * (D/Dlo/Dhi, heartbeat interval, TTLs, ...), supply custom message-id /
 * data-transform functions, and enable metrics.
 */
constructor(options = {}) {
    super();
    this.multicodecs = [GossipsubIDv11, GossipsubIDv10];
    // State
    this.peers = new Set();
    this.streamsInbound = new Map();
    this.streamsOutbound = new Map();
    /** Ensures outbound streams are created sequentially */
    this.outboundInflightQueue = pushable$1({ objectMode: true });
    /** Direct peers */
    this.direct = new Set();
    /** Floodsub peers */
    this.floodsubPeers = new Set();
    /**
     * Map of peer id and AcceptRequestWhileListEntry
     */
    this.acceptFromWhitelist = new Map();
    /**
     * Map of topics to which peers are subscribed to
     */
    this.topics = new Map();
    /**
     * List of our subscriptions
     */
    this.subscriptions = new Set();
    /**
     * Map of topic meshes
     * topic => peer id set
     */
    this.mesh = new Map();
    /**
     * Map of topics to set of peers. These mesh peers are the ones to which we are publishing without a topic membership
     * topic => peer id set
     */
    this.fanout = new Map();
    /**
     * Map of last publish time for fanout topics
     * topic => last publish time
     */
    this.fanoutLastpub = new Map();
    /**
     * Map of pending messages to gossip
     * peer id => control messages
     */
    this.gossip = new Map();
    /**
     * Map of control messages
     * peer id => control message
     */
    this.control = new Map();
    /**
     * Number of IHAVEs received from peer in the last heartbeat
     */
    this.peerhave = new Map();
    /** Number of messages we have asked from peer in the last heartbeat */
    this.iasked = new Map();
    /** Prune backoff map */
    this.backoff = new Map();
    /**
     * Connection direction cache, marks peers with outbound connections
     * peer id => direction
     */
    this.outbound = new Map();
    this.topicValidators = new Map();
    /**
     * Number of heartbeats since the beginning of time
     * This allows us to amortize some resource cleanup -- eg: backoff cleanup
     */
    this.heartbeatTicks = 0;
    this.components = new Components();
    this.directPeerInitial = null;
    this.status = { code: GossipStatusCode.stopped };
    this.heartbeatTimer = null;
    // Runs one heartbeat and self-reschedules via setTimeout while started
    this.runHeartbeat = () => {
        const timer = this.metrics?.heartbeatDuration.startTimer();
        this.heartbeat()
            .catch((err) => {
            this.log('Error running heartbeat', err);
        })
            .finally(() => {
            if (timer != null) {
                timer();
            }
            // Schedule the next run if still in started status
            if (this.status.code === GossipStatusCode.started) {
                // Clear previous timeout before overwriting `status.heartbeatTimeout`, it should be completed tho.
                clearTimeout(this.status.heartbeatTimeout);
                // NodeJS setInterval function is innexact, calls drift by a few miliseconds on each call.
                // To run the heartbeat precisely setTimeout() must be used recomputing the delay on every loop.
                let msToNextHeartbeat = this.opts.heartbeatInterval - ((Date.now() - this.status.hearbeatStartMs) % this.opts.heartbeatInterval);
                // If too close to next heartbeat, skip one
                if (msToNextHeartbeat < this.opts.heartbeatInterval * 0.25) {
                    msToNextHeartbeat += this.opts.heartbeatInterval;
                    this.metrics?.heartbeatSkipped.inc();
                }
                this.status.heartbeatTimeout = setTimeout(this.runHeartbeat, msToNextHeartbeat);
            }
        });
    };
    // Protocol defaults; user `options` override them, but scoreParams and
    // scoreThresholds are always re-normalized through their factories
    const opts = {
        fallbackToFloodsub: true,
        floodPublish: true,
        doPX: false,
        directPeers: [],
        D: GossipsubD,
        Dlo: GossipsubDlo,
        Dhi: GossipsubDhi,
        Dscore: GossipsubDscore,
        Dout: GossipsubDout,
        Dlazy: GossipsubDlazy,
        heartbeatInterval: GossipsubHeartbeatInterval,
        fanoutTTL: GossipsubFanoutTTL,
        mcacheLength: GossipsubHistoryLength,
        mcacheGossip: GossipsubHistoryGossip,
        seenTTL: GossipsubSeenTTL,
        gossipsubIWantFollowupMs: GossipsubIWantFollowupTime,
        prunePeers: GossipsubPrunePeers,
        pruneBackoff: GossipsubPruneBackoff,
        graftFloodThreshold: GossipsubGraftFloodThreshold,
        opportunisticGraftPeers: GossipsubOpportunisticGraftPeers,
        opportunisticGraftTicks: GossipsubOpportunisticGraftTicks,
        directConnectTicks: GossipsubDirectConnectTicks,
        ...options,
        scoreParams: createPeerScoreParams(options.scoreParams),
        scoreThresholds: createPeerScoreThresholds(options.scoreThresholds)
    };
    this.globalSignaturePolicy = opts.globalSignaturePolicy ?? StrictSign;
    // Also wants to get notified of peers connected using floodsub
    if (opts.fallbackToFloodsub) {
        this.multicodecs.push(FloodsubID);
    }
    // From pubsub
    this.log = logger(opts.debugName ?? 'libp2p:gossipsub');
    // Gossipsub
    this.opts = opts;
    this.direct = new Set(opts.directPeers.map((p) => p.id.toString()));
    this.seenCache = new SimpleTimeCache({ validityMs: opts.seenTTL });
    this.publishedMessageIds = new SimpleTimeCache({ validityMs: opts.seenTTL });
    if (options.msgIdFn) {
        // Use custom function
        this.msgIdFn = options.msgIdFn;
    }
    else {
        // Pick the message-id function that matches the signature policy
        switch (this.globalSignaturePolicy) {
            case StrictSign:
                this.msgIdFn = msgIdFnStrictSign;
                break;
            case StrictNoSign:
                this.msgIdFn = msgIdFnStrictNoSign;
                break;
        }
    }
    if (options.fastMsgIdFn) {
        this.fastMsgIdFn = options.fastMsgIdFn;
        this.fastMsgIdCache = new SimpleTimeCache({ validityMs: opts.seenTTL });
    }
    // By default, gossipsub only provide a browser friendly function to convert Uint8Array message id to string.
    this.msgIdToStrFn = options.msgIdToStrFn ?? messageIdToString;
    this.mcache = options.messageCache || new MessageCache(opts.mcacheGossip, opts.mcacheLength, this.msgIdToStrFn);
    if (options.dataTransform) {
        this.dataTransform = options.dataTransform;
    }
    if (options.metricsRegister) {
        if (!options.metricsTopicStrToLabel) {
            throw Error('Must set metricsTopicStrToLabel with metrics');
        }
        // in theory, each topic has its own meshMessageDeliveriesWindow param
        // however in lodestar, we configure it mostly the same so just pick the max of positive ones
        // (some topics have meshMessageDeliveriesWindow as 0)
        const maxMeshMessageDeliveriesWindowMs = Math.max(...Object.values(opts.scoreParams.topics).map((topicParam) => topicParam.meshMessageDeliveriesWindow), DEFAULT_METRIC_MESH_MESSAGE_DELIVERIES_WINDOWS);
        const metrics = getMetrics(options.metricsRegister, options.metricsTopicStrToLabel, {
            gossipPromiseExpireSec: this.opts.gossipsubIWantFollowupMs / 1000,
            behaviourPenaltyThreshold: opts.scoreParams.behaviourPenaltyThreshold,
            maxMeshMessageDeliveriesWindowSec: maxMeshMessageDeliveriesWindowMs / 1000
        });
        metrics.mcacheSize.addCollect(() => this.onScrapeMetrics(metrics));
        for (const protocol of this.multicodecs) {
            metrics.protocolsEnabled.set({ protocol }, 1);
        }
        this.metrics = metrics;
    }
    else {
        this.metrics = null;
    }
    this.gossipTracer = new IWantTracer(this.opts.gossipsubIWantFollowupMs, this.msgIdToStrFn, this.metrics);
    /**
     * libp2p
     */
    this.score = new PeerScore(this.opts.scoreParams, this.metrics, {
        scoreCacheValidityMs: opts.heartbeatInterval
    });
    this.maxInboundStreams = options.maxInboundStreams;
    this.maxOutboundStreams = options.maxOutboundStreams;
}
|
|
getPeers() {
|
|
return [...this.peers.keys()].map((str) => peerIdFromString(str));
|
|
}
|
|
isStarted() {
|
|
return this.status.code === GossipStatusCode.started;
|
|
}
|
|
// LIFECYCLE METHODS
|
|
/**
|
|
* Pass libp2p components to interested system components
|
|
*/
|
|
async init(components) {
    this.components = components;
    // The peer-score module also needs the libp2p components
    this.score.init(components);
}
|
|
/**
|
|
* Mounts the gossipsub protocol onto the libp2p node and sends our
|
|
* our subscriptions to every peer connected
|
|
*/
|
|
async start() {
    // From pubsub
    if (this.isStarted()) {
        return;
    }
    this.log('starting');
    this.publishConfig = await getPublishConfigFromPeerId(this.globalSignaturePolicy, this.components.getPeerId());
    // Create the outbound inflight queue
    // This ensures that outbound stream creation happens sequentially
    this.outboundInflightQueue = pushable$1({ objectMode: true });
    pipe(this.outboundInflightQueue, async (source) => {
        for await (const { peerId, connection } of source) {
            await this.createOutboundStream(peerId, connection);
        }
    }).catch((e) => this.log.error('outbound inflight queue error', e));
    // set direct peer addresses in the address book
    await Promise.all(this.opts.directPeers.map(async (p) => {
        await this.components.getPeerStore().addressBook.add(p.id, p.addrs);
    }));
    const registrar = this.components.getRegistrar();
    // Incoming streams
    // Called after a peer dials us
    await Promise.all(this.multicodecs.map((multicodec) => registrar.handle(multicodec, this.onIncomingStream.bind(this), {
        maxInboundStreams: this.maxInboundStreams,
        maxOutboundStreams: this.maxOutboundStreams
    })));
    // # How does Gossipsub interact with libp2p? Rough guide from Mar 2022
    //
    // ## Setup:
    // Gossipsub requests libp2p to callback, TBD
    //
    // `this.libp2p.handle()` registers a handler for `/meshsub/1.1.0` and other Gossipsub protocols
    // The handler callback is registered in libp2p Upgrader.protocols map.
    //
    // Upgrader receives an inbound connection from some transport and (`Upgrader.upgradeInbound`):
    // - Adds encryption (NOISE in our case)
    // - Multiplex stream
    // - Create a muxer and register that for each new stream call Upgrader.protocols handler
    //
    // ## Topology
    // - new instance of Topology (unlinked to libp2p) with handlers
    // - registar.register(topology)
    // register protocol with topology
    // Topology callbacks called on connection manager changes
    const topology = createTopology({
        onConnect: this.onPeerConnected.bind(this),
        onDisconnect: this.onPeerDisconnected.bind(this)
    });
    const registrarTopologyIds = await Promise.all(this.multicodecs.map((multicodec) => registrar.register(multicodec, topology)));
    // Schedule to start heartbeat after `GossipsubHeartbeatInitialDelay`
    const heartbeatTimeout = setTimeout(this.runHeartbeat, GossipsubHeartbeatInitialDelay);
    // Then, run heartbeat every `heartbeatInterval` offset by `GossipsubHeartbeatInitialDelay`
    this.status = {
        code: GossipStatusCode.started,
        registrarTopologyIds,
        heartbeatTimeout: heartbeatTimeout,
        hearbeatStartMs: Date.now() + GossipsubHeartbeatInitialDelay
    };
    this.score.start();
    // connect to direct peers
    this.directPeerInitial = setTimeout(() => {
        // Fire-and-forget: dial failures are only logged
        Promise.resolve()
            .then(async () => {
            await Promise.all(Array.from(this.direct).map(async (id) => await this.connect(id)));
        })
            .catch((err) => {
            this.log(err);
        });
    }, GossipsubDirectConnectInitialDelay);
    this.log('started');
}
|
|
/**
|
|
* Unmounts the gossipsub protocol and shuts down every connection
|
|
*/
|
|
async stop() {
|
|
this.log('stopping');
|
|
// From pubsub
|
|
if (this.status.code !== GossipStatusCode.started) {
|
|
return;
|
|
}
|
|
const { registrarTopologyIds } = this.status;
|
|
this.status = { code: GossipStatusCode.stopped };
|
|
// unregister protocol and handlers
|
|
const registrar = this.components.getRegistrar();
|
|
registrarTopologyIds.forEach((id) => registrar.unregister(id));
|
|
this.outboundInflightQueue.end();
|
|
for (const outboundStream of this.streamsOutbound.values()) {
|
|
outboundStream.close();
|
|
}
|
|
this.streamsOutbound.clear();
|
|
for (const inboundStream of this.streamsInbound.values()) {
|
|
inboundStream.close();
|
|
}
|
|
this.streamsInbound.clear();
|
|
this.peers.clear();
|
|
this.subscriptions.clear();
|
|
// Gossipsub
|
|
if (this.heartbeatTimer) {
|
|
this.heartbeatTimer.cancel();
|
|
this.heartbeatTimer = null;
|
|
}
|
|
this.score.stop();
|
|
this.mesh.clear();
|
|
this.fanout.clear();
|
|
this.fanoutLastpub.clear();
|
|
this.gossip.clear();
|
|
this.control.clear();
|
|
this.peerhave.clear();
|
|
this.iasked.clear();
|
|
this.backoff.clear();
|
|
this.outbound.clear();
|
|
this.gossipTracer.clear();
|
|
this.seenCache.clear();
|
|
if (this.fastMsgIdCache)
|
|
this.fastMsgIdCache.clear();
|
|
if (this.directPeerInitial)
|
|
clearTimeout(this.directPeerInitial);
|
|
this.log('stopped');
|
|
}
|
|
/** FOR DEBUG ONLY - Dump peer stats for all peers. Data is cloned, safe to mutate */
|
|
dumpPeerScoreStats() {
|
|
return this.score.dumpPeerScoreStats();
|
|
}
|
|
/**
|
|
* On an inbound stream opened
|
|
*/
|
|
onIncomingStream({ stream, connection }) {
|
|
if (!this.isStarted()) {
|
|
return;
|
|
}
|
|
const peerId = connection.remotePeer;
|
|
// add peer to router
|
|
this.addPeer(peerId, connection.stat.direction);
|
|
// create inbound stream
|
|
this.createInboundStream(peerId, stream);
|
|
// attempt to create outbound stream
|
|
this.outboundInflightQueue.push({ peerId, connection });
|
|
}
|
|
/**
|
|
* Registrar notifies an established connection with pubsub protocol
|
|
*/
|
|
onPeerConnected(peerId, connection) {
|
|
if (!this.isStarted()) {
|
|
return;
|
|
}
|
|
this.addPeer(peerId, connection.stat.direction);
|
|
this.outboundInflightQueue.push({ peerId, connection });
|
|
}
|
|
/**
 * Registrar notifies a closing connection with pubsub protocol:
 * drop the peer and its associated router state
 */
onPeerDisconnected(peerId) {
    this.log('connection ended %p', peerId);
    this.removePeer(peerId);
}
|
|
/**
 * Open an outbound pubsub stream to `peerId` on `connection` (no-op when
 * stopped, the peer is unknown, or a stream already exists). On success,
 * records the negotiated protocol and flushes our current subscriptions.
 */
async createOutboundStream(peerId, connection) {
    if (!this.isStarted()) {
        return;
    }
    const id = peerId.toString();
    if (!this.peers.has(id)) {
        return;
    }
    // TODO make this behavior more robust
    // This behavior is different than for inbound streams
    // If an outbound stream already exists, don't create a new stream
    if (this.streamsOutbound.has(id)) {
        return;
    }
    try {
        const stream = new OutboundStream(await connection.newStream(this.multicodecs), (e) => this.log.error('outbound pipe error', e), { maxBufferSize: this.opts.maxOutboundBufferSize });
        this.log('create outbound stream %p', peerId);
        this.streamsOutbound.set(id, stream);
        const protocol = stream.protocol;
        // Peers that only negotiated floodsub are tracked separately
        if (protocol === FloodsubID) {
            this.floodsubPeers.add(id);
        }
        this.metrics?.peersPerProtocol.inc({ protocol }, 1);
        // Immediately send own subscriptions via the newly attached stream
        if (this.subscriptions.size > 0) {
            this.log('send subscriptions to', id);
            this.sendSubscriptions(id, Array.from(this.subscriptions), true);
        }
    }
    catch (e) {
        // Best-effort: a failed outbound stream is logged, not fatal
        this.log.error('createOutboundStream error', e);
    }
}
|
|
/**
 * Attach an inbound stream from a known peer and start reading RPCs from it.
 *
 * Unlike outbound streams, a newly initiated inbound stream replaces any
 * prior one (the new stream is treated as canonical). No-op when not
 * started or the peer is unknown.
 */
async createInboundStream(peerId, stream) {
    if (!this.isStarted()) {
        return;
    }
    const id = peerId.toString();
    if (!this.peers.has(id)) {
        return;
    }
    // TODO make this behavior more robust
    // This behavior is different than for outbound streams
    // If a peer initiates a new inbound connection
    // we assume that one is the new canonical inbound stream
    const priorInboundStream = this.streamsInbound.get(id);
    if (priorInboundStream !== undefined) {
        this.log('replacing existing inbound steam %s', id);
        priorInboundStream.close();
    }
    this.log('create inbound stream %s', id);
    const inboundStream = new InboundStream(stream);
    this.streamsInbound.set(id, inboundStream);
    // Fire-and-forget read loop; pipePeerReadStream handles its own errors
    // and triggers onPeerDisconnected on fatal stream failure.
    this.pipePeerReadStream(peerId, inboundStream.source).catch((err) => this.log(err));
}
|
|
/**
 * Add a peer to the router.
 *
 * Idempotent: a peer already in `this.peers` is left untouched. Registers
 * the peer with the scoring component and records whether our connection
 * to it is outbound (used for mesh takeover defenses in handleGraft).
 */
addPeer(peerId, direction) {
    const id = peerId.toString();
    if (!this.peers.has(id)) {
        this.log('new peer %p', peerId);
        this.peers.add(id);
        // Add to peer scoring
        this.score.addPeer(id);
        // track the connection direction. Don't allow to unset outbound
        if (!this.outbound.has(id)) {
            this.outbound.set(id, direction === 'outbound');
        }
    }
}
|
|
/**
 * Removes a peer from the router.
 *
 * Closes and forgets both streams, then scrubs the peer id from every
 * routing structure: topics, mesh (with churn metric), fanout, floodsub
 * set, pending gossip/control queues, direction map, scoring, and the
 * accept-from whitelist. No-op for unknown peers.
 */
removePeer(peerId) {
    const id = peerId.toString();
    if (!this.peers.has(id)) {
        return;
    }
    // delete peer
    this.log('delete peer %p', peerId);
    this.peers.delete(id);
    const outboundStream = this.streamsOutbound.get(id);
    const inboundStream = this.streamsInbound.get(id);
    if (outboundStream) {
        this.metrics?.peersPerProtocol.inc({ protocol: outboundStream.protocol }, -1);
    }
    // close streams
    outboundStream?.close();
    inboundStream?.close();
    // remove streams
    this.streamsOutbound.delete(id);
    this.streamsInbound.delete(id);
    // remove peer from topics map
    for (const peers of this.topics.values()) {
        peers.delete(id);
    }
    // Remove this peer from the mesh
    for (const [topicStr, peers] of this.mesh) {
        if (peers.delete(id) === true) {
            this.metrics?.onRemoveFromMesh(topicStr, ChurnReason.Dc, 1);
        }
    }
    // Remove this peer from the fanout
    for (const peers of this.fanout.values()) {
        peers.delete(id);
    }
    // Remove from floodsubPeers
    this.floodsubPeers.delete(id);
    // Remove from gossip mapping
    this.gossip.delete(id);
    // Remove from control mapping
    this.control.delete(id);
    // Remove from outbound-direction mapping.
    // NOTE(review): the original comment said "backoff mapping", but this
    // deletes from `this.outbound`; backoff entries are expired lazily in
    // clearBackoff().
    this.outbound.delete(id);
    // Remove from peer scoring
    this.score.removePeer(id);
    this.acceptFromWhitelist.delete(id);
}
|
|
// API METHODS
|
|
get started() {
|
|
return this.status.code === GossipStatusCode.started;
|
|
}
|
|
/**
|
|
* Get a the peer-ids in a topic mesh
|
|
*/
|
|
getMeshPeers(topic) {
|
|
const peersInTopic = this.mesh.get(topic);
|
|
return peersInTopic ? Array.from(peersInTopic) : [];
|
|
}
|
|
/**
|
|
* Get a list of the peer-ids that are subscribed to one topic.
|
|
*/
|
|
getSubscribers(topic) {
|
|
const peersInTopic = this.topics.get(topic);
|
|
return (peersInTopic ? Array.from(peersInTopic) : []).map((str) => peerIdFromString(str));
|
|
}
|
|
/**
|
|
* Get the list of topics which the peer is subscribed to.
|
|
*/
|
|
getTopics() {
|
|
return Array.from(this.subscriptions);
|
|
}
|
|
// TODO: Reviewing Pubsub API
|
|
// MESSAGE METHODS
|
|
/**
 * Responsible for processing each RPC message received by other peers.
 *
 * Consumes the peer's inbound stream: each chunk is decoded as an RPC and
 * dispatched to handleReceivedRpc. Per-message decode/handler errors are
 * logged and swallowed so one bad message cannot close the stream; a
 * failure of the pipe itself disconnects the peer.
 */
async pipePeerReadStream(peerId, stream) {
    try {
        await pipe(stream, async (source) => {
            for await (const data of source) {
                try {
                    // TODO: Check max gossip message size, before decodeRpc()
                    const rpcBytes = data.subarray();
                    // Note: This function may throw, it must be wrapped in a try {} catch {} to prevent closing the stream.
                    // TODO: What should we do if the entire RPC is invalid?
                    const rpc = RPC.decode(rpcBytes);
                    this.metrics?.onRpcRecv(rpc, rpcBytes.length);
                    // Since processRpc may be overridden entirely in unsafe ways,
                    // the simplest/safest option here is to wrap in a function and capture all errors
                    // to prevent a top-level unhandled exception
                    // This processing of rpc messages should happen without awaiting full validation/execution of prior messages
                    if (this.opts.awaitRpcHandler) {
                        // Back-pressure mode: next RPC is only read after this one is handled.
                        await this.handleReceivedRpc(peerId, rpc);
                    }
                    else {
                        // Fire-and-forget mode: handler errors are logged, not propagated.
                        this.handleReceivedRpc(peerId, rpc).catch((err) => this.log(err));
                    }
                }
                catch (e) {
                    this.log(e);
                }
            }
        });
    }
    catch (err) {
        // Stream-level failure: log and tear down all state for this peer.
        this.log.error(err);
        this.onPeerDisconnected(peerId);
    }
}
|
|
/**
 * Handles an rpc request from a peer.
 *
 * Order of processing: graylist check, subscriptions (with a
 * 'subscription-change' event), messages (optionally awaited), then
 * control messages.
 */
async handleReceivedRpc(from, rpc) {
    // Check if peer is graylisted in which case we ignore the event
    if (!this.acceptFrom(from.toString())) {
        this.log('received message from unacceptable peer %p', from);
        this.metrics?.rpcRecvNotAccepted.inc();
        return;
    }
    this.log('rpc from %p', from);
    // Handle received subscriptions
    if (rpc.subscriptions && rpc.subscriptions.length > 0) {
        // update peer subscriptions
        rpc.subscriptions.forEach((subOpt) => {
            this.handleReceivedSubscription(from, subOpt);
        });
        this.dispatchEvent(new CustomEvent('subscription-change', {
            detail: {
                peerId: from,
                // NOTE(review): filter uses strict `!== null`, so entries with
                // topic === undefined pass the filter and fall back to '' below
                // — presumably intentional; confirm against the protobuf defaults.
                subscriptions: rpc.subscriptions
                    .filter((sub) => sub.topic !== null)
                    .map((sub) => {
                    return {
                        topic: sub.topic ?? '',
                        subscribe: Boolean(sub.subscribe)
                    };
                })
            }
        }));
    }
    // Handle messages
    // TODO: (up to limit)
    if (rpc.messages) {
        for (const message of rpc.messages) {
            const handleReceivedMessagePromise = this.handleReceivedMessage(from, message)
                // Should never throw, but handle just in case
                .catch((err) => this.log(err));
            if (this.opts.awaitRpcMessageHandler) {
                await handleReceivedMessagePromise;
            }
        }
    }
    // Handle control messages
    if (rpc.control) {
        await this.handleControlMessage(from.toString(), rpc.control);
    }
}
|
|
/**
 * Handles a subscription change from a peer.
 *
 * Adds or removes the peer from the per-topic subscriber set, creating
 * the set on first sight of the topic. Entries with a null/undefined
 * topic are ignored.
 */
handleReceivedSubscription(from, subOpt) {
    // `== null` intentionally matches both null and undefined
    if (subOpt.topic == null) {
        return;
    }
    this.log('subscription update from %p topic %s', from, subOpt.topic);
    let topicSet = this.topics.get(subOpt.topic);
    if (topicSet == null) {
        topicSet = new Set();
        this.topics.set(subOpt.topic, topicSet);
    }
    if (subOpt.subscribe) {
        // subscribe peer to new topic
        topicSet.add(from.toString());
    }
    else {
        // unsubscribe from existing topic
        topicSet.delete(from.toString());
    }
    // TODO: rust-libp2p has A LOT more logic here
}
|
|
/**
 * Handles a newly received message from an RPC.
 * May forward to all peers in the mesh.
 *
 * Validates the message, then branches on the result:
 * - duplicate: report to scoring and the message cache, stop.
 * - invalid: penalize the sender (and duplicate senders when the msg id
 *   is known), record metrics, stop.
 * - valid: record delivery with scoring/gossip promises, cache the
 *   message, emit events to subscribed listeners, and (unless async
 *   validation is on) forward to mesh peers.
 */
async handleReceivedMessage(from, rpcMsg) {
    this.metrics?.onMsgRecvPreValidation(rpcMsg.topic);
    const validationResult = await this.validateReceivedMessage(from, rpcMsg);
    this.metrics?.onMsgRecvResult(rpcMsg.topic, validationResult.code);
    switch (validationResult.code) {
        case MessageStatus.duplicate:
            // Report the duplicate
            this.score.duplicateMessage(from.toString(), validationResult.msgIdStr, rpcMsg.topic);
            this.mcache.observeDuplicate(validationResult.msgIdStr, from.toString());
            return;
        case MessageStatus.invalid:
            // invalid messages received
            // metrics.register_invalid_message(&raw_message.topic)
            // Tell peer_score about reject
            // Reject the original source, and any duplicates we've seen from other peers.
            if (validationResult.msgIdStr) {
                const msgIdStr = validationResult.msgIdStr;
                this.score.rejectMessage(from.toString(), msgIdStr, rpcMsg.topic, validationResult.reason);
                this.gossipTracer.rejectMessage(msgIdStr, validationResult.reason);
            }
            else {
                // No message id could be computed — penalize the sender only.
                this.score.rejectInvalidMessage(from.toString(), rpcMsg.topic);
            }
            this.metrics?.onMsgRecvInvalid(rpcMsg.topic, validationResult);
            return;
        case MessageStatus.valid:
            // Tells score that message arrived (but is maybe not fully validated yet).
            // Consider the message as delivered for gossip promises.
            this.score.validateMessage(validationResult.messageId.msgIdStr);
            this.gossipTracer.deliverMessage(validationResult.messageId.msgIdStr);
            // Add the message to our memcache
            // if no validation is required, mark the message as validated
            this.mcache.put(validationResult.messageId, rpcMsg, !this.opts.asyncValidation);
            // Dispatch the message to the user if we are subscribed to the topic
            if (this.subscriptions.has(rpcMsg.topic)) {
                const isFromSelf = this.components.getPeerId().equals(from);
                // Own messages are only re-emitted when opts.emitSelf is set.
                if (!isFromSelf || this.opts.emitSelf) {
                    super.dispatchEvent(new CustomEvent('gossipsub:message', {
                        detail: {
                            propagationSource: from,
                            msgId: validationResult.messageId.msgIdStr,
                            msg: validationResult.msg
                        }
                    }));
                    // TODO: Add option to switch between emit per topic or all messages in one
                    super.dispatchEvent(new CustomEvent('message', { detail: validationResult.msg }));
                }
            }
            // Forward the message to mesh peers, if no validation is required
            // If asyncValidation is ON, expect the app layer to call reportMessageValidationResult(), then forward
            if (!this.opts.asyncValidation) {
                // TODO: in rust-libp2p
                // .forward_msg(&msg_id, raw_message, Some(propagation_source))
                this.forwardMessage(validationResult.messageId.msgIdStr, rpcMsg, from.toString());
            }
    }
}
|
|
/**
|
|
* Handles a newly received message from an RPC.
|
|
* May forward to all peers in the mesh.
|
|
*/
|
|
async validateReceivedMessage(propagationSource, rpcMsg) {
|
|
// Fast message ID stuff
|
|
const fastMsgIdStr = this.fastMsgIdFn?.(rpcMsg);
|
|
const msgIdCached = fastMsgIdStr ? this.fastMsgIdCache?.get(fastMsgIdStr) : undefined;
|
|
if (msgIdCached) {
|
|
// This message has been seen previously. Ignore it
|
|
return { code: MessageStatus.duplicate, msgIdStr: msgIdCached };
|
|
}
|
|
// Perform basic validation on message and convert to RawGossipsubMessage for fastMsgIdFn()
|
|
const validationResult = await validateToRawMessage(this.globalSignaturePolicy, rpcMsg);
|
|
if (!validationResult.valid) {
|
|
return { code: MessageStatus.invalid, reason: RejectReason.Error, error: validationResult.error };
|
|
}
|
|
const msg = validationResult.message;
|
|
// Try and perform the data transform to the message. If it fails, consider it invalid.
|
|
try {
|
|
if (this.dataTransform) {
|
|
msg.data = this.dataTransform.inboundTransform(rpcMsg.topic, msg.data);
|
|
}
|
|
}
|
|
catch (e) {
|
|
this.log('Invalid message, transform failed', e);
|
|
return { code: MessageStatus.invalid, reason: RejectReason.Error, error: ValidateError.TransformFailed };
|
|
}
|
|
// TODO: Check if message is from a blacklisted source or propagation origin
|
|
// - Reject any message from a blacklisted peer
|
|
// - Also reject any message that originated from a blacklisted peer
|
|
// - reject messages claiming to be from ourselves but not locally published
|
|
// Calculate the message id on the transformed data.
|
|
const msgId = await this.msgIdFn(msg);
|
|
const msgIdStr = this.msgIdToStrFn(msgId);
|
|
const messageId = { msgId, msgIdStr };
|
|
// Add the message to the duplicate caches
|
|
if (fastMsgIdStr)
|
|
this.fastMsgIdCache?.put(fastMsgIdStr, msgIdStr);
|
|
if (this.seenCache.has(msgIdStr)) {
|
|
return { code: MessageStatus.duplicate, msgIdStr };
|
|
}
|
|
else {
|
|
this.seenCache.put(msgIdStr);
|
|
}
|
|
// (Optional) Provide custom validation here with dynamic validators per topic
|
|
// NOTE: This custom topicValidator() must resolve fast (< 100ms) to allow scores
|
|
// to not penalize peers for long validation times.
|
|
const topicValidator = this.topicValidators.get(rpcMsg.topic);
|
|
if (topicValidator != null) {
|
|
let acceptance;
|
|
// Use try {} catch {} in case topicValidator() is synchronous
|
|
try {
|
|
acceptance = await topicValidator(msg.topic, msg, propagationSource);
|
|
}
|
|
catch (e) {
|
|
const errCode = e.code;
|
|
if (errCode === ERR_TOPIC_VALIDATOR_IGNORE)
|
|
acceptance = MessageAcceptance.Ignore;
|
|
if (errCode === ERR_TOPIC_VALIDATOR_REJECT)
|
|
acceptance = MessageAcceptance.Reject;
|
|
else
|
|
acceptance = MessageAcceptance.Ignore;
|
|
}
|
|
if (acceptance !== MessageAcceptance.Accept) {
|
|
return { code: MessageStatus.invalid, reason: rejectReasonFromAcceptance(acceptance), msgIdStr };
|
|
}
|
|
}
|
|
return { code: MessageStatus.valid, messageId, msg };
|
|
}
|
|
/**
 * Return score of a peer.
 *
 * @param peerId - peer id string
 * @returns the current numeric score from the scoring component
 */
getScore(peerId) {
    return this.score.score(peerId);
}
|
|
/**
|
|
* Send an rpc object to a peer with subscriptions
|
|
*/
|
|
sendSubscriptions(toPeer, topics, subscribe) {
|
|
this.sendRpc(toPeer, {
|
|
subscriptions: topics.map((topic) => ({ topic, subscribe })),
|
|
messages: []
|
|
});
|
|
}
|
|
/**
 * Handles an rpc control message from a peer.
 *
 * Dispatches IHAVE/IWANT/GRAFT/PRUNE sub-messages and, when the handlers
 * produced anything to say back (IWANTs, messages, PRUNEs), sends a
 * single response RPC to the peer.
 */
async handleControlMessage(id, controlMsg) {
    if (controlMsg === undefined) {
        return;
    }
    // IHAVE from the peer -> IWANT requests we want to send back
    const iwant = controlMsg.ihave ? this.handleIHave(id, controlMsg.ihave) : [];
    // IWANT from the peer -> cached messages we can provide
    const ihave = controlMsg.iwant ? this.handleIWant(id, controlMsg.iwant) : [];
    // GRAFT from the peer -> PRUNEs for grafts we refuse
    const prune = controlMsg.graft ? await this.handleGraft(id, controlMsg.graft) : [];
    controlMsg.prune && (await this.handlePrune(id, controlMsg.prune));
    if (!iwant.length && !ihave.length && !prune.length) {
        return;
    }
    // `ihave` here is the list of full messages to send, not an IHAVE control entry.
    this.sendRpc(id, createGossipRpc(ihave, { iwant, prune }));
}
|
|
/**
 * Whether to accept a message from a peer.
 *
 * Direct peers are always accepted. Well-scored peers get a short-lived
 * whitelist entry (bounded by message count and time) so the relatively
 * expensive score lookup is skipped for bursts of messages. Otherwise the
 * peer must score above the graylist threshold.
 */
acceptFrom(id) {
    if (this.direct.has(id)) {
        return true;
    }
    const now = Date.now();
    const entry = this.acceptFromWhitelist.get(id);
    // Fast path: still within the whitelist budget — accept without re-scoring.
    if (entry && entry.messagesAccepted < ACCEPT_FROM_WHITELIST_MAX_MESSAGES && entry.acceptUntil >= now) {
        entry.messagesAccepted += 1;
        return true;
    }
    const score = this.score.score(id);
    if (score >= ACCEPT_FROM_WHITELIST_THRESHOLD_SCORE) {
        // peer is unlikely to be able to drop its score to `graylistThreshold`
        // after 128 messages or 1s
        this.acceptFromWhitelist.set(id, {
            messagesAccepted: 0,
            acceptUntil: now + ACCEPT_FROM_WHITELIST_DURATION_MS
        });
    }
    else {
        this.acceptFromWhitelist.delete(id);
    }
    return score >= this.opts.scoreThresholds.graylistThreshold;
}
|
|
/**
 * Handles IHAVE messages.
 *
 * Applies flood protection (score gate, per-heartbeat IHAVE count cap,
 * per-heartbeat asked-messages cap), collects the advertised message ids
 * we have not seen, and returns at most one IWANT control entry asking
 * for a random subset of them. Also registers gossip promises for the
 * requested ids.
 *
 * @returns an array with zero or one `{ messageIDs }` IWANT entries
 */
handleIHave(id, ihave) {
    if (!ihave.length) {
        return [];
    }
    // we ignore IHAVE gossip from any peer whose score is below the gossips threshold
    const score = this.score.score(id);
    if (score < this.opts.scoreThresholds.gossipThreshold) {
        this.log('IHAVE: ignoring peer %s with score below threshold [ score = %d ]', id, score);
        this.metrics?.ihaveRcvIgnored.inc({ reason: IHaveIgnoreReason.LowScore });
        return [];
    }
    // IHAVE flood protection
    const peerhave = (this.peerhave.get(id) ?? 0) + 1;
    this.peerhave.set(id, peerhave);
    if (peerhave > GossipsubMaxIHaveMessages) {
        this.log('IHAVE: peer %s has advertised too many times (%d) within this heartbeat interval; ignoring', id, peerhave);
        this.metrics?.ihaveRcvIgnored.inc({ reason: IHaveIgnoreReason.MaxIhave });
        return [];
    }
    const iasked = this.iasked.get(id) ?? 0;
    if (iasked >= GossipsubMaxIHaveLength) {
        this.log('IHAVE: peer %s has already advertised too many messages (%d); ignoring', id, iasked);
        this.metrics?.ihaveRcvIgnored.inc({ reason: IHaveIgnoreReason.MaxIasked });
        return [];
    }
    // string msgId => msgId
    const iwant = new Map();
    ihave.forEach(({ topicID, messageIDs }) => {
        // only consider topics we are actually meshed in
        if (!topicID || !messageIDs || !this.mesh.has(topicID)) {
            return;
        }
        let idonthave = 0;
        messageIDs.forEach((msgId) => {
            const msgIdStr = this.msgIdToStrFn(msgId);
            if (!this.seenCache.has(msgIdStr)) {
                iwant.set(msgIdStr, msgId);
                idonthave++;
            }
        });
        this.metrics?.onIhaveRcv(topicID, messageIDs.length, idonthave);
    });
    if (!iwant.size) {
        return [];
    }
    let iask = iwant.size;
    // clamp so that total asked this heartbeat never exceeds GossipsubMaxIHaveLength
    if (iask + iasked > GossipsubMaxIHaveLength) {
        iask = GossipsubMaxIHaveLength - iasked;
    }
    this.log('IHAVE: Asking for %d out of %d messages from %s', iask, iwant.size, id);
    let iwantList = Array.from(iwant.values());
    // ask in random order
    shuffle(iwantList);
    // truncate to the messages we are actually asking for and update the iasked counter
    iwantList = iwantList.slice(0, iask);
    this.iasked.set(id, iasked + iask);
    // the tracer will penalize the peer if it never delivers these
    this.gossipTracer.addPromise(id, iwantList);
    return [
        {
            messageIDs: iwantList
        }
    ];
}
|
|
/**
 * Handles IWANT messages.
 * Returns messages to send back to peer.
 *
 * Ignores requests from peers below the gossip threshold, skips ids not
 * in the message cache, and caps per-peer retransmissions of the same
 * message at GossipsubGossipRetransmission.
 */
handleIWant(id, iwant) {
    if (!iwant.length) {
        return [];
    }
    // we don't respond to IWANT requests from any per whose score is below the gossip threshold
    const score = this.score.score(id);
    if (score < this.opts.scoreThresholds.gossipThreshold) {
        this.log('IWANT: ignoring peer %s with score below threshold [score = %d]', id, score);
        return [];
    }
    // msgIdStr -> cached message; Map keeps responses deduplicated
    const ihave = new Map();
    const iwantByTopic = new Map();
    let iwantDonthave = 0;
    iwant.forEach(({ messageIDs }) => {
        messageIDs &&
            messageIDs.forEach((msgId) => {
                const msgIdStr = this.msgIdToStrFn(msgId);
                // getWithIWantCount also increments the per-peer request counter
                const entry = this.mcache.getWithIWantCount(msgIdStr, id);
                if (entry == null) {
                    iwantDonthave++;
                    return;
                }
                iwantByTopic.set(entry.msg.topic, 1 + (iwantByTopic.get(entry.msg.topic) ?? 0));
                if (entry.count > GossipsubGossipRetransmission) {
                    this.log('IWANT: Peer %s has asked for message %s too many times: ignoring request', id, msgId);
                    return;
                }
                ihave.set(msgIdStr, entry.msg);
            });
    });
    this.metrics?.onIwantRcv(iwantByTopic, iwantDonthave);
    if (!ihave.size) {
        this.log('IWANT: Could not provide any wanted messages to %s', id);
        return [];
    }
    this.log('IWANT: Sending %d messages to %s', ihave.size, id);
    return Array.from(ihave.values());
}
|
|
/**
 * Handles Graft messages.
 *
 * For each requested topic, either accepts the graft (adds the peer to
 * our mesh) or records a PRUNE response. Grafts are refused for unknown
 * topics, direct peers, backed-off peers (with behavioral penalties),
 * negatively scored peers, and — once the mesh is at Dhi — peers we did
 * not dial ourselves (mesh takeover defense). PX is suppressed whenever
 * the peer looks suspicious.
 *
 * @returns PRUNE control entries for every refused topic
 */
async handleGraft(id, graft) {
    const prune = [];
    const score = this.score.score(id);
    const now = Date.now();
    let doPX = this.opts.doPX;
    graft.forEach(({ topicID }) => {
        if (!topicID) {
            return;
        }
        const peersInMesh = this.mesh.get(topicID);
        if (!peersInMesh) {
            // don't do PX when there is an unknown topic to avoid leaking our peers
            doPX = false;
            // spam hardening: ignore GRAFTs for unknown topics
            return;
        }
        // check if peer is already in the mesh; if so do nothing
        if (peersInMesh.has(id)) {
            return;
        }
        // we don't GRAFT to/from direct peers; complain loudly if this happens
        if (this.direct.has(id)) {
            this.log('GRAFT: ignoring request from direct peer %s', id);
            // this is possibly a bug from a non-reciprical configuration; send a PRUNE
            prune.push(topicID);
            // but don't px
            doPX = false;
            return;
        }
        // make sure we are not backing off that peer
        const expire = this.backoff.get(topicID)?.get(id);
        if (typeof expire === 'number' && now < expire) {
            this.log('GRAFT: ignoring backed off peer %s', id);
            // add behavioral penalty
            this.score.addPenalty(id, 1, ScorePenalty.GraftBackoff);
            // no PX
            doPX = false;
            // check the flood cutoff -- is the GRAFT coming too fast?
            const floodCutoff = expire + this.opts.graftFloodThreshold - this.opts.pruneBackoff;
            if (now < floodCutoff) {
                // extra penalty
                this.score.addPenalty(id, 1, ScorePenalty.GraftBackoff);
            }
            // refresh the backoff
            this.addBackoff(id, topicID);
            prune.push(topicID);
            return;
        }
        // check the score
        if (score < 0) {
            // we don't GRAFT peers with negative score
            this.log('GRAFT: ignoring peer %s with negative score: score=%d, topic=%s', id, score, topicID);
            // we do send them PRUNE however, because it's a matter of protocol correctness
            prune.push(topicID);
            // but we won't PX to them
            doPX = false;
            // add/refresh backoff so that we don't reGRAFT too early even if the score decays
            this.addBackoff(id, topicID);
            return;
        }
        // check the number of mesh peers; if it is at (or over) Dhi, we only accept grafts
        // from peers with outbound connections; this is a defensive check to restrict potential
        // mesh takeover attacks combined with love bombing
        if (peersInMesh.size >= this.opts.Dhi && !this.outbound.get(id)) {
            prune.push(topicID);
            this.addBackoff(id, topicID);
            return;
        }
        // graft accepted: add the mesh link and inform scoring/metrics
        this.log('GRAFT: Add mesh link from %s in %s', id, topicID);
        this.score.graft(id, topicID);
        peersInMesh.add(id);
        this.metrics?.onAddToMesh(topicID, InclusionReason.Subscribed, 1);
    });
    if (!prune.length) {
        return [];
    }
    return await Promise.all(prune.map((topic) => this.makePrune(id, topic, doPX)));
}
|
|
/**
 * Handles Prune messages.
 *
 * For each pruned topic: removes the peer from our mesh, applies the
 * peer-supplied backoff (seconds) or our default, and — if the peer's
 * score permits — attempts peer-exchange connections to the peers it
 * suggested.
 */
async handlePrune(id, prune) {
    const score = this.score.score(id);
    for (const { topicID, backoff, peers } of prune) {
        if (topicID == null) {
            continue;
        }
        const peersInMesh = this.mesh.get(topicID);
        if (!peersInMesh) {
            // NOTE(review): `return` aborts processing of all remaining prune
            // entries, not just this topic — `continue` looks intended, but this
            // matches the upstream implementation; confirm before changing.
            return;
        }
        this.log('PRUNE: Remove mesh link to %s in %s', id, topicID);
        this.score.prune(id, topicID);
        if (peersInMesh.has(id)) {
            peersInMesh.delete(id);
            this.metrics?.onRemoveFromMesh(topicID, ChurnReason.Unsub, 1);
        }
        // is there a backoff specified by the peer? if so obey it
        if (typeof backoff === 'number' && backoff > 0) {
            // peer-provided backoff is in seconds; convert to ms
            this.doAddBackoff(id, topicID, backoff * 1000);
        }
        else {
            this.addBackoff(id, topicID);
        }
        // PX
        if (peers && peers.length) {
            // we ignore PX from peers with insufficient scores
            if (score < this.opts.scoreThresholds.acceptPXThreshold) {
                this.log('PRUNE: ignoring PX from peer %s with insufficient score [score = %d, topic = %s]', id, score, topicID);
                continue;
            }
            await this.pxConnect(peers);
        }
    }
}
|
|
/**
 * Add standard backoff log for a peer in a topic.
 *
 * Convenience wrapper around doAddBackoff using the configured
 * `pruneBackoff` duration.
 */
addBackoff(id, topic) {
    this.doAddBackoff(id, topic, this.opts.pruneBackoff);
}
|
|
/**
|
|
* Add backoff expiry interval for a peer in a topic
|
|
*
|
|
* @param id
|
|
* @param topic
|
|
* @param interval - backoff duration in milliseconds
|
|
*/
|
|
doAddBackoff(id, topic, interval) {
|
|
let backoff = this.backoff.get(topic);
|
|
if (!backoff) {
|
|
backoff = new Map();
|
|
this.backoff.set(topic, backoff);
|
|
}
|
|
const expire = Date.now() + interval;
|
|
const existingExpire = backoff.get(id) ?? 0;
|
|
if (existingExpire < expire) {
|
|
backoff.set(id, expire);
|
|
}
|
|
}
|
|
/**
|
|
* Apply penalties from broken IHAVE/IWANT promises
|
|
*/
|
|
applyIwantPenalties() {
|
|
this.gossipTracer.getBrokenPromises().forEach((count, p) => {
|
|
this.log("peer %s didn't follow up in %d IWANT requests; adding penalty", p, count);
|
|
this.score.addPenalty(p, count, ScorePenalty.BrokenPromise);
|
|
});
|
|
}
|
|
/**
 * Clear expired backoff expiries.
 *
 * Runs only every GossipsubPruneBackoffTicks heartbeats; drops expired
 * per-peer entries and removes topic maps that become empty.
 */
clearBackoff() {
    // we only clear once every GossipsubPruneBackoffTicks ticks to avoid iterating over the maps too much
    if (this.heartbeatTicks % GossipsubPruneBackoffTicks !== 0) {
        return;
    }
    const now = Date.now();
    this.backoff.forEach((backoff, topic) => {
        backoff.forEach((expire, id) => {
            if (expire < now) {
                backoff.delete(id);
            }
        });
        if (backoff.size === 0) {
            this.backoff.delete(topic);
        }
    });
}
|
|
/**
|
|
* Maybe reconnect to direct peers
|
|
*/
|
|
async directConnect() {
|
|
const toconnect = [];
|
|
this.direct.forEach((id) => {
|
|
if (!this.streamsOutbound.has(id)) {
|
|
toconnect.push(id);
|
|
}
|
|
});
|
|
await Promise.all(toconnect.map(async (id) => await this.connect(id)));
|
|
}
|
|
/**
 * Maybe attempt connection given signed peer records.
 *
 * Samples at most `prunePeers` suggestions, skips peers we already know,
 * verifies any signed peer record (identity match + address-book
 * acceptance) before trusting it, then dials the survivors in parallel.
 */
async pxConnect(peers) {
    if (peers.length > this.opts.prunePeers) {
        shuffle(peers);
        peers = peers.slice(0, this.opts.prunePeers);
    }
    const toconnect = [];
    await Promise.all(peers.map(async (pi) => {
        if (!pi.peerID) {
            return;
        }
        const p = peerIdFromBytes(pi.peerID).toString();
        if (this.peers.has(p)) {
            return;
        }
        // no signed record: dial on the strength of the peer id alone
        if (!pi.signedPeerRecord) {
            toconnect.push(p);
            return;
        }
        // The peer sent us a signed record
        // This is not a record from the peer who sent the record, but another peer who is connected with it
        // Ensure that it is valid
        try {
            const envelope = await RecordEnvelope.openAndCertify(pi.signedPeerRecord, 'libp2p-peer-record');
            const eid = envelope.peerId;
            if (!envelope.peerId.equals(p)) {
                this.log("bogus peer record obtained through px: peer ID %p doesn't match expected peer %p", eid, p);
                return;
            }
            if (!(await this.components.getPeerStore().addressBook.consumePeerRecord(envelope))) {
                this.log('bogus peer record obtained through px: could not add peer record to address book');
                return;
            }
            toconnect.push(p);
        }
        catch (e) {
            // invalid signature or malformed record: drop silently (logged only)
            this.log('bogus peer record obtained through px: invalid signature or not a peer record');
        }
    }));
    if (!toconnect.length) {
        return;
    }
    await Promise.all(toconnect.map(async (id) => await this.connect(id)));
}
|
|
/**
|
|
* Connect to a peer using the gossipsub protocol
|
|
*/
|
|
async connect(id) {
|
|
this.log('Initiating connection with %s', id);
|
|
const peerId = peerIdFromString(id);
|
|
const connection = await this.components.getConnectionManager().openConnection(peerId);
|
|
for (const multicodec of this.multicodecs) {
|
|
for (const topology of this.components.getRegistrar().getTopologies(multicodec)) {
|
|
topology.onConnect(peerId, connection);
|
|
}
|
|
}
|
|
}
|
|
/**
 * Subscribes to a topic.
 *
 * Announces the new subscription to every connected peer (only on first
 * subscribe) and joins the topic mesh. Throws if the router has not been
 * started.
 */
subscribe(topic) {
    if (this.status.code !== GossipStatusCode.started) {
        throw new Error('Pubsub has not started');
    }
    if (!this.subscriptions.has(topic)) {
        this.subscriptions.add(topic);
        for (const peerId of this.peers.keys()) {
            this.sendSubscriptions(peerId, [topic], true);
        }
    }
    // join is idempotent for topics we are already meshed in
    this.join(topic);
}
|
|
/**
 * Unsubscribe from a topic.
 *
 * Announces the unsubscription to every connected peer (only if we were
 * actually subscribed) and leaves the topic mesh. Throws if the router
 * has not been started.
 */
unsubscribe(topic) {
    if (this.status.code !== GossipStatusCode.started) {
        throw new Error('Pubsub is not started');
    }
    const wasSubscribed = this.subscriptions.delete(topic);
    this.log('unsubscribe from %s - am subscribed %s', topic, wasSubscribed);
    if (wasSubscribed) {
        for (const peerId of this.peers.keys()) {
            this.sendSubscriptions(peerId, [topic], false);
        }
    }
    // leave() is async (sends PRUNEs); errors are logged, not propagated
    this.leave(topic).catch((err) => {
        this.log(err);
    });
}
|
|
/**
 * Join topic.
 *
 * Builds the initial mesh for the topic: first promotes eligible fanout
 * peers (not direct, non-negative score), then tops up to D peers chosen
 * randomly from the topic's subscribers, and GRAFTs each of them. No-op
 * when already in the mesh. Throws if not started.
 */
join(topic) {
    if (this.status.code !== GossipStatusCode.started) {
        throw new Error('Gossipsub has not started');
    }
    // if we are already in the mesh, return
    if (this.mesh.has(topic)) {
        return;
    }
    this.log('JOIN %s', topic);
    this.metrics?.onJoin(topic);
    const toAdd = new Set();
    // check if we have mesh_n peers in fanout[topic] and add them to the mesh if we do,
    // removing the fanout entry.
    const fanoutPeers = this.fanout.get(topic);
    if (fanoutPeers) {
        // Remove fanout entry and the last published time
        this.fanout.delete(topic);
        this.fanoutLastpub.delete(topic);
        // remove explicit peers, peers with negative scores, and backoffed peers
        fanoutPeers.forEach((id) => {
            // TODO:rust-libp2p checks `self.backoffs.is_backoff_with_slack()`
            if (!this.direct.has(id) && this.score.score(id) >= 0) {
                toAdd.add(id);
            }
        });
        this.metrics?.onAddToMesh(topic, InclusionReason.Fanout, toAdd.size);
    }
    // check if we need to get more peers, which we randomly select
    if (toAdd.size < this.opts.D) {
        const fanoutCount = toAdd.size;
        const newPeers = this.getRandomGossipPeers(topic, this.opts.D, (id) =>
        // filter direct peers and peers with negative score
        !toAdd.has(id) && !this.direct.has(id) && this.score.score(id) >= 0);
        newPeers.forEach((peer) => {
            toAdd.add(peer);
        });
        this.metrics?.onAddToMesh(topic, InclusionReason.Random, toAdd.size - fanoutCount);
    }
    this.mesh.set(topic, toAdd);
    toAdd.forEach((id) => {
        this.log('JOIN: Add mesh link to %s in %s', id, topic);
        this.sendGraft(id, topic);
        // rust-libp2p
        // - peer_score.graft()
        // - Self::control_pool_add()
        // - peer_added_to_mesh()
    });
}
|
|
/**
 * Leave topic.
 *
 * Sends PRUNE to every mesh peer of the topic (in parallel) and drops
 * the mesh entry. Throws if not started.
 */
async leave(topic) {
    if (this.status.code !== GossipStatusCode.started) {
        throw new Error('Gossipsub has not started');
    }
    this.log('LEAVE %s', topic);
    this.metrics?.onLeave(topic);
    // Send PRUNE to mesh peers
    const meshPeers = this.mesh.get(topic);
    if (meshPeers) {
        await Promise.all(Array.from(meshPeers).map(async (id) => {
            this.log('LEAVE: Remove mesh link to %s in %s', id, topic);
            return await this.sendPrune(id, topic);
        }));
        this.mesh.delete(topic);
    }
}
|
|
/**
 * Select the peers a received message should be forwarded to.
 *
 * Candidates are: direct peers subscribed to the topic, subscribed
 * floodsub peers above the publish threshold, and all mesh peers. The
 * propagation source and any explicitly excluded peers are skipped.
 *
 * @returns a Set of peer id strings
 */
selectPeersToForward(topic, propagationSource, excludePeers) {
    const tosend = new Set();
    // Add explicit peers
    const peersInTopic = this.topics.get(topic);
    if (peersInTopic) {
        this.direct.forEach((peer) => {
            if (peersInTopic.has(peer) && propagationSource !== peer && !excludePeers?.has(peer)) {
                tosend.add(peer);
            }
        });
        // As of Mar 2022, spec + golang-libp2p include this while rust-libp2p does not
        // rust-libp2p: https://github.com/libp2p/rust-libp2p/blob/6cc3b4ec52c922bfcf562a29b5805c3150e37c75/protocols/gossipsub/src/behaviour.rs#L2693
        // spec: https://github.com/libp2p/specs/blob/10712c55ab309086a52eec7d25f294df4fa96528/pubsub/gossipsub/gossipsub-v1.0.md?plain=1#L361
        this.floodsubPeers.forEach((peer) => {
            if (peersInTopic.has(peer) &&
                propagationSource !== peer &&
                !excludePeers?.has(peer) &&
                this.score.score(peer) >= this.opts.scoreThresholds.publishThreshold) {
                tosend.add(peer);
            }
        });
    }
    // add mesh peers
    const meshPeers = this.mesh.get(topic);
    if (meshPeers && meshPeers.size > 0) {
        meshPeers.forEach((peer) => {
            if (propagationSource !== peer && !excludePeers?.has(peer)) {
                tosend.add(peer);
            }
        });
    }
    return tosend;
}
|
|
/**
 * Select the peers a locally published message should be sent to.
 *
 * With floodPublish: direct peers plus every subscriber above the
 * publish threshold. Without: direct peers, subscribed floodsub peers,
 * and mesh peers — or, when we are not in the mesh, fanout peers
 * (creating a fresh fanout of up to D peers if none exist and updating
 * the fanout last-published time).
 *
 * @returns `{ tosend, tosendCount }` — the peer id Set and per-category counters
 */
selectPeersToPublish(topic) {
    const tosend = new Set();
    const tosendCount = {
        direct: 0,
        floodsub: 0,
        mesh: 0,
        fanout: 0
    };
    const peersInTopic = this.topics.get(topic);
    if (peersInTopic) {
        // flood-publish behavior
        // send to direct peers and _all_ peers meeting the publishThreshold
        if (this.opts.floodPublish) {
            peersInTopic.forEach((id) => {
                if (this.direct.has(id)) {
                    tosend.add(id);
                    tosendCount.direct++;
                }
                else if (this.score.score(id) >= this.opts.scoreThresholds.publishThreshold) {
                    tosend.add(id);
                    tosendCount.floodsub++;
                }
            });
        }
        else {
            // non-flood-publish behavior
            // send to direct peers, subscribed floodsub peers
            // and some mesh peers above publishThreshold
            // direct peers (if subscribed)
            this.direct.forEach((id) => {
                if (peersInTopic.has(id)) {
                    tosend.add(id);
                    tosendCount.direct++;
                }
            });
            // floodsub peers
            // Note: if there are no floodsub peers, we save a loop through peersInTopic Map
            this.floodsubPeers.forEach((id) => {
                if (peersInTopic.has(id) && this.score.score(id) >= this.opts.scoreThresholds.publishThreshold) {
                    tosend.add(id);
                    tosendCount.floodsub++;
                }
            });
            // Gossipsub peers handling
            const meshPeers = this.mesh.get(topic);
            if (meshPeers && meshPeers.size > 0) {
                meshPeers.forEach((peer) => {
                    tosend.add(peer);
                    tosendCount.mesh++;
                });
            }
            // We are not in the mesh for topic, use fanout peers
            else {
                const fanoutPeers = this.fanout.get(topic);
                if (fanoutPeers && fanoutPeers.size > 0) {
                    fanoutPeers.forEach((peer) => {
                        tosend.add(peer);
                        tosendCount.fanout++;
                    });
                }
                // We have no fanout peers, select mesh_n of them and add them to the fanout
                else {
                    // If we are not in the fanout, then pick peers in topic above the publishThreshold
                    const newFanoutPeers = this.getRandomGossipPeers(topic, this.opts.D, (id) => {
                        return this.score.score(id) >= this.opts.scoreThresholds.publishThreshold;
                    });
                    if (newFanoutPeers.size > 0) {
                        // eslint-disable-line max-depth
                        this.fanout.set(topic, newFanoutPeers);
                        newFanoutPeers.forEach((peer) => {
                            // eslint-disable-line max-depth
                            tosend.add(peer);
                            tosendCount.fanout++;
                        });
                    }
                }
                // We are publishing to fanout peers - update the time we published
                this.fanoutLastpub.set(topic, Date.now());
            }
        }
    }
    return { tosend, tosendCount };
}
|
|
/**
|
|
* Forwards a message from our peers.
|
|
*
|
|
* For messages published by us (the app layer), this class uses `publish`
|
|
*/
|
|
forwardMessage(msgIdStr, rawMsg, propagationSource, excludePeers) {
|
|
// message is fully validated inform peer_score
|
|
if (propagationSource) {
|
|
this.score.deliverMessage(propagationSource, msgIdStr, rawMsg.topic);
|
|
}
|
|
const tosend = this.selectPeersToForward(rawMsg.topic, propagationSource, excludePeers);
|
|
// Note: Don't throw if tosend is empty, we can have a mesh with a single peer
|
|
// forward the message to peers
|
|
const rpc = createGossipRpc([rawMsg]);
|
|
tosend.forEach((id) => {
|
|
// self.send_message(*peer_id, event.clone())?;
|
|
this.sendRpc(id, rpc);
|
|
});
|
|
this.metrics?.onForwardMsg(rawMsg.topic, tosend.size);
|
|
}
|
|
/**
 * App layer publishes a message to peers, return number of peers this message is published to
 * Note: `async` due to crypto only if `StrictSign`, otherwise it's a sync fn.
 *
 * For messages not from us, this class uses `forwardMessage`.
 *
 * @param {string} topic
 * @param {Uint8Array} data
 * @returns {Promise<{recipients: PeerId[]}>} peers the message was actually sent to
 * @throws Error with message 'PublishError.Uninitialized' | 'PublishError.Duplicate' | 'PublishError.InsufficientPeers'
 */
async publish(topic, data) {
    const transformedData = this.dataTransform ? this.dataTransform.outboundTransform(topic, data) : data;
    if (this.publishConfig == null) {
        throw Error('PublishError.Uninitialized');
    }
    // Prepare raw message with user's publishConfig
    const { raw: rawMsg, msg } = await buildRawMessage(this.publishConfig, topic, data, transformedData);
    // calculate the message id from the un-transformed data
    const msgId = await this.msgIdFn(msg);
    const msgIdStr = this.msgIdToStrFn(msgId);
    if (this.seenCache.has(msgIdStr)) {
        // This message has already been seen. We don't re-publish messages that have already
        // been published on the network.
        throw Error('PublishError.Duplicate');
    }
    const { tosend, tosendCount } = this.selectPeersToPublish(topic);
    const willSendToSelf = this.opts.emitSelf === true && this.subscriptions.has(topic);
    // Only a hard failure when we can neither reach a peer nor deliver locally.
    if (tosend.size === 0 && !this.opts.allowPublishToZeroPeers && !willSendToSelf) {
        throw Error('PublishError.InsufficientPeers');
    }
    // If the message isn't a duplicate and we have sent it to some peers add it to the
    // duplicate cache and memcache.
    this.seenCache.put(msgIdStr);
    // all published messages are valid
    this.mcache.put({ msgId, msgIdStr }, rawMsg, true);
    // If the message is anonymous or has a random author add it to the published message ids cache.
    this.publishedMessageIds.put(msgIdStr);
    // Send to set of peers aggregated from direct, mesh, fanout
    const rpc = createGossipRpc([rawMsg]);
    for (const id of tosend) {
        // self.send_message(*peer_id, event.clone())?;
        const sent = this.sendRpc(id, rpc);
        // did not actually send the message; drop it from the recipient set
        if (!sent) {
            tosend.delete(id);
        }
    }
    this.metrics?.onPublishMsg(topic, tosendCount, tosend.size, rawMsg.data != null ? rawMsg.data.length : 0);
    // Dispatch the message to the user if we are subscribed to the topic
    if (willSendToSelf) {
        tosend.add(this.components.getPeerId().toString());
        super.dispatchEvent(new CustomEvent('gossipsub:message', {
            detail: {
                propagationSource: this.components.getPeerId(),
                msgId: msgIdStr,
                msg
            }
        }));
        // TODO: Add option to switch between emit per topic or all messages in one
        super.dispatchEvent(new CustomEvent('message', { detail: msg }));
    }
    return {
        recipients: Array.from(tosend.values()).map((str) => peerIdFromString(str))
    };
}
|
|
/**
 * This function should be called when `asyncValidation` is `true` after
 * the message got validated by the caller. Messages are stored in the `mcache` and
 * validation is expected to be fast enough that the messages should still exist in the cache.
 * There are three possible validation outcomes and the outcome is given in acceptance.
 *
 * If acceptance = `MessageAcceptance.Accept` the message will get propagated to the
 * network. The `propagation_source` parameter indicates who the message was received by and
 * will not be forwarded back to that peer.
 *
 * If acceptance = `MessageAcceptance.Reject` the message will be deleted from the memcache
 * and the P₄ penalty will be applied to the `propagationSource`.
 *
 * If acceptance = `MessageAcceptance.Ignore` the message will be deleted from the memcache
 * but no P₄ penalty will be applied.
 *
 * This function will return true if the message was found in the cache and false if was not
 * in the cache anymore.
 *
 * This should only be called once per message.
 *
 * @param {string} msgId
 * @param {PeerId} propagationSource - peer the message was originally received from
 * @param {MessageAcceptance} acceptance - Accept | Reject | Ignore
 */
reportMessageValidationResult(msgId, propagationSource, acceptance) {
    if (acceptance === MessageAcceptance.Accept) {
        // Mark the cache entry validated; it stays in mcache for gossip.
        const cacheEntry = this.mcache.validate(msgId);
        this.metrics?.onReportValidationMcacheHit(cacheEntry !== null);
        if (cacheEntry != null) {
            const { message: rawMsg, originatingPeers } = cacheEntry;
            // message is fully validated inform peer_score
            this.score.deliverMessage(propagationSource.toString(), msgId, rawMsg.topic);
            // Forward, excluding the source and any peer we already saw it from.
            this.forwardMessage(msgId, cacheEntry.message, propagationSource.toString(), originatingPeers);
            this.metrics?.onReportValidation(rawMsg.topic, acceptance);
        }
        // else, Message not in cache. Ignoring forwarding
    }
    // Not valid (Reject or Ignore): drop from the cache and score accordingly.
    else {
        const cacheEntry = this.mcache.remove(msgId);
        this.metrics?.onReportValidationMcacheHit(cacheEntry !== null);
        if (cacheEntry) {
            const rejectReason = rejectReasonFromAcceptance(acceptance);
            const { message: rawMsg, originatingPeers } = cacheEntry;
            // Tell peer_score about reject
            // Reject the original source, and any duplicates we've seen from other peers.
            this.score.rejectMessage(propagationSource.toString(), msgId, rawMsg.topic, rejectReason);
            for (const peer of originatingPeers) {
                this.score.rejectMessage(peer, msgId, rawMsg.topic, rejectReason);
            }
            this.metrics?.onReportValidation(rawMsg.topic, acceptance);
        }
        // else, Message not in cache. Ignoring forwarding
    }
}
|
|
/**
|
|
* Sends a GRAFT message to a peer
|
|
*/
|
|
sendGraft(id, topic) {
|
|
const graft = [
|
|
{
|
|
topicID: topic
|
|
}
|
|
];
|
|
const out = createGossipRpc([], { graft });
|
|
this.sendRpc(id, out);
|
|
}
|
|
/**
|
|
* Sends a PRUNE message to a peer
|
|
*/
|
|
async sendPrune(id, topic) {
|
|
const prune = [await this.makePrune(id, topic, this.opts.doPX)];
|
|
const out = createGossipRpc([], { prune });
|
|
this.sendRpc(id, out);
|
|
}
|
|
/**
 * Send an rpc object to a peer.
 *
 * Any pending control messages and IHAVE gossip queued for the peer are
 * piggybacked onto this RPC before encoding. On a push failure the pending
 * control/gossip is re-attached so it can be retried later.
 *
 * @param {string} id - peer id string
 * @param {object} rpc - RPC protobuf object
 * @returns {boolean} true if the bytes were pushed to the outbound stream,
 *   false when no stream is open or the push threw
 */
sendRpc(id, rpc) {
    const outboundStream = this.streamsOutbound.get(id);
    if (!outboundStream) {
        this.log(`Cannot send RPC to ${id} as there is no open stream to it available`);
        return false;
    }
    // piggyback control message retries
    const ctrl = this.control.get(id);
    if (ctrl) {
        this.piggybackControl(id, rpc, ctrl);
        this.control.delete(id);
    }
    // piggyback gossip
    const ihave = this.gossip.get(id);
    if (ihave) {
        this.piggybackGossip(id, rpc, ihave);
        this.gossip.delete(id);
    }
    const rpcBytes = RPC.encode(rpc).finish();
    try {
        outboundStream.push(rpcBytes);
    }
    catch (e) {
        this.log.error(`Cannot send rpc to ${id}`, e);
        // if the peer had control messages or gossip, re-attach
        // (we deleted them above; restore so nothing is silently lost)
        if (ctrl) {
            this.control.set(id, ctrl);
        }
        if (ihave) {
            this.gossip.set(id, ihave);
        }
        return false;
    }
    this.metrics?.onRpcSent(rpc, rpcBytes.length);
    return true;
}
|
|
/**
 * Merges retried control messages for peer `id` into `outRpc`.
 *
 * Stale retries are filtered out first: GRAFT is kept only for topics where
 * `id` is currently in our mesh, PRUNE only for topics where it is not.
 */
piggybackControl(id, outRpc, ctrl) {
    const tograft = (ctrl.graft || []).filter(({ topicID }) => ((topicID && this.mesh.get(topicID)) || new Set()).has(id));
    const toprune = (ctrl.prune || []).filter(({ topicID }) => !((topicID && this.mesh.get(topicID)) || new Set()).has(id));
    if (!tograft.length && !toprune.length) {
        return;
    }
    if (outRpc.control) {
        // NOTE(review): the `&&` short-circuits — if outRpc.control exists but
        // its graft/prune list is undefined, the filtered retries are dropped
        // rather than attached. Confirm against upstream js-libp2p-gossipsub
        // whether this is intentional.
        outRpc.control.graft = outRpc.control.graft && outRpc.control.graft.concat(tograft);
        outRpc.control.prune = outRpc.control.prune && outRpc.control.prune.concat(toprune);
    }
    else {
        outRpc.control = { graft: tograft, prune: toprune, ihave: [], iwant: [] };
    }
}
|
|
piggybackGossip(id, outRpc, ihave) {
|
|
if (!outRpc.control) {
|
|
outRpc.control = { ihave: [], iwant: [], graft: [], prune: [] };
|
|
}
|
|
outRpc.control.ihave = ihave;
|
|
}
|
|
/**
 * Send graft and prune messages
 *
 * GRAFTs and PRUNEs destined for the same peer are coalesced into a single
 * RPC; peers with only PRUNEs get their own RPC in the second loop.
 *
 * @param tograft - peer id => topic[]
 * @param toprune - peer id => topic[]
 * @param noPX - peer id => true to suppress peer exchange for that peer
 */
async sendGraftPrune(tograft, toprune, noPX) {
    const doPX = this.opts.doPX;
    for (const [id, topics] of tograft) {
        const graft = topics.map((topicID) => ({ topicID }));
        let prune = [];
        // If a peer also has prunes, process them now
        const pruning = toprune.get(id);
        if (pruning) {
            // PX is disabled per-peer when noPX flags the peer (e.g. bad score)
            prune = await Promise.all(pruning.map(async (topicID) => await this.makePrune(id, topicID, doPX && !(noPX.get(id) ?? false))));
            // delete so the second loop does not send a duplicate PRUNE
            toprune.delete(id);
        }
        const outRpc = createGossipRpc([], { graft, prune });
        this.sendRpc(id, outRpc);
    }
    // Remaining peers only have PRUNEs
    for (const [id, topics] of toprune) {
        const prune = await Promise.all(topics.map(async (topicID) => await this.makePrune(id, topicID, doPX && !(noPX.get(id) ?? false))));
        const outRpc = createGossipRpc([], { prune });
        this.sendRpc(id, outRpc);
    }
}
|
|
/**
|
|
* Emits gossip - Send IHAVE messages to a random set of gossip peers
|
|
*/
|
|
emitGossip(peersToGossipByTopic) {
|
|
const gossipIDsByTopic = this.mcache.getGossipIDs(new Set(peersToGossipByTopic.keys()));
|
|
for (const [topic, peersToGossip] of peersToGossipByTopic) {
|
|
this.doEmitGossip(topic, peersToGossip, gossipIDsByTopic.get(topic) ?? []);
|
|
}
|
|
}
|
|
/**
 * Send gossip messages to GossipFactor peers above threshold with a minimum of D_lazy
 * Peers are randomly selected from the heartbeat which exclude mesh + fanout peers
 * We also exclude direct peers, as there is no reason to emit gossip to them
 * @param topic
 * @param candidateToGossip - peers to gossip
 * @param messageIDs - message ids to gossip
 */
doEmitGossip(topic, candidateToGossip, messageIDs) {
    if (!messageIDs.length) {
        return;
    }
    // shuffle to emit in random order
    shuffle(messageIDs);
    // if we are emitting more than GossipsubMaxIHaveLength ids, truncate the list
    if (messageIDs.length > GossipsubMaxIHaveLength) {
        // we do the truncation (with shuffling) per peer below
        this.log('too many messages for gossip; will truncate IHAVE list (%d messages)', messageIDs.length);
    }
    if (!candidateToGossip.size)
        return;
    // target number of peers to gossip to: max(Dlazy, GossipFactor * candidates),
    // capped at the number of candidates available
    let target = this.opts.Dlazy;
    const factor = GossipsubGossipFactor * candidateToGossip.size;
    let peersToGossip = candidateToGossip;
    if (factor > target) {
        target = factor;
    }
    if (target > peersToGossip.size) {
        target = peersToGossip.size;
    }
    else {
        // only shuffle (and truncate to target) if needed
        peersToGossip = shuffle(Array.from(peersToGossip)).slice(0, target);
    }
    // Emit the IHAVE gossip to the selected peers up to the target
    peersToGossip.forEach((id) => {
        let peerMessageIDs = messageIDs;
        if (messageIDs.length > GossipsubMaxIHaveLength) {
            // shuffle and slice message IDs per peer so that we emit a different set for each peer
            // we have enough reduncancy in the system that this will significantly increase the message
            // coverage when we do truncate
            peerMessageIDs = shuffle(peerMessageIDs.slice()).slice(0, GossipsubMaxIHaveLength);
        }
        // queue the IHAVE; it is piggybacked on the next RPC or flushed later
        this.pushGossip(id, {
            topicID: topic,
            messageIDs: peerMessageIDs
        });
    });
}
|
|
/**
|
|
* Flush gossip and control messages
|
|
*/
|
|
flush() {
|
|
// send gossip first, which will also piggyback control
|
|
for (const [peer, ihave] of this.gossip.entries()) {
|
|
this.gossip.delete(peer);
|
|
this.sendRpc(peer, createGossipRpc([], { ihave }));
|
|
}
|
|
// send the remaining control messages
|
|
for (const [peer, control] of this.control.entries()) {
|
|
this.control.delete(peer);
|
|
this.sendRpc(peer, createGossipRpc([], { graft: control.graft, prune: control.prune }));
|
|
}
|
|
}
|
|
/**
|
|
* Adds new IHAVE messages to pending gossip
|
|
*/
|
|
pushGossip(id, controlIHaveMsgs) {
|
|
this.log('Add gossip to %s', id);
|
|
const gossip = this.gossip.get(id) || [];
|
|
this.gossip.set(id, gossip.concat(controlIHaveMsgs));
|
|
}
|
|
/**
 * Make a PRUNE control message for a peer in a topic.
 *
 * For gossipsub v1.0 peers only the topic is sent (no backoff / PX, which
 * v1.0 cannot parse). For v1.1 peers a backoff is attached and, when `doPX`
 * is set, a peer-exchange list of good-scoring peers with their signed
 * address records (when available).
 *
 * NOTE(review): `this.streamsOutbound.get(id)` is dereferenced without a
 * guard — if no outbound stream exists for `id` this throws. Confirm callers
 * only invoke this for peers with an open outbound stream.
 */
async makePrune(id, topic, doPX) {
    // pruning counts against the peer's score bookkeeping
    this.score.prune(id, topic);
    if (this.streamsOutbound.get(id).protocol === GossipsubIDv10) {
        // Gossipsub v1.0 -- no backoff, the peer won't be able to parse it anyway
        return {
            topicID: topic,
            peers: []
        };
    }
    // backoff is measured in seconds
    // GossipsubPruneBackoff is measured in milliseconds
    // The protobuf has it as a uint64
    const backoff = this.opts.pruneBackoff / 1000;
    if (!doPX) {
        return {
            topicID: topic,
            peers: [],
            backoff: backoff
        };
    }
    // select peers for Peer eXchange: never the pruned peer itself, and only
    // peers with a non-negative score
    const peers = this.getRandomGossipPeers(topic, this.opts.prunePeers, (xid) => {
        return xid !== id && this.score.score(xid) >= 0;
    });
    const px = await Promise.all(Array.from(peers).map(async (peerId) => {
        // see if we have a signed record to send back; if we don't, just send
        // the peer ID and let the pruned peer find them in the DHT -- we can't trust
        // unsigned address records through PX anyways
        // Finding signed records in the DHT is not supported at the time of writing in js-libp2p
        const id = peerIdFromString(peerId);
        return {
            peerID: id.toBytes(),
            signedPeerRecord: await this.components.getPeerStore().addressBook.getRawEnvelope(id)
        };
    }));
    return {
        topicID: topic,
        peers: px,
        backoff: backoff
    };
}
|
|
/**
|
|
* Maintains the mesh and fanout maps in gossipsub.
|
|
*/
|
|
async heartbeat() {
|
|
const { D, Dlo, Dhi, Dscore, Dout, fanoutTTL } = this.opts;
|
|
this.heartbeatTicks++;
|
|
// cache scores throught the heartbeat
|
|
const scores = new Map();
|
|
const getScore = (id) => {
|
|
let s = scores.get(id);
|
|
if (s === undefined) {
|
|
s = this.score.score(id);
|
|
scores.set(id, s);
|
|
}
|
|
return s;
|
|
};
|
|
// peer id => topic[]
|
|
const tograft = new Map();
|
|
// peer id => topic[]
|
|
const toprune = new Map();
|
|
// peer id => don't px
|
|
const noPX = new Map();
|
|
// clean up expired backoffs
|
|
this.clearBackoff();
|
|
// clean up peerhave/iasked counters
|
|
this.peerhave.clear();
|
|
this.metrics?.cacheSize.set({ cache: 'iasked' }, this.iasked.size);
|
|
this.iasked.clear();
|
|
// apply IWANT request penalties
|
|
this.applyIwantPenalties();
|
|
// ensure direct peers are connected
|
|
if (this.heartbeatTicks % this.opts.directConnectTicks === 0) {
|
|
// we only do this every few ticks to allow pending connections to complete and account for restarts/downtime
|
|
await this.directConnect();
|
|
}
|
|
// EXTRA: Prune caches
|
|
this.fastMsgIdCache?.prune();
|
|
this.seenCache.prune();
|
|
this.gossipTracer.prune();
|
|
this.publishedMessageIds.prune();
|
|
/**
|
|
* Instead of calling getRandomGossipPeers multiple times to:
|
|
* + get more mesh peers
|
|
* + more outbound peers
|
|
* + oppportunistic grafting
|
|
* + emitGossip
|
|
*
|
|
* We want to loop through the topic peers only a single time and prepare gossip peers for all topics to improve the performance
|
|
*/
|
|
const peersToGossipByTopic = new Map();
|
|
// maintain the mesh for topics we have joined
|
|
this.mesh.forEach((peers, topic) => {
|
|
const peersInTopic = this.topics.get(topic);
|
|
const candidateMeshPeers = new Set();
|
|
const peersToGossip = new Set();
|
|
peersToGossipByTopic.set(topic, peersToGossip);
|
|
if (peersInTopic) {
|
|
const shuffledPeers = shuffle(Array.from(peersInTopic));
|
|
const backoff = this.backoff.get(topic);
|
|
for (const id of shuffledPeers) {
|
|
const peerStreams = this.streamsOutbound.get(id);
|
|
if (peerStreams &&
|
|
this.multicodecs.includes(peerStreams.protocol) &&
|
|
!peers.has(id) &&
|
|
!this.direct.has(id)) {
|
|
const score = getScore(id);
|
|
if ((!backoff || !backoff.has(id)) && score >= 0)
|
|
candidateMeshPeers.add(id);
|
|
// instead of having to find gossip peers after heartbeat which require another loop
|
|
// we prepare peers to gossip in a topic within heartbeat to improve performance
|
|
if (score >= this.opts.scoreThresholds.gossipThreshold)
|
|
peersToGossip.add(id);
|
|
}
|
|
}
|
|
}
|
|
// prune/graft helper functions (defined per topic)
|
|
const prunePeer = (id, reason) => {
|
|
this.log('HEARTBEAT: Remove mesh link to %s in %s', id, topic);
|
|
// no need to update peer score here as we do it in makePrune
|
|
// add prune backoff record
|
|
this.addBackoff(id, topic);
|
|
// remove peer from mesh
|
|
peers.delete(id);
|
|
// after pruning a peer from mesh, we want to gossip topic to it if its score meet the gossip threshold
|
|
if (getScore(id) >= this.opts.scoreThresholds.gossipThreshold)
|
|
peersToGossip.add(id);
|
|
this.metrics?.onRemoveFromMesh(topic, reason, 1);
|
|
// add to toprune
|
|
const topics = toprune.get(id);
|
|
if (!topics) {
|
|
toprune.set(id, [topic]);
|
|
}
|
|
else {
|
|
topics.push(topic);
|
|
}
|
|
};
|
|
const graftPeer = (id, reason) => {
|
|
this.log('HEARTBEAT: Add mesh link to %s in %s', id, topic);
|
|
// update peer score
|
|
this.score.graft(id, topic);
|
|
// add peer to mesh
|
|
peers.add(id);
|
|
// when we add a new mesh peer, we don't want to gossip messages to it
|
|
peersToGossip.delete(id);
|
|
this.metrics?.onAddToMesh(topic, reason, 1);
|
|
// add to tograft
|
|
const topics = tograft.get(id);
|
|
if (!topics) {
|
|
tograft.set(id, [topic]);
|
|
}
|
|
else {
|
|
topics.push(topic);
|
|
}
|
|
};
|
|
// drop all peers with negative score, without PX
|
|
peers.forEach((id) => {
|
|
const score = getScore(id);
|
|
// Record the score
|
|
if (score < 0) {
|
|
this.log('HEARTBEAT: Prune peer %s with negative score: score=%d, topic=%s', id, score, topic);
|
|
prunePeer(id, ChurnReason.BadScore);
|
|
noPX.set(id, true);
|
|
}
|
|
});
|
|
// do we have enough peers?
|
|
if (peers.size < Dlo) {
|
|
const ineed = D - peers.size;
|
|
// slice up to first `ineed` items and remove them from candidateMeshPeers
|
|
// same to `const newMeshPeers = candidateMeshPeers.slice(0, ineed)`
|
|
const newMeshPeers = removeFirstNItemsFromSet(candidateMeshPeers, ineed);
|
|
newMeshPeers.forEach((p) => {
|
|
graftPeer(p, InclusionReason.NotEnough);
|
|
});
|
|
}
|
|
// do we have to many peers?
|
|
if (peers.size > Dhi) {
|
|
let peersArray = Array.from(peers);
|
|
// sort by score
|
|
peersArray.sort((a, b) => getScore(b) - getScore(a));
|
|
// We keep the first D_score peers by score and the remaining up to D randomly
|
|
// under the constraint that we keep D_out peers in the mesh (if we have that many)
|
|
peersArray = peersArray.slice(0, Dscore).concat(shuffle(peersArray.slice(Dscore)));
|
|
// count the outbound peers we are keeping
|
|
let outbound = 0;
|
|
peersArray.slice(0, D).forEach((p) => {
|
|
if (this.outbound.get(p)) {
|
|
outbound++;
|
|
}
|
|
});
|
|
// if it's less than D_out, bubble up some outbound peers from the random selection
|
|
if (outbound < Dout) {
|
|
const rotate = (i) => {
|
|
// rotate the peersArray to the right and put the ith peer in the front
|
|
const p = peersArray[i];
|
|
for (let j = i; j > 0; j--) {
|
|
peersArray[j] = peersArray[j - 1];
|
|
}
|
|
peersArray[0] = p;
|
|
};
|
|
// first bubble up all outbound peers already in the selection to the front
|
|
if (outbound > 0) {
|
|
let ihave = outbound;
|
|
for (let i = 1; i < D && ihave > 0; i++) {
|
|
if (this.outbound.get(peersArray[i])) {
|
|
rotate(i);
|
|
ihave--;
|
|
}
|
|
}
|
|
}
|
|
// now bubble up enough outbound peers outside the selection to the front
|
|
let ineed = D - outbound;
|
|
for (let i = D; i < peersArray.length && ineed > 0; i++) {
|
|
if (this.outbound.get(peersArray[i])) {
|
|
rotate(i);
|
|
ineed--;
|
|
}
|
|
}
|
|
}
|
|
// prune the excess peers
|
|
peersArray.slice(D).forEach((p) => {
|
|
prunePeer(p, ChurnReason.Excess);
|
|
});
|
|
}
|
|
// do we have enough outbound peers?
|
|
if (peers.size >= Dlo) {
|
|
// count the outbound peers we have
|
|
let outbound = 0;
|
|
peers.forEach((p) => {
|
|
if (this.outbound.get(p)) {
|
|
outbound++;
|
|
}
|
|
});
|
|
// if it's less than D_out, select some peers with outbound connections and graft them
|
|
if (outbound < Dout) {
|
|
const ineed = Dout - outbound;
|
|
const newMeshPeers = removeItemsFromSet(candidateMeshPeers, ineed, (id) => this.outbound.get(id) === true);
|
|
newMeshPeers.forEach((p) => {
|
|
graftPeer(p, InclusionReason.Outbound);
|
|
});
|
|
}
|
|
}
|
|
// should we try to improve the mesh with opportunistic grafting?
|
|
if (this.heartbeatTicks % this.opts.opportunisticGraftTicks === 0 && peers.size > 1) {
|
|
// Opportunistic grafting works as follows: we check the median score of peers in the
|
|
// mesh; if this score is below the opportunisticGraftThreshold, we select a few peers at
|
|
// random with score over the median.
|
|
// The intention is to (slowly) improve an underperforming mesh by introducing good
|
|
// scoring peers that may have been gossiping at us. This allows us to get out of sticky
|
|
// situations where we are stuck with poor peers and also recover from churn of good peers.
|
|
// now compute the median peer score in the mesh
|
|
const peersList = Array.from(peers).sort((a, b) => getScore(a) - getScore(b));
|
|
const medianIndex = Math.floor(peers.size / 2);
|
|
const medianScore = getScore(peersList[medianIndex]);
|
|
// if the median score is below the threshold, select a better peer (if any) and GRAFT
|
|
if (medianScore < this.opts.scoreThresholds.opportunisticGraftThreshold) {
|
|
const ineed = this.opts.opportunisticGraftPeers;
|
|
const newMeshPeers = removeItemsFromSet(candidateMeshPeers, ineed, (id) => getScore(id) > medianScore);
|
|
for (const id of newMeshPeers) {
|
|
this.log('HEARTBEAT: Opportunistically graft peer %s on topic %s', id, topic);
|
|
graftPeer(id, InclusionReason.Opportunistic);
|
|
}
|
|
}
|
|
}
|
|
});
|
|
// expire fanout for topics we haven't published to in a while
|
|
const now = Date.now();
|
|
this.fanoutLastpub.forEach((lastpb, topic) => {
|
|
if (lastpb + fanoutTTL < now) {
|
|
this.fanout.delete(topic);
|
|
this.fanoutLastpub.delete(topic);
|
|
}
|
|
});
|
|
// maintain our fanout for topics we are publishing but we have not joined
|
|
this.fanout.forEach((fanoutPeers, topic) => {
|
|
// checks whether our peers are still in the topic and have a score above the publish threshold
|
|
const topicPeers = this.topics.get(topic);
|
|
fanoutPeers.forEach((id) => {
|
|
if (!topicPeers.has(id) || getScore(id) < this.opts.scoreThresholds.publishThreshold) {
|
|
fanoutPeers.delete(id);
|
|
}
|
|
});
|
|
const peersInTopic = this.topics.get(topic);
|
|
const candidateFanoutPeers = [];
|
|
// the fanout map contains topics to which we are not subscribed.
|
|
const peersToGossip = new Set();
|
|
peersToGossipByTopic.set(topic, peersToGossip);
|
|
if (peersInTopic) {
|
|
const shuffledPeers = shuffle(Array.from(peersInTopic));
|
|
for (const id of shuffledPeers) {
|
|
const peerStreams = this.streamsOutbound.get(id);
|
|
if (peerStreams &&
|
|
this.multicodecs.includes(peerStreams.protocol) &&
|
|
!fanoutPeers.has(id) &&
|
|
!this.direct.has(id)) {
|
|
const score = getScore(id);
|
|
if (score >= this.opts.scoreThresholds.publishThreshold)
|
|
candidateFanoutPeers.push(id);
|
|
// instead of having to find gossip peers after heartbeat which require another loop
|
|
// we prepare peers to gossip in a topic within heartbeat to improve performance
|
|
if (score >= this.opts.scoreThresholds.gossipThreshold)
|
|
peersToGossip.add(id);
|
|
}
|
|
}
|
|
}
|
|
// do we need more peers?
|
|
if (fanoutPeers.size < D) {
|
|
const ineed = D - fanoutPeers.size;
|
|
candidateFanoutPeers.slice(0, ineed).forEach((id) => {
|
|
fanoutPeers.add(id);
|
|
peersToGossip?.delete(id);
|
|
});
|
|
}
|
|
});
|
|
this.emitGossip(peersToGossipByTopic);
|
|
// send coalesced GRAFT/PRUNE messages (will piggyback gossip)
|
|
await this.sendGraftPrune(tograft, toprune, noPX);
|
|
// flush pending gossip that wasn't piggybacked above
|
|
this.flush();
|
|
// advance the message history window
|
|
this.mcache.shift();
|
|
this.dispatchEvent(new CustomEvent('gossipsub:heartbeat'));
|
|
}
|
|
/**
|
|
* Given a topic, returns up to count peers subscribed to that topic
|
|
* that pass an optional filter function
|
|
*
|
|
* @param topic
|
|
* @param count
|
|
* @param filter - a function to filter acceptable peers
|
|
*/
|
|
getRandomGossipPeers(topic, count, filter = () => true) {
|
|
const peersInTopic = this.topics.get(topic);
|
|
if (!peersInTopic) {
|
|
return new Set();
|
|
}
|
|
// Adds all peers using our protocol
|
|
// that also pass the filter function
|
|
let peers = [];
|
|
peersInTopic.forEach((id) => {
|
|
const peerStreams = this.streamsOutbound.get(id);
|
|
if (!peerStreams) {
|
|
return;
|
|
}
|
|
if (this.multicodecs.includes(peerStreams.protocol) && filter(id)) {
|
|
peers.push(id);
|
|
}
|
|
});
|
|
// Pseudo-randomly shuffles peers
|
|
peers = shuffle(peers);
|
|
if (count > 0 && peers.length > count) {
|
|
peers = peers.slice(0, count);
|
|
}
|
|
return new Set(peers);
|
|
}
|
|
/**
 * Populates Prometheus-style metrics on scrape: data-structure sizes,
 * per-topic peer counts, and per-peer score breakdowns.
 *
 * @param metrics - the metrics registry object to write into
 */
onScrapeMetrics(metrics) {
    /* Data structure sizes */
    metrics.mcacheSize.set(this.mcache.size);
    // Arbitrary size
    metrics.cacheSize.set({ cache: 'direct' }, this.direct.size);
    metrics.cacheSize.set({ cache: 'seenCache' }, this.seenCache.size);
    metrics.cacheSize.set({ cache: 'fastMsgIdCache' }, this.fastMsgIdCache?.size ?? 0);
    metrics.cacheSize.set({ cache: 'publishedMessageIds' }, this.publishedMessageIds.size);
    metrics.cacheSize.set({ cache: 'mcache' }, this.mcache.size);
    metrics.cacheSize.set({ cache: 'score' }, this.score.size);
    metrics.cacheSize.set({ cache: 'gossipTracer.promises' }, this.gossipTracer.size);
    metrics.cacheSize.set({ cache: 'gossipTracer.requests' }, this.gossipTracer.requestMsByMsgSize);
    // Bounded by topic
    metrics.cacheSize.set({ cache: 'topics' }, this.topics.size);
    metrics.cacheSize.set({ cache: 'subscriptions' }, this.subscriptions.size);
    metrics.cacheSize.set({ cache: 'mesh' }, this.mesh.size);
    metrics.cacheSize.set({ cache: 'fanout' }, this.fanout.size);
    // Bounded by peer
    metrics.cacheSize.set({ cache: 'peers' }, this.peers.size);
    metrics.cacheSize.set({ cache: 'streamsOutbound' }, this.streamsOutbound.size);
    metrics.cacheSize.set({ cache: 'streamsInbound' }, this.streamsInbound.size);
    metrics.cacheSize.set({ cache: 'acceptFromWhitelist' }, this.acceptFromWhitelist.size);
    metrics.cacheSize.set({ cache: 'gossip' }, this.gossip.size);
    metrics.cacheSize.set({ cache: 'control' }, this.control.size);
    metrics.cacheSize.set({ cache: 'peerhave' }, this.peerhave.size);
    metrics.cacheSize.set({ cache: 'outbound' }, this.outbound.size);
    // 2D nested data structure: sum the per-topic backoff map sizes
    let backoffSize = 0;
    for (const backoff of this.backoff.values()) {
        backoffSize += backoff.size;
    }
    metrics.cacheSize.set({ cache: 'backoff' }, backoffSize);
    // Peer counts
    for (const [topicStr, peers] of this.topics) {
        metrics.topicPeersCount.set({ topicStr }, peers.size);
    }
    for (const [topicStr, peers] of this.mesh) {
        metrics.meshPeerCounts.set({ topicStr }, peers.size);
    }
    // Peer scores
    const scores = [];
    const scoreByPeer = new Map();
    metrics.behaviourPenalty.reset();
    for (const peerIdStr of this.peers.keys()) {
        const score = this.score.score(peerIdStr);
        scores.push(score);
        scoreByPeer.set(peerIdStr, score);
        metrics.behaviourPenalty.observe(this.score.peerStats.get(peerIdStr)?.behaviourPenalty ?? 0);
    }
    metrics.registerScores(scores, this.opts.scoreThresholds);
    // Breakdown score per mesh topicLabel
    metrics.registerScorePerMesh(this.mesh, scoreByPeer);
    // Breakdown on each score weight
    const sw = computeAllPeersScoreWeights(this.peers.keys(), this.score.peerStats, this.score.params, this.score.peerIPs, metrics.topicStrToLabel);
    metrics.registerScoreWeights(sw);
}
|
|
}
|
|
// Default protocol multicodec advertised by this implementation (gossipsub v1.1).
GossipSub.multicodec = GossipsubIDv11;
|
|
|
|
/**
 * Adds `newValue` to the Set stored under `key` in `map`, creating the Set
 * on first use.
 *
 * Improvements over the previous version: a single map lookup instead of
 * two, and `=== undefined` instead of the `typeof` idiom.
 *
 * @param {Map<any, Set<any>>} map - key => set of values
 * @param {any} key
 * @param {any} newValue
 */
function pushOrInitMapSet(map, key, newValue) {
    let set = map.get(key);
    if (set === undefined) {
        set = new Set();
        map.set(key, set);
    }
    set.add(newValue);
}
|
|
|
|
const log$3 = debug("waku:message:topic-only");
|
|
/**
 * Lightweight wrapper over a decoded proto message that exposes only the
 * content topic.
 */
class TopicOnlyMessage {
    /**
     * @param proto - decoded proto object; only `contentTopic` is read
     */
    constructor(proto) {
        this.proto = proto;
    }

    /** Content topic of the message, or the empty string when absent. */
    get contentTopic() {
        const topic = this.proto.contentTopic;
        return topic ?? "";
    }
}
|
|
/**
 * Decoder that extracts only the content topic from an encoded Waku
 * message; every other field of the resulting proto object is undefined.
 */
class TopicOnlyDecoder {
    constructor() {
        // This decoder is not bound to a specific content topic.
        this.contentTopic = "";
    }

    /**
     * Decodes wire bytes into a proto object carrying only the content topic.
     * @returns {Promise<object>}
     */
    fromWireToProtoObj(bytes) {
        const protoMessage = TopicOnlyMessage$1.decode(bytes);
        log$3("Message decoded", protoMessage);
        const { contentTopic } = protoMessage;
        return Promise.resolve({
            contentTopic,
            payload: undefined,
            rateLimitProof: undefined,
            timestamp: undefined,
            version: undefined,
        });
    }

    /** Wraps the proto object into a TopicOnlyMessage. */
    async fromProtoObj(proto) {
        return new TopicOnlyMessage(proto);
    }
}
|
|
|
|
const log$2 = debug("waku:relay");
|
|
/**
|
|
* Implements the [Waku v2 Relay protocol](https://rfc.vac.dev/spec/11/).
|
|
* Must be passed as a `pubsub` module to a `Libp2p` instance.
|
|
*
|
|
* @implements {require('libp2p-interfaces/src/pubsub')}
|
|
*/
|
|
class WakuRelay extends GossipSub {
|
|
constructor(options) {
|
|
options = Object.assign(options ?? {}, {
|
|
// Ensure that no signature is included nor expected in the messages.
|
|
globalSignaturePolicy: SignaturePolicy.StrictNoSign,
|
|
fallbackToFloodsub: false,
|
|
});
|
|
super(options);
|
|
this.multicodecs = RelayCodecs;
|
|
this.observers = new Map();
|
|
this.pubSubTopic = options?.pubSubTopic ?? DefaultPubSubTopic;
|
|
// TODO: User might want to decide what decoder should be used (e.g. for RLN)
|
|
this.defaultDecoder = new TopicOnlyDecoder();
|
|
}
|
|
/**
 * Mounts the gossipsub protocol onto the libp2p node
 * and subscribes to the default topic.
 *
 * @override
 * @returns {Promise<void>}
 */
async start() {
    await super.start();
    // Subscribe to the configured pubsub topic as soon as gossipsub is running.
    this.subscribe(this.pubSubTopic);
}
|
|
/**
|
|
* Send Waku message.
|
|
*/
|
|
async send(encoder, message) {
|
|
const msg = await encoder.toWire(message);
|
|
if (!msg) {
|
|
log$2("Failed to encode message, aborting publish");
|
|
return { recipients: [] };
|
|
}
|
|
return this.publish(this.pubSubTopic, msg);
|
|
}
|
|
/**
|
|
* Add an observer and associated Decoder to process incoming messages on a given content topic.
|
|
*
|
|
* @returns Function to delete the observer
|
|
*/
|
|
addObserver(decoder, callback) {
|
|
const observer = {
|
|
decoder,
|
|
callback,
|
|
};
|
|
pushOrInitMapSet(this.observers, decoder.contentTopic, observer);
|
|
return () => {
|
|
const observers = this.observers.get(decoder.contentTopic);
|
|
if (observers) {
|
|
observers.delete(observer);
|
|
}
|
|
};
|
|
}
|
|
/**
|
|
* Subscribe to a pubsub topic and start emitting Waku messages to observers.
|
|
*
|
|
* @override
|
|
*/
|
|
subscribe(pubSubTopic) {
|
|
this.addEventListener("gossipsub:message", async (event) => {
|
|
if (event.detail.msg.topic !== pubSubTopic)
|
|
return;
|
|
log$2(`Message received on ${pubSubTopic}`);
|
|
const topicOnlyMsg = await this.defaultDecoder.fromWireToProtoObj(event.detail.msg.data);
|
|
if (!topicOnlyMsg || !topicOnlyMsg.contentTopic) {
|
|
log$2("Message does not have a content topic, skipping");
|
|
return;
|
|
}
|
|
const observers = this.observers.get(topicOnlyMsg.contentTopic);
|
|
if (!observers) {
|
|
return;
|
|
}
|
|
await Promise.all(Array.from(observers).map(async ({ decoder, callback }) => {
|
|
const protoMsg = await decoder.fromWireToProtoObj(event.detail.msg.data);
|
|
if (!protoMsg) {
|
|
log$2("Internal error: message previously decoded failed on 2nd pass.");
|
|
return;
|
|
}
|
|
const msg = await decoder.fromProtoObj(protoMsg);
|
|
if (msg) {
|
|
callback(msg);
|
|
}
|
|
else {
|
|
log$2("Failed to decode messages on", topicOnlyMsg.contentTopic);
|
|
}
|
|
}));
|
|
});
|
|
super.subscribe(pubSubTopic);
|
|
}
|
|
getMeshPeers(topic) {
|
|
return super.getMeshPeers(topic ?? this.pubSubTopic);
|
|
}
|
|
}
|
|
// Advertise the most recent relay codec. The original assigned
// RelayCodecs[0] first and immediately overwrote it — a dead store.
WakuRelay.multicodec = RelayCodecs[RelayCodecs.length - 1];
|
|
|
|
const DefaultEpochUnitSeconds = 10; // the rln-relay epoch length in seconds
// Namespaced debug logger for the epoch conversion helpers below.
const log$1 = debug("waku:rln:epoch");
|
|
/**
 * Convert a Date into an RLN epoch index: the number of whole epoch units
 * elapsed since the Unix epoch.
 *
 * @param {Date} timestamp - Point in time to convert.
 * @param {number} [epochUnitSeconds] - Epoch length in seconds.
 * @returns {number} Zero-based epoch index.
 */
function dateToEpoch(timestamp, epochUnitSeconds = DefaultEpochUnitSeconds) {
    const seconds = timestamp.getTime() / 1000;
    const epoch = Math.floor(seconds / epochUnitSeconds);
    log$1("generated epoch", epoch);
    return epoch;
}
|
|
/**
 * Serialize an epoch index into the 32-byte field representation used by
 * the RLN circuits: the value occupies the first 4 bytes, little-endian;
 * the remaining 28 bytes stay zero.
 *
 * @param {number} epoch - Epoch index to encode.
 * @returns {Uint8Array} 32-byte encoding.
 */
function epochIntToBytes(epoch) {
    const bytes = new Uint8Array(32);
    const view = new DataView(bytes.buffer);
    view.setUint32(0, epoch, true);
    log$1("encoded epoch", epoch, bytes);
    return bytes;
}
|
|
/**
 * Decode an epoch index from its byte representation: reads the first
 * 4 bytes as a little-endian unsigned integer (the inverse of
 * `epochIntToBytes` for values that fit in 32 bits).
 *
 * @param {Uint8Array} bytes - Epoch bytes (at least 4 bytes long).
 * @returns {number} Decoded epoch index.
 */
function epochBytesToInt(bytes) {
    const view = new DataView(bytes.buffer, bytes.byteOffset, bytes.byteLength);
    const decoded = view.getUint32(0, true);
    log$1("decoded epoch", decoded, bytes);
    return decoded;
}
|
|
|
|
/**
 * Build the RLN signal for a message: its payload bytes followed by the
 * UTF-8 encoded content topic. Missing fields are treated as empty.
 *
 * @param msg - Message-like object with optional `payload`/`contentTopic`.
 * @returns {Uint8Array} Concatenated signal bytes.
 */
function toRLNSignal(msg) {
    const topicBytes = utf8ToBytes(msg.contentTopic ?? "");
    const payload = msg.payload ?? [];
    return new Uint8Array([...payload, ...topicBytes]);
}
|
|
/**
 * A decoded Waku message paired with its RLN rate limit proof. Standard
 * message fields are delegated to the wrapped message; proof verification
 * helpers are layered on top.
 */
class RlnMessage {
    /**
     * @param rlnInstance - RLN instance used for verification.
     * @param msg - Decoded Waku message being wrapped.
     * @param rateLimitProof - Rate limit proof attached to the message, if any.
     */
    constructor(rlnInstance, msg, rateLimitProof) {
        this.rlnInstance = rlnInstance;
        this.msg = msg;
        this.rateLimitProof = rateLimitProof;
    }
    /**
     * Verify the proof against the set of acceptable merkle roots.
     * @returns {boolean|undefined} undefined when no proof is attached.
     */
    verify() {
        if (!this.rateLimitProof) {
            return undefined;
        }
        // this.rlnInstance.verifyRLNProof once issue status-im/nwaku#1248 is fixed
        return this.rlnInstance.verifyWithRoots(this.rateLimitProof, toRLNSignal(this));
    }
    /**
     * Verify the proof without checking the merkle root.
     * @returns {boolean|undefined} undefined when no proof is attached.
     */
    verifyNoRoot() {
        if (!this.rateLimitProof) {
            return undefined;
        }
        // this.rlnInstance.verifyRLNProof once issue status-im/nwaku#1248 is fixed
        return this.rlnInstance.verifyWithNoRoot(this.rateLimitProof, toRLNSignal(this));
    }
    get payload() {
        return this.msg.payload;
    }
    get contentTopic() {
        return this.msg.contentTopic;
    }
    get timestamp() {
        return this.msg.timestamp;
    }
    /** Epoch decoded from the wrapped message's proof, if one is attached. */
    get epoch() {
        const bytes = this.msg.rateLimitProof?.epoch;
        if (!bytes) {
            return undefined;
        }
        return epochBytesToInt(bytes);
    }
}
|
|
|
|
// Namespaced debug logger for the RLN encoder/decoder wrappers.
const log = debug("waku:rln:encoder");
|
|
/**
 * Encoder that wraps another Waku encoder and attaches an RLN rate limit
 * proof to every outgoing message.
 */
class RLNEncoder {
    /**
     * @param encoder - Underlying Waku message encoder.
     * @param rlnInstance - RLN instance used to generate proofs.
     * @param {number} index - Membership index in the RLN tree; must be >= 0.
     * @param membershipKey - Membership key pair; its IDKey signs the proofs.
     * @throws {Error} When `index` is negative. (The original threw a bare
     *   string, which breaks `instanceof Error` checks and stack traces.)
     */
    constructor(encoder, rlnInstance, index, membershipKey) {
        this.encoder = encoder;
        this.rlnInstance = rlnInstance;
        this.index = index;
        if (index < 0)
            throw new Error("invalid membership index");
        this.idKey = membershipKey.IDKey;
        this.contentTopic = encoder.contentTopic;
    }
    /** Encode `message` to wire format with a rate limit proof attached. */
    async toWire(message) {
        message.contentTopic = this.contentTopic;
        message.rateLimitProof = await this.generateProof(message);
        log("Proof generated", message.rateLimitProof);
        return this.encoder.toWire(message);
    }
    /**
     * Encode `message` to a proto object with a rate limit proof attached.
     * @returns undefined when the wrapped encoder fails to produce a message.
     */
    async toProtoObj(message) {
        message.contentTopic = this.contentTopic;
        const protoMessage = await this.encoder.toProtoObj(message);
        if (!protoMessage)
            return;
        protoMessage.rateLimitProof = await this.generateProof(message);
        log("Proof generated", protoMessage.rateLimitProof);
        return protoMessage;
    }
    /** Generate (and time) the RLN proof for `message`'s signal. */
    async generateProof(message) {
        const signal = toRLNSignal(message);
        console.time("proof_gen_timer");
        const proof = await this.rlnInstance.generateRLNProof(signal, this.index, message.timestamp, this.idKey);
        console.timeEnd("proof_gen_timer");
        return proof;
    }
}
|
|
/**
 * Decoder that wraps another Waku decoder and pairs each decoded message
 * with its RLN rate limit proof, producing RlnMessage instances.
 */
class RLNDecoder {
    constructor(rlnInstance, decoder) {
        this.rlnInstance = rlnInstance;
        this.decoder = decoder;
    }
    /** Content topic of the wrapped decoder. */
    get contentTopic() {
        return this.decoder.contentTopic;
    }
    /**
     * Decode wire bytes via the wrapped decoder.
     *
     * Fix: the original logged the still-pending Promise returned by the
     * wrapped decoder; awaiting first logs the decoded object instead. The
     * returned value is unchanged (an async return flattens the promise).
     */
    async fromWireToProtoObj(bytes) {
        const protoMessage = await this.decoder.fromWireToProtoObj(bytes);
        log("Message decoded", protoMessage);
        return protoMessage;
    }
    /**
     * Wrap the decoded message together with its rate limit proof.
     * @returns {Promise<RlnMessage|undefined>} undefined when the wrapped
     *   decoder could not produce a message.
     */
    async fromProtoObj(proto) {
        const msg = await this.decoder.fromProtoObj(proto);
        if (!msg)
            return;
        return new RlnMessage(this.rlnInstance, msg, proto.rateLimitProof);
    }
}
|
|
|
|
// --- wasm-bindgen generated glue for the RLN wasm module ---
// Holds the instantiated module's exports once `init()` has completed.
let wasm;

// Shared UTF-8 decoder; `fatal: true` makes invalid byte sequences throw.
const cachedTextDecoder = new TextDecoder('utf-8', { ignoreBOM: true, fatal: true });

// Warm up the decoder so the first real decode is not the expensive one.
cachedTextDecoder.decode();

// Cached byte view over wasm linear memory. When the memory grows, the old
// ArrayBuffer is detached (byteLength becomes 0) and the view is rebuilt.
let cachedUint8Memory0 = new Uint8Array();

// Return a Uint8Array view over wasm memory, refreshing the cache if stale.
function getUint8Memory0() {
    if (cachedUint8Memory0.byteLength === 0) {
        cachedUint8Memory0 = new Uint8Array(wasm.memory.buffer);
    }
    return cachedUint8Memory0;
}

// Decode `len` bytes at `ptr` in wasm memory as a UTF-8 string.
function getStringFromWasm0(ptr, len) {
    return cachedTextDecoder.decode(getUint8Memory0().subarray(ptr, ptr + len));
}
|
|
|
|
// Object heap: JS values passed across the wasm boundary are stored here
// and referenced by integer handle. Slots below index 36 are reserved
// (undefined/null/true/false occupy fixed slots and are never freed).
const heap = new Array(32).fill(undefined);

heap.push(undefined, null, true, false);

// Head of the free-slot list threaded through the heap array itself.
let heap_next = heap.length;

// Store `obj` on the heap and return its handle for wasm.
function addHeapObject(obj) {
    if (heap_next === heap.length) heap.push(heap.length + 1);
    const idx = heap_next;
    heap_next = heap[idx];

    heap[idx] = obj;
    return idx;
}

// Look up a heap object by handle without releasing its slot.
function getObject(idx) { return heap[idx]; }

// Release a heap slot back to the free list; reserved slots are kept.
function dropObject(idx) {
    if (idx < 36) return;
    heap[idx] = heap_next;
    heap_next = idx;
}

// Read a heap object and release its slot in one step.
function takeObject(idx) {
    const ret = getObject(idx);
    dropObject(idx);
    return ret;
}
|
|
|
|
// Byte length of the most recent string/array copied into wasm memory;
// wasm-bindgen reads this side channel right after each pass* call.
let WASM_VECTOR_LEN = 0;

const cachedTextEncoder = new TextEncoder('utf-8');

// Prefer the zero-copy `encodeInto` when available; otherwise encode to a
// temporary buffer and copy it into the target view.
const encodeString = (typeof cachedTextEncoder.encodeInto === 'function'
    ? function (arg, view) {
    return cachedTextEncoder.encodeInto(arg, view);
}
    : function (arg, view) {
    const buf = cachedTextEncoder.encode(arg);
    view.set(buf);
    return {
        read: arg.length,
        written: buf.length
    };
});

// Copy a JS string into wasm memory and return the pointer. The ASCII
// prefix is copied byte-for-byte; any remaining tail is re-encoded with
// room for up to 3 UTF-8 bytes per UTF-16 unit, reallocating once.
function passStringToWasm0(arg, malloc, realloc) {

    // Without a realloc we must allocate the exact encoded size up front.
    if (realloc === undefined) {
        const buf = cachedTextEncoder.encode(arg);
        const ptr = malloc(buf.length);
        getUint8Memory0().subarray(ptr, ptr + buf.length).set(buf);
        WASM_VECTOR_LEN = buf.length;
        return ptr;
    }

    let len = arg.length;
    let ptr = malloc(len);

    const mem = getUint8Memory0();

    let offset = 0;

    // Fast path: copy leading ASCII characters directly.
    for (; offset < len; offset++) {
        const code = arg.charCodeAt(offset);
        if (code > 0x7F) break;
        mem[ptr + offset] = code;
    }

    // Slow path: re-encode the non-ASCII tail with worst-case headroom.
    if (offset !== len) {
        if (offset !== 0) {
            arg = arg.slice(offset);
        }
        ptr = realloc(ptr, len, len = offset + arg.length * 3);
        const view = getUint8Memory0().subarray(ptr + offset, ptr + len);
        const ret = encodeString(arg, view);

        offset += ret.written;
    }

    WASM_VECTOR_LEN = offset;
    return ptr;
}
|
|
|
|
// True when `x` is null or undefined; wasm-bindgen's sentinel for
// `Option::None` values crossing the boundary.
function isLikeNone(x) {
    return x === null || x === undefined;
}
|
|
|
|
// Cached Int32 view over wasm memory; rebuilt lazily after memory growth
// detaches the previous buffer (byteLength becomes 0).
let cachedInt32Memory0 = new Int32Array();

// Return an Int32Array view over wasm memory, refreshing the cache if stale.
function getInt32Memory0() {
    if (cachedInt32Memory0.byteLength === 0) {
        cachedInt32Memory0 = new Int32Array(wasm.memory.buffer);
    }
    return cachedInt32Memory0;
}
|
|
|
|
// Render an arbitrary JS value as a short human-readable string for
// wasm-bindgen debug output. NOTE(review): the bare `toString` below
// resolves to Object.prototype.toString through the global object — a
// deliberate wasm-bindgen idiom, not a missing declaration.
function debugString(val) {
    // primitive types
    const type = typeof val;
    if (type == 'number' || type == 'boolean' || val == null) {
        return  `${val}`;
    }
    if (type == 'string') {
        return `"${val}"`;
    }
    if (type == 'symbol') {
        const description = val.description;
        if (description == null) {
            return 'Symbol';
        } else {
            return `Symbol(${description})`;
        }
    }
    if (type == 'function') {
        const name = val.name;
        if (typeof name == 'string' && name.length > 0) {
            return `Function(${name})`;
        } else {
            return 'Function';
        }
    }
    // objects
    if (Array.isArray(val)) {
        const length = val.length;
        let debug = '[';
        if (length > 0) {
            debug += debugString(val[0]);
        }
        for(let i = 1; i < length; i++) {
            debug += ', ' + debugString(val[i]);
        }
        debug += ']';
        return debug;
    }
    // Test for built-in
    const builtInMatches = /\[object ([^\]]+)\]/.exec(toString.call(val));
    let className;
    if (builtInMatches.length > 1) {
        className = builtInMatches[1];
    } else {
        // Failed to match the standard '[object ClassName]'
        return toString.call(val);
    }
    if (className == 'Object') {
        // we're a user defined class or Object
        // JSON.stringify avoids problems with cycles, and is generally much
        // easier than looping through ownProperties of `val`.
        try {
            return 'Object(' + JSON.stringify(val) + ')';
        } catch (_) {
            return 'Object';
        }
    }
    // errors
    if (val instanceof Error) {
        return `${val.name}: ${val.message}\n${val.stack}`;
    }
    // TODO we could test for more things here, like `Set`s and `Map`s.
    return className;
}
|
|
/**
 * Install the Rust panic hook so wasm panics are reported to the console
 * instead of aborting silently.
 */
function init_panic_hook() {
    wasm.init_panic_hook();
}
|
|
|
|
/**
 * Create a new RLN context inside the wasm module.
 * @param {number} tree_height - Height of the RLN merkle tree.
 * @param {Uint8Array} zkey - Proving key bytes.
 * @param {Uint8Array} vk - Verification key bytes.
 * @returns {number} Opaque context handle passed to the other wrappers.
 */
function newRLN(tree_height, zkey, vk) {
    const ret = wasm.newRLN(tree_height, addHeapObject(zkey), addHeapObject(vk));
    return ret;
}
|
|
|
|
/**
 * Serialize an RLN witness from raw input bytes.
 * @param {number} ctx - RLN context handle from `newRLN`.
 * @param {Uint8Array} input - Raw witness input.
 * @returns {Uint8Array} Serialized witness.
 */
function getSerializedRLNWitness(ctx, input) {
    const ret = wasm.getSerializedRLNWitness(ctx, addHeapObject(input));
    return takeObject(ret);
}
|
|
|
|
/**
 * Insert a serialized member (identity commitment) into the RLN merkle tree.
 * @param {number} ctx - RLN context handle from `newRLN`.
 * @param {Uint8Array} input - Serialized member to insert.
 * @throws Rethrows the error object produced by the wasm side on failure.
 */
function insertMember(ctx, input) {
    try {
        // wasm returns (value, is_error) through a 16-byte shadow-stack slot.
        const retptr = wasm.__wbindgen_add_to_stack_pointer(-16);
        wasm.insertMember(retptr, ctx, addHeapObject(input));
        var r0 = getInt32Memory0()[retptr / 4 + 0];
        var r1 = getInt32Memory0()[retptr / 4 + 1];
        if (r1) {
            throw takeObject(r0);
        }
    } finally {
        // Always restore the shadow stack pointer.
        wasm.__wbindgen_add_to_stack_pointer(16);
    }
}
|
|
|
|
/**
 * Convert a serialized RLN witness into its JSON object form (the input
 * format expected by the circom witness calculator).
 * @param {number} ctx - RLN context handle from `newRLN`.
 * @param {Uint8Array} serialized_witness - Witness to convert.
 * @returns {object} Witness as a plain JS object.
 */
function RLNWitnessToJson(ctx, serialized_witness) {
    const ret = wasm.RLNWitnessToJson(ctx, addHeapObject(serialized_witness));
    return takeObject(ret);
}
|
|
|
|
// Cached Uint32 view over wasm memory; rebuilt lazily after memory growth.
let cachedUint32Memory0 = new Uint32Array();

// Return a Uint32Array view over wasm memory, refreshing the cache if stale.
function getUint32Memory0() {
    if (cachedUint32Memory0.byteLength === 0) {
        cachedUint32Memory0 = new Uint32Array(wasm.memory.buffer);
    }
    return cachedUint32Memory0;
}

// Copy an array of JS values into wasm memory as an array of 4-byte heap
// handles; the element count is left in WASM_VECTOR_LEN for the caller.
function passArrayJsValueToWasm0(array, malloc) {
    const ptr = malloc(array.length * 4);
    const mem = getUint32Memory0();
    for (let i = 0; i < array.length; i++) {
        mem[ptr / 4 + i] = addHeapObject(array[i]);
    }
    WASM_VECTOR_LEN = array.length;
    return ptr;
}
|
|
/**
 * Generate an RLN proof from a pre-calculated circom witness.
 * @param {number} ctx - RLN context handle from `newRLN`.
 * @param {(bigint)[]} calculated_witness - Witness values from the calculator.
 * @param {Uint8Array} serialized_witness - Serialized witness inputs.
 * @returns {Uint8Array} Serialized proof.
 * @throws Rethrows the error object produced by the wasm side on failure.
 */
function generate_rln_proof_with_witness(ctx, calculated_witness, serialized_witness) {
    try {
        // wasm returns (ok_value, err_value, is_error) via the shadow stack.
        const retptr = wasm.__wbindgen_add_to_stack_pointer(-16);
        const ptr0 = passArrayJsValueToWasm0(calculated_witness, wasm.__wbindgen_malloc);
        const len0 = WASM_VECTOR_LEN;
        wasm.generate_rln_proof_with_witness(retptr, ctx, ptr0, len0, addHeapObject(serialized_witness));
        var r0 = getInt32Memory0()[retptr / 4 + 0];
        var r1 = getInt32Memory0()[retptr / 4 + 1];
        var r2 = getInt32Memory0()[retptr / 4 + 2];
        if (r2) {
            throw takeObject(r1);
        }
        return takeObject(r0);
    } finally {
        wasm.__wbindgen_add_to_stack_pointer(16);
    }
}
|
|
|
|
/**
 * Generate a fresh RLN membership key pair.
 * @param {number} ctx - RLN context handle from `newRLN`.
 * @returns {Uint8Array} Serialized key material (IDKey + IDCommitment).
 * @throws Rethrows the error object produced by the wasm side on failure.
 */
function generateMembershipKey(ctx) {
    try {
        const retptr = wasm.__wbindgen_add_to_stack_pointer(-16);
        wasm.generateMembershipKey(retptr, ctx);
        var r0 = getInt32Memory0()[retptr / 4 + 0];
        var r1 = getInt32Memory0()[retptr / 4 + 1];
        var r2 = getInt32Memory0()[retptr / 4 + 2];
        if (r2) {
            throw takeObject(r1);
        }
        return takeObject(r0);
    } finally {
        wasm.__wbindgen_add_to_stack_pointer(16);
    }
}
|
|
|
|
/**
 * Deterministically generate an RLN membership key pair from a seed.
 * @param {number} ctx - RLN context handle from `newRLN`.
 * @param {Uint8Array} seed - Seed bytes; the same seed yields the same keys.
 * @returns {Uint8Array} Serialized key material (IDKey + IDCommitment).
 * @throws Rethrows the error object produced by the wasm side on failure.
 */
function generateSeededMembershipKey(ctx, seed) {
    try {
        const retptr = wasm.__wbindgen_add_to_stack_pointer(-16);
        wasm.generateSeededMembershipKey(retptr, ctx, addHeapObject(seed));
        var r0 = getInt32Memory0()[retptr / 4 + 0];
        var r1 = getInt32Memory0()[retptr / 4 + 1];
        var r2 = getInt32Memory0()[retptr / 4 + 2];
        if (r2) {
            throw takeObject(r1);
        }
        return takeObject(r0);
    } finally {
        wasm.__wbindgen_add_to_stack_pointer(16);
    }
}
|
|
|
|
/**
 * Verify an RLN proof against the context's current merkle root.
 * @param {number} ctx - RLN context handle from `newRLN`.
 * @param {Uint8Array} proof - Serialized proof (with appended signal).
 * @returns {boolean} true when the proof verifies.
 * @throws Rethrows the error object produced by the wasm side on failure.
 */
function verifyRLNProof(ctx, proof) {
    try {
        const retptr = wasm.__wbindgen_add_to_stack_pointer(-16);
        wasm.verifyRLNProof(retptr, ctx, addHeapObject(proof));
        var r0 = getInt32Memory0()[retptr / 4 + 0];
        var r1 = getInt32Memory0()[retptr / 4 + 1];
        var r2 = getInt32Memory0()[retptr / 4 + 2];
        if (r2) {
            throw takeObject(r1);
        }
        return r0 !== 0;
    } finally {
        wasm.__wbindgen_add_to_stack_pointer(16);
    }
}
|
|
|
|
/**
 * Verify an RLN proof against an explicit set of acceptable merkle roots.
 * @param {number} ctx - RLN context handle from `newRLN`.
 * @param {Uint8Array} proof - Serialized proof (with appended signal).
 * @param {Uint8Array} roots - Concatenated serialized roots to accept.
 * @returns {boolean} true when the proof verifies against one of the roots.
 * @throws Rethrows the error object produced by the wasm side on failure.
 */
function verifyWithRoots(ctx, proof, roots) {
    try {
        const retptr = wasm.__wbindgen_add_to_stack_pointer(-16);
        wasm.verifyWithRoots(retptr, ctx, addHeapObject(proof), addHeapObject(roots));
        var r0 = getInt32Memory0()[retptr / 4 + 0];
        var r1 = getInt32Memory0()[retptr / 4 + 1];
        var r2 = getInt32Memory0()[retptr / 4 + 2];
        if (r2) {
            throw takeObject(r1);
        }
        return r0 !== 0;
    } finally {
        wasm.__wbindgen_add_to_stack_pointer(16);
    }
}
|
|
|
|
/**
 * Read the current merkle root of the RLN tree.
 * @param {number} ctx - RLN context handle from `newRLN`.
 * @returns {Uint8Array} Serialized merkle root.
 * @throws Rethrows the error object produced by the wasm side on failure.
 */
function getRoot(ctx) {
    try {
        const retptr = wasm.__wbindgen_add_to_stack_pointer(-16);
        wasm.getRoot(retptr, ctx);
        var r0 = getInt32Memory0()[retptr / 4 + 0];
        var r1 = getInt32Memory0()[retptr / 4 + 1];
        var r2 = getInt32Memory0()[retptr / 4 + 2];
        if (r2) {
            throw takeObject(r1);
        }
        return takeObject(r0);
    } finally {
        wasm.__wbindgen_add_to_stack_pointer(16);
    }
}
|
|
|
|
// Invoke an import shim, forwarding any JS exception to wasm via
// __wbindgen_exn_store instead of letting it unwind through wasm frames.
function handleError(f, args) {
    try {
        return f.apply(this, args);
    } catch (e) {
        wasm.__wbindgen_exn_store(addHeapObject(e));
    }
}

// View `len` bytes of wasm memory at `ptr` as a Uint8Array (no copy).
function getArrayU8FromWasm0(ptr, len) {
    return getUint8Memory0().subarray(ptr / 1, ptr / 1 + len);
}
|
|
|
|
// Instantiate the wasm module. A fetch Response is streamed when the engine
// and server allow it (correct MIME type), with a fallback to buffering the
// bytes; anything else is passed to WebAssembly.instantiate directly.
async function load(module, imports) {
    if (typeof Response === 'function' && module instanceof Response) {
        if (typeof WebAssembly.instantiateStreaming === 'function') {
            try {
                return await WebAssembly.instantiateStreaming(module, imports);

            } catch (e) {
                // Streaming fails when the server mislabels the MIME type;
                // fall back to the slower buffered path in that case only.
                if (module.headers.get('Content-Type') != 'application/wasm') {
                    console.warn("`WebAssembly.instantiateStreaming` failed because your server does not serve wasm with `application/wasm` MIME type. Falling back to `WebAssembly.instantiate` which is slower. Original error:\n", e);

                } else {
                    throw e;
                }
            }
        }

        const bytes = await module.arrayBuffer();
        return await WebAssembly.instantiate(bytes, imports);

    } else {
        const instance = await WebAssembly.instantiate(module, imports);

        // Instantiating a precompiled Module yields a bare Instance;
        // normalize to the { instance, module } shape.
        if (instance instanceof WebAssembly.Instance) {
            return { instance, module };

        } else {
            return instance;
        }
    }
}
|
|
|
|
// Build the import object wired into the RLN wasm module: wasm-bindgen
// shims for strings, heap-handle management, error reporting, randomness
// (browser/Node), and the JS reflection helpers the Rust side calls into.
function getImports() {
    const imports = {};
    imports.wbg = {};
    imports.wbg.__wbindgen_string_new = function(arg0, arg1) {
        const ret = getStringFromWasm0(arg0, arg1);
        return addHeapObject(ret);
    };
    imports.wbg.__wbindgen_is_string = function(arg0) {
        const ret = typeof(getObject(arg0)) === 'string';
        return ret;
    };
    imports.wbg.__wbindgen_object_drop_ref = function(arg0) {
        takeObject(arg0);
    };
    imports.wbg.__wbindgen_error_new = function(arg0, arg1) {
        const ret = new Error(getStringFromWasm0(arg0, arg1));
        return addHeapObject(ret);
    };
    // Returns an (optional) string through a two-slot out-pointer: ptr, len.
    imports.wbg.__wbindgen_string_get = function(arg0, arg1) {
        const obj = getObject(arg1);
        const ret = typeof(obj) === 'string' ? obj : undefined;
        var ptr0 = isLikeNone(ret) ? 0 : passStringToWasm0(ret, wasm.__wbindgen_malloc, wasm.__wbindgen_realloc);
        var len0 = WASM_VECTOR_LEN;
        getInt32Memory0()[arg0 / 4 + 1] = len0;
        getInt32Memory0()[arg0 / 4 + 0] = ptr0;
    };
    imports.wbg.__wbindgen_object_clone_ref = function(arg0) {
        const ret = getObject(arg0);
        return addHeapObject(ret);
    };
    imports.wbg.__wbg_set_20cbc34131e76824 = function(arg0, arg1, arg2) {
        getObject(arg0)[takeObject(arg1)] = takeObject(arg2);
    };
    imports.wbg.__wbg_new_abda76e883ba8a5f = function() {
        const ret = new Error();
        return addHeapObject(ret);
    };
    imports.wbg.__wbg_stack_658279fe44541cf6 = function(arg0, arg1) {
        const ret = getObject(arg1).stack;
        const ptr0 = passStringToWasm0(ret, wasm.__wbindgen_malloc, wasm.__wbindgen_realloc);
        const len0 = WASM_VECTOR_LEN;
        getInt32Memory0()[arg0 / 4 + 1] = len0;
        getInt32Memory0()[arg0 / 4 + 0] = ptr0;
    };
    imports.wbg.__wbg_error_f851667af71bcfc6 = function(arg0, arg1) {
        try {
            console.error(getStringFromWasm0(arg0, arg1));
        } finally {
            // The string was allocated on the wasm side; free it here.
            wasm.__wbindgen_free(arg0, arg1);
        }
    };
    imports.wbg.__wbindgen_is_undefined = function(arg0) {
        const ret = getObject(arg0) === undefined;
        return ret;
    };
    // Randomness shims cover browser crypto, Node crypto, and msCrypto.
    imports.wbg.__wbg_randomFillSync_6894564c2c334c42 = function() { return handleError(function (arg0, arg1, arg2) {
        getObject(arg0).randomFillSync(getArrayU8FromWasm0(arg1, arg2));
    }, arguments) };
    imports.wbg.__wbg_getRandomValues_805f1c3d65988a5a = function() { return handleError(function (arg0, arg1) {
        getObject(arg0).getRandomValues(getObject(arg1));
    }, arguments) };
    imports.wbg.__wbg_crypto_e1d53a1d73fb10b8 = function(arg0) {
        const ret = getObject(arg0).crypto;
        return addHeapObject(ret);
    };
    imports.wbg.__wbindgen_is_object = function(arg0) {
        const val = getObject(arg0);
        const ret = typeof(val) === 'object' && val !== null;
        return ret;
    };
    imports.wbg.__wbg_process_038c26bf42b093f8 = function(arg0) {
        const ret = getObject(arg0).process;
        return addHeapObject(ret);
    };
    imports.wbg.__wbg_versions_ab37218d2f0b24a8 = function(arg0) {
        const ret = getObject(arg0).versions;
        return addHeapObject(ret);
    };
    imports.wbg.__wbg_node_080f4b19d15bc1fe = function(arg0) {
        const ret = getObject(arg0).node;
        return addHeapObject(ret);
    };
    imports.wbg.__wbg_msCrypto_6e7d3e1f92610cbb = function(arg0) {
        const ret = getObject(arg0).msCrypto;
        return addHeapObject(ret);
    };
    imports.wbg.__wbg_require_78a3dcfbdba9cbce = function() { return handleError(function () {
        const ret = module.require;
        return addHeapObject(ret);
    }, arguments) };
    imports.wbg.__wbindgen_is_function = function(arg0) {
        const ret = typeof(getObject(arg0)) === 'function';
        return ret;
    };
    imports.wbg.__wbg_new_1d9a920c6bfc44a8 = function() {
        const ret = new Array();
        return addHeapObject(ret);
    };
    imports.wbg.__wbg_newnoargs_b5b063fc6c2f0376 = function(arg0, arg1) {
        const ret = new Function(getStringFromWasm0(arg0, arg1));
        return addHeapObject(ret);
    };
    imports.wbg.__wbg_new_268f7b7dd3430798 = function() {
        const ret = new Map();
        return addHeapObject(ret);
    };
    imports.wbg.__wbg_call_97ae9d8645dc388b = function() { return handleError(function (arg0, arg1) {
        const ret = getObject(arg0).call(getObject(arg1));
        return addHeapObject(ret);
    }, arguments) };
    imports.wbg.__wbg_new_0b9bfdd97583284e = function() {
        const ret = new Object();
        return addHeapObject(ret);
    };
    // Global-object discovery shims: self (workers), window (browsers),
    // globalThis, global (Node); handleError catches whichever is missing.
    imports.wbg.__wbg_self_6d479506f72c6a71 = function() { return handleError(function () {
        const ret = self.self;
        return addHeapObject(ret);
    }, arguments) };
    imports.wbg.__wbg_window_f2557cc78490aceb = function() { return handleError(function () {
        const ret = window.window;
        return addHeapObject(ret);
    }, arguments) };
    imports.wbg.__wbg_globalThis_7f206bda628d5286 = function() { return handleError(function () {
        const ret = globalThis.globalThis;
        return addHeapObject(ret);
    }, arguments) };
    imports.wbg.__wbg_global_ba75c50d1cf384f4 = function() { return handleError(function () {
        const ret = global.global;
        return addHeapObject(ret);
    }, arguments) };
    imports.wbg.__wbg_set_a68214f35c417fa9 = function(arg0, arg1, arg2) {
        getObject(arg0)[arg1 >>> 0] = takeObject(arg2);
    };
    imports.wbg.__wbg_toString_d9cd5f001405e8ff = function() { return handleError(function (arg0, arg1) {
        const ret = getObject(arg0).toString(arg1);
        return addHeapObject(ret);
    }, arguments) };
    imports.wbg.__wbg_call_168da88779e35f61 = function() { return handleError(function (arg0, arg1, arg2) {
        const ret = getObject(arg0).call(getObject(arg1), getObject(arg2));
        return addHeapObject(ret);
    }, arguments) };
    imports.wbg.__wbg_set_933729cf5b66ac11 = function(arg0, arg1, arg2) {
        const ret = getObject(arg0).set(getObject(arg1), getObject(arg2));
        return addHeapObject(ret);
    };
    imports.wbg.__wbg_fromEntries_7abdcb92016eb4b9 = function() { return handleError(function (arg0) {
        const ret = Object.fromEntries(getObject(arg0));
        return addHeapObject(ret);
    }, arguments) };
    imports.wbg.__wbg_buffer_3f3d764d4747d564 = function(arg0) {
        const ret = getObject(arg0).buffer;
        return addHeapObject(ret);
    };
    imports.wbg.__wbg_newwithbyteoffsetandlength_d9aa266703cb98be = function(arg0, arg1, arg2) {
        const ret = new Uint8Array(getObject(arg0), arg1 >>> 0, arg2 >>> 0);
        return addHeapObject(ret);
    };
    imports.wbg.__wbg_new_8c3f0052272a457a = function(arg0) {
        const ret = new Uint8Array(getObject(arg0));
        return addHeapObject(ret);
    };
    imports.wbg.__wbg_set_83db9690f9353e79 = function(arg0, arg1, arg2) {
        getObject(arg0).set(getObject(arg1), arg2 >>> 0);
    };
    imports.wbg.__wbg_length_9e1ae1900cb0fbd5 = function(arg0) {
        const ret = getObject(arg0).length;
        return ret;
    };
    imports.wbg.__wbg_newwithlength_f5933855e4f48a19 = function(arg0) {
        const ret = new Uint8Array(arg0 >>> 0);
        return addHeapObject(ret);
    };
    imports.wbg.__wbg_subarray_58ad4efbb5bcb886 = function(arg0, arg1, arg2) {
        const ret = getObject(arg0).subarray(arg1 >>> 0, arg2 >>> 0);
        return addHeapObject(ret);
    };
    imports.wbg.__wbindgen_debug_string = function(arg0, arg1) {
        const ret = debugString(getObject(arg1));
        const ptr0 = passStringToWasm0(ret, wasm.__wbindgen_malloc, wasm.__wbindgen_realloc);
        const len0 = WASM_VECTOR_LEN;
        getInt32Memory0()[arg0 / 4 + 1] = len0;
        getInt32Memory0()[arg0 / 4 + 0] = ptr0;
    };
    imports.wbg.__wbindgen_throw = function(arg0, arg1) {
        throw new Error(getStringFromWasm0(arg0, arg1));
    };
    imports.wbg.__wbindgen_memory = function() {
        const ret = wasm.memory;
        return addHeapObject(ret);
    };

    return imports;
}
|
|
|
|
// Record the instantiated module's exports and reset the cached typed-array
// views; they are rebuilt lazily against the new wasm memory on first use.
function finalizeInit(instance, module) {
    wasm = instance.exports;
    // Cache the compiled module so re-initialization can reuse it.
    init.__wbindgen_wasm_module = module;
    cachedInt32Memory0 = new Int32Array();
    cachedUint32Memory0 = new Uint32Array();
    cachedUint8Memory0 = new Uint8Array();


    return wasm;
}
|
|
|
|
/**
 * Initialize the RLN wasm module and return its exports. When `input` is
 * omitted, the wasm asset bundled next to this file is fetched.
 * @param {string|URL|Request|Response|BufferSource|WebAssembly.Module} [input]
 * @returns {Promise<object>} The instantiated module's exports.
 */
async function init(input) {
    if (typeof input === 'undefined') {
        input = new URL(new URL('assets/rln_wasm_bg-c63a69c4.wasm', import.meta.url).href, import.meta.url);
    }
    const imports = getImports();

    // Anything addressable is fetched; Response/bytes/Module go straight
    // through to `load`.
    if (typeof input === 'string' || (typeof Request === 'function' && input instanceof Request) || (typeof URL === 'function' && input instanceof URL)) {
        input = fetch(input);
    }

    const { instance, module } = await load(await input, imports);

    return finalizeInit(instance, module);
}
|
|
|
|
// Adapted from https://github.com/feross/buffer
// Validate that `value` lies within [min, max] and that writing `ext`
// bytes starting at `offset` stays inside `buf`; throws RangeError otherwise.
function checkInt(buf, value, offset, ext, max, min) {
    if (value > max || value < min) {
        throw new RangeError('"value" argument is out of bounds');
    }
    if (offset + ext > buf.length) {
        throw new RangeError("Index out of range");
    }
}
|
|
// Write `value` into `buf` at `offset` as a little-endian unsigned integer
// spanning `byteLength` bytes. Bounds are validated via `checkInt` unless
// `noAssert` is truthy. Returns `buf` for chaining.
function writeUIntLE(buf, value, offset, byteLength, noAssert) {
    value = +value;
    offset = offset >>> 0;
    byteLength = byteLength >>> 0;
    if (!noAssert) {
        const maxBytes = Math.pow(2, 8 * byteLength) - 1;
        checkInt(buf, value, offset, byteLength, maxBytes, 0);
    }
    // Emit the least significant byte, then successively higher bytes by
    // dividing through a growing power-of-256 multiplier.
    let mul = 1;
    buf[offset] = value & 0xff;
    for (let i = 1; i < byteLength && (mul *= 0x100); i++) {
        buf[offset + i] = (value / mul) & 0xff;
    }
    return buf;
}
|
|
|
|
// Groth16 verification key for the RLN circuit (bn128 curve, 6 public
// inputs). Pure data — generated with circom/snarkjs tooling; do not edit
// by hand.
const verificationKey = {
    "protocol": "groth16",
    "curve": "bn128",
    "nPublic": 6,
    "vk_alpha_1": [
        "1805378556360488226980822394597799963030511477964155500103132920745199284516",
        "11990395240534218699464972016456017378439762088320057798320175886595281336136",
        "1"
    ],
    "vk_beta_2": [
        [
            "11031529986141021025408838211017932346992429731488270384177563837022796743627",
            "16042159910707312759082561183373181639420894978640710177581040523252926273854"
        ],
        [
            "20112698439519222240302944148895052359035104222313380895334495118294612255131",
            "19441583024670359810872018179190533814486480928824742448673677460151702019379"
        ],
        [
            "1",
            "0"
        ]
    ],
    "vk_gamma_2": [
        [
            "10857046999023057135944570762232829481370756359578518086990519993285655852781",
            "11559732032986387107991004021392285783925812861821192530917403151452391805634"
        ],
        [
            "8495653923123431417604973247489272438418190587263600148770280649306958101930",
            "4082367875863433681332203403145435568316851327593401208105741076214120093531"
        ],
        [
            "1",
            "0"
        ]
    ],
    "vk_delta_2": [
        [
            "1948496782571164085469528023647105317580208688174386157591917599801657832035",
            "20445814069256658101339037520922621162739470138213615104905368409238414511981"
        ],
        [
            "10024680869920840984813249386422727863826862577760330492647062850849851925340",
            "10512156247842686783409460795717734694774542185222602679117887145206209285142"
        ],
        [
            "1",
            "0"
        ]
    ],
    "vk_alphabeta_12": [
        [
            [
                "5151991366823434428398919091000210787450832786814248297320989361921939794156",
                "15735191313289001022885148627913534790382722933676436876510746491415970766821"
            ],
            [
                "3387907257437913904447588318761906430938415556102110876587455322225272831272",
                "1998779853452712881084781956683721603875246565720647583735935725110674288056"
            ],
            [
                "14280074182991498185075387990446437410077692353432005297922275464876153151820",
                "17092408446352310039633488224969232803092763095456307462247653153107223117633"
            ]
        ],
        [
            [
                "4359046709531668109201634396816565829237358165496082832279660960675584351266",
                "4511888308846208349307186938266411423935335853916317436093178288331845821336"
            ],
            [
                "11429499807090785857812316277335883295048773373068683863667725283965356423273",
                "16232274853200678548795010078253506586114563833318973594428907292096178657392"
            ],
            [
                "18068999605870933925311275504102553573815570223888590384919752303726860800970",
                "17309569111965782732372130116757295842160193489132771344011460471298173784984"
            ]
        ]
    ],
    "IC": [
        [
            "18693301901828818437917730940595978397160482710354161265484535387752523310572",
            "17985273354976640088538673802000794244421192643855111089693820179790551470769",
            "1"
        ],
        [
            "21164641723988537620541455173278629777250883365474191521194244273980931825942",
            "998385854410718613441067082771678946155853656328717326195057262123686425518",
            "1"
        ],
        [
            "21666968581672145768705229094968410656430989593283335488162701230986314747515",
            "17996457608540683483506630273632100555125353447506062045735279661096094677264",
            "1"
        ],
        [
            "20137761979695192602424300886442379728165712610493092740175904438282083668117",
            "19184814924890679891263780109959113289320127263583260218200636509492157834679",
            "1"
        ],
        [
            "10943171273393803842589314082509655332154393332394322726077270895078286354146",
            "10872472035685319847811233167729172672344935625121511932198535224727331126439",
            "1"
        ],
        [
            "13049169779481227658517545034348883391527506091990880778783387628208561946597",
            "10083689369261379027228809473568899816311684698866922944902456565434209079955",
            "1"
        ],
        [
            "19633516378466409167014413361365552102431118630694133723053441455184566611083",
            "8059525100726933978719058611146131904598011633549012007359165766216730722269",
            "1"
        ]
    ]
};
|
|
|
|
// File generated with https://github.com/iden3/circom
// following the instructions from:
// https://github.com/vacp2p/zerokit/tree/master/rln#compiling-circuits
//
// Compile the circom witness-calculator wasm `code`, wire up its runtime
// callbacks (error decoding, log buffering), and return a WitnessCalculator
// bound to the instantiated module.
async function builder(code, options) {
    options = options || {};
    let wasmModule;
    try {
        wasmModule = await WebAssembly.compile(code);
    }
    catch (err) {
        console.log(err);
        console.log("\nTry to run circom --c in order to generate c++ code instead\n");
        throw new Error(err);
    }
    let wc;
    // Buffers for error and log text streamed char-by-char from the module.
    let errStr = "";
    let msgStr = "";
    const instance = await WebAssembly.instantiate(wasmModule, {
        runtime: {
            // Map circom runtime error codes to human-readable messages.
            exceptionHandler: function (code) {
                let err;
                if (code == 1) {
                    err = "Signal not found.\n";
                }
                else if (code == 2) {
                    err = "Too many signals set.\n";
                }
                else if (code == 3) {
                    err = "Signal already set.\n";
                }
                else if (code == 4) {
                    err = "Assert Failed.\n";
                }
                else if (code == 5) {
                    err = "Not enough memory.\n";
                }
                else if (code == 6) {
                    err = "Input signal array access exceeds the size.\n";
                }
                else {
                    err = "Unknown error.\n";
                }
                throw new Error(err + errStr);
            },
            printErrorMessage: function () {
                errStr += getMessage() + "\n";
                // console.error(getMessage());
            },
            writeBufferMessage: function () {
                const msg = getMessage();
                // Any calls to `log()` will always end with a `\n`, so that's when we print and reset
                if (msg === "\n") {
                    console.log(msgStr);
                    msgStr = "";
                }
                else {
                    // If we've buffered other content, put a space in between the items
                    if (msgStr !== "") {
                        msgStr += " ";
                    }
                    // Then append the message to the message we are creating
                    msgStr += msg;
                }
            },
            showSharedRWMemory: function () {
                printSharedRWMemory();
            }
        }
    });
    const sanityCheck = options;
    // options &&
    // (
    //     options.sanityCheck ||
    //     options.logGetSignal ||
    //     options.logSetSignal ||
    //     options.logStartComponent ||
    //     options.logFinishComponent
    // );
    wc = new WitnessCalculator(instance, sanityCheck);
    return wc;
    // Drain the module's message buffer one char at a time until NUL.
    function getMessage() {
        var message = "";
        var c = instance.exports.getMessageChar();
        while (c != 0) {
            message += String.fromCharCode(c);
            c = instance.exports.getMessageChar();
        }
        return message;
    }
    // Read the shared field element (big-endian word order) and append its
    // decimal representation to the buffered log message.
    function printSharedRWMemory() {
        const shared_rw_memory_size = instance.exports.getFieldNumLen32();
        const arr = new Uint32Array(shared_rw_memory_size);
        for (let j = 0; j < shared_rw_memory_size; j++) {
            arr[shared_rw_memory_size - 1 - j] = instance.exports.readSharedRWMemory(j);
        }
        // If we've buffered other content, put a space in between the items
        if (msgStr !== "") {
            msgStr += " ";
        }
        // Then append the value to the message we are creating
        msgStr += (fromArray32(arr).toString());
    }
}
|
|
/**
 * Wrapper around a circom-generated wasm instance. Computes circuit
 * witnesses from a map of input-signal names to values and exposes them
 * as BigInt arrays or serialized binary buffers. Instances are created
 * via `builder()`.
 */
class WitnessCalculator {
    constructor(instance, sanityCheck) {
        this.instance = instance;
        // circom compiler version baked into the wasm.
        this.version = this.instance.exports.getVersion();
        // Number of 32-bit words per field element.
        this.n32 = this.instance.exports.getFieldNumLen32();
        // getRawPrime stages the field prime in shared memory; read it back
        // word by word, storing reversed so that fromArray32 (which treats
        // index 0 as most significant) reassembles it correctly.
        this.instance.exports.getRawPrime();
        const arr = new Uint32Array(this.n32);
        for (let i = 0; i < this.n32; i++) {
            arr[this.n32 - 1 - i] = this.instance.exports.readSharedRWMemory(i);
        }
        // Field prime as a BigInt; input values are reduced modulo this.
        this.prime = fromArray32(arr);
        // Total number of witness values the circuit produces.
        this.witnessSize = this.instance.exports.getWitnessSize();
        this.sanityCheck = sanityCheck;
    }
    /** @returns the circom compiler version reported by the wasm module. */
    circom_version() {
        return this.instance.exports.getVersion();
    }
    /**
     * Feed every input signal into the wasm instance and run witness
     * generation. Shared helper for all calculate* methods.
     *
     * @param input map from signal names to (possibly nested) arrays of
     *   values convertible with BigInt()
     * @param sanityCheck truthy to enable wasm-side sanity checks (also
     *   enabled when the constructor's sanityCheck flag was truthy)
     * @throws {Error} if a signal is unknown, has the wrong number of
     *   values, or not all circuit inputs end up set
     */
    async _doCalculateWitness(input, sanityCheck) {
        //input is assumed to be a map from signals to arrays of bigints
        this.instance.exports.init((this.sanityCheck || sanityCheck) ? 1 : 0);
        const keys = Object.keys(input);
        var input_counter = 0;
        keys.forEach((k) => {
            // Signals are addressed by the 64-bit FNV hash of their name,
            // split into two 32-bit halves.
            const h = fnvHash(k);
            const hMSB = parseInt(h.slice(0, 8), 16);
            const hLSB = parseInt(h.slice(8, 16), 16);
            const fArr = flatArray(input[k]);
            let signalSize = this.instance.exports.getInputSignalSize(hMSB, hLSB);
            if (signalSize < 0) {
                throw new Error(`Signal ${k} not found\n`);
            }
            if (fArr.length < signalSize) {
                throw new Error(`Not enough values for input signal ${k}\n`);
            }
            if (fArr.length > signalSize) {
                throw new Error(`Too many values for input signal ${k}\n`);
            }
            for (let i = 0; i < fArr.length; i++) {
                // Reduce the value into the field, then write its words with
                // the reversal mirroring the read pattern in the constructor.
                const arrFr = toArray32(BigInt(fArr[i]) % this.prime, this.n32);
                for (let j = 0; j < this.n32; j++) {
                    this.instance.exports.writeSharedRWMemory(j, arrFr[this.n32 - 1 - j]);
                }
                try {
                    this.instance.exports.setInputSignal(hMSB, hLSB, i);
                    input_counter++;
                }
                catch (err) {
                    // console.log(`After adding signal ${i} of ${k}`)
                    // NOTE(review): wrapping in new Error() stringifies the
                    // original error and drops its stack trace.
                    throw new Error(err);
                }
            }
        });
        if (input_counter < this.instance.exports.getInputSize()) {
            throw new Error(`Not all inputs have been set. Only ${input_counter} out of ${this.instance.exports.getInputSize()}`);
        }
    }
    /**
     * Compute the witness and return it as an array of BigInt field
     * elements (one per witness slot).
     */
    async calculateWitness(input, sanityCheck) {
        const w = [];
        await this._doCalculateWitness(input, sanityCheck);
        for (let i = 0; i < this.witnessSize; i++) {
            // getWitness(i) stages element i in shared memory; read it back
            // word by word with the usual reversal for fromArray32.
            this.instance.exports.getWitness(i);
            const arr = new Uint32Array(this.n32);
            for (let j = 0; j < this.n32; j++) {
                arr[this.n32 - 1 - j] = this.instance.exports.readSharedRWMemory(j);
            }
            w.push(fromArray32(arr));
        }
        return w;
    }
    /**
     * Compute the witness and return it as raw bytes: witnessSize elements
     * of n32 words each, copied in shared-memory word order, no header.
     */
    async calculateBinWitness(input, sanityCheck) {
        const buff32 = new Uint32Array(this.witnessSize * this.n32);
        const buff = new Uint8Array(buff32.buffer);
        await this._doCalculateWitness(input, sanityCheck);
        for (let i = 0; i < this.witnessSize; i++) {
            this.instance.exports.getWitness(i);
            const pos = i * this.n32;
            for (let j = 0; j < this.n32; j++) {
                buff32[pos + j] = this.instance.exports.readSharedRWMemory(j);
            }
        }
        return buff;
    }
    /**
     * Compute the witness and serialize it in the snarkjs `.wtns` binary
     * format: magic "wtns", version 2, section 1 (field prime + witness
     * count) followed by section 2 (the witness values).
     */
    async calculateWTNSBin(input, sanityCheck) {
        // 11 header words + n32 words for the prime + the witness data.
        const buff32 = new Uint32Array(this.witnessSize * this.n32 + this.n32 + 11);
        const buff = new Uint8Array(buff32.buffer);
        await this._doCalculateWitness(input, sanityCheck);
        //"wtns"
        buff[0] = "w".charCodeAt(0);
        buff[1] = "t".charCodeAt(0);
        buff[2] = "n".charCodeAt(0);
        buff[3] = "s".charCodeAt(0);
        //version 2
        buff32[1] = 2;
        //number of sections: 2
        buff32[2] = 2;
        //id section 1
        buff32[3] = 1;
        const n8 = this.n32 * 4;
        //id section 1 length in 64bytes
        const idSection1length = 8 + n8;
        const idSection1lengthHex = idSection1length.toString(16);
        // NOTE(review): this hex-slice split of the 64-bit length is only
        // correct while the length fits in 32 bits (<= 8 hex digits);
        // slice(8, 16) is then "" and parseInt("") is NaN, which the
        // Uint32Array coerces to 0 for the high word.
        buff32[4] = parseInt(idSection1lengthHex.slice(0, 8), 16);
        buff32[5] = parseInt(idSection1lengthHex.slice(8, 16), 16);
        //this.n32
        buff32[6] = n8;
        //prime number
        this.instance.exports.getRawPrime();
        var pos = 7;
        for (let j = 0; j < this.n32; j++) {
            buff32[pos + j] = this.instance.exports.readSharedRWMemory(j);
        }
        pos += this.n32;
        // witness size
        buff32[pos] = this.witnessSize;
        pos++;
        //id section 2
        buff32[pos] = 2;
        pos++;
        // section 2 length
        const idSection2length = n8 * this.witnessSize;
        const idSection2lengthHex = idSection2length.toString(16);
        // Same 32-bit-only caveat as for section 1 above.
        buff32[pos] = parseInt(idSection2lengthHex.slice(0, 8), 16);
        buff32[pos + 1] = parseInt(idSection2lengthHex.slice(8, 16), 16);
        pos += 2;
        for (let i = 0; i < this.witnessSize; i++) {
            this.instance.exports.getWitness(i);
            for (let j = 0; j < this.n32; j++) {
                buff32[pos + j] = this.instance.exports.readSharedRWMemory(j);
            }
            pos += this.n32;
        }
        return buff;
    }
}
|
|
/**
 * Split a non-negative BigInt into 32-bit limbs, most significant first.
 * When `size` is given, the result is left-padded with zeros to that length.
 * @param {BigInt} rem - value to decompose
 * @param {number} size - desired limb count (falsy to skip padding)
 * @returns {number[]} big-endian array of 32-bit limbs
 */
function toArray32(rem, size) {
    const limbs = [];
    const base = BigInt(0x100000000);
    let value = rem;
    // Peel off the least-significant limb each round, then reverse so the
    // most significant limb ends up first.
    while (value) {
        limbs.push(Number(value % base));
        value = value / base;
    }
    limbs.reverse();
    if (size) {
        // Zero-pad on the left up to the requested width.
        while (limbs.length < size) {
            limbs.unshift(0);
        }
    }
    return limbs;
}
|
|
/**
 * Reassemble big-endian 32-bit limbs into a single BigInt.
 * Inverse of `toArray32` (ignoring padding).
 * @param {number[]|Uint32Array} arr - limbs, most significant first
 * @returns {BigInt}
 */
function fromArray32(arr) {
    const base = BigInt(0x100000000);
    // Horner-style fold: shift the accumulator left one limb, add the next.
    return arr.reduce((acc, limb) => acc * base + BigInt(limb), BigInt(0));
}
|
|
/**
 * Flatten an arbitrarily nested array into a single flat array.
 * A non-array input yields a one-element array containing it.
 * @param a - value or nested array of values
 * @returns {Array} flat list of leaf values, in depth-first order
 */
function flatArray(a) {
    const out = [];
    // Depth-first walk; index loop (not for...of) so sparse-array holes
    // contribute `undefined`, matching the original recursion.
    const walk = (value) => {
        if (Array.isArray(value)) {
            for (let idx = 0; idx < value.length; idx++) {
                walk(value[idx]);
            }
        } else {
            out.push(value);
        }
    };
    walk(a);
    return out;
}
|
|
/**
 * 64-bit FNV-1a hash of a string (hashed per UTF-16 code unit),
 * returned as a 16-character zero-padded lowercase hex string.
 * Used to address circuit input signals by name.
 * @param {string} str
 * @returns {string} 16 hex digits
 */
function fnvHash(str) {
    const MASK_64 = BigInt(2) ** BigInt(64);
    const FNV_PRIME = BigInt(0x100000001B3);
    // FNV-1a 64-bit offset basis.
    let acc = BigInt("0xCBF29CE484222325");
    for (let i = 0; i < str.length; i++) {
        // XOR in the code unit, multiply by the prime, wrap to 64 bits.
        acc = ((acc ^ BigInt(str.charCodeAt(i))) * FNV_PRIME) % MASK_64;
    }
    return acc.toString(16).padStart(16, "0");
}
|
|
|
|
/**
 * Concatenate Uint8Arrays
 * @param input
 * @returns concatenation of all Uint8Array received as input
 */
function concatenate(...input) {
    // Size the output buffer once, then copy each chunk at its offset.
    const totalLength = input.reduce((sum, chunk) => sum + chunk.length, 0);
    const result = new Uint8Array(totalLength);
    input.reduce((offset, chunk) => {
        result.set(chunk, offset);
        return offset + chunk.length;
    }, 0);
    return result;
}
|
|
// Shared encoder turning JS strings into UTF-8 bytes for the wasm bindings.
const stringEncoder = new TextEncoder();
// Passed to newRLN() in create$1 — presumably the RLN membership Merkle
// tree depth (2^20 slots); TODO confirm against zerokit's API.
const DEPTH = 20;
|
|
/**
 * Fetch the circom-generated RLN witness-calculation wasm asset and build
 * a WitnessCalculator from it.
 */
async function loadWitnessCalculator() {
    // Double new URL() is bundler-emitted asset resolution; the inner call
    // produces an absolute href relative to this module.
    const assetHref = new URL('assets/rln-fb4d7b4b.wasm', import.meta.url).href;
    const response = await fetch(new URL(assetHref, import.meta.url));
    const wasmBytes = new Uint8Array(await response.arrayBuffer());
    return await builder(wasmBytes, false);
}
|
|
/**
 * Fetch the RLN proving key (.zkey) asset as raw bytes.
 */
async function loadZkey() {
    // Double new URL() is bundler-emitted asset resolution; the inner call
    // produces an absolute href relative to this module.
    const assetHref = new URL('assets/rln_final-a641c06e.zkey', import.meta.url).href;
    const response = await fetch(new URL(assetHref, import.meta.url));
    return new Uint8Array(await response.arrayBuffer());
}
|
|
/**
 * Create an instance of RLN
 * @returns RLNInstance
 */
async function create$1() {
    // Wasm module must be initialized before any binding is used.
    await init();
    init_panic_hook();
    // The witness calculator and the proving key are independent downloads;
    // fetch them in parallel instead of awaiting them one after the other.
    const [witnessCalculator, zkey] = await Promise.all([
        loadWitnessCalculator(),
        loadZkey(),
    ]);
    const vkey = stringEncoder.encode(JSON.stringify(verificationKey));
    const zkRLN = newRLN(DEPTH, zkey, vkey);
    return new RLNInstance(zkRLN, witnessCalculator);
}
|
|
/**
 * An RLN membership key pair: the secret identity key together with the
 * identity commitment bytes returned by zerokit.
 */
class MembershipKey {
    constructor(IDKey, IDCommitment) {
        this.IDKey = IDKey;
        this.IDCommitment = IDCommitment;
    }

    /**
     * Split zerokit's serialized key material, laid out as
     * id_key<32> | id_commitment<rest>, into a MembershipKey.
     * The returned fields are views into `memKeys` (subarray does not copy).
     */
    static fromBytes(memKeys) {
        const KEY_BYTES = 32;
        const secret = memKeys.subarray(0, KEY_BYTES);
        const commitment = memKeys.subarray(KEY_BYTES);
        return new MembershipKey(secret, commitment);
    }
}
|
|
// Byte offsets of each field boundary inside a serialized RLN proof, as
// sliced by the Proof constructor below:
// proof<128> | root<32> | epoch<32> | share_x<32> | share_y<32> |
// nullifier<32> | rln_identifier<32>  (total 320 bytes)
const proofOffset = 128;
const rootOffset = proofOffset + 32;
const epochOffset = rootOffset + 32;
const shareXOffset = epochOffset + 32;
const shareYOffset = shareXOffset + 32;
const nullifierOffset = shareYOffset + 32;
const rlnIdentifierOffset = nullifierOffset + 32;
|
|
/**
 * Parsed view of a serialized RLN proof. Fields are zero-copy subarray
 * views into the input bytes, sliced at the offsets defined above:
 * proof<128> | root<32> | epoch<32> | share_x<32> | share_y<32> |
 * nullifier<32> | rln_identifier<32>
 */
class Proof {
    /**
     * @param proofBytes serialized proof, at least rlnIdentifierOffset bytes
     * @throws the string "invalid proof" when the buffer is too short
     *   (kept as a string throw for caller compatibility)
     */
    constructor(proofBytes) {
        if (proofBytes.length < rlnIdentifierOffset) {
            throw "invalid proof";
        }
        // Field name with its [start, end) boundaries in the byte layout.
        const layout = [
            ["proof", 0, proofOffset],
            ["merkleRoot", proofOffset, rootOffset],
            ["epoch", rootOffset, epochOffset],
            ["shareX", epochOffset, shareXOffset],
            ["shareY", shareXOffset, shareYOffset],
            ["nullifier", shareYOffset, nullifierOffset],
            ["rlnIdentifier", nullifierOffset, rlnIdentifierOffset],
        ];
        for (const [field, start, end] of layout) {
            this[field] = proofBytes.subarray(start, end);
        }
    }
}
|
|
/**
 * Serialize a Proof back into its byte layout — the inverse of
 * `new Proof(bytes)`.
 * @param p Proof-like object with the seven field views
 * @returns {Uint8Array} fields concatenated in wire order
 */
function proofToBytes(p) {
    const fields = [
        p.proof,
        p.merkleRoot,
        p.epoch,
        p.shareX,
        p.shareY,
        p.nullifier,
        p.rlnIdentifier,
    ];
    return concatenate(...fields);
}
|
|
/**
 * High-level wrapper pairing a zerokit RLN wasm handle with the circom
 * witness calculator for the RLN circuit. All heavy lifting is delegated
 * to module-level wasm bindings (generateMembershipKey, insertMember,
 * getRoot, verifyRLNProof, verifyWithRoots, ...), which are defined
 * elsewhere in this bundle.
 */
class RLNInstance {
    /**
     * @param zkRLN opaque handle returned by newRLN()
     * @param witnessCalculator WitnessCalculator built for the RLN circuit
     */
    constructor(zkRLN, witnessCalculator) {
        this.zkRLN = zkRLN;
        this.witnessCalculator = witnessCalculator;
    }
    /** Generate a fresh membership key pair via the wasm binding. */
    generateMembershipKey() {
        // Resolves to the module-level wasm binding, not this method —
        // class method names are not in scope inside method bodies.
        const memKeys = generateMembershipKey(this.zkRLN);
        return MembershipKey.fromBytes(memKeys);
    }
    /**
     * Deterministically derive a membership key pair from a string seed
     * (seed is UTF-8 encoded before being handed to the wasm binding).
     */
    generateSeededMembershipKey(seed) {
        const uint8Seed = stringEncoder.encode(seed);
        const memKeys = generateSeededMembershipKey(this.zkRLN, uint8Seed);
        return MembershipKey.fromBytes(memKeys);
    }
    /** Insert a member's ID commitment into the membership tree. */
    insertMember(idCommitment) {
        insertMember(this.zkRLN, idCommitment);
    }
    /** @returns the current membership tree root from the wasm side. */
    getMerkleRoot() {
        return getRoot(this.zkRLN);
    }
    /**
     * Serialize a message for witness/proof generation.
     * Layout: [ id_key<32> | id_index<8> | epoch<32> | signal_len<8> | signal<var> ]
     * Index and length are 8-byte little-endian integers (writeUIntLE).
     */
    serializeMessage(uint8Msg, memIndex, epoch, idKey) {
        // calculate message length
        const msgLen = writeUIntLE(new Uint8Array(8), uint8Msg.length, 0, 8);
        // Converting index to LE bytes
        const memIndexBytes = writeUIntLE(new Uint8Array(8), memIndex, 0, 8);
        // [ id_key<32> | id_index<8> | epoch<32> | signal_len<8> | signal<var> ]
        return concatenate(idKey, memIndexBytes, epoch, msgLen, uint8Msg);
    }
    /**
     * Generate an RLN proof for `msg`.
     * @param msg Uint8Array message (the signal)
     * @param index membership index of the prover; must be >= 0
     * @param epoch 32-byte epoch, a Date (converted), or undefined/null
     *   (defaults to the current time's epoch)
     * @param idKey 32-byte identity secret
     * @returns {Promise<Proof>}
     * @throws plain strings ("invalid epoch", "invalid id key",
     *   "index must be >= 0") on validation failure — kept as string
     *   throws for compatibility with existing catch sites.
     */
    async generateRLNProof(msg, index, epoch, idKey) {
        // `== undefined` deliberately matches both undefined and null.
        if (epoch == undefined) {
            epoch = epochIntToBytes(dateToEpoch(new Date()));
        }
        else if (epoch instanceof Date) {
            epoch = epochIntToBytes(dateToEpoch(epoch));
        }
        if (epoch.length != 32)
            throw "invalid epoch";
        if (idKey.length != 32)
            throw "invalid id key";
        if (index < 0)
            throw "index must be >= 0";
        const serialized_msg = this.serializeMessage(msg, index, epoch, idKey);
        const rlnWitness = getSerializedRLNWitness(this.zkRLN, serialized_msg);
        const inputs = RLNWitnessToJson(this.zkRLN, rlnWitness);
        const calculatedWitness = await this.witnessCalculator.calculateWitness(inputs, false); // no sanity check being used in zerokit
        const proofBytes = generate_rln_proof_with_witness(this.zkRLN, calculatedWitness, rlnWitness);
        return new Proof(proofBytes);
    }
    /**
     * Verify a proof (Proof object or already-serialized bytes) against
     * `msg` via the wasm verifyRLNProof binding.
     */
    verifyRLNProof(proof, msg) {
        let pBytes;
        if (proof instanceof Uint8Array) {
            pBytes = proof;
        }
        else {
            pBytes = proofToBytes(proof);
        }
        // calculate message length
        const msgLen = writeUIntLE(new Uint8Array(8), msg.length, 0, 8);
        return verifyRLNProof(this.zkRLN, concatenate(pBytes, msgLen, msg));
    }
    /**
     * Verify a proof, passing the current local Merkle root as the set of
     * acceptable roots.
     */
    verifyWithRoots(proof, msg) {
        let pBytes;
        if (proof instanceof Uint8Array) {
            pBytes = proof;
        }
        else {
            pBytes = proofToBytes(proof);
        }
        // calculate message length
        const msgLen = writeUIntLE(new Uint8Array(8), msg.length, 0, 8);
        // obtain root
        const root = getRoot(this.zkRLN);
        return verifyWithRoots(this.zkRLN, concatenate(pBytes, msgLen, msg), root);
    }
    /**
     * Verify a proof passing an empty root set — presumably zerokit then
     * skips the root check entirely; TODO confirm zerokit semantics.
     */
    verifyWithNoRoot(proof, msg) {
        let pBytes;
        if (proof instanceof Uint8Array) {
            pBytes = proof;
        }
        else {
            pBytes = proofToBytes(proof);
        }
        // calculate message length
        const msgLen = writeUIntLE(new Uint8Array(8), msg.length, 0, 8);
        return verifyWithRoots(this.zkRLN, concatenate(pBytes, msgLen, msg), new Uint8Array());
    }
}
|
|
|
|
// Bundler-generated frozen namespace object standing in for the
// dynamically imported rln module; consumed by create() below.
var rln = /*#__PURE__*/Object.freeze({
    __proto__: null,
    create: create$1,
    MembershipKey: MembershipKey,
    Proof: Proof,
    proofToBytes: proofToBytes,
    RLNInstance: RLNInstance
});
|
|
|
|
// reexport the create function, dynamically imported from rln.ts
/**
 * Create an RLN instance. The Promise.resolve().then(...) shape is the
 * bundler's rewrite of a dynamic `import()` and should be left as-is.
 * @returns {Promise<RLNInstance>}
 */
async function create() {
    // A dependency graph that contains any wasm must all be imported
    // asynchronously. This file does the single async import, so
    // that no one else needs to worry about it again.
    const rlnModule = await Promise.resolve().then(function () { return rln; });
    return await rlnModule.create();
}
|
|
|
|
export { MembershipKey, RLNDecoder, RLNEncoder, create };
|