#!/usr/bin/env node
/* eslint-disable */

// Strict-mode guard: assigning a new property to a frozen object is a silent
// no-op in sloppy mode but throws a TypeError in strict mode. If the throw
// happens, some tool (typically Babel) has strict-mode-ified this file, which
// breaks the bundled Emscripten code further below — fail loudly instead.
try {
  Object.freeze({}).detectStrictMode = true;
} catch (error) {
  throw new Error(`The whole PnP file got strict-mode-ified, which is known to break (Emscripten libraries aren't strict mode). This usually happens when the file goes through Babel.`);
}

// Capture the real CommonJS `module` object before the webpack bundle below
// shadows it inside its own module wrappers.
var __non_webpack_module__ = module;
/**
 * Builds the Plug'n'Play runtime state for this workspace.
 *
 * The data below is machine-generated by Yarn: it maps every package
 * (name + reference) to its on-disk location and its resolved dependency
 * list. Do not edit it by hand — regenerate it with Yarn instead.
 *
 * @param {Function} hydrateRuntimeState - helper from @yarnpkg/pnp that turns
 *   this raw serialized state into the live resolution tables.
 * @param {string} [basePath] - directory the relative `packageLocation`
 *   entries are resolved against; defaults to this file's directory.
 * @returns {*} whatever hydrateRuntimeState returns (the hydrated PnP state).
 */
function $$SETUP_STATE(hydrateRuntimeState, basePath) {
  return hydrateRuntimeState({
    "__info": [
      "This file is automatically generated. Do not touch it, or risk",
      "your modifications being lost. We also recommend you not to read",
      "it either without using the @yarnpkg/pnp package, as the data layout",
      "is entirely unspecified and WILL change from a version to another."
    ],
    "dependencyTreeRoots": [
      {"name": "dappconnect-sdks", "reference": "workspace:."},
      {"name": "status-communities", "reference": "workspace:packages/status-communities"}
    ],
    "enableTopLevelFallback": true,
    "ignorePatternData": "(^(?:\\.yarn\\/sdks(?:\\/(?!\\.{1,2}(?:\\/|$))(?:(?:(?!(?:^|\\/)\\.{1,2}(?:\\/|$)).)*?)|$))$)",
    "fallbackExclusionList": [
      ["dappconnect-sdks", ["workspace:."]],
      ["status-communities", ["workspace:packages/status-communities"]]
    ],
    "fallbackPool": [],
    "packageRegistryData": [
      [null, [
        [null, {
          "packageLocation": "./",
          "packageDependencies": [["npm-run-all", "npm:4.1.5"], ["prettier", "npm:2.3.2"]],
          "linkType": "SOFT",
        }]
      ]],
      ["@protobufjs/aspromise", [
        ["npm:1.1.2", {
          "packageLocation": "./.yarn/cache/@protobufjs-aspromise-npm-1.1.2-71d00b938f-011fe7ef08.zip/node_modules/@protobufjs/aspromise/",
          "packageDependencies": [["@protobufjs/aspromise", "npm:1.1.2"]],
          "linkType": "HARD",
        }]
      ]],
      ["@protobufjs/base64", [
        ["npm:1.1.2", {
          "packageLocation": "./.yarn/cache/@protobufjs-base64-npm-1.1.2-cd8ca6814a-67173ac34d.zip/node_modules/@protobufjs/base64/",
          "packageDependencies": [["@protobufjs/base64", "npm:1.1.2"]],
          "linkType": "HARD",
        }]
      ]],
      ["@protobufjs/codegen", [
        ["npm:2.0.4", {
          "packageLocation": "./.yarn/cache/@protobufjs-codegen-npm-2.0.4-36e188bbe6-59240c850b.zip/node_modules/@protobufjs/codegen/",
          "packageDependencies": [["@protobufjs/codegen", "npm:2.0.4"]],
          "linkType": "HARD",
        }]
      ]],
      ["@protobufjs/eventemitter", [
        ["npm:1.1.0", {
          "packageLocation": "./.yarn/cache/@protobufjs-eventemitter-npm-1.1.0-029cc7d431-0369163a3d.zip/node_modules/@protobufjs/eventemitter/",
          "packageDependencies": [["@protobufjs/eventemitter", "npm:1.1.0"]],
          "linkType": "HARD",
        }]
      ]],
      ["@protobufjs/fetch", [
        ["npm:1.1.0", {
          "packageLocation": "./.yarn/cache/@protobufjs-fetch-npm-1.1.0-ca857b7df4-3fce7e09eb.zip/node_modules/@protobufjs/fetch/",
          "packageDependencies": [["@protobufjs/fetch", "npm:1.1.0"], ["@protobufjs/aspromise", "npm:1.1.2"], ["@protobufjs/inquire", "npm:1.1.0"]],
          "linkType": "HARD",
        }]
      ]],
      ["@protobufjs/float", [
        ["npm:1.0.2", {
          "packageLocation": "./.yarn/cache/@protobufjs-float-npm-1.0.2-5678f64d08-5781e12412.zip/node_modules/@protobufjs/float/",
          "packageDependencies": [["@protobufjs/float", "npm:1.0.2"]],
          "linkType": "HARD",
        }]
      ]],
      ["@protobufjs/inquire", [
        ["npm:1.1.0", {
          "packageLocation": "./.yarn/cache/@protobufjs-inquire-npm-1.1.0-3c7759e9ce-ca06f02eaf.zip/node_modules/@protobufjs/inquire/",
          "packageDependencies": [["@protobufjs/inquire", "npm:1.1.0"]],
          "linkType": "HARD",
        }]
      ]],
      ["@protobufjs/path", [
        ["npm:1.1.2", {
          "packageLocation": "./.yarn/cache/@protobufjs-path-npm-1.1.2-641d08de76-856eeb532b.zip/node_modules/@protobufjs/path/",
          "packageDependencies": [["@protobufjs/path", "npm:1.1.2"]],
          "linkType": "HARD",
        }]
      ]],
      ["@protobufjs/pool", [
        ["npm:1.1.0", {
          "packageLocation": "./.yarn/cache/@protobufjs-pool-npm-1.1.0-47a76f96a1-d6a34fbbd2.zip/node_modules/@protobufjs/pool/",
          "packageDependencies": [["@protobufjs/pool", "npm:1.1.0"]],
          "linkType": "HARD",
        }]
      ]],
      ["@protobufjs/utf8", [
        ["npm:1.1.0", {
          "packageLocation": "./.yarn/cache/@protobufjs-utf8-npm-1.1.0-02c590807c-f9bf3163d1.zip/node_modules/@protobufjs/utf8/",
          "packageDependencies": [["@protobufjs/utf8", "npm:1.1.0"]],
          "linkType": "HARD",
        }]
      ]],
      ["@types/long", [
        ["npm:4.0.1", {
          "packageLocation": "./.yarn/cache/@types-long-npm-4.0.1-022c8b6e77-ff9653c33f.zip/node_modules/@types/long/",
          "packageDependencies": [["@types/long", "npm:4.0.1"]],
          "linkType": "HARD",
        }]
      ]],
      ["@types/node", [
        ["npm:16.9.1", {
          "packageLocation": "./.yarn/cache/@types-node-npm-16.9.1-bde6d3b0c9-41afcf183a.zip/node_modules/@types/node/",
          "packageDependencies": [["@types/node", "npm:16.9.1"]],
          "linkType": "HARD",
        }]
      ]],
      ["@types/object-hash", [
        ["npm:1.3.4", {
          "packageLocation": "./.yarn/cache/@types-object-hash-npm-1.3.4-3b3e2e44e8-fe4aa04142.zip/node_modules/@types/object-hash/",
          "packageDependencies": [["@types/object-hash", "npm:1.3.4"]],
          "linkType": "HARD",
        }]
      ]],
      ["@types/prettier", [
        ["npm:1.19.1", {
          "packageLocation": "./.yarn/cache/@types-prettier-npm-1.19.1-396f22bcd2-d34229c37d.zip/node_modules/@types/prettier/",
          "packageDependencies": [["@types/prettier", "npm:1.19.1"]],
          "linkType": "HARD",
        }]
      ]],
      ["ansi-styles", [
        ["npm:3.2.1", {
          "packageLocation": "./.yarn/cache/ansi-styles-npm-3.2.1-8cb8107983-d85ade01c1.zip/node_modules/ansi-styles/",
          "packageDependencies": [["ansi-styles", "npm:3.2.1"], ["color-convert", "npm:1.9.3"]],
          "linkType": "HARD",
        }]
      ]],
      ["balanced-match", [
        ["npm:1.0.2", {
          "packageLocation": "./.yarn/cache/balanced-match-npm-1.0.2-a53c126459-9706c088a2.zip/node_modules/balanced-match/",
          "packageDependencies": [["balanced-match", "npm:1.0.2"]],
          "linkType": "HARD",
        }]
      ]],
      ["brace-expansion", [
        ["npm:1.1.11", {
          "packageLocation": "./.yarn/cache/brace-expansion-npm-1.1.11-fb95eb05ad-faf34a7bb0.zip/node_modules/brace-expansion/",
          "packageDependencies": [["brace-expansion", "npm:1.1.11"], ["balanced-match", "npm:1.0.2"], ["concat-map", "npm:0.0.1"]],
          "linkType": "HARD",
        }]
      ]],
      ["call-bind", [
        ["npm:1.0.2", {
          "packageLocation": "./.yarn/cache/call-bind-npm-1.0.2-c957124861-f8e31de9d1.zip/node_modules/call-bind/",
          "packageDependencies": [["call-bind", "npm:1.0.2"], ["function-bind", "npm:1.1.1"], ["get-intrinsic", "npm:1.1.1"]],
          "linkType": "HARD",
        }]
      ]],
      ["chalk", [
        ["npm:2.4.2", {
          "packageLocation": "./.yarn/cache/chalk-npm-2.4.2-3ea16dd91e-ec3661d38f.zip/node_modules/chalk/",
          "packageDependencies": [["chalk", "npm:2.4.2"], ["ansi-styles", "npm:3.2.1"], ["escape-string-regexp", "npm:1.0.5"], ["supports-color", "npm:5.5.0"]],
          "linkType": "HARD",
        }]
      ]],
      ["color-convert", [
        ["npm:1.9.3", {
          "packageLocation": "./.yarn/cache/color-convert-npm-1.9.3-1fe690075e-fd7a64a17c.zip/node_modules/color-convert/",
          "packageDependencies": [["color-convert", "npm:1.9.3"], ["color-name", "npm:1.1.3"]],
          "linkType": "HARD",
        }]
      ]],
      ["color-name", [
        ["npm:1.1.3", {
          "packageLocation": "./.yarn/cache/color-name-npm-1.1.3-728b7b5d39-09c5d3e33d.zip/node_modules/color-name/",
          "packageDependencies": [["color-name", "npm:1.1.3"]],
          "linkType": "HARD",
        }]
      ]],
      ["concat-map", [
        ["npm:0.0.1", {
          "packageLocation": "./.yarn/cache/concat-map-npm-0.0.1-85a921b7ee-902a9f5d89.zip/node_modules/concat-map/",
          "packageDependencies": [["concat-map", "npm:0.0.1"]],
          "linkType": "HARD",
        }]
      ]],
      ["cross-spawn", [
        ["npm:6.0.5", {
          "packageLocation": "./.yarn/cache/cross-spawn-npm-6.0.5-2deab6c280-f893bb0d96.zip/node_modules/cross-spawn/",
          "packageDependencies": [["cross-spawn", "npm:6.0.5"], ["nice-try", "npm:1.0.5"], ["path-key", "npm:2.0.1"], ["semver", "npm:5.7.1"], ["shebang-command", "npm:1.2.0"], ["which", "npm:1.3.1"]],
          "linkType": "HARD",
        }]
      ]],
      ["dappconnect-sdks", [
        ["workspace:.", {
          "packageLocation": "./",
          "packageDependencies": [["dappconnect-sdks", "workspace:."], ["npm-run-all", "npm:4.1.5"], ["prettier", "npm:2.3.2"]],
          "linkType": "SOFT",
        }]
      ]],
      ["dataloader", [
        ["npm:1.4.0", {
          "packageLocation": "./.yarn/cache/dataloader-npm-1.4.0-ba03bd2183-e2c93d43af.zip/node_modules/dataloader/",
          "packageDependencies": [["dataloader", "npm:1.4.0"]],
          "linkType": "HARD",
        }]
      ]],
      ["define-properties", [
        ["npm:1.1.3", {
          "packageLocation": "./.yarn/cache/define-properties-npm-1.1.3-0f3115e2b9-da80dba55d.zip/node_modules/define-properties/",
          "packageDependencies": [["define-properties", "npm:1.1.3"], ["object-keys", "npm:1.1.1"]],
          "linkType": "HARD",
        }]
      ]],
      ["error-ex", [
        ["npm:1.3.2", {
          "packageLocation": "./.yarn/cache/error-ex-npm-1.3.2-5654f80c0f-c1c2b8b65f.zip/node_modules/error-ex/",
          "packageDependencies": [["error-ex", "npm:1.3.2"], ["is-arrayish", "npm:0.2.1"]],
          "linkType": "HARD",
        }]
      ]],
      ["es-abstract", [
        ["npm:1.18.5", {
          "packageLocation": "./.yarn/cache/es-abstract-npm-1.18.5-2fdd13de16-9b64145b07.zip/node_modules/es-abstract/",
          "packageDependencies": [
            ["es-abstract", "npm:1.18.5"],
            ["call-bind", "npm:1.0.2"],
            ["es-to-primitive", "npm:1.2.1"],
            ["function-bind", "npm:1.1.1"],
            ["get-intrinsic", "npm:1.1.1"],
            ["has", "npm:1.0.3"],
            ["has-symbols", "npm:1.0.2"],
            ["internal-slot", "npm:1.0.3"],
            ["is-callable", "npm:1.2.4"],
            ["is-negative-zero", "npm:2.0.1"],
            ["is-regex", "npm:1.1.4"],
            ["is-string", "npm:1.0.7"],
            ["object-inspect", "npm:1.11.0"],
            ["object-keys", "npm:1.1.1"],
            ["object.assign", "npm:4.1.2"],
            ["string.prototype.trimend", "npm:1.0.4"],
            ["string.prototype.trimstart", "npm:1.0.4"],
            ["unbox-primitive", "npm:1.0.1"]
          ],
          "linkType": "HARD",
        }]
      ]],
      ["es-to-primitive", [
        ["npm:1.2.1", {
          "packageLocation": "./.yarn/cache/es-to-primitive-npm-1.2.1-b7a7eac6c5-4ead6671a2.zip/node_modules/es-to-primitive/",
          "packageDependencies": [["es-to-primitive", "npm:1.2.1"], ["is-callable", "npm:1.2.4"], ["is-date-object", "npm:1.0.5"], ["is-symbol", "npm:1.0.4"]],
          "linkType": "HARD",
        }]
      ]],
      ["escape-string-regexp", [
        ["npm:1.0.5", {
          "packageLocation": "./.yarn/cache/escape-string-regexp-npm-1.0.5-3284de402f-6092fda75c.zip/node_modules/escape-string-regexp/",
          "packageDependencies": [["escape-string-regexp", "npm:1.0.5"]],
          "linkType": "HARD",
        }]
      ]],
      ["function-bind", [
        ["npm:1.1.1", {
          "packageLocation": "./.yarn/cache/function-bind-npm-1.1.1-b56b322ae9-b32fbaebb3.zip/node_modules/function-bind/",
          "packageDependencies": [["function-bind", "npm:1.1.1"]],
          "linkType": "HARD",
        }]
      ]],
      ["get-intrinsic", [
        ["npm:1.1.1", {
          "packageLocation": "./.yarn/cache/get-intrinsic-npm-1.1.1-7e868745da-a9fe2ca8fa.zip/node_modules/get-intrinsic/",
          "packageDependencies": [["get-intrinsic", "npm:1.1.1"], ["function-bind", "npm:1.1.1"], ["has", "npm:1.0.3"], ["has-symbols", "npm:1.0.2"]],
          "linkType": "HARD",
        }]
      ]],
      ["graceful-fs", [
        ["npm:4.2.8", {
          "packageLocation": "./.yarn/cache/graceful-fs-npm-4.2.8-37c16fc3d3-5d224c8969.zip/node_modules/graceful-fs/",
          "packageDependencies": [["graceful-fs", "npm:4.2.8"]],
          "linkType": "HARD",
        }]
      ]],
      ["has", [
        ["npm:1.0.3", {
          "packageLocation": "./.yarn/cache/has-npm-1.0.3-b7f00631c1-b9ad53d53b.zip/node_modules/has/",
          "packageDependencies": [["has", "npm:1.0.3"], ["function-bind", "npm:1.1.1"]],
          "linkType": "HARD",
        }]
      ]],
      ["has-bigints", [
        ["npm:1.0.1", {
          "packageLocation": "./.yarn/cache/has-bigints-npm-1.0.1-1b93717a74-44ab558681.zip/node_modules/has-bigints/",
          "packageDependencies": [["has-bigints", "npm:1.0.1"]],
          "linkType": "HARD",
        }]
      ]],
      ["has-flag", [
        ["npm:3.0.0", {
          "packageLocation": "./.yarn/cache/has-flag-npm-3.0.0-16ac11fe05-4a15638b45.zip/node_modules/has-flag/",
          "packageDependencies": [["has-flag", "npm:3.0.0"]],
          "linkType": "HARD",
        }]
      ]],
      ["has-symbols", [
        ["npm:1.0.2", {
          "packageLocation": "./.yarn/cache/has-symbols-npm-1.0.2-50e53af115-2309c42607.zip/node_modules/has-symbols/",
          "packageDependencies": [["has-symbols", "npm:1.0.2"]],
          "linkType": "HARD",
        }]
      ]],
      ["has-tostringtag", [
        ["npm:1.0.0", {
          "packageLocation": "./.yarn/cache/has-tostringtag-npm-1.0.0-b1fcf3ab55-cc12eb28cb.zip/node_modules/has-tostringtag/",
          "packageDependencies": [["has-tostringtag", "npm:1.0.0"], ["has-symbols", "npm:1.0.2"]],
          "linkType": "HARD",
        }]
      ]],
      ["hosted-git-info", [
        ["npm:2.8.9", {
          "packageLocation": "./.yarn/cache/hosted-git-info-npm-2.8.9-62c44fa93f-c955394bda.zip/node_modules/hosted-git-info/",
          "packageDependencies": [["hosted-git-info", "npm:2.8.9"]],
          "linkType": "HARD",
        }]
      ]],
      ["internal-slot", [
        ["npm:1.0.3", {
          "packageLocation": "./.yarn/cache/internal-slot-npm-1.0.3-9e05eea002-1944f92e98.zip/node_modules/internal-slot/",
          "packageDependencies": [["internal-slot", "npm:1.0.3"], ["get-intrinsic", "npm:1.1.1"], ["has", "npm:1.0.3"], ["side-channel", "npm:1.0.4"]],
          "linkType": "HARD",
        }]
      ]],
      ["is-arrayish", [
        ["npm:0.2.1", {
          "packageLocation": "./.yarn/cache/is-arrayish-npm-0.2.1-23927dfb15-eef4417e3c.zip/node_modules/is-arrayish/",
          "packageDependencies": [["is-arrayish", "npm:0.2.1"]],
          "linkType": "HARD",
        }]
      ]],
      ["is-bigint", [
        ["npm:1.0.4", {
          "packageLocation": "./.yarn/cache/is-bigint-npm-1.0.4-31c2eecbc9-c56edfe09b.zip/node_modules/is-bigint/",
          "packageDependencies": [["is-bigint", "npm:1.0.4"], ["has-bigints", "npm:1.0.1"]],
          "linkType": "HARD",
        }]
      ]],
      ["is-boolean-object", [
        ["npm:1.1.2", {
          "packageLocation": "./.yarn/cache/is-boolean-object-npm-1.1.2-ecbd575e6a-c03b23dbaa.zip/node_modules/is-boolean-object/",
          "packageDependencies": [["is-boolean-object", "npm:1.1.2"], ["call-bind", "npm:1.0.2"], ["has-tostringtag", "npm:1.0.0"]],
          "linkType": "HARD",
        }]
      ]],
      ["is-callable", [
        ["npm:1.2.4", {
          "packageLocation": "./.yarn/cache/is-callable-npm-1.2.4-03fc17459c-1a28d57dc4.zip/node_modules/is-callable/",
          "packageDependencies": [["is-callable", "npm:1.2.4"]],
          "linkType": "HARD",
        }]
      ]],
      ["is-core-module", [
        ["npm:2.6.0", {
          "packageLocation": "./.yarn/cache/is-core-module-npm-2.6.0-3684fdf55a-183b3b96fe.zip/node_modules/is-core-module/",
          "packageDependencies": [["is-core-module", "npm:2.6.0"], ["has", "npm:1.0.3"]],
          "linkType": "HARD",
        }]
      ]],
      ["is-date-object", [
        ["npm:1.0.5", {
          "packageLocation": "./.yarn/cache/is-date-object-npm-1.0.5-88f3d08b5e-baa9077cdf.zip/node_modules/is-date-object/",
          "packageDependencies": [["is-date-object", "npm:1.0.5"], ["has-tostringtag", "npm:1.0.0"]],
          "linkType": "HARD",
        }]
      ]],
      ["is-negative-zero", [
        ["npm:2.0.1", {
          "packageLocation": "./.yarn/cache/is-negative-zero-npm-2.0.1-d8f3dbcfe1-a46f2e0cb5.zip/node_modules/is-negative-zero/",
          "packageDependencies": [["is-negative-zero", "npm:2.0.1"]],
          "linkType": "HARD",
        }]
      ]],
      ["is-number-object", [
        ["npm:1.0.6", {
          "packageLocation": "./.yarn/cache/is-number-object-npm-1.0.6-88e8d0e936-c697704e8f.zip/node_modules/is-number-object/",
          "packageDependencies": [["is-number-object", "npm:1.0.6"], ["has-tostringtag", "npm:1.0.0"]],
          "linkType": "HARD",
        }]
      ]],
      ["is-regex", [
        ["npm:1.1.4", {
          "packageLocation": "./.yarn/cache/is-regex-npm-1.1.4-cca193ef11-362399b335.zip/node_modules/is-regex/",
          "packageDependencies": [["is-regex", "npm:1.1.4"], ["call-bind", "npm:1.0.2"], ["has-tostringtag", "npm:1.0.0"]],
          "linkType": "HARD",
        }]
      ]],
      ["is-string", [
        ["npm:1.0.7", {
          "packageLocation": "./.yarn/cache/is-string-npm-1.0.7-9f7066daed-323b3d0462.zip/node_modules/is-string/",
          "packageDependencies": [["is-string", "npm:1.0.7"], ["has-tostringtag", "npm:1.0.0"]],
          "linkType": "HARD",
        }]
      ]],
      ["is-symbol", [
        ["npm:1.0.4", {
          "packageLocation": "./.yarn/cache/is-symbol-npm-1.0.4-eb9baac703-92805812ef.zip/node_modules/is-symbol/",
          "packageDependencies": [["is-symbol", "npm:1.0.4"], ["has-symbols", "npm:1.0.2"]],
          "linkType": "HARD",
        }]
      ]],
      ["isexe", [
        ["npm:2.0.0", {
          "packageLocation": "./.yarn/cache/isexe-npm-2.0.0-b58870bd2e-26bf6c5480.zip/node_modules/isexe/",
          "packageDependencies": [["isexe", "npm:2.0.0"]],
          "linkType": "HARD",
        }]
      ]],
      ["json-parse-better-errors", [
        ["npm:1.0.2", {
          "packageLocation": "./.yarn/cache/json-parse-better-errors-npm-1.0.2-7f37637d19-ff2b5ba2a7.zip/node_modules/json-parse-better-errors/",
          "packageDependencies": [["json-parse-better-errors", "npm:1.0.2"]],
          "linkType": "HARD",
        }]
      ]],
      ["load-json-file", [
        ["npm:4.0.0", {
          "packageLocation": "./.yarn/cache/load-json-file-npm-4.0.0-c9f09d85eb-8f5d6d93ba.zip/node_modules/load-json-file/",
          "packageDependencies": [["load-json-file", "npm:4.0.0"], ["graceful-fs", "npm:4.2.8"], ["parse-json", "npm:4.0.0"], ["pify", "npm:3.0.0"], ["strip-bom", "npm:3.0.0"]],
          "linkType": "HARD",
        }]
      ]],
      ["lodash", [
        ["npm:4.17.21", {
          "packageLocation": "./.yarn/cache/lodash-npm-4.17.21-6382451519-eb835a2e51.zip/node_modules/lodash/",
          "packageDependencies": [["lodash", "npm:4.17.21"]],
          "linkType": "HARD",
        }]
      ]],
      ["long", [
        ["npm:4.0.0", {
          "packageLocation": "./.yarn/cache/long-npm-4.0.0-ecd96a31ed-16afbe8f74.zip/node_modules/long/",
          "packageDependencies": [["long", "npm:4.0.0"]],
          "linkType": "HARD",
        }]
      ]],
      ["memorystream", [
        ["npm:0.3.1", {
          "packageLocation": "./.yarn/cache/memorystream-npm-0.3.1-ae973f1d16-f18b42440d.zip/node_modules/memorystream/",
          "packageDependencies": [["memorystream", "npm:0.3.1"]],
          "linkType": "HARD",
        }]
      ]],
      ["minimatch", [
        ["npm:3.0.4", {
          "packageLocation": "./.yarn/cache/minimatch-npm-3.0.4-6e76f51c23-66ac295f8a.zip/node_modules/minimatch/",
          "packageDependencies": [["minimatch", "npm:3.0.4"], ["brace-expansion", "npm:1.1.11"]],
          "linkType": "HARD",
        }]
      ]],
      ["nice-try", [
        ["npm:1.0.5", {
          "packageLocation": "./.yarn/cache/nice-try-npm-1.0.5-963856b16f-0b4af3b5bb.zip/node_modules/nice-try/",
          "packageDependencies": [["nice-try", "npm:1.0.5"]],
          "linkType": "HARD",
        }]
      ]],
      ["normalize-package-data", [
        ["npm:2.5.0", {
          "packageLocation": "./.yarn/cache/normalize-package-data-npm-2.5.0-af0345deed-7999112efc.zip/node_modules/normalize-package-data/",
          "packageDependencies": [
            ["normalize-package-data", "npm:2.5.0"],
            ["hosted-git-info", "npm:2.8.9"],
            ["resolve", "patch:resolve@npm%3A1.20.0#~builtin<compat/resolve>::version=1.20.0&hash=00b1ff"],
            ["semver", "npm:5.7.1"],
            ["validate-npm-package-license", "npm:3.0.4"]
          ],
          "linkType": "HARD",
        }]
      ]],
      ["npm-run-all", [
        ["npm:4.1.5", {
          "packageLocation": "./.yarn/cache/npm-run-all-npm-4.1.5-3281f1c563-373b72c6a3.zip/node_modules/npm-run-all/",
          "packageDependencies": [
            ["npm-run-all", "npm:4.1.5"],
            ["ansi-styles", "npm:3.2.1"],
            ["chalk", "npm:2.4.2"],
            ["cross-spawn", "npm:6.0.5"],
            ["memorystream", "npm:0.3.1"],
            ["minimatch", "npm:3.0.4"],
            ["pidtree", "npm:0.3.1"],
            ["read-pkg", "npm:3.0.0"],
            ["shell-quote", "npm:1.7.2"],
            ["string.prototype.padend", "npm:3.1.2"]
          ],
          "linkType": "HARD",
        }]
      ]],
      ["object-hash", [
        ["npm:1.3.1", {
          "packageLocation": "./.yarn/cache/object-hash-npm-1.3.1-ea495b8e52-fdcb957a2f.zip/node_modules/object-hash/",
          "packageDependencies": [["object-hash", "npm:1.3.1"]],
          "linkType": "HARD",
        }]
      ]],
      ["object-inspect", [
        ["npm:1.11.0", {
          "packageLocation": "./.yarn/cache/object-inspect-npm-1.11.0-c9d4bd1487-8c64f89ce3.zip/node_modules/object-inspect/",
          "packageDependencies": [["object-inspect", "npm:1.11.0"]],
          "linkType": "HARD",
        }]
      ]],
      ["object-keys", [
        ["npm:1.1.1", {
          "packageLocation": "./.yarn/cache/object-keys-npm-1.1.1-1bf2f1be93-b363c5e764.zip/node_modules/object-keys/",
          "packageDependencies": [["object-keys", "npm:1.1.1"]],
          "linkType": "HARD",
        }]
      ]],
      ["object.assign", [
        ["npm:4.1.2", {
          "packageLocation": "./.yarn/cache/object.assign-npm-4.1.2-d52edada1c-d621d832ed.zip/node_modules/object.assign/",
          "packageDependencies": [["object.assign", "npm:4.1.2"], ["call-bind", "npm:1.0.2"], ["define-properties", "npm:1.1.3"], ["has-symbols", "npm:1.0.2"], ["object-keys", "npm:1.1.1"]],
          "linkType": "HARD",
        }]
      ]],
      ["parse-json", [
        ["npm:4.0.0", {
          "packageLocation": "./.yarn/cache/parse-json-npm-4.0.0-a6f7771010-0fe227d410.zip/node_modules/parse-json/",
          "packageDependencies": [["parse-json", "npm:4.0.0"], ["error-ex", "npm:1.3.2"], ["json-parse-better-errors", "npm:1.0.2"]],
          "linkType": "HARD",
        }]
      ]],
      ["path-key", [
        ["npm:2.0.1", {
          "packageLocation": "./.yarn/cache/path-key-npm-2.0.1-b1a971833d-f7ab0ad42f.zip/node_modules/path-key/",
          "packageDependencies": [["path-key", "npm:2.0.1"]],
          "linkType": "HARD",
        }]
      ]],
      ["path-parse", [
        ["npm:1.0.7", {
          "packageLocation": "./.yarn/cache/path-parse-npm-1.0.7-09564527b7-49abf3d811.zip/node_modules/path-parse/",
          "packageDependencies": [["path-parse", "npm:1.0.7"]],
          "linkType": "HARD",
        }]
      ]],
      ["path-type", [
        ["npm:3.0.0", {
          "packageLocation": "./.yarn/cache/path-type-npm-3.0.0-252361a0eb-735b35e256.zip/node_modules/path-type/",
          "packageDependencies": [["path-type", "npm:3.0.0"], ["pify", "npm:3.0.0"]],
          "linkType": "HARD",
        }]
      ]],
      ["pidtree", [
        ["npm:0.3.1", {
          "packageLocation": "./.yarn/cache/pidtree-npm-0.3.1-70dda1cc59-eb49025099.zip/node_modules/pidtree/",
          "packageDependencies": [["pidtree", "npm:0.3.1"]],
          "linkType": "HARD",
        }]
      ]],
      ["pify", [
        ["npm:3.0.0", {
          "packageLocation": "./.yarn/cache/pify-npm-3.0.0-679ee405c8-6cdcbc3567.zip/node_modules/pify/",
          "packageDependencies": [["pify", "npm:3.0.0"]],
          "linkType": "HARD",
        }]
      ]],
      ["prettier", [
        ["npm:2.3.2", {
          "packageLocation": "./.yarn/cache/prettier-npm-2.3.2-4467ec48dc-17ce5784ac.zip/node_modules/prettier/",
          "packageDependencies": [["prettier", "npm:2.3.2"]],
          "linkType": "HARD",
        }],
        ["npm:2.4.0", {
          "packageLocation": "./.yarn/cache/prettier-npm-2.4.0-9f056d9529-ac1bf07566.zip/node_modules/prettier/",
          "packageDependencies": [["prettier", "npm:2.4.0"]],
          "linkType": "HARD",
        }]
      ]],
      ["protobufjs", [
        ["npm:6.11.2", {
          "packageLocation": "./.yarn/unplugged/protobufjs-npm-6.11.2-9b422ce98e/node_modules/protobufjs/",
          "packageDependencies": [
            ["protobufjs", "npm:6.11.2"],
            ["@protobufjs/aspromise", "npm:1.1.2"],
            ["@protobufjs/base64", "npm:1.1.2"],
            ["@protobufjs/codegen", "npm:2.0.4"],
            ["@protobufjs/eventemitter", "npm:1.1.0"],
            ["@protobufjs/fetch", "npm:1.1.0"],
            ["@protobufjs/float", "npm:1.0.2"],
            ["@protobufjs/inquire", "npm:1.1.0"],
            ["@protobufjs/path", "npm:1.1.2"],
            ["@protobufjs/pool", "npm:1.1.0"],
            ["@protobufjs/utf8", "npm:1.1.0"],
            ["@types/long", "npm:4.0.1"],
            ["@types/node", "npm:16.9.1"],
            ["long", "npm:4.0.0"]
          ],
          "linkType": "HARD",
        }]
      ]],
      ["read-pkg", [
        ["npm:3.0.0", {
          "packageLocation": "./.yarn/cache/read-pkg-npm-3.0.0-41471436cb-398903ebae.zip/node_modules/read-pkg/",
          "packageDependencies": [["read-pkg", "npm:3.0.0"], ["load-json-file", "npm:4.0.0"], ["normalize-package-data", "npm:2.5.0"], ["path-type", "npm:3.0.0"]],
          "linkType": "HARD",
        }]
      ]],
      ["resolve", [
        ["patch:resolve@npm%3A1.20.0#~builtin<compat/resolve>::version=1.20.0&hash=00b1ff", {
          "packageLocation": "./.yarn/cache/resolve-patch-da1bf0dd3c-bed00be983.zip/node_modules/resolve/",
          "packageDependencies": [
            ["resolve", "patch:resolve@npm%3A1.20.0#~builtin<compat/resolve>::version=1.20.0&hash=00b1ff"],
            ["is-core-module", "npm:2.6.0"],
            ["path-parse", "npm:1.0.7"]
          ],
          "linkType": "HARD",
        }]
      ]],
      ["semver", [
        ["npm:5.7.1", {
          "packageLocation": "./.yarn/cache/semver-npm-5.7.1-40bcea106b-57fd0acfd0.zip/node_modules/semver/",
          "packageDependencies": [["semver", "npm:5.7.1"]],
          "linkType": "HARD",
        }]
      ]],
      ["shebang-command", [
        ["npm:1.2.0", {
          "packageLocation": "./.yarn/cache/shebang-command-npm-1.2.0-8990ba5d1d-9eed175030.zip/node_modules/shebang-command/",
          "packageDependencies": [["shebang-command", "npm:1.2.0"], ["shebang-regex", "npm:1.0.0"]],
          "linkType": "HARD",
        }]
      ]],
      ["shebang-regex", [
        ["npm:1.0.0", {
          "packageLocation": "./.yarn/cache/shebang-regex-npm-1.0.0-c3612b74e9-404c5a752c.zip/node_modules/shebang-regex/",
          "packageDependencies": [["shebang-regex", "npm:1.0.0"]],
          "linkType": "HARD",
        }]
      ]],
      ["shell-quote", [
        ["npm:1.7.2", {
          "packageLocation": "./.yarn/cache/shell-quote-npm-1.7.2-8e2768dbb0-efad426fb2.zip/node_modules/shell-quote/",
          "packageDependencies": [["shell-quote", "npm:1.7.2"]],
          "linkType": "HARD",
        }]
      ]],
      ["side-channel", [
        ["npm:1.0.4", {
          "packageLocation": "./.yarn/cache/side-channel-npm-1.0.4-e1f38b9e06-351e41b947.zip/node_modules/side-channel/",
          "packageDependencies": [["side-channel", "npm:1.0.4"], ["call-bind", "npm:1.0.2"], ["get-intrinsic", "npm:1.1.1"], ["object-inspect", "npm:1.11.0"]],
          "linkType": "HARD",
        }]
      ]],
      ["spdx-correct", [
        ["npm:3.1.1", {
          "packageLocation": "./.yarn/cache/spdx-correct-npm-3.1.1-47f574c27a-77ce438344.zip/node_modules/spdx-correct/",
          "packageDependencies": [["spdx-correct", "npm:3.1.1"], ["spdx-expression-parse", "npm:3.0.1"], ["spdx-license-ids", "npm:3.0.10"]],
          "linkType": "HARD",
        }]
      ]],
      ["spdx-exceptions", [
        ["npm:2.3.0", {
          "packageLocation": "./.yarn/cache/spdx-exceptions-npm-2.3.0-2b68dad75a-cb69a26fa3.zip/node_modules/spdx-exceptions/",
          "packageDependencies": [["spdx-exceptions", "npm:2.3.0"]],
          "linkType": "HARD",
        }]
      ]],
      ["spdx-expression-parse", [
        ["npm:3.0.1", {
          "packageLocation": "./.yarn/cache/spdx-expression-parse-npm-3.0.1-b718cbb35a-a1c6e104a2.zip/node_modules/spdx-expression-parse/",
          "packageDependencies": [["spdx-expression-parse", "npm:3.0.1"], ["spdx-exceptions", "npm:2.3.0"], ["spdx-license-ids", "npm:3.0.10"]],
          "linkType": "HARD",
        }]
      ]],
      ["spdx-license-ids", [
        ["npm:3.0.10", {
          "packageLocation": "./.yarn/cache/spdx-license-ids-npm-3.0.10-ef3d6b5c60-94fde6f558.zip/node_modules/spdx-license-ids/",
          "packageDependencies": [["spdx-license-ids", "npm:3.0.10"]],
          "linkType": "HARD",
        }]
      ]],
      ["status-communities", [
        ["workspace:packages/status-communities", {
          "packageLocation": "./packages/status-communities/",
          "packageDependencies": [
            ["status-communities", "workspace:packages/status-communities"],
            ["npm-run-all", "npm:4.1.5"],
            ["protobufjs", "npm:6.11.2"],
            ["ts-proto", "npm:1.83.0"],
            ["typescript", "patch:typescript@npm%3A4.4.3#~builtin<compat/typescript>::version=4.4.3&hash=d8b4e7"]
          ],
          "linkType": "SOFT",
        }]
      ]],
      ["string.prototype.padend", [
        ["npm:3.1.2", {
          "packageLocation": "./.yarn/cache/string.prototype.padend-npm-3.1.2-83f87a893d-be5ff9ac8d.zip/node_modules/string.prototype.padend/",
          "packageDependencies": [["string.prototype.padend", "npm:3.1.2"], ["call-bind", "npm:1.0.2"], ["define-properties", "npm:1.1.3"], ["es-abstract", "npm:1.18.5"]],
          "linkType": "HARD",
        }]
      ]],
      ["string.prototype.trimend", [
        ["npm:1.0.4", {
          "packageLocation": "./.yarn/cache/string.prototype.trimend-npm-1.0.4-a656b8fe24-17e5aa45c3.zip/node_modules/string.prototype.trimend/",
          "packageDependencies": [["string.prototype.trimend", "npm:1.0.4"], ["call-bind", "npm:1.0.2"], ["define-properties", "npm:1.1.3"]],
          "linkType": "HARD",
        }]
      ]],
      ["string.prototype.trimstart", [
        ["npm:1.0.4", {
          "packageLocation": "./.yarn/cache/string.prototype.trimstart-npm-1.0.4-b31f5e7c85-3fb06818d3.zip/node_modules/string.prototype.trimstart/",
          "packageDependencies": [["string.prototype.trimstart", "npm:1.0.4"], ["call-bind", "npm:1.0.2"], ["define-properties", "npm:1.1.3"]],
          "linkType": "HARD",
        }]
      ]],
      ["strip-bom", [
        ["npm:3.0.0", {
          "packageLocation": "./.yarn/cache/strip-bom-npm-3.0.0-71e8f81ff9-8d50ff27b7.zip/node_modules/strip-bom/",
          "packageDependencies": [["strip-bom", "npm:3.0.0"]],
          "linkType": "HARD",
        }]
      ]],
      ["supports-color", [
        ["npm:5.5.0", {
          "packageLocation": "./.yarn/cache/supports-color-npm-5.5.0-183ac537bc-95f6f4ba5a.zip/node_modules/supports-color/",
          "packageDependencies": [["supports-color", "npm:5.5.0"], ["has-flag", "npm:3.0.0"]],
          "linkType": "HARD",
        }]
      ]],
      ["ts-poet", [
        ["npm:4.5.0", {
          "packageLocation": "./.yarn/unplugged/ts-poet-npm-4.5.0-5771bc00e9/node_modules/ts-poet/",
          "packageDependencies": [["ts-poet", "npm:4.5.0"], ["@types/prettier", "npm:1.19.1"], ["lodash", "npm:4.17.21"], ["prettier", "npm:2.4.0"]],
          "linkType": "HARD",
        }]
      ]],
      ["ts-proto", [
        ["npm:1.83.0", {
          "packageLocation": "./.yarn/cache/ts-proto-npm-1.83.0-06a74570ad-57797769b3.zip/node_modules/ts-proto/",
          "packageDependencies": [
            ["ts-proto", "npm:1.83.0"],
            ["@types/object-hash", "npm:1.3.4"],
            ["dataloader", "npm:1.4.0"],
            ["object-hash", "npm:1.3.1"],
            ["protobufjs", "npm:6.11.2"],
            ["ts-poet", "npm:4.5.0"],
            ["ts-proto-descriptors", "npm:1.3.1"]
          ],
          "linkType": "HARD",
        }]
      ]],
      ["ts-proto-descriptors", [
        ["npm:1.3.1", {
          "packageLocation": "./.yarn/cache/ts-proto-descriptors-npm-1.3.1-9f5c70fd7f-ef8acf9231.zip/node_modules/ts-proto-descriptors/",
          "packageDependencies": [["ts-proto-descriptors", "npm:1.3.1"], ["long", "npm:4.0.0"], ["protobufjs", "npm:6.11.2"]],
          "linkType": "HARD",
        }]
      ]],
      ["typescript", [
        ["patch:typescript@npm%3A4.4.3#~builtin<compat/typescript>::version=4.4.3&hash=d8b4e7", {
          "packageLocation": "./.yarn/cache/typescript-patch-2e9f9a47cb-215a59742a.zip/node_modules/typescript/",
          "packageDependencies": [["typescript", "patch:typescript@npm%3A4.4.3#~builtin<compat/typescript>::version=4.4.3&hash=d8b4e7"]],
          "linkType": "HARD",
        }]
      ]],
      ["unbox-primitive", [
        ["npm:1.0.1", {
          "packageLocation": "./.yarn/cache/unbox-primitive-npm-1.0.1-50b9fde246-89d950e18f.zip/node_modules/unbox-primitive/",
          "packageDependencies": [["unbox-primitive", "npm:1.0.1"], ["function-bind", "npm:1.1.1"], ["has-bigints", "npm:1.0.1"], ["has-symbols", "npm:1.0.2"], ["which-boxed-primitive", "npm:1.0.2"]],
          "linkType": "HARD",
        }]
      ]],
      ["validate-npm-package-license", [
        ["npm:3.0.4", {
          "packageLocation": "./.yarn/cache/validate-npm-package-license-npm-3.0.4-7af8adc7a8-35703ac889.zip/node_modules/validate-npm-package-license/",
          "packageDependencies": [["validate-npm-package-license", "npm:3.0.4"], ["spdx-correct", "npm:3.1.1"], ["spdx-expression-parse", "npm:3.0.1"]],
          "linkType": "HARD",
        }]
      ]],
      ["which", [
        ["npm:1.3.1", {
          "packageLocation": "./.yarn/cache/which-npm-1.3.1-f0ebb8bdd8-f2e185c624.zip/node_modules/which/",
          "packageDependencies": [["which", "npm:1.3.1"], ["isexe", "npm:2.0.0"]],
          "linkType": "HARD",
        }]
      ]],
      ["which-boxed-primitive", [
        ["npm:1.0.2", {
          "packageLocation": "./.yarn/cache/which-boxed-primitive-npm-1.0.2-e214f9ae5a-53ce774c73.zip/node_modules/which-boxed-primitive/",
          "packageDependencies": [["which-boxed-primitive", "npm:1.0.2"], ["is-bigint", "npm:1.0.4"], ["is-boolean-object", "npm:1.1.2"], ["is-number-object", "npm:1.0.6"], ["is-string", "npm:1.0.7"], ["is-symbol", "npm:1.0.4"]],
          "linkType": "HARD",
        }]
      ]]
    ]
  }, {basePath: basePath || __dirname});
}
( function webpackUniversalModuleDefinition ( root , factory ) {
if ( typeof exports === 'object' && typeof module === 'object' )
module . exports = factory ( ) ;
else if ( typeof define === 'function' && define . amd )
define ( [ ] , factory ) ;
else if ( typeof exports === 'object' )
exports [ "pnpHook" ] = factory ( ) ;
else
root [ "pnpHook" ] = factory ( ) ;
} ) ( global , function ( ) {
return /******/ ( ( ) => { // webpackBootstrap
/******/ var _ _webpack _modules _ _ = ( {
/***/ 368 :
/***/ ( ( module , _ _unused _webpack _exports , _ _webpack _require _ _ ) => {
var frozenFs = Object . assign ( { } , _ _webpack _require _ _ ( 747 ) ) ;
var Module = typeof Module !== "undefined" ? Module : { } ;
var moduleOverrides = { } ;
var key ;
for ( key in Module ) {
if ( Module . hasOwnProperty ( key ) ) {
moduleOverrides [ key ] = Module [ key ] ;
}
}
var arguments _ = [ ] ;
var thisProgram = "./this.program" ;
var quit _ = function ( status , toThrow ) {
throw toThrow ;
} ;
var ENVIRONMENT _IS _WORKER = false ;
var ENVIRONMENT _IS _NODE = true ;
var scriptDirectory = "" ;
function locateFile ( path ) {
if ( Module [ "locateFile" ] ) {
return Module [ "locateFile" ] ( path , scriptDirectory ) ;
}
return scriptDirectory + path ;
}
var read _ , readBinary ;
var nodeFS ;
var nodePath ;
if ( ENVIRONMENT _IS _NODE ) {
if ( ENVIRONMENT _IS _WORKER ) {
scriptDirectory = _ _webpack _require _ _ ( 622 ) . dirname ( scriptDirectory ) + "/" ;
} else {
scriptDirectory = _ _dirname + "/" ;
}
read _ = function shell _read ( filename , binary ) {
var ret = tryParseAsDataURI ( filename ) ;
if ( ret ) {
return binary ? ret : ret . toString ( ) ;
}
if ( ! nodeFS ) nodeFS = frozenFs ;
if ( ! nodePath ) nodePath = _ _webpack _require _ _ ( 622 ) ;
filename = nodePath [ "normalize" ] ( filename ) ;
return nodeFS [ "readFileSync" ] ( filename , binary ? null : "utf8" ) ;
} ;
readBinary = function readBinary ( filename ) {
var ret = read _ ( filename , true ) ;
if ( ! ret . buffer ) {
ret = new Uint8Array ( ret ) ;
}
assert ( ret . buffer ) ;
return ret ;
} ;
if ( process [ "argv" ] . length > 1 ) {
thisProgram = process [ "argv" ] [ 1 ] . replace ( /\\/g , "/" ) ;
}
arguments _ = process [ "argv" ] . slice ( 2 ) ;
if ( true ) {
module [ "exports" ] = Module ;
}
quit _ = function ( status ) {
process [ "exit" ] ( status ) ;
} ;
Module [ "inspect" ] = function ( ) {
return "[Emscripten Module object]" ;
} ;
} else {
}
var out = Module [ "print" ] || console . log . bind ( console ) ;
var err = Module [ "printErr" ] || console . warn . bind ( console ) ;
for ( key in moduleOverrides ) {
if ( moduleOverrides . hasOwnProperty ( key ) ) {
Module [ key ] = moduleOverrides [ key ] ;
}
}
moduleOverrides = null ;
if ( Module [ "arguments" ] ) arguments _ = Module [ "arguments" ] ;
if ( Module [ "thisProgram" ] ) thisProgram = Module [ "thisProgram" ] ;
if ( Module [ "quit" ] ) quit _ = Module [ "quit" ] ;
var STACK _ALIGN = 16 ;
function alignMemory ( size , factor ) {
if ( ! factor ) factor = STACK _ALIGN ;
return Math . ceil ( size / factor ) * factor ;
}
var tempRet0 = 0 ;
var setTempRet0 = function ( value ) {
tempRet0 = value ;
} ;
var wasmBinary ;
if ( Module [ "wasmBinary" ] ) wasmBinary = Module [ "wasmBinary" ] ;
var noExitRuntime = Module [ "noExitRuntime" ] || true ;
if ( typeof WebAssembly !== "object" ) {
abort ( "no native wasm support detected" ) ;
}
function getValue ( ptr , type , noSafe ) {
type = type || "i8" ;
if ( type . charAt ( type . length - 1 ) === "*" ) type = "i32" ;
switch ( type ) {
case "i1" :
return HEAP8 [ ptr >> 0 ] ;
case "i8" :
return HEAP8 [ ptr >> 0 ] ;
case "i16" :
return HEAP16 [ ptr >> 1 ] ;
case "i32" :
return HEAP32 [ ptr >> 2 ] ;
case "i64" :
return HEAP32 [ ptr >> 2 ] ;
case "float" :
return HEAPF32 [ ptr >> 2 ] ;
case "double" :
return HEAPF64 [ ptr >> 3 ] ;
default :
abort ( "invalid type for getValue: " + type ) ;
}
return null ;
}
var wasmMemory ;
var ABORT = false ;
var EXITSTATUS ;
function assert ( condition , text ) {
if ( ! condition ) {
abort ( "Assertion failed: " + text ) ;
}
}
function getCFunc ( ident ) {
var func = Module [ "_" + ident ] ;
assert (
func ,
"Cannot call unknown function " + ident + ", make sure it is exported"
) ;
return func ;
}
function ccall ( ident , returnType , argTypes , args , opts ) {
var toC = {
string : function ( str ) {
var ret = 0 ;
if ( str !== null && str !== undefined && str !== 0 ) {
var len = ( str . length << 2 ) + 1 ;
ret = stackAlloc ( len ) ;
stringToUTF8 ( str , ret , len ) ;
}
return ret ;
} ,
array : function ( arr ) {
var ret = stackAlloc ( arr . length ) ;
writeArrayToMemory ( arr , ret ) ;
return ret ;
}
} ;
function convertReturnValue ( ret ) {
if ( returnType === "string" ) return UTF8ToString ( ret ) ;
if ( returnType === "boolean" ) return Boolean ( ret ) ;
return ret ;
}
var func = getCFunc ( ident ) ;
var cArgs = [ ] ;
var stack = 0 ;
if ( args ) {
for ( var i = 0 ; i < args . length ; i ++ ) {
var converter = toC [ argTypes [ i ] ] ;
if ( converter ) {
if ( stack === 0 ) stack = stackSave ( ) ;
cArgs [ i ] = converter ( args [ i ] ) ;
} else {
cArgs [ i ] = args [ i ] ;
}
}
}
var ret = func . apply ( null , cArgs ) ;
ret = convertReturnValue ( ret ) ;
if ( stack !== 0 ) stackRestore ( stack ) ;
return ret ;
}
function cwrap ( ident , returnType , argTypes , opts ) {
argTypes = argTypes || [ ] ;
var numericArgs = argTypes . every ( function ( type ) {
return type === "number" ;
} ) ;
var numericRet = returnType !== "string" ;
if ( numericRet && numericArgs && ! opts ) {
return getCFunc ( ident ) ;
}
return function ( ) {
return ccall ( ident , returnType , argTypes , arguments , opts ) ;
} ;
}
var UTF8Decoder =
typeof TextDecoder !== "undefined" ? new TextDecoder ( "utf8" ) : undefined ;
function UTF8ArrayToString ( heap , idx , maxBytesToRead ) {
var endIdx = idx + maxBytesToRead ;
var endPtr = idx ;
while ( heap [ endPtr ] && ! ( endPtr >= endIdx ) ) ++ endPtr ;
if ( endPtr - idx > 16 && heap . subarray && UTF8Decoder ) {
return UTF8Decoder . decode ( heap . subarray ( idx , endPtr ) ) ;
} else {
var str = "" ;
while ( idx < endPtr ) {
var u0 = heap [ idx ++ ] ;
if ( ! ( u0 & 128 ) ) {
str += String . fromCharCode ( u0 ) ;
continue ;
}
var u1 = heap [ idx ++ ] & 63 ;
if ( ( u0 & 224 ) == 192 ) {
str += String . fromCharCode ( ( ( u0 & 31 ) << 6 ) | u1 ) ;
continue ;
}
var u2 = heap [ idx ++ ] & 63 ;
if ( ( u0 & 240 ) == 224 ) {
u0 = ( ( u0 & 15 ) << 12 ) | ( u1 << 6 ) | u2 ;
} else {
u0 = ( ( u0 & 7 ) << 18 ) | ( u1 << 12 ) | ( u2 << 6 ) | ( heap [ idx ++ ] & 63 ) ;
}
if ( u0 < 65536 ) {
str += String . fromCharCode ( u0 ) ;
} else {
var ch = u0 - 65536 ;
str += String . fromCharCode ( 55296 | ( ch >> 10 ) , 56320 | ( ch & 1023 ) ) ;
}
}
}
return str ;
}
function UTF8ToString ( ptr , maxBytesToRead ) {
return ptr ? UTF8ArrayToString ( HEAPU8 , ptr , maxBytesToRead ) : "" ;
}
function stringToUTF8Array ( str , heap , outIdx , maxBytesToWrite ) {
if ( ! ( maxBytesToWrite > 0 ) ) return 0 ;
var startIdx = outIdx ;
var endIdx = outIdx + maxBytesToWrite - 1 ;
for ( var i = 0 ; i < str . length ; ++ i ) {
var u = str . charCodeAt ( i ) ;
if ( u >= 55296 && u <= 57343 ) {
var u1 = str . charCodeAt ( ++ i ) ;
u = ( 65536 + ( ( u & 1023 ) << 10 ) ) | ( u1 & 1023 ) ;
}
if ( u <= 127 ) {
if ( outIdx >= endIdx ) break ;
heap [ outIdx ++ ] = u ;
} else if ( u <= 2047 ) {
if ( outIdx + 1 >= endIdx ) break ;
heap [ outIdx ++ ] = 192 | ( u >> 6 ) ;
heap [ outIdx ++ ] = 128 | ( u & 63 ) ;
} else if ( u <= 65535 ) {
if ( outIdx + 2 >= endIdx ) break ;
heap [ outIdx ++ ] = 224 | ( u >> 12 ) ;
heap [ outIdx ++ ] = 128 | ( ( u >> 6 ) & 63 ) ;
heap [ outIdx ++ ] = 128 | ( u & 63 ) ;
} else {
if ( outIdx + 3 >= endIdx ) break ;
heap [ outIdx ++ ] = 240 | ( u >> 18 ) ;
heap [ outIdx ++ ] = 128 | ( ( u >> 12 ) & 63 ) ;
heap [ outIdx ++ ] = 128 | ( ( u >> 6 ) & 63 ) ;
heap [ outIdx ++ ] = 128 | ( u & 63 ) ;
}
}
heap [ outIdx ] = 0 ;
return outIdx - startIdx ;
}
function stringToUTF8 ( str , outPtr , maxBytesToWrite ) {
return stringToUTF8Array ( str , HEAPU8 , outPtr , maxBytesToWrite ) ;
}
function lengthBytesUTF8 ( str ) {
var len = 0 ;
for ( var i = 0 ; i < str . length ; ++ i ) {
var u = str . charCodeAt ( i ) ;
if ( u >= 55296 && u <= 57343 )
u = ( 65536 + ( ( u & 1023 ) << 10 ) ) | ( str . charCodeAt ( ++ i ) & 1023 ) ;
if ( u <= 127 ) ++ len ;
else if ( u <= 2047 ) len += 2 ;
else if ( u <= 65535 ) len += 3 ;
else len += 4 ;
}
return len ;
}
function allocateUTF8 ( str ) {
var size = lengthBytesUTF8 ( str ) + 1 ;
var ret = _malloc ( size ) ;
if ( ret ) stringToUTF8Array ( str , HEAP8 , ret , size ) ;
return ret ;
}
function writeArrayToMemory ( array , buffer ) {
HEAP8 . set ( array , buffer ) ;
}
function alignUp ( x , multiple ) {
if ( x % multiple > 0 ) {
x += multiple - ( x % multiple ) ;
}
return x ;
}
var buffer , HEAP8 , HEAPU8 , HEAP16 , HEAPU16 , HEAP32 , HEAPU32 , HEAPF32 , HEAPF64 ;
function updateGlobalBufferAndViews ( buf ) {
buffer = buf ;
Module [ "HEAP8" ] = HEAP8 = new Int8Array ( buf ) ;
Module [ "HEAP16" ] = HEAP16 = new Int16Array ( buf ) ;
Module [ "HEAP32" ] = HEAP32 = new Int32Array ( buf ) ;
Module [ "HEAPU8" ] = HEAPU8 = new Uint8Array ( buf ) ;
Module [ "HEAPU16" ] = HEAPU16 = new Uint16Array ( buf ) ;
Module [ "HEAPU32" ] = HEAPU32 = new Uint32Array ( buf ) ;
Module [ "HEAPF32" ] = HEAPF32 = new Float32Array ( buf ) ;
Module [ "HEAPF64" ] = HEAPF64 = new Float64Array ( buf ) ;
}
var INITIAL _MEMORY = Module [ "INITIAL_MEMORY" ] || 16777216 ;
var wasmTable ;
var _ _ATPRERUN _ _ = [ ] ;
var _ _ATINIT _ _ = [ ] ;
var _ _ATPOSTRUN _ _ = [ ] ;
var runtimeInitialized = false ;
function preRun ( ) {
if ( Module [ "preRun" ] ) {
if ( typeof Module [ "preRun" ] == "function" )
Module [ "preRun" ] = [ Module [ "preRun" ] ] ;
while ( Module [ "preRun" ] . length ) {
addOnPreRun ( Module [ "preRun" ] . shift ( ) ) ;
}
}
callRuntimeCallbacks ( _ _ATPRERUN _ _ ) ;
}
function initRuntime ( ) {
runtimeInitialized = true ;
if ( ! Module [ "noFSInit" ] && ! FS . init . initialized ) FS . init ( ) ;
TTY . init ( ) ;
callRuntimeCallbacks ( _ _ATINIT _ _ ) ;
}
function postRun ( ) {
if ( Module [ "postRun" ] ) {
if ( typeof Module [ "postRun" ] == "function" )
Module [ "postRun" ] = [ Module [ "postRun" ] ] ;
while ( Module [ "postRun" ] . length ) {
addOnPostRun ( Module [ "postRun" ] . shift ( ) ) ;
}
}
callRuntimeCallbacks ( _ _ATPOSTRUN _ _ ) ;
}
function addOnPreRun ( cb ) {
_ _ATPRERUN _ _ . unshift ( cb ) ;
}
function addOnInit ( cb ) {
_ _ATINIT _ _ . unshift ( cb ) ;
}
function addOnPostRun ( cb ) {
_ _ATPOSTRUN _ _ . unshift ( cb ) ;
}
var runDependencies = 0 ;
var runDependencyWatcher = null ;
var dependenciesFulfilled = null ;
function getUniqueRunDependency ( id ) {
return id ;
}
function addRunDependency ( id ) {
runDependencies ++ ;
if ( Module [ "monitorRunDependencies" ] ) {
Module [ "monitorRunDependencies" ] ( runDependencies ) ;
}
}
function removeRunDependency ( id ) {
runDependencies -- ;
if ( Module [ "monitorRunDependencies" ] ) {
Module [ "monitorRunDependencies" ] ( runDependencies ) ;
}
if ( runDependencies == 0 ) {
if ( runDependencyWatcher !== null ) {
clearInterval ( runDependencyWatcher ) ;
runDependencyWatcher = null ;
}
if ( dependenciesFulfilled ) {
var callback = dependenciesFulfilled ;
dependenciesFulfilled = null ;
callback ( ) ;
}
}
}
Module [ "preloadedImages" ] = { } ;
Module [ "preloadedAudios" ] = { } ;
function abort ( what ) {
if ( Module [ "onAbort" ] ) {
Module [ "onAbort" ] ( what ) ;
}
what += "" ;
err ( what ) ;
ABORT = true ;
EXITSTATUS = 1 ;
what = "abort(" + what + "). Build with -s ASSERTIONS=1 for more info." ;
var e = new WebAssembly . RuntimeError ( what ) ;
throw e ;
}
var dataURIPrefix = "data:application/octet-stream;base64," ;
function isDataURI ( filename ) {
return filename . startsWith ( dataURIPrefix ) ;
}
var wasmBinaryFile =
" data : application / octet - stream ; base64 , AGFzbQEAAAABlAInYAF / AX9gA39 / fwF / YAF / AGACf38Bf2ACf38AYAV / f39 / fwF / YAR / f39 / AX9gA39 / fwBgBH9 + f38Bf2AAAX9gBX9 / f35 / AX5gA39 + fwF / YAF / AX5gAn9 + AX9gBH9 / fn8BfmADf35 / AX5gA39 / fgF / YAR / f35 / AX9gBn9 / f39 / fwF / YAR / f39 / AGADf39 + AX5gAn5 / AX9gA398fwBgBH9 / f38BfmADf39 / AX5gBn98f39 / fwF / YAV / f35 / fwF / YAV / fn9 / fwF / YAV / f39 / fwBgAn9 + AGACf38BfmACf3wAYAh / fn5 / f39 + fwF / YAV / f39 + fwBgAABgBX5 + f35 / AX5gAnx / AXxgAn9 + AX5gBX9 / f39 / AX4CeRQBYQFhAAIBYQFiAAABYQFjAAMBYQFkAAYBYQFlAAEBYQFmAAABYQFnAAYBYQFoAAABYQFpAAMBYQFqAAMBYQFrAAMBYQFsAAMBYQFtAAABYQFuAAUBYQFvAAEBYQFwAAMBYQFxAAEBYQFyAAABYQFzAAEBYQF0AAADggKAAgcCAgQAAQECAgANBAQOBwICAhwLEw0AAA0dFAwMAAcCDBAeAgMCAwIAAgEABwgUBBUIBgADAAwABAgIAgEGBgABAB8XAQEDAhMCAwUFEQICIA8GAgMYAQgCAQAABwUBGAAaAxIBAAcEAyERCCIHAQsVAQMABQMDAwAFBAACIwYAAQEAGw0bFw0BBAALCwMDDAwAAwAHJAMBBAgaAQECBQMBAwMABwcHAgICAiURCwgICwEmCQkAAAAKAAIABQAGBgUFBQEDBgYGBRISBgQBAQEAAAIJBgABAA4AAQEPCQABBBkJCQkAAAADCgoBAQIQAAAAAgEDAwkEAQoABQ4AAAkEBQFwAR8fBQcBAYACgIACBgkBfwFB0KDBAgsHvgI8AXUCAAF2AIABAXcAkwIBeADxAQF5AM8BAXoAzQEBQQDLAQFCAMoBAUMAyQEBRADIAQFFAMcBAUYAkgIBRwCRAgFIAI4CAUkA6QEBSgDiAQFLAOEBAUwAPQFNAOABAU4A + gEBTwD5AQFQAPIBAVEA + wEBUgDfAQFTAN4BAVQA3QEBVQDcAQFWAOMBAVcA2wEBWADaAQFZANkBAVoA2AEBXwDXAQEkAOoBAmFhAJwBAmJhANYBAmNhANUBAmRhANQBAmVhADECZmEA6wECZ2EAGwJoYQDOAQJpYQBJAmphANMBAmthANIBAmxhAGgCbWEA0QECbmEA6AECb2EA0AECcGEA5AECcWEAigICcmEA + AECc2EA9wECdGEA9gECdWEA5wECdmEA5gECd2EA5QECeGEAGAJ5YQAVAnphAQAJQQEAQQELHswBkAKNAo8CjAKLArYBiQKIAocChgKFAoQCgwKCAoECgAL / Af4B / 
QH8AVr1AfQB8wHwAe8B7gHtAewBCq2RCYACQAEBfyMAQRBrIgMgADYCDCADIAE2AgggAyACNgIEIAMoAgwEQCADKAIMIAMoAgg2AgAgAygCDCADKAIENgIECwvMDAEHfwJAIABFDQAgAEEIayIDIABBBGsoAgAiAUF4cSIAaiEFAkAgAUEBcQ0AIAFBA3FFDQEgAyADKAIAIgFrIgNByJsBKAIASQ0BIAAgAWohACADQcybASgCAEcEQCABQf8BTQRAIAMoAggiAiABQQN2IgRBA3RB4JsBakYaIAIgAygCDCIBRgRAQbibAUG4mwEoAgBBfiAEd3E2AgAMAwsgAiABNgIMIAEgAjYCCAwCCyADKAIYIQYCQCADIAMoAgwiAUcEQCADKAIIIgIgATYCDCABIAI2AggMAQsCQCADQRRqIgIoAgAiBA0AIANBEGoiAigCACIEDQBBACEBDAELA0AgAiEHIAQiAUEUaiICKAIAIgQNACABQRBqIQIgASgCECIEDQALIAdBADYCAAsgBkUNAQJAIAMgAygCHCICQQJ0QeidAWoiBCgCAEYEQCAEIAE2AgAgAQ0BQbybAUG8mwEoAgBBfiACd3E2AgAMAwsgBkEQQRQgBigCECADRhtqIAE2AgAgAUUNAgsgASAGNgIYIAMoAhAiAgRAIAEgAjYCECACIAE2AhgLIAMoAhQiAkUNASABIAI2AhQgAiABNgIYDAELIAUoAgQiAUEDcUEDRw0AQcCbASAANgIAIAUgAUF + cTYCBCADIABBAXI2AgQgACADaiAANgIADwsgAyAFTw0AIAUoAgQiAUEBcUUNAAJAIAFBAnFFBEAgBUHQmwEoAgBGBEBB0JsBIAM2AgBBxJsBQcSbASgCACAAaiIANgIAIAMgAEEBcjYCBCADQcybASgCAEcNA0HAmwFBADYCAEHMmwFBADYCAA8LIAVBzJsBKAIARgRAQcybASADNgIAQcCbAUHAmwEoAgAgAGoiADYCACADIABBAXI2AgQgACADaiAANgIADwsgAUF4cSAAaiEAAkAgAUH / AU0EQCAFKAIIIgIgAUEDdiIEQQN0QeCbAWpGGiACIAUoAgwiAUYEQEG4mwFBuJsBKAIAQX4gBHdxNgIADAILIAIgATYCDCABIAI2AggMAQsgBSgCGCEGAkAgBSAFKAIMIgFHBEAgBSgCCCICQcibASgCAEkaIAIgATYCDCABIAI2AggMAQsCQCAFQRRqIgIoAgAiBA0AIAVBEGoiAigCACIEDQBBACEBDAELA0AgAiEHIAQiAUEUaiICKAIAIgQNACABQRBqIQIgASgCECIEDQALIAdBADYCAAsgBkUNAAJAIAUgBSgCHCICQQJ0QeidAWoiBCgCAEYEQCAEIAE2AgAgAQ0BQbybAUG8mwEoAgBBfiACd3E2AgAMAgsgBkEQQRQgBigCECAFRhtqIAE2AgAgAUUNAQsgASAGNgIYIAUoAhAiAgRAIAEgAjYCECACIAE2AhgLIAUoAhQiAkUNACABIAI2AhQgAiABNgIYCyADIABBAXI2AgQgACADaiAANgIAIANBzJsBKAIARw0BQcCbASAANgIADwsgBSABQX5xNgIEIAMgAEEBcjYCBCAAIANqIAA2AgALIABB / wFNBEAgAEEDdiIBQQN0QeCbAWohAAJ / QbibASgCACICQQEgAXQiAXFFBEBBuJsBIAEgAnI2AgAgAAwBCyAAKAIICyECIAAgAzYCCCACIAM2AgwgAyAANgIMIAMgAjYCCA8LQR8hAiADQgA3AhAgAEH 
///8HTQRAIABBCHYiASABQYD+P2pBEHZBCHEiAXQiAiACQYDgH2pBEHZBBHEiAnQiBCAEQYCAD2pBEHZBAnEiBHRBD3YgASACciAEcmsiAUEBdCAAIAFBFWp2QQFxckEcaiECCyADIAI2AhwgAkECdEHonQFqIQECQAJAAkBBvJsBKAIAIgRBASACdCIHcUUEQEG8mwEgBCAHcjYCACABIAM2AgAgAyABNgIYDAELIABBAEEZIAJBAXZrIAJBH0YbdCECIAEoAgAhAQNAIAEiBCgCBEF4cSAARg0CIAJBHXYhASACQQF0IQIgBCABQQRxaiIHQRBqKAIAIgENAAsgByADNgIQIAMgBDYCGAsgAyADNgIMIAMgAzYCCAwBCyAEKAIIIgAgAzYCDCAEIAM2AgggA0EANgIYIAMgBDYCDCADIAA2AggLQdibAUHYmwEoAgBBAWsiAEF/IAAbNgIACwtCAQF/IwBBEGsiASQAIAEgADYCDCABKAIMBEAgASgCDC0AAUEBcQRAIAEoAgwoAgQQFQsgASgCDBAVCyABQRBqJAALQwEBfyMAQRBrIgIkACACIAA2AgwgAiABNgIIIAIoAgwCfyMAQRBrIgAgAigCCDYCDCAAKAIMQQxqCxBDIAJBEGokAAuiLgEMfyMAQRBrIgwkAAJAAkACQAJAAkACQAJAAkACQAJAAkACQCAAQfQBTQRAQbibASgCACIFQRAgAEELakF4cSAAQQtJGyIIQQN2IgJ2IgFBA3EEQCABQX9zQQFxIAJqIgNBA3QiAUHomwFqKAIAIgRBCGohAAJAIAQoAggiAiABQeCbAWo
if ( ! isDataURI ( wasmBinaryFile ) ) {
wasmBinaryFile = locateFile ( wasmBinaryFile ) ;
}
function getBinary ( file ) {
try {
if ( file == wasmBinaryFile && wasmBinary ) {
return new Uint8Array ( wasmBinary ) ;
}
var binary = tryParseAsDataURI ( file ) ;
if ( binary ) {
return binary ;
}
if ( readBinary ) {
return readBinary ( file ) ;
} else {
throw "sync fetching of the wasm failed: you can preload it to Module['wasmBinary'] manually, or emcc.py will do that for you when generating HTML (but not JS)" ;
}
} catch ( err ) {
abort ( err ) ;
}
}
function instantiateSync ( file , info ) {
var instance ;
var module ;
var binary ;
try {
binary = getBinary ( file ) ;
module = new WebAssembly . Module ( binary ) ;
instance = new WebAssembly . Instance ( module , info ) ;
} catch ( e ) {
var str = e . toString ( ) ;
err ( "failed to compile wasm module: " + str ) ;
if ( str . includes ( "imported Memory" ) || str . includes ( "memory import" ) ) {
err (
"Memory size incompatibility issues may be due to changing INITIAL_MEMORY at runtime to something too large. Use ALLOW_MEMORY_GROWTH to allow any size memory (and also make sure not to set INITIAL_MEMORY at runtime to something smaller than it was at compile time)."
) ;
}
throw e ;
}
return [ instance , module ] ;
}
function createWasm ( ) {
var info = { a : asmLibraryArg } ;
function receiveInstance ( instance , module ) {
var exports = instance . exports ;
Module [ "asm" ] = exports ;
wasmMemory = Module [ "asm" ] [ "u" ] ;
updateGlobalBufferAndViews ( wasmMemory . buffer ) ;
wasmTable = Module [ "asm" ] [ "za" ] ;
addOnInit ( Module [ "asm" ] [ "v" ] ) ;
removeRunDependency ( "wasm-instantiate" ) ;
}
addRunDependency ( "wasm-instantiate" ) ;
if ( Module [ "instantiateWasm" ] ) {
try {
var exports = Module [ "instantiateWasm" ] ( info , receiveInstance ) ;
return exports ;
} catch ( e ) {
err ( "Module.instantiateWasm callback failed with error: " + e ) ;
return false ;
}
}
var result = instantiateSync ( wasmBinaryFile , info ) ;
receiveInstance ( result [ 0 ] ) ;
return Module [ "asm" ] ;
}
var tempDouble ;
var tempI64 ;
function callRuntimeCallbacks ( callbacks ) {
while ( callbacks . length > 0 ) {
var callback = callbacks . shift ( ) ;
if ( typeof callback == "function" ) {
callback ( Module ) ;
continue ;
}
var func = callback . func ;
if ( typeof func === "number" ) {
if ( callback . arg === undefined ) {
wasmTable . get ( func ) ( ) ;
} else {
wasmTable . get ( func ) ( callback . arg ) ;
}
} else {
func ( callback . arg === undefined ? null : callback . arg ) ;
}
}
}
function _gmtime _r ( time , tmPtr ) {
var date = new Date ( HEAP32 [ time >> 2 ] * 1e3 ) ;
HEAP32 [ tmPtr >> 2 ] = date . getUTCSeconds ( ) ;
HEAP32 [ ( tmPtr + 4 ) >> 2 ] = date . getUTCMinutes ( ) ;
HEAP32 [ ( tmPtr + 8 ) >> 2 ] = date . getUTCHours ( ) ;
HEAP32 [ ( tmPtr + 12 ) >> 2 ] = date . getUTCDate ( ) ;
HEAP32 [ ( tmPtr + 16 ) >> 2 ] = date . getUTCMonth ( ) ;
HEAP32 [ ( tmPtr + 20 ) >> 2 ] = date . getUTCFullYear ( ) - 1900 ;
HEAP32 [ ( tmPtr + 24 ) >> 2 ] = date . getUTCDay ( ) ;
HEAP32 [ ( tmPtr + 36 ) >> 2 ] = 0 ;
HEAP32 [ ( tmPtr + 32 ) >> 2 ] = 0 ;
var start = Date . UTC ( date . getUTCFullYear ( ) , 0 , 1 , 0 , 0 , 0 , 0 ) ;
var yday = ( ( date . getTime ( ) - start ) / ( 1e3 * 60 * 60 * 24 ) ) | 0 ;
HEAP32 [ ( tmPtr + 28 ) >> 2 ] = yday ;
if ( ! _gmtime _r . GMTString ) _gmtime _r . GMTString = allocateUTF8 ( "GMT" ) ;
HEAP32 [ ( tmPtr + 40 ) >> 2 ] = _gmtime _r . GMTString ;
return tmPtr ;
}
function _ _ _gmtime _r ( a0 , a1 ) {
return _gmtime _r ( a0 , a1 ) ;
}
var PATH = {
splitPath : function ( filename ) {
var splitPathRe = /^(\/?|)([\s\S]*?)((?:\.{1,2}|[^\/]+?|)(\.[^.\/]*|))(?:[\/]*)$/ ;
return splitPathRe . exec ( filename ) . slice ( 1 ) ;
} ,
normalizeArray : function ( parts , allowAboveRoot ) {
var up = 0 ;
for ( var i = parts . length - 1 ; i >= 0 ; i -- ) {
var last = parts [ i ] ;
if ( last === "." ) {
parts . splice ( i , 1 ) ;
} else if ( last === ".." ) {
parts . splice ( i , 1 ) ;
up ++ ;
} else if ( up ) {
parts . splice ( i , 1 ) ;
up -- ;
}
}
if ( allowAboveRoot ) {
for ( ; up ; up -- ) {
parts . unshift ( ".." ) ;
}
}
return parts ;
} ,
normalize : function ( path ) {
var isAbsolute = path . charAt ( 0 ) === "/" ,
trailingSlash = path . substr ( - 1 ) === "/" ;
path = PATH . normalizeArray (
path . split ( "/" ) . filter ( function ( p ) {
return ! ! p ;
} ) ,
! isAbsolute
) . join ( "/" ) ;
if ( ! path && ! isAbsolute ) {
path = "." ;
}
if ( path && trailingSlash ) {
path += "/" ;
}
return ( isAbsolute ? "/" : "" ) + path ;
} ,
dirname : function ( path ) {
var result = PATH . splitPath ( path ) ,
root = result [ 0 ] ,
dir = result [ 1 ] ;
if ( ! root && ! dir ) {
return "." ;
}
if ( dir ) {
dir = dir . substr ( 0 , dir . length - 1 ) ;
}
return root + dir ;
} ,
basename : function ( path ) {
if ( path === "/" ) return "/" ;
path = PATH . normalize ( path ) ;
path = path . replace ( /\/$/ , "" ) ;
var lastSlash = path . lastIndexOf ( "/" ) ;
if ( lastSlash === - 1 ) return path ;
return path . substr ( lastSlash + 1 ) ;
} ,
extname : function ( path ) {
return PATH . splitPath ( path ) [ 3 ] ;
} ,
join : function ( ) {
var paths = Array . prototype . slice . call ( arguments , 0 ) ;
return PATH . normalize ( paths . join ( "/" ) ) ;
} ,
join2 : function ( l , r ) {
return PATH . normalize ( l + "/" + r ) ;
}
} ;
function getRandomDevice ( ) {
if (
typeof crypto === "object" &&
typeof crypto [ "getRandomValues" ] === "function"
) {
var randomBuffer = new Uint8Array ( 1 ) ;
return function ( ) {
crypto . getRandomValues ( randomBuffer ) ;
return randomBuffer [ 0 ] ;
} ;
} else if ( ENVIRONMENT _IS _NODE ) {
try {
var crypto _module = _ _webpack _require _ _ ( 417 ) ;
return function ( ) {
return crypto _module [ "randomBytes" ] ( 1 ) [ 0 ] ;
} ;
} catch ( e ) { }
}
return function ( ) {
abort ( "randomDevice" ) ;
} ;
}
var PATH _FS = {
resolve : function ( ) {
var resolvedPath = "" ,
resolvedAbsolute = false ;
for ( var i = arguments . length - 1 ; i >= - 1 && ! resolvedAbsolute ; i -- ) {
var path = i >= 0 ? arguments [ i ] : FS . cwd ( ) ;
if ( typeof path !== "string" ) {
throw new TypeError ( "Arguments to path.resolve must be strings" ) ;
} else if ( ! path ) {
return "" ;
}
resolvedPath = path + "/" + resolvedPath ;
resolvedAbsolute = path . charAt ( 0 ) === "/" ;
}
resolvedPath = PATH . normalizeArray (
resolvedPath . split ( "/" ) . filter ( function ( p ) {
return ! ! p ;
} ) ,
! resolvedAbsolute
) . join ( "/" ) ;
return ( resolvedAbsolute ? "/" : "" ) + resolvedPath || "." ;
} ,
relative : function ( from , to ) {
from = PATH _FS . resolve ( from ) . substr ( 1 ) ;
to = PATH _FS . resolve ( to ) . substr ( 1 ) ;
function trim ( arr ) {
var start = 0 ;
for ( ; start < arr . length ; start ++ ) {
if ( arr [ start ] !== "" ) break ;
}
var end = arr . length - 1 ;
for ( ; end >= 0 ; end -- ) {
if ( arr [ end ] !== "" ) break ;
}
if ( start > end ) return [ ] ;
return arr . slice ( start , end - start + 1 ) ;
}
var fromParts = trim ( from . split ( "/" ) ) ;
var toParts = trim ( to . split ( "/" ) ) ;
var length = Math . min ( fromParts . length , toParts . length ) ;
var samePartsLength = length ;
for ( var i = 0 ; i < length ; i ++ ) {
if ( fromParts [ i ] !== toParts [ i ] ) {
samePartsLength = i ;
break ;
}
}
var outputParts = [ ] ;
for ( var i = samePartsLength ; i < fromParts . length ; i ++ ) {
outputParts . push ( ".." ) ;
}
outputParts = outputParts . concat ( toParts . slice ( samePartsLength ) ) ;
return outputParts . join ( "/" ) ;
}
} ;
var TTY = {
ttys : [ ] ,
init : function ( ) { } ,
shutdown : function ( ) { } ,
register : function ( dev , ops ) {
TTY . ttys [ dev ] = { input : [ ] , output : [ ] , ops : ops } ;
FS . registerDevice ( dev , TTY . stream _ops ) ;
} ,
stream _ops : {
open : function ( stream ) {
var tty = TTY . ttys [ stream . node . rdev ] ;
if ( ! tty ) {
throw new FS . ErrnoError ( 43 ) ;
}
stream . tty = tty ;
stream . seekable = false ;
} ,
close : function ( stream ) {
stream . tty . ops . flush ( stream . tty ) ;
} ,
flush : function ( stream ) {
stream . tty . ops . flush ( stream . tty ) ;
} ,
read : function ( stream , buffer , offset , length , pos ) {
if ( ! stream . tty || ! stream . tty . ops . get _char ) {
throw new FS . ErrnoError ( 60 ) ;
}
var bytesRead = 0 ;
for ( var i = 0 ; i < length ; i ++ ) {
var result ;
try {
result = stream . tty . ops . get _char ( stream . tty ) ;
} catch ( e ) {
throw new FS . ErrnoError ( 29 ) ;
}
if ( result === undefined && bytesRead === 0 ) {
throw new FS . ErrnoError ( 6 ) ;
}
if ( result === null || result === undefined ) break ;
bytesRead ++ ;
buffer [ offset + i ] = result ;
}
if ( bytesRead ) {
stream . node . timestamp = Date . now ( ) ;
}
return bytesRead ;
} ,
write : function ( stream , buffer , offset , length , pos ) {
if ( ! stream . tty || ! stream . tty . ops . put _char ) {
throw new FS . ErrnoError ( 60 ) ;
}
try {
for ( var i = 0 ; i < length ; i ++ ) {
stream . tty . ops . put _char ( stream . tty , buffer [ offset + i ] ) ;
}
} catch ( e ) {
throw new FS . ErrnoError ( 29 ) ;
}
if ( length ) {
stream . node . timestamp = Date . now ( ) ;
}
return i ;
}
} ,
default _tty _ops : {
get _char : function ( tty ) {
if ( ! tty . input . length ) {
var result = null ;
if ( ENVIRONMENT _IS _NODE ) {
var BUFSIZE = 256 ;
var buf = Buffer . alloc ? Buffer . alloc ( BUFSIZE ) : new Buffer ( BUFSIZE ) ;
var bytesRead = 0 ;
try {
bytesRead = nodeFS . readSync (
process . stdin . fd ,
buf ,
0 ,
BUFSIZE ,
null
) ;
} catch ( e ) {
if ( e . toString ( ) . includes ( "EOF" ) ) bytesRead = 0 ;
else throw e ;
}
if ( bytesRead > 0 ) {
result = buf . slice ( 0 , bytesRead ) . toString ( "utf-8" ) ;
} else {
result = null ;
}
} else if (
typeof window != "undefined" &&
typeof window . prompt == "function"
) {
result = window . prompt ( "Input: " ) ;
if ( result !== null ) {
result += "\n" ;
}
} else if ( typeof readline == "function" ) {
result = readline ( ) ;
if ( result !== null ) {
result += "\n" ;
}
}
if ( ! result ) {
return null ;
}
tty . input = intArrayFromString ( result , true ) ;
}
return tty . input . shift ( ) ;
} ,
put _char : function ( tty , val ) {
if ( val === null || val === 10 ) {
out ( UTF8ArrayToString ( tty . output , 0 ) ) ;
tty . output = [ ] ;
} else {
if ( val != 0 ) tty . output . push ( val ) ;
}
} ,
flush : function ( tty ) {
if ( tty . output && tty . output . length > 0 ) {
out ( UTF8ArrayToString ( tty . output , 0 ) ) ;
tty . output = [ ] ;
}
}
} ,
default _tty1 _ops : {
put _char : function ( tty , val ) {
if ( val === null || val === 10 ) {
err ( UTF8ArrayToString ( tty . output , 0 ) ) ;
tty . output = [ ] ;
} else {
if ( val != 0 ) tty . output . push ( val ) ;
}
} ,
flush : function ( tty ) {
if ( tty . output && tty . output . length > 0 ) {
err ( UTF8ArrayToString ( tty . output , 0 ) ) ;
tty . output = [ ] ;
}
}
}
} ;
function mmapAlloc ( size ) {
var alignedSize = alignMemory ( size , 65536 ) ;
var ptr = _malloc ( alignedSize ) ;
while ( size < alignedSize ) HEAP8 [ ptr + size ++ ] = 0 ;
return ptr ;
}
var MEMFS = {
ops _table : null ,
mount : function ( mount ) {
return MEMFS . createNode ( null , "/" , 16384 | 511 , 0 ) ;
} ,
createNode : function ( parent , name , mode , dev ) {
if ( FS . isBlkdev ( mode ) || FS . isFIFO ( mode ) ) {
throw new FS . ErrnoError ( 63 ) ;
}
if ( ! MEMFS . ops _table ) {
MEMFS . ops _table = {
dir : {
node : {
getattr : MEMFS . node _ops . getattr ,
setattr : MEMFS . node _ops . setattr ,
lookup : MEMFS . node _ops . lookup ,
mknod : MEMFS . node _ops . mknod ,
rename : MEMFS . node _ops . rename ,
unlink : MEMFS . node _ops . unlink ,
rmdir : MEMFS . node _ops . rmdir ,
readdir : MEMFS . node _ops . readdir ,
symlink : MEMFS . node _ops . symlink
} ,
stream : { llseek : MEMFS . stream _ops . llseek }
} ,
file : {
node : {
getattr : MEMFS . node _ops . getattr ,
setattr : MEMFS . node _ops . setattr
} ,
stream : {
llseek : MEMFS . stream _ops . llseek ,
read : MEMFS . stream _ops . read ,
write : MEMFS . stream _ops . write ,
allocate : MEMFS . stream _ops . allocate ,
mmap : MEMFS . stream _ops . mmap ,
msync : MEMFS . stream _ops . msync
}
} ,
link : {
node : {
getattr : MEMFS . node _ops . getattr ,
setattr : MEMFS . node _ops . setattr ,
readlink : MEMFS . node _ops . readlink
} ,
stream : { }
} ,
chrdev : {
node : {
getattr : MEMFS . node _ops . getattr ,
setattr : MEMFS . node _ops . setattr
} ,
stream : FS . chrdev _stream _ops
}
} ;
}
var node = FS . createNode ( parent , name , mode , dev ) ;
if ( FS . isDir ( node . mode ) ) {
node . node _ops = MEMFS . ops _table . dir . node ;
node . stream _ops = MEMFS . ops _table . dir . stream ;
node . contents = { } ;
} else if ( FS . isFile ( node . mode ) ) {
node . node _ops = MEMFS . ops _table . file . node ;
node . stream _ops = MEMFS . ops _table . file . stream ;
node . usedBytes = 0 ;
node . contents = null ;
} else if ( FS . isLink ( node . mode ) ) {
node . node _ops = MEMFS . ops _table . link . node ;
node . stream _ops = MEMFS . ops _table . link . stream ;
} else if ( FS . isChrdev ( node . mode ) ) {
node . node _ops = MEMFS . ops _table . chrdev . node ;
node . stream _ops = MEMFS . ops _table . chrdev . stream ;
}
node . timestamp = Date . now ( ) ;
if ( parent ) {
parent . contents [ name ] = node ;
parent . timestamp = node . timestamp ;
}
return node ;
} ,
getFileDataAsTypedArray : function ( node ) {
if ( ! node . contents ) return new Uint8Array ( 0 ) ;
if ( node . contents . subarray )
return node . contents . subarray ( 0 , node . usedBytes ) ;
return new Uint8Array ( node . contents ) ;
} ,
expandFileStorage : function ( node , newCapacity ) {
var prevCapacity = node . contents ? node . contents . length : 0 ;
if ( prevCapacity >= newCapacity ) return ;
var CAPACITY _DOUBLING _MAX = 1024 * 1024 ;
newCapacity = Math . max (
newCapacity ,
( prevCapacity * ( prevCapacity < CAPACITY _DOUBLING _MAX ? 2 : 1.125 ) ) >>> 0
) ;
if ( prevCapacity != 0 ) newCapacity = Math . max ( newCapacity , 256 ) ;
var oldContents = node . contents ;
node . contents = new Uint8Array ( newCapacity ) ;
if ( node . usedBytes > 0 )
node . contents . set ( oldContents . subarray ( 0 , node . usedBytes ) , 0 ) ;
} ,
resizeFileStorage : function ( node , newSize ) {
if ( node . usedBytes == newSize ) return ;
if ( newSize == 0 ) {
node . contents = null ;
node . usedBytes = 0 ;
} else {
var oldContents = node . contents ;
node . contents = new Uint8Array ( newSize ) ;
if ( oldContents ) {
node . contents . set (
oldContents . subarray ( 0 , Math . min ( newSize , node . usedBytes ) )
) ;
}
node . usedBytes = newSize ;
}
} ,
// Node-level operations for the in-memory filesystem backend.
node _ops : {
// Build a stat(2)-like attribute object from in-memory node state.
getattr : function ( node ) {
var attr = { } ;
// Character devices report their own id as dev; everything else uses 1.
attr . dev = FS . isChrdev ( node . mode ) ? node . id : 1 ;
attr . ino = node . id ;
attr . mode = node . mode ;
attr . nlink = 1 ;
attr . uid = 0 ;
attr . gid = 0 ;
attr . rdev = node . rdev ;
if ( FS . isDir ( node . mode ) ) {
// Directories report a conventional fixed size.
attr . size = 4096 ;
} else if ( FS . isFile ( node . mode ) ) {
attr . size = node . usedBytes ;
} else if ( FS . isLink ( node . mode ) ) {
attr . size = node . link . length ;
} else {
attr . size = 0 ;
}
// MEMFS keeps a single timestamp per node; atime/mtime/ctime all mirror it.
attr . atime = new Date ( node . timestamp ) ;
attr . mtime = new Date ( node . timestamp ) ;
attr . ctime = new Date ( node . timestamp ) ;
attr . blksize = 4096 ;
attr . blocks = Math . ceil ( attr . size / attr . blksize ) ;
return attr ;
} ,
// Apply mode/timestamp/size changes; size changes resize the backing store.
setattr : function ( node , attr ) {
if ( attr . mode !== undefined ) {
node . mode = attr . mode ;
}
if ( attr . timestamp !== undefined ) {
node . timestamp = attr . timestamp ;
}
if ( attr . size !== undefined ) {
MEMFS . resizeFileStorage ( node , attr . size ) ;
}
} ,
// MEMFS nodes are always in the hash table, so a lookup miss means ENOENT (44).
lookup : function ( parent , name ) {
throw FS . genericErrors [ 44 ] ;
} ,
mknod : function ( parent , name , mode , dev ) {
return MEMFS . createNode ( parent , name , mode , dev ) ;
} ,
rename : function ( old _node , new _dir , new _name ) {
// If replacing a directory, it must be empty (ENOTEMPTY = 55).
if ( FS . isDir ( old _node . mode ) ) {
var new _node ;
try {
new _node = FS . lookupNode ( new _dir , new _name ) ;
} catch ( e ) { }
if ( new _node ) {
// for-in with an immediate throw: fails iff the directory has any entry.
for ( var i in new _node . contents ) {
throw new FS . ErrnoError ( 55 ) ;
}
}
}
// Re-link the node under its new parent/name and refresh timestamps.
delete old _node . parent . contents [ old _node . name ] ;
old _node . parent . timestamp = Date . now ( ) ;
old _node . name = new _name ;
new _dir . contents [ new _name ] = old _node ;
new _dir . timestamp = old _node . parent . timestamp ;
old _node . parent = new _dir ;
} ,
unlink : function ( parent , name ) {
delete parent . contents [ name ] ;
parent . timestamp = Date . now ( ) ;
} ,
rmdir : function ( parent , name ) {
var node = FS . lookupNode ( parent , name ) ;
// Directory must be empty (ENOTEMPTY = 55); same for-in throw idiom as rename.
for ( var i in node . contents ) {
throw new FS . ErrnoError ( 55 ) ;
}
delete parent . contents [ name ] ;
parent . timestamp = Date . now ( ) ;
} ,
// List entries, always including the "." and ".." pseudo-entries.
readdir : function ( node ) {
var entries = [ "." , ".." ] ;
for ( var key in node . contents ) {
if ( ! node . contents . hasOwnProperty ( key ) ) {
continue ;
}
entries . push ( key ) ;
}
return entries ;
} ,
// 511 | 40960 = 0777 | S_IFLNK: a world-accessible symlink node.
symlink : function ( parent , newname , oldpath ) {
var node = MEMFS . createNode ( parent , newname , 511 | 40960 , 0 ) ;
node . link = oldpath ;
return node ;
} ,
readlink : function ( node ) {
if ( ! FS . isLink ( node . mode ) ) {
// EINVAL (28): not a symlink.
throw new FS . ErrnoError ( 28 ) ;
}
return node . link ;
}
} ,
// Stream-level operations for MEMFS (read/write/seek/mmap on open files).
stream _ops : {
// Copy up to `length` bytes from the file at `position` into `buffer` at `offset`.
read : function ( stream , buffer , offset , length , position ) {
var contents = stream . node . contents ;
if ( position >= stream . node . usedBytes ) return 0 ;
var size = Math . min ( stream . node . usedBytes - position , length ) ;
if ( size > 8 && contents . subarray ) {
// Bulk copy for non-trivial sizes when contents is a typed array.
buffer . set ( contents . subarray ( position , position + size ) , offset ) ;
} else {
// Tiny reads (or plain-array contents): byte-by-byte copy.
for ( var i = 0 ; i < size ; i ++ )
buffer [ offset + i ] = contents [ position + i ] ;
}
return size ;
} ,
// Write `length` bytes into the file; `canOwn` lets the node alias the caller's buffer.
write : function ( stream , buffer , offset , length , position , canOwn ) {
if ( buffer . buffer === HEAP8 . buffer ) {
// Never take ownership of a view into the wasm heap: it can move/grow.
canOwn = false ;
}
if ( ! length ) return 0 ;
var node = stream . node ;
node . timestamp = Date . now ( ) ;
if ( buffer . subarray && ( ! node . contents || node . contents . subarray ) ) {
if ( canOwn ) {
// Alias the caller's buffer directly (no copy); caller relinquishes it.
node . contents = buffer . subarray ( offset , offset + length ) ;
node . usedBytes = length ;
return length ;
} else if ( node . usedBytes === 0 && position === 0 ) {
// Fresh file written from the start: a single slice copy suffices.
node . contents = buffer . slice ( offset , offset + length ) ;
node . usedBytes = length ;
return length ;
} else if ( position + length <= node . usedBytes ) {
// Overwrite within the existing extent: copy in place.
node . contents . set ( buffer . subarray ( offset , offset + length ) , position ) ;
return length ;
}
}
// Slow path: grow storage, then copy (typed-array set or byte loop).
MEMFS . expandFileStorage ( node , position + length ) ;
if ( node . contents . subarray && buffer . subarray ) {
node . contents . set ( buffer . subarray ( offset , offset + length ) , position ) ;
} else {
for ( var i = 0 ; i < length ; i ++ ) {
node . contents [ position + i ] = buffer [ offset + i ] ;
}
}
node . usedBytes = Math . max ( node . usedBytes , position + length ) ;
return length ;
} ,
// lseek: whence 1 = SEEK_CUR, 2 = SEEK_END; returns the new absolute position.
llseek : function ( stream , offset , whence ) {
var position = offset ;
if ( whence === 1 ) {
position += stream . position ;
} else if ( whence === 2 ) {
if ( FS . isFile ( stream . node . mode ) ) {
position += stream . node . usedBytes ;
}
}
if ( position < 0 ) {
// EINVAL (28): negative resulting offset.
throw new FS . ErrnoError ( 28 ) ;
}
return position ;
} ,
// posix_fallocate-style: ensure storage exists through offset+length.
allocate : function ( stream , offset , length ) {
MEMFS . expandFileStorage ( stream . node , offset + length ) ;
stream . node . usedBytes = Math . max ( stream . node . usedBytes , offset + length ) ;
} ,
// Map the file into the heap; may share the node's buffer or allocate a copy.
mmap : function ( stream , address , length , position , prot , flags ) {
if ( address !== 0 ) {
// Only address-hint 0 is supported (EINVAL).
throw new FS . ErrnoError ( 28 ) ;
}
if ( ! FS . isFile ( stream . node . mode ) ) {
// ENODEV (43): mmap only supported on regular files.
throw new FS . ErrnoError ( 43 ) ;
}
var ptr ;
var allocated ;
var contents = stream . node . contents ;
// MAP_PRIVATE (flags & 2) clear and contents already backed by the heap:
// share the existing bytes without copying.
if ( ! ( flags & 2 ) && contents . buffer === buffer ) {
allocated = false ;
ptr = contents . byteOffset ;
} else {
if ( position > 0 || position + length < contents . length ) {
if ( contents . subarray ) {
contents = contents . subarray ( position , position + length ) ;
} else {
contents = Array . prototype . slice . call (
contents ,
position ,
position + length
) ;
}
}
// Otherwise copy the requested window into freshly allocated heap memory.
allocated = true ;
ptr = mmapAlloc ( length ) ;
if ( ! ptr ) {
// ENOMEM (48): allocation failed.
throw new FS . ErrnoError ( 48 ) ;
}
HEAP8 . set ( contents , ptr ) ;
}
return { ptr : ptr , allocated : allocated } ;
} ,
// Flush an mmapped region back to the file; no-op for private mappings.
msync : function ( stream , buffer , offset , length , mmapFlags ) {
if ( ! FS . isFile ( stream . node . mode ) ) {
throw new FS . ErrnoError ( 43 ) ;
}
if ( mmapFlags & 2 ) {
// MAP_PRIVATE: changes are not written back.
return 0 ;
}
var bytesWritten = MEMFS . stream _ops . write (
stream ,
buffer ,
0 ,
length ,
offset ,
false
) ;
// msync always reports success; the write count is intentionally discarded.
return 0 ;
}
}
} ;
// Map from POSIX errno symbol names to the numeric codes used by this
// Emscripten runtime (these are musl/wasi-style numbers, NOT host-OS errnos).
// Used e.g. by NODEFS.convertNodeCode to translate Node.js error codes.
var ERRNO _CODES = {
EPERM : 63 ,
ENOENT : 44 ,
ESRCH : 71 ,
EINTR : 27 ,
EIO : 29 ,
ENXIO : 60 ,
E2BIG : 1 ,
ENOEXEC : 45 ,
EBADF : 8 ,
ECHILD : 12 ,
EAGAIN : 6 ,
EWOULDBLOCK : 6 ,
ENOMEM : 48 ,
EACCES : 2 ,
EFAULT : 21 ,
ENOTBLK : 105 ,
EBUSY : 10 ,
EEXIST : 20 ,
EXDEV : 75 ,
ENODEV : 43 ,
ENOTDIR : 54 ,
EISDIR : 31 ,
EINVAL : 28 ,
ENFILE : 41 ,
EMFILE : 33 ,
ENOTTY : 59 ,
ETXTBSY : 74 ,
EFBIG : 22 ,
ENOSPC : 51 ,
ESPIPE : 70 ,
EROFS : 69 ,
EMLINK : 34 ,
EPIPE : 64 ,
EDOM : 18 ,
ERANGE : 68 ,
ENOMSG : 49 ,
EIDRM : 24 ,
ECHRNG : 106 ,
EL2NSYNC : 156 ,
EL3HLT : 107 ,
EL3RST : 108 ,
ELNRNG : 109 ,
EUNATCH : 110 ,
ENOCSI : 111 ,
EL2HLT : 112 ,
EDEADLK : 16 ,
ENOLCK : 46 ,
EBADE : 113 ,
EBADR : 114 ,
EXFULL : 115 ,
ENOANO : 104 ,
EBADRQC : 103 ,
EBADSLT : 102 ,
EDEADLOCK : 16 ,
EBFONT : 101 ,
ENOSTR : 100 ,
ENODATA : 116 ,
ETIME : 117 ,
ENOSR : 118 ,
ENONET : 119 ,
ENOPKG : 120 ,
EREMOTE : 121 ,
ENOLINK : 47 ,
EADV : 122 ,
ESRMNT : 123 ,
ECOMM : 124 ,
EPROTO : 65 ,
EMULTIHOP : 36 ,
EDOTDOT : 125 ,
EBADMSG : 9 ,
ENOTUNIQ : 126 ,
EBADFD : 127 ,
EREMCHG : 128 ,
ELIBACC : 129 ,
ELIBBAD : 130 ,
ELIBSCN : 131 ,
ELIBMAX : 132 ,
ELIBEXEC : 133 ,
ENOSYS : 52 ,
ENOTEMPTY : 55 ,
ENAMETOOLONG : 37 ,
ELOOP : 32 ,
EOPNOTSUPP : 138 ,
EPFNOSUPPORT : 139 ,
ECONNRESET : 15 ,
ENOBUFS : 42 ,
EAFNOSUPPORT : 5 ,
EPROTOTYPE : 67 ,
ENOTSOCK : 57 ,
ENOPROTOOPT : 50 ,
ESHUTDOWN : 140 ,
ECONNREFUSED : 14 ,
EADDRINUSE : 3 ,
ECONNABORTED : 13 ,
ENETUNREACH : 40 ,
ENETDOWN : 38 ,
ETIMEDOUT : 73 ,
EHOSTDOWN : 142 ,
EHOSTUNREACH : 23 ,
EINPROGRESS : 26 ,
EALREADY : 7 ,
EDESTADDRREQ : 17 ,
EMSGSIZE : 35 ,
EPROTONOSUPPORT : 66 ,
ESOCKTNOSUPPORT : 137 ,
EADDRNOTAVAIL : 4 ,
ENETRESET : 39 ,
EISCONN : 30 ,
ENOTCONN : 53 ,
ETOOMANYREFS : 141 ,
EUSERS : 136 ,
EDQUOT : 19 ,
ESTALE : 72 ,
ENOTSUP : 138 ,
ENOMEDIUM : 148 ,
EILSEQ : 25 ,
EOVERFLOW : 61 ,
ECANCELED : 11 ,
ENOTRECOVERABLE : 56 ,
EOWNERDEAD : 62 ,
ESTRPIPE : 135
} ;
// NODEFS: filesystem backend that proxies Emscripten FS operations onto the
// real host filesystem via Node.js's synchronous `fs` API. Host errors are
// translated to runtime errnos through ERRNO_CODES.
var NODEFS = {
isWindows : false ,
staticInit : function ( ) {
NODEFS . isWindows = ! ! process . platform . match ( /^win/ ) ;
var flags = { fs : fs . constants } ;
// Older Node exposed open() constants on fs directly; newer under fs.constants.
if ( flags [ "fs" ] ) {
flags = flags [ "fs" ] ;
}
// Map musl-style open(2) flag bits to the host platform's O_* values.
NODEFS . flagsForNodeMap = {
1024 : flags [ "O_APPEND" ] ,
64 : flags [ "O_CREAT" ] ,
128 : flags [ "O_EXCL" ] ,
256 : flags [ "O_NOCTTY" ] ,
0 : flags [ "O_RDONLY" ] ,
2 : flags [ "O_RDWR" ] ,
4096 : flags [ "O_SYNC" ] ,
512 : flags [ "O_TRUNC" ] ,
1 : flags [ "O_WRONLY" ]
} ;
} ,
// Wrap an ArrayBuffer as a Node Buffer.
// NOTE(review): the `new Buffer(...)` fallback is deprecated (pre-4.5 Node
// compat) — presumably kept for very old runtimes; confirm before removing.
bufferFrom : function ( arrayBuffer ) {
return Buffer [ "alloc" ] ? Buffer . from ( arrayBuffer ) : new Buffer ( arrayBuffer ) ;
} ,
// Translate a Node.js error code string (e.code) to a runtime errno number.
convertNodeCode : function ( e ) {
var code = e . code ;
return ERRNO _CODES [ code ] ;
} ,
mount : function ( mount ) {
return NODEFS . createNode ( null , "/" , NODEFS . getMode ( mount . opts . root ) , 0 ) ;
} ,
createNode : function ( parent , name , mode , dev ) {
// NODEFS only models directories, regular files, and symlinks.
if ( ! FS . isDir ( mode ) && ! FS . isFile ( mode ) && ! FS . isLink ( mode ) ) {
throw new FS . ErrnoError ( 28 ) ;
}
var node = FS . createNode ( parent , name , mode ) ;
node . node _ops = NODEFS . node _ops ;
node . stream _ops = NODEFS . stream _ops ;
return node ;
} ,
// lstat the host path and return its mode bits.
getMode : function ( path ) {
var stat ;
try {
stat = fs . lstatSync ( path ) ;
if ( NODEFS . isWindows ) {
// Windows lacks execute bits: mirror read bits (0444) into execute (0111).
stat . mode = stat . mode | ( ( stat . mode & 292 ) >> 2 ) ;
}
} catch ( e ) {
if ( ! e . code ) throw e ;
throw new FS . ErrnoError ( NODEFS . convertNodeCode ( e ) ) ;
}
return stat . mode ;
} ,
// Reconstruct the host path for a node by walking up to the mount root.
realPath : function ( node ) {
var parts = [ ] ;
while ( node . parent !== node ) {
parts . push ( node . name ) ;
node = node . parent ;
}
parts . push ( node . mount . opts . root ) ;
parts . reverse ( ) ;
return PATH . join . apply ( null , parts ) ;
} ,
// Convert runtime open(2) flags to host flags; throws EINVAL on any
// flag bit that has no host mapping.
flagsForNode : function ( flags ) {
// Strip bits the host API has no equivalent for (e.g. O_PATH/O_NOFOLLOW-style).
flags &= ~ 2097152 ;
flags &= ~ 2048 ;
flags &= ~ 32768 ;
flags &= ~ 524288 ;
var newFlags = 0 ;
for ( var k in NODEFS . flagsForNodeMap ) {
if ( flags & k ) {
newFlags |= NODEFS . flagsForNodeMap [ k ] ;
// Clear each mapped bit so leftovers can be detected below.
flags ^= k ;
}
}
if ( ! flags ) {
return newFlags ;
} else {
throw new FS . ErrnoError ( 28 ) ;
}
} ,
node _ops : {
// stat the host file and normalize Windows-specific gaps (blksize/blocks).
getattr : function ( node ) {
var path = NODEFS . realPath ( node ) ;
var stat ;
try {
stat = fs . lstatSync ( path ) ;
} catch ( e ) {
if ( ! e . code ) throw e ;
throw new FS . ErrnoError ( NODEFS . convertNodeCode ( e ) ) ;
}
if ( NODEFS . isWindows && ! stat . blksize ) {
stat . blksize = 4096 ;
}
if ( NODEFS . isWindows && ! stat . blocks ) {
stat . blocks = ( ( stat . size + stat . blksize - 1 ) / stat . blksize ) | 0 ;
}
return {
dev : stat . dev ,
ino : stat . ino ,
mode : stat . mode ,
nlink : stat . nlink ,
uid : stat . uid ,
gid : stat . gid ,
rdev : stat . rdev ,
size : stat . size ,
atime : stat . atime ,
mtime : stat . mtime ,
ctime : stat . ctime ,
blksize : stat . blksize ,
blocks : stat . blocks
} ;
} ,
// Apply mode/timestamps/size to the host file via chmod/utimes/truncate.
setattr : function ( node , attr ) {
var path = NODEFS . realPath ( node ) ;
try {
if ( attr . mode !== undefined ) {
fs . chmodSync ( path , attr . mode ) ;
node . mode = attr . mode ;
}
if ( attr . timestamp !== undefined ) {
var date = new Date ( attr . timestamp ) ;
fs . utimesSync ( path , date , date ) ;
}
if ( attr . size !== undefined ) {
fs . truncateSync ( path , attr . size ) ;
}
} catch ( e ) {
if ( ! e . code ) throw e ;
throw new FS . ErrnoError ( NODEFS . convertNodeCode ( e ) ) ;
}
} ,
lookup : function ( parent , name ) {
var path = PATH . join2 ( NODEFS . realPath ( parent ) , name ) ;
var mode = NODEFS . getMode ( path ) ;
return NODEFS . createNode ( parent , name , mode ) ;
} ,
// Create a directory or (empty) regular file on the host.
mknod : function ( parent , name , mode , dev ) {
var node = NODEFS . createNode ( parent , name , mode , dev ) ;
var path = NODEFS . realPath ( node ) ;
try {
if ( FS . isDir ( node . mode ) ) {
fs . mkdirSync ( path , node . mode ) ;
} else {
fs . writeFileSync ( path , "" , { mode : node . mode } ) ;
}
} catch ( e ) {
if ( ! e . code ) throw e ;
throw new FS . ErrnoError ( NODEFS . convertNodeCode ( e ) ) ;
}
return node ;
} ,
rename : function ( oldNode , newDir , newName ) {
var oldPath = NODEFS . realPath ( oldNode ) ;
var newPath = PATH . join2 ( NODEFS . realPath ( newDir ) , newName ) ;
try {
fs . renameSync ( oldPath , newPath ) ;
} catch ( e ) {
if ( ! e . code ) throw e ;
throw new FS . ErrnoError ( NODEFS . convertNodeCode ( e ) ) ;
}
oldNode . name = newName ;
} ,
unlink : function ( parent , name ) {
var path = PATH . join2 ( NODEFS . realPath ( parent ) , name ) ;
try {
fs . unlinkSync ( path ) ;
} catch ( e ) {
if ( ! e . code ) throw e ;
throw new FS . ErrnoError ( NODEFS . convertNodeCode ( e ) ) ;
}
} ,
rmdir : function ( parent , name ) {
var path = PATH . join2 ( NODEFS . realPath ( parent ) , name ) ;
try {
fs . rmdirSync ( path ) ;
} catch ( e ) {
if ( ! e . code ) throw e ;
throw new FS . ErrnoError ( NODEFS . convertNodeCode ( e ) ) ;
}
} ,
readdir : function ( node ) {
var path = NODEFS . realPath ( node ) ;
try {
return fs . readdirSync ( path ) ;
} catch ( e ) {
if ( ! e . code ) throw e ;
throw new FS . ErrnoError ( NODEFS . convertNodeCode ( e ) ) ;
}
} ,
symlink : function ( parent , newName , oldPath ) {
var newPath = PATH . join2 ( NODEFS . realPath ( parent ) , newName ) ;
try {
fs . symlinkSync ( oldPath , newPath ) ;
} catch ( e ) {
if ( ! e . code ) throw e ;
throw new FS . ErrnoError ( NODEFS . convertNodeCode ( e ) ) ;
}
} ,
// Read the link target and re-express it relative to the mount root.
readlink : function ( node ) {
var path = NODEFS . realPath ( node ) ;
try {
path = fs . readlinkSync ( path ) ;
path = NODEJS _PATH . relative (
NODEJS _PATH . resolve ( node . mount . opts . root ) ,
path
) ;
return path ;
} catch ( e ) {
if ( ! e . code ) throw e ;
throw new FS . ErrnoError ( NODEFS . convertNodeCode ( e ) ) ;
}
}
} ,
stream _ops : {
// Open the host file descriptor; kept on stream.nfd for later ops.
open : function ( stream ) {
var path = NODEFS . realPath ( stream . node ) ;
try {
if ( FS . isFile ( stream . node . mode ) ) {
stream . nfd = fs . openSync ( path , NODEFS . flagsForNode ( stream . flags ) ) ;
}
} catch ( e ) {
if ( ! e . code ) throw e ;
throw new FS . ErrnoError ( NODEFS . convertNodeCode ( e ) ) ;
}
} ,
close : function ( stream ) {
try {
if ( FS . isFile ( stream . node . mode ) && stream . nfd ) {
fs . closeSync ( stream . nfd ) ;
}
} catch ( e ) {
if ( ! e . code ) throw e ;
throw new FS . ErrnoError ( NODEFS . convertNodeCode ( e ) ) ;
}
} ,
// Read straight from the host fd into the (heap-backed) buffer.
read : function ( stream , buffer , offset , length , position ) {
if ( length === 0 ) return 0 ;
try {
return fs . readSync (
stream . nfd ,
NODEFS . bufferFrom ( buffer . buffer ) ,
offset ,
length ,
position
) ;
} catch ( e ) {
throw new FS . ErrnoError ( NODEFS . convertNodeCode ( e ) ) ;
}
} ,
write : function ( stream , buffer , offset , length , position ) {
try {
return fs . writeSync (
stream . nfd ,
NODEFS . bufferFrom ( buffer . buffer ) ,
offset ,
length ,
position
) ;
} catch ( e ) {
throw new FS . ErrnoError ( NODEFS . convertNodeCode ( e ) ) ;
}
} ,
// SEEK_END (whence 2) consults the host file size via fstat.
llseek : function ( stream , offset , whence ) {
var position = offset ;
if ( whence === 1 ) {
position += stream . position ;
} else if ( whence === 2 ) {
if ( FS . isFile ( stream . node . mode ) ) {
try {
var stat = fs . fstatSync ( stream . nfd ) ;
position += stat . size ;
} catch ( e ) {
throw new FS . ErrnoError ( NODEFS . convertNodeCode ( e ) ) ;
}
}
}
if ( position < 0 ) {
throw new FS . ErrnoError ( 28 ) ;
}
return position ;
} ,
// mmap is emulated: allocate heap memory and read the file window into it.
mmap : function ( stream , address , length , position , prot , flags ) {
if ( address !== 0 ) {
throw new FS . ErrnoError ( 28 ) ;
}
if ( ! FS . isFile ( stream . node . mode ) ) {
throw new FS . ErrnoError ( 43 ) ;
}
var ptr = mmapAlloc ( length ) ;
NODEFS . stream _ops . read ( stream , HEAP8 , ptr , length , position ) ;
return { ptr : ptr , allocated : true } ;
} ,
// Write the mapped region back unless the mapping is MAP_PRIVATE.
msync : function ( stream , buffer , offset , length , mmapFlags ) {
if ( ! FS . isFile ( stream . node . mode ) ) {
throw new FS . ErrnoError ( 43 ) ;
}
if ( mmapFlags & 2 ) {
return 0 ;
}
var bytesWritten = NODEFS . stream _ops . write (
stream ,
buffer ,
0 ,
length ,
offset ,
false
) ;
return 0 ;
}
}
} ;
// NODERAWFS: bypasses the virtual FS layer entirely and forwards FS calls
// directly to Node.js sync APIs against raw host paths. Streams with a
// `stream_ops` property still belong to the virtual FS and are delegated
// back to VFS.
var NODERAWFS = {
lookupPath : function ( path ) {
return { path : path , node : { mode : NODEFS . getMode ( path ) } } ;
} ,
// Pre-wire fds 0-2 as unseekable TTY streams mapped to the process's stdio.
createStandardStreams : function ( ) {
FS . streams [ 0 ] = {
fd : 0 ,
nfd : 0 ,
position : 0 ,
path : "" ,
flags : 0 ,
tty : true ,
seekable : false
} ;
for ( var i = 1 ; i < 3 ; i ++ ) {
FS . streams [ i ] = {
fd : i ,
nfd : i ,
position : 0 ,
path : "" ,
// 577 = O_WRONLY | O_CREAT | O_TRUNC
flags : 577 ,
tty : true ,
seekable : false
} ;
}
} ,
cwd : function ( ) {
return process . cwd ( ) ;
} ,
chdir : function ( ) {
process . chdir . apply ( void 0 , arguments ) ;
} ,
mknod : function ( path , mode ) {
if ( FS . isDir ( path ) ) {
fs . mkdirSync ( path , mode ) ;
} else {
fs . writeFileSync ( path , "" , { mode : mode } ) ;
}
} ,
mkdir : function ( ) {
fs . mkdirSync . apply ( void 0 , arguments ) ;
} ,
symlink : function ( ) {
fs . symlinkSync . apply ( void 0 , arguments ) ;
} ,
rename : function ( ) {
fs . renameSync . apply ( void 0 , arguments ) ;
} ,
rmdir : function ( ) {
fs . rmdirSync . apply ( void 0 , arguments ) ;
} ,
// NOTE(review): unlike readlink/stat/lstat below, this does not `return`
// the entries from fs.readdirSync — upstream Emscripten's NODERAWFS does.
// Looks like a dropped return; confirm whether any caller here uses the
// result before changing generated code.
readdir : function ( ) {
fs . readdirSync . apply ( void 0 , arguments ) ;
} ,
unlink : function ( ) {
fs . unlinkSync . apply ( void 0 , arguments ) ;
} ,
readlink : function ( ) {
return fs . readlinkSync . apply ( void 0 , arguments ) ;
} ,
stat : function ( ) {
return fs . statSync . apply ( void 0 , arguments ) ;
} ,
lstat : function ( ) {
return fs . lstatSync . apply ( void 0 , arguments ) ;
} ,
chmod : function ( ) {
fs . chmodSync . apply ( void 0 , arguments ) ;
} ,
fchmod : function ( ) {
fs . fchmodSync . apply ( void 0 , arguments ) ;
} ,
chown : function ( ) {
fs . chownSync . apply ( void 0 , arguments ) ;
} ,
fchown : function ( ) {
fs . fchownSync . apply ( void 0 , arguments ) ;
} ,
truncate : function ( ) {
fs . truncateSync . apply ( void 0 , arguments ) ;
} ,
ftruncate : function ( fd , len ) {
// Host ftruncate tolerates negative lengths on some platforms; reject
// explicitly with EINVAL for POSIX conformance.
if ( len < 0 ) {
throw new FS . ErrnoError ( 28 ) ;
}
fs . ftruncateSync . apply ( void 0 , arguments ) ;
} ,
utime : function ( ) {
fs . utimesSync . apply ( void 0 , arguments ) ;
} ,
// Open a host file and register it under a runtime fd (suggestFD lets the
// caller pin a specific fd number, e.g. for dup2-style behavior).
open : function ( path , flags , mode , suggestFD ) {
if ( typeof flags === "string" ) {
flags = VFS . modeStringToFlags ( flags ) ;
}
var nfd = fs . openSync ( path , NODEFS . flagsForNode ( flags ) , mode ) ;
var fd = suggestFD != null ? suggestFD : FS . nextfd ( nfd ) ;
var stream = {
fd : fd ,
nfd : nfd ,
position : 0 ,
path : path ,
flags : flags ,
seekable : true
} ;
FS . streams [ fd ] = stream ;
return stream ;
} ,
close : function ( stream ) {
// Streams with stream_ops belong to the virtual FS; only raw streams
// own a host fd that needs closing.
if ( ! stream . stream _ops ) {
fs . closeSync ( stream . nfd ) ;
}
FS . closeStream ( stream . fd ) ;
} ,
llseek : function ( stream , offset , whence ) {
if ( stream . stream _ops ) {
return VFS . llseek ( stream , offset , whence ) ;
}
var position = offset ;
if ( whence === 1 ) {
position += stream . position ;
} else if ( whence === 2 ) {
position += fs . fstatSync ( stream . nfd ) . size ;
} else if ( whence !== 0 ) {
throw new FS . ErrnoError ( ERRNO _CODES . EINVAL ) ;
}
if ( position < 0 ) {
throw new FS . ErrnoError ( ERRNO _CODES . EINVAL ) ;
}
stream . position = position ;
return position ;
} ,
// Positioned read when `position` is given; otherwise advances stream.position.
read : function ( stream , buffer , offset , length , position ) {
if ( stream . stream _ops ) {
return VFS . read ( stream , buffer , offset , length , position ) ;
}
var seeking = typeof position !== "undefined" ;
if ( ! seeking && stream . seekable ) position = stream . position ;
var bytesRead = fs . readSync (
stream . nfd ,
NODEFS . bufferFrom ( buffer . buffer ) ,
offset ,
length ,
position
) ;
if ( ! seeking ) stream . position += bytesRead ;
return bytesRead ;
} ,
write : function ( stream , buffer , offset , length , position ) {
if ( stream . stream _ops ) {
return VFS . write ( stream , buffer , offset , length , position ) ;
}
// `+ "1024"` / `+ "2"` are Emscripten codegen artifacts: numerically
// O_APPEND (1024) and SEEK_END (2). Append mode seeks to EOF first.
if ( stream . flags & + "1024" ) {
FS . llseek ( stream , 0 , + "2" ) ;
}
var seeking = typeof position !== "undefined" ;
if ( ! seeking && stream . seekable ) position = stream . position ;
var bytesWritten = fs . writeSync (
stream . nfd ,
NODEFS . bufferFrom ( buffer . buffer ) ,
offset ,
length ,
position
) ;
if ( ! seeking ) stream . position += bytesWritten ;
return bytesWritten ;
} ,
allocate : function ( ) {
// posix_fallocate is not supported through Node's sync API.
throw new FS . ErrnoError ( ERRNO _CODES . EOPNOTSUPP ) ;
} ,
// mmap emulation: allocate heap memory and read the file window into it.
mmap : function ( stream , address , length , position , prot , flags ) {
if ( stream . stream _ops ) {
return VFS . mmap ( stream , address , length , position , prot , flags ) ;
}
if ( address !== 0 ) {
throw new FS . ErrnoError ( 28 ) ;
}
var ptr = mmapAlloc ( length ) ;
FS . read ( stream , HEAP8 , ptr , length , position ) ;
return { ptr : ptr , allocated : true } ;
} ,
msync : function ( stream , buffer , offset , length , mmapFlags ) {
if ( stream . stream _ops ) {
return VFS . msync ( stream , buffer , offset , length , mmapFlags ) ;
}
if ( mmapFlags & 2 ) {
return 0 ;
}
FS . write ( stream , buffer , 0 , length , offset ) ;
return 0 ;
} ,
munmap : function ( ) {
return 0 ;
} ,
ioctl : function ( ) {
throw new FS . ErrnoError ( ERRNO _CODES . ENOTTY ) ;
}
} ;
// FS: the Emscripten virtual filesystem core. State fields below; the
// object's methods continue past this section.
var FS = {
root : null ,
mounts : [ ] ,
// Registered character devices, keyed by device number.
devices : { } ,
// Open streams indexed by fd; holes are reusable fd slots.
streams : [ ] ,
nextInode : 1 ,
// Hash table of nodes chained via name_next; sized lazily elsewhere.
nameTable : null ,
currentPath : "/" ,
initialized : false ,
// Permission checks are skipped while true (set during startup).
ignorePermissions : true ,
trackingDelegate : { } ,
tracking : { openFlags : { READ : 1 , WRITE : 2 } } ,
// Assigned during FS initialization (constructor lives elsewhere).
ErrnoError : null ,
genericErrors : { } ,
filesystems : null ,
// Count of in-flight syncfs calls, used to warn about overlap.
syncFSRequests : 0 ,
// Resolve a path to { path, node }, following mounts and symlinks.
// opts: parent (stop at parent dir), follow (resolve final symlink),
// follow_mount (descend into mounted roots), recurse_count (symlink depth).
lookupPath : function ( path , opts ) {
path = PATH _FS . resolve ( FS . cwd ( ) , path ) ;
opts = opts || { } ;
if ( ! path ) return { path : "" , node : null } ;
var defaults = { follow _mount : true , recurse _count : 0 } ;
for ( var key in defaults ) {
if ( opts [ key ] === undefined ) {
opts [ key ] = defaults [ key ] ;
}
}
// ELOOP (32): too many nested symlink resolutions.
if ( opts . recurse _count > 8 ) {
throw new FS . ErrnoError ( 32 ) ;
}
var parts = PATH . normalizeArray (
path . split ( "/" ) . filter ( function ( p ) {
return ! ! p ;
} ) ,
false
) ;
var current = FS . root ;
var current _path = "/" ;
for ( var i = 0 ; i < parts . length ; i ++ ) {
var islast = i === parts . length - 1 ;
if ( islast && opts . parent ) {
// Caller wants the parent directory; stop before the final component.
break ;
}
current = FS . lookupNode ( current , parts [ i ] ) ;
current _path = PATH . join2 ( current _path , parts [ i ] ) ;
// Cross into a mounted filesystem's root, unless this is the final
// component and the caller asked not to follow mounts.
if ( FS . isMountpoint ( current ) ) {
if ( ! islast || ( islast && opts . follow _mount ) ) {
current = current . mounted . root ;
}
}
// Resolve symlinks for intermediate components always, and for the
// final one only if opts.follow is set.
if ( ! islast || opts . follow ) {
var count = 0 ;
while ( FS . isLink ( current . mode ) ) {
var link = FS . readlink ( current _path ) ;
current _path = PATH _FS . resolve ( PATH . dirname ( current _path ) , link ) ;
var lookup = FS . lookupPath ( current _path , {
recurse _count : opts . recurse _count
} ) ;
current = lookup . node ;
// ELOOP guard for chains of symlinks at the same level.
if ( count ++ > 40 ) {
throw new FS . ErrnoError ( 32 ) ;
}
}
}
}
return { path : current _path , node : current } ;
} ,
// Rebuild the absolute path of a node by walking up to its mount root.
getPath : function ( node ) {
var path ;
while ( true ) {
if ( FS . isRoot ( node ) ) {
var mount = node . mount . mountpoint ;
if ( ! path ) return mount ;
return mount [ mount . length - 1 ] !== "/"
? mount + "/" + path
: mount + path ;
}
path = path ? node . name + "/" + path : node . name ;
node = node . parent ;
}
} ,
// Hash a (parent id, name) pair into a nameTable bucket index.
hashName : function ( parentid , name ) {
var hash = 0 ;
for ( var i = 0 ; i < name . length ; i ++ ) {
hash = ( ( hash << 5 ) - hash + name . charCodeAt ( i ) ) | 0 ;
}
return ( ( parentid + hash ) >>> 0 ) % FS . nameTable . length ;
} ,
// Insert a node at the head of its bucket's name_next chain.
hashAddNode : function ( node ) {
var hash = FS . hashName ( node . parent . id , node . name ) ;
node . name _next = FS . nameTable [ hash ] ;
FS . nameTable [ hash ] = node ;
} ,
// Unlink a node from its bucket's chain (no-op if absent).
hashRemoveNode : function ( node ) {
var hash = FS . hashName ( node . parent . id , node . name ) ;
if ( FS . nameTable [ hash ] === node ) {
FS . nameTable [ hash ] = node . name _next ;
} else {
var current = FS . nameTable [ hash ] ;
while ( current ) {
if ( current . name _next === node ) {
current . name _next = node . name _next ;
break ;
}
current = current . name _next ;
}
}
} ,
// Find a child by name: consult the hash table first, then delegate to
// the backend's lookup (which may materialize the node).
lookupNode : function ( parent , name ) {
var errCode = FS . mayLookup ( parent ) ;
if ( errCode ) {
throw new FS . ErrnoError ( errCode , parent ) ;
}
var hash = FS . hashName ( parent . id , name ) ;
for ( var node = FS . nameTable [ hash ] ; node ; node = node . name _next ) {
var nodeName = node . name ;
if ( node . parent . id === parent . id && nodeName === name ) {
return node ;
}
}
return FS . lookup ( parent , name ) ;
} ,
createNode : function ( parent , name , mode , rdev ) {
var node = new FS . FSNode ( parent , name , mode , rdev ) ;
FS . hashAddNode ( node ) ;
return node ;
} ,
destroyNode : function ( node ) {
FS . hashRemoveNode ( node ) ;
} ,
// --- mode predicates: each tests bits of the S_IFMT field (61440 = 0xF000) ---
isRoot : function ( node ) {
return node === node . parent ;
} ,
isMountpoint : function ( node ) {
return ! ! node . mounted ;
} ,
// S_IFREG
isFile : function ( mode ) {
return ( mode & 61440 ) === 32768 ;
} ,
// S_IFDIR
isDir : function ( mode ) {
return ( mode & 61440 ) === 16384 ;
} ,
// S_IFLNK
isLink : function ( mode ) {
return ( mode & 61440 ) === 40960 ;
} ,
// S_IFCHR
isChrdev : function ( mode ) {
return ( mode & 61440 ) === 8192 ;
} ,
// S_IFBLK
isBlkdev : function ( mode ) {
return ( mode & 61440 ) === 24576 ;
} ,
// S_IFIFO
isFIFO : function ( mode ) {
return ( mode & 61440 ) === 4096 ;
} ,
isSocket : function ( mode ) {
return ( mode & 49152 ) === 49152 ;
} ,
// fopen-style mode strings mapped to open(2) flag bits.
flagModes : { r : 0 , "r+" : 2 , w : 577 , "w+" : 578 , a : 1089 , "a+" : 1090 } ,
modeStringToFlags : function ( str ) {
var flags = FS . flagModes [ str ] ;
if ( typeof flags === "undefined" ) {
throw new Error ( "Unknown file open mode: " + str ) ;
}
return flags ;
} ,
// Translate open flags into an "r"/"w"/"rw" permission string;
// O_TRUNC (512) implies write permission is needed.
flagsToPermissionString : function ( flag ) {
var perms = [ "r" , "w" , "rw" ] [ flag & 3 ] ;
if ( flag & 512 ) {
perms += "w" ;
}
return perms ;
} ,
// Check requested perms ("r"/"w"/"x" chars) against the node's mode bits.
// Returns 0 on success or errno 2 (EACCES) on failure.
nodePermissions : function ( node , perms ) {
if ( FS . ignorePermissions ) {
return 0 ;
}
// 292 = 0444 (read), 146 = 0222 (write), 73 = 0111 (execute).
if ( perms . includes ( "r" ) && ! ( node . mode & 292 ) ) {
return 2 ;
} else if ( perms . includes ( "w" ) && ! ( node . mode & 146 ) ) {
return 2 ;
} else if ( perms . includes ( "x" ) && ! ( node . mode & 73 ) ) {
return 2 ;
}
return 0 ;
} ,
mayLookup : function ( dir ) {
var errCode = FS . nodePermissions ( dir , "x" ) ;
if ( errCode ) return errCode ;
if ( ! dir . node _ops . lookup ) return 2 ;
return 0 ;
} ,
// EEXIST (20) if the name already resolves; otherwise require wx on dir.
mayCreate : function ( dir , name ) {
try {
var node = FS . lookupNode ( dir , name ) ;
return 20 ;
} catch ( e ) { }
return FS . nodePermissions ( dir , "wx" ) ;
} ,
// Validate a delete: ENOTDIR(54)/EBUSY(10) for rmdir cases, EISDIR(31)
// when unlinking a directory.
mayDelete : function ( dir , name , isdir ) {
var node ;
try {
node = FS . lookupNode ( dir , name ) ;
} catch ( e ) {
return e . errno ;
}
var errCode = FS . nodePermissions ( dir , "wx" ) ;
if ( errCode ) {
return errCode ;
}
if ( isdir ) {
if ( ! FS . isDir ( node . mode ) ) {
return 54 ;
}
// Refuse to remove the root or the current working directory.
if ( FS . isRoot ( node ) || FS . getPath ( node ) === FS . cwd ( ) ) {
return 10 ;
}
} else {
if ( FS . isDir ( node . mode ) ) {
return 31 ;
}
}
return 0 ;
} ,
// Validate an open: ENOENT(44), ELOOP(32) for symlinks, EISDIR(31) for
// write-opens on directories; then ordinary permission bits.
mayOpen : function ( node , flags ) {
if ( ! node ) {
return 44 ;
}
if ( FS . isLink ( node . mode ) ) {
return 32 ;
} else if ( FS . isDir ( node . mode ) ) {
if ( FS . flagsToPermissionString ( flags ) !== "r" || flags & 512 ) {
return 31 ;
}
}
return FS . nodePermissions ( node , FS . flagsToPermissionString ( flags ) ) ;
} ,
MAX _OPEN _FDS : 4096 ,
// Find the lowest free fd in [fd_start, fd_end]; EMFILE (33) if exhausted.
nextfd : function ( fd _start , fd _end ) {
fd _start = fd _start || 0 ;
fd _end = fd _end || FS . MAX _OPEN _FDS ;
for ( var fd = fd _start ; fd <= fd _end ; fd ++ ) {
if ( ! FS . streams [ fd ] ) {
return fd ;
}
}
throw new FS . ErrnoError ( 33 ) ;
} ,
getStream : function ( fd ) {
return FS . streams [ fd ] ;
} ,
// Wrap a plain stream descriptor in an FSStream instance and register it
// under a newly allocated fd.
createStream : function ( stream , fd _start , fd _end ) {
if ( ! FS . FSStream ) {
// Lazily define the stream class; prototype props are accessor-style
// descriptors consumed elsewhere (not via Object.defineProperty here).
FS . FSStream = function ( ) { } ;
FS . FSStream . prototype = {
object : {
get : function ( ) {
return this . node ;
} ,
set : function ( val ) {
this . node = val ;
}
} ,
isRead : {
get : function ( ) {
// 2097155 masks the access-mode bits; 1 means write-only.
return ( this . flags & 2097155 ) !== 1 ;
}
} ,
isWrite : {
get : function ( ) {
return ( this . flags & 2097155 ) !== 0 ;
}
} ,
isAppend : {
get : function ( ) {
return this . flags & 1024 ;
}
}
} ;
}
// Shallow-copy the caller's fields onto the new instance.
var newStream = new FS . FSStream ( ) ;
for ( var p in stream ) {
newStream [ p ] = stream [ p ] ;
}
stream = newStream ;
var fd = FS . nextfd ( fd _start , fd _end ) ;
stream . fd = fd ;
FS . streams [ fd ] = stream ;
return stream ;
} ,
closeStream : function ( fd ) {
FS . streams [ fd ] = null ;
} ,
// Default ops for character-device streams: open swaps in the device's
// own stream_ops; seeking a chrdev is ESPIPE (70).
chrdev _stream _ops : {
open : function ( stream ) {
var device = FS . getDevice ( stream . node . rdev ) ;
stream . stream _ops = device . stream _ops ;
if ( stream . stream _ops . open ) {
stream . stream _ops . open ( stream ) ;
}
} ,
llseek : function ( ) {
throw new FS . ErrnoError ( 70 ) ;
}
} ,
// Device numbers pack major in the high byte, minor in the low byte.
major : function ( dev ) {
return dev >> 8 ;
} ,
minor : function ( dev ) {
return dev & 255 ;
} ,
makedev : function ( ma , mi ) {
return ( ma << 8 ) | mi ;
} ,
registerDevice : function ( dev , ops ) {
FS . devices [ dev ] = { stream _ops : ops } ;
} ,
getDevice : function ( dev ) {
return FS . devices [ dev ] ;
} ,
// Collect a mount and all mounts nested beneath it (iterative DFS).
getMounts : function ( mount ) {
var mounts = [ ] ;
var check = [ mount ] ;
while ( check . length ) {
var m = check . pop ( ) ;
mounts . push ( m ) ;
check . push . apply ( check , m . mounts ) ;
}
return mounts ;
} ,
// Synchronize all mounted filesystems that support syncfs, then invoke
// callback(errCode). `populate` selects direction (load vs persist); it
// may be omitted, in which case the first argument is the callback.
syncfs : function ( populate , callback ) {
if ( typeof populate === "function" ) {
callback = populate ;
populate = false ;
}
FS . syncFSRequests ++ ;
if ( FS . syncFSRequests > 1 ) {
err (
"warning: " +
FS . syncFSRequests +
" FS.syncfs operations in flight at once, probably just doing extra work"
) ;
}
var mounts = FS . getMounts ( FS . root . mount ) ;
var completed = 0 ;
function doCallback ( errCode ) {
FS . syncFSRequests -- ;
return callback ( errCode ) ;
}
// Completion gate: report the first error once, else success after all.
function done ( errCode ) {
if ( errCode ) {
if ( ! done . errored ) {
done . errored = true ;
return doCallback ( errCode ) ;
}
return ;
}
if ( ++ completed >= mounts . length ) {
doCallback ( null ) ;
}
}
mounts . forEach ( function ( mount ) {
if ( ! mount . type . syncfs ) {
return done ( null ) ;
}
mount . type . syncfs ( mount , populate , done ) ;
} ) ;
} ,
// Mount a filesystem `type` at `mountpoint` ("/" = root, falsy = pseudo
// mount not attached to the tree). Returns the mounted root node.
mount : function ( type , opts , mountpoint ) {
var root = mountpoint === "/" ;
var pseudo = ! mountpoint ;
var node ;
if ( root && FS . root ) {
// EBUSY (10): root already mounted.
throw new FS . ErrnoError ( 10 ) ;
} else if ( ! root && ! pseudo ) {
var lookup = FS . lookupPath ( mountpoint , { follow _mount : false } ) ;
mountpoint = lookup . path ;
node = lookup . node ;
if ( FS . isMountpoint ( node ) ) {
throw new FS . ErrnoError ( 10 ) ;
}
if ( ! FS . isDir ( node . mode ) ) {
throw new FS . ErrnoError ( 54 ) ;
}
}
var mount = { type : type , opts : opts , mountpoint : mountpoint , mounts : [ ] } ;
var mountRoot = type . mount ( mount ) ;
mountRoot . mount = mount ;
mount . root = mountRoot ;
if ( root ) {
FS . root = mountRoot ;
} else if ( node ) {
node . mounted = mount ;
if ( node . mount ) {
node . mount . mounts . push ( mount ) ;
}
}
return mountRoot ;
} ,
// Detach the mount at mountpoint, destroying all cached nodes that
// belong to it or any nested mount.
unmount : function ( mountpoint ) {
var lookup = FS . lookupPath ( mountpoint , { follow _mount : false } ) ;
if ( ! FS . isMountpoint ( lookup . node ) ) {
throw new FS . ErrnoError ( 28 ) ;
}
var node = lookup . node ;
var mount = node . mounted ;
var mounts = FS . getMounts ( mount ) ;
Object . keys ( FS . nameTable ) . forEach ( function ( hash ) {
var current = FS . nameTable [ hash ] ;
while ( current ) {
var next = current . name _next ;
if ( mounts . includes ( current . mount ) ) {
FS . destroyNode ( current ) ;
}
current = next ;
}
} ) ;
node . mounted = null ;
var idx = node . mount . mounts . indexOf ( mount ) ;
node . mount . mounts . splice ( idx , 1 ) ;
} ,
lookup : function ( parent , name ) {
return parent . node _ops . lookup ( parent , name ) ;
} ,
// Create a filesystem node (file/dir/device) after permission checks.
mknod : function ( path , mode , dev ) {
var lookup = FS . lookupPath ( path , { parent : true } ) ;
var parent = lookup . node ;
var name = PATH . basename ( path ) ;
if ( ! name || name === "." || name === ".." ) {
throw new FS . ErrnoError ( 28 ) ;
}
var errCode = FS . mayCreate ( parent , name ) ;
if ( errCode ) {
throw new FS . ErrnoError ( errCode ) ;
}
if ( ! parent . node _ops . mknod ) {
// EPERM (63): backend does not support node creation.
throw new FS . ErrnoError ( 63 ) ;
}
return parent . node _ops . mknod ( parent , name , mode , dev ) ;
} ,
// create: regular file; default mode 438 = 0666, forced S_IFREG.
create : function ( path , mode ) {
mode = mode !== undefined ? mode : 438 ;
mode &= 4095 ;
mode |= 32768 ;
return FS . mknod ( path , mode , 0 ) ;
} ,
// mkdir: default mode 511 = 0777, forced S_IFDIR; sticky bit allowed.
mkdir : function ( path , mode ) {
mode = mode !== undefined ? mode : 511 ;
mode &= 511 | 512 ;
mode |= 16384 ;
return FS . mknod ( path , mode , 0 ) ;
} ,
// mkdir -p: create each path component, ignoring EEXIST (20) only.
mkdirTree : function ( path , mode ) {
var dirs = path . split ( "/" ) ;
var d = "" ;
for ( var i = 0 ; i < dirs . length ; ++ i ) {
if ( ! dirs [ i ] ) continue ;
d += "/" + dirs [ i ] ;
try {
FS . mkdir ( d , mode ) ;
} catch ( e ) {
if ( e . errno != 20 ) throw e ;
}
}
} ,
// mkdev(path[, mode], dev): character device node; mode defaults to 0666.
mkdev : function ( path , mode , dev ) {
if ( typeof dev === "undefined" ) {
dev = mode ;
mode = 438 ;
}
mode |= 8192 ;
return FS . mknod ( path , mode , dev ) ;
} ,
symlink : function ( oldpath , newpath ) {
if ( ! PATH _FS . resolve ( oldpath ) ) {
throw new FS . ErrnoError ( 44 ) ;
}
var lookup = FS . lookupPath ( newpath , { parent : true } ) ;
var parent = lookup . node ;
if ( ! parent ) {
throw new FS . ErrnoError ( 44 ) ;
}
var newname = PATH . basename ( newpath ) ;
var errCode = FS . mayCreate ( parent , newname ) ;
if ( errCode ) {
throw new FS . ErrnoError ( errCode ) ;
}
if ( ! parent . node _ops . symlink ) {
throw new FS . ErrnoError ( 63 ) ;
}
return parent . node _ops . symlink ( parent , newname , oldpath ) ;
} ,
rename : function ( old _path , new _path ) {
var old _dirname = PATH . dirname ( old _path ) ;
var new _dirname = PATH . dirname ( new _path ) ;
var old _name = PATH . basename ( old _path ) ;
var new _name = PATH . basename ( new _path ) ;
var lookup , old _dir , new _dir ;
lookup = FS . lookupPath ( old _path , { parent : true } ) ;
old _dir = lookup . node ;
lookup = FS . lookupPath ( new _path , { parent : true } ) ;
new _dir = lookup . node ;
if ( ! old _dir || ! new _dir ) throw new FS . ErrnoError ( 44 ) ;
if ( old _dir . mount !== new _dir . mount ) {
throw new FS . ErrnoError ( 75 ) ;
}
var old _node = FS . lookupNode ( old _dir , old _name ) ;
var relative = PATH _FS . relative ( old _path , new _dirname ) ;
if ( relative . charAt ( 0 ) !== "." ) {
throw new FS . ErrnoError ( 28 ) ;
}
relative = PATH _FS . relative ( new _path , old _dirname ) ;
if ( relative . charAt ( 0 ) !== "." ) {
throw new FS . ErrnoError ( 55 ) ;
}
var new _node ;
try {
new _node = FS . lookupNode ( new _dir , new _name ) ;
} catch ( e ) { }
if ( old _node === new _node ) {
return ;
}
var isdir = FS . isDir ( old _node . mode ) ;
var errCode = FS . mayDelete ( old _dir , old _name , isdir ) ;
if ( errCode ) {
throw new FS . ErrnoError ( errCode ) ;
}
errCode = new _node
? FS . mayDelete ( new _dir , new _name , isdir )
: FS . mayCreate ( new _dir , new _name ) ;
if ( errCode ) {
throw new FS . ErrnoError ( errCode ) ;
}
if ( ! old _dir . node _ops . rename ) {
throw new FS . ErrnoError ( 63 ) ;
}
if ( FS . isMountpoint ( old _node ) || ( new _node && FS . isMountpoint ( new _node ) ) ) {
throw new FS . ErrnoError ( 10 ) ;
}
if ( new _dir !== old _dir ) {
errCode = FS . nodePermissions ( old _dir , "w" ) ;
if ( errCode ) {
throw new FS . ErrnoError ( errCode ) ;
}
}
try {
if ( FS . trackingDelegate [ "willMovePath" ] ) {
FS . trackingDelegate [ "willMovePath" ] ( old _path , new _path ) ;
}
} catch ( e ) {
err (
"FS.trackingDelegate['willMovePath']('" +
old _path +
"', '" +
new _path +
"') threw an exception: " +
e . message
) ;
}
FS . hashRemoveNode ( old _node ) ;
try {
old _dir . node _ops . rename ( old _node , new _dir , new _name ) ;
} catch ( e ) {
throw e ;
} finally {
FS . hashAddNode ( old _node ) ;
}
try {
if ( FS . trackingDelegate [ "onMovePath" ] )
FS . trackingDelegate [ "onMovePath" ] ( old _path , new _path ) ;
} catch ( e ) {
err (
"FS.trackingDelegate['onMovePath']('" +
old _path +
"', '" +
new _path +
"') threw an exception: " +
e . message
) ;
}
} ,
// Remove a directory. Mountpoints are refused. Tracking-delegate hooks are
// best-effort: their exceptions are logged, never propagated.
rmdir: function (path) {
  var lookup = FS.lookupPath(path, { parent: true });
  var parent = lookup.node;
  var name = PATH.basename(path);
  var node = FS.lookupNode(parent, name);
  var errCode = FS.mayDelete(parent, name, true);
  if (errCode) {
    throw new FS.ErrnoError(errCode);
  }
  if (!parent.node_ops.rmdir) {
    throw new FS.ErrnoError(63); // EPERM
  }
  if (FS.isMountpoint(node)) {
    throw new FS.ErrnoError(10); // EBUSY
  }
  try {
    if (FS.trackingDelegate["willDeletePath"]) {
      FS.trackingDelegate["willDeletePath"](path);
    }
  } catch (e) {
    err(
      "FS.trackingDelegate['willDeletePath']('" +
        path +
        "') threw an exception: " +
        e.message
    );
  }
  parent.node_ops.rmdir(parent, name);
  FS.destroyNode(node);
  try {
    if (FS.trackingDelegate["onDeletePath"])
      FS.trackingDelegate["onDeletePath"](path);
  } catch (e) {
    err(
      "FS.trackingDelegate['onDeletePath']('" +
        path +
        "') threw an exception: " +
        e.message
    );
  }
},
readdir : function ( path ) {
var lookup = FS . lookupPath ( path , { follow : true } ) ;
var node = lookup . node ;
if ( ! node . node _ops . readdir ) {
throw new FS . ErrnoError ( 54 ) ;
}
return node . node _ops . readdir ( node ) ;
} ,
// Remove a non-directory node. Mountpoints are refused. Tracking-delegate
// hooks are best-effort: their exceptions are logged, never propagated.
unlink: function (path) {
  var lookup = FS.lookupPath(path, { parent: true });
  var parent = lookup.node;
  var name = PATH.basename(path);
  var node = FS.lookupNode(parent, name);
  var errCode = FS.mayDelete(parent, name, false);
  if (errCode) {
    throw new FS.ErrnoError(errCode);
  }
  if (!parent.node_ops.unlink) {
    throw new FS.ErrnoError(63); // EPERM
  }
  if (FS.isMountpoint(node)) {
    throw new FS.ErrnoError(10); // EBUSY
  }
  try {
    if (FS.trackingDelegate["willDeletePath"]) {
      FS.trackingDelegate["willDeletePath"](path);
    }
  } catch (e) {
    err(
      "FS.trackingDelegate['willDeletePath']('" +
        path +
        "') threw an exception: " +
        e.message
    );
  }
  parent.node_ops.unlink(parent, name);
  FS.destroyNode(node);
  try {
    if (FS.trackingDelegate["onDeletePath"])
      FS.trackingDelegate["onDeletePath"](path);
  } catch (e) {
    err(
      "FS.trackingDelegate['onDeletePath']('" +
        path +
        "') threw an exception: " +
        e.message
    );
  }
},
// Return the symlink's target, resolved against the link's parent directory.
readlink: function (path) {
  var lookup = FS.lookupPath(path);
  var link = lookup.node;
  if (!link) {
    throw new FS.ErrnoError(44); // ENOENT
  }
  if (!link.node_ops.readlink) {
    throw new FS.ErrnoError(28); // EINVAL: node is not a symlink
  }
  return PATH_FS.resolve(
    FS.getPath(link.parent),
    link.node_ops.readlink(link)
  );
},
// stat(2): return the node's attribute object. Follows a trailing symlink
// unless dontFollow is set (used by lstat below).
stat: function (path, dontFollow) {
  var lookup = FS.lookupPath(path, { follow: !dontFollow });
  var node = lookup.node;
  if (!node) {
    throw new FS.ErrnoError(44); // ENOENT
  }
  if (!node.node_ops.getattr) {
    throw new FS.ErrnoError(63); // EPERM
  }
  return node.node_ops.getattr(node);
},
// stat() without following a trailing symlink.
lstat: function (path) {
  return FS.stat(path, true);
},
// Change permission bits. `path` may be a path string or a node object.
chmod: function (path, mode, dontFollow) {
  var node;
  if (typeof path === "string") {
    var lookup = FS.lookupPath(path, { follow: !dontFollow });
    node = lookup.node;
  } else {
    node = path;
  }
  if (!node.node_ops.setattr) {
    throw new FS.ErrnoError(63); // EPERM
  }
  node.node_ops.setattr(node, {
    // keep the file-type bits, replace only the low permission bits
    mode: (mode & 4095) | (node.mode & ~4095),
    timestamp: Date.now()
  });
},
// chmod() variant that does not follow a trailing symlink.
lchmod: function (path, mode) {
  FS.chmod(path, mode, true);
},
fchmod : function ( fd , mode ) {
var stream = FS . getStream ( fd ) ;
if ( ! stream ) {
throw new FS . ErrnoError ( 8 ) ;
}
FS . chmod ( stream . node , mode ) ;
} ,
// chown stub: uid/gid are accepted but not stored anywhere — only the
// node's timestamp is refreshed. `path` may be a path string or a node.
chown: function (path, uid, gid, dontFollow) {
  var node;
  if (typeof path === "string") {
    var lookup = FS.lookupPath(path, { follow: !dontFollow });
    node = lookup.node;
  } else {
    node = path;
  }
  if (!node.node_ops.setattr) {
    throw new FS.ErrnoError(63); // EPERM
  }
  node.node_ops.setattr(node, { timestamp: Date.now() });
},
// chown() variant that does not follow a trailing symlink.
lchown: function (path, uid, gid) {
  FS.chown(path, uid, gid, true);
},
fchown : function ( fd , uid , gid ) {
var stream = FS . getStream ( fd ) ;
if ( ! stream ) {
throw new FS . ErrnoError ( 8 ) ;
}
FS . chown ( stream . node , uid , gid ) ;
} ,
// Truncate a regular file to `len` bytes. `path` may be a path string or
// a node object; the actual resize is delegated to node_ops.setattr.
truncate: function (path, len) {
  if (len < 0) {
    throw new FS.ErrnoError(28); // EINVAL
  }
  var node;
  if (typeof path === "string") {
    var lookup = FS.lookupPath(path, { follow: true });
    node = lookup.node;
  } else {
    node = path;
  }
  if (!node.node_ops.setattr) {
    throw new FS.ErrnoError(63); // EPERM
  }
  if (FS.isDir(node.mode)) {
    throw new FS.ErrnoError(31); // EISDIR
  }
  if (!FS.isFile(node.mode)) {
    throw new FS.ErrnoError(28); // EINVAL: not a regular file
  }
  var errCode = FS.nodePermissions(node, "w");
  if (errCode) {
    throw new FS.ErrnoError(errCode);
  }
  node.node_ops.setattr(node, { size: len, timestamp: Date.now() });
},
// truncate() by file descriptor; the stream must be open for writing.
ftruncate: function (fd, len) {
  var stream = FS.getStream(fd);
  if (!stream) {
    throw new FS.ErrnoError(8); // EBADF
  }
  if ((stream.flags & 2097155) === 0) {
    // access mode is read-only
    throw new FS.ErrnoError(28); // EINVAL
  }
  FS.truncate(stream.node, len);
},
// Set access/modification time. Nodes here keep a single timestamp, so the
// later of atime/mtime is stored.
utime: function (path, atime, mtime) {
  var lookup = FS.lookupPath(path, { follow: true });
  var node = lookup.node;
  node.node_ops.setattr(node, { timestamp: Math.max(atime, mtime) });
},
// Open (and optionally create) a file, returning a new FS stream.
// `flags` may be a mode string ("r", "w+", ...) or numeric open(2)-style
// flags. Bits used below: 64=O_CREAT, 128=O_EXCL, 512=O_TRUNC,
// 1024=O_APPEND-style handled elsewhere, 65536=O_DIRECTORY,
// 131072=O_NOFOLLOW; 2097155 masks the access mode.
open: function (path, flags, mode, fd_start, fd_end) {
  if (path === "") {
    throw new FS.ErrnoError(44); // ENOENT
  }
  flags = typeof flags === "string" ? FS.modeStringToFlags(flags) : flags;
  mode = typeof mode === "undefined" ? 438 : mode; // default 0666
  if (flags & 64) {
    mode = (mode & 4095) | 32768; // O_CREAT: permission bits + S_IFREG
  } else {
    mode = 0;
  }
  var node;
  if (typeof path === "object") {
    node = path; // callers may pass a node directly
  } else {
    path = PATH.normalize(path);
    try {
      var lookup = FS.lookupPath(path, { follow: !(flags & 131072) });
      node = lookup.node;
    } catch (e) {
      // not-found is handled below (either created or ENOENT)
    }
  }
  var created = false;
  if (flags & 64) {
    if (node) {
      if (flags & 128) {
        throw new FS.ErrnoError(20); // EEXIST with O_CREAT|O_EXCL
      }
    } else {
      node = FS.mknod(path, mode, 0);
      created = true;
    }
  }
  if (!node) {
    throw new FS.ErrnoError(44);
  }
  if (FS.isChrdev(node.mode)) {
    flags &= ~512; // character devices cannot be truncated
  }
  if (flags & 65536 && !FS.isDir(node.mode)) {
    throw new FS.ErrnoError(54); // ENOTDIR with O_DIRECTORY
  }
  // Freshly created nodes skip the permission check.
  if (!created) {
    var errCode = FS.mayOpen(node, flags);
    if (errCode) {
      throw new FS.ErrnoError(errCode);
    }
  }
  if (flags & 512) {
    FS.truncate(node, 0); // O_TRUNC
  }
  // Strip creation-time-only flags before storing them on the stream.
  flags &= ~(128 | 512 | 131072);
  var stream = FS.createStream(
    {
      node: node,
      path: FS.getPath(node),
      flags: flags,
      seekable: true,
      position: 0,
      stream_ops: node.stream_ops,
      ungotten: [],
      error: false
    },
    fd_start,
    fd_end
  );
  if (stream.stream_ops.open) {
    stream.stream_ops.open(stream);
  }
  // Optional debug aid: log the first read-open of each file.
  if (Module["logReadFiles"] && !(flags & 1)) {
    if (!FS.readFiles) FS.readFiles = {};
    if (!(path in FS.readFiles)) {
      FS.readFiles[path] = 1;
      err("FS.trackingDelegate error on read file: " + path);
    }
  }
  // Tracking-delegate hook is best-effort: errors are logged only.
  try {
    if (FS.trackingDelegate["onOpenFile"]) {
      var trackingFlags = 0;
      if ((flags & 2097155) !== 1) {
        trackingFlags |= FS.tracking.openFlags.READ;
      }
      if ((flags & 2097155) !== 0) {
        trackingFlags |= FS.tracking.openFlags.WRITE;
      }
      FS.trackingDelegate["onOpenFile"](path, trackingFlags);
    }
  } catch (e) {
    err(
      "FS.trackingDelegate['onOpenFile']('" +
        path +
        "', flags) threw an exception: " +
        e.message
    );
  }
  return stream;
},
close : function ( stream ) {
if ( FS . isClosed ( stream ) ) {
throw new FS . ErrnoError ( 8 ) ;
}
if ( stream . getdents ) stream . getdents = null ;
try {
if ( stream . stream _ops . close ) {
stream . stream _ops . close ( stream ) ;
}
} catch ( e ) {
throw e ;
} finally {
FS . closeStream ( stream . fd ) ;
}
stream . fd = null ;
} ,
// True once close() has nulled the stream's fd.
isClosed: function (stream) {
  return stream.fd === null;
},
// lseek(2): whence 0=SEEK_SET, 1=SEEK_CUR, 2=SEEK_END.
llseek: function (stream, offset, whence) {
  if (FS.isClosed(stream)) {
    throw new FS.ErrnoError(8); // EBADF
  }
  if (!stream.seekable || !stream.stream_ops.llseek) {
    throw new FS.ErrnoError(70); // ESPIPE
  }
  if (whence != 0 && whence != 1 && whence != 2) {
    throw new FS.ErrnoError(28); // EINVAL
  }
  stream.position = stream.stream_ops.llseek(stream, offset, whence);
  stream.ungotten = []; // pushed-back bytes are invalidated by a seek
  return stream.position;
},
// read(2)/pread(2): read up to `length` bytes into buffer[offset..].
// When `position` is supplied the stream offset is left untouched (pread).
read: function (stream, buffer, offset, length, position) {
  if (length < 0 || position < 0) {
    throw new FS.ErrnoError(28); // EINVAL
  }
  if (FS.isClosed(stream)) {
    throw new FS.ErrnoError(8); // EBADF
  }
  if ((stream.flags & 2097155) === 1) {
    // stream is write-only
    throw new FS.ErrnoError(8);
  }
  if (FS.isDir(stream.node.mode)) {
    throw new FS.ErrnoError(31); // EISDIR
  }
  if (!stream.stream_ops.read) {
    throw new FS.ErrnoError(28);
  }
  var seeking = typeof position !== "undefined";
  if (!seeking) {
    position = stream.position;
  } else if (!stream.seekable) {
    throw new FS.ErrnoError(70); // ESPIPE: pread on a non-seekable stream
  }
  var bytesRead = stream.stream_ops.read(
    stream,
    buffer,
    offset,
    length,
    position
  );
  if (!seeking) stream.position += bytesRead;
  return bytesRead;
},
// write(2)/pwrite(2): write `length` bytes from buffer[offset..]. With the
// append flag (1024) the stream seeks to EOF first. An explicit `position`
// leaves the stream offset untouched (pwrite). The tracking-delegate hook
// is best-effort: its errors are logged, never propagated.
write: function (stream, buffer, offset, length, position, canOwn) {
  if (length < 0 || position < 0) {
    throw new FS.ErrnoError(28); // EINVAL
  }
  if (FS.isClosed(stream)) {
    throw new FS.ErrnoError(8); // EBADF
  }
  if ((stream.flags & 2097155) === 0) {
    // stream is read-only
    throw new FS.ErrnoError(8);
  }
  if (FS.isDir(stream.node.mode)) {
    throw new FS.ErrnoError(31); // EISDIR
  }
  if (!stream.stream_ops.write) {
    throw new FS.ErrnoError(28);
  }
  if (stream.seekable && stream.flags & 1024) {
    FS.llseek(stream, 0, 2); // append mode: seek to end before writing
  }
  var seeking = typeof position !== "undefined";
  if (!seeking) {
    position = stream.position;
  } else if (!stream.seekable) {
    throw new FS.ErrnoError(70); // ESPIPE
  }
  var bytesWritten = stream.stream_ops.write(
    stream,
    buffer,
    offset,
    length,
    position,
    canOwn
  );
  if (!seeking) stream.position += bytesWritten;
  try {
    if (stream.path && FS.trackingDelegate["onWriteToFile"])
      FS.trackingDelegate["onWriteToFile"](stream.path);
  } catch (e) {
    err(
      "FS.trackingDelegate['onWriteToFile']('" +
        stream.path +
        "') threw an exception: " +
        e.message
    );
  }
  return bytesWritten;
},
// posix_fallocate-style space reservation; only supported when the backing
// stream implements `allocate` (otherwise errno 138 is thrown).
allocate: function (stream, offset, length) {
  if (FS.isClosed(stream)) {
    throw new FS.ErrnoError(8); // EBADF
  }
  if (offset < 0 || length <= 0) {
    throw new FS.ErrnoError(28); // EINVAL
  }
  if ((stream.flags & 2097155) === 0) {
    // stream is read-only
    throw new FS.ErrnoError(8);
  }
  if (!FS.isFile(stream.node.mode) && !FS.isDir(stream.node.mode)) {
    throw new FS.ErrnoError(43); // ENODEV
  }
  if (!stream.stream_ops.allocate) {
    throw new FS.ErrnoError(138);
  }
  stream.stream_ops.allocate(stream, offset, length);
},
// mmap(2): writable shared mappings require a stream open for read+write;
// write-only streams cannot be mapped at all.
mmap: function (stream, address, length, position, prot, flags) {
  if (
    (prot & 2) !== 0 && // PROT_WRITE requested
    (flags & 2) === 0 && // and mapping is shared (not MAP_PRIVATE)
    (stream.flags & 2097155) !== 2 // but stream is not read-write
  ) {
    throw new FS.ErrnoError(2); // EACCES
  }
  if ((stream.flags & 2097155) === 1) {
    // write-only stream
    throw new FS.ErrnoError(2);
  }
  if (!stream.stream_ops.mmap) {
    throw new FS.ErrnoError(43); // ENODEV
  }
  return stream.stream_ops.mmap(
    stream,
    address,
    length,
    position,
    prot,
    flags
  );
},
msync : function ( stream , buffer , offset , length , mmapFlags ) {
if ( ! stream || ! stream . stream _ops . msync ) {
return 0 ;
}
return stream . stream _ops . msync ( stream , buffer , offset , length , mmapFlags ) ;
} ,
// munmap is a no-op at this layer; always reports success.
munmap: function (stream) {
  return 0;
},
ioctl : function ( stream , cmd , arg ) {
if ( ! stream . stream _ops . ioctl ) {
throw new FS . ErrnoError ( 59 ) ;
}
return stream . stream _ops . ioctl ( stream , cmd , arg ) ;
} ,
// Read an entire file into memory. opts.encoding selects the return type:
// "binary" (Uint8Array, default) or "utf8" (string).
readFile: function (path, opts) {
  opts = opts || {};
  opts.flags = opts.flags || 0;
  opts.encoding = opts.encoding || "binary";
  if (opts.encoding !== "utf8" && opts.encoding !== "binary") {
    throw new Error('Invalid encoding type "' + opts.encoding + '"');
  }
  var ret;
  var stream = FS.open(path, opts.flags);
  var stat = FS.stat(path);
  var length = stat.size;
  var buf = new Uint8Array(length);
  FS.read(stream, buf, 0, length, 0);
  if (opts.encoding === "utf8") {
    ret = UTF8ArrayToString(buf, 0);
  } else if (opts.encoding === "binary") {
    ret = buf;
  }
  FS.close(stream);
  return ret;
},
// Write a string or typed-array view to a file. Default flags 577 open the
// file write-only, creating and truncating it.
writeFile: function (path, data, opts) {
  opts = opts || {};
  opts.flags = opts.flags || 577;
  var stream = FS.open(path, opts.flags, opts.mode);
  if (typeof data === "string") {
    // +1 for the NUL terminator stringToUTF8Array appends (not written out)
    var buf = new Uint8Array(lengthBytesUTF8(data) + 1);
    var actualNumBytes = stringToUTF8Array(data, buf, 0, buf.length);
    FS.write(stream, buf, 0, actualNumBytes, undefined, opts.canOwn);
  } else if (ArrayBuffer.isView(data)) {
    FS.write(stream, data, 0, data.byteLength, undefined, opts.canOwn);
  } else {
    throw new Error("Unsupported data type");
  }
  FS.close(stream);
},
// Current working directory as an absolute path string.
cwd: function () {
  return FS.currentPath;
},
// chdir(2): target must exist, be a directory, and be searchable ("x").
chdir: function (path) {
  var lookup = FS.lookupPath(path, { follow: true });
  if (lookup.node === null) {
    throw new FS.ErrnoError(44); // ENOENT
  }
  if (!FS.isDir(lookup.node.mode)) {
    throw new FS.ErrnoError(54); // ENOTDIR
  }
  var errCode = FS.nodePermissions(lookup.node, "x");
  if (errCode) {
    throw new FS.ErrnoError(errCode);
  }
  FS.currentPath = lookup.path;
},
createDefaultDirectories : function ( ) {
FS . mkdir ( "/tmp" ) ;
FS . mkdir ( "/home" ) ;
FS . mkdir ( "/home/web_user" ) ;
} ,
// Populate /dev: null, tty, tty1, random, urandom and /dev/shm(/tmp).
createDefaultDevices: function () {
  FS.mkdir("/dev");
  // /dev/null: reads return EOF, writes are swallowed.
  FS.registerDevice(FS.makedev(1, 3), {
    read: function () {
      return 0;
    },
    write: function (stream, buffer, offset, length, pos) {
      return length;
    }
  });
  FS.mkdev("/dev/null", FS.makedev(1, 3));
  TTY.register(FS.makedev(5, 0), TTY.default_tty_ops);
  TTY.register(FS.makedev(6, 0), TTY.default_tty1_ops);
  FS.mkdev("/dev/tty", FS.makedev(5, 0));
  FS.mkdev("/dev/tty1", FS.makedev(6, 0));
  // random and urandom share one backing device implementation.
  var random_device = getRandomDevice();
  FS.createDevice("/dev", "random", random_device);
  FS.createDevice("/dev", "urandom", random_device);
  FS.mkdir("/dev/shm");
  FS.mkdir("/dev/shm/tmp");
},
// Mount a fake /proc/self/fd in which looking up a numeric name yields a
// symlink-like node whose readlink() resolves to that open stream's path.
createSpecialDirectories: function () {
  FS.mkdir("/proc");
  var proc_self = FS.mkdir("/proc/self");
  FS.mkdir("/proc/self/fd");
  FS.mount(
    {
      mount: function () {
        var node = FS.createNode(proc_self, "fd", 16384 | 511, 73);
        node.node_ops = {
          lookup: function (parent, name) {
            var fd = +name; // entry names are fd numbers
            var stream = FS.getStream(fd);
            if (!stream) throw new FS.ErrnoError(8); // EBADF
            var ret = {
              parent: null,
              mount: { mountpoint: "fake" },
              node_ops: {
                readlink: function () {
                  return stream.path;
                }
              }
            };
            ret.parent = ret; // the fake node is its own root
            return ret;
          }
        };
        return node;
      }
    },
    {},
    "/proc/self/fd"
  );
},
// Wire up stdio. Custom Module stdin/stdout/stderr callbacks become
// devices; otherwise the standard paths symlink to the terminal devices.
createStandardStreams: function () {
  if (Module["stdin"]) {
    FS.createDevice("/dev", "stdin", Module["stdin"]);
  } else {
    FS.symlink("/dev/tty", "/dev/stdin");
  }
  if (Module["stdout"]) {
    FS.createDevice("/dev", "stdout", null, Module["stdout"]);
  } else {
    FS.symlink("/dev/tty", "/dev/stdout");
  }
  if (Module["stderr"]) {
    FS.createDevice("/dev", "stderr", null, Module["stderr"]);
  } else {
    FS.symlink("/dev/tty1", "/dev/stderr");
  }
  // Opened in order so they receive fds 0, 1 and 2 respectively.
  var stdin = FS.open("/dev/stdin", 0);
  var stdout = FS.open("/dev/stdout", 1);
  var stderr = FS.open("/dev/stderr", 1);
},
// Define FS.ErrnoError once, and pre-allocate the common errno-44 error
// object with a placeholder stack so hot paths avoid building real traces.
ensureErrnoError: function () {
  if (FS.ErrnoError) return; // already initialized
  FS.ErrnoError = function ErrnoError(errno, node) {
    this.node = node;
    this.setErrno = function (errno) {
      this.errno = errno;
    };
    this.setErrno(errno);
    this.message = "FS error";
  };
  FS.ErrnoError.prototype = new Error();
  FS.ErrnoError.prototype.constructor = FS.ErrnoError;
  [44].forEach(function (code) {
    FS.genericErrors[code] = new FS.ErrnoError(code);
    FS.genericErrors[code].stack = "<generic error, no stack>";
  });
},
// One-time setup: error class, name-hash table, root MEMFS mount, and the
// default directory/device tree.
staticInit: function () {
  FS.ensureErrnoError();
  FS.nameTable = new Array(4096); // hash table used for node name lookup
  FS.mount(MEMFS, {}, "/");
  FS.createDefaultDirectories();
  FS.createDefaultDevices();
  FS.createSpecialDirectories();
  FS.filesystems = { MEMFS: MEMFS, NODEFS: NODEFS };
},
// Runtime init: allow overriding the stdio callbacks, then open fds 0-2.
init: function (input, output, error) {
  FS.init.initialized = true;
  FS.ensureErrnoError();
  Module["stdin"] = input || Module["stdin"];
  Module["stdout"] = output || Module["stdout"];
  Module["stderr"] = error || Module["stderr"];
  FS.createStandardStreams();
},
// Shutdown: flush the C stdio buffers and close every open stream.
quit: function () {
  FS.init.initialized = false;
  var fflush = Module["_fflush"];
  if (fflush) fflush(0);
  for (var i = 0; i < FS.streams.length; i++) {
    var stream = FS.streams[i];
    if (!stream) {
      continue; // fd slot not in use
    }
    FS.close(stream);
  }
},
getMode : function ( canRead , canWrite ) {
var mode = 0 ;
if ( canRead ) mode |= 292 | 73 ;
if ( canWrite ) mode |= 146 ;
return mode ;
} ,
findObject : function ( path , dontResolveLastLink ) {
var ret = FS . analyzePath ( path , dontResolveLastLink ) ;
if ( ret . exists ) {
return ret . object ;
} else {
return null ;
}
} ,
// Describe a path without throwing: existence, resolved path/object, and
// the same for its parent. Lookup failures land in ret.error (an errno).
analyzePath: function (path, dontResolveLastLink) {
  try {
    // normalize through symlinks first so the parent lookup below sees
    // the fully resolved path
    var lookup = FS.lookupPath(path, { follow: !dontResolveLastLink });
    path = lookup.path;
  } catch (e) {}
  var ret = {
    isRoot: false,
    exists: false,
    error: 0,
    name: null,
    path: null,
    object: null,
    parentExists: false,
    parentPath: null,
    parentObject: null
  };
  try {
    var lookup = FS.lookupPath(path, { parent: true });
    ret.parentExists = true;
    ret.parentPath = lookup.path;
    ret.parentObject = lookup.node;
    ret.name = PATH.basename(path);
    lookup = FS.lookupPath(path, { follow: !dontResolveLastLink });
    ret.exists = true;
    ret.path = lookup.path;
    ret.object = lookup.node;
    ret.name = lookup.node.name;
    ret.isRoot = lookup.path === "/";
  } catch (e) {
    ret.error = e.errno;
  }
  return ret;
},
// mkdir -p relative to `parent`; returns the deepest path visited.
// canRead/canWrite are accepted for API symmetry but unused here.
createPath: function (parent, path, canRead, canWrite) {
  parent = typeof parent === "string" ? parent : FS.getPath(parent);
  var parts = path.split("/").reverse();
  while (parts.length) {
    var part = parts.pop();
    if (!part) continue; // skip empty segments
    var current = PATH.join2(parent, part);
    try {
      FS.mkdir(current);
    } catch (e) {
      // directory may already exist -- keep descending
    }
    parent = current;
  }
  return current;
},
createFile : function ( parent , name , properties , canRead , canWrite ) {
var path = PATH . join2 (
typeof parent === "string" ? parent : FS . getPath ( parent ) ,
name
) ;
var mode = FS . getMode ( canRead , canWrite ) ;
return FS . create ( path , mode ) ;
} ,
// Create a file pre-filled with `data` (string or array of byte values).
// The mode is temporarily widened so the initial write succeeds even for
// read-only files, then restored.
createDataFile: function (parent, name, data, canRead, canWrite, canOwn) {
  var path = name
    ? PATH.join2(
        typeof parent === "string" ? parent : FS.getPath(parent),
        name
      )
    : parent;
  var mode = FS.getMode(canRead, canWrite);
  var node = FS.create(path, mode);
  if (data) {
    if (typeof data === "string") {
      // convert the string to an array of char codes
      var arr = new Array(data.length);
      for (var i = 0, len = data.length; i < len; ++i)
        arr[i] = data.charCodeAt(i);
      data = arr;
    }
    FS.chmod(node, mode | 146); // make writable for the initial fill
    var stream = FS.open(node, 577);
    FS.write(stream, data, 0, data.length, 0, canOwn);
    FS.close(stream);
    FS.chmod(node, mode); // restore the requested mode
  }
  return node;
},
// Register a character device driven by byte-wise input()/output()
// callbacks and create its node at parent/name. Major numbers are handed
// out sequentially starting at 64.
createDevice: function (parent, name, input, output) {
  var path = PATH.join2(
    typeof parent === "string" ? parent : FS.getPath(parent),
    name
  );
  var mode = FS.getMode(!!input, !!output);
  if (!FS.createDevice.major) FS.createDevice.major = 64;
  var dev = FS.makedev(FS.createDevice.major++, 0);
  FS.registerDevice(dev, {
    open: function (stream) {
      stream.seekable = false; // devices are not seekable
    },
    close: function (stream) {
      // flush a trailing newline if the output callback buffered anything
      if (output && output.buffer && output.buffer.length) {
        output(10);
      }
    },
    read: function (stream, buffer, offset, length, pos) {
      var bytesRead = 0;
      for (var i = 0; i < length; i++) {
        var result;
        try {
          result = input();
        } catch (e) {
          throw new FS.ErrnoError(29); // EIO
        }
        if (result === undefined && bytesRead === 0) {
          throw new FS.ErrnoError(6); // EAGAIN: no data available yet
        }
        if (result === null || result === undefined) break; // EOF
        bytesRead++;
        buffer[offset + i] = result;
      }
      if (bytesRead) {
        stream.node.timestamp = Date.now();
      }
      return bytesRead;
    },
    write: function (stream, buffer, offset, length, pos) {
      for (var i = 0; i < length; i++) {
        try {
          output(buffer[offset + i]);
        } catch (e) {
          throw new FS.ErrnoError(29); // EIO
        }
      }
      if (length) {
        stream.node.timestamp = Date.now();
      }
      return i;
    }
  });
  return FS.mkdev(path, mode, dev);
},
// Ensure a lazy file's contents are resident in memory. On platforms with
// XMLHttpRequest the content should already have been fetched lazily, so
// reaching this code there is an error; shell environments fall back to
// the read_() helper.
forceLoadFile: function (obj) {
  if (obj.isDevice || obj.isFolder || obj.link || obj.contents) return true;
  if (typeof XMLHttpRequest !== "undefined") {
    throw new Error(
      "Lazy loading should have been performed (contents set) in createLazyFile, but it was not. Lazy loading only works in web workers. Use --embed-file or --preload-file in emcc on the main thread."
    );
  } else if (read_) {
    try {
      obj.contents = intArrayFromString(read_(obj.url), true);
      obj.usedBytes = obj.contents.length;
    } catch (e) {
      throw new FS.ErrnoError(29); // EIO
    }
  } else {
    throw new Error("Cannot load without read() or XMLHttpRequest.");
  }
},
// Create a file whose contents are fetched lazily over synchronous XHR,
// chunk by chunk, the first time they are accessed. Only usable inside
// web workers (synchronous binary XHR is banned on the main thread).
createLazyFile: function (parent, name, url, canRead, canWrite) {
  // Array-like object that pulls fixed-size chunks on demand.
  function LazyUint8Array() {
    this.lengthKnown = false;
    this.chunks = []; // chunkNum -> fetched chunk data
  }
  LazyUint8Array.prototype.get = function LazyUint8Array_get(idx) {
    if (idx > this.length - 1 || idx < 0) {
      return undefined;
    }
    var chunkOffset = idx % this.chunkSize;
    var chunkNum = (idx / this.chunkSize) | 0;
    return this.getter(chunkNum)[chunkOffset];
  };
  LazyUint8Array.prototype.setDataGetter = function LazyUint8Array_setDataGetter(
    getter
  ) {
    this.getter = getter;
  };
  // HEAD request to learn length + server capabilities, then install a
  // chunked (or whole-file) getter.
  LazyUint8Array.prototype.cacheLength = function LazyUint8Array_cacheLength() {
    var xhr = new XMLHttpRequest();
    xhr.open("HEAD", url, false);
    xhr.send(null);
    if (!((xhr.status >= 200 && xhr.status < 300) || xhr.status === 304))
      throw new Error("Couldn't load " + url + ". Status: " + xhr.status);
    var datalength = Number(xhr.getResponseHeader("Content-length"));
    var header;
    var hasByteServing =
      (header = xhr.getResponseHeader("Accept-Ranges")) && header === "bytes";
    var usesGzip =
      (header = xhr.getResponseHeader("Content-Encoding")) &&
      header === "gzip";
    var chunkSize = 1024 * 1024; // 1 MiB chunks when ranges are supported
    if (!hasByteServing) chunkSize = datalength; // otherwise one big chunk
    var doXHR = function (from, to) {
      if (from > to)
        throw new Error(
          "invalid range (" + from + ", " + to + ") or no bytes requested!"
        );
      if (to > datalength - 1)
        throw new Error(
          "only " + datalength + " bytes available! programmer error!"
        );
      var xhr = new XMLHttpRequest();
      xhr.open("GET", url, false);
      if (datalength !== chunkSize)
        xhr.setRequestHeader("Range", "bytes=" + from + "-" + to);
      if (typeof Uint8Array != "undefined") xhr.responseType = "arraybuffer";
      if (xhr.overrideMimeType) {
        xhr.overrideMimeType("text/plain; charset=x-user-defined");
      }
      xhr.send(null);
      if (!((xhr.status >= 200 && xhr.status < 300) || xhr.status === 304))
        throw new Error("Couldn't load " + url + ". Status: " + xhr.status);
      if (xhr.response !== undefined) {
        return new Uint8Array(xhr.response || []);
      } else {
        return intArrayFromString(xhr.responseText || "", true);
      }
    };
    var lazyArray = this;
    lazyArray.setDataGetter(function (chunkNum) {
      var start = chunkNum * chunkSize;
      var end = (chunkNum + 1) * chunkSize - 1;
      end = Math.min(end, datalength - 1); // the last chunk may be short
      if (typeof lazyArray.chunks[chunkNum] === "undefined") {
        lazyArray.chunks[chunkNum] = doXHR(start, end);
      }
      if (typeof lazyArray.chunks[chunkNum] === "undefined")
        throw new Error("doXHR failed!");
      return lazyArray.chunks[chunkNum];
    });
    if (usesGzip || !datalength) {
      // Content-Length is missing or unreliable (gzip): fetch the whole
      // file once to discover its real length.
      chunkSize = datalength = 1;
      datalength = this.getter(0).length;
      chunkSize = datalength;
      out(
        "LazyFiles on gzip forces download of the whole file when length is accessed"
      );
    }
    this._length = datalength;
    this._chunkSize = chunkSize;
    this.lengthKnown = true;
  };
  if (typeof XMLHttpRequest !== "undefined") {
    if (!ENVIRONMENT_IS_WORKER)
      throw "Cannot do synchronous binary XHRs outside webworkers in modern browsers. Use --embed-file or --preload-file in emcc";
    var lazyArray = new LazyUint8Array();
    Object.defineProperties(lazyArray, {
      // length/chunkSize trigger the HEAD request on first access
      length: {
        get: function () {
          if (!this.lengthKnown) {
            this.cacheLength();
          }
          return this._length;
        }
      },
      chunkSize: {
        get: function () {
          if (!this.lengthKnown) {
            this.cacheLength();
          }
          return this._chunkSize;
        }
      }
    });
    var properties = { isDevice: false, contents: lazyArray };
  } else {
    // No XHR available: record the URL for forceLoadFile to use later.
    var properties = { isDevice: false, url: url };
  }
  var node = FS.createFile(parent, name, properties, canRead, canWrite);
  if (properties.contents) {
    node.contents = properties.contents;
  } else if (properties.url) {
    node.contents = null;
    node.url = properties.url;
  }
  Object.defineProperties(node, {
    usedBytes: {
      get: function () {
        return this.contents.length;
      }
    }
  });
  // Wrap every existing stream op so the file is fully loaded before use.
  var stream_ops = {};
  var keys = Object.keys(node.stream_ops);
  keys.forEach(function (key) {
    var fn = node.stream_ops[key];
    stream_ops[key] = function forceLoadLazyFile() {
      FS.forceLoadFile(node);
      return fn.apply(null, arguments);
    };
  });
  // read() is replaced outright: copy straight out of the lazy contents.
  stream_ops.read = function stream_ops_read(
    stream,
    buffer,
    offset,
    length,
    position
  ) {
    FS.forceLoadFile(node);
    var contents = stream.node.contents;
    if (position >= contents.length) return 0;
    var size = Math.min(contents.length - position, length);
    if (contents.slice) {
      // plain array contents: index directly
      for (var i = 0; i < size; i++) {
        buffer[offset + i] = contents[position + i];
      }
    } else {
      // LazyUint8Array contents: go through get()
      for (var i = 0; i < size; i++) {
        buffer[offset + i] = contents.get(position + i);
      }
    }
    return size;
  };
  node.stream_ops = stream_ops;
  return node;
},
// Asynchronously fetch `url` (or use it directly when it is already a
// byte array), run any registered preload plugins over the data, then
// create the file. Registers a run dependency so program startup waits
// for this to finish.
createPreloadedFile: function (
  parent,
  name,
  url,
  canRead,
  canWrite,
  onload,
  onerror,
  dontCreateFile,
  canOwn,
  preFinish
) {
  Browser.init();
  var fullname = name ? PATH_FS.resolve(PATH.join2(parent, name)) : parent;
  var dep = getUniqueRunDependency("cp " + fullname);
  function processData(byteArray) {
    function finish(byteArray) {
      if (preFinish) preFinish();
      if (!dontCreateFile) {
        FS.createDataFile(parent, name, byteArray, canRead, canWrite, canOwn);
      }
      if (onload) onload();
      removeRunDependency(dep);
    }
    var handled = false;
    Module["preloadPlugins"].forEach(function (plugin) {
      if (handled) return; // first matching plugin wins
      if (plugin["canHandle"](fullname)) {
        plugin["handle"](byteArray, fullname, finish, function () {
          if (onerror) onerror();
          removeRunDependency(dep);
        });
        handled = true;
      }
    });
    if (!handled) finish(byteArray);
  }
  addRunDependency(dep);
  if (typeof url == "string") {
    Browser.asyncLoad(
      url,
      function (byteArray) {
        processData(byteArray);
      },
      onerror
    );
  } else {
    processData(url); // url is already the data
  }
},
// IndexedDB handle, tolerating legacy vendor prefixes.
indexedDB: function () {
  return (
    window.indexedDB ||
    window.mozIndexedDB ||
    window.webkitIndexedDB ||
    window.msIndexedDB
  );
},
// Per-page database name so different apps on one origin don't collide.
DB_NAME: function () {
  return "EM_FS_" + window.location.pathname;
},
DB _VERSION : 20 ,
DB _STORE _NAME : "FILE_DATA" ,
// Persist the contents of `paths` into IndexedDB. onload fires only when
// every put succeeded; any failure reports through onerror instead.
saveFilesToDB: function (paths, onload, onerror) {
  onload = onload || function () {};
  onerror = onerror || function () {};
  var indexedDB = FS.indexedDB();
  try {
    var openRequest = indexedDB.open(FS.DB_NAME(), FS.DB_VERSION);
  } catch (e) {
    return onerror(e);
  }
  openRequest.onupgradeneeded = function openRequest_onupgradeneeded() {
    out("creating db");
    var db = openRequest.result;
    db.createObjectStore(FS.DB_STORE_NAME);
  };
  openRequest.onsuccess = function openRequest_onsuccess() {
    var db = openRequest.result;
    var transaction = db.transaction([FS.DB_STORE_NAME], "readwrite");
    var files = transaction.objectStore(FS.DB_STORE_NAME);
    var ok = 0,
      fail = 0,
      total = paths.length;
    // called after every put settles; reports once all are done
    function finish() {
      if (fail == 0) onload();
      else onerror();
    }
    paths.forEach(function (path) {
      var putRequest = files.put(FS.analyzePath(path).object.contents, path);
      putRequest.onsuccess = function putRequest_onsuccess() {
        ok++;
        if (ok + fail == total) finish();
      };
      putRequest.onerror = function putRequest_onerror() {
        fail++;
        if (ok + fail == total) finish();
      };
    });
    transaction.onerror = onerror;
  };
  openRequest.onerror = onerror;
},
// Restore files previously saved with saveFilesToDB. Existing files at
// the same paths are replaced. onload fires only if every get succeeded.
loadFilesFromDB: function (paths, onload, onerror) {
  onload = onload || function () {};
  onerror = onerror || function () {};
  var indexedDB = FS.indexedDB();
  try {
    var openRequest = indexedDB.open(FS.DB_NAME(), FS.DB_VERSION);
  } catch (e) {
    return onerror(e);
  }
  // An upgrade event means the store never existed: nothing to load.
  openRequest.onupgradeneeded = onerror;
  openRequest.onsuccess = function openRequest_onsuccess() {
    var db = openRequest.result;
    try {
      var transaction = db.transaction([FS.DB_STORE_NAME], "readonly");
    } catch (e) {
      onerror(e);
      return;
    }
    var files = transaction.objectStore(FS.DB_STORE_NAME);
    var ok = 0,
      fail = 0,
      total = paths.length;
    // called after every get settles; reports once all are done
    function finish() {
      if (fail == 0) onload();
      else onerror();
    }
    paths.forEach(function (path) {
      var getRequest = files.get(path);
      getRequest.onsuccess = function getRequest_onsuccess() {
        if (FS.analyzePath(path).exists) {
          FS.unlink(path); // replace any existing file
        }
        FS.createDataFile(
          PATH.dirname(path),
          PATH.basename(path),
          getRequest.result,
          true,
          true,
          true
        );
        ok++;
        if (ok + fail == total) finish();
      };
      getRequest.onerror = function getRequest_onerror() {
        fail++;
        if (ok + fail == total) finish();
      };
    });
    transaction.onerror = onerror;
  };
  openRequest.onerror = onerror;
},
} ;
// Helpers shared by the syscall shims below. All pointer arguments are
// byte offsets into the wasm heap (HEAP8/HEAP16/HEAP32 views).
var SYSCALLS = {
  mappings: {},
  // 5 = POLLIN(1) | POLLOUT(4) — presumably matches the libc poll bits; confirm.
  DEFAULT_POLLMASK: 5,
  // 511 = 0o777.
  umask: 511,
  // Resolve `path` relative to the directory fd `dirfd`, per the *at()
  // syscall convention. -100 is the AT_FDCWD sentinel (use the cwd).
  calculateAt: function (dirfd, path, allowEmpty) {
    if (path[0] === "/") {
      return path;
    }
    var dir;
    if (dirfd === -100) {
      dir = FS.cwd();
    } else {
      var dirstream = FS.getStream(dirfd);
      // errno 8 — bad file descriptor in this runtime's numbering.
      if (!dirstream) throw new FS.ErrnoError(8);
      dir = dirstream.path;
    }
    if (path.length == 0) {
      // errno 44 — no such entry; empty paths are only legal with allowEmpty.
      if (!allowEmpty) {
        throw new FS.ErrnoError(44);
      }
      return dir;
    }
    return PATH.join2(dir, path);
  },
  // Run `func(path)` and serialize the resulting stat object into the
  // `struct stat` buffer at `buf` as 32-bit fields. 64-bit fields (size,
  // ino) are split into low/high words via the tempI64 scratch array.
  // Returns 0, or -54 when the failure came from a node other than the
  // one named by `path`.
  doStat: function (func, path, buf) {
    try {
      var stat = func(path);
    } catch (e) {
      if (
        e &&
        e.node &&
        PATH.normalize(path) !== PATH.normalize(FS.getPath(e.node))
      ) {
        return -54;
      }
      throw e;
    }
    HEAP32[buf >> 2] = stat.dev;
    HEAP32[(buf + 4) >> 2] = 0;
    HEAP32[(buf + 8) >> 2] = stat.ino;
    HEAP32[(buf + 12) >> 2] = stat.mode;
    HEAP32[(buf + 16) >> 2] = stat.nlink;
    HEAP32[(buf + 20) >> 2] = stat.uid;
    HEAP32[(buf + 24) >> 2] = stat.gid;
    HEAP32[(buf + 28) >> 2] = stat.rdev;
    HEAP32[(buf + 32) >> 2] = 0;
    // st_size as i64: low word, then a sign-aware high word computed from
    // the double value.
    (tempI64 = [
      stat.size >>> 0,
      ((tempDouble = stat.size),
      +Math.abs(tempDouble) >= 1
        ? tempDouble > 0
          ? (Math.min(+Math.floor(tempDouble / 4294967296), 4294967295) | 0) >>>
            0
          : ~~+Math.ceil((tempDouble - +(~~tempDouble >>> 0)) / 4294967296) >>>
            0
        : 0)
    ]),
      (HEAP32[(buf + 40) >> 2] = tempI64[0]),
      (HEAP32[(buf + 44) >> 2] = tempI64[1]);
    // Fixed block size of 4096 bytes.
    HEAP32[(buf + 48) >> 2] = 4096;
    HEAP32[(buf + 52) >> 2] = stat.blocks;
    // Timestamps in whole seconds; the adjacent (nanosecond) words are zeroed.
    HEAP32[(buf + 56) >> 2] = (stat.atime.getTime() / 1e3) | 0;
    HEAP32[(buf + 60) >> 2] = 0;
    HEAP32[(buf + 64) >> 2] = (stat.mtime.getTime() / 1e3) | 0;
    HEAP32[(buf + 68) >> 2] = 0;
    HEAP32[(buf + 72) >> 2] = (stat.ctime.getTime() / 1e3) | 0;
    HEAP32[(buf + 76) >> 2] = 0;
    // st_ino repeated as an i64 at the end of the struct.
    (tempI64 = [
      stat.ino >>> 0,
      ((tempDouble = stat.ino),
      +Math.abs(tempDouble) >= 1
        ? tempDouble > 0
          ? (Math.min(+Math.floor(tempDouble / 4294967296), 4294967295) | 0) >>>
            0
          : ~~+Math.ceil((tempDouble - +(~~tempDouble >>> 0)) / 4294967296) >>>
            0
        : 0)
    ]),
      (HEAP32[(buf + 80) >> 2] = tempI64[0]),
      (HEAP32[(buf + 84) >> 2] = tempI64[1]);
    return 0;
  },
  // msync: copy `len` heap bytes at `addr` and flush them to the mapped stream.
  doMsync: function (addr, stream, len, flags, offset) {
    var buffer = HEAPU8.slice(addr, addr + len);
    FS.msync(stream, buffer, offset, len, flags);
  },
  // mkdir, with any single trailing slash stripped first.
  doMkdir: function (path, mode) {
    path = PATH.normalize(path);
    if (path[path.length - 1] === "/") path = path.substr(0, path.length - 1);
    FS.mkdir(path, mode, 0);
    return 0;
  },
  // mknod: 61440 masks the S_IFMT file-type bits; only regular files
  // (32768), char devices (8192), block devices (24576), FIFOs (4096) and
  // sockets (49152) are accepted. Anything else is -28 (invalid argument).
  doMknod: function (path, mode, dev) {
    switch (mode & 61440) {
      case 32768:
      case 8192:
      case 24576:
      case 4096:
      case 49152:
        break;
      default:
        return -28;
    }
    FS.mknod(path, mode, dev);
    return 0;
  },
  // readlink: write the link target into `buf` and return its byte length.
  // The byte just past the copied text is saved and restored because
  // stringToUTF8 always NUL-terminates, and readlink must not.
  doReadlink: function (path, buf, bufsize) {
    if (bufsize <= 0) return -28;
    var ret = FS.readlink(path);
    var len = Math.min(bufsize, lengthBytesUTF8(ret));
    var endChar = HEAP8[buf + len];
    stringToUTF8(ret, buf, bufsize + 1);
    HEAP8[buf + len] = endChar;
    return len;
  },
  // access(2): validate the r/w/x bits in `amode` against the node's
  // permissions. -44 = no such entry, -2 = permission denied.
  doAccess: function (path, amode) {
    if (amode & ~7) {
      return -28;
    }
    var node;
    var lookup = FS.lookupPath(path, { follow: true });
    node = lookup.node;
    if (!node) {
      return -44;
    }
    var perms = "";
    if (amode & 4) perms += "r";
    if (amode & 2) perms += "w";
    if (amode & 1) perms += "x";
    if (perms && FS.nodePermissions(node, perms)) {
      return -2;
    }
    return 0;
  },
  // dup2-style helper: close whatever currently occupies `suggestFD`,
  // then reopen `path` pinned to that exact descriptor number.
  doDup: function (path, flags, suggestFD) {
    var suggest = FS.getStream(suggestFD);
    if (suggest) FS.close(suggest);
    return FS.open(path, flags, 0, suggestFD, suggestFD).fd;
  },
  // readv: scatter-read into the iovec array at `iov` (each entry is an
  // 8-byte pointer+length pair). Returns total bytes read; stops early
  // after a short read.
  doReadv: function (stream, iov, iovcnt, offset) {
    var ret = 0;
    for (var i = 0; i < iovcnt; i++) {
      var ptr = HEAP32[(iov + i * 8) >> 2];
      var len = HEAP32[(iov + (i * 8 + 4)) >> 2];
      var curr = FS.read(stream, HEAP8, ptr, len, offset);
      if (curr < 0) return -1;
      ret += curr;
      if (curr < len) break;
    }
    return ret;
  },
  // writev: gather-write from the iovec array at `iov`; returns bytes written.
  doWritev: function (stream, iov, iovcnt, offset) {
    var ret = 0;
    for (var i = 0; i < iovcnt; i++) {
      var ptr = HEAP32[(iov + i * 8) >> 2];
      var len = HEAP32[(iov + (i * 8 + 4)) >> 2];
      var curr = FS.write(stream, HEAP8, ptr, len, offset);
      if (curr < 0) return -1;
      ret += curr;
    }
    return ret;
  },
  // Heap address of the current syscall's variadic argument cursor.
  varargs: undefined,
  // Pop the next 32-bit variadic argument and advance the cursor.
  get: function () {
    SYSCALLS.varargs += 4;
    var ret = HEAP32[(SYSCALLS.varargs - 4) >> 2];
    return ret;
  },
  // Read a NUL-terminated C string from the heap.
  getStr: function (ptr) {
    var ret = UTF8ToString(ptr);
    return ret;
  },
  // Look up an open stream by fd; throws errno 8 (bad fd) if unknown.
  getStreamFromFD: function (fd) {
    var stream = FS.getStream(fd);
    if (!stream) throw new FS.ErrnoError(8);
    return stream;
  },
  // Collapse a split 64-bit argument to its low 32 bits (high half ignored).
  get64: function (low, high) {
    return low;
  }
};
// chmod(2) shim: change the mode bits of the file at `path` (a heap
// pointer to a C string). Returns 0 on success, a negative errno otherwise.
function ___sys_chmod(path, mode) {
  try {
    FS.chmod(SYSCALLS.getStr(path), mode);
    return 0;
  } catch (e) {
    if (typeof FS === "undefined" || !(e instanceof FS.ErrnoError)) abort(e);
    return -e.errno;
  }
}
// Store `value` into the C-side errno location and return it.
function setErrNo(value) {
  const addr = ___errno_location();
  HEAP32[addr >> 2] = value;
  return value;
}
// fcntl64(2) shim. The cmd numbers appear to follow the Linux fcntl
// commands (0=F_DUPFD, 1/2=F_GETFD/F_SETFD, 3=F_GETFL, 4=F_SETFL,
// 12-14=F_GETLK64/F_SETLK64/F_SETLKW64, 8/9/16=F_SETOWN/F_GETOWN/
// F_GETOWN_EX — presumably; confirm against the libc headers).
function ___sys_fcntl64(fd, cmd, varargs) {
  SYSCALLS.varargs = varargs;
  try {
    var stream = SYSCALLS.getStreamFromFD(fd);
    switch (cmd) {
      case 0: {
        // Duplicate the fd onto the lowest descriptor >= arg.
        var arg = SYSCALLS.get();
        if (arg < 0) {
          return -28;
        }
        var newStream;
        newStream = FS.open(stream.path, stream.flags, 0, arg);
        return newStream.fd;
      }
      case 1:
      case 2:
        // FD flags (e.g. close-on-exec) are not tracked: report success.
        return 0;
      case 3:
        return stream.flags;
      case 4: {
        // Set file status flags by OR-ing into the stream's flags.
        var arg = SYSCALLS.get();
        stream.flags |= arg;
        return 0;
      }
      case 12: {
        // Lock query: write 2 (F_UNLCK — presumably) into the first 16-bit
        // field of the caller's struct flock, i.e. "not locked".
        var arg = SYSCALLS.get();
        var offset = 0;
        HEAP16[(arg + offset) >> 1] = 2;
        return 0;
      }
      case 13:
      case 14:
        // Taking/releasing locks is a no-op that reports success.
        return 0;
      case 16:
      case 8:
        return -28;
      case 9:
        // Fail with errno 28 via the C errno channel and a -1 return.
        setErrNo(28);
        return -1;
      default: {
        return -28;
      }
    }
  } catch (e) {
    if (typeof FS === "undefined" || !(e instanceof FS.ErrnoError)) abort(e);
    return -e.errno;
  }
}
// fstat64(2) shim: stat the open file behind `fd`, serializing the result
// into the stat buffer at `buf`. Returns 0 or a negative errno.
function ___sys_fstat64(fd, buf) {
  try {
    const stream = SYSCALLS.getStreamFromFD(fd);
    return SYSCALLS.doStat(FS.stat, stream.path, buf);
  } catch (e) {
    if (typeof FS === "undefined" || !(e instanceof FS.ErrnoError)) abort(e);
    return -e.errno;
  }
}
// ioctl(2) shim for terminal-related requests. The op numbers look like
// the Linux ioctl values (21505=TCGETS, 21506-21508=TCSETS*, 21509-21512=
// TCGETA/TCSETA*, 21519=TIOCGPGRP, 21520=TIOCSPGRP, 21523=TIOCGWINSZ,
// 21524=TIOCSWINSZ, 21531=FIONREAD — presumably; confirm against
// asm-generic/ioctls.h). -59 is "not a tty" in this runtime's numbering.
function ___sys_ioctl(fd, op, varargs) {
  SYSCALLS.varargs = varargs;
  try {
    var stream = SYSCALLS.getStreamFromFD(fd);
    switch (op) {
      case 21509:
      case 21505: {
        // Get terminal attributes: only valid on a TTY; output not filled in.
        if (!stream.tty) return -59;
        return 0;
      }
      case 21510:
      case 21511:
      case 21512:
      case 21506:
      case 21507:
      case 21508: {
        // Set terminal attributes: accepted but ignored.
        if (!stream.tty) return -59;
        return 0;
      }
      case 21519: {
        // Get foreground process group: always reports group 0.
        if (!stream.tty) return -59;
        var argp = SYSCALLS.get();
        HEAP32[argp >> 2] = 0;
        return 0;
      }
      case 21520: {
        // Set foreground process group: unsupported (-28, invalid argument).
        if (!stream.tty) return -59;
        return -28;
      }
      case 21531: {
        // Delegated to the stream's own ioctl handler.
        var argp = SYSCALLS.get();
        return FS.ioctl(stream, op, argp);
      }
      case 21523: {
        // Get window size: pretend success without writing a result.
        if (!stream.tty) return -59;
        return 0;
      }
      case 21524: {
        // Set window size: accepted but ignored.
        if (!stream.tty) return -59;
        return 0;
      }
      default:
        abort("bad ioctl syscall " + op);
    }
  } catch (e) {
    if (typeof FS === "undefined" || !(e instanceof FS.ErrnoError)) abort(e);
    return -e.errno;
  }
}
// open(2) shim: open the file named by the C string at `path` with the
// given flags; the optional creation mode comes from the varargs area.
// Returns the new fd or a negative errno.
function ___sys_open(path, flags, varargs) {
  SYSCALLS.varargs = varargs;
  try {
    const pathname = SYSCALLS.getStr(path);
    const mode = varargs ? SYSCALLS.get() : 0;
    return FS.open(pathname, flags, mode).fd;
  } catch (e) {
    if (typeof FS === "undefined" || !(e instanceof FS.ErrnoError)) abort(e);
    return -e.errno;
  }
}
// rename(2) shim: move the file at `old_path` to `new_path` (both heap
// pointers to C strings). Returns 0 or a negative errno.
function ___sys_rename(old_path, new_path) {
  try {
    FS.rename(SYSCALLS.getStr(old_path), SYSCALLS.getStr(new_path));
    return 0;
  } catch (e) {
    if (typeof FS === "undefined" || !(e instanceof FS.ErrnoError)) abort(e);
    return -e.errno;
  }
}
// rmdir(2) shim: remove the directory named by the C string at `path`.
function ___sys_rmdir(path) {
  try {
    FS.rmdir(SYSCALLS.getStr(path));
    return 0;
  } catch (e) {
    if (typeof FS === "undefined" || !(e instanceof FS.ErrnoError)) abort(e);
    return -e.errno;
  }
}
// stat64(2) shim: stat the path named by the C string at `path`,
// serializing the result into the stat buffer at `buf`.
function ___sys_stat64(path, buf) {
  try {
    const pathname = SYSCALLS.getStr(path);
    return SYSCALLS.doStat(FS.stat, pathname, buf);
  } catch (e) {
    if (typeof FS === "undefined" || !(e instanceof FS.ErrnoError)) abort(e);
    return -e.errno;
  }
}
// unlink(2) shim: remove the file named by the C string at `path`.
function ___sys_unlink(path) {
  try {
    FS.unlink(SYSCALLS.getStr(path));
    return 0;
  } catch (e) {
    if (typeof FS === "undefined" || !(e instanceof FS.ErrnoError)) abort(e);
    return -e.errno;
  }
}
// Bulk memcpy for compiled code: copy `num` bytes within the wasm heap
// from `src` to `dest`.
function _emscripten_memcpy_big(dest, src, num) {
  const end = src + num;
  HEAPU8.copyWithin(dest, src, end);
}
// Try to grow the wasm memory so the heap reaches `size` bytes; grow()
// takes a delta in 64KiB pages, hence the round-up shift. Returns 1 on
// success. On failure the exception is deliberately swallowed and
// undefined is returned so the caller can retry with a smaller size.
function emscripten_realloc_buffer(size) {
  try {
    wasmMemory.grow((size - buffer.byteLength + 65535) >>> 16);
    updateGlobalBufferAndViews(wasmMemory.buffer);
    return 1;
  } catch (e) {}
}
// Grow the heap to at least `requestedSize` bytes (treated as unsigned).
// Tries progressively smaller over-allocation factors (1.2x, 1.1x, 1.05x
// of the current size), capped at requested+96MiB, aligned to 64KiB
// pages, never exceeding the 2GiB limit. Returns true if memory grew.
function _emscripten_resize_heap(requestedSize) {
  const oldSize = HEAPU8.length;
  const wanted = requestedSize >>> 0;
  const maxHeapSize = 2147483648;
  if (wanted > maxHeapSize) {
    return false;
  }
  for (let cutDown = 1; cutDown <= 4; cutDown *= 2) {
    let overGrownHeapSize = oldSize * (1 + 0.2 / cutDown);
    overGrownHeapSize = Math.min(overGrownHeapSize, wanted + 100663296);
    const newSize = Math.min(
      maxHeapSize,
      alignUp(Math.max(wanted, overGrownHeapSize), 65536)
    );
    if (emscripten_realloc_buffer(newSize)) {
      return true;
    }
  }
  return false;
}
// WASI fd_close: close the stream behind `fd`. Returns 0 on success or a
// positive errno (WASI convention) on failure.
function _fd_close(fd) {
  try {
    const stream = SYSCALLS.getStreamFromFD(fd);
    FS.close(stream);
    return 0;
  } catch (e) {
    if (typeof FS === "undefined" || !(e instanceof FS.ErrnoError)) abort(e);
    return e.errno;
  }
}
// WASI fd_fdstat_get: write the descriptor's file type code as a single
// byte at `pbuf` (2 = character device/tty, 3 = directory, 7 = symlink,
// 4 = regular file).
function _fd_fdstat_get(fd, pbuf) {
  try {
    const stream = SYSCALLS.getStreamFromFD(fd);
    let type;
    if (stream.tty) {
      type = 2;
    } else if (FS.isDir(stream.mode)) {
      type = 3;
    } else if (FS.isLink(stream.mode)) {
      type = 7;
    } else {
      type = 4;
    }
    HEAP8[pbuf >> 0] = type;
    return 0;
  } catch (e) {
    if (typeof FS === "undefined" || !(e instanceof FS.ErrnoError)) abort(e);
    return e.errno;
  }
}
// WASI fd_read: scatter-read from `fd` into the iovec array at `iov`,
// storing the number of bytes read into the i32 at `pnum`.
function _fd_read(fd, iov, iovcnt, pnum) {
  try {
    const stream = SYSCALLS.getStreamFromFD(fd);
    HEAP32[pnum >> 2] = SYSCALLS.doReadv(stream, iov, iovcnt);
    return 0;
  } catch (e) {
    if (typeof FS === "undefined" || !(e instanceof FS.ErrnoError)) abort(e);
    return e.errno;
  }
}
// WASI fd_seek: the 64-bit offset arrives split into low/high 32-bit
// halves; the resulting position is written back as an i64 at `newOffset`.
function _fd_seek(fd, offset_low, offset_high, whence, newOffset) {
  try {
    var stream = SYSCALLS.getStreamFromFD(fd);
    var HIGH_OFFSET = 4294967296;
    // Reassemble the i64 as a double (low half read as unsigned).
    var offset = offset_high * HIGH_OFFSET + (offset_low >>> 0);
    var DOUBLE_LIMIT = 9007199254740992;
    // Reject offsets not exactly representable in a double (errno 61 —
    // overflow in this runtime's numbering).
    if (offset <= -DOUBLE_LIMIT || offset >= DOUBLE_LIMIT) {
      return -61;
    }
    FS.llseek(stream, offset, whence);
    // Write the new position back as an i64: low word, then a sign-aware
    // high word computed from the double value.
    (tempI64 = [
      stream.position >>> 0,
      ((tempDouble = stream.position),
      +Math.abs(tempDouble) >= 1
        ? tempDouble > 0
          ? (Math.min(+Math.floor(tempDouble / 4294967296), 4294967295) | 0) >>>
            0
          : ~~+Math.ceil((tempDouble - +(~~tempDouble >>> 0)) / 4294967296) >>>
            0
        : 0)
    ]),
      (HEAP32[newOffset >> 2] = tempI64[0]),
      (HEAP32[(newOffset + 4) >> 2] = tempI64[1]);
    // Rewinding to the start invalidates any cached directory listing.
    if (stream.getdents && offset === 0 && whence === 0) stream.getdents = null;
    return 0;
  } catch (e) {
    if (typeof FS === "undefined" || !(e instanceof FS.ErrnoError)) abort(e);
    return e.errno;
  }
}
// WASI fd_write: gather-write to `fd` from the iovec array at `iov`,
// storing the number of bytes written into the i32 at `pnum`.
function _fd_write(fd, iov, iovcnt, pnum) {
  try {
    const stream = SYSCALLS.getStreamFromFD(fd);
    HEAP32[pnum >> 2] = SYSCALLS.doWritev(stream, iov, iovcnt);
    return 0;
  } catch (e) {
    if (typeof FS === "undefined" || !(e instanceof FS.ErrnoError)) abort(e);
    return e.errno;
  }
}
function _setTempRet0 ( val ) {
setTempRet0 ( val ) ;
}
// C time(): current Unix time in whole seconds. When `ptr` is non-zero
// the result is also stored to that heap address as a 32-bit int.
function _time(ptr) {
  const now = (Date.now() / 1e3) | 0;
  if (ptr) {
    HEAP32[ptr >> 2] = now;
  }
  return now;
}
// Populate the C-side timezone globals (timezone, daylight, tzname) from
// the host environment. Runs at most once; later calls are no-ops.
function _tzset() {
  if (_tzset.called) return;
  _tzset.called = true;
  // Probe January and July to detect DST behavior for the current zone.
  var currentYear = new Date().getFullYear();
  var winter = new Date(currentYear, 0, 1);
  var summer = new Date(currentYear, 6, 1);
  var winterOffset = winter.getTimezoneOffset();
  var summerOffset = summer.getTimezoneOffset();
  // getTimezoneOffset returns minutes behind UTC, so standard time is the
  // larger of the two probes.
  var stdTimezoneOffset = Math.max(winterOffset, summerOffset);
  HEAP32[__get_timezone() >> 2] = stdTimezoneOffset * 60;
  HEAP32[__get_daylight() >> 2] = Number(winterOffset != summerOffset);
  // Pull the zone abbreviation out of Date#toTimeString, e.g. "(CEST)";
  // fall back to "GMT" when the format doesn't match.
  function extractZone(date) {
    var match = date.toTimeString().match(/\(([A-Za-z ]+)\)$/);
    return match ? match[1] : "GMT";
  }
  var winterName = extractZone(winter);
  var summerName = extractZone(summer);
  var winterNamePtr = allocateUTF8(winterName);
  var summerNamePtr = allocateUTF8(summerName);
  // tzname slot 0 gets the standard-time name (the probe with the larger
  // offset), slot 1 the DST name.
  if (summerOffset < winterOffset) {
    HEAP32[__get_tzname() >> 2] = winterNamePtr;
    HEAP32[(__get_tzname() + 4) >> 2] = summerNamePtr;
  } else {
    HEAP32[__get_tzname() >> 2] = summerNamePtr;
    HEAP32[(__get_tzname() + 4) >> 2] = winterNamePtr;
  }
}
function _timegm ( tmPtr ) {
_tzset ( ) ;
var time = Date . UTC (
HEAP32 [ ( tmPtr + 20 ) >> 2 ] + 1900 ,
HEAP32 [ ( tmPtr + 16 ) >> 2 ] ,
HEAP32 [ ( tmPtr + 12 ) >> 2 ] ,
HEAP32 [ ( tmPtr + 8 ) >> 2 ] ,
HEAP32 [ ( tmPtr + 4 ) >> 2 ] ,
HEAP32 [ tmPtr >> 2 ] ,
0
) ;
var date = new Date ( time ) ;
HEAP32 [ ( tmPtr + 24 ) >> 2 ] = date . getUTCDay ( ) ;
var start = Date . UTC ( date . getUTCFullYear ( ) , 0 , 1 , 0 , 0 , 0 , 0 ) ;
var yday = ( ( date . getTime ( ) - start ) / ( 1e3 * 60 * 60 * 24 ) ) | 0 ;
HEAP32 [ ( tmPtr + 28 ) >> 2 ] = yday ;
return ( date . getTime ( ) / 1e3 ) | 0 ;
}
// Node in the virtual filesystem tree. A node created without a parent is
// its own parent (the filesystem root).
var FSNode = function (parent, name, mode, rdev) {
  this.parent = parent || this;
  this.mount = this.parent.mount;
  this.mounted = null;
  // Inode numbers are handed out from a global monotonic counter.
  this.id = FS.nextInode++;
  this.name = name;
  this.mode = mode;
  this.node_ops = {};
  this.stream_ops = {};
  this.rdev = rdev;
};
var readMode = 292 | 73 ;
var writeMode = 146 ;
// Convenience accessors over the node's mode bits.
Object.defineProperties(FSNode.prototype, {
  // True when all read (and execute) bits of readMode are set; setting
  // toggles those bits on or off as a group.
  read: {
    get() {
      return (this.mode & readMode) === readMode;
    },
    set(val) {
      if (val) {
        this.mode |= readMode;
      } else {
        this.mode &= ~readMode;
      }
    }
  },
  write: {
    get() {
      return (this.mode & writeMode) === writeMode;
    },
    set(val) {
      if (val) {
        this.mode |= writeMode;
      } else {
        this.mode &= ~writeMode;
      }
    }
  },
  isFolder: {
    get() {
      return FS.isDir(this.mode);
    }
  },
  isDevice: {
    get() {
      return FS.isChrdev(this.mode);
    }
  }
});
FS.FSNode = FSNode;
FS.staticInit();
// Node-only setup: bind the frozen `fs` module and the bundled "path"
// module (webpack id 622), then initialize the NODEFS backend.
if (ENVIRONMENT_IS_NODE) {
  var fs = frozenFs;
  var NODEJS_PATH = __webpack_require__(622);
  NODEFS.staticInit();
}
// NODERAWFS: replace every FS operation with the raw Node implementation,
// wrapping it so Node errors (which carry a string `code`) surface as
// FS.ErrnoError. A VFS snapshot of the original FS is kept beforehand.
if (ENVIRONMENT_IS_NODE) {
  var _wrapNodeError = function (func) {
    return function () {
      try {
        return func.apply(this, arguments);
      } catch (e) {
        // Errors without a `code` are not Node fs errors — rethrow as-is.
        if (!e.code) throw e;
        throw new FS.ErrnoError(ERRNO_CODES[e.code]);
      }
    };
  };
  var VFS = Object.assign({}, FS);
  for (var _key in NODERAWFS) FS[_key] = _wrapNodeError(NODERAWFS[_key]);
} else {
  // This bundle is built with NODERAWFS and therefore requires Node.
  throw new Error(
    "NODERAWFS is currently only supported on Node.js environment."
  );
}
// Encode `stringy` as UTF-8 into a plain JS array. By default the array
// has room for a NUL terminator (the +1); pass `dontAddNull` to trim the
// array to the bytes actually written. `length` forces the buffer size.
function intArrayFromString(stringy, dontAddNull, length) {
  const size = length > 0 ? length : lengthBytesUTF8(stringy) + 1;
  const bytes = new Array(size);
  const written = stringToUTF8Array(stringy, bytes, 0, bytes.length);
  if (dontAddNull) bytes.length = written;
  return bytes;
}
var decodeBase64 =
typeof atob === "function"
? atob
: function ( input ) {
var keyStr =
"ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/=" ;
var output = "" ;
var chr1 , chr2 , chr3 ;
var enc1 , enc2 , enc3 , enc4 ;
var i = 0 ;
input = input . replace ( /[^A-Za-z0-9\+\/\=]/g , "" ) ;
do {
enc1 = keyStr . indexOf ( input . charAt ( i ++ ) ) ;
enc2 = keyStr . indexOf ( input . charAt ( i ++ ) ) ;
enc3 = keyStr . indexOf ( input . charAt ( i ++ ) ) ;
enc4 = keyStr . indexOf ( input . charAt ( i ++ ) ) ;
chr1 = ( enc1 << 2 ) | ( enc2 >> 4 ) ;
chr2 = ( ( enc2 & 15 ) << 4 ) | ( enc3 >> 2 ) ;
chr3 = ( ( enc3 & 3 ) << 6 ) | enc4 ;
output = output + String . fromCharCode ( chr1 ) ;
if ( enc3 !== 64 ) {
output = output + String . fromCharCode ( chr2 ) ;
}
if ( enc4 !== 64 ) {
output = output + String . fromCharCode ( chr3 ) ;
}
} while ( i < input . length ) ;
return output ;
} ;
// Decode a base64 string into a Uint8Array: Buffer on Node for speed,
// the decodeBase64 fallback elsewhere.
function intArrayFromBase64(s) {
  if (typeof ENVIRONMENT_IS_NODE === "boolean" && ENVIRONMENT_IS_NODE) {
    let buf;
    try {
      buf = Buffer.from(s, "base64");
    } catch (_) {
      // Very old Node versions without Buffer.from(string, encoding).
      buf = new Buffer(s, "base64");
    }
    return new Uint8Array(buf["buffer"], buf["byteOffset"], buf["byteLength"]);
  }
  try {
    const decoded = decodeBase64(s);
    return Uint8Array.from(decoded, (ch) => ch.charCodeAt(0));
  } catch (_) {
    throw new Error("Converting base64 string to bytes failed.");
  }
}
// If `filename` is a data: URI, decode its base64 payload into bytes;
// otherwise return undefined.
function tryParseAsDataURI(filename) {
  return isDataURI(filename)
    ? intArrayFromBase64(filename.slice(dataURIPrefix.length))
    : undefined;
}
// Import object handed to the wasm module: maps the minified import names
// used inside the binary to the JS implementations defined above.
var asmLibraryArg = {
  l: ___gmtime_r,
  p: ___sys_chmod,
  e: ___sys_fcntl64,
  k: ___sys_fstat64,
  o: ___sys_ioctl,
  q: ___sys_open,
  i: ___sys_rename,
  r: ___sys_rmdir,
  c: ___sys_stat64,
  h: ___sys_unlink,
  s: _emscripten_memcpy_big,
  t: _emscripten_resize_heap,
  f: _fd_close,
  j: _fd_fdstat_get,
  g: _fd_read,
  n: _fd_seek,
  d: _fd_write,
  a: _setTempRet0,
  b: _time,
  m: _timegm
};
// Instantiate the wasm module and re-expose its minified exports on
// Module under their real (libzip C API) names, keeping local aliases
// for use inside this file.
var asm = createWasm();
var ___wasm_call_ctors = (Module["___wasm_call_ctors"] = asm["v"]);
// zip_stat_t accessors.
var _zipstruct_stat = (Module["_zipstruct_stat"] = asm["w"]);
var _zipstruct_statS = (Module["_zipstruct_statS"] = asm["x"]);
var _zipstruct_stat_name = (Module["_zipstruct_stat_name"] = asm["y"]);
var _zipstruct_stat_index = (Module["_zipstruct_stat_index"] = asm["z"]);
var _zipstruct_stat_size = (Module["_zipstruct_stat_size"] = asm["A"]);
var _zipstruct_stat_mtime = (Module["_zipstruct_stat_mtime"] = asm["B"]);
var _zipstruct_stat_crc = (Module["_zipstruct_stat_crc"] = asm["C"]);
// zip_error_t accessors.
var _zipstruct_error = (Module["_zipstruct_error"] = asm["D"]);
var _zipstruct_errorS = (Module["_zipstruct_errorS"] = asm["E"]);
var _zipstruct_error_code_zip = (Module["_zipstruct_error_code_zip"] =
  asm["F"]);
var _zipstruct_stat_comp_size = (Module["_zipstruct_stat_comp_size"] =
  asm["G"]);
var _zipstruct_stat_comp_method = (Module["_zipstruct_stat_comp_method"] =
  asm["H"]);
// libzip archive API.
var _zip_close = (Module["_zip_close"] = asm["I"]);
var _zip_delete = (Module["_zip_delete"] = asm["J"]);
var _zip_dir_add = (Module["_zip_dir_add"] = asm["K"]);
var _zip_discard = (Module["_zip_discard"] = asm["L"]);
var _zip_error_init_with_code = (Module["_zip_error_init_with_code"] =
  asm["M"]);
var _zip_get_error = (Module["_zip_get_error"] = asm["N"]);
var _zip_file_get_error = (Module["_zip_file_get_error"] = asm["O"]);
var _zip_error_strerror = (Module["_zip_error_strerror"] = asm["P"]);
var _zip_fclose = (Module["_zip_fclose"] = asm["Q"]);
var _zip_file_add = (Module["_zip_file_add"] = asm["R"]);
var _zip_file_get_external_attributes = (Module[
  "_zip_file_get_external_attributes"
] = asm["S"]);
var _zip_file_set_external_attributes = (Module[
  "_zip_file_set_external_attributes"
] = asm["T"]);
var _zip_file_set_mtime = (Module["_zip_file_set_mtime"] = asm["U"]);
var _zip_fopen = (Module["_zip_fopen"] = asm["V"]);
var _zip_fopen_index = (Module["_zip_fopen_index"] = asm["W"]);
var _zip_fread = (Module["_zip_fread"] = asm["X"]);
var _zip_get_name = (Module["_zip_get_name"] = asm["Y"]);
var _zip_get_num_entries = (Module["_zip_get_num_entries"] = asm["Z"]);
var _zip_name_locate = (Module["_zip_name_locate"] = asm["_"]);
var _zip_open = (Module["_zip_open"] = asm["$"]);
var _zip_open_from_source = (Module["_zip_open_from_source"] = asm["aa"]);
var _zip_set_file_compression = (Module["_zip_set_file_compression"] =
  asm["ba"]);
// libzip source (in-memory buffer) API.
var _zip_source_buffer = (Module["_zip_source_buffer"] = asm["ca"]);
var _zip_source_buffer_create = (Module["_zip_source_buffer_create"] =
  asm["da"]);
var _zip_source_close = (Module["_zip_source_close"] = asm["ea"]);
var _zip_source_error = (Module["_zip_source_error"] = asm["fa"]);
var _zip_source_free = (Module["_zip_source_free"] = asm["ga"]);
var _zip_source_keep = (Module["_zip_source_keep"] = asm["ha"]);
var _zip_source_open = (Module["_zip_source_open"] = asm["ia"]);
var _zip_source_read = (Module["_zip_source_read"] = asm["ja"]);
var _zip_source_seek = (Module["_zip_source_seek"] = asm["ka"]);
var _zip_source_set_mtime = (Module["_zip_source_set_mtime"] = asm["la"]);
var _zip_source_tell = (Module["_zip_source_tell"] = asm["ma"]);
var _zip_stat = (Module["_zip_stat"] = asm["na"]);
var _zip_stat_index = (Module["_zip_stat_index"] = asm["oa"]);
var _zip_ext_count_symlinks = (Module["_zip_ext_count_symlinks"] = asm["pa"]);
// Runtime internals: errno location, timezone globals, stack and malloc.
var ___errno_location = (Module["___errno_location"] = asm["qa"]);
var __get_tzname = (Module["__get_tzname"] = asm["ra"]);
var __get_daylight = (Module["__get_daylight"] = asm["sa"]);
var __get_timezone = (Module["__get_timezone"] = asm["ta"]);
var stackSave = (Module["stackSave"] = asm["ua"]);
var stackRestore = (Module["stackRestore"] = asm["va"]);
var stackAlloc = (Module["stackAlloc"] = asm["wa"]);
var _malloc = (Module["_malloc"] = asm["xa"]);
var _free = (Module["_free"] = asm["ya"]);
Module["cwrap"] = cwrap;
Module["getValue"] = getValue;
var calledRun ;
dependenciesFulfilled = function runCaller ( ) {
if ( ! calledRun ) run ( ) ;
if ( ! calledRun ) dependenciesFulfilled = runCaller ;
} ;
// Start the Emscripten runtime: bail while run dependencies are pending,
// execute preRun hooks, initialize the runtime, fire onRuntimeInitialized,
// then the postRun hooks. Safe to call more than once.
function run(args) {
  args = args || arguments_;
  if (runDependencies > 0) {
    return;
  }
  preRun();
  // preRun hooks may themselves have added new run dependencies.
  if (runDependencies > 0) {
    return;
  }
  function doRun() {
    if (calledRun) return;
    calledRun = true;
    Module["calledRun"] = true;
    if (ABORT) return;
    initRuntime();
    if (Module["onRuntimeInitialized"]) Module["onRuntimeInitialized"]();
    postRun();
  }
  if (Module["setStatus"]) {
    Module["setStatus"]("Running...");
    // Yield to the event loop so the status text can render before doRun.
    setTimeout(function () {
      setTimeout(function () {
        Module["setStatus"]("");
      }, 1);
      doRun();
    }, 1);
  } else {
    doRun();
  }
}
Module [ "run" ] = run ;
if ( Module [ "preInit" ] ) {
if ( typeof Module [ "preInit" ] == "function" )
Module [ "preInit" ] = [ Module [ "preInit" ] ] ;
while ( Module [ "preInit" ] . length > 0 ) {
Module [ "preInit" ] . pop ( ) ( ) ;
}
}
run ( ) ;
/***/ } ) ,
/***/ 417:
/***/ ((module) => {
"use strict";
// Webpack stub for the Node built-in "crypto" module.
module.exports = require("crypto");;
/***/ }),
/***/ 747:
/***/ ((module) => {
"use strict";
// Webpack stub for the Node built-in "fs" module.
module.exports = require("fs");;
/***/ }),
/***/ 282:
/***/ ((module) => {
"use strict";
// Webpack stub for the Node built-in "module" module.
module.exports = require("module");;
/***/ }),
/***/ 622:
/***/ ((module) => {
"use strict";
// Webpack stub for the Node built-in "path" module.
module.exports = require("path");;
/***/ })
/******/ } ) ;
/************************************************************************/
// Module cache: moduleId -> module record ({ exports }).
var __webpack_module_cache__ = {};

// The require function: return the (possibly cached) exports of moduleId,
// executing the module factory on first use.
function __webpack_require__(moduleId) {
  const cachedModule = __webpack_module_cache__[moduleId];
  if (cachedModule !== undefined) {
    return cachedModule.exports;
  }
  // Create the record and cache it *before* executing the factory so
  // circular requires see the partially-built exports.
  const module = (__webpack_module_cache__[moduleId] = { exports: {} });
  __webpack_modules__[moduleId](module, module.exports, __webpack_require__);
  return module.exports;
}
/******/
/************************************************************************/
/******/ /* webpack/runtime/compat get default export */
// webpack runtime: compat getter for the default export of non-harmony modules.
(() => {
  __webpack_require__.n = (module) => {
    const getter =
      module && module.__esModule ? () => module["default"] : () => module;
    __webpack_require__.d(getter, { a: getter });
    return getter;
  };
})();
/******/
/******/ /* webpack/runtime/define property getters */
// webpack runtime: define lazy getters for harmony exports not yet present.
(() => {
  __webpack_require__.d = (exports, definition) => {
    for (const key in definition) {
      const needed =
        __webpack_require__.o(definition, key) &&
        !__webpack_require__.o(exports, key);
      if (needed) {
        Object.defineProperty(exports, key, {
          enumerable: true,
          get: definition[key]
        });
      }
    }
  };
})();
/******/
/******/ /* webpack/runtime/hasOwnProperty shorthand */
// webpack runtime: own-property check shorthand.
(() => {
  __webpack_require__.o = (obj, prop) =>
    Object.prototype.hasOwnProperty.call(obj, prop);
})();
/******/
/************************************************************************/
// Accumulates the bundle's public exports.
var __webpack_exports__ = {};
// This entry needs to be wrapped in an IIFE because it needs to be in strict mode.
( ( ) => {
"use strict" ;
// EXPORTS: the bundle's default export is the _entryPoint binding.
__webpack_require__.d(__webpack_exports__, {
  "default": () => (/* binding */ _entryPoint)
});
// EXTERNAL MODULE: external "fs"
var external_fs_ = __webpack_require__(747);
var external_fs_default = /*#__PURE__*/ __webpack_require__.n(external_fs_);
; // CONCATENATED MODULE: external "os"
const external_os_namespaceObject = require("os");;
// EXTERNAL MODULE: external "path"
var external_path_ = __webpack_require__(622);
var external_path_default = /*#__PURE__*/ __webpack_require__.n(external_path_);
; // CONCATENATED MODULE: ../yarnpkg-fslib/sources/path.ts
// Compiled TypeScript numeric enum: installs both the forward
// (name -> value) and reverse (value -> name) mappings on one object.
var PathType;
(function (ns) {
  const members = [
    ["File", 0],
    ["Portable", 1],
    ["Native", 2]
  ];
  for (const [name, value] of members) {
    ns[name] = value;
    ns[value] = name;
  }
})(PathType || (PathType = {}));
// Constants for portable (posix-style) paths.
const PortablePath = {
  root: `/`,
  dot: `.`
};
// Well-known file and directory names used throughout Yarn.
const Filename = {
  nodeModules: `node_modules`,
  manifest: `package.json`,
  lockfile: `yarn.lock`,
  virtual: `__virtual__`,
  /**
   * @deprecated
   */
  pnpJs: `.pnp.js`,
  pnpCjs: `.pnp.cjs`,
  rc: `.yarnrc.yml`
};
// `npath` operates on native paths, `ppath` on portable (posix) paths;
// both delegate to Node's path module via prototype inheritance.
const npath = Object.create((external_path_default()));
const ppath = Object.create((external_path_default()).posix);
npath.cwd = () => process.cwd();
ppath.cwd = () => toPortablePath(process.cwd());
// path.posix.resolve would anchor relative segments to the *native* cwd;
// override it so they are anchored to the portable cwd instead.
ppath.resolve = (...segments) => {
  if (segments.length > 0 && ppath.isAbsolute(segments[0])) {
    return external_path_default().posix.resolve(...segments);
  } else {
    return external_path_default().posix.resolve(ppath.cwd(), ...segments);
  }
};
// Relative path of `to` under `from` using the given path implementation:
// `.` when they normalize to the same path, the trailing portion when `to`
// lives inside `from`, and null otherwise.
const contains = function (pathUtils, from, to) {
  const base = pathUtils.normalize(from);
  const target = pathUtils.normalize(to);
  if (base === target) return `.`;
  const prefix = base.endsWith(pathUtils.sep) ? base : base + pathUtils.sep;
  return target.startsWith(prefix) ? target.slice(prefix.length) : null;
};
npath . fromPortablePath = fromPortablePath ;
npath . toPortablePath = toPortablePath ;
npath . contains = ( from , to ) => contains ( npath , from , to ) ;
ppath . contains = ( from , to ) => contains ( ppath , from , to ) ;
// Recognizers for Windows drive-letter paths ("N:\…"), UNC paths
// ("\\server\…" / "\\.\…"), and their portable encodings ("/N:/…",
// "/unc/…" with an optional ".dot/" marker).
const WINDOWS_PATH_REGEXP = /^([a-zA-Z]:.*)$/;
const UNC_WINDOWS_PATH_REGEXP = /^\\\\(\.\\)?(.*)$/;
const PORTABLE_PATH_REGEXP = /^\/([a-zA-Z]:.*)$/;
const UNC_PORTABLE_PATH_REGEXP = /^\/unc\/(\.dot\/)?(.*)$/; // Path should look like "/N:/berry/scripts/plugin-pack.js"
// And transform to "N:\berry\scripts\plugin-pack.js"
// Convert a portable (posix-style) path to a native path. On Windows,
// "/N:/foo/bar" becomes "N:\foo\bar" and "/unc/…" becomes a "\\…" UNC
// path; any other shape is returned untouched. No-op off Windows.
function fromPortablePath(p) {
  if (process.platform !== `win32`) return p;
  let converted;
  if (p.match(PORTABLE_PATH_REGEXP)) {
    converted = p.replace(PORTABLE_PATH_REGEXP, `$1`);
  } else if (p.match(UNC_PORTABLE_PATH_REGEXP)) {
    converted = p.replace(
      UNC_PORTABLE_PATH_REGEXP,
      (match, p1, p2) => `\\\\${p1 ? `.\\` : ``}${p2}`
    );
  } else {
    return p;
  }
  return converted.replace(/\//g, `\\`);
} // Path should look like "N:/berry/scripts/plugin-pack.js"
// And transform to "/N:/berry/scripts/plugin-pack.js"
// Convert a native path to its portable form. On Windows, "N:\foo"
// becomes "/N:/foo" and UNC paths map under "/unc/"; all backslashes
// become forward slashes. No-op off Windows.
function toPortablePath(p) {
  if (process.platform !== `win32`) return p;
  let result = p;
  if (result.match(WINDOWS_PATH_REGEXP)) {
    result = result.replace(WINDOWS_PATH_REGEXP, `/$1`);
  } else if (result.match(UNC_WINDOWS_PATH_REGEXP)) {
    result = result.replace(
      UNC_WINDOWS_PATH_REGEXP,
      (match, p1, p2) => `/unc/${p1 ? `.dot/` : ``}${p2}`
    );
  }
  return result.replace(/\\/g, `/`);
}
// Convert `sourcePath` into the flavor handled by `targetPathUtils`
// (native for npath, portable otherwise).
function convertPath(targetPathUtils, sourcePath) {
  if (targetPathUtils === npath) {
    return fromPortablePath(sourcePath);
  }
  return toPortablePath(sourcePath);
}
// Assert that `filename` is a bare file name — no directory component in
// either the native or the portable interpretation — then return it.
function toFilename(filename) {
  const hasDirPart =
    npath.parse(filename).dir !== `` || ppath.parse(filename).dir !== ``;
  if (hasDirPart) throw new Error(`Invalid filename: "${filename}"`);
  return filename;
}
; // CONCATENATED MODULE: ../yarnpkg-fslib/sources/algorithms/copyPromise.ts
// 1980-01-01, like Fedora
const defaultTime = new Date ( 315532800 * 1000 ) ;
// Compiled TypeScript string enum (forward mapping only).
var LinkStrategy;
(function (ns) {
  ns.Allow = "allow";
  ns.ReadOnly = "readOnly";
})(LinkStrategy || (LinkStrategy = {}));
// Copies `source` (read via `sourceFs`) onto `destination` (written via
// `destinationFs`). The copy is planned in two phases: `prelayout` operations
// run sequentially and create the entries; `postlayout` operations then run
// in parallel and fix up metadata (timestamps, modes).
async function copyPromise(destinationFs, destination, sourceFs, source, opts) {
  const normalizedDestination = destinationFs.pathUtils.normalize(destination);
  const normalizedSource = sourceFs.pathUtils.normalize(source);
  const prelayout = [];
  const postlayout = [];
  await destinationFs.mkdirPromise(destinationFs.pathUtils.dirname(destination), {
    recursive: true
  });
  // Prefer lutimes (doesn't follow symlinks) when the destination supports it.
  const hasLutimes = typeof destinationFs.lutimesPromise === `function`;
  const updateTime = hasLutimes
    ? destinationFs.lutimesPromise.bind(destinationFs)
    : destinationFs.utimesPromise.bind(destinationFs);
  await copyImpl(prelayout, postlayout, updateTime, destinationFs, normalizedDestination, sourceFs, normalizedSource, opts);
  for (const operation of prelayout)
    await operation();
  await Promise.all(postlayout.map(operation => operation()));
}
async function copyImpl ( prelayout , postlayout , updateTime , destinationFs , destination , sourceFs , source , opts ) {
var _a , _b ;
const destinationStat = await maybeLStat ( destinationFs , destination ) ;
const sourceStat = await sourceFs . lstatPromise ( source ) ;
const referenceTime = opts . stableTime ? {
mtime : defaultTime ,
atime : defaultTime
} : sourceStat ;
let updated ;
switch ( true ) {
case sourceStat . isDirectory ( ) :
{
updated = await copyFolder ( prelayout , postlayout , updateTime , destinationFs , destination , destinationStat , sourceFs , source , sourceStat , opts ) ;
}
break ;
case sourceStat . isFile ( ) :
{
updated = await copyFile ( prelayout , postlayout , updateTime , destinationFs , destination , destinationStat , sourceFs , source , sourceStat , opts ) ;
}
break ;
case sourceStat . isSymbolicLink ( ) :
{
updated = await copySymlink ( prelayout , postlayout , updateTime , destinationFs , destination , destinationStat , sourceFs , source , sourceStat , opts ) ;
}
break ;
default :
{
throw new Error ( ` Unsupported file type ( ${ sourceStat . mode } ) ` ) ;
}
break ;
}
if ( updated || ( ( _a = destinationStat === null || destinationStat === void 0 ? void 0 : destinationStat . mtime ) === null || _a === void 0 ? void 0 : _a . getTime ( ) ) !== referenceTime . mtime . getTime ( ) || ( ( _b = destinationStat === null || destinationStat === void 0 ? void 0 : destinationStat . atime ) === null || _b === void 0 ? void 0 : _b . getTime ( ) ) !== referenceTime . atime . getTime ( ) ) {
postlayout . push ( ( ) => updateTime ( destination , referenceTime . atime , referenceTime . mtime ) ) ;
updated = true ;
}
if ( destinationStat === null || ( destinationStat . mode & 0o777 ) !== ( sourceStat . mode & 0o777 ) ) {
postlayout . push ( ( ) => destinationFs . chmodPromise ( destination , sourceStat . mode & 0o777 ) ) ;
updated = true ;
}
return updated ;
}
// lstats `p` on `baseFs`, returning `null` instead of throwing when the
// entry is missing (or the lstat fails for any other reason).
async function maybeLStat(baseFs, p) {
  let stat = null;
  try {
    stat = await baseFs.lstatPromise(p);
  } catch (e) {
    stat = null;
  }
  return stat;
}
// Plans the copy of a directory: replaces any non-directory occupying the
// destination (when `overwrite`), creates the directory if needed, then
// recurses into every entry. Returns whether anything was (or will be)
// changed under this directory.
async function copyFolder(prelayout, postlayout, updateTime, destinationFs, destination, destinationStat, sourceFs, source, sourceStat, opts) {
  if (destinationStat !== null && !destinationStat.isDirectory()) {
    if (opts.overwrite) {
      prelayout.push(async () => destinationFs.removePromise(destination));
      destinationStat = null;
    } else {
      return false;
    }
  }
  let updated = false;
  if (destinationStat === null) {
    prelayout.push(async () => destinationFs.mkdirPromise(destination, {
      mode: sourceStat.mode
    }));
    updated = true;
  }
  const entries = await sourceFs.readdirPromise(source);
  if (opts.stableSort) {
    // Deterministic ordering: recurse sequentially over the sorted entries.
    for (const entry of entries.sort()) {
      if (await copyImpl(prelayout, postlayout, updateTime, destinationFs, destinationFs.pathUtils.join(destination, entry), sourceFs, sourceFs.pathUtils.join(source, entry), opts)) {
        updated = true;
      }
    }
  } else {
    const entriesUpdateStatus = await Promise.all(entries.map(async entry => {
      // Bug fix: the copyImpl result must be returned — previously the
      // callback returned undefined for every entry, so the `.some()` check
      // below could never report an update from this branch.
      return await copyImpl(prelayout, postlayout, updateTime, destinationFs, destinationFs.pathUtils.join(destination, entry), sourceFs, sourceFs.pathUtils.join(source, entry), opts);
    }));
    if (entriesUpdateStatus.some(status => status)) {
      updated = true;
    }
  }
  return updated;
}
const isCloneSupportedCache = new WeakMap ( ) ;
// Returns an async operation that hardlinks `source` to `destination` on
// `opFs`. Under the ReadOnly strategy the file — and therefore every
// hardlink to it — is made non-writable afterwards.
function makeLinkOperation(opFs, destination, source, sourceStat, linkStrategy) {
  const linkOp = async () => {
    await opFs.linkPromise(source, destination);
    if (linkStrategy !== LinkStrategy.ReadOnly)
      return;
    // We mutate the stat, otherwise it'll be reset by copyImpl
    sourceStat.mode &= ~0o222;
    await opFs.chmodPromise(destination, sourceStat.mode);
  };
  return linkOp;
}
// Returns an async operation that clones `source` to `destination` when the
// filesystem supports copy-on-write cloning, falling back to a hardlink
// otherwise. Support is probed on first use and cached per fs instance.
function makeCloneLinkOperation(opFs, destination, source, sourceStat, linkStrategy) {
  const isCloneSupported = isCloneSupportedCache.get(opFs);
  if (isCloneSupported === true)
    return async () => opFs.copyFilePromise(source, destination, (external_fs_default()).constants.COPYFILE_FICLONE_FORCE);
  if (isCloneSupported === false)
    return makeLinkOperation(opFs, destination, source, sourceStat, linkStrategy);
  // First time we see this fs: probe clone support and remember the outcome.
  return async () => {
    try {
      await opFs.copyFilePromise(source, destination, (external_fs_default()).constants.COPYFILE_FICLONE_FORCE);
      isCloneSupportedCache.set(opFs, true);
    } catch (err) {
      if (err.code === `ENOSYS` || err.code === `ENOTSUP`) {
        isCloneSupportedCache.set(opFs, false);
        await makeLinkOperation(opFs, destination, source, sourceStat, linkStrategy)();
      } else {
        throw err;
      }
    }
  };
}
// Plans the copy of a regular file. Same-fs copies prefer cloning/linking;
// cross-fs copies either hardlink (when a link strategy is set) or fall
// back to a full read + write of the content.
async function copyFile(prelayout, postlayout, updateTime, destinationFs, destination, destinationStat, sourceFs, source, sourceStat, opts) {
  var _a;
  if (destinationStat !== null) {
    if (!opts.overwrite)
      return false;
    prelayout.push(async () => destinationFs.removePromise(destination));
    destinationStat = null;
  }
  const linkStrategy = (_a = opts.linkStrategy) !== null && _a !== void 0 ? _a : null;
  let op;
  if (destinationFs === sourceFs) {
    op = linkStrategy !== null
      ? makeCloneLinkOperation(destinationFs, destination, source, sourceStat, linkStrategy)
      : async () => destinationFs.copyFilePromise(source, destination, (external_fs_default()).constants.COPYFILE_FICLONE);
  } else {
    op = linkStrategy !== null
      ? makeLinkOperation(destinationFs, destination, source, sourceStat, linkStrategy)
      : async () => destinationFs.writeFilePromise(destination, await sourceFs.readFilePromise(source));
  }
  prelayout.push(async () => op());
  return true;
}
// Plans the copy of a symlink: recreates it at the destination with its
// target converted to the destination filesystem's path flavor.
async function copySymlink(prelayout, postlayout, updateTime, destinationFs, destination, destinationStat, sourceFs, source, sourceStat, opts) {
  if (destinationStat !== null) {
    if (!opts.overwrite)
      return false;
    prelayout.push(async () => destinationFs.removePromise(destination));
    destinationStat = null;
  }
  prelayout.push(async () => {
    const target = await sourceFs.readlinkPromise(source);
    await destinationFs.symlinkPromise(convertPath(destinationFs.pathUtils, target), destination);
  });
  return true;
}
; // CONCATENATED MODULE: ../yarnpkg-fslib/sources/FakeFS.ts
class FakeFS {
constructor ( pathUtils ) {
this . pathUtils = pathUtils ;
}
async * genTraversePromise ( init , {
stableSort = false
} = { } ) {
const stack = [ init ] ;
while ( stack . length > 0 ) {
const p = stack . shift ( ) ;
const entry = await this . lstatPromise ( p ) ;
if ( entry . isDirectory ( ) ) {
const entries = await this . readdirPromise ( p ) ;
if ( stableSort ) {
for ( const entry of entries . sort ( ) ) {
stack . push ( this . pathUtils . join ( p , entry ) ) ;
}
} else {
throw new Error ( ` Not supported ` ) ;
}
} else {
yield p ;
}
}
}
async removePromise ( p , {
recursive = true ,
maxRetries = 5
} = { } ) {
let stat ;
try {
stat = await this . lstatPromise ( p ) ;
} catch ( error ) {
if ( error . code === ` ENOENT ` ) {
return ;
} else {
throw error ;
}
}
if ( stat . isDirectory ( ) ) {
if ( recursive ) {
const entries = await this . readdirPromise ( p ) ;
await Promise . all ( entries . map ( entry => {
return this . removePromise ( this . pathUtils . resolve ( p , entry ) ) ;
} ) ) ;
} // 5 gives 1s worth of retries at worst
let t = 0 ;
do {
try {
await this . rmdirPromise ( p ) ;
break ;
} catch ( error ) {
if ( error . code === ` EBUSY ` || error . code === ` ENOTEMPTY ` ) {
if ( maxRetries === 0 ) {
break ;
} else {
await new Promise ( resolve => setTimeout ( resolve , t * 100 ) ) ;
continue ;
}
} else {
throw error ;
}
}
} while ( t ++ < maxRetries ) ;
} else {
await this . unlinkPromise ( p ) ;
}
}
removeSync ( p , {
recursive = true
} = { } ) {
let stat ;
try {
stat = this . lstatSync ( p ) ;
} catch ( error ) {
if ( error . code === ` ENOENT ` ) {
return ;
} else {
throw error ;
}
}
if ( stat . isDirectory ( ) ) {
if ( recursive ) for ( const entry of this . readdirSync ( p ) ) this . removeSync ( this . pathUtils . resolve ( p , entry ) ) ;
this . rmdirSync ( p ) ;
} else {
this . unlinkSync ( p ) ;
}
}
async mkdirpPromise ( p , {
chmod ,
utimes
} = { } ) {
p = this . resolve ( p ) ;
if ( p === this . pathUtils . dirname ( p ) ) return ;
const parts = p . split ( this . pathUtils . sep ) ;
for ( let u = 2 ; u <= parts . length ; ++ u ) {
const subPath = parts . slice ( 0 , u ) . join ( this . pathUtils . sep ) ;
if ( ! this . existsSync ( subPath ) ) {
try {
await this . mkdirPromise ( subPath ) ;
} catch ( error ) {
if ( error . code === ` EEXIST ` ) {
continue ;
} else {
throw error ;
}
}
if ( chmod != null ) await this . chmodPromise ( subPath , chmod ) ;
if ( utimes != null ) {
await this . utimesPromise ( subPath , utimes [ 0 ] , utimes [ 1 ] ) ;
} else {
const parentStat = await this . statPromise ( this . pathUtils . dirname ( subPath ) ) ;
await this . utimesPromise ( subPath , parentStat . atime , parentStat . mtime ) ;
}
}
}
}
mkdirpSync ( p , {
chmod ,
utimes
} = { } ) {
p = this . resolve ( p ) ;
if ( p === this . pathUtils . dirname ( p ) ) return ;
const parts = p . split ( this . pathUtils . sep ) ;
for ( let u = 2 ; u <= parts . length ; ++ u ) {
const subPath = parts . slice ( 0 , u ) . join ( this . pathUtils . sep ) ;
if ( ! this . existsSync ( subPath ) ) {
try {
this . mkdirSync ( subPath ) ;
} catch ( error ) {
if ( error . code === ` EEXIST ` ) {
continue ;
} else {
throw error ;
}
}
if ( chmod != null ) this . chmodSync ( subPath , chmod ) ;
if ( utimes != null ) {
this . utimesSync ( subPath , utimes [ 0 ] , utimes [ 1 ] ) ;
} else {
const parentStat = this . statSync ( this . pathUtils . dirname ( subPath ) ) ;
this . utimesSync ( subPath , parentStat . atime , parentStat . mtime ) ;
}
}
}
}
async copyPromise ( destination , source , {
baseFs = this ,
overwrite = true ,
stableSort = false ,
stableTime = false ,
linkStrategy = null
} = { } ) {
return await copyPromise ( this , destination , baseFs , source , {
overwrite ,
stableSort ,
stableTime ,
linkStrategy
} ) ;
}
copySync ( destination , source , {
baseFs = this ,
overwrite = true
} = { } ) {
const stat = baseFs . lstatSync ( source ) ;
const exists = this . existsSync ( destination ) ;
if ( stat . isDirectory ( ) ) {
this . mkdirpSync ( destination ) ;
const directoryListing = baseFs . readdirSync ( source ) ;
for ( const entry of directoryListing ) {
this . copySync ( this . pathUtils . join ( destination , entry ) , baseFs . pathUtils . join ( source , entry ) , {
baseFs ,
overwrite
} ) ;
}
} else if ( stat . isFile ( ) ) {
if ( ! exists || overwrite ) {
if ( exists ) this . removeSync ( destination ) ;
const content = baseFs . readFileSync ( source ) ;
this . writeFileSync ( destination , content ) ;
}
} else if ( stat . isSymbolicLink ( ) ) {
if ( ! exists || overwrite ) {
if ( exists ) this . removeSync ( destination ) ;
const target = baseFs . readlinkSync ( source ) ;
this . symlinkSync ( convertPath ( this . pathUtils , target ) , destination ) ;
}
} else {
throw new Error ( ` Unsupported file type (file: ${ source } , mode: 0o ${ stat . mode . toString ( 8 ) . padStart ( 6 , ` 0 ` ) } ) ` ) ;
}
const mode = stat . mode & 0o777 ;
this . chmodSync ( destination , mode ) ;
}
async changeFilePromise ( p , content , opts = { } ) {
if ( Buffer . isBuffer ( content ) ) {
return this . changeFileBufferPromise ( p , content ) ;
} else {
return this . changeFileTextPromise ( p , content , opts ) ;
}
}
async changeFileBufferPromise ( p , content ) {
let current = Buffer . alloc ( 0 ) ;
try {
current = await this . readFilePromise ( p ) ;
} catch ( error ) { // ignore errors, no big deal
}
if ( Buffer . compare ( current , content ) === 0 ) return ;
await this . writeFilePromise ( p , content ) ;
}
async changeFileTextPromise ( p , content , {
automaticNewlines
} = { } ) {
let current = ` ` ;
try {
current = await this . readFilePromise ( p , ` utf8 ` ) ;
} catch ( error ) { // ignore errors, no big deal
}
const normalizedContent = automaticNewlines ? normalizeLineEndings ( current , content ) : content ;
if ( current === normalizedContent ) return ;
await this . writeFilePromise ( p , normalizedContent ) ;
}
changeFileSync ( p , content , opts = { } ) {
if ( Buffer . isBuffer ( content ) ) {
return this . changeFileBufferSync ( p , content ) ;
} else {
return this . changeFileTextSync ( p , content , opts ) ;
}
}
changeFileBufferSync ( p , content ) {
let current = Buffer . alloc ( 0 ) ;
try {
current = this . readFileSync ( p ) ;
} catch ( error ) { // ignore errors, no big deal
}
if ( Buffer . compare ( current , content ) === 0 ) return ;
this . writeFileSync ( p , content ) ;
}
changeFileTextSync ( p , content , {
automaticNewlines = false
} = { } ) {
let current = ` ` ;
try {
current = this . readFileSync ( p , ` utf8 ` ) ;
} catch ( error ) { // ignore errors, no big deal
}
const normalizedContent = automaticNewlines ? normalizeLineEndings ( current , content ) : content ;
if ( current === normalizedContent ) return ;
this . writeFileSync ( p , normalizedContent ) ;
}
async movePromise ( fromP , toP ) {
try {
await this . renamePromise ( fromP , toP ) ;
} catch ( error ) {
if ( error . code === ` EXDEV ` ) {
await this . copyPromise ( toP , fromP ) ;
await this . removePromise ( fromP ) ;
} else {
throw error ;
}
}
}
moveSync ( fromP , toP ) {
try {
this . renameSync ( fromP , toP ) ;
} catch ( error ) {
if ( error . code === ` EXDEV ` ) {
this . copySync ( toP , fromP ) ;
this . removeSync ( fromP ) ;
} else {
throw error ;
}
}
}
async lockPromise ( affectedPath , callback ) {
const lockPath = ` ${ affectedPath } .flock ` ;
const interval = 1000 / 60 ;
const startTime = Date . now ( ) ;
let fd = null ; // Even when we detect that a lock file exists, we still look inside to see
// whether the pid that created it is still alive. It's not foolproof
// (there are false positive), but there are no false negative and that's
// all that matters in 99% of the cases.
const isAlive = async ( ) => {
let pid ;
try {
[ pid ] = await this . readJsonPromise ( lockPath ) ;
} catch ( error ) {
// If we can't read the file repeatedly, we assume the process was
// aborted before even writing finishing writing the payload.
return Date . now ( ) - startTime < 500 ;
}
try {
// "As a special case, a signal of 0 can be used to test for the
// existence of a process" - so we check whether it's alive.
process . kill ( pid , 0 ) ;
return true ;
} catch ( error ) {
return false ;
}
} ;
while ( fd === null ) {
try {
fd = await this . openPromise ( lockPath , ` wx ` ) ;
} catch ( error ) {
if ( error . code === ` EEXIST ` ) {
if ( ! ( await isAlive ( ) ) ) {
try {
await this . unlinkPromise ( lockPath ) ;
continue ;
} catch ( error ) { // No big deal if we can't remove it. Just fallback to wait for
// it to be eventually released by its owner.
}
}
if ( Date . now ( ) - startTime < 60 * 1000 ) {
await new Promise ( resolve => setTimeout ( resolve , interval ) ) ;
} else {
throw new Error ( ` Couldn't acquire a lock in a reasonable time (via ${ lockPath } ) ` ) ;
}
} else {
throw error ;
}
}
}
await this . writePromise ( fd , JSON . stringify ( [ process . pid ] ) ) ;
try {
return await callback ( ) ;
} finally {
try {
// closePromise needs to come before unlinkPromise otherwise another process can attempt
// to get the file handle after the unlink but before close resuling in
// EPERM: operation not permitted, open
await this . closePromise ( fd ) ;
await this . unlinkPromise ( lockPath ) ;
} catch ( error ) { // noop
}
}
}
async readJsonPromise ( p ) {
const content = await this . readFilePromise ( p , ` utf8 ` ) ;
try {
return JSON . parse ( content ) ;
} catch ( error ) {
error . message += ` (in ${ p } ) ` ;
throw error ;
}
}
readJsonSync ( p ) {
const content = this . readFileSync ( p , ` utf8 ` ) ;
try {
return JSON . parse ( content ) ;
} catch ( error ) {
error . message += ` (in ${ p } ) ` ;
throw error ;
}
}
async writeJsonPromise ( p , data ) {
return await this . writeFilePromise ( p , ` ${ JSON . stringify ( data , null , 2 ) } \n ` ) ;
}
writeJsonSync ( p , data ) {
return this . writeFileSync ( p , ` ${ JSON . stringify ( data , null , 2 ) } \n ` ) ;
}
async preserveTimePromise ( p , cb ) {
const stat = await this . lstatPromise ( p ) ;
const result = await cb ( ) ;
if ( typeof result !== ` undefined ` ) p = result ;
if ( this . lutimesPromise ) {
await this . lutimesPromise ( p , stat . atime , stat . mtime ) ;
} else if ( ! stat . isSymbolicLink ( ) ) {
await this . utimesPromise ( p , stat . atime , stat . mtime ) ;
}
}
async preserveTimeSync ( p , cb ) {
const stat = this . lstatSync ( p ) ;
const result = cb ( ) ;
if ( typeof result !== ` undefined ` ) p = result ;
if ( this . lutimesSync ) {
this . lutimesSync ( p , stat . atime , stat . mtime ) ;
} else if ( ! stat . isSymbolicLink ( ) ) {
this . utimesSync ( p , stat . atime , stat . mtime ) ;
}
}
}
// Seconds since epoch for 1980-01-01T00:00:00Z — same instant as the
// module-level `defaultTime` Date above.
FakeFS.DEFAULT_TIME = 315532800;
class BasePortableFakeFS extends FakeFS {
constructor ( ) {
super ( ppath ) ;
}
}
// Detects the dominant line-ending style of `content` (CRLF wins ties go to
// LF). Falls back to the OS default when the content has no newlines at all.
function getEndOfLine(content) {
  const matches = content.match(/\r?\n/g);
  if (matches === null)
    return external_os_namespaceObject.EOL;
  let crlf = 0;
  for (const nl of matches)
    if (nl === `\r\n`)
      crlf += 1;
  const lf = matches.length - crlf;
  return crlf > lf ? `\r\n` : `\n`;
}
// Rewrites every newline in `newContent` so it matches the line-ending
// style detected in `originalContent`.
function normalizeLineEndings(originalContent, newContent) {
  const eol = getEndOfLine(originalContent);
  return newContent.replace(/\r?\n/g, eol);
}
; // CONCATENATED MODULE: ../yarnpkg-fslib/sources/errors.ts
// Builds an Error whose message is prefixed with `code` and that carries a
// `code` property, mimicking Node's system errors.
function makeError(code, message) {
  const error = new Error(`${code}: ${message}`);
  error.code = code;
  return error;
}
// Builds an EBUSY error (resource busy or locked).
function EBUSY(detail) {
  return makeError(`EBUSY`, detail);
}
// Builds an ENOSYS error (function not implemented), combining the general
// message with the specific failing operation.
function ENOSYS(message, reason) {
  return makeError(`ENOSYS`, `${message}, ${reason}`);
}
// Builds an EINVAL error (invalid argument).
function EINVAL(detail) {
  return makeError(`EINVAL`, `invalid argument, ${detail}`);
}
// Builds an EBADF error (bad file descriptor).
function EBADF(detail) {
  return makeError(`EBADF`, `bad file descriptor, ${detail}`);
}
// Builds an ENOENT error (no such file or directory).
function ENOENT(detail) {
  return makeError(`ENOENT`, `no such file or directory, ${detail}`);
}
// Builds an ENOTDIR error (path component is not a directory).
function ENOTDIR(detail) {
  return makeError(`ENOTDIR`, `not a directory, ${detail}`);
}
// Builds an EISDIR error (illegal operation on a directory).
function EISDIR(detail) {
  return makeError(`EISDIR`, `illegal operation on a directory, ${detail}`);
}
// Builds an EEXIST error (file already exists).
function EEXIST(detail) {
  return makeError(`EEXIST`, `file already exists, ${detail}`);
}
// Builds an EROFS error (read-only filesystem).
function EROFS(detail) {
  return makeError(`EROFS`, `read-only filesystem, ${detail}`);
}
// Builds an ENOTEMPTY error (directory not empty).
function ENOTEMPTY(detail) {
  return makeError(`ENOTEMPTY`, `directory not empty, ${detail}`);
}
// Builds an EOPNOTSUPP error (operation not supported).
function EOPNOTSUPP(detail) {
  return makeError(`EOPNOTSUPP`, `operation not supported, ${detail}`);
} // ------------------------------------------------------------------------
// Builds the error raised when a directory handle is used after being closed.
function ERR_DIR_CLOSED() {
  return makeError(`ERR_DIR_CLOSED`, `Directory handle was closed`);
} // ------------------------------------------------------------------------
// Error subclass used to surface libzip failure codes to callers: carries
// the numeric `code` alongside the human-readable message.
class LibzipError extends Error {
  constructor(message, code) {
    super(message);
    this.code = code;
    this.name = `Libzip Error`;
  }
}
; // CONCATENATED MODULE: ../yarnpkg-fslib/sources/NodeFS.ts
class NodeFS extends BasePortableFakeFS {
constructor ( realFs = ( external _fs _default ( ) ) ) {
super ( ) ;
this . realFs = realFs ; // @ts-expect-error
if ( typeof this . realFs . lutimes !== ` undefined ` ) {
this . lutimesPromise = this . lutimesPromiseImpl ;
this . lutimesSync = this . lutimesSyncImpl ;
}
}
getExtractHint ( ) {
return false ;
}
getRealPath ( ) {
return PortablePath . root ;
}
resolve ( p ) {
return ppath . resolve ( p ) ;
}
async openPromise ( p , flags , mode ) {
return await new Promise ( ( resolve , reject ) => {
this . realFs . open ( npath . fromPortablePath ( p ) , flags , mode , this . makeCallback ( resolve , reject ) ) ;
} ) ;
}
openSync ( p , flags , mode ) {
return this . realFs . openSync ( npath . fromPortablePath ( p ) , flags , mode ) ;
}
async opendirPromise ( p , opts ) {
return await new Promise ( ( resolve , reject ) => {
if ( typeof opts !== ` undefined ` ) {
this . realFs . opendir ( npath . fromPortablePath ( p ) , opts , this . makeCallback ( resolve , reject ) ) ;
} else {
this . realFs . opendir ( npath . fromPortablePath ( p ) , this . makeCallback ( resolve , reject ) ) ;
}
} ) . then ( dir => {
return Object . defineProperty ( dir , ` path ` , {
value : p ,
configurable : true ,
writable : true
} ) ;
} ) ;
}
opendirSync ( p , opts ) {
const dir = typeof opts !== ` undefined ` ? this . realFs . opendirSync ( npath . fromPortablePath ( p ) , opts ) : this . realFs . opendirSync ( npath . fromPortablePath ( p ) ) ;
return Object . defineProperty ( dir , ` path ` , {
value : p ,
configurable : true ,
writable : true
} ) ;
}
async readPromise ( fd , buffer , offset = 0 , length = 0 , position = - 1 ) {
return await new Promise ( ( resolve , reject ) => {
this . realFs . read ( fd , buffer , offset , length , position , ( error , bytesRead ) => {
if ( error ) {
reject ( error ) ;
} else {
resolve ( bytesRead ) ;
}
} ) ;
} ) ;
}
readSync ( fd , buffer , offset , length , position ) {
return this . realFs . readSync ( fd , buffer , offset , length , position ) ;
}
async writePromise ( fd , buffer , offset , length , position ) {
return await new Promise ( ( resolve , reject ) => {
if ( typeof buffer === ` string ` ) {
return this . realFs . write ( fd , buffer , offset , this . makeCallback ( resolve , reject ) ) ;
} else {
return this . realFs . write ( fd , buffer , offset , length , position , this . makeCallback ( resolve , reject ) ) ;
}
} ) ;
}
writeSync ( fd , buffer , offset , length , position ) {
if ( typeof buffer === ` string ` ) {
return this . realFs . writeSync ( fd , buffer , offset ) ;
} else {
return this . realFs . writeSync ( fd , buffer , offset , length , position ) ;
}
}
async closePromise ( fd ) {
await new Promise ( ( resolve , reject ) => {
this . realFs . close ( fd , this . makeCallback ( resolve , reject ) ) ;
} ) ;
}
closeSync ( fd ) {
this . realFs . closeSync ( fd ) ;
}
createReadStream ( p , opts ) {
const realPath = p !== null ? npath . fromPortablePath ( p ) : p ;
return this . realFs . createReadStream ( realPath , opts ) ;
}
createWriteStream ( p , opts ) {
const realPath = p !== null ? npath . fromPortablePath ( p ) : p ;
return this . realFs . createWriteStream ( realPath , opts ) ;
}
async realpathPromise ( p ) {
return await new Promise ( ( resolve , reject ) => {
this . realFs . realpath ( npath . fromPortablePath ( p ) , { } , this . makeCallback ( resolve , reject ) ) ;
} ) . then ( path => {
return npath . toPortablePath ( path ) ;
} ) ;
}
realpathSync ( p ) {
return npath . toPortablePath ( this . realFs . realpathSync ( npath . fromPortablePath ( p ) , { } ) ) ;
}
async existsPromise ( p ) {
return await new Promise ( resolve => {
this . realFs . exists ( npath . fromPortablePath ( p ) , resolve ) ;
} ) ;
}
accessSync ( p , mode ) {
return this . realFs . accessSync ( npath . fromPortablePath ( p ) , mode ) ;
}
async accessPromise ( p , mode ) {
return await new Promise ( ( resolve , reject ) => {
this . realFs . access ( npath . fromPortablePath ( p ) , mode , this . makeCallback ( resolve , reject ) ) ;
} ) ;
}
existsSync ( p ) {
return this . realFs . existsSync ( npath . fromPortablePath ( p ) ) ;
}
async statPromise ( p , opts ) {
return await new Promise ( ( resolve , reject ) => {
if ( opts ) {
this . realFs . stat ( npath . fromPortablePath ( p ) , opts , this . makeCallback ( resolve , reject ) ) ;
} else {
this . realFs . stat ( npath . fromPortablePath ( p ) , this . makeCallback ( resolve , reject ) ) ;
}
} ) ;
}
statSync ( p , opts ) {
if ( opts ) {
return this . realFs . statSync ( npath . fromPortablePath ( p ) , opts ) ;
} else {
return this . realFs . statSync ( npath . fromPortablePath ( p ) ) ;
}
}
async fstatPromise ( fd , opts ) {
return await new Promise ( ( resolve , reject ) => {
if ( opts ) {
// @ts-expect-error - The node typings doesn't know about the options
this . realFs . fstat ( fd , opts , this . makeCallback ( resolve , reject ) ) ;
} else {
this . realFs . fstat ( fd , this . makeCallback ( resolve , reject ) ) ;
}
} ) ;
}
fstatSync ( fd , opts ) {
if ( opts ) {
// @ts-expect-error - The node typings doesn't know about the options
return this . realFs . fstatSync ( fd , opts ) ;
} else {
return this . realFs . fstatSync ( fd ) ;
}
}
async lstatPromise ( p , opts ) {
return await new Promise ( ( resolve , reject ) => {
if ( opts ) {
// @ts-expect-error - TS does not know this takes options
this . realFs . lstat ( npath . fromPortablePath ( p ) , opts , this . makeCallback ( resolve , reject ) ) ;
} else {
this . realFs . lstat ( npath . fromPortablePath ( p ) , this . makeCallback ( resolve , reject ) ) ;
}
} ) ;
}
lstatSync ( p , opts ) {
if ( opts ) {
// @ts-expect-error - TS does not know this takes options
return this . realFs . lstatSync ( npath . fromPortablePath ( p ) , opts ) ;
} else {
return this . realFs . lstatSync ( npath . fromPortablePath ( p ) ) ;
}
}
async chmodPromise ( p , mask ) {
return await new Promise ( ( resolve , reject ) => {
this . realFs . chmod ( npath . fromPortablePath ( p ) , mask , this . makeCallback ( resolve , reject ) ) ;
} ) ;
}
chmodSync ( p , mask ) {
return this . realFs . chmodSync ( npath . fromPortablePath ( p ) , mask ) ;
}
async chownPromise ( p , uid , gid ) {
return await new Promise ( ( resolve , reject ) => {
this . realFs . chown ( npath . fromPortablePath ( p ) , uid , gid , this . makeCallback ( resolve , reject ) ) ;
} ) ;
}
chownSync ( p , uid , gid ) {
return this . realFs . chownSync ( npath . fromPortablePath ( p ) , uid , gid ) ;
}
async renamePromise ( oldP , newP ) {
return await new Promise ( ( resolve , reject ) => {
this . realFs . rename ( npath . fromPortablePath ( oldP ) , npath . fromPortablePath ( newP ) , this . makeCallback ( resolve , reject ) ) ;
} ) ;
}
renameSync ( oldP , newP ) {
return this . realFs . renameSync ( npath . fromPortablePath ( oldP ) , npath . fromPortablePath ( newP ) ) ;
}
async copyFilePromise ( sourceP , destP , flags = 0 ) {
return await new Promise ( ( resolve , reject ) => {
this . realFs . copyFile ( npath . fromPortablePath ( sourceP ) , npath . fromPortablePath ( destP ) , flags , this . makeCallback ( resolve , reject ) ) ;
} ) ;
}
copyFileSync ( sourceP , destP , flags = 0 ) {
return this . realFs . copyFileSync ( npath . fromPortablePath ( sourceP ) , npath . fromPortablePath ( destP ) , flags ) ;
}
async appendFilePromise ( p , content , opts ) {
return await new Promise ( ( resolve , reject ) => {
const fsNativePath = typeof p === ` string ` ? npath . fromPortablePath ( p ) : p ;
if ( opts ) {
this . realFs . appendFile ( fsNativePath , content , opts , this . makeCallback ( resolve , reject ) ) ;
} else {
this . realFs . appendFile ( fsNativePath , content , this . makeCallback ( resolve , reject ) ) ;
}
} ) ;
}
appendFileSync ( p , content , opts ) {
const fsNativePath = typeof p === ` string ` ? npath . fromPortablePath ( p ) : p ;
if ( opts ) {
this . realFs . appendFileSync ( fsNativePath , content , opts ) ;
} else {
this . realFs . appendFileSync ( fsNativePath , content ) ;
}
}
async writeFilePromise ( p , content , opts ) {
return await new Promise ( ( resolve , reject ) => {
const fsNativePath = typeof p === ` string ` ? npath . fromPortablePath ( p ) : p ;
if ( opts ) {
this . realFs . writeFile ( fsNativePath , content , opts , this . makeCallback ( resolve , reject ) ) ;
} else {
this . realFs . writeFile ( fsNativePath , content , this . makeCallback ( resolve , reject ) ) ;
}
} ) ;
}
writeFileSync ( p , content , opts ) {
const fsNativePath = typeof p === ` string ` ? npath . fromPortablePath ( p ) : p ;
if ( opts ) {
this . realFs . writeFileSync ( fsNativePath , content , opts ) ;
} else {
this . realFs . writeFileSync ( fsNativePath , content ) ;
}
}
async unlinkPromise ( p ) {
return await new Promise ( ( resolve , reject ) => {
this . realFs . unlink ( npath . fromPortablePath ( p ) , this . makeCallback ( resolve , reject ) ) ;
} ) ;
}
unlinkSync ( p ) {
return this . realFs . unlinkSync ( npath . fromPortablePath ( p ) ) ;
}
async utimesPromise ( p , atime , mtime ) {
return await new Promise ( ( resolve , reject ) => {
this . realFs . utimes ( npath . fromPortablePath ( p ) , atime , mtime , this . makeCallback ( resolve , reject ) ) ;
} ) ;
}
utimesSync ( p , atime , mtime ) {
this . realFs . utimesSync ( npath . fromPortablePath ( p ) , atime , mtime ) ;
}
async lutimesPromiseImpl ( p , atime , mtime ) {
// @ts-expect-error: Not yet in DefinitelyTyped
const lutimes = this . realFs . lutimes ;
if ( typeof lutimes === ` undefined ` ) throw ENOSYS ( ` unavailable Node binding ` , ` lutimes ' ${ p } ' ` ) ;
return await new Promise ( ( resolve , reject ) => {
lutimes . call ( this . realFs , npath . fromPortablePath ( p ) , atime , mtime , this . makeCallback ( resolve , reject ) ) ;
} ) ;
}
lutimesSyncImpl ( p , atime , mtime ) {
// @ts-expect-error: Not yet in DefinitelyTyped
const lutimesSync = this . realFs . lutimesSync ;
if ( typeof lutimesSync === ` undefined ` ) throw ENOSYS ( ` unavailable Node binding ` , ` lutimes ' ${ p } ' ` ) ;
lutimesSync . call ( this . realFs , npath . fromPortablePath ( p ) , atime , mtime ) ;
}
async mkdirPromise ( p , opts ) {
return await new Promise ( ( resolve , reject ) => {
this . realFs . mkdir ( npath . fromPortablePath ( p ) , opts , this . makeCallback ( resolve , reject ) ) ;
} ) ;
}
mkdirSync ( p , opts ) {
return this . realFs . mkdirSync ( npath . fromPortablePath ( p ) , opts ) ;
}
async rmdirPromise ( p , opts ) {
return await new Promise ( ( resolve , reject ) => {
// TODO: always pass opts when min node version is 12.10+
if ( opts ) {
this . realFs . rmdir ( npath . fromPortablePath ( p ) , opts , this . makeCallback ( resolve , reject ) ) ;
} else {
this . realFs . rmdir ( npath . fromPortablePath ( p ) , this . makeCallback ( resolve , reject ) ) ;
}
} ) ;
}
rmdirSync ( p , opts ) {
return this . realFs . rmdirSync ( npath . fromPortablePath ( p ) , opts ) ;
}
async linkPromise ( existingP , newP ) {
return await new Promise ( ( resolve , reject ) => {
this . realFs . link ( npath . fromPortablePath ( existingP ) , npath . fromPortablePath ( newP ) , this . makeCallback ( resolve , reject ) ) ;
} ) ;
}
linkSync ( existingP , newP ) {
return this . realFs . linkSync ( npath . fromPortablePath ( existingP ) , npath . fromPortablePath ( newP ) ) ;
}
async symlinkPromise ( target , p , type ) {
return await new Promise ( ( resolve , reject ) => {
this . realFs . symlink ( npath . fromPortablePath ( target . replace ( /\/+$/ , ` ` ) ) , npath . fromPortablePath ( p ) , type , this . makeCallback ( resolve , reject ) ) ;
} ) ;
}
symlinkSync ( target , p , type ) {
return this . realFs . symlinkSync ( npath . fromPortablePath ( target . replace ( /\/+$/ , ` ` ) ) , npath . fromPortablePath ( p ) , type ) ;
}
async readFilePromise ( p , encoding ) {
return await new Promise ( ( resolve , reject ) => {
const fsNativePath = typeof p === ` string ` ? npath . fromPortablePath ( p ) : p ;
this . realFs . readFile ( fsNativePath , encoding , this . makeCallback ( resolve , reject ) ) ;
} ) ;
}
readFileSync ( p , encoding ) {
const fsNativePath = typeof p === ` string ` ? npath . fromPortablePath ( p ) : p ;
return this . realFs . readFileSync ( fsNativePath , encoding ) ;
}
async readdirPromise ( p , opts ) {
return await new Promise ( ( resolve , reject ) => {
if ( opts === null || opts === void 0 ? void 0 : opts . withFileTypes ) {
this . realFs . readdir ( npath . fromPortablePath ( p ) , {
withFileTypes : true
} , this . makeCallback ( resolve , reject ) ) ;
} else {
this . realFs . readdir ( npath . fromPortablePath ( p ) , this . makeCallback ( value => resolve ( value ) , reject ) ) ;
}
} ) ;
}
readdirSync ( p , opts ) {
if ( opts === null || opts === void 0 ? void 0 : opts . withFileTypes ) {
return this . realFs . readdirSync ( npath . fromPortablePath ( p ) , {
withFileTypes : true
} ) ;
} else {
return this . realFs . readdirSync ( npath . fromPortablePath ( p ) ) ;
}
}
async readlinkPromise ( p ) {
return await new Promise ( ( resolve , reject ) => {
this . realFs . readlink ( npath . fromPortablePath ( p ) , this . makeCallback ( resolve , reject ) ) ;
} ) . then ( path => {
return npath . toPortablePath ( path ) ;
} ) ;
}
readlinkSync ( p ) {
return npath . toPortablePath ( this . realFs . readlinkSync ( npath . fromPortablePath ( p ) ) ) ;
}
async truncatePromise ( p , len ) {
return await new Promise ( ( resolve , reject ) => {
this . realFs . truncate ( npath . fromPortablePath ( p ) , len , this . makeCallback ( resolve , reject ) ) ;
} ) ;
}
truncateSync ( p , len ) {
return this . realFs . truncateSync ( npath . fromPortablePath ( p ) , len ) ;
}
watch ( p , a , b ) {
return this . realFs . watch ( npath . fromPortablePath ( p ) , // @ts-expect-error
a , b ) ;
}
watchFile ( p , a , b ) {
return this . realFs . watchFile ( npath . fromPortablePath ( p ) , // @ts-expect-error
a , b ) ;
}
unwatchFile ( p , cb ) {
return this . realFs . unwatchFile ( npath . fromPortablePath ( p ) , cb ) ;
}
makeCallback ( resolve , reject ) {
return ( err , result ) => {
if ( err ) {
reject ( err ) ;
} else {
resolve ( result ) ;
}
} ;
}
}
; // CONCATENATED MODULE: ../yarnpkg-fslib/sources/ProxiedFS.ts
/**
 * Base class for filesystems that wrap another `baseFs` while translating
 * paths between the two "worlds" through `mapToBase` / `mapFromBase`
 * (implemented by subclasses). Fd-based operations are forwarded untouched,
 * and `fsMapToBase` handles arguments that may be either a path or an fd.
 */
class ProxiedFS extends FakeFS {
  getExtractHint(hints) {
    return this.baseFs.getExtractHint(hints);
  }
  resolve(path) {
    return this.mapFromBase(this.baseFs.resolve(this.mapToBase(path)));
  }
  getRealPath() {
    return this.mapFromBase(this.baseFs.getRealPath());
  }
  async openPromise(p, flags, mode) {
    return this.baseFs.openPromise(this.mapToBase(p), flags, mode);
  }
  openSync(p, flags, mode) {
    return this.baseFs.openSync(this.mapToBase(p), flags, mode);
  }
  async opendirPromise(p, opts) {
    // Re-expose the original (unmapped) path on the returned Dir handle.
    const dir = await this.baseFs.opendirPromise(this.mapToBase(p), opts);
    return Object.assign(dir, {path: p});
  }
  opendirSync(p, opts) {
    const dir = this.baseFs.opendirSync(this.mapToBase(p), opts);
    return Object.assign(dir, {path: p});
  }
  async readPromise(fd, buffer, offset, length, position) {
    return await this.baseFs.readPromise(fd, buffer, offset, length, position);
  }
  readSync(fd, buffer, offset, length, position) {
    return this.baseFs.readSync(fd, buffer, offset, length, position);
  }
  async writePromise(fd, buffer, offset, length, position) {
    // String writes only take an (optional) position argument in Node.
    if (typeof buffer === `string`) {
      return await this.baseFs.writePromise(fd, buffer, offset);
    } else {
      return await this.baseFs.writePromise(fd, buffer, offset, length, position);
    }
  }
  writeSync(fd, buffer, offset, length, position) {
    if (typeof buffer === `string`) {
      return this.baseFs.writeSync(fd, buffer, offset);
    } else {
      return this.baseFs.writeSync(fd, buffer, offset, length, position);
    }
  }
  async closePromise(fd) {
    return this.baseFs.closePromise(fd);
  }
  closeSync(fd) {
    this.baseFs.closeSync(fd);
  }
  createReadStream(p, opts) {
    return this.baseFs.createReadStream(p !== null ? this.mapToBase(p) : p, opts);
  }
  createWriteStream(p, opts) {
    return this.baseFs.createWriteStream(p !== null ? this.mapToBase(p) : p, opts);
  }
  async realpathPromise(p) {
    return this.mapFromBase(await this.baseFs.realpathPromise(this.mapToBase(p)));
  }
  realpathSync(p) {
    return this.mapFromBase(this.baseFs.realpathSync(this.mapToBase(p)));
  }
  async existsPromise(p) {
    return this.baseFs.existsPromise(this.mapToBase(p));
  }
  existsSync(p) {
    return this.baseFs.existsSync(this.mapToBase(p));
  }
  accessSync(p, mode) {
    return this.baseFs.accessSync(this.mapToBase(p), mode);
  }
  async accessPromise(p, mode) {
    return this.baseFs.accessPromise(this.mapToBase(p), mode);
  }
  async statPromise(p, opts) {
    return this.baseFs.statPromise(this.mapToBase(p), opts);
  }
  statSync(p, opts) {
    return this.baseFs.statSync(this.mapToBase(p), opts);
  }
  async fstatPromise(fd, opts) {
    return this.baseFs.fstatPromise(fd, opts);
  }
  fstatSync(fd, opts) {
    return this.baseFs.fstatSync(fd, opts);
  }
  async lstatPromise(p, opts) {
    return this.baseFs.lstatPromise(this.mapToBase(p), opts);
  }
  lstatSync(p, opts) {
    return this.baseFs.lstatSync(this.mapToBase(p), opts);
  }
  async chmodPromise(p, mask) {
    return this.baseFs.chmodPromise(this.mapToBase(p), mask);
  }
  chmodSync(p, mask) {
    return this.baseFs.chmodSync(this.mapToBase(p), mask);
  }
  async chownPromise(p, uid, gid) {
    return this.baseFs.chownPromise(this.mapToBase(p), uid, gid);
  }
  chownSync(p, uid, gid) {
    return this.baseFs.chownSync(this.mapToBase(p), uid, gid);
  }
  async renamePromise(oldP, newP) {
    return this.baseFs.renamePromise(this.mapToBase(oldP), this.mapToBase(newP));
  }
  renameSync(oldP, newP) {
    return this.baseFs.renameSync(this.mapToBase(oldP), this.mapToBase(newP));
  }
  async copyFilePromise(sourceP, destP, flags = 0) {
    return this.baseFs.copyFilePromise(this.mapToBase(sourceP), this.mapToBase(destP), flags);
  }
  copyFileSync(sourceP, destP, flags = 0) {
    return this.baseFs.copyFileSync(this.mapToBase(sourceP), this.mapToBase(destP), flags);
  }
  async appendFilePromise(p, content, opts) {
    // `p` may be an fd, hence fsMapToBase rather than mapToBase.
    return this.baseFs.appendFilePromise(this.fsMapToBase(p), content, opts);
  }
  appendFileSync(p, content, opts) {
    return this.baseFs.appendFileSync(this.fsMapToBase(p), content, opts);
  }
  async writeFilePromise(p, content, opts) {
    return this.baseFs.writeFilePromise(this.fsMapToBase(p), content, opts);
  }
  writeFileSync(p, content, opts) {
    return this.baseFs.writeFileSync(this.fsMapToBase(p), content, opts);
  }
  async unlinkPromise(p) {
    return this.baseFs.unlinkPromise(this.mapToBase(p));
  }
  unlinkSync(p) {
    return this.baseFs.unlinkSync(this.mapToBase(p));
  }
  async utimesPromise(p, atime, mtime) {
    return this.baseFs.utimesPromise(this.mapToBase(p), atime, mtime);
  }
  utimesSync(p, atime, mtime) {
    return this.baseFs.utimesSync(this.mapToBase(p), atime, mtime);
  }
  async mkdirPromise(p, opts) {
    return this.baseFs.mkdirPromise(this.mapToBase(p), opts);
  }
  mkdirSync(p, opts) {
    return this.baseFs.mkdirSync(this.mapToBase(p), opts);
  }
  async rmdirPromise(p, opts) {
    return this.baseFs.rmdirPromise(this.mapToBase(p), opts);
  }
  rmdirSync(p, opts) {
    return this.baseFs.rmdirSync(this.mapToBase(p), opts);
  }
  async linkPromise(existingP, newP) {
    return this.baseFs.linkPromise(this.mapToBase(existingP), this.mapToBase(newP));
  }
  linkSync(existingP, newP) {
    return this.baseFs.linkSync(this.mapToBase(existingP), this.mapToBase(newP));
  }
  async symlinkPromise(target, p, type) {
    const mappedP = this.mapToBase(p);
    // Absolute targets can be mapped directly...
    if (this.pathUtils.isAbsolute(target)) {
      return this.baseFs.symlinkPromise(this.mapToBase(target), mappedP, type);
    }
    // ...relative ones must be resolved, mapped, then re-relativized in the
    // base filesystem's world.
    const absoluteTarget = this.pathUtils.join(this.pathUtils.dirname(p), target);
    const mappedAbsoluteTarget = this.mapToBase(absoluteTarget);
    const mappedTarget = this.baseFs.pathUtils.relative(this.baseFs.pathUtils.dirname(mappedP), mappedAbsoluteTarget);
    return this.baseFs.symlinkPromise(mappedTarget, mappedP, type);
  }
  symlinkSync(target, p, type) {
    const mappedP = this.mapToBase(p);
    if (this.pathUtils.isAbsolute(target)) {
      return this.baseFs.symlinkSync(this.mapToBase(target), mappedP, type);
    }
    const absoluteTarget = this.pathUtils.join(this.pathUtils.dirname(p), target);
    const mappedAbsoluteTarget = this.mapToBase(absoluteTarget);
    const mappedTarget = this.baseFs.pathUtils.relative(this.baseFs.pathUtils.dirname(mappedP), mappedAbsoluteTarget);
    return this.baseFs.symlinkSync(mappedTarget, mappedP, type);
  }
  async readFilePromise(p, encoding) {
    // The original source split this per-encoding purely for TypeScript
    // overload resolution; at runtime both branches were identical.
    return this.baseFs.readFilePromise(this.fsMapToBase(p), encoding);
  }
  readFileSync(p, encoding) {
    return this.baseFs.readFileSync(this.fsMapToBase(p), encoding);
  }
  async readdirPromise(p, opts) {
    return this.baseFs.readdirPromise(this.mapToBase(p), opts);
  }
  readdirSync(p, opts) {
    return this.baseFs.readdirSync(this.mapToBase(p), opts);
  }
  async readlinkPromise(p) {
    return this.mapFromBase(await this.baseFs.readlinkPromise(this.mapToBase(p)));
  }
  readlinkSync(p) {
    return this.mapFromBase(this.baseFs.readlinkSync(this.mapToBase(p)));
  }
  async truncatePromise(p, len) {
    return this.baseFs.truncatePromise(this.mapToBase(p), len);
  }
  truncateSync(p, len) {
    return this.baseFs.truncateSync(this.mapToBase(p), len);
  }
  watch(p, a, b) {
    return this.baseFs.watch(this.mapToBase(p), a, b);
  }
  watchFile(p, a, b) {
    return this.baseFs.watchFile(this.mapToBase(p), a, b);
  }
  unwatchFile(p, cb) {
    return this.baseFs.unwatchFile(this.mapToBase(p), cb);
  }
  fsMapToBase(p) {
    // File descriptors pass through untouched; only actual paths get mapped.
    return typeof p === `number` ? p : this.mapToBase(p);
  }
}
; // CONCATENATED MODULE: ../yarnpkg-fslib/sources/VirtualFS.ts
// Matches a bare decimal integer; used to validate the "depth" segment below.
const NUMBER_REGEXP = /^[0-9]+$/;
// Capture groups of VIRTUAL_REGEXP:
// $0: full path
// $1: virtual folder
// $2: virtual segment
// $3: hash
// $4: depth
// $5: subpath
const VIRTUAL_REGEXP = /^(\/(?:[^/]+\/)*?(?:\$\$virtual|__virtual__))((?:\/((?:[^/]+-)?[a-f0-9]+)(?:\/([^/]+))?)?((?:\/.*)?))$/;
// A virtual component: an optional name prefix followed by a hex hash.
const VALID_COMPONENT = /^([^/]+-)?[a-f0-9]+$/;
/**
 * Filesystem layer that resolves `__virtual__` (formerly `$$virtual`) path
 * segments down to their concrete on-disk locations, delegating the actual
 * I/O to `baseFs`.
 */
class VirtualFS extends ProxiedFS {
  constructor({baseFs = new NodeFS()} = {}) {
    super(ppath);
    this.baseFs = baseFs;
  }
  static makeVirtualPath(base, component, to) {
    if (ppath.basename(base) !== `__virtual__`) throw new Error(`Assertion failed: Virtual folders must be named "__virtual__"`);
    if (!ppath.basename(component).match(VALID_COMPONENT)) throw new Error(`Assertion failed: Virtual components must be ended by an hexadecimal hash`);
    // Relative distance between the virtual folder's parent and the target.
    const target = ppath.relative(ppath.dirname(base), to);
    const segments = target.split(`/`);
    // Number of leading `..` segments, i.e. how far up we must climb before
    // the remaining segments apply.
    let depth = 0;
    while (depth < segments.length && segments[depth] === `..`) depth += 1;
    const finalSegments = segments.slice(depth);
    return ppath.join(base, component, String(depth), ...finalSegments);
  }
  static resolveVirtual(p) {
    const match = p.match(VIRTUAL_REGEXP);
    if (!match || (!match[3] && match[5])) return p;
    const target = ppath.dirname(match[1]);
    if (!match[3] || !match[4]) return target;
    // A non-numeric depth segment means the path isn't actually virtual.
    if (!NUMBER_REGEXP.test(match[4])) return p;
    const depth = Number(match[4]);
    const backstep = `../`.repeat(depth);
    const subpath = match[5] || `.`;
    // The resolved path may itself contain another virtual segment.
    return VirtualFS.resolveVirtual(ppath.join(target, backstep, subpath));
  }
  getExtractHint(hints) {
    return this.baseFs.getExtractHint(hints);
  }
  getRealPath() {
    return this.baseFs.getRealPath();
  }
  realpathSync(p) {
    const match = p.match(VIRTUAL_REGEXP);
    if (!match) return this.baseFs.realpathSync(p);
    if (!match[5]) return p;
    const realpath = this.baseFs.realpathSync(this.mapToBase(p));
    // Re-wrap the real path inside the original virtual folder.
    return VirtualFS.makeVirtualPath(match[1], match[3], realpath);
  }
  async realpathPromise(p) {
    const match = p.match(VIRTUAL_REGEXP);
    if (!match) return await this.baseFs.realpathPromise(p);
    if (!match[5]) return p;
    const realpath = await this.baseFs.realpathPromise(this.mapToBase(p));
    return VirtualFS.makeVirtualPath(match[1], match[3], realpath);
  }
  mapToBase(p) {
    if (this.pathUtils.isAbsolute(p)) return VirtualFS.resolveVirtual(p);
    // Relative paths are resolved, de-virtualized, then re-relativized
    // against the de-virtualized current root.
    const resolvedRoot = VirtualFS.resolveVirtual(this.baseFs.resolve(PortablePath.dot));
    const resolvedP = VirtualFS.resolveVirtual(this.baseFs.resolve(p));
    return ppath.relative(resolvedRoot, resolvedP);
  }
  mapFromBase(p) {
    return p;
  }
}
; // CONCATENATED MODULE: external "stream"
// Webpack "external" modules: resolved through the host's require at runtime.
const external_stream_namespaceObject = require("stream");;
; // CONCATENATED MODULE: external "util"
const external_util_namespaceObject = require("util");;
; // CONCATENATED MODULE: external "zlib"
const external_zlib_namespaceObject = require("zlib");;
// Webpack interop helper wrapping the zlib namespace as a default-export getter.
var external_zlib_default = /*#__PURE__*/__webpack_require__.n(external_zlib_namespaceObject);
; // CONCATENATED MODULE: ../yarnpkg-fslib/sources/algorithms/opendir.ts
/**
 * Minimal fs.Dir lookalike driven by a `nextDirent` producer function
 * (which returns the next entry, or null when exhausted). Supports the
 * callback, promise, sync and async-iterator consumption styles.
 */
class CustomDir {
  constructor(path, nextDirent, opts = {}) {
    this.path = path;
    this.nextDirent = nextDirent;
    this.opts = opts;
    this.closed = false;
  }
  throwIfClosed() {
    if (this.closed) {
      throw ERR_DIR_CLOSED();
    }
  }
  async *[Symbol.asyncIterator]() {
    try {
      let dirent;
      // eslint-disable-next-line no-cond-assign
      while ((dirent = await this.read()) !== null) {
        yield dirent;
      }
    } finally {
      // The iterator owns the handle: always close it when iteration ends.
      await this.close();
    }
  }
  read(cb) {
    const dirent = this.readSync();
    if (typeof cb === `undefined`) return Promise.resolve(dirent);
    return cb(null, dirent);
  }
  readSync() {
    this.throwIfClosed();
    return this.nextDirent();
  }
  close(cb) {
    this.closeSync();
    if (typeof cb === `undefined`) return Promise.resolve();
    return cb(null);
  }
  closeSync() {
    this.throwIfClosed();
    // Fire the optional onClose hook with the options bag as receiver.
    const onClose = this.opts.onClose;
    if (onClose != null) onClose.call(this.opts);
    this.closed = true;
  }
}
/**
 * Creates a Dir-like handle over a precomputed list of entry names,
 * lazily stat-ing each entry as it is consumed (the stats object is
 * augmented with a `name` field, dirent-style).
 */
function opendir(fakeFs, path, entries, opts) {
  const produceNext = () => {
    const filename = entries.shift();
    if (typeof filename === `undefined`) return null;
    return Object.assign(fakeFs.statSync(fakeFs.pathUtils.join(path, filename)), {
      name: filename,
    });
  };
  return new CustomDir(path, produceNext, opts);
}
; // CONCATENATED MODULE: external "events"
// Host `events` module; provides the EventEmitter base class used below.
const external_events_namespaceObject = require("events");;
; // CONCATENATED MODULE: ../yarnpkg-fslib/sources/constants.ts
// POSIX file-type bits of st_mode (mirrors <sys/stat.h>).
const constants_S_IFMT = 0o170000;   // mask selecting the file-type bits
const constants_S_IFDIR = 0o040000;  // directory
const constants_S_IFREG = 0o100000;  // regular file
const constants_S_IFLNK = 0o120000;  // symbolic link
; // CONCATENATED MODULE: ../yarnpkg-fslib/sources/statUtils.ts
/**
 * Minimal fs.Dirent lookalike: a name plus a raw st_mode from which the
 * type predicates are derived. Device/FIFO/socket types are never produced
 * by this module and always report false.
 *
 * Fix: the predicates previously referenced bare `S_IFMT`/`S_IFDIR`/
 * `S_IFREG`/`S_IFLNK`, which this concatenated module does not define —
 * the sibling StatEntry/BigIntStatsEntry classes use the module-local
 * `constants_S_*` bindings. Use those here too for consistency (the values
 * are the standard POSIX ones either way).
 */
class DirEntry {
  constructor() {
    this.name = ``;
    this.mode = 0;
  }
  isBlockDevice() {
    return false;
  }
  isCharacterDevice() {
    return false;
  }
  isDirectory() {
    return (this.mode & constants_S_IFMT) === constants_S_IFDIR;
  }
  isFIFO() {
    return false;
  }
  isFile() {
    return (this.mode & constants_S_IFMT) === constants_S_IFREG;
  }
  isSocket() {
    return false;
  }
  isSymbolicLink() {
    return (this.mode & constants_S_IFMT) === constants_S_IFLNK;
  }
}
/**
 * In-memory fs.Stats lookalike. Defaults describe a regular file with mode
 * 0644, size 0, and all timestamps at the Unix epoch. Device/FIFO/socket
 * predicates always report false since this module never produces them.
 */
class StatEntry {
  constructor() {
    this.uid = 0;
    this.gid = 0;
    this.size = 0;
    this.blksize = 0;
    this.atimeMs = 0;
    this.mtimeMs = 0;
    this.ctimeMs = 0;
    this.birthtimeMs = 0;
    this.atime = new Date(0);
    this.mtime = new Date(0);
    this.ctime = new Date(0);
    this.birthtime = new Date(0);
    this.dev = 0;
    this.ino = 0;
    this.mode = constants_S_IFREG | 0o644;
    this.nlink = 1;
    this.rdev = 0;
    this.blocks = 1;
  }
  isBlockDevice() {
    return false;
  }
  isCharacterDevice() {
    return false;
  }
  isDirectory() {
    const kind = this.mode & constants_S_IFMT;
    return kind === constants_S_IFDIR;
  }
  isFIFO() {
    return false;
  }
  isFile() {
    const kind = this.mode & constants_S_IFMT;
    return kind === constants_S_IFREG;
  }
  isSocket() {
    return false;
  }
  isSymbolicLink() {
    const kind = this.mode & constants_S_IFMT;
    return kind === constants_S_IFLNK;
  }
}
/**
 * BigInt flavor of StatEntry, mirroring fs.BigIntStats: every numeric field
 * is a bigint and nanosecond timestamp fields are present. Defaults describe
 * a regular 0644 file with all timestamps at the Unix epoch.
 */
class BigIntStatsEntry {
  constructor() {
    this.uid = BigInt(0);
    this.gid = BigInt(0);
    this.size = BigInt(0);
    this.blksize = BigInt(0);
    this.atimeMs = BigInt(0);
    this.mtimeMs = BigInt(0);
    this.ctimeMs = BigInt(0);
    this.birthtimeMs = BigInt(0);
    this.atimeNs = BigInt(0);
    this.mtimeNs = BigInt(0);
    this.ctimeNs = BigInt(0);
    this.birthtimeNs = BigInt(0);
    this.atime = new Date(0);
    this.mtime = new Date(0);
    this.ctime = new Date(0);
    this.birthtime = new Date(0);
    this.dev = BigInt(0);
    this.ino = BigInt(0);
    this.mode = BigInt(constants_S_IFREG | 0o644);
    this.nlink = BigInt(1);
    this.rdev = BigInt(0);
    this.blocks = BigInt(1);
  }
  isBlockDevice() {
    return false;
  }
  isCharacterDevice() {
    return false;
  }
  isDirectory() {
    const kind = this.mode & BigInt(constants_S_IFMT);
    return kind === BigInt(constants_S_IFDIR);
  }
  isFIFO() {
    return false;
  }
  isFile() {
    const kind = this.mode & BigInt(constants_S_IFMT);
    return kind === BigInt(constants_S_IFREG);
  }
  isSocket() {
    return false;
  }
  isSymbolicLink() {
    const kind = this.mode & BigInt(constants_S_IFMT);
    return kind === BigInt(constants_S_IFLNK);
  }
}
function makeDefaultStats ( ) {
return new StatEntry ( ) ;
}
function makeEmptyStats ( ) {
return clearStats ( makeDefaultStats ( ) ) ;
}
/ * *
* Mutates the provided stats object to zero it out then returns it for convenience
* /
function clearStats ( stats ) {
for ( const key in stats ) {
if ( Object . prototype . hasOwnProperty . call ( stats , key ) ) {
const element = stats [ key ] ;
if ( typeof element === ` number ` ) {
// @ts-expect-error Typescript can't tell that stats[key] is a number
stats [ key ] = 0 ;
} else if ( typeof element === ` bigint ` ) {
// @ts-expect-error Typescript can't tell that stats[key] is a bigint
stats [ key ] = BigInt ( 0 ) ;
} else if ( external _util _namespaceObject . types . isDate ( element ) ) {
// @ts-expect-error Typescript can't tell that stats[key] is a bigint
stats [ key ] = new Date ( 0 ) ;
}
}
}
return stats ;
}
/**
 * Builds a BigIntStatsEntry from a regular stats object: numeric fields are
 * converted to bigints, dates are copied, and the *Ns nanosecond fields are
 * derived from their *Ms counterparts.
 */
function convertToBigIntStats(stats) {
  const bigintStats = new BigIntStatsEntry();
  for (const key of Object.keys(stats)) {
    const element = stats[key];
    if (typeof element === `number`) {
      bigintStats[key] = BigInt(element);
    } else if (external_util_namespaceObject.types.isDate(element)) {
      bigintStats[key] = new Date(element);
    }
  }
  const NS_PER_MS = BigInt(1e6);
  bigintStats.atimeNs = bigintStats.atimeMs * NS_PER_MS;
  bigintStats.mtimeNs = bigintStats.mtimeMs * NS_PER_MS;
  bigintStats.ctimeNs = bigintStats.ctimeMs * NS_PER_MS;
  bigintStats.birthtimeNs = bigintStats.birthtimeMs * NS_PER_MS;
  return bigintStats;
}
/**
 * Structural equality over Stats/BigIntStats-like objects: compares the
 * scalar fields, the file-type predicates, and (when present) the
 * nanosecond fields. On plain Stats the *Ns fields are undefined on both
 * sides and therefore compare equal.
 */
function areStatsEqual(a, b) {
  // Checks run in the same order as the original field-by-field version.
  for (const key of [`atimeMs`, `birthtimeMs`, `blksize`, `blocks`, `ctimeMs`, `dev`, `gid`, `ino`]) {
    if (a[key] !== b[key]) return false;
  }
  for (const probe of [`isBlockDevice`, `isCharacterDevice`, `isDirectory`, `isFIFO`, `isFile`, `isSocket`, `isSymbolicLink`]) {
    if (a[probe]() !== b[probe]()) return false;
  }
  for (const key of [`mode`, `mtimeMs`, `nlink`, `rdev`, `size`, `uid`]) {
    if (a[key] !== b[key]) return false;
  }
  for (const key of [`atimeNs`, `mtimeNs`, `ctimeNs`, `birthtimeNs`]) {
    if (a[key] !== b[key]) return false;
  }
  return true;
}
; // CONCATENATED MODULE: ../yarnpkg-fslib/sources/algorithms/watchFile/CustomStatWatcher.ts
// String-enum pattern (TypeScript output): events emitted by CustomStatWatcher.
var Event;
(function (ns) {
  ns.Change = `change`;
  ns.Stop = `stop`;
})(Event || (Event = {}));
// Lifecycle states of a CustomStatWatcher.
var Status;
(function (ns) {
  ns.Ready = `ready`;
  ns.Running = `running`;
  ns.Stopped = `stopped`;
})(Status || (Status = {}));
/**
 * Throws unless the watcher is in the expected lifecycle state.
 */
function assertStatus(current, expected) {
  if (current === expected) return;
  throw new Error(`Invalid StatWatcher status: expected '${expected}', got '${current}'`);
}
/**
 * EventEmitter-based stand-in for fs.StatWatcher, backing the module-level
 * `watchFile`/`unwatchFile` implementation. It polls `fakeFs.statSync` on a
 * per-listener interval and emits `Event.Change` whenever the stats differ
 * from the previous snapshot.
 */
class CustomStatWatcher extends external_events_namespaceObject.EventEmitter {
  constructor(fakeFs, path, {
    bigint = false
  } = {}) {
    super();
    this.status = Status.Ready;
    // Maps each registered listener to its polling interval handle.
    this.changeListeners = new Map();
    this.startTimeout = null;
    this.fakeFs = fakeFs;
    this.path = path;
    this.bigint = bigint;
    // Baseline snapshot; zeroed-out stats if the path doesn't exist yet.
    this.lastStats = this.stat();
  }
  /**
   * Builds a watcher and immediately starts it.
   */
  static create(fakeFs, path, opts) {
    const statWatcher = new CustomStatWatcher(fakeFs, path, opts);
    statWatcher.start();
    return statWatcher;
  }
  start() {
    assertStatus(this.status, Status.Ready);
    this.status = Status.Running;
    // Node allows other listeners to be registered up to 3 milliseconds
    // after the watcher has been started, so that's what we're doing too
    this.startTimeout = setTimeout(() => {
      this.startTimeout = null;
      // Per the Node FS docs:
      // "When an fs.watchFile operation results in an ENOENT error,
      // it will invoke the listener once, with all the fields zeroed
      // (or, for dates, the Unix Epoch)."
      if (!this.fakeFs.existsSync(this.path)) {
        this.emit(Event.Change, this.lastStats, this.lastStats);
      }
    }, 3);
  }
  stop() {
    assertStatus(this.status, Status.Running);
    this.status = Status.Stopped;
    // Cancel the pending ENOENT notification if it hasn't fired yet.
    if (this.startTimeout !== null) {
      clearTimeout(this.startTimeout);
      this.startTimeout = null;
    }
    this.emit(Event.Stop);
  }
  /**
   * Stats the watched path, mapping a missing file (ENOENT) to an
   * all-zero stats object instead of an error; other errors propagate.
   */
  stat() {
    try {
      return this.fakeFs.statSync(this.path, {
        bigint: this.bigint
      });
    } catch (error) {
      if (error.code === `ENOENT`) {
        const statInstance = this.bigint ? new BigIntStatsEntry() : new StatEntry();
        return clearStats(statInstance);
      } else {
        throw error;
      }
    }
  }
  /**
   * Creates an interval whose callback compares the current stats with the previous stats and notifies all listeners in case of changes.
   *
   * @param opts.persistent Decides whether the interval should be immediately unref-ed.
   */
  makeInterval(opts) {
    const interval = setInterval(() => {
      const currentStats = this.stat();
      const previousStats = this.lastStats;
      if (areStatsEqual(currentStats, previousStats)) return;
      this.lastStats = currentStats;
      this.emit(Event.Change, currentStats, previousStats);
    }, opts.interval);
    return opts.persistent ? interval : interval.unref();
  }
  /**
   * Registers a listener and assigns it an interval.
   */
  registerChangeListener(listener, opts) {
    this.addListener(Event.Change, listener);
    this.changeListeners.set(listener, this.makeInterval(opts));
  }
  /**
   * Unregisters the listener and clears the assigned interval.
   */
  unregisterChangeListener(listener) {
    this.removeListener(Event.Change, listener);
    const interval = this.changeListeners.get(listener);
    if (typeof interval !== `undefined`) clearInterval(interval);
    this.changeListeners.delete(listener);
  }
  /**
   * Unregisters all listeners and clears all assigned intervals.
   */
  unregisterAllChangeListeners() {
    for (const listener of this.changeListeners.keys()) {
      this.unregisterChangeListener(listener);
    }
  }
  hasChangeListeners() {
    return this.changeListeners.size > 0;
  }
  /**
   * Refs all stored intervals.
   */
  ref() {
    for (const interval of this.changeListeners.values()) interval.ref();
    return this;
  }
  /**
   * Unrefs all stored intervals.
   */
  unref() {
    for (const interval of this.changeListeners.values()) interval.unref();
    return this;
  }
}
; // CONCATENATED MODULE: ../yarnpkg-fslib/sources/algorithms/watchFile.ts
const statWatchersByFakeFS = new WeakMap ( ) ;
/**
 * fs.watchFile equivalent for a FakeFS: reuses (or lazily creates) the
 * CustomStatWatcher for `path` and attaches `listener` to it with the
 * requested polling options. Returns the watcher.
 */
function watchFile(fakeFs, path, a, b) {
  // Normalize the two overloads: (path, listener) and (path, options, listener).
  let bigint;
  let persistent;
  let interval;
  let listener;
  if (typeof a === `function`) {
    bigint = false;
    persistent = true;
    interval = 5007;
    listener = a;
  } else {
    ({
      bigint = false,
      persistent = true,
      interval = 5007
    } = a);
    listener = b;
  }
  let statWatchers = statWatchersByFakeFS.get(fakeFs);
  if (typeof statWatchers === `undefined`) statWatchersByFakeFS.set(fakeFs, statWatchers = new Map());
  let statWatcher = statWatchers.get(path);
  if (typeof statWatcher === `undefined`) {
    statWatcher = CustomStatWatcher.create(fakeFs, path, {bigint});
    statWatchers.set(path, statWatcher);
  }
  statWatcher.registerChangeListener(listener, {persistent, interval});
  return statWatcher;
}
/**
 * fs.unwatchFile equivalent: detaches `cb` (or, when omitted, every
 * listener) from the watcher for `path`, stopping and discarding the
 * watcher once it has no listeners left. No-op if nothing is watched.
 */
function unwatchFile(fakeFs, path, cb) {
  const statWatchers = statWatchersByFakeFS.get(fakeFs);
  if (typeof statWatchers === `undefined`) return;
  const statWatcher = statWatchers.get(path);
  if (typeof statWatcher === `undefined`) return;
  if (typeof cb === `undefined`) {
    statWatcher.unregisterAllChangeListeners();
  } else {
    statWatcher.unregisterChangeListener(cb);
  }
  if (!statWatcher.hasChangeListeners()) {
    statWatcher.stop();
    statWatchers.delete(path);
  }
}
/**
 * Tears down every file watcher registered for the given FakeFS instance.
 */
function unwatchAllFiles(fakeFs) {
  const statWatchers = statWatchersByFakeFS.get(fakeFs);
  if (typeof statWatchers === `undefined`) return;
  for (const path of statWatchers.keys()) {
    unwatchFile(fakeFs, path);
  }
}
; // CONCATENATED MODULE: ../yarnpkg-fslib/sources/ZipFS.ts
// Default zip compression level used by ZipFS when none is supplied.
// NOTE(review): the exact semantics of `mixed` live in the ZipFS
// implementation below — confirm there before relying on them.
const DEFAULT_COMPRESSION_LEVEL = `mixed`;
/**
 * Normalizes a time value (numeric string, number, or Date) into a UNIX
 * timestamp in seconds (possibly fractional, e.g. 123.456). Negative
 * numbers are mapped to "now". Anything else throws.
 */
function toUnixTimestamp(time) {
  // Strings that round-trip through Number are numeric timestamps already.
  if (typeof time === `string` && String(+time) === time) return +time;
  if (Number.isFinite(time)) {
    return time < 0 ? Date.now() / 1000 : time;
  }
  if ((0, external_util_namespaceObject.isDate)(time)) return time.getTime() / 1000;
  throw new Error(`Invalid time`);
}
class ZipFS extends BasePortableFakeFS {
constructor ( source , opts ) {
super ( ) ;
this . lzSource = null ;
this . listings = new Map ( ) ;
this . entries = new Map ( ) ;
/ * *
* A cache of indices mapped to file sources .
* Populated by ` setFileSource ` calls .
* Required for supporting read after write .
* /
this . fileSources = new Map ( ) ;
this . fds = new Map ( ) ;
this . nextFd = 0 ;
this . ready = false ;
this . readOnly = false ;
this . libzip = opts . libzip ;
const pathOptions = opts ;
this . level = typeof pathOptions . level !== ` undefined ` ? pathOptions . level : DEFAULT _COMPRESSION _LEVEL ;
if ( source === null ) {
source = Buffer . from ( [ 0x50 , 0x4B , 0x05 , 0x06 , 0x00 , 0x00 , 0x00 , 0x00 , 0x00 , 0x00 , 0x00 , 0x00 , 0x00 , 0x00 , 0x00 , 0x00 , 0x00 , 0x00 , 0x00 , 0x00 , 0x00 , 0x00 ] ) ;
}
if ( typeof source === ` string ` ) {
const {
baseFs = new NodeFS ( )
} = pathOptions ;
this . baseFs = baseFs ;
this . path = source ;
} else {
this . path = null ;
this . baseFs = null ;
}
if ( opts . stats ) {
this . stats = opts . stats ;
} else {
if ( typeof source === ` string ` ) {
try {
this . stats = this . baseFs . statSync ( source ) ;
} catch ( error ) {
if ( error . code === ` ENOENT ` && pathOptions . create ) {
this . stats = makeDefaultStats ( ) ;
} else {
throw error ;
}
}
} else {
this . stats = makeDefaultStats ( ) ;
}
}
const errPtr = this . libzip . malloc ( 4 ) ;
try {
let flags = 0 ;
if ( typeof source === ` string ` && pathOptions . create ) flags |= this . libzip . ZIP _CREATE | this . libzip . ZIP _TRUNCATE ;
if ( opts . readOnly ) {
flags |= this . libzip . ZIP _RDONLY ;
this . readOnly = true ;
}
if ( typeof source === ` string ` ) {
this . zip = this . libzip . open ( npath . fromPortablePath ( source ) , flags , errPtr ) ;
} else {
const lzSource = this . allocateUnattachedSource ( source ) ;
try {
this . zip = this . libzip . openFromSource ( lzSource , flags , errPtr ) ;
this . lzSource = lzSource ;
} catch ( error ) {
this . libzip . source . free ( lzSource ) ;
throw error ;
}
}
if ( this . zip === 0 ) {
const error = this . libzip . struct . errorS ( ) ;
this . libzip . error . initWithCode ( error , this . libzip . getValue ( errPtr , ` i32 ` ) ) ;
throw this . makeLibzipError ( error ) ;
}
} finally {
this . libzip . free ( errPtr ) ;
}
this . listings . set ( PortablePath . root , new Set ( ) ) ;
const entryCount = this . libzip . getNumEntries ( this . zip , 0 ) ;
for ( let t = 0 ; t < entryCount ; ++ t ) {
const raw = this . libzip . getName ( this . zip , t , 0 ) ;
if ( ppath . isAbsolute ( raw ) ) continue ;
const p = ppath . resolve ( PortablePath . root , raw ) ;
this . registerEntry ( p , t ) ; // If the raw path is a directory, register it
// to prevent empty folder being skipped
if ( raw . endsWith ( ` / ` ) ) {
this . registerListing ( p ) ;
}
}
this . symlinkCount = this . libzip . ext . countSymlinks ( this . zip ) ;
if ( this . symlinkCount === - 1 ) throw this . makeLibzipError ( this . libzip . getError ( this . zip ) ) ;
this . ready = true ;
}
makeLibzipError ( error ) {
const errorCode = this . libzip . struct . errorCodeZip ( error ) ;
const strerror = this . libzip . error . strerror ( error ) ;
const libzipError = new LibzipError ( strerror , this . libzip . errors [ errorCode ] ) ; // This error should never come up because of the file source cache
if ( errorCode === this . libzip . errors . ZIP _ER _CHANGED ) throw new Error ( ` Assertion failed: Unexpected libzip error: ${ libzipError . message } ` ) ;
return libzipError ;
}
getExtractHint ( hints ) {
for ( const fileName of this . entries . keys ( ) ) {
const ext = this . pathUtils . extname ( fileName ) ;
if ( hints . relevantExtensions . has ( ext ) ) {
return true ;
}
}
return false ;
}
getAllFiles ( ) {
return Array . from ( this . entries . keys ( ) ) ;
}
getRealPath ( ) {
if ( ! this . path ) throw new Error ( ` ZipFS don't have real paths when loaded from a buffer ` ) ;
return this . path ;
}
getBufferAndClose() {
  // Finalize the archive and return its full content as a Buffer, closing
  // the ZipFS in the process. Only valid for Buffer-backed instances.
  this.prepareClose();
  if (!this.lzSource)
    throw new Error(`ZipFS was not created from a Buffer`);
  try {
    // Prevent close from cleaning up the source
    this.libzip.source.keep(this.lzSource);
    // Close the zip archive (this flushes pending changes into the source)
    if (this.libzip.close(this.zip) === -1)
      throw this.makeLibzipError(this.libzip.getError(this.zip));
    // Open the source for reading
    if (this.libzip.source.open(this.lzSource) === -1)
      throw this.makeLibzipError(this.libzip.source.error(this.lzSource));
    // Move to the end of source
    if (this.libzip.source.seek(this.lzSource, 0, 0, this.libzip.SEEK_END) === -1)
      throw this.makeLibzipError(this.libzip.source.error(this.lzSource));
    // Get the size of source
    const size = this.libzip.source.tell(this.lzSource);
    if (size === -1)
      throw this.makeLibzipError(this.libzip.source.error(this.lzSource));
    // Move to the start of source
    if (this.libzip.source.seek(this.lzSource, 0, 0, this.libzip.SEEK_SET) === -1)
      throw this.makeLibzipError(this.libzip.source.error(this.lzSource));
    const buffer = this.libzip.malloc(size);
    if (!buffer)
      throw new Error(`Couldn't allocate enough memory`);
    try {
      const rc = this.libzip.source.read(this.lzSource, buffer, size);
      if (rc === -1)
        throw this.makeLibzipError(this.libzip.source.error(this.lzSource));
      else if (rc < size)
        throw new Error(`Incomplete read`);
      else if (rc > size)
        throw new Error(`Overread`);
      // Copy out of the Emscripten heap before the allocation is freed below.
      const memory = this.libzip.HEAPU8.subarray(buffer, buffer + size);
      return Buffer.from(memory);
    } finally {
      this.libzip.free(buffer);
    }
  } finally {
    // Always release the source and mark the archive closed, even on error.
    this.libzip.source.close(this.lzSource);
    this.libzip.source.free(this.lzSource);
    this.ready = false;
  }
}
prepareClose ( ) {
if ( ! this . ready ) throw EBUSY ( ` archive closed, close ` ) ;
unwatchAllFiles ( this ) ;
}
saveAndClose() {
  // Flush pending changes to the on-disk archive and close the ZipFS.
  if (!this.path || !this.baseFs)
    throw new Error(`ZipFS cannot be saved and must be discarded when loaded from a buffer`);
  this.prepareClose();
  if (this.readOnly) {
    // Nothing can have changed; discarding is equivalent and cheaper.
    this.discardAndClose();
    return;
  }
  // Remember the permission bits of the preexisting file (if any) so they
  // can be restored after libzip rewrites the archive.
  const previousMod = this.baseFs.existsSync(this.path) ? this.baseFs.statSync(this.path).mode & 0o777 : null;
  const rc = this.libzip.close(this.zip);
  if (rc === -1)
    throw this.makeLibzipError(this.libzip.getError(this.zip));
  // this.libzip overrides the chmod when writing the archive, which is a weird
  // behavior I don't totally understand (plus the umask seems bogus in some
  // weird cases - maybe related to emscripten?)
  //
  // See also https://github.com/nih-at/libzip/issues/77
  if (previousMod === null)
    this.baseFs.chmodSync(this.path, this.stats.mode);
  else if (previousMod !== (this.baseFs.statSync(this.path).mode & 0o777))
    this.baseFs.chmodSync(this.path, previousMod);
  this.ready = false;
}
discardAndClose ( ) {
this . prepareClose ( ) ;
this . libzip . discard ( this . zip ) ;
this . ready = false ;
}
resolve ( p ) {
return ppath . resolve ( PortablePath . root , p ) ;
}
async openPromise ( p , flags , mode ) {
return this . openSync ( p , flags , mode ) ;
}
openSync ( p , flags , mode ) {
const fd = this . nextFd ++ ;
this . fds . set ( fd , {
cursor : 0 ,
p
} ) ;
return fd ;
}
hasOpenFileHandles ( ) {
return ! ! this . fds . size ;
}
async opendirPromise ( p , opts ) {
return this . opendirSync ( p , opts ) ;
}
opendirSync ( p , opts = { } ) {
const resolvedP = this . resolveFilename ( ` opendir ' ${ p } ' ` , p ) ;
if ( ! this . entries . has ( resolvedP ) && ! this . listings . has ( resolvedP ) ) throw ENOENT ( ` opendir ' ${ p } ' ` ) ;
const directoryListing = this . listings . get ( resolvedP ) ;
if ( ! directoryListing ) throw ENOTDIR ( ` opendir ' ${ p } ' ` ) ;
const entries = [ ... directoryListing ] ;
const fd = this . openSync ( resolvedP , ` r ` ) ;
const onClose = ( ) => {
this . closeSync ( fd ) ;
} ;
return opendir ( this , resolvedP , entries , {
onClose
} ) ;
}
async readPromise ( fd , buffer , offset , length , position ) {
return this . readSync ( fd , buffer , offset , length , position ) ;
}
readSync ( fd , buffer , offset = 0 , length = 0 , position = - 1 ) {
const entry = this . fds . get ( fd ) ;
if ( typeof entry === ` undefined ` ) throw EBADF ( ` read ` ) ;
let realPosition ;
if ( position === - 1 || position === null ) realPosition = entry . cursor ; else realPosition = position ;
const source = this . readFileSync ( entry . p ) ;
source . copy ( buffer , offset , realPosition , realPosition + length ) ;
const bytesRead = Math . max ( 0 , Math . min ( source . length - realPosition , length ) ) ;
if ( position === - 1 || position === null ) entry . cursor += bytesRead ;
return bytesRead ;
}
async writePromise ( fd , buffer , offset , length , position ) {
if ( typeof buffer === ` string ` ) {
return this . writeSync ( fd , buffer , position ) ;
} else {
return this . writeSync ( fd , buffer , offset , length , position ) ;
}
}
writeSync ( fd , buffer , offset , length , position ) {
const entry = this . fds . get ( fd ) ;
if ( typeof entry === ` undefined ` ) throw EBADF ( ` read ` ) ;
throw new Error ( ` Unimplemented ` ) ;
}
async closePromise ( fd ) {
return this . closeSync ( fd ) ;
}
closeSync ( fd ) {
const entry = this . fds . get ( fd ) ;
if ( typeof entry === ` undefined ` ) throw EBADF ( ` read ` ) ;
this . fds . delete ( fd ) ;
}
createReadStream(p, {
  encoding
} = {}) {
  if (p === null)
    throw new Error(`Unimplemented`);
  // Hold a descriptor so hasOpenFileHandles() reflects the live stream.
  const fd = this.openSync(p, `r`);
  const stream = Object.assign(new external_stream_namespaceObject.PassThrough({
    emitClose: true,
    autoDestroy: true,
    destroy: (error, callback) => {
      // Cancel the pending read and release the descriptor on teardown.
      clearImmediate(immediate);
      this.closeSync(fd);
      callback(error);
    }
  }), {
    close() {
      stream.destroy();
    },
    bytesRead: 0,
    path: p
  });
  // The whole file is read on the next tick and pushed through the
  // PassThrough in one shot; errors are forwarded by destroying the stream.
  const immediate = setImmediate(async () => {
    try {
      const data = await this.readFilePromise(p, encoding);
      stream.bytesRead = data.length;
      stream.end(data);
    } catch (error) {
      stream.destroy(error);
    }
  });
  return stream;
}
createWriteStream(p, {
  encoding
} = {}) {
  if (this.readOnly)
    throw EROFS(`open '${p}'`);
  if (p === null)
    throw new Error(`Unimplemented`);
  // Written chunks are buffered in memory and only committed to the archive
  // when the stream is destroyed (i.e. on end/close).
  const chunks = [];
  const fd = this.openSync(p, `w`);
  const stream = Object.assign(new external_stream_namespaceObject.PassThrough({
    autoDestroy: true,
    emitClose: true,
    destroy: (error, callback) => {
      try {
        if (error) {
          callback(error);
        } else {
          // Flush all accumulated chunks as a single writeFileSync call.
          this.writeFileSync(p, Buffer.concat(chunks), encoding);
          callback(null);
        }
      } catch (err) {
        callback(err);
      } finally {
        // Always release the descriptor, even if the flush failed.
        this.closeSync(fd);
      }
    }
  }), {
    bytesWritten: 0,
    path: p,
    close() {
      stream.destroy();
    }
  });
  stream.on(`data`, chunk => {
    const chunkBuffer = Buffer.from(chunk);
    stream.bytesWritten += chunkBuffer.length;
    chunks.push(chunkBuffer);
  });
  return stream;
}
async realpathPromise ( p ) {
return this . realpathSync ( p ) ;
}
realpathSync ( p ) {
const resolvedP = this . resolveFilename ( ` lstat ' ${ p } ' ` , p ) ;
if ( ! this . entries . has ( resolvedP ) && ! this . listings . has ( resolvedP ) ) throw ENOENT ( ` lstat ' ${ p } ' ` ) ;
return resolvedP ;
}
async existsPromise ( p ) {
return this . existsSync ( p ) ;
}
existsSync ( p ) {
if ( ! this . ready ) throw EBUSY ( ` archive closed, existsSync ' ${ p } ' ` ) ;
if ( this . symlinkCount === 0 ) {
const resolvedP = ppath . resolve ( PortablePath . root , p ) ;
return this . entries . has ( resolvedP ) || this . listings . has ( resolvedP ) ;
}
let resolvedP ;
try {
resolvedP = this . resolveFilename ( ` stat ' ${ p } ' ` , p ) ;
} catch ( error ) {
return false ;
}
return this . entries . has ( resolvedP ) || this . listings . has ( resolvedP ) ;
}
async accessPromise ( p , mode ) {
return this . accessSync ( p , mode ) ;
}
accessSync ( p , mode = external _fs _ . constants . F _OK ) {
const resolvedP = this . resolveFilename ( ` access ' ${ p } ' ` , p ) ;
if ( ! this . entries . has ( resolvedP ) && ! this . listings . has ( resolvedP ) ) throw ENOENT ( ` access ' ${ p } ' ` ) ;
if ( this . readOnly && mode & external _fs _ . constants . W _OK ) {
throw EROFS ( ` access ' ${ p } ' ` ) ;
}
}
async statPromise ( p , opts ) {
return this . statSync ( p , opts ) ;
}
statSync ( p , opts ) {
const resolvedP = this . resolveFilename ( ` stat ' ${ p } ' ` , p ) ;
if ( ! this . entries . has ( resolvedP ) && ! this . listings . has ( resolvedP ) ) throw ENOENT ( ` stat ' ${ p } ' ` ) ;
if ( p [ p . length - 1 ] === ` / ` && ! this . listings . has ( resolvedP ) ) throw ENOTDIR ( ` stat ' ${ p } ' ` ) ;
return this . statImpl ( ` stat ' ${ p } ' ` , resolvedP , opts ) ;
}
async fstatPromise ( fd , opts ) {
return this . fstatSync ( fd , opts ) ;
}
fstatSync ( fd , opts ) {
const entry = this . fds . get ( fd ) ;
if ( typeof entry === ` undefined ` ) throw EBADF ( ` fstatSync ` ) ;
const {
p
} = entry ;
const resolvedP = this . resolveFilename ( ` stat ' ${ p } ' ` , p ) ;
if ( ! this . entries . has ( resolvedP ) && ! this . listings . has ( resolvedP ) ) throw ENOENT ( ` stat ' ${ p } ' ` ) ;
if ( p [ p . length - 1 ] === ` / ` && ! this . listings . has ( resolvedP ) ) throw ENOTDIR ( ` stat ' ${ p } ' ` ) ;
return this . statImpl ( ` fstat ' ${ p } ' ` , resolvedP , opts ) ;
}
async lstatPromise ( p , opts ) {
return this . lstatSync ( p , opts ) ;
}
lstatSync ( p , opts ) {
const resolvedP = this . resolveFilename ( ` lstat ' ${ p } ' ` , p , false ) ;
if ( ! this . entries . has ( resolvedP ) && ! this . listings . has ( resolvedP ) ) throw ENOENT ( ` lstat ' ${ p } ' ` ) ;
if ( p [ p . length - 1 ] === ` / ` && ! this . listings . has ( resolvedP ) ) throw ENOTDIR ( ` lstat ' ${ p } ' ` ) ;
return this . statImpl ( ` lstat ' ${ p } ' ` , resolvedP , opts ) ;
}
statImpl(reason, p, opts = {}) {
  // Build a Stats-like object for an already-resolved path. Callers are
  // expected to have validated existence beforehand (hence `Unreachable`).
  const entry = this.entries.get(p); // File, or explicit directory
  if (typeof entry !== `undefined`) {
    const stat = this.libzip.struct.statS();
    const rc = this.libzip.statIndex(this.zip, entry, 0, 0, stat);
    if (rc === -1)
      throw this.makeLibzipError(this.libzip.getError(this.zip));
    const uid = this.stats.uid;
    const gid = this.stats.gid;
    // `>>> 0` reinterprets the 32-bit value as unsigned.
    const size = this.libzip.struct.statSize(stat) >>> 0;
    const blksize = 512;
    const blocks = Math.ceil(size / blksize);
    // Zip entries only store an mtime; mirror it into the other timestamps.
    const mtimeMs = (this.libzip.struct.statMtime(stat) >>> 0) * 1000;
    const atimeMs = mtimeMs;
    const birthtimeMs = mtimeMs;
    const ctimeMs = mtimeMs;
    const atime = new Date(atimeMs);
    const birthtime = new Date(birthtimeMs);
    const ctime = new Date(ctimeMs);
    const mtime = new Date(mtimeMs);
    const type = this.listings.has(p) ? constants_S_IFDIR : this.isSymbolicLink(entry) ? constants_S_IFLNK : constants_S_IFREG;
    const defaultMode = type === constants_S_IFDIR ? 0o755 : 0o644;
    // `&` binds tighter than `|`: file-type bits OR (permission bits & 0o777).
    const mode = type | this.getUnixMode(entry, defaultMode) & 0o777;
    const crc = this.libzip.struct.statCrc(stat);
    const statInstance = Object.assign(new StatEntry(), {
      uid,
      gid,
      size,
      blksize,
      blocks,
      atime,
      birthtime,
      ctime,
      mtime,
      atimeMs,
      birthtimeMs,
      ctimeMs,
      mtimeMs,
      mode,
      crc
    });
    return opts.bigint === true ? convertToBigIntStats(statInstance) : statInstance;
  } // Implicit directory
  if (this.listings.has(p)) {
    const uid = this.stats.uid;
    const gid = this.stats.gid;
    const size = 0;
    const blksize = 512;
    const blocks = 0;
    // Implicit directories have no zip entry: inherit the archive's mtime.
    const atimeMs = this.stats.mtimeMs;
    const birthtimeMs = this.stats.mtimeMs;
    const ctimeMs = this.stats.mtimeMs;
    const mtimeMs = this.stats.mtimeMs;
    const atime = new Date(atimeMs);
    const birthtime = new Date(birthtimeMs);
    const ctime = new Date(ctimeMs);
    const mtime = new Date(mtimeMs);
    const mode = constants_S_IFDIR | 0o755;
    const crc = 0;
    const statInstance = Object.assign(new StatEntry(), {
      uid,
      gid,
      size,
      blksize,
      blocks,
      atime,
      birthtime,
      ctime,
      mtime,
      atimeMs,
      birthtimeMs,
      ctimeMs,
      mtimeMs,
      mode,
      crc
    });
    return opts.bigint === true ? convertToBigIntStats(statInstance) : statInstance;
  }
  throw new Error(`Unreachable`);
}
getUnixMode(index, defaultMode) {
  // Read the Unix mode of an entry from its external attributes, falling
  // back to `defaultMode` when the entry wasn't written by a Unix archiver.
  const rc = this.libzip.file.getExternalAttributes(this.zip, index, 0, 0, this.libzip.uint08S, this.libzip.uint32S);
  if (rc === -1)
    throw this.makeLibzipError(this.libzip.getError(this.zip));
  const opsys = this.libzip.getValue(this.libzip.uint08S, `i8`) >>> 0;
  if (opsys !== this.libzip.ZIP_OPSYS_UNIX)
    return defaultMode;
  // The mode lives in the upper 16 bits of the external attributes.
  return this.libzip.getValue(this.libzip.uint32S, `i32`) >>> 16;
}
registerListing ( p ) {
let listing = this . listings . get ( p ) ;
if ( listing ) return listing ;
const parentListing = this . registerListing ( ppath . dirname ( p ) ) ;
listing = new Set ( ) ;
parentListing . add ( ppath . basename ( p ) ) ;
this . listings . set ( p , listing ) ;
return listing ;
}
registerEntry ( p , index ) {
const parentListing = this . registerListing ( ppath . dirname ( p ) ) ;
parentListing . add ( ppath . basename ( p ) ) ;
this . entries . set ( p , index ) ;
}
unregisterListing ( p ) {
this . listings . delete ( p ) ;
const parentListing = this . listings . get ( ppath . dirname ( p ) ) ;
parentListing === null || parentListing === void 0 ? void 0 : parentListing . delete ( ppath . basename ( p ) ) ;
}
unregisterEntry ( p ) {
this . unregisterListing ( p ) ;
const entry = this . entries . get ( p ) ;
this . entries . delete ( p ) ;
if ( typeof entry === ` undefined ` ) return ;
this . fileSources . delete ( entry ) ;
if ( this . isSymbolicLink ( entry ) ) {
this . symlinkCount -- ;
}
}
deleteEntry ( p , index ) {
this . unregisterEntry ( p ) ;
const rc = this . libzip . delete ( this . zip , index ) ;
if ( rc === - 1 ) {
throw this . makeLibzipError ( this . libzip . getError ( this . zip ) ) ;
}
}
resolveFilename(reason, p, resolveLastComponent = true) {
  // Normalize `p` against the archive root and resolve symlinks in every
  // component (the last one only when `resolveLastComponent` is set).
  // `reason` is the operation label embedded in thrown errors.
  if (!this.ready)
    throw EBUSY(`archive closed, ${reason}`);
  let resolvedP = ppath.resolve(PortablePath.root, p);
  if (resolvedP === `/`)
    return PortablePath.root;
  const fileIndex = this.entries.get(resolvedP);
  // Fast path: the normalized path is itself a known entry, so no
  // intermediate component needs validation - only a trailing symlink.
  if (resolveLastComponent && fileIndex !== undefined) {
    if (this.symlinkCount !== 0 && this.isSymbolicLink(fileIndex)) {
      const target = this.getFileSource(fileIndex).toString();
      return this.resolveFilename(reason, ppath.resolve(ppath.dirname(resolvedP), target), true);
    } else {
      return resolvedP;
    }
  }
  // Slow path: resolve the parent recursively (following its symlinks),
  // then keep replacing the last component while it points at a symlink.
  while (true) {
    const parentP = this.resolveFilename(reason, ppath.dirname(resolvedP), true);
    const isDir = this.listings.has(parentP);
    const doesExist = this.entries.has(parentP);
    if (!isDir && !doesExist)
      throw ENOENT(reason);
    if (!isDir)
      throw ENOTDIR(reason);
    resolvedP = ppath.resolve(parentP, ppath.basename(resolvedP));
    if (!resolveLastComponent || this.symlinkCount === 0)
      break;
    // locate() expects a path relative to the archive root (no leading /).
    const index = this.libzip.name.locate(this.zip, resolvedP.slice(1));
    if (index === -1)
      break;
    if (this.isSymbolicLink(index)) {
      const target = this.getFileSource(index).toString();
      resolvedP = ppath.resolve(ppath.dirname(resolvedP), target);
    } else {
      break;
    }
  }
  return resolvedP;
}
allocateBuffer ( content ) {
if ( ! Buffer . isBuffer ( content ) ) content = Buffer . from ( content ) ;
const buffer = this . libzip . malloc ( content . byteLength ) ;
if ( ! buffer ) throw new Error ( ` Couldn't allocate enough memory ` ) ; // Copy the file into the Emscripten heap
const heap = new Uint8Array ( this . libzip . HEAPU8 . buffer , buffer , content . byteLength ) ;
heap . set ( content ) ;
return {
buffer ,
byteLength : content . byteLength
} ;
}
allocateUnattachedSource ( content ) {
const error = this . libzip . struct . errorS ( ) ;
const {
buffer ,
byteLength
} = this . allocateBuffer ( content ) ;
const source = this . libzip . source . fromUnattachedBuffer ( buffer , byteLength , 0 , true , error ) ;
if ( source === 0 ) {
this . libzip . free ( error ) ;
throw this . makeLibzipError ( error ) ;
}
return source ;
}
allocateSource(content) {
  // Build a libzip source attached to this archive from arbitrary content.
  const {
    buffer,
    byteLength
  } = this.allocateBuffer(content);
  // freep=true: libzip takes ownership of the buffer on success.
  const source = this.libzip.source.fromBuffer(this.zip, buffer, byteLength, 0, true);
  if (source === 0) {
    // Ownership was never transferred, so release the buffer ourselves.
    this.libzip.free(buffer);
    throw this.makeLibzipError(this.libzip.getError(this.zip));
  }
  return source;
}
setFileSource(p, content) {
  // Write `content` as the entry at `p` (overwriting any existing entry)
  // and return the new entry index.
  const buffer = Buffer.isBuffer(content) ? content : Buffer.from(content);
  // libzip expects paths relative to the archive root.
  const target = ppath.relative(PortablePath.root, p);
  const lzSource = this.allocateSource(content);
  try {
    const newIndex = this.libzip.file.add(this.zip, target, lzSource, this.libzip.ZIP_FL_OVERWRITE);
    if (newIndex === -1)
      throw this.makeLibzipError(this.libzip.getError(this.zip));
    if (this.level !== `mixed`) {
      // Use store for level 0, and deflate for 1..9
      let method;
      if (this.level === 0)
        method = this.libzip.ZIP_CM_STORE;
      else
        method = this.libzip.ZIP_CM_DEFLATE;
      const rc = this.libzip.file.setCompression(this.zip, newIndex, 0, method, this.level);
      if (rc === -1) {
        throw this.makeLibzipError(this.libzip.getError(this.zip));
      }
    }
    // Cache the uncompressed content so later reads skip inflation.
    this.fileSources.set(newIndex, buffer);
    return newIndex;
  } catch (error) {
    // On any failure, release the source we allocated above.
    this.libzip.source.free(lzSource);
    throw error;
  }
}
isSymbolicLink(index) {
  // Cheap short-circuit: no symlinks anywhere in the archive.
  if (this.symlinkCount === 0)
    return false;
  const attrs = this.libzip.file.getExternalAttributes(this.zip, index, 0, 0, this.libzip.uint08S, this.libzip.uint32S);
  if (attrs === -1)
    throw this.makeLibzipError(this.libzip.getError(this.zip));
  // Only Unix-flavored entries can carry a symlink mode.
  const opsys = this.libzip.getValue(this.libzip.uint08S, `i8`) >>> 0;
  if (opsys !== this.libzip.ZIP_OPSYS_UNIX)
    return false;
  // The Unix mode is stored in the upper 16 bits of the attributes.
  const attributes = this.libzip.getValue(this.libzip.uint32S, `i32`) >>> 16;
  return (attributes & constants_S_IFMT) === constants_S_IFLNK;
}
getFileSource(index, opts = {
  asyncDecompress: false
}) {
  // Return the (decompressed) content of an entry. Returns a Buffer, or a
  // Promise of one when `asyncDecompress` is set and inflation is needed.
  const cachedFileSource = this.fileSources.get(index);
  if (typeof cachedFileSource !== `undefined`)
    return cachedFileSource;
  const stat = this.libzip.struct.statS();
  const rc = this.libzip.statIndex(this.zip, index, 0, 0, stat);
  if (rc === -1)
    throw this.makeLibzipError(this.libzip.getError(this.zip));
  // Read the COMPRESSED bytes; inflation happens on our side so it can
  // optionally be asynchronous.
  const size = this.libzip.struct.statCompSize(stat);
  const compressionMethod = this.libzip.struct.statCompMethod(stat);
  const buffer = this.libzip.malloc(size);
  try {
    const file = this.libzip.fopenIndex(this.zip, index, 0, this.libzip.ZIP_FL_COMPRESSED);
    if (file === 0)
      throw this.makeLibzipError(this.libzip.getError(this.zip));
    try {
      const rc = this.libzip.fread(file, buffer, size, 0);
      if (rc === -1)
        throw this.makeLibzipError(this.libzip.file.getError(file));
      else if (rc < size)
        throw new Error(`Incomplete read`);
      else if (rc > size)
        throw new Error(`Overread`);
      // Copy out of the Emscripten heap before it is freed below.
      const memory = this.libzip.HEAPU8.subarray(buffer, buffer + size);
      const data = Buffer.from(memory);
      if (compressionMethod === 0) {
        // Stored (uncompressed) entry: usable as-is.
        this.fileSources.set(index, data);
        return data;
      } else if (opts.asyncDecompress) {
        return new Promise((resolve, reject) => {
          external_zlib_default().inflateRaw(data, (error, result) => {
            if (error) {
              reject(error);
            } else {
              this.fileSources.set(index, result);
              resolve(result);
            }
          });
        });
      } else {
        const decompressedData = external_zlib_default().inflateRawSync(data);
        this.fileSources.set(index, decompressedData);
        return decompressedData;
      }
    } finally {
      this.libzip.fclose(file);
    }
  } finally {
    this.libzip.free(buffer);
  }
}
async chmodPromise(p, mask) {
  return this.chmodSync(p, mask);
}
chmodSync(p, mask) {
  if (this.readOnly)
    throw EROFS(`chmod '${p}'`);
  // We don't allow to make the extracted entries group-writable
  mask &= 0o755;
  const resolvedP = this.resolveFilename(`chmod '${p}'`, p, false);
  const entry = this.entries.get(resolvedP);
  if (typeof entry === `undefined`)
    throw new Error(`Assertion failed: The entry should have been registered (${resolvedP})`);
  // Preserve the file-type bits; replace only the permission bits.
  const oldMod = this.getUnixMode(entry, constants_S_IFREG | 0o000);
  const newMod = oldMod & ~0o777 | mask;
  // The Unix mode lives in the upper 16 bits of the external attributes.
  const rc = this.libzip.file.setExternalAttributes(this.zip, entry, 0, 0, this.libzip.ZIP_OPSYS_UNIX, newMod << 16);
  if (rc === -1) {
    throw this.makeLibzipError(this.libzip.getError(this.zip));
  }
}
async chownPromise ( p , uid , gid ) {
return this . chownSync ( p , uid , gid ) ;
}
chownSync ( p , uid , gid ) {
throw new Error ( ` Unimplemented ` ) ;
}
async renamePromise ( oldP , newP ) {
return this . renameSync ( oldP , newP ) ;
}
renameSync ( oldP , newP ) {
throw new Error ( ` Unimplemented ` ) ;
}
async copyFilePromise ( sourceP , destP , flags ) {
const {
indexSource ,
indexDest ,
resolvedDestP
} = this . prepareCopyFile ( sourceP , destP , flags ) ;
const source = await this . getFileSource ( indexSource , {
asyncDecompress : true
} ) ;
const newIndex = this . setFileSource ( resolvedDestP , source ) ;
if ( newIndex !== indexDest ) {
this . registerEntry ( resolvedDestP , newIndex ) ;
}
}
copyFileSync ( sourceP , destP , flags = 0 ) {
const {
indexSource ,
indexDest ,
resolvedDestP
} = this . prepareCopyFile ( sourceP , destP , flags ) ;
const source = this . getFileSource ( indexSource ) ;
const newIndex = this . setFileSource ( resolvedDestP , source ) ;
if ( newIndex !== indexDest ) {
this . registerEntry ( resolvedDestP , newIndex ) ;
}
}
prepareCopyFile ( sourceP , destP , flags = 0 ) {
if ( this . readOnly ) throw EROFS ( ` copyfile ' ${ sourceP } -> ' ${ destP } ' ` ) ;
if ( ( flags & external _fs _ . constants . COPYFILE _FICLONE _FORCE ) !== 0 ) throw ENOSYS ( ` unsupported clone operation ` , ` copyfile ' ${ sourceP } ' -> ${ destP } ' ` ) ;
const resolvedSourceP = this . resolveFilename ( ` copyfile ' ${ sourceP } -> ${ destP } ' ` , sourceP ) ;
const indexSource = this . entries . get ( resolvedSourceP ) ;
if ( typeof indexSource === ` undefined ` ) throw EINVAL ( ` copyfile ' ${ sourceP } ' -> ' ${ destP } ' ` ) ;
const resolvedDestP = this . resolveFilename ( ` copyfile ' ${ sourceP } ' -> ${ destP } ' ` , destP ) ;
const indexDest = this . entries . get ( resolvedDestP ) ;
if ( ( flags & ( external _fs _ . constants . COPYFILE _EXCL | external _fs _ . constants . COPYFILE _FICLONE _FORCE ) ) !== 0 && typeof indexDest !== ` undefined ` ) throw EEXIST ( ` copyfile ' ${ sourceP } ' -> ' ${ destP } ' ` ) ;
return {
indexSource ,
resolvedDestP ,
indexDest
} ;
}
async appendFilePromise ( p , content , opts ) {
if ( this . readOnly ) throw EROFS ( ` open ' ${ p } ' ` ) ;
if ( typeof opts === ` undefined ` ) opts = {
flag : ` a `
} ; else if ( typeof opts === ` string ` ) opts = {
flag : ` a ` ,
encoding : opts
} ; else if ( typeof opts . flag === ` undefined ` ) opts = {
flag : ` a ` ,
... opts
} ;
return this . writeFilePromise ( p , content , opts ) ;
}
appendFileSync ( p , content , opts = { } ) {
if ( this . readOnly ) throw EROFS ( ` open ' ${ p } ' ` ) ;
if ( typeof opts === ` undefined ` ) opts = {
flag : ` a `
} ; else if ( typeof opts === ` string ` ) opts = {
flag : ` a ` ,
encoding : opts
} ; else if ( typeof opts . flag === ` undefined ` ) opts = {
flag : ` a ` ,
... opts
} ;
return this . writeFileSync ( p , content , opts ) ;
}
async writeFilePromise ( p , content , opts ) {
const {
encoding ,
index ,
resolvedP
} = this . prepareWriteFile ( p , opts ) ;
if ( index !== undefined && typeof opts === ` object ` && opts . flag && opts . flag . includes ( ` a ` ) ) content = Buffer . concat ( [ await this . getFileSource ( index , {
asyncDecompress : true
} ) , Buffer . from ( content ) ] ) ;
if ( encoding !== null ) content = content . toString ( encoding ) ;
const newIndex = this . setFileSource ( resolvedP , content ) ;
if ( newIndex !== index ) {
this . registerEntry ( resolvedP , newIndex ) ;
}
}
writeFileSync ( p , content , opts ) {
const {
encoding ,
index ,
resolvedP
} = this . prepareWriteFile ( p , opts ) ;
if ( index !== undefined && typeof opts === ` object ` && opts . flag && opts . flag . includes ( ` a ` ) ) content = Buffer . concat ( [ this . getFileSource ( index ) , Buffer . from ( content ) ] ) ;
if ( encoding !== null ) content = content . toString ( encoding ) ;
const newIndex = this . setFileSource ( resolvedP , content ) ;
if ( newIndex !== index ) {
this . registerEntry ( resolvedP , newIndex ) ;
}
}
prepareWriteFile ( p , opts ) {
if ( typeof p !== ` string ` ) throw EBADF ( ` read ` ) ;
if ( this . readOnly ) throw EROFS ( ` open ' ${ p } ' ` ) ;
const resolvedP = this . resolveFilename ( ` open ' ${ p } ' ` , p ) ;
if ( this . listings . has ( resolvedP ) ) throw EISDIR ( ` open ' ${ p } ' ` ) ;
let encoding = null ;
if ( typeof opts === ` string ` ) encoding = opts ; else if ( typeof opts === ` object ` && opts . encoding ) encoding = opts . encoding ;
const index = this . entries . get ( resolvedP ) ;
return {
encoding ,
resolvedP ,
index
} ;
}
async unlinkPromise ( p ) {
return this . unlinkSync ( p ) ;
}
unlinkSync ( p ) {
if ( this . readOnly ) throw EROFS ( ` unlink ' ${ p } ' ` ) ;
const resolvedP = this . resolveFilename ( ` unlink ' ${ p } ' ` , p ) ;
if ( this . listings . has ( resolvedP ) ) throw EISDIR ( ` unlink ' ${ p } ' ` ) ;
const index = this . entries . get ( resolvedP ) ;
if ( typeof index === ` undefined ` ) throw EINVAL ( ` unlink ' ${ p } ' ` ) ;
this . deleteEntry ( resolvedP , index ) ;
}
async utimesPromise ( p , atime , mtime ) {
return this . utimesSync ( p , atime , mtime ) ;
}
utimesSync ( p , atime , mtime ) {
if ( this . readOnly ) throw EROFS ( ` utimes ' ${ p } ' ` ) ;
const resolvedP = this . resolveFilename ( ` utimes ' ${ p } ' ` , p ) ;
this . utimesImpl ( resolvedP , mtime ) ;
}
async lutimesPromise ( p , atime , mtime ) {
return this . lutimesSync ( p , atime , mtime ) ;
}
lutimesSync ( p , atime , mtime ) {
if ( this . readOnly ) throw EROFS ( ` lutimes ' ${ p } ' ` ) ;
const resolvedP = this . resolveFilename ( ` utimes ' ${ p } ' ` , p , false ) ;
this . utimesImpl ( resolvedP , mtime ) ;
}
utimesImpl ( resolvedP , mtime ) {
if ( this . listings . has ( resolvedP ) ) if ( ! this . entries . has ( resolvedP ) ) this . hydrateDirectory ( resolvedP ) ;
const entry = this . entries . get ( resolvedP ) ;
if ( entry === undefined ) throw new Error ( ` Unreachable ` ) ;
const rc = this . libzip . file . setMtime ( this . zip , entry , 0 , toUnixTimestamp ( mtime ) , 0 ) ;
if ( rc === - 1 ) {
throw this . makeLibzipError ( this . libzip . getError ( this . zip ) ) ;
}
}
async mkdirPromise ( p , opts ) {
return this . mkdirSync ( p , opts ) ;
}
mkdirSync ( p , {
mode = 0o755 ,
recursive = false
} = { } ) {
if ( recursive ) {
this . mkdirpSync ( p , {
chmod : mode
} ) ;
return ;
}
if ( this . readOnly ) throw EROFS ( ` mkdir ' ${ p } ' ` ) ;
const resolvedP = this . resolveFilename ( ` mkdir ' ${ p } ' ` , p ) ;
if ( this . entries . has ( resolvedP ) || this . listings . has ( resolvedP ) ) throw EEXIST ( ` mkdir ' ${ p } ' ` ) ;
this . hydrateDirectory ( resolvedP ) ;
this . chmodSync ( resolvedP , mode ) ;
}
async rmdirPromise ( p , opts ) {
return this . rmdirSync ( p , opts ) ;
}
rmdirSync ( p , {
recursive = false
} = { } ) {
if ( this . readOnly ) throw EROFS ( ` rmdir ' ${ p } ' ` ) ;
if ( recursive ) {
this . removeSync ( p ) ;
return ;
}
const resolvedP = this . resolveFilename ( ` rmdir ' ${ p } ' ` , p ) ;
const directoryListing = this . listings . get ( resolvedP ) ;
if ( ! directoryListing ) throw ENOTDIR ( ` rmdir ' ${ p } ' ` ) ;
if ( directoryListing . size > 0 ) throw ENOTEMPTY ( ` rmdir ' ${ p } ' ` ) ;
const index = this . entries . get ( resolvedP ) ;
if ( typeof index === ` undefined ` ) throw EINVAL ( ` rmdir ' ${ p } ' ` ) ;
this . deleteEntry ( p , index ) ;
}
hydrateDirectory ( resolvedP ) {
const index = this . libzip . dir . add ( this . zip , ppath . relative ( PortablePath . root , resolvedP ) ) ;
if ( index === - 1 ) throw this . makeLibzipError ( this . libzip . getError ( this . zip ) ) ;
this . registerListing ( resolvedP ) ;
this . registerEntry ( resolvedP , index ) ;
return index ;
}
async linkPromise ( existingP , newP ) {
return this . linkSync ( existingP , newP ) ;
}
linkSync ( existingP , newP ) {
// Zip archives don't support hard links:
// https://stackoverflow.com/questions/8859616/are-hard-links-possible-within-a-zip-archive
throw EOPNOTSUPP ( ` link ' ${ existingP } ' -> ' ${ newP } ' ` ) ;
}
async symlinkPromise ( target , p ) {
return this . symlinkSync ( target , p ) ;
}
symlinkSync ( target , p ) {
if ( this . readOnly ) throw EROFS ( ` symlink ' ${ target } ' -> ' ${ p } ' ` ) ;
const resolvedP = this . resolveFilename ( ` symlink ' ${ target } ' -> ' ${ p } ' ` , p ) ;
if ( this . listings . has ( resolvedP ) ) throw EISDIR ( ` symlink ' ${ target } ' -> ' ${ p } ' ` ) ;
if ( this . entries . has ( resolvedP ) ) throw EEXIST ( ` symlink ' ${ target } ' -> ' ${ p } ' ` ) ;
const index = this . setFileSource ( resolvedP , target ) ;
this . registerEntry ( resolvedP , index ) ;
const rc = this . libzip . file . setExternalAttributes ( this . zip , index , 0 , 0 , this . libzip . ZIP _OPSYS _UNIX , ( constants _S _IFLNK | 0o777 ) << 16 ) ;
if ( rc === - 1 ) throw this . makeLibzipError ( this . libzip . getError ( this . zip ) ) ;
this . symlinkCount += 1 ;
}
async readFilePromise ( p , encoding ) {
// This is messed up regarding the TS signatures
if ( typeof encoding === ` object ` ) // @ts-expect-error
encoding = encoding ? encoding . encoding : undefined ;
const data = await this . readFileBuffer ( p , {
asyncDecompress : true
} ) ;
return encoding ? data . toString ( encoding ) : data ;
}
readFileSync ( p , encoding ) {
// This is messed up regarding the TS signatures
if ( typeof encoding === ` object ` ) // @ts-expect-error
encoding = encoding ? encoding . encoding : undefined ;
const data = this . readFileBuffer ( p ) ;
return encoding ? data . toString ( encoding ) : data ;
}
readFileBuffer ( p , opts = {
asyncDecompress : false
} ) {
const resolvedP = this . resolveFilename ( ` open ' ${ p } ' ` , p ) ;
if ( ! this . entries . has ( resolvedP ) && ! this . listings . has ( resolvedP ) ) throw ENOENT ( ` open ' ${ p } ' ` ) ; // Ensures that the last component is a directory, if the user said so (even if it is we'll throw right after with EISDIR anyway)
if ( p [ p . length - 1 ] === ` / ` && ! this . listings . has ( resolvedP ) ) throw ENOTDIR ( ` open ' ${ p } ' ` ) ;
if ( this . listings . has ( resolvedP ) ) throw EISDIR ( ` read ` ) ;
const entry = this . entries . get ( resolvedP ) ;
if ( entry === undefined ) throw new Error ( ` Unreachable ` ) ;
return this . getFileSource ( entry , opts ) ;
}
async readdirPromise ( p , opts ) {
return this . readdirSync ( p , opts ) ;
}
readdirSync ( p , opts ) {
const resolvedP = this . resolveFilename ( ` scandir ' ${ p } ' ` , p ) ;
if ( ! this . entries . has ( resolvedP ) && ! this . listings . has ( resolvedP ) ) throw ENOENT ( ` scandir ' ${ p } ' ` ) ;
const directoryListing = this . listings . get ( resolvedP ) ;
if ( ! directoryListing ) throw ENOTDIR ( ` scandir ' ${ p } ' ` ) ;
const entries = [ ... directoryListing ] ;
if ( ! ( opts === null || opts === void 0 ? void 0 : opts . withFileTypes ) ) return entries ;
return entries . map ( name => {
return Object . assign ( this . statImpl ( ` lstat ` , ppath . join ( p , name ) ) , {
name
} ) ;
} ) ;
}
async readlinkPromise ( p ) {
const entry = this . prepareReadlink ( p ) ;
return ( await this . getFileSource ( entry , {
asyncDecompress : true
} ) ) . toString ( ) ;
}
readlinkSync ( p ) {
const entry = this . prepareReadlink ( p ) ;
return this . getFileSource ( entry ) . toString ( ) ;
}
prepareReadlink ( p ) {
const resolvedP = this . resolveFilename ( ` readlink ' ${ p } ' ` , p , false ) ;
if ( ! this . entries . has ( resolvedP ) && ! this . listings . has ( resolvedP ) ) throw ENOENT ( ` readlink ' ${ p } ' ` ) ; // Ensure that the last component is a directory (if it is we'll throw right after with EISDIR anyway)
if ( p [ p . length - 1 ] === ` / ` && ! this . listings . has ( resolvedP ) ) throw ENOTDIR ( ` open ' ${ p } ' ` ) ;
if ( this . listings . has ( resolvedP ) ) throw EINVAL ( ` readlink ' ${ p } ' ` ) ;
const entry = this . entries . get ( resolvedP ) ;
if ( entry === undefined ) throw new Error ( ` Unreachable ` ) ;
if ( ! this . isSymbolicLink ( entry ) ) throw EINVAL ( ` readlink ' ${ p } ' ` ) ;
return entry ;
}
async truncatePromise ( p , len = 0 ) {
const resolvedP = this . resolveFilename ( ` open ' ${ p } ' ` , p ) ;
const index = this . entries . get ( resolvedP ) ;
if ( typeof index === ` undefined ` ) throw EINVAL ( ` open ' ${ p } ' ` ) ;
const source = await this . getFileSource ( index , {
asyncDecompress : true
} ) ;
const truncated = Buffer . alloc ( len , 0x00 ) ;
source . copy ( truncated ) ;
return await this . writeFilePromise ( p , truncated ) ;
}
truncateSync ( p , len = 0 ) {
const resolvedP = this . resolveFilename ( ` open ' ${ p } ' ` , p ) ;
const index = this . entries . get ( resolvedP ) ;
if ( typeof index === ` undefined ` ) throw EINVAL ( ` open ' ${ p } ' ` ) ;
const source = this . getFileSource ( index ) ;
const truncated = Buffer . alloc ( len , 0x00 ) ;
source . copy ( truncated ) ;
return this . writeFileSync ( p , truncated ) ;
}
watch ( p , a , b ) {
let persistent ;
switch ( typeof a ) {
case ` function ` :
case ` string ` :
case ` undefined ` :
{
persistent = true ;
}
break ;
default :
{
( {
persistent = true
} = a ) ;
}
break ;
}
if ( ! persistent ) return {
on : ( ) => { } ,
close : ( ) => { }
} ;
const interval = setInterval ( ( ) => { } , 24 * 60 * 60 * 1000 ) ;
return {
on : ( ) => { } ,
close : ( ) => {
clearInterval ( interval ) ;
}
} ;
}
watchFile ( p , a , b ) {
const resolvedP = this . resolveFilename ( ` open ' ${ p } ' ` , p ) ;
return watchFile ( this , resolvedP , a , b ) ;
}
unwatchFile ( p , cb ) {
const resolvedP = this . resolveFilename ( ` open ' ${ p } ' ` , p ) ;
return unwatchFile ( this , resolvedP , cb ) ;
}
}
; // CONCATENATED MODULE: ../yarnpkg-fslib/sources/ZipOpenFS.ts
// File descriptors handed out for files opened inside an archive carry
// this high marker bit, so they can be told apart from regular fds.
const ZIP_FD = 0x80000000;

// Extension delimiting the archive part of a path.
const DOT_ZIP = `.zip`;
/**
 * Extracts the archive part (ending in the first `.zip`) from a path.
 *
 * The indexOf-based implementation is ~3.7x faster than a RegExp-based
 * implementation.
 */
const getArchivePart = path => {
  let matchIdx = path.indexOf(DOT_ZIP);
  if (matchIdx <= 0)
    return null;

  let archiveEnd = matchIdx;
  while (matchIdx >= 0) {
    archiveEnd = matchIdx + DOT_ZIP.length;
    if (path[archiveEnd] === ppath.sep)
      break;

    // Disallow files named ".zip"
    if (path[matchIdx - 1] === ppath.sep)
      return null;

    matchIdx = path.indexOf(DOT_ZIP, archiveEnd);
  }

  // The path either has to end in ".zip" or contain an archive subpath
  // (".zip/...")
  if (path.length > archiveEnd && path[archiveEnd] !== ppath.sep)
    return null;

  return path.slice(0, archiveEnd);
};
/**
 * A `FakeFS` layer that makes zip archives transparently traversable:
 * paths that cross a `.zip` boundary are routed to a lazily-created
 * (and optionally cached) `ZipFS` mounted on that archive, while every
 * other path goes straight to `baseFs`.
 *
 * Fixes applied in this revision:
 *  - cross-device renames now report the standard `EXDEV` code (the
 *    previous `EEXDEV` is not a real errno, so no caller could match it);
 *  - `renameSync`'s cross-device branch now throws synchronously (an
 *    `async` callback previously made a sync API return a floating
 *    rejected promise);
 *  - `copyFile*` implements `COPYFILE_EXCL` by checking the destination
 *    on the destination fs (it previously checked the source — which by
 *    definition exists — and did so on the wrong fs for zip sources);
 *  - the `EXDEV` clone error message had an unbalanced quote.
 */
class ZipOpenFS extends BasePortableFakeFS {
  constructor({libzip, baseFs = new NodeFS(), filter = null, maxOpenFiles = Infinity, readOnlyArchives = false, useCache = true, maxAge = 5000}) {
    super();
    // Maps our synthetic fds (tagged with ZIP_FD) to [zipFs, realFd].
    this.fdMap = new Map();
    this.nextFd = 3;
    // Memoization of which paths have been found to be (or not be)
    // zip archives, to avoid repeated lstat calls in `findZip`.
    this.isZip = new Set();
    this.notZip = new Set();
    this.realPaths = new Map();
    this.limitOpenFilesTimeout = null;
    this.libzipFactory = typeof libzip !== `function` ? () => libzip : libzip;
    this.baseFs = baseFs;
    this.zipInstances = useCache ? new Map() : null;
    this.filter = filter;
    this.maxOpenFiles = maxOpenFiles;
    this.readOnlyArchives = readOnlyArchives;
    this.maxAge = maxAge;
  }
  /**
   * Runs `fn` against a fresh ZipOpenFS and guarantees that all cached
   * archives are flushed and closed afterwards.
   */
  static async openPromise(fn, opts) {
    const zipOpenFs = new ZipOpenFS(opts);
    try {
      return await fn(zipOpenFs);
    } finally {
      zipOpenFs.saveAndClose();
    }
  }
  // The libzip instance is created lazily on first use.
  get libzip() {
    if (typeof this.libzipInstance === `undefined`)
      this.libzipInstance = this.libzipFactory();
    return this.libzipInstance;
  }
  getExtractHint(hints) {
    return this.baseFs.getExtractHint(hints);
  }
  getRealPath() {
    return this.baseFs.getRealPath();
  }
  /** Flushes and closes every cached archive, persisting their changes. */
  saveAndClose() {
    unwatchAllFiles(this);
    if (this.zipInstances) {
      for (const [path, {zipFs}] of this.zipInstances.entries()) {
        zipFs.saveAndClose();
        this.zipInstances.delete(path);
      }
    }
  }
  /** Closes every cached archive, discarding any pending changes. */
  discardAndClose() {
    unwatchAllFiles(this);
    if (this.zipInstances) {
      for (const [path, {zipFs}] of this.zipInstances.entries()) {
        zipFs.discardAndClose();
        this.zipInstances.delete(path);
      }
    }
  }
  resolve(p) {
    return this.baseFs.resolve(p);
  }
  /** Wraps an archive-internal fd into one of our ZIP_FD-tagged fds. */
  remapFd(zipFs, fd) {
    const remappedFd = this.nextFd++ | ZIP_FD;
    this.fdMap.set(remappedFd, [zipFs, fd]);
    return remappedFd;
  }
  async openPromise(p, flags, mode) {
    return await this.makeCallPromise(p, async () => {
      return await this.baseFs.openPromise(p, flags, mode);
    }, async (zipFs, {subPath}) => {
      return this.remapFd(zipFs, await zipFs.openPromise(subPath, flags, mode));
    });
  }
  openSync(p, flags, mode) {
    return this.makeCallSync(p, () => {
      return this.baseFs.openSync(p, flags, mode);
    }, (zipFs, {subPath}) => {
      return this.remapFd(zipFs, zipFs.openSync(subPath, flags, mode));
    });
  }
  async opendirPromise(p, opts) {
    return await this.makeCallPromise(p, async () => {
      return await this.baseFs.opendirPromise(p, opts);
    }, async (zipFs, {subPath}) => {
      return await zipFs.opendirPromise(subPath, opts);
    }, {
      requireSubpath: false,
    });
  }
  opendirSync(p, opts) {
    return this.makeCallSync(p, () => {
      return this.baseFs.opendirSync(p, opts);
    }, (zipFs, {subPath}) => {
      return zipFs.opendirSync(subPath, opts);
    }, {
      requireSubpath: false,
    });
  }
  async readPromise(fd, buffer, offset, length, position) {
    if ((fd & ZIP_FD) === 0)
      return await this.baseFs.readPromise(fd, buffer, offset, length, position);
    const entry = this.fdMap.get(fd);
    if (typeof entry === `undefined`)
      throw EBADF(`read`);
    const [zipFs, realFd] = entry;
    return await zipFs.readPromise(realFd, buffer, offset, length, position);
  }
  readSync(fd, buffer, offset, length, position) {
    if ((fd & ZIP_FD) === 0)
      return this.baseFs.readSync(fd, buffer, offset, length, position);
    const entry = this.fdMap.get(fd);
    if (typeof entry === `undefined`)
      throw EBADF(`readSync`);
    const [zipFs, realFd] = entry;
    return zipFs.readSync(realFd, buffer, offset, length, position);
  }
  async writePromise(fd, buffer, offset, length, position) {
    if ((fd & ZIP_FD) === 0) {
      // String writes only take an (optional) position argument.
      if (typeof buffer === `string`) {
        return await this.baseFs.writePromise(fd, buffer, offset);
      } else {
        return await this.baseFs.writePromise(fd, buffer, offset, length, position);
      }
    }
    const entry = this.fdMap.get(fd);
    if (typeof entry === `undefined`)
      throw EBADF(`write`);
    const [zipFs, realFd] = entry;
    if (typeof buffer === `string`) {
      return await zipFs.writePromise(realFd, buffer, offset);
    } else {
      return await zipFs.writePromise(realFd, buffer, offset, length, position);
    }
  }
  writeSync(fd, buffer, offset, length, position) {
    if ((fd & ZIP_FD) === 0) {
      if (typeof buffer === `string`) {
        return this.baseFs.writeSync(fd, buffer, offset);
      } else {
        return this.baseFs.writeSync(fd, buffer, offset, length, position);
      }
    }
    const entry = this.fdMap.get(fd);
    if (typeof entry === `undefined`)
      throw EBADF(`writeSync`);
    const [zipFs, realFd] = entry;
    if (typeof buffer === `string`) {
      return zipFs.writeSync(realFd, buffer, offset);
    } else {
      return zipFs.writeSync(realFd, buffer, offset, length, position);
    }
  }
  async closePromise(fd) {
    if ((fd & ZIP_FD) === 0)
      return await this.baseFs.closePromise(fd);
    const entry = this.fdMap.get(fd);
    if (typeof entry === `undefined`)
      throw EBADF(`close`);
    this.fdMap.delete(fd);
    const [zipFs, realFd] = entry;
    return await zipFs.closePromise(realFd);
  }
  closeSync(fd) {
    if ((fd & ZIP_FD) === 0)
      return this.baseFs.closeSync(fd);
    const entry = this.fdMap.get(fd);
    if (typeof entry === `undefined`)
      throw EBADF(`closeSync`);
    this.fdMap.delete(fd);
    const [zipFs, realFd] = entry;
    return zipFs.closeSync(realFd);
  }
  createReadStream(p, opts) {
    if (p === null)
      return this.baseFs.createReadStream(p, opts);
    return this.makeCallSync(p, () => {
      return this.baseFs.createReadStream(p, opts);
    }, (zipFs, {subPath}) => {
      return zipFs.createReadStream(subPath, opts);
    });
  }
  createWriteStream(p, opts) {
    if (p === null)
      return this.baseFs.createWriteStream(p, opts);
    return this.makeCallSync(p, () => {
      return this.baseFs.createWriteStream(p, opts);
    }, (zipFs, {subPath}) => {
      return zipFs.createWriteStream(subPath, opts);
    });
  }
  async realpathPromise(p) {
    return await this.makeCallPromise(p, async () => {
      return await this.baseFs.realpathPromise(p);
    }, async (zipFs, {archivePath, subPath}) => {
      // The real path of the archive itself is cached, since archives
      // are immutable on disk while mounted.
      let realArchivePath = this.realPaths.get(archivePath);
      if (typeof realArchivePath === `undefined`) {
        realArchivePath = await this.baseFs.realpathPromise(archivePath);
        this.realPaths.set(archivePath, realArchivePath);
      }
      return this.pathUtils.join(realArchivePath, this.pathUtils.relative(PortablePath.root, await zipFs.realpathPromise(subPath)));
    });
  }
  realpathSync(p) {
    return this.makeCallSync(p, () => {
      return this.baseFs.realpathSync(p);
    }, (zipFs, {archivePath, subPath}) => {
      let realArchivePath = this.realPaths.get(archivePath);
      if (typeof realArchivePath === `undefined`) {
        realArchivePath = this.baseFs.realpathSync(archivePath);
        this.realPaths.set(archivePath, realArchivePath);
      }
      return this.pathUtils.join(realArchivePath, this.pathUtils.relative(PortablePath.root, zipFs.realpathSync(subPath)));
    });
  }
  async existsPromise(p) {
    return await this.makeCallPromise(p, async () => {
      return await this.baseFs.existsPromise(p);
    }, async (zipFs, {subPath}) => {
      return await zipFs.existsPromise(subPath);
    });
  }
  existsSync(p) {
    return this.makeCallSync(p, () => {
      return this.baseFs.existsSync(p);
    }, (zipFs, {subPath}) => {
      return zipFs.existsSync(subPath);
    });
  }
  async accessPromise(p, mode) {
    return await this.makeCallPromise(p, async () => {
      return await this.baseFs.accessPromise(p, mode);
    }, async (zipFs, {subPath}) => {
      return await zipFs.accessPromise(subPath, mode);
    });
  }
  accessSync(p, mode) {
    return this.makeCallSync(p, () => {
      return this.baseFs.accessSync(p, mode);
    }, (zipFs, {subPath}) => {
      return zipFs.accessSync(subPath, mode);
    });
  }
  async statPromise(p, opts) {
    return await this.makeCallPromise(p, async () => {
      return await this.baseFs.statPromise(p, opts);
    }, async (zipFs, {subPath}) => {
      return await zipFs.statPromise(subPath, opts);
    });
  }
  statSync(p, opts) {
    return this.makeCallSync(p, () => {
      return this.baseFs.statSync(p, opts);
    }, (zipFs, {subPath}) => {
      return zipFs.statSync(subPath, opts);
    });
  }
  async fstatPromise(fd, opts) {
    if ((fd & ZIP_FD) === 0)
      return this.baseFs.fstatPromise(fd, opts);
    const entry = this.fdMap.get(fd);
    if (typeof entry === `undefined`)
      throw EBADF(`fstat`);
    const [zipFs, realFd] = entry;
    return zipFs.fstatPromise(realFd, opts);
  }
  fstatSync(fd, opts) {
    if ((fd & ZIP_FD) === 0)
      return this.baseFs.fstatSync(fd, opts);
    const entry = this.fdMap.get(fd);
    if (typeof entry === `undefined`)
      throw EBADF(`fstatSync`);
    const [zipFs, realFd] = entry;
    return zipFs.fstatSync(realFd, opts);
  }
  async lstatPromise(p, opts) {
    return await this.makeCallPromise(p, async () => {
      return await this.baseFs.lstatPromise(p, opts);
    }, async (zipFs, {subPath}) => {
      return await zipFs.lstatPromise(subPath, opts);
    });
  }
  lstatSync(p, opts) {
    return this.makeCallSync(p, () => {
      return this.baseFs.lstatSync(p, opts);
    }, (zipFs, {subPath}) => {
      return zipFs.lstatSync(subPath, opts);
    });
  }
  async chmodPromise(p, mask) {
    return await this.makeCallPromise(p, async () => {
      return await this.baseFs.chmodPromise(p, mask);
    }, async (zipFs, {subPath}) => {
      return await zipFs.chmodPromise(subPath, mask);
    });
  }
  chmodSync(p, mask) {
    return this.makeCallSync(p, () => {
      return this.baseFs.chmodSync(p, mask);
    }, (zipFs, {subPath}) => {
      return zipFs.chmodSync(subPath, mask);
    });
  }
  async chownPromise(p, uid, gid) {
    return await this.makeCallPromise(p, async () => {
      return await this.baseFs.chownPromise(p, uid, gid);
    }, async (zipFs, {subPath}) => {
      return await zipFs.chownPromise(subPath, uid, gid);
    });
  }
  chownSync(p, uid, gid) {
    return this.makeCallSync(p, () => {
      return this.baseFs.chownSync(p, uid, gid);
    }, (zipFs, {subPath}) => {
      return zipFs.chownSync(subPath, uid, gid);
    });
  }
  /**
   * Renames work only when both paths live on the same device — either
   * both outside any archive, or both inside the same archive. Any
   * other combination fails with EXDEV, like the native syscall.
   */
  async renamePromise(oldP, newP) {
    return await this.makeCallPromise(oldP, async () => {
      return await this.makeCallPromise(newP, async () => {
        return await this.baseFs.renamePromise(oldP, newP);
      }, async () => {
        // Fixed: the code used to be the non-standard `EEXDEV`.
        throw Object.assign(new Error(`EXDEV: cross-device link not permitted`), {
          code: `EXDEV`,
        });
      });
    }, async (zipFsO, {subPath: subPathO}) => {
      return await this.makeCallPromise(newP, async () => {
        throw Object.assign(new Error(`EXDEV: cross-device link not permitted`), {
          code: `EXDEV`,
        });
      }, async (zipFsN, {subPath: subPathN}) => {
        if (zipFsO !== zipFsN) {
          throw Object.assign(new Error(`EXDEV: cross-device link not permitted`), {
            code: `EXDEV`,
          });
        } else {
          return await zipFsO.renamePromise(subPathO, subPathN);
        }
      });
    });
  }
  renameSync(oldP, newP) {
    return this.makeCallSync(oldP, () => {
      return this.makeCallSync(newP, () => {
        return this.baseFs.renameSync(oldP, newP);
      }, () => {
        // Fixed: this callback was `async`, which made a sync API return
        // a rejected promise instead of throwing.
        throw Object.assign(new Error(`EXDEV: cross-device link not permitted`), {
          code: `EXDEV`,
        });
      });
    }, (zipFsO, {subPath: subPathO}) => {
      return this.makeCallSync(newP, () => {
        throw Object.assign(new Error(`EXDEV: cross-device link not permitted`), {
          code: `EXDEV`,
        });
      }, (zipFsN, {subPath: subPathN}) => {
        if (zipFsO !== zipFsN) {
          throw Object.assign(new Error(`EXDEV: cross-device link not permitted`), {
            code: `EXDEV`,
          });
        } else {
          return zipFsO.renameSync(subPathO, subPathN);
        }
      });
    });
  }
  /**
   * Copies a file, falling back to a read+write pair whenever source
   * and destination don't live on the same device (base fs vs archive,
   * or two distinct archives).
   */
  async copyFilePromise(sourceP, destP, flags = 0) {
    const fallback = async (sourceFs, sourceP, destFs, destP) => {
      // Clones can't cross devices, so FICLONE_FORCE must fail here.
      if ((flags & external_fs_.constants.COPYFILE_FICLONE_FORCE) !== 0)
        throw Object.assign(new Error(`EXDEV: cross-device clone not permitted, copyfile '${sourceP}' -> '${destP}'`), {
          code: `EXDEV`,
        });
      // COPYFILE_EXCL fails when the *destination* already exists.
      // (Fixed: this used to check the source — which always exists —
      // and on the wrong fs when the source lived inside an archive.)
      if (flags & external_fs_.constants.COPYFILE_EXCL && (await destFs.existsPromise(destP)))
        throw Object.assign(new Error(`EEXIST: file already exists, copyfile '${sourceP}' -> '${destP}'`), {
          code: `EEXIST`,
        });
      let content;
      try {
        content = await sourceFs.readFilePromise(sourceP);
      } catch (error) {
        throw Object.assign(new Error(`EINVAL: invalid argument, copyfile '${sourceP}' -> '${destP}'`), {
          code: `EINVAL`,
        });
      }
      await destFs.writeFilePromise(destP, content);
    };
    return await this.makeCallPromise(sourceP, async () => {
      return await this.makeCallPromise(destP, async () => {
        return await this.baseFs.copyFilePromise(sourceP, destP, flags);
      }, async (zipFsD, {subPath: subPathD}) => {
        return await fallback(this.baseFs, sourceP, zipFsD, subPathD);
      });
    }, async (zipFsS, {subPath: subPathS}) => {
      return await this.makeCallPromise(destP, async () => {
        return await fallback(zipFsS, subPathS, this.baseFs, destP);
      }, async (zipFsD, {subPath: subPathD}) => {
        if (zipFsS !== zipFsD) {
          return await fallback(zipFsS, subPathS, zipFsD, subPathD);
        } else {
          return await zipFsS.copyFilePromise(subPathS, subPathD, flags);
        }
      });
    });
  }
  copyFileSync(sourceP, destP, flags = 0) {
    const fallback = (sourceFs, sourceP, destFs, destP) => {
      if ((flags & external_fs_.constants.COPYFILE_FICLONE_FORCE) !== 0)
        throw Object.assign(new Error(`EXDEV: cross-device clone not permitted, copyfile '${sourceP}' -> '${destP}'`), {
          code: `EXDEV`,
        });
      // COPYFILE_EXCL fails when the *destination* already exists
      // (fixed; see copyFilePromise).
      if (flags & external_fs_.constants.COPYFILE_EXCL && destFs.existsSync(destP))
        throw Object.assign(new Error(`EEXIST: file already exists, copyfile '${sourceP}' -> '${destP}'`), {
          code: `EEXIST`,
        });
      let content;
      try {
        content = sourceFs.readFileSync(sourceP);
      } catch (error) {
        throw Object.assign(new Error(`EINVAL: invalid argument, copyfile '${sourceP}' -> '${destP}'`), {
          code: `EINVAL`,
        });
      }
      destFs.writeFileSync(destP, content);
    };
    return this.makeCallSync(sourceP, () => {
      return this.makeCallSync(destP, () => {
        return this.baseFs.copyFileSync(sourceP, destP, flags);
      }, (zipFsD, {subPath: subPathD}) => {
        return fallback(this.baseFs, sourceP, zipFsD, subPathD);
      });
    }, (zipFsS, {subPath: subPathS}) => {
      return this.makeCallSync(destP, () => {
        return fallback(zipFsS, subPathS, this.baseFs, destP);
      }, (zipFsD, {subPath: subPathD}) => {
        if (zipFsS !== zipFsD) {
          return fallback(zipFsS, subPathS, zipFsD, subPathD);
        } else {
          return zipFsS.copyFileSync(subPathS, subPathD, flags);
        }
      });
    });
  }
  async appendFilePromise(p, content, opts) {
    return await this.makeCallPromise(p, async () => {
      return await this.baseFs.appendFilePromise(p, content, opts);
    }, async (zipFs, {subPath}) => {
      return await zipFs.appendFilePromise(subPath, content, opts);
    });
  }
  appendFileSync(p, content, opts) {
    return this.makeCallSync(p, () => {
      return this.baseFs.appendFileSync(p, content, opts);
    }, (zipFs, {subPath}) => {
      return zipFs.appendFileSync(subPath, content, opts);
    });
  }
  async writeFilePromise(p, content, opts) {
    return await this.makeCallPromise(p, async () => {
      return await this.baseFs.writeFilePromise(p, content, opts);
    }, async (zipFs, {subPath}) => {
      return await zipFs.writeFilePromise(subPath, content, opts);
    });
  }
  writeFileSync(p, content, opts) {
    return this.makeCallSync(p, () => {
      return this.baseFs.writeFileSync(p, content, opts);
    }, (zipFs, {subPath}) => {
      return zipFs.writeFileSync(subPath, content, opts);
    });
  }
  async unlinkPromise(p) {
    return await this.makeCallPromise(p, async () => {
      return await this.baseFs.unlinkPromise(p);
    }, async (zipFs, {subPath}) => {
      return await zipFs.unlinkPromise(subPath);
    });
  }
  unlinkSync(p) {
    return this.makeCallSync(p, () => {
      return this.baseFs.unlinkSync(p);
    }, (zipFs, {subPath}) => {
      return zipFs.unlinkSync(subPath);
    });
  }
  async utimesPromise(p, atime, mtime) {
    return await this.makeCallPromise(p, async () => {
      return await this.baseFs.utimesPromise(p, atime, mtime);
    }, async (zipFs, {subPath}) => {
      return await zipFs.utimesPromise(subPath, atime, mtime);
    });
  }
  utimesSync(p, atime, mtime) {
    return this.makeCallSync(p, () => {
      return this.baseFs.utimesSync(p, atime, mtime);
    }, (zipFs, {subPath}) => {
      return zipFs.utimesSync(subPath, atime, mtime);
    });
  }
  async mkdirPromise(p, opts) {
    return await this.makeCallPromise(p, async () => {
      return await this.baseFs.mkdirPromise(p, opts);
    }, async (zipFs, {subPath}) => {
      return await zipFs.mkdirPromise(subPath, opts);
    });
  }
  mkdirSync(p, opts) {
    return this.makeCallSync(p, () => {
      return this.baseFs.mkdirSync(p, opts);
    }, (zipFs, {subPath}) => {
      return zipFs.mkdirSync(subPath, opts);
    });
  }
  async rmdirPromise(p, opts) {
    return await this.makeCallPromise(p, async () => {
      return await this.baseFs.rmdirPromise(p, opts);
    }, async (zipFs, {subPath}) => {
      return await zipFs.rmdirPromise(subPath, opts);
    });
  }
  rmdirSync(p, opts) {
    return this.makeCallSync(p, () => {
      return this.baseFs.rmdirSync(p, opts);
    }, (zipFs, {subPath}) => {
      return zipFs.rmdirSync(subPath, opts);
    });
  }
  async linkPromise(existingP, newP) {
    return await this.makeCallPromise(newP, async () => {
      return await this.baseFs.linkPromise(existingP, newP);
    }, async (zipFs, {subPath}) => {
      return await zipFs.linkPromise(existingP, subPath);
    });
  }
  linkSync(existingP, newP) {
    return this.makeCallSync(newP, () => {
      return this.baseFs.linkSync(existingP, newP);
    }, (zipFs, {subPath}) => {
      return zipFs.linkSync(existingP, subPath);
    });
  }
  async symlinkPromise(target, p, type) {
    return await this.makeCallPromise(p, async () => {
      return await this.baseFs.symlinkPromise(target, p, type);
    }, async (zipFs, {subPath}) => {
      return await zipFs.symlinkPromise(target, subPath);
    });
  }
  symlinkSync(target, p, type) {
    return this.makeCallSync(p, () => {
      return this.baseFs.symlinkSync(target, p, type);
    }, (zipFs, {subPath}) => {
      return zipFs.symlinkSync(target, subPath);
    });
  }
  async readFilePromise(p, encoding) {
    return this.makeCallPromise(p, async () => {
      // This weird switch is required to tell TypeScript that the
      // signatures are proper (otherwise it thinks that only the
      // generic one is covered)
      switch (encoding) {
        case `utf8`:
          return await this.baseFs.readFilePromise(p, encoding);
        default:
          return await this.baseFs.readFilePromise(p, encoding);
      }
    }, async (zipFs, {subPath}) => {
      return await zipFs.readFilePromise(subPath, encoding);
    });
  }
  readFileSync(p, encoding) {
    return this.makeCallSync(p, () => {
      // This weird switch is required to tell TypeScript that the
      // signatures are proper (otherwise it thinks that only the
      // generic one is covered)
      switch (encoding) {
        case `utf8`:
          return this.baseFs.readFileSync(p, encoding);
        default:
          return this.baseFs.readFileSync(p, encoding);
      }
    }, (zipFs, {subPath}) => {
      return zipFs.readFileSync(subPath, encoding);
    });
  }
  async readdirPromise(p, opts) {
    return await this.makeCallPromise(p, async () => {
      return await this.baseFs.readdirPromise(p, opts);
    }, async (zipFs, {subPath}) => {
      return await zipFs.readdirPromise(subPath, opts);
    }, {
      requireSubpath: false,
    });
  }
  readdirSync(p, opts) {
    return this.makeCallSync(p, () => {
      return this.baseFs.readdirSync(p, opts);
    }, (zipFs, {subPath}) => {
      return zipFs.readdirSync(subPath, opts);
    }, {
      requireSubpath: false,
    });
  }
  async readlinkPromise(p) {
    return await this.makeCallPromise(p, async () => {
      return await this.baseFs.readlinkPromise(p);
    }, async (zipFs, {subPath}) => {
      return await zipFs.readlinkPromise(subPath);
    });
  }
  readlinkSync(p) {
    return this.makeCallSync(p, () => {
      return this.baseFs.readlinkSync(p);
    }, (zipFs, {subPath}) => {
      return zipFs.readlinkSync(subPath);
    });
  }
  async truncatePromise(p, len) {
    return await this.makeCallPromise(p, async () => {
      return await this.baseFs.truncatePromise(p, len);
    }, async (zipFs, {subPath}) => {
      return await zipFs.truncatePromise(subPath, len);
    });
  }
  truncateSync(p, len) {
    return this.makeCallSync(p, () => {
      return this.baseFs.truncateSync(p, len);
    }, (zipFs, {subPath}) => {
      return zipFs.truncateSync(subPath, len);
    });
  }
  watch(p, a, b) {
    return this.makeCallSync(p, () => {
      return this.baseFs.watch(p, // @ts-expect-error
      a, b);
    }, (zipFs, {subPath}) => {
      return zipFs.watch(subPath, // @ts-expect-error
      a, b);
    });
  }
  watchFile(p, a, b) {
    return this.makeCallSync(p, () => {
      return this.baseFs.watchFile(p, // @ts-expect-error
      a, b);
    }, () => {
      return watchFile(this, p, a, b);
    });
  }
  unwatchFile(p, cb) {
    return this.makeCallSync(p, () => {
      return this.baseFs.unwatchFile(p, cb);
    }, () => {
      return unwatchFile(this, p, cb);
    });
  }
  /**
   * Routes a call: `discard` runs against the base fs when `p` doesn't
   * point inside an archive; otherwise `accept` runs against the
   * mounted ZipFS. `requireSubpath` treats a path that IS the archive
   * root as a base-fs path (e.g. stat'ing the zip file itself).
   */
  async makeCallPromise(p, discard, accept, {requireSubpath = true} = {}) {
    if (typeof p !== `string`)
      return await discard();
    const normalizedP = this.resolve(p);
    const zipInfo = this.findZip(normalizedP);
    if (!zipInfo)
      return await discard();
    if (requireSubpath && zipInfo.subPath === `/`)
      return await discard();
    return await this.getZipPromise(zipInfo.archivePath, async zipFs => await accept(zipFs, zipInfo));
  }
  makeCallSync(p, discard, accept, {requireSubpath = true} = {}) {
    if (typeof p !== `string`)
      return discard();
    const normalizedP = this.resolve(p);
    const zipInfo = this.findZip(normalizedP);
    if (!zipInfo)
      return discard();
    if (requireSubpath && zipInfo.subPath === `/`)
      return discard();
    return this.getZipSync(zipInfo.archivePath, zipFs => accept(zipFs, zipInfo));
  }
  /**
   * Locates the first `.zip` component of `p` that is an actual archive
   * file on disk, returning its path and the sub-path inside it, or
   * `null` when `p` doesn't cross any archive. Results are memoized in
   * the `isZip`/`notZip` sets.
   */
  findZip(p) {
    if (this.filter && !this.filter.test(p))
      return null;
    let filePath = ``;
    while (true) {
      const archivePart = getArchivePart(p.substr(filePath.length));
      if (!archivePart)
        return null;
      filePath = this.pathUtils.join(filePath, archivePart);
      if (this.isZip.has(filePath) === false) {
        if (this.notZip.has(filePath))
          continue;
        try {
          if (!this.baseFs.lstatSync(filePath).isFile()) {
            this.notZip.add(filePath);
            continue;
          }
        } catch (_a) {
          return null;
        }
        this.isZip.add(filePath);
      }
      return {
        archivePath: filePath,
        subPath: this.pathUtils.join(PortablePath.root, p.substr(filePath.length)),
      };
    }
  }
  /**
   * Evicts idle cached archives: expired ones always, and (when `max`
   * isn't null) enough extra ones to get the cache back under `max`.
   * Schedules itself to run again when the next entry expires.
   */
  limitOpenFiles(max) {
    if (this.zipInstances === null)
      return;
    const now = Date.now();
    let nextExpiresAt = now + this.maxAge;
    let closeCount = max === null ? 0 : this.zipInstances.size - max;
    for (const [path, {zipFs, expiresAt, refCount}] of this.zipInstances.entries()) {
      // Never evict archives that are still in use.
      if (refCount !== 0 || zipFs.hasOpenFileHandles()) {
        continue;
      } else if (now >= expiresAt) {
        zipFs.saveAndClose();
        this.zipInstances.delete(path);
        closeCount -= 1;
        continue;
      } else if (max === null || closeCount <= 0) {
        nextExpiresAt = expiresAt;
        break;
      }
      zipFs.saveAndClose();
      this.zipInstances.delete(path);
      closeCount -= 1;
    }
    if (this.limitOpenFilesTimeout === null && (max === null && this.zipInstances.size > 0 || max !== null)) {
      this.limitOpenFilesTimeout = setTimeout(() => {
        this.limitOpenFilesTimeout = null;
        this.limitOpenFiles(null);
      }, nextExpiresAt - now).unref();
    }
  }
  /**
   * Runs `accept` against the (possibly cached) ZipFS for archive `p`,
   * refreshing its expiry and refcount around the call. Without a cache
   * the archive is opened and closed on every call.
   */
  async getZipPromise(p, accept) {
    const getZipOptions = async () => ({
      baseFs: this.baseFs,
      libzip: this.libzip,
      readOnly: this.readOnlyArchives,
      stats: await this.baseFs.statPromise(p),
    });
    if (this.zipInstances) {
      let cachedZipFs = this.zipInstances.get(p);
      if (!cachedZipFs) {
        const zipOptions = await getZipOptions();
        // We need to recheck because concurrent getZipPromise calls may
        // have instantiated the zip archive while we were waiting
        cachedZipFs = this.zipInstances.get(p);
        if (!cachedZipFs) {
          cachedZipFs = {
            zipFs: new ZipFS(p, zipOptions),
            expiresAt: 0,
            refCount: 0,
          };
        }
      }
      // Removing then re-adding the field allows us to easily implement
      // a basic LRU garbage collection strategy
      this.zipInstances.delete(p);
      this.limitOpenFiles(this.maxOpenFiles - 1);
      this.zipInstances.set(p, cachedZipFs);
      cachedZipFs.expiresAt = Date.now() + this.maxAge;
      cachedZipFs.refCount += 1;
      try {
        return await accept(cachedZipFs.zipFs);
      } finally {
        cachedZipFs.refCount -= 1;
      }
    } else {
      const zipFs = new ZipFS(p, await getZipOptions());
      try {
        return await accept(zipFs);
      } finally {
        zipFs.saveAndClose();
      }
    }
  }
  getZipSync(p, accept) {
    const getZipOptions = () => ({
      baseFs: this.baseFs,
      libzip: this.libzip,
      readOnly: this.readOnlyArchives,
      stats: this.baseFs.statSync(p),
    });
    if (this.zipInstances) {
      let cachedZipFs = this.zipInstances.get(p);
      if (!cachedZipFs) {
        cachedZipFs = {
          zipFs: new ZipFS(p, getZipOptions()),
          expiresAt: 0,
          refCount: 0,
        };
      }
      // Removing then re-adding the field allows us to easily implement
      // a basic LRU garbage collection strategy
      this.zipInstances.delete(p);
      this.limitOpenFiles(this.maxOpenFiles - 1);
      this.zipInstances.set(p, cachedZipFs);
      cachedZipFs.expiresAt = Date.now() + this.maxAge;
      return accept(cachedZipFs.zipFs);
    } else {
      const zipFs = new ZipFS(p, getZipOptions());
      try {
        return accept(zipFs);
      } finally {
        zipFs.saveAndClose();
      }
    }
  }
}
; // CONCATENATED MODULE: ../yarnpkg-libzip/sources/makeInterface.ts
const number64 = [ ` number ` , ` number ` // high
] ;
var Errors;
(function (Errors) {
  // Mirror of libzip's ZIP_ER_* error codes, indexed by their numeric value.
  // Built like a compiled TypeScript numeric enum: both `Errors[name] = code`
  // and the reverse `Errors[code] = name` mappings are populated.
  const names = [
    "ZIP_ER_OK", "ZIP_ER_MULTIDISK", "ZIP_ER_RENAME", "ZIP_ER_CLOSE",
    "ZIP_ER_SEEK", "ZIP_ER_READ", "ZIP_ER_WRITE", "ZIP_ER_CRC",
    "ZIP_ER_ZIPCLOSED", "ZIP_ER_NOENT", "ZIP_ER_EXISTS", "ZIP_ER_OPEN",
    "ZIP_ER_TMPOPEN", "ZIP_ER_ZLIB", "ZIP_ER_MEMORY", "ZIP_ER_CHANGED",
    "ZIP_ER_COMPNOTSUPP", "ZIP_ER_EOF", "ZIP_ER_INVAL", "ZIP_ER_NOZIP",
    "ZIP_ER_INTERNAL", "ZIP_ER_INCONS", "ZIP_ER_REMOVE", "ZIP_ER_DELETED",
    "ZIP_ER_ENCRNOTSUPP", "ZIP_ER_RDONLY", "ZIP_ER_NOPASSWD",
    "ZIP_ER_WRONGPASSWD", "ZIP_ER_OPNOTSUPP", "ZIP_ER_INUSE",
    "ZIP_ER_TELL", "ZIP_ER_COMPRESSED_DATA"
  ];
  names.forEach((name, code) => {
    Errors[Errors[name] = code] = name;
  });
})(Errors || (Errors = {}));
// Wraps the Emscripten-compiled libzip module behind a typed facade: live
// heap views, the libzip constants used by ZipFS, pre-allocated scratch
// buffers, and a `cwrap`ed binding for each native entry point.
const makeInterface = libzip => ({
  // Those are getters because they can change after memory growth
  get HEAP8() {
    return libzip.HEAP8;
  },
  get HEAPU8() {
    return libzip.HEAPU8;
  },
  errors: Errors,
  // Whence values for the seek entry points.
  SEEK_SET: 0,
  SEEK_CUR: 1,
  SEEK_END: 2,
  // Open / general-purpose flags (see libzip's zip_open / zip_fopen docs).
  ZIP_CHECKCONS: 4,
  ZIP_CREATE: 1,
  ZIP_EXCL: 2,
  ZIP_TRUNCATE: 8,
  ZIP_RDONLY: 16,
  ZIP_FL_OVERWRITE: 8192,
  ZIP_FL_COMPRESSED: 4,
  // Host-system codes as stored in an entry's external attributes.
  ZIP_OPSYS_DOS: 0x00,
  ZIP_OPSYS_AMIGA: 0x01,
  ZIP_OPSYS_OPENVMS: 0x02,
  ZIP_OPSYS_UNIX: 0x03,
  ZIP_OPSYS_VM_CMS: 0x04,
  ZIP_OPSYS_ATARI_ST: 0x05,
  ZIP_OPSYS_OS_2: 0x06,
  ZIP_OPSYS_MACINTOSH: 0x07,
  ZIP_OPSYS_Z_SYSTEM: 0x08,
  ZIP_OPSYS_CPM: 0x09,
  ZIP_OPSYS_WINDOWS_NTFS: 0x0a,
  ZIP_OPSYS_MVS: 0x0b,
  ZIP_OPSYS_VSE: 0x0c,
  ZIP_OPSYS_ACORN_RISC: 0x0d,
  ZIP_OPSYS_VFAT: 0x0e,
  ZIP_OPSYS_ALTERNATE_MVS: 0x0f,
  ZIP_OPSYS_BEOS: 0x10,
  ZIP_OPSYS_TANDEM: 0x11,
  ZIP_OPSYS_OS_400: 0x12,
  ZIP_OPSYS_OS_X: 0x13,
  // Compression methods.
  ZIP_CM_DEFAULT: -1,
  ZIP_CM_STORE: 0,
  ZIP_CM_DEFLATE: 8,
  // Pre-allocated scratch cells (1/2/4/8 bytes) used to pass pointer
  // out-parameters to the native side without allocating on every call.
  uint08S: libzip._malloc(1),
  uint16S: libzip._malloc(2),
  uint32S: libzip._malloc(4),
  uint64S: libzip._malloc(8),
  malloc: libzip._malloc,
  free: libzip._free,
  getValue: libzip.getValue,
  // Archive lifecycle, entry enumeration, and entry I/O. 64-bit arguments
  // are spread over two `number` slots via `number64`.
  open: libzip.cwrap(`zip_open`, `number`, [`string`, `number`, `number`]),
  openFromSource: libzip.cwrap(`zip_open_from_source`, `number`, [`number`, `number`, `number`]),
  close: libzip.cwrap(`zip_close`, `number`, [`number`]),
  discard: libzip.cwrap(`zip_discard`, null, [`number`]),
  getError: libzip.cwrap(`zip_get_error`, `number`, [`number`]),
  getName: libzip.cwrap(`zip_get_name`, `string`, [`number`, `number`, `number`]),
  getNumEntries: libzip.cwrap(`zip_get_num_entries`, `number`, [`number`, `number`]),
  delete: libzip.cwrap(`zip_delete`, `number`, [`number`, `number`]),
  stat: libzip.cwrap(`zip_stat`, `number`, [`number`, `string`, `number`, `number`]),
  statIndex: libzip.cwrap(`zip_stat_index`, `number`, [`number`, ...number64, `number`, `number`]),
  fopen: libzip.cwrap(`zip_fopen`, `number`, [`number`, `string`, `number`]),
  fopenIndex: libzip.cwrap(`zip_fopen_index`, `number`, [`number`, ...number64, `number`]),
  fread: libzip.cwrap(`zip_fread`, `number`, [`number`, `number`, `number`, `number`]),
  fclose: libzip.cwrap(`zip_fclose`, `number`, [`number`]),
  dir: {
    add: libzip.cwrap(`zip_dir_add`, `number`, [`number`, `string`])
  },
  file: {
    add: libzip.cwrap(`zip_file_add`, `number`, [`number`, `string`, `number`, `number`]),
    getError: libzip.cwrap(`zip_file_get_error`, `number`, [`number`]),
    getExternalAttributes: libzip.cwrap(`zip_file_get_external_attributes`, `number`, [`number`, ...number64, `number`, `number`, `number`]),
    setExternalAttributes: libzip.cwrap(`zip_file_set_external_attributes`, `number`, [`number`, ...number64, `number`, `number`, `number`]),
    setMtime: libzip.cwrap(`zip_file_set_mtime`, `number`, [`number`, ...number64, `number`, `number`]),
    setCompression: libzip.cwrap(`zip_set_file_compression`, `number`, [`number`, ...number64, `number`, `number`])
  },
  // NOTE(review): zip_ext_* is not part of stock libzip — presumably a
  // custom helper compiled into this wasm build; confirm against the build.
  ext: {
    countSymlinks: libzip.cwrap(`zip_ext_count_symlinks`, `number`, [`number`])
  },
  error: {
    initWithCode: libzip.cwrap(`zip_error_init_with_code`, null, [`number`, `number`]),
    strerror: libzip.cwrap(`zip_error_strerror`, `string`, [`number`])
  },
  name: {
    locate: libzip.cwrap(`zip_name_locate`, `number`, [`number`, `string`, `number`])
  },
  source: {
    fromUnattachedBuffer: libzip.cwrap(`zip_source_buffer_create`, `number`, [`number`, `number`, `number`, `number`]),
    fromBuffer: libzip.cwrap(`zip_source_buffer`, `number`, [`number`, `number`, ...number64, `number`]),
    free: libzip.cwrap(`zip_source_free`, null, [`number`]),
    keep: libzip.cwrap(`zip_source_keep`, null, [`number`]),
    open: libzip.cwrap(`zip_source_open`, `number`, [`number`]),
    close: libzip.cwrap(`zip_source_close`, `number`, [`number`]),
    seek: libzip.cwrap(`zip_source_seek`, `number`, [`number`, ...number64, `number`]),
    tell: libzip.cwrap(`zip_source_tell`, `number`, [`number`]),
    read: libzip.cwrap(`zip_source_read`, `number`, [`number`, `number`, `number`]),
    error: libzip.cwrap(`zip_source_error`, `number`, [`number`]),
    setMtime: libzip.cwrap(`zip_source_set_mtime`, `number`, [`number`, `number`])
  },
  // Accessors over the zip_stat / zip_error C structs: allocation helpers
  // (`stat`/`statS`, `error`/`errorS`) plus per-field readers.
  struct: {
    stat: libzip.cwrap(`zipstruct_stat`, `number`, []),
    statS: libzip.cwrap(`zipstruct_statS`, `number`, []),
    statName: libzip.cwrap(`zipstruct_stat_name`, `string`, [`number`]),
    statIndex: libzip.cwrap(`zipstruct_stat_index`, `number`, [`number`]),
    statSize: libzip.cwrap(`zipstruct_stat_size`, `number`, [`number`]),
    statCompSize: libzip.cwrap(`zipstruct_stat_comp_size`, `number`, [`number`]),
    statCompMethod: libzip.cwrap(`zipstruct_stat_comp_method`, `number`, [`number`]),
    statMtime: libzip.cwrap(`zipstruct_stat_mtime`, `number`, [`number`]),
    statCrc: libzip.cwrap(`zipstruct_stat_crc`, `number`, [`number`]),
    error: libzip.cwrap(`zipstruct_error`, `number`, []),
    errorS: libzip.cwrap(`zipstruct_errorS`, `number`, []),
    errorCodeZip: libzip.cwrap(`zipstruct_error_code_zip`, `number`, [`number`])
  }
});
; // CONCATENATED MODULE: ../yarnpkg-libzip/sources/sync.ts
// Lazily-instantiated libzip interface, shared by every consumer.
let mod = null;
function getLibzipSync() {
  if (!mod) {
    mod = makeInterface(__webpack_require__(368));
  }
  return mod;
}
async function getLibzipPromise() {
  // The module is available synchronously, so the async variant simply
  // defers to the sync path.
  return getLibzipSync();
}
// EXTERNAL MODULE: external "module"
var external _module _ = _ _webpack _require _ _ ( 282 ) ;
var external _module _default = /*#__PURE__*/ _ _webpack _require _ _ . n ( external _module _ ) ;
; // CONCATENATED MODULE: external "string_decoder"
const external _string _decoder _namespaceObject = require ( "string_decoder" ) ; ;
var external _string _decoder _default = /*#__PURE__*/ _ _webpack _require _ _ . n ( external _string _decoder _namespaceObject ) ;
; // CONCATENATED MODULE: external "url"
const external _url _namespaceObject = require ( "url" ) ; ;
; // CONCATENATED MODULE: ../yarnpkg-fslib/sources/URLFS.ts
/**
 * Adds support for file URLs to the wrapped `baseFs`, but *not* inside the typings.
 *
 * Only exists for compatibility with Node's behavior.
 *
 * Automatically wraps all FS instances passed to `patchFs` & `extendFs`.
 *
 * Don't use it!
 */
class URLFS extends ProxiedFS {
  constructor(baseFs) {
    super(npath);
    this.baseFs = baseFs;
  }
  // Paths coming back from the base FS are returned untouched.
  mapFromBase(path) {
    return path;
  }
  // URL instances are converted to their filesystem path before reaching
  // the base FS; anything else passes through unchanged.
  mapToBase(path) {
    return path instanceof external_url_namespaceObject.URL
      ? (0, external_url_namespaceObject.fileURLToPath)(path)
      : path;
  }
}
; // CONCATENATED MODULE: ../yarnpkg-fslib/sources/patchFs.ts
// Names of the synchronous FakeFS methods mirrored onto the patched `fs`
// object under the same name. (Fix: the duplicate `readlinkSync` literal
// was removed — Sets dedupe, so this is behavior-identical.)
const SYNC_IMPLEMENTATIONS = new Set([`accessSync`, `appendFileSync`, `createReadStream`, `chmodSync`, `chownSync`, `closeSync`, `copyFileSync`, `linkSync`, `lstatSync`, `fstatSync`, `lutimesSync`, `mkdirSync`, `openSync`, `opendirSync`, `readSync`, `readlinkSync`, `readFileSync`, `readdirSync`, `realpathSync`, `renameSync`, `rmdirSync`, `statSync`, `symlinkSync`, `truncateSync`, `unlinkSync`, `unwatchFile`, `utimesSync`, `watch`, `watchFile`, `writeFileSync`, `writeSync`]);
// Names of the promise-based FakeFS methods exposed on the patched `fs` as
// callback-style functions (the `Promise` suffix is stripped to derive the
// `fs` name). (Fix: duplicate `readdirPromise` literal removed.)
// NOTE(review): `writeSync` has no `Promise` suffix and also appears in
// SYNC_IMPLEMENTATIONS, whose loop runs afterwards and overwrites it —
// kept as-is to preserve the original patching order.
const ASYNC_IMPLEMENTATIONS = new Set([`accessPromise`, `appendFilePromise`, `chmodPromise`, `chownPromise`, `closePromise`, `copyFilePromise`, `linkPromise`, `fstatPromise`, `lstatPromise`, `lutimesPromise`, `mkdirPromise`, `openPromise`, `opendirPromise`, `readdirPromise`, `realpathPromise`, `readFilePromise`, `readlinkPromise`, `renamePromise`, `rmdirPromise`, `statPromise`, `symlinkPromise`, `truncatePromise`, `unlinkPromise`, `utimesPromise`, `writeFilePromise`, `writeSync`]);
// Promise-based methods also exposed on FileHandle instances, bound to the
// handle's file descriptor.
const FILEHANDLE_IMPLEMENTATIONS = new Set([`appendFilePromise`, `chmodPromise`, `chownPromise`, `closePromise`, `readPromise`, `readFilePromise`, `statPromise`, `truncatePromise`, `utimesPromise`, `writePromise`, `writeFilePromise`]);
/**
 * Rewires `patchedFs` (the `fs` module, or an object derived from it) so
 * that its callback, sync, `fs.promises`, FileHandle and `util.promisify`
 * surfaces are all served by `fakeFs` instead of the native bindings.
 * Mutates `patchedFs` in place; `extendFs` builds a fresh object first.
 */
function patchFs(patchedFs, fakeFs) {
  // We wrap the `fakeFs` with a `URLFS` to add support for URL instances
  fakeFs = new URLFS(fakeFs);
  // Replaces `target[name]`, carrying over any custom promisify hook the
  // original implementation exposed.
  const setupFn = (target, name, replacement) => {
    const orig = target[name];
    target[name] = replacement; // Preserve any util.promisify implementations
    if (typeof (orig === null || orig === void 0 ? void 0 : orig[external_util_namespaceObject.promisify.custom]) !== `undefined`) {
      replacement[external_util_namespaceObject.promisify.custom] = orig[external_util_namespaceObject.promisify.custom];
    }
  };
  /** Callback implementations */
  {
    // `exists` and `read` have callback signatures that don't follow the
    // usual (err, result) convention, so they are patched by hand.
    setupFn(patchedFs, `exists`, (p, ...args) => {
      const hasCallback = typeof args[args.length - 1] === `function`;
      const callback = hasCallback ? args.pop() : () => {};
      process.nextTick(() => {
        fakeFs.existsPromise(p).then(exists => {
          callback(exists);
        }, () => {
          callback(false);
        });
      });
    });
    setupFn(patchedFs, `read`, (p, buffer, ...args) => {
      const hasCallback = typeof args[args.length - 1] === `function`;
      const callback = hasCallback ? args.pop() : () => {};
      process.nextTick(() => {
        fakeFs.readPromise(p, buffer, ...args).then(bytesRead => {
          callback(null, bytesRead, buffer);
        }, error => {
          // https://github.com/nodejs/node/blob/1317252dfe8824fd9cfee125d2aaa94004db2f3b/lib/fs.js#L655-L658
          // Known issue: bytesRead could theoretically be > than 0, but we currently always return 0
          callback(error, 0, buffer);
        });
      });
    });
    // Every other callback API is derived from the matching *Promise
    // method on fakeFs.
    for (const fnName of ASYNC_IMPLEMENTATIONS) {
      const origName = fnName.replace(/Promise$/, ``);
      if (typeof patchedFs[origName] === `undefined`) continue;
      const fakeImpl = fakeFs[fnName];
      if (typeof fakeImpl === `undefined`) continue;
      const wrapper = (...args) => {
        const hasCallback = typeof args[args.length - 1] === `function`;
        const callback = hasCallback ? args.pop() : () => {};
        process.nextTick(() => {
          fakeImpl.apply(fakeFs, args).then(result => {
            callback(null, result);
          }, error => {
            callback(error);
          });
        });
      };
      setupFn(patchedFs, origName, wrapper);
    }
    patchedFs.realpath.native = patchedFs.realpath;
  }
  /** Sync implementations */
  {
    // `existsSync` never throws per Node's contract, so failures are
    // converted into `false`.
    setupFn(patchedFs, `existsSync`, p => {
      try {
        return fakeFs.existsSync(p);
      } catch (error) {
        return false;
      }
    });
    for (const fnName of SYNC_IMPLEMENTATIONS) {
      const origName = fnName;
      if (typeof patchedFs[origName] === `undefined`) continue;
      const fakeImpl = fakeFs[fnName];
      if (typeof fakeImpl === `undefined`) continue;
      setupFn(patchedFs, origName, fakeImpl.bind(fakeFs));
    }
    patchedFs.realpathSync.native = patchedFs.realpathSync;
  }
  /** Promise implementations */
  {
    // `fs.promises` is a getter that returns a reference to require(`fs/promises`),
    // so we can just patch `fs.promises` and both will be updated
    // Silence whatever warning Node may emit while dereferencing
    // `fs.promises` (it was experimental in older Node releases).
    const origEmitWarning = process.emitWarning;
    process.emitWarning = () => {};
    let patchedFsPromises;
    try {
      patchedFsPromises = patchedFs.promises;
    } finally {
      process.emitWarning = origEmitWarning;
    }
    if (typeof patchedFsPromises !== `undefined`) {
      // `fs.promises.exists` doesn't exist
      for (const fnName of ASYNC_IMPLEMENTATIONS) {
        const origName = fnName.replace(/Promise$/, ``);
        if (typeof patchedFsPromises[origName] === `undefined`) continue;
        const fakeImpl = fakeFs[fnName];
        if (typeof fakeImpl === `undefined`) continue; // Open is a bit particular with fs.promises: it returns a file handle
        // instance instead of the traditional file descriptor number
        if (fnName === `open`) continue;
        setupFn(patchedFsPromises, origName, fakeImpl.bind(fakeFs));
      }
      // Minimal FileHandle stand-in: wraps the numeric fd returned by
      // fakeFs.openPromise.
      class FileHandle {
        constructor(fd) {
          this.fd = fd;
        }
      }
      for (const fnName of FILEHANDLE_IMPLEMENTATIONS) {
        const origName = fnName.replace(/Promise$/, ``);
        const fakeImpl = fakeFs[fnName];
        if (typeof fakeImpl === `undefined`) continue;
        // Regular function (not arrow) so `this` is the FileHandle whose
        // fd gets prepended to the call.
        setupFn(FileHandle.prototype, origName, function (...args) {
          return fakeImpl.call(fakeFs, this.fd, ...args);
        });
      }
      setupFn(patchedFsPromises, `open`, async (...args) => {
        // @ts-expect-error
        const fd = await fakeFs.openPromise(...args);
        return new FileHandle(fd);
      }); // `fs.promises.realpath` doesn't have a `native` property
    }
  }
  /** util.promisify implementations */
  {
    // Override the promisified version of `fs.read` to return an object as per
    // https://github.com/nodejs/node/blob/dc79f3f37caf6f25b8efee4623bec31e2c20f595/lib/fs.js#L559-L560
    // and
    // https://github.com/nodejs/node/blob/ba684805b6c0eded76e5cd89ee00328ac7a59365/lib/internal/util.js#L293
    // @ts-expect-error
    patchedFs.read[external_util_namespaceObject.promisify.custom] = async (p, buffer, ...args) => {
      const res = fakeFs.readPromise(p, buffer, ...args);
      return {
        bytesRead: await res,
        buffer
      };
    };
  }
}
/**
 * Returns a new object whose prototype is `realFs` with the PnP patches
 * applied on top — the `realFs` module itself is left untouched.
 */
function extendFs(realFs, fakeFs) {
  const wrappedFs = Object.create(realFs);
  patchFs(wrappedFs, fakeFs);
  return wrappedFs;
}
; // CONCATENATED MODULE: ../yarnpkg-fslib/sources/PosixFS.ts
/**
 * A ProxiedFS that accepts paths in the host OS flavor (`npath`) and
 * translates them to/from the portable (posix-style) flavor expected by
 * the wrapped `baseFs`.
 */
class PosixFS extends ProxiedFS {
  constructor(baseFs) {
    super(npath);
    this.baseFs = baseFs;
  }
  // Caller path -> portable path, handed to the base FS.
  mapToBase(path) {
    return npath.toPortablePath(path);
  }
  // Portable path from the base FS -> host-flavored path for the caller.
  mapFromBase(path) {
    return npath.fromPortablePath(path);
  }
}
; // CONCATENATED MODULE: ./sources/loader/internalTools.ts
var ErrorCode;
(function (ErrorCode) {
  // String enum of every error category the PnP resolver can raise
  // (each key maps to itself, like a compiled TypeScript string enum).
  for (const code of [
    "API_ERROR",
    "BUILTIN_NODE_RESOLUTION_FAILED",
    "MISSING_DEPENDENCY",
    "MISSING_PEER_DEPENDENCY",
    "QUALIFIED_PATH_RESOLUTION_FAILED",
    "INTERNAL",
    "UNDECLARED_DEPENDENCY",
    "UNSUPPORTED"
  ]) {
    ErrorCode[code] = code;
  }
})(ErrorCode || (ErrorCode = {}));
// Some errors are exposed as MODULE_NOT_FOUND for compatibility with packages
// that expect this umbrella error when the resolution fails
const MODULE_NOT_FOUND_ERRORS = new Set([ErrorCode.BUILTIN_NODE_RESOLUTION_FAILED, ErrorCode.MISSING_DEPENDENCY, ErrorCode.MISSING_PEER_DEPENDENCY, ErrorCode.QUALIFIED_PATH_RESOLUTION_FAILED, ErrorCode.UNDECLARED_DEPENDENCY]);
/**
 * Simple helper function that assign an error code to an error, so that it can more easily be caught and used
 * by third-parties.
 */
function internalTools_makeError(pnpCode, message, data = {}) {
  // Resolution failures are surfaced under Node's umbrella MODULE_NOT_FOUND
  // code so tools sniffing `error.code` keep working.
  const code = MODULE_NOT_FOUND_ERRORS.has(pnpCode) ? `MODULE_NOT_FOUND` : pnpCode;
  const error = new Error(message);
  // All three extras are non-enumerable so they don't leak into logs that
  // spread the error object.
  for (const [key, value] of Object.entries({ code, pnpCode, data })) {
    Object.defineProperty(error, key, {
      configurable: true,
      writable: true,
      enumerable: false,
      value
    });
  }
  return error;
}
/**
 * Returns the module that should be used to resolve require calls. It's usually the direct parent, except if we're
 * inside an eval expression.
 */
function getIssuerModule(parent) {
  // Walk up past synthetic frames: eval/REPL modules, or anything without
  // a filename, cannot anchor a resolution.
  const isSynthetic = m => m.id === `[eval]` || m.id === `<repl>` || !m.filename;
  let issuer = parent;
  while (issuer && isSynthetic(issuer)) {
    issuer = issuer.parent;
  }
  return issuer || null;
}
// Converts a portable path into the host OS flavor and normalizes it, for
// use in user-facing messages.
function getPathForDisplay(p) {
  const nativePath = npath.fromPortablePath(p);
  return npath.normalize(nativePath);
}
; // CONCATENATED MODULE: ./sources/loader/applyPatch.ts
/**
 * Installs the PnP hooks into Node's module system: overrides
 * `Module._load`, `Module._resolveFilename` and `Module._findPath`, exposes
 * `findPnpApi` on the `module` builtin, advertises the runtime through
 * `process.versions.pnp`, and routes `fs` through `opts.fakeFs` (via
 * `patchFs`) so files inside zip archives can be read transparently.
 */
function applyPatch(pnpapi, opts) {
  // @ts-expect-error
  const builtinModules = new Set(external_module_.Module.builtinModules || Object.keys(process.binding(`natives`)));
  const isBuiltinModule = request => builtinModules.has(request) || request.startsWith(`node:`);
  /**
   * The cache that will be used for all accesses occurring outside of a PnP context.
   */
  const defaultCache = {};
  /**
   * Used to disable the resolution hooks (for when we want to fallback to the previous resolution - we then need
   * a way to "reset" the environment temporarily)
   */
  let enableNativeHooks = true; // @ts-expect-error
  process.versions.pnp = String(pnpapi.VERSIONS.std);
  const moduleExports = __webpack_require__(282);
  // Returns the PnP api instance covering `lookupSource`, or null when the
  // path is outside every PnP context (or explicitly ignored).
  moduleExports.findPnpApi = lookupSource => {
    const lookupPath = lookupSource instanceof external_url_namespaceObject.URL ? (0, external_url_namespaceObject.fileURLToPath)(lookupSource) : lookupSource;
    const apiPath = opts.manager.findApiPathFor(lookupPath);
    if (apiPath === null) return null;
    const apiEntry = opts.manager.getApiEntry(apiPath, true); // Check if the path is ignored
    return apiEntry.instance.findPackageLocator(lookupPath) ? apiEntry.instance : null;
  };
  // Collects the chain of parent filenames (or ids) for error reporting.
  function getRequireStack(parent) {
    const requireStack = [];
    for (let cursor = parent; cursor; cursor = cursor.parent) requireStack.push(cursor.filename || cursor.id);
    return requireStack;
  } // A small note: we don't replace the cache here (and instead use the native one). This is an effort to not
  // break code similar to "delete require.cache[require.resolve(FOO)]", where FOO is a package located outside
  // of the Yarn dependency tree. In this case, we defer the load to the native loader. If we were to replace the
  // cache by our own, the native loader would populate its own cache, which wouldn't be exposed anymore, so the
  // delete call would be broken.
  const originalModuleLoad = external_module_.Module._load;
  external_module_.Module._load = function (request, parent, isMain) {
    if (!enableNativeHooks) return originalModuleLoad.call(external_module_.Module, request, parent, isMain); // Builtins are managed by the regular Node loader
    if (isBuiltinModule(request)) {
      try {
        enableNativeHooks = false;
        return originalModuleLoad.call(external_module_.Module, request, parent, isMain);
      } finally {
        enableNativeHooks = true;
      }
    }
    const parentApiPath = opts.manager.getApiPathFromParent(parent);
    const parentApi = parentApiPath !== null ? opts.manager.getApiEntry(parentApiPath, true).instance : null; // Requests that aren't covered by the PnP runtime goes through the
    // parent `_load` implementation. This is required for VSCode, for example,
    // which override `_load` to provide additional builtins to its extensions.
    if (parentApi === null) return originalModuleLoad(request, parent, isMain); // The 'pnpapi' name is reserved to return the PnP api currently in use
    // by the program
    if (request === `pnpapi`) return parentApi; // Request `Module._resolveFilename` (ie. `resolveRequest`) to tell us
    // which file we should load
    const modulePath = external_module_.Module._resolveFilename(request, parent, isMain); // We check whether the module is owned by the dependency tree of the
    // module that required it. If it isn't, then we need to create a new
    // store and possibly load its sandboxed PnP runtime.
    const isOwnedByRuntime = parentApi !== null ? parentApi.findPackageLocator(modulePath) !== null : false;
    const moduleApiPath = isOwnedByRuntime ? parentApiPath : opts.manager.findApiPathFor(npath.dirname(modulePath));
    const entry = moduleApiPath !== null ? opts.manager.getApiEntry(moduleApiPath) : {
      instance: null,
      cache: defaultCache
    }; // Check if the module has already been created for the given file
    const cacheEntry = entry.cache[modulePath];
    if (cacheEntry) {
      // When a dynamic import is used in CJS files Node adds the module
      // to the cache but doesn't load it so we do it here.
      //
      // Keep track of and check if the module is already loading to
      // handle circular requires.
      //
      // The explicit checks are required since `@babel/register` et al.
      // create modules without the `loaded` and `load` properties
      if (cacheEntry.loaded === false && cacheEntry.isLoading !== true) {
        try {
          cacheEntry.isLoading = true;
          cacheEntry.load(modulePath);
        } finally {
          cacheEntry.isLoading = false;
        }
      }
      return cacheEntry.exports;
    } // Create a new module and store it into the cache
    const module = new external_module_.Module(modulePath, parent !== null && parent !== void 0 ? parent : undefined);
    module.pnpApiPath = moduleApiPath;
    entry.cache[modulePath] = module; // The main module is exposed as global variable
    if (isMain) {
      process.mainModule = module;
      module.id = `.`;
    } // Try to load the module, and remove it from the cache if it fails
    let hasThrown = true;
    try {
      module.isLoading = true;
      module.load(modulePath);
      hasThrown = false;
    } finally {
      module.isLoading = false;
      if (hasThrown) {
        delete external_module_.Module._cache[modulePath];
      }
    }
    return module.exports;
  };
  // Builds one resolution candidate per entry of a `paths` option.
  function getIssuerSpecsFromPaths(paths) {
    return paths.map(path => ({
      apiPath: opts.manager.findApiPathFor(path),
      path,
      module: null
    }));
  }
  // Derives the resolution candidate(s) from the requiring module, falling
  // back to the current working directory when no issuer can be found.
  function getIssuerSpecsFromModule(module) {
    var _a;
    if (module && module.id !== `<repl>` && module.id !== `internal/preload` && !module.parent && !module.filename && module.paths.length > 0) {
      return [{
        apiPath: opts.manager.findApiPathFor(module.paths[0]),
        path: module.paths[0],
        module
      }];
    }
    const issuer = getIssuerModule(module);
    if (issuer !== null) {
      const path = npath.dirname(issuer.filename);
      const apiPath = opts.manager.getApiPathFromParent(issuer);
      return [{
        apiPath,
        path,
        module
      }];
    } else {
      const path = process.cwd();
      const apiPath = (_a = opts.manager.findApiPathFor(npath.join(path, `[file]`))) !== null && _a !== void 0 ? _a : opts.manager.getApiPathFromParent(null);
      return [{
        apiPath,
        path,
        module
      }];
    }
  }
  // Fabricates a parent module rooted at `path` so the native resolver can
  // be used for requests that have no real issuer module.
  function makeFakeParent(path) {
    const fakeParent = new external_module_.Module(``);
    const fakeFilePath = npath.join(path, `[file]`);
    fakeParent.paths = external_module_.Module._nodeModulePaths(fakeFilePath);
    return fakeParent;
  } // Splits a require request into its components, or return null if the request is a file path
  const pathRegExp = /^(?![a-zA-Z]:[\\/]|\\\\|\.{0,2}(?:\/|$))((?:@[^/]+\/)?[^/]+)\/*(.*|)$/;
  const originalModuleResolveFilename = external_module_.Module._resolveFilename;
  external_module_.Module._resolveFilename = function (request, parent, isMain, options) {
    if (isBuiltinModule(request)) return request;
    if (!enableNativeHooks) return originalModuleResolveFilename.call(external_module_.Module, request, parent, isMain, options);
    // `plugnplay: false` opts the call out of PnP entirely.
    if (options && options.plugnplay === false) {
      const {
        plugnplay,
        ...rest
      } = options; // Workaround a bug present in some version of Node (now fixed)
      // https://github.com/nodejs/node/pull/28078
      const forwardedOptions = Object.keys(rest).length > 0 ? rest : undefined;
      try {
        enableNativeHooks = false;
        return originalModuleResolveFilename.call(external_module_.Module, request, parent, isMain, forwardedOptions);
      } finally {
        enableNativeHooks = true;
      }
    } // We check that all the options present here are supported; better
    // to fail fast than to introduce subtle bugs in the runtime.
    if (options) {
      const optionNames = new Set(Object.keys(options));
      optionNames.delete(`paths`);
      optionNames.delete(`plugnplay`);
      if (optionNames.size > 0) {
        throw internalTools_makeError(ErrorCode.UNSUPPORTED, `Some options passed to require() aren't supported by PnP yet (${Array.from(optionNames).join(`, `)})`);
      }
    }
    const issuerSpecs = options && options.paths ? getIssuerSpecsFromPaths(options.paths) : getIssuerSpecsFromModule(parent);
    // Requests that don't match the bare-specifier regexp are file paths;
    // try the parent's own api/directory first for those.
    if (request.match(pathRegExp) === null) {
      const parentDirectory = (parent === null || parent === void 0 ? void 0 : parent.filename) != null ? npath.dirname(parent.filename) : null;
      const absoluteRequest = npath.isAbsolute(request) ? request : parentDirectory !== null ? npath.resolve(parentDirectory, request) : null;
      if (absoluteRequest !== null) {
        const apiPath = parentDirectory === npath.dirname(absoluteRequest) && (parent === null || parent === void 0 ? void 0 : parent.pnpApiPath) ? parent.pnpApiPath : opts.manager.findApiPathFor(absoluteRequest);
        if (apiPath !== null) {
          issuerSpecs.unshift({
            apiPath,
            path: parentDirectory,
            module: null
          });
        }
      }
    }
    // Try each candidate issuer in turn; the first successful resolution
    // wins, and the first failure is kept for reporting.
    let firstError;
    for (const {
      apiPath,
      path,
      module
    } of issuerSpecs) {
      let resolution;
      const issuerApi = apiPath !== null ? opts.manager.getApiEntry(apiPath, true).instance : null;
      try {
        if (issuerApi !== null) {
          resolution = issuerApi.resolveRequest(request, path !== null ? `${path}/` : null);
        } else {
          if (path === null) throw new Error(`Assertion failed: Expected the path to be set`);
          resolution = originalModuleResolveFilename.call(external_module_.Module, request, module || makeFakeParent(path), isMain);
        }
      } catch (error) {
        firstError = firstError || error;
        continue;
      }
      if (resolution !== null) {
        return resolution;
      }
    }
    // Every candidate failed: enrich the first error with the require
    // stack before surfacing it.
    const requireStack = getRequireStack(parent);
    Object.defineProperty(firstError, `requireStack`, {
      configurable: true,
      writable: true,
      enumerable: false,
      value: requireStack
    });
    if (requireStack.length > 0) firstError.message += `\nRequire stack:\n- ${requireStack.join(`\n- `)}`;
    if (typeof firstError.pnpCode === `string`) Error.captureStackTrace(firstError);
    throw firstError;
  };
  const originalFindPath = external_module_.Module._findPath;
  external_module_.Module._findPath = function (request, paths, isMain) {
    if (request === `pnpapi`) return false; // Node sometimes call this function with an absolute path and a `null` set
    // of paths. This would cause the resolution to fail. To avoid that, we
    // fallback on the regular resolution. We only do this when `isMain` is
    // true because the Node default resolution doesn't handle well in-zip
    // paths, even absolute, so we try to use it as little as possible.
    if (!enableNativeHooks || isMain && npath.isAbsolute(request)) return originalFindPath.call(external_module_.Module, request, paths, isMain);
    for (const path of paths || []) {
      let resolution;
      try {
        const pnpApiPath = opts.manager.findApiPathFor(path);
        if (pnpApiPath !== null) {
          const api = opts.manager.getApiEntry(pnpApiPath, true).instance;
          resolution = api.resolveRequest(request, path) || false;
        } else {
          resolution = originalFindPath.call(external_module_.Module, request, [path], isMain);
        }
      } catch (error) {
        continue;
      }
      if (resolution) {
        return resolution;
      }
    }
    return false;
  };
  // Finally, route all fs access through the zip-aware virtual filesystem.
  patchFs((external_fs_default()), new PosixFS(opts.fakeFs));
}
; // CONCATENATED MODULE: ./sources/loader/hydrateRuntimeState.ts
/**
 * Rehydrates the serialized PnP state into the runtime data structures
 * (Maps/Sets keyed by package name and reference).
 *
 * @param {object} data Serialized state (as embedded by $$SETUP_STATE).
 * @param {{basePath: string}} param1 Directory the relative package locations resolve against.
 * @returns {object} The runtime state consumed by makeApi.
 */
function hydrateRuntimeState(data, {basePath}) {
  const portablePath = npath.toPortablePath(basePath);
  const absolutePortablePath = ppath.resolve(portablePath);
  const ignorePattern = data.ignorePatternData !== null
    ? new RegExp(data.ignorePatternData)
    : null;
  const packageLocatorsByLocations = new Map();
  const packageRegistry = new Map(data.packageRegistryData.map(([packageName, packageStoreData]) => {
    return [packageName, new Map(packageStoreData.map(([packageReference, packageInformationData]) => {
      var _a;
      // Either both name and reference are null (the top-level locator) or neither is.
      if ((packageName === null) !== (packageReference === null))
        throw new Error(`Assertion failed: The name and reference should be null, or neither should`);
      const discardFromLookup = (_a = packageInformationData.discardFromLookup) !== null && _a !== void 0 ? _a : false;
      // @ts-expect-error: TypeScript isn't smart enough to understand the type assertion
      const packageLocator = {
        name: packageName,
        reference: packageReference,
      };
      const entry = packageLocatorsByLocations.get(packageInformationData.packageLocation);
      if (!entry) {
        packageLocatorsByLocations.set(packageInformationData.packageLocation, {
          locator: packageLocator,
          discardFromLookup,
        });
      } else {
        // A location may be shared by several locators: it stays discarded only
        // if every owner discards it, and the visible locator is the last
        // non-discarded one.
        entry.discardFromLookup = entry.discardFromLookup && discardFromLookup;
        if (!discardFromLookup)
          entry.locator = packageLocator;
      }
      let resolvedPackageLocation = null;
      return [packageReference, {
        packageDependencies: new Map(packageInformationData.packageDependencies),
        packagePeers: new Set(packageInformationData.packagePeers),
        linkType: packageInformationData.linkType,
        discardFromLookup,
        // we only need this for packages that are used by the currently running script
        // this is a lazy getter because `ppath.join` has some overhead
        get packageLocation() {
          // We use ppath.join instead of ppath.resolve because:
          // 1) packageInformationData.packageLocation is a relative path when part of the SerializedState
          // 2) ppath.join preserves trailing slashes
          return resolvedPackageLocation || (resolvedPackageLocation = ppath.join(absolutePortablePath, packageInformationData.packageLocation));
        },
      }];
    }))];
  }));
  const fallbackExclusionList = new Map(data.fallbackExclusionList.map(([packageName, packageReferences]) => {
    return [packageName, new Set(packageReferences)];
  }));
  const fallbackPool = new Map(data.fallbackPool);
  const dependencyTreeRoots = data.dependencyTreeRoots;
  const enableTopLevelFallback = data.enableTopLevelFallback;
  return {
    basePath: portablePath,
    dependencyTreeRoots,
    enableTopLevelFallback,
    fallbackExclusionList,
    fallbackPool,
    ignorePattern,
    packageLocatorsByLocations,
    packageRegistry,
  };
}
; // CONCATENATED MODULE: ../../.yarn/cache/resolve.exports-npm-1.0.2-bbb8d62ef6-1de1e50dc6.zip/node_modules/resolve.exports/dist/index.mjs
/**
 * Walks an "exports" value and returns the first string target whose
 * conditions are all allowed.
 * @param {object|string|Array} exports The exports value to inspect.
 * @param {Set<string>} keys Allowed condition names.
 * @returns {string|undefined} The matched target, or undefined.
 */
function loop(exports, keys) {
  if (typeof exports === 'string')
    return exports;
  if (!exports)
    return;
  if (Array.isArray(exports)) {
    // First array entry that resolves to a truthy target wins.
    for (const item of exports) {
      const result = loop(item, keys);
      if (result) return result;
    }
  } else {
    // First own key that is an allowed condition wins (insertion order).
    for (const condition in exports) {
      if (keys.has(condition)) {
        return loop(exports[condition], keys);
      }
    }
  }
}
/**
 * Throws a resolution error for the given entry.
 * @param {string} name The package name
 * @param {string} entry The target entry, eg "."
 * @param {number} [condition] Truthy when the entry exists but no condition matched.
 * @throws {Error} Always.
 */
function bail(name, entry, condition) {
  const reason = condition
    ? `No known conditions for "${entry}" entry in "${name}" package`
    : `Missing "${entry}" export in "${name}" package`;
  throw new Error(reason);
}
/**
 * Normalizes an entry to a "."-relative specifier within the package.
 * @param {string} name the package name
 * @param {string} entry the target path/import
 * @returns {string} "." for the bare package name, the entry untouched when
 *   already relative, otherwise the entry with a leading "name/" replaced by "./".
 */
function toName(name, entry) {
  if (entry === name) return '.';
  if (entry[0] === '.') return entry;
  // NOTE(review): `name` is interpolated unescaped into the RegExp; package
  // names containing regex metacharacters would match loosely (upstream quirk,
  // preserved as-is).
  return entry.replace(new RegExp('^' + name + '/'), './');
}
/**
 * Resolves an entry against the package's "exports" map.
 * @param {object} pkg package.json contents
 * @param {string} [entry] entry name or import path
 * @param {object} [options]
 * @param {boolean} [options.browser]
 * @param {boolean} [options.require]
 * @param {string[]} [options.conditions]
 * @returns {string|undefined} The resolved target; undefined when the package
 *   has no "exports" field. Throws (via bail) when the entry cannot be resolved.
 */
function resolve(pkg, entry = '.', options = {}) {
  let {name, exports} = pkg;
  if (!exports) return;
  const {browser, require, conditions = []} = options;
  let target = toName(name, entry);
  if (target[0] !== '.') target = './' + target;
  // A string shorthand only ever maps the "." entry.
  if (typeof exports === 'string')
    return target === '.' ? exports : bail(name, target);
  const allows = new Set(['default', ...conditions]);
  allows.add(require ? 'require' : 'import');
  allows.add(browser ? 'browser' : 'node');
  // "Single" form: the top-level keys are condition names, not entry paths.
  let isSingle = false;
  for (const key in exports) {
    isSingle = key[0] !== '.';
    break;
  }
  if (isSingle) {
    return target === '.'
      ? loop(exports, allows) || bail(name, target, 1)
      : bail(name, target);
  }
  // Exact entry match.
  const exact = exports[target];
  if (exact)
    return loop(exact, allows) || bail(name, target, 1);
  for (const key in exports) {
    const last = key[key.length - 1];
    if (last === '/' && target.startsWith(key)) {
      // Directory mapping: append the remainder of the target.
      const resolved = loop(exports[key], allows);
      return resolved
        ? resolved + target.substring(key.length)
        : bail(name, target, 1);
    }
    if (last === '*' && target.startsWith(key.slice(0, -1))) {
      // do not trigger if no *content* to inject
      if (target.substring(key.length - 1).length > 0) {
        const resolved = loop(exports[key], allows);
        return resolved
          ? resolved.replace('*', target.substring(key.length - 1))
          : bail(name, target, 1);
      }
    }
  }
  return bail(name, target);
}
/**
 * Legacy (pre-"exports") entry resolution via main-like manifest fields.
 * @param {object} pkg package.json contents
 * @param {object} [options]
 * @param {string|boolean} [options.browser] When a string, a sub-entry to look
 *   up inside a "browser" map; when truthy, prioritizes the "browser" field.
 * @param {string[]} [options.fields] Fields to probe, in priority order.
 * @returns {string|object|undefined} A "./"-prefixed path, a browser-map value,
 *   or undefined when nothing matched.
 */
function legacy(pkg, options = {}) {
  let browser = options.browser;
  const fields = options.fields || ['module', 'main'];
  // "browser" gets the highest priority when requested (note: mutates a
  // caller-supplied `fields` array, matching the original behavior).
  if (browser && !fields.includes('browser'))
    fields.unshift('browser');
  for (const field of fields) {
    let value = pkg[field];
    if (!value) continue;
    if (typeof value == 'object') {
      // Only the "browser" field may be an object (a remap table).
      if (field != 'browser') continue;
      if (typeof browser == 'string') {
        browser = toName(pkg.name, browser);
        value = value[browser];
        // An explicit absence in the map yields the normalized name itself.
        if (value == null) return browser;
      }
    } else if (typeof value != 'string') {
      continue;
    }
    return typeof value == 'string'
      ? ('./' + value.replace(/^\.?\//, ''))
      : value;
  }
}
; // CONCATENATED MODULE: ./sources/loader/makeApi.ts
function makeApi ( runtimeState , opts ) {
// Emit a warning on every fallback resolution when requested via the env.
const alwaysWarnOnFallback = Number(process.env.PNP_ALWAYS_WARN_ON_FALLBACK) > 0;
const debugLevel = Number(process.env.PNP_DEBUG_LEVEL);
// @ts-expect-error
const builtinModules = new Set(external_module_.Module.builtinModules || Object.keys(process.binding(`natives`)));
const isBuiltinModule = request => builtinModules.has(request) || request.startsWith(`node:`);
// Splits a require request into its components, or return null if the request is a file path
const pathRegExp = /^(?![a-zA-Z]:[\\/]|\\\\|\.{0,2}(?:\/|$))((?:node:)?(?:@[^/]+\/)?[^/]+)\/*(.*|)$/;
// Matches if the path starts with a valid path qualifier (./, ../, /)
// eslint-disable-next-line no-unused-vars
const isStrictRegExp = /^(\/|\.{1,2}(\/|$))/;
// Matches if the path must point to a directory (ie ends with /)
const isDirRegExp = /\/$/;
// Matches if the path starts with a relative path qualifier (./, ../)
const isRelativeRegexp = /^\.{0,2}\//;
// We only instantiate one of those so that we can use strict-equal comparisons
const topLevelLocator = {
  name: null,
  reference: null,
};
// Used for compatibility purposes - cf setupCompatibilityLayer
const fallbackLocators = [];
// To avoid emitting the same warning multiple times
const emittedWarnings = new Set();
if (runtimeState.enableTopLevelFallback === true)
  fallbackLocators.push(topLevelLocator);
if (opts.compatibilityMode !== false) {
  // ESLint currently doesn't have any portable way for shared configs to
  // specify their own plugins that should be used (cf issue #10125). This
  // will likely get fixed at some point but it'll take time, so in the
  // meantime we'll just add additional fallback entries for common shared
  // configs.
  // Similarly, Gatsby generates files within the `public` folder located
  // within the project, but doesn't pre-resolve the `require` calls to use
  // its own dependencies. Meaning that when PnP see a file from the `public`
  // folder making a require, it thinks that your project forgot to list one
  // of your dependencies.
  for (const name of [`react-scripts`, `gatsby`]) {
    const packageStore = runtimeState.packageRegistry.get(name);
    if (packageStore) {
      for (const reference of packageStore.keys()) {
        if (reference === null) {
          throw new Error(`Assertion failed: This reference shouldn't be null`);
        } else {
          fallbackLocators.push({name, reference});
        }
      }
    }
  }
}
/**
 * The setup code will be injected here. The tables listed below are guaranteed to be filled after the call to
 * the $$DYNAMICALLY_GENERATED_CODE function.
 */
const {
  ignorePattern,
  packageRegistry,
  packageLocatorsByLocations,
} = runtimeState;
/**
 * Creates a fresh trace entry describing one resolver call.
 * @param {string} name Name of the traced function.
 * @param {Array} args Arguments it was called with.
 * @returns {{fn: string, args: Array, error: null, result: null}}
 */
function makeLogEntry(name, args) {
  const entry = {
    fn: name,
    args,
    error: null,
    result: null,
  };
  return entry;
}
/**
 * Pretty-prints one log entry (inputs, output or error, call stack) to stderr.
 * Only used when PNP_DEBUG_LEVEL enables tracing (see maybeLog).
 * @param {{fn: string, args: Array, error: Error|null, result: *}} entry
 */
function trace(entry) {
  var _a, _b, _c, _d, _e, _f;
  // Color when stderr reports color support, falling back to stdout's TTY-ness.
  const colors = (_c = (_b = (_a = process.stderr) === null || _a === void 0 ? void 0 : _a.hasColors) === null || _b === void 0 ? void 0 : _b.call(_a)) !== null && _c !== void 0 ? _c : process.stdout.isTTY;
  const c = (n, str) => `\u001b[${n}m${str}\u001b[0m`;
  const error = entry.error;
  // FIX: the else-branch of this statement was garbled in the source
  // (characters scattered by extraction); reconstructed from the upstream
  // @yarnpkg/pnp sources: failures print the first line of the error in red,
  // successes print a yellow "Resolution" header.
  if (error) console.error(c(`31;1`, `✖ ${(_d = entry.error) === null || _d === void 0 ? void 0 : _d.message.replace(/\n.*/s, ``)}`)); else console.error(c(`33;1`, `‼ Resolution`));
  if (entry.args.length > 0) console.error();
  for (const arg of entry.args) console.error(`${c(`37;1`, `In ←`)} ${(0, external_util_namespaceObject.inspect)(arg, {
    colors,
    compact: true
  })}`);
  if (entry.result) {
    console.error();
    console.error(`${c(`37;1`, `Out →`)} ${(0, external_util_namespaceObject.inspect)(entry.result, {
      colors,
      compact: true
    })}`);
  }
  // Drop the first two frames: they belong to the tracing machinery itself.
  const stack = (_f = (_e = new Error().stack.match(/(?<=^ +)at.*/gm)) === null || _e === void 0 ? void 0 : _e.slice(2)) !== null && _f !== void 0 ? _f : [];
  if (stack.length > 0) {
    console.error();
    for (const line of stack) {
      console.error(`${c(`38;5;244`, line)}`);
    }
  }
  console.error();
}
/**
 * Wraps `fn` with tracing according to PNP_DEBUG_LEVEL:
 *  - level >= 2: trace every call (inputs, outputs, and errors);
 *  - level >= 1: trace failing calls only;
 *  - otherwise (or when opts.allowDebug === false): return fn untouched.
 * @param {string} name Name reported in the trace output.
 * @param {Function} fn The function to wrap.
 * @returns {Function} Either fn itself or a tracing wrapper with the same signature.
 */
function maybeLog(name, fn) {
  if (opts.allowDebug === false)
    return fn;
  if (!Number.isFinite(debugLevel))
    return fn;
  if (debugLevel >= 2) {
    // Trace everything: record result or error, then print in `finally`.
    return (...args) => {
      const logEntry = makeLogEntry(name, args);
      try {
        return logEntry.result = fn(...args);
      } catch (error) {
        throw logEntry.error = error;
      } finally {
        trace(logEntry);
      }
    };
  }
  if (debugLevel >= 1) {
    // Trace failures only; successful calls pass through silently.
    return (...args) => {
      try {
        return fn(...args);
      } catch (error) {
        const logEntry = makeLogEntry(name, args);
        logEntry.error = error;
        trace(logEntry);
        throw error;
      }
    };
  }
  return fn;
}
/**
 * Returns information about a package in a safe way (will throw if they cannot be retrieved)
 * @throws When the locator has no entry in the package registry.
 */
function getPackageInformationSafe(packageLocator) {
  const packageInformation = getPackageInformation(packageLocator);
  if (packageInformation)
    return packageInformation;
  throw internalTools_makeError(ErrorCode.INTERNAL, `Couldn't find a matching entry in the dependency tree for the specified parent (this is probably an internal error)`);
}
/**
 * Returns whether the specified locator is a dependency tree root (in which case it's part of the project) or not.
 * The top-level locator (name === null) always counts as a root.
 */
function isDependencyTreeRoot(packageLocator) {
  if (packageLocator.name === null)
    return true;
  return runtimeState.dependencyTreeRoots.some(root =>
    root.name === packageLocator.name && root.reference === packageLocator.reference);
}
/**
 * Implements the node resolution for the "exports" field
 *
 * @returns The remapped path or `null` if the package doesn't have a package.json or an "exports" field
 * @throws When the path has no owning locator or escapes its package (internal errors).
 */
function applyNodeExportsResolution(unqualifiedPath) {
  // Appending a filename ensures directory paths resolve to their owning package.
  const locator = findPackageLocator(ppath.join(unqualifiedPath, `internal.js`), {
    resolveIgnored: true,
    includeDiscardFromLookup: true,
  });
  if (locator === null)
    throw internalTools_makeError(ErrorCode.INTERNAL, `The locator that owns the "${unqualifiedPath}" path can't be found inside the dependency tree (this is probably an internal error)`);
  const {packageLocation} = getPackageInformationSafe(locator);
  const manifestPath = ppath.join(packageLocation, Filename.manifest);
  if (!opts.fakeFs.existsSync(manifestPath))
    return null;
  const pkgJson = JSON.parse(opts.fakeFs.readFileSync(manifestPath, `utf8`));
  let subpath = ppath.contains(packageLocation, unqualifiedPath);
  if (subpath === null)
    throw internalTools_makeError(ErrorCode.INTERNAL, `unqualifiedPath doesn't contain the packageLocation (this is probably an internal error)`);
  if (!isRelativeRegexp.test(subpath))
    subpath = `./${subpath}`;
  const resolvedExport = resolve(pkgJson, ppath.normalize(subpath), {
    browser: false,
    require: true,
    // TODO: implement support for the --conditions flag
    // Waiting on https://github.com/nodejs/node/issues/36935
    conditions: [],
  });
  if (typeof resolvedExport === `string`)
    return ppath.join(packageLocation, resolvedExport);
  return null;
}
/**
 * Implements the node resolution for folder access and extension selection.
 * Every path probed along the way is appended to `candidates` (for error reporting).
 */
function applyNodeExtensionResolution(unqualifiedPath, candidates, {extensions}) {
  let stat;
  try {
    candidates.push(unqualifiedPath);
    stat = opts.fakeFs.statSync(unqualifiedPath);
  } catch (error) {}
  // If the file exists and is a file, we can stop right there
  if (stat && !stat.isDirectory())
    return opts.fakeFs.realpathSync(unqualifiedPath);
  // If the file is a directory, we must check if it contains a package.json with a "main" entry
  if (stat && stat.isDirectory()) {
    let pkgJson;
    try {
      pkgJson = JSON.parse(opts.fakeFs.readFileSync(ppath.join(unqualifiedPath, Filename.manifest), `utf8`));
    } catch (error) {}
    let nextUnqualifiedPath;
    if (pkgJson && pkgJson.main)
      nextUnqualifiedPath = ppath.resolve(unqualifiedPath, pkgJson.main);
    // If the "main" field changed the path, we start again from this new location
    if (nextUnqualifiedPath && nextUnqualifiedPath !== unqualifiedPath) {
      const resolution = applyNodeExtensionResolution(nextUnqualifiedPath, candidates, {extensions});
      if (resolution !== null)
        return resolution;
    }
  }
  // Otherwise we check if we find a file that match one of the supported extensions
  for (const extension of extensions) {
    const candidateFile = `${unqualifiedPath}${extension}`;
    candidates.push(candidateFile);
    if (opts.fakeFs.existsSync(candidateFile))
      return candidateFile;
  }
  // Otherwise, we check if the path is a folder - in such a case, we try to use its index
  if (stat && stat.isDirectory()) {
    for (const extension of extensions) {
      const candidateFile = ppath.format({
        dir: unqualifiedPath,
        name: `index`,
        ext: extension,
      });
      candidates.push(candidateFile);
      if (opts.fakeFs.existsSync(candidateFile))
        return candidateFile;
    }
  }
  // Otherwise there's nothing else we can do :(
  return null;
}
/**
 * This function creates fake modules that can be used with the _resolveFilename function.
 * Ideally it would be nice to be able to avoid this, since it causes useless allocations
 * and cannot be cached efficiently (we recompute the nodeModulePaths every time).
 *
 * Fortunately, this should only affect the fallback, and there hopefully shouldn't have a
 * lot of them.
 */
function makeFakeModule(path) {
  // @ts-expect-error
  const mod = new external_module_.Module(path, null);
  mod.filename = path;
  mod.paths = external_module_.Module._nodeModulePaths(path);
  return mod;
}
/**
 * Forward the resolution to the next resolver (usually the native one)
 */
function callNativeResolution(request, issuer) {
  // A trailing slash means "directory": anchor the fake issuer to a file inside it.
  if (issuer.endsWith(`/`))
    issuer = ppath.join(issuer, `internal.js`);
  // Since we would need to create a fake module anyway (to call _resolveLookupPath that
  // would give us the paths to give to _resolveFilename), we can as well not use
  // the {paths} option at all, since it internally makes _resolveFilename create another
  // fake module anyway.
  return external_module_.Module._resolveFilename(npath.fromPortablePath(request), makeFakeModule(npath.fromPortablePath(issuer)), false, {
    plugnplay: false,
  });
}
/**
 * Returns true when the given path matches the user-provided `ignorePattern`
 * (tested relative to the project base path); such paths bypass PnP resolution.
 */
function isPathIgnored(path) {
  if (ignorePattern === null)
    return false;
  const subPath = ppath.contains(runtimeState.basePath, path);
  if (subPath === null)
    return false;
  // The pattern is matched against the sub-path without its trailing slash.
  return ignorePattern.test(subPath.replace(/\/$/, ``));
}
/**
 * This key indicates which version of the standard is implemented by this resolver. The `std` key is the
 * Plug'n'Play standard, and any other key are third-party extensions. Third-party extensions are not allowed
 * to override the standard, and can only offer new methods.
 *
 * If a new version of the Plug'n'Play standard is released and some extensions conflict with newly added
 * functions, they'll just have to fix the conflicts and bump their own version number.
 */
const VERSIONS = {std: 3, resolveVirtual: 1, getAllLocators: 1};
/**
 * We export a special symbol for easy access to the top level locator.
 */
const topLevel = topLevelLocator;
/**
 * Gets the package information for a given locator. Returns null if they cannot be retrieved.
 */
function getPackageInformation({name, reference}) {
  const store = packageRegistry.get(name);
  if (!store)
    return null;
  const info = store.get(reference);
  if (!info)
    return null;
  return info;
}
/**
 * Find all packages that depend on the specified one.
 *
 * Note: This is a private function; we expect consumers to implement it
 * themselves. We keep it that way because this implementation isn't
 * optimized at all, since we only need it when printing errors.
 */
function findPackageDependents({name, reference}) {
  const dependents = [];
  for (const [dependentName, store] of packageRegistry) {
    if (dependentName === null)
      continue;
    for (const [dependentReference, info] of store) {
      if (dependentReference === null)
        continue;
      if (info.packageDependencies.get(name) !== reference)
        continue;
      // Don't forget that all packages depend on themselves
      if (dependentName === name && dependentReference === reference)
        continue;
      dependents.push({
        name: dependentName,
        reference: dependentReference,
      });
    }
  }
  return dependents;
}
/**
 * Find all packages that broke the peer dependency on X, starting from Y.
 *
 * Note: This is a private function; we expect consumers to implement it
 * themselves. We keep it that way because this implementation isn't
 * optimized at all, since we only need it when printing errors.
 */
function findBrokenPeerDependencies(dependency, initialPackage) {
  const brokenPackages = new Map();
  const alreadyVisited = new Set();
  const traversal = currentPackage => {
    const identifier = JSON.stringify(currentPackage.name);
    if (alreadyVisited.has(identifier))
      return;
    alreadyVisited.add(identifier);
    for (const dependent of findPackageDependents(currentPackage)) {
      const dependentInformation = getPackageInformationSafe(dependent);
      if (dependentInformation.packagePeers.has(dependency)) {
        // The dependent forwards the peer request: keep walking upwards.
        traversal(dependent);
      } else {
        // The dependent should have provided the peer but didn't: record it.
        let brokenSet = brokenPackages.get(dependent.name);
        if (typeof brokenSet === `undefined`)
          brokenPackages.set(dependent.name, brokenSet = new Set());
        brokenSet.add(dependent.reference);
      }
    }
  };
  traversal(initialPackage);
  // Deterministic output: sorted by name, then by reference.
  const brokenList = [];
  for (const name of [...brokenPackages.keys()].sort())
    for (const reference of [...brokenPackages.get(name)].sort())
      brokenList.push({name, reference});
  return brokenList;
}
/**
 * Finds the package locator that owns the specified path. If none is found, returns null instead.
 */
function findPackageLocator(location, {
  resolveIgnored = false,
  includeDiscardFromLookup = false,
} = {}) {
  if (isPathIgnored(location) && !resolveIgnored)
    return null;
  // Normalize to a "./"-prefixed, "/"-terminated path relative to the base.
  let relativeLocation = ppath.relative(runtimeState.basePath, location);
  if (!relativeLocation.match(isStrictRegExp))
    relativeLocation = `./${relativeLocation}`;
  if (!relativeLocation.endsWith(`/`))
    relativeLocation = `${relativeLocation}/`;
  // Walk up the directory chain until a registered location matches.
  do {
    const entry = packageLocatorsByLocations.get(relativeLocation);
    if (typeof entry === `undefined` || (entry.discardFromLookup && !includeDiscardFromLookup)) {
      // Strip the last path segment (keeping the trailing slash) and retry.
      relativeLocation = relativeLocation.substring(0, relativeLocation.lastIndexOf(`/`, relativeLocation.length - 2) + 1);
      continue;
    }
    return entry.locator;
  } while (relativeLocation !== ``);
  return null;
}
/**
 * Transforms a request (what's typically passed as argument to the require function) into an unqualified path.
 * This path is called "unqualified" because it only changes the package name to the package location on the disk,
 * which means that the end result still cannot be directly accessed (for example, it doesn't try to resolve the
 * file extension, or to resolve directories to their "index.js" content). Use the "resolveUnqualified" function
 * to convert them to fully-qualified paths, or just use "resolveRequest" that do both operations in one go.
 *
 * Note that it is extremely important that the `issuer` path ends with a forward slash if the issuer is to be
 * treated as a folder (ie. "/tmp/foo/" rather than "/tmp/foo" if "foo" is a directory). Otherwise relative
 * imports won't be computed correctly (they'll get resolved relative to "/tmp/" instead of "/tmp/foo/").
 */
function resolveToUnqualified(request, issuer, {considerBuiltins = true} = {}) {
  // The 'pnpapi' request is reserved and will always return the path to the PnP file, from everywhere
  if (request === `pnpapi`)
    return npath.toPortablePath(opts.pnpapiResolution);
  // Bailout if the request is a native module
  if (considerBuiltins && isBuiltinModule(request))
    return null;
  const requestForDisplay = getPathForDisplay(request);
  const issuerForDisplay = issuer && getPathForDisplay(issuer);
  // We allow disabling the pnp resolution for some subpaths.
  // This is because some projects, often legacy, contain multiple
  // levels of dependencies (ie. a yarn.lock inside a subfolder of
  // a yarn.lock). This is typically solved using workspaces, but
  // not all of them have been converted already.
  if (issuer && isPathIgnored(issuer)) {
    // Absolute paths that seem to belong to a PnP tree are still
    // handled by our runtime even if the issuer isn't. This is
    // because the native Node resolution uses a special version
    // of the `stat` syscall which would otherwise bypass the
    // filesystem layer we require to access the files.
    if (!ppath.isAbsolute(request) || findPackageLocator(request) === null) {
      const result = callNativeResolution(request, issuer);
      if (result === false) {
        throw internalTools_makeError(ErrorCode.BUILTIN_NODE_RESOLUTION_FAILED, `The builtin node resolution algorithm was unable to resolve the requested module (it didn't go through the pnp resolver because the issuer was explicitely ignored by the regexp)\n\nRequire request: "${requestForDisplay}"\nRequired by: ${issuerForDisplay}\n`, {
          request: requestForDisplay,
          issuer: issuerForDisplay,
        });
      }
      return npath.toPortablePath(result);
    }
  }
  let unqualifiedPath;
  // If the request is a relative or absolute path, we just return it normalized
  const dependencyNameMatch = request.match(pathRegExp);
  if (!dependencyNameMatch) {
    if (ppath.isAbsolute(request)) {
      unqualifiedPath = ppath.normalize(request);
    } else {
      if (!issuer) {
        throw internalTools_makeError(ErrorCode.API_ERROR, `The resolveToUnqualified function must be called with a valid issuer when the path isn't a builtin nor absolute`, {
          request: requestForDisplay,
          issuer: issuerForDisplay,
        });
      }
      // We use ppath.join instead of ppath.resolve because:
      // 1) The request is a relative path in this branch
      // 2) ppath.join preserves trailing slashes
      const absoluteIssuer = ppath.resolve(issuer);
      if (issuer.match(isDirRegExp)) {
        unqualifiedPath = ppath.normalize(ppath.join(absoluteIssuer, request));
      } else {
        unqualifiedPath = ppath.normalize(ppath.join(ppath.dirname(absoluteIssuer), request));
      }
    }
  } else {
    // Things are more hairy if it's a package require - we then need to figure out which package is needed, and in
    // particular the exact version for the given location on the dependency tree
    if (!issuer) {
      throw internalTools_makeError(ErrorCode.API_ERROR, `The resolveToUnqualified function must be called with a valid issuer when the path isn't a builtin nor absolute`, {
        request: requestForDisplay,
        issuer: issuerForDisplay,
      });
    }
    const [, dependencyName, subPath] = dependencyNameMatch;
    const issuerLocator = findPackageLocator(issuer);
    // If the issuer file doesn't seem to be owned by a package managed through pnp, then we resort to using the next
    // resolution algorithm in the chain, usually the native Node resolution one
    if (!issuerLocator) {
      const result = callNativeResolution(request, issuer);
      if (result === false) {
        throw internalTools_makeError(ErrorCode.BUILTIN_NODE_RESOLUTION_FAILED, `The builtin node resolution algorithm was unable to resolve the requested module (it didn't go through the pnp resolver because the issuer doesn't seem to be part of the Yarn-managed dependency tree).\n\nRequire path: "${requestForDisplay}"\nRequired by: ${issuerForDisplay}\n`, {
          request: requestForDisplay,
          issuer: issuerForDisplay,
        });
      }
      return npath.toPortablePath(result);
    }
    const issuerInformation = getPackageInformationSafe(issuerLocator);
    // We obtain the dependency reference in regard to the package that request it
    let dependencyReference = issuerInformation.packageDependencies.get(dependencyName);
    let fallbackReference = null;
    // If we can't find it, we check if we can potentially load it from the packages that have been defined as potential fallbacks.
    // It's a bit of a hack, but it improves compatibility with the existing Node ecosystem. Hopefully we should eventually be able
    // to kill this logic and become stricter once pnp gets enough traction and the affected packages fix themselves.
    if (dependencyReference == null) {
      if (issuerLocator.name !== null) {
        // To allow programs to become gradually stricter, starting from the v2 we enforce that workspaces cannot depend on fallbacks.
        // This works by having a list containing all their locators, and checking when a fallback is required whether it's one of them.
        const exclusionEntry = runtimeState.fallbackExclusionList.get(issuerLocator.name);
        const canUseFallbacks = !exclusionEntry || !exclusionEntry.has(issuerLocator.reference);
        if (canUseFallbacks) {
          for (let t = 0, T = fallbackLocators.length; t < T; ++t) {
            const fallbackInformation = getPackageInformationSafe(fallbackLocators[t]);
            const reference = fallbackInformation.packageDependencies.get(dependencyName);
            if (reference == null)
              continue;
            // With PNP_ALWAYS_WARN_ON_FALLBACK the fallback still resolves,
            // but through the warning-emitting path below.
            if (alwaysWarnOnFallback)
              fallbackReference = reference;
            else
              dependencyReference = reference;
            break;
          }
          if (runtimeState.enableTopLevelFallback) {
            if (dependencyReference == null && fallbackReference === null) {
              const reference = runtimeState.fallbackPool.get(dependencyName);
              if (reference != null) {
                fallbackReference = reference;
              }
            }
          }
        }
      }
    }
    // If we can't find the path, and if the package making the request is the top-level, we can offer nicer error messages
    let error = null;
    if (dependencyReference === null) {
      // An explicit `null` reference marks an unfulfilled peer dependency.
      if (isDependencyTreeRoot(issuerLocator)) {
        error = internalTools_makeError(ErrorCode.MISSING_PEER_DEPENDENCY, `Your application tried to access ${dependencyName} (a peer dependency); this isn't allowed as there is no ancestor to satisfy the requirement. Use a devDependency if needed.\n\nRequired package: ${dependencyName}${dependencyName !== requestForDisplay ? ` (via "${requestForDisplay}")` : ``}\nRequired by: ${issuerForDisplay}\n`, {
          request: requestForDisplay,
          issuer: issuerForDisplay,
          dependencyName,
        });
      } else {
        const brokenAncestors = findBrokenPeerDependencies(dependencyName, issuerLocator);
        if (brokenAncestors.every(ancestor => isDependencyTreeRoot(ancestor))) {
          error = internalTools_makeError(ErrorCode.MISSING_PEER_DEPENDENCY, `${issuerLocator.name} tried to access ${dependencyName} (a peer dependency) but it isn't provided by your application; this makes the require call ambiguous and unsound.\n\nRequired package: ${dependencyName}${dependencyName !== requestForDisplay ? ` (via "${requestForDisplay}")` : ``}\nRequired by: ${issuerLocator.name}@${issuerLocator.reference} (via ${issuerForDisplay})\n${brokenAncestors.map(ancestorLocator => `Ancestor breaking the chain: ${ancestorLocator.name}@${ancestorLocator.reference}\n`).join(``)}\n`, {
            request: requestForDisplay,
            issuer: issuerForDisplay,
            issuerLocator: Object.assign({}, issuerLocator),
            dependencyName,
            brokenAncestors,
          });
        } else {
          error = internalTools_makeError(ErrorCode.MISSING_PEER_DEPENDENCY, `${issuerLocator.name} tried to access ${dependencyName} (a peer dependency) but it isn't provided by its ancestors; this makes the require call ambiguous and unsound.\n\nRequired package: ${dependencyName}${dependencyName !== requestForDisplay ? ` (via "${requestForDisplay}")` : ``}\nRequired by: ${issuerLocator.name}@${issuerLocator.reference} (via ${issuerForDisplay})\n\n${brokenAncestors.map(ancestorLocator => `Ancestor breaking the chain: ${ancestorLocator.name}@${ancestorLocator.reference}\n`).join(``)}\n`, {
            request: requestForDisplay,
            issuer: issuerForDisplay,
            issuerLocator: Object.assign({}, issuerLocator),
            dependencyName,
            brokenAncestors,
          });
        }
      }
    } else if (dependencyReference === undefined) {
      // `undefined` means the dependency simply isn't declared at all.
      if (!considerBuiltins && isBuiltinModule(request)) {
        if (isDependencyTreeRoot(issuerLocator)) {
          error = internalTools_makeError(ErrorCode.UNDECLARED_DEPENDENCY, `Your application tried to access ${dependencyName}. While this module is usually interpreted as a Node builtin, your resolver is running inside a non-Node resolution context where such builtins are ignored. Since ${dependencyName} isn't otherwise declared in your dependencies, this makes the require call ambiguous and unsound.\n\nRequired package: ${dependencyName}${dependencyName !== requestForDisplay ? ` (via "${requestForDisplay}")` : ``}\nRequired by: ${issuerForDisplay}\n`, {
            request: requestForDisplay,
            issuer: issuerForDisplay,
            dependencyName,
          });
        } else {
          error = internalTools_makeError(ErrorCode.UNDECLARED_DEPENDENCY, `${issuerLocator.name} tried to access ${dependencyName}. While this module is usually interpreted as a Node builtin, your resolver is running inside a non-Node resolution context where such builtins are ignored. Since ${dependencyName} isn't otherwise declared in ${issuerLocator.name}'s dependencies, this makes the require call ambiguous and unsound.\n\nRequired package: ${dependencyName}${dependencyName !== requestForDisplay ? ` (via "${requestForDisplay}")` : ``}\nRequired by: ${issuerForDisplay}\n`, {
            request: requestForDisplay,
            issuer: issuerForDisplay,
            issuerLocator: Object.assign({}, issuerLocator),
            dependencyName,
          });
        }
      } else {
        if (isDependencyTreeRoot(issuerLocator)) {
          error = internalTools_makeError(ErrorCode.UNDECLARED_DEPENDENCY, `Your application tried to access ${dependencyName}, but it isn't declared in your dependencies; this makes the require call ambiguous and unsound.\n\nRequired package: ${dependencyName}${dependencyName !== requestForDisplay ? ` (via "${requestForDisplay}")` : ``}\nRequired by: ${issuerForDisplay}\n`, {
            request: requestForDisplay,
            issuer: issuerForDisplay,
            dependencyName,
          });
        } else {
          error = internalTools_makeError(ErrorCode.UNDECLARED_DEPENDENCY, `${issuerLocator.name} tried to access ${dependencyName}, but it isn't declared in its dependencies; this makes the require call ambiguous and unsound.\n\nRequired package: ${dependencyName}${dependencyName !== requestForDisplay ? ` (via "${requestForDisplay}")` : ``}\nRequired by: ${issuerLocator.name}@${issuerLocator.reference} (via ${issuerForDisplay})\n`, {
            request: requestForDisplay,
            issuer: issuerForDisplay,
            issuerLocator: Object.assign({}, issuerLocator),
            dependencyName,
          });
        }
      }
    }
    if (dependencyReference == null) {
      // No regular resolution: either fail with the error built above, or use
      // the fallback and surface the error as a (deduplicated) warning.
      if (fallbackReference === null || error === null)
        throw error || new Error(`Assertion failed: Expected an error to have been set`);
      dependencyReference = fallbackReference;
      const message = error.message.replace(/\n.*/g, ``);
      error.message = message;
      if (!emittedWarnings.has(message) && debugLevel !== 0) {
        emittedWarnings.add(message);
        process.emitWarning(error);
      }
    }
    // We need to check that the package exists on the filesystem, because it might not have been installed
    const dependencyLocator = Array.isArray(dependencyReference)
      ? {name: dependencyReference[0], reference: dependencyReference[1]}
      : {name: dependencyName, reference: dependencyReference};
    const dependencyInformation = getPackageInformationSafe(dependencyLocator);
    if (!dependencyInformation.packageLocation) {
      throw internalTools_makeError(ErrorCode.MISSING_DEPENDENCY, `A dependency seems valid but didn't get installed for some reason. This might be caused by a partial install, such as dev vs prod.\n\nRequired package: ${dependencyLocator.name}@${dependencyLocator.reference}${dependencyLocator.name !== requestForDisplay ? ` (via "${requestForDisplay}")` : ``}\nRequired by: ${issuerLocator.name}@${issuerLocator.reference} (via ${issuerForDisplay})\n`, {
        request: requestForDisplay,
        issuer: issuerForDisplay,
        dependencyLocator: Object.assign({}, dependencyLocator),
      });
    }
    // Now that we know which package we should resolve to, we only have to find out the file location
    // packageLocation is always absolute as it's returned by getPackageInformationSafe
    const dependencyLocation = dependencyInformation.packageLocation;
    if (subPath) {
      // We use ppath.join instead of ppath.resolve because:
      // 1) subPath is always a relative path
      // 2) ppath.join preserves trailing slashes
      unqualifiedPath = ppath.join(dependencyLocation, subPath);
    } else {
      unqualifiedPath = dependencyLocation;
    }
  }
  return ppath.normalize(unqualifiedPath);
}
/**
 * Remaps an unqualified path through the package's `exports` field, when
 * applicable. Per the original note: "exports" only apply when requiring a
 * package, not when requiring via an absolute / relative path.
 */
function resolveUnqualifiedExport(request, unqualifiedPath) {
  // Absolute / relative requests (matched by isStrictRegExp) bypass "exports".
  if (isStrictRegExp.test(request)) {
    return unqualifiedPath;
  }
  const exportPath = applyNodeExportsResolution(unqualifiedPath);
  // When no "exports" entry matched, fall back to the unqualified path as-is.
  return exportPath ? ppath.normalize(exportPath) : unqualifiedPath;
}
/**
 * Transforms an unqualified path into a qualified path by using the Node resolution algorithm (which automatically
 * appends ".js" / ".json", and transforms directory accesses into "index.js").
 *
 * Throws a QUALIFIED_PATH_RESOLUTION_FAILED error when no candidate exists on
 * disk, with a more actionable message when the containing package folder
 * itself is missing (typical after a branch switch without reinstalling).
 */
function resolveUnqualified(unqualifiedPath, {
  extensions = Object.keys(external_module_.Module._extensions)
} = {}) {
  // Every path variant the resolution tries is recorded here so the failure
  // message below can list them all.
  const candidates = [];
  const qualifiedPath = applyNodeExtensionResolution(unqualifiedPath, candidates, {
    extensions
  });
  if (qualifiedPath) {
    return ppath.normalize(qualifiedPath);
  } else {
    const unqualifiedPathForDisplay = getPathForDisplay(unqualifiedPath);
    const containingPackage = findPackageLocator(unqualifiedPath);
    if (containingPackage) {
      const {
        packageLocation
      } = getPackageInformationSafe(containingPackage);
      // If the whole package folder is absent from disk, report that rather
      // than a generic per-file resolution failure.
      if (!opts.fakeFs.existsSync(packageLocation)) {
        const errorMessage = packageLocation.includes(`/unplugged/`) ? `Required unplugged package missing from disk. This may happen when switching branches without running installs (unplugged packages must be fully materialized on disk to work).` : `Required package missing from disk. If you keep your packages inside your repository then restarting the Node process may be enough. Otherwise, try to run an install first.`;
        throw internalTools_makeError(ErrorCode.QUALIFIED_PATH_RESOLUTION_FAILED, `${errorMessage}\n\nMissing package: ${containingPackage.name}@${containingPackage.reference}\nExpected package location: ${getPathForDisplay(packageLocation)}\n`, {
          unqualifiedPath: unqualifiedPathForDisplay
        });
      }
    }
    throw internalTools_makeError(ErrorCode.QUALIFIED_PATH_RESOLUTION_FAILED, `Qualified path resolution failed - none of those files can be found on the disk.\n\nSource path: ${unqualifiedPathForDisplay}\n${candidates.map(candidate => `Not found: ${getPathForDisplay(candidate)}\n`).join(``)}`, {
      unqualifiedPath: unqualifiedPathForDisplay
    });
  }
}
/**
 * Transforms a request into a fully qualified path.
 *
 * Note that it is extremely important that the `issuer` path ends with a forward slash if the issuer is to be
 * treated as a folder (ie. "/tmp/foo/" rather than "/tmp/foo" if "foo" is a directory). Otherwise relative
 * imports won't be computed correctly (they'll get resolved relative to "/tmp/" instead of "/tmp/foo/").
 */
function resolveRequest(request, issuer, {
  considerBuiltins,
  extensions
} = {}) {
  const unqualifiedPath = resolveToUnqualified(request, issuer, {
    considerBuiltins
  });
  if (unqualifiedPath === null) return null;
  const issuerIsIgnored = () => issuer !== null && isPathIgnored(issuer);
  // "exports" remapping is skipped both for builtin requests (when builtins
  // are being considered) and for requests coming from ignored issuers.
  let remappedPath = unqualifiedPath;
  if ((!considerBuiltins || !isBuiltinModule(request)) && !issuerIsIgnored()) {
    remappedPath = resolveUnqualifiedExport(request, unqualifiedPath);
  }
  try {
    return resolveUnqualified(remappedPath, {
      extensions
    });
  } catch (resolutionError) {
    // Enrich qualified-resolution failures with the original request/issuer
    // (for display) before letting the error propagate.
    if (resolutionError.pnpCode === `QUALIFIED_PATH_RESOLUTION_FAILED`) {
      Object.assign(resolutionError.data, {
        request: getPathForDisplay(request),
        issuer: issuer && getPathForDisplay(issuer)
      });
    }
    throw resolutionError;
  }
}
// Maps a virtual path back to its concrete location. Returns null when the
// input wasn't a virtual path (i.e. resolution left it unchanged).
function resolveVirtual(request) {
  const normalized = ppath.normalize(request);
  const resolved = VirtualFS.resolveVirtual(normalized);
  if (resolved === normalized) {
    return null;
  }
  return resolved;
}
// Public surface of the PnP API. Entry points that take or return paths
// convert between native and portable representations at the boundary, and
// the resolvers are wrapped with optional debug logging via maybeLog.
return {
  VERSIONS,
  topLevel,
  // Builds a locator; `referencish` is either a plain reference string or a
  // [name, reference] tuple (used for aliased dependencies).
  getLocator: (name, referencish) => {
    if (Array.isArray(referencish)) {
      return {
        name: referencish[0],
        reference: referencish[1]
      };
    } else {
      return {
        name,
        reference: referencish
      };
    }
  },
  getDependencyTreeRoots: () => {
    // Returns a copy so callers can't mutate the runtime state.
    return [...runtimeState.dependencyTreeRoots];
  },
  getAllLocators() {
    const locators = [];
    // Entries with a null name or reference are internal placeholders and are
    // not exposed as locators.
    for (const [name, entry] of packageRegistry) for (const reference of entry.keys()) if (name !== null && reference !== null) locators.push({
      name,
      reference
    });
    return locators;
  },
  getPackageInformation: locator => {
    const info = getPackageInformation(locator);
    if (info === null) return null;
    // Expose a native (platform-specific) package location to consumers.
    const packageLocation = npath.fromPortablePath(info.packageLocation);
    const nativeInfo = { ...info,
      packageLocation
    };
    return nativeInfo;
  },
  findPackageLocator: path => {
    return findPackageLocator(npath.toPortablePath(path));
  },
  resolveToUnqualified: maybeLog(`resolveToUnqualified`, (request, issuer, opts) => {
    const portableIssuer = issuer !== null ? npath.toPortablePath(issuer) : null;
    const resolution = resolveToUnqualified(npath.toPortablePath(request), portableIssuer, opts);
    if (resolution === null) return null;
    return npath.fromPortablePath(resolution);
  }),
  resolveUnqualified: maybeLog(`resolveUnqualified`, (unqualifiedPath, opts) => {
    return npath.fromPortablePath(resolveUnqualified(npath.toPortablePath(unqualifiedPath), opts));
  }),
  resolveRequest: maybeLog(`resolveRequest`, (request, issuer, opts) => {
    const portableIssuer = issuer !== null ? npath.toPortablePath(issuer) : null;
    const resolution = resolveRequest(npath.toPortablePath(request), portableIssuer, opts);
    if (resolution === null) return null;
    return npath.fromPortablePath(resolution);
  }),
  resolveVirtual: maybeLog(`resolveVirtual`, path => {
    const result = resolveVirtual(npath.toPortablePath(path));
    if (result !== null) {
      return npath.fromPortablePath(result);
    } else {
      return null;
    }
  })
};
; // CONCATENATED MODULE: ./sources/loader/makeManager.ts
/**
 * Creates the manager that tracks every PnP API instance active in the
 * process: the initial one plus any other PnP files discovered on disk (e.g.
 * sibling projects). Also handles reloading an API instance when its backing
 * PnP file changes on disk.
 */
function makeManager(pnpapi, opts) {
  const initialApiPath = npath.toPortablePath(pnpapi.resolveToUnqualified(`pnpapi`, null));
  const initialApiStats = opts.fakeFs.statSync(npath.toPortablePath(initialApiPath));
  // Per-API bookkeeping: module cache, live API instance, file stats (used to
  // detect on-disk changes), and the time of the last refresh check.
  const apiMetadata = new Map([[initialApiPath, {
    cache: external_module_.Module._cache,
    instance: pnpapi,
    stats: initialApiStats,
    lastRefreshCheck: Date.now()
  }]]);
  // Loads a PnP file as a fresh Module instance and returns its exports.
  function loadApiInstance(pnpApiPath) {
    const nativePath = npath.fromPortablePath(pnpApiPath); // @ts-expect-error
    const module = new external_module_.Module(nativePath, null); // @ts-expect-error
    module.load(nativePath);
    return module.exports;
  }
  // Reloads the API when its backing file changed on disk; stat calls are
  // throttled to at most one per 500ms per API.
  function refreshApiEntry(pnpApiPath, apiEntry) {
    const timeNow = Date.now();
    if (timeNow - apiEntry.lastRefreshCheck < 500) return;
    apiEntry.lastRefreshCheck = timeNow;
    const stats = opts.fakeFs.statSync(pnpApiPath);
    if (stats.mtime > apiEntry.stats.mtime) {
      process.emitWarning(`[Warning] The runtime detected new informations in a PnP file; reloading the API instance (${npath.fromPortablePath(pnpApiPath)})`);
      apiEntry.stats = stats;
      apiEntry.instance = loadApiInstance(pnpApiPath);
    }
  }
  // Returns (lazily creating) the metadata entry for the given PnP file.
  function getApiEntry(pnpApiPath, refresh = false) {
    let apiEntry = apiMetadata.get(pnpApiPath);
    if (typeof apiEntry !== `undefined`) {
      if (refresh) {
        refreshApiEntry(pnpApiPath, apiEntry);
      }
    } else {
      apiMetadata.set(pnpApiPath, apiEntry = {
        cache: {},
        instance: loadApiInstance(pnpApiPath),
        stats: opts.fakeFs.statSync(pnpApiPath),
        lastRefreshCheck: Date.now()
      });
    }
    return apiEntry;
  }
  // Maps directories to the PnP file controlling them (null = none found).
  const findApiPathCache = new Map();
  // Records `target` for every directory from `start` up to and including
  // `end`, then returns it.
  function addToCacheAndReturn(start, end, target) {
    if (target !== null) target = VirtualFS.resolveVirtual(target);
    let curr;
    let next = start;
    do {
      curr = next;
      findApiPathCache.set(curr, target);
      next = ppath.dirname(curr);
    } while (curr !== end);
    return target;
  }
  // Determines which PnP file (if any) controls `modulePath`: first by asking
  // the already-known API instances, then by walking the filesystem upwards
  // looking for a PnP file.
  function findApiPathFor(modulePath) {
    let bestCandidate = null;
    for (const [apiPath, apiEntry] of apiMetadata) {
      const locator = apiEntry.instance.findPackageLocator(modulePath);
      if (!locator) continue; // No need to go the slow way when there's a single API
      if (apiMetadata.size === 1) return apiPath;
      const packageInformation = apiEntry.instance.getPackageInformation(locator);
      if (!packageInformation) throw new Error(`Assertion failed: Couldn't get package information for '${modulePath}'`);
      if (!bestCandidate) bestCandidate = {
        packageLocation: packageInformation.packageLocation,
        apiPaths: []
      };
      // Keep the API whose owning package location is the longest (deepest);
      // multiple APIs claiming the exact same location is ambiguous.
      if (packageInformation.packageLocation === bestCandidate.packageLocation) {
        bestCandidate.apiPaths.push(apiPath);
      } else if (packageInformation.packageLocation.length > bestCandidate.packageLocation.length) {
        bestCandidate = {
          packageLocation: packageInformation.packageLocation,
          apiPaths: [apiPath]
        };
      }
    }
    if (bestCandidate) {
      if (bestCandidate.apiPaths.length === 1) return bestCandidate.apiPaths[0];
      const controlSegment = bestCandidate.apiPaths.map(apiPath => `${npath.fromPortablePath(apiPath)}`).join(`\n`);
      throw new Error(`Unable to locate pnpapi, the module '${modulePath}' is controlled by multiple pnpapi instances.\nThis is usually caused by using the global cache (enableGlobalCache: true)\n\nControlled by:\n${controlSegment}\n`);
    }
    // Filesystem fallback: walk up from the module path until a PnP file or
    // the filesystem root is reached, caching every directory visited.
    const start = ppath.resolve(npath.toPortablePath(modulePath));
    let curr;
    let next = start;
    do {
      curr = next;
      const cached = findApiPathCache.get(curr);
      if (cached !== undefined) return addToCacheAndReturn(start, curr, cached);
      const cjsCandidate = ppath.join(curr, Filename.pnpCjs);
      if (opts.fakeFs.existsSync(cjsCandidate) && opts.fakeFs.statSync(cjsCandidate).isFile()) return addToCacheAndReturn(start, curr, cjsCandidate); // We still support .pnp.js files to improve multi-project compatibility.
      // TODO: Remove support for .pnp.js files after they stop being used.
      const legacyCjsCandidate = ppath.join(curr, Filename.pnpJs);
      if (opts.fakeFs.existsSync(legacyCjsCandidate) && opts.fakeFs.statSync(legacyCjsCandidate).isFile()) return addToCacheAndReturn(start, curr, legacyCjsCandidate);
      next = ppath.dirname(curr);
    } while (curr !== PortablePath.root);
    return addToCacheAndReturn(start, curr, null);
  }
  // Resolves which PnP file should serve requests made by `parent`, caching
  // the answer on the module object itself (parent.pnpApiPath).
  function getApiPathFromParent(parent) {
    if (parent == null) return initialApiPath;
    if (typeof parent.pnpApiPath === `undefined`) {
      if (parent.filename !== null) {
        return parent.pnpApiPath = findApiPathFor(parent.filename);
      } else {
        return initialApiPath;
      }
    }
    if (parent.pnpApiPath !== null) return parent.pnpApiPath;
    return null;
  }
  return {
    getApiPathFromParent,
    findApiPathFor,
    getApiEntry
  };
}
; // CONCATENATED MODULE: ./sources/loader/_entryPoint.ts
// We must copy the fs into a local, because otherwise
// 1. we would make the NodeFS instance use the function that we patched (infinite loop)
// 2. Object.create(fs) isn't enough, since it won't prevent the proto from being modified
const localFs = { ...external_fs_default()
};
const nodeFs = new NodeFS(localFs);
// Runtime state hydrated from the data table embedded at the top of this file
// (no basePath: the default location is used).
const defaultRuntimeState = $$SETUP_STATE(hydrateRuntimeState);
const defaultPnpapiResolution = __filename; // We create a virtual filesystem that will do three things:
// 1. all requests inside a folder named "__virtual___" will be remapped according the virtual folder rules
// 2. all requests going inside a Zip archive will be handled by the Zip fs implementation
// 3. any remaining request will be forwarded to Node as-is
const defaultFsLayer = new VirtualFS({
  baseFs: new ZipOpenFS({
    baseFs: nodeFs,
    libzip: () => getLibzipSync(),
    maxOpenFiles: 80,
    readOnlyArchives: true
  })
});
// Assigned after defaultApi exists (makeManager needs the API); `setup` below
// closes over it.
let manager;
const defaultApi = Object.assign(makeApi(defaultRuntimeState, {
  fakeFs: defaultFsLayer,
  pnpapiResolution: defaultPnpapiResolution
}), {
  /**
   * Can be used to generate a different API than the default one (for example
   * to map it on `/` rather than the local directory path, or to use a
   * different FS layer than the default one).
   */
  makeApi: ({
    basePath = undefined,
    fakeFs = defaultFsLayer,
    pnpapiResolution = defaultPnpapiResolution,
    ...rest
  }) => {
    // Only rehydrate the state when a custom basePath is requested; otherwise
    // reuse the state computed at load time.
    const apiRuntimeState = typeof basePath !== `undefined` ? $$SETUP_STATE(hydrateRuntimeState, basePath) : defaultRuntimeState;
    return makeApi(apiRuntimeState, {
      fakeFs,
      pnpapiResolution,
      ...rest
    });
  },
  /**
   * Will inject the specified API into the environment, monkey-patching FS. Is
   * automatically called when the hook is loaded through `--require`.
   */
  setup: api => {
    applyPatch(api || defaultApi, {
      fakeFs: defaultFsLayer,
      manager
    });
  }
});
// The manager tracks every PnP API instance alive in the process.
manager = makeManager(defaultApi, {
  fakeFs: defaultFsLayer
}); // eslint-disable-next-line arca/no-default-export
/* harmony default export */ const _entryPoint = (defaultApi);
// When this file is loaded through `--require` (Node preloads it), install
// the PnP hooks immediately.
if (__non_webpack_module__.parent && __non_webpack_module__.parent.id === `internal/preload`) {
  defaultApi.setup();
  if (__non_webpack_module__.filename) {
    // We delete it from the cache in order to support the case where the CLI resolver is invoked from "yarn run"
    // It's annoying because it might cause some issues when the file is multiple times in NODE_OPTIONS, but it shouldn't happen anyway.
    delete (external_module_default())._cache[__non_webpack_module__.filename];
  }
}
// When executed directly, act as a standalone resolver: requests come either
// from argv (<request> <issuer>) or as newline-delimited JSON pairs on stdin,
// and each result is written to stdout as a JSON [error, resolution] tuple.
if (process.mainModule === __non_webpack_module__) {
  const reportError = (code, message, data) => {
    process.stdout.write(`${JSON.stringify([{
      code,
      message,
      data
    }, null])}\n`);
  };
  const reportSuccess = resolution => {
    process.stdout.write(`${JSON.stringify([null, resolution])}\n`);
  };
  const processResolution = (request, issuer) => {
    try {
      reportSuccess(defaultApi.resolveRequest(request, issuer));
    } catch (error) {
      reportError(error.code, error.message, error.data);
    }
  };
  const processRequest = data => {
    try {
      const [request, issuer] = JSON.parse(data);
      processResolution(request, issuer);
    } catch (error) {
      reportError(`INVALID_JSON`, error.message, error.data);
    }
  };
  if (process.argv.length > 2) {
    if (process.argv.length !== 4) {
      process.stderr.write(`Usage: ${process.argv[0]} ${process.argv[1]} <request> <issuer>\n`);
      process.exitCode = 64;
      /* EX_USAGE */
    } else {
      processResolution(process.argv[2], process.argv[3]);
    }
  } else {
    // stdin mode: buffer incoming chunks (decoded so multi-byte characters
    // split across chunks survive) and process each complete line.
    let buffer = ``;
    const decoder = new (external_string_decoder_default()).StringDecoder();
    process.stdin.on(`data`, chunk => {
      buffer += decoder.write(chunk);
      do {
        const index = buffer.indexOf(`\n`);
        if (index === -1) break;
        const line = buffer.slice(0, index);
        buffer = buffer.slice(index + 1);
        processRequest(line);
      } while (true);
    });
  }
}
} ) ( ) ;
_ _webpack _exports _ _ = _ _webpack _exports _ _ . default ;
/******/ return _ _webpack _exports _ _ ;
/******/ } ) ( )
;
} ) ;