Removes unnecessary folders

Author: Andy Tudhope, 2018-12-03 13:03:45 +02:00
Commit: a81065e67e (parent 3f24e2f3ef)
Signature: GPG Key ID 02A3DFA93BF26AD2 (no known key found for this signature in database)
27627 changed files with 0 additions and 2844908 deletions

File diff suppressed because one or more lines are too long

Deleted symlinks under node_modules/.bin/ (each was a one-line, generated, vendored file pointing at its package's executable; entries whose names were not captured in this view are marked "(name not shown)"):

node_modules/.bin/JSONStream -> ../JSONStream/bin.js
node_modules/.bin/acorn -> ../acorn/bin/acorn
node_modules/.bin/atob -> ../atob/bin/atob.js
node_modules/.bin/babylon -> ../babylon/bin/babylon.js
(name not shown) -> ../bin-version-check/cli.js
node_modules/.bin/browser-pack -> ../browser-pack/bin/cmd.js
node_modules/.bin/browser-sync -> ../browser-sync/dist/bin.js
node_modules/.bin/browserify -> ../browserify/bin/cmd.js
node_modules/.bin/bunyan -> ../hexo-bunyan/bin/bunyan
node_modules/.bin/cleancss -> ../clean-css/bin/cleancss
node_modules/.bin/color-support -> ../color-support/bin.js
node_modules/.bin/deps-sort -> ../deps-sort/bin/cmd.js
node_modules/.bin/detective -> ../detective/bin/detective.js
node_modules/.bin/dev-ip -> ../dev-ip/lib/dev-ip.js
node_modules/.bin/escodegen -> ../escodegen/bin/escodegen.js
node_modules/.bin/esgenerate -> ../escodegen/bin/esgenerate.js
node_modules/.bin/eslint -> ../eslint/bin/eslint.js
node_modules/.bin/esparse -> ../esprima/bin/esparse.js
node_modules/.bin/esvalidate -> ../esprima/bin/esvalidate.js
node_modules/.bin/executable -> ../executable/cli.js
node_modules/.bin/exorcist -> ../exorcist/bin/exorcist.js
node_modules/.bin/find-versions -> ../find-versions/cli.js
node_modules/.bin/gifsicle -> ../gifsicle/cli.js
node_modules/.bin/gulp -> ../gulp/bin/gulp.js
node_modules/.bin/hexo -> ../hexo/bin/hexo
node_modules/.bin/in-install -> ../in-publish/in-install.js
node_modules/.bin/in-publish -> ../in-publish/in-publish.js
(name not shown) -> ../insert-module-globals/bin/cmd.js
node_modules/.bin/jade -> ../jade/bin/jade.js
node_modules/.bin/jpegtran -> ../jpegtran-bin/cli.js
node_modules/.bin/js-yaml -> ../js-yaml/bin/js-yaml.js
node_modules/.bin/jsesc -> ../jsesc/bin/jsesc
node_modules/.bin/json5 -> ../json5/lib/cli.js
node_modules/.bin/loose-envify -> ../loose-envify/cli.js
node_modules/.bin/lpad-align -> ../lpad-align/cli.js
node_modules/.bin/lt -> ../localtunnel/bin/client
node_modules/.bin/marked -> ../marked/bin/marked
node_modules/.bin/md2html -> ../markdown/bin/md2html.js
node_modules/.bin/miller-rabin -> ../miller-rabin/bin/miller-rabin
node_modules/.bin/mime -> ../mime/cli.js
node_modules/.bin/mkdirp -> ../mkdirp/bin/cmd.js
node_modules/.bin/module-deps -> ../module-deps/bin/cmd.js
node_modules/.bin/ncp -> ../ncp/bin/ncp
node_modules/.bin/node-gyp -> ../node-gyp/bin/node-gyp.js
node_modules/.bin/node-sass -> ../node-sass/bin/node-sass
node_modules/.bin/nopt -> ../nopt/bin/nopt.js
node_modules/.bin/not-in-install -> ../in-publish/not-in-install.js
node_modules/.bin/not-in-publish -> ../in-publish/not-in-publish.js
(name not shown) -> ../nunjucks/bin/precompile
node_modules/.bin/optipng -> ../optipng-bin/cli.js
(name not shown) -> ../postinstall-build/cli.js
node_modules/.bin/qrcode -> ../qrcode/bin/qrcode
node_modules/.bin/rc -> ../rc/cli.js
node_modules/.bin/rimraf -> ../rimraf/bin.js
node_modules/.bin/sassgraph -> ../sass-graph/bin/sassgraph
node_modules/.bin/seek-bunzip -> ../seek-bzip/bin/seek-bunzip
node_modules/.bin/seek-table -> ../seek-bzip/bin/seek-bzip-table
node_modules/.bin/semver -> ../semver/bin/semver
node_modules/.bin/sha.js -> ../sha.js/bin.js
node_modules/.bin/sshpk-conv -> ../sshpk/bin/sshpk-conv
node_modules/.bin/sshpk-sign -> ../sshpk/bin/sshpk-sign
node_modules/.bin/sshpk-verify -> ../sshpk/bin/sshpk-verify
node_modules/.bin/strip-bom -> ../strip-bom/cli.js
node_modules/.bin/strip-dirs -> ../strip-dirs/cli.js
node_modules/.bin/stylus -> ../stylus/bin/stylus
node_modules/.bin/svgo -> ../svgo/bin/svgo
node_modules/.bin/swig -> ../swig-templates/bin/swig.js
node_modules/.bin/terser -> ../terser/bin/uglifyjs
node_modules/.bin/throttleproxy -> ../stream-throttle/bin/throttleproxy.js
node_modules/.bin/to-title-case -> ../titlecase/bin.js
node_modules/.bin/uglifyjs -> ../uglify-js/bin/uglifyjs
node_modules/.bin/umd -> ../umd/bin/cli.js
(name not shown) -> ../undeclared-identifiers/bin.js
node_modules/.bin/user-home -> ../user-home/cli.js
node_modules/.bin/uuid -> ../uuid/bin/uuid
node_modules/.bin/watchify -> ../watchify/bin/cmd.js
node_modules/.bin/which -> ../which/bin/which
node_modules/.bin/window-size -> ../window-size/cli.js


@ -1,8 +0,0 @@
language: node_js
node_js:
- 4
- 5
- 6
sudo: false


@ -1,15 +0,0 @@
Apache License, Version 2.0
Copyright (c) 2011 Dominic Tarr
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

node_modules/JSONStream/LICENSE.MIT (generated, vendored; 24 lines deleted)

@ -1,24 +0,0 @@
The MIT License
Copyright (c) 2011 Dominic Tarr
Permission is hereby granted, free of charge,
to any person obtaining a copy of this software and
associated documentation files (the "Software"), to
deal in the Software without restriction, including
without limitation the rights to use, copy, modify,
merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom
the Software is furnished to do so,
subject to the following conditions:
The above copyright notice and this permission notice
shall be included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR
ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

node_modules/JSONStream/bin.js (generated, vendored; 12 lines deleted)

@ -1,12 +0,0 @@
#! /usr/bin/env node
var JSONStream = require('./')
if(!module.parent && process.title !== 'browser') {
process.stdin
.pipe(JSONStream.parse(process.argv[2]))
.pipe(JSONStream.stringify('[', ',\n', ']\n', 2))
.pipe(process.stdout)
}


@ -1,13 +0,0 @@
var request = require('request')
, JSONStream = require('JSONStream')
, es = require('event-stream')
var parser = JSONStream.parse(['rows', true]) //emit parts that match this path (any element of the rows array)
, req = request({url: 'http://isaacs.couchone.com/registry/_all_docs'})
, logger = es.mapSync(function (data) { //create a stream that logs to stderr,
console.error(data)
return data
})
req.pipe(parser)
parser.pipe(logger)

node_modules/JSONStream/index.js (generated, vendored; 247 lines deleted)

@ -1,247 +0,0 @@
'use strict'
var Parser = require('jsonparse')
, through = require('through')
var bufferFrom = Buffer.from && Buffer.from !== Uint8Array.from
/*
the value of this.stack that creationix's jsonparse has is weird.
it makes this code ugly, but his problem is way harder than mine,
so i'll forgive him.
*/
exports.parse = function (path, map) {
var header, footer
var parser = new Parser()
var stream = through(function (chunk) {
if('string' === typeof chunk)
chunk = bufferFrom ? Buffer.from(chunk) : new Buffer(chunk)
parser.write(chunk)
},
function (data) {
if(data)
stream.write(data)
if (header)
stream.emit('header', header)
if (footer)
stream.emit('footer', footer)
stream.queue(null)
})
if('string' === typeof path)
path = path.split('.').map(function (e) {
if (e === '$*')
return {emitKey: true}
else if (e === '*')
return true
else if (e === '') // '..'.split('.') returns an empty string
return {recurse: true}
else
return e
})
var count = 0, _key
if(!path || !path.length)
path = null
parser.onValue = function (value) {
if (!this.root)
stream.root = value
if(! path) return
var i = 0 // iterates on path
var j = 0 // iterates on stack
var emitKey = false;
var emitPath = false;
while (i < path.length) {
var key = path[i]
var c
j++
if (key && !key.recurse) {
c = (j === this.stack.length) ? this : this.stack[j]
if (!c) return
if (! check(key, c.key)) {
setHeaderFooter(c.key, value)
return
}
emitKey = !!key.emitKey;
emitPath = !!key.emitPath;
i++
} else {
i++
var nextKey = path[i]
if (! nextKey) return
while (true) {
c = (j === this.stack.length) ? this : this.stack[j]
if (!c) return
if (check(nextKey, c.key)) {
i++;
if (!Object.isFrozen(this.stack[j]))
this.stack[j].value = null
break
} else {
setHeaderFooter(c.key, value)
}
j++
}
}
}
// emit header
if (header) {
stream.emit('header', header);
header = false;
}
if (j !== this.stack.length) return
count ++
var actualPath = this.stack.slice(1).map(function(element) { return element.key }).concat([this.key])
var data = value
if(null != data)
if(null != (data = map ? map(data, actualPath) : data)) {
if (emitKey || emitPath) {
data = { value: data };
if (emitKey)
data["key"] = this.key;
if (emitPath)
data["path"] = actualPath;
}
stream.queue(data)
}
if (this.value) delete this.value[this.key]
for(var k in this.stack)
if (!Object.isFrozen(this.stack[k]))
this.stack[k].value = null
}
parser._onToken = parser.onToken;
parser.onToken = function (token, value) {
parser._onToken(token, value);
if (this.stack.length === 0) {
if (stream.root) {
if(!path)
stream.queue(stream.root)
count = 0;
stream.root = null;
}
}
}
parser.onError = function (err) {
if(err.message.indexOf("at position") > -1)
err.message = "Invalid JSON (" + err.message + ")";
stream.emit('error', err)
}
return stream
function setHeaderFooter(key, value) {
// header has not been emitted yet
if (header !== false) {
header = header || {}
header[key] = value
}
// footer has not been emitted yet but header has
if (footer !== false && header === false) {
footer = footer || {}
footer[key] = value
}
}
}
function check (x, y) {
if ('string' === typeof x)
return y == x
else if (x && 'function' === typeof x.exec)
return x.exec(y)
else if ('boolean' === typeof x || 'object' === typeof x)
return x
else if ('function' === typeof x)
return x(y)
return false
}
exports.stringify = function (op, sep, cl, indent) {
indent = indent || 0
if (op === false){
op = ''
sep = '\n'
cl = ''
} else if (op == null) {
op = '[\n'
sep = '\n,\n'
cl = '\n]\n'
}
//else, what ever you like
var stream
, first = true
, anyData = false
stream = through(function (data) {
anyData = true
try {
var json = JSON.stringify(data, null, indent)
} catch (err) {
return stream.emit('error', err)
}
if(first) { first = false ; stream.queue(op + json)}
else stream.queue(sep + json)
},
function (data) {
if(!anyData)
stream.queue(op)
stream.queue(cl)
stream.queue(null)
})
return stream
}
exports.stringifyObject = function (op, sep, cl, indent) {
indent = indent || 0
if (op === false){
op = ''
sep = '\n'
cl = ''
} else if (op == null) {
op = '{\n'
sep = '\n,\n'
cl = '\n}\n'
}
//else, what ever you like
var first = true
var anyData = false
var stream = through(function (data) {
anyData = true
var json = JSON.stringify(data[0]) + ':' + JSON.stringify(data[1], null, indent)
if(first) { first = false ; this.queue(op + json)}
else this.queue(sep + json)
},
function (data) {
if(!anyData) this.queue(op)
this.queue(cl)
this.queue(null)
})
return stream
}

node_modules/JSONStream/package.json (generated, vendored; 79 lines deleted)

@ -1,79 +0,0 @@
{
"_from": "JSONStream@^1.0.7",
"_id": "JSONStream@1.3.5",
"_inBundle": false,
"_integrity": "sha512-E+iruNOY8VV9s4JEbe1aNEm6MiszPRr/UfcHMz0TQh1BXSxHK+ASV1R6W4HpjBhSeS+54PIsAMCBmwD06LLsqQ==",
"_location": "/JSONStream",
"_phantomChildren": {},
"_requested": {
"type": "range",
"registry": true,
"raw": "JSONStream@^1.0.7",
"name": "JSONStream",
"escapedName": "JSONStream",
"rawSpec": "^1.0.7",
"saveSpec": null,
"fetchSpec": "^1.0.7"
},
"_requiredBy": [
"/browser-pack",
"/browserify",
"/deps-sort",
"/insert-module-globals",
"/module-deps",
"/warehouse"
],
"_resolved": "https://registry.npmjs.org/JSONStream/-/JSONStream-1.3.5.tgz",
"_shasum": "3208c1f08d3a4d99261ab64f92302bc15e111ca0",
"_spec": "JSONStream@^1.0.7",
"_where": "/Users/andrewtudhope/Status/keycard.status.im/node_modules/warehouse",
"author": {
"name": "Dominic Tarr",
"email": "dominic.tarr@gmail.com",
"url": "http://bit.ly/dominictarr"
},
"bin": {
"JSONStream": "./bin.js"
},
"bugs": {
"url": "https://github.com/dominictarr/JSONStream/issues"
},
"bundleDependencies": false,
"dependencies": {
"jsonparse": "^1.2.0",
"through": ">=2.2.7 <3"
},
"deprecated": false,
"description": "rawStream.pipe(JSONStream.parse()).pipe(streamOfObjects)",
"devDependencies": {
"assertions": "~2.2.2",
"event-stream": "~0.7.0",
"it-is": "~1",
"render": "~0.1.1",
"tape": "~2.12.3",
"trees": "~0.0.3"
},
"engines": {
"node": "*"
},
"homepage": "http://github.com/dominictarr/JSONStream",
"keywords": [
"json",
"stream",
"streaming",
"parser",
"async",
"parsing"
],
"license": "(MIT OR Apache-2.0)",
"name": "JSONStream",
"optionalDependencies": {},
"repository": {
"type": "git",
"url": "git://github.com/dominictarr/JSONStream.git"
},
"scripts": {
"test": "node test/run.js"
},
"version": "1.3.5"
}


@ -1,207 +0,0 @@
# JSONStream
streaming JSON.parse and stringify
![](https://secure.travis-ci.org/dominictarr/JSONStream.png?branch=master)
## install
```npm install JSONStream```
## example
``` js
var request = require('request')
, JSONStream = require('JSONStream')
, es = require('event-stream')
request({url: 'http://isaacs.couchone.com/registry/_all_docs'})
.pipe(JSONStream.parse('rows.*'))
.pipe(es.mapSync(function (data) {
console.error(data)
return data
}))
```
## JSONStream.parse(path)
parse stream of values that match a path
``` js
JSONStream.parse('rows.*.doc')
```
The `..` operator is the recursive descent operator from [JSONPath](http://goessner.net/articles/JsonPath/), which will match a child at any depth (see examples below).
If your keys themselves contain `.` or `*` etc., use an array instead, e.g.
`['row', true, /^doc/]`.
An array path may mix strings, `RegExp`s, booleans, and/or functions. The `..` operator is also available in array representation, using `{recurse: true}`.
any object that matches the path will be emitted as 'data' (and `pipe`d down stream)
If `path` is empty or null, no 'data' events are emitted.
If you want to have keys emitted, you can prefix your `*` operator with `$`: `obj.$*` - in this case the data passed to the stream is an object with a `key` holding the key and a `value` property holding the data.
### Examples
query a couchdb view:
``` bash
curl -sS 'localhost:5984/tests/_all_docs?include_docs=true'
```
you will get something like this:
``` js
{"total_rows":129,"offset":0,"rows":[
{ "id":"change1_0.6995461115147918"
, "key":"change1_0.6995461115147918"
, "value":{"rev":"1-e240bae28c7bb3667f02760f6398d508"}
, "doc":{
"_id": "change1_0.6995461115147918"
, "_rev": "1-e240bae28c7bb3667f02760f6398d508","hello":1}
},
{ "id":"change2_0.6995461115147918"
, "key":"change2_0.6995461115147918"
, "value":{"rev":"1-13677d36b98c0c075145bb8975105153"}
, "doc":{
"_id":"change2_0.6995461115147918"
, "_rev":"1-13677d36b98c0c075145bb8975105153"
, "hello":2
}
},
]}
```
we are probably most interested in the `rows.*.doc`
create a `Stream` that parses the documents from the feed like this:
``` js
var stream = JSONStream.parse(['rows', true, 'doc']) //rows, ANYTHING, doc
stream.on('data', function(data) {
console.log('received:', data);
});
//emits anything from _before_ the first match
stream.on('header', function (data) {
console.log('header:', data) // => {"total_rows":129,"offset":0}
})
```
awesome!
If you want each item inside the doc emitted together with its key:
``` js
var stream = JSONStream.parse(['rows', true, 'doc', {emitKey: true}]) //rows, ANYTHING, doc, items in docs with keys
stream.on('data', function(data) {
console.log('key:', data.key);
console.log('value:', data.value);
});
```
You can also emit the path:
``` js
var stream = JSONStream.parse(['rows', true, 'doc', {emitPath: true}]) //rows, ANYTHING, doc, items in docs with keys
stream.on('data', function(data) {
console.log('path:', data.path);
console.log('value:', data.value);
});
```
### recursive patterns (..)
`JSONStream.parse('docs..value')`
(or `JSONStream.parse(['docs', {recurse: true}, 'value'])` using an array)
will emit every `value` object that is a child, grand-child, etc. of the
`docs` object. In this example, it will match exactly 5 times at various depth
levels, emitting 0, 1, 2, 3 and 4 as results.
```js
{
"total": 5,
"docs": [
{
"key": {
"value": 0,
"some": "property"
}
},
{"value": 1},
{"value": 2},
{"blbl": [{}, {"a":0, "b":1, "value":3}, 10]},
{"value": 4}
]
}
```
## JSONStream.parse(pattern, map)
provide a function that can be used to map or filter
the json output. `map` is passed the value at that node of the pattern,
if `map` returns non-nullish (anything but `null` or `undefined`),
that value will be emitted in the stream. If it returns a nullish value,
nothing will be emitted.
`JSONStream` also emits `'header'` and `'footer'` events:
the `'header'` event contains anything in the output that came before
the first match, and the `'footer'` event contains anything after the last match.
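For example, a minimal sketch of the `map` argument (the `rows.*.doc` path and the numeric `value` field are illustrative, not part of the API):
``` js
var JSONStream = require('JSONStream')

// emit only docs whose `value` is a number; returning null skips the item
var stream = JSONStream.parse('rows.*.doc', function (doc) {
  return (doc && typeof doc.value === 'number') ? doc.value : null
})
```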
## JSONStream.stringify(open, sep, close)
Create a writable stream.
You may pass in custom `open`, `close`, and `separator` strings.
But, by default, `JSONStream.stringify()` will create an array,
(with default options `open='[\n', sep='\n,\n', close='\n]\n'`)
If you call `JSONStream.stringify(false)`
the elements will only be separated by a newline.
If you only write one item this will be valid JSON.
If you write many items,
you can use a `RegExp` to split it into valid chunks.
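For example, a minimal sketch that streams an array of objects to a file (the output file name is illustrative):
``` js
var fs = require('fs')
var JSONStream = require('JSONStream')

var stringify = JSONStream.stringify() // defaults: open='[\n', sep='\n,\n', close='\n]\n'
stringify.pipe(fs.createWriteStream('out.json'))
stringify.write({hello: 1})
stringify.write({hello: 2})
stringify.end() // flushes the closing ']'
```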
## JSONStream.stringifyObject(open, sep, close)
Very much like `JSONStream.stringify`,
but creates a writable stream for objects instead of arrays.
Accordingly, `open='{\n', sep='\n,\n', close='\n}\n'`.
When you `.write()` to the stream you must supply an array with `[ key, data ]`
as the first argument.
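For example, a minimal sketch that writes `[ key, data ]` pairs to stdout:
``` js
var JSONStream = require('JSONStream')

var stringify = JSONStream.stringifyObject() // defaults: open='{\n', sep='\n,\n', close='\n}\n'
stringify.pipe(process.stdout)
stringify.write(['name', 'JSONStream'])
stringify.write(['version', '1.3.5'])
stringify.end()
```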
## unix tool
query npm to see all the modules that browserify has ever depended on.
``` bash
curl https://registry.npmjs.org/browserify | JSONStream 'versions.*.dependencies'
```
## numbers
numbers will be emitted as numbers.
huge numbers that cannot be represented in memory as javascript numbers will be emitted as strings.
cf https://github.com/creationix/jsonparse/commit/044b268f01c4b8f97fb936fc85d3bcfba179e5bb for details.
## Acknowledgements
this module depends on https://github.com/creationix/jsonparse
by Tim Caswell
and also thanks to Florent Jaby for teaching me about parsing with:
https://github.com/Floby/node-json-streams
## license
Dual-licensed under the MIT License or the Apache License, version 2.0

node_modules/JSONStream/test/bool.js (generated, vendored; 41 lines deleted)

@ -1,41 +0,0 @@
var fs = require ('fs')
, join = require('path').join
, file = join(__dirname, 'fixtures','all_npm.json')
, JSONStream = require('../')
, it = require('it-is').style('colour')
function randomObj () {
return (
Math.random () < 0.4
? {hello: 'eonuhckmqjk',
whatever: 236515,
lies: true,
nothing: [null],
// stuff: [Math.random(),Math.random(),Math.random()]
}
: ['AOREC', 'reoubaor', {ouec: 62642}, [[[], {}, 53]]]
)
}
var expected = []
, stringify = JSONStream.stringify()
, es = require('event-stream')
, stringified = ''
, called = 0
, count = 10
, ended = false
while (count --)
expected.push(randomObj())
es.connect(
es.readArray(expected),
stringify,
JSONStream.parse([true]),
es.writeArray(function (err, lines) {
it(lines).has(expected)
console.error('PASSED')
})
)


@ -1,18 +0,0 @@
var test = require('tape')
var JSONStream = require('../')
var testData = '{"rows":[{"hello":"world"}, {"foo": "bar"}]}'
test('basic parsing', function (t) {
t.plan(2)
var parsed = JSONStream.parse("rows.*")
var parsedKeys = {}
parsed.on('data', function(match) {
parsedKeys[Object.keys(match)[0]] = true
})
parsed.on('end', function() {
t.equal(!!parsedKeys['hello'], true)
t.equal(!!parsedKeys['foo'], true)
})
parsed.write(testData)
parsed.end()
})


@ -1,27 +0,0 @@
var fs = require ('fs');
var net = require('net');
var join = require('path').join;
var file = join(__dirname, 'fixtures','all_npm.json');
var JSONStream = require('../');
var server = net.createServer(function(client) {
var parser = JSONStream.parse([]);
parser.on('end', function() {
console.log('close')
console.error('PASSED');
server.close();
});
client.pipe(parser);
var n = 4
client.on('data', function () {
if(--n) return
client.end();
})
});
server.listen(9999);
var client = net.connect({ port : 9999 }, function() {
fs.createReadStream(file).pipe(client).on('data', console.log) //.resume();
});


@ -1,29 +0,0 @@
var fs = require ('fs')
, join = require('path').join
, file = join(__dirname, 'fixtures','all_npm.json')
, JSONStream = require('../')
, it = require('it-is')
var expected = JSON.parse(fs.readFileSync(file))
, parser = JSONStream.parse('rows..rev')
, called = 0
, ended = false
, parsed = []
fs.createReadStream(file).pipe(parser)
parser.on('data', function (data) {
called ++
parsed.push(data)
})
parser.on('end', function () {
ended = true
})
process.on('exit', function () {
it(called).equal(expected.rows.length)
for (var i = 0 ; i < expected.rows.length ; i++)
it(parsed[i]).deepEqual(expected.rows[i].value.rev)
console.error('PASSED')
})


@ -1,30 +0,0 @@
var fs = require ('fs')
, join = require('path').join
, file = join(__dirname, 'fixtures','depth.json')
, JSONStream = require('../')
, it = require('it-is')
var expected = JSON.parse(fs.readFileSync(file))
, parser = JSONStream.parse(['docs', {recurse: true}, 'value'])
, called = 0
, ended = false
, parsed = []
fs.createReadStream(file).pipe(parser)
parser.on('data', function (data) {
called ++
parsed.push(data)
})
parser.on('end', function () {
ended = true
})
process.on('exit', function () {
var expectedValues = [0, [1], {"a": 2}, "3", 4]
it(called).equal(expectedValues.length)
for (var i = 0 ; i < 5 ; i++)
it(parsed[i]).deepEqual(expectedValues[i])
console.error('PASSED')
})


@ -1,44 +0,0 @@
var JSONStream = require('../')
, stream = require('stream')
, it = require('it-is')
var output = [ [], [] ]
var parser1 = JSONStream.parse(['docs', /./])
parser1.on('data', function(data) {
output[0].push(data)
})
var parser2 = JSONStream.parse(['docs', /./])
parser2.on('data', function(data) {
output[1].push(data)
})
var pending = 2
function onend () {
if (--pending > 0) return
it(output).deepEqual([
[], [{hello: 'world'}]
])
console.error('PASSED')
}
parser1.on('end', onend)
parser2.on('end', onend)
function makeReadableStream() {
var readStream = new stream.Stream()
readStream.readable = true
readStream.write = function (data) { this.emit('data', data) }
readStream.end = function (data) { this.emit('end') }
return readStream
}
var emptyArray = makeReadableStream()
emptyArray.pipe(parser1)
emptyArray.write('{"docs":[]}')
emptyArray.end()
var objectArray = makeReadableStream()
objectArray.pipe(parser2)
objectArray.write('{"docs":[{"hello":"world"}]}')
objectArray.end()


@ -1,45 +0,0 @@
var fs = require ('fs')
, join = require('path').join
, file = join(__dirname, 'fixtures','error.json')
, JSONStream = require('../')
, it = require('it-is')
var expected = JSON.parse(fs.readFileSync(file))
, parser = JSONStream.parse(['rows'])
, called = 0
, headerCalled = 0
, footerCalled = 0
, ended = false
, parsed = []
fs.createReadStream(file).pipe(parser)
parser.on('header', function (data) {
headerCalled ++
it(data).deepEqual({
error: 'error_code',
message: 'this is an error message'
})
})
parser.on('footer', function (data) {
footerCalled ++
})
parser.on('data', function (data) {
called ++
parsed.push(data)
})
parser.on('end', function () {
ended = true
})
process.on('exit', function () {
it(called).equal(0)
it(headerCalled).equal(1)
it(footerCalled).equal(0)
console.error('PASSED')
})

File diff suppressed because it is too large


@ -1,18 +0,0 @@
{"total_rows":129,"offset":0,"rows":[
{ "id":"change1_0.6995461115147918"
, "key":"change1_0.6995461115147918"
, "value":{"rev":"1-e240bae28c7bb3667f02760f6398d508"}
, "doc":{
"_id": "change1_0.6995461115147918"
, "_rev": "1-e240bae28c7bb3667f02760f6398d508","hello":1}
},
{ "id":"change2_0.6995461115147918"
, "key":"change2_0.6995461115147918"
, "value":{"rev":"1-13677d36b98c0c075145bb8975105153"}
, "doc":{
"_id":"change2_0.6995461115147918"
, "_rev":"1-13677d36b98c0c075145bb8975105153"
, "hello":2
}
},
]}


@ -1,15 +0,0 @@
{
"total": 5,
"docs": [
{
"key": {
"value": 0,
"some": "property"
}
},
{"value": [1]},
{"value": {"a":2}},
{"blbl": [{}, {"a":0, "b":1, "value":"3"}, 10]},
{"value": 4}
]
}


@ -1 +0,0 @@
{"error": "error_code", "message": "this is an error message"}


@ -1,19 +0,0 @@
{"total_rows":129,"offset":0,"rows":[
{ "id":"change1_0.6995461115147918"
, "key":"change1_0.6995461115147918"
, "value":{"rev":"1-e240bae28c7bb3667f02760f6398d508"}
, "doc":{
"_id": "change1_0.6995461115147918"
, "_rev": "1-e240bae28c7bb3667f02760f6398d508","hello":1}
},
{ "id":"change2_0.6995461115147918"
, "key":"change2_0.6995461115147918"
, "value":{"rev":"1-13677d36b98c0c075145bb8975105153"}
, "doc":{
"_id":"change2_0.6995461115147918"
, "_rev":"1-13677d36b98c0c075145bb8975105153"
, "hello":2
}
}
],
"foo": {"bar": "baz"}}

node_modules/JSONStream/test/fn.js (generated, vendored; 39 lines deleted)

@ -1,39 +0,0 @@
var fs = require ('fs')
, join = require('path').join
, file = join(__dirname, 'fixtures','all_npm.json')
, JSONStream = require('../')
, it = require('it-is')
function fn (s) {
return !isNaN(parseInt(s, 10))
}
var expected = JSON.parse(fs.readFileSync(file))
, parser = JSONStream.parse(['rows', fn])
, called = 0
, ended = false
, parsed = []
fs.createReadStream(file).pipe(parser)
parser.on('data', function (data) {
called ++
it.has({
id: it.typeof('string'),
value: {rev: it.typeof('string')},
key:it.typeof('string')
})
parsed.push(data)
})
parser.on('end', function () {
ended = true
})
process.on('exit', function () {
it(called).equal(expected.rows.length)
it(parsed).deepEqual(expected.rows)
console.error('PASSED')
})

Some files were not shown because too many files have changed in this diff.