fsevents 1.0.10 → 1.0.11

This diff shows the changes between two publicly released versions of the package, as they appear in their public registry, and is provided for informational purposes only.

Potentially problematic release: this version of fsevents might be problematic.

Files changed (50)
  1. package/node_modules/ansi-styles/package.json +4 -21
  2. package/node_modules/aws4/node_modules/lru-cache/README.md +1 -1
  3. package/node_modules/aws4/node_modules/lru-cache/lib/lru-cache.js +1 -0
  4. package/node_modules/aws4/node_modules/lru-cache/node_modules/pseudomap/package.json +2 -1
  5. package/node_modules/aws4/node_modules/lru-cache/node_modules/yallist/package.json +2 -1
  6. package/node_modules/aws4/node_modules/lru-cache/package.json +20 -12
  7. package/node_modules/aws4/package.json +2 -1
  8. package/node_modules/bl/package.json +2 -1
  9. package/node_modules/chalk/package.json +22 -17
  10. package/node_modules/dashdash/node_modules/assert-plus/package.json +2 -1
  11. package/node_modules/dashdash/package.json +2 -1
  12. package/node_modules/escape-string-regexp/package.json +2 -1
  13. package/node_modules/form-data/package.json +2 -1
  14. package/node_modules/is-my-json-valid/package.json +2 -1
  15. package/node_modules/mime-db/package.json +2 -1
  16. package/node_modules/mime-types/package.json +2 -1
  17. package/node_modules/node-pre-gyp/CHANGELOG.md +6 -0
  18. package/node_modules/node-pre-gyp/lib/util/abi_crosswalk.json +8 -0
  19. package/node_modules/node-pre-gyp/lib/util/versioning.js +14 -2
  20. package/node_modules/node-pre-gyp/package.json +15 -15
  21. package/node_modules/oauth-sign/package.json +2 -1
  22. package/node_modules/once/package.json +1 -1
  23. package/node_modules/pinkie/package.json +2 -1
  24. package/node_modules/qs/package.json +2 -1
  25. package/node_modules/readable-stream/package.json +1 -1
  26. package/node_modules/request/package.json +2 -1
  27. package/node_modules/rimraf/node_modules/glob/package.json +2 -1
  28. package/node_modules/rimraf/package.json +2 -1
  29. package/node_modules/semver/package.json +2 -1
  30. package/node_modules/sshpk/package.json +2 -1
  31. package/node_modules/strip-ansi/package.json +2 -1
  32. package/node_modules/tar-pack/package.json +1 -1
  33. package/node_modules/tough-cookie/package.json +2 -1
  34. package/node_modules/tweetnacl/README.md +12 -4
  35. package/node_modules/tweetnacl/nacl-fast.js +1 -1
  36. package/node_modules/tweetnacl/nacl-fast.min.js +2 -2
  37. package/node_modules/tweetnacl/package.json +12 -11
  38. package/node_modules/verror/package.json +3 -2
  39. package/package.json +2 -2
  40. package/node_modules/ansi-styles/license +0 -21
  41. package/node_modules/aws4/node_modules/lru-cache/.npmignore +0 -4
  42. package/node_modules/aws4/node_modules/lru-cache/.travis.yml +0 -7
  43. package/node_modules/aws4/node_modules/lru-cache/CONTRIBUTORS +0 -14
  44. package/node_modules/aws4/node_modules/lru-cache/LICENSE +0 -15
  45. package/node_modules/aws4/node_modules/lru-cache/benchmarks/insertion-time.js +0 -32
  46. package/node_modules/aws4/node_modules/lru-cache/test/basic.js +0 -520
  47. package/node_modules/aws4/node_modules/lru-cache/test/foreach.js +0 -134
  48. package/node_modules/aws4/node_modules/lru-cache/test/inspect.js +0 -54
  49. package/node_modules/aws4/node_modules/lru-cache/test/no-symbol.js +0 -3
  50. package/node_modules/aws4/node_modules/lru-cache/test/serialize.js +0 -227
package/node_modules/node-pre-gyp/lib/util/abi_crosswalk.json

@@ -1127,6 +1127,10 @@
  "node_abi": 46,
  "v8": "4.5"
  },
+ "4.4.1": {
+ "node_abi": 46,
+ "v8": "4.5"
+ },
  "5.0.0": {
  "node_abi": 47,
  "v8": "4.6"
@@ -1178,5 +1182,9 @@
  "5.9.0": {
  "node_abi": 47,
  "v8": "4.6"
+ },
+ "5.9.1": {
+ "node_abi": 47,
+ "v8": "4.6"
  }
  }
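Note: abi_crosswalk.json maps each Node.js release to its native-module ABI (node_abi) and V8 version, which node-pre-gyp uses to name and fetch prebuilt binaries; since 4.4.1 keeps node_abi 46 and 5.9.1 keeps 47, binaries built for the earlier patch releases still apply. The sketch below is illustrative only (trimmed crosswalk, simplified {node_abi} formatting, example module name), not node-pre-gyp's actual lookup code; it just shows how such an entry feeds the default tarball-name template visible in the versioning.js hunks further down.

// Illustrative only: a crosswalk trimmed to the two entries added above,
// fed into the default package-name template from lib/util/versioning.js.
var crosswalk = {
  "4.4.1": { "node_abi": 46, "v8": "4.5" },
  "5.9.1": { "node_abi": 47, "v8": "4.6" }
};

var template = '{module_name}-v{version}-{node_abi}-{platform}-{arch}.tar.gz';

function tarball_name(module_name, module_version, node_version) {
  // node-pre-gyp folds the runtime name into the {node_abi} token; here
  // 'node-v' is hard-coded to keep the sketch short.
  var abi = 'node-v' + crosswalk[node_version].node_abi;
  return template
    .replace('{module_name}', module_name)
    .replace('{version}', module_version)
    .replace('{node_abi}', abi)
    .replace('{platform}', process.platform)
    .replace('{arch}', process.arch);
}

// e.g. on 64-bit macOS: fse-v1.0.11-node-v46-darwin-x64.tar.gz
// ('fse' is used here as an example module name)
console.log(tarball_name('fse', '1.0.11', '4.4.1'));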
package/node_modules/node-pre-gyp/lib/util/versioning.js

@@ -25,7 +25,7 @@ function get_electron_abi(runtime, target_version) {
  // erroneous CLI call
  throw new Error("Empty target version is not supported if electron is the target.");
  }
- // Electron guarentees that patch version update won't break native modules.
+ // Electron guarantees that patch version update won't break native modules.
  var sem_ver = semver.parse(target_version);
  return runtime + '-v' + sem_ver.major + '.' + sem_ver.minor;
  }
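Note: the comment fix above sits in get_electron_abi, whose return value (visible in the context lines) builds the ABI tag from only the major and minor components, precisely because Electron promises that patch releases keep native-module compatibility. A trimmed, hypothetical restatement of that expression with made-up version numbers:

// Hypothetical trim of the return expression above (CLI error handling omitted).
var semver = require('semver');

function electron_abi_tag(runtime, target_version) {
  var sem_ver = semver.parse(target_version);
  return runtime + '-v' + sem_ver.major + '.' + sem_ver.minor;
}

// Patch releases of the same minor line share one tag:
console.log(electron_abi_tag('electron', '0.36.7')); // 'electron-v0.36'
console.log(electron_abi_tag('electron', '0.36.8')); // 'electron-v0.36'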
@@ -241,6 +241,18 @@ function drop_double_slashes(pathname) {
  return pathname.replace(/\/\//g,'/');
  }

+ function get_process_runtime(versions) {
+ var runtime = 'node';
+ if (versions['node-webkit']) {
+ runtime = 'node-webkit';
+ } else if (versions.electron) {
+ runtime = 'electron';
+ }
+ return runtime;
+ }
+
+ module.exports.get_process_runtime = get_process_runtime;
+
  var default_package_name = '{module_name}-v{version}-{node_abi}-{platform}-{arch}.tar.gz';
  var default_remote_path = '';

@@ -249,7 +261,7 @@ module.exports.evaluate = function(package_json,options) {
  validate_config(package_json);
  var v = package_json.version;
  var module_version = semver.parse(v);
- var runtime = options.runtime || (process.versions['node-webkit'] ? 'node-webkit' : 'node');
+ var runtime = options.runtime || get_process_runtime(process.versions);
  var opts = {
  name: package_json.name,
  configuration: Boolean(options.debug) ? 'Debug' : 'Release',
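Note: the evaluate() change above swaps the inline node-webkit ternary for the new get_process_runtime helper, so an Electron host is now reported as 'electron' instead of falling through to 'node'. A minimal sketch of the difference, using the helper exactly as added in the hunk and mock stand-ins for process.versions:

// get_process_runtime copied from the hunk above; the argument objects are
// mock stand-ins for process.versions.
function get_process_runtime(versions) {
  var runtime = 'node';
  if (versions['node-webkit']) {
    runtime = 'node-webkit';
  } else if (versions.electron) {
    runtime = 'electron';
  }
  return runtime;
}

console.log(get_process_runtime({ node: '4.4.1' }));            // 'node'
console.log(get_process_runtime({ 'node-webkit': '0.12.3' }));  // 'node-webkit'
// The old expression (versions['node-webkit'] ? 'node-webkit' : 'node')
// would have returned 'node' for this case:
console.log(get_process_runtime({ electron: '0.36.0' }));       // 'electron'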
package/node_modules/node-pre-gyp/package.json

@@ -1,18 +1,18 @@
  {
  "_args": [
  [
- "node-pre-gyp@^0.6.24",
+ "node-pre-gyp@0.6.25",
  "/Users/eshanker/Code/fsevents"
  ]
  ],
- "_from": "node-pre-gyp@>=0.6.24 <0.7.0",
- "_id": "node-pre-gyp@0.6.24",
+ "_from": "node-pre-gyp@0.6.25",
+ "_id": "node-pre-gyp@0.6.25",
  "_inCache": true,
  "_installable": true,
  "_location": "/node-pre-gyp",
  "_npmOperationalInternal": {
  "host": "packages-12-west.internal.npmjs.com",
- "tmp": "tmp/node-pre-gyp-0.6.24.tgz_1458588408378_0.791442709742114"
+ "tmp": "tmp/node-pre-gyp-0.6.25.tgz_1459446165913_0.5912895421497524"
  },
  "_npmUser": {
  "email": "dane@mapbox.com",
@@ -22,19 +22,19 @@
  "_phantomChildren": {},
  "_requested": {
  "name": "node-pre-gyp",
- "raw": "node-pre-gyp@^0.6.24",
- "rawSpec": "^0.6.24",
+ "raw": "node-pre-gyp@0.6.25",
+ "rawSpec": "0.6.25",
  "scope": null,
- "spec": ">=0.6.24 <0.7.0",
- "type": "range"
+ "spec": "0.6.25",
+ "type": "version"
  },
  "_requiredBy": [
  "/"
  ],
- "_resolved": "https://registry.npmjs.org/node-pre-gyp/-/node-pre-gyp-0.6.24.tgz",
- "_shasum": "0ea2b5fdd516f67116413991f8f7d89e0dffbcfa",
+ "_resolved": "https://registry.npmjs.org/node-pre-gyp/-/node-pre-gyp-0.6.25.tgz",
+ "_shasum": "2c6818775e6f1df5e353ba8024f1c0118726545b",
  "_shrinkwrap": null,
- "_spec": "node-pre-gyp@^0.6.24",
+ "_spec": "node-pre-gyp@0.6.25",
  "_where": "/Users/eshanker/Code/fsevents",
  "author": {
  "email": "dane@mapbox.com",
@@ -77,10 +77,10 @@
  },
  "directories": {},
  "dist": {
- "shasum": "0ea2b5fdd516f67116413991f8f7d89e0dffbcfa",
- "tarball": "https://registry.npmjs.org/node-pre-gyp/-/node-pre-gyp-0.6.24.tgz"
+ "shasum": "2c6818775e6f1df5e353ba8024f1c0118726545b",
+ "tarball": "https://registry.npmjs.org/node-pre-gyp/-/node-pre-gyp-0.6.25.tgz"
  },
- "gitHead": "615e645b27eac9890e84ac309bf8d627d0c675ee",
+ "gitHead": "92d8e8087b0834b9e6d43b7f7215bd8c42c713b0",
  "homepage": "https://github.com/mapbox/node-pre-gyp",
  "jshintConfig": {
  "globalstrict": true,
@@ -135,5 +135,5 @@
  "test": "jshint lib lib/util scripts bin/node-pre-gyp && mocha -R spec --timeout 500000",
  "update-crosswalk": "node scripts/abi_crosswalk.js"
  },
- "version": "0.6.24"
+ "version": "0.6.25"
  }
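Note: the metadata above shows fsevents now requesting node-pre-gyp by the exact version 0.6.25 rather than the caret range ^0.6.24 (which npm expands to >=0.6.24 <0.7.0, as the old "_from" and "spec" fields show). A quick illustration of how the two specs resolve, using the semver module already present in this tree; the 0.6.30 version used below is hypothetical:

var semver = require('semver');

// Old spec: any 0.6.x release at or above 0.6.24 satisfied it.
console.log(semver.satisfies('0.6.25', '^0.6.24')); // true
console.log(semver.satisfies('0.6.30', '^0.6.24')); // true (hypothetical future patch)

// New spec: only 0.6.25 itself.
console.log(semver.satisfies('0.6.25', '0.6.25'));  // true
console.log(semver.satisfies('0.6.30', '0.6.25'));  // false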
package/node_modules/oauth-sign/package.json

@@ -54,7 +54,8 @@
  ],
  "name": "oauth-sign",
  "optionalDependencies": {},
- "readme": "ERROR: No README data found!",
+ "readme": "oauth-sign\n==========\n\nOAuth 1 signing. Formerly a vendor lib in mikeal/request, now a standalone module. \n",
+ "readmeFilename": "README.md",
  "repository": {
  "url": "git+https://github.com/mikeal/oauth-sign.git"
  },
package/node_modules/once/package.json

@@ -1,5 +1,5 @@
  {
- "_from": "once@^1.3.0",
+ "_from": "once@~1.3.3",
  "_id": "once@1.3.3",
  "_location": "/once",
  "_nodeVersion": "4.0.0",
package/node_modules/pinkie/package.json

@@ -61,7 +61,8 @@
  ],
  "name": "pinkie",
  "optionalDependencies": {},
- "readme": "ERROR: No README data found!",
+ "readme": "<h1 align=\"center\">\n\t<br>\n\t<img width=\"256\" src=\"media/logo.png\" alt=\"pinkie\">\n\t<br>\n\t<br>\n</h1>\n\n> Itty bitty little widdle twinkie pinkie [ES2015 Promise](https://people.mozilla.org/~jorendorff/es6-draft.html#sec-promise-objects) implementation\n\n[![Build Status](https://travis-ci.org/floatdrop/pinkie.svg?branch=master)](https://travis-ci.org/floatdrop/pinkie) [![Coverage Status](https://coveralls.io/repos/floatdrop/pinkie/badge.svg?branch=master&service=github)](https://coveralls.io/github/floatdrop/pinkie?branch=master)\n\nThere are [tons of Promise implementations](https://github.com/promises-aplus/promises-spec/blob/master/implementations.md#standalone) out there, but all of them focus on browser compatibility and are often bloated with functionality.\n\nThis module is an exact Promise specification polyfill (like [native-promise-only](https://github.com/getify/native-promise-only)), but in Node.js land (it should be browserify-able though).\n\n\n## Install\n\n```\n$ npm install --save pinkie\n```\n\n\n## Usage\n\n```js\nvar fs = require('fs');\nvar Promise = require('pinkie');\n\nnew Promise(function (resolve, reject) {\n\tfs.readFile('foo.json', 'utf8', function (err, data) {\n\t\tif (err) {\n\t\t\treject(err);\n\t\t\treturn;\n\t\t}\n\n\t\tresolve(data);\n\t});\n});\n//=> Promise\n```\n\n\n### API\n\n`pinkie` exports bare [ES2015 Promise](https://people.mozilla.org/~jorendorff/es6-draft.html#sec-promise-objects) implementation and polyfills [Node.js rejection events](https://nodejs.org/api/process.html#process_event_unhandledrejection). In case you forgot:\n\n#### new Promise(executor)\n\nReturns new instance of `Promise`.\n\n##### executor\n\n*Required* \nType: `function`\n\nFunction with two arguments `resolve` and `reject`. The first argument fulfills the promise, the second argument rejects it.\n\n#### pinkie.all(promises)\n\nReturns a promise that resolves when all of the promises in the `promises` Array argument have resolved.\n\n#### pinkie.race(promises)\n\nReturns a promise that resolves or rejects as soon as one of the promises in the `promises` Array resolves or rejects, with the value or reason from that promise.\n\n#### pinkie.reject(reason)\n\nReturns a Promise object that is rejected with the given `reason`.\n\n#### pinkie.resolve(value)\n\nReturns a Promise object that is resolved with the given `value`. If the `value` is a thenable (i.e. has a then method), the returned promise will \"follow\" that thenable, adopting its eventual state; otherwise the returned promise will be fulfilled with the `value`.\n\n\n## Related\n\n- [pinkie-promise](https://github.com/floatdrop/pinkie-promise) - Returns the native Promise or this module\n\n\n## License\n\nMIT © [Vsevolod Strukchinsky](http://github.com/floatdrop)\n",
+ "readmeFilename": "readme.md",
  "repository": {
  "type": "git",
  "url": "git+https://github.com/floatdrop/pinkie.git"
package/node_modules/qs/package.json

@@ -69,7 +69,8 @@
  ],
  "name": "qs",
  "optionalDependencies": {},
- "readme": "ERROR: No README data found!",
+ "readme": "# qs\n\nA querystring parsing and stringifying library with some added security.\n\n[![Build Status](https://api.travis-ci.org/ljharb/qs.svg)](http://travis-ci.org/ljharb/qs)\n\nLead Maintainer: [Jordan Harband](https://github.com/ljharb)\n\nThe **qs** module was originally created and maintained by [TJ Holowaychuk](https://github.com/visionmedia/node-querystring).\n\n## Usage\n\n```javascript\nvar qs = require('qs');\nvar assert = require('assert');\n\nvar obj = qs.parse('a=c');\nassert.deepEqual(obj, { a: 'c' });\n\nvar str = qs.stringify(obj);\nassert.equal(str, 'a=c');\n```\n\n### Parsing Objects\n\n[](#preventEval)\n```javascript\nqs.parse(string, [options]);\n```\n\n**qs** allows you to create nested objects within your query strings, by surrounding the name of sub-keys with square brackets `[]`.\nFor example, the string `'foo[bar]=baz'` converts to:\n\n```javascript\nassert.deepEqual(qs.parse('foo[bar]=baz'), {\n foo: {\n bar: 'baz'\n }\n});\n```\n\nWhen using the `plainObjects` option the parsed value is returned as a plain object, created via `Object.create(null)` and as such you should be aware that prototype methods will not exist on it and a user may set those names to whatever value they like:\n\n```javascript\nvar plainObject = qs.parse('a[hasOwnProperty]=b', { plainObjects: true });\nassert.deepEqual(plainObject, { a: { hasOwnProperty: 'b' } });\n```\n\nBy default parameters that would overwrite properties on the object prototype are ignored, if you wish to keep the data from those fields either use `plainObjects` as mentioned above, or set `allowPrototypes` to `true` which will allow user input to overwrite those properties. *WARNING* It is generally a bad idea to enable this option as it can cause problems when attempting to use the properties that have been overwritten. Always be careful with this option.\n\n```javascript\nvar protoObject = qs.parse('a[hasOwnProperty]=b', { allowPrototypes: true });\nassert.deepEqual(protoObject, { a: { hasOwnProperty: 'b' } });\n```\n\nURI encoded strings work too:\n\n```javascript\nassert.deepEqual(qs.parse('a%5Bb%5D=c'), {\n a: { b: 'c' }\n});\n```\n\nYou can also nest your objects, like `'foo[bar][baz]=foobarbaz'`:\n\n```javascript\nassert.deepEqual(qs.parse('foo[bar][baz]=foobarbaz'), {\n foo: {\n bar: {\n baz: 'foobarbaz'\n }\n }\n});\n```\n\nBy default, when nesting objects **qs** will only parse up to 5 children deep. This means if you attempt to parse a string like\n`'a[b][c][d][e][f][g][h][i]=j'` your resulting object will be:\n\n```javascript\nvar expected = {\n a: {\n b: {\n c: {\n d: {\n e: {\n f: {\n '[g][h][i]': 'j'\n }\n }\n }\n }\n }\n }\n};\nvar string = 'a[b][c][d][e][f][g][h][i]=j';\nassert.deepEqual(qs.parse(string), expected);\n```\n\nThis depth can be overridden by passing a `depth` option to `qs.parse(string, [options])`:\n\n```javascript\nvar deep = qs.parse('a[b][c][d][e][f][g][h][i]=j', { depth: 1 });\nassert.deepEqual(deep, { a: { b: { '[c][d][e][f][g][h][i]': 'j' } } });\n```\n\nThe depth limit helps mitigate abuse when **qs** is used to parse user input, and it is recommended to keep it a reasonably small number.\n\nFor similar reasons, by default **qs** will only parse up to 1000 parameters. 
This can be overridden by passing a `parameterLimit` option:\n\n```javascript\nvar limited = qs.parse('a=b&c=d', { parameterLimit: 1 });\nassert.deepEqual(limited, { a: 'b' });\n```\n\nAn optional delimiter can also be passed:\n\n```javascript\nvar delimited = qs.parse('a=b;c=d', { delimiter: ';' });\nassert.deepEqual(delimited, { a: 'b', c: 'd' });\n```\n\nDelimiters can be a regular expression too:\n\n```javascript\nvar regexed = qs.parse('a=b;c=d,e=f', { delimiter: /[;,]/ });\nassert.deepEqual(regexed, { a: 'b', c: 'd', e: 'f' });\n```\n\nOption `allowDots` can be used to enable dot notation:\n\n```javascript\nvar withDots = qs.parse('a.b=c', { allowDots: true });\nassert.deepEqual(withDots, { a: { b: 'c' } });\n```\n\n### Parsing Arrays\n\n**qs** can also parse arrays using a similar `[]` notation:\n\n```javascript\nvar withArray = qs.parse('a[]=b&a[]=c');\nassert.deepEqual(withArray, { a: ['b', 'c'] });\n```\n\nYou may specify an index as well:\n\n```javascript\nvar withIndexes = qs.parse('a[1]=c&a[0]=b');\nassert.deepEqual(withIndexes, { a: ['b', 'c'] });\n```\n\nNote that the only difference between an index in an array and a key in an object is that the value between the brackets must be a number\nto create an array. When creating arrays with specific indices, **qs** will compact a sparse array to only the existing values preserving\ntheir order:\n\n```javascript\nvar noSparse = qs.parse('a[1]=b&a[15]=c');\nassert.deepEqual(noSparse, { a: ['b', 'c'] });\n```\n\nNote that an empty string is also a value, and will be preserved:\n\n```javascript\nvar withEmptyString = qs.parse('a[]=&a[]=b');\nassert.deepEqual(withEmptyString, { a: ['', 'b'] });\n\nvar withIndexedEmptyString = qs.parse('a[0]=b&a[1]=&a[2]=c');\nassert.deepEqual(withIndexedEmptyString, { a: ['b', '', 'c'] });\n```\n\n**qs** will also limit specifying indices in an array to a maximum index of `20`. Any array members with an index of greater than `20` will\ninstead be converted to an object with the index as the key:\n\n```javascript\nvar withMaxIndex = qs.parse('a[100]=b');\nassert.deepEqual(withMaxIndex, { a: { '100': 'b' } });\n```\n\nThis limit can be overridden by passing an `arrayLimit` option:\n\n```javascript\nvar withArrayLimit = qs.parse('a[1]=b', { arrayLimit: 0 });\nassert.deepEqual(withArrayLimit, { a: { '1': 'b' } });\n```\n\nTo disable array parsing entirely, set `parseArrays` to `false`.\n\n```javascript\nvar noParsingArrays = qs.parse('a[]=b', { parseArrays: false });\nassert.deepEqual(noParsingArrays, { a: { '0': 'b' } });\n```\n\nIf you mix notations, **qs** will merge the two items into an object:\n\n```javascript\nvar mixedNotation = qs.parse('a[0]=b&a[b]=c');\nassert.deepEqual(mixedNotation, { a: { '0': 'b', b: 'c' } });\n```\n\nYou can also create arrays of objects:\n\n```javascript\nvar arraysOfObjects = qs.parse('a[][b]=c');\nassert.deepEqual(arraysOfObjects, { a: [{ b: 'c' }] });\n```\n\n### Stringifying\n\n[](#preventEval)\n```javascript\nqs.stringify(object, [options]);\n```\n\nWhen stringifying, **qs** by default URI encodes output. 
Objects are stringified as you would expect:\n\n```javascript\nassert.equal(qs.stringify({ a: 'b' }), 'a=b');\nassert.equal(qs.stringify({ a: { b: 'c' } }), 'a%5Bb%5D=c');\n```\n\nThis encoding can be disabled by setting the `encode` option to `false`:\n\n```javascript\nvar unencoded = qs.stringify({ a: { b: 'c' } }, { encode: false });\nassert.equal(unencoded, 'a[b]=c');\n```\n\nExamples beyond this point will be shown as though the output is not URI encoded for clarity. Please note that the return values in these cases *will* be URI encoded during real usage.\n\nWhen arrays are stringified, by default they are given explicit indices:\n\n```javascript\nqs.stringify({ a: ['b', 'c', 'd'] });\n// 'a[0]=b&a[1]=c&a[2]=d'\n```\n\nYou may override this by setting the `indices` option to `false`:\n\n```javascript\nqs.stringify({ a: ['b', 'c', 'd'] }, { indices: false });\n// 'a=b&a=c&a=d'\n```\n\nYou may use the `arrayFormat` option to specify the format of the output array\n\n```javascript\nqs.stringify({ a: ['b', 'c'] }, { arrayFormat: 'indices' })\n// 'a[0]=b&a[1]=c'\nqs.stringify({ a: ['b', 'c'] }, { arrayFormat: 'brackets' })\n// 'a[]=b&a[]=c'\nqs.stringify({ a: ['b', 'c'] }, { arrayFormat: 'repeat' })\n// 'a=b&a=c'\n```\n\nEmpty strings and null values will omit the value, but the equals sign (=) remains in place:\n\n```javascript\nassert.equal(qs.stringify({ a: '' }), 'a=');\n```\n\nProperties that are set to `undefined` will be omitted entirely:\n\n```javascript\nassert.equal(qs.stringify({ a: null, b: undefined }), 'a=');\n```\n\nThe delimiter may be overridden with stringify as well:\n\n```javascript\nassert.equal(qs.stringify({ a: 'b', c: 'd' }, { delimiter: ';' }), 'a=b;c=d');\n```\n\nFinally, you can use the `filter` option to restrict which keys will be included in the stringified output.\nIf you pass a function, it will be called for each key to obtain the replacement value. Otherwise, if you\npass an array, it will be used to select properties and array indices for stringification:\n\n```javascript\nfunction filterFunc(prefix, value) {\n if (prefix == 'b') {\n // Return an `undefined` value to omit a property.\n return;\n }\n if (prefix == 'e[f]') {\n return value.getTime();\n }\n if (prefix == 'e[g][0]') {\n return value * 2;\n }\n return value;\n}\nqs.stringify({ a: 'b', c: 'd', e: { f: new Date(123), g: [2] } }, { filter: filterFunc });\n// 'a=b&c=d&e[f]=123&e[g][0]=4'\nqs.stringify({ a: 'b', c: 'd', e: 'f' }, { filter: ['a', 'e'] });\n// 'a=b&e=f'\nqs.stringify({ a: ['b', 'c', 'd'], e: 'f' }, { filter: ['a', 0, 2] });\n// 'a[0]=b&a[2]=d'\n```\n\n### Handling of `null` values\n\nBy default, `null` values are treated like empty strings:\n\n```javascript\nvar withNull = qs.stringify({ a: null, b: '' });\nassert.equal(withNull, 'a=&b=');\n```\n\nParsing does not distinguish between parameters with and without equal signs. Both are converted to empty strings.\n\n```javascript\nvar equalsInsensitive = qs.parse('a&b=');\nassert.deepEqual(equalsInsensitive, { a: '', b: '' });\n```\n\nTo distinguish between `null` values and empty strings use the `strictNullHandling` flag. 
In the result string the `null`\nvalues have no `=` sign:\n\n```javascript\nvar strictNull = qs.stringify({ a: null, b: '' }, { strictNullHandling: true });\nassert.equal(strictNull, 'a&b=');\n```\n\nTo parse values without `=` back to `null` use the `strictNullHandling` flag:\n\n```javascript\nvar parsedStrictNull = qs.parse('a&b=', { strictNullHandling: true });\nassert.deepEqual(parsedStrictNull, { a: null, b: '' });\n```\n\nTo completely skip rendering keys with `null` values, use the `skipNulls` flag:\n\n```javascript\nvar nullsSkipped = qs.stringify({ a: 'b', c: null}, { skipNulls: true });\nassert.equal(nullsSkipped, 'a=b');\n```\n",
+ "readmeFilename": "README.md",
  "repository": {
  "type": "git",
  "url": "git+https://github.com/ljharb/qs.git"
package/node_modules/readable-stream/package.json

@@ -1,5 +1,5 @@
  {
- "_from": "readable-stream@~2.0.5",
+ "_from": "readable-stream@^2.0.0 || ^1.1.13",
  "_id": "readable-stream@2.0.6",
  "_location": "/readable-stream",
  "_nodeVersion": "5.7.0",
package/node_modules/request/package.json

@@ -99,7 +99,8 @@
  ],
  "name": "request",
  "optionalDependencies": {},
- "readme": "ERROR: No README data found!",
+ "readme": "\n# Request - Simplified HTTP client\n\n[![npm package](https://nodei.co/npm/request.png?downloads=true&downloadRank=true&stars=true)](https://nodei.co/npm/request/)\n\n[![Build status](https://img.shields.io/travis/request/request.svg?style=flat-square)](https://travis-ci.org/request/request)\n[![Coverage](https://img.shields.io/codecov/c/github/request/request.svg?style=flat-square)](https://codecov.io/github/request/request?branch=master)\n[![Coverage](https://img.shields.io/coveralls/request/request.svg?style=flat-square)](https://coveralls.io/r/request/request)\n[![Dependency Status](https://img.shields.io/david/request/request.svg?style=flat-square)](https://david-dm.org/request/request)\n[![Gitter](https://img.shields.io/badge/gitter-join_chat-blue.svg?style=flat-square)](https://gitter.im/request/request?utm_source=badge)\n\n\n## Super simple to use\n\nRequest is designed to be the simplest way possible to make http calls. It supports HTTPS and follows redirects by default.\n\n```js\nvar request = require('request');\nrequest('http://www.google.com', function (error, response, body) {\n if (!error && response.statusCode == 200) {\n console.log(body) // Show the HTML for the Google homepage.\n }\n})\n```\n\n\n## Table of contents\n\n- [Streaming](#streaming)\n- [Forms](#forms)\n- [HTTP Authentication](#http-authentication)\n- [Custom HTTP Headers](#custom-http-headers)\n- [OAuth Signing](#oauth-signing)\n- [Proxies](#proxies)\n- [Unix Domain Sockets](#unix-domain-sockets)\n- [TLS/SSL Protocol](#tlsssl-protocol)\n- [Support for HAR 1.2](#support-for-har-12)\n- [**All Available Options**](#requestoptions-callback)\n\nRequest also offers [convenience methods](#convenience-methods) like\n`request.defaults` and `request.post`, and there are\nlots of [usage examples](#examples) and several\n[debugging techniques](#debugging).\n\n\n---\n\n\n## Streaming\n\nYou can stream any response to a file stream.\n\n```js\nrequest('http://google.com/doodle.png').pipe(fs.createWriteStream('doodle.png'))\n```\n\nYou can also stream a file to a PUT or POST request. This method will also check the file extension against a mapping of file extensions to content-types (in this case `application/json`) and use the proper `content-type` in the PUT request (if the headers don’t already provide one).\n\n```js\nfs.createReadStream('file.json').pipe(request.put('http://mysite.com/obj.json'))\n```\n\nRequest can also `pipe` to itself. When doing so, `content-type` and `content-length` are preserved in the PUT headers.\n\n```js\nrequest.get('http://google.com/img.png').pipe(request.put('http://mysite.com/img.png'))\n```\n\nRequest emits a \"response\" event when a response is received. 
The `response` argument will be an instance of [http.IncomingMessage](http://nodejs.org/api/http.html#http_http_incomingmessage).\n\n```js\nrequest\n .get('http://google.com/img.png')\n .on('response', function(response) {\n console.log(response.statusCode) // 200\n console.log(response.headers['content-type']) // 'image/png'\n })\n .pipe(request.put('http://mysite.com/img.png'))\n```\n\nTo easily handle errors when streaming requests, listen to the `error` event before piping:\n\n```js\nrequest\n .get('http://mysite.com/doodle.png')\n .on('error', function(err) {\n console.log(err)\n })\n .pipe(fs.createWriteStream('doodle.png'))\n```\n\nNow let’s get fancy.\n\n```js\nhttp.createServer(function (req, resp) {\n if (req.url === '/doodle.png') {\n if (req.method === 'PUT') {\n req.pipe(request.put('http://mysite.com/doodle.png'))\n } else if (req.method === 'GET' || req.method === 'HEAD') {\n request.get('http://mysite.com/doodle.png').pipe(resp)\n }\n }\n})\n```\n\nYou can also `pipe()` from `http.ServerRequest` instances, as well as to `http.ServerResponse` instances. The HTTP method, headers, and entity-body data will be sent. Which means that, if you don't really care about security, you can do:\n\n```js\nhttp.createServer(function (req, resp) {\n if (req.url === '/doodle.png') {\n var x = request('http://mysite.com/doodle.png')\n req.pipe(x)\n x.pipe(resp)\n }\n})\n```\n\nAnd since `pipe()` returns the destination stream in ≥ Node 0.5.x you can do one line proxying. :)\n\n```js\nreq.pipe(request('http://mysite.com/doodle.png')).pipe(resp)\n```\n\nAlso, none of this new functionality conflicts with requests previous features, it just expands them.\n\n```js\nvar r = request.defaults({'proxy':'http://localproxy.com'})\n\nhttp.createServer(function (req, resp) {\n if (req.url === '/doodle.png') {\n r.get('http://google.com/doodle.png').pipe(resp)\n }\n})\n```\n\nYou can still use intermediate proxies, the requests will still follow HTTP forwards, etc.\n\n[back to top](#table-of-contents)\n\n\n---\n\n\n## Forms\n\n`request` supports `application/x-www-form-urlencoded` and `multipart/form-data` form uploads. For `multipart/related` refer to the `multipart` API.\n\n\n#### application/x-www-form-urlencoded (URL-Encoded Forms)\n\nURL-encoded forms are simple.\n\n```js\nrequest.post('http://service.com/upload', {form:{key:'value'}})\n// or\nrequest.post('http://service.com/upload').form({key:'value'})\n// or\nrequest.post({url:'http://service.com/upload', form: {key:'value'}}, function(err,httpResponse,body){ /* ... */ })\n```\n\n\n#### multipart/form-data (Multipart Form Uploads)\n\nFor `multipart/form-data` we use the [form-data](https://github.com/form-data/form-data) library by [@felixge](https://github.com/felixge). 
For the most cases, you can pass your upload form data via the `formData` option.\n\n\n```js\nvar formData = {\n // Pass a simple key-value pair\n my_field: 'my_value',\n // Pass data via Buffers\n my_buffer: new Buffer([1, 2, 3]),\n // Pass data via Streams\n my_file: fs.createReadStream(__dirname + '/unicycle.jpg'),\n // Pass multiple values /w an Array\n attachments: [\n fs.createReadStream(__dirname + '/attachment1.jpg'),\n fs.createReadStream(__dirname + '/attachment2.jpg')\n ],\n // Pass optional meta-data with an 'options' object with style: {value: DATA, options: OPTIONS}\n // Use case: for some types of streams, you'll need to provide \"file\"-related information manually.\n // See the `form-data` README for more information about options: https://github.com/form-data/form-data\n custom_file: {\n value: fs.createReadStream('/dev/urandom'),\n options: {\n filename: 'topsecret.jpg',\n contentType: 'image/jpg'\n }\n }\n};\nrequest.post({url:'http://service.com/upload', formData: formData}, function optionalCallback(err, httpResponse, body) {\n if (err) {\n return console.error('upload failed:', err);\n }\n console.log('Upload successful! Server responded with:', body);\n});\n```\n\nFor advanced cases, you can access the form-data object itself via `r.form()`. This can be modified until the request is fired on the next cycle of the event-loop. (Note that this calling `form()` will clear the currently set form data for that request.)\n\n```js\n// NOTE: Advanced use-case, for normal use see 'formData' usage above\nvar r = request.post('http://service.com/upload', function optionalCallback(err, httpResponse, body) {...})\nvar form = r.form();\nform.append('my_field', 'my_value');\nform.append('my_buffer', new Buffer([1, 2, 3]));\nform.append('custom_file', fs.createReadStream(__dirname + '/unicycle.jpg'), {filename: 'unicycle.jpg'});\n```\nSee the [form-data README](https://github.com/form-data/form-data) for more information & examples.\n\n\n#### multipart/related\n\nSome variations in different HTTP implementations require a newline/CRLF before, after, or both before and after the boundary of a `multipart/related` request (using the multipart option). This has been observed in the .NET WebAPI version 4.0. You can turn on a boundary preambleCRLF or postamble by passing them as `true` to your request options.\n\n```js\n request({\n method: 'PUT',\n preambleCRLF: true,\n postambleCRLF: true,\n uri: 'http://service.com/upload',\n multipart: [\n {\n 'content-type': 'application/json',\n body: JSON.stringify({foo: 'bar', _attachments: {'message.txt': {follows: true, length: 18, 'content_type': 'text/plain' }}})\n },\n { body: 'I am an attachment' },\n { body: fs.createReadStream('image.png') }\n ],\n // alternatively pass an object containing additional options\n multipart: {\n chunked: false,\n data: [\n {\n 'content-type': 'application/json',\n body: JSON.stringify({foo: 'bar', _attachments: {'message.txt': {follows: true, length: 18, 'content_type': 'text/plain' }}})\n },\n { body: 'I am an attachment' }\n ]\n }\n },\n function (error, response, body) {\n if (error) {\n return console.error('upload failed:', error);\n }\n console.log('Upload successful! 
Server responded with:', body);\n })\n```\n\n[back to top](#table-of-contents)\n\n\n---\n\n\n## HTTP Authentication\n\n```js\nrequest.get('http://some.server.com/').auth('username', 'password', false);\n// or\nrequest.get('http://some.server.com/', {\n 'auth': {\n 'user': 'username',\n 'pass': 'password',\n 'sendImmediately': false\n }\n});\n// or\nrequest.get('http://some.server.com/').auth(null, null, true, 'bearerToken');\n// or\nrequest.get('http://some.server.com/', {\n 'auth': {\n 'bearer': 'bearerToken'\n }\n});\n```\n\nIf passed as an option, `auth` should be a hash containing values:\n\n- `user` || `username`\n- `pass` || `password`\n- `sendImmediately` (optional)\n- `bearer` (optional)\n\nThe method form takes parameters\n`auth(username, password, sendImmediately, bearer)`.\n\n`sendImmediately` defaults to `true`, which causes a basic or bearer\nauthentication header to be sent. If `sendImmediately` is `false`, then\n`request` will retry with a proper authentication header after receiving a\n`401` response from the server (which must contain a `WWW-Authenticate` header\nindicating the required authentication method).\n\nNote that you can also specify basic authentication using the URL itself, as\ndetailed in [RFC 1738](http://www.ietf.org/rfc/rfc1738.txt). Simply pass the\n`user:password` before the host with an `@` sign:\n\n```js\nvar username = 'username',\n password = 'password',\n url = 'http://' + username + ':' + password + '@some.server.com';\n\nrequest({url: url}, function (error, response, body) {\n // Do more stuff with 'body' here\n});\n```\n\nDigest authentication is supported, but it only works with `sendImmediately`\nset to `false`; otherwise `request` will send basic authentication on the\ninitial request, which will probably cause the request to fail.\n\nBearer authentication is supported, and is activated when the `bearer` value is\navailable. The value may be either a `String` or a `Function` returning a\n`String`. Using a function to supply the bearer token is particularly useful if\nused in conjunction with `defaults` to allow a single function to supply the\nlast known token at the time of sending a request, or to compute one on the fly.\n\n[back to top](#table-of-contents)\n\n\n---\n\n\n## Custom HTTP Headers\n\nHTTP Headers, such as `User-Agent`, can be set in the `options` object.\nIn the example below, we call the github API to find out the number\nof stars and forks for the request repository. This requires a\ncustom `User-Agent` header as well as https.\n\n```js\nvar request = require('request');\n\nvar options = {\n url: 'https://api.github.com/repos/request/request',\n headers: {\n 'User-Agent': 'request'\n }\n};\n\nfunction callback(error, response, body) {\n if (!error && response.statusCode == 200) {\n var info = JSON.parse(body);\n console.log(info.stargazers_count + \" Stars\");\n console.log(info.forks_count + \" Forks\");\n }\n}\n\nrequest(options, callback);\n```\n\n[back to top](#table-of-contents)\n\n\n---\n\n\n## OAuth Signing\n\n[OAuth version 1.0](https://tools.ietf.org/html/rfc5849) is supported. 
The\ndefault signing algorithm is\n[HMAC-SHA1](https://tools.ietf.org/html/rfc5849#section-3.4.2):\n\n```js\n// OAuth1.0 - 3-legged server side flow (Twitter example)\n// step 1\nvar qs = require('querystring')\n , oauth =\n { callback: 'http://mysite.com/callback/'\n , consumer_key: CONSUMER_KEY\n , consumer_secret: CONSUMER_SECRET\n }\n , url = 'https://api.twitter.com/oauth/request_token'\n ;\nrequest.post({url:url, oauth:oauth}, function (e, r, body) {\n // Ideally, you would take the body in the response\n // and construct a URL that a user clicks on (like a sign in button).\n // The verifier is only available in the response after a user has\n // verified with twitter that they are authorizing your app.\n\n // step 2\n var req_data = qs.parse(body)\n var uri = 'https://api.twitter.com/oauth/authenticate'\n + '?' + qs.stringify({oauth_token: req_data.oauth_token})\n // redirect the user to the authorize uri\n\n // step 3\n // after the user is redirected back to your server\n var auth_data = qs.parse(body)\n , oauth =\n { consumer_key: CONSUMER_KEY\n , consumer_secret: CONSUMER_SECRET\n , token: auth_data.oauth_token\n , token_secret: req_data.oauth_token_secret\n , verifier: auth_data.oauth_verifier\n }\n , url = 'https://api.twitter.com/oauth/access_token'\n ;\n request.post({url:url, oauth:oauth}, function (e, r, body) {\n // ready to make signed requests on behalf of the user\n var perm_data = qs.parse(body)\n , oauth =\n { consumer_key: CONSUMER_KEY\n , consumer_secret: CONSUMER_SECRET\n , token: perm_data.oauth_token\n , token_secret: perm_data.oauth_token_secret\n }\n , url = 'https://api.twitter.com/1.1/users/show.json'\n , qs =\n { screen_name: perm_data.screen_name\n , user_id: perm_data.user_id\n }\n ;\n request.get({url:url, oauth:oauth, qs:qs, json:true}, function (e, r, user) {\n console.log(user)\n })\n })\n})\n```\n\nFor [RSA-SHA1 signing](https://tools.ietf.org/html/rfc5849#section-3.4.3), make\nthe following changes to the OAuth options object:\n* Pass `signature_method : 'RSA-SHA1'`\n* Instead of `consumer_secret`, specify a `private_key` string in\n [PEM format](http://how2ssl.com/articles/working_with_pem_files/)\n\nFor [PLAINTEXT signing](http://oauth.net/core/1.0/#anchor22), make\nthe following changes to the OAuth options object:\n* Pass `signature_method : 'PLAINTEXT'`\n\nTo send OAuth parameters via query params or in a post body as described in The\n[Consumer Request Parameters](http://oauth.net/core/1.0/#consumer_req_param)\nsection of the oauth1 spec:\n* Pass `transport_method : 'query'` or `transport_method : 'body'` in the OAuth\n options object.\n* `transport_method` defaults to `'header'`\n\nTo use [Request Body Hash](https://oauth.googlecode.com/svn/spec/ext/body_hash/1.0/oauth-bodyhash.html) you can either\n* Manually generate the body hash and pass it as a string `body_hash: '...'`\n* Automatically generate the body hash by passing `body_hash: true`\n\n[back to top](#table-of-contents)\n\n\n---\n\n\n## Proxies\n\nIf you specify a `proxy` option, then the request (and any subsequent\nredirects) will be sent via a connection to the proxy server.\n\nIf your endpoint is an `https` url, and you are using a proxy, then\nrequest will send a `CONNECT` request to the proxy server *first*, and\nthen use the supplied connection to connect to the endpoint.\n\nThat is, first it will make a request like:\n\n```\nHTTP/1.1 CONNECT endpoint-server.com:80\nHost: proxy-server.com\nUser-Agent: whatever user agent you specify\n```\n\nand then the proxy server make a TCP 
connection to `endpoint-server`\non port `80`, and return a response that looks like:\n\n```\nHTTP/1.1 200 OK\n```\n\nAt this point, the connection is left open, and the client is\ncommunicating directly with the `endpoint-server.com` machine.\n\nSee [the wikipedia page on HTTP Tunneling](https://en.wikipedia.org/wiki/HTTP_tunnel)\nfor more information.\n\nBy default, when proxying `http` traffic, request will simply make a\nstandard proxied `http` request. This is done by making the `url`\nsection of the initial line of the request a fully qualified url to\nthe endpoint.\n\nFor example, it will make a single request that looks like:\n\n```\nHTTP/1.1 GET http://endpoint-server.com/some-url\nHost: proxy-server.com\nOther-Headers: all go here\n\nrequest body or whatever\n```\n\nBecause a pure \"http over http\" tunnel offers no additional security\nor other features, it is generally simpler to go with a\nstraightforward HTTP proxy in this case. However, if you would like\nto force a tunneling proxy, you may set the `tunnel` option to `true`.\n\nYou can also make a standard proxied `http` request by explicitly setting\n`tunnel : false`, but **note that this will allow the proxy to see the traffic\nto/from the destination server**.\n\nIf you are using a tunneling proxy, you may set the\n`proxyHeaderWhiteList` to share certain headers with the proxy.\n\nYou can also set the `proxyHeaderExclusiveList` to share certain\nheaders only with the proxy and not with destination host.\n\nBy default, this set is:\n\n```\naccept\naccept-charset\naccept-encoding\naccept-language\naccept-ranges\ncache-control\ncontent-encoding\ncontent-language\ncontent-length\ncontent-location\ncontent-md5\ncontent-range\ncontent-type\nconnection\ndate\nexpect\nmax-forwards\npragma\nproxy-authorization\nreferer\nte\ntransfer-encoding\nuser-agent\nvia\n```\n\nNote that, when using a tunneling proxy, the `proxy-authorization`\nheader and any headers from custom `proxyHeaderExclusiveList` are\n*never* sent to the endpoint server, but only to the proxy server.\n\n\n### Controlling proxy behaviour using environment variables\n\nThe following environment variables are respected by `request`:\n\n * `HTTP_PROXY` / `http_proxy`\n * `HTTPS_PROXY` / `https_proxy`\n * `NO_PROXY` / `no_proxy`\n\nWhen `HTTP_PROXY` / `http_proxy` are set, they will be used to proxy non-SSL requests that do not have an explicit `proxy` configuration option present. Similarly, `HTTPS_PROXY` / `https_proxy` will be respected for SSL requests that do not have an explicit `proxy` configuration option. It is valid to define a proxy in one of the environment variables, but then override it for a specific request, using the `proxy` configuration option. Furthermore, the `proxy` configuration option can be explicitly set to false / null to opt out of proxying altogether for that request.\n\n`request` is also aware of the `NO_PROXY`/`no_proxy` environment variables. These variables provide a granular way to opt out of proxying, on a per-host basis. It should contain a comma separated list of hosts to opt out of proxying. It is also possible to opt of proxying when a particular destination port is used. 
Finally, the variable may be set to `*` to opt out of the implicit proxy configuration of the other environment variables.\n\nHere's some examples of valid `no_proxy` values:\n\n * `google.com` - don't proxy HTTP/HTTPS requests to Google.\n * `google.com:443` - don't proxy HTTPS requests to Google, but *do* proxy HTTP requests to Google.\n * `google.com:443, yahoo.com:80` - don't proxy HTTPS requests to Google, and don't proxy HTTP requests to Yahoo!\n * `*` - ignore `https_proxy`/`http_proxy` environment variables altogether.\n\n[back to top](#table-of-contents)\n\n\n---\n\n\n## UNIX Domain Sockets\n\n`request` supports making requests to [UNIX Domain Sockets](https://en.wikipedia.org/wiki/Unix_domain_socket). To make one, use the following URL scheme:\n\n```js\n/* Pattern */ 'http://unix:SOCKET:PATH'\n/* Example */ request.get('http://unix:/absolute/path/to/unix.socket:/request/path')\n```\n\nNote: The `SOCKET` path is assumed to be absolute to the root of the host file system.\n\n[back to top](#table-of-contents)\n\n\n---\n\n\n## TLS/SSL Protocol\n\nTLS/SSL Protocol options, such as `cert`, `key` and `passphrase`, can be\nset directly in `options` object, in the `agentOptions` property of the `options` object, or even in `https.globalAgent.options`. Keep in mind that, although `agentOptions` allows for a slightly wider range of configurations, the recommended way is via `options` object directly, as using `agentOptions` or `https.globalAgent.options` would not be applied in the same way in proxied environments (as data travels through a TLS connection instead of an http/https agent).\n\n```js\nvar fs = require('fs')\n , path = require('path')\n , certFile = path.resolve(__dirname, 'ssl/client.crt')\n , keyFile = path.resolve(__dirname, 'ssl/client.key')\n , caFile = path.resolve(__dirname, 'ssl/ca.cert.pem')\n , request = require('request');\n\nvar options = {\n url: 'https://api.some-server.com/',\n cert: fs.readFileSync(certFile),\n key: fs.readFileSync(keyFile),\n passphrase: 'password',\n ca: fs.readFileSync(caFile)\n }\n};\n\nrequest.get(options);\n```\n\n### Using `options.agentOptions`\n\nIn the example below, we call an API requires client side SSL certificate\n(in PEM format) with passphrase protected private key (in PEM format) and disable the SSLv3 protocol:\n\n```js\nvar fs = require('fs')\n , path = require('path')\n , certFile = path.resolve(__dirname, 'ssl/client.crt')\n , keyFile = path.resolve(__dirname, 'ssl/client.key')\n , request = require('request');\n\nvar options = {\n url: 'https://api.some-server.com/',\n agentOptions: {\n cert: fs.readFileSync(certFile),\n key: fs.readFileSync(keyFile),\n // Or use `pfx` property replacing `cert` and `key` when using private key, certificate and CA certs in PFX or PKCS12 format:\n // pfx: fs.readFileSync(pfxFilePath),\n passphrase: 'password',\n securityOptions: 'SSL_OP_NO_SSLv3'\n }\n};\n\nrequest.get(options);\n```\n\nIt is able to force using SSLv3 only by specifying `secureProtocol`:\n\n```js\nrequest.get({\n url: 'https://api.some-server.com/',\n agentOptions: {\n secureProtocol: 'SSLv3_method'\n }\n});\n```\n\nIt is possible to accept other certificates than those signed by generally allowed Certificate Authorities (CAs).\nThis can be useful, for example, when using self-signed certificates.\nTo require a different root certificate, you can specify the signing CA by adding the contents of the CA's certificate file to the `agentOptions`.\nThe certificate the domain presents must be signed by the root certificate 
specified:\n\n```js\nrequest.get({\n url: 'https://api.some-server.com/',\n agentOptions: {\n ca: fs.readFileSync('ca.cert.pem')\n }\n});\n```\n\n[back to top](#table-of-contents)\n\n\n---\n\n## Support for HAR 1.2\n\nThe `options.har` property will override the values: `url`, `method`, `qs`, `headers`, `form`, `formData`, `body`, `json`, as well as construct multipart data and read files from disk when `request.postData.params[].fileName` is present without a matching `value`.\n\na validation step will check if the HAR Request format matches the latest spec (v1.2) and will skip parsing if not matching.\n\n```js\n var request = require('request')\n request({\n // will be ignored\n method: 'GET',\n uri: 'http://www.google.com',\n\n // HTTP Archive Request Object\n har: {\n url: 'http://www.mockbin.com/har',\n method: 'POST',\n headers: [\n {\n name: 'content-type',\n value: 'application/x-www-form-urlencoded'\n }\n ],\n postData: {\n mimeType: 'application/x-www-form-urlencoded',\n params: [\n {\n name: 'foo',\n value: 'bar'\n },\n {\n name: 'hello',\n value: 'world'\n }\n ]\n }\n }\n })\n\n // a POST request will be sent to http://www.mockbin.com\n // with body an application/x-www-form-urlencoded body:\n // foo=bar&hello=world\n```\n\n[back to top](#table-of-contents)\n\n\n---\n\n## request(options, callback)\n\nThe first argument can be either a `url` or an `options` object. The only required option is `uri`; all others are optional.\n\n- `uri` || `url` - fully qualified uri or a parsed url object from `url.parse()`\n- `baseUrl` - fully qualified uri string used as the base url. Most useful with `request.defaults`, for example when you want to do many requests to the same domain. If `baseUrl` is `https://example.com/api/`, then requesting `/end/point?test=true` will fetch `https://example.com/api/end/point?test=true`. When `baseUrl` is given, `uri` must also be a string.\n- `method` - http method (default: `\"GET\"`)\n- `headers` - http headers (default: `{}`)\n\n---\n\n- `qs` - object containing querystring values to be appended to the `uri`\n- `qsParseOptions` - object containing options to pass to the [qs.parse](https://github.com/hapijs/qs#parsing-objects) method. Alternatively pass options to the [querystring.parse](https://nodejs.org/docs/v0.12.0/api/querystring.html#querystring_querystring_parse_str_sep_eq_options) method using this format `{sep:';', eq:':', options:{}}`\n- `qsStringifyOptions` - object containing options to pass to the [qs.stringify](https://github.com/hapijs/qs#stringifying) method. Alternatively pass options to the [querystring.stringify](https://nodejs.org/docs/v0.12.0/api/querystring.html#querystring_querystring_stringify_obj_sep_eq_options) method using this format `{sep:';', eq:':', options:{}}`. For example, to change the way arrays are converted to query strings using the `qs` module pass the `arrayFormat` option with one of `indices|brackets|repeat`\n- `useQuerystring` - If true, use `querystring` to stringify and parse\n querystrings, otherwise use `qs` (default: `false`). Set this option to\n `true` if you need arrays to be serialized as `foo=bar&foo=baz` instead of the\n default `foo[0]=bar&foo[1]=baz`.\n\n---\n\n- `body` - entity body for PATCH, POST and PUT requests. Must be a `Buffer` or `String`, unless `json` is `true`. 
If `json` is `true`, then `body` must be a JSON-serializable object.\n- `form` - when passed an object or a querystring, this sets `body` to a querystring representation of value, and adds `Content-type: application/x-www-form-urlencoded` header. When passed no options, a `FormData` instance is returned (and is piped to request). See \"Forms\" section above.\n- `formData` - Data to pass for a `multipart/form-data` request. See\n [Forms](#forms) section above.\n- `multipart` - array of objects which contain their own headers and `body`\n attributes. Sends a `multipart/related` request. See [Forms](#forms) section\n above.\n - Alternatively you can pass in an object `{chunked: false, data: []}` where\n `chunked` is used to specify whether the request is sent in\n [chunked transfer encoding](https://en.wikipedia.org/wiki/Chunked_transfer_encoding)\n In non-chunked requests, data items with body streams are not allowed.\n- `preambleCRLF` - append a newline/CRLF before the boundary of your `multipart/form-data` request.\n- `postambleCRLF` - append a newline/CRLF at the end of the boundary of your `multipart/form-data` request.\n- `json` - sets `body` to JSON representation of value and adds `Content-type: application/json` header. Additionally, parses the response body as JSON.\n- `jsonReviver` - a [reviver function](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/JSON/parse) that will be passed to `JSON.parse()` when parsing a JSON response body.\n\n---\n\n- `auth` - A hash containing values `user` || `username`, `pass` || `password`, and `sendImmediately` (optional). See documentation above.\n- `oauth` - Options for OAuth HMAC-SHA1 signing. See documentation above.\n- `hawk` - Options for [Hawk signing](https://github.com/hueniverse/hawk). The `credentials` key must contain the necessary signing info, [see hawk docs for details](https://github.com/hueniverse/hawk#usage-example).\n- `aws` - `object` containing AWS signing information. Should have the properties `key`, `secret`. Also requires the property `bucket`, unless you’re specifying your `bucket` as part of the path, or the request doesn’t use a bucket (i.e. GET Services). If you want to use AWS sign version 4 use the parameter `sign_version` with value `4` otherwise the default is version 2. **Note:** you need to `npm install aws4` first.\n- `httpSignature` - Options for the [HTTP Signature Scheme](https://github.com/joyent/node-http-signature/blob/master/http_signing.md) using [Joyent's library](https://github.com/joyent/node-http-signature). The `keyId` and `key` properties must be specified. See the docs for other options.\n\n---\n\n- `followRedirect` - follow HTTP 3xx responses as redirects (default: `true`). This property can also be implemented as function which gets `response` object as a single argument and should return `true` if redirects should continue or `false` otherwise.\n- `followAllRedirects` - follow non-GET HTTP 3xx responses as redirects (default: `false`)\n- `maxRedirects` - the maximum number of redirects to follow (default: `10`)\n- `removeRefererHeader` - removes the referer header when a redirect happens (default: `false`). **Note:** if true, referer header set in the initial request is preserved during redirect chain.\n\n---\n\n- `encoding` - Encoding to be used on `setEncoding` of response data. If `null`, the `body` is returned as a `Buffer`. 
Anything else **(including the default value of `undefined`)** will be passed as the [encoding](http://nodejs.org/api/buffer.html#buffer_buffer) parameter to `toString()` (meaning this is effectively `utf8` by default). (**Note:** if you expect binary data, you should set `encoding: null`.)\n- `gzip` - If `true`, add an `Accept-Encoding` header to request compressed content encodings from the server (if not already present) and decode supported content encodings in the response. **Note:** Automatic decoding of the response content is performed on the body data returned through `request` (both through the `request` stream and passed to the callback function) but is not performed on the `response` stream (available from the `response` event) which is the unmodified `http.IncomingMessage` object which may contain compressed data. See example below.\n- `jar` - If `true`, remember cookies for future use (or define your custom cookie jar; see examples section)\n\n---\n\n- `agent` - `http(s).Agent` instance to use\n- `agentClass` - alternatively specify your agent's class name\n- `agentOptions` - and pass its options. **Note:** for HTTPS see [tls API doc for TLS/SSL options](http://nodejs.org/api/tls.html#tls_tls_connect_options_callback) and the [documentation above](#using-optionsagentoptions).\n- `forever` - set to `true` to use the [forever-agent](https://github.com/request/forever-agent) **Note:** Defaults to `http(s).Agent({keepAlive:true})` in node 0.12+\n- `pool` - An object describing which agents to use for the request. If this option is omitted the request will use the global agent (as long as your options allow for it). Otherwise, request will search the pool for your custom agent. If no custom agent is found, a new agent will be created and added to the pool. **Note:** `pool` is used only when the `agent` option is not specified.\n - A `maxSockets` property can also be provided on the `pool` object to set the max number of sockets for all agents created (ex: `pool: {maxSockets: Infinity}`).\n - Note that if you are sending multiple requests in a loop and creating\n multiple new `pool` objects, `maxSockets` will not work as intended. To\n work around this, either use [`request.defaults`](#requestdefaultsoptions)\n with your pool options or create the pool object with the `maxSockets`\n property outside of the loop.\n- `timeout` - Integer containing the number of milliseconds to wait for a\nserver to send response headers (and start the response body) before aborting\nthe request. Note that if the underlying TCP connection cannot be established,\nthe OS-wide TCP connection timeout will overrule the `timeout` option ([the\ndefault in Linux can be anywhere from 20-120 seconds][linux-timeout]).\n\n[linux-timeout]: http://www.sekuda.com/overriding_the_default_linux_kernel_20_second_tcp_socket_connect_timeout\n\n---\n\n- `localAddress` - Local interface to bind for network connections.\n- `proxy` - An HTTP proxy to be used. Supports proxy Auth with Basic Auth, identical to support for the `url` parameter (by embedding the auth info in the `uri`)\n- `strictSSL` - If `true`, requires SSL certificates be valid. 
**Note:** to use your own certificate authority, you need to specify an agent that was created with that CA as an option.\n- `tunnel` - controls the behavior of\n [HTTP `CONNECT` tunneling](https://en.wikipedia.org/wiki/HTTP_tunnel#HTTP_CONNECT_tunneling)\n as follows:\n - `undefined` (default) - `true` if the destination is `https`, `false` otherwise\n - `true` - always tunnel to the destination by making a `CONNECT` request to\n the proxy\n - `false` - request the destination as a `GET` request.\n- `proxyHeaderWhiteList` - A whitelist of headers to send to a\n tunneling proxy.\n- `proxyHeaderExclusiveList` - A whitelist of headers to send\n exclusively to a tunneling proxy and not to destination.\n\n---\n\n- `time` - If `true`, the request-response cycle (including all redirects) is timed at millisecond resolution, and the result provided on the response's `elapsedTime` property.\n- `har` - A [HAR 1.2 Request Object](http://www.softwareishard.com/blog/har-12-spec/#request), will be processed from HAR format into options overwriting matching values *(see the [HAR 1.2 section](#support-for-har-1.2) for details)*\n\nThe callback argument gets 3 arguments:\n\n1. An `error` when applicable (usually from [`http.ClientRequest`](http://nodejs.org/api/http.html#http_class_http_clientrequest) object)\n2. An [`http.IncomingMessage`](http://nodejs.org/api/http.html#http_http_incomingmessage) object\n3. The third is the `response` body (`String` or `Buffer`, or JSON object if the `json` option is supplied)\n\n[back to top](#table-of-contents)\n\n\n---\n\n## Convenience methods\n\nThere are also shorthand methods for different HTTP METHODs and some other conveniences.\n\n\n### request.defaults(options)\n\nThis method **returns a wrapper** around the normal request API that defaults\nto whatever options you pass to it.\n\n**Note:** `request.defaults()` **does not** modify the global request API;\ninstead, it **returns a wrapper** that has your default settings applied to it.\n\n**Note:** You can call `.defaults()` on the wrapper that is returned from\n`request.defaults` to add/override defaults that were previously defaulted.\n\nFor example:\n```js\n//requests using baseRequest() will set the 'x-token' header\nvar baseRequest = request.defaults({\n headers: {'x-token': 'my-token'}\n})\n\n//requests using specialRequest() will include the 'x-token' header set in\n//baseRequest and will also include the 'special' header\nvar specialRequest = baseRequest.defaults({\n headers: {special: 'special value'}\n})\n```\n\n### request.put\n\nSame as `request()`, but defaults to `method: \"PUT\"`.\n\n```js\nrequest.put(url)\n```\n\n### request.patch\n\nSame as `request()`, but defaults to `method: \"PATCH\"`.\n\n```js\nrequest.patch(url)\n```\n\n### request.post\n\nSame as `request()`, but defaults to `method: \"POST\"`.\n\n```js\nrequest.post(url)\n```\n\n### request.head\n\nSame as `request()`, but defaults to `method: \"HEAD\"`.\n\n```js\nrequest.head(url)\n```\n\n### request.del\n\nSame as `request()`, but defaults to `method: \"DELETE\"`.\n\n```js\nrequest.del(url)\n```\n\n### request.get\n\nSame as `request()` (for uniformity).\n\n```js\nrequest.get(url)\n```\n### request.cookie\n\nFunction that creates a new cookie.\n\n```js\nrequest.cookie('key1=value1')\n```\n### request.jar()\n\nFunction that creates a new cookie jar.\n\n```js\nrequest.jar()\n```\n\n[back to top](#table-of-contents)\n\n\n---\n\n\n## Debugging\n\nThere are at least three ways to debug the operation of `request`:\n\n1. 
Launch the node process like `NODE_DEBUG=request node script.js`\n (`lib,request,otherlib` works too).\n\n2. Set `require('request').debug = true` at any time (this does the same thing\n as #1).\n\n3. Use the [request-debug module](https://github.com/request/request-debug) to\n view request and response headers and bodies.\n\n[back to top](#table-of-contents)\n\n\n---\n\n## Timeouts\n\nMost requests to external servers should have a timeout attached, in case the\nserver is not responding in a timely manner. Without a timeout, your code may\nhave a socket open/consume resources for minutes or more.\n\nThere are two main types of timeouts: **connection timeouts** and **read\ntimeouts**. A connect timeout occurs if the timeout is hit while your client is\nattempting to establish a connection to a remote machine (corresponding to the\n[connect() call][connect] on the socket). A read timeout occurs any time the\nserver is too slow to send back a part of the response.\n\nThese two situations have widely different implications for what went wrong\nwith the request, so it's useful to be able to distinguish them. You can detect\ntimeout errors by checking `err.code` for an 'ETIMEDOUT' value. Further, you\ncan detect whether the timeout was a connection timeout by checking if the\n`err.connect` property is set to `true`.\n\n```js\nrequest.get('http://10.255.255.1', {timeout: 1500}, function(err) {\n console.log(err.code === 'ETIMEDOUT');\n // Set to `true` if the timeout was a connection timeout, `false` or\n // `undefined` otherwise.\n console.log(err.connect === true);\n process.exit(0);\n});\n```\n\n[connect]: http://linux.die.net/man/2/connect\n\n## Examples:\n\n```js\n var request = require('request')\n , rand = Math.floor(Math.random()*100000000).toString()\n ;\n request(\n { method: 'PUT'\n , uri: 'http://mikeal.iriscouch.com/testjs/' + rand\n , multipart:\n [ { 'content-type': 'application/json'\n , body: JSON.stringify({foo: 'bar', _attachments: {'message.txt': {follows: true, length: 18, 'content_type': 'text/plain' }}})\n }\n , { body: 'I am an attachment' }\n ]\n }\n , function (error, response, body) {\n if(response.statusCode == 201){\n console.log('document saved as: http://mikeal.iriscouch.com/testjs/'+ rand)\n } else {\n console.log('error: '+ response.statusCode)\n console.log(body)\n }\n }\n )\n```\n\nFor backwards-compatibility, response compression is not supported by default.\nTo accept gzip-compressed responses, set the `gzip` option to `true`. Note\nthat the body data passed through `request` is automatically decompressed\nwhile the response object is unmodified and will contain compressed data if\nthe server sent a compressed response.\n\n```js\n var request = require('request')\n request(\n { method: 'GET'\n , uri: 'http://www.google.com'\n , gzip: true\n }\n , function (error, response, body) {\n // body is the decompressed response body\n console.log('server encoded the data as: ' + (response.headers['content-encoding'] || 'identity'))\n console.log('the decoded data is: ' + body)\n }\n ).on('data', function(data) {\n // decompressed data as it is received\n console.log('decoded chunk: ' + data)\n })\n .on('response', function(response) {\n // unmodified http.IncomingMessage object\n response.on('data', function(data) {\n // compressed data as it is received\n console.log('received ' + data.length + ' bytes of compressed data')\n })\n })\n```\n\nCookies are disabled by default (else, they would be used in subsequent requests). 
To enable cookies, set `jar` to `true` (either in `defaults` or `options`).\n\n```js\nvar request = request.defaults({jar: true})\nrequest('http://www.google.com', function () {\n request('http://images.google.com')\n})\n```\n\nTo use a custom cookie jar (instead of `request`’s global cookie jar), set `jar` to an instance of `request.jar()` (either in `defaults` or `options`)\n\n```js\nvar j = request.jar()\nvar request = request.defaults({jar:j})\nrequest('http://www.google.com', function () {\n request('http://images.google.com')\n})\n```\n\nOR\n\n```js\nvar j = request.jar();\nvar cookie = request.cookie('key1=value1');\nvar url = 'http://www.google.com';\nj.setCookie(cookie, url);\nrequest({url: url, jar: j}, function () {\n request('http://images.google.com')\n})\n```\n\nTo use a custom cookie store (such as a\n[`FileCookieStore`](https://github.com/mitsuru/tough-cookie-filestore)\nwhich supports saving to and restoring from JSON files), pass it as a parameter\nto `request.jar()`:\n\n```js\nvar FileCookieStore = require('tough-cookie-filestore');\n// NOTE - currently the 'cookies.json' file must already exist!\nvar j = request.jar(new FileCookieStore('cookies.json'));\nrequest = request.defaults({ jar : j })\nrequest('http://www.google.com', function() {\n request('http://images.google.com')\n})\n```\n\nThe cookie store must be a\n[`tough-cookie`](https://github.com/SalesforceEng/tough-cookie)\nstore and it must support synchronous operations; see the\n[`CookieStore` API docs](https://github.com/SalesforceEng/tough-cookie#cookiestore-api)\nfor details.\n\nTo inspect your cookie jar after a request:\n\n```js\nvar j = request.jar()\nrequest({url: 'http://www.google.com', jar: j}, function () {\n var cookie_string = j.getCookieString(url); // \"key1=value1; key2=value2; ...\"\n var cookies = j.getCookies(url);\n // [{key: 'key1', value: 'value1', domain: \"www.google.com\", ...}, ...]\n})\n```\n\n[back to top](#table-of-contents)\n",
103
+ "readmeFilename": "README.md",
103
104
  "repository": {
104
105
  "type": "git",
105
106
  "url": "git+https://github.com/request/request.git"
@@ -66,7 +66,8 @@
66
66
  ],
67
67
  "name": "glob",
68
68
  "optionalDependencies": {},
69
- "readme": "ERROR: No README data found!",
69
+ "readme": "# Glob\n\nMatch files using the patterns the shell uses, like stars and stuff.\n\n[![Build Status](https://travis-ci.org/isaacs/node-glob.svg?branch=master)](https://travis-ci.org/isaacs/node-glob/) [![Build Status](https://ci.appveyor.com/api/projects/status/kd7f3yftf7unxlsx?svg=true)](https://ci.appveyor.com/project/isaacs/node-glob) [![Coverage Status](https://coveralls.io/repos/isaacs/node-glob/badge.svg?branch=master&service=github)](https://coveralls.io/github/isaacs/node-glob?branch=master)\n\nThis is a glob implementation in JavaScript. It uses the `minimatch`\nlibrary to do its matching.\n\n![](oh-my-glob.gif)\n\n## Usage\n\nInstall with npm\n\n```\nnpm i glob\n```\n\n```javascript\nvar glob = require(\"glob\")\n\n// options is optional\nglob(\"**/*.js\", options, function (er, files) {\n // files is an array of filenames.\n // If the `nonull` option is set, and nothing\n // was found, then files is [\"**/*.js\"]\n // er is an error object or null.\n})\n```\n\n## Glob Primer\n\n\"Globs\" are the patterns you type when you do stuff like `ls *.js` on\nthe command line, or put `build/*` in a `.gitignore` file.\n\nBefore parsing the path part patterns, braced sections are expanded\ninto a set. Braced sections start with `{` and end with `}`, with any\nnumber of comma-delimited sections within. Braced sections may contain\nslash characters, so `a{/b/c,bcd}` would expand into `a/b/c` and `abcd`.\n\nThe following characters have special magic meaning when used in a\npath portion:\n\n* `*` Matches 0 or more characters in a single path portion\n* `?` Matches 1 character\n* `[...]` Matches a range of characters, similar to a RegExp range.\n If the first character of the range is `!` or `^` then it matches\n any character not in the range.\n* `!(pattern|pattern|pattern)` Matches anything that does not match\n any of the patterns provided.\n* `?(pattern|pattern|pattern)` Matches zero or one occurrence of the\n patterns provided.\n* `+(pattern|pattern|pattern)` Matches one or more occurrences of the\n patterns provided.\n* `*(a|b|c)` Matches zero or more occurrences of the patterns provided\n* `@(pattern|pat*|pat?erN)` Matches exactly one of the patterns\n provided\n* `**` If a \"globstar\" is alone in a path portion, then it matches\n zero or more directories and subdirectories searching for matches.\n It does not crawl symlinked directories.\n\n### Dots\n\nIf a file or directory path portion has a `.` as the first character,\nthen it will not match any glob pattern unless that pattern's\ncorresponding path part also has a `.` as its first character.\n\nFor example, the pattern `a/.*/c` would match the file at `a/.b/c`.\nHowever the pattern `a/*/c` would not, because `*` does not start with\na dot character.\n\nYou can make glob treat dots as normal characters by setting\n`dot:true` in the options.\n\n### Basename Matching\n\nIf you set `matchBase:true` in the options, and the pattern has no\nslashes in it, then it will seek for any file anywhere in the tree\nwith a matching basename. For example, `*.js` would match\n`test/simple/basic.js`.\n\n### Empty Sets\n\nIf no matching files are found, then an empty array is returned. This\ndiffers from the shell, where the pattern itself is returned. 
For\nexample:\n\n $ echo a*s*d*f\n a*s*d*f\n\nTo get the bash-style behavior, set the `nonull:true` in the options.\n\n### See Also:\n\n* `man sh`\n* `man bash` (Search for \"Pattern Matching\")\n* `man 3 fnmatch`\n* `man 5 gitignore`\n* [minimatch documentation](https://github.com/isaacs/minimatch)\n\n## glob.hasMagic(pattern, [options])\n\nReturns `true` if there are any special characters in the pattern, and\n`false` otherwise.\n\nNote that the options affect the results. If `noext:true` is set in\nthe options object, then `+(a|b)` will not be considered a magic\npattern. If the pattern has a brace expansion, like `a/{b/c,x/y}`\nthen that is considered magical, unless `nobrace:true` is set in the\noptions.\n\n## glob(pattern, [options], cb)\n\n* `pattern` `{String}` Pattern to be matched\n* `options` `{Object}`\n* `cb` `{Function}`\n * `err` `{Error | null}`\n * `matches` `{Array<String>}` filenames found matching the pattern\n\nPerform an asynchronous glob search.\n\n## glob.sync(pattern, [options])\n\n* `pattern` `{String}` Pattern to be matched\n* `options` `{Object}`\n* return: `{Array<String>}` filenames found matching the pattern\n\nPerform a synchronous glob search.\n\n## Class: glob.Glob\n\nCreate a Glob object by instantiating the `glob.Glob` class.\n\n```javascript\nvar Glob = require(\"glob\").Glob\nvar mg = new Glob(pattern, options, cb)\n```\n\nIt's an EventEmitter, and starts walking the filesystem to find matches\nimmediately.\n\n### new glob.Glob(pattern, [options], [cb])\n\n* `pattern` `{String}` pattern to search for\n* `options` `{Object}`\n* `cb` `{Function}` Called when an error occurs, or matches are found\n * `err` `{Error | null}`\n * `matches` `{Array<String>}` filenames found matching the pattern\n\nNote that if the `sync` flag is set in the options, then matches will\nbe immediately available on the `g.found` member.\n\n### Properties\n\n* `minimatch` The minimatch object that the glob uses.\n* `options` The options object passed in.\n* `aborted` Boolean which is set to true when calling `abort()`. There\n is no way at this time to continue a glob search after aborting, but\n you can re-use the statCache to avoid having to duplicate syscalls.\n* `cache` Convenience object. Each field has the following possible\n values:\n * `false` - Path does not exist\n * `true` - Path exists\n * `'FILE'` - Path exists, and is not a directory\n * `'DIR'` - Path exists, and is a directory\n * `[file, entries, ...]` - Path exists, is a directory, and the\n array value is the results of `fs.readdir`\n* `statCache` Cache of `fs.stat` results, to prevent statting the same\n path multiple times.\n* `symlinks` A record of which paths are symbolic links, which is\n relevant in resolving `**` patterns.\n* `realpathCache` An optional object which is passed to `fs.realpath`\n to minimize unnecessary syscalls. It is stored on the instantiated\n Glob object, and may be re-used.\n\n### Events\n\n* `end` When the matching is finished, this is emitted with all the\n matches found. If the `nonull` option is set, and no match was found,\n then the `matches` list contains the original pattern. The matches\n are sorted, unless the `nosort` flag is set.\n* `match` Every time a match is found, this is emitted with the specific\n thing that matched. 
It is not deduplicated or resolved to a realpath.\n* `error` Emitted when an unexpected error is encountered, or whenever\n any fs error occurs if `options.strict` is set.\n* `abort` When `abort()` is called, this event is raised.\n\n### Methods\n\n* `pause` Temporarily stop the search\n* `resume` Resume the search\n* `abort` Stop the search forever\n\n### Options\n\nAll the options that can be passed to Minimatch can also be passed to\nGlob to change pattern matching behavior. Also, some have been added,\nor have glob-specific ramifications.\n\nAll options are false by default, unless otherwise noted.\n\nAll options are added to the Glob object, as well.\n\nIf you are running many `glob` operations, you can pass a Glob object\nas the `options` argument to a subsequent operation to shortcut some\n`stat` and `readdir` calls. At the very least, you may pass in shared\n`symlinks`, `statCache`, `realpathCache`, and `cache` options, so that\nparallel glob operations will be sped up by sharing information about\nthe filesystem.\n\n* `cwd` The current working directory in which to search. Defaults\n to `process.cwd()`.\n* `root` The place where patterns starting with `/` will be mounted\n onto. Defaults to `path.resolve(options.cwd, \"/\")` (`/` on Unix\n systems, and `C:\\` or some such on Windows.)\n* `dot` Include `.dot` files in normal matches and `globstar` matches.\n Note that an explicit dot in a portion of the pattern will always\n match dot files.\n* `nomount` By default, a pattern starting with a forward-slash will be\n \"mounted\" onto the root setting, so that a valid filesystem path is\n returned. Set this flag to disable that behavior.\n* `mark` Add a `/` character to directory matches. Note that this\n requires additional stat calls.\n* `nosort` Don't sort the results.\n* `stat` Set to true to stat *all* results. This reduces performance\n somewhat, and is completely unnecessary, unless `readdir` is presumed\n to be an untrustworthy indicator of file existence.\n* `silent` When an unusual error is encountered when attempting to\n read a directory, a warning will be printed to stderr. Set the\n `silent` option to true to suppress these warnings.\n* `strict` When an unusual error is encountered when attempting to\n read a directory, the process will just continue on in search of\n other matches. Set the `strict` option to raise an error in these\n cases.\n* `cache` See `cache` property above. Pass in a previously generated\n cache object to save some fs calls.\n* `statCache` A cache of results of filesystem information, to prevent\n unnecessary stat calls. While it should not normally be necessary\n to set this, you may pass the statCache from one glob() call to the\n options object of another, if you know that the filesystem will not\n change between calls. (See \"Race Conditions\" below.)\n* `symlinks` A cache of known symbolic links. You may pass in a\n previously generated `symlinks` object to save `lstat` calls when\n resolving `**` matches.\n* `sync` DEPRECATED: use `glob.sync(pattern, opts)` instead.\n* `nounique` In some cases, brace-expanded patterns can result in the\n same file showing up multiple times in the result set. By default,\n this implementation prevents duplicates in the result set. Set this\n flag to disable that behavior.\n* `nonull` Set to never return an empty set, instead returning a set\n containing the pattern itself. 
This is the default in glob(3).\n* `debug` Set to enable debug logging in minimatch and glob.\n* `nobrace` Do not expand `{a,b}` and `{1..3}` brace sets.\n* `noglobstar` Do not match `**` against multiple filenames. (Ie,\n treat it as a normal `*` instead.)\n* `noext` Do not match `+(a|b)` \"extglob\" patterns.\n* `nocase` Perform a case-insensitive match. Note: on\n case-insensitive filesystems, non-magic patterns will match by\n default, since `stat` and `readdir` will not raise errors.\n* `matchBase` Perform a basename-only match if the pattern does not\n contain any slash characters. That is, `*.js` would be treated as\n equivalent to `**/*.js`, matching all js files in all directories.\n* `nodir` Do not match directories, only files. (Note: to match\n *only* directories, simply put a `/` at the end of the pattern.)\n* `ignore` Add a pattern or an array of glob patterns to exclude matches.\n Note: `ignore` patterns are *always* in `dot:true` mode, regardless\n of any other settings.\n* `follow` Follow symlinked directories when expanding `**` patterns.\n Note that this can result in a lot of duplicate references in the\n presence of cyclic links.\n* `realpath` Set to true to call `fs.realpath` on all of the results.\n In the case of a symlink that cannot be resolved, the full absolute\n path to the matched entry is returned (though it will usually be a\n broken symlink)\n\n## Comparisons to other fnmatch/glob implementations\n\nWhile strict compliance with the existing standards is a worthwhile\ngoal, some discrepancies exist between node-glob and other\nimplementations, and are intentional.\n\nThe double-star character `**` is supported by default, unless the\n`noglobstar` flag is set. This is supported in the manner of bsdglob\nand bash 4.3, where `**` only has special significance if it is the only\nthing in a path part. That is, `a/**/b` will match `a/x/y/b`, but\n`a/**b` will not.\n\nNote that symlinked directories are not crawled as part of a `**`,\nthough their contents may match against subsequent portions of the\npattern. This prevents infinite loops and duplicates and the like.\n\nIf an escaped pattern has no matches, and the `nonull` flag is set,\nthen glob returns the pattern as-provided, rather than\ninterpreting the character escapes. For example,\n`glob.match([], \"\\\\*a\\\\?\")` will return `\"\\\\*a\\\\?\"` rather than\n`\"*a?\"`. This is akin to setting the `nullglob` option in bash, except\nthat it does not resolve escaped pattern characters.\n\nIf brace expansion is not disabled, then it is performed before any\nother interpretation of the glob pattern. Thus, a pattern like\n`+(a|{b),c)}`, which would not be valid in bash or zsh, is expanded\n**first** into the set of `+(a|b)` and `+(a|c)`, and those patterns are\nchecked for validity. Since those two are valid, matching proceeds.\n\n### Comments and Negation\n\nPreviously, this module let you mark a pattern as a \"comment\" if it\nstarted with a `#` character, or a \"negated\" pattern if it started\nwith a `!` character.\n\nThese options were deprecated in version 5, and removed in version 6.\n\nTo specify things that should not match, use the `ignore` option.\n\n## Windows\n\n**Please only use forward-slashes in glob expressions.**\n\nThough windows uses either `/` or `\\` as its path separator, only `/`\ncharacters are used by this glob implementation. You must use\nforward-slashes **only** in glob expressions. 
Back-slashes will always\nbe interpreted as escape characters, not path separators.\n\nResults from absolute patterns such as `/foo/*` are mounted onto the\nroot setting using `path.join`. On windows, this will by default result\nin `/foo/*` matching `C:\\foo\\bar.txt`.\n\n## Race Conditions\n\nGlob searching, by its very nature, is susceptible to race conditions,\nsince it relies on directory walking and such.\n\nAs a result, it is possible that a file that exists when glob looks for\nit may have been deleted or modified by the time it returns the result.\n\nAs part of its internal implementation, this program caches all stat\nand readdir calls that it makes, in order to cut down on system\noverhead. However, this also makes it even more susceptible to races,\nespecially if the cache or statCache objects are reused between glob\ncalls.\n\nUsers are thus advised not to use a glob result as a guarantee of\nfilesystem state in the face of rapid changes. For the vast majority\nof operations, this is never a problem.\n\n## Contributing\n\nAny change to behavior (including bugfixes) must come with a test.\n\nPatches that fail tests or reduce performance will be rejected.\n\n```\n# to run tests\nnpm test\n\n# to re-generate test fixtures\nnpm run test-regen\n\n# to benchmark against bash/zsh\nnpm run bench\n\n# to profile javascript\nnpm run prof\n```\n",
70
+ "readmeFilename": "README.md",
70
71
  "repository": {
71
72
  "type": "git",
72
73
  "url": "git://github.com/isaacs/node-glob.git"
@@ -63,7 +63,8 @@
63
63
  ],
64
64
  "name": "rimraf",
65
65
  "optionalDependencies": {},
66
- "readme": "ERROR: No README data found!",
66
+ "readme": "[![Build Status](https://travis-ci.org/isaacs/rimraf.svg?branch=master)](https://travis-ci.org/isaacs/rimraf) [![Dependency Status](https://david-dm.org/isaacs/rimraf.svg)](https://david-dm.org/isaacs/rimraf) [![devDependency Status](https://david-dm.org/isaacs/rimraf/dev-status.svg)](https://david-dm.org/isaacs/rimraf#info=devDependencies)\n\nThe [UNIX command](http://en.wikipedia.org/wiki/Rm_(Unix)) `rm -rf` for node.\n\nInstall with `npm install rimraf`, or just drop rimraf.js somewhere.\n\n## API\n\n`rimraf(f, [opts], callback)`\n\nThe first parameter will be interpreted as a globbing pattern for files. If you\nwant to disable globbing you can do so with `opts.disableGlob` (defaults to\n`false`). This might be handy, for instance, if you have filenames that contain\nglobbing wildcard characters.\n\nThe callback will be called with an error if there is one. Certain\nerrors are handled for you:\n\n* Windows: `EBUSY` and `ENOTEMPTY` - rimraf will back off a maximum of\n `opts.maxBusyTries` times before giving up, adding 100ms of wait\n between each attempt. The default `maxBusyTries` is 3.\n* `ENOENT` - If the file doesn't exist, rimraf will return\n successfully, since your desired outcome is already the case.\n* `EMFILE` - Since `readdir` requires opening a file descriptor, it's\n possible to hit `EMFILE` if too many file descriptors are in use.\n In the sync case, there's nothing to be done for this. But in the\n async case, rimraf will gradually back off with timeouts up to\n `opts.emfileWait` ms, which defaults to 1000.\n\n## options\n\n* unlink, chmod, stat, lstat, rmdir, readdir,\n unlinkSync, chmodSync, statSync, lstatSync, rmdirSync, readdirSync\n\n In order to use a custom file system library, you can override\n specific fs functions on the options object.\n\n If any of these functions are present on the options object, then\n the supplied function will be used instead of the default fs\n method.\n\n Sync methods are only relevant for `rimraf.sync()`, of course.\n\n For example:\n\n ```javascript\n var myCustomFS = require('some-custom-fs')\n\n rimraf('some-thing', myCustomFS, callback)\n ```\n\n* maxBusyTries\n\n If an `EBUSY`, `ENOTEMPTY`, or `EPERM` error code is encountered\n on Windows systems, then rimraf will retry with a linear backoff\n wait of 100ms longer on each try. The default maxBusyTries is 3.\n\n Only relevant for async usage.\n\n* emfileWait\n\n If an `EMFILE` error is encountered, then rimraf will retry\n repeatedly with a linear backoff of 1ms longer on each try, until\n the timeout counter hits this max. The default limit is 1000.\n\n If you repeatedly encounter `EMFILE` errors, then consider using\n [graceful-fs](http://npm.im/graceful-fs) in your program.\n\n Only relevant for async usage.\n\n* glob\n\n Set to `false` to disable [glob](http://npm.im/glob) pattern\n matching.\n\n Set to an object to pass options to the glob module. The default\n glob options are `{ nosort: true, silent: true }`.\n\n Glob version 6 is used in this module.\n\n Relevant for both sync and async usage.\n\n* disableGlob\n\n Set to any non-falsey value to disable globbing entirely.\n (Equivalent to setting `glob: false`.)\n\n## rimraf.sync\n\nIt can remove stuff synchronously, too. But that's not so good. Use\nthe async API. 
It's better.\n\n## CLI\n\nIf installed with `npm install rimraf -g` it can be used as a global\ncommand `rimraf <path> [<path> ...]` which is useful for cross platform support.\n\n## mkdirp\n\nIf you need to create a directory recursively, check out\n[mkdirp](https://github.com/substack/node-mkdirp).\n",
67
+ "readmeFilename": "README.md",
67
68
  "repository": {
68
69
  "type": "git",
69
70
  "url": "git://github.com/isaacs/rimraf.git"
@@ -47,7 +47,8 @@
47
47
  ],
48
48
  "name": "semver",
49
49
  "optionalDependencies": {},
50
- "readme": "ERROR: No README data found!",
50
+ "readme": "semver(1) -- The semantic versioner for npm\n===========================================\n\n## Usage\n\n $ npm install semver\n\n semver.valid('1.2.3') // '1.2.3'\n semver.valid('a.b.c') // null\n semver.clean(' =v1.2.3 ') // '1.2.3'\n semver.satisfies('1.2.3', '1.x || >=2.5.0 || 5.0.0 - 7.2.3') // true\n semver.gt('1.2.3', '9.8.7') // false\n semver.lt('1.2.3', '9.8.7') // true\n\nAs a command-line utility:\n\n $ semver -h\n\n Usage: semver <version> [<version> [...]] [-r <range> | -i <inc> | --preid <identifier> | -l | -rv]\n Test if version(s) satisfy the supplied range(s), and sort them.\n\n Multiple versions or ranges may be supplied, unless increment\n option is specified. In that case, only a single version may\n be used, and it is incremented by the specified level\n\n Program exits successfully if any valid version satisfies\n all supplied ranges, and prints all satisfying versions.\n\n If no versions are valid, or ranges are not satisfied,\n then exits failure.\n\n Versions are printed in ascending order, so supplying\n multiple versions to the utility will just sort them.\n\n## Versions\n\nA \"version\" is described by the `v2.0.0` specification found at\n<http://semver.org/>.\n\nA leading `\"=\"` or `\"v\"` character is stripped off and ignored.\n\n## Ranges\n\nA `version range` is a set of `comparators` which specify versions\nthat satisfy the range.\n\nA `comparator` is composed of an `operator` and a `version`. The set\nof primitive `operators` is:\n\n* `<` Less than\n* `<=` Less than or equal to\n* `>` Greater than\n* `>=` Greater than or equal to\n* `=` Equal. If no operator is specified, then equality is assumed,\n so this operator is optional, but MAY be included.\n\nFor example, the comparator `>=1.2.7` would match the versions\n`1.2.7`, `1.2.8`, `2.5.3`, and `1.3.9`, but not the versions `1.2.6`\nor `1.1.0`.\n\nComparators can be joined by whitespace to form a `comparator set`,\nwhich is satisfied by the **intersection** of all of the comparators\nit includes.\n\nA range is composed of one or more comparator sets, joined by `||`. A\nversion matches a range if and only if every comparator in at least\none of the `||`-separated comparator sets is satisfied by the version.\n\nFor example, the range `>=1.2.7 <1.3.0` would match the versions\n`1.2.7`, `1.2.8`, and `1.2.99`, but not the versions `1.2.6`, `1.3.0`,\nor `1.1.0`.\n\nThe range `1.2.7 || >=1.2.9 <2.0.0` would match the versions `1.2.7`,\n`1.2.9`, and `1.4.6`, but not the versions `1.2.8` or `2.0.0`.\n\n### Prerelease Tags\n\nIf a version has a prerelease tag (for example, `1.2.3-alpha.3`) then\nit will only be allowed to satisfy comparator sets if at least one\ncomparator with the same `[major, minor, patch]` tuple also has a\nprerelease tag.\n\nFor example, the range `>1.2.3-alpha.3` would be allowed to match the\nversion `1.2.3-alpha.7`, but it would *not* be satisfied by\n`3.4.5-alpha.9`, even though `3.4.5-alpha.9` is technically \"greater\nthan\" `1.2.3-alpha.3` according to the SemVer sort rules. The version\nrange only accepts prerelease tags on the `1.2.3` version. The\nversion `3.4.5` *would* satisfy the range, because it does not have a\nprerelease flag, and `3.4.5` is greater than `1.2.3-alpha.7`.\n\nThe purpose for this behavior is twofold. 
First, prerelease versions\nfrequently are updated very quickly, and contain many breaking changes\nthat are (by the author's design) not yet fit for public consumption.\nTherefore, by default, they are excluded from range matching\nsemantics.\n\nSecond, a user who has opted into using a prerelease version has\nclearly indicated the intent to use *that specific* set of\nalpha/beta/rc versions. By including a prerelease tag in the range,\nthe user is indicating that they are aware of the risk. However, it\nis still not appropriate to assume that they have opted into taking a\nsimilar risk on the *next* set of prerelease versions.\n\n#### Prerelease Identifiers\n\nThe method `.inc` takes an additional `identifier` string argument that\nwill append the value of the string as a prerelease identifier:\n\n```javascript\n> semver.inc('1.2.3', 'prerelease', 'beta')\n'1.2.4-beta.0'\n```\n\ncommand-line example:\n\n```shell\n$ semver 1.2.3 -i prerelease --preid beta\n1.2.4-beta.0\n```\n\nWhich then can be used to increment further:\n\n```shell\n$ semver 1.2.4-beta.0 -i prerelease\n1.2.4-beta.1\n```\n\n### Advanced Range Syntax\n\nAdvanced range syntax desugars to primitive comparators in\ndeterministic ways.\n\nAdvanced ranges may be combined in the same way as primitive\ncomparators using white space or `||`.\n\n#### Hyphen Ranges `X.Y.Z - A.B.C`\n\nSpecifies an inclusive set.\n\n* `1.2.3 - 2.3.4` := `>=1.2.3 <=2.3.4`\n\nIf a partial version is provided as the first version in the inclusive\nrange, then the missing pieces are replaced with zeroes.\n\n* `1.2 - 2.3.4` := `>=1.2.0 <=2.3.4`\n\nIf a partial version is provided as the second version in the\ninclusive range, then all versions that start with the supplied parts\nof the tuple are accepted, but nothing that would be greater than the\nprovided tuple parts.\n\n* `1.2.3 - 2.3` := `>=1.2.3 <2.4.0`\n* `1.2.3 - 2` := `>=1.2.3 <3.0.0`\n\n#### X-Ranges `1.2.x` `1.X` `1.2.*` `*`\n\nAny of `X`, `x`, or `*` may be used to \"stand in\" for one of the\nnumeric values in the `[major, minor, patch]` tuple.\n\n* `*` := `>=0.0.0` (Any version satisfies)\n* `1.x` := `>=1.0.0 <2.0.0` (Matching major version)\n* `1.2.x` := `>=1.2.0 <1.3.0` (Matching major and minor versions)\n\nA partial version range is treated as an X-Range, so the special\ncharacter is in fact optional.\n\n* `\"\"` (empty string) := `*` := `>=0.0.0`\n* `1` := `1.x.x` := `>=1.0.0 <2.0.0`\n* `1.2` := `1.2.x` := `>=1.2.0 <1.3.0`\n\n#### Tilde Ranges `~1.2.3` `~1.2` `~1`\n\nAllows patch-level changes if a minor version is specified on the\ncomparator. Allows minor-level changes if not.\n\n* `~1.2.3` := `>=1.2.3 <1.(2+1).0` := `>=1.2.3 <1.3.0`\n* `~1.2` := `>=1.2.0 <1.(2+1).0` := `>=1.2.0 <1.3.0` (Same as `1.2.x`)\n* `~1` := `>=1.0.0 <(1+1).0.0` := `>=1.0.0 <2.0.0` (Same as `1.x`)\n* `~0.2.3` := `>=0.2.3 <0.(2+1).0` := `>=0.2.3 <0.3.0`\n* `~0.2` := `>=0.2.0 <0.(2+1).0` := `>=0.2.0 <0.3.0` (Same as `0.2.x`)\n* `~0` := `>=0.0.0 <(0+1).0.0` := `>=0.0.0 <1.0.0` (Same as `0.x`)\n* `~1.2.3-beta.2` := `>=1.2.3-beta.2 <1.3.0` Note that prereleases in\n the `1.2.3` version will be allowed, if they are greater than or\n equal to `beta.2`. So, `1.2.3-beta.4` would be allowed, but\n `1.2.4-beta.2` would not, because it is a prerelease of a\n different `[major, minor, patch]` tuple.\n\n#### Caret Ranges `^1.2.3` `^0.2.5` `^0.0.4`\n\nAllows changes that do not modify the left-most non-zero digit in the\n`[major, minor, patch]` tuple. 
In other words, this allows patch and\nminor updates for versions `1.0.0` and above, patch updates for\nversions `0.X >=0.1.0`, and *no* updates for versions `0.0.X`.\n\nMany authors treat a `0.x` version as if the `x` were the major\n\"breaking-change\" indicator.\n\nCaret ranges are ideal when an author may make breaking changes\nbetween `0.2.4` and `0.3.0` releases, which is a common practice.\nHowever, it presumes that there will *not* be breaking changes between\n`0.2.4` and `0.2.5`. It allows for changes that are presumed to be\nadditive (but non-breaking), according to commonly observed practices.\n\n* `^1.2.3` := `>=1.2.3 <2.0.0`\n* `^0.2.3` := `>=0.2.3 <0.3.0`\n* `^0.0.3` := `>=0.0.3 <0.0.4`\n* `^1.2.3-beta.2` := `>=1.2.3-beta.2 <2.0.0` Note that prereleases in\n the `1.2.3` version will be allowed, if they are greater than or\n equal to `beta.2`. So, `1.2.3-beta.4` would be allowed, but\n `1.2.4-beta.2` would not, because it is a prerelease of a\n different `[major, minor, patch]` tuple.\n* `^0.0.3-beta` := `>=0.0.3-beta <0.0.4` Note that prereleases in the\n `0.0.3` version *only* will be allowed, if they are greater than or\n equal to `beta`. So, `0.0.3-pr.2` would be allowed.\n\nWhen parsing caret ranges, a missing `patch` value desugars to the\nnumber `0`, but will allow flexibility within that value, even if the\nmajor and minor versions are both `0`.\n\n* `^1.2.x` := `>=1.2.0 <2.0.0`\n* `^0.0.x` := `>=0.0.0 <0.1.0`\n* `^0.0` := `>=0.0.0 <0.1.0`\n\nA missing `minor` and `patch` values will desugar to zero, but also\nallow flexibility within those values, even if the major version is\nzero.\n\n* `^1.x` := `>=1.0.0 <2.0.0`\n* `^0.x` := `>=0.0.0 <1.0.0`\n\n### Range Grammar\n\nPutting all this together, here is a Backus-Naur grammar for ranges,\nfor the benefit of parser authors:\n\n```bnf\nrange-set ::= range ( logical-or range ) *\nlogical-or ::= ( ' ' ) * '||' ( ' ' ) *\nrange ::= hyphen | simple ( ' ' simple ) * | ''\nhyphen ::= partial ' - ' partial\nsimple ::= primitive | partial | tilde | caret\nprimitive ::= ( '<' | '>' | '>=' | '<=' | '=' | ) partial\npartial ::= xr ( '.' xr ( '.' xr qualifier ? )? )?\nxr ::= 'x' | 'X' | '*' | nr\nnr ::= '0' | ['1'-'9']['0'-'9']+\ntilde ::= '~' partial\ncaret ::= '^' partial\nqualifier ::= ( '-' pre )? ( '+' build )?\npre ::= parts\nbuild ::= parts\nparts ::= part ( '.' part ) *\npart ::= nr | [-0-9A-Za-z]+\n```\n\n## Functions\n\nAll methods and classes take a final `loose` boolean argument that, if\ntrue, will be more forgiving about not-quite-valid semver strings.\nThe resulting output will always be 100% strict, of course.\n\nStrict-mode Comparators and Ranges will be strict about the SemVer\nstrings that they parse.\n\n* `valid(v)`: Return the parsed version, or null if it's not valid.\n* `inc(v, release)`: Return the version incremented by the release\n type (`major`, `premajor`, `minor`, `preminor`, `patch`,\n `prepatch`, or `prerelease`), or null if it's not valid\n * `premajor` in one call will bump the version up to the next major\n version and down to a prerelease of that major version.\n `preminor`, and `prepatch` work the same way.\n * If called from a non-prerelease version, the `prerelease` will work the\n same as `prepatch`. It increments the patch version, then makes a\n prerelease. 
If the input version is already a prerelease it simply\n increments it.\n* `major(v)`: Return the major version number.\n* `minor(v)`: Return the minor version number.\n* `patch(v)`: Return the patch version number.\n\n### Comparison\n\n* `gt(v1, v2)`: `v1 > v2`\n* `gte(v1, v2)`: `v1 >= v2`\n* `lt(v1, v2)`: `v1 < v2`\n* `lte(v1, v2)`: `v1 <= v2`\n* `eq(v1, v2)`: `v1 == v2` This is true if they're logically equivalent,\n even if they're not the exact same string. You already know how to\n compare strings.\n* `neq(v1, v2)`: `v1 != v2` The opposite of `eq`.\n* `cmp(v1, comparator, v2)`: Pass in a comparison string, and it'll call\n the corresponding function above. `\"===\"` and `\"!==\"` do simple\n string comparison, but are included for completeness. Throws if an\n invalid comparison string is provided.\n* `compare(v1, v2)`: Return `0` if `v1 == v2`, or `1` if `v1` is greater, or `-1` if\n `v2` is greater. Sorts in ascending order if passed to `Array.sort()`.\n* `rcompare(v1, v2)`: The reverse of compare. Sorts an array of versions\n in descending order when passed to `Array.sort()`.\n* `diff(v1, v2)`: Returns difference between two versions by the release type\n (`major`, `premajor`, `minor`, `preminor`, `patch`, `prepatch`, or `prerelease`),\n or null if the versions are the same.\n\n\n### Ranges\n\n* `validRange(range)`: Return the valid range or null if it's not valid\n* `satisfies(version, range)`: Return true if the version satisfies the\n range.\n* `maxSatisfying(versions, range)`: Return the highest version in the list\n that satisfies the range, or `null` if none of them do.\n* `gtr(version, range)`: Return `true` if version is greater than all the\n versions possible in the range.\n* `ltr(version, range)`: Return `true` if version is less than all the\n versions possible in the range.\n* `outside(version, range, hilo)`: Return true if the version is outside\n the bounds of the range in either the high or low direction. The\n `hilo` argument must be either the string `'>'` or `'<'`. (This is\n the function called by `gtr` and `ltr`.)\n\nNote that, since ranges may be non-contiguous, a version might not be\ngreater than a range, less than a range, *or* satisfy a range! For\nexample, the range `1.2 <1.2.9 || >2.0.0` would have a hole from `1.2.9`\nuntil `2.0.0`, so the version `1.2.10` would not be greater than the\nrange (because `2.0.1` satisfies, which is higher), nor less than the\nrange (since `1.2.8` satisfies, which is lower), and it also does not\nsatisfy the range.\n\nIf you want to know if a version satisfies or does not satisfy a\nrange, use the `satisfies(version, range)` function.\n",
51
+ "readmeFilename": "README.md",
51
52
  "repository": {
52
53
  "type": "git",
53
54
  "url": "git+https://github.com/npm/node-semver.git"
@@ -94,7 +94,8 @@
94
94
  "jsbn": ">=0.1.0 <0.2.0",
95
95
  "tweetnacl": ">=0.13.0 <1.0.0"
96
96
  },
97
- "readme": "ERROR: No README data found!",
97
+ "readme": "sshpk\n=========\n\nParse, convert, fingerprint and use SSH keys (both public and private) in pure\nnode -- no `ssh-keygen` or other external dependencies.\n\nSupports RSA, DSA, ECDSA (nistp-\\*) and ED25519 key types, in PEM (PKCS#1, \nPKCS#8) and OpenSSH formats.\n\nThis library has been extracted from\n[`node-http-signature`](https://github.com/joyent/node-http-signature)\n(work by [Mark Cavage](https://github.com/mcavage) and\n[Dave Eddy](https://github.com/bahamas10)) and\n[`node-ssh-fingerprint`](https://github.com/bahamas10/node-ssh-fingerprint)\n(work by Dave Eddy), with additions (including ECDSA support) by\n[Alex Wilson](https://github.com/arekinath).\n\nInstall\n-------\n\n```\nnpm install sshpk\n```\n\nExamples\n--------\n\n```js\nvar sshpk = require('sshpk');\n\nvar fs = require('fs');\n\n/* Read in an OpenSSH-format public key */\nvar keyPub = fs.readFileSync('id_rsa.pub');\nvar key = sshpk.parseKey(keyPub, 'ssh');\n\n/* Get metadata about the key */\nconsole.log('type => %s', key.type);\nconsole.log('size => %d bits', key.size);\nconsole.log('comment => %s', key.comment);\n\n/* Compute key fingerprints, in new OpenSSH (>6.7) format, and old MD5 */\nconsole.log('fingerprint => %s', key.fingerprint().toString());\nconsole.log('old-style fingerprint => %s', key.fingerprint('md5').toString());\n```\n\nExample output:\n\n```\ntype => rsa\nsize => 2048 bits\ncomment => foo@foo.com\nfingerprint => SHA256:PYC9kPVC6J873CSIbfp0LwYeczP/W4ffObNCuDJ1u5w\nold-style fingerprint => a0:c8:ad:6c:32:9a:32:fa:59:cc:a9:8c:0a:0d:6e:bd\n```\n\nMore examples: converting between formats:\n\n```js\n/* Read in a PEM public key */\nvar keyPem = fs.readFileSync('id_rsa.pem');\nvar key = sshpk.parseKey(keyPem, 'pem');\n\n/* Convert to PEM PKCS#8 public key format */\nvar pemBuf = key.toBuffer('pkcs8');\n\n/* Convert to SSH public key format (and return as a string) */\nvar sshKey = key.toString('ssh');\n```\n\nSigning and verifying:\n\n```js\n/* Read in an OpenSSH/PEM *private* key */\nvar keyPriv = fs.readFileSync('id_ecdsa');\nvar key = sshpk.parsePrivateKey(keyPriv, 'pem');\n\nvar data = 'some data';\n\n/* Sign some data with the key */\nvar s = key.createSign('sha1');\ns.update(data);\nvar signature = s.sign();\n\n/* Now load the public key (could also use just key.toPublic()) */\nvar keyPub = fs.readFileSync('id_ecdsa.pub');\nkey = sshpk.parseKey(keyPub, 'ssh');\n\n/* Make a crypto.Verifier with this key */\nvar v = key.createVerify('sha1');\nv.update(data);\nvar valid = v.verify(signature);\n/* => true! */\n```\n\nMatching fingerprints with keys:\n\n```js\nvar fp = sshpk.parseFingerprint('SHA256:PYC9kPVC6J873CSIbfp0LwYeczP/W4ffObNCuDJ1u5w');\n\nvar keys = [sshpk.parseKey(...), sshpk.parseKey(...), ...];\n\nkeys.forEach(function (key) {\n\tif (fp.matches(key))\n\t\tconsole.log('found it!');\n});\n```\n\nUsage\n-----\n\n## Public keys\n\n### `parseKey(data[, format = 'auto'[, name]])`\n\nParses a key from a given data format and returns a new `Key` object.\n\nParameters\n\n- `data` -- Either a Buffer or String, containing the key\n- `format` -- String name of format to use, valid options are:\n - `auto`: choose automatically from all below\n - `pem`: supports both PKCS#1 and PKCS#8\n - `ssh`: standard OpenSSH format,\n - `pkcs1`, `pkcs8`: variants of `pem`\n - `rfc4253`: raw OpenSSH wire format\n - `openssh`: new post-OpenSSH 6.5 internal format, produced by \n `ssh-keygen -o`\n- `name` -- Optional name for the key being parsed (eg. the filename that\n was opened). 
Used to generate Error messages\n\n### `Key.isKey(obj)`\n\nReturns `true` if the given object is a valid `Key` object created by a version\nof `sshpk` compatible with this one.\n\nParameters\n\n- `obj` -- Object to identify\n\n### `Key#type`\n\nString, the type of key. Valid options are `rsa`, `dsa`, `ecdsa`.\n\n### `Key#size`\n\nInteger, \"size\" of the key in bits. For RSA/DSA this is the size of the modulus;\nfor ECDSA this is the bit size of the curve in use.\n\n### `Key#comment`\n\nOptional string, a key comment used by some formats (eg the `ssh` format).\n\n### `Key#curve`\n\nOnly present if `this.type === 'ecdsa'`, string containing the name of the\nnamed curve used with this key. Possible values include `nistp256`, `nistp384`\nand `nistp521`.\n\n### `Key#toBuffer([format = 'ssh'])`\n\nConvert the key into a given data format and return the serialized key as\na Buffer.\n\nParameters\n\n- `format` -- String name of format to use, for valid options see `parseKey()`\n\n### `Key#toString([format = 'ssh])`\n\nSame as `this.toBuffer(format).toString()`.\n\n### `Key#fingerprint([algorithm = 'sha256'])`\n\nCreates a new `Fingerprint` object representing this Key's fingerprint.\n\nParameters\n\n- `algorithm` -- String name of hash algorithm to use, valid options are `md5`,\n `sha1`, `sha256`, `sha384`, `sha512`\n\n### `Key#createVerify([hashAlgorithm])`\n\nCreates a `crypto.Verifier` specialized to use this Key (and the correct public\nkey algorithm to match it). The returned Verifier has the same API as a regular\none, except that the `verify()` function takes only the target signature as an\nargument.\n\nParameters\n\n- `hashAlgorithm` -- optional String name of hash algorithm to use, any\n supported by OpenSSL are valid, usually including\n `sha1`, `sha256`.\n\n`v.verify(signature[, format])` Parameters\n\n- `signature` -- either a Signature object, or a Buffer or String\n- `format` -- optional String, name of format to interpret given String with.\n Not valid if `signature` is a Signature or Buffer.\n\n### `Key#createDiffieHellman()`\n### `Key#createDH()`\n\nCreates a Diffie-Hellman key exchange object initialized with this key and all\nnecessary parameters. This has the same API as a `crypto.DiffieHellman`\ninstance, except that functions take `Key` and `PrivateKey` objects as\narguments, and return them where indicated for.\n\nThis is only valid for keys belonging to a cryptosystem that supports DHE\nor a close analogue (i.e. `dsa`, `ecdsa` and `curve25519` keys). An attempt\nto call this function on other keys will yield an `Error`.\n\n## Private keys\n\n### `parsePrivateKey(data[, format = 'auto'[, name]])`\n\nParses a private key from a given data format and returns a new\n`PrivateKey` object.\n\nParameters\n\n- `data` -- Either a Buffer or String, containing the key\n- `format` -- String name of format to use, valid options are:\n - `auto`: choose automatically from all below\n - `pem`: supports both PKCS#1 and PKCS#8\n - `ssh`, `openssh`: new post-OpenSSH 6.5 internal format, produced by \n `ssh-keygen -o`\n - `pkcs1`, `pkcs8`: variants of `pem`\n - `rfc4253`: raw OpenSSH wire format\n- `name` -- Optional name for the key being parsed (eg. the filename that\n was opened). Used to generate Error messages\n\n### `PrivateKey.isPrivateKey(obj)`\n\nReturns `true` if the given object is a valid `PrivateKey` object created by a\nversion of `sshpk` compatible with this one.\n\nParameters\n\n- `obj` -- Object to identify\n\n### `PrivateKey#type`\n\nString, the type of key. 
Valid options are `rsa`, `dsa`, `ecdsa`.\n\n### `PrivateKey#size`\n\nInteger, \"size\" of the key in bits. For RSA/DSA this is the size of the modulus;\nfor ECDSA this is the bit size of the curve in use.\n\n### `PrivateKey#curve`\n\nOnly present if `this.type === 'ecdsa'`, string containing the name of the\nnamed curve used with this key. Possible values include `nistp256`, `nistp384`\nand `nistp521`.\n\n### `PrivateKey#toBuffer([format = 'pkcs1'])`\n\nConvert the key into a given data format and return the serialized key as\na Buffer.\n\nParameters\n\n- `format` -- String name of format to use, valid options are listed under \n `parsePrivateKey`. Note that ED25519 keys default to `openssh`\n format instead (as they have no `pkcs1` representation).\n\n### `PrivateKey#toString([format = 'pkcs1'])`\n\nSame as `this.toBuffer(format).toString()`.\n\n### `PrivateKey#toPublic()`\n\nExtract just the public part of this private key, and return it as a `Key`\nobject.\n\n### `PrivateKey#fingerprint([algorithm = 'sha256'])`\n\nSame as `this.toPublic().fingerprint()`.\n\n### `PrivateKey#createVerify([hashAlgorithm])`\n\nSame as `this.toPublic().createVerify()`.\n\n### `PrivateKey#createSign([hashAlgorithm])`\n\nCreates a `crypto.Sign` specialized to use this PrivateKey (and the correct\nkey algorithm to match it). The returned Signer has the same API as a regular\none, except that the `sign()` function takes no arguments, and returns a\n`Signature` object.\n\nParameters\n\n- `hashAlgorithm` -- optional String name of hash algorithm to use, any\n supported by OpenSSL are valid, usually including\n `sha1`, `sha256`.\n\n`v.sign()` Parameters\n\n- none\n\n### `PrivateKey#derive(newType)`\n\nDerives a related key of type `newType` from this key. Currently this is\nonly supported to change between `ed25519` and `curve25519` keys which are\nstored with the same private key (but usually distinct public keys in order\nto avoid degenerate keys that lead to a weak Diffie-Hellman exchange).\n\nParameters\n\n- `newType` -- String, type of key to derive, either `ed25519` or `curve25519`\n\n## Fingerprints\n\n### `parseFingerprint(fingerprint[, algorithms])`\n\nPre-parses a fingerprint, creating a `Fingerprint` object that can be used to\nquickly locate a key by using the `Fingerprint#matches` function.\n\nParameters\n\n- `fingerprint` -- String, the fingerprint value, in any supported format\n- `algorithms` -- Optional list of strings, names of hash algorithms to limit\n support to. If `fingerprint` uses a hash algorithm not on\n this list, throws `InvalidAlgorithmError`.\n\n### `Fingerprint.isFingerprint(obj)`\n\nReturns `true` if the given object is a valid `Fingerprint` object created by a\nversion of `sshpk` compatible with this one.\n\nParameters\n\n- `obj` -- Object to identify\n\n### `Fingerprint#toString([format])`\n\nReturns a fingerprint as a string, in the given format.\n\nParameters\n\n- `format` -- Optional String, format to use, valid options are `hex` and\n `base64`. If this `Fingerprint` uses the `md5` algorithm, the\n default format is `hex`. Otherwise, the default is `base64`.\n\n### `Fingerprint#matches(key)`\n\nVerifies whether or not this `Fingerprint` matches a given `Key`. This function\nuses double-hashing to avoid leaking timing information. Returns a boolean.\n\nParameters\n\n- `key` -- a `Key` object, the key to match this fingerprint against\n\n## Signatures\n\n### `parseSignature(signature, algorithm, format)`\n\nParses a signature in a given format, creating a `Signature` object. 
Useful\nfor converting between the SSH and ASN.1 (PKCS/OpenSSL) signature formats, and\nalso returned as output from `PrivateKey#createSign().sign()`.\n\nA Signature object can also be passed to a verifier produced by\n`Key#createVerify()` and it will automatically be converted internally into the\ncorrect format for verification.\n\nParameters\n\n- `signature` -- a Buffer (binary) or String (base64), data of the actual\n signature in the given format\n- `algorithm` -- a String, name of the algorithm to be used, possible values\n are `rsa`, `dsa`, `ecdsa`\n- `format` -- a String, either `asn1` or `ssh`\n\n### `Signature.isSignature(obj)`\n\nReturns `true` if the given object is a valid `Signature` object created by a\nversion of `sshpk` compatible with this one.\n\nParameters\n\n- `obj` -- Object to identify\n\n### `Signature#toBuffer([format = 'asn1'])`\n\nConverts a Signature to the given format and returns it as a Buffer.\n\nParameters\n\n- `format` -- a String, either `asn1` or `ssh`\n\n### `Signature#toString([format = 'asn1'])`\n\nSame as `this.toBuffer(format).toString('base64')`.\n\nErrors\n------\n\n### `InvalidAlgorithmError`\n\nThe specified algorithm is not valid, either because it is not supported, or\nbecause it was not included on a list of allowed algorithms.\n\nThrown by `Fingerprint.parse`, `Key#fingerprint`.\n\nProperties\n\n- `algorithm` -- the algorithm that could not be validated\n\n### `FingerprintFormatError`\n\nThe fingerprint string given could not be parsed as a supported fingerprint\nformat, or the specified fingerprint format is invalid.\n\nThrown by `Fingerprint.parse`, `Fingerprint#toString`.\n\nProperties\n\n- `fingerprint` -- if caused by a fingerprint, the string value given\n- `format` -- if caused by an invalid format specification, the string value given\n\n### `KeyParseError`\n\nThe key data given could not be parsed as a valid key.\n\nProperties\n\n- `keyName` -- `name` that was given to `Key#parse`\n- `format` -- the `format` that was trying to parse the key\n- `innerErr` -- the inner Error thrown by the format parser\n\nFriends of sshpk\n----------------\n\n * [`sshpk-agent`](https://github.com/arekinath/node-sshpk-agent) is a library\n for speaking the `ssh-agent` protocol from node.js, which uses `sshpk`\n",
98
+ "readmeFilename": "README.md",
98
99
  "repository": {
99
100
  "type": "git",
100
101
  "url": "git+https://github.com/arekinath/node-sshpk.git"
@@ -85,7 +85,8 @@
85
85
  ],
86
86
  "name": "strip-ansi",
87
87
  "optionalDependencies": {},
88
- "readme": "ERROR: No README data found!",
88
+ "readme": "# strip-ansi [![Build Status](https://travis-ci.org/chalk/strip-ansi.svg?branch=master)](https://travis-ci.org/chalk/strip-ansi)\n\n> Strip [ANSI escape codes](http://en.wikipedia.org/wiki/ANSI_escape_code)\n\n\n## Install\n\n```\n$ npm install --save strip-ansi\n```\n\n\n## Usage\n\n```js\nvar stripAnsi = require('strip-ansi');\n\nstripAnsi('\\u001b[4mcake\\u001b[0m');\n//=> 'cake'\n```\n\n\n## Related\n\n- [strip-ansi-cli](https://github.com/chalk/strip-ansi-cli) - CLI for this module\n- [has-ansi](https://github.com/chalk/has-ansi) - Check if a string has ANSI escape codes\n- [ansi-regex](https://github.com/chalk/ansi-regex) - Regular expression for matching ANSI escape codes\n- [chalk](https://github.com/chalk/chalk) - Terminal string styling done right\n\n\n## License\n\nMIT © [Sindre Sorhus](http://sindresorhus.com)\n",
89
+ "readmeFilename": "readme.md",
89
90
  "repository": {
90
91
  "type": "git",
91
92
  "url": "git+https://github.com/chalk/strip-ansi.git"
@@ -41,7 +41,7 @@
41
41
  "directories": {},
42
42
  "dist": {
43
43
  "shasum": "611b7e62eb2f27aeda64554f7a7fb48900c7e157",
44
- "tarball": "http://registry.npmjs.org/tar-pack/-/tar-pack-3.1.3.tgz"
44
+ "tarball": "https://registry.npmjs.org/tar-pack/-/tar-pack-3.1.3.tgz"
45
45
  },
46
46
  "gitHead": "556865a6b684bbf1fb44ae1bd92aaf251f3f4a94",
47
47
  "homepage": "https://github.com/ForbesLindesay/tar-pack#readme",
@@ -89,7 +89,8 @@
89
89
  ],
90
90
  "name": "tough-cookie",
91
91
  "optionalDependencies": {},
92
- "readme": "ERROR: No README data found!",
92
+ "readme": "[RFC6265](https://tools.ietf.org/html/rfc6265) Cookies and CookieJar for Node.js\n\n[![Build Status](https://travis-ci.org/SalesforceEng/tough-cookie.png?branch=master)](https://travis-ci.org/SalesforceEng/tough-cookie)\n\n[![NPM Stats](https://nodei.co/npm/tough-cookie.png?downloads=true&stars=true)](https://npmjs.org/package/tough-cookie)\n![NPM Downloads](https://nodei.co/npm-dl/tough-cookie.png?months=9)\n\n# Synopsis\n\n``` javascript\nvar tough = require('tough-cookie');\nvar Cookie = tough.Cookie;\nvar cookie = Cookie.parse(header);\ncookie.value = 'somethingdifferent';\nheader = cookie.toString();\n\nvar cookiejar = new tough.CookieJar();\ncookiejar.setCookie(cookie, 'http://currentdomain.example.com/path', cb);\n// ...\ncookiejar.getCookies('http://example.com/otherpath',function(err,cookies) {\n res.headers['cookie'] = cookies.join('; ');\n});\n```\n\n# Installation\n\nIt's _so_ easy!\n\n`npm install tough-cookie`\n\nWhy the name? NPM modules `cookie`, `cookies` and `cookiejar` were already taken.\n\n# API\n\n## tough\n\nFunctions on the module you get from `require('tough-cookie')`. All can be used as pure functions and don't need to be \"bound\".\n\n**Note**: prior to 1.0.x, several of these functions took a `strict` parameter. This has since been removed from the API as it was no longer necessary.\n\n### `parseDate(string)`\n\nParse a cookie date string into a `Date`. Parses according to RFC6265 Section 5.1.1, not `Date.parse()`.\n\n### `formatDate(date)`\n\nFormat a Date into a RFC1123 string (the RFC6265-recommended format).\n\n### `canonicalDomain(str)`\n\nTransforms a domain-name into a canonical domain-name. The canonical domain-name is a trimmed, lowercased, stripped-of-leading-dot and optionally punycode-encoded domain-name (Section 5.1.2 of RFC6265). For the most part, this function is idempotent (can be run again on its output without ill effects).\n\n### `domainMatch(str,domStr[,canonicalize=true])`\n\nAnswers \"does this real domain match the domain in a cookie?\". The `str` is the \"current\" domain-name and the `domStr` is the \"cookie\" domain-name. Matches according to RFC6265 Section 5.1.3, but it helps to think of it as a \"suffix match\".\n\nThe `canonicalize` parameter will run the other two paramters through `canonicalDomain` or not.\n\n### `defaultPath(path)`\n\nGiven a current request/response path, gives the Path apropriate for storing in a cookie. This is basically the \"directory\" of a \"file\" in the path, but is specified by Section 5.1.4 of the RFC.\n\nThe `path` parameter MUST be _only_ the pathname part of a URI (i.e. excludes the hostname, query, fragment, etc.). This is the `.pathname` property of node's `uri.parse()` output.\n\n### `pathMatch(reqPath,cookiePath)`\n\nAnswers \"does the request-path path-match a given cookie-path?\" as per RFC6265 Section 5.1.4. Returns a boolean.\n\nThis is essentially a prefix-match where `cookiePath` is a prefix of `reqPath`.\n\n### `parse(cookieString[, options])`\n\nalias for `Cookie.parse(cookieString[, options])`\n\n### `fromJSON(string)`\n\nalias for `Cookie.fromJSON(string)`\n\n### `getPublicSuffix(hostname)`\n\nReturns the public suffix of this hostname. The public suffix is the shortest domain-name upon which a cookie can be set. Returns `null` if the hostname cannot have cookies set for it.\n\nFor example: `www.example.com` and `www.subdomain.example.com` both have public suffix `example.com`.\n\nFor further information, see http://publicsuffix.org/. 
This module derives its list from that site.\n\n### `cookieCompare(a,b)`\n\nFor use with `.sort()`, sorts a list of cookies into the recommended order given in the RFC (Section 5.4 step 2). The sort algorithm is, in order of precedence:\n\n* Longest `.path`\n* oldest `.creation` (which has a 1ms precision, same as `Date`)\n* lowest `.creationIndex` (to get beyond the 1ms precision)\n\n``` javascript\nvar cookies = [ /* unsorted array of Cookie objects */ ];\ncookies = cookies.sort(cookieCompare);\n```\n\n**Note**: Since JavaScript's `Date` is limited to a 1ms precision, cookies within the same milisecond are entirely possible. This is especially true when using the `now` option to `.setCookie()`. The `.creationIndex` property is a per-process global counter, assigned during construction with `new Cookie()`. This preserves the spirit of the RFC sorting: older cookies go first. This works great for `MemoryCookieStore`, since `Set-Cookie` headers are parsed in order, but may not be so great for distributed systems. Sophisticated `Store`s may wish to set this to some other _logical clock_ such that if cookies A and B are created in the same millisecond, but cookie A is created before cookie B, then `A.creationIndex < B.creationIndex`. If you want to alter the global counter, which you probably _shouldn't_ do, it's stored in `Cookie.cookiesCreated`.\n\n### `permuteDomain(domain)`\n\nGenerates a list of all possible domains that `domainMatch()` the parameter. May be handy for implementing cookie stores.\n\n### `permutePath(path)`\n\nGenerates a list of all possible paths that `pathMatch()` the parameter. May be handy for implementing cookie stores.\n\n\n## Cookie\n\nExported via `tough.Cookie`.\n\n### `Cookie.parse(cookieString[, options])`\n\nParses a single Cookie or Set-Cookie HTTP header into a `Cookie` object. Returns `undefined` if the string can't be parsed.\n\nThe options parameter is not required and currently has only one property:\n\n * _loose_ - boolean - if `true` enable parsing of key-less cookies like `=abc` and `=`, which are not RFC-compliant.\n\nIf options is not an object, it is ignored, which means you can use `Array#map` with it.\n\nHere's how to process the Set-Cookie header(s) on a node HTTP/HTTPS response:\n\n``` javascript\nif (res.headers['set-cookie'] instanceof Array)\n cookies = res.headers['set-cookie'].map(Cookie.parse);\nelse\n cookies = [Cookie.parse(res.headers['set-cookie'])];\n```\n\n### Properties\n\nCookie object properties:\n\n * _key_ - string - the name or key of the cookie (default \"\")\n * _value_ - string - the value of the cookie (default \"\")\n * _expires_ - `Date` - if set, the `Expires=` attribute of the cookie (defaults to the string `\"Infinity\"`). See `setExpires()`\n * _maxAge_ - seconds - if set, the `Max-Age=` attribute _in seconds_ of the cookie. May also be set to strings `\"Infinity\"` and `\"-Infinity\"` for non-expiry and immediate-expiry, respectively. 
See `setMaxAge()`\n * _domain_ - string - the `Domain=` attribute of the cookie\n * _path_ - string - the `Path=` attribute of the cookie\n * _secure_ - boolean - the `Secure` cookie flag\n * _httpOnly_ - boolean - the `HttpOnly` cookie flag\n * _extensions_ - `Array` - any unrecognized cookie attributes as strings (even if they contain equal signs)\n * _creation_ - `Date` - when this cookie was constructed\n * _creationIndex_ - number - set at construction, used to provide greater sort precision (please see `cookieCompare(a,b)` for a full explanation)\n\nAfter a cookie has been passed through `CookieJar.setCookie()` it will have the following additional attributes:\n\n * _hostOnly_ - boolean - is this a host-only cookie (i.e. no Domain field was set, but was instead implied)\n * _pathIsDefault_ - boolean - if true, there was no Path field on the cookie and `defaultPath()` was used to derive one.\n * _creation_ - `Date` - **modified** from construction to when the cookie was added to the jar\n * _lastAccessed_ - `Date` - last time the cookie got accessed. Will affect cookie cleaning once implemented. Using `cookiejar.getCookies(...)` will update this attribute.\n\n### `Cookie([{properties}])`\n\nReceives an options object that can contain any of the above Cookie properties; defaults are used for any unspecified properties.\n\n### `.toString()`\n\nencode to a Set-Cookie header value. The Expires cookie field is set using `formatDate()`, but is omitted entirely if `.expires` is `Infinity`.\n\n### `.cookieString()`\n\nencode to a Cookie header value (i.e. the `.key` and `.value` properties joined with '=').\n\n### `.setExpires(String)`\n\nsets the expiry based on a date-string passed through `parseDate()`. If parseDate returns `null` (i.e. can't parse this date string), `.expires` is set to the string `\"Infinity\"`.\n\n### `.setMaxAge(number)`\n\nsets the maxAge in seconds. Coerces `-Infinity` to `\"-Infinity\"` and `Infinity` to `\"Infinity\"` so it JSON serializes correctly.\n\n### `.expiryTime([now=Date.now()])`\n\n### `.expiryDate([now=Date.now()])`\n\n`expiryTime()` computes the absolute unix-epoch milliseconds at which this cookie expires. `expiryDate()` works similarly, except it returns a `Date` object. Note that in both cases the `now` parameter should be milliseconds.\n\nMax-Age takes precedence over Expires (as per the RFC). The `.creation` attribute -- or, by default, the `now` parameter -- is used to offset the `.maxAge` attribute.\n\nIf Expires (`.expires`) is set, that's returned.\n\nOtherwise, `expiryTime()` returns `Infinity` and `expiryDate()` returns a `Date` object for \"Tue, 19 Jan 2038 03:14:07 GMT\" (the latest date that can be expressed by a 32-bit `time_t`; the common limit for most user-agents).\n\n### `.TTL([now=Date.now()])`\n\ncompute the TTL relative to `now` (milliseconds). The same precedence rules as for `expiryTime`/`expiryDate` apply.\n\nThe \"number\" `Infinity` is returned for cookies without an explicit expiry and `0` is returned if the cookie is expired. Otherwise a time-to-live in milliseconds is returned.\n\n### `.canonicalizedDomain()`\n\n### `.cdomain()`\n\nreturn the canonicalized `.domain` field. This is lower-cased and punycode (RFC3490) encoded if the domain has any non-ASCII characters.\n\n### `.toJSON()`\n\nFor convenience in using `JSON.stringify(cookie)`. 
Returns a plain-old `Object` that can be JSON-serialized.\n\nAny `Date` properties (i.e., `.expires`, `.creation`, and `.lastAccessed`) are exported in ISO format (`.toISOString()`).\n\n**NOTE**: Custom `Cookie` properties will be discarded. In tough-cookie 1.x, since there was no `.toJSON` method explicitly defined, all enumerable properties were captured. If you want a property to be serialized, add the property name to the `Cookie.serializableProperties` Array.\n\n### `Cookie.fromJSON(strOrObj)`\n\nDoes the reverse of `cookie.toJSON()`. If passed a string, will `JSON.parse()` that first.\n\nAny `Date` properties (i.e., `.expires`, `.creation`, and `.lastAccessed`) are parsed via `Date.parse()`, not the tough-cookie `parseDate`, since it's JavaScript/JSON-y timestamps being handled at this layer.\n\nReturns `null` upon JSON parsing error.\n\n### `.clone()`\n\nDoes a deep clone of this cookie, exactly implemented as `Cookie.fromJSON(cookie.toJSON())`.\n\n### `.validate()`\n\nStatus: *IN PROGRESS*. Works for a few things, but is by no means comprehensive.\n\nvalidates cookie attributes for semantic correctness. Useful for \"lint\" checking any Set-Cookie headers you generate. For now, it returns a boolean, but eventually could return a reason string -- you can future-proof with this construct:\n\n``` javascript\nif (cookie.validate() === true) {\n // it's tasty\n} else {\n // yuck!\n}\n```\n\n\n## CookieJar\n\nExported via `tough.CookieJar`.\n\n### `CookieJar([store],[options])`\n\nSimply use `new CookieJar()`. If you'd like to use a custom store, pass that to the constructor otherwise a `MemoryCookieStore` will be created and used.\n\nThe `options` object can be omitted and can have the following properties:\n\n * _rejectPublicSuffixes_ - boolean - default `true` - reject cookies with domains like \"com\" and \"co.uk\"\n * _looseMode_ - boolean - default `false` - accept malformed cookies like `bar` and `=bar`, which have an implied empty name.\n This is not in the standard, but is used sometimes on the web and is accepted by (most) browsers.\n\nSince eventually this module would like to support database/remote/etc. CookieJars, continuation passing style is used for CookieJar methods.\n\n### `.setCookie(cookieOrString, currentUrl, [{options},] cb(err,cookie))`\n\nAttempt to set the cookie in the cookie jar. If the operation fails, an error will be given to the callback `cb`, otherwise the cookie is passed through. The cookie will have updated `.creation`, `.lastAccessed` and `.hostOnly` properties.\n\nThe `options` object can be omitted and can have the following properties:\n\n * _http_ - boolean - default `true` - indicates if this is an HTTP or non-HTTP API. Affects HttpOnly cookies.\n * _secure_ - boolean - autodetect from url - indicates if this is a \"Secure\" API. If the currentUrl starts with `https:` or `wss:` then this is defaulted to `true`, otherwise `false`.\n * _now_ - Date - default `new Date()` - what to use for the creation/access time of cookies\n * _ignoreError_ - boolean - default `false` - silently ignore things like parse errors and invalid domains. `Store` errors aren't ignored by this option.\n\nAs per the RFC, the `.hostOnly` property is set if there was no \"Domain=\" parameter in the cookie string (or `.domain` was null on the Cookie object). The `.domain` property is set to the fully-qualified hostname of `currentUrl` in this case. 
Matching this cookie requires an exact hostname match (not a `domainMatch` as per usual).\n\n### `.setCookieSync(cookieOrString, currentUrl, [{options}])`\n\nSynchronous version of `setCookie`; only works with synchronous stores (e.g. the default `MemoryCookieStore`).\n\n### `.getCookies(currentUrl, [{options},] cb(err,cookies))`\n\nRetrieve the list of cookies that can be sent in a Cookie header for the current url.\n\nIf an error is encountered, that's passed as `err` to the callback, otherwise an `Array` of `Cookie` objects is passed. The array is sorted with `cookieCompare()` unless the `{sort:false}` option is given.\n\nThe `options` object can be omitted and can have the following properties:\n\n * _http_ - boolean - default `true` - indicates if this is an HTTP or non-HTTP API. Affects HttpOnly cookies.\n * _secure_ - boolean - autodetect from url - indicates if this is a \"Secure\" API. If the currentUrl starts with `https:` or `wss:` then this is defaulted to `true`, otherwise `false`.\n * _now_ - Date - default `new Date()` - what to use for the creation/access time of cookies\n * _expire_ - boolean - default `true` - perform expiry-time checking of cookies and asynchronously remove expired cookies from the store. Using `false` will return expired cookies and **not** remove them from the store (which is useful for replaying Set-Cookie headers, potentially).\n * _allPaths_ - boolean - default `false` - if `true`, do not scope cookies by path. The default uses RFC-compliant path scoping. **Note**: may not be supported by the underlying store (the default `MemoryCookieStore` supports it).\n\nThe `.lastAccessed` property of the returned cookies will have been updated.\n\n### `.getCookiesSync(currentUrl, [{options}])`\n\nSynchronous version of `getCookies`; only works with synchronous stores (e.g. the default `MemoryCookieStore`).\n\n### `.getCookieString(...)`\n\nAccepts the same options as `.getCookies()` but passes a string suitable for a Cookie header rather than an array to the callback. Simply maps the `Cookie` array via `.cookieString()`.\n\n### `.getCookieStringSync(...)`\n\nSynchronous version of `getCookieString`; only works with synchronous stores (e.g. the default `MemoryCookieStore`).\n\n### `.getSetCookieStrings(...)`\n\nReturns an array of strings suitable for **Set-Cookie** headers. Accepts the same options as `.getCookies()`. Simply maps the cookie array via `.toString()`.\n\n### `.getSetCookieStringsSync(...)`\n\nSynchronous version of `getSetCookieStrings`; only works with synchronous stores (e.g. the default `MemoryCookieStore`).\n\n### `.serialize(cb(err,serializedObject))`\n\nSerialize the Jar if the underlying store supports `.getAllCookies`.\n\n**NOTE**: Custom `Cookie` properties will be discarded. If you want a property to be serialized, add the property name to the `Cookie.serializableProperties` Array.\n\nSee [Serialization Format].\n\n### `.serializeSync()`\n\nSync version of .serialize\n\n### `.toJSON()`\n\nAlias of .serializeSync() for the convenience of `JSON.stringify(cookiejar)`.\n\n### `CookieJar.deserialize(serialized, [store], cb(err,object))`\n\nA new Jar is created and the serialized Cookies are added to the underlying store. Each `Cookie` is added via `store.putCookie` in the order in which they appear in the serialization.\n\nThe `store` argument is optional, but should be an instance of `Store`. 
By default, a new instance of `MemoryCookieStore` is created.\n\nAs a convenience, if `serialized` is a string, it is passed through `JSON.parse` first. If that throws an error, this is passed to the callback.\n\n### `CookieJar.deserializeSync(serialized, [store])`\n\nSync version of `.deserialize`. _Note_ that the `store` must be synchronous for this to work.\n\n### `CookieJar.fromJSON(string)`\n\nAlias of `.deserializeSync` to provide consistency with `Cookie.fromJSON()`.\n\n### `.clone([store,]cb(err,newJar))`\n\nProduces a deep clone of this jar. Modifications to the original won't affect the clone, and vice versa.\n\nThe `store` argument is optional, but should be an instance of `Store`. By default, a new instance of `MemoryCookieStore` is created. Transferring between store types is supported so long as the source implements `.getAllCookies()` and the destination implements `.putCookie()`.\n\n### `.cloneSync([store])`\n\nSynchronous version of `.clone`, returning a new `CookieJar` instance.\n\nThe `store` argument is optional, but must be a _synchronous_ `Store` instance if specified. If not passed, a new instance of `MemoryCookieStore` is used.\n\nThe _source_ and _destination_ must both be synchronous `Store`s. If one or both stores are asynchronous, use `.clone` instead. Recall that `MemoryCookieStore` supports both synchronous and asynchronous API calls.\n\n## Store\n\nBase class for CookieJar stores. Available as `tough.Store`.\n\n## Store API\n\nThe storage model for each `CookieJar` instance can be replaced with a custom implementation. The default is `MemoryCookieStore`, which can be found in the `lib/memstore.js` file. The API uses continuation-passing style to allow for asynchronous stores.\n\nStores should inherit from the base `Store` class, which is available as `require('tough-cookie').Store`.\n\nStores are asynchronous by default, but if `store.synchronous` is set to `true`, then the `*Sync` methods of the containing `CookieJar` can be used (the store's methods must still implement the continuation-passing-style API).\n\nAll `domain` parameters will have been normalized before calling.\n\nThe Cookie store must have all of the following methods.\n\n### `store.findCookie(domain, path, key, cb(err,cookie))`\n\nRetrieve a cookie with the given domain, path and key (a.k.a. name). The RFC maintains that exactly one of these cookies should exist in a store. If the store is using versioning, this means that the latest/newest such cookie should be returned.\n\nCallback takes an error and the resulting `Cookie` object. If no cookie is found then `null` MUST be passed instead (i.e. not an error).\n\n### `store.findCookies(domain, path, cb(err,cookies))`\n\nLocates cookies matching the given domain and path. This is most often called in the context of `cookiejar.getCookies()` above.\n\nIf no cookies are found, the callback MUST be passed an empty array.\n\nThe resulting list will be checked for applicability to the current request according to the RFC (domain-match, path-match, http-only-flag, secure-flag, expiry, etc.), so it's OK to use an optimistic search algorithm when implementing this method. However, the search algorithm used SHOULD try to find cookies that `domainMatch()` the domain and `pathMatch()` the path in order to limit the amount of checking that needs to be done.\n\nAs of version 0.9.12, the `allPaths` option to `cookiejar.getCookies()` above will cause the path here to be `null`. If the path is `null`, path-matching MUST NOT be performed (i.e. 
domain-matching only).\n\n### `store.putCookie(cookie, cb(err))`\n\nAdds a new cookie to the store. The implementation SHOULD replace any existing cookie with the same `.domain`, `.path`, and `.key` properties -- depending on the nature of the implementation, it's possible that between the call to `fetchCookie` and `putCookie` that a duplicate `putCookie` can occur.\n\nThe `cookie` object MUST NOT be modified; the caller will have already updated the `.creation` and `.lastAccessed` properties.\n\nPass an error if the cookie cannot be stored.\n\n### `store.updateCookie(oldCookie, newCookie, cb(err))`\n\nUpdate an existing cookie. The implementation MUST update the `.value` for a cookie with the same `domain`, `.path` and `.key`. The implementation SHOULD check that the old value in the store is equivalent to `oldCookie` - how the conflict is resolved is up to the store.\n\nThe `.lastAccessed` property will always be different between the two objects (to the precision possible via JavaScript's clock). Both `.creation` and `.creationIndex` are guaranteed to be the same. Stores MAY ignore or defer the `.lastAccessed` change at the cost of affecting how cookies are selected for automatic deletion (e.g., least-recently-used, which is up to the store to implement).\n\nStores may wish to optimize changing the `.value` of the cookie in the store versus storing a new cookie. If the implementation doesn't define this method a stub that calls `putCookie(newCookie,cb)` will be added to the store object.\n\nThe `newCookie` and `oldCookie` objects MUST NOT be modified.\n\nPass an error if the newCookie cannot be stored.\n\n### `store.removeCookie(domain, path, key, cb(err))`\n\nRemove a cookie from the store (see notes on `findCookie` about the uniqueness constraint).\n\nThe implementation MUST NOT pass an error if the cookie doesn't exist; only pass an error due to the failure to remove an existing cookie.\n\n### `store.removeCookies(domain, path, cb(err))`\n\nRemoves matching cookies from the store. The `path` parameter is optional, and if missing means all paths in a domain should be removed.\n\nPass an error ONLY if removing any existing cookies failed.\n\n### `store.getAllCookies(cb(err, cookies))`\n\nProduces an `Array` of all cookies during `jar.serialize()`. The items in the array can be true `Cookie` objects or generic `Object`s with the [Serialization Format] data structure.\n\nCookies SHOULD be returned in creation order to preserve sorting via `compareCookies()`. For reference, `MemoryCookieStore` will sort by `.creationIndex` since it uses true `Cookie` objects internally. If you don't return the cookies in creation order, they'll still be sorted by creation time, but this only has a precision of 1ms. See `compareCookies` for more detail.\n\nPass an error if retrieval fails.\n\n## MemoryCookieStore\n\nInherits from `Store`.\n\nA just-in-memory CookieJar synchronous store implementation, used by default. Despite being a synchronous implementation, it's usable with both the synchronous and asynchronous forms of the `CookieJar` API.\n\n# Serialization Format\n\n**NOTE**: if you want to have custom `Cookie` properties serialized, add the property name to `Cookie.serializableProperties`.\n\n```js\n {\n // The version of tough-cookie that serialized this jar.\n version: 'tough-cookie@1.x.y',\n\n // add the store type, to make humans happy:\n storeType: 'MemoryCookieStore',\n\n // CookieJar configuration:\n rejectPublicSuffixes: true,\n // ... 
future items go here\n\n // Gets filled from jar.store.getAllCookies():\n cookies: [\n {\n key: 'string',\n value: 'string',\n // ...\n /* other Cookie.serializableProperties go here */\n }\n ]\n }\n```\n\n# Copyright and License\n\n(tl;dr: BSD-3-Clause with some MPL/2.0)\n\n```text\n Copyright (c) 2015, Salesforce.com, Inc.\n All rights reserved.\n\n Redistribution and use in source and binary forms, with or without\n modification, are permitted provided that the following conditions are met:\n\n 1. Redistributions of source code must retain the above copyright notice,\n this list of conditions and the following disclaimer.\n\n 2. Redistributions in binary form must reproduce the above copyright notice,\n this list of conditions and the following disclaimer in the documentation\n and/or other materials provided with the distribution.\n\n 3. Neither the name of Salesforce.com nor the names of its contributors may\n be used to endorse or promote products derived from this software without\n specific prior written permission.\n\n THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS \"AS IS\"\n AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE\n IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE\n ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE\n LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR\n CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF\n SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS\n INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN\n CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)\n ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE\n POSSIBILITY OF SUCH DAMAGE.\n```\n\nPortions may be licensed under different licenses (in particular `public_suffix_list.dat` is MPL/2.0); please read that file and the LICENSE file for full details.\n",
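The readme quoted above documents the `Cookie`/`CookieJar` API only in prose, so a short sketch of the round trip it describes (parse a Set-Cookie header, store it in a jar, read it back, serialize the jar) may help; the header value and URLs below are made-up examples, and the default `MemoryCookieStore` is assumed.

```javascript
var tough = require('tough-cookie');

// Parse a (hypothetical) Set-Cookie header into a Cookie object.
var cookie = tough.Cookie.parse('sid=abc123; Domain=example.com; Path=/; Secure');

var jar = new tough.CookieJar(); // backed by MemoryCookieStore by default

jar.setCookie(cookie, 'https://www.example.com/', function (err) {
  if (err) throw err;
  jar.getCookies('https://www.example.com/account', function (err, cookies) {
    if (err) throw err;
    // Build a Cookie header value, as in the synopsis above.
    console.log(cookies.map(function (c) { return c.cookieString(); }).join('; '));
    // MemoryCookieStore implements getAllCookies, so the jar can be serialized.
    console.log(JSON.stringify(jar.serializeSync()));
  });
});
```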
+ "readmeFilename": "README.md",
  "repository": {
  "type": "git",
  "url": "git://github.com/SalesforceEng/tough-cookie.git"
@@ -26,7 +26,6 @@ Documentation
  * [Hashing](#hashing)
  * [Random bytes generation](#random-bytes-generation)
  * [Constant-time comparison](#constant-time-comparison)
- * [Examples](#examples)
  * [System requirements](#system-requirements)
  * [Development and testing](#development-and-testing)
  * [Contributors](#contributors)
@@ -66,11 +65,20 @@ or [download source code](https://github.com/dchest/tweetnacl-js/releases).
 
 
  Usage
- ------
+ -----
 
  All API functions accept and return bytes as `Uint8Array`s. If you need to
- encode or decode strings, use functions from <https://github.com/dchest/tweetnacl-util-js>
- or one of the more robust codec packages.
+ encode or decode strings, use functions from
+ <https://github.com/dchest/tweetnacl-util-js> or one of the more robust codec
+ packages.
+
+ In Node.js v4 and later `Buffer` objects are backed by `Uint8Array`s, so you
+ can freely pass them to TweetNaCl.js functions as arguments. The returned
+ objects are still `Uint8Array`s, so if you need `Buffer`s, you'll have to
+ convert them manually; make sure to convert using copying: `new Buffer(array)`,
+ instead of sharing: `new Buffer(array.buffer)`, because some functions return
+ subarrays of their buffers.
+
 
  ### Public-key authenticated encryption (box)
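The Usage paragraph added above notes that TweetNaCl.js may return `Uint8Array`s that are subarrays of a larger buffer, so conversion to `Buffer` should copy rather than share memory. A short sketch of the difference, assuming Node.js v4 or later:

```javascript
var nacl = require('tweetnacl');

var key = nacl.randomBytes(nacl.secretbox.keyLength); // Uint8Array

var copied = new Buffer(key);        // copies just these bytes -- safe
var shared = new Buffer(key.buffer); // shares the whole underlying ArrayBuffer,
                                     // which may contain more than this subarray
```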